diff --git a/.gn b/.gn index 09f1b650a1..a2e2a90425 100644 --- a/.gn +++ b/.gn @@ -21,7 +21,6 @@ secondary_source = "//build/secondary/" # their includes checked for proper dependencies when you run either # "gn check" or "gn gen --check". check_targets = [ - ":webrtc_common", "//api/*", "//audio/*", "//backup/*", @@ -72,5 +71,7 @@ default_args = { # for unittests, it can be disabled (see third_party/libyuv/BUILD.gn) libyuv_use_gflags = false + enable_libaom = true + gtest_enable_absl_printers = true } diff --git a/.vpython b/.vpython index fb75db51d8..e8dc8e26a1 100644 --- a/.vpython +++ b/.vpython @@ -66,3 +66,7 @@ wheel: < name: "infra/python/wheels/mock-py2_py3" version: "version:2.0.0" > +wheel: < + name: "infra/python/wheels/requests-py2_py3" + version: "version:2.13.0" +> diff --git a/AUTHORS b/AUTHORS index c9893aef5f..74b1faef35 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1,122 +1,148 @@ -# Names should be added to this file like so: -# Name or Organization +# Names should be added to this file with this pattern: +# +# For individuals: +# Name +# +# For organizations: +# Organization +# +# See python fnmatch module documentation for more information. +# +# Please keep the list sorted. +# BEGIN individuals section. Adam Fedor Akshay Shah Alexander Brauckmann Alexandre Gouaillard +Alex Henrie Andrew MacDonald Andrey Efremov +Andrew Johnson Anil Kumar Ben Strong Bob Withers Bridger Maxwell -Chris Tserng Christophe Dumez +Chris Tserng Cody Barnes Colin Plumb Cyril Lashkevich +CZ Theng +Danail Kirov David Porter Dax Booysen -Danail Kirov +Dennis Angelo +Dharmesh Chauhan Dirk-Jan C. Binnema Dmitry Lizin +Eike Rathke Eric Rescorla, RTFM Inc. Frederik Riedel, Frogg GmbH Giji Gangadharan Graham Yoakum Gustavo Garcia +Hans Knoechel Hugues Ekra Jake Hilton James H. 
Brown +Jan Grulich Jan Kalab Jens Nielsen Jiawei Ou Jie Mao +Jiwon Kim Jose Antonio Olivera Ortega +Kiran Thind +Korniltsev Anatoly +Lennart Grahl Luke Weber Maksim Khobat Mallikarjuna Rao V Manish Jethani Martin Storsjo Matthias Liebig +Maxim Pavlov Maxim Potapov Michael Iedema +Michel Promonet +Miguel Paris Mike Gilbert +Min Wang Mo Zanaty Pali Rohar Paul Kapustin -Philipp Hancke Peng Yu +Philipp Hancke +Piasy Xu Rafael Lopez Diez Ralph Giles +Raman Budny +Ramprakash Jelari Riku Voipio Robert Bares Robert Nagy Ryan Yoakum -Satender Saroha Sarah Thompson +Satender Saroha Saul Kravitz +Sergio Garcia Murillo Silviu Caragea Stefan Gula +Stephan Hartmann Steve Reid Tarun Chawla +Todd Wong +Tomas Popela +Trevor Hayes Uladzislau Susha -Vladimir Beloborodov Vicken Simonian Victor Costan +Vladimir Beloborodov Xiaohong Xu Xiaolei Yu Yura Yaroshevich Yuriy Pavlyshak -Hans Knoechel -Korniltsev Anatoly -Todd Wong -Sergio Garcia Murillo -Maxim Pavlov Yusuke Suzuki -Piasy Xu -Tomas Popela -Jan Grulich -Jiwon Kim -Eike Rathke -Michel Promonet -Min Wang -Ramprakash Jelari -CZ Theng -Miguel Paris -Raman Budny +# END individuals section. -&yet LLC <*@andyet.com> +# BEGIN organizations section. +8x8 Inc. <*@8x8.com> +8x8 Inc. <*@sip-communicator.org> Agora IO <*@agora.io> ARM Holdings <*@arm.com> BroadSoft Inc. <*@broadsoft.com> +CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io> Facebook Inc. <*@fb.com> Google Inc. <*@google.com> +Highfive, Inc. <*@highfive.com> HyperConnect Inc. <*@hpcnt.com> -Life On Air Inc. <*@lifeonair.com> Intel Corporation <*@intel.com> +Life On Air Inc. <*@lifeonair.com> Microsoft Corporation <*@microsoft.com> MIPS Technologies <*@mips.com> Mozilla Foundation <*@mozilla.com> +Netgem S.A. <*@netgem.com> NVIDIA Corporation <*@nvidia.com> Opera Software ASA <*@opera.com> Optical Tone Ltd <*@opticaltone.com> Pengutronix e.K. <*@pengutronix.de> RingCentral, Inc. 
<*@ringcentral.com> +Signal Messenger, LLC <*@signal.org> Sinch AB <*@sinch.com> struktur AG <*@struktur.de> Telenor Digital AS <*@telenor.com> Temasys Communications <*@temasys.io> The Chromium Authors <*@chromium.org> The WebRTC Authors <*@webrtc.org> +Threema GmbH <*@threema.ch> +Tuple, LLC <*@tuple.app> Twilio, Inc. <*@twilio.com> +Vewd Software AS <*@vewd.com> +Videona Socialmedia <*@videona.com> Videxio AS <*@videxio.com> Vidyo, Inc. <*@vidyo.com> Vonage Holdings Corp. <*@vonage.com> Wire Swiss GmbH <*@wire.com> -Vewd Software AS <*@vewd.com> -Highfive, Inc. <*@highfive.com> -CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io> -Tuple, LLC <*@tuple.app> -Videona Socialmedia <*@videona.com> +&yet LLC <*@andyet.com> +# END organizations section. diff --git a/BUILD.gn b/BUILD.gn index 24dc06735c..b2cdb4e781 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -40,6 +40,7 @@ if (!build_with_chromium) { ":rtc_unittests", ":slow_tests", ":video_engine_tests", + ":voip_unittests", ":webrtc_nonparallel_tests", ":webrtc_perf_tests", "common_audio:common_audio_unittests", @@ -56,13 +57,13 @@ if (!build_with_chromium) { "pc:peerconnection_unittests", "pc:rtc_pc_unittests", "rtc_tools:rtp_generator", + "rtc_tools:video_replay", "stats:rtc_stats_unittests", "system_wrappers:system_wrappers_unittests", "test", "video:screenshare_loopback", "video:sv_loopback", "video:video_loopback", - "video:video_replay", ] if (!is_asan) { # Do not build :webrtc_lib_link_test because lld complains on some OS @@ -128,6 +129,10 @@ config("common_inherited_config") { defines += [ "RTC_DISABLE_CHECK_MSG" ] } + if (rtc_enable_avx2) { + defines += [ "WEBRTC_ENABLE_AVX2" ] + } + # Some tests need to declare their own trace event handlers. 
If this define is # not set, the first time TRACE_EVENT_* is called it will store the return # value for the current handler in an static variable, so that subsequent @@ -168,7 +173,7 @@ config("common_inherited_config") { "WEBRTC_IOS", ] } - if (is_linux) { + if (is_linux || is_chromeos) { defines += [ "WEBRTC_LINUX" ] } if (is_mac) { @@ -203,6 +208,13 @@ config("common_inherited_config") { if (is_ubsan) { cflags += [ "-fsanitize=float-cast-overflow" ] } + + if (!rtc_use_h265) { + defines += [ "DISABLE_H265" ] + } + if (!rtc_use_recorder) { + defines += [ "DISABLE_RECORDER" ] + } } # TODO(bugs.webrtc.org/9693): Remove the possibility to suppress this warning @@ -264,6 +276,10 @@ config("common_config") { defines += [ "WEBRTC_USE_H264" ] } + if (rtc_use_absl_mutex) { + defines += [ "WEBRTC_ABSL_MUTEX" ] + } + if (rtc_disable_logging) { defines += [ "RTC_DISABLE_LOGGING" ] } @@ -276,6 +292,14 @@ config("common_config") { defines += [ "RTC_DISABLE_METRICS" ] } + if (rtc_exclude_transient_suppressor) { + defines += [ "WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR" ] + } + + if (rtc_exclude_audio_processing_module) { + defines += [ "WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE" ] + } + cflags = [] if (build_with_chromium) { @@ -401,7 +425,7 @@ config("common_config") { } config("common_objc") { - libs = [ "Foundation.framework" ] + frameworks = [ "Foundation.framework" ] if (rtc_use_metal_rendering) { defines = [ "RTC_SUPPORTS_METAL" ] @@ -423,7 +447,6 @@ if (!build_with_chromium) { defines = [] deps = [ - ":webrtc_common", "api:create_peerconnection_factory", "api:libjingle_peerconnection_api", "api:rtc_error", @@ -484,7 +507,7 @@ if (!build_with_chromium) { } } - if (rtc_include_tests) { + if (rtc_include_tests && !is_asan) { rtc_executable("webrtc_lib_link_test") { testonly = true @@ -498,15 +521,6 @@ if (!build_with_chromium) { } } -rtc_source_set("webrtc_common") { - # Client code SHOULD NOT USE THIS TARGET, but for now it needs to be public - # because there exists client code 
that uses it. - # TODO(bugs.webrtc.org/9808): Move to private visibility as soon as that - # client code gets updated. - visibility = [ "*" ] - sources = [ "common_types.h" ] -} - if (use_libfuzzer || use_afl) { # This target is only here for gn to discover fuzzer build targets under # webrtc/test/fuzzers/. @@ -521,17 +535,18 @@ if (rtc_include_tests) { testonly = true deps = [ - ":webrtc_common", "api:compile_all_headers", "api:rtc_api_unittests", "api/audio/test:audio_api_unittests", "api/audio_codecs/test:audio_codecs_api_unittests", + "api/numerics:numerics_unittests", "api/transport:stun_unittest", "api/video/test:rtc_api_video_unittests", "api/video_codecs/test:video_codecs_api_unittests", "call:fake_network_pipe_unittests", "p2p:libstunprober_unittests", "p2p:rtc_p2p_unittests", + "rtc_base:callback_list_unittests", "rtc_base:rtc_base_approved_unittests", "rtc_base:rtc_base_unittests", "rtc_base:rtc_json_unittests", @@ -539,9 +554,11 @@ if (rtc_include_tests) { "rtc_base:rtc_operations_chain_unittests", "rtc_base:rtc_task_queue_unittests", "rtc_base:sigslot_unittest", + "rtc_base:untyped_function_unittest", "rtc_base:weak_ptr_unittests", "rtc_base/experiments:experiments_unittests", "rtc_base/synchronization:sequence_checker_unittests", + "rtc_base/task_utils:pending_task_safety_flag_unittests", "rtc_base/task_utils:to_queued_task_unittests", "sdk:sdk_tests", "test:rtp_test_utils", @@ -570,6 +587,14 @@ if (rtc_include_tests) { } } + rtc_test("benchmarks") { + testonly = true + deps = [ + "rtc_base/synchronization:mutex_benchmark", + "test:benchmark_main", + ] + } + # This runs tests that must run in real time and therefore can take some # time to execute. They are in a separate executable to avoid making the # regular unittest suite too slow to run frequently. 
@@ -608,6 +633,7 @@ if (rtc_include_tests) { "test:test_main", "test:video_test_common", "video:video_tests", + "video/adaptation:video_adaptation_tests", ] data = video_engine_tests_resources if (is_android) { @@ -672,6 +698,18 @@ if (rtc_include_tests) { shard_timeout = 900 } } + + rtc_test("voip_unittests") { + testonly = true + deps = [ + "api/voip:voip_engine_factory_unittests", + "audio/voip/test:audio_channel_unittests", + "audio/voip/test:audio_egress_unittests", + "audio/voip/test:audio_ingress_unittests", + "audio/voip/test:voip_core_unittests", + "test:test_main", + ] + } } # ---- Poisons ---- diff --git a/DEPS b/DEPS index a6366c29b1..e1f4f2185b 100644 --- a/DEPS +++ b/DEPS @@ -1,41 +1,51 @@ # This file contains dependencies for WebRTC. +gclient_gn_args_file = 'src/build/config/gclient_args.gni' +gclient_gn_args = [ + 'mac_xcode_version', +] + vars = { # By default, we should check out everything needed to run on the main # chromium waterfalls. More info at: crbug.com/570091. 'checkout_configuration': 'default', 'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"', - 'chromium_revision': 'dd5a54c29b5eaa6452755a9373ef8d026cc10fb4', + 'chromium_revision': '56e883537df53a3b3cbc39c264799bedb695b5f9', + + # This can be overridden, e.g. with custom_vars, to download a nonstandard + # Xcode version in build/mac_toolchain.py + # instead of downloading the prebuilt pinned revision. + 'mac_xcode_version': 'default', } deps = { # TODO(kjellander): Move this to be Android-only once the libevent dependency # in base/third_party/libevent is solved. 
'src/base': - 'https://chromium.googlesource.com/chromium/src/base@ce806f00e6ef611bd0666aca2cd776f2ef22dbc4', + 'https://chromium.googlesource.com/chromium/src/base@cdccd610777e3e49936c03a35b3ef7cb7d8a6794', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@2f17606c25956e800b6c4670c294a03620e78551', + 'https://chromium.googlesource.com/chromium/src/build@6b0abd7198f91211eb4b6ad65636cce6b2c0308c', 'src/buildtools': - 'https://chromium.googlesource.com/chromium/src/buildtools@afc5b798c72905e85f9991152be878714c579958', - # Gradle 4.3-rc4. Used for testing Android Studio project generation for WebRTC. + 'https://chromium.googlesource.com/chromium/src/buildtools@6302c1175607a436e18947a5abe9df2209e845fc', + # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. 'src/examples/androidtests/third_party/gradle': { - 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@89af43c4d0506f69980f00dde78c97b2f81437f8', + 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@f2d1fb54a951d8b11d25748e4711bec8d128d7e3', 'condition': 'checkout_android', }, 'src/ios': { - 'url': 'https://chromium.googlesource.com/chromium/src/ios@31829ea7dd800288df28be3dd3061fbf4bd619b3', + 'url': 'https://chromium.googlesource.com/chromium/src/ios@5720c93651b31160238c133f69e7a05366360e19', 'condition': 'checkout_ios', }, 'src/testing': - 'https://chromium.googlesource.com/chromium/src/testing@403d2930e541c7a69af88869efd8c27a40b04534', + 'https://chromium.googlesource.com/chromium/src/testing@d54c4bf27e02291ec35e4c040e47c09e02d8a452', 'src/third_party': - 'https://chromium.googlesource.com/chromium/src/third_party@18f4ad54fc72cd2c408774dde375e25482b62b4b', + 'https://chromium.googlesource.com/chromium/src/third_party@346e496c9b3e7ac74b3f0c8ac8980fe834d5fee8', 'src/buildtools/linux64': { 'packages': [ { 'package': 'gn/gn/linux-amd64', - 'version': 'git_revision:97cc440d84f050f99ff0161f9414bfa2ffa38f65', + 
'version': 'git_revision:53d92014bf94c3893886470a1c7c1289f8818db0', } ], 'dep_type': 'cipd', @@ -45,7 +55,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/mac-amd64', - 'version': 'git_revision:97cc440d84f050f99ff0161f9414bfa2ffa38f65', + 'version': 'git_revision:53d92014bf94c3893886470a1c7c1289f8818db0', } ], 'dep_type': 'cipd', @@ -55,7 +65,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/windows-amd64', - 'version': 'git_revision:97cc440d84f050f99ff0161f9414bfa2ffa38f65', + 'version': 'git_revision:53d92014bf94c3893886470a1c7c1289f8818db0', } ], 'dep_type': 'cipd', @@ -65,11 +75,11 @@ deps = { 'src/buildtools/clang_format/script': 'https://chromium.googlesource.com/chromium/llvm-project/cfe/tools/clang-format.git@96636aa0e9f047f17447f2d45a094d0b59ed7917', 'src/buildtools/third_party/libc++/trunk': - 'https://chromium.googlesource.com/chromium/llvm-project/libcxx.git@78d6a7767ed57b50122a161b91f59f19c9bd0d19', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@d9040c75cfea5928c804ab7c235fed06a63f743a', 'src/buildtools/third_party/libc++abi/trunk': - 'https://chromium.googlesource.com/chromium/llvm-project/libcxxabi.git@0d529660e32d77d9111912d73f2c74fc5fa2a858', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@196ba1aaa8ac285d94f4ea8d9836390a45360533', 'src/buildtools/third_party/libunwind/trunk': - 'https://chromium.googlesource.com/external/llvm.org/libunwind.git@69d9b84cca8354117b9fe9705a4430d789ee599b', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@d999d54f4bca789543a2eb6c995af2d9b5a1f3ed', 'src/tools/clang/dsymutil': { 'packages': [ @@ -82,11 +92,22 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_system_sdk': { + 'packages': [ + { + 'package': 'chromium/third_party/android_system_sdk', + 'version': 'no8ss5nRg6uYDM08HboypuIQuix7bS1kVqRGyWmwP-YC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 
'src/third_party/android_build_tools/aapt2': { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/aapt2', - 'version': 'LKH_DI44rZhQ4RkScMFQLGSJ4jZyuPcff0llITnq-i4C', + 'version': 'R2k5wwOlIaS6sjv2TIyHotiPJod-6KqnZO8NH-KFK8sC', }, ], 'condition': 'checkout_android', @@ -97,7 +118,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_tools_bundletool', - 'version': 'P0-ZY8wc-hAu5TZYFH7bId8H9Ucy7mNGCg1IPzXuZpEC', + 'version': 'gB66fGCdzqmQO6U6hxhoZDCGjOg-oqxhT_4uywaUw1oC', }, ], 'condition': 'checkout_android', @@ -105,30 +126,39 @@ deps = { }, 'src/third_party/boringssl/src': - 'https://boringssl.googlesource.com/boringssl.git@1cc95ac07c17d61bea601832bbdc1f8d13d313db', + 'https://boringssl.googlesource.com/boringssl.git@1607f54fed72c6589d560254626909a64124f091', + 'src/third_party/breakpad/breakpad': + 'https://chromium.googlesource.com/breakpad/breakpad.git@9c4671f2e3a63c0f155d9b2511192d0b5fa7f760', 'src/third_party/catapult': - 'https://chromium.googlesource.com/catapult.git@2e0a0cb9ad546be8c835e65d7537507cb7896e03', + 'https://chromium.googlesource.com/catapult.git@434681c2378b686117c2b003a58c54d78f22185f', 'src/third_party/ced/src': { 'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5', - 'condition': 'checkout_android', }, 'src/third_party/colorama/src': 'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8', 'src/third_party/depot_tools': - 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@ea8b58b970c0c94b4a36270b806ee307547cd77e', + 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@17cd53de67f982f123bfa6458780c31705221e60', 'src/third_party/ffmpeg': - 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@bcc5d9fec0a32ec5a90b831e5d0414639af34e1f', + 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@6d9096c9e3f7f5d4e6528104ed77987ec9327315', 
'src/third_party/findbugs': { 'url': 'https://chromium.googlesource.com/chromium/deps/findbugs.git@4275d9ac8610db6b1bc9a5e887f97e41b33fac67', 'condition': 'checkout_android', }, + # Used for embedded builds. CrOS & Linux use the system version. + 'src/third_party/fontconfig/src': { + 'url': 'https://chromium.googlesource.com/external/fontconfig.git@452be8125f0e2a18a7dfef469e05d19374d36307', + 'condition': 'checkout_linux', + }, 'src/third_party/freetype/src': - 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@e5038be70414cf66da6c4d5ce4e30375884c30d8', + 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@32b14552d662f28290e2792ce775fcd65397479a', 'src/third_party/harfbuzz-ng/src': - 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@5440313924172e155e34391f033f5e6c5e2390b3', + 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@c39ab82c90479341dcf28eaa8174af6f08c0d7ae', + 'src/third_party/google_benchmark/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@ffe1342eb2faa7d2e7c35b4db2ccf99fab81ec20', + }, # WebRTC-only dependency (not present in Chromium). 
'src/third_party/gtest-parallel': - 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@df0b4e476f98516cea7d593e5dbb0fca44f6ee7f', + 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@b0a18bc755c25e213b60868f97b72171c3601725', 'src/third_party/google-truth': { 'packages': [ { @@ -140,13 +170,9 @@ deps = { 'dep_type': 'cipd', }, 'src/third_party/googletest/src': - 'https://chromium.googlesource.com/external/github.com/google/googletest.git@306f3754a71d6d1ac644681d3544d06744914228', + 'https://chromium.googlesource.com/external/github.com/google/googletest.git@4fe018038f87675c083d0cfb6a6b57c274fb1753', 'src/third_party/icu': { - 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@dbd3825b31041d782c5b504c59dcfb5ac7dda08c', - }, - 'src/third_party/jsr-305/src': { - 'url': 'https://chromium.googlesource.com/external/jsr-305.git@642c508235471f7220af6d5df2d3210e3bfc0919', - 'condition': 'checkout_android', + 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@c2a4cae149aae7fd30c4cbe3cf1b30df03b386f1', }, 'src/third_party/jdk': { 'packages': [ @@ -163,7 +189,7 @@ deps = { 'dep_type': 'cipd', }, 'src/third_party/jsoncpp/source': - 'https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git@645250b6690785be60ab6780ce4b58698d884d11', # from svn 248 + 'https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git@9059f5cad030ba11d37818847443a53918c327b1', # from svn 248 'src/third_party/junit/src': { 'url': 'https://chromium.googlesource.com/external/junit.git@64155f8a9babcfcf4263cf4d08253a1556e75481', 'condition': 'checkout_android', @@ -172,15 +198,23 @@ deps = { 'src/third_party/libFuzzer/src': 'https://chromium.googlesource.com/chromium/llvm-project/compiler-rt/lib/fuzzer.git@debe7d2d1982e540fbd6bd78604bf001753f9e74', 'src/third_party/libjpeg_turbo': - 
'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@ce0e57e8e636f5132fe6f0590a4dba91f92fd935', + 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@d5148db386ceb4a608058320071cbed890bd6ad2', 'src/third_party/libsrtp': - 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@650611720ecc23e0e6b32b0e3100f8b4df91696c', + 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@6907d995c7975c2865f6f94f79638c25c342e95c', + 'src/third_party/libaom/source/libaom': + 'https://aomedia.googlesource.com/aom.git@87c414ed32b50cc41ec54db9653f61b556450d41', + 'src/third_party/libunwindstack': { + 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@11659d420a71e7323b379ea8781f07c6f384bc7e', + 'condition': 'checkout_android', + }, + 'src/third_party/perfetto': + 'https://android.googlesource.com/platform/external/perfetto.git@81e9301838fcce3f967c51b92628ea4f7caa4c20', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@4254ecaa075e672b66b9d723ebdd3d7ed7125055', + 'https://chromium.googlesource.com/webm/libvpx.git@12059d9566fff24ef12e4154438b046abd3a8da7', 'src/third_party/libyuv': - 'https://chromium.googlesource.com/libyuv/libyuv.git@6afd9becdf58822b1da6770598d8597c583ccfad', + 'https://chromium.googlesource.com/libyuv/libyuv.git@f014dbd87ab06258569ad10a4cb6f84a8a7f7dd2', 'src/third_party/lss': { - 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@f70e2f1641e280e777edfdad7f73a2cfa38139c7', + 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@29f7c7e018f4ce706a709f0b0afbf8bacf869480', 'condition': 'checkout_android or checkout_linux', }, 'src/third_party/mockito/src': { @@ -190,16 +224,16 @@ deps = { # Used by boringssl. 
'src/third_party/nasm': { - 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@21eb595319746a669a742d210eaa413c728e7fad' + 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@19f3fad68da99277b2882939d3b2fa4c4b8d51d9' }, 'src/third_party/openh264/src': - 'https://chromium.googlesource.com/external/github.com/cisco/openh264@6f26bce0b1c4e8ce0e13332f7c0083788def5fdf', + 'https://chromium.googlesource.com/external/github.com/cisco/openh264@3dd5b80bc4f172dd82925bb259cb7c82348409c5', 'src/third_party/r8': { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': '-oXGY8FjY2ZuIBHoGAByn8N6Vn2b0wB2QO8Ct_169XoC', + 'version': 'jIJhJ4I22Dx5V-URd7bp-x1RILv5vrLEJdjjt77MRPcC', }, ], 'condition': 'checkout_android', @@ -209,18 +243,14 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/proguard', - 'version': '3bd778c422ea5496de2ef25c007a517dbb5ce5ca', + 'version': 'Fd91BJFVlmiO6c46YMTsdy7n2f5Sk2hVVGlzPLvqZPsC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, 'src/third_party/requests/src': { - 'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@f172b30356d821d180fa4ecfa3e71c7274a32de4', - 'condition': 'checkout_android', - }, - 'src/third_party/robolectric/robolectric': { - 'url': 'https://chromium.googlesource.com/external/robolectric.git@3d6dcabf5521e028c8efc2778ab6bd8c7b6d923c', + 'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@refs/tags/v2.23.0', 'condition': 'checkout_android', }, 'src/third_party/ub-uiautomator/lib': { @@ -228,18 +258,16 @@ deps = { 'condition': 'checkout_android', }, 'src/third_party/usrsctp/usrsctplib': - 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@7a8bc9a90ca96634aa56ee712856d97f27d903f8', + 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@2caaff2b0a1d550751bb6aa1aaa6ece4829848d4', # Dependency used by libjpeg-turbo. 
'src/third_party/yasm/binaries': { 'url': 'https://chromium.googlesource.com/chromium/deps/yasm/binaries.git@52f9b3f4b0aa06da24ef8b123058bb61ee468881', 'condition': 'checkout_win', }, - 'src/third_party/yasm/source/patched-yasm': - 'https://chromium.googlesource.com/chromium/deps/yasm/patched-yasm.git@720b70524a4424b15fc57e82263568c8ba0496ad', 'src/tools': - 'https://chromium.googlesource.com/chromium/src/tools@39d70a29501d4b2999d4e648ecc16ece9bc76555', + 'https://chromium.googlesource.com/chromium/src/tools@2bb144cfe3f82806c48e30a671169eed729f070b', 'src/tools/swarming_client': - 'https://chromium.googlesource.com/infra/luci/client-py.git@885b3febcc170a60f25795304e60927b77d1e92d', + 'https://chromium.googlesource.com/infra/luci/client-py.git@d46ea7635f2911208268170512cb611412488fd8', 'src/third_party/accessibility_test_framework': { 'packages': [ @@ -263,17 +291,6 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/apk-patch-size-estimator': { - 'packages': [ - { - 'package': 'chromium/third_party/apk-patch-size-estimator', - 'version': 'b603e99dca9b90d6a99519c232cd811878283b08', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - 'src/third_party/bazel': { 'packages': [ { @@ -322,18 +339,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/espresso', - 'version': 'c92dcfc4e894555a0b3c309f2b7939640eb1fee4', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/gson': { - 'packages': [ - { - 'package': 'chromium/third_party/gson', - 'version': '681931c9778045903a0ed59856ce2dd8dd7bf7ca', + 'version': 'y8fIfH8Leo2cPm7iGCYnBxZpwOlgLv8rm2mlcmJlvGsC', }, ], 'condition': 'checkout_android', @@ -344,7 +350,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/guava', - 'version': 'a6fba501f3a0de88b9be1daa2052632de5b96a46', + 'version': 'y8Zx7cKTiOunLhOrfC4hOt5kDQrLJ_Rq7ISDmXkPdYsC', }, ], 'condition': 'checkout_android', @@ -370,12 +376,12 @@ deps = { 'src/third_party/android_sdk/public': { 
'packages': [ { - 'package': 'chromium/third_party/android_sdk/public/build-tools/29.0.2', - 'version': 'n-b1Qd7iFb8qzHlr1C_jIeu070UDgO_BwePtH42UqGcC', + 'package': 'chromium/third_party/android_sdk/public/build-tools/30.0.1', + 'version': '8LZujEmLjSh0g3JciDA3cslSptxKs9HOa_iUPXkOeYQC', }, { 'package': 'chromium/third_party/android_sdk/public/emulator', - 'version': 'f4WdgkPvDdVCE8zBWPzcSIj4N9WFhKp3CSKDWylXuLEC', + 'version': 'A4EvXZUIuQho0QRDJopMUpgyp6NA3aiDQjGKPUKbowMC', }, { 'package': 'chromium/third_party/android_sdk/public/extras', @@ -387,27 +393,19 @@ deps = { }, { 'package': 'chromium/third_party/android_sdk/public/platform-tools', - 'version': 'Jxtur3_L9RzY4q79K-AwIahwFW4oi5uYVD5URx9h62wC', + 'version': '8tF0AOj7Dwlv4j7_nfkhxWB0jzrvWWYjEIpirt8FIWYC', }, { - 'package': 'chromium/third_party/android_sdk/public/platforms/android-29', - 'version': 'yb33klKQV9UzzB-lDSsq36vzhTXOUZ2aRONBvPGwvdcC', + 'package': 'chromium/third_party/android_sdk/public/platforms/android-30', + 'version': 'YMUu9EHNZ__2Xcxl-KsaSf-dI5TMt_P62IseUVsxktMC', }, { 'package': 'chromium/third_party/android_sdk/public/sources/android-29', 'version': '4gxhM8E62bvZpQs7Q3d0DinQaW0RLCIefhXrQBFkNy8C', }, - { - 'package': 'chromium/third_party/android_sdk/public/tools', - 'version': 'wYcRQC2WHsw2dKWs4EA7fw9Qsyzu1ds1_fRjKmGxe5QC', - }, - { - 'package': 'chromium/third_party/android_sdk/public/tools-lint', - 'version': '89hXqZYzCum3delB5RV7J_QyWkaRodqdtQS0s3LMh3wC', - }, { 'package': 'chromium/third_party/android_sdk/public/cmdline-tools', - 'version': 'CR25ixsRhwuRnhdgDpGFyl9S0C_0HO9SUgFrwX46zq8C', + 'version': 'V__2Ycej-H2-6AcXX5A3gi7sIk74SuN44PBm2uC_N1sC', }, ], 'condition': 'checkout_android', @@ -425,17 +423,6 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/intellij': { - 'packages': [ - { - 'package': 'chromium/third_party/intellij', - 'version': '77c2721b024b36ee073402c08e6d8428c0295336', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - 
'src/third_party/objenesis': { 'packages': [ { @@ -447,48 +434,42 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/ow2_asm': { + 'src/third_party/robolectric': { 'packages': [ { - 'package': 'chromium/third_party/ow2_asm', - 'version': 'GcO_KsVh2dc5GF8PLNKrpDksY_yqfiuZ6wprQw7s1EgC', + 'package': 'chromium/third_party/robolectric', + 'version': 'iC6RDM5EH3GEAzR-1shW_Mg0FeeNE5shq1okkFfuuNQC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/robolectric': { + 'src/third_party/sqlite4java': { 'packages': [ { - 'package': 'chromium/third_party/robolectric', - 'version': '1KXoOiNP1a_uZNdM2ybWKwAQNow1dHTXTig-ZK4Xgq8C', + 'package': 'chromium/third_party/sqlite4java', + 'version': 'LofjKH9dgXIAJhRYCPQlMFywSwxYimrfDeBmaHc-Z5EC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/androidx': { + 'src/third_party/turbine': { 'packages': [ { - 'package': 'chromium/third_party/androidx', - 'version': 'BgU0HKOH7unGo87kXkIKJlPMmaSOCFhvUKcIr9aborwC', + 'package': 'chromium/third_party/turbine', + 'version': 'O_jNDJ4VdwYKBSDbd2BJ3mknaTFoVkvE7Po8XIiKy8sC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/sqlite4java': { - 'packages': [ - { - 'package': 'chromium/third_party/sqlite4java', - 'version': 'LofjKH9dgXIAJhRYCPQlMFywSwxYimrfDeBmaHc-Z5EC', - }, - ], + 'src/third_party/turbine/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/google/turbine.git' + '@' + '0f2a5024fe4a9bb745bcd5ac7c655cebe11649bc', 'condition': 'checkout_android', - 'dep_type': 'cipd', }, 'src/third_party/xstream': { @@ -506,15 +487,15 @@ deps = { 'packages': [ { 'package': 'infra/tools/luci/isolate/${{platform}}', - 'version': 'git_revision:37a855b64d59b7f079c9a0e5368f2757099d14d3', + 'version': 'git_revision:1a022d3a4c50be4207ee93451255d71896416596', }, { 'package': 'infra/tools/luci/isolated/${{platform}}', - 'version': 'git_revision:37a855b64d59b7f079c9a0e5368f2757099d14d3', + 'version': 
'git_revision:1a022d3a4c50be4207ee93451255d71896416596', }, { 'package': 'infra/tools/luci/swarming/${{platform}}', - 'version': 'git_revision:37a855b64d59b7f079c9a0e5368f2757099d14d3', + 'version': 'git_revision:1a022d3a4c50be4207ee93451255d71896416596', }, ], 'dep_type': 'cipd', @@ -611,11 +592,33 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/androidx_activity_activity': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_activity_activity', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/androidx_annotation_annotation': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_annotation_annotation_experimental': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation_experimental', + 'version': 'version:1.1.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', @@ -626,7 +629,18 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.2.0-beta01-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_appcompat_appcompat_resources': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat_resources', + 'version': 'version:1.2.0-beta01-cr0', }, ], 'condition': 'checkout_android', @@ -637,7 +651,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_common', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.1.0-cr0', }, ], 'condition': 'checkout_android', 
@@ -648,7 +662,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_runtime', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -659,7 +673,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_asynclayoutinflater_asynclayoutinflater', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', @@ -670,7 +684,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_cardview_cardview', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', @@ -681,7 +695,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_collection_collection', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -692,7 +706,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_concurrent_concurrent_futures', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.2.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', @@ -703,7 +717,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_coordinatorlayout_coordinatorlayout', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.2.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', @@ -714,7 +728,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_core_core', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.3.0-beta01-cr0', }, ], 'condition': 'checkout_android', @@ -765,11 +779,22 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/androidx_exifinterface_exifinterface': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_exifinterface_exifinterface', + 'version': 
'version:1.4.0-SNAPSHOT-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/androidx_fragment_fragment': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_fragment_fragment', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.2.5-cr0', }, ], 'condition': 'checkout_android', @@ -853,17 +878,6 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_legacy_legacy_support_v13': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/androidx_legacy_legacy_support_v13', - 'version': 'version:1.0.0-cr0', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - 'src/third_party/android_deps/libs/androidx_legacy_legacy_support_v4': { 'packages': [ { @@ -879,7 +893,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_common', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -912,7 +926,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_livedata_core', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -923,7 +937,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_runtime', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -934,7 +948,18 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -956,7 +981,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_localbroadcastmanager_localbroadcastmanager', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', @@ -1011,7 +1036,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_preference_preference', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.1-cr0', }, ], 'condition': 'checkout_android', @@ -1033,17 +1058,17 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_recyclerview_recyclerview', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_slidingpanelayout_slidingpanelayout': { + 'src/third_party/android_deps/libs/androidx_savedstate_savedstate': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_slidingpanelayout_slidingpanelayout', + 'package': 'chromium/third_party/android_deps/libs/androidx_savedstate_savedstate', 'version': 'version:1.0.0-cr0', }, ], @@ -1051,10 +1076,10 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_swiperefreshlayout_swiperefreshlayout': { + 'src/third_party/android_deps/libs/androidx_slice_slice_builders': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_swiperefreshlayout_swiperefreshlayout', + 'package': 'chromium/third_party/android_deps/libs/androidx_slice_slice_builders', 'version': 'version:1.0.0-cr0', }, ], @@ -1062,10 +1087,10 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_test_core': { + 'src/third_party/android_deps/libs/androidx_slice_slice_core': { 'packages': [ { - 
'package': 'chromium/third_party/android_deps/libs/androidx_test_core', + 'package': 'chromium/third_party/android_deps/libs/androidx_slice_slice_core', 'version': 'version:1.0.0-cr0', }, ], @@ -1073,10 +1098,10 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_test_ext_junit': { + 'src/third_party/android_deps/libs/androidx_slidingpanelayout_slidingpanelayout': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_test_ext_junit', + 'package': 'chromium/third_party/android_deps/libs/androidx_slidingpanelayout_slidingpanelayout', 'version': 'version:1.0.0-cr0', }, ], @@ -1084,263 +1109,263 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_test_monitor': { + 'src/third_party/android_deps/libs/androidx_swiperefreshlayout_swiperefreshlayout': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_test_monitor', - 'version': 'version:1.1.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_swiperefreshlayout_swiperefreshlayout', + 'version': 'version:1.2.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_test_rules': { + 'src/third_party/android_deps/libs/androidx_test_core': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_test_rules', - 'version': 'version:1.1.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_core', + 'version': 'version:1.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_test_runner': { + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_test_runner', - 'version': 'version:1.1.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib', + 'version': 'version:3.2.0-cr0', }, ], 
'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_test_uiautomator_uiautomator': { + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_core': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_test_uiautomator_uiautomator', - 'version': 'version:2.2.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_core', + 'version': 'version:3.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_transition_transition': { + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_transition_transition', - 'version': 'version:1.0.0-rc02-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource', + 'version': 'version:3.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable': { + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_intents': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable', - 'version': 'version:1.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_intents', + 'version': 'version:3.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated': { + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_web': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated', - 'version': 'version:1.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_web', + 'version': 'version:3.2.0-cr0', }, ], 
'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable': { + 'src/third_party/android_deps/libs/androidx_test_ext_junit': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable', - 'version': 'version:1.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_ext_junit', + 'version': 'version:1.1.1-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/androidx_viewpager_viewpager': { + 'src/third_party/android_deps/libs/androidx_test_monitor': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager_viewpager', - 'version': 'version:1.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_monitor', + 'version': 'version:1.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_animated_vector_drawable': { + 'src/third_party/android_deps/libs/androidx_test_rules': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_animated_vector_drawable', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_rules', + 'version': 'version:1.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_appcompat_v7': { + 'src/third_party/android_deps/libs/androidx_test_runner': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_appcompat_v7', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_runner', + 'version': 'version:1.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_asynclayoutinflater': { + 
'src/third_party/android_deps/libs/androidx_test_uiautomator_uiautomator': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_asynclayoutinflater', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_test_uiautomator_uiautomator', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_cardview_v7': { + 'src/third_party/android_deps/libs/androidx_transition_transition': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_cardview_v7', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_transition_transition', + 'version': 'version:1.4.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_collections': { + 'src/third_party/android_deps/libs/androidx_tvprovider_tvprovider': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_collections', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_tvprovider_tvprovider', + 'version': 'version:1.1.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_coordinatorlayout': { + 'src/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_coordinatorlayout', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable', + 'version': 'version:1.2.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_cursoradapter': { + 
'src/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_cursoradapter', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated', + 'version': 'version:1.2.0-SNAPSHOT-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_customview': { + 'src/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_customview', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable', + 'version': 'version:1.1.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_design': { + 'src/third_party/android_deps/libs/androidx_viewpager2_viewpager2': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_design', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager2_viewpager2', + 'version': 'version:1.0.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_documentfile': { + 'src/third_party/android_deps/libs/androidx_viewpager_viewpager': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_documentfile', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager_viewpager', + 'version': 'version:1.0.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_drawerlayout': { + 'src/third_party/android_deps/libs/androidx_webkit_webkit': { 
'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_drawerlayout', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_webkit_webkit', + 'version': 'version:1.3.0-rc01-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_gridlayout_v7': { + 'src/third_party/android_deps/libs/androidx_window_window': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_gridlayout_v7', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/androidx_window_window', + 'version': 'version:1.0.0-alpha01-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_interpolator': { + 'src/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_interpolator', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent', + 'version': 'version:3.1-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_leanback_v17': { + 'src/third_party/android_deps/libs/classworlds_classworlds': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_leanback_v17', - 'version': 'version:28.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/classworlds_classworlds', + 'version': 'version:1.1-alpha-2-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_loader': { + 'src/third_party/android_deps/libs/com_android_support_animated_vector_drawable': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_loader', + 
'package': 'chromium/third_party/android_deps/libs/com_android_support_animated_vector_drawable', 'version': 'version:28.0.0-cr0', }, ], @@ -1348,10 +1373,10 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_localbroadcastmanager': { + 'src/third_party/android_deps/libs/com_android_support_appcompat_v7': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_localbroadcastmanager', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_appcompat_v7', 'version': 'version:28.0.0-cr0', }, ], @@ -1359,10 +1384,10 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_mediarouter_v7': { + 'src/third_party/android_deps/libs/com_android_support_asynclayoutinflater': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_mediarouter_v7', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_asynclayoutinflater', 'version': 'version:28.0.0-cr0', }, ], @@ -1370,21 +1395,21 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_multidex': { + 'src/third_party/android_deps/libs/com_android_support_cardview_v7': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_multidex', - 'version': 'version:1.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_cardview_v7', + 'version': 'version:28.0.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_palette_v7': { + 'src/third_party/android_deps/libs/com_android_support_collections': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_palette_v7', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_collections', 'version': 'version:28.0.0-cr0', }, ], @@ -1392,10 +1417,10 @@ deps = { 'dep_type': 'cipd', }, - 
'src/third_party/android_deps/libs/com_android_support_preference_leanback_v17': { + 'src/third_party/android_deps/libs/com_android_support_coordinatorlayout': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_preference_leanback_v17', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_coordinatorlayout', 'version': 'version:28.0.0-cr0', }, ], @@ -1403,10 +1428,10 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_preference_v14': { + 'src/third_party/android_deps/libs/com_android_support_cursoradapter': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_preference_v14', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_cursoradapter', 'version': 'version:28.0.0-cr0', }, ], @@ -1414,10 +1439,10 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_preference_v7': { + 'src/third_party/android_deps/libs/com_android_support_customview': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_preference_v7', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_customview', 'version': 'version:28.0.0-cr0', }, ], @@ -1425,6 +1450,83 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/com_android_support_design': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_design', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_documentfile': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_documentfile', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_drawerlayout': { + 'packages': [ + 
{ + 'package': 'chromium/third_party/android_deps/libs/com_android_support_drawerlayout', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_interpolator': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_interpolator', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_loader': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_loader', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_localbroadcastmanager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_localbroadcastmanager', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_multidex': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_multidex', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/com_android_support_print': { 'packages': [ { @@ -1524,17 +1626,6 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_support_v13': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_v13', - 'version': 'version:28.0.0-cr0', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - 'src/third_party/android_deps/libs/com_android_support_support_v4': { 'packages': [ { @@ -1623,11 +1714,33 @@ deps = { 'dep_type': 'cipd', }, + 
'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs', + 'version': 'version:1.0.10-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs_configuration', + 'version': 'version:1.0.10-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine', - 'version': 'version:2.7.0-cr0', + 'version': 'version:2.8.0-cr0', }, ], 'condition': 'checkout_android', @@ -1649,7 +1762,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1660,7 +1773,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_api_phone', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.5.0-cr0', }, ], 'condition': 'checkout_android', @@ -1671,7 +1784,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_base', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1682,7 +1795,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_base', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -1693,7 +1806,7 @@ deps = { 'packages': [ { 'package': 
'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_basement', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -1704,7 +1817,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast', - 'version': 'version:16.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1715,7 +1828,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast_framework', - 'version': 'version:16.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1726,7 +1839,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_clearcut', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1737,7 +1850,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_fido', - 'version': 'version:15.0.1-cr0', + 'version': 'version:18.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -1748,7 +1861,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_flags', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1759,7 +1872,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_gcm', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1770,7 +1883,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_iid', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1781,7 +1894,7 @@ deps = { 
'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_instantapps', - 'version': 'version:16.0.0-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1792,7 +1905,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_location', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1803,7 +1916,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_phenotype', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1814,7 +1927,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_places_placereport', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1825,7 +1938,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_stats', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1836,7 +1949,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_tasks', - 'version': 'version:15.0.1-cr0', + 'version': 'version:17.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1847,7 +1960,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_vision', - 'version': 'version:15.0.1-cr0', + 'version': 'version:18.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1858,7 +1971,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_vision_common', - 'version': 'version:15.0.1-cr0', + 'version': 'version:18.0.0-cr0', }, ], 'condition': 
'checkout_android', @@ -1869,7 +1982,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material', - 'version': 'version:1.0.0-rc02-cr0', + 'version': 'version:1.2.0-alpha06-cr0', }, ], 'condition': 'checkout_android', @@ -1909,6 +2022,17 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations', + 'version': 'version:1.7-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/com_google_code_findbugs_jFormatString': { 'packages': [ { @@ -1931,11 +2055,22 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/com_google_code_gson_gson': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_code_gson_gson', + 'version': 'version:2.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/com_google_dagger_dagger': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger', - 'version': 'version:2.17-cr0', + 'version': 'version:2.26-cr0', }, ], 'condition': 'checkout_android', @@ -1946,7 +2081,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_compiler', - 'version': 'version:2.17-cr0', + 'version': 'version:2.26-cr0', }, ], 'condition': 'checkout_android', @@ -1957,7 +2092,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_producers', - 'version': 'version:2.17-cr0', + 'version': 'version:2.26-cr0', }, ], 'condition': 'checkout_android', @@ -1968,7 +2103,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_spi', - 'version': 'version:2.17-cr0', + 'version': 
'version:2.26-cr0', }, ], 'condition': 'checkout_android', @@ -1979,7 +2114,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -1990,7 +2125,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2001,7 +2136,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2012,7 +2147,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_core', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2023,7 +2158,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2078,7 +2213,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_guava_guava', - 'version': 'version:27.0.1-jre-cr0', + 'version': 'version:27.1-jre-cr0', }, ], 'condition': 'checkout_android', @@ -2118,11 +2253,11 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_protobuf_protobuf_lite': { + 'src/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_lite', - 'version': 'version:3.0.1-cr0', + 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite', 
+ 'version': 'version:3.13.0-cr0', }, ], 'condition': 'checkout_android', @@ -2144,7 +2279,18 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_squareup_javapoet', - 'version': 'version:1.11.0-cr0', + 'version': 'version:1.11.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_squareup_javawriter': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_squareup_javawriter', + 'version': 'version:2.1.1-cr0', }, ], 'condition': 'checkout_android', @@ -2195,6 +2341,39 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/nekohtml_nekohtml': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/nekohtml_nekohtml', + 'version': 'version:1.9.6.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/nekohtml_xercesMinimal': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/nekohtml_xercesminimal', + 'version': 'version:1.9.6.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/net_ltgt_gradle_incap_incap': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/net_ltgt_gradle_incap_incap', + 'version': 'version:0.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/net_sf_kxml_kxml2': { 'packages': [ { @@ -2206,11 +2385,198 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/org_apache_ant_ant': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_ant_ant', + 'version': 'version:1.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_ant_ant_launcher': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/org_apache_ant_ant_launcher', + 'version': 'version:1.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks', + 'version': 'version:2.1.3-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_model': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_model', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/org_apache_maven_maven_profile': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_profile', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_project': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_project', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_settings': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_settings', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared', + 'version': 
'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup', + 'version': 'version:1.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/org_checkerframework_checker_compat_qual': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_compat_qual', - 'version': 'version:2.3.0-cr0', + 'version': 'version:2.5.3-cr0', }, ], 'condition': 'checkout_android', @@ -2221,40 +2587,62 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_qual', - 'version': 'version:3.0.0-cr0', + 'version': 'version:2.10.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_checkerframework_dataflow': { + 'src/third_party/android_deps/libs/org_checkerframework_dataflow_shaded': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow', - 'version': 'version:3.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow_shaded', + 'version': 'version:3.1.2-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_checkerframework_javacutil': { + 'src/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_javacutil', - 
'version': 'version:3.0.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations', + 'version': 'version:1.17-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations': { + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations', - 'version': 'version:1.17-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default', + 'version': 'version:1.0-alpha-9-stable-1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation', + 'version': 'version:1.11-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils', + 'version': 'version:1.5.15-cr0', }, ], 'condition': 'checkout_android', @@ -2272,11 +2660,22 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/org_jetbrains_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_annotations', + 'version': 'version:13.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib', - 'version': 'version:1.3.41-cr0', + 'version': 'version:1.3.50-cr0', }, ], 'condition': 'checkout_android', @@ -2287,7 +2686,73 @@ deps = { 'packages': [ { 
'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_common', - 'version': 'version:1.3.41-cr0', + 'version': 'version:1.3.50-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_metadata_jvm': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_metadata_jvm', + 'version': 'version:0.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_analysis': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_analysis', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_commons': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_commons', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_tree': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_tree', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_util': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_util', + 'version': 'version:7.0-cr0', }, ], 'condition': 'checkout_android', @@ -2305,33 +2770,154 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_plumelib_plume_util': { + 
'src/third_party/android_deps/libs/org_robolectric_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_plumelib_plume_util', - 'version': 'version:1.0.6-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_annotations', + 'version': 'version:4.3.1-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_plumelib_reflection_util': { + 'src/third_party/android_deps/libs/org_robolectric_junit': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_plumelib_reflection_util', - 'version': 'version:0.0.2-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_junit', + 'version': 'version:4.3.1-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_plumelib_require_javadoc': { + 'src/third_party/android_deps/libs/org_robolectric_pluginapi': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_plumelib_require_javadoc', - 'version': 'version:0.1.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_pluginapi', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_resources': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_resources', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_robolectric': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/org_robolectric_robolectric', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_sandbox': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_sandbox', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadowapi': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadowapi', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_framework': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_framework', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_multidex': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_multidex', + 'version': 'version:4.3.1-cr1', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_playservices': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_utils_reflector': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/org_robolectric_utils_reflector', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_threeten_threeten_extra': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_threeten_threeten_extra', + 'version': 'version:1.5.0-cr0', }, ], 'condition': 'checkout_android', @@ -2431,19 +3017,8 @@ hooks = [ 'name': 'mac_toolchain', 'pattern': '.', 'condition': 'checkout_mac', - 'action': ['python', 'src/build/mac_toolchain.py'], - }, - # Pull binutils for linux, enabled debug fission for faster linking / - # debugging when used with clang on Ubuntu Precise. - # https://code.google.com/p/chromium/issues/detail?id=352046 - { - 'name': 'binutils', - 'pattern': 'src/third_party/binutils', - 'condition': 'host_os == "linux"', - 'action': [ - 'python', - 'src/third_party/binutils/download.py', - ], + 'action': ['python', 'src/build/mac_toolchain.py', + '--xcode-version', Var('mac_xcode_version')], }, { # Note: On Win, this should run after win_toolchain, as it may use it. @@ -2532,6 +3107,17 @@ hooks = [ '-s', 'src/build/toolchain/win/rc/linux64/rc.sha1', ], }, + { + 'name': 'test_fonts', + 'pattern': '.', + 'action': [ 'download_from_google_storage', + '--no_resume', + '--extract', + '--no_auth', + '--bucket', 'chromium-fonts', + '-s', 'src/third_party/test_fonts/test_fonts.tar.gz.sha1', + ], + }, { 'name': 'msan_chained_origins', 'pattern': '.', @@ -2591,11 +3177,12 @@ include_rules = [ "+test", "+rtc_tools", - # Abseil whitelist. Keep this in sync with abseil-in-webrtc.md. + # Abseil allowlist. Keep this in sync with abseil-in-webrtc.md. 
"+absl/algorithm/algorithm.h", "+absl/algorithm/container.h", "+absl/base/attributes.h", "+absl/base/config.h", + "+absl/base/const_init.h", "+absl/base/macros.h", "+absl/container/inlined_vector.h", "+absl/memory/memory.h", diff --git a/OWNERS b/OWNERS index cd9d3d042c..d6a78420b1 100644 --- a/OWNERS +++ b/OWNERS @@ -2,7 +2,6 @@ henrika@webrtc.org juberti@webrtc.org kwiberg@webrtc.org mflodman@webrtc.org -phoglund@webrtc.org stefan@webrtc.org tommi@webrtc.org per-file .gitignore=* diff --git a/PRESUBMIT.py b/PRESUBMIT.py index 247b78eaa0..7b45a85766 100755 --- a/PRESUBMIT.py +++ b/PRESUBMIT.py @@ -14,30 +14,30 @@ from contextlib import contextmanager # Files and directories that are *skipped* by cpplint in the presubmit script. -CPPLINT_BLACKLIST = [ - 'api/video_codecs/video_decoder.h', - 'common_types.cc', - 'common_types.h', - 'examples/objc', - 'media/base/stream_params.h', - 'media/base/video_common.h', - 'media/sctp/sctp_transport.cc', - 'modules/audio_coding', - 'modules/audio_device', - 'modules/audio_processing', - 'modules/desktop_capture', - 'modules/include/module_common_types.h', - 'modules/utility', - 'modules/video_capture', - 'p2p/base/pseudo_tcp.cc', - 'p2p/base/pseudo_tcp.h', - 'rtc_base', - 'sdk/android/src/jni', - 'sdk/objc', - 'system_wrappers', - 'test', - 'tools_webrtc', - 'voice_engine', +CPPLINT_EXCEPTIONS = [ + 'api/video_codecs/video_decoder.h', + 'common_types.cc', + 'common_types.h', + 'examples/objc', + 'media/base/stream_params.h', + 'media/base/video_common.h', + 'media/sctp/sctp_transport.cc', + 'modules/audio_coding', + 'modules/audio_device', + 'modules/audio_processing', + 'modules/desktop_capture', + 'modules/include/module_common_types.h', + 'modules/utility', + 'modules/video_capture', + 'p2p/base/pseudo_tcp.cc', + 'p2p/base/pseudo_tcp.h', + 'rtc_base', + 'sdk/android/src/jni', + 'sdk/objc', + 'system_wrappers', + 'test', + 'tools_webrtc', + 'voice_engine', ] # These filters will always be removed, even if the caller 
specifies a filter @@ -45,13 +45,16 @@ # # Justifications for each filter: # - build/c++11 : Rvalue ref checks are unreliable (false positives), -# include file and feature blacklists are +# include file and feature blocklists are # google3-specific. +# - runtime/references : Mutable references are not banned by the Google +# C++ style guide anymore (starting from May 2020). # - whitespace/operators: Same as above (doesn't seem sufficient to eliminate # all move-related errors). -BLACKLIST_LINT_FILTERS = [ - '-build/c++11', - '-whitespace/operators', +DISABLED_LINT_FILTERS = [ + '-build/c++11', + '-runtime/references', + '-whitespace/operators', ] # List of directories of "supported" native APIs. That means changes to headers @@ -62,31 +65,31 @@ # webrtc-users@google.com (internal list). # 4. (later) The deprecated APIs are removed. NATIVE_API_DIRS = ( - 'api', # All subdirectories of api/ are included as well. - 'media/base', - 'media/engine', - 'modules/audio_device/include', - 'pc', + 'api', # All subdirectories of api/ are included as well. + 'media/base', + 'media/engine', + 'modules/audio_device/include', + 'pc', ) # These directories should not be used but are maintained only to avoid breaking # some legacy downstream code. 
LEGACY_API_DIRS = ( - 'common_audio/include', - 'modules/audio_coding/include', - 'modules/audio_processing/include', - 'modules/congestion_controller/include', - 'modules/include', - 'modules/remote_bitrate_estimator/include', - 'modules/rtp_rtcp/include', - 'modules/rtp_rtcp/source', - 'modules/utility/include', - 'modules/video_coding/codecs/h264/include', - 'modules/video_coding/codecs/vp8/include', - 'modules/video_coding/codecs/vp9/include', - 'modules/video_coding/include', - 'rtc_base', - 'system_wrappers/include', + 'common_audio/include', + 'modules/audio_coding/include', + 'modules/audio_processing/include', + 'modules/congestion_controller/include', + 'modules/include', + 'modules/remote_bitrate_estimator/include', + 'modules/rtp_rtcp/include', + 'modules/rtp_rtcp/source', + 'modules/utility/include', + 'modules/video_coding/codecs/h264/include', + 'modules/video_coding/codecs/vp8/include', + 'modules/video_coding/codecs/vp9/include', + 'modules/video_coding/include', + 'rtc_base', + 'system_wrappers/include', ) # NOTE: The set of directories in API_DIRS should be the same as those @@ -94,54 +97,60 @@ API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:] # TARGET_RE matches a GN target, and extracts the target name and the contents. -TARGET_RE = re.compile(r'(?P\s*)\w+\("(?P\w+)"\) {' - r'(?P.*?)' - r'(?P=indent)}', - re.MULTILINE | re.DOTALL) +TARGET_RE = re.compile( + r'(?P\s*)(?P\w+)\("(?P\w+)"\) {' + r'(?P.*?)' + r'(?P=indent)}', re.MULTILINE | re.DOTALL) # SOURCES_RE matches a block of sources inside a GN target. SOURCES_RE = re.compile(r'sources \+?= \[(?P.*?)\]', re.MULTILINE | re.DOTALL) +# DEPS_RE matches a block of sources inside a GN target. +DEPS_RE = re.compile(r'\bdeps \+?= \[(?P.*?)\]', + re.MULTILINE | re.DOTALL) + # FILE_PATH_RE matchies a file path. 
FILE_PATH_RE = re.compile(r'"(?P(\w|\/)+)(?P\.\w+)"') def FindSrcDirPath(starting_dir): - """Returns the abs path to the src/ dir of the project.""" - src_dir = starting_dir - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir + """Returns the abs path to the src/ dir of the project.""" + src_dir = starting_dir + while os.path.basename(src_dir) != 'src': + src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) + return src_dir @contextmanager def _AddToPath(*paths): - original_sys_path = sys.path - sys.path.extend(paths) - try: - yield - finally: - # Restore sys.path to what it was before. - sys.path = original_sys_path + original_sys_path = sys.path + sys.path.extend(paths) + try: + yield + finally: + # Restore sys.path to what it was before. + sys.path = original_sys_path def VerifyNativeApiHeadersListIsValid(input_api, output_api): - """Ensures the list of native API header directories is up to date.""" - non_existing_paths = [] - native_api_full_paths = [ - input_api.os_path.join(input_api.PresubmitLocalPath(), - *path.split('/')) for path in API_DIRS] - for path in native_api_full_paths: - if not os.path.isdir(path): - non_existing_paths.append(path) - if non_existing_paths: - return [output_api.PresubmitError( - 'Directories to native API headers have changed which has made the ' - 'list in PRESUBMIT.py outdated.\nPlease update it to the current ' - 'location of our native APIs.', - non_existing_paths)] - return [] + """Ensures the list of native API header directories is up to date.""" + non_existing_paths = [] + native_api_full_paths = [ + input_api.os_path.join(input_api.PresubmitLocalPath(), + *path.split('/')) for path in API_DIRS + ] + for path in native_api_full_paths: + if not os.path.isdir(path): + non_existing_paths.append(path) + if non_existing_paths: + return [ + output_api.PresubmitError( + 'Directories to native API headers have changed which has made the ' + 'list in 
PRESUBMIT.py outdated.\nPlease update it to the current ' + 'location of our native APIs.', non_existing_paths) + ] + return [] API_CHANGE_MSG = """ @@ -165,532 +174,606 @@ def VerifyNativeApiHeadersListIsValid(input_api, output_api): def CheckNativeApiHeaderChanges(input_api, output_api): - """Checks to remind proper changing of native APIs.""" - files = [] - source_file_filter = lambda x: input_api.FilterSourceFile( - x, white_list=[r'.+\.(gn|gni|h)$']) - for f in input_api.AffectedSourceFiles(source_file_filter): - for path in API_DIRS: - dn = os.path.dirname(f.LocalPath()) - if path == 'api': - # Special case: Subdirectories included. - if dn == 'api' or dn.startswith('api/'): - files.append(f.LocalPath()) - else: - # Normal case: Subdirectories not included. - if dn == path: - files.append(f.LocalPath()) - - if files: - return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, files)] - return [] - - -def CheckNoIOStreamInHeaders(input_api, output_api, - source_file_filter): - """Checks to make sure no .h files include .""" - files = [] - pattern = input_api.re.compile(r'^#include\s*', - input_api.re.MULTILINE) - file_filter = lambda x: (input_api.FilterSourceFile(x) - and source_file_filter(x)) - for f in input_api.AffectedSourceFiles(file_filter): - if not f.LocalPath().endswith('.h'): - continue - contents = input_api.ReadFile(f) - if pattern.search(contents): - files.append(f) - - if len(files): - return [output_api.PresubmitError( - 'Do not #include in header files, since it inserts static ' + - 'initialization into every file including the header. Instead, ' + - '#include . 
See http://crbug.com/94794', - files)] - return [] - - -def CheckNoPragmaOnce(input_api, output_api, - source_file_filter): - """Make sure that banned functions are not used.""" - files = [] - pattern = input_api.re.compile(r'^#pragma\s+once', - input_api.re.MULTILINE) - file_filter = lambda x: (input_api.FilterSourceFile(x) - and source_file_filter(x)) - for f in input_api.AffectedSourceFiles(file_filter): - if not f.LocalPath().endswith('.h'): - continue - contents = input_api.ReadFile(f) - if pattern.search(contents): - files.append(f) - - if files: - return [output_api.PresubmitError( - 'Do not use #pragma once in header files.\n' - 'See http://www.chromium.org/developers/coding-style#TOC-File-headers', - files)] - return [] - - -def CheckNoFRIEND_TEST(input_api, output_api, # pylint: disable=invalid-name - source_file_filter): - """Make sure that gtest's FRIEND_TEST() macro is not used, the - FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be - used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes.""" - problems = [] + """Checks to remind proper changing of native APIs.""" + files = [] + source_file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_check=[r'.+\.(gn|gni|h)$']) + for f in input_api.AffectedSourceFiles(source_file_filter): + for path in API_DIRS: + dn = os.path.dirname(f.LocalPath()) + if path == 'api': + # Special case: Subdirectories included. + if dn == 'api' or dn.startswith('api/'): + files.append(f.LocalPath()) + else: + # Normal case: Subdirectories not included. 
+ if dn == path: + files.append(f.LocalPath()) + + if files: + return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, files)] + return [] + + +def CheckNoIOStreamInHeaders(input_api, output_api, source_file_filter): + """Checks to make sure no .h files include .""" + files = [] + pattern = input_api.re.compile(r'^#include\s*', + input_api.re.MULTILINE) + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h'): + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.append(f) + + if len(files): + return [ + output_api.PresubmitError( + 'Do not #include in header files, since it inserts static ' + + + 'initialization into every file including the header. Instead, ' + + '#include . See http://crbug.com/94794', files) + ] + return [] - file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) - and source_file_filter(f)) - for f in input_api.AffectedFiles(file_filter=file_filter): - for line_num, line in f.ChangedContents(): - if 'FRIEND_TEST(' in line: - problems.append(' %s:%d' % (f.LocalPath(), line_num)) - if not problems: +def CheckNoPragmaOnce(input_api, output_api, source_file_filter): + """Make sure that banned functions are not used.""" + files = [] + pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE) + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h'): + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.append(f) + + if files: + return [ + output_api.PresubmitError( + 'Do not use #pragma once in header files.\n' + 'See http://www.chromium.org/developers/coding-style#TOC-File-headers', + files) + ] return [] - return [output_api.PresubmitPromptWarning('WebRTC\'s code should not use ' - 'gtest\'s FRIEND_TEST() macro. 
Include testsupport/gtest_prod_util.h and ' - 'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))] -def IsLintBlacklisted(blacklist_paths, file_path): - """ Checks if a file is blacklisted for lint check.""" - for path in blacklist_paths: - if file_path == path or os.path.dirname(file_path).startswith(path): - return True - return False +def CheckNoFRIEND_TEST( + input_api, + output_api, # pylint: disable=invalid-name + source_file_filter): + """Make sure that gtest's FRIEND_TEST() macro is not used, the + FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be + used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes.""" + problems = [] + + file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and + source_file_filter(f)) + for f in input_api.AffectedFiles(file_filter=file_filter): + for line_num, line in f.ChangedContents(): + if 'FRIEND_TEST(' in line: + problems.append(' %s:%d' % (f.LocalPath(), line_num)) + + if not problems: + return [] + return [ + output_api.PresubmitPromptWarning( + 'WebRTC\'s code should not use ' + 'gtest\'s FRIEND_TEST() macro. Include testsupport/gtest_prod_util.h and ' + 'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems)) + ] + + +def IsLintDisabled(disabled_paths, file_path): + """ Checks if a file is disabled for lint check.""" + for path in disabled_paths: + if file_path == path or os.path.dirname(file_path).startswith(path): + return True + return False def CheckApprovedFilesLintClean(input_api, output_api, source_file_filter=None): - """Checks that all new or non-blacklisted .cc and .h files pass cpplint.py. + """Checks that all new or non-exempt .cc and .h files pass cpplint.py. This check is based on CheckChangeLintsClean in depot_tools/presubmit_canned_checks.py but has less filters and only checks added files.""" - result = [] - - # Initialize cpplint. 
- import cpplint - # Access to a protected member _XX of a client class - # pylint: disable=W0212 - cpplint._cpplint_state.ResetErrorCounts() - - lint_filters = cpplint._Filters() - lint_filters.extend(BLACKLIST_LINT_FILTERS) - cpplint._SetFilters(','.join(lint_filters)) - - # Create a platform independent blacklist for cpplint. - blacklist_paths = [input_api.os_path.join(*path.split('/')) - for path in CPPLINT_BLACKLIST] - - # Use the strictest verbosity level for cpplint.py (level 1) which is the - # default when running cpplint.py from command line. To make it possible to - # work with not-yet-converted code, we're only applying it to new (or - # moved/renamed) files and files not listed in CPPLINT_BLACKLIST. - verbosity_level = 1 - files = [] - for f in input_api.AffectedSourceFiles(source_file_filter): - # Note that moved/renamed files also count as added. - if f.Action() == 'A' or not IsLintBlacklisted(blacklist_paths, - f.LocalPath()): - files.append(f.AbsoluteLocalPath()) - - for file_name in files: - cpplint.ProcessFile(file_name, verbosity_level) - - if cpplint._cpplint_state.error_count > 0: - if input_api.is_committing: - res_type = output_api.PresubmitError - else: - res_type = output_api.PresubmitPromptWarning - result = [res_type('Changelist failed cpplint.py check.')] + result = [] - return result + # Initialize cpplint. + import cpplint + # Access to a protected member _XX of a client class + # pylint: disable=W0212 + cpplint._cpplint_state.ResetErrorCounts() + + lint_filters = cpplint._Filters() + lint_filters.extend(DISABLED_LINT_FILTERS) + cpplint._SetFilters(','.join(lint_filters)) + + # Create a platform independent exempt list for cpplint. + disabled_paths = [ + input_api.os_path.join(*path.split('/')) for path in CPPLINT_EXCEPTIONS + ] + + # Use the strictest verbosity level for cpplint.py (level 1) which is the + # default when running cpplint.py from command line. 
To make it possible to + # work with not-yet-converted code, we're only applying it to new (or + # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS. + verbosity_level = 1 + files = [] + for f in input_api.AffectedSourceFiles(source_file_filter): + # Note that moved/renamed files also count as added. + if f.Action() == 'A' or not IsLintDisabled(disabled_paths, + f.LocalPath()): + files.append(f.AbsoluteLocalPath()) + + for file_name in files: + cpplint.ProcessFile(file_name, verbosity_level) + + if cpplint._cpplint_state.error_count > 0: + if input_api.is_committing: + res_type = output_api.PresubmitError + else: + res_type = output_api.PresubmitPromptWarning + result = [res_type('Changelist failed cpplint.py check.')] + + return result def CheckNoSourcesAbove(input_api, gn_files, output_api): - # Disallow referencing source files with paths above the GN file location. - source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]', - re.MULTILINE | re.DOTALL) - file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"') - violating_gn_files = set() - violating_source_entries = [] - for gn_file in gn_files: - contents = input_api.ReadFile(gn_file) - for source_block_match in source_pattern.finditer(contents): - # Find all source list entries starting with ../ in the source block - # (exclude overrides entries). - for file_list_match in file_pattern.finditer(source_block_match.group(1)): - source_file = file_list_match.group(1) - if 'overrides/' not in source_file: - violating_source_entries.append(source_file) - violating_gn_files.add(gn_file) - if violating_gn_files: - return [output_api.PresubmitError( - 'Referencing source files above the directory of the GN file is not ' - 'allowed. 
Please introduce new GN targets in the proper location ' - 'instead.\n' - 'Invalid source entries:\n' - '%s\n' - 'Violating GN files:' % '\n'.join(violating_source_entries), - items=violating_gn_files)] - return [] + # Disallow referencing source files with paths above the GN file location. + source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]', + re.MULTILINE | re.DOTALL) + file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"') + violating_gn_files = set() + violating_source_entries = [] + for gn_file in gn_files: + contents = input_api.ReadFile(gn_file) + for source_block_match in source_pattern.finditer(contents): + # Find all source list entries starting with ../ in the source block + # (exclude overrides entries). + for file_list_match in file_pattern.finditer( + source_block_match.group(1)): + source_file = file_list_match.group(1) + if 'overrides/' not in source_file: + violating_source_entries.append(source_file) + violating_gn_files.add(gn_file) + if violating_gn_files: + return [ + output_api.PresubmitError( + 'Referencing source files above the directory of the GN file is not ' + 'allowed. 
Please introduce new GN targets in the proper location ' + 'instead.\n' + 'Invalid source entries:\n' + '%s\n' + 'Violating GN files:' % '\n'.join(violating_source_entries), + items=violating_gn_files) + ] + return [] + + +def CheckAbseilDependencies(input_api, gn_files, output_api): + """Checks that Abseil dependencies are declared in `absl_deps`.""" + absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL) + target_types_to_check = [ + 'rtc_library', + 'rtc_source_set', + 'rtc_static_library', + 'webrtc_fuzzer_test', + ] + error_msg = ('Abseil dependencies in target "%s" (file: %s) ' + 'should be moved to the "absl_deps" parameter.') + errors = [] + + for gn_file in gn_files: + gn_file_content = input_api.ReadFile(gn_file) + for target_match in TARGET_RE.finditer(gn_file_content): + target_type = target_match.group('target_type') + target_name = target_match.group('target_name') + target_contents = target_match.group('target_contents') + if target_type in target_types_to_check: + for deps_match in DEPS_RE.finditer(target_contents): + deps = deps_match.group('deps').splitlines() + for dep in deps: + if re.search(absl_re, dep): + errors.append( + output_api.PresubmitError( + error_msg % + (target_name, gn_file.LocalPath()))) + break # no need to warn more than once per target + return errors def CheckNoMixingSources(input_api, gn_files, output_api): - """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target. + """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target. See bugs.webrtc.org/7743 for more context. 
""" - def _MoreThanOneSourceUsed(*sources_lists): - sources_used = 0 - for source_list in sources_lists: - if len(source_list): - sources_used += 1 - return sources_used > 1 - - errors = defaultdict(lambda: []) - for gn_file in gn_files: - gn_file_content = input_api.ReadFile(gn_file) - for target_match in TARGET_RE.finditer(gn_file_content): - # list_of_sources is a list of tuples of the form - # (c_files, cc_files, objc_files) that keeps track of all the sources - # defined in a target. A GN target can have more that on definition of - # sources (since it supports if/else statements). - # E.g.: - # rtc_static_library("foo") { - # if (is_win) { - # sources = [ "foo.cc" ] - # } else { - # sources = [ "foo.mm" ] - # } - # } - # This is allowed and the presubmit check should support this case. - list_of_sources = [] - c_files = [] - cc_files = [] - objc_files = [] - target_name = target_match.group('target_name') - target_contents = target_match.group('target_contents') - for sources_match in SOURCES_RE.finditer(target_contents): - if '+=' not in sources_match.group(0): - if c_files or cc_files or objc_files: + def _MoreThanOneSourceUsed(*sources_lists): + sources_used = 0 + for source_list in sources_lists: + if len(source_list): + sources_used += 1 + return sources_used > 1 + + errors = defaultdict(lambda: []) + for gn_file in gn_files: + gn_file_content = input_api.ReadFile(gn_file) + for target_match in TARGET_RE.finditer(gn_file_content): + # list_of_sources is a list of tuples of the form + # (c_files, cc_files, objc_files) that keeps track of all the sources + # defined in a target. A GN target can have more that on definition of + # sources (since it supports if/else statements). + # E.g.: + # rtc_static_library("foo") { + # if (is_win) { + # sources = [ "foo.cc" ] + # } else { + # sources = [ "foo.mm" ] + # } + # } + # This is allowed and the presubmit check should support this case. 
+ list_of_sources = [] + c_files = [] + cc_files = [] + objc_files = [] + target_name = target_match.group('target_name') + target_contents = target_match.group('target_contents') + for sources_match in SOURCES_RE.finditer(target_contents): + if '+=' not in sources_match.group(0): + if c_files or cc_files or objc_files: + list_of_sources.append((c_files, cc_files, objc_files)) + c_files = [] + cc_files = [] + objc_files = [] + for file_match in FILE_PATH_RE.finditer( + sources_match.group(1)): + file_path = file_match.group('file_path') + extension = file_match.group('extension') + if extension == '.c': + c_files.append(file_path + extension) + if extension == '.cc': + cc_files.append(file_path + extension) + if extension in ['.m', '.mm']: + objc_files.append(file_path + extension) list_of_sources.append((c_files, cc_files, objc_files)) - c_files = [] - cc_files = [] - objc_files = [] - for file_match in FILE_PATH_RE.finditer(sources_match.group(1)): - file_path = file_match.group('file_path') - extension = file_match.group('extension') - if extension == '.c': - c_files.append(file_path + extension) - if extension == '.cc': - cc_files.append(file_path + extension) - if extension in ['.m', '.mm']: - objc_files.append(file_path + extension) - list_of_sources.append((c_files, cc_files, objc_files)) - for c_files_list, cc_files_list, objc_files_list in list_of_sources: - if _MoreThanOneSourceUsed(c_files_list, cc_files_list, objc_files_list): - all_sources = sorted(c_files_list + cc_files_list + objc_files_list) - errors[gn_file.LocalPath()].append((target_name, all_sources)) - if errors: - return [output_api.PresubmitError( - 'GN targets cannot mix .c, .cc and .m (or .mm) source files.\n' - 'Please create a separate target for each collection of sources.\n' - 'Mixed sources: \n' - '%s\n' - 'Violating GN files:\n%s\n' % (json.dumps(errors, indent=2), - '\n'.join(errors.keys())))] - return [] + for c_files_list, cc_files_list, objc_files_list in list_of_sources: + if 
_MoreThanOneSourceUsed(c_files_list, cc_files_list, + objc_files_list): + all_sources = sorted(c_files_list + cc_files_list + + objc_files_list) + errors[gn_file.LocalPath()].append( + (target_name, all_sources)) + if errors: + return [ + output_api.PresubmitError( + 'GN targets cannot mix .c, .cc and .m (or .mm) source files.\n' + 'Please create a separate target for each collection of sources.\n' + 'Mixed sources: \n' + '%s\n' + 'Violating GN files:\n%s\n' % + (json.dumps(errors, indent=2), '\n'.join(errors.keys()))) + ] + return [] def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api): - cwd = input_api.PresubmitLocalPath() - with _AddToPath(input_api.os_path.join( - cwd, 'tools_webrtc', 'presubmit_checks_lib')): - from check_package_boundaries import CheckPackageBoundaries - build_files = [os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files] - errors = CheckPackageBoundaries(cwd, build_files)[:5] - if errors: - return [output_api.PresubmitError( - 'There are package boundary violations in the following GN files:', - long_text='\n\n'.join(str(err) for err in errors))] - return [] + cwd = input_api.PresubmitLocalPath() + with _AddToPath( + input_api.os_path.join(cwd, 'tools_webrtc', + 'presubmit_checks_lib')): + from check_package_boundaries import CheckPackageBoundaries + build_files = [ + os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files + ] + errors = CheckPackageBoundaries(cwd, build_files)[:5] + if errors: + return [ + output_api.PresubmitError( + 'There are package boundary violations in the following GN files:', + long_text='\n\n'.join(str(err) for err in errors)) + ] + return [] def _ReportFileAndLine(filename, line_num): - """Default error formatter for _FindNewViolationsOfRule.""" - return '%s (line %s)' % (filename, line_num) + """Default error formatter for _FindNewViolationsOfRule.""" + return '%s (line %s)' % (filename, line_num) -def CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api, +def 
CheckNoWarningSuppressionFlagsAreAdded(gn_files, + input_api, + output_api, error_formatter=_ReportFileAndLine): - """Make sure that warning suppression flags are not added wihtout a reason.""" - msg = ('Usage of //build/config/clang:extra_warnings is discouraged ' - 'in WebRTC.\n' - 'If you are not adding this code (e.g. you are just moving ' - 'existing code) or you want to add an exception,\n' - 'you can add a comment on the line that causes the problem:\n\n' - '"-Wno-odr" # no-presubmit-check TODO(bugs.webrtc.org/BUG_ID)\n' - '\n' - 'Affected files:\n') - errors = [] # 2-element tuples with (file, line number) - clang_warn_re = input_api.re.compile(r'//build/config/clang:extra_warnings') - no_presubmit_re = input_api.re.compile( - r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') - for f in gn_files: - for line_num, line in f.ChangedContents(): - if clang_warn_re.search(line) and not no_presubmit_re.search(line): - errors.append(error_formatter(f.LocalPath(), line_num)) - if errors: - return [output_api.PresubmitError(msg, errors)] - return [] - - -def CheckNoTestCaseUsageIsAdded(input_api, output_api, source_file_filter, + """Make sure that warning suppression flags are not added wihtout a reason.""" + msg = ('Usage of //build/config/clang:extra_warnings is discouraged ' + 'in WebRTC.\n' + 'If you are not adding this code (e.g. 
you are just moving ' + 'existing code) or you want to add an exception,\n' + 'you can add a comment on the line that causes the problem:\n\n' + '"-Wno-odr" # no-presubmit-check TODO(bugs.webrtc.org/BUG_ID)\n' + '\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + clang_warn_re = input_api.re.compile( + r'//build/config/clang:extra_warnings') + no_presubmit_re = input_api.re.compile( + r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') + for f in gn_files: + for line_num, line in f.ChangedContents(): + if clang_warn_re.search(line) and not no_presubmit_re.search(line): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(msg, errors)] + return [] + + +def CheckNoTestCaseUsageIsAdded(input_api, + output_api, + source_file_filter, error_formatter=_ReportFileAndLine): - error_msg = ('Usage of legacy GoogleTest API detected!\nPlease use the ' - 'new API: https://github.com/google/googletest/blob/master/' - 'googletest/docs/primer.md#beware-of-the-nomenclature.\n' - 'Affected files:\n') - errors = [] # 2-element tuples with (file, line number) - test_case_re = input_api.re.compile(r'TEST_CASE') - file_filter = lambda f: (source_file_filter(f) - and f.LocalPath().endswith('.cc')) - for f in input_api.AffectedSourceFiles(file_filter): - for line_num, line in f.ChangedContents(): - if test_case_re.search(line): - errors.append(error_formatter(f.LocalPath(), line_num)) - if errors: - return [output_api.PresubmitError(error_msg, errors)] - return [] - - -def CheckNoStreamUsageIsAdded(input_api, output_api, + error_msg = ('Usage of legacy GoogleTest API detected!\nPlease use the ' + 'new API: https://github.com/google/googletest/blob/master/' + 'googletest/docs/primer.md#beware-of-the-nomenclature.\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + test_case_re = input_api.re.compile(r'TEST_CASE') + file_filter = lambda f: (source_file_filter(f) and 
f.LocalPath().endswith( + '.cc')) + for f in input_api.AffectedSourceFiles(file_filter): + for line_num, line in f.ChangedContents(): + if test_case_re.search(line): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(error_msg, errors)] + return [] + + +def CheckNoStreamUsageIsAdded(input_api, + output_api, source_file_filter, error_formatter=_ReportFileAndLine): - """Make sure that no more dependencies on stringstream are added.""" - error_msg = ('Usage of , and in WebRTC is ' - 'deprecated.\n' - 'This includes the following types:\n' - 'std::istringstream, std::ostringstream, std::wistringstream, ' - 'std::wostringstream,\n' - 'std::wstringstream, std::ostream, std::wostream, std::istream,' - 'std::wistream,\n' - 'std::iostream, std::wiostream.\n' - 'If you are not adding this code (e.g. you are just moving ' - 'existing code),\n' - 'you can add a comment on the line that causes the problem:\n\n' - '#include // no-presubmit-check TODO(webrtc:8982)\n' - 'std::ostream& F() { // no-presubmit-check TODO(webrtc:8982)\n' - '\n' - 'If you are adding new code, consider using ' - 'rtc::SimpleStringBuilder\n' - '(in rtc_base/strings/string_builder.h).\n' - 'Affected files:\n') - errors = [] # 2-element tuples with (file, line number) - include_re = input_api.re.compile(r'#include <(i|o|s)stream>') - usage_re = input_api.re.compile(r'std::(w|i|o|io|wi|wo|wio)(string)*stream') - no_presubmit_re = input_api.re.compile( - r'// no-presubmit-check TODO\(webrtc:8982\)') - file_filter = lambda x: (input_api.FilterSourceFile(x) - and source_file_filter(x)) - - def _IsException(file_path): - is_test = any(file_path.endswith(x) for x in ['_test.cc', '_tests.cc', - '_unittest.cc', - '_unittests.cc']) - return (file_path.startswith('examples') or - file_path.startswith('test') or - is_test) - - - for f in input_api.AffectedSourceFiles(file_filter): - # Usage of stringstream is allowed under examples/ and in tests. 
- if f.LocalPath() == 'PRESUBMIT.py' or _IsException(f.LocalPath()): - continue - for line_num, line in f.ChangedContents(): - if ((include_re.search(line) or usage_re.search(line)) - and not no_presubmit_re.search(line)): - errors.append(error_formatter(f.LocalPath(), line_num)) - if errors: - return [output_api.PresubmitError(error_msg, errors)] - return [] + """Make sure that no more dependencies on stringstream are added.""" + error_msg = ( + 'Usage of , and in WebRTC is ' + 'deprecated.\n' + 'This includes the following types:\n' + 'std::istringstream, std::ostringstream, std::wistringstream, ' + 'std::wostringstream,\n' + 'std::wstringstream, std::ostream, std::wostream, std::istream,' + 'std::wistream,\n' + 'std::iostream, std::wiostream.\n' + 'If you are not adding this code (e.g. you are just moving ' + 'existing code),\n' + 'you can add a comment on the line that causes the problem:\n\n' + '#include // no-presubmit-check TODO(webrtc:8982)\n' + 'std::ostream& F() { // no-presubmit-check TODO(webrtc:8982)\n' + '\n' + 'If you are adding new code, consider using ' + 'rtc::SimpleStringBuilder\n' + '(in rtc_base/strings/string_builder.h).\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + include_re = input_api.re.compile(r'#include <(i|o|s)stream>') + usage_re = input_api.re.compile( + r'std::(w|i|o|io|wi|wo|wio)(string)*stream') + no_presubmit_re = input_api.re.compile( + r'// no-presubmit-check TODO\(webrtc:8982\)') + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + + def _IsException(file_path): + is_test = any( + file_path.endswith(x) for x in + ['_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc']) + return (file_path.startswith('examples') + or file_path.startswith('test') or is_test) + + for f in input_api.AffectedSourceFiles(file_filter): + # Usage of stringstream is allowed under examples/ and in tests. 
+ if f.LocalPath() == 'PRESUBMIT.py' or _IsException(f.LocalPath()): + continue + for line_num, line in f.ChangedContents(): + if ((include_re.search(line) or usage_re.search(line)) + and not no_presubmit_re.search(line)): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(error_msg, errors)] + return [] def CheckPublicDepsIsNotUsed(gn_files, input_api, output_api): - """Checks that public_deps is not used without a good reason.""" - result = [] - no_presubmit_check_re = input_api.re.compile( - r'# no-presubmit-check TODO\(webrtc:\d+\)') - error_msg = ('public_deps is not recommended in WebRTC BUILD.gn files ' - 'because it doesn\'t map well to downstream build systems.\n' - 'Used in: %s (line %d).\n' - 'If you are not adding this code (e.g. you are just moving ' - 'existing code) or you have a good reason, you can add this ' - 'comment (verbatim) on the line that causes the problem:\n\n' - 'public_deps = [ # no-presubmit-check TODO(webrtc:8603)\n') - for affected_file in gn_files: - for (line_number, affected_line) in affected_file.ChangedContents(): - if 'public_deps' in affected_line: - surpressed = no_presubmit_check_re.search(affected_line) - if not surpressed: - result.append( - output_api.PresubmitError(error_msg % (affected_file.LocalPath(), - line_number))) - return result + """Checks that public_deps is not used without a good reason.""" + result = [] + no_presubmit_check_re = input_api.re.compile( + r'# no-presubmit-check TODO\(webrtc:\d+\)') + error_msg = ('public_deps is not recommended in WebRTC BUILD.gn files ' + 'because it doesn\'t map well to downstream build systems.\n' + 'Used in: %s (line %d).\n' + 'If you are not adding this code (e.g. 
you are just moving ' + 'existing code) or you have a good reason, you can add this ' + 'comment (verbatim) on the line that causes the problem:\n\n' + 'public_deps = [ # no-presubmit-check TODO(webrtc:8603)\n') + for affected_file in gn_files: + for (line_number, affected_line) in affected_file.ChangedContents(): + if 'public_deps' in affected_line: + surpressed = no_presubmit_check_re.search(affected_line) + if not surpressed: + result.append( + output_api.PresubmitError( + error_msg % + (affected_file.LocalPath(), line_number))) + return result def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api): - result = [] - error_msg = ('check_includes overrides are not allowed since it can cause ' - 'incorrect dependencies to form. It effectively means that your ' - 'module can include any .h file without depending on its ' - 'corresponding target. There are some exceptional cases when ' - 'this is allowed: if so, get approval from a .gn owner in the ' - 'root OWNERS file.\n' - 'Used in: %s (line %d).') - no_presubmit_re = input_api.re.compile( - r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') - for affected_file in gn_files: - for (line_number, affected_line) in affected_file.ChangedContents(): - if ('check_includes' in affected_line - and not no_presubmit_re.search(affected_line)): - result.append( - output_api.PresubmitError(error_msg % (affected_file.LocalPath(), - line_number))) - return result + result = [] + error_msg = ( + 'check_includes overrides are not allowed since it can cause ' + 'incorrect dependencies to form. It effectively means that your ' + 'module can include any .h file without depending on its ' + 'corresponding target. 
There are some exceptional cases when ' + 'this is allowed: if so, get approval from a .gn owner in the ' + 'root OWNERS file.\n' + 'Used in: %s (line %d).') + no_presubmit_re = input_api.re.compile( + r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') + for affected_file in gn_files: + for (line_number, affected_line) in affected_file.ChangedContents(): + if ('check_includes' in affected_line + and not no_presubmit_re.search(affected_line)): + result.append( + output_api.PresubmitError( + error_msg % (affected_file.LocalPath(), line_number))) + return result def CheckGnChanges(input_api, output_api): - file_filter = lambda x: (input_api.FilterSourceFile( - x, white_list=(r'.+\.(gn|gni)$',), - black_list=(r'.*/presubmit_checks_lib/testdata/.*',))) - - gn_files = [] - for f in input_api.AffectedSourceFiles(file_filter): - gn_files.append(f) - - result = [] - if gn_files: - result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api)) - result.extend(CheckNoMixingSources(input_api, gn_files, output_api)) - result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files, + file_filter = lambda x: (input_api.FilterSourceFile( + x, + files_to_check=(r'.+\.(gn|gni)$', ), + files_to_skip=(r'.*/presubmit_checks_lib/testdata/.*', ))) + + gn_files = [] + for f in input_api.AffectedSourceFiles(file_filter): + gn_files.append(f) + + result = [] + if gn_files: + result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api)) + result.extend(CheckNoMixingSources(input_api, gn_files, output_api)) + result.extend(CheckAbseilDependencies(input_api, gn_files, output_api)) + result.extend( + CheckNoPackageBoundaryViolations(input_api, gn_files, output_api)) + result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, + output_api)) + result.extend( + CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api)) + result.extend( + CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api)) - result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, 
output_api)) - result.extend(CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api)) - result.extend(CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, - output_api)) - return result + return result def CheckGnGen(input_api, output_api): - """Runs `gn gen --check` with default args to detect mismatches between + """Runs `gn gen --check` with default args to detect mismatches between #includes and dependencies in the BUILD.gn files, as well as general build errors. """ - with _AddToPath(input_api.os_path.join( - input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')): - from build_helpers import RunGnCheck - errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5] - if errors: - return [output_api.PresubmitPromptWarning( - 'Some #includes do not match the build dependency graph. Please run:\n' - ' gn gen --check ', - long_text='\n\n'.join(errors))] - return [] + with _AddToPath( + input_api.os_path.join(input_api.PresubmitLocalPath(), + 'tools_webrtc', 'presubmit_checks_lib')): + from build_helpers import RunGnCheck + errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5] + if errors: + return [ + output_api.PresubmitPromptWarning( + 'Some #includes do not match the build dependency graph. Please run:\n' + ' gn gen --check ', + long_text='\n\n'.join(errors)) + ] + return [] def CheckUnwantedDependencies(input_api, output_api, source_file_filter): - """Runs checkdeps on #include statements added in this + """Runs checkdeps on #include statements added in this change. Breaking - rules is an error, breaking ! rules is a warning. """ - # Copied from Chromium's src/PRESUBMIT.py. - - # We need to wait until we have an input_api object and use this - # roundabout construct to import checkdeps because this file is - # eval-ed and thus doesn't have __file__. 
- src_path = FindSrcDirPath(input_api.PresubmitLocalPath()) - checkdeps_path = input_api.os_path.join(src_path, 'buildtools', 'checkdeps') - if not os.path.exists(checkdeps_path): - return [output_api.PresubmitError( - 'Cannot find checkdeps at %s\nHave you run "gclient sync" to ' - 'download all the DEPS entries?' % checkdeps_path)] - with _AddToPath(checkdeps_path): - import checkdeps - from cpp_checker import CppChecker - from rules import Rule - - added_includes = [] - for f in input_api.AffectedFiles(file_filter=source_file_filter): - if not CppChecker.IsCppFile(f.LocalPath()): - continue - - changed_lines = [line for _, line in f.ChangedContents()] - added_includes.append([f.LocalPath(), changed_lines]) - - deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath()) - - error_descriptions = [] - warning_descriptions = [] - for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes( - added_includes): - description_with_path = '%s\n %s' % (path, rule_description) - if rule_type == Rule.DISALLOW: - error_descriptions.append(description_with_path) - else: - warning_descriptions.append(description_with_path) - - results = [] - if error_descriptions: - results.append(output_api.PresubmitError( - 'You added one or more #includes that violate checkdeps rules.\n' - 'Check that the DEPS files in these locations contain valid rules.\n' - 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for ' - 'more details about checkdeps.', - error_descriptions)) - if warning_descriptions: - results.append(output_api.PresubmitPromptOrNotify( - 'You added one or more #includes of files that are temporarily\n' - 'allowed but being removed. Can you avoid introducing the\n' - '#include? See relevant DEPS file(s) for details and contacts.\n' - 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for ' - 'more details about checkdeps.', - warning_descriptions)) - return results + # Copied from Chromium's src/PRESUBMIT.py. 
+ + # We need to wait until we have an input_api object and use this + # roundabout construct to import checkdeps because this file is + # eval-ed and thus doesn't have __file__. + src_path = FindSrcDirPath(input_api.PresubmitLocalPath()) + checkdeps_path = input_api.os_path.join(src_path, 'buildtools', + 'checkdeps') + if not os.path.exists(checkdeps_path): + return [ + output_api.PresubmitError( + 'Cannot find checkdeps at %s\nHave you run "gclient sync" to ' + 'download all the DEPS entries?' % checkdeps_path) + ] + with _AddToPath(checkdeps_path): + import checkdeps + from cpp_checker import CppChecker + from rules import Rule + + added_includes = [] + for f in input_api.AffectedFiles(file_filter=source_file_filter): + if not CppChecker.IsCppFile(f.LocalPath()): + continue + + changed_lines = [line for _, line in f.ChangedContents()] + added_includes.append([f.LocalPath(), changed_lines]) + + deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath()) + + error_descriptions = [] + warning_descriptions = [] + for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes( + added_includes): + description_with_path = '%s\n %s' % (path, rule_description) + if rule_type == Rule.DISALLOW: + error_descriptions.append(description_with_path) + else: + warning_descriptions.append(description_with_path) + + results = [] + if error_descriptions: + results.append( + output_api.PresubmitError( + 'You added one or more #includes that violate checkdeps rules.\n' + 'Check that the DEPS files in these locations contain valid rules.\n' + 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for ' + 'more details about checkdeps.', error_descriptions)) + if warning_descriptions: + results.append( + output_api.PresubmitPromptOrNotify( + 'You added one or more #includes of files that are temporarily\n' + 'allowed but being removed. Can you avoid introducing the\n' + '#include? 
See relevant DEPS file(s) for details and contacts.\n' + 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for ' + 'more details about checkdeps.', warning_descriptions)) + return results def CheckCommitMessageBugEntry(input_api, output_api): - """Check that bug entries are well-formed in commit message.""" - bogus_bug_msg = ( - 'Bogus Bug entry: %s. Please specify the issue tracker prefix and the ' - 'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.') - results = [] - for bug in input_api.change.BugsFromDescription(): - bug = bug.strip() - if bug.lower() == 'none': - continue - if 'b/' not in bug and ':' not in bug: - try: - if int(bug) > 100000: - # Rough indicator for current chromium bugs. - prefix_guess = 'chromium' - else: - prefix_guess = 'webrtc' - results.append('Bug entry requires issue tracker prefix, e.g. %s:%s' % - (prefix_guess, bug)) - except ValueError: - results.append(bogus_bug_msg % bug) - elif not (re.match(r'\w+:\d+', bug) or re.match(r'b/\d+', bug)): - results.append(bogus_bug_msg % bug) - return [output_api.PresubmitError(r) for r in results] + """Check that bug entries are well-formed in commit message.""" + bogus_bug_msg = ( + 'Bogus Bug entry: %s. Please specify the issue tracker prefix and the ' + 'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.' + ) + results = [] + for bug in input_api.change.BugsFromDescription(): + bug = bug.strip() + if bug.lower() == 'none': + continue + if 'b/' not in bug and ':' not in bug: + try: + if int(bug) > 100000: + # Rough indicator for current chromium bugs. + prefix_guess = 'chromium' + else: + prefix_guess = 'webrtc' + results.append( + 'Bug entry requires issue tracker prefix, e.g. 
%s:%s' % + (prefix_guess, bug)) + except ValueError: + results.append(bogus_bug_msg % bug) + elif not (re.match(r'\w+:\d+', bug) or re.match(r'b/\d+', bug)): + results.append(bogus_bug_msg % bug) + return [output_api.PresubmitError(r) for r in results] def CheckChangeHasBugField(input_api, output_api): - """Requires that the changelist is associated with a bug. + """Requires that the changelist is associated with a bug. This check is stricter than the one in depot_tools/presubmit_canned_checks.py since it fails the presubmit if the bug field is missing or doesn't contain @@ -699,212 +782,259 @@ def CheckChangeHasBugField(input_api, output_api): This supports both 'BUG=' and 'Bug:' since we are in the process of migrating to Gerrit and it encourages the usage of 'Bug:'. """ - if input_api.change.BugsFromDescription(): - return [] - else: - return [output_api.PresubmitError( - 'The "Bug: [bug number]" footer is mandatory. Please create a bug and ' - 'reference it using either of:\n' - ' * https://bugs.webrtc.org - reference it using Bug: webrtc:XXXX\n' - ' * https://crbug.com - reference it using Bug: chromium:XXXXXX')] + if input_api.change.BugsFromDescription(): + return [] + else: + return [ + output_api.PresubmitError( + 'The "Bug: [bug number]" footer is mandatory. 
Please create a bug and ' + 'reference it using either of:\n' + ' * https://bugs.webrtc.org - reference it using Bug: webrtc:XXXX\n' + ' * https://crbug.com - reference it using Bug: chromium:XXXXXX' + ) + ] def CheckJSONParseErrors(input_api, output_api, source_file_filter): - """Check that JSON files do not contain syntax errors.""" - - def FilterFile(affected_file): - return (input_api.os_path.splitext(affected_file.LocalPath())[1] == '.json' - and source_file_filter(affected_file)) - - def GetJSONParseError(input_api, filename): - try: - contents = input_api.ReadFile(filename) - input_api.json.loads(contents) - except ValueError as e: - return e - return None - - results = [] - for affected_file in input_api.AffectedFiles( - file_filter=FilterFile, include_deletes=False): - parse_error = GetJSONParseError(input_api, - affected_file.AbsoluteLocalPath()) - if parse_error: - results.append(output_api.PresubmitError('%s could not be parsed: %s' % - (affected_file.LocalPath(), - parse_error))) - return results + """Check that JSON files do not contain syntax errors.""" + + def FilterFile(affected_file): + return (input_api.os_path.splitext( + affected_file.LocalPath())[1] == '.json' + and source_file_filter(affected_file)) + + def GetJSONParseError(input_api, filename): + try: + contents = input_api.ReadFile(filename) + input_api.json.loads(contents) + except ValueError as e: + return e + return None + + results = [] + for affected_file in input_api.AffectedFiles(file_filter=FilterFile, + include_deletes=False): + parse_error = GetJSONParseError(input_api, + affected_file.AbsoluteLocalPath()) + if parse_error: + results.append( + output_api.PresubmitError( + '%s could not be parsed: %s' % + (affected_file.LocalPath(), parse_error))) + return results def RunPythonTests(input_api, output_api): - def Join(*args): - return input_api.os_path.join(input_api.PresubmitLocalPath(), *args) - - test_directories = [ - input_api.PresubmitLocalPath(), - Join('rtc_tools', 
'py_event_log_analyzer'), - Join('audio', 'test', 'unittests'), - ] + [ - root for root, _, files in os.walk(Join('tools_webrtc')) - if any(f.endswith('_test.py') for f in files) - ] - - tests = [] - for directory in test_directories: - tests.extend( - input_api.canned_checks.GetUnitTestsInDirectory( - input_api, - output_api, - directory, - whitelist=[r'.+_test\.py$'])) - return input_api.RunTests(tests, parallel=True) + def Join(*args): + return input_api.os_path.join(input_api.PresubmitLocalPath(), *args) + + test_directories = [ + input_api.PresubmitLocalPath(), + Join('rtc_tools', 'py_event_log_analyzer'), + Join('audio', 'test', 'unittests'), + ] + [ + root for root, _, files in os.walk(Join('tools_webrtc')) if any( + f.endswith('_test.py') for f in files) + ] + + tests = [] + for directory in test_directories: + tests.extend( + input_api.canned_checks.GetUnitTestsInDirectory( + input_api, + output_api, + directory, + files_to_check=[r'.+_test\.py$'])) + return input_api.RunTests(tests, parallel=True) def CheckUsageOfGoogleProtobufNamespace(input_api, output_api, source_file_filter): - """Checks that the namespace google::protobuf has not been used.""" - files = [] - pattern = input_api.re.compile(r'google::protobuf') - proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h') - file_filter = lambda x: (input_api.FilterSourceFile(x) - and source_file_filter(x)) - for f in input_api.AffectedSourceFiles(file_filter): - if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']: - continue - contents = input_api.ReadFile(f) - if pattern.search(contents): - files.append(f) - - if files: - return [output_api.PresubmitError( - 'Please avoid to use namespace `google::protobuf` directly.\n' - 'Add a using directive in `%s` and include that header instead.' 
- % proto_utils_path, files)] - return [] + """Checks that the namespace google::protobuf has not been used.""" + files = [] + pattern = input_api.re.compile(r'google::protobuf') + proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h') + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']: + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.append(f) + + if files: + return [ + output_api.PresubmitError( + 'Please avoid to use namespace `google::protobuf` directly.\n' + 'Add a using directive in `%s` and include that header instead.' + % proto_utils_path, files) + ] + return [] def _LicenseHeader(input_api): - """Returns the license header regexp.""" - # Accept any year number from 2003 to the current year - current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1))) - years_re = '(' + '|'.join(allowed_years) + ')' - license_header = ( - r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' + """Returns the license header regexp.""" + # Accept any year number from 2003 to the current year + current_year = int(input_api.time.strftime('%Y')) + allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1))) + years_re = '(' + '|'.join(allowed_years) + ')' + license_header = ( + r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' r'All [Rr]ights [Rr]eserved\.\n' - r'.*?\n' - r'.*? Use of this source code is governed by a BSD-style license\n' - r'.*? that can be found in the LICENSE file in the root of the source\n' - r'.*? tree\. An additional intellectual property rights grant can be ' + r'.*?\n' + r'.*? Use of this source code is governed by a BSD-style license\n' + r'.*? that can be found in the LICENSE file in the root of the source\n' + r'.*? tree\. 
An additional intellectual property rights grant can be ' r'found\n' - r'.*? in the file PATENTS\. All contributing project authors may\n' - r'.*? be found in the AUTHORS file in the root of the source tree\.\n' - ) % { - 'year': years_re, - } - return license_header + r'.*? in the file PATENTS\. All contributing project authors may\n' + r'.*? be found in the AUTHORS file in the root of the source tree\.\n' + ) % { + 'year': years_re, + } + return license_header def CommonChecks(input_api, output_api): - """Checks common to both upload and commit.""" - results = [] - # Filter out files that are in objc or ios dirs from being cpplint-ed since - # they do not follow C++ lint rules. - black_list = input_api.DEFAULT_BLACK_LIST + ( - r".*\bobjc[\\\/].*", - r".*objc\.[hcm]+$", - ) - source_file_filter = lambda x: input_api.FilterSourceFile(x, None, black_list) - results.extend(CheckApprovedFilesLintClean( - input_api, output_api, source_file_filter)) - results.extend(input_api.canned_checks.CheckLicense( - input_api, output_api, _LicenseHeader(input_api))) - results.extend(input_api.canned_checks.RunPylint(input_api, output_api, - black_list=(r'^base[\\\/].*\.py$', - r'^build[\\\/].*\.py$', - r'^buildtools[\\\/].*\.py$', - r'^infra[\\\/].*\.py$', - r'^ios[\\\/].*\.py$', - r'^out.*[\\\/].*\.py$', - r'^testing[\\\/].*\.py$', - r'^third_party[\\\/].*\.py$', - r'^tools[\\\/].*\.py$', - # TODO(phoglund): should arguably be checked. - r'^tools_webrtc[\\\/]mb[\\\/].*\.py$', - r'^xcodebuild.*[\\\/].*\.py$',), - pylintrc='pylintrc')) - - # TODO(nisse): talk/ is no more, so make below checks simpler? - # WebRTC can't use the presubmit_canned_checks.PanProjectChecks function since - # we need to have different license checks in talk/ and webrtc/ directories. - # Instead, hand-picked checks are included below. - - # .m and .mm files are ObjC files. For simplicity we will consider .h files in - # ObjC subdirectories ObjC headers. 
- objc_filter_list = (r'.+\.m$', r'.+\.mm$', r'.+objc\/.+\.h$') - # Skip long-lines check for DEPS and GN files. - build_file_filter_list = (r'.+\.gn$', r'.+\.gni$', 'DEPS') - # Also we will skip most checks for third_party directory. - third_party_filter_list = (r'^third_party[\\\/].+',) - eighty_char_sources = lambda x: input_api.FilterSourceFile(x, - black_list=build_file_filter_list + objc_filter_list + - third_party_filter_list) - hundred_char_sources = lambda x: input_api.FilterSourceFile(x, - white_list=objc_filter_list) - non_third_party_sources = lambda x: input_api.FilterSourceFile(x, - black_list=third_party_filter_list) - - results.extend(input_api.canned_checks.CheckLongLines( - input_api, output_api, maxlen=80, source_file_filter=eighty_char_sources)) - results.extend(input_api.canned_checks.CheckLongLines( - input_api, output_api, maxlen=100, - source_file_filter=hundred_char_sources)) - results.extend(input_api.canned_checks.CheckChangeHasNoTabs( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(input_api.canned_checks.CheckAuthorizedAuthor( - input_api, output_api, bot_whitelist=[ - 'chromium-webrtc-autoroll@webrtc-ci.iam.gserviceaccount.com' - ])) - results.extend(input_api.canned_checks.CheckChangeTodoHasOwner( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(input_api.canned_checks.CheckPatchFormatted( - input_api, output_api)) - results.extend(CheckNativeApiHeaderChanges(input_api, output_api)) - results.extend(CheckNoIOStreamInHeaders( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(CheckNoPragmaOnce( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(CheckNoFRIEND_TEST( - input_api, output_api, source_file_filter=non_third_party_sources)) - 
results.extend(CheckGnChanges(input_api, output_api)) - results.extend(CheckUnwantedDependencies( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(CheckJSONParseErrors( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(RunPythonTests(input_api, output_api)) - results.extend(CheckUsageOfGoogleProtobufNamespace( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(CheckOrphanHeaders( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(CheckNewlineAtTheEndOfProtoFiles( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend(CheckNoStreamUsageIsAdded( - input_api, output_api, non_third_party_sources)) - results.extend(CheckNoTestCaseUsageIsAdded( - input_api, output_api, non_third_party_sources)) - results.extend(CheckAddedDepsHaveTargetApprovals(input_api, output_api)) - results.extend(CheckApiDepsFileIsUpToDate(input_api, output_api)) - results.extend(CheckAbslMemoryInclude( - input_api, output_api, non_third_party_sources)) - results.extend(CheckBannedAbslMakeUnique( - input_api, output_api, non_third_party_sources)) - return results + """Checks common to both upload and commit.""" + results = [] + # Filter out files that are in objc or ios dirs from being cpplint-ed since + # they do not follow C++ lint rules. 
+ exception_list = input_api.DEFAULT_FILES_TO_SKIP + ( + r".*\bobjc[\\\/].*", + r".*objc\.[hcm]+$", + ) + source_file_filter = lambda x: input_api.FilterSourceFile( + x, None, exception_list) + results.extend( + CheckApprovedFilesLintClean(input_api, output_api, source_file_filter)) + results.extend( + input_api.canned_checks.CheckLicense(input_api, output_api, + _LicenseHeader(input_api))) + results.extend( + input_api.canned_checks.RunPylint( + input_api, + output_api, + files_to_skip=( + r'^base[\\\/].*\.py$', + r'^build[\\\/].*\.py$', + r'^buildtools[\\\/].*\.py$', + r'^infra[\\\/].*\.py$', + r'^ios[\\\/].*\.py$', + r'^out.*[\\\/].*\.py$', + r'^testing[\\\/].*\.py$', + r'^third_party[\\\/].*\.py$', + r'^tools[\\\/].*\.py$', + # TODO(phoglund): should arguably be checked. + r'^tools_webrtc[\\\/]mb[\\\/].*\.py$', + r'^xcodebuild.*[\\\/].*\.py$', + ), + pylintrc='pylintrc')) + + # TODO(nisse): talk/ is no more, so make below checks simpler? + # WebRTC can't use the presubmit_canned_checks.PanProjectChecks function since + # we need to have different license checks in talk/ and webrtc/ directories. + # Instead, hand-picked checks are included below. + + # .m and .mm files are ObjC files. For simplicity we will consider .h files in + # ObjC subdirectories ObjC headers. + objc_filter_list = (r'.+\.m$', r'.+\.mm$', r'.+objc\/.+\.h$') + # Skip long-lines check for DEPS and GN files. + build_file_filter_list = (r'.+\.gn$', r'.+\.gni$', 'DEPS') + # Also we will skip most checks for third_party directory. 
+ third_party_filter_list = (r'^third_party[\\\/].+', ) + eighty_char_sources = lambda x: input_api.FilterSourceFile( + x, + files_to_skip=build_file_filter_list + objc_filter_list + + third_party_filter_list) + hundred_char_sources = lambda x: input_api.FilterSourceFile( + x, files_to_check=objc_filter_list) + non_third_party_sources = lambda x: input_api.FilterSourceFile( + x, files_to_skip=third_party_filter_list) + + results.extend( + input_api.canned_checks.CheckLongLines( + input_api, + output_api, + maxlen=80, + source_file_filter=eighty_char_sources)) + results.extend( + input_api.canned_checks.CheckLongLines( + input_api, + output_api, + maxlen=100, + source_file_filter=hundred_char_sources)) + results.extend( + input_api.canned_checks.CheckChangeHasNoTabs( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + input_api.canned_checks.CheckChangeHasNoStrayWhitespace( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + input_api.canned_checks.CheckAuthorizedAuthor( + input_api, + output_api, + bot_allowlist=[ + 'chromium-webrtc-autoroll@webrtc-ci.iam.gserviceaccount.com' + ])) + results.extend( + input_api.canned_checks.CheckChangeTodoHasOwner( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + input_api.canned_checks.CheckPatchFormatted(input_api, output_api)) + results.extend(CheckNativeApiHeaderChanges(input_api, output_api)) + results.extend( + CheckNoIOStreamInHeaders(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend( + CheckNoPragmaOnce(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend( + CheckNoFRIEND_TEST(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend(CheckGnChanges(input_api, output_api)) + results.extend( + CheckUnwantedDependencies(input_api, + output_api, + source_file_filter=non_third_party_sources)) + 
results.extend( + CheckJSONParseErrors(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend(RunPythonTests(input_api, output_api)) + results.extend( + CheckUsageOfGoogleProtobufNamespace( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + CheckOrphanHeaders(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend( + CheckNewlineAtTheEndOfProtoFiles( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + CheckNoStreamUsageIsAdded(input_api, output_api, + non_third_party_sources)) + results.extend( + CheckNoTestCaseUsageIsAdded(input_api, output_api, + non_third_party_sources)) + results.extend(CheckAddedDepsHaveTargetApprovals(input_api, output_api)) + results.extend(CheckApiDepsFileIsUpToDate(input_api, output_api)) + results.extend( + CheckAbslMemoryInclude(input_api, output_api, non_third_party_sources)) + results.extend( + CheckBannedAbslMakeUnique(input_api, output_api, + non_third_party_sources)) + results.extend( + CheckObjcApiSymbols(input_api, output_api, non_third_party_sources)) + return results def CheckApiDepsFileIsUpToDate(input_api, output_api): - """Check that 'include_rules' in api/DEPS is up to date. + """Check that 'include_rules' in api/DEPS is up to date. The file api/DEPS must be kept up to date in order to avoid to avoid to include internal header from WebRTC's api/ headers. @@ -913,307 +1043,362 @@ def CheckApiDepsFileIsUpToDate(input_api, output_api): rule for each root level directory. More focused allow rules can be added to 'specific_include_rules'. """ - results = [] - api_deps = os.path.join(input_api.PresubmitLocalPath(), 'api', 'DEPS') - with open(api_deps) as f: - deps_content = _ParseDeps(f.read()) - - include_rules = deps_content.get('include_rules', []) - dirs_to_skip = set(['api', 'docs']) - - # Only check top level directories affected by the current CL. 
- dirs_to_check = set() - for f in input_api.AffectedFiles(): - path_tokens = [t for t in f.LocalPath().split(os.sep) if t] - if len(path_tokens) > 1: - if (path_tokens[0] not in dirs_to_skip and - os.path.isdir(os.path.join(input_api.PresubmitLocalPath(), - path_tokens[0]))): - dirs_to_check.add(path_tokens[0]) - - missing_include_rules = set() - for p in dirs_to_check: - rule = '-%s' % p - if rule not in include_rules: - missing_include_rules.add(rule) - - if missing_include_rules: - error_msg = [ - 'include_rules = [\n', - ' ...\n', - ] + results = [] + api_deps = os.path.join(input_api.PresubmitLocalPath(), 'api', 'DEPS') + with open(api_deps) as f: + deps_content = _ParseDeps(f.read()) + + include_rules = deps_content.get('include_rules', []) + dirs_to_skip = set(['api', 'docs']) + + # Only check top level directories affected by the current CL. + dirs_to_check = set() + for f in input_api.AffectedFiles(): + path_tokens = [t for t in f.LocalPath().split(os.sep) if t] + if len(path_tokens) > 1: + if (path_tokens[0] not in dirs_to_skip and os.path.isdir( + os.path.join(input_api.PresubmitLocalPath(), + path_tokens[0]))): + dirs_to_check.add(path_tokens[0]) + + missing_include_rules = set() + for p in dirs_to_check: + rule = '-%s' % p + if rule not in include_rules: + missing_include_rules.add(rule) + + if missing_include_rules: + error_msg = [ + 'include_rules = [\n', + ' ...\n', + ] + + for r in sorted(missing_include_rules): + error_msg.append(' "%s",\n' % str(r)) + + error_msg.append(' ...\n') + error_msg.append(']\n') + + results.append( + output_api.PresubmitError( + 'New root level directory detected! WebRTC api/ headers should ' + 'not #include headers from \n' + 'the new directory, so please update "include_rules" in file\n' + '"%s". 
Example:\n%s\n' % (api_deps, ''.join(error_msg)))) + + return results - for r in sorted(missing_include_rules): - error_msg.append(' "%s",\n' % str(r)) - error_msg.append(' ...\n') - error_msg.append(']\n') +def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter): + file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and + source_file_filter(f)) + + files = [] + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + for _, line in f.ChangedContents(): + if 'absl::make_unique' in line: + files.append(f) + break + + if len(files): + return [ + output_api.PresubmitError( + 'Please use std::make_unique instead of absl::make_unique.\n' + 'Affected files:', files) + ] + return [] - results.append(output_api.PresubmitError( - 'New root level directory detected! WebRTC api/ headers should ' - 'not #include headers from \n' - 'the new directory, so please update "include_rules" in file\n' - '"%s". Example:\n%s\n' % (api_deps, ''.join(error_msg)))) - return results +def CheckObjcApiSymbols(input_api, output_api, source_file_filter): + rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}', + re.MULTILINE | re.DOTALL) + file_filter = lambda f: (f.LocalPath().endswith(('.h')) and + source_file_filter(f)) + + files = [] + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath(): + continue + contents = input_api.ReadFile(f) + for match in rtc_objc_export.finditer(contents): + export_block = match.group(0) + if 'RTC_OBJC_TYPE' not in export_block: + files.append(f.LocalPath()) + + if len(files): + return [ + output_api.PresubmitError( + 'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' + + 'macro.\n\n' + 'For example:\n' + + 'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'Please fix 
the following files:', files) + ] + return [] -def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter): - file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) - and source_file_filter(f)) - - files = [] - for f in input_api.AffectedFiles( - include_deletes=False, file_filter=file_filter): - for _, line in f.ChangedContents(): - if 'absl::make_unique' in line: - files.append(f) - break - - if len(files): - return [output_api.PresubmitError( - 'Please use std::make_unique instead of absl::make_unique.\n' - 'Affected files:', - files)] - return [] def CheckAbslMemoryInclude(input_api, output_api, source_file_filter): - pattern = input_api.re.compile( - r'^#include\s*"absl/memory/memory.h"', input_api.re.MULTILINE) - file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) - and source_file_filter(f)) - - files = [] - for f in input_api.AffectedFiles( - include_deletes=False, file_filter=file_filter): - contents = input_api.ReadFile(f) - if pattern.search(contents): - continue - for _, line in f.ChangedContents(): - if 'absl::WrapUnique' in line: - files.append(f) - break - - if len(files): - return [output_api.PresubmitError( - 'Please include "absl/memory/memory.h" header for absl::WrapUnique.\n' - 'This header may or may not be included transitively depending on the ' - 'C++ standard version.', - files)] - return [] + pattern = input_api.re.compile(r'^#include\s*"absl/memory/memory.h"', + input_api.re.MULTILINE) + file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and + source_file_filter(f)) + + files = [] + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + contents = input_api.ReadFile(f) + if pattern.search(contents): + continue + for _, line in f.ChangedContents(): + if 'absl::WrapUnique' in line: + files.append(f) + break + + if len(files): + return [ + output_api.PresubmitError( + 'Please include "absl/memory/memory.h" header for absl::WrapUnique.\n' + 'This header may or may not 
be included transitively depending on the ' + 'C++ standard version.', files) + ] + return [] + def CheckChangeOnUpload(input_api, output_api): - results = [] - results.extend(CommonChecks(input_api, output_api)) - results.extend(CheckGnGen(input_api, output_api)) - results.extend( - input_api.canned_checks.CheckGNFormatted(input_api, output_api)) - return results + results = [] + results.extend(CommonChecks(input_api, output_api)) + results.extend(CheckGnGen(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckGNFormatted(input_api, output_api)) + return results def CheckChangeOnCommit(input_api, output_api): - results = [] - results.extend(CommonChecks(input_api, output_api)) - results.extend(VerifyNativeApiHeadersListIsValid(input_api, output_api)) - results.extend(input_api.canned_checks.CheckOwners(input_api, output_api)) - results.extend(input_api.canned_checks.CheckChangeWasUploaded( - input_api, output_api)) - results.extend(input_api.canned_checks.CheckChangeHasDescription( - input_api, output_api)) - results.extend(CheckChangeHasBugField(input_api, output_api)) - results.extend(CheckCommitMessageBugEntry(input_api, output_api)) - results.extend(input_api.canned_checks.CheckTreeIsOpen( - input_api, output_api, - json_url='http://webrtc-status.appspot.com/current?format=json')) - return results + results = [] + results.extend(CommonChecks(input_api, output_api)) + results.extend(VerifyNativeApiHeadersListIsValid(input_api, output_api)) + results.extend(input_api.canned_checks.CheckOwners(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckChangeWasUploaded(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckChangeHasDescription( + input_api, output_api)) + results.extend(CheckChangeHasBugField(input_api, output_api)) + results.extend(CheckCommitMessageBugEntry(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckTreeIsOpen( + input_api, + output_api, + 
json_url='http://webrtc-status.appspot.com/current?format=json')) + return results def CheckOrphanHeaders(input_api, output_api, source_file_filter): - # We need to wait until we have an input_api object and use this - # roundabout construct to import prebubmit_checks_lib because this file is - # eval-ed and thus doesn't have __file__. - error_msg = """{} should be listed in {}.""" - results = [] - orphan_blacklist = [ - os.path.join('tools_webrtc', 'ios', 'SDK'), - ] - with _AddToPath(input_api.os_path.join( - input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')): - from check_orphan_headers import GetBuildGnPathFromFilePath - from check_orphan_headers import IsHeaderInBuildGn - - file_filter = lambda x: input_api.FilterSourceFile( - x, black_list=orphan_blacklist) and source_file_filter(x) - for f in input_api.AffectedSourceFiles(file_filter): - if f.LocalPath().endswith('.h'): - file_path = os.path.abspath(f.LocalPath()) - root_dir = os.getcwd() - gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists, - root_dir) - in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path) - if not in_build_gn: - results.append(output_api.PresubmitError(error_msg.format( - f.LocalPath(), os.path.relpath(gn_file_path)))) - return results - - -def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, source_file_filter): - """Checks that all .proto files are terminated with a newline.""" - error_msg = 'File {} must end with exactly one newline.' 
- results = [] - file_filter = lambda x: input_api.FilterSourceFile( - x, white_list=(r'.+\.proto$',)) and source_file_filter(x) - for f in input_api.AffectedSourceFiles(file_filter): - file_path = f.LocalPath() - with open(file_path) as f: - lines = f.readlines() - if len(lines) > 0 and not lines[-1].endswith('\n'): - results.append(output_api.PresubmitError(error_msg.format(file_path))) - return results + # We need to wait until we have an input_api object and use this + # roundabout construct to import prebubmit_checks_lib because this file is + # eval-ed and thus doesn't have __file__. + error_msg = """{} should be listed in {}.""" + results = [] + exempt_paths = [ + os.path.join('tools_webrtc', 'ios', 'SDK'), + ] + with _AddToPath( + input_api.os_path.join(input_api.PresubmitLocalPath(), + 'tools_webrtc', 'presubmit_checks_lib')): + from check_orphan_headers import GetBuildGnPathFromFilePath + from check_orphan_headers import IsHeaderInBuildGn + + file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_skip=exempt_paths) and source_file_filter(x) + for f in input_api.AffectedSourceFiles(file_filter): + if f.LocalPath().endswith('.h'): + file_path = os.path.abspath(f.LocalPath()) + root_dir = os.getcwd() + gn_file_path = GetBuildGnPathFromFilePath(file_path, + os.path.exists, root_dir) + in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path) + if not in_build_gn: + results.append( + output_api.PresubmitError( + error_msg.format(f.LocalPath(), + os.path.relpath(gn_file_path)))) + return results + + +def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, + source_file_filter): + """Checks that all .proto files are terminated with a newline.""" + error_msg = 'File {} must end with exactly one newline.' 
+ results = [] + file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_check=(r'.+\.proto$', )) and source_file_filter(x) + for f in input_api.AffectedSourceFiles(file_filter): + file_path = f.LocalPath() + with open(file_path) as f: + lines = f.readlines() + if len(lines) > 0 and not lines[-1].endswith('\n'): + results.append( + output_api.PresubmitError(error_msg.format(file_path))) + return results def _ExtractAddRulesFromParsedDeps(parsed_deps): - """Extract the rules that add dependencies from a parsed DEPS file. + """Extract the rules that add dependencies from a parsed DEPS file. Args: parsed_deps: the locals dictionary from evaluating the DEPS file.""" - add_rules = set() - add_rules.update([ - rule[1:] for rule in parsed_deps.get('include_rules', []) - if rule.startswith('+') or rule.startswith('!') - ]) - for _, rules in parsed_deps.get('specific_include_rules', - {}).iteritems(): + add_rules = set() add_rules.update([ - rule[1:] for rule in rules + rule[1:] for rule in parsed_deps.get('include_rules', []) if rule.startswith('+') or rule.startswith('!') ]) - return add_rules + for _, rules in parsed_deps.get('specific_include_rules', {}).iteritems(): + add_rules.update([ + rule[1:] for rule in rules + if rule.startswith('+') or rule.startswith('!') + ]) + return add_rules def _ParseDeps(contents): - """Simple helper for parsing DEPS files.""" - # Stubs for handling special syntax in the root DEPS file. - class VarImpl(object): + """Simple helper for parsing DEPS files.""" - def __init__(self, local_scope): - self._local_scope = local_scope + # Stubs for handling special syntax in the root DEPS file. 
+ class VarImpl(object): + def __init__(self, local_scope): + self._local_scope = local_scope - def Lookup(self, var_name): - """Implements the Var syntax.""" - try: - return self._local_scope['vars'][var_name] - except KeyError: - raise Exception('Var is not defined: %s' % var_name) + def Lookup(self, var_name): + """Implements the Var syntax.""" + try: + return self._local_scope['vars'][var_name] + except KeyError: + raise Exception('Var is not defined: %s' % var_name) - local_scope = {} - global_scope = { - 'Var': VarImpl(local_scope).Lookup, - } - exec contents in global_scope, local_scope - return local_scope + local_scope = {} + global_scope = { + 'Var': VarImpl(local_scope).Lookup, + } + exec contents in global_scope, local_scope + return local_scope def _CalculateAddedDeps(os_path, old_contents, new_contents): - """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns + """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns a set of DEPS entries that we should look up. For a directory (rather than a specific filename) we fake a path to a specific filename by adding /DEPS. This is chosen as a file that will seldom or never be subject to per-file include_rules. """ - # We ignore deps entries on auto-generated directories. - auto_generated_dirs = ['grit', 'jni'] + # We ignore deps entries on auto-generated directories. + auto_generated_dirs = ['grit', 'jni'] - old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents)) - new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents)) + old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents)) + new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents)) - added_deps = new_deps.difference(old_deps) + added_deps = new_deps.difference(old_deps) - results = set() - for added_dep in added_deps: - if added_dep.split('/')[0] in auto_generated_dirs: - continue - # Assume that a rule that ends in .h is a rule for a specific file. 
- if added_dep.endswith('.h'): - results.add(added_dep) - else: - results.add(os_path.join(added_dep, 'DEPS')) - return results + results = set() + for added_dep in added_deps: + if added_dep.split('/')[0] in auto_generated_dirs: + continue + # Assume that a rule that ends in .h is a rule for a specific file. + if added_dep.endswith('.h'): + results.add(added_dep) + else: + results.add(os_path.join(added_dep, 'DEPS')) + return results def CheckAddedDepsHaveTargetApprovals(input_api, output_api): - """When a dependency prefixed with + is added to a DEPS file, we + """When a dependency prefixed with + is added to a DEPS file, we want to make sure that the change is reviewed by an OWNER of the target file or directory, to avoid layering violations from being introduced. This check verifies that this happens. """ - virtual_depended_on_files = set() - - file_filter = lambda f: not input_api.re.match( - r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath()) - for f in input_api.AffectedFiles(include_deletes=False, - file_filter=file_filter): - filename = input_api.os_path.basename(f.LocalPath()) - if filename == 'DEPS': - virtual_depended_on_files.update(_CalculateAddedDeps( - input_api.os_path, - '\n'.join(f.OldContents()), - '\n'.join(f.NewContents()))) - - if not virtual_depended_on_files: - return [] + virtual_depended_on_files = set() + + file_filter = lambda f: not input_api.re.match( + r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath()) + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + filename = input_api.os_path.basename(f.LocalPath()) + if filename == 'DEPS': + virtual_depended_on_files.update( + _CalculateAddedDeps(input_api.os_path, + '\n'.join(f.OldContents()), + '\n'.join(f.NewContents()))) + + if not virtual_depended_on_files: + return [] - if input_api.is_committing: - if input_api.tbr: - return [output_api.PresubmitNotifyResult( - '--tbr was specified, skipping OWNERS check for DEPS additions')] - if 
input_api.dry_run: - return [output_api.PresubmitNotifyResult( - 'This is a dry run, skipping OWNERS check for DEPS additions')] - if not input_api.change.issue: - return [output_api.PresubmitError( - "DEPS approval by OWNERS check failed: this change has " - "no change number, so we can't check it for approvals.")] - output = output_api.PresubmitError - else: - output = output_api.PresubmitNotifyResult - - owners_db = input_api.owners_db - owner_email, reviewers = ( - input_api.canned_checks.GetCodereviewOwnerAndReviewers( - input_api, - owners_db.email_regexp, - approval_needed=input_api.is_committing)) - - owner_email = owner_email or input_api.change.author_email - - reviewers_plus_owner = set(reviewers) - if owner_email: - reviewers_plus_owner.add(owner_email) - missing_files = owners_db.files_not_covered_by(virtual_depended_on_files, - reviewers_plus_owner) - - # We strip the /DEPS part that was added by - # _FilesToCheckForIncomingDeps to fake a path to a file in a - # directory. 
- def StripDeps(path): - start_deps = path.rfind('/DEPS') - if start_deps != -1: - return path[:start_deps] + if input_api.is_committing: + if input_api.tbr: + return [ + output_api.PresubmitNotifyResult( + '--tbr was specified, skipping OWNERS check for DEPS additions' + ) + ] + if input_api.dry_run: + return [ + output_api.PresubmitNotifyResult( + 'This is a dry run, skipping OWNERS check for DEPS additions' + ) + ] + if not input_api.change.issue: + return [ + output_api.PresubmitError( + "DEPS approval by OWNERS check failed: this change has " + "no change number, so we can't check it for approvals.") + ] + output = output_api.PresubmitError else: - return path - unapproved_dependencies = ["'+%s'," % StripDeps(path) - for path in missing_files] - - if unapproved_dependencies: - output_list = [ - output('You need LGTM from owners of depends-on paths in DEPS that were ' - 'modified in this CL:\n %s' % - '\n '.join(sorted(unapproved_dependencies)))] - suggested_owners = owners_db.reviewers_for(missing_files, owner_email) - output_list.append(output( - 'Suggested missing target path OWNERS:\n %s' % - '\n '.join(suggested_owners or []))) - return output_list - - return [] + output = output_api.PresubmitNotifyResult + + owners_db = input_api.owners_db + owner_email, reviewers = ( + input_api.canned_checks.GetCodereviewOwnerAndReviewers( + input_api, + owners_db.email_regexp, + approval_needed=input_api.is_committing)) + + owner_email = owner_email or input_api.change.author_email + + reviewers_plus_owner = set(reviewers) + if owner_email: + reviewers_plus_owner.add(owner_email) + missing_files = owners_db.files_not_covered_by(virtual_depended_on_files, + reviewers_plus_owner) + + # We strip the /DEPS part that was added by + # _FilesToCheckForIncomingDeps to fake a path to a file in a + # directory. 
+ def StripDeps(path): + start_deps = path.rfind('/DEPS') + if start_deps != -1: + return path[:start_deps] + else: + return path + + unapproved_dependencies = [ + "'+%s'," % StripDeps(path) for path in missing_files + ] + + if unapproved_dependencies: + output_list = [ + output( + 'You need LGTM from owners of depends-on paths in DEPS that were ' + 'modified in this CL:\n %s' % + '\n '.join(sorted(unapproved_dependencies))) + ] + suggested_owners = owners_db.reviewers_for(missing_files, owner_email) + output_list.append( + output('Suggested missing target path OWNERS:\n %s' % + '\n '.join(suggested_owners or []))) + return output_list + + return [] diff --git a/README.chromium b/README.chromium index 246c13dc09..58c8da8403 100644 --- a/README.chromium +++ b/README.chromium @@ -1,13 +1,14 @@ -Name: WebRTC -URL: http://www.webrtc.org -Version: 90 -License: BSD -License File: LICENSE - -Description: -WebRTC provides real time voice and video processing -functionality to enable the implementation of -PeerConnection/MediaStream. - -Third party code used in this project is described -in the file LICENSE_THIRD_PARTY. +Name: WebRTC +URL: http://www.webrtc.org +Version: 90 +CPEPrefix: cpe:/a:webrtc_project:webrtc:90 +License: BSD +License File: LICENSE + +Description: +WebRTC provides real time voice and video processing +functionality to enable the implementation of +PeerConnection/MediaStream. + +Third party code used in this project is described +in the file LICENSE_THIRD_PARTY. diff --git a/README.md b/README.md index 2a24595476..1624e98cbb 100644 --- a/README.md +++ b/README.md @@ -11,8 +11,8 @@ amongst others. ### Development -See http://www.webrtc.org/native-code/development for instructions on how to get -started developing with the native code. +See [here][native-dev] for instructions on how to get started +developing with the native code. [Authoritative list](native-api.md) of directories that contain the native API header files. 
@@ -26,3 +26,6 @@ native API header files. * Continuous build: http://build.chromium.org/p/client.webrtc * [Coding style guide](style-guide.md) * [Code of conduct](CODE_OF_CONDUCT.md) + * [Reporting bugs](docs/bug-reporting.md) + +[native-dev]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/index.md diff --git a/WATCHLISTS b/WATCHLISTS index fe5d59fc3c..cf4ea32c40 100644 --- a/WATCHLISTS +++ b/WATCHLISTS @@ -114,8 +114,7 @@ 'saza@webrtc.org'], 'audio': ['peah@webrtc.org'], 'api': ['kwiberg@webrtc.org','peah@webrtc.org'], - 'base': ['kwiberg@webrtc.org', - 'benwright@webrtc.org'], + 'base': ['kwiberg@webrtc.org'], 'call': ['mflodman@webrtc.org', 'stefan@webrtc.org'], 'video': ['mflodman@webrtc.org', diff --git a/abseil-in-webrtc.md b/abseil-in-webrtc.md index 0541d3c7a5..79b1031ffd 100644 --- a/abseil-in-webrtc.md +++ b/abseil-in-webrtc.md @@ -9,6 +9,17 @@ adds the first use. [abseil]: https://abseil.io/about/ + +## How to depend on Abseil + +For build targets of type `rtc_library`, `rtc_source_set` and +`rtc_static_library`, dependencies on Abseil need to be listed in `absl_deps` +instead of `deps`. + +This is needed in order to support the Abseil component build in Chromium. In +that build mode, WebRTC will depend on a monolithic Abseil build target that +will generate a shared library. + ## **Allowed** * `absl::InlinedVector` @@ -23,9 +34,11 @@ adds the first use. * `absl::variant` and related stuff from `absl/types/variant.h`. * The functions in `absl/algorithm/algorithm.h` and `absl/algorithm/container.h`. +* `absl/base/const_init.h` for mutex initialization. * The macros in `absl/base/attributes.h`, `absl/base/config.h` and `absl/base/macros.h`. + ## **Disallowed** ### `absl::make_unique` @@ -34,7 +47,7 @@ adds the first use. ### `absl::Mutex` -*Use `rtc::CriticalSection` instead.* +*Use `webrtc::Mutex` instead.* Chromium has a ban on new static initializers, and `absl::Mutex` uses one. 
To make `absl::Mutex` available, we would need to nicely ask the diff --git a/api/BUILD.gn b/api/BUILD.gn index 860ac36e91..f02c5fd434 100644 --- a/api/BUILD.gn +++ b/api/BUILD.gn @@ -55,6 +55,7 @@ if (!build_with_chromium) { "audio:audio_mixer_api", "audio_codecs:audio_codecs_api", "task_queue:default_task_queue_factory", + "transport:field_trial_based_config", "video_codecs:video_codecs_api", ] } @@ -68,11 +69,10 @@ rtc_library("rtp_headers") { ] deps = [ ":array_view", - "..:webrtc_common", "units:timestamp", "video:video_rtp_headers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtp_packet_info") { @@ -87,12 +87,10 @@ rtc_library("rtp_packet_info") { ":refcountedbase", ":rtp_headers", ":scoped_refptr", - "..:webrtc_common", - "../rtc_base:deprecation", "../rtc_base:rtc_base_approved", "../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("media_stream_interface") { @@ -100,6 +98,7 @@ rtc_library("media_stream_interface") { sources = [ "media_stream_interface.cc", "media_stream_interface.h", + "media_stream_track.h", "notifier.h", ] deps = [ @@ -112,8 +111,8 @@ rtc_library("media_stream_interface") { "../rtc_base/system:rtc_export", "video:recordable_encoded_frame", "video:video_frame", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("libjingle_peerconnection_api") { @@ -150,6 +149,7 @@ rtc_library("libjingle_peerconnection_api") { "rtp_transceiver_interface.h", "sctp_transport_interface.cc", "sctp_transport_interface.h", + "set_local_description_observer_interface.h", "set_remote_description_observer_interface.h", "stats_types.cc", "stats_types.h", @@ -162,15 +162,19 @@ rtc_library("libjingle_peerconnection_api") { ":audio_options_api", ":callfactory_api", ":fec_controller_api", + 
":frame_transformer_interface", ":libjingle_logging_api", ":media_stream_interface", ":network_state_predictor_api", ":packet_socket_factory", + ":priority", ":rtc_error", ":rtc_stats_api", ":rtp_packet_info", ":rtp_parameters", + ":rtp_transceiver_direction", ":scoped_refptr", + "adaptation:resource_adaptation_api", "audio:audio_mixer_api", "audio_codecs:audio_codecs_api", "crypto:frame_decryptor_interface", @@ -180,28 +184,20 @@ rtc_library("libjingle_peerconnection_api") { "rtc_event_log", "task_queue", "transport:bitrate_settings", - "transport:datagram_transport_interface", "transport:enums", "transport:network_control", + "transport:sctp_transport_factory_interface", "transport:webrtc_key_value_config", - "transport/media:audio_interfaces", - "transport/media:media_transport_interface", - "transport/media:video_interfaces", "transport/rtp:rtp_source", "units:data_rate", "units:timestamp", "video:encoded_image", "video:video_frame", "video:video_rtp_headers", - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", # Basically, don't add stuff here. You might break sensitive downstream # targets like pnacl. API should not depend on anything outside of this # file, really. All these should arguably go away in time. 
- "..:webrtc_common", "../media:rtc_media_base", "../media:rtc_media_config", "../modules/audio_processing:audio_processing_statistics", @@ -211,6 +207,23 @@ rtc_library("libjingle_peerconnection_api") { "../rtc_base:rtc_base_approved", "../rtc_base/system:rtc_export", ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("frame_transformer_interface") { + visibility = [ "*" ] + sources = [ "frame_transformer_interface.h" ] + deps = [ + ":scoped_refptr", + "../rtc_base:refcount", + "video:encoded_frame", + "video:video_frame_metadata", + ] } rtc_library("rtc_error") { @@ -224,8 +237,8 @@ rtc_library("rtc_error") { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("packet_socket_factory") { @@ -261,7 +274,6 @@ rtc_source_set("video_quality_test_fixture_api") { "../test:video_test_common", "transport:bitrate_settings", "transport:network_control", - "transport/media:media_transport_interface", "video_codecs:video_codecs_api", ] } @@ -272,18 +284,32 @@ rtc_source_set("video_quality_analyzer_api") { sources = [ "test/video_quality_analyzer_interface.h" ] deps = [ + ":array_view", ":stats_observer_interface", "video:encoded_image", "video:video_frame", "video:video_rtp_headers", "video_codecs:video_codecs_api", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } -rtc_source_set("track_id_stream_label_map") { +rtc_source_set("track_id_stream_info_map") { + visibility = [ "*" ] + sources = [ "test/track_id_stream_info_map.h" ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_source_set("rtp_transceiver_direction") { visibility = [ "*" ] - sources = [ 
"test/track_id_stream_label_map.h" ] + sources = [ "rtp_transceiver_direction.h" ] +} + +rtc_source_set("priority") { + sources = [ "priority.h" ] } rtc_library("rtp_parameters") { @@ -296,13 +322,24 @@ rtc_library("rtp_parameters") { ] deps = [ ":array_view", + ":priority", + ":rtp_transceiver_direction", "../rtc_base:checks", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } +if (is_android) { + java_cpp_enum("priority_enums") { + sources = [ "priority.h" ] + } +} + rtc_source_set("audio_quality_analyzer_api") { visibility = [ "*" ] testonly = true @@ -310,7 +347,7 @@ rtc_source_set("audio_quality_analyzer_api") { deps = [ ":stats_observer_interface", - ":track_id_stream_label_map", + ":track_id_stream_info_map", ] } @@ -319,11 +356,9 @@ rtc_source_set("stats_observer_interface") { testonly = true sources = [ "test/stats_observer_interface.h" ] - deps = [ - # For api/stats_types.h - ":libjingle_peerconnection_api", - ":rtp_parameters", - ] + deps = [ ":rtc_stats_api" ] + + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("peer_connection_quality_test_fixture_api") { @@ -338,21 +373,26 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { ":frame_generator_api", ":function_view", ":libjingle_peerconnection_api", + ":media_stream_interface", ":network_state_predictor_api", ":packet_socket_factory", + ":rtp_parameters", ":simulated_network_api", ":stats_observer_interface", + ":track_id_stream_info_map", ":video_quality_analyzer_api", "../media:rtc_media_base", "../rtc_base:rtc_base", "rtc_event_log", "task_queue", "transport:network_control", - "transport/media:media_transport_interface", "units:time_delta", "video:video_frame", "video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", 
] } @@ -360,13 +400,16 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { rtc_source_set("frame_generator_api") { visibility = [ "*" ] testonly = true - sources = [ "test/frame_generator_interface.h" ] + sources = [ + "test/frame_generator_interface.cc", + "test/frame_generator_interface.h", + ] deps = [ ":scoped_refptr", "video:video_frame", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("test_dependency_factory") { @@ -379,7 +422,7 @@ rtc_library("test_dependency_factory") { deps = [ ":video_quality_test_fixture_api", "../rtc_base:checks", - "../rtc_base:thread_checker", + "../rtc_base:platform_thread_types", ] } @@ -425,6 +468,7 @@ if (rtc_include_tests) { deps = [ ":audio_quality_analyzer_api", ":peer_connection_quality_test_fixture_api", + ":time_controller", ":video_quality_analyzer_api", "../test/pc/e2e:peerconnection_quality_test", ] @@ -443,8 +487,25 @@ rtc_library("create_frame_generator") { "../rtc_base:checks", "../system_wrappers", "../test:frame_generator_impl", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("create_peer_connection_quality_test_frame_generator") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_peer_connection_quality_test_frame_generator.cc", + "test/create_peer_connection_quality_test_frame_generator.h", + ] + deps = [ + ":create_frame_generator", + ":frame_generator_api", + ":peer_connection_quality_test_fixture_api", + "../rtc_base:checks", + "../test:fileutils", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("libjingle_logging_api") { @@ -497,8 +558,8 @@ rtc_library("audio_options_api") { ":array_view", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } 
rtc_library("transport_api") { @@ -522,11 +583,8 @@ rtc_source_set("bitrate_allocation") { rtc_source_set("simulated_network_api") { visibility = [ "*" ] sources = [ "test/simulated_network.h" ] - deps = [ - "../rtc_base", - "../rtc_base:criticalsection", - "//third_party/abseil-cpp/absl/types:optional", - ] + deps = [ "../rtc_base" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } # TODO(srte): Move to network_emulation sub directory. @@ -537,6 +595,7 @@ rtc_source_set("network_emulation_manager_api") { "test/network_emulation_manager.h", ] deps = [ + ":array_view", ":simulated_network_api", ":time_controller", "../call:simulated_network", @@ -659,9 +718,12 @@ if (rtc_include_tests) { "../modules/audio_coding:neteq_test_factory", "../rtc_base:checks", "neteq:neteq_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -696,7 +758,7 @@ if (rtc_include_tests) { "test/videocodec_test_stats.h", ] deps = [ - "..:webrtc_common", + "../media:rtc_h264_profile_id", "../modules/video_coding:video_codec_interface", "../rtc_base:stringutils", "video:video_frame_type", @@ -730,6 +792,17 @@ if (rtc_include_tests) { ] } + rtc_source_set("mock_data_channel") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_data_channel.h" ] + + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + ] + } + rtc_source_set("mock_fec_controller_override") { testonly = true sources = [ "test/mock_fec_controller_override.h" ] @@ -741,10 +814,7 @@ if (rtc_include_tests) { rtc_library("mock_frame_encryptor") { testonly = true - sources = [ - "test/mock_frame_encryptor.cc", - "test/mock_frame_encryptor.h", - ] + sources = [ "test/mock_frame_encryptor.h" ] deps = [ # For api/crypto/frame_encryptor_interface.h ":libjingle_peerconnection_api", @@ -755,10 +825,7 @@ if (rtc_include_tests) { 
rtc_library("mock_frame_decryptor") { testonly = true - sources = [ - "test/mock_frame_decryptor.cc", - "test/mock_frame_decryptor.h", - ] + sources = [ "test/mock_frame_decryptor.h" ] deps = [ ":libjingle_peerconnection_api", "../test:test_support", @@ -776,7 +843,6 @@ if (rtc_include_tests) { ":array_view", ":libjingle_peerconnection_api", ":rtp_parameters", - "..:webrtc_common", "../rtc_base:checks", "../rtc_base:rtc_base_approved", "crypto:frame_encryptor_interface", @@ -793,13 +859,23 @@ if (rtc_include_tests) { ":array_view", ":libjingle_peerconnection_api", ":rtp_parameters", - "..:webrtc_common", "../rtc_base:checks", "../rtc_base:rtc_base_approved", "crypto:frame_decryptor_interface", ] } + rtc_source_set("mock_media_stream_interface") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_media_stream_interface.h" ] + + deps = [ + ":media_stream_interface", + "../test:test_support", + ] + } + rtc_source_set("dummy_peer_connection") { visibility = [ "*" ] testonly = true @@ -814,6 +890,7 @@ if (rtc_include_tests) { } rtc_source_set("mock_peerconnectioninterface") { + visibility = [ "*" ] testonly = true sources = [ "test/mock_peerconnectioninterface.h" ] @@ -823,9 +900,22 @@ if (rtc_include_tests) { ] } + rtc_source_set("mock_peer_connection_factory_interface") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_peer_connection_factory_interface.h" ] + + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + ] + } + rtc_source_set("mock_rtp") { + visibility = [ "*" ] testonly = true sources = [ + "test/mock_rtp_transceiver.h", "test/mock_rtpreceiver.h", "test/mock_rtpsender.h", ] @@ -836,6 +926,16 @@ if (rtc_include_tests) { ] } + rtc_source_set("mock_transformable_video_frame") { + testonly = true + sources = [ "test/mock_transformable_video_frame.h" ] + + deps = [ + ":frame_transformer_interface", + "../test:test_support", + ] + } + rtc_source_set("mock_video_bitrate_allocator") { testonly = true sources = [ 
"test/mock_video_bitrate_allocator.h" ] @@ -873,10 +973,7 @@ if (rtc_include_tests) { visibility = [ "*" ] testonly = true - sources = [ - "test/mock_video_decoder.cc", - "test/mock_video_decoder.h", - ] + sources = [ "test/mock_video_decoder.h" ] deps = [ "../api/video_codecs:video_codecs_api", @@ -888,10 +985,7 @@ if (rtc_include_tests) { visibility = [ "*" ] testonly = true - sources = [ - "test/mock_video_encoder.cc", - "test/mock_video_encoder.h", - ] + sources = [ "test/mock_video_encoder.h" ] deps = [ "../api/video_codecs:video_codecs_api", @@ -899,39 +993,6 @@ if (rtc_include_tests) { ] } - rtc_source_set("fake_media_transport") { - testonly = true - - sources = [ - "test/fake_datagram_transport.h", - "test/fake_media_transport.h", - ] - - deps = [ - "../rtc_base:checks", - "transport:datagram_transport_interface", - "transport/media:media_transport_interface", - "//third_party/abseil-cpp/absl/algorithm:container", - ] - } - - rtc_library("loopback_media_transport") { - testonly = true - - sources = [ - "test/loopback_media_transport.cc", - "test/loopback_media_transport.h", - ] - - deps = [ - "../rtc_base", - "../rtc_base:checks", - "transport:datagram_transport_interface", - "transport/media:media_transport_interface", - "//third_party/abseil-cpp/absl/algorithm:container", - ] - } - rtc_library("create_time_controller") { visibility = [ "*" ] testonly = true @@ -943,6 +1004,7 @@ if (rtc_include_tests) { deps = [ ":callfactory_api", ":time_controller", + "../call", "../call:call_interfaces", "../test/time_controller", ] @@ -961,7 +1023,6 @@ if (rtc_include_tests) { "rtp_parameters_unittest.cc", "scoped_refptr_unittest.cc", "test/create_time_controller_unittest.cc", - "test/loopback_media_transport_unittest.cc", ] deps = [ @@ -969,7 +1030,6 @@ if (rtc_include_tests) { ":create_time_controller", ":function_view", ":libjingle_peerconnection_api", - ":loopback_media_transport", ":rtc_error", ":rtc_event_log_output_file", ":rtp_packet_info", @@ -997,15 +1057,18 
@@ if (rtc_include_tests) { sources = [ "test/compile_all_headers.cc" ] deps = [ + ":dummy_peer_connection", ":fake_frame_decryptor", ":fake_frame_encryptor", - ":fake_media_transport", - ":loopback_media_transport", ":mock_audio_mixer", + ":mock_data_channel", ":mock_frame_decryptor", ":mock_frame_encryptor", + ":mock_media_stream_interface", + ":mock_peer_connection_factory_interface", ":mock_peerconnectioninterface", ":mock_rtp", + ":mock_transformable_video_frame", ":mock_video_bitrate_allocator", ":mock_video_bitrate_allocator_factory", ":mock_video_codec_factory", diff --git a/api/DEPS b/api/DEPS index ef9db30804..4b93438c3e 100644 --- a/api/DEPS +++ b/api/DEPS @@ -90,6 +90,10 @@ specific_include_rules = { "+modules/include/module_fec_types.h", ], + "frame_transformer_interface\.h": [ + "+rtc_base/ref_count.h", + ], + "ice_transport_interface\.h": [ "+rtc_base/ref_count.h", ], @@ -111,11 +115,6 @@ specific_include_rules = { "+rtc_base/ref_count.h", ], - "media_transport_interface\.h": [ - "+rtc_base/copy_on_write_buffer.h", # As used by datachannelinterface.h - "+rtc_base/network_route.h", - ], - "packet_socket_factory\.h": [ "+rtc_base/proxy_info.h", "+rtc_base/async_packet_socket.h", @@ -129,7 +128,7 @@ specific_include_rules = { "+media/base/media_config.h", "+media/base/media_engine.h", "+p2p/base/port_allocator.h", - "+rtc_base/network.h", + "+rtc_base/network_monitor_factory.h", "+rtc_base/rtc_certificate.h", "+rtc_base/rtc_certificate_generator.h", "+rtc_base/socket_address.h", @@ -173,6 +172,9 @@ specific_include_rules = { "+rtc_base/ref_count.h", ], + "set_local_description_observer_interface\.h": [ + "+rtc_base/ref_count.h", + ], "set_remote_description_observer_interface\.h": [ "+rtc_base/ref_count.h", ], @@ -242,6 +244,10 @@ specific_include_rules = { "+modules/audio_processing/include/audio_processing.h", ], + "echo_detector_creator\.h": [ + "+modules/audio_processing/include/audio_processing.h", + ], + "fake_frame_decryptor\.h": [ 
"+rtc_base/ref_counted_object.h", ], @@ -255,7 +261,6 @@ specific_include_rules = { ], "simulated_network\.h": [ - "+rtc_base/critical_section.h", "+rtc_base/random.h", "+rtc_base/thread_annotations.h", ], diff --git a/api/OWNERS b/api/OWNERS index 11f1803cf4..4cf3915175 100644 --- a/api/OWNERS +++ b/api/OWNERS @@ -1,20 +1,14 @@ -glaznev@webrtc.org +crodbro@webrtc.org +deadbeef@webrtc.org +hta@webrtc.org juberti@webrtc.org +kwiberg@webrtc.org +magjed@webrtc.org perkj@webrtc.org tkchin@webrtc.org tommi@webrtc.org -kwiberg@webrtc.org -steveanton@webrtc.org -shampson@webrtc.org per-file peer_connection*=hbos@webrtc.org -per-file *.gn=phoglund@webrtc.org -per-file *.gni=phoglund@webrtc.org - per-file DEPS=mbonadei@webrtc.org per-file DEPS=kwiberg@webrtc.org - -per-file *media_transport*=sukhanov@webrtc.org -per-file *media_transport*=psla@webrtc.org -per-file *media_transport*=mellem@webrtc.org diff --git a/api/adaptation/BUILD.gn b/api/adaptation/BUILD.gn new file mode 100644 index 0000000000..2cba5f407e --- /dev/null +++ b/api/adaptation/BUILD.gn @@ -0,0 +1,24 @@ +# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved. +# +# Use of this source code is governed by a BSD - style license +# that can be found in the LICENSE file in the root of the source +# tree.An additional intellectual property rights grant can be found +# in the file PATENTS.All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +rtc_source_set("resource_adaptation_api") { + visibility = [ "*" ] + sources = [ + "resource.cc", + "resource.h", + ] + deps = [ + "../../api:scoped_refptr", + "../../rtc_base:checks", + "../../rtc_base:refcount", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/system:rtc_export", + ] +} diff --git a/api/adaptation/DEPS b/api/adaptation/DEPS new file mode 100644 index 0000000000..cab7fb8e14 --- /dev/null +++ b/api/adaptation/DEPS @@ -0,0 +1,7 @@ +specific_include_rules = { + "resource\.h": [ + # ref_count.h is a public_deps of rtc_base_approved. Necessary because of + # rtc::RefCountInterface. + "+rtc_base/ref_count.h", + ], +} \ No newline at end of file diff --git a/call/adaptation/resource.cc b/api/adaptation/resource.cc similarity index 59% rename from call/adaptation/resource.cc rename to api/adaptation/resource.cc index e6974b1d9d..dac03fe019 100644 --- a/call/adaptation/resource.cc +++ b/api/adaptation/resource.cc @@ -8,34 +8,26 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "call/adaptation/resource.h" +#include "api/adaptation/resource.h" -#include "rtc_base/strings/string_builder.h" +#include "rtc_base/checks.h" namespace webrtc { -namespace { - const char* ResourceUsageStateToString(ResourceUsageState usage_state) { switch (usage_state) { case ResourceUsageState::kOveruse: - return "overuse"; - case ResourceUsageState::kStable: - return "stable"; + return "kOveruse"; case ResourceUsageState::kUnderuse: - return "underuse"; + return "kUnderuse"; } + RTC_CHECK_NOTREACHED(); } -} // namespace +ResourceListener::~ResourceListener() {} -Resource::~Resource() {} +Resource::Resource() {} -std::string Resource::ToString() const { - rtc::StringBuilder sb; - sb << Name() << ": " << CurrentUsage() << " " << UsageUnitsOfMeasurement(); - sb << " (" << ResourceUsageStateToString(CurrentUsageState()) << ")"; - return sb.str(); -} +Resource::~Resource() {} } // namespace webrtc diff --git a/api/adaptation/resource.h b/api/adaptation/resource.h new file mode 100644 index 0000000000..9b3968055f --- /dev/null +++ b/api/adaptation/resource.h @@ -0,0 +1,67 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ADAPTATION_RESOURCE_H_ +#define API_ADAPTATION_RESOURCE_H_ + +#include + +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class Resource; + +enum class ResourceUsageState { + // Action is needed to minimze the load on this resource. + kOveruse, + // Increasing the load on this resource is desired, if possible. 
+ kUnderuse, +}; + +RTC_EXPORT const char* ResourceUsageStateToString( + ResourceUsageState usage_state); + +class RTC_EXPORT ResourceListener { + public: + virtual ~ResourceListener(); + + virtual void OnResourceUsageStateMeasured( + rtc::scoped_refptr resource, + ResourceUsageState usage_state) = 0; +}; + +// A Resource monitors an implementation-specific resource. It may report +// kOveruse or kUnderuse when resource usage is high or low enough that we +// should perform some sort of mitigation to fulfil the resource's constraints. +// +// The methods on this interface are invoked on the adaptation task queue. +// Resource usage measurements may be performed on an any task queue. +// +// The Resource is reference counted to prevent use-after-free when posting +// between task queues. As such, the implementation MUST NOT make any +// assumptions about which task queue Resource is destructed on. +class RTC_EXPORT Resource : public rtc::RefCountInterface { + public: + Resource(); + // Destruction may happen on any task queue. + ~Resource() override; + + virtual std::string Name() const = 0; + // The |listener| may be informed of resource usage measurements on any task + // queue, but not after this method is invoked with the null argument. 
+ virtual void SetResourceListener(ResourceListener* listener) = 0; +}; + +} // namespace webrtc + +#endif // API_ADAPTATION_RESOURCE_H_ diff --git a/api/array_view.h b/api/array_view.h index f7130dcc37..df365cb740 100644 --- a/api/array_view.h +++ b/api/array_view.h @@ -13,6 +13,7 @@ #include #include +#include #include #include "rtc_base/checks.h" @@ -213,6 +214,14 @@ class ArrayView final : public impl::ArrayViewBase { : ArrayView(u.data(), u.size()) { static_assert(U::size() == Size, "Sizes must match exactly"); } + template < + typename U, + typename std::enable_if::value>::type* = nullptr> + ArrayView(const U& u) // NOLINT(runtime/explicit) + : ArrayView(u.data(), u.size()) { + static_assert(U::size() == Size, "Sizes must match exactly"); + } // (Only if size is variable.) Construct an ArrayView from any type U that // has a size() method whose return value converts implicitly to size_t, and @@ -250,6 +259,18 @@ class ArrayView final : public impl::ArrayViewBase { T* end() const { return this->data() + this->size(); } const T* cbegin() const { return this->data(); } const T* cend() const { return this->data() + this->size(); } + std::reverse_iterator rbegin() const { + return std::make_reverse_iterator(end()); + } + std::reverse_iterator rend() const { + return std::make_reverse_iterator(begin()); + } + std::reverse_iterator crbegin() const { + return std::make_reverse_iterator(cend()); + } + std::reverse_iterator crend() const { + return std::make_reverse_iterator(cbegin()); + } ArrayView subview(size_t offset, size_t size) const { return offset < this->size() diff --git a/api/array_view_unittest.cc b/api/array_view_unittest.cc index 6fdd516544..97267df006 100644 --- a/api/array_view_unittest.cc +++ b/api/array_view_unittest.cc @@ -38,7 +38,7 @@ void CallFixed(ArrayView av) {} } // namespace -TEST(ArrayViewTest, TestConstructFromPtrAndArray) { +TEST(ArrayViewDeathTest, TestConstructFromPtrAndArray) { char arr[] = "Arrr!"; const char carr[] = "Carrr!"; 
EXPECT_EQ(6u, Call(arr)); @@ -82,7 +82,7 @@ TEST(ArrayViewTest, TestConstructFromPtrAndArray) { // ArrayView n(arr + 2, 2); } -TEST(ArrayViewTest, TestCopyConstructorVariable) { +TEST(ArrayViewTest, TestCopyConstructorVariableLvalue) { char arr[] = "Arrr!"; ArrayView x = arr; EXPECT_EQ(6u, x.size()); @@ -99,7 +99,24 @@ TEST(ArrayViewTest, TestCopyConstructorVariable) { // ArrayView v = z; // Compile error, because can't drop const. } -TEST(ArrayViewTest, TestCopyConstructorFixed) { +TEST(ArrayViewTest, TestCopyConstructorVariableRvalue) { + char arr[] = "Arrr!"; + ArrayView x = arr; + EXPECT_EQ(6u, x.size()); + EXPECT_EQ(arr, x.data()); + ArrayView y = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + ArrayView z = std::move(x); // Copy non-const -> const. + EXPECT_EQ(6u, z.size()); + EXPECT_EQ(arr, z.data()); + ArrayView w = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, w.size()); + EXPECT_EQ(arr, w.data()); + // ArrayView v = std::move(z); // Error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyConstructorFixedLvalue) { char arr[] = "Arrr!"; ArrayView x = arr; static_assert(x.size() == 6, ""); @@ -130,7 +147,38 @@ TEST(ArrayViewTest, TestCopyConstructorFixed) { // ArrayView vv = z; // Compile error, because can't drop const. } -TEST(ArrayViewTest, TestCopyAssignmentVariable) { +TEST(ArrayViewTest, TestCopyConstructorFixedRvalue) { + char arr[] = "Arrr!"; + ArrayView x = arr; + static_assert(x.size() == 6, ""); + EXPECT_EQ(arr, x.data()); + + // Copy fixed -> fixed. + ArrayView y = std::move(x); // Copy non-const -> non-const. + static_assert(y.size() == 6, ""); + EXPECT_EQ(arr, y.data()); + ArrayView z = std::move(x); // Copy non-const -> const. + static_assert(z.size() == 6, ""); + EXPECT_EQ(arr, z.data()); + ArrayView w = std::move(z); // Copy const -> const. 
+ static_assert(w.size() == 6, ""); + EXPECT_EQ(arr, w.data()); + // ArrayView v = std::move(z); // Error, because can't drop const. + + // Copy fixed -> variable. + ArrayView yv = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(6u, yv.size()); + EXPECT_EQ(arr, yv.data()); + ArrayView zv = std::move(x); // Copy non-const -> const. + EXPECT_EQ(6u, zv.size()); + EXPECT_EQ(arr, zv.data()); + ArrayView wv = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, wv.size()); + EXPECT_EQ(arr, wv.data()); + // ArrayView vv = std::move(z); // Error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyAssignmentVariableLvalue) { char arr[] = "Arrr!"; ArrayView x(arr); EXPECT_EQ(6u, x.size()); @@ -151,7 +199,28 @@ TEST(ArrayViewTest, TestCopyAssignmentVariable) { // v = z; // Compile error, because can't drop const. } -TEST(ArrayViewTest, TestCopyAssignmentFixed) { +TEST(ArrayViewTest, TestCopyAssignmentVariableRvalue) { + char arr[] = "Arrr!"; + ArrayView x(arr); + EXPECT_EQ(6u, x.size()); + EXPECT_EQ(arr, x.data()); + ArrayView y; + y = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + ArrayView z; + z = std::move(x); // Copy non-const -> const. + EXPECT_EQ(6u, z.size()); + EXPECT_EQ(arr, z.data()); + ArrayView w; + w = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, w.size()); + EXPECT_EQ(arr, w.data()); + // ArrayView v; + // v = std::move(z); // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyAssignmentFixedLvalue) { char arr[] = "Arrr!"; char init[] = "Init!"; ArrayView x(arr); @@ -187,6 +256,42 @@ TEST(ArrayViewTest, TestCopyAssignmentFixed) { // v = z; // Compile error, because can't drop const. } +TEST(ArrayViewTest, TestCopyAssignmentFixedRvalue) { + char arr[] = "Arrr!"; + char init[] = "Init!"; + ArrayView x(arr); + EXPECT_EQ(arr, x.data()); + + // Copy fixed -> fixed. + ArrayView y(init); + y = std::move(x); // Copy non-const -> non-const. 
+ EXPECT_EQ(arr, y.data()); + ArrayView z(init); + z = std::move(x); // Copy non-const -> const. + EXPECT_EQ(arr, z.data()); + ArrayView w(init); + w = std::move(z); // Copy const -> const. + EXPECT_EQ(arr, w.data()); + // ArrayView v(init); + // v = std::move(z); // Compile error, because can't drop const. + + // Copy fixed -> variable. + ArrayView yv; + yv = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(6u, yv.size()); + EXPECT_EQ(arr, yv.data()); + ArrayView zv; + zv = std::move(x); // Copy non-const -> const. + EXPECT_EQ(6u, zv.size()); + EXPECT_EQ(arr, zv.data()); + ArrayView wv; + wv = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, wv.size()); + EXPECT_EQ(arr, wv.data()); + // ArrayView v; + // v = std::move(z); // Compile error, because can't drop const. +} + TEST(ArrayViewTest, TestStdArray) { EXPECT_EQ(4u, Call(std::array{1, 2, 3, 4})); CallFixed(std::array{2, 3, 4}); @@ -304,7 +409,7 @@ TEST(FixArrayViewTest, TestSwapFixed) { // swap(x, w); // Compile error, because different sizes. } -TEST(ArrayViewTest, TestIndexing) { +TEST(ArrayViewDeathTest, TestIndexing) { char arr[] = "abcdefg"; ArrayView x(arr); const ArrayView y(arr); @@ -346,6 +451,20 @@ TEST(ArrayViewTest, TestIterationEmpty) { } } +TEST(ArrayViewTest, TestReverseIterationEmpty) { + // Variable-size. + ArrayView>>> av; + EXPECT_EQ(av.rbegin(), av.rend()); + EXPECT_EQ(av.crbegin(), av.crend()); + EXPECT_TRUE(av.empty()); + + // Fixed-size. 
+ ArrayView>>, 0> af; + EXPECT_EQ(af.begin(), af.end()); + EXPECT_EQ(af.cbegin(), af.cend()); + EXPECT_TRUE(af.empty()); +} + TEST(ArrayViewTest, TestIterationVariable) { char arr[] = "Arrr!"; ArrayView av(arr); @@ -367,6 +486,25 @@ TEST(ArrayViewTest, TestIterationVariable) { } } +TEST(ArrayViewTest, TestReverseIterationVariable) { + char arr[] = "Arrr!"; + ArrayView av(arr); + EXPECT_EQ('\0', *av.rbegin()); + EXPECT_EQ('\0', *av.crbegin()); + EXPECT_EQ('A', *(av.rend() - 1)); + EXPECT_EQ('A', *(av.crend() - 1)); + + const char* cit = av.cend() - 1; + for (auto crit = av.crbegin(); crit != av.crend(); ++crit, --cit) { + EXPECT_EQ(*cit, *crit); + } + + char* it = av.end() - 1; + for (auto rit = av.rbegin(); rit != av.rend(); ++rit, --it) { + EXPECT_EQ(*it, *rit); + } +} + TEST(ArrayViewTest, TestIterationFixed) { char arr[] = "Arrr!"; ArrayView av(arr); @@ -388,6 +526,25 @@ TEST(ArrayViewTest, TestIterationFixed) { } } +TEST(ArrayViewTest, TestReverseIterationFixed) { + char arr[] = "Arrr!"; + ArrayView av(arr); + EXPECT_EQ('\0', *av.rbegin()); + EXPECT_EQ('\0', *av.crbegin()); + EXPECT_EQ('A', *(av.rend() - 1)); + EXPECT_EQ('A', *(av.crend() - 1)); + + const char* cit = av.cend() - 1; + for (auto crit = av.crbegin(); crit != av.crend(); ++crit, --cit) { + EXPECT_EQ(*cit, *crit); + } + + char* it = av.end() - 1; + for (auto rit = av.rbegin(); rit != av.rend(); ++rit, --it) { + EXPECT_EQ(*it, *rit); + } +} + TEST(ArrayViewTest, TestEmpty) { EXPECT_TRUE(ArrayView().empty()); const int a[] = {1, 2, 3}; diff --git a/api/audio/BUILD.gn b/api/audio/BUILD.gn index 2405d9d041..d0465bbc40 100644 --- a/api/audio/BUILD.gn +++ b/api/audio/BUILD.gn @@ -24,6 +24,11 @@ rtc_library("audio_frame_api") { ] } +rtc_source_set("audio_frame_processor") { + visibility = [ "*" ] + sources = [ "audio_frame_processor.h" ] +} + rtc_source_set("audio_mixer_api") { visibility = [ "*" ] sources = [ "audio_mixer.h" ] @@ -61,8 +66,8 @@ rtc_library("aec3_config_json") { 
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_json", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("aec3_factory") { @@ -87,3 +92,17 @@ rtc_source_set("echo_control") { sources = [ "echo_control.h" ] deps = [ "../../rtc_base:checks" ] } + +rtc_source_set("echo_detector_creator") { + visibility = [ "*" ] + sources = [ + "echo_detector_creator.cc", + "echo_detector_creator.h", + ] + deps = [ + "../../api:scoped_refptr", + "../../modules/audio_processing:api", + "../../modules/audio_processing:audio_processing", + "../../rtc_base:refcount", + ] +} diff --git a/api/audio/audio_frame.cc b/api/audio/audio_frame.cc index d9212a211d..c6e5cf4dd6 100644 --- a/api/audio/audio_frame.cc +++ b/api/audio/audio_frame.cc @@ -11,6 +11,8 @@ #include "api/audio/audio_frame.h" #include +#include +#include #include "rtc_base/checks.h" #include "rtc_base/time_utils.h" @@ -22,6 +24,28 @@ AudioFrame::AudioFrame() { static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes"); } +void swap(AudioFrame& a, AudioFrame& b) { + using std::swap; + swap(a.timestamp_, b.timestamp_); + swap(a.elapsed_time_ms_, b.elapsed_time_ms_); + swap(a.ntp_time_ms_, b.ntp_time_ms_); + swap(a.samples_per_channel_, b.samples_per_channel_); + swap(a.sample_rate_hz_, b.sample_rate_hz_); + swap(a.num_channels_, b.num_channels_); + swap(a.channel_layout_, b.channel_layout_); + swap(a.speech_type_, b.speech_type_); + swap(a.vad_activity_, b.vad_activity_); + swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_); + swap(a.packet_infos_, b.packet_infos_); + const size_t length_a = a.samples_per_channel_ * a.num_channels_; + const size_t length_b = b.samples_per_channel_ * b.num_channels_; + RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples); + RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples); + std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_); + 
swap(a.muted_, b.muted_); + swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_); +} + void AudioFrame::Reset() { ResetWithoutMuting(); muted_ = true; @@ -41,6 +65,7 @@ void AudioFrame::ResetWithoutMuting() { vad_activity_ = kVadUnknown; profile_timestamp_ms_ = 0; packet_infos_ = RtpPacketInfos(); + absolute_capture_timestamp_ms_ = absl::nullopt; } void AudioFrame::UpdateFrame(uint32_t timestamp, @@ -86,6 +111,7 @@ void AudioFrame::CopyFrom(const AudioFrame& src) { vad_activity_ = src.vad_activity_; num_channels_ = src.num_channels_; channel_layout_ = src.channel_layout_; + absolute_capture_timestamp_ms_ = src.absolute_capture_timestamp_ms(); const size_t length = samples_per_channel_ * num_channels_; RTC_CHECK_LE(length, kMaxDataSizeSamples); diff --git a/api/audio/audio_frame.h b/api/audio/audio_frame.h index cda8c26780..78539f57eb 100644 --- a/api/audio/audio_frame.h +++ b/api/audio/audio_frame.h @@ -14,6 +14,8 @@ #include #include +#include + #include "api/audio/channel_layout.h" #include "api/rtp_packet_infos.h" #include "rtc_base/constructor_magic.h" @@ -58,6 +60,8 @@ class AudioFrame { AudioFrame(); + friend void swap(AudioFrame& a, AudioFrame& b); + // Resets all members to their default state. void Reset(); // Same as Reset(), but leaves mute state unchanged. Muting a frame requires @@ -104,6 +108,15 @@ class AudioFrame { ChannelLayout channel_layout() const { return channel_layout_; } int sample_rate_hz() const { return sample_rate_hz_; } + void set_absolute_capture_timestamp_ms( + int64_t absolute_capture_time_stamp_ms) { + absolute_capture_timestamp_ms_ = absolute_capture_time_stamp_ms; + } + + absl::optional absolute_capture_timestamp_ms() const { + return absolute_capture_timestamp_ms_; + } + // RTP timestamp of the first sample in the AudioFrame. uint32_t timestamp_ = 0; // Time since the first frame in milliseconds. 
@@ -121,8 +134,8 @@ class AudioFrame { // Monotonically increasing timestamp intended for profiling of audio frames. // Typically used for measuring elapsed time between two different points in // the audio path. No lock is used to save resources and we are thread safe - // by design. Also, absl::optional is not used since it will cause a "complex - // class/struct needs an explicit out-of-line destructor" build error. + // by design. + // TODO(nisse@webrtc.org): consider using absl::optional. int64_t profile_timestamp_ms_ = 0; // Information about packets used to assemble this audio frame. This is needed @@ -150,6 +163,12 @@ class AudioFrame { int16_t data_[kMaxDataSizeSamples]; bool muted_ = true; + // Absolute capture timestamp when this audio frame was originally captured. + // This is only valid for audio frames captured on this machine. The absolute + // capture timestamp of a received frame is found in |packet_infos_|. + // This timestamp MUST be based on the same clock as rtc::TimeMillis(). + absl::optional absolute_capture_timestamp_ms_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame); }; diff --git a/api/audio/audio_frame_processor.h b/api/audio/audio_frame_processor.h new file mode 100644 index 0000000000..bc21d14858 --- /dev/null +++ b/api/audio/audio_frame_processor.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_AUDIO_FRAME_PROCESSOR_H_ +#define API_AUDIO_AUDIO_FRAME_PROCESSOR_H_ + +#include +#include + +namespace webrtc { + +class AudioFrame; + +// If passed into PeerConnectionFactory, will be used for additional +// processing of captured audio frames, performed before encoding. +// Implementations must be thread-safe. +class AudioFrameProcessor { + public: + using OnAudioFrameCallback = std::function)>; + virtual ~AudioFrameProcessor() = default; + + // Processes the frame received from WebRTC, is called by WebRTC off the + // realtime audio capturing path. AudioFrameProcessor must reply with + // processed frames by calling |sink_callback| if it was provided in SetSink() + // call. |sink_callback| can be called in the context of Process(). + virtual void Process(std::unique_ptr frame) = 0; + + // Atomically replaces the current sink with the new one. Before the + // first call to this function, or if the provided |sink_callback| is nullptr, + // processed frames are simply discarded. 
+ virtual void SetSink(OnAudioFrameCallback sink_callback) = 0; +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_FRAME_PROCESSOR_H_ diff --git a/api/audio/echo_canceller3_config.cc b/api/audio/echo_canceller3_config.cc index 7fd0744c37..aeb809efa9 100644 --- a/api/audio/echo_canceller3_config.cc +++ b/api/audio/echo_canceller3_config.cc @@ -118,35 +118,36 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) { res = res & Limit(&c->delay.delay_selection_thresholds.initial, 1, 250); res = res & Limit(&c->delay.delay_selection_thresholds.converged, 1, 250); - res = res & FloorLimit(&c->filter.main.length_blocks, 1); - res = res & Limit(&c->filter.main.leakage_converged, 0.f, 1000.f); - res = res & Limit(&c->filter.main.leakage_diverged, 0.f, 1000.f); - res = res & Limit(&c->filter.main.error_floor, 0.f, 1000.f); - res = res & Limit(&c->filter.main.error_ceil, 0.f, 100000000.f); - res = res & Limit(&c->filter.main.noise_gate, 0.f, 100000000.f); - - res = res & FloorLimit(&c->filter.main_initial.length_blocks, 1); - res = res & Limit(&c->filter.main_initial.leakage_converged, 0.f, 1000.f); - res = res & Limit(&c->filter.main_initial.leakage_diverged, 0.f, 1000.f); - res = res & Limit(&c->filter.main_initial.error_floor, 0.f, 1000.f); - res = res & Limit(&c->filter.main_initial.error_ceil, 0.f, 100000000.f); - res = res & Limit(&c->filter.main_initial.noise_gate, 0.f, 100000000.f); - - if (c->filter.main.length_blocks < c->filter.main_initial.length_blocks) { - c->filter.main_initial.length_blocks = c->filter.main.length_blocks; + res = res & FloorLimit(&c->filter.refined.length_blocks, 1); + res = res & Limit(&c->filter.refined.leakage_converged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined.leakage_diverged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined.error_floor, 0.f, 1000.f); + res = res & Limit(&c->filter.refined.error_ceil, 0.f, 100000000.f); + res = res & Limit(&c->filter.refined.noise_gate, 0.f, 100000000.f); + + res = res 
& FloorLimit(&c->filter.refined_initial.length_blocks, 1); + res = res & Limit(&c->filter.refined_initial.leakage_converged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined_initial.leakage_diverged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined_initial.error_floor, 0.f, 1000.f); + res = res & Limit(&c->filter.refined_initial.error_ceil, 0.f, 100000000.f); + res = res & Limit(&c->filter.refined_initial.noise_gate, 0.f, 100000000.f); + + if (c->filter.refined.length_blocks < + c->filter.refined_initial.length_blocks) { + c->filter.refined_initial.length_blocks = c->filter.refined.length_blocks; res = false; } - res = res & FloorLimit(&c->filter.shadow.length_blocks, 1); - res = res & Limit(&c->filter.shadow.rate, 0.f, 1.f); - res = res & Limit(&c->filter.shadow.noise_gate, 0.f, 100000000.f); + res = res & FloorLimit(&c->filter.coarse.length_blocks, 1); + res = res & Limit(&c->filter.coarse.rate, 0.f, 1.f); + res = res & Limit(&c->filter.coarse.noise_gate, 0.f, 100000000.f); - res = res & FloorLimit(&c->filter.shadow_initial.length_blocks, 1); - res = res & Limit(&c->filter.shadow_initial.rate, 0.f, 1.f); - res = res & Limit(&c->filter.shadow_initial.noise_gate, 0.f, 100000000.f); + res = res & FloorLimit(&c->filter.coarse_initial.length_blocks, 1); + res = res & Limit(&c->filter.coarse_initial.rate, 0.f, 1.f); + res = res & Limit(&c->filter.coarse_initial.noise_gate, 0.f, 100000000.f); - if (c->filter.shadow.length_blocks < c->filter.shadow_initial.length_blocks) { - c->filter.shadow_initial.length_blocks = c->filter.shadow.length_blocks; + if (c->filter.coarse.length_blocks < c->filter.coarse_initial.length_blocks) { + c->filter.coarse_initial.length_blocks = c->filter.coarse.length_blocks; res = false; } @@ -160,7 +161,7 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) { c->erle.min = std::min(c->erle.max_l, c->erle.max_h); res = false; } - res = res & Limit(&c->erle.num_sections, 1, c->filter.main.length_blocks); + res = res & 
Limit(&c->erle.num_sections, 1, c->filter.refined.length_blocks); res = res & Limit(&c->ep_strength.default_gain, 0.f, 1000000.f); res = res & Limit(&c->ep_strength.default_len, -1.f, 1.f); @@ -192,6 +193,8 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) { res = res & Limit(&c->echo_model.render_pre_window_size, 0, 100); res = res & Limit(&c->echo_model.render_post_window_size, 0, 100); + res = res & Limit(&c->comfort_noise.noise_floor_dbfs, -200.f, 0.f); + res = res & Limit(&c->suppressor.nearend_average_blocks, 1, 5000); res = res & diff --git a/api/audio/echo_canceller3_config.h b/api/audio/echo_canceller3_config.h index a63318f1da..3ed11ff8b3 100644 --- a/api/audio/echo_canceller3_config.h +++ b/api/audio/echo_canceller3_config.h @@ -61,7 +61,7 @@ struct RTC_EXPORT EchoCanceller3Config { } delay; struct Filter { - struct MainConfiguration { + struct RefinedConfiguration { size_t length_blocks; float leakage_converged; float leakage_diverged; @@ -70,23 +70,24 @@ struct RTC_EXPORT EchoCanceller3Config { float noise_gate; }; - struct ShadowConfiguration { + struct CoarseConfiguration { size_t length_blocks; float rate; float noise_gate; }; - MainConfiguration main = {13, 0.00005f, 0.05f, 0.001f, 2.f, 20075344.f}; - ShadowConfiguration shadow = {13, 0.7f, 20075344.f}; + RefinedConfiguration refined = {13, 0.00005f, 0.05f, + 0.001f, 2.f, 20075344.f}; + CoarseConfiguration coarse = {13, 0.7f, 20075344.f}; - MainConfiguration main_initial = {12, 0.005f, 0.5f, - 0.001f, 2.f, 20075344.f}; - ShadowConfiguration shadow_initial = {12, 0.9f, 20075344.f}; + RefinedConfiguration refined_initial = {12, 0.005f, 0.5f, + 0.001f, 2.f, 20075344.f}; + CoarseConfiguration coarse_initial = {12, 0.9f, 20075344.f}; size_t config_change_duration_blocks = 250; float initial_state_seconds = 2.5f; bool conservative_initial_phase = false; - bool enable_shadow_filter_output_usage = true; + bool enable_coarse_filter_output_usage = true; bool use_linear_filter = true; bool 
export_linear_aec_output = false; } filter; @@ -142,8 +143,13 @@ struct RTC_EXPORT EchoCanceller3Config { float noise_gate_slope = 0.3f; size_t render_pre_window_size = 1; size_t render_post_window_size = 1; + bool model_reverb_in_nonlinear_mode = true; } echo_model; + struct ComfortNoise { + float noise_floor_dbfs = -96.03406f; + } comfort_noise; + struct Suppressor { Suppressor(); Suppressor(const Suppressor& e); @@ -210,11 +216,12 @@ struct RTC_EXPORT EchoCanceller3Config { struct HighBandsSuppression { float enr_threshold = 1.f; float max_gain_during_echo = 1.f; - float anti_howling_activation_threshold = 25.f; - float anti_howling_gain = 0.01f; + float anti_howling_activation_threshold = 400.f; + float anti_howling_gain = 1.f; } high_bands_suppression; float floor_first_increase = 0.00001f; + bool conservative_hf_suppression = false; } suppressor; }; } // namespace webrtc diff --git a/api/audio/echo_canceller3_config_json.cc b/api/audio/echo_canceller3_config_json.cc index 1364cb7c0a..907b472714 100644 --- a/api/audio/echo_canceller3_config_json.cc +++ b/api/audio/echo_canceller3_config_json.cc @@ -55,7 +55,7 @@ void ReadParam(const Json::Value& root, std::string param_name, float* param) { void ReadParam(const Json::Value& root, std::string param_name, - EchoCanceller3Config::Filter::MainConfiguration* param) { + EchoCanceller3Config::Filter::RefinedConfiguration* param) { RTC_DCHECK(param); Json::Value json_array; if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { @@ -76,7 +76,7 @@ void ReadParam(const Json::Value& root, void ReadParam(const Json::Value& root, std::string param_name, - EchoCanceller3Config::Filter::ShadowConfiguration* param) { + EchoCanceller3Config::Filter::CoarseConfiguration* param) { RTC_DCHECK(param); Json::Value json_array; if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { @@ -215,18 +215,18 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, } if (rtc::GetValueFromJsonObject(aec3_root, 
"filter", §ion)) { - ReadParam(section, "main", &cfg.filter.main); - ReadParam(section, "shadow", &cfg.filter.shadow); - ReadParam(section, "main_initial", &cfg.filter.main_initial); - ReadParam(section, "shadow_initial", &cfg.filter.shadow_initial); + ReadParam(section, "refined", &cfg.filter.refined); + ReadParam(section, "coarse", &cfg.filter.coarse); + ReadParam(section, "refined_initial", &cfg.filter.refined_initial); + ReadParam(section, "coarse_initial", &cfg.filter.coarse_initial); ReadParam(section, "config_change_duration_blocks", &cfg.filter.config_change_duration_blocks); ReadParam(section, "initial_state_seconds", &cfg.filter.initial_state_seconds); ReadParam(section, "conservative_initial_phase", &cfg.filter.conservative_initial_phase); - ReadParam(section, "enable_shadow_filter_output_usage", - &cfg.filter.enable_shadow_filter_output_usage); + ReadParam(section, "enable_coarse_filter_output_usage", + &cfg.filter.enable_coarse_filter_output_usage); ReadParam(section, "use_linear_filter", &cfg.filter.use_linear_filter); ReadParam(section, "export_linear_aec_output", &cfg.filter.export_linear_aec_output); @@ -302,6 +302,12 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.echo_model.render_pre_window_size); ReadParam(section, "render_post_window_size", &cfg.echo_model.render_post_window_size); + ReadParam(section, "model_reverb_in_nonlinear_mode", + &cfg.echo_model.model_reverb_in_nonlinear_mode); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "comfort_noise", §ion)) { + ReadParam(section, "noise_floor_dbfs", &cfg.comfort_noise.noise_floor_dbfs); } Json::Value subsection; @@ -377,6 +383,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, ReadParam(section, "floor_first_increase", &cfg.suppressor.floor_first_increase); + ReadParam(section, "conservative_hf_suppression", + &cfg.suppressor.conservative_hf_suppression); } } @@ -459,34 +467,35 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { ost 
<< "},"; ost << "\"filter\": {"; - ost << "\"main\": ["; - ost << config.filter.main.length_blocks << ","; - ost << config.filter.main.leakage_converged << ","; - ost << config.filter.main.leakage_diverged << ","; - ost << config.filter.main.error_floor << ","; - ost << config.filter.main.error_ceil << ","; - ost << config.filter.main.noise_gate; + + ost << "\"refined\": ["; + ost << config.filter.refined.length_blocks << ","; + ost << config.filter.refined.leakage_converged << ","; + ost << config.filter.refined.leakage_diverged << ","; + ost << config.filter.refined.error_floor << ","; + ost << config.filter.refined.error_ceil << ","; + ost << config.filter.refined.noise_gate; ost << "],"; - ost << "\"shadow\": ["; - ost << config.filter.shadow.length_blocks << ","; - ost << config.filter.shadow.rate << ","; - ost << config.filter.shadow.noise_gate; + ost << "\"coarse\": ["; + ost << config.filter.coarse.length_blocks << ","; + ost << config.filter.coarse.rate << ","; + ost << config.filter.coarse.noise_gate; ost << "],"; - ost << "\"main_initial\": ["; - ost << config.filter.main_initial.length_blocks << ","; - ost << config.filter.main_initial.leakage_converged << ","; - ost << config.filter.main_initial.leakage_diverged << ","; - ost << config.filter.main_initial.error_floor << ","; - ost << config.filter.main_initial.error_ceil << ","; - ost << config.filter.main_initial.noise_gate; + ost << "\"refined_initial\": ["; + ost << config.filter.refined_initial.length_blocks << ","; + ost << config.filter.refined_initial.leakage_converged << ","; + ost << config.filter.refined_initial.leakage_diverged << ","; + ost << config.filter.refined_initial.error_floor << ","; + ost << config.filter.refined_initial.error_ceil << ","; + ost << config.filter.refined_initial.noise_gate; ost << "],"; - ost << "\"shadow_initial\": ["; - ost << config.filter.shadow_initial.length_blocks << ","; - ost << config.filter.shadow_initial.rate << ","; - ost << 
config.filter.shadow_initial.noise_gate; + ost << "\"coarse_initial\": ["; + ost << config.filter.coarse_initial.length_blocks << ","; + ost << config.filter.coarse_initial.rate << ","; + ost << config.filter.coarse_initial.noise_gate; ost << "],"; ost << "\"config_change_duration_blocks\": " @@ -495,8 +504,8 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { << ","; ost << "\"conservative_initial_phase\": " << (config.filter.conservative_initial_phase ? "true" : "false") << ","; - ost << "\"enable_shadow_filter_output_usage\": " - << (config.filter.enable_shadow_filter_output_usage ? "true" : "false") + ost << "\"enable_coarse_filter_output_usage\": " + << (config.filter.enable_coarse_filter_output_usage ? "true" : "false") << ","; ost << "\"use_linear_filter\": " << (config.filter.use_linear_filter ? "true" : "false") << ","; @@ -580,7 +589,13 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { ost << "\"render_pre_window_size\": " << config.echo_model.render_pre_window_size << ","; ost << "\"render_post_window_size\": " - << config.echo_model.render_post_window_size; + << config.echo_model.render_post_window_size << ","; + ost << "\"model_reverb_in_nonlinear_mode\": " + << (config.echo_model.model_reverb_in_nonlinear_mode ? 
"true" : "false"); + ost << "},"; + + ost << "\"comfort_noise\": {"; + ost << "\"noise_floor_dbfs\": " << config.comfort_noise.noise_floor_dbfs; ost << "},"; ost << "\"suppressor\": {"; @@ -663,7 +678,10 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { ost << "\"anti_howling_gain\": " << config.suppressor.high_bands_suppression.anti_howling_gain; ost << "},"; - ost << "\"floor_first_increase\": " << config.suppressor.floor_first_increase; + ost << "\"floor_first_increase\": " << config.suppressor.floor_first_increase + << ","; + ost << "\"conservative_hf_suppression\": " + << config.suppressor.conservative_hf_suppression; ost << "}"; ost << "}"; ost << "}"; diff --git a/api/audio/echo_detector_creator.cc b/api/audio/echo_detector_creator.cc new file mode 100644 index 0000000000..4c3d9e61fe --- /dev/null +++ b/api/audio/echo_detector_creator.cc @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/audio/echo_detector_creator.h" + +#include "modules/audio_processing/residual_echo_detector.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +rtc::scoped_refptr CreateEchoDetector() { + return new rtc::RefCountedObject(); +} + +} // namespace webrtc diff --git a/api/audio/echo_detector_creator.h b/api/audio/echo_detector_creator.h new file mode 100644 index 0000000000..5ba171de97 --- /dev/null +++ b/api/audio/echo_detector_creator.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_DETECTOR_CREATOR_H_ +#define API_AUDIO_ECHO_DETECTOR_CREATOR_H_ + +#include "api/scoped_refptr.h" +#include "modules/audio_processing/include/audio_processing.h" + +namespace webrtc { + +// Returns an instance of the WebRTC implementation of a residual echo detector. +// It can be provided to the webrtc::AudioProcessingBuilder to obtain the +// usual residual echo metrics. +rtc::scoped_refptr CreateEchoDetector(); + +} // namespace webrtc + +#endif // API_AUDIO_ECHO_DETECTOR_CREATOR_H_ diff --git a/api/audio/test/audio_frame_unittest.cc b/api/audio/test/audio_frame_unittest.cc index dbf45ceabc..f8d3318274 100644 --- a/api/audio/test/audio_frame_unittest.cc +++ b/api/audio/test/audio_frame_unittest.cc @@ -133,4 +133,54 @@ TEST(AudioFrameTest, CopyFrom) { EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples))); } +TEST(AudioFrameTest, SwapFrames) { + AudioFrame frame1, frame2; + int16_t samples1[kNumChannelsMono * kSamplesPerChannel]; + for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) { + samples1[i] = i; + } + frame1.UpdateFrame(kTimestamp, samples1, kSamplesPerChannel, kSampleRateHz, + AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannelsMono); + frame1.set_absolute_capture_timestamp_ms(12345678); + const auto frame1_channel_layout = frame1.channel_layout(); + + int16_t samples2[(kNumChannelsMono + 1) * (kSamplesPerChannel + 1)]; + for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1); + ++i) { + samples2[i] = 1000 + i; + } + frame2.UpdateFrame(kTimestamp + 1, samples2, kSamplesPerChannel + 1, + kSampleRateHz + 1, AudioFrame::kNormalSpeech, + 
AudioFrame::kVadPassive, kNumChannelsMono + 1); + const auto frame2_channel_layout = frame2.channel_layout(); + + swap(frame1, frame2); + + EXPECT_EQ(kTimestamp + 1, frame1.timestamp_); + ASSERT_EQ(kSamplesPerChannel + 1, frame1.samples_per_channel_); + EXPECT_EQ(kSampleRateHz + 1, frame1.sample_rate_hz_); + EXPECT_EQ(AudioFrame::kNormalSpeech, frame1.speech_type_); + EXPECT_EQ(AudioFrame::kVadPassive, frame1.vad_activity_); + ASSERT_EQ(kNumChannelsMono + 1, frame1.num_channels_); + for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1); + ++i) { + EXPECT_EQ(samples2[i], frame1.data()[i]); + } + EXPECT_FALSE(frame1.absolute_capture_timestamp_ms()); + EXPECT_EQ(frame2_channel_layout, frame1.channel_layout()); + + EXPECT_EQ(kTimestamp, frame2.timestamp_); + ASSERT_EQ(kSamplesPerChannel, frame2.samples_per_channel_); + EXPECT_EQ(kSampleRateHz, frame2.sample_rate_hz_); + EXPECT_EQ(AudioFrame::kPLC, frame2.speech_type_); + EXPECT_EQ(AudioFrame::kVadActive, frame2.vad_activity_); + ASSERT_EQ(kNumChannelsMono, frame2.num_channels_); + for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) { + EXPECT_EQ(samples1[i], frame2.data()[i]); + } + EXPECT_EQ(12345678, frame2.absolute_capture_timestamp_ms()); + EXPECT_EQ(frame1_channel_layout, frame2.channel_layout()); +} + } // namespace webrtc diff --git a/api/audio/test/echo_canceller3_config_json_unittest.cc b/api/audio/test/echo_canceller3_config_json_unittest.cc index acb21c3c45..4a952fe910 100644 --- a/api/audio/test/echo_canceller3_config_json_unittest.cc +++ b/api/audio/test/echo_canceller3_config_json_unittest.cc @@ -19,7 +19,10 @@ TEST(EchoCanceller3JsonHelpers, ToStringAndParseJson) { EchoCanceller3Config cfg; cfg.delay.down_sampling_factor = 1u; cfg.delay.log_warning_on_delay_changes = true; - cfg.filter.shadow_initial.length_blocks = 7u; + cfg.filter.refined.error_floor = 2.f; + cfg.filter.coarse_initial.length_blocks = 3u; + cfg.comfort_noise.noise_floor_dbfs = 100.f; + 
cfg.echo_model.model_reverb_in_nonlinear_mode = false; cfg.suppressor.normal_tuning.mask_hf.enr_suppress = .5f; cfg.suppressor.subband_nearend_detection.nearend_average_blocks = 3; cfg.suppressor.subband_nearend_detection.subband1 = {1, 3}; @@ -30,8 +33,6 @@ TEST(EchoCanceller3JsonHelpers, ToStringAndParseJson) { EchoCanceller3Config cfg_transformed = Aec3ConfigFromJsonString(json_string); // Expect unchanged values to remain default. - EXPECT_EQ(cfg.filter.main.error_floor, - cfg_transformed.filter.main.error_floor); EXPECT_EQ(cfg.ep_strength.default_len, cfg_transformed.ep_strength.default_len); EXPECT_EQ(cfg.suppressor.normal_tuning.mask_lf.enr_suppress, @@ -42,8 +43,14 @@ TEST(EchoCanceller3JsonHelpers, ToStringAndParseJson) { cfg_transformed.delay.down_sampling_factor); EXPECT_EQ(cfg.delay.log_warning_on_delay_changes, cfg_transformed.delay.log_warning_on_delay_changes); - EXPECT_EQ(cfg.filter.shadow_initial.length_blocks, - cfg_transformed.filter.shadow_initial.length_blocks); + EXPECT_EQ(cfg.filter.coarse_initial.length_blocks, + cfg_transformed.filter.coarse_initial.length_blocks); + EXPECT_EQ(cfg.filter.refined.error_floor, + cfg_transformed.filter.refined.error_floor); + EXPECT_EQ(cfg.comfort_noise.noise_floor_dbfs, + cfg_transformed.comfort_noise.noise_floor_dbfs); + EXPECT_EQ(cfg.echo_model.model_reverb_in_nonlinear_mode, + cfg_transformed.echo_model.model_reverb_in_nonlinear_mode); EXPECT_EQ(cfg.suppressor.normal_tuning.mask_hf.enr_suppress, cfg_transformed.suppressor.normal_tuning.mask_hf.enr_suppress); EXPECT_EQ(cfg.suppressor.subband_nearend_detection.nearend_average_blocks, diff --git a/api/audio_codecs/BUILD.gn b/api/audio_codecs/BUILD.gn index 987e20f178..b6292de570 100644 --- a/api/audio_codecs/BUILD.gn +++ b/api/audio_codecs/BUILD.gn @@ -38,6 +38,8 @@ rtc_library("audio_codecs_api") { "../../rtc_base:sanitizer", "../../rtc_base/system:rtc_export", "../units:time_delta", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", 
"//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/L16/BUILD.gn b/api/audio_codecs/L16/BUILD.gn index bef671237e..1f7a1e5a0b 100644 --- a/api/audio_codecs/L16/BUILD.gn +++ b/api/audio_codecs/L16/BUILD.gn @@ -25,6 +25,8 @@ rtc_library("audio_encoder_L16") { "../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -42,6 +44,8 @@ rtc_library("audio_decoder_L16") { "../../../modules/audio_coding:pcm16b", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/OWNERS b/api/audio_codecs/OWNERS index a52dd93e5e..fe417854d6 100644 --- a/api/audio_codecs/OWNERS +++ b/api/audio_codecs/OWNERS @@ -1,2 +1 @@ kwiberg@webrtc.org -ossu@webrtc.org diff --git a/api/audio_codecs/audio_decoder.cc b/api/audio_codecs/audio_decoder.cc index 97cda27a03..fd8bf385a3 100644 --- a/api/audio_codecs/audio_decoder.cc +++ b/api/audio_codecs/audio_decoder.cc @@ -45,6 +45,10 @@ class OldStyleEncodedFrame final : public AudioDecoder::EncodedAudioFrame { {static_cast(ret), speech_type}); } + AudioEncoder::CodecType CodecType() override { return decoder_->CodecType(); } + int PayloadSize() override { return payload_.size(); } + const uint8_t* PayloadData() override { return payload_.data(); } + private: AudioDecoder* const decoder_; const rtc::Buffer payload_; diff --git a/api/audio_codecs/audio_decoder.h b/api/audio_codecs/audio_decoder.h index 557ffe2759..21d9b0dab7 100644 --- a/api/audio_codecs/audio_decoder.h +++ b/api/audio_codecs/audio_decoder.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio_codecs/audio_encoder.h" #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" @@ 
-53,6 +54,10 @@ class AudioDecoder { // Returns true if this packet contains DTX. virtual bool IsDtxPacket() const; + virtual AudioEncoder::CodecType CodecType() = 0; + virtual int PayloadSize() = 0; + virtual const uint8_t* PayloadData() = 0; + // Decodes this frame of audio and writes the result in |decoded|. // |decoded| must be large enough to store as many samples as indicated by a // call to Duration() . On success, returns an absl::optional containing the @@ -170,6 +175,8 @@ class AudioDecoder { // during the lifetime of the decoder. virtual size_t Channels() const = 0; + virtual AudioEncoder::CodecType CodecType() = 0; + protected: static SpeechType ConvertSpeechType(int16_t type); diff --git a/api/audio_codecs/audio_decoder_factory.h b/api/audio_codecs/audio_decoder_factory.h index c36a0e103b..2811f6704b 100644 --- a/api/audio_codecs/audio_decoder_factory.h +++ b/api/audio_codecs/audio_decoder_factory.h @@ -38,6 +38,8 @@ class AudioDecoderFactory : public rtc::RefCountInterface { // communication between the AudioEncoder and AudioDecoder instances, which is // needed for some codecs with built-in bandwidth adaptation.) // + // Returns null if the format isn't supported. + // // Note: Implementations need to be robust against combinations other than // one encoder, one decoder getting the same ID; such decoders must still // work. diff --git a/api/audio_codecs/audio_decoder_factory_template.h b/api/audio_codecs/audio_decoder_factory_template.h index f3bc13645a..e628cb62dc 100644 --- a/api/audio_codecs/audio_decoder_factory_template.h +++ b/api/audio_codecs/audio_decoder_factory_template.h @@ -90,7 +90,7 @@ class AudioDecoderFactoryT : public AudioDecoderFactory { // be a struct with the following static member functions: // // // Converts |audio_format| to a ConfigType instance. 
Returns an empty -// // optional if |audio_format| doesn't correctly specify an decoder of our +// // optional if |audio_format| doesn't correctly specify a decoder of our // // type. // absl::optional SdpToConfig(const SdpAudioFormat& audio_format); // diff --git a/api/audio_codecs/audio_encoder.cc b/api/audio_codecs/audio_encoder.cc index 2c1111814e..cd4d2000d2 100644 --- a/api/audio_codecs/audio_encoder.cc +++ b/api/audio_codecs/audio_encoder.cc @@ -110,9 +110,4 @@ ANAStats AudioEncoder::GetANAStats() const { return ANAStats(); } -absl::optional> -AudioEncoder::GetFrameLengthRange() const { - return absl::nullopt; -} - } // namespace webrtc diff --git a/api/audio_codecs/audio_encoder.h b/api/audio_codecs/audio_encoder.h index c6e572c87b..fd2d948863 100644 --- a/api/audio_codecs/audio_encoder.h +++ b/api/audio_codecs/audio_encoder.h @@ -244,7 +244,7 @@ class AudioEncoder { // information. This is used to calculated the full bitrate range, including // overhead. virtual absl::optional> GetFrameLengthRange() - const; + const = 0; protected: // Subclasses implement this to perform the actual encoding. Called by diff --git a/api/audio_codecs/audio_encoder_factory.h b/api/audio_codecs/audio_encoder_factory.h index 48995a876d..6128b1b6f3 100644 --- a/api/audio_codecs/audio_encoder_factory.h +++ b/api/audio_codecs/audio_encoder_factory.h @@ -44,6 +44,8 @@ class AudioEncoderFactory : public rtc::RefCountInterface { // communication between the AudioEncoder and AudioDecoder instances, which is // needed for some codecs with built-in bandwidth adaptation.) // + // Returns null if the format isn't supported. + // // Note: Implementations need to be robust against combinations other than // one encoder, one decoder getting the same ID; such encoders must still // work. 
diff --git a/api/audio_codecs/g711/BUILD.gn b/api/audio_codecs/g711/BUILD.gn index ba0586b901..92d77bed9f 100644 --- a/api/audio_codecs/g711/BUILD.gn +++ b/api/audio_codecs/g711/BUILD.gn @@ -25,6 +25,8 @@ rtc_library("audio_encoder_g711") { "../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -42,6 +44,8 @@ rtc_library("audio_decoder_g711") { "../../../modules/audio_coding:g711", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/g722/BUILD.gn b/api/audio_codecs/g722/BUILD.gn index 8738ef889a..a186eabbb7 100644 --- a/api/audio_codecs/g722/BUILD.gn +++ b/api/audio_codecs/g722/BUILD.gn @@ -31,6 +31,8 @@ rtc_library("audio_encoder_g722") { "../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -48,6 +50,8 @@ rtc_library("audio_decoder_g722") { "../../../modules/audio_coding:g722", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/ilbc/BUILD.gn b/api/audio_codecs/ilbc/BUILD.gn index 066a73cff2..b6a5045eaf 100644 --- a/api/audio_codecs/ilbc/BUILD.gn +++ b/api/audio_codecs/ilbc/BUILD.gn @@ -30,6 +30,8 @@ rtc_library("audio_encoder_ilbc") { "../../../modules/audio_coding:ilbc", "../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -46,6 +48,8 @@ 
rtc_library("audio_decoder_ilbc") { "..:audio_codecs_api", "../../../modules/audio_coding:ilbc", "../../../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/ilbc/audio_encoder_ilbc.cc b/api/audio_codecs/ilbc/audio_encoder_ilbc.cc index bd653b7979..035b0dc34f 100644 --- a/api/audio_codecs/ilbc/audio_encoder_ilbc.cc +++ b/api/audio_codecs/ilbc/audio_encoder_ilbc.cc @@ -32,7 +32,7 @@ int GetIlbcBitrate(int ptime) { // 50 bytes per frame of 30 ms => (approx) 13333 bits/s. return 13333; default: - FATAL(); + RTC_CHECK_NOTREACHED(); } } } // namespace diff --git a/api/audio_codecs/isac/BUILD.gn b/api/audio_codecs/isac/BUILD.gn index 9eb32147e1..6ff6e5f092 100644 --- a/api/audio_codecs/isac/BUILD.gn +++ b/api/audio_codecs/isac/BUILD.gn @@ -68,6 +68,8 @@ rtc_library("audio_encoder_isac_fix") { "../../../modules/audio_coding:isac_fix", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -85,6 +87,8 @@ rtc_library("audio_decoder_isac_fix") { "../../../modules/audio_coding:isac_fix", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -102,6 +106,8 @@ rtc_library("audio_encoder_isac_float") { "../../../modules/audio_coding:isac", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -119,6 +125,8 @@ rtc_library("audio_decoder_isac_float") { "../../../modules/audio_coding:isac", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", 
"//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/isac/audio_encoder_isac_fix.cc b/api/audio_codecs/isac/audio_encoder_isac_fix.cc index 7bcb78ef5a..7cf55b9163 100644 --- a/api/audio_codecs/isac/audio_encoder_isac_fix.cc +++ b/api/audio_codecs/isac/audio_encoder_isac_fix.cc @@ -56,6 +56,7 @@ std::unique_ptr AudioEncoderIsacFix::MakeAudioEncoder( RTC_DCHECK(config.IsOk()); AudioEncoderIsacFixImpl::Config c; c.frame_size_ms = config.frame_size_ms; + c.bit_rate = config.bit_rate; c.payload_type = payload_type; return std::make_unique(c); } diff --git a/api/audio_codecs/isac/audio_encoder_isac_fix.h b/api/audio_codecs/isac/audio_encoder_isac_fix.h index 9bedde59f5..e50d9f5112 100644 --- a/api/audio_codecs/isac/audio_encoder_isac_fix.h +++ b/api/audio_codecs/isac/audio_encoder_isac_fix.h @@ -26,8 +26,17 @@ namespace webrtc { // parameter to CreateAudioEncoderFactory<...>(). struct RTC_EXPORT AudioEncoderIsacFix { struct Config { - bool IsOk() const { return frame_size_ms == 30 || frame_size_ms == 60; } + bool IsOk() const { + if (frame_size_ms != 30 && frame_size_ms != 60) { + return false; + } + if (bit_rate < 10000 || bit_rate > 32000) { + return false; + } + return true; + } int frame_size_ms = 30; + int bit_rate = 32000; // Limit on short-term average bit rate, in bits/s. 
}; static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); static void AppendSupportedEncoders(std::vector* specs); diff --git a/api/audio_codecs/opus/BUILD.gn b/api/audio_codecs/opus/BUILD.gn index 5fb626d990..586e9b3dd8 100644 --- a/api/audio_codecs/opus/BUILD.gn +++ b/api/audio_codecs/opus/BUILD.gn @@ -23,8 +23,8 @@ rtc_library("audio_encoder_opus_config") { deps = [ "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] if (rtc_opus_variable_complexity) { defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ] @@ -49,6 +49,8 @@ rtc_library("audio_encoder_opus") { "../../../modules/audio_coding:webrtc_opus", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -66,6 +68,8 @@ rtc_library("audio_decoder_opus") { "../../../modules/audio_coding:webrtc_opus", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -82,8 +86,8 @@ rtc_library("audio_encoder_multiopus") { "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", "../opus:audio_encoder_opus_config", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_decoder_multiopus") { @@ -99,6 +103,8 @@ rtc_library("audio_decoder_multiopus") { "../../../modules/audio_coding:webrtc_multiopus", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", diff --git a/api/audio_options.h b/api/audio_options.h index 
b714998c6b..1b0d1ad0bd 100644 --- a/api/audio_options.h +++ b/api/audio_options.h @@ -75,6 +75,8 @@ struct RTC_EXPORT AudioOptions { // and check if any other AudioOptions members are unused. absl::optional combined_audio_video_bwe; // Enable audio network adaptor. + // TODO(webrtc:11717): Remove this API in favor of adaptivePtime in + // RtpEncodingParameters. absl::optional audio_network_adaptor; // Config string for audio network adaptor. absl::optional audio_network_adaptor_config; diff --git a/api/call/bitrate_allocation.h b/api/call/bitrate_allocation.h index 24530c9755..13c7f74b3a 100644 --- a/api/call/bitrate_allocation.h +++ b/api/call/bitrate_allocation.h @@ -34,6 +34,10 @@ struct BitrateAllocationUpdate { TimeDelta round_trip_time = TimeDelta::PlusInfinity(); // |bwe_period| is deprecated, use |stable_target_bitrate| allocation instead. TimeDelta bwe_period = TimeDelta::PlusInfinity(); + // Congestion window pushback bitrate reduction fraction. Used in + // VideoStreamEncoder to reduce the bitrate by the given fraction + // by dropping frames. 
+ double cwnd_reduce_ratio = 0; }; } // namespace webrtc diff --git a/api/create_peerconnection_factory.cc b/api/create_peerconnection_factory.cc index 6223150079..008fce3e80 100644 --- a/api/create_peerconnection_factory.cc +++ b/api/create_peerconnection_factory.cc @@ -18,6 +18,7 @@ #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" #include "media/base/media_engine.h" #include "media/engine/webrtc_media_engine.h" #include "modules/audio_device/include/audio_device.h" @@ -36,7 +37,8 @@ rtc::scoped_refptr CreatePeerConnectionFactory( std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing) { + rtc::scoped_refptr audio_processing, + AudioFrameProcessor* audio_frame_processor) { PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = network_thread; dependencies.worker_thread = worker_thread; @@ -45,12 +47,14 @@ rtc::scoped_refptr CreatePeerConnectionFactory( dependencies.call_factory = CreateCallFactory(); dependencies.event_log_factory = std::make_unique( dependencies.task_queue_factory.get()); + dependencies.trials = std::make_unique(); cricket::MediaEngineDependencies media_dependencies; media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); media_dependencies.adm = std::move(default_adm); media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory); media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory); + media_dependencies.audio_frame_processor = audio_frame_processor; if (audio_processing) { media_dependencies.audio_processing = std::move(audio_processing); } else { @@ -59,6 +63,7 @@ rtc::scoped_refptr CreatePeerConnectionFactory( media_dependencies.audio_mixer = std::move(audio_mixer); media_dependencies.video_encoder_factory = 
std::move(video_encoder_factory); media_dependencies.video_decoder_factory = std::move(video_decoder_factory); + media_dependencies.trials = dependencies.trials.get(); dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_dependencies)); diff --git a/api/create_peerconnection_factory.h b/api/create_peerconnection_factory.h index ac50736b80..4eb0a00e54 100644 --- a/api/create_peerconnection_factory.h +++ b/api/create_peerconnection_factory.h @@ -31,6 +31,7 @@ class Thread; namespace webrtc { class AudioDeviceModule; +class AudioFrameProcessor; class AudioProcessing; // Create a new instance of PeerConnectionFactoryInterface with optional video @@ -47,7 +48,8 @@ CreatePeerConnectionFactory( std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing); + rtc::scoped_refptr audio_processing, + AudioFrameProcessor* audio_frame_processor = nullptr); } // namespace webrtc diff --git a/api/crypto/frame_decryptor_interface.h b/api/crypto/frame_decryptor_interface.h index ec900ab80a..2f6bdac4b4 100644 --- a/api/crypto/frame_decryptor_interface.h +++ b/api/crypto/frame_decryptor_interface.h @@ -27,7 +27,6 @@ namespace webrtc { // without it. You may assume that this interface will have the same lifetime // as the RTPReceiver it is attached to. It must only be attached to one // RTPReceiver. Additional data may be null. -// Note: This interface is not ready for production use. class FrameDecryptorInterface : public rtc::RefCountInterface { public: // The Status enum represents all possible states that can be diff --git a/api/crypto/frame_encryptor_interface.h b/api/crypto/frame_encryptor_interface.h index d5b6d8a2d5..1452b80189 100644 --- a/api/crypto/frame_encryptor_interface.h +++ b/api/crypto/frame_encryptor_interface.h @@ -24,7 +24,6 @@ namespace webrtc { // addition to the standard SRTP mechanism and is not intended to be used // without it. 
Implementations of this interface will have the same lifetime as // the RTPSenders it is attached to. Additional data may be null. -// Note: This interface is not ready for production use. class FrameEncryptorInterface : public rtc::RefCountInterface { public: ~FrameEncryptorInterface() override {} diff --git a/api/data_channel_interface.h b/api/data_channel_interface.h index e08830feaf..5b2b1263ab 100644 --- a/api/data_channel_interface.h +++ b/api/data_channel_interface.h @@ -20,6 +20,7 @@ #include #include "absl/types/optional.h" +#include "api/priority.h" #include "api/rtc_error.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" @@ -61,6 +62,9 @@ struct DataChannelInit { // The stream id, or SID, for SCTP data channels. -1 if unset (see above). int id = -1; + + // https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member + absl::optional priority; }; // At the JavaScript level, data can be passed in as a string or a blob, so @@ -154,6 +158,7 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface { // If negotiated in-band, this ID will be populated once the DTLS role is // determined, and until then this will return -1. virtual int id() const = 0; + virtual Priority priority() const { return Priority::kLow; } virtual DataState state() const = 0; // When state is kClosed, and the DataChannel was not closed using // the closing procedure, returns the error information about the closing. diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h new file mode 100644 index 0000000000..2cfe6edb88 --- /dev/null +++ b/api/frame_transformer_interface.h @@ -0,0 +1,99 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FRAME_TRANSFORMER_INTERFACE_H_ +#define API_FRAME_TRANSFORMER_INTERFACE_H_ + +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/encoded_frame.h" +#include "api/video/video_frame_metadata.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// Owns the frame payload data. +class TransformableFrameInterface { + public: + virtual ~TransformableFrameInterface() = default; + + // Returns the frame payload data. The data is valid until the next non-const + // method call. + virtual rtc::ArrayView GetData() const = 0; + + // Copies |data| into the owned frame payload data. + virtual void SetData(rtc::ArrayView data) = 0; + + virtual uint32_t GetTimestamp() const = 0; + virtual uint32_t GetSsrc() const = 0; +}; + +class TransformableVideoFrameInterface : public TransformableFrameInterface { + public: + virtual ~TransformableVideoFrameInterface() = default; + virtual bool IsKeyFrame() const = 0; + + // Returns data needed in the frame transformation logic; for example, + // when the transformation applied to the frame is encryption/decryption, the + // additional data holds the serialized generic frame descriptor extension + // calculated in webrtc::RtpDescriptorAuthentication. + // TODO(bugs.webrtc.org/11380) remove from interface once + // webrtc::RtpDescriptorAuthentication is exposed in api/. + virtual std::vector GetAdditionalData() const = 0; + + virtual const VideoFrameMetadata& GetMetadata() const = 0; +}; + +// Extends the TransformableFrameInterface to expose audio-specific information. +class TransformableAudioFrameInterface : public TransformableFrameInterface { + public: + virtual ~TransformableAudioFrameInterface() = default; + + // Exposes the frame header, enabling the interface clients to use the + // information in the header as needed, for example to compile the list of + // csrcs. 
+ virtual const RTPHeader& GetHeader() const = 0; +}; + +// Objects implement this interface to be notified with the transformed frame. +class TransformedFrameCallback : public rtc::RefCountInterface { + public: + virtual void OnTransformedFrame( + std::unique_ptr frame) = 0; + + protected: + ~TransformedFrameCallback() override = default; +}; + +// Transforms encoded frames. The transformed frame is sent in a callback using +// the TransformedFrameCallback interface (see above). +class FrameTransformerInterface : public rtc::RefCountInterface { + public: + // Transforms |frame| using the implementing class' processing logic. + virtual void Transform( + std::unique_ptr transformable_frame) = 0; + + virtual void RegisterTransformedFrameCallback( + rtc::scoped_refptr) {} + virtual void RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr, + uint32_t ssrc) {} + virtual void UnregisterTransformedFrameCallback() {} + virtual void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) {} + + protected: + ~FrameTransformerInterface() override = default; +}; + +} // namespace webrtc + +#endif // API_FRAME_TRANSFORMER_INTERFACE_H_ diff --git a/api/ice_transport_factory.cc b/api/ice_transport_factory.cc index c32d7d2e11..ac7e34e869 100644 --- a/api/ice_transport_factory.cc +++ b/api/ice_transport_factory.cc @@ -13,6 +13,7 @@ #include #include +#include "api/media_types.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/port_allocator.h" @@ -59,7 +60,7 @@ rtc::scoped_refptr CreateIceTransport( IceTransportInit init) { return new rtc::RefCountedObject( std::make_unique( - "", 0, init.port_allocator(), init.async_resolver_factory(), + "", cricket::MediaType::MEDIA_TYPE_AUDIO, 0, init.port_allocator(), init.async_resolver_factory(), init.event_log())); } diff --git a/api/ice_transport_interface.h b/api/ice_transport_interface.h index d2f1edc012..cd0e15bde6 100644 --- a/api/ice_transport_interface.h +++ 
b/api/ice_transport_interface.h @@ -14,6 +14,7 @@ #include #include "api/async_resolver_factory.h" +#include "api/media_types.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/scoped_refptr.h" @@ -87,6 +88,7 @@ class IceTransportFactory { // peer connection should offer transports compatible with these assumptions. virtual rtc::scoped_refptr CreateIceTransport( const std::string& transport_name, + cricket::MediaType media_type, int component, IceTransportInit init) = 0; }; diff --git a/api/jsep.cc b/api/jsep.cc index ddb39b6181..5fdc8905c7 100644 --- a/api/jsep.cc +++ b/api/jsep.cc @@ -21,23 +21,6 @@ size_t SessionDescriptionInterface::RemoveCandidates( return 0; } -void CreateSessionDescriptionObserver::OnFailure(RTCError error) { - OnFailure(error.message()); -} - -void CreateSessionDescriptionObserver::OnFailure(const std::string& error) { - OnFailure(RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error))); -} - -void SetSessionDescriptionObserver::OnFailure(RTCError error) { - std::string message(error.message()); - OnFailure(message); -} - -void SetSessionDescriptionObserver::OnFailure(const std::string& error) { - OnFailure(RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error))); -} - const char SessionDescriptionInterface::kOffer[] = "offer"; const char SessionDescriptionInterface::kPrAnswer[] = "pranswer"; const char SessionDescriptionInterface::kAnswer[] = "answer"; diff --git a/api/jsep.h b/api/jsep.h index 86f4162f84..cf8aeb0cb4 100644 --- a/api/jsep.h +++ b/api/jsep.h @@ -28,6 +28,7 @@ #include "absl/types/optional.h" #include "api/rtc_error.h" +#include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -222,11 +223,9 @@ class RTC_EXPORT CreateSessionDescriptionObserver // error code and a string. // RTCError is non-copyable, so it must be passed using std::move. // Earlier versions of the API used a string argument. 
This version - // is deprecated; in order to let clients remove the old version, it has a - // default implementation. If both versions are unimplemented, the - // result will be a runtime error (stack overflow). This is intentional. - virtual void OnFailure(RTCError error); - virtual void OnFailure(const std::string& error); + // is removed; its functionality was the same as passing + // error.message. + virtual void OnFailure(RTCError error) = 0; protected: ~CreateSessionDescriptionObserver() override = default; @@ -237,9 +236,7 @@ class RTC_EXPORT SetSessionDescriptionObserver : public rtc::RefCountInterface { public: virtual void OnSuccess() = 0; // See description in CreateSessionDescriptionObserver for OnFailure. - virtual void OnFailure(RTCError error); - - virtual void OnFailure(const std::string& error); + virtual void OnFailure(RTCError error) = 0; protected: ~SetSessionDescriptionObserver() override = default; diff --git a/api/media_stream_interface.h b/api/media_stream_interface.h index 79d463d788..bd4a2c0292 100644 --- a/api/media_stream_interface.h +++ b/api/media_stream_interface.h @@ -137,15 +137,11 @@ class VideoTrackSourceInterface : public MediaSourceInterface, virtual bool GetStats(Stats* stats) = 0; // Returns true if encoded output can be enabled in the source. - // TODO(bugs.webrtc.org/11114): make pure virtual once downstream project - // adapts. - virtual bool SupportsEncodedOutput() const { return false; } + virtual bool SupportsEncodedOutput() const = 0; // Reliably cause a key frame to be generated in encoded output. // TODO(bugs.webrtc.org/11115): find optimal naming. - // TODO(bugs.webrtc.org/11114): make pure virtual once downstream project - // adapts. - virtual void GenerateKeyFrame() {} + virtual void GenerateKeyFrame() = 0; // Add an encoded video sink to the source and additionally cause // a key frame to be generated from the source. 
The sink will be @@ -153,13 +149,11 @@ class VideoTrackSourceInterface : public MediaSourceInterface, // TODO(bugs.webrtc.org/11114): make pure virtual once downstream project // adapts. virtual void AddEncodedSink( - rtc::VideoSinkInterface* sink) {} + rtc::VideoSinkInterface* sink) = 0; // Removes an encoded video sink from the source. - // TODO(bugs.webrtc.org/11114): make pure virtual once downstream project - // adapts. virtual void RemoveEncodedSink( - rtc::VideoSinkInterface* sink) {} + rtc::VideoSinkInterface* sink) = 0; protected: ~VideoTrackSourceInterface() override = default; diff --git a/api/media_stream_proxy.h b/api/media_stream_proxy.h index 516967998f..8ee33ca0ee 100644 --- a/api/media_stream_proxy.h +++ b/api/media_stream_proxy.h @@ -22,7 +22,7 @@ namespace webrtc { // are called on is an implementation detail. BEGIN_SIGNALING_PROXY_MAP(MediaStream) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_METHOD0(AudioTrackVector, GetAudioTracks) PROXY_METHOD0(VideoTrackVector, GetVideoTracks) PROXY_METHOD1(rtc::scoped_refptr, diff --git a/pc/media_stream_track.h b/api/media_stream_track.h similarity index 88% rename from pc/media_stream_track.h rename to api/media_stream_track.h index 358d89a25b..738f034143 100644 --- a/pc/media_stream_track.h +++ b/api/media_stream_track.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef PC_MEDIA_STREAM_TRACK_H_ -#define PC_MEDIA_STREAM_TRACK_H_ +#ifndef API_MEDIA_STREAM_TRACK_H_ +#define API_MEDIA_STREAM_TRACK_H_ #include @@ -38,6 +38,7 @@ class MediaStreamTrack : public Notifier { } return fire_on_change; } + void set_ended() { set_state(MediaStreamTrackInterface::TrackState::kEnded); } protected: explicit MediaStreamTrack(const std::string& id) @@ -53,10 +54,10 @@ class MediaStreamTrack : public Notifier { private: bool enabled_; - std::string id_; + const std::string id_; MediaStreamTrackInterface::TrackState state_; }; } // namespace webrtc -#endif // PC_MEDIA_STREAM_TRACK_H_ +#endif // API_MEDIA_STREAM_TRACK_H_ diff --git a/api/media_stream_track_proxy.h b/api/media_stream_track_proxy.h index d3dc25504b..59dcb77244 100644 --- a/api/media_stream_track_proxy.h +++ b/api/media_stream_track_proxy.h @@ -26,8 +26,8 @@ namespace webrtc { BEGIN_SIGNALING_PROXY_MAP(AudioTrack) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(std::string, kind) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(std::string, kind) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(TrackState, state) PROXY_CONSTMETHOD0(bool, enabled) PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource) @@ -42,8 +42,8 @@ END_PROXY_MAP() BEGIN_PROXY_MAP(VideoTrack) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(std::string, kind) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(std::string, kind) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(TrackState, state) PROXY_CONSTMETHOD0(bool, enabled) PROXY_METHOD1(bool, set_enabled, bool) diff --git a/api/media_types.cc b/api/media_types.cc index 6bc693860d..e0a548e93e 100644 --- a/api/media_types.cc +++ b/api/media_types.cc @@ -17,6 +17,7 @@ namespace cricket { const char kMediaTypeVideo[] = "video"; const char kMediaTypeAudio[] = "audio"; const char kMediaTypeData[] = "data"; +const char kMediaTypeScreen[] = "screen"; std::string MediaTypeToString(MediaType 
type) { switch (type) { @@ -26,10 +27,14 @@ std::string MediaTypeToString(MediaType type) { return kMediaTypeVideo; case MEDIA_TYPE_DATA: return kMediaTypeData; + case MEDIA_TYPE_SCREEN: + return kMediaTypeScreen; + case MEDIA_TYPE_UNSUPPORTED: + // Unsupported media stores the m= differently. + RTC_NOTREACHED(); + return ""; } - FATAL(); - // Not reachable; avoids compile warning. - return ""; + RTC_CHECK_NOTREACHED(); } } // namespace cricket diff --git a/api/media_types.h b/api/media_types.h index 8c6ba3d1ed..02a9f7ad97 100644 --- a/api/media_types.h +++ b/api/media_types.h @@ -20,11 +20,18 @@ namespace cricket { -enum MediaType { MEDIA_TYPE_AUDIO, MEDIA_TYPE_VIDEO, MEDIA_TYPE_DATA }; +enum MediaType { + MEDIA_TYPE_AUDIO, + MEDIA_TYPE_VIDEO, + MEDIA_TYPE_DATA, + MEDIA_TYPE_SCREEN, + MEDIA_TYPE_UNSUPPORTED +}; extern const char kMediaTypeAudio[]; extern const char kMediaTypeVideo[]; extern const char kMediaTypeData[]; +extern const char kMediaTypeScreen[]; RTC_EXPORT std::string MediaTypeToString(MediaType type); diff --git a/api/neteq/BUILD.gn b/api/neteq/BUILD.gn index 1ab02ec92b..4e85c4d268 100644 --- a/api/neteq/BUILD.gn +++ b/api/neteq/BUILD.gn @@ -23,8 +23,8 @@ rtc_source_set("neteq_api") { "../../rtc_base:rtc_base_approved", "../../system_wrappers:system_wrappers", "../audio_codecs:audio_codecs_api", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("custom_neteq_factory") { @@ -56,8 +56,8 @@ rtc_source_set("neteq_controller_api") { ":tick_timer", "../../rtc_base:rtc_base_approved", "../../system_wrappers:system_wrappers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("default_neteq_controller_factory") { diff --git a/api/neteq/OWNERS b/api/neteq/OWNERS index 2a16031983..da887989eb 100644 --- a/api/neteq/OWNERS +++ b/api/neteq/OWNERS @@ -1,2 +1,2 @@ ivoc@webrtc.org -hlundin@webrtc.org 
+henrik.lundin@webrtc.org diff --git a/api/neteq/neteq.cc b/api/neteq/neteq.cc index 155ddf2cf3..e8ef4dbd39 100644 --- a/api/neteq/neteq.cc +++ b/api/neteq/neteq.cc @@ -30,7 +30,8 @@ std::string NetEq::Config::ToString() const { << ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate=" << (enable_fast_accelerate ? "true" : "false") << ", enable_muted_state=" << (enable_muted_state ? "true" : "false") - << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false"); + << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false") + << ", extra_output_delay_ms=" << extra_output_delay_ms; return ss.str(); } diff --git a/api/neteq/neteq.h b/api/neteq/neteq.h index 67dbd507a1..aca0d65fd1 100644 --- a/api/neteq/neteq.h +++ b/api/neteq/neteq.h @@ -30,13 +30,15 @@ namespace webrtc { class AudioFrame; class AudioDecoderFactory; class Clock; +#ifndef DISABLE_RECORDER +class Recorder; +#endif struct NetEqNetworkStatistics { uint16_t current_buffer_size_ms; // Current jitter buffer size in ms. uint16_t preferred_buffer_size_ms; // Target buffer size in ms. uint16_t jitter_peaks_found; // 1 if adding extra delay due to peaky // jitter; 0 otherwise. - uint16_t packet_loss_rate; // Loss rate (network + late) in Q14. uint16_t expand_rate; // Fraction (of original stream) of synthesized // audio inserted through expansion (in Q14). uint16_t speech_expand_rate; // Fraction (of original stream) of synthesized @@ -49,7 +51,6 @@ struct NetEqNetworkStatistics { // decoding (in Q14). uint16_t secondary_discarded_rate; // Fraction of discarded FEC/RED data (in // Q14). - size_t added_zero_samples; // Number of zero samples added in "off" mode. // Statistics for packet waiting times, i.e., the time between a packet // arrives until it is decoded. 
int mean_waiting_time_ms; @@ -68,6 +69,7 @@ struct NetEqLifetimeStatistics { uint64_t concealment_events = 0; uint64_t jitter_buffer_delay_ms = 0; uint64_t jitter_buffer_emitted_count = 0; + uint64_t jitter_buffer_target_delay_ms = 0; uint64_t inserted_samples_for_deceleration = 0; uint64_t removed_samples_for_acceleration = 0; uint64_t silent_concealed_samples = 0; @@ -137,6 +139,10 @@ class NetEq { bool enable_rtx_handling = false; absl::optional codec_pair_id; bool for_test_no_time_stretching = false; // Use only for testing. + // Adds extra delay to the output of NetEq, without affecting jitter or + // loss behavior. This is mainly for testing. Value must be a non-negative + // multiple of 10 ms. + int extra_output_delay_ms = 0; }; enum ReturnCodes { kOK = 0, kFail = -1 }; @@ -195,13 +201,6 @@ class NetEq { virtual int InsertPacket(const RTPHeader& rtp_header, rtc::ArrayView payload) = 0; - // Deprecated. Use the version without the `receive_timestamp` argument. - int InsertPacket(const RTPHeader& rtp_header, - rtc::ArrayView payload, - uint32_t /*receive_timestamp*/) { - return InsertPacket(rtp_header, payload); - } - // Lets NetEq know that a packet arrived with an empty payload. This typically // happens when empty packets are used for probing the network channel, and // these packets use RTP sequence numbers from the same series as the actual @@ -276,6 +275,9 @@ class NetEq { // after the call. virtual int NetworkStatistics(NetEqNetworkStatistics* stats) = 0; + // Current values only, not resetting any state. + virtual NetEqNetworkStatistics CurrentNetworkStatistics() const = 0; + // Returns a copy of this class's lifetime statistics. These statistics are // never reset. virtual NetEqLifetimeStatistics GetLifetimeStatistics() const = 0; @@ -329,6 +331,10 @@ class NetEq { // Returns the length of the audio yet to play in the sync buffer. // Mainly intended for testing. 
virtual int SyncBufferSizeMs() const = 0; + +#ifndef DISABLE_RECORDER + virtual void InjectRecorder(Recorder* recorder) = 0; +#endif }; } // namespace webrtc diff --git a/api/neteq/neteq_controller.h b/api/neteq/neteq_controller.h index 1d47eaca78..2c09c3e15f 100644 --- a/api/neteq/neteq_controller.h +++ b/api/neteq/neteq_controller.h @@ -97,6 +97,14 @@ class NetEqController { size_t sync_buffer_samples; }; + struct PacketArrivedInfo { + size_t packet_length_samples; + uint32_t main_timestamp; + uint16_t main_sequence_number; + bool is_cng_or_dtmf; + bool is_dtx; + }; + virtual ~NetEqController() = default; // Resets object to a clean state. @@ -152,16 +160,17 @@ class NetEqController { virtual void AddSampleMemory(int32_t value) = 0; // Returns the target buffer level in ms. - virtual int TargetLevelMs() = 0; + virtual int TargetLevelMs() const = 0; // Notify the NetEqController that a packet has arrived. Returns the relative // arrival delay, if it can be computed. - virtual absl::optional PacketArrived(bool last_cng_or_dtmf, - size_t packet_length_samples, + virtual absl::optional PacketArrived(int fs_hz, bool should_update_stats, - uint16_t main_sequence_number, - uint32_t main_timestamp, - int fs_hz) = 0; + const PacketArrivedInfo& info) = 0; + + // Notify the NetEqController that we are currently in muted state. + // TODO(ivoc): Make pure virtual when downstream is updated. + virtual void NotifyMutedState() {} // Returns true if a peak was found. virtual bool PeakFound() const = 0; diff --git a/api/numerics/BUILD.gn b/api/numerics/BUILD.gn new file mode 100644 index 0000000000..408dc5b9f1 --- /dev/null +++ b/api/numerics/BUILD.gn @@ -0,0 +1,41 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. 
All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +rtc_library("numerics") { + visibility = [ "*" ] + + sources = [ + "samples_stats_counter.cc", + "samples_stats_counter.h", + ] + deps = [ + "..:array_view", + "../../rtc_base:checks", + "../../rtc_base:rtc_numerics", + "../../rtc_base:timeutils", + "../units:timestamp", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] +} + +if (rtc_include_tests) { + rtc_library("numerics_unittests") { + visibility = [ "*" ] + testonly = true + + sources = [ "samples_stats_counter_unittest.cc" ] + + deps = [ + ":numerics", + "../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] + } +} diff --git a/api/numerics/DEPS b/api/numerics/DEPS new file mode 100644 index 0000000000..2d89d57557 --- /dev/null +++ b/api/numerics/DEPS @@ -0,0 +1,6 @@ +specific_include_rules = { + # Some internal headers are allowed even in API headers: + "samples_stats_counter\.h": [ + "+rtc_base/numerics/running_statistics.h", + ] +} diff --git a/rtc_base/numerics/samples_stats_counter.cc b/api/numerics/samples_stats_counter.cc similarity index 95% rename from rtc_base/numerics/samples_stats_counter.cc rename to api/numerics/samples_stats_counter.cc index c262d48be9..36871a6713 100644 --- a/rtc_base/numerics/samples_stats_counter.cc +++ b/api/numerics/samples_stats_counter.cc @@ -8,8 +8,9 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "rtc_base/numerics/samples_stats_counter.h" +#include "api/numerics/samples_stats_counter.h" +#include #include #include "absl/algorithm/container.h" @@ -27,7 +28,7 @@ SamplesStatsCounter& SamplesStatsCounter::operator=(SamplesStatsCounter&&) = default; void SamplesStatsCounter::AddSample(double value) { - AddSample(StatsSample{value, Timestamp::us(rtc::TimeMicros())}); + AddSample(StatsSample{value, Timestamp::Micros(rtc::TimeMicros())}); } void SamplesStatsCounter::AddSample(StatsSample sample) { diff --git a/rtc_base/numerics/samples_stats_counter.h b/api/numerics/samples_stats_counter.h similarity index 92% rename from rtc_base/numerics/samples_stats_counter.h rename to api/numerics/samples_stats_counter.h index a4ec443d31..283c1e4ed2 100644 --- a/rtc_base/numerics/samples_stats_counter.h +++ b/api/numerics/samples_stats_counter.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_NUMERICS_SAMPLES_STATS_COUNTER_H_ -#define RTC_BASE_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#ifndef API_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#define API_NUMERICS_SAMPLES_STATS_COUNTER_H_ #include @@ -45,6 +45,8 @@ class SamplesStatsCounter { // Returns if there are any values in O(1) time. bool IsEmpty() const { return samples_.empty(); } + // Returns the amount of samples added into counter in O(1) time. + int64_t NumSamples() const { return stats_.Size(); } // Returns min in O(1) time. This function may not be called if there are no // samples. 
@@ -98,7 +100,7 @@ class SamplesStatsCounter { } private: - RunningStatistics stats_; + webrtc_impl::RunningStatistics stats_; std::vector samples_; bool sorted_ = false; }; @@ -116,4 +118,4 @@ SamplesStatsCounter operator/(const SamplesStatsCounter& counter, double value); } // namespace webrtc -#endif // RTC_BASE_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#endif // API_NUMERICS_SAMPLES_STATS_COUNTER_H_ diff --git a/rtc_base/numerics/samples_stats_counter_unittest.cc b/api/numerics/samples_stats_counter_unittest.cc similarity index 99% rename from rtc_base/numerics/samples_stats_counter_unittest.cc rename to api/numerics/samples_stats_counter_unittest.cc index 1221e9b2a5..1f9cabfb29 100644 --- a/rtc_base/numerics/samples_stats_counter_unittest.cc +++ b/api/numerics/samples_stats_counter_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/numerics/samples_stats_counter.h" +#include "api/numerics/samples_stats_counter.h" #include diff --git a/api/peer_connection_interface.cc b/api/peer_connection_interface.cc index 0c25405784..f82e84b80f 100644 --- a/api/peer_connection_interface.cc +++ b/api/peer_connection_interface.cc @@ -53,27 +53,6 @@ RTCError PeerConnectionInterface::SetConfiguration( return RTCError(); } -RTCError PeerConnectionInterface::SetBitrate(const BitrateSettings& bitrate) { - BitrateParameters bitrate_parameters; - bitrate_parameters.min_bitrate_bps = bitrate.min_bitrate_bps; - bitrate_parameters.current_bitrate_bps = bitrate.start_bitrate_bps; - bitrate_parameters.max_bitrate_bps = bitrate.max_bitrate_bps; - return SetBitrate(bitrate_parameters); -} - -RTCError PeerConnectionInterface::SetBitrate( - const BitrateParameters& bitrate_parameters) { - BitrateSettings bitrate; - bitrate.min_bitrate_bps = bitrate_parameters.min_bitrate_bps; - bitrate.start_bitrate_bps = bitrate_parameters.current_bitrate_bps; - bitrate.max_bitrate_bps = bitrate_parameters.max_bitrate_bps; - return SetBitrate(bitrate); 
-} - -PeerConnectionInterface::BitrateParameters::BitrateParameters() = default; - -PeerConnectionInterface::BitrateParameters::~BitrateParameters() = default; - PeerConnectionDependencies::PeerConnectionDependencies( PeerConnectionObserver* observer_in) : observer(observer_in) {} diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h index cc37dabd65..8aaffbf71e 100644 --- a/api/peer_connection_interface.h +++ b/api/peer_connection_interface.h @@ -73,6 +73,7 @@ #include #include +#include "api/adaptation/resource.h" #include "api/async_resolver_factory.h" #include "api/audio/audio_mixer.h" #include "api/audio_codecs/audio_decoder_factory.h" @@ -96,14 +97,15 @@ #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_interface.h" #include "api/sctp_transport_interface.h" +#include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" #include "api/stats/rtc_stats_collector_callback.h" #include "api/stats_types.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" #include "api/transport/enums.h" -#include "api/transport/media/media_transport_interface.h" #include "api/transport/network_control.h" +#include "api/transport/sctp_transport_factory_interface.h" #include "api/transport/webrtc_key_value_config.h" #include "api/turn_customizer.h" #include "media/base/media_config.h" @@ -112,7 +114,7 @@ // inject a PacketSocketFactory and/or NetworkManager, and not expose // PortAllocator in the PeerConnection api. #include "p2p/base/port_allocator.h" // nogncheck -#include "rtc_base/network.h" +#include "rtc_base/network_monitor_factory.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/socket_address.h" @@ -613,34 +615,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // correctly. This flag will be deprecated soon. Do not rely on it. 
bool active_reset_srtp_params = false; - // DEPRECATED. Do not use. This option is ignored by peer connection. - // TODO(webrtc:9719): Delete this option. - bool use_media_transport = false; - - // DEPRECATED. Do not use. This option is ignored by peer connection. - // TODO(webrtc:9719): Delete this option. - bool use_media_transport_for_data_channels = false; - - // If MediaTransportFactory is provided in PeerConnectionFactory, this flag - // informs PeerConnection that it should use the DatagramTransportInterface - // for packets instead DTLS. It's invalid to set it to |true| if the - // MediaTransportFactory wasn't provided. - absl::optional use_datagram_transport; - - // If MediaTransportFactory is provided in PeerConnectionFactory, this flag - // informs PeerConnection that it should use the DatagramTransport's - // implementation of DataChannelTransportInterface for data channels instead - // of SCTP-DTLS. - absl::optional use_datagram_transport_for_data_channels; - - // If true, this PeerConnection will only use datagram transport for data - // channels when receiving an incoming offer that includes datagram - // transport parameters. It will not request use of a datagram transport - // when it creates the initial, outgoing offer. - // This setting only applies when |use_datagram_transport_for_data_channels| - // is true. - absl::optional use_datagram_transport_for_data_channels_receive_only; - // Defines advanced optional cryptographic settings related to SRTP and // frame encryption for native WebRTC. Setting this will overwrite any // settings set in PeerConnectionFactory (which is deprecated). @@ -666,6 +640,9 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Whether network condition based codec switching is allowed. absl::optional allow_codec_switching; + // The delay before doing a usage histogram report for long-lived + // PeerConnections. Used for testing only. 
+ absl::optional report_usage_pattern_delay_ms; // // Don't forget to update operator== if adding something. // @@ -936,6 +913,10 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { const std::string& label, const DataChannelInit* config) = 0; + // NOTE: For the following 6 methods, it's only safe to dereference the + // SessionDescriptionInterface on signaling_thread() (for example, calling + // ToString). + // Returns the more recently applied description; "pending" if it exists, and // otherwise "current". See below. virtual const SessionDescriptionInterface* local_description() const = 0; @@ -975,26 +956,66 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { const RTCOfferAnswerOptions& options) = 0; // Sets the local session description. - // The PeerConnection takes the ownership of |desc| even if it fails. - // The |observer| callback will be called when done. - // TODO(deadbeef): Change |desc| to be a unique_ptr, to make it clear - // that this method always takes ownership of it. + // + // According to spec, the local session description MUST be the same as was + // returned by CreateOffer() or CreateAnswer() or else the operation should + // fail. Our implementation however allows some amount of "SDP munging", but + // please note that this is HIGHLY DISCOURAGED. If you do not intent to munge + // SDP, the method below that doesn't take |desc| as an argument will create + // the offer or answer for you. + // + // The observer is invoked as soon as the operation completes, which could be + // before or after the SetLocalDescription() method has exited. + virtual void SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) {} + // Creates an offer or answer (depending on current signaling state) and sets + // it as the local session description. 
+ // + // The observer is invoked as soon as the operation completes, which could be + // before or after the SetLocalDescription() method has exited. + virtual void SetLocalDescription( + rtc::scoped_refptr observer) {} + // Like SetLocalDescription() above, but the observer is invoked with a delay + // after the operation completes. This helps avoid recursive calls by the + // observer but also makes it possible for states to change in-between the + // operation completing and the observer getting called. This makes them racy + // for synchronizing peer connection states to the application. + // TODO(https://crbug.com/webrtc/11798): Delete these methods in favor of the + // ones taking SetLocalDescriptionObserverInterface as argument. virtual void SetLocalDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) = 0; - // Implicitly creates an offer or answer (depending on the current signaling - // state) and performs SetLocalDescription() with the newly generated session - // description. - // TODO(hbos): Make pure virtual when implemented by downstream projects. virtual void SetLocalDescription(SetSessionDescriptionObserver* observer) {} + // Sets the remote session description. - // The PeerConnection takes the ownership of |desc| even if it fails. - // The |observer| callback will be called when done. - // TODO(hbos): Remove when Chrome implements the new signature. - virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer, - SessionDescriptionInterface* desc) {} + // + // (Unlike "SDP munging" before SetLocalDescription(), modifying a remote + // offer or answer is allowed by the spec.) + // + // The observer is invoked as soon as the operation completes, which could be + // before or after the SetRemoteDescription() method has exited. 
virtual void SetRemoteDescription( std::unique_ptr desc, rtc::scoped_refptr observer) = 0; + // Like SetRemoteDescription() above, but the observer is invoked with a delay + // after the operation completes. This helps avoid recursive calls by the + // observer but also makes it possible for states to change in-between the + // operation completing and the observer getting called. This makes them racy + // for synchronizing peer connection states to the application. + // TODO(https://crbug.com/webrtc/11798): Delete this method in favor of the + // ones taking SetRemoteDescriptionObserverInterface as argument. + virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) {} + + // According to spec, we must only fire "negotiationneeded" if the Operations + // Chain is empty. This method takes care of validating an event previously + // generated with PeerConnectionObserver::OnNegotiationNeededEvent() to make + // sure that even if there was a delay (e.g. due to a PostTask) between the + // event being generated and the time of firing, the Operations Chain is empty + // and the event is still valid to be fired. + virtual bool ShouldFireNegotiationNeededEvent(uint32_t event_id) { + return true; + } virtual PeerConnectionInterface::RTCConfiguration GetConfiguration() = 0; @@ -1043,28 +1064,13 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { virtual bool RemoveIceCandidates( const std::vector& candidates) = 0; - // 0 <= min <= current <= max should hold for set parameters. - struct BitrateParameters { - BitrateParameters(); - ~BitrateParameters(); - - absl::optional min_bitrate_bps; - absl::optional current_bitrate_bps; - absl::optional max_bitrate_bps; - }; - // SetBitrate limits the bandwidth allocated for all RTP streams sent by // this PeerConnection. Other limitations might affect these limits and // are respected (for example "b=AS" in SDP). 
// // Setting |current_bitrate_bps| will reset the current bitrate estimate // to the provided value. - virtual RTCError SetBitrate(const BitrateSettings& bitrate); - - // TODO(nisse): Deprecated - use version above. These two default - // implementations require subclasses to implement one or the other - // of the methods. - virtual RTCError SetBitrate(const BitrateParameters& bitrate_parameters); + virtual RTCError SetBitrate(const BitrateSettings& bitrate) = 0; // Enable/disable playout of received audio streams. Enabled by default. Note // that even if playout is enabled, streams will only be played out if the @@ -1109,6 +1115,21 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { virtual IceGatheringState ice_gathering_state() = 0; + // Returns the current state of canTrickleIceCandidates per + // https://w3c.github.io/webrtc-pc/#attributes-1 + virtual absl::optional can_trickle_ice_candidates() { + // TODO(crbug.com/708484): Remove default implementation. + return absl::nullopt; + } + + // When a resource is overused, the PeerConnection will try to reduce the load + // on the sysem, for example by reducing the resolution or frame rate of + // encoded streams. The Resource API allows injecting platform-specific usage + // measurements. The conditions to trigger kOveruse or kUnderuse are up to the + // implementation. + // TODO(hbos): Make pure virtual when implemented by downstream projects. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) {} + // Start RtcEventLog using an existing output-sink. Takes ownership of // |output| and passes it on to Call, which will take the ownership. If the // operation fails the output will be closed and deallocated. The event log @@ -1125,6 +1146,9 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Stops logging the RtcEventLog. 
virtual void StopRtcEventLog() = 0; + virtual int32_t StartRecorder(int32_t dir, std::string path) = 0; + virtual int32_t StopRecorder(int32_t dir) = 0; + // Terminates all media, closes the transports, and in general releases any // resources used by the PeerConnection. This is an irreversible operation. // @@ -1133,6 +1157,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // thus the observer object can be safely destroyed. virtual void Close() = 0; + // The thread on which all PeerConnectionObserver callbacks will be invoked, + // as well as callbacks for other classes such as DataChannelObserver. + // + // Also the only thread on which it's safe to use SessionDescriptionInterface + // pointers. + // TODO(deadbeef): Make pure virtual when all subclasses implement it. + virtual rtc::Thread* signaling_thread() const { return nullptr; } + protected: // Dtor protected as objects shouldn't be deleted via this interface. ~PeerConnectionInterface() override = default; @@ -1161,7 +1193,17 @@ class PeerConnectionObserver { // Triggered when renegotiation is needed. For example, an ICE restart // has begun. - virtual void OnRenegotiationNeeded() = 0; + // TODO(hbos): Delete in favor of OnNegotiationNeededEvent() when downstream + // projects have migrated. + virtual void OnRenegotiationNeeded() {} + // Used to fire spec-compliant onnegotiationneeded events, which should only + // fire when the Operations Chain is empty. The observer is responsible for + // queuing a task (e.g. Chromium: jump to main thread) to maybe fire the + // event. The event identified using |event_id| must only fire if + // PeerConnection::ShouldFireNegotiationNeededEvent() returns true since it is + // possible for the event to become invalidated by operations subsequently + // chained. + virtual void OnNegotiationNeededEvent(uint32_t event_id) {} // Called any time the legacy IceConnectionState changes. 
// @@ -1323,8 +1365,12 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { std::unique_ptr network_state_predictor_factory; std::unique_ptr network_controller_factory; - std::unique_ptr media_transport_factory; + // This will only be used if CreatePeerConnection is called without a + // |port_allocator|, causing the default allocator and network manager to be + // used. + std::unique_ptr network_monitor_factory; std::unique_ptr neteq_factory; + std::unique_ptr sctp_factory; std::unique_ptr trials; }; diff --git a/api/peer_connection_proxy.h b/api/peer_connection_proxy.h index 1b4ceeaeff..3ef8a28869 100644 --- a/api/peer_connection_proxy.h +++ b/api/peer_connection_proxy.h @@ -98,17 +98,25 @@ PROXY_METHOD2(void, const RTCOfferAnswerOptions&) PROXY_METHOD2(void, SetLocalDescription, - SetSessionDescriptionObserver*, - SessionDescriptionInterface*) -PROXY_METHOD1(void, SetLocalDescription, SetSessionDescriptionObserver*) + std::unique_ptr, + rtc::scoped_refptr) +PROXY_METHOD1(void, + SetLocalDescription, + rtc::scoped_refptr) PROXY_METHOD2(void, - SetRemoteDescription, + SetLocalDescription, SetSessionDescriptionObserver*, SessionDescriptionInterface*) +PROXY_METHOD1(void, SetLocalDescription, SetSessionDescriptionObserver*) PROXY_METHOD2(void, SetRemoteDescription, std::unique_ptr, rtc::scoped_refptr) +PROXY_METHOD2(void, + SetRemoteDescription, + SetSessionDescriptionObserver*, + SessionDescriptionInterface*) +PROXY_METHOD1(bool, ShouldFireNegotiationNeededEvent, uint32_t) PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration) PROXY_METHOD1(RTCError, SetConfiguration, @@ -131,13 +139,18 @@ PROXY_METHOD0(IceConnectionState, ice_connection_state) PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state) PROXY_METHOD0(PeerConnectionState, peer_connection_state) PROXY_METHOD0(IceGatheringState, ice_gathering_state) +PROXY_METHOD0(absl::optional, can_trickle_ice_candidates) +PROXY_METHOD1(void, AddAdaptationResource, 
rtc::scoped_refptr) PROXY_METHOD2(bool, StartRtcEventLog, std::unique_ptr, int64_t) PROXY_METHOD1(bool, StartRtcEventLog, std::unique_ptr) PROXY_METHOD0(void, StopRtcEventLog) +PROXY_METHOD2(int32_t, StartRecorder, int32_t, std::string) +PROXY_METHOD1(int32_t, StopRecorder, int32_t) PROXY_METHOD0(void, Close) +BYPASS_PROXY_CONSTMETHOD0(rtc::Thread*, signaling_thread) END_PROXY_MAP() } // namespace webrtc diff --git a/api/test/mock_frame_encryptor.cc b/api/priority.h similarity index 61% rename from api/test/mock_frame_encryptor.cc rename to api/priority.h index 6c05efd543..4953e453a3 100644 --- a/api/test/mock_frame_encryptor.cc +++ b/api/priority.h @@ -1,5 +1,5 @@ /* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * Copyright 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "api/test/mock_frame_encryptor.h" - -#include "test/gmock.h" +#ifndef API_PRIORITY_H_ +#define API_PRIORITY_H_ namespace webrtc { -MockFrameEncryptor::MockFrameEncryptor() = default; -MockFrameEncryptor::~MockFrameEncryptor() = default; +// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc +enum class Priority { + kVeryLow, + kLow, + kMedium, + kHigh, +}; } // namespace webrtc + +#endif // API_PRIORITY_H_ diff --git a/api/proxy.cc b/api/proxy.cc index e668285ba2..67318e7dab 100644 --- a/api/proxy.cc +++ b/api/proxy.cc @@ -10,28 +10,3 @@ #include "api/proxy.h" -namespace webrtc { -namespace internal { - -SynchronousMethodCall::SynchronousMethodCall(rtc::MessageHandler* proxy) - : proxy_(proxy) {} - -SynchronousMethodCall::~SynchronousMethodCall() = default; - -void SynchronousMethodCall::Invoke(const rtc::Location& posted_from, - rtc::Thread* t) { - if (t->IsCurrent()) { - proxy_->OnMessage(nullptr); - } else { - t->Post(posted_from, this, 0); - e_.Wait(rtc::Event::kForever); - } -} - -void SynchronousMethodCall::OnMessage(rtc::Message*) { - proxy_->OnMessage(nullptr); - e_.Set(); -} - -} // namespace internal -} // namespace webrtc diff --git a/api/proxy.h b/api/proxy.h index 385992e659..05f7414bc0 100644 --- a/api/proxy.h +++ b/api/proxy.h @@ -55,9 +55,12 @@ #include #include #include +#include #include #include "api/scoped_refptr.h" +#include "api/task_queue/queued_task.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/event.h" #include "rtc_base/message_handler.h" #include "rtc_base/ref_counted_object.h" @@ -95,27 +98,8 @@ class ReturnType { void moved_result() {} }; -namespace internal { - -class RTC_EXPORT SynchronousMethodCall : public rtc::MessageData, - public rtc::MessageHandler { - public: - explicit SynchronousMethodCall(rtc::MessageHandler* proxy); - ~SynchronousMethodCall() override; - - void Invoke(const rtc::Location& posted_from, rtc::Thread* t); - - private: - void OnMessage(rtc::Message*) override; - - rtc::Event e_; - 
rtc::MessageHandler* proxy_; -}; - -} // namespace internal - template -class MethodCall : public rtc::Message, public rtc::MessageHandler { +class MethodCall : public QueuedTask { public: typedef R (C::*Method)(Args...); MethodCall(C* c, Method m, Args&&... args) @@ -124,12 +108,21 @@ class MethodCall : public rtc::Message, public rtc::MessageHandler { args_(std::forward_as_tuple(std::forward(args)...)) {} R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { - internal::SynchronousMethodCall(this).Invoke(posted_from, t); + if (t->IsCurrent()) { + Invoke(std::index_sequence_for()); + } else { + t->PostTask(std::unique_ptr(this)); + event_.Wait(rtc::Event::kForever); + } return r_.moved_result(); } private: - void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for()); } + bool Run() override { + Invoke(std::index_sequence_for()); + event_.Set(); + return false; + } template void Invoke(std::index_sequence) { @@ -140,10 +133,11 @@ class MethodCall : public rtc::Message, public rtc::MessageHandler { Method m_; ReturnType r_; std::tuple args_; + rtc::Event event_; }; template -class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { +class ConstMethodCall : public QueuedTask { public: typedef R (C::*Method)(Args...) const; ConstMethodCall(const C* c, Method m, Args&&... 
args) @@ -152,12 +146,21 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { args_(std::forward_as_tuple(std::forward(args)...)) {} R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { - internal::SynchronousMethodCall(this).Invoke(posted_from, t); + if (t->IsCurrent()) { + Invoke(std::index_sequence_for()); + } else { + t->PostTask(std::unique_ptr(this)); + event_.Wait(rtc::Event::kForever); + } return r_.moved_result(); } private: - void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for()); } + bool Run() override { + Invoke(std::index_sequence_for()); + event_.Set(); + return false; + } template void Invoke(std::index_sequence) { @@ -168,6 +171,7 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { Method m_; ReturnType r_; std::tuple args_; + rtc::Event event_; }; // Helper macros to reduce code duplication. @@ -396,6 +400,18 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { return call.Marshal(RTC_FROM_HERE, worker_thread_); \ } +// For use when returning purely const state (set during construction). +// Use with caution. This method should only be used when the return value will +// always be the same. 
+#define BYPASS_PROXY_CONSTMETHOD0(r, method) \ + r method() const override { \ + static_assert( \ + std::is_same::value || !std::is_pointer::value, \ + "Type is a pointer"); \ + static_assert(!std::is_reference::value, "Type is a reference"); \ + return c_->method(); \ + } + } // namespace webrtc #endif // API_PROXY_H_ diff --git a/api/rtc_error.h b/api/rtc_error.h index b8cb7f0bcd..d24737c251 100644 --- a/api/rtc_error.h +++ b/api/rtc_error.h @@ -137,7 +137,7 @@ class RTC_EXPORT RTCError { RTCErrorDetailType error_detail() const { return error_detail_; } void set_error_detail(RTCErrorDetailType detail) { error_detail_ = detail; } - absl::optional sctp_cause_code() { return sctp_cause_code_; } + absl::optional sctp_cause_code() const { return sctp_cause_code_; } void set_sctp_cause_code(uint16_t cause_code) { sctp_cause_code_ = cause_code; } diff --git a/api/rtc_event_log/BUILD.gn b/api/rtc_event_log/BUILD.gn index e853058e25..158dc06a7b 100644 --- a/api/rtc_event_log/BUILD.gn +++ b/api/rtc_event_log/BUILD.gn @@ -37,6 +37,7 @@ rtc_library("rtc_event_log_factory") { ":rtc_event_log", "../../rtc_base:checks", "../../rtc_base/system:rtc_export", + "../../system_wrappers:field_trial", "../task_queue", ] diff --git a/api/rtc_event_log/rtc_event.h b/api/rtc_event_log/rtc_event.h index 101f78f255..51db8f0b4d 100644 --- a/api/rtc_event_log/rtc_event.h +++ b/api/rtc_event_log/rtc_event.h @@ -52,7 +52,8 @@ class RtcEvent { VideoSendStreamConfig, GenericPacketSent, GenericPacketReceived, - GenericAckReceived + GenericAckReceived, + FrameDecoded }; RtcEvent(); diff --git a/api/rtc_event_log/rtc_event_log_factory.cc b/api/rtc_event_log/rtc_event_log_factory.cc index 2013584399..fdf267b7ba 100644 --- a/api/rtc_event_log/rtc_event_log_factory.cc +++ b/api/rtc_event_log/rtc_event_log_factory.cc @@ -14,6 +14,7 @@ #include #include "rtc_base/checks.h" +#include "system_wrappers/include/field_trial.h" #ifdef WEBRTC_ENABLE_RTC_EVENT_LOG #include 
"logging/rtc_event_log/rtc_event_log_impl.h" @@ -29,6 +30,9 @@ RtcEventLogFactory::RtcEventLogFactory(TaskQueueFactory* task_queue_factory) std::unique_ptr RtcEventLogFactory::CreateRtcEventLog( RtcEventLog::EncodingType encoding_type) { #ifdef WEBRTC_ENABLE_RTC_EVENT_LOG + if (field_trial::IsEnabled("WebRTC-RtcEventLogKillSwitch")) { + return std::make_unique(); + } return std::make_unique(encoding_type, task_queue_factory_); #else return std::make_unique(); diff --git a/api/rtc_event_log_output_file_unittest.cc b/api/rtc_event_log_output_file_unittest.cc index 071909b2c5..4274215491 100644 --- a/api/rtc_event_log_output_file_unittest.cc +++ b/api/rtc_event_log_output_file_unittest.cc @@ -141,14 +141,16 @@ TEST_F(RtcEventLogOutputFileTest, AllowReasonableFileSizeLimits) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(RtcEventLogOutputFileTest, WritingToInactiveFileForbidden) { +class RtcEventLogOutputFileDeathTest : public RtcEventLogOutputFileTest {}; + +TEST_F(RtcEventLogOutputFileDeathTest, WritingToInactiveFileForbidden) { RtcEventLogOutputFile output_file(output_file_name_, 2); ASSERT_FALSE(output_file.Write("abc")); ASSERT_FALSE(output_file.IsActive()); EXPECT_DEATH(output_file.Write("abc"), ""); } -TEST_F(RtcEventLogOutputFileTest, DisallowUnreasonableFileSizeLimits) { +TEST_F(RtcEventLogOutputFileDeathTest, DisallowUnreasonableFileSizeLimits) { // Keeping in a temporary unique_ptr to make it clearer that the death is // triggered by construction, not destruction. 
std::unique_ptr output_file; diff --git a/api/rtp_headers.cc b/api/rtp_headers.cc index bf973b6fe5..e0ad9eb26e 100644 --- a/api/rtp_headers.cc +++ b/api/rtp_headers.cc @@ -26,9 +26,7 @@ RTPHeaderExtension::RTPHeaderExtension() videoRotation(kVideoRotation_0), hasVideoContentType(false), videoContentType(VideoContentType::UNSPECIFIED), - has_video_timing(false), - has_frame_marking(false), - frame_marking({false, false, false, false, false, 0xFF, 0, 0}) {} + has_video_timing(false) {} RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) = default; diff --git a/api/rtp_headers.h b/api/rtp_headers.h index 44d2deeb90..b9a97c885d 100644 --- a/api/rtp_headers.h +++ b/api/rtp_headers.h @@ -21,10 +21,8 @@ #include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video/video_content_type.h" -#include "api/video/video_frame_marking.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "common_types.h" // NOLINT(build/include) namespace webrtc { @@ -101,8 +99,8 @@ struct RTPHeaderExtension { Timestamp GetAbsoluteSendTimestamp() const { RTC_DCHECK(hasAbsoluteSendTime); RTC_DCHECK(absoluteSendTime < (1ul << 24)); - return Timestamp::us((absoluteSendTime * 1000000ll) / - (1 << kAbsSendTimeFraction)); + return Timestamp::Micros((absoluteSendTime * 1000000ll) / + (1 << kAbsSendTimeFraction)); } TimeDelta GetAbsoluteSendTimeDelta(uint32_t previous_sendtime) const { @@ -111,7 +109,7 @@ struct RTPHeaderExtension { RTC_DCHECK(previous_sendtime < (1ul << 24)); int32_t delta = static_cast((absoluteSendTime - previous_sendtime) << 8) >> 8; - return TimeDelta::us((delta * 1000000ll) / (1 << kAbsSendTimeFraction)); + return TimeDelta::Micros((delta * 1000000ll) / (1 << kAbsSendTimeFraction)); } bool hasTransmissionTimeOffset; @@ -143,10 +141,7 @@ struct RTPHeaderExtension { bool has_video_timing; VideoSendTiming video_timing; - bool has_frame_marking; - FrameMarking frame_marking; - - PlayoutDelay playout_delay = 
{-1, -1}; + VideoPlayoutDelay playout_delay; // For identification of a stream when ssrc is not signaled. See // https://tools.ietf.org/html/draft-ietf-avtext-rid-09 diff --git a/api/rtp_packet_info.cc b/api/rtp_packet_info.cc index 54e26b418b..a9ebd9df48 100644 --- a/api/rtp_packet_info.cc +++ b/api/rtp_packet_info.cc @@ -32,18 +32,6 @@ RtpPacketInfo::RtpPacketInfo( absolute_capture_time_(absolute_capture_time), receive_time_ms_(receive_time_ms) {} -RtpPacketInfo::RtpPacketInfo(uint32_t ssrc, - std::vector csrcs, - uint32_t rtp_timestamp, - absl::optional audio_level, - int64_t receive_time_ms) - : RtpPacketInfo(ssrc, - std::move(csrcs), - rtp_timestamp, - audio_level, - /*absolute_capture_time=*/absl::nullopt, - receive_time_ms) {} - RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, int64_t receive_time_ms) : ssrc_(rtp_header.ssrc), diff --git a/api/rtp_packet_info.h b/api/rtp_packet_info.h index 21cfefb748..639ba32770 100644 --- a/api/rtp_packet_info.h +++ b/api/rtp_packet_info.h @@ -17,7 +17,6 @@ #include "absl/types/optional.h" #include "api/rtp_headers.h" -#include "rtc_base/deprecation.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -38,14 +37,6 @@ class RTC_EXPORT RtpPacketInfo { absl::optional absolute_capture_time, int64_t receive_time_ms); - // TODO(bugs.webrtc.org/10739): Will be removed sometime after 2019-09-19. 
- RTC_DEPRECATED - RtpPacketInfo(uint32_t ssrc, - std::vector csrcs, - uint32_t rtp_timestamp, - absl::optional audio_level, - int64_t receive_time_ms); - RtpPacketInfo(const RTPHeader& rtp_header, int64_t receive_time_ms); RtpPacketInfo(const RtpPacketInfo& other) = default; diff --git a/api/rtp_parameters.cc b/api/rtp_parameters.cc index c3f14d8f32..92f99e9bb8 100644 --- a/api/rtp_parameters.cc +++ b/api/rtp_parameters.cc @@ -11,12 +11,28 @@ #include #include +#include #include "api/array_view.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { +const char* DegradationPreferenceToString( + DegradationPreference degradation_preference) { + switch (degradation_preference) { + case DegradationPreference::DISABLED: + return "disabled"; + case DegradationPreference::MAINTAIN_FRAMERATE: + return "maintain-framerate"; + case DegradationPreference::MAINTAIN_RESOLUTION: + return "maintain-resolution"; + case DegradationPreference::BALANCED: + return "balanced"; + } + RTC_CHECK_NOTREACHED(); +} + const double kDefaultBitratePriority = 1.0; RtcpFeedback::RtcpFeedback() = default; @@ -32,17 +48,22 @@ RtpCodecCapability::~RtpCodecCapability() = default; RtpHeaderExtensionCapability::RtpHeaderExtensionCapability() = default; RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( - const std::string& uri) + absl::string_view uri) : uri(uri) {} RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( - const std::string& uri, + absl::string_view uri, int preferred_id) : uri(uri), preferred_id(preferred_id) {} +RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( + absl::string_view uri, + int preferred_id, + RtpTransceiverDirection direction) + : uri(uri), preferred_id(preferred_id), direction(direction) {} RtpHeaderExtensionCapability::~RtpHeaderExtensionCapability() = default; RtpExtension::RtpExtension() = default; -RtpExtension::RtpExtension(const std::string& uri, int id) : uri(uri), id(id) {} -RtpExtension::RtpExtension(const std::string& uri, 
int id, bool encrypt) +RtpExtension::RtpExtension(absl::string_view uri, int id) : uri(uri), id(id) {} +RtpExtension::RtpExtension(absl::string_view uri, int id, bool encrypt) : uri(uri), id(id), encrypt(encrypt) {} RtpExtension::~RtpExtension() = default; @@ -91,61 +112,24 @@ std::string RtpExtension::ToString() const { return sb.str(); } -const char RtpExtension::kAudioLevelUri[] = - "urn:ietf:params:rtp-hdrext:ssrc-audio-level"; - -const char RtpExtension::kTimestampOffsetUri[] = - "urn:ietf:params:rtp-hdrext:toffset"; - -const char RtpExtension::kAbsSendTimeUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"; - -const char RtpExtension::kAbsoluteCaptureTimeUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time"; - -const char RtpExtension::kVideoRotationUri[] = "urn:3gpp:video-orientation"; - -const char RtpExtension::kTransportSequenceNumberUri[] = - "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"; -const char RtpExtension::kTransportSequenceNumberV2Uri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02"; - -// This extension allows applications to adaptively limit the playout delay -// on frames as per the current needs. For example, a gaming application -// has very different needs on end-to-end delay compared to a video-conference -// application. 
-const char RtpExtension::kPlayoutDelayUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/playout-delay"; - -const char RtpExtension::kVideoContentTypeUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type"; - -const char RtpExtension::kVideoTimingUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"; - -const char RtpExtension::kMidUri[] = "urn:ietf:params:rtp-hdrext:sdes:mid"; - -const char RtpExtension::kFrameMarkingUri[] = - "http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07"; - -const char RtpExtension::kGenericFrameDescriptorUri00[] = - "http://www.webrtc.org/experiments/rtp-hdrext/generic-frame-descriptor-00"; -const char RtpExtension::kGenericFrameDescriptorUri01[] = - "http://www.webrtc.org/experiments/rtp-hdrext/generic-frame-descriptor-01"; -const char RtpExtension::kGenericFrameDescriptorUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/generic-frame-descriptor-00"; - -const char RtpExtension::kEncryptHeaderExtensionsUri[] = - "urn:ietf:params:rtp-hdrext:encrypt"; - -const char RtpExtension::kColorSpaceUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/color-space"; - -const char RtpExtension::kRidUri[] = - "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id"; - -const char RtpExtension::kRepairedRidUri[] = - "urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id"; +constexpr char RtpExtension::kEncryptHeaderExtensionsUri[]; +constexpr char RtpExtension::kAudioLevelUri[]; +constexpr char RtpExtension::kTimestampOffsetUri[]; +constexpr char RtpExtension::kAbsSendTimeUri[]; +constexpr char RtpExtension::kAbsoluteCaptureTimeUri[]; +constexpr char RtpExtension::kVideoRotationUri[]; +constexpr char RtpExtension::kVideoContentTypeUri[]; +constexpr char RtpExtension::kVideoTimingUri[]; +constexpr char RtpExtension::kGenericFrameDescriptorUri00[]; +constexpr char RtpExtension::kDependencyDescriptorUri[]; +constexpr char RtpExtension::kVideoLayersAllocationUri[]; +constexpr char 
RtpExtension::kTransportSequenceNumberUri[]; +constexpr char RtpExtension::kTransportSequenceNumberV2Uri[]; +constexpr char RtpExtension::kPlayoutDelayUri[]; +constexpr char RtpExtension::kColorSpaceUri[]; +constexpr char RtpExtension::kMidUri[]; +constexpr char RtpExtension::kRidUri[]; +constexpr char RtpExtension::kRepairedRidUri[]; constexpr int RtpExtension::kMinId; constexpr int RtpExtension::kMaxId; @@ -153,11 +137,10 @@ constexpr int RtpExtension::kMaxValueSize; constexpr int RtpExtension::kOneByteHeaderExtensionMaxId; constexpr int RtpExtension::kOneByteHeaderExtensionMaxValueSize; -bool RtpExtension::IsSupportedForAudio(const std::string& uri) { +bool RtpExtension::IsSupportedForAudio(absl::string_view uri) { return uri == webrtc::RtpExtension::kAudioLevelUri || uri == webrtc::RtpExtension::kAbsSendTimeUri || - // TODO(bugs.webrtc.org/10739): Uncomment once the audio impl is ready. - // uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri || + uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri || uri == webrtc::RtpExtension::kTransportSequenceNumberUri || uri == webrtc::RtpExtension::kTransportSequenceNumberV2Uri || uri == webrtc::RtpExtension::kMidUri || @@ -165,11 +148,10 @@ bool RtpExtension::IsSupportedForAudio(const std::string& uri) { uri == webrtc::RtpExtension::kRepairedRidUri; } -bool RtpExtension::IsSupportedForVideo(const std::string& uri) { +bool RtpExtension::IsSupportedForVideo(absl::string_view uri) { return uri == webrtc::RtpExtension::kTimestampOffsetUri || uri == webrtc::RtpExtension::kAbsSendTimeUri || - // TODO(bugs.webrtc.org/10739): Uncomment once the video impl is ready. 
- // uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri || + uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri || uri == webrtc::RtpExtension::kVideoRotationUri || uri == webrtc::RtpExtension::kTransportSequenceNumberUri || uri == webrtc::RtpExtension::kTransportSequenceNumberV2Uri || @@ -177,15 +159,15 @@ bool RtpExtension::IsSupportedForVideo(const std::string& uri) { uri == webrtc::RtpExtension::kVideoContentTypeUri || uri == webrtc::RtpExtension::kVideoTimingUri || uri == webrtc::RtpExtension::kMidUri || - uri == webrtc::RtpExtension::kFrameMarkingUri || uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00 || - uri == webrtc::RtpExtension::kGenericFrameDescriptorUri01 || + uri == webrtc::RtpExtension::kDependencyDescriptorUri || uri == webrtc::RtpExtension::kColorSpaceUri || uri == webrtc::RtpExtension::kRidUri || - uri == webrtc::RtpExtension::kRepairedRidUri; + uri == webrtc::RtpExtension::kRepairedRidUri || + uri == webrtc::RtpExtension::kVideoLayersAllocationUri; } -bool RtpExtension::IsEncryptionSupported(const std::string& uri) { +bool RtpExtension::IsEncryptionSupported(absl::string_view uri) { return uri == webrtc::RtpExtension::kAudioLevelUri || uri == webrtc::RtpExtension::kTimestampOffsetUri || #if !defined(ENABLE_EXTERNAL_AUTH) @@ -204,12 +186,13 @@ bool RtpExtension::IsEncryptionSupported(const std::string& uri) { uri == webrtc::RtpExtension::kVideoContentTypeUri || uri == webrtc::RtpExtension::kMidUri || uri == webrtc::RtpExtension::kRidUri || - uri == webrtc::RtpExtension::kRepairedRidUri; + uri == webrtc::RtpExtension::kRepairedRidUri || + uri == webrtc::RtpExtension::kVideoLayersAllocationUri; } const RtpExtension* RtpExtension::FindHeaderExtensionByUri( const std::vector& extensions, - const std::string& uri) { + absl::string_view uri) { for (const auto& extension : extensions) { if (extension.uri == uri) { return &extension; diff --git a/api/rtp_parameters.h b/api/rtp_parameters.h index 2ce0b48f8d..369d277a5d 100644 --- 
a/api/rtp_parameters.h +++ b/api/rtp_parameters.h @@ -13,12 +13,15 @@ #include +#include #include -#include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/media_types.h" +#include "api/priority.h" +#include "api/rtp_transceiver_direction.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -89,6 +92,9 @@ enum class DegradationPreference { BALANCED, }; +RTC_EXPORT const char* DegradationPreferenceToString( + DegradationPreference degradation_preference); + RTC_EXPORT extern const double kDefaultBitratePriority; struct RTC_EXPORT RtcpFeedback { @@ -157,12 +163,12 @@ struct RTC_EXPORT RtpCodecCapability { // Contrary to ORTC, these parameters are named using all lowercase strings. // This helps make the mapping to SDP simpler, if an application is using SDP. // Boolean values are represented by the string "1". - std::unordered_map parameters; + std::map parameters; // Codec-specific parameters that may optionally be signaled to the remote // party. // TODO(deadbeef): Not implemented. - std::unordered_map options; + std::map options; // Maximum number of temporal layer extensions supported by this codec. // For example, a value of 1 indicates that 2 total layers are supported. @@ -193,7 +199,8 @@ struct RTC_EXPORT RtpCodecCapability { bool operator!=(const RtpCodecCapability& o) const { return !(*this == o); } }; -// Used in RtpCapabilities; represents the capabilities/preferences of an +// Used in RtpCapabilities and RtpTransceiverInterface's header extensions query +// and setup methods; represents the capabilities/preferences of an // implementation for a header extension. // // Just called "RtpHeaderExtension" in ORTC, but the "Capability" suffix was @@ -203,7 +210,7 @@ struct RTC_EXPORT RtpCodecCapability { // Note that ORTC includes a "kind" field, but we omit this because it's // redundant; if you call "RtpReceiver::GetCapabilities(MEDIA_TYPE_AUDIO)", // you know you're getting audio capabilities. 
-struct RtpHeaderExtensionCapability { +struct RTC_EXPORT RtpHeaderExtensionCapability { // URI of this extension, as defined in RFC8285. std::string uri; @@ -214,15 +221,23 @@ struct RtpHeaderExtensionCapability { // TODO(deadbeef): Not implemented. bool preferred_encrypt = false; + // The direction of the extension. The kStopped value is only used with + // RtpTransceiverInterface::HeaderExtensionsToOffer() and + // SetOfferedRtpHeaderExtensions(). + RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv; + // Constructors for convenience. RtpHeaderExtensionCapability(); - explicit RtpHeaderExtensionCapability(const std::string& uri); - RtpHeaderExtensionCapability(const std::string& uri, int preferred_id); + explicit RtpHeaderExtensionCapability(absl::string_view uri); + RtpHeaderExtensionCapability(absl::string_view uri, int preferred_id); + RtpHeaderExtensionCapability(absl::string_view uri, + int preferred_id, + RtpTransceiverDirection direction); ~RtpHeaderExtensionCapability(); bool operator==(const RtpHeaderExtensionCapability& o) const { return uri == o.uri && preferred_id == o.preferred_id && - preferred_encrypt == o.preferred_encrypt; + preferred_encrypt == o.preferred_encrypt && direction == o.direction; } bool operator!=(const RtpHeaderExtensionCapability& o) const { return !(*this == o); @@ -232,23 +247,24 @@ struct RtpHeaderExtensionCapability { // RTP header extension, see RFC8285. 
struct RTC_EXPORT RtpExtension { RtpExtension(); - RtpExtension(const std::string& uri, int id); - RtpExtension(const std::string& uri, int id, bool encrypt); + RtpExtension(absl::string_view uri, int id); + RtpExtension(absl::string_view uri, int id, bool encrypt); ~RtpExtension(); + std::string ToString() const; bool operator==(const RtpExtension& rhs) const { return uri == rhs.uri && id == rhs.id && encrypt == rhs.encrypt; } - static bool IsSupportedForAudio(const std::string& uri); - static bool IsSupportedForVideo(const std::string& uri); + static bool IsSupportedForAudio(absl::string_view uri); + static bool IsSupportedForVideo(absl::string_view uri); // Return "true" if the given RTP header extension URI may be encrypted. - static bool IsEncryptionSupported(const std::string& uri); + static bool IsEncryptionSupported(absl::string_view uri); // Returns the named header extension if found among all extensions, // nullptr otherwise. static const RtpExtension* FindHeaderExtensionByUri( const std::vector& extensions, - const std::string& uri); + absl::string_view uri); // Return a list of RTP header extensions with the non-encrypted extensions // removed if both the encrypted and non-encrypted extension is present for @@ -256,65 +272,86 @@ struct RTC_EXPORT RtpExtension { static std::vector FilterDuplicateNonEncrypted( const std::vector& extensions); + // Encryption of Header Extensions, see RFC 6904 for details: + // https://tools.ietf.org/html/rfc6904 + static constexpr char kEncryptHeaderExtensionsUri[] = + "urn:ietf:params:rtp-hdrext:encrypt"; + // Header extension for audio levels, as defined in: - // http://tools.ietf.org/html/draft-ietf-avtext-client-to-mixer-audio-level-03 - static const char kAudioLevelUri[]; + // https://tools.ietf.org/html/rfc6464 + static constexpr char kAudioLevelUri[] = + "urn:ietf:params:rtp-hdrext:ssrc-audio-level"; // Header extension for RTP timestamp offset, see RFC 5450 for details: // http://tools.ietf.org/html/rfc5450 - 
static const char kTimestampOffsetUri[]; + static constexpr char kTimestampOffsetUri[] = + "urn:ietf:params:rtp-hdrext:toffset"; // Header extension for absolute send time, see url for details: // http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time - static const char kAbsSendTimeUri[]; + static constexpr char kAbsSendTimeUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"; // Header extension for absolute capture time, see url for details: // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time - static const char kAbsoluteCaptureTimeUri[]; + static constexpr char kAbsoluteCaptureTimeUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time"; // Header extension for coordination of video orientation, see url for // details: // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf - static const char kVideoRotationUri[]; + static constexpr char kVideoRotationUri[] = "urn:3gpp:video-orientation"; // Header extension for video content type. E.g. default or screenshare. - static const char kVideoContentTypeUri[]; + static constexpr char kVideoContentTypeUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type"; // Header extension for video timing. - static const char kVideoTimingUri[]; - - // Header extension for video frame marking. - static const char kFrameMarkingUri[]; + static constexpr char kVideoTimingUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"; // Experimental codec agnostic frame descriptor. - static const char kGenericFrameDescriptorUri00[]; - static const char kGenericFrameDescriptorUri01[]; - // TODO(bugs.webrtc.org/10243): Remove once dependencies have been updated. 
- static const char kGenericFrameDescriptorUri[]; + static constexpr char kGenericFrameDescriptorUri00[] = + "http://www.webrtc.org/experiments/rtp-hdrext/" + "generic-frame-descriptor-00"; + static constexpr char kDependencyDescriptorUri[] = + "https://aomediacodec.github.io/av1-rtp-spec/" + "#dependency-descriptor-rtp-header-extension"; + + // Experimental extension for signalling target bitrate per layer. + static constexpr char kVideoLayersAllocationUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-layers-allocation00"; // Header extension for transport sequence number, see url for details: // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions - static const char kTransportSequenceNumberUri[]; - static const char kTransportSequenceNumberV2Uri[]; + static constexpr char kTransportSequenceNumberUri[] = + "http://www.ietf.org/id/" + "draft-holmer-rmcat-transport-wide-cc-extensions-01"; + static constexpr char kTransportSequenceNumberV2Uri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02"; + + // This extension allows applications to adaptively limit the playout delay + // on frames as per the current needs. For example, a gaming application + // has very different needs on end-to-end delay compared to a video-conference + // application. + static constexpr char kPlayoutDelayUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/playout-delay"; - static const char kPlayoutDelayUri[]; + // Header extension for color space information. + static constexpr char kColorSpaceUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/color-space"; // Header extension for identifying media section within a transport. 
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-49#section-15 - static const char kMidUri[]; - - // Encryption of Header Extensions, see RFC 6904 for details: - // https://tools.ietf.org/html/rfc6904 - static const char kEncryptHeaderExtensionsUri[]; - - // Header extension for color space information. - static const char kColorSpaceUri[]; + static constexpr char kMidUri[] = "urn:ietf:params:rtp-hdrext:sdes:mid"; // Header extension for RIDs and Repaired RIDs // https://tools.ietf.org/html/draft-ietf-avtext-rid-09 // https://tools.ietf.org/html/draft-ietf-mmusic-rid-15 - static const char kRidUri[]; - static const char kRepairedRidUri[]; + static constexpr char kRidUri[] = + "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id"; + static constexpr char kRepairedRidUri[] = + "urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id"; // Inclusive min and max IDs for two-byte header extensions and one-byte // header extensions, per RFC8285 Section 4.2-4.3. @@ -380,6 +417,11 @@ struct RTC_EXPORT RtpEncodingParameters { // The relative bitrate priority of this encoding. Currently this is // implemented for the entire rtp sender by using the value of the first // encoding parameter. + // See: https://w3c.github.io/webrtc-priority/#enumdef-rtcprioritytype + // "very-low" = 0.5 + // "low" = 1.0 + // "medium" = 2.0 + // "high" = 4.0 // TODO(webrtc.bugs.org/8630): Implement this per encoding parameter. // Currently there is logic for how bitrate is distributed per simulcast layer // in the VideoBitrateAllocator. This must be updated to incorporate relative @@ -388,11 +430,11 @@ struct RTC_EXPORT RtpEncodingParameters { // The relative DiffServ Code Point priority for this encoding, allowing // packets to be marked relatively higher or lower without affecting - // bandwidth allocations. See https://w3c.github.io/webrtc-dscp-exp/ . 
NB - // we follow chromium's translation of the allowed string enum values for - // this field to 1.0, 0.5, et cetera, similar to bitrate_priority above. + // bandwidth allocations. See https://w3c.github.io/webrtc-dscp-exp/ . // TODO(http://crbug.com/webrtc/8630): Implement this per encoding parameter. - double network_priority = kDefaultBitratePriority; + // TODO(http://crbug.com/webrtc/11379): TCP connections should use a single + // DSCP value even if shared by multiple senders; this is not implemented. + Priority network_priority = Priority::kLow; // If set, this represents the Transport Independent Application Specific // maximum bandwidth defined in RFC3890. If unset, there is no maximum @@ -435,6 +477,10 @@ struct RTC_EXPORT RtpEncodingParameters { // Called "encodingId" in ORTC. std::string rid; + // Allow dynamic frame length changes for audio: + // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime + bool adaptive_ptime = false; + bool operator==(const RtpEncodingParameters& o) const { return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority && network_priority == o.network_priority && @@ -443,7 +489,8 @@ struct RTC_EXPORT RtpEncodingParameters { max_framerate == o.max_framerate && num_temporal_layers == o.num_temporal_layers && scale_resolution_down_by == o.scale_resolution_down_by && - active == o.active && rid == o.rid; + active == o.active && rid == o.rid && + adaptive_ptime == o.adaptive_ptime; } bool operator!=(const RtpEncodingParameters& o) const { return !(*this == o); @@ -500,7 +547,7 @@ struct RTC_EXPORT RtpCodecParameters { // Contrary to ORTC, these parameters are named using all lowercase strings. // This helps make the mapping to SDP simpler, if an application is using SDP. // Boolean values are represented by the string "1". 
- std::unordered_map parameters; + std::map parameters; bool operator==(const RtpCodecParameters& o) const { return name == o.name && kind == o.kind && payload_type == o.payload_type && @@ -599,8 +646,7 @@ struct RTC_EXPORT RtpParameters { // When bandwidth is constrained and the RtpSender needs to choose between // degrading resolution or degrading framerate, degradationPreference // indicates which is preferred. Only for video tracks. - DegradationPreference degradation_preference = - DegradationPreference::BALANCED; + absl::optional degradation_preference; bool operator==(const RtpParameters& o) const { return mid == o.mid && codecs == o.codecs && diff --git a/api/rtp_receiver_interface.cc b/api/rtp_receiver_interface.cc index d20516b67c..bc9aef5aef 100644 --- a/api/rtp_receiver_interface.cc +++ b/api/rtp_receiver_interface.cc @@ -38,4 +38,7 @@ RtpReceiverInterface::dtls_transport() const { return nullptr; } +void RtpReceiverInterface::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) {} + } // namespace webrtc diff --git a/api/rtp_receiver_interface.h b/api/rtp_receiver_interface.h index 6052763341..786ea3aceb 100644 --- a/api/rtp_receiver_interface.h +++ b/api/rtp_receiver_interface.h @@ -19,6 +19,7 @@ #include "api/crypto/frame_decryptor_interface.h" #include "api/dtls_transport_interface.h" +#include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/proxy.h" @@ -107,6 +108,12 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { // user. This can be used to update the state of the object. virtual rtc::scoped_refptr GetFrameDecryptor() const; + // Sets a frame transformer between the depacketizer and the decoder to enable + // client code to transform received frames according to their own processing + // logic. 
+ virtual void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer); + protected: ~RtpReceiverInterface() override = default; }; @@ -121,8 +128,8 @@ PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector>, streams) -PROXY_CONSTMETHOD0(cricket::MediaType, media_type) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(RtpParameters, GetParameters) PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*) PROXY_METHOD1(void, SetJitterBufferMinimumDelay, absl::optional) @@ -132,6 +139,9 @@ PROXY_METHOD1(void, rtc::scoped_refptr) PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetFrameDecryptor) +PROXY_METHOD1(void, + SetDepacketizerToDecoderFrameTransformer, + rtc::scoped_refptr) END_PROXY_MAP() } // namespace webrtc diff --git a/api/rtp_sender_interface.cc b/api/rtp_sender_interface.cc index d23fd1844c..57a5a10fb5 100644 --- a/api/rtp_sender_interface.cc +++ b/api/rtp_sender_interface.cc @@ -30,4 +30,7 @@ rtc::scoped_refptr RtpSenderInterface::dtls_transport() return nullptr; } +void RtpSenderInterface::SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr frame_transformer) {} + } // namespace webrtc diff --git a/api/rtp_sender_interface.h b/api/rtp_sender_interface.h index 5d6271f5d3..a33b80042e 100644 --- a/api/rtp_sender_interface.h +++ b/api/rtp_sender_interface.h @@ -20,6 +20,7 @@ #include "api/crypto/frame_encryptor_interface.h" #include "api/dtls_transport_interface.h" #include "api/dtmf_sender_interface.h" +#include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/proxy.h" @@ -93,6 +94,9 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { // user. This can be used to update the state of the object. 
virtual rtc::scoped_refptr GetFrameEncryptor() const; + virtual void SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr frame_transformer); + protected: ~RtpSenderInterface() override = default; }; @@ -106,8 +110,8 @@ PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*) PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) PROXY_CONSTMETHOD0(uint32_t, ssrc) -PROXY_CONSTMETHOD0(cricket::MediaType, media_type) -PROXY_CONSTMETHOD0(std::string, id) +BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector, init_send_encodings) PROXY_CONSTMETHOD0(RtpParameters, GetParameters) @@ -119,6 +123,9 @@ PROXY_METHOD1(void, PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetFrameEncryptor) PROXY_METHOD1(void, SetStreams, const std::vector&) +PROXY_METHOD1(void, + SetEncoderToPacketizerFrameTransformer, + rtc::scoped_refptr) END_PROXY_MAP() } // namespace webrtc diff --git a/api/rtp_transceiver_direction.h b/api/rtp_transceiver_direction.h new file mode 100644 index 0000000000..3c7d4cb0ad --- /dev/null +++ b/api/rtp_transceiver_direction.h @@ -0,0 +1,27 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_RTP_TRANSCEIVER_DIRECTION_H_ +#define API_RTP_TRANSCEIVER_DIRECTION_H_ + +namespace webrtc { + +// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection +enum class RtpTransceiverDirection { + kSendRecv, + kSendOnly, + kRecvOnly, + kInactive, + kStopped, +}; + +} // namespace webrtc + +#endif // API_RTP_TRANSCEIVER_DIRECTION_H_ diff --git a/api/rtp_transceiver_interface.cc b/api/rtp_transceiver_interface.cc index dc82fadd39..1dc0fcc79e 100644 --- a/api/rtp_transceiver_interface.cc +++ b/api/rtp_transceiver_interface.cc @@ -25,6 +25,23 @@ RtpTransceiverInterface::fired_direction() const { return absl::nullopt; } +bool RtpTransceiverInterface::stopping() const { + return false; +} + +void RtpTransceiverInterface::Stop() { + StopInternal(); +} + +RTCError RtpTransceiverInterface::StopStandard() { + RTC_NOTREACHED() << "DEBUG: RtpTransceiverInterface::StopStandard called"; + return RTCError::OK(); +} + +void RtpTransceiverInterface::StopInternal() { + RTC_NOTREACHED() << "DEBUG: RtpTransceiverInterface::StopInternal called"; +} + RTCError RtpTransceiverInterface::SetCodecPreferences( rtc::ArrayView) { RTC_NOTREACHED() << "Not implemented"; @@ -36,4 +53,28 @@ std::vector RtpTransceiverInterface::codec_preferences() return {}; } +std::vector +RtpTransceiverInterface::HeaderExtensionsToOffer() const { + return {}; +} + +webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer) { + return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + +// TODO(bugs.webrtc.org/11839) Remove default implementations when clients +// are updated. 
+void RtpTransceiverInterface::SetDirection( + RtpTransceiverDirection new_direction) { + SetDirectionWithError(new_direction); +} + +RTCError RtpTransceiverInterface::SetDirectionWithError( + RtpTransceiverDirection new_direction) { + RTC_NOTREACHED() << "Default implementation called"; + return RTCError::OK(); +} + } // namespace webrtc diff --git a/api/rtp_transceiver_interface.h b/api/rtp_transceiver_interface.h index 2a60f98be6..fd3555fb40 100644 --- a/api/rtp_transceiver_interface.h +++ b/api/rtp_transceiver_interface.h @@ -20,20 +20,13 @@ #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" #include "api/scoped_refptr.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { -// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection -enum class RtpTransceiverDirection { - kSendRecv, - kSendOnly, - kRecvOnly, - kInactive -}; - // Structure for initializing an RtpTransceiver in a call to // PeerConnectionInterface::AddTransceiver. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit @@ -96,6 +89,16 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped virtual bool stopped() const = 0; + // The stopping attribute indicates that the user has indicated that the + // sender of this transceiver will stop sending, and that the receiver will + // no longer receive. It is always true if stopped() is true. + // If stopping() is true and stopped() is false, it means that the + // transceiver's stop() method has been called, but the negotiation with + // the other end for shutting down the transceiver is not yet done. + // https://w3c.github.io/webrtc-pc/#dfn-stopping-0 + // TODO(hta): Remove default implementation. 
+ virtual bool stopping() const; + // The direction attribute indicates the preferred direction of this // transceiver, which will be used in calls to CreateOffer and CreateAnswer. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction @@ -106,7 +109,11 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // CreateOffer and CreateAnswer mark the corresponding media descriptions as // sendrecv, sendonly, recvonly, or inactive. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction - virtual void SetDirection(RtpTransceiverDirection new_direction) = 0; + // TODO(hta): Deprecate SetDirection without error and rename + // SetDirectionWithError to SetDirection, remove default implementations. + RTC_DEPRECATED virtual void SetDirection( + RtpTransceiverDirection new_direction); + virtual RTCError SetDirectionWithError(RtpTransceiverDirection new_direction); // The current_direction attribute indicates the current direction negotiated // for this transceiver. If this transceiver has never been represented in an @@ -121,10 +128,19 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // Exposed in the public interface for use by Chromium. virtual absl::optional fired_direction() const; - // The Stop method irreversibly stops the RtpTransceiver. The sender of this - // transceiver will no longer send, the receiver will no longer receive. + // Initiates a stop of the transceiver. + // The stop is complete when stopped() returns true. + // A stopped transceiver can be reused for a different track. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop - virtual void Stop() = 0; + // TODO(hta): Rename to Stop() when users of the non-standard Stop() are + // updated. + virtual RTCError StopStandard(); + + // Stops a transceiver immediately, without waiting for signalling. + // This is an internal function, and is exposed for historical reasons. 
+ // https://w3c.github.io/webrtc-pc/#dfn-stop-the-rtcrtptransceiver + virtual void StopInternal(); + RTC_DEPRECATED virtual void Stop(); // The SetCodecPreferences method overrides the default codec preferences used // by WebRTC for this transceiver. @@ -133,6 +149,20 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { rtc::ArrayView codecs); virtual std::vector codec_preferences() const; + // Readonly attribute which contains the set of header extensions that was set + // with SetOfferedRtpHeaderExtensions, or a default set if it has not been + // called. + // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface + virtual std::vector HeaderExtensionsToOffer() + const; + + // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation + // so that it negotiates use of header extensions which are not kStopped. + // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface + virtual webrtc::RTCError SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer); + protected: ~RtpTransceiverInterface() override = default; }; diff --git a/api/set_local_description_observer_interface.h b/api/set_local_description_observer_interface.h new file mode 100644 index 0000000000..90d000cd81 --- /dev/null +++ b/api/set_local_description_observer_interface.h @@ -0,0 +1,30 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ +#define API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ + +#include "api/rtc_error.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// OnSetLocalDescriptionComplete() invokes as soon as +// PeerConnectionInterface::SetLocalDescription() operation completes, allowing +// the observer to examine the effects of the operation without delay. +class SetLocalDescriptionObserverInterface : public rtc::RefCountInterface { + public: + // On success, |error.ok()| is true. + virtual void OnSetLocalDescriptionComplete(RTCError error) = 0; +}; + +} // namespace webrtc + +#endif // API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ diff --git a/api/stats/OWNERS b/api/stats/OWNERS index 633d6b9a76..7e98070d5d 100644 --- a/api/stats/OWNERS +++ b/api/stats/OWNERS @@ -1,7 +1,2 @@ hbos@webrtc.org hta@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/api/stats/rtc_stats.h b/api/stats/rtc_stats.h index d45902e0a5..5de5b7fbb0 100644 --- a/api/stats/rtc_stats.h +++ b/api/stats/rtc_stats.h @@ -319,6 +319,14 @@ class RTCStatsMember : public RTCStatsMemberInterface { std::string ValueToString() const override; std::string ValueToJson() const override; + template + inline T ValueOrDefault(U default_value) const { + if (is_defined()) { + return *(*this); + } + return default_value; + } + // Assignment operators. T& operator=(const T& value) { value_ = value; diff --git a/api/stats/rtc_stats_report.h b/api/stats/rtc_stats_report.h index dc15937690..94bd813b07 100644 --- a/api/stats/rtc_stats_report.h +++ b/api/stats/rtc_stats_report.h @@ -84,8 +84,8 @@ class RTC_EXPORT RTCStatsReport : public rtc::RefCountInterface { // Removes the stats object from the report, returning ownership of it or null // if there is no object with |id|. 
std::unique_ptr Take(const std::string& id); - // Takes ownership of all the stats in |victim|, leaving it empty. - void TakeMembersFrom(rtc::scoped_refptr victim); + // Takes ownership of all the stats in |other|, leaving it empty. + void TakeMembersFrom(rtc::scoped_refptr other); // Stats iterators. Stats are ordered lexicographically on |RTCStats::id|. ConstIterator begin() const; diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h index af91a85aed..938ab75853 100644 --- a/api/stats/rtcstats_objects.h +++ b/api/stats/rtcstats_objects.h @@ -118,9 +118,7 @@ class RTC_EXPORT RTCCodecStats final : public RTCStats { RTCStatsMember payload_type; RTCStatsMember mime_type; RTCStatsMember clock_rate; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7061 RTCStatsMember channels; - // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7061 RTCStatsMember sdp_fmtp_line; }; @@ -136,7 +134,7 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats { RTCStatsMember label; RTCStatsMember protocol; - RTCStatsMember datachannelid; + RTCStatsMember data_channel_identifier; // TODO(hbos): Support enum types? "RTCStatsMember"? RTCStatsMember state; RTCStatsMember messages_sent; @@ -294,9 +292,6 @@ class RTC_EXPORT RTCMediaStreamTrackStats final : public RTCStats { RTCStatsMember detached; // See |RTCMediaStreamTrackKind| for valid values. RTCStatsMember kind; - // TODO(gustaf): Implement jitter_buffer_delay for video (currently - // implemented for audio only). - // https://crbug.com/webrtc/8318 RTCStatsMember jitter_buffer_delay; RTCStatsMember jitter_buffer_emitted_count; // Video-only members @@ -332,6 +327,14 @@ class RTC_EXPORT RTCMediaStreamTrackStats final : public RTCStats { RTCNonStandardStatsMember jitter_buffer_flushes; RTCNonStandardStatsMember delayed_packet_outage_samples; RTCNonStandardStatsMember relative_packet_arrival_delay; + // Non-standard metric showing target delay of jitter buffer. 
+ // This value is increased by the target jitter buffer delay every time a + // sample is emitted by the jitter buffer. The added target is the target + // delay, in seconds, at the time that the sample was emitted from the jitter + // buffer. (https://github.com/w3c/webrtc-provisional-stats/pull/20) + // Currently it is implemented only for audio. + // TODO(titovartem) implement for video streams when will be requested. + RTCNonStandardStatsMember jitter_buffer_target_delay; // TODO(henrik.lundin): Add description of the interruption metrics at // https://github.com/henbos/webrtc-provisional-stats/issues/17 RTCNonStandardStatsMember interruption_count; @@ -416,6 +419,18 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { // TODO(hbos): Collect and populate this value for both "audio" and "video", // currently not collected for "video". https://bugs.webrtc.org/7065 RTCStatsMember jitter; + RTCStatsMember jitter_buffer_delay; + RTCStatsMember jitter_buffer_emitted_count; + RTCStatsMember total_samples_received; + RTCStatsMember concealed_samples; + RTCStatsMember silent_concealed_samples; + RTCStatsMember concealment_events; + RTCStatsMember inserted_samples_for_deceleration; + RTCStatsMember removed_samples_for_acceleration; + RTCStatsMember audio_level; + RTCStatsMember total_audio_energy; + RTCStatsMember total_samples_duration; + RTCStatsMember frames_received; // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 RTCStatsMember round_trip_time; // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 @@ -438,8 +453,14 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember gap_loss_rate; // TODO(hbos): Collect and populate this value. 
https://bugs.webrtc.org/7065 RTCStatsMember gap_discard_rate; + RTCStatsMember frame_width; + RTCStatsMember frame_height; + RTCStatsMember frame_bit_depth; + RTCStatsMember frames_per_second; RTCStatsMember frames_decoded; + RTCStatsMember frames_rendered; RTCStatsMember key_frames_decoded; + RTCStatsMember frames_dropped; RTCStatsMember total_decode_time; RTCStatsMember total_inter_frame_delay; RTCStatsMember total_squared_inter_frame_delay; @@ -466,6 +487,7 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember media_source_id; RTCStatsMember remote_id; + RTCStatsMember rid; RTCStatsMember packets_sent; RTCStatsMember retransmitted_packets_sent; RTCStatsMember bytes_sent; @@ -477,6 +499,11 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember key_frames_encoded; RTCStatsMember total_encode_time; RTCStatsMember total_encoded_bytes_target; + RTCStatsMember frame_width; + RTCStatsMember frame_height; + RTCStatsMember frames_per_second; + RTCStatsMember frames_sent; + RTCStatsMember huge_frames_sent; // TODO(https://crbug.com/webrtc/10635): This is only implemented for video; // implement it for audio as well. RTCStatsMember total_packet_send_delay; @@ -593,7 +620,9 @@ class RTC_EXPORT RTCTransportStats final : public RTCStats { ~RTCTransportStats() override; RTCStatsMember bytes_sent; + RTCStatsMember packets_sent; RTCStatsMember bytes_received; + RTCStatsMember packets_received; RTCStatsMember rtcp_transport_stats_id; // TODO(hbos): Support enum types? "RTCStatsMember"? 
RTCStatsMember dtls_state; diff --git a/api/stats_types.cc b/api/stats_types.cc index 7dcbd134a1..63d8feb292 100644 --- a/api/stats_types.cc +++ b/api/stats_types.cc @@ -401,6 +401,8 @@ const char* StatsReport::Value::display_name() const { return "datachannelid"; case kStatsValueNameFramesDecoded: return "framesDecoded"; + case kStatsValueNameFramesRendered: + return "framesRendered"; case kStatsValueNameFramesEncoded: return "framesEncoded"; case kStatsValueNameJitterBufferDelay: diff --git a/api/stats_types.h b/api/stats_types.h index c1922a8a22..4d43774380 100644 --- a/api/stats_types.h +++ b/api/stats_types.h @@ -108,6 +108,7 @@ class RTC_EXPORT StatsReport { kStatsValueNameConcealmentEvents, kStatsValueNameDataChannelId, kStatsValueNameFramesDecoded, + kStatsValueNameFramesRendered, kStatsValueNameFramesEncoded, kStatsValueNameJitterBufferDelay, kStatsValueNameMediaType, diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn index 4c9f591ec1..1072057e3f 100644 --- a/api/task_queue/BUILD.gn +++ b/api/task_queue/BUILD.gn @@ -21,6 +21,8 @@ rtc_library("task_queue") { "../../rtc_base:checks", "../../rtc_base:macromagic", "../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:config", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/strings", @@ -51,6 +53,8 @@ rtc_library("task_queue_test") { deps = [ "../../../webrtc_overrides:webrtc_component", "../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", ] @@ -62,6 +66,8 @@ rtc_library("task_queue_test") { "../../rtc_base:timeutils", "../../rtc_base/task_utils:to_queued_task", "../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", ] diff --git a/api/task_queue/task_queue_test.cc b/api/task_queue/task_queue_test.cc index a8a799f11b..0d411d2d9c 100644 --- a/api/task_queue/task_queue_test.cc +++ 
b/api/task_queue/task_queue_test.cc @@ -37,9 +37,11 @@ TEST_P(TaskQueueTest, PostAndCheckCurrent) { rtc::Event event; auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent"); - // We're not running a task, so there shouldn't be a current queue. + // We're not running a task, so |queue| shouldn't be current. + // Note that because rtc::Thread also supports the TQ interface and + // TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that + // means that TaskQueueBase::Current() will still return a valid value. EXPECT_FALSE(queue->IsCurrent()); - EXPECT_FALSE(TaskQueueBase::Current()); queue->PostTask(ToQueuedTask([&event, &queue] { EXPECT_TRUE(queue->IsCurrent()); @@ -269,5 +271,10 @@ TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) { EXPECT_TRUE(done.Wait(1000)); } +// TaskQueueTest is a set of tests for any implementation of the TaskQueueBase. +// Tests are instantiated next to the concrete implementation(s). +// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#creating-value-parameterized-abstract-tests +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TaskQueueTest); + } // namespace } // namespace webrtc diff --git a/api/test/DEPS b/api/test/DEPS index 1a02bf16e9..d97ac49df6 100644 --- a/api/test/DEPS +++ b/api/test/DEPS @@ -11,12 +11,6 @@ specific_include_rules = { "fake_constraints\.h": [ "+rtc_base/string_encode.h", ], - "loopback_media_transport\.h": [ - "+rtc_base/async_invoker.h", - "+rtc_base/critical_section.h", - "+rtc_base/thread.h", - "+rtc_base/thread_checker.h", - ], "neteq_factory_with_codecs\.h": [ "+system_wrappers/include/clock.h", ], @@ -41,4 +35,7 @@ specific_include_rules = { "create_frame_generator\.h": [ "+system_wrappers/include/clock.h", ], + "videocodec_test_fixture\.h": [ + "+media/base/h264_profile_level_id.h" + ], } diff --git a/api/test/audio_quality_analyzer_interface.h b/api/test/audio_quality_analyzer_interface.h index 88392d7fd2..c1044795d1 100644 --- 
a/api/test/audio_quality_analyzer_interface.h +++ b/api/test/audio_quality_analyzer_interface.h @@ -14,7 +14,7 @@ #include #include "api/test/stats_observer_interface.h" -#include "api/test/track_id_stream_label_map.h" +#include "api/test/track_id_stream_info_map.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -31,7 +31,7 @@ class AudioQualityAnalyzerInterface : public StatsObserverInterface { // stream_id matching. The caller is responsible for ensuring the // AnalyzerHelper outlives the instance of the AudioQualityAnalyzerInterface. virtual void Start(std::string test_case_name, - TrackIdStreamLabelMap* analyzer_helper) = 0; + TrackIdStreamInfoMap* analyzer_helper) = 0; // Will be called by the framework at the end of the test. The analyzer // has to finalize all its stats and it should report them. diff --git a/api/test/audioproc_float.cc b/api/test/audioproc_float.cc index bba9c622a1..c8d7ff7193 100644 --- a/api/test/audioproc_float.cc +++ b/api/test/audioproc_float.cc @@ -17,6 +17,12 @@ namespace webrtc { namespace test { +int AudioprocFloat(rtc::scoped_refptr audio_processing, + int argc, + char* argv[]) { + return AudioprocFloatImpl(std::move(audio_processing), argc, argv); +} + int AudioprocFloat(std::unique_ptr ap_builder, int argc, char* argv[]) { diff --git a/api/test/audioproc_float.h b/api/test/audioproc_float.h index 2625e6ad9a..fec2ad11fa 100644 --- a/api/test/audioproc_float.h +++ b/api/test/audioproc_float.h @@ -19,6 +19,22 @@ namespace webrtc { namespace test { +// This is an interface for the audio processing simulation utility. This +// utility can be used to simulate the audioprocessing module using a recording +// (either an AEC dump or wav files), and generate the output as a wav file. +// Any audio_processing object specified in the input is used for the +// simulation. The optional |audio_processing| object provides the +// AudioProcessing instance that is used during the simulation. 
Note that when +// the audio_processing object is specified all functionality that relies on +// using the AudioProcessingBuilder is deactivated, since the AudioProcessing +// object is already created and the builder is not used in the simulation. It +// is needed to pass the command line flags as |argc| and |argv|, so these can +// be interpreted properly by the utility. To see a list of all supported +// command line flags, run the executable with the '--help' flag. +int AudioprocFloat(rtc::scoped_refptr audio_processing, + int argc, + char* argv[]); + // This is an interface for the audio processing simulation utility. This // utility can be used to simulate the audioprocessing module using a recording // (either an AEC dump or wav files), and generate the output as a wav file. diff --git a/api/test/compile_all_headers.cc b/api/test/compile_all_headers.cc index 47c5c6ec84..6f06742995 100644 --- a/api/test/compile_all_headers.cc +++ b/api/test/compile_all_headers.cc @@ -27,16 +27,20 @@ // "api/test/videocodec_test_fixture.h" // "api/test/videocodec_test_stats.h" +#include "api/test/dummy_peer_connection.h" #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" -#include "api/test/fake_media_transport.h" -#include "api/test/loopback_media_transport.h" #include "api/test/mock_audio_mixer.h" +#include "api/test/mock_data_channel.h" #include "api/test/mock_frame_decryptor.h" #include "api/test/mock_frame_encryptor.h" +#include "api/test/mock_media_stream_interface.h" +#include "api/test/mock_peer_connection_factory_interface.h" #include "api/test/mock_peerconnectioninterface.h" +#include "api/test/mock_rtp_transceiver.h" #include "api/test/mock_rtpreceiver.h" #include "api/test/mock_rtpsender.h" +#include "api/test/mock_transformable_video_frame.h" #include "api/test/mock_video_bitrate_allocator.h" #include "api/test/mock_video_bitrate_allocator_factory.h" #include "api/test/mock_video_decoder.h" diff --git 
a/api/test/create_network_emulation_manager.h b/api/test/create_network_emulation_manager.h index c57c34874c..f444743786 100644 --- a/api/test/create_network_emulation_manager.h +++ b/api/test/create_network_emulation_manager.h @@ -1,4 +1,3 @@ - /* * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. * @@ -18,6 +17,7 @@ namespace webrtc { +// Returns a non-null NetworkEmulationManager instance. std::unique_ptr CreateNetworkEmulationManager( TimeMode mode = TimeMode::kRealTime); diff --git a/api/test/create_peer_connection_quality_test_frame_generator.cc b/api/test/create_peer_connection_quality_test_frame_generator.cc new file mode 100644 index 0000000000..7f0ba20c85 --- /dev/null +++ b/api/test/create_peer_connection_quality_test_frame_generator.cc @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "api/test/create_peer_connection_quality_test_frame_generator.h"
+
+#include 
+#include 
+
+#include "api/test/create_frame_generator.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "rtc_base/checks.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+using VideoConfig =
+    ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig;
+using ScreenShareConfig = ::webrtc::webrtc_pc_e2e::
+    PeerConnectionE2EQualityTestFixture::ScreenShareConfig;
+
+void ValidateScreenShareConfig(const VideoConfig& video_config,
+                               const ScreenShareConfig& screen_share_config) {
+  if (screen_share_config.slides_yuv_file_names.empty()) {
+    if (screen_share_config.scrolling_params) {
+      // If we have scrolling params, then its |source_width| and
+      // |source_height| will be used as width and height of video input, so we
+      // have to validate it against width and height of default input.
+      RTC_CHECK_EQ(screen_share_config.scrolling_params->source_width,
+                   kDefaultSlidesWidth);
+      RTC_CHECK_EQ(screen_share_config.scrolling_params->source_height,
+                   kDefaultSlidesHeight);
+    } else {
+      RTC_CHECK_EQ(video_config.width, kDefaultSlidesWidth);
+      RTC_CHECK_EQ(video_config.height, kDefaultSlidesHeight);
+    }
+  }
+  if (screen_share_config.scrolling_params) {
+    RTC_CHECK_LE(screen_share_config.scrolling_params->duration,
+                 screen_share_config.slide_change_interval);
+    RTC_CHECK_GE(screen_share_config.scrolling_params->source_width,
+                 video_config.width);
+    RTC_CHECK_GE(screen_share_config.scrolling_params->source_height,
+                 video_config.height);
+  }
+}
+
+std::unique_ptr CreateSquareFrameGenerator(
+    const VideoConfig& video_config,
+    absl::optional type) {
+  return test::CreateSquareFrameGenerator(
+      video_config.width, video_config.height, std::move(type), absl::nullopt);
+}
+
+std::unique_ptr CreateFromYuvFileFrameGenerator(
+    const VideoConfig& video_config,
+    std::string filename) {
+  return 
test::CreateFromYuvFileFrameGenerator( + {std::move(filename)}, video_config.width, video_config.height, + /*frame_repeat_count=*/1); +} + +std::unique_ptr CreateScreenShareFrameGenerator( + const VideoConfig& video_config, + const ScreenShareConfig& screen_share_config) { + ValidateScreenShareConfig(video_config, screen_share_config); + if (screen_share_config.generate_slides) { + return test::CreateSlideFrameGenerator( + video_config.width, video_config.height, + screen_share_config.slide_change_interval.seconds() * video_config.fps); + } + std::vector slides = screen_share_config.slides_yuv_file_names; + if (slides.empty()) { + // If slides is empty we need to add default slides as source. In such case + // video width and height is validated to be equal to kDefaultSlidesWidth + // and kDefaultSlidesHeight. + slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); + } + if (!screen_share_config.scrolling_params) { + // Cycle image every slide_change_interval seconds. 
+ return test::CreateFromYuvFileFrameGenerator( + slides, video_config.width, video_config.height, + screen_share_config.slide_change_interval.seconds() * video_config.fps); + } + + TimeDelta pause_duration = screen_share_config.slide_change_interval - + screen_share_config.scrolling_params->duration; + RTC_DCHECK(pause_duration >= TimeDelta::Zero()); + return test::CreateScrollingInputFromYuvFilesFrameGenerator( + Clock::GetRealTimeClock(), slides, + screen_share_config.scrolling_params->source_width, + screen_share_config.scrolling_params->source_height, video_config.width, + video_config.height, screen_share_config.scrolling_params->duration.ms(), + pause_duration.ms()); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/api/test/create_peer_connection_quality_test_frame_generator.h b/api/test/create_peer_connection_quality_test_frame_generator.h new file mode 100644 index 0000000000..ff87331204 --- /dev/null +++ b/api/test/create_peer_connection_quality_test_frame_generator.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ +#define API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/peerconnection_quality_test_fixture.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// Creates a frame generator that produces frames with small squares that move +// randomly towards the lower right corner. 
|type| has the default value
+// FrameGeneratorInterface::OutputType::I420. video_config specifies frame
+// width and height.
+std::unique_ptr CreateSquareFrameGenerator(
+    const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config,
+    absl::optional type);
+
+// Creates a frame generator that plays frames from the yuv file.
+std::unique_ptr CreateFromYuvFileFrameGenerator(
+    const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config,
+    std::string filename);
+
+// Creates a proper frame generator for testing screen sharing.
+std::unique_ptr CreateScreenShareFrameGenerator(
+    const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config,
+    const PeerConnectionE2EQualityTestFixture::ScreenShareConfig&
+        screen_share_config);
+
+}  // namespace webrtc_pc_e2e
+}  // namespace webrtc
+
+#endif  // API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_
diff --git a/api/test/create_peerconnection_quality_test_fixture.cc b/api/test/create_peerconnection_quality_test_fixture.cc
index 1e027bf31a..2d9d0821fc 100644
--- a/api/test/create_peerconnection_quality_test_fixture.cc
+++ b/api/test/create_peerconnection_quality_test_fixture.cc
@@ -13,6 +13,7 @@
 #include 
 #include 
 
+#include "api/test/time_controller.h"
 #include "test/pc/e2e/peer_connection_quality_test.h"
 
 namespace webrtc {
@@ -21,11 +22,12 @@ namespace webrtc_pc_e2e {
 std::unique_ptr CreatePeerConnectionE2EQualityTestFixture(
     std::string test_case_name,
+    TimeController& time_controller,
     std::unique_ptr audio_quality_analyzer,
     std::unique_ptr video_quality_analyzer) {
   return std::make_unique(
-      std::move(test_case_name), std::move(audio_quality_analyzer),
-      std::move(video_quality_analyzer));
+      std::move(test_case_name), time_controller,
+      std::move(audio_quality_analyzer), std::move(video_quality_analyzer));
 }
 
 }  // namespace webrtc_pc_e2e
diff --git a/api/test/create_peerconnection_quality_test_fixture.h b/api/test/create_peerconnection_quality_test_fixture.h
index 
330d86de02..95b9ced5d2 100644
--- a/api/test/create_peerconnection_quality_test_fixture.h
+++ b/api/test/create_peerconnection_quality_test_fixture.h
@@ -15,19 +15,25 @@
 
 #include "api/test/audio_quality_analyzer_interface.h"
 #include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/time_controller.h"
 #include "api/test/video_quality_analyzer_interface.h"
 
 namespace webrtc {
 namespace webrtc_pc_e2e {
 
 // API is in development. Can be changed/removed without notice.
+
 // Create test fixture to establish test call between Alice and Bob.
 // During the test Alice will be caller and Bob will answer the call.
 // |test_case_name| is a name of test case, that will be used for all metrics
 // reporting.
+// |time_controller| is used to manage all rtc::Thread's and TaskQueue
+// instances. The |time_controller| instance has to outlive the created
+// fixture.
+// Returns a non-null PeerConnectionE2EQualityTestFixture instance.
 std::unique_ptr
 CreatePeerConnectionE2EQualityTestFixture(
     std::string test_case_name,
+    TimeController& time_controller,
     std::unique_ptr audio_quality_analyzer,
     std::unique_ptr video_quality_analyzer);
 
diff --git a/api/test/create_time_controller.cc b/api/test/create_time_controller.cc
index 09682bd279..a2c0cb713f 100644
--- a/api/test/create_time_controller.cc
+++ b/api/test/create_time_controller.cc
@@ -14,6 +14,7 @@
 
 #include "call/call.h"
 #include "test/time_controller/external_time_controller.h"
+#include "test/time_controller/simulated_time_controller.h"
 
 namespace webrtc {
 
@@ -22,6 +23,11 @@ std::unique_ptr CreateTimeController(
   return std::make_unique(alarm);
 }
 
+std::unique_ptr CreateSimulatedTimeController() {
+  return std::make_unique(
+      Timestamp::Seconds(10000));
+}
+
 std::unique_ptr CreateTimeControllerBasedCallFactory(
     TimeController* time_controller) {
   class TimeControllerBasedCallFactory : public CallFactoryInterface {
@@ -29,13 +35,18 @@
    explicit 
TimeControllerBasedCallFactory(TimeController* time_controller)
        : time_controller_(time_controller) {}
    Call* CreateCall(const Call::Config& config) override {
-      return Call::Create(config, time_controller_->GetClock(),
-                          time_controller_->CreateProcessThread("CallModules"),
+      if (!module_thread_) {
+        module_thread_ = SharedModuleThread::Create(
+            time_controller_->CreateProcessThread("CallModules"),
+            [this]() { module_thread_ = nullptr; });
+      }
+      return Call::Create(config, time_controller_->GetClock(), module_thread_,
                           time_controller_->CreateProcessThread("Pacer"));
     }
 
    private:
     TimeController* time_controller_;
+    rtc::scoped_refptr module_thread_;
   };
   return std::make_unique(time_controller);
 }
diff --git a/api/test/create_time_controller.h b/api/test/create_time_controller.h
index f4bd3f2c7b..1b6896f2b4 100644
--- a/api/test/create_time_controller.h
+++ b/api/test/create_time_controller.h
@@ -17,9 +17,13 @@
 
 namespace webrtc {
 
+// Creates a time controller that wraps |alarm|.
 std::unique_ptr CreateTimeController(
     ControlledAlarmClock* alarm);
 
+// Creates a time controller that runs in simulated time.
+std::unique_ptr CreateSimulatedTimeController();
+
 // This creates a call factory that creates Call instances that are backed by
 // a time controller.
std::unique_ptr CreateTimeControllerBasedCallFactory( diff --git a/api/test/create_time_controller_unittest.cc b/api/test/create_time_controller_unittest.cc index e39a453da0..0ea868c5cc 100644 --- a/api/test/create_time_controller_unittest.cc +++ b/api/test/create_time_controller_unittest.cc @@ -68,7 +68,7 @@ void FakeAlarm::Sleep(TimeDelta duration) { } TEST(CreateTimeControllerTest, CreatesNonNullController) { - FakeAlarm alarm(Timestamp::ms(100)); + FakeAlarm alarm(Timestamp::Millis(100)); EXPECT_NE(CreateTimeController(&alarm), nullptr); } diff --git a/api/test/dummy_peer_connection.h b/api/test/dummy_peer_connection.h index fcd91e3e2f..4d17aeddd0 100644 --- a/api/test/dummy_peer_connection.h +++ b/api/test/dummy_peer_connection.h @@ -36,7 +36,7 @@ class DummyPeerConnection : public PeerConnectionInterface { bool AddStream(MediaStreamInterface* stream) override { return false; } void RemoveStream(MediaStreamInterface* stream) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } RTCErrorOr> AddTrack( @@ -100,17 +100,17 @@ class DummyPeerConnection : public PeerConnectionInterface { } void GetStats(RTCStatsCollectorCallback* callback) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void GetStats( rtc::scoped_refptr selector, rtc::scoped_refptr callback) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void GetStats( rtc::scoped_refptr selector, rtc::scoped_refptr callback) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void ClearStatsCache() override {} @@ -145,33 +145,33 @@ class DummyPeerConnection : public PeerConnectionInterface { return nullptr; } - void RestartIce() override { FATAL() << "Not implemented"; } + void RestartIce() override { RTC_CHECK_NOTREACHED(); } // Create a new offer. // The CreateSessionDescriptionObserver callback will be called when done. 
void CreateOffer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void CreateAnswer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void SetLocalDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void SetRemoteDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } void SetRemoteDescription( std::unique_ptr desc, rtc::scoped_refptr observer) override { - FATAL() << "Not implemented"; + RTC_CHECK_NOTREACHED(); } PeerConnectionInterface::RTCConfiguration GetConfiguration() override { @@ -194,14 +194,8 @@ class DummyPeerConnection : public PeerConnectionInterface { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - RTCError SetBitrate(const BitrateParameters& bitrate_parameters) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - - void SetAudioPlayout(bool playout) override { FATAL() << "Not implemented"; } - void SetAudioRecording(bool recording) override { - FATAL() << "Not implemented"; - } + void SetAudioPlayout(bool playout) override { RTC_CHECK_NOTREACHED(); } + void SetAudioRecording(bool recording) override { RTC_CHECK_NOTREACHED(); } rtc::scoped_refptr LookupDtlsTransportByMid( const std::string& mid) override { @@ -229,6 +223,8 @@ class DummyPeerConnection : public PeerConnectionInterface { return IceGatheringState(); } + absl::optional can_trickle_ice_candidates() { return absl::nullopt; } + bool StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) override { return false; @@ -237,9 +233,13 @@ class DummyPeerConnection : public PeerConnectionInterface { return 
false; } - void StopRtcEventLog() { FATAL() << "Not implemented"; } + void StopRtcEventLog() { RTC_CHECK_NOTREACHED(); } + + void Close() override {} - void Close() {} + rtc::Thread* signaling_thread() const override { + return rtc::Thread::Current(); + } }; static_assert( diff --git a/api/test/fake_datagram_transport.h b/api/test/fake_datagram_transport.h deleted file mode 100644 index 847b4d842a..0000000000 --- a/api/test/fake_datagram_transport.h +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_FAKE_DATAGRAM_TRANSPORT_H_ -#define API_TEST_FAKE_DATAGRAM_TRANSPORT_H_ - -#include -#include - -#include "api/transport/datagram_transport_interface.h" -#include "api/transport/media/media_transport_interface.h" - -namespace webrtc { - -// Maxmum size of datagrams sent by |FakeDatagramTransport|. -constexpr size_t kMaxFakeDatagramSize = 1000; - -// Fake datagram transport. Does not support making an actual connection -// or sending data. Only used for tests that need to stub out a transport. 
-class FakeDatagramTransport : public DatagramTransportInterface { - public: - FakeDatagramTransport( - const MediaTransportSettings& settings, - std::string transport_parameters, - const std::function& - are_parameters_compatible) - : settings_(settings), - transport_parameters_(transport_parameters), - are_parameters_compatible_(are_parameters_compatible) {} - - ~FakeDatagramTransport() override { RTC_DCHECK(!state_callback_); } - - void Connect(rtc::PacketTransportInternal* packet_transport) override { - packet_transport_ = packet_transport; - } - - CongestionControlInterface* congestion_control() override { - return nullptr; // Datagram interface doesn't provide this yet. - } - - void SetTransportStateCallback( - MediaTransportStateCallback* callback) override { - state_callback_ = callback; - } - - RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) override { - return RTCError::OK(); - } - - size_t GetLargestDatagramSize() const override { - return kMaxFakeDatagramSize; - } - - void SetDatagramSink(DatagramSinkInterface* sink) override {} - - std::string GetTransportParameters() const override { - if (settings_.remote_transport_parameters) { - return *settings_.remote_transport_parameters; - } - return transport_parameters_; - } - - RTCError SetRemoteTransportParameters( - absl::string_view remote_parameters) override { - if (are_parameters_compatible_(GetTransportParameters(), - remote_parameters)) { - return RTCError::OK(); - } - return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, - "Incompatible remote transport parameters"); - } - - RTCError OpenChannel(int channel_id) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCError CloseChannel(int channel_id) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - void 
SetDataSink(DataChannelSink* /*sink*/) override {} - - bool IsReadyToSend() const override { return false; } - - rtc::PacketTransportInternal* packet_transport() { return packet_transport_; } - - void set_state(webrtc::MediaTransportState state) { - if (state_callback_) { - state_callback_->OnStateChanged(state); - } - } - - const MediaTransportSettings& settings() { return settings_; } - - private: - const MediaTransportSettings settings_; - const std::string transport_parameters_; - const std::function - are_parameters_compatible_; - - rtc::PacketTransportInternal* packet_transport_ = nullptr; - MediaTransportStateCallback* state_callback_ = nullptr; -}; - -} // namespace webrtc - -#endif // API_TEST_FAKE_DATAGRAM_TRANSPORT_H_ diff --git a/api/test/fake_media_transport.h b/api/test/fake_media_transport.h deleted file mode 100644 index 530394710a..0000000000 --- a/api/test/fake_media_transport.h +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_FAKE_MEDIA_TRANSPORT_H_ -#define API_TEST_FAKE_MEDIA_TRANSPORT_H_ - -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "api/test/fake_datagram_transport.h" -#include "api/transport/media/media_transport_interface.h" - -namespace webrtc { - -// Fake media transport factory creates fake media transport. -// Also creates fake datagram transport, since both media and datagram -// transports are created by |MediaTransportFactory|. 
-class FakeMediaTransportFactory : public MediaTransportFactory { - public: - explicit FakeMediaTransportFactory( - const absl::optional& transport_offer = "") - : transport_offer_(transport_offer) {} - ~FakeMediaTransportFactory() = default; - - std::string GetTransportName() const override { return "fake"; } - - RTCErrorOr> CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCErrorOr> CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCErrorOr> - CreateDatagramTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings) override { - return std::unique_ptr( - new FakeDatagramTransport(settings, transport_offer_.value_or(""), - transport_parameters_comparison_)); - } - - void set_transport_parameters_comparison( - std::function comparison) { - transport_parameters_comparison_ = std::move(comparison); - } - - private: - const absl::optional transport_offer_; - std::function - transport_parameters_comparison_ = - [](absl::string_view local, absl::string_view remote) { - return local == remote; - }; -}; - -} // namespace webrtc - -#endif // API_TEST_FAKE_MEDIA_TRANSPORT_H_ diff --git a/api/test/frame_generator_interface.cc b/api/test/frame_generator_interface.cc new file mode 100644 index 0000000000..356fe3af53 --- /dev/null +++ b/api/test/frame_generator_interface.cc @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/frame_generator_interface.h" + +namespace webrtc { +namespace test { + +// static +const char* FrameGeneratorInterface::OutputTypeToString( + FrameGeneratorInterface::OutputType type) { + switch (type) { + case OutputType::kI420: + return "I420"; + case OutputType::kI420A: + return "I420A"; + case OutputType::kI010: + return "I010"; + case OutputType::kNV12: + return "NV12"; + default: + RTC_NOTREACHED(); + } +} + +} // namespace test +} // namespace webrtc diff --git a/api/test/frame_generator_interface.h b/api/test/frame_generator_interface.h index 691b6ee3f7..90e60debac 100644 --- a/api/test/frame_generator_interface.h +++ b/api/test/frame_generator_interface.h @@ -32,7 +32,8 @@ class FrameGeneratorInterface { absl::optional update_rect; }; - enum class OutputType { kI420, kI420A, kI010 }; + enum class OutputType { kI420, kI420A, kI010, kNV12 }; + static const char* OutputTypeToString(OutputType type); virtual ~FrameGeneratorInterface() = default; diff --git a/api/test/loopback_media_transport.cc b/api/test/loopback_media_transport.cc deleted file mode 100644 index 847ca4864a..0000000000 --- a/api/test/loopback_media_transport.cc +++ /dev/null @@ -1,373 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/test/loopback_media_transport.h" - -#include - -#include "absl/algorithm/container.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { - -namespace { - -constexpr size_t kLoopbackMaxDatagramSize = 1200; - -class WrapperDatagramTransport : public DatagramTransportInterface { - public: - explicit WrapperDatagramTransport(DatagramTransportInterface* wrapped) - : wrapped_(wrapped) {} - - // Datagram transport overrides. - void Connect(rtc::PacketTransportInternal* packet_transport) override { - return wrapped_->Connect(packet_transport); - } - - CongestionControlInterface* congestion_control() override { - return wrapped_->congestion_control(); - } - - void SetTransportStateCallback( - MediaTransportStateCallback* callback) override { - return wrapped_->SetTransportStateCallback(callback); - } - - RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) override { - return wrapped_->SendDatagram(data, datagram_id); - } - - size_t GetLargestDatagramSize() const override { - return wrapped_->GetLargestDatagramSize(); - } - - void SetDatagramSink(DatagramSinkInterface* sink) override { - return wrapped_->SetDatagramSink(sink); - } - - std::string GetTransportParameters() const override { - return wrapped_->GetTransportParameters(); - } - - RTCError SetRemoteTransportParameters(absl::string_view parameters) override { - return wrapped_->SetRemoteTransportParameters(parameters); - } - - // Data channel overrides. 
- RTCError OpenChannel(int channel_id) override { - return wrapped_->OpenChannel(channel_id); - } - - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override { - return wrapped_->SendData(channel_id, params, buffer); - } - - RTCError CloseChannel(int channel_id) override { - return wrapped_->CloseChannel(channel_id); - } - - void SetDataSink(DataChannelSink* sink) override { - wrapped_->SetDataSink(sink); - } - - bool IsReadyToSend() const override { return wrapped_->IsReadyToSend(); } - - private: - DatagramTransportInterface* wrapped_; -}; - -} // namespace - -WrapperMediaTransportFactory::WrapperMediaTransportFactory( - DatagramTransportInterface* wrapped_datagram_transport) - : wrapped_datagram_transport_(wrapped_datagram_transport) {} - -WrapperMediaTransportFactory::WrapperMediaTransportFactory( - MediaTransportFactory* wrapped) - : wrapped_factory_(wrapped) {} - -RTCErrorOr> -WrapperMediaTransportFactory::CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); -} - -RTCErrorOr> -WrapperMediaTransportFactory::CreateDatagramTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - created_transport_count_++; - if (wrapped_factory_) { - return wrapped_factory_->CreateDatagramTransport(network_thread, settings); - } - return { - std::make_unique(wrapped_datagram_transport_)}; -} - -std::string WrapperMediaTransportFactory::GetTransportName() const { - if (wrapped_factory_) { - return wrapped_factory_->GetTransportName(); - } - return "wrapped-transport"; -} - -int WrapperMediaTransportFactory::created_transport_count() const { - return created_transport_count_; -} - -RTCErrorOr> -WrapperMediaTransportFactory::CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return 
RTCError(RTCErrorType::UNSUPPORTED_OPERATION); -} - -MediaTransportPair::MediaTransportPair(rtc::Thread* thread) - : first_datagram_transport_(thread), - second_datagram_transport_(thread), - first_factory_(&first_datagram_transport_), - second_factory_(&second_datagram_transport_) { - first_datagram_transport_.Connect(&second_datagram_transport_); - second_datagram_transport_.Connect(&first_datagram_transport_); -} - -MediaTransportPair::~MediaTransportPair() = default; - -MediaTransportPair::LoopbackDataChannelTransport::LoopbackDataChannelTransport( - rtc::Thread* thread) - : thread_(thread) {} - -MediaTransportPair::LoopbackDataChannelTransport:: - ~LoopbackDataChannelTransport() { - RTC_CHECK(data_sink_ == nullptr); -} - -void MediaTransportPair::LoopbackDataChannelTransport::Connect( - LoopbackDataChannelTransport* other) { - other_ = other; -} - -RTCError MediaTransportPair::LoopbackDataChannelTransport::OpenChannel( - int channel_id) { - // No-op. No need to open channels for the loopback. 
- return RTCError::OK(); -} - -RTCError MediaTransportPair::LoopbackDataChannelTransport::SendData( - int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, - [this, channel_id, params, buffer] { - other_->OnData(channel_id, params.type, buffer); - }); - return RTCError::OK(); -} - -RTCError MediaTransportPair::LoopbackDataChannelTransport::CloseChannel( - int channel_id) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, [this, channel_id] { - other_->OnRemoteCloseChannel(channel_id); - rtc::CritScope lock(&sink_lock_); - if (data_sink_) { - data_sink_->OnChannelClosed(channel_id); - } - }); - return RTCError::OK(); -} - -void MediaTransportPair::LoopbackDataChannelTransport::SetDataSink( - DataChannelSink* sink) { - rtc::CritScope lock(&sink_lock_); - data_sink_ = sink; - if (data_sink_ && ready_to_send_) { - data_sink_->OnReadyToSend(); - } -} - -bool MediaTransportPair::LoopbackDataChannelTransport::IsReadyToSend() const { - rtc::CritScope lock(&sink_lock_); - return ready_to_send_; -} - -void MediaTransportPair::LoopbackDataChannelTransport::FlushAsyncInvokes() { - invoker_.Flush(thread_); -} - -void MediaTransportPair::LoopbackDataChannelTransport::OnData( - int channel_id, - DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) { - rtc::CritScope lock(&sink_lock_); - if (data_sink_) { - data_sink_->OnDataReceived(channel_id, type, buffer); - } -} - -void MediaTransportPair::LoopbackDataChannelTransport::OnRemoteCloseChannel( - int channel_id) { - rtc::CritScope lock(&sink_lock_); - if (data_sink_) { - data_sink_->OnChannelClosing(channel_id); - data_sink_->OnChannelClosed(channel_id); - } -} - -void MediaTransportPair::LoopbackDataChannelTransport::OnReadyToSend( - bool ready_to_send) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, [this, ready_to_send] { - rtc::CritScope lock(&sink_lock_); - ready_to_send_ = ready_to_send; - // Propagate state to data channel sink, if 
present. - if (data_sink_ && ready_to_send_) { - data_sink_->OnReadyToSend(); - } - }); -} - -MediaTransportPair::LoopbackDatagramTransport::LoopbackDatagramTransport( - rtc::Thread* thread) - : thread_(thread), dc_transport_(thread) {} - -void MediaTransportPair::LoopbackDatagramTransport::Connect( - LoopbackDatagramTransport* other) { - other_ = other; - dc_transport_.Connect(&other->dc_transport_); -} - -void MediaTransportPair::LoopbackDatagramTransport::Connect( - rtc::PacketTransportInternal* packet_transport) { - if (state_after_connect_) { - SetState(*state_after_connect_); - } -} - -CongestionControlInterface* -MediaTransportPair::LoopbackDatagramTransport::congestion_control() { - return nullptr; -} - -void MediaTransportPair::LoopbackDatagramTransport::SetTransportStateCallback( - MediaTransportStateCallback* callback) { - RTC_DCHECK_RUN_ON(thread_); - state_callback_ = callback; - if (state_callback_) { - state_callback_->OnStateChanged(state_); - } -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::SendDatagram( - rtc::ArrayView data, - DatagramId datagram_id) { - rtc::CopyOnWriteBuffer buffer; - buffer.SetData(data.data(), data.size()); - invoker_.AsyncInvoke( - RTC_FROM_HERE, thread_, [this, datagram_id, buffer = std::move(buffer)] { - RTC_DCHECK_RUN_ON(thread_); - other_->DeliverDatagram(std::move(buffer)); - if (sink_) { - DatagramAck ack; - ack.datagram_id = datagram_id; - ack.receive_timestamp = Timestamp::us(rtc::TimeMicros()); - sink_->OnDatagramAcked(ack); - } - }); - return RTCError::OK(); -} - -size_t MediaTransportPair::LoopbackDatagramTransport::GetLargestDatagramSize() - const { - return kLoopbackMaxDatagramSize; -} - -void MediaTransportPair::LoopbackDatagramTransport::SetDatagramSink( - DatagramSinkInterface* sink) { - RTC_DCHECK_RUN_ON(thread_); - sink_ = sink; -} - -std::string -MediaTransportPair::LoopbackDatagramTransport::GetTransportParameters() const { - return transport_parameters_; -} - -RTCError 
-MediaTransportPair::LoopbackDatagramTransport::SetRemoteTransportParameters( - absl::string_view remote_parameters) { - RTC_DCHECK_RUN_ON(thread_); - if (transport_parameters_comparison_(GetTransportParameters(), - remote_parameters)) { - return RTCError::OK(); - } - return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, - "Incompatible remote transport parameters"); -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::OpenChannel( - int channel_id) { - return dc_transport_.OpenChannel(channel_id); -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::SendData( - int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - return dc_transport_.SendData(channel_id, params, buffer); -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::CloseChannel( - int channel_id) { - return dc_transport_.CloseChannel(channel_id); -} - -void MediaTransportPair::LoopbackDatagramTransport::SetDataSink( - DataChannelSink* sink) { - dc_transport_.SetDataSink(sink); -} - -bool MediaTransportPair::LoopbackDatagramTransport::IsReadyToSend() const { - return dc_transport_.IsReadyToSend(); -} - -void MediaTransportPair::LoopbackDatagramTransport::SetState( - MediaTransportState state) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, [this, state] { - RTC_DCHECK_RUN_ON(thread_); - state_ = state; - if (state_callback_) { - state_callback_->OnStateChanged(state_); - } - }); - dc_transport_.OnReadyToSend(state == MediaTransportState::kWritable); -} - -void MediaTransportPair::LoopbackDatagramTransport::SetStateAfterConnect( - MediaTransportState state) { - state_after_connect_ = state; -} - -void MediaTransportPair::LoopbackDatagramTransport::FlushAsyncInvokes() { - dc_transport_.FlushAsyncInvokes(); -} - -void MediaTransportPair::LoopbackDatagramTransport::DeliverDatagram( - rtc::CopyOnWriteBuffer buffer) { - RTC_DCHECK_RUN_ON(thread_); - if (sink_) { - sink_->OnDatagramReceived(buffer); - } -} - -} // namespace webrtc diff --git 
a/api/test/loopback_media_transport.h b/api/test/loopback_media_transport.h deleted file mode 100644 index 468965ba31..0000000000 --- a/api/test/loopback_media_transport.h +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_ -#define API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_ - -#include -#include -#include -#include - -#include "api/transport/datagram_transport_interface.h" -#include "api/transport/media/media_transport_interface.h" -#include "rtc_base/async_invoker.h" -#include "rtc_base/critical_section.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" - -namespace webrtc { - -// Wrapper used to hand out unique_ptrs to loopback media -// transport without ownership changes to the underlying -// transport. -// It works in two modes: -// It can either wrap a factory, or it can wrap an existing interface. -// In the former mode, it delegates the work to the wrapped factory. -// In the latter mode, it always returns static instance of the transport -// interface. -// -// Example use: -// Factory wrap_static_interface = Wrapper(media_transport_interface); -// Factory wrap_factory = Wrapper(wrap_static_interface); -// The second factory may be created multiple times, and ownership may be passed -// to the client. 
The first factory counts the number of invocations of -// CreateMediaTransport(); -class WrapperMediaTransportFactory : public MediaTransportFactory { - public: - explicit WrapperMediaTransportFactory( - DatagramTransportInterface* wrapped_datagram_transport); - explicit WrapperMediaTransportFactory(MediaTransportFactory* wrapped); - - RTCErrorOr> CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override; - - RTCErrorOr> CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override; - - RTCErrorOr> - CreateDatagramTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings) override; - - std::string GetTransportName() const override; - - int created_transport_count() const; - - private: - DatagramTransportInterface* wrapped_datagram_transport_ = nullptr; - MediaTransportFactory* wrapped_factory_ = nullptr; - int created_transport_count_ = 0; -}; - -// Contains two MediaTransportsInterfaces that are connected to each other. -// Currently supports audio only. 
-class MediaTransportPair { - public: - struct Stats { - int sent_audio_frames = 0; - int received_audio_frames = 0; - int sent_video_frames = 0; - int received_video_frames = 0; - }; - - explicit MediaTransportPair(rtc::Thread* thread); - ~MediaTransportPair(); - - DatagramTransportInterface* first_datagram_transport() { - return &first_datagram_transport_; - } - DatagramTransportInterface* second_datagram_transport() { - return &second_datagram_transport_; - } - - std::unique_ptr first_factory() { - return std::make_unique(&first_factory_); - } - - std::unique_ptr second_factory() { - return std::make_unique(&second_factory_); - } - - void SetState(MediaTransportState state) { - first_datagram_transport_.SetState(state); - second_datagram_transport_.SetState(state); - } - - void SetFirstState(MediaTransportState state) { - first_datagram_transport_.SetState(state); - } - - void SetSecondStateAfterConnect(MediaTransportState state) { - second_datagram_transport_.SetState(state); - } - - void SetFirstDatagramTransportParameters(const std::string& params) { - first_datagram_transport_.set_transport_parameters(params); - } - - void SetSecondDatagramTransportParameters(const std::string& params) { - second_datagram_transport_.set_transport_parameters(params); - } - - void SetFirstDatagramTransportParametersComparison( - std::function comparison) { - first_datagram_transport_.set_transport_parameters_comparison( - std::move(comparison)); - } - - void SetSecondDatagramTransportParametersComparison( - std::function comparison) { - second_datagram_transport_.set_transport_parameters_comparison( - std::move(comparison)); - } - - void FlushAsyncInvokes() { - first_datagram_transport_.FlushAsyncInvokes(); - second_datagram_transport_.FlushAsyncInvokes(); - } - - int first_factory_transport_count() const { - return first_factory_.created_transport_count(); - } - - int second_factory_transport_count() const { - return second_factory_.created_transport_count(); - } - - private: 
- class LoopbackDataChannelTransport : public DataChannelTransportInterface { - public: - explicit LoopbackDataChannelTransport(rtc::Thread* thread); - ~LoopbackDataChannelTransport() override; - - void Connect(LoopbackDataChannelTransport* other); - - RTCError OpenChannel(int channel_id) override; - - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override; - - RTCError CloseChannel(int channel_id) override; - - bool IsReadyToSend() const override; - - void SetDataSink(DataChannelSink* sink) override; - - void OnReadyToSend(bool ready_to_send); - - void FlushAsyncInvokes(); - - private: - void OnData(int channel_id, - DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer); - - void OnRemoteCloseChannel(int channel_id); - - rtc::Thread* const thread_; - rtc::CriticalSection sink_lock_; - DataChannelSink* data_sink_ RTC_GUARDED_BY(sink_lock_) = nullptr; - - bool ready_to_send_ RTC_GUARDED_BY(sink_lock_) = false; - - LoopbackDataChannelTransport* other_; - - rtc::AsyncInvoker invoker_; - }; - - class LoopbackDatagramTransport : public DatagramTransportInterface { - public: - explicit LoopbackDatagramTransport(rtc::Thread* thread); - - void Connect(LoopbackDatagramTransport* other); - - // Datagram transport overrides. - void Connect(rtc::PacketTransportInternal* packet_transport) override; - CongestionControlInterface* congestion_control() override; - void SetTransportStateCallback( - MediaTransportStateCallback* callback) override; - RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) override; - size_t GetLargestDatagramSize() const override; - void SetDatagramSink(DatagramSinkInterface* sink) override; - std::string GetTransportParameters() const override; - RTCError SetRemoteTransportParameters( - absl::string_view remote_parameters) override; - - // Data channel overrides. 
- RTCError OpenChannel(int channel_id) override; - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override; - RTCError CloseChannel(int channel_id) override; - void SetDataSink(DataChannelSink* sink) override; - bool IsReadyToSend() const override; - - // Loopback-specific functionality. - void SetState(MediaTransportState state); - - // When Connect() is called, the datagram transport will enter this state. - // This is useful for mimicking zero-RTT connectivity, for example. - void SetStateAfterConnect(MediaTransportState state); - void FlushAsyncInvokes(); - - void set_transport_parameters(const std::string& value) { - transport_parameters_ = value; - } - - void set_transport_parameters_comparison( - std::function comparison) { - thread_->Invoke( - RTC_FROM_HERE, [this, comparison = std::move(comparison)] { - RTC_DCHECK_RUN_ON(thread_); - transport_parameters_comparison_ = std::move(comparison); - }); - } - - private: - void DeliverDatagram(rtc::CopyOnWriteBuffer buffer); - - rtc::Thread* thread_; - LoopbackDataChannelTransport dc_transport_; - - MediaTransportState state_ RTC_GUARDED_BY(thread_) = - MediaTransportState::kPending; - DatagramSinkInterface* sink_ RTC_GUARDED_BY(thread_) = nullptr; - MediaTransportStateCallback* state_callback_ RTC_GUARDED_BY(thread_) = - nullptr; - LoopbackDatagramTransport* other_; - - std::string transport_parameters_; - std::function - transport_parameters_comparison_ RTC_GUARDED_BY(thread_) = - [](absl::string_view a, absl::string_view b) { return a == b; }; - - absl::optional state_after_connect_; - - rtc::AsyncInvoker invoker_; - }; - - LoopbackDatagramTransport first_datagram_transport_; - LoopbackDatagramTransport second_datagram_transport_; - WrapperMediaTransportFactory first_factory_; - WrapperMediaTransportFactory second_factory_; -}; - -} // namespace webrtc - -#endif // API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_ diff --git 
a/api/test/loopback_media_transport_unittest.cc b/api/test/loopback_media_transport_unittest.cc deleted file mode 100644 index f036de3eae..0000000000 --- a/api/test/loopback_media_transport_unittest.cc +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/test/loopback_media_transport.h" - -#include -#include -#include - -#include "test/gmock.h" - -namespace webrtc { - -namespace { - -class MockMediaTransportAudioSinkInterface - : public MediaTransportAudioSinkInterface { - public: - MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedAudioFrame)); -}; - -class MockMediaTransportVideoSinkInterface - : public MediaTransportVideoSinkInterface { - public: - MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedVideoFrame)); -}; - -class MockMediaTransportKeyFrameRequestCallback - : public MediaTransportKeyFrameRequestCallback { - public: - MOCK_METHOD1(OnKeyFrameRequested, void(uint64_t)); -}; - -class MockDataChannelSink : public DataChannelSink { - public: - MOCK_METHOD3(OnDataReceived, - void(int, DataMessageType, const rtc::CopyOnWriteBuffer&)); - MOCK_METHOD1(OnChannelClosing, void(int)); - MOCK_METHOD1(OnChannelClosed, void(int)); - MOCK_METHOD0(OnReadyToSend, void()); -}; - -class MockStateCallback : public MediaTransportStateCallback { - public: - MOCK_METHOD1(OnStateChanged, void(MediaTransportState)); -}; - -} // namespace - -TEST(LoopbackMediaTransport, DataDeliveredToSink) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink sink; - 
transport_pair.first_datagram_transport()->SetDataSink(&sink); - - const int channel_id = 1; - EXPECT_CALL( - sink, OnDataReceived( - channel_id, DataMessageType::kText, - ::testing::Property( - &rtc::CopyOnWriteBuffer::cdata, ::testing::StrEq("foo")))); - - SendDataParams params; - params.type = DataMessageType::kText; - rtc::CopyOnWriteBuffer buffer("foo"); - transport_pair.second_datagram_transport()->SendData(channel_id, params, - buffer); - - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, CloseDeliveredToSink) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink first_sink; - transport_pair.first_datagram_transport()->SetDataSink(&first_sink); - - MockDataChannelSink second_sink; - transport_pair.second_datagram_transport()->SetDataSink(&second_sink); - - const int channel_id = 1; - { - ::testing::InSequence s; - EXPECT_CALL(second_sink, OnChannelClosing(channel_id)); - EXPECT_CALL(second_sink, OnChannelClosed(channel_id)); - EXPECT_CALL(first_sink, OnChannelClosed(channel_id)); - } - - transport_pair.first_datagram_transport()->CloseChannel(channel_id); - - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); - transport_pair.second_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, InitialStateDeliveredWhenCallbackSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockStateCallback state_callback; - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending)); - - thread->Invoke(RTC_FROM_HERE, [&transport_pair, &state_callback] { - transport_pair.first_datagram_transport()->SetTransportStateCallback( - &state_callback); - }); - transport_pair.FlushAsyncInvokes(); -} - -TEST(LoopbackMediaTransport, 
ChangedStateDeliveredWhenCallbackSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); - - MockStateCallback state_callback; - - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable)); - thread->Invoke(RTC_FROM_HERE, [&transport_pair, &state_callback] { - transport_pair.first_datagram_transport()->SetTransportStateCallback( - &state_callback); - }); - transport_pair.FlushAsyncInvokes(); -} - -TEST(LoopbackMediaTransport, StateChangeDeliveredToCallback) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockStateCallback state_callback; - - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending)); - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable)); - thread->Invoke(RTC_FROM_HERE, [&transport_pair, &state_callback] { - transport_pair.first_datagram_transport()->SetTransportStateCallback( - &state_callback); - }); - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); -} - -TEST(LoopbackMediaTransport, NotReadyToSendWhenDataSinkSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink data_channel_sink; - EXPECT_CALL(data_channel_sink, OnReadyToSend()).Times(0); - - transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink); - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, ReadyToSendWhenDataSinkSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); - - 
MockDataChannelSink data_channel_sink; - EXPECT_CALL(data_channel_sink, OnReadyToSend()); - - transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink); - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, StateChangeDeliveredToDataSink) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink data_channel_sink; - EXPECT_CALL(data_channel_sink, OnReadyToSend()); - - transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink); - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -} // namespace webrtc diff --git a/api/test/mock_audio_mixer.h b/api/test/mock_audio_mixer.h index bb303e2c54..88dc108ca3 100644 --- a/api/test/mock_audio_mixer.h +++ b/api/test/mock_audio_mixer.h @@ -19,11 +19,9 @@ namespace test { class MockAudioMixer : public AudioMixer { public: - MOCK_METHOD1(AddSource, bool(Source* audio_source)); - MOCK_METHOD1(RemoveSource, void(Source* audio_source)); - MOCK_METHOD2(Mix, - void(size_t number_of_channels, - AudioFrame* audio_frame_for_mixing)); + MOCK_METHOD(bool, AddSource, (Source*), (override)); + MOCK_METHOD(void, RemoveSource, (Source*), (override)); + MOCK_METHOD(void, Mix, (size_t number_of_channels, AudioFrame*), (override)); }; } // namespace test } // namespace webrtc diff --git a/api/test/mock_data_channel.h b/api/test/mock_data_channel.h new file mode 100644 index 0000000000..9346ffd638 --- /dev/null +++ b/api/test/mock_data_channel.h @@ -0,0 +1,60 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_DATA_CHANNEL_H_ +#define API_TEST_MOCK_DATA_CHANNEL_H_ + +#include + +#include "api/data_channel_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockDataChannelInterface final + : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockDataChannelInterface(); + } + + MOCK_METHOD(void, + RegisterObserver, + (DataChannelObserver * observer), + (override)); + MOCK_METHOD(void, UnregisterObserver, (), (override)); + MOCK_METHOD(std::string, label, (), (const, override)); + MOCK_METHOD(bool, reliable, (), (const, override)); + MOCK_METHOD(bool, ordered, (), (const, override)); + MOCK_METHOD(uint16_t, maxRetransmitTime, (), (const, override)); + MOCK_METHOD(uint16_t, maxRetransmits, (), (const, override)); + MOCK_METHOD(absl::optional, maxRetransmitsOpt, (), (const, override)); + MOCK_METHOD(absl::optional, maxPacketLifeTime, (), (const, override)); + MOCK_METHOD(std::string, protocol, (), (const, override)); + MOCK_METHOD(bool, negotiated, (), (const, override)); + MOCK_METHOD(int, id, (), (const, override)); + MOCK_METHOD(Priority, priority, (), (const, override)); + MOCK_METHOD(DataState, state, (), (const, override)); + MOCK_METHOD(RTCError, error, (), (const, override)); + MOCK_METHOD(uint32_t, messages_sent, (), (const, override)); + MOCK_METHOD(uint64_t, bytes_sent, (), (const, override)); + MOCK_METHOD(uint32_t, messages_received, (), (const, override)); + MOCK_METHOD(uint64_t, bytes_received, (), (const, override)); + MOCK_METHOD(uint64_t, buffered_amount, (), (const, override)); + MOCK_METHOD(void, Close, (), (override)); + MOCK_METHOD(bool, Send, (const DataBuffer& buffer), (override)); + + protected: + MockDataChannelInterface() = default; +}; + +} // namespace webrtc + +#endif // 
API_TEST_MOCK_DATA_CHANNEL_H_ diff --git a/api/test/mock_fec_controller_override.h b/api/test/mock_fec_controller_override.h index a7ec8360ab..8f3accbc03 100644 --- a/api/test/mock_fec_controller_override.h +++ b/api/test/mock_fec_controller_override.h @@ -18,9 +18,7 @@ namespace webrtc { class MockFecControllerOverride : public FecControllerOverride { public: - ~MockFecControllerOverride() override = default; - - MOCK_METHOD1(SetFecAllowed, void(bool fec_allowed)); + MOCK_METHOD(void, SetFecAllowed, (bool fec_allowed), (override)); }; } // namespace webrtc diff --git a/api/test/mock_frame_decryptor.h b/api/test/mock_frame_decryptor.h index 77aa4f9147..9604b96cc2 100644 --- a/api/test/mock_frame_decryptor.h +++ b/api/test/mock_frame_decryptor.h @@ -20,18 +20,19 @@ namespace webrtc { class MockFrameDecryptor : public FrameDecryptorInterface { public: - MockFrameDecryptor(); - ~MockFrameDecryptor() override; - - MOCK_METHOD5(Decrypt, - Result(cricket::MediaType, - const std::vector&, - rtc::ArrayView, - rtc::ArrayView, - rtc::ArrayView)); - - MOCK_METHOD2(GetMaxPlaintextByteSize, - size_t(cricket::MediaType, size_t encrypted_frame_size)); + MOCK_METHOD(Result, + Decrypt, + (cricket::MediaType, + const std::vector&, + rtc::ArrayView, + rtc::ArrayView, + rtc::ArrayView), + (override)); + + MOCK_METHOD(size_t, + GetMaxPlaintextByteSize, + (cricket::MediaType, size_t encrypted_frame_size), + (override)); }; } // namespace webrtc diff --git a/api/test/mock_frame_encryptor.h b/api/test/mock_frame_encryptor.h index 44b5e3426d..e47321f801 100644 --- a/api/test/mock_frame_encryptor.h +++ b/api/test/mock_frame_encryptor.h @@ -18,19 +18,20 @@ namespace webrtc { class MockFrameEncryptor : public FrameEncryptorInterface { public: - MockFrameEncryptor(); - ~MockFrameEncryptor() override; - - MOCK_METHOD6(Encrypt, - int(cricket::MediaType, - uint32_t, - rtc::ArrayView, - rtc::ArrayView, - rtc::ArrayView, - size_t*)); - - MOCK_METHOD2(GetMaxCiphertextByteSize, - 
size_t(cricket::MediaType media_type, size_t frame_size)); + MOCK_METHOD(int, + Encrypt, + (cricket::MediaType, + uint32_t, + rtc::ArrayView, + rtc::ArrayView, + rtc::ArrayView, + size_t*), + (override)); + + MOCK_METHOD(size_t, + GetMaxCiphertextByteSize, + (cricket::MediaType media_type, size_t frame_size), + (override)); }; } // namespace webrtc diff --git a/api/test/mock_media_stream_interface.h b/api/test/mock_media_stream_interface.h new file mode 100644 index 0000000000..29521e6e23 --- /dev/null +++ b/api/test/mock_media_stream_interface.h @@ -0,0 +1,89 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ +#define API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ + +#include + +#include "api/media_stream_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockAudioSource final + : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockAudioSource(); + } + + MOCK_METHOD(void, + RegisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(void, + UnregisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(SourceState, state, (), (const, override)); + MOCK_METHOD(bool, remote, (), (const, override)); + MOCK_METHOD(void, SetVolume, (double volume), (override)); + MOCK_METHOD(void, + RegisterAudioObserver, + (AudioObserver * observer), + (override)); + MOCK_METHOD(void, + UnregisterAudioObserver, + (AudioObserver * observer), + (override)); + MOCK_METHOD(void, AddSink, (AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(void, RemoveSink, 
(AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(const cricket::AudioOptions, options, (), (const, override)); + + private: + MockAudioSource() = default; +}; + +class MockAudioTrack final : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockAudioTrack(); + } + + MOCK_METHOD(void, + RegisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(void, + UnregisterObserver, + (ObserverInterface * observer), + (override)); + MOCK_METHOD(std::string, kind, (), (const, override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(bool, enabled, (), (const, override)); + MOCK_METHOD(bool, set_enabled, (bool enable), (override)); + MOCK_METHOD(TrackState, state, (), (const, override)); + MOCK_METHOD(AudioSourceInterface*, GetSource, (), (const, override)); + MOCK_METHOD(void, AddSink, (AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(void, RemoveSink, (AudioTrackSinkInterface * sink), (override)); + MOCK_METHOD(bool, GetSignalLevel, (int* level), (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetAudioProcessor, + (), + (override)); + + private: + MockAudioTrack() = default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ diff --git a/api/test/mock_peer_connection_factory_interface.h b/api/test/mock_peer_connection_factory_interface.h new file mode 100644 index 0000000000..7319cebbcc --- /dev/null +++ b/api/test/mock_peer_connection_factory_interface.h @@ -0,0 +1,75 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ +#define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ + +#include +#include + +#include "api/peer_connection_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockPeerConnectionFactoryInterface final + : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockPeerConnectionFactoryInterface(); + } + + MOCK_METHOD(void, SetOptions, (const Options&), (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreatePeerConnection, + (const PeerConnectionInterface::RTCConfiguration&, + PeerConnectionDependencies), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreatePeerConnection, + (const PeerConnectionInterface::RTCConfiguration&, + std::unique_ptr, + std::unique_ptr, + PeerConnectionObserver*), + (override)); + MOCK_METHOD(RtpCapabilities, + GetRtpSenderCapabilities, + (cricket::MediaType), + (const override)); + MOCK_METHOD(RtpCapabilities, + GetRtpReceiverCapabilities, + (cricket::MediaType), + (const override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateLocalMediaStream, + (const std::string&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateAudioSource, + (const cricket::AudioOptions&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateVideoTrack, + (const std::string&, VideoTrackSourceInterface*), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateAudioTrack, + (const std::string&, AudioSourceInterface*), + (override)); + MOCK_METHOD(bool, StartAecDump, (FILE*, int64_t), (override)); + MOCK_METHOD(void, StopAecDump, (), (override)); + + protected: + MockPeerConnectionFactoryInterface() = default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ diff --git a/api/test/mock_peerconnectioninterface.h b/api/test/mock_peerconnectioninterface.h index aacaaf6cab..be34df0b32 100644 --- a/api/test/mock_peerconnectioninterface.h +++ b/api/test/mock_peerconnectioninterface.h @@ 
-27,111 +27,171 @@ class MockPeerConnectionInterface : public rtc::RefCountedObject { public: // PeerConnectionInterface - MOCK_METHOD0(local_streams, rtc::scoped_refptr()); - MOCK_METHOD0(remote_streams, rtc::scoped_refptr()); - MOCK_METHOD1(AddStream, bool(MediaStreamInterface*)); - MOCK_METHOD1(RemoveStream, void(MediaStreamInterface*)); - MOCK_METHOD2(AddTrack, - RTCErrorOr>( - rtc::scoped_refptr, - const std::vector&)); - MOCK_METHOD2(AddTrack, - rtc::scoped_refptr( - MediaStreamTrackInterface*, - std::vector)); - MOCK_METHOD1(RemoveTrack, bool(RtpSenderInterface*)); - MOCK_METHOD1(RemoveTrackNew, - RTCError(rtc::scoped_refptr)); - MOCK_METHOD1(AddTransceiver, - RTCErrorOr>( - rtc::scoped_refptr)); - MOCK_METHOD2(AddTransceiver, - RTCErrorOr>( - rtc::scoped_refptr, - const RtpTransceiverInit&)); - MOCK_METHOD1(AddTransceiver, - RTCErrorOr>( - cricket::MediaType)); - MOCK_METHOD2(AddTransceiver, - RTCErrorOr>( - cricket::MediaType, - const RtpTransceiverInit&)); - MOCK_METHOD2(CreateSender, - rtc::scoped_refptr(const std::string&, - const std::string&)); - MOCK_CONST_METHOD0(GetSenders, - std::vector>()); - MOCK_CONST_METHOD0(GetReceivers, - std::vector>()); - MOCK_CONST_METHOD0( - GetTransceivers, - std::vector>()); - MOCK_METHOD3(GetStats, - bool(StatsObserver*, - MediaStreamTrackInterface*, - StatsOutputLevel)); - MOCK_METHOD1(GetStats, void(RTCStatsCollectorCallback*)); - MOCK_METHOD2(GetStats, - void(rtc::scoped_refptr, - rtc::scoped_refptr)); - MOCK_METHOD2(GetStats, - void(rtc::scoped_refptr, - rtc::scoped_refptr)); - MOCK_METHOD0(ClearStatsCache, void()); - MOCK_CONST_METHOD0(GetSctpTransport, - rtc::scoped_refptr()); - MOCK_METHOD2( - CreateDataChannel, - rtc::scoped_refptr(const std::string&, - const DataChannelInit*)); - MOCK_CONST_METHOD0(local_description, const SessionDescriptionInterface*()); - MOCK_CONST_METHOD0(remote_description, const SessionDescriptionInterface*()); - MOCK_CONST_METHOD0(current_local_description, - const 
SessionDescriptionInterface*()); - MOCK_CONST_METHOD0(current_remote_description, - const SessionDescriptionInterface*()); - MOCK_CONST_METHOD0(pending_local_description, - const SessionDescriptionInterface*()); - MOCK_CONST_METHOD0(pending_remote_description, - const SessionDescriptionInterface*()); - MOCK_METHOD0(RestartIce, void()); - MOCK_METHOD2(CreateOffer, - void(CreateSessionDescriptionObserver*, - const RTCOfferAnswerOptions&)); - MOCK_METHOD2(CreateAnswer, - void(CreateSessionDescriptionObserver*, - const RTCOfferAnswerOptions&)); - MOCK_METHOD2(SetLocalDescription, - void(SetSessionDescriptionObserver*, - SessionDescriptionInterface*)); - MOCK_METHOD2(SetRemoteDescription, - void(SetSessionDescriptionObserver*, - SessionDescriptionInterface*)); - MOCK_METHOD2(SetRemoteDescription, - void(std::unique_ptr, - rtc::scoped_refptr)); - MOCK_METHOD0(GetConfiguration, PeerConnectionInterface::RTCConfiguration()); - MOCK_METHOD1(SetConfiguration, - RTCError(const PeerConnectionInterface::RTCConfiguration&)); - MOCK_METHOD1(AddIceCandidate, bool(const IceCandidateInterface*)); - MOCK_METHOD1(RemoveIceCandidates, - bool(const std::vector&)); - MOCK_METHOD1(SetBitrate, RTCError(const BitrateSettings&)); - MOCK_METHOD1(SetBitrate, RTCError(const BitrateParameters&)); - MOCK_METHOD1(SetAudioPlayout, void(bool)); - MOCK_METHOD1(SetAudioRecording, void(bool)); - MOCK_METHOD1(LookupDtlsTransportByMid, - rtc::scoped_refptr(const std::string&)); - MOCK_METHOD0(signaling_state, SignalingState()); - MOCK_METHOD0(ice_connection_state, IceConnectionState()); - MOCK_METHOD0(standardized_ice_connection_state, IceConnectionState()); - MOCK_METHOD0(peer_connection_state, PeerConnectionState()); - MOCK_METHOD0(ice_gathering_state, IceGatheringState()); - MOCK_METHOD2(StartRtcEventLog, - bool(std::unique_ptr, int64_t)); - MOCK_METHOD1(StartRtcEventLog, bool(std::unique_ptr)); - MOCK_METHOD0(StopRtcEventLog, void()); - MOCK_METHOD0(Close, void()); + MOCK_METHOD(rtc::scoped_refptr, + 
local_streams, + (), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + remote_streams, + (), + (override)); + MOCK_METHOD(bool, AddStream, (MediaStreamInterface*), (override)); + MOCK_METHOD(void, RemoveStream, (MediaStreamInterface*), (override)); + MOCK_METHOD(RTCErrorOr>, + AddTrack, + (rtc::scoped_refptr, + const std::vector&), + (override)); + MOCK_METHOD(bool, RemoveTrack, (RtpSenderInterface*), (override)); + MOCK_METHOD(RTCError, + RemoveTrackNew, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (rtc::scoped_refptr, + const RtpTransceiverInit&), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (cricket::MediaType), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (cricket::MediaType, const RtpTransceiverInit&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateSender, + (const std::string&, const std::string&), + (override)); + MOCK_METHOD(std::vector>, + GetSenders, + (), + (const override)); + MOCK_METHOD(std::vector>, + GetReceivers, + (), + (const override)); + MOCK_METHOD(std::vector>, + GetTransceivers, + (), + (const override)); + MOCK_METHOD(bool, + GetStats, + (StatsObserver*, MediaStreamTrackInterface*, StatsOutputLevel), + (override)); + MOCK_METHOD(void, GetStats, (RTCStatsCollectorCallback*), (override)); + MOCK_METHOD(void, + GetStats, + (rtc::scoped_refptr, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, + GetStats, + (rtc::scoped_refptr, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, ClearStatsCache, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetSctpTransport, + (), + (const override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateDataChannel, + (const std::string&, const DataChannelInit*), + (override)); + MOCK_METHOD(const SessionDescriptionInterface*, + local_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + 
remote_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + current_local_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + current_remote_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + pending_local_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + pending_remote_description, + (), + (const override)); + MOCK_METHOD(void, RestartIce, (), (override)); + MOCK_METHOD(void, + CreateOffer, + (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&), + (override)); + MOCK_METHOD(void, + CreateAnswer, + (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&), + (override)); + MOCK_METHOD(void, + SetLocalDescription, + (SetSessionDescriptionObserver*, SessionDescriptionInterface*), + (override)); + MOCK_METHOD(void, + SetRemoteDescription, + (SetSessionDescriptionObserver*, SessionDescriptionInterface*), + (override)); + MOCK_METHOD(void, + SetRemoteDescription, + (std::unique_ptr, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(PeerConnectionInterface::RTCConfiguration, + GetConfiguration, + (), + (override)); + MOCK_METHOD(RTCError, + SetConfiguration, + (const PeerConnectionInterface::RTCConfiguration&), + (override)); + MOCK_METHOD(bool, + AddIceCandidate, + (const IceCandidateInterface*), + (override)); + MOCK_METHOD(bool, + RemoveIceCandidates, + (const std::vector&), + (override)); + MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override)); + MOCK_METHOD(void, SetAudioPlayout, (bool), (override)); + MOCK_METHOD(void, SetAudioRecording, (bool), (override)); + MOCK_METHOD(rtc::scoped_refptr, + LookupDtlsTransportByMid, + (const std::string&), + (override)); + MOCK_METHOD(SignalingState, signaling_state, (), (override)); + MOCK_METHOD(IceConnectionState, ice_connection_state, (), (override)); + MOCK_METHOD(IceConnectionState, + standardized_ice_connection_state, + 
(), + (override)); + MOCK_METHOD(PeerConnectionState, peer_connection_state, (), (override)); + MOCK_METHOD(IceGatheringState, ice_gathering_state, (), (override)); + MOCK_METHOD(absl::optional, can_trickle_ice_candidates, (), (override)); + MOCK_METHOD(bool, + StartRtcEventLog, + (std::unique_ptr, int64_t), + (override)); + MOCK_METHOD(bool, + StartRtcEventLog, + (std::unique_ptr), + (override)); + MOCK_METHOD(void, StopRtcEventLog, (), (override)); + MOCK_METHOD(void, Close, (), (override)); }; static_assert(!std::is_abstract::value, ""); diff --git a/api/test/mock_rtp_transceiver.h b/api/test/mock_rtp_transceiver.h new file mode 100644 index 0000000000..a0a08c4772 --- /dev/null +++ b/api/test/mock_rtp_transceiver.h @@ -0,0 +1,85 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_RTP_TRANSCEIVER_H_ +#define API_TEST_MOCK_RTP_TRANSCEIVER_H_ + +#include +#include + +#include "api/rtp_transceiver_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockRtpTransceiver final + : public rtc::RefCountedObject { + public: + static rtc::scoped_refptr Create() { + return new MockRtpTransceiver(); + } + + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(absl::optional, mid, (), (const, override)); + MOCK_METHOD(rtc::scoped_refptr, + sender, + (), + (const, override)); + MOCK_METHOD(rtc::scoped_refptr, + receiver, + (), + (const, override)); + MOCK_METHOD(bool, stopped, (), (const, override)); + MOCK_METHOD(bool, stopping, (), (const, override)); + MOCK_METHOD(RtpTransceiverDirection, direction, (), (const, override)); + MOCK_METHOD(void, + SetDirection, + (RtpTransceiverDirection new_direction), + (override)); + MOCK_METHOD(RTCError, + SetDirectionWithError, + (RtpTransceiverDirection new_direction), + (override)); + MOCK_METHOD(absl::optional, + current_direction, + (), + (const, override)); + MOCK_METHOD(absl::optional, + fired_direction, + (), + (const, override)); + MOCK_METHOD(RTCError, StopStandard, (), (override)); + MOCK_METHOD(void, StopInternal, (), (override)); + MOCK_METHOD(void, Stop, (), (override)); + MOCK_METHOD(RTCError, + SetCodecPreferences, + (rtc::ArrayView codecs), + (override)); + MOCK_METHOD(std::vector, + codec_preferences, + (), + (const, override)); + MOCK_METHOD(std::vector, + HeaderExtensionsToOffer, + (), + (const, override)); + MOCK_METHOD(webrtc::RTCError, + SetOfferedRtpHeaderExtensions, + (rtc::ArrayView + header_extensions_to_offer), + (override)); + + private: + MockRtpTransceiver() = default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_RTP_TRANSCEIVER_H_ diff --git a/api/test/mock_rtpreceiver.h b/api/test/mock_rtpreceiver.h index d4da90886c..a0b79e0bed 100644 --- a/api/test/mock_rtpreceiver.h +++ b/api/test/mock_rtpreceiver.h 
@@ -21,16 +21,23 @@ namespace webrtc { class MockRtpReceiver : public rtc::RefCountedObject { public: - MOCK_METHOD1(SetTrack, void(MediaStreamTrackInterface*)); - MOCK_CONST_METHOD0(track, rtc::scoped_refptr()); - MOCK_CONST_METHOD0(streams, - std::vector>()); - MOCK_CONST_METHOD0(media_type, cricket::MediaType()); - MOCK_CONST_METHOD0(id, std::string()); - MOCK_CONST_METHOD0(GetParameters, RtpParameters()); - MOCK_METHOD1(SetObserver, void(RtpReceiverObserverInterface*)); - MOCK_METHOD1(SetJitterBufferMinimumDelay, void(absl::optional)); - MOCK_CONST_METHOD0(GetSources, std::vector()); + MOCK_METHOD(rtc::scoped_refptr, + track, + (), + (const override)); + MOCK_METHOD(std::vector>, + streams, + (), + (const override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); + MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); + MOCK_METHOD(void, + SetJitterBufferMinimumDelay, + (absl::optional), + (override)); + MOCK_METHOD(std::vector, GetSources, (), (const override)); }; } // namespace webrtc diff --git a/api/test/mock_rtpsender.h b/api/test/mock_rtpsender.h index 6a656ea56e..f12a6185a6 100644 --- a/api/test/mock_rtpsender.h +++ b/api/test/mock_rtpsender.h @@ -21,16 +21,25 @@ namespace webrtc { class MockRtpSender : public rtc::RefCountedObject { public: - MOCK_METHOD1(SetTrack, bool(MediaStreamTrackInterface*)); - MOCK_CONST_METHOD0(track, rtc::scoped_refptr()); - MOCK_CONST_METHOD0(ssrc, uint32_t()); - MOCK_CONST_METHOD0(media_type, cricket::MediaType()); - MOCK_CONST_METHOD0(id, std::string()); - MOCK_CONST_METHOD0(stream_ids, std::vector()); - MOCK_CONST_METHOD0(init_send_encodings, std::vector()); - MOCK_CONST_METHOD0(GetParameters, RtpParameters()); - MOCK_METHOD1(SetParameters, RTCError(const RtpParameters&)); - MOCK_CONST_METHOD0(GetDtmfSender, rtc::scoped_refptr()); + MOCK_METHOD(bool, 
SetTrack, (MediaStreamTrackInterface*), (override)); + MOCK_METHOD(rtc::scoped_refptr, + track, + (), + (const override)); + MOCK_METHOD(uint32_t, ssrc, (), (const override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(std::vector, stream_ids, (), (const override)); + MOCK_METHOD(std::vector, + init_send_encodings, + (), + (const override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); + MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetDtmfSender, + (), + (const override)); }; } // namespace webrtc diff --git a/api/test/mock_transformable_video_frame.h b/api/test/mock_transformable_video_frame.h new file mode 100644 index 0000000000..36798b5d73 --- /dev/null +++ b/api/test/mock_transformable_video_frame.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ +#define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockTransformableVideoFrame + : public webrtc::TransformableVideoFrameInterface { + public: + MOCK_METHOD(rtc::ArrayView, GetData, (), (const override)); + MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const override)); + MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); + MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); + MOCK_METHOD(std::vector, GetAdditionalData, (), (const, override)); + MOCK_METHOD(const webrtc::VideoFrameMetadata&, + GetMetadata, + (), + (const, override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ diff --git a/api/test/mock_video_bitrate_allocator.h b/api/test/mock_video_bitrate_allocator.h index 5d21d9147b..76cf49e955 100644 --- a/api/test/mock_video_bitrate_allocator.h +++ b/api/test/mock_video_bitrate_allocator.h @@ -17,10 +17,10 @@ namespace webrtc { class MockVideoBitrateAllocator : public webrtc::VideoBitrateAllocator { - MOCK_METHOD1( - Allocate, - VideoBitrateAllocation(VideoBitrateAllocationParameters parameters)); - MOCK_METHOD1(GetPreferredBitrateBps, uint32_t(uint32_t framerate)); + MOCK_METHOD(VideoBitrateAllocation, + Allocate, + (VideoBitrateAllocationParameters parameters), + (override)); }; } // namespace webrtc diff --git a/api/test/mock_video_bitrate_allocator_factory.h b/api/test/mock_video_bitrate_allocator_factory.h index 0cae061ab7..16af191970 100644 --- a/api/test/mock_video_bitrate_allocator_factory.h +++ b/api/test/mock_video_bitrate_allocator_factory.h @@ -21,15 +21,12 @@ namespace webrtc { class MockVideoBitrateAllocatorFactory : public webrtc::VideoBitrateAllocatorFactory { public: - virtual std::unique_ptr CreateVideoBitrateAllocator( - const VideoCodec& codec) { - return 
std::unique_ptr( - CreateVideoBitrateAllocatorProxy(codec)); - } - ~MockVideoBitrateAllocatorFactory() { Die(); } - MOCK_METHOD1(CreateVideoBitrateAllocatorProxy, - VideoBitrateAllocator*(const VideoCodec&)); - MOCK_METHOD0(Die, void()); + ~MockVideoBitrateAllocatorFactory() override { Die(); } + MOCK_METHOD(std::unique_ptr, + CreateVideoBitrateAllocator, + (const VideoCodec&), + (override)); + MOCK_METHOD(void, Die, ()); }; } // namespace webrtc diff --git a/api/test/mock_video_decoder.h b/api/test/mock_video_decoder.h index e7d42098c4..faadabc4d7 100644 --- a/api/test/mock_video_decoder.h +++ b/api/test/mock_video_decoder.h @@ -18,34 +18,40 @@ namespace webrtc { class MockDecodedImageCallback : public DecodedImageCallback { public: - MockDecodedImageCallback(); - ~MockDecodedImageCallback() override; - - MOCK_METHOD1(Decoded, int32_t(VideoFrame& decodedImage)); // NOLINT - MOCK_METHOD2(Decoded, - int32_t(VideoFrame& decodedImage, // NOLINT - int64_t decode_time_ms)); - MOCK_METHOD3(Decoded, - void(VideoFrame& decodedImage, // NOLINT - absl::optional decode_time_ms, - absl::optional qp)); + MOCK_METHOD(int32_t, + Decoded, + (VideoFrame & decoded_image), // NOLINT + (override)); + MOCK_METHOD(int32_t, + Decoded, + (VideoFrame & decoded_image, // NOLINT + int64_t decode_time_ms), + (override)); + MOCK_METHOD(void, + Decoded, + (VideoFrame & decoded_image, // NOLINT + absl::optional decode_time_ms, + absl::optional qp), + (override)); }; class MockVideoDecoder : public VideoDecoder { public: - MockVideoDecoder(); - ~MockVideoDecoder() override; - - MOCK_METHOD2(InitDecode, - int32_t(const VideoCodec* codecSettings, int32_t numberOfCores)); - MOCK_METHOD3(Decode, - int32_t(const EncodedImage& inputImage, - bool missingFrames, - int64_t renderTimeMs)); - MOCK_METHOD1(RegisterDecodeCompleteCallback, - int32_t(DecodedImageCallback* callback)); - MOCK_METHOD0(Release, int32_t()); - MOCK_METHOD0(Copy, VideoDecoder*()); + MOCK_METHOD(int32_t, + InitDecode, + (const 
VideoCodec* codec_settings, int32_t number_of_cores), + (override)); + MOCK_METHOD(int32_t, + Decode, + (const EncodedImage& input_image, + bool missing_frames, + int64_t render_time_ms), + (override)); + MOCK_METHOD(int32_t, + RegisterDecodeCompleteCallback, + (DecodedImageCallback * callback), + (override)); + MOCK_METHOD(int32_t, Release, (), (override)); }; } // namespace webrtc diff --git a/api/test/mock_video_decoder_factory.h b/api/test/mock_video_decoder_factory.h index 915e3911f0..98a5d40eb6 100644 --- a/api/test/mock_video_decoder_factory.h +++ b/api/test/mock_video_decoder_factory.h @@ -22,20 +22,17 @@ namespace webrtc { class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory { public: - MOCK_CONST_METHOD0(GetSupportedFormats, - std::vector()); - - // We need to proxy to a return type that is copyable. - std::unique_ptr CreateVideoDecoder( - const webrtc::SdpVideoFormat& format) { - return std::unique_ptr( - CreateVideoDecoderProxy(format)); - } - MOCK_METHOD1(CreateVideoDecoderProxy, - webrtc::VideoDecoder*(const webrtc::SdpVideoFormat&)); - - MOCK_METHOD0(Die, void()); - ~MockVideoDecoderFactory() { Die(); } + ~MockVideoDecoderFactory() override { Die(); } + + MOCK_METHOD(std::vector, + GetSupportedFormats, + (), + (const, override)); + MOCK_METHOD(std::unique_ptr, + CreateVideoDecoder, + (const webrtc::SdpVideoFormat&), + (override)); + MOCK_METHOD(void, Die, ()); }; } // namespace webrtc diff --git a/api/test/mock_video_encoder.h b/api/test/mock_video_encoder.h index 65de14f98b..11e0f64b3f 100644 --- a/api/test/mock_video_encoder.h +++ b/api/test/mock_video_encoder.h @@ -20,38 +20,52 @@ namespace webrtc { class MockEncodedImageCallback : public EncodedImageCallback { public: - MockEncodedImageCallback(); - ~MockEncodedImageCallback(); - MOCK_METHOD3(OnEncodedImage, - Result(const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo, - const RTPFragmentationHeader* fragmentation)); + MOCK_METHOD(Result, + OnEncodedImage, 
+ (const EncodedImage&, const CodecSpecificInfo*), + (override)); + MOCK_METHOD(void, OnDroppedFrame, (DropReason reason), (override)); }; class MockVideoEncoder : public VideoEncoder { public: - MockVideoEncoder(); - ~MockVideoEncoder(); - MOCK_METHOD1(SetFecControllerOverride, - void(FecControllerOverride* fec_controller_override)); - MOCK_CONST_METHOD2(Version, int32_t(int8_t* version, int32_t length)); - MOCK_METHOD3(InitEncode, - int32_t(const VideoCodec* codecSettings, - int32_t numberOfCores, - size_t maxPayloadSize)); - MOCK_METHOD2(InitEncode, - int32_t(const VideoCodec* codecSettings, - const VideoEncoder::Settings& settings)); - - MOCK_METHOD2(Encode, - int32_t(const VideoFrame& inputImage, - const std::vector* frame_types)); - MOCK_METHOD1(RegisterEncodeCompleteCallback, - int32_t(EncodedImageCallback* callback)); - MOCK_METHOD0(Release, int32_t()); - MOCK_METHOD0(Reset, int32_t()); - MOCK_METHOD1(SetRates, void(const RateControlParameters& parameters)); - MOCK_CONST_METHOD0(GetEncoderInfo, EncoderInfo(void)); + MOCK_METHOD(void, + SetFecControllerOverride, + (FecControllerOverride*), + (override)); + MOCK_METHOD(int32_t, + InitEncode, + (const VideoCodec*, int32_t numberOfCores, size_t maxPayloadSize), + (override)); + MOCK_METHOD(int32_t, + InitEncode, + (const VideoCodec*, const VideoEncoder::Settings& settings), + (override)); + + MOCK_METHOD(int32_t, + Encode, + (const VideoFrame& inputImage, + const std::vector*), + (override)); + MOCK_METHOD(int32_t, + RegisterEncodeCompleteCallback, + (EncodedImageCallback*), + (override)); + MOCK_METHOD(int32_t, Release, (), (override)); + MOCK_METHOD(void, + SetRates, + (const RateControlParameters& parameters), + (override)); + MOCK_METHOD(void, + OnPacketLossRateUpdate, + (float packet_loss_rate), + (override)); + MOCK_METHOD(void, OnRttUpdate, (int64_t rtt_ms), (override)); + MOCK_METHOD(void, + OnLossNotification, + (const LossNotification& loss_notification), + (override)); + MOCK_METHOD(EncoderInfo, 
GetEncoderInfo, (), (const, override)); }; } // namespace webrtc diff --git a/api/test/mock_video_encoder_factory.h b/api/test/mock_video_encoder_factory.h index a694b636e0..1aa14631be 100644 --- a/api/test/mock_video_encoder_factory.h +++ b/api/test/mock_video_encoder_factory.h @@ -22,22 +22,22 @@ namespace webrtc { class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory { public: - MOCK_CONST_METHOD0(GetSupportedFormats, - std::vector()); - MOCK_CONST_METHOD1(QueryVideoEncoder, - CodecInfo(const webrtc::SdpVideoFormat&)); - - // We need to proxy to a return type that is copyable. - std::unique_ptr CreateVideoEncoder( - const webrtc::SdpVideoFormat& format) { - return std::unique_ptr( - CreateVideoEncoderProxy(format)); - } - MOCK_METHOD1(CreateVideoEncoderProxy, - webrtc::VideoEncoder*(const webrtc::SdpVideoFormat&)); - - MOCK_METHOD0(Die, void()); - ~MockVideoEncoderFactory() { Die(); } + ~MockVideoEncoderFactory() override { Die(); } + + MOCK_METHOD(std::vector, + GetSupportedFormats, + (), + (const, override)); + MOCK_METHOD(CodecInfo, + QueryVideoEncoder, + (const SdpVideoFormat&), + (const, override)); + MOCK_METHOD(std::unique_ptr, + CreateVideoEncoder, + (const SdpVideoFormat&), + (override)); + + MOCK_METHOD(void, Die, ()); }; } // namespace webrtc diff --git a/api/test/neteq_simulator_factory.cc b/api/test/neteq_simulator_factory.cc index fe056be4ea..82b27e546d 100644 --- a/api/test/neteq_simulator_factory.cc +++ b/api/test/neteq_simulator_factory.cc @@ -21,6 +21,24 @@ namespace webrtc { namespace test { +namespace { +NetEqTestFactory::Config convertConfig( + const NetEqSimulatorFactory::Config& simulation_config, + absl::string_view replacement_audio_filename) { + NetEqTestFactory::Config config; + config.replacement_audio_file = std::string(replacement_audio_filename); + config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; + config.initial_dummy_packets = simulation_config.initial_dummy_packets; + 
config.skip_get_audio_events = simulation_config.skip_get_audio_events; + config.field_trial_string = simulation_config.field_trial_string; + config.output_audio_filename = simulation_config.output_audio_filename; + config.pythonplot = simulation_config.python_plot_filename.has_value(); + config.plot_scripts_basename = simulation_config.python_plot_filename; + config.textlog = simulation_config.text_log_filename.has_value(); + config.textlog_filename = simulation_config.text_log_filename; + return config; +} +} // namespace NetEqSimulatorFactory::NetEqSimulatorFactory() : factory_(std::make_unique()) {} @@ -31,12 +49,8 @@ std::unique_ptr NetEqSimulatorFactory::CreateSimulatorFromFile( absl::string_view event_log_filename, absl::string_view replacement_audio_filename, Config simulation_config) { - NetEqTestFactory::Config config; - config.replacement_audio_file = std::string(replacement_audio_filename); - config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; - config.initial_dummy_packets = simulation_config.initial_dummy_packets; - config.skip_get_audio_events = simulation_config.skip_get_audio_events; - config.field_trial_string = simulation_config.field_trial_string; + NetEqTestFactory::Config config = + convertConfig(simulation_config, replacement_audio_filename); return factory_->InitializeTestFromFile( std::string(event_log_filename), simulation_config.neteq_factory, config); } @@ -46,12 +60,8 @@ NetEqSimulatorFactory::CreateSimulatorFromString( absl::string_view event_log_file_contents, absl::string_view replacement_audio_filename, Config simulation_config) { - NetEqTestFactory::Config config; - config.replacement_audio_file = std::string(replacement_audio_filename); - config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; - config.initial_dummy_packets = simulation_config.initial_dummy_packets; - config.skip_get_audio_events = simulation_config.skip_get_audio_events; - config.field_trial_string = 
simulation_config.field_trial_string; + NetEqTestFactory::Config config = + convertConfig(simulation_config, replacement_audio_filename); return factory_->InitializeTestFromString( std::string(event_log_file_contents), simulation_config.neteq_factory, config); diff --git a/api/test/neteq_simulator_factory.h b/api/test/neteq_simulator_factory.h index 925b4f4c03..2a716e665e 100644 --- a/api/test/neteq_simulator_factory.h +++ b/api/test/neteq_simulator_factory.h @@ -15,6 +15,7 @@ #include #include "absl/strings/string_view.h" +#include "absl/types/optional.h" #include "api/neteq/neteq_factory.h" #include "api/test/neteq_simulator.h" @@ -41,6 +42,12 @@ class NetEqSimulatorFactory { int skip_get_audio_events = 0; // A WebRTC field trial string to be used during the simulation. std::string field_trial_string; + // A filename for the generated output audio file. + absl::optional output_audio_filename; + // A filename for the python plot. + absl::optional python_plot_filename; + // A filename for the text log. + absl::optional text_log_filename; // A custom NetEqFactory can be used. 
NetEqFactory* neteq_factory = nullptr; }; diff --git a/api/test/network_emulation/BUILD.gn b/api/test/network_emulation/BUILD.gn index 5fda1e288a..fb7bedc003 100644 --- a/api/test/network_emulation/BUILD.gn +++ b/api/test/network_emulation/BUILD.gn @@ -17,12 +17,14 @@ rtc_library("network_emulation") { ] deps = [ + "../..:array_view", "../../../rtc_base", "../../../rtc_base:checks", "../../../rtc_base:rtc_base_approved", + "../../numerics", "../../units:data_rate", "../../units:data_size", "../../units:timestamp", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git a/api/test/network_emulation/network_emulation_interfaces.cc b/api/test/network_emulation/network_emulation_interfaces.cc index e023334af7..ac2eb1d971 100644 --- a/api/test/network_emulation/network_emulation_interfaces.cc +++ b/api/test/network_emulation/network_emulation_interfaces.cc @@ -9,17 +9,9 @@ */ #include "api/test/network_emulation/network_emulation_interfaces.h" -namespace webrtc { - -namespace { -constexpr int kIPv4HeaderSize = 20; -constexpr int kIPv6HeaderSize = 40; -constexpr int kUdpHeaderSize = 8; -int IpHeaderSize(const rtc::SocketAddress& address) { - return (address.family() == AF_INET) ? 
kIPv4HeaderSize : kIPv6HeaderSize; -} -} // namespace +#include "rtc_base/net_helper.h" +namespace webrtc { EmulatedIpPacket::EmulatedIpPacket(const rtc::SocketAddress& from, const rtc::SocketAddress& to, rtc::CopyOnWriteBuffer data, @@ -28,7 +20,8 @@ EmulatedIpPacket::EmulatedIpPacket(const rtc::SocketAddress& from, : from(from), to(to), data(data), - headers_size(IpHeaderSize(to) + application_overhead + kUdpHeaderSize), + headers_size(to.ipaddr().overhead() + application_overhead + + cricket::kUdpHeaderSize), arrival_time(arrival_time) { RTC_DCHECK(to.family() == AF_INET || to.family() == AF_INET6); } diff --git a/api/test/network_emulation/network_emulation_interfaces.h b/api/test/network_emulation/network_emulation_interfaces.h index 0986df4a08..36fb996549 100644 --- a/api/test/network_emulation/network_emulation_interfaces.h +++ b/api/test/network_emulation/network_emulation_interfaces.h @@ -10,7 +10,13 @@ #ifndef API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ #define API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ +#include +#include +#include + #include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" @@ -56,40 +62,145 @@ class EmulatedNetworkReceiverInterface { virtual void OnPacketReceived(EmulatedIpPacket packet) = 0; }; -struct EmulatedNetworkStats { - int64_t packets_sent = 0; - DataSize bytes_sent = DataSize::Zero(); +class EmulatedNetworkOutgoingStats { + public: + virtual ~EmulatedNetworkOutgoingStats() = default; + + virtual int64_t PacketsSent() const = 0; + + virtual DataSize BytesSent() const = 0; + + // Returns the timestamped sizes of all sent packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. 
+ virtual const SamplesStatsCounter& SentPacketsSizeCounter() const = 0; + + virtual DataSize FirstSentPacketSize() const = 0; + + // Returns time of the first packet sent or infinite value if no packets were + // sent. + virtual Timestamp FirstPacketSentTime() const = 0; + + // Returns time of the last packet sent or infinite value if no packets were + // sent. + virtual Timestamp LastPacketSentTime() const = 0; + + // Returns average send rate. Requires that at least 2 packets were sent. + virtual DataRate AverageSendRate() const = 0; +}; + +class EmulatedNetworkIncomingStats { + public: + virtual ~EmulatedNetworkIncomingStats() = default; + // Total amount of packets received with or without destination. - int64_t packets_received = 0; + virtual int64_t PacketsReceived() const = 0; // Total amount of bytes in received packets. - DataSize bytes_received = DataSize::Zero(); + virtual DataSize BytesReceived() const = 0; + // Returns the timestamped sizes of all received packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& ReceivedPacketsSizeCounter() const = 0; // Total amount of packets that were received, but no destination was found. - int64_t packets_dropped = 0; + virtual int64_t PacketsDropped() const = 0; // Total amount of bytes in dropped packets. 
- DataSize bytes_dropped = DataSize::Zero(); - - DataSize first_received_packet_size = DataSize::Zero(); - DataSize first_sent_packet_size = DataSize::Zero(); - - Timestamp first_packet_sent_time = Timestamp::PlusInfinity(); - Timestamp last_packet_sent_time = Timestamp::PlusInfinity(); - Timestamp first_packet_received_time = Timestamp::PlusInfinity(); - Timestamp last_packet_received_time = Timestamp::PlusInfinity(); - - DataRate AverageSendRate() const { - RTC_DCHECK_GE(packets_sent, 2); - return (bytes_sent - first_sent_packet_size) / - (last_packet_sent_time - first_packet_sent_time); - } - DataRate AverageReceiveRate() const { - RTC_DCHECK_GE(packets_received, 2); - return (bytes_received - first_received_packet_size) / - (last_packet_received_time - first_packet_received_time); - } + virtual DataSize BytesDropped() const = 0; + // Returns the timestamped sizes of all packets that were received, + // but no destination was found if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& DroppedPacketsSizeCounter() const = 0; + + virtual DataSize FirstReceivedPacketSize() const = 0; + + // Returns time of the first packet received or infinite value if no packets + // were received. + virtual Timestamp FirstPacketReceivedTime() const = 0; + + // Returns time of the last packet received or infinite value if no packets + // were received. + virtual Timestamp LastPacketReceivedTime() const = 0; + + virtual DataRate AverageReceiveRate() const = 0; +}; + +class EmulatedNetworkStats { + public: + virtual ~EmulatedNetworkStats() = default; + + // List of IP addresses that were used to send data considered in this stats + // object. 
+ virtual std::vector LocalAddresses() const = 0; + + virtual int64_t PacketsSent() const = 0; + + virtual DataSize BytesSent() const = 0; + // Returns the timestamped sizes of all sent packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& SentPacketsSizeCounter() const = 0; + // Returns the timestamped duration between packet was received on + // network interface and was dispatched to the network in microseconds if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& SentPacketsQueueWaitTimeUs() const = 0; + + virtual DataSize FirstSentPacketSize() const = 0; + // Returns time of the first packet sent or infinite value if no packets were + // sent. + virtual Timestamp FirstPacketSentTime() const = 0; + // Returns time of the last packet sent or infinite value if no packets were + // sent. + virtual Timestamp LastPacketSentTime() const = 0; + + virtual DataRate AverageSendRate() const = 0; + // Total amount of packets received regardless of the destination address. + virtual int64_t PacketsReceived() const = 0; + // Total amount of bytes in received packets. + virtual DataSize BytesReceived() const = 0; + // Returns the timestamped sizes of all received packets if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& ReceivedPacketsSizeCounter() const = 0; + // Total amount of packets that were received, but no destination was found. 
+ virtual int64_t PacketsDropped() const = 0; + // Total amount of bytes in dropped packets. + virtual DataSize BytesDropped() const = 0; + // Returns counter with timestamped sizes of all packets that were received, + // but no destination was found if + // EmulatedEndpointConfig::stats_gatherming_mode was set to + // StatsGatheringMode::kDebug; otherwise, the returned value will be empty. + // Returned reference is valid until the next call to a non-const method. + virtual const SamplesStatsCounter& DroppedPacketsSizeCounter() const = 0; + + virtual DataSize FirstReceivedPacketSize() const = 0; + // Returns time of the first packet received or infinite value if no packets + // were received. + virtual Timestamp FirstPacketReceivedTime() const = 0; + // Returns time of the last packet received or infinite value if no packets + // were received. + virtual Timestamp LastPacketReceivedTime() const = 0; + + virtual DataRate AverageReceiveRate() const = 0; + + virtual std::map> + OutgoingStatsPerDestination() const = 0; + + virtual std::map> + IncomingStatsPerSource() const = 0; }; // EmulatedEndpoint is an abstraction for network interface on device. Instances -// of this are created by NetworkEmulationManager::CreateEndpoint. +// of this are created by NetworkEmulationManager::CreateEndpoint and +// thread safe. class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { public: // Send packet into network. @@ -117,8 +228,6 @@ class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { virtual void UnbindReceiver(uint16_t port) = 0; virtual rtc::IPAddress GetPeerLocalAddress() const = 0; - virtual EmulatedNetworkStats stats() = 0; - private: // Ensure that there can be no other subclass than EmulatedEndpointImpl. 
This // means that it's always safe to downcast EmulatedEndpoint instances to diff --git a/api/test/network_emulation_manager.h b/api/test/network_emulation_manager.h index 3e9cf113d2..58ee3bfd1a 100644 --- a/api/test/network_emulation_manager.h +++ b/api/test/network_emulation_manager.h @@ -11,9 +11,11 @@ #ifndef API_TEST_NETWORK_EMULATION_MANAGER_H_ #define API_TEST_NETWORK_EMULATION_MANAGER_H_ +#include #include #include +#include "api/array_view.h" #include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" @@ -44,6 +46,13 @@ class EmulatedRoute; struct EmulatedEndpointConfig { enum class IpAddressFamily { kIpv4, kIpv6 }; + enum class StatsGatheringMode { + // Gather main network stats counters. + kDefault, + // kDefault + also gather per packet statistics. In this mode more memory + // will be used. + kDebug + }; IpAddressFamily generated_ip_family = IpAddressFamily::kIpv4; // If specified will be used as IP address for endpoint node. Must be unique @@ -54,6 +63,7 @@ struct EmulatedEndpointConfig { bool start_as_enabled = true; // Network type which will be used to represent endpoint to WebRTC. rtc::AdapterType type = rtc::AdapterType::ADAPTER_TYPE_UNKNOWN; + StatsGatheringMode stats_gathering_mode = StatsGatheringMode::kDefault; }; @@ -64,12 +74,24 @@ class EmulatedNetworkManagerInterface { public: virtual ~EmulatedNetworkManagerInterface() = default; + // Returns non-null pointer to thread that have to be used as network thread + // for WebRTC to properly setup network emulation. Returned thread is owned + // by EmulatedNetworkManagerInterface implementation. virtual rtc::Thread* network_thread() = 0; + // Returns non-null pointer to network manager that have to be injected into + // WebRTC to properly setup network emulation. Returned manager is owned by + // EmulatedNetworkManagerInterface implementation. 
virtual rtc::NetworkManager* network_manager() = 0; + // Returns list of endpoints that are associated with this instance. Pointers + // are guaranteed to be non-null and are owned by NetworkEmulationManager. + virtual std::vector endpoints() const = 0; - // Returns summarized network stats for endpoints for this manager. + // Passes summarized network stats for endpoints for this manager into + // specified |stats_callback|. Callback will be executed on network emulation + // internal task queue. virtual void GetStats( - std::function stats_callback) const = 0; + std::function)> stats_callback) + const = 0; }; enum class TimeMode { kRealTime, kSimulated }; @@ -180,6 +202,14 @@ class NetworkEmulationManager { virtual EmulatedNetworkManagerInterface* CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) = 0; + + // Passes summarized network stats for specified |endpoints| into specified + // |stats_callback|. Callback will be executed on network emulation + // internal task queue. 
+ virtual void GetStats( + rtc::ArrayView endpoints, + std::function)> + stats_callback) = 0; }; } // namespace webrtc diff --git a/api/test/peerconnection_quality_test_fixture.h b/api/test/peerconnection_quality_test_fixture.h index 7e9282b2ad..f370478956 100644 --- a/api/test/peerconnection_quality_test_fixture.h +++ b/api/test/peerconnection_quality_test_fixture.h @@ -17,19 +17,23 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" #include "api/async_resolver_factory.h" #include "api/call/call_factory_interface.h" #include "api/fec_controller.h" #include "api/function_view.h" +#include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/rtp_parameters.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/audio_quality_analyzer_interface.h" #include "api/test/frame_generator_interface.h" #include "api/test/simulated_network.h" #include "api/test/stats_observer_interface.h" +#include "api/test/track_id_stream_info_map.h" #include "api/test/video_quality_analyzer_interface.h" -#include "api/transport/media/media_transport_interface.h" #include "api/transport/network_control.h" #include "api/units/time_delta.h" #include "api/video_codecs/video_decoder_factory.h" @@ -50,6 +54,12 @@ constexpr size_t kDefaultSlidesHeight = 1110; // API is in development. Can be changed/removed without notice. class PeerConnectionE2EQualityTestFixture { public: + // The index of required capturing device in OS provided list of video + // devices. On Linux and Windows the list will be obtained via + // webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via + // [RTCCameraVideoCapturer captureDevices]. + enum class CapturingDeviceIndex : size_t {}; + // Contains parameters for screen share scrolling. 
// // If scrolling is enabled, then it will be done by putting sliding window @@ -111,13 +121,8 @@ class PeerConnectionE2EQualityTestFixture { // must be equal to |kDefaultSlidesWidth| and // |ScrollingParams::source_height| must be equal to |kDefaultSlidesHeight|. std::vector slides_yuv_file_names; - // If true will set VideoTrackInterface::ContentHint::kText for current - // video track. - bool use_text_content_hint = true; }; - enum VideoGeneratorType { kDefault, kI420A, kI010 }; - // Config for Vp8 simulcast or Vp9 SVC testing. // // SVC support is limited: @@ -131,6 +136,10 @@ class PeerConnectionE2EQualityTestFixture { // available layer and won't restore lower layers, so analyzer won't // receive required data which will cause wrong results or test failures. struct VideoSimulcastConfig { + explicit VideoSimulcastConfig(int simulcast_streams_count) + : simulcast_streams_count(simulcast_streams_count) { + RTC_CHECK_GT(simulcast_streams_count, 1); + } VideoSimulcastConfig(int simulcast_streams_count, int target_spatial_index) : simulcast_streams_count(simulcast_streams_count), target_spatial_index(target_spatial_index) { @@ -152,7 +161,18 @@ class PeerConnectionE2EQualityTestFixture { // in such case |target_spatial_index| will specify the top interesting // spatial layer and all layers below, including target one will be // processed. All layers above target one will be dropped. - int target_spatial_index; + // If not specified than whatever stream will be received will be analyzed. + // It requires Selective Forwarding Unit (SFU) to be configured in the + // network. + absl::optional target_spatial_index; + + // Encoding parameters per simulcast layer. If not empty, |encoding_params| + // size have to be equal to |simulcast_streams_count|. Will be used to set + // transceiver send encoding params for simulcast layers. Applicable only + // for codecs that support simulcast (ex. Vp8) and will be ignored + // otherwise. 
RtpEncodingParameters::rid may be changed by fixture + // implementation to ensure signaling correctness. + std::vector encoding_params; }; // Contains properties of single video stream. @@ -168,27 +188,9 @@ class PeerConnectionE2EQualityTestFixture { // Have to be unique among all specified configs for all peers in the call. // Will be auto generated if omitted. absl::optional stream_label; - // You can specify one of |generator|, |input_file_name|, - // |screen_share_config| and |capturing_device_index|. - // If none of them are specified: - // * If config is added to the PeerConfigurer without specifying any video - // source, then |generator| will be set to VideoGeneratorType::kDefault. - // * If config is added with own video source implementation, then that - // video source will be used. - - // If specified generator of this type will be used to produce input video. - absl::optional generator; - // If specified this file will be used as input. Input video will be played - // in a circle. - absl::optional input_file_name; - // If specified screen share video stream will be created as input. - absl::optional screen_share_config; - // If specified this capturing device will be used to get input video. The - // |capturing_device_index| is the index of required capturing device in OS - // provided list of video devices. On Linux and Windows the list will be - // obtained via webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via - // [RTCCameraVideoCapturer captureDevices]. - absl::optional capturing_device_index; + // Will be set for current video track. If equals to kText or kDetailed - + // screencast in on. + absl::optional content_hint; // If presented video will be transfered in simulcast/SVC mode depending on // which encoder is used. // @@ -202,7 +204,7 @@ class PeerConnectionE2EQualityTestFixture { // each RtpEncodingParameters of RtpParameters of corresponding // RtpSenderInterface for this video stream. 
absl::optional temporal_layers_count; - // Sets the maxiumum encode bitrate in bps. If this value is not set, the + // Sets the maximum encode bitrate in bps. If this value is not set, the // encoder will be capped at an internal maximum value around 2 Mbps // depending on the resolution. This means that it will never be able to // utilize a high bandwidth link. @@ -225,6 +227,10 @@ class PeerConnectionE2EQualityTestFixture { absl::optional output_dump_file_name; // If true will display input and output video on the user's screen. bool show_on_screen = false; + // If specified, determines a sync group to which this video stream belongs. + // According to bugs.webrtc.org/4762 WebRTC supports synchronization only + // for pair of single audio and single video stream. + absl::optional sync_group; }; // Contains properties for audio in the call. @@ -248,6 +254,10 @@ class PeerConnectionE2EQualityTestFixture { cricket::AudioOptions audio_options; // Sampling frequency of input audio data (from file or generated). int sampling_frequency_in_hz = 48000; + // If specified, determines a sync group to which this audio stream belongs. + // According to bugs.webrtc.org/4762 WebRTC supports synchronization only + // for pair of single audio and single video stream. + absl::optional sync_group; }; // This class is used to fully configure one peer inside the call. @@ -255,6 +265,11 @@ class PeerConnectionE2EQualityTestFixture { public: virtual ~PeerConfigurer() = default; + // Sets peer name that will be used to report metrics related to this peer. + // If not set, some default name will be assigned. All names have to be + // unique. + virtual PeerConfigurer* SetName(absl::string_view name) = 0; + // The parameters of the following 9 methods will be passed to the // PeerConnectionFactoryInterface implementation that will be created for // this peer. 
@@ -270,8 +285,6 @@ class PeerConnectionE2EQualityTestFixture { virtual PeerConfigurer* SetNetworkControllerFactory( std::unique_ptr network_controller_factory) = 0; - virtual PeerConfigurer* SetMediaTransportFactory( - std::unique_ptr media_transport_factory) = 0; virtual PeerConfigurer* SetVideoEncoderFactory( std::unique_ptr video_encoder_factory) = 0; virtual PeerConfigurer* SetVideoDecoderFactory( @@ -295,12 +308,18 @@ class PeerConnectionE2EQualityTestFixture { std::unique_ptr factory) = 0; // Add new video stream to the call that will be sent from this peer. + // Default implementation of video frames generator will be used. virtual PeerConfigurer* AddVideoConfig(VideoConfig config) = 0; // Add new video stream to the call that will be sent from this peer with // provided own implementation of video frames generator. virtual PeerConfigurer* AddVideoConfig( VideoConfig config, std::unique_ptr generator) = 0; + // Add new video stream to the call that will be sent from this peer. + // Capturing device with specified index will be used to get input video. + virtual PeerConfigurer* AddVideoConfig( + VideoConfig config, + CapturingDeviceIndex capturing_device_index) = 0; // Set the audio stream for the call from this peer. If this method won't // be invoked, this peer will send no audio. virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0; @@ -314,15 +333,15 @@ class PeerConnectionE2EQualityTestFixture { PeerConnectionInterface::RTCConfiguration configuration) = 0; // Set bitrate parameters on PeerConnection. This constraints will be // applied to all summed RTP streams for this peer. - virtual PeerConfigurer* SetBitrateParameters( - PeerConnectionInterface::BitrateParameters bitrate_params) = 0; + virtual PeerConfigurer* SetBitrateSettings( + BitrateSettings bitrate_settings) = 0; }; // Contains configuration for echo emulator. struct EchoEmulationConfig { // Delay which represents the echo path delay, i.e. 
how soon rendered signal // should reach capturer. - TimeDelta echo_delay = TimeDelta::ms(50); + TimeDelta echo_delay = TimeDelta::Millis(50); }; struct VideoCodecConfig { @@ -356,30 +375,13 @@ class PeerConnectionE2EQualityTestFixture { // it will be shut downed. TimeDelta run_duration; - // Deprecated. Use |video_codecs| instead. - // Next two fields are used to specify concrete video codec, that should be - // used in the test. Video code will be negotiated in SDP during offer/ - // answer exchange. - // Video codec name. You can find valid names in - // media/base/media_constants.h - std::string video_codec_name = cricket::kVp8CodecName; - // Deprecated. Use |video_codecs| instead. - // Map of parameters, that have to be specified on SDP codec. Each parameter - // is described by key and value. Codec parameters will match the specified - // map if and only if for each key from |video_codec_required_params| there - // will be a parameter with name equal to this key and parameter value will - // be equal to the value from |video_codec_required_params| for this key. - // If empty then only name will be used to match the codec. - std::map video_codec_required_params; // List of video codecs to use during the test. These codecs will be // negotiated in SDP during offer/answer exchange. The order of these codecs // during negotiation will be the same as in |video_codecs|. Codecs have // to be available in codecs list provided by peer connection to be // negotiated. If some of specified codecs won't be found, the test will // crash. - // TODO(titovartem) replace with Vp8 will be used as default after cleanup. - // If list is empty |video_codec_name| and |video_codec_required_params| - // will be used. + // If list is empty Vp8 with no required_params will be used. 
std::vector video_codecs; bool use_ulp_fec = false; bool use_flex_fec = false; @@ -406,7 +408,14 @@ class PeerConnectionE2EQualityTestFixture { // Invoked by framework after peer connection factory and peer connection // itself will be created but before offer/answer exchange will be started. - virtual void Start(absl::string_view test_case_name) = 0; + // |test_case_name| is name of test case, that should be used to report all + // metrics. + // |reporter_helper| is a pointer to a class that will allow track_id to + // stream_id matching. The caller is responsible for ensuring the + // TrackIdStreamInfoMap will be valid from Start() to + // StopAndReportResults(). + virtual void Start(absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) = 0; // Invoked by framework after call is ended and peer connection factory and // peer connection are destroyed. @@ -442,6 +451,12 @@ class PeerConnectionE2EQualityTestFixture { virtual void AddPeer(rtc::Thread* network_thread, rtc::NetworkManager* network_manager, rtc::FunctionView configurer) = 0; + // Runs the media quality test, which includes setting up the call with + // configured participants, running it according to provided |run_params| and + // terminating it properly at the end. During call duration media quality + // metrics are gathered, which are then reported to stdout and (if configured) + // to the json/protobuf output file through the WebRTC perf test results + // reporting system. 
virtual void Run(RunParams run_params) = 0; // Returns real test duration - the time of test execution measured during diff --git a/api/test/simulated_network.h b/api/test/simulated_network.h index 7612923c2b..3fba61f74d 100644 --- a/api/test/simulated_network.h +++ b/api/test/simulated_network.h @@ -19,7 +19,6 @@ #include #include "absl/types/optional.h" -#include "rtc_base/critical_section.h" #include "rtc_base/random.h" #include "rtc_base/thread_annotations.h" @@ -87,6 +86,8 @@ class SimulatedNetworkInterface : public NetworkBehaviorInterface { public: // Sets a new configuration. This won't affect packets already in the pipe. virtual void SetConfig(const BuiltInNetworkBehaviorConfig& config) = 0; + virtual void UpdateConfig( + std::function config_modifier) = 0; virtual void PauseTransmissionUntil(int64_t until_us) = 0; }; diff --git a/api/test/simulcast_test_fixture.h b/api/test/simulcast_test_fixture.h index 5270d13306..cd470703c3 100644 --- a/api/test/simulcast_test_fixture.h +++ b/api/test/simulcast_test_fixture.h @@ -34,6 +34,8 @@ class SimulcastTestFixture { virtual void TestSpatioTemporalLayers321PatternEncoder() = 0; virtual void TestStrideEncodeDecode() = 0; virtual void TestDecodeWidthHeightSet() = 0; + virtual void + TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation() = 0; }; } // namespace test diff --git a/api/test/stats_observer_interface.h b/api/test/stats_observer_interface.h index 98c8dd937f..ea4d6c23db 100644 --- a/api/test/stats_observer_interface.h +++ b/api/test/stats_observer_interface.h @@ -11,9 +11,8 @@ #ifndef API_TEST_STATS_OBSERVER_INTERFACE_H_ #define API_TEST_STATS_OBSERVER_INTERFACE_H_ -#include - -#include "api/stats_types.h" +#include "absl/strings/string_view.h" +#include "api/stats/rtc_stats_report.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -25,8 +24,9 @@ class StatsObserverInterface { // Method called when stats reports are available for the PeerConnection // identified by |pc_label|. 
- virtual void OnStatsReports(const std::string& pc_label, - const StatsReports& reports) = 0; + virtual void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) = 0; }; } // namespace webrtc_pc_e2e diff --git a/api/test/test_dependency_factory.cc b/api/test/test_dependency_factory.cc index e72f55aab5..41ad70cc3f 100644 --- a/api/test/test_dependency_factory.cc +++ b/api/test/test_dependency_factory.cc @@ -14,22 +14,24 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/platform_thread_types.h" namespace webrtc { +namespace { // This checks everything in this file gets called on the same thread. It's // static because it needs to look at the static methods too. -rtc::ThreadChecker* GetThreadChecker() { - static rtc::ThreadChecker checker; - return &checker; +bool IsValidTestDependencyFactoryThread() { + const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef(); + return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef()); } +} // namespace std::unique_ptr TestDependencyFactory::instance_ = nullptr; const TestDependencyFactory& TestDependencyFactory::GetInstance() { - RTC_DCHECK(GetThreadChecker()->IsCurrent()); + RTC_DCHECK(IsValidTestDependencyFactoryThread()); if (instance_ == nullptr) { instance_ = std::make_unique(); } @@ -38,14 +40,14 @@ const TestDependencyFactory& TestDependencyFactory::GetInstance() { void TestDependencyFactory::SetInstance( std::unique_ptr instance) { - RTC_DCHECK(GetThreadChecker()->IsCurrent()); + RTC_DCHECK(IsValidTestDependencyFactoryThread()); RTC_CHECK(instance_ == nullptr); instance_ = std::move(instance); } std::unique_ptr TestDependencyFactory::CreateComponents() const { - RTC_DCHECK(GetThreadChecker()->IsCurrent()); + RTC_DCHECK(IsValidTestDependencyFactoryThread()); return nullptr; } diff --git a/api/test/time_controller.cc b/api/test/time_controller.cc index b3b2f463c5..364dbc235d 100644 --- a/api/test/time_controller.cc +++ 
b/api/test/time_controller.cc @@ -26,18 +26,18 @@ std::unique_ptr TimeController::CreateTaskQueueFactory() { }; return std::make_unique(GetTaskQueueFactory()); } -bool TimeController::Wait(const std::function& done, +bool TimeController::Wait(const std::function& condition, TimeDelta max_duration) { // Step size is chosen to be short enough to not significantly affect latency // in real time tests while being long enough to avoid adding too much load to // the system. - const auto kStep = TimeDelta::ms(5); + const auto kStep = TimeDelta::Millis(5); for (auto elapsed = TimeDelta::Zero(); elapsed < max_duration; elapsed += kStep) { - if (done()) + if (condition()) return true; AdvanceTime(kStep); } - return done(); + return condition(); } } // namespace webrtc diff --git a/api/test/time_controller.h b/api/test/time_controller.h index aa69c5200c..bd3192ddf2 100644 --- a/api/test/time_controller.h +++ b/api/test/time_controller.h @@ -46,6 +46,7 @@ class TimeController { const char* thread_name) = 0; // Creates an rtc::Thread instance. If |socket_server| is nullptr, a default // noop socket server is created. + // Returned thread is not null and started. virtual std::unique_ptr CreateThread( const std::string& name, std::unique_ptr socket_server = nullptr) = 0; @@ -57,9 +58,12 @@ class TimeController { // for the given |duration|. virtual void AdvanceTime(TimeDelta duration) = 0; - // Waits until done() == true, polling done() in small time intervals. - bool Wait(const std::function& done, - TimeDelta max_duration = TimeDelta::seconds(5)); + // Waits until condition() == true, polling condition() in small time + // intervals. + // Returns true if condition() was evaluated to true before |max_duration| + // elapsed and false otherwise. 
+ bool Wait(const std::function& condition, + TimeDelta max_duration = TimeDelta::Seconds(5)); }; // Interface for telling time, scheduling an event to fire at a particular time, diff --git a/api/test/track_id_stream_info_map.h b/api/test/track_id_stream_info_map.h new file mode 100644 index 0000000000..bb73cfd997 --- /dev/null +++ b/api/test/track_id_stream_info_map.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ +#define API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ + +#include "absl/strings/string_view.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// Instances of |TrackIdStreamInfoMap| provide bookkeeping capabilities that +// are useful to associate stats reports track_ids to the remote stream info. +class TrackIdStreamInfoMap { + public: + virtual ~TrackIdStreamInfoMap() = default; + + // These methods must be called on the same thread where + // StatsObserverInterface::OnStatsReports is invoked. + + // Returns a reference to a stream label owned by the TrackIdStreamInfoMap. + // Precondition: |track_id| must be already mapped to stream label. + virtual absl::string_view GetStreamLabelFromTrackId( + absl::string_view track_id) const = 0; + + // Returns a reference to a sync group name owned by the TrackIdStreamInfoMap. + // Precondition: |track_id| must be already mapped to sync group. 
+ virtual absl::string_view GetSyncGroupLabelFromTrackId( + absl::string_view track_id) const = 0; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ diff --git a/api/test/track_id_stream_label_map.h b/api/test/track_id_stream_label_map.h deleted file mode 100644 index e8dc947ab1..0000000000 --- a/api/test/track_id_stream_label_map.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_ -#define API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_ - -#include - -namespace webrtc { -namespace webrtc_pc_e2e { - -// Instances of |TrackIdStreamLabelMap| provide bookkeeping capabilities that -// are useful to associate stats reports track_ids to the remote stream_id. -class TrackIdStreamLabelMap { - public: - virtual ~TrackIdStreamLabelMap() = default; - - // This method must be called on the same thread where - // StatsObserverInterface::OnStatsReports is invoked. - // Returns a reference to a stream label owned by the TrackIdStreamLabelMap. - // Precondition: |track_id| must be already mapped to a stream_label. 
- virtual const std::string& GetStreamLabelFromTrackId( - const std::string& track_id) const = 0; -}; - -} // namespace webrtc_pc_e2e -} // namespace webrtc - -#endif // API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_ diff --git a/api/test/video/function_video_decoder_factory.h b/api/test/video/function_video_decoder_factory.h index 23214ccf40..86abdd0746 100644 --- a/api/test/video/function_video_decoder_factory.h +++ b/api/test/video/function_video_decoder_factory.h @@ -28,7 +28,9 @@ class FunctionVideoDecoderFactory final : public VideoDecoderFactory { public: explicit FunctionVideoDecoderFactory( std::function()> create) - : create_([create](const SdpVideoFormat&) { return create(); }) {} + : create_([create = std::move(create)](const SdpVideoFormat&) { + return create(); + }) {} explicit FunctionVideoDecoderFactory( std::function(const SdpVideoFormat&)> create) @@ -36,8 +38,10 @@ class FunctionVideoDecoderFactory final : public VideoDecoderFactory { FunctionVideoDecoderFactory( std::function()> create, std::vector sdp_video_formats) - : create_([create](const SdpVideoFormat&) { return create(); }), - sdp_video_formats_(sdp_video_formats) {} + : create_([create = std::move(create)](const SdpVideoFormat&) { + return create(); + }), + sdp_video_formats_(std::move(sdp_video_formats)) {} std::vector GetSupportedFormats() const override { return sdp_video_formats_; diff --git a/api/test/video/function_video_encoder_factory.h b/api/test/video/function_video_encoder_factory.h index 85f848cd1f..a452eee7c4 100644 --- a/api/test/video/function_video_encoder_factory.h +++ b/api/test/video/function_video_encoder_factory.h @@ -29,7 +29,9 @@ class FunctionVideoEncoderFactory final : public VideoEncoderFactory { public: explicit FunctionVideoEncoderFactory( std::function()> create) - : create_([create](const SdpVideoFormat&) { return create(); }) {} + : create_([create = std::move(create)](const SdpVideoFormat&) { + return create(); + }) {} explicit FunctionVideoEncoderFactory( 
std::function(const SdpVideoFormat&)> create) @@ -41,14 +43,6 @@ class FunctionVideoEncoderFactory final : public VideoEncoderFactory { return {}; } - CodecInfo QueryVideoEncoder( - const SdpVideoFormat& /* format */) const override { - CodecInfo codec_info; - codec_info.is_hardware_accelerated = false; - codec_info.has_internal_source = false; - return codec_info; - } - std::unique_ptr CreateVideoEncoder( const SdpVideoFormat& format) override { return create_(format); diff --git a/api/test/video_quality_analyzer_interface.h b/api/test/video_quality_analyzer_interface.h index 990548af9e..c5370a7089 100644 --- a/api/test/video_quality_analyzer_interface.h +++ b/api/test/video_quality_analyzer_interface.h @@ -14,7 +14,9 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/array_view.h" #include "api/test/stats_observer_interface.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" @@ -53,6 +55,20 @@ namespace webrtc_pc_e2e { // The analyzer will be injected in all points from A to F. class VideoQualityAnalyzerInterface : public StatsObserverInterface { public: + // Contains extra statistic provided by video encoder. + struct EncoderStats { + // TODO(hbos) https://crbug.com/webrtc/9547, + // https://crbug.com/webrtc/11443: improve stats API to make available + // there. + uint32_t target_encode_bitrate; + }; + // Contains extra statistic provided by video decoder. + struct DecoderStats { + // Decode time provided by decoder itself. If decoder doesn’t produce such + // information can be omitted. + absl::optional decode_time_ms; + }; + ~VideoQualityAnalyzerInterface() override = default; // Will be called by framework before test. @@ -62,44 +78,65 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface { // calculations. 
Analyzer can perform simple calculations on the calling // thread in each method, but should remember, that it is the same thread, // that is used in video pipeline. - virtual void Start(std::string test_case_name, int max_threads_count) {} + virtual void Start(std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) {} // Will be called when frame was generated from the input stream. + // |peer_name| is name of the peer on which side frame was captured. // Returns frame id, that will be set by framework to the frame. - virtual uint16_t OnFrameCaptured(const std::string& stream_label, + virtual uint16_t OnFrameCaptured(absl::string_view peer_name, + const std::string& stream_label, const VideoFrame& frame) = 0; // Will be called before calling the encoder. - virtual void OnFramePreEncode(const VideoFrame& frame) {} + // |peer_name| is name of the peer on which side frame came to encoder. + virtual void OnFramePreEncode(absl::string_view peer_name, + const VideoFrame& frame) {} // Will be called for each EncodedImage received from encoder. Single // VideoFrame can produce multiple EncodedImages. Each encoded image will // have id from VideoFrame. - virtual void OnFrameEncoded(uint16_t frame_id, - const EncodedImage& encoded_image) {} + // |peer_name| is name of the peer on which side frame was encoded. + virtual void OnFrameEncoded(absl::string_view peer_name, + uint16_t frame_id, + const EncodedImage& encoded_image, + const EncoderStats& stats) {} // Will be called for each frame dropped by encoder. - virtual void OnFrameDropped(EncodedImageCallback::DropReason reason) {} + // |peer_name| is name of the peer on which side frame drop was detected. + virtual void OnFrameDropped(absl::string_view peer_name, + EncodedImageCallback::DropReason reason) {} // Will be called before calling the decoder. - virtual void OnFramePreDecode(uint16_t frame_id, + // |peer_name| is name of the peer on which side frame was received. 
+ virtual void OnFramePreDecode(absl::string_view peer_name, + uint16_t frame_id, const EncodedImage& encoded_image) {} - // Will be called after decoding the frame. |decode_time_ms| is a decode - // time provided by decoder itself. If decoder doesn’t produce such - // information can be omitted. - virtual void OnFrameDecoded(const VideoFrame& frame, - absl::optional decode_time_ms, - absl::optional qp) {} + // Will be called after decoding the frame. + // |peer_name| is name of the peer on which side frame was decoded. + virtual void OnFrameDecoded(absl::string_view peer_name, + const VideoFrame& frame, + const DecoderStats& stats) {} // Will be called when frame will be obtained from PeerConnection stack. - virtual void OnFrameRendered(const VideoFrame& frame) {} + // Will be called when frame will be obtained from PeerConnection stack. + // |peer_name| is name of the peer on which side frame was rendered. + virtual void OnFrameRendered(absl::string_view peer_name, + const VideoFrame& frame) {} // Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK. // All available codes are listed in // modules/video_coding/include/video_error_codes.h - virtual void OnEncoderError(const VideoFrame& frame, int32_t error_code) {} + // |peer_name| is name of the peer on which side error occurred. + virtual void OnEncoderError(absl::string_view peer_name, + const VideoFrame& frame, + int32_t error_code) {} // Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK. // All available codes are listed in // modules/video_coding/include/video_error_codes.h - virtual void OnDecoderError(uint16_t frame_id, int32_t error_code) {} + // |peer_name| is name of the peer on which side error occurred. + virtual void OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, + int32_t error_code) {} // Will be called every time new stats reports are available for the // Peer Connection identified by |pc_label|.
- void OnStatsReports(const std::string& pc_label, - const StatsReports& stats_reports) override {} + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override {} // Tells analyzer that analysis complete and it should calculate final // statistics. diff --git a/api/test/video_quality_test_fixture.h b/api/test/video_quality_test_fixture.h index ec07c23cd4..92c398aa54 100644 --- a/api/test/video_quality_test_fixture.h +++ b/api/test/video_quality_test_fixture.h @@ -22,6 +22,7 @@ #include "api/test/simulated_network.h" #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_config.h" #include "api/video_codecs/video_encoder_factory.h" @@ -31,60 +32,56 @@ namespace webrtc { class VideoQualityTestFixtureInterface { public: // Parameters are grouped into smaller structs to make it easier to set - // the desired elements and skip unused, using aggregate initialization. - // Unfortunately, C++11 (as opposed to C11) doesn't support unnamed structs, - // which makes the implementation of VideoQualityTest a bit uglier. + // the desired elements and skip unused. struct Params { - Params(); - ~Params(); struct CallConfig { - bool send_side_bwe; - bool generic_descriptor; + bool send_side_bwe = false; + bool generic_descriptor = false; BitrateConstraints call_bitrate_config; - int num_thumbnails; + int num_thumbnails = 0; // Indicates if secondary_(video|ss|screenshare) structures are used. 
- bool dual_video; + bool dual_video = false; } call; struct Video { - bool enabled; - size_t width; - size_t height; - int32_t fps; - int min_bitrate_bps; - int target_bitrate_bps; - int max_bitrate_bps; - bool suspend_below_min_bitrate; - std::string codec; - int num_temporal_layers; - int selected_tl; - int min_transmit_bps; - bool ulpfec; - bool flexfec; - bool automatic_scaling; + bool enabled = false; + size_t width = 640; + size_t height = 480; + int32_t fps = 30; + int min_bitrate_bps = 50; + int target_bitrate_bps = 800; + int max_bitrate_bps = 800; + bool suspend_below_min_bitrate = false; + std::string codec = "VP8"; + int num_temporal_layers = 1; + int selected_tl = -1; + int min_transmit_bps = 0; + bool ulpfec = false; + bool flexfec = false; + bool automatic_scaling = false; std::string clip_path; // "Generator" to generate frames instead. - size_t capture_device_index; + size_t capture_device_index = 0; SdpVideoFormat::Parameters sdp_params; - double encoder_overshoot_factor; + double encoder_overshoot_factor = 0.0; } video[2]; struct Audio { - bool enabled; - bool sync_video; - bool dtx; - bool use_real_adm; + bool enabled = false; + bool sync_video = false; + bool dtx = false; + bool use_real_adm = false; absl::optional ana_config; } audio; struct Screenshare { - bool enabled; - bool generate_slides; - int32_t slide_change_interval; - int32_t scroll_duration; + bool enabled = false; + bool generate_slides = false; + int32_t slide_change_interval = 10; + int32_t scroll_duration = 0; std::vector slides; } screenshare[2]; struct Analyzer { std::string test_label; - double avg_psnr_threshold; // (*) - double avg_ssim_threshold; // (*) - int test_durations_secs; + double avg_psnr_threshold = 0.0; // (*) + double avg_ssim_threshold = 0.0; // (*) + int test_durations_secs = 0; std::string graph_data_output_filename; std::string graph_title; } analyzer; @@ -95,14 +92,14 @@ class VideoQualityTestFixtureInterface { absl::optional config; struct SS { // 
Spatial scalability. std::vector streams; // If empty, one stream is assumed. - size_t selected_stream; - int num_spatial_layers; - int selected_sl; - InterLayerPredMode inter_layer_pred; + size_t selected_stream = 0; + int num_spatial_layers = 0; + int selected_sl = -1; + InterLayerPredMode inter_layer_pred = InterLayerPredMode::kOn; // If empty, bitrates are generated in VP9Impl automatically. std::vector spatial_layers; // If set, default parameters will be used instead of |streams|. - bool infer_streams; + bool infer_streams = false; } ss[2]; struct Logging { std::string rtc_event_log_name; diff --git a/api/test/videocodec_test_fixture.h b/api/test/videocodec_test_fixture.h index afb3f8a5e4..395c5cb800 100644 --- a/api/test/videocodec_test_fixture.h +++ b/api/test/videocodec_test_fixture.h @@ -17,6 +17,7 @@ #include "api/test/videocodec_test_stats.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" +#include "media/base/h264_profile_level_id.h" #include "modules/video_coding/include/video_codec_interface.h" namespace webrtc { @@ -137,6 +138,9 @@ class VideoCodecTestFixture { bool save_encoded_ivf = false; bool save_decoded_y4m = false; } visualization_params; + + // Enables quality analysis for dropped frames. + bool analyze_quality_of_dropped_frames = false; }; virtual ~VideoCodecTestFixture() = default; diff --git a/api/test/videocodec_test_stats.h b/api/test/videocodec_test_stats.h index 63e15768dc..df1aed73aa 100644 --- a/api/test/videocodec_test_stats.h +++ b/api/test/videocodec_test_stats.h @@ -67,6 +67,7 @@ class VideoCodecTestStats { int qp = -1; // Quality. 
+ bool quality_analysis_successful = false; float psnr_y = 0.0f; float psnr_u = 0.0f; float psnr_v = 0.0f; diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn index 0f07301fe4..6a7cc57cd2 100644 --- a/api/transport/BUILD.gn +++ b/api/transport/BUILD.gn @@ -14,10 +14,8 @@ rtc_library("bitrate_settings") { "bitrate_settings.cc", "bitrate_settings.h", ] - deps = [ - "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", - ] + deps = [ "../../rtc_base/system:rtc_export" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("enums") { @@ -41,6 +39,8 @@ rtc_library("network_control") { "../units:data_size", "../units:time_delta", "../units:timestamp", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/types:optional", ] @@ -49,10 +49,8 @@ rtc_library("network_control") { rtc_source_set("webrtc_key_value_config") { visibility = [ "*" ] sources = [ "webrtc_key_value_config.h" ] - deps = [ - "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/strings", - ] + deps = [ "../../rtc_base/system:rtc_export" ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("field_trial_based_config") { @@ -64,26 +62,20 @@ rtc_library("field_trial_based_config") { deps = [ ":webrtc_key_value_config", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } +# TODO(nisse): Rename? 
rtc_source_set("datagram_transport_interface") { visibility = [ "*" ] - sources = [ - "congestion_control_interface.h", - "data_channel_transport_interface.h", - "datagram_transport_interface.h", - ] + sources = [ "data_channel_transport_interface.h" ] deps = [ - ":network_control", "..:array_view", "..:rtc_error", "../../rtc_base:rtc_base_approved", - "../units:data_rate", - "../units:timestamp", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("goog_cc") { @@ -101,6 +93,11 @@ rtc_library("goog_cc") { ] } +rtc_source_set("sctp_transport_factory_interface") { + visibility = [ "*" ] + sources = [ "sctp_transport_factory_interface.h" ] +} + rtc_source_set("stun_types") { visibility = [ "*" ] sources = [ @@ -147,7 +144,6 @@ if (rtc_include_tests) { deps = [ ":stun_types", "../../rtc_base", - "../../rtc_base:macromagic", "../../rtc_base:rtc_base_approved", "../../test:test_support", "//testing/gtest", diff --git a/api/transport/congestion_control_interface.h b/api/transport/congestion_control_interface.h deleted file mode 100644 index 40552cb4ff..0000000000 --- a/api/transport/congestion_control_interface.h +++ /dev/null @@ -1,75 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media and datagram transports. 
- -#ifndef API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_ -#define API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_ - -#include -#include -#include - -#include "api/transport/network_control.h" -#include "api/units/data_rate.h" - -namespace webrtc { - -// TODO(nisse): Defined together with MediaTransportInterface. But we should use -// types that aren't tied to media, so that MediaTransportInterface can depend -// on CongestionControlInterface, but not the other way around. -// api/transport/network_control.h may be a reasonable place. -class MediaTransportRttObserver; -struct MediaTransportAllocatedBitrateLimits; -struct MediaTransportTargetRateConstraints; - -// Defines congestion control feedback interface for media and datagram -// transports. -class CongestionControlInterface { - public: - virtual ~CongestionControlInterface() = default; - - // Updates allocation limits. - virtual void SetAllocatedBitrateLimits( - const MediaTransportAllocatedBitrateLimits& limits) = 0; - - // Sets starting rate. - virtual void SetTargetBitrateLimits( - const MediaTransportTargetRateConstraints& target_rate_constraints) = 0; - - // Intended for receive side. AddRttObserver registers an observer to be - // called for each RTT measurement, typically once per ACK. Before media - // transport is destructed the observer must be unregistered. - // - // TODO(sukhanov): Looks like AddRttObserver and RemoveRttObserver were - // never implemented for media transport, so keeping noop implementation. - virtual void AddRttObserver(MediaTransportRttObserver* observer) {} - virtual void RemoveRttObserver(MediaTransportRttObserver* observer) {} - - // Adds a target bitrate observer. Before media transport is destructed - // the observer must be unregistered (by calling - // RemoveTargetTransferRateObserver). - // A newly registered observer will be called back with the latest recorded - // target rate, if available. 
- virtual void AddTargetTransferRateObserver( - TargetTransferRateObserver* observer) = 0; - - // Removes an existing |observer| from observers. If observer was never - // registered, an error is logged and method does nothing. - virtual void RemoveTargetTransferRateObserver( - TargetTransferRateObserver* observer) = 0; - - // Returns the last known target transfer rate as reported to the above - // observers. - virtual absl::optional GetLatestTargetTransferRate() = 0; -}; - -} // namespace webrtc - -#endif // API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_ diff --git a/api/transport/data_channel_transport_interface.h b/api/transport/data_channel_transport_interface.h index 671deffc6e..7b8c653c39 100644 --- a/api/transport/data_channel_transport_interface.h +++ b/api/transport/data_channel_transport_interface.h @@ -35,8 +35,8 @@ enum class DataMessageType { // sent reliably and in-order, even if the data channel is configured for // unreliable delivery. struct SendDataParams { - SendDataParams(); - SendDataParams(const SendDataParams&); + SendDataParams() = default; + SendDataParams(const SendDataParams&) = default; DataMessageType type = DataMessageType::kText; diff --git a/api/transport/datagram_transport_interface.h b/api/transport/datagram_transport_interface.h deleted file mode 100644 index 01736b978d..0000000000 --- a/api/transport/datagram_transport_interface.h +++ /dev/null @@ -1,151 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media and datagram transports. 
- -#ifndef API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_ -#define API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_ - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/rtc_error.h" -#include "api/transport/congestion_control_interface.h" -#include "api/transport/data_channel_transport_interface.h" -#include "api/units/data_rate.h" -#include "api/units/timestamp.h" - -namespace rtc { -class PacketTransportInternal; -} // namespace rtc - -namespace webrtc { - -class MediaTransportStateCallback; - -typedef int64_t DatagramId; - -struct DatagramAck { - // |datagram_id| is same as passed in - // DatagramTransportInterface::SendDatagram. - DatagramId datagram_id; - - // The timestamp at which the remote peer received the identified datagram, - // according to that peer's clock. - Timestamp receive_timestamp = Timestamp::MinusInfinity(); -}; - -// All sink methods are called on network thread. -class DatagramSinkInterface { - public: - virtual ~DatagramSinkInterface() {} - - // Called when new packet is received. - virtual void OnDatagramReceived(rtc::ArrayView data) = 0; - - // Called when datagram is actually sent (datragram can be delayed due - // to congestion control or fusing). |datagram_id| is same as passed in - // DatagramTransportInterface::SendDatagram. - virtual void OnDatagramSent(DatagramId datagram_id) = 0; - - // Called when datagram is ACKed. - virtual void OnDatagramAcked(const DatagramAck& datagram_ack) = 0; - - // Called when a datagram is lost. - virtual void OnDatagramLost(DatagramId datagram_id) = 0; -}; - -// Datagram transport allows to send and receive unreliable packets (datagrams) -// and receive feedback from congestion control (via -// CongestionControlInterface). The idea is to send RTP packets as datagrams and -// have underlying implementation of datagram transport to use QUIC datagram -// protocol. 
-class DatagramTransportInterface : public DataChannelTransportInterface { - public: - virtual ~DatagramTransportInterface() = default; - - // Connect the datagram transport to the ICE transport. - // The implementation must be able to ignore incoming packets that don't - // belong to it. - virtual void Connect(rtc::PacketTransportInternal* packet_transport) = 0; - - // Returns congestion control feedback interface or nullptr if datagram - // transport does not implement congestion control. - // - // Note that right now datagram transport is used without congestion control, - // but we plan to use it in the future. - virtual CongestionControlInterface* congestion_control() = 0; - - // Sets a state observer callback. Before datagram transport is destroyed, the - // callback must be unregistered by setting it to nullptr. - // A newly registered callback will be called with the current state. - // Datagram transport does not invoke this callback concurrently. - virtual void SetTransportStateCallback( - MediaTransportStateCallback* callback) = 0; - - // Start asynchronous send of datagram. The status returned by this method - // only pertains to the synchronous operations (e.g. serialization / - // packetization), not to the asynchronous operation. - // - // Datagrams larger than GetLargestDatagramSize() will fail and return error. - // - // Datagrams are sent in FIFO order. - // - // |datagram_id| is only used in ACK/LOST notifications in - // DatagramSinkInterface and does not need to be unique. - virtual RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) = 0; - - // Returns maximum size of datagram message, does not change. - // TODO(sukhanov): Because value may be undefined before connection setup - // is complete, consider returning error when called before connection is - // established. Currently returns hardcoded const, because integration - // prototype may call before connection is established. 
- virtual size_t GetLargestDatagramSize() const = 0; - - // Sets packet sink. Sink must be unset by calling - // SetDataTransportSink(nullptr) before the data transport is destroyed or - // before new sink is set. - virtual void SetDatagramSink(DatagramSinkInterface* sink) = 0; - - // Retrieves transport parameters for this datagram transport. May be called - // on either client- or server-perspective transports. - // - // For servers, the parameters represent what kind of connections and data the - // server is prepared to accept. This is generally a superset of acceptable - // parameters. - // - // For clients, the parameters echo the server configuration used to create - // the client, possibly removing any fields or parameters which the client - // does not understand. - virtual std::string GetTransportParameters() const = 0; - - // Sets remote transport parameters. |remote_params| is a serialized string - // of opaque parameters, understood by the datagram transport implementation. - // Returns an error if |remote_params| are not compatible with this transport. - // - // TODO(mellem): Make pure virtual. The default implementation maintains - // original negotiation behavior (negotiation falls back to RTP if the - // remote datagram transport fails to echo exactly the local parameters). 
- virtual RTCError SetRemoteTransportParameters( - absl::string_view remote_params) { - if (remote_params == GetTransportParameters()) { - return RTCError::OK(); - } - return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, - "Local and remote transport parameters do not match"); - } -}; - -} // namespace webrtc - -#endif // API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_ diff --git a/api/transport/goog_cc_factory.cc b/api/transport/goog_cc_factory.cc index ccadb8bc22..fd1189901f 100644 --- a/api/transport/goog_cc_factory.cc +++ b/api/transport/goog_cc_factory.cc @@ -53,7 +53,7 @@ GoogCcNetworkControllerFactory::Create(NetworkControllerConfig config) { TimeDelta GoogCcNetworkControllerFactory::GetProcessInterval() const { const int64_t kUpdateIntervalMs = 25; - return TimeDelta::ms(kUpdateIntervalMs); + return TimeDelta::Millis(kUpdateIntervalMs); } GoogCcFeedbackNetworkControllerFactory::GoogCcFeedbackNetworkControllerFactory( diff --git a/api/transport/media/BUILD.gn b/api/transport/media/BUILD.gn deleted file mode 100644 index 24a364c2e5..0000000000 --- a/api/transport/media/BUILD.gn +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. 
- -import("../../../webrtc.gni") - -rtc_library("media_transport_interface") { - visibility = [ "*" ] - sources = [ - "media_transport_config.cc", - "media_transport_config.h", - "media_transport_interface.cc", - "media_transport_interface.h", - ] - deps = [ - ":audio_interfaces", - ":video_interfaces", - "..:datagram_transport_interface", - "..:network_control", - "../..:array_view", - "../..:rtc_error", - "../../..:webrtc_common", - "../../../rtc_base", - "../../../rtc_base:checks", - "../../../rtc_base:rtc_base_approved", - "../../../rtc_base:stringutils", - "../../units:data_rate", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_interfaces") { - visibility = [ "*" ] - sources = [ - "audio_transport.cc", - "audio_transport.h", - ] - deps = [ "../..:array_view" ] -} - -rtc_library("video_interfaces") { - visibility = [ "*" ] - sources = [ - "video_transport.cc", - "video_transport.h", - ] - deps = [ "../../video:encoded_image" ] -} diff --git a/api/transport/media/OWNERS b/api/transport/media/OWNERS deleted file mode 100644 index b353f1282f..0000000000 --- a/api/transport/media/OWNERS +++ /dev/null @@ -1,3 +0,0 @@ -sukhanov@webrtc.org -psla@webrtc.org -mellem@webrtc.org diff --git a/api/transport/media/audio_transport.cc b/api/transport/media/audio_transport.cc deleted file mode 100644 index 0f5fe8bcf2..0000000000 --- a/api/transport/media/audio_transport.cc +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. 
-// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#include "api/transport/media/audio_transport.h" - -#include - -namespace webrtc { - -MediaTransportEncodedAudioFrame::~MediaTransportEncodedAudioFrame() {} - -MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame( - int sampling_rate_hz, - int starting_sample_index, - int samples_per_channel, - int sequence_number, - FrameType frame_type, - int payload_type, - std::vector encoded_data) - : sampling_rate_hz_(sampling_rate_hz), - starting_sample_index_(starting_sample_index), - samples_per_channel_(samples_per_channel), - sequence_number_(sequence_number), - frame_type_(frame_type), - payload_type_(payload_type), - encoded_data_(std::move(encoded_data)) {} - -MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=( - const MediaTransportEncodedAudioFrame&) = default; - -MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=( - MediaTransportEncodedAudioFrame&&) = default; - -MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame( - const MediaTransportEncodedAudioFrame&) = default; - -MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame( - MediaTransportEncodedAudioFrame&&) = default; - -} // namespace webrtc diff --git a/api/transport/media/audio_transport.h b/api/transport/media/audio_transport.h deleted file mode 100644 index dcbdcd7afe..0000000000 --- a/api/transport/media/audio_transport.h +++ /dev/null @@ -1,120 +0,0 @@ -/* Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#ifndef API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_ -#define API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_ - -#include - -#include "api/array_view.h" - -namespace webrtc { - -// Represents encoded audio frame in any encoding (type of encoding is opaque). -// To avoid copying of encoded data use move semantics when passing by value. -class MediaTransportEncodedAudioFrame final { - public: - enum class FrameType { - // Normal audio frame (equivalent to webrtc::kAudioFrameSpeech). - kSpeech, - - // DTX frame (equivalent to webrtc::kAudioFrameCN). - kDiscontinuousTransmission, - // TODO(nisse): Mis-spelled version, update users, then delete. - kDiscountinuousTransmission = kDiscontinuousTransmission, - }; - - MediaTransportEncodedAudioFrame( - // Audio sampling rate, for example 48000. - int sampling_rate_hz, - - // Starting sample index of the frame, i.e. how many audio samples were - // before this frame since the beginning of the call or beginning of time - // in one channel (the starting point should not matter for NetEq). In - // WebRTC it is used as a timestamp of the frame. - // TODO(sukhanov): Starting_sample_index is currently adjusted on the - // receiver side in RTP path. Non-RTP implementations should preserve it. - // For NetEq initial offset should not matter so we should consider fixing - // RTP path. - int starting_sample_index, - - // Number of audio samples in audio frame in 1 channel. 
- int samples_per_channel, - - // Sequence number of the frame in the order sent, it is currently - // required by NetEq, but we can fix NetEq, because starting_sample_index - // should be enough. - int sequence_number, - - // If audio frame is a speech or discontinued transmission. - FrameType frame_type, - - // Opaque payload type. In RTP codepath payload type is stored in RTP - // header. In other implementations it should be simply passed through the - // wire -- it's needed for decoder. - int payload_type, - - // Vector with opaque encoded data. - std::vector encoded_data); - - ~MediaTransportEncodedAudioFrame(); - MediaTransportEncodedAudioFrame(const MediaTransportEncodedAudioFrame&); - MediaTransportEncodedAudioFrame& operator=( - const MediaTransportEncodedAudioFrame& other); - MediaTransportEncodedAudioFrame& operator=( - MediaTransportEncodedAudioFrame&& other); - MediaTransportEncodedAudioFrame(MediaTransportEncodedAudioFrame&&); - - // Getters. - int sampling_rate_hz() const { return sampling_rate_hz_; } - int starting_sample_index() const { return starting_sample_index_; } - int samples_per_channel() const { return samples_per_channel_; } - int sequence_number() const { return sequence_number_; } - - int payload_type() const { return payload_type_; } - FrameType frame_type() const { return frame_type_; } - - rtc::ArrayView encoded_data() const { return encoded_data_; } - - private: - int sampling_rate_hz_; - int starting_sample_index_; - int samples_per_channel_; - - // TODO(sukhanov): Refactor NetEq so we don't need sequence number. - // Having sample_index and samples_per_channel should be enough. - int sequence_number_; - - FrameType frame_type_; - - int payload_type_; - - std::vector encoded_data_; -}; - -// Interface for receiving encoded audio frames from MediaTransportInterface -// implementations. 
-class MediaTransportAudioSinkInterface { - public: - virtual ~MediaTransportAudioSinkInterface() = default; - - // Called when new encoded audio frame is received. - virtual void OnData(uint64_t channel_id, - MediaTransportEncodedAudioFrame frame) = 0; -}; - -} // namespace webrtc -#endif // API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_ diff --git a/api/transport/media/media_transport_config.cc b/api/transport/media/media_transport_config.cc deleted file mode 100644 index b9b19cb6f0..0000000000 --- a/api/transport/media/media_transport_config.cc +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/transport/media/media_transport_config.h" - -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" - -namespace webrtc { - -MediaTransportConfig::MediaTransportConfig(size_t rtp_max_packet_size) - : rtp_max_packet_size(rtp_max_packet_size) { - RTC_DCHECK_GT(rtp_max_packet_size, 0); -} - -std::string MediaTransportConfig::DebugString() const { - rtc::StringBuilder result; - result << "{rtp_max_packet_size: " << rtp_max_packet_size.value_or(0) << "}"; - return result.Release(); -} - -} // namespace webrtc diff --git a/api/transport/media/media_transport_config.h b/api/transport/media/media_transport_config.h deleted file mode 100644 index 7ef65453ae..0000000000 --- a/api/transport/media/media_transport_config.h +++ /dev/null @@ -1,38 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_ -#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_ - -#include -#include -#include - -#include "absl/types/optional.h" - -namespace webrtc { - -// Media transport config is made available to both transport and audio / video -// layers, but access to individual interfaces should not be open without -// necessity. -struct MediaTransportConfig { - // Default constructor for no-media transport scenarios. - MediaTransportConfig() = default; - - // Constructor for datagram transport scenarios. - explicit MediaTransportConfig(size_t rtp_max_packet_size); - - std::string DebugString() const; - - // If provided, limits RTP packet size (excludes ICE, IP or network overhead). - absl::optional rtp_max_packet_size; -}; - -} // namespace webrtc - -#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_ diff --git a/api/transport/media/media_transport_interface.cc b/api/transport/media/media_transport_interface.cc deleted file mode 100644 index 323ddca689..0000000000 --- a/api/transport/media/media_transport_interface.cc +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. 
This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#include "api/transport/media/media_transport_interface.h" - -#include -#include - -#include "api/transport/datagram_transport_interface.h" - -namespace webrtc { - -MediaTransportSettings::MediaTransportSettings() = default; -MediaTransportSettings::MediaTransportSettings(const MediaTransportSettings&) = - default; -MediaTransportSettings& MediaTransportSettings::operator=( - const MediaTransportSettings&) = default; -MediaTransportSettings::~MediaTransportSettings() = default; - -SendDataParams::SendDataParams() = default; -SendDataParams::SendDataParams(const SendDataParams&) = default; - -RTCErrorOr> -MediaTransportFactory::CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return std::unique_ptr(nullptr); -} - -RTCErrorOr> -MediaTransportFactory::CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return std::unique_ptr(nullptr); -} - -RTCErrorOr> -MediaTransportFactory::CreateDatagramTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return std::unique_ptr(nullptr); -} - -std::string MediaTransportFactory::GetTransportName() const { - return ""; -} - -MediaTransportInterface::MediaTransportInterface() = default; -MediaTransportInterface::~MediaTransportInterface() = default; - -absl::optional -MediaTransportInterface::GetTransportParametersOffer() const { - return absl::nullopt; -} - -void MediaTransportInterface::Connect( - rtc::PacketTransportInternal* packet_transport) {} - -void MediaTransportInterface::SetKeyFrameRequestCallback( - MediaTransportKeyFrameRequestCallback* callback) {} - -absl::optional -MediaTransportInterface::GetLatestTargetTransferRate() { - return absl::nullopt; -} - -void MediaTransportInterface::AddNetworkChangeCallback( - 
MediaTransportNetworkChangeCallback* callback) {} - -void MediaTransportInterface::RemoveNetworkChangeCallback( - MediaTransportNetworkChangeCallback* callback) {} - -void MediaTransportInterface::SetFirstAudioPacketReceivedObserver( - AudioPacketReceivedObserver* observer) {} - -void MediaTransportInterface::AddTargetTransferRateObserver( - TargetTransferRateObserver* observer) {} -void MediaTransportInterface::RemoveTargetTransferRateObserver( - TargetTransferRateObserver* observer) {} - -void MediaTransportInterface::AddRttObserver( - MediaTransportRttObserver* observer) {} -void MediaTransportInterface::RemoveRttObserver( - MediaTransportRttObserver* observer) {} - -size_t MediaTransportInterface::GetAudioPacketOverhead() const { - return 0; -} - -void MediaTransportInterface::SetAllocatedBitrateLimits( - const MediaTransportAllocatedBitrateLimits& limits) {} - -} // namespace webrtc diff --git a/api/transport/media/media_transport_interface.h b/api/transport/media/media_transport_interface.h deleted file mode 100644 index 04a8e50031..0000000000 --- a/api/transport/media/media_transport_interface.h +++ /dev/null @@ -1,328 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. 
- -#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_ -#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_ - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/rtc_error.h" -#include "api/transport/data_channel_transport_interface.h" -#include "api/transport/media/audio_transport.h" -#include "api/transport/media/video_transport.h" -#include "api/transport/network_control.h" -#include "api/units/data_rate.h" -#include "common_types.h" // NOLINT(build/include) -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/network_route.h" - -namespace rtc { -class PacketTransportInternal; -class Thread; -} // namespace rtc - -namespace webrtc { - -class DatagramTransportInterface; -class RtcEventLog; - -class AudioPacketReceivedObserver { - public: - virtual ~AudioPacketReceivedObserver() = default; - - // Invoked for the first received audio packet on a given channel id. - // It will be invoked once for each channel id. - virtual void OnFirstAudioPacketReceived(int64_t channel_id) = 0; -}; - -// Used to configure stream allocations. -struct MediaTransportAllocatedBitrateLimits { - DataRate min_pacing_rate = DataRate::Zero(); - DataRate max_padding_bitrate = DataRate::Zero(); - DataRate max_total_allocated_bitrate = DataRate::Zero(); -}; - -// Used to configure target bitrate constraints. -// If the value is provided, the constraint is updated. -// If the value is omitted, the value is left unchanged. -struct MediaTransportTargetRateConstraints { - absl::optional min_bitrate; - absl::optional max_bitrate; - absl::optional starting_bitrate; -}; - -// A collection of settings for creation of media transport. 
-struct MediaTransportSettings final { - MediaTransportSettings(); - MediaTransportSettings(const MediaTransportSettings&); - MediaTransportSettings& operator=(const MediaTransportSettings&); - ~MediaTransportSettings(); - - // Group calls are not currently supported, in 1:1 call one side must set - // is_caller = true and another is_caller = false. - bool is_caller; - - // Must be set if a pre-shared key is used for the call. - // TODO(bugs.webrtc.org/9944): This should become zero buffer in the distant - // future. - absl::optional pre_shared_key; - - // If present, this is a config passed from the caller to the answerer in the - // offer. Each media transport knows how to understand its own parameters. - absl::optional remote_transport_parameters; - - // If present, provides the event log that media transport should use. - // Media transport does not own it. The lifetime of |event_log| will exceed - // the lifetime of the instance of MediaTransportInterface instance. - RtcEventLog* event_log = nullptr; -}; - -// Callback to notify about network route changes. -class MediaTransportNetworkChangeCallback { - public: - virtual ~MediaTransportNetworkChangeCallback() = default; - - // Called when the network route is changed, with the new network route. - virtual void OnNetworkRouteChanged( - const rtc::NetworkRoute& new_network_route) = 0; -}; - -// State of the media transport. Media transport begins in the pending state. -// It transitions to writable when it is ready to send media. It may transition -// back to pending if the connection is blocked. It may transition to closed at -// any time. Closed is terminal: a transport will never re-open once closed. -enum class MediaTransportState { - kPending, - kWritable, - kClosed, -}; - -// Callback invoked whenever the state of the media transport changes. -class MediaTransportStateCallback { - public: - virtual ~MediaTransportStateCallback() = default; - - // Invoked whenever the state of the media transport changes. 
- virtual void OnStateChanged(MediaTransportState state) = 0; -}; - -// Callback for RTT measurements on the receive side. -// TODO(nisse): Related interfaces: CallStatsObserver and RtcpRttStats. It's -// somewhat unclear what type of measurement is needed. It's used to configure -// NACK generation and playout buffer. Either raw measurement values or recent -// maximum would make sense for this use. Need consolidation of RTT signalling. -class MediaTransportRttObserver { - public: - virtual ~MediaTransportRttObserver() = default; - - // Invoked when a new RTT measurement is available, typically once per ACK. - virtual void OnRttUpdated(int64_t rtt_ms) = 0; -}; - -// Media transport interface for sending / receiving encoded audio/video frames -// and receiving bandwidth estimate update from congestion control. -class MediaTransportInterface : public DataChannelTransportInterface { - public: - MediaTransportInterface(); - virtual ~MediaTransportInterface(); - - // Retrieves callers config (i.e. media transport offer) that should be passed - // to the callee, before the call is connected. Such config is opaque to SDP - // (sdp just passes it through). The config is a binary blob, so SDP may - // choose to use base64 to serialize it (or any other approach that guarantees - // that the binary blob goes through). This should only be called for the - // caller's perspective. - // - // This may return an unset optional, which means that the given media - // transport is not supported / disabled and shouldn't be reported in SDP. - // - // It may also return an empty string, in which case the media transport is - // supported, but without any extra settings. - // TODO(psla): Make abstract. - virtual absl::optional GetTransportParametersOffer() const; - - // Connect the media transport to the ICE transport. - // The implementation must be able to ignore incoming packets that don't - // belong to it. - // TODO(psla): Make abstract. 
- virtual void Connect(rtc::PacketTransportInternal* packet_transport); - - // Start asynchronous send of audio frame. The status returned by this method - // only pertains to the synchronous operations (e.g. - // serialization/packetization), not to the asynchronous operation. - - virtual RTCError SendAudioFrame(uint64_t channel_id, - MediaTransportEncodedAudioFrame frame) = 0; - - // Start asynchronous send of video frame. The status returned by this method - // only pertains to the synchronous operations (e.g. - // serialization/packetization), not to the asynchronous operation. - virtual RTCError SendVideoFrame( - uint64_t channel_id, - const MediaTransportEncodedVideoFrame& frame) = 0; - - // Used by video sender to be notified on key frame requests. - virtual void SetKeyFrameRequestCallback( - MediaTransportKeyFrameRequestCallback* callback); - - // Requests a keyframe for the particular channel (stream). The caller should - // check that the keyframe is not present in a jitter buffer already (i.e. - // don't request a keyframe if there is one that you will get from the jitter - // buffer in a moment). - virtual RTCError RequestKeyFrame(uint64_t channel_id) = 0; - - // Sets audio sink. Sink must be unset by calling SetReceiveAudioSink(nullptr) - // before the media transport is destroyed or before new sink is set. - virtual void SetReceiveAudioSink(MediaTransportAudioSinkInterface* sink) = 0; - - // Registers a video sink. Before destruction of media transport, you must - // pass a nullptr. - virtual void SetReceiveVideoSink(MediaTransportVideoSinkInterface* sink) = 0; - - // Adds a target bitrate observer. Before media transport is destructed - // the observer must be unregistered (by calling - // RemoveTargetTransferRateObserver). - // A newly registered observer will be called back with the latest recorded - // target rate, if available. 
- virtual void AddTargetTransferRateObserver( - TargetTransferRateObserver* observer); - - // Removes an existing |observer| from observers. If observer was never - // registered, an error is logged and method does nothing. - virtual void RemoveTargetTransferRateObserver( - TargetTransferRateObserver* observer); - - // Sets audio packets observer, which gets informed about incoming audio - // packets. Before destruction, the observer must be unregistered by setting - // nullptr. - // - // This method may be temporary, when the multiplexer is implemented (or - // multiplexer may use it to demultiplex channel ids). - virtual void SetFirstAudioPacketReceivedObserver( - AudioPacketReceivedObserver* observer); - - // Intended for receive side. AddRttObserver registers an observer to be - // called for each RTT measurement, typically once per ACK. Before media - // transport is destructed the observer must be unregistered. - virtual void AddRttObserver(MediaTransportRttObserver* observer); - virtual void RemoveRttObserver(MediaTransportRttObserver* observer); - - // Returns the last known target transfer rate as reported to the above - // observers. - virtual absl::optional GetLatestTargetTransferRate(); - - // Gets the audio packet overhead in bytes. Returned overhead does not include - // transport overhead (ipv4/6, turn channeldata, tcp/udp, etc.). - // If the transport is capable of fusing packets together, this overhead - // might not be a very accurate number. - // TODO(nisse): Deprecated. - virtual size_t GetAudioPacketOverhead() const; - - // Corresponding observers for audio and video overhead. Before destruction, - // the observers must be unregistered by setting nullptr. - - // TODO(nisse): Should move to per-stream objects, since packetization - // overhead can vary per stream, e.g., depending on negotiated extensions. In - // addition, we should move towards reporting total overhead including all - // layers. 
Currently, overhead of the lower layers is reported elsewhere, - // e.g., on route change between IPv4 and IPv6. - virtual void SetAudioOverheadObserver(OverheadObserver* observer) {} - - // Registers an observer for network change events. If the network route is - // already established when the callback is added, |callback| will be called - // immediately with the current network route. Before media transport is - // destroyed, the callback must be removed. - virtual void AddNetworkChangeCallback( - MediaTransportNetworkChangeCallback* callback); - virtual void RemoveNetworkChangeCallback( - MediaTransportNetworkChangeCallback* callback); - - // Sets a state observer callback. Before media transport is destroyed, the - // callback must be unregistered by setting it to nullptr. - // A newly registered callback will be called with the current state. - // Media transport does not invoke this callback concurrently. - virtual void SetMediaTransportStateCallback( - MediaTransportStateCallback* callback) = 0; - - // Updates allocation limits. - // TODO(psla): Make abstract when downstream implementation implement it. - virtual void SetAllocatedBitrateLimits( - const MediaTransportAllocatedBitrateLimits& limits); - - // Sets starting rate. - // TODO(psla): Make abstract when downstream implementation implement it. - virtual void SetTargetBitrateLimits( - const MediaTransportTargetRateConstraints& target_rate_constraints) {} - - // TODO(sukhanov): RtcEventLogs. -}; - -// If media transport factory is set in peer connection factory, it will be -// used to create media transport for sending/receiving encoded frames and -// this transport will be used instead of default RTP/SRTP transport. -// -// Currently Media Transport negotiation is not supported in SDP. -// If application is using media transport, it must negotiate it before -// setting media transport factory in peer connection. 
-class MediaTransportFactory { - public: - virtual ~MediaTransportFactory() = default; - - // Creates media transport. - // - Does not take ownership of packet_transport or network_thread. - // - Does not support group calls, in 1:1 call one side must set - // is_caller = true and another is_caller = false. - virtual RTCErrorOr> - CreateMediaTransport(rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings); - - // Creates a new Media Transport in a disconnected state. If the media - // transport for the caller is created, one can then call - // MediaTransportInterface::GetTransportParametersOffer on that new instance. - // TODO(psla): Make abstract. - virtual RTCErrorOr> - CreateMediaTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings); - - // Creates a new Datagram Transport in a disconnected state. If the datagram - // transport for the caller is created, one can then call - // DatagramTransportInterface::GetTransportParametersOffer on that new - // instance. - // - // TODO(sukhanov): Consider separating media and datagram transport factories. - // TODO(sukhanov): Move factory to a separate .h file. - virtual RTCErrorOr> - CreateDatagramTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings); - - // Gets a transport name which is supported by the implementation. - // Different factories should return different transport names, and at runtime - // it will be checked that different names were used. - // For example, "rtp" or "generic" may be returned by two different - // implementations. - // The value returned by this method must never change in the lifetime of the - // factory. - // TODO(psla): Make abstract. 
- virtual std::string GetTransportName() const; -}; - -} // namespace webrtc -#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_ diff --git a/api/transport/media/video_transport.cc b/api/transport/media/video_transport.cc deleted file mode 100644 index a6f5304048..0000000000 --- a/api/transport/media/video_transport.cc +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. 
- -#include "api/transport/media/video_transport.h" - -#include - -namespace webrtc { - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame() = default; - -MediaTransportEncodedVideoFrame::~MediaTransportEncodedVideoFrame() = default; - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame( - int64_t frame_id, - std::vector referenced_frame_ids, - int payload_type, - const webrtc::EncodedImage& encoded_image) - : payload_type_(payload_type), - encoded_image_(encoded_image), - frame_id_(frame_id), - referenced_frame_ids_(std::move(referenced_frame_ids)) {} - -MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=( - const MediaTransportEncodedVideoFrame&) = default; - -MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=( - MediaTransportEncodedVideoFrame&&) = default; - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame( - const MediaTransportEncodedVideoFrame& o) - : MediaTransportEncodedVideoFrame() { - *this = o; -} - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame( - MediaTransportEncodedVideoFrame&& o) - : MediaTransportEncodedVideoFrame() { - *this = std::move(o); -} - -} // namespace webrtc diff --git a/api/transport/media/video_transport.h b/api/transport/media/video_transport.h deleted file mode 100644 index affd2e0d38..0000000000 --- a/api/transport/media/video_transport.h +++ /dev/null @@ -1,101 +0,0 @@ -/* Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. 
-// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#ifndef API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_ -#define API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_ - -#include - -#include "api/video/encoded_image.h" - -namespace webrtc { - -// Represents encoded video frame, along with the codec information. -class MediaTransportEncodedVideoFrame final { - public: - MediaTransportEncodedVideoFrame(int64_t frame_id, - std::vector referenced_frame_ids, - int payload_type, - const webrtc::EncodedImage& encoded_image); - ~MediaTransportEncodedVideoFrame(); - MediaTransportEncodedVideoFrame(const MediaTransportEncodedVideoFrame&); - MediaTransportEncodedVideoFrame& operator=( - const MediaTransportEncodedVideoFrame& other); - MediaTransportEncodedVideoFrame& operator=( - MediaTransportEncodedVideoFrame&& other); - MediaTransportEncodedVideoFrame(MediaTransportEncodedVideoFrame&&); - - int payload_type() const { return payload_type_; } - const webrtc::EncodedImage& encoded_image() const { return encoded_image_; } - - int64_t frame_id() const { return frame_id_; } - const std::vector& referenced_frame_ids() const { - return referenced_frame_ids_; - } - - // Hack to workaround lack of ownership of the EncodedImage buffer. If we - // don't already own the underlying data, make a copy. - void Retain() { encoded_image_.Retain(); } - - private: - MediaTransportEncodedVideoFrame(); - - int payload_type_; - - // The buffer is not always owned by the encoded image. On the sender it means - // that it will need to make a copy using the Retain() method, if it wants to - // deliver it asynchronously. - webrtc::EncodedImage encoded_image_; - - // Frame id uniquely identifies a frame in a stream. It needs to be unique in - // a given time window (i.e. 
technically unique identifier for the lifetime of - // the connection is not needed, but you need to guarantee that remote side - // got rid of the previous frame_id if you plan to reuse it). - // - // It is required by a remote jitter buffer, and is the same as - // EncodedFrame::id::picture_id. - // - // This data must be opaque to the media transport, and media transport should - // itself not make any assumptions about what it is and its uniqueness. - int64_t frame_id_; - - // A single frame might depend on other frames. This is set of identifiers on - // which the current frame depends. - std::vector referenced_frame_ids_; -}; - -// Interface for receiving encoded video frames from MediaTransportInterface -// implementations. -class MediaTransportVideoSinkInterface { - public: - virtual ~MediaTransportVideoSinkInterface() = default; - - // Called when new encoded video frame is received. - virtual void OnData(uint64_t channel_id, - MediaTransportEncodedVideoFrame frame) = 0; -}; - -// Interface for video sender to be notified of received key frame request. -class MediaTransportKeyFrameRequestCallback { - public: - virtual ~MediaTransportKeyFrameRequestCallback() = default; - - // Called when a key frame request is received on the transport. - virtual void OnKeyFrameRequested(uint64_t channel_id) = 0; -}; - -} // namespace webrtc -#endif // API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_ diff --git a/api/transport/network_control.h b/api/transport/network_control.h index 6fc1f7c0d1..c2b005e713 100644 --- a/api/transport/network_control.h +++ b/api/transport/network_control.h @@ -61,42 +61,42 @@ class NetworkControllerInterface { virtual ~NetworkControllerInterface() = default; // Called when network availabilty changes. 
- virtual NetworkControlUpdate OnNetworkAvailability(NetworkAvailability) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnNetworkAvailability( + NetworkAvailability) = 0; // Called when the receiving or sending endpoint changes address. - virtual NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnNetworkRouteChange( + NetworkRouteChange) = 0; // Called periodically with a periodicy as specified by // NetworkControllerFactoryInterface::GetProcessInterval. - virtual NetworkControlUpdate OnProcessInterval(ProcessInterval) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnProcessInterval( + ProcessInterval) = 0; // Called when remotely calculated bitrate is received. - virtual NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnRemoteBitrateReport( + RemoteBitrateReport) = 0; // Called round trip time has been calculated by protocol specific mechanisms. - virtual NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnRoundTripTimeUpdate( + RoundTripTimeUpdate) = 0; // Called when a packet is sent on the network. - virtual NetworkControlUpdate OnSentPacket(SentPacket) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnSentPacket( + SentPacket) = 0; // Called when a packet is received from the remote client. - virtual NetworkControlUpdate OnReceivedPacket(ReceivedPacket) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnReceivedPacket( + ReceivedPacket) = 0; // Called when the stream specific configuration has been updated. 
- virtual NetworkControlUpdate OnStreamsConfig(StreamsConfig) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnStreamsConfig( + StreamsConfig) = 0; // Called when target transfer rate constraints has been changed. - virtual NetworkControlUpdate OnTargetRateConstraints(TargetRateConstraints) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnTargetRateConstraints( + TargetRateConstraints) = 0; // Called when a protocol specific calculation of packet loss has been made. - virtual NetworkControlUpdate OnTransportLossReport(TransportLossReport) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnTransportLossReport( + TransportLossReport) = 0; // Called with per packet feedback regarding receive time. - virtual NetworkControlUpdate OnTransportPacketsFeedback( - TransportPacketsFeedback) ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnTransportPacketsFeedback( + TransportPacketsFeedback) = 0; // Called with network state estimate updates. - virtual NetworkControlUpdate OnNetworkStateEstimate(NetworkStateEstimate) - ABSL_MUST_USE_RESULT = 0; + ABSL_MUST_USE_RESULT virtual NetworkControlUpdate OnNetworkStateEstimate( + NetworkStateEstimate) = 0; }; // NetworkControllerFactoryInterface is an interface for creating a network diff --git a/api/transport/network_types.h b/api/transport/network_types.h index f658b34494..10fc0beedf 100644 --- a/api/transport/network_types.h +++ b/api/transport/network_types.h @@ -107,7 +107,11 @@ struct SentPacket { DataSize size = DataSize::Zero(); // Size of preceeding packets that are not part of feedback. DataSize prior_unacked_data = DataSize::Zero(); + // Probe cluster id and parameters including bitrate, number of packets and + // number of bytes. PacedPacketInfo pacing_info; + // True if the packet is an audio packet, false for video, padding, RTX etc. 
+ bool audio = false; // Transport independent sequence number, any tracked packet should have a // sequence number that is unique over the whole call and increasing by 1 for // each packet. @@ -218,6 +222,7 @@ struct TargetTransferRate { NetworkEstimate network_estimate; DataRate target_rate = DataRate::Zero(); DataRate stable_target_rate = DataRate::Zero(); + double cwnd_reduce_ratio = 0; }; // Contains updates of network controller comand state. Using optionals to diff --git a/api/transport/rtp/BUILD.gn b/api/transport/rtp/BUILD.gn index b0849502c8..7b01169360 100644 --- a/api/transport/rtp/BUILD.gn +++ b/api/transport/rtp/BUILD.gn @@ -14,15 +14,20 @@ rtc_source_set("rtp_source") { deps = [ "../../../api:rtp_headers", "../../../rtc_base:checks", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("dependency_descriptor") { visibility = [ "*" ] - sources = [ "dependency_descriptor.h" ] - deps = [ + sources = [ + "dependency_descriptor.cc", + "dependency_descriptor.h", + ] + deps = [ "../../../rtc_base:checks" ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/transport/rtp/dependency_descriptor.cc b/api/transport/rtp/dependency_descriptor.cc new file mode 100644 index 0000000000..2a9b6d9a71 --- /dev/null +++ b/api/transport/rtp/dependency_descriptor.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/transport/rtp/dependency_descriptor.h" + +#include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +constexpr int DependencyDescriptor::kMaxSpatialIds; +constexpr int DependencyDescriptor::kMaxTemporalIds; +constexpr int DependencyDescriptor::kMaxTemplates; +constexpr int DependencyDescriptor::kMaxDecodeTargets; + +namespace webrtc_impl { + +absl::InlinedVector StringToDecodeTargetIndications( + absl::string_view symbols) { + absl::InlinedVector dtis; + dtis.reserve(symbols.size()); + for (char symbol : symbols) { + DecodeTargetIndication indication; + switch (symbol) { + case '-': + indication = DecodeTargetIndication::kNotPresent; + break; + case 'D': + indication = DecodeTargetIndication::kDiscardable; + break; + case 'R': + indication = DecodeTargetIndication::kRequired; + break; + case 'S': + indication = DecodeTargetIndication::kSwitch; + break; + default: + RTC_NOTREACHED(); + } + dtis.push_back(indication); + } + return dtis; +} + +} // namespace webrtc_impl +} // namespace webrtc diff --git a/api/transport/rtp/dependency_descriptor.h b/api/transport/rtp/dependency_descriptor.h index a488f56dfd..6967c83517 100644 --- a/api/transport/rtp/dependency_descriptor.h +++ b/api/transport/rtp/dependency_descriptor.h @@ -13,10 +13,12 @@ #include +#include #include #include #include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" namespace webrtc { @@ -52,6 +54,13 @@ enum class DecodeTargetIndication { }; struct FrameDependencyTemplate { + // Setters are named briefly to chain them when building the template. 
+ FrameDependencyTemplate& S(int spatial_layer); + FrameDependencyTemplate& T(int temporal_layer); + FrameDependencyTemplate& Dtis(absl::string_view dtis); + FrameDependencyTemplate& FrameDiffs(std::initializer_list diffs); + FrameDependencyTemplate& ChainDiffs(std::initializer_list diffs); + friend bool operator==(const FrameDependencyTemplate& lhs, const FrameDependencyTemplate& rhs) { return lhs.spatial_id == rhs.spatial_id && @@ -82,14 +91,18 @@ struct FrameDependencyStructure { int num_decode_targets = 0; int num_chains = 0; // If chains are used (num_chains > 0), maps decode target index into index of - // the chain protecting that target or |num_chains| value if decode target is - // not protected by a chain. + // the chain protecting that target. absl::InlinedVector decode_target_protected_by_chain; absl::InlinedVector resolutions; std::vector templates; }; struct DependencyDescriptor { + static constexpr int kMaxSpatialIds = 4; + static constexpr int kMaxTemporalIds = 8; + static constexpr int kMaxDecodeTargets = 32; + static constexpr int kMaxTemplates = 64; + bool first_packet_in_frame = true; bool last_packet_in_frame = true; int frame_number = 0; @@ -99,6 +112,37 @@ struct DependencyDescriptor { std::unique_ptr attached_structure; }; +// Below are implementation details. 
+namespace webrtc_impl { +absl::InlinedVector StringToDecodeTargetIndications( + absl::string_view indication_symbols); +} // namespace webrtc_impl + +inline FrameDependencyTemplate& FrameDependencyTemplate::S(int spatial_layer) { + this->spatial_id = spatial_layer; + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::T(int temporal_layer) { + this->temporal_id = temporal_layer; + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::Dtis( + absl::string_view dtis) { + this->decode_target_indications = + webrtc_impl::StringToDecodeTargetIndications(dtis); + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::FrameDiffs( + std::initializer_list diffs) { + this->frame_diffs.assign(diffs.begin(), diffs.end()); + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::ChainDiffs( + std::initializer_list diffs) { + this->chain_diffs.assign(diffs.begin(), diffs.end()); + return *this; +} + } // namespace webrtc #endif // API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_ diff --git a/api/transport/sctp_transport_factory_interface.h b/api/transport/sctp_transport_factory_interface.h new file mode 100644 index 0000000000..912be3a374 --- /dev/null +++ b/api/transport/sctp_transport_factory_interface.h @@ -0,0 +1,42 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_SCTP_TRANSPORT_FACTORY_INTERFACE_H_ +#define API_TRANSPORT_SCTP_TRANSPORT_FACTORY_INTERFACE_H_ + +#include + +// These classes are not part of the API, and are treated as opaque pointers. 
+namespace cricket { +class SctpTransportInternal; +} // namespace cricket + +namespace rtc { +class PacketTransportInternal; +} // namespace rtc + +namespace webrtc { + +// Factory class which can be used to allow fake SctpTransports to be injected +// for testing. An application is not intended to implement this interface nor +// 'cricket::SctpTransportInternal' because SctpTransportInternal is not +// guaranteed to remain stable in future WebRTC versions. +class SctpTransportFactoryInterface { + public: + virtual ~SctpTransportFactoryInterface() = default; + + // Create an SCTP transport using |channel| for the underlying transport. + virtual std::unique_ptr CreateSctpTransport( + rtc::PacketTransportInternal* channel) = 0; +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_SCTP_TRANSPORT_FACTORY_INTERFACE_H_ diff --git a/api/transport/stun.cc b/api/transport/stun.cc index 5ed4900088..7fee6ea78a 100644 --- a/api/transport/stun.cc +++ b/api/transport/stun.cc @@ -47,6 +47,7 @@ namespace cricket { const char STUN_ERROR_REASON_TRY_ALTERNATE_SERVER[] = "Try Alternate Server"; const char STUN_ERROR_REASON_BAD_REQUEST[] = "Bad Request"; const char STUN_ERROR_REASON_UNAUTHORIZED[] = "Unauthorized"; +const char STUN_ERROR_REASON_UNKNOWN_ATTRIBUTE[] = "Unknown Attribute"; const char STUN_ERROR_REASON_FORBIDDEN[] = "Forbidden"; const char STUN_ERROR_REASON_STALE_CREDENTIALS[] = "Stale Credentials"; const char STUN_ERROR_REASON_ALLOCATION_MISMATCH[] = "Allocation Mismatch"; @@ -140,6 +141,18 @@ void StunMessage::ClearAttributes() { length_ = 0; } +std::vector StunMessage::GetNonComprehendedAttributes() const { + std::vector unknown_attributes; + for (auto& attr : attrs_) { + // "comprehension-required" range is 0x0000-0x7FFF. 
+ if (attr->type() >= 0x0000 && attr->type() <= 0x7FFF && + GetAttributeValueType(attr->type()) == STUN_VALUE_UNKNOWN) { + unknown_attributes.push_back(attr->type()); + } + } + return unknown_attributes; +} + const StunAddressAttribute* StunMessage::GetAddress(int type) const { switch (type) { case STUN_ATTR_MAPPED_ADDRESS: { @@ -542,7 +555,7 @@ StunAttributeValueType StunMessage::GetAttributeValueType(int type) const { return STUN_VALUE_BYTE_STRING; case STUN_ATTR_RETRANSMIT_COUNT: return STUN_VALUE_UINT32; - case STUN_ATTR_LAST_ICE_CHECK_RECEIVED: + case STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED: return STUN_VALUE_BYTE_STRING; case STUN_ATTR_GOOG_MISC_INFO: return STUN_VALUE_UINT16_LIST; @@ -1296,7 +1309,7 @@ StunMessage* TurnMessage::CreateNew() const { StunAttributeValueType IceMessage::GetAttributeValueType(int type) const { switch (type) { case STUN_ATTR_PRIORITY: - case STUN_ATTR_NETWORK_INFO: + case STUN_ATTR_GOOG_NETWORK_INFO: case STUN_ATTR_NOMINATION: return STUN_VALUE_UINT32; case STUN_ATTR_USE_CANDIDATE: diff --git a/api/transport/stun.h b/api/transport/stun.h index 41f76a1ba7..db37b8e365 100644 --- a/api/transport/stun.h +++ b/api/transport/stun.h @@ -163,6 +163,10 @@ class StunMessage { void SetType(int type) { type_ = static_cast(type); } bool SetTransactionID(const std::string& str); + // Get a list of all of the attribute types in the "comprehension required" + // range that were not recognized. + std::vector GetNonComprehendedAttributes() const; + // Gets the desired attribute value, or NULL if no such attribute type exists. const StunAddressAttribute* GetAddress(int type) const; const StunUInt32Attribute* GetUInt32(int type) const; @@ -663,11 +667,16 @@ enum IceAttributeType { STUN_ATTR_NOMINATION = 0xC001, // UInt32 // UInt32. The higher 16 bits are the network ID. The lower 16 bits are the // network cost. 
- STUN_ATTR_NETWORK_INFO = 0xC057, + STUN_ATTR_GOOG_NETWORK_INFO = 0xC057, // Experimental: Transaction ID of the last connectivity check received. - STUN_ATTR_LAST_ICE_CHECK_RECEIVED = 0xC058, + STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED = 0xC058, // Uint16List. Miscellaneous attributes for future extension. STUN_ATTR_GOOG_MISC_INFO = 0xC059, + // Obsolete. + STUN_ATTR_GOOG_OBSOLETE_1 = 0xC05A, + STUN_ATTR_GOOG_CONNECTION_ID = 0xC05B, // Not yet implemented. + STUN_ATTR_GOOG_DELTA = 0xC05C, // Not yet implemented. + STUN_ATTR_GOOG_DELTA_ACK = 0xC05D, // Not yet implemented. // MESSAGE-INTEGRITY truncated to 32-bit. STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 = 0xC060, }; diff --git a/api/transport/test/feedback_generator_interface.h b/api/transport/test/feedback_generator_interface.h index cff67dd1df..6e5118cbf4 100644 --- a/api/transport/test/feedback_generator_interface.h +++ b/api/transport/test/feedback_generator_interface.h @@ -21,8 +21,8 @@ class FeedbackGenerator { struct Config { BuiltInNetworkBehaviorConfig send_link; BuiltInNetworkBehaviorConfig return_link; - TimeDelta feedback_interval = TimeDelta::ms(50); - DataSize feedback_packet_size = DataSize::bytes(20); + TimeDelta feedback_interval = TimeDelta::Millis(50); + DataSize feedback_packet_size = DataSize::Bytes(20); }; virtual ~FeedbackGenerator() = default; virtual Timestamp Now() = 0; diff --git a/api/transport/test/mock_network_control.h b/api/transport/test/mock_network_control.h index 54a416cb77..f613004fb7 100644 --- a/api/transport/test/mock_network_control.h +++ b/api/transport/test/mock_network_control.h @@ -18,11 +18,16 @@ namespace webrtc { class MockNetworkStateEstimator : public NetworkStateEstimator { public: - MOCK_METHOD0(GetCurrentEstimate, absl::optional()); - MOCK_METHOD1(OnTransportPacketsFeedback, - void(const TransportPacketsFeedback&)); - MOCK_METHOD1(OnReceivedPacket, void(const PacketResult&)); - MOCK_METHOD1(OnRouteChange, void(const NetworkRouteChange&)); + MOCK_METHOD(absl::optional, 
+ GetCurrentEstimate, + (), + (override)); + MOCK_METHOD(void, + OnTransportPacketsFeedback, + (const TransportPacketsFeedback&), + (override)); + MOCK_METHOD(void, OnReceivedPacket, (const PacketResult&), (override)); + MOCK_METHOD(void, OnRouteChange, (const NetworkRouteChange&), (override)); }; } // namespace webrtc diff --git a/api/uma_metrics.h b/api/uma_metrics.h index 8436d4f9e5..30543b68b1 100644 --- a/api/uma_metrics.h +++ b/api/uma_metrics.h @@ -8,42 +8,34 @@ * be found in the AUTHORS file in the root of the source tree. */ -// This file contains enums related to IPv4/IPv6 metrics. +// This file contains enums related to Chrome UMA histograms. See +// https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md#requirements +// for requirements when adding or changing metrics. #ifndef API_UMA_METRICS_H_ #define API_UMA_METRICS_H_ -#include "rtc_base/ref_count.h" - namespace webrtc { -// Currently this contains information related to WebRTC network/transport -// information. - -// The difference between PeerConnectionEnumCounter and -// PeerConnectionMetricsName is that the "EnumCounter" is only counting the -// occurrences of events, while "Name" has a value associated with it which is -// used to form a histogram. - -// This enum is backed by Chromium's histograms.xml, -// chromium/src/tools/metrics/histograms/histograms.xml -// Existing values cannot be re-ordered and new enums must be added -// before kBoundary. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. 
enum PeerConnectionAddressFamilyCounter { - kPeerConnection_IPv4, - kPeerConnection_IPv6, - kBestConnections_IPv4, - kBestConnections_IPv6, - kPeerConnectionAddressFamilyCounter_Max, + kPeerConnection_IPv4 = 0, + kPeerConnection_IPv6 = 1, + kBestConnections_IPv4 = 2, + kBestConnections_IPv6 = 3, + kPeerConnectionAddressFamilyCounter_Max }; // This enum defines types for UMA samples, which will have a range. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum PeerConnectionMetricsName { - kNetworkInterfaces_IPv4, // Number of IPv4 interfaces. - kNetworkInterfaces_IPv6, // Number of IPv6 interfaces. - kTimeToConnect, // In milliseconds. - kLocalCandidates_IPv4, // Number of IPv4 local candidates. - kLocalCandidates_IPv6, // Number of IPv6 local candidates. + kNetworkInterfaces_IPv4 = 0, // Number of IPv4 interfaces. + kNetworkInterfaces_IPv6 = 1, // Number of IPv6 interfaces. + kTimeToConnect = 2, // In milliseconds. + kLocalCandidates_IPv4 = 3, // Number of IPv4 local candidates. + kLocalCandidates_IPv6 = 4, // Number of IPv6 local candidates. kPeerConnectionMetricsName_Max }; @@ -51,109 +43,134 @@ enum PeerConnectionMetricsName { // _. It is recorded based on the // type of candidate pair used when the PeerConnection first goes to a completed // state. When BUNDLE is enabled, only the first transport gets recorded. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum IceCandidatePairType { // HostHost is deprecated. It was replaced with the set of types at the bottom // to report private or public host IP address. 
- kIceCandidatePairHostHost, - kIceCandidatePairHostSrflx, - kIceCandidatePairHostRelay, - kIceCandidatePairHostPrflx, - kIceCandidatePairSrflxHost, - kIceCandidatePairSrflxSrflx, - kIceCandidatePairSrflxRelay, - kIceCandidatePairSrflxPrflx, - kIceCandidatePairRelayHost, - kIceCandidatePairRelaySrflx, - kIceCandidatePairRelayRelay, - kIceCandidatePairRelayPrflx, - kIceCandidatePairPrflxHost, - kIceCandidatePairPrflxSrflx, - kIceCandidatePairPrflxRelay, + kIceCandidatePairHostHost = 0, + kIceCandidatePairHostSrflx = 1, + kIceCandidatePairHostRelay = 2, + kIceCandidatePairHostPrflx = 3, + kIceCandidatePairSrflxHost = 4, + kIceCandidatePairSrflxSrflx = 5, + kIceCandidatePairSrflxRelay = 6, + kIceCandidatePairSrflxPrflx = 7, + kIceCandidatePairRelayHost = 8, + kIceCandidatePairRelaySrflx = 9, + kIceCandidatePairRelayRelay = 10, + kIceCandidatePairRelayPrflx = 11, + kIceCandidatePairPrflxHost = 12, + kIceCandidatePairPrflxSrflx = 13, + kIceCandidatePairPrflxRelay = 14, // The following 9 types tell whether local and remote hosts have hostname, // private or public IP addresses. 
- kIceCandidatePairHostPrivateHostPrivate, - kIceCandidatePairHostPrivateHostPublic, - kIceCandidatePairHostPublicHostPrivate, - kIceCandidatePairHostPublicHostPublic, - kIceCandidatePairHostNameHostName, - kIceCandidatePairHostNameHostPrivate, - kIceCandidatePairHostNameHostPublic, - kIceCandidatePairHostPrivateHostName, - kIceCandidatePairHostPublicHostName, + kIceCandidatePairHostPrivateHostPrivate = 15, + kIceCandidatePairHostPrivateHostPublic = 16, + kIceCandidatePairHostPublicHostPrivate = 17, + kIceCandidatePairHostPublicHostPublic = 18, + kIceCandidatePairHostNameHostName = 19, + kIceCandidatePairHostNameHostPrivate = 20, + kIceCandidatePairHostNameHostPublic = 21, + kIceCandidatePairHostPrivateHostName = 22, + kIceCandidatePairHostPublicHostName = 23, kIceCandidatePairMax }; +// The difference between PeerConnectionEnumCounter and +// PeerConnectionMetricsName is that the "EnumCounter" is only counting the +// occurrences of events, while "Name" has a value associated with it which is +// used to form a histogram. + +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum KeyExchangeProtocolType { - kEnumCounterKeyProtocolDtls, - kEnumCounterKeyProtocolSdes, + kEnumCounterKeyProtocolDtls = 0, + kEnumCounterKeyProtocolSdes = 1, kEnumCounterKeyProtocolMax }; +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. 
enum KeyExchangeProtocolMedia { - kEnumCounterKeyProtocolMediaTypeDtlsAudio, - kEnumCounterKeyProtocolMediaTypeDtlsVideo, - kEnumCounterKeyProtocolMediaTypeDtlsData, - kEnumCounterKeyProtocolMediaTypeSdesAudio, - kEnumCounterKeyProtocolMediaTypeSdesVideo, - kEnumCounterKeyProtocolMediaTypeSdesData, + kEnumCounterKeyProtocolMediaTypeDtlsAudio = 0, + kEnumCounterKeyProtocolMediaTypeDtlsVideo = 1, + kEnumCounterKeyProtocolMediaTypeDtlsData = 2, + kEnumCounterKeyProtocolMediaTypeSdesAudio = 3, + kEnumCounterKeyProtocolMediaTypeSdesVideo = 4, + kEnumCounterKeyProtocolMediaTypeSdesData = 5, kEnumCounterKeyProtocolMediaTypeMax }; +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum SdpSemanticRequested { - kSdpSemanticRequestDefault, - kSdpSemanticRequestPlanB, - kSdpSemanticRequestUnifiedPlan, + kSdpSemanticRequestDefault = 0, + kSdpSemanticRequestPlanB = 1, + kSdpSemanticRequestUnifiedPlan = 2, kSdpSemanticRequestMax }; +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum SdpSemanticNegotiated { - kSdpSemanticNegotiatedNone, - kSdpSemanticNegotiatedPlanB, - kSdpSemanticNegotiatedUnifiedPlan, - kSdpSemanticNegotiatedMixed, + kSdpSemanticNegotiatedNone = 0, + kSdpSemanticNegotiatedPlanB = 1, + kSdpSemanticNegotiatedUnifiedPlan = 2, + kSdpSemanticNegotiatedMixed = 3, kSdpSemanticNegotiatedMax }; // Metric which records the format of the received SDP for tracking how much the // difference between Plan B and Unified Plan affect users. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum SdpFormatReceived { // No audio or video tracks. This is worth special casing since it seems to be // the most common scenario (data-channel only). - kSdpFormatReceivedNoTracks, + kSdpFormatReceivedNoTracks = 0, // No more than one audio and one video track. 
Should be compatible with both // Plan B and Unified Plan endpoints. - kSdpFormatReceivedSimple, + kSdpFormatReceivedSimple = 1, // More than one audio track or more than one video track in the Plan B format // (e.g., one audio media section with multiple streams). - kSdpFormatReceivedComplexPlanB, + kSdpFormatReceivedComplexPlanB = 2, // More than one audio track or more than one video track in the Unified Plan // format (e.g., two audio media sections). - kSdpFormatReceivedComplexUnifiedPlan, + kSdpFormatReceivedComplexUnifiedPlan = 3, kSdpFormatReceivedMax }; // Metric for counting the outcome of adding an ICE candidate +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. enum AddIceCandidateResult { - kAddIceCandidateSuccess, - kAddIceCandidateFailClosed, - kAddIceCandidateFailNoRemoteDescription, - kAddIceCandidateFailNullCandidate, - kAddIceCandidateFailNotValid, - kAddIceCandidateFailNotReady, - kAddIceCandidateFailInAddition, - kAddIceCandidateFailNotUsable, + kAddIceCandidateSuccess = 0, + kAddIceCandidateFailClosed = 1, + kAddIceCandidateFailNoRemoteDescription = 2, + kAddIceCandidateFailNullCandidate = 3, + kAddIceCandidateFailNotValid = 4, + kAddIceCandidateFailNotReady = 5, + kAddIceCandidateFailInAddition = 6, + kAddIceCandidateFailNotUsable = 7, kAddIceCandidateMax }; // Metric for recording which api surface was used to enable simulcast. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. 
enum SimulcastApiVersion { - kSimulcastApiVersionNone, - kSimulcastApiVersionLegacy, - kSimulcastApiVersionSpecCompliant, - kSimulcastApiVersionMax, + kSimulcastApiVersionNone = 0, + kSimulcastApiVersionLegacy = 1, + kSimulcastApiVersionSpecCompliant = 2, + kSimulcastApiVersionMax }; +// When adding new metrics please consider using the style described in +// https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md#usage +// instead of the legacy enums used above. + } // namespace webrtc #endif // API_UMA_METRICS_H_ diff --git a/api/units/data_rate.h b/api/units/data_rate.h index 78c2e0ed9a..5c8a61fd9c 100644 --- a/api/units/data_rate.h +++ b/api/units/data_rate.h @@ -31,31 +31,25 @@ namespace webrtc { // second (bps). class DataRate final : public rtc_units_impl::RelativeUnit { public: - DataRate() = delete; - static constexpr DataRate Infinity() { return PlusInfinity(); } - template - static constexpr DataRate BitsPerSec() { - return FromValue(bps); - } - template - static constexpr DataRate KilobitsPerSec() { - return FromFraction(1000, kbps); - } template - static constexpr DataRate bps(T bits_per_second) { + static constexpr DataRate BitsPerSec(T value) { static_assert(std::is_arithmetic::value, ""); - return FromValue(bits_per_second); + return FromValue(value); } template - static constexpr DataRate bytes_per_sec(T bytes_per_second) { + static constexpr DataRate BytesPerSec(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(8, bytes_per_second); + return FromFraction(8, value); } template - static constexpr DataRate kbps(T kilobits_per_sec) { + static constexpr DataRate KilobitsPerSec(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(1000, kilobits_per_sec); + return FromFraction(1000, value); } + static constexpr DataRate Infinity() { return PlusInfinity(); } + + DataRate() = delete; + template constexpr T bps() const { return ToValue(); @@ -103,15 +97,15 @@ inline 
constexpr int64_t MillibytePerSec(const DataRate& size) { inline constexpr DataRate operator/(const DataSize size, const TimeDelta duration) { - return DataRate::bps(data_rate_impl::Microbits(size) / duration.us()); + return DataRate::BitsPerSec(data_rate_impl::Microbits(size) / duration.us()); } inline constexpr TimeDelta operator/(const DataSize size, const DataRate rate) { - return TimeDelta::us(data_rate_impl::Microbits(size) / rate.bps()); + return TimeDelta::Micros(data_rate_impl::Microbits(size) / rate.bps()); } inline constexpr DataSize operator*(const DataRate rate, const TimeDelta duration) { int64_t microbits = rate.bps() * duration.us(); - return DataSize::bytes((microbits + 4000000) / 8000000); + return DataSize::Bytes((microbits + 4000000) / 8000000); } inline constexpr DataSize operator*(const TimeDelta duration, const DataRate rate) { @@ -123,10 +117,10 @@ inline constexpr DataSize operator/(const DataRate rate, int64_t millihertz = frequency.millihertz(); // Note that the value is truncated here reather than rounded, potentially // introducing an error of .5 bytes if rounding were expected. 
- return DataSize::bytes(data_rate_impl::MillibytePerSec(rate) / millihertz); + return DataSize::Bytes(data_rate_impl::MillibytePerSec(rate) / millihertz); } inline constexpr Frequency operator/(const DataRate rate, const DataSize size) { - return Frequency::millihertz(data_rate_impl::MillibytePerSec(rate) / + return Frequency::MilliHertz(data_rate_impl::MillibytePerSec(rate) / size.bytes()); } inline constexpr DataRate operator*(const DataSize size, @@ -136,7 +130,7 @@ inline constexpr DataRate operator*(const DataSize size, frequency.millihertz()); int64_t millibits_per_second = size.bytes() * 8 * frequency.millihertz(); - return DataRate::bps((millibits_per_second + 500) / 1000); + return DataRate::BitsPerSec((millibits_per_second + 500) / 1000); } inline constexpr DataRate operator*(const Frequency frequency, const DataSize size) { diff --git a/api/units/data_rate_unittest.cc b/api/units/data_rate_unittest.cc index d887107dea..f77b3702d4 100644 --- a/api/units/data_rate_unittest.cc +++ b/api/units/data_rate_unittest.cc @@ -17,8 +17,8 @@ namespace webrtc { namespace test { TEST(DataRateTest, CompilesWithChecksAndLogs) { - DataRate a = DataRate::kbps(300); - DataRate b = DataRate::kbps(210); + DataRate a = DataRate::KilobitsPerSec(300); + DataRate b = DataRate::KilobitsPerSec(210); RTC_CHECK_GT(a, b); RTC_LOG(LS_INFO) << a; } @@ -32,8 +32,8 @@ TEST(DataRateTest, ConstExpr) { static_assert(kDataRateInf.bps_or(-1) == -1, ""); static_assert(kDataRateInf > kDataRateZero, ""); - constexpr DataRate kDataRateBps = DataRate::BitsPerSec(); - constexpr DataRate kDataRateKbps = DataRate::KilobitsPerSec(); + constexpr DataRate kDataRateBps = DataRate::BitsPerSec(kValue); + constexpr DataRate kDataRateKbps = DataRate::KilobitsPerSec(kValue); static_assert(kDataRateBps.bps() == kValue, ""); static_assert(kDataRateBps.bps_or(0) == kValue, ""); static_assert(kDataRateKbps.kbps_or(0) == kValue, ""); @@ -41,36 +41,36 @@ TEST(DataRateTest, ConstExpr) { TEST(DataRateTest, 
GetBackSameValues) { const int64_t kValue = 123 * 8; - EXPECT_EQ(DataRate::bps(kValue).bps(), kValue); - EXPECT_EQ(DataRate::kbps(kValue).kbps(), kValue); + EXPECT_EQ(DataRate::BitsPerSec(kValue).bps(), kValue); + EXPECT_EQ(DataRate::KilobitsPerSec(kValue).kbps(), kValue); } TEST(DataRateTest, GetDifferentPrefix) { const int64_t kValue = 123 * 8000; - EXPECT_EQ(DataRate::bps(kValue).kbps(), kValue / 1000); + EXPECT_EQ(DataRate::BitsPerSec(kValue).kbps(), kValue / 1000); } TEST(DataRateTest, IdentityChecks) { const int64_t kValue = 3000; EXPECT_TRUE(DataRate::Zero().IsZero()); - EXPECT_FALSE(DataRate::bps(kValue).IsZero()); + EXPECT_FALSE(DataRate::BitsPerSec(kValue).IsZero()); EXPECT_TRUE(DataRate::Infinity().IsInfinite()); EXPECT_FALSE(DataRate::Zero().IsInfinite()); - EXPECT_FALSE(DataRate::bps(kValue).IsInfinite()); + EXPECT_FALSE(DataRate::BitsPerSec(kValue).IsInfinite()); EXPECT_FALSE(DataRate::Infinity().IsFinite()); - EXPECT_TRUE(DataRate::bps(kValue).IsFinite()); + EXPECT_TRUE(DataRate::BitsPerSec(kValue).IsFinite()); EXPECT_TRUE(DataRate::Zero().IsFinite()); } TEST(DataRateTest, ComparisonOperators) { const int64_t kSmall = 450; const int64_t kLarge = 451; - const DataRate small = DataRate::bps(kSmall); - const DataRate large = DataRate::bps(kLarge); + const DataRate small = DataRate::BitsPerSec(kSmall); + const DataRate large = DataRate::BitsPerSec(kLarge); - EXPECT_EQ(DataRate::Zero(), DataRate::bps(0)); + EXPECT_EQ(DataRate::Zero(), DataRate::BitsPerSec(0)); EXPECT_EQ(DataRate::Infinity(), DataRate::Infinity()); EXPECT_EQ(small, small); EXPECT_LE(small, small); @@ -90,23 +90,23 @@ TEST(DataRateTest, ConvertsToAndFromDouble) { const double kDoubleKbps = kValue * 1e-3; const double kFloatKbps = static_cast(kDoubleKbps); - EXPECT_EQ(DataRate::bps(kValue).bps(), kDoubleValue); - EXPECT_EQ(DataRate::bps(kValue).kbps(), kDoubleKbps); - EXPECT_EQ(DataRate::bps(kValue).kbps(), kFloatKbps); - EXPECT_EQ(DataRate::bps(kDoubleValue).bps(), kValue); - 
EXPECT_EQ(DataRate::kbps(kDoubleKbps).bps(), kValue); + EXPECT_EQ(DataRate::BitsPerSec(kValue).bps(), kDoubleValue); + EXPECT_EQ(DataRate::BitsPerSec(kValue).kbps(), kDoubleKbps); + EXPECT_EQ(DataRate::BitsPerSec(kValue).kbps(), kFloatKbps); + EXPECT_EQ(DataRate::BitsPerSec(kDoubleValue).bps(), kValue); + EXPECT_EQ(DataRate::KilobitsPerSec(kDoubleKbps).bps(), kValue); const double kInfinity = std::numeric_limits::infinity(); EXPECT_EQ(DataRate::Infinity().bps(), kInfinity); - EXPECT_TRUE(DataRate::bps(kInfinity).IsInfinite()); - EXPECT_TRUE(DataRate::kbps(kInfinity).IsInfinite()); + EXPECT_TRUE(DataRate::BitsPerSec(kInfinity).IsInfinite()); + EXPECT_TRUE(DataRate::KilobitsPerSec(kInfinity).IsInfinite()); } TEST(DataRateTest, Clamping) { - const DataRate upper = DataRate::kbps(800); - const DataRate lower = DataRate::kbps(100); - const DataRate under = DataRate::kbps(100); - const DataRate inside = DataRate::kbps(500); - const DataRate over = DataRate::kbps(1000); + const DataRate upper = DataRate::KilobitsPerSec(800); + const DataRate lower = DataRate::KilobitsPerSec(100); + const DataRate under = DataRate::KilobitsPerSec(100); + const DataRate inside = DataRate::KilobitsPerSec(500); + const DataRate over = DataRate::KilobitsPerSec(1000); EXPECT_EQ(under.Clamped(lower, upper), lower); EXPECT_EQ(inside.Clamped(lower, upper), inside); EXPECT_EQ(over.Clamped(lower, upper), upper); @@ -125,8 +125,8 @@ TEST(DataRateTest, Clamping) { TEST(DataRateTest, MathOperations) { const int64_t kValueA = 450; const int64_t kValueB = 267; - const DataRate rate_a = DataRate::bps(kValueA); - const DataRate rate_b = DataRate::bps(kValueB); + const DataRate rate_a = DataRate::BitsPerSec(kValueA); + const DataRate rate_b = DataRate::BitsPerSec(kValueB); const int32_t kInt32Value = 123; const double kFloatValue = 123.0; @@ -142,7 +142,7 @@ TEST(DataRateTest, MathOperations) { EXPECT_EQ((rate_a / 10).bps(), kValueA / 10); EXPECT_NEAR((rate_a / 0.5).bps(), kValueA * 2, 1); - DataRate 
mutable_rate = DataRate::bps(kValueA); + DataRate mutable_rate = DataRate::BitsPerSec(kValueA); mutable_rate += rate_b; EXPECT_EQ(mutable_rate.bps(), kValueA + kValueB); mutable_rate -= rate_a; @@ -153,9 +153,9 @@ TEST(UnitConversionTest, DataRateAndDataSizeAndTimeDelta) { const int64_t kSeconds = 5; const int64_t kBitsPerSecond = 440; const int64_t kBytes = 44000; - const TimeDelta delta_a = TimeDelta::seconds(kSeconds); - const DataRate rate_b = DataRate::bps(kBitsPerSecond); - const DataSize size_c = DataSize::bytes(kBytes); + const TimeDelta delta_a = TimeDelta::Seconds(kSeconds); + const DataRate rate_b = DataRate::BitsPerSec(kBitsPerSecond); + const DataSize size_c = DataSize::Bytes(kBytes); EXPECT_EQ((delta_a * rate_b).bytes(), kSeconds * kBitsPerSecond / 8); EXPECT_EQ((rate_b * delta_a).bytes(), kSeconds * kBitsPerSecond / 8); EXPECT_EQ((size_c / delta_a).bps(), kBytes * 8 / kSeconds); @@ -166,29 +166,29 @@ TEST(UnitConversionTest, DataRateAndDataSizeAndFrequency) { const int64_t kHertz = 30; const int64_t kBitsPerSecond = 96000; const int64_t kBytes = 1200; - const Frequency freq_a = Frequency::hertz(kHertz); - const DataRate rate_b = DataRate::bps(kBitsPerSecond); - const DataSize size_c = DataSize::bytes(kBytes); + const Frequency freq_a = Frequency::Hertz(kHertz); + const DataRate rate_b = DataRate::BitsPerSec(kBitsPerSecond); + const DataSize size_c = DataSize::Bytes(kBytes); EXPECT_EQ((freq_a * size_c).bps(), kHertz * kBytes * 8); EXPECT_EQ((size_c * freq_a).bps(), kHertz * kBytes * 8); EXPECT_EQ((rate_b / size_c).hertz(), kBitsPerSecond / kBytes / 8); EXPECT_EQ((rate_b / freq_a).bytes(), kBitsPerSecond / kHertz / 8); } -TEST(UnitConversionTest, DivisionFailsOnLargeSize) { +TEST(UnitConversionDeathTest, DivisionFailsOnLargeSize) { // Note that the failure is expected since the current implementation is // implementated in a way that does not support division of large sizes. If // the implementation is changed, this test can safely be removed. 
const int64_t kJustSmallEnoughForDivision = std::numeric_limits::max() / 8000000; - const DataSize large_size = DataSize::bytes(kJustSmallEnoughForDivision); - const DataRate data_rate = DataRate::kbps(100); - const TimeDelta time_delta = TimeDelta::ms(100); + const DataSize large_size = DataSize::Bytes(kJustSmallEnoughForDivision); + const DataRate data_rate = DataRate::KilobitsPerSec(100); + const TimeDelta time_delta = TimeDelta::Millis(100); EXPECT_TRUE((large_size / data_rate).IsFinite()); EXPECT_TRUE((large_size / time_delta).IsFinite()); #if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) && RTC_DCHECK_IS_ON const int64_t kToolargeForDivision = kJustSmallEnoughForDivision + 1; - const DataSize too_large_size = DataSize::bytes(kToolargeForDivision); + const DataSize too_large_size = DataSize::Bytes(kToolargeForDivision); EXPECT_DEATH(too_large_size / data_rate, ""); EXPECT_DEATH(too_large_size / time_delta, ""); #endif // GTEST_HAS_DEATH_TEST && !!defined(WEBRTC_ANDROID) && RTC_DCHECK_IS_ON diff --git a/api/units/data_size.h b/api/units/data_size.h index d294016489..27a2a4e4dc 100644 --- a/api/units/data_size.h +++ b/api/units/data_size.h @@ -24,18 +24,15 @@ namespace webrtc { // DataSize is a class represeting a count of bytes. 
class DataSize final : public rtc_units_impl::RelativeUnit { public: - DataSize() = delete; - static constexpr DataSize Infinity() { return PlusInfinity(); } - template - static constexpr DataSize Bytes() { - return FromValue(bytes); - } - template - static constexpr DataSize bytes(T bytes) { + static constexpr DataSize Bytes(T value) { static_assert(std::is_arithmetic::value, ""); - return FromValue(bytes); + return FromValue(value); } + static constexpr DataSize Infinity() { return PlusInfinity(); } + + DataSize() = delete; + template constexpr T bytes() const { return ToValue(); diff --git a/api/units/data_size_unittest.cc b/api/units/data_size_unittest.cc index fe7f591dc0..eb8d98c1f0 100644 --- a/api/units/data_size_unittest.cc +++ b/api/units/data_size_unittest.cc @@ -9,6 +9,9 @@ */ #include "api/units/data_size.h" + +#include + #include "test/gtest.h" namespace webrtc { @@ -23,7 +26,7 @@ TEST(DataSizeTest, ConstExpr) { static_assert(kDataSizeInf.bytes_or(-1) == -1, ""); static_assert(kDataSizeInf > kDataSizeZero, ""); - constexpr DataSize kDataSize = DataSize::Bytes(); + constexpr DataSize kDataSize = DataSize::Bytes(kValue); static_assert(kDataSize.bytes_or(-1) == kValue, ""); EXPECT_EQ(kDataSize.bytes(), kValue); @@ -31,30 +34,30 @@ TEST(DataSizeTest, ConstExpr) { TEST(DataSizeTest, GetBackSameValues) { const int64_t kValue = 123 * 8; - EXPECT_EQ(DataSize::bytes(kValue).bytes(), kValue); + EXPECT_EQ(DataSize::Bytes(kValue).bytes(), kValue); } TEST(DataSizeTest, IdentityChecks) { const int64_t kValue = 3000; EXPECT_TRUE(DataSize::Zero().IsZero()); - EXPECT_FALSE(DataSize::bytes(kValue).IsZero()); + EXPECT_FALSE(DataSize::Bytes(kValue).IsZero()); EXPECT_TRUE(DataSize::Infinity().IsInfinite()); EXPECT_FALSE(DataSize::Zero().IsInfinite()); - EXPECT_FALSE(DataSize::bytes(kValue).IsInfinite()); + EXPECT_FALSE(DataSize::Bytes(kValue).IsInfinite()); EXPECT_FALSE(DataSize::Infinity().IsFinite()); - EXPECT_TRUE(DataSize::bytes(kValue).IsFinite()); + 
EXPECT_TRUE(DataSize::Bytes(kValue).IsFinite()); EXPECT_TRUE(DataSize::Zero().IsFinite()); } TEST(DataSizeTest, ComparisonOperators) { const int64_t kSmall = 450; const int64_t kLarge = 451; - const DataSize small = DataSize::bytes(kSmall); - const DataSize large = DataSize::bytes(kLarge); + const DataSize small = DataSize::Bytes(kSmall); + const DataSize large = DataSize::Bytes(kLarge); - EXPECT_EQ(DataSize::Zero(), DataSize::bytes(0)); + EXPECT_EQ(DataSize::Zero(), DataSize::Bytes(0)); EXPECT_EQ(DataSize::Infinity(), DataSize::Infinity()); EXPECT_EQ(small, small); EXPECT_LE(small, small); @@ -72,19 +75,19 @@ TEST(DataSizeTest, ConvertsToAndFromDouble) { const int64_t kValue = 128; const double kDoubleValue = static_cast(kValue); - EXPECT_EQ(DataSize::bytes(kValue).bytes(), kDoubleValue); - EXPECT_EQ(DataSize::bytes(kDoubleValue).bytes(), kValue); + EXPECT_EQ(DataSize::Bytes(kValue).bytes(), kDoubleValue); + EXPECT_EQ(DataSize::Bytes(kDoubleValue).bytes(), kValue); const double kInfinity = std::numeric_limits::infinity(); EXPECT_EQ(DataSize::Infinity().bytes(), kInfinity); - EXPECT_TRUE(DataSize::bytes(kInfinity).IsInfinite()); + EXPECT_TRUE(DataSize::Bytes(kInfinity).IsInfinite()); } TEST(DataSizeTest, MathOperations) { const int64_t kValueA = 450; const int64_t kValueB = 267; - const DataSize size_a = DataSize::bytes(kValueA); - const DataSize size_b = DataSize::bytes(kValueB); + const DataSize size_a = DataSize::Bytes(kValueA); + const DataSize size_b = DataSize::Bytes(kValueB); EXPECT_EQ((size_a + size_b).bytes(), kValueA + kValueB); EXPECT_EQ((size_a - size_b).bytes(), kValueA - kValueB); @@ -97,7 +100,7 @@ TEST(DataSizeTest, MathOperations) { EXPECT_EQ((size_a / 10).bytes(), kValueA / 10); EXPECT_EQ(size_a / size_b, static_cast(kValueA) / kValueB); - DataSize mutable_size = DataSize::bytes(kValueA); + DataSize mutable_size = DataSize::Bytes(kValueA); mutable_size += size_b; EXPECT_EQ(mutable_size.bytes(), kValueA + kValueB); mutable_size -= size_a; diff 
--git a/api/units/frequency.h b/api/units/frequency.h index 53bd7c902f..88912c64d5 100644 --- a/api/units/frequency.h +++ b/api/units/frequency.h @@ -26,26 +26,24 @@ namespace webrtc { class Frequency final : public rtc_units_impl::RelativeUnit { public: - Frequency() = delete; - template - static constexpr Frequency Hertz() { - return FromFraction(1000, hertz); - } template - static constexpr Frequency kHz(T hertz) { + static constexpr Frequency MilliHertz(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(1000000, hertz); + return FromValue(value); } template - static constexpr Frequency hertz(T hertz) { + static constexpr Frequency Hertz(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(1000, hertz); + return FromFraction(1'000, value); } template - static constexpr Frequency millihertz(T hertz) { + static constexpr Frequency KiloHertz(T value) { static_assert(std::is_arithmetic::value, ""); - return FromValue(hertz); + return FromFraction(1'000'000, value); } + + Frequency() = delete; + template constexpr T hertz() const { return ToFraction<1000, T>(); @@ -67,7 +65,7 @@ inline constexpr Frequency operator/(int64_t nominator, RTC_DCHECK_LE(nominator, std::numeric_limits::max() / kKiloPerMicro); RTC_CHECK(interval.IsFinite()); RTC_CHECK(!interval.IsZero()); - return Frequency::millihertz(nominator * kKiloPerMicro / interval.us()); + return Frequency::MilliHertz(nominator * kKiloPerMicro / interval.us()); } inline constexpr TimeDelta operator/(int64_t nominator, @@ -76,7 +74,7 @@ inline constexpr TimeDelta operator/(int64_t nominator, RTC_DCHECK_LE(nominator, std::numeric_limits::max() / kMegaPerMilli); RTC_CHECK(frequency.IsFinite()); RTC_CHECK(!frequency.IsZero()); - return TimeDelta::us(nominator * kMegaPerMilli / frequency.millihertz()); + return TimeDelta::Micros(nominator * kMegaPerMilli / frequency.millihertz()); } inline constexpr double operator*(Frequency frequency, TimeDelta time_delta) { diff 
--git a/api/units/frequency_unittest.cc b/api/units/frequency_unittest.cc index 16a7e1bb85..1260c2107d 100644 --- a/api/units/frequency_unittest.cc +++ b/api/units/frequency_unittest.cc @@ -28,29 +28,30 @@ TEST(FrequencyTest, ConstExpr) { TEST(FrequencyTest, GetBackSameValues) { const int64_t kValue = 31; - EXPECT_EQ(Frequency::hertz(kValue).hertz(), kValue); + EXPECT_EQ(Frequency::Hertz(kValue).hertz(), kValue); EXPECT_EQ(Frequency::Zero().hertz(), 0); } TEST(FrequencyTest, GetDifferentPrefix) { const int64_t kValue = 30000; - EXPECT_EQ(Frequency::millihertz(kValue).hertz(), kValue / 1000); - EXPECT_EQ(Frequency::hertz(kValue).millihertz(), kValue * 1000); + EXPECT_EQ(Frequency::MilliHertz(kValue).hertz(), kValue / 1000); + EXPECT_EQ(Frequency::Hertz(kValue).millihertz(), kValue * 1000); + EXPECT_EQ(Frequency::KiloHertz(kValue).hertz(), kValue * 1000); } TEST(FrequencyTest, IdentityChecks) { const int64_t kValue = 31; EXPECT_TRUE(Frequency::Zero().IsZero()); - EXPECT_FALSE(Frequency::hertz(kValue).IsZero()); + EXPECT_FALSE(Frequency::Hertz(kValue).IsZero()); EXPECT_TRUE(Frequency::PlusInfinity().IsInfinite()); EXPECT_TRUE(Frequency::MinusInfinity().IsInfinite()); EXPECT_FALSE(Frequency::Zero().IsInfinite()); - EXPECT_FALSE(Frequency::hertz(kValue).IsInfinite()); + EXPECT_FALSE(Frequency::Hertz(kValue).IsInfinite()); EXPECT_FALSE(Frequency::PlusInfinity().IsFinite()); EXPECT_FALSE(Frequency::MinusInfinity().IsFinite()); - EXPECT_TRUE(Frequency::hertz(kValue).IsFinite()); + EXPECT_TRUE(Frequency::Hertz(kValue).IsFinite()); EXPECT_TRUE(Frequency::Zero().IsFinite()); EXPECT_TRUE(Frequency::PlusInfinity().IsPlusInfinity()); @@ -63,19 +64,19 @@ TEST(FrequencyTest, IdentityChecks) { TEST(FrequencyTest, ComparisonOperators) { const int64_t kSmall = 42; const int64_t kLarge = 45; - const Frequency small = Frequency::hertz(kSmall); - const Frequency large = Frequency::hertz(kLarge); + const Frequency small = Frequency::Hertz(kSmall); + const Frequency large = 
Frequency::Hertz(kLarge); - EXPECT_EQ(Frequency::Zero(), Frequency::hertz(0)); + EXPECT_EQ(Frequency::Zero(), Frequency::Hertz(0)); EXPECT_EQ(Frequency::PlusInfinity(), Frequency::PlusInfinity()); - EXPECT_EQ(small, Frequency::hertz(kSmall)); - EXPECT_LE(small, Frequency::hertz(kSmall)); - EXPECT_GE(small, Frequency::hertz(kSmall)); - EXPECT_NE(small, Frequency::hertz(kLarge)); - EXPECT_LE(small, Frequency::hertz(kLarge)); - EXPECT_LT(small, Frequency::hertz(kLarge)); - EXPECT_GE(large, Frequency::hertz(kSmall)); - EXPECT_GT(large, Frequency::hertz(kSmall)); + EXPECT_EQ(small, Frequency::Hertz(kSmall)); + EXPECT_LE(small, Frequency::Hertz(kSmall)); + EXPECT_GE(small, Frequency::Hertz(kSmall)); + EXPECT_NE(small, Frequency::Hertz(kLarge)); + EXPECT_LE(small, Frequency::Hertz(kLarge)); + EXPECT_LT(small, Frequency::Hertz(kLarge)); + EXPECT_GE(large, Frequency::Hertz(kSmall)); + EXPECT_GT(large, Frequency::Hertz(kSmall)); EXPECT_LT(Frequency::Zero(), small); EXPECT_GT(Frequency::PlusInfinity(), large); @@ -83,11 +84,11 @@ TEST(FrequencyTest, ComparisonOperators) { } TEST(FrequencyTest, Clamping) { - const Frequency upper = Frequency::hertz(800); - const Frequency lower = Frequency::hertz(100); - const Frequency under = Frequency::hertz(100); - const Frequency inside = Frequency::hertz(500); - const Frequency over = Frequency::hertz(1000); + const Frequency upper = Frequency::Hertz(800); + const Frequency lower = Frequency::Hertz(100); + const Frequency under = Frequency::Hertz(100); + const Frequency inside = Frequency::Hertz(500); + const Frequency over = Frequency::Hertz(1000); EXPECT_EQ(under.Clamped(lower, upper), lower); EXPECT_EQ(inside.Clamped(lower, upper), inside); EXPECT_EQ(over.Clamped(lower, upper), upper); @@ -106,40 +107,40 @@ TEST(FrequencyTest, Clamping) { TEST(FrequencyTest, MathOperations) { const int64_t kValueA = 457; const int64_t kValueB = 260; - const Frequency frequency_a = Frequency::hertz(kValueA); - const Frequency frequency_b = 
Frequency::hertz(kValueB); + const Frequency frequency_a = Frequency::Hertz(kValueA); + const Frequency frequency_b = Frequency::Hertz(kValueB); EXPECT_EQ((frequency_a + frequency_b).hertz(), kValueA + kValueB); EXPECT_EQ((frequency_a - frequency_b).hertz(), kValueA - kValueB); - EXPECT_EQ((Frequency::hertz(kValueA) * kValueB).hertz(), + EXPECT_EQ((Frequency::Hertz(kValueA) * kValueB).hertz(), kValueA * kValueB); EXPECT_EQ((frequency_b / 10).hertz(), kValueB / 10); EXPECT_EQ(frequency_b / frequency_a, static_cast(kValueB) / kValueA); - Frequency mutable_frequency = Frequency::hertz(kValueA); - mutable_frequency += Frequency::hertz(kValueB); - EXPECT_EQ(mutable_frequency, Frequency::hertz(kValueA + kValueB)); - mutable_frequency -= Frequency::hertz(kValueB); - EXPECT_EQ(mutable_frequency, Frequency::hertz(kValueA)); + Frequency mutable_frequency = Frequency::Hertz(kValueA); + mutable_frequency += Frequency::Hertz(kValueB); + EXPECT_EQ(mutable_frequency, Frequency::Hertz(kValueA + kValueB)); + mutable_frequency -= Frequency::Hertz(kValueB); + EXPECT_EQ(mutable_frequency, Frequency::Hertz(kValueA)); } TEST(FrequencyTest, Rounding) { - const Frequency freq_high = Frequency::hertz(23.976); + const Frequency freq_high = Frequency::Hertz(23.976); EXPECT_EQ(freq_high.hertz(), 24); - EXPECT_EQ(freq_high.RoundDownTo(Frequency::hertz(1)), Frequency::hertz(23)); - EXPECT_EQ(freq_high.RoundTo(Frequency::hertz(1)), Frequency::hertz(24)); - EXPECT_EQ(freq_high.RoundUpTo(Frequency::hertz(1)), Frequency::hertz(24)); + EXPECT_EQ(freq_high.RoundDownTo(Frequency::Hertz(1)), Frequency::Hertz(23)); + EXPECT_EQ(freq_high.RoundTo(Frequency::Hertz(1)), Frequency::Hertz(24)); + EXPECT_EQ(freq_high.RoundUpTo(Frequency::Hertz(1)), Frequency::Hertz(24)); - const Frequency freq_low = Frequency::hertz(23.4); + const Frequency freq_low = Frequency::Hertz(23.4); EXPECT_EQ(freq_low.hertz(), 23); - EXPECT_EQ(freq_low.RoundDownTo(Frequency::hertz(1)), Frequency::hertz(23)); - 
EXPECT_EQ(freq_low.RoundTo(Frequency::hertz(1)), Frequency::hertz(23)); - EXPECT_EQ(freq_low.RoundUpTo(Frequency::hertz(1)), Frequency::hertz(24)); + EXPECT_EQ(freq_low.RoundDownTo(Frequency::Hertz(1)), Frequency::Hertz(23)); + EXPECT_EQ(freq_low.RoundTo(Frequency::Hertz(1)), Frequency::Hertz(23)); + EXPECT_EQ(freq_low.RoundUpTo(Frequency::Hertz(1)), Frequency::Hertz(24)); } TEST(FrequencyTest, InfinityOperations) { const double kValue = 267; - const Frequency finite = Frequency::hertz(kValue); + const Frequency finite = Frequency::Hertz(kValue); EXPECT_TRUE((Frequency::PlusInfinity() + finite).IsPlusInfinity()); EXPECT_TRUE((Frequency::PlusInfinity() - finite).IsPlusInfinity()); EXPECT_TRUE((finite + Frequency::PlusInfinity()).IsPlusInfinity()); @@ -152,9 +153,9 @@ TEST(FrequencyTest, InfinityOperations) { } TEST(UnitConversionTest, TimeDeltaAndFrequency) { - EXPECT_EQ(1 / Frequency::hertz(50), TimeDelta::ms(20)); - EXPECT_EQ(1 / TimeDelta::ms(20), Frequency::hertz(50)); - EXPECT_EQ(Frequency::kHz(200) * TimeDelta::ms(2), 400.0); + EXPECT_EQ(1 / Frequency::Hertz(50), TimeDelta::Millis(20)); + EXPECT_EQ(1 / TimeDelta::Millis(20), Frequency::Hertz(50)); + EXPECT_EQ(Frequency::KiloHertz(200) * TimeDelta::Millis(2), 400.0); } } // namespace test } // namespace webrtc diff --git a/api/units/time_delta.h b/api/units/time_delta.h index 030974f8c2..173affcc56 100644 --- a/api/units/time_delta.h +++ b/api/units/time_delta.h @@ -32,34 +32,24 @@ namespace webrtc { // microseconds (us). 
class TimeDelta final : public rtc_units_impl::RelativeUnit { public: - TimeDelta() = delete; - template - static constexpr TimeDelta Seconds() { - return FromFraction(1'000'000, seconds); - } - template - static constexpr TimeDelta Millis() { - return FromFraction(1000, ms); - } - template - static constexpr TimeDelta Micros() { - return FromValue(us); - } template - static constexpr TimeDelta seconds(T seconds) { + static constexpr TimeDelta Seconds(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(1'000'000, seconds); + return FromFraction(1'000'000, value); } template - static constexpr TimeDelta ms(T milliseconds) { + static constexpr TimeDelta Millis(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(1000, milliseconds); + return FromFraction(1'000, value); } template - static constexpr TimeDelta us(T microseconds) { + static constexpr TimeDelta Micros(T value) { static_assert(std::is_arithmetic::value, ""); - return FromValue(microseconds); + return FromValue(value); } + + TimeDelta() = delete; + template constexpr T seconds() const { return ToFraction<1000000, T>(); @@ -88,7 +78,7 @@ class TimeDelta final : public rtc_units_impl::RelativeUnit { } constexpr TimeDelta Abs() const { - return us() < 0 ? TimeDelta::us(-us()) : *this; + return us() < 0 ? 
TimeDelta::Micros(-us()) : *this; } private: diff --git a/api/units/time_delta_unittest.cc b/api/units/time_delta_unittest.cc index a46ba835cb..cb43860531 100644 --- a/api/units/time_delta_unittest.cc +++ b/api/units/time_delta_unittest.cc @@ -28,9 +28,9 @@ TEST(TimeDeltaTest, ConstExpr) { static_assert(kTimeDeltaPlusInf > kTimeDeltaZero, ""); - constexpr TimeDelta kTimeDeltaSeconds = TimeDelta::Seconds(); - constexpr TimeDelta kTimeDeltaMs = TimeDelta::Millis(); - constexpr TimeDelta kTimeDeltaUs = TimeDelta::Micros(); + constexpr TimeDelta kTimeDeltaSeconds = TimeDelta::Seconds(kValue); + constexpr TimeDelta kTimeDeltaMs = TimeDelta::Millis(kValue); + constexpr TimeDelta kTimeDeltaUs = TimeDelta::Micros(kValue); static_assert(kTimeDeltaSeconds.seconds_or(0) == kValue, ""); static_assert(kTimeDeltaMs.ms_or(0) == kValue, ""); @@ -41,40 +41,40 @@ TEST(TimeDeltaTest, GetBackSameValues) { const int64_t kValue = 499; for (int sign = -1; sign <= 1; ++sign) { int64_t value = kValue * sign; - EXPECT_EQ(TimeDelta::ms(value).ms(), value); - EXPECT_EQ(TimeDelta::us(value).us(), value); - EXPECT_EQ(TimeDelta::seconds(value).seconds(), value); - EXPECT_EQ(TimeDelta::seconds(value).seconds(), value); + EXPECT_EQ(TimeDelta::Millis(value).ms(), value); + EXPECT_EQ(TimeDelta::Micros(value).us(), value); + EXPECT_EQ(TimeDelta::Seconds(value).seconds(), value); + EXPECT_EQ(TimeDelta::Seconds(value).seconds(), value); } EXPECT_EQ(TimeDelta::Zero().us(), 0); } TEST(TimeDeltaTest, GetDifferentPrefix) { const int64_t kValue = 3000000; - EXPECT_EQ(TimeDelta::us(kValue).seconds(), kValue / 1000000); - EXPECT_EQ(TimeDelta::ms(kValue).seconds(), kValue / 1000); - EXPECT_EQ(TimeDelta::us(kValue).ms(), kValue / 1000); + EXPECT_EQ(TimeDelta::Micros(kValue).seconds(), kValue / 1000000); + EXPECT_EQ(TimeDelta::Millis(kValue).seconds(), kValue / 1000); + EXPECT_EQ(TimeDelta::Micros(kValue).ms(), kValue / 1000); - EXPECT_EQ(TimeDelta::ms(kValue).us(), kValue * 1000); - 
EXPECT_EQ(TimeDelta::seconds(kValue).ms(), kValue * 1000); - EXPECT_EQ(TimeDelta::seconds(kValue).us(), kValue * 1000000); + EXPECT_EQ(TimeDelta::Millis(kValue).us(), kValue * 1000); + EXPECT_EQ(TimeDelta::Seconds(kValue).ms(), kValue * 1000); + EXPECT_EQ(TimeDelta::Seconds(kValue).us(), kValue * 1000000); } TEST(TimeDeltaTest, IdentityChecks) { const int64_t kValue = 3000; EXPECT_TRUE(TimeDelta::Zero().IsZero()); - EXPECT_FALSE(TimeDelta::ms(kValue).IsZero()); + EXPECT_FALSE(TimeDelta::Millis(kValue).IsZero()); EXPECT_TRUE(TimeDelta::PlusInfinity().IsInfinite()); EXPECT_TRUE(TimeDelta::MinusInfinity().IsInfinite()); EXPECT_FALSE(TimeDelta::Zero().IsInfinite()); - EXPECT_FALSE(TimeDelta::ms(-kValue).IsInfinite()); - EXPECT_FALSE(TimeDelta::ms(kValue).IsInfinite()); + EXPECT_FALSE(TimeDelta::Millis(-kValue).IsInfinite()); + EXPECT_FALSE(TimeDelta::Millis(kValue).IsInfinite()); EXPECT_FALSE(TimeDelta::PlusInfinity().IsFinite()); EXPECT_FALSE(TimeDelta::MinusInfinity().IsFinite()); - EXPECT_TRUE(TimeDelta::ms(-kValue).IsFinite()); - EXPECT_TRUE(TimeDelta::ms(kValue).IsFinite()); + EXPECT_TRUE(TimeDelta::Millis(-kValue).IsFinite()); + EXPECT_TRUE(TimeDelta::Millis(kValue).IsFinite()); EXPECT_TRUE(TimeDelta::Zero().IsFinite()); EXPECT_TRUE(TimeDelta::PlusInfinity().IsPlusInfinity()); @@ -87,33 +87,33 @@ TEST(TimeDeltaTest, IdentityChecks) { TEST(TimeDeltaTest, ComparisonOperators) { const int64_t kSmall = 450; const int64_t kLarge = 451; - const TimeDelta small = TimeDelta::ms(kSmall); - const TimeDelta large = TimeDelta::ms(kLarge); + const TimeDelta small = TimeDelta::Millis(kSmall); + const TimeDelta large = TimeDelta::Millis(kLarge); - EXPECT_EQ(TimeDelta::Zero(), TimeDelta::ms(0)); + EXPECT_EQ(TimeDelta::Zero(), TimeDelta::Millis(0)); EXPECT_EQ(TimeDelta::PlusInfinity(), TimeDelta::PlusInfinity()); - EXPECT_EQ(small, TimeDelta::ms(kSmall)); - EXPECT_LE(small, TimeDelta::ms(kSmall)); - EXPECT_GE(small, TimeDelta::ms(kSmall)); - EXPECT_NE(small, 
TimeDelta::ms(kLarge)); - EXPECT_LE(small, TimeDelta::ms(kLarge)); - EXPECT_LT(small, TimeDelta::ms(kLarge)); - EXPECT_GE(large, TimeDelta::ms(kSmall)); - EXPECT_GT(large, TimeDelta::ms(kSmall)); + EXPECT_EQ(small, TimeDelta::Millis(kSmall)); + EXPECT_LE(small, TimeDelta::Millis(kSmall)); + EXPECT_GE(small, TimeDelta::Millis(kSmall)); + EXPECT_NE(small, TimeDelta::Millis(kLarge)); + EXPECT_LE(small, TimeDelta::Millis(kLarge)); + EXPECT_LT(small, TimeDelta::Millis(kLarge)); + EXPECT_GE(large, TimeDelta::Millis(kSmall)); + EXPECT_GT(large, TimeDelta::Millis(kSmall)); EXPECT_LT(TimeDelta::Zero(), small); - EXPECT_GT(TimeDelta::Zero(), TimeDelta::ms(-kSmall)); - EXPECT_GT(TimeDelta::Zero(), TimeDelta::ms(-kSmall)); + EXPECT_GT(TimeDelta::Zero(), TimeDelta::Millis(-kSmall)); + EXPECT_GT(TimeDelta::Zero(), TimeDelta::Millis(-kSmall)); EXPECT_GT(TimeDelta::PlusInfinity(), large); EXPECT_LT(TimeDelta::MinusInfinity(), TimeDelta::Zero()); } TEST(TimeDeltaTest, Clamping) { - const TimeDelta upper = TimeDelta::ms(800); - const TimeDelta lower = TimeDelta::ms(100); - const TimeDelta under = TimeDelta::ms(100); - const TimeDelta inside = TimeDelta::ms(500); - const TimeDelta over = TimeDelta::ms(1000); + const TimeDelta upper = TimeDelta::Millis(800); + const TimeDelta lower = TimeDelta::Millis(100); + const TimeDelta under = TimeDelta::Millis(100); + const TimeDelta inside = TimeDelta::Millis(500); + const TimeDelta over = TimeDelta::Millis(1000); EXPECT_EQ(under.Clamped(lower, upper), lower); EXPECT_EQ(inside.Clamped(lower, upper), inside); EXPECT_EQ(over.Clamped(lower, upper), upper); @@ -131,9 +131,10 @@ TEST(TimeDeltaTest, Clamping) { TEST(TimeDeltaTest, CanBeInititializedFromLargeInt) { const int kMaxInt = std::numeric_limits::max(); - EXPECT_EQ(TimeDelta::seconds(kMaxInt).us(), + EXPECT_EQ(TimeDelta::Seconds(kMaxInt).us(), static_cast(kMaxInt) * 1000000); - EXPECT_EQ(TimeDelta::ms(kMaxInt).us(), static_cast(kMaxInt) * 1000); + EXPECT_EQ(TimeDelta::Millis(kMaxInt).us(), + 
static_cast(kMaxInt) * 1000); } TEST(TimeDeltaTest, ConvertsToAndFromDouble) { @@ -143,16 +144,16 @@ TEST(TimeDeltaTest, ConvertsToAndFromDouble) { const double kMillisDouble = kMicros * 1e-3; const double kSecondsDouble = kMillisDouble * 1e-3; - EXPECT_EQ(TimeDelta::us(kMicros).seconds(), kSecondsDouble); - EXPECT_EQ(TimeDelta::seconds(kSecondsDouble).us(), kMicros); + EXPECT_EQ(TimeDelta::Micros(kMicros).seconds(), kSecondsDouble); + EXPECT_EQ(TimeDelta::Seconds(kSecondsDouble).us(), kMicros); - EXPECT_EQ(TimeDelta::us(kMicros).ms(), kMillisDouble); - EXPECT_EQ(TimeDelta::ms(kMillisDouble).us(), kMicros); + EXPECT_EQ(TimeDelta::Micros(kMicros).ms(), kMillisDouble); + EXPECT_EQ(TimeDelta::Millis(kMillisDouble).us(), kMicros); - EXPECT_EQ(TimeDelta::us(kMicros).us(), kMicrosDouble); - EXPECT_EQ(TimeDelta::us(kMicrosDouble).us(), kMicros); + EXPECT_EQ(TimeDelta::Micros(kMicros).us(), kMicrosDouble); + EXPECT_EQ(TimeDelta::Micros(kMicrosDouble).us(), kMicros); - EXPECT_NEAR(TimeDelta::us(kMicros).ns(), kNanosDouble, 1); + EXPECT_NEAR(TimeDelta::Micros(kMicros).ns(), kNanosDouble, 1); const double kPlusInfinity = std::numeric_limits::infinity(); const double kMinusInfinity = -kPlusInfinity; @@ -166,44 +167,46 @@ TEST(TimeDeltaTest, ConvertsToAndFromDouble) { EXPECT_EQ(TimeDelta::PlusInfinity().ns(), kPlusInfinity); EXPECT_EQ(TimeDelta::MinusInfinity().ns(), kMinusInfinity); - EXPECT_TRUE(TimeDelta::seconds(kPlusInfinity).IsPlusInfinity()); - EXPECT_TRUE(TimeDelta::seconds(kMinusInfinity).IsMinusInfinity()); - EXPECT_TRUE(TimeDelta::ms(kPlusInfinity).IsPlusInfinity()); - EXPECT_TRUE(TimeDelta::ms(kMinusInfinity).IsMinusInfinity()); - EXPECT_TRUE(TimeDelta::us(kPlusInfinity).IsPlusInfinity()); - EXPECT_TRUE(TimeDelta::us(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(TimeDelta::Seconds(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(TimeDelta::Seconds(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(TimeDelta::Millis(kPlusInfinity).IsPlusInfinity()); + 
EXPECT_TRUE(TimeDelta::Millis(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(TimeDelta::Micros(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(TimeDelta::Micros(kMinusInfinity).IsMinusInfinity()); } TEST(TimeDeltaTest, MathOperations) { const int64_t kValueA = 267; const int64_t kValueB = 450; - const TimeDelta delta_a = TimeDelta::ms(kValueA); - const TimeDelta delta_b = TimeDelta::ms(kValueB); + const TimeDelta delta_a = TimeDelta::Millis(kValueA); + const TimeDelta delta_b = TimeDelta::Millis(kValueB); EXPECT_EQ((delta_a + delta_b).ms(), kValueA + kValueB); EXPECT_EQ((delta_a - delta_b).ms(), kValueA - kValueB); const int32_t kInt32Value = 123; const double kFloatValue = 123.0; - EXPECT_EQ((TimeDelta::us(kValueA) * kValueB).us(), kValueA * kValueB); - EXPECT_EQ((TimeDelta::us(kValueA) * kInt32Value).us(), kValueA * kInt32Value); - EXPECT_EQ((TimeDelta::us(kValueA) * kFloatValue).us(), kValueA * kFloatValue); + EXPECT_EQ((TimeDelta::Micros(kValueA) * kValueB).us(), kValueA * kValueB); + EXPECT_EQ((TimeDelta::Micros(kValueA) * kInt32Value).us(), + kValueA * kInt32Value); + EXPECT_EQ((TimeDelta::Micros(kValueA) * kFloatValue).us(), + kValueA * kFloatValue); EXPECT_EQ((delta_b / 10).ms(), kValueB / 10); EXPECT_EQ(delta_b / delta_a, static_cast(kValueB) / kValueA); - EXPECT_EQ(TimeDelta::us(-kValueA).Abs().us(), kValueA); - EXPECT_EQ(TimeDelta::us(kValueA).Abs().us(), kValueA); + EXPECT_EQ(TimeDelta::Micros(-kValueA).Abs().us(), kValueA); + EXPECT_EQ(TimeDelta::Micros(kValueA).Abs().us(), kValueA); - TimeDelta mutable_delta = TimeDelta::ms(kValueA); - mutable_delta += TimeDelta::ms(kValueB); - EXPECT_EQ(mutable_delta, TimeDelta::ms(kValueA + kValueB)); - mutable_delta -= TimeDelta::ms(kValueB); - EXPECT_EQ(mutable_delta, TimeDelta::ms(kValueA)); + TimeDelta mutable_delta = TimeDelta::Millis(kValueA); + mutable_delta += TimeDelta::Millis(kValueB); + EXPECT_EQ(mutable_delta, TimeDelta::Millis(kValueA + kValueB)); + mutable_delta -= TimeDelta::Millis(kValueB); + 
EXPECT_EQ(mutable_delta, TimeDelta::Millis(kValueA)); } TEST(TimeDeltaTest, InfinityOperations) { const int64_t kValue = 267; - const TimeDelta finite = TimeDelta::ms(kValue); + const TimeDelta finite = TimeDelta::Millis(kValue); EXPECT_TRUE((TimeDelta::PlusInfinity() + finite).IsPlusInfinity()); EXPECT_TRUE((TimeDelta::PlusInfinity() - finite).IsPlusInfinity()); EXPECT_TRUE((finite + TimeDelta::PlusInfinity()).IsPlusInfinity()); diff --git a/api/units/timestamp.h b/api/units/timestamp.h index f9ed408a22..f83477e808 100644 --- a/api/units/timestamp.h +++ b/api/units/timestamp.h @@ -28,36 +28,24 @@ namespace webrtc { // difference of two Timestamps results in a TimeDelta. class Timestamp final : public rtc_units_impl::UnitBase { public: - Timestamp() = delete; - - template - static constexpr Timestamp Seconds() { - return FromFraction(1'000'000, seconds); - } - template - static constexpr Timestamp Millis() { - return FromFraction(1000, ms); - } - template - static constexpr Timestamp Micros() { - return FromValue(us); - } - template - static constexpr Timestamp seconds(T seconds) { + static constexpr Timestamp Seconds(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(1'000'000, seconds); + return FromFraction(1'000'000, value); } template - static constexpr Timestamp ms(T milliseconds) { + static constexpr Timestamp Millis(T value) { static_assert(std::is_arithmetic::value, ""); - return FromFraction(1000, milliseconds); + return FromFraction(1'000, value); } template - static constexpr Timestamp us(T microseconds) { + static constexpr Timestamp Micros(T value) { static_assert(std::is_arithmetic::value, ""); - return FromValue(microseconds); + return FromValue(value); } + + Timestamp() = delete; + template constexpr T seconds() const { return ToFraction<1000000, T>(); @@ -91,7 +79,7 @@ class Timestamp final : public rtc_units_impl::UnitBase { RTC_DCHECK(!delta.IsPlusInfinity()); return MinusInfinity(); } - return Timestamp::us(us() + 
delta.us()); + return Timestamp::Micros(us() + delta.us()); } constexpr Timestamp operator-(const TimeDelta delta) const { if (IsPlusInfinity() || delta.IsMinusInfinity()) { @@ -103,7 +91,7 @@ class Timestamp final : public rtc_units_impl::UnitBase { RTC_DCHECK(!delta.IsMinusInfinity()); return MinusInfinity(); } - return Timestamp::us(us() - delta.us()); + return Timestamp::Micros(us() - delta.us()); } constexpr TimeDelta operator-(const Timestamp other) const { if (IsPlusInfinity() || other.IsMinusInfinity()) { @@ -115,7 +103,7 @@ class Timestamp final : public rtc_units_impl::UnitBase { RTC_DCHECK(!other.IsMinusInfinity()); return TimeDelta::MinusInfinity(); } - return TimeDelta::us(us() - other.us()); + return TimeDelta::Micros(us() - other.us()); } constexpr Timestamp& operator-=(const TimeDelta delta) { *this = *this - delta; diff --git a/api/units/timestamp_unittest.cc b/api/units/timestamp_unittest.cc index 6c2d1eea82..43b2985d43 100644 --- a/api/units/timestamp_unittest.cc +++ b/api/units/timestamp_unittest.cc @@ -8,6 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include + #include "api/units/timestamp.h" #include "test/gtest.h" @@ -19,9 +21,9 @@ TEST(TimestampTest, ConstExpr) { static_assert(kTimestampInf.IsInfinite(), ""); static_assert(kTimestampInf.ms_or(-1) == -1, ""); - constexpr Timestamp kTimestampSeconds = Timestamp::Seconds(); - constexpr Timestamp kTimestampMs = Timestamp::Millis(); - constexpr Timestamp kTimestampUs = Timestamp::Micros(); + constexpr Timestamp kTimestampSeconds = Timestamp::Seconds(kValue); + constexpr Timestamp kTimestampMs = Timestamp::Millis(kValue); + constexpr Timestamp kTimestampUs = Timestamp::Micros(kValue); static_assert(kTimestampSeconds.seconds_or(0) == kValue, ""); static_assert(kTimestampMs.ms_or(0) == kValue, ""); @@ -36,20 +38,20 @@ TEST(TimestampTest, ConstExpr) { TEST(TimestampTest, GetBackSameValues) { const int64_t kValue = 499; - EXPECT_EQ(Timestamp::ms(kValue).ms(), kValue); - EXPECT_EQ(Timestamp::us(kValue).us(), kValue); - EXPECT_EQ(Timestamp::seconds(kValue).seconds(), kValue); + EXPECT_EQ(Timestamp::Millis(kValue).ms(), kValue); + EXPECT_EQ(Timestamp::Micros(kValue).us(), kValue); + EXPECT_EQ(Timestamp::Seconds(kValue).seconds(), kValue); } TEST(TimestampTest, GetDifferentPrefix) { const int64_t kValue = 3000000; - EXPECT_EQ(Timestamp::us(kValue).seconds(), kValue / 1000000); - EXPECT_EQ(Timestamp::ms(kValue).seconds(), kValue / 1000); - EXPECT_EQ(Timestamp::us(kValue).ms(), kValue / 1000); + EXPECT_EQ(Timestamp::Micros(kValue).seconds(), kValue / 1000000); + EXPECT_EQ(Timestamp::Millis(kValue).seconds(), kValue / 1000); + EXPECT_EQ(Timestamp::Micros(kValue).ms(), kValue / 1000); - EXPECT_EQ(Timestamp::ms(kValue).us(), kValue * 1000); - EXPECT_EQ(Timestamp::seconds(kValue).ms(), kValue * 1000); - EXPECT_EQ(Timestamp::seconds(kValue).us(), kValue * 1000000); + EXPECT_EQ(Timestamp::Millis(kValue).us(), kValue * 1000); + EXPECT_EQ(Timestamp::Seconds(kValue).ms(), kValue * 1000); + EXPECT_EQ(Timestamp::Seconds(kValue).us(), kValue * 1000000); } TEST(TimestampTest, 
IdentityChecks) { @@ -57,11 +59,11 @@ TEST(TimestampTest, IdentityChecks) { EXPECT_TRUE(Timestamp::PlusInfinity().IsInfinite()); EXPECT_TRUE(Timestamp::MinusInfinity().IsInfinite()); - EXPECT_FALSE(Timestamp::ms(kValue).IsInfinite()); + EXPECT_FALSE(Timestamp::Millis(kValue).IsInfinite()); EXPECT_FALSE(Timestamp::PlusInfinity().IsFinite()); EXPECT_FALSE(Timestamp::MinusInfinity().IsFinite()); - EXPECT_TRUE(Timestamp::ms(kValue).IsFinite()); + EXPECT_TRUE(Timestamp::Millis(kValue).IsFinite()); EXPECT_TRUE(Timestamp::PlusInfinity().IsPlusInfinity()); EXPECT_FALSE(Timestamp::MinusInfinity().IsPlusInfinity()); @@ -76,22 +78,23 @@ TEST(TimestampTest, ComparisonOperators) { EXPECT_EQ(Timestamp::PlusInfinity(), Timestamp::PlusInfinity()); EXPECT_GE(Timestamp::PlusInfinity(), Timestamp::PlusInfinity()); - EXPECT_GT(Timestamp::PlusInfinity(), Timestamp::ms(kLarge)); - EXPECT_EQ(Timestamp::ms(kSmall), Timestamp::ms(kSmall)); - EXPECT_LE(Timestamp::ms(kSmall), Timestamp::ms(kSmall)); - EXPECT_GE(Timestamp::ms(kSmall), Timestamp::ms(kSmall)); - EXPECT_NE(Timestamp::ms(kSmall), Timestamp::ms(kLarge)); - EXPECT_LE(Timestamp::ms(kSmall), Timestamp::ms(kLarge)); - EXPECT_LT(Timestamp::ms(kSmall), Timestamp::ms(kLarge)); - EXPECT_GE(Timestamp::ms(kLarge), Timestamp::ms(kSmall)); - EXPECT_GT(Timestamp::ms(kLarge), Timestamp::ms(kSmall)); + EXPECT_GT(Timestamp::PlusInfinity(), Timestamp::Millis(kLarge)); + EXPECT_EQ(Timestamp::Millis(kSmall), Timestamp::Millis(kSmall)); + EXPECT_LE(Timestamp::Millis(kSmall), Timestamp::Millis(kSmall)); + EXPECT_GE(Timestamp::Millis(kSmall), Timestamp::Millis(kSmall)); + EXPECT_NE(Timestamp::Millis(kSmall), Timestamp::Millis(kLarge)); + EXPECT_LE(Timestamp::Millis(kSmall), Timestamp::Millis(kLarge)); + EXPECT_LT(Timestamp::Millis(kSmall), Timestamp::Millis(kLarge)); + EXPECT_GE(Timestamp::Millis(kLarge), Timestamp::Millis(kSmall)); + EXPECT_GT(Timestamp::Millis(kLarge), Timestamp::Millis(kSmall)); } TEST(TimestampTest, CanBeInititializedFromLargeInt) 
{ const int kMaxInt = std::numeric_limits::max(); - EXPECT_EQ(Timestamp::seconds(kMaxInt).us(), + EXPECT_EQ(Timestamp::Seconds(kMaxInt).us(), static_cast(kMaxInt) * 1000000); - EXPECT_EQ(Timestamp::ms(kMaxInt).us(), static_cast(kMaxInt) * 1000); + EXPECT_EQ(Timestamp::Millis(kMaxInt).us(), + static_cast(kMaxInt) * 1000); } TEST(TimestampTest, ConvertsToAndFromDouble) { @@ -100,14 +103,14 @@ TEST(TimestampTest, ConvertsToAndFromDouble) { const double kMillisDouble = kMicros * 1e-3; const double kSecondsDouble = kMillisDouble * 1e-3; - EXPECT_EQ(Timestamp::us(kMicros).seconds(), kSecondsDouble); - EXPECT_EQ(Timestamp::seconds(kSecondsDouble).us(), kMicros); + EXPECT_EQ(Timestamp::Micros(kMicros).seconds(), kSecondsDouble); + EXPECT_EQ(Timestamp::Seconds(kSecondsDouble).us(), kMicros); - EXPECT_EQ(Timestamp::us(kMicros).ms(), kMillisDouble); - EXPECT_EQ(Timestamp::ms(kMillisDouble).us(), kMicros); + EXPECT_EQ(Timestamp::Micros(kMicros).ms(), kMillisDouble); + EXPECT_EQ(Timestamp::Millis(kMillisDouble).us(), kMicros); - EXPECT_EQ(Timestamp::us(kMicros).us(), kMicrosDouble); - EXPECT_EQ(Timestamp::us(kMicrosDouble).us(), kMicros); + EXPECT_EQ(Timestamp::Micros(kMicros).us(), kMicrosDouble); + EXPECT_EQ(Timestamp::Micros(kMicrosDouble).us(), kMicros); const double kPlusInfinity = std::numeric_limits::infinity(); const double kMinusInfinity = -kPlusInfinity; @@ -119,25 +122,25 @@ TEST(TimestampTest, ConvertsToAndFromDouble) { EXPECT_EQ(Timestamp::PlusInfinity().us(), kPlusInfinity); EXPECT_EQ(Timestamp::MinusInfinity().us(), kMinusInfinity); - EXPECT_TRUE(Timestamp::seconds(kPlusInfinity).IsPlusInfinity()); - EXPECT_TRUE(Timestamp::seconds(kMinusInfinity).IsMinusInfinity()); - EXPECT_TRUE(Timestamp::ms(kPlusInfinity).IsPlusInfinity()); - EXPECT_TRUE(Timestamp::ms(kMinusInfinity).IsMinusInfinity()); - EXPECT_TRUE(Timestamp::us(kPlusInfinity).IsPlusInfinity()); - EXPECT_TRUE(Timestamp::us(kMinusInfinity).IsMinusInfinity()); + 
EXPECT_TRUE(Timestamp::Seconds(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(Timestamp::Seconds(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(Timestamp::Millis(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(Timestamp::Millis(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(Timestamp::Micros(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(Timestamp::Micros(kMinusInfinity).IsMinusInfinity()); } TEST(UnitConversionTest, TimestampAndTimeDeltaMath) { const int64_t kValueA = 267; const int64_t kValueB = 450; - const Timestamp time_a = Timestamp::ms(kValueA); - const Timestamp time_b = Timestamp::ms(kValueB); - const TimeDelta delta_a = TimeDelta::ms(kValueA); - const TimeDelta delta_b = TimeDelta::ms(kValueB); + const Timestamp time_a = Timestamp::Millis(kValueA); + const Timestamp time_b = Timestamp::Millis(kValueB); + const TimeDelta delta_a = TimeDelta::Millis(kValueA); + const TimeDelta delta_b = TimeDelta::Millis(kValueB); - EXPECT_EQ((time_a - time_b), TimeDelta::ms(kValueA - kValueB)); - EXPECT_EQ((time_b - delta_a), Timestamp::ms(kValueB - kValueA)); - EXPECT_EQ((time_b + delta_a), Timestamp::ms(kValueB + kValueA)); + EXPECT_EQ((time_a - time_b), TimeDelta::Millis(kValueA - kValueB)); + EXPECT_EQ((time_b - delta_a), Timestamp::Millis(kValueB - kValueA)); + EXPECT_EQ((time_b + delta_a), Timestamp::Millis(kValueB + kValueA)); Timestamp mutable_time = time_a; mutable_time += delta_b; @@ -148,8 +151,8 @@ TEST(UnitConversionTest, TimestampAndTimeDeltaMath) { TEST(UnitConversionTest, InfinityOperations) { const int64_t kValue = 267; - const Timestamp finite_time = Timestamp::ms(kValue); - const TimeDelta finite_delta = TimeDelta::ms(kValue); + const Timestamp finite_time = Timestamp::Millis(kValue); + const TimeDelta finite_delta = TimeDelta::Millis(kValue); EXPECT_TRUE((Timestamp::PlusInfinity() + finite_delta).IsInfinite()); EXPECT_TRUE((Timestamp::PlusInfinity() - finite_delta).IsInfinite()); EXPECT_TRUE((finite_time + 
TimeDelta::PlusInfinity()).IsInfinite()); diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn index aaa0d56ecb..163a5e83db 100644 --- a/api/video/BUILD.gn +++ b/api/video/BUILD.gn @@ -7,6 +7,10 @@ # be found in the AUTHORS file in the root of the source tree. import("../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} rtc_library("video_rtp_headers") { visibility = [ "*" ] @@ -17,7 +21,6 @@ rtc_library("video_rtp_headers") { "hdr_metadata.h", "video_content_type.cc", "video_content_type.h", - "video_frame_marking.h", "video_rotation.h", "video_timing.cc", "video_timing.h", @@ -27,6 +30,10 @@ rtc_library("video_rtp_headers") { "..:array_view", "../../rtc_base:rtc_base_approved", "../../rtc_base/system:rtc_export", + "../units:data_rate", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -34,6 +41,8 @@ rtc_library("video_rtp_headers") { rtc_library("video_frame") { visibility = [ "*" ] sources = [ + "i420_buffer.cc", + "i420_buffer.h", "video_codec_type.h", "video_frame.cc", "video_frame.h", @@ -51,40 +60,44 @@ rtc_library("video_frame") { "..:scoped_refptr", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "../../rtc_base/memory:aligned_malloc", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } -rtc_source_set("recordable_encoded_frame") { - visibility = [ "*" ] - sources = [ "recordable_encoded_frame.h" ] +if (is_android) { + java_cpp_enum("video_frame_enums") { + sources = [ "video_codec_type.h" ] + } +} +rtc_library("video_frame_i010") { + visibility = [ "*" ] + sources = [ + "i010_buffer.cc", + "i010_buffer.h", + ] deps = [ - ":encoded_image", ":video_frame", ":video_rtp_headers", - "..:array_view", "..:scoped_refptr", - "../../rtc_base:refcount", - 
"../units:timestamp", + "../../rtc_base", + "../../rtc_base:checks", + "../../rtc_base/memory:aligned_malloc", + "//third_party/libyuv", ] } -rtc_source_set("video_frame_type") { - visibility = [ "*" ] - sources = [ "video_frame_type.h" ] -} - -rtc_library("video_frame_i420") { +rtc_library("video_frame_nv12") { visibility = [ "*" ] sources = [ - "i420_buffer.cc", - "i420_buffer.h", + "nv12_buffer.cc", + "nv12_buffer.h", ] deps = [ ":video_frame", - ":video_rtp_headers", "..:scoped_refptr", "../../rtc_base", "../../rtc_base:checks", @@ -94,24 +107,26 @@ rtc_library("video_frame_i420") { ] } -rtc_library("video_frame_i010") { +rtc_source_set("recordable_encoded_frame") { visibility = [ "*" ] - sources = [ - "i010_buffer.cc", - "i010_buffer.h", - ] + sources = [ "recordable_encoded_frame.h" ] + deps = [ + ":encoded_image", ":video_frame", - ":video_frame_i420", ":video_rtp_headers", + "..:array_view", "..:scoped_refptr", - "../../rtc_base", - "../../rtc_base:checks", - "../../rtc_base/memory:aligned_malloc", - "//third_party/libyuv", + "../../rtc_base:refcount", + "../units:timestamp", ] } +rtc_source_set("video_frame_type") { + visibility = [ "*" ] + sources = [ "video_frame_type.h" ] +} + rtc_library("encoded_image") { visibility = [ "*" ] sources = [ @@ -126,13 +141,12 @@ rtc_library("encoded_image") { "..:refcountedbase", "..:rtp_packet_info", "..:scoped_refptr", - "../..:webrtc_common", "../../rtc_base:checks", "../../rtc_base:deprecation", "../../rtc_base:rtc_base_approved", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("encoded_frame") { @@ -163,8 +177,15 @@ rtc_library("video_bitrate_allocation") { "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + 
+rtc_source_set("video_layers_allocation") { + visibility = [ "*" ] + sources = [ "video_layers_allocation.h" ] + deps = [ "../units:data_rate" ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } rtc_library("video_bitrate_allocator") { @@ -196,11 +217,12 @@ rtc_source_set("video_stream_decoder") { deps = [ ":encoded_frame", ":video_frame", + ":video_rtp_headers", "../task_queue", "../units:time_delta", "../video_codecs:video_codecs_api", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_stream_decoder_create") { @@ -219,26 +241,60 @@ rtc_library("video_stream_decoder_create") { ] } -rtc_library("video_stream_encoder") { +rtc_library("video_adaptation") { + visibility = [ "*" ] + sources = [ + "video_adaptation_counters.cc", + "video_adaptation_counters.h", + "video_adaptation_reason.h", + ] + + deps = [ + "../../rtc_base:checks", + "../../rtc_base:stringutils", + ] +} + +rtc_source_set("video_stream_encoder") { visibility = [ "*" ] sources = [ "video_stream_encoder_interface.h", - "video_stream_encoder_observer.cc", "video_stream_encoder_observer.h", "video_stream_encoder_settings.h", ] deps = [ + ":video_adaptation", ":video_bitrate_allocation", ":video_bitrate_allocator", ":video_bitrate_allocator_factory", ":video_codec_constants", ":video_frame", + ":video_layers_allocation", "..:rtp_parameters", + "..:scoped_refptr", "../:fec_controller_api", "../:rtp_parameters", + "../adaptation:resource_adaptation_api", "../units:data_rate", "../video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("video_frame_metadata") { + visibility = [ "*" ] + sources = [ + "video_frame_metadata.cc", + "video_frame_metadata.h", + ] + deps = [ + "..:array_view", + "../../modules/rtp_rtcp:rtp_video_header", + "../transport/rtp:dependency_descriptor", + ] + absl_deps = [ + 
"//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -255,6 +311,7 @@ rtc_library("video_stream_encoder_create") { ":video_stream_encoder", "../../api:scoped_refptr", "../../video:video_stream_encoder_impl", + "../../video/adaptation:video_adaptation", "../task_queue", "../video_codecs:video_codecs_api", ] @@ -271,22 +328,26 @@ rtc_library("builtin_video_bitrate_allocator_factory") { ":video_bitrate_allocation", ":video_bitrate_allocator", ":video_bitrate_allocator_factory", - "../../:webrtc_common", "../../api:scoped_refptr", "../../media:rtc_media_base", "../../modules/video_coding:video_coding_utility", - "../../modules/video_coding:webrtc_vp9_helpers", + "../../modules/video_coding/svc:svc_rate_allocator", "../video_codecs:video_codecs_api", - "//third_party/abseil-cpp/absl/base:core_headers", ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } if (rtc_include_tests) { rtc_library("video_unittests") { testonly = true - sources = [ "video_stream_decoder_create_unittest.cc" ] + sources = [ + "video_frame_metadata_unittest.cc", + "video_stream_decoder_create_unittest.cc", + ] deps = [ + ":video_frame_metadata", ":video_stream_decoder_create", + "../../modules/rtp_rtcp:rtp_video_header", "../../test:test_support", "../task_queue:default_task_queue_factory", "../video_codecs:builtin_video_decoder_factory", diff --git a/api/video/DEPS b/api/video/DEPS index 3af594cd8a..1cb8ad83cb 100644 --- a/api/video/DEPS +++ b/api/video/DEPS @@ -18,6 +18,10 @@ specific_include_rules = { "+rtc_base/memory/aligned_malloc.h", ], + "nv12_buffer\.h": [ + "+rtc_base/memory/aligned_malloc.h", + ], + "recordable_encoded_frame\.h": [ "+rtc_base/ref_count.h", ], diff --git a/api/video/OWNERS b/api/video/OWNERS index 315f85e7d0..e4a16c360a 100644 --- a/api/video/OWNERS +++ b/api/video/OWNERS @@ -1,3 +1,4 @@ +brandtr@webrtc.org magjed@webrtc.org nisse@webrtc.org diff --git 
a/api/video/builtin_video_bitrate_allocator_factory.cc b/api/video/builtin_video_bitrate_allocator_factory.cc index bdf7bd6f75..4c24a0e75d 100644 --- a/api/video/builtin_video_bitrate_allocator_factory.cc +++ b/api/video/builtin_video_bitrate_allocator_factory.cc @@ -15,8 +15,7 @@ #include "absl/base/macros.h" #include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_codec.h" -#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" -#include "modules/video_coding/utility/default_video_bitrate_allocator.h" +#include "modules/video_coding/svc/svc_rate_allocator.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" namespace webrtc { @@ -31,20 +30,13 @@ class BuiltinVideoBitrateAllocatorFactory std::unique_ptr CreateVideoBitrateAllocator( const VideoCodec& codec) override { - std::unique_ptr rate_allocator; switch (codec.codecType) { - case kVideoCodecVP8: - ABSL_FALLTHROUGH_INTENDED; - case kVideoCodecH264: - rate_allocator.reset(new SimulcastRateAllocator(codec)); - break; + case kVideoCodecAV1: case kVideoCodecVP9: - rate_allocator.reset(new SvcRateAllocator(codec)); - break; + return std::make_unique(codec); default: - rate_allocator.reset(new DefaultVideoBitrateAllocator(codec)); + return std::make_unique(codec); } - return rate_allocator; } }; diff --git a/api/video/encoded_image.cc b/api/video/encoded_image.cc index 13d57ef5ff..1c73bdabe6 100644 --- a/api/video/encoded_image.cc +++ b/api/video/encoded_image.cc @@ -61,7 +61,7 @@ void EncodedImageBuffer::Realloc(size_t size) { size_ = size; } -EncodedImage::EncodedImage() : EncodedImage(nullptr, 0, 0) {} +EncodedImage::EncodedImage() = default; EncodedImage::EncodedImage(EncodedImage&&) = default; EncodedImage::EncodedImage(const EncodedImage&) = default; diff --git a/api/video/encoded_image.h b/api/video/encoded_image.h index b375d4825c..650766ab64 100644 --- a/api/video/encoded_image.h +++ b/api/video/encoded_image.h @@ -21,12 +21,10 @@ #include 
"api/scoped_refptr.h" #include "api/video/color_space.h" #include "api/video/video_codec_constants.h" -#include "api/video/video_codec_type.h" #include "api/video/video_content_type.h" #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "common_types.h" // NOLINT(build/include) #include "rtc_base/checks.h" #include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" @@ -74,9 +72,8 @@ class RTC_EXPORT EncodedImage { public: EncodedImage(); EncodedImage(EncodedImage&&); - // Discouraged: potentially expensive. EncodedImage(const EncodedImage&); - EncodedImage(uint8_t* buffer, size_t length, size_t capacity); + RTC_DEPRECATED EncodedImage(uint8_t* buffer, size_t length, size_t capacity); ~EncodedImage(); @@ -94,6 +91,8 @@ class RTC_EXPORT EncodedImage { void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms); + int64_t NtpTimeMs() const { return ntp_time_ms_; } + absl::optional SpatialIndex() const { return spatial_index_; } void SetSpatialIndex(absl::optional spatial_index) { RTC_DCHECK_GE(spatial_index.value_or(0), 0); @@ -129,16 +128,6 @@ class RTC_EXPORT EncodedImage { RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity()); size_ = new_size; } - // TODO(nisse): Delete, provide only read-only access to the buffer. - size_t capacity() const { - return buffer_ ? capacity_ : (encoded_data_ ? encoded_data_->size() : 0); - } - - void set_buffer(uint8_t* buffer, size_t capacity) { - buffer_ = buffer; - capacity_ = capacity; - } - void SetEncodedData( rtc::scoped_refptr encoded_data) { encoded_data_ = encoded_data; @@ -158,24 +147,10 @@ class RTC_EXPORT EncodedImage { return encoded_data_; } - // TODO(nisse): Delete, provide only read-only access to the buffer. - uint8_t* data() { - return buffer_ ? buffer_ - : (encoded_data_ ? encoded_data_->data() : nullptr); - } const uint8_t* data() const { return buffer_ ? buffer_ : (encoded_data_ ? 
encoded_data_->data() : nullptr); } - // TODO(nisse): At some places, code accepts a const ref EncodedImage, but - // still writes to it, to clear padding at the end of the encoded data. - // Padding is required by ffmpeg; the best way to deal with that is likely to - // make this class ensure that buffers always have a few zero padding bytes. - uint8_t* mutable_data() const { return const_cast(data()); } - - // TODO(bugs.webrtc.org/9378): Delete. Used by code that wants to modify a - // buffer corresponding to a const EncodedImage. Requires an un-owned buffer. - uint8_t* buffer() const { return buffer_; } // Hack to workaround lack of ownership of the encoded data. If we don't // already own the underlying data, make an owned copy. @@ -184,18 +159,18 @@ class RTC_EXPORT EncodedImage { uint32_t _encodedWidth = 0; uint32_t _encodedHeight = 0; // NTP time of the capture time in local timebase in milliseconds. + // TODO(minyue): make this member private. int64_t ntp_time_ms_ = 0; int64_t capture_time_ms_ = 0; VideoFrameType _frameType = VideoFrameType::kVideoFrameDelta; VideoRotation rotation_ = kVideoRotation_0; VideoContentType content_type_ = VideoContentType::UNSPECIFIED; - bool _completeFrame = false; int qp_ = -1; // Quantizer value. // When an application indicates non-zero values here, it is taken as an // indication that all future frames will be constrained with those limits // until the application indicates a change again. - PlayoutDelay playout_delay_ = {-1, -1}; + VideoPlayoutDelay playout_delay_; struct Timing { uint8_t flags = VideoSendTiming::kInvalid; @@ -210,14 +185,18 @@ class RTC_EXPORT EncodedImage { } timing_; private: + size_t capacity() const { + return buffer_ ? capacity_ : (encoded_data_ ? encoded_data_->size() : 0); + } + // TODO(bugs.webrtc.org/9378): We're transitioning to always owning the // encoded data. rtc::scoped_refptr encoded_data_; - size_t size_; // Size of encoded frame data. + size_t size_ = 0; // Size of encoded frame data. 
// Non-null when used with an un-owned buffer. - uint8_t* buffer_; + uint8_t* buffer_ = nullptr; // Allocated size of _buffer; relevant only if it's non-null. - size_t capacity_; + size_t capacity_ = 0; uint32_t timestamp_rtp_ = 0; absl::optional spatial_index_; std::map spatial_layer_frame_size_bytes_; diff --git a/api/video/i420_buffer.cc b/api/video/i420_buffer.cc index 62fa1837ed..2a52217ce3 100644 --- a/api/video/i420_buffer.cc +++ b/api/video/i420_buffer.cc @@ -215,9 +215,11 @@ void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src, void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src) { const int crop_width = - std::min(src.width(), width() * src.height() / height()); + height() > 0 ? std::min(src.width(), width() * src.height() / height()) + : src.width(); const int crop_height = - std::min(src.height(), height() * src.width() / width()); + width() > 0 ? std::min(src.height(), height() * src.width() / width()) + : src.height(); CropAndScaleFrom(src, (src.width() - crop_width) / 2, (src.height() - crop_height) / 2, crop_width, crop_height); diff --git a/api/video/nv12_buffer.cc b/api/video/nv12_buffer.cc new file mode 100644 index 0000000000..cfa85ac52e --- /dev/null +++ b/api/video/nv12_buffer.cc @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/nv12_buffer.h" + +#include "api/video/i420_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +namespace webrtc { + +namespace { + +static const int kBufferAlignment = 64; + +int NV12DataSize(int height, int stride_y, int stride_uv) { + return stride_y * height + stride_uv * ((height + 1) / 2); +} + +} // namespace + +NV12Buffer::NV12Buffer(int width, int height) + : NV12Buffer(width, height, width, width + width % 2) {} + +NV12Buffer::NV12Buffer(int width, int height, int stride_y, int stride_uv) + : width_(width), + height_(height), + stride_y_(stride_y), + stride_uv_(stride_uv), + data_(static_cast( + AlignedMalloc(NV12DataSize(height_, stride_y_, stride_uv), + kBufferAlignment))) { + RTC_DCHECK_GT(width, 0); + RTC_DCHECK_GT(height, 0); + RTC_DCHECK_GE(stride_y, width); + RTC_DCHECK_GE(stride_uv, (width + width % 2)); +} + +NV12Buffer::~NV12Buffer() = default; + +// static +rtc::scoped_refptr NV12Buffer::Create(int width, int height) { + return new rtc::RefCountedObject(width, height); +} + +// static +rtc::scoped_refptr NV12Buffer::Create(int width, + int height, + int stride_y, + int stride_uv) { + return new rtc::RefCountedObject(width, height, stride_y, + stride_uv); +} + +// static +rtc::scoped_refptr NV12Buffer::Copy( + const I420BufferInterface& i420_buffer) { + rtc::scoped_refptr buffer = + NV12Buffer::Create(i420_buffer.width(), i420_buffer.height()); + libyuv::I420ToNV12( + i420_buffer.DataY(), i420_buffer.StrideY(), i420_buffer.DataU(), + i420_buffer.StrideU(), i420_buffer.DataV(), i420_buffer.StrideV(), + buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataUV(), + buffer->StrideUV(), buffer->width(), buffer->height()); + return buffer; +} + +rtc::scoped_refptr NV12Buffer::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + 
libyuv::NV12ToI420(DataY(), StrideY(), DataUV(), StrideUV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + +int NV12Buffer::width() const { + return width_; +} +int NV12Buffer::height() const { + return height_; +} + +int NV12Buffer::StrideY() const { + return stride_y_; +} +int NV12Buffer::StrideUV() const { + return stride_uv_; +} + +const uint8_t* NV12Buffer::DataY() const { + return data_.get(); +} + +const uint8_t* NV12Buffer::DataUV() const { + return data_.get() + UVOffset(); +} + +uint8_t* NV12Buffer::MutableDataY() { + return data_.get(); +} + +uint8_t* NV12Buffer::MutableDataUV() { + return data_.get() + UVOffset(); +} + +size_t NV12Buffer::UVOffset() const { + return stride_y_ * height_; +} + +void NV12Buffer::InitializeData() { + memset(data_.get(), 0, NV12DataSize(height_, stride_y_, stride_uv_)); +} + +void NV12Buffer::CropAndScaleFrom(const NV12BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + RTC_CHECK_LE(crop_width, src.width()); + RTC_CHECK_LE(crop_height, src.height()); + RTC_CHECK_LE(crop_width + offset_x, src.width()); + RTC_CHECK_LE(crop_height + offset_y, src.height()); + RTC_CHECK_GE(offset_x, 0); + RTC_CHECK_GE(offset_y, 0); + + // Make sure offset is even so that u/v plane becomes aligned. + const int uv_offset_x = offset_x / 2; + const int uv_offset_y = offset_y / 2; + offset_x = uv_offset_x * 2; + offset_y = uv_offset_y * 2; + + const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x; + const uint8_t* uv_plane = + src.DataUV() + src.StrideUV() * uv_offset_y + uv_offset_x * 2; + + // kFilterBox is unsupported in libyuv, so using kFilterBilinear instead. 
+ int res = libyuv::NV12Scale(y_plane, src.StrideY(), uv_plane, src.StrideUV(), + crop_width, crop_height, MutableDataY(), + StrideY(), MutableDataUV(), StrideUV(), width(), + height(), libyuv::kFilterBilinear); + + RTC_DCHECK_EQ(res, 0); +} + +} // namespace webrtc diff --git a/api/video/nv12_buffer.h b/api/video/nv12_buffer.h new file mode 100644 index 0000000000..cb989e84b0 --- /dev/null +++ b/api/video/nv12_buffer.h @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_NV12_BUFFER_H_ +#define API_VIDEO_NV12_BUFFER_H_ + +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// NV12 is a biplanar encoding format, with full-resolution Y and +// half-resolution interleved UV. More information can be found at +// http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12. 
+class RTC_EXPORT NV12Buffer : public NV12BufferInterface { + public: + static rtc::scoped_refptr Create(int width, int height); + static rtc::scoped_refptr Create(int width, + int height, + int stride_y, + int stride_uv); + static rtc::scoped_refptr Copy( + const I420BufferInterface& i420_buffer); + + rtc::scoped_refptr ToI420() override; + + int width() const override; + int height() const override; + + int StrideY() const override; + int StrideUV() const override; + + const uint8_t* DataY() const override; + const uint8_t* DataUV() const override; + + uint8_t* MutableDataY(); + uint8_t* MutableDataUV(); + + // Sets all three planes to all zeros. Used to work around for + // quirks in memory checkers + // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and + // ffmpeg (http://crbug.com/390941). + // TODO(nisse): Deprecated. Should be deleted if/when those issues + // are resolved in a better way. Or in the mean time, use SetBlack. + void InitializeData(); + + // Scale the cropped area of |src| to the size of |this| buffer, and + // write the result into |this|. 
+ void CropAndScaleFrom(const NV12BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + protected: + NV12Buffer(int width, int height); + NV12Buffer(int width, int height, int stride_y, int stride_uv); + + ~NV12Buffer() override; + + private: + size_t UVOffset() const; + + const int width_; + const int height_; + const int stride_y_; + const int stride_uv_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_NV12_BUFFER_H_ diff --git a/api/video/test/BUILD.gn b/api/video/test/BUILD.gn index 3dcb90981e..72f50494bb 100644 --- a/api/video/test/BUILD.gn +++ b/api/video/test/BUILD.gn @@ -12,15 +12,20 @@ rtc_library("rtc_api_video_unittests") { testonly = true sources = [ "color_space_unittest.cc", + "nv12_buffer_unittest.cc", + "video_adaptation_counters_unittest.cc", "video_bitrate_allocation_unittest.cc", ] deps = [ + "..:video_adaptation", "..:video_bitrate_allocation", "..:video_frame", + "..:video_frame_nv12", "..:video_rtp_headers", + "../../../test:frame_utils", "../../../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("mock_recordable_encoded_frame") { diff --git a/api/video/test/mock_recordable_encoded_frame.h b/api/video/test/mock_recordable_encoded_frame.h index 1788a493c6..2178932d2a 100644 --- a/api/video/test/mock_recordable_encoded_frame.h +++ b/api/video/test/mock_recordable_encoded_frame.h @@ -17,13 +17,18 @@ namespace webrtc { class MockRecordableEncodedFrame : public RecordableEncodedFrame { public: - MOCK_CONST_METHOD0(encoded_buffer, - rtc::scoped_refptr()); - MOCK_CONST_METHOD0(color_space, absl::optional()); - MOCK_CONST_METHOD0(codec, VideoCodecType()); - MOCK_CONST_METHOD0(is_key_frame, bool()); - MOCK_CONST_METHOD0(resolution, EncodedResolution()); - MOCK_CONST_METHOD0(render_time, Timestamp()); + MOCK_METHOD(rtc::scoped_refptr, + encoded_buffer, + (), + (const, 
override)); + MOCK_METHOD(absl::optional, + color_space, + (), + (const, override)); + MOCK_METHOD(VideoCodecType, codec, (), (const, override)); + MOCK_METHOD(bool, is_key_frame, (), (const, override)); + MOCK_METHOD(EncodedResolution, resolution, (), (const, override)); + MOCK_METHOD(Timestamp, render_time, (), (const, override)); }; } // namespace webrtc #endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ diff --git a/api/video/test/nv12_buffer_unittest.cc b/api/video/test/nv12_buffer_unittest.cc new file mode 100644 index 0000000000..d84adb5bf5 --- /dev/null +++ b/api/video/test/nv12_buffer_unittest.cc @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/nv12_buffer.h" + +#include "api/video/i420_buffer.h" +#include "test/frame_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { +int GetY(rtc::scoped_refptr buf, int col, int row) { + return buf->DataY()[row * buf->StrideY() + col]; +} + +int GetU(rtc::scoped_refptr buf, int col, int row) { + return buf->DataUV()[(row / 2) * buf->StrideUV() + (col / 2) * 2]; +} + +int GetV(rtc::scoped_refptr buf, int col, int row) { + return buf->DataUV()[(row / 2) * buf->StrideUV() + (col / 2) * 2 + 1]; +} + +void FillNV12Buffer(rtc::scoped_refptr buf) { + const uint8_t Y = 1; + const uint8_t U = 2; + const uint8_t V = 3; + for (int row = 0; row < buf->height(); ++row) { + for (int col = 0; col < buf->width(); ++col) { + buf->MutableDataY()[row * buf->StrideY() + col] = Y; + } + } + // Fill interleaving UV values. 
+ for (int row = 0; row < buf->ChromaHeight(); row++) { + for (int col = 0; col < buf->StrideUV(); col += 2) { + int uv_index = row * buf->StrideUV() + col; + buf->MutableDataUV()[uv_index] = U; + buf->MutableDataUV()[uv_index + 1] = V; + } + } +} + +} // namespace + +TEST(NV12BufferTest, InitialData) { + constexpr int stride_y = 3; + constexpr int stride_uv = 4; + constexpr int width = 3; + constexpr int height = 3; + + rtc::scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); + EXPECT_EQ(width, nv12_buffer->width()); + EXPECT_EQ(height, nv12_buffer->height()); + EXPECT_EQ(stride_y, nv12_buffer->StrideY()); + EXPECT_EQ(stride_uv, nv12_buffer->StrideUV()); + EXPECT_EQ(2, nv12_buffer->ChromaWidth()); + EXPECT_EQ(2, nv12_buffer->ChromaHeight()); +} + +TEST(NV12BufferTest, ReadPixels) { + constexpr int width = 3; + constexpr int height = 3; + + rtc::scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); + // Y = 1, U = 2, V = 3. + FillNV12Buffer(nv12_buffer); + for (int row = 0; row < height; row++) { + for (int col = 0; col < width; col++) { + EXPECT_EQ(1, GetY(nv12_buffer, col, row)); + EXPECT_EQ(2, GetU(nv12_buffer, col, row)); + EXPECT_EQ(3, GetV(nv12_buffer, col, row)); + } + } +} + +TEST(NV12BufferTest, ToI420) { + constexpr int width = 3; + constexpr int height = 3; + constexpr int size_y = width * height; + constexpr int size_u = (width + 1) / 2 * (height + 1) / 2; + constexpr int size_v = (width + 1) / 2 * (height + 1) / 2; + rtc::scoped_refptr reference(I420Buffer::Create(width, height)); + memset(reference->MutableDataY(), 8, size_y); + memset(reference->MutableDataU(), 4, size_u); + memset(reference->MutableDataV(), 2, size_v); + + rtc::scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); + // Convert the reference buffer to NV12. + memset(nv12_buffer->MutableDataY(), 8, size_y); + // Interleaving u/v values. 
+ for (int i = 0; i < size_u + size_v; i += 2) { + nv12_buffer->MutableDataUV()[i] = 4; + nv12_buffer->MutableDataUV()[i + 1] = 2; + } + // Confirm YUV values are as expected. + for (int row = 0; row < height; row++) { + for (int col = 0; col < width; col++) { + EXPECT_EQ(8, GetY(nv12_buffer, col, row)); + EXPECT_EQ(4, GetU(nv12_buffer, col, row)); + EXPECT_EQ(2, GetV(nv12_buffer, col, row)); + } + } + + rtc::scoped_refptr i420_buffer(nv12_buffer->ToI420()); + EXPECT_EQ(height, i420_buffer->height()); + EXPECT_EQ(width, i420_buffer->width()); + EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer)); +} + +} // namespace webrtc diff --git a/api/video/test/video_adaptation_counters_unittest.cc b/api/video/test/video_adaptation_counters_unittest.cc new file mode 100644 index 0000000000..a7d0bda7d2 --- /dev/null +++ b/api/video/test/video_adaptation_counters_unittest.cc @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_adaptation_counters.h" + +#include "test/gtest.h" + +namespace webrtc { + +TEST(AdaptationCountersTest, Addition) { + VideoAdaptationCounters a{0, 0}; + VideoAdaptationCounters b{1, 2}; + VideoAdaptationCounters total = a + b; + EXPECT_EQ(1, total.resolution_adaptations); + EXPECT_EQ(2, total.fps_adaptations); +} + +TEST(AdaptationCountersTest, Equality) { + VideoAdaptationCounters a{1, 2}; + VideoAdaptationCounters b{2, 1}; + EXPECT_EQ(a, a); + EXPECT_NE(a, b); +} + +} // namespace webrtc diff --git a/api/video/video_adaptation_counters.cc b/api/video/video_adaptation_counters.cc new file mode 100644 index 0000000000..df1769d5d4 --- /dev/null +++ b/api/video/video_adaptation_counters.cc @@ -0,0 +1,42 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_adaptation_counters.h" + +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +bool VideoAdaptationCounters::operator==( + const VideoAdaptationCounters& rhs) const { + return fps_adaptations == rhs.fps_adaptations && + resolution_adaptations == rhs.resolution_adaptations; +} + +bool VideoAdaptationCounters::operator!=( + const VideoAdaptationCounters& rhs) const { + return !(rhs == *this); +} + +VideoAdaptationCounters VideoAdaptationCounters::operator+( + const VideoAdaptationCounters& other) const { + return VideoAdaptationCounters( + resolution_adaptations + other.resolution_adaptations, + fps_adaptations + other.fps_adaptations); +} + +std::string VideoAdaptationCounters::ToString() const { + rtc::StringBuilder ss; + ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations + << " }"; + return ss.Release(); +} + +} // namespace webrtc diff --git a/api/video/video_adaptation_counters.h b/api/video/video_adaptation_counters.h new file mode 100644 index 0000000000..2dea902f2f --- /dev/null +++ b/api/video/video_adaptation_counters.h @@ -0,0 +1,46 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_ +#define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_ + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +// Counts the number of adaptations have resulted due to resource overuse. +// Today we can adapt resolution and fps. 
+struct VideoAdaptationCounters { + VideoAdaptationCounters() : resolution_adaptations(0), fps_adaptations(0) {} + VideoAdaptationCounters(int resolution_adaptations, int fps_adaptations) + : resolution_adaptations(resolution_adaptations), + fps_adaptations(fps_adaptations) { + RTC_DCHECK_GE(resolution_adaptations, 0); + RTC_DCHECK_GE(fps_adaptations, 0); + } + + int Total() const { return fps_adaptations + resolution_adaptations; } + + bool operator==(const VideoAdaptationCounters& rhs) const; + bool operator!=(const VideoAdaptationCounters& rhs) const; + + VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const; + + std::string ToString() const; + + int resolution_adaptations; + int fps_adaptations; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_ diff --git a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.cc b/api/video/video_adaptation_reason.h similarity index 60% rename from modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.cc rename to api/video/video_adaptation_reason.h index 1aca566bb6..3b7fc36eed 100644 --- a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.cc +++ b/api/video/video_adaptation_reason.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,11 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h" +#ifndef API_VIDEO_VIDEO_ADAPTATION_REASON_H_ +#define API_VIDEO_VIDEO_ADAPTATION_REASON_H_ namespace webrtc { -MockRtcpRttStats::MockRtcpRttStats() = default; -MockRtcpRttStats::~MockRtcpRttStats() = default; +enum class VideoAdaptationReason { kQuality, kCpu }; } // namespace webrtc + +#endif // API_VIDEO_VIDEO_ADAPTATION_REASON_H_ diff --git a/api/video/video_bitrate_allocator.cc b/api/video/video_bitrate_allocator.cc index 6ad16c93f2..f4e843b348 100644 --- a/api/video/video_bitrate_allocator.cc +++ b/api/video/video_bitrate_allocator.cc @@ -15,8 +15,8 @@ namespace webrtc { VideoBitrateAllocationParameters::VideoBitrateAllocationParameters( uint32_t total_bitrate_bps, uint32_t framerate) - : total_bitrate(DataRate::bps(total_bitrate_bps)), - stable_bitrate(DataRate::bps(total_bitrate_bps)), + : total_bitrate(DataRate::BitsPerSec(total_bitrate_bps)), + stable_bitrate(DataRate::BitsPerSec(total_bitrate_bps)), framerate(static_cast(framerate)) {} VideoBitrateAllocationParameters::VideoBitrateAllocationParameters( @@ -39,8 +39,8 @@ VideoBitrateAllocationParameters::~VideoBitrateAllocationParameters() = default; VideoBitrateAllocation VideoBitrateAllocator::GetAllocation( uint32_t total_bitrate_bps, uint32_t framerate) { - return Allocate({DataRate::bps(total_bitrate_bps), - DataRate::bps(total_bitrate_bps), + return Allocate({DataRate::BitsPerSec(total_bitrate_bps), + DataRate::BitsPerSec(total_bitrate_bps), static_cast(framerate)}); } @@ -49,4 +49,6 @@ VideoBitrateAllocation VideoBitrateAllocator::Allocate( return GetAllocation(parameters.total_bitrate.bps(), parameters.framerate); } +void VideoBitrateAllocator::SetLegacyConferenceMode(bool enabled) {} + } // namespace webrtc diff --git a/api/video/video_bitrate_allocator.h b/api/video/video_bitrate_allocator.h index 04de04c1b0..fdc86dbc57 100644 --- a/api/video/video_bitrate_allocator.h +++ b/api/video/video_bitrate_allocator.h @@ -40,6 +40,10 @@ class 
VideoBitrateAllocator { virtual VideoBitrateAllocation Allocate( VideoBitrateAllocationParameters parameters); + + // Deprecated: Only used to work around issues with the legacy conference + // screenshare mode and shouldn't be needed by any subclasses. + virtual void SetLegacyConferenceMode(bool enabled); }; class VideoBitrateAllocationObserver { diff --git a/api/video/video_codec_constants.h b/api/video/video_codec_constants.h index 6b6feee4cb..5859f9b4cf 100644 --- a/api/video/video_codec_constants.h +++ b/api/video/video_codec_constants.h @@ -17,6 +17,7 @@ enum : int { kMaxEncoderBuffers = 8 }; enum : int { kMaxSimulcastStreams = 3 }; enum : int { kMaxSpatialLayers = 5 }; enum : int { kMaxTemporalStreams = 4 }; +enum : int { kMaxPreferredPixelFormats = 5 }; } // namespace webrtc diff --git a/api/video/video_codec_type.h b/api/video/video_codec_type.h index efbe3cc8cb..04013e389d 100644 --- a/api/video/video_codec_type.h +++ b/api/video/video_codec_type.h @@ -13,7 +13,20 @@ namespace webrtc { -// Video codec types +// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc +#ifndef DISABLE_H265 +enum VideoCodecType { + // Java_cpp_enum.py does not allow ifdef in enum class, + // so we have to create two version of VideoCodecType here + kVideoCodecGeneric = 0, + kVideoCodecVP8, + kVideoCodecVP9, + kVideoCodecAV1, + kVideoCodecH264, + kVideoCodecH265, + kVideoCodecMultiplex, +}; +#else enum VideoCodecType { // There are various memset(..., 0, ...) calls in the code that rely on // kVideoCodecGeneric being zero. 
@@ -24,6 +37,7 @@ enum VideoCodecType { kVideoCodecH264, kVideoCodecMultiplex, }; +#endif } // namespace webrtc diff --git a/api/video/video_frame.h b/api/video/video_frame.h index 08c939d916..e62aae8e5d 100644 --- a/api/video/video_frame.h +++ b/api/video/video_frame.h @@ -186,6 +186,16 @@ class RTC_EXPORT VideoFrame { color_space_ = color_space; } + // max_composition_delay_in_frames() is used in an experiment of a low-latency + // renderer algorithm see crbug.com/1138888. + absl::optional max_composition_delay_in_frames() const { + return max_composition_delay_in_frames_; + } + void set_max_composition_delay_in_frames( + absl::optional max_composition_delay_in_frames) { + max_composition_delay_in_frames_ = max_composition_delay_in_frames; + } + // Get render time in milliseconds. // TODO(nisse): Deprecated. Migrate all users to timestamp_us(). int64_t render_time_ms() const; @@ -255,6 +265,7 @@ class RTC_EXPORT VideoFrame { int64_t timestamp_us_; VideoRotation rotation_; absl::optional color_space_; + absl::optional max_composition_delay_in_frames_; // Updated since the last frame area. If present it means that the bounding // box of all the changes is within the rectangular area and is close to it. 
// If absent, it means that there's no information about the change at all and diff --git a/api/video/video_frame_buffer.cc b/api/video/video_frame_buffer.cc index b9fd9cd92a..64f339448b 100644 --- a/api/video/video_frame_buffer.cc +++ b/api/video/video_frame_buffer.cc @@ -10,10 +10,25 @@ #include "api/video/video_frame_buffer.h" +#include "api/video/i420_buffer.h" #include "rtc_base/checks.h" namespace webrtc { +rtc::scoped_refptr VideoFrameBuffer::CropAndScale( + int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) { + rtc::scoped_refptr result = + I420Buffer::Create(scaled_width, scaled_height); + result->CropAndScaleFrom(*this->ToI420(), offset_x, offset_y, crop_width, + crop_height); + return result; +} + const I420BufferInterface* VideoFrameBuffer::GetI420() const { // Overridden by subclasses that can return an I420 buffer without any // conversion, in particular, I420BufferInterface. @@ -35,10 +50,40 @@ const I010BufferInterface* VideoFrameBuffer::GetI010() const { return static_cast(this); } +const NV12BufferInterface* VideoFrameBuffer::GetNV12() const { + RTC_CHECK(type() == Type::kNV12); + return static_cast(this); +} + +rtc::scoped_refptr VideoFrameBuffer::GetMappedFrameBuffer( + rtc::ArrayView types) { + RTC_CHECK(type() == Type::kNative); + return nullptr; +} + VideoFrameBuffer::Type I420BufferInterface::type() const { return Type::kI420; } +const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type) { + switch (type) { + case VideoFrameBuffer::Type::kNative: + return "kNative"; + case VideoFrameBuffer::Type::kI420: + return "kI420"; + case VideoFrameBuffer::Type::kI420A: + return "kI420A"; + case VideoFrameBuffer::Type::kI444: + return "kI444"; + case VideoFrameBuffer::Type::kI010: + return "kI010"; + case VideoFrameBuffer::Type::kNV12: + return "kNV12"; + default: + RTC_NOTREACHED(); + } +} + int I420BufferInterface::ChromaWidth() const { return (width() + 1) / 2; } @@ -83,4 +128,15 @@ 
int I010BufferInterface::ChromaHeight() const { return (height() + 1) / 2; } +VideoFrameBuffer::Type NV12BufferInterface::type() const { + return Type::kNV12; +} + +int NV12BufferInterface::ChromaWidth() const { + return (width() + 1) / 2; +} + +int NV12BufferInterface::ChromaHeight() const { + return (height() + 1) / 2; +} } // namespace webrtc diff --git a/api/video/video_frame_buffer.h b/api/video/video_frame_buffer.h index d87a4230a4..67b8797325 100644 --- a/api/video/video_frame_buffer.h +++ b/api/video/video_frame_buffer.h @@ -13,6 +13,7 @@ #include +#include "api/array_view.h" #include "api/scoped_refptr.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -23,6 +24,7 @@ class I420BufferInterface; class I420ABufferInterface; class I444BufferInterface; class I010BufferInterface; +class NV12BufferInterface; // Base class for frame buffers of different types of pixel format and storage. // The tag in type() indicates how the data is represented, and each type is @@ -50,6 +52,7 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { kI420A, kI444, kI010, + kNV12, }; // This function specifies in what pixel format the data is stored in. @@ -72,18 +75,50 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { // WebrtcVideoFrameAdapter in Chrome - it's I420 buffer backed by a shared // memory buffer. Therefore it must have type kNative. Yet, ToI420() // doesn't affect binary data at all. Another example is any I420A buffer. + // TODO(https://crbug.com/webrtc/12021): Make this method non-virtual and + // behave as the other GetXXX methods below. virtual const I420BufferInterface* GetI420() const; + // A format specific scale function. Default implementation works by + // converting to I420. But more efficient implementations may override it, + // especially for kNative. + // First, the image is cropped to |crop_width| and |crop_height| and then + // scaled to |scaled_width| and |scaled_height|. 
+ virtual rtc::scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height); + + // Alias for common use case. + rtc::scoped_refptr Scale(int scaled_width, + int scaled_height) { + return CropAndScale(0, 0, width(), height(), scaled_width, scaled_height); + } + // These functions should only be called if type() is of the correct type. // Calling with a different type will result in a crash. const I420ABufferInterface* GetI420A() const; const I444BufferInterface* GetI444() const; const I010BufferInterface* GetI010() const; + const NV12BufferInterface* GetNV12() const; + + // From a kNative frame, returns a VideoFrameBuffer with a pixel format in + // the list of types that is in the main memory with a pixel perfect + // conversion for encoding with a software encoder. Returns nullptr if the + // frame type is not supported, mapping is not possible, or if the kNative + // frame has not implemented this method. Only callable if type() is kNative. + virtual rtc::scoped_refptr GetMappedFrameBuffer( + rtc::ArrayView types); protected: ~VideoFrameBuffer() override {} }; +// Update when VideoFrameBuffer::Type is updated. +const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type); + // This interface represents planar formats. class PlanarYuvBuffer : public VideoFrameBuffer { public: @@ -175,6 +210,42 @@ class I010BufferInterface : public PlanarYuv16BBuffer { ~I010BufferInterface() override {} }; +class BiplanarYuvBuffer : public VideoFrameBuffer { + public: + virtual int ChromaWidth() const = 0; + virtual int ChromaHeight() const = 0; + + // Returns the number of steps(in terms of Data*() return type) between + // successive rows for a given plane. 
+ virtual int StrideY() const = 0; + virtual int StrideUV() const = 0; + + protected: + ~BiplanarYuvBuffer() override {} +}; + +class BiplanarYuv8Buffer : public BiplanarYuvBuffer { + public: + virtual const uint8_t* DataY() const = 0; + virtual const uint8_t* DataUV() const = 0; + + protected: + ~BiplanarYuv8Buffer() override {} +}; + +// Represents Type::kNV12. NV12 is full resolution Y and half-resolution +// interleved UV. +class RTC_EXPORT NV12BufferInterface : public BiplanarYuv8Buffer { + public: + Type type() const override; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + protected: + ~NV12BufferInterface() override {} +}; + } // namespace webrtc #endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_ diff --git a/api/video/video_frame_metadata.cc b/api/video/video_frame_metadata.cc new file mode 100644 index 0000000000..df82875eb9 --- /dev/null +++ b/api/video/video_frame_metadata.cc @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_frame_metadata.h" + +#include "modules/rtp_rtcp/source/rtp_video_header.h" + +namespace webrtc { + +VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header) + : width_(header.width), height_(header.height) { + if (header.generic) { + frame_id_ = header.generic->frame_id; + spatial_index_ = header.generic->spatial_index; + temporal_index_ = header.generic->temporal_index; + frame_dependencies_ = header.generic->dependencies; + decode_target_indications_ = header.generic->decode_target_indications; + } +} + +} // namespace webrtc diff --git a/api/video/video_frame_metadata.h b/api/video/video_frame_metadata.h new file mode 100644 index 0000000000..2e9309841b --- /dev/null +++ b/api/video/video_frame_metadata.h @@ -0,0 +1,59 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_ +#define API_VIDEO_VIDEO_FRAME_METADATA_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +struct RTPVideoHeader; + +// A subset of metadata from the RTP video header, exposed in insertable streams +// API. 
+class VideoFrameMetadata { + public: + explicit VideoFrameMetadata(const RTPVideoHeader& header); + VideoFrameMetadata(const VideoFrameMetadata&) = default; + VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default; + + uint16_t GetWidth() const { return width_; } + uint16_t GetHeight() const { return height_; } + absl::optional GetFrameId() const { return frame_id_; } + int GetSpatialIndex() const { return spatial_index_; } + int GetTemporalIndex() const { return temporal_index_; } + + rtc::ArrayView GetFrameDependencies() const { + return frame_dependencies_; + } + + rtc::ArrayView GetDecodeTargetIndications() + const { + return decode_target_indications_; + } + + private: + int16_t width_; + int16_t height_; + absl::optional frame_id_; + int spatial_index_ = 0; + int temporal_index_ = 0; + absl::InlinedVector frame_dependencies_; + absl::InlinedVector decode_target_indications_; +}; +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_ diff --git a/api/video/video_frame_metadata_unittest.cc b/api/video/video_frame_metadata_unittest.cc new file mode 100644 index 0000000000..7a808e1ea9 --- /dev/null +++ b/api/video/video_frame_metadata_unittest.cc @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_frame_metadata.h" + +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; + +TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) { + RTPVideoHeader video_header; + video_header.width = 1280u; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetWidth(), video_header.width); +} + +TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) { + RTPVideoHeader video_header; + video_header.height = 720u; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetHeight(), video_header.height); +} + +TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.frame_id = 10; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetFrameId().value(), 10); +} + +TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetFrameId(), absl::nullopt); +} + +TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.spatial_index = 2; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetSpatialIndex(), 2); +} + +TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetSpatialIndex(), 0); +} + +TEST(VideoFrameMetadata, GetTemporalIndexReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.temporal_index = 3; + 
VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetTemporalIndex(), 3); +} + +TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetTemporalIndex(), 0); +} + +TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.dependencies = {5, 6, 7}; + VideoFrameMetadata metadata(video_header); + EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7)); +} + +TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty()); +} + +TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; + VideoFrameMetadata metadata(video_header); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), + ElementsAre(DecodeTargetIndication::kSwitch)); +} + +TEST(VideoFrameMetadata, + DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty()); +} + +} // namespace +} // namespace webrtc diff --git a/api/video/video_layers_allocation.h b/api/video/video_layers_allocation.h new file mode 100644 index 0000000000..39734151ae --- /dev/null +++ b/api/video/video_layers_allocation.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_ +#define API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "api/units/data_rate.h" + +namespace webrtc { + +// This struct contains additional stream-level information needed by a +// Selective Forwarding Middlebox to make relay decisions of RTP streams. +struct VideoLayersAllocation { + static constexpr int kMaxSpatialIds = 4; + static constexpr int kMaxTemporalIds = 4; + + friend bool operator==(const VideoLayersAllocation& lhs, + const VideoLayersAllocation& rhs) { + return lhs.rtp_stream_index == rhs.rtp_stream_index && + lhs.resolution_and_frame_rate_is_valid == + rhs.resolution_and_frame_rate_is_valid && + lhs.active_spatial_layers == rhs.active_spatial_layers; + } + + friend bool operator!=(const VideoLayersAllocation& lhs, + const VideoLayersAllocation& rhs) { + return !(lhs == rhs); + } + + struct SpatialLayer { + friend bool operator==(const SpatialLayer& lhs, const SpatialLayer& rhs) { + return lhs.rtp_stream_index == rhs.rtp_stream_index && + lhs.spatial_id == rhs.spatial_id && + lhs.target_bitrate_per_temporal_layer == + rhs.target_bitrate_per_temporal_layer && + lhs.width == rhs.width && lhs.height == rhs.height && + lhs.frame_rate_fps == rhs.frame_rate_fps; + } + + friend bool operator!=(const SpatialLayer& lhs, const SpatialLayer& rhs) { + return !(lhs == rhs); + } + int rtp_stream_index = 0; + // Index of the spatial layer per `rtp_stream_index`. + int spatial_id = 0; + // Target bitrate per decode target. 
+ absl::InlinedVector + target_bitrate_per_temporal_layer; + + // These fields are only valid if `resolution_and_frame_rate_is_valid` is + // true + uint16_t width = 0; + uint16_t height = 0; + // Max frame rate used in any temporal layer of this spatial layer. + uint8_t frame_rate_fps = 0; + }; + + // Index of the rtp stream this allocation is sent on. Used for mapping + // a SpatialLayer to a rtp stream. + int rtp_stream_index = 0; + bool resolution_and_frame_rate_is_valid = false; + absl::InlinedVector active_spatial_layers; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_ diff --git a/api/video/video_stream_decoder.h b/api/video/video_stream_decoder.h index 8f27fa4dbe..4bf8b985c4 100644 --- a/api/video/video_stream_decoder.h +++ b/api/video/video_stream_decoder.h @@ -17,6 +17,7 @@ #include "api/units/time_delta.h" #include "api/video/encoded_frame.h" +#include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory.h" @@ -29,6 +30,11 @@ class VideoStreamDecoderInterface { public: virtual ~Callbacks() = default; + struct FrameInfo { + absl::optional qp; + VideoContentType content_type; + }; + // Called when the VideoStreamDecoder enters a non-decodable state. virtual void OnNonDecodableState() = 0; @@ -36,10 +42,8 @@ class VideoStreamDecoderInterface { virtual void OnContinuousUntil( const video_coding::VideoLayerFrameId& key) = 0; - // Called with the decoded frame. 
- virtual void OnDecodedFrame(VideoFrame decodedImage, - absl::optional decode_time_ms, - absl::optional qp) = 0; + virtual void OnDecodedFrame(VideoFrame frame, + const FrameInfo& frame_info) = 0; }; virtual ~VideoStreamDecoderInterface() = default; diff --git a/api/video/video_stream_decoder_create_unittest.cc b/api/video/video_stream_decoder_create_unittest.cc index 7b142a90b9..93edb4b8a2 100644 --- a/api/video/video_stream_decoder_create_unittest.cc +++ b/api/video/video_stream_decoder_create_unittest.cc @@ -22,9 +22,9 @@ class NullCallbacks : public VideoStreamDecoderInterface::Callbacks { ~NullCallbacks() override = default; void OnNonDecodableState() override {} void OnContinuousUntil(const video_coding::VideoLayerFrameId& key) override {} - void OnDecodedFrame(VideoFrame decodedImage, - absl::optional decode_time_ms, - absl::optional qp) override {} + void OnDecodedFrame(VideoFrame frame, + const VideoStreamDecoderInterface::Callbacks::FrameInfo& + frame_info) override {} }; TEST(VideoStreamDecoderCreate, CreateVideoStreamDecoder) { diff --git a/api/video/video_stream_encoder_create.cc b/api/video/video_stream_encoder_create.cc index ac2f6b9819..3a2ebe79e1 100644 --- a/api/video/video_stream_encoder_create.cc +++ b/api/video/video_stream_encoder_create.cc @@ -12,7 +12,7 @@ #include -#include "video/overuse_frame_detector.h" +#include "video/adaptation/overuse_frame_detector.h" #include "video/video_stream_encoder.h" namespace webrtc { diff --git a/api/video/video_stream_encoder_interface.h b/api/video/video_stream_encoder_interface.h index d2a90bb00f..34fa6421c4 100644 --- a/api/video/video_stream_encoder_interface.h +++ b/api/video/video_stream_encoder_interface.h @@ -13,10 +13,13 @@ #include +#include "api/adaptation/resource.h" #include "api/fec_controller_override.h" #include "api/rtp_parameters.h" // For DegradationPreference. 
+#include "api/scoped_refptr.h" #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocator.h" +#include "api/video/video_layers_allocation.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video_codecs/video_encoder.h" @@ -44,10 +47,26 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface { public: virtual void OnEncoderConfigurationChanged( std::vector streams, + bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps) = 0; + + virtual void OnBitrateAllocationUpdated( + const VideoBitrateAllocation& allocation) = 0; + + virtual void OnVideoLayersAllocationUpdated( + VideoLayersAllocation allocation) = 0; }; + // If the resource is overusing, the VideoStreamEncoder will try to reduce + // resolution or frame rate until no resource is overusing. + // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor + // is moved to Call this method could be deleted altogether in favor of + // Call-level APIs only. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; + virtual std::vector> + GetAdaptationResources() = 0; + // Sets the source that will provide video frames to the VideoStreamEncoder's // OnFrame method. |degradation_preference| control whether or not resolution // or frame rate may be reduced. The VideoStreamEncoder registers itself with @@ -95,12 +114,8 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface { DataRate stable_target_bitrate, DataRate link_allocation, uint8_t fraction_lost, - int64_t round_trip_time_ms) = 0; - - // Register observer for the bitrate allocation between the temporal - // and spatial layers. - virtual void SetBitrateAllocationObserver( - VideoBitrateAllocationObserver* bitrate_observer) = 0; + int64_t round_trip_time_ms, + double cwnd_reduce_ratio) = 0; // Set a FecControllerOverride, through which the encoder may override // decisions made by FecController. 
diff --git a/api/video/video_stream_encoder_observer.h b/api/video/video_stream_encoder_observer.h index 49531ae539..e027755377 100644 --- a/api/video/video_stream_encoder_observer.h +++ b/api/video/video_stream_encoder_observer.h @@ -15,6 +15,8 @@ #include #include "absl/types/optional.h" +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_constants.h" #include "api/video_codecs/video_encoder.h" @@ -38,20 +40,17 @@ class CpuOveruseMetricsObserver { class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { public: - // Number of resolution and framerate reductions (unset if disabled). - struct AdaptationSteps { - AdaptationSteps(); - absl::optional num_resolution_reductions = 0; - absl::optional num_framerate_reductions = 0; - }; + struct AdaptationSettings { + AdaptationSettings() + : resolution_scaling_enabled(false), framerate_scaling_enabled(false) {} + + AdaptationSettings(bool resolution_scaling_enabled, + bool framerate_scaling_enabled) + : resolution_scaling_enabled(resolution_scaling_enabled), + framerate_scaling_enabled(framerate_scaling_enabled) {} - // TODO(nisse): There are too many enums to represent this. Besides - // this one, see AdaptationObserverInterface::AdaptReason and - // WebRtcVideoChannel::AdaptReason. - enum class AdaptationReason { - kNone, // Used for reset of counters. - kCpu, - kQuality, + bool resolution_scaling_enabled; + bool framerate_scaling_enabled; }; // TODO(nisse): Duplicates enum EncodedImageCallback::DropReason. 
@@ -59,7 +58,8 @@ class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { kSource, kEncoderQueue, kEncoder, - kMediaOptimization + kMediaOptimization, + kCongestionWindow }; ~VideoStreamEncoderObserver() override = default; @@ -82,9 +82,15 @@ class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { const VideoEncoderConfig& encoder_config, const std::vector& streams) = 0; - virtual void OnAdaptationChanged(AdaptationReason reason, - const AdaptationSteps& cpu_steps, - const AdaptationSteps& quality_steps) = 0; + virtual void OnAdaptationChanged( + VideoAdaptationReason reason, + const VideoAdaptationCounters& cpu_steps, + const VideoAdaptationCounters& quality_steps) = 0; + virtual void ClearAdaptationStats() = 0; + + virtual void UpdateAdaptationSettings( + AdaptationSettings cpu_settings, + AdaptationSettings quality_settings) = 0; virtual void OnMinPixelLimitReached() = 0; virtual void OnInitialQualityResolutionAdaptDown() = 0; @@ -94,6 +100,11 @@ class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { const VideoCodec& codec, const VideoBitrateAllocation& allocation) {} + // Informes observer if an internal encoder scaler has reduced video + // resolution or not. |is_scaled| is a flag indicating if the video is scaled + // down. + virtual void OnEncoderInternalScalerUpdate(bool is_scaled) {} + // TODO(nisse): VideoStreamEncoder wants to query the stats, which makes this // not a pure observer. GetInputFrameRate is needed for the cpu adaptation, so // can be deleted if that responsibility is moved out to a VideoStreamAdaptor diff --git a/api/video/video_stream_encoder_settings.h b/api/video/video_stream_encoder_settings.h index 4997327971..cbeed3d07a 100644 --- a/api/video/video_stream_encoder_settings.h +++ b/api/video/video_stream_encoder_settings.h @@ -34,9 +34,17 @@ class EncoderSwitchRequestCallback { // Requests that a switch to a specific encoder is performed. 
virtual void RequestEncoderSwitch(const Config& conf) = 0; + + virtual void RequestEncoderSwitch(const SdpVideoFormat& format) = 0; }; struct VideoStreamEncoderSettings { + enum class BitrateAllocationCallbackType { + kVideoBitrateAllocation, + kVideoBitrateAllocationWhenScreenSharing, + kVideoLayersAllocation + }; + explicit VideoStreamEncoderSettings( const VideoEncoder::Capabilities& capabilities) : capabilities(capabilities) {} @@ -57,6 +65,11 @@ struct VideoStreamEncoderSettings { // Negotiated capabilities which the VideoEncoder may expect the other // side to use. VideoEncoder::Capabilities capabilities; + + // TODO(bugs.webrtc.org/12000): Reporting of VideoBitrateAllocation is beeing + // deprecated. Instead VideoLayersAllocation should be reported. + BitrateAllocationCallbackType allocation_cb_type = + BitrateAllocationCallbackType::kVideoBitrateAllocationWhenScreenSharing; }; } // namespace webrtc diff --git a/api/video/video_timing.h b/api/video/video_timing.h index 4cc75dd0b0..fbd92254a0 100644 --- a/api/video/video_timing.h +++ b/api/video/video_timing.h @@ -100,6 +100,30 @@ struct TimingFrameInfo { uint8_t flags; // Flags indicating validity and/or why tracing was triggered. }; +// Minimum and maximum playout delay values from capture to render. +// These are best effort values. +// +// A value < 0 indicates no change from previous valid value. +// +// min = max = 0 indicates that the receiver should try and render +// frame as soon as possible. +// +// min = x, max = y indicates that the receiver is free to adapt +// in the range (x, y) based on network jitter. +struct VideoPlayoutDelay { + VideoPlayoutDelay() = default; + VideoPlayoutDelay(int min_ms, int max_ms) : min_ms(min_ms), max_ms(max_ms) {} + int min_ms = -1; + int max_ms = -1; + + bool operator==(const VideoPlayoutDelay& rhs) const { + return min_ms == rhs.min_ms && max_ms == rhs.max_ms; + } +}; + +// TODO(bugs.webrtc.org/7660): Old name, delete after downstream use is updated. 
+using PlayoutDelay = VideoPlayoutDelay; + } // namespace webrtc #endif // API_VIDEO_VIDEO_TIMING_H_ diff --git a/api/video_codecs/BUILD.gn b/api/video_codecs/BUILD.gn index 5a16e6bc13..a99027641e 100644 --- a/api/video_codecs/BUILD.gn +++ b/api/video_codecs/BUILD.gn @@ -17,6 +17,8 @@ rtc_library("video_codecs_api") { sources = [ "sdp_video_format.cc", "sdp_video_format.h", + "spatial_layer.cc", + "spatial_layer.h", "video_codec.cc", "video_codec.h", "video_decoder.cc", @@ -38,7 +40,6 @@ rtc_library("video_codecs_api") { deps = [ "..:fec_controller_api", "..:scoped_refptr", - "../..:webrtc_common", "../../modules/video_coding:codec_globals_headers", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", @@ -49,6 +50,8 @@ rtc_library("video_codecs_api") { "../video:video_codec_constants", "../video:video_frame", "../video:video_rtp_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings", @@ -100,8 +103,8 @@ rtc_library("builtin_video_encoder_factory") { "../../media:rtc_media_base", "../../rtc_base:checks", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("vp8_temporal_layers_factory") { @@ -134,18 +137,24 @@ rtc_library("rtc_software_fallback_wrappers") { deps = [ ":video_codecs_api", "..:fec_controller_api", + "../../api/video:video_frame", "../../media:rtc_h264_profile_id", "../../media:rtc_media_base", "../../modules/video_coding:video_codec_interface", + "../../modules/video_coding:video_coding_utility", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../rtc_base/system:rtc_export", "../../system_wrappers:field_trial", + "../../system_wrappers:metrics", "../video:encoded_image", "../video:video_bitrate_allocation", "../video:video_frame", "../video:video_rtp_headers", + ] + absl_deps = [ 
"//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/video_codecs/builtin_video_encoder_factory.cc b/api/video_codecs/builtin_video_encoder_factory.cc index 6888daae48..2f722a4a5c 100644 --- a/api/video_codecs/builtin_video_encoder_factory.cc +++ b/api/video_codecs/builtin_video_encoder_factory.cc @@ -50,8 +50,6 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory { RTC_DCHECK(IsFormatSupported( internal_encoder_factory_->GetSupportedFormats(), format)); VideoEncoderFactory::CodecInfo info; - info.has_internal_source = false; - info.is_hardware_accelerated = false; return info; } diff --git a/api/video_codecs/spatial_layer.cc b/api/video_codecs/spatial_layer.cc new file mode 100644 index 0000000000..25ccdfeb48 --- /dev/null +++ b/api/video_codecs/spatial_layer.cc @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/spatial_layer.h" + +namespace webrtc { + +bool SpatialLayer::operator==(const SpatialLayer& other) const { + return (width == other.width && height == other.height && + maxFramerate == other.maxFramerate && + numberOfTemporalLayers == other.numberOfTemporalLayers && + maxBitrate == other.maxBitrate && + targetBitrate == other.targetBitrate && + minBitrate == other.minBitrate && qpMax == other.qpMax && + active == other.active); +} + +} // namespace webrtc diff --git a/api/video_codecs/spatial_layer.h b/api/video_codecs/spatial_layer.h new file mode 100644 index 0000000000..5a1b425427 --- /dev/null +++ b/api/video_codecs/spatial_layer.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_SPATIAL_LAYER_H_ +#define API_VIDEO_CODECS_SPATIAL_LAYER_H_ + +namespace webrtc { + +struct SpatialLayer { + bool operator==(const SpatialLayer& other) const; + bool operator!=(const SpatialLayer& other) const { return !(*this == other); } + + unsigned short width; // NOLINT(runtime/int) + unsigned short height; // NOLINT(runtime/int) + float maxFramerate; // fps. + unsigned char numberOfTemporalLayers; + unsigned int maxBitrate; // kilobits/sec. + unsigned int targetBitrate; // kilobits/sec. + unsigned int minBitrate; // kilobits/sec. + unsigned int qpMax; // minimum quality + bool active; // encoded and sent. 
+}; + +} // namespace webrtc +#endif // API_VIDEO_CODECS_SPATIAL_LAYER_H_ diff --git a/api/video_codecs/test/BUILD.gn b/api/video_codecs/test/BUILD.gn index e3172643a7..c47e5b919e 100644 --- a/api/video_codecs/test/BUILD.gn +++ b/api/video_codecs/test/BUILD.gn @@ -32,12 +32,13 @@ if (rtc_include_tests) { "../../../rtc_base:rtc_base_tests_utils", "../../../test:field_trial", "../../../test:test_support", + "../../../test:video_test_common", "../../video:encoded_image", "../../video:video_bitrate_allocation", "../../video:video_frame", - "../../video:video_frame_i420", "../../video:video_rtp_headers", "//testing/gtest", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc index ee61893563..30d5287c94 100644 --- a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc +++ b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc @@ -218,6 +218,68 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest, fallback_wrapper_->Release(); } +TEST_F(VideoDecoderSoftwareFallbackWrapperTest, FallbacksOnTooManyErrors) { + VideoCodec codec = {}; + fallback_wrapper_->InitDecode(&codec, 2); + + fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR; + EncodedImage encoded_image; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + // Doesn't fallback from a single error. + fallback_wrapper_->Decode(encoded_image, false, -1); + EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName()); + + // However, many frames with the same error, fallback should happen. + const int kNumFramesToEncode = 10; + for (int i = 0; i < kNumFramesToEncode; ++i) { + fallback_wrapper_->Decode(encoded_image, false, -1); + } + // Hard coded expected value since libvpx is the software implementation name + // for VP8. 
Change accordingly if the underlying implementation does. + EXPECT_STREQ("libvpx (fallback from: fake-decoder)", + fallback_wrapper_->ImplementationName()); + fallback_wrapper_->Release(); +} + +TEST_F(VideoDecoderSoftwareFallbackWrapperTest, + DoesNotFallbackOnDeltaFramesErrors) { + VideoCodec codec = {}; + fallback_wrapper_->InitDecode(&codec, 2); + + fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR; + EncodedImage encoded_image; + encoded_image._frameType = VideoFrameType::kVideoFrameDelta; + + // Many decoded frames with the same error + const int kNumFramesToEncode = 10; + for (int i = 0; i < kNumFramesToEncode; ++i) { + fallback_wrapper_->Decode(encoded_image, false, -1); + } + EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName()); + + fallback_wrapper_->Release(); +} + +TEST_F(VideoDecoderSoftwareFallbackWrapperTest, + DoesNotFallbacksOnNonConsequtiveErrors) { + VideoCodec codec = {}; + fallback_wrapper_->InitDecode(&codec, 2); + + EncodedImage encoded_image; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + + const int kNumFramesToEncode = 10; + for (int i = 0; i < kNumFramesToEncode; ++i) { + // Interleaved errors and successful decodes. 
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR; + fallback_wrapper_->Decode(encoded_image, false, -1); + fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_OK; + fallback_wrapper_->Decode(encoded_image, false, -1); + } + EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName()); + fallback_wrapper_->Release(); +} + class ForcedSoftwareDecoderFallbackTest : public VideoDecoderSoftwareFallbackWrapperTest { public: diff --git a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc index 574bc6fd5a..db20e4c47a 100644 --- a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc +++ b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc @@ -29,17 +29,18 @@ #include "api/video/video_rotation.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/fake_clock.h" +#include "test/fake_texture_frame.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" namespace webrtc { +using ::testing::_; using ::testing::Return; namespace { @@ -76,19 +77,30 @@ VideoEncoder::EncoderInfo GetEncoderInfoWithInternalSource( info.has_internal_source = internal_source; return info; } + +class FakeEncodedImageCallback : public EncodedImageCallback { + public: + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) override { + ++callback_count_; + return Result(Result::OK, callback_count_); + } + int callback_count_ = 0; +}; } // namespace -class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test 
{ +class VideoEncoderSoftwareFallbackWrapperTestBase : public ::testing::Test { protected: - VideoEncoderSoftwareFallbackWrapperTest() - : VideoEncoderSoftwareFallbackWrapperTest("") {} - explicit VideoEncoderSoftwareFallbackWrapperTest( - const std::string& field_trials) + VideoEncoderSoftwareFallbackWrapperTestBase( + const std::string& field_trials, + std::unique_ptr sw_encoder) : override_field_trials_(field_trials), fake_encoder_(new CountingFakeEncoder()), + wrapper_initialized_(false), fallback_wrapper_(CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(VP8Encoder::Create()), - std::unique_ptr(fake_encoder_))) {} + std::move(sw_encoder), + std::unique_ptr(fake_encoder_), + false)) {} class CountingFakeEncoder : public VideoEncoder { public: @@ -106,10 +118,10 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test { int32_t Encode(const VideoFrame& frame, const std::vector* frame_types) override { ++encode_count_; + last_video_frame_ = frame; if (encode_complete_callback_ && encode_return_code_ == WEBRTC_VIDEO_CODEC_OK) { - encode_complete_callback_->OnEncodedImage(EncodedImage(), nullptr, - nullptr); + encode_complete_callback_->OnEncodedImage(EncodedImage(), nullptr); } return encode_return_code_; } @@ -125,16 +137,14 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test { return WEBRTC_VIDEO_CODEC_OK; } - void SetRates(const RateControlParameters& parameters) override { - ++set_rates_count_; - } + void SetRates(const RateControlParameters& parameters) override {} EncoderInfo GetEncoderInfo() const override { ++supports_native_handle_count_; EncoderInfo info; info.scaling_settings = ScalingSettings(kLowThreshold, kHighThreshold); info.supports_native_handle = supports_native_handle_; - info.implementation_name = "fake-encoder"; + info.implementation_name = implementation_name_; return info; } @@ -144,23 +154,13 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test { int encode_count_ = 0; 
EncodedImageCallback* encode_complete_callback_ = nullptr; int release_count_ = 0; - int set_rates_count_ = 0; mutable int supports_native_handle_count_ = 0; bool supports_native_handle_ = false; + std::string implementation_name_ = "fake-encoder"; + absl::optional last_video_frame_; }; - class FakeEncodedImageCallback : public EncodedImageCallback { - public: - Result OnEncodedImage( - const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { - ++callback_count_; - return Result(Result::OK, callback_count_); - } - int callback_count_ = 0; - }; - + void InitEncode(); void UtilizeFallbackEncoder(); void FallbackFromEncodeRequest(); void EncodeFrame(); @@ -174,17 +174,37 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test { FakeEncodedImageCallback callback_; // |fake_encoder_| is owned and released by |fallback_wrapper_|. CountingFakeEncoder* fake_encoder_; + CountingFakeEncoder* fake_sw_encoder_; + bool wrapper_initialized_; std::unique_ptr fallback_wrapper_; VideoCodec codec_ = {}; std::unique_ptr frame_; std::unique_ptr rate_allocator_; }; -void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() { +class VideoEncoderSoftwareFallbackWrapperTest + : public VideoEncoderSoftwareFallbackWrapperTestBase { + protected: + VideoEncoderSoftwareFallbackWrapperTest() + : VideoEncoderSoftwareFallbackWrapperTest(new CountingFakeEncoder()) {} + explicit VideoEncoderSoftwareFallbackWrapperTest( + CountingFakeEncoder* fake_sw_encoder) + : VideoEncoderSoftwareFallbackWrapperTestBase( + "", + std::unique_ptr(fake_sw_encoder)), + fake_sw_encoder_(fake_sw_encoder) { + fake_sw_encoder_->implementation_name_ = "fake_sw_encoder"; + } + + CountingFakeEncoder* fake_sw_encoder_; +}; + +void VideoEncoderSoftwareFallbackWrapperTestBase::EncodeFrame() { EncodeFrame(WEBRTC_VIDEO_CODEC_OK); } -void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame(int expected_ret) { +void 
VideoEncoderSoftwareFallbackWrapperTestBase::EncodeFrame( + int expected_ret) { rtc::scoped_refptr buffer = I420Buffer::Create(codec_.width, codec_.height); I420Buffer::SetBlack(buffer); @@ -199,9 +219,42 @@ void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame(int expected_ret) { EXPECT_EQ(expected_ret, fallback_wrapper_->Encode(*frame_, &types)); } -void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() { - fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_); - EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_); +void VideoEncoderSoftwareFallbackWrapperTestBase::InitEncode() { + if (!wrapper_initialized_) { + fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_); + EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_); + } + + // Register fake encoder as main. + codec_.codecType = kVideoCodecVP8; + codec_.maxFramerate = kFramerate; + codec_.width = kWidth; + codec_.height = kHeight; + codec_.VP8()->numberOfTemporalLayers = 1; + rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + + if (wrapper_initialized_) { + fallback_wrapper_->Release(); + } + + fake_encoder_->init_encode_return_code_ = WEBRTC_VIDEO_CODEC_OK; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + fallback_wrapper_->InitEncode(&codec_, kSettings)); + + if (!wrapper_initialized_) { + fallback_wrapper_->SetRates(VideoEncoder::RateControlParameters( + rate_allocator_->Allocate( + VideoBitrateAllocationParameters(300000, kFramerate)), + kFramerate)); + } + wrapper_initialized_ = true; +} + +void VideoEncoderSoftwareFallbackWrapperTestBase::UtilizeFallbackEncoder() { + if (!wrapper_initialized_) { + fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_); + EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_); + } // Register with failing fake encoder. Should succeed with VP8 fallback. 
codec_.codecType = kVideoCodecVP8; @@ -211,6 +264,10 @@ void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() { codec_.VP8()->numberOfTemporalLayers = 1; rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + if (wrapper_initialized_) { + fallback_wrapper_->Release(); + } + fake_encoder_->init_encode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->InitEncode(&codec_, kSettings)); @@ -226,7 +283,7 @@ void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() { EXPECT_EQ(callback_count + 1, callback_.callback_count_); } -void VideoEncoderSoftwareFallbackWrapperTest::FallbackFromEncodeRequest() { +void VideoEncoderSoftwareFallbackWrapperTestBase::FallbackFromEncodeRequest() { fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_); codec_.codecType = kVideoCodecVP8; codec_.maxFramerate = kFramerate; @@ -234,6 +291,9 @@ void VideoEncoderSoftwareFallbackWrapperTest::FallbackFromEncodeRequest() { codec_.height = kHeight; codec_.VP8()->numberOfTemporalLayers = 1; rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + if (wrapper_initialized_) { + fallback_wrapper_->Release(); + } fallback_wrapper_->InitEncode(&codec_, kSettings); fallback_wrapper_->SetRates(VideoEncoder::RateControlParameters( rate_allocator_->Allocate( @@ -272,11 +332,24 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest, CanUtilizeFallbackEncoder) { TEST_F(VideoEncoderSoftwareFallbackWrapperTest, InternalEncoderReleasedDuringFallback) { + EXPECT_EQ(0, fake_encoder_->init_encode_count_); + EXPECT_EQ(0, fake_encoder_->release_count_); + + InitEncode(); + + EXPECT_EQ(1, fake_encoder_->init_encode_count_); EXPECT_EQ(0, fake_encoder_->release_count_); + UtilizeFallbackEncoder(); + + // One successful InitEncode(), one failed. 
+ EXPECT_EQ(2, fake_encoder_->init_encode_count_); EXPECT_EQ(1, fake_encoder_->release_count_); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release()); + // No extra release when the fallback is released. + EXPECT_EQ(2, fake_encoder_->init_encode_count_); EXPECT_EQ(1, fake_encoder_->release_count_); } @@ -292,29 +365,30 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest, TEST_F(VideoEncoderSoftwareFallbackWrapperTest, CanRegisterCallbackWhileUsingFallbackEncoder) { + InitEncode(); + EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_); + UtilizeFallbackEncoder(); - // Registering an encode-complete callback should still work when fallback - // encoder is being used. + + // Registering an encode-complete callback will now pass to the fallback + // instead of the main encoder. FakeEncodedImageCallback callback2; fallback_wrapper_->RegisterEncodeCompleteCallback(&callback2); - EXPECT_EQ(&callback2, fake_encoder_->encode_complete_callback_); + EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_); // Encoding a frame using the fallback should arrive at the new callback. std::vector types(1, VideoFrameType::kVideoFrameKey); frame_->set_timestamp(frame_->timestamp() + 1000); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types)); + EXPECT_EQ(callback2.callback_count_, 1); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release()); -} + // Re-initialize to use the main encoder, the new callback should be in use. 
+ InitEncode(); + EXPECT_EQ(&callback2, fake_encoder_->encode_complete_callback_); -TEST_F(VideoEncoderSoftwareFallbackWrapperTest, - SetRatesForwardedDuringFallback) { - UtilizeFallbackEncoder(); - EXPECT_EQ(1, fake_encoder_->set_rates_count_); - fallback_wrapper_->SetRates( - VideoEncoder::RateControlParameters(VideoBitrateAllocation(), 1)); - EXPECT_EQ(2, fake_encoder_->set_rates_count_); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release()); + frame_->set_timestamp(frame_->timestamp() + 2000); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types)); + EXPECT_EQ(callback2.callback_count_, 2); } TEST_F(VideoEncoderSoftwareFallbackWrapperTest, @@ -347,9 +421,52 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest, ReportsImplementationName) { TEST_F(VideoEncoderSoftwareFallbackWrapperTest, ReportsFallbackImplementationName) { UtilizeFallbackEncoder(); - // Hard coded expected value since libvpx is the software implementation name - // for VP8. Change accordingly if the underlying implementation does. 
- CheckLastEncoderName("libvpx"); + CheckLastEncoderName(fake_sw_encoder_->implementation_name_.c_str()); +} + +TEST_F(VideoEncoderSoftwareFallbackWrapperTest, + OnEncodeFallbackNativeFrameScaledIfFallbackDoesNotSupportNativeFrames) { + fake_encoder_->supports_native_handle_ = true; + fake_sw_encoder_->supports_native_handle_ = false; + InitEncode(); + int width = codec_.width * 2; + int height = codec_.height * 2; + VideoFrame native_frame = test::FakeNativeBuffer::CreateFrame( + width, height, 0, 0, VideoRotation::kVideoRotation_0); + std::vector types(1, VideoFrameType::kVideoFrameKey); + fake_encoder_->encode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + fallback_wrapper_->Encode(native_frame, &types)); + EXPECT_EQ(1, fake_sw_encoder_->encode_count_); + ASSERT_TRUE(fake_sw_encoder_->last_video_frame_.has_value()); + EXPECT_NE(VideoFrameBuffer::Type::kNative, + fake_sw_encoder_->last_video_frame_->video_frame_buffer()->type()); + EXPECT_EQ(codec_.width, fake_sw_encoder_->last_video_frame_->width()); + EXPECT_EQ(codec_.height, fake_sw_encoder_->last_video_frame_->height()); +} + +TEST_F(VideoEncoderSoftwareFallbackWrapperTest, + OnEncodeFallbackNativeFrameForwardedToFallbackIfItSupportsNativeFrames) { + fake_encoder_->supports_native_handle_ = true; + fake_sw_encoder_->supports_native_handle_ = true; + InitEncode(); + int width = codec_.width * 2; + int height = codec_.height * 2; + VideoFrame native_frame = test::FakeNativeBuffer::CreateFrame( + width, height, 0, 0, VideoRotation::kVideoRotation_0); + std::vector types(1, VideoFrameType::kVideoFrameKey); + fake_encoder_->encode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + fallback_wrapper_->Encode(native_frame, &types)); + EXPECT_EQ(1, fake_sw_encoder_->encode_count_); + ASSERT_TRUE(fake_sw_encoder_->last_video_frame_.has_value()); + EXPECT_EQ(VideoFrameBuffer::Type::kNative, + 
fake_sw_encoder_->last_video_frame_->video_frame_buffer()->type()); + EXPECT_EQ(native_frame.width(), fake_sw_encoder_->last_video_frame_->width()); + EXPECT_EQ(native_frame.height(), + fake_sw_encoder_->last_video_frame_->height()); } namespace { @@ -358,25 +475,27 @@ const int kMinPixelsPerFrame = 1; const char kFieldTrial[] = "WebRTC-VP8-Forced-Fallback-Encoder-v2"; } // namespace -class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTest { +class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTestBase { public: explicit ForcedFallbackTest(const std::string& field_trials) - : VideoEncoderSoftwareFallbackWrapperTest(field_trials) {} + : VideoEncoderSoftwareFallbackWrapperTestBase(field_trials, + VP8Encoder::Create()) {} ~ForcedFallbackTest() override {} protected: void SetUp() override { - clock_.SetTime(Timestamp::us(1234)); + clock_.SetTime(Timestamp::Micros(1234)); ConfigureVp8Codec(); } void TearDown() override { - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release()); + if (wrapper_initialized_) { + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release()); + } } void ConfigureVp8Codec() { - fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_); codec_.codecType = kVideoCodecVP8; codec_.maxFramerate = kFramerate; codec_.width = kWidth; @@ -390,8 +509,13 @@ class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTest { void InitEncode(int width, int height) { codec_.width = width; codec_.height = height; + if (wrapper_initialized_) { + fallback_wrapper_->Release(); + } EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->InitEncode(&codec_, kSettings)); + fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_); + wrapper_initialized_ = true; SetRateAllocation(kBitrateKbps); } @@ -494,11 +618,11 @@ TEST_F(ForcedFallbackTestEnabled, FallbackIsEndedForNonValidSettings) { EXPECT_EQ(1, fake_encoder_->init_encode_count_); EncodeFrameAndVerifyLastName("fake-encoder"); - // Re-initialize encoder 
with valid setting but fallback disabled from now on. + // Re-initialize encoder with valid setting. codec_.VP8()->numberOfTemporalLayers = 1; InitEncode(kWidth, kHeight); - EXPECT_EQ(2, fake_encoder_->init_encode_count_); - EncodeFrameAndVerifyLastName("fake-encoder"); + EXPECT_EQ(1, fake_encoder_->init_encode_count_); + EncodeFrameAndVerifyLastName("libvpx"); } TEST_F(ForcedFallbackTestEnabled, MultipleStartEndFallback) { @@ -607,6 +731,8 @@ TEST(SoftwareFallbackEncoderTest, HwRateControllerTrusted) { EXPECT_TRUE(wrapper->GetEncoderInfo().has_trusted_rate_controller); VideoCodec codec_ = {}; + codec_.width = 100; + codec_.height = 100; wrapper->InitEncode(&codec_, kSettings); // Trigger fallback to software. @@ -650,6 +776,8 @@ TEST(SoftwareFallbackEncoderTest, ReportsHardwareAccelerated) { EXPECT_TRUE(wrapper->GetEncoderInfo().is_hardware_accelerated); VideoCodec codec_ = {}; + codec_.width = 100; + codec_.height = 100; wrapper->InitEncode(&codec_, kSettings); // Trigger fallback to software. @@ -677,6 +805,8 @@ TEST(SoftwareFallbackEncoderTest, ReportsInternalSource) { EXPECT_TRUE(wrapper->GetEncoderInfo().has_internal_source); VideoCodec codec_ = {}; + codec_.width = 100; + codec_.height = 100; wrapper->InitEncode(&codec_, kSettings); // Trigger fallback to software. 
@@ -689,4 +819,247 @@ TEST(SoftwareFallbackEncoderTest, ReportsInternalSource) { EXPECT_FALSE(wrapper->GetEncoderInfo().has_internal_source); } +class PreferTemporalLayersFallbackTest : public ::testing::Test { + public: + PreferTemporalLayersFallbackTest() {} + void SetUp() override { + sw_ = new ::testing::NiceMock(); + sw_info_.implementation_name = "sw"; + EXPECT_CALL(*sw_, GetEncoderInfo).WillRepeatedly([&]() { + return sw_info_; + }); + EXPECT_CALL(*sw_, InitEncode(_, _, _)) + .WillRepeatedly(Return(WEBRTC_VIDEO_CODEC_OK)); + + hw_ = new ::testing::NiceMock(); + hw_info_.implementation_name = "hw"; + EXPECT_CALL(*hw_, GetEncoderInfo()).WillRepeatedly([&]() { + return hw_info_; + }); + EXPECT_CALL(*hw_, InitEncode(_, _, _)) + .WillRepeatedly(Return(WEBRTC_VIDEO_CODEC_OK)); + + wrapper_ = CreateVideoEncoderSoftwareFallbackWrapper( + std::unique_ptr(sw_), std::unique_ptr(hw_), + /*prefer_temporal_support=*/true); + + codec_settings.codecType = kVideoCodecVP8; + codec_settings.maxFramerate = kFramerate; + codec_settings.width = kWidth; + codec_settings.height = kHeight; + codec_settings.numberOfSimulcastStreams = 1; + codec_settings.VP8()->numberOfTemporalLayers = 1; + } + + protected: + void SetSupportsLayers(VideoEncoder::EncoderInfo* info, bool tl_enabled) { + info->fps_allocation[0].clear(); + int num_layers = 1; + if (tl_enabled) { + num_layers = codec_settings.VP8()->numberOfTemporalLayers; + } + for (int i = 0; i < num_layers; ++i) { + info->fps_allocation[0].push_back( + VideoEncoder::EncoderInfo::kMaxFramerateFraction >> + (num_layers - i - 1)); + } + } + + VideoCodec codec_settings; + ::testing::NiceMock* sw_; + ::testing::NiceMock* hw_; + VideoEncoder::EncoderInfo sw_info_; + VideoEncoder::EncoderInfo hw_info_; + std::unique_ptr wrapper_; +}; + +TEST_F(PreferTemporalLayersFallbackTest, UsesMainWhenLayersNotUsed) { + codec_settings.VP8()->numberOfTemporalLayers = 1; + SetSupportsLayers(&hw_info_, true); + SetSupportsLayers(&sw_info_, true); + 
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + wrapper_->InitEncode(&codec_settings, kSettings)); + EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw"); +} + +TEST_F(PreferTemporalLayersFallbackTest, UsesMainWhenLayersSupported) { + codec_settings.VP8()->numberOfTemporalLayers = 2; + SetSupportsLayers(&hw_info_, true); + SetSupportsLayers(&sw_info_, true); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + wrapper_->InitEncode(&codec_settings, kSettings)); + EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw"); +} + +TEST_F(PreferTemporalLayersFallbackTest, + UsesFallbackWhenLayersNotSupportedOnMain) { + codec_settings.VP8()->numberOfTemporalLayers = 2; + SetSupportsLayers(&hw_info_, false); + SetSupportsLayers(&sw_info_, true); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + wrapper_->InitEncode(&codec_settings, kSettings)); + EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "sw"); +} + +TEST_F(PreferTemporalLayersFallbackTest, UsesMainWhenNeitherSupportsTemporal) { + codec_settings.VP8()->numberOfTemporalLayers = 2; + SetSupportsLayers(&hw_info_, false); + SetSupportsLayers(&sw_info_, false); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + wrapper_->InitEncode(&codec_settings, kSettings)); + EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw"); +} + +TEST_F(PreferTemporalLayersFallbackTest, PrimesEncoderOnSwitch) { + codec_settings.VP8()->numberOfTemporalLayers = 2; + // Both support temporal layers, will use main one. + SetSupportsLayers(&hw_info_, true); + SetSupportsLayers(&sw_info_, true); + + // On first InitEncode most params have no state and will not be + // called to update. 
+ EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback).Times(0); + EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback).Times(0); + + EXPECT_CALL(*hw_, SetFecControllerOverride).Times(0); + EXPECT_CALL(*sw_, SetFecControllerOverride).Times(0); + + EXPECT_CALL(*hw_, SetRates).Times(0); + EXPECT_CALL(*hw_, SetRates).Times(0); + + EXPECT_CALL(*hw_, OnPacketLossRateUpdate).Times(0); + EXPECT_CALL(*sw_, OnPacketLossRateUpdate).Times(0); + + EXPECT_CALL(*hw_, OnRttUpdate).Times(0); + EXPECT_CALL(*sw_, OnRttUpdate).Times(0); + + EXPECT_CALL(*hw_, OnLossNotification).Times(0); + EXPECT_CALL(*sw_, OnLossNotification).Times(0); + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + wrapper_->InitEncode(&codec_settings, kSettings)); + EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw"); + + FakeEncodedImageCallback callback1; + class DummyFecControllerOverride : public FecControllerOverride { + public: + void SetFecAllowed(bool fec_allowed) override {} + }; + DummyFecControllerOverride fec_controller_override1; + VideoEncoder::RateControlParameters rate_params1; + float packet_loss1 = 0.1; + int64_t rtt1 = 1; + VideoEncoder::LossNotification lntf1; + + EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback(&callback1)); + EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback).Times(0); + wrapper_->RegisterEncodeCompleteCallback(&callback1); + + EXPECT_CALL(*hw_, SetFecControllerOverride(&fec_controller_override1)); + EXPECT_CALL(*sw_, SetFecControllerOverride).Times(0); + wrapper_->SetFecControllerOverride(&fec_controller_override1); + + EXPECT_CALL(*hw_, SetRates(rate_params1)); + EXPECT_CALL(*sw_, SetRates).Times(0); + wrapper_->SetRates(rate_params1); + + EXPECT_CALL(*hw_, OnPacketLossRateUpdate(packet_loss1)); + EXPECT_CALL(*sw_, OnPacketLossRateUpdate).Times(0); + wrapper_->OnPacketLossRateUpdate(packet_loss1); + + EXPECT_CALL(*hw_, OnRttUpdate(rtt1)); + EXPECT_CALL(*sw_, OnRttUpdate).Times(0); + wrapper_->OnRttUpdate(rtt1); + + EXPECT_CALL(*hw_, OnLossNotification).Times(1); + 
EXPECT_CALL(*sw_, OnLossNotification).Times(0); + wrapper_->OnLossNotification(lntf1); + + // Release and re-init, with fallback to software. This should trigger + // the software encoder to be primed with the current state. + wrapper_->Release(); + EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback(&callback1)); + EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback).Times(0); + + EXPECT_CALL(*sw_, SetFecControllerOverride(&fec_controller_override1)); + EXPECT_CALL(*hw_, SetFecControllerOverride).Times(0); + + // Rate control parameters are cleared on InitEncode. + EXPECT_CALL(*sw_, SetRates).Times(0); + EXPECT_CALL(*hw_, SetRates).Times(0); + + EXPECT_CALL(*sw_, OnPacketLossRateUpdate(packet_loss1)); + EXPECT_CALL(*hw_, OnPacketLossRateUpdate).Times(0); + + EXPECT_CALL(*sw_, OnRttUpdate(rtt1)); + EXPECT_CALL(*hw_, OnRttUpdate).Times(0); + + EXPECT_CALL(*sw_, OnLossNotification).Times(1); + EXPECT_CALL(*hw_, OnLossNotification).Times(0); + + SetSupportsLayers(&hw_info_, false); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + wrapper_->InitEncode(&codec_settings, kSettings)); + EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "sw"); + + // Update with all-new params for the software encoder. 
+ FakeEncodedImageCallback callback2; + DummyFecControllerOverride fec_controller_override2; + VideoEncoder::RateControlParameters rate_params2; + float packet_loss2 = 0.2; + int64_t rtt2 = 2; + VideoEncoder::LossNotification lntf2; + + EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback(&callback2)); + EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback).Times(0); + wrapper_->RegisterEncodeCompleteCallback(&callback2); + + EXPECT_CALL(*sw_, SetFecControllerOverride(&fec_controller_override2)); + EXPECT_CALL(*hw_, SetFecControllerOverride).Times(0); + wrapper_->SetFecControllerOverride(&fec_controller_override2); + + EXPECT_CALL(*sw_, SetRates(rate_params2)); + EXPECT_CALL(*hw_, SetRates).Times(0); + wrapper_->SetRates(rate_params2); + + EXPECT_CALL(*sw_, OnPacketLossRateUpdate(packet_loss2)); + EXPECT_CALL(*hw_, OnPacketLossRateUpdate).Times(0); + wrapper_->OnPacketLossRateUpdate(packet_loss2); + + EXPECT_CALL(*sw_, OnRttUpdate(rtt2)); + EXPECT_CALL(*hw_, OnRttUpdate).Times(0); + wrapper_->OnRttUpdate(rtt2); + + EXPECT_CALL(*sw_, OnLossNotification).Times(1); + EXPECT_CALL(*hw_, OnLossNotification).Times(0); + wrapper_->OnLossNotification(lntf2); + + // Release and re-init, back to main encoder. This should trigger + // the main encoder to be primed with the current state. + wrapper_->Release(); + EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback(&callback2)); + EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback).Times(0); + + EXPECT_CALL(*hw_, SetFecControllerOverride(&fec_controller_override2)); + EXPECT_CALL(*sw_, SetFecControllerOverride).Times(0); + + // Rate control parameters are cleared on InitEncode. 
+ EXPECT_CALL(*sw_, SetRates).Times(0); + EXPECT_CALL(*hw_, SetRates).Times(0); + + EXPECT_CALL(*hw_, OnPacketLossRateUpdate(packet_loss2)); + EXPECT_CALL(*sw_, OnPacketLossRateUpdate).Times(0); + + EXPECT_CALL(*hw_, OnRttUpdate(rtt2)); + EXPECT_CALL(*sw_, OnRttUpdate).Times(0); + + EXPECT_CALL(*hw_, OnLossNotification).Times(1); + EXPECT_CALL(*sw_, OnLossNotification).Times(0); + + SetSupportsLayers(&hw_info_, true); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + wrapper_->InitEncode(&codec_settings, kSettings)); + EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw"); +} + } // namespace webrtc diff --git a/api/video_codecs/video_codec.cc b/api/video_codecs/video_codec.cc index d03082b91e..d7ee165e66 100644 --- a/api/video_codecs/video_codec.cc +++ b/api/video_codecs/video_codec.cc @@ -25,6 +25,9 @@ constexpr char kPayloadNameVp9[] = "VP9"; // frozen. constexpr char kPayloadNameAv1[] = "AV1X"; constexpr char kPayloadNameH264[] = "H264"; +#ifndef DISABLE_H265 +constexpr char kPayloadNameH265[] = "H265"; +#endif constexpr char kPayloadNameGeneric[] = "Generic"; constexpr char kPayloadNameMultiplex[] = "Multiplex"; } // namespace @@ -56,19 +59,19 @@ bool VideoCodecH264::operator==(const VideoCodecH264& other) const { numberOfTemporalLayers == other.numberOfTemporalLayers); } -bool SpatialLayer::operator==(const SpatialLayer& other) const { - return (width == other.width && height == other.height && - maxFramerate == other.maxFramerate && - numberOfTemporalLayers == other.numberOfTemporalLayers && - maxBitrate == other.maxBitrate && - targetBitrate == other.targetBitrate && - minBitrate == other.minBitrate && qpMax == other.qpMax && - active == other.active); +#ifndef DISABLE_H265 +bool VideoCodecH265::operator==(const VideoCodecH265& other) const { + return (frameDroppingOn == other.frameDroppingOn && + keyFrameInterval == other.keyFrameInterval && + vpsLen == other.vpsLen && spsLen == other.spsLen && + ppsLen == other.ppsLen && + (spsLen == 0 || memcmp(spsData, 
other.spsData, spsLen) == 0) && + (ppsLen == 0 || memcmp(ppsData, other.ppsData, ppsLen) == 0)); } +#endif VideoCodec::VideoCodec() : codecType(kVideoCodecGeneric), - plType(0), width(0), height(0), startBitrate(0), @@ -83,6 +86,7 @@ VideoCodec::VideoCodec() mode(VideoCodecMode::kRealtimeVideo), expect_encode_from_texture(false), timing_frame_thresholds({0, 0}), + legacy_conference_mode(false), codec_specific_() {} VideoCodecVP8* VideoCodec::VP8() { @@ -115,6 +119,18 @@ const VideoCodecH264& VideoCodec::H264() const { return codec_specific_.H264; } +#ifndef DISABLE_H265 +VideoCodecH265* VideoCodec::H265() { + RTC_DCHECK_EQ(codecType, kVideoCodecH265); + return &codec_specific_.H265; +} + +const VideoCodecH265& VideoCodec::H265() const { + RTC_DCHECK_EQ(codecType, kVideoCodecH265); + return codec_specific_.H265; +} +#endif + const char* CodecTypeToPayloadString(VideoCodecType type) { switch (type) { case kVideoCodecVP8: @@ -125,11 +141,17 @@ const char* CodecTypeToPayloadString(VideoCodecType type) { return kPayloadNameAv1; case kVideoCodecH264: return kPayloadNameH264; +#ifndef DISABLE_H265 + case kVideoCodecH265: + return kPayloadNameH265; +#endif case kVideoCodecMultiplex: return kPayloadNameMultiplex; case kVideoCodecGeneric: + default: return kPayloadNameGeneric; } + RTC_CHECK_NOTREACHED(); } VideoCodecType PayloadStringToCodecType(const std::string& name) { @@ -143,6 +165,10 @@ VideoCodecType PayloadStringToCodecType(const std::string& name) { return kVideoCodecH264; if (absl::EqualsIgnoreCase(name, kPayloadNameMultiplex)) return kVideoCodecMultiplex; +#ifndef DISABLE_H265 + if (absl::EqualsIgnoreCase(name, kPayloadNameH265)) + return kVideoCodecH265; +#endif return kVideoCodecGeneric; } diff --git a/api/video_codecs/video_codec.h b/api/video_codecs/video_codec.h index 330bbbce19..14da6006e1 100644 --- a/api/video_codecs/video_codec.h +++ b/api/video_codecs/video_codec.h @@ -16,10 +16,11 @@ #include +#include "absl/strings/string_view.h" #include 
"absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_type.h" -#include "common_types.h" // NOLINT(build/include) +#include "api/video_codecs/spatial_layer.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -84,6 +85,23 @@ struct VideoCodecH264 { uint8_t numberOfTemporalLayers; }; +#ifndef DISABLE_H265 +struct VideoCodecH265 { + bool operator==(const VideoCodecH265& other) const; + bool operator!=(const VideoCodecH265& other) const { + return !(*this == other); + } + bool frameDroppingOn; + int keyFrameInterval; + const uint8_t* vpsData; + size_t vpsLen; + const uint8_t* spsData; + size_t spsLen; + const uint8_t* ppsData; + size_t ppsLen; +}; +#endif + // Translates from name of codec to codec type and vice versa. RTC_EXPORT const char* CodecTypeToPayloadString(VideoCodecType type); RTC_EXPORT VideoCodecType PayloadStringToCodecType(const std::string& name); @@ -92,6 +110,9 @@ union VideoCodecUnion { VideoCodecVP8 VP8; VideoCodecVP9 VP9; VideoCodecH264 H264; +#ifndef DISABLE_H265 + VideoCodecH265 H265; +#endif }; enum class VideoCodecMode { kRealtimeVideo, kScreensharing }; @@ -101,9 +122,16 @@ class RTC_EXPORT VideoCodec { public: VideoCodec(); + // Scalability mode as described in + // https://www.w3.org/TR/webrtc-svc/#scalabilitymodes* + // or value 'NONE' to indicate no scalability. + absl::string_view ScalabilityMode() const { return scalability_mode_; } + void SetScalabilityMode(absl::string_view scalability_mode) { + scalability_mode_ = std::string(scalability_mode); + } + // Public variables. TODO(hta): Make them private with accessors. VideoCodecType codecType; - unsigned char plType; // TODO(nisse): Change to int, for consistency. 
uint16_t width; @@ -121,7 +149,7 @@ class RTC_EXPORT VideoCodec { unsigned int qpMax; unsigned char numberOfSimulcastStreams; - SimulcastStream simulcastStream[kMaxSimulcastStreams]; + SpatialLayer simulcastStream[kMaxSimulcastStreams]; SpatialLayer spatialLayers[kMaxSpatialLayers]; VideoCodecMode mode; @@ -146,6 +174,9 @@ class RTC_EXPORT VideoCodec { uint16_t outlier_ratio_percent; } timing_frame_thresholds; + // Legacy Google conference mode flag for simulcast screenshare + bool legacy_conference_mode; + bool operator==(const VideoCodec& other) const = delete; bool operator!=(const VideoCodec& other) const = delete; @@ -159,11 +190,16 @@ class RTC_EXPORT VideoCodec { const VideoCodecVP9& VP9() const; VideoCodecH264* H264(); const VideoCodecH264& H264() const; +#ifndef DISABLE_H265 + VideoCodecH265* H265(); + const VideoCodecH265& H265() const; +#endif private: // TODO(hta): Consider replacing the union with a pointer type. // This will allow removing the VideoCodec* types from this file. 
VideoCodecUnion codec_specific_; + std::string scalability_mode_; }; } // namespace webrtc diff --git a/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/api/video_codecs/video_decoder_software_fallback_wrapper.cc index 3987db6154..20b312cc06 100644 --- a/api/video_codecs/video_decoder_software_fallback_wrapper.cc +++ b/api/video_codecs/video_decoder_software_fallback_wrapper.cc @@ -24,11 +24,14 @@ #include "rtc_base/logging.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" namespace webrtc { namespace { +constexpr size_t kMaxConsequtiveHwErrors = 4; + class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { public: VideoDecoderSoftwareFallbackWrapper( @@ -53,6 +56,8 @@ class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { private: bool InitFallbackDecoder(); + void UpdateFallbackDecoderHistograms(); + int32_t InitHwDecoder(); VideoDecoder& active_decoder() const; @@ -70,6 +75,8 @@ class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { const std::unique_ptr fallback_decoder_; const std::string fallback_implementation_name_; DecodedImageCallback* callback_; + int32_t hw_decoded_frames_since_last_fallback_; + size_t hw_consequtive_generic_errors_; }; VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper( @@ -81,7 +88,9 @@ VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper( fallback_implementation_name_( std::string(fallback_decoder_->ImplementationName()) + " (fallback from: " + hw_decoder_->ImplementationName() + ")"), - callback_(nullptr) {} + callback_(nullptr), + hw_decoded_frames_since_last_fallback_(0), + hw_consequtive_generic_errors_(0) {} VideoDecoderSoftwareFallbackWrapper::~VideoDecoderSoftwareFallbackWrapper() = default; @@ -134,6 +143,8 @@ bool VideoDecoderSoftwareFallbackWrapper::InitFallbackDecoder() { return false; } + UpdateFallbackDecoderHistograms(); + if 
(decoder_type_ == DecoderType::kHardware) { hw_decoder_->Release(); } @@ -144,6 +155,45 @@ bool VideoDecoderSoftwareFallbackWrapper::InitFallbackDecoder() { return true; } +void VideoDecoderSoftwareFallbackWrapper::UpdateFallbackDecoderHistograms() { + const std::string kFallbackHistogramsUmaPrefix = + "WebRTC.Video.HardwareDecodedFramesBetweenSoftwareFallbacks."; + // Each histogram needs its own code path for this to work otherwise the + // histogram names will be mixed up by the optimization that takes place. + switch (codec_settings_.codecType) { + case kVideoCodecGeneric: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Generic", + hw_decoded_frames_since_last_fallback_); + break; + case kVideoCodecVP8: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Vp8", + hw_decoded_frames_since_last_fallback_); + break; + case kVideoCodecVP9: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Vp9", + hw_decoded_frames_since_last_fallback_); + break; + case kVideoCodecAV1: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Av1", + hw_decoded_frames_since_last_fallback_); + break; + case kVideoCodecH264: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "H264", + hw_decoded_frames_since_last_fallback_); + break; +#ifndef DISABLE_H265 + case kVideoCodecH265: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "H265", + hw_decoded_frames_since_last_fallback_); + break; +#endif + case kVideoCodecMultiplex: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Multiplex", + hw_decoded_frames_since_last_fallback_); + break; + } +} + int32_t VideoDecoderSoftwareFallbackWrapper::Decode( const EncodedImage& input_image, bool missing_frames, @@ -156,11 +206,24 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Decode( int32_t ret = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; ret = hw_decoder_->Decode(input_image, missing_frames, render_time_ms); if (ret != WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE) { - return ret; + if 
(ret != WEBRTC_VIDEO_CODEC_ERROR) { + ++hw_decoded_frames_since_last_fallback_; + hw_consequtive_generic_errors_ = 0; + return ret; + } + if (input_image._frameType == VideoFrameType::kVideoFrameKey) { + // Only count errors on key-frames, since generic errors can happen + // with hw decoder due to many arbitrary reasons. + // However, requesting a key-frame is supposed to fix the issue. + ++hw_consequtive_generic_errors_; + } + if (hw_consequtive_generic_errors_ < kMaxConsequtiveHwErrors) { + return ret; + } } // HW decoder returned WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE or - // initialization failed, fallback to software. + // too many generic errors on key-frames encountered. if (!InitFallbackDecoder()) { return ret; } diff --git a/api/video_codecs/video_encoder.cc b/api/video_codecs/video_encoder.cc index 52e2866485..e3d15e8ae0 100644 --- a/api/video_codecs/video_encoder.cc +++ b/api/video_codecs/video_encoder.cc @@ -11,6 +11,7 @@ #include "api/video_codecs/video_encoder.h" #include +#include #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" @@ -38,7 +39,7 @@ VideoCodecVP9 VideoEncoder::GetDefaultVp9Settings() { vp9_settings.numberOfTemporalLayers = 1; vp9_settings.denoisingOn = true; vp9_settings.frameDroppingOn = true; - vp9_settings.keyFrameInterval = 3000; + vp9_settings.keyFrameInterval = 60; vp9_settings.adaptiveQpMode = true; vp9_settings.automaticResizeOn = true; vp9_settings.numberOfSpatialLayers = 1; @@ -59,6 +60,23 @@ VideoCodecH264 VideoEncoder::GetDefaultH264Settings() { return h264_settings; } +#ifndef DISABLE_H265 +VideoCodecH265 VideoEncoder::GetDefaultH265Settings() { + VideoCodecH265 h265_settings; + memset(&h265_settings, 0, sizeof(h265_settings)); + + // h265_settings.profile = kProfileBase; + h265_settings.frameDroppingOn = true; + h265_settings.keyFrameInterval = 3000; + h265_settings.spsData = nullptr; + h265_settings.spsLen = 0; + h265_settings.ppsData = nullptr; + h265_settings.ppsLen = 0; + + return h265_settings; 
+} +#endif + VideoEncoder::ScalingSettings::ScalingSettings() = default; VideoEncoder::ScalingSettings::ScalingSettings(KOff) : ScalingSettings() {} @@ -93,6 +111,7 @@ bool VideoEncoder::ResolutionBitrateLimits::operator==( VideoEncoder::EncoderInfo::EncoderInfo() : scaling_settings(VideoEncoder::ScalingSettings::kOff), requested_resolution_alignment(1), + apply_alignment_to_all_simulcast_layers(false), supports_native_handle(false), implementation_name("unknown"), has_trusted_rate_controller(false), @@ -101,7 +120,8 @@ VideoEncoder::EncoderInfo::EncoderInfo() fps_allocation{absl::InlinedVector( 1, kMaxFramerateFraction)}, - supports_simulcast(false) {} + supports_simulcast(false), + preferred_pixel_formats{VideoFrameBuffer::Type::kI420} {} VideoEncoder::EncoderInfo::EncoderInfo(const EncoderInfo&) = default; @@ -122,6 +142,8 @@ std::string VideoEncoder::EncoderInfo::ToString() const { oss << "min_pixels_per_frame = " << scaling_settings.min_pixels_per_frame << " }"; oss << ", requested_resolution_alignment = " << requested_resolution_alignment + << ", apply_alignment_to_all_simulcast_layers = " + << apply_alignment_to_all_simulcast_layers << ", supports_native_handle = " << supports_native_handle << ", implementation_name = '" << implementation_name << "'" @@ -165,7 +187,15 @@ std::string VideoEncoder::EncoderInfo::ToString() const { } oss << "] " ", supports_simulcast = " - << supports_simulcast << "}"; + << supports_simulcast; + oss << ", preferred_pixel_formats = ["; + for (size_t i = 0; i < preferred_pixel_formats.size(); ++i) { + if (i > 0) + oss << ", "; + oss << VideoFrameBufferTypeToString(preferred_pixel_formats.at(i)); + } + oss << "]"; + oss << "}"; return oss.str(); } @@ -208,6 +238,42 @@ bool VideoEncoder::EncoderInfo::operator==(const EncoderInfo& rhs) const { return true; } +absl::optional +VideoEncoder::EncoderInfo::GetEncoderBitrateLimitsForResolution( + int frame_size_pixels) const { + std::vector bitrate_limits = + resolution_bitrate_limits; + + 
// Sort the list of bitrate limits by resolution. + sort(bitrate_limits.begin(), bitrate_limits.end(), + [](const ResolutionBitrateLimits& lhs, + const ResolutionBitrateLimits& rhs) { + return lhs.frame_size_pixels < rhs.frame_size_pixels; + }); + + for (size_t i = 0; i < bitrate_limits.size(); ++i) { + RTC_DCHECK_GE(bitrate_limits[i].min_bitrate_bps, 0); + RTC_DCHECK_GE(bitrate_limits[i].min_start_bitrate_bps, 0); + RTC_DCHECK_GE(bitrate_limits[i].max_bitrate_bps, + bitrate_limits[i].min_bitrate_bps); + if (i > 0) { + // The bitrate limits aren't expected to decrease with resolution. + RTC_DCHECK_GE(bitrate_limits[i].min_bitrate_bps, + bitrate_limits[i - 1].min_bitrate_bps); + RTC_DCHECK_GE(bitrate_limits[i].min_start_bitrate_bps, + bitrate_limits[i - 1].min_start_bitrate_bps); + RTC_DCHECK_GE(bitrate_limits[i].max_bitrate_bps, + bitrate_limits[i - 1].max_bitrate_bps); + } + + if (bitrate_limits[i].frame_size_pixels >= frame_size_pixels) { + return absl::optional(bitrate_limits[i]); + } + } + + return absl::nullopt; +} + VideoEncoder::RateControlParameters::RateControlParameters() : bitrate(VideoBitrateAllocation()), framerate_fps(0.0), @@ -218,7 +284,7 @@ VideoEncoder::RateControlParameters::RateControlParameters( double framerate_fps) : bitrate(bitrate), framerate_fps(framerate_fps), - bandwidth_allocation(DataRate::bps(bitrate.get_sum_bps())) {} + bandwidth_allocation(DataRate::BitsPerSec(bitrate.get_sum_bps())) {} VideoEncoder::RateControlParameters::RateControlParameters( const VideoBitrateAllocation& bitrate, diff --git a/api/video_codecs/video_encoder.h b/api/video_codecs/video_encoder.h index 34502c8ab0..3c9c2376a9 100644 --- a/api/video_codecs/video_encoder.h +++ b/api/video_codecs/video_encoder.h @@ -30,13 +30,12 @@ namespace webrtc { -class RTPFragmentationHeader; // TODO(pbos): Expose these through a public (root) header or change these APIs. 
struct CodecSpecificInfo; constexpr int kDefaultMinPixelsPerFrame = 320 * 180; -class EncodedImageCallback { +class RTC_EXPORT EncodedImageCallback { public: virtual ~EncodedImageCallback() {} @@ -75,8 +74,7 @@ class EncodedImageCallback { // Callback function which is called when an image has been encoded. virtual Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) = 0; + const CodecSpecificInfo* codec_specific_info) = 0; virtual void OnDroppedFrame(DropReason reason) {} }; @@ -176,6 +174,15 @@ class RTC_EXPORT VideoEncoder { // requirements the encoder has on the incoming video frame buffers. int requested_resolution_alignment; + // Same as above but if true, each simulcast layer should also be divisible + // by |requested_resolution_alignment|. + // Note that scale factors |scale_resolution_down_by| may be adjusted so a + // common multiple is not too large to avoid largely cropped frames and + // possibly with an aspect ratio far from the original. + // Warning: large values of scale_resolution_down_by could be changed + // considerably, especially if |requested_resolution_alignment| is large. + bool apply_alignment_to_all_simulcast_layers; + // If true, encoder supports working with a native handle (e.g. texture // handle for hw codecs) rather than requiring a raw I420 buffer. bool supports_native_handle; @@ -236,12 +243,23 @@ class RTC_EXPORT VideoEncoder { // Recommended bitrate limits for different resolutions. std::vector resolution_bitrate_limits; + // Obtains the limits from |resolution_bitrate_limits| that best matches the + // |frame_size_pixels|. + absl::optional + GetEncoderBitrateLimitsForResolution(int frame_size_pixels) const; + // If true, this encoder has internal support for generating simulcast // streams. Otherwise, an adapter class will be needed. 
// Even if true, the config provided to InitEncode() might not be supported, // in such case the encoder should return // WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED. bool supports_simulcast; + + // The list of pixel formats preferred by the encoder. It is assumed that if + // the list is empty and supports_native_handle is false, then {I420} is the + // preferred pixel format. The order of the formats does not matter. + absl::InlinedVector + preferred_pixel_formats; }; struct RTC_EXPORT RateControlParameters { @@ -255,6 +273,9 @@ class RTC_EXPORT VideoEncoder { // Target bitrate, per spatial/temporal layer. // A target bitrate of 0bps indicates a layer should not be encoded at all. + VideoBitrateAllocation target_bitrate; + // Adjusted target bitrate, per spatial/temporal layer. May be lower or + // higher than the target depending on encoder behaviour. VideoBitrateAllocation bitrate; // Target framerate, in fps. A value <= 0.0 is invalid and should be // interpreted as framerate target not available. In this case the encoder @@ -315,6 +336,9 @@ class RTC_EXPORT VideoEncoder { static VideoCodecVP8 GetDefaultVp8Settings(); static VideoCodecVP9 GetDefaultVp9Settings(); static VideoCodecH264 GetDefaultH264Settings(); +#ifndef DISABLE_H265 + static VideoCodecH265 GetDefaultH265Settings(); +#endif virtual ~VideoEncoder() {} @@ -363,7 +387,7 @@ class RTC_EXPORT VideoEncoder { // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise. virtual int32_t Release() = 0; - // Encode an I420 image (as a part of a video stream). The encoded image + // Encode an image (as a part of a video stream). The encoded image // will be returned to the user through the encode complete callback. 
// // Input: diff --git a/api/video_codecs/video_encoder_config.cc b/api/video_codecs/video_encoder_config.cc index 6efcbf2bdd..45d579503d 100644 --- a/api/video_codecs/video_encoder_config.cc +++ b/api/video_codecs/video_encoder_config.cc @@ -43,6 +43,7 @@ std::string VideoStream::ToString() const { ss << ", num_temporal_layers: " << num_temporal_layers.value_or(1); ss << ", bitrate_priority: " << bitrate_priority.value_or(0); ss << ", active: " << active; + ss << ", scale_down_by: " << scale_resolution_down_by; return ss.str(); } @@ -55,7 +56,8 @@ VideoEncoderConfig::VideoEncoderConfig() min_transmit_bitrate_bps(0), max_bitrate_bps(0), bitrate_priority(1.0), - number_of_streams(0) {} + number_of_streams(0), + legacy_conference_mode(false) {} VideoEncoderConfig::VideoEncoderConfig(VideoEncoderConfig&&) = default; @@ -93,6 +95,10 @@ void VideoEncoderConfig::EncoderSpecificSettings::FillEncoderSpecificSettings( FillVideoCodecVp8(codec->VP8()); } else if (codec->codecType == kVideoCodecVP9) { FillVideoCodecVp9(codec->VP9()); +#ifndef DISABLE_H265 + } else if (codec->codecType == kVideoCodecH265) { + FillVideoCodecH265(codec->H265()); +#endif } else { RTC_NOTREACHED() << "Encoder specifics set/used for unknown codec type."; } @@ -103,6 +109,13 @@ void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecH264( RTC_NOTREACHED(); } +#ifndef DISABLE_H265 +void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecH265( + VideoCodecH265* h265_settings) const { + RTC_NOTREACHED(); +} +#endif + void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecVp8( VideoCodecVP8* vp8_settings) const { RTC_NOTREACHED(); @@ -122,6 +135,17 @@ void VideoEncoderConfig::H264EncoderSpecificSettings::FillVideoCodecH264( *h264_settings = specifics_; } +#ifndef DISABLE_H265 +VideoEncoderConfig::H265EncoderSpecificSettings::H265EncoderSpecificSettings( + const VideoCodecH265& specifics) + : specifics_(specifics) {} + +void 
VideoEncoderConfig::H265EncoderSpecificSettings::FillVideoCodecH265( + VideoCodecH265* h265_settings) const { + *h265_settings = specifics_; +} +#endif + VideoEncoderConfig::Vp8EncoderSpecificSettings::Vp8EncoderSpecificSettings( const VideoCodecVP8& specifics) : specifics_(specifics) {} diff --git a/api/video_codecs/video_encoder_config.h b/api/video_codecs/video_encoder_config.h index ef8db100a3..b613218d47 100644 --- a/api/video_codecs/video_encoder_config.h +++ b/api/video_codecs/video_encoder_config.h @@ -84,6 +84,9 @@ class VideoEncoderConfig { virtual void FillVideoCodecVp8(VideoCodecVP8* vp8_settings) const; virtual void FillVideoCodecVp9(VideoCodecVP9* vp9_settings) const; virtual void FillVideoCodecH264(VideoCodecH264* h264_settings) const; +#ifndef DISABLE_H265 + virtual void FillVideoCodecH265(VideoCodecH265* h265_settings) const; +#endif private: ~EncoderSpecificSettings() override {} @@ -99,6 +102,16 @@ class VideoEncoderConfig { VideoCodecH264 specifics_; }; +#ifndef DISABLE_H265 + class H265EncoderSpecificSettings : public EncoderSpecificSettings { + public: + explicit H265EncoderSpecificSettings(const VideoCodecH265& specifics); + void FillVideoCodecH265(VideoCodecH265* h265_settings) const override; + + private: + VideoCodecH265 specifics_; + }; +#endif class Vp8EncoderSpecificSettings : public EncoderSpecificSettings { public: explicit Vp8EncoderSpecificSettings(const VideoCodecVP8& specifics); @@ -176,6 +189,9 @@ class VideoEncoderConfig { // Max number of encoded VideoStreams to produce. size_t number_of_streams; + // Legacy Google conference mode flag for simulcast screenshare + bool legacy_conference_mode; + private: // Access to the copy constructor is private to force use of the Copy() // method for those exceptional cases where we do use it. 
diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index 1f80fa74db..22430eb19d 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -14,6 +14,8 @@ #include #include +#include "absl/types/optional.h" +#include "api/units/data_rate.h" #include "api/video_codecs/sdp_video_format.h" namespace webrtc { @@ -26,15 +28,32 @@ class VideoEncoderFactory { public: // TODO(magjed): Try to get rid of this struct. struct CodecInfo { - // |is_hardware_accelerated| is true if the encoders created by this factory - // of the given codec will use hardware support. - bool is_hardware_accelerated; // |has_internal_source| is true if encoders created by this factory of the // given codec will use internal camera sources, meaning that they don't // require/expect frames to be delivered via webrtc::VideoEncoder::Encode. // This flag is used as the internal_source parameter to // webrtc::ViEExternalCodec::RegisterExternalSendCodec. - bool has_internal_source; + bool has_internal_source = false; + }; + + // An injectable class that is continuously updated with encoding conditions + // and selects the best encoder given those conditions. + class EncoderSelectorInterface { + public: + virtual ~EncoderSelectorInterface() {} + + // Informs the encoder selector about which encoder that is currently being + // used. + virtual void OnCurrentEncoder(const SdpVideoFormat& format) = 0; + + // Called every time the available bitrate is updated. Should return a + // non-empty if an encoder switch should be performed. + virtual absl::optional OnAvailableBitrate( + const DataRate& rate) = 0; + + // Called if the currently used encoder reports itself as broken. Should + // return a non-empty if an encoder switch should be performed. 
+ virtual absl::optional OnEncoderBroken() = 0; }; // Returns a list of supported video formats in order of preference, to use @@ -51,13 +70,22 @@ class VideoEncoderFactory { // Returns information about how this format will be encoded. The specified // format must be one of the supported formats by this factory. - // TODO(magjed): Try to get rid of this method. - virtual CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const = 0; + + // TODO(magjed): Try to get rid of this method. Since is_hardware_accelerated + // is unused, only factories producing internal source encoders (in itself a + // deprecated feature) needs to override this method. + virtual CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const { + return CodecInfo(); + } // Creates a VideoEncoder for the specified format. virtual std::unique_ptr CreateVideoEncoder( const SdpVideoFormat& format) = 0; + virtual std::unique_ptr GetEncoderSelector() const { + return nullptr; + } + virtual ~VideoEncoderFactory() {} }; diff --git a/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/api/video_codecs/video_encoder_software_fallback_wrapper.cc index fe3274128e..95a41d0e30 100644 --- a/api/video_codecs/video_encoder_software_fallback_wrapper.cc +++ b/api/video_codecs/video_encoder_software_fallback_wrapper.cc @@ -15,16 +15,18 @@ #include #include #include -#include #include +#include "absl/strings/match.h" #include "absl/types/optional.h" #include "api/fec_controller_override.h" +#include "api/video/i420_buffer.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_frame.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/utility/simulcast_utility.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -33,52 +35,89 @@ namespace webrtc { namespace { +// If forced fallback is 
allowed, either: +// +// 1) The forced fallback is requested if the resolution is less than or equal +// to |max_pixels_|. The resolution is allowed to be scaled down to +// |min_pixels_|. +// +// 2) The forced fallback is requested if temporal support is preferred and the +// SW fallback supports temporal layers while the HW encoder does not. + +struct ForcedFallbackParams { + public: + bool SupportsResolutionBasedSwitch(const VideoCodec& codec) const { + return enable_resolution_based_switch && + codec.codecType == kVideoCodecVP8 && + codec.numberOfSimulcastStreams <= 1 && + codec.VP8().numberOfTemporalLayers == 1 && + codec.width * codec.height <= max_pixels; + } + + bool SupportsTemporalBasedSwitch(const VideoCodec& codec) const { + return enable_temporal_based_switch && + SimulcastUtility::NumberOfTemporalLayers(codec, 0) > 1; + } + + bool enable_temporal_based_switch = false; + bool enable_resolution_based_switch = false; + int min_pixels = 320 * 180; + int max_pixels = 320 * 240; +}; + const char kVp8ForceFallbackEncoderFieldTrial[] = "WebRTC-VP8-Forced-Fallback-Encoder-v2"; -bool EnableForcedFallback() { - return field_trial::IsEnabled(kVp8ForceFallbackEncoderFieldTrial); -} +absl::optional ParseFallbackParamsFromFieldTrials( + const VideoEncoder& main_encoder) { + const std::string field_trial = + webrtc::field_trial::FindFullName(kVp8ForceFallbackEncoderFieldTrial); + if (!absl::StartsWith(field_trial, "Enabled")) { + return absl::nullopt; + } -bool IsForcedFallbackPossible(const VideoCodec& codec_settings) { - return codec_settings.codecType == kVideoCodecVP8 && - codec_settings.numberOfSimulcastStreams <= 1 && - codec_settings.VP8().numberOfTemporalLayers == 1; -} + int max_pixels_lower_bound = + main_encoder.GetEncoderInfo().scaling_settings.min_pixels_per_frame - 1; -void GetForcedFallbackParamsFromFieldTrialGroup(int* param_min_pixels, - int* param_max_pixels, - int minimum_max_pixels) { - RTC_DCHECK(param_min_pixels); - RTC_DCHECK(param_max_pixels); 
- std::string group = - webrtc::field_trial::FindFullName(kVp8ForceFallbackEncoderFieldTrial); - if (group.empty()) - return; - - int min_pixels; - int max_pixels; - int min_bps; - if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &min_pixels, &max_pixels, - &min_bps) != 3) { + ForcedFallbackParams params; + params.enable_resolution_based_switch = true; + + int min_bps = 0; + if (sscanf(field_trial.c_str(), "Enabled-%d,%d,%d", ¶ms.min_pixels, + ¶ms.max_pixels, &min_bps) != 3) { RTC_LOG(LS_WARNING) << "Invalid number of forced fallback parameters provided."; - return; - } - if (min_pixels <= 0 || max_pixels < minimum_max_pixels || - max_pixels < min_pixels || min_bps <= 0) { + return absl::nullopt; + } else if (params.min_pixels <= 0 || + params.max_pixels < max_pixels_lower_bound || + params.max_pixels < params.min_pixels || min_bps <= 0) { RTC_LOG(LS_WARNING) << "Invalid forced fallback parameter value provided."; - return; + return absl::nullopt; + } + + return params; +} + +absl::optional GetForcedFallbackParams( + bool prefer_temporal_support, + const VideoEncoder& main_encoder) { + absl::optional params = + ParseFallbackParamsFromFieldTrials(main_encoder); + if (prefer_temporal_support) { + if (!params.has_value()) { + params.emplace(); + } + params->enable_temporal_based_switch = prefer_temporal_support; } - *param_min_pixels = min_pixels; - *param_max_pixels = max_pixels; + return params; } class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { public: VideoEncoderSoftwareFallbackWrapper( std::unique_ptr sw_encoder, - std::unique_ptr hw_encoder); + std::unique_ptr hw_encoder, + bool prefer_temporal_support); ~VideoEncoderSoftwareFallbackWrapper() override; void SetFecControllerOverride( @@ -106,28 +145,29 @@ class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { EncoderInfo GetEncoderInfo() const override; private: - bool InitFallbackEncoder(); - - // If |forced_fallback_possible_| is true: - // The forced fallback is requested 
if the resolution is less than or equal to - // |max_pixels_|. The resolution is allowed to be scaled down to - // |min_pixels_|. - class ForcedFallbackParams { - public: - bool IsValid(const VideoCodec& codec) const { - return codec.width * codec.height <= max_pixels_; + bool InitFallbackEncoder(bool is_forced); + bool TryInitForcedFallbackEncoder(); + bool IsFallbackActive() const; + + VideoEncoder* current_encoder() { + switch (encoder_state_) { + case EncoderState::kUninitialized: + RTC_LOG(LS_WARNING) + << "Trying to access encoder in uninitialized fallback wrapper."; + // Return main encoder to preserve previous behavior. + ABSL_FALLTHROUGH_INTENDED; + case EncoderState::kMainEncoderUsed: + return encoder_.get(); + case EncoderState::kFallbackDueToFailure: + case EncoderState::kForcedFallback: + return fallback_encoder_.get(); } + RTC_CHECK_NOTREACHED(); + } - bool active_ = false; - int min_pixels_ = 320 * 180; - int max_pixels_ = 320 * 240; - }; - - bool TryInitForcedFallbackEncoder(); - bool TryReInitForcedFallbackEncoder(); - void ValidateSettingsForForcedFallback(); - bool IsForcedFallbackActive() const; - void MaybeModifyCodecForFallback(); + // Updates encoder with last observed parameters, such as callbacks, rates, + // etc. + void PrimeEncoder(VideoEncoder* encoder) const; // Settings used in the last InitEncode call and used if a dynamic fallback to // software is required. @@ -137,65 +177,97 @@ class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { // The last rate control settings, if set. absl::optional rate_control_parameters_; - // The last channel parameters set, and a flag for noting they are set. - bool channel_parameters_set_; - uint32_t packet_loss_; - int64_t rtt_; + // The last channel parameters set. 
+ absl::optional packet_loss_; + absl::optional rtt_; + FecControllerOverride* fec_controller_override_; + absl::optional loss_notification_; + + enum class EncoderState { + kUninitialized, + kMainEncoderUsed, + kFallbackDueToFailure, + kForcedFallback + }; - bool use_fallback_encoder_; + EncoderState encoder_state_; const std::unique_ptr encoder_; - const std::unique_ptr fallback_encoder_; + EncodedImageCallback* callback_; - bool forced_fallback_possible_; - ForcedFallbackParams forced_fallback_; + const absl::optional fallback_params_; + int32_t EncodeWithMainEncoder(const VideoFrame& frame, + const std::vector* frame_types); }; VideoEncoderSoftwareFallbackWrapper::VideoEncoderSoftwareFallbackWrapper( std::unique_ptr sw_encoder, - std::unique_ptr hw_encoder) - : channel_parameters_set_(false), - packet_loss_(0), - rtt_(0), - use_fallback_encoder_(false), + std::unique_ptr hw_encoder, + bool prefer_temporal_support) + : fec_controller_override_(nullptr), + encoder_state_(EncoderState::kUninitialized), encoder_(std::move(hw_encoder)), fallback_encoder_(std::move(sw_encoder)), callback_(nullptr), - forced_fallback_possible_(EnableForcedFallback()) { + fallback_params_( + GetForcedFallbackParams(prefer_temporal_support, *encoder_)) { RTC_DCHECK(fallback_encoder_); - if (forced_fallback_possible_) { - GetForcedFallbackParamsFromFieldTrialGroup( - &forced_fallback_.min_pixels_, &forced_fallback_.max_pixels_, - encoder_->GetEncoderInfo().scaling_settings.min_pixels_per_frame - - 1); // No HW below. - } } + VideoEncoderSoftwareFallbackWrapper::~VideoEncoderSoftwareFallbackWrapper() = default; -bool VideoEncoderSoftwareFallbackWrapper::InitFallbackEncoder() { +void VideoEncoderSoftwareFallbackWrapper::PrimeEncoder( + VideoEncoder* encoder) const { + RTC_DCHECK(encoder); + // Replay callback, rates, and channel parameters. 
+ if (callback_) { + encoder->RegisterEncodeCompleteCallback(callback_); + } + if (rate_control_parameters_) { + encoder->SetRates(*rate_control_parameters_); + } + if (rtt_.has_value()) { + encoder->OnRttUpdate(rtt_.value()); + } + if (packet_loss_.has_value()) { + encoder->OnPacketLossRateUpdate(packet_loss_.value()); + } + if (fec_controller_override_) { + encoder->SetFecControllerOverride(fec_controller_override_); + } + if (loss_notification_.has_value()) { + encoder->OnLossNotification(loss_notification_.value()); + } +} + +bool VideoEncoderSoftwareFallbackWrapper::InitFallbackEncoder(bool is_forced) { RTC_LOG(LS_WARNING) << "Encoder falling back to software encoding."; RTC_DCHECK(encoder_settings_.has_value()); const int ret = fallback_encoder_->InitEncode(&codec_settings_, encoder_settings_.value()); - use_fallback_encoder_ = (ret == WEBRTC_VIDEO_CODEC_OK); - if (!use_fallback_encoder_) { + + if (ret != WEBRTC_VIDEO_CODEC_OK) { RTC_LOG(LS_ERROR) << "Failed to initialize software-encoder fallback."; fallback_encoder_->Release(); return false; } - // Replay callback, rates, and channel parameters. - if (callback_) - fallback_encoder_->RegisterEncodeCompleteCallback(callback_); - if (rate_control_parameters_) - fallback_encoder_->SetRates(*rate_control_parameters_); - - // Since we're switching to the fallback encoder, Release the real encoder. It - // may be re-initialized via InitEncode later, and it will continue to get - // Set calls for rates and channel parameters in the meantime. - encoder_->Release(); + + if (encoder_state_ == EncoderState::kMainEncoderUsed) { + // Since we're switching to the fallback encoder, Release the real encoder. + // It may be re-initialized via InitEncode later, and it will continue to + // get Set calls for rates and channel parameters in the meantime. 
+ encoder_->Release(); + } + + if (is_forced) { + encoder_state_ = EncoderState::kForcedFallback; + } else { + encoder_state_ = EncoderState::kFallbackDueToFailure; + } + return true; } @@ -204,8 +276,9 @@ void VideoEncoderSoftwareFallbackWrapper::SetFecControllerOverride( // It is important that only one of those would ever interact with the // |fec_controller_override| at a given time. This is the responsibility // of |this| to maintain. - encoder_->SetFecControllerOverride(fec_controller_override); - fallback_encoder_->SetFecControllerOverride(fec_controller_override); + + fec_controller_override_ = fec_controller_override; + current_encoder()->SetFecControllerOverride(fec_controller_override); } int32_t VideoEncoderSoftwareFallbackWrapper::InitEncode( @@ -217,93 +290,124 @@ int32_t VideoEncoderSoftwareFallbackWrapper::InitEncode( encoder_settings_ = settings; // Clear stored rate/channel parameters. rate_control_parameters_ = absl::nullopt; - ValidateSettingsForForcedFallback(); - // Try to reinit forced software codec if it is in use. - if (TryReInitForcedFallbackEncoder()) { - return WEBRTC_VIDEO_CODEC_OK; - } + RTC_DCHECK_EQ(encoder_state_, EncoderState::kUninitialized) + << "InitEncode() should never be called on an active instance!"; + // Try to init forced software codec if it should be used. if (TryInitForcedFallbackEncoder()) { + PrimeEncoder(current_encoder()); return WEBRTC_VIDEO_CODEC_OK; } - forced_fallback_.active_ = false; int32_t ret = encoder_->InitEncode(codec_settings, settings); if (ret == WEBRTC_VIDEO_CODEC_OK) { - if (use_fallback_encoder_) { - RTC_LOG(LS_WARNING) - << "InitEncode OK, no longer using the software fallback encoder."; - fallback_encoder_->Release(); - use_fallback_encoder_ = false; - } - if (callback_) - encoder_->RegisterEncodeCompleteCallback(callback_); + encoder_state_ = EncoderState::kMainEncoderUsed; + PrimeEncoder(current_encoder()); return ret; } + // Try to instantiate software codec. 
- if (InitFallbackEncoder()) { + if (InitFallbackEncoder(/*is_forced=*/false)) { + PrimeEncoder(current_encoder()); return WEBRTC_VIDEO_CODEC_OK; } - // Software encoder failed, use original return code. + + // Software encoder failed too, use original return code. + encoder_state_ = EncoderState::kUninitialized; return ret; } int32_t VideoEncoderSoftwareFallbackWrapper::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) { callback_ = callback; - int32_t ret = encoder_->RegisterEncodeCompleteCallback(callback); - if (use_fallback_encoder_) - return fallback_encoder_->RegisterEncodeCompleteCallback(callback); - return ret; + return current_encoder()->RegisterEncodeCompleteCallback(callback); } int32_t VideoEncoderSoftwareFallbackWrapper::Release() { - return use_fallback_encoder_ ? fallback_encoder_->Release() - : encoder_->Release(); + if (encoder_state_ == EncoderState::kUninitialized) { + return WEBRTC_VIDEO_CODEC_OK; + } + int32_t ret = current_encoder()->Release(); + encoder_state_ = EncoderState::kUninitialized; + return ret; } int32_t VideoEncoderSoftwareFallbackWrapper::Encode( const VideoFrame& frame, const std::vector* frame_types) { - if (use_fallback_encoder_) - return fallback_encoder_->Encode(frame, frame_types); + switch (encoder_state_) { + case EncoderState::kUninitialized: + return WEBRTC_VIDEO_CODEC_ERROR; + case EncoderState::kMainEncoderUsed: { + return EncodeWithMainEncoder(frame, frame_types); + } + case EncoderState::kFallbackDueToFailure: + case EncoderState::kForcedFallback: + return fallback_encoder_->Encode(frame, frame_types); + } + RTC_CHECK_NOTREACHED(); +} + +int32_t VideoEncoderSoftwareFallbackWrapper::EncodeWithMainEncoder( + const VideoFrame& frame, + const std::vector* frame_types) { int32_t ret = encoder_->Encode(frame, frame_types); // If requested, try a software fallback. 
bool fallback_requested = (ret == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE); - if (fallback_requested && InitFallbackEncoder()) { + if (fallback_requested && InitFallbackEncoder(/*is_forced=*/false)) { // Start using the fallback with this frame. - return fallback_encoder_->Encode(frame, frame_types); + PrimeEncoder(current_encoder()); + if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative && + fallback_encoder_->GetEncoderInfo().supports_native_handle) { + return fallback_encoder_->Encode(frame, frame_types); + } else { + RTC_LOG(INFO) << "Fallback encoder does not support native handle - " + "converting frame to I420"; + rtc::scoped_refptr src_buffer = + frame.video_frame_buffer()->ToI420(); + if (!src_buffer) { + RTC_LOG(LS_ERROR) << "Failed to convert from to I420"; + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; + } + rtc::scoped_refptr dst_buffer = + src_buffer->Scale(codec_settings_.width, codec_settings_.height); + if (!dst_buffer) { + RTC_LOG(LS_ERROR) << "Failed to scale video frame."; + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; + } + VideoFrame scaled_frame = frame; + scaled_frame.set_video_frame_buffer(dst_buffer); + scaled_frame.set_update_rect(VideoFrame::UpdateRect{ + 0, 0, scaled_frame.width(), scaled_frame.height()}); + return fallback_encoder_->Encode(scaled_frame, frame_types); + } } + // Fallback encoder failed too, return original error code. return ret; } void VideoEncoderSoftwareFallbackWrapper::SetRates( const RateControlParameters& parameters) { rate_control_parameters_ = parameters; - encoder_->SetRates(parameters); - if (use_fallback_encoder_) - fallback_encoder_->SetRates(parameters); + return current_encoder()->SetRates(parameters); } void VideoEncoderSoftwareFallbackWrapper::OnPacketLossRateUpdate( float packet_loss_rate) { - VideoEncoder* encoder = - use_fallback_encoder_ ? 
fallback_encoder_.get() : encoder_.get(); - encoder->OnPacketLossRateUpdate(packet_loss_rate); + packet_loss_ = packet_loss_rate; + current_encoder()->OnPacketLossRateUpdate(packet_loss_rate); } void VideoEncoderSoftwareFallbackWrapper::OnRttUpdate(int64_t rtt_ms) { - VideoEncoder* encoder = - use_fallback_encoder_ ? fallback_encoder_.get() : encoder_.get(); - encoder->OnRttUpdate(rtt_ms); + rtt_ = rtt_ms; + current_encoder()->OnRttUpdate(rtt_ms); } void VideoEncoderSoftwareFallbackWrapper::OnLossNotification( const LossNotification& loss_notification) { - VideoEncoder* encoder = - use_fallback_encoder_ ? fallback_encoder_.get() : encoder_.get(); - encoder->OnLossNotification(loss_notification); + loss_notification_ = loss_notification; + current_encoder()->OnLossNotification(loss_notification); } VideoEncoder::EncoderInfo VideoEncoderSoftwareFallbackWrapper::GetEncoderInfo() @@ -312,17 +416,17 @@ VideoEncoder::EncoderInfo VideoEncoderSoftwareFallbackWrapper::GetEncoderInfo() EncoderInfo default_encoder_info = encoder_->GetEncoderInfo(); EncoderInfo info = - use_fallback_encoder_ ? fallback_encoder_info : default_encoder_info; + IsFallbackActive() ? fallback_encoder_info : default_encoder_info; - if (forced_fallback_possible_) { - const auto settings = forced_fallback_.active_ + if (fallback_params_.has_value()) { + const auto settings = (encoder_state_ == EncoderState::kForcedFallback) ? fallback_encoder_info.scaling_settings : default_encoder_info.scaling_settings; info.scaling_settings = settings.thresholds ? 
VideoEncoder::ScalingSettings(settings.thresholds->low, settings.thresholds->high, - forced_fallback_.min_pixels_) + fallback_params_->min_pixels) : VideoEncoder::ScalingSettings::kOff; } else { info.scaling_settings = default_encoder_info.scaling_settings; @@ -331,72 +435,82 @@ VideoEncoder::EncoderInfo VideoEncoderSoftwareFallbackWrapper::GetEncoderInfo() return info; } -bool VideoEncoderSoftwareFallbackWrapper::IsForcedFallbackActive() const { - return (forced_fallback_possible_ && use_fallback_encoder_ && - forced_fallback_.active_); +bool VideoEncoderSoftwareFallbackWrapper::IsFallbackActive() const { + return encoder_state_ == EncoderState::kForcedFallback || + encoder_state_ == EncoderState::kFallbackDueToFailure; } bool VideoEncoderSoftwareFallbackWrapper::TryInitForcedFallbackEncoder() { - if (!forced_fallback_possible_ || use_fallback_encoder_) { - return false; - } - // Fallback not active. - if (!forced_fallback_.IsValid(codec_settings_)) { - return false; - } - // Settings valid, try to instantiate software codec. - RTC_LOG(LS_INFO) << "Request forced SW encoder fallback: " - << codec_settings_.width << "x" << codec_settings_.height; - if (!InitFallbackEncoder()) { + if (!fallback_params_) { return false; } - forced_fallback_.active_ = true; - return true; -} -bool VideoEncoderSoftwareFallbackWrapper::TryReInitForcedFallbackEncoder() { - if (!IsForcedFallbackActive()) { - return false; - } + RTC_DCHECK_EQ(encoder_state_, EncoderState::kUninitialized); - // Forced fallback active. - if (!forced_fallback_.IsValid(codec_settings_)) { - RTC_LOG(LS_INFO) << "Stop forced SW encoder fallback, max pixels exceeded."; - return false; + if (fallback_params_->SupportsResolutionBasedSwitch(codec_settings_)) { + // Settings valid, try to instantiate software codec. 
+ RTC_LOG(LS_INFO) << "Request forced SW encoder fallback: " + << codec_settings_.width << "x" << codec_settings_.height; + return InitFallbackEncoder(/*is_forced=*/true); } - // Settings valid, reinitialize the forced fallback encoder. - RTC_DCHECK(encoder_settings_.has_value()); - if (fallback_encoder_->InitEncode(&codec_settings_, - encoder_settings_.value()) != - WEBRTC_VIDEO_CODEC_OK) { - RTC_LOG(LS_ERROR) << "Failed to init forced SW encoder fallback."; - return false; - } - return true; -} + if (fallback_params_->SupportsTemporalBasedSwitch(codec_settings_)) { + // First init main encoder to see if that supports temporal layers. + if (encoder_->InitEncode(&codec_settings_, encoder_settings_.value()) == + WEBRTC_VIDEO_CODEC_OK) { + encoder_state_ = EncoderState::kMainEncoderUsed; + } -void VideoEncoderSoftwareFallbackWrapper::ValidateSettingsForForcedFallback() { - if (!forced_fallback_possible_) - return; + if (encoder_state_ == EncoderState::kMainEncoderUsed && + encoder_->GetEncoderInfo().fps_allocation[0].size() > 1) { + // Primary encoder already supports temporal layers, use that instead. + return true; + } - if (!IsForcedFallbackPossible(codec_settings_)) { - if (IsForcedFallbackActive()) { - fallback_encoder_->Release(); - use_fallback_encoder_ = false; + // Try to initialize fallback and check if it supports temporal layers. + if (fallback_encoder_->InitEncode(&codec_settings_, + encoder_settings_.value()) == + WEBRTC_VIDEO_CODEC_OK) { + if (fallback_encoder_->GetEncoderInfo().fps_allocation[0].size() > 1) { + // Fallback encoder available and supports temporal layers, use it! + if (encoder_state_ == EncoderState::kMainEncoderUsed) { + // Main encoder initialized but does not support temporal layers, + // release it again. 
+ encoder_->Release(); + } + encoder_state_ = EncoderState::kForcedFallback; + RTC_LOG(LS_INFO) + << "Forced switch to SW encoder due to temporal support."; + return true; + } else { + // Fallback encoder intialization succeeded, but it does not support + // temporal layers either - release it. + fallback_encoder_->Release(); + } + } + + if (encoder_state_ == EncoderState::kMainEncoderUsed) { + // Main encoder already initialized - make use of it. + RTC_LOG(LS_INFO) + << "Cannot fall back for temporal support since fallback that " + "supports is not available. Using main encoder instead."; + return true; } - RTC_LOG(LS_INFO) << "Disable forced_fallback_possible_ due to settings."; - forced_fallback_possible_ = false; } + + // Neither forced fallback mode supported. + return false; } } // namespace std::unique_ptr CreateVideoEncoderSoftwareFallbackWrapper( std::unique_ptr sw_fallback_encoder, - std::unique_ptr hw_encoder) { + std::unique_ptr hw_encoder, + bool prefer_temporal_support) { return std::make_unique( - std::move(sw_fallback_encoder), std::move(hw_encoder)); + std::move(sw_fallback_encoder), std::move(hw_encoder), + prefer_temporal_support); } } // namespace webrtc diff --git a/api/video_codecs/video_encoder_software_fallback_wrapper.h b/api/video_codecs/video_encoder_software_fallback_wrapper.h index fa93ab82e9..5282dcb0c0 100644 --- a/api/video_codecs/video_encoder_software_fallback_wrapper.h +++ b/api/video_codecs/video_encoder_software_fallback_wrapper.h @@ -12,6 +12,7 @@ #define API_VIDEO_CODECS_VIDEO_ENCODER_SOFTWARE_FALLBACK_WRAPPER_H_ #include +#include #include "api/video_codecs/video_encoder.h" #include "rtc_base/system/rtc_export.h" @@ -21,10 +22,25 @@ namespace webrtc { // Used to wrap external VideoEncoders to provide a fallback option on // software encoding when a hardware encoder fails to encode a stream due to // hardware restrictions, such as max resolution. 
+// |bool prefer_temporal_support| indicates that if the software fallback +// encoder supports temporal layers but the hardware encoder does not, a +// fallback should be forced even if the encoder otherwise works. RTC_EXPORT std::unique_ptr CreateVideoEncoderSoftwareFallbackWrapper( std::unique_ptr sw_fallback_encoder, - std::unique_ptr hw_encoder); + std::unique_ptr hw_encoder, + bool prefer_temporal_support); + +// Default fallback for call-sites not yet updated with +// |prefer_temporal_support|. +// TODO(sprang): Remove when usage is gone. +RTC_EXPORT inline std::unique_ptr +CreateVideoEncoderSoftwareFallbackWrapper( + std::unique_ptr sw_fallback_encoder, + std::unique_ptr hw_encoder) { + return CreateVideoEncoderSoftwareFallbackWrapper( + std::move(sw_fallback_encoder), std::move(hw_encoder), false); +} } // namespace webrtc diff --git a/api/video_track_source_proxy.h b/api/video_track_source_proxy.h index 528b7cf701..692ff6493f 100644 --- a/api/video_track_source_proxy.h +++ b/api/video_track_source_proxy.h @@ -23,8 +23,8 @@ namespace webrtc { BEGIN_PROXY_MAP(VideoTrackSource) PROXY_SIGNALING_THREAD_DESTRUCTOR() PROXY_CONSTMETHOD0(SourceState, state) -PROXY_CONSTMETHOD0(bool, remote) -PROXY_CONSTMETHOD0(bool, is_screencast) +BYPASS_PROXY_CONSTMETHOD0(bool, remote) +BYPASS_PROXY_CONSTMETHOD0(bool, is_screencast) PROXY_CONSTMETHOD0(absl::optional, needs_denoising) PROXY_METHOD1(bool, GetStats, Stats*) PROXY_WORKER_METHOD2(void, diff --git a/api/voip/BUILD.gn b/api/voip/BUILD.gn new file mode 100644 index 0000000000..a62dd14207 --- /dev/null +++ b/api/voip/BUILD.gn @@ -0,0 +1,60 @@ +# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved. 
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_source_set("voip_api") {
+  visibility = [ "*" ]
+  sources = [
+    "voip_base.h",
+    "voip_codec.h",
+    "voip_dtmf.h",
+    "voip_engine.h",
+    "voip_network.h",
+    "voip_statistics.h",
+  ]
+  deps = [
+    "..:array_view",
+    "../audio_codecs:audio_codecs_api",
+    "../neteq:neteq_api",
+  ]
+  absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("voip_engine_factory") {
+  visibility = [ "*" ]
+  sources = [
+    "voip_engine_factory.cc",
+    "voip_engine_factory.h",
+  ]
+  deps = [
+    ":voip_api",
+    "..:scoped_refptr",
+    "../../audio/voip:voip_core",
+    "../../modules/audio_device:audio_device_api",
+    "../../modules/audio_processing:api",
+    "../../rtc_base:logging",
+    "../audio_codecs:audio_codecs_api",
+    "../task_queue",
+  ]
+}
+
+if (rtc_include_tests) {
+  rtc_library("voip_engine_factory_unittests") {
+    testonly = true
+    sources = [ "voip_engine_factory_unittest.cc" ]
+    deps = [
+      ":voip_engine_factory",
+      "../../modules/audio_device:mock_audio_device",
+      "../../modules/audio_processing:mocks",
+      "../../test:audio_codec_mocks",
+      "../../test:test_support",
+      "../task_queue:default_task_queue_factory",
+    ]
+  }
+}
diff --git a/api/voip/DEPS b/api/voip/DEPS
new file mode 100644
index 0000000000..3845dffab0
--- /dev/null
+++ b/api/voip/DEPS
@@ -0,0 +1,10 @@
+specific_include_rules = {
+  ".*\.h": [
+    "+third_party/absl/types/optional.h",
+  ],
+
+  "voip_engine_factory.h": [
+    "+modules/audio_device/include/audio_device.h",
+    "+modules/audio_processing/include/audio_processing.h",
+  ],
+}
diff --git a/api/voip/voip_base.h b/api/voip/voip_base.h
new file mode 100644
index 0000000000..ef83b51ed8
--- /dev/null
+++ b/api/voip/voip_base.h @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_BASE_H_ +#define API_VOIP_VOIP_BASE_H_ + +#include "absl/types/optional.h" + +namespace webrtc { + +class Transport; + +// VoipBase interface +// +// VoipBase provides a management interface on a media session using a +// concept called 'channel'. A channel represents an interface handle +// for application to request various media session operations. This +// notion of channel is used throughout other interfaces as well. +// +// Underneath the interface, a channel id is mapped into an audio session +// object that is capable of sending and receiving a single RTP stream with +// another media endpoint. It's possible to create and use multiple active +// channels simultaneously which would mean that particular application +// session has RTP streams with multiple remote endpoints. +// +// A typical example for the usage context is outlined in VoipEngine +// header file. + +enum class ChannelId : int {}; + +class VoipBase { + public: + // Creates a channel. + // Each channel handle maps into one audio media session where each has + // its own separate module for send/receive rtp packet with one peer. + // Caller must set |transport|, webrtc::Transport callback pointer to + // receive rtp/rtcp packets from corresponding media session in VoIP engine. + // VoipEngine framework expects applications to handle network I/O directly + // and injection for incoming RTP from remote endpoint is handled via + // VoipNetwork interface. 
|local_ssrc| is optional and when local_ssrc is not + // set, some random value will be used by voip engine. + // Returns value is optional as to indicate the failure to create channel. + virtual absl::optional CreateChannel( + Transport* transport, + absl::optional local_ssrc) = 0; + + // Releases |channel_id| that no longer has any use. + virtual void ReleaseChannel(ChannelId channel_id) = 0; + + // Starts sending on |channel_id|. This will start microphone if not started + // yet. Returns false if initialization has failed on selected microphone + // device. API is subject to expand to reflect error condition to application + // later. + virtual bool StartSend(ChannelId channel_id) = 0; + + // Stops sending on |channel_id|. If this is the last active channel, it will + // stop microphone input from underlying audio platform layer. + // Returns false if termination logic has failed on selected microphone + // device. API is subject to expand to reflect error condition to application + // later. + virtual bool StopSend(ChannelId channel_id) = 0; + + // Starts playing on speaker device for |channel_id|. + // This will start underlying platform speaker device if not started. + // Returns false if initialization has failed + // on selected speaker device. API is subject to expand to reflect error + // condition to application later. + virtual bool StartPlayout(ChannelId channel_id) = 0; + + // Stops playing on speaker device for |channel_id|. + // If this is the last active channel playing, then it will stop speaker + // from the platform layer. + // Returns false if termination logic has failed on selected speaker device. + // API is subject to expand to reflect error condition to application later. 
+ virtual bool StopPlayout(ChannelId channel_id) = 0; + + protected: + virtual ~VoipBase() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_BASE_H_ diff --git a/api/voip/voip_codec.h b/api/voip/voip_codec.h new file mode 100644 index 0000000000..eb42c449d9 --- /dev/null +++ b/api/voip/voip_codec.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_CODEC_H_ +#define API_VOIP_VOIP_CODEC_H_ + +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/voip/voip_base.h" + +namespace webrtc { + +// VoipCodec interface currently provides any codec related interface +// such as setting encoder and decoder types that are negotiated with +// remote endpoint. Typically after SDP offer and answer exchange, +// the local endpoint understands what are the codec payload types that +// are used with negotiated codecs. This interface is subject to expand +// as needed in future. +// +// This interface requires a channel id created via VoipBase interface. +class VoipCodec { + public: + // Set encoder type here along with its payload type to use. + virtual void SetSendCodec(ChannelId channel_id, + int payload_type, + const SdpAudioFormat& encoder_spec) = 0; + + // Set decoder payload type here. In typical offer and answer model, + // this should be called after payload type has been agreed in media + // session. Note that payload type can differ with same codec in each + // direction. 
+ virtual void SetReceiveCodecs( + ChannelId channel_id, + const std::map& decoder_specs) = 0; + + protected: + virtual ~VoipCodec() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_CODEC_H_ diff --git a/api/voip/voip_dtmf.h b/api/voip/voip_dtmf.h new file mode 100644 index 0000000000..56817bae50 --- /dev/null +++ b/api/voip/voip_dtmf.h @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_DTMF_H_ +#define API_VOIP_VOIP_DTMF_H_ + +#include "api/voip/voip_base.h" + +namespace webrtc { + +// DTMF events and their event codes as defined in +// https://tools.ietf.org/html/rfc4733#section-7 +enum class DtmfEvent : uint8_t { + kDigitZero = 0, + kDigitOne, + kDigitTwo, + kDigitThree, + kDigitFour, + kDigitFive, + kDigitSix, + kDigitSeven, + kDigitEight, + kDigitNine, + kAsterisk, + kHash, + kLetterA, + kLetterB, + kLetterC, + kLetterD +}; + +// VoipDtmf interface provides DTMF related interfaces such +// as sending DTMF events to the remote endpoint. +class VoipDtmf { + public: + // Register the payload type and sample rate for DTMF (RFC 4733) payload. + // Must be called exactly once prior to calling SendDtmfEvent after payload + // type has been negotiated with remote. + virtual void RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) = 0; + + // Send DTMF named event as specified by + // https://tools.ietf.org/html/rfc4733#section-3.2 + // |duration_ms| specifies the duration of DTMF packets that will be emitted + // in place of real RTP packets instead. 
+ // Must be called after RegisterTelephoneEventType and VoipBase::StartSend + // have been called. + // Returns true if the requested DTMF event is successfully scheduled. + virtual bool SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) = 0; + + protected: + virtual ~VoipDtmf() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_DTMF_H_ diff --git a/api/voip/voip_engine.h b/api/voip/voip_engine.h new file mode 100644 index 0000000000..5724b6b5d9 --- /dev/null +++ b/api/voip/voip_engine.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_ENGINE_H_ +#define API_VOIP_VOIP_ENGINE_H_ + +namespace webrtc { + +class VoipBase; +class VoipCodec; +class VoipNetwork; +class VoipDtmf; +class VoipStatistics; + +// VoipEngine is the main interface serving as the entry point for all VoIP +// APIs. A single instance of VoipEngine should suffice the most of the need for +// typical VoIP applications as it handles multiple media sessions including a +// specialized session type like ad-hoc mesh conferencing. Below example code +// describes the typical sequence of API usage. Each API header contains more +// description on what the methods are used for. +// +// // Caller is responsible of setting desired audio components. 
+// VoipEngineConfig config; +// config.encoder_factory = CreateBuiltinAudioEncoderFactory(); +// config.decoder_factory = CreateBuiltinAudioDecoderFactory(); +// config.task_queue_factory = CreateDefaultTaskQueueFactory(); +// config.audio_device = +// AudioDeviceModule::Create(AudioDeviceModule::kPlatformDefaultAudio, +// config.task_queue_factory.get()); +// config.audio_processing = AudioProcessingBuilder().Create(); +// +// auto voip_engine = CreateVoipEngine(std::move(config)); +// if (!voip_engine) return some_failure; +// +// auto& voip_base = voip_engine->Base(); +// auto& voip_codec = voip_engine->Codec(); +// auto& voip_network = voip_engine->Network(); +// +// absl::optional channel = +// voip_base.CreateChannel(&app_transport_); +// if (!channel) return some_failure; +// +// // After SDP offer/answer, set payload type and codecs that have been +// // decided through SDP negotiation. +// voip_codec.SetSendCodec(*channel, ...); +// voip_codec.SetReceiveCodecs(*channel, ...); +// +// // Start sending and playing RTP on voip channel. +// voip_base.StartSend(*channel); +// voip_base.StartPlayout(*channel); +// +// // Inject received RTP/RTCP through VoipNetwork interface. +// voip_network.ReceivedRTPPacket(*channel, ...); +// voip_network.ReceivedRTCPPacket(*channel, ...); +// +// // Stop and release voip channel. +// voip_base.StopSend(*channel); +// voip_base.StopPlayout(*channel); +// voip_base.ReleaseChannel(*channel); +// +// Current VoipEngine defines three sub-API classes and is subject to expand in +// near future. +class VoipEngine { + public: + virtual ~VoipEngine() = default; + + // VoipBase is the audio session management interface that + // creates/releases/starts/stops an one-to-one audio media session. + virtual VoipBase& Base() = 0; + + // VoipNetwork provides injection APIs that would enable application + // to send and receive RTP/RTCP packets. There is no default network module + // that provides RTP transmission and reception. 
+ virtual VoipNetwork& Network() = 0; + + // VoipCodec provides codec configuration APIs for encoder and decoders. + virtual VoipCodec& Codec() = 0; + + // VoipDtmf provides DTMF event APIs to register and send DTMF events. + virtual VoipDtmf& Dtmf() = 0; + + // VoipStatistics provides performance metrics around audio decoding module + // and jitter buffer (NetEq). + virtual VoipStatistics& Statistics() = 0; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_ENGINE_H_ diff --git a/api/voip/voip_engine_factory.cc b/api/voip/voip_engine_factory.cc new file mode 100644 index 0000000000..88f63f9c92 --- /dev/null +++ b/api/voip/voip_engine_factory.cc @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/voip/voip_engine_factory.h" + +#include + +#include "audio/voip/voip_core.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +std::unique_ptr CreateVoipEngine(VoipEngineConfig config) { + RTC_CHECK(config.encoder_factory); + RTC_CHECK(config.decoder_factory); + RTC_CHECK(config.task_queue_factory); + RTC_CHECK(config.audio_device_module); + + if (!config.audio_processing) { + RTC_DLOG(INFO) << "No audio processing functionality provided."; + } + + return std::make_unique(std::move(config.encoder_factory), + std::move(config.decoder_factory), + std::move(config.task_queue_factory), + std::move(config.audio_device_module), + std::move(config.audio_processing)); +} + +} // namespace webrtc diff --git a/api/voip/voip_engine_factory.h b/api/voip/voip_engine_factory.h new file mode 100644 index 0000000000..62fe8011a6 --- /dev/null +++ b/api/voip/voip_engine_factory.h @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_ENGINE_FACTORY_H_ +#define API_VOIP_VOIP_ENGINE_FACTORY_H_ + +#include + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/voip/voip_engine.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_processing/include/audio_processing.h" + +namespace webrtc { + +// VoipEngineConfig is a struct that defines parameters to instantiate a +// VoipEngine instance through CreateVoipEngine factory method. 
Each member is
+// marked with comments as either mandatory or optional and default
+// implementations that applications can use.
+struct VoipEngineConfig {
+  // Mandatory (e.g. api/audio_codec/builtin_audio_encoder_factory).
+  // AudioEncoderFactory provides a set of audio codecs for VoipEngine to encode
+  // the audio input sample. Application can choose to limit the set to reduce
+  // application footprint.
+  rtc::scoped_refptr encoder_factory;
+
+  // Mandatory (e.g. api/audio_codec/builtin_audio_decoder_factory).
+  // AudioDecoderFactory provides a set of audio codecs for VoipEngine to decode
+  // the received RTP packets from remote media endpoint. Application can choose
+  // to limit the set to reduce application footprint.
+  rtc::scoped_refptr decoder_factory;
+
+  // Mandatory (e.g. api/task_queue/default_task_queue_factory).
+  // TaskQueueFactory provided for VoipEngine to work asynchronously on its
+  // encoding flow.
+  std::unique_ptr task_queue_factory;
+
+  // Mandatory (e.g. modules/audio_device/include).
+  // AudioDeviceModule that periodically provides audio input samples from
+  // recording device (e.g. microphone) and requests audio output samples to
+  // play through its output device (e.g. speaker).
+  rtc::scoped_refptr audio_device_module;
+
+  // Optional (e.g. modules/audio_processing/include).
+  // AudioProcessing provides audio processing functionalities (e.g. acoustic
+  // echo cancellation, noise suppression, gain control, etc) on audio input
+  // samples for VoipEngine. When optionally not set, VoipEngine will not have
+  // such functionalities to perform on audio input samples received from
+  // AudioDeviceModule.
+  rtc::scoped_refptr audio_processing;
+};
+
+// Creates a VoipEngine instance with provided VoipEngineConfig.
+std::unique_ptr CreateVoipEngine(VoipEngineConfig config); + +} // namespace webrtc + +#endif // API_VOIP_VOIP_ENGINE_FACTORY_H_ diff --git a/api/voip/voip_engine_factory_unittest.cc b/api/voip/voip_engine_factory_unittest.cc new file mode 100644 index 0000000000..d0b8438368 --- /dev/null +++ b/api/voip/voip_engine_factory_unittest.cc @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include "api/task_queue/default_task_queue_factory.h" +#include "api/voip/voip_engine_factory.h" +#include "modules/audio_device/include/mock_audio_device.h" +#include "modules/audio_processing/include/mock_audio_processing.h" +#include "test/gtest.h" +#include "test/mock_audio_decoder_factory.h" +#include "test/mock_audio_encoder_factory.h" + +namespace webrtc { +namespace { + +// Create voip engine with mock modules as normal use case. +TEST(VoipEngineFactoryTest, CreateEngineWithMockModules) { + VoipEngineConfig config; + config.encoder_factory = new rtc::RefCountedObject(); + config.decoder_factory = new rtc::RefCountedObject(); + config.task_queue_factory = CreateDefaultTaskQueueFactory(); + config.audio_processing = + new rtc::RefCountedObject(); + config.audio_device_module = test::MockAudioDeviceModule::CreateNice(); + + auto voip_engine = CreateVoipEngine(std::move(config)); + EXPECT_NE(voip_engine, nullptr); +} + +// Create voip engine without setting audio processing as optional component. 
+TEST(VoipEngineFactoryTest, UseNoAudioProcessing) { + VoipEngineConfig config; + config.encoder_factory = new rtc::RefCountedObject(); + config.decoder_factory = new rtc::RefCountedObject(); + config.task_queue_factory = CreateDefaultTaskQueueFactory(); + config.audio_device_module = test::MockAudioDeviceModule::CreateNice(); + + auto voip_engine = CreateVoipEngine(std::move(config)); + EXPECT_NE(voip_engine, nullptr); +} + +} // namespace +} // namespace webrtc diff --git a/api/voip/voip_network.h b/api/voip/voip_network.h new file mode 100644 index 0000000000..c49c7695b9 --- /dev/null +++ b/api/voip/voip_network.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_NETWORK_H_ +#define API_VOIP_VOIP_NETWORK_H_ + +#include "api/array_view.h" +#include "api/voip/voip_base.h" + +namespace webrtc { + +// VoipNetwork interface provides any network related interfaces such as +// processing received RTP/RTCP packet from remote endpoint. This interface +// requires a ChannelId created via VoipBase interface. Note that using invalid +// (previously released) ChannelId will silently fail these API calls as it +// would have released underlying audio components. It's anticipated that caller +// may be using different thread for network I/O where released channel id is +// still used to input incoming RTP packets in which case we should silently +// ignore. The interface is subjected to expand as needed in near future. +class VoipNetwork { + public: + // The data received from the network including RTP header is passed here. 
+ virtual void ReceivedRTPPacket(ChannelId channel_id, + rtc::ArrayView rtp_packet) = 0; + + // The data received from the network including RTCP header is passed here. + virtual void ReceivedRTCPPacket( + ChannelId channel_id, + rtc::ArrayView rtcp_packet) = 0; + + protected: + virtual ~VoipNetwork() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_NETWORK_H_ diff --git a/api/voip/voip_statistics.h b/api/voip/voip_statistics.h new file mode 100644 index 0000000000..cf01e95e9e --- /dev/null +++ b/api/voip/voip_statistics.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VOIP_VOIP_STATISTICS_H_ +#define API_VOIP_VOIP_STATISTICS_H_ + +#include "api/neteq/neteq.h" +#include "api/voip/voip_base.h" + +namespace webrtc { + +struct IngressStatistics { + // Stats included from api/neteq/neteq.h. + NetEqLifetimeStatistics neteq_stats; + + // Represents the total duration in seconds of all samples that have been + // received. + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalsamplesduration + double total_duration = 0.0; +}; + +// VoipStatistics interface provides the interfaces for querying metrics around +// the jitter buffer (NetEq) performance. +class VoipStatistics { + public: + // Gets the audio ingress statistics. Returns absl::nullopt when channel_id is + // invalid. 
+ virtual absl::optional GetIngressStatistics( + ChannelId channel_id) = 0; + + protected: + virtual ~VoipStatistics() = default; +}; + +} // namespace webrtc + +#endif // API_VOIP_VOIP_STATISTICS_H_ diff --git a/audio/BUILD.gn b/audio/BUILD.gn index 80f2d523e1..bc61c3169b 100644 --- a/audio/BUILD.gn +++ b/audio/BUILD.gn @@ -26,8 +26,12 @@ rtc_library("audio") { "audio_transport_impl.h", "channel_receive.cc", "channel_receive.h", + "channel_receive_frame_transformer_delegate.cc", + "channel_receive_frame_transformer_delegate.h", "channel_send.cc", "channel_send.h", + "channel_send_frame_transformer_delegate.cc", + "channel_send_frame_transformer_delegate.h", "conversion.h", "null_audio_poller.cc", "null_audio_poller.h", @@ -38,6 +42,7 @@ rtc_library("audio") { deps = [ "../api:array_view", "../api:call_api", + "../api:frame_transformer_interface", "../api:function_view", "../api:rtp_headers", "../api:rtp_parameters", @@ -45,6 +50,7 @@ rtc_library("audio") { "../api:transport_api", "../api/audio:aec3_factory", "../api/audio:audio_frame_api", + "../api/audio:audio_frame_processor", "../api/audio:audio_mixer_api", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", @@ -62,13 +68,17 @@ rtc_library("audio") { "../common_audio:common_audio_c", "../logging:rtc_event_audio", "../logging:rtc_stream_config", + "../modules/async_audio_processing", "../modules/audio_coding", "../modules/audio_coding:audio_coding_module_typedefs", "../modules/audio_coding:audio_encoder_cng", "../modules/audio_coding:audio_network_adaptor_config", + "../modules/audio_coding:red", "../modules/audio_device", "../modules/audio_processing", "../modules/audio_processing:api", + "../modules/audio_processing:audio_frame_proxies", + "../modules/audio_processing:rms_level", "../modules/pacing", "../modules/remote_bitrate_estimator", "../modules/rtp_rtcp", @@ -82,10 +92,15 @@ rtc_library("audio") { "../rtc_base:rtc_task_queue", "../rtc_base:safe_minmax", 
"../rtc_base/experiments:field_trial_parser", + "../rtc_base/synchronization:mutex", + "../rtc_base/synchronization:sequence_checker", + "../rtc_base/task_utils:to_queued_task", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", "utility:audio_frame_operations", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", ] @@ -118,6 +133,8 @@ if (rtc_include_tests) { "audio_send_stream_tests.cc", "audio_send_stream_unittest.cc", "audio_state_unittest.cc", + "channel_receive_frame_transformer_delegate_unittest.cc", + "channel_send_frame_transformer_delegate_unittest.cc", "mock_voe_channel_proxy.h", "remix_resample_unittest.cc", "test/audio_stats_test.cc", @@ -163,6 +180,9 @@ if (rtc_include_tests) { "../system_wrappers", "../test:audio_codec_mocks", "../test:field_trial", + "../test:mock_frame_transformer", + "../test:mock_transformable_frame", + "../test:mock_transport", "../test:rtp_test_utils", "../test:test_common", "../test:test_support", @@ -188,6 +208,7 @@ if (rtc_include_tests) { "../api:network_emulation_manager_api", "../api:peer_connection_quality_test_fixture_api", "../api:simulated_network_api", + "../api:time_controller", "../call:simulated_network", "../common_audio", "../system_wrappers", @@ -203,7 +224,6 @@ if (rtc_include_tests) { if (is_android) { deps += [ "//testing/android/native_test:native_test_native_code" ] } - data = [ "../resources/voice_engine/audio_tiny16.wav", "../resources/voice_engine/audio_tiny48.wav", @@ -213,20 +233,30 @@ if (rtc_include_tests) { group("low_bandwidth_audio_perf_test") { testonly = true - deps = [ ":low_bandwidth_audio_test" ] + deps = [ + ":low_bandwidth_audio_test", + "//third_party/catapult/tracing/tracing/proto:histogram_proto", + "//third_party/protobuf:py_proto_runtime", + ] data = [ "test/low_bandwidth_audio_test.py", "../resources/voice_engine/audio_tiny16.wav", "../resources/voice_engine/audio_tiny48.wav", + 
"${root_out_dir}/pyproto/tracing/tracing/proto/histogram_pb2.py", ] + + # TODO(http://crbug.com/1029452): Create a cleaner target with just the + # tracing python code. We don't need Polymer for instance. + data_deps = [ "//third_party/catapult/tracing:convert_chart_json" ] + if (is_win) { data += [ "${root_out_dir}/low_bandwidth_audio_test.exe" ] } else { data += [ "${root_out_dir}/low_bandwidth_audio_test" ] } - if (is_linux || is_android) { + if (is_linux || is_chromeos || is_android) { data += [ "../tools_webrtc/audio_quality/linux/PolqaOem64", "../tools_webrtc/audio_quality/linux/pesq", diff --git a/audio/DEPS b/audio/DEPS index 8bb1f80805..16f8194022 100644 --- a/audio/DEPS +++ b/audio/DEPS @@ -2,6 +2,7 @@ include_rules = [ "+call", "+common_audio", "+logging/rtc_event_log", + "+modules/async_audio_processing", "+modules/audio_coding", "+modules/audio_device", "+modules/audio_mixer", diff --git a/audio/OWNERS b/audio/OWNERS index 1a4efa65c9..c0255e4d5f 100644 --- a/audio/OWNERS +++ b/audio/OWNERS @@ -1,10 +1,3 @@ -solenberg@webrtc.org -ossu@webrtc.org gustaf@webrtc.org peah@webrtc.org saza@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/audio/audio_level.cc b/audio/audio_level.cc index 06702b4c0d..7874b73f1c 100644 --- a/audio/audio_level.cc +++ b/audio/audio_level.cc @@ -22,7 +22,7 @@ AudioLevel::AudioLevel() AudioLevel::~AudioLevel() {} void AudioLevel::Reset() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); abs_max_ = 0; count_ = 0; current_level_full_range_ = 0; @@ -31,24 +31,24 @@ void AudioLevel::Reset() { } int16_t AudioLevel::LevelFullRange() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return current_level_full_range_; } void AudioLevel::ResetLevelFullRange() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); abs_max_ = 0; count_ = 0; current_level_full_range_ = 0; } double AudioLevel::TotalEnergy() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return total_energy_; } double AudioLevel::TotalDuration() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return total_duration_; } @@ -63,7 +63,7 @@ void AudioLevel::ComputeLevel(const AudioFrame& audioFrame, double duration) { // Protect member access using a lock since this method is called on a // dedicated audio thread in the RecordedDataIsAvailable() callback. 
- rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); if (abs_value > abs_max_) abs_max_ = abs_value; diff --git a/audio/audio_level.h b/audio/audio_level.h index 430edb1703..acd1231fe2 100644 --- a/audio/audio_level.h +++ b/audio/audio_level.h @@ -11,7 +11,7 @@ #ifndef AUDIO_AUDIO_LEVEL_H_ #define AUDIO_AUDIO_LEVEL_H_ -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -59,14 +59,14 @@ class AudioLevel { private: enum { kUpdateFrequency = 10 }; - rtc::CriticalSection crit_sect_; + mutable Mutex mutex_; - int16_t abs_max_ RTC_GUARDED_BY(crit_sect_); - int16_t count_ RTC_GUARDED_BY(crit_sect_); - int16_t current_level_full_range_ RTC_GUARDED_BY(crit_sect_); + int16_t abs_max_ RTC_GUARDED_BY(mutex_); + int16_t count_ RTC_GUARDED_BY(mutex_); + int16_t current_level_full_range_ RTC_GUARDED_BY(mutex_); - double total_energy_ RTC_GUARDED_BY(crit_sect_) = 0.0; - double total_duration_ RTC_GUARDED_BY(crit_sect_) = 0.0; + double total_energy_ RTC_GUARDED_BY(mutex_) = 0.0; + double total_duration_ RTC_GUARDED_BY(mutex_) = 0.0; }; } // namespace voe diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc index 2e02388531..6c8e94269d 100644 --- a/audio/audio_receive_stream.cc +++ b/audio/audio_receive_stream.cc @@ -82,7 +82,8 @@ std::unique_ptr CreateChannelReceive( config.jitter_buffer_max_packets, config.jitter_buffer_fast_accelerate, config.jitter_buffer_min_delay_ms, config.jitter_buffer_enable_rtx_handling, config.decoder_factory, - config.codec_pair_id, config.frame_decryptor, config.crypto_options); + config.codec_pair_id, config.frame_decryptor, config.crypto_options, + std::move(config.frame_transformer)); } } // namespace @@ -172,7 +173,8 @@ void AudioReceiveStream::Stop() { audio_state()->RemoveReceivingStream(this); } -webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const { +webrtc::AudioReceiveStream::Stats 
AudioReceiveStream::GetStats( + bool get_and_clear_legacy_stats) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); webrtc::AudioReceiveStream::Stats stats; stats.remote_ssrc = config_.rtp.remote_ssrc; @@ -209,7 +211,7 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const { rtc::TimeMillis()); // Get jitter buffer and total delay (alg + jitter + playout) stats. - auto ns = channel_receive_->GetNetworkStatistics(); + auto ns = channel_receive_->GetNetworkStatistics(get_and_clear_legacy_stats); stats.fec_packets_received = ns.fecPacketsReceived; stats.fec_packets_discarded = ns.fecPacketsDiscarded; stats.jitter_buffer_ms = ns.currentBufferSize; @@ -222,6 +224,9 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const { static_cast(ns.jitterBufferDelayMs) / static_cast(rtc::kNumMillisecsPerSec); stats.jitter_buffer_emitted_count = ns.jitterBufferEmittedCount; + stats.jitter_buffer_target_delay_seconds = + static_cast(ns.jitterBufferTargetDelayMs) / + static_cast(rtc::kNumMillisecsPerSec); stats.inserted_samples_for_deceleration = ns.insertedSamplesForDeceleration; stats.removed_samples_for_acceleration = ns.removedSamplesForAcceleration; stats.expand_rate = Q14ToFloat(ns.currentExpandRate); @@ -276,6 +281,12 @@ std::vector AudioReceiveStream::GetSources() const { return source_tracker_.GetSources(); } +#ifndef DISABLE_RECORDER +void AudioReceiveStream::InjectRecorder(Recorder* recorder) { + channel_receive_->InjectRecorder(recorder); +} +#endif + AudioMixer::Source::AudioFrameInfo AudioReceiveStream::GetAudioFrameWithInfo( int sample_rate_hz, AudioFrame* audio_frame) { @@ -295,7 +306,7 @@ int AudioReceiveStream::PreferredSampleRate() const { return channel_receive_->PreferredSampleRate(); } -int AudioReceiveStream::id() const { +uint32_t AudioReceiveStream::id() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return config_.rtp.remote_ssrc; } @@ -325,7 +336,7 @@ void AudioReceiveStream::SetEstimatedPlayoutNtpTimestampMs( 
time_ms); } -void AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { +bool AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK_RUN_ON(&module_process_thread_checker_); return channel_receive_->SetMinimumPlayoutDelay(delay_ms); } @@ -345,14 +356,6 @@ void AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { channel_receive_->ReceivedRTCPPacket(packet, length); } -void AudioReceiveStream::OnRtpPacket(const RtpPacketReceived& packet) { - // TODO(solenberg): Tests call this function on a network thread, libjingle - // calls on the worker thread. We should move towards always using a network - // thread. Then this check can be enabled. - // RTC_DCHECK(!thread_checker_.IsCurrent()); - channel_receive_->OnRtpPacket(packet); -} - const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return config_; @@ -406,6 +409,12 @@ void AudioReceiveStream::ConfigureStream(AudioReceiveStream* stream, channel_receive->SetReceiveCodecs(new_config.decoder_map); } + if (first_time || + old_config.frame_transformer != new_config.frame_transformer) { + channel_receive->SetDepacketizerToDecoderFrameTransformer( + new_config.frame_transformer); + } + stream->config_ = new_config; } } // namespace internal diff --git a/audio/audio_receive_stream.h b/audio/audio_receive_stream.h index 24dcbf247d..b6e189ee62 100644 --- a/audio/audio_receive_stream.h +++ b/audio/audio_receive_stream.h @@ -21,7 +21,6 @@ #include "call/audio_receive_stream.h" #include "call/syncable.h" #include "modules/rtp_rtcp/source/source_tracker.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" @@ -61,24 +60,28 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, const rtc::scoped_refptr& audio_state, webrtc::RtcEventLog* event_log, std::unique_ptr channel_receive); + + AudioReceiveStream() = delete; + AudioReceiveStream(const 
AudioReceiveStream&) = delete; + AudioReceiveStream& operator=(const AudioReceiveStream&) = delete; + ~AudioReceiveStream() override; // webrtc::AudioReceiveStream implementation. void Reconfigure(const webrtc::AudioReceiveStream::Config& config) override; void Start() override; void Stop() override; - webrtc::AudioReceiveStream::Stats GetStats() const override; + webrtc::AudioReceiveStream::Stats GetStats( + bool get_and_clear_legacy_stats) const override; void SetSink(AudioSinkInterface* sink) override; void SetGain(float gain) override; bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override; int GetBaseMinimumPlayoutDelayMs() const override; std::vector GetSources() const override; - // TODO(nisse): We don't formally implement RtpPacketSinkInterface, and this - // method shouldn't be needed. But it's currently used by the - // AudioReceiveStreamTest.ReceiveRtpPacket unittest. Figure out if that test - // shuld be refactored or deleted, and then delete this method. - void OnRtpPacket(const RtpPacketReceived& packet); +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder) override; +#endif // AudioMixer::Source AudioFrameInfo GetAudioFrameWithInfo(int sample_rate_hz, @@ -87,13 +90,13 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, int PreferredSampleRate() const override; // Syncable - int id() const override; + uint32_t id() const override; absl::optional GetInfo() const override; bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const override; void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) override; - void SetMinimumPlayoutDelay(int delay_ms) override; + bool SetMinimumPlayoutDelay(int delay_ms) override; void AssociateSendStream(AudioSendStream* send_stream); void DeliverRtcp(const uint8_t* packet, size_t length); @@ -118,8 +121,6 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, bool playing_ RTC_GUARDED_BY(worker_thread_checker_) = false; 
std::unique_ptr rtp_stream_receiver_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioReceiveStream); }; } // namespace internal } // namespace webrtc diff --git a/audio/audio_receive_stream_unittest.cc b/audio/audio_receive_stream_unittest.cc index b8eff0a443..f0f150a0b5 100644 --- a/audio/audio_receive_stream_unittest.cc +++ b/audio/audio_receive_stream_unittest.cc @@ -53,8 +53,6 @@ AudioDecodingCallStats MakeAudioDecodeStatsForTest() { const uint32_t kRemoteSsrc = 1234; const uint32_t kLocalSsrc = 5678; -const size_t kOneByteExtensionHeaderLength = 4; -const size_t kOneByteExtensionLength = 4; const int kAudioLevelId = 3; const int kTransportSequenceNumberId = 4; const int kJitterBufferDelay = -7; @@ -69,21 +67,26 @@ const std::pair kReceiveCodec = { 123, {"codec_name_recv", 96000, 0}}; const NetworkStatistics kNetworkStats = { - 123, 456, false, 789012, 3456, 123, 456, 789, 543, 432, - 321, 123, 101, 0, {}, 789, 12, 345, 678, 901, - 0, -1, -1, -1, -1, 0, 0, 0, 0}; + 123, 456, false, 789012, 3456, 123, 456, 789, 543, 123, 432, 321, 123, + 101, 789, 12, 345, 678, 901, 0, -1, -1, 0, 0, 0, 0}; const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest(); struct ConfigHelper { - ConfigHelper() : ConfigHelper(new rtc::RefCountedObject()) {} + explicit ConfigHelper(bool use_null_audio_processing) + : ConfigHelper(new rtc::RefCountedObject(), + use_null_audio_processing) {} - explicit ConfigHelper(rtc::scoped_refptr audio_mixer) + ConfigHelper(rtc::scoped_refptr audio_mixer, + bool use_null_audio_processing) : audio_mixer_(audio_mixer) { using ::testing::Invoke; AudioState::Config config; config.audio_mixer = audio_mixer_; - config.audio_processing = new rtc::RefCountedObject(); + config.audio_processing = + use_null_audio_processing + ? 
nullptr + : new rtc::RefCountedObject(); config.audio_device_module = new rtc::RefCountedObject>(); audio_state_ = AudioState::Create(config); @@ -100,6 +103,8 @@ struct ConfigHelper { .WillRepeatedly(Invoke([](const std::map& codecs) { EXPECT_THAT(codecs, ::testing::IsEmpty()); })); + EXPECT_CALL(*channel_receive_, SetDepacketizerToDecoderFrameTransformer(_)) + .Times(1); stream_config_.rtp.local_ssrc = kLocalSsrc; stream_config_.rtp.remote_ssrc = kRemoteSsrc; @@ -140,7 +145,7 @@ struct ConfigHelper { .WillOnce(Return(kTotalOutputEnergy)); EXPECT_CALL(*channel_receive_, GetTotalOutputDuration()) .WillOnce(Return(kTotalOutputDuration)); - EXPECT_CALL(*channel_receive_, GetNetworkStatistics()) + EXPECT_CALL(*channel_receive_, GetNetworkStatistics(_)) .WillOnce(Return(kNetworkStats)); EXPECT_CALL(*channel_receive_, GetDecodingCallStatistics()) .WillOnce(Return(kAudioDecodeStats)); @@ -161,45 +166,6 @@ struct ConfigHelper { MockTransport rtcp_send_transport_; }; -void BuildOneByteExtension(std::vector::iterator it, - int id, - uint32_t extension_value, - size_t value_length) { - const uint16_t kRtpOneByteHeaderExtensionId = 0xBEDE; - ByteWriter::WriteBigEndian(&(*it), kRtpOneByteHeaderExtensionId); - it += 2; - - ByteWriter::WriteBigEndian(&(*it), kOneByteExtensionLength / 4); - it += 2; - const size_t kExtensionDataLength = kOneByteExtensionLength - 1; - uint32_t shifted_value = extension_value - << (8 * (kExtensionDataLength - value_length)); - *it = (id << 4) + (static_cast(value_length) - 1); - ++it; - ByteWriter::WriteBigEndian(&(*it), - shifted_value); -} - -const std::vector CreateRtpHeaderWithOneByteExtension( - int extension_id, - uint32_t extension_value, - size_t value_length) { - std::vector header; - header.resize(webrtc::kRtpHeaderSize + kOneByteExtensionHeaderLength + - kOneByteExtensionLength); - header[0] = 0x80; // Version 2. - header[0] |= 0x10; // Set extension bit. - header[1] = 100; // Payload type. - header[1] |= 0x80; // Marker bit is set. 
- ByteWriter::WriteBigEndian(&header[2], 0x1234); // Sequence number. - ByteWriter::WriteBigEndian(&header[4], 0x5678); // Timestamp. - ByteWriter::WriteBigEndian(&header[8], 0x4321); // SSRC. - - BuildOneByteExtension(header.begin() + webrtc::kRtpHeaderSize, extension_id, - extension_value, value_length); - return header; -} - const std::vector CreateRtcpSenderReport() { std::vector packet; const size_t kRtcpSrLength = 28; // In bytes. @@ -228,179 +194,180 @@ TEST(AudioReceiveStreamTest, ConfigToString) { } TEST(AudioReceiveStreamTest, ConstructDestruct) { - ConfigHelper helper; - auto recv_stream = helper.CreateAudioReceiveStream(); -} - -TEST(AudioReceiveStreamTest, ReceiveRtpPacket) { - ConfigHelper helper; - helper.config().rtp.transport_cc = true; - auto recv_stream = helper.CreateAudioReceiveStream(); - const int kTransportSequenceNumberValue = 1234; - std::vector rtp_packet = CreateRtpHeaderWithOneByteExtension( - kTransportSequenceNumberId, kTransportSequenceNumberValue, 2); - constexpr int64_t packet_time_us = 5678000; - - RtpPacketReceived parsed_packet; - ASSERT_TRUE(parsed_packet.Parse(&rtp_packet[0], rtp_packet.size())); - parsed_packet.set_arrival_time_ms((packet_time_us + 500) / 1000); - - EXPECT_CALL(*helper.channel_receive(), - OnRtpPacket(::testing::Ref(parsed_packet))); - - recv_stream->OnRtpPacket(parsed_packet); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(use_null_audio_processing); + auto recv_stream = helper.CreateAudioReceiveStream(); + } } TEST(AudioReceiveStreamTest, ReceiveRtcpPacket) { - ConfigHelper helper; - helper.config().rtp.transport_cc = true; - auto recv_stream = helper.CreateAudioReceiveStream(); - std::vector rtcp_packet = CreateRtcpSenderReport(); - EXPECT_CALL(*helper.channel_receive(), - ReceivedRTCPPacket(&rtcp_packet[0], rtcp_packet.size())) - .WillOnce(Return()); - recv_stream->DeliverRtcp(&rtcp_packet[0], rtcp_packet.size()); + for (bool use_null_audio_processing : {false, true}) { + 
ConfigHelper helper(use_null_audio_processing); + helper.config().rtp.transport_cc = true; + auto recv_stream = helper.CreateAudioReceiveStream(); + std::vector rtcp_packet = CreateRtcpSenderReport(); + EXPECT_CALL(*helper.channel_receive(), + ReceivedRTCPPacket(&rtcp_packet[0], rtcp_packet.size())) + .WillOnce(Return()); + recv_stream->DeliverRtcp(&rtcp_packet[0], rtcp_packet.size()); + } } TEST(AudioReceiveStreamTest, GetStats) { - ConfigHelper helper; - auto recv_stream = helper.CreateAudioReceiveStream(); - helper.SetupMockForGetStats(); - AudioReceiveStream::Stats stats = recv_stream->GetStats(); - EXPECT_EQ(kRemoteSsrc, stats.remote_ssrc); - EXPECT_EQ(kCallStats.payload_bytes_rcvd, stats.payload_bytes_rcvd); - EXPECT_EQ(kCallStats.header_and_padding_bytes_rcvd, - stats.header_and_padding_bytes_rcvd); - EXPECT_EQ(static_cast(kCallStats.packetsReceived), - stats.packets_rcvd); - EXPECT_EQ(kCallStats.cumulativeLost, stats.packets_lost); - EXPECT_EQ(kReceiveCodec.second.name, stats.codec_name); - EXPECT_EQ( - kCallStats.jitterSamples / (kReceiveCodec.second.clockrate_hz / 1000), - stats.jitter_ms); - EXPECT_EQ(kNetworkStats.currentBufferSize, stats.jitter_buffer_ms); - EXPECT_EQ(kNetworkStats.preferredBufferSize, - stats.jitter_buffer_preferred_ms); - EXPECT_EQ(static_cast(kJitterBufferDelay + kPlayoutBufferDelay), - stats.delay_estimate_ms); - EXPECT_EQ(static_cast(kSpeechOutputLevel), stats.audio_level); - EXPECT_EQ(kTotalOutputEnergy, stats.total_output_energy); - EXPECT_EQ(kNetworkStats.totalSamplesReceived, stats.total_samples_received); - EXPECT_EQ(kTotalOutputDuration, stats.total_output_duration); - EXPECT_EQ(kNetworkStats.concealedSamples, stats.concealed_samples); - EXPECT_EQ(kNetworkStats.concealmentEvents, stats.concealment_events); - EXPECT_EQ(static_cast(kNetworkStats.jitterBufferDelayMs) / - static_cast(rtc::kNumMillisecsPerSec), - stats.jitter_buffer_delay_seconds); - EXPECT_EQ(kNetworkStats.jitterBufferEmittedCount, - 
stats.jitter_buffer_emitted_count); - EXPECT_EQ(Q14ToFloat(kNetworkStats.currentExpandRate), stats.expand_rate); - EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSpeechExpandRate), - stats.speech_expand_rate); - EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSecondaryDecodedRate), - stats.secondary_decoded_rate); - EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSecondaryDiscardedRate), - stats.secondary_discarded_rate); - EXPECT_EQ(Q14ToFloat(kNetworkStats.currentAccelerateRate), - stats.accelerate_rate); - EXPECT_EQ(Q14ToFloat(kNetworkStats.currentPreemptiveRate), - stats.preemptive_expand_rate); - EXPECT_EQ(kAudioDecodeStats.calls_to_silence_generator, - stats.decoding_calls_to_silence_generator); - EXPECT_EQ(kAudioDecodeStats.calls_to_neteq, stats.decoding_calls_to_neteq); - EXPECT_EQ(kAudioDecodeStats.decoded_normal, stats.decoding_normal); - EXPECT_EQ(kAudioDecodeStats.decoded_neteq_plc, stats.decoding_plc); - EXPECT_EQ(kAudioDecodeStats.decoded_codec_plc, stats.decoding_codec_plc); - EXPECT_EQ(kAudioDecodeStats.decoded_cng, stats.decoding_cng); - EXPECT_EQ(kAudioDecodeStats.decoded_plc_cng, stats.decoding_plc_cng); - EXPECT_EQ(kAudioDecodeStats.decoded_muted_output, - stats.decoding_muted_output); - EXPECT_EQ(kCallStats.capture_start_ntp_time_ms_, - stats.capture_start_ntp_time_ms); - EXPECT_EQ(kPlayoutNtpTimestampMs, stats.estimated_playout_ntp_timestamp_ms); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(use_null_audio_processing); + auto recv_stream = helper.CreateAudioReceiveStream(); + helper.SetupMockForGetStats(); + AudioReceiveStream::Stats stats = + recv_stream->GetStats(/*get_and_clear_legacy_stats=*/true); + EXPECT_EQ(kRemoteSsrc, stats.remote_ssrc); + EXPECT_EQ(kCallStats.payload_bytes_rcvd, stats.payload_bytes_rcvd); + EXPECT_EQ(kCallStats.header_and_padding_bytes_rcvd, + stats.header_and_padding_bytes_rcvd); + EXPECT_EQ(static_cast(kCallStats.packetsReceived), + stats.packets_rcvd); + EXPECT_EQ(kCallStats.cumulativeLost, 
stats.packets_lost); + EXPECT_EQ(kReceiveCodec.second.name, stats.codec_name); + EXPECT_EQ( + kCallStats.jitterSamples / (kReceiveCodec.second.clockrate_hz / 1000), + stats.jitter_ms); + EXPECT_EQ(kNetworkStats.currentBufferSize, stats.jitter_buffer_ms); + EXPECT_EQ(kNetworkStats.preferredBufferSize, + stats.jitter_buffer_preferred_ms); + EXPECT_EQ(static_cast(kJitterBufferDelay + kPlayoutBufferDelay), + stats.delay_estimate_ms); + EXPECT_EQ(static_cast(kSpeechOutputLevel), stats.audio_level); + EXPECT_EQ(kTotalOutputEnergy, stats.total_output_energy); + EXPECT_EQ(kNetworkStats.totalSamplesReceived, stats.total_samples_received); + EXPECT_EQ(kTotalOutputDuration, stats.total_output_duration); + EXPECT_EQ(kNetworkStats.concealedSamples, stats.concealed_samples); + EXPECT_EQ(kNetworkStats.concealmentEvents, stats.concealment_events); + EXPECT_EQ(static_cast(kNetworkStats.jitterBufferDelayMs) / + static_cast(rtc::kNumMillisecsPerSec), + stats.jitter_buffer_delay_seconds); + EXPECT_EQ(kNetworkStats.jitterBufferEmittedCount, + stats.jitter_buffer_emitted_count); + EXPECT_EQ(static_cast(kNetworkStats.jitterBufferTargetDelayMs) / + static_cast(rtc::kNumMillisecsPerSec), + stats.jitter_buffer_target_delay_seconds); + EXPECT_EQ(Q14ToFloat(kNetworkStats.currentExpandRate), stats.expand_rate); + EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSpeechExpandRate), + stats.speech_expand_rate); + EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSecondaryDecodedRate), + stats.secondary_decoded_rate); + EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSecondaryDiscardedRate), + stats.secondary_discarded_rate); + EXPECT_EQ(Q14ToFloat(kNetworkStats.currentAccelerateRate), + stats.accelerate_rate); + EXPECT_EQ(Q14ToFloat(kNetworkStats.currentPreemptiveRate), + stats.preemptive_expand_rate); + EXPECT_EQ(kAudioDecodeStats.calls_to_silence_generator, + stats.decoding_calls_to_silence_generator); + EXPECT_EQ(kAudioDecodeStats.calls_to_neteq, stats.decoding_calls_to_neteq); + 
EXPECT_EQ(kAudioDecodeStats.decoded_normal, stats.decoding_normal); + EXPECT_EQ(kAudioDecodeStats.decoded_neteq_plc, stats.decoding_plc); + EXPECT_EQ(kAudioDecodeStats.decoded_codec_plc, stats.decoding_codec_plc); + EXPECT_EQ(kAudioDecodeStats.decoded_cng, stats.decoding_cng); + EXPECT_EQ(kAudioDecodeStats.decoded_plc_cng, stats.decoding_plc_cng); + EXPECT_EQ(kAudioDecodeStats.decoded_muted_output, + stats.decoding_muted_output); + EXPECT_EQ(kCallStats.capture_start_ntp_time_ms_, + stats.capture_start_ntp_time_ms); + EXPECT_EQ(kPlayoutNtpTimestampMs, stats.estimated_playout_ntp_timestamp_ms); + } } TEST(AudioReceiveStreamTest, SetGain) { - ConfigHelper helper; - auto recv_stream = helper.CreateAudioReceiveStream(); - EXPECT_CALL(*helper.channel_receive(), - SetChannelOutputVolumeScaling(FloatEq(0.765f))); - recv_stream->SetGain(0.765f); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(use_null_audio_processing); + auto recv_stream = helper.CreateAudioReceiveStream(); + EXPECT_CALL(*helper.channel_receive(), + SetChannelOutputVolumeScaling(FloatEq(0.765f))); + recv_stream->SetGain(0.765f); + } } TEST(AudioReceiveStreamTest, StreamsShouldBeAddedToMixerOnceOnStart) { - ConfigHelper helper1; - ConfigHelper helper2(helper1.audio_mixer()); - auto recv_stream1 = helper1.CreateAudioReceiveStream(); - auto recv_stream2 = helper2.CreateAudioReceiveStream(); - - EXPECT_CALL(*helper1.channel_receive(), StartPlayout()).Times(1); - EXPECT_CALL(*helper2.channel_receive(), StartPlayout()).Times(1); - EXPECT_CALL(*helper1.channel_receive(), StopPlayout()).Times(1); - EXPECT_CALL(*helper2.channel_receive(), StopPlayout()).Times(1); - EXPECT_CALL(*helper1.audio_mixer(), AddSource(recv_stream1.get())) - .WillOnce(Return(true)); - EXPECT_CALL(*helper1.audio_mixer(), AddSource(recv_stream2.get())) - .WillOnce(Return(true)); - EXPECT_CALL(*helper1.audio_mixer(), RemoveSource(recv_stream1.get())) - .Times(1); - EXPECT_CALL(*helper1.audio_mixer(), 
RemoveSource(recv_stream2.get())) - .Times(1); - - recv_stream1->Start(); - recv_stream2->Start(); - - // One more should not result in any more mixer sources added. - recv_stream1->Start(); - - // Stop stream before it is being destructed. - recv_stream2->Stop(); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper1(use_null_audio_processing); + ConfigHelper helper2(helper1.audio_mixer(), use_null_audio_processing); + auto recv_stream1 = helper1.CreateAudioReceiveStream(); + auto recv_stream2 = helper2.CreateAudioReceiveStream(); + + EXPECT_CALL(*helper1.channel_receive(), StartPlayout()).Times(1); + EXPECT_CALL(*helper2.channel_receive(), StartPlayout()).Times(1); + EXPECT_CALL(*helper1.channel_receive(), StopPlayout()).Times(1); + EXPECT_CALL(*helper2.channel_receive(), StopPlayout()).Times(1); + EXPECT_CALL(*helper1.audio_mixer(), AddSource(recv_stream1.get())) + .WillOnce(Return(true)); + EXPECT_CALL(*helper1.audio_mixer(), AddSource(recv_stream2.get())) + .WillOnce(Return(true)); + EXPECT_CALL(*helper1.audio_mixer(), RemoveSource(recv_stream1.get())) + .Times(1); + EXPECT_CALL(*helper1.audio_mixer(), RemoveSource(recv_stream2.get())) + .Times(1); + + recv_stream1->Start(); + recv_stream2->Start(); + + // One more should not result in any more mixer sources added. + recv_stream1->Start(); + + // Stop stream before it is being destructed. 
+ recv_stream2->Stop(); + } } TEST(AudioReceiveStreamTest, ReconfigureWithSameConfig) { - ConfigHelper helper; - auto recv_stream = helper.CreateAudioReceiveStream(); - recv_stream->Reconfigure(helper.config()); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(use_null_audio_processing); + auto recv_stream = helper.CreateAudioReceiveStream(); + recv_stream->Reconfigure(helper.config()); + } } TEST(AudioReceiveStreamTest, ReconfigureWithUpdatedConfig) { - ConfigHelper helper; - auto recv_stream = helper.CreateAudioReceiveStream(); - - auto new_config = helper.config(); - new_config.rtp.nack.rtp_history_ms = 300 + 20; - new_config.rtp.extensions.clear(); - new_config.rtp.extensions.push_back( - RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelId + 1)); - new_config.rtp.extensions.push_back( - RtpExtension(RtpExtension::kTransportSequenceNumberUri, - kTransportSequenceNumberId + 1)); - new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1)); - - MockChannelReceive& channel_receive = *helper.channel_receive(); - EXPECT_CALL(channel_receive, SetNACKStatus(true, 15 + 1)).Times(1); - EXPECT_CALL(channel_receive, SetReceiveCodecs(new_config.decoder_map)); - - recv_stream->Reconfigure(new_config); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(use_null_audio_processing); + auto recv_stream = helper.CreateAudioReceiveStream(); + + auto new_config = helper.config(); + new_config.rtp.nack.rtp_history_ms = 300 + 20; + new_config.rtp.extensions.clear(); + new_config.rtp.extensions.push_back( + RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelId + 1)); + new_config.rtp.extensions.push_back( + RtpExtension(RtpExtension::kTransportSequenceNumberUri, + kTransportSequenceNumberId + 1)); + new_config.decoder_map.emplace(1, SdpAudioFormat("foo", 8000, 1)); + + MockChannelReceive& channel_receive = *helper.channel_receive(); + EXPECT_CALL(channel_receive, SetNACKStatus(true, 15 + 1)).Times(1); + 
EXPECT_CALL(channel_receive, SetReceiveCodecs(new_config.decoder_map)); + + recv_stream->Reconfigure(new_config); + } } TEST(AudioReceiveStreamTest, ReconfigureWithFrameDecryptor) { - ConfigHelper helper; - auto recv_stream = helper.CreateAudioReceiveStream(); - - auto new_config_0 = helper.config(); - rtc::scoped_refptr mock_frame_decryptor_0( - new rtc::RefCountedObject()); - new_config_0.frame_decryptor = mock_frame_decryptor_0; - - recv_stream->Reconfigure(new_config_0); - - auto new_config_1 = helper.config(); - rtc::scoped_refptr mock_frame_decryptor_1( - new rtc::RefCountedObject()); - new_config_1.frame_decryptor = mock_frame_decryptor_1; - new_config_1.crypto_options.sframe.require_frame_encryption = true; - recv_stream->Reconfigure(new_config_1); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(use_null_audio_processing); + auto recv_stream = helper.CreateAudioReceiveStream(); + + auto new_config_0 = helper.config(); + rtc::scoped_refptr mock_frame_decryptor_0( + new rtc::RefCountedObject()); + new_config_0.frame_decryptor = mock_frame_decryptor_0; + + recv_stream->Reconfigure(new_config_0); + + auto new_config_1 = helper.config(); + rtc::scoped_refptr mock_frame_decryptor_1( + new rtc::RefCountedObject()); + new_config_1.frame_decryptor = mock_frame_decryptor_1; + new_config_1.crypto_options.sframe.require_frame_encryption = true; + recv_stream->Reconfigure(new_config_1); + } } } // namespace test diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index 96dcf75875..32beb8fcaa 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -31,6 +31,7 @@ #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" +#include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h" #include "modules/audio_processing/include/audio_processing.h" #include 
"modules/rtp_rtcp/source/rtp_header_extensions.h" #include "rtc_base/checks.h" @@ -114,20 +115,21 @@ AudioSendStream::AudioSendStream( rtp_transport, bitrate_allocator, event_log, - rtcp_rtt_stats, suspended_rtp_state, - voe::CreateChannelSend(clock, - task_queue_factory, - module_process_thread, - /*overhead_observer=*/this, - config.send_transport, - rtcp_rtt_stats, - event_log, - config.frame_encryptor, - config.crypto_options, - config.rtp.extmap_allow_mixed, - config.rtcp_report_interval_ms, - config.rtp.ssrc)) {} + voe::CreateChannelSend( + clock, + task_queue_factory, + module_process_thread, + config.send_transport, + rtcp_rtt_stats, + event_log, + config.frame_encryptor, + config.crypto_options, + config.rtp.extmap_allow_mixed, + config.rtcp_report_interval_ms, + config.rtp.ssrc, + config.frame_transformer, + rtp_transport->transport_feedback_observer())) {} AudioSendStream::AudioSendStream( Clock* clock, @@ -137,7 +139,6 @@ AudioSendStream::AudioSendStream( RtpTransportControllerSendInterface* rtp_transport, BitrateAllocatorInterface* bitrate_allocator, RtcEventLog* event_log, - RtcpRttStats* rtcp_rtt_stats, const absl::optional& suspended_rtp_state, std::unique_ptr channel_send) : clock_(clock), @@ -148,7 +149,7 @@ AudioSendStream::AudioSendStream( enable_audio_alr_probing_( !field_trial::IsDisabled("WebRTC-Audio-AlrProbing")), send_side_bwe_with_overhead_( - field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")), + !field_trial::IsDisabled("WebRTC-SendSideBwe-WithOverhead")), config_(Config(/*send_transport=*/nullptr)), audio_state_(audio_state), channel_send_(std::move(channel_send)), @@ -212,6 +213,8 @@ AudioSendStream::ExtensionIds AudioSendStream::FindExtensionIds( ids.rid = extension.id; } else if (extension.uri == RtpExtension::kRepairedRidUri) { ids.repaired_rid = extension.id; + } else if (extension.uri == RtpExtension::kAbsoluteCaptureTimeUri) { + ids.abs_capture_time = extension.id; } } return ids; @@ -247,6 +250,12 @@ void 
AudioSendStream::ConfigureStream( channel_send_->SetFrameEncryptor(new_config.frame_encryptor); } + if (first_time || + new_config.frame_transformer != old_config.frame_transformer) { + channel_send_->SetEncoderToPacketizerFrameTransformer( + new_config.frame_transformer); + } + if (first_time || new_config.rtp.extmap_allow_mixed != old_config.rtp.extmap_allow_mixed) { rtp_rtcp_module_->SetExtmapAllowMixed(new_config.rtp.extmap_allow_mixed); @@ -323,10 +332,36 @@ void AudioSendStream::ConfigureStream( rtp_rtcp_module_->SetRid(new_config.rtp.rid); } + if (first_time || new_ids.abs_capture_time != old_ids.abs_capture_time) { + rtp_rtcp_module_->DeregisterSendRtpHeaderExtension( + kRtpExtensionAbsoluteCaptureTime); + if (new_ids.abs_capture_time) { + rtp_rtcp_module_->RegisterRtpHeaderExtension( + AbsoluteCaptureTimeExtension::kUri, new_ids.abs_capture_time); + } + } + if (!ReconfigureSendCodec(new_config)) { RTC_LOG(LS_ERROR) << "Failed to set up send codec state."; } + // Set currently known overhead (used in ANA, opus only). + { + MutexLock lock(&overhead_per_packet_lock_); + UpdateOverheadForEncoder(); + } + + channel_send_->CallEncoder([this](AudioEncoder* encoder) { + if (!encoder) { + return; + } + worker_queue_->PostTask( + [this, length_range = encoder->GetFrameLengthRange()] { + RTC_DCHECK_RUN_ON(worker_queue_); + frame_length_range_ = length_range; + }); + }); + if (sending_) { ReconfigureBitrateObserver(new_config); } @@ -387,7 +422,7 @@ void AudioSendStream::SendAudioData(std::unique_ptr audio_frame) { // TODO(https://crbug.com/webrtc/10771): All "media-source" related stats // should move from send-streams to the local audio sources or tracks; a // send-stream should not be required to read the microphone audio levels. 
- rtc::CritScope cs(&audio_level_lock_); + MutexLock lock(&audio_level_lock_); audio_level_.ComputeLevel(*audio_frame, duration); } channel_send_->ProcessAndEncodeAudio(std::move(audio_frame)); @@ -453,7 +488,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( } { - rtc::CritScope cs(&audio_level_lock_); + MutexLock lock(&audio_level_lock_); stats.audio_level = audio_level_.LevelFullRange(); stats.total_input_energy = audio_level_.TotalEnergy(); stats.total_input_duration = audio_level_.TotalDuration(); @@ -461,30 +496,43 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( stats.typing_noise_detected = audio_state()->typing_noise_detected(); stats.ana_statistics = channel_send_->GetANAStatistics(); - RTC_DCHECK(audio_state_->audio_processing()); - stats.apm_statistics = - audio_state_->audio_processing()->GetStatistics(has_remote_tracks); + + AudioProcessing* ap = audio_state_->audio_processing(); + if (ap) { + stats.apm_statistics = ap->GetStatistics(has_remote_tracks); + } stats.report_block_datas = std::move(call_stats.report_block_datas); return stats; } +#ifndef DISABLE_RECORDER +void AudioSendStream::InjectRecorder(Recorder* recorder) { + channel_send_->InjectRecorder(recorder); +} +#endif + void AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { - // TODO(solenberg): Tests call this function on a network thread, libjingle - // calls on the worker thread. We should move towards always using a network - // thread. Then this check can be enabled. - // RTC_DCHECK(!worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_send_->ReceivedRTCPPacket(packet, length); + worker_queue_->PostTask([&]() { + // Poll if overhead has changed, which it can do if ack triggers us to stop + // sending mid/rid. 
+ MutexLock lock(&overhead_per_packet_lock_); + UpdateOverheadForEncoder(); + }); } uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) { RTC_DCHECK_RUN_ON(worker_queue_); + // Pick a target bitrate between the constraints. Overrules the allocator if // it 1) allocated a bitrate of zero to disable the stream or 2) allocated a // higher than max to allow for e.g. extra FEC. auto constraints = GetMinMaxBitrateConstraints(); update.target_bitrate.Clamp(constraints.min, constraints.max); + update.stable_target_bitrate.Clamp(constraints.min, constraints.max); channel_send_->OnBitrateAllocation(update); @@ -496,27 +544,22 @@ uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) { void AudioSendStream::SetTransportOverhead( int transport_overhead_per_packet_bytes) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope cs(&overhead_per_packet_lock_); + MutexLock lock(&overhead_per_packet_lock_); transport_overhead_per_packet_bytes_ = transport_overhead_per_packet_bytes; UpdateOverheadForEncoder(); } -void AudioSendStream::OnOverheadChanged( - size_t overhead_bytes_per_packet_bytes) { - rtc::CritScope cs(&overhead_per_packet_lock_); - audio_overhead_per_packet_bytes_ = overhead_bytes_per_packet_bytes; - UpdateOverheadForEncoder(); -} - void AudioSendStream::UpdateOverheadForEncoder() { - const size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes(); - if (overhead_per_packet_bytes == 0) { - return; // Overhead is not known yet, do not tell the encoder. 
+ size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes(); + if (overhead_per_packet_ == overhead_per_packet_bytes) { + return; } + overhead_per_packet_ = overhead_per_packet_bytes; + channel_send_->CallEncoder([&](AudioEncoder* encoder) { encoder->OnReceivedOverhead(overhead_per_packet_bytes); }); - worker_queue_->PostTask([this, overhead_per_packet_bytes] { + auto update_task = [this, overhead_per_packet_bytes] { RTC_DCHECK_RUN_ON(worker_queue_); if (total_packet_overhead_bytes_ != overhead_per_packet_bytes) { total_packet_overhead_bytes_ = overhead_per_packet_bytes; @@ -524,17 +567,22 @@ void AudioSendStream::UpdateOverheadForEncoder() { ConfigureBitrateObserver(); } } - }); + }; + if (worker_queue_->IsCurrent()) { + update_task(); + } else { + worker_queue_->PostTask(update_task); + } } size_t AudioSendStream::TestOnlyGetPerPacketOverheadBytes() const { - rtc::CritScope cs(&overhead_per_packet_lock_); + MutexLock lock(&overhead_per_packet_lock_); return GetPerPacketOverheadBytes(); } size_t AudioSendStream::GetPerPacketOverheadBytes() const { return transport_overhead_per_packet_bytes_ + - audio_overhead_per_packet_bytes_; + rtp_rtcp_module_->ExpectedPerPacketOverhead(); } RtpState AudioSendStream::GetRtpState() const { @@ -596,14 +644,15 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { if (new_config.audio_network_adaptor_config) { if (encoder->EnableAudioNetworkAdaptor( *new_config.audio_network_adaptor_config, event_log_)) { - RTC_DLOG(LS_INFO) << "Audio network adaptor enabled on SSRC " - << new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Audio network adaptor enabled on SSRC " + << new_config.rtp.ssrc; } else { - RTC_NOTREACHED(); + RTC_LOG(LS_INFO) << "Failed to enable Audio network adaptor on SSRC " + << new_config.rtp.ssrc; } } - // Wrap the encoder in a an AudioEncoderCNG, if VAD is enabled. + // Wrap the encoder in an AudioEncoderCNG, if VAD is enabled. 
if (spec.cng_payload_type) { AudioEncoderCngConfig cng_config; cng_config.num_channels = encoder->NumChannels(); @@ -616,19 +665,23 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { new_config.send_codec_spec->format.clockrate_hz); } + // Wrap the encoder in a RED encoder, if RED is enabled. + if (spec.red_payload_type) { + AudioEncoderCopyRed::Config red_config; + red_config.payload_type = *spec.red_payload_type; + red_config.speech_encoder = std::move(encoder); + encoder = std::make_unique(std::move(red_config)); + } + // Set currently known overhead (used in ANA, opus only). // If overhead changes later, it will be updated in UpdateOverheadForEncoder. { - rtc::CritScope cs(&overhead_per_packet_lock_); - if (GetPerPacketOverheadBytes() > 0) { - encoder->OnReceivedOverhead(GetPerPacketOverheadBytes()); + MutexLock lock(&overhead_per_packet_lock_); + size_t overhead = GetPerPacketOverheadBytes(); + if (overhead > 0) { + encoder->OnReceivedOverhead(overhead); } } - worker_queue_->PostTask( - [this, length_range = encoder->GetFrameLengthRange()] { - RTC_DCHECK_RUN_ON(worker_queue_); - frame_length_range_ = length_range; - }); StoreEncoderProperties(encoder->SampleRateHz(), encoder->NumChannels()); channel_send_->SetEncoder(new_config.send_codec_spec->payload_type, @@ -678,12 +731,6 @@ bool AudioSendStream::ReconfigureSendCodec(const Config& new_config) { ReconfigureANA(new_config); ReconfigureCNG(new_config); - // Set currently known overhead (used in ANA, opus only). - { - rtc::CritScope cs(&overhead_per_packet_lock_); - UpdateOverheadForEncoder(); - } - return true; } @@ -693,20 +740,29 @@ void AudioSendStream::ReconfigureANA(const Config& new_config) { return; } if (new_config.audio_network_adaptor_config) { + // This lock needs to be acquired before CallEncoder, since it aquires + // another lock and we need to maintain the same order at all call sites to + // avoid deadlock. 
+ MutexLock lock(&overhead_per_packet_lock_); + size_t overhead = GetPerPacketOverheadBytes(); channel_send_->CallEncoder([&](AudioEncoder* encoder) { if (encoder->EnableAudioNetworkAdaptor( *new_config.audio_network_adaptor_config, event_log_)) { - RTC_DLOG(LS_INFO) << "Audio network adaptor enabled on SSRC " - << new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Audio network adaptor enabled on SSRC " + << new_config.rtp.ssrc; + if (overhead > 0) { + encoder->OnReceivedOverhead(overhead); + } } else { - RTC_NOTREACHED(); + RTC_LOG(LS_INFO) << "Failed to enable Audio network adaptor on SSRC " + << new_config.rtp.ssrc; } }); } else { channel_send_->CallEncoder( [&](AudioEncoder* encoder) { encoder->DisableAudioNetworkAdaptor(); }); - RTC_DLOG(LS_INFO) << "Audio network adaptor disabled on SSRC " - << new_config.rtp.ssrc; + RTC_LOG(LS_INFO) << "Audio network adaptor disabled on SSRC " + << new_config.rtp.ssrc; } } @@ -760,7 +816,9 @@ void AudioSendStream::ReconfigureBitrateObserver( config_.max_bitrate_bps == new_config.max_bitrate_bps && config_.bitrate_priority == new_config.bitrate_priority && (TransportSeqNumId(config_) == TransportSeqNumId(new_config) || - !audio_send_side_bwe_)) { + !audio_send_side_bwe_) && + config_.audio_network_adaptor_config == + new_config.audio_network_adaptor_config) { return; } @@ -772,7 +830,6 @@ void AudioSendStream::ReconfigureBitrateObserver( rtc::Event thread_sync_event; worker_queue_->PostTask([&] { RTC_DCHECK_RUN_ON(worker_queue_); - registered_with_allocator_ = true; // We may get a callback immediately as the observer is registered, so // make // sure the bitrate limits in config_ are up-to-date. 
@@ -802,16 +859,16 @@ void AudioSendStream::ConfigureBitrateObserver() { if (use_legacy_overhead_calculation_) { // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) constexpr int kOverheadPerPacket = 20 + 8 + 10 + 12; - const TimeDelta kMinPacketDuration = TimeDelta::ms(20); + const TimeDelta kMinPacketDuration = TimeDelta::Millis(20); DataRate max_overhead = - DataSize::bytes(kOverheadPerPacket) / kMinPacketDuration; + DataSize::Bytes(kOverheadPerPacket) / kMinPacketDuration; priority_bitrate += max_overhead; } else { RTC_DCHECK(frame_length_range_); - const DataSize kOverheadPerPacket = - DataSize::bytes(total_packet_overhead_bytes_); - DataRate max_overhead = kOverheadPerPacket / frame_length_range_->first; - priority_bitrate += max_overhead; + const DataSize overhead_per_packet = + DataSize::Bytes(total_packet_overhead_bytes_); + DataRate min_overhead = overhead_per_packet / frame_length_range_->second; + priority_bitrate += min_overhead; } } if (allocation_settings_.priority_bitrate_raw) @@ -824,6 +881,7 @@ void AudioSendStream::ConfigureBitrateObserver() { priority_bitrate.bps(), true, allocation_settings_.bitrate_priority.value_or( config_.bitrate_priority)}); + registered_with_allocator_ = true; } void AudioSendStream::RemoveBitrateObserver() { @@ -841,8 +899,8 @@ void AudioSendStream::RemoveBitrateObserver() { AudioSendStream::TargetAudioBitrateConstraints AudioSendStream::GetMinMaxBitrateConstraints() const { TargetAudioBitrateConstraints constraints{ - DataRate::bps(config_.min_bitrate_bps), - DataRate::bps(config_.max_bitrate_bps)}; + DataRate::BitsPerSec(config_.min_bitrate_bps), + DataRate::BitsPerSec(config_.max_bitrate_bps)}; // If bitrates were explicitly overriden via field trial, use those values. 
if (allocation_settings_.min_bitrate) @@ -856,16 +914,16 @@ AudioSendStream::GetMinMaxBitrateConstraints() const { if (send_side_bwe_with_overhead_) { if (use_legacy_overhead_calculation_) { // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) - const DataSize kOverheadPerPacket = DataSize::bytes(20 + 8 + 10 + 12); + const DataSize kOverheadPerPacket = DataSize::Bytes(20 + 8 + 10 + 12); const TimeDelta kMaxFrameLength = - TimeDelta::ms(60); // Based on Opus spec + TimeDelta::Millis(60); // Based on Opus spec const DataRate kMinOverhead = kOverheadPerPacket / kMaxFrameLength; constraints.min += kMinOverhead; constraints.max += kMinOverhead; } else { RTC_DCHECK(frame_length_range_); const DataSize kOverheadPerPacket = - DataSize::bytes(total_packet_overhead_bytes_); + DataSize::Bytes(total_packet_overhead_bytes_); constraints.min += kOverheadPerPacket / frame_length_range_->second; constraints.max += kOverheadPerPacket / frame_length_range_->first; } diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index 6875915ee2..b9fb98f032 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -20,10 +20,10 @@ #include "call/audio_send_stream.h" #include "call/audio_state.h" #include "call/bitrate_allocator.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" -#include "rtc_base/constructor_magic.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" @@ -52,8 +52,7 @@ namespace internal { class AudioState; class AudioSendStream final : public webrtc::AudioSendStream, - public webrtc::BitrateAllocatorObserver, - public webrtc::OverheadObserver { + public webrtc::BitrateAllocatorObserver { public: AudioSendStream(Clock* clock, const webrtc::AudioSendStream::Config& config, @@ -73,9 +72,13 @@ class AudioSendStream final : 
public webrtc::AudioSendStream, RtpTransportControllerSendInterface* rtp_transport, BitrateAllocatorInterface* bitrate_allocator, RtcEventLog* event_log, - RtcpRttStats* rtcp_rtt_stats, const absl::optional& suspended_rtp_state, std::unique_ptr channel_send); + + AudioSendStream() = delete; + AudioSendStream(const AudioSendStream&) = delete; + AudioSendStream& operator=(const AudioSendStream&) = delete; + ~AudioSendStream() override; // webrtc::AudioSendStream implementation. @@ -93,6 +96,10 @@ class AudioSendStream final : public webrtc::AudioSendStream, webrtc::AudioSendStream::Stats GetStats( bool has_remote_tracks) const override; +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder) override; +#endif + void DeliverRtcp(const uint8_t* packet, size_t length); // Implements BitrateAllocatorObserver. @@ -100,10 +107,6 @@ class AudioSendStream final : public webrtc::AudioSendStream, void SetTransportOverhead(int transport_overhead_per_packet_bytes); - // OverheadObserver override reports audio packetization overhead from - // RTP/RTCP module or Media Transport. - void OnOverheadChanged(size_t overhead_bytes_per_packet_bytes) override; - RtpState GetRtpState() const; const voe::ChannelSendInterface* GetChannel() const; @@ -172,16 +175,16 @@ class AudioSendStream final : public webrtc::AudioSendStream, int encoder_sample_rate_hz_ = 0; size_t encoder_num_channels_ = 0; bool sending_ = false; - rtc::CriticalSection audio_level_lock_; + mutable Mutex audio_level_lock_; // Keeps track of audio level, total audio energy and total samples duration. 
// https://w3c.github.io/webrtc-stats/#dom-rtcaudiohandlerstats-totalaudioenergy - webrtc::voe::AudioLevel audio_level_; + webrtc::voe::AudioLevel audio_level_ RTC_GUARDED_BY(audio_level_lock_); BitrateAllocatorInterface* const bitrate_allocator_ RTC_GUARDED_BY(worker_queue_); RtpTransportControllerSendInterface* const rtp_transport_; - RtpRtcp* const rtp_rtcp_module_; + RtpRtcpInterface* const rtp_rtcp_module_; absl::optional const suspended_rtp_state_; // RFC 5285: Each distinct extension MUST have a unique ID. The value 0 is @@ -190,6 +193,7 @@ class AudioSendStream final : public webrtc::AudioSendStream, struct ExtensionIds { int audio_level = 0; int abs_send_time = 0; + int abs_capture_time = 0; int transport_sequence_number = 0; int mid = 0; int rid = 0; @@ -199,22 +203,17 @@ class AudioSendStream final : public webrtc::AudioSendStream, const std::vector& extensions); static int TransportSeqNumId(const Config& config); - rtc::CriticalSection overhead_per_packet_lock_; + mutable Mutex overhead_per_packet_lock_; + size_t overhead_per_packet_ RTC_GUARDED_BY(overhead_per_packet_lock_) = 0; // Current transport overhead (ICE, TURN, etc.) size_t transport_overhead_per_packet_bytes_ RTC_GUARDED_BY(overhead_per_packet_lock_) = 0; - // Current audio packetization overhead (RTP or Media Transport). 
- size_t audio_overhead_per_packet_bytes_ - RTC_GUARDED_BY(overhead_per_packet_lock_) = 0; - bool registered_with_allocator_ RTC_GUARDED_BY(worker_queue_) = false; size_t total_packet_overhead_bytes_ RTC_GUARDED_BY(worker_queue_) = 0; absl::optional> frame_length_range_ RTC_GUARDED_BY(worker_queue_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioSendStream); }; } // namespace internal } // namespace webrtc diff --git a/audio/audio_send_stream_unittest.cc b/audio/audio_send_stream_unittest.cc index 3b9fbb7f39..bfec59bf92 100644 --- a/audio/audio_send_stream_unittest.cc +++ b/audio/audio_send_stream_unittest.cc @@ -29,7 +29,6 @@ #include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/audio_processing/include/mock_audio_processing.h" #include "modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.h" -#include "modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h" #include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" #include "rtc_base/task_queue_for_test.h" #include "system_wrappers/include/clock.h" @@ -46,6 +45,7 @@ using ::testing::_; using ::testing::AnyNumber; using ::testing::Eq; using ::testing::Field; +using ::testing::InSequence; using ::testing::Invoke; using ::testing::Ne; using ::testing::Return; @@ -82,15 +82,18 @@ const AudioCodecSpec kCodecSpecs[] = { // TODO(dklee): This mirrors calculation in audio_send_stream.cc, which // should be made more precise in the future. This can be changed when that // logic is more accurate. 
-const DataSize kOverheadPerPacket = DataSize::bytes(20 + 8 + 10 + 12); -const TimeDelta kMinFrameLength = TimeDelta::ms(20); -const TimeDelta kMaxFrameLength = TimeDelta::ms(120); +const DataSize kOverheadPerPacket = DataSize::Bytes(20 + 8 + 10 + 12); +const TimeDelta kMinFrameLength = TimeDelta::Millis(20); +const TimeDelta kMaxFrameLength = TimeDelta::Millis(120); const DataRate kMinOverheadRate = kOverheadPerPacket / kMaxFrameLength; const DataRate kMaxOverheadRate = kOverheadPerPacket / kMinFrameLength; class MockLimitObserver : public BitrateAllocator::LimitObserver { public: - MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits)); + MOCK_METHOD(void, + OnAllocationLimitsChanged, + (BitrateAllocationLimits), + (override)); }; std::unique_ptr SetupAudioEncoderMock( @@ -108,7 +111,7 @@ std::unique_ptr SetupAudioEncoderMock( .WillByDefault(Return(spec.format.clockrate_hz)); ON_CALL(*encoder.get(), GetFrameLengthRange()) .WillByDefault(Return(absl::optional>{ - {TimeDelta::ms(20), TimeDelta::ms(120)}})); + {TimeDelta::Millis(20), TimeDelta::Millis(120)}})); return encoder; } } @@ -141,11 +144,16 @@ rtc::scoped_refptr SetupEncoderFactoryMock() { } struct ConfigHelper { - ConfigHelper(bool audio_bwe_enabled, bool expect_set_encoder_call) + ConfigHelper(bool audio_bwe_enabled, + bool expect_set_encoder_call, + bool use_null_audio_processing) : clock_(1000000), task_queue_factory_(CreateDefaultTaskQueueFactory()), stream_config_(/*send_transport=*/nullptr), - audio_processing_(new rtc::RefCountedObject()), + audio_processing_( + use_null_audio_processing + ? 
nullptr + : new rtc::RefCountedObject()), bitrate_allocator_(&limit_observer_), worker_queue_(task_queue_factory_->CreateTaskQueue( "ConfigHelper_worker_queue", @@ -162,6 +170,7 @@ struct ConfigHelper { SetupDefaultChannelSend(audio_bwe_enabled); SetupMockForSetupSendCodec(expect_set_encoder_call); + SetupMockForCallEncoder(); // Use ISAC as default codec so as to prevent unnecessary |channel_proxy_| // calls from the default ctor behavior. @@ -186,7 +195,7 @@ struct ConfigHelper { new internal::AudioSendStream( Clock::GetRealTimeClock(), stream_config_, audio_state_, task_queue_factory_.get(), &rtp_transport_, &bitrate_allocator_, - &event_log_, &rtcp_rtt_stats_, absl::nullopt, + &event_log_, absl::nullopt, std::unique_ptr(channel_send_))); } @@ -195,7 +204,7 @@ struct ConfigHelper { return *static_cast( stream_config_.encoder_factory.get()); } - MockRtpRtcp* rtp_rtcp() { return &rtp_rtcp_; } + MockRtpRtcpInterface* rtp_rtcp() { return &rtp_rtcp_; } MockChannelSend* channel_send() { return channel_send_; } RtpTransportControllerSendInterface* transport() { return &rtp_transport_; } @@ -214,6 +223,8 @@ struct ConfigHelper { EXPECT_CALL(rtp_rtcp_, SSRC).WillRepeatedly(Return(kSsrc)); EXPECT_CALL(*channel_send_, SetRTCP_CNAME(StrEq(kCName))).Times(1); EXPECT_CALL(*channel_send_, SetFrameEncryptor(_)).Times(1); + EXPECT_CALL(*channel_send_, SetEncoderToPacketizerFrameTransformer(_)) + .Times(1); EXPECT_CALL(rtp_rtcp_, SetExtmapAllowMixed(false)).Times(1); EXPECT_CALL(*channel_send_, SetSendAudioLevelIndicationStatus(true, kAudioLevelId)) @@ -240,12 +251,12 @@ struct ConfigHelper { void SetupMockForSetupSendCodec(bool expect_set_encoder_call) { if (expect_set_encoder_call) { - EXPECT_CALL(*channel_send_, SetEncoderForMock(_, _)) - .WillOnce(Invoke( - [this](int payload_type, std::unique_ptr* encoder) { - this->audio_encoder_ = std::move(*encoder); + EXPECT_CALL(*channel_send_, SetEncoder) + .WillOnce( + [this](int payload_type, std::unique_ptr encoder) { + 
this->audio_encoder_ = std::move(encoder); return true; - })); + }); } } @@ -270,7 +281,7 @@ struct ConfigHelper { .WillOnce(Return(true)); } - void SetupMockForGetStats() { + void SetupMockForGetStats(bool use_null_audio_processing) { using ::testing::DoAll; using ::testing::SetArgPointee; using ::testing::SetArgReferee; @@ -302,10 +313,13 @@ struct ConfigHelper { audio_processing_stats_.residual_echo_likelihood = kResidualEchoLikelihood; audio_processing_stats_.residual_echo_likelihood_recent_max = kResidualEchoLikelihoodMax; - - EXPECT_CALL(*audio_processing_, GetStatistics(true)) - .WillRepeatedly(Return(audio_processing_stats_)); + if (!use_null_audio_processing) { + ASSERT_TRUE(audio_processing_); + EXPECT_CALL(*audio_processing_, GetStatistics(true)) + .WillRepeatedly(Return(audio_processing_stats_)); + } } + TaskQueueForTest* worker() { return &worker_queue_; } private: @@ -319,8 +333,7 @@ struct ConfigHelper { ::testing::StrictMock bandwidth_observer_; ::testing::NiceMock event_log_; ::testing::NiceMock rtp_transport_; - ::testing::NiceMock rtp_rtcp_; - MockRtcpRttStats rtcp_rtt_stats_; + ::testing::NiceMock rtp_rtcp_; ::testing::NiceMock limit_observer_; BitrateAllocator bitrate_allocator_; // |worker_queue| is defined last to ensure all pending tasks are cancelled @@ -354,11 +367,13 @@ TEST(AudioSendStreamTest, ConfigToString) { config.rtp.c_name = kCName; config.min_bitrate_bps = 12000; config.max_bitrate_bps = 34000; + config.has_dscp = true; config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(kIsacPayloadType, kIsacFormat); config.send_codec_spec->nack_enabled = true; config.send_codec_spec->transport_cc_enabled = false; config.send_codec_spec->cng_payload_type = 42; + config.send_codec_spec->red_payload_type = 43; config.encoder_factory = MockAudioEncoderFactory::CreateUnusedFactory(); config.rtp.extmap_allow_mixed = true; config.rtp.extensions.push_back( @@ -369,242 +384,320 @@ TEST(AudioSendStreamTest, ConfigToString) { 
"urn:ietf:params:rtp-hdrext:ssrc-audio-level, id: 2}], " "c_name: foo_name}, rtcp_report_interval_ms: 2500, " "send_transport: null, " - "min_bitrate_bps: 12000, max_bitrate_bps: 34000, " + "min_bitrate_bps: 12000, max_bitrate_bps: 34000, has " + "audio_network_adaptor_config: false, has_dscp: true, " "send_codec_spec: {nack_enabled: true, transport_cc_enabled: false, " - "cng_payload_type: 42, payload_type: 103, " + "cng_payload_type: 42, red_payload_type: 43, payload_type: 103, " "format: {name: isac, clockrate_hz: 16000, num_channels: 1, " "parameters: {}}}}", config.ToString()); } TEST(AudioSendStreamTest, ConstructDestruct) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + } } TEST(AudioSendStreamTest, SendTelephoneEvent) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - helper.SetupMockForSendTelephoneEvent(); - EXPECT_TRUE(send_stream->SendTelephoneEvent( - kTelephoneEventPayloadType, kTelephoneEventPayloadFrequency, - kTelephoneEventCode, kTelephoneEventDuration)); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + helper.SetupMockForSendTelephoneEvent(); + EXPECT_TRUE(send_stream->SendTelephoneEvent( + kTelephoneEventPayloadType, kTelephoneEventPayloadFrequency, + kTelephoneEventCode, kTelephoneEventDuration)); + } } TEST(AudioSendStreamTest, SetMuted) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL(*helper.channel_send(), SetInputMute(true)); - send_stream->SetMuted(true); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = 
helper.CreateAudioSendStream(); + EXPECT_CALL(*helper.channel_send(), SetInputMute(true)); + send_stream->SetMuted(true); + } } TEST(AudioSendStreamTest, AudioBweCorrectObjectsOnChannelProxy) { ScopedFieldTrials field_trials("WebRTC-Audio-SendSideBwe/Enabled/"); - ConfigHelper helper(true, true); - auto send_stream = helper.CreateAudioSendStream(); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(true, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + } } TEST(AudioSendStreamTest, NoAudioBweCorrectObjectsOnChannelProxy) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + } } TEST(AudioSendStreamTest, GetStats) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - helper.SetupMockForGetStats(); - AudioSendStream::Stats stats = send_stream->GetStats(true); - EXPECT_EQ(kSsrc, stats.local_ssrc); - EXPECT_EQ(kCallStats.payload_bytes_sent, stats.payload_bytes_sent); - EXPECT_EQ(kCallStats.header_and_padding_bytes_sent, - stats.header_and_padding_bytes_sent); - EXPECT_EQ(kCallStats.packetsSent, stats.packets_sent); - EXPECT_EQ(kReportBlock.cumulative_num_packets_lost, stats.packets_lost); - EXPECT_EQ(Q8ToFloat(kReportBlock.fraction_lost), stats.fraction_lost); - EXPECT_EQ(kIsacFormat.name, stats.codec_name); - EXPECT_EQ(static_cast(kReportBlock.interarrival_jitter / - (kIsacFormat.clockrate_hz / 1000)), - stats.jitter_ms); - EXPECT_EQ(kCallStats.rttMs, stats.rtt_ms); - EXPECT_EQ(0, stats.audio_level); - EXPECT_EQ(0, stats.total_input_energy); - EXPECT_EQ(0, stats.total_input_duration); - EXPECT_EQ(kEchoDelayMedian, stats.apm_statistics.delay_median_ms); - EXPECT_EQ(kEchoDelayStdDev, stats.apm_statistics.delay_standard_deviation_ms); - 
EXPECT_EQ(kEchoReturnLoss, stats.apm_statistics.echo_return_loss); - EXPECT_EQ(kEchoReturnLossEnhancement, - stats.apm_statistics.echo_return_loss_enhancement); - EXPECT_EQ(kDivergentFilterFraction, - stats.apm_statistics.divergent_filter_fraction); - EXPECT_EQ(kResidualEchoLikelihood, - stats.apm_statistics.residual_echo_likelihood); - EXPECT_EQ(kResidualEchoLikelihoodMax, - stats.apm_statistics.residual_echo_likelihood_recent_max); - EXPECT_FALSE(stats.typing_noise_detected); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + helper.SetupMockForGetStats(use_null_audio_processing); + AudioSendStream::Stats stats = send_stream->GetStats(true); + EXPECT_EQ(kSsrc, stats.local_ssrc); + EXPECT_EQ(kCallStats.payload_bytes_sent, stats.payload_bytes_sent); + EXPECT_EQ(kCallStats.header_and_padding_bytes_sent, + stats.header_and_padding_bytes_sent); + EXPECT_EQ(kCallStats.packetsSent, stats.packets_sent); + EXPECT_EQ(kReportBlock.cumulative_num_packets_lost, stats.packets_lost); + EXPECT_EQ(Q8ToFloat(kReportBlock.fraction_lost), stats.fraction_lost); + EXPECT_EQ(kIsacFormat.name, stats.codec_name); + EXPECT_EQ(static_cast(kReportBlock.interarrival_jitter / + (kIsacFormat.clockrate_hz / 1000)), + stats.jitter_ms); + EXPECT_EQ(kCallStats.rttMs, stats.rtt_ms); + EXPECT_EQ(0, stats.audio_level); + EXPECT_EQ(0, stats.total_input_energy); + EXPECT_EQ(0, stats.total_input_duration); + + if (!use_null_audio_processing) { + EXPECT_EQ(kEchoDelayMedian, stats.apm_statistics.delay_median_ms); + EXPECT_EQ(kEchoDelayStdDev, + stats.apm_statistics.delay_standard_deviation_ms); + EXPECT_EQ(kEchoReturnLoss, stats.apm_statistics.echo_return_loss); + EXPECT_EQ(kEchoReturnLossEnhancement, + stats.apm_statistics.echo_return_loss_enhancement); + EXPECT_EQ(kDivergentFilterFraction, + stats.apm_statistics.divergent_filter_fraction); + EXPECT_EQ(kResidualEchoLikelihood, + 
stats.apm_statistics.residual_echo_likelihood); + EXPECT_EQ(kResidualEchoLikelihoodMax, + stats.apm_statistics.residual_echo_likelihood_recent_max); + EXPECT_FALSE(stats.typing_noise_detected); + } + } } TEST(AudioSendStreamTest, GetStatsAudioLevel) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - helper.SetupMockForGetStats(); - EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudioForMock(_)) - .Times(AnyNumber()); - - constexpr int kSampleRateHz = 48000; - constexpr size_t kNumChannels = 1; - - constexpr int16_t kSilentAudioLevel = 0; - constexpr int16_t kMaxAudioLevel = 32767; // Audio level is [0,32767]. - constexpr int kAudioFrameDurationMs = 10; - - // Process 10 audio frames (100 ms) of silence. After this, on the next - // (11-th) frame, the audio level will be updated with the maximum audio level - // of the first 11 frames. See AudioLevel. - for (size_t i = 0; i < 10; ++i) { - send_stream->SendAudioData(CreateAudioFrame1kHzSineWave( - kSilentAudioLevel, kAudioFrameDurationMs, kSampleRateHz, kNumChannels)); - } - AudioSendStream::Stats stats = send_stream->GetStats(); - EXPECT_EQ(kSilentAudioLevel, stats.audio_level); - EXPECT_NEAR(0.0f, stats.total_input_energy, kTolerance); - EXPECT_NEAR(0.1f, stats.total_input_duration, kTolerance); // 100 ms = 0.1 s - - // Process 10 audio frames (100 ms) of maximum audio level. - // Note that AudioLevel updates the audio level every 11th frame, processing - // 10 frames above was needed to see a non-zero audio level here. - for (size_t i = 0; i < 10; ++i) { - send_stream->SendAudioData(CreateAudioFrame1kHzSineWave( - kMaxAudioLevel, kAudioFrameDurationMs, kSampleRateHz, kNumChannels)); - } - stats = send_stream->GetStats(); - EXPECT_EQ(kMaxAudioLevel, stats.audio_level); - // Energy increases by energy*duration, where energy is audio level in [0,1]. 
- EXPECT_NEAR(0.1f, stats.total_input_energy, kTolerance); // 0.1 s of max - EXPECT_NEAR(0.2f, stats.total_input_duration, kTolerance); // 200 ms = 0.2 s + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + helper.SetupMockForGetStats(use_null_audio_processing); + EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudio) + .Times(AnyNumber()); + + constexpr int kSampleRateHz = 48000; + constexpr size_t kNumChannels = 1; + + constexpr int16_t kSilentAudioLevel = 0; + constexpr int16_t kMaxAudioLevel = 32767; // Audio level is [0,32767]. + constexpr int kAudioFrameDurationMs = 10; + + // Process 10 audio frames (100 ms) of silence. After this, on the next + // (11-th) frame, the audio level will be updated with the maximum audio + // level of the first 11 frames. See AudioLevel. + for (size_t i = 0; i < 10; ++i) { + send_stream->SendAudioData( + CreateAudioFrame1kHzSineWave(kSilentAudioLevel, kAudioFrameDurationMs, + kSampleRateHz, kNumChannels)); + } + AudioSendStream::Stats stats = send_stream->GetStats(); + EXPECT_EQ(kSilentAudioLevel, stats.audio_level); + EXPECT_NEAR(0.0f, stats.total_input_energy, kTolerance); + EXPECT_NEAR(0.1f, stats.total_input_duration, + kTolerance); // 100 ms = 0.1 s + + // Process 10 audio frames (100 ms) of maximum audio level. + // Note that AudioLevel updates the audio level every 11th frame, processing + // 10 frames above was needed to see a non-zero audio level here. + for (size_t i = 0; i < 10; ++i) { + send_stream->SendAudioData(CreateAudioFrame1kHzSineWave( + kMaxAudioLevel, kAudioFrameDurationMs, kSampleRateHz, kNumChannels)); + } + stats = send_stream->GetStats(); + EXPECT_EQ(kMaxAudioLevel, stats.audio_level); + // Energy increases by energy*duration, where energy is audio level in + // [0,1]. 
+ EXPECT_NEAR(0.1f, stats.total_input_energy, kTolerance); // 0.1 s of max + EXPECT_NEAR(0.2f, stats.total_input_duration, + kTolerance); // 200 ms = 0.2 s + } } TEST(AudioSendStreamTest, SendCodecAppliesAudioNetworkAdaptor) { - ConfigHelper helper(false, true); - helper.config().send_codec_spec = - AudioSendStream::Config::SendCodecSpec(0, kOpusFormat); - const std::string kAnaConfigString = "abcde"; - const std::string kAnaReconfigString = "12345"; - - helper.config().rtp.extensions.push_back(RtpExtension( - RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId)); - helper.config().audio_network_adaptor_config = kAnaConfigString; - - EXPECT_CALL(helper.mock_encoder_factory(), MakeAudioEncoderMock(_, _, _, _)) - .WillOnce(Invoke([&kAnaConfigString, &kAnaReconfigString]( - int payload_type, const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* return_value) { - auto mock_encoder = SetupAudioEncoderMock(payload_type, format); - EXPECT_CALL(*mock_encoder, - EnableAudioNetworkAdaptor(StrEq(kAnaConfigString), _)) - .WillOnce(Return(true)); - EXPECT_CALL(*mock_encoder, - EnableAudioNetworkAdaptor(StrEq(kAnaReconfigString), _)) - .WillOnce(Return(true)); - *return_value = std::move(mock_encoder); - })); - - auto send_stream = helper.CreateAudioSendStream(); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + helper.config().send_codec_spec = + AudioSendStream::Config::SendCodecSpec(0, kOpusFormat); + const std::string kAnaConfigString = "abcde"; + const std::string kAnaReconfigString = "12345"; + + helper.config().rtp.extensions.push_back(RtpExtension( + RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId)); + helper.config().audio_network_adaptor_config = kAnaConfigString; + + EXPECT_CALL(helper.mock_encoder_factory(), MakeAudioEncoderMock(_, _, _, _)) + .WillOnce(Invoke([&kAnaConfigString, &kAnaReconfigString]( + int payload_type, const 
SdpAudioFormat& format, + absl::optional codec_pair_id, + std::unique_ptr* return_value) { + auto mock_encoder = SetupAudioEncoderMock(payload_type, format); + EXPECT_CALL(*mock_encoder, + EnableAudioNetworkAdaptor(StrEq(kAnaConfigString), _)) + .WillOnce(Return(true)); + EXPECT_CALL(*mock_encoder, + EnableAudioNetworkAdaptor(StrEq(kAnaReconfigString), _)) + .WillOnce(Return(true)); + *return_value = std::move(mock_encoder); + })); + + auto send_stream = helper.CreateAudioSendStream(); + + auto stream_config = helper.config(); + stream_config.audio_network_adaptor_config = kAnaReconfigString; + + send_stream->Reconfigure(stream_config); + } +} - auto stream_config = helper.config(); - stream_config.audio_network_adaptor_config = kAnaReconfigString; +TEST(AudioSendStreamTest, AudioNetworkAdaptorReceivesOverhead) { + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + helper.config().send_codec_spec = + AudioSendStream::Config::SendCodecSpec(0, kOpusFormat); + const std::string kAnaConfigString = "abcde"; + helper.config().rtp.extensions.push_back(RtpExtension( + RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId)); - helper.SetupMockForCallEncoder(); - send_stream->Reconfigure(stream_config); + EXPECT_CALL(helper.mock_encoder_factory(), MakeAudioEncoderMock(_, _, _, _)) + .WillOnce(Invoke( + [&kAnaConfigString](int payload_type, const SdpAudioFormat& format, + absl::optional codec_pair_id, + std::unique_ptr* return_value) { + auto mock_encoder = SetupAudioEncoderMock(payload_type, format); + InSequence s; + EXPECT_CALL( + *mock_encoder, + OnReceivedOverhead(Eq(kOverheadPerPacket.bytes()))) + .Times(2); + EXPECT_CALL(*mock_encoder, + EnableAudioNetworkAdaptor(StrEq(kAnaConfigString), _)) + .WillOnce(Return(true)); + // Note: Overhead is received AFTER ANA has been enabled. 
+ EXPECT_CALL( + *mock_encoder, + OnReceivedOverhead(Eq(kOverheadPerPacket.bytes()))) + .WillOnce(Return()); + *return_value = std::move(mock_encoder); + })); + EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) + .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + + auto send_stream = helper.CreateAudioSendStream(); + + auto stream_config = helper.config(); + stream_config.audio_network_adaptor_config = kAnaConfigString; + + send_stream->Reconfigure(stream_config); + } } // VAD is applied when codec is mono and the CNG frequency matches the codec // clock rate. TEST(AudioSendStreamTest, SendCodecCanApplyVad) { - ConfigHelper helper(false, false); - helper.config().send_codec_spec = - AudioSendStream::Config::SendCodecSpec(9, kG722Format); - helper.config().send_codec_spec->cng_payload_type = 105; - using ::testing::Invoke; - std::unique_ptr stolen_encoder; - EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _)) - .WillOnce( - Invoke([&stolen_encoder](int payload_type, - std::unique_ptr* encoder) { - stolen_encoder = std::move(*encoder); - return true; - })); - EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000)); - - auto send_stream = helper.CreateAudioSendStream(); - - // We cannot truly determine if the encoder created is an AudioEncoderCng. It - // is the only reasonable implementation that will return something from - // ReclaimContainedEncoders, though. 
- ASSERT_TRUE(stolen_encoder); - EXPECT_FALSE(stolen_encoder->ReclaimContainedEncoders().empty()); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, false, use_null_audio_processing); + helper.config().send_codec_spec = + AudioSendStream::Config::SendCodecSpec(9, kG722Format); + helper.config().send_codec_spec->cng_payload_type = 105; + std::unique_ptr stolen_encoder; + EXPECT_CALL(*helper.channel_send(), SetEncoder) + .WillOnce([&stolen_encoder](int payload_type, + std::unique_ptr encoder) { + stolen_encoder = std::move(encoder); + return true; + }); + EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000)); + + auto send_stream = helper.CreateAudioSendStream(); + + // We cannot truly determine if the encoder created is an AudioEncoderCng. + // It is the only reasonable implementation that will return something from + // ReclaimContainedEncoders, though. + ASSERT_TRUE(stolen_encoder); + EXPECT_FALSE(stolen_encoder->ReclaimContainedEncoders().empty()); + } } TEST(AudioSendStreamTest, DoesNotPassHigherBitrateThanMaxBitrate) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL(*helper.channel_send(), - OnBitrateAllocation( - Field(&BitrateAllocationUpdate::target_bitrate, - Eq(DataRate::bps(helper.config().max_bitrate_bps))))); - BitrateAllocationUpdate update; - update.target_bitrate = DataRate::bps(helper.config().max_bitrate_bps + 5000); - update.packet_loss_ratio = 0; - update.round_trip_time = TimeDelta::ms(50); - update.bwe_period = TimeDelta::ms(6000); - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL( + *helper.channel_send(), + OnBitrateAllocation( + Field(&BitrateAllocationUpdate::target_bitrate, + 
Eq(DataRate::BitsPerSec(helper.config().max_bitrate_bps))))); + BitrateAllocationUpdate update; + update.target_bitrate = + DataRate::BitsPerSec(helper.config().max_bitrate_bps + 5000); + update.packet_loss_ratio = 0; + update.round_trip_time = TimeDelta::Millis(50); + update.bwe_period = TimeDelta::Millis(6000); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } TEST(AudioSendStreamTest, SSBweTargetInRangeRespected) { ScopedFieldTrials field_trials("WebRTC-Audio-SendSideBwe/Enabled/"); - ConfigHelper helper(true, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL(*helper.channel_send(), - OnBitrateAllocation(Field( - &BitrateAllocationUpdate::target_bitrate, - Eq(DataRate::bps(helper.config().max_bitrate_bps - 5000))))); - BitrateAllocationUpdate update; - update.target_bitrate = DataRate::bps(helper.config().max_bitrate_bps - 5000); - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(true, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL( + *helper.channel_send(), + OnBitrateAllocation(Field( + &BitrateAllocationUpdate::target_bitrate, + Eq(DataRate::BitsPerSec(helper.config().max_bitrate_bps - 5000))))); + BitrateAllocationUpdate update; + update.target_bitrate = + DataRate::BitsPerSec(helper.config().max_bitrate_bps - 5000); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } TEST(AudioSendStreamTest, SSBweFieldTrialMinRespected) { ScopedFieldTrials field_trials( "WebRTC-Audio-SendSideBwe/Enabled/" "WebRTC-Audio-Allocation/min:6kbps,max:64kbps/"); - ConfigHelper helper(true, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL( - *helper.channel_send(), - OnBitrateAllocation(Field(&BitrateAllocationUpdate::target_bitrate, - Eq(DataRate::kbps(6))))); - 
BitrateAllocationUpdate update; - update.target_bitrate = DataRate::kbps(1); - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(true, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL( + *helper.channel_send(), + OnBitrateAllocation(Field(&BitrateAllocationUpdate::target_bitrate, + Eq(DataRate::KilobitsPerSec(6))))); + BitrateAllocationUpdate update; + update.target_bitrate = DataRate::KilobitsPerSec(1); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } TEST(AudioSendStreamTest, SSBweFieldTrialMaxRespected) { ScopedFieldTrials field_trials( "WebRTC-Audio-SendSideBwe/Enabled/" "WebRTC-Audio-Allocation/min:6kbps,max:64kbps/"); - ConfigHelper helper(true, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL( - *helper.channel_send(), - OnBitrateAllocation(Field(&BitrateAllocationUpdate::target_bitrate, - Eq(DataRate::kbps(64))))); - BitrateAllocationUpdate update; - update.target_bitrate = DataRate::kbps(128); - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(true, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL( + *helper.channel_send(), + OnBitrateAllocation(Field(&BitrateAllocationUpdate::target_bitrate, + Eq(DataRate::KilobitsPerSec(64))))); + BitrateAllocationUpdate update; + update.target_bitrate = DataRate::KilobitsPerSec(128); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } TEST(AudioSendStreamTest, SSBweWithOverhead) { @@ -612,19 +705,22 @@ TEST(AudioSendStreamTest, SSBweWithOverhead) { "WebRTC-Audio-SendSideBwe/Enabled/" "WebRTC-SendSideBwe-WithOverhead/Enabled/" 
"WebRTC-Audio-LegacyOverhead/Disabled/"); - ConfigHelper helper(true, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL(*helper.channel_send(), CallEncoder(_)).Times(1); - send_stream->OnOverheadChanged(kOverheadPerPacket.bytes()); - const DataRate bitrate = - DataRate::bps(helper.config().max_bitrate_bps) + kMaxOverheadRate; - EXPECT_CALL(*helper.channel_send(), - OnBitrateAllocation(Field( - &BitrateAllocationUpdate::target_bitrate, Eq(bitrate)))); - BitrateAllocationUpdate update; - update.target_bitrate = bitrate; - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(true, true, use_null_audio_processing); + EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) + .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + auto send_stream = helper.CreateAudioSendStream(); + const DataRate bitrate = + DataRate::BitsPerSec(helper.config().max_bitrate_bps) + + kMaxOverheadRate; + EXPECT_CALL(*helper.channel_send(), + OnBitrateAllocation(Field( + &BitrateAllocationUpdate::target_bitrate, Eq(bitrate)))); + BitrateAllocationUpdate update; + update.target_bitrate = bitrate; + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } TEST(AudioSendStreamTest, SSBweWithOverheadMinRespected) { @@ -633,18 +729,20 @@ TEST(AudioSendStreamTest, SSBweWithOverheadMinRespected) { "WebRTC-SendSideBwe-WithOverhead/Enabled/" "WebRTC-Audio-LegacyOverhead/Disabled/" "WebRTC-Audio-Allocation/min:6kbps,max:64kbps/"); - ConfigHelper helper(true, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL(*helper.channel_send(), CallEncoder(_)).Times(1); - send_stream->OnOverheadChanged(kOverheadPerPacket.bytes()); - const DataRate bitrate = DataRate::kbps(6) + kMinOverheadRate; - EXPECT_CALL(*helper.channel_send(), - OnBitrateAllocation(Field( - &BitrateAllocationUpdate::target_bitrate, 
Eq(bitrate)))); - BitrateAllocationUpdate update; - update.target_bitrate = DataRate::kbps(1); - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(true, true, use_null_audio_processing); + EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) + .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + auto send_stream = helper.CreateAudioSendStream(); + const DataRate bitrate = DataRate::KilobitsPerSec(6) + kMinOverheadRate; + EXPECT_CALL(*helper.channel_send(), + OnBitrateAllocation(Field( + &BitrateAllocationUpdate::target_bitrate, Eq(bitrate)))); + BitrateAllocationUpdate update; + update.target_bitrate = DataRate::KilobitsPerSec(1); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } TEST(AudioSendStreamTest, SSBweWithOverheadMaxRespected) { @@ -653,151 +751,212 @@ TEST(AudioSendStreamTest, SSBweWithOverheadMaxRespected) { "WebRTC-SendSideBwe-WithOverhead/Enabled/" "WebRTC-Audio-LegacyOverhead/Disabled/" "WebRTC-Audio-Allocation/min:6kbps,max:64kbps/"); - ConfigHelper helper(true, true); - auto send_stream = helper.CreateAudioSendStream(); - EXPECT_CALL(*helper.channel_send(), CallEncoder(_)).Times(1); - send_stream->OnOverheadChanged(kOverheadPerPacket.bytes()); - const DataRate bitrate = DataRate::kbps(64) + kMaxOverheadRate; - EXPECT_CALL(*helper.channel_send(), - OnBitrateAllocation(Field( - &BitrateAllocationUpdate::target_bitrate, Eq(bitrate)))); - BitrateAllocationUpdate update; - update.target_bitrate = DataRate::kbps(128); - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(true, true, use_null_audio_processing); + EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) + .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + auto send_stream = 
helper.CreateAudioSendStream(); + const DataRate bitrate = DataRate::KilobitsPerSec(64) + kMaxOverheadRate; + EXPECT_CALL(*helper.channel_send(), + OnBitrateAllocation(Field( + &BitrateAllocationUpdate::target_bitrate, Eq(bitrate)))); + BitrateAllocationUpdate update; + update.target_bitrate = DataRate::KilobitsPerSec(128); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } TEST(AudioSendStreamTest, ProbingIntervalOnBitrateUpdated) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - - EXPECT_CALL(*helper.channel_send(), - OnBitrateAllocation(Field(&BitrateAllocationUpdate::bwe_period, - Eq(TimeDelta::ms(5000))))); - BitrateAllocationUpdate update; - update.target_bitrate = DataRate::bps(helper.config().max_bitrate_bps + 5000); - update.packet_loss_ratio = 0; - update.round_trip_time = TimeDelta::ms(50); - update.bwe_period = TimeDelta::ms(5000); - helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, - RTC_FROM_HERE); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + + EXPECT_CALL(*helper.channel_send(), + OnBitrateAllocation(Field(&BitrateAllocationUpdate::bwe_period, + Eq(TimeDelta::Millis(5000))))); + BitrateAllocationUpdate update; + update.target_bitrate = + DataRate::BitsPerSec(helper.config().max_bitrate_bps + 5000); + update.packet_loss_ratio = 0; + update.round_trip_time = TimeDelta::Millis(50); + update.bwe_period = TimeDelta::Millis(5000); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + } } // Test that AudioSendStream doesn't recreate the encoder unnecessarily. TEST(AudioSendStreamTest, DontRecreateEncoder) { - ConfigHelper helper(false, false); - // WillOnce is (currently) the default used by ConfigHelper if asked to set an - // expectation for SetEncoder. 
Since this behavior is essential for this test - // to be correct, it's instead set-up manually here. Otherwise a simple change - // to ConfigHelper (say to WillRepeatedly) would silently make this test - // useless. - EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _)) - .WillOnce(Return()); - - EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000)); - - helper.config().send_codec_spec = - AudioSendStream::Config::SendCodecSpec(9, kG722Format); - helper.config().send_codec_spec->cng_payload_type = 105; - auto send_stream = helper.CreateAudioSendStream(); - send_stream->Reconfigure(helper.config()); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, false, use_null_audio_processing); + // WillOnce is (currently) the default used by ConfigHelper if asked to set + // an expectation for SetEncoder. Since this behavior is essential for this + // test to be correct, it's instead set-up manually here. Otherwise a simple + // change to ConfigHelper (say to WillRepeatedly) would silently make this + // test useless. 
+ EXPECT_CALL(*helper.channel_send(), SetEncoder).WillOnce(Return()); + + EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000)); + + helper.config().send_codec_spec = + AudioSendStream::Config::SendCodecSpec(9, kG722Format); + helper.config().send_codec_spec->cng_payload_type = 105; + auto send_stream = helper.CreateAudioSendStream(); + send_stream->Reconfigure(helper.config()); + } } TEST(AudioSendStreamTest, ReconfigureTransportCcResetsFirst) { ScopedFieldTrials field_trials("WebRTC-Audio-SendSideBwe/Enabled/"); - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - auto new_config = helper.config(); - ConfigHelper::AddBweToConfig(&new_config); - - EXPECT_CALL(*helper.rtp_rtcp(), - RegisterRtpHeaderExtension(TransportSequenceNumber::kUri, - kTransportSequenceNumberId)) - .Times(1); - { - ::testing::InSequence seq; - EXPECT_CALL(*helper.channel_send(), ResetSenderCongestionControlObjects()) + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + auto new_config = helper.config(); + ConfigHelper::AddBweToConfig(&new_config); + + EXPECT_CALL(*helper.rtp_rtcp(), + RegisterRtpHeaderExtension(TransportSequenceNumber::kUri, + kTransportSequenceNumberId)) .Times(1); - EXPECT_CALL(*helper.channel_send(), RegisterSenderCongestionControlObjects( - helper.transport(), Ne(nullptr))) - .Times(1); - } + { + ::testing::InSequence seq; + EXPECT_CALL(*helper.channel_send(), ResetSenderCongestionControlObjects()) + .Times(1); + EXPECT_CALL(*helper.channel_send(), + RegisterSenderCongestionControlObjects(helper.transport(), + Ne(nullptr))) + .Times(1); + } - send_stream->Reconfigure(new_config); + send_stream->Reconfigure(new_config); + } } TEST(AudioSendStreamTest, OnTransportOverheadChanged) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - auto new_config = 
helper.config(); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + auto new_config = helper.config(); - // CallEncoder will be called on overhead change. - EXPECT_CALL(*helper.channel_send(), CallEncoder(::testing::_)).Times(1); + // CallEncoder will be called on overhead change. + EXPECT_CALL(*helper.channel_send(), CallEncoder); - const size_t transport_overhead_per_packet_bytes = 333; - send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); + const size_t transport_overhead_per_packet_bytes = 333; + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); - EXPECT_EQ(transport_overhead_per_packet_bytes, - send_stream->TestOnlyGetPerPacketOverheadBytes()); + EXPECT_EQ(transport_overhead_per_packet_bytes, + send_stream->TestOnlyGetPerPacketOverheadBytes()); + } } -TEST(AudioSendStreamTest, OnAudioOverheadChanged) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - auto new_config = helper.config(); +TEST(AudioSendStreamTest, DoesntCallEncoderWhenOverheadUnchanged) { + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + auto new_config = helper.config(); - // CallEncoder will be called on overhead change. - EXPECT_CALL(*helper.channel_send(), CallEncoder(::testing::_)).Times(1); + // CallEncoder will be called on overhead change. 
+ EXPECT_CALL(*helper.channel_send(), CallEncoder); + const size_t transport_overhead_per_packet_bytes = 333; + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); - const size_t audio_overhead_per_packet_bytes = 555; - send_stream->OnOverheadChanged(audio_overhead_per_packet_bytes); - EXPECT_EQ(audio_overhead_per_packet_bytes, - send_stream->TestOnlyGetPerPacketOverheadBytes()); -} - -TEST(AudioSendStreamTest, OnAudioAndTransportOverheadChanged) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - auto new_config = helper.config(); - - // CallEncoder will be called when each of overhead changes. - EXPECT_CALL(*helper.channel_send(), CallEncoder(::testing::_)).Times(2); + // Set the same overhead again, CallEncoder should not be called again. + EXPECT_CALL(*helper.channel_send(), CallEncoder).Times(0); + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); - const size_t transport_overhead_per_packet_bytes = 333; - send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); + // New overhead, call CallEncoder again + EXPECT_CALL(*helper.channel_send(), CallEncoder); + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes + 1); + } +} - const size_t audio_overhead_per_packet_bytes = 555; - send_stream->OnOverheadChanged(audio_overhead_per_packet_bytes); +TEST(AudioSendStreamTest, AudioOverheadChanged) { + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + const size_t audio_overhead_per_packet_bytes = 555; + EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) + .WillRepeatedly(Return(audio_overhead_per_packet_bytes)); + auto send_stream = helper.CreateAudioSendStream(); + auto new_config = helper.config(); + + BitrateAllocationUpdate update; + update.target_bitrate = + DataRate::BitsPerSec(helper.config().max_bitrate_bps) + + kMaxOverheadRate; + EXPECT_CALL(*helper.channel_send(), 
OnBitrateAllocation); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + + EXPECT_EQ(audio_overhead_per_packet_bytes, + send_stream->TestOnlyGetPerPacketOverheadBytes()); + + EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) + .WillRepeatedly(Return(audio_overhead_per_packet_bytes + 20)); + EXPECT_CALL(*helper.channel_send(), OnBitrateAllocation); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + + EXPECT_EQ(audio_overhead_per_packet_bytes + 20, + send_stream->TestOnlyGetPerPacketOverheadBytes()); + } +} - EXPECT_EQ( - transport_overhead_per_packet_bytes + audio_overhead_per_packet_bytes, - send_stream->TestOnlyGetPerPacketOverheadBytes()); +TEST(AudioSendStreamTest, OnAudioAndTransportOverheadChanged) { + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + const size_t audio_overhead_per_packet_bytes = 555; + EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) + .WillRepeatedly(Return(audio_overhead_per_packet_bytes)); + auto send_stream = helper.CreateAudioSendStream(); + auto new_config = helper.config(); + + const size_t transport_overhead_per_packet_bytes = 333; + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); + + BitrateAllocationUpdate update; + update.target_bitrate = + DataRate::BitsPerSec(helper.config().max_bitrate_bps) + + kMaxOverheadRate; + EXPECT_CALL(*helper.channel_send(), OnBitrateAllocation); + helper.worker()->SendTask([&] { send_stream->OnBitrateUpdated(update); }, + RTC_FROM_HERE); + + EXPECT_EQ( + transport_overhead_per_packet_bytes + audio_overhead_per_packet_bytes, + send_stream->TestOnlyGetPerPacketOverheadBytes()); + } } // Validates that reconfiguring the AudioSendStream with a Frame encryptor // correctly reconfigures on the object without crashing. 
TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { - ConfigHelper helper(false, true); - auto send_stream = helper.CreateAudioSendStream(); - auto new_config = helper.config(); - - rtc::scoped_refptr mock_frame_encryptor_0( - new rtc::RefCountedObject()); - new_config.frame_encryptor = mock_frame_encryptor_0; - EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))).Times(1); - send_stream->Reconfigure(new_config); - - // Not updating the frame encryptor shouldn't force it to reconfigure. - EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(_)).Times(0); - send_stream->Reconfigure(new_config); - - // Updating frame encryptor to a new object should force a call to the proxy. - rtc::scoped_refptr mock_frame_encryptor_1( - new rtc::RefCountedObject()); - new_config.frame_encryptor = mock_frame_encryptor_1; - new_config.crypto_options.sframe.require_frame_encryption = true; - EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))).Times(1); - send_stream->Reconfigure(new_config); + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + auto new_config = helper.config(); + + rtc::scoped_refptr mock_frame_encryptor_0( + new rtc::RefCountedObject()); + new_config.frame_encryptor = mock_frame_encryptor_0; + EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) + .Times(1); + send_stream->Reconfigure(new_config); + + // Not updating the frame encryptor shouldn't force it to reconfigure. + EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(_)).Times(0); + send_stream->Reconfigure(new_config); + + // Updating frame encryptor to a new object should force a call to the + // proxy. 
+ rtc::scoped_refptr mock_frame_encryptor_1( + new rtc::RefCountedObject()); + new_config.frame_encryptor = mock_frame_encryptor_1; + new_config.crypto_options.sframe.require_frame_encryption = true; + EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) + .Times(1); + send_stream->Reconfigure(new_config); + } } } // namespace test } // namespace webrtc diff --git a/audio/audio_state.cc b/audio/audio_state.cc index 1a4fd77ed2..566bae1311 100644 --- a/audio/audio_state.cc +++ b/audio/audio_state.cc @@ -28,7 +28,9 @@ namespace internal { AudioState::AudioState(const AudioState::Config& config) : config_(config), - audio_transport_(config_.audio_mixer, config_.audio_processing.get()) { + audio_transport_(config_.audio_mixer, + config_.audio_processing.get(), + config_.async_audio_processing_factory.get()) { process_thread_checker_.Detach(); RTC_DCHECK(config_.audio_mixer); RTC_DCHECK(config_.audio_device_module); @@ -41,7 +43,6 @@ AudioState::~AudioState() { } AudioProcessing* AudioState::audio_processing() { - RTC_DCHECK(config_.audio_processing); return config_.audio_processing.get(); } diff --git a/audio/audio_state.h b/audio/audio_state.h index f696d5a8fe..5e766428d9 100644 --- a/audio/audio_state.h +++ b/audio/audio_state.h @@ -18,8 +18,6 @@ #include "audio/audio_transport_impl.h" #include "audio/null_audio_poller.h" #include "call/audio_state.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ref_count.h" #include "rtc_base/thread_checker.h" @@ -33,6 +31,11 @@ namespace internal { class AudioState : public webrtc::AudioState { public: explicit AudioState(const AudioState::Config& config); + + AudioState() = delete; + AudioState(const AudioState&) = delete; + AudioState& operator=(const AudioState&) = delete; + ~AudioState() override; AudioProcessing* audio_processing() override; @@ -83,8 +86,6 @@ class AudioState : public webrtc::AudioState { size_t num_channels = 0; }; std::map sending_streams_; 
- - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioState); }; } // namespace internal } // namespace webrtc diff --git a/audio/audio_state_unittest.cc b/audio/audio_state_unittest.cc index bf79529365..02fc04e6dc 100644 --- a/audio/audio_state_unittest.cc +++ b/audio/audio_state_unittest.cc @@ -11,6 +11,7 @@ #include "audio/audio_state.h" #include +#include #include #include "call/test/mock_audio_send_stream.h" @@ -24,23 +25,108 @@ namespace webrtc { namespace test { namespace { +using ::testing::_; +using ::testing::Matcher; +using ::testing::NiceMock; +using ::testing::StrictMock; +using ::testing::Values; + constexpr int kSampleRate = 16000; constexpr int kNumberOfChannels = 1; +struct FakeAsyncAudioProcessingHelper { + class FakeTaskQueue : public StrictMock { + public: + FakeTaskQueue() = default; + + void Delete() override { delete this; } + void PostTask(std::unique_ptr task) override { + std::move(task)->Run(); + } + MOCK_METHOD(void, + PostDelayedTask, + (std::unique_ptr task, uint32_t milliseconds), + (override)); + }; + + class FakeTaskQueueFactory : public TaskQueueFactory { + public: + FakeTaskQueueFactory() = default; + ~FakeTaskQueueFactory() override = default; + std::unique_ptr CreateTaskQueue( + absl::string_view name, + Priority priority) const override { + return std::unique_ptr( + new FakeTaskQueue()); + } + }; + + class MockAudioFrameProcessor : public AudioFrameProcessor { + public: + ~MockAudioFrameProcessor() override = default; + + MOCK_METHOD(void, ProcessCalled, ()); + MOCK_METHOD(void, SinkSet, ()); + MOCK_METHOD(void, SinkCleared, ()); + + void Process(std::unique_ptr frame) override { + ProcessCalled(); + sink_callback_(std::move(frame)); + } + + void SetSink(OnAudioFrameCallback sink_callback) override { + sink_callback_ = std::move(sink_callback); + if (sink_callback_ == nullptr) + SinkCleared(); + else + SinkSet(); + } + + private: + OnAudioFrameCallback sink_callback_; + }; + + NiceMock audio_frame_processor_; + FakeTaskQueueFactory 
task_queue_factory_; + + rtc::scoped_refptr CreateFactory() { + return new rtc::RefCountedObject( + audio_frame_processor_, task_queue_factory_); + } +}; + struct ConfigHelper { - ConfigHelper() : audio_mixer(AudioMixerImpl::Create()) { + struct Params { + bool use_null_audio_processing; + bool use_async_audio_processing; + }; + + explicit ConfigHelper(const Params& params) + : audio_mixer(AudioMixerImpl::Create()) { audio_state_config.audio_mixer = audio_mixer; audio_state_config.audio_processing = - new rtc::RefCountedObject>(); + params.use_null_audio_processing + ? nullptr + : new rtc::RefCountedObject< + testing::NiceMock>(); audio_state_config.audio_device_module = - new rtc::RefCountedObject(); + new rtc::RefCountedObject>(); + if (params.use_async_audio_processing) { + audio_state_config.async_audio_processing_factory = + async_audio_processing_helper_.CreateFactory(); + } } AudioState::Config& config() { return audio_state_config; } rtc::scoped_refptr mixer() { return audio_mixer; } + NiceMock& + mock_audio_frame_processor() { + return async_audio_processing_helper_.audio_frame_processor_; + } private: AudioState::Config audio_state_config; rtc::scoped_refptr audio_mixer; + FakeAsyncAudioProcessingHelper async_audio_processing_helper_; }; class FakeAudioSource : public AudioMixer::Source { @@ -53,8 +139,10 @@ class FakeAudioSource : public AudioMixer::Source { int PreferredSampleRate() const /*override*/ { return kSampleRate; } - MOCK_METHOD2(GetAudioFrameWithInfo, - AudioFrameInfo(int sample_rate_hz, AudioFrame* audio_frame)); + MOCK_METHOD(AudioFrameInfo, + GetAudioFrameWithInfo, + (int sample_rate_hz, AudioFrame*), + (override)); }; std::vector Create10msTestData(int sample_rate_hz, @@ -84,20 +172,29 @@ std::vector ComputeChannelLevels(AudioFrame* audio_frame) { } } // namespace -TEST(AudioStateTest, Create) { - ConfigHelper helper; +class AudioStateTest : public ::testing::TestWithParam {}; + +TEST_P(AudioStateTest, Create) { + ConfigHelper 
helper(GetParam()); auto audio_state = AudioState::Create(helper.config()); EXPECT_TRUE(audio_state.get()); } -TEST(AudioStateTest, ConstructDestruct) { - ConfigHelper helper; +TEST_P(AudioStateTest, ConstructDestruct) { + ConfigHelper helper(GetParam()); rtc::scoped_refptr audio_state( new rtc::RefCountedObject(helper.config())); } -TEST(AudioStateTest, RecordedAudioArrivesAtSingleStream) { - ConfigHelper helper; +TEST_P(AudioStateTest, RecordedAudioArrivesAtSingleStream) { + ConfigHelper helper(GetParam()); + + if (GetParam().use_async_audio_processing) { + EXPECT_CALL(helper.mock_audio_frame_processor(), SinkSet); + EXPECT_CALL(helper.mock_audio_frame_processor(), ProcessCalled); + EXPECT_CALL(helper.mock_audio_frame_processor(), SinkCleared); + } + rtc::scoped_refptr audio_state( new rtc::RefCountedObject(helper.config())); @@ -117,10 +214,14 @@ TEST(AudioStateTest, RecordedAudioArrivesAtSingleStream) { EXPECT_EQ(0u, levels[1]); })); MockAudioProcessing* ap = - static_cast(audio_state->audio_processing()); - EXPECT_CALL(*ap, set_stream_delay_ms(0)); - EXPECT_CALL(*ap, set_stream_key_pressed(false)); - EXPECT_CALL(*ap, ProcessStream(::testing::_)); + GetParam().use_null_audio_processing + ? 
nullptr + : static_cast(audio_state->audio_processing()); + if (ap) { + EXPECT_CALL(*ap, set_stream_delay_ms(0)); + EXPECT_CALL(*ap, set_stream_key_pressed(false)); + EXPECT_CALL(*ap, ProcessStream(_, _, _, Matcher(_))); + } constexpr int kSampleRate = 16000; constexpr size_t kNumChannels = 2; @@ -134,8 +235,15 @@ TEST(AudioStateTest, RecordedAudioArrivesAtSingleStream) { audio_state->RemoveSendingStream(&stream); } -TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) { - ConfigHelper helper; +TEST_P(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) { + ConfigHelper helper(GetParam()); + + if (GetParam().use_async_audio_processing) { + EXPECT_CALL(helper.mock_audio_frame_processor(), SinkSet); + EXPECT_CALL(helper.mock_audio_frame_processor(), ProcessCalled); + EXPECT_CALL(helper.mock_audio_frame_processor(), SinkCleared); + } + rtc::scoped_refptr audio_state( new rtc::RefCountedObject(helper.config())); @@ -168,9 +276,11 @@ TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) { })); MockAudioProcessing* ap = static_cast(audio_state->audio_processing()); - EXPECT_CALL(*ap, set_stream_delay_ms(5)); - EXPECT_CALL(*ap, set_stream_key_pressed(true)); - EXPECT_CALL(*ap, ProcessStream(::testing::_)); + if (ap) { + EXPECT_CALL(*ap, set_stream_delay_ms(5)); + EXPECT_CALL(*ap, set_stream_key_pressed(true)); + EXPECT_CALL(*ap, ProcessStream(_, _, _, Matcher(_))); + } constexpr int kSampleRate = 16000; constexpr size_t kNumChannels = 1; @@ -185,11 +295,18 @@ TEST(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) { audio_state->RemoveSendingStream(&stream_2); } -TEST(AudioStateTest, EnableChannelSwap) { +TEST_P(AudioStateTest, EnableChannelSwap) { constexpr int kSampleRate = 16000; constexpr size_t kNumChannels = 2; - ConfigHelper helper; + ConfigHelper helper(GetParam()); + + if (GetParam().use_async_audio_processing) { + EXPECT_CALL(helper.mock_audio_frame_processor(), SinkSet); + EXPECT_CALL(helper.mock_audio_frame_processor(), ProcessCalled); + 
EXPECT_CALL(helper.mock_audio_frame_processor(), SinkCleared); + } + rtc::scoped_refptr audio_state( new rtc::RefCountedObject(helper.config())); @@ -198,7 +315,7 @@ TEST(AudioStateTest, EnableChannelSwap) { MockAudioSendStream stream; audio_state->AddSendingStream(&stream, kSampleRate, kNumChannels); - EXPECT_CALL(stream, SendAudioDataForMock(::testing::_)) + EXPECT_CALL(stream, SendAudioDataForMock(_)) .WillOnce( // Verify that channels are swapped. ::testing::Invoke([](AudioFrame* audio_frame) { @@ -217,15 +334,15 @@ TEST(AudioStateTest, EnableChannelSwap) { audio_state->RemoveSendingStream(&stream); } -TEST(AudioStateTest, - QueryingTransportForAudioShouldResultInGetAudioCallOnMixerSource) { - ConfigHelper helper; +TEST_P(AudioStateTest, + QueryingTransportForAudioShouldResultInGetAudioCallOnMixerSource) { + ConfigHelper helper(GetParam()); auto audio_state = AudioState::Create(helper.config()); FakeAudioSource fake_source; helper.mixer()->AddSource(&fake_source); - EXPECT_CALL(fake_source, GetAudioFrameWithInfo(::testing::_, ::testing::_)) + EXPECT_CALL(fake_source, GetAudioFrameWithInfo(_, _)) .WillOnce( ::testing::Invoke([](int sample_rate_hz, AudioFrame* audio_frame) { audio_frame->sample_rate_hz_ = sample_rate_hz; @@ -242,5 +359,13 @@ TEST(AudioStateTest, kSampleRate / 100, kNumberOfChannels * 2, kNumberOfChannels, kSampleRate, audio_buffer, n_samples_out, &elapsed_time_ms, &ntp_time_ms); } + +INSTANTIATE_TEST_SUITE_P(AudioStateTest, + AudioStateTest, + Values(ConfigHelper::Params({false, false}), + ConfigHelper::Params({true, false}), + ConfigHelper::Params({false, true}), + ConfigHelper::Params({true, true}))); + } // namespace test } // namespace webrtc diff --git a/audio/audio_transport_impl.cc b/audio/audio_transport_impl.cc index 347e86b532..06d1b5b5fc 100644 --- a/audio/audio_transport_impl.cc +++ b/audio/audio_transport_impl.cc @@ -13,10 +13,15 @@ #include #include #include +#include +#include +#include #include "audio/remix_resample.h" #include 
"audio/utility/audio_frame_operations.h" #include "call/audio_sender.h" +#include "modules/async_audio_processing/async_audio_processing.h" +#include "modules/audio_processing/include/audio_frame_proxies.h" #include "rtc_base/checks.h" namespace webrtc { @@ -48,12 +53,15 @@ void ProcessCaptureFrame(uint32_t delay_ms, bool swap_stereo_channels, AudioProcessing* audio_processing, AudioFrame* audio_frame) { - RTC_DCHECK(audio_processing); RTC_DCHECK(audio_frame); - audio_processing->set_stream_delay_ms(delay_ms); - audio_processing->set_stream_key_pressed(key_pressed); - int error = audio_processing->ProcessStream(audio_frame); - RTC_DCHECK_EQ(0, error) << "ProcessStream() error: " << error; + if (audio_processing) { + audio_processing->set_stream_delay_ms(delay_ms); + audio_processing->set_stream_key_pressed(key_pressed); + int error = ProcessAudioFrame(audio_processing, audio_frame); + + RTC_DCHECK_EQ(0, error) << "ProcessStream() error: " << error; + } + if (swap_stereo_channels) { AudioFrameOperations::SwapStereoChannels(audio_frame); } @@ -79,14 +87,54 @@ int Resample(const AudioFrame& frame, } } // namespace -AudioTransportImpl::AudioTransportImpl(AudioMixer* mixer, - AudioProcessing* audio_processing) - : audio_processing_(audio_processing), mixer_(mixer) { +AudioTransportImpl::AudioTransportImpl( + AudioMixer* mixer, + AudioProcessing* audio_processing, + AsyncAudioProcessing::Factory* async_audio_processing_factory) + : audio_processing_(audio_processing), + async_audio_processing_( + async_audio_processing_factory + ? 
async_audio_processing_factory->CreateAsyncAudioProcessing( + [this](std::unique_ptr frame) { + this->SendProcessedData(std::move(frame)); + }) + : nullptr), + mixer_(mixer), + pre_deliver_callback_(nullptr), + pre_deliver_callback_opaque_(nullptr) { RTC_DCHECK(mixer); - RTC_DCHECK(audio_processing); + +#if defined(AVCONF_DUMP_RECORD_AUDIO) + mkdir("/sdcard/avconf/", S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH); + int64_t now = std::chrono::duration_cast( + std::chrono::steady_clock::now().time_since_epoch()) + .count(); + char dump_name[1024]; + snprintf(dump_name, 1023, "/sdcard/avconf/cap_%" PRId64 ".pcm", now); + dump_cap_ = fopen(dump_name, "wb"); + snprintf(dump_name, 1023, "/sdcard/avconf/proc_%" PRId64 ".pcm", now); + dump_proc_ = fopen(dump_name, "wb"); + snprintf(dump_name, 1023, "/sdcard/avconf/mixed_%" PRId64 ".pcm", now); + dump_mixed_ = fopen(dump_name, "wb"); +#endif } -AudioTransportImpl::~AudioTransportImpl() {} +AudioTransportImpl::~AudioTransportImpl() { +#if defined(AVCONF_DUMP_RECORD_AUDIO) + if (dump_cap_) { + fclose(dump_cap_); + dump_cap_ = nullptr; + } + if (dump_proc_) { + fclose(dump_proc_); + dump_proc_ = nullptr; + } + if (dump_mixed_) { + fclose(dump_mixed_); + dump_mixed_ = nullptr; + } +#endif +} // Not used in Chromium. Process captured audio and distribute to all sending // streams, and try to do this at the lowest possible sample rate. 
@@ -115,12 +163,20 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( size_t send_num_channels = 0; bool swap_stereo_channels = false; { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); send_sample_rate_hz = send_sample_rate_hz_; send_num_channels = send_num_channels_; swap_stereo_channels = swap_stereo_channels_; } +#if defined(AVCONF_DUMP_RECORD_AUDIO) + if (dump_cap_) { + fwrite(audio_data, 1, + number_of_channels * number_of_frames * bytes_per_sample, + dump_cap_); + } +#endif + std::unique_ptr audio_frame(new AudioFrame()); InitializeCaptureFrame(sample_rate, send_sample_rate_hz, number_of_channels, send_num_channels, audio_frame.get()); @@ -131,39 +187,77 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( swap_stereo_channels, audio_processing_, audio_frame.get()); +#if defined(AVCONF_DUMP_RECORD_AUDIO) + if (dump_proc_) { + fwrite(audio_frame->data(), 1, + audio_frame->num_channels() * audio_frame->samples_per_channel() * + bytes_per_sample, + dump_proc_); + } +#endif + // Typing detection (utilizes the APM/VAD decision). We let the VAD determine // if we're using this feature or not. // TODO(solenberg): GetConfig() takes a lock. Work around that. 
bool typing_detected = false; - if (audio_processing_->GetConfig().voice_detection.enabled) { + if (audio_processing_ && + audio_processing_->GetConfig().voice_detection.enabled) { if (audio_frame->vad_activity_ != AudioFrame::kVadUnknown) { bool vad_active = audio_frame->vad_activity_ == AudioFrame::kVadActive; typing_detected = typing_detection_.Process(key_pressed, vad_active); } } + if (pre_deliver_callback_) { + pre_deliver_callback_(pre_deliver_callback_opaque_, + audio_frame->mutable_data(), + audio_frame->samples_per_channel(), + bytes_per_sample, audio_frame->num_channels(), + audio_frame->sample_rate_hz()); + } + +#if defined(AVCONF_DUMP_RECORD_AUDIO) + if (dump_mixed_) { + fwrite(audio_frame->data(), 1, + audio_frame->num_channels() * audio_frame->samples_per_channel() * + bytes_per_sample, + dump_mixed_); + } +#endif + // Copy frame and push to each sending stream. The copy is required since an // encoding task will be posted internally to each stream. { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); typing_noise_detected_ = typing_detected; - - RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); - if (!audio_senders_.empty()) { - auto it = audio_senders_.begin(); - while (++it != audio_senders_.end()) { - std::unique_ptr audio_frame_copy(new AudioFrame()); - audio_frame_copy->CopyFrom(*audio_frame); - (*it)->SendAudioData(std::move(audio_frame_copy)); - } - // Send the original frame to the first stream w/o copying. 
- (*audio_senders_.begin())->SendAudioData(std::move(audio_frame)); - } } + RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); + if (async_audio_processing_) + async_audio_processing_->Process(std::move(audio_frame)); + else + SendProcessedData(std::move(audio_frame)); + return 0; } +void AudioTransportImpl::SendProcessedData( + std::unique_ptr audio_frame) { + RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); + MutexLock lock(&capture_lock_); + if (audio_senders_.empty()) + return; + + auto it = audio_senders_.begin(); + while (++it != audio_senders_.end()) { + auto audio_frame_copy = std::make_unique(); + audio_frame_copy->CopyFrom(*audio_frame); + (*it)->SendAudioData(std::move(audio_frame_copy)); + } + // Send the original frame to the first stream w/o copying. + (*audio_senders_.begin())->SendAudioData(std::move(audio_frame)); +} + // Mix all received streams, feed the result to the AudioProcessing module, then // resample the result to the requested output rate. int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples, @@ -190,8 +284,11 @@ int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples, *elapsed_time_ms = mixed_frame_.elapsed_time_ms_; *ntp_time_ms = mixed_frame_.ntp_time_ms_; - const auto error = audio_processing_->ProcessReverseStream(&mixed_frame_); - RTC_DCHECK_EQ(error, AudioProcessing::kNoError); + if (audio_processing_) { + const auto error = + ProcessReverseAudioFrame(audio_processing_, &mixed_frame_); + RTC_DCHECK_EQ(error, AudioProcessing::kNoError); + } nSamplesOut = Resample(mixed_frame_, samplesPerSec, &render_resampler_, static_cast(audioSamples)); @@ -230,19 +327,28 @@ void AudioTransportImpl::PullRenderData(int bits_per_sample, void AudioTransportImpl::UpdateAudioSenders(std::vector senders, int send_sample_rate_hz, size_t send_num_channels) { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); audio_senders_ = std::move(senders); send_sample_rate_hz_ = send_sample_rate_hz; 
send_num_channels_ = send_num_channels; } void AudioTransportImpl::SetStereoChannelSwapping(bool enable) { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); swap_stereo_channels_ = enable; } bool AudioTransportImpl::typing_noise_detected() const { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); return typing_noise_detected_; } + +void AudioTransportImpl::AddPlaybackSource(AudioMixer::Source* source) { + mixer_->AddSource(source); +} + +void AudioTransportImpl::RemovePlaybackSource(AudioMixer::Source* source) { + mixer_->RemoveSource(source); + delete source; +} } // namespace webrtc diff --git a/audio/audio_transport_impl.h b/audio/audio_transport_impl.h index 2d9b4cf3a1..8f4be99442 100644 --- a/audio/audio_transport_impl.h +++ b/audio/audio_transport_impl.h @@ -11,25 +11,40 @@ #ifndef AUDIO_AUDIO_TRANSPORT_IMPL_H_ #define AUDIO_AUDIO_TRANSPORT_IMPL_H_ +#include #include #include "api/audio/audio_mixer.h" #include "api/scoped_refptr.h" #include "common_audio/resampler/include/push_resampler.h" +#include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/typing_detection.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" +//#define AVCONF_DUMP_RECORD_AUDIO 1 + namespace webrtc { class AudioSender; class AudioTransportImpl : public AudioTransport { public: - AudioTransportImpl(AudioMixer* mixer, AudioProcessing* audio_processing); + typedef void (*PreDeliverRecordedDataCallback)( + void* opaque, void* audioSamples, const size_t nSamples, + const size_t nBytesPerSample, const size_t nChannels, + const uint32_t samplesPerSec); + AudioTransportImpl( + AudioMixer* mixer, + AudioProcessing* audio_processing, + AsyncAudioProcessing::Factory* 
async_audio_processing_factory); + + AudioTransportImpl() = delete; + AudioTransportImpl(const AudioTransportImpl&) = delete; + AudioTransportImpl& operator=(const AudioTransportImpl&) = delete; + ~AudioTransportImpl() override; int32_t RecordedDataIsAvailable(const void* audioSamples, @@ -66,12 +81,27 @@ class AudioTransportImpl : public AudioTransport { void SetStereoChannelSwapping(bool enable); bool typing_noise_detected() const; + void AddPlaybackSource(AudioMixer::Source* source); + void RemovePlaybackSource(AudioMixer::Source* source); + + void SetPreDeliverRecordedDataCallback( + PreDeliverRecordedDataCallback callback, void* opaque) { + pre_deliver_callback_ = callback; + pre_deliver_callback_opaque_ = opaque; + } + private: + void SendProcessedData(std::unique_ptr audio_frame); + // Shared. AudioProcessing* audio_processing_ = nullptr; // Capture side. - rtc::CriticalSection capture_lock_; + + // Thread-safe. + const std::unique_ptr async_audio_processing_; + + mutable Mutex capture_lock_; std::vector audio_senders_ RTC_GUARDED_BY(capture_lock_); int send_sample_rate_hz_ RTC_GUARDED_BY(capture_lock_) = 8000; size_t send_num_channels_ RTC_GUARDED_BY(capture_lock_) = 1; @@ -81,12 +111,20 @@ class AudioTransportImpl : public AudioTransport { TypingDetection typing_detection_; // Render side. + rtc::scoped_refptr mixer_; AudioFrame mixed_frame_; // Converts mixed audio to the audio device output rate. 
PushResampler render_resampler_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioTransportImpl); + PreDeliverRecordedDataCallback pre_deliver_callback_; + void* pre_deliver_callback_opaque_; + +#if defined(AVCONF_DUMP_RECORD_AUDIO) + FILE* dump_cap_; + FILE* dump_proc_; + FILE* dump_mixed_; +#endif }; } // namespace webrtc diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index d0c17fbbe2..1b57ab59ee 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -20,8 +20,10 @@ #include #include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/rtc_event_log/rtc_event_log.h" #include "audio/audio_level.h" +#include "audio/channel_receive_frame_transformer_delegate.h" #include "audio/channel_send.h" #include "audio/utility/audio_frame_operations.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" @@ -31,18 +33,19 @@ #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/format_macros.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/metrics.h" @@ -78,22 +81,24 @@ AudioCodingModule::Config AcmConfig( class ChannelReceive : public ChannelReceiveInterface { public: // Used 
for receive streams. - ChannelReceive(Clock* clock, - ProcessThread* module_process_thread, - NetEqFactory* neteq_factory, - AudioDeviceModule* audio_device_module, - Transport* rtcp_send_transport, - RtcEventLog* rtc_event_log, - uint32_t local_ssrc, - uint32_t remote_ssrc, - size_t jitter_buffer_max_packets, - bool jitter_buffer_fast_playout, - int jitter_buffer_min_delay_ms, - bool jitter_buffer_enable_rtx_handling, - rtc::scoped_refptr decoder_factory, - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options); + ChannelReceive( + Clock* clock, + ProcessThread* module_process_thread, + NetEqFactory* neteq_factory, + AudioDeviceModule* audio_device_module, + Transport* rtcp_send_transport, + RtcEventLog* rtc_event_log, + uint32_t local_ssrc, + uint32_t remote_ssrc, + size_t jitter_buffer_max_packets, + bool jitter_buffer_fast_playout, + int jitter_buffer_min_delay_ms, + bool jitter_buffer_enable_rtx_handling, + rtc::scoped_refptr decoder_factory, + absl::optional codec_pair_id, + rtc::scoped_refptr frame_decryptor, + const webrtc::CryptoOptions& crypto_options, + rtc::scoped_refptr frame_transformer); ~ChannelReceive() override; void SetSink(AudioSinkInterface* sink) override; @@ -123,12 +128,13 @@ class ChannelReceive : public ChannelReceiveInterface { double GetTotalOutputDuration() const override; // Stats. - NetworkStatistics GetNetworkStatistics() const override; + NetworkStatistics GetNetworkStatistics( + bool get_and_clear_legacy_stats) const override; AudioDecodingCallStats GetDecodingCallStatistics() const override; // Audio+Video Sync. 
uint32_t GetDelayEstimate() const override; - void SetMinimumPlayoutDelay(int delayMs) override; + bool SetMinimumPlayoutDelay(int delayMs) override; bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const override; void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, @@ -160,6 +166,16 @@ class ChannelReceive : public ChannelReceiveInterface { // Used for obtaining RTT for a receive-only channel. void SetAssociatedSendChannel(const ChannelSendInterface* channel) override; + // Sets a frame transformer between the depacketizer and the decoder, to + // transform the received frames before decoding them. + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) + override; + +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder) override; +#endif + private: void ReceivePacket(const uint8_t* packet, size_t packet_length, @@ -173,8 +189,11 @@ class ChannelReceive : public ChannelReceiveInterface { void OnReceivedPayloadData(rtc::ArrayView payload, const RTPHeader& rtpHeader); + void InitFrameTransformerDelegate( + rtc::scoped_refptr frame_transformer); + bool Playing() const { - rtc::CritScope lock(&playing_lock_); + MutexLock lock(&playing_lock_); return playing_; } @@ -190,10 +209,10 @@ class ChannelReceive : public ChannelReceiveInterface { // audio thread to another, but access is still sequential. 
rtc::RaceChecker audio_thread_race_checker_; rtc::RaceChecker video_capture_thread_race_checker_; - rtc::CriticalSection _callbackCritSect; - rtc::CriticalSection volume_settings_critsect_; + Mutex callback_mutex_; + Mutex volume_settings_mutex_; - rtc::CriticalSection playing_lock_; + mutable Mutex playing_lock_; bool playing_ RTC_GUARDED_BY(&playing_lock_) = false; RtcEventLog* const event_log_; @@ -202,12 +221,12 @@ class ChannelReceive : public ChannelReceiveInterface { std::map payload_type_frequencies_; std::unique_ptr rtp_receive_statistics_; - std::unique_ptr _rtpRtcpModule; + std::unique_ptr rtp_rtcp_; const uint32_t remote_ssrc_; // Info for GetSyncInfo is updated on network or worker thread, and queried on // the worker thread. - rtc::CriticalSection sync_info_lock_; + mutable Mutex sync_info_lock_; absl::optional last_received_rtp_timestamp_ RTC_GUARDED_BY(&sync_info_lock_); absl::optional last_received_rtp_system_time_ms_ @@ -223,7 +242,7 @@ class ChannelReceive : public ChannelReceiveInterface { // Timestamp of the audio pulled from NetEq. absl::optional jitter_buffer_playout_timestamp_; - rtc::CriticalSection video_sync_lock_; + mutable Mutex video_sync_lock_; uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(video_sync_lock_); absl::optional playout_timestamp_rtp_time_ms_ RTC_GUARDED_BY(video_sync_lock_); @@ -233,7 +252,7 @@ class ChannelReceive : public ChannelReceiveInterface { absl::optional playout_timestamp_ntp_time_ms_ RTC_GUARDED_BY(video_sync_lock_); - rtc::CriticalSection ts_stats_lock_; + mutable Mutex ts_stats_lock_; std::unique_ptr rtp_ts_wraparound_handler_; // The rtp timestamp of the first played out audio frame. @@ -245,10 +264,10 @@ class ChannelReceive : public ChannelReceiveInterface { // uses ProcessThread* _moduleProcessThreadPtr; AudioDeviceModule* _audioDeviceModulePtr; - float _outputGain RTC_GUARDED_BY(volume_settings_critsect_); + float _outputGain RTC_GUARDED_BY(volume_settings_mutex_); // An associated send channel. 
- rtc::CriticalSection assoc_send_channel_lock_; + mutable Mutex assoc_send_channel_lock_; const ChannelSendInterface* associated_send_channel_ RTC_GUARDED_BY(assoc_send_channel_lock_); @@ -259,6 +278,11 @@ class ChannelReceive : public ChannelReceiveInterface { // E2EE Audio Frame Decryption rtc::scoped_refptr frame_decryptor_; webrtc::CryptoOptions crypto_options_; + + webrtc::AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_; + + rtc::scoped_refptr + frame_transformer_delegate_; }; void ChannelReceive::OnReceivedPayloadData( @@ -278,7 +302,7 @@ void ChannelReceive::OnReceivedPayloadData( } int64_t round_trip_time = 0; - _rtpRtcpModule->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL); + rtp_rtcp_->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL); std::vector nack_list = acm_receiver_.GetNackList(round_trip_time); if (!nack_list.empty()) { @@ -288,6 +312,25 @@ void ChannelReceive::OnReceivedPayloadData( } } +void ChannelReceive::InitFrameTransformerDelegate( + rtc::scoped_refptr frame_transformer) { + RTC_DCHECK(frame_transformer); + RTC_DCHECK(!frame_transformer_delegate_); + + // Pass a callback to ChannelReceive::OnReceivedPayloadData, to be called by + // the delegate to receive transformed audio. + ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback + receive_audio_callback = [this](rtc::ArrayView packet, + const RTPHeader& header) { + OnReceivedPayloadData(packet, header); + }; + frame_transformer_delegate_ = + new rtc::RefCountedObject( + std::move(receive_audio_callback), std::move(frame_transformer), + rtc::Thread::Current()); + frame_transformer_delegate_->Init(); +} + AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( int sample_rate_hz, AudioFrame* audio_frame) { @@ -321,7 +364,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( // scaling/panning, as that applies to the mix operation. // External recipients of the audio (e.g. 
via AudioTrack), will do their // own mixing/dynamic processing. - rtc::CritScope cs(&_callbackCritSect); + MutexLock lock(&callback_mutex_); if (audio_sink_) { AudioSinkInterface::Data data( audio_frame->data(), audio_frame->samples_per_channel_, @@ -333,7 +376,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( float output_gain = 1.0f; { - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); output_gain = _outputGain; } @@ -365,7 +408,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( (GetRtpTimestampRateHz() / 1000); { - rtc::CritScope lock(&ts_stats_lock_); + MutexLock lock(&ts_stats_lock_); // Compute ntp time. audio_frame->ntp_time_ms_ = ntp_estimator_.Estimate(audio_frame->timestamp_); @@ -383,7 +426,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs", acm_receiver_.TargetDelayMs()); const int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs(); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDelayEstimateMs", jitter_buffer_delay + playout_delay_ms_); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverJitterBufferDelayMs", @@ -419,7 +462,8 @@ ChannelReceive::ChannelReceive( rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options) + const webrtc::CryptoOptions& crypto_options, + rtc::scoped_refptr frame_transformer) : event_log_(rtc_event_log), rtp_receive_statistics_(ReceiveStatistics::Create(clock)), remote_ssrc_(remote_ssrc), @@ -440,7 +484,8 @@ ChannelReceive::ChannelReceive( _outputGain(1.0f), associated_send_channel_(nullptr), frame_decryptor_(frame_decryptor), - crypto_options_(crypto_options) { + crypto_options_(crypto_options), + absolute_capture_time_receiver_(clock) { // TODO(nisse): Use 
_moduleProcessThreadPtr instead? module_process_thread_checker_.Detach(); @@ -455,7 +500,7 @@ ChannelReceive::ChannelReceive( _outputAudioLevel.ResetLevelFullRange(); rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc_, true); - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.clock = clock; configuration.audio = true; configuration.receiver_only = true; @@ -464,40 +509,47 @@ ChannelReceive::ChannelReceive( configuration.event_log = event_log_; configuration.local_media_ssrc = local_ssrc; - _rtpRtcpModule = RtpRtcp::Create(configuration); - _rtpRtcpModule->SetSendingMediaStatus(false); - _rtpRtcpModule->SetRemoteSSRC(remote_ssrc_); + if (frame_transformer) + InitFrameTransformerDelegate(std::move(frame_transformer)); - _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration); + rtp_rtcp_->SetSendingMediaStatus(false); + rtp_rtcp_->SetRemoteSSRC(remote_ssrc_); + + _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); // Ensure that RTCP is enabled for the created channel. - _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound); + rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); } ChannelReceive::~ChannelReceive() { RTC_DCHECK(construction_thread_.IsCurrent()); + // Resets the delegate's callback to ChannelReceive::OnReceivedPayloadData. 
+ if (frame_transformer_delegate_) + frame_transformer_delegate_->Reset(); + StopPlayout(); if (_moduleProcessThreadPtr) - _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); + _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get()); } void ChannelReceive::SetSink(AudioSinkInterface* sink) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope cs(&_callbackCritSect); + MutexLock lock(&callback_mutex_); audio_sink_ = sink; } void ChannelReceive::StartPlayout() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&playing_lock_); + MutexLock lock(&playing_lock_); playing_ = true; } void ChannelReceive::StopPlayout() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&playing_lock_); + MutexLock lock(&playing_lock_); playing_ = false; _outputAudioLevel.ResetLevelFullRange(); } @@ -523,7 +575,7 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { int64_t now_ms = rtc::TimeMillis(); { - rtc::CritScope cs(&sync_info_lock_); + MutexLock lock(&sync_info_lock_); last_received_rtp_timestamp_ = packet.Timestamp(); last_received_rtp_system_time_ms_ = now_ms; } @@ -543,6 +595,15 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { RTPHeader header; packet_copy.GetHeader(&header); + // Interpolates absolute capture timestamp RTP header extension. 
+ header.extension.absolute_capture_time = + absolute_capture_time_receiver_.OnReceivePacket( + AbsoluteCaptureTimeReceiver::GetSource(header.ssrc, + header.arrOfCSRCs), + header.timestamp, + rtc::saturated_cast(packet_copy.payload_type_frequency()), + header.extension.absolute_capture_time); + ReceivePacket(packet_copy.data(), packet_copy.size(), header); } @@ -587,8 +648,14 @@ void ChannelReceive::ReceivePacket(const uint8_t* packet, payload_data_length = 0; } - OnReceivedPayloadData( - rtc::ArrayView(payload, payload_data_length), header); + rtc::ArrayView payload_data(payload, payload_data_length); + if (frame_transformer_delegate_) { + // Asynchronously transform the received payload. After the payload is + // transformed, the delegate will call OnReceivedPayloadData to handle it. + frame_transformer_delegate_->Transform(payload_data, header, remote_ssrc_); + } else { + OnReceivedPayloadData(payload_data, header); + } } // May be called on either worker thread or network thread. @@ -597,7 +664,7 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { UpdatePlayoutTimestamp(true, rtc::TimeMillis()); // Deliver RTCP packet to RTP/RTCP module for parsing - _rtpRtcpModule->IncomingRtcpPacket(data, length); + rtp_rtcp_->IncomingRtcpPacket(data, length); int64_t rtt = GetRTT(); if (rtt == 0) { @@ -608,14 +675,14 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { uint32_t ntp_secs = 0; uint32_t ntp_frac = 0; uint32_t rtp_timestamp = 0; - if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, - &rtp_timestamp)) { + if (0 != + rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, &rtp_timestamp)) { // Waiting for RTCP. 
return; } { - rtc::CritScope lock(&ts_stats_lock_); + MutexLock lock(&ts_stats_lock_); ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); } } @@ -637,7 +704,7 @@ double ChannelReceive::GetTotalOutputDuration() const { void ChannelReceive::SetChannelOutputVolumeScaling(float scaling) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); _outputGain = scaling; } @@ -647,14 +714,14 @@ void ChannelReceive::RegisterReceiverCongestionControlObjects( RTC_DCHECK(packet_router); RTC_DCHECK(!packet_router_); constexpr bool remb_candidate = false; - packet_router->AddReceiveRtpModule(_rtpRtcpModule.get(), remb_candidate); + packet_router->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); packet_router_ = packet_router; } void ChannelReceive::ResetReceiverCongestionControlObjects() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); RTC_DCHECK(packet_router_); - packet_router_->RemoveReceiveRtpModule(_rtpRtcpModule.get()); + packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); packet_router_ = nullptr; } @@ -697,7 +764,7 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { // --- Timestamps { - rtc::CritScope lock(&ts_stats_lock_); + MutexLock lock(&ts_stats_lock_); stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; } return stats; @@ -719,20 +786,37 @@ void ChannelReceive::SetNACKStatus(bool enable, int max_packets) { // Called when we are missing one or more packets. 
int ChannelReceive::ResendPackets(const uint16_t* sequence_numbers, int length) { - return _rtpRtcpModule->SendNACK(sequence_numbers, length); + return rtp_rtcp_->SendNACK(sequence_numbers, length); } void ChannelReceive::SetAssociatedSendChannel( const ChannelSendInterface* channel) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&assoc_send_channel_lock_); + MutexLock lock(&assoc_send_channel_lock_); associated_send_channel_ = channel; } -NetworkStatistics ChannelReceive::GetNetworkStatistics() const { +void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + RTC_DCHECK(worker_thread_checker_.IsCurrent()); + // Depending on when the channel is created, the transformer might be set + // twice. Don't replace the delegate if it was already initialized. + if (!frame_transformer || frame_transformer_delegate_) + return; + InitFrameTransformerDelegate(std::move(frame_transformer)); +} + +#ifndef DISABLE_RECORDER +void ChannelReceive::InjectRecorder(Recorder* recorder) { + acm_receiver_.InjectRecorder(recorder); +} +#endif + +NetworkStatistics ChannelReceive::GetNetworkStatistics( + bool get_and_clear_legacy_stats) const { RTC_DCHECK(worker_thread_checker_.IsCurrent()); NetworkStatistics stats; - acm_receiver_.GetNetworkStatistics(&stats); + acm_receiver_.GetNetworkStatistics(&stats, get_and_clear_legacy_stats); return stats; } @@ -746,11 +830,11 @@ AudioDecodingCallStats ChannelReceive::GetDecodingCallStatistics() const { uint32_t ChannelReceive::GetDelayEstimate() const { RTC_DCHECK(worker_thread_checker_.IsCurrent() || module_process_thread_checker_.IsCurrent()); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); return acm_receiver_.FilteredCurrentDelayMs() + playout_delay_ms_; } -void ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) { +bool ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK(module_process_thread_checker_.IsCurrent()); // Limit to 
range accepted by both VoE and ACM, so we're at least getting as // close as possible, instead of failing. @@ -759,14 +843,16 @@ void ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) { if (acm_receiver_.SetMinimumDelay(delay_ms) != 0) { RTC_DLOG(LS_ERROR) << "SetMinimumPlayoutDelay() failed to set min playout delay"; + return false; } + return true; } bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const { RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_); { - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); if (!playout_timestamp_rtp_time_ms_) return false; *rtp_timestamp = playout_timestamp_rtp_; @@ -778,7 +864,7 @@ bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) { RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); playout_timestamp_ntp_ = ntp_timestamp_ms; playout_timestamp_ntp_time_ms_ = time_ms; } @@ -786,7 +872,7 @@ void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, absl::optional ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); if (!playout_timestamp_ntp_ || !playout_timestamp_ntp_time_ms_) return absl::nullopt; @@ -805,13 +891,13 @@ int ChannelReceive::GetBaseMinimumPlayoutDelayMs() const { absl::optional ChannelReceive::GetSyncInfo() const { RTC_DCHECK(module_process_thread_checker_.IsCurrent()); Syncable::Info info; - if (_rtpRtcpModule->RemoteNTP(&info.capture_time_ntp_secs, - &info.capture_time_ntp_frac, nullptr, nullptr, - &info.capture_time_source_clock) != 0) { + if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs, + &info.capture_time_ntp_frac, nullptr, nullptr, + 
&info.capture_time_source_clock) != 0) { return absl::nullopt; } { - rtc::CritScope cs(&sync_info_lock_); + MutexLock lock(&sync_info_lock_); if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { return absl::nullopt; } @@ -845,8 +931,8 @@ void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) { playout_timestamp -= (delay_ms * (GetRtpTimestampRateHz() / 1000)); { - rtc::CritScope lock(&video_sync_lock_); - if (!rtcp) { + MutexLock lock(&video_sync_lock_); + if (!rtcp && playout_timestamp != playout_timestamp_rtp_) { playout_timestamp_rtp_ = playout_timestamp; playout_timestamp_rtp_time_ms_ = now_ms; } @@ -870,12 +956,12 @@ int ChannelReceive::GetRtpTimestampRateHz() const { int64_t ChannelReceive::GetRTT() const { std::vector report_blocks; - _rtpRtcpModule->RemoteRTCPStat(&report_blocks); + rtp_rtcp_->RemoteRTCPStat(&report_blocks); // TODO(nisse): Could we check the return value from the ->RTT() call below, // instead of checking if we have any report blocks? if (report_blocks.empty()) { - rtc::CritScope lock(&assoc_send_channel_lock_); + MutexLock lock(&assoc_send_channel_lock_); // Tries to get RTT from an associated channel. if (!associated_send_channel_) { return 0; @@ -889,8 +975,7 @@ int64_t ChannelReceive::GetRTT() const { int64_t min_rtt = 0; // TODO(nisse): This method computes RTT based on sender reports, even though // a receive stream is not supposed to do that. 
- if (_rtpRtcpModule->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) != - 0) { + if (rtp_rtcp_->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) != 0) { return 0; } return rtt; @@ -914,13 +999,15 @@ std::unique_ptr CreateChannelReceive( rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options) { + const webrtc::CryptoOptions& crypto_options, + rtc::scoped_refptr frame_transformer) { return std::make_unique( clock, module_process_thread, neteq_factory, audio_device_module, rtcp_send_transport, rtc_event_log, local_ssrc, remote_ssrc, jitter_buffer_max_packets, jitter_buffer_fast_playout, jitter_buffer_min_delay_ms, jitter_buffer_enable_rtx_handling, - decoder_factory, codec_pair_id, frame_decryptor, crypto_options); + decoder_factory, codec_pair_id, frame_decryptor, crypto_options, + std::move(frame_transformer)); } } // namespace voe diff --git a/audio/channel_receive.h b/audio/channel_receive.h index 034ac7b059..b6ef4dfeb9 100644 --- a/audio/channel_receive.h +++ b/audio/channel_receive.h @@ -22,11 +22,15 @@ #include "api/call/audio_sink.h" #include "api/call/transport.h" #include "api/crypto/crypto_options.h" +#include "api/frame_transformer_interface.h" #include "api/neteq/neteq_factory.h" #include "api/transport/rtp/rtp_source.h" #include "call/rtp_packet_sink_interface.h" #include "call/syncable.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif #include "system_wrappers/include/clock.h" // TODO(solenberg, nisse): This file contains a few NOLINT marks, to silence @@ -98,12 +102,13 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { virtual double GetTotalOutputDuration() const = 0; // Stats. 
- virtual NetworkStatistics GetNetworkStatistics() const = 0; + virtual NetworkStatistics GetNetworkStatistics( + bool get_and_clear_legacy_stats) const = 0; virtual AudioDecodingCallStats GetDecodingCallStatistics() const = 0; // Audio+Video Sync. virtual uint32_t GetDelayEstimate() const = 0; - virtual void SetMinimumPlayoutDelay(int delay_ms) = 0; + virtual bool SetMinimumPlayoutDelay(int delay_ms) = 0; virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const = 0; virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, @@ -137,6 +142,16 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { // Used for obtaining RTT for a receive-only channel. virtual void SetAssociatedSendChannel( const ChannelSendInterface* channel) = 0; + + // Sets a frame transformer between the depacketizer and the decoder, to + // transform the received frames before decoding them. + virtual void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr + frame_transformer) = 0; + +#ifndef DISABLE_RECORDER + virtual void InjectRecorder(Recorder* recorder) = 0; +#endif }; std::unique_ptr CreateChannelReceive( @@ -155,7 +170,8 @@ std::unique_ptr CreateChannelReceive( rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options); + const webrtc::CryptoOptions& crypto_options, + rtc::scoped_refptr frame_transformer); } // namespace voe } // namespace webrtc diff --git a/audio/channel_receive_frame_transformer_delegate.cc b/audio/channel_receive_frame_transformer_delegate.cc new file mode 100644 index 0000000000..261afbb100 --- /dev/null +++ b/audio/channel_receive_frame_transformer_delegate.cc @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "audio/channel_receive_frame_transformer_delegate.h" + +#include + +#include "rtc_base/buffer.h" +#include "rtc_base/task_utils/to_queued_task.h" + +namespace webrtc { +namespace { + +class TransformableAudioFrame : public TransformableAudioFrameInterface { + public: + TransformableAudioFrame(rtc::ArrayView payload, + const RTPHeader& header, + uint32_t ssrc) + : payload_(payload.data(), payload.size()), + header_(header), + ssrc_(ssrc) {} + ~TransformableAudioFrame() override = default; + rtc::ArrayView GetData() const override { return payload_; } + + void SetData(rtc::ArrayView data) override { + payload_.SetData(data.data(), data.size()); + } + + uint32_t GetTimestamp() const override { return header_.timestamp; } + uint32_t GetSsrc() const override { return ssrc_; } + const RTPHeader& GetHeader() const override { return header_; } + + private: + rtc::Buffer payload_; + RTPHeader header_; + uint32_t ssrc_; +}; +} // namespace + +ChannelReceiveFrameTransformerDelegate::ChannelReceiveFrameTransformerDelegate( + ReceiveFrameCallback receive_frame_callback, + rtc::scoped_refptr frame_transformer, + rtc::Thread* channel_receive_thread) + : receive_frame_callback_(receive_frame_callback), + frame_transformer_(std::move(frame_transformer)), + channel_receive_thread_(channel_receive_thread) {} + +void ChannelReceiveFrameTransformerDelegate::Init() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + frame_transformer_->RegisterTransformedFrameCallback( + rtc::scoped_refptr(this)); +} + +void ChannelReceiveFrameTransformerDelegate::Reset() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + frame_transformer_->UnregisterTransformedFrameCallback(); + frame_transformer_ = nullptr; + receive_frame_callback_ = ReceiveFrameCallback(); +} + +void 
ChannelReceiveFrameTransformerDelegate::Transform( + rtc::ArrayView packet, + const RTPHeader& header, + uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + frame_transformer_->Transform( + std::make_unique(packet, header, ssrc)); +} + +void ChannelReceiveFrameTransformerDelegate::OnTransformedFrame( + std::unique_ptr frame) { + rtc::scoped_refptr delegate = this; + channel_receive_thread_->PostTask(ToQueuedTask( + [delegate = std::move(delegate), frame = std::move(frame)]() mutable { + delegate->ReceiveFrame(std::move(frame)); + })); +} + +void ChannelReceiveFrameTransformerDelegate::ReceiveFrame( + std::unique_ptr frame) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (!receive_frame_callback_) + return; + auto* transformed_frame = static_cast(frame.get()); + receive_frame_callback_(transformed_frame->GetData(), + transformed_frame->GetHeader()); +} +} // namespace webrtc diff --git a/audio/channel_receive_frame_transformer_delegate.h b/audio/channel_receive_frame_transformer_delegate.h new file mode 100644 index 0000000000..73112d10e3 --- /dev/null +++ b/audio/channel_receive_frame_transformer_delegate.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef AUDIO_CHANNEL_RECEIVE_FRAME_TRANSFORMER_DELEGATE_H_ +#define AUDIO_CHANNEL_RECEIVE_FRAME_TRANSFORMER_DELEGATE_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +// Delegates calls to FrameTransformerInterface to transform frames, and to +// ChannelReceive to receive the transformed frames using the +// |receive_frame_callback_| on the |channel_receive_thread_|. +class ChannelReceiveFrameTransformerDelegate : public TransformedFrameCallback { + public: + using ReceiveFrameCallback = + std::function packet, + const RTPHeader& header)>; + ChannelReceiveFrameTransformerDelegate( + ReceiveFrameCallback receive_frame_callback, + rtc::scoped_refptr frame_transformer, + rtc::Thread* channel_receive_thread); + + // Registers |this| as callback for |frame_transformer_|, to get the + // transformed frames. + void Init(); + + // Unregisters and releases the |frame_transformer_| reference, and resets + // |receive_frame_callback_| on |channel_receive_thread_|. Called from + // ChannelReceive destructor to prevent running the callback on a dangling + // channel. + void Reset(); + + // Delegates the call to FrameTransformerInterface::Transform, to transform + // the frame asynchronously. + void Transform(rtc::ArrayView packet, + const RTPHeader& header, + uint32_t ssrc); + + // Implements TransformedFrameCallback. Can be called on any thread. + void OnTransformedFrame( + std::unique_ptr frame) override; + + // Delegates the call to ChannelReceive::OnReceivedPayloadData on the + // |channel_receive_thread_|, by calling |receive_frame_callback_|. 
+ void ReceiveFrame(std::unique_ptr frame) const; + + protected: + ~ChannelReceiveFrameTransformerDelegate() override = default; + + private: + SequenceChecker sequence_checker_; + ReceiveFrameCallback receive_frame_callback_ + RTC_GUARDED_BY(sequence_checker_); + rtc::scoped_refptr frame_transformer_ + RTC_GUARDED_BY(sequence_checker_); + rtc::Thread* channel_receive_thread_; +}; + +} // namespace webrtc +#endif // AUDIO_CHANNEL_RECEIVE_FRAME_TRANSFORMER_DELEGATE_H_ diff --git a/audio/channel_receive_frame_transformer_delegate_unittest.cc b/audio/channel_receive_frame_transformer_delegate_unittest.cc new file mode 100644 index 0000000000..e7f5a454b8 --- /dev/null +++ b/audio/channel_receive_frame_transformer_delegate_unittest.cc @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "audio/channel_receive_frame_transformer_delegate.h" + +#include +#include + +#include "rtc_base/ref_counted_object.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_frame_transformer.h" +#include "test/mock_transformable_frame.h" + +namespace webrtc { +namespace { + +using ::testing::NiceMock; +using ::testing::SaveArg; + +class MockChannelReceive { + public: + MOCK_METHOD(void, + ReceiveFrame, + (rtc::ArrayView packet, const RTPHeader& header)); + + ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback callback() { + return [this](rtc::ArrayView packet, + const RTPHeader& header) { ReceiveFrame(packet, header); }; + } +}; + +// Test that the delegate registers itself with the frame transformer on Init(). 
+TEST(ChannelReceiveFrameTransformerDelegateTest, + RegisterTransformedFrameCallbackOnInit) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject(); + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(), + mock_frame_transformer, nullptr); + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback); + delegate->Init(); +} + +// Test that the delegate unregisters itself from the frame transformer on +// Reset(). +TEST(ChannelReceiveFrameTransformerDelegateTest, + UnregisterTransformedFrameCallbackOnReset) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject(); + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(), + mock_frame_transformer, nullptr); + EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback); + delegate->Reset(); +} + +// Test that when the delegate receives a transformed frame from the frame +// transformer, it passes it to the channel using the ReceiveFrameCallback. 
+TEST(ChannelReceiveFrameTransformerDelegateTest, + TransformRunsChannelReceiveCallback) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + MockChannelReceive mock_channel; + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + mock_channel.callback(), mock_frame_transformer, + rtc::Thread::Current()); + rtc::scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .WillOnce(SaveArg<0>(&callback)); + delegate->Init(); + ASSERT_TRUE(callback); + + const uint8_t data[] = {1, 2, 3, 4}; + rtc::ArrayView packet(data, sizeof(data)); + RTPHeader header; + EXPECT_CALL(mock_channel, ReceiveFrame); + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&callback](std::unique_ptr frame) { + callback->OnTransformedFrame(std::move(frame)); + }); + delegate->Transform(packet, header, 1111 /*ssrc*/); + rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); +} + +// Test that if the delegate receives a transformed frame after it has been +// reset, it does not run the ReceiveFrameCallback, as the channel is destroyed +// after resetting the delegate. 
+TEST(ChannelReceiveFrameTransformerDelegateTest, + OnTransformedDoesNotRunChannelReceiveCallbackAfterReset) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + MockChannelReceive mock_channel; + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + mock_channel.callback(), mock_frame_transformer, + rtc::Thread::Current()); + + delegate->Reset(); + EXPECT_CALL(mock_channel, ReceiveFrame).Times(0); + delegate->OnTransformedFrame(std::make_unique()); + rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); +} + +} // namespace +} // namespace webrtc diff --git a/audio/channel_send.cc b/audio/channel_send.cc index dd866f3f7b..6b326f85ae 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -21,6 +21,7 @@ #include "api/call/transport.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "audio/channel_send_frame_transformer_delegate.h" #include "audio/utility/audio_frame_operations.h" #include "call/rtp_transport_controller_send_interface.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" @@ -28,6 +29,7 @@ #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_processing/rms_level.h" #include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" @@ -37,6 +39,7 @@ #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" @@ -53,7 +56,6 @@ constexpr int64_t kMaxRetransmissionWindowMs = 1000; constexpr int64_t kMinRetransmissionWindowMs = 30; class RtpPacketSenderProxy; -class TransportFeedbackProxy; class TransportSequenceNumberProxy; class VoERtcpObserver; @@ -68,7 +70,6 @@ 
class ChannelSend : public ChannelSendInterface, ChannelSend(Clock* clock, TaskQueueFactory* task_queue_factory, ProcessThread* module_process_thread, - OverheadObserver* overhead_observer, Transport* rtp_transport, RtcpRttStats* rtcp_rtt_stats, RtcEventLog* rtc_event_log, @@ -76,7 +77,9 @@ class ChannelSend : public ChannelSendInterface, const webrtc::CryptoOptions& crypto_options, bool extmap_allow_mixed, int rtcp_report_interval_ms, - uint32_t ssrc); + uint32_t ssrc, + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer); ~ChannelSend() override; @@ -105,7 +108,7 @@ class ChannelSend : public ChannelSendInterface, ANAStats GetANAStatistics() const override; // Used by AudioSendStream. - RtpRtcp* GetRtpRtcp() const override; + RtpRtcpInterface* GetRtpRtcp() const override; void RegisterCngPayloadType(int payload_type, int payload_frequency) override; @@ -142,6 +145,16 @@ class ChannelSend : public ChannelSendInterface, void SetFrameEncryptor( rtc::scoped_refptr frame_encryptor) override; + // Sets a frame transformer between encoder and packetizer, to transform + // encoded frames before sending them out the network. + void SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr frame_transformer) + override; + +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder) override; +#endif + private: // From AudioPacketizationCallback in the ACM int32_t SendData(AudioFrameType frameType, @@ -163,6 +176,9 @@ class ChannelSend : public ChannelSendInterface, void OnReceivedRtt(int64_t rtt_ms); + void InitFrameTransformerDelegate( + rtc::scoped_refptr frame_transformer); + // Thread checkers document and lock usage of some methods on voe::Channel to // specific threads we know about. The goal is to eventually split up // voe::Channel into parts with single-threaded semantics, and thereby reduce @@ -175,13 +191,13 @@ class ChannelSend : public ChannelSendInterface, // audio thread to another, but access is still sequential. 
rtc::RaceChecker audio_thread_race_checker_; - rtc::CriticalSection volume_settings_critsect_; + mutable Mutex volume_settings_mutex_; bool sending_ RTC_GUARDED_BY(&worker_thread_checker_) = false; RtcEventLog* const event_log_; - std::unique_ptr _rtpRtcpModule; + std::unique_ptr rtp_rtcp_; std::unique_ptr rtp_sender_audio_; std::unique_ptr audio_coding_; @@ -190,7 +206,7 @@ class ChannelSend : public ChannelSendInterface, // uses ProcessThread* const _moduleProcessThreadPtr; RmsLevel rms_level_ RTC_GUARDED_BY(encoder_queue_); - bool input_mute_ RTC_GUARDED_BY(volume_settings_critsect_); + bool input_mute_ RTC_GUARDED_BY(volume_settings_mutex_); bool previous_frame_muted_ RTC_GUARDED_BY(encoder_queue_); // VoeRTP_RTCP // TODO(henrika): can today be accessed on the main thread and on the @@ -202,7 +218,7 @@ class ChannelSend : public ChannelSendInterface, PacketRouter* packet_router_ RTC_GUARDED_BY(&worker_thread_checker_) = nullptr; - const std::unique_ptr feedback_observer_proxy_; + TransportFeedbackObserver* const feedback_observer_; const std::unique_ptr rtp_packet_pacer_proxy_; const std::unique_ptr retransmission_rate_limiter_; @@ -217,8 +233,14 @@ class ChannelSend : public ChannelSendInterface, // E2EE Frame Encryption Options const webrtc::CryptoOptions crypto_options_; - rtc::CriticalSection bitrate_crit_section_; - int configured_bitrate_bps_ RTC_GUARDED_BY(bitrate_crit_section_) = 0; + // Delegates calls to a frame transformer to transform audio, and + // receives callbacks with the transformed frames; delegates calls to + // ChannelSend::SendRtpAudio to send the transformed audio. + rtc::scoped_refptr + frame_transformer_delegate_ RTC_GUARDED_BY(encoder_queue_); + + mutable Mutex bitrate_mutex_; + int configured_bitrate_bps_ RTC_GUARDED_BY(bitrate_mutex_) = 0; // Defined last to ensure that there are no running tasks when the other // members are destroyed. 
@@ -227,63 +249,26 @@ class ChannelSend : public ChannelSendInterface, const int kTelephoneEventAttenuationdB = 10; -class TransportFeedbackProxy : public TransportFeedbackObserver { - public: - TransportFeedbackProxy() : feedback_observer_(nullptr) { - pacer_thread_.Detach(); - network_thread_.Detach(); - } - - void SetTransportFeedbackObserver( - TransportFeedbackObserver* feedback_observer) { - RTC_DCHECK(thread_checker_.IsCurrent()); - rtc::CritScope lock(&crit_); - feedback_observer_ = feedback_observer; - } - - // Implements TransportFeedbackObserver. - void OnAddPacket(const RtpPacketSendInfo& packet_info) override { - RTC_DCHECK(pacer_thread_.IsCurrent()); - rtc::CritScope lock(&crit_); - if (feedback_observer_) - feedback_observer_->OnAddPacket(packet_info); - } - - void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override { - RTC_DCHECK(network_thread_.IsCurrent()); - rtc::CritScope lock(&crit_); - if (feedback_observer_) - feedback_observer_->OnTransportFeedback(feedback); - } - - private: - rtc::CriticalSection crit_; - rtc::ThreadChecker thread_checker_; - rtc::ThreadChecker pacer_thread_; - rtc::ThreadChecker network_thread_; - TransportFeedbackObserver* feedback_observer_ RTC_GUARDED_BY(&crit_); -}; - class RtpPacketSenderProxy : public RtpPacketSender { public: RtpPacketSenderProxy() : rtp_packet_pacer_(nullptr) {} void SetPacketPacer(RtpPacketSender* rtp_packet_pacer) { RTC_DCHECK(thread_checker_.IsCurrent()); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); rtp_packet_pacer_ = rtp_packet_pacer; } void EnqueuePackets( std::vector> packets) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); rtp_packet_pacer_->EnqueuePackets(std::move(packets)); } private: rtc::ThreadChecker thread_checker_; - rtc::CriticalSection crit_; - RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&crit_); + Mutex mutex_; + RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&mutex_); }; class VoERtcpObserver : public 
RtcpBandwidthObserver { @@ -293,12 +278,12 @@ class VoERtcpObserver : public RtcpBandwidthObserver { ~VoERtcpObserver() override {} void SetBandwidthObserver(RtcpBandwidthObserver* bandwidth_observer) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); bandwidth_observer_ = bandwidth_observer; } void OnReceivedEstimatedBitrate(uint32_t bitrate) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (bandwidth_observer_) { bandwidth_observer_->OnReceivedEstimatedBitrate(bitrate); } @@ -308,7 +293,7 @@ class VoERtcpObserver : public RtcpBandwidthObserver { int64_t rtt, int64_t now_ms) override { { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (bandwidth_observer_) { bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, rtt, now_ms); @@ -356,8 +341,8 @@ class VoERtcpObserver : public RtcpBandwidthObserver { ChannelSend* owner_; // Maps remote side ssrc to extended highest sequence number received. std::map extended_max_sequence_number_; - rtc::CriticalSection crit_; - RtcpBandwidthObserver* bandwidth_observer_ RTC_GUARDED_BY(crit_); + Mutex mutex_; + RtcpBandwidthObserver* bandwidth_observer_ RTC_GUARDED_BY(mutex_); }; int32_t ChannelSend::SendData(AudioFrameType frameType, @@ -368,6 +353,15 @@ int32_t ChannelSend::SendData(AudioFrameType frameType, int64_t absolute_capture_timestamp_ms) { RTC_DCHECK_RUN_ON(&encoder_queue_); rtc::ArrayView payload(payloadData, payloadSize); + if (frame_transformer_delegate_) { + // Asynchronously transform the payload before sending it. After the payload + // is transformed, the delegate will call SendRtpAudio to send it. 
+ frame_transformer_delegate_->Transform( + frameType, payloadType, rtp_timestamp, rtp_rtcp_->StartTimestamp(), + payloadData, payloadSize, absolute_capture_timestamp_ms, + rtp_rtcp_->SSRC()); + return 0; + } return SendRtpAudio(frameType, payloadType, rtp_timestamp, payload, absolute_capture_timestamp_ms); } @@ -402,7 +396,7 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // Encrypt the audio payload into the buffer. size_t bytes_written = 0; int encrypt_status = frame_encryptor_->Encrypt( - cricket::MEDIA_TYPE_AUDIO, _rtpRtcpModule->SSRC(), + cricket::MEDIA_TYPE_AUDIO, rtp_rtcp_->SSRC(), /*additional_data=*/nullptr, payload, encrypted_audio_payload, &bytes_written); if (encrypt_status != 0) { @@ -424,12 +418,12 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // Push data from ACM to RTP/RTCP-module to deliver audio frame for // packetization. - if (!_rtpRtcpModule->OnSendingRtpFrame(rtp_timestamp, - // Leaving the time when this frame was - // received from the capture device as - // undefined for voice for now. - -1, payloadType, - /*force_sender_report=*/false)) { + if (!rtp_rtcp_->OnSendingRtpFrame(rtp_timestamp, + // Leaving the time when this frame was + // received from the capture device as + // undefined for voice for now. + -1, payloadType, + /*force_sender_report=*/false)) { return -1; } @@ -441,9 +435,8 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // This call will trigger Transport::SendPacket() from the RTP/RTCP module. 
if (!rtp_sender_audio_->SendAudio( - frameType, payloadType, - rtp_timestamp + _rtpRtcpModule->StartTimestamp(), payload.data(), - payload.size(), absolute_capture_timestamp_ms)) { + frameType, payloadType, rtp_timestamp + rtp_rtcp_->StartTimestamp(), + payload.data(), payload.size(), absolute_capture_timestamp_ms)) { RTC_DLOG(LS_ERROR) << "ChannelSend::SendData() failed to send data to RTP/RTCP module"; return -1; @@ -452,18 +445,20 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, return 0; } -ChannelSend::ChannelSend(Clock* clock, - TaskQueueFactory* task_queue_factory, - ProcessThread* module_process_thread, - OverheadObserver* overhead_observer, - Transport* rtp_transport, - RtcpRttStats* rtcp_rtt_stats, - RtcEventLog* rtc_event_log, - FrameEncryptorInterface* frame_encryptor, - const webrtc::CryptoOptions& crypto_options, - bool extmap_allow_mixed, - int rtcp_report_interval_ms, - uint32_t ssrc) +ChannelSend::ChannelSend( + Clock* clock, + TaskQueueFactory* task_queue_factory, + ProcessThread* module_process_thread, + Transport* rtp_transport, + RtcpRttStats* rtcp_rtt_stats, + RtcEventLog* rtc_event_log, + FrameEncryptorInterface* frame_encryptor, + const webrtc::CryptoOptions& crypto_options, + bool extmap_allow_mixed, + int rtcp_report_interval_ms, + uint32_t ssrc, + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer) : event_log_(rtc_event_log), _timeStamp(0), // This is just an offset, RTP module will add it's own // random offset @@ -472,7 +467,7 @@ ChannelSend::ChannelSend(Clock* clock, previous_frame_muted_(false), _includeAudioLevelIndication(false), rtcp_observer_(new VoERtcpObserver(this)), - feedback_observer_proxy_(new TransportFeedbackProxy()), + feedback_observer_(feedback_observer), rtp_packet_pacer_proxy_(new RtpPacketSenderProxy()), retransmission_rate_limiter_( new RateLimiter(clock, kMaxRetransmissionWindowMs)), @@ -486,10 +481,9 @@ ChannelSend::ChannelSend(Clock* clock, 
audio_coding_.reset(AudioCodingModule::Create(AudioCodingModule::Config())); - RtpRtcp::Configuration configuration; - configuration.overhead_observer = overhead_observer; + RtpRtcpInterface::Configuration configuration; configuration.bandwidth_callback = rtcp_observer_.get(); - configuration.transport_feedback_callback = feedback_observer_proxy_.get(); + configuration.transport_feedback_callback = feedback_observer_; configuration.clock = (clock ? clock : Clock::GetRealTimeClock()); configuration.audio = true; configuration.outgoing_transport = rtp_transport; @@ -505,30 +499,36 @@ ChannelSend::ChannelSend(Clock* clock, configuration.local_media_ssrc = ssrc; - _rtpRtcpModule = RtpRtcp::Create(configuration); - _rtpRtcpModule->SetSendingMediaStatus(false); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration); + rtp_rtcp_->SetSendingMediaStatus(false); - rtp_sender_audio_ = std::make_unique( - configuration.clock, _rtpRtcpModule->RtpSender()); + rtp_sender_audio_ = std::make_unique(configuration.clock, + rtp_rtcp_->RtpSender()); - _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE); + _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); // Ensure that RTCP is enabled by default for the created channel. - _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound); + rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); int error = audio_coding_->RegisterTransportCallback(this); RTC_DCHECK_EQ(0, error); + if (frame_transformer) + InitFrameTransformerDelegate(std::move(frame_transformer)); } ChannelSend::~ChannelSend() { RTC_DCHECK(construction_thread_.IsCurrent()); + // Resets the delegate's callback to ChannelSend::SendRtpAudio. 
+ if (frame_transformer_delegate_) + frame_transformer_delegate_->Reset(); + StopSend(); int error = audio_coding_->RegisterTransportCallback(NULL); RTC_DCHECK_EQ(0, error); if (_moduleProcessThreadPtr) - _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); + _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get()); } void ChannelSend::StartSend() { @@ -536,8 +536,8 @@ void ChannelSend::StartSend() { RTC_DCHECK(!sending_); sending_ = true; - _rtpRtcpModule->SetSendingMediaStatus(true); - int ret = _rtpRtcpModule->SetSendingStatus(true); + rtp_rtcp_->SetSendingMediaStatus(true); + int ret = rtp_rtcp_->SetSendingStatus(true); RTC_DCHECK_EQ(0, ret); // It is now OK to start processing on the encoder task queue. encoder_queue_.PostTask([this] { @@ -563,10 +563,10 @@ void ChannelSend::StopSend() { // Reset sending SSRC and sequence number and triggers direct transmission // of RTCP BYE - if (_rtpRtcpModule->SetSendingStatus(false) == -1) { + if (rtp_rtcp_->SetSendingStatus(false) == -1) { RTC_DLOG(LS_ERROR) << "StartSend() RTP/RTCP failed to stop sending"; } - _rtpRtcpModule->SetSendingMediaStatus(false); + rtp_rtcp_->SetSendingMediaStatus(false); } void ChannelSend::SetEncoder(int payload_type, @@ -577,8 +577,8 @@ void ChannelSend::SetEncoder(int payload_type, // The RTP/RTCP module needs to know the RTP timestamp rate (i.e. clockrate) // as well as some other things, so we collect this info and send it along. - _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, - encoder->RtpTimestampRateHz()); + rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, + encoder->RtpTimestampRateHz()); rtp_sender_audio_->RegisterAudioPayload("audio", payload_type, encoder->RtpTimestampRateHz(), encoder->NumChannels(), 0); @@ -611,7 +611,7 @@ void ChannelSend::OnBitrateAllocation(BitrateAllocationUpdate update) { // rules. 
// RTC_DCHECK(worker_thread_checker_.IsCurrent() || // module_process_thread_checker_.IsCurrent()); - rtc::CritScope lock(&bitrate_crit_section_); + MutexLock lock(&bitrate_mutex_); CallEncoder([&](AudioEncoder* encoder) { encoder->OnReceivedUplinkAllocation(update); @@ -621,7 +621,7 @@ void ChannelSend::OnBitrateAllocation(BitrateAllocationUpdate update) { } int ChannelSend::GetBitrate() const { - rtc::CritScope lock(&bitrate_crit_section_); + MutexLock lock(&bitrate_mutex_); return configured_bitrate_bps_; } @@ -632,8 +632,10 @@ void ChannelSend::OnUplinkPacketLossRate(float packet_loss_rate) { } void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + // Deliver RTCP packet to RTP/RTCP module for parsing - _rtpRtcpModule->IncomingRtcpPacket(data, length); + rtp_rtcp_->IncomingRtcpPacket(data, length); int64_t rtt = GetRTT(); if (rtt == 0) { @@ -654,12 +656,12 @@ void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) { void ChannelSend::SetInputMute(bool enable) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); input_mute_ = enable; } bool ChannelSend::InputMute() const { - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); return input_mute_; } @@ -682,7 +684,7 @@ bool ChannelSend::SendTelephoneEventOutband(int event, int duration_ms) { void ChannelSend::RegisterCngPayloadType(int payload_type, int payload_frequency) { - _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency); + rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency); rtp_sender_audio_->RegisterAudioPayload("CN", payload_type, payload_frequency, 1, 0); } @@ -692,7 +694,7 @@ void ChannelSend::SetSendTelephoneEventPayloadType(int payload_type, RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK_LE(0, payload_type); RTC_DCHECK_GE(127, payload_type); 
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency); + rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency); rtp_sender_audio_->RegisterAudioPayload("telephone-event", payload_type, payload_frequency, 0, 0); } @@ -701,9 +703,9 @@ void ChannelSend::SetSendAudioLevelIndicationStatus(bool enable, int id) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); _includeAudioLevelIndication = enable; if (enable) { - _rtpRtcpModule->RegisterRtpHeaderExtension(AudioLevel::kUri, id); + rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevel::kUri, id); } else { - _rtpRtcpModule->DeregisterSendRtpHeaderExtension(AudioLevel::kUri); + rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevel::kUri); } } @@ -712,31 +714,25 @@ void ChannelSend::RegisterSenderCongestionControlObjects( RtcpBandwidthObserver* bandwidth_observer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RtpPacketSender* rtp_packet_pacer = transport->packet_sender(); - TransportFeedbackObserver* transport_feedback_observer = - transport->transport_feedback_observer(); PacketRouter* packet_router = transport->packet_router(); RTC_DCHECK(rtp_packet_pacer); - RTC_DCHECK(transport_feedback_observer); RTC_DCHECK(packet_router); RTC_DCHECK(!packet_router_); rtcp_observer_->SetBandwidthObserver(bandwidth_observer); - feedback_observer_proxy_->SetTransportFeedbackObserver( - transport_feedback_observer); rtp_packet_pacer_proxy_->SetPacketPacer(rtp_packet_pacer); - _rtpRtcpModule->SetStorePacketsStatus(true, 600); + rtp_rtcp_->SetStorePacketsStatus(true, 600); constexpr bool remb_candidate = false; - packet_router->AddSendRtpModule(_rtpRtcpModule.get(), remb_candidate); + packet_router->AddSendRtpModule(rtp_rtcp_.get(), remb_candidate); packet_router_ = packet_router; } void ChannelSend::ResetSenderCongestionControlObjects() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(packet_router_); - _rtpRtcpModule->SetStorePacketsStatus(false, 600); + rtp_rtcp_->SetStorePacketsStatus(false, 
600); rtcp_observer_->SetBandwidthObserver(nullptr); - feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr); - packet_router_->RemoveSendRtpModule(_rtpRtcpModule.get()); + packet_router_->RemoveSendRtpModule(rtp_rtcp_.get()); packet_router_ = nullptr; rtp_packet_pacer_proxy_->SetPacketPacer(nullptr); } @@ -745,7 +741,7 @@ void ChannelSend::SetRTCP_CNAME(absl::string_view c_name) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Note: SetCNAME() accepts a c string of length at most 255. const std::string c_name_limited(c_name.substr(0, 255)); - int ret = _rtpRtcpModule->SetCNAME(c_name_limited.c_str()) != 0; + int ret = rtp_rtcp_->SetCNAME(c_name_limited.c_str()) != 0; RTC_DCHECK_EQ(0, ret) << "SetRTCP_CNAME() failed to set RTCP CNAME"; } @@ -756,7 +752,7 @@ std::vector ChannelSend::GetRemoteRTCPReportBlocks() const { // report block according to RFC 3550. std::vector rtcp_report_blocks; - int ret = _rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks); + int ret = rtp_rtcp_->RemoteRTCPStat(&rtcp_report_blocks); RTC_DCHECK_EQ(0, ret); std::vector report_blocks; @@ -785,7 +781,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { StreamDataCounters rtp_stats; StreamDataCounters rtx_stats; - _rtpRtcpModule->GetSendStreamDataCounters(&rtp_stats, &rtx_stats); + rtp_rtcp_->GetSendStreamDataCounters(&rtp_stats, &rtx_stats); stats.payload_bytes_sent = rtp_stats.transmitted.payload_bytes + rtx_stats.transmitted.payload_bytes; stats.header_and_padding_bytes_sent = @@ -798,7 +794,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { stats.packetsSent = rtp_stats.transmitted.packets + rtx_stats.transmitted.packets; stats.retransmitted_packets_sent = rtp_stats.retransmitted.packets; - stats.report_block_datas = _rtpRtcpModule->GetLatestReportBlockData(); + stats.report_block_datas = rtp_rtcp_->GetLatestReportBlockData(); return stats; } @@ -863,14 +859,14 @@ ANAStats ChannelSend::GetANAStatistics() const { return audio_coding_->GetANAStats(); } 
-RtpRtcp* ChannelSend::GetRtpRtcp() const { +RtpRtcpInterface* ChannelSend::GetRtpRtcp() const { RTC_DCHECK(module_process_thread_checker_.IsCurrent()); - return _rtpRtcpModule.get(); + return rtp_rtcp_.get(); } int64_t ChannelSend::GetRTT() const { std::vector report_blocks; - _rtpRtcpModule->RemoteRTCPStat(&report_blocks); + rtp_rtcp_->RemoteRTCPStat(&report_blocks); if (report_blocks.empty()) { return 0; @@ -882,8 +878,8 @@ int64_t ChannelSend::GetRTT() const { int64_t min_rtt = 0; // We don't know in advance the remote ssrc used by the other end's receiver // reports, so use the SSRC of the first report block for calculating the RTT. - if (_rtpRtcpModule->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt, - &min_rtt, &max_rtt) != 0) { + if (rtp_rtcp_->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt, &min_rtt, + &max_rtt) != 0) { return 0; } return rtt; @@ -898,19 +894,60 @@ void ChannelSend::SetFrameEncryptor( }); } +void ChannelSend::SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr frame_transformer) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + if (!frame_transformer) + return; + + encoder_queue_.PostTask( + [this, frame_transformer = std::move(frame_transformer)]() mutable { + RTC_DCHECK_RUN_ON(&encoder_queue_); + InitFrameTransformerDelegate(std::move(frame_transformer)); + }); +} + +#ifndef DISABLE_RECORDER +void ChannelSend::InjectRecorder(Recorder* recorder) { + audio_coding_->InjectRecorder(recorder); +} +#endif + void ChannelSend::OnReceivedRtt(int64_t rtt_ms) { // Invoke audio encoders OnReceivedRtt(). CallEncoder( [rtt_ms](AudioEncoder* encoder) { encoder->OnReceivedRtt(rtt_ms); }); } +void ChannelSend::InitFrameTransformerDelegate( + rtc::scoped_refptr frame_transformer) { + RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK(frame_transformer); + RTC_DCHECK(!frame_transformer_delegate_); + + // Pass a callback to ChannelSend::SendRtpAudio, to be called by the delegate + // to send the transformed audio. 
+ ChannelSendFrameTransformerDelegate::SendFrameCallback send_audio_callback = + [this](AudioFrameType frameType, uint8_t payloadType, + uint32_t rtp_timestamp, rtc::ArrayView payload, + int64_t absolute_capture_timestamp_ms) { + RTC_DCHECK_RUN_ON(&encoder_queue_); + return SendRtpAudio(frameType, payloadType, rtp_timestamp, payload, + absolute_capture_timestamp_ms); + }; + frame_transformer_delegate_ = + new rtc::RefCountedObject( + std::move(send_audio_callback), std::move(frame_transformer), + &encoder_queue_); + frame_transformer_delegate_->Init(); +} + } // namespace std::unique_ptr CreateChannelSend( Clock* clock, TaskQueueFactory* task_queue_factory, ProcessThread* module_process_thread, - OverheadObserver* overhead_observer, Transport* rtp_transport, RtcpRttStats* rtcp_rtt_stats, RtcEventLog* rtc_event_log, @@ -918,11 +955,14 @@ std::unique_ptr CreateChannelSend( const webrtc::CryptoOptions& crypto_options, bool extmap_allow_mixed, int rtcp_report_interval_ms, - uint32_t ssrc) { + uint32_t ssrc, + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer) { return std::make_unique( - clock, task_queue_factory, module_process_thread, overhead_observer, - rtp_transport, rtcp_rtt_stats, rtc_event_log, frame_encryptor, - crypto_options, extmap_allow_mixed, rtcp_report_interval_ms, ssrc); + clock, task_queue_factory, module_process_thread, rtp_transport, + rtcp_rtt_stats, rtc_event_log, frame_encryptor, crypto_options, + extmap_allow_mixed, rtcp_report_interval_ms, ssrc, + std::move(frame_transformer), feedback_observer); } } // namespace voe diff --git a/audio/channel_send.h b/audio/channel_send.h index 0fe434b684..73f8ea7487 100644 --- a/audio/channel_send.h +++ b/audio/channel_send.h @@ -18,10 +18,14 @@ #include "api/audio/audio_frame.h" #include "api/audio_codecs/audio_encoder.h" #include "api/crypto/crypto_options.h" +#include "api/frame_transformer_interface.h" #include "api/function_view.h" #include 
"api/task_queue/task_queue_factory.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif #include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_audio.h" namespace webrtc { @@ -29,7 +33,6 @@ namespace webrtc { class FrameEncryptorInterface; class ProcessThread; class RtcEventLog; -class RtpRtcp; class RtpTransportControllerSendInterface; struct CallSendStatistics { @@ -96,7 +99,7 @@ class ChannelSendInterface { virtual void ProcessAndEncodeAudio( std::unique_ptr audio_frame) = 0; - virtual RtpRtcp* GetRtpRtcp() const = 0; + virtual RtpRtcpInterface* GetRtpRtcp() const = 0; // In RTP we currently rely on RTCP packets (|ReceivedRTCPPacket|) to inform // about RTT. @@ -115,13 +118,22 @@ class ChannelSendInterface { // E2EE Custom Audio Frame Encryption (Optional) virtual void SetFrameEncryptor( rtc::scoped_refptr frame_encryptor) = 0; + + // Sets a frame transformer between encoder and packetizer, to transform + // encoded frames before sending them out the network. 
+ virtual void SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr + frame_transformer) = 0; + +#ifndef DISABLE_RECORDER + virtual void InjectRecorder(Recorder* recorder) = 0; +#endif }; std::unique_ptr CreateChannelSend( Clock* clock, TaskQueueFactory* task_queue_factory, ProcessThread* module_process_thread, - OverheadObserver* overhead_observer, Transport* rtp_transport, RtcpRttStats* rtcp_rtt_stats, RtcEventLog* rtc_event_log, @@ -129,7 +141,9 @@ std::unique_ptr CreateChannelSend( const webrtc::CryptoOptions& crypto_options, bool extmap_allow_mixed, int rtcp_report_interval_ms, - uint32_t ssrc); + uint32_t ssrc, + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer); } // namespace voe } // namespace webrtc diff --git a/audio/channel_send_frame_transformer_delegate.cc b/audio/channel_send_frame_transformer_delegate.cc new file mode 100644 index 0000000000..72a459d897 --- /dev/null +++ b/audio/channel_send_frame_transformer_delegate.cc @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "audio/channel_send_frame_transformer_delegate.h" + +#include + +namespace webrtc { +namespace { + +class TransformableAudioFrame : public TransformableFrameInterface { + public: + TransformableAudioFrame(AudioFrameType frame_type, + uint8_t payload_type, + uint32_t rtp_timestamp, + uint32_t rtp_start_timestamp, + const uint8_t* payload_data, + size_t payload_size, + int64_t absolute_capture_timestamp_ms, + uint32_t ssrc) + : frame_type_(frame_type), + payload_type_(payload_type), + rtp_timestamp_(rtp_timestamp), + rtp_start_timestamp_(rtp_start_timestamp), + payload_(payload_data, payload_size), + absolute_capture_timestamp_ms_(absolute_capture_timestamp_ms), + ssrc_(ssrc) {} + ~TransformableAudioFrame() override = default; + rtc::ArrayView GetData() const override { return payload_; } + void SetData(rtc::ArrayView data) override { + payload_.SetData(data.data(), data.size()); + } + uint32_t GetTimestamp() const override { + return rtp_timestamp_ + rtp_start_timestamp_; + } + uint32_t GetStartTimestamp() const { return rtp_start_timestamp_; } + uint32_t GetSsrc() const override { return ssrc_; } + + AudioFrameType GetFrameType() const { return frame_type_; } + uint8_t GetPayloadType() const { return payload_type_; } + int64_t GetAbsoluteCaptureTimestampMs() const { + return absolute_capture_timestamp_ms_; + } + + private: + AudioFrameType frame_type_; + uint8_t payload_type_; + uint32_t rtp_timestamp_; + uint32_t rtp_start_timestamp_; + rtc::Buffer payload_; + int64_t absolute_capture_timestamp_ms_; + uint32_t ssrc_; +}; +} // namespace + +ChannelSendFrameTransformerDelegate::ChannelSendFrameTransformerDelegate( + SendFrameCallback send_frame_callback, + rtc::scoped_refptr frame_transformer, + rtc::TaskQueue* encoder_queue) + : send_frame_callback_(send_frame_callback), + frame_transformer_(std::move(frame_transformer)), + encoder_queue_(encoder_queue) {} + +void ChannelSendFrameTransformerDelegate::Init() { + 
frame_transformer_->RegisterTransformedFrameCallback( + rtc::scoped_refptr(this)); +} + +void ChannelSendFrameTransformerDelegate::Reset() { + frame_transformer_->UnregisterTransformedFrameCallback(); + frame_transformer_ = nullptr; + + MutexLock lock(&send_lock_); + send_frame_callback_ = SendFrameCallback(); +} + +void ChannelSendFrameTransformerDelegate::Transform( + AudioFrameType frame_type, + uint8_t payload_type, + uint32_t rtp_timestamp, + uint32_t rtp_start_timestamp, + const uint8_t* payload_data, + size_t payload_size, + int64_t absolute_capture_timestamp_ms, + uint32_t ssrc) { + frame_transformer_->Transform(std::make_unique( + frame_type, payload_type, rtp_timestamp, rtp_start_timestamp, + payload_data, payload_size, absolute_capture_timestamp_ms, ssrc)); +} + +void ChannelSendFrameTransformerDelegate::OnTransformedFrame( + std::unique_ptr frame) { + MutexLock lock(&send_lock_); + if (!send_frame_callback_) + return; + rtc::scoped_refptr delegate = this; + encoder_queue_->PostTask( + [delegate = std::move(delegate), frame = std::move(frame)]() mutable { + delegate->SendFrame(std::move(frame)); + }); +} + +void ChannelSendFrameTransformerDelegate::SendFrame( + std::unique_ptr frame) const { + MutexLock lock(&send_lock_); + RTC_DCHECK_RUN_ON(encoder_queue_); + if (!send_frame_callback_) + return; + auto* transformed_frame = static_cast(frame.get()); + send_frame_callback_(transformed_frame->GetFrameType(), + transformed_frame->GetPayloadType(), + transformed_frame->GetTimestamp() - + transformed_frame->GetStartTimestamp(), + transformed_frame->GetData(), + transformed_frame->GetAbsoluteCaptureTimestampMs()); +} + +} // namespace webrtc diff --git a/audio/channel_send_frame_transformer_delegate.h b/audio/channel_send_frame_transformer_delegate.h new file mode 100644 index 0000000000..531d1bc110 --- /dev/null +++ b/audio/channel_send_frame_transformer_delegate.h @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef AUDIO_CHANNEL_SEND_FRAME_TRANSFORMER_DELEGATE_H_ +#define AUDIO_CHANNEL_SEND_FRAME_TRANSFORMER_DELEGATE_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" +#include "rtc_base/buffer.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_queue.h" + +namespace webrtc { + +// Delegates calls to FrameTransformerInterface to transform frames, and to +// ChannelSend to send the transformed frames using |send_frame_callback_| on +// the |encoder_queue_|. +// OnTransformedFrame() can be called from any thread, the delegate ensures +// thread-safe access to the ChannelSend callback. +class ChannelSendFrameTransformerDelegate : public TransformedFrameCallback { + public: + using SendFrameCallback = + std::function payload, + int64_t absolute_capture_timestamp_ms)>; + ChannelSendFrameTransformerDelegate( + SendFrameCallback send_frame_callback, + rtc::scoped_refptr frame_transformer, + rtc::TaskQueue* encoder_queue); + + // Registers |this| as callback for |frame_transformer_|, to get the + // transformed frames. + void Init(); + + // Unregisters and releases the |frame_transformer_| reference, and resets + // |send_frame_callback_| under lock. Called from ChannelSend destructor to + // prevent running the callback on a dangling channel. + void Reset(); + + // Delegates the call to FrameTransformerInterface::TransformFrame, to + // transform the frame asynchronously. 
+ void Transform(AudioFrameType frame_type, + uint8_t payload_type, + uint32_t rtp_timestamp, + uint32_t rtp_start_timestamp, + const uint8_t* payload_data, + size_t payload_size, + int64_t absolute_capture_timestamp_ms, + uint32_t ssrc); + + // Implements TransformedFrameCallback. Can be called on any thread. + void OnTransformedFrame( + std::unique_ptr frame) override; + + // Delegates the call to ChannelSend::SendRtpAudio on the |encoder_queue_|, + // by calling |send_audio_callback_|. + void SendFrame(std::unique_ptr frame) const; + + protected: + ~ChannelSendFrameTransformerDelegate() override = default; + + private: + mutable Mutex send_lock_; + SendFrameCallback send_frame_callback_ RTC_GUARDED_BY(send_lock_); + rtc::scoped_refptr frame_transformer_; + rtc::TaskQueue* encoder_queue_ RTC_GUARDED_BY(send_lock_); +}; +} // namespace webrtc +#endif // AUDIO_CHANNEL_SEND_FRAME_TRANSFORMER_DELEGATE_H_ diff --git a/audio/channel_send_frame_transformer_delegate_unittest.cc b/audio/channel_send_frame_transformer_delegate_unittest.cc new file mode 100644 index 0000000000..e2f3647c0a --- /dev/null +++ b/audio/channel_send_frame_transformer_delegate_unittest.cc @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "audio/channel_send_frame_transformer_delegate.h" + +#include +#include + +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_queue_for_test.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_frame_transformer.h" +#include "test/mock_transformable_frame.h" + +namespace webrtc { +namespace { + +using ::testing::NiceMock; +using ::testing::SaveArg; + +class MockChannelSend { + public: + MockChannelSend() = default; + ~MockChannelSend() = default; + + MOCK_METHOD(int32_t, + SendFrame, + (AudioFrameType frameType, + uint8_t payloadType, + uint32_t rtp_timestamp, + rtc::ArrayView payload, + int64_t absolute_capture_timestamp_ms)); + + ChannelSendFrameTransformerDelegate::SendFrameCallback callback() { + return [this](AudioFrameType frameType, uint8_t payloadType, + uint32_t rtp_timestamp, rtc::ArrayView payload, + int64_t absolute_capture_timestamp_ms) { + return SendFrame(frameType, payloadType, rtp_timestamp, payload, + absolute_capture_timestamp_ms); + }; + } +}; + +// Test that the delegate registers itself with the frame transformer on Init(). +TEST(ChannelSendFrameTransformerDelegateTest, + RegisterTransformedFrameCallbackOnInit) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject(); + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + ChannelSendFrameTransformerDelegate::SendFrameCallback(), + mock_frame_transformer, nullptr); + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback); + delegate->Init(); +} + +// Test that the delegate unregisters itself from the frame transformer on +// Reset(). 
+TEST(ChannelSendFrameTransformerDelegateTest, + UnregisterTransformedFrameCallbackOnReset) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject(); + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + ChannelSendFrameTransformerDelegate::SendFrameCallback(), + mock_frame_transformer, nullptr); + EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback); + delegate->Reset(); +} + +// Test that when the delegate receives a transformed frame from the frame +// transformer, it passes it to the channel using the SendFrameCallback. +TEST(ChannelSendFrameTransformerDelegateTest, + TransformRunsChannelSendCallback) { + TaskQueueForTest channel_queue("channel_queue"); + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + MockChannelSend mock_channel; + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + mock_channel.callback(), mock_frame_transformer, &channel_queue); + rtc::scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .WillOnce(SaveArg<0>(&callback)); + delegate->Init(); + ASSERT_TRUE(callback); + + const uint8_t data[] = {1, 2, 3, 4}; + EXPECT_CALL(mock_channel, SendFrame); + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&callback](std::unique_ptr frame) { + callback->OnTransformedFrame(std::move(frame)); + }); + delegate->Transform(AudioFrameType::kEmptyFrame, 0, 0, 0, data, sizeof(data), + 0, 0); + channel_queue.WaitForPreviouslyPostedTasks(); +} + +// Test that if the delegate receives a transformed frame after it has been +// reset, it does not run the SendFrameCallback, as the channel is destroyed +// after resetting the delegate. 
+TEST(ChannelSendFrameTransformerDelegateTest, + OnTransformedDoesNotRunChannelSendCallbackAfterReset) { + TaskQueueForTest channel_queue("channel_queue"); + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + MockChannelSend mock_channel; + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + mock_channel.callback(), mock_frame_transformer, &channel_queue); + + delegate->Reset(); + EXPECT_CALL(mock_channel, SendFrame).Times(0); + delegate->OnTransformedFrame(std::make_unique()); + channel_queue.WaitForPreviouslyPostedTasks(); +} + +} // namespace +} // namespace webrtc diff --git a/audio/mock_voe_channel_proxy.h b/audio/mock_voe_channel_proxy.h index e4c60a1a4b..52e5b2fc83 100644 --- a/audio/mock_voe_channel_proxy.h +++ b/audio/mock_voe_channel_proxy.h @@ -28,96 +28,147 @@ namespace test { class MockChannelReceive : public voe::ChannelReceiveInterface { public: - MOCK_METHOD2(SetNACKStatus, void(bool enable, int max_packets)); - MOCK_METHOD1(RegisterReceiverCongestionControlObjects, - void(PacketRouter* packet_router)); - MOCK_METHOD0(ResetReceiverCongestionControlObjects, void()); - MOCK_CONST_METHOD0(GetRTCPStatistics, CallReceiveStatistics()); - MOCK_CONST_METHOD0(GetNetworkStatistics, NetworkStatistics()); - MOCK_CONST_METHOD0(GetDecodingCallStatistics, AudioDecodingCallStats()); - MOCK_CONST_METHOD0(GetSpeechOutputLevelFullRange, int()); - MOCK_CONST_METHOD0(GetTotalOutputEnergy, double()); - MOCK_CONST_METHOD0(GetTotalOutputDuration, double()); - MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t()); - MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink)); - MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived& packet)); - MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length)); - MOCK_METHOD1(SetChannelOutputVolumeScaling, void(float scaling)); - MOCK_METHOD2(GetAudioFrameWithInfo, - AudioMixer::Source::AudioFrameInfo(int sample_rate_hz, - AudioFrame* audio_frame)); - 
MOCK_CONST_METHOD0(PreferredSampleRate, int()); - MOCK_METHOD1(SetAssociatedSendChannel, - void(const voe::ChannelSendInterface* send_channel)); - MOCK_CONST_METHOD2(GetPlayoutRtpTimestamp, - bool(uint32_t* rtp_timestamp, int64_t* time_ms)); - MOCK_METHOD2(SetEstimatedPlayoutNtpTimestampMs, - void(int64_t ntp_timestamp_ms, int64_t time_ms)); - MOCK_CONST_METHOD1(GetCurrentEstimatedPlayoutNtpTimestampMs, - absl::optional(int64_t now_ms)); - MOCK_CONST_METHOD0(GetSyncInfo, absl::optional()); - MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms)); - MOCK_METHOD1(SetBaseMinimumPlayoutDelayMs, bool(int delay_ms)); - MOCK_CONST_METHOD0(GetBaseMinimumPlayoutDelayMs, int()); - MOCK_CONST_METHOD0(GetReceiveCodec, - absl::optional>()); - MOCK_METHOD1(SetReceiveCodecs, - void(const std::map& codecs)); - MOCK_CONST_METHOD0(GetSources, std::vector()); - MOCK_METHOD0(StartPlayout, void()); - MOCK_METHOD0(StopPlayout, void()); + MOCK_METHOD(void, SetNACKStatus, (bool enable, int max_packets), (override)); + MOCK_METHOD(void, + RegisterReceiverCongestionControlObjects, + (PacketRouter*), + (override)); + MOCK_METHOD(void, ResetReceiverCongestionControlObjects, (), (override)); + MOCK_METHOD(CallReceiveStatistics, GetRTCPStatistics, (), (const, override)); + MOCK_METHOD(NetworkStatistics, + GetNetworkStatistics, + (bool), + (const, override)); + MOCK_METHOD(AudioDecodingCallStats, + GetDecodingCallStatistics, + (), + (const, override)); + MOCK_METHOD(int, GetSpeechOutputLevelFullRange, (), (const, override)); + MOCK_METHOD(double, GetTotalOutputEnergy, (), (const, override)); + MOCK_METHOD(double, GetTotalOutputDuration, (), (const, override)); + MOCK_METHOD(uint32_t, GetDelayEstimate, (), (const, override)); + MOCK_METHOD(void, SetSink, (AudioSinkInterface*), (override)); + MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived& packet), (override)); + MOCK_METHOD(void, + ReceivedRTCPPacket, + (const uint8_t*, size_t length), + (override)); + MOCK_METHOD(void, 
SetChannelOutputVolumeScaling, (float scaling), (override)); + MOCK_METHOD(AudioMixer::Source::AudioFrameInfo, + GetAudioFrameWithInfo, + (int sample_rate_hz, AudioFrame*), + (override)); + MOCK_METHOD(int, PreferredSampleRate, (), (const, override)); + MOCK_METHOD(void, + SetAssociatedSendChannel, + (const voe::ChannelSendInterface*), + (override)); + MOCK_METHOD(bool, + GetPlayoutRtpTimestamp, + (uint32_t*, int64_t*), + (const, override)); + MOCK_METHOD(void, + SetEstimatedPlayoutNtpTimestampMs, + (int64_t ntp_timestamp_ms, int64_t time_ms), + (override)); + MOCK_METHOD(absl::optional, + GetCurrentEstimatedPlayoutNtpTimestampMs, + (int64_t now_ms), + (const, override)); + MOCK_METHOD(absl::optional, + GetSyncInfo, + (), + (const, override)); + MOCK_METHOD(bool, SetMinimumPlayoutDelay, (int delay_ms), (override)); + MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (int delay_ms), (override)); + MOCK_METHOD(int, GetBaseMinimumPlayoutDelayMs, (), (const, override)); + MOCK_METHOD((absl::optional>), + GetReceiveCodec, + (), + (const, override)); + MOCK_METHOD(void, + SetReceiveCodecs, + ((const std::map& codecs)), + (override)); + MOCK_METHOD(void, StartPlayout, (), (override)); + MOCK_METHOD(void, StopPlayout, (), (override)); + MOCK_METHOD( + void, + SetDepacketizerToDecoderFrameTransformer, + (rtc::scoped_refptr frame_transformer), + (override)); }; class MockChannelSend : public voe::ChannelSendInterface { public: - // GMock doesn't like move-only types, like std::unique_ptr. 
- virtual void SetEncoder(int payload_type, - std::unique_ptr encoder) { - return SetEncoderForMock(payload_type, &encoder); - } - MOCK_METHOD2(SetEncoderForMock, - void(int payload_type, std::unique_ptr* encoder)); - MOCK_METHOD1( + MOCK_METHOD(void, + SetEncoder, + (int payload_type, std::unique_ptr encoder), + (override)); + MOCK_METHOD( + void, ModifyEncoder, - void(rtc::FunctionView*)> modifier)); - MOCK_METHOD1(CallEncoder, - void(rtc::FunctionView modifier)); - MOCK_METHOD1(SetRTCP_CNAME, void(absl::string_view c_name)); - MOCK_METHOD2(SetSendAudioLevelIndicationStatus, void(bool enable, int id)); - MOCK_METHOD2(RegisterSenderCongestionControlObjects, - void(RtpTransportControllerSendInterface* transport, - RtcpBandwidthObserver* bandwidth_observer)); - MOCK_METHOD0(ResetSenderCongestionControlObjects, void()); - MOCK_CONST_METHOD0(GetRTCPStatistics, CallSendStatistics()); - MOCK_CONST_METHOD0(GetRemoteRTCPReportBlocks, std::vector()); - MOCK_CONST_METHOD0(GetANAStatistics, ANAStats()); - MOCK_METHOD2(RegisterCngPayloadType, - void(int payload_type, int payload_frequency)); - MOCK_METHOD2(SetSendTelephoneEventPayloadType, - void(int payload_type, int payload_frequency)); - MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms)); - MOCK_METHOD1(OnBitrateAllocation, void(BitrateAllocationUpdate update)); - MOCK_METHOD1(SetInputMute, void(bool muted)); - MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length)); - // GMock doesn't like move-only types, like std::unique_ptr. 
- virtual void ProcessAndEncodeAudio(std::unique_ptr audio_frame) { - ProcessAndEncodeAudioForMock(&audio_frame); - } - MOCK_METHOD1(ProcessAndEncodeAudioForMock, - void(std::unique_ptr* audio_frame)); - MOCK_METHOD1(SetTransportOverhead, - void(size_t transport_overhead_per_packet)); - MOCK_CONST_METHOD0(GetRtpRtcp, RtpRtcp*()); - MOCK_CONST_METHOD0(GetBitrate, int()); - MOCK_METHOD1(OnTwccBasedUplinkPacketLossRate, void(float packet_loss_rate)); - MOCK_METHOD1(OnRecoverableUplinkPacketLossRate, - void(float recoverable_packet_loss_rate)); - MOCK_CONST_METHOD0(GetRTT, int64_t()); - MOCK_METHOD0(StartSend, void()); - MOCK_METHOD0(StopSend, void()); - MOCK_METHOD1( - SetFrameEncryptor, - void(rtc::scoped_refptr frame_encryptor)); + (rtc::FunctionView*)> modifier), + (override)); + MOCK_METHOD(void, + CallEncoder, + (rtc::FunctionView modifier), + (override)); + MOCK_METHOD(void, SetRTCP_CNAME, (absl::string_view c_name), (override)); + MOCK_METHOD(void, + SetSendAudioLevelIndicationStatus, + (bool enable, int id), + (override)); + MOCK_METHOD(void, + RegisterSenderCongestionControlObjects, + (RtpTransportControllerSendInterface*, RtcpBandwidthObserver*), + (override)); + MOCK_METHOD(void, ResetSenderCongestionControlObjects, (), (override)); + MOCK_METHOD(CallSendStatistics, GetRTCPStatistics, (), (const, override)); + MOCK_METHOD(std::vector, + GetRemoteRTCPReportBlocks, + (), + (const, override)); + MOCK_METHOD(ANAStats, GetANAStatistics, (), (const, override)); + MOCK_METHOD(void, + RegisterCngPayloadType, + (int payload_type, int payload_frequency), + (override)); + MOCK_METHOD(void, + SetSendTelephoneEventPayloadType, + (int payload_type, int payload_frequency), + (override)); + MOCK_METHOD(bool, + SendTelephoneEventOutband, + (int event, int duration_ms), + (override)); + MOCK_METHOD(void, + OnBitrateAllocation, + (BitrateAllocationUpdate update), + (override)); + MOCK_METHOD(void, SetInputMute, (bool muted), (override)); + MOCK_METHOD(void, + 
ReceivedRTCPPacket, + (const uint8_t*, size_t length), + (override)); + MOCK_METHOD(void, + ProcessAndEncodeAudio, + (std::unique_ptr), + (override)); + MOCK_METHOD(RtpRtcpInterface*, GetRtpRtcp, (), (const, override)); + MOCK_METHOD(int, GetBitrate, (), (const, override)); + MOCK_METHOD(int64_t, GetRTT, (), (const, override)); + MOCK_METHOD(void, StartSend, (), (override)); + MOCK_METHOD(void, StopSend, (), (override)); + MOCK_METHOD(void, + SetFrameEncryptor, + (rtc::scoped_refptr frame_encryptor), + (override)); + MOCK_METHOD( + void, + SetEncoderToPacketizerFrameTransformer, + (rtc::scoped_refptr frame_transformer), + (override)); }; } // namespace test } // namespace webrtc diff --git a/audio/test/audio_stats_test.cc b/audio/test/audio_stats_test.cc index c91183c66b..ea3327056b 100644 --- a/audio/test/audio_stats_test.cc +++ b/audio/test/audio_stats_test.cc @@ -65,7 +65,8 @@ class NoLossTest : public AudioEndToEndTest { EXPECT_FALSE(send_stats.apm_statistics.residual_echo_likelihood_recent_max); EXPECT_EQ(false, send_stats.typing_noise_detected); - AudioReceiveStream::Stats recv_stats = receive_stream()->GetStats(); + AudioReceiveStream::Stats recv_stats = + receive_stream()->GetStats(/*get_and_clear_legacy_stats=*/true); EXPECT_PRED2(IsNear, kBytesSent, recv_stats.payload_bytes_rcvd); EXPECT_PRED2(IsNear, kPacketsSent, recv_stats.packets_rcvd); EXPECT_EQ(0u, recv_stats.packets_lost); diff --git a/audio/test/low_bandwidth_audio_test.cc b/audio/test/low_bandwidth_audio_test.cc index 049b5e5150..50cf499920 100644 --- a/audio/test/low_bandwidth_audio_test.cc +++ b/audio/test/low_bandwidth_audio_test.cc @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include "absl/flags/declare.h" #include "absl/flags/flag.h" #include "api/test/simulated_network.h" #include "audio/test/audio_end_to_end_test.h" diff --git a/audio/test/low_bandwidth_audio_test.py b/audio/test/low_bandwidth_audio_test.py index cc6a70eeaf..9aaf30f364 100755 --- a/audio/test/low_bandwidth_audio_test.py +++ b/audio/test/low_bandwidth_audio_test.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """ This script is the wrapper that runs the low-bandwidth audio test. @@ -16,7 +15,6 @@ import argparse import collections -import json import logging import os import re @@ -24,289 +22,352 @@ import subprocess import sys - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir)) NO_TOOLS_ERROR_MESSAGE = ( - 'Could not find PESQ or POLQA at %s.\n' - '\n' - 'To fix this run:\n' - ' python %s %s\n' - '\n' - 'Note that these tools are Google-internal due to licensing, so in order to ' - 'use them you will have to get your own license and manually put them in the ' - 'right location.\n' - 'See https://cs.chromium.org/chromium/src/third_party/webrtc/tools_webrtc/' - 'download_tools.py?rcl=bbceb76f540159e2dba0701ac03c514f01624130&l=13') + 'Could not find PESQ or POLQA at %s.\n' + '\n' + 'To fix this run:\n' + ' python %s %s\n' + '\n' + 'Note that these tools are Google-internal due to licensing, so in order to ' + 'use them you will have to get your own license and manually put them in the ' + 'right location.\n' + 'See https://cs.chromium.org/chromium/src/third_party/webrtc/tools_webrtc/' + 'download_tools.py?rcl=bbceb76f540159e2dba0701ac03c514f01624130&l=13') def _LogCommand(command): - logging.info('Running %r', command) - return command + logging.info('Running %r', command) + return command def _ParseArgs(): - parser = 
argparse.ArgumentParser(description='Run low-bandwidth audio tests.') - parser.add_argument('build_dir', - help='Path to the build directory (e.g. out/Release).') - parser.add_argument('--remove', action='store_true', - help='Remove output audio files after testing.') - parser.add_argument('--android', action='store_true', - help='Perform the test on a connected Android device instead.') - parser.add_argument('--adb-path', help='Path to adb binary.', default='adb') - parser.add_argument('--num-retries', default='0', - help='Number of times to retry the test on Android.') - parser.add_argument('--isolated-script-test-perf-output', default=None, - help='Path to store perf results in chartjson format.') - parser.add_argument('--isolated-script-test-output', default=None, - help='Path to output an empty JSON file which Chromium infra requires.') - parser.add_argument('--extra-test-args', default=[], action='append', - help='Extra args to path to the test binary.') - - # Ignore Chromium-specific flags - parser.add_argument('--test-launcher-summary-output', - type=str, default=None) - args = parser.parse_args() - - return args + parser = argparse.ArgumentParser( + description='Run low-bandwidth audio tests.') + parser.add_argument('build_dir', + help='Path to the build directory (e.g. 
out/Release).') + parser.add_argument('--remove', + action='store_true', + help='Remove output audio files after testing.') + parser.add_argument( + '--android', + action='store_true', + help='Perform the test on a connected Android device instead.') + parser.add_argument('--adb-path', + help='Path to adb binary.', + default='adb') + parser.add_argument('--num-retries', + default='0', + help='Number of times to retry the test on Android.') + parser.add_argument( + '--isolated-script-test-perf-output', + default=None, + help='Path to store perf results in histogram proto format.') + parser.add_argument('--extra-test-args', + default=[], + action='append', + help='Extra args to path to the test binary.') + + # Ignore Chromium-specific flags + parser.add_argument('--test-launcher-summary-output', + type=str, + default=None) + args = parser.parse_args() + + return args def _GetPlatform(): - if sys.platform == 'win32': - return 'win' - elif sys.platform == 'darwin': - return 'mac' - elif sys.platform.startswith('linux'): - return 'linux' + if sys.platform == 'win32': + return 'win' + elif sys.platform == 'darwin': + return 'mac' + elif sys.platform.startswith('linux'): + return 'linux' def _GetExtension(): - return '.exe' if sys.platform == 'win32' else '' + return '.exe' if sys.platform == 'win32' else '' def _GetPathToTools(): - tools_dir = os.path.join(SRC_DIR, 'tools_webrtc') - toolchain_dir = os.path.join(tools_dir, 'audio_quality') + tools_dir = os.path.join(SRC_DIR, 'tools_webrtc') + toolchain_dir = os.path.join(tools_dir, 'audio_quality') - platform = _GetPlatform() - ext = _GetExtension() + platform = _GetPlatform() + ext = _GetExtension() - pesq_path = os.path.join(toolchain_dir, platform, 'pesq' + ext) - if not os.path.isfile(pesq_path): - pesq_path = None + pesq_path = os.path.join(toolchain_dir, platform, 'pesq' + ext) + if not os.path.isfile(pesq_path): + pesq_path = None - polqa_path = os.path.join(toolchain_dir, platform, 'PolqaOem64' + ext) - if not 
os.path.isfile(polqa_path): - polqa_path = None + polqa_path = os.path.join(toolchain_dir, platform, 'PolqaOem64' + ext) + if not os.path.isfile(polqa_path): + polqa_path = None - if (platform != 'mac' and not polqa_path) or not pesq_path: - logging.error(NO_TOOLS_ERROR_MESSAGE, - toolchain_dir, - os.path.join(tools_dir, 'download_tools.py'), - toolchain_dir) + if (platform != 'mac' and not polqa_path) or not pesq_path: + logging.error(NO_TOOLS_ERROR_MESSAGE, toolchain_dir, + os.path.join(tools_dir, 'download_tools.py'), + toolchain_dir) - return pesq_path, polqa_path + return pesq_path, polqa_path def ExtractTestRuns(lines, echo=False): - """Extracts information about tests from the output of a test runner. + """Extracts information about tests from the output of a test runner. Produces tuples (android_device, test_name, reference_file, degraded_file, cur_perf_results). """ - for line in lines: - if echo: - sys.stdout.write(line) - - # Output from Android has a prefix with the device name. - android_prefix_re = r'(?:I\b.+\brun_tests_on_device\((.+?)\)\s*)?' - test_re = r'^' + android_prefix_re + (r'TEST (\w+) ([^ ]+?) ([^\s]+)' - r' ?([^\s]+)?\s*$') - - match = re.search(test_re, line) - if match: - yield match.groups() - - -def _GetFile(file_path, out_dir, move=False, - android=False, adb_prefix=('adb',)): - out_file_name = os.path.basename(file_path) - out_file_path = os.path.join(out_dir, out_file_name) - - if android: - # Pull the file from the connected Android device. - adb_command = adb_prefix + ('pull', file_path, out_dir) - subprocess.check_call(_LogCommand(adb_command)) - if move: - # Remove that file. 
- adb_command = adb_prefix + ('shell', 'rm', file_path) - subprocess.check_call(_LogCommand(adb_command)) - elif os.path.abspath(file_path) != os.path.abspath(out_file_path): - if move: - shutil.move(file_path, out_file_path) - else: - shutil.copy(file_path, out_file_path) - - return out_file_path - - -def _RunPesq(executable_path, reference_file, degraded_file, + for line in lines: + if echo: + sys.stdout.write(line) + + # Output from Android has a prefix with the device name. + android_prefix_re = r'(?:I\b.+\brun_tests_on_device\((.+?)\)\s*)?' + test_re = r'^' + android_prefix_re + (r'TEST (\w+) ([^ ]+?) ([^\s]+)' + r' ?([^\s]+)?\s*$') + + match = re.search(test_re, line) + if match: + yield match.groups() + + +def _GetFile(file_path, + out_dir, + move=False, + android=False, + adb_prefix=('adb', )): + out_file_name = os.path.basename(file_path) + out_file_path = os.path.join(out_dir, out_file_name) + + if android: + # Pull the file from the connected Android device. + adb_command = adb_prefix + ('pull', file_path, out_dir) + subprocess.check_call(_LogCommand(adb_command)) + if move: + # Remove that file. + adb_command = adb_prefix + ('shell', 'rm', file_path) + subprocess.check_call(_LogCommand(adb_command)) + elif os.path.abspath(file_path) != os.path.abspath(out_file_path): + if move: + shutil.move(file_path, out_file_path) + else: + shutil.copy(file_path, out_file_path) + + return out_file_path + + +def _RunPesq(executable_path, + reference_file, + degraded_file, sample_rate_hz=16000): - directory = os.path.dirname(reference_file) - assert os.path.dirname(degraded_file) == directory - - # Analyze audio. - command = [executable_path, '+%d' % sample_rate_hz, - os.path.basename(reference_file), - os.path.basename(degraded_file)] - # Need to provide paths in the current directory due to a bug in PESQ: - # On Mac, for some 'path/to/file.wav', if 'file.wav' is longer than - # 'path/to', PESQ crashes. 
- out = subprocess.check_output(_LogCommand(command), - cwd=directory, stderr=subprocess.STDOUT) - - # Find the scores in stdout of PESQ. - match = re.search( - r'Prediction \(Raw MOS, MOS-LQO\):\s+=\s+([\d.]+)\s+([\d.]+)', out) - if match: - raw_mos, _ = match.groups() - - return {'pesq_mos': (raw_mos, 'score')} - else: - logging.error('PESQ: %s', out.splitlines()[-1]) - return {} - + directory = os.path.dirname(reference_file) + assert os.path.dirname(degraded_file) == directory + + # Analyze audio. + command = [ + executable_path, + '+%d' % sample_rate_hz, + os.path.basename(reference_file), + os.path.basename(degraded_file) + ] + # Need to provide paths in the current directory due to a bug in PESQ: + # On Mac, for some 'path/to/file.wav', if 'file.wav' is longer than + # 'path/to', PESQ crashes. + out = subprocess.check_output(_LogCommand(command), + cwd=directory, + stderr=subprocess.STDOUT) + + # Find the scores in stdout of PESQ. + match = re.search( + r'Prediction \(Raw MOS, MOS-LQO\):\s+=\s+([\d.]+)\s+([\d.]+)', out) + if match: + raw_mos, _ = match.groups() -def _RunPolqa(executable_path, reference_file, degraded_file): - # Analyze audio. - command = [executable_path, '-q', '-LC', 'NB', - '-Ref', reference_file, '-Test', degraded_file] - process = subprocess.Popen(_LogCommand(command), - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = process.communicate() - - # Find the scores in stdout of POLQA. 
- match = re.search(r'\bMOS-LQO:\s+([\d.]+)', out) - - if process.returncode != 0 or not match: - if process.returncode == 2: - logging.warning('%s (2)', err.strip()) - logging.warning('POLQA license error, skipping test.') + return {'pesq_mos': (raw_mos, 'unitless')} else: - logging.error('%s (%d)', err.strip(), process.returncode) - return {} - - mos_lqo, = match.groups() - return {'polqa_mos_lqo': (mos_lqo, 'score')} - - -def _AddChart(charts, metric, test_name, value, units): - chart = charts.setdefault(metric, {}) - chart[test_name] = { - "type": "scalar", - "value": value, - "units": units, - } - + logging.error('PESQ: %s', out.splitlines()[-1]) + return {} -def _AddRunPerfResults(charts, run_perf_results_file): - with open(run_perf_results_file, 'rb') as f: - per_run_perf_results = json.load(f) - if 'charts' not in per_run_perf_results: - return - for metric, cases in per_run_perf_results['charts'].items(): - chart = charts.setdefault(metric, {}) - for case_name, case_value in cases.items(): - if case_name in chart: - logging.error('Overriding results for %s/%s', metric, case_name) - chart[case_name] = case_value - -Analyzer = collections.namedtuple('Analyzer', ['name', 'func', 'executable', - 'sample_rate_hz']) +def _RunPolqa(executable_path, reference_file, degraded_file): + # Analyze audio. + command = [ + executable_path, '-q', '-LC', 'NB', '-Ref', reference_file, '-Test', + degraded_file + ] + process = subprocess.Popen(_LogCommand(command), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + out, err = process.communicate() + + # Find the scores in stdout of POLQA. 
+ match = re.search(r'\bMOS-LQO:\s+([\d.]+)', out) + + if process.returncode != 0 or not match: + if process.returncode == 2: + logging.warning('%s (2)', err.strip()) + logging.warning('POLQA license error, skipping test.') + else: + logging.error('%s (%d)', err.strip(), process.returncode) + return {} + + mos_lqo, = match.groups() + return {'polqa_mos_lqo': (mos_lqo, 'unitless')} + + +def _MergeInPerfResultsFromCcTests(histograms, run_perf_results_file): + from tracing.value import histogram_set + + cc_histograms = histogram_set.HistogramSet() + with open(run_perf_results_file, 'rb') as f: + contents = f.read() + if not contents: + return + + cc_histograms.ImportProto(contents) + + histograms.Merge(cc_histograms) + + +Analyzer = collections.namedtuple( + 'Analyzer', ['name', 'func', 'executable', 'sample_rate_hz']) + + +def _ConfigurePythonPath(args): + script_dir = os.path.dirname(os.path.realpath(__file__)) + checkout_root = os.path.abspath( + os.path.join(script_dir, os.pardir, os.pardir)) + + # TODO(https://crbug.com/1029452): Use a copy rule and add these from the out + # dir like for the third_party/protobuf code. + sys.path.insert( + 0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing')) + + # The low_bandwidth_audio_perf_test gn rule will build the protobuf stub for + # python, so put it in the path for this script before we attempt to import + # it. + histogram_proto_path = os.path.join(os.path.abspath(args.build_dir), + 'pyproto', 'tracing', 'tracing', + 'proto') + sys.path.insert(0, histogram_proto_path) + proto_stub_path = os.path.join(os.path.abspath(args.build_dir), 'pyproto') + sys.path.insert(0, proto_stub_path) + + # Fail early in case the proto hasn't been built. + try: + import histogram_pb2 + except ImportError as e: + logging.exception(e) + raise ImportError( + 'Could not import histogram_pb2. You need to build the ' + 'low_bandwidth_audio_perf_test target before invoking ' + 'this script. 
Expected to find ' + 'histogram_pb2.py in %s.' % histogram_proto_path) def main(): - # pylint: disable=W0101 - logging.basicConfig(level=logging.INFO) - - args = _ParseArgs() - - pesq_path, polqa_path = _GetPathToTools() - if pesq_path is None: - return 1 - - out_dir = os.path.join(args.build_dir, '..') - if args.android: - test_command = [os.path.join(args.build_dir, 'bin', - 'run_low_bandwidth_audio_test'), - '-v', '--num-retries', args.num_retries] - else: - test_command = [os.path.join(args.build_dir, 'low_bandwidth_audio_test')] - - analyzers = [Analyzer('pesq', _RunPesq, pesq_path, 16000)] - # Check if POLQA can run at all, or skip the 48 kHz tests entirely. - example_path = os.path.join(SRC_DIR, 'resources', - 'voice_engine', 'audio_tiny48.wav') - if polqa_path and _RunPolqa(polqa_path, example_path, example_path): - analyzers.append(Analyzer('polqa', _RunPolqa, polqa_path, 48000)) - - charts = {} - - for analyzer in analyzers: - # Start the test executable that produces audio files. - test_process = subprocess.Popen( - _LogCommand(test_command + [ + # pylint: disable=W0101 + logging.basicConfig(level=logging.INFO) + logging.info('Invoked with %s', str(sys.argv)) + + args = _ParseArgs() + + _ConfigurePythonPath(args) + + # Import catapult modules here after configuring the pythonpath. + from tracing.value import histogram_set + from tracing.value.diagnostics import reserved_infos + from tracing.value.diagnostics import generic_set + + pesq_path, polqa_path = _GetPathToTools() + if pesq_path is None: + return 1 + + out_dir = os.path.join(args.build_dir, '..') + if args.android: + test_command = [ + os.path.join(args.build_dir, 'bin', + 'run_low_bandwidth_audio_test'), '-v', + '--num-retries', args.num_retries + ] + else: + test_command = [ + os.path.join(args.build_dir, 'low_bandwidth_audio_test') + ] + + analyzers = [Analyzer('pesq', _RunPesq, pesq_path, 16000)] + # Check if POLQA can run at all, or skip the 48 kHz tests entirely. 
+ example_path = os.path.join(SRC_DIR, 'resources', 'voice_engine', + 'audio_tiny48.wav') + if polqa_path and _RunPolqa(polqa_path, example_path, example_path): + analyzers.append(Analyzer('polqa', _RunPolqa, polqa_path, 48000)) + + histograms = histogram_set.HistogramSet() + for analyzer in analyzers: + # Start the test executable that produces audio files. + test_process = subprocess.Popen(_LogCommand(test_command + [ '--sample_rate_hz=%d' % analyzer.sample_rate_hz, - '--test_case_prefix=%s' % analyzer.name - ] + args.extra_test_args), - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - perf_results_file = None - try: - lines = iter(test_process.stdout.readline, '') - for result in ExtractTestRuns(lines, echo=True): - (android_device, test_name, reference_file, degraded_file, - perf_results_file) = result - - adb_prefix = (args.adb_path,) - if android_device: - adb_prefix += ('-s', android_device) - - reference_file = _GetFile(reference_file, out_dir, - android=args.android, adb_prefix=adb_prefix) - degraded_file = _GetFile(degraded_file, out_dir, move=True, - android=args.android, adb_prefix=adb_prefix) - - analyzer_results = analyzer.func(analyzer.executable, - reference_file, degraded_file) - for metric, (value, units) in analyzer_results.items(): - # Output a result for the perf dashboard. 
- print 'RESULT %s: %s= %s %s' % (metric, test_name, value, units) - _AddChart(charts, metric, test_name, value, units) - - if args.remove: - os.remove(reference_file) - os.remove(degraded_file) - finally: - test_process.terminate() - if perf_results_file: - perf_results_file = _GetFile(perf_results_file, out_dir, move=True, - android=args.android, adb_prefix=adb_prefix) - _AddRunPerfResults(charts, perf_results_file) - if args.remove: - os.remove(perf_results_file) - - if args.isolated_script_test_perf_output: - with open(args.isolated_script_test_perf_output, 'w') as f: - json.dump({"format_version": "1.0", "charts": charts}, f) - - if args.isolated_script_test_output: - with open(args.isolated_script_test_output, 'w') as f: - json.dump({"version": 3}, f) - - return test_process.wait() + '--test_case_prefix=%s' % analyzer.name, + ] + args.extra_test_args), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + perf_results_file = None + try: + lines = iter(test_process.stdout.readline, '') + for result in ExtractTestRuns(lines, echo=True): + (android_device, test_name, reference_file, degraded_file, + perf_results_file) = result + + adb_prefix = (args.adb_path, ) + if android_device: + adb_prefix += ('-s', android_device) + + reference_file = _GetFile(reference_file, + out_dir, + android=args.android, + adb_prefix=adb_prefix) + degraded_file = _GetFile(degraded_file, + out_dir, + move=True, + android=args.android, + adb_prefix=adb_prefix) + + analyzer_results = analyzer.func(analyzer.executable, + reference_file, degraded_file) + for metric, (value, units) in analyzer_results.items(): + hist = histograms.CreateHistogram(metric, units, [value]) + user_story = generic_set.GenericSet([test_name]) + hist.diagnostics[reserved_infos.STORIES.name] = user_story + + # Output human readable results. 
+ print 'RESULT %s: %s= %s %s' % (metric, test_name, value, + units) + + if args.remove: + os.remove(reference_file) + os.remove(degraded_file) + finally: + test_process.terminate() + if perf_results_file: + perf_results_file = _GetFile(perf_results_file, + out_dir, + move=True, + android=args.android, + adb_prefix=adb_prefix) + _MergeInPerfResultsFromCcTests(histograms, perf_results_file) + if args.remove: + os.remove(perf_results_file) + + if args.isolated_script_test_perf_output: + with open(args.isolated_script_test_perf_output, 'wb') as f: + f.write(histograms.AsProto().SerializeToString()) + + return test_process.wait() if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/audio/test/pc_low_bandwidth_audio_test.cc b/audio/test/pc_low_bandwidth_audio_test.cc index e372fabc72..95a32238c5 100644 --- a/audio/test/pc_low_bandwidth_audio_test.cc +++ b/audio/test/pc_low_bandwidth_audio_test.cc @@ -10,12 +10,14 @@ #include +#include "absl/flags/declare.h" #include "absl/flags/flag.h" #include "api/test/create_network_emulation_manager.h" #include "api/test/create_peerconnection_quality_test_fixture.h" #include "api/test/network_emulation_manager.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" +#include "api/test/time_controller.h" #include "call/simulated_network.h" #include "test/gtest.h" #include "test/pc/e2e/network_quality_metrics_reporter.h" @@ -70,12 +72,13 @@ CreateTwoNetworkLinks(NetworkEmulationManager* emulation, std::unique_ptr CreateTestFixture(const std::string& test_case_name, + TimeController& time_controller, std::pair network_links, rtc::FunctionView alice_configurer, rtc::FunctionView bob_configurer) { auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture( - test_case_name, /*audio_quality_analyzer=*/nullptr, + test_case_name, time_controller, /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr); 
fixture->AddPeer(network_links.first->network_thread(), network_links.first->network_manager(), alice_configurer); @@ -105,12 +108,12 @@ std::string AudioOutputFile() { std::string PerfResultsOutputFile() { return webrtc::test::OutputPath() + "PCLowBandwidth_perf_" + - FileSampleRateSuffix() + ".json"; + FileSampleRateSuffix() + ".pb"; } void LogTestResults() { std::string perf_results_output_file = PerfResultsOutputFile(); - webrtc::test::WritePerfResults(perf_results_output_file); + EXPECT_TRUE(webrtc::test::WritePerfResults(perf_results_output_file)); const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info(); @@ -127,7 +130,7 @@ TEST(PCLowBandwidthAudioTest, PCGoodNetworkHighBitrate) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - GetMetricTestCaseName(), + GetMetricTestCaseName(), *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { @@ -140,7 +143,7 @@ TEST(PCLowBandwidthAudioTest, PCGoodNetworkHighBitrate) { alice->SetAudioConfig(std::move(audio)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::ms( + fixture->Run(RunParams(TimeDelta::Millis( absl::GetFlag(FLAGS_quick) ? 
kQuickTestDurationMs : kTestDurationMs))); LogTestResults(); } @@ -154,7 +157,7 @@ TEST(PCLowBandwidthAudioTest, PC40kbpsNetwork) { config.queue_delay_ms = 400; config.loss_percent = 1; auto fixture = CreateTestFixture( - GetMetricTestCaseName(), + GetMetricTestCaseName(), *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { AudioConfig audio; @@ -166,7 +169,7 @@ TEST(PCLowBandwidthAudioTest, PC40kbpsNetwork) { alice->SetAudioConfig(std::move(audio)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::ms( + fixture->Run(RunParams(TimeDelta::Millis( absl::GetFlag(FLAGS_quick) ? kQuickTestDurationMs : kTestDurationMs))); LogTestResults(); } diff --git a/audio/test/unittests/low_bandwidth_audio_test_test.py b/audio/test/unittests/low_bandwidth_audio_test_test.py index 7403663cd4..1b73269528 100755 --- a/audio/test/unittests/low_bandwidth_audio_test_test.py +++ b/audio/test/unittests/low_bandwidth_audio_test_test.py @@ -11,7 +11,6 @@ import unittest import sys - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir) sys.path.append(PARENT_DIR) @@ -19,46 +18,51 @@ class TestExtractTestRuns(unittest.TestCase): - def _TestLog(self, log, *expected): - self.assertEqual( - tuple(low_bandwidth_audio_test.ExtractTestRuns(log.splitlines(True))), - expected) + def _TestLog(self, log, *expected): + self.assertEqual( + tuple( + low_bandwidth_audio_test.ExtractTestRuns( + log.splitlines(True))), expected) - def testLinux(self): - self._TestLog(LINUX_LOG, - (None, 'GoodNetworkHighBitrate', - '/webrtc/src/resources/voice_engine/audio_tiny16.wav', - '/webrtc/src/out/LowBandwidth_GoodNetworkHighBitrate.wav', None), - (None, 'Mobile2GNetwork', - '/webrtc/src/resources/voice_engine/audio_tiny16.wav', - '/webrtc/src/out/LowBandwidth_Mobile2GNetwork.wav', None), - (None, 'PCGoodNetworkHighBitrate', - 
'/webrtc/src/resources/voice_engine/audio_tiny16.wav', - '/webrtc/src/out/PCLowBandwidth_PCGoodNetworkHighBitrate.wav', - '/webrtc/src/out/PCLowBandwidth_perf_48.json'), - (None, 'PCMobile2GNetwork', - '/webrtc/src/resources/voice_engine/audio_tiny16.wav', - '/webrtc/src/out/PCLowBandwidth_PCMobile2GNetwork.wav', - '/webrtc/src/out/PCLowBandwidth_perf_48.json')) + def testLinux(self): + self._TestLog( + LINUX_LOG, + (None, 'GoodNetworkHighBitrate', + '/webrtc/src/resources/voice_engine/audio_tiny16.wav', + '/webrtc/src/out/LowBandwidth_GoodNetworkHighBitrate.wav', None), + (None, 'Mobile2GNetwork', + '/webrtc/src/resources/voice_engine/audio_tiny16.wav', + '/webrtc/src/out/LowBandwidth_Mobile2GNetwork.wav', None), + (None, 'PCGoodNetworkHighBitrate', + '/webrtc/src/resources/voice_engine/audio_tiny16.wav', + '/webrtc/src/out/PCLowBandwidth_PCGoodNetworkHighBitrate.wav', + '/webrtc/src/out/PCLowBandwidth_perf_48.json'), + (None, 'PCMobile2GNetwork', + '/webrtc/src/resources/voice_engine/audio_tiny16.wav', + '/webrtc/src/out/PCLowBandwidth_PCMobile2GNetwork.wav', + '/webrtc/src/out/PCLowBandwidth_perf_48.json')) - def testAndroid(self): - self._TestLog(ANDROID_LOG, - ('ddfa6149', 'Mobile2GNetwork', - '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', - '/sdcard/chromium_tests_root/LowBandwidth_Mobile2GNetwork.wav', None), - ('TA99205CNO', 'GoodNetworkHighBitrate', - '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', - '/sdcard/chromium_tests_root/LowBandwidth_GoodNetworkHighBitrate.wav', - None), - ('ddfa6149', 'PCMobile2GNetwork', - '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', - '/sdcard/chromium_tests_root/PCLowBandwidth_PCMobile2GNetwork.wav', - '/sdcard/chromium_tests_root/PCLowBandwidth_perf_48.json'), - ('TA99205CNO', 'PCGoodNetworkHighBitrate', - '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', - ('/sdcard/chromium_tests_root/' - 
'PCLowBandwidth_PCGoodNetworkHighBitrate.wav'), - '/sdcard/chromium_tests_root/PCLowBandwidth_perf_48.json')) + def testAndroid(self): + self._TestLog(ANDROID_LOG, ( + 'ddfa6149', 'Mobile2GNetwork', + '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', + '/sdcard/chromium_tests_root/LowBandwidth_Mobile2GNetwork.wav', + None + ), ( + 'TA99205CNO', 'GoodNetworkHighBitrate', + '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', + '/sdcard/chromium_tests_root/LowBandwidth_GoodNetworkHighBitrate.wav', + None + ), ( + 'ddfa6149', 'PCMobile2GNetwork', + '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', + '/sdcard/chromium_tests_root/PCLowBandwidth_PCMobile2GNetwork.wav', + '/sdcard/chromium_tests_root/PCLowBandwidth_perf_48.json' + ), ('TA99205CNO', 'PCGoodNetworkHighBitrate', + '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav', + ('/sdcard/chromium_tests_root/' + 'PCLowBandwidth_PCGoodNetworkHighBitrate.wav'), + '/sdcard/chromium_tests_root/PCLowBandwidth_perf_48.json')) LINUX_LOG = r'''\ @@ -233,6 +237,5 @@ def testAndroid(self): I 16.608s tear_down_device(TA99205CNO) Wrote device cache: /webrtc/src/out/debug-android/device_cache_TA99305CMO.json ''' - if __name__ == "__main__": - unittest.main() + unittest.main() diff --git a/audio/utility/audio_frame_operations_unittest.cc b/audio/utility/audio_frame_operations_unittest.cc index 1d38875add..1a2c16e45f 100644 --- a/audio/utility/audio_frame_operations_unittest.cc +++ b/audio/utility/audio_frame_operations_unittest.cc @@ -27,6 +27,8 @@ class AudioFrameOperationsTest : public ::testing::Test { AudioFrame frame_; }; +class AudioFrameOperationsDeathTest : public AudioFrameOperationsTest {}; + void SetFrameData(int16_t ch1, int16_t ch2, int16_t ch3, @@ -105,7 +107,7 @@ void VerifyFrameDataBounds(const AudioFrame& frame, } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(AudioFrameOperationsTest, 
MonoToStereoFailsWithBadParameters) { +TEST_F(AudioFrameOperationsDeathTest, MonoToStereoFailsWithBadParameters) { EXPECT_DEATH(AudioFrameOperations::UpmixChannels(2, &frame_), ""); frame_.samples_per_channel_ = AudioFrame::kMaxDataSizeSamples; frame_.num_channels_ = 1; @@ -136,7 +138,7 @@ TEST_F(AudioFrameOperationsTest, MonoToStereoMuted) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(AudioFrameOperationsTest, StereoToMonoFailsWithBadParameters) { +TEST_F(AudioFrameOperationsDeathTest, StereoToMonoFailsWithBadParameters) { frame_.num_channels_ = 1; EXPECT_DEATH(AudioFrameOperations::DownmixChannels(1, &frame_), ""); } diff --git a/audio/voip/BUILD.gn b/audio/voip/BUILD.gn new file mode 100644 index 0000000000..52f9d07f17 --- /dev/null +++ b/audio/voip/BUILD.gn @@ -0,0 +1,103 @@ +# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved. +# +# Use of this source code is governed by a BSD - style license +# that can be found in the LICENSE file in the root of the source +# tree.An additional intellectual property rights grant can be found +# in the file PATENTS.All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +rtc_library("voip_core") { + sources = [ + "voip_core.cc", + "voip_core.h", + ] + deps = [ + ":audio_channel", + "..:audio", + "../../api:scoped_refptr", + "../../api/audio_codecs:audio_codecs_api", + "../../api/task_queue", + "../../api/voip:voip_api", + "../../modules/audio_device:audio_device_api", + "../../modules/audio_mixer:audio_mixer_impl", + "../../modules/audio_processing:api", + "../../modules/utility:utility", + "../../rtc_base:criticalsection", + "../../rtc_base:logging", + "../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("audio_channel") { + sources = [ + "audio_channel.cc", + "audio_channel.h", + ] + deps = [ + ":audio_egress", + ":audio_ingress", + "../../api:transport_api", + "../../api/audio_codecs:audio_codecs_api", + "../../api/task_queue", + "../../api/voip:voip_api", + "../../modules/audio_device:audio_device_api", + "../../modules/rtp_rtcp", + "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/utility", + "../../rtc_base:criticalsection", + "../../rtc_base:logging", + "../../rtc_base:refcount", + "../../rtc_base:rtc_base_approved", + ] +} + +rtc_library("audio_ingress") { + sources = [ + "audio_ingress.cc", + "audio_ingress.h", + ] + deps = [ + "..:audio", + "../../api:array_view", + "../../api:rtp_headers", + "../../api:scoped_refptr", + "../../api:transport_api", + "../../api/audio:audio_mixer_api", + "../../api/audio_codecs:audio_codecs_api", + "../../modules/audio_coding", + "../../modules/rtp_rtcp", + "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/utility", + "../../rtc_base:criticalsection", + "../../rtc_base:logging", + "../../rtc_base:safe_minmax", + "../../rtc_base:timeutils", + "../../rtc_base/synchronization:mutex", + "../utility:audio_frame_operations", + ] +} + +rtc_library("audio_egress") { + sources = [ + "audio_egress.cc", + "audio_egress.h", + ] + deps = [ + "..:audio", + 
"../../api/audio_codecs:audio_codecs_api", + "../../api/task_queue", + "../../call:audio_sender_interface", + "../../modules/audio_coding", + "../../modules/rtp_rtcp", + "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../rtc_base:logging", + "../../rtc_base:rtc_task_queue", + "../../rtc_base:thread_checker", + "../../rtc_base:timeutils", + "../../rtc_base/synchronization:mutex", + "../utility:audio_frame_operations", + ] +} diff --git a/audio/voip/audio_channel.cc b/audio/voip/audio_channel.cc new file mode 100644 index 0000000000..926130dc7e --- /dev/null +++ b/audio/voip/audio_channel.cc @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "audio/voip/audio_channel.h" + +#include +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/task_queue/task_queue_factory.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { + +constexpr int kRtcpReportIntervalMs = 5000; + +} // namespace + +AudioChannel::AudioChannel( + Transport* transport, + uint32_t local_ssrc, + TaskQueueFactory* task_queue_factory, + ProcessThread* process_thread, + AudioMixer* audio_mixer, + rtc::scoped_refptr decoder_factory) + : audio_mixer_(audio_mixer), process_thread_(process_thread) { + RTC_DCHECK(task_queue_factory); + RTC_DCHECK(process_thread); + RTC_DCHECK(audio_mixer); + + Clock* clock = Clock::GetRealTimeClock(); + receive_statistics_ = ReceiveStatistics::Create(clock); + + RtpRtcpInterface::Configuration rtp_config; + rtp_config.clock = clock; + rtp_config.audio = true; + rtp_config.receive_statistics = receive_statistics_.get(); + rtp_config.rtcp_report_interval_ms = kRtcpReportIntervalMs; + rtp_config.outgoing_transport = transport; + rtp_config.local_media_ssrc = local_ssrc; + + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config); + + rtp_rtcp_->SetSendingMediaStatus(false); + rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); + + // ProcessThread periodically services RTP stack for RTCP. + process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); + + ingress_ = std::make_unique(rtp_rtcp_.get(), clock, + receive_statistics_.get(), + std::move(decoder_factory)); + egress_ = + std::make_unique(rtp_rtcp_.get(), clock, task_queue_factory); + + // Set the instance of audio ingress to be part of audio mixer for ADM to + // fetch audio samples to play. 
+ audio_mixer_->AddSource(ingress_.get()); +} + +AudioChannel::~AudioChannel() { + if (egress_->IsSending()) { + StopSend(); + } + if (ingress_->IsPlaying()) { + StopPlay(); + } + + audio_mixer_->RemoveSource(ingress_.get()); + process_thread_->DeRegisterModule(rtp_rtcp_.get()); +} + +bool AudioChannel::StartSend() { + // If encoder has not been set, return false. + if (!egress_->StartSend()) { + return false; + } + + // Start sending with RTP stack if it has not been sending yet. + if (!rtp_rtcp_->Sending()) { + rtp_rtcp_->SetSendingStatus(true); + } + return true; +} + +void AudioChannel::StopSend() { + egress_->StopSend(); + + // Deactivate RTP stack when both sending and receiving are stopped. + // SetSendingStatus(false) triggers the transmission of RTCP BYE + // message to remote endpoint. + if (!ingress_->IsPlaying() && rtp_rtcp_->Sending()) { + rtp_rtcp_->SetSendingStatus(false); + } +} + +bool AudioChannel::StartPlay() { + // If decoders have not been set, return false. + if (!ingress_->StartPlay()) { + return false; + } + + // If RTP stack is not sending then start sending as in recv-only mode, RTCP + // receiver report is expected. + if (!rtp_rtcp_->Sending()) { + rtp_rtcp_->SetSendingStatus(true); + } + return true; +} + +void AudioChannel::StopPlay() { + ingress_->StopPlay(); + + // Deactivate RTP stack only when both sending and receiving are stopped. 
+ if (!rtp_rtcp_->SendingMedia() && rtp_rtcp_->Sending()) { + rtp_rtcp_->SetSendingStatus(false); + } +} + +IngressStatistics AudioChannel::GetIngressStatistics() { + IngressStatistics ingress_stats; + NetworkStatistics stats = ingress_->GetNetworkStatistics(); + ingress_stats.neteq_stats.total_samples_received = stats.totalSamplesReceived; + ingress_stats.neteq_stats.concealed_samples = stats.concealedSamples; + ingress_stats.neteq_stats.concealment_events = stats.concealmentEvents; + ingress_stats.neteq_stats.jitter_buffer_delay_ms = stats.jitterBufferDelayMs; + ingress_stats.neteq_stats.jitter_buffer_emitted_count = + stats.jitterBufferEmittedCount; + ingress_stats.neteq_stats.jitter_buffer_target_delay_ms = + stats.jitterBufferTargetDelayMs; + ingress_stats.neteq_stats.inserted_samples_for_deceleration = + stats.insertedSamplesForDeceleration; + ingress_stats.neteq_stats.removed_samples_for_acceleration = + stats.removedSamplesForAcceleration; + ingress_stats.neteq_stats.silent_concealed_samples = + stats.silentConcealedSamples; + ingress_stats.neteq_stats.fec_packets_received = stats.fecPacketsReceived; + ingress_stats.neteq_stats.fec_packets_discarded = stats.fecPacketsDiscarded; + ingress_stats.neteq_stats.delayed_packet_outage_samples = + stats.delayedPacketOutageSamples; + ingress_stats.neteq_stats.relative_packet_arrival_delay_ms = + stats.relativePacketArrivalDelayMs; + ingress_stats.neteq_stats.interruption_count = stats.interruptionCount; + ingress_stats.neteq_stats.total_interruption_duration_ms = + stats.totalInterruptionDurationMs; + ingress_stats.total_duration = ingress_->GetTotalDuration(); + return ingress_stats; +} + +} // namespace webrtc diff --git a/audio/voip/audio_channel.h b/audio/voip/audio_channel.h new file mode 100644 index 0000000000..a8946a7aa6 --- /dev/null +++ b/audio/voip/audio_channel.h @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef AUDIO_VOIP_AUDIO_CHANNEL_H_ +#define AUDIO_VOIP_AUDIO_CHANNEL_H_ + +#include +#include +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/voip/voip_base.h" +#include "api/voip/voip_statistics.h" +#include "audio/voip/audio_egress.h" +#include "audio/voip/audio_ingress.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/utility/include/process_thread.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// AudioChannel represents a single media session and provides APIs over +// AudioIngress and AudioEgress. Note that a single RTP stack is shared with +// these two classes as it has both sending and receiving capabilities. +class AudioChannel : public rtc::RefCountInterface { + public: + AudioChannel(Transport* transport, + uint32_t local_ssrc, + TaskQueueFactory* task_queue_factory, + ProcessThread* process_thread, + AudioMixer* audio_mixer, + rtc::scoped_refptr decoder_factory); + ~AudioChannel() override; + + // Set and get ChannelId that this audio channel belongs for debugging and + // logging purpose. + void SetId(ChannelId id) { id_ = id; } + ChannelId GetId() const { return id_; } + + // APIs to start/stop audio channel on each direction. + // StartSend/StartPlay returns false if encoder/decoders + // have not been set, respectively. + bool StartSend(); + void StopSend(); + bool StartPlay(); + void StopPlay(); + + // APIs relayed to AudioEgress. 
+ bool IsSendingMedia() const { return egress_->IsSending(); } + AudioSender* GetAudioSender() { return egress_.get(); } + void SetEncoder(int payload_type, + const SdpAudioFormat& encoder_format, + std::unique_ptr encoder) { + egress_->SetEncoder(payload_type, encoder_format, std::move(encoder)); + } + absl::optional GetEncoderFormat() const { + return egress_->GetEncoderFormat(); + } + void RegisterTelephoneEventType(int rtp_payload_type, int sample_rate_hz) { + egress_->RegisterTelephoneEventType(rtp_payload_type, sample_rate_hz); + } + bool SendTelephoneEvent(int dtmf_event, int duration_ms) { + return egress_->SendTelephoneEvent(dtmf_event, duration_ms); + } + + // APIs relayed to AudioIngress. + bool IsPlaying() const { return ingress_->IsPlaying(); } + void ReceivedRTPPacket(rtc::ArrayView rtp_packet) { + ingress_->ReceivedRTPPacket(rtp_packet); + } + void ReceivedRTCPPacket(rtc::ArrayView rtcp_packet) { + ingress_->ReceivedRTCPPacket(rtcp_packet); + } + void SetReceiveCodecs(const std::map& codecs) { + ingress_->SetReceiveCodecs(codecs); + } + IngressStatistics GetIngressStatistics(); + + private: + // ChannelId that this audio channel belongs for logging purpose. + ChannelId id_; + + // Synchronization is handled internally by AudioMixer. + AudioMixer* audio_mixer_; + + // Synchronization is handled internally by ProcessThread. + ProcessThread* process_thread_; + + // Listed in order for safe destruction of AudioChannel object. + // Synchronization for these are handled internally. + std::unique_ptr receive_statistics_; + std::unique_ptr rtp_rtcp_; + std::unique_ptr ingress_; + std::unique_ptr egress_; +}; + +} // namespace webrtc + +#endif // AUDIO_VOIP_AUDIO_CHANNEL_H_ diff --git a/audio/voip/audio_egress.cc b/audio/voip/audio_egress.cc new file mode 100644 index 0000000000..90e069e1cc --- /dev/null +++ b/audio/voip/audio_egress.cc @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "audio/voip/audio_egress.h" + +#include +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +AudioEgress::AudioEgress(RtpRtcpInterface* rtp_rtcp, + Clock* clock, + TaskQueueFactory* task_queue_factory) + : rtp_rtcp_(rtp_rtcp), + rtp_sender_audio_(clock, rtp_rtcp_->RtpSender()), + audio_coding_(AudioCodingModule::Create(AudioCodingModule::Config())), + encoder_queue_(task_queue_factory->CreateTaskQueue( + "AudioEncoder", + TaskQueueFactory::Priority::NORMAL)) { + audio_coding_->RegisterTransportCallback(this); +} + +AudioEgress::~AudioEgress() { + audio_coding_->RegisterTransportCallback(nullptr); +} + +bool AudioEgress::IsSending() const { + return rtp_rtcp_->SendingMedia(); +} + +void AudioEgress::SetEncoder(int payload_type, + const SdpAudioFormat& encoder_format, + std::unique_ptr encoder) { + RTC_DCHECK_GE(payload_type, 0); + RTC_DCHECK_LE(payload_type, 127); + + SetEncoderFormat(encoder_format); + + // The RTP/RTCP module needs to know the RTP timestamp rate (i.e. clockrate) + // as well as some other things, so we collect this info and send it along. 
+ rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, + encoder->RtpTimestampRateHz()); + rtp_sender_audio_.RegisterAudioPayload("audio", payload_type, + encoder->RtpTimestampRateHz(), + encoder->NumChannels(), 0); + + audio_coding_->SetEncoder(std::move(encoder)); +} + +bool AudioEgress::StartSend() { + if (!GetEncoderFormat()) { + RTC_DLOG(LS_WARNING) << "Send codec has not been set yet"; + return false; + } + rtp_rtcp_->SetSendingMediaStatus(true); + return true; +} + +void AudioEgress::StopSend() { + rtp_rtcp_->SetSendingMediaStatus(false); +} + +void AudioEgress::SendAudioData(std::unique_ptr audio_frame) { + RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); + RTC_DCHECK_LE(audio_frame->num_channels_, 8); + + encoder_queue_.PostTask( + [this, audio_frame = std::move(audio_frame)]() mutable { + RTC_DCHECK_RUN_ON(&encoder_queue_); + if (!rtp_rtcp_->SendingMedia()) { + return; + } + + AudioFrameOperations::Mute(audio_frame.get(), + encoder_context_.previously_muted_, + encoder_context_.mute_); + encoder_context_.previously_muted_ = encoder_context_.mute_; + + audio_frame->timestamp_ = encoder_context_.frame_rtp_timestamp_; + + // This call will trigger AudioPacketizationCallback::SendData if + // encoding is done and payload is ready for packetization and + // transmission. Otherwise, it will return without invoking the + // callback. + if (audio_coding_->Add10MsData(*audio_frame) < 0) { + RTC_DLOG(LS_ERROR) << "ACM::Add10MsData() failed."; + return; + } + + encoder_context_.frame_rtp_timestamp_ += + rtc::dchecked_cast(audio_frame->samples_per_channel_); + }); +} + +int32_t AudioEgress::SendData(AudioFrameType frame_type, + uint8_t payload_type, + uint32_t timestamp, + const uint8_t* payload_data, + size_t payload_size) { + RTC_DCHECK_RUN_ON(&encoder_queue_); + + rtc::ArrayView payload(payload_data, payload_size); + + // Currently we don't get a capture time from downstream modules (ADM, + // AudioTransportImpl). 
+ // TODO(natim@webrtc.org): Integrate once it's ready. + constexpr uint32_t kUndefinedCaptureTime = -1; + + // Push data from ACM to RTP/RTCP-module to deliver audio frame for + // packetization. + if (!rtp_rtcp_->OnSendingRtpFrame(timestamp, kUndefinedCaptureTime, + payload_type, + /*force_sender_report=*/false)) { + return -1; + } + + const uint32_t rtp_timestamp = timestamp + rtp_rtcp_->StartTimestamp(); + + // This call will trigger Transport::SendPacket() from the RTP/RTCP module. + if (!rtp_sender_audio_.SendAudio(frame_type, payload_type, rtp_timestamp, + payload.data(), payload.size())) { + RTC_DLOG(LS_ERROR) + << "AudioEgress::SendData() failed to send data to RTP/RTCP module"; + return -1; + } + + return 0; +} + +void AudioEgress::RegisterTelephoneEventType(int rtp_payload_type, + int sample_rate_hz) { + RTC_DCHECK_GE(rtp_payload_type, 0); + RTC_DCHECK_LE(rtp_payload_type, 127); + + rtp_rtcp_->RegisterSendPayloadFrequency(rtp_payload_type, sample_rate_hz); + rtp_sender_audio_.RegisterAudioPayload("telephone-event", rtp_payload_type, + sample_rate_hz, 0, 0); +} + +bool AudioEgress::SendTelephoneEvent(int dtmf_event, int duration_ms) { + RTC_DCHECK_GE(dtmf_event, 0); + RTC_DCHECK_LE(dtmf_event, 255); + RTC_DCHECK_GE(duration_ms, 0); + RTC_DCHECK_LE(duration_ms, 65535); + + if (!IsSending()) { + return false; + } + + constexpr int kTelephoneEventAttenuationdB = 10; + + if (rtp_sender_audio_.SendTelephoneEvent(dtmf_event, duration_ms, + kTelephoneEventAttenuationdB) != 0) { + RTC_DLOG(LS_ERROR) << "SendTelephoneEvent() failed to send event"; + return false; + } + return true; +} + +void AudioEgress::SetMute(bool mute) { + encoder_queue_.PostTask([this, mute] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_context_.mute_ = mute; + }); +} + +} // namespace webrtc diff --git a/audio/voip/audio_egress.h b/audio/voip/audio_egress.h new file mode 100644 index 0000000000..6b2d374717 --- /dev/null +++ b/audio/voip/audio_egress.h @@ -0,0 +1,144 @@ +/* + * Copyright 
(c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef AUDIO_VOIP_AUDIO_EGRESS_H_ +#define AUDIO_VOIP_AUDIO_EGRESS_H_ + +#include +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/task_queue/task_queue_factory.h" +#include "audio/utility/audio_frame_operations.h" +#include "call/audio_sender.h" +#include "modules/audio_coding/include/audio_coding_module.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_sender_audio.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/thread_checker.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +// AudioEgress receives input samples from AudioDeviceModule via +// AudioTransportImpl through AudioSender interface. Once it encodes the sample +// via selected encoder through AudioPacketizationCallback interface, the +// encoded payload will be packetized by the RTP stack, resulting in ready to +// send RTP packet to remote endpoint. +// +// TaskQueue is used to encode and send RTP asynchrounously as some OS platform +// uses the same thread for both audio input and output sample deliveries which +// can affect audio quality. +// +// Note that this class is originally based on ChannelSend in +// audio/channel_send.cc with non-audio related logic trimmed as aimed for +// smaller footprint. 
+class AudioEgress : public AudioSender, public AudioPacketizationCallback { + public: + AudioEgress(RtpRtcpInterface* rtp_rtcp, + Clock* clock, + TaskQueueFactory* task_queue_factory); + ~AudioEgress() override; + + // Set the encoder format and payload type for AudioCodingModule. + // It's possible to change the encoder type during its active usage. + // |payload_type| must be the type that is negotiated with peer through + // offer/answer. + void SetEncoder(int payload_type, + const SdpAudioFormat& encoder_format, + std::unique_ptr encoder); + + // Start or stop sending operation of AudioEgress. This will start/stop + // the RTP stack also causes encoder queue thread to start/stop + // processing input audio samples. StartSend will return false if + // a send codec has not been set. + bool StartSend(); + void StopSend(); + + // Query the state of the RTP stack. This returns true if StartSend() + // called and false if StopSend() is called. + bool IsSending() const; + + // Enable or disable Mute state. + void SetMute(bool mute); + + // Retrieve current encoder format info. This returns encoder format set + // by SetEncoder() and if encoder is not set, this will return nullopt. + absl::optional GetEncoderFormat() const { + MutexLock lock(&lock_); + return encoder_format_; + } + + // Register the payload type and sample rate for DTMF (RFC 4733) payload. + void RegisterTelephoneEventType(int rtp_payload_type, int sample_rate_hz); + + // Send DTMF named event as specified by + // https://tools.ietf.org/html/rfc4733#section-3.2 + // |duration_ms| specifies the duration of DTMF packets that will be emitted + // in place of real RTP packets instead. + // This will return true when requested dtmf event is successfully scheduled + // otherwise false when the dtmf queue reached maximum of 20 events. + bool SendTelephoneEvent(int dtmf_event, int duration_ms); + + // Implementation of AudioSender interface. 
+ void SendAudioData(std::unique_ptr audio_frame) override; + + // Implementation of AudioPacketizationCallback interface. + int32_t SendData(AudioFrameType frame_type, + uint8_t payload_type, + uint32_t timestamp, + const uint8_t* payload_data, + size_t payload_size) override; + + private: + void SetEncoderFormat(const SdpAudioFormat& encoder_format) { + MutexLock lock(&lock_); + encoder_format_ = encoder_format; + } + + mutable Mutex lock_; + + // Current encoder format selected by caller. + absl::optional encoder_format_ RTC_GUARDED_BY(lock_); + + // Synchronization is handled internally by RtpRtcp. + RtpRtcpInterface* const rtp_rtcp_; + + // Synchronization is handled internally by RTPSenderAudio. + RTPSenderAudio rtp_sender_audio_; + + // Synchronization is handled internally by AudioCodingModule. + const std::unique_ptr audio_coding_; + + // Struct that holds all variables used by encoder task queue. + struct EncoderContext { + // Offset used to mark rtp timestamp in sample rate unit in + // newly received audio frame from AudioTransport. + uint32_t frame_rtp_timestamp_ = 0; + + // Flag to track mute state from caller. |previously_muted_| is used to + // track previous state as part of input to AudioFrameOperations::Mute + // to implement fading effect when (un)mute is invoked. + bool mute_ = false; + bool previously_muted_ = false; + }; + + EncoderContext encoder_context_ RTC_GUARDED_BY(encoder_queue_); + + // Defined last to ensure that there are no running tasks when the other + // members are destroyed. + rtc::TaskQueue encoder_queue_; +}; + +} // namespace webrtc + +#endif // AUDIO_VOIP_AUDIO_EGRESS_H_ diff --git a/audio/voip/audio_ingress.cc b/audio/voip/audio_ingress.cc new file mode 100644 index 0000000000..07def99559 --- /dev/null +++ b/audio/voip/audio_ingress.cc @@ -0,0 +1,232 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "audio/voip/audio_ingress.h" + +#include +#include +#include + +#include "api/audio_codecs/audio_format.h" +#include "audio/utility/audio_frame_operations.h" +#include "modules/audio_coding/include/audio_coding_module.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { + +namespace { + +AudioCodingModule::Config CreateAcmConfig( + rtc::scoped_refptr decoder_factory) { + AudioCodingModule::Config acm_config; + acm_config.neteq_config.enable_muted_state = true; + acm_config.decoder_factory = decoder_factory; + return acm_config; +} + +} // namespace + +AudioIngress::AudioIngress( + RtpRtcpInterface* rtp_rtcp, + Clock* clock, + ReceiveStatistics* receive_statistics, + rtc::scoped_refptr decoder_factory) + : playing_(false), + remote_ssrc_(0), + first_rtp_timestamp_(-1), + rtp_receive_statistics_(receive_statistics), + rtp_rtcp_(rtp_rtcp), + acm_receiver_(CreateAcmConfig(decoder_factory)), + ntp_estimator_(clock) {} + +AudioIngress::~AudioIngress() = default; + +AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo( + int sampling_rate, + AudioFrame* audio_frame) { + audio_frame->sample_rate_hz_ = sampling_rate; + + // Get 10ms raw PCM data from the ACM. + bool muted = false; + if (acm_receiver_.GetAudio(sampling_rate, audio_frame, &muted) == -1) { + RTC_DLOG(LS_ERROR) << "GetAudio() failed!"; + // In all likelihood, the audio in this frame is garbage. We return an + // error so that the audio mixer module doesn't add it to the mix. As + // a result, it won't be played out and the actions skipped here are + // irrelevant. 
+ return AudioMixer::Source::AudioFrameInfo::kError; + } + + if (muted) { + AudioFrameOperations::Mute(audio_frame); + } + + // Measure audio level. + constexpr double kAudioSampleDurationSeconds = 0.01; + output_audio_level_.ComputeLevel(*audio_frame, kAudioSampleDurationSeconds); + + // If caller invoked StopPlay(), then mute the frame. + if (!playing_) { + AudioFrameOperations::Mute(audio_frame); + muted = true; + } + + // Set first rtp timestamp with first audio frame with valid timestamp. + if (first_rtp_timestamp_ < 0 && audio_frame->timestamp_ != 0) { + first_rtp_timestamp_ = audio_frame->timestamp_; + } + + if (first_rtp_timestamp_ >= 0) { + // Compute elapsed and NTP times. + int64_t unwrap_timestamp; + { + MutexLock lock(&lock_); + unwrap_timestamp = + timestamp_wrap_handler_.Unwrap(audio_frame->timestamp_); + audio_frame->ntp_time_ms_ = + ntp_estimator_.Estimate(audio_frame->timestamp_); + } + // For clock rate, default to the playout sampling rate if we haven't + // received any packets yet. + absl::optional> decoder = + acm_receiver_.LastDecoder(); + int clock_rate = decoder ? decoder->second.clockrate_hz + : acm_receiver_.last_output_sample_rate_hz(); + RTC_DCHECK_GT(clock_rate, 0); + audio_frame->elapsed_time_ms_ = + (unwrap_timestamp - first_rtp_timestamp_) / (clock_rate / 1000); + } + + return muted ? 
AudioMixer::Source::AudioFrameInfo::kMuted + : AudioMixer::Source::AudioFrameInfo::kNormal; +} + +bool AudioIngress::StartPlay() { + { + MutexLock lock(&lock_); + if (receive_codec_info_.empty()) { + RTC_DLOG(LS_WARNING) << "Receive codecs have not been set yet"; + return false; + } + } + playing_ = true; + return true; +} + +void AudioIngress::SetReceiveCodecs( + const std::map& codecs) { + { + MutexLock lock(&lock_); + for (const auto& kv : codecs) { + receive_codec_info_[kv.first] = kv.second.clockrate_hz; + } + } + acm_receiver_.SetCodecs(codecs); +} + +void AudioIngress::ReceivedRTPPacket(rtc::ArrayView rtp_packet) { + RtpPacketReceived rtp_packet_received; + rtp_packet_received.Parse(rtp_packet.data(), rtp_packet.size()); + + // Set payload type's sampling rate before we feed it into ReceiveStatistics. + { + MutexLock lock(&lock_); + const auto& it = + receive_codec_info_.find(rtp_packet_received.PayloadType()); + // If sampling rate info is not available in our received codec set, it + // would mean that remote media endpoint is sending incorrect payload id + // which can't be processed correctly especially on payload type id in + // dynamic range. 
+ if (it == receive_codec_info_.end()) { + RTC_DLOG(LS_WARNING) << "Unexpected payload id received: " + << rtp_packet_received.PayloadType(); + return; + } + rtp_packet_received.set_payload_type_frequency(it->second); + } + + rtp_receive_statistics_->OnRtpPacket(rtp_packet_received); + + RTPHeader header; + rtp_packet_received.GetHeader(&header); + + size_t packet_length = rtp_packet_received.size(); + if (packet_length < header.headerLength || + (packet_length - header.headerLength) < header.paddingLength) { + RTC_DLOG(LS_ERROR) << "Packet length(" << packet_length << ") header(" + << header.headerLength << ") padding(" + << header.paddingLength << ")"; + return; + } + + const uint8_t* payload = rtp_packet_received.data() + header.headerLength; + size_t payload_length = packet_length - header.headerLength; + size_t payload_data_length = payload_length - header.paddingLength; + auto data_view = rtc::ArrayView(payload, payload_data_length); + + // Push the incoming payload (parsed and ready for decoding) into the ACM. + if (acm_receiver_.InsertPacket(header, data_view) != 0) { + RTC_DLOG(LS_ERROR) << "AudioIngress::ReceivedRTPPacket() unable to " + "push data to the ACM"; + } +} + +void AudioIngress::ReceivedRTCPPacket( + rtc::ArrayView rtcp_packet) { + // Deliver RTCP packet to RTP/RTCP module for parsing. + rtp_rtcp_->IncomingRtcpPacket(rtcp_packet.data(), rtcp_packet.size()); + + int64_t rtt = GetRoundTripTime(); + if (rtt == -1) { + // Waiting for valid RTT. + return; + } + + uint32_t ntp_secs = 0, ntp_frac = 0, rtp_timestamp = 0; + if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr, + &rtp_timestamp) != 0) { + // Waiting for RTCP. 
+ return; + } + + { + MutexLock lock(&lock_); + ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); + } +} + +int64_t AudioIngress::GetRoundTripTime() { + const std::vector& report_data = + rtp_rtcp_->GetLatestReportBlockData(); + + // If we do not have report block which means remote RTCP hasn't be received + // yet, return -1 as to indicate uninitialized value. + if (report_data.empty()) { + return -1; + } + + // We don't know in advance the remote SSRC used by the other end's receiver + // reports, so use the SSRC of the first report block as remote SSRC for now. + // TODO(natim@webrtc.org): handle the case where remote end is changing ssrc + // and update accordingly here. + const ReportBlockData& block_data = report_data[0]; + + const uint32_t sender_ssrc = block_data.report_block().sender_ssrc; + + if (sender_ssrc != remote_ssrc_.load()) { + remote_ssrc_.store(sender_ssrc); + rtp_rtcp_->SetRemoteSSRC(sender_ssrc); + } + + return (block_data.has_rtt() ? block_data.last_rtt_ms() : -1); +} + +} // namespace webrtc diff --git a/audio/voip/audio_ingress.h b/audio/voip/audio_ingress.h new file mode 100644 index 0000000000..acb84c0b94 --- /dev/null +++ b/audio/voip/audio_ingress.h @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef AUDIO_VOIP_AUDIO_INGRESS_H_ +#define AUDIO_VOIP_AUDIO_INGRESS_H_ + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/audio/audio_mixer.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "audio/audio_level.h" +#include "modules/audio_coding/acm2/acm_receiver.h" +#include "modules/audio_coding/include/audio_coding_module.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +// AudioIngress handles incoming RTP/RTCP packets from the remote +// media endpoint. Received RTP packets are injected into AcmReceiver and +// when audio output thread requests for audio samples to play through system +// output such as speaker device, AudioIngress provides the samples via its +// implementation on AudioMixer::Source interface. +// +// Note that this class is originally based on ChannelReceive in +// audio/channel_receive.cc with non-audio related logic trimmed as aimed for +// smaller footprint. +class AudioIngress : public AudioMixer::Source { + public: + AudioIngress(RtpRtcpInterface* rtp_rtcp, + Clock* clock, + ReceiveStatistics* receive_statistics, + rtc::scoped_refptr decoder_factory); + ~AudioIngress() override; + + // Start or stop receiving operation of AudioIngress. + bool StartPlay(); + void StopPlay() { + playing_ = false; + output_audio_level_.ResetLevelFullRange(); + } + + // Query the state of the AudioIngress. + bool IsPlaying() const { return playing_; } + + // Set the decoder formats and payload type for AcmReceiver where the + // key type (int) of the map is the payload type of SdpAudioFormat. 
+ void SetReceiveCodecs(const std::map& codecs); + + // APIs to handle received RTP/RTCP packets from caller. + void ReceivedRTPPacket(rtc::ArrayView rtp_packet); + void ReceivedRTCPPacket(rtc::ArrayView rtcp_packet); + + // Retrieve highest speech output level in last 100 ms. Note that + // this isn't RMS but absolute raw audio level on int16_t sample unit. + // Therefore, the return value will vary between 0 ~ 0xFFFF. This type of + // value may be useful to be used for measuring active speaker gauge. + int GetSpeechOutputLevelFullRange() const { + return output_audio_level_.LevelFullRange(); + } + // Retrieves the total duration for all samples played so far as explained in + // audio/AudioLevel.h. + double GetTotalDuration() const { + return output_audio_level_.TotalDuration(); + } + + // Returns network round trip time (RTT) measued by RTCP exchange with + // remote media endpoint. RTT value -1 indicates that it's not initialized. + int64_t GetRoundTripTime(); + + NetworkStatistics GetNetworkStatistics() const { + NetworkStatistics stats; + acm_receiver_.GetNetworkStatistics(&stats, + /*get_and_clear_legacy_stats=*/false); + return stats; + } + + // Implementation of AudioMixer::Source interface. + AudioMixer::Source::AudioFrameInfo GetAudioFrameWithInfo( + int sampling_rate, + AudioFrame* audio_frame) override; + int Ssrc() const override { + return rtc::dchecked_cast(remote_ssrc_.load()); + } + int PreferredSampleRate() const override { + // If we haven't received any RTP packet from remote and thus + // last_packet_sampling_rate is not available then use NetEq's sampling + // rate as that would be what would be used for audio output sample. + return std::max(acm_receiver_.last_packet_sample_rate_hz().value_or(0), + acm_receiver_.last_output_sample_rate_hz()); + } + + private: + // Indicates AudioIngress status as caller invokes Start/StopPlaying. + // If not playing, incoming RTP data processing is skipped, thus + // producing no data to output device. 
+ std::atomic playing_; + + // Currently active remote ssrc from remote media endpoint. + std::atomic remote_ssrc_; + + // The first rtp timestamp of the output audio frame that is used to + // calculate elasped time for subsequent audio frames. + std::atomic first_rtp_timestamp_; + + // Synchronizaton is handled internally by ReceiveStatistics. + ReceiveStatistics* const rtp_receive_statistics_; + + // Synchronizaton is handled internally by RtpRtcpInterface. + RtpRtcpInterface* const rtp_rtcp_; + + // Synchronizaton is handled internally by acm2::AcmReceiver. + acm2::AcmReceiver acm_receiver_; + + // Synchronizaton is handled internally by voe::AudioLevel. + voe::AudioLevel output_audio_level_; + + Mutex lock_; + + RemoteNtpTimeEstimator ntp_estimator_ RTC_GUARDED_BY(lock_); + + // For receiving RTP statistics, this tracks the sampling rate value + // per payload type set when caller set via SetReceiveCodecs. + std::map receive_codec_info_ RTC_GUARDED_BY(lock_); + + rtc::TimestampWrapAroundHandler timestamp_wrap_handler_ RTC_GUARDED_BY(lock_); +}; + +} // namespace webrtc + +#endif // AUDIO_VOIP_AUDIO_INGRESS_H_ diff --git a/audio/voip/test/BUILD.gn b/audio/voip/test/BUILD.gn new file mode 100644 index 0000000000..ade10764f2 --- /dev/null +++ b/audio/voip/test/BUILD.gn @@ -0,0 +1,86 @@ +# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved. +# +# Use of this source code is governed by a BSD - style license +# that can be found in the LICENSE file in the root of the source +# tree.An additional intellectual property rights grant can be found +# in the file PATENTS.All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") + +if (rtc_include_tests) { + rtc_library("voip_core_unittests") { + testonly = true + sources = [ "voip_core_unittest.cc" ] + deps = [ + "..:voip_core", + "../../../api/audio_codecs:builtin_audio_decoder_factory", + "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/task_queue:default_task_queue_factory", + "../../../modules/audio_device:mock_audio_device", + "../../../modules/audio_processing:mocks", + "../../../modules/utility:mock_process_thread", + "../../../test:audio_codec_mocks", + "../../../test:mock_transport", + "../../../test:test_support", + ] + } + + rtc_library("audio_channel_unittests") { + testonly = true + sources = [ "audio_channel_unittest.cc" ] + deps = [ + "..:audio_channel", + "../../../api:transport_api", + "../../../api/audio_codecs:builtin_audio_decoder_factory", + "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/task_queue:default_task_queue_factory", + "../../../modules/audio_mixer:audio_mixer_impl", + "../../../modules/audio_mixer:audio_mixer_test_utils", + "../../../modules/rtp_rtcp:rtp_rtcp", + "../../../modules/rtp_rtcp:rtp_rtcp_format", + "../../../modules/utility", + "../../../rtc_base:logging", + "../../../rtc_base:rtc_event", + "../../../test:mock_transport", + "../../../test:test_support", + ] + } + + rtc_library("audio_ingress_unittests") { + testonly = true + sources = [ "audio_ingress_unittest.cc" ] + deps = [ + "..:audio_egress", + "..:audio_ingress", + "../../../api:transport_api", + "../../../api/audio_codecs:builtin_audio_decoder_factory", + "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/task_queue:default_task_queue_factory", + "../../../modules/audio_mixer:audio_mixer_test_utils", + "../../../modules/rtp_rtcp:rtp_rtcp", + "../../../rtc_base:logging", + "../../../rtc_base:rtc_event", + "../../../test:mock_transport", + "../../../test:test_support", + ] + } + + rtc_library("audio_egress_unittests") { + testonly = 
true + sources = [ "audio_egress_unittest.cc" ] + deps = [ + "..:audio_egress", + "../../../api:transport_api", + "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/task_queue:default_task_queue_factory", + "../../../modules/audio_mixer:audio_mixer_test_utils", + "../../../modules/rtp_rtcp:rtp_rtcp", + "../../../modules/rtp_rtcp:rtp_rtcp_format", + "../../../rtc_base:logging", + "../../../rtc_base:rtc_event", + "../../../test:mock_transport", + "../../../test:test_support", + ] + } +} diff --git a/audio/voip/test/audio_channel_unittest.cc b/audio/voip/test/audio_channel_unittest.cc new file mode 100644 index 0000000000..34b595cf9b --- /dev/null +++ b/audio/voip/test/audio_channel_unittest.cc @@ -0,0 +1,226 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "audio/voip/audio_channel.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/call/transport.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/audio_mixer/sine_wave_generator.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/utility/include/process_thread.h" +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_transport.h" + +namespace webrtc { +namespace { + +using ::testing::Invoke; +using ::testing::NiceMock; +using ::testing::Unused; + +constexpr uint64_t kStartTime = 123456789; +constexpr uint32_t kLocalSsrc = 0xdeadc0de; +constexpr int16_t kAudioLevel = 3004; // used for sine wave level +constexpr int kPcmuPayload = 0; + +class AudioChannelTest : public ::testing::Test { + public: + const SdpAudioFormat kPcmuFormat = {"pcmu", 8000, 1}; + + AudioChannelTest() + : fake_clock_(kStartTime), wave_generator_(1000.0, kAudioLevel) { + process_thread_ = ProcessThread::Create("ModuleProcessThread"); + audio_mixer_ = AudioMixerImpl::Create(); + task_queue_factory_ = CreateDefaultTaskQueueFactory(); + encoder_factory_ = CreateBuiltinAudioEncoderFactory(); + decoder_factory_ = CreateBuiltinAudioDecoderFactory(); + } + + void SetUp() override { + audio_channel_ = new rtc::RefCountedObject( + &transport_, kLocalSsrc, task_queue_factory_.get(), + process_thread_.get(), audio_mixer_.get(), decoder_factory_); + + audio_channel_->SetEncoder(kPcmuPayload, kPcmuFormat, + encoder_factory_->MakeAudioEncoder( + kPcmuPayload, kPcmuFormat, absl::nullopt)); + audio_channel_->SetReceiveCodecs({{kPcmuPayload, kPcmuFormat}}); + audio_channel_->StartSend(); + audio_channel_->StartPlay(); + } + + void TearDown() override { + audio_channel_->StopSend(); + audio_channel_->StopPlay(); + audio_channel_ = 
nullptr; + } + + std::unique_ptr GetAudioFrame(int order) { + auto frame = std::make_unique(); + frame->sample_rate_hz_ = kPcmuFormat.clockrate_hz; + frame->samples_per_channel_ = kPcmuFormat.clockrate_hz / 100; // 10 ms. + frame->num_channels_ = kPcmuFormat.num_channels; + frame->timestamp_ = frame->samples_per_channel_ * order; + wave_generator_.GenerateNextFrame(frame.get()); + return frame; + } + + SimulatedClock fake_clock_; + SineWaveGenerator wave_generator_; + NiceMock transport_; + std::unique_ptr task_queue_factory_; + rtc::scoped_refptr audio_mixer_; + rtc::scoped_refptr decoder_factory_; + rtc::scoped_refptr encoder_factory_; + std::unique_ptr process_thread_; + rtc::scoped_refptr audio_channel_; +}; + +// Validate RTP packet generation by feeding audio frames with sine wave. +// Resulted RTP packet is looped back into AudioChannel and gets decoded into +// audio frame to see if it has some signal to indicate its validity. +TEST_F(AudioChannelTest, PlayRtpByLocalLoop) { + rtc::Event event; + auto loop_rtp = [&](const uint8_t* packet, size_t length, Unused) { + audio_channel_->ReceivedRTPPacket( + rtc::ArrayView(packet, length)); + event.Set(); + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillOnce(Invoke(loop_rtp)); + + auto audio_sender = audio_channel_->GetAudioSender(); + audio_sender->SendAudioData(GetAudioFrame(0)); + audio_sender->SendAudioData(GetAudioFrame(1)); + + event.Wait(/*ms=*/1000); + + AudioFrame empty_frame, audio_frame; + empty_frame.Mute(); + empty_frame.mutable_data(); // This will zero out the data. + audio_frame.CopyFrom(empty_frame); + audio_mixer_->Mix(/*number_of_channels*/ 1, &audio_frame); + + // We expect now audio frame to pick up something. + EXPECT_NE(memcmp(empty_frame.data(), audio_frame.data(), + AudioFrame::kMaxDataSizeBytes), + 0); +} + +// Validate assigned local SSRC is resulted in RTP packet. 
+TEST_F(AudioChannelTest, VerifyLocalSsrcAsAssigned) { + RtpPacketReceived rtp; + rtc::Event event; + auto loop_rtp = [&](const uint8_t* packet, size_t length, Unused) { + rtp.Parse(packet, length); + event.Set(); + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillOnce(Invoke(loop_rtp)); + + auto audio_sender = audio_channel_->GetAudioSender(); + audio_sender->SendAudioData(GetAudioFrame(0)); + audio_sender->SendAudioData(GetAudioFrame(1)); + + event.Wait(/*ms=*/1000); + + EXPECT_EQ(rtp.Ssrc(), kLocalSsrc); +} + +// Check metrics after processing an RTP packet. +TEST_F(AudioChannelTest, TestIngressStatistics) { + auto event = std::make_unique(); + auto loop_rtp = [&](const uint8_t* packet, size_t length, Unused) { + audio_channel_->ReceivedRTPPacket( + rtc::ArrayView(packet, length)); + event->Set(); + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(loop_rtp)); + + auto audio_sender = audio_channel_->GetAudioSender(); + audio_sender->SendAudioData(GetAudioFrame(0)); + audio_sender->SendAudioData(GetAudioFrame(1)); + event->Wait(/*give_up_after_ms=*/1000); + + AudioFrame audio_frame; + audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); + audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); + + absl::optional ingress_stats = + audio_channel_->GetIngressStatistics(); + EXPECT_TRUE(ingress_stats); + EXPECT_EQ(ingress_stats->neteq_stats.total_samples_received, 160ULL); + EXPECT_EQ(ingress_stats->neteq_stats.concealed_samples, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.concealment_events, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.inserted_samples_for_deceleration, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.removed_samples_for_acceleration, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.silent_concealed_samples, 0ULL); + // To extract the jitter buffer length in millisecond, jitter_buffer_delay_ms + // needs to be divided by jitter_buffer_emitted_count (number of samples). 
+ EXPECT_EQ(ingress_stats->neteq_stats.jitter_buffer_delay_ms, 1600ULL); + EXPECT_EQ(ingress_stats->neteq_stats.jitter_buffer_emitted_count, 160ULL); + EXPECT_GT(ingress_stats->neteq_stats.jitter_buffer_target_delay_ms, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.interruption_count, 0); + EXPECT_EQ(ingress_stats->neteq_stats.total_interruption_duration_ms, 0); + EXPECT_DOUBLE_EQ(ingress_stats->total_duration, 0.02); + + // Now without any RTP pending in jitter buffer pull more. + audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); + audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); + + // Send another RTP packet to intentionally break PLC. + event = std::make_unique(); + audio_sender->SendAudioData(GetAudioFrame(2)); + audio_sender->SendAudioData(GetAudioFrame(3)); + event->Wait(/*give_up_after_ms=*/1000); + + ingress_stats = audio_channel_->GetIngressStatistics(); + EXPECT_TRUE(ingress_stats); + EXPECT_EQ(ingress_stats->neteq_stats.total_samples_received, 320ULL); + EXPECT_EQ(ingress_stats->neteq_stats.concealed_samples, 168ULL); + EXPECT_EQ(ingress_stats->neteq_stats.concealment_events, 1ULL); + EXPECT_EQ(ingress_stats->neteq_stats.inserted_samples_for_deceleration, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.removed_samples_for_acceleration, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.silent_concealed_samples, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.jitter_buffer_delay_ms, 1600ULL); + EXPECT_EQ(ingress_stats->neteq_stats.jitter_buffer_emitted_count, 160ULL); + EXPECT_GT(ingress_stats->neteq_stats.jitter_buffer_target_delay_ms, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.interruption_count, 0); + EXPECT_EQ(ingress_stats->neteq_stats.total_interruption_duration_ms, 0); + EXPECT_DOUBLE_EQ(ingress_stats->total_duration, 0.04); + + // Pull the last RTP packet. 
+ audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); + audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); + + ingress_stats = audio_channel_->GetIngressStatistics(); + EXPECT_TRUE(ingress_stats); + EXPECT_EQ(ingress_stats->neteq_stats.total_samples_received, 480ULL); + EXPECT_EQ(ingress_stats->neteq_stats.concealed_samples, 168ULL); + EXPECT_EQ(ingress_stats->neteq_stats.concealment_events, 1ULL); + EXPECT_EQ(ingress_stats->neteq_stats.inserted_samples_for_deceleration, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.removed_samples_for_acceleration, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.silent_concealed_samples, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.jitter_buffer_delay_ms, 3200ULL); + EXPECT_EQ(ingress_stats->neteq_stats.jitter_buffer_emitted_count, 320ULL); + EXPECT_GT(ingress_stats->neteq_stats.jitter_buffer_target_delay_ms, 0ULL); + EXPECT_EQ(ingress_stats->neteq_stats.interruption_count, 0); + EXPECT_EQ(ingress_stats->neteq_stats.total_interruption_duration_ms, 0); + EXPECT_DOUBLE_EQ(ingress_stats->total_duration, 0.06); +} + +} // namespace +} // namespace webrtc diff --git a/audio/voip/test/audio_egress_unittest.cc b/audio/voip/test/audio_egress_unittest.cc new file mode 100644 index 0000000000..70fb6dcf36 --- /dev/null +++ b/audio/voip/test/audio_egress_unittest.cc @@ -0,0 +1,290 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "audio/voip/audio_egress.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/call/transport.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "modules/audio_mixer/sine_wave_generator.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_transport.h" + +namespace webrtc { +namespace { + +using ::testing::Invoke; +using ::testing::NiceMock; +using ::testing::Unused; + +std::unique_ptr CreateRtpStack(Clock* clock, + Transport* transport, + uint32_t remote_ssrc) { + RtpRtcpInterface::Configuration rtp_config; + rtp_config.clock = clock; + rtp_config.audio = true; + rtp_config.rtcp_report_interval_ms = 5000; + rtp_config.outgoing_transport = transport; + rtp_config.local_media_ssrc = remote_ssrc; + auto rtp_rtcp = ModuleRtpRtcpImpl2::Create(rtp_config); + rtp_rtcp->SetSendingMediaStatus(false); + rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); + return rtp_rtcp; +} + +// AudioEgressTest configures audio egress by using Rtp Stack, fake clock, +// and task queue factory. Encoder factory is needed to create codec and +// configure the RTP stack in audio egress. +class AudioEgressTest : public ::testing::Test { + public: + static constexpr int16_t kAudioLevel = 3004; // Used for sine wave level. 
+ static constexpr uint16_t kSeqNum = 12345; + static constexpr uint64_t kStartTime = 123456789; + static constexpr uint32_t kRemoteSsrc = 0xDEADBEEF; + const SdpAudioFormat kPcmuFormat = {"pcmu", 8000, 1}; + + AudioEgressTest() + : fake_clock_(kStartTime), wave_generator_(1000.0, kAudioLevel) { + rtp_rtcp_ = CreateRtpStack(&fake_clock_, &transport_, kRemoteSsrc); + task_queue_factory_ = CreateDefaultTaskQueueFactory(); + encoder_factory_ = CreateBuiltinAudioEncoderFactory(); + } + + // Prepare test on audio egress by using PCMu codec with specific + // sequence number and its status to be running. + void SetUp() override { + egress_ = std::make_unique(rtp_rtcp_.get(), &fake_clock_, + task_queue_factory_.get()); + constexpr int kPcmuPayload = 0; + egress_->SetEncoder(kPcmuPayload, kPcmuFormat, + encoder_factory_->MakeAudioEncoder( + kPcmuPayload, kPcmuFormat, absl::nullopt)); + egress_->StartSend(); + rtp_rtcp_->SetSequenceNumber(kSeqNum); + rtp_rtcp_->SetSendingStatus(true); + } + + // Make sure we have shut down rtp stack and reset egress for each test. + void TearDown() override { + egress_->StopSend(); + rtp_rtcp_->SetSendingStatus(false); + egress_.reset(); + } + + // Create an audio frame prepared for pcmu encoding. Timestamp is + // increased per RTP specification which is the number of samples it contains. + // Wave generator writes sine wave which has expected high level set + // by kAudioLevel. + std::unique_ptr GetAudioFrame(int order) { + auto frame = std::make_unique(); + frame->sample_rate_hz_ = kPcmuFormat.clockrate_hz; + frame->samples_per_channel_ = kPcmuFormat.clockrate_hz / 100; // 10 ms. + frame->num_channels_ = kPcmuFormat.num_channels; + frame->timestamp_ = frame->samples_per_channel_ * order; + wave_generator_.GenerateNextFrame(frame.get()); + return frame; + } + + // SimulatedClock doesn't directly affect this testcase as the the + // AudioFrame's timestamp is driven by GetAudioFrame. 
+ SimulatedClock fake_clock_; + NiceMock transport_; + SineWaveGenerator wave_generator_; + std::unique_ptr rtp_rtcp_; + std::unique_ptr task_queue_factory_; + rtc::scoped_refptr encoder_factory_; + std::unique_ptr egress_; +}; + +TEST_F(AudioEgressTest, SendingStatusAfterStartAndStop) { + EXPECT_TRUE(egress_->IsSending()); + egress_->StopSend(); + EXPECT_FALSE(egress_->IsSending()); +} + +TEST_F(AudioEgressTest, ProcessAudioWithMute) { + constexpr int kExpected = 10; + rtc::Event event; + int rtp_count = 0; + RtpPacketReceived rtp; + auto rtp_sent = [&](const uint8_t* packet, size_t length, Unused) { + rtp.Parse(packet, length); + if (++rtp_count == kExpected) { + event.Set(); + } + return true; + }; + + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(rtp_sent)); + + egress_->SetMute(true); + + // Two 10 ms audio frames will result in rtp packet with ptime 20. + for (size_t i = 0; i < kExpected * 2; i++) { + egress_->SendAudioData(GetAudioFrame(i)); + fake_clock_.AdvanceTimeMilliseconds(10); + } + + event.Wait(/*ms=*/1000); + EXPECT_EQ(rtp_count, kExpected); + + // we expect on pcmu payload to result in 255 for silenced payload + RTPHeader header; + rtp.GetHeader(&header); + size_t packet_length = rtp.size(); + size_t payload_length = packet_length - header.headerLength; + size_t payload_data_length = payload_length - header.paddingLength; + const uint8_t* payload = rtp.data() + header.headerLength; + for (size_t i = 0; i < payload_data_length; ++i) { + EXPECT_EQ(*payload++, 255); + } +} + +TEST_F(AudioEgressTest, ProcessAudioWithSineWave) { + constexpr int kExpected = 10; + rtc::Event event; + int rtp_count = 0; + RtpPacketReceived rtp; + auto rtp_sent = [&](const uint8_t* packet, size_t length, Unused) { + rtp.Parse(packet, length); + if (++rtp_count == kExpected) { + event.Set(); + } + return true; + }; + + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(rtp_sent)); + + // Two 10 ms audio frames will result in rtp packet with ptime 20. 
+ for (size_t i = 0; i < kExpected * 2; i++) { + egress_->SendAudioData(GetAudioFrame(i)); + fake_clock_.AdvanceTimeMilliseconds(10); + } + + event.Wait(/*ms=*/1000); + EXPECT_EQ(rtp_count, kExpected); + + // we expect on pcmu to result in < 255 for payload with sine wave + RTPHeader header; + rtp.GetHeader(&header); + size_t packet_length = rtp.size(); + size_t payload_length = packet_length - header.headerLength; + size_t payload_data_length = payload_length - header.paddingLength; + const uint8_t* payload = rtp.data() + header.headerLength; + for (size_t i = 0; i < payload_data_length; ++i) { + EXPECT_NE(*payload++, 255); + } +} + +TEST_F(AudioEgressTest, SkipAudioEncodingAfterStopSend) { + constexpr int kExpected = 10; + rtc::Event event; + int rtp_count = 0; + auto rtp_sent = [&](const uint8_t* packet, size_t length, Unused) { + if (++rtp_count == kExpected) { + event.Set(); + } + return true; + }; + + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(rtp_sent)); + + // Two 10 ms audio frames will result in rtp packet with ptime 20. + for (size_t i = 0; i < kExpected * 2; i++) { + egress_->SendAudioData(GetAudioFrame(i)); + fake_clock_.AdvanceTimeMilliseconds(10); + } + + event.Wait(/*ms=*/1000); + EXPECT_EQ(rtp_count, kExpected); + + // Now stop send and yet feed more data. + egress_->StopSend(); + + // It should be safe to exit the test case while encoder_queue_ has + // outstanding data to process. We are making sure that this doesn't + // result in crahses or sanitizer errors due to remaining data. 
+ for (size_t i = 0; i < kExpected * 2; i++) { + egress_->SendAudioData(GetAudioFrame(i)); + fake_clock_.AdvanceTimeMilliseconds(10); + } +} + +TEST_F(AudioEgressTest, ChangeEncoderFromPcmuToOpus) { + absl::optional pcmu = egress_->GetEncoderFormat(); + EXPECT_TRUE(pcmu); + EXPECT_EQ(pcmu->clockrate_hz, kPcmuFormat.clockrate_hz); + EXPECT_EQ(pcmu->num_channels, kPcmuFormat.num_channels); + + constexpr int kOpusPayload = 120; + const SdpAudioFormat kOpusFormat = {"opus", 48000, 2}; + + egress_->SetEncoder(kOpusPayload, kOpusFormat, + encoder_factory_->MakeAudioEncoder( + kOpusPayload, kOpusFormat, absl::nullopt)); + + absl::optional opus = egress_->GetEncoderFormat(); + EXPECT_TRUE(opus); + EXPECT_EQ(opus->clockrate_hz, kOpusFormat.clockrate_hz); + EXPECT_EQ(opus->num_channels, kOpusFormat.num_channels); +} + +TEST_F(AudioEgressTest, SendDTMF) { + constexpr int kExpected = 7; + constexpr int kPayloadType = 100; + constexpr int kDurationMs = 100; + constexpr int kSampleRate = 8000; + constexpr int kEvent = 3; + + egress_->RegisterTelephoneEventType(kPayloadType, kSampleRate); + // 100 ms duration will produce total 7 DTMF + // 1 @ 20 ms, 2 @ 40 ms, 3 @ 60 ms, 4 @ 80 ms + // 5, 6, 7 @ 100 ms (last one sends 3 dtmf) + egress_->SendTelephoneEvent(kEvent, kDurationMs); + + rtc::Event event; + int dtmf_count = 0; + auto is_dtmf = [&](RtpPacketReceived& rtp) { + return (rtp.PayloadType() == kPayloadType && + rtp.SequenceNumber() == kSeqNum + dtmf_count && + rtp.padding_size() == 0 && rtp.Marker() == (dtmf_count == 0) && + rtp.Ssrc() == kRemoteSsrc); + }; + + // It's possible that we may have actual audio RTP packets along with + // DTMF packtets. We are only interested in the exact number of DTMF + // packets rtp stack is emitting. 
+ auto rtp_sent = [&](const uint8_t* packet, size_t length, Unused) { + RtpPacketReceived rtp; + rtp.Parse(packet, length); + if (is_dtmf(rtp) && ++dtmf_count == kExpected) { + event.Set(); + } + return true; + }; + + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(rtp_sent)); + + // Two 10 ms audio frames will result in rtp packet with ptime 20. + for (size_t i = 0; i < kExpected * 2; i++) { + egress_->SendAudioData(GetAudioFrame(i)); + fake_clock_.AdvanceTimeMilliseconds(10); + } + + event.Wait(/*ms=*/1000); + EXPECT_EQ(dtmf_count, kExpected); +} + +} // namespace +} // namespace webrtc diff --git a/audio/voip/test/audio_ingress_unittest.cc b/audio/voip/test/audio_ingress_unittest.cc new file mode 100644 index 0000000000..01b4d67dad --- /dev/null +++ b/audio/voip/test/audio_ingress_unittest.cc @@ -0,0 +1,228 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "audio/voip/audio_ingress.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/call/transport.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "audio/voip/audio_egress.h" +#include "modules/audio_mixer/sine_wave_generator.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_transport.h" + +namespace webrtc { +namespace { + +using ::testing::Invoke; +using ::testing::NiceMock; +using ::testing::Unused; + +constexpr int16_t kAudioLevel = 3004; // Used for sine wave level. 
+ +class AudioIngressTest : public ::testing::Test { + public: + const SdpAudioFormat kPcmuFormat = {"pcmu", 8000, 1}; + + AudioIngressTest() + : fake_clock_(123456789), wave_generator_(1000.0, kAudioLevel) { + receive_statistics_ = ReceiveStatistics::Create(&fake_clock_); + + RtpRtcpInterface::Configuration rtp_config; + rtp_config.clock = &fake_clock_; + rtp_config.audio = true; + rtp_config.receive_statistics = receive_statistics_.get(); + rtp_config.rtcp_report_interval_ms = 5000; + rtp_config.outgoing_transport = &transport_; + rtp_config.local_media_ssrc = 0xdeadc0de; + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config); + + rtp_rtcp_->SetSendingMediaStatus(false); + rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); + + task_queue_factory_ = CreateDefaultTaskQueueFactory(); + encoder_factory_ = CreateBuiltinAudioEncoderFactory(); + decoder_factory_ = CreateBuiltinAudioDecoderFactory(); + } + + void SetUp() override { + constexpr int kPcmuPayload = 0; + ingress_ = std::make_unique(rtp_rtcp_.get(), &fake_clock_, + receive_statistics_.get(), + decoder_factory_); + ingress_->SetReceiveCodecs({{kPcmuPayload, kPcmuFormat}}); + + egress_ = std::make_unique(rtp_rtcp_.get(), &fake_clock_, + task_queue_factory_.get()); + egress_->SetEncoder(kPcmuPayload, kPcmuFormat, + encoder_factory_->MakeAudioEncoder( + kPcmuPayload, kPcmuFormat, absl::nullopt)); + egress_->StartSend(); + ingress_->StartPlay(); + rtp_rtcp_->SetSendingStatus(true); + } + + void TearDown() override { + rtp_rtcp_->SetSendingStatus(false); + ingress_->StopPlay(); + egress_->StopSend(); + egress_.reset(); + ingress_.reset(); + } + + std::unique_ptr GetAudioFrame(int order) { + auto frame = std::make_unique(); + frame->sample_rate_hz_ = kPcmuFormat.clockrate_hz; + frame->samples_per_channel_ = kPcmuFormat.clockrate_hz / 100; // 10 ms. 
+ frame->num_channels_ = kPcmuFormat.num_channels; + frame->timestamp_ = frame->samples_per_channel_ * order; + wave_generator_.GenerateNextFrame(frame.get()); + return frame; + } + + SimulatedClock fake_clock_; + SineWaveGenerator wave_generator_; + NiceMock transport_; + std::unique_ptr receive_statistics_; + std::unique_ptr rtp_rtcp_; + rtc::scoped_refptr encoder_factory_; + rtc::scoped_refptr decoder_factory_; + std::unique_ptr task_queue_factory_; + std::unique_ptr ingress_; + std::unique_ptr egress_; +}; + +TEST_F(AudioIngressTest, PlayingAfterStartAndStop) { + EXPECT_EQ(ingress_->IsPlaying(), true); + ingress_->StopPlay(); + EXPECT_EQ(ingress_->IsPlaying(), false); +} + +TEST_F(AudioIngressTest, GetAudioFrameAfterRtpReceived) { + rtc::Event event; + auto handle_rtp = [&](const uint8_t* packet, size_t length, Unused) { + ingress_->ReceivedRTPPacket(rtc::ArrayView(packet, length)); + event.Set(); + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(handle_rtp)); + egress_->SendAudioData(GetAudioFrame(0)); + egress_->SendAudioData(GetAudioFrame(1)); + event.Wait(/*ms=*/1000); + + AudioFrame audio_frame; + EXPECT_EQ( + ingress_->GetAudioFrameWithInfo(kPcmuFormat.clockrate_hz, &audio_frame), + AudioMixer::Source::AudioFrameInfo::kNormal); + EXPECT_FALSE(audio_frame.muted()); + EXPECT_EQ(audio_frame.num_channels_, 1u); + EXPECT_EQ(audio_frame.samples_per_channel_, + static_cast(kPcmuFormat.clockrate_hz / 100)); + EXPECT_EQ(audio_frame.sample_rate_hz_, kPcmuFormat.clockrate_hz); + EXPECT_NE(audio_frame.timestamp_, 0u); + EXPECT_EQ(audio_frame.elapsed_time_ms_, 0); +} + +TEST_F(AudioIngressTest, GetSpeechOutputLevelFullRange) { + // Per audio_level's kUpdateFrequency, we need 11 RTP to get audio level. 
+ constexpr int kNumRtp = 11; + int rtp_count = 0; + rtc::Event event; + auto handle_rtp = [&](const uint8_t* packet, size_t length, Unused) { + ingress_->ReceivedRTPPacket(rtc::ArrayView(packet, length)); + if (++rtp_count == kNumRtp) { + event.Set(); + } + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(handle_rtp)); + for (int i = 0; i < kNumRtp * 2; i++) { + egress_->SendAudioData(GetAudioFrame(i)); + fake_clock_.AdvanceTimeMilliseconds(10); + } + event.Wait(/*ms=*/1000); + + for (int i = 0; i < kNumRtp; ++i) { + AudioFrame audio_frame; + EXPECT_EQ( + ingress_->GetAudioFrameWithInfo(kPcmuFormat.clockrate_hz, &audio_frame), + AudioMixer::Source::AudioFrameInfo::kNormal); + } + EXPECT_EQ(ingress_->GetSpeechOutputLevelFullRange(), kAudioLevel); +} + +TEST_F(AudioIngressTest, PreferredSampleRate) { + rtc::Event event; + auto handle_rtp = [&](const uint8_t* packet, size_t length, Unused) { + ingress_->ReceivedRTPPacket(rtc::ArrayView(packet, length)); + event.Set(); + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(handle_rtp)); + egress_->SendAudioData(GetAudioFrame(0)); + egress_->SendAudioData(GetAudioFrame(1)); + event.Wait(/*ms=*/1000); + + AudioFrame audio_frame; + EXPECT_EQ( + ingress_->GetAudioFrameWithInfo(kPcmuFormat.clockrate_hz, &audio_frame), + AudioMixer::Source::AudioFrameInfo::kNormal); + EXPECT_EQ(ingress_->PreferredSampleRate(), kPcmuFormat.clockrate_hz); +} + +// This test highlights the case where caller invokes StopPlay() which then +// AudioIngress should play silence frame afterwards. +TEST_F(AudioIngressTest, GetMutedAudioFrameAfterRtpReceivedAndStopPlay) { + // StopPlay before we start sending RTP packet with sine wave. + ingress_->StopPlay(); + + // Send 6 RTP packets to generate more than 100 ms audio sample to get + // valid speech level. 
+ constexpr int kNumRtp = 6; + int rtp_count = 0; + rtc::Event event; + auto handle_rtp = [&](const uint8_t* packet, size_t length, Unused) { + ingress_->ReceivedRTPPacket(rtc::ArrayView(packet, length)); + if (++rtp_count == kNumRtp) { + event.Set(); + } + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(handle_rtp)); + for (int i = 0; i < kNumRtp * 2; i++) { + egress_->SendAudioData(GetAudioFrame(i)); + fake_clock_.AdvanceTimeMilliseconds(10); + } + event.Wait(/*give_up_after_ms=*/1000); + + for (int i = 0; i < kNumRtp * 2; ++i) { + AudioFrame audio_frame; + EXPECT_EQ( + ingress_->GetAudioFrameWithInfo(kPcmuFormat.clockrate_hz, &audio_frame), + AudioMixer::Source::AudioFrameInfo::kMuted); + const int16_t* audio_data = audio_frame.data(); + size_t length = + audio_frame.samples_per_channel_ * audio_frame.num_channels_; + for (size_t j = 0; j < length; ++j) { + EXPECT_EQ(audio_data[j], 0); + } + } + + // Now we should still see valid speech output level as StopPlay won't affect + // the measurement. + EXPECT_EQ(ingress_->GetSpeechOutputLevelFullRange(), kAudioLevel); +} + +} // namespace +} // namespace webrtc diff --git a/audio/voip/test/voip_core_unittest.cc b/audio/voip/test/voip_core_unittest.cc new file mode 100644 index 0000000000..9763d588d5 --- /dev/null +++ b/audio/voip/test/voip_core_unittest.cc @@ -0,0 +1,215 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "audio/voip/voip_core.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "modules/audio_device/include/mock_audio_device.h" +#include "modules/audio_processing/include/mock_audio_processing.h" +#include "modules/utility/include/mock/mock_process_thread.h" +#include "test/gtest.h" +#include "test/mock_transport.h" + +namespace webrtc { +namespace { + +using ::testing::NiceMock; +using ::testing::Return; + +constexpr int kPcmuPayload = 0; +constexpr int kPcmuSampleRateHz = 8000; +constexpr int kDtmfEventDurationMs = 1000; +constexpr DtmfEvent kDtmfEventCode = DtmfEvent::kDigitZero; + +class VoipCoreTest : public ::testing::Test { + public: + const SdpAudioFormat kPcmuFormat = {"pcmu", 8000, 1}; + + VoipCoreTest() { audio_device_ = test::MockAudioDeviceModule::CreateNice(); } + + void SetUp() override { + auto encoder_factory = CreateBuiltinAudioEncoderFactory(); + auto decoder_factory = CreateBuiltinAudioDecoderFactory(); + rtc::scoped_refptr audio_processing = + new rtc::RefCountedObject(); + + auto process_thread = std::make_unique>(); + // Hold the pointer to use for testing. + process_thread_ = process_thread.get(); + + voip_core_ = std::make_unique( + std::move(encoder_factory), std::move(decoder_factory), + CreateDefaultTaskQueueFactory(), audio_device_, + std::move(audio_processing), std::move(process_thread)); + } + + std::unique_ptr voip_core_; + NiceMock transport_; + rtc::scoped_refptr audio_device_; + NiceMock* process_thread_; +}; + +// Validate expected API calls that involves with VoipCore. Some verification is +// involved with checking mock audio device. +TEST_F(VoipCoreTest, BasicVoipCoreOperation) { + // Program mock as non-operational and ready to start. 
+ EXPECT_CALL(*audio_device_, Recording()).WillOnce(Return(false)); + EXPECT_CALL(*audio_device_, Playing()).WillOnce(Return(false)); + EXPECT_CALL(*audio_device_, InitRecording()).WillOnce(Return(0)); + EXPECT_CALL(*audio_device_, InitPlayout()).WillOnce(Return(0)); + EXPECT_CALL(*audio_device_, StartRecording()).WillOnce(Return(0)); + EXPECT_CALL(*audio_device_, StartPlayout()).WillOnce(Return(0)); + + auto channel = voip_core_->CreateChannel(&transport_, 0xdeadc0de); + EXPECT_TRUE(channel); + + voip_core_->SetSendCodec(*channel, kPcmuPayload, kPcmuFormat); + voip_core_->SetReceiveCodecs(*channel, {{kPcmuPayload, kPcmuFormat}}); + + EXPECT_TRUE(voip_core_->StartSend(*channel)); + EXPECT_TRUE(voip_core_->StartPlayout(*channel)); + + voip_core_->RegisterTelephoneEventType(*channel, kPcmuPayload, + kPcmuSampleRateHz); + + EXPECT_TRUE(voip_core_->SendDtmfEvent(*channel, kDtmfEventCode, + kDtmfEventDurationMs)); + + // Program mock as operational that is ready to be stopped. + EXPECT_CALL(*audio_device_, Recording()).WillOnce(Return(true)); + EXPECT_CALL(*audio_device_, Playing()).WillOnce(Return(true)); + EXPECT_CALL(*audio_device_, StopRecording()).WillOnce(Return(0)); + EXPECT_CALL(*audio_device_, StopPlayout()).WillOnce(Return(0)); + + EXPECT_TRUE(voip_core_->StopSend(*channel)); + EXPECT_TRUE(voip_core_->StopPlayout(*channel)); + voip_core_->ReleaseChannel(*channel); +} + +TEST_F(VoipCoreTest, ExpectFailToUseReleasedChannelId) { + auto channel = voip_core_->CreateChannel(&transport_, 0xdeadc0de); + EXPECT_TRUE(channel); + + // Release right after creation. + voip_core_->ReleaseChannel(*channel); + + // Now use released channel. + + // These should be no-op. 
+ voip_core_->SetSendCodec(*channel, kPcmuPayload, kPcmuFormat); + voip_core_->SetReceiveCodecs(*channel, {{kPcmuPayload, kPcmuFormat}}); + voip_core_->RegisterTelephoneEventType(*channel, kPcmuPayload, + kPcmuSampleRateHz); + + EXPECT_FALSE(voip_core_->StartSend(*channel)); + EXPECT_FALSE(voip_core_->StartPlayout(*channel)); + EXPECT_FALSE(voip_core_->SendDtmfEvent(*channel, kDtmfEventCode, + kDtmfEventDurationMs)); +} + +TEST_F(VoipCoreTest, SendDtmfEventWithoutRegistering) { + // Program mock as non-operational and ready to start send. + EXPECT_CALL(*audio_device_, Recording()).WillOnce(Return(false)); + EXPECT_CALL(*audio_device_, InitRecording()).WillOnce(Return(0)); + EXPECT_CALL(*audio_device_, StartRecording()).WillOnce(Return(0)); + + auto channel = voip_core_->CreateChannel(&transport_, 0xdeadc0de); + EXPECT_TRUE(channel); + + voip_core_->SetSendCodec(*channel, kPcmuPayload, kPcmuFormat); + + EXPECT_TRUE(voip_core_->StartSend(*channel)); + // Send Dtmf event without registering beforehand, thus payload + // type is not set and false is expected. + EXPECT_FALSE(voip_core_->SendDtmfEvent(*channel, kDtmfEventCode, + kDtmfEventDurationMs)); + + // Program mock as sending and is ready to be stopped. + EXPECT_CALL(*audio_device_, Recording()).WillOnce(Return(true)); + EXPECT_CALL(*audio_device_, StopRecording()).WillOnce(Return(0)); + + EXPECT_TRUE(voip_core_->StopSend(*channel)); + voip_core_->ReleaseChannel(*channel); +} + +TEST_F(VoipCoreTest, SendDtmfEventWithoutStartSend) { + auto channel = voip_core_->CreateChannel(&transport_, 0xdeadc0de); + EXPECT_TRUE(channel); + + voip_core_->RegisterTelephoneEventType(*channel, kPcmuPayload, + kPcmuSampleRateHz); + // Send Dtmf event without calling StartSend beforehand, thus + // Dtmf events cannot be sent and false is expected. 
+ EXPECT_FALSE(voip_core_->SendDtmfEvent(*channel, kDtmfEventCode, + kDtmfEventDurationMs)); + + voip_core_->ReleaseChannel(*channel); +} + +TEST_F(VoipCoreTest, StartSendAndPlayoutWithoutSettingCodec) { + auto channel = voip_core_->CreateChannel(&transport_, 0xdeadc0de); + EXPECT_TRUE(channel); + + // Call StartSend and StartPlayout without setting send/receive + // codec. Code should see that codecs aren't set and return false. + EXPECT_FALSE(voip_core_->StartSend(*channel)); + EXPECT_FALSE(voip_core_->StartPlayout(*channel)); + + voip_core_->ReleaseChannel(*channel); +} + +TEST_F(VoipCoreTest, StopSendAndPlayoutWithoutStarting) { + auto channel = voip_core_->CreateChannel(&transport_, 0xdeadc0de); + EXPECT_TRUE(channel); + + voip_core_->SetSendCodec(*channel, kPcmuPayload, kPcmuFormat); + voip_core_->SetReceiveCodecs(*channel, {{kPcmuPayload, kPcmuFormat}}); + + // Call StopSend and StopPlayout without starting them in + // the first place. Should see that it is already in the + // stopped state and return true. + EXPECT_TRUE(voip_core_->StopSend(*channel)); + EXPECT_TRUE(voip_core_->StopPlayout(*channel)); + + voip_core_->ReleaseChannel(*channel); +} + +// This tests correctness on ProcessThread usage where we expect the first/last +// channel creation/release triggers its Start/Stop method once only. 
+TEST_F(VoipCoreTest, TestProcessThreadOperation) { + EXPECT_CALL(*process_thread_, Start); + EXPECT_CALL(*process_thread_, RegisterModule).Times(2); + + auto channel_one = voip_core_->CreateChannel(&transport_, 0xdeadc0de); + auto channel_two = voip_core_->CreateChannel(&transport_, 0xdeadbeef); + EXPECT_TRUE(channel_one); + EXPECT_TRUE(channel_two); + + EXPECT_CALL(*process_thread_, Stop); + EXPECT_CALL(*process_thread_, DeRegisterModule).Times(2); + + voip_core_->ReleaseChannel(*channel_one); + voip_core_->ReleaseChannel(*channel_two); + + EXPECT_CALL(*process_thread_, Start); + EXPECT_CALL(*process_thread_, RegisterModule); + + auto channel_three = voip_core_->CreateChannel(&transport_, absl::nullopt); + EXPECT_TRUE(channel_three); + + EXPECT_CALL(*process_thread_, Stop); + EXPECT_CALL(*process_thread_, DeRegisterModule); + + voip_core_->ReleaseChannel(*channel_three); +} + +} // namespace +} // namespace webrtc diff --git a/audio/voip/voip_core.cc b/audio/voip/voip_core.cc new file mode 100644 index 0000000000..92b80b5b71 --- /dev/null +++ b/audio/voip/voip_core.cc @@ -0,0 +1,423 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "audio/voip/voip_core.h" + +#include +#include +#include + +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { + +// For Windows, use specific enum type to initialize default audio device as +// defined in AudioDeviceModule::WindowsDeviceType. 
+#if defined(WEBRTC_WIN) +constexpr AudioDeviceModule::WindowsDeviceType kAudioDeviceId = + AudioDeviceModule::WindowsDeviceType::kDefaultCommunicationDevice; +#else +constexpr uint16_t kAudioDeviceId = 0; +#endif // defined(WEBRTC_WIN) + +// Maximum value range limit on ChannelId. This can be increased without any +// side effect and only set at this moderate value for better readability for +// logging. +static constexpr int kMaxChannelId = 100000; + +} // namespace + +VoipCore::VoipCore(rtc::scoped_refptr encoder_factory, + rtc::scoped_refptr decoder_factory, + std::unique_ptr task_queue_factory, + rtc::scoped_refptr audio_device_module, + rtc::scoped_refptr audio_processing, + std::unique_ptr process_thread) { + encoder_factory_ = std::move(encoder_factory); + decoder_factory_ = std::move(decoder_factory); + task_queue_factory_ = std::move(task_queue_factory); + audio_device_module_ = std::move(audio_device_module); + audio_processing_ = std::move(audio_processing); + process_thread_ = std::move(process_thread); + + if (!process_thread_) { + process_thread_ = ProcessThread::Create("ModuleProcessThread"); + } + audio_mixer_ = AudioMixerImpl::Create(); + + // AudioTransportImpl depends on audio mixer and audio processing instances. + audio_transport_ = std::make_unique( + audio_mixer_.get(), audio_processing_.get(), nullptr); +} + +bool VoipCore::InitializeIfNeeded() { + // |audio_device_module_| internally owns a lock and the whole logic here + // needs to be executed atomically once using another lock in VoipCore. + // Further changes in this method will need to make sure that no deadlock is + // introduced in the future. + MutexLock lock(&lock_); + + if (initialized_) { + return true; + } + + // Initialize ADM. + if (audio_device_module_->Init() != 0) { + RTC_LOG(LS_ERROR) << "Failed to initialize the ADM."; + return false; + } + + // Note that failures on initializing default recording/speaker devices are + // not considered to be fatal here. 
In certain cases, caller may not care about + // recording device functioning (e.g. webinar where only speaker is available). + // It's also possible that there are other audio devices available that may + // work. + + // Initialize default speaker device. + if (audio_device_module_->SetPlayoutDevice(kAudioDeviceId) != 0) { + RTC_LOG(LS_WARNING) << "Unable to set playout device."; + } + if (audio_device_module_->InitSpeaker() != 0) { + RTC_LOG(LS_WARNING) << "Unable to access speaker."; + } + + // Initialize default recording device. + if (audio_device_module_->SetRecordingDevice(kAudioDeviceId) != 0) { + RTC_LOG(LS_WARNING) << "Unable to set recording device."; + } + if (audio_device_module_->InitMicrophone() != 0) { + RTC_LOG(LS_WARNING) << "Unable to access microphone."; + } + + // Set number of channels on speaker device. + bool available = false; + if (audio_device_module_->StereoPlayoutIsAvailable(&available) != 0) { + RTC_LOG(LS_WARNING) << "Unable to query stereo playout."; + } + if (audio_device_module_->SetStereoPlayout(available) != 0) { + RTC_LOG(LS_WARNING) << "Unable to set mono/stereo playout mode."; + } + + // Set number of channels on recording device. + available = false; + if (audio_device_module_->StereoRecordingIsAvailable(&available) != 0) { + RTC_LOG(LS_WARNING) << "Unable to query stereo recording."; + } + if (audio_device_module_->SetStereoRecording(available) != 0) { + RTC_LOG(LS_WARNING) << "Unable to set stereo recording mode."; + } + + if (audio_device_module_->RegisterAudioCallback(audio_transport_.get()) != + 0) { + RTC_LOG(LS_WARNING) << "Unable to register audio callback."; + } + + initialized_ = true; + + return true; +} + +absl::optional VoipCore::CreateChannel( + Transport* transport, + absl::optional local_ssrc) { + absl::optional channel_id; + + // Set local ssrc to random if not set by caller. 
+ if (!local_ssrc) { + Random random(rtc::TimeMicros()); + local_ssrc = random.Rand(); + } + + rtc::scoped_refptr channel = + new rtc::RefCountedObject( + transport, local_ssrc.value(), task_queue_factory_.get(), + process_thread_.get(), audio_mixer_.get(), decoder_factory_); + + // Check if we need to start the process thread. + bool start_process_thread = false; + + { + MutexLock lock(&lock_); + + // Start process thread if the channel is the first one. + start_process_thread = channels_.empty(); + + channel_id = static_cast(next_channel_id_); + channels_[*channel_id] = channel; + next_channel_id_++; + if (next_channel_id_ >= kMaxChannelId) { + next_channel_id_ = 0; + } + } + + // Set ChannelId in audio channel for logging/debugging purpose. + channel->SetId(*channel_id); + + if (start_process_thread) { + process_thread_->Start(); + } + + return channel_id; +} + +void VoipCore::ReleaseChannel(ChannelId channel_id) { + // Destroy channel outside of the lock. + rtc::scoped_refptr channel; + + bool no_channels_after_release = false; + + { + MutexLock lock(&lock_); + + auto iter = channels_.find(channel_id); + if (iter != channels_.end()) { + channel = std::move(iter->second); + channels_.erase(iter); + } + + no_channels_after_release = channels_.empty(); + } + + if (!channel) { + RTC_LOG(LS_WARNING) << "Channel " << channel_id << " not found"; + } + + if (no_channels_after_release) { + // Release audio channel first to have it DeRegisterModule first. + channel = nullptr; + process_thread_->Stop(); + + // Make sure to stop playout on ADM if it is playing. 
+ if (audio_device_module_->Playing()) { + if (audio_device_module_->StopPlayout() != 0) { + RTC_LOG(LS_WARNING) << "StopPlayout failed"; + } + } + } +} + +rtc::scoped_refptr VoipCore::GetChannel(ChannelId channel_id) { + rtc::scoped_refptr channel; + { + MutexLock lock(&lock_); + auto iter = channels_.find(channel_id); + if (iter != channels_.end()) { + channel = iter->second; + } + } + if (!channel) { + RTC_LOG(LS_ERROR) << "Channel " << channel_id << " not found"; + } + return channel; +} + +bool VoipCore::UpdateAudioTransportWithSenders() { + std::vector audio_senders; + + // Gather a list of audio channel that are currently sending along with + // highest sampling rate and channel numbers to configure into audio + // transport. + int max_sampling_rate = 8000; + size_t max_num_channels = 1; + { + MutexLock lock(&lock_); + // Reserve to prevent run time vector re-allocation. + audio_senders.reserve(channels_.size()); + for (auto kv : channels_) { + rtc::scoped_refptr& channel = kv.second; + if (channel->IsSendingMedia()) { + auto encoder_format = channel->GetEncoderFormat(); + if (!encoder_format) { + RTC_LOG(LS_ERROR) + << "channel " << channel->GetId() << " encoder is not set"; + continue; + } + audio_senders.push_back(channel->GetAudioSender()); + max_sampling_rate = + std::max(max_sampling_rate, encoder_format->clockrate_hz); + max_num_channels = + std::max(max_num_channels, encoder_format->num_channels); + } + } + } + + audio_transport_->UpdateAudioSenders(audio_senders, max_sampling_rate, + max_num_channels); + + // Depending on availability of senders, turn on or off ADM recording. + if (!audio_senders.empty()) { + // Initialize audio device module and default device if needed. 
+ if (!InitializeIfNeeded()) { + return false; + } + + if (!audio_device_module_->Recording()) { + if (audio_device_module_->InitRecording() != 0) { + RTC_LOG(LS_ERROR) << "InitRecording failed"; + return false; + } + if (audio_device_module_->StartRecording() != 0) { + RTC_LOG(LS_ERROR) << "StartRecording failed"; + return false; + } + } + } else { + if (audio_device_module_->Recording() && + audio_device_module_->StopRecording() != 0) { + RTC_LOG(LS_ERROR) << "StopRecording failed"; + return false; + } + } + return true; +} + +bool VoipCore::StartSend(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel || !channel->StartSend()) { + return false; + } + + return UpdateAudioTransportWithSenders(); +} + +bool VoipCore::StopSend(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { + return false; + } + + channel->StopSend(); + + return UpdateAudioTransportWithSenders(); +} + +bool VoipCore::StartPlayout(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { + return false; + } + + if (channel->IsPlaying()) { + return true; + } + + if (!channel->StartPlay()) { + return false; + } + + // Initialize audio device module and default device if needed. 
+ if (!InitializeIfNeeded()) { + return false; + } + + if (!audio_device_module_->Playing()) { + if (audio_device_module_->InitPlayout() != 0) { + RTC_LOG(LS_ERROR) << "InitPlayout failed"; + return false; + } + if (audio_device_module_->StartPlayout() != 0) { + RTC_LOG(LS_ERROR) << "StartPlayout failed"; + return false; + } + } + return true; +} + +bool VoipCore::StopPlayout(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { + return false; + } + + channel->StopPlay(); + + return true; +} + +void VoipCore::ReceivedRTPPacket(ChannelId channel_id, + rtc::ArrayView rtp_packet) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->ReceivedRTPPacket(rtp_packet); + } +} + +void VoipCore::ReceivedRTCPPacket(ChannelId channel_id, + rtc::ArrayView rtcp_packet) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->ReceivedRTCPPacket(rtcp_packet); + } +} + +void VoipCore::SetSendCodec(ChannelId channel_id, + int payload_type, + const SdpAudioFormat& encoder_format) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + auto encoder = encoder_factory_->MakeAudioEncoder( + payload_type, encoder_format, absl::nullopt); + channel->SetEncoder(payload_type, encoder_format, std::move(encoder)); + } +} + +void VoipCore::SetReceiveCodecs( + ChannelId channel_id, + const std::map& decoder_specs) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->SetReceiveCodecs(decoder_specs); + } +} + +void VoipCore::RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + channel->RegisterTelephoneEventType(rtp_payload_type, sample_rate_hz); + } +} + +bool VoipCore::SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if 
 (channel) { + return channel->SendTelephoneEvent(static_cast(dtmf_event), + duration_ms); + } + return false; +} + +absl::optional VoipCore::GetIngressStatistics( + ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (channel) { + return channel->GetIngressStatistics(); + } + return absl::nullopt; +} + +} // namespace webrtc diff --git a/audio/voip/voip_core.h b/audio/voip/voip_core.h new file mode 100644 index 0000000000..4279f770d9 --- /dev/null +++ b/audio/voip/voip_core.h @@ -0,0 +1,170 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef AUDIO_VOIP_VOIP_CORE_H_ +#define AUDIO_VOIP_VOIP_CORE_H_ + +#include +#include +#include +#include +#include + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/voip/voip_base.h" +#include "api/voip/voip_codec.h" +#include "api/voip/voip_dtmf.h" +#include "api/voip/voip_engine.h" +#include "api/voip/voip_network.h" +#include "api/voip/voip_statistics.h" +#include "audio/audio_transport_impl.h" +#include "audio/voip/audio_channel.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "modules/utility/include/process_thread.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +// VoipCore is the implementation of VoIP APIs listed in api/voip directory. 
+// It manages a vector of AudioChannel objects where each is mapped with a +// ChannelId (int) type. ChannelId is the primary key to locate a specific +// AudioChannel object to operate requested VoIP API from the caller. +// +// This class receives required audio components from caller at construction and +// owns the life cycle of them to orchestrate the proper destruction sequence. +class VoipCore : public VoipEngine, + public VoipBase, + public VoipNetwork, + public VoipCodec, + public VoipDtmf, + public VoipStatistics { + public: + // Construct VoipCore with provided arguments. + // ProcessThread implementation can be injected by |process_thread| + // (mainly for testing purpose) and when set to nullptr, default + // implementation will be used. + VoipCore(rtc::scoped_refptr encoder_factory, + rtc::scoped_refptr decoder_factory, + std::unique_ptr task_queue_factory, + rtc::scoped_refptr audio_device_module, + rtc::scoped_refptr audio_processing, + std::unique_ptr process_thread = nullptr); + ~VoipCore() override = default; + + // Implements VoipEngine interfaces. + VoipBase& Base() override { return *this; } + VoipNetwork& Network() override { return *this; } + VoipCodec& Codec() override { return *this; } + VoipDtmf& Dtmf() override { return *this; } + VoipStatistics& Statistics() override { return *this; } + + // Implements VoipBase interfaces. + absl::optional CreateChannel( + Transport* transport, + absl::optional local_ssrc) override; + void ReleaseChannel(ChannelId channel_id) override; + bool StartSend(ChannelId channel_id) override; + bool StopSend(ChannelId channel_id) override; + bool StartPlayout(ChannelId channel_id) override; + bool StopPlayout(ChannelId channel_id) override; + + // Implements VoipNetwork interfaces. + void ReceivedRTPPacket(ChannelId channel_id, + rtc::ArrayView rtp_packet) override; + void ReceivedRTCPPacket(ChannelId channel_id, + rtc::ArrayView rtcp_packet) override; + + // Implements VoipCodec interfaces. 
+ void SetSendCodec(ChannelId channel_id, + int payload_type, + const SdpAudioFormat& encoder_format) override; + void SetReceiveCodecs( + ChannelId channel_id, + const std::map& decoder_specs) override; + + // Implements VoipDtmf interfaces. + void RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) override; + bool SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) override; + + // Implements VoipStatistics interfaces. + absl::optional GetIngressStatistics( + ChannelId channel_id) override; + + private: + // Initialize ADM and default audio device if needed. + // Returns true if ADM is successfully initialized or already in such state + // (e.g. called more than once). Returns false when ADM fails to initialize + // which would presumably render further processing useless. Note that such + // failure won't necessarily succeed in next initialization attempt as it + // would mean changing the ADM implementation. From Android N and onwards, the + // mobile app may not be able to gain microphone access when in background + // mode. Therefore it would be better to delay the logic as late as possible. + bool InitializeIfNeeded(); + + // Fetches the corresponding AudioChannel assigned with given |channel|. + // Returns nullptr if not found. + rtc::scoped_refptr GetChannel(ChannelId channel_id); + + // Updates AudioTransportImpl with a new set of actively sending AudioSender + // (AudioEgress). This needs to be invoked whenever StartSend/StopSend is + // invoked by the caller. Returns false when the selected audio device fails to + // initialize where it can't expect to deliver any audio input sample. + bool UpdateAudioTransportWithSenders(); + + // Synchronization for these is handled internally. + rtc::scoped_refptr encoder_factory_; + rtc::scoped_refptr decoder_factory_; + std::unique_ptr task_queue_factory_; + + // Synchronization is handled internally by AudioProcessing. 
+ // Must be placed before |audio_device_module_| for proper destruction. + rtc::scoped_refptr audio_processing_; + + // Synchronization is handled internally by AudioMixer. + // Must be placed before |audio_device_module_| for proper destruction. + rtc::scoped_refptr audio_mixer_; + + // Synchronization is handled internally by AudioTransportImpl. + // Must be placed before |audio_device_module_| for proper destruction. + std::unique_ptr audio_transport_; + + // Synchronization is handled internally by AudioDeviceModule. + rtc::scoped_refptr audio_device_module_; + + // Synchronization is handled internally by ProcessThread. + // Must be placed before |channels_| for proper destruction. + std::unique_ptr process_thread_; + + Mutex lock_; + + // Member to track a next ChannelId for new AudioChannel. + int next_channel_id_ RTC_GUARDED_BY(lock_) = 0; + + // Container to track currently active AudioChannel objects mapped by + // ChannelId. + std::unordered_map> channels_ + RTC_GUARDED_BY(lock_); + + // Boolean flag to ensure initialization only occurs once. + bool initialized_ RTC_GUARDED_BY(lock_) = false; +}; + +} // namespace webrtc + +#endif // AUDIO_VOIP_VOIP_CORE_H_ diff --git a/build_overrides/OWNERS b/build_overrides/OWNERS index 5465ed8e6a..48e6927746 100644 --- a/build_overrides/OWNERS +++ b/build_overrides/OWNERS @@ -1,2 +1 @@ mbonadei@webrtc.org -phoglund@webrtc.org diff --git a/build_overrides/build.gni b/build_overrides/build.gni index 669044db81..38b8ce4eba 100644 --- a/build_overrides/build.gni +++ b/build_overrides/build.gni @@ -16,25 +16,39 @@ linux_use_bundled_binutils_override = true # only needed to support both WebRTC standalone and Chromium builds. build_with_chromium = false +# WebRTC checks out google_benchmark by default since it is always used. +checkout_google_benchmark = true + # Use our own suppressions files. 
asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc" lsan_suppressions_file = "//tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc" tsan_suppressions_file = "//tools_webrtc/sanitizers/tsan_suppressions_webrtc.cc" msan_blacklist_path = - rebase_path("//tools_webrtc/msan/blacklist.txt", root_build_dir) + rebase_path("//tools_webrtc/msan/suppressions.txt", root_build_dir) ubsan_blacklist_path = - rebase_path("//tools_webrtc/ubsan/blacklist.txt", root_build_dir) + rebase_path("//tools_webrtc/ubsan/suppressions.txt", root_build_dir) ubsan_vptr_blacklist_path = - rebase_path("//tools_webrtc/ubsan/vptr_blacklist.txt", root_build_dir) - -# Android lint suppressions file -lint_suppressions_file = "//tools_webrtc/android/suppressions.xml" + rebase_path("//tools_webrtc/ubsan/vptr_suppressions.txt", root_build_dir) # For Chromium, Android 32-bit non-component, non-clang builds hit a 4GiB size # limit, making them requiring symbol_level=2. WebRTC doesn't hit that problem # so we just ignore that assert. See https://crbug.com/648948 for more info. ignore_elf32_limitations = true +if (is_win || is_ios || is_android) { + rtc_use_h265 = true +} else { + rtc_use_h265 = false +} + +if (is_ios || is_android) { + rtc_use_recorder = true + rtc_use_bt_mixer = true +} else { + rtc_use_recorder = false + rtc_use_bt_mixer = false +} + # Use bundled hermetic Xcode installation maintainted by Chromium, # except for local iOS builds where it's unsupported. if (host_os == "mac") { @@ -46,3 +60,15 @@ if (host_os == "mac") { "hermetic toolchain if the minimum OS version is not met.") use_system_xcode = _result == 0 } + +declare_args() { + # WebRTC doesn't depend on //base from production code but only for testing + # purposes. In any case, it doesn't depend on //third_party/perfetto which + # is used for base tracing, so this feature is disabled. 
+ enable_base_tracing = false + use_perfetto_client_library = false + + # If true, it assumes that //third_party/abseil-cpp is an available + # dependency for googletest. + gtest_enable_absl_printers = true +} diff --git a/call/BUILD.gn b/call/BUILD.gn index e14370c53d..f2c1d73e19 100644 --- a/call/BUILD.gn +++ b/call/BUILD.gn @@ -32,12 +32,15 @@ rtc_library("call_interfaces") { ":rtp_interfaces", ":video_stream_api", "../api:fec_controller_api", + "../api:frame_transformer_interface", "../api:network_state_predictor_api", "../api:rtc_error", "../api:rtp_headers", "../api:rtp_parameters", "../api:scoped_refptr", "../api:transport_api", + "../api/adaptation:resource_adaptation_api", + "../api/audio:audio_frame_processor", "../api/audio:audio_mixer_api", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", @@ -49,6 +52,7 @@ rtc_library("call_interfaces") { "../api/transport:network_control", "../api/transport:webrtc_key_value_config", "../api/transport/rtp:rtp_source", + "../modules/async_audio_processing", "../modules/audio_device", "../modules/audio_processing", "../modules/audio_processing:api", @@ -60,8 +64,8 @@ rtc_library("call_interfaces") { "../rtc_base:checks", "../rtc_base:rtc_base_approved", "../rtc_base/network:sent_packet", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("audio_sender_interface") { @@ -79,7 +83,6 @@ rtc_library("rtp_interfaces") { # client code gets updated. 
visibility = [ "*" ] sources = [ - "rtcp_packet_sink_interface.h", "rtp_config.cc", "rtp_config.h", "rtp_packet_sink_interface.h", @@ -89,14 +92,20 @@ rtc_library("rtp_interfaces") { deps = [ "../api:array_view", "../api:fec_controller_api", + "../api:frame_transformer_interface", "../api:rtp_headers", "../api:rtp_parameters", "../api/crypto:options", "../api/rtc_event_log", "../api/transport:bitrate_settings", "../api/units:timestamp", + "../common_video:frame_counts", "../modules/rtp_rtcp:rtp_rtcp_format", + "../rtc_base:checks", "../rtc_base:rtc_base_approved", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -104,17 +113,12 @@ rtc_library("rtp_interfaces") { rtc_library("rtp_receiver") { visibility = [ "*" ] sources = [ - "rtcp_demuxer.cc", - "rtcp_demuxer.h", "rtp_demuxer.cc", "rtp_demuxer.h", - "rtp_rtcp_demuxer_helper.cc", - "rtp_rtcp_demuxer_helper.h", "rtp_stream_receiver_controller.cc", "rtp_stream_receiver_controller.h", "rtx_receive_stream.cc", "rtx_receive_stream.h", - "ssrc_binding_observer.h", ] deps = [ ":rtp_interfaces", @@ -124,8 +128,8 @@ rtc_library("rtp_receiver") { "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtp_sender") { @@ -151,10 +155,12 @@ rtc_library("rtp_sender") { "../api/transport:field_trial_based_config", "../api/transport:goog_cc", "../api/transport:network_control", + "../api/transport:webrtc_key_value_config", "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", "../api/video:video_frame", + "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", "../logging:rtc_event_bwe", @@ -166,15 +172,19 @@ rtc_library("rtp_sender") { "../modules/rtp_rtcp:rtp_rtcp_format", 
"../modules/rtp_rtcp:rtp_video_header", "../modules/utility", + "../modules/video_coding:chain_diff_calculator", "../modules/video_coding:codec_globals_headers", + "../modules/video_coding:frame_dependencies_calculator", "../modules/video_coding:video_codec_interface", "../rtc_base", "../rtc_base:checks", "../rtc_base:rate_limiter", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_task_queue", + "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", - "../system_wrappers:field_trial", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings:strings", @@ -194,10 +204,11 @@ rtc_library("bitrate_configurator") { # For api/bitrate_constraints.h "../api:libjingle_peerconnection_api", "../api/transport:bitrate_settings", + "../api/units:data_rate", "../rtc_base:checks", "../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("bitrate_allocator") { @@ -217,8 +228,8 @@ rtc_library("bitrate_allocator") { "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/algorithm:container", ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("call") { @@ -273,14 +284,19 @@ rtc_library("call") { "../rtc_base:safe_minmax", "../rtc_base/experiments:field_trial_parser", "../rtc_base/network:sent_packet", - "../rtc_base/synchronization:rw_lock_wrapper", "../rtc_base/synchronization:sequence_checker", + "../rtc_base/task_utils:pending_task_safety_flag", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", "../video", - "//third_party/abseil-cpp/absl/types:optional", + "adaptation:resource_adaptation", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + + if (rtc_use_recorder) { + deps += [ 
"../modules/recording:recording" ] + } } rtc_library("video_stream_api") { @@ -292,9 +308,12 @@ rtc_library("video_stream_api") { ] deps = [ ":rtp_interfaces", + "../api:frame_transformer_interface", "../api:rtp_headers", "../api:rtp_parameters", + "../api:scoped_refptr", "../api:transport_api", + "../api/adaptation:resource_adaptation_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", @@ -305,11 +324,12 @@ rtc_library("video_stream_api") { "../api/video:video_stream_encoder", "../api/video_codecs:video_codecs_api", "../common_video", + "../common_video:frame_counts", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("simulated_network") { @@ -325,9 +345,10 @@ rtc_library("simulated_network") { "../api/units:timestamp", "../rtc_base:checks", "../rtc_base:rtc_base_approved", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("simulated_packet_receiver") { @@ -353,6 +374,7 @@ rtc_library("fake_network") { "../modules/utility", "../rtc_base:checks", "../rtc_base:rtc_base_approved", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", "../system_wrappers", ] @@ -368,11 +390,9 @@ if (rtc_include_tests) { "call_unittest.cc", "flexfec_receive_stream_unittest.cc", "receive_time_calculator_unittest.cc", - "rtcp_demuxer_unittest.cc", "rtp_bitrate_configurator_unittest.cc", "rtp_demuxer_unittest.cc", "rtp_payload_params_unittest.cc", - "rtp_rtcp_demuxer_helper_unittest.cc", "rtp_video_sender_unittest.cc", "rtx_receive_stream_unittest.cc", ] @@ -395,7 +415,9 @@ if (rtc_include_tests) { "../api/audio_codecs:builtin_audio_decoder_factory", 
"../api/rtc_event_log", "../api/task_queue:default_task_queue_factory", + "../api/test/video:function_video_factory", "../api/transport:field_trial_based_config", + "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:video_frame", "../api/video:video_rtp_headers", "../audio", @@ -416,23 +438,30 @@ if (rtc_include_tests) { "../rtc_base:rate_limiter", "../rtc_base:rtc_base_approved", "../rtc_base:task_queue_for_test", + "../rtc_base/synchronization:mutex", "../system_wrappers", "../test:audio_codec_mocks", "../test:direct_transport", "../test:encoder_settings", "../test:fake_video_codecs", "../test:field_trial", + "../test:mock_frame_transformer", + "../test:mock_transport", "../test:test_common", "../test:test_support", "../test:video_test_common", "../test/time_controller:time_controller", "../video", + "adaptation:resource_adaptation_test_utilities", "//test/scenario:scenario", "//testing/gmock", "//testing/gtest", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/types:variant", ] } @@ -469,6 +498,7 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_approved", "../rtc_base:task_queue_for_test", "../rtc_base:task_queue_for_test", + "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", "../system_wrappers", "../system_wrappers:metrics", @@ -485,8 +515,8 @@ if (rtc_include_tests) { "../test:video_test_common", "../video", "//testing/gtest", - "//third_party/abseil-cpp/absl/flags:flag", ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ] } # TODO(eladalon): This should be moved, as with the TODO for |rtp_interfaces|. 
@@ -499,6 +529,7 @@ if (rtc_include_tests) { ] deps = [ ":rtp_interfaces", + "../api:frame_transformer_interface", "../api:libjingle_peerconnection_api", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", @@ -543,7 +574,7 @@ if (rtc_include_tests) { "../system_wrappers", "../test:test_support", "//testing/gtest", - "//third_party/abseil-cpp/absl/algorithm:container", ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } } diff --git a/call/DEPS b/call/DEPS index f823a7b9c3..2260ceaf53 100644 --- a/call/DEPS +++ b/call/DEPS @@ -1,6 +1,7 @@ include_rules = [ "+audio", "+logging/rtc_event_log", + "+modules/async_audio_processing", "+modules/audio_coding", "+modules/audio_device", "+modules/audio_mixer", @@ -17,11 +18,12 @@ include_rules = [ specific_include_rules = { "video_receive_stream\.h": [ - "+common_video/include", - "+media/base", + "+common_video/frame_counts.h", ], "video_send_stream\.h": [ - "+common_video/include", - "+media/base", + "+common_video", ], + "rtp_transport_controller_send_interface\.h": [ + "+common_video/frame_counts.h", + ] } diff --git a/call/OWNERS b/call/OWNERS index c4f0b5648b..f863b939bc 100644 --- a/call/OWNERS +++ b/call/OWNERS @@ -1,9 +1,5 @@ mflodman@webrtc.org -nisse@webrtc.org stefan@webrtc.org srte@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* +terelius@webrtc.org +sprang@webrtc.org diff --git a/call/adaptation/BUILD.gn b/call/adaptation/BUILD.gn index 10e8cc607c..d88e19acc2 100644 --- a/call/adaptation/BUILD.gn +++ b/call/adaptation/BUILD.gn @@ -10,26 +10,49 @@ import("../../webrtc.gni") rtc_library("resource_adaptation") { sources = [ - "resource.cc", - "resource.h", - "resource_adaptation_module_interface.cc", - "resource_adaptation_module_interface.h", + "adaptation_constraint.cc", + "adaptation_constraint.h", + "broadcast_resource_listener.cc", + "broadcast_resource_listener.h", + "degradation_preference_provider.cc", + "degradation_preference_provider.h", + "encoder_settings.cc", + "encoder_settings.h", "resource_adaptation_processor.cc", "resource_adaptation_processor.h", - "resource_consumer.cc", - "resource_consumer.h", - "resource_consumer_configuration.cc", - "resource_consumer_configuration.h", + "resource_adaptation_processor_interface.cc", + "resource_adaptation_processor_interface.h", "video_source_restrictions.cc", "video_source_restrictions.h", + "video_stream_adapter.cc", + "video_stream_adapter.h", + "video_stream_input_state.cc", + "video_stream_input_state.h", + "video_stream_input_state_provider.cc", + "video_stream_input_state_provider.h", ] deps = [ "../../api:rtp_parameters", + "../../api:scoped_refptr", + "../../api/adaptation:resource_adaptation_api", + "../../api/task_queue:task_queue", + "../../api/video:video_adaptation", "../../api/video:video_frame", + "../../api/video:video_stream_encoder", "../../api/video_codecs:video_codecs_api", + "../../modules/video_coding:video_coding_utility", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_task_queue", + "../../rtc_base/experiments:balanced_degradation_settings", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/synchronization:sequence_checker", + "../../rtc_base/task_utils:to_queued_task", + ] + absl_deps = [ + 
"//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/types:variant", ] } @@ -37,29 +60,59 @@ if (rtc_include_tests) { rtc_library("resource_adaptation_tests") { testonly = true - sources = [ "resource_adaptation_processor_unittest.cc" ] + sources = [ + "broadcast_resource_listener_unittest.cc", + "resource_adaptation_processor_unittest.cc", + "resource_unittest.cc", + "video_source_restrictions_unittest.cc", + "video_stream_adapter_unittest.cc", + "video_stream_input_state_provider_unittest.cc", + ] deps = [ ":resource_adaptation", ":resource_adaptation_test_utilities", + "../../api:scoped_refptr", + "../../api/adaptation:resource_adaptation_api", + "../../api/task_queue:default_task_queue_factory", + "../../api/task_queue:task_queue", + "../../api/video:video_adaptation", + "../../api/video_codecs:video_codecs_api", "../../rtc_base:checks", + "../../rtc_base:gunit_helpers", "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_task_queue", + "../../rtc_base:task_queue_for_test", + "../../rtc_base/synchronization:mutex", + "../../test:field_trial", + "../../test:rtc_expect_death", "../../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("resource_adaptation_test_utilities") { testonly = true sources = [ + "test/fake_adaptation_constraint.cc", + "test/fake_adaptation_constraint.h", + "test/fake_frame_rate_provider.cc", + "test/fake_frame_rate_provider.h", "test/fake_resource.cc", "test/fake_resource.h", - "test/fake_resource_consumer_configuration.cc", - "test/fake_resource_consumer_configuration.h", + "test/mock_resource_listener.h", ] deps = [ ":resource_adaptation", + "../../api:scoped_refptr", + "../../api/adaptation:resource_adaptation_api", + "../../api/task_queue:task_queue", + "../../api/video:video_stream_encoder", "../../rtc_base:rtc_base_approved", + 
"../../rtc_base/synchronization:sequence_checker", + "../../rtc_base/task_utils:to_queued_task", + "../../test:test_support", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/call/adaptation/OWNERS b/call/adaptation/OWNERS index 8a355d83f4..b65c763efc 100644 --- a/call/adaptation/OWNERS +++ b/call/adaptation/OWNERS @@ -1,7 +1,3 @@ +eshr@google.com hbos@webrtc.org -sprang@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* +ilnik@webrtc.org diff --git a/api/video/video_stream_encoder_observer.cc b/call/adaptation/adaptation_constraint.cc similarity index 66% rename from api/video/video_stream_encoder_observer.cc rename to call/adaptation/adaptation_constraint.cc index 3b9bd522c3..d62bb74f87 100644 --- a/api/video/video_stream_encoder_observer.cc +++ b/call/adaptation/adaptation_constraint.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/video/video_stream_encoder_observer.h" +#include "call/adaptation/adaptation_constraint.h" namespace webrtc { -VideoStreamEncoderObserver::AdaptationSteps::AdaptationSteps() = default; +AdaptationConstraint::~AdaptationConstraint() {} } // namespace webrtc diff --git a/call/adaptation/adaptation_constraint.h b/call/adaptation/adaptation_constraint.h new file mode 100644 index 0000000000..9ad6414cd1 --- /dev/null +++ b/call/adaptation/adaptation_constraint.h @@ -0,0 +1,41 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_ +#define CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_ + +#include + +#include "api/adaptation/resource.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" + +namespace webrtc { + +// Adaptation constraints have the ability to prevent applying a proposed +// adaptation (expressed as restrictions before/after adaptation). +class AdaptationConstraint { + public: + virtual ~AdaptationConstraint(); + + virtual std::string Name() const = 0; + + // TODO(https://crbug.com/webrtc/11172): When we have multi-stream adaptation + // support, this interface needs to indicate which stream the adaptation + // applies to. + virtual bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const = 0; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_ diff --git a/call/adaptation/broadcast_resource_listener.cc b/call/adaptation/broadcast_resource_listener.cc new file mode 100644 index 0000000000..59bd1e0c7f --- /dev/null +++ b/call/adaptation/broadcast_resource_listener.cc @@ -0,0 +1,120 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "call/adaptation/broadcast_resource_listener.h" + +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +// The AdapterResource redirects resource usage measurements from its parent to +// a single ResourceListener. +class BroadcastResourceListener::AdapterResource : public Resource { + public: + explicit AdapterResource(std::string name) : name_(std::move(name)) {} + ~AdapterResource() override { RTC_DCHECK(!listener_); } + + // The parent is letting us know we have a usage measurement. + void OnResourceUsageStateMeasured(ResourceUsageState usage_state) { + MutexLock lock(&lock_); + if (!listener_) + return; + listener_->OnResourceUsageStateMeasured(this, usage_state); + } + + // Resource implementation. + std::string Name() const override { return name_; } + void SetResourceListener(ResourceListener* listener) override { + MutexLock lock(&lock_); + RTC_DCHECK(!listener_ || !listener); + listener_ = listener; + } + + private: + const std::string name_; + Mutex lock_; + ResourceListener* listener_ RTC_GUARDED_BY(lock_) = nullptr; +}; + +BroadcastResourceListener::BroadcastResourceListener( + rtc::scoped_refptr source_resource) + : source_resource_(source_resource), is_listening_(false) { + RTC_DCHECK(source_resource_); +} + +BroadcastResourceListener::~BroadcastResourceListener() { + RTC_DCHECK(!is_listening_); +} + +rtc::scoped_refptr BroadcastResourceListener::SourceResource() const { + return source_resource_; +} + +void BroadcastResourceListener::StartListening() { + MutexLock lock(&lock_); + RTC_DCHECK(!is_listening_); + source_resource_->SetResourceListener(this); + is_listening_ = true; +} + +void BroadcastResourceListener::StopListening() { + MutexLock lock(&lock_); + RTC_DCHECK(is_listening_); + RTC_DCHECK(adapters_.empty()); + source_resource_->SetResourceListener(nullptr); + is_listening_ = false; +} + 
+rtc::scoped_refptr +BroadcastResourceListener::CreateAdapterResource() { + MutexLock lock(&lock_); + RTC_DCHECK(is_listening_); + rtc::scoped_refptr adapter = + new rtc::RefCountedObject(source_resource_->Name() + + "Adapter"); + adapters_.push_back(adapter); + return adapter; +} + +void BroadcastResourceListener::RemoveAdapterResource( + rtc::scoped_refptr resource) { + MutexLock lock(&lock_); + auto it = std::find(adapters_.begin(), adapters_.end(), resource); + RTC_DCHECK(it != adapters_.end()); + adapters_.erase(it); +} + +std::vector> +BroadcastResourceListener::GetAdapterResources() { + std::vector> resources; + MutexLock lock(&lock_); + for (const auto& adapter : adapters_) { + resources.push_back(adapter); + } + return resources; +} + +void BroadcastResourceListener::OnResourceUsageStateMeasured( + rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + RTC_DCHECK_EQ(resource, source_resource_); + MutexLock lock(&lock_); + for (const auto& adapter : adapters_) { + adapter->OnResourceUsageStateMeasured(usage_state); + } +} + +} // namespace webrtc diff --git a/call/adaptation/broadcast_resource_listener.h b/call/adaptation/broadcast_resource_listener.h new file mode 100644 index 0000000000..2c5a5c703b --- /dev/null +++ b/call/adaptation/broadcast_resource_listener.h @@ -0,0 +1,75 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_ +#define CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_ + +#include + +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +// Responsible for forwarding 1 resource usage measurement to N listeners by +// creating N "adapter" resources. +// +// Example: +// If we have ResourceA, ResourceListenerX and ResourceListenerY we can create a +// BroadcastResourceListener that listens to ResourceA, use CreateAdapter() to +// spawn adapter resources ResourceX and ResourceY and let ResourceListenerX +// listen to ResourceX and ResourceListenerY listen to ResourceY. When ResourceA +// makes a measurement it will be echoed by both ResourceX and ResourceY. +// +// TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor is +// moved to call there will only be one ResourceAdaptationProcessor that needs +// to listen to the injected resources. When this is the case, delete this class +// and DCHECK that a Resource's listener is never overwritten. +class BroadcastResourceListener : public ResourceListener { + public: + explicit BroadcastResourceListener( + rtc::scoped_refptr source_resource); + ~BroadcastResourceListener() override; + + rtc::scoped_refptr SourceResource() const; + void StartListening(); + void StopListening(); + + // Creates a Resource that redirects any resource usage measurements that + // BroadcastResourceListener receives to its listener. + rtc::scoped_refptr CreateAdapterResource(); + + // Unregister the adapter from the BroadcastResourceListener; it will no + // longer receive resource usage measurement and will no longer be referenced. + // Use this to prevent memory leaks of old adapters. + void RemoveAdapterResource(rtc::scoped_refptr resource); + std::vector> GetAdapterResources(); + + // ResourceListener implementation. 
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) override; + + private: + class AdapterResource; + friend class AdapterResource; + + const rtc::scoped_refptr source_resource_; + Mutex lock_; + bool is_listening_ RTC_GUARDED_BY(lock_); + // The AdapterResource unregisters itself prior to destruction, guaranteeing + // that these pointers are safe to use. + std::vector> adapters_ + RTC_GUARDED_BY(lock_); +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_ diff --git a/call/adaptation/broadcast_resource_listener_unittest.cc b/call/adaptation/broadcast_resource_listener_unittest.cc new file mode 100644 index 0000000000..9cd80500c2 --- /dev/null +++ b/call/adaptation/broadcast_resource_listener_unittest.cc @@ -0,0 +1,121 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "call/adaptation/broadcast_resource_listener.h" + +#include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/test/mock_resource_listener.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using ::testing::_; +using ::testing::StrictMock; + +TEST(BroadcastResourceListenerTest, CreateAndRemoveAdapterResource) { + rtc::scoped_refptr source_resource = + FakeResource::Create("SourceResource"); + BroadcastResourceListener broadcast_resource_listener(source_resource); + broadcast_resource_listener.StartListening(); + + EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty()); + rtc::scoped_refptr adapter = + broadcast_resource_listener.CreateAdapterResource(); + StrictMock listener; + adapter->SetResourceListener(&listener); + EXPECT_EQ(std::vector>{adapter}, + broadcast_resource_listener.GetAdapterResources()); + + // The removed adapter is not referenced by the broadcaster. + broadcast_resource_listener.RemoveAdapterResource(adapter); + EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty()); + // The removed adapter is not forwarding measurements. + EXPECT_CALL(listener, OnResourceUsageStateMeasured(_, _)).Times(0); + source_resource->SetUsageState(ResourceUsageState::kOveruse); + // Cleanup. 
+ adapter->SetResourceListener(nullptr); + broadcast_resource_listener.StopListening(); +} + +TEST(BroadcastResourceListenerTest, AdapterNameIsBasedOnSourceResourceName) { + rtc::scoped_refptr source_resource = + FakeResource::Create("FooBarResource"); + BroadcastResourceListener broadcast_resource_listener(source_resource); + broadcast_resource_listener.StartListening(); + + rtc::scoped_refptr adapter = + broadcast_resource_listener.CreateAdapterResource(); + EXPECT_EQ("FooBarResourceAdapter", adapter->Name()); + + broadcast_resource_listener.RemoveAdapterResource(adapter); + broadcast_resource_listener.StopListening(); +} + +TEST(BroadcastResourceListenerTest, AdaptersForwardsUsageMeasurements) { + rtc::scoped_refptr source_resource = + FakeResource::Create("SourceResource"); + BroadcastResourceListener broadcast_resource_listener(source_resource); + broadcast_resource_listener.StartListening(); + + StrictMock destination_listener1; + StrictMock destination_listener2; + rtc::scoped_refptr adapter1 = + broadcast_resource_listener.CreateAdapterResource(); + adapter1->SetResourceListener(&destination_listener1); + rtc::scoped_refptr adapter2 = + broadcast_resource_listener.CreateAdapterResource(); + adapter2->SetResourceListener(&destination_listener2); + + // Expect kOveruse to be echoed. + EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter1, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter2, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + source_resource->SetUsageState(ResourceUsageState::kOveruse); + + // Expect kUnderuse to be echoed. 
+ EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter1, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter2, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + source_resource->SetUsageState(ResourceUsageState::kUnderuse); + + // Adapters have to be unregistered before they or the broadcaster is + // destroyed, ensuring safe use of raw pointers. + adapter1->SetResourceListener(nullptr); + adapter2->SetResourceListener(nullptr); + + broadcast_resource_listener.RemoveAdapterResource(adapter1); + broadcast_resource_listener.RemoveAdapterResource(adapter2); + broadcast_resource_listener.StopListening(); +} + +} // namespace webrtc diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCAudioTrack.h b/call/adaptation/degradation_preference_provider.cc similarity index 61% rename from sdk/objc/Framework/Headers/WebRTC/RTCAudioTrack.h rename to call/adaptation/degradation_preference_provider.cc index 88515bb3fe..c87e49f366 100644 --- a/sdk/objc/Framework/Headers/WebRTC/RTCAudioTrack.h +++ b/call/adaptation/degradation_preference_provider.cc @@ -1,5 +1,5 @@ /* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,4 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#import "api/peerconnection/RTCAudioTrack.h" +#include "call/adaptation/degradation_preference_provider.h" + +webrtc::DegradationPreferenceProvider::~DegradationPreferenceProvider() = + default; diff --git a/call/adaptation/degradation_preference_provider.h b/call/adaptation/degradation_preference_provider.h new file mode 100644 index 0000000000..1f75901cc5 --- /dev/null +++ b/call/adaptation/degradation_preference_provider.h @@ -0,0 +1,27 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_ +#define CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_ + +#include "api/rtp_parameters.h" + +namespace webrtc { + +class DegradationPreferenceProvider { + public: + virtual ~DegradationPreferenceProvider(); + + virtual DegradationPreference degradation_preference() const = 0; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_ diff --git a/call/adaptation/resource_adaptation_module_interface.cc b/call/adaptation/encoder_settings.cc similarity index 57% rename from call/adaptation/resource_adaptation_module_interface.cc rename to call/adaptation/encoder_settings.cc index 63cfb7279f..c894e833ed 100644 --- a/call/adaptation/resource_adaptation_module_interface.cc +++ b/call/adaptation/encoder_settings.cc @@ -1,5 +1,5 @@ /* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "call/adaptation/resource_adaptation_module_interface.h" +#include "call/adaptation/encoder_settings.h" #include @@ -21,6 +21,18 @@ EncoderSettings::EncoderSettings(VideoEncoder::EncoderInfo encoder_info, encoder_config_(std::move(encoder_config)), video_codec_(std::move(video_codec)) {} +EncoderSettings::EncoderSettings(const EncoderSettings& other) + : encoder_info_(other.encoder_info_), + encoder_config_(other.encoder_config_.Copy()), + video_codec_(other.video_codec_) {} + +EncoderSettings& EncoderSettings::operator=(const EncoderSettings& other) { + encoder_info_ = other.encoder_info_; + encoder_config_ = other.encoder_config_.Copy(); + video_codec_ = other.video_codec_; + return *this; +} + const VideoEncoder::EncoderInfo& EncoderSettings::encoder_info() const { return encoder_info_; } @@ -33,8 +45,10 @@ const VideoCodec& EncoderSettings::video_codec() const { return video_codec_; } -ResourceAdaptationModuleListener::~ResourceAdaptationModuleListener() {} - -ResourceAdaptationModuleInterface::~ResourceAdaptationModuleInterface() {} +VideoCodecType GetVideoCodecTypeOrGeneric( + const absl::optional& settings) { + return settings.has_value() ? settings->encoder_config().codec_type + : kVideoCodecGeneric; +} } // namespace webrtc diff --git a/call/adaptation/encoder_settings.h b/call/adaptation/encoder_settings.h new file mode 100644 index 0000000000..ddb198a96e --- /dev/null +++ b/call/adaptation/encoder_settings.h @@ -0,0 +1,48 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_ENCODER_SETTINGS_H_ +#define CALL_ADAPTATION_ENCODER_SETTINGS_H_ + +#include "absl/types/optional.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_config.h" + +namespace webrtc { + +// Information about an encoder available when reconfiguring the encoder. +class EncoderSettings { + public: + EncoderSettings(VideoEncoder::EncoderInfo encoder_info, + VideoEncoderConfig encoder_config, + VideoCodec video_codec); + EncoderSettings(const EncoderSettings& other); + EncoderSettings& operator=(const EncoderSettings& other); + + // Encoder capabilities, implementation info, etc. + const VideoEncoder::EncoderInfo& encoder_info() const; + // Configuration parameters, ultimately coming from the API and negotiation. + const VideoEncoderConfig& encoder_config() const; + // Lower level config, heavily based on the VideoEncoderConfig. + const VideoCodec& video_codec() const; + + private: + VideoEncoder::EncoderInfo encoder_info_; + VideoEncoderConfig encoder_config_; + VideoCodec video_codec_; +}; + +VideoCodecType GetVideoCodecTypeOrGeneric( + const absl::optional& settings); + +} // namespace webrtc + +#endif // CALL_ADAPTATION_ENCODER_SETTINGS_H_ diff --git a/call/adaptation/resource.h b/call/adaptation/resource.h deleted file mode 100644 index 0bd142168f..0000000000 --- a/call/adaptation/resource.h +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef CALL_ADAPTATION_RESOURCE_H_ -#define CALL_ADAPTATION_RESOURCE_H_ - -#include - -namespace webrtc { - -enum class ResourceUsageState { - // Action is needed to minimze the load on this resource. - kOveruse, - // No action needed for this resource, increasing the load on this resource - // is not allowed. - kStable, - // Increasing the load on this resource is allowed. - kUnderuse, -}; - -// A Resource is something which can be measured as "overused", "stable" or -// "underused". For example, if we are overusing CPU we may need to lower the -// resolution of one of the streams. In other words, one of the ResourceConumers -// - representing an encoder - needs to be reconfigured with a different -// ResourceConsumerConfiguration - representing a different encoder setting. -// -// This is an abstract class used by the ResourceAdaptationProcessor to make -// decisions about which configurations to use. How a resource is measured or -// what measurements map to different ResourceUsageState values is -// implementation-specific. -class Resource { - public: - virtual ~Resource(); - - // Informational, not formally part of the decision-making process. - virtual std::string Name() const = 0; - virtual std::string UsageUnitsOfMeasurement() const = 0; - // Valid ranges are implementation-specific. - virtual double CurrentUsage() const = 0; - - // The current usage state of this resource. Used by the - // ResourceAdaptationProcessor to calculate the desired consumer - // configurations. 
- virtual ResourceUsageState CurrentUsageState() const = 0; - - std::string ToString() const; -}; - -} // namespace webrtc - -#endif // CALL_ADAPTATION_RESOURCE_H_ diff --git a/call/adaptation/resource_adaptation_module_interface.h b/call/adaptation/resource_adaptation_module_interface.h deleted file mode 100644 index 3a3deb2499..0000000000 --- a/call/adaptation/resource_adaptation_module_interface.h +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_ADAPTATION_RESOURCE_ADAPTATION_MODULE_INTERFACE_H_ -#define CALL_ADAPTATION_RESOURCE_ADAPTATION_MODULE_INTERFACE_H_ - -#include "absl/types/optional.h" -#include "api/rtp_parameters.h" -#include "api/video/video_frame.h" -#include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" -#include "call/adaptation/video_source_restrictions.h" - -namespace webrtc { - -// Information about an encoder available when reconfiguring the encoder. -class EncoderSettings { - public: - EncoderSettings(VideoEncoder::EncoderInfo encoder_info, - VideoEncoderConfig encoder_config, - VideoCodec video_codec); - - // Encoder capabilities, implementation info, etc. - const VideoEncoder::EncoderInfo& encoder_info() const; - // Configuration parameters, ultimately coming from the API and negotiation. - const VideoEncoderConfig& encoder_config() const; - // Lower level config, heavily based on the VideoEncoderConfig. 
- const VideoCodec& video_codec() const; - - private: - VideoEncoder::EncoderInfo encoder_info_; - VideoEncoderConfig encoder_config_; - VideoCodec video_codec_; -}; - -// The listener is responsible for carrying out the reconfiguration of the video -// source such that the VideoSourceRestrictions are fulfilled. -class ResourceAdaptationModuleListener { - public: - virtual ~ResourceAdaptationModuleListener(); - - // TODO(hbos): When we support the muli-stream use case, the arguments need to - // specify which video stream's source needs to be reconfigured. - virtual void OnVideoSourceRestrictionsUpdated( - VideoSourceRestrictions restrictions) = 0; -}; - -// Responsible for reconfiguring encoded streams based on resource consumption, -// such as scaling down resolution or frame rate when CPU is overused. This -// interface is meant to be injectable into VideoStreamEncoder. -// -// [UNDER CONSTRUCTION] This interface is work-in-progress. In the future it -// needs to be able to handle all the necessary input and output for resource -// adaptation decision making. -// -// TODO(https://crbug.com/webrtc/11222): Make this interface feature-complete so -// that a module (such as OveruseFrameDetectorResourceAdaptationModule) is fully -// operational through this abstract interface. -class ResourceAdaptationModuleInterface { - public: - virtual ~ResourceAdaptationModuleInterface(); - - // TODO(hbos): When input/output of the module is adequetly handled by this - // interface, these methods need to say which stream to start/stop, enabling - // multi-stream aware implementations of ResourceAdaptationModuleInterface. We - // don't want to do this before we have the right interfaces (e.g. if we pass - // in a VideoStreamEncoder here directly then have a dependency on a different - // build target). For the multi-stream use case we may consider making - // ResourceAdaptationModuleInterface reference counted. 
- virtual void StartResourceAdaptation( - ResourceAdaptationModuleListener* adaptation_listener) = 0; - virtual void StopResourceAdaptation() = 0; - - // The following methods are callable whether or not adaption is started. - - // Informs the module whether we have input video. By default, the module must - // assume the value is false. - virtual void SetHasInputVideo(bool has_input_video) = 0; - virtual void SetDegradationPreference( - DegradationPreference degradation_preference) = 0; - virtual void SetEncoderSettings(EncoderSettings encoder_settings) = 0; - virtual void SetEncoderTargetBitrate( - absl::optional target_bitrate_bps) = 0; - // Removes all restrictions; the module will need to adapt all over again. - // TODO(hbos): It's not clear why anybody should be able to tell the module to - // reset like this; can we get rid of this method? - virtual void ResetVideoSourceRestrictions() = 0; - - // The following methods correspond to the pipeline that a frame goes through. - // Note that if the encoder is parallelized, multiple frames may be processed - // in parallel and methods may be invoked in unexpected orders. - // - // The implementation must not retain VideoFrames. Doing so may keep video - // frame buffers alive - this may even stall encoding. - // TODO(hbos): Can we replace VideoFrame with a different struct, maybe width - // and height is enough, and some sort of way to identify it at each step? - - // 1. A frame is delivered to the encoder, e.g. from the camera. Next up: it - // may get dropped or it may get encoded, see OnFrameDroppedDueToSize() and - // OnEncodeStarted(). - virtual void OnFrame(const VideoFrame& frame) = 0; - // 2.i) An input frame was dropped because its resolution is too big (e.g. for - // the target bitrate). This frame will not continue through the rest of the - // pipeline. The module should adapt down in resolution to avoid subsequent - // frames getting dropped for the same reason. 
- // TODO(hbos): If we take frame rate into account perhaps it would be valid to - // adapt down in frame rate as well. - virtual void OnFrameDroppedDueToSize() = 0; - // 2.ii) An input frame is about to be encoded. It may have been cropped and - // have different dimensions than what was observed at OnFrame(). Next - // up: encoding completes or fails, see OnEncodeCompleted(). There is - // currently no signal for encode failure. - virtual void OnEncodeStarted(const VideoFrame& cropped_frame, - int64_t time_when_first_seen_us) = 0; - // 3. The frame has successfully completed encoding. Next up: The encoded - // frame is dropped or packetized and sent over the network. There is - // currently no signal what happens beyond this point. - virtual void OnEncodeCompleted(uint32_t timestamp, - int64_t time_sent_in_us, - int64_t capture_time_us, - absl::optional encode_duration_us) = 0; -}; - -} // namespace webrtc - -#endif // CALL_ADAPTATION_RESOURCE_ADAPTATION_MODULE_INTERFACE_H_ diff --git a/call/adaptation/resource_adaptation_processor.cc b/call/adaptation/resource_adaptation_processor.cc index e4f209fe9d..ac1b1db174 100644 --- a/call/adaptation/resource_adaptation_processor.cc +++ b/call/adaptation/resource_adaptation_processor.cc @@ -1,5 +1,5 @@ /* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -10,119 +10,383 @@ #include "call/adaptation/resource_adaptation_processor.h" -#include +#include +#include #include -#include "rtc_base/checks.h" +#include "absl/algorithm/container.h" +#include "api/video/video_adaptation_counters.h" +#include "call/adaptation/video_stream_adapter.h" +#include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { -namespace { - -ResourceConsumerConfiguration* FindMostPreferredConfiguration( - const std::vector& configurations) { - if (configurations.empty()) - return nullptr; - ResourceConsumerConfiguration* most_preferred_configuration = - configurations[0]; - double most_preferred_configuration_preference = - most_preferred_configuration->Preference(); - RTC_DCHECK_GE(most_preferred_configuration_preference, 0.0); - for (size_t i = 1; i < configurations.size(); ++i) { - auto* configuration = configurations[i]; - double preference = configuration->Preference(); - RTC_DCHECK_GE(preference, 0.0); - if (most_preferred_configuration_preference < preference) { - most_preferred_configuration = configuration; - most_preferred_configuration_preference = preference; - } +ResourceAdaptationProcessor::ResourceListenerDelegate::ResourceListenerDelegate( + ResourceAdaptationProcessor* processor) + : task_queue_(nullptr), processor_(processor) {} + +void ResourceAdaptationProcessor::ResourceListenerDelegate::SetTaskQueue( + TaskQueueBase* task_queue) { + RTC_DCHECK(!task_queue_); + RTC_DCHECK(task_queue); + task_queue_ = task_queue; + RTC_DCHECK_RUN_ON(task_queue_); +} + +void ResourceAdaptationProcessor::ResourceListenerDelegate:: + OnProcessorDestroyed() { + RTC_DCHECK_RUN_ON(task_queue_); + processor_ = nullptr; +} + +void 
ResourceAdaptationProcessor::ResourceListenerDelegate:: + OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + if (!task_queue_->IsCurrent()) { + task_queue_->PostTask(ToQueuedTask( + [this_ref = rtc::scoped_refptr(this), + resource, usage_state] { + this_ref->OnResourceUsageStateMeasured(resource, usage_state); + })); + return; + } + RTC_DCHECK_RUN_ON(task_queue_); + if (processor_) { + processor_->OnResourceUsageStateMeasured(resource, usage_state); } - return most_preferred_configuration; } -} // namespace +ResourceAdaptationProcessor::MitigationResultAndLogMessage:: + MitigationResultAndLogMessage() + : result(MitigationResult::kAdaptationApplied), message() {} -ConsumerConfigurationPair::ConsumerConfigurationPair( - ResourceConsumer* consumer, - ResourceConsumerConfiguration* configuration) - : consumer(consumer), configuration(configuration) {} +ResourceAdaptationProcessor::MitigationResultAndLogMessage:: + MitigationResultAndLogMessage(MitigationResult result, std::string message) + : result(result), message(std::move(message)) {} -absl::optional -ResourceAdaptationProcessor::FindNextConfiguration() { - ResourceUsageState overall_usage = ResourceUsageState::kUnderuse; - for (auto& resource : resources_) { - ResourceUsageState resource_usage = resource->CurrentUsageState(); - if (resource_usage == ResourceUsageState::kStable) { - // If any resource is "stable", we are not underusing. - if (overall_usage == ResourceUsageState::kUnderuse) - overall_usage = ResourceUsageState::kStable; - } else if (resource_usage == ResourceUsageState::kOveruse) { - // If any resource is "overuse", we are overusing. 
- overall_usage = ResourceUsageState::kOveruse; - break; +ResourceAdaptationProcessor::ResourceAdaptationProcessor( + VideoStreamAdapter* stream_adapter) + : task_queue_(nullptr), + resource_listener_delegate_( + new rtc::RefCountedObject(this)), + resources_(), + stream_adapter_(stream_adapter), + last_reported_source_restrictions_(), + previous_mitigation_results_() { + RTC_DCHECK(stream_adapter_); +} + +ResourceAdaptationProcessor::~ResourceAdaptationProcessor() { + RTC_DCHECK_RUN_ON(task_queue_); + RTC_DCHECK(resources_.empty()) + << "There are resource(s) attached to a ResourceAdaptationProcessor " + << "being destroyed."; + stream_adapter_->RemoveRestrictionsListener(this); + resource_listener_delegate_->OnProcessorDestroyed(); +} + +void ResourceAdaptationProcessor::SetTaskQueue(TaskQueueBase* task_queue) { + RTC_DCHECK(!task_queue_); + RTC_DCHECK(task_queue); + task_queue_ = task_queue; + resource_listener_delegate_->SetTaskQueue(task_queue); + RTC_DCHECK_RUN_ON(task_queue_); + // Now that we have the queue we can attach as adaptation listener. 
+ stream_adapter_->AddRestrictionsListener(this); +} + +void ResourceAdaptationProcessor::AddResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) { + RTC_DCHECK_RUN_ON(task_queue_); + RTC_DCHECK(std::find(resource_limitations_listeners_.begin(), + resource_limitations_listeners_.end(), + limitations_listener) == + resource_limitations_listeners_.end()); + resource_limitations_listeners_.push_back(limitations_listener); +} + +void ResourceAdaptationProcessor::RemoveResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) { + RTC_DCHECK_RUN_ON(task_queue_); + auto it = + std::find(resource_limitations_listeners_.begin(), + resource_limitations_listeners_.end(), limitations_listener); + RTC_DCHECK(it != resource_limitations_listeners_.end()); + resource_limitations_listeners_.erase(it); +} + +void ResourceAdaptationProcessor::AddResource( + rtc::scoped_refptr resource) { + RTC_DCHECK(resource); + { + MutexLock crit(&resources_lock_); + RTC_DCHECK(absl::c_find(resources_, resource) == resources_.end()) + << "Resource \"" << resource->Name() << "\" was already registered."; + resources_.push_back(resource); + } + resource->SetResourceListener(resource_listener_delegate_); + RTC_LOG(INFO) << "Registered resource \"" << resource->Name() << "\"."; +} + +std::vector> +ResourceAdaptationProcessor::GetResources() const { + MutexLock crit(&resources_lock_); + return resources_; +} + +void ResourceAdaptationProcessor::RemoveResource( + rtc::scoped_refptr resource) { + RTC_DCHECK(resource); + RTC_LOG(INFO) << "Removing resource \"" << resource->Name() << "\"."; + resource->SetResourceListener(nullptr); + { + MutexLock crit(&resources_lock_); + auto it = absl::c_find(resources_, resource); + RTC_DCHECK(it != resources_.end()) << "Resource \"" << resource->Name() + << "\" was not a registered resource."; + resources_.erase(it); + } + RemoveLimitationsImposedByResource(std::move(resource)); +} + +void 
ResourceAdaptationProcessor::RemoveLimitationsImposedByResource( + rtc::scoped_refptr resource) { + if (!task_queue_->IsCurrent()) { + task_queue_->PostTask(ToQueuedTask( + [this, resource]() { RemoveLimitationsImposedByResource(resource); })); + return; + } + RTC_DCHECK_RUN_ON(task_queue_); + auto resource_adaptation_limits = + adaptation_limits_by_resources_.find(resource); + if (resource_adaptation_limits != adaptation_limits_by_resources_.end()) { + VideoStreamAdapter::RestrictionsWithCounters adaptation_limits = + resource_adaptation_limits->second; + adaptation_limits_by_resources_.erase(resource_adaptation_limits); + if (adaptation_limits_by_resources_.empty()) { + // Only the resource being removed was adapted so clear restrictions. + stream_adapter_->ClearRestrictions(); + return; + } + + VideoStreamAdapter::RestrictionsWithCounters most_limited = + FindMostLimitedResources().second; + + if (adaptation_limits.counters.Total() <= most_limited.counters.Total()) { + // The removed limitations were less limited than the most limited + // resource. Don't change the current restrictions. + return; + } + + // Apply the new most limited resource as the next restrictions. + Adaptation adapt_to = stream_adapter_->GetAdaptationTo( + most_limited.counters, most_limited.restrictions); + RTC_DCHECK_EQ(adapt_to.status(), Adaptation::Status::kValid); + stream_adapter_->ApplyAdaptation(adapt_to, nullptr); + + RTC_LOG(INFO) << "Most limited resource removed. Restoring restrictions to " + "next most limited restrictions: " + << most_limited.restrictions.ToString() << " with counters " + << most_limited.counters.ToString(); + } +} + +void ResourceAdaptationProcessor::OnResourceUsageStateMeasured( + rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + RTC_DCHECK_RUN_ON(task_queue_); + RTC_DCHECK(resource); + // |resource| could have been removed after signalling. 
+ { + MutexLock crit(&resources_lock_); + if (absl::c_find(resources_, resource) == resources_.end()) { + RTC_LOG(INFO) << "Ignoring signal from removed resource \"" + << resource->Name() << "\"."; + return; } } - // If we are stable we should neither adapt up or down: stay where we are. - if (overall_usage == ResourceUsageState::kStable) - return absl::nullopt; - if (overall_usage == ResourceUsageState::kOveruse) { - // If we are overusing, we adapt down the most expensive consumer to its - // most preferred lower neighbor. - ResourceConsumer* max_cost_consumer = - FindMostExpensiveConsumerThatCanBeAdaptedDown(); - if (!max_cost_consumer) - return absl::nullopt; - ResourceConsumerConfiguration* next_configuration = - FindMostPreferredConfiguration( - max_cost_consumer->configuration()->lower_neighbors()); - RTC_DCHECK(next_configuration); - return ConsumerConfigurationPair(max_cost_consumer, next_configuration); + MitigationResultAndLogMessage result_and_message; + switch (usage_state) { + case ResourceUsageState::kOveruse: + result_and_message = OnResourceOveruse(resource); + break; + case ResourceUsageState::kUnderuse: + result_and_message = OnResourceUnderuse(resource); + break; + } + // Maybe log the result of the operation. + auto it = previous_mitigation_results_.find(resource.get()); + if (it != previous_mitigation_results_.end() && + it->second == result_and_message.result) { + // This resource has previously reported the same result and we haven't + // successfully adapted since - don't log to avoid spam. + return; + } + RTC_LOG(INFO) << "Resource \"" << resource->Name() << "\" signalled " + << ResourceUsageStateToString(usage_state) << ". 
" + << result_and_message.message; + if (result_and_message.result == MitigationResult::kAdaptationApplied) { + previous_mitigation_results_.clear(); } else { - RTC_DCHECK_EQ(overall_usage, ResourceUsageState::kUnderuse); - // If we are underusing, we adapt up the least expensive consumer to its - // most preferred upper neighbor. - ResourceConsumer* min_cost_consumer = - FindLeastExpensiveConsumerThatCanBeAdaptedUp(); - if (!min_cost_consumer) - return absl::nullopt; - ResourceConsumerConfiguration* next_configuration = - FindMostPreferredConfiguration( - min_cost_consumer->configuration()->upper_neighbors()); - RTC_DCHECK(next_configuration); - return ConsumerConfigurationPair(min_cost_consumer, next_configuration); - } -} - -ResourceConsumer* -ResourceAdaptationProcessor::FindMostExpensiveConsumerThatCanBeAdaptedDown() { - ResourceConsumer* max_cost_consumer = nullptr; - double max_cost = -1.0; - for (auto& consumer : consumers_) { - if (consumer->configuration()->lower_neighbors().empty()) - continue; - double cost = consumer->configuration()->Cost(); - if (max_cost < cost) { - max_cost_consumer = consumer.get(); - max_cost = cost; + previous_mitigation_results_.insert( + std::make_pair(resource.get(), result_and_message.result)); + } +} + +ResourceAdaptationProcessor::MitigationResultAndLogMessage +ResourceAdaptationProcessor::OnResourceUnderuse( + rtc::scoped_refptr reason_resource) { + RTC_DCHECK_RUN_ON(task_queue_); + // How can this stream be adapted up? + Adaptation adaptation = stream_adapter_->GetAdaptationUp(); + if (adaptation.status() != Adaptation::Status::kValid) { + rtc::StringBuilder message; + message << "Not adapting up because VideoStreamAdapter returned " + << Adaptation::StatusToString(adaptation.status()); + return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter, + message.Release()); + } + // Check that resource is most limited. 
+ std::vector> most_limited_resources; + VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions; + std::tie(most_limited_resources, most_limited_restrictions) = + FindMostLimitedResources(); + + // If the most restricted resource is less limited than current restrictions + // then proceed with adapting up. + if (!most_limited_resources.empty() && + most_limited_restrictions.counters.Total() >= + stream_adapter_->adaptation_counters().Total()) { + // If |reason_resource| is not one of the most limiting resources then abort + // adaptation. + if (absl::c_find(most_limited_resources, reason_resource) == + most_limited_resources.end()) { + rtc::StringBuilder message; + message << "Resource \"" << reason_resource->Name() + << "\" was not the most limited resource."; + return MitigationResultAndLogMessage( + MitigationResult::kNotMostLimitedResource, message.Release()); + } + + if (most_limited_resources.size() > 1) { + // If there are multiple most limited resources, all must signal underuse + // before the adaptation is applied. + UpdateResourceLimitations(reason_resource, adaptation.restrictions(), + adaptation.counters()); + rtc::StringBuilder message; + message << "Resource \"" << reason_resource->Name() + << "\" was not the only most limited resource."; + return MitigationResultAndLogMessage( + MitigationResult::kSharedMostLimitedResource, message.Release()); } } - return max_cost_consumer; + // Apply adaptation. + stream_adapter_->ApplyAdaptation(adaptation, reason_resource); + rtc::StringBuilder message; + message << "Adapted up successfully. Unfiltered adaptations: " + << stream_adapter_->adaptation_counters().ToString(); + return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied, + message.Release()); +} + +ResourceAdaptationProcessor::MitigationResultAndLogMessage +ResourceAdaptationProcessor::OnResourceOveruse( + rtc::scoped_refptr reason_resource) { + RTC_DCHECK_RUN_ON(task_queue_); + // How can this stream be adapted up? 
+ Adaptation adaptation = stream_adapter_->GetAdaptationDown(); + if (adaptation.status() == Adaptation::Status::kLimitReached) { + // Add resource as most limited. + VideoStreamAdapter::RestrictionsWithCounters restrictions; + std::tie(std::ignore, restrictions) = FindMostLimitedResources(); + UpdateResourceLimitations(reason_resource, restrictions.restrictions, + restrictions.counters); + } + if (adaptation.status() != Adaptation::Status::kValid) { + rtc::StringBuilder message; + message << "Not adapting down because VideoStreamAdapter returned " + << Adaptation::StatusToString(adaptation.status()); + return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter, + message.Release()); + } + // Apply adaptation. + UpdateResourceLimitations(reason_resource, adaptation.restrictions(), + adaptation.counters()); + stream_adapter_->ApplyAdaptation(adaptation, reason_resource); + rtc::StringBuilder message; + message << "Adapted down successfully. Unfiltered adaptations: " + << stream_adapter_->adaptation_counters().ToString(); + return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied, + message.Release()); +} + +std::pair>, + VideoStreamAdapter::RestrictionsWithCounters> +ResourceAdaptationProcessor::FindMostLimitedResources() const { + std::vector> most_limited_resources; + VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions{ + VideoSourceRestrictions(), VideoAdaptationCounters()}; + + for (const auto& resource_and_adaptation_limit_ : + adaptation_limits_by_resources_) { + const auto& restrictions_with_counters = + resource_and_adaptation_limit_.second; + if (restrictions_with_counters.counters.Total() > + most_limited_restrictions.counters.Total()) { + most_limited_restrictions = restrictions_with_counters; + most_limited_resources.clear(); + most_limited_resources.push_back(resource_and_adaptation_limit_.first); + } else if (most_limited_restrictions.counters == + restrictions_with_counters.counters) { + 
most_limited_resources.push_back(resource_and_adaptation_limit_.first); + } + } + return std::make_pair(std::move(most_limited_resources), + most_limited_restrictions); +} + +void ResourceAdaptationProcessor::UpdateResourceLimitations( + rtc::scoped_refptr reason_resource, + const VideoSourceRestrictions& restrictions, + const VideoAdaptationCounters& counters) { + auto& adaptation_limits = adaptation_limits_by_resources_[reason_resource]; + if (adaptation_limits.restrictions == restrictions && + adaptation_limits.counters == counters) { + return; + } + adaptation_limits = {restrictions, counters}; + + std::map, VideoAdaptationCounters> limitations; + for (const auto& p : adaptation_limits_by_resources_) { + limitations.insert(std::make_pair(p.first, p.second.counters)); + } + for (auto limitations_listener : resource_limitations_listeners_) { + limitations_listener->OnResourceLimitationChanged(reason_resource, + limitations); + } } -ResourceConsumer* -ResourceAdaptationProcessor::FindLeastExpensiveConsumerThatCanBeAdaptedUp() { - ResourceConsumer* min_cost_consumer = nullptr; - double min_cost = std::numeric_limits::infinity(); - for (auto& consumer : consumers_) { - if (consumer->configuration()->upper_neighbors().empty()) - continue; - double cost = consumer->configuration()->Cost(); - if (min_cost > cost) { - min_cost_consumer = consumer.get(); - min_cost = cost; +void ResourceAdaptationProcessor::OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) { + RTC_DCHECK_RUN_ON(task_queue_); + if (reason) { + UpdateResourceLimitations(reason, unfiltered_restrictions, + adaptation_counters); + } else if (adaptation_counters.Total() == 0) { + // Adaptations are cleared. 
+ adaptation_limits_by_resources_.clear(); + previous_mitigation_results_.clear(); + for (auto limitations_listener : resource_limitations_listeners_) { + limitations_listener->OnResourceLimitationChanged(nullptr, {}); } } - return min_cost_consumer; } } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor.h b/call/adaptation/resource_adaptation_processor.h index 2855302beb..c84d359fec 100644 --- a/call/adaptation/resource_adaptation_processor.h +++ b/call/adaptation/resource_adaptation_processor.h @@ -1,5 +1,5 @@ /* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,106 +11,156 @@ #ifndef CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_H_ #define CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_H_ +#include #include +#include #include #include #include "absl/types/optional.h" -#include "call/adaptation/resource.h" -#include "call/adaptation/resource_consumer.h" -#include "call/adaptation/resource_consumer_configuration.h" +#include "api/adaptation/resource.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_frame.h" +#include "api/video/video_stream_encoder_observer.h" +#include "call/adaptation/resource_adaptation_processor_interface.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_adapter.h" +#include "call/adaptation/video_stream_input_state.h" +#include "call/adaptation/video_stream_input_state_provider.h" namespace webrtc { -struct ConsumerConfigurationPair { - ConsumerConfigurationPair(ResourceConsumer* consumer, - ResourceConsumerConfiguration* configuration); - - ResourceConsumer* consumer; - ResourceConsumerConfiguration* 
configuration; -}; - -// Given a set of Resources, ResourceConsumers and -// ResourceConsumerConfigurations, the processor calculates which consumer, if -// any, should be reconfigured and how, in order to adapt to resource -// constraints. -// Example: "CPU" is a resource, a video stream being encoded is a consumer -// and the encoder setting (e.g. VP8/720p/30fps) is a configuration. -// -// A resource can be "overused", "stable" or "underused". The processor -// maximises quality without overusing any resource as follows: -// 1. If we are "overusing" on any resource, find the most expensive consumer -// and adapt it one step "down". -// 2. If we are "underusing" on all resources, find the least expensive consumer -// and adapt it one step "up". -// -// The expensiveness of a consumer is the expensiveness of its current -// configuration and the cost of a configuration is estimated based on pixels -// per second. How a consumer can be reconfigured in terms of one step "up" or -// "down" is expressed as a graph: each configuration has a set of "upper" -// neighbors and "lower" neighbors. When there are multiple options, neighbors -// are chosen based on configuration preferences. +// The Resource Adaptation Processor is responsible for reacting to resource +// usage measurements (e.g. overusing or underusing CPU). When a resource is +// overused the Processor is responsible for performing mitigations in order to +// consume less resources. // -// See FindNextConfiguration(). +// Today we have one Processor per VideoStreamEncoder and the Processor is only +// capable of restricting resolution or frame rate of the encoded stream. In the +// future we should have a single Processor responsible for all encoded streams, +// and it should be capable of reconfiguring other things than just +// VideoSourceRestrictions (e.g. reduce render frame rate). 
+// See Resource-Adaptation hotlist: +// https://bugs.chromium.org/u/590058293/hotlists/Resource-Adaptation // -// This class owns all resources, consumers and configurations. As long as it is -// alive, raw pointers to these are safe to use. -class ResourceAdaptationProcessor { +// The ResourceAdaptationProcessor is single-threaded. It may be constructed on +// any thread but MUST subsequently be used and destroyed on a single sequence, +// i.e. the "resource adaptation task queue". Resources can be added and removed +// from any thread. +class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, + public VideoSourceRestrictionsListener, + public ResourceListener { public: - const std::vector>& resources() const { - return resources_; - } - const std::vector>& - configurations() const { - return configurations_; - } - const std::vector>& consumers() const { - return consumers_; - } - - // Takes on ownership of the argument. A raw pointer is returned to the object - // for convenience; it is valid for the lifetime of the - // ResourceAdaptationProcessor. 
- // T = any subclass of Resource - template - T* AddResource(std::unique_ptr resource) { - T* resource_ptr = resource.get(); - resources_.push_back(std::move(resource)); - return resource_ptr; - } - // T = any subclass of ResourceConsumerConfiguration - template - T* AddConfiguration(std::unique_ptr configuration) { - T* configuration_ptr = configuration.get(); - configurations_.push_back(std::move(configuration)); - return configuration_ptr; - } - // T = any subclass of ResourceConsumer - template - T* AddConsumer(std::unique_ptr consumer) { - T* consumer_ptr = consumer.get(); - consumers_.push_back(std::move(consumer)); - return consumer_ptr; - } - - // Based on the current state of the resources and consumers, finds the - // consumer that should be reconfigured up or down in order to maximies - // quality without overusing any resources, as described in - // ResourceAdaptationProcessor's class description. - // - // When this is used in a real system, care needs to be taken for how often - // FindNextConfiguration() is called. There may be a delay between - // reconfiguring a consumer and the desired effects being observed on resource - // usage. - absl::optional FindNextConfiguration(); + explicit ResourceAdaptationProcessor( + VideoStreamAdapter* video_stream_adapter); + ~ResourceAdaptationProcessor() override; + + void SetTaskQueue(TaskQueueBase* task_queue) override; + + // ResourceAdaptationProcessorInterface implementation. + void AddResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) override; + void RemoveResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) override; + void AddResource(rtc::scoped_refptr resource) override; + std::vector> GetResources() const override; + void RemoveResource(rtc::scoped_refptr resource) override; + + // ResourceListener implementation. + // Triggers OnResourceUnderuse() or OnResourceOveruse(). 
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) override; + + // VideoSourceRestrictionsListener implementation. + void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override; private: - ResourceConsumer* FindMostExpensiveConsumerThatCanBeAdaptedDown(); - ResourceConsumer* FindLeastExpensiveConsumerThatCanBeAdaptedUp(); + // If resource usage measurements happens off the adaptation task queue, this + // class takes care of posting the measurement for the processor to handle it + // on the adaptation task queue. + class ResourceListenerDelegate : public rtc::RefCountInterface, + public ResourceListener { + public: + explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor); + + void SetTaskQueue(TaskQueueBase* task_queue); + void OnProcessorDestroyed(); + + // ResourceListener implementation. + void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) override; + + private: + TaskQueueBase* task_queue_; + ResourceAdaptationProcessor* processor_ RTC_GUARDED_BY(task_queue_); + }; + + enum class MitigationResult { + kNotMostLimitedResource, + kSharedMostLimitedResource, + kRejectedByAdapter, + kAdaptationApplied, + }; + + struct MitigationResultAndLogMessage { + MitigationResultAndLogMessage(); + MitigationResultAndLogMessage(MitigationResult result, std::string message); + MitigationResult result; + std::string message; + }; + + // Performs the adaptation by getting the next target, applying it and + // informing listeners of the new VideoSourceRestriction and adaptation + // counters. 
+ MitigationResultAndLogMessage OnResourceUnderuse( + rtc::scoped_refptr reason_resource); + MitigationResultAndLogMessage OnResourceOveruse( + rtc::scoped_refptr reason_resource); + + void UpdateResourceLimitations(rtc::scoped_refptr reason_resource, + const VideoSourceRestrictions& restrictions, + const VideoAdaptationCounters& counters) + RTC_RUN_ON(task_queue_); + + // Searches |adaptation_limits_by_resources_| for each resource with the + // highest total adaptation counts. Adaptation up may only occur if the + // resource performing the adaptation is the only most limited resource. This + // function returns the list of all most limited resources as well as the + // corresponding adaptation of that resource. + std::pair>, + VideoStreamAdapter::RestrictionsWithCounters> + FindMostLimitedResources() const RTC_RUN_ON(task_queue_); + + void RemoveLimitationsImposedByResource( + rtc::scoped_refptr resource); - std::vector> resources_; - std::vector> configurations_; - std::vector> consumers_; + TaskQueueBase* task_queue_; + rtc::scoped_refptr resource_listener_delegate_; + // Input and output. + mutable Mutex resources_lock_; + std::vector> resources_ + RTC_GUARDED_BY(resources_lock_); + std::vector resource_limitations_listeners_ + RTC_GUARDED_BY(task_queue_); + // Purely used for statistics, does not ensure mapped resources stay alive. + std::map, + VideoStreamAdapter::RestrictionsWithCounters> + adaptation_limits_by_resources_ RTC_GUARDED_BY(task_queue_); + // Responsible for generating and applying possible adaptations. + VideoStreamAdapter* const stream_adapter_ RTC_GUARDED_BY(task_queue_); + VideoSourceRestrictions last_reported_source_restrictions_ + RTC_GUARDED_BY(task_queue_); + // Keeps track of previous mitigation results per resource since the last + // successful adaptation. Used to avoid RTC_LOG spam. 
+ std::map previous_mitigation_results_ + RTC_GUARDED_BY(task_queue_); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.cc b/call/adaptation/resource_adaptation_processor_interface.cc similarity index 57% rename from modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.cc rename to call/adaptation/resource_adaptation_processor_interface.cc index e0183f89c0..79f099b267 100644 --- a/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.cc +++ b/call/adaptation/resource_adaptation_processor_interface.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,11 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h" +#include "call/adaptation/resource_adaptation_processor_interface.h" namespace webrtc { -MockRecoveredPacketReceiver::MockRecoveredPacketReceiver() = default; -MockRecoveredPacketReceiver::~MockRecoveredPacketReceiver() = default; +ResourceAdaptationProcessorInterface::~ResourceAdaptationProcessorInterface() = + default; + +ResourceLimitationsListener::~ResourceLimitationsListener() = default; } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor_interface.h b/call/adaptation/resource_adaptation_processor_interface.h new file mode 100644 index 0000000000..8b1f94b73a --- /dev/null +++ b/call/adaptation/resource_adaptation_processor_interface.h @@ -0,0 +1,69 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_ +#define CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_frame.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/encoder_settings.h" +#include "call/adaptation/video_source_restrictions.h" + +namespace webrtc { + +class ResourceLimitationsListener { + public: + virtual ~ResourceLimitationsListener(); + + // The limitations on a resource were changed. This does not mean the current + // video restrictions have changed. + virtual void OnResourceLimitationChanged( + rtc::scoped_refptr resource, + const std::map, VideoAdaptationCounters>& + resource_limitations) = 0; +}; + +// The Resource Adaptation Processor is responsible for reacting to resource +// usage measurements (e.g. overusing or underusing CPU). When a resource is +// overused the Processor is responsible for performing mitigations in order to +// consume less resources. +class ResourceAdaptationProcessorInterface { + public: + virtual ~ResourceAdaptationProcessorInterface(); + + virtual void SetTaskQueue(TaskQueueBase* task_queue) = 0; + + virtual void AddResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) = 0; + virtual void RemoveResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) = 0; + // Starts or stops listening to resources, effectively enabling or disabling + // processing. May be called from anywhere. + // TODO(https://crbug.com/webrtc/11172): Automatically register and unregister + // with AddResource() and RemoveResource() instead. 
When the processor is + // multi-stream aware, stream-specific resouces will get added and removed + // over time. + virtual void AddResource(rtc::scoped_refptr resource) = 0; + virtual std::vector> GetResources() const = 0; + virtual void RemoveResource(rtc::scoped_refptr resource) = 0; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_ diff --git a/call/adaptation/resource_adaptation_processor_unittest.cc b/call/adaptation/resource_adaptation_processor_unittest.cc index 38f9fa1143..5e4f44b221 100644 --- a/call/adaptation/resource_adaptation_processor_unittest.cc +++ b/call/adaptation/resource_adaptation_processor_unittest.cc @@ -1,5 +1,5 @@ /* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -10,252 +10,728 @@ #include "call/adaptation/resource_adaptation_processor.h" -#include "absl/types/optional.h" -#include "call/adaptation/resource.h" +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" +#include "api/video/video_adaptation_counters.h" +#include "call/adaptation/resource_adaptation_processor_interface.h" +#include "call/adaptation/test/fake_frame_rate_provider.h" #include "call/adaptation/test/fake_resource.h" -#include "call/adaptation/test/fake_resource_consumer_configuration.h" -#include "test/gmock.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state_provider.h" +#include "rtc_base/event.h" +#include "rtc_base/gunit.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue_for_test.h" #include "test/gtest.h" namespace webrtc { -// The indices of different resolutions returned by -// AddStandardResolutionConfigurations(). 
-static size_t k1080pIndex = 0; -static size_t k720pIndex = 1; -static size_t k360pIndex = 2; -static size_t k180pIndex = 3; - -void ConnectNeighbors(ResourceConsumerConfiguration* upper, - ResourceConsumerConfiguration* lower) { - upper->AddLowerNeighbor(lower); - lower->AddUpperNeighbor(upper); +namespace { + +const int kDefaultFrameRate = 30; +const int kDefaultFrameSize = 1280 * 720; +const int kDefaultTimeoutMs = 5000; + +class VideoSourceRestrictionsListenerForTesting + : public VideoSourceRestrictionsListener { + public: + VideoSourceRestrictionsListenerForTesting() + : restrictions_updated_count_(0), + restrictions_(), + adaptation_counters_(), + reason_(nullptr) {} + ~VideoSourceRestrictionsListenerForTesting() override {} + + size_t restrictions_updated_count() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return restrictions_updated_count_; + } + VideoSourceRestrictions restrictions() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return restrictions_; + } + VideoAdaptationCounters adaptation_counters() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return adaptation_counters_; + } + rtc::scoped_refptr reason() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return reason_; + } + + // VideoSourceRestrictionsListener implementation. 
+ void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ++restrictions_updated_count_; + restrictions_ = restrictions; + adaptation_counters_ = adaptation_counters; + reason_ = reason; + } + + private: + SequenceChecker sequence_checker_; + size_t restrictions_updated_count_ RTC_GUARDED_BY(&sequence_checker_); + VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_); + VideoAdaptationCounters adaptation_counters_ + RTC_GUARDED_BY(&sequence_checker_); + rtc::scoped_refptr reason_ RTC_GUARDED_BY(&sequence_checker_); +}; + +class ResourceAdaptationProcessorTest : public ::testing::Test { + public: + ResourceAdaptationProcessorTest() + : frame_rate_provider_(), + input_state_provider_(&frame_rate_provider_), + resource_(FakeResource::Create("FakeResource")), + other_resource_(FakeResource::Create("OtherFakeResource")), + video_stream_adapter_( + std::make_unique(&input_state_provider_, + &frame_rate_provider_)), + processor_(std::make_unique( + video_stream_adapter_.get())) { + processor_->SetTaskQueue(TaskQueueBase::Current()); + video_stream_adapter_->AddRestrictionsListener(&restrictions_listener_); + processor_->AddResource(resource_); + processor_->AddResource(other_resource_); + } + ~ResourceAdaptationProcessorTest() override { + if (processor_) { + DestroyProcessor(); + } + } + + void SetInputStates(bool has_input, int fps, int frame_size) { + input_state_provider_.OnHasInputChanged(has_input); + frame_rate_provider_.set_fps(fps); + input_state_provider_.OnFrameSizeObserved(frame_size); + } + + void RestrictSource(VideoSourceRestrictions restrictions) { + SetInputStates( + true, restrictions.max_frame_rate().value_or(kDefaultFrameRate), + restrictions.target_pixels_per_frame().has_value() + ? 
restrictions.target_pixels_per_frame().value() + : restrictions.max_pixels_per_frame().value_or(kDefaultFrameSize)); + } + + void DestroyProcessor() { + if (resource_) { + processor_->RemoveResource(resource_); + } + if (other_resource_) { + processor_->RemoveResource(other_resource_); + } + video_stream_adapter_->RemoveRestrictionsListener(&restrictions_listener_); + processor_.reset(); + } + + static void WaitUntilTaskQueueIdle() { + ASSERT_TRUE(rtc::Thread::Current()->ProcessMessages(0)); + } + + protected: + FakeFrameRateProvider frame_rate_provider_; + VideoStreamInputStateProvider input_state_provider_; + rtc::scoped_refptr resource_; + rtc::scoped_refptr other_resource_; + std::unique_ptr video_stream_adapter_; + std::unique_ptr processor_; + VideoSourceRestrictionsListenerForTesting restrictions_listener_; +}; + +} // namespace + +TEST_F(ResourceAdaptationProcessorTest, DisabledByDefault) { + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + // Adaptation does not happen when disabled. + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); } -std::vector -AddStandardResolutionConfigurations(ResourceAdaptationProcessor* processor) { - std::vector configs; - configs.push_back(processor->AddConfiguration( - std::make_unique(1920, 1080, 30.0, - 1.0))); - configs.push_back(processor->AddConfiguration( - std::make_unique(1280, 720, 30.0, - 1.0))); - configs.push_back(processor->AddConfiguration( - std::make_unique(640, 360, 30.0, - 1.0))); - configs.push_back(processor->AddConfiguration( - std::make_unique(320, 180, 30.0, - 1.0))); - for (size_t i = 1; i < configs.size(); ++i) { - ConnectNeighbors(configs[i - 1], configs[i]); +TEST_F(ResourceAdaptationProcessorTest, InsufficientInput) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + // Adaptation does not happen if input is insufficient. 
+ // When frame size is missing (OnFrameSizeObserved not called yet). + input_state_provider_.OnHasInputChanged(true); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); + // When "has input" is missing. + SetInputStates(false, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); + // Note: frame rate cannot be missing, if unset it is 0. +} + +// These tests verify that restrictions are applied, but not exactly how much +// the source is restricted. This ensures that the VideoStreamAdapter is wired +// up correctly but not exactly how the VideoStreamAdapter generates +// restrictions. For that, see video_stream_adapter_unittest.cc. +TEST_F(ResourceAdaptationProcessorTest, + OveruseTriggersRestrictingResolutionInMaintainFrameRate) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + EXPECT_TRUE( + restrictions_listener_.restrictions().max_pixels_per_frame().has_value()); +} + +TEST_F(ResourceAdaptationProcessorTest, + OveruseTriggersRestrictingFrameRateInMaintainResolution) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_RESOLUTION); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + EXPECT_TRUE( + restrictions_listener_.restrictions().max_frame_rate().has_value()); +} + +TEST_F(ResourceAdaptationProcessorTest, + OveruseTriggersRestrictingFrameRateAndResolutionInBalanced) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::BALANCED); + 
SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + // Adapting multiple times eventually resticts both frame rate and + // resolution. Exactly many times we need to adapt depends on + // BalancedDegradationSettings, VideoStreamAdapter and default input + // states. This test requires it to be achieved within 4 adaptations. + for (size_t i = 0; i < 4; ++i) { + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(i + 1, restrictions_listener_.restrictions_updated_count()); + RestrictSource(restrictions_listener_.restrictions()); } - return configs; + EXPECT_TRUE( + restrictions_listener_.restrictions().max_pixels_per_frame().has_value()); + EXPECT_TRUE( + restrictions_listener_.restrictions().max_frame_rate().has_value()); } -TEST(ResourceAdaptationProcessorTest, - SingleStreamAndResourceDontAdaptDownWhenStable) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kStable)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - processor.AddConsumer(std::make_unique( - "OnlyStream", resolution_configs[k1080pIndex])); - EXPECT_EQ(absl::nullopt, processor.FindNextConfiguration()); +TEST_F(ResourceAdaptationProcessorTest, AwaitingPreviousAdaptation) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + // If we don't restrict the source then adaptation will not happen again + // due to "awaiting previous adaptation". This prevents "double-adapt". 
+ resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); } -TEST(ResourceAdaptationProcessorTest, - SingleStreamAndResourceAdaptDownOnOveruse) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kOveruse)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - auto* consumer = processor.AddConsumer(std::make_unique( - "OnlyStream", resolution_configs[k1080pIndex])); - auto next_config = processor.FindNextConfiguration(); - EXPECT_TRUE(next_config.has_value()); - EXPECT_EQ(consumer, next_config->consumer); - EXPECT_EQ(resolution_configs[k720pIndex], next_config->configuration); +TEST_F(ResourceAdaptationProcessorTest, CannotAdaptUpWhenUnrestricted) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); } -TEST(ResourceAdaptationProcessorTest, - SingleStreamAndResourceDontAdaptOnOveruseIfMinResolution) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kOveruse)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - processor.AddConsumer(std::make_unique( - "OnlyStream", resolution_configs.back())); - EXPECT_EQ(absl::nullopt, processor.FindNextConfiguration()); +TEST_F(ResourceAdaptationProcessorTest, UnderuseTakesUsBackToUnrestricted) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + RestrictSource(restrictions_listener_.restrictions()); + 
resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2u, restrictions_listener_.restrictions_updated_count()); + EXPECT_EQ(VideoSourceRestrictions(), restrictions_listener_.restrictions()); } -TEST(ResourceAdaptationProcessorTest, - SingleStreamAndResourceAdaptUpOnUnderuse) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kUnderuse)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - auto* consumer = processor.AddConsumer(std::make_unique( - "OnlyStream", resolution_configs[k720pIndex])); - auto next_config = processor.FindNextConfiguration(); - EXPECT_TRUE(next_config.has_value()); - EXPECT_EQ(consumer, next_config->consumer); - EXPECT_EQ(resolution_configs[k1080pIndex], next_config->configuration); +TEST_F(ResourceAdaptationProcessorTest, + ResourcesCanNotAdaptUpIfNeverAdaptedDown) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + RestrictSource(restrictions_listener_.restrictions()); + + // Other resource signals under-use + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); } -TEST(ResourceAdaptationProcessorTest, - SingleStreamAndResourceDontAdaptOnUnderuseIfMaxResolution) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kUnderuse)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - processor.AddConsumer(std::make_unique( - "OnlyStream", resolution_configs[k1080pIndex])); - EXPECT_EQ(absl::nullopt, processor.FindNextConfiguration()); +TEST_F(ResourceAdaptationProcessorTest, + ResourcesCanNotAdaptUpIfNotAdaptedDownAfterReset) { + 
video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + + video_stream_adapter_->ClearRestrictions(); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // resource_ did not overuse after we reset the restrictions, so adapt + // up should be disallowed. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); } -TEST(ResourceAdaptationProcessorTest, - MultipleStreamsLargestStreamGetsAdaptedDownOnOveruse) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kOveruse)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - auto* first_stream = processor.AddConsumer(std::make_unique( - "FirstStream", resolution_configs[k1080pIndex])); - auto* second_stream = - processor.AddConsumer(std::make_unique( - "SecondStream", resolution_configs[k720pIndex])); - // When the first stream is larger. - auto next_config = processor.FindNextConfiguration(); - EXPECT_TRUE(next_config.has_value()); - EXPECT_EQ(first_stream, next_config->consumer); - // When the second stream is larger. 
- first_stream->SetConfiguration(resolution_configs[k720pIndex]); - second_stream->SetConfiguration(resolution_configs[k1080pIndex]); - next_config = processor.FindNextConfiguration(); - EXPECT_TRUE(next_config.has_value()); - EXPECT_EQ(second_stream, next_config->consumer); +TEST_F(ResourceAdaptationProcessorTest, OnlyMostLimitedResourceMayAdaptUp) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // |other_resource_| is most limited, resource_ can't adapt up. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // |resource_| and |other_resource_| are now most limited, so both must + // signal underuse to adapt up. 
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); +} + +TEST_F(ResourceAdaptationProcessorTest, + MultipleResourcesCanTriggerMultipleAdaptations) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // resource_ is not most limited so can't adapt from underuse. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // resource_ is still not most limited so can't adapt from underuse. 
+ resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // However it will be after overuse + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // Now other_resource_ can't adapt up as it is not most restricted. + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // resource_ is limited at 3 adaptations and other_resource_ 2. + // With the most limited resource signalling underuse in the following + // order we get back to unrestricted video. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // Both resource_ and other_resource_ are most limited. + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // Again both are most limited. 
+ resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); } -TEST(ResourceAdaptationProcessorTest, - MultipleStreamsSmallestStreamGetsAdaptedUpOnUnderuse) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kUnderuse)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - auto* first_stream = processor.AddConsumer(std::make_unique( - "FirstStream", resolution_configs[k360pIndex])); - auto* second_stream = - processor.AddConsumer(std::make_unique( - "SecondStream", resolution_configs[k180pIndex])); - // When the first stream is larger. - auto next_config = processor.FindNextConfiguration(); - EXPECT_TRUE(next_config.has_value()); - EXPECT_EQ(second_stream, next_config->consumer); - // When the second stream is larger. - first_stream->SetConfiguration(resolution_configs[k180pIndex]); - second_stream->SetConfiguration(resolution_configs[k360pIndex]); - next_config = processor.FindNextConfiguration(); - EXPECT_TRUE(next_config.has_value()); - EXPECT_EQ(first_stream, next_config->consumer); +TEST_F(ResourceAdaptationProcessorTest, + MostLimitedResourceAdaptationWorksAfterChangingDegradataionPreference) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + // Adapt down until we can't anymore. 
+ resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + int last_total = restrictions_listener_.adaptation_counters().Total(); + + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_RESOLUTION); + // resource_ can not adapt up since we have never reduced FPS. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total()); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(last_total + 1, + restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // other_resource_ is most limited so should be able to adapt up. + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total()); } -// If both streams are equally valid to adapt down, the first one is preferred. 
-TEST(ResourceAdaptationProcessorTest, - MultipleStreamsAdaptFirstStreamWhenBothStreamsHaveSameCost) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kOveruse)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - auto* first_stream = processor.AddConsumer(std::make_unique( - "FirstStream", resolution_configs[k720pIndex])); - processor.AddConsumer(std::make_unique( - "SecondStream", resolution_configs[k720pIndex])); - auto next_config = processor.FindNextConfiguration(); - EXPECT_TRUE(next_config.has_value()); - EXPECT_EQ(first_stream, next_config->consumer); +TEST_F(ResourceAdaptationProcessorTest, + AdaptsDownWhenOtherResourceIsAlwaysUnderused) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + // Does not trigger adapataion because there's no restriction. + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); + + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + // Adapts down even if other resource asked for adapting up. + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + // Doesn't adapt up because adaptation is due to another resource. 
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); } -TEST(ResourceAdaptationProcessorTest, - MultipleResourcesAdaptDownIfAnyIsOverused) { - ResourceAdaptationProcessor processor; - auto* first_resource = processor.AddResource( - std::make_unique(ResourceUsageState::kOveruse)); - auto* second_resource = processor.AddResource( - std::make_unique(ResourceUsageState::kStable)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - processor.AddConsumer(std::make_unique( - "OnlyStream", resolution_configs[k1080pIndex])); - // When the first resource is overused. - EXPECT_TRUE(processor.FindNextConfiguration().has_value()); - // When the second resource is overused. - first_resource->set_usage(ResourceUsageState::kStable); - second_resource->set_usage(ResourceUsageState::kOveruse); - EXPECT_TRUE(processor.FindNextConfiguration().has_value()); +TEST_F(ResourceAdaptationProcessorTest, + TriggerOveruseNotOnAdaptationTaskQueue) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + TaskQueueForTest resource_task_queue("ResourceTaskQueue"); + resource_task_queue.PostTask(ToQueuedTask( + [&]() { resource_->SetUsageState(ResourceUsageState::kOveruse); })); + + EXPECT_EQ_WAIT(1u, restrictions_listener_.restrictions_updated_count(), + kDefaultTimeoutMs); } -TEST(ResourceAdaptationProcessorTest, - MultipleResourcesAdaptUpIfAllAreUnderused) { - ResourceAdaptationProcessor processor; - processor.AddResource( - std::make_unique(ResourceUsageState::kUnderuse)); - auto* second_resource = processor.AddResource( - std::make_unique(ResourceUsageState::kStable)); - auto resolution_configs = AddStandardResolutionConfigurations(&processor); - processor.AddConsumer(std::make_unique( - "OnlyStream", resolution_configs[k720pIndex])); - // When only the first resource is underused. 
- EXPECT_EQ(absl::nullopt, processor.FindNextConfiguration()); - // When all resources are underused. - second_resource->set_usage(ResourceUsageState::kUnderuse); - EXPECT_TRUE(processor.FindNextConfiguration().has_value()); +TEST_F(ResourceAdaptationProcessorTest, + DestroyProcessorWhileResourceListenerDelegateHasTaskInFlight) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + // Wait for |resource_| to signal oversue first so we know that the delegate + // has passed it on to the processor's task queue. + rtc::Event resource_event; + TaskQueueForTest resource_task_queue("ResourceTaskQueue"); + resource_task_queue.PostTask(ToQueuedTask([&]() { + resource_->SetUsageState(ResourceUsageState::kOveruse); + resource_event.Set(); + })); + + EXPECT_TRUE(resource_event.Wait(kDefaultTimeoutMs)); + // Now destroy the processor while handling the overuse is in flight. + DestroyProcessor(); + + // Because the processor was destroyed by the time the delegate's task ran, + // the overuse signal must not have been handled. + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); } -TEST(ResourceAdaptationProcessorTest, - HighestPreferredNeighborIsPickedWhenAdapting) { - ResourceAdaptationProcessor processor; - // Set up the following graph, where (#) is the preference. 
- // - // Downward arrows Upward arrows - // - // a(1) -----> b(2) a(1) <----- b(2) - // | ^ | ^ / ^ - // | / | | / | - // v / v | v | - // c(1.5) ---> d(2) c(1.5) <--- d(2) - // - auto* a = processor.AddConfiguration( - std::make_unique(1, 1, 1, 1.0)); - auto* b = processor.AddConfiguration( - std::make_unique(1, 1, 1, 2.0)); - auto* c = processor.AddConfiguration( - std::make_unique(1, 1, 1, 1.5)); - auto* d = processor.AddConfiguration( - std::make_unique(1, 1, 1, 2.0)); - ConnectNeighbors(a, b); - ConnectNeighbors(a, c); - ConnectNeighbors(b, d); - ConnectNeighbors(c, b); - ConnectNeighbors(c, d); - - auto* resource = processor.AddResource( - std::make_unique(ResourceUsageState::kOveruse)); - auto* consumer = processor.AddConsumer( - std::make_unique("OnlyStream", a)); - - // We should expect adapting down: a -> b -> d - EXPECT_EQ(b, processor.FindNextConfiguration()->configuration); - consumer->SetConfiguration(b); - EXPECT_EQ(d, processor.FindNextConfiguration()->configuration); - consumer->SetConfiguration(d); - - // We should expect to adapt up: d -> b -> c -> a - resource->set_usage(ResourceUsageState::kUnderuse); - EXPECT_EQ(b, processor.FindNextConfiguration()->configuration); - consumer->SetConfiguration(b); - EXPECT_EQ(c, processor.FindNextConfiguration()->configuration); - consumer->SetConfiguration(c); - EXPECT_EQ(a, processor.FindNextConfiguration()->configuration); +TEST_F(ResourceAdaptationProcessorTest, + ResourceOveruseIgnoredWhenSignalledDuringRemoval) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + rtc::Event overuse_event; + TaskQueueForTest resource_task_queue("ResourceTaskQueue"); + // Queues task for |resource_| overuse while |processor_| is still listening. 
+ resource_task_queue.PostTask(ToQueuedTask([&]() { + resource_->SetUsageState(ResourceUsageState::kOveruse); + overuse_event.Set(); + })); + EXPECT_TRUE(overuse_event.Wait(kDefaultTimeoutMs)); + // Once we know the overuse task is queued, remove |resource_| so that + // |processor_| is not listening to it. + processor_->RemoveResource(resource_); + + // Runs the queued task so |processor_| gets signalled kOveruse from + // |resource_| even though |processor_| was not listening. + WaitUntilTaskQueueIdle(); + + // No restrictions should change even though |resource_| signaled |kOveruse|. + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); + + // Delete |resource_| for cleanup. + resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingOnlyAdaptedResourceResetsAdaptation) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + processor_->RemoveResource(resource_); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceSetsAdaptationToNextLimitedLevel) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::BALANCED); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Removing most limited |resource_| should revert us back to + processor_->RemoveResource(resource_); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // Delete |resource_| for cleanup. + resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceSetsAdaptationIfInputStateUnchanged) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + + // Overuse twice and underuse once. 
After the underuse we don't restrict the + // source. Normally this would block future underuses. + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Removing most limited |resource_| should revert us back to, even though we + // did not call RestrictSource() after |resource_| was overused. Normally + // adaptation for MAINTAIN_FRAMERATE would be blocked here but for removal we + // allow this anyways. + processor_->RemoveResource(resource_); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingResourceNotMostLimitedHasNoEffectOnLimitations) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::BALANCED); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + VideoSourceRestrictions current_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters current_counters = + restrictions_listener_.adaptation_counters(); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Removing most limited |resource_| should revert us back to + processor_->RemoveResource(other_resource_); + EXPECT_EQ(current_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(current_counters, restrictions_listener_.adaptation_counters()); + + // Delete |other_resource_| for cleanup. 
+ other_resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceAfterSwitchingDegradationPreferences) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_RESOLUTION); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Revert to |other_resource_| when removing |resource_| even though the + // degradation preference was different when it was overused. + processor_->RemoveResource(resource_); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // After switching back to MAINTAIN_FRAMERATE, the next most limited settings + // are restored. + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceSetsNextLimitationsInDisabled) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::DISABLED); + + // Revert to |other_resource_| when removing |resource_| even though the + // current degradataion preference is disabled. + processor_->RemoveResource(resource_); + + // After switching back to MAINTAIN_FRAMERATE, the next most limited settings + // are restored. + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovedResourceSignalsIgnoredByProcessor) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + processor_->RemoveResource(resource_); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); + + // Delete |resource_| for cleanup. + resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingResourceWhenMultipleMostLimtedHasNoEffect) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + // Adapt |resource_| up and then down so that both resource's are most + // limited at 1 adaptation. + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + + // Removing |resource_| has no effect since both |resource_| and + // |other_resource_| are most limited. + processor_->RemoveResource(resource_); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + ResourceOverusedAtLimitReachedWillShareMostLimited) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + bool has_reached_min_pixels = false; + ON_CALL(frame_rate_provider_, OnMinPixelLimitReached()) + .WillByDefault(testing::Assign(&has_reached_min_pixels, true)); + + // Adapt 10 times, which should make us hit the limit. + for (int i = 0; i < 10; ++i) { + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + } + EXPECT_TRUE(has_reached_min_pixels); + auto last_update_count = restrictions_listener_.restrictions_updated_count(); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + // Now both |resource_| and |other_resource_| are most limited. Underuse of + // |resource_| will not adapt up. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(last_update_count, + restrictions_listener_.restrictions_updated_count()); } } // namespace webrtc diff --git a/call/adaptation/resource_consumer.cc b/call/adaptation/resource_consumer.cc deleted file mode 100644 index 3f9dfd825f..0000000000 --- a/call/adaptation/resource_consumer.cc +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/adaptation/resource_consumer.h" - -#include - -#include "call/adaptation/resource_consumer_configuration.h" -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" - -namespace webrtc { - -ResourceConsumer::ResourceConsumer(std::string name, - ResourceConsumerConfiguration* configuration) - : name_(std::move(name)), configuration_(configuration) { - RTC_DCHECK(!name_.empty()); - RTC_DCHECK(configuration_); -} - -ResourceConsumer::~ResourceConsumer() {} - -std::string ResourceConsumer::name() const { - return name_; -} - -ResourceConsumerConfiguration* ResourceConsumer::configuration() const { - return configuration_; -} - -void ResourceConsumer::SetConfiguration( - ResourceConsumerConfiguration* configuration) { - RTC_DCHECK(configuration); - configuration_ = configuration; -} - -std::string ResourceConsumer::ToString() const { - rtc::StringBuilder sb; - sb << name_ << ": " << configuration_->Name(); - return sb.str(); -} - -} // namespace webrtc diff --git a/call/adaptation/resource_consumer.h b/call/adaptation/resource_consumer.h deleted file mode 100644 index 131aa45c34..0000000000 --- a/call/adaptation/resource_consumer.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_ADAPTATION_RESOURCE_CONSUMER_H_ -#define CALL_ADAPTATION_RESOURCE_CONSUMER_H_ - -#include - -namespace webrtc { - -class ResourceConsumerConfiguration; - -// Something which affects resource consumption. Used by the -// ResourceAdaptationProcessor to calculate which configurations to use. 
-// -// For example, this could represent an encoder, and valid -// ResourceConsumerConfigurations would be encoder settings. How a consumer -// affects a resource is described by the ResourceConsumerConfiguration. -// -// The functionality provided by the base class is a name and pointer to the -// current configuration. How a consumers and configurations affect real parts -// of the system (like actual encoders) is implementation-specific. -class ResourceConsumer { - public: - ResourceConsumer(std::string name, - ResourceConsumerConfiguration* configuration); - ~ResourceConsumer(); - - std::string name() const; - ResourceConsumerConfiguration* configuration() const; - void SetConfiguration(ResourceConsumerConfiguration* configuration); - - std::string ToString() const; - - private: - std::string name_; - ResourceConsumerConfiguration* configuration_; -}; - -} // namespace webrtc - -#endif // CALL_ADAPTATION_RESOURCE_CONSUMER_H_ diff --git a/call/adaptation/resource_consumer_configuration.cc b/call/adaptation/resource_consumer_configuration.cc deleted file mode 100644 index ca3462eb4a..0000000000 --- a/call/adaptation/resource_consumer_configuration.cc +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/adaptation/resource_consumer_configuration.h" - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" - -namespace webrtc { - -ResourceConsumerConfiguration::~ResourceConsumerConfiguration() {} - -const std::vector& -ResourceConsumerConfiguration::upper_neighbors() const { - return upper_neighbors_; -} - -const std::vector& -ResourceConsumerConfiguration::lower_neighbors() const { - return lower_neighbors_; -} - -void ResourceConsumerConfiguration::AddUpperNeighbor( - ResourceConsumerConfiguration* upper_neighbor) { - upper_neighbors_.push_back(upper_neighbor); -} - -void ResourceConsumerConfiguration::AddLowerNeighbor( - ResourceConsumerConfiguration* lower_neighbor) { - lower_neighbors_.push_back(lower_neighbor); -} - -} // namespace webrtc diff --git a/call/adaptation/resource_consumer_configuration.h b/call/adaptation/resource_consumer_configuration.h deleted file mode 100644 index 462c339439..0000000000 --- a/call/adaptation/resource_consumer_configuration.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_ADAPTATION_RESOURCE_CONSUMER_CONFIGURATION_H_ -#define CALL_ADAPTATION_RESOURCE_CONSUMER_CONFIGURATION_H_ - -#include -#include -#include - -namespace webrtc { - -class Resource; - -// Represents a possible state for a ResourceConsumer. For example, if an -// encoder consumer can have the states "HD" and "VGA", there is one -// ResourceConsumerConfiguration for each state. "HD" is an upper neighbor of -// "VGA" and "VGA" is a lower neighbor of "HD". 
-class ResourceConsumerConfiguration { - public: - virtual ~ResourceConsumerConfiguration(); - - const std::vector& upper_neighbors() const; - const std::vector& lower_neighbors() const; - void AddUpperNeighbor(ResourceConsumerConfiguration* upper_neighbor); - void AddLowerNeighbor(ResourceConsumerConfiguration* lower_neighbor); - - virtual std::string Name() const = 0; - - // How expensive this configuration is. This is an abstract unit used by the - // ResourceAdaptationProcessor to compare configurations. When overusing, the - // consumer with the most expensive configuration will be adapted down. When - // underusing, the consumer with the least expensive configuration will be - // adapted up. The cost generally scales with pixels per second. The value - // must be non-negative. - virtual double Cost() const = 0; - - // How preferable this configuration is. The is an abstract unit used by the - // ResourceAdaptationProcessor to compare configurations. When a consumer is - // reconfigured to a neighbor configuration, the configuration with the - // highest preference value is preferred. The value must be non-negative. - virtual double Preference() const = 0; - - private: - // Configurations we can adapt "up" to when we are in |this| configuration, - // such as higher resolutions. - std::vector upper_neighbors_; - // Configurations we can adapt "down" to when we are in |this| configuration, - // such as lower resolutions. - std::vector lower_neighbors_; -}; - -} // namespace webrtc - -#endif // CALL_ADAPTATION_RESOURCE_CONSUMER_CONFIGURATION_H_ diff --git a/call/adaptation/resource_unittest.cc b/call/adaptation/resource_unittest.cc new file mode 100644 index 0000000000..a2291dfdce --- /dev/null +++ b/call/adaptation/resource_unittest.cc @@ -0,0 +1,55 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/adaptation/resource.h" + +#include + +#include "api/scoped_refptr.h" +#include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/test/mock_resource_listener.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using ::testing::_; +using ::testing::StrictMock; + +class ResourceTest : public ::testing::Test { + public: + ResourceTest() : fake_resource_(FakeResource::Create("FakeResource")) {} + + protected: + rtc::scoped_refptr fake_resource_; +}; + +TEST_F(ResourceTest, RegisteringListenerReceivesCallbacks) { + StrictMock resource_listener; + fake_resource_->SetResourceListener(&resource_listener); + EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + fake_resource_->SetUsageState(ResourceUsageState::kOveruse); + fake_resource_->SetResourceListener(nullptr); +} + +TEST_F(ResourceTest, UnregisteringListenerStopsCallbacks) { + StrictMock resource_listener; + fake_resource_->SetResourceListener(&resource_listener); + fake_resource_->SetResourceListener(nullptr); + EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _)).Times(0); + fake_resource_->SetUsageState(ResourceUsageState::kOveruse); +} + +} // namespace webrtc diff --git a/call/adaptation/test/fake_adaptation_constraint.cc b/call/adaptation/test/fake_adaptation_constraint.cc new file mode 100644 index 0000000000..18b8e8b696 --- /dev/null +++ b/call/adaptation/test/fake_adaptation_constraint.cc @@ -0,0 +1,38 @@ +/* + * Copyright 2020 The WebRTC 
Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/test/fake_adaptation_constraint.h" + +#include + +namespace webrtc { + +FakeAdaptationConstraint::FakeAdaptationConstraint(std::string name) + : name_(std::move(name)), is_adaptation_up_allowed_(true) {} + +FakeAdaptationConstraint::~FakeAdaptationConstraint() = default; + +void FakeAdaptationConstraint::set_is_adaptation_up_allowed( + bool is_adaptation_up_allowed) { + is_adaptation_up_allowed_ = is_adaptation_up_allowed; +} + +std::string FakeAdaptationConstraint::Name() const { + return name_; +} + +bool FakeAdaptationConstraint::IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const { + return is_adaptation_up_allowed_; +} + +} // namespace webrtc diff --git a/call/adaptation/test/fake_adaptation_constraint.h b/call/adaptation/test/fake_adaptation_constraint.h new file mode 100644 index 0000000000..021e46a501 --- /dev/null +++ b/call/adaptation/test/fake_adaptation_constraint.h @@ -0,0 +1,41 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_ +#define CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_ + +#include + +#include "call/adaptation/adaptation_constraint.h" + +namespace webrtc { + +class FakeAdaptationConstraint : public AdaptationConstraint { + public: + explicit FakeAdaptationConstraint(std::string name); + ~FakeAdaptationConstraint() override; + + void set_is_adaptation_up_allowed(bool is_adaptation_up_allowed); + + // AdaptationConstraint implementation. + std::string Name() const override; + bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const override; + + private: + const std::string name_; + bool is_adaptation_up_allowed_; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_ diff --git a/api/test/mock_video_decoder.cc b/call/adaptation/test/fake_frame_rate_provider.cc similarity index 52% rename from api/test/mock_video_decoder.cc rename to call/adaptation/test/fake_frame_rate_provider.cc index 85ed0e1678..65fee6a7ba 100644 --- a/api/test/mock_video_decoder.cc +++ b/call/adaptation/test/fake_frame_rate_provider.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "api/test/mock_video_decoder.h" +#include "call/adaptation/test/fake_frame_rate_provider.h" + +#include "test/gmock.h" + +using ::testing::Return; namespace webrtc { -MockDecodedImageCallback::MockDecodedImageCallback() = default; -MockDecodedImageCallback::~MockDecodedImageCallback() = default; -MockVideoDecoder::MockVideoDecoder() = default; -MockVideoDecoder::~MockVideoDecoder() = default; +FakeFrameRateProvider::FakeFrameRateProvider() { + set_fps(0); +} + +void FakeFrameRateProvider::set_fps(int fps) { + EXPECT_CALL(*this, GetInputFrameRate()).WillRepeatedly(Return(fps)); +} } // namespace webrtc diff --git a/call/adaptation/test/fake_frame_rate_provider.h b/call/adaptation/test/fake_frame_rate_provider.h new file mode 100644 index 0000000000..3638f478f3 --- /dev/null +++ b/call/adaptation/test/fake_frame_rate_provider.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef CALL_ADAPTATION_TEST_FAKE_FRAME_RATE_PROVIDER_H_ +#define CALL_ADAPTATION_TEST_FAKE_FRAME_RATE_PROVIDER_H_ + +#include +#include + +#include "api/video/video_stream_encoder_observer.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoStreamEncoderObserver : public VideoStreamEncoderObserver { + public: + MOCK_METHOD(void, OnEncodedFrameTimeMeasured, (int, int), (override)); + MOCK_METHOD(void, OnIncomingFrame, (int, int), (override)); + MOCK_METHOD(void, + OnSendEncodedImage, + (const EncodedImage&, const CodecSpecificInfo*), + (override)); + MOCK_METHOD(void, + OnEncoderImplementationChanged, + (const std::string&), + (override)); + MOCK_METHOD(void, OnFrameDropped, (DropReason), (override)); + MOCK_METHOD(void, + OnEncoderReconfigured, + (const VideoEncoderConfig&, const std::vector&), + (override)); + MOCK_METHOD(void, + OnAdaptationChanged, + (VideoAdaptationReason, + const VideoAdaptationCounters&, + const VideoAdaptationCounters&), + (override)); + MOCK_METHOD(void, ClearAdaptationStats, (), (override)); + MOCK_METHOD(void, + UpdateAdaptationSettings, + (AdaptationSettings, AdaptationSettings), + (override)); + MOCK_METHOD(void, OnMinPixelLimitReached, (), (override)); + MOCK_METHOD(void, OnInitialQualityResolutionAdaptDown, (), (override)); + MOCK_METHOD(void, OnSuspendChange, (bool), (override)); + MOCK_METHOD(void, + OnBitrateAllocationUpdated, + (const VideoCodec&, const VideoBitrateAllocation&), + (override)); + MOCK_METHOD(void, OnEncoderInternalScalerUpdate, (bool), (override)); + MOCK_METHOD(int, GetInputFrameRate, (), (const, override)); +}; + +class FakeFrameRateProvider : public MockVideoStreamEncoderObserver { + public: + FakeFrameRateProvider(); + void set_fps(int fps); +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_TEST_FAKE_FRAME_RATE_PROVIDER_H_ diff --git a/call/adaptation/test/fake_resource.cc b/call/adaptation/test/fake_resource.cc index 363fc26fe7..fa69e886bf 100644 --- 
a/call/adaptation/test/fake_resource.cc +++ b/call/adaptation/test/fake_resource.cc @@ -10,43 +10,35 @@ #include "call/adaptation/test/fake_resource.h" +#include #include +#include "rtc_base/ref_counted_object.h" + namespace webrtc { -FakeResource::FakeResource(std::string name, ResourceUsageState usage) - : name_(std::move(name)), usage_(usage) {} +// static +rtc::scoped_refptr FakeResource::Create(std::string name) { + return new rtc::RefCountedObject(name); +} -FakeResource::FakeResource(ResourceUsageState usage) - : FakeResource("UnnamedResource", usage) {} +FakeResource::FakeResource(std::string name) + : Resource(), name_(std::move(name)), listener_(nullptr) {} FakeResource::~FakeResource() {} -void FakeResource::set_usage(ResourceUsageState usage) { - usage_ = usage; +void FakeResource::SetUsageState(ResourceUsageState usage_state) { + if (listener_) { + listener_->OnResourceUsageStateMeasured(this, usage_state); + } } std::string FakeResource::Name() const { return name_; } -std::string FakeResource::UsageUnitsOfMeasurement() const { - return "%"; -} - -double FakeResource::CurrentUsage() const { - switch (usage_) { - case ResourceUsageState::kOveruse: - return 1.2; - case ResourceUsageState::kStable: - return 0.8; - case ResourceUsageState::kUnderuse: - return 0.4; - } -} - -ResourceUsageState FakeResource::CurrentUsageState() const { - return usage_; +void FakeResource::SetResourceListener(ResourceListener* listener) { + listener_ = listener; } } // namespace webrtc diff --git a/call/adaptation/test/fake_resource.h b/call/adaptation/test/fake_resource.h index 60291af6ae..e88d97db7a 100644 --- a/call/adaptation/test/fake_resource.h +++ b/call/adaptation/test/fake_resource.h @@ -12,31 +12,31 @@ #define CALL_ADAPTATION_TEST_FAKE_RESOURCE_H_ #include +#include -#include "call/adaptation/resource.h" +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" namespace webrtc { -// Fake resource used for testing. 
ResourceUsageState is controlled with a -// setter. The arbitrarily chosen unit of measurement is percentage, with the -// following current usage reported based on the current usage: kOveruse = 120%, -// kStable = 80% and kUnderuse = 40%. +// Fake resource used for testing. class FakeResource : public Resource { public: - FakeResource(std::string name, ResourceUsageState usage); - explicit FakeResource(ResourceUsageState usage); + static rtc::scoped_refptr Create(std::string name); + + explicit FakeResource(std::string name); ~FakeResource() override; - void set_usage(ResourceUsageState usage); + void SetUsageState(ResourceUsageState usage_state); + // Resource implementation. std::string Name() const override; - std::string UsageUnitsOfMeasurement() const override; - double CurrentUsage() const override; - ResourceUsageState CurrentUsageState() const override; + void SetResourceListener(ResourceListener* listener) override; private: - std::string name_; - ResourceUsageState usage_; + const std::string name_; + ResourceListener* listener_; }; } // namespace webrtc diff --git a/call/adaptation/test/fake_resource_consumer_configuration.cc b/call/adaptation/test/fake_resource_consumer_configuration.cc deleted file mode 100644 index afc743cf4c..0000000000 --- a/call/adaptation/test/fake_resource_consumer_configuration.cc +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/adaptation/test/fake_resource_consumer_configuration.h" - -#include "rtc_base/strings/string_builder.h" - -namespace webrtc { - -FakeResourceConsumerConfiguration::FakeResourceConsumerConfiguration( - int width, - int height, - double frame_rate_hz, - double preference) - : width_(width), - height_(height), - frame_rate_hz_(frame_rate_hz), - preference_(preference) {} - -std::string FakeResourceConsumerConfiguration::Name() const { - rtc::StringBuilder sb; - sb << width_ << "x" << height_ << "@" << rtc::ToString(frame_rate_hz_); - sb << "/" << rtc::ToString(preference_); - return sb.str(); -} - -double FakeResourceConsumerConfiguration::Cost() const { - return width_ * height_ * frame_rate_hz_; -} - -double FakeResourceConsumerConfiguration::Preference() const { - return preference_; -} - -} // namespace webrtc diff --git a/call/adaptation/test/fake_resource_consumer_configuration.h b/call/adaptation/test/fake_resource_consumer_configuration.h deleted file mode 100644 index d0d25961ed..0000000000 --- a/call/adaptation/test/fake_resource_consumer_configuration.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef CALL_ADAPTATION_TEST_FAKE_RESOURCE_CONSUMER_CONFIGURATION_H_ -#define CALL_ADAPTATION_TEST_FAKE_RESOURCE_CONSUMER_CONFIGURATION_H_ - -#include - -#include "call/adaptation/resource_consumer_configuration.h" - -namespace webrtc { - -class FakeResourceConsumerConfiguration : public ResourceConsumerConfiguration { - public: - FakeResourceConsumerConfiguration(int width, - int height, - double frame_rate_hz, - double preference); - - std::string Name() const override; - double Cost() const override; - double Preference() const override; - - private: - int width_; - int height_; - double frame_rate_hz_; - double preference_; -}; - -} // namespace webrtc - -#endif // CALL_ADAPTATION_TEST_FAKE_RESOURCE_CONSUMER_CONFIGURATION_H_ diff --git a/call/adaptation/test/mock_resource_listener.h b/call/adaptation/test/mock_resource_listener.h new file mode 100644 index 0000000000..f0f998f2e3 --- /dev/null +++ b/call/adaptation/test/mock_resource_listener.h @@ -0,0 +1,31 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_ +#define CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_ + +#include "api/adaptation/resource.h" + +#include "test/gmock.h" + +namespace webrtc { + +class MockResourceListener : public ResourceListener { + public: + MOCK_METHOD(void, + OnResourceUsageStateMeasured, + (rtc::scoped_refptr resource, + ResourceUsageState usage_state), + (override)); +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_ diff --git a/call/adaptation/video_source_restrictions.cc b/call/adaptation/video_source_restrictions.cc index b848bf80bc..e9d6c26137 100644 --- a/call/adaptation/video_source_restrictions.cc +++ b/call/adaptation/video_source_restrictions.cc @@ -13,6 +13,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -36,6 +37,19 @@ VideoSourceRestrictions::VideoSourceRestrictions( RTC_DCHECK(!max_frame_rate_.has_value() || max_frame_rate_.value() > 0.0); } +std::string VideoSourceRestrictions::ToString() const { + rtc::StringBuilder ss; + ss << "{"; + if (max_frame_rate_) + ss << " max_fps=" << max_frame_rate_.value(); + if (max_pixels_per_frame_) + ss << " max_pixels_per_frame=" << max_pixels_per_frame_.value(); + if (target_pixels_per_frame_) + ss << " target_pixels_per_frame=" << target_pixels_per_frame_.value(); + ss << " }"; + return ss.Release(); +} + const absl::optional& VideoSourceRestrictions::max_pixels_per_frame() const { return max_pixels_per_frame_; @@ -65,4 +79,70 @@ void VideoSourceRestrictions::set_max_frame_rate( max_frame_rate_ = std::move(max_frame_rate); } +bool DidRestrictionsIncrease(VideoSourceRestrictions before, + VideoSourceRestrictions after) { + bool decreased_resolution = DidDecreaseResolution(before, after); + bool decreased_framerate = DidDecreaseFrameRate(before, after); + bool same_resolution = + before.max_pixels_per_frame() == after.max_pixels_per_frame(); + bool same_framerate = 
before.max_frame_rate() == after.max_frame_rate(); + + return (decreased_resolution && decreased_framerate) || + (decreased_resolution && same_framerate) || + (same_resolution && decreased_framerate); +} + +bool DidRestrictionsDecrease(VideoSourceRestrictions before, + VideoSourceRestrictions after) { + bool increased_resolution = DidIncreaseResolution(before, after); + bool increased_framerate = DidIncreaseFrameRate(before, after); + bool same_resolution = + before.max_pixels_per_frame() == after.max_pixels_per_frame(); + bool same_framerate = before.max_frame_rate() == after.max_frame_rate(); + + return (increased_resolution && increased_framerate) || + (increased_resolution && same_framerate) || + (same_resolution && increased_framerate); +} + +bool DidIncreaseResolution(VideoSourceRestrictions restrictions_before, + VideoSourceRestrictions restrictions_after) { + if (!restrictions_before.max_pixels_per_frame().has_value()) + return false; + if (!restrictions_after.max_pixels_per_frame().has_value()) + return true; + return restrictions_after.max_pixels_per_frame().value() > + restrictions_before.max_pixels_per_frame().value(); +} + +bool DidDecreaseResolution(VideoSourceRestrictions restrictions_before, + VideoSourceRestrictions restrictions_after) { + if (!restrictions_after.max_pixels_per_frame().has_value()) + return false; + if (!restrictions_before.max_pixels_per_frame().has_value()) + return true; + return restrictions_after.max_pixels_per_frame().value() < + restrictions_before.max_pixels_per_frame().value(); +} + +bool DidIncreaseFrameRate(VideoSourceRestrictions restrictions_before, + VideoSourceRestrictions restrictions_after) { + if (!restrictions_before.max_frame_rate().has_value()) + return false; + if (!restrictions_after.max_frame_rate().has_value()) + return true; + return restrictions_after.max_frame_rate().value() > + restrictions_before.max_frame_rate().value(); +} + +bool DidDecreaseFrameRate(VideoSourceRestrictions restrictions_before, + 
VideoSourceRestrictions restrictions_after) { + if (!restrictions_after.max_frame_rate().has_value()) + return false; + if (!restrictions_before.max_frame_rate().has_value()) + return true; + return restrictions_after.max_frame_rate().value() < + restrictions_before.max_frame_rate().value(); +} + } // namespace webrtc diff --git a/call/adaptation/video_source_restrictions.h b/call/adaptation/video_source_restrictions.h index a992084d06..7f79a48e5d 100644 --- a/call/adaptation/video_source_restrictions.h +++ b/call/adaptation/video_source_restrictions.h @@ -11,6 +11,7 @@ #ifndef CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_ #define CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_ +#include #include #include "absl/types/optional.h" @@ -38,7 +39,19 @@ class VideoSourceRestrictions { return !(*this == rhs); } + std::string ToString() const; + + // The source must produce a resolution less than or equal to + // max_pixels_per_frame(). const absl::optional& max_pixels_per_frame() const; + // The source should produce a resolution as close to the + // target_pixels_per_frame() as possible, provided this does not exceed + // max_pixels_per_frame(). + // The actual pixel count selected depends on the capabilities of the source. + // TODO(hbos): Clarify how "target" is used. One possible implementation: open + // the camera in the smallest resolution that is greater than or equal to the + // target and scale it down to the target if it is greater. Is this an + // accurate description of what this does today, or do we do something else? const absl::optional& target_pixels_per_frame() const; const absl::optional& max_frame_rate() const; @@ -50,13 +63,24 @@ class VideoSourceRestrictions { private: // These map to rtc::VideoSinkWants's |max_pixel_count| and // |target_pixel_count|. - // TODO(hbos): It's not clear what "target" means; either make it well-defined - // or remove it in favor of only using |max_pixels_per_frame_|. 
absl::optional max_pixels_per_frame_; absl::optional target_pixels_per_frame_; absl::optional max_frame_rate_; }; +bool DidRestrictionsIncrease(VideoSourceRestrictions before, + VideoSourceRestrictions after); +bool DidRestrictionsDecrease(VideoSourceRestrictions before, + VideoSourceRestrictions after); +bool DidIncreaseResolution(VideoSourceRestrictions restrictions_before, + VideoSourceRestrictions restrictions_after); +bool DidDecreaseResolution(VideoSourceRestrictions restrictions_before, + VideoSourceRestrictions restrictions_after); +bool DidIncreaseFrameRate(VideoSourceRestrictions restrictions_before, + VideoSourceRestrictions restrictions_after); +bool DidDecreaseFrameRate(VideoSourceRestrictions restrictions_before, + VideoSourceRestrictions restrictions_after); + } // namespace webrtc #endif // CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_ diff --git a/call/adaptation/video_source_restrictions_unittest.cc b/call/adaptation/video_source_restrictions_unittest.cc new file mode 100644 index 0000000000..92e34f96f3 --- /dev/null +++ b/call/adaptation/video_source_restrictions_unittest.cc @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "call/adaptation/video_source_restrictions.h"
+
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+const size_t kHdPixels = 1280 * 720;
+
+const VideoSourceRestrictions kUnlimited;
+const VideoSourceRestrictions k15fps(absl::nullopt, absl::nullopt, 15.0);
+const VideoSourceRestrictions kHd(kHdPixels, kHdPixels, absl::nullopt);
+const VideoSourceRestrictions kHd15fps(kHdPixels, kHdPixels, 15.0);
+const VideoSourceRestrictions kVga7fps(kHdPixels / 2, kHdPixels / 2, 7.0);
+
+VideoSourceRestrictions RestrictionsFromMaxPixelsPerFrame(
+    size_t max_pixels_per_frame) {
+  return VideoSourceRestrictions(max_pixels_per_frame, absl::nullopt,
+                                 absl::nullopt);
+}
+
+VideoSourceRestrictions RestrictionsFromMaxFrameRate(double max_frame_rate) {
+  return VideoSourceRestrictions(absl::nullopt, absl::nullopt, max_frame_rate);
+}
+
+}  // namespace
+
+TEST(VideoSourceRestrictionsTest, DidIncreaseResolution) {
+  // smaller restrictions -> larger restrictions
+  EXPECT_TRUE(DidIncreaseResolution(RestrictionsFromMaxPixelsPerFrame(10),
+                                    RestrictionsFromMaxPixelsPerFrame(11)));
+  // unrestricted -> restricted
+  EXPECT_FALSE(DidIncreaseResolution(VideoSourceRestrictions(),
+                                     RestrictionsFromMaxPixelsPerFrame(10)));
+  // restricted -> unrestricted
+  EXPECT_TRUE(DidIncreaseResolution(RestrictionsFromMaxPixelsPerFrame(10),
+                                    VideoSourceRestrictions()));
+  // restricted -> equally restricted
+  EXPECT_FALSE(DidIncreaseResolution(RestrictionsFromMaxPixelsPerFrame(10),
+                                     RestrictionsFromMaxPixelsPerFrame(10)));
+  // unrestricted -> unrestricted
+  EXPECT_FALSE(DidIncreaseResolution(VideoSourceRestrictions(),
+                                     VideoSourceRestrictions()));
+  // larger restrictions -> smaller restrictions
+  EXPECT_FALSE(DidIncreaseResolution(RestrictionsFromMaxPixelsPerFrame(10),
+                                     RestrictionsFromMaxPixelsPerFrame(9)));
+}
+
+TEST(VideoSourceRestrictionsTest, DidDecreaseFrameRate) {
+  // smaller restrictions -> larger restrictions
+  EXPECT_FALSE(DidDecreaseFrameRate(RestrictionsFromMaxFrameRate(10),
+                                    RestrictionsFromMaxFrameRate(11)));
+  // unrestricted -> restricted
+  EXPECT_TRUE(DidDecreaseFrameRate(VideoSourceRestrictions(),
+                                   RestrictionsFromMaxFrameRate(10)));
+  // restricted -> unrestricted
+  EXPECT_FALSE(DidDecreaseFrameRate(RestrictionsFromMaxFrameRate(10),
+                                    VideoSourceRestrictions()));
+  // restricted -> equally restricted
+  EXPECT_FALSE(DidDecreaseFrameRate(RestrictionsFromMaxFrameRate(10),
+                                    RestrictionsFromMaxFrameRate(10)));
+  // unrestricted -> unrestricted
+  EXPECT_FALSE(DidDecreaseFrameRate(VideoSourceRestrictions(),
+                                    VideoSourceRestrictions()));
+  // larger restrictions -> smaller restrictions
+  EXPECT_TRUE(DidDecreaseFrameRate(RestrictionsFromMaxFrameRate(10),
+                                   RestrictionsFromMaxFrameRate(9)));
+}
+
+TEST(VideoSourceRestrictionsTest, DidRestrictionsChangeFalseForSame) {
+  EXPECT_FALSE(DidRestrictionsDecrease(kUnlimited, kUnlimited));
+  EXPECT_FALSE(DidRestrictionsIncrease(kUnlimited, kUnlimited));
+
+  // Both resolution and fps restricted.
+  EXPECT_FALSE(DidRestrictionsDecrease(kHd15fps, kHd15fps));
+  EXPECT_FALSE(DidRestrictionsIncrease(kHd15fps, kHd15fps));
+}
+
+TEST(VideoSourceRestrictions,
+     DidRestrictionsIncreaseTrueWhenPixelsOrFrameRateDecreased) {
+  // Unlimited > Limited resolution.
+  EXPECT_TRUE(DidRestrictionsIncrease(kUnlimited, kHd));
+  // Unlimited > limited fps.
+  EXPECT_TRUE(DidRestrictionsIncrease(kUnlimited, k15fps));
+  // Unlimited > limited resolution + limited fps.
+  EXPECT_TRUE(DidRestrictionsIncrease(kUnlimited, kHd15fps));
+  // Limited resolution > limited resolution + limited fps.
+  EXPECT_TRUE(DidRestrictionsIncrease(kHd, kHd15fps));
+  // Limited fps > limited resolution + limited fps.
+ EXPECT_TRUE(DidRestrictionsIncrease(k15fps, kHd15fps)); + // Limited resolution + fps > More limited resolution + more limited fps + EXPECT_TRUE(DidRestrictionsIncrease(kHd15fps, kVga7fps)); +} + +TEST(VideoSourceRestrictions, + DidRestrictionsDecreaseTrueWhenPixelsOrFrameRateIncreased) { + // Limited resolution < Unlimited. + EXPECT_TRUE(DidRestrictionsDecrease(kHd, kUnlimited)); + // Limited fps < Unlimited. + EXPECT_TRUE(DidRestrictionsDecrease(k15fps, kUnlimited)); + // Limited resolution + limited fps < unlimited. + EXPECT_TRUE(DidRestrictionsDecrease(kHd15fps, kUnlimited)); + // Limited resolution + limited fps < limited resolution. + EXPECT_TRUE(DidRestrictionsDecrease(kHd15fps, kHd)); + // Limited resolution + limited fps < limited fps. + EXPECT_TRUE(DidRestrictionsDecrease(kHd15fps, k15fps)); + // More limited resolution + more limited fps < limited resolution + fps + EXPECT_TRUE(DidRestrictionsDecrease(kVga7fps, kHd15fps)); +} + +TEST(VideoSourceRestrictions, + DidRestrictionsChangeFalseWhenFrameRateAndPixelsChangeDifferently) { + // One changed framerate, the other resolution; not an increase or decrease. + EXPECT_FALSE(DidRestrictionsIncrease(kHd, k15fps)); + EXPECT_FALSE(DidRestrictionsDecrease(kHd, k15fps)); +} + +} // namespace webrtc diff --git a/call/adaptation/video_stream_adapter.cc b/call/adaptation/video_stream_adapter.cc new file mode 100644 index 0000000000..4fc4743a32 --- /dev/null +++ b/call/adaptation/video_stream_adapter.cc @@ -0,0 +1,696 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "call/adaptation/video_stream_adapter.h" + +#include +#include +#include + +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_adaptation_reason.h" +#include "api/video_codecs/video_encoder.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +const int kMinFrameRateFps = 2; + +namespace { + +// For frame rate, the steps we take are 2/3 (down) and 3/2 (up). +int GetLowerFrameRateThan(int fps) { + RTC_DCHECK(fps != std::numeric_limits::max()); + return (fps * 2) / 3; +} +// TODO(hbos): Use absl::optional<> instead? +int GetHigherFrameRateThan(int fps) { + return fps != std::numeric_limits::max() + ? (fps * 3) / 2 + : std::numeric_limits::max(); +} + +// For resolution, the steps we take are 3/5 (down) and 5/3 (up). +// Notice the asymmetry of which restriction property is set depending on if +// we are adapting up or down: +// - VideoSourceRestrictor::DecreaseResolution() sets the max_pixels_per_frame() +// to the desired target and target_pixels_per_frame() to null. +// - VideoSourceRestrictor::IncreaseResolutionTo() sets the +// target_pixels_per_frame() to the desired target, and max_pixels_per_frame() +// is set according to VideoSourceRestrictor::GetIncreasedMaxPixelsWanted(). +int GetLowerResolutionThan(int pixel_count) { + RTC_DCHECK(pixel_count != std::numeric_limits::max()); + return (pixel_count * 3) / 5; +} + +int GetIncreasedMaxPixelsWanted(int target_pixels) { + if (target_pixels == std::numeric_limits::max()) + return std::numeric_limits::max(); + // When we decrease resolution, we go down to at most 3/5 of current pixels. 
+ // Thus to increase resolution, we need 3/5 to get back to where we started. + // When going up, the desired max_pixels_per_frame() has to be significantly + // higher than the target because the source's native resolutions might not + // match the target. We pick 12/5 of the target. + // + // (This value was historically 4 times the old target, which is (3/5)*4 of + // the new target - or 12/5 - assuming the target is adjusted according to + // the above steps.) + RTC_DCHECK(target_pixels != std::numeric_limits::max()); + return (target_pixels * 12) / 5; +} + +bool CanDecreaseResolutionTo(int target_pixels, + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions) { + int max_pixels_per_frame = + rtc::dchecked_cast(restrictions.max_pixels_per_frame().value_or( + std::numeric_limits::max())); + return target_pixels < max_pixels_per_frame && + target_pixels >= input_state.min_pixels_per_frame(); +} + +bool CanIncreaseResolutionTo(int target_pixels, + const VideoSourceRestrictions& restrictions) { + int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels); + int max_pixels_per_frame = + rtc::dchecked_cast(restrictions.max_pixels_per_frame().value_or( + std::numeric_limits::max())); + return max_pixels_wanted > max_pixels_per_frame; +} + +bool CanDecreaseFrameRateTo(int max_frame_rate, + const VideoSourceRestrictions& restrictions) { + const int fps_wanted = std::max(kMinFrameRateFps, max_frame_rate); + return fps_wanted < + rtc::dchecked_cast(restrictions.max_frame_rate().value_or( + std::numeric_limits::max())); +} + +bool CanIncreaseFrameRateTo(int max_frame_rate, + const VideoSourceRestrictions& restrictions) { + return max_frame_rate > + rtc::dchecked_cast(restrictions.max_frame_rate().value_or( + std::numeric_limits::max())); +} + +bool MinPixelLimitReached(const VideoStreamInputState& input_state) { + return input_state.frame_size_pixels().has_value() && + 
GetLowerResolutionThan(input_state.frame_size_pixels().value()) < + input_state.min_pixels_per_frame(); +} + +} // namespace + +VideoSourceRestrictionsListener::~VideoSourceRestrictionsListener() = default; + +VideoSourceRestrictions FilterRestrictionsByDegradationPreference( + VideoSourceRestrictions source_restrictions, + DegradationPreference degradation_preference) { + switch (degradation_preference) { + case DegradationPreference::BALANCED: + break; + case DegradationPreference::MAINTAIN_FRAMERATE: + source_restrictions.set_max_frame_rate(absl::nullopt); + break; + case DegradationPreference::MAINTAIN_RESOLUTION: + source_restrictions.set_max_pixels_per_frame(absl::nullopt); + source_restrictions.set_target_pixels_per_frame(absl::nullopt); + break; + case DegradationPreference::DISABLED: + source_restrictions.set_max_pixels_per_frame(absl::nullopt); + source_restrictions.set_target_pixels_per_frame(absl::nullopt); + source_restrictions.set_max_frame_rate(absl::nullopt); + } + return source_restrictions; +} + +// TODO(hbos): Use absl::optional<> instead? +int GetHigherResolutionThan(int pixel_count) { + return pixel_count != std::numeric_limits::max() + ? 
(pixel_count * 5) / 3 + : std::numeric_limits::max(); +} + +// static +const char* Adaptation::StatusToString(Adaptation::Status status) { + switch (status) { + case Adaptation::Status::kValid: + return "kValid"; + case Adaptation::Status::kLimitReached: + return "kLimitReached"; + case Adaptation::Status::kAwaitingPreviousAdaptation: + return "kAwaitingPreviousAdaptation"; + case Status::kInsufficientInput: + return "kInsufficientInput"; + case Status::kAdaptationDisabled: + return "kAdaptationDisabled"; + case Status::kRejectedByConstraint: + return "kRejectedByConstraint"; + } + RTC_CHECK_NOTREACHED(); +} + +Adaptation::Adaptation(int validation_id, + VideoSourceRestrictions restrictions, + VideoAdaptationCounters counters, + VideoStreamInputState input_state) + : validation_id_(validation_id), + status_(Status::kValid), + input_state_(std::move(input_state)), + restrictions_(std::move(restrictions)), + counters_(std::move(counters)) {} + +Adaptation::Adaptation(int validation_id, Status invalid_status) + : validation_id_(validation_id), status_(invalid_status) { + RTC_DCHECK_NE(status_, Status::kValid); +} + +Adaptation::Status Adaptation::status() const { + return status_; +} + +const VideoStreamInputState& Adaptation::input_state() const { + return input_state_; +} + +const VideoSourceRestrictions& Adaptation::restrictions() const { + return restrictions_; +} + +const VideoAdaptationCounters& Adaptation::counters() const { + return counters_; +} + +VideoStreamAdapter::VideoStreamAdapter( + VideoStreamInputStateProvider* input_state_provider, + VideoStreamEncoderObserver* encoder_stats_observer) + : input_state_provider_(input_state_provider), + encoder_stats_observer_(encoder_stats_observer), + adaptation_validation_id_(0), + degradation_preference_(DegradationPreference::DISABLED), + awaiting_frame_size_change_(absl::nullopt) { + sequence_checker_.Detach(); + RTC_DCHECK(input_state_provider_); + RTC_DCHECK(encoder_stats_observer_); +} + 
+VideoStreamAdapter::~VideoStreamAdapter() { + RTC_DCHECK(adaptation_constraints_.empty()) + << "There are constraint(s) attached to a VideoStreamAdapter being " + "destroyed."; +} + +VideoSourceRestrictions VideoStreamAdapter::source_restrictions() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return current_restrictions_.restrictions; +} + +const VideoAdaptationCounters& VideoStreamAdapter::adaptation_counters() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return current_restrictions_.counters; +} + +void VideoStreamAdapter::ClearRestrictions() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // Invalidate any previously returned Adaptation. + RTC_LOG(INFO) << "Resetting restrictions"; + ++adaptation_validation_id_; + current_restrictions_ = {VideoSourceRestrictions(), + VideoAdaptationCounters()}; + awaiting_frame_size_change_ = absl::nullopt; + BroadcastVideoRestrictionsUpdate(input_state_provider_->InputState(), + nullptr); +} + +void VideoStreamAdapter::AddRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(std::find(restrictions_listeners_.begin(), + restrictions_listeners_.end(), + restrictions_listener) == restrictions_listeners_.end()); + restrictions_listeners_.push_back(restrictions_listener); +} + +void VideoStreamAdapter::RemoveRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + auto it = std::find(restrictions_listeners_.begin(), + restrictions_listeners_.end(), restrictions_listener); + RTC_DCHECK(it != restrictions_listeners_.end()); + restrictions_listeners_.erase(it); +} + +void VideoStreamAdapter::AddAdaptationConstraint( + AdaptationConstraint* adaptation_constraint) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(std::find(adaptation_constraints_.begin(), + adaptation_constraints_.end(), + adaptation_constraint) == adaptation_constraints_.end()); + 
adaptation_constraints_.push_back(adaptation_constraint); +} + +void VideoStreamAdapter::RemoveAdaptationConstraint( + AdaptationConstraint* adaptation_constraint) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + auto it = std::find(adaptation_constraints_.begin(), + adaptation_constraints_.end(), adaptation_constraint); + RTC_DCHECK(it != adaptation_constraints_.end()); + adaptation_constraints_.erase(it); +} + +void VideoStreamAdapter::SetDegradationPreference( + DegradationPreference degradation_preference) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (degradation_preference_ == degradation_preference) + return; + // Invalidate any previously returned Adaptation. + ++adaptation_validation_id_; + bool balanced_switch = + degradation_preference == DegradationPreference::BALANCED || + degradation_preference_ == DegradationPreference::BALANCED; + degradation_preference_ = degradation_preference; + if (balanced_switch) { + // ClearRestrictions() calls BroadcastVideoRestrictionsUpdate(nullptr). 
+ ClearRestrictions(); + } else { + BroadcastVideoRestrictionsUpdate(input_state_provider_->InputState(), + nullptr); + } +} + +struct VideoStreamAdapter::RestrictionsOrStateVisitor { + Adaptation operator()(const RestrictionsWithCounters& r) const { + return Adaptation(adaptation_validation_id, r.restrictions, r.counters, + input_state); + } + Adaptation operator()(const Adaptation::Status& status) const { + RTC_DCHECK_NE(status, Adaptation::Status::kValid); + return Adaptation(adaptation_validation_id, status); + } + + const int adaptation_validation_id; + const VideoStreamInputState& input_state; +}; + +Adaptation VideoStreamAdapter::RestrictionsOrStateToAdaptation( + VideoStreamAdapter::RestrictionsOrState step_or_state, + const VideoStreamInputState& input_state) const { + RTC_DCHECK(!step_or_state.valueless_by_exception()); + return absl::visit( + RestrictionsOrStateVisitor{adaptation_validation_id_, input_state}, + step_or_state); +} + +Adaptation VideoStreamAdapter::GetAdaptationUp( + const VideoStreamInputState& input_state) const { + RestrictionsOrState step = GetAdaptationUpStep(input_state); + // If an adaptation proposed, check with the constraints that it is ok. 
+ if (absl::holds_alternative(step)) { + RestrictionsWithCounters restrictions = + absl::get(step); + for (const auto* constraint : adaptation_constraints_) { + if (!constraint->IsAdaptationUpAllowed(input_state, + current_restrictions_.restrictions, + restrictions.restrictions)) { + RTC_LOG(INFO) << "Not adapting up because constraint \"" + << constraint->Name() << "\" disallowed it"; + step = Adaptation::Status::kRejectedByConstraint; + } + } + } + return RestrictionsOrStateToAdaptation(step, input_state); +} + +Adaptation VideoStreamAdapter::GetAdaptationUp() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoStreamInputState input_state = input_state_provider_->InputState(); + ++adaptation_validation_id_; + Adaptation adaptation = GetAdaptationUp(input_state); + return adaptation; +} + +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::GetAdaptationUpStep( + const VideoStreamInputState& input_state) const { + if (!HasSufficientInputForAdaptation(input_state)) { + return Adaptation::Status::kInsufficientInput; + } + // Don't adapt if we're awaiting a previous adaptation to have an effect. + if (awaiting_frame_size_change_ && + awaiting_frame_size_change_->pixels_increased && + degradation_preference_ == DegradationPreference::MAINTAIN_FRAMERATE && + input_state.frame_size_pixels().value() <= + awaiting_frame_size_change_->frame_size_pixels) { + return Adaptation::Status::kAwaitingPreviousAdaptation; + } + + // Maybe propose targets based on degradation preference. + switch (degradation_preference_) { + case DegradationPreference::BALANCED: { + // Attempt to increase target frame rate. + RestrictionsOrState increase_frame_rate = + IncreaseFramerate(input_state, current_restrictions_); + if (absl::holds_alternative( + increase_frame_rate)) { + return increase_frame_rate; + } + // else, increase resolution. + ABSL_FALLTHROUGH_INTENDED; + } + case DegradationPreference::MAINTAIN_FRAMERATE: { + // Attempt to increase pixel count. 
+ return IncreaseResolution(input_state, current_restrictions_); + } + case DegradationPreference::MAINTAIN_RESOLUTION: { + // Scale up framerate. + return IncreaseFramerate(input_state, current_restrictions_); + } + case DegradationPreference::DISABLED: + return Adaptation::Status::kAdaptationDisabled; + } + RTC_CHECK_NOTREACHED(); +} + +Adaptation VideoStreamAdapter::GetAdaptationDown() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoStreamInputState input_state = input_state_provider_->InputState(); + ++adaptation_validation_id_; + RestrictionsOrState restrictions_or_state = + GetAdaptationDownStep(input_state, current_restrictions_); + if (MinPixelLimitReached(input_state)) { + encoder_stats_observer_->OnMinPixelLimitReached(); + } + // Check for min_fps + if (degradation_preference_ == DegradationPreference::BALANCED && + absl::holds_alternative( + restrictions_or_state)) { + restrictions_or_state = AdaptIfFpsDiffInsufficient( + input_state, + absl::get(restrictions_or_state)); + } + return RestrictionsOrStateToAdaptation(restrictions_or_state, input_state); +} + +VideoStreamAdapter::RestrictionsOrState +VideoStreamAdapter::AdaptIfFpsDiffInsufficient( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& restrictions) const { + RTC_DCHECK_EQ(degradation_preference_, DegradationPreference::BALANCED); + absl::optional min_fps_diff = + balanced_settings_.MinFpsDiff(input_state.frame_size_pixels().value()); + if (current_restrictions_.counters.fps_adaptations < + restrictions.counters.fps_adaptations && + min_fps_diff && input_state.frames_per_second() > 0) { + int fps_diff = input_state.frames_per_second() - + restrictions.restrictions.max_frame_rate().value(); + if (fps_diff < min_fps_diff.value()) { + return GetAdaptationDownStep(input_state, restrictions); + } + } + return restrictions; +} + +VideoStreamAdapter::RestrictionsOrState +VideoStreamAdapter::GetAdaptationDownStep( + const VideoStreamInputState& input_state, + const 
RestrictionsWithCounters& current_restrictions) const { + if (!HasSufficientInputForAdaptation(input_state)) { + return Adaptation::Status::kInsufficientInput; + } + // Don't adapt if we're awaiting a previous adaptation to have an effect or + // if we switched degradation preference. + if (awaiting_frame_size_change_ && + !awaiting_frame_size_change_->pixels_increased && + degradation_preference_ == DegradationPreference::MAINTAIN_FRAMERATE && + input_state.frame_size_pixels().value() >= + awaiting_frame_size_change_->frame_size_pixels) { + return Adaptation::Status::kAwaitingPreviousAdaptation; + } + // Maybe propose targets based on degradation preference. + switch (degradation_preference_) { + case DegradationPreference::BALANCED: { + // Try scale down framerate, if lower. + RestrictionsOrState decrease_frame_rate = + DecreaseFramerate(input_state, current_restrictions); + if (absl::holds_alternative( + decrease_frame_rate)) { + return decrease_frame_rate; + } + // else, decrease resolution. 
+ ABSL_FALLTHROUGH_INTENDED; + } + case DegradationPreference::MAINTAIN_FRAMERATE: { + return DecreaseResolution(input_state, current_restrictions); + } + case DegradationPreference::MAINTAIN_RESOLUTION: { + return DecreaseFramerate(input_state, current_restrictions); + } + case DegradationPreference::DISABLED: + return Adaptation::Status::kAdaptationDisabled; + } + RTC_CHECK_NOTREACHED(); +} + +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) { + int target_pixels = + GetLowerResolutionThan(input_state.frame_size_pixels().value()); + if (!CanDecreaseResolutionTo(target_pixels, input_state, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + RestrictionsWithCounters new_restrictions = current_restrictions; + RTC_LOG(LS_INFO) << "Scaling down resolution, max pixels: " << target_pixels; + new_restrictions.restrictions.set_max_pixels_per_frame( + target_pixels != std::numeric_limits::max() + ? 
absl::optional(target_pixels) + : absl::nullopt); + new_restrictions.restrictions.set_target_pixels_per_frame(absl::nullopt); + ++new_restrictions.counters.resolution_adaptations; + return new_restrictions; +} + +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const { + int max_frame_rate; + if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) { + max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second()); + } else if (degradation_preference_ == DegradationPreference::BALANCED) { + max_frame_rate = + balanced_settings_.MinFps(input_state.video_codec_type(), + input_state.frame_size_pixels().value()); + } else { + RTC_NOTREACHED(); + max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second()); + } + if (!CanDecreaseFrameRateTo(max_frame_rate, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + RestrictionsWithCounters new_restrictions = current_restrictions; + max_frame_rate = std::max(kMinFrameRateFps, max_frame_rate); + RTC_LOG(LS_INFO) << "Scaling down framerate: " << max_frame_rate; + new_restrictions.restrictions.set_max_frame_rate( + max_frame_rate != std::numeric_limits::max() + ? 
absl::optional(max_frame_rate) + : absl::nullopt); + ++new_restrictions.counters.fps_adaptations; + return new_restrictions; +} + +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) { + int target_pixels = input_state.frame_size_pixels().value(); + if (current_restrictions.counters.resolution_adaptations == 1) { + RTC_LOG(LS_INFO) << "Removing resolution down-scaling setting."; + target_pixels = std::numeric_limits::max(); + } + target_pixels = GetHigherResolutionThan(target_pixels); + if (!CanIncreaseResolutionTo(target_pixels, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels); + RestrictionsWithCounters new_restrictions = current_restrictions; + RTC_LOG(LS_INFO) << "Scaling up resolution, max pixels: " + << max_pixels_wanted; + new_restrictions.restrictions.set_max_pixels_per_frame( + max_pixels_wanted != std::numeric_limits::max() + ? absl::optional(max_pixels_wanted) + : absl::nullopt); + new_restrictions.restrictions.set_target_pixels_per_frame( + max_pixels_wanted != std::numeric_limits::max() + ? 
 absl::optional(target_pixels) + : absl::nullopt); + --new_restrictions.counters.resolution_adaptations; + RTC_DCHECK_GE(new_restrictions.counters.resolution_adaptations, 0); + return new_restrictions; +} + +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const { + int max_frame_rate; + if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) { + max_frame_rate = GetHigherFrameRateThan(input_state.frames_per_second()); + } else if (degradation_preference_ == DegradationPreference::BALANCED) { + max_frame_rate = + balanced_settings_.MaxFps(input_state.video_codec_type(), + input_state.frame_size_pixels().value()); + // In BALANCED, the max_frame_rate must be checked before proceeding. This + // is because the MaxFps might be the current Fps and so the balanced + // settings may want to scale up the resolution. + if (!CanIncreaseFrameRateTo(max_frame_rate, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + } else { + RTC_NOTREACHED(); + max_frame_rate = GetHigherFrameRateThan(input_state.frames_per_second()); + } + if (current_restrictions.counters.fps_adaptations == 1) { + RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting."; + max_frame_rate = std::numeric_limits::max(); + } + if (!CanIncreaseFrameRateTo(max_frame_rate, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + RTC_LOG(LS_INFO) << "Scaling up framerate: " << max_frame_rate; + RestrictionsWithCounters new_restrictions = current_restrictions; + new_restrictions.restrictions.set_max_frame_rate( + max_frame_rate != std::numeric_limits::max() + ? 
 absl::optional(max_frame_rate) + : absl::nullopt); + --new_restrictions.counters.fps_adaptations; + RTC_DCHECK_GE(new_restrictions.counters.fps_adaptations, 0); + return new_restrictions; +} + +Adaptation VideoStreamAdapter::GetAdaptDownResolution() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoStreamInputState input_state = input_state_provider_->InputState(); + switch (degradation_preference_) { + case DegradationPreference::DISABLED: + return RestrictionsOrStateToAdaptation( + Adaptation::Status::kAdaptationDisabled, input_state); + case DegradationPreference::MAINTAIN_RESOLUTION: + return RestrictionsOrStateToAdaptation(Adaptation::Status::kLimitReached, + input_state); + case DegradationPreference::MAINTAIN_FRAMERATE: + return GetAdaptationDown(); + case DegradationPreference::BALANCED: { + return RestrictionsOrStateToAdaptation( + GetAdaptDownResolutionStepForBalanced(input_state), input_state); + } + } + RTC_CHECK_NOTREACHED(); +} + +VideoStreamAdapter::RestrictionsOrState +VideoStreamAdapter::GetAdaptDownResolutionStepForBalanced( + const VideoStreamInputState& input_state) const { + // Adapt twice if the first adaptation did not decrease resolution. + auto first_step = GetAdaptationDownStep(input_state, current_restrictions_); + if (!absl::holds_alternative(first_step)) { + return first_step; + } + auto first_restrictions = absl::get(first_step); + if (first_restrictions.counters.resolution_adaptations > + current_restrictions_.counters.resolution_adaptations) { + return first_step; + } + // We didn't decrease resolution so force it; amend a resolution reduction + // to the existing framerate reduction in |first_restrictions|. + auto second_step = DecreaseResolution(input_state, first_restrictions); + if (absl::holds_alternative(second_step)) { + return second_step; + } + // If the second step was not successful then settle for the first one. 
+ return first_step; +} + +void VideoStreamAdapter::ApplyAdaptation( + const Adaptation& adaptation, + rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_); + if (adaptation.status() != Adaptation::Status::kValid) + return; + // Remember the input pixels and fps of this adaptation. Used to avoid + // adapting again before this adaptation has had an effect. + if (DidIncreaseResolution(current_restrictions_.restrictions, + adaptation.restrictions())) { + awaiting_frame_size_change_.emplace( + true, adaptation.input_state().frame_size_pixels().value()); + } else if (DidDecreaseResolution(current_restrictions_.restrictions, + adaptation.restrictions())) { + awaiting_frame_size_change_.emplace( + false, adaptation.input_state().frame_size_pixels().value()); + } else { + awaiting_frame_size_change_ = absl::nullopt; + } + current_restrictions_ = {adaptation.restrictions(), adaptation.counters()}; + BroadcastVideoRestrictionsUpdate(adaptation.input_state(), resource); +} + +Adaptation VideoStreamAdapter::GetAdaptationTo( + const VideoAdaptationCounters& counters, + const VideoSourceRestrictions& restrictions) { + // Adapts up/down from the current levels so counters are equal. 
+ RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoStreamInputState input_state = input_state_provider_->InputState(); + return Adaptation(adaptation_validation_id_, restrictions, counters, + input_state); +} + +void VideoStreamAdapter::BroadcastVideoRestrictionsUpdate( + const VideoStreamInputState& input_state, + const rtc::scoped_refptr& resource) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoSourceRestrictions filtered = FilterRestrictionsByDegradationPreference( + source_restrictions(), degradation_preference_); + if (last_filtered_restrictions_ == filtered) { + return; + } + for (auto* restrictions_listener : restrictions_listeners_) { + restrictions_listener->OnVideoSourceRestrictionsUpdated( + filtered, current_restrictions_.counters, resource, + source_restrictions()); + } + last_video_source_restrictions_ = current_restrictions_.restrictions; + last_filtered_restrictions_ = filtered; +} + +bool VideoStreamAdapter::HasSufficientInputForAdaptation( + const VideoStreamInputState& input_state) const { + return input_state.HasInputFrameSizeAndFramesPerSecond() && + (degradation_preference_ != + DegradationPreference::MAINTAIN_RESOLUTION || + input_state.frames_per_second() >= kMinFrameRateFps); +} + +VideoStreamAdapter::AwaitingFrameSizeChange::AwaitingFrameSizeChange( + bool pixels_increased, + int frame_size_pixels) + : pixels_increased(pixels_increased), + frame_size_pixels(frame_size_pixels) {} + +} // namespace webrtc diff --git a/call/adaptation/video_stream_adapter.h b/call/adaptation/video_stream_adapter.h new file mode 100644 index 0000000000..3c3595759e --- /dev/null +++ b/call/adaptation/video_stream_adapter.h @@ -0,0 +1,263 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_VIDEO_STREAM_ADAPTER_H_ +#define CALL_ADAPTATION_VIDEO_STREAM_ADAPTER_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/adaptation/resource.h" +#include "api/rtp_parameters.h" +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_stream_encoder_observer.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/degradation_preference_provider.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" +#include "call/adaptation/video_stream_input_state_provider.h" +#include "modules/video_coding/utility/quality_scaler.h" +#include "rtc_base/experiments/balanced_degradation_settings.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// The listener is responsible for carrying out the reconfiguration of the video +// source such that the VideoSourceRestrictions are fulfilled. +class VideoSourceRestrictionsListener { + public: + virtual ~VideoSourceRestrictionsListener(); + + // The |restrictions| are filtered by degradation preference but not the + // |adaptation_counters|, which are currently only reported for legacy stats + // calculation purposes. 
+ virtual void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) = 0; +}; + +class VideoStreamAdapter; + +extern const int kMinFrameRateFps; + +VideoSourceRestrictions FilterRestrictionsByDegradationPreference( + VideoSourceRestrictions source_restrictions, + DegradationPreference degradation_preference); + +int GetHigherResolutionThan(int pixel_count); + +// Either represents the next VideoSourceRestrictions the VideoStreamAdapter +// will take, or provides a Status code indicating the reason for not adapting +// if the adaptation is not valid. +class Adaptation final { + public: + enum class Status { + // Applying this adaptation will have an effect. All other Status codes + // indicate that adaptation is not possible and why. + kValid, + // Cannot adapt. The minimum or maximum adaptation has already been reached. + // There are no more steps to take. + kLimitReached, + // Cannot adapt. The resolution or frame rate requested by a recent + // adaptation has not yet been reflected in the input resolution or frame + // rate; adaptation is refused to avoid "double-adapting". + kAwaitingPreviousAdaptation, + // Not enough input. + kInsufficientInput, + // Adaptation disabled via degradation preference. + kAdaptationDisabled, + // Adaptation up was rejected by a VideoAdaptationConstraint. + kRejectedByConstraint, + }; + + static const char* StatusToString(Status status); + + Status status() const; + const VideoStreamInputState& input_state() const; + const VideoSourceRestrictions& restrictions() const; + const VideoAdaptationCounters& counters() const; + + private: + friend class VideoStreamAdapter; + + // Constructs with a valid adaptation. Status is kValid. 
+ Adaptation(int validation_id, + VideoSourceRestrictions restrictions, + VideoAdaptationCounters counters, + VideoStreamInputState input_state); + // Constructor when adaptation is not valid. Status MUST NOT be kValid. + Adaptation(int validation_id, Status invalid_status); + + // An Adaptation can become invalidated if the state of VideoStreamAdapter is + // modified before the Adaptation is applied. To guard against this, this ID + // has to match VideoStreamAdapter::adaptation_validation_id_ when applied. + // TODO(https://crbug.com/webrtc/11700): Remove the validation_id_. + const int validation_id_; + const Status status_; + // Input state when adaptation was made. + const VideoStreamInputState input_state_; + const VideoSourceRestrictions restrictions_; + const VideoAdaptationCounters counters_; +}; + +// Owns the VideoSourceRestriction for a single stream and is responsible for +// adapting it up or down when told to do so. This class serves the following +// purposes: +// 1. Keep track of a stream's restrictions. +// 2. Provide valid ways to adapt up or down the stream's restrictions. +// 3. Modify the stream's restrictions in one of the valid ways. +class VideoStreamAdapter { + public: + VideoStreamAdapter(VideoStreamInputStateProvider* input_state_provider, + VideoStreamEncoderObserver* encoder_stats_observer); + ~VideoStreamAdapter(); + + VideoSourceRestrictions source_restrictions() const; + const VideoAdaptationCounters& adaptation_counters() const; + void ClearRestrictions(); + + void AddRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener); + void RemoveRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener); + void AddAdaptationConstraint(AdaptationConstraint* adaptation_constraint); + void RemoveAdaptationConstraint(AdaptationConstraint* adaptation_constraint); + + // TODO(hbos): Setting the degradation preference should not clear + // restrictions! 
This is not defined in the spec and is unexpected, there is a + // tiny risk that people would discover and rely on this behavior. + void SetDegradationPreference(DegradationPreference degradation_preference); + + // Returns an adaptation that we are guaranteed to be able to apply, or a + // status code indicating the reason why we cannot adapt. + Adaptation GetAdaptationUp(); + Adaptation GetAdaptationDown(); + Adaptation GetAdaptationTo(const VideoAdaptationCounters& counters, + const VideoSourceRestrictions& restrictions); + // Tries to adapt the resolution one step. This is used for initial frame + // dropping. Does nothing if the degradation preference is not BALANCED or + // MAINTAIN_FRAMERATE. In the case of BALANCED, it will try twice to reduce + // the resolution. If it fails twice it gives up. + Adaptation GetAdaptDownResolution(); + + // Updates source_restrictions() the Adaptation. + void ApplyAdaptation(const Adaptation& adaptation, + rtc::scoped_refptr resource); + + struct RestrictionsWithCounters { + VideoSourceRestrictions restrictions; + VideoAdaptationCounters counters; + }; + + private: + void BroadcastVideoRestrictionsUpdate( + const VideoStreamInputState& input_state, + const rtc::scoped_refptr& resource); + + bool HasSufficientInputForAdaptation(const VideoStreamInputState& input_state) + const RTC_RUN_ON(&sequence_checker_); + + using RestrictionsOrState = + absl::variant; + RestrictionsOrState GetAdaptationUpStep( + const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + RestrictionsOrState GetAdaptationDownStep( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const + RTC_RUN_ON(&sequence_checker_); + RestrictionsOrState GetAdaptDownResolutionStepForBalanced( + const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + RestrictionsOrState AdaptIfFpsDiffInsufficient( + const VideoStreamInputState& input_state, + const 
RestrictionsWithCounters& restrictions) const + RTC_RUN_ON(&sequence_checker_); + + Adaptation GetAdaptationUp(const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + Adaptation GetAdaptationDown(const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + + static RestrictionsOrState DecreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions); + static RestrictionsOrState IncreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions); + // Framerate methods are member functions because they need internal state + // if the degradation preference is BALANCED. + RestrictionsOrState DecreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const + RTC_RUN_ON(&sequence_checker_); + RestrictionsOrState IncreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const + RTC_RUN_ON(&sequence_checker_); + + struct RestrictionsOrStateVisitor; + Adaptation RestrictionsOrStateToAdaptation( + RestrictionsOrState step_or_state, + const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + + SequenceChecker sequence_checker_ RTC_GUARDED_BY(&sequence_checker_); + // Gets the input state which is the basis of all adaptations. + // Thread safe. + VideoStreamInputStateProvider* input_state_provider_; + // Used to signal when min pixel limit has been reached. + VideoStreamEncoderObserver* const encoder_stats_observer_; + // Decides the next adaptation target in DegradationPreference::BALANCED. + const BalancedDegradationSettings balanced_settings_; + // To guard against applying adaptations that have become invalidated, an + // Adaptation that is applied has to have a matching validation ID. 
+ int adaptation_validation_id_ RTC_GUARDED_BY(&sequence_checker_); + // When deciding the next target up or down, different strategies are used + // depending on the DegradationPreference. + // https://w3c.github.io/mst-content-hint/#dom-rtcdegradationpreference + DegradationPreference degradation_preference_ + RTC_GUARDED_BY(&sequence_checker_); + // Used to avoid adapting twice. Stores the resolution at the time of the last + // adaptation. + // TODO(hbos): Can we implement a more general "cooldown" mechanism of + // resources intead? If we already have adapted it seems like we should wait + // a while before adapting again, so that we are not acting on usage + // measurements that are made obsolete/unreliable by an "ongoing" adaptation. + struct AwaitingFrameSizeChange { + AwaitingFrameSizeChange(bool pixels_increased, int frame_size); + const bool pixels_increased; + const int frame_size_pixels; + }; + absl::optional awaiting_frame_size_change_ + RTC_GUARDED_BY(&sequence_checker_); + // The previous restrictions value. Starts as unrestricted. + VideoSourceRestrictions last_video_source_restrictions_ + RTC_GUARDED_BY(&sequence_checker_); + VideoSourceRestrictions last_filtered_restrictions_ + RTC_GUARDED_BY(&sequence_checker_); + + std::vector restrictions_listeners_ + RTC_GUARDED_BY(&sequence_checker_); + std::vector adaptation_constraints_ + RTC_GUARDED_BY(&sequence_checker_); + + RestrictionsWithCounters current_restrictions_ + RTC_GUARDED_BY(&sequence_checker_); +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_VIDEO_STREAM_ADAPTER_H_ diff --git a/call/adaptation/video_stream_adapter_unittest.cc b/call/adaptation/video_stream_adapter_unittest.cc new file mode 100644 index 0000000000..534be61fcb --- /dev/null +++ b/call/adaptation/video_stream_adapter_unittest.cc @@ -0,0 +1,966 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/video_stream_adapter.h" + +#include +#include + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/video_adaptation_reason.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_config.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/encoder_settings.h" +#include "call/adaptation/test/fake_frame_rate_provider.h" +#include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/string_encode.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/rtc_expect_death.h" + +namespace webrtc { + +using ::testing::_; +using ::testing::DoAll; +using ::testing::Return; +using ::testing::SaveArg; + +namespace { + +const int kBalancedHighResolutionPixels = 1280 * 720; +const int kBalancedHighFrameRateFps = 30; + +const int kBalancedMediumResolutionPixels = 640 * 480; +const int kBalancedMediumFrameRateFps = 20; + +const int kBalancedLowResolutionPixels = 320 * 240; +const int kBalancedLowFrameRateFps = 10; + +std::string BalancedFieldTrialConfig() { + return "WebRTC-Video-BalancedDegradationSettings/pixels:" + + rtc::ToString(kBalancedLowResolutionPixels) + "|" + + rtc::ToString(kBalancedMediumResolutionPixels) + "|" + + rtc::ToString(kBalancedHighResolutionPixels) + + ",fps:" + rtc::ToString(kBalancedLowFrameRateFps) + "|" + + rtc::ToString(kBalancedMediumFrameRateFps) + "|" + + 
rtc::ToString(kBalancedHighFrameRateFps) + "/"; +} + +class FakeVideoStreamInputStateProvider : public VideoStreamInputStateProvider { + public: + FakeVideoStreamInputStateProvider() + : VideoStreamInputStateProvider(nullptr) {} + virtual ~FakeVideoStreamInputStateProvider() = default; + + void SetInputState(int input_pixels, + int input_fps, + int min_pixels_per_frame) { + VideoStreamInputState input_state; + input_state.set_has_input(true); + input_state.set_frame_size_pixels(input_pixels); + input_state.set_frames_per_second(input_fps); + input_state.set_min_pixels_per_frame(min_pixels_per_frame); + fake_input_state_ = input_state; + } + VideoStreamInputState InputState() override { return fake_input_state_; } + + private: + VideoStreamInputState fake_input_state_; +}; + +// Responsible for adjusting the inputs to VideoStreamAdapter (SetInput), such +// as pixels and frame rate, according to the most recent source restrictions. +// This helps tests that apply adaptations multiple times: if the input is not +// adjusted between adaptations, the subsequent adaptations fail with +// kAwaitingPreviousAdaptation. +class FakeVideoStream { + public: + FakeVideoStream(VideoStreamAdapter* adapter, + FakeVideoStreamInputStateProvider* provider, + int input_pixels, + int input_fps, + int min_pixels_per_frame) + : adapter_(adapter), + provider_(provider), + input_pixels_(input_pixels), + input_fps_(input_fps), + min_pixels_per_frame_(min_pixels_per_frame) { + provider_->SetInputState(input_pixels_, input_fps_, min_pixels_per_frame_); + } + + int input_pixels() const { return input_pixels_; } + int input_fps() const { return input_fps_; } + + // Performs ApplyAdaptation() followed by SetInput() with input pixels and + // frame rate adjusted according to the resulting restrictions. + void ApplyAdaptation(Adaptation adaptation) { + adapter_->ApplyAdaptation(adaptation, nullptr); + // Update input pixels and fps according to the resulting restrictions. 
+ auto restrictions = adapter_->source_restrictions(); + if (restrictions.target_pixels_per_frame().has_value()) { + RTC_DCHECK(!restrictions.max_pixels_per_frame().has_value() || + restrictions.max_pixels_per_frame().value() >= + restrictions.target_pixels_per_frame().value()); + input_pixels_ = restrictions.target_pixels_per_frame().value(); + } else if (restrictions.max_pixels_per_frame().has_value()) { + input_pixels_ = restrictions.max_pixels_per_frame().value(); + } + if (restrictions.max_frame_rate().has_value()) { + input_fps_ = restrictions.max_frame_rate().value(); + } + provider_->SetInputState(input_pixels_, input_fps_, min_pixels_per_frame_); + } + + private: + VideoStreamAdapter* adapter_; + FakeVideoStreamInputStateProvider* provider_; + int input_pixels_; + int input_fps_; + int min_pixels_per_frame_; +}; + +class FakeVideoStreamAdapterListner : public VideoSourceRestrictionsListener { + public: + void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override { + calls_++; + last_restrictions_ = unfiltered_restrictions; + } + + int calls() const { return calls_; } + + VideoSourceRestrictions last_restrictions() const { + return last_restrictions_; + } + + private: + int calls_ = 0; + VideoSourceRestrictions last_restrictions_; +}; + +class MockAdaptationConstraint : public AdaptationConstraint { + public: + MOCK_METHOD(bool, + IsAdaptationUpAllowed, + (const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after), + (const, override)); + + // MOCK_METHOD(std::string, Name, (), (const, override)); + std::string Name() const override { return "MockAdaptationConstraint"; } +}; + +} // namespace + +class VideoStreamAdapterTest : public ::testing::Test { + public: + VideoStreamAdapterTest() + : 
field_trials_(BalancedFieldTrialConfig()), + resource_(FakeResource::Create("FakeResource")), + adapter_(&input_state_provider_, &encoder_stats_observer_) {} + + protected: + webrtc::test::ScopedFieldTrials field_trials_; + FakeVideoStreamInputStateProvider input_state_provider_; + rtc::scoped_refptr resource_; + testing::StrictMock encoder_stats_observer_; + VideoStreamAdapter adapter_; +}; + +TEST_F(VideoStreamAdapterTest, NoRestrictionsByDefault) { + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); +} + +TEST_F(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToThreeFifths) { + const int kInputPixels = 1280 * 720; + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(kInputPixels, 30, + kDefaultMinPixelsPerFrame); + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + adapter_.ApplyAdaptation(adaptation, nullptr); + EXPECT_EQ(static_cast((kInputPixels * 3) / 5), + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_DecreasesPixelsToLimitReached) { + const int kMinPixelsPerFrame = 640 * 480; + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(kMinPixelsPerFrame + 1, 30, + kMinPixelsPerFrame); + EXPECT_CALL(encoder_stats_observer_, OnMinPixelLimitReached()); + // Even though we are above kMinPixelsPerFrame, because adapting down would + // have exceeded the limit, we are said to have reached the limit already. 
+ // This differs from the frame rate adaptation logic, which would have clamped + // to the limit in the first step and reported kLimitReached in the second + // step. + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kLimitReached, adaptation.status()); +} + +TEST_F(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToFiveThirds) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Go down twice, ensuring going back up is still a restricted resolution. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + int input_pixels = fake_stream.input_pixels(); + // Go up once. The target is 5/3 and the max is 12/5 of the target. + const int target = (input_pixels * 5) / 3; + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp()); + EXPECT_EQ(static_cast((target * 12) / 5), + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(static_cast(target), + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToUnrestricted) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // We are unrestricted by default and should not be able to adapt up. + EXPECT_EQ(Adaptation::Status::kLimitReached, + adapter_.GetAdaptationUp().status()); + // If we go down once and then back up we should not have any restrictions. 
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp()); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); +} + +TEST_F(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToTwoThirds) { + const int kInputFps = 30; + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + input_state_provider_.SetInputState(1280 * 720, kInputFps, + kDefaultMinPixelsPerFrame); + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + adapter_.ApplyAdaptation(adaptation, nullptr); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(static_cast((kInputFps * 2) / 3), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToLimitReached) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, + kMinFrameRateFps + 1, kDefaultMinPixelsPerFrame); + // If we are not yet at the limit and the next step would exceed it, the step + // is clamped such that we end up exactly on the limit. + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(static_cast(kMinFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + // Having reached the limit, the next adaptation down is not valid. 
+ EXPECT_EQ(Adaptation::Status::kLimitReached, + adapter_.GetAdaptationDown().status()); +} + +TEST_F(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToThreeHalves) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Go down twice, ensuring going back up is still a restricted frame rate. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(2, adapter_.adaptation_counters().fps_adaptations); + int input_fps = fake_stream.input_fps(); + // Go up once. The target is 3/2 of the input. + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(static_cast((input_fps * 3) / 2), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToUnrestricted) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // We are unrestricted by default and should not be able to adapt up. + EXPECT_EQ(Adaptation::Status::kLimitReached, + adapter_.GetAdaptationUp().status()); + // If we go down once and then back up we should not have any restrictions. 
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp()); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); +} + +TEST_F(VideoStreamAdapterTest, Balanced_DecreaseFrameRate) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(kBalancedMediumResolutionPixels, + kBalancedHighFrameRateFps, + kDefaultMinPixelsPerFrame); + // If our frame rate is higher than the frame rate associated with our + // resolution we should try to adapt to the frame rate associated with our + // resolution: kBalancedMediumFrameRateFps. + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + adapter_.ApplyAdaptation(adaptation, nullptr); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(static_cast(kBalancedMediumFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, Balanced_DecreaseResolution) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + FakeVideoStream fake_stream( + &adapter_, &input_state_provider_, kBalancedHighResolutionPixels, + kBalancedHighFrameRateFps, kDefaultMinPixelsPerFrame); + // If we are not below the current resolution's frame rate limit, we should + // adapt resolution according to "maintain-framerate" logic (three fifths). 
+ // + // However, since we are unlimited at the start and input frame rate is not + // below kBalancedHighFrameRateFps, we first restrict the frame rate to + // kBalancedHighFrameRateFps even though that is our current frame rate. This + // does prevent the source from going higher, though, so it's technically not + // a NO-OP. + { + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + } + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + // Verify "maintain-framerate" logic the second time we adapt: Frame rate + // restrictions remains the same and resolution goes down. + { + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + } + constexpr size_t kReducedPixelsFirstStep = + static_cast((kBalancedHighResolutionPixels * 3) / 5); + EXPECT_EQ(kReducedPixelsFirstStep, + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + // If we adapt again, because the balanced settings' proposed frame rate is + // still kBalancedHighFrameRateFps, "maintain-framerate" will trigger again. 
+ static_assert(kReducedPixelsFirstStep > kBalancedMediumResolutionPixels, + "The reduced resolution is still greater than the next lower " + "balanced setting resolution"); + constexpr size_t kReducedPixelsSecondStep = (kReducedPixelsFirstStep * 3) / 5; + { + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + } + EXPECT_EQ(kReducedPixelsSecondStep, + adapter_.source_restrictions().max_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); +} + +// Testing when to adapt frame rate and when to adapt resolution is quite +// entangled, so this test covers both cases. +// +// There is an asymmetry: When we adapt down we do it in one order, but when we +// adapt up we don't do it in the reverse order. Instead we always try to adapt +// frame rate first according to balanced settings' configs and only when the +// frame rate is already achieved do we adjust the resolution. +TEST_F(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + FakeVideoStream fake_stream( + &adapter_, &input_state_provider_, kBalancedHighResolutionPixels, + kBalancedHighFrameRateFps, kDefaultMinPixelsPerFrame); + // The desired starting point of this test is having adapted frame rate twice. + // This requires performing a number of adaptations. 
+ constexpr size_t kReducedPixelsFirstStep = + static_cast((kBalancedHighResolutionPixels * 3) / 5); + constexpr size_t kReducedPixelsSecondStep = (kReducedPixelsFirstStep * 3) / 5; + constexpr size_t kReducedPixelsThirdStep = (kReducedPixelsSecondStep * 3) / 5; + static_assert(kReducedPixelsFirstStep > kBalancedMediumResolutionPixels, + "The first pixel reduction is greater than the balanced " + "settings' medium pixel configuration"); + static_assert(kReducedPixelsSecondStep > kBalancedMediumResolutionPixels, + "The second pixel reduction is greater than the balanced " + "settings' medium pixel configuration"); + static_assert(kReducedPixelsThirdStep <= kBalancedMediumResolutionPixels, + "The third pixel reduction is NOT greater than the balanced " + "settings' medium pixel configuration"); + // The first adaptation should affect the frame rate: See + // Balanced_DecreaseResolution for explanation why. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + // The next three adaptations affects the resolution, because we have to reach + // kBalancedMediumResolutionPixels before a lower frame rate is considered by + // BalancedDegradationSettings. The number three is derived from the + // static_asserts above. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(kReducedPixelsFirstStep, + adapter_.source_restrictions().max_pixels_per_frame()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(kReducedPixelsSecondStep, + adapter_.source_restrictions().max_pixels_per_frame()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(kReducedPixelsThirdStep, + adapter_.source_restrictions().max_pixels_per_frame()); + // Thus, the next adaptation will reduce frame rate to + // kBalancedMediumFrameRateFps. 
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(static_cast(kBalancedMediumFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(3, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(2, adapter_.adaptation_counters().fps_adaptations); + // Adapt up! + // While our resolution is in the medium-range, the frame rate associated with + // the next resolution configuration up ("high") is kBalancedHighFrameRateFps + // and "balanced" prefers adapting frame rate if not already applied. + { + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(3, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + } + // Now that we have already achieved the next frame rate up, we act according + // to "maintain-framerate". We go back up in resolution. Due to rounding + // errors we don't end up back at kReducedPixelsSecondStep. Rather we get to + // kReducedPixelsSecondStepUp, which is off by one compared to + // kReducedPixelsSecondStep. + constexpr size_t kReducedPixelsSecondStepUp = + (kReducedPixelsThirdStep * 5) / 3; + { + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(kReducedPixelsSecondStepUp, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + } + // Now that our resolution is back in the high-range, the next frame rate to + // try out is "unlimited". 
+ { + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); + } + // Now only adapting resolution remains. + constexpr size_t kReducedPixelsFirstStepUp = + (kReducedPixelsSecondStepUp * 5) / 3; + { + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(kReducedPixelsFirstStepUp, + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); + } + // The last step up should make us entirely unrestricted. + { + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); + } +} + +TEST_F(VideoStreamAdapterTest, Balanced_LimitReached) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + FakeVideoStream fake_stream( + &adapter_, &input_state_provider_, kBalancedLowResolutionPixels, + kBalancedLowFrameRateFps, kDefaultMinPixelsPerFrame); + // Attempting to adapt up while unrestricted should result in kLimitReached. + EXPECT_EQ(Adaptation::Status::kLimitReached, + adapter_.GetAdaptationUp().status()); + // Adapting down once result in restricted frame rate, in this case we reach + // the lowest possible frame rate immediately: kBalancedLowFrameRateFps. 
+ EXPECT_CALL(encoder_stats_observer_, OnMinPixelLimitReached()).Times(2); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(static_cast(kBalancedLowFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + // Any further adaptation must follow "maintain-framerate" rules (these are + // covered in more depth by the MaintainFramerate tests). This test does not + // assert exactly how resolution is adjusted, only that resolution always + // decreases and that we eventually reach kLimitReached. + size_t previous_resolution = kBalancedLowResolutionPixels; + bool did_reach_limit = false; + // If we have not reached the limit within 5 adaptations something is wrong... + for (int i = 0; i < 5; i++) { + Adaptation adaptation = adapter_.GetAdaptationDown(); + if (adaptation.status() == Adaptation::Status::kLimitReached) { + did_reach_limit = true; + break; + } + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_LT(adapter_.source_restrictions().max_pixels_per_frame().value(), + previous_resolution); + previous_resolution = + adapter_.source_restrictions().max_pixels_per_frame().value(); + } + EXPECT_TRUE(did_reach_limit); + // Frame rate restrictions are the same as before. + EXPECT_EQ(static_cast(kBalancedLowFrameRateFps), + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); +} + +// kAwaitingPreviousAdaptation is only supported in "maintain-framerate". +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_AwaitingPreviousAdaptationDown) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once, but don't update the input. 
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + { + // Having performed the adaptation, but not updated the input based on the + // new restrictions, adapting again in the same direction will not work. + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, + adaptation.status()); + } +} + +// kAwaitingPreviousAdaptation is only supported in "maintain-framerate". +TEST_F(VideoStreamAdapterTest, MaintainFramerate_AwaitingPreviousAdaptationUp) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Perform two adaptation down so that adapting up twice is possible. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + // Adapt up once, but don't update the input. + adapter_.ApplyAdaptation(adapter_.GetAdaptationUp(), nullptr); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + { + // Having performed the adaptation, but not updated the input based on the + // new restrictions, adapting again in the same direction will not work. + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, + adaptation.status()); + } +} + +TEST_F(VideoStreamAdapterTest, + MaintainResolution_AdaptsUpAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down in fps for later. 
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); + + // We should be able to adapt in framerate one last time after the change of + // degradation preference. + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp()); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_AdaptsUpAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down in resolution for later. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); + + // We should be able to adapt in framerate one last time after the change of + // degradation preference. 
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp()); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + PendingResolutionIncreaseAllowsAdaptUpAfterSwitchToMaintainResolution) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt fps down so we can adapt up later in the test. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + // Apply adaptation up but don't update input. + adapter_.ApplyAdaptation(adapter_.GetAdaptationUp(), nullptr); + EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, + adapter_.GetAdaptationUp().status()); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); +} + +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_AdaptsDownAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once, should change FPS. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + // Adaptation down should apply after the degradation prefs change. 
+ Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + MaintainResolution_AdaptsDownAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once, should change resolution. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F( + VideoStreamAdapterTest, + PendingResolutionDecreaseAllowsAdaptDownAfterSwitchToMaintainResolution) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Apply adaptation but don't update the input.
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, + adapter_.GetAdaptationDown().status()); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); +} + +TEST_F(VideoStreamAdapterTest, RestrictionBroadcasted) { + FakeVideoStreamAdapterListner listener; + adapter_.AddRestrictionsListener(&listener); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Not broadcast on invalid ApplyAdaptation. + { + Adaptation adaptation = adapter_.GetAdaptationUp(); + adapter_.ApplyAdaptation(adaptation, nullptr); + EXPECT_EQ(0, listener.calls()); + } + + // Broadcast on ApplyAdaptation. + { + Adaptation adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(1, listener.calls()); + EXPECT_EQ(adaptation.restrictions(), listener.last_restrictions()); + } + + // Broadcast on ClearRestrictions(). + adapter_.ClearRestrictions(); + EXPECT_EQ(2, listener.calls()); + EXPECT_EQ(VideoSourceRestrictions(), listener.last_restrictions()); +} + +TEST_F(VideoStreamAdapterTest, AdaptationHasNextRestrcitions) { + // Any non-disabled DegradationPreference will do. + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // When adaptation is not possible. + { + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kLimitReached, adaptation.status()); + EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adaptation.counters().Total()); + } + // When we adapt down. 
+ { + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions()); + EXPECT_EQ(adaptation.counters(), adapter_.adaptation_counters()); + } + // When we adapt up. + { + Adaptation adaptation = adapter_.GetAdaptationUp(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions()); + EXPECT_EQ(adaptation.counters(), adapter_.adaptation_counters()); + } +} + +TEST_F(VideoStreamAdapterTest, + SetDegradationPreferenceToOrFromBalancedClearsRestrictions) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_NE(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_NE(0, adapter_.adaptation_counters().Total()); + // Changing from non-balanced to balanced clears the restrictions. + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); + // Apply adaptation again. + adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_NE(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_NE(0, adapter_.adaptation_counters().Total()); + // Changing from balanced to non-balanced clears the restrictions. 
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsResolutionInMaintainFramerate) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(1, adaptation.counters().resolution_adaptations); + EXPECT_EQ(0, adaptation.counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionReturnsWithStatusInDisabledAndMaintainResolution) { + adapter_.SetDegradationPreference(DegradationPreference::DISABLED); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptDownResolution().status()); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + EXPECT_EQ(Adaptation::Status::kLimitReached, + adapter_.GetAdaptDownResolution().status()); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsFpsAndResolutionInBalanced) { + // Note: This test depends on BALANCED implementation, but with current + // implementation and input state settings, BALANCED will adapt resolution and + // frame rate once. 
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(1, adaptation.counters().resolution_adaptations); + EXPECT_EQ(1, adaptation.counters().fps_adaptations); +} + +TEST_F( + VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsOnlyResolutionIfFpsAlreadyAdapterInBalanced) { + // Note: This test depends on BALANCED implementation, but with current + // implementation and input state settings, BALANCED will adapt resolution + // only. + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(1280 * 720, 5, kDefaultMinPixelsPerFrame); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + + auto first_adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(first_adaptation); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(1, adaptation.counters().resolution_adaptations); + EXPECT_EQ(first_adaptation.counters().fps_adaptations, + adaptation.counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsOnlyFpsIfResolutionLowInBalanced) { + // Note: This test depends on BALANCED implementation, but with current + // implementation and input state settings, BALANCED will adapt frame rate + // only.
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(kDefaultMinPixelsPerFrame, 30, + kDefaultMinPixelsPerFrame); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(0, adaptation.counters().resolution_adaptations); + EXPECT_EQ(1, adaptation.counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + AdaptationDisabledStatusAlwaysWhenDegradationPreferenceDisabled) { + adapter_.SetDegradationPreference(DegradationPreference::DISABLED); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptationDown().status()); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptationUp().status()); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptDownResolution().status()); +} + +TEST_F(VideoStreamAdapterTest, AdaptationConstraintAllowsAdaptationsUp) { + testing::StrictMock adaptation_constraint; + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + adapter_.AddAdaptationConstraint(&adaptation_constraint); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once so we can adapt up later. 
+ auto first_adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(first_adaptation); + + EXPECT_CALL(adaptation_constraint, + IsAdaptationUpAllowed(_, first_adaptation.restrictions(), _)) + .WillOnce(Return(true)); + EXPECT_EQ(Adaptation::Status::kValid, adapter_.GetAdaptationUp().status()); + adapter_.RemoveAdaptationConstraint(&adaptation_constraint); +} + +TEST_F(VideoStreamAdapterTest, AdaptationConstraintDisallowsAdaptationsUp) { + testing::StrictMock adaptation_constraint; + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + adapter_.AddAdaptationConstraint(&adaptation_constraint); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once so we can adapt up later. + auto first_adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(first_adaptation); + + EXPECT_CALL(adaptation_constraint, + IsAdaptationUpAllowed(_, first_adaptation.restrictions(), _)) + .WillOnce(Return(false)); + EXPECT_EQ(Adaptation::Status::kRejectedByConstraint, + adapter_.GetAdaptationUp().status()); + adapter_.RemoveAdaptationConstraint(&adaptation_constraint); +} + +// Death tests. +// Disabled on Android because death tests misbehave on Android, see +// base/test/gtest_util.h. 
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +TEST(VideoStreamAdapterDeathTest, + SetDegradationPreferenceInvalidatesAdaptations) { + FakeVideoStreamInputStateProvider input_state_provider; + testing::StrictMock encoder_stats_observer_; + VideoStreamAdapter adapter(&input_state_provider, &encoder_stats_observer_); + adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider.SetInputState(1280 * 720, 30, kDefaultMinPixelsPerFrame); + Adaptation adaptation = adapter.GetAdaptationDown(); + adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + EXPECT_DEATH(adapter.ApplyAdaptation(adaptation, nullptr), ""); +} + +TEST(VideoStreamAdapterDeathTest, AdaptDownInvalidatesAdaptations) { + FakeVideoStreamInputStateProvider input_state_provider; + testing::StrictMock encoder_stats_observer_; + VideoStreamAdapter adapter(&input_state_provider, &encoder_stats_observer_); + adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + input_state_provider.SetInputState(1280 * 720, 30, kDefaultMinPixelsPerFrame); + Adaptation adaptation = adapter.GetAdaptationDown(); + adapter.GetAdaptationDown(); + EXPECT_DEATH(adapter.ApplyAdaptation(adaptation, nullptr), ""); +} + +#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +} // namespace webrtc diff --git a/call/adaptation/video_stream_input_state.cc b/call/adaptation/video_stream_input_state.cc new file mode 100644 index 0000000000..dc3315e6d0 --- /dev/null +++ b/call/adaptation/video_stream_input_state.cc @@ -0,0 +1,70 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/video_stream_input_state.h" + +#include "api/video_codecs/video_encoder.h" + +namespace webrtc { + +VideoStreamInputState::VideoStreamInputState() + : has_input_(false), + frame_size_pixels_(absl::nullopt), + frames_per_second_(0), + video_codec_type_(VideoCodecType::kVideoCodecGeneric), + min_pixels_per_frame_(kDefaultMinPixelsPerFrame) {} + +void VideoStreamInputState::set_has_input(bool has_input) { + has_input_ = has_input; +} + +void VideoStreamInputState::set_frame_size_pixels( + absl::optional frame_size_pixels) { + frame_size_pixels_ = frame_size_pixels; +} + +void VideoStreamInputState::set_frames_per_second(int frames_per_second) { + frames_per_second_ = frames_per_second; +} + +void VideoStreamInputState::set_video_codec_type( + VideoCodecType video_codec_type) { + video_codec_type_ = video_codec_type; +} + +void VideoStreamInputState::set_min_pixels_per_frame(int min_pixels_per_frame) { + min_pixels_per_frame_ = min_pixels_per_frame; +} + +bool VideoStreamInputState::has_input() const { + return has_input_; +} + +absl::optional VideoStreamInputState::frame_size_pixels() const { + return frame_size_pixels_; +} + +int VideoStreamInputState::frames_per_second() const { + return frames_per_second_; +} + +VideoCodecType VideoStreamInputState::video_codec_type() const { + return video_codec_type_; +} + +int VideoStreamInputState::min_pixels_per_frame() const { + return min_pixels_per_frame_; +} + +bool VideoStreamInputState::HasInputFrameSizeAndFramesPerSecond() const { + return has_input_ && frame_size_pixels_.has_value(); +} + +} // namespace webrtc diff --git a/call/adaptation/video_stream_input_state.h b/call/adaptation/video_stream_input_state.h new file mode 100644 index 0000000000..af0d7c78e9 --- /dev/null +++ b/call/adaptation/video_stream_input_state.h @@ -0,0 +1,49 @@ +/* + * Copyright 2020 The WebRTC Project 
Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_H_ +#define CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_H_ + +#include "absl/types/optional.h" +#include "api/video/video_codec_type.h" + +namespace webrtc { + +// The source resolution, frame rate and other properties of a +// VideoStreamEncoder. +class VideoStreamInputState { + public: + VideoStreamInputState(); + + void set_has_input(bool has_input); + void set_frame_size_pixels(absl::optional frame_size_pixels); + void set_frames_per_second(int frames_per_second); + void set_video_codec_type(VideoCodecType video_codec_type); + void set_min_pixels_per_frame(int min_pixels_per_frame); + + bool has_input() const; + absl::optional frame_size_pixels() const; + int frames_per_second() const; + VideoCodecType video_codec_type() const; + int min_pixels_per_frame() const; + + bool HasInputFrameSizeAndFramesPerSecond() const; + + private: + bool has_input_; + absl::optional frame_size_pixels_; + int frames_per_second_; + VideoCodecType video_codec_type_; + int min_pixels_per_frame_; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_H_ diff --git a/call/adaptation/video_stream_input_state_provider.cc b/call/adaptation/video_stream_input_state_provider.cc new file mode 100644 index 0000000000..3c0a7e3fa2 --- /dev/null +++ b/call/adaptation/video_stream_input_state_provider.cc @@ -0,0 +1,49 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/video_stream_input_state_provider.h" + +namespace webrtc { + +VideoStreamInputStateProvider::VideoStreamInputStateProvider( + VideoStreamEncoderObserver* frame_rate_provider) + : frame_rate_provider_(frame_rate_provider) {} + +VideoStreamInputStateProvider::~VideoStreamInputStateProvider() {} + +void VideoStreamInputStateProvider::OnHasInputChanged(bool has_input) { + MutexLock lock(&mutex_); + input_state_.set_has_input(has_input); +} + +void VideoStreamInputStateProvider::OnFrameSizeObserved(int frame_size_pixels) { + RTC_DCHECK_GT(frame_size_pixels, 0); + MutexLock lock(&mutex_); + input_state_.set_frame_size_pixels(frame_size_pixels); +} + +void VideoStreamInputStateProvider::OnEncoderSettingsChanged( + EncoderSettings encoder_settings) { + MutexLock lock(&mutex_); + input_state_.set_video_codec_type( + encoder_settings.encoder_config().codec_type); + input_state_.set_min_pixels_per_frame( + encoder_settings.encoder_info().scaling_settings.min_pixels_per_frame); +} + +VideoStreamInputState VideoStreamInputStateProvider::InputState() { + // GetInputFrameRate() is thread-safe. + int input_fps = frame_rate_provider_->GetInputFrameRate(); + MutexLock lock(&mutex_); + input_state_.set_frames_per_second(input_fps); + return input_state_; +} + +} // namespace webrtc diff --git a/call/adaptation/video_stream_input_state_provider.h b/call/adaptation/video_stream_input_state_provider.h new file mode 100644 index 0000000000..f4a3e0bfa0 --- /dev/null +++ b/call/adaptation/video_stream_input_state_provider.h @@ -0,0 +1,41 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_PROVIDER_H_ +#define CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_PROVIDER_H_ + +#include "api/video/video_stream_encoder_observer.h" +#include "call/adaptation/encoder_settings.h" +#include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +class VideoStreamInputStateProvider { + public: + VideoStreamInputStateProvider( + VideoStreamEncoderObserver* frame_rate_provider); + virtual ~VideoStreamInputStateProvider(); + + void OnHasInputChanged(bool has_input); + void OnFrameSizeObserved(int frame_size_pixels); + void OnEncoderSettingsChanged(EncoderSettings encoder_settings); + + virtual VideoStreamInputState InputState(); + + private: + Mutex mutex_; + VideoStreamEncoderObserver* const frame_rate_provider_; + VideoStreamInputState input_state_ RTC_GUARDED_BY(mutex_); +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_PROVIDER_H_ diff --git a/call/adaptation/video_stream_input_state_provider_unittest.cc b/call/adaptation/video_stream_input_state_provider_unittest.cc new file mode 100644 index 0000000000..49c662c581 --- /dev/null +++ b/call/adaptation/video_stream_input_state_provider_unittest.cc @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "call/adaptation/video_stream_input_state_provider.h" + +#include + +#include "api/video_codecs/video_encoder.h" +#include "call/adaptation/encoder_settings.h" +#include "call/adaptation/test/fake_frame_rate_provider.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(VideoStreamInputStateProviderTest, DefaultValues) { + FakeFrameRateProvider frame_rate_provider; + VideoStreamInputStateProvider input_state_provider(&frame_rate_provider); + VideoStreamInputState input_state = input_state_provider.InputState(); + EXPECT_EQ(false, input_state.has_input()); + EXPECT_EQ(absl::nullopt, input_state.frame_size_pixels()); + EXPECT_EQ(0, input_state.frames_per_second()); + EXPECT_EQ(VideoCodecType::kVideoCodecGeneric, input_state.video_codec_type()); + EXPECT_EQ(kDefaultMinPixelsPerFrame, input_state.min_pixels_per_frame()); +} + +TEST(VideoStreamInputStateProviderTest, ValuesSet) { + FakeFrameRateProvider frame_rate_provider; + VideoStreamInputStateProvider input_state_provider(&frame_rate_provider); + input_state_provider.OnHasInputChanged(true); + input_state_provider.OnFrameSizeObserved(42); + frame_rate_provider.set_fps(123); + VideoEncoder::EncoderInfo encoder_info; + encoder_info.scaling_settings.min_pixels_per_frame = 1337; + VideoEncoderConfig encoder_config; + encoder_config.codec_type = VideoCodecType::kVideoCodecVP9; + input_state_provider.OnEncoderSettingsChanged(EncoderSettings( + std::move(encoder_info), std::move(encoder_config), VideoCodec())); + VideoStreamInputState input_state = input_state_provider.InputState(); + EXPECT_EQ(true, input_state.has_input()); + EXPECT_EQ(42, input_state.frame_size_pixels()); + EXPECT_EQ(123, input_state.frames_per_second()); + EXPECT_EQ(VideoCodecType::kVideoCodecVP9, input_state.video_codec_type()); + EXPECT_EQ(1337, input_state.min_pixels_per_frame()); +} + +} // namespace webrtc diff --git a/call/audio_receive_stream.h b/call/audio_receive_stream.h index 090fb82090..d9d3e21cd5 100644 --- 
a/call/audio_receive_stream.h +++ b/call/audio_receive_stream.h @@ -21,10 +21,14 @@ #include "api/call/transport.h" #include "api/crypto/crypto_options.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "api/transport/rtp/rtp_source.h" #include "call/rtp_config.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif namespace webrtc { class AudioSinkInterface; @@ -58,6 +62,7 @@ class AudioReceiveStream { uint64_t concealment_events = 0; double jitter_buffer_delay_seconds = 0.0; uint64_t jitter_buffer_emitted_count = 0; + double jitter_buffer_target_delay_seconds = 0.0; uint64_t inserted_samples_for_deceleration = 0; uint64_t removed_samples_for_acceleration = 0; // Stats below DO NOT correspond directly to anything in the WebRTC stats @@ -149,6 +154,10 @@ class AudioReceiveStream { // decrypted in whatever way the caller choses. This is not required by // default. rtc::scoped_refptr frame_decryptor; + + // An optional frame transformer used by insertable streams to transform + // encoded frames. + rtc::scoped_refptr frame_transformer; }; // Reconfigure the stream according to the Configuration. @@ -161,7 +170,8 @@ class AudioReceiveStream { // When a stream is stopped, it can't receive, process or deliver packets. virtual void Stop() = 0; - virtual Stats GetStats() const = 0; + virtual Stats GetStats(bool get_and_clear_legacy_stats) const = 0; + Stats GetStats() { return GetStats(/*get_and_clear_legacy_stats=*/true); } // Sets an audio sink that receives unmixed audio from the receive stream. // Ownership of the sink is managed by the caller. 
@@ -187,6 +197,10 @@ class AudioReceiveStream { virtual std::vector GetSources() const = 0; +#ifndef DISABLE_RECORDER + virtual void InjectRecorder(Recorder* recorder) = 0; +#endif + protected: virtual ~AudioReceiveStream() {} }; diff --git a/call/audio_send_stream.cc b/call/audio_send_stream.cc index ddcba031a7..5acdc9618f 100644 --- a/call/audio_send_stream.cc +++ b/call/audio_send_stream.cc @@ -34,6 +34,9 @@ std::string AudioSendStream::Config::ToString() const { ss << ", send_transport: " << (send_transport ? "(Transport)" : "null"); ss << ", min_bitrate_bps: " << min_bitrate_bps; ss << ", max_bitrate_bps: " << max_bitrate_bps; + ss << ", has audio_network_adaptor_config: " + << (audio_network_adaptor_config ? "true" : "false"); + ss << ", has_dscp: " << (has_dscp ? "true" : "false"); ss << ", send_codec_spec: " << (send_codec_spec ? send_codec_spec->ToString() : ""); ss << '}'; @@ -75,6 +78,8 @@ std::string AudioSendStream::Config::SendCodecSpec::ToString() const { ss << ", transport_cc_enabled: " << (transport_cc_enabled ? "true" : "false"); ss << ", cng_payload_type: " << (cng_payload_type ? rtc::ToString(*cng_payload_type) : ""); + ss << ", red_payload_type: " + << (red_payload_type ? 
rtc::ToString(*red_payload_type) : ""); ss << ", payload_type: " << payload_type; ss << ", format: " << rtc::ToString(format); ss << '}'; @@ -86,6 +91,7 @@ bool AudioSendStream::Config::SendCodecSpec::operator==( if (nack_enabled == rhs.nack_enabled && transport_cc_enabled == rhs.transport_cc_enabled && cng_payload_type == rhs.cng_payload_type && + red_payload_type == rhs.red_payload_type && payload_type == rhs.payload_type && format == rhs.format && target_bitrate_bps == rhs.target_bitrate_bps) { return true; diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index e60bfcdc12..b584708094 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -23,11 +23,15 @@ #include "api/call/transport.h" #include "api/crypto/crypto_options.h" #include "api/crypto/frame_encryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "call/audio_sender.h" #include "call/rtp_config.h" #include "modules/audio_processing/include/audio_processing_statistics.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif #include "modules/rtp_rtcp/include/report_block_data.h" namespace webrtc { @@ -139,6 +143,7 @@ class AudioSendStream : public AudioSender { bool nack_enabled = false; bool transport_cc_enabled = false; absl::optional cng_payload_type; + absl::optional red_payload_type; // If unset, use the encoder's default target bitrate. absl::optional target_bitrate_bps; }; @@ -157,6 +162,10 @@ class AudioSendStream : public AudioSender { // encryptor in whatever way the caller choses. This is not required by // default. rtc::scoped_refptr frame_encryptor; + + // An optional frame transformer used by insertable streams to transform + // encoded frames. 
+ rtc::scoped_refptr frame_transformer; }; virtual ~AudioSendStream() = default; @@ -183,6 +192,10 @@ class AudioSendStream : public AudioSender { virtual Stats GetStats() const = 0; virtual Stats GetStats(bool has_remote_tracks) const = 0; + +#ifndef DISABLE_RECORDER + virtual void InjectRecorder(Recorder* recorder) = 0; +#endif }; } // namespace webrtc diff --git a/call/audio_state.h b/call/audio_state.h index 89267c5ab3..79fb5cf981 100644 --- a/call/audio_state.h +++ b/call/audio_state.h @@ -12,6 +12,7 @@ #include "api/audio/audio_mixer.h" #include "api/scoped_refptr.h" +#include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/ref_count.h" @@ -37,6 +38,9 @@ class AudioState : public rtc::RefCountInterface { // TODO(solenberg): Temporary: audio device module. rtc::scoped_refptr audio_device_module; + + rtc::scoped_refptr + async_audio_processing_factory; }; virtual AudioProcessing* audio_processing() = 0; diff --git a/call/bitrate_allocator.cc b/call/bitrate_allocator.cc index 7d9e5cb651..8e2006defa 100644 --- a/call/bitrate_allocator.cc +++ b/call/bitrate_allocator.cc @@ -404,11 +404,13 @@ void BitrateAllocator::OnNetworkEstimateChanged(TargetTransferRate msg) { uint32_t allocated_stable_target_rate = stable_bitrate_allocation[config.observer]; BitrateAllocationUpdate update; - update.target_bitrate = DataRate::bps(allocated_bitrate); - update.stable_target_bitrate = DataRate::bps(allocated_stable_target_rate); + update.target_bitrate = DataRate::BitsPerSec(allocated_bitrate); + update.stable_target_bitrate = + DataRate::BitsPerSec(allocated_stable_target_rate); update.packet_loss_ratio = last_fraction_loss_ / 256.0; - update.round_trip_time = TimeDelta::ms(last_rtt_); - update.bwe_period = TimeDelta::ms(last_bwe_period_ms_); + update.round_trip_time = TimeDelta::Millis(last_rtt_); + update.bwe_period = 
TimeDelta::Millis(last_bwe_period_ms_); + update.cwnd_reduce_ratio = msg.cwnd_reduce_ratio; uint32_t protection_bitrate = config.observer->OnBitrateUpdated(update); if (allocated_bitrate == 0 && config.allocated_bitrate_bps > 0) { @@ -468,11 +470,12 @@ void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer, uint32_t allocated_stable_bitrate = stable_bitrate_allocation[config.observer]; BitrateAllocationUpdate update; - update.target_bitrate = DataRate::bps(allocated_bitrate); - update.stable_target_bitrate = DataRate::bps(allocated_stable_bitrate); + update.target_bitrate = DataRate::BitsPerSec(allocated_bitrate); + update.stable_target_bitrate = + DataRate::BitsPerSec(allocated_stable_bitrate); update.packet_loss_ratio = last_fraction_loss_ / 256.0; - update.round_trip_time = TimeDelta::ms(last_rtt_); - update.bwe_period = TimeDelta::ms(last_bwe_period_ms_); + update.round_trip_time = TimeDelta::Millis(last_rtt_); + update.bwe_period = TimeDelta::Millis(last_bwe_period_ms_); uint32_t protection_bitrate = config.observer->OnBitrateUpdated(update); config.allocated_bitrate_bps = allocated_bitrate; if (allocated_bitrate > 0) @@ -487,8 +490,8 @@ void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer, update.target_bitrate = DataRate::Zero(); update.stable_target_bitrate = DataRate::Zero(); update.packet_loss_ratio = last_fraction_loss_ / 256.0; - update.round_trip_time = TimeDelta::ms(last_rtt_); - update.bwe_period = TimeDelta::ms(last_bwe_period_ms_); + update.round_trip_time = TimeDelta::Millis(last_rtt_); + update.bwe_period = TimeDelta::Millis(last_bwe_period_ms_); observer->OnBitrateUpdated(update); } UpdateAllocationLimits(); @@ -500,13 +503,14 @@ void BitrateAllocator::UpdateAllocationLimits() { uint32_t stream_padding = config.config.pad_up_bitrate_bps; if (config.config.enforce_min_bitrate) { limits.min_allocatable_rate += - DataRate::bps(config.config.min_bitrate_bps); + DataRate::BitsPerSec(config.config.min_bitrate_bps); } 
else if (config.allocated_bitrate_bps == 0) { stream_padding = std::max(config.MinBitrateWithHysteresis(), stream_padding); } - limits.max_padding_rate += DataRate::bps(stream_padding); - limits.max_allocatable_rate += DataRate::bps(config.config.max_bitrate_bps); + limits.max_padding_rate += DataRate::BitsPerSec(stream_padding); + limits.max_allocatable_rate += + DataRate::BitsPerSec(config.config.max_bitrate_bps); } if (limits.min_allocatable_rate == current_limits_.min_allocatable_rate && diff --git a/call/bitrate_allocator_unittest.cc b/call/bitrate_allocator_unittest.cc index edb3f7e30e..00fb236948 100644 --- a/call/bitrate_allocator_unittest.cc +++ b/call/bitrate_allocator_unittest.cc @@ -30,24 +30,27 @@ auto AllocationLimitsEq(uint32_t min_allocatable_rate_bps, uint32_t max_padding_rate_bps, uint32_t max_allocatable_rate_bps) { return AllOf(Field(&BitrateAllocationLimits::min_allocatable_rate, - DataRate::bps(min_allocatable_rate_bps)), + DataRate::BitsPerSec(min_allocatable_rate_bps)), Field(&BitrateAllocationLimits::max_allocatable_rate, - DataRate::bps(max_allocatable_rate_bps)), + DataRate::BitsPerSec(max_allocatable_rate_bps)), Field(&BitrateAllocationLimits::max_padding_rate, - DataRate::bps(max_padding_rate_bps))); + DataRate::BitsPerSec(max_padding_rate_bps))); } auto AllocationLimitsEq(uint32_t min_allocatable_rate_bps, uint32_t max_padding_rate_bps) { return AllOf(Field(&BitrateAllocationLimits::min_allocatable_rate, - DataRate::bps(min_allocatable_rate_bps)), + DataRate::BitsPerSec(min_allocatable_rate_bps)), Field(&BitrateAllocationLimits::max_padding_rate, - DataRate::bps(max_padding_rate_bps))); + DataRate::BitsPerSec(max_padding_rate_bps))); } class MockLimitObserver : public BitrateAllocator::LimitObserver { public: - MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits)); + MOCK_METHOD(void, + OnAllocationLimitsChanged, + (BitrateAllocationLimits), + (override)); }; class TestBitrateObserver : public BitrateAllocatorObserver 
{ @@ -88,13 +91,13 @@ TargetTransferRate CreateTargetRateMessage(uint32_t target_bitrate_bps, TargetTransferRate msg; // The timestamp is just for log output, keeping it fixed just means fewer log // messages in the test. - msg.at_time = Timestamp::seconds(10000); - msg.target_rate = DataRate::bps(target_bitrate_bps); + msg.at_time = Timestamp::Seconds(10000); + msg.target_rate = DataRate::BitsPerSec(target_bitrate_bps); msg.stable_target_rate = msg.target_rate; msg.network_estimate.bandwidth = msg.target_rate; msg.network_estimate.loss_rate_ratio = fraction_loss / 255.0; - msg.network_estimate.round_trip_time = TimeDelta::ms(rtt_ms); - msg.network_estimate.bwe_period = TimeDelta::ms(bwe_period_ms); + msg.network_estimate.round_trip_time = TimeDelta::Millis(rtt_ms); + msg.network_estimate.bwe_period = TimeDelta::Millis(bwe_period_ms); return msg; } } // namespace diff --git a/call/bitrate_estimator_tests.cc b/call/bitrate_estimator_tests.cc index 50da12bbdf..4634f6e147 100644 --- a/call/bitrate_estimator_tests.cc +++ b/call/bitrate_estimator_tests.cc @@ -19,6 +19,7 @@ #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/thread_annotations.h" #include "test/call_test.h" @@ -49,7 +50,7 @@ class LogObserver { class Callback : public rtc::LogSink { public: void OnLogMessage(const std::string& message) override { - rtc::CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); // Ignore log lines that are due to missing AST extensions, these are // logged when we switch back from AST to TOF until the wrapping bitrate // estimator gives up on using AST. 
@@ -78,15 +79,15 @@ class LogObserver { bool Wait() { return done_.Wait(test::CallTest::kDefaultTimeoutMs); } void PushExpectedLogLine(const std::string& expected_log_line) { - rtc::CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); expected_log_lines_.push_back(expected_log_line); } private: typedef std::list Strings; - rtc::CriticalSection crit_sect_; - Strings received_log_lines_ RTC_GUARDED_BY(crit_sect_); - Strings expected_log_lines_ RTC_GUARDED_BY(crit_sect_); + Mutex mutex_; + Strings received_log_lines_ RTC_GUARDED_BY(mutex_); + Strings expected_log_lines_ RTC_GUARDED_BY(mutex_); rtc::Event done_; }; @@ -190,7 +191,7 @@ class BitrateEstimatorTest : public test::CallTest { send_stream_->Start(); VideoReceiveStream::Decoder decoder; - decoder.decoder_factory = &decoder_factory_; + test_->receive_config_.decoder_factory = &decoder_factory_; decoder.payload_type = test_->GetVideoSendConfig()->rtp.payload_type; decoder.video_format = SdpVideoFormat(test_->GetVideoSendConfig()->rtp.payload_name); diff --git a/call/call.cc b/call/call.cc index e2d896249c..42e632dd07 100644 --- a/call/call.cc +++ b/call/call.cc @@ -21,10 +21,12 @@ #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtp_transceiver_interface.h" #include "api/transport/network_control.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" #include "audio/audio_state.h" +#include "call/adaptation/broadcast_resource_listener.h" #include "call/bitrate_allocator.h" #include "call/flexfec_receive_stream_impl.h" #include "call/receive_time_calculator.h" @@ -37,6 +39,9 @@ #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/congestion_controller/include/receive_side_congestion_controller.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif #include "modules/rtp_rtcp/include/flexfec_receiver.h" #include 
"modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/byte_io.h" @@ -49,8 +54,8 @@ #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" #include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -58,10 +63,10 @@ #include "system_wrappers/include/cpu_info.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" -#include "video/call_stats.h" +#include "video/call_stats2.h" #include "video/send_delay_stats.h" #include "video/stats_counter.h" -#include "video/video_receive_stream.h" +#include "video/video_receive_stream2.h" #include "video/video_send_stream.h" namespace webrtc { @@ -157,10 +162,58 @@ bool IsRtcp(const uint8_t* packet, size_t length) { return rtp_parser.RTCP(); } +TaskQueueBase* GetCurrentTaskQueueOrThread() { + TaskQueueBase* current = TaskQueueBase::Current(); + if (!current) + current = rtc::ThreadManager::Instance()->CurrentThread(); + return current; +} + } // namespace namespace internal { +// Wraps an injected resource in a BroadcastResourceListener and handles adding +// and removing adapter resources to individual VideoSendStreams. 
+class ResourceVideoSendStreamForwarder { + public: + ResourceVideoSendStreamForwarder( + rtc::scoped_refptr resource) + : broadcast_resource_listener_(resource) { + broadcast_resource_listener_.StartListening(); + } + ~ResourceVideoSendStreamForwarder() { + RTC_DCHECK(adapter_resources_.empty()); + broadcast_resource_listener_.StopListening(); + } + + rtc::scoped_refptr Resource() const { + return broadcast_resource_listener_.SourceResource(); + } + + void OnCreateVideoSendStream(VideoSendStream* video_send_stream) { + RTC_DCHECK(adapter_resources_.find(video_send_stream) == + adapter_resources_.end()); + auto adapter_resource = + broadcast_resource_listener_.CreateAdapterResource(); + video_send_stream->AddAdaptationResource(adapter_resource); + adapter_resources_.insert( + std::make_pair(video_send_stream, adapter_resource)); + } + + void OnDestroyVideoSendStream(VideoSendStream* video_send_stream) { + auto it = adapter_resources_.find(video_send_stream); + RTC_DCHECK(it != adapter_resources_.end()); + broadcast_resource_listener_.RemoveAdapterResource(it->second); + adapter_resources_.erase(it); + } + + private: + BroadcastResourceListener broadcast_resource_listener_; + std::map> + adapter_resources_; +}; + class Call final : public webrtc::Call, public PacketReceiver, public RecoveredPacketReceiver, @@ -170,7 +223,7 @@ class Call final : public webrtc::Call, Call(Clock* clock, const Call::Config& config, std::unique_ptr transport_send, - std::unique_ptr module_process_thread, + rtc::scoped_refptr module_process_thread, TaskQueueFactory* task_queue_factory); ~Call() override; @@ -200,15 +253,22 @@ class Call final : public webrtc::Call, void DestroyVideoReceiveStream( webrtc::VideoReceiveStream* receive_stream) override; + int32_t StartRecorder(int32_t dir, std::string path) override; + int32_t StopRecorder(int32_t dir) override; + FlexfecReceiveStream* CreateFlexfecReceiveStream( const FlexfecReceiveStream::Config& config) override; void 
DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) override; + void AddAdaptationResource(rtc::scoped_refptr resource) override; + RtpTransportControllerSendInterface* GetTransportControllerSend() override; Stats GetStats() const override; + const WebRtcKeyValueConfig& trials() const override; + // Implements PacketReceiver. DeliveryStatus DeliverPacket(MediaType media_type, rtc::CopyOnWriteBuffer packet, @@ -236,54 +296,56 @@ class Call final : public webrtc::Call, private: DeliveryStatus DeliverRtcp(MediaType media_type, const uint8_t* packet, - size_t length); + size_t length) + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); DeliveryStatus DeliverRtp(MediaType media_type, rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us); + int64_t packet_time_us) + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); void ConfigureSync(const std::string& sync_group) - RTC_EXCLUSIVE_LOCKS_REQUIRED(receive_crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); void NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, MediaType media_type) - RTC_SHARED_LOCKS_REQUIRED(receive_crit_); + RTC_SHARED_LOCKS_REQUIRED(worker_thread_); void UpdateSendHistograms(Timestamp first_sent_packet) - RTC_EXCLUSIVE_LOCKS_REQUIRED(&bitrate_crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); void UpdateReceiveHistograms(); void UpdateHistograms(); void UpdateAggregateNetworkState(); - void RegisterRateObserver(); + // Ensure that necessary process threads are started, and any required + // callbacks have been registered. 
+ void EnsureStarted() RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); - rtc::TaskQueue* network_queue() const { + rtc::TaskQueue* send_transport_queue() const { return transport_send_ptr_->GetWorkerQueue(); } Clock* const clock_; TaskQueueFactory* const task_queue_factory_; + TaskQueueBase* const worker_thread_; const int num_cpu_cores_; - const std::unique_ptr module_process_thread_; + const rtc::scoped_refptr module_process_thread_; const std::unique_ptr call_stats_; const std::unique_ptr bitrate_allocator_; Call::Config config_; - SequenceChecker configuration_sequence_checker_; - SequenceChecker worker_sequence_checker_; NetworkState audio_network_state_; NetworkState video_network_state_; - bool aggregate_network_up_ RTC_GUARDED_BY(configuration_sequence_checker_); + bool aggregate_network_up_ RTC_GUARDED_BY(worker_thread_); - std::unique_ptr receive_crit_; // Audio, Video, and FlexFEC receive streams are owned by the client that // creates them. std::set audio_receive_streams_ - RTC_GUARDED_BY(receive_crit_); - std::set video_receive_streams_ - RTC_GUARDED_BY(receive_crit_); + RTC_GUARDED_BY(worker_thread_); + std::set video_receive_streams_ + RTC_GUARDED_BY(worker_thread_); std::map sync_stream_mapping_ - RTC_GUARDED_BY(receive_crit_); + RTC_GUARDED_BY(worker_thread_); // TODO(nisse): Should eventually be injected at creation, // with a single object in the bundled case. @@ -317,25 +379,26 @@ class Call final : public webrtc::Call, const bool use_send_side_bwe; }; std::map receive_rtp_config_ - RTC_GUARDED_BY(receive_crit_); + RTC_GUARDED_BY(worker_thread_); - std::unique_ptr send_crit_; // Audio and Video send streams are owned by the client that creates them. 
std::map audio_send_ssrcs_ - RTC_GUARDED_BY(send_crit_); + RTC_GUARDED_BY(worker_thread_); std::map video_send_ssrcs_ - RTC_GUARDED_BY(send_crit_); - std::set video_send_streams_ RTC_GUARDED_BY(send_crit_); + RTC_GUARDED_BY(worker_thread_); + std::set video_send_streams_ RTC_GUARDED_BY(worker_thread_); + + // Each forwarder wraps an adaptation resource that was added to the call. + std::vector> + adaptation_resource_forwarders_ RTC_GUARDED_BY(worker_thread_); using RtpStateMap = std::map; - RtpStateMap suspended_audio_send_ssrcs_ - RTC_GUARDED_BY(configuration_sequence_checker_); - RtpStateMap suspended_video_send_ssrcs_ - RTC_GUARDED_BY(configuration_sequence_checker_); + RtpStateMap suspended_audio_send_ssrcs_ RTC_GUARDED_BY(worker_thread_); + RtpStateMap suspended_video_send_ssrcs_ RTC_GUARDED_BY(worker_thread_); using RtpPayloadStateMap = std::map; RtpPayloadStateMap suspended_video_payload_states_ - RTC_GUARDED_BY(configuration_sequence_checker_); + RTC_GUARDED_BY(worker_thread_); webrtc::RtcEventLog* event_log_; @@ -351,17 +414,14 @@ class Call final : public webrtc::Call, absl::optional first_received_rtp_video_ms_; absl::optional last_received_rtp_video_ms_; - rtc::CriticalSection last_bandwidth_bps_crit_; - uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(&last_bandwidth_bps_crit_); + uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(worker_thread_); // TODO(holmer): Remove this lock once BitrateController no longer calls // OnNetworkChanged from multiple threads. 
- rtc::CriticalSection bitrate_crit_; - uint32_t min_allocated_send_bitrate_bps_ - RTC_GUARDED_BY(&worker_sequence_checker_); - uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(&bitrate_crit_); + uint32_t min_allocated_send_bitrate_bps_ RTC_GUARDED_BY(worker_thread_); + uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(worker_thread_); AvgCounter estimated_send_bitrate_kbps_counter_ - RTC_GUARDED_BY(&bitrate_crit_); - AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(&bitrate_crit_); + RTC_GUARDED_BY(worker_thread_); + AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(worker_thread_); ReceiveSideCongestionController receive_side_cc_; @@ -370,6 +430,16 @@ class Call final : public webrtc::Call, const std::unique_ptr video_send_delay_stats_; const int64_t start_ms_; + // Note that |task_safety_| needs to be at a greater scope than the task queue + // owned by |transport_send_| since calls might arrive on the network thread + // while Call is being deleted and the task queue is being torn down. + ScopedTaskSafety task_safety_; + +#ifndef DISABLE_RECORDER + Recorder* send_recorder_; + Recorder* recv_recorder_; +#endif + // Caches transport_send_.get(), to avoid racing with destructor. // Note that this is declared before transport_send_ to ensure that it is not // invalidated until no more tasks can be running on the transport_send_ task @@ -379,8 +449,7 @@ class Call final : public webrtc::Call, // last ensures that it is destroyed first and any running tasks are finished. 
std::unique_ptr transport_send_; - bool is_target_rate_observer_registered_ - RTC_GUARDED_BY(&configuration_sequence_checker_) = false; + bool is_started_ RTC_GUARDED_BY(worker_thread_) = false; RTC_DISALLOW_COPY_AND_ASSIGN(Call); }; @@ -400,14 +469,21 @@ std::string Call::Stats::ToString(int64_t time_ms) const { } Call* Call::Create(const Call::Config& config) { - return Create(config, Clock::GetRealTimeClock(), - ProcessThread::Create("ModuleProcessThread"), + rtc::scoped_refptr call_thread = + SharedModuleThread::Create(ProcessThread::Create("ModuleProcessThread"), + nullptr); + return Create(config, std::move(call_thread)); +} + +Call* Call::Create(const Call::Config& config, + rtc::scoped_refptr call_thread) { + return Create(config, Clock::GetRealTimeClock(), std::move(call_thread), ProcessThread::Create("PacerThread")); } Call* Call::Create(const Call::Config& config, Clock* clock, - std::unique_ptr call_thread, + rtc::scoped_refptr call_thread, std::unique_ptr pacer_thread) { RTC_DCHECK(config.task_queue_factory); return new internal::Call( @@ -419,6 +495,98 @@ Call* Call::Create(const Call::Config& config, std::move(call_thread), config.task_queue_factory); } +class SharedModuleThread::Impl { + public: + Impl(std::unique_ptr process_thread, + std::function on_one_ref_remaining) + : module_thread_(std::move(process_thread)), + on_one_ref_remaining_(std::move(on_one_ref_remaining)) {} + + void EnsureStarted() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (started_) + return; + started_ = true; + module_thread_->Start(); + } + + ProcessThread* process_thread() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return module_thread_.get(); + } + + void AddRef() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ++ref_count_; + } + + rtc::RefCountReleaseStatus Release() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + --ref_count_; + + if (ref_count_ == 0) { + module_thread_->Stop(); + return rtc::RefCountReleaseStatus::kDroppedLastRef; + } + + if (ref_count_ == 
1 && on_one_ref_remaining_) { + auto moved_fn = std::move(on_one_ref_remaining_); + // NOTE: after this function returns, chances are that |this| has been + // deleted - do not touch any member variables. + // If the owner of the last reference implements a lambda that releases + // that last reference inside of the callback (which is legal according + // to this implementation), we will recursively enter Release() above, + // call Stop() and release the last reference. + moved_fn(); + } + + return rtc::RefCountReleaseStatus::kOtherRefsRemained; + } + + private: + SequenceChecker sequence_checker_; + mutable int ref_count_ RTC_GUARDED_BY(sequence_checker_) = 0; + std::unique_ptr const module_thread_; + std::function const on_one_ref_remaining_; + bool started_ = false; +}; + +SharedModuleThread::SharedModuleThread( + std::unique_ptr process_thread, + std::function on_one_ref_remaining) + : impl_(std::make_unique(std::move(process_thread), + std::move(on_one_ref_remaining))) {} + +SharedModuleThread::~SharedModuleThread() = default; + +// static + +rtc::scoped_refptr SharedModuleThread::Create( + std::unique_ptr process_thread, + std::function on_one_ref_remaining) { + return new SharedModuleThread(std::move(process_thread), + std::move(on_one_ref_remaining)); +} + +void SharedModuleThread::EnsureStarted() { + impl_->EnsureStarted(); +} + +ProcessThread* SharedModuleThread::process_thread() { + return impl_->process_thread(); +} + +void SharedModuleThread::AddRef() const { + impl_->AddRef(); +} + +rtc::RefCountReleaseStatus SharedModuleThread::Release() const { + auto ret = impl_->Release(); + if (ret == rtc::RefCountReleaseStatus::kDroppedLastRef) + delete this; + return ret; +} + // This method here to avoid subclasses has to implement this method. // Call perf test will use Internal::Call::CreateVideoSendStream() to inject // FecController. 
@@ -434,20 +602,19 @@ namespace internal { Call::Call(Clock* clock, const Call::Config& config, std::unique_ptr transport_send, - std::unique_ptr module_process_thread, + rtc::scoped_refptr module_process_thread, TaskQueueFactory* task_queue_factory) : clock_(clock), task_queue_factory_(task_queue_factory), + worker_thread_(GetCurrentTaskQueueOrThread()), num_cpu_cores_(CpuInfo::DetectNumberOfCores()), module_process_thread_(std::move(module_process_thread)), - call_stats_(new CallStats(clock_, module_process_thread_.get())), + call_stats_(new CallStats(clock_, worker_thread_)), bitrate_allocator_(new BitrateAllocator(this)), config_(config), audio_network_state_(kNetworkDown), video_network_state_(kNetworkDown), aggregate_network_up_(false), - receive_crit_(RWLockWrapper::CreateRWLock()), - send_crit_(RWLockWrapper::CreateRWLock()), event_log_(config.event_log), received_bytes_per_second_counter_(clock_, nullptr, true), received_audio_bytes_per_second_counter_(clock_, nullptr, true), @@ -462,22 +629,26 @@ Call::Call(Clock* clock, receive_time_calculator_(ReceiveTimeCalculator::CreateFromFieldTrial()), video_send_delay_stats_(new SendDelayStats(clock_)), start_ms_(clock_->TimeInMilliseconds()), +#ifndef DISABLE_RECORDER + send_recorder_(nullptr), + recv_recorder_(nullptr), +#endif transport_send_ptr_(transport_send.get()), transport_send_(std::move(transport_send)) { RTC_DCHECK(config.event_log != nullptr); RTC_DCHECK(config.trials != nullptr); - worker_sequence_checker_.Detach(); + RTC_DCHECK(worker_thread_->IsCurrent()); call_stats_->RegisterStatsObserver(&receive_side_cc_); - module_process_thread_->RegisterModule( + module_process_thread_->process_thread()->RegisterModule( receive_side_cc_.GetRemoteBitrateEstimator(true), RTC_FROM_HERE); - module_process_thread_->RegisterModule(call_stats_.get(), RTC_FROM_HERE); - module_process_thread_->RegisterModule(&receive_side_cc_, RTC_FROM_HERE); + 
module_process_thread_->process_thread()->RegisterModule(&receive_side_cc_, + RTC_FROM_HERE); } Call::~Call() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_CHECK(audio_send_ssrcs_.empty()); RTC_CHECK(video_send_ssrcs_.empty()); @@ -485,11 +656,12 @@ Call::~Call() { RTC_CHECK(audio_receive_streams_.empty()); RTC_CHECK(video_receive_streams_.empty()); - module_process_thread_->Stop(); - module_process_thread_->DeRegisterModule( + StopRecorder((int32_t) RtpTransceiverDirection::kSendOnly); + StopRecorder((int32_t) RtpTransceiverDirection::kRecvOnly); + + module_process_thread_->process_thread()->DeRegisterModule( receive_side_cc_.GetRemoteBitrateEstimator(true)); - module_process_thread_->DeRegisterModule(&receive_side_cc_); - module_process_thread_->DeRegisterModule(call_stats_.get()); + module_process_thread_->process_thread()->DeRegisterModule(&receive_side_cc_); call_stats_->DeregisterStatsObserver(&receive_side_cc_); absl::optional first_sent_packet_ms = @@ -498,7 +670,6 @@ Call::~Call() { // Only update histograms after process threads have been shut down, so that // they won't try to concurrently update stats. if (first_sent_packet_ms) { - rtc::CritScope lock(&bitrate_crit_); UpdateSendHistograms(*first_sent_packet_ms); } @@ -506,23 +677,22 @@ Call::~Call() { UpdateHistograms(); } -void Call::RegisterRateObserver() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); - - if (is_target_rate_observer_registered_) +void Call::EnsureStarted() { + if (is_started_) { return; - - is_target_rate_observer_registered_ = true; + } + is_started_ = true; // This call seems to kick off a number of things, so probably better left // off being kicked off on request rather than in the ctor. 
transport_send_ptr_->RegisterTargetTransferRateObserver(this); - module_process_thread_->Start(); + module_process_thread_->EnsureStarted(); + transport_send_ptr_->EnsureStarted(); } void Call::SetClientBitratePreferences(const BitrateSettings& preferences) { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); GetTransportControllerSend()->SetClientBitratePreferences(preferences); } @@ -604,16 +774,16 @@ void Call::UpdateReceiveHistograms() { } PacketReceiver* Call::Receiver() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); return this; } webrtc::AudioSendStream* Call::CreateAudioSendStream( const webrtc::AudioSendStream::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); - RegisterRateObserver(); + EnsureStarted(); // Stream config is logged in AudioSendStream::ConfigureStream, as it may // change during the stream's lifetime. 
@@ -625,32 +795,33 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( } } - AudioSendStream* send_stream = - new AudioSendStream(clock_, config, config_.audio_state, - task_queue_factory_, module_process_thread_.get(), - transport_send_ptr_, bitrate_allocator_.get(), - event_log_, call_stats_.get(), suspended_rtp_state); - { - WriteLockScoped write_lock(*send_crit_); - RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) == - audio_send_ssrcs_.end()); - audio_send_ssrcs_[config.rtp.ssrc] = send_stream; - } - { - ReadLockScoped read_lock(*receive_crit_); - for (AudioReceiveStream* stream : audio_receive_streams_) { - if (stream->config().rtp.local_ssrc == config.rtp.ssrc) { - stream->AssociateSendStream(send_stream); - } + AudioSendStream* send_stream = new AudioSendStream( + clock_, config, config_.audio_state, task_queue_factory_, + module_process_thread_->process_thread(), transport_send_ptr_, + bitrate_allocator_.get(), event_log_, call_stats_->AsRtcpRttStats(), + suspended_rtp_state); + +#ifndef DISABLE_RECORDER + send_stream->InjectRecorder(send_recorder_); +#endif + + RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) == + audio_send_ssrcs_.end()); + audio_send_ssrcs_[config.rtp.ssrc] = send_stream; + + for (AudioReceiveStream* stream : audio_receive_streams_) { + if (stream->config().rtp.local_ssrc == config.rtp.ssrc) { + stream->AssociateSendStream(send_stream); } } + UpdateAggregateNetworkState(); return send_stream; } void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { TRACE_EVENT0("webrtc", "Call::DestroyAudioSendStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(send_stream != nullptr); send_stream->Stop(); @@ -659,19 +830,16 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { webrtc::internal::AudioSendStream* audio_send_stream = static_cast(send_stream); suspended_audio_send_ssrcs_[ssrc] = audio_send_stream->GetRtpState(); - { - 
WriteLockScoped write_lock(*send_crit_); - size_t num_deleted = audio_send_ssrcs_.erase(ssrc); - RTC_DCHECK_EQ(1, num_deleted); - } - { - ReadLockScoped read_lock(*receive_crit_); - for (AudioReceiveStream* stream : audio_receive_streams_) { - if (stream->config().rtp.local_ssrc == ssrc) { - stream->AssociateSendStream(nullptr); - } + + size_t num_deleted = audio_send_ssrcs_.erase(ssrc); + RTC_DCHECK_EQ(1, num_deleted); + + for (AudioReceiveStream* stream : audio_receive_streams_) { + if (stream->config().rtp.local_ssrc == ssrc) { + stream->AssociateSendStream(nullptr); } } + UpdateAggregateNetworkState(); delete send_stream; } @@ -679,29 +847,29 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( const webrtc::AudioReceiveStream::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); - RegisterRateObserver(); + RTC_DCHECK_RUN_ON(worker_thread_); + EnsureStarted(); event_log_->Log(std::make_unique( CreateRtcLogStreamConfig(config))); AudioReceiveStream* receive_stream = new AudioReceiveStream( clock_, &audio_receiver_controller_, transport_send_ptr_->packet_router(), - module_process_thread_.get(), config_.neteq_factory, config, + module_process_thread_->process_thread(), config_.neteq_factory, config, config_.audio_state, event_log_); - { - WriteLockScoped write_lock(*receive_crit_); - receive_rtp_config_.emplace(config.rtp.remote_ssrc, - ReceiveRtpConfig(config)); - audio_receive_streams_.insert(receive_stream); - ConfigureSync(config.sync_group); - } - { - ReadLockScoped read_lock(*send_crit_); - auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc); - if (it != audio_send_ssrcs_.end()) { - receive_stream->AssociateSendStream(it->second); - } +#ifndef DISABLE_RECORDER + receive_stream->InjectRecorder(recv_recorder_); +#endif + + receive_rtp_config_.emplace(config.rtp.remote_ssrc, 
ReceiveRtpConfig(config)); + audio_receive_streams_.insert(receive_stream); + + ConfigureSync(config.sync_group); + + auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc); + if (it != audio_send_ssrcs_.end()) { + receive_stream->AssociateSendStream(it->second); } + UpdateAggregateNetworkState(); return receive_stream; } @@ -709,26 +877,24 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( void Call::DestroyAudioReceiveStream( webrtc::AudioReceiveStream* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyAudioReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); webrtc::internal::AudioReceiveStream* audio_receive_stream = static_cast(receive_stream); - { - WriteLockScoped write_lock(*receive_crit_); - const AudioReceiveStream::Config& config = audio_receive_stream->config(); - uint32_t ssrc = config.rtp.remote_ssrc; - receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) - ->RemoveStream(ssrc); - audio_receive_streams_.erase(audio_receive_stream); - const std::string& sync_group = audio_receive_stream->config().sync_group; - const auto it = sync_stream_mapping_.find(sync_group); - if (it != sync_stream_mapping_.end() && - it->second == audio_receive_stream) { - sync_stream_mapping_.erase(it); - ConfigureSync(sync_group); - } - receive_rtp_config_.erase(ssrc); + + const AudioReceiveStream::Config& config = audio_receive_stream->config(); + uint32_t ssrc = config.rtp.remote_ssrc; + receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) + ->RemoveStream(ssrc); + audio_receive_streams_.erase(audio_receive_stream); + const std::string& sync_group = audio_receive_stream->config().sync_group; + const auto it = sync_stream_mapping_.find(sync_group); + if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) { + sync_stream_mapping_.erase(it); + ConfigureSync(sync_group); } + receive_rtp_config_.erase(ssrc); + 
UpdateAggregateNetworkState(); delete audio_receive_stream; } @@ -739,9 +905,9 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( VideoEncoderConfig encoder_config, std::unique_ptr fec_controller) { TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); - RegisterRateObserver(); + EnsureStarted(); video_send_delay_stats_->AddSsrcs(config); for (size_t ssrc_index = 0; ssrc_index < config.rtp.ssrcs.size(); @@ -756,20 +922,25 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( std::vector ssrcs = config.rtp.ssrcs; VideoSendStream* send_stream = new VideoSendStream( - clock_, num_cpu_cores_, module_process_thread_.get(), task_queue_factory_, - call_stats_.get(), transport_send_ptr_, bitrate_allocator_.get(), - video_send_delay_stats_.get(), event_log_, std::move(config), - std::move(encoder_config), suspended_video_send_ssrcs_, + clock_, num_cpu_cores_, module_process_thread_->process_thread(), + task_queue_factory_, call_stats_->AsRtcpRttStats(), transport_send_ptr_, + bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_, + std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_, suspended_video_payload_states_, std::move(fec_controller)); +#ifndef DISABLE_RECORDER + send_stream->InjectRecorder(send_recorder_); +#endif - { - WriteLockScoped write_lock(*send_crit_); - for (uint32_t ssrc : ssrcs) { - RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end()); - video_send_ssrcs_[ssrc] = send_stream; - } - video_send_streams_.insert(send_stream); + for (uint32_t ssrc : ssrcs) { + RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end()); + video_send_ssrcs_[ssrc] = send_stream; } + video_send_streams_.insert(send_stream); + // Forward resources that were previously added to the call to the new stream. 
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) { + resource_forwarder->OnCreateVideoSendStream(send_stream); + } + UpdateAggregateNetworkState(); return send_stream; @@ -792,24 +963,27 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { TRACE_EVENT0("webrtc", "Call::DestroyVideoSendStream"); RTC_DCHECK(send_stream != nullptr); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); send_stream->Stop(); VideoSendStream* send_stream_impl = nullptr; - { - WriteLockScoped write_lock(*send_crit_); - auto it = video_send_ssrcs_.begin(); - while (it != video_send_ssrcs_.end()) { - if (it->second == static_cast(send_stream)) { - send_stream_impl = it->second; - video_send_ssrcs_.erase(it++); - } else { - ++it; - } + + auto it = video_send_ssrcs_.begin(); + while (it != video_send_ssrcs_.end()) { + if (it->second == static_cast(send_stream)) { + send_stream_impl = it->second; + video_send_ssrcs_.erase(it++); + } else { + ++it; } - video_send_streams_.erase(send_stream_impl); } + // Stop forwarding resources to the stream being destroyed. 
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) { + resource_forwarder->OnDestroyVideoSendStream(send_stream_impl); + } + video_send_streams_.erase(send_stream_impl); + RTC_CHECK(send_stream_impl != nullptr); VideoSendStream::RtpStateMap rtp_states; @@ -830,34 +1004,37 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( webrtc::VideoReceiveStream::Config configuration) { TRACE_EVENT0("webrtc", "Call::CreateVideoReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); receive_side_cc_.SetSendPeriodicFeedback( SendPeriodicFeedback(configuration.rtp.extensions)); - RegisterRateObserver(); + EnsureStarted(); - VideoReceiveStream* receive_stream = new VideoReceiveStream( - task_queue_factory_, &video_receiver_controller_, num_cpu_cores_, + TaskQueueBase* current = GetCurrentTaskQueueOrThread(); + RTC_CHECK(current); + VideoReceiveStream2* receive_stream = new VideoReceiveStream2( + task_queue_factory_, current, &video_receiver_controller_, num_cpu_cores_, transport_send_ptr_->packet_router(), std::move(configuration), - module_process_thread_.get(), call_stats_.get(), clock_); + module_process_thread_->process_thread(), call_stats_.get(), clock_, + new VCMTiming(clock_)); + +#ifndef DISABLE_RECORDER + receive_stream->InjectRecorder(recv_recorder_); +#endif const webrtc::VideoReceiveStream::Config& config = receive_stream->config(); - { - WriteLockScoped write_lock(*receive_crit_); - if (config.rtp.rtx_ssrc) { - // We record identical config for the rtx stream as for the main - // stream. Since the transport_send_cc negotiation is per payload - // type, we may get an incorrect value for the rtx stream, but - // that is unlikely to matter in practice. 
- receive_rtp_config_.emplace(config.rtp.rtx_ssrc, - ReceiveRtpConfig(config)); - } - receive_rtp_config_.emplace(config.rtp.remote_ssrc, - ReceiveRtpConfig(config)); - video_receive_streams_.insert(receive_stream); - ConfigureSync(config.sync_group); + if (config.rtp.rtx_ssrc) { + // We record identical config for the rtx stream as for the main + // stream. Since the transport_send_cc negotiation is per payload + // type, we may get an incorrect value for the rtx stream, but + // that is unlikely to matter in practice. + receive_rtp_config_.emplace(config.rtp.rtx_ssrc, ReceiveRtpConfig(config)); } + receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config)); + video_receive_streams_.insert(receive_stream); + ConfigureSync(config.sync_group); + receive_stream->SignalNetworkState(video_network_state_); UpdateAggregateNetworkState(); event_log_->Log(std::make_unique( @@ -868,22 +1045,20 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( void Call::DestroyVideoReceiveStream( webrtc::VideoReceiveStream* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyVideoReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); - VideoReceiveStream* receive_stream_impl = - static_cast(receive_stream); + VideoReceiveStream2* receive_stream_impl = + static_cast(receive_stream); const VideoReceiveStream::Config& config = receive_stream_impl->config(); - { - WriteLockScoped write_lock(*receive_crit_); - // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a - // separate SSRC there can be either one or two. - receive_rtp_config_.erase(config.rtp.remote_ssrc); - if (config.rtp.rtx_ssrc) { - receive_rtp_config_.erase(config.rtp.rtx_ssrc); - } - video_receive_streams_.erase(receive_stream_impl); - ConfigureSync(config.sync_group); + + // Remove all ssrcs pointing to a receive stream. 
As RTX retransmits on a + // separate SSRC there can be either one or two. + receive_rtp_config_.erase(config.rtp.remote_ssrc); + if (config.rtp.rtx_ssrc) { + receive_rtp_config_.erase(config.rtp.rtx_ssrc); } + video_receive_streams_.erase(receive_stream_impl); + ConfigureSync(config.sync_group); receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) ->RemoveStream(config.rtp.remote_ssrc); @@ -892,33 +1067,115 @@ void Call::DestroyVideoReceiveStream( delete receive_stream_impl; } +int32_t Call::StartRecorder(int32_t dir, std::string path) { +#ifndef DISABLE_RECORDER + RTC_LOG(LS_INFO) << "Call::StartRecorder " << dir << " " << path; + + if (path.rfind(".mkv") == std::string::npos) { + return -1; + } + + if (dir == (int32_t) RtpTransceiverDirection::kSendOnly) { + if (send_recorder_) { + return -2; + } + send_recorder_ = new Recorder(task_queue_factory_); + int res = send_recorder_->Start(path); + if (res != 0) { + return res; + } + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this]() { + RTC_DCHECK_RUN_ON(worker_thread_); + for (auto send_stream : video_send_streams_) { + send_stream->InjectRecorder(send_recorder_); + } + for (const auto& kv : audio_send_ssrcs_) { + kv.second->InjectRecorder(send_recorder_); + } + })); + } else if (dir == (int32_t) RtpTransceiverDirection::kRecvOnly) { + if (recv_recorder_) { + return -3; + } + recv_recorder_ = new Recorder(task_queue_factory_); + int res = recv_recorder_->Start(path); + if (res != 0) { + return res; + } + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this]() { + RTC_DCHECK_RUN_ON(worker_thread_); + for (auto recv_stream : video_receive_streams_) { + recv_stream->InjectRecorder(recv_recorder_); + } + for (auto recv_stream : audio_receive_streams_) { + recv_stream->InjectRecorder(recv_recorder_); + } + })); + } else { + return -4; + } +#endif + return 0; +} + +int32_t Call::StopRecorder(int32_t dir) { +#ifndef DISABLE_RECORDER + RTC_LOG(LS_INFO) << "Call::StopRecorder " << dir; + if (dir 
== (int32_t) RtpTransceiverDirection::kSendOnly && send_recorder_) { + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this]() { + RTC_DCHECK_RUN_ON(worker_thread_); + for (VideoSendStream* send_stream : video_send_streams_) { + send_stream->InjectRecorder(nullptr); + } + for (const auto& kv : audio_send_ssrcs_) { + kv.second->InjectRecorder(nullptr); + } + + send_recorder_->Stop(); + delete send_recorder_; + send_recorder_ = nullptr; + })); + } else if (dir == (int32_t) RtpTransceiverDirection::kRecvOnly && recv_recorder_) { + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this]() { + RTC_DCHECK_RUN_ON(worker_thread_); + for (auto recv_stream : video_receive_streams_) { + recv_stream->InjectRecorder(nullptr); + } + for (auto recv_stream : audio_receive_streams_) { + recv_stream->InjectRecorder(nullptr); + } + + recv_recorder_->Stop(); + delete recv_recorder_; + recv_recorder_ = nullptr; + })); + } +#endif + return 0; +} + FlexfecReceiveStream* Call::CreateFlexfecReceiveStream( const FlexfecReceiveStream::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateFlexfecReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RecoveredPacketReceiver* recovered_packet_receiver = this; FlexfecReceiveStreamImpl* receive_stream; - { - WriteLockScoped write_lock(*receive_crit_); - // Unlike the video and audio receive streams, - // FlexfecReceiveStream implements RtpPacketSinkInterface itself, - // and hence its constructor passes its |this| pointer to - // video_receiver_controller_->CreateStream(). Calling the - // constructor while holding |receive_crit_| ensures that we don't - // call OnRtpPacket until the constructor is finished and the - // object is in a valid state. - // TODO(nisse): Fix constructor so that it can be moved outside of - // this locked scope. 
- receive_stream = new FlexfecReceiveStreamImpl( - clock_, &video_receiver_controller_, config, recovered_packet_receiver, - call_stats_.get(), module_process_thread_.get()); - - RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) == - receive_rtp_config_.end()); - receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config)); - } + + // Unlike the video and audio receive streams, FlexfecReceiveStream implements + // RtpPacketSinkInterface itself, and hence its constructor passes its |this| + // pointer to video_receiver_controller_->CreateStream(). Calling the + // constructor while on the worker thread ensures that we don't call + // OnRtpPacket until the constructor is finished and the object is + // in a valid state, since OnRtpPacket runs on the same thread. + receive_stream = new FlexfecReceiveStreamImpl( + clock_, &video_receiver_controller_, config, recovered_packet_receiver, + call_stats_->AsRtcpRttStats(), module_process_thread_->process_thread()); + + RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) == + receive_rtp_config_.end()); + receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config)); // TODO(brandtr): Store config in RtcEventLog here. @@ -927,39 +1184,37 @@ FlexfecReceiveStream* Call::CreateFlexfecReceiveStream( void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyFlexfecReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); - { - WriteLockScoped write_lock(*receive_crit_); - - const FlexfecReceiveStream::Config& config = receive_stream->GetConfig(); - uint32_t ssrc = config.remote_ssrc; - receive_rtp_config_.erase(ssrc); + const FlexfecReceiveStream::Config& config = receive_stream->GetConfig(); + uint32_t ssrc = config.remote_ssrc; + receive_rtp_config_.erase(ssrc); - // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be - // destroyed. 
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) - ->RemoveStream(ssrc); - } + // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be + // destroyed. + receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) + ->RemoveStream(ssrc); delete receive_stream; } +void Call::AddAdaptationResource(rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(worker_thread_); + adaptation_resource_forwarders_.push_back( + std::make_unique(resource)); + const auto& resource_forwarder = adaptation_resource_forwarders_.back(); + for (VideoSendStream* send_stream : video_send_streams_) { + resource_forwarder->OnCreateVideoSendStream(send_stream); + } +} + RtpTransportControllerSendInterface* Call::GetTransportControllerSend() { return transport_send_ptr_; } Call::Stats Call::GetStats() const { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); - - // TODO(tommi): The following stats are managed on the process thread: - // - pacer_delay_ms (PacedSender::Process) - // - rtt_ms - // - recv_bandwidth_bps - // These are delivered on the network TQ: - // - send_bandwidth_bps (see OnTargetTransferRate) - // - max_padding_bitrate_bps (see OnAllocationLimitsChanged) + RTC_DCHECK_RUN_ON(worker_thread_); Stats stats; // TODO(srte): It is unclear if we only want to report queues if network is @@ -975,22 +1230,18 @@ Call::Stats Call::GetStats() const { receive_side_cc_.GetRemoteBitrateEstimator(false)->LatestEstimate( &ssrcs, &recv_bandwidth); stats.recv_bandwidth_bps = recv_bandwidth; - - { - rtc::CritScope cs(&last_bandwidth_bps_crit_); - stats.send_bandwidth_bps = last_bandwidth_bps_; - } - - { - rtc::CritScope cs(&bitrate_crit_); - stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_; - } + stats.send_bandwidth_bps = last_bandwidth_bps_; + stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_; return stats; } +const WebRtcKeyValueConfig& Call::trials() const { + return *config_.trials; +} + void 
Call::SignalChannelNetworkState(MediaType media, NetworkState state) { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); switch (media) { case MediaType::AUDIO: audio_network_state_ = state; @@ -1005,40 +1256,25 @@ void Call::SignalChannelNetworkState(MediaType media, NetworkState state) { } UpdateAggregateNetworkState(); - { - ReadLockScoped read_lock(*receive_crit_); - for (VideoReceiveStream* video_receive_stream : video_receive_streams_) { - video_receive_stream->SignalNetworkState(video_network_state_); - } + for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) { + video_receive_stream->SignalNetworkState(video_network_state_); } } void Call::OnAudioTransportOverheadChanged(int transport_overhead_per_packet) { - ReadLockScoped read_lock(*send_crit_); + RTC_DCHECK_RUN_ON(worker_thread_); for (auto& kv : audio_send_ssrcs_) { kv.second->SetTransportOverhead(transport_overhead_per_packet); } } void Call::UpdateAggregateNetworkState() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); - bool have_audio = false; - bool have_video = false; - { - ReadLockScoped read_lock(*send_crit_); - if (!audio_send_ssrcs_.empty()) - have_audio = true; - if (!video_send_ssrcs_.empty()) - have_video = true; - } - { - ReadLockScoped read_lock(*receive_crit_); - if (!audio_receive_streams_.empty()) - have_audio = true; - if (!video_receive_streams_.empty()) - have_video = true; - } + bool have_audio = + !audio_send_ssrcs_.empty() || !audio_receive_streams_.empty(); + bool have_video = + !video_send_ssrcs_.empty() || !video_receive_streams_.empty(); bool aggregate_network_up = ((have_video && video_network_state_ == kNetworkUp) || @@ -1065,61 +1301,50 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) { } void Call::OnStartRateUpdate(DataRate start_rate) { - RTC_DCHECK(network_queue()->IsCurrent()); + RTC_DCHECK_RUN_ON(send_transport_queue()); 
bitrate_allocator_->UpdateStartRate(start_rate.bps()); } void Call::OnTargetTransferRate(TargetTransferRate msg) { - RTC_DCHECK(network_queue()->IsCurrent()); - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - { - rtc::CritScope cs(&last_bandwidth_bps_crit_); - last_bandwidth_bps_ = msg.target_rate.bps(); - } + RTC_DCHECK_RUN_ON(send_transport_queue()); uint32_t target_bitrate_bps = msg.target_rate.bps(); // For controlling the rate of feedback messages. receive_side_cc_.OnBitrateChanged(target_bitrate_bps); bitrate_allocator_->OnNetworkEstimateChanged(msg); - // Ignore updates if bitrate is zero (the aggregate network state is down). - if (target_bitrate_bps == 0) { - rtc::CritScope lock(&bitrate_crit_); - estimated_send_bitrate_kbps_counter_.ProcessAndPause(); - pacer_bitrate_kbps_counter_.ProcessAndPause(); - return; - } - - bool sending_video; - { - ReadLockScoped read_lock(*send_crit_); - sending_video = !video_send_streams_.empty(); - } + worker_thread_->PostTask( + ToQueuedTask(task_safety_, [this, target_bitrate_bps]() { + RTC_DCHECK_RUN_ON(worker_thread_); + last_bandwidth_bps_ = target_bitrate_bps; + + // Ignore updates if bitrate is zero (the aggregate network state is + // down) or if we're not sending video. + if (target_bitrate_bps == 0 || video_send_streams_.empty()) { + estimated_send_bitrate_kbps_counter_.ProcessAndPause(); + pacer_bitrate_kbps_counter_.ProcessAndPause(); + return; + } - rtc::CritScope lock(&bitrate_crit_); - if (!sending_video) { - // Do not update the stats if we are not sending video. - estimated_send_bitrate_kbps_counter_.ProcessAndPause(); - pacer_bitrate_kbps_counter_.ProcessAndPause(); - return; - } - estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000); - // Pacer bitrate may be higher than bitrate estimate if enforcing min bitrate. 
- uint32_t pacer_bitrate_bps = - std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_); - pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000); + estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000); + // Pacer bitrate may be higher than bitrate estimate if enforcing min + // bitrate. + uint32_t pacer_bitrate_bps = + std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_); + pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000); + })); } void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) { - RTC_DCHECK(network_queue()->IsCurrent()); - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RTC_DCHECK_RUN_ON(send_transport_queue()); transport_send_ptr_->SetAllocatedSendBitrateLimits(limits); - min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps(); - - rtc::CritScope lock(&bitrate_crit_); - configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps(); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this, limits]() { + RTC_DCHECK_RUN_ON(worker_thread_); + min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps(); + configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps(); + })); } void Call::ConfigureSync(const std::string& sync_group) { @@ -1150,7 +1375,7 @@ void Call::ConfigureSync(const std::string& sync_group) { if (sync_audio_stream) sync_stream_mapping_[sync_group] = sync_audio_stream; size_t num_synced_streams = 0; - for (VideoReceiveStream* video_stream : video_receive_streams_) { + for (VideoReceiveStream2* video_stream : video_receive_streams_) { if (video_stream->config().sync_group != sync_group) continue; ++num_synced_streams; @@ -1186,28 +1411,24 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type, } bool rtcp_delivered = false; if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) { - ReadLockScoped read_lock(*receive_crit_); - for (VideoReceiveStream* stream : video_receive_streams_) { + for (VideoReceiveStream2* stream : 
video_receive_streams_) { if (stream->DeliverRtcp(packet, length)) rtcp_delivered = true; } } if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) { - ReadLockScoped read_lock(*receive_crit_); for (AudioReceiveStream* stream : audio_receive_streams_) { stream->DeliverRtcp(packet, length); rtcp_delivered = true; } } if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) { - ReadLockScoped read_lock(*send_crit_); for (VideoSendStream* stream : video_send_streams_) { stream->DeliverRtcp(packet, length); rtcp_delivered = true; } } if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) { - ReadLockScoped read_lock(*send_crit_); for (auto& kv : audio_send_ssrcs_) { kv.second->DeliverRtcp(packet, length); rtcp_delivered = true; @@ -1251,17 +1472,15 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, RTC_DCHECK(media_type == MediaType::AUDIO || media_type == MediaType::VIDEO || is_keep_alive_packet); - ReadLockScoped read_lock(*receive_crit_); auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); if (it == receive_rtp_config_.end()) { RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " << parsed_packet.Ssrc(); // Destruction of the receive stream, including deregistering from the - // RtpDemuxer, is not protected by the |receive_crit_| lock. But - // deregistering in the |receive_rtp_config_| map is protected by that lock. - // So by not passing the packet on to demuxing in this case, we prevent - // incoming packets to be passed on via the demuxer to a receive stream - // which is being torned down. + // RtpDemuxer, is not protected by the |worker_thread_|. + // But deregistering in the |receive_rtp_config_| map is. So by not passing + // the packet on to demuxing in this case, we prevent incoming packets to be + // passed on via the demuxer to a receive stream which is being torned down. 
return DELIVERY_UNKNOWN_SSRC; } @@ -1307,7 +1526,8 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket( MediaType media_type, rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); + if (IsRtcp(packet.cdata(), packet.size())) return DeliverRtcp(media_type, packet.cdata(), packet.size()); @@ -1315,20 +1535,20 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket( } void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) { + RTC_DCHECK_RUN_ON(worker_thread_); RtpPacketReceived parsed_packet; if (!parsed_packet.Parse(packet, length)) return; parsed_packet.set_recovered(true); - ReadLockScoped read_lock(*receive_crit_); auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); if (it == receive_rtp_config_.end()) { RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " << parsed_packet.Ssrc(); // Destruction of the receive stream, including deregistering from the - // RtpDemuxer, is not protected by the |receive_crit_| lock. But - // deregistering in the |receive_rtp_config_| map is protected by that lock. + // RtpDemuxer, is not protected by the |worker_thread_|. + // But deregistering in the |receive_rtp_config_| map is. // So by not passing the packet on to demuxing in this case, we prevent // incoming packets to be passed on via the demuxer to a receive stream // which is being torn down. 
@@ -1351,8 +1571,8 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, packet.GetHeader(&header); ReceivedPacket packet_msg; - packet_msg.size = DataSize::bytes(packet.payload_size()); - packet_msg.receive_time = Timestamp::ms(packet.arrival_time_ms()); + packet_msg.size = DataSize::Bytes(packet.payload_size()); + packet_msg.receive_time = Timestamp::Millis(packet.arrival_time_ms()); if (header.extension.hasAbsoluteSendTime) { packet_msg.send_time = header.extension.GetAbsoluteSendTimestamp(); } diff --git a/call/call.h b/call/call.h index 77cd3d2690..0d2a9f5f1d 100644 --- a/call/call.h +++ b/call/call.h @@ -15,6 +15,7 @@ #include #include +#include "api/adaptation/resource.h" #include "api/media_types.h" #include "call/audio_receive_stream.h" #include "call/audio_send_stream.h" @@ -28,9 +29,41 @@ #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" +#include "rtc_base/ref_count.h" namespace webrtc { +// A restricted way to share the module process thread across multiple instances +// of Call that are constructed on the same worker thread (which is what the +// peer connection factory guarantees). +// SharedModuleThread supports a callback that is issued when only one reference +// remains, which is used to indicate to the original owner that the thread may +// be discarded. +class SharedModuleThread : public rtc::RefCountInterface { + protected: + SharedModuleThread(std::unique_ptr process_thread, + std::function on_one_ref_remaining); + friend class rtc::scoped_refptr; + ~SharedModuleThread() override; + + public: + // Allows injection of an externally created process thread. 
+ static rtc::scoped_refptr Create( + std::unique_ptr process_thread, + std::function on_one_ref_remaining); + + void EnsureStarted(); + + ProcessThread* process_thread(); + + private: + void AddRef() const override; + rtc::RefCountReleaseStatus Release() const override; + + class Impl; + mutable std::unique_ptr impl_; +}; + // A Call instance can contain several send and/or receive streams. All streams // are assumed to have the same remote endpoint and will share bitrate estimates // etc. @@ -49,9 +82,11 @@ class Call { }; static Call* Create(const Call::Config& config); + static Call* Create(const Call::Config& config, + rtc::scoped_refptr call_thread); static Call* Create(const Call::Config& config, Clock* clock, - std::unique_ptr call_thread, + rtc::scoped_refptr call_thread, std::unique_ptr pacer_thread); virtual AudioSendStream* CreateAudioSendStream( @@ -78,6 +113,9 @@ class Call { virtual void DestroyVideoReceiveStream( VideoReceiveStream* receive_stream) = 0; + virtual int32_t StartRecorder(int32_t dir, std::string path) = 0; + virtual int32_t StopRecorder(int32_t dir) = 0; + // In order for a created VideoReceiveStream to be aware that it is // protected by a FlexfecReceiveStream, the latter should be created before // the former. @@ -86,6 +124,11 @@ class Call { virtual void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) = 0; + // When a resource is overused, the Call will try to reduce the load on the + // sysem, for example by reducing the resolution or frame rate of encoded + // streams. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; + // All received RTP and RTCP packets for the call should be inserted to this // PacketReceiver. The PacketReceiver pointer is valid as long as the // Call instance exists. 
@@ -116,6 +159,8 @@ class Call { virtual void SetClientBitratePreferences( const BitrateSettings& preferences) = 0; + virtual const WebRtcKeyValueConfig& trials() const = 0; + virtual ~Call() {} }; diff --git a/call/call_config.h b/call/call_config.h index cd0ef3352e..205f7a48bb 100644 --- a/call/call_config.h +++ b/call/call_config.h @@ -30,8 +30,6 @@ struct CallConfig { CallConfig(const CallConfig&); ~CallConfig(); - RTC_DEPRECATED static constexpr int kDefaultStartBitrateBps = 300000; - // Bitrate config used until valid bitrate estimates are calculated. Also // used to cap total bitrate used. This comes from the remote connection. BitrateConstraints bitrate_config; diff --git a/call/call_factory.cc b/call/call_factory.cc index 6b4f419742..cc02c02835 100644 --- a/call/call_factory.cc +++ b/call/call_factory.cc @@ -70,7 +70,12 @@ absl::optional ParseDegradationConfig( } } // namespace +CallFactory::CallFactory() { + call_thread_.Detach(); +} + Call* CallFactory::CreateCall(const Call::Config& config) { + RTC_DCHECK_RUN_ON(&call_thread_); absl::optional send_degradation_config = ParseDegradationConfig(true); absl::optional @@ -82,7 +87,15 @@ Call* CallFactory::CreateCall(const Call::Config& config) { config.task_queue_factory); } - return Call::Create(config); + if (!module_thread_) { + module_thread_ = SharedModuleThread::Create( + ProcessThread::Create("SharedModThread"), [this]() { + RTC_DCHECK_RUN_ON(&call_thread_); + module_thread_ = nullptr; + }); + } + + return Call::Create(config, module_thread_); } std::unique_ptr CreateCallFactory() { diff --git a/call/call_factory.h b/call/call_factory.h index f0d695c915..65c0b6532a 100644 --- a/call/call_factory.h +++ b/call/call_factory.h @@ -14,13 +14,22 @@ #include "api/call/call_factory_interface.h" #include "call/call.h" #include "call/call_config.h" +#include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { class CallFactory : public CallFactoryInterface { + public: + CallFactory(); + + private: 
~CallFactory() override {} Call* CreateCall(const CallConfig& config) override; + + SequenceChecker call_thread_; + rtc::scoped_refptr module_thread_ + RTC_GUARDED_BY(call_thread_); }; } // namespace webrtc diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc index 2d23087cc8..aa8894e9ae 100644 --- a/call/call_perf_tests.cc +++ b/call/call_perf_tests.cc @@ -29,6 +29,7 @@ #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -96,21 +97,24 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, static const int kMinRunTimeMs = 30000; public: - explicit VideoRtcpAndSyncObserver(Clock* clock, const std::string& test_label) + explicit VideoRtcpAndSyncObserver(TaskQueueBase* task_queue, + Clock* clock, + const std::string& test_label) : test::RtpRtcpObserver(CallPerfTest::kLongTimeoutMs), clock_(clock), test_label_(test_label), creation_time_ms_(clock_->TimeInMilliseconds()), - first_time_in_sync_(-1), - receive_stream_(nullptr) {} + task_queue_(task_queue) {} void OnFrame(const VideoFrame& video_frame) override { - VideoReceiveStream::Stats stats; - { - rtc::CritScope lock(&crit_); - if (receive_stream_) - stats = receive_stream_->GetStats(); - } + task_queue_->PostTask(ToQueuedTask([this]() { CheckStats(); })); + } + + void CheckStats() { + if (!receive_stream_) + return; + + VideoReceiveStream::Stats stats = receive_stream_->GetStats(); if (stats.sync_offset_ms == std::numeric_limits::max()) return; @@ -135,7 +139,8 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, } void set_receive_stream(VideoReceiveStream* receive_stream) { - rtc::CritScope lock(&crit_); + RTC_DCHECK_EQ(task_queue_, TaskQueueBase::Current()); + // Note that receive_stream may be nullptr. 
receive_stream_ = receive_stream; } @@ -148,10 +153,10 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, Clock* const clock_; std::string test_label_; const int64_t creation_time_ms_; - int64_t first_time_in_sync_; - rtc::CriticalSection crit_; - VideoReceiveStream* receive_stream_ RTC_GUARDED_BY(crit_); + int64_t first_time_in_sync_ = -1; + VideoReceiveStream* receive_stream_ = nullptr; std::vector sync_offset_ms_list_; + TaskQueueBase* const task_queue_; }; void CallPerfTest::TestAudioVideoSync(FecMode fec, @@ -168,7 +173,8 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, audio_net_config.queue_delay_ms = 500; audio_net_config.loss_percent = 5; - VideoRtcpAndSyncObserver observer(Clock::GetRealTimeClock(), test_label); + auto observer = std::make_unique( + task_queue(), Clock::GetRealTimeClock(), test_label); std::map audio_pt_map; std::map video_pt_map; @@ -176,7 +182,6 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, std::unique_ptr audio_send_transport; std::unique_ptr video_send_transport; std::unique_ptr receive_transport; - test::NullTransport rtcp_send_transport; AudioSendStream* audio_send_stream; AudioReceiveStream* audio_receive_stream; @@ -218,7 +223,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, }); audio_send_transport = std::make_unique( - task_queue(), sender_call_.get(), &observer, + task_queue(), sender_call_.get(), observer.get(), test::PacketTransport::kSender, audio_pt_map, std::make_unique( Clock::GetRealTimeClock(), @@ -226,7 +231,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, audio_send_transport->SetReceiver(receiver_call_->Receiver()); video_send_transport = std::make_unique( - task_queue(), sender_call_.get(), &observer, + task_queue(), sender_call_.get(), observer.get(), test::PacketTransport::kSender, video_pt_map, std::make_unique(Clock::GetRealTimeClock(), std::make_unique( @@ -234,7 +239,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, 
video_send_transport->SetReceiver(receiver_call_->Receiver()); receive_transport = std::make_unique( - task_queue(), receiver_call_.get(), &observer, + task_queue(), receiver_call_.get(), observer.get(), test::PacketTransport::kReceiver, payload_type_map_, std::make_unique(Clock::GetRealTimeClock(), std::make_unique( @@ -259,13 +264,13 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, video_receive_configs_[0].rtp.ulpfec_payload_type = kUlpfecPayloadType; } video_receive_configs_[0].rtp.nack.rtp_history_ms = 1000; - video_receive_configs_[0].renderer = &observer; + video_receive_configs_[0].renderer = observer.get(); video_receive_configs_[0].sync_group = kSyncGroup; AudioReceiveStream::Config audio_recv_config; audio_recv_config.rtp.remote_ssrc = kAudioSendSsrc; audio_recv_config.rtp.local_ssrc = kAudioRecvSsrc; - audio_recv_config.rtcp_send_transport = &rtcp_send_transport; + audio_recv_config.rtcp_send_transport = receive_transport.get(); audio_recv_config.sync_group = kSyncGroup; audio_recv_config.decoder_factory = audio_decoder_factory_; audio_recv_config.decoder_map = { @@ -281,7 +286,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, receiver_call_->CreateAudioReceiveStream(audio_recv_config); } EXPECT_EQ(1u, video_receive_streams_.size()); - observer.set_receive_stream(video_receive_streams_[0]); + observer->set_receive_stream(video_receive_streams_[0]); drifting_clock = std::make_unique(clock_, video_ntp_speed); CreateFrameGeneratorCapturerWithDrift(drifting_clock.get(), video_rtp_speed, kDefaultFramerate, kDefaultWidth, @@ -293,10 +298,13 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, audio_receive_stream->Start(); }); - EXPECT_TRUE(observer.Wait()) + EXPECT_TRUE(observer->Wait()) << "Timed out while waiting for audio and video to be synchronized."; SendTask(RTC_FROM_HERE, task_queue(), [&]() { + // Clear the pointer to the receive stream since it will now be deleted. 
+ observer->set_receive_stream(nullptr); + audio_send_stream->Stop(); audio_receive_stream->Stop(); @@ -314,7 +322,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, DestroyCalls(); }); - observer.PrintResults(); + observer->PrintResults(); // In quick test synchronization may not be achieved in time. if (!field_trial::IsEnabled("WebRTC-QuickPerfTest")) { @@ -323,29 +331,34 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs")); #endif } + + task_queue()->PostTask( + ToQueuedTask([to_delete = observer.release()]() { delete to_delete; })); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithoutClockDrift) { +TEST_F(CallPerfTest, Synchronization_PlaysOutAudioAndVideoWithoutClockDrift) { TestAudioVideoSync(FecMode::kOff, CreateOrder::kAudioFirst, DriftingClock::kNoDrift, DriftingClock::kNoDrift, DriftingClock::kNoDrift, "_video_no_drift"); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithVideoNtpDrift) { +TEST_F(CallPerfTest, Synchronization_PlaysOutAudioAndVideoWithVideoNtpDrift) { TestAudioVideoSync(FecMode::kOff, CreateOrder::kAudioFirst, DriftingClock::PercentsFaster(10.0f), DriftingClock::kNoDrift, DriftingClock::kNoDrift, "_video_ntp_drift"); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithAudioFasterThanVideoDrift) { +TEST_F(CallPerfTest, + Synchronization_PlaysOutAudioAndVideoWithAudioFasterThanVideoDrift) { TestAudioVideoSync(FecMode::kOff, CreateOrder::kAudioFirst, DriftingClock::kNoDrift, DriftingClock::PercentsSlower(30.0f), DriftingClock::PercentsFaster(30.0f), "_audio_faster"); } -TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithVideoFasterThanAudioDrift) { +TEST_F(CallPerfTest, + Synchronization_PlaysOutAudioAndVideoWithVideoFasterThanAudioDrift) { TestAudioVideoSync(FecMode::kOn, CreateOrder::kVideoFirst, DriftingClock::kNoDrift, DriftingClock::PercentsFaster(30.0f), @@ -398,7 +411,7 @@ void CallPerfTest::TestCaptureNtpTime( } void OnFrame(const VideoFrame& 
video_frame) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (video_frame.ntp_time_ms() <= 0) { // Haven't got enough RTCP SR in order to calculate the capture ntp // time. @@ -434,7 +447,7 @@ void CallPerfTest::TestCaptureNtpTime( } Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -477,7 +490,7 @@ void CallPerfTest::TestCaptureNtpTime( time_offset_ms_list_, "ms", true); } - rtc::CriticalSection crit_; + Mutex mutex_; const BuiltInNetworkBehaviorConfig net_config_; Clock* const clock_; int threshold_ms_; @@ -488,7 +501,7 @@ void CallPerfTest::TestCaptureNtpTime( bool rtp_start_timestamp_set_; uint32_t rtp_start_timestamp_; typedef std::map FrameCaptureTimeList; - FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&crit_); + FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&mutex_); std::vector time_offset_ms_list_; } test(net_config, threshold_ms, start_time_ms, run_time_ms); @@ -497,7 +510,7 @@ void CallPerfTest::TestCaptureNtpTime( // Flaky tests, disabled on Mac and Windows due to webrtc:8291. 
#if !(defined(WEBRTC_MAC) || defined(WEBRTC_WIN)) -TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkDelay) { +TEST_F(CallPerfTest, Real_Estimated_CaptureNtpTimeWithNetworkDelay) { BuiltInNetworkBehaviorConfig net_config; net_config.queue_delay_ms = 100; // TODO(wu): lower the threshold as the calculation/estimatation becomes more @@ -508,7 +521,7 @@ TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkDelay) { TestCaptureNtpTime(net_config, kThresholdMs, kStartTimeMs, kRunTimeMs); } -TEST_F(CallPerfTest, CaptureNtpTimeWithNetworkJitter) { +TEST_F(CallPerfTest, Real_Estimated_CaptureNtpTimeWithNetworkJitter) { BuiltInNetworkBehaviorConfig net_config; net_config.queue_delay_ms = 100; net_config.delay_standard_deviation_ms = 10; @@ -698,11 +711,11 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { RunBaseTest(&test); } -TEST_F(CallPerfTest, PadsToMinTransmitBitrate) { +TEST_F(CallPerfTest, Bitrate_Kbps_PadsToMinTransmitBitrate) { TestMinTransmitBitrate(true); } -TEST_F(CallPerfTest, NoPadWithoutMinTransmitBitrate) { +TEST_F(CallPerfTest, Bitrate_Kbps_NoPadWithoutMinTransmitBitrate) { TestMinTransmitBitrate(false); } @@ -718,6 +731,11 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { static const uint32_t kInitialBitrateKbps = 400; static const uint32_t kReconfigureThresholdKbps = 600; + // We get lower bitrate than expected by this test if the following field + // trial is enabled. 
+ test::ScopedFieldTrials field_trials( + "WebRTC-SendSideBwe-WithOverhead/Disabled/"); + class VideoStreamFactory : public VideoEncoderConfig::VideoStreamFactoryInterface { public: @@ -991,11 +1009,11 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, // TODO(bugs.webrtc.org/8878) #if defined(WEBRTC_MAC) -#define MAYBE_MinVideoAndAudioBitrate DISABLED_MinVideoAndAudioBitrate +#define MAYBE_Min_Bitrate_VideoAndAudio DISABLED_Min_Bitrate_VideoAndAudio #else -#define MAYBE_MinVideoAndAudioBitrate MinVideoAndAudioBitrate +#define MAYBE_Min_Bitrate_VideoAndAudio Min_Bitrate_VideoAndAudio #endif -TEST_F(CallPerfTest, MAYBE_MinVideoAndAudioBitrate) { +TEST_F(CallPerfTest, MAYBE_Min_Bitrate_VideoAndAudio) { TestMinAudioVideoBitrate(110, 40, -10, 10000, 70000, 200000); } diff --git a/call/call_unittest.cc b/call/call_unittest.cc index a8cf534f90..e165107d98 100644 --- a/call/call_unittest.cc +++ b/call/call_unittest.cc @@ -20,28 +20,39 @@ #include "api/rtc_event_log/rtc_event_log.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/test/mock_audio_mixer.h" +#include "api/test/video/function_video_encoder_factory.h" #include "api/transport/field_trial_based_config.h" +#include "api/video/builtin_video_bitrate_allocator_factory.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" +#include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/test/mock_resource_listener.h" #include "call/audio_state.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_processing/include/mock_audio_processing.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "test/fake_encoder.h" #include "test/gtest.h" #include "test/mock_audio_decoder_factory.h" #include "test/mock_transport.h" +#include "test/run_loop.h" namespace { +using ::testing::_; +using ::testing::Contains; +using ::testing::StrictMock; + struct CallHelper { 
- CallHelper() { + explicit CallHelper(bool use_null_audio_processing) { task_queue_factory_ = webrtc::CreateDefaultTaskQueueFactory(); webrtc::AudioState::Config audio_state_config; audio_state_config.audio_mixer = new rtc::RefCountedObject(); audio_state_config.audio_processing = - new rtc::RefCountedObject(); + use_null_audio_processing + ? nullptr + : new rtc::RefCountedObject(); audio_state_config.audio_device_module = new rtc::RefCountedObject(); webrtc::Call::Config config(&event_log_); @@ -54,6 +65,7 @@ struct CallHelper { webrtc::Call* operator->() { return call_.get(); } private: + webrtc::test::RunLoop loop_; webrtc::RtcEventLogNull event_log_; webrtc::FieldTrialBasedConfig field_trials_; std::unique_ptr task_queue_factory_; @@ -63,237 +75,454 @@ struct CallHelper { namespace webrtc { +namespace { + +rtc::scoped_refptr FindResourceWhoseNameContains( + const std::vector>& resources, + const std::string& name_contains) { + for (const auto& resource : resources) { + if (resource->Name().find(name_contains) != std::string::npos) + return resource; + } + return nullptr; +} + +} // namespace + TEST(CallTest, ConstructDestruct) { - CallHelper call; + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + } } TEST(CallTest, CreateDestroy_AudioSendStream) { - CallHelper call; - MockTransport send_transport; - AudioSendStream::Config config(&send_transport); - config.rtp.ssrc = 42; - AudioSendStream* stream = call->CreateAudioSendStream(config); - EXPECT_NE(stream, nullptr); - call->DestroyAudioSendStream(stream); + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + MockTransport send_transport; + AudioSendStream::Config config(&send_transport); + config.rtp.ssrc = 42; + AudioSendStream* stream = call->CreateAudioSendStream(config); + EXPECT_NE(stream, nullptr); + call->DestroyAudioSendStream(stream); + } } TEST(CallTest, CreateDestroy_AudioReceiveStream) { - 
CallHelper call; - AudioReceiveStream::Config config; - MockTransport rtcp_send_transport; - config.rtp.remote_ssrc = 42; - config.rtcp_send_transport = &rtcp_send_transport; - config.decoder_factory = - new rtc::RefCountedObject(); - AudioReceiveStream* stream = call->CreateAudioReceiveStream(config); - EXPECT_NE(stream, nullptr); - call->DestroyAudioReceiveStream(stream); + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + AudioReceiveStream::Config config; + MockTransport rtcp_send_transport; + config.rtp.remote_ssrc = 42; + config.rtcp_send_transport = &rtcp_send_transport; + config.decoder_factory = + new rtc::RefCountedObject(); + AudioReceiveStream* stream = call->CreateAudioReceiveStream(config); + EXPECT_NE(stream, nullptr); + call->DestroyAudioReceiveStream(stream); + } } TEST(CallTest, CreateDestroy_AudioSendStreams) { - CallHelper call; - MockTransport send_transport; - AudioSendStream::Config config(&send_transport); - std::list streams; - for (int i = 0; i < 2; ++i) { - for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) { - config.rtp.ssrc = ssrc; - AudioSendStream* stream = call->CreateAudioSendStream(config); - EXPECT_NE(stream, nullptr); - if (ssrc & 1) { - streams.push_back(stream); - } else { - streams.push_front(stream); + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + MockTransport send_transport; + AudioSendStream::Config config(&send_transport); + std::list streams; + for (int i = 0; i < 2; ++i) { + for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) { + config.rtp.ssrc = ssrc; + AudioSendStream* stream = call->CreateAudioSendStream(config); + EXPECT_NE(stream, nullptr); + if (ssrc & 1) { + streams.push_back(stream); + } else { + streams.push_front(stream); + } } + for (auto s : streams) { + call->DestroyAudioSendStream(s); + } + streams.clear(); } - for (auto s : streams) { - call->DestroyAudioSendStream(s); - } - 
streams.clear(); } } TEST(CallTest, CreateDestroy_AudioReceiveStreams) { - CallHelper call; - AudioReceiveStream::Config config; - MockTransport rtcp_send_transport; - config.rtcp_send_transport = &rtcp_send_transport; - config.decoder_factory = - new rtc::RefCountedObject(); - std::list streams; - for (int i = 0; i < 2; ++i) { - for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) { - config.rtp.remote_ssrc = ssrc; - AudioReceiveStream* stream = call->CreateAudioReceiveStream(config); - EXPECT_NE(stream, nullptr); - if (ssrc & 1) { - streams.push_back(stream); - } else { - streams.push_front(stream); + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + AudioReceiveStream::Config config; + MockTransport rtcp_send_transport; + config.rtcp_send_transport = &rtcp_send_transport; + config.decoder_factory = + new rtc::RefCountedObject(); + std::list streams; + for (int i = 0; i < 2; ++i) { + for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) { + config.rtp.remote_ssrc = ssrc; + AudioReceiveStream* stream = call->CreateAudioReceiveStream(config); + EXPECT_NE(stream, nullptr); + if (ssrc & 1) { + streams.push_back(stream); + } else { + streams.push_front(stream); + } } + for (auto s : streams) { + call->DestroyAudioReceiveStream(s); + } + streams.clear(); } - for (auto s : streams) { - call->DestroyAudioReceiveStream(s); - } - streams.clear(); } } TEST(CallTest, CreateDestroy_AssociateAudioSendReceiveStreams_RecvFirst) { - CallHelper call; - AudioReceiveStream::Config recv_config; - MockTransport rtcp_send_transport; - recv_config.rtp.remote_ssrc = 42; - recv_config.rtp.local_ssrc = 777; - recv_config.rtcp_send_transport = &rtcp_send_transport; - recv_config.decoder_factory = - new rtc::RefCountedObject(); - AudioReceiveStream* recv_stream = call->CreateAudioReceiveStream(recv_config); - EXPECT_NE(recv_stream, nullptr); + for (bool use_null_audio_processing : {false, true}) { + CallHelper 
call(use_null_audio_processing); + AudioReceiveStream::Config recv_config; + MockTransport rtcp_send_transport; + recv_config.rtp.remote_ssrc = 42; + recv_config.rtp.local_ssrc = 777; + recv_config.rtcp_send_transport = &rtcp_send_transport; + recv_config.decoder_factory = + new rtc::RefCountedObject(); + AudioReceiveStream* recv_stream = + call->CreateAudioReceiveStream(recv_config); + EXPECT_NE(recv_stream, nullptr); - MockTransport send_transport; - AudioSendStream::Config send_config(&send_transport); - send_config.rtp.ssrc = 777; - AudioSendStream* send_stream = call->CreateAudioSendStream(send_config); - EXPECT_NE(send_stream, nullptr); + MockTransport send_transport; + AudioSendStream::Config send_config(&send_transport); + send_config.rtp.ssrc = 777; + AudioSendStream* send_stream = call->CreateAudioSendStream(send_config); + EXPECT_NE(send_stream, nullptr); - internal::AudioReceiveStream* internal_recv_stream = - static_cast(recv_stream); - EXPECT_EQ(send_stream, - internal_recv_stream->GetAssociatedSendStreamForTesting()); + internal::AudioReceiveStream* internal_recv_stream = + static_cast(recv_stream); + EXPECT_EQ(send_stream, + internal_recv_stream->GetAssociatedSendStreamForTesting()); - call->DestroyAudioSendStream(send_stream); - EXPECT_EQ(nullptr, internal_recv_stream->GetAssociatedSendStreamForTesting()); + call->DestroyAudioSendStream(send_stream); + EXPECT_EQ(nullptr, + internal_recv_stream->GetAssociatedSendStreamForTesting()); - call->DestroyAudioReceiveStream(recv_stream); + call->DestroyAudioReceiveStream(recv_stream); + } } TEST(CallTest, CreateDestroy_AssociateAudioSendReceiveStreams_SendFirst) { - CallHelper call; - MockTransport send_transport; - AudioSendStream::Config send_config(&send_transport); - send_config.rtp.ssrc = 777; - AudioSendStream* send_stream = call->CreateAudioSendStream(send_config); - EXPECT_NE(send_stream, nullptr); - - AudioReceiveStream::Config recv_config; - MockTransport rtcp_send_transport; - 
recv_config.rtp.remote_ssrc = 42; - recv_config.rtp.local_ssrc = 777; - recv_config.rtcp_send_transport = &rtcp_send_transport; - recv_config.decoder_factory = - new rtc::RefCountedObject(); - AudioReceiveStream* recv_stream = call->CreateAudioReceiveStream(recv_config); - EXPECT_NE(recv_stream, nullptr); - - internal::AudioReceiveStream* internal_recv_stream = - static_cast(recv_stream); - EXPECT_EQ(send_stream, - internal_recv_stream->GetAssociatedSendStreamForTesting()); - - call->DestroyAudioReceiveStream(recv_stream); - - call->DestroyAudioSendStream(send_stream); + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + MockTransport send_transport; + AudioSendStream::Config send_config(&send_transport); + send_config.rtp.ssrc = 777; + AudioSendStream* send_stream = call->CreateAudioSendStream(send_config); + EXPECT_NE(send_stream, nullptr); + + AudioReceiveStream::Config recv_config; + MockTransport rtcp_send_transport; + recv_config.rtp.remote_ssrc = 42; + recv_config.rtp.local_ssrc = 777; + recv_config.rtcp_send_transport = &rtcp_send_transport; + recv_config.decoder_factory = + new rtc::RefCountedObject(); + AudioReceiveStream* recv_stream = + call->CreateAudioReceiveStream(recv_config); + EXPECT_NE(recv_stream, nullptr); + + internal::AudioReceiveStream* internal_recv_stream = + static_cast(recv_stream); + EXPECT_EQ(send_stream, + internal_recv_stream->GetAssociatedSendStreamForTesting()); + + call->DestroyAudioReceiveStream(recv_stream); + + call->DestroyAudioSendStream(send_stream); + } } TEST(CallTest, CreateDestroy_FlexfecReceiveStream) { - CallHelper call; - MockTransport rtcp_send_transport; - FlexfecReceiveStream::Config config(&rtcp_send_transport); - config.payload_type = 118; - config.remote_ssrc = 38837212; - config.protected_media_ssrcs = {27273}; - - FlexfecReceiveStream* stream = call->CreateFlexfecReceiveStream(config); - EXPECT_NE(stream, nullptr); - call->DestroyFlexfecReceiveStream(stream); 
+ for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + MockTransport rtcp_send_transport; + FlexfecReceiveStream::Config config(&rtcp_send_transport); + config.payload_type = 118; + config.remote_ssrc = 38837212; + config.protected_media_ssrcs = {27273}; + + FlexfecReceiveStream* stream = call->CreateFlexfecReceiveStream(config); + EXPECT_NE(stream, nullptr); + call->DestroyFlexfecReceiveStream(stream); + } } TEST(CallTest, CreateDestroy_FlexfecReceiveStreams) { - CallHelper call; - MockTransport rtcp_send_transport; - FlexfecReceiveStream::Config config(&rtcp_send_transport); - config.payload_type = 118; - std::list streams; - - for (int i = 0; i < 2; ++i) { - for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) { - config.remote_ssrc = ssrc; - config.protected_media_ssrcs = {ssrc + 1}; - FlexfecReceiveStream* stream = call->CreateFlexfecReceiveStream(config); - EXPECT_NE(stream, nullptr); - if (ssrc & 1) { - streams.push_back(stream); - } else { - streams.push_front(stream); + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + MockTransport rtcp_send_transport; + FlexfecReceiveStream::Config config(&rtcp_send_transport); + config.payload_type = 118; + std::list streams; + + for (int i = 0; i < 2; ++i) { + for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) { + config.remote_ssrc = ssrc; + config.protected_media_ssrcs = {ssrc + 1}; + FlexfecReceiveStream* stream = call->CreateFlexfecReceiveStream(config); + EXPECT_NE(stream, nullptr); + if (ssrc & 1) { + streams.push_back(stream); + } else { + streams.push_front(stream); + } } + for (auto s : streams) { + call->DestroyFlexfecReceiveStream(s); + } + streams.clear(); } - for (auto s : streams) { - call->DestroyFlexfecReceiveStream(s); - } - streams.clear(); } } TEST(CallTest, MultipleFlexfecReceiveStreamsProtectingSingleVideoStream) { - CallHelper call; - MockTransport rtcp_send_transport; - 
FlexfecReceiveStream::Config config(&rtcp_send_transport); - config.payload_type = 118; - config.protected_media_ssrcs = {1324234}; - FlexfecReceiveStream* stream; - std::list streams; - - config.remote_ssrc = 838383; - stream = call->CreateFlexfecReceiveStream(config); - EXPECT_NE(stream, nullptr); - streams.push_back(stream); - - config.remote_ssrc = 424993; - stream = call->CreateFlexfecReceiveStream(config); - EXPECT_NE(stream, nullptr); - streams.push_back(stream); - - config.remote_ssrc = 99383; - stream = call->CreateFlexfecReceiveStream(config); - EXPECT_NE(stream, nullptr); - streams.push_back(stream); - - config.remote_ssrc = 5548; - stream = call->CreateFlexfecReceiveStream(config); - EXPECT_NE(stream, nullptr); - streams.push_back(stream); - - for (auto s : streams) { - call->DestroyFlexfecReceiveStream(s); + for (bool use_null_audio_processing : {false, true}) { + CallHelper call(use_null_audio_processing); + MockTransport rtcp_send_transport; + FlexfecReceiveStream::Config config(&rtcp_send_transport); + config.payload_type = 118; + config.protected_media_ssrcs = {1324234}; + FlexfecReceiveStream* stream; + std::list streams; + + config.remote_ssrc = 838383; + stream = call->CreateFlexfecReceiveStream(config); + EXPECT_NE(stream, nullptr); + streams.push_back(stream); + + config.remote_ssrc = 424993; + stream = call->CreateFlexfecReceiveStream(config); + EXPECT_NE(stream, nullptr); + streams.push_back(stream); + + config.remote_ssrc = 99383; + stream = call->CreateFlexfecReceiveStream(config); + EXPECT_NE(stream, nullptr); + streams.push_back(stream); + + config.remote_ssrc = 5548; + stream = call->CreateFlexfecReceiveStream(config); + EXPECT_NE(stream, nullptr); + streams.push_back(stream); + + for (auto s : streams) { + call->DestroyFlexfecReceiveStream(s); + } } } TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) { constexpr uint32_t kSSRC = 12345; - CallHelper call; + for (bool use_null_audio_processing : {false, true}) { + 
CallHelper call(use_null_audio_processing); - auto create_stream_and_get_rtp_state = [&](uint32_t ssrc) { - MockTransport send_transport; - AudioSendStream::Config config(&send_transport); - config.rtp.ssrc = ssrc; - AudioSendStream* stream = call->CreateAudioSendStream(config); - const RtpState rtp_state = - static_cast(stream)->GetRtpState(); - call->DestroyAudioSendStream(stream); - return rtp_state; + auto create_stream_and_get_rtp_state = [&](uint32_t ssrc) { + MockTransport send_transport; + AudioSendStream::Config config(&send_transport); + config.rtp.ssrc = ssrc; + AudioSendStream* stream = call->CreateAudioSendStream(config); + const RtpState rtp_state = + static_cast(stream)->GetRtpState(); + call->DestroyAudioSendStream(stream); + return rtp_state; + }; + + const RtpState rtp_state1 = create_stream_and_get_rtp_state(kSSRC); + const RtpState rtp_state2 = create_stream_and_get_rtp_state(kSSRC); + + EXPECT_EQ(rtp_state1.sequence_number, rtp_state2.sequence_number); + EXPECT_EQ(rtp_state1.start_timestamp, rtp_state2.start_timestamp); + EXPECT_EQ(rtp_state1.timestamp, rtp_state2.timestamp); + EXPECT_EQ(rtp_state1.capture_time_ms, rtp_state2.capture_time_ms); + EXPECT_EQ(rtp_state1.last_timestamp_time_ms, + rtp_state2.last_timestamp_time_ms); + } +} + +TEST(CallTest, AddAdaptationResourceAfterCreatingVideoSendStream) { + CallHelper call(true); + // Create a VideoSendStream. 
+ test::FunctionVideoEncoderFactory fake_encoder_factory([]() { + return std::make_unique(Clock::GetRealTimeClock()); + }); + auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); + MockTransport send_transport; + VideoSendStream::Config config(&send_transport); + config.rtp.payload_type = 110; + config.rtp.ssrcs = {42}; + config.encoder_settings.encoder_factory = &fake_encoder_factory; + config.encoder_settings.bitrate_allocator_factory = + bitrate_allocator_factory.get(); + VideoEncoderConfig encoder_config; + encoder_config.max_bitrate_bps = 1337; + VideoSendStream* stream1 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream1, nullptr); + config.rtp.ssrcs = {43}; + VideoSendStream* stream2 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream2, nullptr); + // Add a fake resource. + auto fake_resource = FakeResource::Create("FakeResource"); + call->AddAdaptationResource(fake_resource); + // An adapter resource mirroring the |fake_resource| should now be present on + // both streams. + auto injected_resource1 = FindResourceWhoseNameContains( + stream1->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource1); + auto injected_resource2 = FindResourceWhoseNameContains( + stream2->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource2); + // Overwrite the real resource listeners with mock ones to verify the signal + // gets through. 
+ injected_resource1->SetResourceListener(nullptr); + StrictMock resource_listener1; + EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource1, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + injected_resource1->SetResourceListener(&resource_listener1); + injected_resource2->SetResourceListener(nullptr); + StrictMock resource_listener2; + EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource2, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + injected_resource2->SetResourceListener(&resource_listener2); + // The kOveruse signal should get to our resource listeners. + fake_resource->SetUsageState(ResourceUsageState::kOveruse); + call->DestroyVideoSendStream(stream1); + call->DestroyVideoSendStream(stream2); +} + +TEST(CallTest, AddAdaptationResourceBeforeCreatingVideoSendStream) { + CallHelper call(true); + // Add a fake resource. + auto fake_resource = FakeResource::Create("FakeResource"); + call->AddAdaptationResource(fake_resource); + // Create a VideoSendStream. 
+ test::FunctionVideoEncoderFactory fake_encoder_factory([]() { + return std::make_unique(Clock::GetRealTimeClock()); + }); + auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); + MockTransport send_transport; + VideoSendStream::Config config(&send_transport); + config.rtp.payload_type = 110; + config.rtp.ssrcs = {42}; + config.encoder_settings.encoder_factory = &fake_encoder_factory; + config.encoder_settings.bitrate_allocator_factory = + bitrate_allocator_factory.get(); + VideoEncoderConfig encoder_config; + encoder_config.max_bitrate_bps = 1337; + VideoSendStream* stream1 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream1, nullptr); + config.rtp.ssrcs = {43}; + VideoSendStream* stream2 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream2, nullptr); + // An adapter resource mirroring the |fake_resource| should be present on both + // streams. + auto injected_resource1 = FindResourceWhoseNameContains( + stream1->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource1); + auto injected_resource2 = FindResourceWhoseNameContains( + stream2->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource2); + // Overwrite the real resource listeners with mock ones to verify the signal + // gets through. 
+ injected_resource1->SetResourceListener(nullptr); + StrictMock resource_listener1; + EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource1, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + injected_resource1->SetResourceListener(&resource_listener1); + injected_resource2->SetResourceListener(nullptr); + StrictMock resource_listener2; + EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource2, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + injected_resource2->SetResourceListener(&resource_listener2); + // The kUnderuse signal should get to our resource listeners. + fake_resource->SetUsageState(ResourceUsageState::kUnderuse); + call->DestroyVideoSendStream(stream1); + call->DestroyVideoSendStream(stream2); +} + +TEST(CallTest, SharedModuleThread) { + class SharedModuleThreadUser : public Module { + public: + SharedModuleThreadUser(ProcessThread* expected_thread, + rtc::scoped_refptr thread) + : expected_thread_(expected_thread), thread_(std::move(thread)) { + thread_->EnsureStarted(); + thread_->process_thread()->RegisterModule(this, RTC_FROM_HERE); + } + + ~SharedModuleThreadUser() override { + thread_->process_thread()->DeRegisterModule(this); + EXPECT_TRUE(thread_was_checked_); + } + + private: + int64_t TimeUntilNextProcess() override { return 1000; } + void Process() override {} + void ProcessThreadAttached(ProcessThread* process_thread) override { + if (!process_thread) { + // Being detached. 
+ return; + } + EXPECT_EQ(process_thread, expected_thread_); + thread_was_checked_ = true; + } + + bool thread_was_checked_ = false; + ProcessThread* const expected_thread_; + rtc::scoped_refptr thread_; }; - const RtpState rtp_state1 = create_stream_and_get_rtp_state(kSSRC); - const RtpState rtp_state2 = create_stream_and_get_rtp_state(kSSRC); + // Create our test instance and pass a lambda to it that gets executed when + // the reference count goes back to 1 - meaning |shared| again is the only + // reference, which means we can free the variable and deallocate the thread. + rtc::scoped_refptr shared; + shared = + SharedModuleThread::Create(ProcessThread::Create("MySharedProcessThread"), + [&shared]() { shared = nullptr; }); + ProcessThread* process_thread = shared->process_thread(); + + ASSERT_TRUE(shared.get()); + + { + // Create a couple of users of the thread. + // These instances are in a separate scope to trigger the callback to our + // lambda, which will run when these go out of scope. + SharedModuleThreadUser user1(process_thread, shared); + SharedModuleThreadUser user2(process_thread, shared); + } - EXPECT_EQ(rtp_state1.sequence_number, rtp_state2.sequence_number); - EXPECT_EQ(rtp_state1.start_timestamp, rtp_state2.start_timestamp); - EXPECT_EQ(rtp_state1.timestamp, rtp_state2.timestamp); - EXPECT_EQ(rtp_state1.capture_time_ms, rtp_state2.capture_time_ms); - EXPECT_EQ(rtp_state1.last_timestamp_time_ms, - rtp_state2.last_timestamp_time_ms); - EXPECT_EQ(rtp_state1.media_has_been_sent, rtp_state2.media_has_been_sent); + // The thread should now have been stopped and freed. 
+ EXPECT_FALSE(shared); } } // namespace webrtc diff --git a/call/degraded_call.cc b/call/degraded_call.cc index 9c8d2be508..e978f1000f 100644 --- a/call/degraded_call.cc +++ b/call/degraded_call.cc @@ -235,6 +235,14 @@ void DegradedCall::DestroyVideoReceiveStream( call_->DestroyVideoReceiveStream(receive_stream); } +int32_t DegradedCall::StartRecorder(int32_t dir, std::string path) { + return -1; +} + +int32_t DegradedCall::StopRecorder(int32_t dir) { + return -1; +} + FlexfecReceiveStream* DegradedCall::CreateFlexfecReceiveStream( const FlexfecReceiveStream::Config& config) { return call_->CreateFlexfecReceiveStream(config); @@ -245,6 +253,11 @@ void DegradedCall::DestroyFlexfecReceiveStream( call_->DestroyFlexfecReceiveStream(receive_stream); } +void DegradedCall::AddAdaptationResource( + rtc::scoped_refptr resource) { + call_->AddAdaptationResource(std::move(resource)); +} + PacketReceiver* DegradedCall::Receiver() { if (receive_config_) { return this; @@ -261,6 +274,10 @@ Call::Stats DegradedCall::GetStats() const { return call_->GetStats(); } +const WebRtcKeyValueConfig& DegradedCall::trials() const { + return call_->trials(); +} + void DegradedCall::SignalChannelNetworkState(MediaType media, NetworkState state) { call_->SignalChannelNetworkState(media, state); diff --git a/call/degraded_call.h b/call/degraded_call.h index 49230ca1ed..fb410edcc1 100644 --- a/call/degraded_call.h +++ b/call/degraded_call.h @@ -72,17 +72,24 @@ class DegradedCall : public Call, private PacketReceiver { VideoReceiveStream::Config configuration) override; void DestroyVideoReceiveStream(VideoReceiveStream* receive_stream) override; + int32_t StartRecorder(int32_t dir, std::string path) override; + int32_t StopRecorder(int32_t dir) override; + FlexfecReceiveStream* CreateFlexfecReceiveStream( const FlexfecReceiveStream::Config& config) override; void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) override; + void AddAdaptationResource(rtc::scoped_refptr resource) 
override; + PacketReceiver* Receiver() override; RtpTransportControllerSendInterface* GetTransportControllerSend() override; Stats GetStats() const override; + const WebRtcKeyValueConfig& trials() const override; + void SignalChannelNetworkState(MediaType media, NetworkState state) override; void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; diff --git a/call/fake_network_pipe.cc b/call/fake_network_pipe.cc index 8844700e67..324a7bd793 100644 --- a/call/fake_network_pipe.cc +++ b/call/fake_network_pipe.cc @@ -122,17 +122,17 @@ FakeNetworkPipe::~FakeNetworkPipe() { } void FakeNetworkPipe::SetReceiver(PacketReceiver* receiver) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); receiver_ = receiver; } void FakeNetworkPipe::AddActiveTransport(Transport* transport) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); active_transports_[transport]++; } void FakeNetworkPipe::RemoveActiveTransport(Transport* transport) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); auto it = active_transports_.find(transport); RTC_CHECK(it != active_transports_.end()); if (--(it->second) == 0) { @@ -186,7 +186,7 @@ PacketReceiver::DeliveryStatus FakeNetworkPipe::DeliverPacket( } void FakeNetworkPipe::SetClockOffset(int64_t offset_ms) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); clock_offset_ms_ = offset_ms; } @@ -198,7 +198,7 @@ bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet, bool is_rtcp, MediaType media_type, absl::optional packet_time_us) { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); int64_t time_now_us = clock_->TimeInMicroseconds(); return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us, time_now_us, options, is_rtcp, media_type, @@ -209,7 +209,7 @@ bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet, absl::optional options, bool is_rtcp, Transport* transport) { - rtc::CritScope 
crit(&process_lock_); + MutexLock lock(&process_lock_); int64_t time_now_us = clock_->TimeInMicroseconds(); return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us, time_now_us, options, is_rtcp, @@ -233,7 +233,7 @@ bool FakeNetworkPipe::EnqueuePacket(NetworkPacket&& net_packet) { } float FakeNetworkPipe::PercentageLoss() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); if (sent_packets_ == 0) return 0; @@ -242,7 +242,7 @@ float FakeNetworkPipe::PercentageLoss() { } int FakeNetworkPipe::AverageDelay() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); if (sent_packets_ == 0) return 0; @@ -251,12 +251,12 @@ int FakeNetworkPipe::AverageDelay() { } size_t FakeNetworkPipe::DroppedPackets() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); return dropped_packets_; } size_t FakeNetworkPipe::SentPackets() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); return sent_packets_; } @@ -264,7 +264,7 @@ void FakeNetworkPipe::Process() { int64_t time_now_us; std::queue packets_to_deliver; { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); time_now_us = clock_->TimeInMicroseconds(); if (time_now_us - last_log_time_us_ > kLogIntervalMs * 1000) { int64_t queueing_delay_us = 0; @@ -318,7 +318,7 @@ void FakeNetworkPipe::Process() { } } - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); while (!packets_to_deliver.empty()) { NetworkPacket packet = std::move(packets_to_deliver.front()); packets_to_deliver.pop(); @@ -354,7 +354,7 @@ void FakeNetworkPipe::DeliverNetworkPacket(NetworkPacket* packet) { } absl::optional FakeNetworkPipe::TimeUntilNextProcess() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); absl::optional delivery_us = network_behavior_->NextDeliveryTimeUs(); if (delivery_us) { int64_t delay_us = *delivery_us - clock_->TimeInMicroseconds(); @@ -364,17 +364,17 @@ absl::optional 
FakeNetworkPipe::TimeUntilNextProcess() { } bool FakeNetworkPipe::HasReceiver() const { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); return receiver_ != nullptr; } void FakeNetworkPipe::DeliverPacketWithLock(NetworkPacket* packet) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); DeliverNetworkPacket(packet); } void FakeNetworkPipe::ResetStats() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); dropped_packets_ = 0; sent_packets_ = 0; total_packet_delay_us_ = 0; diff --git a/call/fake_network_pipe.h b/call/fake_network_pipe.h index 24340a2f29..1e5bb513bf 100644 --- a/call/fake_network_pipe.h +++ b/call/fake_network_pipe.h @@ -24,7 +24,7 @@ #include "call/call.h" #include "call/simulated_packet_receiver.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -204,14 +204,14 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { Clock* const clock_; // |config_lock| guards the mostly constant things like the callbacks. - rtc::CriticalSection config_lock_; + mutable Mutex config_lock_; const std::unique_ptr network_behavior_; PacketReceiver* receiver_ RTC_GUARDED_BY(config_lock_); Transport* const global_transport_; // |process_lock| guards the data structures involved in delay and loss // processes, such as the packet queues. - rtc::CriticalSection process_lock_; + Mutex process_lock_; // Packets are added at the back of the deque, this makes the deque ordered // by increasing send time. 
The common case when removing packets from the // deque is removing early packets, which will be close to the front of the diff --git a/call/fake_network_pipe_unittest.cc b/call/fake_network_pipe_unittest.cc index 9c4a3bf755..852a427222 100644 --- a/call/fake_network_pipe_unittest.cc +++ b/call/fake_network_pipe_unittest.cc @@ -24,8 +24,10 @@ namespace webrtc { class MockReceiver : public PacketReceiver { public: - MOCK_METHOD3(DeliverPacket, - DeliveryStatus(MediaType, rtc::CopyOnWriteBuffer, int64_t)); + MOCK_METHOD(DeliveryStatus, + DeliverPacket, + (MediaType, rtc::CopyOnWriteBuffer, int64_t), + (override)); virtual ~MockReceiver() = default; }; diff --git a/call/flexfec_receive_stream_impl.cc b/call/flexfec_receive_stream_impl.cc index 40005efe83..e629bca347 100644 --- a/call/flexfec_receive_stream_impl.cc +++ b/call/flexfec_receive_stream_impl.cc @@ -22,7 +22,6 @@ #include "call/rtp_stream_receiver_controller_interface.h" #include "modules/rtp_rtcp/include/flexfec_receiver.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" @@ -119,12 +118,12 @@ std::unique_ptr MaybeCreateFlexfecReceiver( recovered_packet_receiver)); } -std::unique_ptr CreateRtpRtcpModule( +std::unique_ptr CreateRtpRtcpModule( Clock* clock, ReceiveStatistics* receive_statistics, const FlexfecReceiveStreamImpl::Config& config, RtcpRttStats* rtt_stats) { - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.audio = false; configuration.receiver_only = true; configuration.clock = clock; @@ -132,7 +131,7 @@ std::unique_ptr CreateRtpRtcpModule( configuration.outgoing_transport = config.rtcp_send_transport; configuration.rtt_stats = rtt_stats; configuration.local_media_ssrc = config.local_ssrc; - return RtpRtcp::Create(configuration); + return 
ModuleRtpRtcpImpl2::Create(configuration); } } // namespace diff --git a/call/flexfec_receive_stream_impl.h b/call/flexfec_receive_stream_impl.h index d4fdc7431a..888dae9ebd 100644 --- a/call/flexfec_receive_stream_impl.h +++ b/call/flexfec_receive_stream_impl.h @@ -15,6 +15,7 @@ #include "call/flexfec_receive_stream.h" #include "call/rtp_packet_sink_interface.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -55,7 +56,7 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { // RTCP reporting. const std::unique_ptr rtp_receive_statistics_; - const std::unique_ptr rtp_rtcp_; + const std::unique_ptr rtp_rtcp_; ProcessThread* process_thread_; std::unique_ptr rtp_stream_receiver_; diff --git a/call/rampup_tests.cc b/call/rampup_tests.cc index 0377126821..89fbe3dde7 100644 --- a/call/rampup_tests.cc +++ b/call/rampup_tests.cc @@ -38,7 +38,7 @@ ABSL_FLAG(std::string, namespace webrtc { namespace { -constexpr TimeDelta kPollInterval = TimeDelta::Millis<20>(); +constexpr TimeDelta kPollInterval = TimeDelta::Millis(20); static const int kExpectedHighVideoBitrateBps = 80000; static const int kExpectedHighAudioBitrateBps = 30000; static const int kLowBandwidthLimitBps = 20000; @@ -362,14 +362,14 @@ void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream, void RampUpTester::TriggerTestDone() { RTC_DCHECK_GE(test_start_ms_, 0); - // TODO(holmer): Add audio send stats here too when those APIs are available. - if (!send_stream_) - return; - // Stop polling stats. // Corner case for field_trials=WebRTC-QuickPerfTest/Enabled/ SendTask(RTC_FROM_HERE, task_queue_, [this] { pending_task_.Stop(); }); + // TODO(holmer): Add audio send stats here too when those APIs are available. + if (!send_stream_) + return; + VideoSendStream::Stats send_stats = send_stream_->GetStats(); send_stream_ = nullptr; // To avoid dereferencing a bad pointer. 
diff --git a/call/receive_time_calculator.cc b/call/receive_time_calculator.cc index 0675de0aa7..94d1fd18cc 100644 --- a/call/receive_time_calculator.cc +++ b/call/receive_time_calculator.cc @@ -26,10 +26,10 @@ const char kBweReceiveTimeCorrection[] = "WebRTC-Bwe-ReceiveTimeFix"; } // namespace ReceiveTimeCalculatorConfig::ReceiveTimeCalculatorConfig() - : max_packet_time_repair("maxrep", TimeDelta::ms(2000)), - stall_threshold("stall", TimeDelta::ms(5)), - tolerance("tol", TimeDelta::ms(1)), - max_stall("maxstall", TimeDelta::seconds(5)) { + : max_packet_time_repair("maxrep", TimeDelta::Millis(2000)), + stall_threshold("stall", TimeDelta::Millis(5)), + tolerance("tol", TimeDelta::Millis(1)), + max_stall("maxstall", TimeDelta::Seconds(5)) { std::string trial_string = field_trial::FindFullName(kBweReceiveTimeCorrection); ParseFieldTrial( diff --git a/call/rtcp_demuxer.cc b/call/rtcp_demuxer.cc deleted file mode 100644 index 738109fa43..0000000000 --- a/call/rtcp_demuxer.cc +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/rtcp_demuxer.h" - -#include - -#include -#include - -#include "absl/types/optional.h" -#include "api/rtp_headers.h" -#include "call/rtcp_packet_sink_interface.h" -#include "call/rtp_rtcp_demuxer_helper.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -RtcpDemuxer::RtcpDemuxer() = default; - -RtcpDemuxer::~RtcpDemuxer() { - RTC_DCHECK(ssrc_sinks_.empty()); - RTC_DCHECK(rsid_sinks_.empty()); - RTC_DCHECK(broadcast_sinks_.empty()); -} - -void RtcpDemuxer::AddSink(uint32_t sender_ssrc, RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink)); - RTC_DCHECK(!MultimapAssociationExists(ssrc_sinks_, sender_ssrc, sink)); - ssrc_sinks_.emplace(sender_ssrc, sink); -} - -void RtcpDemuxer::AddSink(const std::string& rsid, - RtcpPacketSinkInterface* sink) { - RTC_DCHECK(IsLegalRsidName(rsid)); - RTC_DCHECK(sink); - RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink)); - RTC_DCHECK(!MultimapAssociationExists(rsid_sinks_, rsid, sink)); - rsid_sinks_.emplace(rsid, sink); -} - -void RtcpDemuxer::AddBroadcastSink(RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - RTC_DCHECK(!MultimapHasValue(ssrc_sinks_, sink)); - RTC_DCHECK(!MultimapHasValue(rsid_sinks_, sink)); - RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink)); - broadcast_sinks_.push_back(sink); -} - -void RtcpDemuxer::RemoveSink(const RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - size_t removal_count = RemoveFromMultimapByValue(&ssrc_sinks_, sink) + - RemoveFromMultimapByValue(&rsid_sinks_, sink); - RTC_DCHECK_GT(removal_count, 0); -} - -void RtcpDemuxer::RemoveBroadcastSink(const RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - auto it = std::find(broadcast_sinks_.begin(), broadcast_sinks_.end(), sink); - RTC_DCHECK(it != broadcast_sinks_.end()); - broadcast_sinks_.erase(it); -} - -void RtcpDemuxer::OnRtcpPacket(rtc::ArrayView packet) { - // Perform sender-SSRC-based 
demuxing for packets with a sender-SSRC. - absl::optional sender_ssrc = ParseRtcpPacketSenderSsrc(packet); - if (sender_ssrc) { - auto it_range = ssrc_sinks_.equal_range(*sender_ssrc); - for (auto it = it_range.first; it != it_range.second; ++it) { - it->second->OnRtcpPacket(packet); - } - } - - // All packets, even those without a sender-SSRC, are broadcast to sinks - // which listen to broadcasts. - for (RtcpPacketSinkInterface* sink : broadcast_sinks_) { - sink->OnRtcpPacket(packet); - } -} - -void RtcpDemuxer::OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) { - // Record the new SSRC association for all of the sinks that were associated - // with the RSID. - auto it_range = rsid_sinks_.equal_range(rsid); - for (auto it = it_range.first; it != it_range.second; ++it) { - RtcpPacketSinkInterface* sink = it->second; - // Watch out for pre-existing SSRC-based associations. - if (!MultimapAssociationExists(ssrc_sinks_, ssrc, sink)) { - AddSink(ssrc, sink); - } - } - - // RSIDs are uniquely associated with SSRCs; no need to keep in memory - // the RSID-to-sink association of resolved RSIDs. - rsid_sinks_.erase(it_range.first, it_range.second); -} - -} // namespace webrtc diff --git a/call/rtcp_demuxer.h b/call/rtcp_demuxer.h deleted file mode 100644 index 494e0cea4b..0000000000 --- a/call/rtcp_demuxer.h +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef CALL_RTCP_DEMUXER_H_ -#define CALL_RTCP_DEMUXER_H_ - -#include -#include -#include - -#include "api/array_view.h" -#include "call/ssrc_binding_observer.h" - -namespace webrtc { - -class RtcpPacketSinkInterface; - -// This class represents the RTCP demuxing, for a single RTP session (i.e., one -// SSRC space, see RFC 7656). It isn't thread aware, leaving responsibility of -// multithreading issues to the user of this class. -class RtcpDemuxer : public SsrcBindingObserver { - public: - RtcpDemuxer(); - ~RtcpDemuxer() override; - - // Registers a sink. The sink will be notified of incoming RTCP packets with - // that sender-SSRC. The same sink can be registered for multiple SSRCs, and - // the same SSRC can have multiple sinks. Null pointer is not allowed. - // Sinks may be associated with both an SSRC and an RSID. - // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both. - void AddSink(uint32_t sender_ssrc, RtcpPacketSinkInterface* sink); - - // Registers a sink. Once the RSID is resolved to an SSRC, the sink will be - // notified of all RTCP packets with that sender-SSRC. - // The same sink can be registered for multiple RSIDs, and - // the same RSID can have multiple sinks. Null pointer is not allowed. - // Sinks may be associated with both an SSRC and an RSID. - // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both. - void AddSink(const std::string& rsid, RtcpPacketSinkInterface* sink); - - // Registers a sink. The sink will be notified of any incoming RTCP packet. - // Null pointer is not allowed. - // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both. - void AddBroadcastSink(RtcpPacketSinkInterface* sink); - - // Undo previous AddSink() calls with the given sink. - void RemoveSink(const RtcpPacketSinkInterface* sink); - - // Undo AddBroadcastSink(). 
- void RemoveBroadcastSink(const RtcpPacketSinkInterface* sink); - - // Process a new RTCP packet and forward it to the appropriate sinks. - void OnRtcpPacket(rtc::ArrayView packet); - - // Implement SsrcBindingObserver - become notified whenever RSIDs resolve to - // an SSRC. - void OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) override; - - // TODO(eladalon): Add the ability to resolve RSIDs and inform observers, - // like in the RtpDemuxer case, once the relevant standard is finalized. - - private: - // Records the association SSRCs to sinks. - std::multimap ssrc_sinks_; - - // Records the association RSIDs to sinks. - std::multimap rsid_sinks_; - - // Sinks which will receive notifications of all incoming RTCP packets. - // Additional/removal of sinks is expected to be significantly less frequent - // than RTCP message reception; container chosen for iteration performance. - std::vector broadcast_sinks_; -}; - -} // namespace webrtc - -#endif // CALL_RTCP_DEMUXER_H_ diff --git a/call/rtcp_demuxer_unittest.cc b/call/rtcp_demuxer_unittest.cc deleted file mode 100644 index 23c305c900..0000000000 --- a/call/rtcp_demuxer_unittest.cc +++ /dev/null @@ -1,505 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/rtcp_demuxer.h" - -#include - -#include - -#include "api/rtp_headers.h" -#include "call/rtcp_packet_sink_interface.h" -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/buffer.h" -#include "rtc_base/checks.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { - -using ::testing::_; -using ::testing::AtLeast; -using ::testing::ElementsAreArray; -using ::testing::InSequence; -using ::testing::Matcher; -using ::testing::NiceMock; - -class MockRtcpPacketSink : public RtcpPacketSinkInterface { - public: - MOCK_METHOD1(OnRtcpPacket, void(rtc::ArrayView)); -}; - -class RtcpDemuxerTest : public ::testing::Test { - protected: - ~RtcpDemuxerTest() { - for (auto* sink : sinks_to_tear_down_) { - demuxer_.RemoveSink(sink); - } - for (auto* sink : broadcast_sinks_to_tear_down_) { - demuxer_.RemoveBroadcastSink(sink); - } - } - - void AddSsrcSink(uint32_t ssrc, RtcpPacketSinkInterface* sink) { - demuxer_.AddSink(ssrc, sink); - sinks_to_tear_down_.insert(sink); - } - - void AddRsidSink(const std::string& rsid, RtcpPacketSinkInterface* sink) { - demuxer_.AddSink(rsid, sink); - sinks_to_tear_down_.insert(sink); - } - - void RemoveSink(RtcpPacketSinkInterface* sink) { - sinks_to_tear_down_.erase(sink); - demuxer_.RemoveSink(sink); - } - - void AddBroadcastSink(RtcpPacketSinkInterface* sink) { - demuxer_.AddBroadcastSink(sink); - broadcast_sinks_to_tear_down_.insert(sink); - } - - void RemoveBroadcastSink(RtcpPacketSinkInterface* sink) { - broadcast_sinks_to_tear_down_.erase(sink); - demuxer_.RemoveBroadcastSink(sink); - } - - RtcpDemuxer demuxer_; - std::set sinks_to_tear_down_; - std::set broadcast_sinks_to_tear_down_; -}; - -// Produces a packet buffer representing an RTCP packet with a given SSRC, -// as it would look when sent over the wire. 
-// |distinguishing_string| allows different RTCP packets with the same SSRC -// to be distinguished. How this is set into the actual packet is -// unimportant, and depends on which RTCP message we choose to use. -rtc::Buffer CreateRtcpPacket(uint32_t ssrc, - const std::string& distinguishing_string = "") { - rtcp::Bye packet; - packet.SetSenderSsrc(ssrc); - if (distinguishing_string != "") { - // Actual way we use |distinguishing_string| is unimportant, so long - // as it ends up in the packet. - packet.SetReason(distinguishing_string); - } - return packet.Build(); -} - -static Matcher> SamePacketAs( - const rtc::Buffer& other) { - return ElementsAreArray(other.cbegin(), other.cend()); -} - -} // namespace - -TEST_F(RtcpDemuxerTest, OnRtcpPacketCalledOnCorrectSinkBySsrc) { - constexpr uint32_t ssrcs[] = {101, 202, 303}; - MockRtcpPacketSink sinks[arraysize(ssrcs)]; - for (size_t i = 0; i < arraysize(ssrcs); i++) { - AddSsrcSink(ssrcs[i], &sinks[i]); - } - - for (size_t i = 0; i < arraysize(ssrcs); i++) { - auto packet = CreateRtcpPacket(ssrcs[i]); - EXPECT_CALL(sinks[i], OnRtcpPacket(SamePacketAs(packet))).Times(1); - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, OnRtcpPacketCalledOnResolvedRsidSink) { - // Set up some RSID sinks. - const std::string rsids[] = {"a", "b", "c"}; - MockRtcpPacketSink sinks[arraysize(rsids)]; - for (size_t i = 0; i < arraysize(rsids); i++) { - AddRsidSink(rsids[i], &sinks[i]); - } - - // Only resolve one of the sinks. - constexpr size_t resolved_sink_index = 0; - constexpr uint32_t ssrc = 345; - demuxer_.OnSsrcBoundToRsid(rsids[resolved_sink_index], ssrc); - - // The resolved sink gets notifications of RTCP messages with its SSRC. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sinks[resolved_sink_index], OnRtcpPacket(SamePacketAs(packet))) - .Times(1); - - // RTCP received; expected calls triggered. 
- demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, - SingleCallbackAfterResolutionOfAnRsidToAlreadyRegisteredSsrc) { - // Associate a sink with an SSRC. - MockRtcpPacketSink sink; - constexpr uint32_t ssrc = 999; - AddSsrcSink(ssrc, &sink); - - // Associate the same sink with an RSID. - const std::string rsid = "r"; - AddRsidSink(rsid, &sink); - - // Resolve the RSID to the aforementioned SSRC. - demuxer_.OnSsrcBoundToRsid(rsid, ssrc); - - // OnRtcpPacket still called only a single time for messages with this SSRC. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, - OnRtcpPacketCalledOnAllBroadcastSinksForAllRtcpPackets) { - MockRtcpPacketSink sinks[3]; - for (MockRtcpPacketSink& sink : sinks) { - AddBroadcastSink(&sink); - } - - constexpr uint32_t ssrc = 747; - auto packet = CreateRtcpPacket(ssrc); - - for (MockRtcpPacketSink& sink : sinks) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - // RTCP received; expected calls triggered. 
- demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, PacketsDeliveredInRightOrderToNonBroadcastSink) { - constexpr uint32_t ssrc = 101; - MockRtcpPacketSink sink; - AddSsrcSink(ssrc, &sink); - - std::vector packets; - for (size_t i = 0; i < 5; i++) { - packets.push_back(CreateRtcpPacket(ssrc, std::to_string(i))); - } - - InSequence sequence; - for (const auto& packet : packets) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - for (const auto& packet : packets) { - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, PacketsDeliveredInRightOrderToBroadcastSink) { - MockRtcpPacketSink sink; - AddBroadcastSink(&sink); - - std::vector packets; - for (size_t i = 0; i < 5; i++) { - constexpr uint32_t ssrc = 101; - packets.push_back(CreateRtcpPacket(ssrc, std::to_string(i))); - } - - InSequence sequence; - for (const auto& packet : packets) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - for (const auto& packet : packets) { - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, MultipleSinksMappedToSameSsrc) { - MockRtcpPacketSink sinks[3]; - constexpr uint32_t ssrc = 404; - for (auto& sink : sinks) { - AddSsrcSink(ssrc, &sink); - } - - // Reception of an RTCP packet associated with the shared SSRC triggers the - // callback on all of the sinks associated with it. - auto packet = CreateRtcpPacket(ssrc); - for (auto& sink : sinks) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))); - } - - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, SinkMappedToMultipleSsrcs) { - constexpr uint32_t ssrcs[] = {404, 505, 606}; - MockRtcpPacketSink sink; - for (uint32_t ssrc : ssrcs) { - AddSsrcSink(ssrc, &sink); - } - - // The sink which is associated with multiple SSRCs gets the callback - // triggered for each of those SSRCs. 
- for (uint32_t ssrc : ssrcs) { - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))); - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, MultipleRsidsOnSameSink) { - // Sink associated with multiple sinks. - MockRtcpPacketSink sink; - const std::string rsids[] = {"a", "b", "c"}; - for (const auto& rsid : rsids) { - AddRsidSink(rsid, &sink); - } - - // RSIDs resolved to SSRCs. - uint32_t ssrcs[arraysize(rsids)]; - for (size_t i = 0; i < arraysize(rsids); i++) { - ssrcs[i] = 1000 + static_cast(i); - demuxer_.OnSsrcBoundToRsid(rsids[i], ssrcs[i]); - } - - // Set up packets to match those RSIDs/SSRCs. - std::vector packets; - for (size_t i = 0; i < arraysize(rsids); i++) { - packets.push_back(CreateRtcpPacket(ssrcs[i])); - } - - // The sink expects to receive all of the packets. - for (const auto& packet : packets) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - // Packet demuxed correctly; OnRtcpPacket() triggered on sink. - for (const auto& packet : packets) { - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, RsidUsedByMultipleSinks) { - MockRtcpPacketSink sinks[3]; - const std::string shared_rsid = "a"; - - for (MockRtcpPacketSink& sink : sinks) { - AddRsidSink(shared_rsid, &sink); - } - - constexpr uint32_t shared_ssrc = 888; - demuxer_.OnSsrcBoundToRsid(shared_rsid, shared_ssrc); - - auto packet = CreateRtcpPacket(shared_ssrc); - - for (MockRtcpPacketSink& sink : sinks) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnSsrcSinkRemovedBeforeFirstPacket) { - constexpr uint32_t ssrc = 404; - MockRtcpPacketSink sink; - AddSsrcSink(ssrc, &sink); - - RemoveSink(&sink); - - // The removed sink does not get callbacks. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. 
- demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnSsrcSinkRemovedAfterFirstPacket) { - constexpr uint32_t ssrc = 404; - NiceMock sink; - AddSsrcSink(ssrc, &sink); - - auto before_packet = CreateRtcpPacket(ssrc); - demuxer_.OnRtcpPacket(before_packet); - - RemoveSink(&sink); - - // The removed sink does not get callbacks. - auto after_packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(after_packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnRsidSinkRemovedBeforeRsidResolution) { - const std::string rsid = "a"; - constexpr uint32_t ssrc = 404; - MockRtcpPacketSink sink; - AddRsidSink(rsid, &sink); - - // Removal before resolution. - RemoveSink(&sink); - demuxer_.OnSsrcBoundToRsid(rsid, ssrc); - - // The removed sink does not get callbacks. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnRsidSinkRemovedAfterRsidResolution) { - const std::string rsid = "a"; - constexpr uint32_t ssrc = 404; - MockRtcpPacketSink sink; - AddRsidSink(rsid, &sink); - - // Removal after resolution. - demuxer_.OnSsrcBoundToRsid(rsid, ssrc); - RemoveSink(&sink); - - // The removed sink does not get callbacks. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnBroadcastSinkRemovedBeforeFirstPacket) { - MockRtcpPacketSink sink; - AddBroadcastSink(&sink); - - RemoveBroadcastSink(&sink); - - // The removed sink does not get callbacks. - constexpr uint32_t ssrc = 404; - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. 
- demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnBroadcastSinkRemovedAfterFirstPacket) { - NiceMock sink; - AddBroadcastSink(&sink); - - constexpr uint32_t ssrc = 404; - auto before_packet = CreateRtcpPacket(ssrc); - demuxer_.OnRtcpPacket(before_packet); - - RemoveBroadcastSink(&sink); - - // The removed sink does not get callbacks. - auto after_packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(after_packet); -} - -// The RSID to SSRC mapping should be one-to-one. If we end up receiving -// two (or more) packets with the same SSRC, but different RSIDs, we guarantee -// remembering the first one; no guarantees are made about further associations. -TEST_F(RtcpDemuxerTest, FirstResolutionOfRsidNotForgotten) { - MockRtcpPacketSink sink; - const std::string rsid = "a"; - AddRsidSink(rsid, &sink); - - constexpr uint32_t ssrc_a = 111; // First resolution - guaranteed effective. - demuxer_.OnSsrcBoundToRsid(rsid, ssrc_a); - - constexpr uint32_t ssrc_b = 222; // Second resolution - no guarantees. 
- demuxer_.OnSsrcBoundToRsid(rsid, ssrc_b); - - auto packet_a = CreateRtcpPacket(ssrc_a); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet_a))).Times(1); - demuxer_.OnRtcpPacket(packet_a); - - auto packet_b = CreateRtcpPacket(ssrc_b); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet_b))).Times(AtLeast(0)); - demuxer_.OnRtcpPacket(packet_b); -} - -#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) - -TEST_F(RtcpDemuxerTest, RepeatedSsrcToSinkAssociationsDisallowed) { - MockRtcpPacketSink sink; - - constexpr uint32_t ssrc = 101; - AddSsrcSink(ssrc, &sink); - EXPECT_DEATH(AddSsrcSink(ssrc, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RepeatedRsidToSinkAssociationsDisallowed) { - MockRtcpPacketSink sink; - - const std::string rsid = "z"; - AddRsidSink(rsid, &sink); - EXPECT_DEATH(AddRsidSink(rsid, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RepeatedBroadcastSinkRegistrationDisallowed) { - MockRtcpPacketSink sink; - - AddBroadcastSink(&sink); - EXPECT_DEATH(AddBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, SsrcSinkCannotAlsoBeRegisteredAsBroadcast) { - MockRtcpPacketSink sink; - - constexpr uint32_t ssrc = 101; - AddSsrcSink(ssrc, &sink); - EXPECT_DEATH(AddBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidSinkCannotAlsoBeRegisteredAsBroadcast) { - MockRtcpPacketSink sink; - - const std::string rsid = "z"; - AddRsidSink(rsid, &sink); - EXPECT_DEATH(AddBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsSsrcSink) { - MockRtcpPacketSink sink; - - AddBroadcastSink(&sink); - constexpr uint32_t ssrc = 101; - EXPECT_DEATH(AddSsrcSink(ssrc, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsRsidSink) { - MockRtcpPacketSink sink; - - AddBroadcastSink(&sink); - const std::string rsid = "j"; - EXPECT_DEATH(AddRsidSink(rsid, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, MayNotCallRemoveSinkOnNeverAddedSink) { - MockRtcpPacketSink sink; - 
EXPECT_DEATH(RemoveSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, MayNotCallRemoveBroadcastSinkOnNeverAddedSink) { - MockRtcpPacketSink sink; - EXPECT_DEATH(RemoveBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidMustBeNonEmpty) { - MockRtcpPacketSink sink; - EXPECT_DEATH(AddRsidSink("", &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidMustBeAlphaNumeric) { - MockRtcpPacketSink sink; - EXPECT_DEATH(AddRsidSink("a_3", &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidMustNotExceedMaximumLength) { - MockRtcpPacketSink sink; - std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a'); - EXPECT_DEATH(AddRsidSink(rsid, &sink), ""); -} - -#endif - -} // namespace webrtc diff --git a/call/rtcp_packet_sink_interface.h b/call/rtcp_packet_sink_interface.h deleted file mode 100644 index 8ea3f7d21c..0000000000 --- a/call/rtcp_packet_sink_interface.h +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef CALL_RTCP_PACKET_SINK_INTERFACE_H_ -#define CALL_RTCP_PACKET_SINK_INTERFACE_H_ - -#include "api/array_view.h" - -namespace webrtc { - -// This class represents a receiver of unparsed RTCP packets. -// TODO(eladalon): Replace this by demuxing over parsed rather than raw data. -// Whether this should be over an entire RTCP packet, or over RTCP blocks, -// is still under discussion. 
-class RtcpPacketSinkInterface { - public: - virtual ~RtcpPacketSinkInterface() = default; - virtual void OnRtcpPacket(rtc::ArrayView packet) = 0; -}; - -} // namespace webrtc - -#endif // CALL_RTCP_PACKET_SINK_INTERFACE_H_ diff --git a/call/rtp_bitrate_configurator.cc b/call/rtp_bitrate_configurator.cc index 99ccfc98f7..264dcdcb81 100644 --- a/call/rtp_bitrate_configurator.cc +++ b/call/rtp_bitrate_configurator.cc @@ -79,6 +79,16 @@ RtpBitrateConfigurator::UpdateWithClientPreferences( return UpdateConstraints(bitrate_mask.start_bitrate_bps); } +// Relay cap can change only max bitrate. +absl::optional RtpBitrateConfigurator::UpdateWithRelayCap( + DataRate cap) { + if (cap.IsFinite()) { + RTC_DCHECK(!cap.IsZero()); + } + max_bitrate_over_relay_ = cap; + return UpdateConstraints(absl::nullopt); +} + absl::optional RtpBitrateConfigurator::UpdateConstraints( const absl::optional& new_start) { BitrateConstraints updated; @@ -89,6 +99,8 @@ absl::optional RtpBitrateConfigurator::UpdateConstraints( updated.max_bitrate_bps = MinPositive(bitrate_config_mask_.max_bitrate_bps.value_or(-1), base_bitrate_config_.max_bitrate_bps); + updated.max_bitrate_bps = + MinPositive(updated.max_bitrate_bps, max_bitrate_over_relay_.bps_or(-1)); // If the combined min ends up greater than the combined max, the max takes // priority. diff --git a/call/rtp_bitrate_configurator.h b/call/rtp_bitrate_configurator.h index 480ed9ee7c..73c4e41f63 100644 --- a/call/rtp_bitrate_configurator.h +++ b/call/rtp_bitrate_configurator.h @@ -13,6 +13,7 @@ #include "absl/types/optional.h" #include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" #include "rtc_base/constructor_magic.h" namespace webrtc { @@ -44,6 +45,9 @@ class RtpBitrateConfigurator { absl::optional UpdateWithClientPreferences( const BitrateSettings& bitrate_mask); + // Apply a cap for relayed calls. 
+ absl::optional UpdateWithRelayCap(DataRate cap); + private: // Applies update to the BitrateConstraints cached in |config_|, resetting // with |new_start| if set. @@ -62,6 +66,9 @@ class RtpBitrateConfigurator { // min >= 0, start != 0, max == -1 || max > 0 BitrateConstraints base_bitrate_config_; + // Bandwidth cap applied for relayed calls. + DataRate max_bitrate_over_relay_ = DataRate::PlusInfinity(); + RTC_DISALLOW_COPY_AND_ASSIGN(RtpBitrateConfigurator); }; } // namespace webrtc diff --git a/call/rtp_config.cc b/call/rtp_config.cc index 7840308ecd..c84a63ee4e 100644 --- a/call/rtp_config.cc +++ b/call/rtp_config.cc @@ -12,11 +12,29 @@ #include +#include "absl/algorithm/container.h" #include "api/array_view.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { +namespace { + +uint32_t FindAssociatedSsrc(uint32_t ssrc, + const std::vector& ssrcs, + const std::vector& associated_ssrcs) { + RTC_DCHECK_EQ(ssrcs.size(), associated_ssrcs.size()); + for (size_t i = 0; i < ssrcs.size(); ++i) { + if (ssrcs[i] == ssrc) + return associated_ssrcs[i]; + } + RTC_NOTREACHED(); + return 0; +} + +} // namespace + std::string LntfConfig::ToString() const { return enabled ? "{enabled: true}" : "{enabled: false}"; } @@ -62,7 +80,13 @@ std::string RtpConfig::ToString() const { if (i != ssrcs.size() - 1) ss << ", "; } - ss << ']'; + ss << "], rids: ["; + for (size_t i = 0; i < rids.size(); ++i) { + ss << rids[i]; + if (i != rids.size() - 1) + ss << ", "; + } + ss << "], mid: '" << mid << "'"; ss << ", rtcp_mode: " << (rtcp_mode == RtcpMode::kCompound ? 
"RtcpMode::kCompound" : "RtcpMode::kReducedSize"); @@ -118,4 +142,62 @@ std::string RtpConfig::Rtx::ToString() const { ss << '}'; return ss.str(); } + +bool RtpConfig::IsMediaSsrc(uint32_t ssrc) const { + return absl::c_linear_search(ssrcs, ssrc); +} + +bool RtpConfig::IsRtxSsrc(uint32_t ssrc) const { + return absl::c_linear_search(rtx.ssrcs, ssrc); +} + +bool RtpConfig::IsFlexfecSsrc(uint32_t ssrc) const { + return flexfec.payload_type != -1 && ssrc == flexfec.ssrc; +} + +absl::optional RtpConfig::GetRtxSsrcAssociatedWithMediaSsrc( + uint32_t media_ssrc) const { + RTC_DCHECK(IsMediaSsrc(media_ssrc)); + // If we don't use RTX there is no association. + if (rtx.ssrcs.empty()) + return absl::nullopt; + // If we use RTX there MUST be an association ssrcs[i] <-> rtx.ssrcs[i]. + RTC_DCHECK_EQ(ssrcs.size(), rtx.ssrcs.size()); + return FindAssociatedSsrc(media_ssrc, ssrcs, rtx.ssrcs); +} + +uint32_t RtpConfig::GetMediaSsrcAssociatedWithRtxSsrc(uint32_t rtx_ssrc) const { + RTC_DCHECK(IsRtxSsrc(rtx_ssrc)); + // If we use RTX there MUST be an association ssrcs[i] <-> rtx.ssrcs[i]. + RTC_DCHECK_EQ(ssrcs.size(), rtx.ssrcs.size()); + return FindAssociatedSsrc(rtx_ssrc, rtx.ssrcs, ssrcs); +} + +uint32_t RtpConfig::GetMediaSsrcAssociatedWithFlexfecSsrc( + uint32_t flexfec_ssrc) const { + RTC_DCHECK(IsFlexfecSsrc(flexfec_ssrc)); + // If we use FlexFEC there MUST be an associated media ssrc. + // + // TODO(brandtr/hbos): The current implementation only supports an association + // with a single media ssrc. If multiple ssrcs are to be supported in the + // future, in order not to break GetStats()'s packet and byte counters, we + // must be able to tell how many packets and bytes have contributed to which + // SSRC. 
+ RTC_DCHECK_EQ(1u, flexfec.protected_media_ssrcs.size()); + uint32_t media_ssrc = flexfec.protected_media_ssrcs[0]; + RTC_DCHECK(IsMediaSsrc(media_ssrc)); + return media_ssrc; +} + +absl::optional RtpConfig::GetRidForSsrc(uint32_t ssrc) const { + auto it = std::find(ssrcs.begin(), ssrcs.end(), ssrc); + if (it != ssrcs.end()) { + size_t ssrc_index = std::distance(ssrcs.begin(), it); + if (ssrc_index < rids.size()) { + return rids[ssrc_index]; + } + } + return absl::nullopt; +} + } // namespace webrtc diff --git a/call/rtp_config.h b/call/rtp_config.h index a0596a8839..298644930c 100644 --- a/call/rtp_config.h +++ b/call/rtp_config.h @@ -17,6 +17,7 @@ #include #include +#include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" @@ -157,6 +158,15 @@ struct RtpConfig { // RTCP CNAME, see RFC 3550. std::string c_name; + + bool IsMediaSsrc(uint32_t ssrc) const; + bool IsRtxSsrc(uint32_t ssrc) const; + bool IsFlexfecSsrc(uint32_t ssrc) const; + absl::optional GetRtxSsrcAssociatedWithMediaSsrc( + uint32_t media_ssrc) const; + uint32_t GetMediaSsrcAssociatedWithRtxSsrc(uint32_t rtx_ssrc) const; + uint32_t GetMediaSsrcAssociatedWithFlexfecSsrc(uint32_t flexfec_ssrc) const; + absl::optional GetRidForSsrc(uint32_t ssrc) const; }; } // namespace webrtc #endif // CALL_RTP_CONFIG_H_ diff --git a/call/rtp_demuxer.cc b/call/rtp_demuxer.cc index 14725cf023..9fc4ba1c16 100644 --- a/call/rtp_demuxer.cc +++ b/call/rtp_demuxer.cc @@ -11,8 +11,6 @@ #include "call/rtp_demuxer.h" #include "call/rtp_packet_sink_interface.h" -#include "call/rtp_rtcp_demuxer_helper.h" -#include "call/ssrc_binding_observer.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" @@ -20,10 +18,60 @@ #include "rtc_base/strings/string_builder.h" namespace webrtc { +namespace { + +template +size_t RemoveFromMultimapByValue(Container* multimap, const Value& value) { + size_t count = 0; + 
for (auto it = multimap->begin(); it != multimap->end();) { + if (it->second == value) { + it = multimap->erase(it); + ++count; + } else { + ++it; + } + } + return count; +} + +template +size_t RemoveFromMapByValue(Map* map, const Value& value) { + size_t count = 0; + for (auto it = map->begin(); it != map->end();) { + if (it->second == value) { + it = map->erase(it); + ++count; + } else { + ++it; + } + } + return count; +} + +} // namespace RtpDemuxerCriteria::RtpDemuxerCriteria() = default; RtpDemuxerCriteria::~RtpDemuxerCriteria() = default; +std::string RtpDemuxerCriteria::ToString() const { + rtc::StringBuilder sb; + sb << "{mid: " << (mid.empty() ? "" : mid) + << ", rsid: " << (rsid.empty() ? "" : rsid) << ", ssrcs: ["; + + for (auto ssrc : ssrcs) { + sb << ssrc << ", "; + } + + sb << "], payload_types = ["; + + for (auto pt : payload_types) { + sb << pt << ", "; + } + + sb << "]}"; + return sb.Release(); +} + // static std::string RtpDemuxer::DescribePacket(const RtpPacketReceived& packet) { rtc::StringBuilder sb; @@ -51,7 +99,6 @@ RtpDemuxer::~RtpDemuxer() { RTC_DCHECK(sinks_by_pt_.empty()); RTC_DCHECK(sink_by_mid_and_rsid_.empty()); RTC_DCHECK(sink_by_rsid_.empty()); - RTC_DCHECK(ssrc_binding_observers_.empty()); } bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, @@ -66,6 +113,8 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, // criteria because new sinks are created according to user-specified SDP and // we do not want to crash due to a data validation error. 
if (CriteriaWouldConflict(criteria)) { + RTC_LOG(LS_ERROR) << "Unable to add sink = " << sink + " due to conflicting criteria " << criteria.ToString(); return false; } @@ -92,6 +141,9 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, RefreshKnownMids(); + RTC_LOG(LS_INFO) << "Added sink = " << sink << " for criteria " + << criteria.ToString(); + return true; } @@ -105,25 +157,40 @@ bool RtpDemuxer::CriteriaWouldConflict( // Adding this criteria would cause one of these rules to be shadowed, so // reject this new criteria. if (known_mids_.find(criteria.mid) != known_mids_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with known mid"; return true; } } else { // If the exact rule already exists, then reject this duplicate. - if (sink_by_mid_and_rsid_.find(std::make_pair( - criteria.mid, criteria.rsid)) != sink_by_mid_and_rsid_.end()) { + const auto sink_by_mid_and_rsid = sink_by_mid_and_rsid_.find( + std::make_pair(criteria.mid, criteria.rsid)); + if (sink_by_mid_and_rsid != sink_by_mid_and_rsid_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with existing sink = " + << sink_by_mid_and_rsid->second + << " by mid+rsid binding"; return true; } // If there is already a sink registered for the bare MID, then this // criteria will never receive any packets because they will just be // directed to that MID sink, so reject this new criteria.
- if (sink_by_mid_.find(criteria.mid) != sink_by_mid_.end()) { + const auto sink_by_mid = sink_by_mid_.find(criteria.mid); + if (sink_by_mid != sink_by_mid_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with existing sink = " + << sink_by_mid->second << " by mid binding"; return true; } } } for (uint32_t ssrc : criteria.ssrcs) { - if (sink_by_ssrc_.find(ssrc) != sink_by_ssrc_.end()) { + const auto sink_by_ssrc = sink_by_ssrc_.find(ssrc); + if (sink_by_ssrc != sink_by_ssrc_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with existing sink = " + << sink_by_ssrc->second << " binding by SSRC=" << ssrc; return true; } } @@ -168,7 +235,11 @@ bool RtpDemuxer::RemoveSink(const RtpPacketSinkInterface* sink) { RemoveFromMapByValue(&sink_by_mid_and_rsid_, sink) + RemoveFromMapByValue(&sink_by_rsid_, sink); RefreshKnownMids(); - return num_removed > 0; + bool removed = num_removed > 0; + if (removed) { + RTC_LOG(LS_INFO) << "Removed sink = " << sink << " bindings"; + } + return removed; } bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) { @@ -284,12 +355,7 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMid(const std::string& mid, const auto it = sink_by_mid_.find(mid); if (it != sink_by_mid_.end()) { RtpPacketSinkInterface* sink = it->second; - bool notify = AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToMid(mid, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); return sink; } return nullptr; @@ -302,39 +368,22 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMidRsid( const auto it = sink_by_mid_and_rsid_.find(std::make_pair(mid, rsid)); if (it != sink_by_mid_and_rsid_.end()) { RtpPacketSinkInterface* sink = it->second; - bool notify = AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToMidRsid(mid, rsid, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); 
return sink; } return nullptr; } -void RtpDemuxer::RegisterRsidResolutionObserver(SsrcBindingObserver* observer) { - RegisterSsrcBindingObserver(observer); -} RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByRsid(const std::string& rsid, uint32_t ssrc) { const auto it = sink_by_rsid_.find(rsid); if (it != sink_by_rsid_.end()) { RtpPacketSinkInterface* sink = it->second; - bool notify = AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToRsid(rsid, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); return sink; } return nullptr; } -void RtpDemuxer::DeregisterRsidResolutionObserver( - const SsrcBindingObserver* observer) { - DeregisterSsrcBindingObserver(observer); -} RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByPayloadType( uint8_t payload_type, @@ -345,54 +394,33 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByPayloadType( const auto end = range.second; if (std::next(it) == end) { RtpPacketSinkInterface* sink = it->second; - bool notify = AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToPayloadType(payload_type, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); return sink; } } return nullptr; } -bool RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc, +void RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink) { if (sink_by_ssrc_.size() >= kMaxSsrcBindings) { RTC_LOG(LS_WARNING) << "New SSRC=" << ssrc << " sink binding ignored; limit of" << kMaxSsrcBindings << " bindings has been reached."; - return false; + return; } auto result = sink_by_ssrc_.emplace(ssrc, sink); auto it = result.first; bool inserted = result.second; if (inserted) { - return true; - } - if (it->second != sink) { + RTC_LOG(LS_INFO) << "Added sink = " << sink + << " binding with SSRC=" << ssrc; + } else if (it->second != sink) { + RTC_LOG(LS_INFO) << "Updated sink = " << sink + << " binding with SSRC=" << ssrc; it->second = sink; - 
return true; } - return false; -} - -void RtpDemuxer::RegisterSsrcBindingObserver(SsrcBindingObserver* observer) { - RTC_DCHECK(observer); - RTC_DCHECK(!ContainerHasKey(ssrc_binding_observers_, observer)); - - ssrc_binding_observers_.push_back(observer); -} - -void RtpDemuxer::DeregisterSsrcBindingObserver( - const SsrcBindingObserver* observer) { - RTC_DCHECK(observer); - auto it = std::find(ssrc_binding_observers_.begin(), - ssrc_binding_observers_.end(), observer); - RTC_DCHECK(it != ssrc_binding_observers_.end()); - ssrc_binding_observers_.erase(it); } } // namespace webrtc diff --git a/call/rtp_demuxer.h b/call/rtp_demuxer.h index c815c47f72..3aa7e9df26 100644 --- a/call/rtp_demuxer.h +++ b/call/rtp_demuxer.h @@ -21,7 +21,6 @@ namespace webrtc { class RtpPacketReceived; class RtpPacketSinkInterface; -class SsrcBindingObserver; // This struct describes the criteria that will be used to match packets to a // specific sink. @@ -44,6 +43,9 @@ struct RtpDemuxerCriteria { // Will match packets with any of these payload types. std::set payload_types; + + // Return string representation of demux criteria to facilitate logging + std::string ToString() const; }; // This class represents the RTP demuxing, for a single RTP session (i.e., one @@ -130,17 +132,6 @@ class RtpDemuxer { // if the packet was forwarded and false if the packet was dropped. bool OnRtpPacket(const RtpPacketReceived& packet); - // The Observer will be notified when an attribute (e.g., RSID, MID, etc.) is - // bound to an SSRC. - void RegisterSsrcBindingObserver(SsrcBindingObserver* observer); - // Deprecated: Use the above method. - void RegisterRsidResolutionObserver(SsrcBindingObserver* observer); - - // Undo a previous RegisterSsrcBindingObserver(). - void DeregisterSsrcBindingObserver(const SsrcBindingObserver* observer); - // Deprecated: Use the above method. 
- void DeregisterRsidResolutionObserver(const SsrcBindingObserver* observer); - // Configure whether to look at the MID header extension when demuxing // incoming RTP packets. By default this is enabled. void set_use_mid(bool use_mid) { use_mid_ = use_mid; } @@ -197,14 +188,8 @@ class RtpDemuxer { std::map mid_by_ssrc_; std::map rsid_by_ssrc_; - // Adds a binding from the SSRC to the given sink. Returns true if there was - // not already a sink bound to the SSRC or if the sink replaced a different - // sink. Returns false if the binding was unchanged. - bool AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink); - - // Observers which will be notified when an RSID association to an SSRC is - // resolved by this object. - std::vector ssrc_binding_observers_; + // Adds a binding from the SSRC to the given sink. + void AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink); bool use_mid_ = true; }; diff --git a/call/rtp_demuxer_unittest.cc b/call/rtp_demuxer_unittest.cc index 86b458a0cc..a4abab73ed 100644 --- a/call/rtp_demuxer_unittest.cc +++ b/call/rtp_demuxer_unittest.cc @@ -14,7 +14,6 @@ #include #include -#include "call/ssrc_binding_observer.h" #include "call/test/mock_rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -31,31 +30,15 @@ namespace { using ::testing::_; using ::testing::AtLeast; -using ::testing::AtMost; using ::testing::InSequence; using ::testing::NiceMock; -class MockSsrcBindingObserver : public SsrcBindingObserver { - public: - MOCK_METHOD2(OnSsrcBoundToRsid, void(const std::string& rsid, uint32_t ssrc)); - MOCK_METHOD2(OnSsrcBoundToMid, void(const std::string& mid, uint32_t ssrc)); - MOCK_METHOD3(OnSsrcBoundToMidRsid, - void(const std::string& mid, - const std::string& rsid, - uint32_t ssrc)); - MOCK_METHOD2(OnSsrcBoundToPayloadType, - void(uint8_t payload_type, uint32_t ssrc)); -}; - class RtpDemuxerTest : public 
::testing::Test { protected: ~RtpDemuxerTest() { for (auto* sink : sinks_to_tear_down_) { demuxer_.RemoveSink(sink); } - for (auto* observer : observers_to_tear_down_) { - demuxer_.DeregisterSsrcBindingObserver(observer); - } } // These are convenience methods for calling demuxer.AddSink with different @@ -103,20 +86,6 @@ class RtpDemuxerTest : public ::testing::Test { return demuxer_.RemoveSink(sink); } - // These are convenience methods for calling - // demuxer.{Register|Unregister}SsrcBindingObserver such that observers are - // automatically removed when the test finishes. - - void RegisterSsrcBindingObserver(SsrcBindingObserver* observer) { - demuxer_.RegisterSsrcBindingObserver(observer); - observers_to_tear_down_.insert(observer); - } - - void DeregisterSsrcBindingObserver(SsrcBindingObserver* observer) { - demuxer_.DeregisterSsrcBindingObserver(observer); - observers_to_tear_down_.erase(observer); - } - // The CreatePacket* methods are helpers for creating new RTP packets with // various attributes set. Tests should use the helper that provides the // minimum information needed to exercise the behavior under test. 
Tests also @@ -206,10 +175,11 @@ class RtpDemuxerTest : public ::testing::Test { RtpDemuxer demuxer_; std::set sinks_to_tear_down_; - std::set observers_to_tear_down_; uint16_t next_sequence_number_ = 1; }; +class RtpDemuxerDeathTest : public RtpDemuxerTest {}; + MATCHER_P(SamePacketAs, other, "") { return arg.Ssrc() == other.Ssrc() && arg.SequenceNumber() == other.SequenceNumber(); @@ -746,73 +716,6 @@ TEST_F(RtpDemuxerTest, AssociatingByRsidAndBySsrcCannotTriggerDoubleCall) { EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); } -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToMid) { - const std::string mid = "v"; - constexpr uint32_t ssrc = 10; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrcMid(ssrc, mid); - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc)); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} - -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToRsid) { - const std::string rsid = "1"; - constexpr uint32_t ssrc = 111; - - // Only RSIDs which the demuxer knows may be resolved. - NiceMock sink; - AddSinkOnlyRsid(rsid, &sink); - - NiceMock rsid_resolution_observers[3]; - for (auto& observer : rsid_resolution_observers) { - RegisterSsrcBindingObserver(&observer); - EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - } - - // The expected calls to OnSsrcBoundToRsid() will be triggered by this. 
- auto packet = CreatePacketWithSsrcRsid(ssrc, rsid); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} - -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToMidRsid) { - const std::string mid = "v"; - const std::string rsid = "1"; - constexpr uint32_t ssrc = 10; - - NiceMock sink; - AddSinkBothMidRsid(mid, rsid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrcMidRsid(ssrc, mid, rsid); - EXPECT_CALL(observer, OnSsrcBoundToMidRsid(mid, rsid, ssrc)); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} - -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToPayloadType) { - constexpr uint8_t payload_type = 3; - constexpr uint32_t ssrc = 10; - - RtpDemuxerCriteria criteria; - criteria.payload_types = {payload_type}; - NiceMock sink; - AddSink(criteria, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrc(ssrc); - packet->SetPayloadType(payload_type); - EXPECT_CALL(observer, OnSsrcBoundToPayloadType(payload_type, ssrc)); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} // If one sink is associated with SSRC x, and another sink with RSID y, then if // we receive a packet with both SSRC x and RSID y, route that to only the sink @@ -847,9 +750,6 @@ TEST_F(RtpDemuxerTest, NiceMock rsid_sink; AddSinkOnlyRsid(rsid, &rsid_sink); - NiceMock observer; - RegisterSsrcBindingObserver(&observer); - // The SSRC was mapped to an SSRC sink, but was even active (packets flowed // over it). auto packet = CreatePacketWithSsrcRsid(ssrc, rsid); @@ -860,7 +760,6 @@ TEST_F(RtpDemuxerTest, // is guaranteed. 
RemoveSink(&ssrc_sink); EXPECT_CALL(rsid_sink, OnRtpPacket(SamePacketAs(*packet))).Times(AtLeast(0)); - EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(AtLeast(0)); EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); } @@ -1355,169 +1254,36 @@ TEST_F(RtpDemuxerTest, PacketWithMidAndUnknownRsidIsNotRoutedByPayloadType) { EXPECT_FALSE(demuxer_.OnRtpPacket(*packet)); } -// Observers are only notified of an SSRC binding to an RSID if we care about -// the RSID (i.e., have a sink added for that RSID). -TEST_F(RtpDemuxerTest, ObserversNotNotifiedOfUntrackedRsids) { - const std::string rsid = "1"; - constexpr uint32_t ssrc = 111; - - MockSsrcBindingObserver rsid_resolution_observers[3]; - for (auto& observer : rsid_resolution_observers) { - RegisterSsrcBindingObserver(&observer); - EXPECT_CALL(observer, OnSsrcBoundToRsid(_, _)).Times(0); - } - - // Since no sink is registered for this SSRC/RSID, expect the packet to not be - // routed and no observers notified of the SSRC -> RSID binding. - EXPECT_FALSE(demuxer_.OnRtpPacket(*CreatePacketWithSsrcRsid(ssrc, rsid))); -} - -// Ensure that observers are notified of SSRC bindings only once per unique -// binding source (e.g., SSRC -> MID, SSRC -> RSID, etc.) -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundtoMidOnlyOnce) { - const std::string mid = "v"; - constexpr uint32_t ssrc = 10; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc)).Times(1); - - demuxer_.OnRtpPacket(*CreatePacketWithSsrcMid(ssrc, mid)); - demuxer_.OnRtpPacket(*CreatePacketWithSsrcMid(ssrc, mid)); -} - -// Ensure that when a new SSRC -> MID binding is discovered observers are also -// notified of that, even if there has already been an SSRC bound to the MID. 
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundtoMidWhenSsrcChanges) { - const std::string mid = "v"; - constexpr uint32_t ssrc1 = 10; - constexpr uint32_t ssrc2 = 11; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - InSequence seq; - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc1)).Times(1); - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc2)).Times(1); - - auto p1 = CreatePacketWithSsrcMid(ssrc1, mid); - demuxer_.OnRtpPacket(*p1); - - auto p2 = CreatePacketWithSsrcMid(ssrc2, mid); - demuxer_.OnRtpPacket(*p2); -} - -TEST_F(RtpDemuxerTest, DeregisteredRsidObserversNotInformedOfResolutions) { - constexpr uint32_t ssrc = 111; - const std::string rsid = "a"; - NiceMock sink; - AddSinkOnlyRsid(rsid, &sink); - - // Register several, then deregister only one, to show that not all of the - // observers had been forgotten when one was removed. - MockSsrcBindingObserver observer_1; - MockSsrcBindingObserver observer_2_removed; - MockSsrcBindingObserver observer_3; - - RegisterSsrcBindingObserver(&observer_1); - RegisterSsrcBindingObserver(&observer_2_removed); - RegisterSsrcBindingObserver(&observer_3); - - DeregisterSsrcBindingObserver(&observer_2_removed); - - EXPECT_CALL(observer_1, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - EXPECT_CALL(observer_2_removed, OnSsrcBoundToRsid(_, _)).Times(0); - EXPECT_CALL(observer_3, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - - // The expected calls to OnSsrcBoundToRsid() will be triggered by this. 
- demuxer_.OnRtpPacket(*CreatePacketWithSsrcRsid(ssrc, rsid)); -} - -TEST_F(RtpDemuxerTest, - PacketFittingBothRsidSinkAndSsrcSinkTriggersResolutionCallbacks) { - constexpr uint32_t ssrc = 111; - NiceMock ssrc_sink; - AddSinkOnlySsrc(ssrc, &ssrc_sink); - - const std::string rsid = "a"; - NiceMock rsid_sink; - AddSinkOnlyRsid(rsid, &rsid_sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrcRsid(ssrc, rsid); - EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - demuxer_.OnRtpPacket(*packet); -} - -TEST_F(RtpDemuxerTest, MaliciousPeerCannotCauseMemoryOveruse) { - const std::string mid = "v"; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - EXPECT_CALL(observer, OnSsrcBoundToMid(_, _)) - .Times(AtMost(RtpDemuxer::kMaxSsrcBindings)); - - for (int i = 0; i < RtpDemuxer::kMaxSsrcBindings + 1; i++) { - auto packet = CreatePacketWithSsrcMid(i, mid); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); - } -} - #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(RtpDemuxerTest, CriteriaMustBeNonEmpty) { +TEST_F(RtpDemuxerDeathTest, CriteriaMustBeNonEmpty) { MockRtpPacketSink sink; RtpDemuxerCriteria criteria; EXPECT_DEATH(AddSink(criteria, &sink), ""); } -TEST_F(RtpDemuxerTest, RsidMustBeAlphaNumeric) { +TEST_F(RtpDemuxerDeathTest, RsidMustBeAlphaNumeric) { MockRtpPacketSink sink; EXPECT_DEATH(AddSinkOnlyRsid("a_3", &sink), ""); } -TEST_F(RtpDemuxerTest, MidMustBeToken) { +TEST_F(RtpDemuxerDeathTest, MidMustBeToken) { MockRtpPacketSink sink; EXPECT_DEATH(AddSinkOnlyMid("a(3)", &sink), ""); } -TEST_F(RtpDemuxerTest, RsidMustNotExceedMaximumLength) { +TEST_F(RtpDemuxerDeathTest, RsidMustNotExceedMaximumLength) { MockRtpPacketSink sink; std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a'); EXPECT_DEATH(AddSinkOnlyRsid(rsid, &sink), ""); } -TEST_F(RtpDemuxerTest, 
MidMustNotExceedMaximumLength) { +TEST_F(RtpDemuxerDeathTest, MidMustNotExceedMaximumLength) { MockRtpPacketSink sink; std::string mid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a'); EXPECT_DEATH(AddSinkOnlyMid(mid, &sink), ""); } -TEST_F(RtpDemuxerTest, DoubleRegisterationOfSsrcBindingObserverDisallowed) { - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - EXPECT_DEATH(RegisterSsrcBindingObserver(&observer), ""); -} - -TEST_F(RtpDemuxerTest, - DregisterationOfNeverRegisteredSsrcBindingObserverDisallowed) { - MockSsrcBindingObserver observer; - EXPECT_DEATH(DeregisterSsrcBindingObserver(&observer), ""); -} - #endif } // namespace diff --git a/call/rtp_payload_params.cc b/call/rtp_payload_params.cc index 408a2a85f6..2b754ab5b0 100644 --- a/call/rtp_payload_params.cc +++ b/call/rtp_payload_params.cc @@ -15,18 +15,19 @@ #include #include "absl/container/inlined_vector.h" +#include "absl/strings/match.h" #include "absl/types/variant.h" #include "api/video/video_timing.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/frame_dependencies_calculator.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/random.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -84,7 +85,7 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, for (int i = 0; i < info.codecSpecific.VP9.num_ref_pics; ++i) { vp9_header.pid_diff[i] = info.codecSpecific.VP9.p_diff[i]; } - vp9_header.end_of_picture = info.codecSpecific.VP9.end_of_picture; + vp9_header.end_of_picture = info.end_of_picture; return; } case kVideoCodecH264: { @@ -92,17 +93,16 @@ void PopulateRtpWithCodecSpecifics(const 
CodecSpecificInfo& info, h264_header.packetization_mode = info.codecSpecific.H264.packetization_mode; rtp->simulcastIdx = spatial_index.value_or(0); - rtp->frame_marking.temporal_id = kNoTemporalIdx; - if (info.codecSpecific.H264.temporal_idx != kNoTemporalIdx) { - rtp->frame_marking.temporal_id = info.codecSpecific.H264.temporal_idx; - rtp->frame_marking.layer_id = 0; - rtp->frame_marking.independent_frame = - info.codecSpecific.H264.idr_frame; - rtp->frame_marking.base_layer_sync = - info.codecSpecific.H264.base_layer_sync; - } return; } +#ifndef DISABLE_H265 + case kVideoCodecH265: { + auto& h265_header = rtp->video_type_header.emplace(); + h265_header.packetization_mode = + info.codecSpecific.H265.packetization_mode; + } + return; +#endif case kVideoCodecMultiplex: case kVideoCodecGeneric: rtp->codec = kVideoCodecGeneric; @@ -133,12 +133,12 @@ void SetVideoTiming(const EncodedImage& image, VideoSendTiming* timing) { } // namespace RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc, - const RtpPayloadState* state) + const RtpPayloadState* state, + const WebRtcKeyValueConfig& trials) : ssrc_(ssrc), generic_picture_id_experiment_( - field_trial::IsEnabled("WebRTC-GenericPictureId")), - generic_descriptor_experiment_( - field_trial::IsEnabled("WebRTC-GenericDescriptor")) { + absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"), + "Enabled")) { for (auto& spatial_layer : last_shared_frame_id_) spatial_layer.fill(-1); @@ -182,9 +182,8 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader( SetCodecSpecific(&rtp_video_header, first_frame_in_picture); - if (generic_descriptor_experiment_) - SetGeneric(codec_specific_info, shared_frame_id, is_keyframe, - &rtp_video_header); + SetGeneric(codec_specific_info, shared_frame_id, is_keyframe, + &rtp_video_header); return rtp_video_header; } @@ -233,14 +232,6 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header, vp9_header.tl0_pic_idx = state_.tl0_pic_idx; } } - if (rtp_video_header->codec == 
kVideoCodecH264) { - if (rtp_video_header->frame_marking.temporal_id != kNoTemporalIdx) { - if (rtp_video_header->frame_marking.temporal_id == 0) { - ++state_.tl0_pic_idx; - } - rtp_video_header->frame_marking.tl0_pic_idx = state_.tl0_pic_idx; - } - } if (generic_picture_id_experiment_ && rtp_video_header->codec == kVideoCodecGeneric) { rtp_video_header->video_type_header.emplace() @@ -248,10 +239,41 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header, } } +RTPVideoHeader::GenericDescriptorInfo +RtpPayloadParams::GenericDescriptorFromFrameInfo( + const GenericFrameInfo& frame_info, + int64_t frame_id, + VideoFrameType frame_type) { + RTPVideoHeader::GenericDescriptorInfo generic; + generic.frame_id = frame_id; + generic.dependencies = dependencies_calculator_.FromBuffersUsage( + frame_type, frame_id, frame_info.encoder_buffers); + generic.chain_diffs = + chains_calculator_.From(frame_id, frame_info.part_of_chain); + generic.spatial_index = frame_info.spatial_id; + generic.temporal_index = frame_info.temporal_id; + generic.decode_target_indications = frame_info.decode_target_indications; + generic.active_decode_targets = frame_info.active_decode_targets; + return generic; +} + void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, int64_t frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header) { + if (codec_specific_info && codec_specific_info->generic_frame_info && + !codec_specific_info->generic_frame_info->encoder_buffers.empty()) { + if (is_keyframe) { + // Key frame resets all chains it is in. 
+ chains_calculator_.Reset( + codec_specific_info->generic_frame_info->part_of_chain); + } + rtp_video_header->generic = + GenericDescriptorFromFrameInfo(*codec_specific_info->generic_frame_info, + frame_id, rtp_video_header->frame_type); + return; + } + switch (rtp_video_header->codec) { case VideoCodecType::kVideoCodecGeneric: GenericToGeneric(frame_id, is_keyframe, rtp_video_header); @@ -272,6 +294,9 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, is_keyframe, rtp_video_header); } return; +#ifndef DISABLE_H265 + case VideoCodecType::kVideoCodecH265: +#endif case VideoCodecType::kVideoCodecMultiplex: return; } diff --git a/call/rtp_payload_params.h b/call/rtp_payload_params.h index b012398518..2e0faeb5c9 100644 --- a/call/rtp_payload_params.h +++ b/call/rtp_payload_params.h @@ -14,10 +14,13 @@ #include #include "absl/types/optional.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/chain_diff_calculator.h" +#include "modules/video_coding/frame_dependencies_calculator.h" #include "modules/video_coding/include/video_codec_interface.h" namespace webrtc { @@ -28,7 +31,9 @@ class RtpRtcp; // TODO(nisse): Make these properties not codec specific. 
class RtpPayloadParams final { public: - RtpPayloadParams(const uint32_t ssrc, const RtpPayloadState* state); + RtpPayloadParams(const uint32_t ssrc, + const RtpPayloadState* state, + const WebRtcKeyValueConfig& trials); RtpPayloadParams(const RtpPayloadParams& other); ~RtpPayloadParams(); @@ -43,6 +48,10 @@ class RtpPayloadParams final { private: void SetCodecSpecific(RTPVideoHeader* rtp_video_header, bool first_frame_in_picture); + RTPVideoHeader::GenericDescriptorInfo GenericDescriptorFromFrameInfo( + const GenericFrameInfo& frame_info, + int64_t frame_id, + VideoFrameType frame_type); void SetGeneric(const CodecSpecificInfo* codec_specific_info, int64_t frame_id, bool is_keyframe, @@ -79,6 +88,8 @@ class RtpPayloadParams final { bool layer_sync, RTPVideoHeader::GenericDescriptorInfo* generic); + FrameDependenciesCalculator dependencies_calculator_; + ChainDiffCalculator chains_calculator_; // TODO(bugs.webrtc.org/10242): Remove once all encoder-wrappers are updated. // Holds the last shared frame id for a given (spatial, temporal) layer. 
std::array, @@ -103,7 +114,6 @@ class RtpPayloadParams final { RtpPayloadState state_; const bool generic_picture_id_experiment_; - const bool generic_descriptor_experiment_; }; } // namespace webrtc #endif // CALL_RTP_PAYLOAD_PARAMS_H_ diff --git a/call/rtp_payload_params_unittest.cc b/call/rtp_payload_params_unittest.cc index ad5d8e1303..56ed2cdea6 100644 --- a/call/rtp_payload_params_unittest.cc +++ b/call/rtp_payload_params_unittest.cc @@ -18,6 +18,7 @@ #include "absl/container/inlined_vector.h" #include "absl/types/optional.h" #include "absl/types/variant.h" +#include "api/transport/field_trial_based_config.h" #include "api/video/video_content_type.h" #include "api/video/video_rotation.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" @@ -31,6 +32,7 @@ using ::testing::ElementsAre; using ::testing::IsEmpty; +using ::testing::SizeIs; namespace webrtc { namespace { @@ -50,7 +52,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp8) { state2.tl0_pic_idx = kTl0PicIdx; std::map states = {{kSsrc2, state2}}; - RtpPayloadParams params(kSsrc2, &state2); + RtpPayloadParams params(kSsrc2, &state2, FieldTrialBasedConfig()); EncodedImage encoded_image; encoded_image.rotation_ = kVideoRotation_90; encoded_image.content_type_ = VideoContentType::SCREENSHARE; @@ -90,7 +92,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) { RtpPayloadState state; state.picture_id = kPictureId; state.tl0_pic_idx = kTl0PicIdx; - RtpPayloadParams params(kSsrc1, &state); + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); EncodedImage encoded_image; encoded_image.rotation_ = kVideoRotation_90; @@ -101,7 +103,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) { codec_info.codecSpecific.VP9.num_spatial_layers = 3; codec_info.codecSpecific.VP9.first_frame_in_picture = true; codec_info.codecSpecific.VP9.temporal_idx = 2; - codec_info.codecSpecific.VP9.end_of_picture = false; + codec_info.end_of_picture = false; RTPVideoHeader 
header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); @@ -118,12 +120,11 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) { EXPECT_EQ(vp9_header.spatial_idx, encoded_image.SpatialIndex()); EXPECT_EQ(vp9_header.num_spatial_layers, codec_info.codecSpecific.VP9.num_spatial_layers); - EXPECT_EQ(vp9_header.end_of_picture, - codec_info.codecSpecific.VP9.end_of_picture); + EXPECT_EQ(vp9_header.end_of_picture, codec_info.end_of_picture); // Next spatial layer. codec_info.codecSpecific.VP9.first_frame_in_picture = false; - codec_info.codecSpecific.VP9.end_of_picture = true; + codec_info.end_of_picture = true; encoded_image.SetSpatialIndex(1); ColorSpace color_space( @@ -142,56 +143,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) { EXPECT_EQ(vp9_header.spatial_idx, encoded_image.SpatialIndex()); EXPECT_EQ(vp9_header.num_spatial_layers, codec_info.codecSpecific.VP9.num_spatial_layers); - EXPECT_EQ(vp9_header.end_of_picture, - codec_info.codecSpecific.VP9.end_of_picture); -} - -TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_H264) { - RtpPayloadState state; - state.picture_id = kPictureId; - state.tl0_pic_idx = kInitialTl0PicIdx1; - RtpPayloadParams params(kSsrc1, &state); - - EncodedImage encoded_image; - CodecSpecificInfo codec_info; - CodecSpecificInfoH264* h264info = &codec_info.codecSpecific.H264; - codec_info.codecType = kVideoCodecH264; - h264info->packetization_mode = H264PacketizationMode::SingleNalUnit; - h264info->temporal_idx = kNoTemporalIdx; - - RTPVideoHeader header = - params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); - - EXPECT_EQ(0, header.simulcastIdx); - EXPECT_EQ(kVideoCodecH264, header.codec); - const auto& h264 = absl::get(header.video_type_header); - EXPECT_EQ(H264PacketizationMode::SingleNalUnit, h264.packetization_mode); - - // test temporal param 1 - h264info->temporal_idx = 1; - h264info->base_layer_sync = true; - h264info->idr_frame = false; - - header = 
params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); - - EXPECT_EQ(kVideoCodecH264, header.codec); - EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1); - EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx); - EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync); - EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame); - - // test temporal param 2 - h264info->temporal_idx = 0; - h264info->base_layer_sync = false; - h264info->idr_frame = true; - - header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); - - EXPECT_EQ(kVideoCodecH264, header.codec); - EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1 + 1); - EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx); - EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync); - EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame); + EXPECT_EQ(vp9_header.end_of_picture, codec_info.end_of_picture); } TEST(RtpPayloadParamsTest, PictureIdIsSetForVp8) { @@ -203,7 +155,7 @@ TEST(RtpPayloadParamsTest, PictureIdIsSetForVp8) { CodecSpecificInfo codec_info; codec_info.codecType = kVideoCodecVP8; - RtpPayloadParams params(kSsrc1, &state); + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); RTPVideoHeader header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); EXPECT_EQ(kVideoCodecVP8, header.codec); @@ -226,7 +178,7 @@ TEST(RtpPayloadParamsTest, PictureIdWraps) { codec_info.codecType = kVideoCodecVP8; codec_info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx; - RtpPayloadParams params(kSsrc1, &state); + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); RTPVideoHeader header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); EXPECT_EQ(kVideoCodecVP8, header.codec); @@ -250,7 +202,7 @@ TEST(RtpPayloadParamsTest, Tl0PicIdxUpdatedForVp8) { codec_info.codecType = kVideoCodecVP8; 
codec_info.codecSpecific.VP8.temporalIdx = 1; - RtpPayloadParams params(kSsrc1, &state); + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); RTPVideoHeader header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); @@ -286,7 +238,7 @@ TEST(RtpPayloadParamsTest, Tl0PicIdxUpdatedForVp9) { codec_info.codecSpecific.VP9.temporal_idx = 1; codec_info.codecSpecific.VP9.first_frame_in_picture = true; - RtpPayloadParams params(kSsrc1, &state); + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); RTPVideoHeader header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); @@ -327,10 +279,11 @@ TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) { EncodedImage encoded_image; CodecSpecificInfo codec_info; codec_info.codecType = kVideoCodecGeneric; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; - RtpPayloadParams params(kSsrc1, &state); + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); RTPVideoHeader header = - params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); + params.GetRtpVideoHeader(encoded_image, &codec_info, 10); EXPECT_EQ(kVideoCodecGeneric, header.codec); const auto* generic = @@ -338,7 +291,8 @@ TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) { ASSERT_TRUE(generic); EXPECT_EQ(0, generic->picture_id); - header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); + encoded_image._frameType = VideoFrameType::kVideoFrameDelta; + header = params.GetRtpVideoHeader(encoded_image, &codec_info, 20); generic = absl::get_if(&header.video_type_header); ASSERT_TRUE(generic); @@ -346,8 +300,6 @@ TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) { } TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) { - test::ScopedFieldTrials generic_picture_id( - "WebRTC-GenericDescriptor/Enabled/"); RtpPayloadState state{}; EncodedImage encoded_image; @@ -355,7 +307,7 @@ TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) { CodecSpecificInfo 
codec_info; codec_info.codecType = kVideoCodecGeneric; - RtpPayloadParams params(kSsrc1, &state); + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); RTPVideoHeader header = params.GetRtpVideoHeader(encoded_image, &codec_info, 0); @@ -371,14 +323,56 @@ TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) { EXPECT_THAT(header.generic->dependencies, ElementsAre(0)); } +TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) { + RtpPayloadState state; + EncodedImage encoded_image; + CodecSpecificInfo codec_info; + + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); + + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + codec_info.generic_frame_info = + GenericFrameInfo::Builder().S(1).T(0).Dtis("S").Build(); + codec_info.generic_frame_info->encoder_buffers = { + {/*id=*/0, /*referenced=*/false, /*updated=*/true}}; + codec_info.generic_frame_info->part_of_chain = {true, false}; + RTPVideoHeader key_header = + params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/1); + + ASSERT_TRUE(key_header.generic); + EXPECT_EQ(key_header.generic->spatial_index, 1); + EXPECT_EQ(key_header.generic->temporal_index, 0); + EXPECT_EQ(key_header.generic->frame_id, 1); + EXPECT_THAT(key_header.generic->dependencies, IsEmpty()); + EXPECT_THAT(key_header.generic->decode_target_indications, + ElementsAre(DecodeTargetIndication::kSwitch)); + EXPECT_THAT(key_header.generic->chain_diffs, SizeIs(2)); + + encoded_image._frameType = VideoFrameType::kVideoFrameDelta; + codec_info.generic_frame_info = + GenericFrameInfo::Builder().S(2).T(3).Dtis("D").Build(); + codec_info.generic_frame_info->encoder_buffers = { + {/*id=*/0, /*referenced=*/true, /*updated=*/false}}; + codec_info.generic_frame_info->part_of_chain = {false, false}; + RTPVideoHeader delta_header = + params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/3); + + ASSERT_TRUE(delta_header.generic); + EXPECT_EQ(delta_header.generic->spatial_index, 2); + 
EXPECT_EQ(delta_header.generic->temporal_index, 3); + EXPECT_EQ(delta_header.generic->frame_id, 3); + EXPECT_THAT(delta_header.generic->dependencies, ElementsAre(1)); + EXPECT_THAT(delta_header.generic->decode_target_indications, + ElementsAre(DecodeTargetIndication::kDiscardable)); + EXPECT_THAT(delta_header.generic->chain_diffs, SizeIs(2)); +} + class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test { public: enum LayerSync { kNoSync, kSync }; RtpPayloadParamsVp8ToGenericTest() - : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"), - state_(), - params_(123, &state_) {} + : state_(), params_(123, &state_, trials_config_) {} void ConvertAndCheck(int temporal_index, int64_t shared_frame_id, @@ -414,7 +408,7 @@ class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test { } protected: - test::ScopedFieldTrials generic_descriptor_field_trial_; + FieldTrialBasedConfig trials_config_; RtpPayloadState state_; RtpPayloadParams params_; }; @@ -472,9 +466,7 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test { enum LayerSync { kNoSync, kSync }; RtpPayloadParamsH264ToGenericTest() - : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"), - state_(), - params_(123, &state_) {} + : state_(), params_(123, &state_, trials_config_) {} void ConvertAndCheck(int temporal_index, int64_t shared_frame_id, @@ -510,7 +502,7 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test { } protected: - test::ScopedFieldTrials generic_descriptor_field_trial_; + FieldTrialBasedConfig trials_config_; RtpPayloadState state_; RtpPayloadParams params_; }; diff --git a/call/rtp_rtcp_demuxer_helper.cc b/call/rtp_rtcp_demuxer_helper.cc deleted file mode 100644 index 125169b077..0000000000 --- a/call/rtp_rtcp_demuxer_helper.cc +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "call/rtp_rtcp_demuxer_helper.h" - -#include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" -#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" -#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" - -namespace webrtc { - -absl::optional ParseRtcpPacketSenderSsrc( - rtc::ArrayView packet) { - rtcp::CommonHeader header; - for (const uint8_t* next_packet = packet.begin(); next_packet < packet.end(); - next_packet = header.NextPacket()) { - if (!header.Parse(next_packet, packet.end() - next_packet)) { - return absl::nullopt; - } - - switch (header.type()) { - case rtcp::Bye::kPacketType: - case rtcp::ExtendedReports::kPacketType: - case rtcp::Psfb::kPacketType: - case rtcp::ReceiverReport::kPacketType: - case rtcp::Rtpfb::kPacketType: - case rtcp::SenderReport::kPacketType: { - // Sender SSRC at the beginning of the RTCP payload. 
- if (header.payload_size_bytes() >= sizeof(uint32_t)) { - const uint32_t ssrc_sender = - ByteReader::ReadBigEndian(header.payload()); - return ssrc_sender; - } else { - return absl::nullopt; - } - } - } - } - - return absl::nullopt; -} - -} // namespace webrtc diff --git a/call/rtp_rtcp_demuxer_helper.h b/call/rtp_rtcp_demuxer_helper.h deleted file mode 100644 index 6134d56143..0000000000 --- a/call/rtp_rtcp_demuxer_helper.h +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_RTP_RTCP_DEMUXER_HELPER_H_ -#define CALL_RTP_RTCP_DEMUXER_HELPER_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" - -namespace webrtc { - -// TODO(eladalon): Remove this in the next CL. 
-template -bool MultimapAssociationExists(const Container& multimap, - const typename Container::key_type& key, - const typename Container::mapped_type& val) { - auto it_range = multimap.equal_range(key); - using Reference = typename Container::const_reference; - return std::any_of(it_range.first, it_range.second, - [val](Reference elem) { return elem.second == val; }); -} - -template -size_t RemoveFromMultimapByValue(Container* multimap, const Value& value) { - size_t count = 0; - for (auto it = multimap->begin(); it != multimap->end();) { - if (it->second == value) { - it = multimap->erase(it); - ++count; - } else { - ++it; - } - } - return count; -} - -template -size_t RemoveFromMapByValue(Map* map, const Value& value) { - size_t count = 0; - for (auto it = map->begin(); it != map->end();) { - if (it->second == value) { - it = map->erase(it); - ++count; - } else { - ++it; - } - } - return count; -} - -template -bool ContainerHasKey(const Container& c, const Key& k) { - return std::find(c.cbegin(), c.cend(), k) != c.cend(); -} - -// TODO(eladalon): Remove this in the next CL. 
-template -bool MultimapHasValue(const Container& c, - const typename Container::mapped_type& v) { - auto predicate = [v](const typename Container::value_type& it) { - return it.second == v; - }; - return std::any_of(c.cbegin(), c.cend(), predicate); -} - -template -bool MapHasValue(const Map& map, const typename Map::mapped_type& value) { - auto predicate = [value](const typename Map::value_type& it) { - return it.second == value; - }; - return std::any_of(map.cbegin(), map.cend(), predicate); -} - -template -bool MultimapHasKey(const Container& c, - const typename Container::key_type& key) { - auto it_range = c.equal_range(key); - return it_range.first != it_range.second; -} - -absl::optional ParseRtcpPacketSenderSsrc( - rtc::ArrayView packet); - -} // namespace webrtc - -#endif // CALL_RTP_RTCP_DEMUXER_HELPER_H_ diff --git a/call/rtp_rtcp_demuxer_helper_unittest.cc b/call/rtp_rtcp_demuxer_helper_unittest.cc deleted file mode 100644 index 17e6617fb0..0000000000 --- a/call/rtp_rtcp_demuxer_helper_unittest.cc +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/rtp_rtcp_demuxer_helper.h" - -#include - -#include - -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" -#include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h" -#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/buffer.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { -constexpr uint32_t kSsrc = 8374; -} // namespace - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_ByePacket) { - webrtc::rtcp::Bye rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, - ParseRtcpPacketSenderSsrc_ExtendedReportsPacket) { - webrtc::rtcp::ExtendedReports rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_PsfbPacket) { - webrtc::rtcp::Pli rtcp_packet; // Psfb is abstract; use a subclass. 
- rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_ReceiverReportPacket) { - webrtc::rtcp::ReceiverReport rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_RtpfbPacket) { - // Rtpfb is abstract; use a subclass. - webrtc::rtcp::RapidResyncRequest rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_SenderReportPacket) { - webrtc::rtcp::SenderReport rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_MalformedRtcpPacket) { - uint8_t garbage[100]; - memset(&garbage[0], 0, arraysize(garbage)); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(garbage); - EXPECT_FALSE(ssrc); -} - -TEST(RtpRtcpDemuxerHelperTest, - ParseRtcpPacketSenderSsrc_RtcpMessageWithoutSenderSsrc) { - webrtc::rtcp::ExtendedJitterReport rtcp_packet; // Has no sender SSRC. 
- rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_FALSE(ssrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_TruncatedRtcpMessage) { - webrtc::rtcp::Bye rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - constexpr size_t rtcp_length_bytes = 8; - ASSERT_EQ(rtcp_length_bytes, raw_packet.size()); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc( - rtc::ArrayView(raw_packet.data(), rtcp_length_bytes - 1)); - EXPECT_FALSE(ssrc); -} - -} // namespace webrtc diff --git a/call/rtp_stream_receiver_controller.h b/call/rtp_stream_receiver_controller.h index 045af3cf8d..62447aa521 100644 --- a/call/rtp_stream_receiver_controller.h +++ b/call/rtp_stream_receiver_controller.h @@ -14,7 +14,7 @@ #include "call/rtp_demuxer.h" #include "call/rtp_stream_receiver_controller_interface.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" namespace webrtc { @@ -63,7 +63,7 @@ class RtpStreamReceiverController // to be called on the same thread, and OnRtpPacket to be called // by a single, but possibly distinct, thread. But applications not // using Call may have use threads differently. 
- rtc::CriticalSection lock_; + rtc::RecursiveCriticalSection lock_; RtpDemuxer demuxer_ RTC_GUARDED_BY(&lock_); }; diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc index 20f3a996e5..f5adae68ae 100644 --- a/call/rtp_transport_controller_send.cc +++ b/call/rtp_transport_controller_send.cc @@ -13,6 +13,7 @@ #include #include +#include "absl/strings/match.h" #include "absl/types/optional.h" #include "api/transport/goog_cc_factory.h" #include "api/transport/network_types.h" @@ -32,20 +33,22 @@ namespace { static const int64_t kRetransmitWindowSizeMs = 500; static const size_t kMaxOverheadBytes = 500; -constexpr TimeDelta kPacerQueueUpdateInterval = TimeDelta::Millis<25>(); +constexpr TimeDelta kPacerQueueUpdateInterval = TimeDelta::Millis(25); TargetRateConstraints ConvertConstraints(int min_bitrate_bps, int max_bitrate_bps, int start_bitrate_bps, Clock* clock) { TargetRateConstraints msg; - msg.at_time = Timestamp::ms(clock->TimeInMilliseconds()); - msg.min_data_rate = - min_bitrate_bps >= 0 ? DataRate::bps(min_bitrate_bps) : DataRate::Zero(); - msg.max_data_rate = max_bitrate_bps > 0 ? DataRate::bps(max_bitrate_bps) - : DataRate::Infinity(); + msg.at_time = Timestamp::Millis(clock->TimeInMilliseconds()); + msg.min_data_rate = min_bitrate_bps >= 0 + ? DataRate::BitsPerSec(min_bitrate_bps) + : DataRate::Zero(); + msg.max_data_rate = max_bitrate_bps > 0 + ? 
DataRate::BitsPerSec(max_bitrate_bps) + : DataRate::Infinity(); if (start_bitrate_bps > 0) - msg.starting_rate = DataRate::bps(start_bitrate_bps); + msg.starting_rate = DataRate::BitsPerSec(start_bitrate_bps); return msg; } @@ -58,7 +61,16 @@ TargetRateConstraints ConvertConstraints(const BitrateConstraints& contraints, bool IsEnabled(const WebRtcKeyValueConfig* trials, absl::string_view key) { RTC_DCHECK(trials != nullptr); - return trials->Lookup(key).find("Enabled") == 0; + return absl::StartsWith(trials->Lookup(key), "Enabled"); +} + +bool IsDisabled(const WebRtcKeyValueConfig* trials, absl::string_view key) { + RTC_DCHECK(trials != nullptr); + return absl::StartsWith(trials->Lookup(key), "Disabled"); +} + +bool IsRelayed(const rtc::NetworkRoute& route) { + return route.local.uses_turn() || route.remote.uses_turn(); } } // namespace @@ -75,6 +87,7 @@ RtpTransportControllerSend::RtpTransportControllerSend( : clock_(clock), event_log_(event_log), bitrate_configurator_(bitrate_config), + process_thread_started_(false), process_thread_(std::move(process_thread)), use_task_queue_pacer_(IsEnabled(trials, "WebRTC-TaskQueuePacer")), process_thread_pacer_(use_task_queue_pacer_ @@ -84,48 +97,52 @@ RtpTransportControllerSend::RtpTransportControllerSend( event_log, trials, process_thread_.get())), - task_queue_pacer_(use_task_queue_pacer_ - ? new TaskQueuePacedSender(clock, - &packet_router_, - event_log, - trials, - task_queue_factory) - : nullptr), + task_queue_pacer_( + use_task_queue_pacer_ + ? 
new TaskQueuePacedSender( + clock, + &packet_router_, + event_log, + trials, + task_queue_factory, + /*hold_back_window = */ PacingController::kMinSleepTime) + : nullptr), observer_(nullptr), controller_factory_override_(controller_factory), controller_factory_fallback_( std::make_unique(predictor_factory)), process_interval_(controller_factory_fallback_->GetProcessInterval()), - last_report_block_time_(Timestamp::ms(clock_->TimeInMilliseconds())), + last_report_block_time_(Timestamp::Millis(clock_->TimeInMilliseconds())), reset_feedback_on_route_change_( !IsEnabled(trials, "WebRTC-Bwe-NoFeedbackReset")), send_side_bwe_with_overhead_( - IsEnabled(trials, "WebRTC-SendSideBwe-WithOverhead")), + !IsDisabled(trials, "WebRTC-SendSideBwe-WithOverhead")), add_pacing_to_cwin_( IsEnabled(trials, "WebRTC-AddPacingToCongestionWindowPushback")), + relay_bandwidth_cap_("relay_cap", DataRate::PlusInfinity()), transport_overhead_bytes_per_packet_(0), network_available_(false), retransmission_rate_limiter_(clock, kRetransmitWindowSizeMs), task_queue_(task_queue_factory->CreateTaskQueue( "rtp_send_controller", TaskQueueFactory::Priority::NORMAL)) { + ParseFieldTrial({&relay_bandwidth_cap_}, + trials->Lookup("WebRTC-Bwe-NetworkRouteConstraints")); initial_config_.constraints = ConvertConstraints(bitrate_config, clock_); initial_config_.event_log = event_log; initial_config_.key_value_config = trials; RTC_DCHECK(bitrate_config.start_bitrate_bps > 0); - pacer()->SetPacingRates(DataRate::bps(bitrate_config.start_bitrate_bps), - DataRate::Zero()); + pacer()->SetPacingRates( + DataRate::BitsPerSec(bitrate_config.start_bitrate_bps), DataRate::Zero()); - if (!use_task_queue_pacer_) { - process_thread_->Start(); + if (absl::StartsWith(trials->Lookup("WebRTC-LazyPacerStart"), "Disabled")) { + EnsureStarted(); } } RtpTransportControllerSend::~RtpTransportControllerSend() { - if (!use_task_queue_pacer_) { - process_thread_->Stop(); - } + process_thread_->Stop(); } RtpVideoSenderInterface* 
RtpTransportControllerSend::CreateRtpVideoSender( @@ -137,7 +154,8 @@ RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender( const RtpSenderObservers& observers, RtcEventLog* event_log, std::unique_ptr fec_controller, - const RtpSenderFrameEncryptionConfig& frame_encryption_config) { + const RtpSenderFrameEncryptionConfig& frame_encryption_config, + rtc::scoped_refptr frame_transformer) { video_rtp_senders_.push_back(std::make_unique( clock_, suspended_ssrcs, states, rtp_config, rtcp_report_interval_ms, send_transport, observers, @@ -145,7 +163,7 @@ RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender( // the parts of RtpTransportControllerSendInterface that are really used. this, event_log, &retransmission_rate_limiter_, std::move(fec_controller), frame_encryption_config.frame_encryptor, - frame_encryption_config.crypto_options)); + frame_encryption_config.crypto_options, std::move(frame_transformer))); return video_rtp_senders_.back().get(); } @@ -225,7 +243,7 @@ void RtpTransportControllerSend::SetPacingFactor(float pacing_factor) { UpdateStreamsConfig(); } void RtpTransportControllerSend::SetQueueTimeLimit(int limit_ms) { - pacer()->SetQueueTimeLimit(TimeDelta::ms(limit_ms)); + pacer()->SetQueueTimeLimit(TimeDelta::Millis(limit_ms)); } StreamFeedbackProvider* RtpTransportControllerSend::GetStreamFeedbackProvider() { @@ -242,37 +260,71 @@ void RtpTransportControllerSend::RegisterTargetTransferRateObserver( MaybeCreateControllers(); }); } + +bool RtpTransportControllerSend::IsRelevantRouteChange( + const rtc::NetworkRoute& old_route, + const rtc::NetworkRoute& new_route) const { + // TODO(bugs.webrtc.org/11438): Experiment with using more information/ + // other conditions. 
+ bool connected_changed = old_route.connected != new_route.connected; + bool route_ids_changed = + old_route.local.network_id() != new_route.local.network_id() || + old_route.remote.network_id() != new_route.remote.network_id(); + if (relay_bandwidth_cap_->IsFinite()) { + bool relaying_changed = IsRelayed(old_route) != IsRelayed(new_route); + return connected_changed || route_ids_changed || relaying_changed; + } else { + return connected_changed || route_ids_changed; + } +} + void RtpTransportControllerSend::OnNetworkRouteChanged( const std::string& transport_name, const rtc::NetworkRoute& network_route) { // Check if the network route is connected. + if (!network_route.connected) { - RTC_LOG(LS_INFO) << "Transport " << transport_name << " is disconnected"; // TODO(honghaiz): Perhaps handle this in SignalChannelNetworkState and // consider merging these two methods. return; } + absl::optional relay_constraint_update = + ApplyOrLiftRelayCap(IsRelayed(network_route)); + // Check whether the network route has changed on each transport. auto result = network_routes_.insert(std::make_pair(transport_name, network_route)); auto kv = result.first; bool inserted = result.second; + if (inserted || !(kv->second == network_route)) { + RTC_LOG(LS_INFO) << "Network route changed on transport " << transport_name + << ": new_route = " << network_route.DebugString(); + if (!inserted) { + RTC_LOG(LS_INFO) << "old_route = " << kv->second.DebugString(); + } + } + if (inserted) { + if (relay_constraint_update.has_value()) { + UpdateBitrateConstraints(*relay_constraint_update); + } + task_queue_.PostTask([this, network_route] { + RTC_DCHECK_RUN_ON(&task_queue_); + transport_overhead_bytes_per_packet_ = network_route.packet_overhead; + }); // No need to reset BWE if this is the first time the network connects. 
return; } - if (kv->second.connected != network_route.connected || - kv->second.local_network_id != network_route.local_network_id || - kv->second.remote_network_id != network_route.remote_network_id) { - kv->second = network_route; + + const rtc::NetworkRoute old_route = kv->second; + kv->second = network_route; + + // Check if enough conditions of the new/old route has changed + // to trigger resetting of bitrates (and a probe). + if (IsRelevantRouteChange(old_route, network_route)) { BitrateConstraints bitrate_config = bitrate_configurator_.GetConfig(); - RTC_LOG(LS_INFO) << "Network route changed on transport " << transport_name - << ": new local network id " - << network_route.local_network_id - << " new remote network id " - << network_route.remote_network_id - << " Reset bitrates to min: " + RTC_LOG(LS_INFO) << "Reset bitrates to min: " << bitrate_config.min_bitrate_bps << " bps, start: " << bitrate_config.start_bitrate_bps << " bps, max: " << bitrate_config.max_bitrate_bps @@ -284,14 +336,13 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( network_route.connected, network_route.packet_overhead)); } NetworkRouteChange msg; - msg.at_time = Timestamp::ms(clock_->TimeInMilliseconds()); + msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); msg.constraints = ConvertConstraints(bitrate_config, clock_); task_queue_.PostTask([this, msg, network_route] { RTC_DCHECK_RUN_ON(&task_queue_); transport_overhead_bytes_per_packet_ = network_route.packet_overhead; if (reset_feedback_on_route_change_) { - transport_feedback_adapter_.SetNetworkIds( - network_route.local_network_id, network_route.remote_network_id); + transport_feedback_adapter_.SetNetworkRoute(network_route); } if (controller_) { PostUpdates(controller_->OnNetworkRouteChange(msg)); @@ -306,7 +357,7 @@ void RtpTransportControllerSend::OnNetworkAvailability(bool network_available) { RTC_LOG(LS_VERBOSE) << "SignalNetworkState " << (network_available ? 
"Up" : "Down"); NetworkAvailability msg; - msg.at_time = Timestamp::ms(clock_->TimeInMilliseconds()); + msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); msg.network_available = network_available; task_queue_.PostTask([this, msg]() { RTC_DCHECK_RUN_ON(&task_queue_); @@ -372,20 +423,25 @@ void RtpTransportControllerSend::OnReceivedPacket( }); } +void RtpTransportControllerSend::UpdateBitrateConstraints( + const BitrateConstraints& updated) { + TargetRateConstraints msg = ConvertConstraints(updated, clock_); + task_queue_.PostTask([this, msg]() { + RTC_DCHECK_RUN_ON(&task_queue_); + if (controller_) { + PostUpdates(controller_->OnTargetRateConstraints(msg)); + } else { + UpdateInitialConstraints(msg); + } + }); +} + void RtpTransportControllerSend::SetSdpBitrateParameters( const BitrateConstraints& constraints) { absl::optional updated = bitrate_configurator_.UpdateWithSdpParameters(constraints); if (updated.has_value()) { - TargetRateConstraints msg = ConvertConstraints(*updated, clock_); - task_queue_.PostTask([this, msg]() { - RTC_DCHECK_RUN_ON(&task_queue_); - if (controller_) { - PostUpdates(controller_->OnTargetRateConstraints(msg)); - } else { - UpdateInitialConstraints(msg); - } - }); + UpdateBitrateConstraints(*updated); } else { RTC_LOG(LS_VERBOSE) << "WebRTC.RtpTransportControllerSend.SetSdpBitrateParameters: " @@ -398,15 +454,7 @@ void RtpTransportControllerSend::SetClientBitratePreferences( absl::optional updated = bitrate_configurator_.UpdateWithClientPreferences(preferences); if (updated.has_value()) { - TargetRateConstraints msg = ConvertConstraints(*updated, clock_); - task_queue_.PostTask([this, msg]() { - RTC_DCHECK_RUN_ON(&task_queue_); - if (controller_) { - PostUpdates(controller_->OnTargetRateConstraints(msg)); - } else { - UpdateInitialConstraints(msg); - } - }); + UpdateBitrateConstraints(*updated); } else { RTC_LOG(LS_VERBOSE) << "WebRTC.RtpTransportControllerSend.SetClientBitratePreferences: " @@ -414,6 +462,12 @@ void 
RtpTransportControllerSend::SetClientBitratePreferences( } } +absl::optional +RtpTransportControllerSend::ApplyOrLiftRelayCap(bool is_relayed) { + DataRate cap = is_relayed ? relay_bandwidth_cap_ : DataRate::PlusInfinity(); + return bitrate_configurator_.UpdateWithRelayCap(cap); +} + void RtpTransportControllerSend::OnTransportOverheadChanged( size_t transport_overhead_bytes_per_packet) { if (transport_overhead_bytes_per_packet >= kMaxOverheadBytes) { @@ -422,7 +476,7 @@ void RtpTransportControllerSend::OnTransportOverheadChanged( } pacer()->SetTransportOverhead( - DataSize::bytes(transport_overhead_bytes_per_packet)); + DataSize::Bytes(transport_overhead_bytes_per_packet)); // TODO(holmer): Call AudioRtpSenders when they have been moved to // RtpTransportControllerSend. @@ -441,10 +495,17 @@ void RtpTransportControllerSend::IncludeOverheadInPacedSender() { pacer()->SetIncludeOverhead(); } +void RtpTransportControllerSend::EnsureStarted() { + if (!use_task_queue_pacer_ && !process_thread_started_) { + process_thread_started_ = true; + process_thread_->Start(); + } +} + void RtpTransportControllerSend::OnReceivedEstimatedBitrate(uint32_t bitrate) { RemoteBitrateReport msg; - msg.receive_time = Timestamp::ms(clock_->TimeInMilliseconds()); - msg.bandwidth = DataRate::bps(bitrate); + msg.receive_time = Timestamp::Millis(clock_->TimeInMilliseconds()); + msg.bandwidth = DataRate::BitsPerSec(bitrate); task_queue_.PostTask([this, msg]() { RTC_DCHECK_RUN_ON(&task_queue_); if (controller_) @@ -464,8 +525,8 @@ void RtpTransportControllerSend::OnReceivedRtcpReceiverReport( task_queue_.PostTask([this, now_ms, rtt_ms]() { RTC_DCHECK_RUN_ON(&task_queue_); RoundTripTimeUpdate report; - report.receive_time = Timestamp::ms(now_ms); - report.round_trip_time = TimeDelta::ms(rtt_ms); + report.receive_time = Timestamp::Millis(now_ms); + report.round_trip_time = TimeDelta::Millis(rtt_ms); report.smoothed = false; if (controller_ && !report.round_trip_time.IsZero()) 
PostUpdates(controller_->OnRoundTripTimeUpdate(report)); @@ -476,7 +537,7 @@ void RtpTransportControllerSend::OnAddPacket( const RtpPacketSendInfo& packet_info) { feedback_demuxer_.AddPacket(packet_info); - Timestamp creation_time = Timestamp::ms(clock_->TimeInMilliseconds()); + Timestamp creation_time = Timestamp::Millis(clock_->TimeInMilliseconds()); task_queue_.PostTask([this, packet_info, creation_time]() { RTC_DCHECK_RUN_ON(&task_queue_); transport_feedback_adapter_.AddPacket( @@ -489,7 +550,7 @@ void RtpTransportControllerSend::OnAddPacket( void RtpTransportControllerSend::OnTransportFeedback( const rtcp::TransportFeedback& feedback) { feedback_demuxer_.OnTransportFeedback(feedback); - auto feedback_time = Timestamp::ms(clock_->TimeInMilliseconds()); + auto feedback_time = Timestamp::Millis(clock_->TimeInMilliseconds()); task_queue_.PostTask([this, feedback, feedback_time]() { RTC_DCHECK_RUN_ON(&task_queue_); absl::optional feedback_msg = @@ -509,7 +570,7 @@ void RtpTransportControllerSend::OnRemoteNetworkEstimate( event_log_->Log(std::make_unique( estimate.link_capacity_lower, estimate.link_capacity_upper)); } - estimate.update_time = Timestamp::ms(clock_->TimeInMilliseconds()); + estimate.update_time = Timestamp::Millis(clock_->TimeInMilliseconds()); task_queue_.PostTask([this, estimate] { RTC_DCHECK_RUN_ON(&task_queue_); if (controller_) @@ -526,7 +587,7 @@ void RtpTransportControllerSend::MaybeCreateControllers() { control_handler_ = std::make_unique(); initial_config_.constraints.at_time = - Timestamp::ms(clock_->TimeInMilliseconds()); + Timestamp::Millis(clock_->TimeInMilliseconds()); initial_config_.stream_based_config = streams_config_; // TODO(srte): Use fallback controller if no feedback is available. 
@@ -576,14 +637,14 @@ void RtpTransportControllerSend::StartProcessPeriodicTasks() { void RtpTransportControllerSend::UpdateControllerWithTimeInterval() { RTC_DCHECK(controller_); ProcessInterval msg; - msg.at_time = Timestamp::ms(clock_->TimeInMilliseconds()); + msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); if (add_pacing_to_cwin_) msg.pacer_queue = pacer()->QueueSizeData(); PostUpdates(controller_->OnProcessInterval(msg)); } void RtpTransportControllerSend::UpdateStreamsConfig() { - streams_config_.at_time = Timestamp::ms(clock_->TimeInMilliseconds()); + streams_config_.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); if (controller_) PostUpdates(controller_->OnStreamsConfig(streams_config_)); } @@ -637,7 +698,7 @@ void RtpTransportControllerSend::OnReceivedRtcpReceiverReportBlocks( if (packets_received_delta < 1) return; - Timestamp now = Timestamp::ms(now_ms); + Timestamp now = Timestamp::Millis(now_ms); TransportLossReport msg; msg.packets_lost_delta = total_packets_lost_delta; msg.packets_received_delta = packets_received_delta; diff --git a/call/rtp_transport_controller_send.h b/call/rtp_transport_controller_send.h index b07bea73d8..7025b03312 100644 --- a/call/rtp_transport_controller_send.h +++ b/call/rtp_transport_controller_send.h @@ -19,6 +19,7 @@ #include "api/network_state_predictor.h" #include "api/transport/network_control.h" +#include "api/units/data_rate.h" #include "call/rtp_bitrate_configurator.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender.h" @@ -71,7 +72,8 @@ class RtpTransportControllerSend final const RtpSenderObservers& observers, RtcEventLog* event_log, std::unique_ptr fec_controller, - const RtpSenderFrameEncryptionConfig& frame_encryption_config) override; + const RtpSenderFrameEncryptionConfig& frame_encryption_config, + rtc::scoped_refptr frame_transformer) override; void DestroyRtpVideoSender( RtpVideoSenderInterface* rtp_video_sender) override; @@ -104,10 
+106,11 @@ class RtpTransportControllerSend final void SetClientBitratePreferences(const BitrateSettings& preferences) override; void OnTransportOverheadChanged( - size_t transport_overhead_per_packet) override; + size_t transport_overhead_bytes_per_packet) override; void AccountForAudioPacketsInPacedSender(bool account_for_audio) override; void IncludeOverheadInPacedSender() override; + void EnsureStarted() override; // Implements RtcpBandwidthObserver interface void OnReceivedEstimatedBitrate(uint32_t bitrate) override; @@ -130,6 +133,10 @@ class RtpTransportControllerSend final void StartProcessPeriodicTasks() RTC_RUN_ON(task_queue_); void UpdateControllerWithTimeInterval() RTC_RUN_ON(task_queue_); + absl::optional ApplyOrLiftRelayCap(bool is_relayed); + bool IsRelevantRouteChange(const rtc::NetworkRoute& old_route, + const rtc::NetworkRoute& new_route) const; + void UpdateBitrateConstraints(const BitrateConstraints& updated); void UpdateStreamsConfig() RTC_RUN_ON(task_queue_); void OnReceivedRtcpReceiverReportBlocks(const ReportBlockList& report_blocks, int64_t now_ms) @@ -145,6 +152,7 @@ class RtpTransportControllerSend final std::vector> video_rtp_senders_; RtpBitrateConfigurator bitrate_configurator_; std::map network_routes_; + bool process_thread_started_; const std::unique_ptr process_thread_; const bool use_task_queue_pacer_; std::unique_ptr process_thread_pacer_; @@ -179,6 +187,7 @@ class RtpTransportControllerSend final const bool reset_feedback_on_route_change_; const bool send_side_bwe_with_overhead_; const bool add_pacing_to_cwin_; + FieldTrialParameter relay_bandwidth_cap_; size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(task_queue_); bool network_available_ RTC_GUARDED_BY(task_queue_); diff --git a/call/rtp_transport_controller_send_interface.h b/call/rtp_transport_controller_send_interface.h index b40aabdc2c..602908e2a4 100644 --- a/call/rtp_transport_controller_send_interface.h +++ b/call/rtp_transport_controller_send_interface.h @@ 
-21,10 +21,12 @@ #include "absl/types/optional.h" #include "api/crypto/crypto_options.h" #include "api/fec_controller.h" +#include "api/frame_transformer_interface.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/bitrate_settings.h" #include "api/units/timestamp.h" #include "call/rtp_config.h" +#include "common_video/frame_counts.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_packet_sender.h" @@ -110,7 +112,8 @@ class RtpTransportControllerSendInterface { const RtpSenderObservers& observers, RtcEventLog* event_log, std::unique_ptr fec_controller, - const RtpSenderFrameEncryptionConfig& frame_encryption_config) = 0; + const RtpSenderFrameEncryptionConfig& frame_encryption_config, + rtc::scoped_refptr frame_transformer) = 0; virtual void DestroyRtpVideoSender( RtpVideoSenderInterface* rtp_video_sender) = 0; @@ -151,6 +154,8 @@ class RtpTransportControllerSendInterface { virtual void AccountForAudioPacketsInPacedSender(bool account_for_audio) = 0; virtual void IncludeOverheadInPacedSender() = 0; + + virtual void EnsureStarted() = 0; }; } // namespace webrtc diff --git a/call/rtp_video_sender.cc b/call/rtp_video_sender.cc index 413171fa67..9dad424c86 100644 --- a/call/rtp_video_sender.cc +++ b/call/rtp_video_sender.cc @@ -22,28 +22,27 @@ #include "api/video_codecs/video_codec.h" #include "call/rtp_transport_controller_send_interface.h" #include "modules/pacing/packet_router.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/playout_delay_oracle.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" 
-#include "system_wrappers/include/field_trial.h" +#include "rtc_base/task_queue.h" namespace webrtc { namespace webrtc_internal_rtp_video_sender { RtpStreamSender::RtpStreamSender( - std::unique_ptr playout_delay_oracle, - std::unique_ptr rtp_rtcp, - std::unique_ptr sender_video) - : playout_delay_oracle(std::move(playout_delay_oracle)), - rtp_rtcp(std::move(rtp_rtcp)), - sender_video(std::move(sender_video)) {} + std::unique_ptr rtp_rtcp, + std::unique_ptr sender_video, + std::unique_ptr fec_generator) + : rtp_rtcp(std::move(rtp_rtcp)), + sender_video(std::move(sender_video)), + fec_generator(std::move(fec_generator)) {} RtpStreamSender::~RtpStreamSender() = default; @@ -56,20 +55,22 @@ static const size_t kPathMTU = 1500; using webrtc_internal_rtp_video_sender::RtpStreamSender; -bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) { +bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name, + const WebRtcKeyValueConfig& trials) { const VideoCodecType codecType = PayloadStringToCodecType(payload_name); if (codecType == kVideoCodecVP8 || codecType == kVideoCodecVP9) { return true; } if (codecType == kVideoCodecGeneric && - field_trial::IsEnabled("WebRTC-GenericPictureId")) { + absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"), "Enabled")) { return true; } return false; } bool ShouldDisableRedAndUlpfec(bool flexfec_enabled, - const RtpConfig& rtp_config) { + const RtpConfig& rtp_config, + const WebRtcKeyValueConfig& trials) { // Consistency of NACK and RED+ULPFEC parameters is checked in this function. 
const bool nack_enabled = rtp_config.nack.rtp_history_ms > 0; @@ -81,7 +82,8 @@ bool ShouldDisableRedAndUlpfec(bool flexfec_enabled, bool should_disable_red_and_ulpfec = false; - if (webrtc::field_trial::IsEnabled("WebRTC-DisableUlpFecExperiment")) { + if (absl::StartsWith(trials.Lookup("WebRTC-DisableUlpFecExperiment"), + "Enabled")) { RTC_LOG(LS_INFO) << "Experiment to disable sending ULPFEC is enabled."; should_disable_red_and_ulpfec = true; } @@ -100,7 +102,7 @@ bool ShouldDisableRedAndUlpfec(bool flexfec_enabled, // is a waste of bandwidth since FEC packets still have to be transmitted. // Note that this is not the case with FlexFEC. if (nack_enabled && IsUlpfecEnabled() && - !PayloadTypeSupportsSkippingFecPackets(rtp_config.payload_name)) { + !PayloadTypeSupportsSkippingFecPackets(rtp_config.payload_name, trials)) { RTC_LOG(LS_WARNING) << "Transmitting payload type without picture ID using " "NACK+ULPFEC is a waste of bandwidth since ULPFEC packets " @@ -118,6 +120,69 @@ bool ShouldDisableRedAndUlpfec(bool flexfec_enabled, return should_disable_red_and_ulpfec; } +// TODO(brandtr): Update this function when we support multistream protection. +std::unique_ptr MaybeCreateFecGenerator( + Clock* clock, + const RtpConfig& rtp, + const std::map& suspended_ssrcs, + int simulcast_index, + const WebRtcKeyValueConfig& trials) { + // If flexfec is configured that takes priority. + if (rtp.flexfec.payload_type >= 0) { + RTC_DCHECK_GE(rtp.flexfec.payload_type, 0); + RTC_DCHECK_LE(rtp.flexfec.payload_type, 127); + if (rtp.flexfec.ssrc == 0) { + RTC_LOG(LS_WARNING) << "FlexFEC is enabled, but no FlexFEC SSRC given. " + "Therefore disabling FlexFEC."; + return nullptr; + } + if (rtp.flexfec.protected_media_ssrcs.empty()) { + RTC_LOG(LS_WARNING) + << "FlexFEC is enabled, but no protected media SSRC given. 
" + "Therefore disabling FlexFEC."; + return nullptr; + } + + if (rtp.flexfec.protected_media_ssrcs.size() > 1) { + RTC_LOG(LS_WARNING) + << "The supplied FlexfecConfig contained multiple protected " + "media streams, but our implementation currently only " + "supports protecting a single media stream. " + "To avoid confusion, disabling FlexFEC completely."; + return nullptr; + } + + if (absl::c_find(rtp.flexfec.protected_media_ssrcs, + rtp.ssrcs[simulcast_index]) == + rtp.flexfec.protected_media_ssrcs.end()) { + // Media SSRC not among flexfec protected SSRCs. + return nullptr; + } + + const RtpState* rtp_state = nullptr; + auto it = suspended_ssrcs.find(rtp.flexfec.ssrc); + if (it != suspended_ssrcs.end()) { + rtp_state = &it->second; + } + + RTC_DCHECK_EQ(1U, rtp.flexfec.protected_media_ssrcs.size()); + return std::make_unique( + rtp.flexfec.payload_type, rtp.flexfec.ssrc, + rtp.flexfec.protected_media_ssrcs[0], rtp.mid, rtp.extensions, + RTPSender::FecExtensionSizes(), rtp_state, clock); + } else if (rtp.ulpfec.red_payload_type >= 0 && + rtp.ulpfec.ulpfec_payload_type >= 0 && + !ShouldDisableRedAndUlpfec(/*flexfec_enabled=*/false, rtp, + trials)) { + // Flexfec not configured, but ulpfec is and is not disabled. + return std::make_unique( + rtp.ulpfec.red_payload_type, rtp.ulpfec.ulpfec_payload_type, clock); + } + + // Not a single FEC is given. 
+ return nullptr; +} + std::vector CreateRtpStreamSenders( Clock* clock, const RtpConfig& rtp_config, @@ -126,15 +191,16 @@ std::vector CreateRtpStreamSenders( Transport* send_transport, RtcpBandwidthObserver* bandwidth_callback, RtpTransportControllerSendInterface* transport, - FlexfecSender* flexfec_sender, + const std::map& suspended_ssrcs, RtcEventLog* event_log, RateLimiter* retransmission_rate_limiter, - OverheadObserver* overhead_observer, FrameEncryptorInterface* frame_encryptor, - const CryptoOptions& crypto_options) { + const CryptoOptions& crypto_options, + rtc::scoped_refptr frame_transformer, + const WebRtcKeyValueConfig& trials) { RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0); - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.clock = clock; configuration.audio = false; configuration.receiver_only = false; @@ -150,127 +216,87 @@ std::vector CreateRtpStreamSenders( configuration.rtt_stats = observers.rtcp_rtt_stats; configuration.rtcp_packet_type_counter_observer = observers.rtcp_type_observer; + configuration.rtcp_statistics_callback = observers.rtcp_stats; + configuration.report_block_data_observer = + observers.report_block_data_observer; configuration.paced_sender = transport->packet_sender(); configuration.send_bitrate_observer = observers.bitrate_observer; configuration.send_side_delay_observer = observers.send_delay_observer; configuration.send_packet_observer = observers.send_packet_observer; configuration.event_log = event_log; configuration.retransmission_rate_limiter = retransmission_rate_limiter; - configuration.overhead_observer = overhead_observer; configuration.rtp_stats_callback = observers.rtp_stats; configuration.frame_encryptor = frame_encryptor; configuration.require_frame_encryption = crypto_options.sframe.require_frame_encryption; configuration.extmap_allow_mixed = rtp_config.extmap_allow_mixed; configuration.rtcp_report_interval_ms = rtcp_report_interval_ms; + 
configuration.field_trials = &trials; std::vector rtp_streams; - const std::vector& flexfec_protected_ssrcs = - rtp_config.flexfec.protected_media_ssrcs; + RTC_DCHECK(rtp_config.rtx.ssrcs.empty() || - rtp_config.rtx.ssrcs.size() == rtp_config.rtx.ssrcs.size()); + rtp_config.rtx.ssrcs.size() == rtp_config.ssrcs.size()); for (size_t i = 0; i < rtp_config.ssrcs.size(); ++i) { + RTPSenderVideo::Config video_config; configuration.local_media_ssrc = rtp_config.ssrcs[i]; - bool enable_flexfec = flexfec_sender != nullptr && - std::find(flexfec_protected_ssrcs.begin(), - flexfec_protected_ssrcs.end(), - configuration.local_media_ssrc) != - flexfec_protected_ssrcs.end(); - configuration.flexfec_sender = enable_flexfec ? flexfec_sender : nullptr; - auto playout_delay_oracle = std::make_unique(); - - configuration.ack_observer = playout_delay_oracle.get(); - if (rtp_config.rtx.ssrcs.size() > i) { - configuration.rtx_send_ssrc = rtp_config.rtx.ssrcs[i]; - } - auto rtp_rtcp = RtpRtcp::Create(configuration); + std::unique_ptr fec_generator = + MaybeCreateFecGenerator(clock, rtp_config, suspended_ssrcs, i, trials); + configuration.fec_generator = fec_generator.get(); + + configuration.rtx_send_ssrc = + rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(rtp_config.ssrcs[i]); + RTC_DCHECK_EQ(configuration.rtx_send_ssrc.has_value(), + !rtp_config.rtx.ssrcs.empty()); + + configuration.need_rtp_packet_infos = rtp_config.lntf.enabled; + + std::unique_ptr rtp_rtcp( + ModuleRtpRtcpImpl2::Create(configuration)); rtp_rtcp->SetSendingStatus(false); rtp_rtcp->SetSendingMediaStatus(false); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); // Set NACK. 
rtp_rtcp->SetStorePacketsStatus(true, kMinSendSidePacketHistorySize); - FieldTrialBasedConfig field_trial_config; - RTPSenderVideo::Config video_config; video_config.clock = configuration.clock; video_config.rtp_sender = rtp_rtcp->RtpSender(); - video_config.flexfec_sender = configuration.flexfec_sender; - video_config.playout_delay_oracle = playout_delay_oracle.get(); video_config.frame_encryptor = frame_encryptor; video_config.require_frame_encryption = crypto_options.sframe.require_frame_encryption; - video_config.need_rtp_packet_infos = rtp_config.lntf.enabled; video_config.enable_retransmit_all_layers = false; - video_config.field_trials = &field_trial_config; + video_config.field_trials = &trials; + + const bool using_flexfec = + fec_generator && + fec_generator->GetFecType() == VideoFecGenerator::FecType::kFlexFec; const bool should_disable_red_and_ulpfec = - ShouldDisableRedAndUlpfec(enable_flexfec, rtp_config); - if (rtp_config.ulpfec.red_payload_type != -1 && - !should_disable_red_and_ulpfec) { + ShouldDisableRedAndUlpfec(using_flexfec, rtp_config, trials); + if (!should_disable_red_and_ulpfec && + rtp_config.ulpfec.red_payload_type != -1) { video_config.red_payload_type = rtp_config.ulpfec.red_payload_type; } - if (rtp_config.ulpfec.ulpfec_payload_type != -1 && - !should_disable_red_and_ulpfec) { - video_config.ulpfec_payload_type = rtp_config.ulpfec.ulpfec_payload_type; + if (fec_generator) { + video_config.fec_type = fec_generator->GetFecType(); + video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead(); } + video_config.frame_transformer = frame_transformer; + video_config.send_transport_queue = transport->GetWorkerQueue()->Get(); auto sender_video = std::make_unique(video_config); - rtp_streams.emplace_back(std::move(playout_delay_oracle), - std::move(rtp_rtcp), std::move(sender_video)); + rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video), + std::move(fec_generator)); } return rtp_streams; } -// TODO(brandtr): Update 
this function when we support multistream protection. -std::unique_ptr MaybeCreateFlexfecSender( - Clock* clock, - const RtpConfig& rtp, - const std::map& suspended_ssrcs) { - if (rtp.flexfec.payload_type < 0) { - return nullptr; - } - RTC_DCHECK_GE(rtp.flexfec.payload_type, 0); - RTC_DCHECK_LE(rtp.flexfec.payload_type, 127); - if (rtp.flexfec.ssrc == 0) { - RTC_LOG(LS_WARNING) << "FlexFEC is enabled, but no FlexFEC SSRC given. " - "Therefore disabling FlexFEC."; - return nullptr; - } - if (rtp.flexfec.protected_media_ssrcs.empty()) { - RTC_LOG(LS_WARNING) - << "FlexFEC is enabled, but no protected media SSRC given. " - "Therefore disabling FlexFEC."; - return nullptr; - } - - if (rtp.flexfec.protected_media_ssrcs.size() > 1) { - RTC_LOG(LS_WARNING) - << "The supplied FlexfecConfig contained multiple protected " - "media streams, but our implementation currently only " - "supports protecting a single media stream. " - "To avoid confusion, disabling FlexFEC completely."; - return nullptr; - } - - const RtpState* rtp_state = nullptr; - auto it = suspended_ssrcs.find(rtp.flexfec.ssrc); - if (it != suspended_ssrcs.end()) { - rtp_state = &it->second; - } - - RTC_DCHECK_EQ(1U, rtp.flexfec.protected_media_ssrcs.size()); - return std::make_unique( - rtp.flexfec.payload_type, rtp.flexfec.ssrc, - rtp.flexfec.protected_media_ssrcs[0], rtp.mid, rtp.extensions, - RTPSender::FecExtensionSizes(), rtp_state, clock); -} - DataRate CalculateOverheadRate(DataRate data_rate, DataSize packet_size, DataSize overhead_per_packet) { Frequency packet_rate = data_rate / packet_size; // TOSO(srte): We should not need to round to nearest whole packet per second // rate here. 
- return packet_rate.RoundUpTo(Frequency::hertz(1)) * overhead_per_packet; + return packet_rate.RoundUpTo(Frequency::Hertz(1)) * overhead_per_packet; } absl::optional GetVideoCodecType(const RtpConfig& config) { @@ -279,8 +305,8 @@ absl::optional GetVideoCodecType(const RtpConfig& config) { } return PayloadStringToCodecType(config.payload_name); } -bool TransportSeqNumExtensionConfigured(const RtpConfig& config_config) { - return absl::c_any_of(config_config.extensions, [](const RtpExtension& ext) { +bool TransportSeqNumExtensionConfigured(const RtpConfig& config) { + return absl::c_any_of(config.extensions, [](const RtpExtension& ext) { return ext.uri == RtpExtension::kTransportSequenceNumberUri; }); } @@ -299,19 +325,15 @@ RtpVideoSender::RtpVideoSender( RateLimiter* retransmission_limiter, std::unique_ptr fec_controller, FrameEncryptorInterface* frame_encryptor, - const CryptoOptions& crypto_options) - : send_side_bwe_with_overhead_( - webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")), - account_for_packetization_overhead_(!webrtc::field_trial::IsDisabled( - "WebRTC-SubtractPacketizationOverhead")), - use_early_loss_detection_( - !webrtc::field_trial::IsDisabled("WebRTC-UseEarlyLossDetection")), + const CryptoOptions& crypto_options, + rtc::scoped_refptr frame_transformer) + : send_side_bwe_with_overhead_(!absl::StartsWith( + field_trials_.Lookup("WebRTC-SendSideBwe-WithOverhead"), + "Disabled")), has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)), active_(false), module_process_thread_(nullptr), suspended_ssrcs_(std::move(suspended_ssrcs)), - flexfec_sender_( - MaybeCreateFlexfecSender(clock, rtp_config, suspended_ssrcs_)), fec_controller_(std::move(fec_controller)), fec_allowed_(true), rtp_streams_(CreateRtpStreamSenders(clock, @@ -321,17 +343,17 @@ RtpVideoSender::RtpVideoSender( send_transport, transport->GetBandwidthObserver(), transport, - flexfec_sender_.get(), + suspended_ssrcs_, event_log, retransmission_limiter, - 
this, frame_encryptor, - crypto_options)), + crypto_options, + std::move(frame_transformer), + field_trials_)), rtp_config_(rtp_config), codec_type_(GetVideoCodecType(rtp_config)), transport_(transport), transport_overhead_bytes_per_packet_(0), - overhead_bytes_per_packet_(0), encoder_target_rate_bps_(0), frame_counts_(rtp_config.ssrcs.size()), frame_count_observer_(observers.frame_count_observer) { @@ -348,7 +370,7 @@ RtpVideoSender::RtpVideoSender( state = &it->second; shared_frame_id_ = std::max(shared_frame_id_, state->shared_frame_id); } - params_.push_back(RtpPayloadParams(ssrc, state)); + params_.push_back(RtpPayloadParams(ssrc, state, field_trials_)); } // RTP/RTCP initialization. @@ -383,19 +405,20 @@ RtpVideoSender::RtpVideoSender( } } + bool fec_enabled = false; for (const RtpStreamSender& stream : rtp_streams_) { // Simulcast has one module for each layer. Set the CNAME on all modules. stream.rtp_rtcp->SetCNAME(rtp_config_.c_name.c_str()); - stream.rtp_rtcp->RegisterRtcpStatisticsCallback(observers.rtcp_stats); - stream.rtp_rtcp->SetReportBlockDataObserver( - observers.report_block_data_observer); stream.rtp_rtcp->SetMaxRtpPacketSize(rtp_config_.max_packet_size); stream.rtp_rtcp->RegisterSendPayloadFrequency(rtp_config_.payload_type, kVideoPayloadTypeFrequency); + if (stream.fec_generator != nullptr) { + fec_enabled = true; + } } // Currently, both ULPFEC and FlexFEC use the same FEC rate calculation logic, // so enable that logic if either of those FEC schemes are enabled. - fec_controller_->SetProtectionMethod(FecEnabled(), NackEnabled()); + fec_controller_->SetProtectionMethod(fec_enabled, NackEnabled()); fec_controller_->SetProtectionCallback(this); // Signal congestion controller this object is ready for OnPacket* callbacks. 
@@ -430,15 +453,20 @@ void RtpVideoSender::DeRegisterProcessThread() { } void RtpVideoSender::SetActive(bool active) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (active_ == active) return; const std::vector active_modules(rtp_streams_.size(), active); - SetActiveModules(active_modules); + SetActiveModulesLocked(active_modules); } void RtpVideoSender::SetActiveModules(const std::vector active_modules) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); + return SetActiveModulesLocked(active_modules); +} + +void RtpVideoSender::SetActiveModulesLocked( + const std::vector active_modules) { RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size()); active_ = false; for (size_t i = 0; i < active_modules.size(); ++i) { @@ -453,17 +481,20 @@ void RtpVideoSender::SetActiveModules(const std::vector active_modules) { } bool RtpVideoSender::IsActive() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); + return IsActiveLocked(); +} + +bool RtpVideoSender::IsActiveLocked() { return active_ && !rtp_streams_.empty(); } EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codec_specific_info) { fec_controller_->UpdateWithEncodedData(encoded_image.size(), encoded_image._frameType); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RTC_DCHECK(!rtp_streams_.empty()); if (!active_) return Result(Result::ERROR_SEND_FAILED); @@ -501,9 +532,18 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( rtp_streams_[stream_index].rtp_rtcp->ExpectedRetransmissionTimeMs(); } - bool send_result = rtp_streams_[stream_index].sender_video->SendVideo( - rtp_config_.payload_type, codec_type_, rtp_timestamp, - encoded_image.capture_time_ms_, encoded_image, fragmentation, + if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) { + // If encoder adapter produce 
FrameDependencyStructure, pass it so that + // dependency descriptor rtp header extension can be used. + // If not supported, disable using dependency descriptor by passing nullptr. + rtp_streams_[stream_index].sender_video->SetVideoStructure( + (codec_specific_info && codec_specific_info->template_structure) + ? &*codec_specific_info->template_structure + : nullptr); + } + + bool send_result = rtp_streams_[stream_index].sender_video->SendEncodedImage( + rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image, params_[stream_index].GetRtpVideoHeader( encoded_image, codec_specific_info, shared_frame_id_), expected_retransmission_time_ms); @@ -527,8 +567,8 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( void RtpVideoSender::OnBitrateAllocationUpdated( const VideoBitrateAllocation& bitrate) { - rtc::CritScope lock(&crit_); - if (IsActive()) { + MutexLock lock(&mutex_); + if (IsActiveLocked()) { if (rtp_streams_.size() == 1) { // If spatial scalability is enabled, it is covered by a single stream. 
rtp_streams_[0].rtp_rtcp->SetVideoBitrateAllocation(bitrate); @@ -552,13 +592,17 @@ void RtpVideoSender::OnBitrateAllocationUpdated( } } } - -bool RtpVideoSender::FecEnabled() const { - const bool flexfec_enabled = (flexfec_sender_ != nullptr); - const bool ulpfec_enabled = - !webrtc::field_trial::IsEnabled("WebRTC-DisableUlpFecExperiment") && - (rtp_config_.ulpfec.ulpfec_payload_type >= 0); - return flexfec_enabled || ulpfec_enabled; +void RtpVideoSender::OnVideoLayersAllocationUpdated( + const VideoLayersAllocation& allocation) { + MutexLock lock(&mutex_); + if (IsActiveLocked()) { + for (size_t i = 0; i < rtp_streams_.size(); ++i) { + VideoLayersAllocation stream_allocation = allocation; + stream_allocation.rtp_stream_index = i; + rtp_streams_[i].sender_video->SetVideoLayersAllocation( + std::move(stream_allocation)); + } + } } bool RtpVideoSender::NackEnabled() const { @@ -588,7 +632,7 @@ void RtpVideoSender::ConfigureSsrcs() { RTC_CHECK(ssrc_to_rtp_module_.empty()); for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) { uint32_t ssrc = rtp_config_.ssrcs[i]; - RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); + RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); // Restore RTP state if previous existed. 
auto it = suspended_ssrcs_.find(ssrc); @@ -605,7 +649,7 @@ void RtpVideoSender::ConfigureSsrcs() { RTC_DCHECK_EQ(rtp_config_.rtx.ssrcs.size(), rtp_config_.ssrcs.size()); for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) { uint32_t ssrc = rtp_config_.rtx.ssrcs[i]; - RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); + RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); auto it = suspended_ssrcs_.find(ssrc); if (it != suspended_ssrcs_.end()) rtp_rtcp->SetRtxState(it->second); @@ -655,6 +699,17 @@ std::map RtpVideoSender::GetRtpStates() const { uint32_t ssrc = rtp_config_.ssrcs[i]; RTC_DCHECK_EQ(ssrc, rtp_streams_[i].rtp_rtcp->SSRC()); rtp_states[ssrc] = rtp_streams_[i].rtp_rtcp->GetRtpState(); + + // Only happens during shutdown, when RTP module is already inactive, + // so OK to call fec generator here. + if (rtp_streams_[i].fec_generator) { + absl::optional fec_state = + rtp_streams_[i].fec_generator->GetRtpState(); + if (fec_state) { + uint32_t ssrc = rtp_config_.flexfec.ssrc; + rtp_states[ssrc] = *fec_state; + } + } } for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) { @@ -662,17 +717,12 @@ std::map RtpVideoSender::GetRtpStates() const { rtp_states[ssrc] = rtp_streams_[i].rtp_rtcp->GetRtxState(); } - if (flexfec_sender_) { - uint32_t ssrc = rtp_config_.flexfec.ssrc; - rtp_states[ssrc] = flexfec_sender_->GetRtpState(); - } - return rtp_states; } std::map RtpVideoSender::GetRtpPayloadStates() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); std::map payload_states; for (const auto& param : params_) { payload_states[param.ssrc()] = param.state(); @@ -683,7 +733,7 @@ std::map RtpVideoSender::GetRtpPayloadStates() void RtpVideoSender::OnTransportOverheadChanged( size_t transport_overhead_bytes_per_packet) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); transport_overhead_bytes_per_packet_ = transport_overhead_bytes_per_packet; size_t max_rtp_packet_size = @@ -694,18 +744,25 @@ void 
RtpVideoSender::OnTransportOverheadChanged( } } -void RtpVideoSender::OnOverheadChanged(size_t overhead_bytes_per_packet) { - rtc::CritScope lock(&crit_); - overhead_bytes_per_packet_ = overhead_bytes_per_packet; -} - void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) { // Substract overhead from bitrate. - rtc::CritScope lock(&crit_); - DataSize packet_overhead = DataSize::bytes( - overhead_bytes_per_packet_ + transport_overhead_bytes_per_packet_); - DataSize max_total_packet_size = DataSize::bytes( + MutexLock lock(&mutex_); + size_t num_active_streams = 0; + size_t overhead_bytes_per_packet = 0; + for (const auto& stream : rtp_streams_) { + if (stream.rtp_rtcp->SendingMedia()) { + overhead_bytes_per_packet += stream.rtp_rtcp->ExpectedPerPacketOverhead(); + ++num_active_streams; + } + } + if (num_active_streams > 1) { + overhead_bytes_per_packet /= num_active_streams; + } + + DataSize packet_overhead = DataSize::Bytes( + overhead_bytes_per_packet + transport_overhead_bytes_per_packet_); + DataSize max_total_packet_size = DataSize::Bytes( rtp_config_.max_packet_size + transport_overhead_bytes_per_packet_); uint32_t payload_bitrate_bps = update.target_bitrate.bps(); if (send_side_bwe_with_overhead_ && has_packet_feedback_) { @@ -730,16 +787,13 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, // since |fec_allowed_| may be toggled back on at any moment. } - uint32_t packetization_rate_bps = 0; - if (account_for_packetization_overhead_) { // Subtract packetization overhead from the encoder target. If target rate // is really low, cap the overhead at 50%. This also avoids the case where // |encoder_target_rate_bps_| is 0 due to encoder pause event while the // packetization rate is positive since packets are still flowing. 
- packetization_rate_bps = - std::min(GetPacketizationOverheadRate(), encoder_target_rate_bps_ / 2); - encoder_target_rate_bps_ -= packetization_rate_bps; - } + uint32_t packetization_rate_bps = + std::min(GetPacketizationOverheadRate(), encoder_target_rate_bps_ / 2); + encoder_target_rate_bps_ -= packetization_rate_bps; loss_mask_vector_.clear(); @@ -750,8 +804,8 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, // make sense to use different packet rates for different overhead // calculations. DataRate encoder_overhead_rate = CalculateOverheadRate( - DataRate::bps(encoder_target_rate_bps_), - max_total_packet_size - DataSize::bytes(overhead_bytes_per_packet_), + DataRate::BitsPerSec(encoder_target_rate_bps_), + max_total_packet_size - DataSize::Bytes(overhead_bytes_per_packet), packet_overhead); encoder_overhead_rate_bps = std::min( encoder_overhead_rate.bps(), @@ -762,7 +816,7 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, const uint32_t media_rate = encoder_target_rate_bps_ + encoder_overhead_rate_bps + packetization_rate_bps; - RTC_DCHECK_GE(update.target_bitrate, DataRate::bps(media_rate)); + RTC_DCHECK_GE(update.target_bitrate, DataRate::BitsPerSec(media_rate)); protection_bitrate_bps_ = update.target_bitrate.bps() - media_rate; } @@ -779,7 +833,7 @@ std::vector RtpVideoSender::GetSentRtpPacketInfos( rtc::ArrayView sequence_numbers) const { for (const auto& rtp_stream : rtp_streams_) { if (ssrc == rtp_stream.rtp_rtcp->SSRC()) { - return rtp_stream.sender_video->GetSentRtpPacketInfos(sequence_numbers); + return rtp_stream.rtp_rtcp->GetSentRtpPacketInfos(sequence_numbers); } } return std::vector(); @@ -794,27 +848,27 @@ int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params, *sent_nack_rate_bps = 0; *sent_fec_rate_bps = 0; for (const RtpStreamSender& stream : rtp_streams_) { - uint32_t not_used = 0; - uint32_t module_nack_rate = 0; - stream.sender_video->SetFecParameters(*delta_params, 
*key_params); - *sent_video_rate_bps += stream.sender_video->VideoBitrateSent(); - *sent_fec_rate_bps += stream.sender_video->FecOverheadRate(); - stream.rtp_rtcp->BitrateSent(¬_used, /*video_rate=*/nullptr, - /*fec_rate=*/nullptr, &module_nack_rate); - *sent_nack_rate_bps += module_nack_rate; + stream.rtp_rtcp->SetFecProtectionParams(*delta_params, *key_params); + + auto send_bitrate = stream.rtp_rtcp->GetSendRates(); + *sent_video_rate_bps += send_bitrate[RtpPacketMediaType::kVideo].bps(); + *sent_fec_rate_bps += + send_bitrate[RtpPacketMediaType::kForwardErrorCorrection].bps(); + *sent_nack_rate_bps += + send_bitrate[RtpPacketMediaType::kRetransmission].bps(); } return 0; } void RtpVideoSender::SetFecAllowed(bool fec_allowed) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); fec_allowed_ = fec_allowed; } void RtpVideoSender::OnPacketFeedbackVector( std::vector packet_feedback_vector) { if (fec_controller_->UseLossVectorMask()) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); for (const StreamPacketInfo& packet : packet_feedback_vector) { loss_mask_vector_.push_back(!packet.received); } @@ -828,7 +882,6 @@ void RtpVideoSender::OnPacketFeedbackVector( } } - if (use_early_loss_detection_) { // Map from SSRC to vector of RTP sequence numbers that are indicated as // lost by feedback, without being trailed by any received packets. 
std::map> early_loss_detected_per_ssrc; @@ -854,7 +907,6 @@ void RtpVideoSender::OnPacketFeedbackVector( rtp_sender->ReSendPacket(sequence_number); } } - } for (const auto& kv : acked_packets_per_ssrc) { const uint32_t ssrc = kv.first; diff --git a/call/rtp_video_sender.h b/call/rtp_video_sender.h index eb7e4315be..49fd3cc0d2 100644 --- a/call/rtp_video_sender.h +++ b/call/rtp_video_sender.h @@ -22,46 +22,46 @@ #include "api/fec_controller.h" #include "api/fec_controller_override.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/field_trial_based_config.h" #include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" #include "call/rtp_payload_params.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender_interface.h" #include "modules/rtp_rtcp/include/flexfec_sender.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" namespace webrtc { class FrameEncryptorInterface; -class RTPFragmentationHeader; -class RtpRtcp; class RtpTransportControllerSendInterface; namespace webrtc_internal_rtp_video_sender { // RTP state for a single simulcast stream. Internal to the implementation of // RtpVideoSender. 
struct RtpStreamSender { - RtpStreamSender(std::unique_ptr playout_delay_oracle, - std::unique_ptr rtp_rtcp, - std::unique_ptr sender_video); + RtpStreamSender(std::unique_ptr rtp_rtcp, + std::unique_ptr sender_video, + std::unique_ptr fec_generator); ~RtpStreamSender(); RtpStreamSender(RtpStreamSender&&) = default; RtpStreamSender& operator=(RtpStreamSender&&) = default; // Note: Needs pointer stability. - std::unique_ptr playout_delay_oracle; - std::unique_ptr rtp_rtcp; + std::unique_ptr rtp_rtcp; std::unique_ptr sender_video; + std::unique_ptr fec_generator; }; } // namespace webrtc_internal_rtp_video_sender @@ -69,7 +69,6 @@ struct RtpStreamSender { // RtpVideoSender routes outgoing data to the correct sending RTP module, based // on the simulcast layer in RTPVideoHeader. class RtpVideoSender : public RtpVideoSenderInterface, - public OverheadObserver, public VCMProtectionCallback, public StreamFeedbackObserver { public: @@ -87,7 +86,8 @@ class RtpVideoSender : public RtpVideoSenderInterface, RateLimiter* retransmission_limiter, // move inside RtpTransport std::unique_ptr fec_controller, FrameEncryptorInterface* frame_encryptor, - const CryptoOptions& crypto_options); // move inside RtpTransport + const CryptoOptions& crypto_options, // move inside RtpTransport + rtc::scoped_refptr frame_transformer); ~RtpVideoSender() override; // RegisterProcessThread register |module_process_thread| with those objects @@ -95,89 +95,96 @@ class RtpVideoSender : public RtpVideoSenderInterface, // |module_process_thread| was created (libjingle's worker thread). // TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue, // maybe |worker_queue|. 
- void RegisterProcessThread(ProcessThread* module_process_thread) override; - void DeRegisterProcessThread() override; + void RegisterProcessThread(ProcessThread* module_process_thread) + RTC_LOCKS_EXCLUDED(mutex_) override; + void DeRegisterProcessThread() RTC_LOCKS_EXCLUDED(mutex_) override; // RtpVideoSender will only route packets if being active, all packets will be // dropped otherwise. - void SetActive(bool active) override; + void SetActive(bool active) RTC_LOCKS_EXCLUDED(mutex_) override; // Sets the sending status of the rtp modules and appropriately sets the // payload router to active if any rtp modules are active. - void SetActiveModules(const std::vector active_modules) override; - bool IsActive() override; + void SetActiveModules(const std::vector active_modules) + RTC_LOCKS_EXCLUDED(mutex_) override; + bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override; - void OnNetworkAvailability(bool network_available) override; - std::map GetRtpStates() const override; - std::map GetRtpPayloadStates() const override; + void OnNetworkAvailability(bool network_available) + RTC_LOCKS_EXCLUDED(mutex_) override; + std::map GetRtpStates() const + RTC_LOCKS_EXCLUDED(mutex_) override; + std::map GetRtpPayloadStates() const + RTC_LOCKS_EXCLUDED(mutex_) override; - void DeliverRtcp(const uint8_t* packet, size_t length) override; + void DeliverRtcp(const uint8_t* packet, size_t length) + RTC_LOCKS_EXCLUDED(mutex_) override; // Implements webrtc::VCMProtectionCallback. int ProtectionRequest(const FecProtectionParams* delta_params, const FecProtectionParams* key_params, uint32_t* sent_video_rate_bps, uint32_t* sent_nack_rate_bps, - uint32_t* sent_fec_rate_bps) override; + uint32_t* sent_fec_rate_bps) + RTC_LOCKS_EXCLUDED(mutex_) override; // Implements FecControllerOverride. - void SetFecAllowed(bool fec_allowed) override; + void SetFecAllowed(bool fec_allowed) RTC_LOCKS_EXCLUDED(mutex_) override; // Implements EncodedImageCallback. 
// Returns 0 if the packet was routed / sent, -1 otherwise. EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; - - void OnBitrateAllocationUpdated( - const VideoBitrateAllocation& bitrate) override; - - void OnTransportOverheadChanged( - size_t transport_overhead_bytes_per_packet) override; - // Implements OverheadObserver. - void OnOverheadChanged(size_t overhead_bytes_per_packet) override; - void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) override; - uint32_t GetPayloadBitrateBps() const override; - uint32_t GetProtectionBitrateBps() const override; - void SetEncodingData(size_t width, - size_t height, - size_t num_temporal_layers) override; + const CodecSpecificInfo* codec_specific_info) + RTC_LOCKS_EXCLUDED(mutex_) override; + + void OnBitrateAllocationUpdated(const VideoBitrateAllocation& bitrate) + RTC_LOCKS_EXCLUDED(mutex_) override; + void OnVideoLayersAllocationUpdated( + const VideoLayersAllocation& layers) override; + void OnTransportOverheadChanged(size_t transport_overhead_bytes_per_packet) + RTC_LOCKS_EXCLUDED(mutex_) override; + void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) + RTC_LOCKS_EXCLUDED(mutex_) override; + uint32_t GetPayloadBitrateBps() const RTC_LOCKS_EXCLUDED(mutex_) override; + uint32_t GetProtectionBitrateBps() const RTC_LOCKS_EXCLUDED(mutex_) override; + void SetEncodingData(size_t width, size_t height, size_t num_temporal_layers) + RTC_LOCKS_EXCLUDED(mutex_) override; std::vector GetSentRtpPacketInfos( uint32_t ssrc, - rtc::ArrayView sequence_numbers) const override; + rtc::ArrayView sequence_numbers) const + RTC_LOCKS_EXCLUDED(mutex_) override; // From StreamFeedbackObserver. 
void OnPacketFeedbackVector( - std::vector packet_feedback_vector) override; + std::vector packet_feedback_vector) + RTC_LOCKS_EXCLUDED(mutex_) override; private: - void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void SetActiveModulesLocked(const std::vector active_modules) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void ConfigureProtection(); void ConfigureSsrcs(); void ConfigureRids(); - bool FecEnabled() const; bool NackEnabled() const; uint32_t GetPacketizationOverheadRate() const; + const FieldTrialBasedConfig field_trials_; const bool send_side_bwe_with_overhead_; - const bool account_for_packetization_overhead_; - const bool use_early_loss_detection_; const bool has_packet_feedback_; - // TODO(holmer): Remove crit_ once RtpVideoSender runs on the + // TODO(holmer): Remove mutex_ once RtpVideoSender runs on the // transport task queue. - rtc::CriticalSection crit_; - bool active_ RTC_GUARDED_BY(crit_); + mutable Mutex mutex_; + bool active_ RTC_GUARDED_BY(mutex_); ProcessThread* module_process_thread_; rtc::ThreadChecker module_process_thread_checker_; std::map suspended_ssrcs_; - std::unique_ptr flexfec_sender_; - const std::unique_ptr fec_controller_; - bool fec_allowed_ RTC_GUARDED_BY(crit_); + bool fec_allowed_ RTC_GUARDED_BY(mutex_); // Rtp modules are assumed to be sorted in simulcast index order. const std::vector @@ -191,22 +198,21 @@ class RtpVideoSender : public RtpVideoSenderInterface, // rewrite the frame id), therefore |shared_frame_id| has to live in a place // where we are aware of all the different streams. 
int64_t shared_frame_id_ = 0; - std::vector params_ RTC_GUARDED_BY(crit_); + std::vector params_ RTC_GUARDED_BY(mutex_); - size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(crit_); - size_t overhead_bytes_per_packet_ RTC_GUARDED_BY(crit_); + size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(mutex_); uint32_t protection_bitrate_bps_; uint32_t encoder_target_rate_bps_; - std::vector loss_mask_vector_ RTC_GUARDED_BY(crit_); + std::vector loss_mask_vector_ RTC_GUARDED_BY(mutex_); - std::vector frame_counts_ RTC_GUARDED_BY(crit_); + std::vector frame_counts_ RTC_GUARDED_BY(mutex_); FrameCountObserver* const frame_count_observer_; // Effectively const map from SSRC to RtpRtcp, for all media SSRCs. // This map is set at construction time and never changed, but it's // non-trivial to make it properly const. - std::map ssrc_to_rtp_module_; + std::map ssrc_to_rtp_module_; RTC_DISALLOW_COPY_AND_ASSIGN(RtpVideoSender); }; diff --git a/call/rtp_video_sender_interface.h b/call/rtp_video_sender_interface.h index bb72eb5996..632c9e835a 100644 --- a/call/rtp_video_sender_interface.h +++ b/call/rtp_video_sender_interface.h @@ -18,6 +18,7 @@ #include "api/array_view.h" #include "api/call/bitrate_allocation.h" #include "api/fec_controller_override.h" +#include "api/video/video_layers_allocation.h" #include "call/rtp_config.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" @@ -50,6 +51,8 @@ class RtpVideoSenderInterface : public EncodedImageCallback, virtual void OnBitrateAllocationUpdated( const VideoBitrateAllocation& bitrate) = 0; + virtual void OnVideoLayersAllocationUpdated( + const VideoLayersAllocation& allocation) = 0; virtual void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) = 0; virtual void OnTransportOverheadChanged( diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc index 7935fac389..af0b5032f3 100644 --- 
a/call/rtp_video_sender_unittest.cc +++ b/call/rtp_video_sender_unittest.cc @@ -18,14 +18,15 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/fec_controller_default.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/event.h" #include "rtc_base/rate_limiter.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/mock_frame_transformer.h" #include "test/mock_transport.h" #include "test/scenario/scenario.h" #include "test/time_controller/simulated_time_controller.h" @@ -34,10 +35,9 @@ #include "video/send_statistics_proxy.h" using ::testing::_; -using ::testing::Invoke; using ::testing::NiceMock; using ::testing::SaveArg; -using ::testing::Unused; +using ::testing::SizeIs; namespace webrtc { namespace { @@ -52,10 +52,11 @@ const int16_t kInitialTl0PicIdx1 = 99; const int16_t kInitialTl0PicIdx2 = 199; const int64_t kRetransmitWindowSizeMs = 500; const int kTransportsSequenceExtensionId = 7; +const int kDependencyDescriptorExtensionId = 8; class MockRtcpIntraFrameObserver : public RtcpIntraFrameObserver { public: - MOCK_METHOD1(OnReceivedIntraFrameRequest, void(uint32_t)); + MOCK_METHOD(void, OnReceivedIntraFrameRequest, (uint32_t), (override)); }; RtpSenderObservers CreateObservers( @@ -105,6 +106,8 @@ VideoSendStream::Config CreateVideoSendStreamConfig( config.rtp.nack.rtp_history_ms = 1000; config.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri, kTransportsSequenceExtensionId); + config.rtp.extensions.emplace_back(RtpDependencyDescriptorExtension::kUri, + kDependencyDescriptorExtensionId); return config; } @@ -115,29 +118,32 @@ class RtpVideoSenderTestFixture { const std::vector& rtx_ssrcs, int payload_type, const 
std::map& suspended_payload_states, - FrameCountObserver* frame_count_observer) - : time_controller_(Timestamp::ms(1000000)), + FrameCountObserver* frame_count_observer, + rtc::scoped_refptr frame_transformer) + : time_controller_(Timestamp::Millis(1000000)), config_(CreateVideoSendStreamConfig(&transport_, ssrcs, rtx_ssrcs, payload_type)), send_delay_stats_(time_controller_.GetClock()), bitrate_config_(GetBitrateConfig()), - transport_controller_(time_controller_.GetClock(), - &event_log_, - nullptr, - nullptr, - bitrate_config_, - ProcessThread::Create("PacerThread"), - time_controller_.GetTaskQueueFactory(), - &field_trials_), - process_thread_(ProcessThread::Create("test_thread")), + transport_controller_( + time_controller_.GetClock(), + &event_log_, + nullptr, + nullptr, + bitrate_config_, + time_controller_.CreateProcessThread("PacerThread"), + time_controller_.GetTaskQueueFactory(), + &field_trials_), + process_thread_(time_controller_.CreateProcessThread("test_thread")), call_stats_(time_controller_.GetClock(), process_thread_.get()), stats_proxy_(time_controller_.GetClock(), config_, VideoEncoderConfig::ContentType::kRealtimeVideo), retransmission_rate_limiter_(time_controller_.GetClock(), kRetransmitWindowSizeMs) { + transport_controller_.EnsureStarted(); std::map suspended_ssrcs; router_ = std::make_unique( time_controller_.GetClock(), suspended_ssrcs, suspended_payload_states, @@ -148,8 +154,22 @@ class RtpVideoSenderTestFixture { &send_delay_stats_), &transport_controller_, &event_log_, &retransmission_rate_limiter_, std::make_unique(time_controller_.GetClock()), - nullptr, CryptoOptions{}); + nullptr, CryptoOptions{}, frame_transformer); } + + RtpVideoSenderTestFixture( + const std::vector& ssrcs, + const std::vector& rtx_ssrcs, + int payload_type, + const std::map& suspended_payload_states, + FrameCountObserver* frame_count_observer) + : RtpVideoSenderTestFixture(ssrcs, + rtx_ssrcs, + payload_type, + suspended_payload_states, + frame_count_observer, 
+ /*frame_transformer=*/nullptr) {} + RtpVideoSenderTestFixture( const std::vector& ssrcs, const std::vector& rtx_ssrcs, @@ -159,7 +179,8 @@ class RtpVideoSenderTestFixture { rtx_ssrcs, payload_type, suspended_payload_states, - /*frame_count_observer=*/nullptr) {} + /*frame_count_observer=*/nullptr, + /*frame_transformer=*/nullptr) {} RtpVideoSender* router() { return router_.get(); } MockTransport& transport() { return transport_; } @@ -176,6 +197,7 @@ class RtpVideoSenderTestFixture { const FieldTrialBasedConfig field_trials_; RtpTransportControllerSend transport_controller_; std::unique_ptr process_thread_; + // TODO(tommi): Use internal::CallStats. CallStats call_stats_; SendStatisticsProxy stats_proxy_; RateLimiter retransmission_rate_limiter_; @@ -192,24 +214,20 @@ TEST(RtpVideoSenderTest, SendOnOneModule) { encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {}); - EXPECT_NE( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + EXPECT_NE(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); test.router()->SetActive(true); - EXPECT_EQ( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + EXPECT_EQ(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); test.router()->SetActive(false); - EXPECT_NE( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + EXPECT_NE(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); test.router()->SetActive(true); - EXPECT_EQ( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + EXPECT_EQ(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); } 
TEST(RtpVideoSenderTest, SendSimulcastSetActive) { @@ -228,27 +246,19 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) { test.router()->SetActive(true); EXPECT_EQ(EncodedImageCallback::Result::OK, - test.router() - ->OnEncodedImage(encoded_image_1, &codec_info, nullptr) - .error); + test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); EncodedImage encoded_image_2(encoded_image_1); encoded_image_2.SetSpatialIndex(1); EXPECT_EQ(EncodedImageCallback::Result::OK, - test.router() - ->OnEncodedImage(encoded_image_2, &codec_info, nullptr) - .error); + test.router()->OnEncodedImage(encoded_image_2, &codec_info).error); // Inactive. test.router()->SetActive(false); EXPECT_NE(EncodedImageCallback::Result::OK, - test.router() - ->OnEncodedImage(encoded_image_1, &codec_info, nullptr) - .error); + test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); EXPECT_NE(EncodedImageCallback::Result::OK, - test.router() - ->OnEncodedImage(encoded_image_2, &codec_info, nullptr) - .error); + test.router()->OnEncodedImage(encoded_image_2, &codec_info).error); } // Tests how setting individual rtp modules to active affects the overall @@ -276,9 +286,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) { std::vector active_modules({true, false}); test.router()->SetActiveModules(active_modules); EXPECT_EQ(EncodedImageCallback::Result::OK, - test.router() - ->OnEncodedImage(encoded_image_1, &codec_info, nullptr) - .error); + test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); // Setting both streams to inactive will turn the payload router to // inactive. @@ -287,13 +295,9 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) { // An incoming encoded image will not ask the module to send outgoing data // because the payload router is inactive. 
EXPECT_NE(EncodedImageCallback::Result::OK, - test.router() - ->OnEncodedImage(encoded_image_1, &codec_info, nullptr) - .error); + test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); EXPECT_NE(EncodedImageCallback::Result::OK, - test.router() - ->OnEncodedImage(encoded_image_1, &codec_info, nullptr) - .error); + test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); } TEST(RtpVideoSenderTest, CreateWithNoPreviousStates) { @@ -340,8 +344,10 @@ TEST(RtpVideoSenderTest, CreateWithPreviousStates) { TEST(RtpVideoSenderTest, FrameCountCallbacks) { class MockFrameCountObserver : public FrameCountObserver { public: - MOCK_METHOD2(FrameCountUpdated, - void(const FrameCounts& frame_counts, uint32_t ssrc)); + MOCK_METHOD(void, + FrameCountUpdated, + (const FrameCounts& frame_counts, uint32_t ssrc), + (override)); } callback; RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {}, @@ -358,9 +364,8 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { // No callbacks when not active. 
EXPECT_CALL(callback, FrameCountUpdated).Times(0); - EXPECT_NE( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + EXPECT_NE(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); ::testing::Mock::VerifyAndClearExpectations(&callback); test.router()->SetActive(true); @@ -368,9 +373,8 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { FrameCounts frame_counts; EXPECT_CALL(callback, FrameCountUpdated(_, kSsrc1)) .WillOnce(SaveArg<0>(&frame_counts)); - EXPECT_EQ( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + EXPECT_EQ(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); EXPECT_EQ(1, frame_counts.key_frames); EXPECT_EQ(0, frame_counts.delta_frames); @@ -380,9 +384,8 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { encoded_image._frameType = VideoFrameType::kVideoFrameDelta; EXPECT_CALL(callback, FrameCountUpdated(_, kSsrc1)) .WillOnce(SaveArg<0>(&frame_counts)); - EXPECT_EQ( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + EXPECT_EQ(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); EXPECT_EQ(1, frame_counts.key_frames); EXPECT_EQ(1, frame_counts.delta_frames); @@ -392,8 +395,6 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { // that the packet is removed from RtpPacketHistory and won't be retransmitted // again. TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { - const int64_t kTimeoutMs = 500; - RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); test.router()->SetActive(true); @@ -406,36 +407,27 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); // Send two tiny images, mapping to two RTP packets. 
Capture sequence numbers. - rtc::Event event; std::vector rtp_sequence_numbers; std::vector transport_sequence_numbers; EXPECT_CALL(test.transport(), SendRtp) .Times(2) - .WillRepeatedly( - [&event, &rtp_sequence_numbers, &transport_sequence_numbers]( - const uint8_t* packet, size_t length, - const PacketOptions& options) { - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet, length)); - rtp_sequence_numbers.push_back(rtp_packet.SequenceNumber()); - transport_sequence_numbers.push_back(options.packet_id); - if (transport_sequence_numbers.size() == 2) { - event.Set(); - } - return true; - }); - EXPECT_EQ( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); + .WillRepeatedly([&rtp_sequence_numbers, &transport_sequence_numbers]( + const uint8_t* packet, size_t length, + const PacketOptions& options) { + RtpPacket rtp_packet; + EXPECT_TRUE(rtp_packet.Parse(packet, length)); + rtp_sequence_numbers.push_back(rtp_packet.SequenceNumber()); + transport_sequence_numbers.push_back(options.packet_id); + return true; + }); + EXPECT_EQ(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); encoded_image.SetTimestamp(2); encoded_image.capture_time_ms_ = 3; - EXPECT_EQ( - EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image, nullptr, nullptr).error); - - test.AdvanceTime(TimeDelta::ms(33)); + EXPECT_EQ(EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, nullptr).error); - ASSERT_TRUE(event.Wait(kTimeoutMs)); + test.AdvanceTime(TimeDelta::Millis(33)); // Construct a NACK message for requesting retransmission of both packet. 
rtcp::Nack nack; @@ -446,7 +438,7 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { std::vector retransmitted_rtp_sequence_numbers; EXPECT_CALL(test.transport(), SendRtp) .Times(2) - .WillRepeatedly([&event, &retransmitted_rtp_sequence_numbers]( + .WillRepeatedly([&retransmitted_rtp_sequence_numbers]( const uint8_t* packet, size_t length, const PacketOptions& options) { RtpPacket rtp_packet; @@ -456,14 +448,10 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { rtc::ArrayView payload = rtp_packet.payload(); retransmitted_rtp_sequence_numbers.push_back( ByteReader::ReadBigEndian(payload.data())); - if (retransmitted_rtp_sequence_numbers.size() == 2) { - event.Set(); - } return true; }); test.router()->DeliverRtcp(nack_buffer.data(), nack_buffer.size()); - test.AdvanceTime(TimeDelta::ms(33)); - ASSERT_TRUE(event.Wait(kTimeoutMs)); + test.AdvanceTime(TimeDelta::Millis(33)); // Verify that both packets were retransmitted. EXPECT_EQ(retransmitted_rtp_sequence_numbers, rtp_sequence_numbers); @@ -488,11 +476,10 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { // the history has been notified of the ack and removed the packet. The // second packet, included in the feedback but not marked as received, should // still be retransmitted. 
- test.AdvanceTime(TimeDelta::ms(33)); + test.AdvanceTime(TimeDelta::Millis(33)); EXPECT_CALL(test.transport(), SendRtp) - .WillOnce([&event, &lost_packet_feedback](const uint8_t* packet, - size_t length, - const PacketOptions& options) { + .WillOnce([&lost_packet_feedback](const uint8_t* packet, size_t length, + const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); EXPECT_EQ(rtp_packet.Ssrc(), kRtxSsrc1); @@ -500,12 +487,10 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { rtc::ArrayView payload = rtp_packet.payload(); EXPECT_EQ(lost_packet_feedback.rtp_sequence_number, ByteReader::ReadBigEndian(payload.data())); - event.Set(); return true; }); test.router()->DeliverRtcp(nack_buffer.data(), nack_buffer.size()); - test.AdvanceTime(TimeDelta::ms(33)); - ASSERT_TRUE(event.Wait(kTimeoutMs)); + test.AdvanceTime(TimeDelta::Millis(33)); } // This tests that we utilize transport wide feedback to retransmit lost @@ -518,14 +503,14 @@ TEST(RtpVideoSenderTest, RetransmitsOnTransportWideLossInfo) { test::Scenario s(test_info_); test::CallClientConfig call_conf; // Keeping the bitrate fixed to avoid RTX due to probing. 
- call_conf.transport.rates.max_rate = DataRate::kbps(300); - call_conf.transport.rates.start_rate = DataRate::kbps(300); + call_conf.transport.rates.max_rate = DataRate::KilobitsPerSec(300); + call_conf.transport.rates.start_rate = DataRate::KilobitsPerSec(300); test::NetworkSimulationConfig net_conf; - net_conf.bandwidth = DataRate::kbps(300); + net_conf.bandwidth = DataRate::KilobitsPerSec(300); auto send_node = s.CreateSimulationNode(net_conf); + auto* callee = s.CreateClient("return", call_conf); auto* route = s.CreateRoutes(s.CreateClient("send", call_conf), {send_node}, - s.CreateClient("return", call_conf), - {s.CreateSimulationNode(net_conf)}); + callee, {s.CreateSimulationNode(net_conf)}); test::VideoStreamConfig lossy_config; lossy_config.source.framerate = 5; @@ -551,24 +536,28 @@ TEST(RtpVideoSenderTest, RetransmitsOnTransportWideLossInfo) { // Run for a short duration and reset counters to avoid counting RTX packets // from initial probing. - s.RunFor(TimeDelta::seconds(1)); + s.RunFor(TimeDelta::Seconds(1)); rtx_packets = 0; - int decoded_baseline = lossy->receive()->GetStats().frames_decoded; - s.RunFor(TimeDelta::seconds(1)); + int decoded_baseline = 0; + callee->SendTask([&decoded_baseline, &lossy]() { + decoded_baseline = lossy->receive()->GetStats().frames_decoded; + }); + s.RunFor(TimeDelta::Seconds(1)); // We expect both that RTX packets were sent and that an appropriate number of // frames were received. This is somewhat redundant but reduces the risk of // false positives in future regressions (e.g. RTX is send due to probing). 
EXPECT_GE(rtx_packets, 1); - int frames_decoded = - lossy->receive()->GetStats().frames_decoded - decoded_baseline; + int frames_decoded = 0; + callee->SendTask([&decoded_baseline, &frames_decoded, &lossy]() { + frames_decoded = + lossy->receive()->GetStats().frames_decoded - decoded_baseline; + }); EXPECT_EQ(frames_decoded, 5); } // Integration test verifying that retransmissions are sent for packets which // can be detected as lost early, using transport wide feedback. TEST(RtpVideoSenderTest, EarlyRetransmits) { - const int64_t kTimeoutMs = 500; - RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); test.router()->SetActive(true); @@ -587,61 +576,52 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { // Send two tiny images, mapping to single RTP packets. Capture sequence // numbers. - rtc::Event event; uint16_t frame1_rtp_sequence_number = 0; uint16_t frame1_transport_sequence_number = 0; EXPECT_CALL(test.transport(), SendRtp) - .WillOnce([&event, &frame1_rtp_sequence_number, - &frame1_transport_sequence_number]( - const uint8_t* packet, size_t length, - const PacketOptions& options) { - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet, length)); - frame1_rtp_sequence_number = rtp_packet.SequenceNumber(); - frame1_transport_sequence_number = options.packet_id; - EXPECT_EQ(rtp_packet.Ssrc(), kSsrc1); - event.Set(); - return true; - }); - EXPECT_EQ(test.router() - ->OnEncodedImage(encoded_image, &codec_specific, nullptr) - .error, + .WillOnce( + [&frame1_rtp_sequence_number, &frame1_transport_sequence_number]( + const uint8_t* packet, size_t length, + const PacketOptions& options) { + RtpPacket rtp_packet; + EXPECT_TRUE(rtp_packet.Parse(packet, length)); + frame1_rtp_sequence_number = rtp_packet.SequenceNumber(); + frame1_transport_sequence_number = options.packet_id; + EXPECT_EQ(rtp_packet.Ssrc(), kSsrc1); + return true; + }); + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, 
EncodedImageCallback::Result::OK); - test.AdvanceTime(TimeDelta::ms(33)); - ASSERT_TRUE(event.Wait(kTimeoutMs)); + test.AdvanceTime(TimeDelta::Millis(33)); uint16_t frame2_rtp_sequence_number = 0; uint16_t frame2_transport_sequence_number = 0; encoded_image.SetSpatialIndex(1); EXPECT_CALL(test.transport(), SendRtp) - .WillOnce([&event, &frame2_rtp_sequence_number, - &frame2_transport_sequence_number]( - const uint8_t* packet, size_t length, - const PacketOptions& options) { - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet, length)); - frame2_rtp_sequence_number = rtp_packet.SequenceNumber(); - frame2_transport_sequence_number = options.packet_id; - EXPECT_EQ(rtp_packet.Ssrc(), kSsrc2); - event.Set(); - return true; - }); - EXPECT_EQ(test.router() - ->OnEncodedImage(encoded_image, &codec_specific, nullptr) - .error, + .WillOnce( + [&frame2_rtp_sequence_number, &frame2_transport_sequence_number]( + const uint8_t* packet, size_t length, + const PacketOptions& options) { + RtpPacket rtp_packet; + EXPECT_TRUE(rtp_packet.Parse(packet, length)); + frame2_rtp_sequence_number = rtp_packet.SequenceNumber(); + frame2_transport_sequence_number = options.packet_id; + EXPECT_EQ(rtp_packet.Ssrc(), kSsrc2); + return true; + }); + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, EncodedImageCallback::Result::OK); - test.AdvanceTime(TimeDelta::ms(33)); - ASSERT_TRUE(event.Wait(kTimeoutMs)); + test.AdvanceTime(TimeDelta::Millis(33)); EXPECT_NE(frame1_transport_sequence_number, frame2_transport_sequence_number); // Inject a transport feedback where the packet for the first frame is lost, // expect a retransmission for it. 
EXPECT_CALL(test.transport(), SendRtp) - .WillOnce([&event, &frame1_rtp_sequence_number]( - const uint8_t* packet, size_t length, - const PacketOptions& options) { + .WillOnce([&frame1_rtp_sequence_number](const uint8_t* packet, + size_t length, + const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); EXPECT_EQ(rtp_packet.Ssrc(), kRtxSsrc1); @@ -651,7 +631,6 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { rtc::ArrayView payload = rtp_packet.payload(); EXPECT_EQ(ByteReader::ReadBigEndian(payload.data()), frame1_rtp_sequence_number); - event.Set(); return true; }); @@ -669,8 +648,124 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { {first_packet_feedback, second_packet_feedback}); // Wait for pacer to run and send the RTX packet. - test.AdvanceTime(TimeDelta::ms(33)); - ASSERT_TRUE(event.Wait(kTimeoutMs)); + test.AdvanceTime(TimeDelta::Millis(33)); +} + +TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { + RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); + test.router()->SetActive(true); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + ON_CALL(test.transport(), SendRtp) + .WillByDefault([&](const uint8_t* packet, size_t length, + const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet, length)); + return true; + }); + + const uint8_t kPayload[1] = {'a'}; + EncodedImage encoded_image; + encoded_image.SetTimestamp(1); + encoded_image.capture_time_ms_ = 2; + encoded_image.SetEncodedData( + EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); + + CodecSpecificInfo codec_specific; + codec_specific.codecType = VideoCodecType::kVideoCodecGeneric; + codec_specific.template_structure.emplace(); + codec_specific.template_structure->num_decode_targets = 1; + codec_specific.template_structure->templates = { + FrameDependencyTemplate().T(0).Dtis("S"), + 
FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}), + FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}), + }; + + // Send two tiny images, mapping to single RTP packets. + // Send in key frame. + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + codec_specific.generic_frame_info = + GenericFrameInfo::Builder().T(0).Dtis("S").Build(); + codec_specific.generic_frame_info->encoder_buffers = {{0, false, true}}; + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + test.AdvanceTime(TimeDelta::Millis(33)); + ASSERT_THAT(sent_packets, SizeIs(1)); + EXPECT_TRUE( + sent_packets.back().HasExtension()); + + // Send in delta frame. + encoded_image._frameType = VideoFrameType::kVideoFrameDelta; + codec_specific.template_structure = absl::nullopt; + codec_specific.generic_frame_info = + GenericFrameInfo::Builder().T(1).Dtis("D").Build(); + codec_specific.generic_frame_info->encoder_buffers = {{0, true, false}}; + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + test.AdvanceTime(TimeDelta::Millis(33)); + ASSERT_THAT(sent_packets, SizeIs(2)); + EXPECT_TRUE( + sent_packets.back().HasExtension()); +} + +TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) { + RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); + test.router()->SetActive(true); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + ON_CALL(test.transport(), SendRtp) + .WillByDefault([&](const uint8_t* packet, size_t length, + const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet, length)); + return true; + }); + + const uint8_t kPayload[1] = {'a'}; + EncodedImage encoded_image; + encoded_image.SetTimestamp(1); + encoded_image.capture_time_ms_ = 2; + encoded_image.SetEncodedData( + 
EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); + + CodecSpecificInfo codec_specific; + codec_specific.codecType = VideoCodecType::kVideoCodecGeneric; + codec_specific.template_structure.emplace(); + codec_specific.template_structure->num_decode_targets = 1; + codec_specific.template_structure->templates = { + FrameDependencyTemplate().T(0).Dtis("S"), + FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}), + FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}), + }; + + // Send two tiny images, mapping to single RTP packets. + // Send in a key frame. + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + codec_specific.generic_frame_info = + GenericFrameInfo::Builder().T(0).Dtis("S").Build(); + codec_specific.generic_frame_info->encoder_buffers = {{0, false, true}}; + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + test.AdvanceTime(TimeDelta::Millis(33)); + ASSERT_THAT(sent_packets, SizeIs(1)); + EXPECT_TRUE( + sent_packets.back().HasExtension()); + + // Send in a new key frame without the support for the dependency descriptor. 
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey; + codec_specific.template_structure = absl::nullopt; + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + test.AdvanceTime(TimeDelta::Millis(33)); + ASSERT_THAT(sent_packets, SizeIs(2)); + EXPECT_FALSE( + sent_packets.back().HasExtension()); } TEST(RtpVideoSenderTest, CanSetZeroBitrateWithOverhead) { @@ -694,4 +789,17 @@ TEST(RtpVideoSenderTest, CanSetZeroBitrateWithoutOverhead) { test.router()->OnBitrateUpdated(update, /*framerate*/ 0); } + +TEST(RtpVideoSenderTest, SimulcastSenderRegistersFrameTransformers) { + rtc::scoped_refptr transformer = + new rtc::RefCountedObject(); + + EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc1)); + EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc2)); + RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, + kPayloadType, {}, nullptr, transformer); + + EXPECT_CALL(*transformer, UnregisterTransformedFrameSinkCallback(kSsrc1)); + EXPECT_CALL(*transformer, UnregisterTransformedFrameSinkCallback(kSsrc2)); +} } // namespace webrtc diff --git a/call/rtx_receive_stream.h b/call/rtx_receive_stream.h index 8ffa4400a9..a389fc2a57 100644 --- a/call/rtx_receive_stream.h +++ b/call/rtx_receive_stream.h @@ -11,6 +11,7 @@ #ifndef CALL_RTX_RECEIVE_STREAM_H_ #define CALL_RTX_RECEIVE_STREAM_H_ +#include #include #include "call/rtp_packet_sink_interface.h" diff --git a/call/simulated_network.cc b/call/simulated_network.cc index f90446480b..f8a5bd893d 100644 --- a/call/simulated_network.cc +++ b/call/simulated_network.cc @@ -21,7 +21,7 @@ namespace webrtc { namespace { -constexpr TimeDelta kDefaultProcessDelay = TimeDelta::Millis<5>(); +constexpr TimeDelta kDefaultProcessDelay = TimeDelta::Millis(5); } // namespace CoDelSimulation::CoDelSimulation() = default; @@ -31,10 +31,10 @@ bool CoDelSimulation::DropDequeuedPacket(Timestamp now, Timestamp enqueing_time, 
DataSize packet_size, DataSize queue_size) { - constexpr TimeDelta kWindow = TimeDelta::Millis<100>(); - constexpr TimeDelta kDelayThreshold = TimeDelta::Millis<5>(); - constexpr TimeDelta kDropCountMemory = TimeDelta::Millis<1600>(); - constexpr DataSize kMaxPacketSize = DataSize::Bytes<1500>(); + constexpr TimeDelta kWindow = TimeDelta::Millis(100); + constexpr TimeDelta kDelayThreshold = TimeDelta::Millis(5); + constexpr TimeDelta kDropCountMemory = TimeDelta::Millis(1600); + constexpr DataSize kMaxPacketSize = DataSize::Bytes(1500); // Compensates for process interval in simulation; not part of standard CoDel. TimeDelta queuing_time = now - enqueing_time - kDefaultProcessDelay; @@ -77,6 +77,7 @@ bool CoDelSimulation::DropDequeuedPacket(Timestamp now, } return false; } + RTC_CHECK_NOTREACHED(); } SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed) @@ -87,7 +88,7 @@ SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed) SimulatedNetwork::~SimulatedNetwork() = default; void SimulatedNetwork::SetConfig(const Config& config) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); config_state_.config = config; // Shallow copy of the struct. 
double prob_loss = config.loss_percent / 100.0; if (config_state_.config.avg_burst_loss_length == -1) { @@ -111,8 +112,14 @@ void SimulatedNetwork::SetConfig(const Config& config) { } } +void SimulatedNetwork::UpdateConfig( + std::function config_modifier) { + MutexLock lock(&config_lock_); + config_modifier(&config_state_.config); +} + void SimulatedNetwork::PauseTransmissionUntil(int64_t until_us) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); config_state_.pause_transmission_until_us = until_us; } @@ -191,10 +198,10 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, if (state.config.codel_active_queue_management) { while (!capacity_link_.empty() && codel_controller_.DropDequeuedPacket( - Timestamp::us(time_us), - Timestamp::us(capacity_link_.front().packet.send_time_us), - DataSize::bytes(capacity_link_.front().packet.size), - DataSize::bytes(queue_size_bytes_))) { + Timestamp::Micros(time_us), + Timestamp::Micros(capacity_link_.front().packet.send_time_us), + DataSize::Bytes(capacity_link_.front().packet.size), + DataSize::Bytes(queue_size_bytes_))) { PacketInfo dropped = capacity_link_.front(); capacity_link_.pop(); queue_size_bytes_ -= dropped.packet.size; @@ -254,7 +261,7 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, } SimulatedNetwork::ConfigState SimulatedNetwork::GetConfigState() const { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); return config_state_; } diff --git a/call/simulated_network.h b/call/simulated_network.h index fa72762b44..b53ecc0ddb 100644 --- a/call/simulated_network.h +++ b/call/simulated_network.h @@ -20,9 +20,9 @@ #include "api/test/simulated_network.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" -#include "rtc_base/critical_section.h" #include "rtc_base/race_checker.h" #include "rtc_base/random.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" @@ -62,6 
+62,8 @@ class SimulatedNetwork : public SimulatedNetworkInterface { // Sets a new configuration. This won't affect packets already in the pipe. void SetConfig(const Config& config) override; + void UpdateConfig(std::function + config_modifier) override; void PauseTransmissionUntil(int64_t until_us) override; // NetworkBehaviorInterface @@ -94,7 +96,7 @@ class SimulatedNetwork : public SimulatedNetworkInterface { RTC_RUN_ON(&process_checker_); ConfigState GetConfigState() const; - rtc::CriticalSection config_lock_; + mutable Mutex config_lock_; // |process_checker_| guards the data structures involved in delay and loss // processes, such as the packet queues. diff --git a/call/simulated_network_unittest.cc b/call/simulated_network_unittest.cc index 25fb1c1399..a4545e5988 100644 --- a/call/simulated_network_unittest.cc +++ b/call/simulated_network_unittest.cc @@ -24,10 +24,10 @@ constexpr int kNotReceived = PacketDeliveryInfo::kNotReceived; } TEST(SimulatedNetworkTest, CodelDoesNothingAtCapacity) { - const TimeDelta kRuntime = TimeDelta::seconds(30); + const TimeDelta kRuntime = TimeDelta::Seconds(30); - DataRate link_capacity = DataRate::kbps(1000); - const DataSize packet_size = DataSize::bytes(1000); + DataRate link_capacity = DataRate::KilobitsPerSec(1000); + const DataSize packet_size = DataSize::Bytes(1000); SimulatedNetwork::Config config; config.codel_active_queue_management = true; @@ -37,10 +37,10 @@ TEST(SimulatedNetworkTest, CodelDoesNothingAtCapacity) { // Need to round up here as otherwise we actually will choke. const TimeDelta packet_inverval = - packet_size / link_capacity + TimeDelta::ms(1); + packet_size / link_capacity + TimeDelta::Millis(1); // Send at capacity and see we get no loss. 
- Timestamp start_time = Timestamp::ms(0); + Timestamp start_time = Timestamp::Millis(0); Timestamp current_time = start_time; Timestamp next_packet_time = start_time; uint64_t next_id = 0; @@ -56,7 +56,7 @@ TEST(SimulatedNetworkTest, CodelDoesNothingAtCapacity) { } Timestamp next_delivery = Timestamp::PlusInfinity(); if (network.NextDeliveryTimeUs()) - next_delivery = Timestamp::us(*network.NextDeliveryTimeUs()); + next_delivery = Timestamp::Micros(*network.NextDeliveryTimeUs()); current_time = std::min(next_packet_time, next_delivery); if (current_time >= next_delivery) { for (PacketDeliveryInfo packet : @@ -77,11 +77,11 @@ TEST(SimulatedNetworkTest, CodelDoesNothingAtCapacity) { } TEST(SimulatedNetworkTest, CodelLimitsDelayAndDropsPacketsOnOverload) { - const TimeDelta kRuntime = TimeDelta::seconds(30); - const TimeDelta kCheckInterval = TimeDelta::ms(2000); + const TimeDelta kRuntime = TimeDelta::Seconds(30); + const TimeDelta kCheckInterval = TimeDelta::Millis(2000); - DataRate link_capacity = DataRate::kbps(1000); - const DataSize rough_packet_size = DataSize::bytes(1500); + DataRate link_capacity = DataRate::KilobitsPerSec(1000); + const DataSize rough_packet_size = DataSize::Bytes(1500); const double overload_rate = 1.5; SimulatedNetwork::Config config; @@ -94,7 +94,7 @@ TEST(SimulatedNetworkTest, CodelLimitsDelayAndDropsPacketsOnOverload) { const DataSize packet_size = overload_rate * link_capacity * packet_inverval; // Send above capacity and see delays are still controlled at the cost of // packet loss. 
- Timestamp start_time = Timestamp::ms(0); + Timestamp start_time = Timestamp::Millis(0); Timestamp current_time = start_time; Timestamp next_packet_time = start_time; Timestamp last_check = start_time; @@ -113,7 +113,7 @@ TEST(SimulatedNetworkTest, CodelLimitsDelayAndDropsPacketsOnOverload) { } Timestamp next_delivery = Timestamp::PlusInfinity(); if (network.NextDeliveryTimeUs()) - next_delivery = Timestamp::us(*network.NextDeliveryTimeUs()); + next_delivery = Timestamp::Micros(*network.NextDeliveryTimeUs()); current_time = std::min(next_packet_time, next_delivery); if (current_time >= next_delivery) { for (PacketDeliveryInfo packet : @@ -130,7 +130,8 @@ TEST(SimulatedNetworkTest, CodelLimitsDelayAndDropsPacketsOnOverload) { if (current_time > last_check + kCheckInterval) { last_check = current_time; TimeDelta average_delay = - TimeDelta::us(absl::c_accumulate(delays_us, 0)) / delays_us.size(); + TimeDelta::Micros(absl::c_accumulate(delays_us, 0)) / + delays_us.size(); double loss_ratio = static_cast(lost) / (lost + delays_us.size()); EXPECT_LT(average_delay.ms(), 200) << "Time " << (current_time - start_time).ms() << "\n"; diff --git a/call/ssrc_binding_observer.h b/call/ssrc_binding_observer.h deleted file mode 100644 index ada505610f..0000000000 --- a/call/ssrc_binding_observer.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef CALL_SSRC_BINDING_OBSERVER_H_ -#define CALL_SSRC_BINDING_OBSERVER_H_ - -#include - -namespace webrtc { - -// With newer versions of SDP, SSRC is often not explicitly signaled and must -// be learned on the fly. 
This happens by correlating packet SSRCs with included -// RTP extension headers like MID and RSID, or by receiving information from -// RTCP messages. -// SsrcBindingObservers will be notified when a new binding is learned, which -// can happen during call setup and/or during the call. -class SsrcBindingObserver { - public: - virtual ~SsrcBindingObserver() = default; - - virtual void OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) {} - - virtual void OnSsrcBoundToMid(const std::string& mid, uint32_t ssrc) {} - - virtual void OnSsrcBoundToMidRsid(const std::string& mid, - const std::string& rsid, - uint32_t ssrc) {} - - virtual void OnSsrcBoundToPayloadType(uint8_t payload_type, uint32_t ssrc) {} -}; - -} // namespace webrtc - -#endif // CALL_SSRC_BINDING_OBSERVER_H_ diff --git a/call/syncable.h b/call/syncable.h index 067e01c006..43b16a0720 100644 --- a/call/syncable.h +++ b/call/syncable.h @@ -33,11 +33,11 @@ class Syncable { virtual ~Syncable(); - virtual int id() const = 0; + virtual uint32_t id() const = 0; virtual absl::optional GetInfo() const = 0; virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const = 0; - virtual void SetMinimumPlayoutDelay(int delay_ms) = 0; + virtual bool SetMinimumPlayoutDelay(int delay_ms) = 0; virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) = 0; }; diff --git a/call/test/mock_audio_send_stream.h b/call/test/mock_audio_send_stream.h index 489e826d0e..4164dd550e 100644 --- a/call/test/mock_audio_send_stream.h +++ b/call/test/mock_audio_send_stream.h @@ -21,23 +21,26 @@ namespace test { class MockAudioSendStream : public AudioSendStream { public: - MOCK_CONST_METHOD0(GetConfig, const webrtc::AudioSendStream::Config&()); - MOCK_METHOD1(Reconfigure, void(const Config& config)); - MOCK_METHOD0(Start, void()); - MOCK_METHOD0(Stop, void()); + MOCK_METHOD(const webrtc::AudioSendStream::Config&, + GetConfig, + (), + (const, override)); + MOCK_METHOD(void, 
Reconfigure, (const Config& config), (override)); + MOCK_METHOD(void, Start, (), (override)); + MOCK_METHOD(void, Stop, (), (override)); // GMock doesn't like move-only types, such as std::unique_ptr. - virtual void SendAudioData(std::unique_ptr audio_frame) { + void SendAudioData(std::unique_ptr audio_frame) override { SendAudioDataForMock(audio_frame.get()); } - MOCK_METHOD1(SendAudioDataForMock, void(webrtc::AudioFrame* audio_frame)); - MOCK_METHOD4(SendTelephoneEvent, - bool(int payload_type, - int payload_frequency, - int event, - int duration_ms)); - MOCK_METHOD1(SetMuted, void(bool muted)); - MOCK_CONST_METHOD0(GetStats, Stats()); - MOCK_CONST_METHOD1(GetStats, Stats(bool has_remote_tracks)); + MOCK_METHOD(void, SendAudioDataForMock, (webrtc::AudioFrame*)); + MOCK_METHOD( + bool, + SendTelephoneEvent, + (int payload_type, int payload_frequency, int event, int duration_ms), + (override)); + MOCK_METHOD(void, SetMuted, (bool muted), (override)); + MOCK_METHOD(Stats, GetStats, (), (const, override)); + MOCK_METHOD(Stats, GetStats, (bool has_remote_tracks), (const, override)); }; } // namespace test } // namespace webrtc diff --git a/call/test/mock_bitrate_allocator.h b/call/test/mock_bitrate_allocator.h index f00ed79c59..b08916fe4f 100644 --- a/call/test/mock_bitrate_allocator.h +++ b/call/test/mock_bitrate_allocator.h @@ -18,10 +18,15 @@ namespace webrtc { class MockBitrateAllocator : public BitrateAllocatorInterface { public: - MOCK_METHOD2(AddObserver, - void(BitrateAllocatorObserver*, MediaStreamAllocationConfig)); - MOCK_METHOD1(RemoveObserver, void(BitrateAllocatorObserver*)); - MOCK_CONST_METHOD1(GetStartBitrate, int(BitrateAllocatorObserver*)); + MOCK_METHOD(void, + AddObserver, + (BitrateAllocatorObserver*, MediaStreamAllocationConfig), + (override)); + MOCK_METHOD(void, RemoveObserver, (BitrateAllocatorObserver*), (override)); + MOCK_METHOD(int, + GetStartBitrate, + (BitrateAllocatorObserver*), + (const, override)); }; } // namespace webrtc #endif // 
CALL_TEST_MOCK_BITRATE_ALLOCATOR_H_ diff --git a/call/test/mock_rtp_packet_sink_interface.h b/call/test/mock_rtp_packet_sink_interface.h index adc804f941..e6d14f05c5 100644 --- a/call/test/mock_rtp_packet_sink_interface.h +++ b/call/test/mock_rtp_packet_sink_interface.h @@ -17,7 +17,7 @@ namespace webrtc { class MockRtpPacketSink : public RtpPacketSinkInterface { public: - MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived&)); + MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override)); }; } // namespace webrtc diff --git a/call/test/mock_rtp_transport_controller_send.h b/call/test/mock_rtp_transport_controller_send.h index fad27b018f..b468aa6cb2 100644 --- a/call/test/mock_rtp_transport_controller_send.h +++ b/call/test/mock_rtp_transport_controller_send.h @@ -18,6 +18,7 @@ #include "api/crypto/crypto_options.h" #include "api/crypto/frame_encryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/transport/bitrate_settings.h" #include "call/rtp_transport_controller_send_interface.h" #include "modules/pacing/packet_router.h" @@ -31,44 +32,74 @@ namespace webrtc { class MockRtpTransportControllerSend : public RtpTransportControllerSendInterface { public: - MOCK_METHOD9( - CreateRtpVideoSender, - RtpVideoSenderInterface*(std::map, - const std::map&, - const RtpConfig&, - int rtcp_report_interval_ms, - Transport*, - const RtpSenderObservers&, - RtcEventLog*, - std::unique_ptr, - const RtpSenderFrameEncryptionConfig&)); - MOCK_METHOD1(DestroyRtpVideoSender, void(RtpVideoSenderInterface*)); - MOCK_METHOD0(GetWorkerQueue, rtc::TaskQueue*()); - MOCK_METHOD0(packet_router, PacketRouter*()); - MOCK_METHOD0(network_state_estimate_observer, - NetworkStateEstimateObserver*()); - MOCK_METHOD0(transport_feedback_observer, TransportFeedbackObserver*()); - MOCK_METHOD0(packet_sender, RtpPacketSender*()); - MOCK_METHOD1(SetAllocatedSendBitrateLimits, void(BitrateAllocationLimits)); - MOCK_METHOD1(SetPacingFactor, void(float)); - 
MOCK_METHOD1(SetQueueTimeLimit, void(int)); - MOCK_METHOD0(GetStreamFeedbackProvider, StreamFeedbackProvider*()); - MOCK_METHOD1(RegisterTargetTransferRateObserver, - void(TargetTransferRateObserver*)); - MOCK_METHOD2(OnNetworkRouteChanged, - void(const std::string&, const rtc::NetworkRoute&)); - MOCK_METHOD1(OnNetworkAvailability, void(bool)); - MOCK_METHOD0(GetBandwidthObserver, RtcpBandwidthObserver*()); - MOCK_CONST_METHOD0(GetPacerQueuingDelayMs, int64_t()); - MOCK_CONST_METHOD0(GetFirstPacketTime, absl::optional()); - MOCK_METHOD1(EnablePeriodicAlrProbing, void(bool)); - MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket&)); - MOCK_METHOD1(SetSdpBitrateParameters, void(const BitrateConstraints&)); - MOCK_METHOD1(SetClientBitratePreferences, void(const BitrateSettings&)); - MOCK_METHOD1(OnTransportOverheadChanged, void(size_t)); - MOCK_METHOD1(AccountForAudioPacketsInPacedSender, void(bool)); - MOCK_METHOD0(IncludeOverheadInPacedSender, void()); - MOCK_METHOD1(OnReceivedPacket, void(const ReceivedPacket&)); + MOCK_METHOD(RtpVideoSenderInterface*, + CreateRtpVideoSender, + ((std::map), + (const std::map&), + const RtpConfig&, + int rtcp_report_interval_ms, + Transport*, + const RtpSenderObservers&, + RtcEventLog*, + std::unique_ptr, + const RtpSenderFrameEncryptionConfig&, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, + DestroyRtpVideoSender, + (RtpVideoSenderInterface*), + (override)); + MOCK_METHOD(rtc::TaskQueue*, GetWorkerQueue, (), (override)); + MOCK_METHOD(PacketRouter*, packet_router, (), (override)); + MOCK_METHOD(NetworkStateEstimateObserver*, + network_state_estimate_observer, + (), + (override)); + MOCK_METHOD(TransportFeedbackObserver*, + transport_feedback_observer, + (), + (override)); + MOCK_METHOD(RtpPacketSender*, packet_sender, (), (override)); + MOCK_METHOD(void, + SetAllocatedSendBitrateLimits, + (BitrateAllocationLimits), + (override)); + MOCK_METHOD(void, SetPacingFactor, (float), (override)); + MOCK_METHOD(void, 
SetQueueTimeLimit, (int), (override)); + MOCK_METHOD(StreamFeedbackProvider*, + GetStreamFeedbackProvider, + (), + (override)); + MOCK_METHOD(void, + RegisterTargetTransferRateObserver, + (TargetTransferRateObserver*), + (override)); + MOCK_METHOD(void, + OnNetworkRouteChanged, + (const std::string&, const rtc::NetworkRoute&), + (override)); + MOCK_METHOD(void, OnNetworkAvailability, (bool), (override)); + MOCK_METHOD(RtcpBandwidthObserver*, GetBandwidthObserver, (), (override)); + MOCK_METHOD(int64_t, GetPacerQueuingDelayMs, (), (const, override)); + MOCK_METHOD(absl::optional, + GetFirstPacketTime, + (), + (const, override)); + MOCK_METHOD(void, EnablePeriodicAlrProbing, (bool), (override)); + MOCK_METHOD(void, OnSentPacket, (const rtc::SentPacket&), (override)); + MOCK_METHOD(void, + SetSdpBitrateParameters, + (const BitrateConstraints&), + (override)); + MOCK_METHOD(void, + SetClientBitratePreferences, + (const BitrateSettings&), + (override)); + MOCK_METHOD(void, OnTransportOverheadChanged, (size_t), (override)); + MOCK_METHOD(void, AccountForAudioPacketsInPacedSender, (bool), (override)); + MOCK_METHOD(void, IncludeOverheadInPacedSender, (), (override)); + MOCK_METHOD(void, OnReceivedPacket, (const ReceivedPacket&), (override)); + MOCK_METHOD(void, EnsureStarted, (), (override)); }; } // namespace webrtc #endif // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_ diff --git a/call/video_receive_stream.cc b/call/video_receive_stream.cc index c4895e465a..e0f3de366b 100644 --- a/call/video_receive_stream.cc +++ b/call/video_receive_stream.cc @@ -24,8 +24,13 @@ std::string VideoReceiveStream::Decoder::ToString() const { ss << "{payload_type: " << payload_type; ss << ", payload_name: " << video_format.name; ss << ", codec_params: {"; - for (const auto& it : video_format.parameters) - ss << it.first << ": " << it.second; + for (auto it = video_format.parameters.begin(); + it != video_format.parameters.end(); ++it) { + if (it != video_format.parameters.begin()) { + ss 
<< ", "; + } + ss << it->first << ": " << it->second; + } ss << '}'; ss << '}'; diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h index 0f5e8e043a..67e889fc74 100644 --- a/call/video_receive_stream.h +++ b/call/video_receive_stream.h @@ -21,6 +21,7 @@ #include "api/call/transport.h" #include "api/crypto/crypto_options.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "api/transport/rtp/rtp_source.h" @@ -31,6 +32,10 @@ #include "api/video/video_timing.h" #include "api/video_codecs/sdp_video_format.h" #include "call/rtp_config.h" +#include "common_video/frame_counts.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -69,10 +74,6 @@ class VideoReceiveStream { ~Decoder(); std::string ToString() const; - // Ownership stays with WebrtcVideoEngine (delegated from PeerConnection). - // TODO(nisse): Move one level out, to VideoReceiveStream::Config, and later - // to the configuration of VideoStreamDecoder. - VideoDecoderFactory* decoder_factory = nullptr; SdpVideoFormat video_format; // Received RTP packets with this payload type will be sent to this decoder @@ -172,6 +173,9 @@ class VideoReceiveStream { // Decoders for every payload that we can receive. std::vector decoders; + // Ownership stays with WebrtcVideoEngine (delegated from PeerConnection). + VideoDecoderFactory* decoder_factory = nullptr; + // Receive-stream specific RTP settings. struct Rtp { Rtp(); @@ -262,6 +266,8 @@ class VideoReceiveStream { // Per PeerConnection cryptography options. CryptoOptions crypto_options; + + rtc::scoped_refptr frame_transformer; }; // Starts stream activity. 
@@ -297,6 +303,11 @@ class VideoReceiveStream { virtual void SetFrameDecryptor( rtc::scoped_refptr frame_decryptor) = 0; + // Allows a frame transformer to be attached to a VideoReceiveStream after + // creation without resetting the decoder state. + virtual void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) = 0; + // Sets and returns recording state. The old state is moved out // of the video receive stream and returned to the caller, and |state| // is moved in. If the state's callback is set, it will be called with @@ -312,6 +323,10 @@ class VideoReceiveStream { // Cause eventual generation of a key frame from the sender. virtual void GenerateKeyFrame() = 0; +#ifndef DISABLE_RECORDER + virtual void InjectRecorder(Recorder* recorder) = 0; +#endif + protected: virtual ~VideoReceiveStream() {} }; diff --git a/call/video_send_stream.cc b/call/video_send_stream.cc index f495d085cf..244d78089c 100644 --- a/call/video_send_stream.cc +++ b/call/video_send_stream.cc @@ -17,12 +17,32 @@ namespace webrtc { +namespace { + +const char* StreamTypeToString(VideoSendStream::StreamStats::StreamType type) { + switch (type) { + case VideoSendStream::StreamStats::StreamType::kMedia: + return "media"; + case VideoSendStream::StreamStats::StreamType::kRtx: + return "rtx"; + case VideoSendStream::StreamStats::StreamType::kFlexfec: + return "flexfec"; + } + RTC_CHECK_NOTREACHED(); +} + +} // namespace + VideoSendStream::StreamStats::StreamStats() = default; VideoSendStream::StreamStats::~StreamStats() = default; std::string VideoSendStream::StreamStats::ToString() const { char buf[1024]; rtc::SimpleStringBuilder ss(buf); + ss << "type: " << StreamTypeToString(type); + if (referenced_media_ssrc.has_value()) + ss << " (for: " << referenced_media_ssrc.value() << ")"; + ss << ", "; ss << "width: " << width << ", "; ss << "height: " << height << ", "; ss << "key: " << frame_counts.key_frames << ", "; @@ -64,7 +84,8 @@ std::string 
VideoSendStream::Stats::ToString(int64_t time_ms) const { ss << "#quality_adaptations: " << number_of_quality_adapt_changes; ss << '}'; for (const auto& substream : substreams) { - if (!substream.second.is_rtx && !substream.second.is_flexfec) { + if (substream.second.type == + VideoSendStream::StreamStats::StreamType::kMedia) { ss << " {ssrc: " << substream.first << ", "; ss << substream.second.ToString(); ss << '}'; diff --git a/call/video_send_stream.h b/call/video_send_stream.h index 39abdfc808..cf78ba2e80 100644 --- a/call/video_send_stream.h +++ b/call/video_send_stream.h @@ -18,9 +18,12 @@ #include #include "absl/types/optional.h" +#include "api/adaptation/resource.h" #include "api/call/transport.h" #include "api/crypto/crypto_options.h" +#include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" @@ -28,7 +31,11 @@ #include "api/video/video_stream_encoder_settings.h" #include "api/video_codecs/video_encoder_config.h" #include "call/rtp_config.h" +#include "common_video/frame_counts.h" #include "common_video/include/quality_limitation_reason.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -39,15 +46,35 @@ class FrameEncryptorInterface; class VideoSendStream { public: + // Multiple StreamStats objects are present if simulcast is used (multiple + // kMedia streams) or if RTX or FlexFEC is negotiated. Multiple SVC layers, on + // the other hand, does not cause additional StreamStats. struct StreamStats { + enum class StreamType { + // A media stream is an RTP stream for audio or video. 
Retransmissions and + // FEC is either sent over the same SSRC or negotiated to be sent over + // separate SSRCs, in which case separate StreamStats objects exist with + // references to this media stream's SSRC. + kMedia, + // RTX streams are streams dedicated to retransmissions. They have a + // dependency on a single kMedia stream: |referenced_media_ssrc|. + kRtx, + // FlexFEC streams are streams dedicated to FlexFEC. They have a + // dependency on a single kMedia stream: |referenced_media_ssrc|. + kFlexfec, + }; + StreamStats(); ~StreamStats(); std::string ToString() const; + StreamType type = StreamType::kMedia; + // If |type| is kRtx or kFlexfec this value is present. The referenced SSRC + // is the kMedia stream that this stream is performing retransmissions or + // FEC for. If |type| is kMedia, this value is null. + absl::optional referenced_media_ssrc; FrameCounts frame_counts; - bool is_rtx = false; - bool is_flexfec = false; int width = 0; int height = 0; // TODO(holmer): Move bitrate_bps out to the webrtc::Call layer. @@ -62,6 +89,12 @@ class VideoSendStream { // A snapshot of the most recent Report Block with additional data of // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. absl::optional report_block_data; + double encode_frame_rate = 0.0; + int frames_encoded = 0; + absl::optional qp_sum; + uint64_t total_encode_time_ms = 0; + uint64_t total_encoded_bytes_target = 0; + uint32_t huge_frames_sent = 0; }; struct Stats { @@ -81,8 +114,8 @@ class VideoSendStream { uint32_t frames_dropped_by_capturer = 0; uint32_t frames_dropped_by_encoder_queue = 0; uint32_t frames_dropped_by_rate_limiter = 0; + uint32_t frames_dropped_by_congestion_window = 0; uint32_t frames_dropped_by_encoder = 0; - absl::optional qp_sum; // Bitrate the encoder is currently configured to use due to bandwidth // limitations. 
int target_media_bitrate_bps = 0; @@ -108,6 +141,7 @@ class VideoSendStream { std::map substreams; webrtc::VideoContentType content_type = webrtc::VideoContentType::UNSPECIFIED; + uint32_t frames_sent = 0; uint32_t huge_frames_sent = 0; }; @@ -162,6 +196,8 @@ class VideoSendStream { // Per PeerConnection cryptography options. CryptoOptions crypto_options; + rtc::scoped_refptr frame_transformer; + private: // Access to the copy constructor is private to force use of the Copy() // method for those exceptional cases where we do use it. @@ -185,6 +221,15 @@ class VideoSendStream { // When a stream is stopped, it can't receive, process or deliver packets. virtual void Stop() = 0; + // If the resource is overusing, the VideoSendStream will try to reduce + // resolution or frame rate until no resource is overusing. + // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor + // is moved to Call this method could be deleted altogether in favor of + // Call-level APIs only. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; + virtual std::vector> + GetAdaptationResources() = 0; + virtual void SetSource( rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) = 0; @@ -196,6 +241,10 @@ class VideoSendStream { virtual Stats GetStats() = 0; +#ifndef DISABLE_RECORDER + virtual void InjectRecorder(Recorder* recorder) = 0; +#endif + protected: virtual ~VideoSendStream() {} }; diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn index 8fc46898fb..a03e9ab659 100644 --- a/common_audio/BUILD.gn +++ b/common_audio/BUILD.gn @@ -32,8 +32,6 @@ rtc_library("common_audio") { "resampler/sinc_resampler.cc", "smoothing_filter.cc", "smoothing_filter.h", - "sparse_fir_filter.cc", - "sparse_fir_filter.h", "vad/include/vad.h", "vad/vad.cc", "wav_file.cc", @@ -47,19 +45,18 @@ rtc_library("common_audio") { deps = [ ":common_audio_c", ":sinc_resampler", + "../api:array_view", "../rtc_base:checks", "../rtc_base:gtest_prod", 
"../rtc_base:rtc_base_approved", "../rtc_base:sanitizer", - "../rtc_base/memory:aligned_array", "../rtc_base/memory:aligned_malloc", "../rtc_base/system:arch", "../rtc_base/system:file_wrapper", "../system_wrappers", - "../system_wrappers:cpu_features_api", - "third_party/fft4g", - "//third_party/abseil-cpp/absl/types:optional", + "third_party/ooura:fft_size_256", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] @@ -69,6 +66,7 @@ rtc_library("common_audio") { if (current_cpu == "x86" || current_cpu == "x64") { deps += [ ":common_audio_sse2" ] + deps += [ ":common_audio_avx2" ] } } @@ -186,8 +184,7 @@ rtc_library("common_audio_c") { "../rtc_base:sanitizer", "../rtc_base/system:arch", "../system_wrappers", - "../system_wrappers:cpu_features_api", - "third_party/fft4g", + "third_party/ooura:fft_size_256", "third_party/spl_sqrt_floor", ] } @@ -233,10 +230,11 @@ rtc_library("fir_filter_factory") { "../rtc_base:checks", "../rtc_base:rtc_base_approved", "../rtc_base/system:arch", - "../system_wrappers:cpu_features_api", + "../system_wrappers", ] if (current_cpu == "x86" || current_cpu == "x64") { deps += [ ":common_audio_sse2" ] + deps += [ ":common_audio_avx2" ] } if (rtc_build_with_neon) { deps += [ ":common_audio_neon" ] @@ -263,6 +261,31 @@ if (current_cpu == "x86" || current_cpu == "x64") { "../rtc_base/memory:aligned_malloc", ] } + + rtc_library("common_audio_avx2") { + sources = [ + "fir_filter_avx2.cc", + "fir_filter_avx2.h", + "resampler/sinc_resampler_avx2.cc", + ] + + if (is_win) { + cflags = [ "/arch:AVX2" ] + } else { + cflags = [ + "-mavx2", + "-mfma", + ] + } + + deps = [ + ":fir_filter", + ":sinc_resampler", + "../rtc_base:checks", + "../rtc_base:rtc_base_approved", + "../rtc_base/memory:aligned_malloc", + ] + } } if (rtc_build_with_neon) { @@ -332,7 +355,6 @@ if (rtc_include_tests) { "signal_processing/real_fft_unittest.cc", "signal_processing/signal_processing_unittest.cc", "smoothing_filter_unittest.cc", - 
"sparse_fir_filter_unittest.cc", "vad/vad_core_unittest.cc", "vad/vad_filterbank_unittest.cc", "vad/vad_gmm_unittest.cc", @@ -359,7 +381,7 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_approved", "../rtc_base:rtc_base_tests_utils", "../rtc_base/system:arch", - "../system_wrappers:cpu_features_api", + "../system_wrappers", "../test:fileutils", "../test:rtc_expect_death", "../test:test_main", diff --git a/common_audio/OWNERS b/common_audio/OWNERS index 7f721ded2a..ba1c8b11f4 100644 --- a/common_audio/OWNERS +++ b/common_audio/OWNERS @@ -1,9 +1,3 @@ henrik.lundin@webrtc.org -jan.skoglund@webrtc.org kwiberg@webrtc.org -tina.legrand@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* +peah@webrtc.org diff --git a/common_audio/channel_buffer.h b/common_audio/channel_buffer.h index dc44369be6..f0270803f5 100644 --- a/common_audio/channel_buffer.h +++ b/common_audio/channel_buffer.h @@ -14,7 +14,9 @@ #include #include +#include +#include "api/array_view.h" #include "common_audio/include/audio_util.h" #include "rtc_base/checks.h" #include "rtc_base/gtest_prod_util.h" @@ -48,40 +50,60 @@ class ChannelBuffer { num_frames_per_band_(num_frames / num_bands), num_allocated_channels_(num_channels), num_channels_(num_channels), - num_bands_(num_bands) { - for (size_t i = 0; i < num_allocated_channels_; ++i) { - for (size_t j = 0; j < num_bands_; ++j) { - channels_[j * num_allocated_channels_ + i] = - &data_[i * num_frames_ + j * num_frames_per_band_]; - bands_[i * num_bands_ + j] = channels_[j * num_allocated_channels_ + i]; + num_bands_(num_bands), + bands_view_(num_allocated_channels_, + std::vector>(num_bands_)), + channels_view_( + num_bands_, + std::vector>(num_allocated_channels_)) { + // Temporarily cast away const_ness to allow populating the array views. 
+ auto* bands_view = + const_cast>>*>(&bands_view_); + auto* channels_view = + const_cast>>*>( + &channels_view_); + + for (size_t ch = 0; ch < num_allocated_channels_; ++ch) { + for (size_t band = 0; band < num_bands_; ++band) { + (*channels_view)[band][ch] = rtc::ArrayView( + &data_[ch * num_frames_ + band * num_frames_per_band_], + num_frames_per_band_); + (*bands_view)[ch][band] = channels_view_[band][ch]; + channels_[band * num_allocated_channels_ + ch] = + channels_view_[band][ch].data(); + bands_[ch * num_bands_ + band] = + channels_[band * num_allocated_channels_ + ch]; } } } - // Returns a pointer array to the full-band channels (or lower band channels). - // Usage: - // channels()[channel][sample]. + // Returns a pointer array to the channels. + // If band is explicitly specificed, the channels for a specific band are + // returned and the usage becomes: channels(band)[channel][sample]. // Where: + // 0 <= band < |num_bands_| // 0 <= channel < |num_allocated_channels_| - // 0 <= sample < |num_frames_| - T* const* channels() { return channels(0); } - const T* const* channels() const { return channels(0); } + // 0 <= sample < |num_frames_per_band_| - // Returns a pointer array to the channels for a specific band. - // Usage: - // channels(band)[channel][sample]. + // If band is not explicitly specified, the full-band channels (or lower band + // channels) are returned and the usage becomes: channels()[channel][sample]. 
// Where: - // 0 <= band < |num_bands_| // 0 <= channel < |num_allocated_channels_| - // 0 <= sample < |num_frames_per_band_| - const T* const* channels(size_t band) const { + // 0 <= sample < |num_frames_| + const T* const* channels(size_t band = 0) const { RTC_DCHECK_LT(band, num_bands_); return &channels_[band * num_allocated_channels_]; } - T* const* channels(size_t band) { + T* const* channels(size_t band = 0) { const ChannelBuffer* t = this; return const_cast(t->channels(band)); } + rtc::ArrayView> channels_view(size_t band = 0) { + return channels_view_[band]; + } + rtc::ArrayView> channels_view(size_t band = 0) const { + return channels_view_[band]; + } // Returns a pointer array to the bands for a specific channel. // Usage: @@ -100,6 +122,13 @@ class ChannelBuffer { return const_cast(t->bands(channel)); } + rtc::ArrayView> bands_view(size_t channel) { + return bands_view_[channel]; + } + rtc::ArrayView> bands_view(size_t channel) const { + return bands_view_[channel]; + } + // Sets the |slice| pointers to the |start_frame| position for each channel. // Returns |slice| for convenience. const T* const* Slice(T** slice, size_t start_frame) const { @@ -140,6 +169,8 @@ class ChannelBuffer { // Number of channels the user sees. size_t num_channels_; const size_t num_bands_; + const std::vector>> bands_view_; + const std::vector>> channels_view_; }; // One int16_t and one float ChannelBuffer that are kept in sync. 
The sync is diff --git a/common_audio/channel_buffer_unittest.cc b/common_audio/channel_buffer_unittest.cc index 8ec42346d1..a8b64891d6 100644 --- a/common_audio/channel_buffer_unittest.cc +++ b/common_audio/channel_buffer_unittest.cc @@ -53,12 +53,12 @@ TEST(IFChannelBufferTest, SettingNumChannelsOfOneChannelBufferSetsTheOther) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(ChannelBufferTest, SetNumChannelsDeathTest) { +TEST(ChannelBufferDeathTest, SetNumChannelsDeathTest) { ChannelBuffer chb(kNumFrames, kMono); RTC_EXPECT_DEATH(chb.set_num_channels(kStereo), "num_channels"); } -TEST(IFChannelBufferTest, SetNumChannelsDeathTest) { +TEST(IFChannelBufferDeathTest, SetNumChannelsDeathTest) { IFChannelBuffer ifchb(kNumFrames, kMono); RTC_EXPECT_DEATH(ifchb.ibuf()->set_num_channels(kStereo), "num_channels"); } diff --git a/common_audio/fir_filter_avx2.cc b/common_audio/fir_filter_avx2.cc new file mode 100644 index 0000000000..26468e2981 --- /dev/null +++ b/common_audio/fir_filter_avx2.cc @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "common_audio/fir_filter_avx2.h" + +#include +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" + +namespace webrtc { + +FIRFilterAVX2::FIRFilterAVX2(const float* unaligned_coefficients, + size_t unaligned_coefficients_length, + size_t max_input_length) + : // Closest higher multiple of eight. 
+ coefficients_length_((unaligned_coefficients_length + 7) & ~0x07), + state_length_(coefficients_length_ - 1), + coefficients_(static_cast( + AlignedMalloc(sizeof(float) * coefficients_length_, 32))), + state_(static_cast( + AlignedMalloc(sizeof(float) * (max_input_length + state_length_), + 32))) { + // Add zeros at the end of the coefficients. + RTC_DCHECK_GE(coefficients_length_, unaligned_coefficients_length); + size_t padding = coefficients_length_ - unaligned_coefficients_length; + memset(coefficients_.get(), 0, padding * sizeof(coefficients_[0])); + // The coefficients are reversed to compensate for the order in which the + // input samples are acquired (most recent last). + for (size_t i = 0; i < unaligned_coefficients_length; ++i) { + coefficients_[i + padding] = + unaligned_coefficients[unaligned_coefficients_length - i - 1]; + } + memset(state_.get(), 0, + (max_input_length + state_length_) * sizeof(state_[0])); +} + +FIRFilterAVX2::~FIRFilterAVX2() = default; + +void FIRFilterAVX2::Filter(const float* in, size_t length, float* out) { + RTC_DCHECK_GT(length, 0); + + memcpy(&state_[state_length_], in, length * sizeof(*in)); + + // Convolves the input signal |in| with the filter kernel |coefficients_| + // taking into account the previous state. + for (size_t i = 0; i < length; ++i) { + float* in_ptr = &state_[i]; + float* coef_ptr = coefficients_.get(); + + __m256 m_sum = _mm256_setzero_ps(); + __m256 m_in; + + // Depending on if the pointer is aligned with 32 bytes or not it is loaded + // differently. 
+ if (reinterpret_cast(in_ptr) & 0x1F) { + for (size_t j = 0; j < coefficients_length_; j += 8) { + m_in = _mm256_loadu_ps(in_ptr + j); + m_sum = _mm256_fmadd_ps(m_in, _mm256_load_ps(coef_ptr + j), m_sum); + } + } else { + for (size_t j = 0; j < coefficients_length_; j += 8) { + m_in = _mm256_load_ps(in_ptr + j); + m_sum = _mm256_fmadd_ps(m_in, _mm256_load_ps(coef_ptr + j), m_sum); + } + } + __m128 m128_sum = _mm_add_ps(_mm256_extractf128_ps(m_sum, 0), + _mm256_extractf128_ps(m_sum, 1)); + m128_sum = _mm_add_ps(_mm_movehl_ps(m128_sum, m128_sum), m128_sum); + _mm_store_ss(out + i, + _mm_add_ss(m128_sum, _mm_shuffle_ps(m128_sum, m128_sum, 1))); + } + + // Update current state. + memmove(state_.get(), &state_[length], state_length_ * sizeof(state_[0])); +} + +} // namespace webrtc diff --git a/common_audio/fir_filter_avx2.h b/common_audio/fir_filter_avx2.h new file mode 100644 index 0000000000..893b60bf6e --- /dev/null +++ b/common_audio/fir_filter_avx2.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef COMMON_AUDIO_FIR_FILTER_AVX2_H_ +#define COMMON_AUDIO_FIR_FILTER_AVX2_H_ + +#include + +#include + +#include "common_audio/fir_filter.h" +#include "rtc_base/memory/aligned_malloc.h" + +namespace webrtc { + +class FIRFilterAVX2 : public FIRFilter { + public: + FIRFilterAVX2(const float* coefficients, + size_t coefficients_length, + size_t max_input_length); + ~FIRFilterAVX2() override; + + void Filter(const float* in, size_t length, float* out) override; + + private: + const size_t coefficients_length_; + const size_t state_length_; + std::unique_ptr coefficients_; + std::unique_ptr state_; +}; + +} // namespace webrtc + +#endif // COMMON_AUDIO_FIR_FILTER_AVX2_H_ diff --git a/common_audio/fir_filter_factory.cc b/common_audio/fir_filter_factory.cc index 19528e312e..4bcf05245f 100644 --- a/common_audio/fir_filter_factory.cc +++ b/common_audio/fir_filter_factory.cc @@ -17,6 +17,7 @@ #if defined(WEBRTC_HAS_NEON) #include "common_audio/fir_filter_neon.h" #elif defined(WEBRTC_ARCH_X86_FAMILY) +#include "common_audio/fir_filter_avx2.h" #include "common_audio/fir_filter_sse.h" #include "system_wrappers/include/cpu_features_wrapper.h" // kSSE2, WebRtc_G... #endif @@ -34,18 +35,16 @@ FIRFilter* CreateFirFilter(const float* coefficients, FIRFilter* filter = nullptr; // If we know the minimum architecture at compile time, avoid CPU detection. #if defined(WEBRTC_ARCH_X86_FAMILY) -#if defined(__SSE2__) - filter = - new FIRFilterSSE2(coefficients, coefficients_length, max_input_length); -#else // x86 CPU detection required. 
- if (WebRtc_GetCPUInfo(kSSE2)) { + if (GetCPUInfo(kAVX2)) { + filter = + new FIRFilterAVX2(coefficients, coefficients_length, max_input_length); + } else if (GetCPUInfo(kSSE2)) { filter = new FIRFilterSSE2(coefficients, coefficients_length, max_input_length); } else { filter = new FIRFilterC(coefficients, coefficients_length); } -#endif #elif defined(WEBRTC_HAS_NEON) filter = new FIRFilterNEON(coefficients, coefficients_length, max_input_length); diff --git a/common_audio/mocks/mock_smoothing_filter.h b/common_audio/mocks/mock_smoothing_filter.h index 712049fa6a..9df49dd11a 100644 --- a/common_audio/mocks/mock_smoothing_filter.h +++ b/common_audio/mocks/mock_smoothing_filter.h @@ -18,9 +18,9 @@ namespace webrtc { class MockSmoothingFilter : public SmoothingFilter { public: - MOCK_METHOD1(AddSample, void(float)); - MOCK_METHOD0(GetAverage, absl::optional()); - MOCK_METHOD1(SetTimeConstantMs, bool(int)); + MOCK_METHOD(void, AddSample, (float), (override)); + MOCK_METHOD(absl::optional, GetAverage, (), (override)); + MOCK_METHOD(bool, SetTimeConstantMs, (int), (override)); }; } // namespace webrtc diff --git a/common_audio/real_fourier_ooura.cc b/common_audio/real_fourier_ooura.cc index 89694c1667..9acda5494c 100644 --- a/common_audio/real_fourier_ooura.cc +++ b/common_audio/real_fourier_ooura.cc @@ -13,7 +13,7 @@ #include #include -#include "common_audio/third_party/fft4g/fft4g.h" +#include "common_audio/third_party/ooura/fft_size_256/fft4g.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/common_audio/real_fourier_ooura.h b/common_audio/real_fourier_ooura.h index b5f1bcff5d..ae85dfd0dd 100644 --- a/common_audio/real_fourier_ooura.h +++ b/common_audio/real_fourier_ooura.h @@ -35,7 +35,7 @@ class RealFourierOoura : public RealFourier { const size_t length_; const size_t complex_length_; // These are work arrays for Ooura. The names are based on the comments in - // fft4g.c. + // common_audio/third_party/ooura/fft_size_256/fft4g.cc. 
const std::unique_ptr work_ip_; const std::unique_ptr work_w_; }; diff --git a/common_audio/resampler/include/resampler.h b/common_audio/resampler/include/resampler.h index 04c487b331..41940f9a12 100644 --- a/common_audio/resampler/include/resampler.h +++ b/common_audio/resampler/include/resampler.h @@ -90,8 +90,8 @@ class Resampler { size_t num_channels_; // Extra instance for stereo - Resampler* slave_left_; - Resampler* slave_right_; + Resampler* helper_left_; + Resampler* helper_right_; }; } // namespace webrtc diff --git a/common_audio/resampler/push_resampler_unittest.cc b/common_audio/resampler/push_resampler_unittest.cc index 61b9725b3a..4724833fbb 100644 --- a/common_audio/resampler/push_resampler_unittest.cc +++ b/common_audio/resampler/push_resampler_unittest.cc @@ -31,19 +31,19 @@ TEST(PushResamplerTest, VerifiesInputParameters) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(PushResamplerTest, VerifiesBadInputParameters1) { +TEST(PushResamplerDeathTest, VerifiesBadInputParameters1) { PushResampler resampler; RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(-1, 16000, 1), "src_sample_rate_hz"); } -TEST(PushResamplerTest, VerifiesBadInputParameters2) { +TEST(PushResamplerDeathTest, VerifiesBadInputParameters2) { PushResampler resampler; RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, -1, 1), "dst_sample_rate_hz"); } -TEST(PushResamplerTest, VerifiesBadInputParameters3) { +TEST(PushResamplerDeathTest, VerifiesBadInputParameters3) { PushResampler resampler; RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, 16000, 0), "num_channels"); diff --git a/common_audio/resampler/resampler.cc b/common_audio/resampler/resampler.cc index ce38ef56de..ccfed5a014 100644 --- a/common_audio/resampler/resampler.cc +++ b/common_audio/resampler/resampler.cc @@ -37,8 +37,8 @@ Resampler::Resampler() my_out_frequency_khz_(0), my_mode_(kResamplerMode1To1), num_channels_(0), - slave_left_(nullptr), - slave_right_(nullptr) {} + 
helper_left_(nullptr), + helper_right_(nullptr) {} Resampler::Resampler(int inFreq, int outFreq, size_t num_channels) : Resampler() { @@ -61,11 +61,11 @@ Resampler::~Resampler() { if (out_buffer_) { free(out_buffer_); } - if (slave_left_) { - delete slave_left_; + if (helper_left_) { + delete helper_left_; } - if (slave_right_) { - delete slave_right_; + if (helper_right_) { + delete helper_right_; } } @@ -120,13 +120,13 @@ int Resampler::Reset(int inFreq, int outFreq, size_t num_channels) { free(out_buffer_); out_buffer_ = nullptr; } - if (slave_left_) { - delete slave_left_; - slave_left_ = nullptr; + if (helper_left_) { + delete helper_left_; + helper_left_ = nullptr; } - if (slave_right_) { - delete slave_right_; - slave_right_ = nullptr; + if (helper_right_) { + delete helper_right_; + helper_right_ = nullptr; } in_buffer_size_ = 0; @@ -140,8 +140,8 @@ int Resampler::Reset(int inFreq, int outFreq, size_t num_channels) { if (num_channels_ == 2) { // Create two mono resamplers. - slave_left_ = new Resampler(inFreq, outFreq, 1); - slave_right_ = new Resampler(inFreq, outFreq, 1); + helper_left_ = new Resampler(inFreq, outFreq, 1); + helper_right_ = new Resampler(inFreq, outFreq, 1); } // Now create the states we need. 
@@ -401,7 +401,7 @@ int Resampler::Push(const int16_t* samplesIn, size_t maxLen, size_t& outLen) { if (num_channels_ == 2) { - // Split up the signal and call the slave object for each channel + // Split up the signal and call the helper object for each channel int16_t* left = static_cast(malloc(lengthIn * sizeof(int16_t) / 2)); int16_t* right = @@ -422,10 +422,10 @@ int Resampler::Push(const int16_t* samplesIn, size_t actualOutLen_left = 0; size_t actualOutLen_right = 0; // Do resampling for right channel - res |= slave_left_->Push(left, lengthIn, out_left, maxLen / 2, - actualOutLen_left); - res |= slave_right_->Push(right, lengthIn, out_right, maxLen / 2, - actualOutLen_right); + res |= helper_left_->Push(left, lengthIn, out_left, maxLen / 2, + actualOutLen_left); + res |= helper_right_->Push(right, lengthIn, out_right, maxLen / 2, + actualOutLen_right); if (res || (actualOutLen_left != actualOutLen_right)) { free(left); free(right); diff --git a/common_audio/resampler/sinc_resampler.cc b/common_audio/resampler/sinc_resampler.cc index 21707e9e4e..4fa78c5ede 100644 --- a/common_audio/resampler/sinc_resampler.cc +++ b/common_audio/resampler/sinc_resampler.cc @@ -122,28 +122,22 @@ double SincScaleFactor(double io_ratio) { const size_t SincResampler::kKernelSize; // If we know the minimum architecture at compile time, avoid CPU detection. -#if defined(WEBRTC_ARCH_X86_FAMILY) -#if defined(__SSE2__) -#define CONVOLVE_FUNC Convolve_SSE -void SincResampler::InitializeCPUSpecificFeatures() {} -#else -// x86 CPU detection required. Function will be set by -// InitializeCPUSpecificFeatures(). -// TODO(dalecurtis): Once Chrome moves to an SSE baseline this can be removed. -#define CONVOLVE_FUNC convolve_proc_ - void SincResampler::InitializeCPUSpecificFeatures() { - convolve_proc_ = WebRtc_GetCPUInfo(kSSE2) ? 
Convolve_SSE : Convolve_C; -} -#endif -#elif defined(WEBRTC_HAS_NEON) -#define CONVOLVE_FUNC Convolve_NEON -void SincResampler::InitializeCPUSpecificFeatures() {} +#if defined(WEBRTC_HAS_NEON) + convolve_proc_ = Convolve_NEON; +#elif defined(WEBRTC_ARCH_X86_FAMILY) + // Using AVX2 instead of SSE2 when AVX2 supported. + if (GetCPUInfo(kAVX2)) + convolve_proc_ = Convolve_AVX2; + else if (GetCPUInfo(kSSE2)) + convolve_proc_ = Convolve_SSE; + else + convolve_proc_ = Convolve_C; #else -// Unknown architecture. -#define CONVOLVE_FUNC Convolve_C -void SincResampler::InitializeCPUSpecificFeatures() {} + // Unknown architecture. + convolve_proc_ = Convolve_C; #endif +} SincResampler::SincResampler(double io_sample_rate_ratio, size_t request_frames, @@ -152,24 +146,20 @@ SincResampler::SincResampler(double io_sample_rate_ratio, read_cb_(read_cb), request_frames_(request_frames), input_buffer_size_(request_frames_ + kKernelSize), - // Create input buffers with a 16-byte alignment for SSE optimizations. + // Create input buffers with a 32-byte alignment for SIMD optimizations. 
kernel_storage_(static_cast( - AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))), + AlignedMalloc(sizeof(float) * kKernelStorageSize, 32))), kernel_pre_sinc_storage_(static_cast( - AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))), + AlignedMalloc(sizeof(float) * kKernelStorageSize, 32))), kernel_window_storage_(static_cast( - AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))), + AlignedMalloc(sizeof(float) * kKernelStorageSize, 32))), input_buffer_(static_cast( - AlignedMalloc(sizeof(float) * input_buffer_size_, 16))), -#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__) + AlignedMalloc(sizeof(float) * input_buffer_size_, 32))), convolve_proc_(nullptr), -#endif r1_(input_buffer_.get()), r2_(input_buffer_.get() + kKernelSize / 2) { -#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__) InitializeCPUSpecificFeatures(); RTC_DCHECK(convolve_proc_); -#endif RTC_DCHECK_GT(request_frames_, 0); Flush(); RTC_DCHECK_GT(block_size_, kKernelSize); @@ -302,10 +292,10 @@ void SincResampler::Resample(size_t frames, float* destination) { const float* const k1 = kernel_ptr + offset_idx * kKernelSize; const float* const k2 = k1 + kKernelSize; - // Ensure |k1|, |k2| are 16-byte aligned for SIMD usage. Should always be - // true so long as kKernelSize is a multiple of 16. - RTC_DCHECK_EQ(0, reinterpret_cast(k1) % 16); - RTC_DCHECK_EQ(0, reinterpret_cast(k2) % 16); + // Ensure |k1|, |k2| are 32-byte aligned for SIMD usage. Should always be + // true so long as kKernelSize is a multiple of 32. + RTC_DCHECK_EQ(0, reinterpret_cast(k1) % 32); + RTC_DCHECK_EQ(0, reinterpret_cast(k2) % 32); // Initialize input pointer based on quantized |virtual_source_idx_|. 
const float* const input_ptr = r1_ + source_idx; @@ -314,7 +304,7 @@ void SincResampler::Resample(size_t frames, float* destination) { const double kernel_interpolation_factor = virtual_offset_idx - offset_idx; *destination++ = - CONVOLVE_FUNC(input_ptr, k1, k2, kernel_interpolation_factor); + convolve_proc_(input_ptr, k1, k2, kernel_interpolation_factor); // Advance the virtual index. virtual_source_idx_ += current_io_ratio; diff --git a/common_audio/resampler/sinc_resampler.h b/common_audio/resampler/sinc_resampler.h index 5181c18dac..a72a0c62c4 100644 --- a/common_audio/resampler/sinc_resampler.h +++ b/common_audio/resampler/sinc_resampler.h @@ -112,6 +112,10 @@ class SincResampler { const float* k1, const float* k2, double kernel_interpolation_factor); + static float Convolve_AVX2(const float* input_ptr, + const float* k1, + const float* k2, + double kernel_interpolation_factor); #elif defined(WEBRTC_HAS_NEON) static float Convolve_NEON(const float* input_ptr, const float* k1, @@ -155,13 +159,11 @@ class SincResampler { // TODO(ajm): Move to using a global static which must only be initialized // once by the user. We're not doing this initially, because we don't have // e.g. a LazyInstance helper in webrtc. -#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__) typedef float (*ConvolveProc)(const float*, const float*, const float*, double); ConvolveProc convolve_proc_; -#endif // Pointers to the various regions inside |input_buffer_|. See the diagram at // the top of the .cc file for more information. diff --git a/common_audio/resampler/sinc_resampler_avx2.cc b/common_audio/resampler/sinc_resampler_avx2.cc new file mode 100644 index 0000000000..3eb5d4a1b1 --- /dev/null +++ b/common_audio/resampler/sinc_resampler_avx2.cc @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include +#include + +#include "common_audio/resampler/sinc_resampler.h" + +namespace webrtc { + +float SincResampler::Convolve_AVX2(const float* input_ptr, + const float* k1, + const float* k2, + double kernel_interpolation_factor) { + __m256 m_input; + __m256 m_sums1 = _mm256_setzero_ps(); + __m256 m_sums2 = _mm256_setzero_ps(); + + // Based on |input_ptr| alignment, we need to use loadu or load. Unrolling + // these loops has not been tested or benchmarked. + bool aligned_input = (reinterpret_cast(input_ptr) & 0x1F) == 0; + if (!aligned_input) { + for (size_t i = 0; i < kKernelSize; i += 8) { + m_input = _mm256_loadu_ps(input_ptr + i); + m_sums1 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k1 + i), m_sums1); + m_sums2 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k2 + i), m_sums2); + } + } else { + for (size_t i = 0; i < kKernelSize; i += 8) { + m_input = _mm256_load_ps(input_ptr + i); + m_sums1 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k1 + i), m_sums1); + m_sums2 = _mm256_fmadd_ps(m_input, _mm256_load_ps(k2 + i), m_sums2); + } + } + + // Linearly interpolate the two "convolutions". + __m128 m128_sums1 = _mm_add_ps(_mm256_extractf128_ps(m_sums1, 0), + _mm256_extractf128_ps(m_sums1, 1)); + __m128 m128_sums2 = _mm_add_ps(_mm256_extractf128_ps(m_sums2, 0), + _mm256_extractf128_ps(m_sums2, 1)); + m128_sums1 = _mm_mul_ps( + m128_sums1, + _mm_set_ps1(static_cast(1.0 - kernel_interpolation_factor))); + m128_sums2 = _mm_mul_ps( + m128_sums2, _mm_set_ps1(static_cast(kernel_interpolation_factor))); + m128_sums1 = _mm_add_ps(m128_sums1, m128_sums2); + + // Sum components together. 
+ float result; + m128_sums2 = _mm_add_ps(_mm_movehl_ps(m128_sums1, m128_sums1), m128_sums1); + _mm_store_ss(&result, _mm_add_ss(m128_sums2, + _mm_shuffle_ps(m128_sums2, m128_sums2, 1))); + + return result; +} + +} // namespace webrtc diff --git a/common_audio/resampler/sinc_resampler_unittest.cc b/common_audio/resampler/sinc_resampler_unittest.cc index 7bcd7f146e..92dff70131 100644 --- a/common_audio/resampler/sinc_resampler_unittest.cc +++ b/common_audio/resampler/sinc_resampler_unittest.cc @@ -23,7 +23,6 @@ #include #include "common_audio/resampler/sinusoidal_linear_chirp_source.h" -#include "rtc_base/stringize_macros.h" #include "rtc_base/system/arch.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/cpu_features_wrapper.h" @@ -40,7 +39,7 @@ static const double kKernelInterpolationFactor = 0.5; // Helper class to ensure ChunkedResample() functions properly. class MockSource : public SincResamplerCallback { public: - MOCK_METHOD2(Run, void(size_t frames, float* destination)); + MOCK_METHOD(void, Run, (size_t frames, float* destination), (override)); }; ACTION(ClearBuffer) { @@ -116,22 +115,14 @@ TEST(SincResamplerTest, DISABLED_SetRatioBench) { printf("SetRatio() took %.2fms.\n", total_time_c_us / 1000); } -// Define platform independent function name for Convolve* tests. -#if defined(WEBRTC_ARCH_X86_FAMILY) -#define CONVOLVE_FUNC Convolve_SSE -#elif defined(WEBRTC_ARCH_ARM_V7) -#define CONVOLVE_FUNC Convolve_NEON -#endif - // Ensure various optimized Convolve() methods return the same value. Only run // this test if other optimized methods exist, otherwise the default Convolve() // will be tested by the parameterized SincResampler tests below. 
-#if defined(CONVOLVE_FUNC) TEST(SincResamplerTest, Convolve) { #if defined(WEBRTC_ARCH_X86_FAMILY) - ASSERT_TRUE(WebRtc_GetCPUInfo(kSSE2)); + ASSERT_TRUE(GetCPUInfo(kSSE2)); #elif defined(WEBRTC_ARCH_ARM_V7) - ASSERT_TRUE(WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON); + ASSERT_TRUE(GetCPUFeaturesARM() & kCPUFeatureNEON); #endif // Initialize a dummy resampler. @@ -148,7 +139,7 @@ TEST(SincResamplerTest, Convolve) { double result = resampler.Convolve_C( resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); - double result2 = resampler.CONVOLVE_FUNC( + double result2 = resampler.convolve_proc_( resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); EXPECT_NEAR(result2, result, kEpsilon); @@ -157,12 +148,11 @@ TEST(SincResamplerTest, Convolve) { result = resampler.Convolve_C( resampler.kernel_storage_.get() + 1, resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); - result2 = resampler.CONVOLVE_FUNC( + result2 = resampler.convolve_proc_( resampler.kernel_storage_.get() + 1, resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); EXPECT_NEAR(result2, result, kEpsilon); } -#endif // Benchmark for the various Convolve() methods. Make sure to build with // branding=Chrome so that RTC_DCHECKs are compiled out when benchmarking. @@ -190,46 +180,45 @@ TEST(SincResamplerTest, ConvolveBenchmark) { (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; printf("Convolve_C took %.2fms.\n", total_time_c_us / 1000); -#if defined(CONVOLVE_FUNC) #if defined(WEBRTC_ARCH_X86_FAMILY) - ASSERT_TRUE(WebRtc_GetCPUInfo(kSSE2)); + ASSERT_TRUE(GetCPUInfo(kSSE2)); #elif defined(WEBRTC_ARCH_ARM_V7) - ASSERT_TRUE(WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON); + ASSERT_TRUE(GetCPUFeaturesARM() & kCPUFeatureNEON); #endif // Benchmark with unaligned input pointer. 
start = rtc::TimeNanos(); for (int j = 0; j < kConvolveIterations; ++j) { - resampler.CONVOLVE_FUNC( + resampler.convolve_proc_( resampler.kernel_storage_.get() + 1, resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); } double total_time_optimized_unaligned_us = (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; - printf(STRINGIZE(CONVOLVE_FUNC) "(unaligned) took %.2fms; which is %.2fx " - "faster than Convolve_C.\n", total_time_optimized_unaligned_us / 1000, - total_time_c_us / total_time_optimized_unaligned_us); + printf( + "convolve_proc_(unaligned) took %.2fms; which is %.2fx " + "faster than Convolve_C.\n", + total_time_optimized_unaligned_us / 1000, + total_time_c_us / total_time_optimized_unaligned_us); // Benchmark with aligned input pointer. start = rtc::TimeNanos(); for (int j = 0; j < kConvolveIterations; ++j) { - resampler.CONVOLVE_FUNC( + resampler.convolve_proc_( resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); } double total_time_optimized_aligned_us = (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; - printf(STRINGIZE(CONVOLVE_FUNC) " (aligned) took %.2fms; which is %.2fx " - "faster than Convolve_C and %.2fx faster than " - STRINGIZE(CONVOLVE_FUNC) " (unaligned).\n", - total_time_optimized_aligned_us / 1000, - total_time_c_us / total_time_optimized_aligned_us, - total_time_optimized_unaligned_us / total_time_optimized_aligned_us); -#endif + printf( + "convolve_proc_ (aligned) took %.2fms; which is %.2fx " + "faster than Convolve_C and %.2fx faster than " + "convolve_proc_ (unaligned).\n", + total_time_optimized_aligned_us / 1000, + total_time_c_us / total_time_optimized_aligned_us, + total_time_optimized_unaligned_us / total_time_optimized_aligned_us); } -#undef CONVOLVE_FUNC - typedef std::tuple SincResamplerTestData; class SincResamplerTest : public ::testing::TestWithParam { @@ -352,7 +341,7 @@ 
INSTANTIATE_TEST_SUITE_P( std::make_tuple(16000, 44100, kResamplingRMSError, -62.54), std::make_tuple(22050, 44100, kResamplingRMSError, -73.53), std::make_tuple(32000, 44100, kResamplingRMSError, -63.32), - std::make_tuple(44100, 44100, kResamplingRMSError, -73.53), + std::make_tuple(44100, 44100, kResamplingRMSError, -73.52), std::make_tuple(48000, 44100, -15.01, -64.04), std::make_tuple(96000, 44100, -18.49, -25.51), std::make_tuple(192000, 44100, -20.50, -13.31), @@ -360,7 +349,7 @@ INSTANTIATE_TEST_SUITE_P( // To 48kHz std::make_tuple(8000, 48000, kResamplingRMSError, -63.43), std::make_tuple(11025, 48000, kResamplingRMSError, -62.61), - std::make_tuple(16000, 48000, kResamplingRMSError, -63.96), + std::make_tuple(16000, 48000, kResamplingRMSError, -63.95), std::make_tuple(22050, 48000, kResamplingRMSError, -62.42), std::make_tuple(32000, 48000, kResamplingRMSError, -64.04), std::make_tuple(44100, 48000, kResamplingRMSError, -62.63), diff --git a/common_audio/smoothing_filter.h b/common_audio/smoothing_filter.h index e5f561ecf2..e96d52a6f7 100644 --- a/common_audio/smoothing_filter.h +++ b/common_audio/smoothing_filter.h @@ -14,7 +14,6 @@ #include #include "absl/types/optional.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -42,6 +41,11 @@ class SmoothingFilterImpl final : public SmoothingFilter { // will be set to |init_time_ms| first and can be changed through // |SetTimeConstantMs|. 
explicit SmoothingFilterImpl(int init_time_ms); + + SmoothingFilterImpl() = delete; + SmoothingFilterImpl(const SmoothingFilterImpl&) = delete; + SmoothingFilterImpl& operator=(const SmoothingFilterImpl&) = delete; + ~SmoothingFilterImpl() override; void AddSample(float sample) override; @@ -64,8 +68,6 @@ class SmoothingFilterImpl final : public SmoothingFilter { float alpha_; float state_; int64_t last_state_time_ms_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(SmoothingFilterImpl); }; } // namespace webrtc diff --git a/common_audio/smoothing_filter_unittest.cc b/common_audio/smoothing_filter_unittest.cc index 8b311d1498..80230922fe 100644 --- a/common_audio/smoothing_filter_unittest.cc +++ b/common_audio/smoothing_filter_unittest.cc @@ -26,7 +26,7 @@ constexpr int64_t kClockInitialTime = 123456; struct SmoothingFilterStates { explicit SmoothingFilterStates(int init_time_ms) : smoothing_filter(init_time_ms) { - fake_clock.AdvanceTime(TimeDelta::ms(kClockInitialTime)); + fake_clock.AdvanceTime(TimeDelta::Millis(kClockInitialTime)); } rtc::ScopedFakeClock fake_clock; SmoothingFilterImpl smoothing_filter; @@ -42,7 +42,7 @@ void CheckOutput(SmoothingFilterStates* states, int advance_time_ms, float expected_ouput) { states->smoothing_filter.AddSample(sample); - states->fake_clock.AdvanceTime(TimeDelta::ms(advance_time_ms)); + states->fake_clock.AdvanceTime(TimeDelta::Millis(advance_time_ms)); auto output = states->smoothing_filter.GetAverage(); EXPECT_TRUE(output); EXPECT_NEAR(expected_ouput, *output, kMaxAbsError); @@ -142,14 +142,14 @@ TEST(SmoothingFilterTest, CannotChangeTimeConstantDuringInitialization) { states.smoothing_filter.AddSample(0.0); // During initialization, |SetTimeConstantMs| does not take effect. 
- states.fake_clock.AdvanceTime(TimeDelta::ms(kInitTimeMs - 1)); + states.fake_clock.AdvanceTime(TimeDelta::Millis(kInitTimeMs - 1)); states.smoothing_filter.AddSample(0.0); EXPECT_FALSE(states.smoothing_filter.SetTimeConstantMs(kInitTimeMs * 2)); EXPECT_NE(std::exp(-1.0f / (kInitTimeMs * 2)), states.smoothing_filter.alpha()); - states.fake_clock.AdvanceTime(TimeDelta::ms(1)); + states.fake_clock.AdvanceTime(TimeDelta::Millis(1)); states.smoothing_filter.AddSample(0.0); // When initialization finishes, the time constant should be come // |kInitTimeConstantMs|. diff --git a/common_audio/sparse_fir_filter.cc b/common_audio/sparse_fir_filter.cc deleted file mode 100644 index 772eb82e47..0000000000 --- a/common_audio/sparse_fir_filter.cc +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "common_audio/sparse_fir_filter.h" - -#include "rtc_base/checks.h" - -namespace webrtc { - -SparseFIRFilter::SparseFIRFilter(const float* nonzero_coeffs, - size_t num_nonzero_coeffs, - size_t sparsity, - size_t offset) - : sparsity_(sparsity), - offset_(offset), - nonzero_coeffs_(nonzero_coeffs, nonzero_coeffs + num_nonzero_coeffs), - state_(sparsity_ * (num_nonzero_coeffs - 1) + offset_, 0.f) { - RTC_CHECK_GE(num_nonzero_coeffs, 1); - RTC_CHECK_GE(sparsity, 1); -} - -SparseFIRFilter::~SparseFIRFilter() = default; - -void SparseFIRFilter::Filter(const float* in, size_t length, float* out) { - // Convolves the input signal |in| with the filter kernel |nonzero_coeffs_| - // taking into account the previous state. 
- for (size_t i = 0; i < length; ++i) { - out[i] = 0.f; - size_t j; - for (j = 0; i >= j * sparsity_ + offset_ && j < nonzero_coeffs_.size(); - ++j) { - out[i] += in[i - j * sparsity_ - offset_] * nonzero_coeffs_[j]; - } - for (; j < nonzero_coeffs_.size(); ++j) { - out[i] += state_[i + (nonzero_coeffs_.size() - j - 1) * sparsity_] * - nonzero_coeffs_[j]; - } - } - - // Update current state. - if (!state_.empty()) { - if (length >= state_.size()) { - std::memcpy(&state_[0], &in[length - state_.size()], - state_.size() * sizeof(*in)); - } else { - std::memmove(&state_[0], &state_[length], - (state_.size() - length) * sizeof(state_[0])); - std::memcpy(&state_[state_.size() - length], in, length * sizeof(*in)); - } - } -} - -} // namespace webrtc diff --git a/common_audio/sparse_fir_filter.h b/common_audio/sparse_fir_filter.h deleted file mode 100644 index 5197a8e28c..0000000000 --- a/common_audio/sparse_fir_filter.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef COMMON_AUDIO_SPARSE_FIR_FILTER_H_ -#define COMMON_AUDIO_SPARSE_FIR_FILTER_H_ - -#include -#include - -#include "rtc_base/constructor_magic.h" - -namespace webrtc { - -// A Finite Impulse Response filter implementation which takes advantage of a -// sparse structure with uniformly distributed non-zero coefficients. -class SparseFIRFilter final { - public: - // |num_nonzero_coeffs| is the number of non-zero coefficients, - // |nonzero_coeffs|. They are assumed to be uniformly distributed every - // |sparsity| samples and with an initial |offset|. The rest of the filter - // coefficients will be assumed zeros. 
For example, with sparsity = 3, and - // offset = 1 the filter coefficients will be: - // B = [0 coeffs[0] 0 0 coeffs[1] 0 0 coeffs[2] ... ] - // All initial state values will be zeros. - SparseFIRFilter(const float* nonzero_coeffs, - size_t num_nonzero_coeffs, - size_t sparsity, - size_t offset); - ~SparseFIRFilter(); - - // Filters the |in| data supplied. - // |out| must be previously allocated and it must be at least of |length|. - void Filter(const float* in, size_t length, float* out); - - private: - const size_t sparsity_; - const size_t offset_; - const std::vector nonzero_coeffs_; - std::vector state_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SparseFIRFilter); -}; - -} // namespace webrtc - -#endif // COMMON_AUDIO_SPARSE_FIR_FILTER_H_ diff --git a/common_audio/sparse_fir_filter_unittest.cc b/common_audio/sparse_fir_filter_unittest.cc deleted file mode 100644 index 5dc7b6dc6f..0000000000 --- a/common_audio/sparse_fir_filter_unittest.cc +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "common_audio/sparse_fir_filter.h" - -#include - -#include "common_audio/fir_filter.h" -#include "common_audio/fir_filter_factory.h" -#include "rtc_base/arraysize.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { - -static const float kCoeffs[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f}; -static const float kInput[] = {1.f, 2.f, 3.f, 4.f, 5.f, - 6.f, 7.f, 8.f, 9.f, 10.f}; - -template -void VerifyOutput(const float (&expected_output)[N], const float (&output)[N]) { - EXPECT_EQ(0, memcmp(expected_output, output, sizeof(output))); -} - -} // namespace - -TEST(SparseFIRFilterTest, FilterAsIdentity) { - const float kCoeff = 1.f; - const size_t kNumCoeff = 1; - const size_t kSparsity = 3; - const size_t kOffset = 0; - float output[arraysize(kInput)]; - SparseFIRFilter filter(&kCoeff, kNumCoeff, kSparsity, kOffset); - filter.Filter(kInput, arraysize(kInput), output); - VerifyOutput(kInput, output); -} - -TEST(SparseFIRFilterTest, SameOutputForScalarCoefficientAndDifferentSparsity) { - const float kCoeff = 2.f; - const size_t kNumCoeff = 1; - const size_t kLowSparsity = 1; - const size_t kHighSparsity = 7; - const size_t kOffset = 0; - float low_sparsity_output[arraysize(kInput)]; - float high_sparsity_output[arraysize(kInput)]; - SparseFIRFilter low_sparsity_filter(&kCoeff, kNumCoeff, kLowSparsity, - kOffset); - SparseFIRFilter high_sparsity_filter(&kCoeff, kNumCoeff, kHighSparsity, - kOffset); - low_sparsity_filter.Filter(kInput, arraysize(kInput), low_sparsity_output); - high_sparsity_filter.Filter(kInput, arraysize(kInput), high_sparsity_output); - VerifyOutput(low_sparsity_output, high_sparsity_output); -} - -TEST(SparseFIRFilterTest, FilterUsedAsScalarMultiplication) { - const float kCoeff = 5.f; - const size_t kNumCoeff = 1; - const size_t kSparsity = 5; - const size_t kOffset = 0; - float output[arraysize(kInput)]; - SparseFIRFilter filter(&kCoeff, kNumCoeff, kSparsity, kOffset); - filter.Filter(kInput, arraysize(kInput), output); - 
EXPECT_FLOAT_EQ(5.f, output[0]); - EXPECT_FLOAT_EQ(20.f, output[3]); - EXPECT_FLOAT_EQ(25.f, output[4]); - EXPECT_FLOAT_EQ(50.f, output[arraysize(kInput) - 1]); -} - -TEST(SparseFIRFilterTest, FilterUsedAsInputShifting) { - const float kCoeff = 1.f; - const size_t kNumCoeff = 1; - const size_t kSparsity = 1; - const size_t kOffset = 4; - float output[arraysize(kInput)]; - SparseFIRFilter filter(&kCoeff, kNumCoeff, kSparsity, kOffset); - filter.Filter(kInput, arraysize(kInput), output); - EXPECT_FLOAT_EQ(0.f, output[0]); - EXPECT_FLOAT_EQ(0.f, output[3]); - EXPECT_FLOAT_EQ(1.f, output[4]); - EXPECT_FLOAT_EQ(2.f, output[5]); - EXPECT_FLOAT_EQ(6.f, output[arraysize(kInput) - 1]); -} - -TEST(SparseFIRFilterTest, FilterUsedAsArbitraryWeighting) { - const size_t kSparsity = 2; - const size_t kOffset = 1; - float output[arraysize(kInput)]; - SparseFIRFilter filter(kCoeffs, arraysize(kCoeffs), kSparsity, kOffset); - filter.Filter(kInput, arraysize(kInput), output); - EXPECT_FLOAT_EQ(0.f, output[0]); - EXPECT_FLOAT_EQ(0.9f, output[3]); - EXPECT_FLOAT_EQ(1.4f, output[4]); - EXPECT_FLOAT_EQ(2.4f, output[5]); - EXPECT_FLOAT_EQ(8.61f, output[arraysize(kInput) - 1]); -} - -TEST(SparseFIRFilterTest, FilterInLengthLesserOrEqualToCoefficientsLength) { - const size_t kSparsity = 1; - const size_t kOffset = 0; - float output[arraysize(kInput)]; - SparseFIRFilter filter(kCoeffs, arraysize(kCoeffs), kSparsity, kOffset); - filter.Filter(kInput, 2, output); - EXPECT_FLOAT_EQ(0.2f, output[0]); - EXPECT_FLOAT_EQ(0.7f, output[1]); -} - -TEST(SparseFIRFilterTest, MultipleFilterCalls) { - const size_t kSparsity = 1; - const size_t kOffset = 0; - float output[arraysize(kInput)]; - SparseFIRFilter filter(kCoeffs, arraysize(kCoeffs), kSparsity, kOffset); - filter.Filter(kInput, 2, output); - EXPECT_FLOAT_EQ(0.2f, output[0]); - EXPECT_FLOAT_EQ(0.7f, output[1]); - filter.Filter(kInput, 2, output); - EXPECT_FLOAT_EQ(1.3f, output[0]); - EXPECT_FLOAT_EQ(2.4f, output[1]); - filter.Filter(kInput, 2, 
output); - EXPECT_FLOAT_EQ(2.81f, output[0]); - EXPECT_FLOAT_EQ(2.62f, output[1]); - filter.Filter(kInput, 2, output); - EXPECT_FLOAT_EQ(2.81f, output[0]); - EXPECT_FLOAT_EQ(2.62f, output[1]); - filter.Filter(&kInput[3], 3, output); - EXPECT_FLOAT_EQ(3.41f, output[0]); - EXPECT_FLOAT_EQ(4.12f, output[1]); - EXPECT_FLOAT_EQ(6.21f, output[2]); - filter.Filter(&kInput[3], 3, output); - EXPECT_FLOAT_EQ(8.12f, output[0]); - EXPECT_FLOAT_EQ(9.14f, output[1]); - EXPECT_FLOAT_EQ(9.45f, output[2]); -} - -TEST(SparseFIRFilterTest, VerifySampleBasedVsBlockBasedFiltering) { - const size_t kSparsity = 3; - const size_t kOffset = 1; - float output_block_based[arraysize(kInput)]; - SparseFIRFilter filter_block(kCoeffs, arraysize(kCoeffs), kSparsity, kOffset); - filter_block.Filter(kInput, arraysize(kInput), output_block_based); - float output_sample_based[arraysize(kInput)]; - SparseFIRFilter filter_sample(kCoeffs, arraysize(kCoeffs), kSparsity, - kOffset); - for (size_t i = 0; i < arraysize(kInput); ++i) - filter_sample.Filter(&kInput[i], 1, &output_sample_based[i]); - VerifyOutput(output_block_based, output_sample_based); -} - -TEST(SparseFIRFilterTest, SimpleHighPassFilter) { - const size_t kSparsity = 2; - const size_t kOffset = 2; - const float kHPCoeffs[] = {1.f, -1.f}; - const float kConstantInput[] = {1.f, 1.f, 1.f, 1.f, 1.f, - 1.f, 1.f, 1.f, 1.f, 1.f}; - float output[arraysize(kConstantInput)]; - SparseFIRFilter filter(kHPCoeffs, arraysize(kHPCoeffs), kSparsity, kOffset); - filter.Filter(kConstantInput, arraysize(kConstantInput), output); - EXPECT_FLOAT_EQ(0.f, output[0]); - EXPECT_FLOAT_EQ(0.f, output[1]); - EXPECT_FLOAT_EQ(1.f, output[2]); - EXPECT_FLOAT_EQ(1.f, output[3]); - for (size_t i = kSparsity + kOffset; i < arraysize(kConstantInput); ++i) - EXPECT_FLOAT_EQ(0.f, output[i]); -} - -TEST(SparseFIRFilterTest, SimpleLowPassFilter) { - const size_t kSparsity = 2; - const size_t kOffset = 2; - const float kLPCoeffs[] = {1.f, 1.f}; - const float kHighFrequencyInput[] = 
{1.f, 1.f, -1.f, -1.f, 1.f, - 1.f, -1.f, -1.f, 1.f, 1.f}; - float output[arraysize(kHighFrequencyInput)]; - SparseFIRFilter filter(kLPCoeffs, arraysize(kLPCoeffs), kSparsity, kOffset); - filter.Filter(kHighFrequencyInput, arraysize(kHighFrequencyInput), output); - EXPECT_FLOAT_EQ(0.f, output[0]); - EXPECT_FLOAT_EQ(0.f, output[1]); - EXPECT_FLOAT_EQ(1.f, output[2]); - EXPECT_FLOAT_EQ(1.f, output[3]); - for (size_t i = kSparsity + kOffset; i < arraysize(kHighFrequencyInput); ++i) - EXPECT_FLOAT_EQ(0.f, output[i]); -} - -TEST(SparseFIRFilterTest, SameOutputWhenSwappedCoefficientsAndInput) { - const size_t kSparsity = 1; - const size_t kOffset = 0; - float output[arraysize(kCoeffs)]; - float output_swapped[arraysize(kCoeffs)]; - SparseFIRFilter filter(kCoeffs, arraysize(kCoeffs), kSparsity, kOffset); - // Use arraysize(kCoeffs) for in_length to get same-length outputs. - filter.Filter(kInput, arraysize(kCoeffs), output); - SparseFIRFilter filter_swapped(kInput, arraysize(kCoeffs), kSparsity, - kOffset); - filter_swapped.Filter(kCoeffs, arraysize(kCoeffs), output_swapped); - VerifyOutput(output, output_swapped); -} - -TEST(SparseFIRFilterTest, SameOutputAsFIRFilterWhenSparsityOneAndOffsetZero) { - const size_t kSparsity = 1; - const size_t kOffset = 0; - float output[arraysize(kInput)]; - float sparse_output[arraysize(kInput)]; - std::unique_ptr filter( - CreateFirFilter(kCoeffs, arraysize(kCoeffs), arraysize(kInput))); - SparseFIRFilter sparse_filter(kCoeffs, arraysize(kCoeffs), kSparsity, - kOffset); - filter->Filter(kInput, arraysize(kInput), output); - sparse_filter.Filter(kInput, arraysize(kInput), sparse_output); - for (size_t i = 0; i < arraysize(kInput); ++i) { - EXPECT_FLOAT_EQ(output[i], sparse_output[i]); - } -} - -} // namespace webrtc diff --git a/common_audio/third_party/fft4g/fft4g.c b/common_audio/third_party/fft4g/fft4g.c deleted file mode 100644 index 9cf7b9f6ca..0000000000 --- a/common_audio/third_party/fft4g/fft4g.c +++ /dev/null @@ -1,1332 +0,0 @@ 
-/* - * http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html - * Copyright Takuya OOURA, 1996-2001 - * - * You may use, copy, modify and distribute this code for any purpose (include - * commercial use) and without fee. Please refer to this package when you modify - * this code. - * - * Changes: - * Trivial type modifications by the WebRTC authors. - */ - -/* -Fast Fourier/Cosine/Sine Transform - dimension :one - data length :power of 2 - decimation :frequency - radix :4, 2 - data :inplace - table :use -functions - cdft: Complex Discrete Fourier Transform - rdft: Real Discrete Fourier Transform - ddct: Discrete Cosine Transform - ddst: Discrete Sine Transform - dfct: Cosine Transform of RDFT (Real Symmetric DFT) - dfst: Sine Transform of RDFT (Real Anti-symmetric DFT) -function prototypes - void cdft(int, int, float *, int *, float *); - void rdft(size_t, int, float *, size_t *, float *); - void ddct(int, int, float *, int *, float *); - void ddst(int, int, float *, int *, float *); - void dfct(int, float *, float *, int *, float *); - void dfst(int, float *, float *, int *, float *); - - --------- Complex DFT (Discrete Fourier Transform) -------- - [definition] - - X[k] = sum_j=0^n-1 x[j]*exp(2*pi*i*j*k/n), 0<=k - X[k] = sum_j=0^n-1 x[j]*exp(-2*pi*i*j*k/n), 0<=k - ip[0] = 0; // first time only - cdft(2*n, 1, a, ip, w); - - ip[0] = 0; // first time only - cdft(2*n, -1, a, ip, w); - [parameters] - 2*n :data length (int) - n >= 1, n = power of 2 - a[0...2*n-1] :input/output data (float *) - input data - a[2*j] = Re(x[j]), - a[2*j+1] = Im(x[j]), 0<=j= 2+sqrt(n) - strictly, - length of ip >= - 2+(1<<(int)(log(n+0.5)/log(2))/2). - ip[0],ip[1] are pointers of the cos/sin table. - w[0...n/2-1] :cos/sin table (float *) - w[],ip[] are initialized if ip[0] == 0. - [remark] - Inverse of - cdft(2*n, -1, a, ip, w); - is - cdft(2*n, 1, a, ip, w); - for (j = 0; j <= 2 * n - 1; j++) { - a[j] *= 1.0 / n; - } - . 
- - --------- Real DFT / Inverse of Real DFT -------- - [definition] - RDFT - R[k] = sum_j=0^n-1 a[j]*cos(2*pi*j*k/n), 0<=k<=n/2 - I[k] = sum_j=0^n-1 a[j]*sin(2*pi*j*k/n), 0 IRDFT (excluding scale) - a[k] = (R[0] + R[n/2]*cos(pi*k))/2 + - sum_j=1^n/2-1 R[j]*cos(2*pi*j*k/n) + - sum_j=1^n/2-1 I[j]*sin(2*pi*j*k/n), 0<=k - ip[0] = 0; // first time only - rdft(n, 1, a, ip, w); - - ip[0] = 0; // first time only - rdft(n, -1, a, ip, w); - [parameters] - n :data length (size_t) - n >= 2, n = power of 2 - a[0...n-1] :input/output data (float *) - - output data - a[2*k] = R[k], 0<=k - input data - a[2*j] = R[j], 0<=j= 2+sqrt(n/2) - strictly, - length of ip >= - 2+(1<<(int)(log(n/2+0.5)/log(2))/2). - ip[0],ip[1] are pointers of the cos/sin table. - w[0...n/2-1] :cos/sin table (float *) - w[],ip[] are initialized if ip[0] == 0. - [remark] - Inverse of - rdft(n, 1, a, ip, w); - is - rdft(n, -1, a, ip, w); - for (j = 0; j <= n - 1; j++) { - a[j] *= 2.0 / n; - } - . - - --------- DCT (Discrete Cosine Transform) / Inverse of DCT -------- - [definition] - IDCT (excluding scale) - C[k] = sum_j=0^n-1 a[j]*cos(pi*j*(k+1/2)/n), 0<=k DCT - C[k] = sum_j=0^n-1 a[j]*cos(pi*(j+1/2)*k/n), 0<=k - ip[0] = 0; // first time only - ddct(n, 1, a, ip, w); - - ip[0] = 0; // first time only - ddct(n, -1, a, ip, w); - [parameters] - n :data length (int) - n >= 2, n = power of 2 - a[0...n-1] :input/output data (float *) - output data - a[k] = C[k], 0<=k= 2+sqrt(n/2) - strictly, - length of ip >= - 2+(1<<(int)(log(n/2+0.5)/log(2))/2). - ip[0],ip[1] are pointers of the cos/sin table. - w[0...n*5/4-1] :cos/sin table (float *) - w[],ip[] are initialized if ip[0] == 0. - [remark] - Inverse of - ddct(n, -1, a, ip, w); - is - a[0] *= 0.5; - ddct(n, 1, a, ip, w); - for (j = 0; j <= n - 1; j++) { - a[j] *= 2.0 / n; - } - . 
- - --------- DST (Discrete Sine Transform) / Inverse of DST -------- - [definition] - IDST (excluding scale) - S[k] = sum_j=1^n A[j]*sin(pi*j*(k+1/2)/n), 0<=k DST - S[k] = sum_j=0^n-1 a[j]*sin(pi*(j+1/2)*k/n), 0 - ip[0] = 0; // first time only - ddst(n, 1, a, ip, w); - - ip[0] = 0; // first time only - ddst(n, -1, a, ip, w); - [parameters] - n :data length (int) - n >= 2, n = power of 2 - a[0...n-1] :input/output data (float *) - - input data - a[j] = A[j], 0 - output data - a[k] = S[k], 0= 2+sqrt(n/2) - strictly, - length of ip >= - 2+(1<<(int)(log(n/2+0.5)/log(2))/2). - ip[0],ip[1] are pointers of the cos/sin table. - w[0...n*5/4-1] :cos/sin table (float *) - w[],ip[] are initialized if ip[0] == 0. - [remark] - Inverse of - ddst(n, -1, a, ip, w); - is - a[0] *= 0.5; - ddst(n, 1, a, ip, w); - for (j = 0; j <= n - 1; j++) { - a[j] *= 2.0 / n; - } - . - - --------- Cosine Transform of RDFT (Real Symmetric DFT) -------- - [definition] - C[k] = sum_j=0^n a[j]*cos(pi*j*k/n), 0<=k<=n - [usage] - ip[0] = 0; // first time only - dfct(n, a, t, ip, w); - [parameters] - n :data length - 1 (int) - n >= 2, n = power of 2 - a[0...n] :input/output data (float *) - output data - a[k] = C[k], 0<=k<=n - t[0...n/2] :work area (float *) - ip[0...*] :work area for bit reversal (int *) - length of ip >= 2+sqrt(n/4) - strictly, - length of ip >= - 2+(1<<(int)(log(n/4+0.5)/log(2))/2). - ip[0],ip[1] are pointers of the cos/sin table. - w[0...n*5/8-1] :cos/sin table (float *) - w[],ip[] are initialized if ip[0] == 0. - [remark] - Inverse of - a[0] *= 0.5; - a[n] *= 0.5; - dfct(n, a, t, ip, w); - is - a[0] *= 0.5; - a[n] *= 0.5; - dfct(n, a, t, ip, w); - for (j = 0; j <= n; j++) { - a[j] *= 2.0 / n; - } - . 
- - --------- Sine Transform of RDFT (Real Anti-symmetric DFT) -------- - [definition] - S[k] = sum_j=1^n-1 a[j]*sin(pi*j*k/n), 0= 2, n = power of 2 - a[0...n-1] :input/output data (float *) - output data - a[k] = S[k], 0= 2+sqrt(n/4) - strictly, - length of ip >= - 2+(1<<(int)(log(n/4+0.5)/log(2))/2). - ip[0],ip[1] are pointers of the cos/sin table. - w[0...n*5/8-1] :cos/sin table (float *) - w[],ip[] are initialized if ip[0] == 0. - [remark] - Inverse of - dfst(n, a, t, ip, w); - is - dfst(n, a, t, ip, w); - for (j = 1; j <= n - 1; j++) { - a[j] *= 2.0 / n; - } - . - - -Appendix : - The cos/sin table is recalculated when the larger table required. - w[] and ip[] are compatible with all routines. -*/ - -#include - -static void makewt(size_t nw, size_t *ip, float *w); -static void makect(size_t nc, size_t *ip, float *c); -static void bitrv2(size_t n, size_t *ip, float *a); -#if 0 // Not used. -static void bitrv2conj(int n, int *ip, float *a); -#endif -static void cftfsub(size_t n, float *a, float *w); -static void cftbsub(size_t n, float *a, float *w); -static void cft1st(size_t n, float *a, float *w); -static void cftmdl(size_t n, size_t l, float *a, float *w); -static void rftfsub(size_t n, float *a, size_t nc, float *c); -static void rftbsub(size_t n, float *a, size_t nc, float *c); -#if 0 // Not used. -static void dctsub(int n, float *a, int nc, float *c) -static void dstsub(int n, float *a, int nc, float *c) -#endif - - -#if 0 // Not used. 
-void WebRtc_cdft(int n, int isgn, float *a, int *ip, float *w) -{ - if (n > (ip[0] << 2)) { - makewt(n >> 2, ip, w); - } - if (n > 4) { - if (isgn >= 0) { - bitrv2(n, ip + 2, a); - cftfsub(n, a, w); - } else { - bitrv2conj(n, ip + 2, a); - cftbsub(n, a, w); - } - } else if (n == 4) { - cftfsub(n, a, w); - } -} -#endif - - -void WebRtc_rdft(size_t n, int isgn, float *a, size_t *ip, float *w) -{ - size_t nw, nc; - float xi; - - nw = ip[0]; - if (n > (nw << 2)) { - nw = n >> 2; - makewt(nw, ip, w); - } - nc = ip[1]; - if (n > (nc << 2)) { - nc = n >> 2; - makect(nc, ip, w + nw); - } - if (isgn >= 0) { - if (n > 4) { - bitrv2(n, ip + 2, a); - cftfsub(n, a, w); - rftfsub(n, a, nc, w + nw); - } else if (n == 4) { - cftfsub(n, a, w); - } - xi = a[0] - a[1]; - a[0] += a[1]; - a[1] = xi; - } else { - a[1] = 0.5f * (a[0] - a[1]); - a[0] -= a[1]; - if (n > 4) { - rftbsub(n, a, nc, w + nw); - bitrv2(n, ip + 2, a); - cftbsub(n, a, w); - } else if (n == 4) { - cftfsub(n, a, w); - } - } -} - -#if 0 // Not used. 
-static void ddct(int n, int isgn, float *a, int *ip, float *w) -{ - int j, nw, nc; - float xr; - - nw = ip[0]; - if (n > (nw << 2)) { - nw = n >> 2; - makewt(nw, ip, w); - } - nc = ip[1]; - if (n > nc) { - nc = n; - makect(nc, ip, w + nw); - } - if (isgn < 0) { - xr = a[n - 1]; - for (j = n - 2; j >= 2; j -= 2) { - a[j + 1] = a[j] - a[j - 1]; - a[j] += a[j - 1]; - } - a[1] = a[0] - xr; - a[0] += xr; - if (n > 4) { - rftbsub(n, a, nc, w + nw); - bitrv2(n, ip + 2, a); - cftbsub(n, a, w); - } else if (n == 4) { - cftfsub(n, a, w); - } - } - dctsub(n, a, nc, w + nw); - if (isgn >= 0) { - if (n > 4) { - bitrv2(n, ip + 2, a); - cftfsub(n, a, w); - rftfsub(n, a, nc, w + nw); - } else if (n == 4) { - cftfsub(n, a, w); - } - xr = a[0] - a[1]; - a[0] += a[1]; - for (j = 2; j < n; j += 2) { - a[j - 1] = a[j] - a[j + 1]; - a[j] += a[j + 1]; - } - a[n - 1] = xr; - } -} - - -static void ddst(int n, int isgn, float *a, int *ip, float *w) -{ - int j, nw, nc; - float xr; - - nw = ip[0]; - if (n > (nw << 2)) { - nw = n >> 2; - makewt(nw, ip, w); - } - nc = ip[1]; - if (n > nc) { - nc = n; - makect(nc, ip, w + nw); - } - if (isgn < 0) { - xr = a[n - 1]; - for (j = n - 2; j >= 2; j -= 2) { - a[j + 1] = -a[j] - a[j - 1]; - a[j] -= a[j - 1]; - } - a[1] = a[0] + xr; - a[0] -= xr; - if (n > 4) { - rftbsub(n, a, nc, w + nw); - bitrv2(n, ip + 2, a); - cftbsub(n, a, w); - } else if (n == 4) { - cftfsub(n, a, w); - } - } - dstsub(n, a, nc, w + nw); - if (isgn >= 0) { - if (n > 4) { - bitrv2(n, ip + 2, a); - cftfsub(n, a, w); - rftfsub(n, a, nc, w + nw); - } else if (n == 4) { - cftfsub(n, a, w); - } - xr = a[0] - a[1]; - a[0] += a[1]; - for (j = 2; j < n; j += 2) { - a[j - 1] = -a[j] - a[j + 1]; - a[j] -= a[j + 1]; - } - a[n - 1] = -xr; - } -} - - -static void dfct(int n, float *a, float *t, int *ip, float *w) -{ - int j, k, l, m, mh, nw, nc; - float xr, xi, yr, yi; - - nw = ip[0]; - if (n > (nw << 3)) { - nw = n >> 3; - makewt(nw, ip, w); - } - nc = ip[1]; - if (n > (nc << 1)) { - nc = n >> 
1; - makect(nc, ip, w + nw); - } - m = n >> 1; - yi = a[m]; - xi = a[0] + a[n]; - a[0] -= a[n]; - t[0] = xi - yi; - t[m] = xi + yi; - if (n > 2) { - mh = m >> 1; - for (j = 1; j < mh; j++) { - k = m - j; - xr = a[j] - a[n - j]; - xi = a[j] + a[n - j]; - yr = a[k] - a[n - k]; - yi = a[k] + a[n - k]; - a[j] = xr; - a[k] = yr; - t[j] = xi - yi; - t[k] = xi + yi; - } - t[mh] = a[mh] + a[n - mh]; - a[mh] -= a[n - mh]; - dctsub(m, a, nc, w + nw); - if (m > 4) { - bitrv2(m, ip + 2, a); - cftfsub(m, a, w); - rftfsub(m, a, nc, w + nw); - } else if (m == 4) { - cftfsub(m, a, w); - } - a[n - 1] = a[0] - a[1]; - a[1] = a[0] + a[1]; - for (j = m - 2; j >= 2; j -= 2) { - a[2 * j + 1] = a[j] + a[j + 1]; - a[2 * j - 1] = a[j] - a[j + 1]; - } - l = 2; - m = mh; - while (m >= 2) { - dctsub(m, t, nc, w + nw); - if (m > 4) { - bitrv2(m, ip + 2, t); - cftfsub(m, t, w); - rftfsub(m, t, nc, w + nw); - } else if (m == 4) { - cftfsub(m, t, w); - } - a[n - l] = t[0] - t[1]; - a[l] = t[0] + t[1]; - k = 0; - for (j = 2; j < m; j += 2) { - k += l << 2; - a[k - l] = t[j] - t[j + 1]; - a[k + l] = t[j] + t[j + 1]; - } - l <<= 1; - mh = m >> 1; - for (j = 0; j < mh; j++) { - k = m - j; - t[j] = t[m + k] - t[m + j]; - t[k] = t[m + k] + t[m + j]; - } - t[mh] = t[m + mh]; - m = mh; - } - a[l] = t[0]; - a[n] = t[2] - t[1]; - a[0] = t[2] + t[1]; - } else { - a[1] = a[0]; - a[2] = t[0]; - a[0] = t[1]; - } -} - -static void dfst(int n, float *a, float *t, int *ip, float *w) -{ - int j, k, l, m, mh, nw, nc; - float xr, xi, yr, yi; - - nw = ip[0]; - if (n > (nw << 3)) { - nw = n >> 3; - makewt(nw, ip, w); - } - nc = ip[1]; - if (n > (nc << 1)) { - nc = n >> 1; - makect(nc, ip, w + nw); - } - if (n > 2) { - m = n >> 1; - mh = m >> 1; - for (j = 1; j < mh; j++) { - k = m - j; - xr = a[j] + a[n - j]; - xi = a[j] - a[n - j]; - yr = a[k] + a[n - k]; - yi = a[k] - a[n - k]; - a[j] = xr; - a[k] = yr; - t[j] = xi + yi; - t[k] = xi - yi; - } - t[0] = a[mh] - a[n - mh]; - a[mh] += a[n - mh]; - a[0] = a[m]; - 
dstsub(m, a, nc, w + nw); - if (m > 4) { - bitrv2(m, ip + 2, a); - cftfsub(m, a, w); - rftfsub(m, a, nc, w + nw); - } else if (m == 4) { - cftfsub(m, a, w); - } - a[n - 1] = a[1] - a[0]; - a[1] = a[0] + a[1]; - for (j = m - 2; j >= 2; j -= 2) { - a[2 * j + 1] = a[j] - a[j + 1]; - a[2 * j - 1] = -a[j] - a[j + 1]; - } - l = 2; - m = mh; - while (m >= 2) { - dstsub(m, t, nc, w + nw); - if (m > 4) { - bitrv2(m, ip + 2, t); - cftfsub(m, t, w); - rftfsub(m, t, nc, w + nw); - } else if (m == 4) { - cftfsub(m, t, w); - } - a[n - l] = t[1] - t[0]; - a[l] = t[0] + t[1]; - k = 0; - for (j = 2; j < m; j += 2) { - k += l << 2; - a[k - l] = -t[j] - t[j + 1]; - a[k + l] = t[j] - t[j + 1]; - } - l <<= 1; - mh = m >> 1; - for (j = 1; j < mh; j++) { - k = m - j; - t[j] = t[m + k] + t[m + j]; - t[k] = t[m + k] - t[m + j]; - } - t[0] = t[m + mh]; - m = mh; - } - a[l] = t[0]; - } - a[0] = 0; -} -#endif // Not used. - - -/* -------- initializing routines -------- */ - - -#include - -static void makewt(size_t nw, size_t *ip, float *w) -{ - size_t j, nwh; - float delta, x, y; - - ip[0] = nw; - ip[1] = 1; - if (nw > 2) { - nwh = nw >> 1; - delta = atanf(1.0f) / nwh; - w[0] = 1; - w[1] = 0; - w[nwh] = (float)cos(delta * nwh); - w[nwh + 1] = w[nwh]; - if (nwh > 2) { - for (j = 2; j < nwh; j += 2) { - x = (float)cos(delta * j); - y = (float)sin(delta * j); - w[j] = x; - w[j + 1] = y; - w[nw - j] = y; - w[nw - j + 1] = x; - } - bitrv2(nw, ip + 2, w); - } - } -} - - -static void makect(size_t nc, size_t *ip, float *c) -{ - size_t j, nch; - float delta; - - ip[1] = nc; - if (nc > 1) { - nch = nc >> 1; - delta = atanf(1.0f) / nch; - c[0] = (float)cos(delta * nch); - c[nch] = 0.5f * c[0]; - for (j = 1; j < nch; j++) { - c[j] = 0.5f * (float)cos(delta * j); - c[nc - j] = 0.5f * (float)sin(delta * j); - } - } -} - - -/* -------- child routines -------- */ - - -static void bitrv2(size_t n, size_t *ip, float *a) -{ - size_t j, j1, k, k1, l, m, m2; - float xr, xi, yr, yi; - - ip[0] = 0; - l = n; - m = 
1; - while ((m << 3) < l) { - l >>= 1; - for (j = 0; j < m; j++) { - ip[m + j] = ip[j] + l; - } - m <<= 1; - } - m2 = 2 * m; - if ((m << 3) == l) { - for (k = 0; k < m; k++) { - for (j = 0; j < k; j++) { - j1 = 2 * j + ip[k]; - k1 = 2 * k + ip[j]; - xr = a[j1]; - xi = a[j1 + 1]; - yr = a[k1]; - yi = a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 += 2 * m2; - xr = a[j1]; - xi = a[j1 + 1]; - yr = a[k1]; - yi = a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 -= m2; - xr = a[j1]; - xi = a[j1 + 1]; - yr = a[k1]; - yi = a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 += 2 * m2; - xr = a[j1]; - xi = a[j1 + 1]; - yr = a[k1]; - yi = a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - } - j1 = 2 * k + m2 + ip[k]; - k1 = j1 + m2; - xr = a[j1]; - xi = a[j1 + 1]; - yr = a[k1]; - yi = a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - } - } else { - for (k = 1; k < m; k++) { - for (j = 0; j < k; j++) { - j1 = 2 * j + ip[k]; - k1 = 2 * k + ip[j]; - xr = a[j1]; - xi = a[j1 + 1]; - yr = a[k1]; - yi = a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 += m2; - xr = a[j1]; - xi = a[j1 + 1]; - yr = a[k1]; - yi = a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - } - } - } -} - -#if 0 // Not used. 
-static void bitrv2conj(int n, int *ip, float *a) -{ - int j, j1, k, k1, l, m, m2; - float xr, xi, yr, yi; - - ip[0] = 0; - l = n; - m = 1; - while ((m << 3) < l) { - l >>= 1; - for (j = 0; j < m; j++) { - ip[m + j] = ip[j] + l; - } - m <<= 1; - } - m2 = 2 * m; - if ((m << 3) == l) { - for (k = 0; k < m; k++) { - for (j = 0; j < k; j++) { - j1 = 2 * j + ip[k]; - k1 = 2 * k + ip[j]; - xr = a[j1]; - xi = -a[j1 + 1]; - yr = a[k1]; - yi = -a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 += 2 * m2; - xr = a[j1]; - xi = -a[j1 + 1]; - yr = a[k1]; - yi = -a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 -= m2; - xr = a[j1]; - xi = -a[j1 + 1]; - yr = a[k1]; - yi = -a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 += 2 * m2; - xr = a[j1]; - xi = -a[j1 + 1]; - yr = a[k1]; - yi = -a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - } - k1 = 2 * k + ip[k]; - a[k1 + 1] = -a[k1 + 1]; - j1 = k1 + m2; - k1 = j1 + m2; - xr = a[j1]; - xi = -a[j1 + 1]; - yr = a[k1]; - yi = -a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - k1 += m2; - a[k1 + 1] = -a[k1 + 1]; - } - } else { - a[1] = -a[1]; - a[m2 + 1] = -a[m2 + 1]; - for (k = 1; k < m; k++) { - for (j = 0; j < k; j++) { - j1 = 2 * j + ip[k]; - k1 = 2 * k + ip[j]; - xr = a[j1]; - xi = -a[j1 + 1]; - yr = a[k1]; - yi = -a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - j1 += m2; - k1 += m2; - xr = a[j1]; - xi = -a[j1 + 1]; - yr = a[k1]; - yi = -a[k1 + 1]; - a[j1] = yr; - a[j1 + 1] = yi; - a[k1] = xr; - a[k1 + 1] = xi; - } - k1 = 2 * k + ip[k]; - a[k1 + 1] = -a[k1 + 1]; - a[k1 + m2 + 1] = -a[k1 + m2 + 1]; - } - } -} -#endif - -static void cftfsub(size_t n, float *a, float *w) -{ - size_t j, j1, j2, j3, l; - float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; - - l = 2; - if (n > 8) { - cft1st(n, a, w); - l = 8; - while ((l << 2) < n) { - 
cftmdl(n, l, a, w); - l <<= 2; - } - } - if ((l << 2) == n) { - for (j = 0; j < l; j += 2) { - j1 = j + l; - j2 = j1 + l; - j3 = j2 + l; - x0r = a[j] + a[j1]; - x0i = a[j + 1] + a[j1 + 1]; - x1r = a[j] - a[j1]; - x1i = a[j + 1] - a[j1 + 1]; - x2r = a[j2] + a[j3]; - x2i = a[j2 + 1] + a[j3 + 1]; - x3r = a[j2] - a[j3]; - x3i = a[j2 + 1] - a[j3 + 1]; - a[j] = x0r + x2r; - a[j + 1] = x0i + x2i; - a[j2] = x0r - x2r; - a[j2 + 1] = x0i - x2i; - a[j1] = x1r - x3i; - a[j1 + 1] = x1i + x3r; - a[j3] = x1r + x3i; - a[j3 + 1] = x1i - x3r; - } - } else { - for (j = 0; j < l; j += 2) { - j1 = j + l; - x0r = a[j] - a[j1]; - x0i = a[j + 1] - a[j1 + 1]; - a[j] += a[j1]; - a[j + 1] += a[j1 + 1]; - a[j1] = x0r; - a[j1 + 1] = x0i; - } - } -} - - -static void cftbsub(size_t n, float *a, float *w) -{ - size_t j, j1, j2, j3, l; - float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; - - l = 2; - if (n > 8) { - cft1st(n, a, w); - l = 8; - while ((l << 2) < n) { - cftmdl(n, l, a, w); - l <<= 2; - } - } - if ((l << 2) == n) { - for (j = 0; j < l; j += 2) { - j1 = j + l; - j2 = j1 + l; - j3 = j2 + l; - x0r = a[j] + a[j1]; - x0i = -a[j + 1] - a[j1 + 1]; - x1r = a[j] - a[j1]; - x1i = -a[j + 1] + a[j1 + 1]; - x2r = a[j2] + a[j3]; - x2i = a[j2 + 1] + a[j3 + 1]; - x3r = a[j2] - a[j3]; - x3i = a[j2 + 1] - a[j3 + 1]; - a[j] = x0r + x2r; - a[j + 1] = x0i - x2i; - a[j2] = x0r - x2r; - a[j2 + 1] = x0i + x2i; - a[j1] = x1r - x3i; - a[j1 + 1] = x1i - x3r; - a[j3] = x1r + x3i; - a[j3 + 1] = x1i + x3r; - } - } else { - for (j = 0; j < l; j += 2) { - j1 = j + l; - x0r = a[j] - a[j1]; - x0i = -a[j + 1] + a[j1 + 1]; - a[j] += a[j1]; - a[j + 1] = -a[j + 1] - a[j1 + 1]; - a[j1] = x0r; - a[j1 + 1] = x0i; - } - } -} - - -static void cft1st(size_t n, float *a, float *w) -{ - size_t j, k1, k2; - float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i; - float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; - - x0r = a[0] + a[2]; - x0i = a[1] + a[3]; - x1r = a[0] - a[2]; - x1i = a[1] - a[3]; - x2r = a[4] + a[6]; - x2i = a[5] + a[7]; - x3r = a[4] - 
a[6]; - x3i = a[5] - a[7]; - a[0] = x0r + x2r; - a[1] = x0i + x2i; - a[4] = x0r - x2r; - a[5] = x0i - x2i; - a[2] = x1r - x3i; - a[3] = x1i + x3r; - a[6] = x1r + x3i; - a[7] = x1i - x3r; - wk1r = w[2]; - x0r = a[8] + a[10]; - x0i = a[9] + a[11]; - x1r = a[8] - a[10]; - x1i = a[9] - a[11]; - x2r = a[12] + a[14]; - x2i = a[13] + a[15]; - x3r = a[12] - a[14]; - x3i = a[13] - a[15]; - a[8] = x0r + x2r; - a[9] = x0i + x2i; - a[12] = x2i - x0i; - a[13] = x0r - x2r; - x0r = x1r - x3i; - x0i = x1i + x3r; - a[10] = wk1r * (x0r - x0i); - a[11] = wk1r * (x0r + x0i); - x0r = x3i + x1r; - x0i = x3r - x1i; - a[14] = wk1r * (x0i - x0r); - a[15] = wk1r * (x0i + x0r); - k1 = 0; - for (j = 16; j < n; j += 16) { - k1 += 2; - k2 = 2 * k1; - wk2r = w[k1]; - wk2i = w[k1 + 1]; - wk1r = w[k2]; - wk1i = w[k2 + 1]; - wk3r = wk1r - 2 * wk2i * wk1i; - wk3i = 2 * wk2i * wk1r - wk1i; - x0r = a[j] + a[j + 2]; - x0i = a[j + 1] + a[j + 3]; - x1r = a[j] - a[j + 2]; - x1i = a[j + 1] - a[j + 3]; - x2r = a[j + 4] + a[j + 6]; - x2i = a[j + 5] + a[j + 7]; - x3r = a[j + 4] - a[j + 6]; - x3i = a[j + 5] - a[j + 7]; - a[j] = x0r + x2r; - a[j + 1] = x0i + x2i; - x0r -= x2r; - x0i -= x2i; - a[j + 4] = wk2r * x0r - wk2i * x0i; - a[j + 5] = wk2r * x0i + wk2i * x0r; - x0r = x1r - x3i; - x0i = x1i + x3r; - a[j + 2] = wk1r * x0r - wk1i * x0i; - a[j + 3] = wk1r * x0i + wk1i * x0r; - x0r = x1r + x3i; - x0i = x1i - x3r; - a[j + 6] = wk3r * x0r - wk3i * x0i; - a[j + 7] = wk3r * x0i + wk3i * x0r; - wk1r = w[k2 + 2]; - wk1i = w[k2 + 3]; - wk3r = wk1r - 2 * wk2r * wk1i; - wk3i = 2 * wk2r * wk1r - wk1i; - x0r = a[j + 8] + a[j + 10]; - x0i = a[j + 9] + a[j + 11]; - x1r = a[j + 8] - a[j + 10]; - x1i = a[j + 9] - a[j + 11]; - x2r = a[j + 12] + a[j + 14]; - x2i = a[j + 13] + a[j + 15]; - x3r = a[j + 12] - a[j + 14]; - x3i = a[j + 13] - a[j + 15]; - a[j + 8] = x0r + x2r; - a[j + 9] = x0i + x2i; - x0r -= x2r; - x0i -= x2i; - a[j + 12] = -wk2i * x0r - wk2r * x0i; - a[j + 13] = -wk2i * x0i + wk2r * x0r; - x0r = x1r - x3i; - x0i = 
x1i + x3r; - a[j + 10] = wk1r * x0r - wk1i * x0i; - a[j + 11] = wk1r * x0i + wk1i * x0r; - x0r = x1r + x3i; - x0i = x1i - x3r; - a[j + 14] = wk3r * x0r - wk3i * x0i; - a[j + 15] = wk3r * x0i + wk3i * x0r; - } -} - - -static void cftmdl(size_t n, size_t l, float *a, float *w) -{ - size_t j, j1, j2, j3, k, k1, k2, m, m2; - float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i; - float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; - - m = l << 2; - for (j = 0; j < l; j += 2) { - j1 = j + l; - j2 = j1 + l; - j3 = j2 + l; - x0r = a[j] + a[j1]; - x0i = a[j + 1] + a[j1 + 1]; - x1r = a[j] - a[j1]; - x1i = a[j + 1] - a[j1 + 1]; - x2r = a[j2] + a[j3]; - x2i = a[j2 + 1] + a[j3 + 1]; - x3r = a[j2] - a[j3]; - x3i = a[j2 + 1] - a[j3 + 1]; - a[j] = x0r + x2r; - a[j + 1] = x0i + x2i; - a[j2] = x0r - x2r; - a[j2 + 1] = x0i - x2i; - a[j1] = x1r - x3i; - a[j1 + 1] = x1i + x3r; - a[j3] = x1r + x3i; - a[j3 + 1] = x1i - x3r; - } - wk1r = w[2]; - for (j = m; j < l + m; j += 2) { - j1 = j + l; - j2 = j1 + l; - j3 = j2 + l; - x0r = a[j] + a[j1]; - x0i = a[j + 1] + a[j1 + 1]; - x1r = a[j] - a[j1]; - x1i = a[j + 1] - a[j1 + 1]; - x2r = a[j2] + a[j3]; - x2i = a[j2 + 1] + a[j3 + 1]; - x3r = a[j2] - a[j3]; - x3i = a[j2 + 1] - a[j3 + 1]; - a[j] = x0r + x2r; - a[j + 1] = x0i + x2i; - a[j2] = x2i - x0i; - a[j2 + 1] = x0r - x2r; - x0r = x1r - x3i; - x0i = x1i + x3r; - a[j1] = wk1r * (x0r - x0i); - a[j1 + 1] = wk1r * (x0r + x0i); - x0r = x3i + x1r; - x0i = x3r - x1i; - a[j3] = wk1r * (x0i - x0r); - a[j3 + 1] = wk1r * (x0i + x0r); - } - k1 = 0; - m2 = 2 * m; - for (k = m2; k < n; k += m2) { - k1 += 2; - k2 = 2 * k1; - wk2r = w[k1]; - wk2i = w[k1 + 1]; - wk1r = w[k2]; - wk1i = w[k2 + 1]; - wk3r = wk1r - 2 * wk2i * wk1i; - wk3i = 2 * wk2i * wk1r - wk1i; - for (j = k; j < l + k; j += 2) { - j1 = j + l; - j2 = j1 + l; - j3 = j2 + l; - x0r = a[j] + a[j1]; - x0i = a[j + 1] + a[j1 + 1]; - x1r = a[j] - a[j1]; - x1i = a[j + 1] - a[j1 + 1]; - x2r = a[j2] + a[j3]; - x2i = a[j2 + 1] + a[j3 + 1]; - x3r = a[j2] - a[j3]; - x3i = a[j2 
+ 1] - a[j3 + 1]; - a[j] = x0r + x2r; - a[j + 1] = x0i + x2i; - x0r -= x2r; - x0i -= x2i; - a[j2] = wk2r * x0r - wk2i * x0i; - a[j2 + 1] = wk2r * x0i + wk2i * x0r; - x0r = x1r - x3i; - x0i = x1i + x3r; - a[j1] = wk1r * x0r - wk1i * x0i; - a[j1 + 1] = wk1r * x0i + wk1i * x0r; - x0r = x1r + x3i; - x0i = x1i - x3r; - a[j3] = wk3r * x0r - wk3i * x0i; - a[j3 + 1] = wk3r * x0i + wk3i * x0r; - } - wk1r = w[k2 + 2]; - wk1i = w[k2 + 3]; - wk3r = wk1r - 2 * wk2r * wk1i; - wk3i = 2 * wk2r * wk1r - wk1i; - for (j = k + m; j < l + (k + m); j += 2) { - j1 = j + l; - j2 = j1 + l; - j3 = j2 + l; - x0r = a[j] + a[j1]; - x0i = a[j + 1] + a[j1 + 1]; - x1r = a[j] - a[j1]; - x1i = a[j + 1] - a[j1 + 1]; - x2r = a[j2] + a[j3]; - x2i = a[j2 + 1] + a[j3 + 1]; - x3r = a[j2] - a[j3]; - x3i = a[j2 + 1] - a[j3 + 1]; - a[j] = x0r + x2r; - a[j + 1] = x0i + x2i; - x0r -= x2r; - x0i -= x2i; - a[j2] = -wk2i * x0r - wk2r * x0i; - a[j2 + 1] = -wk2i * x0i + wk2r * x0r; - x0r = x1r - x3i; - x0i = x1i + x3r; - a[j1] = wk1r * x0r - wk1i * x0i; - a[j1 + 1] = wk1r * x0i + wk1i * x0r; - x0r = x1r + x3i; - x0i = x1i - x3r; - a[j3] = wk3r * x0r - wk3i * x0i; - a[j3 + 1] = wk3r * x0i + wk3i * x0r; - } - } -} - - -static void rftfsub(size_t n, float *a, size_t nc, float *c) -{ - size_t j, k, kk, ks, m; - float wkr, wki, xr, xi, yr, yi; - - m = n >> 1; - ks = 2 * nc / m; - kk = 0; - for (j = 2; j < m; j += 2) { - k = n - j; - kk += ks; - wkr = 0.5f - c[nc - kk]; - wki = c[kk]; - xr = a[j] - a[k]; - xi = a[j + 1] + a[k + 1]; - yr = wkr * xr - wki * xi; - yi = wkr * xi + wki * xr; - a[j] -= yr; - a[j + 1] -= yi; - a[k] += yr; - a[k + 1] -= yi; - } -} - - -static void rftbsub(size_t n, float *a, size_t nc, float *c) -{ - size_t j, k, kk, ks, m; - float wkr, wki, xr, xi, yr, yi; - - a[1] = -a[1]; - m = n >> 1; - ks = 2 * nc / m; - kk = 0; - for (j = 2; j < m; j += 2) { - k = n - j; - kk += ks; - wkr = 0.5f - c[nc - kk]; - wki = c[kk]; - xr = a[j] - a[k]; - xi = a[j + 1] + a[k + 1]; - yr = wkr * xr + wki * xi; - yi = 
wkr * xi - wki * xr; - a[j] -= yr; - a[j + 1] = yi - a[j + 1]; - a[k] += yr; - a[k + 1] = yi - a[k + 1]; - } - a[m + 1] = -a[m + 1]; -} - -#if 0 // Not used. -static void dctsub(int n, float *a, int nc, float *c) -{ - int j, k, kk, ks, m; - float wkr, wki, xr; - - m = n >> 1; - ks = nc / n; - kk = 0; - for (j = 1; j < m; j++) { - k = n - j; - kk += ks; - wkr = c[kk] - c[nc - kk]; - wki = c[kk] + c[nc - kk]; - xr = wki * a[j] - wkr * a[k]; - a[j] = wkr * a[j] + wki * a[k]; - a[k] = xr; - } - a[m] *= c[0]; -} - - -static void dstsub(int n, float *a, int nc, float *c) -{ - int j, k, kk, ks, m; - float wkr, wki, xr; - - m = n >> 1; - ks = nc / n; - kk = 0; - for (j = 1; j < m; j++) { - k = n - j; - kk += ks; - wkr = c[kk] - c[nc - kk]; - wki = c[kk] + c[nc - kk]; - xr = wki * a[k] - wkr * a[j]; - a[k] = wkr * a[k] + wki * a[j]; - a[j] = xr; - } - a[m] *= c[0]; -} -#endif // Not used. diff --git a/common_audio/third_party/ooura/BUILD.gn b/common_audio/third_party/ooura/BUILD.gn new file mode 100644 index 0000000000..0cdf98e591 --- /dev/null +++ b/common_audio/third_party/ooura/BUILD.gn @@ -0,0 +1,58 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the ../../../LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") + +rtc_library("fft_size_128") { + sources = [ + "fft_size_128/ooura_fft.cc", + "fft_size_128/ooura_fft.h", + "fft_size_128/ooura_fft_tables_common.h", + ] + deps = [ + "../../../rtc_base/system:arch", + "../../../system_wrappers", + ] + cflags = [] + + if (current_cpu == "x86" || current_cpu == "x64") { + sources += [ + "fft_size_128/ooura_fft_sse2.cc", + "fft_size_128/ooura_fft_tables_neon_sse2.h", + ] + if (is_posix || is_fuchsia) { + cflags += [ "-msse2" ] + } + } + + if (rtc_build_with_neon) { + sources += [ + "fft_size_128/ooura_fft_neon.cc", + "fft_size_128/ooura_fft_tables_neon_sse2.h", + ] + + deps += [ "../../../common_audio" ] + + if (current_cpu != "arm64") { + # Enable compilation for the NEON instruction set. + suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags += [ "-mfpu=neon" ] + } + } + + if (current_cpu == "mipsel" && mips_float_abi == "hard") { + sources += [ "fft_size_128/ooura_fft_mips.cc" ] + } +} + +rtc_library("fft_size_256") { + sources = [ + "fft_size_256/fft4g.cc", + "fft_size_256/fft4g.h", + ] +} diff --git a/common_audio/third_party/fft4g/LICENSE b/common_audio/third_party/ooura/LICENSE similarity index 100% rename from common_audio/third_party/fft4g/LICENSE rename to common_audio/third_party/ooura/LICENSE diff --git a/common_audio/third_party/fft4g/README.chromium b/common_audio/third_party/ooura/README.chromium similarity index 100% rename from common_audio/third_party/fft4g/README.chromium rename to common_audio/third_party/ooura/README.chromium diff --git a/modules/audio_processing/utility/ooura_fft.cc b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc similarity index 97% rename from modules/audio_processing/utility/ooura_fft.cc rename to common_audio/third_party/ooura/fft_size_128/ooura_fft.cc index 8628bd39f0..693312012b 100644 --- a/modules/audio_processing/utility/ooura_fft.cc +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc @@ -21,9 +21,9 @@ * 
be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/utility/ooura_fft.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h" -#include "modules/audio_processing/utility/ooura_fft_tables_common.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_common.h" #include "rtc_base/system/arch.h" #include "system_wrappers/include/cpu_features_wrapper.h" @@ -313,9 +313,17 @@ static void rftbsub_128_C(float* a) { } // namespace +OouraFft::OouraFft(bool sse2_available) { +#if defined(WEBRTC_ARCH_X86_FAMILY) + use_sse2_ = sse2_available; +#else + use_sse2_ = false; +#endif +} + OouraFft::OouraFft() { #if defined(WEBRTC_ARCH_X86_FAMILY) - use_sse2_ = (WebRtc_GetCPUInfo(kSSE2) != 0); + use_sse2_ = (GetCPUInfo(kSSE2) != 0); #else use_sse2_ = false; #endif diff --git a/modules/audio_processing/utility/ooura_fft.h b/common_audio/third_party/ooura/fft_size_128/ooura_fft.h similarity index 90% rename from modules/audio_processing/utility/ooura_fft.h rename to common_audio/third_party/ooura/fft_size_128/ooura_fft.h index 0cdd6aa66f..8273dfe58e 100644 --- a/modules/audio_processing/utility/ooura_fft.h +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft.h @@ -38,6 +38,10 @@ void rftbsub_128_neon(float* a); class OouraFft { public: + // Ctor allowing the availability of SSE2 support to be specified. + explicit OouraFft(bool sse2_available); + + // Deprecated: This Ctor will soon be removed. 
OouraFft(); ~OouraFft(); void Fft(float* a) const; diff --git a/modules/audio_processing/utility/ooura_fft_mips.cc b/common_audio/third_party/ooura/fft_size_128/ooura_fft_mips.cc similarity index 99% rename from modules/audio_processing/utility/ooura_fft_mips.cc rename to common_audio/third_party/ooura/fft_size_128/ooura_fft_mips.cc index 42b9d3a496..4c231e357d 100644 --- a/modules/audio_processing/utility/ooura_fft_mips.cc +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft_mips.cc @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/utility/ooura_fft.h" -#include "modules/audio_processing/utility/ooura_fft_tables_common.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_common.h" namespace webrtc { diff --git a/modules/audio_processing/utility/ooura_fft_neon.cc b/common_audio/third_party/ooura/fft_size_128/ooura_fft_neon.cc similarity index 98% rename from modules/audio_processing/utility/ooura_fft_neon.cc rename to common_audio/third_party/ooura/fft_size_128/ooura_fft_neon.cc index 95b5f09ae0..acab9722dc 100644 --- a/modules/audio_processing/utility/ooura_fft_neon.cc +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft_neon.cc @@ -16,9 +16,9 @@ #include -#include "modules/audio_processing/utility/ooura_fft.h" -#include "modules/audio_processing/utility/ooura_fft_tables_common.h" -#include "modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_common.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_neon_sse2.h" namespace webrtc { diff --git a/modules/audio_processing/utility/ooura_fft_sse2.cc b/common_audio/third_party/ooura/fft_size_128/ooura_fft_sse2.cc similarity index 98% rename from 
modules/audio_processing/utility/ooura_fft_sse2.cc rename to common_audio/third_party/ooura/fft_size_128/ooura_fft_sse2.cc index 0e4a44becc..7f0802ddfa 100644 --- a/modules/audio_processing/utility/ooura_fft_sse2.cc +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft_sse2.cc @@ -11,9 +11,9 @@ #include #include -#include "modules/audio_processing/utility/ooura_fft.h" -#include "modules/audio_processing/utility/ooura_fft_tables_common.h" -#include "modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_common.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_neon_sse2.h" #include "rtc_base/system/arch.h" namespace webrtc { diff --git a/modules/audio_processing/utility/ooura_fft_tables_common.h b/common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_common.h similarity index 97% rename from modules/audio_processing/utility/ooura_fft_tables_common.h rename to common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_common.h index 47d076ea2a..6db1dd9ae4 100644 --- a/modules/audio_processing/utility/ooura_fft_tables_common.h +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_common.h @@ -11,7 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_UTILITY_OOURA_FFT_TABLES_COMMON_H_ #define MODULES_AUDIO_PROCESSING_UTILITY_OOURA_FFT_TABLES_COMMON_H_ -#include "modules/audio_processing/utility/ooura_fft.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h" namespace webrtc { diff --git a/modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h b/common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_neon_sse2.h similarity index 98% rename from modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h rename to common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_neon_sse2.h index 10aebac42a..a63d187018 100644 --- 
a/modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft_tables_neon_sse2.h @@ -11,7 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_UTILITY_OOURA_FFT_TABLES_NEON_SSE2_H_ #define MODULES_AUDIO_PROCESSING_UTILITY_OOURA_FFT_TABLES_NEON_SSE2_H_ -#include "modules/audio_processing/utility/ooura_fft.h" +#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h" #include "rtc_base/system/arch.h" #ifdef _MSC_VER /* visual c++ */ diff --git a/common_audio/third_party/ooura/fft_size_256/fft4g.cc b/common_audio/third_party/ooura/fft_size_256/fft4g.cc new file mode 100644 index 0000000000..d2f7c1c41e --- /dev/null +++ b/common_audio/third_party/ooura/fft_size_256/fft4g.cc @@ -0,0 +1,866 @@ +/* + * http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html + * Copyright Takuya OOURA, 1996-2001 + * + * You may use, copy, modify and distribute this code for any purpose (include + * commercial use) and without fee. Please refer to this package when you modify + * this code. + * + * Changes: + * Trivial type modifications by the WebRTC authors. 
+ */ + +/* +Fast Fourier/Cosine/Sine Transform + dimension :one + data length :power of 2 + decimation :frequency + radix :4, 2 + data :inplace + table :use +functions + cdft: Complex Discrete Fourier Transform + rdft: Real Discrete Fourier Transform + ddct: Discrete Cosine Transform + ddst: Discrete Sine Transform + dfct: Cosine Transform of RDFT (Real Symmetric DFT) + dfst: Sine Transform of RDFT (Real Anti-symmetric DFT) +function prototypes + void cdft(int, int, float *, int *, float *); + void rdft(size_t, int, float *, size_t *, float *); + void ddct(int, int, float *, int *, float *); + void ddst(int, int, float *, int *, float *); + void dfct(int, float *, float *, int *, float *); + void dfst(int, float *, float *, int *, float *); + + +-------- Complex DFT (Discrete Fourier Transform) -------- + [definition] + + X[k] = sum_j=0^n-1 x[j]*exp(2*pi*i*j*k/n), 0<=k + X[k] = sum_j=0^n-1 x[j]*exp(-2*pi*i*j*k/n), 0<=k + ip[0] = 0; // first time only + cdft(2*n, 1, a, ip, w); + + ip[0] = 0; // first time only + cdft(2*n, -1, a, ip, w); + [parameters] + 2*n :data length (int) + n >= 1, n = power of 2 + a[0...2*n-1] :input/output data (float *) + input data + a[2*j] = Re(x[j]), + a[2*j+1] = Im(x[j]), 0<=j= 2+sqrt(n) + strictly, + length of ip >= + 2+(1<<(int)(log(n+0.5)/log(2))/2). + ip[0],ip[1] are pointers of the cos/sin table. + w[0...n/2-1] :cos/sin table (float *) + w[],ip[] are initialized if ip[0] == 0. + [remark] + Inverse of + cdft(2*n, -1, a, ip, w); + is + cdft(2*n, 1, a, ip, w); + for (j = 0; j <= 2 * n - 1; j++) { + a[j] *= 1.0 / n; + } + . 
+ + +-------- Real DFT / Inverse of Real DFT -------- + [definition] + RDFT + R[k] = sum_j=0^n-1 a[j]*cos(2*pi*j*k/n), 0<=k<=n/2 + I[k] = sum_j=0^n-1 a[j]*sin(2*pi*j*k/n), 0 IRDFT (excluding scale) + a[k] = (R[0] + R[n/2]*cos(pi*k))/2 + + sum_j=1^n/2-1 R[j]*cos(2*pi*j*k/n) + + sum_j=1^n/2-1 I[j]*sin(2*pi*j*k/n), 0<=k + ip[0] = 0; // first time only + rdft(n, 1, a, ip, w); + + ip[0] = 0; // first time only + rdft(n, -1, a, ip, w); + [parameters] + n :data length (size_t) + n >= 2, n = power of 2 + a[0...n-1] :input/output data (float *) + + output data + a[2*k] = R[k], 0<=k + input data + a[2*j] = R[j], 0<=j= 2+sqrt(n/2) + strictly, + length of ip >= + 2+(1<<(int)(log(n/2+0.5)/log(2))/2). + ip[0],ip[1] are pointers of the cos/sin table. + w[0...n/2-1] :cos/sin table (float *) + w[],ip[] are initialized if ip[0] == 0. + [remark] + Inverse of + rdft(n, 1, a, ip, w); + is + rdft(n, -1, a, ip, w); + for (j = 0; j <= n - 1; j++) { + a[j] *= 2.0 / n; + } + . + + +-------- DCT (Discrete Cosine Transform) / Inverse of DCT -------- + [definition] + IDCT (excluding scale) + C[k] = sum_j=0^n-1 a[j]*cos(pi*j*(k+1/2)/n), 0<=k DCT + C[k] = sum_j=0^n-1 a[j]*cos(pi*(j+1/2)*k/n), 0<=k + ip[0] = 0; // first time only + ddct(n, 1, a, ip, w); + + ip[0] = 0; // first time only + ddct(n, -1, a, ip, w); + [parameters] + n :data length (int) + n >= 2, n = power of 2 + a[0...n-1] :input/output data (float *) + output data + a[k] = C[k], 0<=k= 2+sqrt(n/2) + strictly, + length of ip >= + 2+(1<<(int)(log(n/2+0.5)/log(2))/2). + ip[0],ip[1] are pointers of the cos/sin table. + w[0...n*5/4-1] :cos/sin table (float *) + w[],ip[] are initialized if ip[0] == 0. + [remark] + Inverse of + ddct(n, -1, a, ip, w); + is + a[0] *= 0.5; + ddct(n, 1, a, ip, w); + for (j = 0; j <= n - 1; j++) { + a[j] *= 2.0 / n; + } + . 
+ + +-------- DST (Discrete Sine Transform) / Inverse of DST -------- + [definition] + IDST (excluding scale) + S[k] = sum_j=1^n A[j]*sin(pi*j*(k+1/2)/n), 0<=k DST + S[k] = sum_j=0^n-1 a[j]*sin(pi*(j+1/2)*k/n), 0 + ip[0] = 0; // first time only + ddst(n, 1, a, ip, w); + + ip[0] = 0; // first time only + ddst(n, -1, a, ip, w); + [parameters] + n :data length (int) + n >= 2, n = power of 2 + a[0...n-1] :input/output data (float *) + + input data + a[j] = A[j], 0 + output data + a[k] = S[k], 0= 2+sqrt(n/2) + strictly, + length of ip >= + 2+(1<<(int)(log(n/2+0.5)/log(2))/2). + ip[0],ip[1] are pointers of the cos/sin table. + w[0...n*5/4-1] :cos/sin table (float *) + w[],ip[] are initialized if ip[0] == 0. + [remark] + Inverse of + ddst(n, -1, a, ip, w); + is + a[0] *= 0.5; + ddst(n, 1, a, ip, w); + for (j = 0; j <= n - 1; j++) { + a[j] *= 2.0 / n; + } + . + + +-------- Cosine Transform of RDFT (Real Symmetric DFT) -------- + [definition] + C[k] = sum_j=0^n a[j]*cos(pi*j*k/n), 0<=k<=n + [usage] + ip[0] = 0; // first time only + dfct(n, a, t, ip, w); + [parameters] + n :data length - 1 (int) + n >= 2, n = power of 2 + a[0...n] :input/output data (float *) + output data + a[k] = C[k], 0<=k<=n + t[0...n/2] :work area (float *) + ip[0...*] :work area for bit reversal (int *) + length of ip >= 2+sqrt(n/4) + strictly, + length of ip >= + 2+(1<<(int)(log(n/4+0.5)/log(2))/2). + ip[0],ip[1] are pointers of the cos/sin table. + w[0...n*5/8-1] :cos/sin table (float *) + w[],ip[] are initialized if ip[0] == 0. + [remark] + Inverse of + a[0] *= 0.5; + a[n] *= 0.5; + dfct(n, a, t, ip, w); + is + a[0] *= 0.5; + a[n] *= 0.5; + dfct(n, a, t, ip, w); + for (j = 0; j <= n; j++) { + a[j] *= 2.0 / n; + } + . 
+ + +-------- Sine Transform of RDFT (Real Anti-symmetric DFT) -------- + [definition] + S[k] = sum_j=1^n-1 a[j]*sin(pi*j*k/n), 0= 2, n = power of 2 + a[0...n-1] :input/output data (float *) + output data + a[k] = S[k], 0= 2+sqrt(n/4) + strictly, + length of ip >= + 2+(1<<(int)(log(n/4+0.5)/log(2))/2). + ip[0],ip[1] are pointers of the cos/sin table. + w[0...n*5/8-1] :cos/sin table (float *) + w[],ip[] are initialized if ip[0] == 0. + [remark] + Inverse of + dfst(n, a, t, ip, w); + is + dfst(n, a, t, ip, w); + for (j = 1; j <= n - 1; j++) { + a[j] *= 2.0 / n; + } + . + + +Appendix : + The cos/sin table is recalculated when the larger table required. + w[] and ip[] are compatible with all routines. +*/ + +#include +#include + +#include "common_audio/third_party/ooura/fft_size_256/fft4g.h" + +namespace webrtc { + +namespace { + +void makewt(size_t nw, size_t* ip, float* w); +void makect(size_t nc, size_t* ip, float* c); +void bitrv2(size_t n, size_t* ip, float* a); +void cftfsub(size_t n, float* a, float* w); +void cftbsub(size_t n, float* a, float* w); +void cft1st(size_t n, float* a, float* w); +void cftmdl(size_t n, size_t l, float* a, float* w); +void rftfsub(size_t n, float* a, size_t nc, float* c); +void rftbsub(size_t n, float* a, size_t nc, float* c); + +/* -------- initializing routines -------- */ + +void makewt(size_t nw, size_t* ip, float* w) { + size_t j, nwh; + float delta, x, y; + + ip[0] = nw; + ip[1] = 1; + if (nw > 2) { + nwh = nw >> 1; + delta = atanf(1.0f) / nwh; + w[0] = 1; + w[1] = 0; + w[nwh] = (float)cos(delta * nwh); + w[nwh + 1] = w[nwh]; + if (nwh > 2) { + for (j = 2; j < nwh; j += 2) { + x = (float)cos(delta * j); + y = (float)sin(delta * j); + w[j] = x; + w[j + 1] = y; + w[nw - j] = y; + w[nw - j + 1] = x; + } + bitrv2(nw, ip + 2, w); + } + } +} + +void makect(size_t nc, size_t* ip, float* c) { + size_t j, nch; + float delta; + + ip[1] = nc; + if (nc > 1) { + nch = nc >> 1; + delta = atanf(1.0f) / nch; + c[0] = (float)cos(delta * nch); + 
c[nch] = 0.5f * c[0]; + for (j = 1; j < nch; j++) { + c[j] = 0.5f * (float)cos(delta * j); + c[nc - j] = 0.5f * (float)sin(delta * j); + } + } +} + +/* -------- child routines -------- */ + +void bitrv2(size_t n, size_t* ip, float* a) { + size_t j, j1, k, k1, l, m, m2; + float xr, xi, yr, yi; + + ip[0] = 0; + l = n; + m = 1; + while ((m << 3) < l) { + l >>= 1; + for (j = 0; j < m; j++) { + ip[m + j] = ip[j] + l; + } + m <<= 1; + } + m2 = 2 * m; + if ((m << 3) == l) { + for (k = 0; k < m; k++) { + for (j = 0; j < k; j++) { + j1 = 2 * j + ip[k]; + k1 = 2 * k + ip[j]; + xr = a[j1]; + xi = a[j1 + 1]; + yr = a[k1]; + yi = a[k1 + 1]; + a[j1] = yr; + a[j1 + 1] = yi; + a[k1] = xr; + a[k1 + 1] = xi; + j1 += m2; + k1 += 2 * m2; + xr = a[j1]; + xi = a[j1 + 1]; + yr = a[k1]; + yi = a[k1 + 1]; + a[j1] = yr; + a[j1 + 1] = yi; + a[k1] = xr; + a[k1 + 1] = xi; + j1 += m2; + k1 -= m2; + xr = a[j1]; + xi = a[j1 + 1]; + yr = a[k1]; + yi = a[k1 + 1]; + a[j1] = yr; + a[j1 + 1] = yi; + a[k1] = xr; + a[k1 + 1] = xi; + j1 += m2; + k1 += 2 * m2; + xr = a[j1]; + xi = a[j1 + 1]; + yr = a[k1]; + yi = a[k1 + 1]; + a[j1] = yr; + a[j1 + 1] = yi; + a[k1] = xr; + a[k1 + 1] = xi; + } + j1 = 2 * k + m2 + ip[k]; + k1 = j1 + m2; + xr = a[j1]; + xi = a[j1 + 1]; + yr = a[k1]; + yi = a[k1 + 1]; + a[j1] = yr; + a[j1 + 1] = yi; + a[k1] = xr; + a[k1 + 1] = xi; + } + } else { + for (k = 1; k < m; k++) { + for (j = 0; j < k; j++) { + j1 = 2 * j + ip[k]; + k1 = 2 * k + ip[j]; + xr = a[j1]; + xi = a[j1 + 1]; + yr = a[k1]; + yi = a[k1 + 1]; + a[j1] = yr; + a[j1 + 1] = yi; + a[k1] = xr; + a[k1 + 1] = xi; + j1 += m2; + k1 += m2; + xr = a[j1]; + xi = a[j1 + 1]; + yr = a[k1]; + yi = a[k1 + 1]; + a[j1] = yr; + a[j1 + 1] = yi; + a[k1] = xr; + a[k1 + 1] = xi; + } + } + } +} + +void cftfsub(size_t n, float* a, float* w) { + size_t j, j1, j2, j3, l; + float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; + + l = 2; + if (n > 8) { + cft1st(n, a, w); + l = 8; + while ((l << 2) < n) { + cftmdl(n, l, a, w); + l <<= 2; + } + } + if 
((l << 2) == n) { + for (j = 0; j < l; j += 2) { + j1 = j + l; + j2 = j1 + l; + j3 = j2 + l; + x0r = a[j] + a[j1]; + x0i = a[j + 1] + a[j1 + 1]; + x1r = a[j] - a[j1]; + x1i = a[j + 1] - a[j1 + 1]; + x2r = a[j2] + a[j3]; + x2i = a[j2 + 1] + a[j3 + 1]; + x3r = a[j2] - a[j3]; + x3i = a[j2 + 1] - a[j3 + 1]; + a[j] = x0r + x2r; + a[j + 1] = x0i + x2i; + a[j2] = x0r - x2r; + a[j2 + 1] = x0i - x2i; + a[j1] = x1r - x3i; + a[j1 + 1] = x1i + x3r; + a[j3] = x1r + x3i; + a[j3 + 1] = x1i - x3r; + } + } else { + for (j = 0; j < l; j += 2) { + j1 = j + l; + x0r = a[j] - a[j1]; + x0i = a[j + 1] - a[j1 + 1]; + a[j] += a[j1]; + a[j + 1] += a[j1 + 1]; + a[j1] = x0r; + a[j1 + 1] = x0i; + } + } +} + +void cftbsub(size_t n, float* a, float* w) { + size_t j, j1, j2, j3, l; + float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; + + l = 2; + if (n > 8) { + cft1st(n, a, w); + l = 8; + while ((l << 2) < n) { + cftmdl(n, l, a, w); + l <<= 2; + } + } + if ((l << 2) == n) { + for (j = 0; j < l; j += 2) { + j1 = j + l; + j2 = j1 + l; + j3 = j2 + l; + x0r = a[j] + a[j1]; + x0i = -a[j + 1] - a[j1 + 1]; + x1r = a[j] - a[j1]; + x1i = -a[j + 1] + a[j1 + 1]; + x2r = a[j2] + a[j3]; + x2i = a[j2 + 1] + a[j3 + 1]; + x3r = a[j2] - a[j3]; + x3i = a[j2 + 1] - a[j3 + 1]; + a[j] = x0r + x2r; + a[j + 1] = x0i - x2i; + a[j2] = x0r - x2r; + a[j2 + 1] = x0i + x2i; + a[j1] = x1r - x3i; + a[j1 + 1] = x1i - x3r; + a[j3] = x1r + x3i; + a[j3 + 1] = x1i + x3r; + } + } else { + for (j = 0; j < l; j += 2) { + j1 = j + l; + x0r = a[j] - a[j1]; + x0i = -a[j + 1] + a[j1 + 1]; + a[j] += a[j1]; + a[j + 1] = -a[j + 1] - a[j1 + 1]; + a[j1] = x0r; + a[j1 + 1] = x0i; + } + } +} + +void cft1st(size_t n, float* a, float* w) { + size_t j, k1, k2; + float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i; + float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; + + x0r = a[0] + a[2]; + x0i = a[1] + a[3]; + x1r = a[0] - a[2]; + x1i = a[1] - a[3]; + x2r = a[4] + a[6]; + x2i = a[5] + a[7]; + x3r = a[4] - a[6]; + x3i = a[5] - a[7]; + a[0] = x0r + x2r; + a[1] = x0i + 
x2i; + a[4] = x0r - x2r; + a[5] = x0i - x2i; + a[2] = x1r - x3i; + a[3] = x1i + x3r; + a[6] = x1r + x3i; + a[7] = x1i - x3r; + wk1r = w[2]; + x0r = a[8] + a[10]; + x0i = a[9] + a[11]; + x1r = a[8] - a[10]; + x1i = a[9] - a[11]; + x2r = a[12] + a[14]; + x2i = a[13] + a[15]; + x3r = a[12] - a[14]; + x3i = a[13] - a[15]; + a[8] = x0r + x2r; + a[9] = x0i + x2i; + a[12] = x2i - x0i; + a[13] = x0r - x2r; + x0r = x1r - x3i; + x0i = x1i + x3r; + a[10] = wk1r * (x0r - x0i); + a[11] = wk1r * (x0r + x0i); + x0r = x3i + x1r; + x0i = x3r - x1i; + a[14] = wk1r * (x0i - x0r); + a[15] = wk1r * (x0i + x0r); + k1 = 0; + for (j = 16; j < n; j += 16) { + k1 += 2; + k2 = 2 * k1; + wk2r = w[k1]; + wk2i = w[k1 + 1]; + wk1r = w[k2]; + wk1i = w[k2 + 1]; + wk3r = wk1r - 2 * wk2i * wk1i; + wk3i = 2 * wk2i * wk1r - wk1i; + x0r = a[j] + a[j + 2]; + x0i = a[j + 1] + a[j + 3]; + x1r = a[j] - a[j + 2]; + x1i = a[j + 1] - a[j + 3]; + x2r = a[j + 4] + a[j + 6]; + x2i = a[j + 5] + a[j + 7]; + x3r = a[j + 4] - a[j + 6]; + x3i = a[j + 5] - a[j + 7]; + a[j] = x0r + x2r; + a[j + 1] = x0i + x2i; + x0r -= x2r; + x0i -= x2i; + a[j + 4] = wk2r * x0r - wk2i * x0i; + a[j + 5] = wk2r * x0i + wk2i * x0r; + x0r = x1r - x3i; + x0i = x1i + x3r; + a[j + 2] = wk1r * x0r - wk1i * x0i; + a[j + 3] = wk1r * x0i + wk1i * x0r; + x0r = x1r + x3i; + x0i = x1i - x3r; + a[j + 6] = wk3r * x0r - wk3i * x0i; + a[j + 7] = wk3r * x0i + wk3i * x0r; + wk1r = w[k2 + 2]; + wk1i = w[k2 + 3]; + wk3r = wk1r - 2 * wk2r * wk1i; + wk3i = 2 * wk2r * wk1r - wk1i; + x0r = a[j + 8] + a[j + 10]; + x0i = a[j + 9] + a[j + 11]; + x1r = a[j + 8] - a[j + 10]; + x1i = a[j + 9] - a[j + 11]; + x2r = a[j + 12] + a[j + 14]; + x2i = a[j + 13] + a[j + 15]; + x3r = a[j + 12] - a[j + 14]; + x3i = a[j + 13] - a[j + 15]; + a[j + 8] = x0r + x2r; + a[j + 9] = x0i + x2i; + x0r -= x2r; + x0i -= x2i; + a[j + 12] = -wk2i * x0r - wk2r * x0i; + a[j + 13] = -wk2i * x0i + wk2r * x0r; + x0r = x1r - x3i; + x0i = x1i + x3r; + a[j + 10] = wk1r * x0r - wk1i * x0i; + a[j + 11] 
= wk1r * x0i + wk1i * x0r; + x0r = x1r + x3i; + x0i = x1i - x3r; + a[j + 14] = wk3r * x0r - wk3i * x0i; + a[j + 15] = wk3r * x0i + wk3i * x0r; + } +} + +void cftmdl(size_t n, size_t l, float* a, float* w) { + size_t j, j1, j2, j3, k, k1, k2, m, m2; + float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i; + float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i; + + m = l << 2; + for (j = 0; j < l; j += 2) { + j1 = j + l; + j2 = j1 + l; + j3 = j2 + l; + x0r = a[j] + a[j1]; + x0i = a[j + 1] + a[j1 + 1]; + x1r = a[j] - a[j1]; + x1i = a[j + 1] - a[j1 + 1]; + x2r = a[j2] + a[j3]; + x2i = a[j2 + 1] + a[j3 + 1]; + x3r = a[j2] - a[j3]; + x3i = a[j2 + 1] - a[j3 + 1]; + a[j] = x0r + x2r; + a[j + 1] = x0i + x2i; + a[j2] = x0r - x2r; + a[j2 + 1] = x0i - x2i; + a[j1] = x1r - x3i; + a[j1 + 1] = x1i + x3r; + a[j3] = x1r + x3i; + a[j3 + 1] = x1i - x3r; + } + wk1r = w[2]; + for (j = m; j < l + m; j += 2) { + j1 = j + l; + j2 = j1 + l; + j3 = j2 + l; + x0r = a[j] + a[j1]; + x0i = a[j + 1] + a[j1 + 1]; + x1r = a[j] - a[j1]; + x1i = a[j + 1] - a[j1 + 1]; + x2r = a[j2] + a[j3]; + x2i = a[j2 + 1] + a[j3 + 1]; + x3r = a[j2] - a[j3]; + x3i = a[j2 + 1] - a[j3 + 1]; + a[j] = x0r + x2r; + a[j + 1] = x0i + x2i; + a[j2] = x2i - x0i; + a[j2 + 1] = x0r - x2r; + x0r = x1r - x3i; + x0i = x1i + x3r; + a[j1] = wk1r * (x0r - x0i); + a[j1 + 1] = wk1r * (x0r + x0i); + x0r = x3i + x1r; + x0i = x3r - x1i; + a[j3] = wk1r * (x0i - x0r); + a[j3 + 1] = wk1r * (x0i + x0r); + } + k1 = 0; + m2 = 2 * m; + for (k = m2; k < n; k += m2) { + k1 += 2; + k2 = 2 * k1; + wk2r = w[k1]; + wk2i = w[k1 + 1]; + wk1r = w[k2]; + wk1i = w[k2 + 1]; + wk3r = wk1r - 2 * wk2i * wk1i; + wk3i = 2 * wk2i * wk1r - wk1i; + for (j = k; j < l + k; j += 2) { + j1 = j + l; + j2 = j1 + l; + j3 = j2 + l; + x0r = a[j] + a[j1]; + x0i = a[j + 1] + a[j1 + 1]; + x1r = a[j] - a[j1]; + x1i = a[j + 1] - a[j1 + 1]; + x2r = a[j2] + a[j3]; + x2i = a[j2 + 1] + a[j3 + 1]; + x3r = a[j2] - a[j3]; + x3i = a[j2 + 1] - a[j3 + 1]; + a[j] = x0r + x2r; + a[j + 1] = x0i + x2i; + x0r -= 
x2r; + x0i -= x2i; + a[j2] = wk2r * x0r - wk2i * x0i; + a[j2 + 1] = wk2r * x0i + wk2i * x0r; + x0r = x1r - x3i; + x0i = x1i + x3r; + a[j1] = wk1r * x0r - wk1i * x0i; + a[j1 + 1] = wk1r * x0i + wk1i * x0r; + x0r = x1r + x3i; + x0i = x1i - x3r; + a[j3] = wk3r * x0r - wk3i * x0i; + a[j3 + 1] = wk3r * x0i + wk3i * x0r; + } + wk1r = w[k2 + 2]; + wk1i = w[k2 + 3]; + wk3r = wk1r - 2 * wk2r * wk1i; + wk3i = 2 * wk2r * wk1r - wk1i; + for (j = k + m; j < l + (k + m); j += 2) { + j1 = j + l; + j2 = j1 + l; + j3 = j2 + l; + x0r = a[j] + a[j1]; + x0i = a[j + 1] + a[j1 + 1]; + x1r = a[j] - a[j1]; + x1i = a[j + 1] - a[j1 + 1]; + x2r = a[j2] + a[j3]; + x2i = a[j2 + 1] + a[j3 + 1]; + x3r = a[j2] - a[j3]; + x3i = a[j2 + 1] - a[j3 + 1]; + a[j] = x0r + x2r; + a[j + 1] = x0i + x2i; + x0r -= x2r; + x0i -= x2i; + a[j2] = -wk2i * x0r - wk2r * x0i; + a[j2 + 1] = -wk2i * x0i + wk2r * x0r; + x0r = x1r - x3i; + x0i = x1i + x3r; + a[j1] = wk1r * x0r - wk1i * x0i; + a[j1 + 1] = wk1r * x0i + wk1i * x0r; + x0r = x1r + x3i; + x0i = x1i - x3r; + a[j3] = wk3r * x0r - wk3i * x0i; + a[j3 + 1] = wk3r * x0i + wk3i * x0r; + } + } +} + +void rftfsub(size_t n, float* a, size_t nc, float* c) { + size_t j, k, kk, ks, m; + float wkr, wki, xr, xi, yr, yi; + + m = n >> 1; + ks = 2 * nc / m; + kk = 0; + for (j = 2; j < m; j += 2) { + k = n - j; + kk += ks; + wkr = 0.5f - c[nc - kk]; + wki = c[kk]; + xr = a[j] - a[k]; + xi = a[j + 1] + a[k + 1]; + yr = wkr * xr - wki * xi; + yi = wkr * xi + wki * xr; + a[j] -= yr; + a[j + 1] -= yi; + a[k] += yr; + a[k + 1] -= yi; + } +} + +void rftbsub(size_t n, float* a, size_t nc, float* c) { + size_t j, k, kk, ks, m; + float wkr, wki, xr, xi, yr, yi; + + a[1] = -a[1]; + m = n >> 1; + ks = 2 * nc / m; + kk = 0; + for (j = 2; j < m; j += 2) { + k = n - j; + kk += ks; + wkr = 0.5f - c[nc - kk]; + wki = c[kk]; + xr = a[j] - a[k]; + xi = a[j + 1] + a[k + 1]; + yr = wkr * xr + wki * xi; + yi = wkr * xi - wki * xr; + a[j] -= yr; + a[j + 1] = yi - a[j + 1]; + a[k] += yr; + a[k + 1] = 
yi - a[k + 1]; + } + a[m + 1] = -a[m + 1]; +} + +} // namespace + +void WebRtc_rdft(size_t n, int isgn, float* a, size_t* ip, float* w) { + size_t nw, nc; + float xi; + + nw = ip[0]; + if (n > (nw << 2)) { + nw = n >> 2; + makewt(nw, ip, w); + } + nc = ip[1]; + if (n > (nc << 2)) { + nc = n >> 2; + makect(nc, ip, w + nw); + } + if (isgn >= 0) { + if (n > 4) { + bitrv2(n, ip + 2, a); + cftfsub(n, a, w); + rftfsub(n, a, nc, w + nw); + } else if (n == 4) { + cftfsub(n, a, w); + } + xi = a[0] - a[1]; + a[0] += a[1]; + a[1] = xi; + } else { + a[1] = 0.5f * (a[0] - a[1]); + a[0] -= a[1]; + if (n > 4) { + rftbsub(n, a, nc, w + nw); + bitrv2(n, ip + 2, a); + cftbsub(n, a, w); + } else if (n == 4) { + cftfsub(n, a, w); + } + } +} + +} // namespace webrtc diff --git a/common_audio/third_party/fft4g/fft4g.h b/common_audio/third_party/ooura/fft_size_256/fft4g.h similarity index 70% rename from common_audio/third_party/fft4g/fft4g.h rename to common_audio/third_party/ooura/fft_size_256/fft4g.h index 0820b6e908..d41d2c65aa 100644 --- a/common_audio/third_party/fft4g/fft4g.h +++ b/common_audio/third_party/ooura/fft_size_256/fft4g.h @@ -8,18 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef COMMON_AUDIO_THIRD_PARTY_FFT4G_FFT4G_H_ -#define COMMON_AUDIO_THIRD_PARTY_FFT4G_FFT4G_H_ +#ifndef COMMON_AUDIO_THIRD_PARTY_OOURA_FFT_SIZE_256_FFT4G_H_ +#define COMMON_AUDIO_THIRD_PARTY_OOURA_FFT_SIZE_256_FFT4G_H_ -#if defined(__cplusplus) -extern "C" { -#endif +namespace webrtc { // Refer to fft4g.c for documentation. 
void WebRtc_rdft(size_t n, int isgn, float* a, size_t* ip, float* w); -#if defined(__cplusplus) -} -#endif +} // namespace webrtc -#endif /* COMMON_AUDIO_THIRD_PARTY_FFT4G_FFT4G_H_ */ +#endif // COMMON_AUDIO_THIRD_PARTY_OOURA_FFT_SIZE_256_FFT4G_H_ diff --git a/common_audio/vad/mock/mock_vad.h b/common_audio/vad/mock/mock_vad.h index afe80ef5e1..5a554ce1f9 100644 --- a/common_audio/vad/mock/mock_vad.h +++ b/common_audio/vad/mock/mock_vad.h @@ -18,14 +18,14 @@ namespace webrtc { class MockVad : public Vad { public: - virtual ~MockVad() { Die(); } - MOCK_METHOD0(Die, void()); + ~MockVad() override { Die(); } + MOCK_METHOD(void, Die, ()); - MOCK_METHOD3(VoiceActivity, - enum Activity(const int16_t* audio, - size_t num_samples, - int sample_rate_hz)); - MOCK_METHOD0(Reset, void()); + MOCK_METHOD(enum Activity, + VoiceActivity, + (const int16_t* audio, size_t num_samples, int sample_rate_hz), + (override)); + MOCK_METHOD(void, Reset, (), (override)); }; } // namespace webrtc diff --git a/common_audio/wav_header.cc b/common_audio/wav_header.cc index d3dca9055d..ce119f1095 100644 --- a/common_audio/wav_header.cc +++ b/common_audio/wav_header.cc @@ -132,7 +132,7 @@ uint16_t MapWavFormatToHeaderField(WavFormat format) { case WavFormat::kWavFormatMuLaw: return 7; } - RTC_CHECK(false); + RTC_CHECK_NOTREACHED(); } WavFormat MapHeaderFieldToWavFormat(uint16_t format_header_value) { @@ -278,10 +278,8 @@ size_t GetFormatBytesPerSample(WavFormat format) { return 1; case WavFormat::kWavFormatIeeeFloat: return 4; - default: - RTC_CHECK(false); - return 2; } + RTC_CHECK_NOTREACHED(); } bool CheckWavParameters(size_t num_channels, diff --git a/common_audio/window_generator.h b/common_audio/window_generator.h index 0cbe24a402..c0a89c4f93 100644 --- a/common_audio/window_generator.h +++ b/common_audio/window_generator.h @@ -13,18 +13,17 @@ #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { // Helper class with generators for various signal transform windows. 
class WindowGenerator { public: + WindowGenerator() = delete; + WindowGenerator(const WindowGenerator&) = delete; + WindowGenerator& operator=(const WindowGenerator&) = delete; + static void Hanning(int length, float* window); static void KaiserBesselDerived(float alpha, size_t length, float* window); - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WindowGenerator); }; } // namespace webrtc diff --git a/common_types.h b/common_types.h deleted file mode 100644 index aadda4fb99..0000000000 --- a/common_types.h +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef COMMON_TYPES_H_ -#define COMMON_TYPES_H_ - -#include // For size_t - -#include - -namespace webrtc { - -struct FrameCounts { - FrameCounts() : key_frames(0), delta_frames(0) {} - int key_frames; - int delta_frames; -}; - -// Callback, used to notify an observer whenever frame counts have been updated. -class FrameCountObserver { - public: - virtual ~FrameCountObserver() {} - virtual void FrameCountUpdated(const FrameCounts& frame_counts, - uint32_t ssrc) = 0; -}; - -// Callback, used to notify an observer when the overhead per packet -// has changed. -class OverheadObserver { - public: - virtual ~OverheadObserver() = default; - virtual void OnOverheadChanged(size_t overhead_bytes_per_packet) = 0; -}; - -// ================================================================== -// Video specific types -// ================================================================== - -// TODO(magjed): Move this and other H264 related classes out to their own file. 
-namespace H264 { - -enum Profile { - kProfileConstrainedBaseline, - kProfileBaseline, - kProfileMain, - kProfileConstrainedHigh, - kProfileHigh, -}; - -} // namespace H264 - -struct SpatialLayer { - bool operator==(const SpatialLayer& other) const; - bool operator!=(const SpatialLayer& other) const { return !(*this == other); } - - unsigned short width; - unsigned short height; - float maxFramerate; // fps. - unsigned char numberOfTemporalLayers; - unsigned int maxBitrate; // kilobits/sec. - unsigned int targetBitrate; // kilobits/sec. - unsigned int minBitrate; // kilobits/sec. - unsigned int qpMax; // minimum quality - bool active; // encoded and sent. -}; - -// Simulcast is when the same stream is encoded multiple times with different -// settings such as resolution. -typedef SpatialLayer SimulcastStream; - -// Minimum and maximum playout delay values from capture to render. -// These are best effort values. -// -// A value < 0 indicates no change from previous valid value. -// -// min = max = 0 indicates that the receiver should try and render -// frame as soon as possible. -// -// min = x, max = y indicates that the receiver is free to adapt -// in the range (x, y) based on network jitter. -// -// Note: Given that this gets embedded in a union, it is up-to the owner to -// initialize these values. 
-struct PlayoutDelay { - int min_ms; - int max_ms; -}; - -} // namespace webrtc - -#endif // COMMON_TYPES_H_ diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn index ddf4c2d495..0594e71e55 100644 --- a/common_video/BUILD.gn +++ b/common_video/BUILD.gn @@ -21,26 +21,42 @@ rtc_library("common_video") { "h264/h264_common.h", "h264/pps_parser.cc", "h264/pps_parser.h", + "h264/prefix_parser.cc", + "h264/prefix_parser.h", "h264/profile_level_id.h", "h264/sps_parser.cc", "h264/sps_parser.h", "h264/sps_vui_rewriter.cc", "h264/sps_vui_rewriter.h", - "i420_buffer_pool.cc", "include/bitrate_adjuster.h", - "include/i420_buffer_pool.h", "include/incoming_video_stream.h", "include/quality_limitation_reason.h", - "include/video_frame.h", "include/video_frame_buffer.h", + "include/video_frame_buffer_pool.h", "incoming_video_stream.cc", "libyuv/include/webrtc_libyuv.h", "libyuv/webrtc_libyuv.cc", "video_frame_buffer.cc", + "video_frame_buffer_pool.cc", "video_render_frames.cc", "video_render_frames.h", ] + if (rtc_use_h265) { + sources += [ + "h265/h265_bitstream_parser.cc", + "h265/h265_bitstream_parser.h", + "h265/h265_common.cc", + "h265/h265_common.h", + "h265/h265_pps_parser.cc", + "h265/h265_pps_parser.h", + "h265/h265_sps_parser.cc", + "h265/h265_sps_parser.h", + "h265/h265_vps_parser.cc", + "h265/h265_vps_parser.h", + ] + } + deps = [ "../api:scoped_refptr", "../api/task_queue", @@ -50,7 +66,7 @@ rtc_library("common_video") { "../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocator", "../api/video:video_frame", - "../api/video:video_frame_i420", + "../api/video:video_frame_nv12", "../api/video:video_rtp_headers", "../api/video_codecs:bitstream_parser_api", "../media:rtc_h264_profile_id", @@ -58,11 +74,18 @@ rtc_library("common_video") { "../rtc_base:checks", "../rtc_base:rtc_task_queue", "../rtc_base:safe_minmax", + "../rtc_base/synchronization:mutex", "../rtc_base/system:rtc_export", "../system_wrappers:metrics", - 
"//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("frame_counts") { + visibility = [ "*" ] + + sources = [ "frame_counts.h" ] } if (rtc_include_tests) { @@ -87,19 +110,18 @@ if (rtc_include_tests) { "h264/profile_level_id_unittest.cc", "h264/sps_parser_unittest.cc", "h264/sps_vui_rewriter_unittest.cc", - "i420_buffer_pool_unittest.cc", "libyuv/libyuv_unittest.cc", + "video_frame_buffer_pool_unittest.cc", "video_frame_unittest.cc", ] deps = [ ":common_video", - "../:webrtc_common", "../api:scoped_refptr", "../api/units:time_delta", "../api/video:video_frame", "../api/video:video_frame_i010", - "../api/video:video_frame_i420", + "../api/video:video_frame_nv12", "../api/video:video_rtp_headers", "../media:rtc_h264_profile_id", "../rtc_base", @@ -113,6 +135,7 @@ if (rtc_include_tests) { "../test:test_support", "../test:video_test_common", "//testing/gtest", + "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] diff --git a/common_video/OWNERS b/common_video/OWNERS index f160bdec03..b0088e401f 100644 --- a/common_video/OWNERS +++ b/common_video/OWNERS @@ -1,8 +1,3 @@ magjed@webrtc.org marpan@webrtc.org stefan@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/common_video/bitrate_adjuster.cc b/common_video/bitrate_adjuster.cc index ca52ed9e69..c53c3a02f6 100644 --- a/common_video/bitrate_adjuster.cc +++ b/common_video/bitrate_adjuster.cc @@ -39,7 +39,7 @@ BitrateAdjuster::BitrateAdjuster(float min_adjusted_bitrate_pct, } void BitrateAdjuster::SetTargetBitrateBps(uint32_t bitrate_bps) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); // If the change in target bitrate is large, update the adjusted bitrate // immediately since it's likely we have gained or lost a sizeable amount of // bandwidth and we'll want to respond quickly. @@ -58,22 +58,22 @@ void BitrateAdjuster::SetTargetBitrateBps(uint32_t bitrate_bps) { } uint32_t BitrateAdjuster::GetTargetBitrateBps() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return target_bitrate_bps_; } uint32_t BitrateAdjuster::GetAdjustedBitrateBps() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return adjusted_bitrate_bps_; } absl::optional BitrateAdjuster::GetEstimatedBitrateBps() { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return bitrate_tracker_.Rate(rtc::TimeMillis()); } void BitrateAdjuster::Update(size_t frame_size) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); uint32_t current_time_ms = rtc::TimeMillis(); bitrate_tracker_.Update(frame_size, current_time_ms); UpdateBitrate(current_time_ms); @@ -100,7 +100,7 @@ uint32_t BitrateAdjuster::GetMaxAdjustedBitrateBps() const { // Only safe to call this after Update calls have stopped void BitrateAdjuster::Reset() { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); target_bitrate_bps_ = 0; adjusted_bitrate_bps_ = 0; last_adjusted_target_bitrate_bps_ = 0; diff --git a/common_video/bitrate_adjuster_unittest.cc b/common_video/bitrate_adjuster_unittest.cc index eae15cd078..995aac1c27 100644 --- a/common_video/bitrate_adjuster_unittest.cc +++ b/common_video/bitrate_adjuster_unittest.cc @@ -34,7 +34,7 @@ class 
BitrateAdjusterTest : public ::testing::Test { const size_t frame_size_bytes = (bitrate_bps * frame_interval_ms) / (8 * 1000); for (size_t i = 0; i < update_frame_interval; ++i) { - clock_.AdvanceTime(webrtc::TimeDelta::ms(frame_interval_ms)); + clock_.AdvanceTime(webrtc::TimeDelta::Millis(frame_interval_ms)); adjuster_.Update(frame_size_bytes); } } diff --git a/common_video/frame_counts.h b/common_video/frame_counts.h new file mode 100644 index 0000000000..663fda4a2f --- /dev/null +++ b/common_video/frame_counts.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_FRAME_COUNTS_H_ +#define COMMON_VIDEO_FRAME_COUNTS_H_ + +namespace webrtc { + +struct FrameCounts { + FrameCounts() : key_frames(0), delta_frames(0) {} + int key_frames; + int delta_frames; +}; + +// Callback, used to notify an observer whenever frame counts have been updated. 
+class FrameCountObserver { + public: + virtual ~FrameCountObserver() {} + virtual void FrameCountUpdated(const FrameCounts& frame_counts, + uint32_t ssrc) = 0; +}; + +} // namespace webrtc + +#endif // COMMON_VIDEO_FRAME_COUNTS_H_ diff --git a/common_video/frame_rate_estimator.cc b/common_video/frame_rate_estimator.cc index 86f07228e7..4c5a341ac0 100644 --- a/common_video/frame_rate_estimator.cc +++ b/common_video/frame_rate_estimator.cc @@ -27,7 +27,7 @@ absl::optional FrameRateEstimator::GetAverageFps() const { return absl::nullopt; } TimeDelta time_span = frame_times_.back() - frame_times_.front(); - if (time_span < TimeDelta::us(1)) { + if (time_span < TimeDelta::Micros(1)) { return absl::nullopt; } TimeDelta avg_frame_interval = time_span / (frame_times_.size() - 1); diff --git a/common_video/frame_rate_estimator_unittest.cc b/common_video/frame_rate_estimator_unittest.cc index 9058bac414..d11a7d4140 100644 --- a/common_video/frame_rate_estimator_unittest.cc +++ b/common_video/frame_rate_estimator_unittest.cc @@ -16,7 +16,7 @@ namespace webrtc { namespace { -constexpr TimeDelta kDefaultWindow = TimeDelta::Millis<1000>(); +constexpr TimeDelta kDefaultWindow = TimeDelta::Millis(1000); } class FrameRateEstimatorTest : public ::testing::Test { @@ -32,7 +32,7 @@ TEST_F(FrameRateEstimatorTest, NoEstimateWithLessThanTwoFrames) { EXPECT_FALSE(estimator_.GetAverageFps()); estimator_.OnFrame(clock_.CurrentTime()); EXPECT_FALSE(estimator_.GetAverageFps()); - clock_.AdvanceTime(TimeDelta::ms(33)); + clock_.AdvanceTime(TimeDelta::Millis(33)); EXPECT_FALSE(estimator_.GetAverageFps()); } @@ -46,7 +46,7 @@ TEST_F(FrameRateEstimatorTest, NoEstimateWithZeroSpan) { TEST_F(FrameRateEstimatorTest, SingleSpanFps) { const double kExpectedFps = 30.0; estimator_.OnFrame(clock_.CurrentTime()); - clock_.AdvanceTime(TimeDelta::seconds(1) / kExpectedFps); + clock_.AdvanceTime(TimeDelta::Seconds(1) / kExpectedFps); estimator_.OnFrame(clock_.CurrentTime()); 
EXPECT_NEAR(*estimator_.GetAverageFps(), kExpectedFps, 0.001); } @@ -61,11 +61,11 @@ TEST_F(FrameRateEstimatorTest, AverageFps) { const Timestamp start_time = clock_.CurrentTime(); while (clock_.CurrentTime() - start_time < kDefaultWindow / 2) { estimator_.OnFrame(clock_.CurrentTime()); - clock_.AdvanceTime(TimeDelta::seconds(1) / kLowFps); + clock_.AdvanceTime(TimeDelta::Seconds(1) / kLowFps); } while (clock_.CurrentTime() - start_time < kDefaultWindow) { estimator_.OnFrame(clock_.CurrentTime()); - clock_.AdvanceTime(TimeDelta::seconds(1) / kHighFps); + clock_.AdvanceTime(TimeDelta::Seconds(1) / kHighFps); } EXPECT_NEAR(*estimator_.GetAverageFps(), kExpectedFps, 0.001); @@ -81,13 +81,13 @@ TEST_F(FrameRateEstimatorTest, CullsOldFramesFromAveragingWindow) { // Oldest frame should just be pushed out the window, leaving a single frame // => no estimate possible. - clock_.AdvanceTime(TimeDelta::us(1)); + clock_.AdvanceTime(TimeDelta::Micros(1)); EXPECT_FALSE(estimator_.GetAverageFps(clock_.CurrentTime())); } TEST_F(FrameRateEstimatorTest, Reset) { estimator_.OnFrame(clock_.CurrentTime()); - clock_.AdvanceTime(TimeDelta::seconds(1) / 30); + clock_.AdvanceTime(TimeDelta::Seconds(1) / 30); estimator_.OnFrame(clock_.CurrentTime()); EXPECT_TRUE(estimator_.GetAverageFps()); @@ -95,7 +95,7 @@ TEST_F(FrameRateEstimatorTest, Reset) { // new frame. 
estimator_.Reset(); EXPECT_FALSE(estimator_.GetAverageFps()); - clock_.AdvanceTime(TimeDelta::seconds(1) / 30); + clock_.AdvanceTime(TimeDelta::Seconds(1) / 30); estimator_.OnFrame(clock_.CurrentTime()); EXPECT_FALSE(estimator_.GetAverageFps()); } diff --git a/common_video/generic_frame_descriptor/BUILD.gn b/common_video/generic_frame_descriptor/BUILD.gn index 05a4e2396c..ab97e887f2 100644 --- a/common_video/generic_frame_descriptor/BUILD.gn +++ b/common_video/generic_frame_descriptor/BUILD.gn @@ -19,6 +19,8 @@ rtc_library("generic_frame_descriptor") { "../../api/transport/rtp:dependency_descriptor", "../../api/video:video_codec_constants", "../../rtc_base:checks", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", diff --git a/common_video/generic_frame_descriptor/OWNERS b/common_video/generic_frame_descriptor/OWNERS index 2f874a3a75..b2351ef6d7 100644 --- a/common_video/generic_frame_descriptor/OWNERS +++ b/common_video/generic_frame_descriptor/OWNERS @@ -1,7 +1,2 @@ philipel@webrtc.org danilchap@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* \ No newline at end of file diff --git a/common_video/generic_frame_descriptor/generic_frame_info.cc b/common_video/generic_frame_descriptor/generic_frame_info.cc index ca61072799..af66bbaf67 100644 --- a/common_video/generic_frame_descriptor/generic_frame_info.cc +++ b/common_video/generic_frame_descriptor/generic_frame_info.cc @@ -15,33 +15,6 @@ namespace webrtc { -absl::InlinedVector -GenericFrameInfo::DecodeTargetInfo(absl::string_view indication_symbols) { - absl::InlinedVector decode_targets; - for (char symbol : indication_symbols) { - DecodeTargetIndication indication; - switch (symbol) { - case '-': - indication = DecodeTargetIndication::kNotPresent; - break; - case 'D': - indication = DecodeTargetIndication::kDiscardable; - break; - case 'R': - indication = DecodeTargetIndication::kRequired; - break; - case 'S': - indication = DecodeTargetIndication::kSwitch; - break; - default: - RTC_NOTREACHED(); - } - decode_targets.push_back(indication); - } - - return decode_targets; -} - GenericFrameInfo::GenericFrameInfo() = default; GenericFrameInfo::GenericFrameInfo(const GenericFrameInfo&) = default; GenericFrameInfo::~GenericFrameInfo() = default; @@ -65,14 +38,8 @@ GenericFrameInfo::Builder& GenericFrameInfo::Builder::S(int spatial_id) { GenericFrameInfo::Builder& GenericFrameInfo::Builder::Dtis( absl::string_view indication_symbols) { - info_.decode_target_indications = DecodeTargetInfo(indication_symbols); - return *this; -} - -GenericFrameInfo::Builder& GenericFrameInfo::Builder::Fdiffs( - std::initializer_list frame_diffs) { - info_.frame_diffs.insert(info_.frame_diffs.end(), frame_diffs.begin(), - frame_diffs.end()); + info_.decode_target_indications = + webrtc_impl::StringToDecodeTargetIndications(indication_symbols); return *this; } diff --git a/common_video/generic_frame_descriptor/generic_frame_info.h b/common_video/generic_frame_descriptor/generic_frame_info.h index ce3ee6c4b3..19f413b5d4 100644 --- 
a/common_video/generic_frame_descriptor/generic_frame_info.h +++ b/common_video/generic_frame_descriptor/generic_frame_info.h @@ -11,7 +11,9 @@ #ifndef COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_ #define COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_ +#include #include +#include #include "absl/container/inlined_vector.h" #include "absl/strings/string_view.h" @@ -22,7 +24,7 @@ namespace webrtc { // Describes how a certain encoder buffer was used when encoding a frame. struct CodecBufferUsage { - CodecBufferUsage(int id, bool referenced, bool updated) + constexpr CodecBufferUsage(int id, bool referenced, bool updated) : id(id), referenced(referenced), updated(updated) {} int id = 0; @@ -31,17 +33,15 @@ struct CodecBufferUsage { }; struct GenericFrameInfo : public FrameDependencyTemplate { - static absl::InlinedVector DecodeTargetInfo( - absl::string_view indication_symbols); - class Builder; GenericFrameInfo(); GenericFrameInfo(const GenericFrameInfo&); ~GenericFrameInfo(); - int64_t frame_id = 0; absl::InlinedVector encoder_buffers; + std::vector part_of_chain; + std::bitset<32> active_decode_targets = ~uint32_t{0}; }; class GenericFrameInfo::Builder { @@ -53,7 +53,6 @@ class GenericFrameInfo::Builder { Builder& T(int temporal_id); Builder& S(int spatial_id); Builder& Dtis(absl::string_view indication_symbols); - Builder& Fdiffs(std::initializer_list frame_diffs); private: GenericFrameInfo info_; diff --git a/common_video/h264/OWNERS b/common_video/h264/OWNERS new file mode 100644 index 0000000000..361ed7e84a --- /dev/null +++ b/common_video/h264/OWNERS @@ -0,0 +1 @@ +ssilkin@webrtc.org diff --git a/common_video/h264/h264_common.h b/common_video/h264/h264_common.h index 2beef16ac5..0b1843ee38 100644 --- a/common_video/h264/h264_common.h +++ b/common_video/h264/h264_common.h @@ -42,6 +42,7 @@ enum NaluType : uint8_t { kEndOfSequence = 10, kEndOfStream = 11, kFiller = 12, + kPrefix = 14, kStapA = 24, kFuA = 28 }; diff --git 
a/common_video/h264/prefix_parser.cc b/common_video/h264/prefix_parser.cc new file mode 100644 index 0000000000..11cdcc21c2 --- /dev/null +++ b/common_video/h264/prefix_parser.cc @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "common_video/h264/prefix_parser.h" + +#include +#include + +#include "common_video/h264/h264_common.h" +#include "rtc_base/bit_buffer.h" + +namespace { +typedef absl::optional OptionalPrefix; + +#define RETURN_EMPTY_ON_FAIL(x) \ + if (!(x)) { \ + return OptionalPrefix(); \ + } +} // namespace + +namespace webrtc { + +PrefixParser::PrefixState::PrefixState() = default; +PrefixParser::PrefixState::PrefixState(const PrefixState&) = default; +PrefixParser::PrefixState::~PrefixState() = default; + +// General note: this is based off the 02/2016 version of the H.264 standard. +// You can find it on this page: +// http://www.itu.int/rec/T-REC-H.264 + +// Unpack RBSP and parse SVC extension state from the supplied buffer. +absl::optional PrefixParser::ParsePrefix( + const uint8_t* data, + size_t length) { + std::vector unpacked_buffer = H264::ParseRbsp(data, length); + rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size()); + return ParsePrefixUpToSvcExtension(&bit_buffer); +} + +absl::optional PrefixParser::ParsePrefixUpToSvcExtension( + rtc::BitBuffer* buffer) { + // Now, we need to use a bit buffer to parse through the actual SVC extension + // format. See Section 7.3.1 ("NAL unit syntax") and 7.3.1.1 ("NAL unit header + // SVC extension syntax") of the H.264 standard for a complete description. 
+ + PrefixState svc_extension; + + uint32_t svc_extension_flag = 0; + // Make sure the svc_extension_flag is on. + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension_flag, 1)); + if (!svc_extension_flag) + return OptionalPrefix(); + + // idr_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension.idr_flag, 1)); + // priority_id: u(6) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension.priority_id, 6)); + // no_inter_layer_pred_flag: u(1) + RETURN_EMPTY_ON_FAIL( + buffer->ReadBits(&svc_extension.no_inter_layer_pred_flag, 1)); + // dependency_id: u(3) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension.dependency_id, 3)); + // quality_id: u(4) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension.quality_id, 4)); + // temporal_id: u(3) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension.temporal_id, 3)); + // use_ref_base_pic_flag: u(1) + RETURN_EMPTY_ON_FAIL( + buffer->ReadBits(&svc_extension.use_ref_base_pic_flag, 1)); + // discardable_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension.discardable_flag, 1)); + // output_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&svc_extension.output_flag, 1)); + + return OptionalPrefix(svc_extension); +} + +} // namespace webrtc diff --git a/common_video/h264/prefix_parser.h b/common_video/h264/prefix_parser.h new file mode 100644 index 0000000000..9298eb4276 --- /dev/null +++ b/common_video/h264/prefix_parser.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef COMMON_VIDEO_H264_PREFIX_PARSER_H_ +#define COMMON_VIDEO_H264_PREFIX_PARSER_H_ + +#include "absl/types/optional.h" + +namespace rtc { +class BitBuffer; +} + +namespace webrtc { + +// A class for parsing out SVC extension data from an H264 prefix NALU +class PrefixParser { + public: + // The parsed state of the SVC extension. Only some select values are stored. + // Add more as they are actually needed. + struct PrefixState { + PrefixState(); + PrefixState(const PrefixState&); + ~PrefixState(); + + uint32_t idr_flag = 0; + uint32_t priority_id = 0; + uint32_t no_inter_layer_pred_flag = 1; + uint32_t dependency_id = 0; + uint32_t quality_id = 0; + uint32_t temporal_id = 0; + uint32_t use_ref_base_pic_flag = 0; + uint32_t discardable_flag = 1; + uint32_t output_flag = 1; + }; + + // Unpack RBSP and parse prefix state from the supplied buffer. + static absl::optional ParsePrefix(const uint8_t* data, size_t length); + + protected: + // Parse the prefix NAL, up till the SVC extension part, for a bit buffer where RBSP + // decoding has already been performed. + static absl::optional ParsePrefixUpToSvcExtension(rtc::BitBuffer* buffer); +}; + +} // namespace webrtc +#endif // COMMON_VIDEO_H264_PREFIX_PARSER_H_ diff --git a/common_video/h264/sps_vui_rewriter.cc b/common_video/h264/sps_vui_rewriter.cc index 1c420a9e4b..0d16be8254 100644 --- a/common_video/h264/sps_vui_rewriter.cc +++ b/common_video/h264/sps_vui_rewriter.cc @@ -210,32 +210,23 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( return result; } -void SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( +rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( rtc::ArrayView buffer, - size_t num_nalus, - const size_t* nalu_offsets, - const size_t* nalu_lengths, - const webrtc::ColorSpace* color_space, - rtc::Buffer* output_buffer, - size_t* output_nalu_offsets, - size_t* output_nalu_lengths) { - // Allocate some extra space for potentially adding a missing VUI. 
- output_buffer->EnsureCapacity(buffer.size() + num_nalus * kMaxVuiSpsIncrease); + const webrtc::ColorSpace* color_space) { + std::vector nalus = + H264::FindNaluIndices(buffer.data(), buffer.size()); - const uint8_t* prev_nalu_ptr = buffer.data(); - size_t prev_nalu_length = 0; - - for (size_t i = 0; i < num_nalus; ++i) { - const uint8_t* nalu_ptr = buffer.data() + nalu_offsets[i]; - const size_t nalu_length = nalu_lengths[i]; + // Allocate some extra space for potentially adding a missing VUI. + rtc::Buffer output_buffer(/*size=*/0, /*capacity=*/buffer.size() + + nalus.size() * kMaxVuiSpsIncrease); + for (const H264::NaluIndex& nalu : nalus) { // Copy NAL unit start code. - const uint8_t* start_code_ptr = prev_nalu_ptr + prev_nalu_length; + const uint8_t* start_code_ptr = buffer.data() + nalu.start_offset; const size_t start_code_length = - (nalu_ptr - prev_nalu_ptr) - prev_nalu_length; - output_buffer->AppendData(start_code_ptr, start_code_length); - - bool updated_sps = false; + nalu.payload_start_offset - nalu.start_offset; + const uint8_t* nalu_ptr = buffer.data() + nalu.payload_start_offset; + const size_t nalu_length = nalu.payload_size; if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kSps) { // Check if stream uses picture order count type 0, and if so rewrite it @@ -260,22 +251,20 @@ void SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( nalu_ptr + H264::kNaluTypeSize, nalu_length - H264::kNaluTypeSize, &sps, color_space, &output_nalu, Direction::kOutgoing); if (result == ParseResult::kVuiRewritten) { - updated_sps = true; - output_nalu_offsets[i] = output_buffer->size(); - output_nalu_lengths[i] = output_nalu.size(); - output_buffer->AppendData(output_nalu.data(), output_nalu.size()); + output_buffer.AppendData(start_code_ptr, start_code_length); + output_buffer.AppendData(output_nalu.data(), output_nalu.size()); + continue; } + } else if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kAud) { + // Skip the access unit delimiter copy. 
+ continue; } - if (!updated_sps) { - output_nalu_offsets[i] = output_buffer->size(); - output_nalu_lengths[i] = nalu_length; - output_buffer->AppendData(nalu_ptr, nalu_length); - } - - prev_nalu_ptr = nalu_ptr; - prev_nalu_length = nalu_length; + // vui wasn't rewritten and it is not aud, copy the nal unit as is. + output_buffer.AppendData(start_code_ptr, start_code_length); + output_buffer.AppendData(nalu_ptr, nalu_length); } + return output_buffer; } namespace { diff --git a/common_video/h264/sps_vui_rewriter.h b/common_video/h264/sps_vui_rewriter.h index 4cd4cb976d..311db30d50 100644 --- a/common_video/h264/sps_vui_rewriter.h +++ b/common_video/h264/sps_vui_rewriter.h @@ -50,20 +50,11 @@ class SpsVuiRewriter : private SpsParser { rtc::Buffer* destination, Direction Direction); - // Parses NAL units from |buffer| based on |nalu_offsets| and |nalu_lengths| - // and rewrites VUI in SPS blocks if necessary. - // The result is written to |output_buffer| and modified NAL unit offsets - // and lenghts are written to |output_nalu_offsets| and |output_nalu_lenghts| - // to account for any added data. - static void ParseOutgoingBitstreamAndRewriteSps( + // Parses NAL units from |buffer|, strips AUD blocks and rewrites VUI in SPS + // blocks if necessary. 
+ static rtc::Buffer ParseOutgoingBitstreamAndRewrite( rtc::ArrayView buffer, - size_t num_nalus, - const size_t* nalu_offsets, - const size_t* nalu_lengths, - const ColorSpace* color_space, - rtc::Buffer* output_buffer, - size_t* output_nalu_offsets, - size_t* output_nalu_lengths); + const ColorSpace* color_space); private: static ParseResult ParseAndRewriteSps( diff --git a/common_video/h264/sps_vui_rewriter_unittest.cc b/common_video/h264/sps_vui_rewriter_unittest.cc index e008948ce1..2907949e6c 100644 --- a/common_video/h264/sps_vui_rewriter_unittest.cc +++ b/common_video/h264/sps_vui_rewriter_unittest.cc @@ -36,6 +36,7 @@ static const size_t kWidth = 640; static const size_t kHeight = 480; static const uint8_t kStartSequence[] = {0x00, 0x00, 0x00, 0x01}; +static const uint8_t kAud[] = {H264::NaluType::kAud, 0x09, 0x10}; static const uint8_t kSpsNaluType[] = {H264::NaluType::kSps}; static const uint8_t kIdr1[] = {H264::NaluType::kIdr, 0xFF, 0x00, 0x00, 0x04}; static const uint8_t kIdr2[] = {H264::NaluType::kIdr, 0xFF, 0x00, 0x11}; @@ -396,36 +397,13 @@ TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamOptimalVui) { GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps); rtc::Buffer buffer; - const size_t kNumNalus = 2; - size_t nalu_offsets[kNumNalus]; - size_t nalu_lengths[kNumNalus]; buffer.AppendData(kStartSequence); - nalu_offsets[0] = buffer.size(); - nalu_lengths[0] = optimal_sps.size(); buffer.AppendData(optimal_sps); buffer.AppendData(kStartSequence); - nalu_offsets[1] = buffer.size(); - nalu_lengths[1] = sizeof(kIdr1); buffer.AppendData(kIdr1); - rtc::Buffer modified_buffer; - size_t modified_nalu_offsets[kNumNalus]; - size_t modified_nalu_lengths[kNumNalus]; - - SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( - buffer, kNumNalus, nalu_offsets, nalu_lengths, nullptr, &modified_buffer, - modified_nalu_offsets, modified_nalu_lengths); - - EXPECT_THAT( - std::vector(modified_buffer.data(), - modified_buffer.data() + modified_buffer.size()), - 
::testing::ElementsAreArray(buffer.data(), buffer.size())); - EXPECT_THAT(std::vector(modified_nalu_offsets, - modified_nalu_offsets + kNumNalus), - ::testing::ElementsAreArray(nalu_offsets, kNumNalus)); - EXPECT_THAT(std::vector(modified_nalu_lengths, - modified_nalu_lengths + kNumNalus), - ::testing::ElementsAreArray(nalu_lengths, kNumNalus)); + EXPECT_THAT(SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(buffer, nullptr), + ::testing::ElementsAreArray(buffer)); } TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamNoVui) { @@ -435,61 +413,51 @@ TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamNoVui) { GenerateFakeSps(kVuiNotPresent, &sps); rtc::Buffer buffer; - const size_t kNumNalus = 3; - size_t nalu_offsets[kNumNalus]; - size_t nalu_lengths[kNumNalus]; buffer.AppendData(kStartSequence); - nalu_offsets[0] = buffer.size(); - nalu_lengths[0] = sizeof(kIdr1); buffer.AppendData(kIdr1); buffer.AppendData(kStartSequence); - nalu_offsets[1] = buffer.size(); - nalu_lengths[1] = sizeof(kSpsNaluType) + sps.size(); buffer.AppendData(kSpsNaluType); buffer.AppendData(sps); buffer.AppendData(kStartSequence); - nalu_offsets[2] = buffer.size(); - nalu_lengths[2] = sizeof(kIdr2); buffer.AppendData(kIdr2); rtc::Buffer optimal_sps; GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps); rtc::Buffer expected_buffer; - size_t expected_nalu_offsets[kNumNalus]; - size_t expected_nalu_lengths[kNumNalus]; expected_buffer.AppendData(kStartSequence); - expected_nalu_offsets[0] = expected_buffer.size(); - expected_nalu_lengths[0] = sizeof(kIdr1); expected_buffer.AppendData(kIdr1); expected_buffer.AppendData(kStartSequence); - expected_nalu_offsets[1] = expected_buffer.size(); - expected_nalu_lengths[1] = sizeof(kSpsNaluType) + optimal_sps.size(); expected_buffer.AppendData(kSpsNaluType); expected_buffer.AppendData(optimal_sps); expected_buffer.AppendData(kStartSequence); - expected_nalu_offsets[2] = expected_buffer.size(); - expected_nalu_lengths[2] = sizeof(kIdr2); 
expected_buffer.AppendData(kIdr2); - rtc::Buffer modified_buffer; - size_t modified_nalu_offsets[kNumNalus]; - size_t modified_nalu_lengths[kNumNalus]; - - SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( - buffer, kNumNalus, nalu_offsets, nalu_lengths, nullptr, &modified_buffer, - modified_nalu_offsets, modified_nalu_lengths); - - EXPECT_THAT( - std::vector(modified_buffer.data(), - modified_buffer.data() + modified_buffer.size()), - ::testing::ElementsAreArray(expected_buffer.data(), - expected_buffer.size())); - EXPECT_THAT(std::vector(modified_nalu_offsets, - modified_nalu_offsets + kNumNalus), - ::testing::ElementsAreArray(expected_nalu_offsets, kNumNalus)); - EXPECT_THAT(std::vector(modified_nalu_lengths, - modified_nalu_lengths + kNumNalus), - ::testing::ElementsAreArray(expected_nalu_lengths, kNumNalus)); + EXPECT_THAT(SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(buffer, nullptr), + ::testing::ElementsAreArray(expected_buffer)); +} + +TEST(SpsVuiRewriterOutgoingAudTest, ParseOutgoingBitstreamWithAud) { + rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); + + rtc::Buffer optimal_sps; + GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps); + + rtc::Buffer buffer; + buffer.AppendData(kStartSequence); + buffer.AppendData(kAud); + buffer.AppendData(kStartSequence); + buffer.AppendData(optimal_sps); + buffer.AppendData(kStartSequence); + buffer.AppendData(kIdr1); + + rtc::Buffer expected_buffer; + expected_buffer.AppendData(kStartSequence); + expected_buffer.AppendData(optimal_sps); + expected_buffer.AppendData(kStartSequence); + expected_buffer.AppendData(kIdr1); + + EXPECT_THAT(SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(buffer, nullptr), + ::testing::ElementsAreArray(expected_buffer)); } } // namespace webrtc diff --git a/common_video/h265/h265_bitstream_parser.cc b/common_video/h265/h265_bitstream_parser.cc new file mode 100644 index 0000000000..4c766e67f5 --- /dev/null +++ b/common_video/h265/h265_bitstream_parser.cc @@ -0,0 +1,401 @@ +/* + * 
Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "common_video/h265/h265_bitstream_parser.h" + +#include + +#include +#include + +#include "common_video/h265/h265_common.h" +#include "rtc_base/bit_buffer.h" +#include "rtc_base/logging.h" + +namespace { + +const int kMaxAbsQpDeltaValue = 51; +const int kMinQpValue = 0; +const int kMaxQpValue = 51; + +} // namespace + +namespace webrtc { + +#define RETURN_ON_FAIL(x, res) \ + if (!(x)) { \ + RTC_LOG_F(LS_ERROR) << "FAILED: " #x; \ + return res; \ + } + +#define RETURN_INV_ON_FAIL(x) RETURN_ON_FAIL(x, kInvalidStream) + +H265BitstreamParser::H265BitstreamParser() {} +H265BitstreamParser::~H265BitstreamParser() {} + +H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( + const uint8_t* source, + size_t source_length, + uint8_t nalu_type) { + if (!sps_ || !pps_) + return kInvalidStream; + + last_slice_qp_delta_ = absl::nullopt; + const std::vector slice_rbsp = + H265::ParseRbsp(source, source_length); + if (slice_rbsp.size() < H265::kNaluTypeSize) + return kInvalidStream; + + rtc::BitBuffer slice_reader(slice_rbsp.data() + H265::kNaluTypeSize, + slice_rbsp.size() - H265::kNaluTypeSize); + // Check to see if this is an IDR slice, which has an extra field to parse + // out. 
+ //bool is_idr = (source[0] & 0x0F) == H265::NaluType::kIdr; + //uint8_t nal_ref_idc = (source[0] & 0x60) >> 5; + uint32_t golomb_tmp; + uint32_t bits_tmp; + + // first_slice_segment_in_pic_flag: u(1) + uint32_t first_slice_segment_in_pic_flag = 0; + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&first_slice_segment_in_pic_flag, 1)); + if (H265::NaluType::kBlaWLp <= nalu_type && + nalu_type <= H265::NaluType::kRsvIrapVcl23) { + // no_output_of_prior_pics_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + } + // slice_pic_parameter_set_id: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + uint32_t dependent_slice_segment_flag = 0; + if (first_slice_segment_in_pic_flag == 0) { + if (pps_->dependent_slice_segments_enabled_flag) { + // dependent_slice_segment_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&dependent_slice_segment_flag, 1)); + } + + // slice_segment_address: u(v) + int32_t log2_ctb_size_y = sps_->log2_min_luma_coding_block_size_minus3 + 3 + sps_->log2_diff_max_min_luma_coding_block_size; + uint32_t ctb_size_y = 1 << log2_ctb_size_y; + uint32_t pic_width_in_ctbs_y = sps_->pic_width_in_luma_samples / ctb_size_y; + if(sps_->pic_width_in_luma_samples % ctb_size_y) + pic_width_in_ctbs_y++; + + uint32_t pic_height_in_ctbs_y = sps_->pic_height_in_luma_samples / ctb_size_y; + if(sps_->pic_height_in_luma_samples % ctb_size_y) + pic_height_in_ctbs_y++; + + uint32_t slice_segment_address_bits = H265::Log2(pic_height_in_ctbs_y * pic_width_in_ctbs_y); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, slice_segment_address_bits)); + } + + if (dependent_slice_segment_flag == 0) { + for (uint32_t i = 0; i < pps_->num_extra_slice_header_bits; i++) { + // slice_reserved_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + } + // slice_type: ue(v) + uint32_t slice_type = 0; + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&slice_type)); + if (pps_->output_flag_present_flag) { + // 
pic_output_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + } + if (sps_->separate_colour_plane_flag) { + // colour_plane_id: u(2) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2)); + } + uint32_t num_long_term_sps = 0; + uint32_t num_long_term_pics = 0; + std::vector lt_idx_sps; + std::vector used_by_curr_pic_lt_flag; + uint32_t short_term_ref_pic_set_sps_flag = 0; + uint32_t short_term_ref_pic_set_idx = 0; + H265SpsParser::ShortTermRefPicSet short_term_ref_pic_set; + uint32_t slice_temporal_mvp_enabled_flag = 0; + if (nalu_type != H265::NaluType::kIdrWRadl && nalu_type != H265::NaluType::kIdrNLp) { + // slice_pic_order_cnt_lsb: u(v) + uint32_t slice_pic_order_cnt_lsb_bits = sps_->log2_max_pic_order_cnt_lsb_minus4 + 4; + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, slice_pic_order_cnt_lsb_bits)); + // short_term_ref_pic_set_sps_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&short_term_ref_pic_set_sps_flag, 1)); + if (!short_term_ref_pic_set_sps_flag) { + absl::optional ref_pic_set + = H265SpsParser::ParseShortTermRefPicSet(sps_->num_short_term_ref_pic_sets, + sps_->num_short_term_ref_pic_sets, sps_->short_term_ref_pic_set, *sps_, &slice_reader); + if (ref_pic_set) { + short_term_ref_pic_set = *ref_pic_set; + } else { + return kInvalidStream; + } + } else if (sps_->num_short_term_ref_pic_sets > 1) { + // short_term_ref_pic_set_idx: u(v) + uint32_t short_term_ref_pic_set_idx_bits = H265::Log2(sps_->num_short_term_ref_pic_sets); + if ((1 << short_term_ref_pic_set_idx_bits) < sps_->num_short_term_ref_pic_sets) { + short_term_ref_pic_set_idx_bits++; + } + if (short_term_ref_pic_set_idx_bits > 0) { + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&short_term_ref_pic_set_idx, short_term_ref_pic_set_idx_bits)); + } + } + if (sps_->long_term_ref_pics_present_flag) { + if (sps_->num_long_term_ref_pics_sps > 0) { + // num_long_term_sps: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_long_term_sps)); + } + // 
num_long_term_sps: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_long_term_pics)); + lt_idx_sps.resize(num_long_term_sps + num_long_term_pics, 0); + used_by_curr_pic_lt_flag.resize(num_long_term_sps + num_long_term_pics, 0); + for (uint32_t i = 0; i < num_long_term_sps + num_long_term_pics; i++) { + if (i < num_long_term_sps) { + if (sps_->num_long_term_ref_pics_sps > 1) { + // lt_idx_sps: u(v) + uint32_t lt_idx_sps_bits = H265::Log2(sps_->num_long_term_ref_pics_sps); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(<_idx_sps[i], lt_idx_sps_bits)); + } + } else { + // poc_lsb_lt: u(v) + uint32_t poc_lsb_lt_bits = sps_->log2_max_pic_order_cnt_lsb_minus4 + 4; + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, poc_lsb_lt_bits)); + // used_by_curr_pic_lt_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&used_by_curr_pic_lt_flag[i], 1)); + } + // delta_poc_msb_present_flag: u(1) + uint32_t delta_poc_msb_present_flag = 0; + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&delta_poc_msb_present_flag, 1)); + if (delta_poc_msb_present_flag) { + // delta_poc_msb_cycle_lt: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + } + } + } + if (sps_->sps_temporal_mvp_enabled_flag) { + // slice_temporal_mvp_enabled_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&slice_temporal_mvp_enabled_flag, 1)); + } + } + + if (sps_->sample_adaptive_offset_enabled_flag) { + // slice_sao_luma_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + uint32_t chroma_array_type = sps_->separate_colour_plane_flag == 0 ? 
sps_->chroma_format_idc : 0; + if (chroma_array_type != 0) { + // slice_sao_chroma_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + } + } + + if (slice_type == H265::SliceType::kP || slice_type == H265::SliceType::kB) { + // num_ref_idx_active_override_flag: u(1) + uint32_t num_ref_idx_active_override_flag = 0; + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&num_ref_idx_active_override_flag, 1)); + uint32_t num_ref_idx_l0_active_minus1 = pps_->num_ref_idx_l0_default_active_minus1; + uint32_t num_ref_idx_l1_active_minus1 = pps_->num_ref_idx_l1_default_active_minus1; + if (num_ref_idx_active_override_flag) { + // num_ref_idx_l0_active_minus1: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_ref_idx_l0_active_minus1)); + if (slice_type == H265::SliceType::kB) { + // num_ref_idx_l1_active_minus1: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_ref_idx_l1_active_minus1)); + } + } + uint32_t num_pic_total_curr = CalcNumPocTotalCurr( + num_long_term_sps, num_long_term_pics, lt_idx_sps, + used_by_curr_pic_lt_flag, short_term_ref_pic_set_sps_flag, + short_term_ref_pic_set_idx, short_term_ref_pic_set); + if (pps_->lists_modification_present_flag && num_pic_total_curr > 1) { + // ref_pic_lists_modification() + uint32_t list_entry_bits = H265::Log2(num_pic_total_curr); + if ((1 << list_entry_bits) < num_pic_total_curr) { + list_entry_bits++; + } + // ref_pic_list_modification_flag_l0: u(1) + uint32_t ref_pic_list_modification_flag_l0 = 0; + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&ref_pic_list_modification_flag_l0, 1)); + if (ref_pic_list_modification_flag_l0) { + for (uint32_t i = 0; i < num_ref_idx_l0_active_minus1; i++) { + // list_entry_l0: u(v) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, list_entry_bits)); + } + } + if (slice_type == H265::SliceType::kB) { + // ref_pic_list_modification_flag_l1: u(1) + uint32_t ref_pic_list_modification_flag_l1 = 0; + 
RETURN_INV_ON_FAIL(slice_reader.ReadBits(&ref_pic_list_modification_flag_l1, 1)); + if (ref_pic_list_modification_flag_l1) { + for (uint32_t i = 0; i < num_ref_idx_l1_active_minus1; i++) { + // list_entry_l1: u(v) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, list_entry_bits)); + } + } + } + } + if (slice_type == H265::SliceType::kB) { + // mvd_l1_zero_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + } + if (pps_->cabac_init_present_flag) { + // cabac_init_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + } + if (slice_temporal_mvp_enabled_flag) { + uint32_t collocated_from_l0_flag = 0; + if (slice_type == H265::SliceType::kB) { + // collocated_from_l0_flag: u(1) + RETURN_INV_ON_FAIL(slice_reader.ReadBits(&collocated_from_l0_flag, 1)); + } + if ((collocated_from_l0_flag && num_ref_idx_l0_active_minus1 > 0) + || (!collocated_from_l0_flag && num_ref_idx_l1_active_minus1 > 0)) { + // collocated_ref_idx: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + } + } + if ((pps_->weighted_pred_flag && slice_type == H265::SliceType::kP) + || (pps_->weighted_bipred_flag && slice_type == H265::SliceType::kB)) { + // pred_weight_table() + // TODO(piasy): Do we need support for pred_weight_table()? + RTC_LOG(LS_ERROR) << "Streams with pred_weight_table unsupported."; + return kUnsupportedStream; + } + // five_minus_max_num_merge_cand: ue(v) + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + // TODO(piasy): motion_vector_resolution_control_idc? + } + } + + // slice_qp_delta: se(v) + int32_t last_slice_qp_delta; + RETURN_INV_ON_FAIL( + slice_reader.ReadSignedExponentialGolomb(&last_slice_qp_delta)); + if (abs(last_slice_qp_delta) > kMaxAbsQpDeltaValue) { + // Something has gone wrong, and the parsed value is invalid. 
+ RTC_LOG(LS_WARNING) << "Parsed QP value out of range."; + return kInvalidStream; + } + + last_slice_qp_delta_ = last_slice_qp_delta; + + return kOk; +} + +uint32_t H265BitstreamParser::CalcNumPocTotalCurr( + uint32_t num_long_term_sps, uint32_t num_long_term_pics, + const std::vector lt_idx_sps, + const std::vector used_by_curr_pic_lt_flag, + uint32_t short_term_ref_pic_set_sps_flag, + uint32_t short_term_ref_pic_set_idx, + const H265SpsParser::ShortTermRefPicSet& short_term_ref_pic_set) { + uint32_t num_poc_total_curr = 0; + uint32_t curr_sps_idx; + + bool used_by_curr_pic_lt[16]; + uint32_t num_long_term = num_long_term_sps + num_long_term_pics; + + for (uint32_t i = 0; i < num_long_term; i++) { + if (i < num_long_term_sps) { + used_by_curr_pic_lt[i] = sps_->used_by_curr_pic_lt_sps_flag[lt_idx_sps[i]]; + } else { + used_by_curr_pic_lt[i] = used_by_curr_pic_lt_flag[i]; + } + } + + if (short_term_ref_pic_set_sps_flag) { + curr_sps_idx = short_term_ref_pic_set_idx; + } else { + curr_sps_idx = sps_->num_short_term_ref_pic_sets; + } + + if (sps_->short_term_ref_pic_set.size() <= curr_sps_idx) { + if (curr_sps_idx != 0 || short_term_ref_pic_set_sps_flag) { + return 0; + } + } + + const H265SpsParser::ShortTermRefPicSet* ref_pic_set; + if (curr_sps_idx < sps_->short_term_ref_pic_set.size()) { + ref_pic_set = &(sps_->short_term_ref_pic_set[curr_sps_idx]); + } else { + ref_pic_set = &short_term_ref_pic_set; + } + + for (uint32_t i = 0; i < ref_pic_set->num_negative_pics; i++) { + if (ref_pic_set->used_by_curr_pic_s0_flag[i]) { + num_poc_total_curr++; + } + } + + for (uint32_t i = 0; i < ref_pic_set->num_positive_pics; i++) { + if (ref_pic_set->used_by_curr_pic_s1_flag[i]) { + num_poc_total_curr++; + } + } + + for (uint32_t i = 0; i < num_long_term_sps + num_long_term_pics; i++) { + if (used_by_curr_pic_lt[i]) { + num_poc_total_curr++; + } + } + + return num_poc_total_curr; +} + +void H265BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { + H265::NaluType 
nalu_type = H265::ParseNaluType(slice[0]); + if (nalu_type == H265::NaluType::kSps) { + sps_ = H265SpsParser::ParseSps(slice + H265::kNaluTypeSize, + length - H265::kNaluTypeSize); + if (!sps_) { + RTC_LOG(LS_WARNING) << "Unable to parse SPS from H265 bitstream."; + } + } else if (nalu_type == H265::NaluType::kPps) { + pps_ = H265PpsParser::ParsePps(slice + H265::kNaluTypeSize, + length - H265::kNaluTypeSize); + if (!pps_) { + RTC_LOG(LS_WARNING) << "Unable to parse PPS from H265 bitstream."; + } + } else if (nalu_type <= H265::NaluType::kRsvIrapVcl23) { + Result res = ParseNonParameterSetNalu(slice, length, nalu_type); + if (res != kOk) { + RTC_LOG(LS_INFO) << "Failed to parse bitstream. Error: " << res; + } + } +} + +void H265BitstreamParser::ParseBitstream(const uint8_t* bitstream, + size_t length) { + std::vector nalu_indices = + H265::FindNaluIndices(bitstream, length); + for (const H265::NaluIndex& index : nalu_indices) + ParseSlice(&bitstream[index.payload_start_offset], index.payload_size); +} + +bool H265BitstreamParser::GetLastSliceQp(int* qp) const { + if (!last_slice_qp_delta_ || !pps_) { + return false; + } + const int parsed_qp = 26 + pps_->pic_init_qp_minus26 + *last_slice_qp_delta_; + if (parsed_qp < kMinQpValue || parsed_qp > kMaxQpValue) { + RTC_LOG(LS_ERROR) << "Parsed invalid QP from bitstream."; + return false; + } + *qp = parsed_qp; + return true; +} + +void H265BitstreamParser::ParseBitstream( + rtc::ArrayView bitstream) { + ParseBitstream(bitstream.data(), bitstream.size()); +} + +absl::optional H265BitstreamParser::GetLastSliceQp() const { + int qp; + bool success = GetLastSliceQp(&qp); + return success ? 
absl::optional(qp) : absl::nullopt; +} + +} // namespace webrtc diff --git a/common_video/h265/h265_bitstream_parser.h b/common_video/h265/h265_bitstream_parser.h new file mode 100644 index 0000000000..ab9596d299 --- /dev/null +++ b/common_video/h265/h265_bitstream_parser.h @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_H265_H265_BITSTREAM_PARSER_H_ +#define COMMON_VIDEO_H265_H265_BITSTREAM_PARSER_H_ +#include +#include + +#include "absl/types/optional.h" +#include "api/video_codecs/bitstream_parser.h" +#include "common_video/h265/h265_pps_parser.h" +#include "common_video/h265/h265_sps_parser.h" + +namespace webrtc { + +// Stateful H265 bitstream parser (due to SPS/PPS). Used to parse out QP values +// from the bitstream. +// TODO(pbos): Unify with RTP SPS parsing and only use one H265 parser. +// TODO(pbos): If/when this gets used on the receiver side CHECKs must be +// removed and gracefully abort as we have no control over receive-side +// bitstreams. +class H265BitstreamParser : public BitstreamParser { + public: + H265BitstreamParser(); + ~H265BitstreamParser() override; + + // These are here for backwards-compatability for the time being. + void ParseBitstream(const uint8_t* bitstream, size_t length); + bool GetLastSliceQp(int* qp) const; + + // New interface. 
+ void ParseBitstream(rtc::ArrayView bitstream) override; + absl::optional GetLastSliceQp() const override; + + protected: + enum Result { + kOk, + kInvalidStream, + kUnsupportedStream, + }; + void ParseSlice(const uint8_t* slice, size_t length); + Result ParseNonParameterSetNalu(const uint8_t* source, + size_t source_length, + uint8_t nalu_type); + + uint32_t CalcNumPocTotalCurr(uint32_t num_long_term_sps, + uint32_t num_long_term_pics, + const std::vector lt_idx_sps, + const std::vector used_by_curr_pic_lt_flag, + uint32_t short_term_ref_pic_set_sps_flag, + uint32_t short_term_ref_pic_set_idx, + const H265SpsParser::ShortTermRefPicSet& short_term_ref_pic_set); + + // SPS/PPS state, updated when parsing new SPS/PPS, used to parse slices. + absl::optional sps_; + absl::optional pps_; + + // Last parsed slice QP. + absl::optional last_slice_qp_delta_; +}; + +} // namespace webrtc + +#endif // COMMON_VIDEO_H265_H265_BITSTREAM_PARSER_H_ diff --git a/common_video/h265/h265_common.cc b/common_video/h265/h265_common.cc new file mode 100644 index 0000000000..aa0cb87430 --- /dev/null +++ b/common_video/h265/h265_common.cc @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "common_video/h265/h265_common.h" +#include "common_video/h264/h264_common.h" + +namespace webrtc { +namespace H265 { + +const uint8_t kNaluTypeMask = 0x7E; + +std::vector FindNaluIndices(const uint8_t* buffer, + size_t buffer_size) { + std::vector indices = H264::FindNaluIndices(buffer, buffer_size); + std::vector results; + for (auto& index : indices) { + results.push_back({index.start_offset, index.payload_start_offset, index.payload_size}); + } + return results; +} + +NaluType ParseNaluType(uint8_t data) { + return static_cast((data & kNaluTypeMask) >> 1); +} + +std::vector ParseRbsp(const uint8_t* data, size_t length) { + return H264::ParseRbsp(data, length); +} + +void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination) { + H264::WriteRbsp(bytes, length, destination); +} + +uint32_t Log2(uint32_t value) { + uint32_t result = 0; + // If value is not a power of two an additional bit is required + // to account for the ceil() of log2() below. + if ((value & (value - 1)) != 0) { + ++result; + } + while (value > 0) { + value >>= 1; + ++result; + } + + return result; +} + +} // namespace H265 +} // namespace webrtc diff --git a/common_video/h265/h265_common.h b/common_video/h265/h265_common.h new file mode 100644 index 0000000000..a829195a10 --- /dev/null +++ b/common_video/h265/h265_common.h @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef COMMON_VIDEO_H265_H265_COMMON_H_ +#define COMMON_VIDEO_H265_H265_COMMON_H_ + +#include +#include + +#include "rtc_base/buffer.h" + +namespace webrtc { + +namespace H265 { +// The size of a full NALU start sequence {0 0 0 1}, used for the first NALU +// of an access unit, and for SPS and PPS blocks. +const size_t kNaluLongStartSequenceSize = 4; + +// The size of a shortened NALU start sequence {0 0 1}, that may be used if +// not the first NALU of an access unit or an SPS or PPS block. +const size_t kNaluShortStartSequenceSize = 3; + +// The size of the NALU type byte (2). +const size_t kNaluTypeSize = 2; + +enum NaluType : uint8_t { + kTrailN = 0, + kTrailR = 1, + kTsaN = 2, + kTsaR = 3, + kStsaN = 4, + kStsaR = 5, + kRadlN = 6, + kRadlR = 7, + kBlaWLp = 16, + kBlaWRadl = 17, + kBlaNLp = 18, + kIdrWRadl = 19, + kIdrNLp = 20, + kCra = 21, + kRsvIrapVcl23 = 23, + kVps = 32, + kSps = 33, + kPps = 34, + kAud = 35, + kPrefixSei = 39, + kSuffixSei = 40, + kAP = 48, + kFU = 49 +}; + +enum SliceType : uint8_t { kB = 0, kP = 1, kI = 2 }; + +struct NaluIndex { + // Start index of NALU, including start sequence. + size_t start_offset; + // Start index of NALU payload, typically type header. + size_t payload_start_offset; + // Length of NALU payload, in bytes, counting from payload_start_offset. + size_t payload_size; +}; + +// Returns a vector of the NALU indices in the given buffer. +std::vector FindNaluIndices(const uint8_t* buffer, + size_t buffer_size); + +// Get the NAL type from the header byte immediately following start sequence. +NaluType ParseNaluType(uint8_t data); + +// Methods for parsing and writing RBSP. See section 7.4.2 of the H265 spec. 
+// +// The following sequences are illegal, and need to be escaped when encoding: +// 00 00 00 -> 00 00 03 00 +// 00 00 01 -> 00 00 03 01 +// 00 00 02 -> 00 00 03 02 +// And things in the source that look like the emulation byte pattern (00 00 03) +// need to have an extra emulation byte added, so it's removed when decoding: +// 00 00 03 -> 00 00 03 03 +// +// Decoding is simply a matter of finding any 00 00 03 sequence and removing +// the 03 emulation byte. + +// Parse the given data and remove any emulation byte escaping. +std::vector ParseRbsp(const uint8_t* data, size_t length); + +// Write the given data to the destination buffer, inserting and emulation +// bytes in order to escape any data the could be interpreted as a start +// sequence. +void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination); + +uint32_t Log2(uint32_t value); +} // namespace H265 +} // namespace webrtc + +#endif // COMMON_VIDEO_H265_H265_COMMON_H_ diff --git a/common_video/h265/h265_pps_parser.cc b/common_video/h265/h265_pps_parser.cc new file mode 100644 index 0000000000..165cdb653a --- /dev/null +++ b/common_video/h265/h265_pps_parser.cc @@ -0,0 +1,217 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "common_video/h265/h265_pps_parser.h" + +#include +#include + +#include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_sps_parser.h" +#include "rtc_base/bit_buffer.h" +#include "rtc_base/logging.h" + +#define RETURN_EMPTY_ON_FAIL(x) \ + if (!(x)) { \ + return absl::nullopt; \ + } + +namespace { +const int kMaxPicInitQpDeltaValue = 25; +const int kMinPicInitQpDeltaValue = -26; +} // namespace + +namespace webrtc { + +// General note: this is based off the 06/2019 version of the H.265 standard. +// You can find it on this page: +// http://www.itu.int/rec/T-REC-H.265 + +absl::optional H265PpsParser::ParsePps( + const uint8_t* data, + size_t length) { + // First, parse out rbsp, which is basically the source buffer minus emulation + // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in + // section 7.3.1.1 of the H.265 standard. + std::vector unpacked_buffer = H265::ParseRbsp(data, length); + rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size()); + return ParseInternal(&bit_buffer); +} + +bool H265PpsParser::ParsePpsIds(const uint8_t* data, + size_t length, + uint32_t* pps_id, + uint32_t* sps_id) { + RTC_DCHECK(pps_id); + RTC_DCHECK(sps_id); + // First, parse out rbsp, which is basically the source buffer minus emulation + // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in + // section 7.3.1.1 of the H.265 standard. 
+ std::vector unpacked_buffer = H265::ParseRbsp(data, length); + rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size()); + return ParsePpsIdsInternal(&bit_buffer, pps_id, sps_id); +} + +absl::optional H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( + const uint8_t* data, + size_t length, + uint8_t nalu_type) { + rtc::BitBuffer slice_reader(data, length); + + // first_slice_segment_in_pic_flag: u(1) + uint32_t first_slice_segment_in_pic_flag = 0; + RETURN_EMPTY_ON_FAIL( + slice_reader.ReadBits(&first_slice_segment_in_pic_flag, 1)); + + if (nalu_type >= H265::NaluType::kBlaWLp && + nalu_type <= H265::NaluType::kRsvIrapVcl23) { + // no_output_of_prior_pics_flag: u(1) + RETURN_EMPTY_ON_FAIL(slice_reader.ConsumeBits(1)); + } + + // slice_pic_parameter_set_id: ue(v) + uint32_t slice_pic_parameter_set_id = 0; + if (!slice_reader.ReadExponentialGolomb(&slice_pic_parameter_set_id)) + return absl::nullopt; + + return slice_pic_parameter_set_id; +} + +absl::optional H265PpsParser::ParseInternal( + rtc::BitBuffer* bit_buffer) { + PpsState pps; + + RETURN_EMPTY_ON_FAIL(ParsePpsIdsInternal(bit_buffer, &pps.id, &pps.sps_id)); + + uint32_t bits_tmp; + uint32_t golomb_ignored; + int32_t signed_golomb_ignored; + // dependent_slice_segments_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.dependent_slice_segments_enabled_flag, 1)); + // output_flag_present_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.output_flag_present_flag, 1)); + // num_extra_slice_header_bits: u(3) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.num_extra_slice_header_bits, 3)); + // sign_data_hiding_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + // cabac_init_present_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.cabac_init_present_flag, 1)); + // num_ref_idx_l0_default_active_minus1: ue(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&pps.num_ref_idx_l0_default_active_minus1)); + // 
num_ref_idx_l1_default_active_minus1: ue(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&pps.num_ref_idx_l1_default_active_minus1)); + // init_qp_minus26: se(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&pps.pic_init_qp_minus26)); + // Sanity-check parsed value + if (pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue || + pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) { + RETURN_EMPTY_ON_FAIL(false); + } + // constrained_intra_pred_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + // transform_skip_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + // cu_qp_delta_enabled_flag: u(1) + uint32_t cu_qp_delta_enabled_flag = 0; + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&cu_qp_delta_enabled_flag, 1)); + if (cu_qp_delta_enabled_flag) { + // diff_cu_qp_delta_depth: ue(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + } + // pps_cb_qp_offset: se(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + // pps_cr_qp_offset: se(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + // pps_slice_chroma_qp_offsets_present_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + // weighted_pred_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.weighted_pred_flag, 1)); + // weighted_bipred_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.weighted_bipred_flag, 1)); + // transquant_bypass_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + // tiles_enabled_flag: u(1) + uint32_t tiles_enabled_flag = 0; + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&tiles_enabled_flag, 1)); + // entropy_coding_sync_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + if (tiles_enabled_flag) { + // num_tile_columns_minus1: ue(v) + uint32_t num_tile_columns_minus1 = 0; + 
RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&num_tile_columns_minus1)); + // num_tile_rows_minus1: ue(v) + uint32_t num_tile_rows_minus1 = 0; + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&num_tile_rows_minus1)); + // uniform_spacing_flag: u(1) + uint32_t uniform_spacing_flag = 0; + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&uniform_spacing_flag, 1)); + if (!uniform_spacing_flag) { + for (uint32_t i = 0; i < num_tile_columns_minus1; i++) { + // column_width_minus1: ue(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + } + for (uint32_t i = 0; i < num_tile_rows_minus1; i++) { + // row_height_minus1: ue(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + } + // loop_filter_across_tiles_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + } + } + // pps_loop_filter_across_slices_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + // deblocking_filter_control_present_flag: u(1) + uint32_t deblocking_filter_control_present_flag = 0; + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&deblocking_filter_control_present_flag, 1)); + if (deblocking_filter_control_present_flag) { + // deblocking_filter_override_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + // pps_deblocking_filter_disabled_flag: u(1) + uint32_t pps_deblocking_filter_disabled_flag = 0; + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps_deblocking_filter_disabled_flag, 1)); + if (!pps_deblocking_filter_disabled_flag) { + // pps_beta_offset_div2: se(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + // pps_tc_offset_div2: se(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + } + } + // pps_scaling_list_data_present_flag: u(1) + uint32_t pps_scaling_list_data_present_flag = 0; + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps_scaling_list_data_present_flag, 1)); + if 
(pps_scaling_list_data_present_flag) { + // scaling_list_data() + if (!H265SpsParser::ParseScalingListData(bit_buffer)) { + return absl::nullopt; + } + } + // lists_modification_present_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.lists_modification_present_flag, 1)); + // log2_parallel_merge_level_minus2: ue(v) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + // slice_segment_header_extension_present_flag: u(1) + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + + return pps; +} + +bool H265PpsParser::ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer, + uint32_t* pps_id, + uint32_t* sps_id) { + // pic_parameter_set_id: ue(v) + if (!bit_buffer->ReadExponentialGolomb(pps_id)) + return false; + // seq_parameter_set_id: ue(v) + if (!bit_buffer->ReadExponentialGolomb(sps_id)) + return false; + return true; +} + +} // namespace webrtc diff --git a/common_video/h265/h265_pps_parser.h b/common_video/h265/h265_pps_parser.h new file mode 100644 index 0000000000..28c95ea9bc --- /dev/null +++ b/common_video/h265/h265_pps_parser.h @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_H265_PPS_PARSER_H_ +#define COMMON_VIDEO_H265_PPS_PARSER_H_ + +#include "absl/types/optional.h" + +namespace rtc { +class BitBuffer; +} + +namespace webrtc { + +// A class for parsing out picture parameter set (PPS) data from a H265 NALU. +class H265PpsParser { + public: + // The parsed state of the PPS. Only some select values are stored. + // Add more as they are actually needed. 
+ struct PpsState { + PpsState() = default; + + uint32_t dependent_slice_segments_enabled_flag = 0; + uint32_t cabac_init_present_flag = 0; + uint32_t output_flag_present_flag = 0; + uint32_t num_extra_slice_header_bits = 0; + uint32_t num_ref_idx_l0_default_active_minus1 = 0; + uint32_t num_ref_idx_l1_default_active_minus1 = 0; + int32_t pic_init_qp_minus26 = 0; + uint32_t weighted_pred_flag = 0; + uint32_t weighted_bipred_flag = 0; + uint32_t lists_modification_present_flag = 0; + uint32_t id = 0; + uint32_t sps_id = 0; + }; + + // Unpack RBSP and parse PPS state from the supplied buffer. + static absl::optional ParsePps(const uint8_t* data, size_t length); + + static bool ParsePpsIds(const uint8_t* data, + size_t length, + uint32_t* pps_id, + uint32_t* sps_id); + + static absl::optional ParsePpsIdFromSliceSegmentLayerRbsp( + const uint8_t* data, + size_t length, + uint8_t nalu_type); + + protected: + // Parse the PPS state, for a bit buffer where RBSP decoding has already been + // performed. + static absl::optional ParseInternal(rtc::BitBuffer* bit_buffer); + static bool ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer, + uint32_t* pps_id, + uint32_t* sps_id); +}; + +} // namespace webrtc + +#endif // COMMON_VIDEO_H265_PPS_PARSER_H_ diff --git a/common_video/h265/h265_sps_parser.cc b/common_video/h265/h265_sps_parser.cc new file mode 100644 index 0000000000..eec0a19532 --- /dev/null +++ b/common_video/h265/h265_sps_parser.cc @@ -0,0 +1,408 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include + +#include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_sps_parser.h" +#include "rtc_base/bit_buffer.h" +#include "rtc_base/logging.h" + +namespace { +typedef absl::optional OptionalSps; +typedef absl::optional OptionalShortTermRefPicSet; + +#define RETURN_EMPTY_ON_FAIL(x) \ + if (!(x)) { \ + return OptionalSps(); \ + } + +#define RETURN_FALSE_ON_FAIL(x) \ + if (!(x)) { \ + return false; \ + } + +#define RETURN_EMPTY2_ON_FAIL(x) \ + if (!(x)) { \ + return OptionalShortTermRefPicSet(); \ + } +} // namespace + +namespace webrtc { + +H265SpsParser::SpsState::SpsState() = default; + +H265SpsParser::ShortTermRefPicSet::ShortTermRefPicSet() = default; + +// General note: this is based off the 06/2019 version of the H.265 standard. +// You can find it on this page: +// http://www.itu.int/rec/T-REC-H.265 + +// Unpack RBSP and parse SPS state from the supplied buffer. +absl::optional H265SpsParser::ParseSps( + const uint8_t* data, + size_t length) { + std::vector unpacked_buffer = H265::ParseRbsp(data, length); + rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size()); + return ParseSpsInternal(&bit_buffer); +} + +bool H265SpsParser::ParseScalingListData(rtc::BitBuffer* buffer) { + uint32_t scaling_list_pred_mode_flag[4][6]; + uint32_t scaling_list_pred_matrix_id_delta[4][6]; + int32_t scaling_list_dc_coef_minus8[4][6]; + int32_t scaling_list[4][6][64]; + for (int size_id = 0; size_id < 4; size_id++) { + for (int matrix_id = 0; matrix_id < 6; matrix_id += (size_id == 3) ? 
3 : 1) { + // scaling_list_pred_mode_flag: u(1) + RETURN_FALSE_ON_FAIL(buffer->ReadBits(&scaling_list_pred_mode_flag[size_id][matrix_id], 1)); + if (!scaling_list_pred_mode_flag[size_id][matrix_id]) { + // scaling_list_pred_matrix_id_delta: ue(v) + RETURN_FALSE_ON_FAIL(buffer->ReadExponentialGolomb(&scaling_list_pred_matrix_id_delta[size_id][matrix_id])); + } else { + int32_t next_coef = 8; + uint32_t coef_num = std::min(64, 1 << (4 + (size_id << 1))); + if (size_id > 1) { + // scaling_list_dc_coef_minus8: se(v) + RETURN_FALSE_ON_FAIL(buffer->ReadSignedExponentialGolomb(&scaling_list_dc_coef_minus8[size_id - 2][matrix_id])); + next_coef = scaling_list_dc_coef_minus8[size_id - 2][matrix_id]; + } + for (uint32_t i = 0; i < coef_num; i++) { + // scaling_list_delta_coef: se(v) + int32_t scaling_list_delta_coef = 0; + RETURN_FALSE_ON_FAIL(buffer->ReadSignedExponentialGolomb(&scaling_list_delta_coef)); + next_coef = (next_coef + scaling_list_delta_coef + 256) % 256; + scaling_list[size_id][matrix_id][i] = next_coef; + } + } + } + } + return true; +} + +absl::optional H265SpsParser::ParseShortTermRefPicSet( + uint32_t st_rps_idx, uint32_t num_short_term_ref_pic_sets, + const std::vector& short_term_ref_pic_set, + H265SpsParser::SpsState& sps, rtc::BitBuffer* buffer) { + H265SpsParser::ShortTermRefPicSet ref_pic_set; + + uint32_t inter_ref_pic_set_prediction_flag = 0; + if (st_rps_idx != 0) { + // inter_ref_pic_set_prediction_flag: u(1) + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&inter_ref_pic_set_prediction_flag, 1)); + } + if (inter_ref_pic_set_prediction_flag) { + uint32_t delta_idx_minus1 = 0; + if (st_rps_idx == num_short_term_ref_pic_sets) { + // delta_idx_minus1: ue(v) + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&delta_idx_minus1)); + } + // delta_rps_sign: u(1) + uint32_t delta_rps_sign = 0; + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&delta_rps_sign, 1)); + // abs_delta_rps_minus1: ue(v) + uint32_t abs_delta_rps_minus1 = 0; + 
RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&abs_delta_rps_minus1)); + uint32_t ref_rps_idx = st_rps_idx - (delta_idx_minus1 + 1); + uint32_t num_delta_pocs = 0; + if (short_term_ref_pic_set[ref_rps_idx].inter_ref_pic_set_prediction_flag) { + auto& used_by_curr_pic_flag = short_term_ref_pic_set[ref_rps_idx].used_by_curr_pic_flag; + auto& use_delta_flag = short_term_ref_pic_set[ref_rps_idx].use_delta_flag; + if (used_by_curr_pic_flag.size() != use_delta_flag.size()) { + return OptionalShortTermRefPicSet(); + } + for (uint32_t i = 0; i < used_by_curr_pic_flag.size(); i++) { + if (used_by_curr_pic_flag[i] || use_delta_flag[i]) { + num_delta_pocs++; + } + } + } else { + num_delta_pocs = short_term_ref_pic_set[ref_rps_idx].num_negative_pics + short_term_ref_pic_set[ref_rps_idx].num_positive_pics; + } + ref_pic_set.used_by_curr_pic_flag.resize(num_delta_pocs + 1, 0); + ref_pic_set.use_delta_flag.resize(num_delta_pocs + 1, 1); + for (uint32_t j = 0; j <= num_delta_pocs; j++) { + // used_by_curr_pic_flag: u(1) + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.used_by_curr_pic_flag[j], 1)); + if (!ref_pic_set.used_by_curr_pic_flag[j]) { + // use_delta_flag: u(1) + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.use_delta_flag[j], 1)); + } + } + } else { + // num_negative_pics: ue(v) + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.num_negative_pics)); + // num_positive_pics: ue(v) + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.num_positive_pics)); + + ref_pic_set.delta_poc_s0_minus1.resize(ref_pic_set.num_negative_pics, 0); + ref_pic_set.used_by_curr_pic_s0_flag.resize(ref_pic_set.num_negative_pics, 0); + for (uint32_t i = 0; i < ref_pic_set.num_negative_pics; i++) { + // delta_poc_s0_minus1: ue(v) + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.delta_poc_s0_minus1[i])); + // used_by_curr_pic_s0_flag: u(1) + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.used_by_curr_pic_s0_flag[i], 1)); + } + 
ref_pic_set.delta_poc_s1_minus1.resize(ref_pic_set.num_positive_pics, 0); + ref_pic_set.used_by_curr_pic_s1_flag.resize(ref_pic_set.num_positive_pics, 0); + for (uint32_t i = 0; i < ref_pic_set.num_positive_pics; i++) { + // delta_poc_s1_minus1: ue(v) + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.delta_poc_s1_minus1[i])); + // used_by_curr_pic_s1_flag: u(1) + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.used_by_curr_pic_s1_flag[i], 1)); + } + } + + return OptionalShortTermRefPicSet(ref_pic_set); +} + +absl::optional H265SpsParser::ParseSpsInternal( + rtc::BitBuffer* buffer) { + // Now, we need to use a bit buffer to parse through the actual HEVC SPS + // format. See Section 7.3.2.2.1 ("General sequence parameter set data + // syntax") of the H.265 standard for a complete description. + // Since we only care about resolution, we ignore the majority of fields, but + // we still have to actively parse through a lot of the data, since many of + // the fields have variable size. + // We're particularly interested in: + // chroma_format_idc -> affects crop units + // pic_{width,height}_* -> resolution of the frame in macroblocks (16x16). + // frame_crop_*_offset -> crop information + + SpsState sps; + + // The golomb values we have to read, not just consume. + uint32_t golomb_ignored; + + // sps_video_parameter_set_id: u(4) + uint32_t sps_video_parameter_set_id = 0; + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_video_parameter_set_id, 4)); + // sps_max_sub_layers_minus1: u(3) + uint32_t sps_max_sub_layers_minus1 = 0; + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_max_sub_layers_minus1, 3)); + sps.sps_max_sub_layers_minus1 = sps_max_sub_layers_minus1; + sps.sps_max_dec_pic_buffering_minus1.resize(sps_max_sub_layers_minus1 + 1, 0); + // sps_temporal_id_nesting_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); + // profile_tier_level(1, sps_max_sub_layers_minus1). We are acutally not + // using them, so read/skip over it. 
+ // general_profile_space+general_tier_flag+general_prfile_idc: u(8) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1)); + // general_profile_compatabilitiy_flag[32] + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(4)); + // general_progressive_source_flag + interlaced_source_flag+ + // non-packed_constraint flag + frame_only_constraint_flag: u(4) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(4)); + // general_profile_idc decided flags or reserved. u(43) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(43)); + // general_inbld_flag or reserved 0: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); + // general_level_idc: u(8) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1)); + // if max_sub_layers_minus1 >=1, read the sublayer profile information + std::vector sub_layer_profile_present_flags; + std::vector sub_layer_level_present_flags; + uint32_t sub_layer_profile_present = 0; + uint32_t sub_layer_level_present = 0; + for (uint32_t i = 0; i < sps_max_sub_layers_minus1; i++) { + // sublayer_profile_present_flag and sublayer_level_presnet_flag: u(2) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sub_layer_profile_present, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sub_layer_level_present, 1)); + sub_layer_profile_present_flags.push_back(sub_layer_profile_present); + sub_layer_level_present_flags.push_back(sub_layer_level_present); + } + if (sps_max_sub_layers_minus1 > 0) { + for (uint32_t j = sps_max_sub_layers_minus1; j < 8; j++) { + // reserved 2 bits: u(2) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(2)); + } + } + for (uint32_t k = 0; k < sps_max_sub_layers_minus1; k++) { + if (sub_layer_profile_present_flags[k]) { // + // sub_layer profile_space/tier_flag/profile_idc. ignored. 
u(8) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1)); + // profile_compatability_flag: u(32) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(4)); + // sub_layer progressive_source_flag/interlaced_source_flag/ + // non_packed_constraint_flag/frame_only_constraint_flag: u(4) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(4)); + // following 43-bits are profile_idc specific. We simply read/skip it. + // u(43) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(43)); + // 1-bit profile_idc specific inbld flag. We simply read/skip it. u(1) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); + } + if (sub_layer_level_present_flags[k]) { + // sub_layer_level_idc: u(8) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1)); + } + } + // sps_seq_parameter_set_id: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.id)); + // chrome_format_idc: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.chroma_format_idc)); + if (sps.chroma_format_idc == 3) { + // seperate_colour_plane_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.separate_colour_plane_flag, 1)); + } + uint32_t pic_width_in_luma_samples = 0; + uint32_t pic_height_in_luma_samples = 0; + // pic_width_in_luma_samples: ue(v) + RETURN_EMPTY_ON_FAIL( + buffer->ReadExponentialGolomb(&pic_width_in_luma_samples)); + // pic_height_in_luma_samples: ue(v) + RETURN_EMPTY_ON_FAIL( + buffer->ReadExponentialGolomb(&pic_height_in_luma_samples)); + // conformance_window_flag: u(1) + uint32_t conformance_window_flag = 0; + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&conformance_window_flag, 1)); + + uint32_t conf_win_left_offset = 0; + uint32_t conf_win_right_offset = 0; + uint32_t conf_win_top_offset = 0; + uint32_t conf_win_bottom_offset = 0; + if (conformance_window_flag) { + // conf_win_left_offset: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&conf_win_left_offset)); + // conf_win_right_offset: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&conf_win_right_offset)); + // conf_win_top_offset: ue(v) + 
RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&conf_win_top_offset)); + // conf_win_bottom_offset: ue(v) + RETURN_EMPTY_ON_FAIL( + buffer->ReadExponentialGolomb(&conf_win_bottom_offset)); + } + + // bit_depth_luma_minus8: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // bit_depth_chroma_minus8: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // log2_max_pic_order_cnt_lsb_minus4: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.log2_max_pic_order_cnt_lsb_minus4)); + uint32_t sps_sub_layer_ordering_info_present_flag = 0; + // sps_sub_layer_ordering_info_present_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_sub_layer_ordering_info_present_flag, 1)); + for (uint32_t i = (sps_sub_layer_ordering_info_present_flag != 0) ? 0 : sps_max_sub_layers_minus1; + i <= sps_max_sub_layers_minus1; i++) { + // sps_max_dec_pic_buffering_minus1: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.sps_max_dec_pic_buffering_minus1[i])); + // sps_max_num_reorder_pics: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // sps_max_latency_increase_plus1: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + } + // log2_min_luma_coding_block_size_minus3: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.log2_min_luma_coding_block_size_minus3)); + // log2_diff_max_min_luma_coding_block_size: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.log2_diff_max_min_luma_coding_block_size)); + // log2_min_luma_transform_block_size_minus2: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // log2_diff_max_min_luma_transform_block_size: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // max_transform_hierarchy_depth_inter: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // max_transform_hierarchy_depth_intra: ue(v) + 
RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // scaling_list_enabled_flag: u(1) + uint32_t scaling_list_enabled_flag = 0; + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&scaling_list_enabled_flag, 1)); + if (scaling_list_enabled_flag) { + // sps_scaling_list_data_present_flag: u(1) + uint32_t sps_scaling_list_data_present_flag = 0; + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_scaling_list_data_present_flag, 1)); + if (sps_scaling_list_data_present_flag) { + // scaling_list_data() + if (!ParseScalingListData(buffer)) { + return OptionalSps(); + } + } + } + + // amp_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); + // sample_adaptive_offset_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.sample_adaptive_offset_enabled_flag, 1)); + // pcm_enabled_flag: u(1) + uint32_t pcm_enabled_flag = 0; + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&pcm_enabled_flag, 1)); + if (pcm_enabled_flag) { + // pcm_sample_bit_depth_luma_minus1: u(4) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(4)); + // pcm_sample_bit_depth_chroma_minus1: u(4) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(4)); + // log2_min_pcm_luma_coding_block_size_minus3: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // log2_diff_max_min_pcm_luma_coding_block_size: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + // pcm_loop_filter_disabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); + } + + // num_short_term_ref_pic_sets: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.num_short_term_ref_pic_sets)); + sps.short_term_ref_pic_set.resize(sps.num_short_term_ref_pic_sets); + for (uint32_t st_rps_idx = 0; st_rps_idx < sps.num_short_term_ref_pic_sets; st_rps_idx++) { + // st_ref_pic_set() + OptionalShortTermRefPicSet ref_pic_set = ParseShortTermRefPicSet( + st_rps_idx, sps.num_short_term_ref_pic_sets, sps.short_term_ref_pic_set, sps, buffer); + if (ref_pic_set) { + 
sps.short_term_ref_pic_set[st_rps_idx] = *ref_pic_set; + } else { + return OptionalSps(); + } + } + + // long_term_ref_pics_present_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.long_term_ref_pics_present_flag, 1)); + if (sps.long_term_ref_pics_present_flag) { + // num_long_term_ref_pics_sps: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.num_long_term_ref_pics_sps)); + sps.used_by_curr_pic_lt_sps_flag.resize(sps.num_long_term_ref_pics_sps, 0); + for (uint32_t i = 0; i < sps.num_long_term_ref_pics_sps; i++) { + // lt_ref_pic_poc_lsb_sps: u(v) + uint32_t lt_ref_pic_poc_lsb_sps_bits = sps.log2_max_pic_order_cnt_lsb_minus4 + 4; + RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(lt_ref_pic_poc_lsb_sps_bits)); + // used_by_curr_pic_lt_sps_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.used_by_curr_pic_lt_sps_flag[i], 1)); + } + } + + // sps_temporal_mvp_enabled_flag: u(1) + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.sps_temporal_mvp_enabled_flag, 1)); + + // Far enough! We don't use the rest of the SPS. + + sps.vps_id = sps_video_parameter_set_id; + + sps.pic_width_in_luma_samples = pic_width_in_luma_samples; + sps.pic_height_in_luma_samples = pic_height_in_luma_samples; + + // Start with the resolution determined by the pic_width/pic_height fields. + sps.width = pic_width_in_luma_samples; + sps.height = pic_height_in_luma_samples; + + if (conformance_window_flag) { + int sub_width_c = ((1 == sps.chroma_format_idc) || (2 == sps.chroma_format_idc)) && + (0 == sps.separate_colour_plane_flag) + ? 2 + : 1; + int sub_height_c = + (1 == sps.chroma_format_idc) && (0 == sps.separate_colour_plane_flag) ? 2 : 1; + // the offset includes the pixel within conformance window. 
so don't need to + // +1 as per spec + sps.width -= sub_width_c * (conf_win_right_offset + conf_win_left_offset); + sps.height -= sub_height_c * (conf_win_top_offset + conf_win_bottom_offset); + } + + return OptionalSps(sps); +} + +} // namespace webrtc diff --git a/common_video/h265/h265_sps_parser.h b/common_video/h265/h265_sps_parser.h new file mode 100644 index 0000000000..dbd3f0e267 --- /dev/null +++ b/common_video/h265/h265_sps_parser.h @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_H265_H265_SPS_PARSER_H_ +#define COMMON_VIDEO_H265_H265_SPS_PARSER_H_ + +#include + +#include "absl/types/optional.h" + +namespace rtc { +class BitBuffer; +} + +namespace webrtc { + +// A class for parsing out sequence parameter set (SPS) data from an H265 NALU. +class H265SpsParser { + public: + + struct ShortTermRefPicSet { + ShortTermRefPicSet(); + + uint32_t inter_ref_pic_set_prediction_flag = 0; + std::vector used_by_curr_pic_flag; + std::vector use_delta_flag; + uint32_t num_negative_pics = 0; + uint32_t num_positive_pics = 0; + std::vector delta_poc_s0_minus1; + std::vector used_by_curr_pic_s0_flag; + std::vector delta_poc_s1_minus1; + std::vector used_by_curr_pic_s1_flag; + }; + + // The parsed state of the SPS. Only some select values are stored. + // Add more as they are actually needed. 
+ struct SpsState { + SpsState(); + + uint32_t sps_max_sub_layers_minus1; + uint32_t chroma_format_idc = 0; + uint32_t separate_colour_plane_flag = 0; + uint32_t pic_width_in_luma_samples = 0; + uint32_t pic_height_in_luma_samples = 0; + uint32_t log2_max_pic_order_cnt_lsb_minus4 = 0; + std::vector sps_max_dec_pic_buffering_minus1; + uint32_t log2_min_luma_coding_block_size_minus3 = 0; + uint32_t log2_diff_max_min_luma_coding_block_size = 0; + uint32_t sample_adaptive_offset_enabled_flag = 0; + uint32_t num_short_term_ref_pic_sets = 0; + std::vector short_term_ref_pic_set; + uint32_t long_term_ref_pics_present_flag = 0; + uint32_t num_long_term_ref_pics_sps = 0; + std::vector used_by_curr_pic_lt_sps_flag; + uint32_t sps_temporal_mvp_enabled_flag = 0; + uint32_t width = 0; + uint32_t height = 0; + uint32_t id = 0; + uint32_t vps_id = 0; + }; + + // Unpack RBSP and parse SPS state from the supplied buffer. + static absl::optional ParseSps(const uint8_t* data, size_t length); + + static bool ParseScalingListData(rtc::BitBuffer* buffer); + + static absl::optional ParseShortTermRefPicSet( + uint32_t st_rps_idx, uint32_t num_short_term_ref_pic_sets, + const std::vector& ref_pic_sets, + SpsState& sps, rtc::BitBuffer* buffer); + + protected: + // Parse the SPS state, for a bit buffer where RBSP decoding has already been + // performed. + static absl::optional ParseSpsInternal(rtc::BitBuffer* buffer); +}; + +} // namespace webrtc +#endif // COMMON_VIDEO_H265_H265_SPS_PARSER_H_ diff --git a/common_video/h265/h265_vps_parser.cc b/common_video/h265/h265_vps_parser.cc new file mode 100644 index 0000000000..002aabb1d0 --- /dev/null +++ b/common_video/h265/h265_vps_parser.cc @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_vps_parser.h" +#include "rtc_base/bit_buffer.h" +#include "rtc_base/logging.h" + +namespace { +typedef absl::optional OptionalVps; + +#define RETURN_EMPTY_ON_FAIL(x) \ + if (!(x)) { \ + return OptionalVps(); \ + } +} // namespace + +namespace webrtc { + +H265VpsParser::VpsState::VpsState() = default; + +// General note: this is based off the 06/2019 version of the H.265 standard. +// You can find it on this page: +// http://www.itu.int/rec/T-REC-H.265 + +// Unpack RBSP and parse SPS state from the supplied buffer. +absl::optional H265VpsParser::ParseVps( + const uint8_t* data, + size_t length) { + std::vector unpacked_buffer = H265::ParseRbsp(data, length); + rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size()); + return ParseInternal(&bit_buffer); +} + +absl::optional H265VpsParser::ParseInternal( + rtc::BitBuffer* buffer) { + // Now, we need to use a bit buffer to parse through the actual HEVC VPS + // format. See Section 7.3.2.1 ("Video parameter set RBSP syntax") of the + // H.265 standard for a complete description. + + VpsState vps; + + // vps_video_parameter_set_id: u(4) + vps.id = 0; + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&vps.id, 4)); + + return OptionalVps(vps); +} + +} // namespace webrtc diff --git a/common_video/h265/h265_vps_parser.h b/common_video/h265/h265_vps_parser.h new file mode 100644 index 0000000000..e8a2775f43 --- /dev/null +++ b/common_video/h265/h265_vps_parser.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_H265_H265_VPS_PARSER_H_ +#define COMMON_VIDEO_H265_H265_VPS_PARSER_H_ + +#include "absl/types/optional.h" + +namespace rtc { +class BitBuffer; +} + +namespace webrtc { + +// A class for parsing out sequence parameter set (VPS) data from an H265 NALU. +class H265VpsParser { + public: + // The parsed state of the VPS. Only some select values are stored. + // Add more as they are actually needed. + struct VpsState { + VpsState(); + + uint32_t id = 0; + }; + + // Unpack RBSP and parse VPS state from the supplied buffer. + static absl::optional ParseVps(const uint8_t* data, size_t length); + + protected: + // Parse the VPS state, for a bit buffer where RBSP decoding has already been + // performed. + static absl::optional ParseInternal(rtc::BitBuffer* bit_buffer); +}; + +} // namespace webrtc +#endif // COMMON_VIDEO_H265_H265_VPS_PARSER_H_ diff --git a/common_video/i420_buffer_pool.cc b/common_video/i420_buffer_pool.cc deleted file mode 100644 index d13da6a172..0000000000 --- a/common_video/i420_buffer_pool.cc +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "common_video/include/i420_buffer_pool.h" - -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -I420BufferPool::I420BufferPool() : I420BufferPool(false) {} -I420BufferPool::I420BufferPool(bool zero_initialize) - : I420BufferPool(zero_initialize, std::numeric_limits::max()) {} -I420BufferPool::I420BufferPool(bool zero_initialize, - size_t max_number_of_buffers) - : zero_initialize_(zero_initialize), - max_number_of_buffers_(max_number_of_buffers) {} -I420BufferPool::~I420BufferPool() = default; - -void I420BufferPool::Release() { - buffers_.clear(); -} - -bool I420BufferPool::Resize(size_t max_number_of_buffers) { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - size_t used_buffers_count = 0; - for (const rtc::scoped_refptr& buffer : buffers_) { - // If the buffer is in use, the ref count will be >= 2, one from the list we - // are looping over and one from the application. If the ref count is 1, - // then the list we are looping over holds the only reference and it's safe - // to reuse. - if (!buffer->HasOneRef()) { - used_buffers_count++; - } - } - if (used_buffers_count > max_number_of_buffers) { - return false; - } - max_number_of_buffers_ = max_number_of_buffers; - - size_t buffers_to_purge = buffers_.size() - max_number_of_buffers_; - auto iter = buffers_.begin(); - while (iter != buffers_.end() && buffers_to_purge > 0) { - if ((*iter)->HasOneRef()) { - iter = buffers_.erase(iter); - buffers_to_purge--; - } else { - ++iter; - } - } - return true; -} - -rtc::scoped_refptr I420BufferPool::CreateBuffer(int width, - int height) { - // Default stride_y is width, default uv stride is width / 2 (rounding up). - return CreateBuffer(width, height, width, (width + 1) / 2, (width + 1) / 2); -} - -rtc::scoped_refptr I420BufferPool::CreateBuffer(int width, - int height, - int stride_y, - int stride_u, - int stride_v) { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - // Release buffers with wrong resolution. 
- for (auto it = buffers_.begin(); it != buffers_.end();) { - const auto& buffer = *it; - if (buffer->width() != width || buffer->height() != height || - buffer->StrideY() != stride_y || buffer->StrideU() != stride_u || - buffer->StrideV() != stride_v) { - it = buffers_.erase(it); - } else { - ++it; - } - } - // Look for a free buffer. - for (const rtc::scoped_refptr& buffer : buffers_) { - // If the buffer is in use, the ref count will be >= 2, one from the list we - // are looping over and one from the application. If the ref count is 1, - // then the list we are looping over holds the only reference and it's safe - // to reuse. - if (buffer->HasOneRef()) - return buffer; - } - - if (buffers_.size() >= max_number_of_buffers_) - return nullptr; - // Allocate new buffer. - rtc::scoped_refptr buffer = - new PooledI420Buffer(width, height, stride_y, stride_u, stride_v); - if (zero_initialize_) - buffer->InitializeData(); - buffers_.push_back(buffer); - return buffer; -} - -} // namespace webrtc diff --git a/common_video/i420_buffer_pool_unittest.cc b/common_video/i420_buffer_pool_unittest.cc deleted file mode 100644 index 27503e5b8a..0000000000 --- a/common_video/i420_buffer_pool_unittest.cc +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "common_video/include/i420_buffer_pool.h" - -#include -#include - -#include "api/scoped_refptr.h" -#include "api/video/i420_buffer.h" -#include "api/video/video_frame_buffer.h" -#include "test/gtest.h" - -namespace webrtc { - -TEST(TestI420BufferPool, SimpleFrameReuse) { - I420BufferPool pool; - auto buffer = pool.CreateBuffer(16, 16); - EXPECT_EQ(16, buffer->width()); - EXPECT_EQ(16, buffer->height()); - // Extract non-refcounted pointers for testing. - const uint8_t* y_ptr = buffer->DataY(); - const uint8_t* u_ptr = buffer->DataU(); - const uint8_t* v_ptr = buffer->DataV(); - // Release buffer so that it is returned to the pool. - buffer = nullptr; - // Check that the memory is resued. - buffer = pool.CreateBuffer(16, 16); - EXPECT_EQ(y_ptr, buffer->DataY()); - EXPECT_EQ(u_ptr, buffer->DataU()); - EXPECT_EQ(v_ptr, buffer->DataV()); -} - -TEST(TestI420BufferPool, FrameReuseWithDefaultThenExplicitStride) { - I420BufferPool pool; - auto buffer = pool.CreateBuffer(15, 16); - EXPECT_EQ(15, buffer->width()); - EXPECT_EQ(16, buffer->height()); - // The default Y stride is width and UV stride is halfwidth (rounded up). - ASSERT_EQ(15, buffer->StrideY()); - ASSERT_EQ(8, buffer->StrideU()); - ASSERT_EQ(8, buffer->StrideV()); - // Extract non-refcounted pointers for testing. - const uint8_t* y_ptr = buffer->DataY(); - const uint8_t* u_ptr = buffer->DataU(); - const uint8_t* v_ptr = buffer->DataV(); - // Release buffer so that it is returned to the pool. - buffer = nullptr; - // Check that the memory is resued with explicit strides if they match the - // assumed default above. 
- buffer = pool.CreateBuffer(15, 16, 15, 8, 8); - EXPECT_EQ(y_ptr, buffer->DataY()); - EXPECT_EQ(u_ptr, buffer->DataU()); - EXPECT_EQ(v_ptr, buffer->DataV()); - EXPECT_EQ(15, buffer->width()); - EXPECT_EQ(16, buffer->height()); - EXPECT_EQ(15, buffer->StrideY()); - EXPECT_EQ(8, buffer->StrideU()); - EXPECT_EQ(8, buffer->StrideV()); -} - -TEST(TestI420BufferPool, FailToReuseWrongSize) { - // Set max frames to 1, just to make sure the first buffer is being released. - I420BufferPool pool(/*zero_initialize=*/false, 1); - auto buffer = pool.CreateBuffer(16, 16); - EXPECT_EQ(16, buffer->width()); - EXPECT_EQ(16, buffer->height()); - // Release buffer so that it is returned to the pool. - buffer = nullptr; - // Check that the pool doesn't try to reuse buffers of incorrect size. - buffer = pool.CreateBuffer(32, 16); - ASSERT_TRUE(buffer); - EXPECT_EQ(32, buffer->width()); - EXPECT_EQ(16, buffer->height()); -} - -TEST(TestI420BufferPool, FailToReuseWrongStride) { - // Set max frames to 1, just to make sure the first buffer is being released. - I420BufferPool pool(/*zero_initialize=*/false, 1); - auto buffer = pool.CreateBuffer(32, 32, 32, 16, 16); - // Make sure the stride was read correctly, for the rest of the test. - ASSERT_EQ(16, buffer->StrideU()); - ASSERT_EQ(16, buffer->StrideV()); - buffer = pool.CreateBuffer(32, 32, 32, 20, 20); - ASSERT_TRUE(buffer); - EXPECT_EQ(32, buffer->StrideY()); - EXPECT_EQ(20, buffer->StrideU()); - EXPECT_EQ(20, buffer->StrideV()); -} - -TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) { - rtc::scoped_refptr buffer; - { - I420BufferPool pool; - buffer = pool.CreateBuffer(16, 16); - } - EXPECT_EQ(16, buffer->width()); - EXPECT_EQ(16, buffer->height()); - // Try to trigger use-after-free errors by writing to y-plane. 
- memset(buffer->MutableDataY(), 0xA5, 16 * buffer->StrideY()); -} - -TEST(TestI420BufferPool, MaxNumberOfBuffers) { - I420BufferPool pool(false, 1); - auto buffer1 = pool.CreateBuffer(16, 16); - EXPECT_NE(nullptr, buffer1.get()); - EXPECT_EQ(nullptr, pool.CreateBuffer(16, 16).get()); -} - -} // namespace webrtc diff --git a/common_video/include/bitrate_adjuster.h b/common_video/include/bitrate_adjuster.h index aea1872216..4b208307a1 100644 --- a/common_video/include/bitrate_adjuster.h +++ b/common_video/include/bitrate_adjuster.h @@ -15,8 +15,8 @@ #include #include "absl/types/optional.h" -#include "rtc_base/critical_section.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" @@ -60,29 +60,31 @@ class RTC_EXPORT BitrateAdjuster { bool IsWithinTolerance(uint32_t bitrate_bps, uint32_t target_bitrate_bps); // Returns smallest possible adjusted value. - uint32_t GetMinAdjustedBitrateBps() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + uint32_t GetMinAdjustedBitrateBps() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns largest possible adjusted value. - uint32_t GetMaxAdjustedBitrateBps() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + uint32_t GetMaxAdjustedBitrateBps() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void Reset(); void UpdateBitrate(uint32_t current_time_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - rtc::CriticalSection crit_; + mutable Mutex mutex_; const float min_adjusted_bitrate_pct_; const float max_adjusted_bitrate_pct_; // The bitrate we want. - volatile uint32_t target_bitrate_bps_ RTC_GUARDED_BY(crit_); + volatile uint32_t target_bitrate_bps_ RTC_GUARDED_BY(mutex_); // The bitrate we use to get what we want. 
- volatile uint32_t adjusted_bitrate_bps_ RTC_GUARDED_BY(crit_); + volatile uint32_t adjusted_bitrate_bps_ RTC_GUARDED_BY(mutex_); // The target bitrate that the adjusted bitrate was computed from. - volatile uint32_t last_adjusted_target_bitrate_bps_ RTC_GUARDED_BY(crit_); + volatile uint32_t last_adjusted_target_bitrate_bps_ RTC_GUARDED_BY(mutex_); // Used to estimate bitrate. - RateStatistics bitrate_tracker_ RTC_GUARDED_BY(crit_); + RateStatistics bitrate_tracker_ RTC_GUARDED_BY(mutex_); // The last time we tried to adjust the bitrate. - uint32_t last_bitrate_update_time_ms_ RTC_GUARDED_BY(crit_); + uint32_t last_bitrate_update_time_ms_ RTC_GUARDED_BY(mutex_); // The number of frames since the last time we tried to adjust the bitrate. - uint32_t frames_since_last_update_ RTC_GUARDED_BY(crit_); + uint32_t frames_since_last_update_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/common_video/include/i420_buffer_pool.h b/common_video/include/video_frame_buffer_pool.h similarity index 53% rename from common_video/include/i420_buffer_pool.h rename to common_video/include/video_frame_buffer_pool.h index 44f4821798..6af117577e 100644 --- a/common_video/include/i420_buffer_pool.h +++ b/common_video/include/video_frame_buffer_pool.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_ -#define COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_ +#ifndef COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_ +#define COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_ #include @@ -17,36 +17,33 @@ #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/nv12_buffer.h" #include "rtc_base/race_checker.h" #include "rtc_base/ref_counted_object.h" namespace webrtc { -// Simple buffer pool to avoid unnecessary allocations of I420Buffer objects. -// The pool manages the memory of the I420Buffer returned from CreateBuffer. 
-// When the I420Buffer is destructed, the memory is returned to the pool for use -// by subsequent calls to CreateBuffer. If the resolution passed to CreateBuffer -// changes, old buffers will be purged from the pool. -// Note that CreateBuffer will crash if more than kMaxNumberOfFramesBeforeCrash -// are created. This is to prevent memory leaks where frames are not returned. -class I420BufferPool { +// Simple buffer pool to avoid unnecessary allocations of video frame buffers. +// The pool manages the memory of the I420Buffer/NV12Buffer returned from +// Create(I420|NV12)Buffer. When the buffer is destructed, the memory is +// returned to the pool for use by subsequent calls to Create(I420|NV12)Buffer. +// If the resolution passed to Create(I420|NV12)Buffer changes or requested +// pixel format changes, old buffers will be purged from the pool. +// Note that Create(I420|NV12)Buffer will crash if more than +// kMaxNumberOfFramesBeforeCrash are created. This is to prevent memory leaks +// where frames are not returned. +class VideoFrameBufferPool { public: - I420BufferPool(); - explicit I420BufferPool(bool zero_initialize); - I420BufferPool(bool zero_initialze, size_t max_number_of_buffers); - ~I420BufferPool(); + VideoFrameBufferPool(); + explicit VideoFrameBufferPool(bool zero_initialize); + VideoFrameBufferPool(bool zero_initialize, size_t max_number_of_buffers); + ~VideoFrameBufferPool(); // Returns a buffer from the pool. If no suitable buffer exist in the pool // and there are less than |max_number_of_buffers| pending, a buffer is // created. Returns null otherwise. - rtc::scoped_refptr CreateBuffer(int width, int height); - - // Returns a buffer from the pool with the explicitly specified stride. 
- rtc::scoped_refptr CreateBuffer(int width, - int height, - int stride_y, - int stride_u, - int stride_v); + rtc::scoped_refptr CreateI420Buffer(int width, int height); + rtc::scoped_refptr CreateNV12Buffer(int width, int height); // Changes the max amount of buffers in the pool to the new value. // Returns true if change was successful and false if the amount of already @@ -58,12 +55,11 @@ class I420BufferPool { void Release(); private: - // Explicitly use a RefCountedObject to get access to HasOneRef, - // needed by the pool to check exclusive access. - using PooledI420Buffer = rtc::RefCountedObject; + rtc::scoped_refptr + GetExistingBuffer(int width, int height, VideoFrameBuffer::Type type); rtc::RaceChecker race_checker_; - std::list> buffers_; + std::list> buffers_; // If true, newly allocated buffers are zero-initialized. Note that recycled // buffers are not zero'd before reuse. This is required of buffers used by // FFmpeg according to http://crbug.com/390941, which only requires it for the @@ -76,4 +72,4 @@ class I420BufferPool { } // namespace webrtc -#endif // COMMON_VIDEO_INCLUDE_I420_BUFFER_POOL_H_ +#endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_POOL_H_ diff --git a/common_video/libyuv/include/webrtc_libyuv.h b/common_video/libyuv/include/webrtc_libyuv.h index ba17577216..d27250a93e 100644 --- a/common_video/libyuv/include/webrtc_libyuv.h +++ b/common_video/libyuv/include/webrtc_libyuv.h @@ -58,16 +58,6 @@ const double kPerfectPSNR = 48.0f; // video frame. size_t CalcBufferSize(VideoType type, int width, int height); -// TODO(mikhal): Add unit test for these two functions and determine location. -// Print VideoFrame to file -// Input: -// - frame : Reference to video frame. -// - file : pointer to file object. It is assumed that the file is -// already open for writing. -// Return value: 0 if OK, < 0 otherwise. 
-int PrintVideoFrame(const VideoFrame& frame, FILE* file); -int PrintVideoFrame(const I420BufferInterface& frame, FILE* file); - // Extract buffer from VideoFrame or I420BufferInterface (consecutive // planes, no stride) // Input: diff --git a/common_video/libyuv/libyuv_unittest.cc b/common_video/libyuv/libyuv_unittest.cc index 2a7992865a..62d9e87fa6 100644 --- a/common_video/libyuv/libyuv_unittest.cc +++ b/common_video/libyuv/libyuv_unittest.cc @@ -31,6 +31,38 @@ void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { *stride_uv = 16 * ((width + 31) / 32); } +int PrintPlane(const uint8_t* buf, + int width, + int height, + int stride, + FILE* file) { + for (int i = 0; i < height; i++, buf += stride) { + if (fwrite(buf, 1, width, file) != static_cast(width)) + return -1; + } + return 0; +} + +int PrintVideoFrame(const I420BufferInterface& frame, FILE* file) { + int width = frame.width(); + int height = frame.height(); + int chroma_width = frame.ChromaWidth(); + int chroma_height = frame.ChromaHeight(); + + if (PrintPlane(frame.DataY(), width, height, frame.StrideY(), file) < 0) { + return -1; + } + if (PrintPlane(frame.DataU(), chroma_width, chroma_height, frame.StrideU(), + file) < 0) { + return -1; + } + if (PrintPlane(frame.DataV(), chroma_width, chroma_height, frame.StrideV(), + file) < 0) { + return -1; + } + return 0; +} + } // Anonymous namespace class TestLibYuv : public ::testing::Test { diff --git a/common_video/libyuv/webrtc_libyuv.cc b/common_video/libyuv/webrtc_libyuv.cc index 833001cf1c..c7613cefd8 100644 --- a/common_video/libyuv/webrtc_libyuv.cc +++ b/common_video/libyuv/webrtc_libyuv.cc @@ -56,43 +56,6 @@ size_t CalcBufferSize(VideoType type, int width, int height) { return buffer_size; } -static int PrintPlane(const uint8_t* buf, - int width, - int height, - int stride, - FILE* file) { - for (int i = 0; i < height; i++, buf += stride) { - if (fwrite(buf, 1, width, file) != static_cast(width)) - return -1; - } - return 0; -} - -// 
TODO(nisse): Belongs with the test code? -int PrintVideoFrame(const I420BufferInterface& frame, FILE* file) { - int width = frame.width(); - int height = frame.height(); - int chroma_width = frame.ChromaWidth(); - int chroma_height = frame.ChromaHeight(); - - if (PrintPlane(frame.DataY(), width, height, frame.StrideY(), file) < 0) { - return -1; - } - if (PrintPlane(frame.DataU(), chroma_width, chroma_height, frame.StrideU(), - file) < 0) { - return -1; - } - if (PrintPlane(frame.DataV(), chroma_width, chroma_height, frame.StrideV(), - file) < 0) { - return -1; - } - return 0; -} - -int PrintVideoFrame(const VideoFrame& frame, FILE* file) { - return PrintVideoFrame(*frame.video_frame_buffer()->ToI420(), file); -} - int ExtractBuffer(const rtc::scoped_refptr& input_frame, size_t size, uint8_t* buffer) { diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc new file mode 100644 index 0000000000..6df240d9fe --- /dev/null +++ b/common_video/video_frame_buffer_pool.cc @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "common_video/include/video_frame_buffer_pool.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { +bool HasOneRef(const rtc::scoped_refptr& buffer) { + // Cast to rtc::RefCountedObject is safe because this function is only called + // on locally created VideoFrameBuffers, which are either + // |rtc::RefCountedObject| or |rtc::RefCountedObject|. 
+ switch (buffer->type()) { + case VideoFrameBuffer::Type::kI420: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } + case VideoFrameBuffer::Type::kNV12: { + return static_cast*>(buffer.get()) + ->HasOneRef(); + } + default: + RTC_NOTREACHED(); + } + return false; +} + +} // namespace + +VideoFrameBufferPool::VideoFrameBufferPool() : VideoFrameBufferPool(false) {} + +VideoFrameBufferPool::VideoFrameBufferPool(bool zero_initialize) + : VideoFrameBufferPool(zero_initialize, + std::numeric_limits::max()) {} + +VideoFrameBufferPool::VideoFrameBufferPool(bool zero_initialize, + size_t max_number_of_buffers) + : zero_initialize_(zero_initialize), + max_number_of_buffers_(max_number_of_buffers) {} + +VideoFrameBufferPool::~VideoFrameBufferPool() = default; + +void VideoFrameBufferPool::Release() { + buffers_.clear(); +} + +bool VideoFrameBufferPool::Resize(size_t max_number_of_buffers) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + size_t used_buffers_count = 0; + for (const rtc::scoped_refptr& buffer : buffers_) { + // If the buffer is in use, the ref count will be >= 2, one from the list we + // are looping over and one from the application. If the ref count is 1, + // then the list we are looping over holds the only reference and it's safe + // to reuse. 
+ if (!HasOneRef(buffer)) { + used_buffers_count++; + } + } + if (used_buffers_count > max_number_of_buffers) { + return false; + } + max_number_of_buffers_ = max_number_of_buffers; + + size_t buffers_to_purge = buffers_.size() - max_number_of_buffers_; + auto iter = buffers_.begin(); + while (iter != buffers_.end() && buffers_to_purge > 0) { + if (HasOneRef(*iter)) { + iter = buffers_.erase(iter); + buffers_to_purge--; + } else { + ++iter; + } + } + return true; +} + +rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI420); + if (existing_buffer) { + // Cast is safe because the only way kI420 buffer is created is + // in the same function below, where |RefCountedObject| is + // created. + rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. + rtc::scoped_refptr buffer = + new rtc::RefCountedObject(width, height); + + if (zero_initialize_) + buffer->InitializeData(); + + buffers_.push_back(buffer); + return buffer; +} + +rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( + int width, + int height) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + rtc::scoped_refptr existing_buffer = + GetExistingBuffer(width, height, VideoFrameBuffer::Type::kNV12); + if (existing_buffer) { + // Cast is safe because the only way kI420 buffer is created is + // in the same function below, where |RefCountedObject| is + // created. 
+ rtc::RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); + // Creates a new scoped_refptr, which is also pointing to the same + // RefCountedObject as buffer, increasing ref count. + return rtc::scoped_refptr(raw_buffer); + } + + if (buffers_.size() >= max_number_of_buffers_) + return nullptr; + // Allocate new buffer. + rtc::scoped_refptr buffer = + new rtc::RefCountedObject(width, height); + + if (zero_initialize_) + buffer->InitializeData(); + + buffers_.push_back(buffer); + return buffer; +} + +rtc::scoped_refptr VideoFrameBufferPool::GetExistingBuffer( + int width, + int height, + VideoFrameBuffer::Type type) { + // Release buffers with wrong resolution or different type. + for (auto it = buffers_.begin(); it != buffers_.end();) { + const auto& buffer = *it; + if (buffer->width() != width || buffer->height() != height || + buffer->type() != type) { + it = buffers_.erase(it); + } else { + ++it; + } + } + // Look for a free buffer. + for (const rtc::scoped_refptr& buffer : buffers_) { + // If the buffer is in use, the ref count will be >= 2, one from the list we + // are looping over and one from the application. If the ref count is 1, + // then the list we are looping over holds the only reference and it's safe + // to reuse. + if (HasOneRef(buffer)) { + RTC_CHECK(buffer->type() == type); + return buffer; + } + } + return nullptr; +} + +} // namespace webrtc diff --git a/common_video/video_frame_buffer_pool_unittest.cc b/common_video/video_frame_buffer_pool_unittest.cc new file mode 100644 index 0000000000..eb9b73f1a2 --- /dev/null +++ b/common_video/video_frame_buffer_pool_unittest.cc @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
// Check that the memory is reused.
+ memset(buffer->MutableDataY(), 0xA5, 16 * buffer->StrideY()); +} + +TEST(TestVideoFrameBufferPool, MaxNumberOfBuffers) { + VideoFrameBufferPool pool(false, 1); + auto buffer = pool.CreateI420Buffer(16, 16); + EXPECT_NE(nullptr, buffer.get()); + EXPECT_EQ(nullptr, pool.CreateI420Buffer(16, 16).get()); +} + +TEST(TestVideoFrameBufferPool, ProducesNv12) { + VideoFrameBufferPool pool(false, 1); + auto buffer = pool.CreateNV12Buffer(16, 16); + EXPECT_NE(nullptr, buffer.get()); +} + +TEST(TestVideoFrameBufferPool, SwitchingPixelFormat) { + VideoFrameBufferPool pool(false, 1); + auto buffer = pool.CreateNV12Buffer(16, 16); + EXPECT_EQ(nullptr, pool.CreateNV12Buffer(16, 16).get()); + auto buffer2 = pool.CreateI420Buffer(16, 16); + EXPECT_NE(nullptr, buffer2.get()); + EXPECT_EQ(nullptr, pool.CreateI420Buffer(16, 16).get()); +} + +} // namespace webrtc diff --git a/common_video/video_frame_unittest.cc b/common_video/video_frame_unittest.cc index 225a7d3089..9a7a5e2b7c 100644 --- a/common_video/video_frame_unittest.cc +++ b/common_video/video_frame_unittest.cc @@ -15,6 +15,7 @@ #include "api/video/i010_buffer.h" #include "api/video/i420_buffer.h" +#include "api/video/nv12_buffer.h" #include "rtc_base/bind.h" #include "rtc_base/time_utils.h" #include "test/fake_texture_frame.h" @@ -157,6 +158,29 @@ rtc::scoped_refptr CreateGradient(VideoFrameBuffer::Type type, return I010Buffer::Copy(*buffer); } +rtc::scoped_refptr CreateNV12Gradient(int width, + int height) { + rtc::scoped_refptr buffer(NV12Buffer::Create(width, height)); + // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + buffer->MutableDataY()[x + y * width] = + 128 * (x * height + y * width) / (width * height); + } + } + int chroma_width = buffer->ChromaWidth(); + int chroma_height = buffer->ChromaHeight(); + for (int x = 0; x < chroma_width; x++) { + for (int y = 0; y < chroma_height; y++) { + buffer->MutableDataUV()[x * 2 
There are a couple of bug trackers relevant to WebRTC:
+ + * [bugzilla.mozilla.org](https://bugzilla.mozilla.org/) -- for Firefox. + + * [bugs.webkit.org](https://bugs.webkit.org/) -- for Safari. + + * [developer.microsoft.com](https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/) -- for Microsoft Edge. + + * [bugs.opera.com/wizard](https://bugs.opera.com/wizard/) -- for Opera. + + * [bugs.webrtc.org](http://bugs.webrtc.org) -- for WebRTC native code. + +Anyone with a [Google account][1] can file bugs in the Chrome and WebRTC trackers and they're continuously triaged by Chrome and WebRTC engineers. + + +### How to File a Good Bug Report + +#### Instructions + +* Identify which bug tracker to use: + + * If you're hitting a problem in Chrome, file the bug using the + [the Chromium issue wizard](https://chromiumbugs.appspot.com/?token=0) + Choose "Web Developer" and "API", then fill out the form. For the component choose + * Blink>GetUserMedia for camera/microphone issues + * Blink>MediaRecording for issues with the MediaRecorder API + * Blink>WebRTC for issues with the RTCPeerConnection API + This ensures the right people will look at your bug. + + * If you're a developer working with the native code, file the bug at + [this link][4]. + +* Include as much as possible from the data points listed below. + +#### Example Data Points + + * Version of the browser/app + + * For Chrome: copy/paste from **chrome://version** + + * For WebRTC native code: if applicable, include the branch (e.g. trunk) + and WebRTC revision (e.g. r8207) your application uses + + * Operating system (Windows, Mac, Linux, Android, iOS, etc.) and version + (e.g. Windows 7, OS X 10.9, Ubuntu 14, etc.) + + * Hardware platform/device model (e.g. PC, Mac, Samsung 4S, Nexus 7, iPhone + 5S, iPad Air 2 etc) + + * Camera and microphone model and version (if applicable) + + * For Chrome audio and video device issues, please run the tests at + . 
a later version, provide both versions.
If you know steps to reproduce you might + want to try [a bisect](https://www.chromium.org/developers/bisect-builds-py) to + identify the commit that changed the behaviour. + + * For **video problems**, e.g. artifacts or decoder failures, a rtpdump file + with the unencrypted RTP traffic. This can by replayed using the video_replay + tool from the rtc_tools directory. + +### Filing a Security Bug + +The WebRTC team takes security very seriously. If you find a vulnerability in +WebRTC, please file a [Chromium security bug][ChromeSecurity], even if the bug +only affects native WebRTC code and not Chromium. + +A history of fixed Chromium security bugs is best found via [security notes in +Stable Channel updates on the Google Chrome releases blog][ChromeSecurityBlog]. + +You can also find fixed, publicly visible [Type=Bug-Security][ChromeBugList] +bugs in the issue tracker (note: security bugs normally become publicly +visible 14 weeks after they are fixed). If there is a bug in WebRTC code +that Chromium isn’t using (such as the Java/ObjC wrappers for Android/iOS) +we will announce fixes separately on [discuss-webrtc][DiscussWebRTC]. + +[Tracking released security bug disclosures][WebRtcBugList]. + +Note that we will generally NOT merge security fixes backwards to any branches, +so if you’re using older branches it’s your responsibility to make sure the +relevant security fixes get merged. + + +### Receiving notifications about security bugs in Chrome + +To get automatic notifications about activity/comments in security bugs in +Chrome you need to be either explicitly cc:d on specific bugs (by someone who +has access to the bug) or be part of a special mailing list for all security bug +notifications. To get on that list you have to apply to the Chrome Security +team, see more about this on the [Chrome Security page][ChromeSecurity] under +"How can I get access to Chromium vulnerabilities?" at the bottom of the page. 
+ +Please note that Chrome's security-notify list will receive notifications about +all security bugs in Chrome and not just the WebRTC ones. Normally it shouldn't +be a problem to figure out whether an issue affects WebRTC since it will most +likely be tagged with one of the WebRTC-related components (one of Blink>WebRTC, +Blink>GetUserMedia, Blink>MediaStream, Blink>MediaRecording) or their sub- +components. + +Also note that access granted by the list will only apply to bugs of Type=Bug- +Security. Not all bugs with crashes, memory leaks and other potential +vulnerabilities are marked as Bug-Security though. You can read more about what +categories of bugs are deemed security bugs in the [Severity Guidelines for +Security Issues][SeverityGuidelines] and also on the [Security FAQ][SecurityFaq] +page. + + +[1]: https://accounts.google.com/ +[2]: http://www.chromium.org/for-testers/bug-reporting-guidelines/reporting-crash-bug +[3]: https://code.google.com/p/chromium/issues/entry?template=Audio/Video%20Issue +[4]: https://bugs.chromium.org/p/webrtc/issues/entry +[5]: native-code/logging.md +[ChromeSecurity]: https://www.chromium.org/Home/chromium-security/reporting-security-bugs +[DiscussWebRTC]: https://groups.google.com/group/discuss-webrtc +[ChromeSecurityBlog]: https://chromereleases.googleblog.com/search/label/Stable%20updates +[ChromeBugList]: https://bugs.chromium.org/p/chromium/issues/list?can=1&q=Type%3DBug-Security+component%3ABlink%3EWebRTC+-status%3ADuplicate%2CWontfix&sort=-closed&colspec=ID+Pri+M+Component+Status+Owner+Summary+OS+Closed&x=m&y=releaseblock&cells=ids +[WebRtcBugList]: https://bugs.chromium.org/p/webrtc/issues/list?q=Type%3DBug-Security&can=1 +[ChromeSecurity]: https://www.chromium.org/Home/chromium-security +[SeverityGuidelines]: https://chromium.googlesource.com/chromium/src/+/master/docs/security/severity-guidelines.md +[SecurityFaq]: https://chromium.googlesource.com/chromium/src/+/master/docs/security/faq.md diff --git a/docs/faq.md 
RFC 6716.
It's a next-generation open video codec.
WebRTC is based on an API that is still under development
access, since a certain number of contributions is still required for that +as well).
+ +Some software frameworks, voice and video codecs require end-users, +distributors and manufacturers to pay patent royalties to use the intellectual +property within the software technology and/or codec. Google is not charging +royalties for WebRTC and its components including the codecs it supports (VP8 +for video and iSAC and iLBC for audio). For more information, see the [License +page][license-link]. + +[license-link]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/license/index.md + + +### What does this license let me do? + +Like most BSD licenses, this license allows you to use the WebRTC code with a +minimum of restrictions on your use. You can use the code in proprietary +software as well as open source software. + + +### Do I need to release the source if I make changes? + +No, the license does not require you to release source if you make changes. +However, we would love to see any changes you make and possibly incorporate +them, so if you want to participate please visit the +[code review page][code-review-link] and submit some patches. + +[code-review-link]: https://webrtc-review.googlesource.com/ + + +### Why is there a separate patent grant? + +In order to decouple patents from copyright, thus preserving the pure BSD +nature of the copyright license, the license and the patent grant are +separate. This means we are using a standard (BSD) open source copyright +license, and the patent grant can exist on its own. This makes WebRTC +compatible with all major license scenarios. + + +### What if someone gets the code from Google and gives it to me without changes. Do I have a patent grant from Google? + +Yes, you still have the right to redistribute and you still have a patent +license for Google's patents that cover the code that Google released. + + +### What if someone makes a change to the code and gives it to me. Do I have a patent license from Google for that change? 
+ +You still have the right to redistribute but no patent license for the changes +(if there are any patents covering it). We can't give patent licenses for +changes people make after we distribute the code, as we have no way to predict +what those changes will be. Other common licenses take the same approach, +including the Apache license. + + +### What if Google receives or buys a patent that covers the code I receive sometime after I receive the code. Do I have a patent grant for that patent? + +Yes, you still have the right to redistribute and you still have a patent +license for Google's patents that cover the code that Google released. + + +### What if my competitor uses the code and brings patent litigation against me for something unrelated to the code. Do they still have a patent license? + +Yes, they still have the right to redistribute and they still have a patent +license for Google's patents that cover the code that Google released. diff --git a/docs/native-code/android/index.md b/docs/native-code/android/index.md index 47070a872f..82078210d1 100644 --- a/docs/native-code/android/index.md +++ b/docs/native-code/android/index.md @@ -23,6 +23,7 @@ build config. See [Development][webrtc-development] for instructions on how to update the code, building etc. + ## Compiling 1. Generate projects using GN. @@ -44,9 +45,12 @@ to enable managing multiple configurations in parallel. 2. Compile using: ``` -$ ninja -C out/Debug +$ autoninja -C out/Debug ``` +(To list all available targets, run `autoninja -C out/Debug -t targets all`.) + + ## Using the Bundled Android SDK/NDK In order to use the Android SDK and NDK that is bundled in @@ -59,6 +63,7 @@ $ . build/android/envsetup.sh Then you'll have `adb` and all the other Android tools in your `PATH`. 
+ ## Running the AppRTCMobile App AppRTCMobile is an Android application using WebRTC Native APIs via JNI (JNI @@ -77,7 +82,7 @@ https://bugs.webrtc.org/9282* generating the build files using GN): ``` -$ ninja -C out/Debug AppRTCMobile +$ autoninja -C out/Debug AppRTCMobile ``` 2. Generate the project files: @@ -97,52 +102,55 @@ Android Studio's SDK. When asked whether to use the Gradle wrapper, press AppRTCMobile should now start on the device. If you do any changes to the C++ code, you have to compile the project using -ninja after the changes (see step 1). +autoninja after the changes (see step 1). *Note: Only "arm" is supported as the target_cpu when using Android Studio. This still allows you to run the application on 64-bit ARM devices. x86-based devices are not supported right now.* -## Running WebRTC Native Tests on an Android Device +## Running Tests on an Android Device To build APKs with the WebRTC native tests, follow these instructions. -1. Ensure you have an Android device set in Developer mode connected via -USB. +1. Ensure you have an Android device set in Developer mode connected via USB. + +2. Compile unit tests and/or instrumentation tests: + +``` +$ autoninja -C out/Debug android_instrumentation_test_apk +$ autoninja -C out/Debug rtc_unittests +``` -2. Compile as described in the section above. +3. You can find the generated test binaries in `out/Debug/bin`. To run instrumentation tests: -3. To see which tests are available: look in `out/Debug/bin`. +``` +$ out/Debug/bin/run_android_instrumentation_test_apk -v +``` -4. Run a test on your device: +To run unit tests: ``` -$ out/Debug/bin/run_modules_unittests +$ out/Debug/bin/run_rtc_unittests -v ``` -5. If you want to limit to a subset of tests, use the `--gtest_filter flag`, e.g. +Show verbose output with `-v` and filter tests with `--gtest-filter=SomeTest.*`. 
For example: ``` -$ out/Debug/bin/run_modules_unittests \ - --gtest_filter=RtpRtcpAPITest.SSRC:RtpRtcpRtcpTest.* +$ out/Debug/bin/run_android_instrumentation_test_apk -v \ + --gtest_filter=VideoFrameBufferTest.* ``` -6. **NOTICE:** The first time you run a test, you must accept a dialog on +For a full list of command line arguments, use `--help`. + +5. **NOTICE:** The first time you run a test, you must accept a dialog on the device! If want to run Release builds instead; pass `is_debug=false` to GN (and preferably generate the projects files into a directory like `out/Release`). Then use the scripts generated in `out/Release/bin` instead. - -## Running WebRTC Instrumentation Tests on an Android Device - -The instrumentation tests (like AppRTCMobileTest and -libjingle_peerconnection_android_unittest) gets scripts generated in the same -location as the native tests described in the previous section. - -[webrtc-prerequitite-sw]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/development/prerequisite-sw/index.md +[webrtc-prerequisite-sw]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/development/prerequisite-sw/index.md [webrtc-jni-doc]: https://webrtc.googlesource.com/src/+/master/sdk/android/README [apprtc-doc]: https://webrtc.googlesource.com/src/+/master/examples/androidapp/README [ninja]: https://ninja-build.org/ diff --git a/docs/native-code/development/index.md b/docs/native-code/development/index.md index c6f00be732..04393a9bb8 100644 --- a/docs/native-code/development/index.md +++ b/docs/native-code/development/index.md @@ -169,9 +169,8 @@ For more details, read Chromium's [Working with Branches][chromium-work-branches ## Contributing Patches Please see [Contributing Fixes][webrtc-contributing] for information on how to run -`git cl upload`, getting your patch reviewed, and getting it submitted. - -This also includes information on how to run tryjobs, if you're a committer. 
+`git cl upload`, getting your patch reviewed, and getting it submitted. You can also +find info on how to run trybots and applying for try rights. ## Chromium Committers @@ -243,12 +242,6 @@ your connection. Open one more tab using the same page. Connect it too (with a different name). It is now possible to exchange messages between the connected peers. -### Relay Server - -Target name `relayserver`. Relays traffic when a direct peer-to-peer -connection can't be established. Can be used with the call application above. - - ### STUN Server Target name `stunserver`. Implements the STUN protocol for Session Traversal @@ -257,8 +250,7 @@ Utilities for NAT as documented in [RFC 5389][rfc-5389]. ### TURN Server -Target name `turnserver`. In active development to reach compatibility with -[RFC 5766][rfc-5766]. +Target name `turnserver`. Used for unit tests. [ninja]: https://ninja-build.org/ @@ -268,7 +260,7 @@ Target name `turnserver`. In active development to reach compatibility with [webrtc-ios-development]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/ios/index.md [chromium-work-branches]: https://www.chromium.org/developers/how-tos/get-the-code/working-with-branches [chromium-work-release-branches]: https://www.chromium.org/developers/how-tos/get-the-code/working-with-release-branches -[webrtc-contributing]: https://webrtc.org/contributing/ +[webrtc-contributing]: https://webrtc.org/support/contributing/ [depot-tools]: http://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/depot_tools_tutorial.html#_setting_up [rfc-5389]: https://tools.ietf.org/html/rfc5389 [rfc-5766]: https://tools.ietf.org/html/rfc5766 diff --git a/docs/native-code/index.md b/docs/native-code/index.md index 91d024f2b4..f91bbb7360 100644 --- a/docs/native-code/index.md +++ b/docs/native-code/index.md @@ -19,8 +19,8 @@ The change log is available at Please read the [License & Rights][webrtc-license] and [FAQ][webrtc-faq] before downloading the 
source code. -[webrtc-license]: https://webrtc.org/license/ -[webrtc-faq]: https://webrtc.org/faq/ +[webrtc-license]: https://webrtc.org/support/license +[webrtc-faq]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/faq.md The WebRTC [issue tracker][webrtc-issue-tracker] can be used for submitting bugs found in native code. @@ -33,8 +33,10 @@ bugs found in native code. * [Development][webrtc-development] * [Android][webtc-android-development] * [iOS][webrtc-ios-development] +* [Experimental RTP header extensions][rtp-hdrext] [webrtc-prerequitite-sw]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/development/prerequisite-sw/index.md [webrtc-development]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/development/index.md [webtc-android-development]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/android/index.md [webrtc-ios-development]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/ios/index.md +[rtp-hdrext]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/rtp-hdrext/index.md diff --git a/docs/native-code/ios/index.md b/docs/native-code/ios/index.md index 6c9d2de0e8..e2f6c3dfd6 100644 --- a/docs/native-code/ios/index.md +++ b/docs/native-code/ios/index.md @@ -173,7 +173,7 @@ a script is available [here][framework-script] To build the framework with bitcode support, pass the `--bitcode` flag to the script like so ``` -$ python build_ios_libs.py --bitcode +$ python tools_webrtc/ios/build_ios_libs.py --bitcode ``` The resulting framework can be found in out_ios_libs/. diff --git a/docs/native-code/logging.md b/docs/native-code/logging.md new file mode 100644 index 0000000000..1daadbe2b5 --- /dev/null +++ b/docs/native-code/logging.md @@ -0,0 +1,42 @@ +Native logs are often valuable in order to debug issues that can't be easily +reproduced. Following are instructions for gathering logs on various platforms. 
+ +To enable native logs for a native application, you can either: + + * Use a debug build of WebRTC (a build where `NDEBUG` is not defined), + which will enable `INFO` logging by default. + + * Call `rtc::LogMessage::LogToDebug(rtc::LS_INFO)` within your application. + Or use `LS_VERBOSE` to enable `VERBOSE` logging. + +For the location of the log output on different platforms, see below. + +#### Android + +Logged to Android system log. Can be obtained using: + +~~~~ bash +adb logcat -s "libjingle" +~~~~ + +To enable the logging in a non-debug build from Java code, use +`Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO)`. + +#### iOS + +Only logged to `stderr` by default. To log to a file, use `RTCFileLogger`. + +#### Mac + +For debug builds of WebRTC (builds where `NDEBUG` is not defined), logs to +`stderr`. To do this for release builds as well, set a boolean preference named +'logToStderr' to `true` for your application. Or, use `RTCFileLogger` to log to +a file. + +#### Windows + +Logs to the debugger and `stderr`. + +#### Linux/Other Platforms + +Logs to `stderr`. diff --git a/docs/native-code/rtp-hdrext/abs-capture-time/README.md b/docs/native-code/rtp-hdrext/abs-capture-time/README.md new file mode 100644 index 0000000000..171993c2e7 --- /dev/null +++ b/docs/native-code/rtp-hdrext/abs-capture-time/README.md @@ -0,0 +1,121 @@ +# Absolute Capture Time + +The Absolute Capture Time extension is used to stamp RTP packets with a NTP +timestamp showing when the first audio or video frame in a packet was originally +captured. The intent of this extension is to provide a way to accomplish +audio-to-video synchronization when RTCP-terminating intermediate systems (e.g. +mixers) are involved. + +**Name:** +"Absolute Capture Time"; "RTP Header Extension for Absolute Capture Time" + +**Formal name:** + + +**Status:** +This extension is defined here to allow for experimentation. 
Once experience has +shown that it is useful, we intend to make a proposal based on it for +standardization in the IETF. + +Contact for more info. + +## RTP header extension format + +### Data layout overview +Data layout of the shortened version of `abs-capture-time` with a 1-byte header +\+ 8 bytes of data: + + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID | len=7 | absolute capture timestamp (bit 0-23) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | absolute capture timestamp (bit 24-55) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ... (56-63) | + +-+-+-+-+-+-+-+-+ + +Data layout of the extended version of `abs-capture-time` with a 1-byte header + +16 bytes of data: + + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID | len=15| absolute capture timestamp (bit 0-23) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | absolute capture timestamp (bit 24-55) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ... (56-63) | estimated capture clock offset (bit 0-23) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | estimated capture clock offset (bit 24-55) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ... (56-63) | + +-+-+-+-+-+-+-+-+ + +### Data layout details +#### Absolute capture timestamp + +Absolute capture timestamp is the NTP timestamp of when the first frame in a +packet was originally captured. This timestamp MUST be based on the same clock +as the clock used to generate NTP timestamps for RTCP sender reports on the +capture system. + +It's not always possible to do an NTP clock readout at the exact moment of when +a media frame is captured. 
A capture system MAY postpone the readout until a +more convenient time. A capture system SHOULD have known delays (e.g. from +hardware buffers) subtracted from the readout to make the final timestamp as +close to the actual capture time as possible. + +This field is encoded as a 64-bit unsigned fixed-point number with the high 32 +bits for the timestamp in seconds and low 32 bits for the fractional part. This +is also known as the UQ32.32 format and is what the RTP specification defines as +the canonical format to represent NTP timestamps. + +#### Estimated capture clock offset + +Estimated capture clock offset is the sender's estimate of the offset between +its own NTP clock and the capture system's NTP clock. The sender is here defined +as the system that owns the NTP clock used to generate the NTP timestamps for +the RTCP sender reports on this stream. The sender system is typically either +the capture system or a mixer. + +This field is encoded as a 64-bit two’s complement **signed** fixed-point number +with the high 32 bits for the seconds and low 32 bits for the fractional part. +It’s intended to make it easy for a receiver, that knows how to estimate the +sender system’s NTP clock, to also estimate the capture system’s NTP clock: + + Capture NTP Clock = Sender NTP Clock + Capture Clock Offset + +### Further details + +#### Capture system + +A receiver MUST treat the first CSRC in the CSRC list of a received packet as if +it belongs to the capture system. If the CSRC list is empty, then the receiver +MUST treat the SSRC as if it belongs to the capture system. Mixers SHOULD put +the most prominent CSRC as the first CSRC in a packet’s CSRC list. + +#### Intermediate systems + +An intermediate system (e.g. mixer) MAY adjust these timestamps as needed. It +MAY also choose to rewrite the timestamps completely, using its own NTP clock as +reference clock, if it wants to present itself as a capture system for A/V-sync +purposes. 
+ +#### Timestamp interpolation + +A sender SHOULD save bandwidth by not sending `abs-capture-time` with every +RTP packet. It SHOULD still send them at regular intervals (e.g. every second) +to help mitigate the impact of clock drift and packet loss. Mixers SHOULD always +send `abs-capture-time` with the first RTP packet after changing capture system. + +A receiver SHOULD memorize the capture system (i.e. CSRC/SSRC), capture +timestamp, and RTP timestamp of the most recently received `abs-capture-time` +packet on each received stream. It can then use that information, in combination +with RTP timestamps of packets without `abs-capture-time`, to extrapolate +missing capture timestamps. + +Timestamp interpolation works fine as long as there’s reasonably low NTP/RTP +clock drift. This is not always true. Senders that detect "jumps" between its +NTP and RTP clock mappings SHOULD send `abs-capture-time` with the first RTP +packet after such a thing happening. diff --git a/docs/native-code/rtp-hdrext/abs-send-time/README.md b/docs/native-code/rtp-hdrext/abs-send-time/README.md new file mode 100644 index 0000000000..86c3c733dc --- /dev/null +++ b/docs/native-code/rtp-hdrext/abs-send-time/README.md @@ -0,0 +1,31 @@ +# Absolute Send Time + +The Absolute Send Time extension is used to stamp RTP packets with a timestamp +showing the departure time from the system that put this packet on the wire +(or as close to this as we can manage). Contact for +more info. + +Name: "Absolute Sender Time" ; "RTP Header Extension for Absolute Sender Time" + +Formal name: + +SDP "a= name": "abs-send-time" ; this is also used in client/cloud signaling. + +Not unlike [RTP with TFRC](http://tools.ietf.org/html/draft-ietf-avt-tfrc-profile-10#section-5) + +Wire format: 1-byte extension, 3 bytes of data. total 4 bytes extra per packet +(plus shared 4 bytes for all extensions present: 2 byte magic word 0xBEDE, 2 +byte # of extensions). 
Will in practice replace the "toffset" extension so we +should see no long term increase in traffic as a result. + +Encoding: Timestamp is in seconds, 24 bit 6.18 fixed point, yielding 64s +wraparound and 3.8us resolution (one increment for each 477 bytes going out on +a 1Gbps interface). + +Relation to NTP timestamps: abs_send_time_24 = (ntp_timestamp_64 >> 14) & +0x00ffffff ; NTP timestamp is 32 bits for whole seconds, 32 bits fraction of +second. + +Notes: Packets are time stamped when going out, preferably close to metal. +Intermediate RTP relays (entities possibly altering the stream) should remove +the extension or set its own timestamp. diff --git a/docs/native-code/rtp-hdrext/color-space/README.md b/docs/native-code/rtp-hdrext/color-space/README.md new file mode 100644 index 0000000000..3f9485681f --- /dev/null +++ b/docs/native-code/rtp-hdrext/color-space/README.md @@ -0,0 +1,88 @@ +# Color Space + +The color space extension is used to communicate color space information and +optionally also metadata that is needed in order to properly render a high +dynamic range (HDR) video stream. Contact for more info. + +**Name:** "Color space" ; "RTP Header Extension for color space" + +**Formal name:** + +**Status:** This extension is defined here to allow for experimentation. Once experience +has shown that it is useful, we intend to make a proposal based on it for standardization +in the IETF. + +## RTP header extension format + +### Data layout overview +Data layout without HDR metadata (one-byte RTP header extension) + 1-byte header + 4 bytes of data: + + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID | L = 3 | primaries | transfer | matrix | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |range+chr.sit. 
| + +-+-+-+-+-+-+-+-+ + +Data layout of color space with HDR metadata (two-byte RTP header extension) + 2-byte header + 28 bytes of data: + + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID | length=28 | primaries | transfer | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | matrix |range+chr.sit. | luminance_max | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | luminance_min | mastering_metadata.| + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |primary_r.x and .y | mastering_metadata.| + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |primary_g.x and .y | mastering_metadata.| + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |primary_b.x and .y | mastering_metadata.| + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |white.x and .y | max_content_light_level | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | max_frame_average_light_level | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + +### Data layout details +The data is written in the following order, +Color space information (4 bytes): + * Color primaries value according to ITU-T H.273 Table 2. + * Transfer characteristic value according to ITU-T H.273 Table 3. + * Matrix coefficients value according to ITU-T H.273 Table 4. + * Range and chroma siting as specified at + https://www.webmproject.org/docs/container/#colour. Range (range), horizontal (horz) + and vertical (vert) siting are merged to one byte by the operation: (range << 4) + + (horz << 2) + vert. + +The extension may optionally include HDR metadata written in the following order, +Mastering metadata (20 bytes): + * Luminance max, specified in nits, where 1 nit = 1 cd/m2. + (16-bit unsigned integer) + * Luminance min, scaled by a factor of 10000 and specified in the unit 1/10000 + nits. 
(16-bit unsigned integer) + * CIE 1931 xy chromaticity coordinates of the primary red, scaled by a factor of 50000. + (2x 16-bit unsigned integers) + * CIE 1931 xy chromaticity coordinates of the primary green, scaled by a factor of 50000. + (2x 16-bit unsigned integers) + * CIE 1931 xy chromaticity coordinates of the primary blue, scaled by a factor of 50000. + (2x 16-bit unsigned integers) + * CIE 1931 xy chromaticity coordinates of the white point, scaled by a factor of 50000. + (2x 16-bit unsigned integers) + +Followed by max light levels (4 bytes): + * Max content light level, specified in nits. (16-bit unsigned integer) + * Max frame average light level, specified in nits. (16-bit unsigned integer) + +Note, the byte order for all integers is big endian. + +See the standard SMPTE ST 2086 for more information about these entities. + +Notes: Extension should be present only in the last packet of video frames. If attached +to other packets it should be ignored. + diff --git a/docs/native-code/rtp-hdrext/inband-cn/README.md b/docs/native-code/rtp-hdrext/inband-cn/README.md new file mode 100644 index 0000000000..70ecdac0fb --- /dev/null +++ b/docs/native-code/rtp-hdrext/inband-cn/README.md @@ -0,0 +1,57 @@ +# Inband Comfort Noise + +**Name:** "Inband Comfort Noise" ; "RTP Header Extension to signal inband comfort noise" + +**Formal name:** + +**Status:** This extension is defined here to allow for experimentation. Once experience has shown that it is useful, we intend to make a proposal based on it for standardization in the IETF. + +## Introduction + +Comfort noise \(CN\) is widely used in real time communication, as it significantly reduces the frequency of RTP packets, and thus saves the network bandwidth, when participants in the communication are constantly actively speaking. + +One way of deploying CN is through \[RFC 3389\]. 
It defines CN as a special payload, which needs to be encoded and decoded independently from the codec\(s\) applied to active speech signals. This deployment is referred to as outband CN in this context. + +Some codecs, for example RFC 6716: Definition of the Opus Audio Codec, implement their own CN schemes. Basically, the encoder can notify that a CN packet is issued and/or no packet needs to be transmitted. + +Since CN packets have their particularities, cloud and client may need to identify them and treat them differently. Special treatments on CN packets include but are not limited to + +* Upon receiving multiple streams of CN packets, choose only one to relay or mix. +* Adapt jitter buffer wisely according to the discontinuous transmission nature of CN packets. + +While RTP packets that contain outband CN can be easily identified as they bear a different payload type, inband CN cannot. Some codecs may be able to extract the information by decoding the packet, but that depends on codec implementation, not even mentioning that decoding packets is not always feasible. This document proposes using an RTP header extension to signal the inband CN. + +## RTP header extension format + +The inband CN extension can be encoded using either the one-byte or two-byte header defined in \[RFC 5285\]. Figures 1 and 2 show encodings with each of these header formats. + + 0 1 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID | len=0 |N| noise level | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + +Figure 1. Encoding Using the One-Byte Header Format + + 0 1 2 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID | len=1 |N| noise level | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + +Figure 2. Encoding Using the Two-Byte Header Format + +Noise level is an optional data. The bit "N" being 1 indicates that there is a noise level. 
The noise level is defined the same way as the audio level in \[RFC 6464\] and therefore can be used to avoid the Audio Level Header Extension on the same RTP packet. This also means that this level is defined the same as the noise level in \[RFC 3389\] and therefore can be compared against outband CN. + +## Further details + +The existence of this header extension in an RTP packet indicates that it has inband CN, and therefore it will be used sparsely, and results in very small transmission cost. + +The end receiver can utilize this RTP header extension to get notified about an upcoming discontinuous transmission. This can be useful for its jitter buffer management. This RTP header extension signals comfort noise, it can also be used by audio mixer to mix streams wisely. As an example, it can avoid mixing multiple comfort noises together. + +Cloud may have the benefits of this RTP header extension as an end receiver, if it does transcoding. It may also utilize this RTP header extension to prioritize RTP packets if it does packet filtering. In both cases, this RTP header extension should not be encrypted. + +## References +* \[RFC 3389\] Zopf, R., "Real-time Transport Protocol \(RTP\) Payload for Comfort Noise \(CN\)", RFC 3389, September 2002. +* \[RFC 6465\] Ivov, E., Ed., Marocco, E., Ed., and J. Lennox, "A Real-time Transport Protocol \(RTP\) Header Extension for Mixer-to-Client Audio Level Indication", RFC 6465, December 2011. +* \[RFC 5285\] Singer, D. and H. Desineni, "A General Mechanism for RTP Header Extensions", RFC 5285, July 2008. 
diff --git a/docs/native-code/rtp-hdrext/index.md b/docs/native-code/rtp-hdrext/index.md new file mode 100644 index 0000000000..c0c0b75aba --- /dev/null +++ b/docs/native-code/rtp-hdrext/index.md @@ -0,0 +1,12 @@ +# Experimental RTP header extensions + +The following subpages define experimental RTP header extensions: + + * [abs-send-time](abs-send-time/README.md) + * [abs-capture-time](abs-capture-time/README.md) + * [color-space](color-space/README.md) + * [playout-delay](playout-delay/README.md) + * [transport-wide-cc-02](transport-wide-cc-02/README.md) + * [video-content-type](video-content-type/README.md) + * [video-timing](video-timing/README.md) + * [inband-cn](inband-cn/README.md) diff --git a/docs/native-code/rtp-hdrext/playout-delay/README.md b/docs/native-code/rtp-hdrext/playout-delay/README.md new file mode 100644 index 0000000000..e669b04f83 --- /dev/null +++ b/docs/native-code/rtp-hdrext/playout-delay/README.md @@ -0,0 +1,54 @@ +# Playout Delay + +**Name:** "Playout Delay" ; "RTP Header Extension to control Playout Delay" + +**Formal name:** + +**SDP "a= name":** "playout-delay" ; this is also used in client/cloud signaling. + +**Status:** This extension is defined here to allow for experimentation. Once experience +has shown that it is useful, we intend to make a proposal based on it for standardization +in the IETF. + +## Introduction + +On WebRTC, the RTP receiver continuously measures inter-packet delay and evaluates packet jitter. Besides this, an estimated delay for decode and render at the receiver is computed. The jitter buffer, the local time extrapolation and the predicted render time (based on predicted decode and render time) impact the delay on a frame before it is rendered at the receiver. + +This document proposes an RTP extension to enable the RTP sender to try and limit the amount of playout delay at the receiver in a certain range. 
A minimum and maximum delay from the sender provides guidance on the range over which the receiver can smooth out rendering. + +Thus, this extension aims to provide the sender’s intent to the receiver on how quickly a frame needs to be rendered. + +The following use cases are addressed by this extension: + +* Interactive streaming (gaming, remote access): Interactive streaming is highly sensitive to end-to-end latency and any delay in render impacts the end-user experience. These use cases prioritize reducing delay over any smoothing done at the receiver. In these cases, the RTP sender would like to disable all smoothing at receiver (min delay = max delay = 0) +* Movie playback: In some scenarios, the user prefers smooth playback and adaptive delay impacts end-user experience (audio can speed up and slow down). In these cases the sender would like to have a fixed delay at all times (min delay = max delay = K) +* Interactive communication: This is the scenarios where the receiver is best suited to adjust the delay adaptively to minimize latency and at the same time add some smoothing based on jitter prevalent due to network conditions (min delay = K1, max delay = K2) + + +## MIN and MAX playout delay + +The playout delay on a frame represents the amount of delay added to a frame the time it is captured at the sender to the time it is expected to be rendered at the receiver. Thus playout delay is essentially: + +Playout delay = ExpectedRenderTime(frame) - ExpectedCaptureTime(frame) + +MIN and MAX playout delay in turn represent the minimum and maximum delay that can be seen on a frame. This restriction range is best effort. The receiver is expected to try and meet the range as best as it can. + +A value of 0 for example is meaningless from the perspective of actually meeting the suggested delay, but it indicates to the receiver that the frame should be rendered as soon as possible. 
It is up-to the receiver to decide how to handle a frame when it arrives too late (i.e., whether to simply drop or hand over for rendering as soon as possible). + +## RTP header extension format + + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID | len=2 | MIN delay | MAX delay | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + + +12 bits for Minimum and Maximum delay. This represents a range of 0 - 40950 milliseconds for minimum and maximum (with a granularity of 10 ms). A granularity of 10 ms is sufficient since we expect the following typical use cases: + +* 0 ms: Certain gaming scenarios (likely without audio) where we will want to play the frame as soon as possible. Also, for remote desktop without audio where rendering a frame asap makes sense +* 100/150/200 ms: These could be the max target latency for interactive streaming use cases depending on the actual application (gaming, remoting with audio, interactive scenarios) +* 400 ms: Application that want to ensure a network glitch has very little chance of causing a freeze can start with a minimum delay target that is high enough to deal with network issues. Video streaming is one example. + +The header is attached to the RTP packet by the RTP sender when it needs to change the min and max smoothing delay at the receiver. Once the sender is informed that at least one RTP packet which has the min and max details is delivered, it MAY stop providing details on all further RTP packets until another change warrants communicating the details to the receiver again. This is done as follows: + +RTCP feedback to RTP sender includes the highest sequence number that was seen on the RTP receiver. 
The RTP sender can track the sequence number on the packet that first had the playout delay extension and then stop sending the extension once the received sequence number is greater than the sequence number on the first packet containing the current values playout delay in this extension. diff --git a/docs/native-code/rtp-hdrext/transport-wide-cc-02/README.md b/docs/native-code/rtp-hdrext/transport-wide-cc-02/README.md new file mode 100644 index 0000000000..20b1d51dd2 --- /dev/null +++ b/docs/native-code/rtp-hdrext/transport-wide-cc-02/README.md @@ -0,0 +1,62 @@ +# Transport-Wide Congestion Control + +This RTP header extension is an extended version of the extension defined in + + +**Name:** "Transport-wide congestion control 02" + +**Formal name:** + + +**Status:** This extension is defined here to allow for experimentation. Once +experience has shown that it is useful, we intend to make a proposal based on +it for standardization in the IETF. + +The original extension defines a transport-wide sequence number that is used in +feedback packets for congestion control. The original implementation sends these +feedback packets at a periodic interval. The extended version presented here has +two changes compared to the original version: +* Feedback is sent only on request by the sender, therefore, the extension has + two optional bytes that signal that a feedback packet is requested. +* The sender determines if timing information should be included or not in the + feedback packet. The original version always includes timing information. + +Contact or for more info. 
+ +## RTP header extension format + +### Data layout overview +Data layout of transport-wide sequence number + 1-byte header + 2 bytes of data: + + 0              1 2 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID   | L=1 |transport-wide sequence number | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + +Data layout of transport-wide sequence number and optional feedback request + 1-byte header + 4 bytes of data: + + 0              1 2                   3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | ID   | L=3 |transport-wide sequence number |T|  seq count | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |seq count cont.| + +-+-+-+-+-+-+-+-+ + +### Data layout details +The data is written in the following order, +* transport-wide sequence number (16-bit unsigned integer) +* feedback request (optional) (16-bit unsigned integer)
If the extension contains two extra bytes for feedback request, this means + that a feedback packet should be generated and sent immediately. The feedback + request consists of a one-bit field giving the flag value T and a 15-bit + field giving the sequence count as an unsigned number. + - If the bit T is set the feedback packet must contain timing information. + - seq count specifies how many packets of history that should be included in + the feedback packet. If seq count is zero no feedback should be + generated, which is equivalent of sending the two-byte extension above. + This is added as an option to allow for a fixed packet header size. + diff --git a/docs/native-code/rtp-hdrext/video-content-type/README.md b/docs/native-code/rtp-hdrext/video-content-type/README.md new file mode 100644 index 0000000000..e7eb10d4e8 --- /dev/null +++ b/docs/native-code/rtp-hdrext/video-content-type/README.md @@ -0,0 +1,24 @@ +# Video Content Type + +The Video Content Type extension is used to communicate a video content type +from sender to receiver of rtp video stream. Contact for +more info. + +Name: "Video Content Type" ; "RTP Header Extension for Video Content Type" + +Formal name: + +SDP "a= name": "video-content-type" ; this is also used in client/cloud signaling. + +Wire format: 1-byte extension, 1 byte of data. total 2 bytes extra per packet +(plus shared 4 bytes for all extensions present: 2 byte magic word 0xBEDE, 2 +byte # of extensions). + +Values: + + * 0x00: *Unspecified*. Default value. Treated the same as an absence of an extension. + * 0x01: *Screenshare*. Video stream is of a screenshare type. + +Notes: Extension should be present only in the last packet of key-frames. If +attached to other packets it should be ignored. If extension is absent, +*Unspecified* value is assumed. 
diff --git a/docs/native-code/rtp-hdrext/video-timing/README.md b/docs/native-code/rtp-hdrext/video-timing/README.md new file mode 100644 index 0000000000..6f862f6157 --- /dev/null +++ b/docs/native-code/rtp-hdrext/video-timing/README.md @@ -0,0 +1,42 @@ +# Video Timing + +The Video Timing extension is used to communicate a timing information on +per-frame basis to receiver of rtp video stream. Contact for +more info. It may be generalized to audio frames as well in the future. + +Name: "Video Timing" ; "RTP Header Extension for Video timing" + +Formal name: + +SDP "a= name": "video-timing" ; this is also used in client/cloud signaling. + +Wire format: 1-byte extension, 13 bytes of data. Total 14 bytes extra per packet +(plus 1-3 padding byte in some cases, plus shared 4 bytes for all extensions +present: 2 byte magic word 0xBEDE, 2 byte # of extensions). + +First byte is a flags field. Defined flags: + + * 0x01 - extension is set due to timer. + * 0x02 - extension is set because the frame is larger than usual. + +Both flags may be set at the same time. All remaining 6 bits are reserved and +should be ignored. + +Next, 6 timestamps are stored as 16-bit values in big-endian order, representing +delta from the capture time of a packet in ms. +Timestamps are, in order: + + * Encode start. + * Encode finish. + * Packetization complete. + * Last packet left the pacer. + * Reserved for network. + * Reserved for network (2). + +Pacer timestamp should be updated inside the RTP packet by pacer component when +the last packet (containing the extension) is sent to the network. Last two, +reserved timestamps, are not set by the sender but are reserved in packet for any +in-network RTP stream processor to modify. + +Notes: Extension should be present only in the last packet of video frames. If +attached to other packets it should be ignored. 
diff --git a/docs/release-notes.md b/docs/release-notes.md new file mode 100644 index 0000000000..5bb501b781 --- /dev/null +++ b/docs/release-notes.md @@ -0,0 +1,71 @@ +# Release notes +This document contains pointers to the WebRTC release notes for each Chrome release. The +release notes are posted to the [discuss-webrtc](https://groups.google.com/group/discuss-webrtc) +mailing list before the release. + +## Current release +To find out the current release and schedule, refer to the +[chromium dashboard](https://chromiumdash.appspot.com/schedule) + +## List of releases + * [M87 Release Notes](https://groups.google.com/g/discuss-webrtc/c/6VmKkCjRK0k/m/YyOTQyQ5AAAJ) + * [M86 Release Notes](https://groups.google.com/g/discuss-webrtc/c/pKCOpi9Llyc/m/QhZjyE02BgAJ) + * [M85 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/Qq3nsR2w2HU/7WGLPscPBwAJ) + * [M84 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/MRAV4jgHYV0/5019yB-HAwAJ) + * [M83 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/EieMDYtQ9sg/7po9fl8_AgAJ) + * NOTE: M82 release was cancelled due to cancellation of Chrome 82 release + * [M81 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/a5_zncyPc3Y/iirhUr6bCwAJ) + * [M80 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/Ozvbd0p7Q1Y/M4WN2cRKCwAJ) + * [M79 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/X8q5Ae9VKco/oEiGuteoBAAJ) + * [M78 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/qbX55tFk1o4/KgFA-ZksCwAJ) + * [M77 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/b1tdwrXKuHI/OH7oSL7OBwAJ) + * [M76 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/Y7TIuNbgP8M/UoXP-RuxAwAJ) + * [M75 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/_jlUbYjv-hQ/mCtjlVyjAgAJ) + * [M74 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/cXEtXIIYrQs/R7y0yIK2AQAJ) + * [M73 Release 
Notes](https://groups.google.com/d/msg/discuss-webrtc/l0gc3RjBhc0/FsMqOlOSBwAJ) + * [M72 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/3h4y0fimHwg/j6G4dTVvCAAJ) + * [M71 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/HUpIxlDlkSE/qR1nswqZCwAJ) + * [M70 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/6ImvPjWQvbE/AlCtGQnYBQAJ) + * [M69 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/i1Td6qhfKlQ/ryXly46JCwAJ) + * [M68 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/nDdDqIBtFBM/bf_0eknmAwAJ) + * [M67 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/8D5O5NAVzes/QxeMGr0rAwAJ) + * [M66 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/kG4DJSfP2ck/YlI0xyeLAgAJ) + * [M65 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/QJHpBnGQPKk/oKR0pSD-CgAJ) + * [M64 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/fIWg5n67xHo/QIhRnv6vBgAJ) + * [M63 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/qDtSDxoNSII/69b6fAkxAQAJ) + * [M62 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/DFRDTFJmO5g/Sz5zOz-KFQAJ) + * [M61 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/8gJyg8EFPdo/OxUdyMjXBwAJ) + * [M60 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/iw3c5xYXOUw/WF5QxRReBgAJ) + * [M59 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/ogPObezLpHw/hwVgcW57BgAJ) + * [M58 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/13BE3fbHcLU/bQJWNBihBgAJ) + * [M57 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/xXjeKbW_JYI/LIXzVrKWCwAJ) + * [M56 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/DyeVS9IMTLc/1gUM7osoCwAJ) + * [M55 Release Notes](https://groups.google.com/d/msg/discuss-webrtc/BqqFMSR6s1E/rlPYFD0NCQAJ) + * [M54 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/S5yex8rNIjA/discussion) + * [M53 Release 
Notes](https://groups.google.com/d/topic/discuss-webrtc/ism_KD14rzc/discussion) + * [M52 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/bDfxOA8XiJI/discussion) + * [M51 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/GdsmhrVaxdU/discussion) + * [M50 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/JuXLl5BJoJE/discussion) + * [M49 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/mcApW-3YADI/discussion) + * [M48 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/_5hL0HeBeEA/discussion) + * [M47 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/sq5CVmY69sc/discussion) + * [M46 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/uMWoBvCceSg/discussion) + * [M45 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/RZDCCUga1zc/discussion) + * [M44 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/mrLyXc6Y464/discussion) + * [M43 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/KiKykbMGW9w/discussion) + * [M42 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/PwuzgUypYos/discussion) + * [M41 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/aGsdjGtjIQA/discussion) + * [M40 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/vGW4O3QOyLM/discussion) + * [M39 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/Cv4g9jllrSE/discussion) + * [M38 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/ANnsPbI0PWg/discussion) + * [M37 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/Qt99-FXzKkU/discussion) + * [M36 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/JlU2ItCJuZU/discussion) + * [M35 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/8Um1WESQ97g/discussion) + * [M34 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/Feg4ajTp2Gg/discussion) + * [M33 Release 
Notes](https://groups.google.com/d/topic/discuss-webrtc/lAn7IvSIQ_g/discussion) + * [M32 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/AefA5Pg_xIU/discussion) + * [M31 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/0dReVX4BX3c/discussion) + * [M30 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/_zdJBwP4vNU/discussion) + * [M29 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/DytR3rKvmw4/discussion) + * [M28 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/CLa_9sYY6ek/discussion) + * [M27 Release Notes](https://groups.google.com/d/topic/discuss-webrtc/NDwzHExp9zM/discussion) diff --git a/examples/BUILD.gn b/examples/BUILD.gn index 4f76cad185..c2678962d7 100644 --- a/examples/BUILD.gn +++ b/examples/BUILD.gn @@ -27,6 +27,7 @@ group("examples") { ":AppRTCMobile", ":AppRTCMobile_test_apk", ":libwebrtc_unity", + "androidvoip", ] # TODO(sakal): We include some code from the tests. Remove this dependency @@ -44,7 +45,7 @@ group("examples") { deps += [ ":AppRTCMobile" ] } - if (is_linux || is_win) { + if (is_linux || is_chromeos || is_win) { deps += [ ":peerconnection_server", ":stunserver", @@ -87,7 +88,7 @@ if (is_android) { testonly = true apk_name = "AppRTCMobile" android_manifest = "androidapp/AndroidManifest.xml" - min_sdk_version = 16 + min_sdk_version = 21 target_sdk_version = 29 deps = [ @@ -101,7 +102,7 @@ if (is_android) { rtc_android_library("AppRTCMobile_javalib") { testonly = true - android_manifest_for_lint = "androidapp/AndroidManifest.xml" + android_manifest = "androidapp/AndroidManifest.xml" sources = [ "androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java", @@ -129,6 +130,7 @@ if (is_android) { "androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java", ] + resources_package = "org.appspot.apprtc" deps = [ ":AppRTCMobile_resources", "../rtc_base:base_java", @@ -154,14 +156,49 @@ if (is_android) { android_resources("AppRTCMobile_resources") { testonly = true - 
resource_dirs = [ "androidapp/res" ] + sources = [ + "androidapp/res/drawable-hdpi/disconnect.png", + "androidapp/res/drawable-hdpi/ic_action_full_screen.png", + "androidapp/res/drawable-hdpi/ic_action_return_from_full_screen.png", + "androidapp/res/drawable-hdpi/ic_launcher.png", + "androidapp/res/drawable-hdpi/ic_loopback_call.png", + "androidapp/res/drawable-ldpi/disconnect.png", + "androidapp/res/drawable-ldpi/ic_action_full_screen.png", + "androidapp/res/drawable-ldpi/ic_action_return_from_full_screen.png", + "androidapp/res/drawable-ldpi/ic_launcher.png", + "androidapp/res/drawable-ldpi/ic_loopback_call.png", + "androidapp/res/drawable-mdpi/disconnect.png", + "androidapp/res/drawable-mdpi/ic_action_full_screen.png", + "androidapp/res/drawable-mdpi/ic_action_return_from_full_screen.png", + "androidapp/res/drawable-mdpi/ic_launcher.png", + "androidapp/res/drawable-mdpi/ic_loopback_call.png", + "androidapp/res/drawable-xhdpi/disconnect.png", + "androidapp/res/drawable-xhdpi/ic_action_full_screen.png", + "androidapp/res/drawable-xhdpi/ic_action_return_from_full_screen.png", + "androidapp/res/drawable-xhdpi/ic_launcher.png", + "androidapp/res/drawable-xhdpi/ic_loopback_call.png", + "androidapp/res/layout/activity_call.xml", + "androidapp/res/layout/activity_connect.xml", + "androidapp/res/layout/fragment_call.xml", + "androidapp/res/layout/fragment_hud.xml", + "androidapp/res/menu/connect_menu.xml", + "androidapp/res/values-v17/styles.xml", + "androidapp/res/values-v21/styles.xml", + "androidapp/res/values/arrays.xml", + "androidapp/res/values/strings.xml", + "androidapp/res/xml/preferences.xml", + ] + + # Needed for Bazel converter. custom_package = "org.appspot.apprtc" + resource_dirs = [ "androidapp/res" ] + assert(resource_dirs != []) # Mark as used. 
} rtc_instrumentation_test_apk("AppRTCMobile_test_apk") { apk_name = "AppRTCMobileTest" android_manifest = "androidtests/AndroidManifest.xml" - min_sdk_version = 16 + min_sdk_version = 21 target_sdk_version = 21 sources = [ @@ -172,7 +209,11 @@ if (is_android) { deps = [ ":AppRTCMobile_javalib", + "../sdk/android:base_java", + "../sdk/android:camera_java", "../sdk/android:libjingle_peerconnection_java", + "../sdk/android:peerconnection_java", + "../sdk/android:video_api_java", "../sdk/android:video_java", "//third_party/android_support_test_runner:runner_java", "//third_party/junit", @@ -192,13 +233,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "objc/AppRTCMobile/common/ARDUtilities.m", ] public_configs = [ ":apprtc_common_config" ] - - if (is_ios) { - # iOS must use WebRTC.framework which is dynamically linked. - deps = [ "../sdk:framework_objc+link" ] - } else { - deps = [ "../sdk:mac_framework_objc+link" ] - } + deps = [ "../sdk:base_objc" ] } config("apprtc_signaling_config") { @@ -257,18 +292,17 @@ if (is_ios || (is_mac && target_cpu != "x86")) { deps = [ ":apprtc_common", ":socketrocket", + "../sdk:base_objc", + "../sdk:default_codec_factory_objc", + "../sdk:file_logger_objc", + "../sdk:helpers_objc", + "../sdk:mediaconstraints_objc", + "../sdk:peerconnectionfactory_base_objc", + "../sdk:videocapture_objc", + "../sdk:videoframebuffer_objc", + "../sdk:videosource_objc", ] - - if (is_ios) { - # iOS must use WebRTC.framework which is dynamically linked. 
- deps += [ - "../sdk:framework_objc+link", - "../sdk:ios_framework_bundle", - ] - } else { - deps += [ "../sdk:mac_framework_objc+link" ] - } - libs = [ + frameworks = [ "CoreMedia.framework", "QuartzCore.framework", ] @@ -306,11 +340,21 @@ if (is_ios || (is_mac && target_cpu != "x86")) { deps = [ ":apprtc_common", ":apprtc_signaling", - "../sdk:framework_objc+link", - "../sdk:ios_framework_bundle", + "../sdk:audio_session_objc", + "../sdk:base_objc", + "../sdk:helpers_objc", + "../sdk:mediaconstraints_objc", + "../sdk:peerconnectionfactory_base_objc", + "../sdk:peerconnectionfactory_base_objc", + "../sdk:ui_objc", + "../sdk:videocapture_objc", + "../sdk:videocodec_objc", ] + if (rtc_use_metal_rendering) { + deps += [ "../sdk:metal_objc" ] + } - libs = [ "AVFoundation.framework" ] + frameworks = [ "AVFoundation.framework" ] } ios_app_bundle("AppRTCMobile") { @@ -344,14 +388,18 @@ if (is_ios || (is_mac && target_cpu != "x86")) { if (rtc_apprtcmobile_broadcast_extension) { bundle_data("AppRTCMobileBroadcastUpload_extension_bundle") { testonly = true - public_deps = [ ":AppRTCMobileBroadcastUpload" ] # no-presubmit-check TODO(webrtc:8603) + public_deps = [ # no-presubmit-check TODO(webrtc:8603) + ":AppRTCMobileBroadcastUpload", # prevent code format + ] sources = [ "$root_out_dir/AppRTCMobileBroadcastUpload.appex" ] outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ] } bundle_data("AppRTCMobileBroadcastSetupUI_extension_bundle") { testonly = true - public_deps = [ ":AppRTCMobileBroadcastSetupUI" ] # no-presubmit-check TODO(webrtc:8603) + public_deps = [ # no-presubmit-check TODO(webrtc:8603) + ":AppRTCMobileBroadcastSetupUI", # prevent code format + ] sources = [ "$root_out_dir/AppRTCMobileBroadcastSetupUI.appex" ] outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ] } @@ -369,7 +417,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../sdk:ios_framework_bundle", ] - libs = [ "ReplayKit.framework" ] + frameworks = [ 
"ReplayKit.framework" ] } ios_appex_bundle("AppRTCMobileBroadcastUpload") { @@ -393,7 +441,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist" - libs = [ "ReplayKit.framework" ] + frameworks = [ "ReplayKit.framework" ] deps = [ ":AppRTCMobile_ios_bundle_data" ] } @@ -449,6 +497,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../modules/audio_processing:api", "../pc:libjingle_peerconnection", "../rtc_base", + "../rtc_base/synchronization:mutex", "../sdk:base_objc", "../sdk:default_codec_factory_objc", "../sdk:helpers_objc", @@ -493,7 +542,15 @@ if (is_ios || (is_mac && target_cpu != "x86")) { deps = [ ":apprtc_common", ":apprtc_signaling", - "../sdk:mac_framework_objc+link", + "../sdk:base_objc", + "../sdk:helpers_objc", + "../sdk:mediaconstraints_objc", + "../sdk:metal_objc", + "../sdk:peerconnectionfactory_base_objc", + "../sdk:peerconnectionfactory_base_objc", + "../sdk:ui_objc", + "../sdk:videocapture_objc", + "../sdk:videocodec_objc", ] } @@ -507,7 +564,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { info_plist = "objc/AppRTCMobile/mac/Info.plist" - libs = [ "AppKit.framework" ] + frameworks = [ "AppKit.framework" ] ldflags = [ "-rpath", @@ -552,10 +609,10 @@ if (is_ios || (is_mac && target_cpu != "x86")) { configs += [ ":socketrocket_warning_config" ] public_configs = [ ":socketrocket_include_config" ] - libs = [ + libs = [ "icucore" ] + frameworks = [ "CFNetwork.framework", "Security.framework", - "icucore", ] } @@ -578,14 +635,16 @@ if (is_ios || (is_mac && target_cpu != "x86")) { ":AppRTCMobile_lib", ":apprtc_signaling", "../rtc_base", - "../sdk:framework_objc+link", - "../sdk:ios_framework_bundle", + "../sdk:mediaconstraints_objc", + "../sdk:peerconnectionfactory_base_objc", + "../sdk:videocapture_objc", "//build/config/ios:xctest", "//third_party/ocmock", ] } - rtc_ios_xctest_test("apprtcmobile_tests") { + rtc_test("apprtcmobile_tests") { + is_xctest = true 
info_plist = "objc/AppRTCMobile/ios/Info.plist" sources = [ "objc/AppRTCMobile/tests/main.mm" ] deps = [ @@ -600,7 +659,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { } } -if (is_linux || is_win) { +if (is_linux || is_chromeos || is_win) { rtc_executable("peerconnection_client") { testonly = true sources = [ @@ -620,11 +679,12 @@ if (is_linux || is_win) { "../api:scoped_refptr", "../api/audio:audio_mixer_api", "../api/audio_codecs:audio_codecs_api", - "../api/video:video_frame_i420", + "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", "../media:rtc_media_base", "../p2p:rtc_p2p", + "../pc:video_track_source", "../rtc_base:checks", "../rtc_base/third_party/sigslot", "../system_wrappers:field_trial", @@ -642,9 +702,12 @@ if (is_linux || is_win) { "peerconnection/client/main_wnd.h", ] configs += [ "//build/config/win:windowed" ] - deps += [ "../media:rtc_media_base" ] + deps += [ + "../media:rtc_media_base", + "../rtc_base:win32", + ] } - if (is_linux) { + if (is_linux || is_chromeos) { sources += [ "peerconnection/client/linux/main.cc", "peerconnection/client/linux/main_wnd.cc", @@ -772,6 +835,7 @@ if (is_win || is_android) { "../modules/video_capture:video_capture_module", "../pc:libjingle_peerconnection", "../pc:peerconnection", + "../pc:video_track_source", "../rtc_base", "../test:platform_video_capturer", "../test:video_test_common", @@ -794,6 +858,7 @@ if (is_android) { "../sdk/android:camera_java", "../sdk/android:libjingle_peerconnection_java", "../sdk/android:peerconnection_java", + "../sdk/android:video_api_java", "../sdk/android:video_java", "//third_party/android_deps:com_android_support_support_annotations_java", ] @@ -824,6 +889,7 @@ if (is_android) { deps = [ ":AppRTCMobile_javalib", + "../sdk/android:peerconnection_java", "//base:base_java_test_support", "//third_party/google-truth:google_truth_java", ] diff --git a/examples/OWNERS b/examples/OWNERS index 65ae2ffc9c..ff1f425462 100644 --- 
a/examples/OWNERS +++ b/examples/OWNERS @@ -1,10 +1,4 @@ -glaznev@webrtc.org magjed@webrtc.org perkj@webrtc.org tkchin@webrtc.org kthelgason@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/examples/aarproject/app/build.gradle b/examples/aarproject/app/build.gradle index dde0707ace..b4f2acdbdd 100644 --- a/examples/aarproject/app/build.gradle +++ b/examples/aarproject/app/build.gradle @@ -2,10 +2,9 @@ apply plugin: 'com.android.application' android { compileSdkVersion 27 - buildToolsVersion "27.0.1" defaultConfig { applicationId "org.appspot.apprtc" - minSdkVersion 16 + minSdkVersion 21 targetSdkVersion 21 versionCode 1 versionName "1.0" diff --git a/examples/aarproject/build.gradle b/examples/aarproject/build.gradle index 5a016efb7e..6780c439e1 100644 --- a/examples/aarproject/build.gradle +++ b/examples/aarproject/build.gradle @@ -7,7 +7,7 @@ buildscript { jcenter() } dependencies { - classpath 'com.android.tools.build:gradle:3.0.0-beta2' + classpath 'com.android.tools.build:gradle:4.0.0' // NOTE: Do not place your application dependencies here; they belong diff --git a/examples/androidapp/AndroidManifest.xml b/examples/androidapp/AndroidManifest.xml index 8a9035e782..0edefead7f 100644 --- a/examples/androidapp/AndroidManifest.xml +++ b/examples/androidapp/AndroidManifest.xml @@ -8,7 +8,6 @@ - diff --git a/examples/androidapp/OWNERS b/examples/androidapp/OWNERS index ee155665a3..299e8b20ec 100644 --- a/examples/androidapp/OWNERS +++ b/examples/androidapp/OWNERS @@ -1,4 +1,2 @@ magjed@webrtc.org sakal@webrtc.org - -per-file *.py=phoglund@webrtc.org diff --git a/examples/androidapp/res/values/arrays.xml b/examples/androidapp/res/values/arrays.xml index e0e6ccbdc2..bf181550a2 100644 --- a/examples/androidapp/res/values/arrays.xml +++ b/examples/androidapp/res/values/arrays.xml @@ -34,6 +34,7 @@ VP9 H264 
Baseline H264 High + H265 diff --git a/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java b/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java index 7ae3d838dd..c32ab964ad 100644 --- a/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java +++ b/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java @@ -185,8 +185,8 @@ private AppRTCAudioManager(Context context) { // Note that, the sensor will not be active until start() has been called. proximitySensor = AppRTCProximitySensor.create(context, // This method will be called each time a state change is detected. - // Example: user holds his hand over the device (closer than ~5 cm), - // or removes his hand from the device. + // Example: user holds their hand over the device (closer than ~5 cm), + // or removes their hand from the device. this ::onProximitySensorChangedState); Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice); diff --git a/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/examples/androidapp/src/org/appspot/apprtc/CallActivity.java index d55a9704d3..10d2b6eca2 100644 --- a/examples/androidapp/src/org/appspot/apprtc/CallActivity.java +++ b/examples/androidapp/src/org/appspot/apprtc/CallActivity.java @@ -786,7 +786,7 @@ public void run() { } @Override - public void onRemoteDescription(final SessionDescription sdp) { + public void onRemoteDescription(final SessionDescription desc) { final long delta = System.currentTimeMillis() - callStartedTimeMs; runOnUiThread(new Runnable() { @Override @@ -795,8 +795,8 @@ public void run() { Log.e(TAG, "Received remote SDP for non-initilized peer connection."); return; } - logAndToast("Received remote " + sdp.type + ", delay=" + delta + "ms"); - peerConnectionClient.setRemoteDescription(sdp); + logAndToast("Received remote " + desc.type + ", delay=" + delta + "ms"); + peerConnectionClient.setRemoteDescription(desc); if (!signalingParameters.initiator) { logAndToast("Creating ANSWER..."); // 
Create answer. Answer SDP will be sent to offering client in @@ -856,17 +856,17 @@ public void onChannelError(final String description) { // All callbacks are invoked from peer connection client looper thread and // are routed to UI thread. @Override - public void onLocalDescription(final SessionDescription sdp) { + public void onLocalDescription(final SessionDescription desc) { final long delta = System.currentTimeMillis() - callStartedTimeMs; runOnUiThread(new Runnable() { @Override public void run() { if (appRtcClient != null) { - logAndToast("Sending " + sdp.type + ", delay=" + delta + "ms"); + logAndToast("Sending " + desc.type + ", delay=" + delta + "ms"); if (signalingParameters.initiator) { - appRtcClient.sendOfferSdp(sdp); + appRtcClient.sendOfferSdp(desc); } else { - appRtcClient.sendAnswerSdp(sdp); + appRtcClient.sendAnswerSdp(desc); } } if (peerConnectionParameters.videoMaxBitrate > 0) { diff --git a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java index 2817afea01..e37c3c47f3 100644 --- a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java +++ b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java @@ -15,6 +15,7 @@ import android.os.ParcelFileDescriptor; import android.support.annotation.Nullable; import android.util.Log; +import com.piasy.avconf.AudioMixer; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; @@ -94,6 +95,7 @@ public class PeerConnectionClient { private static final String VIDEO_CODEC_H264 = "H264"; private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline"; private static final String VIDEO_CODEC_H264_HIGH = "H264 High"; + private static final String VIDEO_CODEC_H265 = "H265"; private static final String AUDIO_CODEC_OPUS = "opus"; private static final String AUDIO_CODEC_ISAC = "ISAC"; private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate"; @@ 
-152,8 +154,7 @@ public class PeerConnectionClient { @Nullable private List queuedRemoteCandidates; private boolean isInitiator; - @Nullable - private SessionDescription localSdp; // either offer or answer SDP + @Nullable private SessionDescription localDescription; // either offer or answer description @Nullable private VideoCapturer videoCapturer; // enableVideo is set to true if video should be rendered and sent. @@ -843,25 +844,24 @@ public void removeRemoteIceCandidates(final IceCandidate[] candidates) { }); } - public void setRemoteDescription(final SessionDescription sdp) { + public void setRemoteDescription(final SessionDescription desc) { executor.execute(() -> { if (peerConnection == null || isError) { return; } - String sdpDescription = sdp.description; + String sdp = desc.description; if (preferIsac) { - sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true); + sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true); } if (isVideoCallEnabled()) { - sdpDescription = - preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false); + sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false); } if (peerConnectionParameters.audioStartBitrate > 0) { - sdpDescription = setStartBitrate( - AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate); + sdp = setStartBitrate( + AUDIO_CODEC_OPUS, false, sdp, peerConnectionParameters.audioStartBitrate); } Log.d(TAG, "Set remote SDP."); - SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription); + SessionDescription sdpRemote = new SessionDescription(desc.type, sdp); peerConnection.setRemoteDescription(sdpObserver, sdpRemote); }); } @@ -981,6 +981,8 @@ private static String getSdpVideoCodecName(PeerConnectionParameters parameters) case VIDEO_CODEC_H264_HIGH: case VIDEO_CODEC_H264_BASELINE: return VIDEO_CODEC_H264; + case VIDEO_CODEC_H265: + return VIDEO_CODEC_H265; default: return VIDEO_CODEC_VP8; } @@ -1002,8 +1004,8 @@ 
private static String getFieldTrials(PeerConnectionParameters peerConnectionPara @SuppressWarnings("StringSplitter") private static String setStartBitrate( - String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) { - String[] lines = sdpDescription.split("\r\n"); + String codec, boolean isVideoCodec, String sdp, int bitrateKbps) { + String[] lines = sdp.split("\r\n"); int rtpmapLineIndex = -1; boolean sdpFormatUpdated = false; String codecRtpMap = null; @@ -1021,7 +1023,7 @@ private static String setStartBitrate( } if (codecRtpMap == null) { Log.w(TAG, "No rtpmap for " + codec + " codec"); - return sdpDescription; + return sdp; } Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]); @@ -1112,12 +1114,12 @@ private static String joinString( return joinString(newLineParts, " ", false /* delimiterAtEnd */); } - private static String preferCodec(String sdpDescription, String codec, boolean isAudio) { - final String[] lines = sdpDescription.split("\r\n"); + private static String preferCodec(String sdp, String codec, boolean isAudio) { + final String[] lines = sdp.split("\r\n"); final int mLineIndex = findMediaDescriptionLine(isAudio, lines); if (mLineIndex == -1) { Log.w(TAG, "No mediaDescription line, so can't prefer " + codec); - return sdpDescription; + return sdp; } // A list with all the payload types with name |codec|. The payload types are integers in the // range 96-127, but they are stored as strings here. 
@@ -1132,12 +1134,12 @@ private static String preferCodec(String sdpDescription, String codec, boolean i } if (codecPayloadTypes.isEmpty()) { Log.w(TAG, "No payload types with name " + codec); - return sdpDescription; + return sdp; } final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]); if (newMLine == null) { - return sdpDescription; + return sdp; } Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine); lines[mLineIndex] = newMLine; @@ -1169,8 +1171,55 @@ private void switchCameraInternal() { } } + private boolean recording = false; + private AudioMixer mixer; + public void switchCamera() { - executor.execute(this ::switchCameraInternal); + if (false) { + executor.execute(this ::switchCameraInternal); + } else if (false) { + executor.execute(() -> { + if (peerConnection == null) { + return; + } + + recording = !recording; + if (recording) { + peerConnection.startRecorder(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY.ordinal(), "/sdcard/send.mkv"); + } else { + peerConnection.stopRecorder(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY.ordinal()); + } + }); + } else if (true) { + if (mixer != null) { + mixer.stopMixer(); + mixer = null; + } else { + // adb push examples/objc/AppRTCMobile/ios/resources/mozart.mp3 /sdcard/ + mixer = new AudioMixer("/sdcard/mozart.mp3", 48000, 1, 10_000, false, 20, + new AudioMixer.MixerCallback() { + @Override + public void onMixerSsrcFinished(final int ssrc) { + Logging.d(TAG, "onMixerSsrcFinished " + ssrc); + executor.execute(() -> { + mixer.stopMixer(); + mixer = null; + }); + } + + @Override + public void onMixerSsrcError(final int ssrc, final int code) { + Logging.d(TAG, "onMixerSsrcError " + ssrc + " " + code); + executor.execute(() -> { + mixer.stopMixer(); + mixer = null; + }); + } + }); + mixer.startMixer(); + mixer.toggleMusicStreaming(true); + } + } } public void changeCaptureFormat(final int width, final int height, final int framerate) { @@ -1301,25 
+1350,24 @@ public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStre // as well as adding remote ICE candidates once the answer SDP is set. private class SDPObserver implements SdpObserver { @Override - public void onCreateSuccess(final SessionDescription origSdp) { - if (localSdp != null) { + public void onCreateSuccess(final SessionDescription desc) { + if (localDescription != null) { reportError("Multiple SDP create."); return; } - String sdpDescription = origSdp.description; + String sdp = desc.description; if (preferIsac) { - sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true); + sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true); } if (isVideoCallEnabled()) { - sdpDescription = - preferCodec(sdpDescription, getSdpVideoCodecName(peerConnectionParameters), false); + sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false); } - final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription); - localSdp = sdp; + final SessionDescription newDesc = new SessionDescription(desc.type, sdp); + localDescription = newDesc; executor.execute(() -> { if (peerConnection != null && !isError) { - Log.d(TAG, "Set local SDP from " + sdp.type); - peerConnection.setLocalDescription(sdpObserver, sdp); + Log.d(TAG, "Set local SDP from " + desc.type); + peerConnection.setLocalDescription(sdpObserver, newDesc); } }); } @@ -1336,7 +1384,7 @@ public void onSetSuccess() { if (peerConnection.getRemoteDescription() == null) { // We've just set our local SDP so time to send it. Log.d(TAG, "Local SDP set succesfully"); - events.onLocalDescription(localSdp); + events.onLocalDescription(localDescription); } else { // We've just set remote description, so drain remote // and send local ICE candidates. @@ -1350,7 +1398,7 @@ public void onSetSuccess() { // We've just set our local SDP so time to send it, drain // remote and send local ICE candidates. 
Log.d(TAG, "Local SDP set succesfully"); - events.onLocalDescription(localSdp); + events.onLocalDescription(localDescription); drainCandidates(); } else { // We've just set remote SDP - do nothing for now - diff --git a/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py b/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py index 491af38f68..b1cf84611f 100644 --- a/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py +++ b/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py @@ -15,110 +15,113 @@ from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice + def main(): - parser = OptionParser() + parser = OptionParser() - parser.add_option('--devname', dest='devname', help='The device id') + parser.add_option('--devname', dest='devname', help='The device id') - parser.add_option( - '--videooutsave', - dest='videooutsave', - help='The path where to save the video out file on local computer') + parser.add_option( + '--videooutsave', + dest='videooutsave', + help='The path where to save the video out file on local computer') - parser.add_option( - '--videoout', - dest='videoout', - help='The path where to put the video out file') + parser.add_option('--videoout', + dest='videoout', + help='The path where to put the video out file') - parser.add_option( - '--videoout_width', - dest='videoout_width', - type='int', - help='The width for the video out file') + parser.add_option('--videoout_width', + dest='videoout_width', + type='int', + help='The width for the video out file') - parser.add_option( - '--videoout_height', - dest='videoout_height', - type='int', - help='The height for the video out file') + parser.add_option('--videoout_height', + dest='videoout_height', + type='int', + help='The height for the video out file') - parser.add_option( - '--videoin', - dest='videoin', - help='The path where to read input file instead of camera') + parser.add_option( + '--videoin', + dest='videoin', + 
help='The path where to read input file instead of camera') - parser.add_option( - '--call_length', - dest='call_length', - type='int', - help='The length of the call') + parser.add_option('--call_length', + dest='call_length', + type='int', + help='The length of the call') - (options, args) = parser.parse_args() + (options, args) = parser.parse_args() - print (options, args) + print(options, args) - devname = options.devname + devname = options.devname - videoin = options.videoin + videoin = options.videoin - videoout = options.videoout - videoout_width = options.videoout_width - videoout_height = options.videoout_height + videoout = options.videoout + videoout_width = options.videoout_width + videoout_height = options.videoout_height - videooutsave = options.videooutsave + videooutsave = options.videooutsave - call_length = options.call_length or 10 + call_length = options.call_length or 10 - room = ''.join(random.choice(string.ascii_letters + string.digits) - for _ in range(8)) + room = ''.join( + random.choice(string.ascii_letters + string.digits) for _ in range(8)) - # Delete output video file. - if videoout: - subprocess.check_call(['adb', '-s', devname, 'shell', 'rm', '-f', - videoout]) + # Delete output video file. 
+ if videoout: + subprocess.check_call( + ['adb', '-s', devname, 'shell', 'rm', '-f', videoout]) - device = MonkeyRunner.waitForConnection(2, devname) + device = MonkeyRunner.waitForConnection(2, devname) - extras = { - 'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True, - 'org.appspot.apprtc.AUDIOCODEC': 'OPUS', - 'org.appspot.apprtc.LOOPBACK': True, - 'org.appspot.apprtc.VIDEOCODEC': 'VP8', - 'org.appspot.apprtc.CAPTURETOTEXTURE': False, - 'org.appspot.apprtc.CAMERA2': False, - 'org.appspot.apprtc.ROOMID': room} + extras = { + 'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True, + 'org.appspot.apprtc.AUDIOCODEC': 'OPUS', + 'org.appspot.apprtc.LOOPBACK': True, + 'org.appspot.apprtc.VIDEOCODEC': 'VP8', + 'org.appspot.apprtc.CAPTURETOTEXTURE': False, + 'org.appspot.apprtc.CAMERA2': False, + 'org.appspot.apprtc.ROOMID': room + } - if videoin: - extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin}) + if videoin: + extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin}) - if videoout: - extras.update({ - 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE': videoout, - 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH': videoout_width, - 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT': videoout_height}) + if videoout: + extras.update({ + 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE': + videoout, + 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH': + videoout_width, + 'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT': + videoout_height + }) - print extras + print extras - device.startActivity(data='https://appr.tc', - action='android.intent.action.VIEW', - component='org.appspot.apprtc/.ConnectActivity', extras=extras) + device.startActivity(data='https://appr.tc', + action='android.intent.action.VIEW', + component='org.appspot.apprtc/.ConnectActivity', + extras=extras) - print 'Running a call for %d seconds' % call_length - for _ in xrange(call_length): - sys.stdout.write('.') - sys.stdout.flush() - time.sleep(1) - print '\nEnding 
call.' + print 'Running a call for %d seconds' % call_length + for _ in xrange(call_length): + sys.stdout.write('.') + sys.stdout.flush() + time.sleep(1) + print '\nEnding call.' - # Press back to end the call. Will end on both sides. - device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP) + # Press back to end the call. Will end on both sides. + device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP) - if videooutsave: - time.sleep(2) + if videooutsave: + time.sleep(2) - subprocess.check_call(['adb', '-s', devname, 'pull', - videoout, videooutsave]) + subprocess.check_call( + ['adb', '-s', devname, 'pull', videoout, videooutsave]) -if __name__ == '__main__': - main() +if __name__ == '__main__': + main() diff --git a/examples/androidnativeapi/AndroidManifest.xml b/examples/androidnativeapi/AndroidManifest.xml index f10f55a1b6..9257c4132e 100644 --- a/examples/androidnativeapi/AndroidManifest.xml +++ b/examples/androidnativeapi/AndroidManifest.xml @@ -2,7 +2,7 @@ - + diff --git a/examples/androidnativeapi/BUILD.gn b/examples/androidnativeapi/BUILD.gn index 9dd2bf412b..9253c0bcd9 100644 --- a/examples/androidnativeapi/BUILD.gn +++ b/examples/androidnativeapi/BUILD.gn @@ -5,7 +5,7 @@ if (is_android) { testonly = true apk_name = "androidnativeapi" android_manifest = "AndroidManifest.xml" - min_sdk_version = 19 + min_sdk_version = 21 target_sdk_version = 27 sources = [ @@ -16,6 +16,7 @@ if (is_android) { deps = [ ":resources", "//modules/audio_device:audio_device_java", + "//rtc_base:base_java", "//sdk/android:camera_java", "//sdk/android:surfaceviewrenderer_java", "//sdk/android:video_api_java", @@ -47,6 +48,7 @@ if (is_android) { deps = [ ":generated_jni", "../../api:scoped_refptr", + "../../rtc_base/synchronization:mutex", "//api:libjingle_peerconnection_api", "//api/rtc_event_log:rtc_event_log_factory", "//api/task_queue:default_task_queue_factory", @@ -65,7 +67,14 @@ if (is_android) { android_resources("resources") { testonly = true - resource_dirs = [ "res" ] 
custom_package = "org.webrtc.examples.androidnativeapi" + sources = [ + "res/layout/activity_main.xml", + "res/values/strings.xml", + ] + + # Needed for Bazel converter. + resource_dirs = [ "res" ] + assert(resource_dirs != []) # Mark as used. } } diff --git a/examples/androidnativeapi/jni/android_call_client.cc b/examples/androidnativeapi/jni/android_call_client.cc index 03968335d9..f0b060632d 100644 --- a/examples/androidnativeapi/jni/android_call_client.cc +++ b/examples/androidnativeapi/jni/android_call_client.cc @@ -43,7 +43,7 @@ class AndroidCallClient::PCObserver : public webrtc::PeerConnectionObserver { void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override; private: - const AndroidCallClient* client_; + AndroidCallClient* const client_; }; namespace { @@ -88,7 +88,7 @@ void AndroidCallClient::Call(JNIEnv* env, const webrtc::JavaRef& remote_sink) { RTC_DCHECK_RUN_ON(&thread_checker_); - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock lock(&pc_mutex_); if (call_started_) { RTC_LOG(LS_WARNING) << "Call already started."; return; @@ -112,7 +112,7 @@ void AndroidCallClient::Hangup(JNIEnv* env) { call_started_ = false; { - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock lock(&pc_mutex_); if (pc_ != nullptr) { pc_->Close(); pc_ = nullptr; @@ -174,7 +174,7 @@ void AndroidCallClient::CreatePeerConnectionFactory() { } void AndroidCallClient::CreatePeerConnection() { - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock lock(&pc_mutex_); webrtc::PeerConnectionInterface::RTCConfiguration config; config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; // DTLS SRTP has to be disabled for loopback to work. 
@@ -205,7 +205,7 @@ void AndroidCallClient::CreatePeerConnection() { } void AndroidCallClient::Connect() { - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock lock(&pc_mutex_); pc_->CreateOffer(new rtc::RefCountedObject(pc_), webrtc::PeerConnectionInterface::RTCOfferAnswerOptions()); } @@ -240,7 +240,7 @@ void AndroidCallClient::PCObserver::OnIceGatheringChange( void AndroidCallClient::PCObserver::OnIceCandidate( const webrtc::IceCandidateInterface* candidate) { RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url(); - rtc::CritScope lock(&client_->pc_mutex_); + webrtc::MutexLock lock(&client_->pc_mutex_); RTC_DCHECK(client_->pc_ != nullptr); client_->pc_->AddIceCandidate(candidate); } diff --git a/examples/androidnativeapi/jni/android_call_client.h b/examples/androidnativeapi/jni/android_call_client.h index 13992f5960..f3f61a4695 100644 --- a/examples/androidnativeapi/jni/android_call_client.h +++ b/examples/androidnativeapi/jni/android_call_client.h @@ -18,7 +18,7 @@ #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" #include "sdk/android/native_api/jni/scoped_java_ref.h" #include "sdk/android/native_api/video/video_source.h" @@ -66,7 +66,7 @@ class AndroidCallClient { rtc::scoped_refptr video_source_ RTC_GUARDED_BY(thread_checker_); - rtc::CriticalSection pc_mutex_; + webrtc::Mutex pc_mutex_; rtc::scoped_refptr pc_ RTC_GUARDED_BY(pc_mutex_); }; diff --git a/examples/androidtests/AndroidManifest.xml b/examples/androidtests/AndroidManifest.xml index dae2e980a6..8e995366dc 100644 --- a/examples/androidtests/AndroidManifest.xml +++ b/examples/androidtests/AndroidManifest.xml @@ -14,7 +14,7 @@ package="org.appspot.apprtc.test"> - + diff --git a/examples/androidtests/OWNERS b/examples/androidtests/OWNERS index af3196a932..3c4e54174e 100644 --- a/examples/androidtests/OWNERS +++ b/examples/androidtests/OWNERS @@ 
-1,3 +1 @@ sakal@webrtc.org - -per-file *.py=phoglund@webrtc.org diff --git a/examples/androidtests/gradle_project_test.py b/examples/androidtests/gradle_project_test.py index 7db5797ef2..097232d07f 100644 --- a/examples/androidtests/gradle_project_test.py +++ b/examples/androidtests/gradle_project_test.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """ This scripts tests creating an Android Studio project using the generate_gradle.py script and making a debug build using it. @@ -23,58 +22,59 @@ import sys import tempfile - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir)) -GENERATE_GRADLE_SCRIPT = os.path.join(SRC_DIR, - 'build/android/gradle/generate_gradle.py') +GENERATE_GRADLE_SCRIPT = os.path.join( + SRC_DIR, 'build/android/gradle/generate_gradle.py') GRADLEW_BIN = os.path.join(SCRIPT_DIR, 'third_party/gradle/gradlew') def _RunCommand(argv, cwd=SRC_DIR, **kwargs): - logging.info('Running %r', argv) - subprocess.check_call(argv, cwd=cwd, **kwargs) + logging.info('Running %r', argv) + subprocess.check_call(argv, cwd=cwd, **kwargs) def _ParseArgs(): - parser = argparse.ArgumentParser( - description='Test generating Android gradle project.') - parser.add_argument('build_dir_android', - help='The path to the build directory for Android.') - parser.add_argument('--project_dir', - help='A temporary directory to put the output.') + parser = argparse.ArgumentParser( + description='Test generating Android gradle project.') + parser.add_argument('build_dir_android', + help='The path to the build directory for Android.') + parser.add_argument('--project_dir', + help='A temporary directory to put the output.') - args = parser.parse_args() - return args + args = parser.parse_args() + return args def main(): - 
logging.basicConfig(level=logging.INFO) - args = _ParseArgs() - - project_dir = args.project_dir - if not project_dir: - project_dir = tempfile.mkdtemp() - - output_dir = os.path.abspath(args.build_dir_android) - project_dir = os.path.abspath(project_dir) - - try: - env = os.environ.copy() - env['PATH'] = os.pathsep.join([ - os.path.join(SRC_DIR, 'third_party', 'depot_tools'), env.get('PATH', '') - ]) - _RunCommand([GENERATE_GRADLE_SCRIPT, '--output-directory', output_dir, - '--target', '//examples:AppRTCMobile', - '--project-dir', project_dir, - '--use-gradle-process-resources', '--split-projects'], - env=env) - _RunCommand([GRADLEW_BIN, 'assembleDebug'], project_dir) - finally: - # Do not delete temporary directory if user specified it manually. - if not args.project_dir: - shutil.rmtree(project_dir, True) + logging.basicConfig(level=logging.INFO) + args = _ParseArgs() + + project_dir = args.project_dir + if not project_dir: + project_dir = tempfile.mkdtemp() + + output_dir = os.path.abspath(args.build_dir_android) + project_dir = os.path.abspath(project_dir) + + try: + env = os.environ.copy() + env['PATH'] = os.pathsep.join([ + os.path.join(SRC_DIR, 'third_party', 'depot_tools'), + env.get('PATH', '') + ]) + _RunCommand([ + GENERATE_GRADLE_SCRIPT, '--output-directory', output_dir, + '--target', '//examples:AppRTCMobile', '--project-dir', + project_dir, '--use-gradle-process-resources', '--split-projects' + ], + env=env) + _RunCommand([GRADLEW_BIN, 'assembleDebug'], project_dir) + finally: + # Do not delete temporary directory if user specified it manually. 
+ if not args.project_dir: + shutil.rmtree(project_dir, True) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java b/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java index 96a4178eec..99e0ff6531 100644 --- a/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java +++ b/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java @@ -77,9 +77,9 @@ public class PeerConnectionClientTest implements PeerConnectionEvents { private ExecutorService signalingExecutor; private boolean isClosed; private boolean isIceConnected; - private SessionDescription localSdp; + private SessionDescription localDesc; private List iceCandidates = new ArrayList<>(); - private final Object localSdpEvent = new Object(); + private final Object localDescEvent = new Object(); private final Object iceCandidateEvent = new Object(); private final Object iceConnectedEvent = new Object(); private final Object closeEvent = new Object(); @@ -133,11 +133,11 @@ public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException // Peer connection events implementation. @Override - public void onLocalDescription(SessionDescription sdp) { - Log.d(TAG, "LocalSDP type: " + sdp.type); - synchronized (localSdpEvent) { - localSdp = sdp; - localSdpEvent.notifyAll(); + public void onLocalDescription(SessionDescription desc) { + Log.d(TAG, "Local description type: " + desc.type); + synchronized (localDescEvent) { + localDesc = desc; + localDescEvent.notifyAll(); } } @@ -211,15 +211,15 @@ public void onPeerConnectionError(String description) { public void onPeerConnectionStatsReady(StatsReport[] reports) {} // Helper wait functions. 
- private boolean waitForLocalSDP(int timeoutMs) throws InterruptedException { - synchronized (localSdpEvent) { + private boolean waitForLocalDescription(int timeoutMs) throws InterruptedException { + synchronized (localDescEvent) { final long endTimeMs = System.currentTimeMillis() + timeoutMs; - while (localSdp == null) { + while (localDesc == null) { final long waitTimeMs = endTimeMs - System.currentTimeMillis(); if (waitTimeMs < 0) { return false; } - localSdpEvent.wait(waitTimeMs); + localDescEvent.wait(waitTimeMs); } return true; } @@ -369,8 +369,8 @@ public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException createParametersForVideoCall(VIDEO_CODEC_VP8), createCameraCapturer(false /* captureToTexture */)); - // Wait for local SDP and ice candidates set events. - assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); + // Wait for local description and ice candidates set events. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT)); // Check that local video frames were rendered. @@ -397,11 +397,11 @@ private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer v } pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer); - // Wait for local SDP, rename it to answer and set as remote SDP. - assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); - SessionDescription remoteSdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); - pcClient.setRemoteDescription(remoteSdp); + // Wait for local description, change type to answer and set as remote description. 
+ assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); // Wait for ICE connection. assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); @@ -520,11 +520,11 @@ public void testCameraSwitch() throws InterruptedException { createParametersForVideoCall(VIDEO_CODEC_VP8), createCameraCapturer(false /* captureToTexture */)); - // Wait for local SDP, rename it to answer and set as remote SDP. - assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); - SessionDescription remoteSdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); - pcClient.setRemoteDescription(remoteSdp); + // Wait for local description, set type to answer and set as remote description. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); // Wait for ICE connection. assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); @@ -568,11 +568,11 @@ public void testVideoSourceRestart() throws InterruptedException { createParametersForVideoCall(VIDEO_CODEC_VP8), createCameraCapturer(false /* captureToTexture */)); - // Wait for local SDP, rename it to answer and set as remote SDP. - assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); - SessionDescription remoteSdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); - pcClient.setRemoteDescription(remoteSdp); + // Wait for local description, set type to answer and set as remote description. 
+ assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); // Wait for ICE connection. assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); @@ -617,11 +617,11 @@ public void testCaptureFormatChange() throws InterruptedException { createParametersForVideoCall(VIDEO_CODEC_VP8), createCameraCapturer(false /* captureToTexture */)); - // Wait for local SDP, rename it to answer and set as remote SDP. - assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT)); - SessionDescription remoteSdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description); - pcClient.setRemoteDescription(remoteSdp); + // Wait for local description, set type to answer and set as remote description. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); // Wait for ICE connection. assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); diff --git a/examples/androidvoip/AndroidManifest.xml b/examples/androidvoip/AndroidManifest.xml new file mode 100644 index 0000000000..106f71171d --- /dev/null +++ b/examples/androidvoip/AndroidManifest.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/androidvoip/BUILD.gn b/examples/androidvoip/BUILD.gn new file mode 100644 index 0000000000..649e601519 --- /dev/null +++ b/examples/androidvoip/BUILD.gn @@ -0,0 +1,89 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("//webrtc.gni") + +if (is_android) { + rtc_android_apk("androidvoip") { + testonly = true + apk_name = "androidvoip" + android_manifest = "AndroidManifest.xml" + min_sdk_version = 21 + target_sdk_version = 27 + + sources = [ + "java/org/webrtc/examples/androidvoip/MainActivity.java", + "java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java", + "java/org/webrtc/examples/androidvoip/VoipClient.java", + ] + + deps = [ + ":resources", + "//modules/audio_device:audio_device_java", + "//rtc_base:base_java", + "//sdk/android:base_java", + "//sdk/android:java_audio_device_module_java", + "//sdk/android:video_java", + "//third_party/android_deps:androidx_core_core_java", + "//third_party/android_deps:androidx_legacy_legacy_support_v4_java", + ] + + shared_libraries = [ ":examples_androidvoip_jni" ] + } + + generate_jni("generated_jni") { + testonly = true + sources = [ "java/org/webrtc/examples/androidvoip/VoipClient.java" ] + namespace = "webrtc_examples" + jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h" + } + + rtc_shared_library("examples_androidvoip_jni") { + testonly = true + sources = [ + "jni/android_voip_client.cc", + "jni/android_voip_client.h", + "jni/onload.cc", + ] + + suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ] + configs += [ "//build/config/android:hide_all_but_jni" ] + + deps = [ + ":generated_jni", + "//api:transport_api", + "//api/audio_codecs:audio_codecs_api", + "//api/audio_codecs:builtin_audio_decoder_factory", + "//api/audio_codecs:builtin_audio_encoder_factory", + "//api/task_queue:default_task_queue_factory", + "//api/voip:voip_api", + 
"//api/voip:voip_engine_factory", + "//modules/utility:utility", + "//rtc_base", + "//rtc_base/third_party/sigslot:sigslot", + "//sdk/android:native_api_audio_device_module", + "//sdk/android:native_api_base", + "//sdk/android:native_api_jni", + "//third_party/abseil-cpp/absl/memory:memory", + ] + } + + android_resources("resources") { + testonly = true + custom_package = "org.webrtc.examples.androidvoip" + sources = [ + "res/layout/activity_main.xml", + "res/values/colors.xml", + "res/values/strings.xml", + ] + + # Needed for Bazel converter. + resource_dirs = [ "res" ] + assert(resource_dirs != []) # Mark as used. + } +} diff --git a/examples/androidvoip/DEPS b/examples/androidvoip/DEPS new file mode 100644 index 0000000000..edb714dd44 --- /dev/null +++ b/examples/androidvoip/DEPS @@ -0,0 +1,3 @@ +include_rules = [ + "+sdk/android/native_api", +] diff --git a/examples/androidvoip/OWNERS b/examples/androidvoip/OWNERS new file mode 100644 index 0000000000..0fe5182450 --- /dev/null +++ b/examples/androidvoip/OWNERS @@ -0,0 +1,2 @@ +natim@webrtc.org +sakal@webrtc.org diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java new file mode 100644 index 0000000000..d06d6adf0d --- /dev/null +++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java @@ -0,0 +1,341 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.examples.androidvoip; + +import android.Manifest.permission; +import android.app.Activity; +import android.app.AlertDialog; +import android.content.Context; +import android.content.pm.PackageManager; +import android.os.Bundle; +import android.view.Gravity; +import android.view.View; +import android.widget.AdapterView; +import android.widget.ArrayAdapter; +import android.widget.Button; +import android.widget.EditText; +import android.widget.RelativeLayout; +import android.widget.ScrollView; +import android.widget.Spinner; +import android.widget.Switch; +import android.widget.TextView; +import android.widget.Toast; +import android.widget.ToggleButton; +import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import org.webrtc.ContextUtils; + +public class MainActivity extends Activity implements OnVoipClientTaskCompleted { + private static final int NUM_SUPPORTED_CODECS = 6; + + private VoipClient voipClient; + private List supportedCodecs; + private boolean[] isDecoderSelected; + private Set selectedDecoders; + + private Toast toast; + private ScrollView scrollView; + private TextView localIPAddressTextView; + private EditText localPortNumberEditText; + private EditText remoteIPAddressEditText; + private EditText remotePortNumberEditText; + private Spinner encoderSpinner; + private Button decoderSelectionButton; + private TextView decodersTextView; + private ToggleButton sessionButton; + private RelativeLayout switchLayout; + private Switch sendSwitch; + private Switch playoutSwitch; + + @Override + protected void onCreate(Bundle savedInstance) { + ContextUtils.initialize(getApplicationContext()); + + super.onCreate(savedInstance); + setContentView(R.layout.activity_main); + + System.loadLibrary("examples_androidvoip_jni"); + + voipClient = new 
VoipClient(getApplicationContext(), this); + voipClient.getAndSetUpLocalIPAddress(); + voipClient.getAndSetUpSupportedCodecs(); + + isDecoderSelected = new boolean[NUM_SUPPORTED_CODECS]; + selectedDecoders = new HashSet<>(); + + toast = Toast.makeText(this, "", Toast.LENGTH_SHORT); + + scrollView = (ScrollView) findViewById(R.id.scroll_view); + localIPAddressTextView = (TextView) findViewById(R.id.local_ip_address_text_view); + localPortNumberEditText = (EditText) findViewById(R.id.local_port_number_edit_text); + remoteIPAddressEditText = (EditText) findViewById(R.id.remote_ip_address_edit_text); + remotePortNumberEditText = (EditText) findViewById(R.id.remote_port_number_edit_text); + encoderSpinner = (Spinner) findViewById(R.id.encoder_spinner); + decoderSelectionButton = (Button) findViewById(R.id.decoder_selection_button); + decodersTextView = (TextView) findViewById(R.id.decoders_text_view); + sessionButton = (ToggleButton) findViewById(R.id.session_button); + switchLayout = (RelativeLayout) findViewById(R.id.switch_layout); + sendSwitch = (Switch) findViewById(R.id.start_send_switch); + playoutSwitch = (Switch) findViewById(R.id.start_playout_switch); + + setUpSessionButton(); + setUpSendAndPlayoutSwitch(); + } + + private void setUpEncoderSpinner(List supportedCodecs) { + ArrayAdapter encoderAdapter = + new ArrayAdapter(this, android.R.layout.simple_spinner_item, supportedCodecs); + encoderAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); + encoderSpinner.setAdapter(encoderAdapter); + encoderSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { + @Override + public void onItemSelected(AdapterView parent, View view, int position, long id) { + voipClient.setEncoder((String) parent.getSelectedItem()); + } + @Override + public void onNothingSelected(AdapterView parent) {} + }); + } + + private List getSelectedDecoders() { + List decoders = new ArrayList<>(); + for (int i = 0; i < supportedCodecs.size(); i++) { 
+ if (selectedDecoders.contains(i)) { + decoders.add(supportedCodecs.get(i)); + } + } + return decoders; + } + + private void setUpDecoderSelectionButton(List supportedCodecs) { + decoderSelectionButton.setOnClickListener((view) -> { + AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(this); + dialogBuilder.setTitle(R.string.dialog_title); + + // Populate multi choice items with supported decoders. + String[] supportedCodecsArray = supportedCodecs.toArray(new String[0]); + dialogBuilder.setMultiChoiceItems( + supportedCodecsArray, isDecoderSelected, (dialog, position, isChecked) -> { + if (isChecked) { + selectedDecoders.add(position); + } else if (!isChecked) { + selectedDecoders.remove(position); + } + }); + + // "Ok" button. + dialogBuilder.setPositiveButton(R.string.ok_label, (dialog, position) -> { + List decoders = getSelectedDecoders(); + String result = decoders.stream().collect(Collectors.joining(", ")); + if (result.isEmpty()) { + decodersTextView.setText(R.string.decoders_text_view_default); + } else { + decodersTextView.setText(result); + } + voipClient.setDecoders(decoders); + }); + + // "Dismiss" button. + dialogBuilder.setNegativeButton( + R.string.dismiss_label, (dialog, position) -> { dialog.dismiss(); }); + + // "Clear All" button. + dialogBuilder.setNeutralButton(R.string.clear_all_label, (dialog, position) -> { + Arrays.fill(isDecoderSelected, false); + selectedDecoders.clear(); + decodersTextView.setText(R.string.decoders_text_view_default); + }); + + AlertDialog dialog = dialogBuilder.create(); + dialog.show(); + }); + } + + private void setUpSessionButton() { + sessionButton.setOnCheckedChangeListener((button, isChecked) -> { + // Ask for permission on RECORD_AUDIO if not granted. 
+ if (ContextCompat.checkSelfPermission(this, permission.RECORD_AUDIO) + != PackageManager.PERMISSION_GRANTED) { + String[] sList = {permission.RECORD_AUDIO}; + ActivityCompat.requestPermissions(this, sList, 1); + } + + if (isChecked) { + // Order matters here, addresses have to be set before starting session + // before setting codec. + voipClient.setLocalAddress(localIPAddressTextView.getText().toString(), + Integer.parseInt(localPortNumberEditText.getText().toString())); + voipClient.setRemoteAddress(remoteIPAddressEditText.getText().toString(), + Integer.parseInt(remotePortNumberEditText.getText().toString())); + voipClient.startSession(); + voipClient.setEncoder((String) encoderSpinner.getSelectedItem()); + voipClient.setDecoders(getSelectedDecoders()); + } else { + voipClient.stopSession(); + } + }); + } + + private void setUpSendAndPlayoutSwitch() { + sendSwitch.setOnCheckedChangeListener((button, isChecked) -> { + if (isChecked) { + voipClient.startSend(); + } else { + voipClient.stopSend(); + } + }); + + playoutSwitch.setOnCheckedChangeListener((button, isChecked) -> { + if (isChecked) { + voipClient.startPlayout(); + } else { + voipClient.stopPlayout(); + } + }); + } + + private void setUpIPAddressEditTexts(String localIPAddress) { + if (localIPAddress.isEmpty()) { + showToast("Please check your network configuration"); + } else { + localIPAddressTextView.setText(localIPAddress); + // By default remote IP address is the same as local IP address. 
+ remoteIPAddressEditText.setText(localIPAddress); + } + } + + private void showToast(String message) { + if (toast != null) { + toast.cancel(); + toast = Toast.makeText(this, message, Toast.LENGTH_SHORT); + toast.setGravity(Gravity.TOP, 0, 200); + toast.show(); + } + } + + @Override + protected void onDestroy() { + voipClient.close(); + voipClient = null; + + super.onDestroy(); + } + + @Override + public void onGetLocalIPAddressCompleted(String localIPAddress) { + runOnUiThread(() -> { setUpIPAddressEditTexts(localIPAddress); }); + } + + @Override + public void onGetSupportedCodecsCompleted(List supportedCodecs) { + runOnUiThread(() -> { + this.supportedCodecs = supportedCodecs; + setUpEncoderSpinner(supportedCodecs); + setUpDecoderSelectionButton(supportedCodecs); + }); + } + + @Override + public void onVoipClientInitializationCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (!isSuccessful) { + showToast("Error initializing audio device"); + } + }); + } + + @Override + public void onStartSessionCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Session started"); + switchLayout.setVisibility(View.VISIBLE); + scrollView.post(() -> { scrollView.fullScroll(ScrollView.FOCUS_DOWN); }); + } else { + showToast("Failed to start session"); + } + }); + } + + @Override + public void onStopSessionCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Session stopped"); + // Set listeners to null so the checked state can be changed programmatically. + sendSwitch.setOnCheckedChangeListener(null); + playoutSwitch.setOnCheckedChangeListener(null); + sendSwitch.setChecked(false); + playoutSwitch.setChecked(false); + // Redo the switch listener setup. 
+ setUpSendAndPlayoutSwitch(); + switchLayout.setVisibility(View.GONE); + } else { + showToast("Failed to stop session"); + } + }); + } + + @Override + public void onStartSendCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Started sending"); + } else { + showToast("Error initializing microphone"); + } + }); + } + + @Override + public void onStopSendCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Stopped sending"); + } else { + showToast("Microphone termination failed"); + } + }); + } + + @Override + public void onStartPlayoutCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Started playout"); + } else { + showToast("Error initializing speaker"); + } + }); + } + + @Override + public void onStopPlayoutCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Stopped playout"); + } else { + showToast("Speaker termination failed"); + } + }); + } + + @Override + public void onUninitializedVoipClient() { + runOnUiThread(() -> { showToast("Voip client is uninitialized"); }); + } +} diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java new file mode 100644 index 0000000000..bb85e048bb --- /dev/null +++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.examples.androidvoip; + +import java.util.List; + +public interface OnVoipClientTaskCompleted { + void onGetLocalIPAddressCompleted(String localIPAddress); + void onGetSupportedCodecsCompleted(List supportedCodecs); + void onVoipClientInitializationCompleted(boolean isSuccessful); + void onStartSessionCompleted(boolean isSuccessful); + void onStopSessionCompleted(boolean isSuccessful); + void onStartSendCompleted(boolean isSuccessful); + void onStopSendCompleted(boolean isSuccessful); + void onStartPlayoutCompleted(boolean isSuccessful); + void onStopPlayoutCompleted(boolean isSuccessful); + void onUninitializedVoipClient(); +} diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java new file mode 100644 index 0000000000..69a993d344 --- /dev/null +++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.examples.androidvoip; + +import android.content.Context; +import android.os.Handler; +import android.os.HandlerThread; +import java.util.ArrayList; +import java.util.List; +import org.webrtc.CalledByNative; + +public class VoipClient { + private long nativeClient; + private OnVoipClientTaskCompleted listener; + + public VoipClient(Context applicationContext, OnVoipClientTaskCompleted listener) { + this.listener = listener; + nativeClient = nativeCreateClient(applicationContext, this); + } + + private boolean isInitialized() { + return nativeClient != 0; + } + + public void getAndSetUpSupportedCodecs() { + if (isInitialized()) { + nativeGetSupportedCodecs(nativeClient); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void getAndSetUpLocalIPAddress() { + if (isInitialized()) { + nativeGetLocalIPAddress(nativeClient); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void setEncoder(String encoder) { + if (isInitialized()) { + nativeSetEncoder(nativeClient, encoder); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void setDecoders(List decoders) { + if (isInitialized()) { + nativeSetDecoders(nativeClient, decoders); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void setLocalAddress(String ipAddress, int portNumber) { + if (isInitialized()) { + nativeSetLocalAddress(nativeClient, ipAddress, portNumber); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void setRemoteAddress(String ipAddress, int portNumber) { + if (isInitialized()) { + nativeSetRemoteAddress(nativeClient, ipAddress, portNumber); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void startSession() { + if (isInitialized()) { + nativeStartSession(nativeClient); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void stopSession() { + if (isInitialized()) { + nativeStopSession(nativeClient); + } else { + 
listener.onUninitializedVoipClient(); + } + } + + public void startSend() { + if (isInitialized()) { + nativeStartSend(nativeClient); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void stopSend() { + if (isInitialized()) { + nativeStopSend(nativeClient); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void startPlayout() { + if (isInitialized()) { + nativeStartPlayout(nativeClient); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void stopPlayout() { + if (isInitialized()) { + nativeStopPlayout(nativeClient); + } else { + listener.onUninitializedVoipClient(); + } + } + + public void close() { + nativeDelete(nativeClient); + nativeClient = 0; + } + + @CalledByNative + public void onGetLocalIPAddressCompleted(String localIPAddress) { + listener.onGetLocalIPAddressCompleted(localIPAddress); + } + + @CalledByNative + public void onGetSupportedCodecsCompleted(List supportedCodecs) { + listener.onGetSupportedCodecsCompleted(supportedCodecs); + } + + @CalledByNative + public void onStartSessionCompleted(boolean isSuccessful) { + listener.onStartSessionCompleted(isSuccessful); + } + + @CalledByNative + public void onStopSessionCompleted(boolean isSuccessful) { + listener.onStopSessionCompleted(isSuccessful); + } + + @CalledByNative + public void onStartSendCompleted(boolean isSuccessful) { + listener.onStartSendCompleted(isSuccessful); + } + + @CalledByNative + public void onStopSendCompleted(boolean isSuccessful) { + listener.onStopSendCompleted(isSuccessful); + } + + @CalledByNative + public void onStartPlayoutCompleted(boolean isSuccessful) { + listener.onStartPlayoutCompleted(isSuccessful); + } + + @CalledByNative + public void onStopPlayoutCompleted(boolean isSuccessful) { + listener.onStopPlayoutCompleted(isSuccessful); + } + + private static native long nativeCreateClient( + Context applicationContext, VoipClient javaVoipClient); + private static native void nativeGetSupportedCodecs(long 
nativeAndroidVoipClient); + private static native void nativeGetLocalIPAddress(long nativeAndroidVoipClient); + private static native void nativeSetEncoder(long nativeAndroidVoipClient, String encoder); + private static native void nativeSetDecoders(long nativeAndroidVoipClient, List decoders); + private static native void nativeSetLocalAddress( + long nativeAndroidVoipClient, String ipAddress, int portNumber); + private static native void nativeSetRemoteAddress( + long nativeAndroidVoipClient, String ipAddress, int portNumber); + private static native void nativeStartSession(long nativeAndroidVoipClient); + private static native void nativeStopSession(long nativeAndroidVoipClient); + private static native void nativeStartSend(long nativeAndroidVoipClient); + private static native void nativeStopSend(long nativeAndroidVoipClient); + private static native void nativeStartPlayout(long nativeAndroidVoipClient); + private static native void nativeStopPlayout(long nativeAndroidVoipClient); + private static native void nativeDelete(long nativeAndroidVoipClient); +} diff --git a/examples/androidvoip/jni/android_voip_client.cc b/examples/androidvoip/jni/android_voip_client.cc new file mode 100644 index 0000000000..2ad95bcf8d --- /dev/null +++ b/examples/androidvoip/jni/android_voip_client.cc @@ -0,0 +1,518 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "examples/androidvoip/jni/android_voip_client.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/voip/voip_codec.h" +#include "api/voip/voip_engine_factory.h" +#include "api/voip/voip_network.h" +#include "examples/androidvoip/generated_jni/VoipClient_jni.h" +#include "rtc_base/logging.h" +#include "rtc_base/network.h" +#include "rtc_base/socket_server.h" +#include "sdk/android/native_api/audio_device_module/audio_device_android.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/native_api/jni/jvm.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace { + +#define RUN_ON_VOIP_THREAD(method, ...) \ + if (!voip_thread_->IsCurrent()) { \ + voip_thread_->PostTask( \ + RTC_FROM_HERE, \ + std::bind(&AndroidVoipClient::method, this, ##__VA_ARGS__)); \ + return; \ + } \ + RTC_DCHECK_RUN_ON(voip_thread_.get()); + +// Connects a UDP socket to a public address and returns the local +// address associated with it. Since it binds to the "any" address +// internally, it returns the default local address on a multi-homed +// endpoint. Implementation copied from +// BasicNetworkManager::QueryDefaultLocalAddress. 
+rtc::IPAddress QueryDefaultLocalAddress(int family) { + const char kPublicIPv4Host[] = "8.8.8.8"; + const char kPublicIPv6Host[] = "2001:4860:4860::8888"; + const int kPublicPort = 53; + std::unique_ptr thread = rtc::Thread::CreateWithSocketServer(); + + RTC_DCHECK(thread->socketserver() != nullptr); + RTC_DCHECK(family == AF_INET || family == AF_INET6); + + std::unique_ptr socket( + thread->socketserver()->CreateAsyncSocket(family, SOCK_DGRAM)); + if (!socket) { + RTC_LOG_ERR(LERROR) << "Socket creation failed"; + return rtc::IPAddress(); + } + + auto host = family == AF_INET ? kPublicIPv4Host : kPublicIPv6Host; + if (socket->Connect(rtc::SocketAddress(host, kPublicPort)) < 0) { + if (socket->GetError() != ENETUNREACH && + socket->GetError() != EHOSTUNREACH) { + RTC_LOG(LS_INFO) << "Connect failed with " << socket->GetError(); + } + return rtc::IPAddress(); + } + return socket->GetLocalAddress().ipaddr(); +} + +// Assigned payload type for supported built-in codecs. PCMU, PCMA, +// and G722 have set payload types. Whereas opus, ISAC, and ILBC +// have dynamic payload types. +enum class PayloadType : int { + kPcmu = 0, + kPcma = 8, + kG722 = 9, + kOpus = 96, + kIsac = 97, + kIlbc = 98, +}; + +// Returns the payload type corresponding to codec_name. Only +// supports the built-in codecs. 
+int GetPayloadType(const std::string& codec_name) { + RTC_DCHECK(codec_name == "PCMU" || codec_name == "PCMA" || + codec_name == "G722" || codec_name == "opus" || + codec_name == "ISAC" || codec_name == "ILBC"); + + if (codec_name == "PCMU") { + return static_cast(PayloadType::kPcmu); + } else if (codec_name == "PCMA") { + return static_cast(PayloadType::kPcma); + } else if (codec_name == "G722") { + return static_cast(PayloadType::kG722); + } else if (codec_name == "opus") { + return static_cast(PayloadType::kOpus); + } else if (codec_name == "ISAC") { + return static_cast(PayloadType::kIsac); + } else if (codec_name == "ILBC") { + return static_cast(PayloadType::kIlbc); + } + + RTC_NOTREACHED(); + return -1; +} + +} // namespace + +namespace webrtc_examples { + +bool AndroidVoipClient::Init( + JNIEnv* env, + const webrtc::JavaParamRef& application_context) { + webrtc::VoipEngineConfig config; + config.encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); + config.decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); + config.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); + config.audio_device_module = + webrtc::CreateJavaAudioDeviceModule(env, application_context.obj()); + config.audio_processing = webrtc::AudioProcessingBuilder().Create(); + + voip_thread_->Start(); + // Due to consistent thread requirement on + // modules/audio_device/android/audio_device_template.h, + // code is invoked in the context of voip_thread_. 
+ return voip_thread_->Invoke(RTC_FROM_HERE, [this, &config] { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + supported_codecs_ = config.encoder_factory->GetSupportedEncoders(); + env_ = webrtc::AttachCurrentThreadIfNeeded(); + voip_engine_ = webrtc::CreateVoipEngine(std::move(config)); + if (!voip_engine_) { + RTC_LOG(LS_ERROR) << "VoipEngine creation failed"; + return false; + } + return true; + }); +} + +AndroidVoipClient::~AndroidVoipClient() { + voip_thread_->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + JavaVM* jvm = nullptr; + env_->GetJavaVM(&jvm); + if (!jvm) { + RTC_LOG(LS_ERROR) << "Failed to retrieve JVM"; + return; + } + jint res = jvm->DetachCurrentThread(); + if (res != JNI_OK) { + RTC_LOG(LS_ERROR) << "DetachCurrentThread failed: " << res; + } + }); + + voip_thread_->Stop(); +} + +AndroidVoipClient* AndroidVoipClient::Create( + JNIEnv* env, + const webrtc::JavaParamRef& application_context, + const webrtc::JavaParamRef& j_voip_client) { + // Using `new` to access a non-public constructor. 
+ auto voip_client = + absl::WrapUnique(new AndroidVoipClient(env, j_voip_client)); + if (!voip_client->Init(env, application_context)) { + return nullptr; + } + return voip_client.release(); +} + +void AndroidVoipClient::GetSupportedCodecs(JNIEnv* env) { + RUN_ON_VOIP_THREAD(GetSupportedCodecs, env); + + std::vector names; + for (const webrtc::AudioCodecSpec& spec : supported_codecs_) { + names.push_back(spec.format.name); + } + webrtc::ScopedJavaLocalRef (*convert_function)( + JNIEnv*, const std::string&) = &webrtc::NativeToJavaString; + Java_VoipClient_onGetSupportedCodecsCompleted( + env_, j_voip_client_, NativeToJavaList(env_, names, convert_function)); +} + +void AndroidVoipClient::GetLocalIPAddress(JNIEnv* env) { + RUN_ON_VOIP_THREAD(GetLocalIPAddress, env); + + std::string local_ip_address; + rtc::IPAddress ipv4_address = QueryDefaultLocalAddress(AF_INET); + if (!ipv4_address.IsNil()) { + local_ip_address = ipv4_address.ToString(); + } else { + rtc::IPAddress ipv6_address = QueryDefaultLocalAddress(AF_INET6); + if (!ipv6_address.IsNil()) { + local_ip_address = ipv6_address.ToString(); + } + } + Java_VoipClient_onGetLocalIPAddressCompleted( + env_, j_voip_client_, webrtc::NativeToJavaString(env_, local_ip_address)); +} + +void AndroidVoipClient::SetEncoder(const std::string& encoder) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return; + } + for (const webrtc::AudioCodecSpec& codec : supported_codecs_) { + if (codec.format.name == encoder) { + voip_engine_->Codec().SetSendCodec( + *channel_, GetPayloadType(codec.format.name), codec.format); + return; + } + } +} + +void AndroidVoipClient::SetEncoder( + JNIEnv* env, + const webrtc::JavaParamRef& j_encoder_string) { + const std::string& chosen_encoder = + webrtc::JavaToNativeString(env, j_encoder_string); + voip_thread_->PostTask( + RTC_FROM_HERE, [this, chosen_encoder] { SetEncoder(chosen_encoder); }); +} + +void 
AndroidVoipClient::SetDecoders(const std::vector& decoders) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return; + } + std::map decoder_specs; + for (const webrtc::AudioCodecSpec& codec : supported_codecs_) { + if (std::find(decoders.begin(), decoders.end(), codec.format.name) != + decoders.end()) { + decoder_specs.insert({GetPayloadType(codec.format.name), codec.format}); + } + } + + voip_engine_->Codec().SetReceiveCodecs(*channel_, decoder_specs); +} + +void AndroidVoipClient::SetDecoders( + JNIEnv* env, + const webrtc::JavaParamRef& j_decoder_strings) { + const std::vector& chosen_decoders = + webrtc::JavaListToNativeVector( + env, j_decoder_strings, &webrtc::JavaToNativeString); + voip_thread_->PostTask( + RTC_FROM_HERE, [this, chosen_decoders] { SetDecoders(chosen_decoders); }); +} + +void AndroidVoipClient::SetLocalAddress(const std::string& ip_address, + const int port_number) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + rtp_local_address_ = rtc::SocketAddress(ip_address, port_number); + rtcp_local_address_ = rtc::SocketAddress(ip_address, port_number + 1); +} + +void AndroidVoipClient::SetLocalAddress( + JNIEnv* env, + const webrtc::JavaParamRef& j_ip_address_string, + jint j_port_number_int) { + const std::string& ip_address = + webrtc::JavaToNativeString(env, j_ip_address_string); + voip_thread_->PostTask(RTC_FROM_HERE, [this, ip_address, j_port_number_int] { + SetLocalAddress(ip_address, j_port_number_int); + }); +} + +void AndroidVoipClient::SetRemoteAddress(const std::string& ip_address, + const int port_number) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + rtp_remote_address_ = rtc::SocketAddress(ip_address, port_number); + rtcp_remote_address_ = rtc::SocketAddress(ip_address, port_number + 1); +} + +void AndroidVoipClient::SetRemoteAddress( + JNIEnv* env, + const webrtc::JavaParamRef& j_ip_address_string, + jint j_port_number_int) { + const std::string& ip_address = + 
webrtc::JavaToNativeString(env, j_ip_address_string); + voip_thread_->PostTask(RTC_FROM_HERE, [this, ip_address, j_port_number_int] { + SetRemoteAddress(ip_address, j_port_number_int); + }); +} + +void AndroidVoipClient::StartSession(JNIEnv* env) { + RUN_ON_VOIP_THREAD(StartSession, env); + + channel_ = voip_engine_->Base().CreateChannel(this, absl::nullopt); + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel creation failed"; + Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + + rtp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(), + rtp_local_address_)); + if (!rtp_socket_) { + RTC_LOG_ERR(LERROR) << "Socket creation failed"; + Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + rtp_socket_->SignalReadPacket.connect( + this, &AndroidVoipClient::OnSignalReadRTPPacket); + + rtcp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(), + rtcp_local_address_)); + if (!rtcp_socket_) { + RTC_LOG_ERR(LERROR) << "Socket creation failed"; + Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + rtcp_socket_->SignalReadPacket.connect( + this, &AndroidVoipClient::OnSignalReadRTCPPacket); + Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_, + /*isSuccessful=*/true); +} + +void AndroidVoipClient::StopSession(JNIEnv* env) { + RUN_ON_VOIP_THREAD(StopSession, env); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + Java_VoipClient_onStopSessionCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + if (!voip_engine_->Base().StopSend(*channel_) || + !voip_engine_->Base().StopPlayout(*channel_)) { + Java_VoipClient_onStopSessionCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + + rtp_socket_->Close(); + rtcp_socket_->Close(); + + voip_engine_->Base().ReleaseChannel(*channel_); + channel_ = absl::nullopt; + 
Java_VoipClient_onStopSessionCompleted(env_, j_voip_client_, + /*isSuccessful=*/true); +} + +void AndroidVoipClient::StartSend(JNIEnv* env) { + RUN_ON_VOIP_THREAD(StartSend, env); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + Java_VoipClient_onStartSendCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + Java_VoipClient_onStartSendCompleted( + env_, j_voip_client_, voip_engine_->Base().StartSend(*channel_)); +} + +void AndroidVoipClient::StopSend(JNIEnv* env) { + RUN_ON_VOIP_THREAD(StopSend, env); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + Java_VoipClient_onStopSendCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + Java_VoipClient_onStopSendCompleted(env_, j_voip_client_, + voip_engine_->Base().StopSend(*channel_)); +} + +void AndroidVoipClient::StartPlayout(JNIEnv* env) { + RUN_ON_VOIP_THREAD(StartPlayout, env); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + Java_VoipClient_onStartPlayoutCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + Java_VoipClient_onStartPlayoutCompleted( + env_, j_voip_client_, voip_engine_->Base().StartPlayout(*channel_)); +} + +void AndroidVoipClient::StopPlayout(JNIEnv* env) { + RUN_ON_VOIP_THREAD(StopPlayout, env); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + Java_VoipClient_onStopPlayoutCompleted(env_, j_voip_client_, + /*isSuccessful=*/false); + return; + } + Java_VoipClient_onStopPlayoutCompleted( + env_, j_voip_client_, voip_engine_->Base().StopPlayout(*channel_)); +} + +void AndroidVoipClient::Delete(JNIEnv* env) { + delete this; +} + +void AndroidVoipClient::SendRtpPacket(const std::vector& packet_copy) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + if (!rtp_socket_->SendTo(packet_copy.data(), packet_copy.size(), + rtp_remote_address_, rtc::PacketOptions())) { + RTC_LOG(LS_ERROR) << "Failed to send RTP packet"; + } +} + +bool 
AndroidVoipClient::SendRtp(const uint8_t* packet, + size_t length, + const webrtc::PacketOptions& options) { + std::vector packet_copy(packet, packet + length); + voip_thread_->PostTask(RTC_FROM_HERE, + [this, packet_copy = std::move(packet_copy)] { + SendRtpPacket(packet_copy); + }); + return true; +} + +void AndroidVoipClient::SendRtcpPacket( + const std::vector& packet_copy) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + if (!rtcp_socket_->SendTo(packet_copy.data(), packet_copy.size(), + rtcp_remote_address_, rtc::PacketOptions())) { + RTC_LOG(LS_ERROR) << "Failed to send RTCP packet"; + } +} + +bool AndroidVoipClient::SendRtcp(const uint8_t* packet, size_t length) { + std::vector packet_copy(packet, packet + length); + voip_thread_->PostTask(RTC_FROM_HERE, + [this, packet_copy = std::move(packet_copy)] { + SendRtcpPacket(packet_copy); + }); + return true; +} + +void AndroidVoipClient::ReadRTPPacket(const std::vector& packet_copy) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return; + } + voip_engine_->Network().ReceivedRTPPacket( + *channel_, + rtc::ArrayView(packet_copy.data(), packet_copy.size())); +} + +void AndroidVoipClient::OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket, + const char* rtp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp) { + std::vector packet_copy(rtp_packet, rtp_packet + size); + voip_thread_->PostTask(RTC_FROM_HERE, + [this, packet_copy = std::move(packet_copy)] { + ReadRTPPacket(packet_copy); + }); +} + +void AndroidVoipClient::ReadRTCPPacket( + const std::vector& packet_copy) { + RTC_DCHECK_RUN_ON(voip_thread_.get()); + + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return; + } + voip_engine_->Network().ReceivedRTCPPacket( + *channel_, + rtc::ArrayView(packet_copy.data(), packet_copy.size())); +} + +void AndroidVoipClient::OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket, + const char* 
rtcp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp) { + std::vector packet_copy(rtcp_packet, rtcp_packet + size); + voip_thread_->PostTask(RTC_FROM_HERE, + [this, packet_copy = std::move(packet_copy)] { + ReadRTCPPacket(packet_copy); + }); +} + +static jlong JNI_VoipClient_CreateClient( + JNIEnv* env, + const webrtc::JavaParamRef& application_context, + const webrtc::JavaParamRef& j_voip_client) { + return webrtc::NativeToJavaPointer( + AndroidVoipClient::Create(env, application_context, j_voip_client)); +} + +} // namespace webrtc_examples diff --git a/examples/androidvoip/jni/android_voip_client.h b/examples/androidvoip/jni/android_voip_client.h new file mode 100644 index 0000000000..4dd0b0a0fb --- /dev/null +++ b/examples/androidvoip/jni/android_voip_client.h @@ -0,0 +1,189 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_ +#define EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_ + +#include + +#include +#include +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/call/transport.h" +#include "api/voip/voip_base.h" +#include "api/voip/voip_engine.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/async_udp_socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc_examples { + +// AndroidVoipClient facilitates the use of the VoIP API defined in +// api/voip/voip_engine.h. 
One instance of AndroidVoipClient should +// suffice for most VoIP applications. AndroidVoipClient implements +// webrtc::Transport to send RTP/RTCP packets to the remote endpoint. +// It also creates methods (slots) for sockets to connect to in +// order to receive RTP/RTCP packets. AndroidVoipClient does all +// operations with rtc::Thread (voip_thread_), this is to comply +// with consistent thread usage requirement with ProcessThread used +// within VoipEngine, as well as providing asynchronicity to the +// caller. AndroidVoipClient is meant to be used by Java through JNI. +class AndroidVoipClient : public webrtc::Transport, + public sigslot::has_slots<> { + public: + // Returns a pointer to an AndroidVoipClient object. Clients should + // use this factory method to create AndroidVoipClient objects. The + // method will return a nullptr in case of initialization errors. + // It is the client's responsibility to delete the pointer when + // they are done with it (this class provides a Delete() method). + static AndroidVoipClient* Create( + JNIEnv* env, + const webrtc::JavaParamRef& application_context, + const webrtc::JavaParamRef& j_voip_client); + + ~AndroidVoipClient() override; + + // Provides client with a Java List of Strings containing names of + // the built-in supported codecs through callback. + void GetSupportedCodecs(JNIEnv* env); + + // Provides client with a Java String of the default local IPv4 address + // through callback. If IPv4 address is not found, provide the default + // local IPv6 address. If IPv6 address is not found, provide an empty + // string. + void GetLocalIPAddress(JNIEnv* env); + + // Sets the encoder used by the VoIP API. + void SetEncoder(JNIEnv* env, + const webrtc::JavaParamRef& j_encoder_string); + + // Sets the decoders used by the VoIP API. + void SetDecoders(JNIEnv* env, + const webrtc::JavaParamRef& j_decoder_strings); + + // Sets two local/remote addresses, one for RTP packets, and another for + // RTCP packets. 
The RTP address will have IP address j_ip_address_string + // and port number j_port_number_int, the RTCP address will have IP address + // j_ip_address_string and port number j_port_number_int+1. + void SetLocalAddress(JNIEnv* env, + const webrtc::JavaParamRef& j_ip_address_string, + jint j_port_number_int); + void SetRemoteAddress( + JNIEnv* env, + const webrtc::JavaParamRef& j_ip_address_string, + jint j_port_number_int); + + // Starts a VoIP session, then calls a callback method with a boolean + // value indicating if the session has started successfully. The VoIP + // operations below can only be used after a session has already started. + void StartSession(JNIEnv* env); + + // Stops the current session, then calls a callback method with a + // boolean value indicating if the session has stopped successfully. + void StopSession(JNIEnv* env); + + // Starts sending RTP/RTCP packets to the remote endpoint, then calls + // a callback method with a boolean value indicating if sending + // has started successfully. + void StartSend(JNIEnv* env); + + // Stops sending RTP/RTCP packets to the remote endpoint, then calls + // a callback method with a boolean value indicating if sending + // has stopped successfully. + void StopSend(JNIEnv* env); + + // Starts playing out the voice data received from the remote endpoint, + // then calls a callback method with a boolean value indicating if + // playout has started successfully. + void StartPlayout(JNIEnv* env); + + // Stops playing out the voice data received from the remote endpoint, + // then calls a callback method with a boolean value indicating if + // playout has stopped successfully. + void StopPlayout(JNIEnv* env); + + // Deletes this object. Used by client when they are done. + void Delete(JNIEnv* env); + + // Implementation for Transport. 
+ bool SendRtp(const uint8_t* packet, + size_t length, + const webrtc::PacketOptions& options) override; + bool SendRtcp(const uint8_t* packet, size_t length) override; + + // Slots for sockets to connect to. + void OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket, + const char* rtp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp); + void OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket, + const char* rtcp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp); + + private: + AndroidVoipClient(JNIEnv* env, + const webrtc::JavaParamRef& j_voip_client) + : voip_thread_(rtc::Thread::CreateWithSocketServer()), + j_voip_client_(env, j_voip_client) {} + + bool Init(JNIEnv* env, + const webrtc::JavaParamRef& application_context); + + // Overloaded methods having native C++ variables as arguments. + void SetEncoder(const std::string& encoder); + void SetDecoders(const std::vector& decoders); + void SetLocalAddress(const std::string& ip_address, const int port_number); + void SetRemoteAddress(const std::string& ip_address, const int port_number); + + // Methods to send and receive RTP/RTCP packets. Takes in a + // copy of a packet as a vector to prolong the lifetime of + // the packet as these methods will be called asynchronously. + void SendRtpPacket(const std::vector& packet_copy); + void SendRtcpPacket(const std::vector& packet_copy); + void ReadRTPPacket(const std::vector& packet_copy); + void ReadRTCPPacket(const std::vector& packet_copy); + + // Used to invoke operations and send/receive RTP/RTCP packets. + std::unique_ptr voip_thread_; + // Reference to the VoipClient java instance used to + // invoke callbacks when operations are finished. + webrtc::ScopedJavaGlobalRef j_voip_client_ + RTC_GUARDED_BY(voip_thread_); + // A list of AudioCodecSpec supported by the built-in + // encoder/decoder factories. 
+ std::vector supported_codecs_ + RTC_GUARDED_BY(voip_thread_); + // A JNI context used by the voip_thread_. + JNIEnv* env_ RTC_GUARDED_BY(voip_thread_); + // The entry point to all VoIP APIs. + std::unique_ptr voip_engine_ RTC_GUARDED_BY(voip_thread_); + // Used by the VoIP API to facilitate a VoIP session. + absl::optional channel_ RTC_GUARDED_BY(voip_thread_); + // Members below are used for network related operations. + std::unique_ptr rtp_socket_ RTC_GUARDED_BY(voip_thread_); + std::unique_ptr rtcp_socket_ + RTC_GUARDED_BY(voip_thread_); + rtc::SocketAddress rtp_local_address_ RTC_GUARDED_BY(voip_thread_); + rtc::SocketAddress rtcp_local_address_ RTC_GUARDED_BY(voip_thread_); + rtc::SocketAddress rtp_remote_address_ RTC_GUARDED_BY(voip_thread_); + rtc::SocketAddress rtcp_remote_address_ RTC_GUARDED_BY(voip_thread_); +}; + +} // namespace webrtc_examples + +#endif // EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_ diff --git a/examples/androidvoip/jni/onload.cc b/examples/androidvoip/jni/onload.cc new file mode 100644 index 0000000000..b952de348b --- /dev/null +++ b/examples/androidvoip/jni/onload.cc @@ -0,0 +1,28 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include + +#include "rtc_base/ssl_adapter.h" +#include "sdk/android/native_api/base/init.h" + +namespace webrtc_examples { + +extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) { + webrtc::InitAndroid(jvm); + RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; + return JNI_VERSION_1_6; +} + +extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) { + RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; +} + +} // namespace webrtc_examples diff --git a/examples/androidvoip/res/layout/activity_main.xml b/examples/androidvoip/res/layout/activity_main.xml new file mode 100644 index 0000000000..c7fa5a9b31 --- /dev/null +++ b/examples/androidvoip/res/layout/activity_main.xml @@ -0,0 +1,303 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ' - '') - - return self._NEW_LINE.join(html) - - def _BuildScoreTab(self, score_name, anchor_data): - """Builds the content of a tab.""" - # Find unique values. - scores = self._scores_data_frame[ - self._scores_data_frame.eval_score_name == score_name] - apm_configs = sorted(self._FindUniqueTuples(scores, ['apm_config'])) - test_data_gen_configs = sorted(self._FindUniqueTuples( - scores, ['test_data_gen', 'test_data_gen_params'])) - - html = [ - '
', - '
', - '
', - (''), - ] - - # Header. - html.append('') - for test_data_gen_info in test_data_gen_configs: - html.append(''.format( - self._FormatName(test_data_gen_info[0]), test_data_gen_info[1])) - html.append('') - - # Body. - html.append('') - for apm_config in apm_configs: - html.append('') - for test_data_gen_info in test_data_gen_configs: - dialog_id = self._ScoreStatsInspectorDialogId( - score_name, apm_config[0], test_data_gen_info[0], - test_data_gen_info[1]) + html = ['', 'Results'] + + # Add Material Design hosted libs. + html.append('') + html.append( + '') html.append( - ''.format( - dialog_id, self._BuildScoreTableCell( - score_name, test_data_gen_info[0], test_data_gen_info[1], - apm_config[0]))) - html.append('') - html.append('') - - html.append('
APM config / Test data generator{} {}
' + self._FormatName(apm_config[0]) + '{}
') - - html.append(self._BuildScoreStatsInspectorDialogs( - score_name, apm_configs, test_data_gen_configs, - anchor_data)) - - return self._NEW_LINE.join(html) - - def _BuildScoreTableCell(self, score_name, test_data_gen, - test_data_gen_params, apm_config): - """Builds the content of a table cell for a score table.""" - scores = self._SliceDataForScoreTableCell( - score_name, apm_config, test_data_gen, test_data_gen_params) - stats = self._ComputeScoreStats(scores) - - html = [] - items_id_prefix = ( - score_name + test_data_gen + test_data_gen_params + apm_config) - if stats['count'] == 1: - # Show the only available score. - item_id = hashlib.md5(items_id_prefix.encode('utf-8')).hexdigest() - html.append('
{1:f}
'.format( - item_id, scores['score'].mean())) - html.append('
{}' - '
'.format(item_id, 'single value')) - else: - # Show stats. - for stat_name in ['min', 'max', 'mean', 'std dev']: - item_id = hashlib.md5( - (items_id_prefix + stat_name).encode('utf-8')).hexdigest() - html.append('
{1:f}
'.format( - item_id, stats[stat_name])) - html.append('
{}' - '
'.format(item_id, stat_name)) - - return self._NEW_LINE.join(html) - - def _BuildScoreStatsInspectorDialogs( - self, score_name, apm_configs, test_data_gen_configs, anchor_data): - """Builds a set of score stats inspector dialogs.""" - html = [] - for apm_config in apm_configs: - for test_data_gen_info in test_data_gen_configs: - dialog_id = self._ScoreStatsInspectorDialogId( - score_name, apm_config[0], - test_data_gen_info[0], test_data_gen_info[1]) - - html.append(''.format(dialog_id)) - - # Content. - html.append('
') - html.append('
APM config preset: {}
' - 'Test data generator: {} ({})
'.format( - self._FormatName(apm_config[0]), - self._FormatName(test_data_gen_info[0]), - test_data_gen_info[1])) - html.append(self._BuildScoreStatsInspectorDialog( - score_name, apm_config[0], test_data_gen_info[0], - test_data_gen_info[1], anchor_data + (dialog_id,))) + '') + html.append('') + + # Embed custom JavaScript and CSS files. + html.append('') + html.append('') + + html.append('') + + return self._NEW_LINE.join(html) + + def _BuildBody(self): + """Builds the content of the section.""" + score_names = self._scores_data_frame[ + 'eval_score_name'].drop_duplicates().values.tolist() + + html = [ + ('
'), + '
', + '
', + 'APM QA results ({})'.format( + self._output_filepath), + '
', + ] + + # Tab selectors. + html.append('
') + for tab_index, score_name in enumerate(score_names): + is_active = tab_index == 0 + html.append('' + '{}'.format(tab_index, + ' is-active' if is_active else '', + self._FormatName(score_name))) html.append('
') - # Actions. - html.append('
') - html.append('') + html.append('
') + html.append( + '
') + + # Tabs content. + for tab_index, score_name in enumerate(score_names): + html.append('
'.format( + ' is-active' if is_active else '', tab_index)) + html.append('
') + html.append( + self._BuildScoreTab(score_name, ('s{}'.format(tab_index), ))) + html.append('
') + html.append('
') + + html.append('
') html.append('
') - html.append('
') - - return self._NEW_LINE.join(html) - - def _BuildScoreStatsInspectorDialog( - self, score_name, apm_config, test_data_gen, test_data_gen_params, - anchor_data): - """Builds one score stats inspector dialog.""" - scores = self._SliceDataForScoreTableCell( - score_name, apm_config, test_data_gen, test_data_gen_params) - - capture_render_pairs = sorted(self._FindUniqueTuples( - scores, ['capture', 'render'])) - echo_simulators = sorted(self._FindUniqueTuples(scores, ['echo_simulator'])) - - html = [''] - - # Header. - html.append('') - for echo_simulator in echo_simulators: - html.append('') - html.append('') - - # Body. - html.append('') - for row, (capture, render) in enumerate(capture_render_pairs): - html.append(''.format( - capture, render)) - for col, echo_simulator in enumerate(echo_simulators): - score_tuple = self._SliceDataForScoreStatsTableCell( - scores, capture, render, echo_simulator[0]) - cell_class = 'r{}c{}'.format(row, col) - html.append(''.format( - cell_class, self._BuildScoreStatsInspectorTableCell( - score_tuple, anchor_data + (cell_class,)))) - html.append('') - html.append('') - - html.append('
Capture-Render / Echo simulator' + self._FormatName(echo_simulator[0]) +'
{}
{}
{}
') - - # Placeholder for the audio inspector. - html.append('
') - - return self._NEW_LINE.join(html) - - def _BuildScoreStatsInspectorTableCell(self, score_tuple, anchor_data): - """Builds the content of a cell of a score stats inspector.""" - anchor = '&'.join(anchor_data) - html = [('
{}
' - '').format(score_tuple.score, anchor)] - - # Add all the available file paths as hidden data. - for field_name in score_tuple.keys(): - if field_name.endswith('_filepath'): - html.append(''.format( - field_name, score_tuple[field_name])) - - return self._NEW_LINE.join(html) - - def _SliceDataForScoreTableCell( - self, score_name, apm_config, test_data_gen, test_data_gen_params): - """Slices |self._scores_data_frame| to extract the data for a tab.""" - masks = [] - masks.append(self._scores_data_frame.eval_score_name == score_name) - masks.append(self._scores_data_frame.apm_config == apm_config) - masks.append(self._scores_data_frame.test_data_gen == test_data_gen) - masks.append( - self._scores_data_frame.test_data_gen_params == test_data_gen_params) - mask = functools.reduce((lambda i1, i2: i1 & i2), masks) - del masks - return self._scores_data_frame[mask] - - @classmethod - def _SliceDataForScoreStatsTableCell( - cls, scores, capture, render, echo_simulator): - """Slices |scores| to extract the data for a tab.""" - masks = [] - - masks.append(scores.capture == capture) - masks.append(scores.render == render) - masks.append(scores.echo_simulator == echo_simulator) - mask = functools.reduce((lambda i1, i2: i1 & i2), masks) - del masks - - sliced_data = scores[mask] - assert len(sliced_data) == 1, 'single score is expected' - return sliced_data.iloc[0] - - @classmethod - def _FindUniqueTuples(cls, data_frame, fields): - """Slices |data_frame| to a list of fields and finds unique tuples.""" - return data_frame[fields].drop_duplicates().values.tolist() - - @classmethod - def _ComputeScoreStats(cls, data_frame): - """Computes score stats.""" - scores = data_frame['score'] - return { - 'count': scores.count(), - 'min': scores.min(), - 'max': scores.max(), - 'mean': scores.mean(), - 'std dev': scores.std(), - } - - @classmethod - def _ScoreStatsInspectorDialogId(cls, score_name, apm_config, test_data_gen, - test_data_gen_params): - """Assigns a unique name to a 
dialog.""" - return 'score-stats-dialog-' + hashlib.md5( - 'score-stats-inspector-{}-{}-{}-{}'.format( - score_name, apm_config, test_data_gen, - test_data_gen_params).encode('utf-8')).hexdigest() - - @classmethod - def _Save(cls, output_filepath, html): - """Writes the HTML file. + # Add snackbar for notifications. + html.append( + '
' + '
' + '' + '
') + + return self._NEW_LINE.join(html) + + def _BuildScoreTab(self, score_name, anchor_data): + """Builds the content of a tab.""" + # Find unique values. + scores = self._scores_data_frame[ + self._scores_data_frame.eval_score_name == score_name] + apm_configs = sorted(self._FindUniqueTuples(scores, ['apm_config'])) + test_data_gen_configs = sorted( + self._FindUniqueTuples(scores, + ['test_data_gen', 'test_data_gen_params'])) + + html = [ + '
', + '
', + '
', + (''), + ] + + # Header. + html.append('') + for test_data_gen_info in test_data_gen_configs: + html.append(''.format( + self._FormatName(test_data_gen_info[0]), + test_data_gen_info[1])) + html.append('') + + # Body. + html.append('') + for apm_config in apm_configs: + html.append('') + for test_data_gen_info in test_data_gen_configs: + dialog_id = self._ScoreStatsInspectorDialogId( + score_name, apm_config[0], test_data_gen_info[0], + test_data_gen_info[1]) + html.append( + ''. + format( + dialog_id, + self._BuildScoreTableCell(score_name, + test_data_gen_info[0], + test_data_gen_info[1], + apm_config[0]))) + html.append('') + html.append('') + + html.append( + '
APM config / Test data generator{} {}
' + self._FormatName(apm_config[0]) + '{}
') + + html.append( + self._BuildScoreStatsInspectorDialogs(score_name, apm_configs, + test_data_gen_configs, + anchor_data)) + + return self._NEW_LINE.join(html) + + def _BuildScoreTableCell(self, score_name, test_data_gen, + test_data_gen_params, apm_config): + """Builds the content of a table cell for a score table.""" + scores = self._SliceDataForScoreTableCell(score_name, apm_config, + test_data_gen, + test_data_gen_params) + stats = self._ComputeScoreStats(scores) + + html = [] + items_id_prefix = (score_name + test_data_gen + test_data_gen_params + + apm_config) + if stats['count'] == 1: + # Show the only available score. + item_id = hashlib.md5(items_id_prefix.encode('utf-8')).hexdigest() + html.append('
{1:f}
'.format( + item_id, scores['score'].mean())) + html.append( + '
{}' + '
'.format(item_id, 'single value')) + else: + # Show stats. + for stat_name in ['min', 'max', 'mean', 'std dev']: + item_id = hashlib.md5( + (items_id_prefix + stat_name).encode('utf-8')).hexdigest() + html.append('
{1:f}
'.format( + item_id, stats[stat_name])) + html.append( + '
{}' + '
'.format(item_id, stat_name)) + + return self._NEW_LINE.join(html) + + def _BuildScoreStatsInspectorDialogs(self, score_name, apm_configs, + test_data_gen_configs, anchor_data): + """Builds a set of score stats inspector dialogs.""" + html = [] + for apm_config in apm_configs: + for test_data_gen_info in test_data_gen_configs: + dialog_id = self._ScoreStatsInspectorDialogId( + score_name, apm_config[0], test_data_gen_info[0], + test_data_gen_info[1]) + + html.append(''.format(dialog_id)) + + # Content. + html.append('
') + html.append( + '
APM config preset: {}
' + 'Test data generator: {} ({})
'. + format(self._FormatName(apm_config[0]), + self._FormatName(test_data_gen_info[0]), + test_data_gen_info[1])) + html.append( + self._BuildScoreStatsInspectorDialog( + score_name, apm_config[0], test_data_gen_info[0], + test_data_gen_info[1], anchor_data + (dialog_id, ))) + html.append('
') + + # Actions. + html.append('
') + html.append('') + html.append('
') + + html.append('
') + + return self._NEW_LINE.join(html) + + def _BuildScoreStatsInspectorDialog(self, score_name, apm_config, + test_data_gen, test_data_gen_params, + anchor_data): + """Builds one score stats inspector dialog.""" + scores = self._SliceDataForScoreTableCell(score_name, apm_config, + test_data_gen, + test_data_gen_params) + + capture_render_pairs = sorted( + self._FindUniqueTuples(scores, ['capture', 'render'])) + echo_simulators = sorted( + self._FindUniqueTuples(scores, ['echo_simulator'])) + + html = [ + '' + ] + + # Header. + html.append('') + for echo_simulator in echo_simulators: + html.append('') + html.append('') + + # Body. + html.append('') + for row, (capture, render) in enumerate(capture_render_pairs): + html.append(''.format( + capture, render)) + for col, echo_simulator in enumerate(echo_simulators): + score_tuple = self._SliceDataForScoreStatsTableCell( + scores, capture, render, echo_simulator[0]) + cell_class = 'r{}c{}'.format(row, col) + html.append(''.format( + cell_class, + self._BuildScoreStatsInspectorTableCell( + score_tuple, anchor_data + (cell_class, )))) + html.append('') + html.append('') + + html.append('
Capture-Render / Echo simulator' + self._FormatName(echo_simulator[0]) + '
{}
{}
{}
') + + # Placeholder for the audio inspector. + html.append('
') + + return self._NEW_LINE.join(html) + + def _BuildScoreStatsInspectorTableCell(self, score_tuple, anchor_data): + """Builds the content of a cell of a score stats inspector.""" + anchor = '&'.join(anchor_data) + html = [('
{}
' + '').format(score_tuple.score, anchor)] + + # Add all the available file paths as hidden data. + for field_name in score_tuple.keys(): + if field_name.endswith('_filepath'): + html.append( + ''.format( + field_name, score_tuple[field_name])) + + return self._NEW_LINE.join(html) + + def _SliceDataForScoreTableCell(self, score_name, apm_config, + test_data_gen, test_data_gen_params): + """Slices |self._scores_data_frame| to extract the data for a tab.""" + masks = [] + masks.append(self._scores_data_frame.eval_score_name == score_name) + masks.append(self._scores_data_frame.apm_config == apm_config) + masks.append(self._scores_data_frame.test_data_gen == test_data_gen) + masks.append(self._scores_data_frame.test_data_gen_params == + test_data_gen_params) + mask = functools.reduce((lambda i1, i2: i1 & i2), masks) + del masks + return self._scores_data_frame[mask] + + @classmethod + def _SliceDataForScoreStatsTableCell(cls, scores, capture, render, + echo_simulator): + """Slices |scores| to extract the data for a tab.""" + masks = [] + + masks.append(scores.capture == capture) + masks.append(scores.render == render) + masks.append(scores.echo_simulator == echo_simulator) + mask = functools.reduce((lambda i1, i2: i1 & i2), masks) + del masks + + sliced_data = scores[mask] + assert len(sliced_data) == 1, 'single score is expected' + return sliced_data.iloc[0] + + @classmethod + def _FindUniqueTuples(cls, data_frame, fields): + """Slices |data_frame| to a list of fields and finds unique tuples.""" + return data_frame[fields].drop_duplicates().values.tolist() + + @classmethod + def _ComputeScoreStats(cls, data_frame): + """Computes score stats.""" + scores = data_frame['score'] + return { + 'count': scores.count(), + 'min': scores.min(), + 'max': scores.max(), + 'mean': scores.mean(), + 'std dev': scores.std(), + } + + @classmethod + def _ScoreStatsInspectorDialogId(cls, score_name, apm_config, + test_data_gen, test_data_gen_params): + """Assigns a unique name to a 
dialog.""" + return 'score-stats-dialog-' + hashlib.md5( + 'score-stats-inspector-{}-{}-{}-{}'.format( + score_name, apm_config, test_data_gen, + test_data_gen_params).encode('utf-8')).hexdigest() + + @classmethod + def _Save(cls, output_filepath, html): + """Writes the HTML file. Args: output_filepath: output file path. html: string with the HTML content. """ - with open(output_filepath, 'w') as f: - f.write(html) + with open(output_filepath, 'w') as f: + f.write(html) - @classmethod - def _FormatName(cls, name): - """Formats a name. + @classmethod + def _FormatName(cls, name): + """Formats a name. Args: name: a string. @@ -399,4 +423,4 @@ def _FormatName(cls, name): Returns: A copy of name in which underscores and dashes are replaced with a space. """ - return re.sub(r'[_\-]', ' ', name) + return re.sub(r'[_\-]', ' ', name) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py index 264af7e994..412aa7c4e7 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Unit tests for the export module. """ @@ -27,60 +26,61 @@ class TestExport(unittest.TestCase): - """Unit tests for the export module. + """Unit tests for the export module. """ - _CLEAN_TMP_OUTPUT = True + _CLEAN_TMP_OUTPUT = True - def setUp(self): - """Creates temporary data to export.""" - self._tmp_path = tempfile.mkdtemp() + def setUp(self): + """Creates temporary data to export.""" + self._tmp_path = tempfile.mkdtemp() - # Run a fake experiment to produce data to export. 
- simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path='', - noise_tracks_path='', - copy_with_identity=False)), - evaluation_score_factory=( - eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join( - os.path.dirname(os.path.abspath(__file__)), 'fake_polqa'), - echo_metric_tool_bin_path=None - )), - ap_wrapper=audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH), - evaluator=evaluation.ApmModuleEvaluator()) - simulator.Run( - config_filepaths=['apm_configs/default.json'], - capture_input_filepaths=[ - os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), - os.path.join(self._tmp_path, 'pure_tone-880_1000.wav'), - ], - test_data_generator_names=['identity', 'white_noise'], - eval_score_names=['audio_level_peak', 'audio_level_mean'], - output_dir=self._tmp_path) + # Run a fake experiment to produce data to export. + simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=( + test_data_generation_factory.TestDataGeneratorFactory( + aechen_ir_database_path='', + noise_tracks_path='', + copy_with_identity=False)), + evaluation_score_factory=( + eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join( + os.path.dirname(os.path.abspath(__file__)), + 'fake_polqa'), + echo_metric_tool_bin_path=None)), + ap_wrapper=audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper. + DEFAULT_APM_SIMULATOR_BIN_PATH), + evaluator=evaluation.ApmModuleEvaluator()) + simulator.Run( + config_filepaths=['apm_configs/default.json'], + capture_input_filepaths=[ + os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), + os.path.join(self._tmp_path, 'pure_tone-880_1000.wav'), + ], + test_data_generator_names=['identity', 'white_noise'], + eval_score_names=['audio_level_peak', 'audio_level_mean'], + output_dir=self._tmp_path) - # Export results. 
- p = collect_data.InstanceArgumentsParser() - args = p.parse_args(['--output_dir', self._tmp_path]) - src_path = collect_data.ConstructSrcPath(args) - self._data_to_export = collect_data.FindScores(src_path, args) + # Export results. + p = collect_data.InstanceArgumentsParser() + args = p.parse_args(['--output_dir', self._tmp_path]) + src_path = collect_data.ConstructSrcPath(args) + self._data_to_export = collect_data.FindScores(src_path, args) - def tearDown(self): - """Recursively deletes temporary folders.""" - if self._CLEAN_TMP_OUTPUT: - shutil.rmtree(self._tmp_path) - else: - logging.warning(self.id() + ' did not clean the temporary path ' + ( - self._tmp_path)) + def tearDown(self): + """Recursively deletes temporary folders.""" + if self._CLEAN_TMP_OUTPUT: + shutil.rmtree(self._tmp_path) + else: + logging.warning(self.id() + ' did not clean the temporary path ' + + (self._tmp_path)) - def testCreateHtmlReport(self): - fn_out = os.path.join(self._tmp_path, 'results.html') - exporter = export.HtmlExport(fn_out) - exporter.Export(self._data_to_export) + def testCreateHtmlReport(self): + fn_out = os.path.join(self._tmp_path, 'results.html') + exporter = export.HtmlExport(fn_out) + exporter.Export(self._data_to_export) - document = pq.PyQuery(filename=fn_out) - self.assertIsInstance(document, pq.PyQuery) - # TODO(alessiob): Use PyQuery API to check the HTML file. + document = pq.PyQuery(filename=fn_out) + self.assertIsInstance(document, pq.PyQuery) + # TODO(alessiob): Use PyQuery API to check the HTML file. 
diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py index 01418d84fe..a7db7b4840 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py @@ -16,62 +16,60 @@ import tempfile try: - import numpy as np + import numpy as np except ImportError: - logging.critical('Cannot import the third-party Python package numpy') - sys.exit(1) + logging.critical('Cannot import the third-party Python package numpy') + sys.exit(1) from . import signal_processing -class ExternalVad(object): - def __init__(self, path_to_binary, name): - """Args: +class ExternalVad(object): + def __init__(self, path_to_binary, name): + """Args: path_to_binary: path to binary that accepts '-i ', '-o '. There must be one float value per 10ms audio name: a name to identify the external VAD. Used for saving the output as extvad_output-. 
""" - self._path_to_binary = path_to_binary - self.name = name - assert os.path.exists(self._path_to_binary), ( - self._path_to_binary) - self._vad_output = None + self._path_to_binary = path_to_binary + self.name = name + assert os.path.exists(self._path_to_binary), (self._path_to_binary) + self._vad_output = None - def Run(self, wav_file_path): - _signal = signal_processing.SignalProcessingUtils.LoadWav(wav_file_path) - if _signal.channels != 1: - raise NotImplementedError('Multiple-channel' - ' annotations not implemented') - if _signal.frame_rate != 48000: - raise NotImplementedError('Frame rates ' - 'other than 48000 not implemented') + def Run(self, wav_file_path): + _signal = signal_processing.SignalProcessingUtils.LoadWav( + wav_file_path) + if _signal.channels != 1: + raise NotImplementedError('Multiple-channel' + ' annotations not implemented') + if _signal.frame_rate != 48000: + raise NotImplementedError('Frame rates ' + 'other than 48000 not implemented') - tmp_path = tempfile.mkdtemp() - try: - output_file_path = os.path.join( - tmp_path, self.name + '_vad.tmp') - subprocess.call([ - self._path_to_binary, - '-i', wav_file_path, - '-o', output_file_path - ]) - self._vad_output = np.fromfile(output_file_path, np.float32) - except Exception as e: - logging.error('Error while running the ' + self.name + - ' VAD (' + e.message + ')') - finally: - if os.path.exists(tmp_path): - shutil.rmtree(tmp_path) + tmp_path = tempfile.mkdtemp() + try: + output_file_path = os.path.join(tmp_path, self.name + '_vad.tmp') + subprocess.call([ + self._path_to_binary, '-i', wav_file_path, '-o', + output_file_path + ]) + self._vad_output = np.fromfile(output_file_path, np.float32) + except Exception as e: + logging.error('Error while running the ' + self.name + ' VAD (' + + e.message + ')') + finally: + if os.path.exists(tmp_path): + shutil.rmtree(tmp_path) - def GetVadOutput(self): - assert self._vad_output is not None - return self._vad_output + def GetVadOutput(self): + 
assert self._vad_output is not None + return self._vad_output - @classmethod - def ConstructVadDict(cls, vad_paths, vad_names): - external_vads = {} - for path, name in zip(vad_paths, vad_names): - external_vads[name] = ExternalVad(path, name) - return external_vads + @classmethod + def ConstructVadDict(cls, vad_paths, vad_names): + external_vads = {} + for path, name in zip(vad_paths, vad_names): + external_vads[name] = ExternalVad(path, name) + return external_vads diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py index 7c75e8f5c3..f679f8c94a 100755 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py @@ -9,16 +9,17 @@ import argparse import numpy as np + def main(): - parser = argparse.ArgumentParser() - parser.add_argument('-i', required=True) - parser.add_argument('-o', required=True) + parser = argparse.ArgumentParser() + parser.add_argument('-i', required=True) + parser.add_argument('-o', required=True) - args = parser.parse_args() + args = parser.parse_args() - array = np.arange(100, dtype=np.float32) - array.tofile(open(args.o, 'w')) + array = np.arange(100, dtype=np.float32) + array.tofile(open(args.o, 'w')) if __name__ == '__main__': - main() + main() diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py index b1afe14454..f9125fa7f3 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. 
All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Input mixer module. """ @@ -17,24 +16,24 @@ class ApmInputMixer(object): - """Class to mix a set of audio segments down to the APM input.""" + """Class to mix a set of audio segments down to the APM input.""" - _HARD_CLIPPING_LOG_MSG = 'hard clipping detected in the mixed signal' + _HARD_CLIPPING_LOG_MSG = 'hard clipping detected in the mixed signal' - def __init__(self): - pass + def __init__(self): + pass - @classmethod - def HardClippingLogMessage(cls): - """Returns the log message used when hard clipping is detected in the mix. + @classmethod + def HardClippingLogMessage(cls): + """Returns the log message used when hard clipping is detected in the mix. This method is mainly intended to be used by the unit tests. """ - return cls._HARD_CLIPPING_LOG_MSG + return cls._HARD_CLIPPING_LOG_MSG - @classmethod - def Mix(cls, output_path, capture_input_filepath, echo_filepath): - """Mixes capture and echo. + @classmethod + def Mix(cls, output_path, capture_input_filepath, echo_filepath): + """Mixes capture and echo. Creates the overall capture input for APM by mixing the "echo-free" capture signal with the echo signal (e.g., echo simulated via the @@ -58,38 +57,41 @@ def Mix(cls, output_path, capture_input_filepath, echo_filepath): Returns: Path to the mix audio track file. """ - if echo_filepath is None: - return capture_input_filepath - - # Build the mix output file name as a function of the echo file name. - # This ensures that if the internal parameters of the echo path simulator - # change, no erroneous cache hit occurs. - echo_file_name, _ = os.path.splitext(os.path.split(echo_filepath)[1]) - capture_input_file_name, _ = os.path.splitext( - os.path.split(capture_input_filepath)[1]) - mix_filepath = os.path.join(output_path, 'mix_capture_{}_{}.wav'.format( - capture_input_file_name, echo_file_name)) - - # Create the mix if not done yet. 
- mix = None - if not os.path.exists(mix_filepath): - echo_free_capture = signal_processing.SignalProcessingUtils.LoadWav( - capture_input_filepath) - echo = signal_processing.SignalProcessingUtils.LoadWav(echo_filepath) - - if signal_processing.SignalProcessingUtils.CountSamples(echo) < ( - signal_processing.SignalProcessingUtils.CountSamples( - echo_free_capture)): - raise exceptions.InputMixerException( - 'echo cannot be shorter than capture') - - mix = echo_free_capture.overlay(echo) - signal_processing.SignalProcessingUtils.SaveWav(mix_filepath, mix) - - # Check if hard clipping occurs. - if mix is None: - mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) - if signal_processing.SignalProcessingUtils.DetectHardClipping(mix): - logging.warning(cls._HARD_CLIPPING_LOG_MSG) - - return mix_filepath + if echo_filepath is None: + return capture_input_filepath + + # Build the mix output file name as a function of the echo file name. + # This ensures that if the internal parameters of the echo path simulator + # change, no erroneous cache hit occurs. + echo_file_name, _ = os.path.splitext(os.path.split(echo_filepath)[1]) + capture_input_file_name, _ = os.path.splitext( + os.path.split(capture_input_filepath)[1]) + mix_filepath = os.path.join( + output_path, + 'mix_capture_{}_{}.wav'.format(capture_input_file_name, + echo_file_name)) + + # Create the mix if not done yet. 
+ mix = None + if not os.path.exists(mix_filepath): + echo_free_capture = signal_processing.SignalProcessingUtils.LoadWav( + capture_input_filepath) + echo = signal_processing.SignalProcessingUtils.LoadWav( + echo_filepath) + + if signal_processing.SignalProcessingUtils.CountSamples(echo) < ( + signal_processing.SignalProcessingUtils.CountSamples( + echo_free_capture)): + raise exceptions.InputMixerException( + 'echo cannot be shorter than capture') + + mix = echo_free_capture.overlay(echo) + signal_processing.SignalProcessingUtils.SaveWav(mix_filepath, mix) + + # Check if hard clipping occurs. + if mix is None: + mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) + if signal_processing.SignalProcessingUtils.DetectHardClipping(mix): + logging.warning(cls._HARD_CLIPPING_LOG_MSG) + + return mix_filepath diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py index b212614199..4fd5e4f1ee 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py @@ -5,21 +5,15 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Unit tests for the input mixer module. """ import logging import os import shutil -import sys import tempfile import unittest -SRC = os.path.abspath(os.path.join( - os.path.dirname((__file__)), os.pardir, os.pardir, os.pardir, os.pardir)) -sys.path.append(os.path.join(SRC, 'third_party', 'pymock')) - import mock from . import exceptions @@ -28,122 +22,119 @@ class TestApmInputMixer(unittest.TestCase): - """Unit tests for the ApmInputMixer class. + """Unit tests for the ApmInputMixer class. 
""" - # Audio track file names created in setUp(). - _FILENAMES = ['capture', 'echo_1', 'echo_2', 'shorter', 'longer'] - - # Target peak power level (dBFS) of each audio track file created in setUp(). - # These values are hand-crafted in order to make saturation happen when - # capture and echo_2 are mixed and the contrary for capture and echo_1. - # None means that the power is not changed. - _MAX_PEAK_POWER_LEVELS = [-10.0, -5.0, 0.0, None, None] - - # Audio track file durations in milliseconds. - _DURATIONS = [1000, 1000, 1000, 800, 1200] - - _SAMPLE_RATE = 48000 - - def setUp(self): - """Creates temporary data.""" - self._tmp_path = tempfile.mkdtemp() - - # Create audio track files. - self._audio_tracks = {} - for filename, peak_power, duration in zip( - self._FILENAMES, self._MAX_PEAK_POWER_LEVELS, self._DURATIONS): - audio_track_filepath = os.path.join(self._tmp_path, '{}.wav'.format( - filename)) - - # Create a pure tone with the target peak power level. - template = signal_processing.SignalProcessingUtils.GenerateSilence( - duration=duration, sample_rate=self._SAMPLE_RATE) - signal = signal_processing.SignalProcessingUtils.GeneratePureTone( - template) - if peak_power is not None: - signal = signal.apply_gain(-signal.max_dBFS + peak_power) - - signal_processing.SignalProcessingUtils.SaveWav( - audio_track_filepath, signal) - self._audio_tracks[filename] = { - 'filepath': audio_track_filepath, - 'num_samples': signal_processing.SignalProcessingUtils.CountSamples( - signal) - } - - def tearDown(self): - """Recursively deletes temporary folders.""" - shutil.rmtree(self._tmp_path) - - def testCheckMixSameDuration(self): - """Checks the duration when mixing capture and echo with same duration.""" - mix_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, - self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_1']['filepath']) - self.assertTrue(os.path.exists(mix_filepath)) - - mix = 
signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) - self.assertEqual(self._audio_tracks['capture']['num_samples'], - signal_processing.SignalProcessingUtils.CountSamples(mix)) - - def testRejectShorterEcho(self): - """Rejects echo signals that are shorter than the capture signal.""" - try: - _ = input_mixer.ApmInputMixer.Mix( - self._tmp_path, - self._audio_tracks['capture']['filepath'], - self._audio_tracks['shorter']['filepath']) - self.fail('no exception raised') - except exceptions.InputMixerException: - pass - - def testCheckMixDurationWithLongerEcho(self): - """Checks the duration when mixing an echo longer than the capture.""" - mix_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, - self._audio_tracks['capture']['filepath'], - self._audio_tracks['longer']['filepath']) - self.assertTrue(os.path.exists(mix_filepath)) - - mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) - self.assertEqual(self._audio_tracks['capture']['num_samples'], - signal_processing.SignalProcessingUtils.CountSamples(mix)) - - def testCheckOutputFileNamesConflict(self): - """Checks that different echo files lead to different output file names.""" - mix1_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, - self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_1']['filepath']) - self.assertTrue(os.path.exists(mix1_filepath)) - - mix2_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, - self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_2']['filepath']) - self.assertTrue(os.path.exists(mix2_filepath)) - - self.assertNotEqual(mix1_filepath, mix2_filepath) - - def testHardClippingLogExpected(self): - """Checks that hard clipping warning is raised when occurring.""" - logging.warning = mock.MagicMock(name='warning') - _ = input_mixer.ApmInputMixer.Mix( - self._tmp_path, - self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_2']['filepath']) - logging.warning.assert_called_once_with( - 
input_mixer.ApmInputMixer.HardClippingLogMessage()) - - def testHardClippingLogNotExpected(self): - """Checks that hard clipping warning is not raised when not occurring.""" - logging.warning = mock.MagicMock(name='warning') - _ = input_mixer.ApmInputMixer.Mix( - self._tmp_path, - self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_1']['filepath']) - self.assertNotIn( - mock.call(input_mixer.ApmInputMixer.HardClippingLogMessage()), - logging.warning.call_args_list) + # Audio track file names created in setUp(). + _FILENAMES = ['capture', 'echo_1', 'echo_2', 'shorter', 'longer'] + + # Target peak power level (dBFS) of each audio track file created in setUp(). + # These values are hand-crafted in order to make saturation happen when + # capture and echo_2 are mixed and the contrary for capture and echo_1. + # None means that the power is not changed. + _MAX_PEAK_POWER_LEVELS = [-10.0, -5.0, 0.0, None, None] + + # Audio track file durations in milliseconds. + _DURATIONS = [1000, 1000, 1000, 800, 1200] + + _SAMPLE_RATE = 48000 + + def setUp(self): + """Creates temporary data.""" + self._tmp_path = tempfile.mkdtemp() + + # Create audio track files. + self._audio_tracks = {} + for filename, peak_power, duration in zip(self._FILENAMES, + self._MAX_PEAK_POWER_LEVELS, + self._DURATIONS): + audio_track_filepath = os.path.join(self._tmp_path, + '{}.wav'.format(filename)) + + # Create a pure tone with the target peak power level. 
+ template = signal_processing.SignalProcessingUtils.GenerateSilence( + duration=duration, sample_rate=self._SAMPLE_RATE) + signal = signal_processing.SignalProcessingUtils.GeneratePureTone( + template) + if peak_power is not None: + signal = signal.apply_gain(-signal.max_dBFS + peak_power) + + signal_processing.SignalProcessingUtils.SaveWav( + audio_track_filepath, signal) + self._audio_tracks[filename] = { + 'filepath': + audio_track_filepath, + 'num_samples': + signal_processing.SignalProcessingUtils.CountSamples(signal) + } + + def tearDown(self): + """Recursively deletes temporary folders.""" + shutil.rmtree(self._tmp_path) + + def testCheckMixSameDuration(self): + """Checks the duration when mixing capture and echo with same duration.""" + mix_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_1']['filepath']) + self.assertTrue(os.path.exists(mix_filepath)) + + mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) + self.assertEqual( + self._audio_tracks['capture']['num_samples'], + signal_processing.SignalProcessingUtils.CountSamples(mix)) + + def testRejectShorterEcho(self): + """Rejects echo signals that are shorter than the capture signal.""" + try: + _ = input_mixer.ApmInputMixer.Mix( + self._tmp_path, self._audio_tracks['capture']['filepath'], + self._audio_tracks['shorter']['filepath']) + self.fail('no exception raised') + except exceptions.InputMixerException: + pass + + def testCheckMixDurationWithLongerEcho(self): + """Checks the duration when mixing an echo longer than the capture.""" + mix_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, self._audio_tracks['capture']['filepath'], + self._audio_tracks['longer']['filepath']) + self.assertTrue(os.path.exists(mix_filepath)) + + mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) + self.assertEqual( + self._audio_tracks['capture']['num_samples'], + 
signal_processing.SignalProcessingUtils.CountSamples(mix)) + + def testCheckOutputFileNamesConflict(self): + """Checks that different echo files lead to different output file names.""" + mix1_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_1']['filepath']) + self.assertTrue(os.path.exists(mix1_filepath)) + + mix2_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_2']['filepath']) + self.assertTrue(os.path.exists(mix2_filepath)) + + self.assertNotEqual(mix1_filepath, mix2_filepath) + + def testHardClippingLogExpected(self): + """Checks that hard clipping warning is raised when occurring.""" + logging.warning = mock.MagicMock(name='warning') + _ = input_mixer.ApmInputMixer.Mix( + self._tmp_path, self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_2']['filepath']) + logging.warning.assert_called_once_with( + input_mixer.ApmInputMixer.HardClippingLogMessage()) + + def testHardClippingLogNotExpected(self): + """Checks that hard clipping warning is not raised when not occurring.""" + logging.warning = mock.MagicMock(name='warning') + _ = input_mixer.ApmInputMixer.Mix( + self._tmp_path, self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_1']['filepath']) + self.assertNotIn( + mock.call(input_mixer.ApmInputMixer.HardClippingLogMessage()), + logging.warning.call_args_list) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py index 1feec47b4c..b64fdcca89 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py @@ -5,7 +5,6 @@ # tree. 
An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Input signal creator module. """ @@ -14,12 +13,12 @@ class InputSignalCreator(object): - """Input signal creator class. + """Input signal creator class. """ - @classmethod - def Create(cls, name, raw_params): - """Creates a input signal and its metadata. + @classmethod + def Create(cls, name, raw_params): + """Creates a input signal and its metadata. Args: name: Input signal creator name. @@ -28,29 +27,30 @@ def Create(cls, name, raw_params): Returns: (AudioSegment, dict) tuple. """ - try: - signal = {} - params = {} + try: + signal = {} + params = {} - if name == 'pure_tone': - params['frequency'] = float(raw_params[0]) - params['duration'] = int(raw_params[1]) - signal = cls._CreatePureTone(params['frequency'], params['duration']) - else: - raise exceptions.InputSignalCreatorException( - 'Invalid input signal creator name') + if name == 'pure_tone': + params['frequency'] = float(raw_params[0]) + params['duration'] = int(raw_params[1]) + signal = cls._CreatePureTone(params['frequency'], + params['duration']) + else: + raise exceptions.InputSignalCreatorException( + 'Invalid input signal creator name') - # Complete metadata. - params['signal'] = name + # Complete metadata. + params['signal'] = name - return signal, params - except (TypeError, AssertionError) as e: - raise exceptions.InputSignalCreatorException( - 'Invalid signal creator parameters: {}'.format(e)) + return signal, params + except (TypeError, AssertionError) as e: + raise exceptions.InputSignalCreatorException( + 'Invalid signal creator parameters: {}'.format(e)) - @classmethod - def _CreatePureTone(cls, frequency, duration): - """ + @classmethod + def _CreatePureTone(cls, frequency, duration): + """ Generates a pure tone at 48000 Hz. 
Args: @@ -60,8 +60,9 @@ def _CreatePureTone(cls, frequency, duration): Returns: AudioSegment instance. """ - assert 0 < frequency <= 24000 - assert duration > 0 - template = signal_processing.SignalProcessingUtils.GenerateSilence(duration) - return signal_processing.SignalProcessingUtils.GeneratePureTone( - template, frequency) + assert 0 < frequency <= 24000 + assert duration > 0 + template = signal_processing.SignalProcessingUtils.GenerateSilence( + duration) + return signal_processing.SignalProcessingUtils.GeneratePureTone( + template, frequency) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py index fd731fd19a..e41637cd8d 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Signal processing utility module. 
""" @@ -16,44 +15,44 @@ import enum try: - import numpy as np + import numpy as np except ImportError: - logging.critical('Cannot import the third-party Python package numpy') - sys.exit(1) + logging.critical('Cannot import the third-party Python package numpy') + sys.exit(1) try: - import pydub - import pydub.generators + import pydub + import pydub.generators except ImportError: - logging.critical('Cannot import the third-party Python package pydub') - sys.exit(1) + logging.critical('Cannot import the third-party Python package pydub') + sys.exit(1) try: - import scipy.signal - import scipy.fftpack + import scipy.signal + import scipy.fftpack except ImportError: - logging.critical('Cannot import the third-party Python package scipy') - sys.exit(1) + logging.critical('Cannot import the third-party Python package scipy') + sys.exit(1) from . import exceptions class SignalProcessingUtils(object): - """Collection of signal processing utilities. + """Collection of signal processing utilities. """ - @enum.unique - class MixPadding(enum.Enum): - NO_PADDING = 0 - ZERO_PADDING = 1 - LOOP = 2 + @enum.unique + class MixPadding(enum.Enum): + NO_PADDING = 0 + ZERO_PADDING = 1 + LOOP = 2 - def __init__(self): - pass + def __init__(self): + pass - @classmethod - def LoadWav(cls, filepath, channels=1): - """Loads wav file. + @classmethod + def LoadWav(cls, filepath, channels=1): + """Loads wav file. Args: filepath: path to the wav audio track file to load. @@ -62,25 +61,26 @@ def LoadWav(cls, filepath, channels=1): Returns: AudioSegment instance. 
""" - if not os.path.exists(filepath): - logging.error('cannot find the <%s> audio track file', filepath) - raise exceptions.FileNotFoundError() - return pydub.AudioSegment.from_file( - filepath, format='wav', channels=channels) + if not os.path.exists(filepath): + logging.error('cannot find the <%s> audio track file', filepath) + raise exceptions.FileNotFoundError() + return pydub.AudioSegment.from_file(filepath, + format='wav', + channels=channels) - @classmethod - def SaveWav(cls, output_filepath, signal): - """Saves wav file. + @classmethod + def SaveWav(cls, output_filepath, signal): + """Saves wav file. Args: output_filepath: path to the wav audio track file to save. signal: AudioSegment instance. """ - return signal.export(output_filepath, format='wav') + return signal.export(output_filepath, format='wav') - @classmethod - def CountSamples(cls, signal): - """Number of samples per channel. + @classmethod + def CountSamples(cls, signal): + """Number of samples per channel. Args: signal: AudioSegment instance. @@ -88,14 +88,14 @@ def CountSamples(cls, signal): Returns: An integer. """ - number_of_samples = len(signal.get_array_of_samples()) - assert signal.channels > 0 - assert number_of_samples % signal.channels == 0 - return number_of_samples / signal.channels + number_of_samples = len(signal.get_array_of_samples()) + assert signal.channels > 0 + assert number_of_samples % signal.channels == 0 + return number_of_samples / signal.channels - @classmethod - def GenerateSilence(cls, duration=1000, sample_rate=48000): - """Generates silence. + @classmethod + def GenerateSilence(cls, duration=1000, sample_rate=48000): + """Generates silence. This method can also be used to create a template AudioSegment instance. A template can then be used with other Generate*() methods accepting an @@ -108,11 +108,11 @@ def GenerateSilence(cls, duration=1000, sample_rate=48000): Returns: AudioSegment instance. 
""" - return pydub.AudioSegment.silent(duration, sample_rate) + return pydub.AudioSegment.silent(duration, sample_rate) - @classmethod - def GeneratePureTone(cls, template, frequency=440.0): - """Generates a pure tone. + @classmethod + def GeneratePureTone(cls, template, frequency=440.0): + """Generates a pure tone. The pure tone is generated with the same duration and in the same format of the given template signal. @@ -124,21 +124,18 @@ def GeneratePureTone(cls, template, frequency=440.0): Return: AudioSegment instance. """ - if frequency > template.frame_rate >> 1: - raise exceptions.SignalProcessingException('Invalid frequency') + if frequency > template.frame_rate >> 1: + raise exceptions.SignalProcessingException('Invalid frequency') - generator = pydub.generators.Sine( - sample_rate=template.frame_rate, - bit_depth=template.sample_width * 8, - freq=frequency) + generator = pydub.generators.Sine(sample_rate=template.frame_rate, + bit_depth=template.sample_width * 8, + freq=frequency) - return generator.to_audio_segment( - duration=len(template), - volume=0.0) + return generator.to_audio_segment(duration=len(template), volume=0.0) - @classmethod - def GenerateWhiteNoise(cls, template): - """Generates white noise. + @classmethod + def GenerateWhiteNoise(cls, template): + """Generates white noise. The white noise is generated with the same duration and in the same format of the given template signal. @@ -149,33 +146,32 @@ def GenerateWhiteNoise(cls, template): Return: AudioSegment instance. 
""" - generator = pydub.generators.WhiteNoise( - sample_rate=template.frame_rate, - bit_depth=template.sample_width * 8) - return generator.to_audio_segment( - duration=len(template), - volume=0.0) - - @classmethod - def AudioSegmentToRawData(cls, signal): - samples = signal.get_array_of_samples() - if samples.typecode != 'h': - raise exceptions.SignalProcessingException('Unsupported samples type') - return np.array(signal.get_array_of_samples(), np.int16) - - @classmethod - def Fft(cls, signal, normalize=True): - if signal.channels != 1: - raise NotImplementedError('multiple-channel FFT not implemented') - x = cls.AudioSegmentToRawData(signal).astype(np.float32) - if normalize: - x /= max(abs(np.max(x)), 1.0) - y = scipy.fftpack.fft(x) - return y[:len(y) / 2] - - @classmethod - def DetectHardClipping(cls, signal, threshold=2): - """Detects hard clipping. + generator = pydub.generators.WhiteNoise( + sample_rate=template.frame_rate, + bit_depth=template.sample_width * 8) + return generator.to_audio_segment(duration=len(template), volume=0.0) + + @classmethod + def AudioSegmentToRawData(cls, signal): + samples = signal.get_array_of_samples() + if samples.typecode != 'h': + raise exceptions.SignalProcessingException( + 'Unsupported samples type') + return np.array(signal.get_array_of_samples(), np.int16) + + @classmethod + def Fft(cls, signal, normalize=True): + if signal.channels != 1: + raise NotImplementedError('multiple-channel FFT not implemented') + x = cls.AudioSegmentToRawData(signal).astype(np.float32) + if normalize: + x /= max(abs(np.max(x)), 1.0) + y = scipy.fftpack.fft(x) + return y[:len(y) / 2] + + @classmethod + def DetectHardClipping(cls, signal, threshold=2): + """Detects hard clipping. Hard clipping is simply detected by counting samples that touch either the lower or upper bound too many times in a row (according to |threshold|). 
@@ -189,32 +185,33 @@ def DetectHardClipping(cls, signal, threshold=2): Returns: True if hard clipping is detect, False otherwise. """ - if signal.channels != 1: - raise NotImplementedError('multiple-channel clipping not implemented') - if signal.sample_width != 2: # Note that signal.sample_width is in bytes. - raise exceptions.SignalProcessingException( - 'hard-clipping detection only supported for 16 bit samples') - samples = cls.AudioSegmentToRawData(signal) - - # Detect adjacent clipped samples. - samples_type_info = np.iinfo(samples.dtype) - mask_min = samples == samples_type_info.min - mask_max = samples == samples_type_info.max - - def HasLongSequence(vector, min_legth=threshold): - """Returns True if there are one or more long sequences of True flags.""" - seq_length = 0 - for b in vector: - seq_length = seq_length + 1 if b else 0 - if seq_length >= min_legth: - return True - return False - - return HasLongSequence(mask_min) or HasLongSequence(mask_max) - - @classmethod - def ApplyImpulseResponse(cls, signal, impulse_response): - """Applies an impulse response to a signal. + if signal.channels != 1: + raise NotImplementedError( + 'multiple-channel clipping not implemented') + if signal.sample_width != 2: # Note that signal.sample_width is in bytes. + raise exceptions.SignalProcessingException( + 'hard-clipping detection only supported for 16 bit samples') + samples = cls.AudioSegmentToRawData(signal) + + # Detect adjacent clipped samples. 
+ samples_type_info = np.iinfo(samples.dtype) + mask_min = samples == samples_type_info.min + mask_max = samples == samples_type_info.max + + def HasLongSequence(vector, min_legth=threshold): + """Returns True if there are one or more long sequences of True flags.""" + seq_length = 0 + for b in vector: + seq_length = seq_length + 1 if b else 0 + if seq_length >= min_legth: + return True + return False + + return HasLongSequence(mask_min) or HasLongSequence(mask_max) + + @classmethod + def ApplyImpulseResponse(cls, signal, impulse_response): + """Applies an impulse response to a signal. Args: signal: AudioSegment instance. @@ -223,44 +220,48 @@ def ApplyImpulseResponse(cls, signal, impulse_response): Returns: AudioSegment instance. """ - # Get samples. - assert signal.channels == 1, ( - 'multiple-channel recordings not supported') - samples = signal.get_array_of_samples() - - # Convolve. - logging.info('applying %d order impulse response to a signal lasting %d ms', - len(impulse_response), len(signal)) - convolved_samples = scipy.signal.fftconvolve( - in1=samples, - in2=impulse_response, - mode='full').astype(np.int16) - logging.info('convolution computed') - - # Cast. - convolved_samples = array.array(signal.array_type, convolved_samples) - - # Verify. - logging.debug('signal length: %d samples', len(samples)) - logging.debug('convolved signal length: %d samples', len(convolved_samples)) - assert len(convolved_samples) > len(samples) - - # Generate convolved signal AudioSegment instance. - convolved_signal = pydub.AudioSegment( - data=convolved_samples, - metadata={ - 'sample_width': signal.sample_width, - 'frame_rate': signal.frame_rate, - 'frame_width': signal.frame_width, - 'channels': signal.channels, - }) - assert len(convolved_signal) > len(signal) - - return convolved_signal - - @classmethod - def Normalize(cls, signal): - """Normalizes a signal. + # Get samples. 
+ assert signal.channels == 1, ( + 'multiple-channel recordings not supported') + samples = signal.get_array_of_samples() + + # Convolve. + logging.info( + 'applying %d order impulse response to a signal lasting %d ms', + len(impulse_response), len(signal)) + convolved_samples = scipy.signal.fftconvolve(in1=samples, + in2=impulse_response, + mode='full').astype( + np.int16) + logging.info('convolution computed') + + # Cast. + convolved_samples = array.array(signal.array_type, convolved_samples) + + # Verify. + logging.debug('signal length: %d samples', len(samples)) + logging.debug('convolved signal length: %d samples', + len(convolved_samples)) + assert len(convolved_samples) > len(samples) + + # Generate convolved signal AudioSegment instance. + convolved_signal = pydub.AudioSegment(data=convolved_samples, + metadata={ + 'sample_width': + signal.sample_width, + 'frame_rate': + signal.frame_rate, + 'frame_width': + signal.frame_width, + 'channels': signal.channels, + }) + assert len(convolved_signal) > len(signal) + + return convolved_signal + + @classmethod + def Normalize(cls, signal): + """Normalizes a signal. Args: signal: AudioSegment instance. @@ -268,11 +269,11 @@ def Normalize(cls, signal): Returns: An AudioSegment instance. """ - return signal.apply_gain(-signal.max_dBFS) + return signal.apply_gain(-signal.max_dBFS) - @classmethod - def Copy(cls, signal): - """Makes a copy os a signal. + @classmethod + def Copy(cls, signal): + """Makes a copy os a signal. Args: signal: AudioSegment instance. @@ -280,19 +281,21 @@ def Copy(cls, signal): Returns: An AudioSegment instance. """ - return pydub.AudioSegment( - data=signal.get_array_of_samples(), - metadata={ - 'sample_width': signal.sample_width, - 'frame_rate': signal.frame_rate, - 'frame_width': signal.frame_width, - 'channels': signal.channels, - }) - - @classmethod - def MixSignals(cls, signal, noise, target_snr=0.0, - pad_noise=MixPadding.NO_PADDING): - """Mixes |signal| and |noise| with a target SNR. 
+ return pydub.AudioSegment(data=signal.get_array_of_samples(), + metadata={ + 'sample_width': signal.sample_width, + 'frame_rate': signal.frame_rate, + 'frame_width': signal.frame_width, + 'channels': signal.channels, + }) + + @classmethod + def MixSignals(cls, + signal, + noise, + target_snr=0.0, + pad_noise=MixPadding.NO_PADDING): + """Mixes |signal| and |noise| with a target SNR. Mix |signal| and |noise| with a desired SNR by scaling |noise|. If the target SNR is +/- infinite, a copy of signal/noise is returned. @@ -312,45 +315,45 @@ def MixSignals(cls, signal, noise, target_snr=0.0, Returns: An AudioSegment instance. """ - # Handle infinite target SNR. - if target_snr == -np.Inf: - # Return a copy of noise. - logging.warning('SNR = -Inf, returning noise') - return cls.Copy(noise) - elif target_snr == np.Inf: - # Return a copy of signal. - logging.warning('SNR = +Inf, returning signal') - return cls.Copy(signal) - - # Check signal and noise power. - signal_power = float(signal.dBFS) - noise_power = float(noise.dBFS) - if signal_power == -np.Inf: - logging.error('signal has -Inf power, cannot mix') - raise exceptions.SignalProcessingException( - 'cannot mix a signal with -Inf power') - if noise_power == -np.Inf: - logging.error('noise has -Inf power, cannot mix') - raise exceptions.SignalProcessingException( - 'cannot mix a signal with -Inf power') - - # Mix. - gain_db = signal_power - noise_power - target_snr - signal_duration = len(signal) - noise_duration = len(noise) - if signal_duration <= noise_duration: - # Ignore |pad_noise|, |noise| is truncated if longer that |signal|, the - # mix will have the same length of |signal|. - return signal.overlay(noise.apply_gain(gain_db)) - elif pad_noise == cls.MixPadding.NO_PADDING: - # |signal| is longer than |noise|, but no padding is applied to |noise|. - # Truncate |signal|. 
- return noise.overlay(signal, gain_during_overlay=gain_db) - elif pad_noise == cls.MixPadding.ZERO_PADDING: - # TODO(alessiob): Check that this works as expected. - return signal.overlay(noise.apply_gain(gain_db)) - elif pad_noise == cls.MixPadding.LOOP: - # |signal| is longer than |noise|, extend |noise| by looping. - return signal.overlay(noise.apply_gain(gain_db), loop=True) - else: - raise exceptions.SignalProcessingException('invalid padding type') + # Handle infinite target SNR. + if target_snr == -np.Inf: + # Return a copy of noise. + logging.warning('SNR = -Inf, returning noise') + return cls.Copy(noise) + elif target_snr == np.Inf: + # Return a copy of signal. + logging.warning('SNR = +Inf, returning signal') + return cls.Copy(signal) + + # Check signal and noise power. + signal_power = float(signal.dBFS) + noise_power = float(noise.dBFS) + if signal_power == -np.Inf: + logging.error('signal has -Inf power, cannot mix') + raise exceptions.SignalProcessingException( + 'cannot mix a signal with -Inf power') + if noise_power == -np.Inf: + logging.error('noise has -Inf power, cannot mix') + raise exceptions.SignalProcessingException( + 'cannot mix a signal with -Inf power') + + # Mix. + gain_db = signal_power - noise_power - target_snr + signal_duration = len(signal) + noise_duration = len(noise) + if signal_duration <= noise_duration: + # Ignore |pad_noise|, |noise| is truncated if longer that |signal|, the + # mix will have the same length of |signal|. + return signal.overlay(noise.apply_gain(gain_db)) + elif pad_noise == cls.MixPadding.NO_PADDING: + # |signal| is longer than |noise|, but no padding is applied to |noise|. + # Truncate |signal|. + return noise.overlay(signal, gain_during_overlay=gain_db) + elif pad_noise == cls.MixPadding.ZERO_PADDING: + # TODO(alessiob): Check that this works as expected. 
+ return signal.overlay(noise.apply_gain(gain_db)) + elif pad_noise == cls.MixPadding.LOOP: + # |signal| is longer than |noise|, extend |noise| by looping. + return signal.overlay(noise.apply_gain(gain_db), loop=True) + else: + raise exceptions.SignalProcessingException('invalid padding type') diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing_unittest.py index 30ada41fb9..881fb66800 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing_unittest.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing_unittest.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Unit tests for the signal_processing module. """ @@ -19,168 +18,166 @@ class TestSignalProcessing(unittest.TestCase): - """Unit tests for the signal_processing module. + """Unit tests for the signal_processing module. """ - def testMixSignals(self): - # Generate a template signal with which white noise can be generated. - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - - # Generate two distinct AudioSegment instances with 1 second of white noise. - signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - noise = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - - # Extract samples. - signal_samples = signal.get_array_of_samples() - noise_samples = noise.get_array_of_samples() - - # Test target SNR -Inf (noise expected). - mix_neg_inf = signal_processing.SignalProcessingUtils.MixSignals( - signal, noise, -np.Inf) - self.assertTrue(len(noise), len(mix_neg_inf)) # Check duration. 
- mix_neg_inf_samples = mix_neg_inf.get_array_of_samples() - self.assertTrue( # Check samples. - all([x == y for x, y in zip(noise_samples, mix_neg_inf_samples)])) - - # Test target SNR 0.0 (different data expected). - mix_0 = signal_processing.SignalProcessingUtils.MixSignals( - signal, noise, 0.0) - self.assertTrue(len(signal), len(mix_0)) # Check duration. - self.assertTrue(len(noise), len(mix_0)) - mix_0_samples = mix_0.get_array_of_samples() - self.assertTrue( - any([x != y for x, y in zip(signal_samples, mix_0_samples)])) - self.assertTrue( - any([x != y for x, y in zip(noise_samples, mix_0_samples)])) - - # Test target SNR +Inf (signal expected). - mix_pos_inf = signal_processing.SignalProcessingUtils.MixSignals( - signal, noise, np.Inf) - self.assertTrue(len(signal), len(mix_pos_inf)) # Check duration. - mix_pos_inf_samples = mix_pos_inf.get_array_of_samples() - self.assertTrue( # Check samples. - all([x == y for x, y in zip(signal_samples, mix_pos_inf_samples)])) - - def testMixSignalsMinInfPower(self): - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - - with self.assertRaises(exceptions.SignalProcessingException): - _ = signal_processing.SignalProcessingUtils.MixSignals( - signal, silence, 0.0) - - with self.assertRaises(exceptions.SignalProcessingException): - _ = signal_processing.SignalProcessingUtils.MixSignals( - silence, signal, 0.0) - - def testMixSignalNoiseDifferentLengths(self): - # Test signals. - shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - pydub.AudioSegment.silent(duration=1000, frame_rate=8000)) - longer = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - pydub.AudioSegment.silent(duration=2000, frame_rate=8000)) - - # When the signal is shorter than the noise, the mix length always equals - # that of the signal regardless of whether padding is applied. 
- # No noise padding, length of signal less than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=shorter, - noise=longer, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.NO_PADDING) - self.assertEqual(len(shorter), len(mix)) - # With noise padding, length of signal less than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=shorter, - noise=longer, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.ZERO_PADDING) - self.assertEqual(len(shorter), len(mix)) - - # When the signal is longer than the noise, the mix length depends on - # whether padding is applied. - # No noise padding, length of signal greater than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.NO_PADDING) - self.assertEqual(len(shorter), len(mix)) - # With noise padding, length of signal greater than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.ZERO_PADDING) - self.assertEqual(len(longer), len(mix)) - - def testMixSignalNoisePaddingTypes(self): - # Test signals. - shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - pydub.AudioSegment.silent(duration=1000, frame_rate=8000)) - longer = signal_processing.SignalProcessingUtils.GeneratePureTone( - pydub.AudioSegment.silent(duration=2000, frame_rate=8000), 440.0) - - # Zero padding: expect pure tone only in 1-2s. - mix_zero_pad = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - target_snr=-6, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.ZERO_PADDING) - - # Loop: expect pure tone plus noise in 1-2s. 
- mix_loop = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - target_snr=-6, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.LOOP) - - def Energy(signal): - samples = signal_processing.SignalProcessingUtils.AudioSegmentToRawData( - signal).astype(np.float32) - return np.sum(samples * samples) - - e_mix_zero_pad = Energy(mix_zero_pad[-1000:]) - e_mix_loop = Energy(mix_loop[-1000:]) - self.assertLess(0, e_mix_zero_pad) - self.assertLess(e_mix_zero_pad, e_mix_loop) - - def testMixSignalSnr(self): - # Test signals. - tone_low = signal_processing.SignalProcessingUtils.GeneratePureTone( - pydub.AudioSegment.silent(duration=64, frame_rate=8000), 250.0) - tone_high = signal_processing.SignalProcessingUtils.GeneratePureTone( - pydub.AudioSegment.silent(duration=64, frame_rate=8000), 3000.0) - - def ToneAmplitudes(mix): - """Returns the amplitude of the coefficients #16 and #192, which + def testMixSignals(self): + # Generate a template signal with which white noise can be generated. + silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) + + # Generate two distinct AudioSegment instances with 1 second of white noise. + signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + silence) + noise = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + silence) + + # Extract samples. + signal_samples = signal.get_array_of_samples() + noise_samples = noise.get_array_of_samples() + + # Test target SNR -Inf (noise expected). + mix_neg_inf = signal_processing.SignalProcessingUtils.MixSignals( + signal, noise, -np.Inf) + self.assertTrue(len(noise), len(mix_neg_inf)) # Check duration. + mix_neg_inf_samples = mix_neg_inf.get_array_of_samples() + self.assertTrue( # Check samples. + all([x == y for x, y in zip(noise_samples, mix_neg_inf_samples)])) + + # Test target SNR 0.0 (different data expected). 
+ mix_0 = signal_processing.SignalProcessingUtils.MixSignals( + signal, noise, 0.0) + self.assertTrue(len(signal), len(mix_0)) # Check duration. + self.assertTrue(len(noise), len(mix_0)) + mix_0_samples = mix_0.get_array_of_samples() + self.assertTrue( + any([x != y for x, y in zip(signal_samples, mix_0_samples)])) + self.assertTrue( + any([x != y for x, y in zip(noise_samples, mix_0_samples)])) + + # Test target SNR +Inf (signal expected). + mix_pos_inf = signal_processing.SignalProcessingUtils.MixSignals( + signal, noise, np.Inf) + self.assertTrue(len(signal), len(mix_pos_inf)) # Check duration. + mix_pos_inf_samples = mix_pos_inf.get_array_of_samples() + self.assertTrue( # Check samples. + all([x == y for x, y in zip(signal_samples, mix_pos_inf_samples)])) + + def testMixSignalsMinInfPower(self): + silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) + signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + silence) + + with self.assertRaises(exceptions.SignalProcessingException): + _ = signal_processing.SignalProcessingUtils.MixSignals( + signal, silence, 0.0) + + with self.assertRaises(exceptions.SignalProcessingException): + _ = signal_processing.SignalProcessingUtils.MixSignals( + silence, signal, 0.0) + + def testMixSignalNoiseDifferentLengths(self): + # Test signals. + shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + pydub.AudioSegment.silent(duration=1000, frame_rate=8000)) + longer = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + pydub.AudioSegment.silent(duration=2000, frame_rate=8000)) + + # When the signal is shorter than the noise, the mix length always equals + # that of the signal regardless of whether padding is applied. + # No noise padding, length of signal less than that of noise. + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=shorter, + noise=longer, + pad_noise=signal_processing.SignalProcessingUtils.MixPadding. 
+ NO_PADDING) + self.assertEqual(len(shorter), len(mix)) + # With noise padding, length of signal less than that of noise. + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=shorter, + noise=longer, + pad_noise=signal_processing.SignalProcessingUtils.MixPadding. + ZERO_PADDING) + self.assertEqual(len(shorter), len(mix)) + + # When the signal is longer than the noise, the mix length depends on + # whether padding is applied. + # No noise padding, length of signal greater than that of noise. + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=longer, + noise=shorter, + pad_noise=signal_processing.SignalProcessingUtils.MixPadding. + NO_PADDING) + self.assertEqual(len(shorter), len(mix)) + # With noise padding, length of signal greater than that of noise. + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=longer, + noise=shorter, + pad_noise=signal_processing.SignalProcessingUtils.MixPadding. + ZERO_PADDING) + self.assertEqual(len(longer), len(mix)) + + def testMixSignalNoisePaddingTypes(self): + # Test signals. + shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + pydub.AudioSegment.silent(duration=1000, frame_rate=8000)) + longer = signal_processing.SignalProcessingUtils.GeneratePureTone( + pydub.AudioSegment.silent(duration=2000, frame_rate=8000), 440.0) + + # Zero padding: expect pure tone only in 1-2s. + mix_zero_pad = signal_processing.SignalProcessingUtils.MixSignals( + signal=longer, + noise=shorter, + target_snr=-6, + pad_noise=signal_processing.SignalProcessingUtils.MixPadding. + ZERO_PADDING) + + # Loop: expect pure tone plus noise in 1-2s. 
+ mix_loop = signal_processing.SignalProcessingUtils.MixSignals( + signal=longer, + noise=shorter, + target_snr=-6, + pad_noise=signal_processing.SignalProcessingUtils.MixPadding.LOOP) + + def Energy(signal): + samples = signal_processing.SignalProcessingUtils.AudioSegmentToRawData( + signal).astype(np.float32) + return np.sum(samples * samples) + + e_mix_zero_pad = Energy(mix_zero_pad[-1000:]) + e_mix_loop = Energy(mix_loop[-1000:]) + self.assertLess(0, e_mix_zero_pad) + self.assertLess(e_mix_zero_pad, e_mix_loop) + + def testMixSignalSnr(self): + # Test signals. + tone_low = signal_processing.SignalProcessingUtils.GeneratePureTone( + pydub.AudioSegment.silent(duration=64, frame_rate=8000), 250.0) + tone_high = signal_processing.SignalProcessingUtils.GeneratePureTone( + pydub.AudioSegment.silent(duration=64, frame_rate=8000), 3000.0) + + def ToneAmplitudes(mix): + """Returns the amplitude of the coefficients #16 and #192, which correspond to the tones at 250 and 3k Hz respectively.""" - mix_fft = np.absolute(signal_processing.SignalProcessingUtils.Fft(mix)) - return mix_fft[16], mix_fft[192] - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_low, - noise=tone_high, - target_snr=-6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_low, ampl_high) - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_high, - noise=tone_low, - target_snr=-6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_high, ampl_low) - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_low, - noise=tone_high, - target_snr=6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_high, ampl_low) - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_high, - noise=tone_low, - target_snr=6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_low, ampl_high) + mix_fft = np.absolute( + signal_processing.SignalProcessingUtils.Fft(mix)) + return 
mix_fft[16], mix_fft[192] + + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=tone_low, noise=tone_high, target_snr=-6) + ampl_low, ampl_high = ToneAmplitudes(mix) + self.assertLess(ampl_low, ampl_high) + + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=tone_high, noise=tone_low, target_snr=-6) + ampl_low, ampl_high = ToneAmplitudes(mix) + self.assertLess(ampl_high, ampl_low) + + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=tone_low, noise=tone_high, target_snr=6) + ampl_low, ampl_high = ToneAmplitudes(mix) + self.assertLess(ampl_high, ampl_low) + + mix = signal_processing.SignalProcessingUtils.MixSignals( + signal=tone_high, noise=tone_low, target_snr=6) + ampl_low, ampl_high = ToneAmplitudes(mix) + self.assertLess(ampl_low, ampl_high) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation.py index 37db2efc27..fe30c9c44c 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """APM module simulator. """ @@ -25,85 +24,93 @@ class ApmModuleSimulator(object): - """Audio processing module (APM) simulator class. + """Audio processing module (APM) simulator class. 
""" - _TEST_DATA_GENERATOR_CLASSES = ( - test_data_generation.TestDataGenerator.REGISTERED_CLASSES) - _EVAL_SCORE_WORKER_CLASSES = eval_scores.EvaluationScore.REGISTERED_CLASSES - - _PREFIX_APM_CONFIG = 'apmcfg-' - _PREFIX_CAPTURE = 'capture-' - _PREFIX_RENDER = 'render-' - _PREFIX_ECHO_SIMULATOR = 'echosim-' - _PREFIX_TEST_DATA_GEN = 'datagen-' - _PREFIX_TEST_DATA_GEN_PARAMS = 'datagen_params-' - _PREFIX_SCORE = 'score-' - - def __init__(self, test_data_generator_factory, evaluation_score_factory, - ap_wrapper, evaluator, external_vads=None): - if external_vads is None: - external_vads = {} - self._test_data_generator_factory = test_data_generator_factory - self._evaluation_score_factory = evaluation_score_factory - self._audioproc_wrapper = ap_wrapper - self._evaluator = evaluator - self._annotator = annotations.AudioAnnotationsExtractor( - annotations.AudioAnnotationsExtractor.VadType.ENERGY_THRESHOLD | - annotations.AudioAnnotationsExtractor.VadType.WEBRTC_COMMON_AUDIO | - annotations.AudioAnnotationsExtractor.VadType.WEBRTC_APM, - external_vads - ) - - # Init. - self._test_data_generator_factory.SetOutputDirectoryPrefix( - self._PREFIX_TEST_DATA_GEN_PARAMS) - self._evaluation_score_factory.SetScoreFilenamePrefix( - self._PREFIX_SCORE) - - # Properties for each run. 
- self._base_output_path = None - self._output_cache_path = None - self._test_data_generators = None - self._evaluation_score_workers = None - self._config_filepaths = None - self._capture_input_filepaths = None - self._render_input_filepaths = None - self._echo_path_simulator_class = None - - @classmethod - def GetPrefixApmConfig(cls): - return cls._PREFIX_APM_CONFIG - - @classmethod - def GetPrefixCapture(cls): - return cls._PREFIX_CAPTURE - - @classmethod - def GetPrefixRender(cls): - return cls._PREFIX_RENDER - - @classmethod - def GetPrefixEchoSimulator(cls): - return cls._PREFIX_ECHO_SIMULATOR - - @classmethod - def GetPrefixTestDataGenerator(cls): - return cls._PREFIX_TEST_DATA_GEN - - @classmethod - def GetPrefixTestDataGeneratorParameters(cls): - return cls._PREFIX_TEST_DATA_GEN_PARAMS - - @classmethod - def GetPrefixScore(cls): - return cls._PREFIX_SCORE - - def Run(self, config_filepaths, capture_input_filepaths, - test_data_generator_names, eval_score_names, output_dir, - render_input_filepaths=None, echo_path_simulator_name=( - echo_path_simulation.NoEchoPathSimulator.NAME)): - """Runs the APM simulation. 
+ _TEST_DATA_GENERATOR_CLASSES = ( + test_data_generation.TestDataGenerator.REGISTERED_CLASSES) + _EVAL_SCORE_WORKER_CLASSES = eval_scores.EvaluationScore.REGISTERED_CLASSES + + _PREFIX_APM_CONFIG = 'apmcfg-' + _PREFIX_CAPTURE = 'capture-' + _PREFIX_RENDER = 'render-' + _PREFIX_ECHO_SIMULATOR = 'echosim-' + _PREFIX_TEST_DATA_GEN = 'datagen-' + _PREFIX_TEST_DATA_GEN_PARAMS = 'datagen_params-' + _PREFIX_SCORE = 'score-' + + def __init__(self, + test_data_generator_factory, + evaluation_score_factory, + ap_wrapper, + evaluator, + external_vads=None): + if external_vads is None: + external_vads = {} + self._test_data_generator_factory = test_data_generator_factory + self._evaluation_score_factory = evaluation_score_factory + self._audioproc_wrapper = ap_wrapper + self._evaluator = evaluator + self._annotator = annotations.AudioAnnotationsExtractor( + annotations.AudioAnnotationsExtractor.VadType.ENERGY_THRESHOLD + | annotations.AudioAnnotationsExtractor.VadType.WEBRTC_COMMON_AUDIO + | annotations.AudioAnnotationsExtractor.VadType.WEBRTC_APM, + external_vads) + + # Init. + self._test_data_generator_factory.SetOutputDirectoryPrefix( + self._PREFIX_TEST_DATA_GEN_PARAMS) + self._evaluation_score_factory.SetScoreFilenamePrefix( + self._PREFIX_SCORE) + + # Properties for each run. 
+ self._base_output_path = None + self._output_cache_path = None + self._test_data_generators = None + self._evaluation_score_workers = None + self._config_filepaths = None + self._capture_input_filepaths = None + self._render_input_filepaths = None + self._echo_path_simulator_class = None + + @classmethod + def GetPrefixApmConfig(cls): + return cls._PREFIX_APM_CONFIG + + @classmethod + def GetPrefixCapture(cls): + return cls._PREFIX_CAPTURE + + @classmethod + def GetPrefixRender(cls): + return cls._PREFIX_RENDER + + @classmethod + def GetPrefixEchoSimulator(cls): + return cls._PREFIX_ECHO_SIMULATOR + + @classmethod + def GetPrefixTestDataGenerator(cls): + return cls._PREFIX_TEST_DATA_GEN + + @classmethod + def GetPrefixTestDataGeneratorParameters(cls): + return cls._PREFIX_TEST_DATA_GEN_PARAMS + + @classmethod + def GetPrefixScore(cls): + return cls._PREFIX_SCORE + + def Run(self, + config_filepaths, + capture_input_filepaths, + test_data_generator_names, + eval_score_names, + output_dir, + render_input_filepaths=None, + echo_path_simulator_name=( + echo_path_simulation.NoEchoPathSimulator.NAME)): + """Runs the APM simulation. Initializes paths and required instances, then runs all the simulations. The render input can be optionally added. If added, the number of capture @@ -120,132 +127,140 @@ def Run(self, config_filepaths, capture_input_filepaths, echo_path_simulator_name: name of the echo path simulator to use when render input is provided. """ - assert render_input_filepaths is None or ( - len(capture_input_filepaths) == len(render_input_filepaths)), ( - 'render input set size not matching input set size') - assert render_input_filepaths is None or echo_path_simulator_name in ( - echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES), ( - 'invalid echo path simulator') - self._base_output_path = os.path.abspath(output_dir) - - # Output path used to cache the data shared across simulations. 
- self._output_cache_path = os.path.join(self._base_output_path, '_cache') - - # Instance test data generators. - self._test_data_generators = [self._test_data_generator_factory.GetInstance( - test_data_generators_class=( - self._TEST_DATA_GENERATOR_CLASSES[name])) for name in ( - test_data_generator_names)] - - # Instance evaluation score workers. - self._evaluation_score_workers = [ - self._evaluation_score_factory.GetInstance( - evaluation_score_class=self._EVAL_SCORE_WORKER_CLASSES[name]) for ( - name) in eval_score_names] - - # Set APM configuration file paths. - self._config_filepaths = self._CreatePathsCollection(config_filepaths) - - # Set probing signal file paths. - if render_input_filepaths is None: - # Capture input only. - self._capture_input_filepaths = self._CreatePathsCollection( - capture_input_filepaths) - self._render_input_filepaths = None - else: - # Set both capture and render input signals. - self._SetTestInputSignalFilePaths( - capture_input_filepaths, render_input_filepaths) - - # Set the echo path simulator class. - self._echo_path_simulator_class = ( - echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES[ - echo_path_simulator_name]) - - self._SimulateAll() - - def _SimulateAll(self): - """Runs all the simulations. + assert render_input_filepaths is None or ( + len(capture_input_filepaths) == len(render_input_filepaths)), ( + 'render input set size not matching input set size') + assert render_input_filepaths is None or echo_path_simulator_name in ( + echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES), ( + 'invalid echo path simulator') + self._base_output_path = os.path.abspath(output_dir) + + # Output path used to cache the data shared across simulations. + self._output_cache_path = os.path.join(self._base_output_path, + '_cache') + + # Instance test data generators. 
+ self._test_data_generators = [ + self._test_data_generator_factory.GetInstance( + test_data_generators_class=( + self._TEST_DATA_GENERATOR_CLASSES[name])) + for name in (test_data_generator_names) + ] + + # Instance evaluation score workers. + self._evaluation_score_workers = [ + self._evaluation_score_factory.GetInstance( + evaluation_score_class=self._EVAL_SCORE_WORKER_CLASSES[name]) + for (name) in eval_score_names + ] + + # Set APM configuration file paths. + self._config_filepaths = self._CreatePathsCollection(config_filepaths) + + # Set probing signal file paths. + if render_input_filepaths is None: + # Capture input only. + self._capture_input_filepaths = self._CreatePathsCollection( + capture_input_filepaths) + self._render_input_filepaths = None + else: + # Set both capture and render input signals. + self._SetTestInputSignalFilePaths(capture_input_filepaths, + render_input_filepaths) + + # Set the echo path simulator class. + self._echo_path_simulator_class = ( + echo_path_simulation.EchoPathSimulator. + REGISTERED_CLASSES[echo_path_simulator_name]) + + self._SimulateAll() + + def _SimulateAll(self): + """Runs all the simulations. Iterates over the combinations of APM configurations, probing signals, and test data generators. This method is mainly responsible for the creation of the cache and output directories required in order to call _Simulate(). """ - without_render_input = self._render_input_filepaths is None - - # Try different APM config files. - for config_name in self._config_filepaths: - config_filepath = self._config_filepaths[config_name] - - # Try different capture-render pairs. - for capture_input_name in self._capture_input_filepaths: - # Output path for the capture signal annotations. - capture_annotations_cache_path = os.path.join( - self._output_cache_path, - self._PREFIX_CAPTURE + capture_input_name) - data_access.MakeDirectory(capture_annotations_cache_path) - - # Capture. 
- capture_input_filepath = self._capture_input_filepaths[ - capture_input_name] - if not os.path.exists(capture_input_filepath): - # If the input signal file does not exist, try to create using the - # available input signal creators. - self._CreateInputSignal(capture_input_filepath) - assert os.path.exists(capture_input_filepath) - self._ExtractCaptureAnnotations( - capture_input_filepath, capture_annotations_cache_path) - - # Render and simulated echo path (optional). - render_input_filepath = None if without_render_input else ( - self._render_input_filepaths[capture_input_name]) - render_input_name = '(none)' if without_render_input else ( - self._ExtractFileName(render_input_filepath)) - echo_path_simulator = ( - echo_path_simulation_factory.EchoPathSimulatorFactory.GetInstance( - self._echo_path_simulator_class, render_input_filepath)) - - # Try different test data generators. - for test_data_generators in self._test_data_generators: - logging.info('APM config preset: <%s>, capture: <%s>, render: <%s>,' - 'test data generator: <%s>, echo simulator: <%s>', - config_name, capture_input_name, render_input_name, - test_data_generators.NAME, echo_path_simulator.NAME) - - # Output path for the generated test data. - test_data_cache_path = os.path.join( - capture_annotations_cache_path, - self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME) - data_access.MakeDirectory(test_data_cache_path) - logging.debug('test data cache path: <%s>', test_data_cache_path) - - # Output path for the echo simulator and APM input mixer output. - echo_test_data_cache_path = os.path.join( - test_data_cache_path, 'echosim-{}'.format( - echo_path_simulator.NAME)) - data_access.MakeDirectory(echo_test_data_cache_path) - logging.debug('echo test data cache path: <%s>', - echo_test_data_cache_path) - - # Full output path. 
- output_path = os.path.join( - self._base_output_path, - self._PREFIX_APM_CONFIG + config_name, - self._PREFIX_CAPTURE + capture_input_name, - self._PREFIX_RENDER + render_input_name, - self._PREFIX_ECHO_SIMULATOR + echo_path_simulator.NAME, - self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME) - data_access.MakeDirectory(output_path) - logging.debug('output path: <%s>', output_path) - - self._Simulate(test_data_generators, capture_input_filepath, - render_input_filepath, test_data_cache_path, - echo_test_data_cache_path, output_path, - config_filepath, echo_path_simulator) - - @staticmethod - def _CreateInputSignal(input_signal_filepath): - """Creates a missing input signal file. + without_render_input = self._render_input_filepaths is None + + # Try different APM config files. + for config_name in self._config_filepaths: + config_filepath = self._config_filepaths[config_name] + + # Try different capture-render pairs. + for capture_input_name in self._capture_input_filepaths: + # Output path for the capture signal annotations. + capture_annotations_cache_path = os.path.join( + self._output_cache_path, + self._PREFIX_CAPTURE + capture_input_name) + data_access.MakeDirectory(capture_annotations_cache_path) + + # Capture. + capture_input_filepath = self._capture_input_filepaths[ + capture_input_name] + if not os.path.exists(capture_input_filepath): + # If the input signal file does not exist, try to create using the + # available input signal creators. + self._CreateInputSignal(capture_input_filepath) + assert os.path.exists(capture_input_filepath) + self._ExtractCaptureAnnotations( + capture_input_filepath, capture_annotations_cache_path) + + # Render and simulated echo path (optional). + render_input_filepath = None if without_render_input else ( + self._render_input_filepaths[capture_input_name]) + render_input_name = '(none)' if without_render_input else ( + self._ExtractFileName(render_input_filepath)) + echo_path_simulator = (echo_path_simulation_factory. 
+ EchoPathSimulatorFactory.GetInstance( + self._echo_path_simulator_class, + render_input_filepath)) + + # Try different test data generators. + for test_data_generators in self._test_data_generators: + logging.info( + 'APM config preset: <%s>, capture: <%s>, render: <%s>,' + 'test data generator: <%s>, echo simulator: <%s>', + config_name, capture_input_name, render_input_name, + test_data_generators.NAME, echo_path_simulator.NAME) + + # Output path for the generated test data. + test_data_cache_path = os.path.join( + capture_annotations_cache_path, + self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME) + data_access.MakeDirectory(test_data_cache_path) + logging.debug('test data cache path: <%s>', + test_data_cache_path) + + # Output path for the echo simulator and APM input mixer output. + echo_test_data_cache_path = os.path.join( + test_data_cache_path, + 'echosim-{}'.format(echo_path_simulator.NAME)) + data_access.MakeDirectory(echo_test_data_cache_path) + logging.debug('echo test data cache path: <%s>', + echo_test_data_cache_path) + + # Full output path. + output_path = os.path.join( + self._base_output_path, + self._PREFIX_APM_CONFIG + config_name, + self._PREFIX_CAPTURE + capture_input_name, + self._PREFIX_RENDER + render_input_name, + self._PREFIX_ECHO_SIMULATOR + echo_path_simulator.NAME, + self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME) + data_access.MakeDirectory(output_path) + logging.debug('output path: <%s>', output_path) + + self._Simulate(test_data_generators, + capture_input_filepath, + render_input_filepath, test_data_cache_path, + echo_test_data_cache_path, output_path, + config_filepath, echo_path_simulator) + + @staticmethod + def _CreateInputSignal(input_signal_filepath): + """Creates a missing input signal file. The file name is parsed to extract input signal creator and params. 
If a creator is matched and the parameters are valid, a new signal is generated @@ -257,30 +272,33 @@ def _CreateInputSignal(input_signal_filepath): Raises: InputSignalCreatorException """ - filename = os.path.splitext(os.path.split(input_signal_filepath)[-1])[0] - filename_parts = filename.split('-') - - if len(filename_parts) < 2: - raise exceptions.InputSignalCreatorException( - 'Cannot parse input signal file name') - - signal, metadata = input_signal_creator.InputSignalCreator.Create( - filename_parts[0], filename_parts[1].split('_')) - - signal_processing.SignalProcessingUtils.SaveWav( - input_signal_filepath, signal) - data_access.Metadata.SaveFileMetadata(input_signal_filepath, metadata) - - def _ExtractCaptureAnnotations(self, input_filepath, output_path, - annotation_name=""): - self._annotator.Extract(input_filepath) - self._annotator.Save(output_path, annotation_name) - - def _Simulate(self, test_data_generators, clean_capture_input_filepath, - render_input_filepath, test_data_cache_path, - echo_test_data_cache_path, output_path, config_filepath, - echo_path_simulator): - """Runs a single set of simulation. 
+ filename = os.path.splitext( + os.path.split(input_signal_filepath)[-1])[0] + filename_parts = filename.split('-') + + if len(filename_parts) < 2: + raise exceptions.InputSignalCreatorException( + 'Cannot parse input signal file name') + + signal, metadata = input_signal_creator.InputSignalCreator.Create( + filename_parts[0], filename_parts[1].split('_')) + + signal_processing.SignalProcessingUtils.SaveWav( + input_signal_filepath, signal) + data_access.Metadata.SaveFileMetadata(input_signal_filepath, metadata) + + def _ExtractCaptureAnnotations(self, + input_filepath, + output_path, + annotation_name=""): + self._annotator.Extract(input_filepath) + self._annotator.Save(output_path, annotation_name) + + def _Simulate(self, test_data_generators, clean_capture_input_filepath, + render_input_filepath, test_data_cache_path, + echo_test_data_cache_path, output_path, config_filepath, + echo_path_simulator): + """Runs a single set of simulation. Simulates a given combination of APM configuration, probing signal, and test data generator. It iterates over the test data generator @@ -298,90 +316,92 @@ def _Simulate(self, test_data_generators, clean_capture_input_filepath, config_filepath: APM configuration file to test. echo_path_simulator: EchoPathSimulator instance. """ - # Generate pairs of noisy input and reference signal files. - test_data_generators.Generate( - input_signal_filepath=clean_capture_input_filepath, - test_data_cache_path=test_data_cache_path, - base_output_path=output_path) - - # Extract metadata linked to the clean input file (if any). - apm_input_metadata = None - try: - apm_input_metadata = data_access.Metadata.LoadFileMetadata( - clean_capture_input_filepath) - except IOError as e: - apm_input_metadata = {} - apm_input_metadata['test_data_gen_name'] = test_data_generators.NAME - apm_input_metadata['test_data_gen_config'] = None - - # For each test data pair, simulate a call and evaluate. 
- for config_name in test_data_generators.config_names: - logging.info(' - test data generator config: <%s>', config_name) - apm_input_metadata['test_data_gen_config'] = config_name - - # Paths to the test data generator output. - # Note that the reference signal does not depend on the render input - # which is optional. - noisy_capture_input_filepath = ( - test_data_generators.noisy_signal_filepaths[config_name]) - reference_signal_filepath = ( - test_data_generators.reference_signal_filepaths[config_name]) - - # Output path for the evaluation (e.g., APM output file). - evaluation_output_path = test_data_generators.apm_output_paths[ - config_name] - - # Paths to the APM input signals. - echo_path_filepath = echo_path_simulator.Simulate( - echo_test_data_cache_path) - apm_input_filepath = input_mixer.ApmInputMixer.Mix( - echo_test_data_cache_path, noisy_capture_input_filepath, - echo_path_filepath) - - # Extract annotations for the APM input mix. - apm_input_basepath, apm_input_filename = os.path.split( - apm_input_filepath) - self._ExtractCaptureAnnotations( - apm_input_filepath, apm_input_basepath, - os.path.splitext(apm_input_filename)[0] + '-') - - # Simulate a call using APM. - self._audioproc_wrapper.Run( - config_filepath=config_filepath, - capture_input_filepath=apm_input_filepath, - render_input_filepath=render_input_filepath, - output_path=evaluation_output_path) - - try: - # Evaluate. - self._evaluator.Run( - evaluation_score_workers=self._evaluation_score_workers, - apm_input_metadata=apm_input_metadata, - apm_output_filepath=self._audioproc_wrapper.output_filepath, - reference_input_filepath=reference_signal_filepath, - render_input_filepath=render_input_filepath, - output_path=evaluation_output_path, - ) - - # Save simulation metadata. 
- data_access.Metadata.SaveAudioTestDataPaths( - output_path=evaluation_output_path, - clean_capture_input_filepath=clean_capture_input_filepath, - echo_free_capture_filepath=noisy_capture_input_filepath, - echo_filepath=echo_path_filepath, - render_filepath=render_input_filepath, - capture_filepath=apm_input_filepath, - apm_output_filepath=self._audioproc_wrapper.output_filepath, - apm_reference_filepath=reference_signal_filepath, - apm_config_filepath=config_filepath, - ) - except exceptions.EvaluationScoreException as e: - logging.warning('the evaluation failed: %s', e.message) - continue - - def _SetTestInputSignalFilePaths(self, capture_input_filepaths, - render_input_filepaths): - """Sets input and render input file paths collections. + # Generate pairs of noisy input and reference signal files. + test_data_generators.Generate( + input_signal_filepath=clean_capture_input_filepath, + test_data_cache_path=test_data_cache_path, + base_output_path=output_path) + + # Extract metadata linked to the clean input file (if any). + apm_input_metadata = None + try: + apm_input_metadata = data_access.Metadata.LoadFileMetadata( + clean_capture_input_filepath) + except IOError as e: + apm_input_metadata = {} + apm_input_metadata['test_data_gen_name'] = test_data_generators.NAME + apm_input_metadata['test_data_gen_config'] = None + + # For each test data pair, simulate a call and evaluate. + for config_name in test_data_generators.config_names: + logging.info(' - test data generator config: <%s>', config_name) + apm_input_metadata['test_data_gen_config'] = config_name + + # Paths to the test data generator output. + # Note that the reference signal does not depend on the render input + # which is optional. + noisy_capture_input_filepath = ( + test_data_generators.noisy_signal_filepaths[config_name]) + reference_signal_filepath = ( + test_data_generators.reference_signal_filepaths[config_name]) + + # Output path for the evaluation (e.g., APM output file). 
+ evaluation_output_path = test_data_generators.apm_output_paths[ + config_name] + + # Paths to the APM input signals. + echo_path_filepath = echo_path_simulator.Simulate( + echo_test_data_cache_path) + apm_input_filepath = input_mixer.ApmInputMixer.Mix( + echo_test_data_cache_path, noisy_capture_input_filepath, + echo_path_filepath) + + # Extract annotations for the APM input mix. + apm_input_basepath, apm_input_filename = os.path.split( + apm_input_filepath) + self._ExtractCaptureAnnotations( + apm_input_filepath, apm_input_basepath, + os.path.splitext(apm_input_filename)[0] + '-') + + # Simulate a call using APM. + self._audioproc_wrapper.Run( + config_filepath=config_filepath, + capture_input_filepath=apm_input_filepath, + render_input_filepath=render_input_filepath, + output_path=evaluation_output_path) + + try: + # Evaluate. + self._evaluator.Run( + evaluation_score_workers=self._evaluation_score_workers, + apm_input_metadata=apm_input_metadata, + apm_output_filepath=self._audioproc_wrapper. + output_filepath, + reference_input_filepath=reference_signal_filepath, + render_input_filepath=render_input_filepath, + output_path=evaluation_output_path, + ) + + # Save simulation metadata. + data_access.Metadata.SaveAudioTestDataPaths( + output_path=evaluation_output_path, + clean_capture_input_filepath=clean_capture_input_filepath, + echo_free_capture_filepath=noisy_capture_input_filepath, + echo_filepath=echo_path_filepath, + render_filepath=render_input_filepath, + capture_filepath=apm_input_filepath, + apm_output_filepath=self._audioproc_wrapper. + output_filepath, + apm_reference_filepath=reference_signal_filepath, + apm_config_filepath=config_filepath, + ) + except exceptions.EvaluationScoreException as e: + logging.warning('the evaluation failed: %s', e.message) + continue + + def _SetTestInputSignalFilePaths(self, capture_input_filepaths, + render_input_filepaths): + """Sets input and render input file paths collections. 
Pairs the input and render input files by storing the file paths into two collections. The key is the file name of the input file. @@ -390,20 +410,20 @@ def _SetTestInputSignalFilePaths(self, capture_input_filepaths, capture_input_filepaths: list of file paths. render_input_filepaths: list of file paths. """ - self._capture_input_filepaths = {} - self._render_input_filepaths = {} - assert len(capture_input_filepaths) == len(render_input_filepaths) - for capture_input_filepath, render_input_filepath in zip( - capture_input_filepaths, render_input_filepaths): - name = self._ExtractFileName(capture_input_filepath) - self._capture_input_filepaths[name] = os.path.abspath( - capture_input_filepath) - self._render_input_filepaths[name] = os.path.abspath( - render_input_filepath) - - @classmethod - def _CreatePathsCollection(cls, filepaths): - """Creates a collection of file paths. + self._capture_input_filepaths = {} + self._render_input_filepaths = {} + assert len(capture_input_filepaths) == len(render_input_filepaths) + for capture_input_filepath, render_input_filepath in zip( + capture_input_filepaths, render_input_filepaths): + name = self._ExtractFileName(capture_input_filepath) + self._capture_input_filepaths[name] = os.path.abspath( + capture_input_filepath) + self._render_input_filepaths[name] = os.path.abspath( + render_input_filepath) + + @classmethod + def _CreatePathsCollection(cls, filepaths): + """Creates a collection of file paths. Given a list of file paths, makes a collection with one item for each file path. The value is absolute path, the key is the file name without @@ -415,12 +435,12 @@ def _CreatePathsCollection(cls, filepaths): Returns: A dict. 
""" - filepaths_collection = {} - for filepath in filepaths: - name = cls._ExtractFileName(filepath) - filepaths_collection[name] = os.path.abspath(filepath) - return filepaths_collection - - @classmethod - def _ExtractFileName(cls, filepath): - return os.path.splitext(os.path.split(filepath)[-1])[0] + filepaths_collection = {} + for filepath in filepaths: + name = cls._ExtractFileName(filepath) + filepaths_collection[name] = os.path.abspath(filepath) + return filepaths_collection + + @classmethod + def _ExtractFileName(cls, filepath): + return os.path.splitext(os.path.split(filepath)[-1])[0] diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py index c39b12dd00..78ca17f589 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py @@ -5,21 +5,15 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Unit tests for the simulation module. """ import logging import os import shutil -import sys import tempfile import unittest -SRC = os.path.abspath(os.path.join( - os.path.dirname((__file__)), os.pardir, os.pardir, os.pardir, os.pardir)) -sys.path.append(os.path.join(SRC, 'third_party', 'pymock')) - import mock import pydub @@ -33,177 +27,177 @@ class TestApmModuleSimulator(unittest.TestCase): - """Unit tests for the ApmModuleSimulator class. + """Unit tests for the ApmModuleSimulator class. 
""" - def setUp(self): - """Create temporary folders and fake audio track.""" - self._output_path = tempfile.mkdtemp() - self._tmp_path = tempfile.mkdtemp() - - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - fake_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - self._fake_audio_track_path = os.path.join(self._output_path, 'fake.wav') - signal_processing.SignalProcessingUtils.SaveWav( - self._fake_audio_track_path, fake_signal) - - def tearDown(self): - """Recursively delete temporary folders.""" - shutil.rmtree(self._output_path) - shutil.rmtree(self._tmp_path) - - def testSimulation(self): - # Instance dependencies to mock and inject. - ap_wrapper = audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH) - evaluator = evaluation.ApmModuleEvaluator() - ap_wrapper.Run = mock.MagicMock(name='Run') - evaluator.Run = mock.MagicMock(name='Run') - - # Instance non-mocked dependencies. - test_data_generator_factory = ( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path='', - noise_tracks_path='', - copy_with_identity=False)) - evaluation_score_factory = eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join( - os.path.dirname(__file__), 'fake_polqa'), - echo_metric_tool_bin_path=None - ) - - # Instance simulator. - simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=test_data_generator_factory, - evaluation_score_factory=evaluation_score_factory, - ap_wrapper=ap_wrapper, - evaluator=evaluator, - external_vads={'fake': external_vad.ExternalVad(os.path.join( - os.path.dirname(__file__), 'fake_external_vad.py'), 'fake')} - ) - - # What to simulate. - config_files = ['apm_configs/default.json'] - input_files = [self._fake_audio_track_path] - test_data_generators = ['identity', 'white_noise'] - eval_scores = ['audio_level_mean', 'polqa'] - - # Run all simulations. 
- simulator.Run( - config_filepaths=config_files, - capture_input_filepaths=input_files, - test_data_generator_names=test_data_generators, - eval_score_names=eval_scores, - output_dir=self._output_path) - - # Check. - # TODO(alessiob): Once the TestDataGenerator classes can be configured by - # the client code (e.g., number of SNR pairs for the white noise test data - # generator), the exact number of calls to ap_wrapper.Run and evaluator.Run - # is known; use that with assertEqual. - min_number_of_simulations = len(config_files) * len(input_files) * len( - test_data_generators) - self.assertGreaterEqual(len(ap_wrapper.Run.call_args_list), - min_number_of_simulations) - self.assertGreaterEqual(len(evaluator.Run.call_args_list), - min_number_of_simulations) - - def testInputSignalCreation(self): - # Instance simulator. - simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path='', - noise_tracks_path='', - copy_with_identity=False)), - evaluation_score_factory=( - eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join( - os.path.dirname(__file__), 'fake_polqa'), - echo_metric_tool_bin_path=None - )), - ap_wrapper=audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH), - evaluator=evaluation.ApmModuleEvaluator()) - - # Inexistent input files to be silently created. - input_files = [ - os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), - os.path.join(self._tmp_path, 'pure_tone-1000_500.wav'), - ] - self.assertFalse(any([os.path.exists(input_file) for input_file in ( - input_files)])) - - # The input files are created during the simulation. 
- simulator.Run( - config_filepaths=['apm_configs/default.json'], - capture_input_filepaths=input_files, - test_data_generator_names=['identity'], - eval_score_names=['audio_level_peak'], - output_dir=self._output_path) - self.assertTrue(all([os.path.exists(input_file) for input_file in ( - input_files)])) - - def testPureToneGenerationWithTotalHarmonicDistorsion(self): - logging.warning = mock.MagicMock(name='warning') - - # Instance simulator. - simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=( + def setUp(self): + """Create temporary folders and fake audio track.""" + self._output_path = tempfile.mkdtemp() + self._tmp_path = tempfile.mkdtemp() + + silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) + fake_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + silence) + self._fake_audio_track_path = os.path.join(self._output_path, + 'fake.wav') + signal_processing.SignalProcessingUtils.SaveWav( + self._fake_audio_track_path, fake_signal) + + def tearDown(self): + """Recursively delete temporary folders.""" + shutil.rmtree(self._output_path) + shutil.rmtree(self._tmp_path) + + def testSimulation(self): + # Instance dependencies to mock and inject. + ap_wrapper = audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH) + evaluator = evaluation.ApmModuleEvaluator() + ap_wrapper.Run = mock.MagicMock(name='Run') + evaluator.Run = mock.MagicMock(name='Run') + + # Instance non-mocked dependencies. 
+ test_data_generator_factory = ( test_data_generation_factory.TestDataGeneratorFactory( aechen_ir_database_path='', noise_tracks_path='', - copy_with_identity=False)), - evaluation_score_factory=( - eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join( - os.path.dirname(__file__), 'fake_polqa'), - echo_metric_tool_bin_path=None - )), - ap_wrapper=audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH), - evaluator=evaluation.ApmModuleEvaluator()) - - # What to simulate. - config_files = ['apm_configs/default.json'] - input_files = [os.path.join(self._tmp_path, 'pure_tone-440_1000.wav')] - eval_scores = ['thd'] - - # Should work. - simulator.Run( - config_filepaths=config_files, - capture_input_filepaths=input_files, - test_data_generator_names=['identity'], - eval_score_names=eval_scores, - output_dir=self._output_path) - self.assertFalse(logging.warning.called) - - # Warning expected. - simulator.Run( - config_filepaths=config_files, - capture_input_filepaths=input_files, - test_data_generator_names=['white_noise'], # Not allowed with THD. - eval_score_names=eval_scores, - output_dir=self._output_path) - logging.warning.assert_called_with('the evaluation failed: %s', ( - 'The THD score cannot be used with any test data generator other than ' - '"identity"')) - - # # Init. - # generator = test_data_generation.IdentityTestDataGenerator('tmp') - # input_signal_filepath = os.path.join( - # self._test_data_cache_path, 'pure_tone-440_1000.wav') - - # # Check that the input signal is generated. - # self.assertFalse(os.path.exists(input_signal_filepath)) - # generator.Generate( - # input_signal_filepath=input_signal_filepath, - # test_data_cache_path=self._test_data_cache_path, - # base_output_path=self._base_output_path) - # self.assertTrue(os.path.exists(input_signal_filepath)) - - # # Check input signal properties. 
- # input_signal = signal_processing.SignalProcessingUtils.LoadWav( - # input_signal_filepath) - # self.assertEqual(1000, len(input_signal)) + copy_with_identity=False)) + evaluation_score_factory = eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join(os.path.dirname(__file__), + 'fake_polqa'), + echo_metric_tool_bin_path=None) + + # Instance simulator. + simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=test_data_generator_factory, + evaluation_score_factory=evaluation_score_factory, + ap_wrapper=ap_wrapper, + evaluator=evaluator, + external_vads={ + 'fake': + external_vad.ExternalVad( + os.path.join(os.path.dirname(__file__), + 'fake_external_vad.py'), 'fake') + }) + + # What to simulate. + config_files = ['apm_configs/default.json'] + input_files = [self._fake_audio_track_path] + test_data_generators = ['identity', 'white_noise'] + eval_scores = ['audio_level_mean', 'polqa'] + + # Run all simulations. + simulator.Run(config_filepaths=config_files, + capture_input_filepaths=input_files, + test_data_generator_names=test_data_generators, + eval_score_names=eval_scores, + output_dir=self._output_path) + + # Check. + # TODO(alessiob): Once the TestDataGenerator classes can be configured by + # the client code (e.g., number of SNR pairs for the white noise test data + # generator), the exact number of calls to ap_wrapper.Run and evaluator.Run + # is known; use that with assertEqual. + min_number_of_simulations = len(config_files) * len(input_files) * len( + test_data_generators) + self.assertGreaterEqual(len(ap_wrapper.Run.call_args_list), + min_number_of_simulations) + self.assertGreaterEqual(len(evaluator.Run.call_args_list), + min_number_of_simulations) + + def testInputSignalCreation(self): + # Instance simulator. 
+ simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=( + test_data_generation_factory.TestDataGeneratorFactory( + aechen_ir_database_path='', + noise_tracks_path='', + copy_with_identity=False)), + evaluation_score_factory=( + eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join(os.path.dirname(__file__), + 'fake_polqa'), + echo_metric_tool_bin_path=None)), + ap_wrapper=audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper. + DEFAULT_APM_SIMULATOR_BIN_PATH), + evaluator=evaluation.ApmModuleEvaluator()) + + # Inexistent input files to be silently created. + input_files = [ + os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), + os.path.join(self._tmp_path, 'pure_tone-1000_500.wav'), + ] + self.assertFalse( + any([os.path.exists(input_file) for input_file in (input_files)])) + + # The input files are created during the simulation. + simulator.Run(config_filepaths=['apm_configs/default.json'], + capture_input_filepaths=input_files, + test_data_generator_names=['identity'], + eval_score_names=['audio_level_peak'], + output_dir=self._output_path) + self.assertTrue( + all([os.path.exists(input_file) for input_file in (input_files)])) + + def testPureToneGenerationWithTotalHarmonicDistorsion(self): + logging.warning = mock.MagicMock(name='warning') + + # Instance simulator. + simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=( + test_data_generation_factory.TestDataGeneratorFactory( + aechen_ir_database_path='', + noise_tracks_path='', + copy_with_identity=False)), + evaluation_score_factory=( + eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join(os.path.dirname(__file__), + 'fake_polqa'), + echo_metric_tool_bin_path=None)), + ap_wrapper=audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper. + DEFAULT_APM_SIMULATOR_BIN_PATH), + evaluator=evaluation.ApmModuleEvaluator()) + + # What to simulate. 
+ config_files = ['apm_configs/default.json'] + input_files = [os.path.join(self._tmp_path, 'pure_tone-440_1000.wav')] + eval_scores = ['thd'] + + # Should work. + simulator.Run(config_filepaths=config_files, + capture_input_filepaths=input_files, + test_data_generator_names=['identity'], + eval_score_names=eval_scores, + output_dir=self._output_path) + self.assertFalse(logging.warning.called) + + # Warning expected. + simulator.Run( + config_filepaths=config_files, + capture_input_filepaths=input_files, + test_data_generator_names=['white_noise'], # Not allowed with THD. + eval_score_names=eval_scores, + output_dir=self._output_path) + logging.warning.assert_called_with('the evaluation failed: %s', ( + 'The THD score cannot be used with any test data generator other than ' + '"identity"')) + + # # Init. + # generator = test_data_generation.IdentityTestDataGenerator('tmp') + # input_signal_filepath = os.path.join( + # self._test_data_cache_path, 'pure_tone-440_1000.wav') + + # # Check that the input signal is generated. + # self.assertFalse(os.path.exists(input_signal_filepath)) + # generator.Generate( + # input_signal_filepath=input_signal_filepath, + # test_data_cache_path=self._test_data_cache_path, + # base_output_path=self._base_output_path) + # self.assertTrue(os.path.exists(input_signal_filepath)) + + # # Check input signal properties. + # input_signal = signal_processing.SignalProcessingUtils.LoadWav( + # input_signal_filepath) + # self.assertEqual(1000, len(input_signal)) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc b/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc index d58b57e03c..1f24d9d370 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc @@ -100,9 +100,9 @@ int main(int argc, char* argv[]) { // Frame peak level. 
std::transform(samples.begin(), samples.begin() + audio_frame_length, samples.begin(), [](int16_t s) { return std::abs(s); }); - const auto* peak_level = - std::max_element(samples.begin(), samples.begin() + audio_frame_length); - const float level_curr = static_cast(*peak_level) / 32768.f; + const int16_t peak_level = *std::max_element( + samples.cbegin(), samples.cbegin() + audio_frame_length); + const float level_curr = static_cast(peak_level) / 32768.f; // Temporal smoothing. auto smooth = [&level_prev, &level_curr](float c) { diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py index dac4328588..7e86faccec 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Test data generators producing signals pairs intended to be used to test the APM module. Each pair consists of a noisy input and a reference signal. The former is used as APM input and it is generated by adding noise to a @@ -27,10 +26,10 @@ import sys try: - import scipy.io + import scipy.io except ImportError: - logging.critical('Cannot import the third-party Python package scipy') - sys.exit(1) + logging.critical('Cannot import the third-party Python package scipy') + sys.exit(1) from . import data_access from . import exceptions @@ -38,7 +37,7 @@ class TestDataGenerator(object): - """Abstract class responsible for the generation of noisy signals. + """Abstract class responsible for the generation of noisy signals. 
Given a clean signal, it generates two streams named noisy signal and reference. The former is the clean signal deteriorated by the noise source, @@ -50,24 +49,24 @@ class TestDataGenerator(object): An test data generator generates one or more pairs. """ - NAME = None - REGISTERED_CLASSES = {} - - def __init__(self, output_directory_prefix): - self._output_directory_prefix = output_directory_prefix - # Init dictionaries with one entry for each test data generator - # configuration (e.g., different SNRs). - # Noisy audio track files (stored separately in a cache folder). - self._noisy_signal_filepaths = None - # Path to be used for the APM simulation output files. - self._apm_output_paths = None - # Reference audio track files (stored separately in a cache folder). - self._reference_signal_filepaths = None - self.Clear() - - @classmethod - def RegisterClass(cls, class_to_register): - """Registers a TestDataGenerator implementation. + NAME = None + REGISTERED_CLASSES = {} + + def __init__(self, output_directory_prefix): + self._output_directory_prefix = output_directory_prefix + # Init dictionaries with one entry for each test data generator + # configuration (e.g., different SNRs). + # Noisy audio track files (stored separately in a cache folder). + self._noisy_signal_filepaths = None + # Path to be used for the APM simulation output files. + self._apm_output_paths = None + # Reference audio track files (stored separately in a cache folder). + self._reference_signal_filepaths = None + self.Clear() + + @classmethod + def RegisterClass(cls, class_to_register): + """Registers a TestDataGenerator implementation. Decorator to automatically register the classes that extend TestDataGenerator. 
@@ -77,28 +76,28 @@ def RegisterClass(cls, class_to_register): class IdentityGenerator(TestDataGenerator): pass """ - cls.REGISTERED_CLASSES[class_to_register.NAME] = class_to_register - return class_to_register + cls.REGISTERED_CLASSES[class_to_register.NAME] = class_to_register + return class_to_register - @property - def config_names(self): - return self._noisy_signal_filepaths.keys() + @property + def config_names(self): + return self._noisy_signal_filepaths.keys() - @property - def noisy_signal_filepaths(self): - return self._noisy_signal_filepaths + @property + def noisy_signal_filepaths(self): + return self._noisy_signal_filepaths - @property - def apm_output_paths(self): - return self._apm_output_paths + @property + def apm_output_paths(self): + return self._apm_output_paths - @property - def reference_signal_filepaths(self): - return self._reference_signal_filepaths + @property + def reference_signal_filepaths(self): + return self._reference_signal_filepaths - def Generate( - self, input_signal_filepath, test_data_cache_path, base_output_path): - """Generates a set of noisy input and reference audiotrack file pairs. + def Generate(self, input_signal_filepath, test_data_cache_path, + base_output_path): + """Generates a set of noisy input and reference audiotrack file pairs. This method initializes an empty set of pairs and calls the _Generate() method implemented in a concrete class. @@ -109,26 +108,26 @@ def Generate( files. base_output_path: base path where output is written. """ - self.Clear() - self._Generate( - input_signal_filepath, test_data_cache_path, base_output_path) + self.Clear() + self._Generate(input_signal_filepath, test_data_cache_path, + base_output_path) - def Clear(self): - """Clears the generated output path dictionaries. + def Clear(self): + """Clears the generated output path dictionaries. 
""" - self._noisy_signal_filepaths = {} - self._apm_output_paths = {} - self._reference_signal_filepaths = {} + self._noisy_signal_filepaths = {} + self._apm_output_paths = {} + self._reference_signal_filepaths = {} - def _Generate( - self, input_signal_filepath, test_data_cache_path, base_output_path): - """Abstract method to be implemented in each concrete class. + def _Generate(self, input_signal_filepath, test_data_cache_path, + base_output_path): + """Abstract method to be implemented in each concrete class. """ - raise NotImplementedError() + raise NotImplementedError() - def _AddNoiseSnrPairs(self, base_output_path, noisy_mix_filepaths, - snr_value_pairs): - """Adds noisy-reference signal pairs. + def _AddNoiseSnrPairs(self, base_output_path, noisy_mix_filepaths, + snr_value_pairs): + """Adds noisy-reference signal pairs. Args: base_output_path: noisy tracks base output path. @@ -136,22 +135,22 @@ def _AddNoiseSnrPairs(self, base_output_path, noisy_mix_filepaths, by noisy track name and SNR level. snr_value_pairs: list of SNR pairs. """ - for noise_track_name in noisy_mix_filepaths: - for snr_noisy, snr_refence in snr_value_pairs: - config_name = '{0}_{1:d}_{2:d}_SNR'.format( - noise_track_name, snr_noisy, snr_refence) - output_path = self._MakeDir(base_output_path, config_name) - self._AddNoiseReferenceFilesPair( - config_name=config_name, - noisy_signal_filepath=noisy_mix_filepaths[ - noise_track_name][snr_noisy], - reference_signal_filepath=noisy_mix_filepaths[ - noise_track_name][snr_refence], - output_path=output_path) - - def _AddNoiseReferenceFilesPair(self, config_name, noisy_signal_filepath, - reference_signal_filepath, output_path): - """Adds one noisy-reference signal pair. 
+ for noise_track_name in noisy_mix_filepaths: + for snr_noisy, snr_refence in snr_value_pairs: + config_name = '{0}_{1:d}_{2:d}_SNR'.format( + noise_track_name, snr_noisy, snr_refence) + output_path = self._MakeDir(base_output_path, config_name) + self._AddNoiseReferenceFilesPair( + config_name=config_name, + noisy_signal_filepath=noisy_mix_filepaths[noise_track_name] + [snr_noisy], + reference_signal_filepath=noisy_mix_filepaths[ + noise_track_name][snr_refence], + output_path=output_path) + + def _AddNoiseReferenceFilesPair(self, config_name, noisy_signal_filepath, + reference_signal_filepath, output_path): + """Adds one noisy-reference signal pair. Args: config_name: name of the APM configuration. @@ -159,264 +158,275 @@ def _AddNoiseReferenceFilesPair(self, config_name, noisy_signal_filepath, reference_signal_filepath: path to reference audio track file. output_path: APM output path. """ - assert config_name not in self._noisy_signal_filepaths - self._noisy_signal_filepaths[config_name] = os.path.abspath( - noisy_signal_filepath) - self._apm_output_paths[config_name] = os.path.abspath(output_path) - self._reference_signal_filepaths[config_name] = os.path.abspath( - reference_signal_filepath) + assert config_name not in self._noisy_signal_filepaths + self._noisy_signal_filepaths[config_name] = os.path.abspath( + noisy_signal_filepath) + self._apm_output_paths[config_name] = os.path.abspath(output_path) + self._reference_signal_filepaths[config_name] = os.path.abspath( + reference_signal_filepath) - def _MakeDir(self, base_output_path, test_data_generator_config_name): - output_path = os.path.join( - base_output_path, - self._output_directory_prefix + test_data_generator_config_name) - data_access.MakeDirectory(output_path) - return output_path + def _MakeDir(self, base_output_path, test_data_generator_config_name): + output_path = os.path.join( + base_output_path, + self._output_directory_prefix + test_data_generator_config_name) + 
data_access.MakeDirectory(output_path) + return output_path @TestDataGenerator.RegisterClass class IdentityTestDataGenerator(TestDataGenerator): - """Generator that adds no noise. + """Generator that adds no noise. Both the noisy and the reference signals are the input signal. """ - NAME = 'identity' + NAME = 'identity' - def __init__(self, output_directory_prefix, copy_with_identity): - TestDataGenerator.__init__(self, output_directory_prefix) - self._copy_with_identity = copy_with_identity + def __init__(self, output_directory_prefix, copy_with_identity): + TestDataGenerator.__init__(self, output_directory_prefix) + self._copy_with_identity = copy_with_identity - @property - def copy_with_identity(self): - return self._copy_with_identity + @property + def copy_with_identity(self): + return self._copy_with_identity - def _Generate( - self, input_signal_filepath, test_data_cache_path, base_output_path): - config_name = 'default' - output_path = self._MakeDir(base_output_path, config_name) + def _Generate(self, input_signal_filepath, test_data_cache_path, + base_output_path): + config_name = 'default' + output_path = self._MakeDir(base_output_path, config_name) - if self._copy_with_identity: - input_signal_filepath_new = os.path.join( - test_data_cache_path, os.path.split(input_signal_filepath)[1]) - logging.info('copying ' + input_signal_filepath + ' to ' + ( - input_signal_filepath_new)) - shutil.copy(input_signal_filepath, input_signal_filepath_new) - input_signal_filepath = input_signal_filepath_new + if self._copy_with_identity: + input_signal_filepath_new = os.path.join( + test_data_cache_path, + os.path.split(input_signal_filepath)[1]) + logging.info('copying ' + input_signal_filepath + ' to ' + + (input_signal_filepath_new)) + shutil.copy(input_signal_filepath, input_signal_filepath_new) + input_signal_filepath = input_signal_filepath_new - self._AddNoiseReferenceFilesPair( - config_name=config_name, - noisy_signal_filepath=input_signal_filepath, - 
reference_signal_filepath=input_signal_filepath, - output_path=output_path) + self._AddNoiseReferenceFilesPair( + config_name=config_name, + noisy_signal_filepath=input_signal_filepath, + reference_signal_filepath=input_signal_filepath, + output_path=output_path) @TestDataGenerator.RegisterClass class WhiteNoiseTestDataGenerator(TestDataGenerator): - """Generator that adds white noise. + """Generator that adds white noise. """ - NAME = 'white_noise' - - # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. - # The reference (second value of each pair) always has a lower amount of noise - # - i.e., the SNR is 10 dB higher. - _SNR_VALUE_PAIRS = [ - [20, 30], # Smallest noise. - [10, 20], - [5, 15], - [0, 10], # Largest noise. - ] - - _NOISY_SIGNAL_FILENAME_TEMPLATE = 'noise_{0:d}_SNR.wav' - - def __init__(self, output_directory_prefix): - TestDataGenerator.__init__(self, output_directory_prefix) - - def _Generate( - self, input_signal_filepath, test_data_cache_path, base_output_path): - # Load the input signal. - input_signal = signal_processing.SignalProcessingUtils.LoadWav( - input_signal_filepath) - - # Create the noise track. - noise_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - input_signal) - - # Create the noisy mixes (once for each unique SNR value). - noisy_mix_filepaths = {} - snr_values = set([snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) - for snr in snr_values: - noisy_signal_filepath = os.path.join( - test_data_cache_path, - self._NOISY_SIGNAL_FILENAME_TEMPLATE.format(snr)) - - # Create and save if not done. - if not os.path.exists(noisy_signal_filepath): - # Create noisy signal. - noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( - input_signal, noise_signal, snr) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noisy_signal_filepath, noisy_signal) - - # Add file to the collection of mixes. 
- noisy_mix_filepaths[snr] = noisy_signal_filepath - - # Add all the noisy-reference signal pairs. - for snr_noisy, snr_refence in self._SNR_VALUE_PAIRS: - config_name = '{0:d}_{1:d}_SNR'.format(snr_noisy, snr_refence) - output_path = self._MakeDir(base_output_path, config_name) - self._AddNoiseReferenceFilesPair( - config_name=config_name, - noisy_signal_filepath=noisy_mix_filepaths[snr_noisy], - reference_signal_filepath=noisy_mix_filepaths[snr_refence], - output_path=output_path) + NAME = 'white_noise' + + # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. + # The reference (second value of each pair) always has a lower amount of noise + # - i.e., the SNR is 10 dB higher. + _SNR_VALUE_PAIRS = [ + [20, 30], # Smallest noise. + [10, 20], + [5, 15], + [0, 10], # Largest noise. + ] + + _NOISY_SIGNAL_FILENAME_TEMPLATE = 'noise_{0:d}_SNR.wav' + + def __init__(self, output_directory_prefix): + TestDataGenerator.__init__(self, output_directory_prefix) + + def _Generate(self, input_signal_filepath, test_data_cache_path, + base_output_path): + # Load the input signal. + input_signal = signal_processing.SignalProcessingUtils.LoadWav( + input_signal_filepath) + + # Create the noise track. + noise_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + input_signal) + + # Create the noisy mixes (once for each unique SNR value). + noisy_mix_filepaths = {} + snr_values = set( + [snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) + for snr in snr_values: + noisy_signal_filepath = os.path.join( + test_data_cache_path, + self._NOISY_SIGNAL_FILENAME_TEMPLATE.format(snr)) + + # Create and save if not done. + if not os.path.exists(noisy_signal_filepath): + # Create noisy signal. + noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( + input_signal, noise_signal, snr) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noisy_signal_filepath, noisy_signal) + + # Add file to the collection of mixes. 
+ noisy_mix_filepaths[snr] = noisy_signal_filepath + + # Add all the noisy-reference signal pairs. + for snr_noisy, snr_refence in self._SNR_VALUE_PAIRS: + config_name = '{0:d}_{1:d}_SNR'.format(snr_noisy, snr_refence) + output_path = self._MakeDir(base_output_path, config_name) + self._AddNoiseReferenceFilesPair( + config_name=config_name, + noisy_signal_filepath=noisy_mix_filepaths[snr_noisy], + reference_signal_filepath=noisy_mix_filepaths[snr_refence], + output_path=output_path) # TODO(alessiob): remove comment when class implemented. # @TestDataGenerator.RegisterClass class NarrowBandNoiseTestDataGenerator(TestDataGenerator): - """Generator that adds narrow-band noise. + """Generator that adds narrow-band noise. """ - NAME = 'narrow_band_noise' + NAME = 'narrow_band_noise' - def __init__(self, output_directory_prefix): - TestDataGenerator.__init__(self, output_directory_prefix) + def __init__(self, output_directory_prefix): + TestDataGenerator.__init__(self, output_directory_prefix) - def _Generate( - self, input_signal_filepath, test_data_cache_path, base_output_path): - # TODO(alessiob): implement. - pass + def _Generate(self, input_signal_filepath, test_data_cache_path, + base_output_path): + # TODO(alessiob): implement. + pass @TestDataGenerator.RegisterClass class AdditiveNoiseTestDataGenerator(TestDataGenerator): - """Generator that adds noise loops. + """Generator that adds noise loops. This generator uses all the wav files in a given path (default: noise_tracks/) and mixes them to the clean speech with different target SNRs (hard-coded). """ - NAME = 'additive_noise' - _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' - - DEFAULT_NOISE_TRACKS_PATH = os.path.join( - os.path.dirname(__file__), os.pardir, 'noise_tracks') - - # TODO(alessiob): Make the list of SNR pairs customizable. - # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. 
- # The reference (second value of each pair) always has a lower amount of noise - # - i.e., the SNR is 10 dB higher. - _SNR_VALUE_PAIRS = [ - [20, 30], # Smallest noise. - [10, 20], - [5, 15], - [0, 10], # Largest noise. - ] - - def __init__(self, output_directory_prefix, noise_tracks_path): - TestDataGenerator.__init__(self, output_directory_prefix) - self._noise_tracks_path = noise_tracks_path - self._noise_tracks_file_names = [n for n in os.listdir( - self._noise_tracks_path) if n.lower().endswith('.wav')] - if len(self._noise_tracks_file_names) == 0: - raise exceptions.InitializationException( - 'No wav files found in the noise tracks path %s' % ( - self._noise_tracks_path)) - - def _Generate( - self, input_signal_filepath, test_data_cache_path, base_output_path): - """Generates test data pairs using environmental noise. + NAME = 'additive_noise' + _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' + + DEFAULT_NOISE_TRACKS_PATH = os.path.join(os.path.dirname(__file__), + os.pardir, 'noise_tracks') + + # TODO(alessiob): Make the list of SNR pairs customizable. + # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. + # The reference (second value of each pair) always has a lower amount of noise + # - i.e., the SNR is 10 dB higher. + _SNR_VALUE_PAIRS = [ + [20, 30], # Smallest noise. + [10, 20], + [5, 15], + [0, 10], # Largest noise. + ] + + def __init__(self, output_directory_prefix, noise_tracks_path): + TestDataGenerator.__init__(self, output_directory_prefix) + self._noise_tracks_path = noise_tracks_path + self._noise_tracks_file_names = [ + n for n in os.listdir(self._noise_tracks_path) + if n.lower().endswith('.wav') + ] + if len(self._noise_tracks_file_names) == 0: + raise exceptions.InitializationException( + 'No wav files found in the noise tracks path %s' % + (self._noise_tracks_path)) + + def _Generate(self, input_signal_filepath, test_data_cache_path, + base_output_path): + """Generates test data pairs using environmental noise. 
For each noise track and pair of SNR values, the following two audio tracks are created: the noisy signal and the reference signal. The former is obtained by mixing the (clean) input signal to the corresponding noise track enforcing the target SNR. """ - # Init. - snr_values = set([snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) - - # Load the input signal. - input_signal = signal_processing.SignalProcessingUtils.LoadWav( - input_signal_filepath) - - noisy_mix_filepaths = {} - for noise_track_filename in self._noise_tracks_file_names: - # Load the noise track. - noise_track_name, _ = os.path.splitext(noise_track_filename) - noise_track_filepath = os.path.join( - self._noise_tracks_path, noise_track_filename) - if not os.path.exists(noise_track_filepath): - logging.error('cannot find the <%s> noise track', noise_track_filename) - raise exceptions.FileNotFoundError() - - noise_signal = signal_processing.SignalProcessingUtils.LoadWav( - noise_track_filepath) - - # Create the noisy mixes (once for each unique SNR value). - noisy_mix_filepaths[noise_track_name] = {} - for snr in snr_values: - noisy_signal_filepath = os.path.join( - test_data_cache_path, - self._NOISY_SIGNAL_FILENAME_TEMPLATE.format(noise_track_name, snr)) - - # Create and save if not done. - if not os.path.exists(noisy_signal_filepath): - # Create noisy signal. - noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( - input_signal, noise_signal, snr, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.LOOP) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noisy_signal_filepath, noisy_signal) - - # Add file to the collection of mixes. - noisy_mix_filepaths[noise_track_name][snr] = noisy_signal_filepath - - # Add all the noise-SNR pairs. - self._AddNoiseSnrPairs( - base_output_path, noisy_mix_filepaths, self._SNR_VALUE_PAIRS) + # Init. + snr_values = set( + [snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) + + # Load the input signal. 
+ input_signal = signal_processing.SignalProcessingUtils.LoadWav( + input_signal_filepath) + + noisy_mix_filepaths = {} + for noise_track_filename in self._noise_tracks_file_names: + # Load the noise track. + noise_track_name, _ = os.path.splitext(noise_track_filename) + noise_track_filepath = os.path.join(self._noise_tracks_path, + noise_track_filename) + if not os.path.exists(noise_track_filepath): + logging.error('cannot find the <%s> noise track', + noise_track_filename) + raise exceptions.FileNotFoundError() + + noise_signal = signal_processing.SignalProcessingUtils.LoadWav( + noise_track_filepath) + + # Create the noisy mixes (once for each unique SNR value). + noisy_mix_filepaths[noise_track_name] = {} + for snr in snr_values: + noisy_signal_filepath = os.path.join( + test_data_cache_path, + self._NOISY_SIGNAL_FILENAME_TEMPLATE.format( + noise_track_name, snr)) + + # Create and save if not done. + if not os.path.exists(noisy_signal_filepath): + # Create noisy signal. + noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( + input_signal, + noise_signal, + snr, + pad_noise=signal_processing.SignalProcessingUtils. + MixPadding.LOOP) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noisy_signal_filepath, noisy_signal) + + # Add file to the collection of mixes. + noisy_mix_filepaths[noise_track_name][ + snr] = noisy_signal_filepath + + # Add all the noise-SNR pairs. + self._AddNoiseSnrPairs(base_output_path, noisy_mix_filepaths, + self._SNR_VALUE_PAIRS) @TestDataGenerator.RegisterClass class ReverberationTestDataGenerator(TestDataGenerator): - """Generator that adds reverberation noise. + """Generator that adds reverberation noise. TODO(alessiob): Make this class more generic since the impulse response can be anything (not just reverberation); call it e.g., ConvolutionalNoiseTestDataGenerator. """ - NAME = 'reverberation' + NAME = 'reverberation' - _IMPULSE_RESPONSES = { - 'lecture': 'air_binaural_lecture_0_0_1.mat', # Long echo. 
- 'booth': 'air_binaural_booth_0_0_1.mat', # Short echo. - } - _MAX_IMPULSE_RESPONSE_LENGTH = None + _IMPULSE_RESPONSES = { + 'lecture': 'air_binaural_lecture_0_0_1.mat', # Long echo. + 'booth': 'air_binaural_booth_0_0_1.mat', # Short echo. + } + _MAX_IMPULSE_RESPONSE_LENGTH = None - # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. - # The reference (second value of each pair) always has a lower amount of noise - # - i.e., the SNR is 5 dB higher. - _SNR_VALUE_PAIRS = [ - [3, 8], # Smallest noise. - [-3, 2], # Largest noise. - ] + # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. + # The reference (second value of each pair) always has a lower amount of noise + # - i.e., the SNR is 5 dB higher. + _SNR_VALUE_PAIRS = [ + [3, 8], # Smallest noise. + [-3, 2], # Largest noise. + ] - _NOISE_TRACK_FILENAME_TEMPLATE = '{0}.wav' - _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' + _NOISE_TRACK_FILENAME_TEMPLATE = '{0}.wav' + _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' - def __init__(self, output_directory_prefix, aechen_ir_database_path): - TestDataGenerator.__init__(self, output_directory_prefix) - self._aechen_ir_database_path = aechen_ir_database_path + def __init__(self, output_directory_prefix, aechen_ir_database_path): + TestDataGenerator.__init__(self, output_directory_prefix) + self._aechen_ir_database_path = aechen_ir_database_path - def _Generate( - self, input_signal_filepath, test_data_cache_path, base_output_path): - """Generates test data pairs using reverberation noise. + def _Generate(self, input_signal_filepath, test_data_cache_path, + base_output_path): + """Generates test data pairs using reverberation noise. For each impulse response, one noise track is created. For each impulse response and pair of SNR values, the following 2 audio tracks are @@ -424,61 +434,64 @@ def _Generate( obtained by mixing the (clean) input signal to the corresponding noise track enforcing the target SNR. """ - # Init. 
- snr_values = set([snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) - - # Load the input signal. - input_signal = signal_processing.SignalProcessingUtils.LoadWav( - input_signal_filepath) - - noisy_mix_filepaths = {} - for impulse_response_name in self._IMPULSE_RESPONSES: - noise_track_filename = self._NOISE_TRACK_FILENAME_TEMPLATE.format( - impulse_response_name) - noise_track_filepath = os.path.join( - test_data_cache_path, noise_track_filename) - noise_signal = None - try: - # Load noise track. - noise_signal = signal_processing.SignalProcessingUtils.LoadWav( - noise_track_filepath) - except exceptions.FileNotFoundError: - # Generate noise track by applying the impulse response. - impulse_response_filepath = os.path.join( - self._aechen_ir_database_path, - self._IMPULSE_RESPONSES[impulse_response_name]) - noise_signal = self._GenerateNoiseTrack( - noise_track_filepath, input_signal, impulse_response_filepath) - assert noise_signal is not None - - # Create the noisy mixes (once for each unique SNR value). - noisy_mix_filepaths[impulse_response_name] = {} - for snr in snr_values: - noisy_signal_filepath = os.path.join( - test_data_cache_path, - self._NOISY_SIGNAL_FILENAME_TEMPLATE.format( - impulse_response_name, snr)) - - # Create and save if not done. - if not os.path.exists(noisy_signal_filepath): - # Create noisy signal. - noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( - input_signal, noise_signal, snr) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noisy_signal_filepath, noisy_signal) - - # Add file to the collection of mixes. - noisy_mix_filepaths[impulse_response_name][snr] = noisy_signal_filepath - - # Add all the noise-SNR pairs. - self._AddNoiseSnrPairs(base_output_path, noisy_mix_filepaths, - self._SNR_VALUE_PAIRS) - - def _GenerateNoiseTrack(self, noise_track_filepath, input_signal, + # Init. + snr_values = set( + [snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) + + # Load the input signal. 
+ input_signal = signal_processing.SignalProcessingUtils.LoadWav( + input_signal_filepath) + + noisy_mix_filepaths = {} + for impulse_response_name in self._IMPULSE_RESPONSES: + noise_track_filename = self._NOISE_TRACK_FILENAME_TEMPLATE.format( + impulse_response_name) + noise_track_filepath = os.path.join(test_data_cache_path, + noise_track_filename) + noise_signal = None + try: + # Load noise track. + noise_signal = signal_processing.SignalProcessingUtils.LoadWav( + noise_track_filepath) + except exceptions.FileNotFoundError: + # Generate noise track by applying the impulse response. + impulse_response_filepath = os.path.join( + self._aechen_ir_database_path, + self._IMPULSE_RESPONSES[impulse_response_name]) + noise_signal = self._GenerateNoiseTrack( + noise_track_filepath, input_signal, + impulse_response_filepath) + assert noise_signal is not None + + # Create the noisy mixes (once for each unique SNR value). + noisy_mix_filepaths[impulse_response_name] = {} + for snr in snr_values: + noisy_signal_filepath = os.path.join( + test_data_cache_path, + self._NOISY_SIGNAL_FILENAME_TEMPLATE.format( + impulse_response_name, snr)) + + # Create and save if not done. + if not os.path.exists(noisy_signal_filepath): + # Create noisy signal. + noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( + input_signal, noise_signal, snr) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noisy_signal_filepath, noisy_signal) + + # Add file to the collection of mixes. + noisy_mix_filepaths[impulse_response_name][ + snr] = noisy_signal_filepath + + # Add all the noise-SNR pairs. + self._AddNoiseSnrPairs(base_output_path, noisy_mix_filepaths, + self._SNR_VALUE_PAIRS) + + def _GenerateNoiseTrack(self, noise_track_filepath, input_signal, impulse_response_filepath): - """Generates noise track. + """Generates noise track. Generate a signal by convolving input_signal with the impulse response in impulse_response_filepath; then save to noise_track_filepath. 
@@ -491,21 +504,23 @@ def _GenerateNoiseTrack(self, noise_track_filepath, input_signal, Returns: AudioSegment instance. """ - # Load impulse response. - data = scipy.io.loadmat(impulse_response_filepath) - impulse_response = data['h_air'].flatten() - if self._MAX_IMPULSE_RESPONSE_LENGTH is not None: - logging.info('truncating impulse response from %d to %d samples', - len(impulse_response), self._MAX_IMPULSE_RESPONSE_LENGTH) - impulse_response = impulse_response[:self._MAX_IMPULSE_RESPONSE_LENGTH] - - # Apply impulse response. - processed_signal = ( - signal_processing.SignalProcessingUtils.ApplyImpulseResponse( - input_signal, impulse_response)) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noise_track_filepath, processed_signal) - - return processed_signal + # Load impulse response. + data = scipy.io.loadmat(impulse_response_filepath) + impulse_response = data['h_air'].flatten() + if self._MAX_IMPULSE_RESPONSE_LENGTH is not None: + logging.info('truncating impulse response from %d to %d samples', + len(impulse_response), + self._MAX_IMPULSE_RESPONSE_LENGTH) + impulse_response = impulse_response[:self. + _MAX_IMPULSE_RESPONSE_LENGTH] + + # Apply impulse response. + processed_signal = ( + signal_processing.SignalProcessingUtils.ApplyImpulseResponse( + input_signal, impulse_response)) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noise_track_filepath, processed_signal) + + return processed_signal diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py index c80d150228..948888e775 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py @@ -5,7 +5,6 @@ # tree. 
An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """TestDataGenerator factory class. """ @@ -16,15 +15,15 @@ class TestDataGeneratorFactory(object): - """Factory class used to create test data generators. + """Factory class used to create test data generators. Usage: Create a factory passing parameters to the ctor with which the generators will be produced. """ - def __init__(self, aechen_ir_database_path, noise_tracks_path, - copy_with_identity): - """Ctor. + def __init__(self, aechen_ir_database_path, noise_tracks_path, + copy_with_identity): + """Ctor. Args: aechen_ir_database_path: Path to the Aechen Impulse Response database. @@ -32,16 +31,16 @@ def __init__(self, aechen_ir_database_path, noise_tracks_path, copy_with_identity: Flag indicating whether the identity generator has to make copies of the clean speech input files. """ - self._output_directory_prefix = None - self._aechen_ir_database_path = aechen_ir_database_path - self._noise_tracks_path = noise_tracks_path - self._copy_with_identity = copy_with_identity + self._output_directory_prefix = None + self._aechen_ir_database_path = aechen_ir_database_path + self._noise_tracks_path = noise_tracks_path + self._copy_with_identity = copy_with_identity - def SetOutputDirectoryPrefix(self, prefix): - self._output_directory_prefix = prefix + def SetOutputDirectoryPrefix(self, prefix): + self._output_directory_prefix = prefix - def GetInstance(self, test_data_generators_class): - """Creates an TestDataGenerator instance given a class object. + def GetInstance(self, test_data_generators_class): + """Creates an TestDataGenerator instance given a class object. Args: test_data_generators_class: TestDataGenerator class object (not an @@ -50,22 +49,23 @@ def GetInstance(self, test_data_generators_class): Returns: TestDataGenerator instance. 
""" - if self._output_directory_prefix is None: - raise exceptions.InitializationException( - 'The output directory prefix for test data generators is not set') - logging.debug('factory producing %s', test_data_generators_class) + if self._output_directory_prefix is None: + raise exceptions.InitializationException( + 'The output directory prefix for test data generators is not set' + ) + logging.debug('factory producing %s', test_data_generators_class) - if test_data_generators_class == ( - test_data_generation.IdentityTestDataGenerator): - return test_data_generation.IdentityTestDataGenerator( - self._output_directory_prefix, self._copy_with_identity) - elif test_data_generators_class == ( - test_data_generation.ReverberationTestDataGenerator): - return test_data_generation.ReverberationTestDataGenerator( - self._output_directory_prefix, self._aechen_ir_database_path) - elif test_data_generators_class == ( - test_data_generation.AdditiveNoiseTestDataGenerator): - return test_data_generation.AdditiveNoiseTestDataGenerator( - self._output_directory_prefix, self._noise_tracks_path) - else: - return test_data_generators_class(self._output_directory_prefix) + if test_data_generators_class == ( + test_data_generation.IdentityTestDataGenerator): + return test_data_generation.IdentityTestDataGenerator( + self._output_directory_prefix, self._copy_with_identity) + elif test_data_generators_class == ( + test_data_generation.ReverberationTestDataGenerator): + return test_data_generation.ReverberationTestDataGenerator( + self._output_directory_prefix, self._aechen_ir_database_path) + elif test_data_generators_class == ( + test_data_generation.AdditiveNoiseTestDataGenerator): + return test_data_generation.AdditiveNoiseTestDataGenerator( + self._output_directory_prefix, self._noise_tracks_path) + else: + return test_data_generators_class(self._output_directory_prefix) diff --git 
a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py index b0d003dbe8..6d0cb79f5b 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Unit tests for the test_data_generation module. """ @@ -23,141 +22,143 @@ class TestTestDataGenerators(unittest.TestCase): - """Unit tests for the test_data_generation module. + """Unit tests for the test_data_generation module. """ - def setUp(self): - """Create temporary folders.""" - self._base_output_path = tempfile.mkdtemp() - self._test_data_cache_path = tempfile.mkdtemp() - self._fake_air_db_path = tempfile.mkdtemp() - - # Fake AIR DB impulse responses. - # TODO(alessiob): ReverberationTestDataGenerator will change to allow custom - # impulse responses. When changed, the coupling below between - # impulse_response_mat_file_names and - # ReverberationTestDataGenerator._IMPULSE_RESPONSES can be removed. 
- impulse_response_mat_file_names = [ - 'air_binaural_lecture_0_0_1.mat', - 'air_binaural_booth_0_0_1.mat', - ] - for impulse_response_mat_file_name in impulse_response_mat_file_names: - data = {'h_air': np.random.rand(1, 1000).astype('SetRuntimeSetting( + AudioProcessing::RuntimeSetting::CreateCaptureOutputUsedSetting( + setting.capture_output_used())); } } } // namespace webrtc diff --git a/modules/audio_processing/test/test_utils.cc b/modules/audio_processing/test/test_utils.cc index d8d51bc819..839358d497 100644 --- a/modules/audio_processing/test/test_utils.cc +++ b/modules/audio_processing/test/test_utils.cc @@ -133,9 +133,9 @@ size_t SamplesFromRate(int rate) { return static_cast(AudioProcessing::kChunkSizeMs * rate / 1000); } -void SetFrameSampleRate(AudioFrame* frame, int sample_rate_hz) { - frame->sample_rate_hz_ = sample_rate_hz; - frame->samples_per_channel_ = +void SetFrameSampleRate(Int16FrameData* frame, int sample_rate_hz) { + frame->sample_rate_hz = sample_rate_hz; + frame->samples_per_channel = AudioProcessing::kChunkSizeMs * sample_rate_hz / 1000; } @@ -146,8 +146,7 @@ AudioProcessing::ChannelLayout LayoutFromChannels(size_t num_channels) { case 2: return AudioProcessing::kStereo; default: - RTC_CHECK(false); - return AudioProcessing::kMono; + RTC_CHECK_NOTREACHED(); } } diff --git a/modules/audio_processing/test/test_utils.h b/modules/audio_processing/test/test_utils.h index 341f2b2374..e2d243eec4 100644 --- a/modules/audio_processing/test/test_utils.h +++ b/modules/audio_processing/test/test_utils.h @@ -20,7 +20,6 @@ #include #include -#include "api/audio/audio_frame.h" #include "common_audio/channel_buffer.h" #include "common_audio/wav_file.h" #include "modules/audio_processing/include/audio_processing.h" @@ -45,6 +44,34 @@ class RawFile final { RTC_DISALLOW_COPY_AND_ASSIGN(RawFile); }; +// Encapsulates samples and metadata for an integer frame. 
+struct Int16FrameData { + // Max data size that matches the data size of the AudioFrame class, providing + // storage for 8 channels of 96 kHz data. + static const int kMaxDataSizeSamples = 7680; + + Int16FrameData() { + sample_rate_hz = 0; + num_channels = 0; + samples_per_channel = 0; + data.fill(0); + } + + void CopyFrom(const Int16FrameData& src) { + samples_per_channel = src.samples_per_channel; + sample_rate_hz = src.sample_rate_hz; + num_channels = src.num_channels; + + const size_t length = samples_per_channel * num_channels; + RTC_CHECK_LE(length, kMaxDataSizeSamples); + memcpy(data.data(), src.data.data(), sizeof(int16_t) * length); + } + std::array data; + int32_t sample_rate_hz; + size_t num_channels; + size_t samples_per_channel; +}; + // Reads ChannelBuffers from a provided WavReader. class ChannelBufferWavReader final { public: @@ -113,16 +140,16 @@ FILE* OpenFile(const std::string& filename, const char* mode); size_t SamplesFromRate(int rate); -void SetFrameSampleRate(AudioFrame* frame, int sample_rate_hz); +void SetFrameSampleRate(Int16FrameData* frame, int sample_rate_hz); template void SetContainerFormat(int sample_rate_hz, size_t num_channels, - AudioFrame* frame, + Int16FrameData* frame, std::unique_ptr >* cb) { SetFrameSampleRate(frame, sample_rate_hz); - frame->num_channels_ = num_channels; - cb->reset(new ChannelBuffer(frame->samples_per_channel_, num_channels)); + frame->num_channels = num_channels; + cb->reset(new ChannelBuffer(frame->samples_per_channel, num_channels)); } AudioProcessing::ChannelLayout LayoutFromChannels(size_t num_channels); diff --git a/modules/audio_processing/test/wav_based_simulator.cc b/modules/audio_processing/test/wav_based_simulator.cc index 4b465907cf..8536bf13be 100644 --- a/modules/audio_processing/test/wav_based_simulator.cc +++ b/modules/audio_processing/test/wav_based_simulator.cc @@ -16,6 +16,7 @@ #include "modules/audio_processing/test/test_utils.h" #include "rtc_base/checks.h" +#include 
"rtc_base/system/file_wrapper.h" namespace webrtc { namespace test { @@ -23,13 +24,14 @@ namespace test { std::vector WavBasedSimulator::GetCustomEventChain(const std::string& filename) { std::vector call_chain; - FILE* stream = OpenFile(filename.c_str(), "r"); + FileWrapper file_wrapper = FileWrapper::OpenReadOnly(filename.c_str()); - RTC_CHECK(stream) << "Could not open the custom call order file, reverting " - "to using the default call order"; + RTC_CHECK(file_wrapper.is_open()) + << "Could not open the custom call order file, reverting " + "to using the default call order"; char c; - size_t num_read = fread(&c, sizeof(char), 1, stream); + size_t num_read = file_wrapper.Read(&c, sizeof(char)); while (num_read > 0) { switch (c) { case 'r': @@ -43,21 +45,29 @@ WavBasedSimulator::GetCustomEventChain(const std::string& filename) { default: FATAL() << "Incorrect custom call order file, reverting to using the " "default call order"; - fclose(stream); return WavBasedSimulator::GetDefaultEventChain(); } - num_read = fread(&c, sizeof(char), 1, stream); + num_read = file_wrapper.Read(&c, sizeof(char)); } - fclose(stream); return call_chain; } WavBasedSimulator::WavBasedSimulator( const SimulationSettings& settings, + rtc::scoped_refptr audio_processing, std::unique_ptr ap_builder) - : AudioProcessingSimulator(settings, std::move(ap_builder)) {} + : AudioProcessingSimulator(settings, + std::move(audio_processing), + std::move(ap_builder)) { + if (settings_.call_order_input_filename) { + call_chain_ = WavBasedSimulator::GetCustomEventChain( + *settings_.call_order_input_filename); + } else { + call_chain_ = WavBasedSimulator::GetDefaultEventChain(); + } +} WavBasedSimulator::~WavBasedSimulator() = default; @@ -71,7 +81,7 @@ WavBasedSimulator::GetDefaultEventChain() { void WavBasedSimulator::PrepareProcessStreamCall() { if (settings_.fixed_interface) { - CopyToAudioFrame(*in_buf_, &fwd_frame_); + fwd_frame_.CopyFrom(*in_buf_); } ap_->set_stream_key_pressed(settings_.use_ts 
&& (*settings_.use_ts)); @@ -84,18 +94,12 @@ void WavBasedSimulator::PrepareProcessStreamCall() { void WavBasedSimulator::PrepareReverseProcessStreamCall() { if (settings_.fixed_interface) { - CopyToAudioFrame(*reverse_in_buf_, &rev_frame_); + rev_frame_.CopyFrom(*reverse_in_buf_); } } void WavBasedSimulator::Process() { - if (settings_.call_order_input_filename) { - call_chain_ = WavBasedSimulator::GetCustomEventChain( - *settings_.call_order_input_filename); - } else { - call_chain_ = WavBasedSimulator::GetDefaultEventChain(); - } - CreateAudioProcessor(); + ConfigureAudioProcessor(); Initialize(); @@ -114,13 +118,13 @@ void WavBasedSimulator::Process() { } break; default: - RTC_CHECK(false); + RTC_CHECK_NOTREACHED(); } call_chain_index = (call_chain_index + 1) % call_chain_.size(); } - DestroyAudioProcessor(); + DetachAecDump(); } bool WavBasedSimulator::HandleProcessStreamCall() { diff --git a/modules/audio_processing/test/wav_based_simulator.h b/modules/audio_processing/test/wav_based_simulator.h index 991f1dbaad..286ce1f587 100644 --- a/modules/audio_processing/test/wav_based_simulator.h +++ b/modules/audio_processing/test/wav_based_simulator.h @@ -14,7 +14,6 @@ #include #include "modules/audio_processing/test/audio_processing_simulator.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { namespace test { @@ -23,7 +22,13 @@ namespace test { class WavBasedSimulator final : public AudioProcessingSimulator { public: WavBasedSimulator(const SimulationSettings& settings, + rtc::scoped_refptr audio_processing, std::unique_ptr ap_builder); + + WavBasedSimulator() = delete; + WavBasedSimulator(const WavBasedSimulator&) = delete; + WavBasedSimulator& operator=(const WavBasedSimulator&) = delete; + ~WavBasedSimulator() override; // Processes the WAV input. 
@@ -45,8 +50,6 @@ class WavBasedSimulator final : public AudioProcessingSimulator { const std::string& filename); std::vector call_chain_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WavBasedSimulator); }; } // namespace test diff --git a/modules/audio_processing/three_band_filter_bank.cc b/modules/audio_processing/three_band_filter_bank.cc index dbbfc283c5..2a7d272e60 100644 --- a/modules/audio_processing/three_band_filter_bank.cc +++ b/modules/audio_processing/three_band_filter_bank.cc @@ -30,37 +30,33 @@ // // A similar logic can be applied to the synthesis stage. -// MSVC++ requires this to be set before any other includes to get M_PI. -#define _USE_MATH_DEFINES - #include "modules/audio_processing/three_band_filter_bank.h" -#include +#include #include "rtc_base/checks.h" namespace webrtc { namespace { -const size_t kNumBands = 3; -const size_t kSparsity = 4; - -// Factors to take into account when choosing |kNumCoeffs|: -// 1. Higher |kNumCoeffs|, means faster transition, which ensures less +// Factors to take into account when choosing |kFilterSize|: +// 1. Higher |kFilterSize|, means faster transition, which ensures less // aliasing. This is especially important when there is non-linear // processing between the splitting and merging. // 2. The delay that this filter bank introduces is -// |kNumBands| * |kSparsity| * |kNumCoeffs| / 2, so it increases linearly -// with |kNumCoeffs|. -// 3. The computation complexity also increases linearly with |kNumCoeffs|. -const size_t kNumCoeffs = 4; +// |kNumBands| * |kSparsity| * |kFilterSize| / 2, so it increases linearly +// with |kFilterSize|. +// 3. The computation complexity also increases linearly with |kFilterSize|. 
-// The Matlab code to generate these |kLowpassCoeffs| is: +// The Matlab code to generate these |kFilterCoeffs| is: // -// N = kNumBands * kSparsity * kNumCoeffs - 1; +// N = kNumBands * kSparsity * kFilterSize - 1; // h = fir1(N, 1 / (2 * kNumBands), kaiser(N + 1, 3.5)); -// reshape(h, kNumBands * kSparsity, kNumCoeffs); +// reshape(h, kNumBands * kSparsity, kFilterSize); // +// The code below uses the values of kFilterSize, kNumBands and kSparsity +// specified in the header. + // Because the total bandwidth of the lower and higher band is double the middle // one (because of the spectrum parity), the low-pass prototype is half the // bandwidth of 1 / (2 * |kNumBands|) and is then shifted with cosine modulation @@ -68,39 +64,84 @@ const size_t kNumCoeffs = 4; // A Kaiser window is used because of its flexibility and the alpha is set to // 3.5, since that sets a stop band attenuation of 40dB ensuring a fast // transition. -const float kLowpassCoeffs[kNumBands * kSparsity][kNumCoeffs] = { - {-0.00047749f, -0.00496888f, +0.16547118f, +0.00425496f}, - {-0.00173287f, -0.01585778f, +0.14989004f, +0.00994113f}, - {-0.00304815f, -0.02536082f, +0.12154542f, +0.01157993f}, - {-0.00383509f, -0.02982767f, +0.08543175f, +0.00983212f}, - {-0.00346946f, -0.02587886f, +0.04760441f, +0.00607594f}, - {-0.00154717f, -0.01136076f, +0.01387458f, +0.00186353f}, - {+0.00186353f, +0.01387458f, -0.01136076f, -0.00154717f}, - {+0.00607594f, +0.04760441f, -0.02587886f, -0.00346946f}, - {+0.00983212f, +0.08543175f, -0.02982767f, -0.00383509f}, - {+0.01157993f, +0.12154542f, -0.02536082f, -0.00304815f}, - {+0.00994113f, +0.14989004f, -0.01585778f, -0.00173287f}, - {+0.00425496f, +0.16547118f, -0.00496888f, -0.00047749f}}; - -// Downsamples |in| into |out|, taking one every |kNumbands| starting from -// |offset|. |split_length| is the |out| length. |in| has to be at least -// |kNumBands| * |split_length| long. 
-void Downsample(const float* in, - size_t split_length, - size_t offset, - float* out) { - for (size_t i = 0; i < split_length; ++i) { - out[i] = in[kNumBands * i + offset]; + +constexpr int kSubSampling = ThreeBandFilterBank::kNumBands; +constexpr int kDctSize = ThreeBandFilterBank::kNumBands; +static_assert(ThreeBandFilterBank::kNumBands * + ThreeBandFilterBank::kSplitBandSize == + ThreeBandFilterBank::kFullBandSize, + "The full band must be split in equally sized subbands"); + +const float + kFilterCoeffs[ThreeBandFilterBank::kNumNonZeroFilters][kFilterSize] = { + {-0.00047749f, -0.00496888f, +0.16547118f, +0.00425496f}, + {-0.00173287f, -0.01585778f, +0.14989004f, +0.00994113f}, + {-0.00304815f, -0.02536082f, +0.12154542f, +0.01157993f}, + {-0.00346946f, -0.02587886f, +0.04760441f, +0.00607594f}, + {-0.00154717f, -0.01136076f, +0.01387458f, +0.00186353f}, + {+0.00186353f, +0.01387458f, -0.01136076f, -0.00154717f}, + {+0.00607594f, +0.04760441f, -0.02587886f, -0.00346946f}, + {+0.00983212f, +0.08543175f, -0.02982767f, -0.00383509f}, + {+0.00994113f, +0.14989004f, -0.01585778f, -0.00173287f}, + {+0.00425496f, +0.16547118f, -0.00496888f, -0.00047749f}}; + +constexpr int kZeroFilterIndex1 = 3; +constexpr int kZeroFilterIndex2 = 9; + +const float kDctModulation[ThreeBandFilterBank::kNumNonZeroFilters][kDctSize] = + {{2.f, 2.f, 2.f}, + {1.73205077f, 0.f, -1.73205077f}, + {1.f, -2.f, 1.f}, + {-1.f, 2.f, -1.f}, + {-1.73205077f, 0.f, 1.73205077f}, + {-2.f, -2.f, -2.f}, + {-1.73205077f, 0.f, 1.73205077f}, + {-1.f, 2.f, -1.f}, + {1.f, -2.f, 1.f}, + {1.73205077f, 0.f, -1.73205077f}}; + +// Filters the input signal |in| with the filter |filter| using a shift by +// |in_shift|, taking into account the previous state. 
+void FilterCore( + rtc::ArrayView filter, + rtc::ArrayView in, + const int in_shift, + rtc::ArrayView out, + rtc::ArrayView state) { + constexpr int kMaxInShift = (kStride - 1); + RTC_DCHECK_GE(in_shift, 0); + RTC_DCHECK_LE(in_shift, kMaxInShift); + std::fill(out.begin(), out.end(), 0.f); + + for (int k = 0; k < in_shift; ++k) { + for (int i = 0, j = kMemorySize + k - in_shift; i < kFilterSize; + ++i, j -= kStride) { + out[k] += state[j] * filter[i]; + } } -} -// Upsamples |in| into |out|, scaling by |kNumBands| and accumulating it every -// |kNumBands| starting from |offset|. |split_length| is the |in| length. |out| -// has to be at least |kNumBands| * |split_length| long. -void Upsample(const float* in, size_t split_length, size_t offset, float* out) { - for (size_t i = 0; i < split_length; ++i) { - out[kNumBands * i + offset] += kNumBands * in[i]; + for (int k = in_shift, shift = 0; k < kFilterSize * kStride; ++k, ++shift) { + RTC_DCHECK_GE(shift, 0); + const int loop_limit = std::min(kFilterSize, 1 + (shift >> kStrideLog2)); + for (int i = 0, j = shift; i < loop_limit; ++i, j -= kStride) { + out[k] += in[j] * filter[i]; + } + for (int i = loop_limit, j = kMemorySize + shift - loop_limit * kStride; + i < kFilterSize; ++i, j -= kStride) { + out[k] += state[j] * filter[i]; + } } + + for (int k = kFilterSize * kStride, shift = kFilterSize * kStride - in_shift; + k < ThreeBandFilterBank::kSplitBandSize; ++k, ++shift) { + for (int i = 0, j = shift; i < kFilterSize; ++i, j -= kStride) { + out[k] += in[j] * filter[i]; + } + } + + // Update current state. + std::copy(in.begin() + ThreeBandFilterBank::kSplitBandSize - kMemorySize, + in.end(), state.begin()); } } // namespace @@ -108,26 +149,15 @@ void Upsample(const float* in, size_t split_length, size_t offset, float* out) { // Because the low-pass filter prototype has half bandwidth it is possible to // use a DCT to shift it in both directions at the same time, to the center // frequencies [1 / 12, 3 / 12, 5 / 12]. 
-ThreeBandFilterBank::ThreeBandFilterBank(size_t length) - : in_buffer_(rtc::CheckedDivExact(length, kNumBands)), - out_buffer_(in_buffer_.size()) { - for (size_t i = 0; i < kSparsity; ++i) { - for (size_t j = 0; j < kNumBands; ++j) { - analysis_filters_.push_back( - std::unique_ptr(new SparseFIRFilter( - kLowpassCoeffs[i * kNumBands + j], kNumCoeffs, kSparsity, i))); - synthesis_filters_.push_back( - std::unique_ptr(new SparseFIRFilter( - kLowpassCoeffs[i * kNumBands + j], kNumCoeffs, kSparsity, i))); - } - } - dct_modulation_.resize(kNumBands * kSparsity); - for (size_t i = 0; i < dct_modulation_.size(); ++i) { - dct_modulation_[i].resize(kNumBands); - for (size_t j = 0; j < kNumBands; ++j) { - dct_modulation_[i][j] = - 2.f * cos(2.f * M_PI * i * (2.f * j + 1.f) / dct_modulation_.size()); - } +ThreeBandFilterBank::ThreeBandFilterBank() { + RTC_DCHECK_EQ(state_analysis_.size(), kNumNonZeroFilters); + RTC_DCHECK_EQ(state_synthesis_.size(), kNumNonZeroFilters); + for (int k = 0; k < kNumNonZeroFilters; ++k) { + RTC_DCHECK_EQ(state_analysis_[k].size(), kMemorySize); + RTC_DCHECK_EQ(state_synthesis_[k].size(), kMemorySize); + + state_analysis_[k].fill(0.f); + state_synthesis_[k].fill(0.f); } } @@ -139,20 +169,52 @@ ThreeBandFilterBank::~ThreeBandFilterBank() = default; // decomposition of the low-pass prototype filter and upsampled by a factor // of |kSparsity|. // 3. Modulating with cosines and accumulating to get the desired band. -void ThreeBandFilterBank::Analysis(const float* in, - size_t length, - float* const* out) { - RTC_CHECK_EQ(in_buffer_.size(), rtc::CheckedDivExact(length, kNumBands)); - for (size_t i = 0; i < kNumBands; ++i) { - memset(out[i], 0, in_buffer_.size() * sizeof(*out[i])); +void ThreeBandFilterBank::Analysis( + rtc::ArrayView in, + rtc::ArrayView, ThreeBandFilterBank::kNumBands> + out) { + // Initialize the output to zero. 
+ for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { + RTC_DCHECK_EQ(out[band].size(), kSplitBandSize); + std::fill(out[band].begin(), out[band].end(), 0); } - for (size_t i = 0; i < kNumBands; ++i) { - Downsample(in, in_buffer_.size(), kNumBands - i - 1, &in_buffer_[0]); - for (size_t j = 0; j < kSparsity; ++j) { - const size_t offset = i + j * kNumBands; - analysis_filters_[offset]->Filter(&in_buffer_[0], in_buffer_.size(), - &out_buffer_[0]); - DownModulate(&out_buffer_[0], out_buffer_.size(), offset, out); + + for (int downsampling_index = 0; downsampling_index < kSubSampling; + ++downsampling_index) { + // Downsample to form the filter input. + std::array in_subsampled; + for (int k = 0; k < kSplitBandSize; ++k) { + in_subsampled[k] = + in[(kSubSampling - 1) - downsampling_index + kSubSampling * k]; + } + + for (int in_shift = 0; in_shift < kStride; ++in_shift) { + // Choose filter, skip zero filters. + const int index = downsampling_index + in_shift * kSubSampling; + if (index == kZeroFilterIndex1 || index == kZeroFilterIndex2) { + continue; + } + const int filter_index = + index < kZeroFilterIndex1 + ? index + : (index < kZeroFilterIndex2 ? index - 1 : index - 2); + + rtc::ArrayView filter( + kFilterCoeffs[filter_index]); + rtc::ArrayView dct_modulation( + kDctModulation[filter_index]); + rtc::ArrayView state(state_analysis_[filter_index]); + + // Filter. + std::array out_subsampled; + FilterCore(filter, in_subsampled, in_shift, out_subsampled, state); + + // Band and modulate the output. + for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { + for (int n = 0; n < kSplitBandSize; ++n) { + out[band][n] += dct_modulation[band] * out_subsampled[n]; + } + } } } } @@ -163,49 +225,50 @@ void ThreeBandFilterBank::Analysis(const float* in, // prototype filter upsampled by a factor of |kSparsity| and accumulating // |kSparsity| signals with different delays. // 3. Parallel to serial upsampling by a factor of |kNumBands|. 
-void ThreeBandFilterBank::Synthesis(const float* const* in, - size_t split_length, - float* out) { - RTC_CHECK_EQ(in_buffer_.size(), split_length); - memset(out, 0, kNumBands * in_buffer_.size() * sizeof(*out)); - for (size_t i = 0; i < kNumBands; ++i) { - for (size_t j = 0; j < kSparsity; ++j) { - const size_t offset = i + j * kNumBands; - UpModulate(in, in_buffer_.size(), offset, &in_buffer_[0]); - synthesis_filters_[offset]->Filter(&in_buffer_[0], in_buffer_.size(), - &out_buffer_[0]); - Upsample(&out_buffer_[0], out_buffer_.size(), i, out); - } - } -} +void ThreeBandFilterBank::Synthesis( + rtc::ArrayView, ThreeBandFilterBank::kNumBands> + in, + rtc::ArrayView out) { + std::fill(out.begin(), out.end(), 0); + for (int upsampling_index = 0; upsampling_index < kSubSampling; + ++upsampling_index) { + for (int in_shift = 0; in_shift < kStride; ++in_shift) { + // Choose filter, skip zero filters. + const int index = upsampling_index + in_shift * kSubSampling; + if (index == kZeroFilterIndex1 || index == kZeroFilterIndex2) { + continue; + } + const int filter_index = + index < kZeroFilterIndex1 + ? index + : (index < kZeroFilterIndex2 ? index - 1 : index - 2); -// Modulates |in| by |dct_modulation_| and accumulates it in each of the -// |kNumBands| bands of |out|. |offset| is the index in the period of the -// cosines used for modulation. |split_length| is the length of |in| and each -// band of |out|. -void ThreeBandFilterBank::DownModulate(const float* in, - size_t split_length, - size_t offset, - float* const* out) { - for (size_t i = 0; i < kNumBands; ++i) { - for (size_t j = 0; j < split_length; ++j) { - out[i][j] += dct_modulation_[offset][i] * in[j]; - } - } -} + rtc::ArrayView filter( + kFilterCoeffs[filter_index]); + rtc::ArrayView dct_modulation( + kDctModulation[filter_index]); + rtc::ArrayView state(state_synthesis_[filter_index]); + + // Prepare filter input by modulating the banded input. 
+ std::array in_subsampled; + std::fill(in_subsampled.begin(), in_subsampled.end(), 0.f); + for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { + RTC_DCHECK_EQ(in[band].size(), kSplitBandSize); + for (int n = 0; n < kSplitBandSize; ++n) { + in_subsampled[n] += dct_modulation[band] * in[band][n]; + } + } + + // Filter. + std::array out_subsampled; + FilterCore(filter, in_subsampled, in_shift, out_subsampled, state); -// Modulates each of the |kNumBands| bands of |in| by |dct_modulation_| and -// accumulates them in |out|. |out| is cleared before starting to accumulate. -// |offset| is the index in the period of the cosines used for modulation. -// |split_length| is the length of each band of |in| and |out|. -void ThreeBandFilterBank::UpModulate(const float* const* in, - size_t split_length, - size_t offset, - float* out) { - memset(out, 0, split_length * sizeof(*out)); - for (size_t i = 0; i < kNumBands; ++i) { - for (size_t j = 0; j < split_length; ++j) { - out[j] += dct_modulation_[offset][i] * in[i][j]; + // Upsample. 
+ constexpr float kUpsamplingScaling = kSubSampling; + for (int k = 0; k < kSplitBandSize; ++k) { + out[upsampling_index + kSubSampling * k] += + kUpsamplingScaling * out_subsampled[k]; + } } } } diff --git a/modules/audio_processing/three_band_filter_bank.h b/modules/audio_processing/three_band_filter_bank.h index ccbf2ddf97..e6346dec44 100644 --- a/modules/audio_processing/three_band_filter_bank.h +++ b/modules/audio_processing/three_band_filter_bank.h @@ -11,14 +11,25 @@ #ifndef MODULES_AUDIO_PROCESSING_THREE_BAND_FILTER_BANK_H_ #define MODULES_AUDIO_PROCESSING_THREE_BAND_FILTER_BANK_H_ +#include #include #include #include -#include "common_audio/sparse_fir_filter.h" +#include "api/array_view.h" namespace webrtc { +constexpr int kSparsity = 4; +constexpr int kStrideLog2 = 2; +constexpr int kStride = 1 << kStrideLog2; +constexpr int kNumZeroFilters = 2; +constexpr int kFilterSize = 4; +constexpr int kMemorySize = kFilterSize * kStride - 1; +static_assert(kMemorySize == 15, + "The memory size must be sufficient to provide memory for the " + "shifted filters"); + // An implementation of a 3-band FIR filter-bank with DCT modulation, similar to // the proposed in "Multirate Signal Processing for Communication Systems" by // Fredric J Harris. @@ -34,34 +45,31 @@ namespace webrtc { // depending on the input signal after compensating for the delay. class ThreeBandFilterBank final { public: - explicit ThreeBandFilterBank(size_t length); + static const int kNumBands = 3; + static const int kFullBandSize = 480; + static const int kSplitBandSize = + ThreeBandFilterBank::kFullBandSize / ThreeBandFilterBank::kNumBands; + static const int kNumNonZeroFilters = + kSparsity * ThreeBandFilterBank::kNumBands - kNumZeroFilters; + + ThreeBandFilterBank(); ~ThreeBandFilterBank(); - // Splits |in| into 3 downsampled frequency bands in |out|. - // |length| is the |in| length. Each of the 3 bands of |out| has to have a - // length of |length| / 3. 
- void Analysis(const float* in, size_t length, float* const* out); + // Splits |in| of size kFullBandSize into 3 downsampled frequency bands in + // |out|, each of size 160. + void Analysis(rtc::ArrayView in, + rtc::ArrayView, kNumBands> out); - // Merges the 3 downsampled frequency bands in |in| into |out|. - // |split_length| is the length of each band of |in|. |out| has to have at - // least a length of 3 * |split_length|. - void Synthesis(const float* const* in, size_t split_length, float* out); + // Merges the 3 downsampled frequency bands in |in|, each of size 160, into + // |out|, which is of size kFullBandSize. + void Synthesis(rtc::ArrayView, kNumBands> in, + rtc::ArrayView out); private: - void DownModulate(const float* in, - size_t split_length, - size_t offset, - float* const* out); - void UpModulate(const float* const* in, - size_t split_length, - size_t offset, - float* out); - - std::vector in_buffer_; - std::vector out_buffer_; - std::vector> analysis_filters_; - std::vector> synthesis_filters_; - std::vector> dct_modulation_; + std::array, kNumNonZeroFilters> + state_analysis_; + std::array, kNumNonZeroFilters> + state_synthesis_; }; } // namespace webrtc diff --git a/modules/audio_processing/transient/BUILD.gn b/modules/audio_processing/transient/BUILD.gn new file mode 100644 index 0000000000..13e319f88e --- /dev/null +++ b/modules/audio_processing/transient/BUILD.gn @@ -0,0 +1,112 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") + +rtc_source_set("transient_suppressor_api") { + sources = [ "transient_suppressor.h" ] +} + +rtc_library("transient_suppressor_impl") { + visibility = [ + "..:optionally_built_submodule_creators", + ":transient_suppression_test", + ":transient_suppression_unittests", + ":click_annotate", + ] + sources = [ + "common.h", + "daubechies_8_wavelet_coeffs.h", + "dyadic_decimator.h", + "moving_moments.cc", + "moving_moments.h", + "transient_detector.cc", + "transient_detector.h", + "transient_suppressor_impl.cc", + "transient_suppressor_impl.h", + "windows_private.h", + "wpd_node.cc", + "wpd_node.h", + "wpd_tree.cc", + "wpd_tree.h", + ] + deps = [ + ":transient_suppressor_api", + "../../../common_audio:common_audio", + "../../../common_audio:common_audio_c", + "../../../common_audio:fir_filter", + "../../../common_audio:fir_filter_factory", + "../../../common_audio/third_party/ooura:fft_size_256", + "../../../rtc_base:checks", + "../../../rtc_base:gtest_prod", + "../../../rtc_base:logging", + ] +} + +if (rtc_include_tests) { + rtc_executable("click_annotate") { + testonly = true + sources = [ + "click_annotate.cc", + "file_utils.cc", + "file_utils.h", + ] + deps = [ + ":transient_suppressor_impl", + "..:audio_processing", + "../../../rtc_base/system:file_wrapper", + "../../../system_wrappers", + ] + } + + rtc_executable("transient_suppression_test") { + testonly = true + sources = [ + "file_utils.cc", + "file_utils.h", + "transient_suppression_test.cc", + ] + deps = [ + ":transient_suppressor_impl", + "..:audio_processing", + "../../../common_audio", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:file_wrapper", + "../../../system_wrappers", + "../../../test:fileutils", + "../../../test:test_support", + "../agc:level_estimation", + "//testing/gtest", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/flags:parse", + ] + } + + rtc_library("transient_suppression_unittests") { + testonly = 
true + sources = [ + "dyadic_decimator_unittest.cc", + "file_utils.cc", + "file_utils.h", + "file_utils_unittest.cc", + "moving_moments_unittest.cc", + "transient_detector_unittest.cc", + "transient_suppressor_unittest.cc", + "wpd_node_unittest.cc", + "wpd_tree_unittest.cc", + ] + deps = [ + ":transient_suppressor_impl", + "../../../rtc_base:stringutils", + "../../../rtc_base/system:file_wrapper", + "../../../test:fileutils", + "../../../test:test_support", + "//testing/gtest", + ] + } +} diff --git a/modules/audio_processing/transient/transient_suppression_test.cc b/modules/audio_processing/transient/transient_suppression_test.cc index 7a2f6a32fe..d06fd96bac 100644 --- a/modules/audio_processing/transient/transient_suppression_test.cc +++ b/modules/audio_processing/transient/transient_suppression_test.cc @@ -20,7 +20,7 @@ #include "absl/flags/parse.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc/agc.h" -#include "modules/audio_processing/transient/transient_suppressor.h" +#include "modules/audio_processing/transient/transient_suppressor_impl.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -165,7 +165,7 @@ void void_main() { Agc agc; - TransientSuppressor suppressor; + TransientSuppressorImpl suppressor; suppressor.Initialize(absl::GetFlag(FLAGS_sample_rate_hz), detection_rate_hz, absl::GetFlag(FLAGS_num_channels)); diff --git a/modules/audio_processing/transient/transient_suppressor.h b/modules/audio_processing/transient/transient_suppressor.h index 2322b8f2c1..bb262b0684 100644 --- a/modules/audio_processing/transient/transient_suppressor.h +++ b/modules/audio_processing/transient/transient_suppressor.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -13,23 +13,19 @@ #include #include - #include -#include "rtc_base/gtest_prod_util.h" - namespace webrtc { -class TransientDetector; - // Detects transients in an audio stream and suppress them using a simple // restoration algorithm that attenuates unexpected spikes in the spectrum. class TransientSuppressor { public: - TransientSuppressor(); - ~TransientSuppressor(); + virtual ~TransientSuppressor() {} - int Initialize(int sample_rate_hz, int detector_rate_hz, int num_channels); + virtual int Initialize(int sample_rate_hz, + int detector_rate_hz, + int num_channels) = 0; // Processes a |data| chunk, and returns it with keystrokes suppressed from // it. The float format is assumed to be int16 ranged. If there are more than @@ -48,71 +44,15 @@ class TransientSuppressor { // always be set to 1. // |key_pressed| determines if a key was pressed on this audio chunk. // Returns 0 on success and -1 otherwise. - int Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed); - - private: - FRIEND_TEST_ALL_PREFIXES(TransientSuppressorTest, - TypingDetectionLogicWorksAsExpectedForMono); - void Suppress(float* in_ptr, float* spectral_mean, float* out_ptr); - - void UpdateKeypress(bool key_pressed); - void UpdateRestoration(float voice_probability); - - void UpdateBuffers(float* data); - - void HardRestoration(float* spectral_mean); - void SoftRestoration(float* spectral_mean); - - std::unique_ptr detector_; - - size_t data_length_; - size_t detection_length_; - size_t analysis_length_; - size_t buffer_delay_; - size_t complex_analysis_length_; - int num_channels_; - // Input buffer where the original samples are stored. 
- std::unique_ptr in_buffer_; - std::unique_ptr detection_buffer_; - // Output buffer where the restored samples are stored. - std::unique_ptr out_buffer_; - - // Arrays for fft. - std::unique_ptr ip_; - std::unique_ptr wfft_; - - std::unique_ptr spectral_mean_; - - // Stores the data for the fft. - std::unique_ptr fft_buffer_; - - std::unique_ptr magnitudes_; - - const float* window_; - - std::unique_ptr mean_factor_; - - float detector_smoothed_; - - int keypress_counter_; - int chunks_since_keypress_; - bool detection_enabled_; - bool suppression_enabled_; - - bool use_hard_restoration_; - int chunks_since_voice_change_; - - uint32_t seed_; - - bool using_reference_; + virtual int Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) = 0; }; } // namespace webrtc diff --git a/modules/audio_processing/transient/transient_suppressor.cc b/modules/audio_processing/transient/transient_suppressor_impl.cc similarity index 89% rename from modules/audio_processing/transient/transient_suppressor.cc rename to modules/audio_processing/transient/transient_suppressor_impl.cc index b10b05518d..d515d30131 100644 --- a/modules/audio_processing/transient/transient_suppressor.cc +++ b/modules/audio_processing/transient/transient_suppressor_impl.cc @@ -8,21 +8,24 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/audio_processing/transient/transient_suppressor.h" +#include "modules/audio_processing/transient/transient_suppressor_impl.h" #include +#include #include #include #include +#include #include #include "common_audio/include/audio_util.h" #include "common_audio/signal_processing/include/signal_processing_library.h" -#include "common_audio/third_party/fft4g/fft4g.h" -#include "modules/audio_processing/legacy_ns/windows_private.h" +#include "common_audio/third_party/ooura/fft_size_256/fft4g.h" #include "modules/audio_processing/transient/common.h" #include "modules/audio_processing/transient/transient_detector.h" +#include "modules/audio_processing/transient/transient_suppressor.h" +#include "modules/audio_processing/transient/windows_private.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -43,7 +46,7 @@ float ComplexMagnitude(float a, float b) { } // namespace -TransientSuppressor::TransientSuppressor() +TransientSuppressorImpl::TransientSuppressorImpl() : data_length_(0), detection_length_(0), analysis_length_(0), @@ -61,11 +64,11 @@ TransientSuppressor::TransientSuppressor() seed_(182), using_reference_(false) {} -TransientSuppressor::~TransientSuppressor() {} +TransientSuppressorImpl::~TransientSuppressorImpl() {} -int TransientSuppressor::Initialize(int sample_rate_hz, - int detection_rate_hz, - int num_channels) { +int TransientSuppressorImpl::Initialize(int sample_rate_hz, + int detection_rate_hz, + int num_channels) { switch (sample_rate_hz) { case ts::kSampleRate8kHz: analysis_length_ = 128u; @@ -155,15 +158,15 @@ int TransientSuppressor::Initialize(int sample_rate_hz, return 0; } -int TransientSuppressor::Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed) { +int TransientSuppressorImpl::Suppress(float* data, + size_t data_length, + int num_channels, 
+ const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) { if (!data || data_length != data_length_ || num_channels != num_channels_ || detection_length != detection_length_ || voice_probability < 0 || voice_probability > 1) { @@ -222,9 +225,9 @@ int TransientSuppressor::Suppress(float* data, // This should only be called when detection is enabled. UpdateBuffers() must // have been called. At return, |out_buffer_| will be filled with the // processed output. -void TransientSuppressor::Suppress(float* in_ptr, - float* spectral_mean, - float* out_ptr) { +void TransientSuppressorImpl::Suppress(float* in_ptr, + float* spectral_mean, + float* out_ptr) { // Go to frequency domain. for (size_t i = 0; i < analysis_length_; ++i) { // TODO(aluebs): Rename windows @@ -270,7 +273,7 @@ void TransientSuppressor::Suppress(float* in_ptr, } } -void TransientSuppressor::UpdateKeypress(bool key_pressed) { +void TransientSuppressorImpl::UpdateKeypress(bool key_pressed) { const int kKeypressPenalty = 1000 / ts::kChunkSizeMs; const int kIsTypingThreshold = 1000 / ts::kChunkSizeMs; const int kChunksUntilNotTyping = 4000 / ts::kChunkSizeMs; // 4 seconds. @@ -300,7 +303,7 @@ void TransientSuppressor::UpdateKeypress(bool key_pressed) { } } -void TransientSuppressor::UpdateRestoration(float voice_probability) { +void TransientSuppressorImpl::UpdateRestoration(float voice_probability) { const int kHardRestorationOffsetDelay = 3; const int kHardRestorationOnsetDelay = 80; @@ -323,7 +326,7 @@ void TransientSuppressor::UpdateRestoration(float voice_probability) { // Shift buffers to make way for new data. Must be called after // |detection_enabled_| is updated by UpdateKeypress(). -void TransientSuppressor::UpdateBuffers(float* data) { +void TransientSuppressorImpl::UpdateBuffers(float* data) { // TODO(aluebs): Change to ring buffer. 
memmove(in_buffer_.get(), &in_buffer_[data_length_], (buffer_delay_ + (num_channels_ - 1) * analysis_length_) * @@ -350,7 +353,7 @@ void TransientSuppressor::UpdateBuffers(float* data) { // Attenuates by a certain factor every peak in the |fft_buffer_| that exceeds // the spectral mean. The attenuation depends on |detector_smoothed_|. // If a restoration takes place, the |magnitudes_| are updated to the new value. -void TransientSuppressor::HardRestoration(float* spectral_mean) { +void TransientSuppressorImpl::HardRestoration(float* spectral_mean) { const float detector_result = 1.f - std::pow(1.f - detector_smoothed_, using_reference_ ? 200.f : 50.f); // To restore, we get the peaks in the spectrum. If higher than the previous @@ -377,7 +380,7 @@ void TransientSuppressor::HardRestoration(float* spectral_mean) { // the spectral mean and that is lower than some function of the current block // frequency mean. The attenuation depends on |detector_smoothed_|. // If a restoration takes place, the |magnitudes_| are updated to the new value. -void TransientSuppressor::SoftRestoration(float* spectral_mean) { +void TransientSuppressorImpl::SoftRestoration(float* spectral_mean) { // Get the spectral magnitude mean of the current block. float block_frequency_mean = 0; for (size_t i = kMinVoiceBin; i < kMaxVoiceBin; ++i) { diff --git a/modules/audio_processing/transient/transient_suppressor_impl.h b/modules/audio_processing/transient/transient_suppressor_impl.h new file mode 100644 index 0000000000..4737af517d --- /dev/null +++ b/modules/audio_processing/transient/transient_suppressor_impl.h @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ + +#include +#include + +#include + +#include "modules/audio_processing/transient/transient_suppressor.h" +#include "rtc_base/gtest_prod_util.h" + +namespace webrtc { + +class TransientDetector; + +// Detects transients in an audio stream and suppress them using a simple +// restoration algorithm that attenuates unexpected spikes in the spectrum. +class TransientSuppressorImpl : public TransientSuppressor { + public: + TransientSuppressorImpl(); + ~TransientSuppressorImpl() override; + + int Initialize(int sample_rate_hz, + int detector_rate_hz, + int num_channels) override; + + // Processes a |data| chunk, and returns it with keystrokes suppressed from + // it. The float format is assumed to be int16 ranged. If there are more than + // one channel, the chunks are concatenated one after the other in |data|. + // |data_length| must be equal to |data_length_|. + // |num_channels| must be equal to |num_channels_|. + // A sub-band, ideally the higher, can be used as |detection_data|. If it is + // NULL, |data| is used for the detection too. The |detection_data| is always + // assumed mono. + // If a reference signal (e.g. keyboard microphone) is available, it can be + // passed in as |reference_data|. It is assumed mono and must have the same + // length as |data|. NULL is accepted if unavailable. + // This suppressor performs better if voice information is available. + // |voice_probability| is the probability of voice being present in this chunk + // of audio. If voice information is not available, |voice_probability| must + // always be set to 1. + // |key_pressed| determines if a key was pressed on this audio chunk. + // Returns 0 on success and -1 otherwise. 
+ int Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) override; + + private: + FRIEND_TEST_ALL_PREFIXES(TransientSuppressorImplTest, + TypingDetectionLogicWorksAsExpectedForMono); + void Suppress(float* in_ptr, float* spectral_mean, float* out_ptr); + + void UpdateKeypress(bool key_pressed); + void UpdateRestoration(float voice_probability); + + void UpdateBuffers(float* data); + + void HardRestoration(float* spectral_mean); + void SoftRestoration(float* spectral_mean); + + std::unique_ptr detector_; + + size_t data_length_; + size_t detection_length_; + size_t analysis_length_; + size_t buffer_delay_; + size_t complex_analysis_length_; + int num_channels_; + // Input buffer where the original samples are stored. + std::unique_ptr in_buffer_; + std::unique_ptr detection_buffer_; + // Output buffer where the restored samples are stored. + std::unique_ptr out_buffer_; + + // Arrays for fft. + std::unique_ptr ip_; + std::unique_ptr wfft_; + + std::unique_ptr spectral_mean_; + + // Stores the data for the fft. 
+ std::unique_ptr fft_buffer_; + + std::unique_ptr magnitudes_; + + const float* window_; + + std::unique_ptr mean_factor_; + + float detector_smoothed_; + + int keypress_counter_; + int chunks_since_keypress_; + bool detection_enabled_; + bool suppression_enabled_; + + bool use_hard_restoration_; + int chunks_since_voice_change_; + + uint32_t seed_; + + bool using_reference_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ diff --git a/modules/audio_processing/transient/transient_suppressor_unittest.cc b/modules/audio_processing/transient/transient_suppressor_unittest.cc index 32d9858c64..a5c6bb1922 100644 --- a/modules/audio_processing/transient/transient_suppressor_unittest.cc +++ b/modules/audio_processing/transient/transient_suppressor_unittest.cc @@ -8,17 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/transient/transient_suppressor.h" +#include "modules/audio_processing/transient/transient_suppressor_impl.h" #include "modules/audio_processing/transient/common.h" #include "test/gtest.h" namespace webrtc { -TEST(TransientSuppressorTest, TypingDetectionLogicWorksAsExpectedForMono) { +TEST(TransientSuppressorImplTest, TypingDetectionLogicWorksAsExpectedForMono) { static const int kNumChannels = 1; - TransientSuppressor ts; + TransientSuppressorImpl ts; ts.Initialize(ts::kSampleRate16kHz, ts::kSampleRate16kHz, kNumChannels); // Each key-press enables detection. 
diff --git a/modules/audio_processing/legacy_ns/windows_private.h b/modules/audio_processing/transient/windows_private.h similarity index 99% rename from modules/audio_processing/legacy_ns/windows_private.h rename to modules/audio_processing/transient/windows_private.h index 21bb7d4bad..54e3c25785 100644 --- a/modules/audio_processing/legacy_ns/windows_private.h +++ b/modules/audio_processing/transient/windows_private.h @@ -8,8 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_AUDIO_PROCESSING_LEGACY_NS_WINDOWS_PRIVATE_H_ -#define MODULES_AUDIO_PROCESSING_LEGACY_NS_WINDOWS_PRIVATE_H_ +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ + +namespace webrtc { // Hanning window for 4ms 16kHz static const float kHanning64w128[128] = { @@ -550,4 +552,6 @@ static const float kBlocks480w1024[1024] = { 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f}; -#endif // MODULES_AUDIO_PROCESSING_LEGACY_NS_WINDOWS_PRIVATE_H_ +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ diff --git a/modules/audio_processing/utility/BUILD.gn b/modules/audio_processing/utility/BUILD.gn index 88d2bbd318..437b544fc9 100644 --- a/modules/audio_processing/utility/BUILD.gn +++ b/modules/audio_processing/utility/BUILD.gn @@ -30,48 +30,6 @@ rtc_library("legacy_delay_estimator") { deps = [ "../../../rtc_base:checks" ] } -rtc_library("ooura_fft") { - sources = [ - "ooura_fft.cc", - "ooura_fft.h", - "ooura_fft_tables_common.h", - ] - deps = [ - "../../../rtc_base/system:arch", - "../../../system_wrappers:cpu_features_api", - ] - cflags = [] - - if (current_cpu == "x86" || current_cpu == "x64") { - sources += [ - "ooura_fft_sse2.cc", - "ooura_fft_tables_neon_sse2.h", - ] - if (is_posix || is_fuchsia) { - cflags += [ "-msse2" ] - } - } - - if (rtc_build_with_neon) { - sources += [ - 
"ooura_fft_neon.cc", - "ooura_fft_tables_neon_sse2.h", - ] - - deps += [ "../../../common_audio" ] - - if (current_cpu != "arm64") { - # Enable compilation for the NEON instruction set. - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags += [ "-mfpu=neon" ] - } - } - - if (current_cpu == "mipsel" && mips_float_abi == "hard") { - sources += [ "ooura_fft_mips.cc" ] - } -} - rtc_library("pffft_wrapper") { visibility = [ "../*" ] sources = [ diff --git a/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc b/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc index 88a31ba7a9..ff7022dba4 100644 --- a/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc +++ b/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/aec3/cascaded_biquad_filter.h" +#include "modules/audio_processing/utility/cascaded_biquad_filter.h" #include @@ -71,7 +71,7 @@ TEST(CascadedBiquadFilter, HighPassConfiguration) { } // Verifies that the reset functionality works as intended. -TEST(CascadedBiquadFilter, HighPassConfiguration) { +TEST(CascadedBiquadFilter, HighPassConfigurationResetFunctionality) { CascadedBiQuadFilter filter(kHighPassFilterCoefficients, 2); std::vector values1(100, 1.f); @@ -103,7 +103,7 @@ TEST(CascadedBiquadFilter, TransparentConfiguration) { #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) // Verifies that the check of the lengths for the input and output works for the // non-in-place call. 
-TEST(CascadedBiquadFilter, InputSizeCheckVerification) { +TEST(CascadedBiquadFilterDeathTest, InputSizeCheckVerification) { const std::vector input = CreateInputWithIncreasingValues(10); std::vector output(input.size() - 1); diff --git a/modules/audio_processing/utility/delay_estimator.cc b/modules/audio_processing/utility/delay_estimator.cc index fe750f5a68..73c70b0c34 100644 --- a/modules/audio_processing/utility/delay_estimator.cc +++ b/modules/audio_processing/utility/delay_estimator.cc @@ -17,6 +17,10 @@ #include "rtc_base/checks.h" +namespace webrtc { + +namespace { + // Number of right shifts for scaling is linearly depending on number of bits in // the far-end binary spectrum. static const int kShiftsAtZero = 13; // Right shifts at zero binary spectrum. @@ -38,6 +42,8 @@ static const float kFractionSlope = 0.05f; static const float kMinFractionWhenPossiblyCausal = 0.5f; static const float kMinFractionWhenPossiblyNonCausal = 0.25f; +} // namespace + // Counts and returns number of bits of a 32-bit word. static int BitCount(uint32_t u32) { uint32_t tmp = @@ -698,3 +704,5 @@ void WebRtc_MeanEstimatorFix(int32_t new_value, } *mean_value += diff; } + +} // namespace webrtc diff --git a/modules/audio_processing/utility/delay_estimator.h b/modules/audio_processing/utility/delay_estimator.h index 2f47e26f36..df281bcfdb 100644 --- a/modules/audio_processing/utility/delay_estimator.h +++ b/modules/audio_processing/utility/delay_estimator.h @@ -16,6 +16,8 @@ #include +namespace webrtc { + static const int32_t kMaxBitCountsQ9 = (32 << 9); // 32 matching bits in Q9. 
typedef struct { @@ -250,4 +252,6 @@ void WebRtc_MeanEstimatorFix(int32_t new_value, int factor, int32_t* mean_value); +} // namespace webrtc + #endif // MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_ diff --git a/modules/audio_processing/utility/delay_estimator_internal.h b/modules/audio_processing/utility/delay_estimator_internal.h index e99fe21a85..fce95d80d7 100644 --- a/modules/audio_processing/utility/delay_estimator_internal.h +++ b/modules/audio_processing/utility/delay_estimator_internal.h @@ -15,6 +15,8 @@ #include "modules/audio_processing/utility/delay_estimator.h" +namespace webrtc { + typedef union { float float_; int32_t int32_; @@ -44,4 +46,6 @@ typedef struct { BinaryDelayEstimator* binary_handle; } DelayEstimator; +} // namespace webrtc + #endif // MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_INTERNAL_H_ diff --git a/modules/audio_processing/utility/delay_estimator_unittest.cc b/modules/audio_processing/utility/delay_estimator_unittest.cc index d3463aa6de..65d8e147fd 100644 --- a/modules/audio_processing/utility/delay_estimator_unittest.cc +++ b/modules/audio_processing/utility/delay_estimator_unittest.cc @@ -14,6 +14,8 @@ #include "modules/audio_processing/utility/delay_estimator_wrapper.h" #include "test/gtest.h" +namespace webrtc { + namespace { enum { kSpectrumSize = 65 }; @@ -615,3 +617,5 @@ TEST_F(DelayEstimatorTest, VerifyHistorySizeIsSetAndKeptAfterInit) { // TODO(bjornv): Add tests for SoftReset...(...). 
} // namespace + +} // namespace webrtc diff --git a/modules/audio_processing/utility/delay_estimator_wrapper.cc b/modules/audio_processing/utility/delay_estimator_wrapper.cc index 27c2a3a1a2..8eac2f6974 100644 --- a/modules/audio_processing/utility/delay_estimator_wrapper.cc +++ b/modules/audio_processing/utility/delay_estimator_wrapper.cc @@ -17,6 +17,8 @@ #include "modules/audio_processing/utility/delay_estimator_internal.h" #include "rtc_base/checks.h" +namespace webrtc { + // Only bit |kBandFirst| through bit |kBandLast| are processed and // |kBandFirst| - |kBandLast| must be < 32. enum { kBandFirst = 12 }; @@ -483,3 +485,5 @@ float WebRtc_last_delay_quality(void* handle) { RTC_DCHECK(self); return WebRtc_binary_last_delay_quality(self->binary_handle); } + +} // namespace webrtc diff --git a/modules/audio_processing/utility/delay_estimator_wrapper.h b/modules/audio_processing/utility/delay_estimator_wrapper.h index 995470f99d..dbcafaf013 100644 --- a/modules/audio_processing/utility/delay_estimator_wrapper.h +++ b/modules/audio_processing/utility/delay_estimator_wrapper.h @@ -16,6 +16,8 @@ #include +namespace webrtc { + // Releases the memory allocated by WebRtc_CreateDelayEstimatorFarend(...) void WebRtc_FreeDelayEstimatorFarend(void* handle); @@ -241,4 +243,6 @@ int WebRtc_last_delay(void* handle); // - delay_quality : >= 0 - Estimation quality of last calculated delay. 
float WebRtc_last_delay_quality(void* handle); +} // namespace webrtc + #endif // MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_ diff --git a/modules/audio_processing/utility/pffft_wrapper_unittest.cc b/modules/audio_processing/utility/pffft_wrapper_unittest.cc index 9aed548934..2ad6849cd4 100644 --- a/modules/audio_processing/utility/pffft_wrapper_unittest.cc +++ b/modules/audio_processing/utility/pffft_wrapper_unittest.cc @@ -125,23 +125,24 @@ TEST(PffftTest, CreateWrapperWithValidSize) { #if !defined(NDEBUG) && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -class PffftInvalidSizeTest : public ::testing::Test, - public ::testing::WithParamInterface {}; +class PffftInvalidSizeDeathTest : public ::testing::Test, + public ::testing::WithParamInterface { +}; -TEST_P(PffftInvalidSizeTest, DoNotCreateRealWrapper) { +TEST_P(PffftInvalidSizeDeathTest, DoNotCreateRealWrapper) { size_t fft_size = GetParam(); ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kReal)); EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kReal), ""); } -TEST_P(PffftInvalidSizeTest, DoNotCreateComplexWrapper) { +TEST_P(PffftInvalidSizeDeathTest, DoNotCreateComplexWrapper) { size_t fft_size = GetParam(); ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kComplex)); EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kComplex), ""); } INSTANTIATE_TEST_SUITE_P(PffftTest, - PffftInvalidSizeTest, + PffftInvalidSizeDeathTest, ::testing::Values(17, 33, 65, diff --git a/modules/audio_processing/vad/BUILD.gn b/modules/audio_processing/vad/BUILD.gn index c266929dee..71e079d3a3 100644 --- a/modules/audio_processing/vad/BUILD.gn +++ b/modules/audio_processing/vad/BUILD.gn @@ -38,7 +38,7 @@ rtc_library("vad") { "../../../audio/utility:audio_frame_operations", "../../../common_audio", "../../../common_audio:common_audio_c", - "../../../common_audio/third_party/fft4g", + "../../../common_audio/third_party/ooura:fft_size_256", "../../../rtc_base:checks", 
"../../audio_coding:isac_vad", ] diff --git a/modules/audio_processing/vad/noise_gmm_tables.h b/modules/audio_processing/vad/noise_gmm_tables.h index 15562776e9..944a5401cc 100644 --- a/modules/audio_processing/vad/noise_gmm_tables.h +++ b/modules/audio_processing/vad/noise_gmm_tables.h @@ -13,6 +13,8 @@ #ifndef MODULES_AUDIO_PROCESSING_VAD_NOISE_GMM_TABLES_H_ #define MODULES_AUDIO_PROCESSING_VAD_NOISE_GMM_TABLES_H_ +namespace webrtc { + static const int kNoiseGmmNumMixtures = 12; static const int kNoiseGmmDim = 3; @@ -74,4 +76,7 @@ static const double kNoiseGmmWeights[kNoiseGmmNumMixtures] = { -1.79789356118641e+01, -1.42830169160894e+01, -1.56500228061379e+01, -1.83124990950113e+01, -1.69979436177477e+01, -1.12329424387828e+01, -1.41311785780639e+01, -1.47171861448585e+01, -1.35963362781839e+01}; + +} // namespace webrtc + #endif // MODULES_AUDIO_PROCESSING_VAD_NOISE_GMM_TABLES_H_ diff --git a/modules/audio_processing/vad/pitch_based_vad.h b/modules/audio_processing/vad/pitch_based_vad.h index 22bc0f2263..e005e23aa5 100644 --- a/modules/audio_processing/vad/pitch_based_vad.h +++ b/modules/audio_processing/vad/pitch_based_vad.h @@ -53,4 +53,5 @@ class PitchBasedVad { }; } // namespace webrtc + #endif // MODULES_AUDIO_PROCESSING_VAD_PITCH_BASED_VAD_H_ diff --git a/modules/audio_processing/vad/pitch_internal.cc b/modules/audio_processing/vad/pitch_internal.cc index 7e6bd3e616..8f86918644 100644 --- a/modules/audio_processing/vad/pitch_internal.cc +++ b/modules/audio_processing/vad/pitch_internal.cc @@ -12,6 +12,8 @@ #include +namespace webrtc { + // A 4-to-3 linear interpolation. // The interpolation constants are derived as following: // Input pitch parameters are updated every 7.5 ms. 
Within a 30-ms interval @@ -49,3 +51,5 @@ void GetSubframesPitchParameters(int sampling_rate_hz, pitch_lag_hz[n] = (sampling_rate_hz) / (pitch_lag_hz[n]); } } + +} // namespace webrtc diff --git a/modules/audio_processing/vad/pitch_internal.h b/modules/audio_processing/vad/pitch_internal.h index 67e0522328..938745d2a3 100644 --- a/modules/audio_processing/vad/pitch_internal.h +++ b/modules/audio_processing/vad/pitch_internal.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_VAD_PITCH_INTERNAL_H_ #define MODULES_AUDIO_PROCESSING_VAD_PITCH_INTERNAL_H_ +namespace webrtc { + // TODO(turajs): Write a description of this function. Also be consistent with // usage of |sampling_rate_hz| vs |kSamplingFreqHz|. void GetSubframesPitchParameters(int sampling_rate_hz, @@ -23,4 +25,6 @@ void GetSubframesPitchParameters(int sampling_rate_hz, double* log_pitch_gain, double* pitch_lag_hz); +} // namespace webrtc + #endif // MODULES_AUDIO_PROCESSING_VAD_PITCH_INTERNAL_H_ diff --git a/modules/audio_processing/vad/pitch_internal_unittest.cc b/modules/audio_processing/vad/pitch_internal_unittest.cc index 19c2e1a271..c851421ba7 100644 --- a/modules/audio_processing/vad/pitch_internal_unittest.cc +++ b/modules/audio_processing/vad/pitch_internal_unittest.cc @@ -14,6 +14,8 @@ #include "test/gtest.h" +namespace webrtc { + TEST(PitchInternalTest, test) { const int kSamplingRateHz = 8000; const int kNumInputParameters = 4; @@ -48,3 +50,5 @@ TEST(PitchInternalTest, test) { EXPECT_NEAR(old_lag, expected_old_lag, 1e-6); EXPECT_NEAR(log_old_gain, expected_log_old_gain, 1e-8); } + +} // namespace webrtc diff --git a/modules/audio_processing/vad/vad_audio_proc.cc b/modules/audio_processing/vad/vad_audio_proc.cc index 53eb6de70f..97cf65151c 100644 --- a/modules/audio_processing/vad/vad_audio_proc.cc +++ b/modules/audio_processing/vad/vad_audio_proc.cc @@ -14,7 +14,7 @@ #include #include -#include "common_audio/third_party/fft4g/fft4g.h" +#include 
"common_audio/third_party/ooura/fft_size_256/fft4g.h" #include "modules/audio_processing/vad/pitch_internal.h" #include "modules/audio_processing/vad/pole_zero_filter.h" #include "modules/audio_processing/vad/vad_audio_proc_internal.h" diff --git a/modules/audio_processing/voice_detection.cc b/modules/audio_processing/voice_detection.cc index 2774e35571..e6c92ae934 100644 --- a/modules/audio_processing/voice_detection.cc +++ b/modules/audio_processing/voice_detection.cc @@ -10,7 +10,6 @@ #include "modules/audio_processing/voice_detection.h" -#include "api/audio/audio_frame.h" #include "common_audio/vad/include/webrtc_vad.h" #include "modules/audio_processing/audio_buffer.h" #include "rtc_base/checks.h" diff --git a/modules/backing_track/BUILD.gn b/modules/backing_track/BUILD.gn new file mode 100644 index 0000000000..23556aed74 --- /dev/null +++ b/modules/backing_track/BUILD.gn @@ -0,0 +1,48 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_source_set("backing_track") { + visibility = [ "*" ] + sources = [ + "audio_file_decoder.h", + "audio_file_decoder.cc", + "audio_mixer_global.h", + "audio_resampler.h", + "audio_resampler.cc", + "audio_source_compressed.h", + "audio_source_compressed.cc", + "audio_source_pcm.h", + "audio_source_pcm.cc", + "audio_source.h", + "audio_source.cc", + "avx_helper.h", + "bt_audio_mixer.h", + "bt_audio_mixer.cc", + "mixer_config.h", + "mixer_source.h", + "mixer_source.cc", + "pcm_channel.h", + "pcm_channel.cc", + ] + deps = [ + "../../rtc_base:checks", + "../audio_device:audio_device_buffer", + "../audio_mixer:audio_mixer_impl", + ] + + public_configs = [ + "//third_party/ffmpeg:ffmpeg_dependent_config", + ] +} diff --git a/modules/backing_track/audio_file_decoder.cc b/modules/backing_track/audio_file_decoder.cc new file mode 100644 index 0000000000..4658033b67 --- /dev/null +++ b/modules/backing_track/audio_file_decoder.cc @@ -0,0 +1,270 @@ +// +// Created by Piasy on 08/11/2017. 
+// + +#include + +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/backing_track/audio_file_decoder.h" +#include "modules/backing_track/audio_mixer_global.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +AudioFileDecoder::AudioFileDecoder(TaskQueueFactory* task_queue_factory, + const std::string& filepath) + : packet_consumed_(true), + frame_consumed_(true), + eof_(false), + error_(false), + seeking_(false), + last_decoded_frame_pts_(0), + last_consumed_frame_pts_(0), + decoder_queue_(task_queue_factory->CreateTaskQueue( + "music_dec", TaskQueueFactory::Priority::HIGH)) { + frame_.reset(av_frame_alloc()); + if (!frame_) { + RTC_LOG(LS_ERROR) << "AudioFileDecoder:: av_frame_alloc fail"; + return; + } + + packet_.reset(av_packet_alloc()); + if (!packet_) { + RTC_LOG(LS_ERROR) << "AudioFileDecoder:: av_packet_alloc fail"; + return; + } + av_init_packet(packet_.get()); + + { + AVFormatContext* format_context = nullptr; + int32_t error = avformat_open_input(&format_context, filepath.c_str(), + nullptr, nullptr); + if (error < 0) { + RTC_LOG(LS_ERROR) << "AudioFileDecoder:: avformat_open_input fail " + << filepath.c_str() << " " + << av_err2str(error); + return; + } + + format_context_.reset(format_context); + } + + int32_t error = avformat_find_stream_info(format_context_.get(), nullptr); + if (error < 0) { + RTC_LOG(LS_ERROR) + << "AudioFileDecoder:: avformat_find_stream_info fail " + << av_err2str(error); + return; + } + + AVCodec* codec; + stream_no_ = av_find_best_stream(format_context_.get(), AVMEDIA_TYPE_AUDIO, + -1, -1, &codec, 0); + if (stream_no_ < 0 || !codec + || format_context_->streams[stream_no_]->time_base.den <= 0) { + RTC_LOG(LS_ERROR) << "AudioFileDecoder:: av_find_best_stream fail " + << av_err2str(stream_no_) << ", codec " + << static_cast(codec); + return; + } + + codec_context_.reset(avcodec_alloc_context3(codec)); + if (!codec_context_) { + RTC_LOG(LS_ERROR) << 
"AudioFileDecoder:: avcodec_alloc_context3 fail"; + return; + } + error = avcodec_parameters_to_context( + codec_context_.get(), format_context_->streams[stream_no_]->codecpar); + if (error < 0) { + RTC_LOG(LS_ERROR) + << "AudioFileDecoder:: avcodec_parameters_to_context fail " + << av_err2str(error); + return; + } + + error = avcodec_open2(codec_context_.get(), codec, nullptr); + if (error < 0) { + RTC_LOG(LS_ERROR) << "AudioFileDecoder:: avcodec_open2 fail " + << av_err2str(error); + return; + } + + fifo_capacity_ = 10 * codec_context_->sample_rate * + webrtc::AudioMixerImpl::kFrameDurationInMs / 1000; + fifo_.reset(av_audio_fifo_alloc(codec_context_->sample_fmt, + codec_context_->channels, fifo_capacity_)); + if (!fifo_) { + RTC_LOG(LS_ERROR) << "AudioFileDecoder:: av_audio_fifo_alloc fail"; + return; + } + + RTC_LOG(LS_INFO) + << "AudioFileDecoder create: start ts " + << format_context_->streams[stream_no_]->start_time * + format_context_->streams[stream_no_]->time_base.num / + (float)format_context_->streams[stream_no_]->time_base.den + << " s, duration " + << format_context_->streams[stream_no_]->duration * + format_context_->streams[stream_no_]->time_base.num / + (float)format_context_->streams[stream_no_]->time_base.den + << " s"; + + FillDecoder(false); + FillFifo(false, nullptr); + Advance(); +} + +AVSampleFormat AudioFileDecoder::sample_format() { + return codec_context_ ? codec_context_->sample_fmt : AV_SAMPLE_FMT_NONE; +} + +int32_t AudioFileDecoder::sample_rate() { + return codec_context_ ? codec_context_->sample_rate : 0; +} + +int32_t AudioFileDecoder::channel_num() { + return codec_context_ ? 
codec_context_->channels : 0; +} + +int32_t AudioFileDecoder::Consume(void** buffer, int32_t samples) { + if (!fifo_ || codec_context_->sample_rate <= 0) { + return kMixerErrInit; + } + if (error_) { + return kMixerErrDecode; + } + + Advance(); + + MutexLock lock(&fifo_mutex_); + + int32_t target_samples = std::min(av_audio_fifo_size(fifo_.get()), samples); + int32_t actual_samples = + av_audio_fifo_read(fifo_.get(), buffer, target_samples); + last_consumed_frame_pts_ = + last_decoded_frame_pts_ - + 1000 * av_audio_fifo_size(fifo_.get()) / codec_context_->sample_rate; + + return actual_samples * + av_get_bytes_per_sample(codec_context_->sample_fmt) * + codec_context_->channels; +} + +void AudioFileDecoder::Seek(int64_t position_ms) { + seeking_ = true; + + MutexLock lock(&seek_mutex_); + + RTC_LOG(LS_INFO) << "AudioFileDecoder::Seek start, want " << position_ms; + + av_audio_fifo_reset(fifo_.get()); + av_seek_frame(format_context_.get(), stream_no_, + static_cast( + (position_ms - 100) / 1000.0F * + format_context_->streams[stream_no_]->time_base.den / + format_context_->streams[stream_no_]->time_base.num), + AVSEEK_FLAG_ANY); + + int64_t last_frame_ts = 0; + do { + FillDecoder(true); + } while (!eof_ && !error_ && !FillFifo(true, &last_frame_ts) && + last_frame_ts < position_ms); + + seeking_ = false; + + RTC_LOG(LS_INFO) << "AudioFileDecoder::Seek end, actual " << last_frame_ts; +} + +void AudioFileDecoder::FillDecoder(bool seeking) { + while (!eof_ && !error_ && seeking == seeking_) { + if (packet_consumed_) { + int error = av_read_frame(format_context_.get(), packet_.get()); + if (error != 0) { + eof_ = error == AVERROR_EOF; + error_ = error != AVERROR_EOF; + break; + } + if (packet_->stream_index != stream_no_) { + av_packet_unref(packet_.get()); + continue; + } + packet_consumed_ = false; + } + int32_t error = + avcodec_send_packet(codec_context_.get(), packet_.get()); + if (error == 0) { + av_packet_unref(packet_.get()); + packet_consumed_ = true; + 
continue; + } + if (error == AVERROR(EAGAIN)) { + break; + } + RTC_LOG(LS_ERROR) << "FillDecoder error " << av_err2str(error); + error_ = true; + break; + } +} + +bool AudioFileDecoder::FillFifo(bool seeking, int64_t* last_frame_ts) { + bool fifo_full = false; + while (!eof_ && !error_ && seeking == seeking_) { + if (frame_consumed_) { + int error = + avcodec_receive_frame(codec_context_.get(), frame_.get()); + if (error != 0) { + error_ = error != AVERROR(EAGAIN); + break; + } + + frame_consumed_ = false; + } + + if (seeking) { + frame_consumed_ = true; + if (last_frame_ts) { + *last_frame_ts = + 1000 * frame_->pts * + format_context_->streams[stream_no_]->time_base.num / + format_context_->streams[stream_no_]->time_base.den; + } + break; + } + + MutexLock lock(&fifo_mutex_); + + if (av_audio_fifo_size(fifo_.get()) + frame_->nb_samples < + fifo_capacity_) { + if (av_audio_fifo_write(fifo_.get(), reinterpret_cast( + frame_->extended_data), + frame_->nb_samples) < 0) { + error_ = true; + break; + } + last_decoded_frame_pts_ = + 1000 * frame_->pts * + format_context_->streams[stream_no_]->time_base.num / + format_context_->streams[stream_no_]->time_base.den; + av_frame_unref(frame_.get()); + + frame_consumed_ = true; + } else { + fifo_full = true; + break; + } + } + + return fifo_full; +} + +void AudioFileDecoder::Advance() { + decoder_queue_.PostTask([=]() { + MutexLock lock(&seek_mutex_); + do { + FillDecoder(false); + } while (!eof_ && !error_ && !seeking_ && !FillFifo(false, nullptr)); + }); +} +} diff --git a/modules/backing_track/audio_file_decoder.h b/modules/backing_track/audio_file_decoder.h new file mode 100644 index 0000000000..b5f8642bbf --- /dev/null +++ b/modules/backing_track/audio_file_decoder.h @@ -0,0 +1,81 @@ + + +// +// Created by Piasy on 08/11/2017. 
+// + +#pragma once + +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue.h" + +#include "modules/backing_track/avx_helper.h" + +namespace webrtc { + +class AudioFileDecoder { +public: + AudioFileDecoder(TaskQueueFactory* task_queue_factory, + const std::string& filepath); + + ~AudioFileDecoder() {} + + AVSampleFormat sample_format(); + + int32_t sample_rate(); + + int32_t channel_num(); + + int64_t consume_progress_ms() { return last_consumed_frame_pts_; } + + int64_t length_ms() { + return format_context_ + ? 1000 * format_context_->streams[stream_no_]->duration * + format_context_->streams[stream_no_]->time_base.num / + format_context_->streams[stream_no_]->time_base.den + : 0; + } + + int32_t Consume(void** buffer, int32_t samples); + + void Seek(int64_t position_ms); + + bool eof() { return eof_; } + +private: + void FillDecoder(bool seeking); + + bool FillFifo(bool seeking, int64_t* last_frame_ts); + + void Advance(); + + int32_t stream_no_; + + std::unique_ptr format_context_; + std::unique_ptr codec_context_; + + std::unique_ptr packet_; + bool packet_consumed_; + std::unique_ptr frame_; + bool frame_consumed_; + + mutable Mutex seek_mutex_; + + mutable Mutex fifo_mutex_; + int32_t fifo_capacity_; + std::unique_ptr fifo_; + + bool eof_; + bool error_; + bool seeking_; + + int64_t last_decoded_frame_pts_; + int64_t last_consumed_frame_pts_; + + rtc::TaskQueue decoder_queue_; +}; +} diff --git a/modules/backing_track/audio_mixer_global.h b/modules/backing_track/audio_mixer_global.h new file mode 100644 index 0000000000..ca6b9ae8b0 --- /dev/null +++ b/modules/backing_track/audio_mixer_global.h @@ -0,0 +1,29 @@ +// +// Created by Piasy on 08/11/2017. 
+// + +#pragma once + +#ifdef __cplusplus +extern "C" { +#endif + +#include + +#ifdef __cplusplus +} +#endif + +namespace webrtc { + +typedef void (*SourceFinishCallback)(void* opaque, int32_t ssrc); + +typedef void (*SourceErrorCallback)(void* opaque, int32_t ssrc, int32_t code); + +static constexpr AVSampleFormat kOutputSampleFormat = AV_SAMPLE_FMT_S16; + +static constexpr int32_t kMixerErrEof = -99; +static constexpr int32_t kMixerErrInit = -100; +static constexpr int32_t kMixerErrDecode = -101; +static constexpr int32_t kMixerErrResample = -102; +} diff --git a/modules/backing_track/audio_resampler.cc b/modules/backing_track/audio_resampler.cc new file mode 100644 index 0000000000..30497492d4 --- /dev/null +++ b/modules/backing_track/audio_resampler.cc @@ -0,0 +1,88 @@ +// +// Created by Piasy on 04/11/2017. +// + +#include "modules/backing_track/audio_resampler.h" +#include "modules/backing_track/audio_mixer_global.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +AudioResampler::AudioResampler(AVSampleFormat input_format, + int32_t input_sample_rate, + int32_t input_channel_num, + AVSampleFormat output_format, + int32_t output_sample_rate, + int32_t output_channel_num) + : context_(swr_alloc()), + input_format_(input_format), + input_sample_rate_(input_sample_rate), + input_channel_num_(input_channel_num), + output_format_(output_format), + output_sample_rate_(output_sample_rate), + output_channel_num_(output_channel_num) { + if (!context_) { + RTC_LOG(LS_ERROR) << "AudioResampler:: swr_alloc fail"; + return; + } + int64_t input_channel_layout = + (input_channel_num_ == 1) ? AV_CH_LAYOUT_MONO : AV_CH_LAYOUT_STEREO; + int64_t output_channel_layout = + (output_channel_num_ == 1) ? 
AV_CH_LAYOUT_MONO : AV_CH_LAYOUT_STEREO; + + av_opt_set_int(context_.get(), "in_channel_layout", input_channel_layout, + 0); + av_opt_set_int(context_.get(), "in_sample_rate", input_sample_rate_, 0); + av_opt_set_sample_fmt(context_.get(), "in_sample_fmt", input_format_, 0); + + av_opt_set_int(context_.get(), "out_channel_layout", output_channel_layout, + 0); + av_opt_set_int(context_.get(), "out_sample_rate", output_sample_rate_, 0); + av_opt_set_sample_fmt(context_.get(), "out_sample_fmt", output_format_, 0); + + int32_t error = swr_init(context_.get()); + if (error < 0) { + context_.reset(); + RTC_LOG(LS_ERROR) << "AudioResampler swr_init fail: " + << av_err2str(error); + } +} + +int32_t AudioResampler::Resample(void** input_buffer, int32_t input_size, + void** output_buffer) { + if (!context_ || input_channel_num_ <= 0 + || av_get_bytes_per_sample(input_format_) <= 0) { + return kMixerErrInit; + } + + int32_t input_samples = input_size / input_channel_num_ / + av_get_bytes_per_sample(input_format_); + int32_t output_samples = static_cast(av_rescale_rnd( + input_samples, output_sample_rate_, input_sample_rate_, AV_ROUND_UP)); + + int32_t real_output_samples = swr_convert( + context_.get(), reinterpret_cast(output_buffer), + output_samples, (const uint8_t**)input_buffer, input_samples); + + if (real_output_samples < 0) { + return kMixerErrResample; + } + + return real_output_samples * av_get_bytes_per_sample(output_format_) * + output_channel_num_; +} + +int32_t AudioResampler::CalcOutputSize(int32_t input_size) { + if (input_channel_num_ <= 0 + || av_get_bytes_per_sample(input_format_) <= 0) { + return kMixerErrInit; + } + + int32_t input_samples = input_size / input_channel_num_ / + av_get_bytes_per_sample(input_format_); + int32_t output_samples = static_cast(av_rescale_rnd( + input_samples, output_sample_rate_, input_sample_rate_, AV_ROUND_UP)); + return output_samples * input_channel_num_ * + av_get_bytes_per_sample(input_format_); +} +} diff --git 
a/modules/backing_track/audio_resampler.h b/modules/backing_track/audio_resampler.h new file mode 100644 index 0000000000..a27306a369 --- /dev/null +++ b/modules/backing_track/audio_resampler.h @@ -0,0 +1,35 @@ +// +// Created by Piasy on 04/11/2017. +// + +#pragma once + +#include + +#include "modules/backing_track/avx_helper.h" + +namespace webrtc { + +class AudioResampler { +public: + AudioResampler(AVSampleFormat input_format, int32_t input_sample_rate, + int32_t input_channel_num, AVSampleFormat output_format, + int32_t output_sample_rate, int32_t output_channel_num); + + ~AudioResampler() {} + + int32_t Resample(void** input_buffer, int32_t input_size, + void** output_buffer); + + int32_t CalcOutputSize(int32_t input_size); + +private: + std::unique_ptr context_; + AVSampleFormat input_format_; + int32_t input_sample_rate_; + int32_t input_channel_num_; + AVSampleFormat output_format_; + int32_t output_sample_rate_; + int32_t output_channel_num_; +}; +} diff --git a/modules/backing_track/audio_source.cc b/modules/backing_track/audio_source.cc new file mode 100644 index 0000000000..d03a6dba0e --- /dev/null +++ b/modules/backing_track/audio_source.cc @@ -0,0 +1,81 @@ +// +// Created by Piasy on 2018/5/28. 
+// + +#include + +#include "audio/audio_transport_impl.h" +#include "audio/utility/audio_frame_operations.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/backing_track/audio_source.h" + +namespace webrtc { + +AudioSource::AudioSource(int32_t ssrc, int32_t sample_rate, int32_t channel_num, + int32_t frame_duration_us, float volume_left, + float volume_right, bool enabled) + : ssrc_(ssrc), + sample_rate_(sample_rate), + channel_num_(channel_num), + frame_duration_us_(frame_duration_us), + pcm_channel_(nullptr), + volume_left_(volume_left), + volume_right_(volume_right), + enabled_(enabled), + muted_(false) {} + +AudioSource::~AudioSource() { + MutexLock lock(&mutex_); + AudioTransportImpl* audio_transport = nullptr; + AudioDeviceBuffer* adb = AudioDeviceBuffer::Instance(); + if (adb) { + audio_transport = + reinterpret_cast(adb->audio_transport()); + } + if (pcm_channel_) { + if (audio_transport) { + audio_transport->RemovePlaybackSource(pcm_channel_); + } else { + delete pcm_channel_; + } + + pcm_channel_ = nullptr; + } +} + +void AudioSource::SetPcmChannel(PcmChannel* pcm_channel) { + MutexLock lock(&mutex_); + + pcm_channel_ = pcm_channel; +} + +void AudioSource::preProduceFrame(webrtc::AudioFrame* frame, bool remix) { + if (StereoInput()) { + if ((volume_left_ < 0.99f || volume_left_ > 1.01f) || + (volume_right_ < 0.99f || volume_right_ > 1.01f)) { + webrtc::AudioFrameOperations::Scale(volume_left_, volume_right_, + frame); + } + } else { + if ((volume_left_ < 0.99f || volume_left_ > 1.01f)) { + webrtc::AudioFrameOperations::ScaleWithSat(volume_left_, frame); + } + } + + if (remix && frame->num_channels_ == 2) { + webrtc::AudioFrameOperations::DownmixChannels(1, frame); + webrtc::AudioFrameOperations::UpmixChannels(2, frame); + } + + MutexLock lock(&mutex_); + if (pcm_channel_) { + pcm_channel_->FeedData(frame->data(), FrameSize()); + } +} + +int64_t AudioSource::GetTimestamp() { + return std::chrono::duration_cast( + 
std::chrono::system_clock::now().time_since_epoch()).count(); +} + +} diff --git a/modules/backing_track/audio_source.h b/modules/backing_track/audio_source.h new file mode 100644 index 0000000000..a35318e7bb --- /dev/null +++ b/modules/backing_track/audio_source.h @@ -0,0 +1,94 @@ +// +// Created by Piasy on 2018/5/28. +// + +#pragma once + +#include + +#include "api/audio/audio_mixer.h" +#include "rtc_base/synchronization/mutex.h" + +#include "modules/backing_track/pcm_channel.h" + +namespace webrtc { + +class AudioSource : public webrtc::AudioMixer::Source { +public: + AudioSource(int32_t ssrc, int32_t sample_rate, int32_t channel_num, + int32_t frame_duration_us, float volume_left, + float volume_right, bool enabled); + + virtual ~AudioSource() override; + + int32_t Ssrc() const override { return ssrc_; } + + int32_t PreferredSampleRate() const override { return sample_rate_; } + + void UpdateVolume(float volume_left, float volume_right) { + volume_left_ = volume_left; + volume_right_ = volume_right; + } + + virtual void ToggleEnable(bool enabled) { enabled_ = enabled; } + + void ToggleMute(bool mute) { muted_ = mute; } + + virtual bool StereoInput() { return false; } + + virtual int32_t FrameSize() = 0; + + virtual int64_t GetProgressMs() { return 0; } + + virtual int64_t GetLengthMs() { return 0; } + + virtual void Seek(int64_t position_ms) {} + + void SetPcmChannel(PcmChannel* pcm_channel); + + PcmChannel* GetPcmChannel() { return pcm_channel_; } + + virtual void UpdateFrameDurationUs(int32_t frame_duration_us) { + if (sample_rate_ <= 0) { + return; + } + + frame_duration_us_ = frame_duration_us; + + if (pcm_channel_) { + pcm_channel_->SetFrameDurationUs(frame_duration_us); + } + } + + int32_t sample_rate() { return sample_rate_; } + + int32_t channel_num() { return channel_num_; } + + int32_t frame_duration_us() { return frame_duration_us_; } + + bool enabled() { return enabled_.load(); } + + bool muted() { return muted_.load(); } + +protected: + void 
preProduceFrame(webrtc::AudioFrame* frame, bool remix); + + int64_t GetTimestamp(); + + int32_t ssrc_; + + int32_t sample_rate_; + int32_t channel_num_; + int32_t frame_duration_us_; + + PcmChannel* pcm_channel_; + +private: + float volume_left_; + float volume_right_; + std::atomic_bool enabled_; + std::atomic_bool muted_; + + mutable Mutex mutex_; +}; +} diff --git a/modules/backing_track/audio_source_compressed.cc b/modules/backing_track/audio_source_compressed.cc new file mode 100644 index 0000000000..70c0ea45bf --- /dev/null +++ b/modules/backing_track/audio_source_compressed.cc @@ -0,0 +1,307 @@ +// +// Created by Piasy on 29/10/2017. +// + +#include +#include + +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/backing_track/audio_mixer_global.h" +#include "modules/backing_track/audio_source_compressed.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +static constexpr int32_t kOnceDecodeDurationMs = 10; + +int gcd(int a, int b) { + for (;;) { + if (a == 0) return b; + b %= a; + if (b == 0) return a; + a %= b; + } +} + +int lcm(int a, int b) { + int temp = gcd(a, b); + + return temp ? 
(a / temp * b) : 0; +} + +AudioSourceCompressed::AudioSourceCompressed( + int32_t ssrc, const std::string& filepath, int32_t output_sample_rate, + int32_t output_channel_num, int32_t frame_duration_us, float volume_left, + float volume_right, bool enabled, bool enable_sync_fix, bool remix, + int32_t waiting_mix_delay_frames, SourceFinishCallback finish_callback, + SourceErrorCallback error_callback, void* callback_opaque) + : AudioSource(ssrc, output_sample_rate, output_channel_num, + frame_duration_us, volume_left, volume_right, enabled), + report_output_samples_(output_sample_rate * + webrtc::AudioMixerImpl::kFrameDurationInMs / 1000), + real_output_samples_(output_sample_rate * frame_duration_us / 1000 / + 1000), + enable_sync_fix_(enable_sync_fix), + sync_fix_threshold_ms_(20), + sync_fix_break_times_(0), + remix_(remix), + input_buffer_(nullptr), + waiting_mix_delay_frames_(waiting_mix_delay_frames), + start_time_(0), + samples_mixed_(0), + first_frame_decoded_(false), + finish_callback_(finish_callback), + error_callback_(error_callback), + callback_opaque_(callback_opaque), + finish_callback_fired_(false), + error_callback_fired_(false) { + AudioDeviceBuffer* adb = AudioDeviceBuffer::Instance(); + if (!adb) { + return; + } + decoder_.reset(new AudioFileDecoder(adb->task_queue_factory(), filepath)); + + input_sample_rate_ = decoder_->sample_rate(); + input_channel_num_ = decoder_->channel_num(); + if (input_sample_rate_ <= 0 || input_channel_num_ <= 0 + || sample_rate_ <= 0) { + return; + } + input_format_ = decoder_->sample_format(); + + int32_t once_decode_us = + lcm(kOnceDecodeDurationMs * 1000, frame_duration_us); + once_decode_samples_ = input_sample_rate_ * once_decode_us / 1000 / 1000; + buffer_.SetSize(static_cast(input_channel_num_ * sample_rate_ * + once_decode_us / 1000 / 1000)); + buffer_pos_ = static_cast(buffer_.size()); + + // to support adjust volume of channels separately, resampler shouldn't + // remix, but let mixer to remix + 
resampler_.reset(new AudioResampler(input_format_, input_sample_rate_, + input_channel_num_, kOutputSampleFormat, + sample_rate_, input_channel_num_)); + + int32_t error = av_samples_alloc_array_and_samples( + reinterpret_cast(&input_buffer_), nullptr, + input_channel_num_, once_decode_samples_, input_format_, 0); + if (error < 0) { + input_buffer_ = nullptr; + RTC_LOG(LS_ERROR) + << "AudioSourceCompressed:: alloc decode buffer fail: " + << av_err2str(error); + } +} + +AudioSourceCompressed::~AudioSourceCompressed() { + if (input_buffer_) { + av_freep(&input_buffer_[0]); + } + av_freep(&input_buffer_); +} + +void AudioSourceCompressed::ToggleEnable(bool enabled) { + AudioSource::ToggleEnable(enabled); + + start_time_ = 0; + samples_mixed_ = 0; +} + +int32_t AudioSourceCompressed::FrameSize() { + return real_output_samples_ * input_channel_num_ * sizeof(int16_t); +} + +void AudioSourceCompressed::Seek(int64_t position_ms) { + if (decoder_) { + decoder_->Seek(position_ms); + } +} + +webrtc::AudioMixer::Source::AudioFrameInfo +AudioSourceCompressed::GetAudioFrameWithInfo(int32_t sample_rate_hz, + webrtc::AudioFrame* audio_frame) { + if (sample_rate_hz != sample_rate_ || finish_callback_fired_ || + error_callback_fired_ || frame_duration_us_ <= 0 || sample_rate_ <= 0 + || input_channel_num_ <= 0) { + RTC_LOG(LS_INFO) + << "AudioSourceCompressed::GetAudioFrameWithInfo wrong state " + << sample_rate_hz << " !=? 
" << sample_rate_ + << ", frame_duration_us_ " << frame_duration_us_ + << ", input_channel_num_ " << input_channel_num_; + fireErrorCallback(-999); + return webrtc::AudioMixer::Source::AudioFrameInfo::kError; + } + + if (!enabled()) { + return webrtc::AudioMixer::Source::AudioFrameInfo::kMuted; + } + + int64_t now = GetTimestamp(); + if (start_time_ == 0) { + start_time_ = now; + } + int64_t time_elapsed = now - start_time_; + int64_t data_duration = 1000 * samples_mixed_ / sample_rate_; + + if (enable_sync_fix_ && + data_duration - time_elapsed > sync_fix_threshold_ms_) { + sync_fix_break_times_ = static_cast( + (data_duration - time_elapsed) * 1000 / frame_duration_us_); + RTC_LOG(LS_INFO) + << "AudioSourceCompressed::GetAudioFrameWithInfo consume too fast, " + "take " + << sync_fix_break_times_ << " break"; + } else if (time_elapsed >= data_duration) { + if (sync_fix_break_times_ > 0) { + RTC_LOG(LS_INFO) + << "AudioSourceCompressed::GetAudioFrameWithInfo consume stop " + "break early, " + << sync_fix_break_times_ << " left"; + } + sync_fix_break_times_ = 0; + } + if (sync_fix_break_times_ > 0) { + sync_fix_break_times_--; + return webrtc::AudioMixer::Source::AudioFrameInfo::kMuted; + } + + audio_frame->UpdateFrame( + 0, nullptr, static_cast(report_output_samples_), sample_rate_, + webrtc::AudioFrame::SpeechType::kNormalSpeech, + webrtc::AudioFrame::VADActivity::kVadActive, + static_cast(input_channel_num_)); + + int16_t* output_buffer = audio_frame->mutable_data(); + bool sync_fix_hurry = false; + if (enable_sync_fix_ && + time_elapsed - data_duration > sync_fix_threshold_ms_) { + sync_fix_hurry = true; + } + int32_t read_count = 0; + do { + int32_t read = Read(reinterpret_cast(&output_buffer)); + read_count++; + if (read < 0) { + if (read == kMixerErrEof) { + if (!finish_callback_fired_ && finish_callback_) { + RTC_LOG(LS_INFO) + << "AudioSourceCompressed::GetAudioFrameWithInfo music " + "finished " + << ssrc_; + finish_callback_fired_ = true; + 
finish_callback_(callback_opaque_, ssrc_); + } + } else { + RTC_LOG(LS_INFO) + << "AudioSourceCompressed::GetAudioFrameWithInfo music " + "error " + << ssrc_ << ", code " << read; + fireErrorCallback(read); + } + return webrtc::AudioMixer::Source::AudioFrameInfo::kError; + } else if (read == 0) { + break; + } + samples_mixed_ += read / input_channel_num_ / sizeof(int16_t); + data_duration = 1000 * samples_mixed_ / sample_rate_; + } while (sync_fix_hurry && GetTimestamp() - start_time_ > data_duration); + + if (read_count > 1) { + RTC_LOG(LS_INFO) + << "AudioSourceCompressed::GetAudioFrameWithInfo consume too slow, " + "hurry up " + << read_count; + } + + preProduceFrame(audio_frame, remix_); + + if (waiting_mix_delay_frames_ > 0) { + checkInitWaitingMixQueue(); + + size_t frame_size = + report_output_samples_ * input_channel_num_ * sizeof(int16_t); + waiting_mix_->WriteBack(output_buffer, frame_size, nullptr); + waiting_mix_->ReadFront(output_buffer, frame_size, nullptr); + } + + return muted() ? 
webrtc::AudioMixer::Source::AudioFrameInfo::kMuted + : webrtc::AudioMixer::Source::AudioFrameInfo::kNormal; +} + +int32_t AudioSourceCompressed::input_sample_rate() { + return input_sample_rate_; +} + +int32_t AudioSourceCompressed::input_channel_num() { + return input_channel_num_; +} + +int32_t AudioSourceCompressed::Read(void** buffer) { + if (!input_buffer_) { + return kMixerErrInit; + } + + if (static_cast(buffer_.size() - buffer_pos_) >= + input_channel_num_ * real_output_samples_) { + int32_t read_size = + input_channel_num_ * real_output_samples_ * sizeof(int16_t); + memcpy(*buffer, buffer_.data() + buffer_pos_, + static_cast(read_size)); + buffer_pos_ += input_channel_num_ * real_output_samples_; + return read_size; + } + + int32_t consumed = decoder_->Consume(input_buffer_, once_decode_samples_); + if (consumed != + once_decode_samples_ * av_get_bytes_per_sample(input_format_) * + input_channel_num_) { + memset(*buffer, 0, + input_channel_num_ * real_output_samples_ * sizeof(int16_t)); + if (decoder_->eof()) { + return kMixerErrEof; + } else if (consumed < 0) { + return consumed; + } else { + return resampler_->CalcOutputSize(consumed); + } + } + + void* buf = buffer_.data(); + int32_t resampled = resampler_->Resample(input_buffer_, consumed, &buf); + if (resampled < 0) { + return resampled; + } + if (!first_frame_decoded_) { + first_frame_decoded_ = true; + memset(buf, 0, buffer_.size() * sizeof(int16_t)); + } + + buffer_pos_ = 0; + return Read(buffer); +} + +void AudioSourceCompressed::checkInitWaitingMixQueue() { + if (waiting_mix_) { + return; + } + waiting_mix_.reset(new rtc::BufferQueue(waiting_mix_delay_frames_ * 2, + report_output_samples_ * sizeof(int16_t))); + + size_t frame_size = + report_output_samples_ * input_channel_num_ * sizeof(int16_t); + int8_t* delay_buffer = new int8_t[frame_size]; + memset(delay_buffer, 0, frame_size); + for (int32_t i = 0; i < waiting_mix_delay_frames_; i++) { + waiting_mix_->WriteBack(delay_buffer, frame_size, 
nullptr); + } + delete[] delay_buffer; +} + +void AudioSourceCompressed::fireErrorCallback(int32_t code) { + if (!error_callback_fired_ && error_callback_) { + error_callback_fired_ = true; + error_callback_(callback_opaque_, ssrc_, code); + } +} +} diff --git a/modules/backing_track/audio_source_compressed.h b/modules/backing_track/audio_source_compressed.h new file mode 100644 index 0000000000..d204e269be --- /dev/null +++ b/modules/backing_track/audio_source_compressed.h @@ -0,0 +1,106 @@ +// +// Created by Piasy on 29/10/2017. +// + +#pragma once + +#include + +#include "rtc_base/buffer.h" +#include "rtc_base/buffer_queue.h" + +#include "modules/backing_track/audio_source.h" +#include "modules/backing_track/audio_file_decoder.h" +#include "modules/backing_track/audio_resampler.h" + +namespace webrtc { + +class AudioSourceCompressed : public AudioSource { +public: + AudioSourceCompressed(int32_t ssrc, const std::string& filepath, + int32_t output_sample_rate, + int32_t output_channel_num, int32_t frame_duration_us, + float volume_left, float volume_right, bool enabled, + bool enable_sync_fix, bool remix, + int32_t waiting_mix_delay_frames, + SourceFinishCallback finish_callback, + SourceErrorCallback error_callback, + void* callback_opaque); + + ~AudioSourceCompressed() override; + + void ToggleEnable(bool enabled) override; + + bool StereoInput() override { return input_channel_num_ == 2; } + + int32_t FrameSize() override; + + int64_t GetProgressMs() override { + return decoder_ ? decoder_->consume_progress_ms() : -2; + } + + int64_t GetLengthMs() override { + return decoder_ ? 
decoder_->length_ms() : 0; + } + + void Seek(int64_t position_ms) override; + + AudioFrameInfo GetAudioFrameWithInfo( + int32_t sample_rate_hz, webrtc::AudioFrame* audio_frame) override; + + int32_t input_sample_rate(); + + int32_t input_channel_num(); + + /** + * @return > 0 for successfully read size + * AVERROR_EOF for end of file + * other value <= 0 for error + */ + int32_t Read(void** buffer); + + void UpdateFrameDurationUs(int32_t frame_duration_us) override { + AudioSource::UpdateFrameDurationUs(frame_duration_us); + real_output_samples_ = sample_rate_ * frame_duration_us / 1000 / 1000; + } + +private: + void checkInitWaitingMixQueue(); + void fireErrorCallback(int32_t code); + + int32_t input_sample_rate_; + int32_t input_channel_num_; + AVSampleFormat input_format_; + int32_t once_decode_samples_; + + int32_t report_output_samples_; + int32_t real_output_samples_; + + bool enable_sync_fix_; + int32_t sync_fix_threshold_ms_; + int32_t sync_fix_break_times_; + + bool remix_; + + void** input_buffer_; + + std::unique_ptr decoder_; + std::unique_ptr resampler_; + + rtc::BufferT buffer_; + int32_t buffer_pos_; + + std::unique_ptr waiting_mix_; + int32_t waiting_mix_delay_frames_; + + int64_t start_time_; + int64_t samples_mixed_; + bool first_frame_decoded_; + + SourceFinishCallback finish_callback_; + SourceErrorCallback error_callback_; + void* callback_opaque_; + bool finish_callback_fired_; + bool error_callback_fired_; +}; +} diff --git a/modules/backing_track/audio_source_pcm.cc b/modules/backing_track/audio_source_pcm.cc new file mode 100644 index 0000000000..cf9af6726f --- /dev/null +++ b/modules/backing_track/audio_source_pcm.cc @@ -0,0 +1,80 @@ +// +// Created by Piasy on 2018/5/28. 
+// + +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/backing_track/audio_source_pcm.h" +#include "modules/backing_track/audio_mixer_global.h" + +namespace webrtc { + +AudioSourcePcm::AudioSourcePcm(int32_t ssrc, int32_t sample_rate, + int32_t channel_num, int32_t frame_duration_us, + float volume, bool enabled) + : AudioSource(ssrc, sample_rate, channel_num, frame_duration_us, volume, + volume, enabled), + report_output_samples_(sample_rate * + webrtc::AudioMixerImpl::kFrameDurationInMs / 1000), + real_buffer_num_elements_(channel_num * sample_rate * frame_duration_us / + 1000 / 1000), + mv_buf_(nullptr), + mv_buf_size_(0) { + buffer_.Clear(); +} + +AudioSourcePcm::~AudioSourcePcm() { + if (mv_buf_) { + delete[] mv_buf_; + mv_buf_ = nullptr; + mv_buf_size_ = 0; + } +} + +int32_t AudioSourcePcm::FrameSize() { + return real_buffer_num_elements_ * sizeof(int16_t); +} + +void AudioSourcePcm::OnAudioRecorded(const void* data, int32_t size) { + buffer_.AppendData(static_cast(data), + static_cast(size / sizeof(int16_t))); +} + +webrtc::AudioMixer::Source::AudioFrameInfo +AudioSourcePcm::GetAudioFrameWithInfo(int32_t sample_rate_hz, + webrtc::AudioFrame* audio_frame) { + if (sample_rate_hz != sample_rate_) { + return webrtc::AudioMixer::Source::AudioFrameInfo::kError; + } + if (!enabled() || + static_cast(buffer_.size()) < real_buffer_num_elements_) { + return webrtc::AudioMixer::Source::AudioFrameInfo::kMuted; + } + + audio_frame->UpdateFrame( + 0, buffer_.data(), static_cast(report_output_samples_), + sample_rate_, webrtc::AudioFrame::SpeechType::kNormalSpeech, + webrtc::AudioFrame::VADActivity::kVadActive, + static_cast(channel_num_)); + + if (real_buffer_num_elements_ < static_cast(buffer_.size())) { + if (mv_buf_size_ < buffer_.size() - real_buffer_num_elements_) { + mv_buf_size_ = (buffer_.size() - real_buffer_num_elements_) * 2; + if (mv_buf_) { + delete[] mv_buf_; + } + mv_buf_ = new int16_t[mv_buf_size_]; + } + memset(mv_buf_, 0, 
mv_buf_size_ * sizeof(int16_t)); + memcpy(mv_buf_, buffer_.data() + real_buffer_num_elements_, + (buffer_.size() - real_buffer_num_elements_) * sizeof(int16_t)); + memcpy(buffer_.data(), mv_buf_, + (buffer_.size() - real_buffer_num_elements_) * sizeof(int16_t)); + } + buffer_.SetSize(buffer_.size() - real_buffer_num_elements_); + + preProduceFrame(audio_frame, false); + + return muted() ? webrtc::AudioMixer::Source::AudioFrameInfo::kMuted + : webrtc::AudioMixer::Source::AudioFrameInfo::kNormal; +} +} diff --git a/modules/backing_track/audio_source_pcm.h b/modules/backing_track/audio_source_pcm.h new file mode 100644 index 0000000000..8c484aa2d3 --- /dev/null +++ b/modules/backing_track/audio_source_pcm.h @@ -0,0 +1,42 @@ +// +// Created by Piasy on 2018/5/28. +// + +#pragma once + +#include "rtc_base/buffer.h" + +#include "modules/backing_track/audio_source.h" + +namespace webrtc { + +class AudioSourcePcm : public AudioSource { +public: + AudioSourcePcm(int32_t ssrc, int32_t sample_rate, int32_t channel_num, + int32_t frame_duration_us, float volume, bool enabled); + + ~AudioSourcePcm() override; + + int32_t FrameSize() override; + + void OnAudioRecorded(const void* data, int32_t size); + + AudioFrameInfo GetAudioFrameWithInfo( + int32_t sample_rate_hz, webrtc::AudioFrame* audio_frame) override; + + void UpdateFrameDurationUs(int32_t frame_duration_us) override { + AudioSource::UpdateFrameDurationUs(frame_duration_us); + real_buffer_num_elements_ = + channel_num_ * sample_rate_ * frame_duration_us / 1000 / 1000; + } + +private: + int32_t report_output_samples_; + int32_t real_buffer_num_elements_; + + rtc::BufferT buffer_; + + int16_t* mv_buf_; + size_t mv_buf_size_; +}; +} diff --git a/modules/backing_track/avx_helper.h b/modules/backing_track/avx_helper.h new file mode 100644 index 0000000000..27c0e8e02a --- /dev/null +++ b/modules/backing_track/avx_helper.h @@ -0,0 +1,72 @@ +// +// Created by Piasy on 08/11/2017. 
+// + +#pragma once + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include +#include +#include +#include +#include + +#ifdef __cplusplus +} +#endif + +namespace webrtc { + +struct AVFormatContextDeleter { + void operator()(AVFormatContext* context) { + if (context) { + avformat_close_input(&context); + } + } +}; + +struct AVCodecContextDeleter { + void operator()(AVCodecContext* context) { + if (context) { + avcodec_free_context(&context); + } + } +}; + +struct AVFrameDeleter { + void operator()(AVFrame* frame) { + if (frame) { + av_frame_free(&frame); + } + } +}; + +struct AVPacketDeleter { + void operator()(AVPacket* packet) { + if (packet) { + av_packet_free(&packet); + } + } +}; + +struct SwrContextDeleter { + void operator()(SwrContext* swrContext) { + if (swrContext) { + swr_free(&swrContext); + } + } +}; + +struct AVAudioFifoDeleter { + void operator()(AVAudioFifo* fifo) { + if (fifo) { + av_audio_fifo_free(fifo); + } + } +}; +} diff --git a/modules/backing_track/bt_audio_mixer.cc b/modules/backing_track/bt_audio_mixer.cc new file mode 100644 index 0000000000..5fc356a0aa --- /dev/null +++ b/modules/backing_track/bt_audio_mixer.cc @@ -0,0 +1,248 @@ +// +// Created by Piasy on 29/10/2017. 
+// + +#include "audio/audio_transport_impl.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/backing_track/audio_mixer_global.h" +#include "modules/backing_track/audio_source_compressed.h" +#include "modules/backing_track/bt_audio_mixer.h" +#include "modules/backing_track/mixer_source.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +BtAudioMixer::BtAudioMixer(const MixerConfig& config, + SourceFinishCallback finish_callback, + SourceErrorCallback error_callback, + void* callback_opaque) + : mixer_(webrtc::AudioMixerImpl::Create()), + record_source_(nullptr), + mixed_frame_(absl::make_unique()), + output_sample_rate_(config.output_sample_rate), + output_channel_num_(config.output_channel_num), + enable_music_sync_fix_(config.enable_music_sync_fix), + frame_duration_us_(config.frame_duration_us), + report_output_samples_(output_sample_rate_ * + webrtc::AudioMixerImpl::kFrameDurationInMs / 1000), + real_output_samples_(output_sample_rate_ * frame_duration_us_ / 1000 / + 1000), + waiting_mix_delay_frames_(config.waiting_mix_delay_frames), + finish_callback_(finish_callback), + error_callback_(error_callback), + callback_opaque_(callback_opaque) { + RTC_LOG(LS_INFO) << "AudioMixer create: frame_duration_us " + << config.frame_duration_us; + + for (auto& source : config.sources) { + DoAddSource(source); + } + for (const auto& item : sources_) { + mixer_->AddSource(item.second.get()); + } + + mixed_frame_->UpdateFrame( + 0, nullptr, static_cast(report_output_samples_), + output_sample_rate_, webrtc::AudioFrame::SpeechType::kUndefined, + webrtc::AudioFrame::VADActivity::kVadUnknown, + static_cast(output_channel_num_)); +} + +BtAudioMixer::~BtAudioMixer() { + for (const auto& item : sources_) { + mixer_->RemoveSource(item.second.get()); + } + + sources_.clear(); +} + +void BtAudioMixer::UpdateVolume(int32_t ssrc, float volume_left, + float volume_right) { + RTC_LOG(LS_INFO) << 
"BtAudioMixer::UpdateVolume " << ssrc << " " + << volume_left << " " << volume_right; + auto source = sources_.find(ssrc); + if (source != sources_.end()) { + source->second->UpdateVolume(volume_left, volume_right); + } +} + +void BtAudioMixer::ToggleEnable(int32_t ssrc, bool enable) { + RTC_LOG(LS_INFO) << "BtAudioMixer::ToggleEnable " << ssrc << " " << enable; + auto source = sources_.find(ssrc); + if (source != sources_.end()) { + source->second->ToggleEnable(enable); + } +} + +void BtAudioMixer::ToggleStreaming(int32_t ssrc, bool streaming) { + RTC_LOG(LS_INFO) << "BtAudioMixer::ToggleStreaming " << ssrc << " " + << streaming; + std::shared_ptr music = GetSource(ssrc); + if (music) { + // mute controls streaming, not playback (send to pcm channel, thus adb) + music->ToggleMute(!streaming); + } +} + +void BtAudioMixer::TogglePlayback(int32_t ssrc, bool playback) { + RTC_LOG(LS_INFO) << "BtAudioMixer::TogglePlayback " << ssrc << " " + << playback; + std::shared_ptr source = GetSource(ssrc); + if (source) { + PcmChannel* pcm_channel = source->GetPcmChannel(); + if (pcm_channel) { + // mix controls playback, not streaming + pcm_channel->ToggleMix(playback); + } + } +} + +int64_t BtAudioMixer::GetProgressMs(int32_t ssrc) { + std::shared_ptr source = GetSource(ssrc); + + if (source) { + return source->GetProgressMs(); + } + + return -2; +} + +int64_t BtAudioMixer::GetLengthMs(int32_t ssrc) { + std::shared_ptr source = GetSource(ssrc); + + if (source) { + return source->GetLengthMs(); + } + + return 0; +} + +void BtAudioMixer::Seek(int32_t ssrc, int64_t position_ms) { + std::shared_ptr source = GetSource(ssrc); + if (source) { + source->ToggleMute(true); + source->Seek(position_ms); + source->ToggleMute(false); + } +} + +void BtAudioMixer::UpdateFrameDuration(int32_t frame_duration_us) { + RTC_LOG(LS_INFO) << "BtAudioMixer::UpdateFrameDuration " + << frame_duration_us; + + frame_duration_us_ = frame_duration_us; + real_output_samples_ = + output_sample_rate_ * 
frame_duration_us_ / 1000 / 1000; + + for (const auto& item : sources_) { + item.second->UpdateFrameDurationUs(frame_duration_us); + } +} + +void BtAudioMixer::AddRawSource(AudioSource* source) { + if (source) { + std::shared_ptr ptr(source); + sources_.emplace(std::make_pair(source->Ssrc(), ptr)); + mixer_->AddSource(source); + } +} + +std::shared_ptr BtAudioMixer::GetSource(int32_t ssrc) { + auto source = sources_.find(ssrc); + return source == sources_.end() ? nullptr : source->second; +} + +int32_t BtAudioMixer::Mix(void* output_buffer) { + mixer_->Mix(static_cast(output_channel_num_), mixed_frame_.get()); + + int32_t size = real_output_samples_ * + av_get_bytes_per_sample(kOutputSampleFormat) * + output_channel_num_; + memcpy(output_buffer, reinterpret_cast(mixed_frame_->data()), + static_cast(size)); + return size; +} + +int32_t BtAudioMixer::AddRecordedDataAndMix(const void* data, int32_t size, + void* output_buffer) { + if (record_source_) { + record_source_->OnAudioRecorded(data, size); + } + + return Mix(output_buffer); +} + +std::shared_ptr BtAudioMixer::DoAddSource( + const MixerSource& source) { + AudioTransportImpl* audio_transport = nullptr; + AudioDeviceBuffer* adb = AudioDeviceBuffer::Instance(); + if (adb) { + audio_transport = + reinterpret_cast(adb->audio_transport()); + } + + if (source.type == MixerSource::TYPE_RECORD) { + if (record_source_) { + RTC_LOG(LS_ERROR) << "BtAudioMixer::DoAddSource error: only one " + "record source is supported"; + return nullptr; + } + if (source.sample_rate != output_sample_rate_ || + source.channel_num != output_channel_num_) { + RTC_LOG(LS_ERROR) << "BtAudioMixer::DoAddSource error: " + "bad setting, sr " + << source.sample_rate << " ac " + << source.channel_num << ", output sr " + << output_sample_rate_ << " ac " + << output_channel_num_; + return nullptr; + } + + record_source_.reset(new AudioSourcePcm( + source.ssrc, output_sample_rate_, output_channel_num_, + frame_duration_us_, source.volume_left, + true 
/* mic should be enabled when create */ + )); + + if (audio_transport) { + PcmChannel* pcm_channel = new PcmChannel( + record_source_->sample_rate(), record_source_->channel_num(), + record_source_->frame_duration_us()); + RTC_LOG(LS_INFO) << "AudioMixerCreate rec_src " + << record_source_->Ssrc() << ", channel " + << static_cast(pcm_channel); + audio_transport->AddPlaybackSource(pcm_channel); + record_source_->SetPcmChannel(pcm_channel); + } + + sources_.emplace(std::make_pair(source.ssrc, record_source_)); + + return record_source_; + } else { + std::shared_ptr file_source = + std::make_shared( + source.ssrc, source.path, output_sample_rate_, + output_channel_num_, frame_duration_us_, source.volume_left, + source.volume_right, false /* disable when create */, + enable_music_sync_fix_, source.remix, waiting_mix_delay_frames_, + finish_callback_, error_callback_, callback_opaque_); + + if (audio_transport) { + PcmChannel* pcm_channel = new PcmChannel( + file_source->sample_rate(), file_source->input_channel_num(), + file_source->frame_duration_us()); + RTC_LOG(LS_INFO) << "AudioMixerCreate music_src " + << file_source->Ssrc() << ", channel " + << static_cast(pcm_channel); + audio_transport->AddPlaybackSource(pcm_channel); + file_source->SetPcmChannel(pcm_channel); + } + + sources_.emplace(std::make_pair(source.ssrc, file_source)); + + return file_source; + } +} + +} diff --git a/modules/backing_track/bt_audio_mixer.h b/modules/backing_track/bt_audio_mixer.h new file mode 100644 index 0000000000..2239f1415a --- /dev/null +++ b/modules/backing_track/bt_audio_mixer.h @@ -0,0 +1,76 @@ +// +// Created by Piasy on 29/10/2017. 
+// + +#pragma once + +#include + +#include "api/audio/audio_mixer.h" +#include "api/scoped_refptr.h" + +#include "modules/backing_track/audio_mixer_global.h" +#include "modules/backing_track/audio_source.h" +#include "modules/backing_track/audio_source_pcm.h" +#include "modules/backing_track/mixer_config.h" + +namespace webrtc { + +class BtAudioMixer { +public: + BtAudioMixer(const MixerConfig& config, + SourceFinishCallback finish_callback, + SourceErrorCallback error_callback, void* callback_opaque); + + ~BtAudioMixer(); + + void UpdateVolume(int32_t ssrc, float volume_left, float volume_right); + + void ToggleEnable(int32_t ssrc, bool enable); + + void ToggleStreaming(int32_t ssrc, bool streaming); + + void TogglePlayback(int32_t ssrc, bool playback); + + int64_t GetProgressMs(int32_t ssrc); + + int64_t GetLengthMs(int32_t ssrc); + + void Seek(int32_t ssrc, int64_t position_ms); + + void UpdateFrameDuration(int32_t frame_duration_us); + + void AddRawSource(AudioSource* source); + + std::shared_ptr GetSource(int32_t ssrc); + + int32_t Mix(void* output_buffer); + + int32_t AddRecordedDataAndMix(const void* data, int32_t size, + void* output_buffer); + + int32_t frame_duration_us() { return frame_duration_us_; } + + bool enable_music_sync_fix() { return enable_music_sync_fix_; } + +private: + std::shared_ptr DoAddSource(const MixerSource& source); + + rtc::scoped_refptr mixer_; + std::map> sources_; + std::shared_ptr record_source_; + std::unique_ptr mixed_frame_; + int32_t output_sample_rate_; + int32_t output_channel_num_; + + bool enable_music_sync_fix_; + int32_t frame_duration_us_; + int32_t report_output_samples_; + int32_t real_output_samples_; + int32_t waiting_mix_delay_frames_; + + SourceFinishCallback finish_callback_; + SourceErrorCallback error_callback_; + void* callback_opaque_; +}; +} diff --git a/modules/backing_track/mixer_config.h b/modules/backing_track/mixer_config.h new file mode 100644 index 0000000000..95c9c4a1db --- /dev/null +++ 
b/modules/backing_track/mixer_config.h @@ -0,0 +1,36 @@ +// AUTOGENERATED FILE - DO NOT MODIFY! +// This file generated by Djinni from audio_mixer.djinni + +#pragma once + +#include "modules/backing_track/mixer_source.h" +#include +#include +#include + +namespace webrtc { + +struct MixerConfig final { + std::vector sources; + int32_t output_sample_rate; + int32_t output_channel_num; + int32_t frame_duration_us; + bool enable_music_sync_fix; + int32_t waiting_mix_delay_frames; + + MixerConfig(std::vector sources_, + int32_t output_sample_rate_, + int32_t output_channel_num_, + int32_t frame_duration_us_, + bool enable_music_sync_fix_, + int32_t waiting_mix_delay_frames_) + : sources(std::move(sources_)) + , output_sample_rate(std::move(output_sample_rate_)) + , output_channel_num(std::move(output_channel_num_)) + , frame_duration_us(std::move(frame_duration_us_)) + , enable_music_sync_fix(std::move(enable_music_sync_fix_)) + , waiting_mix_delay_frames(std::move(waiting_mix_delay_frames_)) + {} +}; + +} // namespace audio_mixer diff --git a/modules/backing_track/mixer_source.cc b/modules/backing_track/mixer_source.cc new file mode 100644 index 0000000000..630c28bfee --- /dev/null +++ b/modules/backing_track/mixer_source.cc @@ -0,0 +1,12 @@ +// AUTOGENERATED FILE - DO NOT MODIFY! +// This file generated by Djinni from audio_mixer.djinni + +#include "modules/backing_track/mixer_source.h" // my header + +namespace webrtc { + +int32_t constexpr MixerSource::TYPE_FILE; + +int32_t constexpr MixerSource::TYPE_RECORD; + +} // namespace audio_mixer diff --git a/modules/backing_track/mixer_source.h b/modules/backing_track/mixer_source.h new file mode 100644 index 0000000000..12fcbe5f6e --- /dev/null +++ b/modules/backing_track/mixer_source.h @@ -0,0 +1,55 @@ +// AUTOGENERATED FILE - DO NOT MODIFY! 
+// This file generated by Djinni from audio_mixer.djinni + +#pragma once + +#include +#include +#include + +namespace webrtc { + +struct MixerSource final { + + static constexpr int32_t TYPE_FILE = 1; + + static constexpr int32_t TYPE_RECORD = 2; + + int32_t type; + int32_t ssrc; + float volume_left; + float volume_right; + bool enabled; + bool streaming; + bool playback; + bool remix; + std::string path; + int32_t sample_rate; + int32_t channel_num; + + MixerSource(int32_t type_, + int32_t ssrc_, + float volume_left_, + float volume_right_, + bool enabled_, + bool streaming_, + bool playback_, + bool remix_, + std::string path_, + int32_t sample_rate_, + int32_t channel_num_) + : type(std::move(type_)) + , ssrc(std::move(ssrc_)) + , volume_left(std::move(volume_left_)) + , volume_right(std::move(volume_right_)) + , enabled(std::move(enabled_)) + , streaming(std::move(streaming_)) + , playback(std::move(playback_)) + , remix(std::move(remix_)) + , path(std::move(path_)) + , sample_rate(std::move(sample_rate_)) + , channel_num(std::move(channel_num_)) + {} +}; + +} // namespace audio_mixer diff --git a/modules/backing_track/pcm_channel.cc b/modules/backing_track/pcm_channel.cc new file mode 100644 index 0000000000..3253434d89 --- /dev/null +++ b/modules/backing_track/pcm_channel.cc @@ -0,0 +1,109 @@ +// +// Created by Piasy on 29/10/2017. 
+// + +#include "modules/audio_mixer/audio_mixer_impl.h" +#include "modules/backing_track/pcm_channel.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +PcmChannel::PcmChannel(int32_t sample_rate, int32_t channel_num, + int32_t frame_duration_us) + : ssrc_(0), + sample_rate_(sample_rate), + channel_num_(channel_num), + report_output_samples_(sample_rate * + webrtc::AudioMixerImpl::kFrameDurationInMs / 1000), + real_buffer_num_elements_(channel_num * sample_rate * frame_duration_us / + 1000 / 1000), + enabled_(false), + mv_buf_(nullptr), + mv_buf_size_(0) {} + +PcmChannel::~PcmChannel() { + if (mv_buf_) { + delete[] mv_buf_; + mv_buf_ = nullptr; + mv_buf_size_ = 0; + } +} + +void PcmChannel::FeedData(const void* data, int32_t size) { + if (channel_num_ <= 0 || sample_rate_ <= 0) { + return; + } + + MutexLock lock(&mutex_); + + if (enabled_.load()) { + buffer_.AppendData(static_cast(data), + size / sizeof(int16_t)); + } else { + buffer_.Clear(); + } +} + +webrtc::AudioMixer::Source::AudioFrameInfo PcmChannel::GetAudioFrameWithInfo( + int32_t sample_rate_hz, webrtc::AudioFrame* audio_frame) { + if (channel_num_ <= 0 || sample_rate_ <= 0) { + return webrtc::AudioMixer::Source::AudioFrameInfo::kError; + } + + MutexLock lock(&mutex_); + + int32_t real_buffer_num_elements = real_buffer_num_elements_.load(); + if (!enabled_.load() || + static_cast(buffer_.size()) < real_buffer_num_elements) { + return webrtc::AudioMixer::Source::AudioFrameInfo::kMuted; + } + + audio_frame->UpdateFrame( + 0, nullptr, static_cast(report_output_samples_), sample_rate_, + webrtc::AudioFrame::SpeechType::kNormalSpeech, + webrtc::AudioFrame::VADActivity::kVadActive, + static_cast(channel_num_)); + memcpy(audio_frame->mutable_data(), buffer_.data(), + real_buffer_num_elements * sizeof(int16_t)); + + if (real_buffer_num_elements < static_cast(buffer_.size())) { + if (mv_buf_size_ < buffer_.size() - real_buffer_num_elements) { + // alloc double size + mv_buf_size_ = (buffer_.size() - 
real_buffer_num_elements) * 2; + if (mv_buf_) { + delete[] mv_buf_; + } + mv_buf_ = new int16_t[mv_buf_size_]; + } + memset(mv_buf_, 0, mv_buf_size_ * sizeof(int16_t)); + memcpy(mv_buf_, buffer_.data() + real_buffer_num_elements, + (buffer_.size() - real_buffer_num_elements) * sizeof(int16_t)); + memcpy(buffer_.data(), mv_buf_, + (buffer_.size() - real_buffer_num_elements) * sizeof(int16_t)); + } + buffer_.SetSize(buffer_.size() - real_buffer_num_elements); + + return webrtc::AudioMixer::Source::AudioFrameInfo::kNormal; +} + +int32_t PcmChannel::Ssrc() const { return ssrc_; } + +int32_t PcmChannel::PreferredSampleRate() const { return sample_rate_; } + +void PcmChannel::ToggleMix(bool enable) { + RTC_LOG(LS_INFO) << "PcmChannel(" << static_cast(this) + << ") ToggleMix " << enable; + enabled_ = enable; +} + +void PcmChannel::SetFrameDurationUs(int32_t frame_duration_us) { + if (channel_num_ <= 0 || sample_rate_ <= 0) { + return; + } + + RTC_LOG(LS_INFO) << "SetFrameDurationUs(" << static_cast(this) + << ") SetFrameDurationUs " << frame_duration_us; + real_buffer_num_elements_ = + channel_num_ * sample_rate_ * frame_duration_us / 1000 / 1000; +} +} diff --git a/modules/backing_track/pcm_channel.h b/modules/backing_track/pcm_channel.h new file mode 100644 index 0000000000..d2e0aa570b --- /dev/null +++ b/modules/backing_track/pcm_channel.h @@ -0,0 +1,49 @@ +// +// Created by Piasy on 08/11/2017. 
+// + +#pragma once + +#include "api/audio/audio_mixer.h" +#include "rtc_base/buffer.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +class PcmChannel : public AudioMixer::Source { +public: + PcmChannel(int32_t sample_rate, int32_t channel_num, int32_t frame_duration_us); + + ~PcmChannel() override; + + void FeedData(const void* data, int32_t size); + + AudioFrameInfo + GetAudioFrameWithInfo(int32_t sample_rate_hz, webrtc::AudioFrame* audio_frame) override; + + int32_t Ssrc() const override; + + int32_t PreferredSampleRate() const override; + + void ToggleMix(bool enable); + + void SetFrameDurationUs(int32_t frame_duration_us); + +private: + int32_t ssrc_; + + int32_t sample_rate_; + int32_t channel_num_; + + int32_t report_output_samples_; + std::atomic_int_least32_t real_buffer_num_elements_; + + mutable Mutex mutex_; + std::atomic_bool enabled_; + rtc::BufferT buffer_; + + int16_t* mv_buf_; + size_t mv_buf_size_; +}; + +} diff --git a/modules/congestion_controller/BUILD.gn b/modules/congestion_controller/BUILD.gn index f65cde43c3..231ff5e0dd 100644 --- a/modules/congestion_controller/BUILD.gn +++ b/modules/congestion_controller/BUILD.gn @@ -28,6 +28,7 @@ rtc_library("congestion_controller") { "..:module_api", "../../api/transport:field_trial_based_config", "../../api/transport:network_control", + "../../rtc_base/synchronization:mutex", "../pacing", "../remote_bitrate_estimator", "../rtp_rtcp:rtp_rtcp_format", @@ -49,7 +50,6 @@ if (rtc_include_tests) { "../../test:test_support", "../../test/scenario", "../pacing", - "bbr:bbr_unittests", "goog_cc:estimators", "goog_cc:goog_cc_unittests", "pcc:pcc_unittests", diff --git a/modules/congestion_controller/OWNERS b/modules/congestion_controller/OWNERS index b6b2f85812..3304c672cb 100644 --- a/modules/congestion_controller/OWNERS +++ b/modules/congestion_controller/OWNERS @@ -5,8 +5,3 @@ crodbro@webrtc.org philipel@webrtc.org mflodman@webrtc.org yinwa@webrtc.org - -# These are for the common case 
of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/modules/congestion_controller/bbr/BUILD.gn b/modules/congestion_controller/bbr/BUILD.gn deleted file mode 100644 index bc9d78f334..0000000000 --- a/modules/congestion_controller/bbr/BUILD.gn +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -import("../../../webrtc.gni") - -rtc_library("bbr") { - sources = [ - "bbr_factory.cc", - "bbr_factory.h", - ] - deps = [ - ":bbr_controller", - "../../../api/transport:network_control", - "../../../api/units:time_delta", - "../../../rtc_base:rtc_base_approved", - ] -} - -rtc_library("bbr_controller") { - visibility = [ ":*" ] - sources = [ - "bbr_network_controller.cc", - "bbr_network_controller.h", - ] - deps = [ - ":bandwidth_sampler", - ":loss_rate_filter", - ":rtt_stats", - ":windowed_filter", - "../../../api/transport:network_control", - "../../../rtc_base:checks", - "../../../rtc_base:rtc_base_approved", - "../../../rtc_base/experiments:field_trial_parser", - "../../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("bandwidth_sampler") { - visibility = [ ":*" ] - sources = [ - "bandwidth_sampler.cc", - "bandwidth_sampler.h", - ] - deps = [ - ":packet_number_indexed_queue", - "../../../api/units:data_rate", - "../../../api/units:data_size", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../rtc_base:checks", - "../../../rtc_base:rtc_base_approved", 
- "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("data_transfer_tracker") { - visibility = [ ":*" ] - sources = [ - "data_transfer_tracker.cc", - "data_transfer_tracker.h", - ] - deps = [ - "../../../api/units:data_size", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../rtc_base:checks", - "../../../rtc_base:rtc_base_approved", - ] -} - -rtc_source_set("packet_number_indexed_queue") { - visibility = [ ":*" ] - sources = [ "packet_number_indexed_queue.h" ] - deps = [ "../../../rtc_base:checks" ] -} - -rtc_library("loss_rate_filter") { - visibility = [ ":*" ] - sources = [ - "loss_rate_filter.cc", - "loss_rate_filter.h", - ] - deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} -rtc_library("rtt_stats") { - visibility = [ ":*" ] - sources = [ - "rtt_stats.cc", - "rtt_stats.h", - ] - deps = [ - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../rtc_base:checks", - "../../../rtc_base:rtc_base_approved", - ] -} -rtc_source_set("windowed_filter") { - visibility = [ ":*" ] - sources = [ "windowed_filter.h" ] -} -if (rtc_include_tests) { - rtc_library("bbr_unittests") { - testonly = true - sources = [ - "bandwidth_sampler_unittest.cc", - "bbr_network_controller_unittest.cc", - "data_transfer_tracker_unittest.cc", - "loss_rate_filter_unittest.cc", - "packet_number_indexed_queue_unittest.cc", - "rtt_stats_unittest.cc", - "windowed_filter_unittest.cc", - ] - deps = [ - ":bandwidth_sampler", - ":bbr", - ":bbr_controller", - ":data_transfer_tracker", - ":loss_rate_filter", - ":packet_number_indexed_queue", - ":rtt_stats", - ":windowed_filter", - "../../../api/units:data_rate", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../rtc_base:logging", - "../../../test:test_support", - "../../../test/scenario", - ] - } -} diff --git a/modules/congestion_controller/bbr/bandwidth_sampler.cc b/modules/congestion_controller/bbr/bandwidth_sampler.cc deleted file mode 
100644 index f61e1401e8..0000000000 --- a/modules/congestion_controller/bbr/bandwidth_sampler.cc +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -// Based on the Quic implementation in Chromium. - -#include "modules/congestion_controller/bbr/bandwidth_sampler.h" - -#include - -#include "rtc_base/logging.h" - -namespace webrtc { -namespace bbr { -namespace { -constexpr int64_t kMaxTrackedPackets = 10000; -} - -BandwidthSampler::BandwidthSampler() - : total_data_sent_(DataSize::Zero()), - total_data_acked_(DataSize::Zero()), - total_data_sent_at_last_acked_packet_(DataSize::Zero()), - last_acked_packet_sent_time_(), - last_acked_packet_ack_time_(), - last_sent_packet_(0), - is_app_limited_(false), - end_of_app_limited_phase_(0), - connection_state_map_() {} - -BandwidthSampler::~BandwidthSampler() {} - -void BandwidthSampler::OnPacketSent(Timestamp sent_time, - int64_t packet_number, - DataSize data_size, - DataSize data_in_flight) { - last_sent_packet_ = packet_number; - - total_data_sent_ += data_size; - - // If there are no packets in flight, the time at which the new transmission - // opens can be treated as the A_0 point for the purpose of bandwidth - // sampling. This underestimates bandwidth to some extent, and produces some - // artificially low samples for most packets in flight, but it provides with - // samples at important points where we would not have them otherwise, most - // importantly at the beginning of the connection. 
- if (data_in_flight.IsZero()) { - last_acked_packet_ack_time_ = sent_time; - total_data_sent_at_last_acked_packet_ = total_data_sent_; - - // In this situation ack compression is not a concern, set send rate to - // effectively infinite. - last_acked_packet_sent_time_ = sent_time; - } - - if (!connection_state_map_.IsEmpty() && - packet_number > - connection_state_map_.last_packet() + kMaxTrackedPackets) { - RTC_LOG(LS_WARNING) - << "BandwidthSampler in-flight packet map has exceeded maximum " - "number " - "of tracked packets."; - } - - bool success = - connection_state_map_.Emplace(packet_number, sent_time, data_size, *this); - if (!success) - RTC_LOG(LS_WARNING) << "BandwidthSampler failed to insert the packet " - "into the map, most likely because it's already " - "in it."; -} - -BandwidthSample BandwidthSampler::OnPacketAcknowledged(Timestamp ack_time, - int64_t packet_number) { - ConnectionStateOnSentPacket* sent_packet_pointer = - connection_state_map_.GetEntry(packet_number); - if (sent_packet_pointer == nullptr) { - return BandwidthSample(); - } - BandwidthSample sample = - OnPacketAcknowledgedInner(ack_time, packet_number, *sent_packet_pointer); - connection_state_map_.Remove(packet_number); - return sample; -} - -BandwidthSample BandwidthSampler::OnPacketAcknowledgedInner( - Timestamp ack_time, - int64_t packet_number, - const ConnectionStateOnSentPacket& sent_packet) { - total_data_acked_ += sent_packet.size; - total_data_sent_at_last_acked_packet_ = sent_packet.total_data_sent; - last_acked_packet_sent_time_ = sent_packet.sent_time; - last_acked_packet_ack_time_ = ack_time; - - // Exit app-limited phase once a packet that was sent while the connection is - // not app-limited is acknowledged. - if (is_app_limited_ && packet_number > end_of_app_limited_phase_) { - is_app_limited_ = false; - } - - // There might have been no packets acknowledged at the moment when the - // current packet was sent. In that case, there is no bandwidth sample to - // make. 
- if (!sent_packet.last_acked_packet_sent_time || - !sent_packet.last_acked_packet_ack_time) { - return BandwidthSample(); - } - - // Infinite rate indicates that the sampler is supposed to discard the - // current send rate sample and use only the ack rate. - DataRate send_rate = DataRate::Infinity(); - if (sent_packet.sent_time > *sent_packet.last_acked_packet_sent_time) { - DataSize sent_delta = sent_packet.total_data_sent - - sent_packet.total_data_sent_at_last_acked_packet; - TimeDelta time_delta = - sent_packet.sent_time - *sent_packet.last_acked_packet_sent_time; - send_rate = sent_delta / time_delta; - } - - // During the slope calculation, ensure that ack time of the current packet is - // always larger than the time of the previous packet, otherwise division by - // zero or integer underflow can occur. - if (ack_time <= *sent_packet.last_acked_packet_ack_time) { - RTC_LOG(LS_WARNING) - << "Time of the previously acked packet is larger than the time " - "of the current packet."; - return BandwidthSample(); - } - DataSize ack_delta = - total_data_acked_ - sent_packet.total_data_acked_at_the_last_acked_packet; - TimeDelta time_delta = ack_time - *sent_packet.last_acked_packet_ack_time; - DataRate ack_rate = ack_delta / time_delta; - - BandwidthSample sample; - sample.bandwidth = std::min(send_rate, ack_rate); - // Note: this sample does not account for delayed acknowledgement time. This - // means that the RTT measurements here can be artificially high, especially - // on low bandwidth connections. - sample.rtt = ack_time - sent_packet.sent_time; - // A sample is app-limited if the packet was sent during the app-limited - // phase. 
- sample.is_app_limited = sent_packet.is_app_limited; - return sample; -} - -void BandwidthSampler::OnPacketLost(int64_t packet_number) { - connection_state_map_.Remove(packet_number); -} - -void BandwidthSampler::OnAppLimited() { - is_app_limited_ = true; - end_of_app_limited_phase_ = last_sent_packet_; -} - -void BandwidthSampler::RemoveObsoletePackets(int64_t least_unacked) { - while (!connection_state_map_.IsEmpty() && - connection_state_map_.first_packet() < least_unacked) { - connection_state_map_.Remove(connection_state_map_.first_packet()); - } -} - -DataSize BandwidthSampler::total_data_acked() const { - return total_data_acked_; -} - -bool BandwidthSampler::is_app_limited() const { - return is_app_limited_; -} - -int64_t BandwidthSampler::end_of_app_limited_phase() const { - return end_of_app_limited_phase_; -} - -BandwidthSampler::ConnectionStateOnSentPacket::ConnectionStateOnSentPacket( - Timestamp sent_time, - DataSize size, - const BandwidthSampler& sampler) - : sent_time(sent_time), - size(size), - total_data_sent(sampler.total_data_sent_), - total_data_sent_at_last_acked_packet( - sampler.total_data_sent_at_last_acked_packet_), - last_acked_packet_sent_time(sampler.last_acked_packet_sent_time_), - last_acked_packet_ack_time(sampler.last_acked_packet_ack_time_), - total_data_acked_at_the_last_acked_packet(sampler.total_data_acked_), - is_app_limited(sampler.is_app_limited_) {} - -BandwidthSampler::ConnectionStateOnSentPacket::ConnectionStateOnSentPacket() - : sent_time(Timestamp::MinusInfinity()), - size(DataSize::Zero()), - total_data_sent(DataSize::Zero()), - total_data_sent_at_last_acked_packet(DataSize::Zero()), - last_acked_packet_sent_time(), - last_acked_packet_ack_time(), - total_data_acked_at_the_last_acked_packet(DataSize::Zero()), - is_app_limited(false) {} - -BandwidthSampler::ConnectionStateOnSentPacket::~ConnectionStateOnSentPacket() {} - -} // namespace bbr -} // namespace webrtc diff --git 
a/modules/congestion_controller/bbr/bandwidth_sampler.h b/modules/congestion_controller/bbr/bandwidth_sampler.h deleted file mode 100644 index 7e0a44e61d..0000000000 --- a/modules/congestion_controller/bbr/bandwidth_sampler.h +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -// Based on the Quic implementation in Chromium. - -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_BANDWIDTH_SAMPLER_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_BANDWIDTH_SAMPLER_H_ - -#include "absl/types/optional.h" -#include "api/units/data_rate.h" -#include "api/units/data_size.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "modules/congestion_controller/bbr/packet_number_indexed_queue.h" - -namespace webrtc { -namespace bbr { - -namespace test { -class BandwidthSamplerPeer; -} // namespace test - -struct BandwidthSample { - // The bandwidth at that particular sample. Zero if no valid bandwidth sample - // is available. - DataRate bandwidth; - - // The RTT measurement at this particular sample. Zero if no RTT sample is - // available. Does not correct for delayed ack time. - TimeDelta rtt; - - // Indicates whether the sample might be artificially low because the sender - // did not have enough data to send in order to saturate the link. - bool is_app_limited; - - BandwidthSample() - : bandwidth(DataRate::Zero()), - rtt(TimeDelta::Zero()), - is_app_limited(false) {} -}; - -// BandwidthSampler keeps track of sent and acknowledged packets and outputs a -// bandwidth sample for every packet acknowledged. 
The samples are taken for -// individual packets, and are not filtered; the consumer has to filter the -// bandwidth samples itself. In certain cases, the sampler will locally severely -// underestimate the bandwidth, hence a maximum filter with a size of at least -// one RTT is recommended. -// -// This class bases its samples on the slope of two curves: the number of -// data_size sent over time, and the number of data_size acknowledged as -// received over time. It produces a sample of both slopes for every packet that -// gets acknowledged, based on a slope between two points on each of the -// corresponding curves. Note that due to the packet loss, the number of -// data_size on each curve might get further and further away from each other, -// meaning that it is not feasible to compare byte values coming from different -// curves with each other. -// -// The obvious points for measuring slope sample are the ones corresponding to -// the packet that was just acknowledged. Let us denote them as S_1 (point at -// which the current packet was sent) and A_1 (point at which the current packet -// was acknowledged). However, taking a slope requires two points on each line, -// so estimating bandwidth requires picking a packet in the past with respect to -// which the slope is measured. -// -// For that purpose, BandwidthSampler always keeps track of the most recently -// acknowledged packet, and records it together with every outgoing packet. -// When a packet gets acknowledged (A_1), it has not only information about when -// it itself was sent (S_1), but also the information about the latest -// acknowledged packet right before it was sent (S_0 and A_0). -// -// Based on that data, send and ack rate are estimated as: -// send_rate = (data_size(S_1) - data_size(S_0)) / (time(S_1) - time(S_0)) -// ack_rate = (data_size(A_1) - data_size(A_0)) / (time(A_1) - time(A_0)) -// -// Here, the ack rate is intuitively the rate we want to treat as bandwidth. 
-// However, in certain cases (e.g. ack compression) the ack rate at a point may -// end up higher than the rate at which the data was originally sent, which is -// not indicative of the real bandwidth. Hence, we use the send rate as an upper -// bound, and the sample value is -// rate_sample = min(send_rate, ack_rate) -// -// An important edge case handled by the sampler is tracking the app-limited -// samples. There are multiple meaning of "app-limited" used interchangeably, -// hence it is important to understand and to be able to distinguish between -// them. -// -// Meaning 1: connection state. The connection is said to be app-limited when -// there is no outstanding data to send. This means that certain bandwidth -// samples in the future would not be an accurate indication of the link -// capacity, and it is important to inform consumer about that. Whenever -// connection becomes app-limited, the sampler is notified via OnAppLimited() -// method. -// -// Meaning 2: a phase in the bandwidth sampler. As soon as the bandwidth -// sampler becomes notified about the connection being app-limited, it enters -// app-limited phase. In that phase, all *sent* packets are marked as -// app-limited. Note that the connection itself does not have to be -// app-limited during the app-limited phase, and in fact it will not be -// (otherwise how would it send packets?). The boolean flag below indicates -// whether the sampler is in that phase. -// -// Meaning 3: a flag on the sent packet and on the sample. If a sent packet is -// sent during the app-limited phase, the resulting sample related to the -// packet will be marked as app-limited. -// -// With the terminology issue out of the way, let us consider the question of -// what kind of situation it addresses. -// -// Consider a scenario where we first send packets 1 to 20 at a regular -// bandwidth, and then immediately run out of data. 
After a few seconds, we send -// packets 21 to 60, and only receive ack for 21 between sending packets 40 and -// 41. In this case, when we sample bandwidth for packets 21 to 40, the S_0/A_0 -// we use to compute the slope is going to be packet 20, a few seconds apart -// from the current packet, hence the resulting estimate would be extremely low -// and not indicative of anything. Only at packet 41 the S_0/A_0 will become 21, -// meaning that the bandwidth sample would exclude the quiescence. -// -// Based on the analysis of that scenario, we implement the following rule: once -// OnAppLimited() is called, all sent packets will produce app-limited samples -// up until an ack for a packet that was sent after OnAppLimited() was called. -// Note that while the scenario above is not the only scenario when the -// connection is app-limited, the approach works in other cases too. -class BandwidthSampler { - public: - BandwidthSampler(); - ~BandwidthSampler(); - // Inputs the sent packet information into the sampler. Assumes that all - // packets are sent in order. The information about the packet will not be - // released from the sampler until the packet is either acknowledged or - // declared lost. - void OnPacketSent(Timestamp sent_time, - int64_t packet_number, - DataSize data_size, - DataSize data_in_flight); - - // Notifies the sampler that the |packet_number| is acknowledged. Returns a - // bandwidth sample. If no bandwidth sample is available, bandwidth is set to - // DataRate::Zero(). - BandwidthSample OnPacketAcknowledged(Timestamp ack_time, - int64_t packet_number); - - // Informs the sampler that a packet is considered lost and it should no - // longer keep track of it. - void OnPacketLost(int64_t packet_number); - - // Informs the sampler that the connection is currently app-limited, causing - // the sampler to enter the app-limited phase. The phase will expire by - // itself. 
- void OnAppLimited(); - - // Remove all the packets lower than the specified packet number. - void RemoveObsoletePackets(int64_t least_unacked); - - // Total number of data_size currently acknowledged by the receiver. - DataSize total_data_acked() const; - - // Application-limited information exported for debugging. - bool is_app_limited() const; - int64_t end_of_app_limited_phase() const; - - private: - friend class test::BandwidthSamplerPeer; - // ConnectionStateOnSentPacket represents the information about a sent packet - // and the state of the connection at the moment the packet was sent, - // specifically the information about the most recently acknowledged packet at - // that moment. - struct ConnectionStateOnSentPacket { - // Time at which the packet is sent. - Timestamp sent_time; - - // Size of the packet. - DataSize size; - - // The value of |total_data_sent_| at the time the packet was sent. - // Includes the packet itself. - DataSize total_data_sent; - - // The value of |total_data_sent_at_last_acked_packet_| at the time the - // packet was sent. - DataSize total_data_sent_at_last_acked_packet; - - // The value of |last_acked_packet_sent_time_| at the time the packet was - // sent. - absl::optional last_acked_packet_sent_time; - - // The value of |last_acked_packet_ack_time_| at the time the packet was - // sent. - absl::optional last_acked_packet_ack_time; - - // The value of |total_data_acked_| at the time the packet was - // sent. - DataSize total_data_acked_at_the_last_acked_packet; - - // The value of |is_app_limited_| at the time the packet was - // sent. - bool is_app_limited; - - // Snapshot constructor. Records the current state of the bandwidth - // sampler. - ConnectionStateOnSentPacket(Timestamp sent_time, - DataSize size, - const BandwidthSampler& sampler); - - // Default constructor. Required to put this structure into - // PacketNumberIndexedQueue. 
- ConnectionStateOnSentPacket(); - ~ConnectionStateOnSentPacket(); - }; - - // The total number of congestion controlled data_size sent during the - // connection. - DataSize total_data_sent_; - - // The total number of congestion controlled data_size which were - // acknowledged. - DataSize total_data_acked_; - - // The value of |total_data_sent_| at the time the last acknowledged packet - // was sent. Valid only when |last_acked_packet_sent_time_| is valid. - DataSize total_data_sent_at_last_acked_packet_; - - // The time at which the last acknowledged packet was sent. Set to - // Timestamp::Zero() if no valid timestamp is available. - absl::optional last_acked_packet_sent_time_; - - // The time at which the most recent packet was acknowledged. - absl::optional last_acked_packet_ack_time_; - - // The most recently sent packet. - int64_t last_sent_packet_; - - // Indicates whether the bandwidth sampler is currently in an app-limited - // phase. - bool is_app_limited_; - - // The packet that will be acknowledged after this one will cause the sampler - // to exit the app-limited phase. - int64_t end_of_app_limited_phase_; - - // Record of the connection state at the point where each packet in flight was - // sent, indexed by the packet number. - PacketNumberIndexedQueue connection_state_map_; - - // Handles the actual bandwidth calculations, whereas the outer method handles - // retrieving and removing |sent_packet|. 
- BandwidthSample OnPacketAcknowledgedInner( - Timestamp ack_time, - int64_t packet_number, - const ConnectionStateOnSentPacket& sent_packet); -}; - -} // namespace bbr -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_BBR_BANDWIDTH_SAMPLER_H_ diff --git a/modules/congestion_controller/bbr/bandwidth_sampler_unittest.cc b/modules/congestion_controller/bbr/bandwidth_sampler_unittest.cc deleted file mode 100644 index 3bd205a847..0000000000 --- a/modules/congestion_controller/bbr/bandwidth_sampler_unittest.cc +++ /dev/null @@ -1,337 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -// Based on the Quic implementation in Chromium. - -#include "modules/congestion_controller/bbr/bandwidth_sampler.h" - -#include - -#include "test/gtest.h" - -namespace webrtc { -namespace bbr { -namespace test { - -class BandwidthSamplerPeer { - public: - static size_t GetNumberOfTrackedPackets(const BandwidthSampler& sampler) { - return sampler.connection_state_map_.number_of_present_entries(); - } - - static DataSize GetPacketSize(const BandwidthSampler& sampler, - int64_t packet_number) { - return sampler.connection_state_map_.GetEntry(packet_number)->size; - } -}; - -const int64_t kRegularPacketSizeBytes = 1280; -// Enforce divisibility for some of the tests. -static_assert((kRegularPacketSizeBytes & 31) == 0, - "kRegularPacketSizeBytes has to be five times divisible by 2"); - -const DataSize kRegularPacketSize = DataSize::bytes(kRegularPacketSizeBytes); - -// A test fixture with utility methods for BandwidthSampler tests. 
-class BandwidthSamplerTest : public ::testing::Test { - protected: - BandwidthSamplerTest() - : clock_(Timestamp::seconds(100)), bytes_in_flight_(DataSize::Zero()) {} - - Timestamp clock_; - BandwidthSampler sampler_; - DataSize bytes_in_flight_; - - void SendPacketInner(int64_t packet_number, DataSize bytes) { - sampler_.OnPacketSent(clock_, packet_number, bytes, bytes_in_flight_); - bytes_in_flight_ += bytes; - } - - void SendPacket(int64_t packet_number) { - SendPacketInner(packet_number, kRegularPacketSize); - } - - BandwidthSample AckPacketInner(int64_t packet_number) { - DataSize size = - BandwidthSamplerPeer::GetPacketSize(sampler_, packet_number); - bytes_in_flight_ -= size; - return sampler_.OnPacketAcknowledged(clock_, packet_number); - } - - // Acknowledge receipt of a packet and expect it to be not app-limited. - DataRate AckPacket(int64_t packet_number) { - BandwidthSample sample = AckPacketInner(packet_number); - EXPECT_FALSE(sample.is_app_limited); - return sample.bandwidth; - } - - void LosePacket(int64_t packet_number) { - DataSize size = - BandwidthSamplerPeer::GetPacketSize(sampler_, packet_number); - bytes_in_flight_ -= size; - sampler_.OnPacketLost(packet_number); - } - - // Sends one packet and acks it. Then, send 20 packets. Finally, send - // another 20 packets while acknowledging previous 20. - void Send40PacketsAndAckFirst20(TimeDelta time_between_packets) { - // Send 20 packets at a constant inter-packet time. - for (int64_t i = 1; i <= 20; i++) { - SendPacket(i); - clock_ += time_between_packets; - } - - // Ack packets 1 to 20, while sending new packets at the same rate as - // before. - for (int64_t i = 1; i <= 20; i++) { - AckPacket(i); - SendPacket(i + 20); - clock_ += time_between_packets; - } - } -}; - -// Test the sampler in a simple stop-and-wait sender setting. 
-TEST_F(BandwidthSamplerTest, SendAndWait) { - TimeDelta time_between_packets = TimeDelta::ms(10); - DataRate expected_bandwidth = - kRegularPacketSize * 100 / TimeDelta::seconds(1); - - // Send packets at the constant bandwidth. - for (int64_t i = 1; i < 20; i++) { - SendPacket(i); - clock_ += time_between_packets; - DataRate current_sample = AckPacket(i); - EXPECT_EQ(expected_bandwidth, current_sample); - } - - // Send packets at the exponentially decreasing bandwidth. - for (int64_t i = 20; i < 25; i++) { - time_between_packets = time_between_packets * 2; - expected_bandwidth = expected_bandwidth * 0.5; - - SendPacket(i); - clock_ += time_between_packets; - DataRate current_sample = AckPacket(i); - EXPECT_EQ(expected_bandwidth, current_sample); - } - EXPECT_EQ(0u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - EXPECT_TRUE(bytes_in_flight_.IsZero()); -} - -// Test the sampler during regular windowed sender scenario with fixed -// CWND of 20. -TEST_F(BandwidthSamplerTest, SendPaced) { - const TimeDelta time_between_packets = TimeDelta::ms(1); - DataRate expected_bandwidth = kRegularPacketSize / time_between_packets; - - Send40PacketsAndAckFirst20(time_between_packets); - - // Ack the packets 21 to 40, arriving at the correct bandwidth. - DataRate last_bandwidth = DataRate::Zero(); - for (int64_t i = 21; i <= 40; i++) { - last_bandwidth = AckPacket(i); - EXPECT_EQ(expected_bandwidth, last_bandwidth); - clock_ += time_between_packets; - } - EXPECT_EQ(0u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - EXPECT_TRUE(bytes_in_flight_.IsZero()); -} - -// Test the sampler in a scenario where 50% of packets is consistently lost. -TEST_F(BandwidthSamplerTest, SendWithLosses) { - const TimeDelta time_between_packets = TimeDelta::ms(1); - DataRate expected_bandwidth = kRegularPacketSize / time_between_packets * 0.5; - - // Send 20 packets, each 1 ms apart. 
- for (int64_t i = 1; i <= 20; i++) { - SendPacket(i); - clock_ += time_between_packets; - } - - // Ack packets 1 to 20, losing every even-numbered packet, while sending new - // packets at the same rate as before. - for (int64_t i = 1; i <= 20; i++) { - if (i % 2 == 0) { - AckPacket(i); - } else { - LosePacket(i); - } - SendPacket(i + 20); - clock_ += time_between_packets; - } - - // Ack the packets 21 to 40 with the same loss pattern. - DataRate last_bandwidth = DataRate::Zero(); - for (int64_t i = 21; i <= 40; i++) { - if (i % 2 == 0) { - last_bandwidth = AckPacket(i); - EXPECT_EQ(expected_bandwidth, last_bandwidth); - } else { - LosePacket(i); - } - clock_ += time_between_packets; - } - EXPECT_EQ(0u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - EXPECT_TRUE(bytes_in_flight_.IsZero()); -} - -// Simulate a situation where ACKs arrive in burst and earlier than usual, thus -// producing an ACK rate which is higher than the original send rate. -TEST_F(BandwidthSamplerTest, CompressedAck) { - const TimeDelta time_between_packets = TimeDelta::ms(1); - DataRate expected_bandwidth = kRegularPacketSize / time_between_packets; - - Send40PacketsAndAckFirst20(time_between_packets); - - // Simulate an RTT somewhat lower than the one for 1-to-21 transmission. - clock_ += time_between_packets * 15; - - // Ack the packets 21 to 40 almost immediately at once. - DataRate last_bandwidth = DataRate::Zero(); - TimeDelta ridiculously_small_time_delta = TimeDelta::us(20); - for (int64_t i = 21; i <= 40; i++) { - last_bandwidth = AckPacket(i); - clock_ += ridiculously_small_time_delta; - } - EXPECT_EQ(expected_bandwidth, last_bandwidth); - EXPECT_EQ(0u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - EXPECT_TRUE(bytes_in_flight_.IsZero()); -} - -// Tests receiving ACK packets in the reverse order. 
-TEST_F(BandwidthSamplerTest, ReorderedAck) { - const TimeDelta time_between_packets = TimeDelta::ms(1); - DataRate expected_bandwidth = kRegularPacketSize / time_between_packets; - - Send40PacketsAndAckFirst20(time_between_packets); - - // Ack the packets 21 to 40 in the reverse order, while sending packets 41 to - // 60. - DataRate last_bandwidth = DataRate::Zero(); - for (int64_t i = 0; i < 20; i++) { - last_bandwidth = AckPacket(40 - i); - EXPECT_EQ(expected_bandwidth, last_bandwidth); - SendPacket(41 + i); - clock_ += time_between_packets; - } - - // Ack the packets 41 to 60, now in the regular order. - for (int64_t i = 41; i <= 60; i++) { - last_bandwidth = AckPacket(i); - EXPECT_EQ(expected_bandwidth, last_bandwidth); - clock_ += time_between_packets; - } - EXPECT_EQ(0u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - EXPECT_TRUE(bytes_in_flight_.IsZero()); -} - -// Test the app-limited logic. -TEST_F(BandwidthSamplerTest, AppLimited) { - const TimeDelta time_between_packets = TimeDelta::ms(1); - DataRate expected_bandwidth = kRegularPacketSize / time_between_packets; - - Send40PacketsAndAckFirst20(time_between_packets); - - // We are now app-limited. Ack 21 to 40 as usual, but do not send anything for - // now. - sampler_.OnAppLimited(); - for (int64_t i = 21; i <= 40; i++) { - DataRate current_sample = AckPacket(i); - EXPECT_EQ(expected_bandwidth, current_sample); - clock_ += time_between_packets; - } - - // Enter quiescence. - clock_ += TimeDelta::seconds(1); - - // Send packets 41 to 60, all of which would be marked as app-limited. - for (int64_t i = 41; i <= 60; i++) { - SendPacket(i); - clock_ += time_between_packets; - } - - // Ack packets 41 to 60, while sending packets 61 to 80. 41 to 60 should be - // app-limited and underestimate the bandwidth due to that. 
- for (int64_t i = 41; i <= 60; i++) { - BandwidthSample sample = AckPacketInner(i); - EXPECT_TRUE(sample.is_app_limited); - EXPECT_LT(sample.bandwidth, 0.7f * expected_bandwidth); - - SendPacket(i + 20); - clock_ += time_between_packets; - } - - // Run out of packets, and then ack packet 61 to 80, all of which should have - // correct non-app-limited samples. - for (int64_t i = 61; i <= 80; i++) { - DataRate last_bandwidth = AckPacket(i); - EXPECT_EQ(expected_bandwidth, last_bandwidth); - clock_ += time_between_packets; - } - - EXPECT_EQ(0u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - EXPECT_TRUE(bytes_in_flight_.IsZero()); -} - -// Test the samples taken at the first flight of packets sent. -TEST_F(BandwidthSamplerTest, FirstRoundTrip) { - const TimeDelta time_between_packets = TimeDelta::ms(1); - const TimeDelta rtt = TimeDelta::ms(800); - const int num_packets = 10; - const DataSize num_bytes = kRegularPacketSize * num_packets; - const DataRate real_bandwidth = num_bytes / rtt; - - for (int64_t i = 1; i <= 10; i++) { - SendPacket(i); - clock_ += time_between_packets; - } - - clock_ += rtt - num_packets * time_between_packets; - - DataRate last_sample = DataRate::Zero(); - for (int64_t i = 1; i <= 10; i++) { - DataRate sample = AckPacket(i); - EXPECT_GT(sample, last_sample); - last_sample = sample; - clock_ += time_between_packets; - } - - // The final measured sample for the first flight of sample is expected to be - // smaller than the real bandwidth, yet it should not lose more than 10%. The - // specific value of the error depends on the difference between the RTT and - // the time it takes to exhaust the congestion window (i.e. in the limit when - // all packets are sent simultaneously, last sample would indicate the real - // bandwidth). - EXPECT_LT(last_sample, real_bandwidth); - EXPECT_GT(last_sample, 0.9f * real_bandwidth); -} - -// Test sampler's ability to remove obsolete packets. 
-TEST_F(BandwidthSamplerTest, RemoveObsoletePackets) { - SendPacket(1); - SendPacket(2); - SendPacket(3); - SendPacket(4); - SendPacket(5); - - clock_ += TimeDelta::ms(100); - - EXPECT_EQ(5u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - sampler_.RemoveObsoletePackets(4); - EXPECT_EQ(2u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - sampler_.OnPacketLost(4); - EXPECT_EQ(1u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); - AckPacket(5); - EXPECT_EQ(0u, BandwidthSamplerPeer::GetNumberOfTrackedPackets(sampler_)); -} - -} // namespace test -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/bbr_factory.cc b/modules/congestion_controller/bbr/bbr_factory.cc deleted file mode 100644 index c20123721a..0000000000 --- a/modules/congestion_controller/bbr/bbr_factory.cc +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/congestion_controller/bbr/bbr_factory.h" - -#include - -#include "modules/congestion_controller/bbr/bbr_network_controller.h" - -namespace webrtc { - -BbrNetworkControllerFactory::BbrNetworkControllerFactory() {} - -std::unique_ptr BbrNetworkControllerFactory::Create( - NetworkControllerConfig config) { - return std::make_unique(config); -} - -TimeDelta BbrNetworkControllerFactory::GetProcessInterval() const { - return TimeDelta::PlusInfinity(); -} - -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/bbr_factory.h b/modules/congestion_controller/bbr/bbr_factory.h deleted file mode 100644 index 9b371551ea..0000000000 --- a/modules/congestion_controller/bbr/bbr_factory.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_BBR_FACTORY_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_BBR_FACTORY_H_ - -#include - -#include "api/transport/network_control.h" -#include "api/units/time_delta.h" - -namespace webrtc { - -class BbrNetworkControllerFactory : public NetworkControllerFactoryInterface { - public: - BbrNetworkControllerFactory(); - std::unique_ptr Create( - NetworkControllerConfig config) override; - TimeDelta GetProcessInterval() const override; -}; -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_BBR_BBR_FACTORY_H_ diff --git a/modules/congestion_controller/bbr/bbr_network_controller.cc b/modules/congestion_controller/bbr/bbr_network_controller.cc deleted file mode 100644 index ad08541308..0000000000 --- a/modules/congestion_controller/bbr/bbr_network_controller.cc +++ /dev/null @@ -1,954 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/bbr/bbr_network_controller.h" - -#include -#include -#include -#include - -#include "absl/base/macros.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { -namespace bbr { -namespace { - -// If greater than zero, mean RTT variation is multiplied by the specified -// factor and added to the congestion window limit. -const double kBbrRttVariationWeight = 0.0f; - -// Congestion window gain for QUIC BBR during PROBE_BW phase. -const double kProbeBWCongestionWindowGain = 2.0f; - -// The maximum packet size of any QUIC packet, based on ethernet's max size, -// minus the IP and UDP headers. 
IPv6 has a 40 byte header, UDP adds an -// additional 8 bytes. This is a total overhead of 48 bytes. Ethernet's -// max packet size is 1500 bytes, 1500 - 48 = 1452. -const DataSize kMaxPacketSize = DataSize::Bytes<1452>(); - -// Default maximum packet size used in the Linux TCP implementation. -// Used in QUIC for congestion window computations in bytes. -constexpr DataSize kDefaultTCPMSS = DataSize::Bytes<1460>(); -// Constants based on TCP defaults. -constexpr DataSize kMaxSegmentSize = kDefaultTCPMSS; - -// The gain used for the slow start, equal to 2/ln(2). -const double kHighGain = 2.885f; -// The gain used in STARTUP after loss has been detected. -// 1.5 is enough to allow for 25% exogenous loss and still observe a 25% growth -// in measured bandwidth. -const double kStartupAfterLossGain = 1.5; -// The gain used to drain the queue after the slow start. -const double kDrainGain = 1.f / kHighGain; - -// The length of the gain cycle. -const size_t kGainCycleLength = 8; -// The size of the bandwidth filter window, in round-trips. -const BbrRoundTripCount kBandwidthWindowSize = kGainCycleLength + 2; - -// The time after which the current min_rtt value expires. -constexpr int64_t kMinRttExpirySeconds = 10; -// The minimum time the connection can spend in PROBE_RTT mode. -constexpr int64_t kProbeRttTimeMs = 200; -// If the bandwidth does not increase by the factor of |kStartupGrowthTarget| -// within |kRoundTripsWithoutGrowthBeforeExitingStartup| rounds, the connection -// will exit the STARTUP mode. -const double kStartupGrowthTarget = 1.25; -// Coefficient to determine if a new RTT is sufficiently similar to min_rtt that -// we don't need to enter PROBE_RTT. -const double kSimilarMinRttThreshold = 1.125; - -constexpr int64_t kInitialBandwidthKbps = 300; - -const int64_t kInitialCongestionWindowPackets = 32; -// The minimum CWND to ensure delayed acks don't reduce bandwidth measurements. -// Does not inflate the pacing rate. 
-const int64_t kDefaultMinCongestionWindowPackets = 4; -const int64_t kDefaultMaxCongestionWindowPackets = 2000; - -const char kBbrConfigTrial[] = "WebRTC-BweBbrConfig"; - -} // namespace - -BbrNetworkController::BbrControllerConfig::BbrControllerConfig( - std::string field_trial) - : probe_bw_pacing_gain_offset("probe_bw_pacing_gain_offset", 0.25), - encoder_rate_gain("encoder_rate_gain", 1), - encoder_rate_gain_in_probe_rtt("encoder_rate_gain_in_probe_rtt", 1), - exit_startup_rtt_threshold("exit_startup_rtt_threshold", - TimeDelta::PlusInfinity()), - initial_congestion_window( - "initial_cwin", - kInitialCongestionWindowPackets * kDefaultTCPMSS), - min_congestion_window( - "min_cwin", - kDefaultMinCongestionWindowPackets * kDefaultTCPMSS), - max_congestion_window( - "max_cwin", - kDefaultMaxCongestionWindowPackets * kDefaultTCPMSS), - probe_rtt_congestion_window_gain("probe_rtt_cwin_gain", 0.75), - pacing_rate_as_target("pacing_rate_as_target", false), - exit_startup_on_loss("exit_startup_on_loss", true), - num_startup_rtts("num_startup_rtts", 3), - rate_based_recovery("rate_based_recovery", false), - max_aggregation_bytes_multiplier("max_aggregation_bytes_multiplier", 0), - slower_startup("slower_startup", false), - rate_based_startup("rate_based_startup", false), - initial_conservation_in_startup("initial_conservation", - CONSERVATION, - { - {"NOT_IN_RECOVERY", NOT_IN_RECOVERY}, - {"CONSERVATION", CONSERVATION}, - {"MEDIUM_GROWTH", MEDIUM_GROWTH}, - {"GROWTH", GROWTH}, - }), - fully_drain_queue("fully_drain_queue", false), - max_ack_height_window_multiplier("max_ack_height_window_multiplier", 1), - probe_rtt_based_on_bdp("probe_rtt_based_on_bdp", false), - probe_rtt_skipped_if_similar_rtt("probe_rtt_skipped_if_similar_rtt", - false), - probe_rtt_disabled_if_app_limited("probe_rtt_disabled_if_app_limited", - false) { - ParseFieldTrial( - { - &exit_startup_on_loss, - &encoder_rate_gain, - &encoder_rate_gain_in_probe_rtt, - &exit_startup_rtt_threshold, - 
&fully_drain_queue, - &initial_congestion_window, - &initial_conservation_in_startup, - &max_ack_height_window_multiplier, - &max_aggregation_bytes_multiplier, - &max_congestion_window, - &min_congestion_window, - &num_startup_rtts, - &pacing_rate_as_target, - &probe_bw_pacing_gain_offset, - &probe_rtt_based_on_bdp, - &probe_rtt_congestion_window_gain, - &probe_rtt_disabled_if_app_limited, - &probe_rtt_skipped_if_similar_rtt, - &rate_based_recovery, - &rate_based_startup, - &slower_startup, - }, - field_trial); -} -BbrNetworkController::BbrControllerConfig::~BbrControllerConfig() = default; -BbrNetworkController::BbrControllerConfig::BbrControllerConfig( - const BbrControllerConfig&) = default; -BbrNetworkController::BbrControllerConfig -BbrNetworkController::BbrControllerConfig::FromTrial() { - return BbrControllerConfig( - webrtc::field_trial::FindFullName(kBbrConfigTrial)); -} - -BbrNetworkController::DebugState::DebugState(const BbrNetworkController& sender) - : mode(sender.mode_), - max_bandwidth(sender.max_bandwidth_.GetBest()), - round_trip_count(sender.round_trip_count_), - gain_cycle_index(sender.cycle_current_offset_), - congestion_window(sender.congestion_window_), - is_at_full_bandwidth(sender.is_at_full_bandwidth_), - bandwidth_at_last_round(sender.bandwidth_at_last_round_), - rounds_without_bandwidth_gain(sender.rounds_without_bandwidth_gain_), - min_rtt(sender.min_rtt_), - min_rtt_timestamp(sender.min_rtt_timestamp_), - recovery_state(sender.recovery_state_), - recovery_window(sender.recovery_window_), - last_sample_is_app_limited(sender.last_sample_is_app_limited_), - end_of_app_limited_phase(sender.sampler_->end_of_app_limited_phase()) {} - -BbrNetworkController::DebugState::DebugState(const DebugState& state) = default; - -BbrNetworkController::BbrNetworkController(NetworkControllerConfig config) - : config_(BbrControllerConfig::FromTrial()), - rtt_stats_(), - random_(10), - loss_rate_(), - mode_(STARTUP), - sampler_(new BandwidthSampler()), - 
round_trip_count_(0), - last_sent_packet_(0), - current_round_trip_end_(0), - max_bandwidth_(kBandwidthWindowSize, DataRate::Zero(), 0), - default_bandwidth_(DataRate::kbps(kInitialBandwidthKbps)), - max_ack_height_(kBandwidthWindowSize, DataSize::Zero(), 0), - aggregation_epoch_start_time_(), - aggregation_epoch_bytes_(DataSize::Zero()), - bytes_acked_since_queue_drained_(DataSize::Zero()), - max_aggregation_bytes_multiplier_(0), - min_rtt_(TimeDelta::Zero()), - last_rtt_(TimeDelta::Zero()), - min_rtt_timestamp_(Timestamp::MinusInfinity()), - congestion_window_(config_.initial_congestion_window), - initial_congestion_window_(config_.initial_congestion_window), - min_congestion_window_(config_.min_congestion_window), - max_congestion_window_(config_.max_congestion_window), - pacing_rate_(DataRate::Zero()), - pacing_gain_(1), - congestion_window_gain_constant_(kProbeBWCongestionWindowGain), - rtt_variance_weight_(kBbrRttVariationWeight), - cycle_current_offset_(0), - last_cycle_start_(Timestamp::MinusInfinity()), - is_at_full_bandwidth_(false), - rounds_without_bandwidth_gain_(0), - bandwidth_at_last_round_(DataRate::Zero()), - exiting_quiescence_(false), - exit_probe_rtt_at_(), - probe_rtt_round_passed_(false), - last_sample_is_app_limited_(false), - recovery_state_(NOT_IN_RECOVERY), - end_recovery_at_(), - recovery_window_(max_congestion_window_), - app_limited_since_last_probe_rtt_(false), - min_rtt_since_last_probe_rtt_(TimeDelta::PlusInfinity()) { - RTC_LOG(LS_INFO) << "Creating BBR controller"; - if (config.constraints.starting_rate) - default_bandwidth_ = *config.constraints.starting_rate; - constraints_ = config.constraints; - Reset(); -} - -BbrNetworkController::~BbrNetworkController() {} - -void BbrNetworkController::Reset() { - round_trip_count_ = 0; - rounds_without_bandwidth_gain_ = 0; - if (config_.num_startup_rtts > 0) { - is_at_full_bandwidth_ = false; - EnterStartupMode(); - } else { - is_at_full_bandwidth_ = true; - 
EnterProbeBandwidthMode(constraints_->at_time); - } -} - -NetworkControlUpdate BbrNetworkController::CreateRateUpdate( - Timestamp at_time) const { - DataRate bandwidth = BandwidthEstimate(); - if (bandwidth.IsZero()) - bandwidth = default_bandwidth_; - TimeDelta rtt = GetMinRtt(); - DataRate pacing_rate = PacingRate(); - DataRate target_rate = - config_.pacing_rate_as_target ? pacing_rate : bandwidth; - - if (mode_ == PROBE_RTT) - target_rate = target_rate * config_.encoder_rate_gain_in_probe_rtt; - else - target_rate = target_rate * config_.encoder_rate_gain; - target_rate = std::min(target_rate, pacing_rate); - - if (constraints_) { - if (constraints_->max_data_rate) { - target_rate = std::min(target_rate, *constraints_->max_data_rate); - pacing_rate = std::min(pacing_rate, *constraints_->max_data_rate); - } - if (constraints_->min_data_rate) { - target_rate = std::max(target_rate, *constraints_->min_data_rate); - pacing_rate = std::max(pacing_rate, *constraints_->min_data_rate); - } - } - - NetworkControlUpdate update; - - TargetTransferRate target_rate_msg; - target_rate_msg.network_estimate.at_time = at_time; - target_rate_msg.network_estimate.round_trip_time = rtt; - - // TODO(srte): Fill in field below with proper value. - target_rate_msg.network_estimate.loss_rate_ratio = 0; - // In in PROBE_BW, target bandwidth is expected to vary over the cycle period. - // In other modes the is no given period, therefore the same value as in - // PROBE_BW is used for consistency. - target_rate_msg.network_estimate.bwe_period = - rtt * static_cast(kGainCycleLength); - - target_rate_msg.target_rate = target_rate; - target_rate_msg.at_time = at_time; - update.target_rate = target_rate_msg; - - PacerConfig pacer_config; - // A small time window ensures an even pacing rate. 
- pacer_config.time_window = rtt * 0.25; - pacer_config.data_window = pacer_config.time_window * pacing_rate; - - if (IsProbingForMoreBandwidth()) - pacer_config.pad_window = pacer_config.data_window; - else - pacer_config.pad_window = DataSize::Zero(); - - pacer_config.at_time = at_time; - update.pacer_config = pacer_config; - - update.congestion_window = GetCongestionWindow(); - return update; -} - -NetworkControlUpdate BbrNetworkController::OnNetworkAvailability( - NetworkAvailability msg) { - Reset(); - rtt_stats_.OnConnectionMigration(); - return CreateRateUpdate(msg.at_time); -} - -NetworkControlUpdate BbrNetworkController::OnNetworkRouteChange( - NetworkRouteChange msg) { - constraints_ = msg.constraints; - Reset(); - if (msg.constraints.starting_rate) - default_bandwidth_ = *msg.constraints.starting_rate; - - rtt_stats_.OnConnectionMigration(); - return CreateRateUpdate(msg.at_time); -} - -NetworkControlUpdate BbrNetworkController::OnProcessInterval( - ProcessInterval msg) { - return CreateRateUpdate(msg.at_time); -} - -NetworkControlUpdate BbrNetworkController::OnStreamsConfig(StreamsConfig msg) { - return NetworkControlUpdate(); -} - -NetworkControlUpdate BbrNetworkController::OnTargetRateConstraints( - TargetRateConstraints msg) { - constraints_ = msg; - return CreateRateUpdate(msg.at_time); -} - -bool BbrNetworkController::InSlowStart() const { - return mode_ == STARTUP; -} - -NetworkControlUpdate BbrNetworkController::OnSentPacket(SentPacket msg) { - last_sent_packet_ = msg.sequence_number; - - if (msg.data_in_flight.IsZero() && sampler_->is_app_limited()) { - exiting_quiescence_ = true; - } - - if (!aggregation_epoch_start_time_) { - aggregation_epoch_start_time_ = msg.send_time; - } - - sampler_->OnPacketSent(msg.send_time, msg.sequence_number, msg.size, - msg.data_in_flight); - return NetworkControlUpdate(); -} - -bool BbrNetworkController::CanSend(DataSize bytes_in_flight) { - return bytes_in_flight < GetCongestionWindow(); -} - -DataRate 
BbrNetworkController::PacingRate() const { - if (pacing_rate_.IsZero()) { - return kHighGain * initial_congestion_window_ / GetMinRtt(); - } - return pacing_rate_; -} - -DataRate BbrNetworkController::BandwidthEstimate() const { - return max_bandwidth_.GetBest(); -} - -DataSize BbrNetworkController::GetCongestionWindow() const { - if (mode_ == PROBE_RTT) { - return ProbeRttCongestionWindow(); - } - - if (InRecovery() && !config_.rate_based_recovery && - !(config_.rate_based_startup && mode_ == STARTUP)) { - return std::min(congestion_window_, recovery_window_); - } - - return congestion_window_; -} - -double BbrNetworkController::GetPacingGain(int round_offset) const { - if (round_offset == 0) - return 1 + config_.probe_bw_pacing_gain_offset; - else if (round_offset == 1) - return 1 - config_.probe_bw_pacing_gain_offset; - else - return 1; -} - -bool BbrNetworkController::InRecovery() const { - return recovery_state_ != NOT_IN_RECOVERY; -} - -bool BbrNetworkController::IsProbingForMoreBandwidth() const { - return (mode_ == PROBE_BW && pacing_gain_ > 1) || mode_ == STARTUP; -} - -NetworkControlUpdate BbrNetworkController::OnTransportPacketsFeedback( - TransportPacketsFeedback msg) { - if (msg.packet_feedbacks.empty()) - return NetworkControlUpdate(); - - Timestamp feedback_recv_time = msg.feedback_time; - SentPacket last_sent_packet = msg.PacketsWithFeedback().back().sent_packet; - - Timestamp send_time = last_sent_packet.send_time; - TimeDelta send_delta = feedback_recv_time - send_time; - rtt_stats_.UpdateRtt(send_delta, TimeDelta::Zero(), feedback_recv_time); - - const DataSize total_data_acked_before = sampler_->total_data_acked(); - - bool is_round_start = false; - bool min_rtt_expired = false; - - std::vector lost_packets = msg.LostWithSendInfo(); - DiscardLostPackets(lost_packets); - - std::vector acked_packets = msg.ReceivedWithSendInfo(); - - int packets_sent = - static_cast(lost_packets.size() + acked_packets.size()); - int packets_lost = 
static_cast(lost_packets.size()); - loss_rate_.UpdateWithLossStatus(msg.feedback_time.ms(), packets_sent, - packets_lost); - - // Input the new data into the BBR model of the connection. - if (!acked_packets.empty()) { - int64_t last_acked_packet = - acked_packets.rbegin()->sent_packet.sequence_number; - - is_round_start = UpdateRoundTripCounter(last_acked_packet); - min_rtt_expired = - UpdateBandwidthAndMinRtt(msg.feedback_time, acked_packets); - UpdateRecoveryState(last_acked_packet, !lost_packets.empty(), - is_round_start); - - const DataSize data_acked = - sampler_->total_data_acked() - total_data_acked_before; - - UpdateAckAggregationBytes(msg.feedback_time, data_acked); - if (max_aggregation_bytes_multiplier_ > 0) { - if (msg.data_in_flight <= - 1.25 * GetTargetCongestionWindow(pacing_gain_)) { - bytes_acked_since_queue_drained_ = DataSize::Zero(); - } else { - bytes_acked_since_queue_drained_ += data_acked; - } - } - } - - // Handle logic specific to PROBE_BW mode. - if (mode_ == PROBE_BW) { - UpdateGainCyclePhase(msg.feedback_time, msg.prior_in_flight, - !lost_packets.empty()); - } - - // Handle logic specific to STARTUP and DRAIN modes. - if (is_round_start && !is_at_full_bandwidth_) { - CheckIfFullBandwidthReached(); - } - MaybeExitStartupOrDrain(msg); - - // Handle logic specific to PROBE_RTT. - MaybeEnterOrExitProbeRtt(msg, is_round_start, min_rtt_expired); - - // Calculate number of packets acked and lost. - DataSize data_acked = sampler_->total_data_acked() - total_data_acked_before; - DataSize data_lost = DataSize::Zero(); - for (const PacketResult& packet : lost_packets) { - data_lost += packet.sent_packet.size; - } - - // After the model is updated, recalculate the pacing rate and congestion - // window. - CalculatePacingRate(); - CalculateCongestionWindow(data_acked); - CalculateRecoveryWindow(data_acked, data_lost, msg.data_in_flight); - // Cleanup internal state. 
- if (!acked_packets.empty()) { - sampler_->RemoveObsoletePackets( - acked_packets.back().sent_packet.sequence_number); - } - return CreateRateUpdate(msg.feedback_time); -} - -NetworkControlUpdate BbrNetworkController::OnRemoteBitrateReport( - RemoteBitrateReport msg) { - return NetworkControlUpdate(); -} -NetworkControlUpdate BbrNetworkController::OnRoundTripTimeUpdate( - RoundTripTimeUpdate msg) { - return NetworkControlUpdate(); -} -NetworkControlUpdate BbrNetworkController::OnTransportLossReport( - TransportLossReport msg) { - return NetworkControlUpdate(); -} - -NetworkControlUpdate BbrNetworkController::OnReceivedPacket( - ReceivedPacket msg) { - return NetworkControlUpdate(); -} - -NetworkControlUpdate BbrNetworkController::OnNetworkStateEstimate( - NetworkStateEstimate msg) { - return NetworkControlUpdate(); -} - -TimeDelta BbrNetworkController::GetMinRtt() const { - return !min_rtt_.IsZero() ? min_rtt_ - : TimeDelta::us(rtt_stats_.initial_rtt_us()); -} - -DataSize BbrNetworkController::GetTargetCongestionWindow(double gain) const { - DataSize bdp = GetMinRtt() * BandwidthEstimate(); - DataSize congestion_window = gain * bdp; - - // BDP estimate will be zero if no bandwidth samples are available yet. - if (congestion_window.IsZero()) { - congestion_window = gain * initial_congestion_window_; - } - - return std::max(congestion_window, min_congestion_window_); -} - -DataSize BbrNetworkController::ProbeRttCongestionWindow() const { - if (config_.probe_rtt_based_on_bdp) { - return GetTargetCongestionWindow(config_.probe_rtt_congestion_window_gain); - } - return min_congestion_window_; -} - -void BbrNetworkController::EnterStartupMode() { - mode_ = STARTUP; - pacing_gain_ = kHighGain; - congestion_window_gain_ = kHighGain; -} - -void BbrNetworkController::EnterProbeBandwidthMode(Timestamp now) { - mode_ = PROBE_BW; - congestion_window_gain_ = congestion_window_gain_constant_; - - // Pick a random offset for the gain cycle out of {0, 2..7} range. 
1 is - // excluded because in that case increased gain and decreased gain would not - // follow each other. - cycle_current_offset_ = random_.Rand(kGainCycleLength - 2); - if (cycle_current_offset_ >= 1) { - cycle_current_offset_ += 1; - } - - last_cycle_start_ = now; - pacing_gain_ = GetPacingGain(cycle_current_offset_); -} - -void BbrNetworkController::DiscardLostPackets( - const std::vector& lost_packets) { - for (const PacketResult& packet : lost_packets) { - sampler_->OnPacketLost(packet.sent_packet.sequence_number); - } -} - -bool BbrNetworkController::UpdateRoundTripCounter(int64_t last_acked_packet) { - if (last_acked_packet > current_round_trip_end_) { - round_trip_count_++; - current_round_trip_end_ = last_sent_packet_; - return true; - } - - return false; -} - -bool BbrNetworkController::UpdateBandwidthAndMinRtt( - Timestamp now, - const std::vector& acked_packets) { - TimeDelta sample_rtt = TimeDelta::PlusInfinity(); - for (const auto& packet : acked_packets) { - BandwidthSample bandwidth_sample = - sampler_->OnPacketAcknowledged(now, packet.sent_packet.sequence_number); - last_sample_is_app_limited_ = bandwidth_sample.is_app_limited; - if (!bandwidth_sample.rtt.IsZero()) { - sample_rtt = std::min(sample_rtt, bandwidth_sample.rtt); - } - - if (!bandwidth_sample.is_app_limited || - bandwidth_sample.bandwidth > BandwidthEstimate()) { - max_bandwidth_.Update(bandwidth_sample.bandwidth, round_trip_count_); - } - } - - // If none of the RTT samples are valid, return immediately. - if (sample_rtt.IsInfinite()) { - return false; - } - - last_rtt_ = sample_rtt; - min_rtt_since_last_probe_rtt_ = - std::min(min_rtt_since_last_probe_rtt_, sample_rtt); - - const TimeDelta kMinRttExpiry = TimeDelta::seconds(kMinRttExpirySeconds); - // Do not expire min_rtt if none was ever available. 
- bool min_rtt_expired = - !min_rtt_.IsZero() && (now > (min_rtt_timestamp_ + kMinRttExpiry)); - - if (min_rtt_expired || sample_rtt < min_rtt_ || min_rtt_.IsZero()) { - if (ShouldExtendMinRttExpiry()) { - min_rtt_expired = false; - } else { - min_rtt_ = sample_rtt; - } - min_rtt_timestamp_ = now; - // Reset since_last_probe_rtt fields. - min_rtt_since_last_probe_rtt_ = TimeDelta::PlusInfinity(); - app_limited_since_last_probe_rtt_ = false; - } - - return min_rtt_expired; -} - -bool BbrNetworkController::ShouldExtendMinRttExpiry() const { - if (config_.probe_rtt_disabled_if_app_limited && - app_limited_since_last_probe_rtt_) { - // Extend the current min_rtt if we've been app limited recently. - return true; - } - const bool min_rtt_increased_since_last_probe = - min_rtt_since_last_probe_rtt_ > min_rtt_ * kSimilarMinRttThreshold; - if (config_.probe_rtt_skipped_if_similar_rtt && - app_limited_since_last_probe_rtt_ && - !min_rtt_increased_since_last_probe) { - // Extend the current min_rtt if we've been app limited recently and an rtt - // has been measured in that time that's less than 12.5% more than the - // current min_rtt. - return true; - } - return false; -} - -void BbrNetworkController::UpdateGainCyclePhase(Timestamp now, - DataSize prior_in_flight, - bool has_losses) { - // In most cases, the cycle is advanced after an RTT passes. - bool should_advance_gain_cycling = now - last_cycle_start_ > GetMinRtt(); - - // If the pacing gain is above 1.0, the connection is trying to probe the - // bandwidth by increasing the number of bytes in flight to at least - // pacing_gain * BDP. Make sure that it actually reaches the target, as long - // as there are no losses suggesting that the buffers are not able to hold - // that much. 
- if (pacing_gain_ > 1.0 && !has_losses && - prior_in_flight < GetTargetCongestionWindow(pacing_gain_)) { - should_advance_gain_cycling = false; - } - - // If pacing gain is below 1.0, the connection is trying to drain the extra - // queue which could have been incurred by probing prior to it. If the number - // of bytes in flight falls down to the estimated BDP value earlier, conclude - // that the queue has been successfully drained and exit this cycle early. - if (pacing_gain_ < 1.0 && prior_in_flight <= GetTargetCongestionWindow(1)) { - should_advance_gain_cycling = true; - } - - if (should_advance_gain_cycling) { - cycle_current_offset_ = (cycle_current_offset_ + 1) % kGainCycleLength; - last_cycle_start_ = now; - // Stay in low gain mode until the target BDP is hit. - // Low gain mode will be exited immediately when the target BDP is achieved. - if (config_.fully_drain_queue && pacing_gain_ < 1 && - GetPacingGain(cycle_current_offset_) == 1 && - prior_in_flight > GetTargetCongestionWindow(1)) { - return; - } - pacing_gain_ = GetPacingGain(cycle_current_offset_); - } -} - -void BbrNetworkController::CheckIfFullBandwidthReached() { - if (last_sample_is_app_limited_) { - return; - } - - DataRate target = bandwidth_at_last_round_ * kStartupGrowthTarget; - if (BandwidthEstimate() >= target) { - bandwidth_at_last_round_ = BandwidthEstimate(); - rounds_without_bandwidth_gain_ = 0; - return; - } - - rounds_without_bandwidth_gain_++; - if ((rounds_without_bandwidth_gain_ >= config_.num_startup_rtts) || - (config_.exit_startup_on_loss && InRecovery())) { - is_at_full_bandwidth_ = true; - } -} - -void BbrNetworkController::MaybeExitStartupOrDrain( - const TransportPacketsFeedback& msg) { - TimeDelta exit_threshold = config_.exit_startup_rtt_threshold; - TimeDelta rtt_delta = last_rtt_ - min_rtt_; - if (mode_ == STARTUP && - (is_at_full_bandwidth_ || rtt_delta > exit_threshold)) { - if (rtt_delta > exit_threshold) - RTC_LOG(LS_INFO) << "Exiting startup due to rtt 
increase from: " - << ToString(min_rtt_) << " to:" << ToString(last_rtt_) - << " > " << ToString(min_rtt_ + exit_threshold); - mode_ = DRAIN; - pacing_gain_ = kDrainGain; - congestion_window_gain_ = kHighGain; - } - if (mode_ == DRAIN && msg.data_in_flight <= GetTargetCongestionWindow(1)) { - EnterProbeBandwidthMode(msg.feedback_time); - } -} - -void BbrNetworkController::MaybeEnterOrExitProbeRtt( - const TransportPacketsFeedback& msg, - bool is_round_start, - bool min_rtt_expired) { - if (min_rtt_expired && !exiting_quiescence_ && mode_ != PROBE_RTT) { - mode_ = PROBE_RTT; - pacing_gain_ = 1; - // Do not decide on the time to exit PROBE_RTT until the |bytes_in_flight| - // is at the target small value. - exit_probe_rtt_at_.reset(); - } - - if (mode_ == PROBE_RTT) { - sampler_->OnAppLimited(); - - if (!exit_probe_rtt_at_) { - // If the window has reached the appropriate size, schedule exiting - // PROBE_RTT. The CWND during PROBE_RTT is kMinimumCongestionWindow, but - // we allow an extra packet since QUIC checks CWND before sending a - // packet. - if (msg.data_in_flight < ProbeRttCongestionWindow() + kMaxPacketSize) { - exit_probe_rtt_at_ = msg.feedback_time + TimeDelta::ms(kProbeRttTimeMs); - probe_rtt_round_passed_ = false; - } - } else { - if (is_round_start) { - probe_rtt_round_passed_ = true; - } - if (msg.feedback_time >= *exit_probe_rtt_at_ && probe_rtt_round_passed_) { - min_rtt_timestamp_ = msg.feedback_time; - if (!is_at_full_bandwidth_) { - EnterStartupMode(); - } else { - EnterProbeBandwidthMode(msg.feedback_time); - } - } - } - } - - exiting_quiescence_ = false; -} - -void BbrNetworkController::UpdateRecoveryState(int64_t last_acked_packet, - bool has_losses, - bool is_round_start) { - // Exit recovery when there are no losses for a round. - if (has_losses) { - end_recovery_at_ = last_sent_packet_; - } - - switch (recovery_state_) { - case NOT_IN_RECOVERY: - // Enter conservation on the first loss. 
- if (has_losses) { - recovery_state_ = CONSERVATION; - if (mode_ == STARTUP) { - recovery_state_ = config_.initial_conservation_in_startup; - } - // This will cause the |recovery_window_| to be set to the correct - // value in CalculateRecoveryWindow(). - recovery_window_ = DataSize::Zero(); - // Since the conservation phase is meant to be lasting for a whole - // round, extend the current round as if it were started right now. - current_round_trip_end_ = last_sent_packet_; - } - break; - - case CONSERVATION: - case MEDIUM_GROWTH: - if (is_round_start) { - recovery_state_ = GROWTH; - } - ABSL_FALLTHROUGH_INTENDED; - case GROWTH: - // Exit recovery if appropriate. - if (!has_losses && - (!end_recovery_at_ || last_acked_packet > *end_recovery_at_)) { - recovery_state_ = NOT_IN_RECOVERY; - } - - break; - } -} - -void BbrNetworkController::UpdateAckAggregationBytes( - Timestamp ack_time, - DataSize newly_acked_bytes) { - if (!aggregation_epoch_start_time_) { - RTC_LOG(LS_ERROR) - << "Received feedback before information about sent packets."; - RTC_DCHECK(aggregation_epoch_start_time_.has_value()); - return; - } - // Compute how many bytes are expected to be delivered, assuming max bandwidth - // is correct. - DataSize expected_bytes_acked = - max_bandwidth_.GetBest() * (ack_time - *aggregation_epoch_start_time_); - // Reset the current aggregation epoch as soon as the ack arrival rate is less - // than or equal to the max bandwidth. - if (aggregation_epoch_bytes_ <= expected_bytes_acked) { - // Reset to start measuring a new aggregation epoch. - aggregation_epoch_bytes_ = newly_acked_bytes; - aggregation_epoch_start_time_ = ack_time; - return; - } - - // Compute how many extra bytes were delivered vs max bandwidth. - // Include the bytes most recently acknowledged to account for stretch acks. 
- aggregation_epoch_bytes_ += newly_acked_bytes; - max_ack_height_.Update(aggregation_epoch_bytes_ - expected_bytes_acked, - round_trip_count_); -} - -void BbrNetworkController::CalculatePacingRate() { - if (BandwidthEstimate().IsZero()) { - return; - } - - DataRate target_rate = pacing_gain_ * BandwidthEstimate(); - if (config_.rate_based_recovery && InRecovery()) { - pacing_rate_ = pacing_gain_ * max_bandwidth_.GetThirdBest(); - } - if (is_at_full_bandwidth_) { - pacing_rate_ = target_rate; - return; - } - - // Pace at the rate of initial_window / RTT as soon as RTT measurements are - // available. - if (pacing_rate_.IsZero() && !rtt_stats_.min_rtt().IsZero()) { - pacing_rate_ = initial_congestion_window_ / rtt_stats_.min_rtt(); - return; - } - // Slow the pacing rate in STARTUP once loss has ever been detected. - const bool has_ever_detected_loss = end_recovery_at_.has_value(); - if (config_.slower_startup && has_ever_detected_loss) { - pacing_rate_ = kStartupAfterLossGain * BandwidthEstimate(); - return; - } - - // Do not decrease the pacing rate during the startup. - pacing_rate_ = std::max(pacing_rate_, target_rate); -} - -void BbrNetworkController::CalculateCongestionWindow(DataSize bytes_acked) { - if (mode_ == PROBE_RTT) { - return; - } - - DataSize target_window = GetTargetCongestionWindow(congestion_window_gain_); - - if (rtt_variance_weight_ > 0.f && !BandwidthEstimate().IsZero()) { - target_window += rtt_variance_weight_ * rtt_stats_.mean_deviation() * - BandwidthEstimate(); - } else if (max_aggregation_bytes_multiplier_ > 0 && is_at_full_bandwidth_) { - // Subtracting only half the bytes_acked_since_queue_drained ensures sending - // doesn't completely stop for a long period of time if the queue hasn't - // been drained recently. 
- if (max_aggregation_bytes_multiplier_ * max_ack_height_.GetBest() > - bytes_acked_since_queue_drained_ / 2) { - target_window += - max_aggregation_bytes_multiplier_ * max_ack_height_.GetBest() - - bytes_acked_since_queue_drained_ / 2; - } - } else if (is_at_full_bandwidth_) { - target_window += max_ack_height_.GetBest(); - } - - // Instead of immediately setting the target CWND as the new one, BBR grows - // the CWND towards |target_window| by only increasing it |bytes_acked| at a - // time. - if (is_at_full_bandwidth_) { - congestion_window_ = - std::min(target_window, congestion_window_ + bytes_acked); - } else if (congestion_window_ < target_window || - sampler_->total_data_acked() < initial_congestion_window_) { - // If the connection is not yet out of startup phase, do not decrease the - // window. - congestion_window_ = congestion_window_ + bytes_acked; - } - - // Enforce the limits on the congestion window. - congestion_window_ = std::max(congestion_window_, min_congestion_window_); - congestion_window_ = std::min(congestion_window_, max_congestion_window_); -} - -void BbrNetworkController::CalculateRecoveryWindow(DataSize bytes_acked, - DataSize bytes_lost, - DataSize bytes_in_flight) { - if (config_.rate_based_recovery || - (config_.rate_based_startup && mode_ == STARTUP)) { - return; - } - - if (recovery_state_ == NOT_IN_RECOVERY) { - return; - } - - // Set up the initial recovery window. - if (recovery_window_.IsZero()) { - recovery_window_ = bytes_in_flight + bytes_acked; - recovery_window_ = std::max(min_congestion_window_, recovery_window_); - return; - } - - // Remove losses from the recovery window, while accounting for a potential - // integer underflow. - recovery_window_ = recovery_window_ >= bytes_lost - ? recovery_window_ - bytes_lost - : kMaxSegmentSize; - - // In CONSERVATION mode, just subtracting losses is sufficient. In GROWTH, - // release additional |bytes_acked| to achieve a slow-start-like behavior. 
- // In MEDIUM_GROWTH, release |bytes_acked| / 2 to split the difference. - if (recovery_state_ == GROWTH) { - recovery_window_ += bytes_acked; - } else if (recovery_state_ == MEDIUM_GROWTH) { - recovery_window_ += bytes_acked / 2; - } - - // Sanity checks. Ensure that we always allow to send at least - // |bytes_acked| in response. - recovery_window_ = std::max(recovery_window_, bytes_in_flight + bytes_acked); - recovery_window_ = std::max(min_congestion_window_, recovery_window_); -} - -void BbrNetworkController::OnApplicationLimited(DataSize bytes_in_flight) { - if (bytes_in_flight >= GetCongestionWindow()) { - return; - } - - app_limited_since_last_probe_rtt_ = true; - sampler_->OnAppLimited(); - - RTC_LOG(LS_INFO) << "Becoming application limited. Last sent packet: " - << last_sent_packet_ - << ", CWND: " << ToString(GetCongestionWindow()); -} -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/bbr_network_controller.h b/modules/congestion_controller/bbr/bbr_network_controller.h deleted file mode 100644 index 6114970405..0000000000 --- a/modules/congestion_controller/bbr/bbr_network_controller.h +++ /dev/null @@ -1,397 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// BBR (Bottleneck Bandwidth and RTT) congestion control algorithm. -// Based on the Quic BBR implementation in Chromium. 
- -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_BBR_NETWORK_CONTROLLER_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_BBR_NETWORK_CONTROLLER_H_ - -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/transport/network_control.h" -#include "api/transport/network_types.h" -#include "modules/congestion_controller/bbr/bandwidth_sampler.h" -#include "modules/congestion_controller/bbr/loss_rate_filter.h" -#include "modules/congestion_controller/bbr/rtt_stats.h" -#include "modules/congestion_controller/bbr/windowed_filter.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/experiments/field_trial_units.h" -#include "rtc_base/random.h" - -namespace webrtc { -namespace bbr { - -typedef int64_t BbrRoundTripCount; - -// BbrSender implements BBR congestion control algorithm. BBR aims to estimate -// the current available Bottleneck Bandwidth and RTT (hence the name), and -// regulates the pacing rate and the size of the congestion window based on -// those signals. -// -// BBR relies on pacing in order to function properly. Do not use BBR when -// pacing is disabled. -class BbrNetworkController : public NetworkControllerInterface { - public: - enum Mode { - // Startup phase of the connection. - STARTUP, - // After achieving the highest possible bandwidth during the startup, lower - // the pacing rate in order to drain the queue. - DRAIN, - // Cruising mode. - PROBE_BW, - // Temporarily slow down sending in order to empty the buffer and measure - // the real minimum RTT. - PROBE_RTT, - }; - - // Indicates how the congestion control limits the amount of bytes in flight. - enum RecoveryState { - // Do not limit. - NOT_IN_RECOVERY = 0, - // Allow an extra outstanding byte for each byte acknowledged. - CONSERVATION = 1, - // Allow 1.5 extra outstanding bytes for each byte acknowledged. - MEDIUM_GROWTH = 2, - // Allow two extra outstanding bytes for each byte acknowledged (slow - // start). 
- GROWTH = 3 - }; - struct BbrControllerConfig { - FieldTrialParameter probe_bw_pacing_gain_offset; - FieldTrialParameter encoder_rate_gain; - FieldTrialParameter encoder_rate_gain_in_probe_rtt; - // RTT delta to determine if startup should be exited due to increased RTT. - FieldTrialParameter exit_startup_rtt_threshold; - - FieldTrialParameter initial_congestion_window; - FieldTrialParameter min_congestion_window; - FieldTrialParameter max_congestion_window; - - FieldTrialParameter probe_rtt_congestion_window_gain; - FieldTrialParameter pacing_rate_as_target; - - // Configurable in QUIC BBR: - FieldTrialParameter exit_startup_on_loss; - // The number of RTTs to stay in STARTUP mode. Defaults to 3. - FieldTrialParameter num_startup_rtts; - // When true, recovery is rate based rather than congestion window based. - FieldTrialParameter rate_based_recovery; - FieldTrialParameter max_aggregation_bytes_multiplier; - // When true, pace at 1.5x and disable packet conservation in STARTUP. - FieldTrialParameter slower_startup; - // When true, disables packet conservation in STARTUP. - FieldTrialParameter rate_based_startup; - // Used as the initial packet conservation mode when first entering - // recovery. - FieldTrialEnum initial_conservation_in_startup; - // If true, will not exit low gain mode until bytes_in_flight drops below - // BDP or it's time for high gain mode. - FieldTrialParameter fully_drain_queue; - - FieldTrialParameter max_ack_height_window_multiplier; - // If true, use a CWND of 0.75*BDP during probe_rtt instead of 4 packets. - FieldTrialParameter probe_rtt_based_on_bdp; - // If true, skip probe_rtt and update the timestamp of the existing min_rtt - // to now if min_rtt over the last cycle is within 12.5% of the current - // min_rtt. Even if the min_rtt is 12.5% too low, the 25% gain cycling and - // 2x CWND gain should overcome an overly small min_rtt. 
- FieldTrialParameter probe_rtt_skipped_if_similar_rtt; - // If true, disable PROBE_RTT entirely as long as the connection was - // recently app limited. - FieldTrialParameter probe_rtt_disabled_if_app_limited; - - explicit BbrControllerConfig(std::string field_trial); - ~BbrControllerConfig(); - BbrControllerConfig(const BbrControllerConfig&); - static BbrControllerConfig FromTrial(); - }; - - // Debug state can be exported in order to troubleshoot potential congestion - // control issues. - struct DebugState { - explicit DebugState(const BbrNetworkController& sender); - DebugState(const DebugState& state); - - Mode mode; - DataRate max_bandwidth; - BbrRoundTripCount round_trip_count; - int gain_cycle_index; - DataSize congestion_window; - - bool is_at_full_bandwidth; - DataRate bandwidth_at_last_round; - BbrRoundTripCount rounds_without_bandwidth_gain; - - TimeDelta min_rtt; - Timestamp min_rtt_timestamp; - - RecoveryState recovery_state; - DataSize recovery_window; - - bool last_sample_is_app_limited; - int64_t end_of_app_limited_phase; - }; - - explicit BbrNetworkController(NetworkControllerConfig config); - ~BbrNetworkController() override; - - // NetworkControllerInterface - NetworkControlUpdate OnNetworkAvailability(NetworkAvailability msg) override; - NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange msg) override; - NetworkControlUpdate OnProcessInterval(ProcessInterval msg) override; - NetworkControlUpdate OnSentPacket(SentPacket msg) override; - NetworkControlUpdate OnStreamsConfig(StreamsConfig msg) override; - NetworkControlUpdate OnTargetRateConstraints( - TargetRateConstraints msg) override; - NetworkControlUpdate OnTransportPacketsFeedback( - TransportPacketsFeedback msg) override; - - // Part of remote bitrate estimation api, not implemented for BBR - NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport msg) override; - NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate msg) override; - NetworkControlUpdate 
OnTransportLossReport(TransportLossReport msg) override; - NetworkControlUpdate OnReceivedPacket(ReceivedPacket msg) override; - NetworkControlUpdate OnNetworkStateEstimate( - NetworkStateEstimate msg) override; - - NetworkControlUpdate CreateRateUpdate(Timestamp at_time) const; - - private: - void Reset(); - bool InSlowStart() const; - bool InRecovery() const; - bool IsProbingForMoreBandwidth() const; - - bool CanSend(DataSize bytes_in_flight); - DataRate PacingRate() const; - DataRate BandwidthEstimate() const; - DataSize GetCongestionWindow() const; - - double GetPacingGain(int round_offset) const; - - void OnApplicationLimited(DataSize bytes_in_flight); - // End implementation of SendAlgorithmInterface. - - typedef WindowedFilter, - BbrRoundTripCount, - BbrRoundTripCount> - MaxBandwidthFilter; - - typedef WindowedFilter, - BbrRoundTripCount, - BbrRoundTripCount> - MaxAckDelayFilter; - - typedef WindowedFilter, - BbrRoundTripCount, - BbrRoundTripCount> - MaxAckHeightFilter; - - // Returns the current estimate of the RTT of the connection. Outside of the - // edge cases, this is minimum RTT. - TimeDelta GetMinRtt() const; - // Returns whether the connection has achieved full bandwidth required to exit - // the slow start. - bool IsAtFullBandwidth() const; - // Computes the target congestion window using the specified gain. - DataSize GetTargetCongestionWindow(double gain) const; - // The target congestion window during PROBE_RTT. - DataSize ProbeRttCongestionWindow() const; - // Returns true if the current min_rtt should be kept and we should not enter - // PROBE_RTT immediately. - bool ShouldExtendMinRttExpiry() const; - - // Enters the STARTUP mode. - void EnterStartupMode(); - // Enters the PROBE_BW mode. - void EnterProbeBandwidthMode(Timestamp now); - - // Discards the lost packets from BandwidthSampler state. - void DiscardLostPackets(const std::vector& lost_packets); - // Updates the round-trip counter if a round-trip has passed. 
Returns true if - // the counter has been advanced. - // |last_acked_packet| is the sequence number of the last acked packet. - bool UpdateRoundTripCounter(int64_t last_acked_packet); - // Updates the current bandwidth and min_rtt estimate based on the samples for - // the received acknowledgements. Returns true if min_rtt has expired. - bool UpdateBandwidthAndMinRtt(Timestamp now, - const std::vector& acked_packets); - // Updates the current gain used in PROBE_BW mode. - void UpdateGainCyclePhase(Timestamp now, - DataSize prior_in_flight, - bool has_losses); - // Tracks for how many round-trips the bandwidth has not increased - // significantly. - void CheckIfFullBandwidthReached(); - // Transitions from STARTUP to DRAIN and from DRAIN to PROBE_BW if - // appropriate. - void MaybeExitStartupOrDrain(const TransportPacketsFeedback&); - // Decides whether to enter or exit PROBE_RTT. - void MaybeEnterOrExitProbeRtt(const TransportPacketsFeedback& msg, - bool is_round_start, - bool min_rtt_expired); - // Determines whether BBR needs to enter, exit or advance state of the - // recovery. - void UpdateRecoveryState(int64_t last_acked_packet, - bool has_losses, - bool is_round_start); - - // Updates the ack aggregation max filter in bytes. - void UpdateAckAggregationBytes(Timestamp ack_time, - DataSize newly_acked_bytes); - - // Determines the appropriate pacing rate for the connection. - void CalculatePacingRate(); - // Determines the appropriate congestion window for the connection. - void CalculateCongestionWindow(DataSize bytes_acked); - // Determines the approriate window that constrains the - // in-flight during recovery. 
- void CalculateRecoveryWindow(DataSize bytes_acked, - DataSize bytes_lost, - DataSize bytes_in_flight); - - BbrControllerConfig config_; - - RttStats rtt_stats_; - webrtc::Random random_; - LossRateFilter loss_rate_; - - absl::optional constraints_; - - Mode mode_; - - // Bandwidth sampler provides BBR with the bandwidth measurements at - // individual points. - std::unique_ptr sampler_; - - // The number of the round trips that have occurred during the connection. - BbrRoundTripCount round_trip_count_ = 0; - - // The packet number of the most recently sent packet. - int64_t last_sent_packet_; - // Acknowledgement of any packet after |current_round_trip_end_| will cause - // the round trip counter to advance. - int64_t current_round_trip_end_; - - // The filter that tracks the maximum bandwidth over the multiple recent - // round-trips. - MaxBandwidthFilter max_bandwidth_; - - DataRate default_bandwidth_; - - // Tracks the maximum number of bytes acked faster than the sending rate. - MaxAckHeightFilter max_ack_height_; - - // The time this aggregation started and the number of bytes acked during it. - absl::optional aggregation_epoch_start_time_; - DataSize aggregation_epoch_bytes_; - - // The number of bytes acknowledged since the last time bytes in flight - // dropped below the target window. - DataSize bytes_acked_since_queue_drained_; - - // The muliplier for calculating the max amount of extra CWND to add to - // compensate for ack aggregation. - double max_aggregation_bytes_multiplier_; - - // Minimum RTT estimate. Automatically expires within 10 seconds (and - // triggers PROBE_RTT mode) if no new value is sampled during that period. - TimeDelta min_rtt_; - TimeDelta last_rtt_; - // The time at which the current value of |min_rtt_| was assigned. - Timestamp min_rtt_timestamp_; - - // The maximum allowed number of bytes in flight. - DataSize congestion_window_; - - // The initial value of the |congestion_window_|. 
- DataSize initial_congestion_window_; - - // The smallest value the |congestion_window_| can achieve. - DataSize min_congestion_window_; - - // The largest value the |congestion_window_| can achieve. - DataSize max_congestion_window_; - - // The current pacing rate of the connection. - DataRate pacing_rate_; - - // The gain currently applied to the pacing rate. - double pacing_gain_; - // The gain currently applied to the congestion window. - double congestion_window_gain_; - - // The gain used for the congestion window during PROBE_BW. Latched from - // quic_bbr_cwnd_gain flag. - const double congestion_window_gain_constant_; - // The coefficient by which mean RTT variance is added to the congestion - // window. Latched from quic_bbr_rtt_variation_weight flag. - const double rtt_variance_weight_; - - // Number of round-trips in PROBE_BW mode, used for determining the current - // pacing gain cycle. - int cycle_current_offset_; - // The time at which the last pacing gain cycle was started. - Timestamp last_cycle_start_; - - // Indicates whether the connection has reached the full bandwidth mode. - bool is_at_full_bandwidth_; - // Number of rounds during which there was no significant bandwidth increase. - BbrRoundTripCount rounds_without_bandwidth_gain_; - // The bandwidth compared to which the increase is measured. - DataRate bandwidth_at_last_round_; - - // Set to true upon exiting quiescence. - bool exiting_quiescence_; - - // Time at which PROBE_RTT has to be exited. Setting it to zero indicates - // that the time is yet unknown as the number of packets in flight has not - // reached the required value. - absl::optional exit_probe_rtt_at_; - // Indicates whether a round-trip has passed since PROBE_RTT became active. - bool probe_rtt_round_passed_; - - // Indicates whether the most recent bandwidth sample was marked as - // app-limited. - bool last_sample_is_app_limited_; - - // Current state of recovery. 
- RecoveryState recovery_state_; - // Receiving acknowledgement of a packet after |end_recovery_at_| will cause - // BBR to exit the recovery mode. A set value indicates at least one - // loss has been detected, so it must not be reset. - absl::optional end_recovery_at_; - // A window used to limit the number of bytes in flight during loss recovery. - DataSize recovery_window_; - - bool app_limited_since_last_probe_rtt_; - TimeDelta min_rtt_since_last_probe_rtt_; - - RTC_DISALLOW_COPY_AND_ASSIGN(BbrNetworkController); -}; - -// Used in log output -std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& os, // no-presubmit-check TODO(webrtc:8982) - const BbrNetworkController::Mode& mode); - -} // namespace bbr -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_BBR_BBR_NETWORK_CONTROLLER_H_ diff --git a/modules/congestion_controller/bbr/bbr_network_controller_unittest.cc b/modules/congestion_controller/bbr/bbr_network_controller_unittest.cc deleted file mode 100644 index 3e5403a313..0000000000 --- a/modules/congestion_controller/bbr/bbr_network_controller_unittest.cc +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/congestion_controller/bbr/bbr_network_controller.h" - -#include -#include - -#include "modules/congestion_controller/bbr/bbr_factory.h" -#include "test/gmock.h" -#include "test/gtest.h" -#include "test/scenario/scenario.h" - -using ::testing::_; -using ::testing::AllOf; -using ::testing::Field; -using ::testing::Ge; -using ::testing::Le; -using ::testing::Matcher; -using ::testing::NiceMock; -using ::testing::Property; -using ::testing::StrictMock; - -namespace webrtc { -namespace test { -namespace { - -const DataRate kInitialBitrate = DataRate::kbps(60); -const Timestamp kDefaultStartTime = Timestamp::ms(10000000); - -constexpr double kDataRateMargin = 0.3; -constexpr double kMinDataRateFactor = 1 - kDataRateMargin; -constexpr double kMaxDataRateFactor = 1 + kDataRateMargin; -inline Matcher TargetRateCloseTo(DataRate rate) { - DataRate min_data_rate = rate * kMinDataRateFactor; - DataRate max_data_rate = rate * kMaxDataRateFactor; - return Field(&TargetTransferRate::target_rate, - AllOf(Ge(min_data_rate), Le(max_data_rate))); -} - -NetworkControllerConfig InitialConfig( - int starting_bandwidth_kbps = kInitialBitrate.kbps(), - int min_data_rate_kbps = 0, - int max_data_rate_kbps = 5 * kInitialBitrate.kbps()) { - NetworkControllerConfig config; - config.constraints.at_time = kDefaultStartTime; - config.constraints.min_data_rate = DataRate::kbps(min_data_rate_kbps); - config.constraints.max_data_rate = DataRate::kbps(max_data_rate_kbps); - config.constraints.starting_rate = DataRate::kbps(starting_bandwidth_kbps); - return config; -} - -ProcessInterval InitialProcessInterval() { - ProcessInterval process_interval; - process_interval.at_time = kDefaultStartTime; - return process_interval; -} - -NetworkRouteChange CreateRouteChange(Timestamp at_time, - DataRate start_rate, - DataRate min_rate = DataRate::Zero(), - DataRate max_rate = DataRate::Infinity()) { - NetworkRouteChange route_change; - route_change.at_time = at_time; - 
route_change.constraints.at_time = at_time; - route_change.constraints.min_data_rate = min_rate; - route_change.constraints.max_data_rate = max_rate; - route_change.constraints.starting_rate = start_rate; - return route_change; -} -} // namespace - -class BbrNetworkControllerTest : public ::testing::Test { - protected: - BbrNetworkControllerTest() {} - ~BbrNetworkControllerTest() override {} -}; - -TEST_F(BbrNetworkControllerTest, SendsConfigurationOnFirstProcess) { - std::unique_ptr controller_; - controller_.reset(new bbr::BbrNetworkController(InitialConfig())); - - NetworkControlUpdate update = - controller_->OnProcessInterval(InitialProcessInterval()); - EXPECT_THAT(*update.target_rate, TargetRateCloseTo(kInitialBitrate)); - EXPECT_THAT(*update.pacer_config, - Property(&PacerConfig::data_rate, Ge(kInitialBitrate))); - EXPECT_THAT(*update.congestion_window, Property(&DataSize::IsFinite, true)); -} - -TEST_F(BbrNetworkControllerTest, SendsConfigurationOnNetworkRouteChanged) { - std::unique_ptr controller_; - controller_.reset(new bbr::BbrNetworkController(InitialConfig())); - - NetworkControlUpdate update = - controller_->OnProcessInterval(InitialProcessInterval()); - EXPECT_TRUE(update.target_rate.has_value()); - EXPECT_TRUE(update.pacer_config.has_value()); - EXPECT_TRUE(update.congestion_window.has_value()); - - DataRate new_bitrate = DataRate::bps(200000); - update = controller_->OnNetworkRouteChange( - CreateRouteChange(kDefaultStartTime, new_bitrate)); - EXPECT_THAT(*update.target_rate, TargetRateCloseTo(new_bitrate)); - EXPECT_THAT(*update.pacer_config, - Property(&PacerConfig::data_rate, Ge(kInitialBitrate))); - EXPECT_TRUE(update.congestion_window.has_value()); -} - -// Bandwidth estimation is updated when feedbacks are received. -// Feedbacks which show an increasing delay cause the estimation to be reduced. 
-TEST_F(BbrNetworkControllerTest, UpdatesTargetSendRate) { - BbrNetworkControllerFactory factory; - Scenario s("bbr_unit/updates_rate", false); - CallClientConfig config; - config.transport.cc_factory = &factory; - config.transport.rates.min_rate = DataRate::kbps(10); - config.transport.rates.max_rate = DataRate::kbps(1500); - config.transport.rates.start_rate = DataRate::kbps(300); - auto send_net = s.CreateMutableSimulationNode([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(500); - c->delay = TimeDelta::ms(100); - c->loss_rate = 0.0; - }); - auto ret_net = s.CreateMutableSimulationNode( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(100); }); - auto* client = s.CreateClient("send", config); - const DataSize kOverhead = DataSize::bytes(38); // IPV4 + UDP + SRTP - auto routes = s.CreateRoutes(client, {send_net->node()}, kOverhead, - s.CreateClient("recv", CallClientConfig()), - {ret_net->node()}, kOverhead); - s.CreateVideoStream(routes->forward(), VideoStreamConfig()); - - s.RunFor(TimeDelta::seconds(25)); - EXPECT_NEAR(client->send_bandwidth().kbps(), 450, 100); - - send_net->UpdateConfig([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(800); - c->delay = TimeDelta::ms(100); - }); - - s.RunFor(TimeDelta::seconds(20)); - EXPECT_NEAR(client->send_bandwidth().kbps(), 750, 150); - - send_net->UpdateConfig([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(200); - c->delay = TimeDelta::ms(200); - }); - ret_net->UpdateConfig( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(200); }); - - s.RunFor(TimeDelta::seconds(35)); - EXPECT_NEAR(client->send_bandwidth().kbps(), 170, 50); -} - -} // namespace test -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/data_transfer_tracker.cc b/modules/congestion_controller/bbr/data_transfer_tracker.cc deleted file mode 100644 index 9d811475b4..0000000000 --- a/modules/congestion_controller/bbr/data_transfer_tracker.cc +++ /dev/null @@ -1,83 
+0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/bbr/data_transfer_tracker.h" - -#include "rtc_base/checks.h" - -namespace webrtc { -namespace bbr { - -DataTransferTracker::DataTransferTracker() {} - -DataTransferTracker::~DataTransferTracker() {} - -void DataTransferTracker::AddSample(DataSize size_delta, - Timestamp send_time, - Timestamp ack_time) { - size_sum_ += size_delta; - - RTC_DCHECK(samples_.empty() || ack_time >= samples_.back().ack_time); - - if (!samples_.empty() && ack_time == samples_.back().ack_time) { - samples_.back().send_time = send_time; - samples_.back().size_sum = size_sum_; - } else { - Sample new_sample; - new_sample.ack_time = ack_time; - new_sample.send_time = send_time; - new_sample.size_delta = size_delta; - new_sample.size_sum = size_sum_; - samples_.push_back(new_sample); - } -} - -void DataTransferTracker::ClearOldSamples(Timestamp excluding_end) { - while (!samples_.empty() && samples_.front().ack_time < excluding_end) { - samples_.pop_front(); - } -} - -DataTransferTracker::Result DataTransferTracker::GetRatesByAckTime( - Timestamp covered_start, - Timestamp including_end) { - Result res; - // Last sample before covered_start. - const Sample* window_begin = nullptr; - // Sample at end time or first sample after end time- - const Sample* window_end = nullptr; - // To handle the case when the first sample is after covered_start. - if (samples_.front().ack_time < including_end) - window_begin = &samples_.front(); - // To handle the case when the last sample is before including_end. 
- if (samples_.back().ack_time > covered_start) - window_end = &samples_.back(); - for (const auto& sample : samples_) { - if (sample.ack_time < covered_start) { - window_begin = &sample; - } else if (sample.ack_time >= including_end) { - window_end = &sample; - break; - } - } - if (window_begin != nullptr && window_end != nullptr) { - res.acked_data = window_end->size_sum - window_begin->size_sum; - res.send_timespan = window_end->send_time - window_begin->send_time; - res.ack_timespan = window_end->ack_time - window_begin->ack_time; - } else { - res.acked_data = DataSize::Zero(); - res.ack_timespan = including_end - covered_start; - res.send_timespan = TimeDelta::Zero(); - } - return res; -} - -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/data_transfer_tracker.h b/modules/congestion_controller/bbr/data_transfer_tracker.h deleted file mode 100644 index 29dd7a3235..0000000000 --- a/modules/congestion_controller/bbr/data_transfer_tracker.h +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_DATA_TRANSFER_TRACKER_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_DATA_TRANSFER_TRACKER_H_ - -#include - -#include "api/units/data_size.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" - -namespace webrtc { -namespace bbr { -class DataTransferTracker { - public: - struct Result { - TimeDelta ack_timespan = TimeDelta::Zero(); - TimeDelta send_timespan = TimeDelta::Zero(); - DataSize acked_data = DataSize::Zero(); - }; - DataTransferTracker(); - ~DataTransferTracker(); - void AddSample(DataSize size_delta, Timestamp send_time, Timestamp ack_time); - void ClearOldSamples(Timestamp excluding_end); - - // Get the average data rate in the window that starts with the last ack which - // comes before covered_start and ends at the first ack that comes after or at - // including_end. - Result GetRatesByAckTime(Timestamp covered_start, Timestamp including_end); - - private: - struct Sample { - Timestamp ack_time = Timestamp::PlusInfinity(); - Timestamp send_time = Timestamp::PlusInfinity(); - DataSize size_delta = DataSize::Zero(); - DataSize size_sum = DataSize::Zero(); - }; - std::deque samples_; - DataSize size_sum_ = DataSize::Zero(); -}; -} // namespace bbr -} // namespace webrtc -#endif // MODULES_CONGESTION_CONTROLLER_BBR_DATA_TRANSFER_TRACKER_H_ diff --git a/modules/congestion_controller/bbr/data_transfer_tracker_unittest.cc b/modules/congestion_controller/bbr/data_transfer_tracker_unittest.cc deleted file mode 100644 index a60343dba0..0000000000 --- a/modules/congestion_controller/bbr/data_transfer_tracker_unittest.cc +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/bbr/data_transfer_tracker.h" - -#include - -#include "test/gtest.h" - -namespace webrtc { -namespace bbr { -namespace test { -namespace { -struct ResultForTest { - int64_t ack_span_ms; - int64_t send_span_ms; - int64_t acked_bytes; -}; -class DataTransferTrackerForTest : public DataTransferTracker { - public: - void AddSample(int bytes, int send_time_ms, int ack_time_ms) { - DataTransferTracker::AddSample(DataSize::bytes(bytes), - Timestamp::ms(send_time_ms), - Timestamp::ms(ack_time_ms)); - } - - void ClearOldSamples(int excluding_end_ms) { - DataTransferTracker::ClearOldSamples(Timestamp::ms(excluding_end_ms)); - } - ResultForTest GetRatesByAckTime(int covered_start_ms, int including_end_ms) { - auto result = DataTransferTracker::GetRatesByAckTime( - Timestamp::ms(covered_start_ms), Timestamp::ms(including_end_ms)); - return ResultForTest{result.ack_timespan.ms(), result.send_timespan.ms(), - result.acked_data.bytes()}; - } -}; - -} // namespace - -TEST(DataTransferTrackerTest, TracksData) { - DataTransferTrackerForTest calc; - // Since we dont have any previous reference for the first packet, it won't be - // counted. 
- calc.AddSample(5555, 100000, 100100); - calc.AddSample(1000, 100020, 100120); - calc.AddSample(1000, 100040, 100140); - calc.AddSample(1000, 100060, 100160); - - auto result = calc.GetRatesByAckTime(100000, 100200); - EXPECT_EQ(result.acked_bytes, 3000); - EXPECT_EQ(result.ack_span_ms, 60); - EXPECT_EQ(result.send_span_ms, 60); -} - -TEST(DataTransferTrackerTest, CoversStartTime) { - DataTransferTrackerForTest calc; - calc.AddSample(5555, 100000, 100100); - calc.AddSample(1000, 100020, 100120); - calc.AddSample(1000, 100040, 100140); - calc.AddSample(1000, 100060, 100160); - calc.AddSample(1000, 100080, 100180); - - auto result = calc.GetRatesByAckTime(100140, 100200); - EXPECT_EQ(result.acked_bytes, 3000); - EXPECT_EQ(result.ack_span_ms, 60); - EXPECT_EQ(result.send_span_ms, 60); -} - -TEST(DataTransferTrackerTest, IncludesEndExcludesPastEnd) { - DataTransferTrackerForTest calc; - calc.AddSample(5555, 100000, 100100); - calc.AddSample(1000, 100020, 100120); - calc.AddSample(1000, 100040, 100140); - calc.AddSample(1000, 100060, 100160); - calc.AddSample(1000, 100080, 100180); - - auto result = calc.GetRatesByAckTime(100120, 100160); - EXPECT_EQ(result.acked_bytes, 3000); - EXPECT_EQ(result.ack_span_ms, 60); - EXPECT_EQ(result.send_span_ms, 60); -} - -TEST(DataTransferTrackerTest, AccumulatesDuplicates) { - DataTransferTrackerForTest calc; - calc.AddSample(5555, 100000, 100100); - // Two packets at same time, should be accumulated. - calc.AddSample(1000, 100020, 100120); - calc.AddSample(1000, 100020, 100120); - calc.AddSample(1000, 100060, 100160); - // Two packets at same time, should be accumulated. 
- calc.AddSample(1000, 100100, 100200); - calc.AddSample(1000, 100100, 100200); - calc.AddSample(1000, 100120, 100220); - - auto result = calc.GetRatesByAckTime(100120, 100200); - EXPECT_EQ(result.acked_bytes, 5000); - EXPECT_EQ(result.ack_span_ms, 100); - EXPECT_EQ(result.send_span_ms, 100); -} - -TEST(DataTransferTrackerTest, RemovesOldData) { - DataTransferTrackerForTest calc; - calc.AddSample(5555, 100000, 100100); - calc.AddSample(1000, 100020, 100120); - calc.AddSample(1000, 100040, 100140); - calc.AddSample(1000, 100060, 100160); - calc.AddSample(1000, 100080, 100180); - { - auto result = calc.GetRatesByAckTime(100120, 100200); - EXPECT_EQ(result.acked_bytes, 4000); - EXPECT_EQ(result.ack_span_ms, 80); - EXPECT_EQ(result.send_span_ms, 80); - } - // Note that this operation means that the packet acked at 100140 will not be - // counted any more, just used as time reference. - calc.ClearOldSamples(100140); - { - auto result = calc.GetRatesByAckTime(100120, 100200); - EXPECT_EQ(result.acked_bytes, 2000); - EXPECT_EQ(result.ack_span_ms, 40); - EXPECT_EQ(result.send_span_ms, 40); - } -} -} // namespace test -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/loss_rate_filter.cc b/modules/congestion_controller/bbr/loss_rate_filter.cc deleted file mode 100644 index 302e64a429..0000000000 --- a/modules/congestion_controller/bbr/loss_rate_filter.cc +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/congestion_controller/bbr/loss_rate_filter.h" - -namespace webrtc { -namespace bbr { -namespace { -// From SendSideBandwidthEstimation. -const int kLimitNumPackets = 20; -// From RTCPSender video report interval. -const int64_t kUpdateIntervalMs = 1000; -} // namespace - -LossRateFilter::LossRateFilter() - : lost_packets_since_last_loss_update_(0), - expected_packets_since_last_loss_update_(0), - loss_rate_estimate_(0.0), - next_loss_update_ms_(0) {} - -void LossRateFilter::UpdateWithLossStatus(int64_t feedback_time, - int packets_sent, - int packets_lost) { - lost_packets_since_last_loss_update_ += packets_lost; - expected_packets_since_last_loss_update_ += packets_sent; - - if (feedback_time >= next_loss_update_ms_ && - expected_packets_since_last_loss_update_ >= kLimitNumPackets) { - int64_t lost = lost_packets_since_last_loss_update_; - int64_t expected = expected_packets_since_last_loss_update_; - loss_rate_estimate_ = static_cast(lost) / expected; - next_loss_update_ms_ = feedback_time + kUpdateIntervalMs; - lost_packets_since_last_loss_update_ = 0; - expected_packets_since_last_loss_update_ = 0; - } -} - -double LossRateFilter::GetLossRate() const { - return loss_rate_estimate_; -} -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/loss_rate_filter.h b/modules/congestion_controller/bbr/loss_rate_filter.h deleted file mode 100644 index 6a89c73e46..0000000000 --- a/modules/congestion_controller/bbr/loss_rate_filter.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_LOSS_RATE_FILTER_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_LOSS_RATE_FILTER_H_ - -#include - -namespace webrtc { -namespace bbr { - -// Loss rate filter based on the implementation in SendSideBandwidthEstimation -// and the RTCPSender receiver report interval for video. -class LossRateFilter { - public: - LossRateFilter(); - void UpdateWithLossStatus(int64_t feedback_time_ms, - int packets_sent, - int packets_lost); - double GetLossRate() const; - - private: - int lost_packets_since_last_loss_update_; - int expected_packets_since_last_loss_update_; - double loss_rate_estimate_; - int64_t next_loss_update_ms_; -}; - -} // namespace bbr -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_BBR_LOSS_RATE_FILTER_H_ diff --git a/modules/congestion_controller/bbr/loss_rate_filter_unittest.cc b/modules/congestion_controller/bbr/loss_rate_filter_unittest.cc deleted file mode 100644 index f553177b08..0000000000 --- a/modules/congestion_controller/bbr/loss_rate_filter_unittest.cc +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/congestion_controller/bbr/loss_rate_filter.h" - -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "test/gtest.h" - -namespace webrtc { -namespace bbr { - -namespace { -const Timestamp kTestStartTime = Timestamp::seconds(100000); -} // namespace - -TEST(LossRateFilterTest, AccumulatesToOne) { - LossRateFilter filter; - Timestamp current_time = kTestStartTime; - for (int i = 0; i < 10; i++) { - filter.UpdateWithLossStatus(current_time.ms(), 10, 10); - current_time += TimeDelta::seconds(1); - } - EXPECT_NEAR(filter.GetLossRate(), 1.0, 0.01); -} - -TEST(LossRateFilterTest, StaysAtZero) { - LossRateFilter filter; - Timestamp current_time = kTestStartTime; - for (int i = 0; i < 10; i++) { - filter.UpdateWithLossStatus(current_time.ms(), 10, 0); - current_time += TimeDelta::seconds(1); - } - EXPECT_NEAR(filter.GetLossRate(), 0.0, 0.01); -} - -TEST(LossRateFilterTest, VariesWithInput) { - LossRateFilter filter; - Timestamp current_time = kTestStartTime; - for (int j = 0; j < 10; j++) { - for (int i = 0; i < 5; i++) { - filter.UpdateWithLossStatus(current_time.ms(), 10, 10); - current_time += TimeDelta::seconds(1); - } - EXPECT_NEAR(filter.GetLossRate(), 1.0, 0.1); - for (int i = 0; i < 5; i++) { - filter.UpdateWithLossStatus(current_time.ms(), 10, 0); - current_time += TimeDelta::seconds(1); - } - EXPECT_NEAR(filter.GetLossRate(), 0.0, 0.1); - } -} - -TEST(LossRateFilterTest, DetectsChangingRate) { - LossRateFilter filter; - Timestamp current_time = kTestStartTime; - for (int per_decile = 0; per_decile < 10; per_decile += 1) { - // Update every 200 ms for 2 seconds - for (int i = 0; i < 10; i++) { - current_time += TimeDelta::ms(200); - filter.UpdateWithLossStatus(current_time.ms(), 10, per_decile); - } - EXPECT_NEAR(filter.GetLossRate(), per_decile / 10.0, 0.05); - } -} -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/packet_number_indexed_queue.h 
b/modules/congestion_controller/bbr/packet_number_indexed_queue.h deleted file mode 100644 index b072191284..0000000000 --- a/modules/congestion_controller/bbr/packet_number_indexed_queue.h +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Based on the Quic implementation in Chromium. - -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_PACKET_NUMBER_INDEXED_QUEUE_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_PACKET_NUMBER_INDEXED_QUEUE_H_ - -#include -#include - -#include -#include -#include - -#include "rtc_base/checks.h" - -namespace webrtc { -namespace bbr { - -// PacketNumberIndexedQueue is a queue of mostly continuous numbered entries -// which supports the following operations: -// - adding elements to the end of the queue, or at some point past the end -// - removing elements in any order -// - retrieving elements -// If all elements are inserted in order, all of the operations above are -// amortized O(1) time. -// -// Internally, the data structure is a deque where each element is marked as -// present or not. The deque starts at the lowest present index. Whenever an -// element is removed, it's marked as not present, and the front of the deque is -// cleared of elements that are not present. -// -// The tail of the queue is not cleared due to the assumption of entries being -// inserted in order, though removing all elements of the queue will return it -// to its initial state. -// -// Note that this data structure is inherently hazardous, since an addition of -// just two entries will cause it to consume all of the memory available. 
-// Because of that, it is not a general-purpose container and should not be used -// as one. -template -class PacketNumberIndexedQueue { - public: - PacketNumberIndexedQueue() - : number_of_present_entries_(0), first_packet_(0) {} - - // Retrieve the entry associated with the packet number. Returns the pointer - // to the entry in case of success, or nullptr if the entry does not exist. - T* GetEntry(int64_t packet_number); - const T* GetEntry(int64_t packet_number) const; - - // Inserts data associated |packet_number| into (or past) the end of the - // queue, filling up the missing intermediate entries as necessary. Returns - // true if the element has been inserted successfully, false if it was already - // in the queue or inserted out of order. - template - bool Emplace(int64_t packet_number, Args&&... args); - - // Removes data associated with |packet_number| and frees the slots in the - // queue as necessary. - bool Remove(int64_t packet_number); - - bool IsEmpty() const { return number_of_present_entries_ == 0; } - - // Returns the number of entries in the queue. - size_t number_of_present_entries() const { - return number_of_present_entries_; - } - - // Returns the number of entries allocated in the underlying deque. This is - // proportional to the memory usage of the queue. - size_t entry_slots_used() const { return entries_.size(); } - - // Packet number of the first entry in the queue. Zero if the queue is empty. - int64_t first_packet() const { return first_packet_; } - - // Packet number of the last entry ever inserted in the queue. Note that the - // entry in question may have already been removed. Zero if the queue is - // empty. - int64_t last_packet() const { - if (IsEmpty()) { - return 0; - } - return first_packet_ + entries_.size() - 1; - } - - private: - // Wrapper around T used to mark whether the entry is actually in the map. 
- struct EntryWrapper { - T data; - bool present; - - EntryWrapper() : data(), present(false) {} - - template - explicit EntryWrapper(Args&&... args) - : data(std::forward(args)...), present(true) {} - }; - - // Cleans up unused slots in the front after removing an element. - void Cleanup(); - - const EntryWrapper* GetEntryWrapper(int64_t offset) const; - EntryWrapper* GetEntryWrapper(int64_t offset) { - const auto* const_this = this; - return const_cast(const_this->GetEntryWrapper(offset)); - } - - std::deque entries_; - size_t number_of_present_entries_; - int64_t first_packet_; -}; - -template -T* PacketNumberIndexedQueue::GetEntry(int64_t packet_number) { - EntryWrapper* entry = GetEntryWrapper(packet_number); - if (entry == nullptr) { - return nullptr; - } - return &entry->data; -} - -template -const T* PacketNumberIndexedQueue::GetEntry(int64_t packet_number) const { - const EntryWrapper* entry = GetEntryWrapper(packet_number); - if (entry == nullptr) { - return nullptr; - } - return &entry->data; -} - -template -template -bool PacketNumberIndexedQueue::Emplace(int64_t packet_number, - Args&&... args) { - if (IsEmpty()) { - RTC_DCHECK(entries_.empty()); - RTC_DCHECK_EQ(0u, first_packet_); - - entries_.emplace_back(std::forward(args)...); - number_of_present_entries_ = 1; - first_packet_ = packet_number; - return true; - } - - // Do not allow insertion out-of-order. - if (packet_number <= last_packet()) { - return false; - } - - // Handle potentially missing elements. 
- int64_t offset = packet_number - first_packet_; - if (offset > static_cast(entries_.size())) { - entries_.resize(offset); - } - - number_of_present_entries_++; - entries_.emplace_back(std::forward(args)...); - RTC_DCHECK_EQ(packet_number, last_packet()); - return true; -} - -template -bool PacketNumberIndexedQueue::Remove(int64_t packet_number) { - EntryWrapper* entry = GetEntryWrapper(packet_number); - if (entry == nullptr) { - return false; - } - entry->present = false; - number_of_present_entries_--; - - if (packet_number == first_packet()) { - Cleanup(); - } - return true; -} - -template -void PacketNumberIndexedQueue::Cleanup() { - while (!entries_.empty() && !entries_.front().present) { - entries_.pop_front(); - first_packet_++; - } - if (entries_.empty()) { - first_packet_ = 0; - } -} - -template -auto PacketNumberIndexedQueue::GetEntryWrapper(int64_t offset) const - -> const EntryWrapper* { - if (offset < first_packet_) { - return nullptr; - } - - offset -= first_packet_; - if (offset >= static_cast(entries_.size())) { - return nullptr; - } - - const EntryWrapper* entry = &entries_[offset]; - if (!entry->present) { - return nullptr; - } - - return entry; -} - -} // namespace bbr -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_BBR_PACKET_NUMBER_INDEXED_QUEUE_H_ diff --git a/modules/congestion_controller/bbr/packet_number_indexed_queue_unittest.cc b/modules/congestion_controller/bbr/packet_number_indexed_queue_unittest.cc deleted file mode 100644 index c402083714..0000000000 --- a/modules/congestion_controller/bbr/packet_number_indexed_queue_unittest.cc +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/bbr/packet_number_indexed_queue.h" - -#include - -#include "test/gtest.h" - -namespace webrtc { -namespace bbr { -namespace { - -class PacketNumberIndexedQueueTest : public ::testing::Test { - public: - PacketNumberIndexedQueueTest() {} - - protected: - PacketNumberIndexedQueue queue_; -}; - -TEST_F(PacketNumberIndexedQueueTest, InitialState) { - EXPECT_TRUE(queue_.IsEmpty()); - EXPECT_EQ(0u, queue_.first_packet()); - EXPECT_EQ(0u, queue_.last_packet()); - EXPECT_EQ(0u, queue_.number_of_present_entries()); - EXPECT_EQ(0u, queue_.entry_slots_used()); -} - -TEST_F(PacketNumberIndexedQueueTest, InsertingContinuousElements) { - ASSERT_TRUE(queue_.Emplace(1001, "one")); - EXPECT_EQ("one", *queue_.GetEntry(1001)); - - ASSERT_TRUE(queue_.Emplace(1002, "two")); - EXPECT_EQ("two", *queue_.GetEntry(1002)); - - EXPECT_FALSE(queue_.IsEmpty()); - EXPECT_EQ(1001u, queue_.first_packet()); - EXPECT_EQ(1002u, queue_.last_packet()); - EXPECT_EQ(2u, queue_.number_of_present_entries()); - EXPECT_EQ(2u, queue_.entry_slots_used()); -} - -TEST_F(PacketNumberIndexedQueueTest, InsertingOutOfOrder) { - queue_.Emplace(1001, "one"); - - ASSERT_TRUE(queue_.Emplace(1003, "three")); - EXPECT_EQ(nullptr, queue_.GetEntry(1002)); - EXPECT_EQ("three", *queue_.GetEntry(1003)); - - EXPECT_EQ(1001u, queue_.first_packet()); - EXPECT_EQ(1003u, queue_.last_packet()); - EXPECT_EQ(2u, queue_.number_of_present_entries()); - EXPECT_EQ(3u, queue_.entry_slots_used()); - - ASSERT_FALSE(queue_.Emplace(1002, "two")); -} - -TEST_F(PacketNumberIndexedQueueTest, InsertingIntoPast) { - queue_.Emplace(1001, "one"); - EXPECT_FALSE(queue_.Emplace(1000, "zero")); -} - -TEST_F(PacketNumberIndexedQueueTest, InsertingDuplicate) { - queue_.Emplace(1001, "one"); - EXPECT_FALSE(queue_.Emplace(1001, "one")); -} - -TEST_F(PacketNumberIndexedQueueTest, RemoveInTheMiddle) { - 
queue_.Emplace(1001, "one"); - queue_.Emplace(1002, "two"); - queue_.Emplace(1003, "three"); - - ASSERT_TRUE(queue_.Remove(1002)); - EXPECT_EQ(nullptr, queue_.GetEntry(1002)); - - EXPECT_EQ(1001u, queue_.first_packet()); - EXPECT_EQ(1003u, queue_.last_packet()); - EXPECT_EQ(2u, queue_.number_of_present_entries()); - EXPECT_EQ(3u, queue_.entry_slots_used()); - - EXPECT_FALSE(queue_.Emplace(1002, "two")); - EXPECT_TRUE(queue_.Emplace(1004, "four")); -} - -TEST_F(PacketNumberIndexedQueueTest, RemoveAtImmediateEdges) { - queue_.Emplace(1001, "one"); - queue_.Emplace(1002, "two"); - queue_.Emplace(1003, "three"); - ASSERT_TRUE(queue_.Remove(1001)); - EXPECT_EQ(nullptr, queue_.GetEntry(1001)); - ASSERT_TRUE(queue_.Remove(1003)); - EXPECT_EQ(nullptr, queue_.GetEntry(1003)); - - EXPECT_EQ(1002u, queue_.first_packet()); - EXPECT_EQ(1003u, queue_.last_packet()); - EXPECT_EQ(1u, queue_.number_of_present_entries()); - EXPECT_EQ(2u, queue_.entry_slots_used()); - - EXPECT_TRUE(queue_.Emplace(1004, "four")); -} - -TEST_F(PacketNumberIndexedQueueTest, RemoveAtDistantFront) { - queue_.Emplace(1001, "one"); - queue_.Emplace(1002, "one (kinda)"); - queue_.Emplace(2001, "two"); - - EXPECT_EQ(1001u, queue_.first_packet()); - EXPECT_EQ(2001u, queue_.last_packet()); - EXPECT_EQ(3u, queue_.number_of_present_entries()); - EXPECT_EQ(1001u, queue_.entry_slots_used()); - - ASSERT_TRUE(queue_.Remove(1002)); - EXPECT_EQ(1001u, queue_.first_packet()); - EXPECT_EQ(2001u, queue_.last_packet()); - EXPECT_EQ(2u, queue_.number_of_present_entries()); - EXPECT_EQ(1001u, queue_.entry_slots_used()); - - ASSERT_TRUE(queue_.Remove(1001)); - EXPECT_EQ(2001u, queue_.first_packet()); - EXPECT_EQ(2001u, queue_.last_packet()); - EXPECT_EQ(1u, queue_.number_of_present_entries()); - EXPECT_EQ(1u, queue_.entry_slots_used()); -} - -TEST_F(PacketNumberIndexedQueueTest, RemoveAtDistantBack) { - queue_.Emplace(1001, "one"); - queue_.Emplace(2001, "two"); - - EXPECT_EQ(1001u, queue_.first_packet()); - EXPECT_EQ(2001u, 
queue_.last_packet()); - - ASSERT_TRUE(queue_.Remove(2001)); - EXPECT_EQ(1001u, queue_.first_packet()); - EXPECT_EQ(2001u, queue_.last_packet()); -} - -TEST_F(PacketNumberIndexedQueueTest, ClearAndRepopulate) { - queue_.Emplace(1001, "one"); - queue_.Emplace(2001, "two"); - - ASSERT_TRUE(queue_.Remove(1001)); - ASSERT_TRUE(queue_.Remove(2001)); - EXPECT_TRUE(queue_.IsEmpty()); - EXPECT_EQ(0u, queue_.first_packet()); - EXPECT_EQ(0u, queue_.last_packet()); - - EXPECT_TRUE(queue_.Emplace(101, "one")); - EXPECT_TRUE(queue_.Emplace(201, "two")); - EXPECT_EQ(101u, queue_.first_packet()); - EXPECT_EQ(201u, queue_.last_packet()); -} - -TEST_F(PacketNumberIndexedQueueTest, FailToRemoveElementsThatNeverExisted) { - ASSERT_FALSE(queue_.Remove(1000)); - queue_.Emplace(1001, "one"); - ASSERT_FALSE(queue_.Remove(1000)); - ASSERT_FALSE(queue_.Remove(1002)); -} - -TEST_F(PacketNumberIndexedQueueTest, FailToRemoveElementsTwice) { - queue_.Emplace(1001, "one"); - ASSERT_TRUE(queue_.Remove(1001)); - ASSERT_FALSE(queue_.Remove(1001)); - ASSERT_FALSE(queue_.Remove(1001)); -} - -TEST_F(PacketNumberIndexedQueueTest, ConstGetter) { - queue_.Emplace(1001, "one"); - const auto& const_queue = queue_; - - EXPECT_EQ("one", *const_queue.GetEntry(1001)); - EXPECT_EQ(nullptr, const_queue.GetEntry(1002)); -} - -} // namespace -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/rtt_stats.cc b/modules/congestion_controller/bbr/rtt_stats.cc deleted file mode 100644 index 2973463f49..0000000000 --- a/modules/congestion_controller/bbr/rtt_stats.cc +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/bbr/rtt_stats.h" - -#include -#include -#include - -#include "rtc_base/logging.h" - -namespace webrtc { -namespace bbr { -namespace { - -// Default initial rtt used before any samples are received. -const int kInitialRttMs = 100; -const double kAlpha = 0.125; -const double kOneMinusAlpha = (1 - kAlpha); -const double kBeta = 0.25; -const double kOneMinusBeta = (1 - kBeta); -const int64_t kNumMicrosPerMilli = 1000; -} // namespace - -RttStats::RttStats() - : latest_rtt_(TimeDelta::Zero()), - min_rtt_(TimeDelta::Zero()), - smoothed_rtt_(TimeDelta::Zero()), - previous_srtt_(TimeDelta::Zero()), - mean_deviation_(TimeDelta::Zero()), - initial_rtt_us_(kInitialRttMs * kNumMicrosPerMilli) {} - -void RttStats::ExpireSmoothedMetrics() { - mean_deviation_ = - std::max(mean_deviation_, (smoothed_rtt_ - latest_rtt_).Abs()); - smoothed_rtt_ = std::max(smoothed_rtt_, latest_rtt_); -} - -// Updates the RTT based on a new sample. -void RttStats::UpdateRtt(TimeDelta send_delta, - TimeDelta ack_delay, - Timestamp now) { - if (send_delta.IsInfinite() || send_delta <= TimeDelta::Zero()) { - RTC_LOG(LS_WARNING) << "Ignoring measured send_delta, because it's is " - "either infinite, zero, or negative. send_delta = " - << ToString(send_delta); - return; - } - - // Update min_rtt_ first. min_rtt_ does not use an rtt_sample corrected for - // ack_delay but the raw observed send_delta, since poor clock granularity at - // the client may cause a high ack_delay to result in underestimation of the - // min_rtt_. - if (min_rtt_.IsZero() || min_rtt_ > send_delta) { - min_rtt_ = send_delta; - } - - // Correct for ack_delay if information received from the peer results in a - // positive RTT sample. Otherwise, we use the send_delta as a reasonable - // measure for smoothed_rtt. 
- TimeDelta rtt_sample = send_delta; - previous_srtt_ = smoothed_rtt_; - - if (rtt_sample > ack_delay) { - rtt_sample = rtt_sample - ack_delay; - } - latest_rtt_ = rtt_sample; - // First time call. - if (smoothed_rtt_.IsZero()) { - smoothed_rtt_ = rtt_sample; - mean_deviation_ = rtt_sample / 2; - } else { - mean_deviation_ = kOneMinusBeta * mean_deviation_ + - kBeta * (smoothed_rtt_ - rtt_sample).Abs(); - smoothed_rtt_ = kOneMinusAlpha * smoothed_rtt_ + kAlpha * rtt_sample; - RTC_LOG(LS_VERBOSE) << " smoothed_rtt(us):" << smoothed_rtt_.us() - << " mean_deviation(us):" << mean_deviation_.us(); - } -} - -void RttStats::OnConnectionMigration() { - latest_rtt_ = TimeDelta::Zero(); - min_rtt_ = TimeDelta::Zero(); - smoothed_rtt_ = TimeDelta::Zero(); - mean_deviation_ = TimeDelta::Zero(); - initial_rtt_us_ = kInitialRttMs * kNumMicrosPerMilli; -} - -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/rtt_stats.h b/modules/congestion_controller/bbr/rtt_stats.h deleted file mode 100644 index e8f0a8ba43..0000000000 --- a/modules/congestion_controller/bbr/rtt_stats.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -// A convenience class to store RTT samples and calculate smoothed RTT. -// From the Quic BBR implementation in Chromium. 
- -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_RTT_STATS_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_RTT_STATS_H_ - -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/logging.h" - -namespace webrtc { -namespace bbr { - -class RttStats { - public: - RttStats(); - - // Updates the RTT from an incoming ack which is received |send_delta| after - // the packet is sent and the peer reports the ack being delayed |ack_delay|. - void UpdateRtt(TimeDelta send_delta, TimeDelta ack_delay, Timestamp now); - - // Causes the smoothed_rtt to be increased to the latest_rtt if the latest_rtt - // is larger. The mean deviation is increased to the most recent deviation if - // it's larger. - void ExpireSmoothedMetrics(); - - // Called when connection migrates and RTT measurement needs to be reset. - void OnConnectionMigration(); - - // Returns the EWMA smoothed RTT for the connection. - // May return Zero if no valid updates have occurred. - TimeDelta smoothed_rtt() const { return smoothed_rtt_; } - - // Returns the EWMA smoothed RTT prior to the most recent RTT sample. - TimeDelta previous_srtt() const { return previous_srtt_; } - - int64_t initial_rtt_us() const { return initial_rtt_us_; } - - // Sets an initial RTT to be used for SmoothedRtt before any RTT updates. - void set_initial_rtt_us(int64_t initial_rtt_us) { - RTC_DCHECK_GE(initial_rtt_us, 0); - if (initial_rtt_us <= 0) { - RTC_LOG(LS_ERROR) << "Attempt to set initial rtt to <= 0."; - return; - } - initial_rtt_us_ = initial_rtt_us; - } - - // The most recent RTT measurement. - // May return Zero if no valid updates have occurred. - TimeDelta latest_rtt() const { return latest_rtt_; } - - // Returns the min_rtt for the entire connection. - // May return Zero if no valid updates have occurred. 
- TimeDelta min_rtt() const { return min_rtt_; } - - TimeDelta mean_deviation() const { return mean_deviation_; } - - private: - TimeDelta latest_rtt_; - TimeDelta min_rtt_; - TimeDelta smoothed_rtt_; - TimeDelta previous_srtt_; - // Mean RTT deviation during this session. - // Approximation of standard deviation, the error is roughly 1.25 times - // larger than the standard deviation, for a normally distributed signal. - TimeDelta mean_deviation_; - int64_t initial_rtt_us_; - - RTC_DISALLOW_COPY_AND_ASSIGN(RttStats); -}; - -} // namespace bbr -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_BBR_RTT_STATS_H_ diff --git a/modules/congestion_controller/bbr/rtt_stats_unittest.cc b/modules/congestion_controller/bbr/rtt_stats_unittest.cc deleted file mode 100644 index 54b142575c..0000000000 --- a/modules/congestion_controller/bbr/rtt_stats_unittest.cc +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/bbr/rtt_stats.h" - -#include - -#include -#include - -#include "test/gtest.h" - -namespace webrtc { -namespace bbr { -namespace test { - -class RttStatsTest : public ::testing::Test { - protected: - RttStats rtt_stats_; -}; - -TEST_F(RttStatsTest, DefaultsBeforeUpdate) { - EXPECT_LT(0u, rtt_stats_.initial_rtt_us()); - EXPECT_EQ(TimeDelta::Zero(), rtt_stats_.min_rtt()); - EXPECT_EQ(TimeDelta::Zero(), rtt_stats_.smoothed_rtt()); -} - -TEST_F(RttStatsTest, SmoothedRtt) { - // Verify that ack_delay is corrected for in Smoothed RTT. 
- rtt_stats_.UpdateRtt(TimeDelta::ms(300), TimeDelta::ms(100), - Timestamp::ms(0)); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.latest_rtt()); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.smoothed_rtt()); - // Verify that effective RTT of zero does not change Smoothed RTT. - rtt_stats_.UpdateRtt(TimeDelta::ms(200), TimeDelta::ms(200), - Timestamp::ms(0)); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.latest_rtt()); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.smoothed_rtt()); - // Verify that large erroneous ack_delay does not change Smoothed RTT. - rtt_stats_.UpdateRtt(TimeDelta::ms(200), TimeDelta::ms(300), - Timestamp::ms(0)); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.latest_rtt()); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.smoothed_rtt()); -} - -// Ensure that the potential rounding artifacts in EWMA calculation do not cause -// the SRTT to drift too far from the exact value. -TEST_F(RttStatsTest, SmoothedRttStability) { - for (int64_t time = 3; time < 20000; time++) { - RttStats stats; - for (int64_t i = 0; i < 100; i++) { - stats.UpdateRtt(TimeDelta::us(time), TimeDelta::ms(0), Timestamp::ms(0)); - int64_t time_delta_us = stats.smoothed_rtt().us() - time; - ASSERT_LE(std::abs(time_delta_us), 1); - } - } -} - -TEST_F(RttStatsTest, PreviousSmoothedRtt) { - // Verify that ack_delay is corrected for in Smoothed RTT. - rtt_stats_.UpdateRtt(TimeDelta::ms(300), TimeDelta::ms(100), - Timestamp::ms(0)); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.latest_rtt()); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.smoothed_rtt()); - EXPECT_EQ(TimeDelta::Zero(), rtt_stats_.previous_srtt()); - // Ensure the previous SRTT is 200ms after a 100ms sample. 
- rtt_stats_.UpdateRtt(TimeDelta::ms(100), TimeDelta::Zero(), Timestamp::ms(0)); - EXPECT_EQ(TimeDelta::ms(100), rtt_stats_.latest_rtt()); - EXPECT_EQ(TimeDelta::us(187500).us(), rtt_stats_.smoothed_rtt().us()); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.previous_srtt()); -} - -TEST_F(RttStatsTest, MinRtt) { - rtt_stats_.UpdateRtt(TimeDelta::ms(200), TimeDelta::Zero(), Timestamp::ms(0)); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.min_rtt()); - rtt_stats_.UpdateRtt(TimeDelta::ms(10), TimeDelta::Zero(), - Timestamp::ms(0) + TimeDelta::ms(10)); - EXPECT_EQ(TimeDelta::ms(10), rtt_stats_.min_rtt()); - rtt_stats_.UpdateRtt(TimeDelta::ms(50), TimeDelta::Zero(), - Timestamp::ms(0) + TimeDelta::ms(20)); - EXPECT_EQ(TimeDelta::ms(10), rtt_stats_.min_rtt()); - rtt_stats_.UpdateRtt(TimeDelta::ms(50), TimeDelta::Zero(), - Timestamp::ms(0) + TimeDelta::ms(30)); - EXPECT_EQ(TimeDelta::ms(10), rtt_stats_.min_rtt()); - rtt_stats_.UpdateRtt(TimeDelta::ms(50), TimeDelta::Zero(), - Timestamp::ms(0) + TimeDelta::ms(40)); - EXPECT_EQ(TimeDelta::ms(10), rtt_stats_.min_rtt()); - // Verify that ack_delay does not go into recording of min_rtt_. - rtt_stats_.UpdateRtt(TimeDelta::ms(7), TimeDelta::ms(2), - Timestamp::ms(0) + TimeDelta::ms(50)); - EXPECT_EQ(TimeDelta::ms(7), rtt_stats_.min_rtt()); -} - -TEST_F(RttStatsTest, ExpireSmoothedMetrics) { - TimeDelta initial_rtt = TimeDelta::ms(10); - rtt_stats_.UpdateRtt(initial_rtt, TimeDelta::Zero(), Timestamp::ms(0)); - EXPECT_EQ(initial_rtt, rtt_stats_.min_rtt()); - EXPECT_EQ(initial_rtt, rtt_stats_.smoothed_rtt()); - - EXPECT_EQ(0.5 * initial_rtt, rtt_stats_.mean_deviation()); - - // Update once with a 20ms RTT. - TimeDelta doubled_rtt = 2 * initial_rtt; - rtt_stats_.UpdateRtt(doubled_rtt, TimeDelta::Zero(), Timestamp::ms(0)); - EXPECT_EQ(1.125 * initial_rtt, rtt_stats_.smoothed_rtt()); - - // Expire the smoothed metrics, increasing smoothed rtt and mean deviation. 
- rtt_stats_.ExpireSmoothedMetrics(); - EXPECT_EQ(doubled_rtt, rtt_stats_.smoothed_rtt()); - EXPECT_EQ(0.875 * initial_rtt, rtt_stats_.mean_deviation()); - - // Now go back down to 5ms and expire the smoothed metrics, and ensure the - // mean deviation increases to 15ms. - TimeDelta half_rtt = 0.5 * initial_rtt; - rtt_stats_.UpdateRtt(half_rtt, TimeDelta::Zero(), Timestamp::ms(0)); - EXPECT_GT(doubled_rtt, rtt_stats_.smoothed_rtt()); - EXPECT_LT(initial_rtt, rtt_stats_.mean_deviation()); -} - -TEST_F(RttStatsTest, UpdateRttWithBadSendDeltas) { - // Make sure we ignore bad RTTs. - - TimeDelta initial_rtt = TimeDelta::ms(10); - rtt_stats_.UpdateRtt(initial_rtt, TimeDelta::Zero(), Timestamp::ms(0)); - EXPECT_EQ(initial_rtt, rtt_stats_.min_rtt()); - EXPECT_EQ(initial_rtt, rtt_stats_.smoothed_rtt()); - - std::vector bad_send_deltas; - bad_send_deltas.push_back(TimeDelta::Zero()); - bad_send_deltas.push_back(TimeDelta::PlusInfinity()); - bad_send_deltas.push_back(TimeDelta::us(-1000)); - - for (TimeDelta bad_send_delta : bad_send_deltas) { - rtt_stats_.UpdateRtt(bad_send_delta, TimeDelta::Zero(), Timestamp::ms(0)); - EXPECT_EQ(initial_rtt, rtt_stats_.min_rtt()); - EXPECT_EQ(initial_rtt, rtt_stats_.smoothed_rtt()); - } -} - -TEST_F(RttStatsTest, ResetAfterConnectionMigrations) { - rtt_stats_.UpdateRtt(TimeDelta::ms(300), TimeDelta::ms(100), - Timestamp::ms(0)); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.latest_rtt()); - EXPECT_EQ(TimeDelta::ms(200), rtt_stats_.smoothed_rtt()); - EXPECT_EQ(TimeDelta::ms(300), rtt_stats_.min_rtt()); - - // Reset rtt stats on connection migrations. 
- rtt_stats_.OnConnectionMigration(); - EXPECT_EQ(TimeDelta::Zero(), rtt_stats_.latest_rtt()); - EXPECT_EQ(TimeDelta::Zero(), rtt_stats_.smoothed_rtt()); - EXPECT_EQ(TimeDelta::Zero(), rtt_stats_.min_rtt()); -} - -} // namespace test -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/bbr/windowed_filter.h b/modules/congestion_controller/bbr/windowed_filter.h deleted file mode 100644 index 14185a5306..0000000000 --- a/modules/congestion_controller/bbr/windowed_filter.h +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_CONGESTION_CONTROLLER_BBR_WINDOWED_FILTER_H_ -#define MODULES_CONGESTION_CONTROLLER_BBR_WINDOWED_FILTER_H_ - -// From the Quic BBR implementation in Chromium - -// Implements Kathleen Nichols' algorithm for tracking the minimum (or maximum) -// estimate of a stream of samples over some fixed time interval. (E.g., -// the minimum RTT over the past five minutes.) The algorithm keeps track of -// the best, second best, and third best min (or max) estimates, maintaining an -// invariant that the measurement time of the n'th best >= n-1'th best. - -// The algorithm works as follows. On a reset, all three estimates are set to -// the same sample. The second best estimate is then recorded in the second -// quarter of the window, and a third best estimate is recorded in the second -// half of the window, bounding the worst case error when the true min is -// monotonically increasing (or true max is monotonically decreasing) over the -// window. 
-// -// A new best sample replaces all three estimates, since the new best is lower -// (or higher) than everything else in the window and it is the most recent. -// The window thus effectively gets reset on every new min. The same property -// holds true for second best and third best estimates. Specifically, when a -// sample arrives that is better than the second best but not better than the -// best, it replaces the second and third best estimates but not the best -// estimate. Similarly, a sample that is better than the third best estimate -// but not the other estimates replaces only the third best estimate. -// -// Finally, when the best expires, it is replaced by the second best, which in -// turn is replaced by the third best. The newest sample replaces the third -// best. - -namespace webrtc { -namespace bbr { - -// Compares two values and returns true if the first is less than or equal -// to the second. -template -struct MinFilter { - bool operator()(const T& lhs, const T& rhs) const { return lhs <= rhs; } -}; - -// Compares two values and returns true if the first is greater than or equal -// to the second. -template -struct MaxFilter { - bool operator()(const T& lhs, const T& rhs) const { return lhs >= rhs; } -}; - -// Use the following to construct a windowed filter object of type T. -// For example, a min filter using Timestamp as the time type: -// WindowedFilter, Timestamp, TimeDelta> -// ObjectName; -// A max filter using 64-bit integers as the time type: -// WindowedFilter, uint64_t, int64_t> ObjectName; -// Specifically, this template takes four arguments: -// 1. T -- type of the measurement that is being filtered. -// 2. Compare -- MinFilter or MaxFilter, depending on the type of filter -// desired. -// 3. TimeT -- the type used to represent timestamps. -// 4. TimeDeltaT -- the type used to represent continuous time intervals between -// two timestamps. Has to be the type of (a - b) if both |a| and |b| are -// of type TimeT. 
-template -class WindowedFilter { - public: - // |window_length| is the period after which a best estimate expires. - // |zero_value| is used as the uninitialized value for objects of T. - // Importantly, |zero_value| should be an invalid value for a true sample. - WindowedFilter(TimeDeltaT window_length, T zero_value, TimeT zero_time) - : window_length_(window_length), - zero_value_(zero_value), - estimates_{Sample(zero_value_, zero_time), - Sample(zero_value_, zero_time), - Sample(zero_value_, zero_time)} {} - - // Changes the window length. Does not update any current samples. - void SetWindowLength(TimeDeltaT window_length) { - window_length_ = window_length; - } - - // Updates best estimates with |sample|, and expires and updates best - // estimates as necessary. - void Update(T new_sample, TimeT new_time) { - // Reset all estimates if they have not yet been initialized, if new sample - // is a new best, or if the newest recorded estimate is too old. - if (estimates_[0].sample == zero_value_ || - Compare()(new_sample, estimates_[0].sample) || - new_time - estimates_[2].time > window_length_) { - Reset(new_sample, new_time); - return; - } - - if (Compare()(new_sample, estimates_[1].sample)) { - estimates_[1] = Sample(new_sample, new_time); - estimates_[2] = estimates_[1]; - } else if (Compare()(new_sample, estimates_[2].sample)) { - estimates_[2] = Sample(new_sample, new_time); - } - - // Expire and update estimates as necessary. - if (new_time - estimates_[0].time > window_length_) { - // The best estimate hasn't been updated for an entire window, so promote - // second and third best estimates. - estimates_[0] = estimates_[1]; - estimates_[1] = estimates_[2]; - estimates_[2] = Sample(new_sample, new_time); - // Need to iterate one more time. Check if the new best estimate is - // outside the window as well, since it may also have been recorded a - // long time ago. Don't need to iterate once more since we cover that - // case at the beginning of the method. 
- if (new_time - estimates_[0].time > window_length_) { - estimates_[0] = estimates_[1]; - estimates_[1] = estimates_[2]; - } - return; - } - if (estimates_[1].sample == estimates_[0].sample && - new_time - estimates_[1].time > window_length_ >> 2) { - // A quarter of the window has passed without a better sample, so the - // second-best estimate is taken from the second quarter of the window. - estimates_[2] = estimates_[1] = Sample(new_sample, new_time); - return; - } - - if (estimates_[2].sample == estimates_[1].sample && - new_time - estimates_[2].time > window_length_ >> 1) { - // We've passed a half of the window without a better estimate, so take - // a third-best estimate from the second half of the window. - estimates_[2] = Sample(new_sample, new_time); - } - } - - // Resets all estimates to new sample. - void Reset(T new_sample, TimeT new_time) { - estimates_[0] = estimates_[1] = estimates_[2] = - Sample(new_sample, new_time); - } - - T GetBest() const { return estimates_[0].sample; } - T GetSecondBest() const { return estimates_[1].sample; } - T GetThirdBest() const { return estimates_[2].sample; } - - private: - struct Sample { - T sample; - TimeT time; - Sample(T init_sample, TimeT init_time) - : sample(init_sample), time(init_time) {} - }; - - TimeDeltaT window_length_; // Time length of window. - T zero_value_; // Uninitialized value of T. - Sample estimates_[3]; // Best estimate is element 0. -}; - -} // namespace bbr -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_BBR_WINDOWED_FILTER_H_ diff --git a/modules/congestion_controller/bbr/windowed_filter_unittest.cc b/modules/congestion_controller/bbr/windowed_filter_unittest.cc deleted file mode 100644 index 7ab4588b65..0000000000 --- a/modules/congestion_controller/bbr/windowed_filter_unittest.cc +++ /dev/null @@ -1,368 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/bbr/windowed_filter.h" - -#include - -#include -#include - -#include "api/units/data_rate.h" -#include "api/units/time_delta.h" -#include "rtc_base/logging.h" -#include "test/gtest.h" - -namespace webrtc { -namespace bbr { -namespace test { -class WindowedFilterTest : public ::testing::Test { - public: - // Set the window to 99ms, so 25ms is more than a quarter rtt. - WindowedFilterTest() - : windowed_min_rtt_(99, TimeDelta::Zero(), 0), - windowed_max_bw_(99, DataRate::Zero(), 0) {} - - // Sets up windowed_min_rtt_ to have the following values: - // Best = 20ms, recorded at 25ms - // Second best = 40ms, recorded at 75ms - // Third best = 50ms, recorded at 100ms - void InitializeMinFilter() { - int64_t now_ms = 0; - TimeDelta rtt_sample = TimeDelta::ms(10); - for (int i = 0; i < 5; ++i) { - windowed_min_rtt_.Update(rtt_sample, now_ms); - RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << ToString(rtt_sample) - << " mins: " - " " - << ToString(windowed_min_rtt_.GetBest()) << " " - << ToString(windowed_min_rtt_.GetSecondBest()) << " " - << ToString(windowed_min_rtt_.GetThirdBest()); - now_ms += 25; - rtt_sample = rtt_sample + TimeDelta::ms(10); - } - EXPECT_EQ(TimeDelta::ms(20), windowed_min_rtt_.GetBest()); - EXPECT_EQ(TimeDelta::ms(40), windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(TimeDelta::ms(50), windowed_min_rtt_.GetThirdBest()); - } - - // Sets up windowed_max_bw_ to have the following values: - // Best = 900 bps, recorded at 25ms - // Second best = 700 bps, recorded at 75ms - // Third best = 600 bps, recorded at 100ms - void InitializeMaxFilter() { - int64_t now_ms = 0; - DataRate 
bw_sample = DataRate::bps(1000); - for (int i = 0; i < 5; ++i) { - windowed_max_bw_.Update(bw_sample, now_ms); - RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << ToString(bw_sample) - << " maxs: " - " " - << ToString(windowed_max_bw_.GetBest()) << " " - << ToString(windowed_max_bw_.GetSecondBest()) << " " - << ToString(windowed_max_bw_.GetThirdBest()); - now_ms += 25; - bw_sample = DataRate::bps(bw_sample.bps() - 100); - } - EXPECT_EQ(DataRate::bps(900), windowed_max_bw_.GetBest()); - EXPECT_EQ(DataRate::bps(700), windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(DataRate::bps(600), windowed_max_bw_.GetThirdBest()); - } - - protected: - WindowedFilter, int64_t, int64_t> - windowed_min_rtt_; - WindowedFilter, int64_t, int64_t> - windowed_max_bw_; -}; - -namespace { -// Test helper function: updates the filter with a lot of small values in order -// to ensure that it is not susceptible to noise. -void UpdateWithIrrelevantSamples( - WindowedFilter, uint64_t, uint64_t>* filter, - uint64_t max_value, - uint64_t time) { - for (uint64_t i = 0; i < 1000; i++) { - filter->Update(i % max_value, time); - } -} -} // namespace - -TEST_F(WindowedFilterTest, UninitializedEstimates) { - EXPECT_EQ(TimeDelta::Zero(), windowed_min_rtt_.GetBest()); - EXPECT_EQ(TimeDelta::Zero(), windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(TimeDelta::Zero(), windowed_min_rtt_.GetThirdBest()); - EXPECT_EQ(DataRate::Zero(), windowed_max_bw_.GetBest()); - EXPECT_EQ(DataRate::Zero(), windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(DataRate::Zero(), windowed_max_bw_.GetThirdBest()); -} - -TEST_F(WindowedFilterTest, MonotonicallyIncreasingMin) { - int64_t now_ms = 0; - TimeDelta rtt_sample = TimeDelta::ms(10); - windowed_min_rtt_.Update(rtt_sample, now_ms); - EXPECT_EQ(TimeDelta::ms(10), windowed_min_rtt_.GetBest()); - - // Gradually increase the rtt samples and ensure the windowed min rtt starts - // rising. 
- for (int i = 0; i < 6; ++i) { - now_ms += 25; - rtt_sample = rtt_sample + TimeDelta::ms(10); - windowed_min_rtt_.Update(rtt_sample, now_ms); - RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << rtt_sample.ms() - << " mins: " - " " - << windowed_min_rtt_.GetBest().ms() << " " - << windowed_min_rtt_.GetSecondBest().ms() << " " - << windowed_min_rtt_.GetThirdBest().ms(); - if (i < 3) { - EXPECT_EQ(TimeDelta::ms(10), windowed_min_rtt_.GetBest()); - } else if (i == 3) { - EXPECT_EQ(TimeDelta::ms(20), windowed_min_rtt_.GetBest()); - } else if (i < 6) { - EXPECT_EQ(TimeDelta::ms(40), windowed_min_rtt_.GetBest()); - } - } -} - -TEST_F(WindowedFilterTest, MonotonicallyDecreasingMax) { - int64_t now_ms = 0; - DataRate bw_sample = DataRate::bps(1000); - windowed_max_bw_.Update(bw_sample, now_ms); - EXPECT_EQ(DataRate::bps(1000), windowed_max_bw_.GetBest()); - - // Gradually decrease the bw samples and ensure the windowed max bw starts - // decreasing. - for (int i = 0; i < 6; ++i) { - now_ms += 25; - bw_sample = DataRate::bps(bw_sample.bps() - 100); - windowed_max_bw_.Update(bw_sample, now_ms); - RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << bw_sample.bps() - << " maxs: " - " " - << windowed_max_bw_.GetBest().bps() << " " - << windowed_max_bw_.GetSecondBest().bps() << " " - << windowed_max_bw_.GetThirdBest().bps(); - if (i < 3) { - EXPECT_EQ(DataRate::bps(1000), windowed_max_bw_.GetBest()); - } else if (i == 3) { - EXPECT_EQ(DataRate::bps(900), windowed_max_bw_.GetBest()); - } else if (i < 6) { - EXPECT_EQ(DataRate::bps(700), windowed_max_bw_.GetBest()); - } - } -} - -TEST_F(WindowedFilterTest, SampleChangesThirdBestMin) { - InitializeMinFilter(); - // RTT sample lower than the third-choice min-rtt sets that, but nothing else. 
- TimeDelta rtt_sample = windowed_min_rtt_.GetThirdBest() - TimeDelta::ms(5); - // This assert is necessary to avoid triggering -Wstrict-overflow - // See crbug/616957 - ASSERT_GT(windowed_min_rtt_.GetThirdBest(), TimeDelta::ms(5)); - // Latest sample was recorded at 100ms. - int64_t now_ms = 101; - windowed_min_rtt_.Update(rtt_sample, now_ms); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetThirdBest()); - EXPECT_EQ(TimeDelta::ms(40), windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(TimeDelta::ms(20), windowed_min_rtt_.GetBest()); -} - -TEST_F(WindowedFilterTest, SampleChangesThirdBestMax) { - InitializeMaxFilter(); - // BW sample higher than the third-choice max sets that, but nothing else. - DataRate bw_sample = - DataRate::bps(windowed_max_bw_.GetThirdBest().bps() + 50); - // Latest sample was recorded at 100ms. - int64_t now_ms = 101; - windowed_max_bw_.Update(bw_sample, now_ms); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetThirdBest()); - EXPECT_EQ(DataRate::bps(700), windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(DataRate::bps(900), windowed_max_bw_.GetBest()); -} - -TEST_F(WindowedFilterTest, SampleChangesSecondBestMin) { - InitializeMinFilter(); - // RTT sample lower than the second-choice min sets that and also - // the third-choice min. - TimeDelta rtt_sample = windowed_min_rtt_.GetSecondBest() - TimeDelta::ms(5); - // This assert is necessary to avoid triggering -Wstrict-overflow - // See crbug/616957 - ASSERT_GT(windowed_min_rtt_.GetSecondBest(), TimeDelta::ms(5)); - // Latest sample was recorded at 100ms. - int64_t now_ms = 101; - windowed_min_rtt_.Update(rtt_sample, now_ms); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetThirdBest()); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(TimeDelta::ms(20), windowed_min_rtt_.GetBest()); -} - -TEST_F(WindowedFilterTest, SampleChangesSecondBestMax) { - InitializeMaxFilter(); - // BW sample higher than the second-choice max sets that and also - // the third-choice max. 
- DataRate bw_sample = - DataRate::bps(windowed_max_bw_.GetSecondBest().bps() + 50); - - // Latest sample was recorded at 100ms. - int64_t now_ms = 101; - windowed_max_bw_.Update(bw_sample, now_ms); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetThirdBest()); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(DataRate::bps(900), windowed_max_bw_.GetBest()); -} - -TEST_F(WindowedFilterTest, SampleChangesAllMins) { - InitializeMinFilter(); - // RTT sample lower than the first-choice min-rtt sets that and also - // the second and third-choice mins. - TimeDelta rtt_sample = windowed_min_rtt_.GetBest() - TimeDelta::ms(5); - // This assert is necessary to avoid triggering -Wstrict-overflow - // See crbug/616957 - ASSERT_GT(windowed_min_rtt_.GetBest(), TimeDelta::ms(5)); - // Latest sample was recorded at 100ms. - int64_t now_ms = 101; - windowed_min_rtt_.Update(rtt_sample, now_ms); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetThirdBest()); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetBest()); -} - -TEST_F(WindowedFilterTest, SampleChangesAllMaxs) { - InitializeMaxFilter(); - // BW sample higher than the first-choice max sets that and also - // the second and third-choice maxs. - DataRate bw_sample = DataRate::bps(windowed_max_bw_.GetBest().bps() + 50); - // Latest sample was recorded at 100ms. - int64_t now_ms = 101; - windowed_max_bw_.Update(bw_sample, now_ms); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetThirdBest()); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetBest()); -} - -TEST_F(WindowedFilterTest, ExpireBestMin) { - InitializeMinFilter(); - TimeDelta old_third_best = windowed_min_rtt_.GetThirdBest(); - TimeDelta old_second_best = windowed_min_rtt_.GetSecondBest(); - TimeDelta rtt_sample = old_third_best + TimeDelta::ms(5); - // Best min sample was recorded at 25ms, so expiry time is 124ms. 
- int64_t now_ms = 125; - windowed_min_rtt_.Update(rtt_sample, now_ms); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetThirdBest()); - EXPECT_EQ(old_third_best, windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(old_second_best, windowed_min_rtt_.GetBest()); -} - -TEST_F(WindowedFilterTest, ExpireBestMax) { - InitializeMaxFilter(); - DataRate old_third_best = windowed_max_bw_.GetThirdBest(); - DataRate old_second_best = windowed_max_bw_.GetSecondBest(); - DataRate bw_sample = DataRate::bps(old_third_best.bps() - 50); - // Best max sample was recorded at 25ms, so expiry time is 124ms. - int64_t now_ms = 125; - windowed_max_bw_.Update(bw_sample, now_ms); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetThirdBest()); - EXPECT_EQ(old_third_best, windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(old_second_best, windowed_max_bw_.GetBest()); -} - -TEST_F(WindowedFilterTest, ExpireSecondBestMin) { - InitializeMinFilter(); - TimeDelta old_third_best = windowed_min_rtt_.GetThirdBest(); - TimeDelta rtt_sample = old_third_best + TimeDelta::ms(5); - // Second best min sample was recorded at 75ms, so expiry time is 174ms. - int64_t now_ms = 175; - windowed_min_rtt_.Update(rtt_sample, now_ms); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetThirdBest()); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(old_third_best, windowed_min_rtt_.GetBest()); -} - -TEST_F(WindowedFilterTest, ExpireSecondBestMax) { - InitializeMaxFilter(); - DataRate old_third_best = windowed_max_bw_.GetThirdBest(); - DataRate bw_sample = DataRate::bps(old_third_best.bps() - 50); - // Second best max sample was recorded at 75ms, so expiry time is 174ms. 
- int64_t now_ms = 175; - windowed_max_bw_.Update(bw_sample, now_ms); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetThirdBest()); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(old_third_best, windowed_max_bw_.GetBest()); -} - -TEST_F(WindowedFilterTest, ExpireAllMins) { - InitializeMinFilter(); - TimeDelta rtt_sample = windowed_min_rtt_.GetThirdBest() + TimeDelta::ms(5); - // This assert is necessary to avoid triggering -Wstrict-overflow - // See crbug/616957 - ASSERT_LT(windowed_min_rtt_.GetThirdBest(), TimeDelta::PlusInfinity()); - // Third best min sample was recorded at 100ms, so expiry time is 199ms. - int64_t now_ms = 200; - windowed_min_rtt_.Update(rtt_sample, now_ms); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetThirdBest()); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetSecondBest()); - EXPECT_EQ(rtt_sample, windowed_min_rtt_.GetBest()); -} - -TEST_F(WindowedFilterTest, ExpireAllMaxs) { - InitializeMaxFilter(); - DataRate bw_sample = - DataRate::bps(windowed_max_bw_.GetThirdBest().bps() - 50); - // Third best max sample was recorded at 100ms, so expiry time is 199ms. - int64_t now_ms = 200; - windowed_max_bw_.Update(bw_sample, now_ms); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetThirdBest()); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetSecondBest()); - EXPECT_EQ(bw_sample, windowed_max_bw_.GetBest()); -} - -// Test the windowed filter where the time used is an exact counter instead of a -// timestamp. This is useful if, for example, the time is measured in round -// trips. -TEST_F(WindowedFilterTest, ExpireCounterBasedMax) { - // Create a window which starts at t = 0 and expires after two cycles. - WindowedFilter, uint64_t, uint64_t> max_filter( - 2, 0, 0); - - const uint64_t kBest = 50000; - // Insert 50000 at t = 1. - max_filter.Update(50000, 1); - EXPECT_EQ(kBest, max_filter.GetBest()); - UpdateWithIrrelevantSamples(&max_filter, 20, 1); - EXPECT_EQ(kBest, max_filter.GetBest()); - - // Insert 40000 at t = 2. 
Nothing is expected to expire. - max_filter.Update(40000, 2); - EXPECT_EQ(kBest, max_filter.GetBest()); - UpdateWithIrrelevantSamples(&max_filter, 20, 2); - EXPECT_EQ(kBest, max_filter.GetBest()); - - // Insert 30000 at t = 3. Nothing is expected to expire yet. - max_filter.Update(30000, 3); - EXPECT_EQ(kBest, max_filter.GetBest()); - UpdateWithIrrelevantSamples(&max_filter, 20, 3); - EXPECT_EQ(kBest, max_filter.GetBest()); - RTC_LOG(LS_VERBOSE) << max_filter.GetSecondBest(); - RTC_LOG(LS_VERBOSE) << max_filter.GetThirdBest(); - - // Insert 20000 at t = 4. 50000 at t = 1 expires, so 40000 becomes the new - // maximum. - const uint64_t kNewBest = 40000; - max_filter.Update(20000, 4); - EXPECT_EQ(kNewBest, max_filter.GetBest()); - UpdateWithIrrelevantSamples(&max_filter, 20, 4); - EXPECT_EQ(kNewBest, max_filter.GetBest()); -} - -} // namespace test -} // namespace bbr -} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/BUILD.gn b/modules/congestion_controller/goog_cc/BUILD.gn index 7ec13afc17..d169d37acf 100644 --- a/modules/congestion_controller/goog_cc/BUILD.gn +++ b/modules/congestion_controller/goog_cc/BUILD.gn @@ -31,7 +31,6 @@ rtc_library("goog_cc") { ":probe_controller", ":pushback_controller", "../..:module_api", - "../../..:webrtc_common", "../../../api:network_state_predictor_api", "../../../api/rtc_event_log", "../../../api/transport:field_trial_based_config", @@ -45,12 +44,14 @@ rtc_library("goog_cc") { "../../../logging:rtc_event_pacing", "../../../rtc_base:checks", "../../../rtc_base:logging", - "../../../rtc_base:macromagic", "../../../rtc_base/experiments:alr_experiment", "../../../rtc_base/experiments:field_trial_parser", "../../../rtc_base/experiments:rate_control_settings", "../../../system_wrappers", "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -63,8 +64,8 @@ rtc_library("link_capacity_estimator") { deps = [ 
"../../../api/units:data_rate", "../../../rtc_base:safe_minmax", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("pushback_controller") { @@ -78,6 +79,9 @@ rtc_library("pushback_controller") { "../../../api/units:data_size", "../../../rtc_base:checks", "../../../rtc_base/experiments:rate_control_settings", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -98,8 +102,8 @@ rtc_library("alr_detector") { "../../../rtc_base/experiments:alr_experiment", "../../../rtc_base/experiments:field_trial_parser", "../../pacing:interval_budget", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("estimators") { configs += [ ":bwe_test_logging" ] @@ -111,8 +115,6 @@ rtc_library("estimators") { "bitrate_estimator.cc", "bitrate_estimator.h", "delay_increase_detector_interface.h", - "median_slope_estimator.cc", - "median_slope_estimator.h", "probe_bitrate_estimator.cc", "probe_bitrate_estimator.h", "robust_throughput_estimator.cc", @@ -137,6 +139,9 @@ rtc_library("estimators") { "../../../rtc_base:safe_minmax", "../../../rtc_base/experiments:field_trial_parser", "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -152,6 +157,7 @@ rtc_library("loss_based_controller") { deps = [ "../../../api/rtc_event_log", "../../../api/transport:network_control", + "../../../api/transport:webrtc_key_value_config", "../../../api/units:data_rate", "../../../api/units:time_delta", "../../../api/units:timestamp", @@ -162,6 +168,9 @@ rtc_library("loss_based_controller") { "../../../system_wrappers:field_trial", "../../../system_wrappers:metrics", "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", 
"//third_party/abseil-cpp/absl/types:optional", ] } @@ -187,6 +196,9 @@ rtc_library("delay_based_bwe") { "../../../system_wrappers:metrics", "../../pacing", "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -213,6 +225,9 @@ rtc_library("probe_controller") { "../../../rtc_base/experiments:field_trial_parser", "../../../rtc_base/system:unused", "../../../system_wrappers:metrics", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -236,8 +251,8 @@ if (rtc_include_tests) { "../../../rtc_base:checks", "../../../test/logging:log_writer", "../../remote_bitrate_estimator", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("goog_cc_unittests") { testonly = true @@ -250,7 +265,6 @@ if (rtc_include_tests) { "delay_based_bwe_unittest_helper.cc", "delay_based_bwe_unittest_helper.h", "goog_cc_network_control_unittest.cc", - "median_slope_estimator_unittest.cc", "probe_bitrate_estimator_unittest.cc", "probe_controller_unittest.cc", "robust_throughput_estimator_unittest.cc", @@ -279,6 +293,7 @@ if (rtc_include_tests) { "../../../rtc_base:rtc_base_tests_utils", "../../../rtc_base/experiments:alr_experiment", "../../../system_wrappers", + "../../../test:explicit_key_value_config", "../../../test:field_trial", "../../../test:test_support", "../../../test/scenario", diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc index 8abe6d6884..d5b1a13fcc 100644 --- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc @@ -35,10 +35,10 @@ RobustThroughputEstimatorSettings::RobustThroughputEstimatorSettings( 
initial_packets = 20; } initial_packets = std::min(initial_packets, min_packets); - if (window_duration < TimeDelta::ms(100) || - TimeDelta::ms(2000) < window_duration) { + if (window_duration < TimeDelta::Millis(100) || + TimeDelta::Millis(2000) < window_duration) { RTC_LOG(LS_WARNING) << "Window duration must be between 100 and 2000 ms"; - window_duration = TimeDelta::ms(500); + window_duration = TimeDelta::Millis(500); } if (unacked_weight < 0.0 || 1.0 < unacked_weight) { RTC_LOG(LS_WARNING) diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h index fb257cf3f4..f802191a2c 100644 --- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h @@ -47,7 +47,7 @@ struct RobustThroughputEstimatorSettings { // The estimator window keeps at least |min_packets| packets and up to // kMaxPackets received during the last |window_duration|. unsigned min_packets = 20; - TimeDelta window_duration = TimeDelta::ms(500); + TimeDelta window_duration = TimeDelta::Millis(500); // The estimator window requires at least |initial_packets| packets received // over at least |initial_duration|. 
diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc index 83ead59e18..e5b733b119 100644 --- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc @@ -35,10 +35,12 @@ constexpr size_t kPayloadSize = 10; class MockBitrateEstimator : public BitrateEstimator { public: using BitrateEstimator::BitrateEstimator; - MOCK_METHOD3(Update, - void(Timestamp at_time, DataSize data_size, bool in_alr)); - MOCK_CONST_METHOD0(bitrate, absl::optional()); - MOCK_METHOD0(ExpectFastRateChange, void()); + MOCK_METHOD(void, + Update, + (Timestamp at_time, DataSize data_size, bool in_alr), + (override)); + MOCK_METHOD(absl::optional, bitrate, (), (const, override)); + MOCK_METHOD(void, ExpectFastRateChange, (), (override)); }; struct AcknowledgedBitrateEstimatorTestStates { @@ -60,18 +62,19 @@ AcknowledgedBitrateEstimatorTestStates CreateTestStates() { std::vector CreateFeedbackVector() { std::vector packet_feedback_vector(2); - packet_feedback_vector[0].receive_time = Timestamp::ms(kFirstArrivalTimeMs); + packet_feedback_vector[0].receive_time = + Timestamp::Millis(kFirstArrivalTimeMs); packet_feedback_vector[0].sent_packet.send_time = - Timestamp::ms(kFirstSendTimeMs); + Timestamp::Millis(kFirstSendTimeMs); packet_feedback_vector[0].sent_packet.sequence_number = kSequenceNumber; - packet_feedback_vector[0].sent_packet.size = DataSize::bytes(kPayloadSize); + packet_feedback_vector[0].sent_packet.size = DataSize::Bytes(kPayloadSize); packet_feedback_vector[1].receive_time = - Timestamp::ms(kFirstArrivalTimeMs + 10); + Timestamp::Millis(kFirstArrivalTimeMs + 10); packet_feedback_vector[1].sent_packet.send_time = - Timestamp::ms(kFirstSendTimeMs + 10); + Timestamp::Millis(kFirstSendTimeMs + 10); packet_feedback_vector[1].sent_packet.sequence_number = 
kSequenceNumber; packet_feedback_vector[1].sent_packet.size = - DataSize::bytes(kPayloadSize + 10); + DataSize::Bytes(kPayloadSize + 10); return packet_feedback_vector; } @@ -116,14 +119,14 @@ TEST(TestAcknowledgedBitrateEstimator, ExpectFastRateChangeWhenLeftAlr) { .Times(1); } states.acknowledged_bitrate_estimator->SetAlrEndedTime( - Timestamp::ms(kFirstArrivalTimeMs + 1)); + Timestamp::Millis(kFirstArrivalTimeMs + 1)); states.acknowledged_bitrate_estimator->IncomingPacketFeedbackVector( packet_feedback_vector); } TEST(TestAcknowledgedBitrateEstimator, ReturnBitrate) { auto states = CreateTestStates(); - absl::optional return_value = DataRate::kbps(42); + absl::optional return_value = DataRate::KilobitsPerSec(42); EXPECT_CALL(*states.mock_bitrate_estimator, bitrate()) .Times(1) .WillOnce(Return(return_value)); diff --git a/modules/congestion_controller/goog_cc/alr_detector.h b/modules/congestion_controller/goog_cc/alr_detector.h index d83ed760e7..ee3fe92845 100644 --- a/modules/congestion_controller/goog_cc/alr_detector.h +++ b/modules/congestion_controller/goog_cc/alr_detector.h @@ -60,9 +60,6 @@ class AlrDetector { // started or empty result if the sender is currently not application-limited. 
absl::optional GetApplicationLimitedRegionStartTime() const; - void UpdateBudgetWithElapsedTime(int64_t delta_time_ms); - void UpdateBudgetWithBytesSent(size_t bytes_sent); - private: friend class GoogCcStatePrinter; const AlrDetectorConfig conf_; diff --git a/modules/congestion_controller/goog_cc/bitrate_estimator.cc b/modules/congestion_controller/goog_cc/bitrate_estimator.cc index a68f33ec87..09b214a798 100644 --- a/modules/congestion_controller/goog_cc/bitrate_estimator.cc +++ b/modules/congestion_controller/goog_cc/bitrate_estimator.cc @@ -148,12 +148,12 @@ float BitrateEstimator::UpdateWindow(int64_t now_ms, absl::optional BitrateEstimator::bitrate() const { if (bitrate_estimate_kbps_ < 0.f) return absl::nullopt; - return DataRate::kbps(bitrate_estimate_kbps_); + return DataRate::KilobitsPerSec(bitrate_estimate_kbps_); } absl::optional BitrateEstimator::PeekRate() const { if (current_window_ms_ > 0) - return DataSize::bytes(sum_) / TimeDelta::ms(current_window_ms_); + return DataSize::Bytes(sum_) / TimeDelta::Millis(current_window_ms_); return absl::nullopt; } diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc index 479fefc565..ec642823df 100644 --- a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/strings/match.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/rate_control_settings.h" @@ -24,8 +25,9 @@ namespace webrtc { CongestionWindowPushbackController::CongestionWindowPushbackController( const WebRtcKeyValueConfig* key_value_config) : add_pacing_( - key_value_config->Lookup("WebRTC-AddPacingToCongestionWindowPushback") - .find("Enabled") == 0), + absl::StartsWith(key_value_config->Lookup( + "WebRTC-AddPacingToCongestionWindowPushback"), + "Enabled")), 
min_pushback_target_bitrate_bps_( RateControlSettings::ParseFromKeyValueConfig(key_value_config) .CongestionWindowMinPushbackTargetBitrateBps()), diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc index 426d9e182f..62dde02323 100644 --- a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc @@ -37,20 +37,20 @@ class CongestionWindowPushbackControllerTest : public ::testing::Test { TEST_F(CongestionWindowPushbackControllerTest, FullCongestionWindow) { cwnd_controller_->UpdateOutstandingData(100000); - cwnd_controller_->SetDataWindow(DataSize::bytes(50000)); + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); uint32_t bitrate_bps = 80000; bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(72000u, bitrate_bps); - cwnd_controller_->SetDataWindow(DataSize::bytes(50000)); + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(static_cast(72000 * 0.9 * 0.9), bitrate_bps); } TEST_F(CongestionWindowPushbackControllerTest, NormalCongestionWindow) { cwnd_controller_->UpdateOutstandingData(199999); - cwnd_controller_->SetDataWindow(DataSize::bytes(200000)); + cwnd_controller_->SetDataWindow(DataSize::Bytes(200000)); uint32_t bitrate_bps = 80000; bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); @@ -59,13 +59,13 @@ TEST_F(CongestionWindowPushbackControllerTest, NormalCongestionWindow) { TEST_F(CongestionWindowPushbackControllerTest, LowBitrate) { cwnd_controller_->UpdateOutstandingData(100000); - cwnd_controller_->SetDataWindow(DataSize::bytes(50000)); + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); uint32_t bitrate_bps = 35000; bitrate_bps = 
cwnd_controller_->UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(static_cast(35000 * 0.9), bitrate_bps); - cwnd_controller_->SetDataWindow(DataSize::bytes(20000)); + cwnd_controller_->SetDataWindow(DataSize::Bytes(20000)); bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(30000u, bitrate_bps); } @@ -89,5 +89,17 @@ TEST_F(CongestionWindowPushbackControllerTest, PushbackOnInititialDataWindow) { EXPECT_GT(80000u, bitrate_bps); } +TEST_F(CongestionWindowPushbackControllerTest, PushbackDropFrame) { + test::ScopedFieldTrials trials("WebRTC-CongestionWindow/DropFrame:true/"); + cwnd_controller_.reset( + new CongestionWindowPushbackController(&field_trial_config_)); + cwnd_controller_->UpdateOutstandingData(1e8); // Large number + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); + + uint32_t bitrate_bps = 80000; + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_GT(80000u, bitrate_bps); +} + } // namespace test } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.cc b/modules/congestion_controller/goog_cc/delay_based_bwe.cc index 0a84284572..1c02301284 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe.cc +++ b/modules/congestion_controller/goog_cc/delay_based_bwe.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/strings/match.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" @@ -28,21 +29,24 @@ namespace webrtc { namespace { -constexpr TimeDelta kStreamTimeOut = TimeDelta::Seconds<2>(); +constexpr TimeDelta kStreamTimeOut = TimeDelta::Seconds(2); constexpr int kTimestampGroupLengthMs = 5; constexpr int kAbsSendTimeFraction = 18; constexpr int kAbsSendTimeInterArrivalUpshift = 8; constexpr int kInterArrivalShift = kAbsSendTimeFraction + kAbsSendTimeInterArrivalUpshift; +constexpr int kTimestampGroupTicks = + (kTimestampGroupLengthMs << 
kInterArrivalShift) / 1000; constexpr double kTimestampToMs = 1000.0 / static_cast(1 << kInterArrivalShift); + // This ssrc is used to fulfill the current API but will be removed // after the API has been changed. constexpr uint32_t kFixedSsrc = 0; - } // namespace constexpr char BweIgnoreSmallPacketsSettings::kKey[]; +constexpr char BweSeparateAudioPacketsSettings::kKey[]; BweIgnoreSmallPacketsSettings::BweIgnoreSmallPacketsSettings( const WebRtcKeyValueConfig* key_value_config) { @@ -58,6 +62,20 @@ BweIgnoreSmallPacketsSettings::Parser() { "small", &small_threshold); } +BweSeparateAudioPacketsSettings::BweSeparateAudioPacketsSettings( + const WebRtcKeyValueConfig* key_value_config) { + Parser()->Parse( + key_value_config->Lookup(BweSeparateAudioPacketsSettings::kKey)); +} + +std::unique_ptr +BweSeparateAudioPacketsSettings::Parser() { + return StructParametersParser::Create( // + "enabled", &enabled, // + "packet_threshold", &packet_threshold, // + "time_threshold", &time_threshold); +} + DelayBasedBwe::Result::Result() : updated(false), probe(false), @@ -72,8 +90,6 @@ DelayBasedBwe::Result::Result(bool probe, DataRate target_bitrate) recovered_from_overuse(false), backoff_in_alr(false) {} -DelayBasedBwe::Result::~Result() {} - DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, RtcEventLog* event_log, NetworkStatePredictor* network_state_predictor) @@ -81,21 +97,30 @@ DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, key_value_config_(key_value_config), ignore_small_(key_value_config), fraction_large_packets_(0.5), + separate_audio_(key_value_config), + audio_packets_since_last_video_(0), + last_video_packet_recv_time_(Timestamp::MinusInfinity()), network_state_predictor_(network_state_predictor), - inter_arrival_(), - delay_detector_( + video_inter_arrival_(), + video_delay_detector_( new TrendlineEstimator(key_value_config_, network_state_predictor_)), + audio_inter_arrival_(), + audio_delay_detector_( + new 
TrendlineEstimator(key_value_config_, network_state_predictor_)), + active_delay_detector_(video_delay_detector_.get()), last_seen_packet_(Timestamp::MinusInfinity()), uma_recorded_(false), rate_control_(key_value_config, /*send_side=*/true), prev_bitrate_(DataRate::Zero()), has_once_detected_overuse_(false), prev_state_(BandwidthUsage::kBwNormal), - alr_limited_backoff_enabled_( - key_value_config->Lookup("WebRTC-Bwe-AlrLimitedBackoff") - .find("Enabled") == 0) { - RTC_LOG(LS_INFO) << "Initialized DelayBasedBwe with field trial " + alr_limited_backoff_enabled_(absl::StartsWith( + key_value_config->Lookup("WebRTC-Bwe-AlrLimitedBackoff"), + "Enabled")) { + RTC_LOG(LS_INFO) << "Initialized DelayBasedBwe with small packet filtering " << ignore_small_.Parser()->Encode() + << ", separate audio overuse detection" + << separate_audio_.Parser()->Encode() << " and alr limited backoff " << (alr_limited_backoff_enabled_ ? "enabled" : "disabled"); } @@ -127,15 +152,15 @@ DelayBasedBwe::Result DelayBasedBwe::IncomingPacketFeedbackVector( } bool delayed_feedback = true; bool recovered_from_overuse = false; - BandwidthUsage prev_detector_state = delay_detector_->State(); + BandwidthUsage prev_detector_state = active_delay_detector_->State(); for (const auto& packet_feedback : packet_feedback_vector) { delayed_feedback = false; IncomingPacketFeedback(packet_feedback, msg.feedback_time); if (prev_detector_state == BandwidthUsage::kBwUnderusing && - delay_detector_->State() == BandwidthUsage::kBwNormal) { + active_delay_detector_->State() == BandwidthUsage::kBwNormal) { recovered_from_overuse = true; } - prev_detector_state = delay_detector_->State(); + prev_detector_state = active_delay_detector_->State(); } if (delayed_feedback) { @@ -155,25 +180,18 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, // Reset if the stream has timed out. 
if (last_seen_packet_.IsInfinite() || at_time - last_seen_packet_ > kStreamTimeOut) { - inter_arrival_.reset( - new InterArrival((kTimestampGroupLengthMs << kInterArrivalShift) / 1000, - kTimestampToMs, true)); - delay_detector_.reset( + video_inter_arrival_.reset( + new InterArrival(kTimestampGroupTicks, kTimestampToMs, true)); + video_delay_detector_.reset( + new TrendlineEstimator(key_value_config_, network_state_predictor_)); + audio_inter_arrival_.reset( + new InterArrival(kTimestampGroupTicks, kTimestampToMs, true)); + audio_delay_detector_.reset( new TrendlineEstimator(key_value_config_, network_state_predictor_)); + active_delay_detector_ = video_delay_detector_.get(); } last_seen_packet_ = at_time; - uint32_t send_time_24bits = - static_cast( - ((static_cast(packet_feedback.sent_packet.send_time.ms()) - << kAbsSendTimeFraction) + - 500) / - 1000) & - 0x00FFFFFF; - // Shift up send time to use the full 32 bits that inter_arrival works with, - // so wrapping works properly. - uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift; - // Ignore "small" packets if many/most packets in the call are "large". The // packet size may have a significant effect on the propagation delay, // especially at low bandwidths. Variations in packet size will then show up @@ -190,17 +208,51 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, } } - uint32_t ts_delta = 0; - int64_t t_delta = 0; + // As an alternative to ignoring small packets, we can separate audio and + // video packets for overuse detection. 
+ InterArrival* inter_arrival_for_packet = video_inter_arrival_.get(); + DelayIncreaseDetectorInterface* delay_detector_for_packet = + video_delay_detector_.get(); + if (separate_audio_.enabled) { + if (packet_feedback.sent_packet.audio) { + inter_arrival_for_packet = audio_inter_arrival_.get(); + delay_detector_for_packet = audio_delay_detector_.get(); + audio_packets_since_last_video_++; + if (audio_packets_since_last_video_ > separate_audio_.packet_threshold && + packet_feedback.receive_time - last_video_packet_recv_time_ > + separate_audio_.time_threshold) { + active_delay_detector_ = audio_delay_detector_.get(); + } + } else { + audio_packets_since_last_video_ = 0; + last_video_packet_recv_time_ = + std::max(last_video_packet_recv_time_, packet_feedback.receive_time); + active_delay_detector_ = video_delay_detector_.get(); + } + } + + uint32_t send_time_24bits = + static_cast( + ((static_cast(packet_feedback.sent_packet.send_time.ms()) + << kAbsSendTimeFraction) + + 500) / + 1000) & + 0x00FFFFFF; + // Shift up send time to use the full 32 bits that inter_arrival works with, + // so wrapping works properly. 
+ uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift; + + uint32_t timestamp_delta = 0; + int64_t recv_delta_ms = 0; int size_delta = 0; - bool calculated_deltas = inter_arrival_->ComputeDeltas( + bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( timestamp, packet_feedback.receive_time.ms(), at_time.ms(), - packet_size.bytes(), &ts_delta, &t_delta, &size_delta); - double ts_delta_ms = (1000.0 * ts_delta) / (1 << kInterArrivalShift); - delay_detector_->Update(t_delta, ts_delta_ms, - packet_feedback.sent_packet.send_time.ms(), - packet_feedback.receive_time.ms(), - packet_size.bytes(), calculated_deltas); + packet_size.bytes(), &timestamp_delta, &recv_delta_ms, &size_delta); + double send_delta_ms = (1000.0 * timestamp_delta) / (1 << kInterArrivalShift); + delay_detector_for_packet->Update(recv_delta_ms, send_delta_ms, + packet_feedback.sent_packet.send_time.ms(), + packet_feedback.receive_time.ms(), + packet_size.bytes(), calculated_deltas); } DataRate DelayBasedBwe::TriggerOveruse(Timestamp at_time, @@ -219,7 +271,7 @@ DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( Result result; // Currently overusing the bandwidth. - if (delay_detector_->State() == BandwidthUsage::kBwOverusing) { + if (active_delay_detector_->State() == BandwidthUsage::kBwOverusing) { if (has_once_detected_overuse_ && in_alr && alr_limited_backoff_enabled_) { if (rate_control_.TimeToReduceFurther(at_time, prev_bitrate_)) { result.updated =
result.target_bitrate : prev_bitrate_; @@ -275,7 +327,7 @@ DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( bool DelayBasedBwe::UpdateEstimate(Timestamp at_time, absl::optional acked_bitrate, DataRate* target_rate) { - const RateControlInput input(delay_detector_->State(), acked_bitrate); + const RateControlInput input(active_delay_detector_->State(), acked_bitrate); *target_rate = rate_control_.Update(&input, at_time); return rate_control_.ValidEstimate(); } diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.h b/modules/congestion_controller/goog_cc/delay_based_bwe.h index 03845949a4..74650dc822 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -26,7 +26,6 @@ #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/inter_arrival.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" @@ -48,12 +47,26 @@ struct BweIgnoreSmallPacketsSettings { std::unique_ptr Parser(); }; +struct BweSeparateAudioPacketsSettings { + static constexpr char kKey[] = "WebRTC-Bwe-SeparateAudioPackets"; + + BweSeparateAudioPacketsSettings() = default; + explicit BweSeparateAudioPacketsSettings( + const WebRtcKeyValueConfig* key_value_config); + + bool enabled = false; + int packet_threshold = 10; + TimeDelta time_threshold = TimeDelta::Seconds(1); + + std::unique_ptr Parser(); +}; + class DelayBasedBwe { public: struct Result { Result(); Result(bool probe, DataRate target_bitrate); - ~Result(); + ~Result() = default; bool updated; bool probe; DataRate target_bitrate = DataRate::Zero(); @@ -64,6 +77,11 @@ class DelayBasedBwe { explicit DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, RtcEventLog* event_log, NetworkStatePredictor* network_state_predictor); + + DelayBasedBwe() = 
delete; + DelayBasedBwe(const DelayBasedBwe&) = delete; + DelayBasedBwe& operator=(const DelayBasedBwe&) = delete; + virtual ~DelayBasedBwe(); Result IncomingPacketFeedbackVector( @@ -108,9 +126,20 @@ class DelayBasedBwe { BweIgnoreSmallPacketsSettings ignore_small_; double fraction_large_packets_; + // Alternatively, run two separate overuse detectors for audio and video, + // and fall back to the audio one if we haven't seen a video packet in a + // while. + BweSeparateAudioPacketsSettings separate_audio_; + int64_t audio_packets_since_last_video_; + Timestamp last_video_packet_recv_time_; + NetworkStatePredictor* network_state_predictor_; - std::unique_ptr inter_arrival_; - std::unique_ptr delay_detector_; + std::unique_ptr video_inter_arrival_; + std::unique_ptr video_delay_detector_; + std::unique_ptr audio_inter_arrival_; + std::unique_ptr audio_delay_detector_; + DelayIncreaseDetectorInterface* active_delay_detector_; + Timestamp last_seen_packet_; bool uma_recorded_; AimdRateControl rate_control_; @@ -118,7 +147,6 @@ class DelayBasedBwe { bool has_once_detected_overuse_; BandwidthUsage prev_state_; bool alr_limited_backoff_enabled_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DelayBasedBwe); }; } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc index 9d8d226c61..7860c3d84d 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc +++ b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc @@ -174,8 +174,8 @@ TEST_F(DelayBasedBweTest, TestLongTimeoutAndWrap) { } TEST_F(DelayBasedBweTest, TestInitialOveruse) { - const DataRate kStartBitrate = DataRate::kbps(300); - const DataRate kInitialCapacity = DataRate::kbps(200); + const DataRate kStartBitrate = DataRate::KilobitsPerSec(300); + const DataRate kInitialCapacity = DataRate::KilobitsPerSec(200); const uint32_t kDummySsrc = 0; // High FPS to ensure that we send a lot 
of packets in a short time. const int kFps = 90; @@ -222,8 +222,8 @@ class DelayBasedBweTestWithBackoffTimeoutExperiment : public DelayBasedBweTest { // This test subsumes and improves DelayBasedBweTest.TestInitialOveruse above. TEST_F(DelayBasedBweTestWithBackoffTimeoutExperiment, TestInitialOveruse) { - const DataRate kStartBitrate = DataRate::kbps(300); - const DataRate kInitialCapacity = DataRate::kbps(200); + const DataRate kStartBitrate = DataRate::KilobitsPerSec(300); + const DataRate kInitialCapacity = DataRate::KilobitsPerSec(200); const uint32_t kDummySsrc = 0; // High FPS to ensure that we send a lot of packets in a short time. const int kFps = 90; diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc index 92e6f02f52..14bac1e455 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc +++ b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc @@ -53,8 +53,8 @@ int64_t RtpStream::GenerateFrame(int64_t time_now_us, for (size_t i = 0; i < n_packets; ++i) { PacketResult packet; packet.sent_packet.send_time = - Timestamp::us(time_now_us + kSendSideOffsetUs); - packet.sent_packet.size = DataSize::bytes(payload_size); + Timestamp::Micros(time_now_us + kSendSideOffsetUs); + packet.sent_packet.size = DataSize::Bytes(payload_size); packets->push_back(packet); } next_rtp_time_ = time_now_us + (1000000 + fps_ / 2) / fps_; @@ -137,7 +137,7 @@ int64_t StreamGenerator::GenerateFrame(std::vector* packets, prev_arrival_time_us_ = std::max(time_now_us + required_network_time_us, prev_arrival_time_us_ + required_network_time_us); - packet.receive_time = Timestamp::us(prev_arrival_time_us_); + packet.receive_time = Timestamp::Micros(prev_arrival_time_us_); ++i; } it = std::min_element(streams_.begin(), streams_.end(), RtpStream::Compare); @@ -194,16 +194,16 @@ void DelayBasedBweTest::IncomingFeedback(int64_t 
arrival_time_ms, RTC_CHECK_GE(arrival_time_ms + arrival_time_offset_ms_, 0); PacketResult packet; packet.receive_time = - Timestamp::ms(arrival_time_ms + arrival_time_offset_ms_); - packet.sent_packet.send_time = Timestamp::ms(send_time_ms); - packet.sent_packet.size = DataSize::bytes(payload_size); + Timestamp::Millis(arrival_time_ms + arrival_time_offset_ms_); + packet.sent_packet.send_time = Timestamp::Millis(send_time_ms); + packet.sent_packet.size = DataSize::Bytes(payload_size); packet.sent_packet.pacing_info = pacing_info; if (packet.sent_packet.pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe) probe_bitrate_estimator_->HandleProbeAndEstimateBitrate(packet); TransportPacketsFeedback msg; - msg.feedback_time = Timestamp::ms(clock_.TimeInMilliseconds()); + msg.feedback_time = Timestamp::Millis(clock_.TimeInMilliseconds()); msg.packet_feedbacks.push_back(packet); acknowledged_bitrate_estimator_->IncomingPacketFeedbackVector( msg.SortedByReceiveTime()); @@ -239,7 +239,7 @@ bool DelayBasedBweTest::GenerateAndProcessFrame(uint32_t ssrc, clock_.TimeInMicroseconds()); for (auto& packet : packets) { RTC_CHECK_GE(packet.receive_time.ms() + arrival_time_offset_ms_, 0); - packet.receive_time += TimeDelta::ms(arrival_time_offset_ms_); + packet.receive_time += TimeDelta::Millis(arrival_time_offset_ms_); if (packet.sent_packet.pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe) @@ -249,7 +249,7 @@ bool DelayBasedBweTest::GenerateAndProcessFrame(uint32_t ssrc, acknowledged_bitrate_estimator_->IncomingPacketFeedbackVector(packets); TransportPacketsFeedback msg; msg.packet_feedbacks = packets; - msg.feedback_time = Timestamp::ms(clock_.TimeInMilliseconds()); + msg.feedback_time = Timestamp::Millis(clock_.TimeInMilliseconds()); DelayBasedBwe::Result result = bitrate_estimator_->IncomingPacketFeedbackVector( diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index 
852c9574ad..0a0b1801f2 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/strings/match.h" #include "api/units/time_delta.h" #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" @@ -34,14 +35,20 @@ namespace webrtc { namespace { // From RTCPSender video report interval. -constexpr TimeDelta kLossUpdateInterval = TimeDelta::Millis<1000>(); +constexpr TimeDelta kLossUpdateInterval = TimeDelta::Millis(1000); // Pacing-rate relative to our target send rate. // Multiplicative factor that is applied to the target bitrate to calculate // the number of bytes that can be transmitted per interval. // Increasing this factor will result in lower delays in cases of bitrate // overshoots from the encoder. -const float kDefaultPaceMultiplier = 2.5f; +constexpr float kDefaultPaceMultiplier = 2.5f; + +// If the probe result is far below the current throughput estimate +// it's unlikely that the probe is accurate, so we don't want to drop too far. +// However, if we actually are overusing, we want to drop to something slightly +// below the current throughput estimate to drain the network queues. 
+constexpr double kProbeDropThroughputFraction = 0.85; int64_t GetBpsOrDefault(const absl::optional& rate, int64_t fallback_bps) { @@ -53,11 +60,11 @@ int64_t GetBpsOrDefault(const absl::optional& rate, } bool IsEnabled(const WebRtcKeyValueConfig* config, absl::string_view key) { - return config->Lookup(key).find("Enabled") == 0; + return absl::StartsWith(config->Lookup(key), "Enabled"); } bool IsNotDisabled(const WebRtcKeyValueConfig* config, absl::string_view key) { - return config->Lookup(key).find("Disabled") != 0; + return !absl::StartsWith(config->Lookup(key), "Disabled"); } } // namespace @@ -74,6 +81,9 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, ignore_probes_lower_than_network_estimate_(IsNotDisabled( key_value_config_, "WebRTC-Bwe-IgnoreProbesLowerThanNetworkStateEstimate")), + limit_probes_lower_than_throughput_estimate_( + IsEnabled(key_value_config_, + "WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate")), rate_control_settings_( RateControlSettings::ParseFromKeyValueConfig(key_value_config_)), loss_based_stable_rate_( @@ -86,7 +96,8 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, key_value_config_) : nullptr), bandwidth_estimation_( - std::make_unique(event_log_)), + std::make_unique(key_value_config_, + event_log_)), alr_detector_( std::make_unique(key_value_config_, config.event_log)), probe_bitrate_estimator_(new ProbeBitrateEstimator(config.event_log)), @@ -101,6 +112,7 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, initial_config_(config), last_loss_based_target_rate_(*config.constraints.starting_rate), last_pushback_target_rate_(last_loss_based_target_rate_), + last_stable_target_rate_(last_loss_based_target_rate_), pacing_factor_(config.stream_based_config.pacing_factor.value_or( kDefaultPaceMultiplier)), min_total_allocated_bitrate_( @@ -372,13 +384,13 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportLossReport( } void 
GoogCcNetworkController::UpdateCongestionWindowSize() { - TimeDelta min_feedback_max_rtt = TimeDelta::ms( + TimeDelta min_feedback_max_rtt = TimeDelta::Millis( *std::min_element(feedback_max_rtts_.begin(), feedback_max_rtts_.end())); - const DataSize kMinCwnd = DataSize::bytes(2 * 1500); + const DataSize kMinCwnd = DataSize::Bytes(2 * 1500); TimeDelta time_window = min_feedback_max_rtt + - TimeDelta::ms( + TimeDelta::Millis( rate_control_settings_.GetCongestionWindowAdditionalTimeMs()); DataSize data_window = last_loss_based_target_rate_ * time_window; @@ -435,7 +447,7 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( feedback_max_rtts_.end(), 0); int64_t mean_rtt_ms = sum_rtt_ms / feedback_max_rtts_.size(); if (delay_based_bwe_) - delay_based_bwe_->OnRttUpdate(TimeDelta::ms(mean_rtt_ms)); + delay_based_bwe_->OnRttUpdate(TimeDelta::Millis(mean_rtt_ms)); } TimeDelta feedback_min_rtt = TimeDelta::PlusInfinity(); @@ -491,7 +503,7 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( network_estimator_->OnTransportPacketsFeedback(report); auto prev_estimate = estimate_; estimate_ = network_estimator_->GetCurrentEstimate(); - // TODO(srte): Make OnTransportPacketsFeedback signal wether the state + // TODO(srte): Make OnTransportPacketsFeedback signal whether the state // changed to avoid the need for this check. if (estimate_ && (!prev_estimate || estimate_->last_feed_time != prev_estimate->last_feed_time)) { @@ -506,6 +518,20 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( *probe_bitrate < estimate_->link_capacity_lower) { probe_bitrate.reset(); } + if (limit_probes_lower_than_throughput_estimate_ && probe_bitrate && + acknowledged_bitrate) { + // Limit the backoff to something slightly below the acknowledged + // bitrate. ("Slightly below" because we want to drain the queues + // if we are actually overusing.) 
+ // The acknowledged bitrate shouldn't normally be higher than the delay + // based estimate, but it could happen e.g. due to packet bursts or + // encoder overshoot. We use std::min to ensure that a probe result + // below the current BWE never causes an increase. + DataRate limit = + std::min(delay_based_bwe_->last_estimate(), + *acknowledged_bitrate * kProbeDropThroughputFraction); + probe_bitrate = std::max(*probe_bitrate, limit); + } NetworkControlUpdate update; bool recovered_from_overuse = false; @@ -600,23 +626,38 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( BWE_TEST_LOGGING_PLOT(1, "Target_bitrate_kbps", at_time.ms(), loss_based_target_rate.kbps()); + double cwnd_reduce_ratio = 0.0; if (congestion_window_pushback_controller_) { int64_t pushback_rate = congestion_window_pushback_controller_->UpdateTargetBitrate( loss_based_target_rate.bps()); pushback_rate = std::max(bandwidth_estimation_->GetMinBitrate(), pushback_rate); - pushback_target_rate = DataRate::bps(pushback_rate); + pushback_target_rate = DataRate::BitsPerSec(pushback_rate); + if (rate_control_settings_.UseCongestionWindowDropFrameOnly()) { + cwnd_reduce_ratio = static_cast(loss_based_target_rate.bps() - + pushback_target_rate.bps()) / + loss_based_target_rate.bps(); + } + } + DataRate stable_target_rate = + bandwidth_estimation_->GetEstimatedLinkCapacity(); + if (loss_based_stable_rate_) { + stable_target_rate = std::min(stable_target_rate, loss_based_target_rate); + } else { + stable_target_rate = std::min(stable_target_rate, pushback_target_rate); } if ((loss_based_target_rate != last_loss_based_target_rate_) || (fraction_loss != last_estimated_fraction_loss_) || (round_trip_time != last_estimated_round_trip_time_) || - (pushback_target_rate != last_pushback_target_rate_)) { + (pushback_target_rate != last_pushback_target_rate_) || + (stable_target_rate != last_stable_target_rate_)) { last_loss_based_target_rate_ = loss_based_target_rate; last_pushback_target_rate_ = 
pushback_target_rate; last_estimated_fraction_loss_ = fraction_loss; last_estimated_round_trip_time_ = round_trip_time; + last_stable_target_rate_ = stable_target_rate; alr_detector_->SetEstimatedBitrate(loss_based_target_rate.bps()); @@ -624,16 +665,13 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( TargetTransferRate target_rate_msg; target_rate_msg.at_time = at_time; - target_rate_msg.target_rate = pushback_target_rate; - if (loss_based_stable_rate_) { - target_rate_msg.stable_target_rate = - std::min(bandwidth_estimation_->GetEstimatedLinkCapacity(), - loss_based_target_rate); + if (rate_control_settings_.UseCongestionWindowDropFrameOnly()) { + target_rate_msg.target_rate = loss_based_target_rate; + target_rate_msg.cwnd_reduce_ratio = cwnd_reduce_ratio; } else { - target_rate_msg.stable_target_rate = - std::min(bandwidth_estimation_->GetEstimatedLinkCapacity(), - pushback_target_rate); + target_rate_msg.target_rate = pushback_target_rate; } + target_rate_msg.stable_target_rate = stable_target_rate; target_rate_msg.network_estimate.at_time = at_time; target_rate_msg.network_estimate.round_trip_time = round_trip_time; target_rate_msg.network_estimate.loss_rate_ratio = fraction_loss / 255.0f; @@ -663,7 +701,7 @@ PacerConfig GoogCcNetworkController::GetPacingRates(Timestamp at_time) const { std::min(max_padding_rate_, last_pushback_target_rate_); PacerConfig msg; msg.at_time = at_time; - msg.time_window = TimeDelta::seconds(1); + msg.time_window = TimeDelta::Seconds(1); msg.data_window = pacing_rate * msg.time_window; msg.pad_window = padding_rate * msg.time_window; return msg; diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/modules/congestion_controller/goog_cc/goog_cc_network_control.h index ae17b3ab39..6dd70c8969 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control.h +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -33,7 +33,6 @@ #include 
"modules/congestion_controller/goog_cc/delay_based_bwe.h" #include "modules/congestion_controller/goog_cc/probe_controller.h" #include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" @@ -48,6 +47,11 @@ class GoogCcNetworkController : public NetworkControllerInterface { public: GoogCcNetworkController(NetworkControllerConfig config, GoogCcConfig goog_cc_config); + + GoogCcNetworkController() = delete; + GoogCcNetworkController(const GoogCcNetworkController&) = delete; + GoogCcNetworkController& operator=(const GoogCcNetworkController&) = delete; + ~GoogCcNetworkController() override; // NetworkControllerInterface @@ -87,6 +91,7 @@ class GoogCcNetworkController : public NetworkControllerInterface { FieldTrialFlag safe_reset_acknowledged_rate_; const bool use_min_allocatable_as_lower_bound_; const bool ignore_probes_lower_than_network_estimate_; + const bool limit_probes_lower_than_throughput_estimate_; const RateControlSettings rate_control_settings_; const bool loss_based_stable_rate_; @@ -122,6 +127,7 @@ class GoogCcNetworkController : public NetworkControllerInterface { DataRate last_loss_based_target_rate_; DataRate last_pushback_target_rate_; + DataRate last_stable_target_rate_; absl::optional last_estimated_fraction_loss_ = 0; TimeDelta last_estimated_round_trip_time_ = TimeDelta::PlusInfinity(); @@ -135,8 +141,6 @@ class GoogCcNetworkController : public NetworkControllerInterface { bool previously_in_alr_ = false; absl::optional current_data_window_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(GoogCcNetworkController); }; } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc index 09aec436c1..33550d2a51 100644 --- 
a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc @@ -56,7 +56,7 @@ GoogCcNetworkControllerFactory CreateFeedbackOnlyFactory() { } const uint32_t kInitialBitrateKbps = 60; -const DataRate kInitialBitrate = DataRate::kbps(kInitialBitrateKbps); +const DataRate kInitialBitrate = DataRate::KilobitsPerSec(kInitialBitrateKbps); const float kDefaultPacingRate = 2.5f; CallClient* CreateVideoSendingClient( @@ -78,16 +78,16 @@ void UpdatesTargetRateBasedOnLinkCapacity(std::string test_name = "") { Scenario s("googcc_unit/target_capacity" + test_name, false); CallClientConfig config; config.transport.cc_factory = &factory; - config.transport.rates.min_rate = DataRate::kbps(10); - config.transport.rates.max_rate = DataRate::kbps(1500); - config.transport.rates.start_rate = DataRate::kbps(300); + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(1500); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); auto send_net = s.CreateMutableSimulationNode([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(500); - c->delay = TimeDelta::ms(100); + c->bandwidth = DataRate::KilobitsPerSec(500); + c->delay = TimeDelta::Millis(100); c->loss_rate = 0.0; }); auto ret_net = s.CreateMutableSimulationNode( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(100); }); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); StatesPrinter* truth = s.CreatePrinter( "send.truth.txt", TimeDelta::PlusInfinity(), {send_net->ConfigPrinter()}); @@ -95,29 +95,29 @@ void UpdatesTargetRateBasedOnLinkCapacity(std::string test_name = "") { {ret_net->node()}); truth->PrintRow(); - s.RunFor(TimeDelta::seconds(25)); + s.RunFor(TimeDelta::Seconds(25)); truth->PrintRow(); EXPECT_NEAR(client->target_rate().kbps(), 450, 100); send_net->UpdateConfig([](NetworkSimulationConfig* c) { - 
c->bandwidth = DataRate::kbps(800); - c->delay = TimeDelta::ms(100); + c->bandwidth = DataRate::KilobitsPerSec(800); + c->delay = TimeDelta::Millis(100); }); truth->PrintRow(); - s.RunFor(TimeDelta::seconds(20)); + s.RunFor(TimeDelta::Seconds(20)); truth->PrintRow(); EXPECT_NEAR(client->target_rate().kbps(), 750, 150); send_net->UpdateConfig([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(100); - c->delay = TimeDelta::ms(200); + c->bandwidth = DataRate::KilobitsPerSec(100); + c->delay = TimeDelta::Millis(200); }); ret_net->UpdateConfig( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(200); }); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(200); }); truth->PrintRow(); - s.RunFor(TimeDelta::seconds(50)); + s.RunFor(TimeDelta::Seconds(50)); truth->PrintRow(); EXPECT_NEAR(client->target_rate().kbps(), 90, 25); } @@ -126,7 +126,7 @@ void UpdatesTargetRateBasedOnLinkCapacity(std::string test_name = "") { class GoogCcNetworkControllerTest : public ::testing::Test { protected: GoogCcNetworkControllerTest() - : current_time_(Timestamp::ms(123456)), factory_() {} + : current_time_(Timestamp::Millis(123456)), factory_() {} ~GoogCcNetworkControllerTest() override {} void SetUp() override { @@ -155,9 +155,12 @@ class GoogCcNetworkControllerTest : public ::testing::Test { int max_data_rate_kbps = 5 * kInitialBitrateKbps) { NetworkControllerConfig config; config.constraints.at_time = current_time_; - config.constraints.min_data_rate = DataRate::kbps(min_data_rate_kbps); - config.constraints.max_data_rate = DataRate::kbps(max_data_rate_kbps); - config.constraints.starting_rate = DataRate::kbps(starting_bandwidth_kbps); + config.constraints.min_data_rate = + DataRate::KilobitsPerSec(min_data_rate_kbps); + config.constraints.max_data_rate = + DataRate::KilobitsPerSec(max_data_rate_kbps); + config.constraints.starting_rate = + DataRate::KilobitsPerSec(starting_bandwidth_kbps); config.event_log = &event_log_; return config; } @@ -178,10 
+181,10 @@ class GoogCcNetworkControllerTest : public ::testing::Test { PacedPacketInfo pacing_info) { PacketResult packet_result; packet_result.sent_packet = SentPacket(); - packet_result.sent_packet.send_time = Timestamp::ms(send_time_ms); - packet_result.sent_packet.size = DataSize::bytes(payload_size); + packet_result.sent_packet.send_time = Timestamp::Millis(send_time_ms); + packet_result.sent_packet.size = DataSize::Bytes(payload_size); packet_result.sent_packet.pacing_info = pacing_info; - packet_result.receive_time = Timestamp::ms(arrival_time_ms); + packet_result.receive_time = Timestamp::Millis(arrival_time_ms); return packet_result; } @@ -199,7 +202,7 @@ class GoogCcNetworkControllerTest : public ::testing::Test { } void AdvanceTimeMilliseconds(int timedelta_ms) { - current_time_ += TimeDelta::ms(timedelta_ms); + current_time_ += TimeDelta::Millis(timedelta_ms); } void OnUpdate(NetworkControlUpdate update) { @@ -263,24 +266,24 @@ TEST_F(GoogCcNetworkControllerTest, CongestionWindowPushbackOnNetworkDelay) { Scenario s("googcc_unit/cwnd_on_delay", false); auto send_net = s.CreateMutableSimulationNode([=](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(1000); - c->delay = TimeDelta::ms(100); + c->bandwidth = DataRate::KilobitsPerSec(1000); + c->delay = TimeDelta::Millis(100); }); auto ret_net = s.CreateSimulationNode( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(100); }); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); CallClientConfig config; config.transport.cc_factory = &factory; // Start high so bandwidth drop has max effect. 
- config.transport.rates.start_rate = DataRate::kbps(300); - config.transport.rates.max_rate = DataRate::kbps(2000); - config.transport.rates.min_rate = DataRate::kbps(10); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(2000); + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); auto* client = CreateVideoSendingClient(&s, std::move(config), {send_net->node()}, {ret_net}); - s.RunFor(TimeDelta::seconds(10)); - send_net->PauseTransmissionUntil(s.Now() + TimeDelta::seconds(10)); - s.RunFor(TimeDelta::seconds(3)); + s.RunFor(TimeDelta::Seconds(10)); + send_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(10)); + s.RunFor(TimeDelta::Seconds(3)); // After 3 seconds without feedback from any sent packets, we expect that the // target rate is reduced to the minimum pushback threshold @@ -289,9 +292,43 @@ TEST_F(GoogCcNetworkControllerTest, CongestionWindowPushbackOnNetworkDelay) { EXPECT_LT(client->target_rate().kbps(), 40); } +// Test congestion window pushback on network delay happens. +TEST_F(GoogCcNetworkControllerTest, + CongestionWindowPushbackDropFrameOnNetworkDelay) { + auto factory = CreateFeedbackOnlyFactory(); + ScopedFieldTrials trial( + "WebRTC-CongestionWindow/QueueSize:800,MinBitrate:30000,DropFrame:true/"); + Scenario s("googcc_unit/cwnd_on_delay", false); + auto send_net = + s.CreateMutableSimulationNode([=](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(1000); + c->delay = TimeDelta::Millis(100); + }); + auto ret_net = s.CreateSimulationNode( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); + CallClientConfig config; + config.transport.cc_factory = &factory; + // Start high so bandwidth drop has max effect. 
+ config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(2000); + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + + auto* client = CreateVideoSendingClient(&s, std::move(config), + {send_net->node()}, {ret_net}); + + s.RunFor(TimeDelta::Seconds(10)); + send_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(10)); + s.RunFor(TimeDelta::Seconds(3)); + + // As the dropframe is set, after 3 seconds without feedback from any sent + // packets, we expect that the target rate is not reduced by congestion + // window. + EXPECT_GT(client->target_rate().kbps(), 300); +} + TEST_F(GoogCcNetworkControllerTest, OnNetworkRouteChanged) { NetworkControlUpdate update; - DataRate new_bitrate = DataRate::bps(200000); + DataRate new_bitrate = DataRate::BitsPerSec(200000); update = controller_->OnNetworkRouteChange(CreateRouteChange(new_bitrate)); EXPECT_EQ(update.target_rate->target_rate, new_bitrate); EXPECT_EQ(update.pacer_config->data_rate(), new_bitrate * kDefaultPacingRate); @@ -299,7 +336,7 @@ TEST_F(GoogCcNetworkControllerTest, OnNetworkRouteChanged) { // If the bitrate is reset to -1, the new starting bitrate will be // the minimum default bitrate. 
- const DataRate kDefaultMinBitrate = DataRate::kbps(5); + const DataRate kDefaultMinBitrate = DataRate::KilobitsPerSec(5); update = controller_->OnNetworkRouteChange(CreateRouteChange()); EXPECT_EQ(update.target_rate->target_rate, kDefaultMinBitrate); EXPECT_NEAR(update.pacer_config->data_rate().bps(), @@ -350,15 +387,15 @@ TEST_F(GoogCcNetworkControllerTest, Scenario s("googcc_unit/padding_limited", false); auto send_net = s.CreateMutableSimulationNode([=](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(1000); - c->delay = TimeDelta::ms(100); + c->bandwidth = DataRate::KilobitsPerSec(1000); + c->delay = TimeDelta::Millis(100); }); auto ret_net = s.CreateSimulationNode( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(100); }); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); CallClientConfig config; // Start high so bandwidth drop has max effect. - config.transport.rates.start_rate = DataRate::kbps(1000); - config.transport.rates.max_rate = DataRate::kbps(2000); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(1000); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(2000); auto* client = s.CreateClient("send", config); auto* route = s.CreateRoutes(client, {send_net->node()}, @@ -368,13 +405,13 @@ TEST_F(GoogCcNetworkControllerTest, s.CreateVideoStream(route->forward(), video); // Run for a few seconds to allow the controller to stabilize. - s.RunFor(TimeDelta::seconds(10)); + s.RunFor(TimeDelta::Seconds(10)); // Check that padding rate matches target rate. EXPECT_NEAR(client->padding_rate().kbps(), client->target_rate().kbps(), 1); // Check this is also the case when congestion window pushback kicks in. 
- send_net->PauseTransmissionUntil(s.Now() + TimeDelta::seconds(1)); + send_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(1)); EXPECT_NEAR(client->padding_rate().kbps(), client->target_rate().kbps(), 1); } @@ -383,34 +420,34 @@ TEST_F(GoogCcNetworkControllerTest, LimitsToFloorIfRttIsHighInTrial) { // controller backs off until it reaches the minimum configured bitrate. This // allows the RTT to recover faster than the regular control mechanism would // achieve. - const DataRate kBandwidthFloor = DataRate::kbps(50); + const DataRate kBandwidthFloor = DataRate::KilobitsPerSec(50); ScopedFieldTrials trial("WebRTC-Bwe-MaxRttLimit/limit:2s,floor:" + std::to_string(kBandwidthFloor.kbps()) + "kbps/"); // In the test case, we limit the capacity and add a cross traffic packet // burst that blocks media from being sent. This causes the RTT to quickly // increase above the threshold in the trial. - const DataRate kLinkCapacity = DataRate::kbps(100); - const TimeDelta kBufferBloatDuration = TimeDelta::seconds(10); + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(100); + const TimeDelta kBufferBloatDuration = TimeDelta::Seconds(10); Scenario s("googcc_unit/limit_trial", false); auto send_net = s.CreateSimulationNode([=](NetworkSimulationConfig* c) { c->bandwidth = kLinkCapacity; - c->delay = TimeDelta::ms(100); + c->delay = TimeDelta::Millis(100); }); auto ret_net = s.CreateSimulationNode( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(100); }); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); CallClientConfig config; config.transport.rates.start_rate = kLinkCapacity; auto* client = CreateVideoSendingClient(&s, config, {send_net}, {ret_net}); // Run for a few seconds to allow the controller to stabilize. 
- s.RunFor(TimeDelta::seconds(10)); - const DataSize kBloatPacketSize = DataSize::bytes(1000); + s.RunFor(TimeDelta::Seconds(10)); + const DataSize kBloatPacketSize = DataSize::Bytes(1000); const int kBloatPacketCount = static_cast(kBufferBloatDuration * kLinkCapacity / kBloatPacketSize); // This will cause the RTT to be large for a while. s.TriggerPacketBurst({send_net}, kBloatPacketCount, kBloatPacketSize.bytes()); // Wait to allow the high RTT to be detected and acted upon. - s.RunFor(TimeDelta::seconds(6)); + s.RunFor(TimeDelta::Seconds(6)); // By now the target rate should have dropped to the minimum configured rate. EXPECT_NEAR(client->target_rate().kbps(), kBandwidthFloor.kbps(), 5); } @@ -425,14 +462,14 @@ TEST_F(GoogCcNetworkControllerTest, StableEstimateDoesNotVaryInSteadyState) { CallClientConfig config; config.transport.cc_factory = &factory; NetworkSimulationConfig net_conf; - net_conf.bandwidth = DataRate::kbps(500); - net_conf.delay = TimeDelta::ms(100); + net_conf.bandwidth = DataRate::KilobitsPerSec(500); + net_conf.delay = TimeDelta::Millis(100); auto send_net = s.CreateSimulationNode(net_conf); auto ret_net = s.CreateSimulationNode(net_conf); auto* client = CreateVideoSendingClient(&s, config, {send_net}, {ret_net}); // Run for a while to allow the estimate to stabilize. - s.RunFor(TimeDelta::seconds(30)); + s.RunFor(TimeDelta::Seconds(30)); DataRate min_stable_target = DataRate::PlusInfinity(); DataRate max_stable_target = DataRate::MinusInfinity(); DataRate min_target = DataRate::PlusInfinity(); @@ -448,7 +485,7 @@ TEST_F(GoogCcNetworkControllerTest, StableEstimateDoesNotVaryInSteadyState) { max_stable_target = std::max(max_stable_target, stable_target_rate); min_target = std::min(min_target, target_rate); max_target = std::max(max_target, target_rate); - s.RunFor(TimeDelta::seconds(1)); + s.RunFor(TimeDelta::Seconds(1)); } // We should expect drops by at least 15% (default backoff.) 
EXPECT_LT(min_target / max_target, 0.85); @@ -468,20 +505,20 @@ TEST_F(GoogCcNetworkControllerTest, ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); Scenario s("googcc_unit/high_loss_channel", false); CallClientConfig config; - config.transport.rates.min_rate = DataRate::kbps(10); - config.transport.rates.max_rate = DataRate::kbps(1500); - config.transport.rates.start_rate = DataRate::kbps(300); + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(1500); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); auto send_net = s.CreateSimulationNode([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(2000); - c->delay = TimeDelta::ms(200); + c->bandwidth = DataRate::KilobitsPerSec(2000); + c->delay = TimeDelta::Millis(200); c->loss_rate = 0.1; }); auto ret_net = s.CreateSimulationNode( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(200); }); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(200); }); auto* client = CreateVideoSendingClient(&s, config, {send_net}, {ret_net}); - s.RunFor(TimeDelta::seconds(120)); + s.RunFor(TimeDelta::Seconds(120)); // Without LossBasedControl trial, bandwidth drops to ~10 kbps. EXPECT_GT(client->target_rate().kbps(), 100); } @@ -489,8 +526,8 @@ TEST_F(GoogCcNetworkControllerTest, DataRate AverageBitrateAfterCrossInducedLoss(std::string name) { Scenario s(name, false); NetworkSimulationConfig net_conf; - net_conf.bandwidth = DataRate::kbps(1000); - net_conf.delay = TimeDelta::ms(100); + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(100); // Short queue length means that we'll induce loss when sudden TCP traffic // spikes are induced. This corresponds to ca 200 ms for a packet size of 1000 // bytes. Such limited buffers are common on for instance wifi routers. 
@@ -500,58 +537,65 @@ DataRate AverageBitrateAfterCrossInducedLoss(std::string name) { auto ret_net = {s.CreateSimulationNode(net_conf)}; auto* client = s.CreateClient("send", CallClientConfig()); - auto* route = s.CreateRoutes( - client, send_net, s.CreateClient("return", CallClientConfig()), ret_net); + auto* callee = s.CreateClient("return", CallClientConfig()); + auto* route = s.CreateRoutes(client, send_net, callee, ret_net); // TODO(srte): Make this work with RTX enabled or remove it. auto* video = s.CreateVideoStream(route->forward(), [](VideoStreamConfig* c) { c->stream.use_rtx = false; }); - s.RunFor(TimeDelta::seconds(10)); + s.RunFor(TimeDelta::Seconds(10)); for (int i = 0; i < 4; ++i) { // Sends TCP cross traffic inducing loss. auto* tcp_traffic = s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig()); - s.RunFor(TimeDelta::seconds(2)); + s.RunFor(TimeDelta::Seconds(2)); // Allow the ccongestion controller to recover. s.net()->StopCrossTraffic(tcp_traffic); - s.RunFor(TimeDelta::seconds(20)); + s.RunFor(TimeDelta::Seconds(20)); } - return DataSize::bytes(video->receive() - ->GetStats() - .rtp_stats.packet_counter.TotalBytes()) / + + // Querying the video stats from within the expected runtime environment + // (i.e. the TQ that belongs to the CallClient, not the Scenario TQ that + // we're currently on). + VideoReceiveStream::Stats video_receive_stats; + auto* video_stream = video->receive(); + callee->SendTask([&video_stream, &video_receive_stats]() { + video_receive_stats = video_stream->GetStats(); + }); + return DataSize::Bytes( + video_receive_stats.rtp_stats.packet_counter.TotalBytes()) / s.TimeSinceStart(); } TEST_F(GoogCcNetworkControllerTest, - NoLossBasedRecoversSlowerAfterCrossInducedLoss) { + LossBasedRecoversFasterAfterCrossInducedLoss) { // This test acts as a reference for the test below, showing that without the // trial, we have worse behavior. 
- DataRate average_bitrate = + DataRate average_bitrate_without_loss_based = AverageBitrateAfterCrossInducedLoss("googcc_unit/no_cross_loss_based"); - RTC_DCHECK_LE(average_bitrate, DataRate::kbps(650)); -} -TEST_F(GoogCcNetworkControllerTest, - LossBasedRecoversFasterAfterCrossInducedLoss) { // We recover bitrate better when subject to loss spikes from cross traffic // when loss based controller is used. ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); - DataRate average_bitrate = + SetUp(); + DataRate average_bitrate_with_loss_based = AverageBitrateAfterCrossInducedLoss("googcc_unit/cross_loss_based"); - RTC_DCHECK_GE(average_bitrate, DataRate::kbps(750)); + + EXPECT_GE(average_bitrate_with_loss_based, + average_bitrate_without_loss_based * 1.1); } TEST_F(GoogCcNetworkControllerTest, LossBasedEstimatorCapsRateAtModerateLoss) { ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); Scenario s("googcc_unit/moderate_loss_channel", false); CallClientConfig config; - config.transport.rates.min_rate = DataRate::kbps(10); - config.transport.rates.max_rate = DataRate::kbps(5000); - config.transport.rates.start_rate = DataRate::kbps(1000); + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(5000); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(1000); NetworkSimulationConfig network; - network.bandwidth = DataRate::kbps(2000); - network.delay = TimeDelta::ms(100); + network.bandwidth = DataRate::KilobitsPerSec(2000); + network.delay = TimeDelta::Millis(100); // 3% loss rate is in the moderate loss rate region at 2000 kbps, limiting the // bitrate increase. network.loss_rate = 0.03; @@ -562,26 +606,27 @@ TEST_F(GoogCcNetworkControllerTest, LossBasedEstimatorCapsRateAtModerateLoss) { {s.CreateSimulationNode(network)}); s.CreateVideoStream(route->forward(), VideoStreamConfig()); // Allow the controller to stabilize at the lower bitrate. 
- s.RunFor(TimeDelta::seconds(1)); + s.RunFor(TimeDelta::Seconds(1)); // This increase in capacity would cause the target bitrate to increase to // over 4000 kbps without LossBasedControl. - send_net->UpdateConfig( - [](NetworkSimulationConfig* c) { c->bandwidth = DataRate::kbps(5000); }); - s.RunFor(TimeDelta::seconds(20)); + send_net->UpdateConfig([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(5000); + }); + s.RunFor(TimeDelta::Seconds(20)); // Using LossBasedControl, the bitrate will not increase over 2500 kbps since // we have detected moderate loss. EXPECT_LT(client->target_rate().kbps(), 2500); } TEST_F(GoogCcNetworkControllerTest, MaintainsLowRateInSafeResetTrial) { - const DataRate kLinkCapacity = DataRate::kbps(200); - const DataRate kStartRate = DataRate::kbps(300); + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(200); + const DataRate kStartRate = DataRate::KilobitsPerSec(300); ScopedFieldTrials trial("WebRTC-Bwe-SafeResetOnRouteChange/Enabled/"); Scenario s("googcc_unit/safe_reset_low"); auto* send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { c->bandwidth = kLinkCapacity; - c->delay = TimeDelta::ms(10); + c->delay = TimeDelta::Millis(10); }); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { c->transport.rates.start_rate = kStartRate; @@ -591,24 +636,24 @@ TEST_F(GoogCcNetworkControllerTest, MaintainsLowRateInSafeResetTrial) { {s.CreateSimulationNode(NetworkSimulationConfig())}); s.CreateVideoStream(route->forward(), VideoStreamConfig()); // Allow the controller to stabilize. - s.RunFor(TimeDelta::ms(500)); + s.RunFor(TimeDelta::Millis(500)); EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 50); s.ChangeRoute(route->forward(), {send_net}); // Allow new settings to propagate. - s.RunFor(TimeDelta::ms(100)); + s.RunFor(TimeDelta::Millis(100)); // Under the trial, the target should be unchanged for low rates. 
EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 50); } TEST_F(GoogCcNetworkControllerTest, CutsHighRateInSafeResetTrial) { - const DataRate kLinkCapacity = DataRate::kbps(1000); - const DataRate kStartRate = DataRate::kbps(300); + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); + const DataRate kStartRate = DataRate::KilobitsPerSec(300); ScopedFieldTrials trial("WebRTC-Bwe-SafeResetOnRouteChange/Enabled/"); Scenario s("googcc_unit/safe_reset_high_cut"); auto send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { c->bandwidth = kLinkCapacity; - c->delay = TimeDelta::ms(50); + c->delay = TimeDelta::Millis(50); }); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { c->transport.rates.start_rate = kStartRate; @@ -618,11 +663,11 @@ TEST_F(GoogCcNetworkControllerTest, CutsHighRateInSafeResetTrial) { {s.CreateSimulationNode(NetworkSimulationConfig())}); s.CreateVideoStream(route->forward(), VideoStreamConfig()); // Allow the controller to stabilize. - s.RunFor(TimeDelta::ms(500)); + s.RunFor(TimeDelta::Millis(500)); EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 300); s.ChangeRoute(route->forward(), {send_net}); // Allow new settings to propagate. - s.RunFor(TimeDelta::ms(50)); + s.RunFor(TimeDelta::Millis(50)); // Under the trial, the target should be reset from high values. 
EXPECT_NEAR(client->send_bandwidth().kbps(), kStartRate.kbps(), 30); } @@ -631,18 +676,18 @@ TEST_F(GoogCcNetworkControllerTest, DetectsHighRateInSafeResetTrial) { ScopedFieldTrials trial( "WebRTC-Bwe-SafeResetOnRouteChange/Enabled,ack/" "WebRTC-SendSideBwe-WithOverhead/Enabled/"); - const DataRate kInitialLinkCapacity = DataRate::kbps(200); - const DataRate kNewLinkCapacity = DataRate::kbps(800); - const DataRate kStartRate = DataRate::kbps(300); + const DataRate kInitialLinkCapacity = DataRate::KilobitsPerSec(200); + const DataRate kNewLinkCapacity = DataRate::KilobitsPerSec(800); + const DataRate kStartRate = DataRate::KilobitsPerSec(300); Scenario s("googcc_unit/safe_reset_high_detect"); auto* initial_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { c->bandwidth = kInitialLinkCapacity; - c->delay = TimeDelta::ms(50); + c->delay = TimeDelta::Millis(50); }); auto* new_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { c->bandwidth = kNewLinkCapacity; - c->delay = TimeDelta::ms(50); + c->delay = TimeDelta::Millis(50); }); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { c->transport.rates.start_rate = kStartRate; @@ -652,18 +697,18 @@ TEST_F(GoogCcNetworkControllerTest, DetectsHighRateInSafeResetTrial) { {s.CreateSimulationNode(NetworkSimulationConfig())}); s.CreateVideoStream(route->forward(), VideoStreamConfig()); // Allow the controller to stabilize. - s.RunFor(TimeDelta::ms(1000)); + s.RunFor(TimeDelta::Millis(2000)); EXPECT_NEAR(client->send_bandwidth().kbps(), kInitialLinkCapacity.kbps(), 50); s.ChangeRoute(route->forward(), {new_net}); // Allow new settings to propagate, but not probes to be received. - s.RunFor(TimeDelta::ms(50)); + s.RunFor(TimeDelta::Millis(50)); // Under the field trial, the target rate should be unchanged since it's lower // than the starting rate. EXPECT_NEAR(client->send_bandwidth().kbps(), kInitialLinkCapacity.kbps(), 50); // However, probing should have made us detect the higher rate. 
// NOTE: This test causes high loss rate, and the loss-based estimator reduces // the bitrate, making the test fail if we wait longer than one second here. - s.RunFor(TimeDelta::ms(1000)); + s.RunFor(TimeDelta::Millis(1000)); EXPECT_GT(client->send_bandwidth().kbps(), kNewLinkCapacity.kbps() - 300); } @@ -675,13 +720,13 @@ TEST_F(GoogCcNetworkControllerTest, "WebRTC-Video-Pacing/factor:1.0/" "WebRTC-AddPacingToCongestionWindowPushback/Enabled/"); - const DataRate kLinkCapacity = DataRate::kbps(1000); - const DataRate kStartRate = DataRate::kbps(1000); + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); + const DataRate kStartRate = DataRate::KilobitsPerSec(1000); Scenario s("googcc_unit/pacing_buffer_buildup"); auto* net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { c->bandwidth = kLinkCapacity; - c->delay = TimeDelta::ms(50); + c->delay = TimeDelta::Millis(50); }); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { c->transport.rates.start_rate = kStartRate; @@ -691,7 +736,7 @@ TEST_F(GoogCcNetworkControllerTest, {s.CreateSimulationNode(NetworkSimulationConfig())}); s.CreateVideoStream(route->forward(), VideoStreamConfig()); // Allow some time for the buffer to build up. - s.RunFor(TimeDelta::seconds(5)); + s.RunFor(TimeDelta::Seconds(5)); // Without trial, pacer delay reaches ~250 ms. 
EXPECT_LT(client->GetStats().pacer_delay_ms, 150); @@ -701,31 +746,33 @@ TEST_F(GoogCcNetworkControllerTest, NoBandwidthTogglingInLossControlTrial) { ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); Scenario s("googcc_unit/no_toggling"); auto* send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(2000); + c->bandwidth = DataRate::KilobitsPerSec(2000); c->loss_rate = 0.2; - c->delay = TimeDelta::ms(10); + c->delay = TimeDelta::Millis(10); }); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { - c->transport.rates.start_rate = DataRate::kbps(300); + c->transport.rates.start_rate = DataRate::KilobitsPerSec(300); }); auto* route = s.CreateRoutes( client, {send_net}, s.CreateClient("return", CallClientConfig()), {s.CreateSimulationNode(NetworkSimulationConfig())}); s.CreateVideoStream(route->forward(), VideoStreamConfig()); // Allow the controller to initialize. - s.RunFor(TimeDelta::ms(250)); + s.RunFor(TimeDelta::Millis(250)); std::queue bandwidth_history; - const TimeDelta step = TimeDelta::ms(50); - for (TimeDelta time = TimeDelta::Zero(); time < TimeDelta::ms(2000); + const TimeDelta step = TimeDelta::Millis(50); + for (TimeDelta time = TimeDelta::Zero(); time < TimeDelta::Millis(2000); time += step) { s.RunFor(step); - const TimeDelta window = TimeDelta::ms(500); + const TimeDelta window = TimeDelta::Millis(500); if (bandwidth_history.size() >= window / step) bandwidth_history.pop(); bandwidth_history.push(client->send_bandwidth()); - EXPECT_LT(CountBandwidthDips(bandwidth_history, DataRate::kbps(100)), 2); + EXPECT_LT( + CountBandwidthDips(bandwidth_history, DataRate::KilobitsPerSec(100)), + 2); } } @@ -733,21 +780,21 @@ TEST_F(GoogCcNetworkControllerTest, NoRttBackoffCollapseWhenVideoStops) { ScopedFieldTrials trial("WebRTC-Bwe-MaxRttLimit/limit:2s/"); Scenario s("googcc_unit/rttbackoff_video_stop"); auto* send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { - c->bandwidth 
= DataRate::kbps(2000); - c->delay = TimeDelta::ms(100); + c->bandwidth = DataRate::KilobitsPerSec(2000); + c->delay = TimeDelta::Millis(100); }); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { - c->transport.rates.start_rate = DataRate::kbps(1000); + c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000); }); auto* route = s.CreateRoutes( client, {send_net}, s.CreateClient("return", CallClientConfig()), {s.CreateSimulationNode(NetworkSimulationConfig())}); auto* video = s.CreateVideoStream(route->forward(), VideoStreamConfig()); // Allow the controller to initialize, then stop video. - s.RunFor(TimeDelta::seconds(1)); + s.RunFor(TimeDelta::Seconds(1)); video->send()->Stop(); - s.RunFor(TimeDelta::seconds(4)); + s.RunFor(TimeDelta::Seconds(4)); EXPECT_GT(client->send_bandwidth().kbps(), 1000); } @@ -759,29 +806,29 @@ TEST_F(GoogCcNetworkControllerTest, NoCrashOnVeryLateFeedback) { {s.CreateSimulationNode(NetworkSimulationConfig())}, s.CreateClient("return", CallClientConfig()), {ret_net->node()}); auto* video = s.CreateVideoStream(route->forward(), VideoStreamConfig()); - s.RunFor(TimeDelta::seconds(5)); + s.RunFor(TimeDelta::Seconds(5)); // Delay feedback by several minutes. This will cause removal of the send time // history for the packets as long as kSendTimeHistoryWindow is configured for // a shorter time span. - ret_net->PauseTransmissionUntil(s.Now() + TimeDelta::seconds(300)); + ret_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(300)); // Stopping video stream while waiting to save test execution time. video->send()->Stop(); - s.RunFor(TimeDelta::seconds(299)); + s.RunFor(TimeDelta::Seconds(299)); // Starting to cause addition of new packet to history, which cause old // packets to be removed. video->send()->Start(); // Runs until the lost packets are received. We expect that this will run // without causing any runtime failures. 
- s.RunFor(TimeDelta::seconds(2)); + s.RunFor(TimeDelta::Seconds(2)); } TEST_F(GoogCcNetworkControllerTest, IsFairToTCP) { Scenario s("googcc_unit/tcp_fairness"); NetworkSimulationConfig net_conf; - net_conf.bandwidth = DataRate::kbps(1000); - net_conf.delay = TimeDelta::ms(50); + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(50); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { - c->transport.rates.start_rate = DataRate::kbps(1000); + c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000); }); auto send_net = {s.CreateSimulationNode(net_conf)}; auto ret_net = {s.CreateSimulationNode(net_conf)}; @@ -789,7 +836,7 @@ TEST_F(GoogCcNetworkControllerTest, IsFairToTCP) { client, send_net, s.CreateClient("return", CallClientConfig()), ret_net); s.CreateVideoStream(route->forward(), VideoStreamConfig()); s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig()); - s.RunFor(TimeDelta::seconds(10)); + s.RunFor(TimeDelta::Seconds(10)); // Currently only testing for the upper limit as we in practice back out // quite a lot in this scenario. 
If this behavior is fixed, we should add a diff --git a/modules/congestion_controller/goog_cc/link_capacity_estimator.cc b/modules/congestion_controller/goog_cc/link_capacity_estimator.cc index e37d8d87b3..9fd537a422 100644 --- a/modules/congestion_controller/goog_cc/link_capacity_estimator.cc +++ b/modules/congestion_controller/goog_cc/link_capacity_estimator.cc @@ -18,14 +18,14 @@ LinkCapacityEstimator::LinkCapacityEstimator() {} DataRate LinkCapacityEstimator::UpperBound() const { if (estimate_kbps_.has_value()) - return DataRate::kbps(estimate_kbps_.value() + - 3 * deviation_estimate_kbps()); + return DataRate::KilobitsPerSec(estimate_kbps_.value() + + 3 * deviation_estimate_kbps()); return DataRate::Infinity(); } DataRate LinkCapacityEstimator::LowerBound() const { if (estimate_kbps_.has_value()) - return DataRate::kbps( + return DataRate::KilobitsPerSec( std::max(0.0, estimate_kbps_.value() - 3 * deviation_estimate_kbps())); return DataRate::Zero(); } @@ -65,7 +65,7 @@ bool LinkCapacityEstimator::has_estimate() const { } DataRate LinkCapacityEstimator::estimate() const { - return DataRate::kbps(*estimate_kbps_); + return DataRate::KilobitsPerSec(*estimate_kbps_); } double LinkCapacityEstimator::deviation_estimate_kbps() const { diff --git a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc index c39ae21ef0..1d2aab8521 100644 --- a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc +++ b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc @@ -77,19 +77,21 @@ LossBasedControlConfig::LossBasedControlConfig() : enabled(field_trial::IsEnabled(kBweLossBasedControl)), min_increase_factor("min_incr", 1.02), max_increase_factor("max_incr", 1.08), - increase_low_rtt("incr_low_rtt", TimeDelta::ms(200)), - increase_high_rtt("incr_high_rtt", TimeDelta::ms(800)), + increase_low_rtt("incr_low_rtt", TimeDelta::Millis(200)), + 
increase_high_rtt("incr_high_rtt", TimeDelta::Millis(800)), decrease_factor("decr", 0.99), - loss_window("loss_win", TimeDelta::ms(800)), - loss_max_window("loss_max_win", TimeDelta::ms(800)), - acknowledged_rate_max_window("ackrate_max_win", TimeDelta::ms(800)), - increase_offset("incr_offset", DataRate::bps(1000)), - loss_bandwidth_balance_increase("balance_incr", DataRate::kbps(0.5)), - loss_bandwidth_balance_decrease("balance_decr", DataRate::kbps(4)), + loss_window("loss_win", TimeDelta::Millis(800)), + loss_max_window("loss_max_win", TimeDelta::Millis(800)), + acknowledged_rate_max_window("ackrate_max_win", TimeDelta::Millis(800)), + increase_offset("incr_offset", DataRate::BitsPerSec(1000)), + loss_bandwidth_balance_increase("balance_incr", + DataRate::KilobitsPerSec(0.5)), + loss_bandwidth_balance_decrease("balance_decr", + DataRate::KilobitsPerSec(4)), loss_bandwidth_balance_exponent("exponent", 0.5), allow_resets("resets", false), - decrease_interval("decr_intvl", TimeDelta::ms(300)), - loss_report_timeout("timeout", TimeDelta::ms(6000)) { + decrease_interval("decr_intvl", TimeDelta::Millis(300)), + loss_report_timeout("timeout", TimeDelta::Millis(6000)) { std::string trial_string = field_trial::FindFullName(kBweLossBasedControl); ParseFieldTrial( {&min_increase_factor, &max_increase_factor, &increase_low_rtt, @@ -130,7 +132,7 @@ void LossBasedBandwidthEstimation::UpdateLossStatistics( last_loss_ratio_ = static_cast(loss_count) / packet_results.size(); const TimeDelta time_passed = last_loss_packet_report_.IsFinite() ? at_time - last_loss_packet_report_ - : TimeDelta::seconds(1); + : TimeDelta::Seconds(1); last_loss_packet_report_ = at_time; has_decreased_since_last_loss_report_ = false; @@ -151,7 +153,7 @@ void LossBasedBandwidthEstimation::UpdateAcknowledgedBitrate( const TimeDelta time_passed = acknowledged_bitrate_last_update_.IsFinite() ? 
at_time - acknowledged_bitrate_last_update_ - : TimeDelta::seconds(1); + : TimeDelta::Seconds(1); acknowledged_bitrate_last_update_ = at_time; if (acknowledged_bitrate > acknowledged_bitrate_max_) { acknowledged_bitrate_max_ = acknowledged_bitrate; diff --git a/modules/congestion_controller/goog_cc/median_slope_estimator.cc b/modules/congestion_controller/goog_cc/median_slope_estimator.cc deleted file mode 100644 index 45d2fe3211..0000000000 --- a/modules/congestion_controller/goog_cc/median_slope_estimator.cc +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/congestion_controller/goog_cc/median_slope_estimator.h" - -#include - -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -constexpr unsigned int kDeltaCounterMax = 1000; - -MedianSlopeEstimator::MedianSlopeEstimator(size_t window_size, - double threshold_gain) - : window_size_(window_size), - threshold_gain_(threshold_gain), - num_of_deltas_(0), - accumulated_delay_(0), - delay_hist_(), - median_filter_(0.5), - trendline_(0) {} - -MedianSlopeEstimator::~MedianSlopeEstimator() {} - -MedianSlopeEstimator::DelayInfo::DelayInfo(int64_t time, - double delay, - size_t slope_count) - : time(time), delay(delay) { - slopes.reserve(slope_count); -} - -MedianSlopeEstimator::DelayInfo::~DelayInfo() = default; - -void MedianSlopeEstimator::Update(double recv_delta_ms, - double send_delta_ms, - int64_t arrival_time_ms) { - const double delta_ms = recv_delta_ms - send_delta_ms; - ++num_of_deltas_; - if (num_of_deltas_ > kDeltaCounterMax) - 
num_of_deltas_ = kDeltaCounterMax; - - accumulated_delay_ += delta_ms; - BWE_TEST_LOGGING_PLOT(1, "accumulated_delay_ms", arrival_time_ms, - accumulated_delay_); - - // If the window is full, remove the |window_size_| - 1 slopes that belong to - // the oldest point. - if (delay_hist_.size() == window_size_) { - for (double slope : delay_hist_.front().slopes) { - const bool success = median_filter_.Erase(slope); - RTC_CHECK(success); - } - delay_hist_.pop_front(); - } - // Add |window_size_| - 1 new slopes. - for (auto& old_delay : delay_hist_) { - if (arrival_time_ms - old_delay.time != 0) { - // The C99 standard explicitly states that casts and assignments must - // perform the associated conversions. This means that |slope| will be - // a 64-bit double even if the division is computed using, e.g., 80-bit - // extended precision. I believe this also holds in C++ even though the - // C++11 standard isn't as explicit. Furthermore, there are good reasons - // to believe that compilers couldn't perform optimizations that break - // this assumption even if they wanted to. - double slope = (accumulated_delay_ - old_delay.delay) / - static_cast(arrival_time_ms - old_delay.time); - median_filter_.Insert(slope); - // We want to avoid issues with different rounding mode / precision - // which we might get if we recomputed the slope when we remove it. - old_delay.slopes.push_back(slope); - } - } - delay_hist_.emplace_back(arrival_time_ms, accumulated_delay_, - window_size_ - 1); - // Recompute the median slope. 
- if (delay_hist_.size() == window_size_) - trendline_ = median_filter_.GetPercentileValue(); - - BWE_TEST_LOGGING_PLOT(1, "trendline_slope", arrival_time_ms, trendline_); -} - -} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/median_slope_estimator.h b/modules/congestion_controller/goog_cc/median_slope_estimator.h deleted file mode 100644 index 3f1e3f5063..0000000000 --- a/modules/congestion_controller/goog_cc/median_slope_estimator.h +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_MEDIAN_SLOPE_ESTIMATOR_H_ -#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_MEDIAN_SLOPE_ESTIMATOR_H_ - -#include -#include - -#include -#include - -#include "rtc_base/constructor_magic.h" -#include "rtc_base/numerics/percentile_filter.h" - -namespace webrtc { - -class MedianSlopeEstimator { - public: - // |window_size| is the number of points required to compute a trend line. - // |threshold_gain| is used to scale the trendline slope for comparison to - // the old threshold. Once the old estimator has been removed (or the - // thresholds been merged into the estimators), we can just set the - // threshold instead of setting a gain. - MedianSlopeEstimator(size_t window_size, double threshold_gain); - ~MedianSlopeEstimator(); - - // Update the estimator with a new sample. The deltas should represent deltas - // between timestamp groups as defined by the InterArrival class. - void Update(double recv_delta_ms, - double send_delta_ms, - int64_t arrival_time_ms); - - // Returns the estimated trend k multiplied by some gain. 
- // 0 < k < 1 -> the delay increases, queues are filling up - // k == 0 -> the delay does not change - // k < 0 -> the delay decreases, queues are being emptied - double trendline_slope() const { return trendline_ * threshold_gain_; } - - // Returns the number of deltas which the current estimator state is based on. - unsigned int num_of_deltas() const { return num_of_deltas_; } - - private: - struct DelayInfo { - DelayInfo(int64_t time, double delay, size_t slope_count); - ~DelayInfo(); - int64_t time; - double delay; - std::vector slopes; - }; - // Parameters. - const size_t window_size_; - const double threshold_gain_; - // Used by the existing threshold. - unsigned int num_of_deltas_; - // Theil-Sen robust line fitting - double accumulated_delay_; - std::deque delay_hist_; - PercentileFilter median_filter_; - double trendline_; - - RTC_DISALLOW_COPY_AND_ASSIGN(MedianSlopeEstimator); -}; - -} // namespace webrtc - -#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_MEDIAN_SLOPE_ESTIMATOR_H_ diff --git a/modules/congestion_controller/goog_cc/median_slope_estimator_unittest.cc b/modules/congestion_controller/goog_cc/median_slope_estimator_unittest.cc deleted file mode 100644 index a0df9fd835..0000000000 --- a/modules/congestion_controller/goog_cc/median_slope_estimator_unittest.cc +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/congestion_controller/goog_cc/median_slope_estimator.h" - -#include "rtc_base/random.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { -constexpr size_t kWindowSize = 20; -constexpr double kGain = 1; -constexpr int64_t kAvgTimeBetweenPackets = 10; -constexpr size_t kPacketCount = 2 * kWindowSize + 1; - -void TestEstimator(double slope, double jitter_stddev, double tolerance) { - MedianSlopeEstimator estimator(kWindowSize, kGain); - Random random(0x1234567); - int64_t send_times[kPacketCount]; - int64_t recv_times[kPacketCount]; - int64_t send_start_time = random.Rand(1000000); - int64_t recv_start_time = random.Rand(1000000); - for (size_t i = 0; i < kPacketCount; ++i) { - send_times[i] = send_start_time + i * kAvgTimeBetweenPackets; - double latency = i * kAvgTimeBetweenPackets / (1 - slope); - double jitter = random.Gaussian(0, jitter_stddev); - recv_times[i] = recv_start_time + latency + jitter; - } - for (size_t i = 1; i < kPacketCount; ++i) { - double recv_delta = recv_times[i] - recv_times[i - 1]; - double send_delta = send_times[i] - send_times[i - 1]; - estimator.Update(recv_delta, send_delta, recv_times[i]); - if (i < kWindowSize) - EXPECT_NEAR(estimator.trendline_slope(), 0, 0.001); - else - EXPECT_NEAR(estimator.trendline_slope(), slope, tolerance); - } -} -} // namespace - -TEST(MedianSlopeEstimator, PerfectLineSlopeOneHalf) { - TestEstimator(0.5, 0, 0.001); -} - -TEST(MedianSlopeEstimator, PerfectLineSlopeMinusOne) { - TestEstimator(-1, 0, 0.001); -} - -TEST(MedianSlopeEstimator, PerfectLineSlopeZero) { - TestEstimator(0, 0, 0.001); -} - -TEST(MedianSlopeEstimator, JitteryLineSlopeOneHalf) { - TestEstimator(0.5, kAvgTimeBetweenPackets / 3.0, 0.01); -} - -TEST(MedianSlopeEstimator, JitteryLineSlopeMinusOne) { - TestEstimator(-1, kAvgTimeBetweenPackets / 3.0, 0.05); -} - -TEST(MedianSlopeEstimator, JitteryLineSlopeZero) { - TestEstimator(0, kAvgTimeBetweenPackets / 3.0, 0.02); -} - -} // namespace webrtc diff 
--git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc index b4a33eb2fd..fdfd531135 100644 --- a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc @@ -46,11 +46,11 @@ constexpr float kTargetUtilizationFraction = 0.95f; // The maximum time period over which the cluster history is retained. // This is also the maximum time period beyond which a probing burst is not // expected to last. -constexpr TimeDelta kMaxClusterHistory = TimeDelta::Seconds<1>(); +constexpr TimeDelta kMaxClusterHistory = TimeDelta::Seconds(1); // The maximum time interval between first and the last probe on a cluster // on the sender side as well as the receive side. -constexpr TimeDelta kMaxProbeInterval = TimeDelta::Seconds<1>(); +constexpr TimeDelta kMaxProbeInterval = TimeDelta::Seconds(1); } // namespace @@ -94,7 +94,7 @@ absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( packet_feedback.sent_packet.pacing_info.probe_cluster_min_probes * kMinReceivedProbesRatio; DataSize min_size = - DataSize::bytes( + DataSize::Bytes( packet_feedback.sent_packet.pacing_info.probe_cluster_min_bytes) * kMinReceivedBytesRatio; if (cluster->num_probes < min_probes || cluster->size_total < min_size) @@ -179,7 +179,7 @@ absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( std::make_unique(cluster_id, res.bps())); } estimated_data_rate_ = res; - return *estimated_data_rate_; + return estimated_data_rate_; } absl::optional diff --git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc b/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc index b886add2d0..6b4146d2bf 100644 --- a/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc @@ -35,14 +35,14 @@ class 
TestProbeBitrateEstimator : public ::testing::Test { int64_t arrival_time_ms, int min_probes = kDefaultMinProbes, int min_bytes = kDefaultMinBytes) { - const Timestamp kReferenceTime = Timestamp::seconds(1000); + const Timestamp kReferenceTime = Timestamp::Seconds(1000); PacketResult feedback; feedback.sent_packet.send_time = - kReferenceTime + TimeDelta::ms(send_time_ms); - feedback.sent_packet.size = DataSize::bytes(size_bytes); + kReferenceTime + TimeDelta::Millis(send_time_ms); + feedback.sent_packet.size = DataSize::Bytes(size_bytes); feedback.sent_packet.pacing_info = PacedPacketInfo(probe_cluster_id, min_probes, min_bytes); - feedback.receive_time = kReferenceTime + TimeDelta::ms(arrival_time_ms); + feedback.receive_time = kReferenceTime + TimeDelta::Millis(arrival_time_ms); measured_data_rate_ = probe_bitrate_estimator_.HandleProbeAndEstimateBitrate(feedback); } diff --git a/modules/congestion_controller/goog_cc/probe_controller.cc b/modules/congestion_controller/goog_cc/probe_controller.cc index 02b5833705..29b472a873 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/modules/congestion_controller/goog_cc/probe_controller.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/match.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -95,11 +96,12 @@ ProbeControllerConfig::ProbeControllerConfig( second_exponential_probe_scale("p2", 6.0), further_exponential_probe_scale("step_size", 2), further_probe_threshold("further_probe_threshold", 0.7), - alr_probing_interval("alr_interval", TimeDelta::seconds(5)), + alr_probing_interval("alr_interval", TimeDelta::Seconds(5)), alr_probe_scale("alr_scale", 2), first_allocation_probe_scale("alloc_p1", 1), second_allocation_probe_scale("alloc_p2", 2), - allocation_allow_further_probing("alloc_probe_further", false) { + allocation_allow_further_probing("alloc_probe_further", false), + allocation_probe_max("alloc_probe_max", 
DataRate::PlusInfinity()) { ParseFieldTrial( {&first_exponential_probe_scale, &second_exponential_probe_scale, &further_exponential_probe_scale, &further_probe_threshold, @@ -117,7 +119,7 @@ ProbeControllerConfig::ProbeControllerConfig( key_value_config->Lookup("WebRTC-Bwe-AlrProbing")); ParseFieldTrial( {&first_allocation_probe_scale, &second_allocation_probe_scale, - &allocation_allow_further_probing}, + &allocation_allow_further_probing, &allocation_probe_max}, key_value_config->Lookup("WebRTC-Bwe-AllocationProbing")); } @@ -128,12 +130,12 @@ ProbeControllerConfig::~ProbeControllerConfig() = default; ProbeController::ProbeController(const WebRtcKeyValueConfig* key_value_config, RtcEventLog* event_log) : enable_periodic_alr_probing_(false), - in_rapid_recovery_experiment_( - key_value_config->Lookup(kBweRapidRecoveryExperiment) - .find("Enabled") == 0), - limit_probes_with_allocateable_rate_( - key_value_config->Lookup(kCappedProbingFieldTrialName) - .find("Disabled") != 0), + in_rapid_recovery_experiment_(absl::StartsWith( + key_value_config->Lookup(kBweRapidRecoveryExperiment), + "Enabled")), + limit_probes_with_allocateable_rate_(!absl::StartsWith( + key_value_config->Lookup(kCappedProbingFieldTrialName), + "Disabled")), event_log_(event_log), config_(ProbeControllerConfig(key_value_config)) { Reset(0); @@ -208,12 +210,19 @@ std::vector ProbeController::OnMaxTotalAllocatedBitrate( if (!config_.first_allocation_probe_scale) return std::vector(); - std::vector probes = { - static_cast(config_.first_allocation_probe_scale.Value() * - max_total_allocated_bitrate)}; + DataRate first_probe_rate = + DataRate::BitsPerSec(max_total_allocated_bitrate) * + config_.first_allocation_probe_scale.Value(); + DataRate probe_cap = config_.allocation_probe_max.Get(); + first_probe_rate = std::min(first_probe_rate, probe_cap); + std::vector probes = {first_probe_rate.bps()}; if (config_.second_allocation_probe_scale) { - 
probes.push_back(config_.second_allocation_probe_scale.Value() * - max_total_allocated_bitrate); + DataRate second_probe_rate = + DataRate::BitsPerSec(max_total_allocated_bitrate) * + config_.second_allocation_probe_scale.Value(); + second_probe_rate = std::min(second_probe_rate, probe_cap); + if (second_probe_rate > first_probe_rate) + probes.push_back(second_probe_rate.bps()); } return InitiateProbing(at_time_ms, probes, config_.allocation_allow_further_probing); @@ -417,9 +426,10 @@ std::vector ProbeController::InitiateProbing( } ProbeClusterConfig config; - config.at_time = Timestamp::ms(now_ms); - config.target_data_rate = DataRate::bps(rtc::dchecked_cast(bitrate)); - config.target_duration = TimeDelta::ms(kMinProbeDurationMs); + config.at_time = Timestamp::Millis(now_ms); + config.target_data_rate = + DataRate::BitsPerSec(rtc::dchecked_cast(bitrate)); + config.target_duration = TimeDelta::Millis(kMinProbeDurationMs); config.target_probe_count = kMinProbePacketsSent; config.id = next_probe_cluster_id_; next_probe_cluster_id_++; diff --git a/modules/congestion_controller/goog_cc/probe_controller.h b/modules/congestion_controller/goog_cc/probe_controller.h index f22acff25f..11e92b97ae 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.h +++ b/modules/congestion_controller/goog_cc/probe_controller.h @@ -20,6 +20,7 @@ #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/network_control.h" #include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_rate.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/system/unused.h" @@ -50,6 +51,7 @@ struct ProbeControllerConfig { FieldTrialOptional first_allocation_probe_scale; FieldTrialOptional second_allocation_probe_scale; FieldTrialFlag allocation_allow_further_probing; + FieldTrialParameter allocation_probe_max; }; // This class controls initiation of probing to estimate initial channel diff --git 
a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc index f82e7d46ca..e27bf71489 100644 --- a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc +++ b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc @@ -55,7 +55,7 @@ class ProbeControllerTest : public ::testing::Test { std::vector SetNetworkAvailable(bool available) { NetworkAvailability msg; - msg.at_time = Timestamp::ms(NowMs()); + msg.at_time = Timestamp::Millis(NowMs()); msg.network_available = available; return probe_controller_->OnNetworkAvailability(msg); } diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc b/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc index 8c77e8ff31..1169e9f6bb 100644 --- a/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc @@ -35,6 +35,9 @@ void RobustThroughputEstimator::IncomingPacketFeedbackVector( for (const auto& packet : packet_feedback_vector) { // Insert the new packet. window_.push_back(packet); + window_.back().sent_packet.prior_unacked_data = + window_.back().sent_packet.prior_unacked_data * + settings_.unacked_weight; // In most cases, receive timestamps should already be in order, but in the // rare case where feedback packets have been reordered, we do some swaps to // ensure that the window is sorted. 
@@ -56,8 +59,8 @@ absl::optional RobustThroughputEstimator::bitrate() const { if (window_.size() < settings_.initial_packets) return absl::nullopt; - TimeDelta largest_recv_gap(TimeDelta::ms(0)); - TimeDelta second_largest_recv_gap(TimeDelta::ms(0)); + TimeDelta largest_recv_gap(TimeDelta::Millis(0)); + TimeDelta second_largest_recv_gap(TimeDelta::Millis(0)); for (size_t i = 1; i < window_.size(); i++) { // Find receive time gaps TimeDelta gap = window_[i].receive_time - window_[i - 1].receive_time; @@ -73,15 +76,14 @@ absl::optional RobustThroughputEstimator::bitrate() const { Timestamp max_send_time = window_[0].sent_packet.send_time; Timestamp min_recv_time = window_[0].receive_time; Timestamp max_recv_time = window_[0].receive_time; - DataSize data_size = DataSize::bytes(0); + DataSize data_size = DataSize::Bytes(0); for (const auto& packet : window_) { min_send_time = std::min(min_send_time, packet.sent_packet.send_time); max_send_time = std::max(max_send_time, packet.sent_packet.send_time); min_recv_time = std::min(min_recv_time, packet.receive_time); max_recv_time = std::max(max_recv_time, packet.receive_time); data_size += packet.sent_packet.size; - data_size += - packet.sent_packet.prior_unacked_data * settings_.unacked_weight; + data_size += packet.sent_packet.prior_unacked_data; } // Suppose a packet of size S is sent every T milliseconds. 
@@ -125,8 +127,8 @@ absl::optional RobustThroughputEstimator::bitrate() const { recv_duration += recv_duration / (window_.size() - 2); } - send_duration = std::max(send_duration, TimeDelta::ms(1)); - recv_duration = std::max(recv_duration, TimeDelta::ms(1)); + send_duration = std::max(send_duration, TimeDelta::Millis(1)); + recv_duration = std::max(recv_duration, TimeDelta::Millis(1)); return std::min(send_size / send_duration, recv_size / recv_duration); } diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc b/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc index 83773422c1..d2e01d362c 100644 --- a/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc @@ -45,11 +45,11 @@ TEST(RobustThroughputEstimatorTest, SteadyRate) { FieldTrialBasedConfig field_trial_config; RobustThroughputEstimatorSettings settings(&field_trial_config); RobustThroughputEstimator throughput_estimator(settings); - DataSize packet_size(DataSize::bytes(1000)); - Timestamp send_clock(Timestamp::ms(100000)); - Timestamp recv_clock(Timestamp::ms(10000)); - TimeDelta send_increment(TimeDelta::ms(10)); - TimeDelta recv_increment(TimeDelta::ms(10)); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(10)); + TimeDelta recv_increment(TimeDelta::Millis(10)); uint16_t sequence_number = 100; std::vector packet_feedback = CreateFeedbackVector(9, packet_size, send_increment, recv_increment, @@ -75,11 +75,11 @@ TEST(RobustThroughputEstimatorTest, DelaySpike) { FieldTrialBasedConfig field_trial_config; RobustThroughputEstimatorSettings settings(&field_trial_config); RobustThroughputEstimator throughput_estimator(settings); - DataSize packet_size(DataSize::bytes(1000)); - Timestamp 
send_clock(Timestamp::ms(100000)); - Timestamp recv_clock(Timestamp::ms(10000)); - TimeDelta send_increment(TimeDelta::ms(10)); - TimeDelta recv_increment(TimeDelta::ms(10)); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(10)); + TimeDelta recv_increment(TimeDelta::Millis(10)); uint16_t sequence_number = 100; std::vector packet_feedback = CreateFeedbackVector(20, packet_size, send_increment, recv_increment, @@ -91,10 +91,10 @@ TEST(RobustThroughputEstimatorTest, DelaySpike) { 0.05 * 100 * 1000.0); // Allow 5% error // Delay spike - recv_clock += TimeDelta::ms(40); + recv_clock += TimeDelta::Millis(40); // Faster delivery after the gap - recv_increment = TimeDelta::ms(2); + recv_increment = TimeDelta::Millis(2); packet_feedback = CreateFeedbackVector(5, packet_size, send_increment, recv_increment, &send_clock, &recv_clock, &sequence_number); @@ -105,7 +105,7 @@ TEST(RobustThroughputEstimatorTest, DelaySpike) { 0.05 * 100 * 1000.0); // Allow 5% error // Delivery at normal rate. This will be capped by the send rate. 
- recv_increment = TimeDelta::ms(10); + recv_increment = TimeDelta::Millis(10); packet_feedback = CreateFeedbackVector(5, packet_size, send_increment, recv_increment, &send_clock, &recv_clock, &sequence_number); @@ -124,11 +124,11 @@ TEST(RobustThroughputEstimatorTest, CappedByReceiveRate) { FieldTrialBasedConfig field_trial_config; RobustThroughputEstimatorSettings settings(&field_trial_config); RobustThroughputEstimator throughput_estimator(settings); - DataSize packet_size(DataSize::bytes(1000)); - Timestamp send_clock(Timestamp::ms(100000)); - Timestamp recv_clock(Timestamp::ms(10000)); - TimeDelta send_increment(TimeDelta::ms(10)); - TimeDelta recv_increment(TimeDelta::ms(40)); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(10)); + TimeDelta recv_increment(TimeDelta::Millis(40)); uint16_t sequence_number = 100; std::vector packet_feedback = CreateFeedbackVector(20, packet_size, send_increment, recv_increment, @@ -148,11 +148,11 @@ TEST(RobustThroughputEstimatorTest, CappedBySendRate) { FieldTrialBasedConfig field_trial_config; RobustThroughputEstimatorSettings settings(&field_trial_config); RobustThroughputEstimator throughput_estimator(settings); - DataSize packet_size(DataSize::bytes(1000)); - Timestamp send_clock(Timestamp::ms(100000)); - Timestamp recv_clock(Timestamp::ms(10000)); - TimeDelta send_increment(TimeDelta::ms(20)); - TimeDelta recv_increment(TimeDelta::ms(10)); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(20)); + TimeDelta recv_increment(TimeDelta::Millis(10)); uint16_t sequence_number = 100; std::vector packet_feedback = CreateFeedbackVector(20, packet_size, send_increment, recv_increment, diff --git 
a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc index c16c83eabb..4ca75bf263 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -16,8 +16,11 @@ #include #include +#include "absl/strings/match.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/time_delta.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "rtc_base/checks.h" @@ -27,16 +30,16 @@ namespace webrtc { namespace { -constexpr TimeDelta kBweIncreaseInterval = TimeDelta::Millis<1000>(); -constexpr TimeDelta kBweDecreaseInterval = TimeDelta::Millis<300>(); -constexpr TimeDelta kStartPhase = TimeDelta::Millis<2000>(); -constexpr TimeDelta kBweConverganceTime = TimeDelta::Millis<20000>(); +constexpr TimeDelta kBweIncreaseInterval = TimeDelta::Millis(1000); +constexpr TimeDelta kBweDecreaseInterval = TimeDelta::Millis(300); +constexpr TimeDelta kStartPhase = TimeDelta::Millis(2000); +constexpr TimeDelta kBweConverganceTime = TimeDelta::Millis(20000); constexpr int kLimitNumPackets = 20; -constexpr DataRate kDefaultMaxBitrate = DataRate::BitsPerSec<1000000000>(); -constexpr TimeDelta kLowBitrateLogPeriod = TimeDelta::Millis<10000>(); -constexpr TimeDelta kRtcEventLogPeriod = TimeDelta::Millis<5000>(); +constexpr DataRate kDefaultMaxBitrate = DataRate::BitsPerSec(1000000000); +constexpr TimeDelta kLowBitrateLogPeriod = TimeDelta::Millis(10000); +constexpr TimeDelta kRtcEventLogPeriod = TimeDelta::Millis(5000); // Expecting that RTCP feedback is sent uniformly within [0.5, 1.5]s intervals. 
-constexpr TimeDelta kMaxRtcpFeedbackInterval = TimeDelta::Millis<5000>(); +constexpr TimeDelta kMaxRtcpFeedbackInterval = TimeDelta::Millis(5000); constexpr float kDefaultLowLossThreshold = 0.02f; constexpr float kDefaultHighLossThreshold = 0.1f; @@ -60,7 +63,7 @@ bool BweLossExperimentIsEnabled() { std::string experiment_string = webrtc::field_trial::FindFullName(kBweLosExperiment); // The experiment is enabled iff the field trial string begins with "Enabled". - return experiment_string.find("Enabled") == 0; + return absl::StartsWith(experiment_string, "Enabled"); } bool ReadBweLossExperimentParameters(float* low_loss_threshold, @@ -103,7 +106,7 @@ bool ReadBweLossExperimentParameters(float* low_loss_threshold, } // namespace LinkCapacityTracker::LinkCapacityTracker() - : tracking_rate("rate", TimeDelta::seconds(10)) { + : tracking_rate("rate", TimeDelta::Seconds(10)) { ParseFieldTrial({&tracking_rate}, field_trial::FindFullName("WebRTC-Bwe-LinkCapacity")); } @@ -149,22 +152,27 @@ void LinkCapacityTracker::OnRttBackoff(DataRate backoff_rate, } DataRate LinkCapacityTracker::estimate() const { - return DataRate::bps(capacity_estimate_bps_); + return DataRate::BitsPerSec(capacity_estimate_bps_); } -RttBasedBackoff::RttBasedBackoff() - : rtt_limit_("limit", TimeDelta::seconds(3)), +RttBasedBackoff::RttBasedBackoff(const WebRtcKeyValueConfig* key_value_config) + : disabled_("Disabled"), + configured_limit_("limit", TimeDelta::Seconds(3)), drop_fraction_("fraction", 0.8), - drop_interval_("interval", TimeDelta::seconds(1)), - bandwidth_floor_("floor", DataRate::kbps(5)), + drop_interval_("interval", TimeDelta::Seconds(1)), + bandwidth_floor_("floor", DataRate::KilobitsPerSec(5)), + rtt_limit_(TimeDelta::PlusInfinity()), // By initializing this to plus infinity, we make sure that we never // trigger rtt backoff unless packet feedback is enabled. 
last_propagation_rtt_update_(Timestamp::PlusInfinity()), last_propagation_rtt_(TimeDelta::Zero()), last_packet_sent_(Timestamp::MinusInfinity()) { - ParseFieldTrial( - {&rtt_limit_, &drop_fraction_, &drop_interval_, &bandwidth_floor_}, - field_trial::FindFullName("WebRTC-Bwe-MaxRttLimit")); + ParseFieldTrial({&disabled_, &configured_limit_, &drop_fraction_, + &drop_interval_, &bandwidth_floor_}, + key_value_config->Lookup("WebRTC-Bwe-MaxRttLimit")); + if (!disabled_) { + rtt_limit_ = configured_limit_.Get(); + } } void RttBasedBackoff::UpdatePropagationRtt(Timestamp at_time, @@ -185,13 +193,16 @@ TimeDelta RttBasedBackoff::CorrectedRtt(Timestamp at_time) const { RttBasedBackoff::~RttBasedBackoff() = default; -SendSideBandwidthEstimation::SendSideBandwidthEstimation(RtcEventLog* event_log) - : lost_packets_since_last_loss_update_(0), +SendSideBandwidthEstimation::SendSideBandwidthEstimation( + const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log) + : rtt_backoff_(key_value_config), + lost_packets_since_last_loss_update_(0), expected_packets_since_last_loss_update_(0), current_target_(DataRate::Zero()), last_logged_target_(DataRate::Zero()), min_bitrate_configured_( - DataRate::bps(congestion_controller::GetMinBitrateBps())), + DataRate::BitsPerSec(congestion_controller::GetMinBitrateBps())), max_bitrate_configured_(kDefaultMaxBitrate), last_low_bitrate_log_(Timestamp::MinusInfinity()), has_decreased_since_last_fraction_loss_(false), @@ -223,7 +234,7 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation(RtcEventLog* event_log) RTC_LOG(LS_INFO) << "Enabled BweLossExperiment with parameters " << low_loss_threshold_ << ", " << high_loss_threshold_ << ", " << bitrate_threshold_kbps; - bitrate_threshold_ = DataRate::kbps(bitrate_threshold_kbps); + bitrate_threshold_ = DataRate::KilobitsPerSec(bitrate_threshold_kbps); } } } @@ -235,7 +246,7 @@ void SendSideBandwidthEstimation::OnRouteChange() { expected_packets_since_last_loss_update_ = 0; 
current_target_ = DataRate::Zero(); min_bitrate_configured_ = - DataRate::bps(congestion_controller::GetMinBitrateBps()); + DataRate::BitsPerSec(congestion_controller::GetMinBitrateBps()); max_bitrate_configured_ = kDefaultMaxBitrate; last_low_bitrate_log_ = Timestamp::MinusInfinity(); has_decreased_since_last_fraction_loss_ = false; @@ -373,7 +384,8 @@ void SendSideBandwidthEstimation::UpdatePacketsLost(int packets_lost, void SendSideBandwidthEstimation::UpdateUmaStatsPacketsLost(Timestamp at_time, int packets_lost) { - DataRate bitrate_kbps = DataRate::kbps((current_target_.bps() + 500) / 1000); + DataRate bitrate_kbps = + DataRate::KilobitsPerSec((current_target_.bps() + 500) / 1000); for (size_t i = 0; i < kNumUmaRampupMetrics; ++i) { if (!rampup_uma_stats_updated_[i] && bitrate_kbps.kbps() >= kUmaRampupMetrics[i].bitrate_kbps) { @@ -490,13 +502,13 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { // If instead one would do: current_bitrate_ *= 1.08^(delta time), // it would take over one second since the lower packet loss to achieve // 108kbps. - DataRate new_bitrate = - DataRate::bps(min_bitrate_history_.front().second.bps() * 1.08 + 0.5); + DataRate new_bitrate = DataRate::BitsPerSec( + min_bitrate_history_.front().second.bps() * 1.08 + 0.5); // Add 1 kbps extra, just to make sure that we do not get stuck // (gives a little extra increase at low rates, negligible at higher // rates). 
- new_bitrate += DataRate::bps(1000); + new_bitrate += DataRate::BitsPerSec(1000); UpdateTargetBitrate(new_bitrate, at_time); return; } else if (current_target_ > bitrate_threshold_) { @@ -513,10 +525,10 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { // Reduce rate: // newRate = rate * (1 - 0.5*lossRate); // where packetLoss = 256*lossRate; - DataRate new_bitrate = - DataRate::bps((current_target_.bps() * - static_cast(512 - last_fraction_loss_)) / - 512.0); + DataRate new_bitrate = DataRate::BitsPerSec( + (current_target_.bps() * + static_cast(512 - last_fraction_loss_)) / + 512.0); has_decreased_since_last_fraction_loss_ = true; UpdateTargetBitrate(new_bitrate, at_time); return; @@ -549,7 +561,7 @@ void SendSideBandwidthEstimation::UpdateMinHistory(Timestamp at_time) { // Since history precision is in ms, add one so it is able to increase // bitrate if it is off by as little as 0.5ms. while (!min_bitrate_history_.empty() && - at_time - min_bitrate_history_.front().first + TimeDelta::ms(1) > + at_time - min_bitrate_history_.front().first + TimeDelta::Millis(1) > kBweIncreaseInterval) { min_bitrate_history_.pop_front(); } @@ -572,7 +584,7 @@ DataRate SendSideBandwidthEstimation::MaybeRampupOrBackoff(DataRate new_bitrate, at_time - last_loss_packet_report_; if (time_since_loss_packet_report < 1.2 * kMaxRtcpFeedbackInterval) { new_bitrate = min_bitrate_history_.front().second * 1.08; - new_bitrate += DataRate::bps(1000); + new_bitrate += DataRate::BitsPerSec(1000); } return new_bitrate; } diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h index 241ec8c841..a13800b7f6 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -21,6 +21,7 @@ #include "absl/types/optional.h" #include "api/transport/network_types.h" +#include 
"api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -54,17 +55,19 @@ class LinkCapacityTracker { class RttBasedBackoff { public: - RttBasedBackoff(); + explicit RttBasedBackoff(const WebRtcKeyValueConfig* key_value_config); ~RttBasedBackoff(); void UpdatePropagationRtt(Timestamp at_time, TimeDelta propagation_rtt); TimeDelta CorrectedRtt(Timestamp at_time) const; - FieldTrialParameter rtt_limit_; + FieldTrialFlag disabled_; + FieldTrialParameter configured_limit_; FieldTrialParameter drop_fraction_; FieldTrialParameter drop_interval_; FieldTrialParameter bandwidth_floor_; public: + TimeDelta rtt_limit_; Timestamp last_propagation_rtt_update_; TimeDelta last_propagation_rtt_; Timestamp last_packet_sent_; @@ -73,7 +76,8 @@ class RttBasedBackoff { class SendSideBandwidthEstimation { public: SendSideBandwidthEstimation() = delete; - explicit SendSideBandwidthEstimation(RtcEventLog* event_log); + SendSideBandwidthEstimation(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log); ~SendSideBandwidthEstimation(); void OnRouteChange(); diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc index 710c71f9cd..85ce401098 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc @@ -13,6 +13,7 @@ #include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "logging/rtc_event_log/mock/mock_rtc_event_log.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" @@ -36,38 +37,41 @@ MATCHER(LossBasedBweUpdateWithBitrateAndLossFraction, "") { void TestProbing(bool use_delay_based) { ::testing::NiceMock event_log; - 
SendSideBandwidthEstimation bwe(&event_log); + test::ExplicitKeyValueConfig key_value_config(""); + SendSideBandwidthEstimation bwe(&key_value_config, &event_log); int64_t now_ms = 0; - bwe.SetMinMaxBitrate(DataRate::bps(100000), DataRate::bps(1500000)); - bwe.SetSendBitrate(DataRate::bps(200000), Timestamp::ms(now_ms)); + bwe.SetMinMaxBitrate(DataRate::BitsPerSec(100000), + DataRate::BitsPerSec(1500000)); + bwe.SetSendBitrate(DataRate::BitsPerSec(200000), Timestamp::Millis(now_ms)); const int kRembBps = 1000000; const int kSecondRembBps = kRembBps + 500000; bwe.UpdatePacketsLost(/*packets_lost=*/0, /*number_of_packets=*/1, - Timestamp::ms(now_ms)); - bwe.UpdateRtt(TimeDelta::ms(50), Timestamp::ms(now_ms)); + Timestamp::Millis(now_ms)); + bwe.UpdateRtt(TimeDelta::Millis(50), Timestamp::Millis(now_ms)); // Initial REMB applies immediately. if (use_delay_based) { - bwe.UpdateDelayBasedEstimate(Timestamp::ms(now_ms), - DataRate::bps(kRembBps)); + bwe.UpdateDelayBasedEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kRembBps)); } else { - bwe.UpdateReceiverEstimate(Timestamp::ms(now_ms), DataRate::bps(kRembBps)); + bwe.UpdateReceiverEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kRembBps)); } - bwe.UpdateEstimate(Timestamp::ms(now_ms)); + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); EXPECT_EQ(kRembBps, bwe.target_rate().bps()); // Second REMB doesn't apply immediately. 
now_ms += 2001; if (use_delay_based) { - bwe.UpdateDelayBasedEstimate(Timestamp::ms(now_ms), - DataRate::bps(kSecondRembBps)); + bwe.UpdateDelayBasedEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kSecondRembBps)); } else { - bwe.UpdateReceiverEstimate(Timestamp::ms(now_ms), - DataRate::bps(kSecondRembBps)); + bwe.UpdateReceiverEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kSecondRembBps)); } - bwe.UpdateEstimate(Timestamp::ms(now_ms)); + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); EXPECT_EQ(kRembBps, bwe.target_rate().bps()); } @@ -86,12 +90,15 @@ TEST(SendSideBweTest, DoesntReapplyBitrateDecreaseWithoutFollowingRemb) { EXPECT_CALL(event_log, LogProxy(LossBasedBweUpdateWithBitrateAndLossFraction())) .Times(1); - SendSideBandwidthEstimation bwe(&event_log); + test::ExplicitKeyValueConfig key_value_config(""); + SendSideBandwidthEstimation bwe(&key_value_config, &event_log); static const int kMinBitrateBps = 100000; static const int kInitialBitrateBps = 1000000; int64_t now_ms = 1000; - bwe.SetMinMaxBitrate(DataRate::bps(kMinBitrateBps), DataRate::bps(1500000)); - bwe.SetSendBitrate(DataRate::bps(kInitialBitrateBps), Timestamp::ms(now_ms)); + bwe.SetMinMaxBitrate(DataRate::BitsPerSec(kMinBitrateBps), + DataRate::BitsPerSec(1500000)); + bwe.SetSendBitrate(DataRate::BitsPerSec(kInitialBitrateBps), + Timestamp::Millis(now_ms)); static const uint8_t kFractionLoss = 128; static const int64_t kRttMs = 50; @@ -103,12 +110,12 @@ TEST(SendSideBweTest, DoesntReapplyBitrateDecreaseWithoutFollowingRemb) { // Signal heavy loss to go down in bitrate. bwe.UpdatePacketsLost(/*packets_lost=*/50, /*number_of_packets=*/100, - Timestamp::ms(now_ms)); - bwe.UpdateRtt(TimeDelta::ms(kRttMs), Timestamp::ms(now_ms)); + Timestamp::Millis(now_ms)); + bwe.UpdateRtt(TimeDelta::Millis(kRttMs), Timestamp::Millis(now_ms)); // Trigger an update 2 seconds later to not be rate limited. 
now_ms += 1000; - bwe.UpdateEstimate(Timestamp::ms(now_ms)); + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); EXPECT_LT(bwe.target_rate().bps(), kInitialBitrateBps); // Verify that the obtained bitrate isn't hitting the min bitrate, or this // test doesn't make sense. If this ever happens, update the thresholds or @@ -124,7 +131,7 @@ TEST(SendSideBweTest, DoesntReapplyBitrateDecreaseWithoutFollowingRemb) { // Trigger an update 2 seconds later to not be rate limited (but it still // shouldn't update). now_ms += 1000; - bwe.UpdateEstimate(Timestamp::ms(now_ms)); + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); EXPECT_EQ(last_bitrate_bps, bwe.target_rate().bps()); // The old loss rate should still be applied though. @@ -134,7 +141,8 @@ TEST(SendSideBweTest, DoesntReapplyBitrateDecreaseWithoutFollowingRemb) { TEST(SendSideBweTest, SettingSendBitrateOverridesDelayBasedEstimate) { ::testing::NiceMock event_log; - SendSideBandwidthEstimation bwe(&event_log); + test::ExplicitKeyValueConfig key_value_config(""); + SendSideBandwidthEstimation bwe(&key_value_config, &event_log); static const int kMinBitrateBps = 10000; static const int kMaxBitrateBps = 10000000; static const int kInitialBitrateBps = 300000; @@ -143,18 +151,33 @@ TEST(SendSideBweTest, SettingSendBitrateOverridesDelayBasedEstimate) { int64_t now_ms = 0; - bwe.SetMinMaxBitrate(DataRate::bps(kMinBitrateBps), - DataRate::bps(kMaxBitrateBps)); - bwe.SetSendBitrate(DataRate::bps(kInitialBitrateBps), Timestamp::ms(now_ms)); + bwe.SetMinMaxBitrate(DataRate::BitsPerSec(kMinBitrateBps), + DataRate::BitsPerSec(kMaxBitrateBps)); + bwe.SetSendBitrate(DataRate::BitsPerSec(kInitialBitrateBps), + Timestamp::Millis(now_ms)); - bwe.UpdateDelayBasedEstimate(Timestamp::ms(now_ms), - DataRate::bps(kDelayBasedBitrateBps)); - bwe.UpdateEstimate(Timestamp::ms(now_ms)); + bwe.UpdateDelayBasedEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kDelayBasedBitrateBps)); + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); 
EXPECT_GE(bwe.target_rate().bps(), kInitialBitrateBps); EXPECT_LE(bwe.target_rate().bps(), kDelayBasedBitrateBps); - bwe.SetSendBitrate(DataRate::bps(kForcedHighBitrate), Timestamp::ms(now_ms)); + bwe.SetSendBitrate(DataRate::BitsPerSec(kForcedHighBitrate), + Timestamp::Millis(now_ms)); EXPECT_EQ(bwe.target_rate().bps(), kForcedHighBitrate); } +TEST(RttBasedBackoff, DefaultEnabled) { + test::ExplicitKeyValueConfig key_value_config(""); + RttBasedBackoff rtt_backoff(&key_value_config); + EXPECT_TRUE(rtt_backoff.rtt_limit_.IsFinite()); +} + +TEST(RttBasedBackoff, CanBeDisabled) { + test::ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-MaxRttLimit/Disabled/"); + RttBasedBackoff rtt_backoff(&key_value_config); + EXPECT_TRUE(rtt_backoff.rtt_limit_.IsPlusInfinity()); +} + } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc b/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc index a0b3f37006..52baab06c7 100644 --- a/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc +++ b/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc @@ -70,7 +70,7 @@ GoogCcStatePrinter::GoogCcStatePrinter() { std::deque GoogCcStatePrinter::CreateLoggers() { auto stable_estimate = [this] { - return DataRate::kbps( + return DataRate::KilobitsPerSec( controller_->delay_based_bwe_->rate_control_.link_capacity_ .estimate_kbps_.value_or(-INFINITY)); }; @@ -80,7 +80,7 @@ std::deque GoogCcStatePrinter::CreateLoggers() { }; auto trend = [this] { return reinterpret_cast( - controller_->delay_based_bwe_->delay_detector_.get()); + controller_->delay_based_bwe_->active_delay_detector_); }; auto acknowledged_rate = [this] { return controller_->acknowledged_bitrate_estimator_->bitrate(); @@ -93,6 +93,7 @@ std::deque GoogCcStatePrinter::CreateLoggers() { Log("time", [=] { return target_.at_time; }), Log("rtt", [=] { return target_.network_estimate.round_trip_time; }), Log("target", [=] { return target_.target_rate; }), + 
Log("stable_target", [=] { return target_.stable_target_rate; }), Log("pacing", [=] { return pacing_.data_rate(); }), Log("padding", [=] { return pacing_.pad_rate(); }), Log("window", [=] { return congestion_window_; }), diff --git a/modules/congestion_controller/goog_cc/trendline_estimator.cc b/modules/congestion_controller/goog_cc/trendline_estimator.cc index 6675a3b0e9..c04db7351d 100644 --- a/modules/congestion_controller/goog_cc/trendline_estimator.cc +++ b/modules/congestion_controller/goog_cc/trendline_estimator.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/strings/match.h" #include "absl/types/optional.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" @@ -115,8 +116,9 @@ constexpr char TrendlineEstimatorSettings::kKey[]; TrendlineEstimatorSettings::TrendlineEstimatorSettings( const WebRtcKeyValueConfig* key_value_config) { - if (key_value_config->Lookup(kBweWindowSizeInPacketsExperiment) - .find("Enabled") == 0) { + if (absl::StartsWith( + key_value_config->Lookup(kBweWindowSizeInPacketsExperiment), + "Enabled")) { window_size = ReadTrendlineFilterWindowSize(key_value_config); } Parser()->Parse(key_value_config->Lookup(TrendlineEstimatorSettings::kKey)); diff --git a/modules/congestion_controller/include/receive_side_congestion_controller.h b/modules/congestion_controller/include/receive_side_congestion_controller.h index 4f13b4d549..034f2e9517 100644 --- a/modules/congestion_controller/include/receive_side_congestion_controller.h +++ b/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -18,8 +18,7 @@ #include "api/transport/network_control.h" #include "modules/include/module.h" #include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { class RemoteBitrateEstimator; @@ -66,6 +65,11 @@ 
class ReceiveSideCongestionController : public CallStatsObserver, public: WrappingBitrateEstimator(RemoteBitrateObserver* observer, Clock* clock); + WrappingBitrateEstimator() = delete; + WrappingBitrateEstimator(const WrappingBitrateEstimator&) = delete; + WrappingBitrateEstimator& operator=(const WrappingBitrateEstimator&) = + delete; + ~WrappingBitrateEstimator() override; void IncomingPacket(int64_t arrival_time_ms, @@ -87,17 +91,15 @@ class ReceiveSideCongestionController : public CallStatsObserver, private: void PickEstimatorFromHeader(const RTPHeader& header) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); - void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); RemoteBitrateObserver* observer_; Clock* const clock_; - rtc::CriticalSection crit_sect_; + mutable Mutex mutex_; std::unique_ptr rbe_; bool using_absolute_send_time_; uint32_t packets_since_absolute_send_time_; int min_bitrate_bps_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WrappingBitrateEstimator); }; const FieldTrialBasedConfig field_trial_config_; diff --git a/modules/congestion_controller/pcc/BUILD.gn b/modules/congestion_controller/pcc/BUILD.gn index d0111725d2..2f378769e7 100644 --- a/modules/congestion_controller/pcc/BUILD.gn +++ b/modules/congestion_controller/pcc/BUILD.gn @@ -37,8 +37,8 @@ rtc_library("pcc_controller") { "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("monitor_interval") { @@ -94,8 +94,8 @@ rtc_library("bitrate_controller") { "../../../api/transport:network_control", "../../../api/units:data_rate", "../../../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests) { diff --git 
a/modules/congestion_controller/pcc/bitrate_controller.cc b/modules/congestion_controller/pcc/bitrate_controller.cc index 44f306d20d..16b8e6966f 100644 --- a/modules/congestion_controller/pcc/bitrate_controller.cc +++ b/modules/congestion_controller/pcc/bitrate_controller.cc @@ -131,7 +131,7 @@ DataRate PccBitrateController::ComputeRateUpdateForOnlineLearningMode( double rate_change_bps = gradient * ComputeStepSize(gradient); // delta_r rate_change_bps = ApplyDynamicBoundary(rate_change_bps, bandwith_estimate.bps()); - return DataRate::bps( + return DataRate::BitsPerSec( std::max(0.0, bandwith_estimate.bps() + rate_change_bps)); } diff --git a/modules/congestion_controller/pcc/bitrate_controller_unittest.cc b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc index 7188ea33f4..957d99b1de 100644 --- a/modules/congestion_controller/pcc/bitrate_controller_unittest.cc +++ b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc @@ -32,13 +32,13 @@ constexpr double kThroughputPower = 0.99; constexpr double kDelayGradientThreshold = 0.01; constexpr double kDelayGradientNegativeBound = 10; -const DataRate kTargetSendingRate = DataRate::kbps(300); +const DataRate kTargetSendingRate = DataRate::KilobitsPerSec(300); const double kEpsilon = 0.05; -const Timestamp kStartTime = Timestamp::us(0); -const TimeDelta kPacketsDelta = TimeDelta::ms(1); -const TimeDelta kIntervalDuration = TimeDelta::ms(1000); -const TimeDelta kDefaultRtt = TimeDelta::ms(1000); -const DataSize kDefaultDataSize = DataSize::bytes(100); +const Timestamp kStartTime = Timestamp::Micros(0); +const TimeDelta kPacketsDelta = TimeDelta::Millis(1); +const TimeDelta kIntervalDuration = TimeDelta::Millis(1000); +const TimeDelta kDefaultRtt = TimeDelta::Millis(1000); +const DataSize kDefaultDataSize = DataSize::Bytes(100); std::vector CreatePacketResults( const std::vector& packets_send_times, @@ -67,8 +67,10 @@ std::vector CreatePacketResults( class MockUtilityFunction : public 
PccUtilityFunctionInterface { public: - MOCK_CONST_METHOD1(Compute, - double(const PccMonitorInterval& monitor_interval)); + MOCK_METHOD(double, + Compute, + (const PccMonitorInterval& monitor_interval), + (const, override)); }; } // namespace diff --git a/modules/congestion_controller/pcc/monitor_interval_unittest.cc b/modules/congestion_controller/pcc/monitor_interval_unittest.cc index 65bd8611d6..aaff57bd2a 100644 --- a/modules/congestion_controller/pcc/monitor_interval_unittest.cc +++ b/modules/congestion_controller/pcc/monitor_interval_unittest.cc @@ -18,12 +18,12 @@ namespace webrtc { namespace pcc { namespace test { namespace { -const DataRate kTargetSendingRate = DataRate::kbps(300); -const Timestamp kStartTime = Timestamp::us(0); -const TimeDelta kPacketsDelta = TimeDelta::ms(1); -const TimeDelta kIntervalDuration = TimeDelta::ms(100); -const TimeDelta kDefaultDelay = TimeDelta::ms(100); -const DataSize kDefaultPacketSize = DataSize::bytes(100); +const DataRate kTargetSendingRate = DataRate::KilobitsPerSec(300); +const Timestamp kStartTime = Timestamp::Micros(0); +const TimeDelta kPacketsDelta = TimeDelta::Millis(1); +const TimeDelta kIntervalDuration = TimeDelta::Millis(100); +const TimeDelta kDefaultDelay = TimeDelta::Millis(100); +const DataSize kDefaultPacketSize = DataSize::Bytes(100); constexpr double kDelayGradientThreshold = 0.01; std::vector CreatePacketResults( diff --git a/modules/congestion_controller/pcc/pcc_network_controller.cc b/modules/congestion_controller/pcc/pcc_network_controller.cc index 9f074afa62..8653470955 100644 --- a/modules/congestion_controller/pcc/pcc_network_controller.cc +++ b/modules/congestion_controller/pcc/pcc_network_controller.cc @@ -29,12 +29,11 @@ constexpr double kSlowStartModeIncrease = 1.5; constexpr double kAlphaForPacketInterval = 0.9; constexpr int64_t kMinPacketsNumberPerInterval = 20; -const TimeDelta kMinDurationOfMonitorInterval = TimeDelta::Millis<50>(); -const TimeDelta kStartupDuration = 
TimeDelta::Millis<500>(); +const TimeDelta kMinDurationOfMonitorInterval = TimeDelta::Millis(50); +const TimeDelta kStartupDuration = TimeDelta::Millis(500); constexpr double kMinRateChangeBps = 4000; -constexpr DataRate kMinRateHaveMultiplicativeRateChange = - DataRate::BitsPerSec(kMinRateChangeBps / - kDefaultSamplingStep)>(); +constexpr DataRate kMinRateHaveMultiplicativeRateChange = DataRate::BitsPerSec( + static_cast(kMinRateChangeBps / kDefaultSamplingStep)); // Bitrate controller constants. constexpr double kInitialConversionFactor = 5; @@ -57,10 +56,11 @@ PccNetworkController::PccNetworkController(NetworkControllerConfig config) last_sent_packet_time_(Timestamp::PlusInfinity()), smoothed_packets_sending_interval_(TimeDelta::Zero()), mode_(Mode::kStartup), - default_bandwidth_(DataRate::kbps(kInitialBandwidthKbps)), + default_bandwidth_(DataRate::KilobitsPerSec(kInitialBandwidthKbps)), bandwidth_estimate_(default_bandwidth_), - rtt_tracker_(TimeDelta::ms(kInitialRttMs), kAlphaForRtt), - monitor_interval_timeout_(TimeDelta::ms(kInitialRttMs) * kTimeoutRatio), + rtt_tracker_(TimeDelta::Millis(kInitialRttMs), kAlphaForRtt), + monitor_interval_timeout_(TimeDelta::Millis(kInitialRttMs) * + kTimeoutRatio), monitor_interval_length_strategy_(MonitorIntervalLengthStrategy::kFixed), monitor_interval_duration_ratio_(kMonitorIntervalDurationRatio), sampling_step_(kDefaultSamplingStep), @@ -115,7 +115,7 @@ NetworkControlUpdate PccNetworkController::CreateRateUpdate( // Set up pacing/padding target rate. 
PacerConfig pacer_config; pacer_config.at_time = at_time; - pacer_config.time_window = TimeDelta::ms(1); + pacer_config.time_window = TimeDelta::Millis(1); pacer_config.data_window = sending_rate * pacer_config.time_window; pacer_config.pad_window = sending_rate * pacer_config.time_window; @@ -215,9 +215,9 @@ NetworkControlUpdate PccNetworkController::OnSentPacket(SentPacket msg) { bandwidth_estimate_ * (1 - sign * sampling_step_)}; } else { monitor_intervals_bitrates_ = { - DataRate::bps(std::max( + DataRate::BitsPerSec(std::max( bandwidth_estimate_.bps() + sign * kMinRateChangeBps, 0)), - DataRate::bps(std::max( + DataRate::BitsPerSec(std::max( bandwidth_estimate_.bps() - sign * kMinRateChangeBps, 0))}; } monitor_intervals_.emplace_back(monitor_intervals_bitrates_[0], diff --git a/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc b/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc index 9910a03322..c98680c785 100644 --- a/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc +++ b/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc @@ -28,8 +28,8 @@ namespace webrtc { namespace test { namespace { -const DataRate kInitialBitrate = DataRate::kbps(60); -const Timestamp kDefaultStartTime = Timestamp::ms(10000000); +const DataRate kInitialBitrate = DataRate::KilobitsPerSec(60); +const Timestamp kDefaultStartTime = Timestamp::Millis(10000000); constexpr double kDataRateMargin = 0.20; constexpr double kMinDataRateFactor = 1 - kDataRateMargin; @@ -47,9 +47,12 @@ NetworkControllerConfig InitialConfig( int max_data_rate_kbps = 5 * kInitialBitrate.kbps()) { NetworkControllerConfig config; config.constraints.at_time = kDefaultStartTime; - config.constraints.min_data_rate = DataRate::kbps(min_data_rate_kbps); - config.constraints.max_data_rate = DataRate::kbps(max_data_rate_kbps); - config.constraints.starting_rate = DataRate::kbps(starting_bandwidth_kbps); + config.constraints.min_data_rate = + 
DataRate::KilobitsPerSec(min_data_rate_kbps); + config.constraints.max_data_rate = + DataRate::KilobitsPerSec(max_data_rate_kbps); + config.constraints.starting_rate = + DataRate::KilobitsPerSec(starting_bandwidth_kbps); return config; } @@ -77,15 +80,15 @@ TEST(PccNetworkControllerTest, UpdatesTargetSendRate) { Scenario s("pcc_unit/updates_rate", false); CallClientConfig config; config.transport.cc_factory = &factory; - config.transport.rates.min_rate = DataRate::kbps(10); - config.transport.rates.max_rate = DataRate::kbps(1500); - config.transport.rates.start_rate = DataRate::kbps(300); + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(1500); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); auto send_net = s.CreateMutableSimulationNode([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(500); - c->delay = TimeDelta::ms(100); + c->bandwidth = DataRate::KilobitsPerSec(500); + c->delay = TimeDelta::Millis(100); }); auto ret_net = s.CreateMutableSimulationNode( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(100); }); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); auto* client = s.CreateClient("send", config); auto* route = s.CreateRoutes(client, {send_net->node()}, @@ -94,21 +97,21 @@ TEST(PccNetworkControllerTest, UpdatesTargetSendRate) { VideoStreamConfig video; video.stream.use_rtx = false; s.CreateVideoStream(route->forward(), video); - s.RunFor(TimeDelta::seconds(30)); + s.RunFor(TimeDelta::Seconds(30)); EXPECT_NEAR(client->target_rate().kbps(), 450, 100); send_net->UpdateConfig([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(800); - c->delay = TimeDelta::ms(100); + c->bandwidth = DataRate::KilobitsPerSec(800); + c->delay = TimeDelta::Millis(100); }); - s.RunFor(TimeDelta::seconds(20)); + s.RunFor(TimeDelta::Seconds(20)); EXPECT_NEAR(client->target_rate().kbps(), 750, 150); 
send_net->UpdateConfig([](NetworkSimulationConfig* c) { - c->bandwidth = DataRate::kbps(200); - c->delay = TimeDelta::ms(200); + c->bandwidth = DataRate::KilobitsPerSec(200); + c->delay = TimeDelta::Millis(200); }); ret_net->UpdateConfig( - [](NetworkSimulationConfig* c) { c->delay = TimeDelta::ms(200); }); - s.RunFor(TimeDelta::seconds(35)); + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(200); }); + s.RunFor(TimeDelta::Seconds(35)); EXPECT_NEAR(client->target_rate().kbps(), 170, 50); } diff --git a/modules/congestion_controller/pcc/rtt_tracker_unittest.cc b/modules/congestion_controller/pcc/rtt_tracker_unittest.cc index dd667b81b6..7d90e86822 100644 --- a/modules/congestion_controller/pcc/rtt_tracker_unittest.cc +++ b/modules/congestion_controller/pcc/rtt_tracker_unittest.cc @@ -16,9 +16,9 @@ namespace webrtc { namespace pcc { namespace test { namespace { -const TimeDelta kInitialRtt = TimeDelta::us(10); +const TimeDelta kInitialRtt = TimeDelta::Micros(10); constexpr double kAlpha = 0.9; -const Timestamp kStartTime = Timestamp::seconds(0); +const Timestamp kStartTime = Timestamp::Seconds(0); PacketResult GetPacketWithRtt(TimeDelta rtt) { SentPacket packet; @@ -53,7 +53,7 @@ TEST(PccRttTrackerTest, DoNothingWhenPacketIsLost) { TEST(PccRttTrackerTest, ChangeInRtt) { RttTracker tracker{kInitialRtt, kAlpha}; - const TimeDelta kNewRtt = TimeDelta::us(100); + const TimeDelta kNewRtt = TimeDelta::Micros(100); tracker.OnPacketsFeedback({GetPacketWithRtt(kNewRtt)}, kStartTime + kNewRtt); EXPECT_GT(tracker.GetRtt(), kInitialRtt); EXPECT_LE(tracker.GetRtt(), kNewRtt); @@ -61,7 +61,7 @@ TEST(PccRttTrackerTest, ChangeInRtt) { tracker.OnPacketsFeedback({GetPacketWithRtt(kNewRtt)}, kStartTime + kNewRtt); } - const TimeDelta absolute_error = TimeDelta::us(1); + const TimeDelta absolute_error = TimeDelta::Micros(1); EXPECT_NEAR(tracker.GetRtt().us(), kNewRtt.us(), absolute_error.us()); EXPECT_LE(tracker.GetRtt(), kNewRtt); } diff --git 
a/modules/congestion_controller/pcc/utility_function_unittest.cc b/modules/congestion_controller/pcc/utility_function_unittest.cc index 0459fb847b..19b2d15920 100644 --- a/modules/congestion_controller/pcc/utility_function_unittest.cc +++ b/modules/congestion_controller/pcc/utility_function_unittest.cc @@ -32,13 +32,13 @@ constexpr double kThroughputPower = 0.9; constexpr double kThroughputCoefficient = 1; constexpr double kDelayGradientNegativeBound = 10; -const Timestamp kStartTime = Timestamp::us(0); -const TimeDelta kPacketsDelta = TimeDelta::ms(1); -const TimeDelta kIntervalDuration = TimeDelta::ms(100); -const DataRate kSendingBitrate = DataRate::bps(1000); +const Timestamp kStartTime = Timestamp::Micros(0); +const TimeDelta kPacketsDelta = TimeDelta::Millis(1); +const TimeDelta kIntervalDuration = TimeDelta::Millis(100); +const DataRate kSendingBitrate = DataRate::BitsPerSec(1000); -const DataSize kDefaultDataSize = DataSize::bytes(100); -const TimeDelta kDefaultDelay = TimeDelta::ms(100); +const DataSize kDefaultDataSize = DataSize::Bytes(100); +const TimeDelta kDefaultDelay = TimeDelta::Millis(100); std::vector CreatePacketResults( const std::vector& packets_send_times, diff --git a/modules/congestion_controller/receive_side_congestion_controller.cc b/modules/congestion_controller/receive_side_congestion_controller.cc index 7448ec28b2..638cb2d295 100644 --- a/modules/congestion_controller/receive_side_congestion_controller.cc +++ b/modules/congestion_controller/receive_side_congestion_controller.cc @@ -38,45 +38,45 @@ void ReceiveSideCongestionController::WrappingBitrateEstimator::IncomingPacket( int64_t arrival_time_ms, size_t payload_size, const RTPHeader& header) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); PickEstimatorFromHeader(header); rbe_->IncomingPacket(arrival_time_ms, payload_size, header); } void ReceiveSideCongestionController::WrappingBitrateEstimator::Process() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); 
rbe_->Process(); } int64_t ReceiveSideCongestionController::WrappingBitrateEstimator:: TimeUntilNextProcess() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return rbe_->TimeUntilNextProcess(); } void ReceiveSideCongestionController::WrappingBitrateEstimator::OnRttUpdate( int64_t avg_rtt_ms, int64_t max_rtt_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); rbe_->OnRttUpdate(avg_rtt_ms, max_rtt_ms); } void ReceiveSideCongestionController::WrappingBitrateEstimator::RemoveStream( unsigned int ssrc) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); rbe_->RemoveStream(ssrc); } bool ReceiveSideCongestionController::WrappingBitrateEstimator::LatestEstimate( std::vector* ssrcs, unsigned int* bitrate_bps) const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return rbe_->LatestEstimate(ssrcs, bitrate_bps); } void ReceiveSideCongestionController::WrappingBitrateEstimator::SetMinBitrate( int min_bitrate_bps) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); rbe_->SetMinBitrate(min_bitrate_bps); min_bitrate_bps_ = min_bitrate_bps; } diff --git a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc index 5473497633..b5846237ee 100644 --- a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc +++ b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc @@ -37,8 +37,10 @@ uint32_t AbsSendTime(int64_t t, int64_t denom) { class MockPacketRouter : public PacketRouter { public: - MOCK_METHOD2(OnReceiveBitrateChanged, - void(const std::vector& ssrcs, uint32_t bitrate)); + MOCK_METHOD(void, + OnReceiveBitrateChanged, + (const std::vector& ssrcs, uint32_t bitrate), + (override)); }; const uint32_t kInitialBitrateBps = 60000; @@ -76,10 +78,10 @@ TEST(ReceiveSideCongestionControllerTest, OnReceivedPacketWithAbsSendTime) { TEST(ReceiveSideCongestionControllerTest, 
ConvergesToCapacity) { Scenario s("recieve_cc_unit/converge"); NetworkSimulationConfig net_conf; - net_conf.bandwidth = DataRate::kbps(1000); - net_conf.delay = TimeDelta::ms(50); + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(50); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { - c->transport.rates.start_rate = DataRate::kbps(300); + c->transport.rates.start_rate = DataRate::KilobitsPerSec(300); }); auto* route = s.CreateRoutes(client, {s.CreateSimulationNode(net_conf)}, @@ -88,17 +90,17 @@ TEST(ReceiveSideCongestionControllerTest, ConvergesToCapacity) { VideoStreamConfig video; video.stream.packet_feedback = false; s.CreateVideoStream(route->forward(), video); - s.RunFor(TimeDelta::seconds(30)); + s.RunFor(TimeDelta::Seconds(30)); EXPECT_NEAR(client->send_bandwidth().kbps(), 900, 150); } TEST(ReceiveSideCongestionControllerTest, IsFairToTCP) { Scenario s("recieve_cc_unit/tcp_fairness"); NetworkSimulationConfig net_conf; - net_conf.bandwidth = DataRate::kbps(1000); - net_conf.delay = TimeDelta::ms(50); + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(50); auto* client = s.CreateClient("send", [&](CallClientConfig* c) { - c->transport.rates.start_rate = DataRate::kbps(1000); + c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000); }); auto send_net = {s.CreateSimulationNode(net_conf)}; auto ret_net = {s.CreateSimulationNode(net_conf)}; @@ -108,7 +110,7 @@ TEST(ReceiveSideCongestionControllerTest, IsFairToTCP) { video.stream.packet_feedback = false; s.CreateVideoStream(route->forward(), video); s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig()); - s.RunFor(TimeDelta::seconds(30)); + s.RunFor(TimeDelta::Seconds(30)); // For some reason we get outcompeted by TCP here, this should probably be // fixed and a lower bound should be added to the test. 
EXPECT_LT(client->send_bandwidth().kbps(), 750); diff --git a/modules/congestion_controller/rtp/BUILD.gn b/modules/congestion_controller/rtp/BUILD.gn index 38a4bf19df..2f97b67263 100644 --- a/modules/congestion_controller/rtp/BUILD.gn +++ b/modules/congestion_controller/rtp/BUILD.gn @@ -33,8 +33,8 @@ rtc_library("control_handler") { "../../../rtc_base/synchronization:sequence_checker", "../../../system_wrappers:field_trial", "../../pacing", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] if (!build_with_mozilla) { deps += [ "../../../rtc_base" ] @@ -54,12 +54,16 @@ rtc_library("transport_feedback") { "../../../api/transport:network_control", "../../../api/units:data_size", "../../../api/units:timestamp", + "../../../rtc_base", "../../../rtc_base:checks", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/network:sent_packet", + "../../../rtc_base/synchronization:mutex", "../../../system_wrappers", "../../../system_wrappers:field_trial", "../../rtp_rtcp:rtp_rtcp_format", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter.cc b/modules/congestion_controller/rtp/transport_feedback_adapter.cc index b1de93559c..87691bf263 100644 --- a/modules/congestion_controller/rtp/transport_feedback_adapter.cc +++ b/modules/congestion_controller/rtp/transport_feedback_adapter.cc @@ -26,17 +26,16 @@ namespace webrtc { -constexpr TimeDelta kSendTimeHistoryWindow = TimeDelta::Seconds<60>(); +constexpr TimeDelta kSendTimeHistoryWindow = TimeDelta::Seconds(60); void InFlightBytesTracker::AddInFlightPacketBytes( const PacketFeedback& packet) { RTC_DCHECK(packet.sent.send_time.IsFinite()); - auto it = in_flight_data_.find({packet.local_net_id, packet.remote_net_id}); + auto it = in_flight_data_.find(packet.network_route); if (it != in_flight_data_.end()) { it->second += 
packet.sent.size; } else { - in_flight_data_.insert( - {{packet.local_net_id, packet.remote_net_id}, packet.sent.size}); + in_flight_data_.insert({packet.network_route, packet.sent.size}); } } @@ -44,7 +43,7 @@ void InFlightBytesTracker::RemoveInFlightPacketBytes( const PacketFeedback& packet) { if (packet.sent.send_time.IsInfinite()) return; - auto it = in_flight_data_.find({packet.local_net_id, packet.remote_net_id}); + auto it = in_flight_data_.find(packet.network_route); if (it != in_flight_data_.end()) { RTC_DCHECK_GE(it->second, packet.sent.size); it->second -= packet.sent.size; @@ -54,9 +53,8 @@ void InFlightBytesTracker::RemoveInFlightPacketBytes( } DataSize InFlightBytesTracker::GetOutstandingData( - uint16_t local_net_id, - uint16_t remote_net_id) const { - auto it = in_flight_data_.find({local_net_id, remote_net_id}); + const rtc::NetworkRoute& network_route) const { + auto it = in_flight_data_.find(network_route); if (it != in_flight_data_.end()) { return it->second; } else { @@ -64,6 +62,28 @@ DataSize InFlightBytesTracker::GetOutstandingData( } } +// Comparator for consistent map with NetworkRoute as key. 
+bool InFlightBytesTracker::NetworkRouteComparator::operator()( + const rtc::NetworkRoute& a, + const rtc::NetworkRoute& b) const { + if (a.local.network_id() != b.local.network_id()) + return a.local.network_id() < b.local.network_id(); + if (a.remote.network_id() != b.remote.network_id()) + return a.remote.network_id() < b.remote.network_id(); + + if (a.local.adapter_id() != b.local.adapter_id()) + return a.local.adapter_id() < b.local.adapter_id(); + if (a.remote.adapter_id() != b.remote.adapter_id()) + return a.remote.adapter_id() < b.remote.adapter_id(); + + if (a.local.uses_turn() != b.local.uses_turn()) + return a.local.uses_turn() < b.local.uses_turn(); + if (a.remote.uses_turn() != b.remote.uses_turn()) + return a.remote.uses_turn() < b.remote.uses_turn(); + + return a.connected < b.connected; +} + TransportFeedbackAdapter::TransportFeedbackAdapter() = default; @@ -74,9 +94,9 @@ void TransportFeedbackAdapter::AddPacket(const RtpPacketSendInfo& packet_info, packet.creation_time = creation_time; packet.sent.sequence_number = seq_num_unwrapper_.Unwrap(packet_info.transport_sequence_number); - packet.sent.size = DataSize::bytes(packet_info.length + overhead_bytes); - packet.local_net_id = local_net_id_; - packet.remote_net_id = remote_net_id_; + packet.sent.size = DataSize::Bytes(packet_info.length + overhead_bytes); + packet.sent.audio = packet_info.packet_type == RtpPacketMediaType::kAudio; + packet.network_route = network_route_; packet.sent.pacing_info = packet_info.pacing_info; while (!history_.empty() && @@ -89,9 +109,10 @@ void TransportFeedbackAdapter::AddPacket(const RtpPacketSendInfo& packet_info, } history_.insert(std::make_pair(packet.sent.sequence_number, packet)); } + absl::optional TransportFeedbackAdapter::ProcessSentPacket( const rtc::SentPacket& sent_packet) { - auto send_time = Timestamp::ms(sent_packet.send_time_ms); + auto send_time = Timestamp::Millis(sent_packet.send_time_ms); // TODO(srte): Only use one way to indicate that packet 
feedback is used. if (sent_packet.info.included_in_feedback || sent_packet.packet_id != -1) { int64_t unwrapped_seq_num = @@ -122,7 +143,7 @@ absl::optional TransportFeedbackAdapter::ProcessSentPacket( RTC_LOG(LS_WARNING) << "ignoring untracked data for out of order packet."; } pending_untracked_size_ += - DataSize::bytes(sent_packet.info.packet_size_bytes); + DataSize::Bytes(sent_packet.info.packet_size_bytes); last_untracked_send_time_ = std::max(last_untracked_send_time_, send_time); } return absl::nullopt; @@ -140,8 +161,7 @@ TransportFeedbackAdapter::ProcessTransportFeedback( TransportPacketsFeedback msg; msg.feedback_time = feedback_receive_time; - msg.prior_in_flight = - in_flight_.GetOutstandingData(local_net_id_, remote_net_id_); + msg.prior_in_flight = in_flight_.GetOutstandingData(network_route_); msg.packet_feedbacks = ProcessTransportFeedbackInner(feedback, feedback_receive_time); if (msg.packet_feedbacks.empty()) @@ -151,39 +171,37 @@ TransportFeedbackAdapter::ProcessTransportFeedback( if (it != history_.end()) { msg.first_unacked_send_time = it->second.sent.send_time; } - msg.data_in_flight = - in_flight_.GetOutstandingData(local_net_id_, remote_net_id_); + msg.data_in_flight = in_flight_.GetOutstandingData(network_route_); return msg; } -void TransportFeedbackAdapter::SetNetworkIds(uint16_t local_id, - uint16_t remote_id) { - local_net_id_ = local_id; - remote_net_id_ = remote_id; +void TransportFeedbackAdapter::SetNetworkRoute( + const rtc::NetworkRoute& network_route) { + network_route_ = network_route; } DataSize TransportFeedbackAdapter::GetOutstandingData() const { - return in_flight_.GetOutstandingData(local_net_id_, remote_net_id_); + return in_flight_.GetOutstandingData(network_route_); } std::vector TransportFeedbackAdapter::ProcessTransportFeedbackInner( const rtcp::TransportFeedback& feedback, - Timestamp feedback_time) { + Timestamp feedback_receive_time) { // Add timestamp deltas to a local time base selected on first packet arrival. 
// This won't be the true time base, but makes it easier to manually inspect // time stamps. if (last_timestamp_.IsInfinite()) { - current_offset_ = feedback_time; + current_offset_ = feedback_receive_time; } else { // TODO(srte): We shouldn't need to do rounding here. const TimeDelta delta = feedback.GetBaseDelta(last_timestamp_) - .RoundDownTo(TimeDelta::Millis<1>()); + .RoundDownTo(TimeDelta::Millis(1)); // Protect against assigning current_offset_ negative value. if (delta < Timestamp::Zero() - current_offset_) { RTC_LOG(LS_WARNING) << "Unexpected feedback timestamp received."; - current_offset_ = feedback_time; + current_offset_ = feedback_receive_time; } else { current_offset_ += delta; } @@ -227,13 +245,12 @@ TransportFeedbackAdapter::ProcessTransportFeedbackInner( if (packet.received()) { packet_offset += packet.delta(); packet_feedback.receive_time = - current_offset_ + packet_offset.RoundDownTo(TimeDelta::Millis<1>()); + current_offset_ + packet_offset.RoundDownTo(TimeDelta::Millis(1)); // Note: Lost packets are not removed from history because they might be // reported as received by a later feedback. 
history_.erase(it); } - if (packet_feedback.local_net_id == local_net_id_ && - packet_feedback.remote_net_id == remote_net_id_) { + if (packet_feedback.network_route == network_route_) { PacketResult result; result.sent_packet = packet_feedback.sent; result.receive_time = packet_feedback.receive_time; diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter.h b/modules/congestion_controller/rtp/transport_feedback_adapter.h index c8ff9b9db5..c41a7c67f8 100644 --- a/modules/congestion_controller/rtp/transport_feedback_adapter.h +++ b/modules/congestion_controller/rtp/transport_feedback_adapter.h @@ -19,8 +19,8 @@ #include "api/transport/network_types.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/critical_section.h" #include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" @@ -32,24 +32,26 @@ struct PacketFeedback { Timestamp creation_time = Timestamp::MinusInfinity(); SentPacket sent; // Time corresponding to when the packet was received. Timestamped with the - // receiver's clock. For unreceived packet, Timestamp::PlusInfinity() is used. + // receiver's clock. For unreceived packet, Timestamp::PlusInfinity() is + // used. Timestamp receive_time = Timestamp::PlusInfinity(); - // The network route ids that this packet is associated with. - uint16_t local_net_id = 0; - uint16_t remote_net_id = 0; + // The network route that this packet is associated with. 
+ rtc::NetworkRoute network_route; }; class InFlightBytesTracker { public: void AddInFlightPacketBytes(const PacketFeedback& packet); void RemoveInFlightPacketBytes(const PacketFeedback& packet); - DataSize GetOutstandingData(uint16_t local_net_id, - uint16_t remote_net_id) const; + DataSize GetOutstandingData(const rtc::NetworkRoute& network_route) const; private: - using RemoteAndLocalNetworkId = std::pair; - std::map in_flight_data_; + struct NetworkRouteComparator { + bool operator()(const rtc::NetworkRoute& a, + const rtc::NetworkRoute& b) const; + }; + std::map in_flight_data_; }; class TransportFeedbackAdapter { @@ -64,9 +66,9 @@ class TransportFeedbackAdapter { absl::optional ProcessTransportFeedback( const rtcp::TransportFeedback& feedback, - Timestamp feedback_time); + Timestamp feedback_receive_time); - void SetNetworkIds(uint16_t local_id, uint16_t remote_id); + void SetNetworkRoute(const rtc::NetworkRoute& network_route); DataSize GetOutstandingData() const; @@ -75,7 +77,7 @@ class TransportFeedbackAdapter { std::vector ProcessTransportFeedbackInner( const rtcp::TransportFeedback& feedback, - Timestamp feedback_time); + Timestamp feedback_receive_time); DataSize pending_untracked_size_ = DataSize::Zero(); Timestamp last_send_time_ = Timestamp::MinusInfinity(); @@ -91,8 +93,7 @@ class TransportFeedbackAdapter { Timestamp current_offset_ = Timestamp::MinusInfinity(); TimeDelta last_timestamp_ = TimeDelta::MinusInfinity(); - uint16_t local_net_id_ = 0; - uint16_t remote_net_id_ = 0; + rtc::NetworkRoute network_route_; }; } // namespace webrtc diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc index 4631dc29ef..3849cb3707 100644 --- a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc +++ b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc @@ -69,10 +69,10 @@ PacketResult CreatePacket(int64_t 
receive_time_ms, size_t payload_size, const PacedPacketInfo& pacing_info) { PacketResult res; - res.receive_time = Timestamp::ms(receive_time_ms); - res.sent_packet.send_time = Timestamp::ms(send_time_ms); + res.receive_time = Timestamp::Millis(receive_time_ms); + res.sent_packet.send_time = Timestamp::Millis(send_time_ms); res.sent_packet.sequence_number = sequence_number; - res.sent_packet.size = DataSize::bytes(payload_size); + res.sent_packet.size = DataSize::Bytes(payload_size); res.sent_packet.pacing_info = pacing_info; return res; } @@ -83,8 +83,10 @@ namespace test { class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver { public: - MOCK_METHOD1(OnPacketFeedbackVector, - void(std::vector packet_feedback_vector)); + MOCK_METHOD(void, + OnPacketFeedbackVector, + (std::vector packet_feedback_vector), + (override)); }; class TransportFeedbackAdapterTest : public ::testing::Test { @@ -110,9 +112,9 @@ class TransportFeedbackAdapterTest : public ::testing::Test { packet_info.transport_sequence_number = packet_feedback.sent_packet.sequence_number; packet_info.rtp_sequence_number = 0; - packet_info.has_rtp_sequence_number = true; packet_info.length = packet_feedback.sent_packet.size.bytes(); packet_info.pacing_info = packet_feedback.sent_packet.pacing_info; + packet_info.packet_type = RtpPacketMediaType::kVideo; adapter_->AddPacket(RtpPacketSendInfo(packet_info), 0u, clock_.CurrentTime()); adapter_->ProcessSentPacket(rtc::SentPacket( @@ -294,22 +296,22 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) { std::vector sent_packets; // TODO(srte): Consider using us resolution in the constants. 
const TimeDelta kSmallDelta = - TimeDelta::us(rtcp::TransportFeedback::kDeltaScaleFactor * 0xFF) - .RoundDownTo(TimeDelta::ms(1)); + TimeDelta::Micros(rtcp::TransportFeedback::kDeltaScaleFactor * 0xFF) + .RoundDownTo(TimeDelta::Millis(1)); const TimeDelta kLargePositiveDelta = - TimeDelta::us(rtcp::TransportFeedback::kDeltaScaleFactor * - std::numeric_limits::max()) - .RoundDownTo(TimeDelta::ms(1)); + TimeDelta::Micros(rtcp::TransportFeedback::kDeltaScaleFactor * + std::numeric_limits::max()) + .RoundDownTo(TimeDelta::Millis(1)); const TimeDelta kLargeNegativeDelta = - TimeDelta::us(rtcp::TransportFeedback::kDeltaScaleFactor * - std::numeric_limits::min()) - .RoundDownTo(TimeDelta::ms(1)); + TimeDelta::Micros(rtcp::TransportFeedback::kDeltaScaleFactor * + std::numeric_limits::min()) + .RoundDownTo(TimeDelta::Millis(1)); PacketResult packet_feedback; packet_feedback.sent_packet.sequence_number = 1; - packet_feedback.sent_packet.send_time = Timestamp::ms(100); - packet_feedback.receive_time = Timestamp::ms(200); - packet_feedback.sent_packet.size = DataSize::bytes(1500); + packet_feedback.sent_packet.send_time = Timestamp::Millis(100); + packet_feedback.receive_time = Timestamp::Millis(200); + packet_feedback.sent_packet.size = DataSize::Bytes(1500); sent_packets.push_back(packet_feedback); // TODO(srte): This rounding maintains previous behavior, but should ot be @@ -331,8 +333,8 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) { // Too large, delta - will need two feedback messages. packet_feedback.sent_packet.send_time += - kLargePositiveDelta + TimeDelta::ms(1); - packet_feedback.receive_time += kLargePositiveDelta + TimeDelta::ms(1); + kLargePositiveDelta + TimeDelta::Millis(1); + packet_feedback.receive_time += kLargePositiveDelta + TimeDelta::Millis(1); ++packet_feedback.sent_packet.sequence_number; // Packets will be added to send history. 
@@ -395,6 +397,7 @@ TEST_F(TransportFeedbackAdapterTest, IgnoreDuplicatePacketSentCalls) { packet_info.transport_sequence_number = packet.sent_packet.sequence_number; packet_info.length = packet.sent_packet.size.bytes(); packet_info.pacing_info = packet.sent_packet.pacing_info; + packet_info.packet_type = RtpPacketMediaType::kVideo; adapter_->AddPacket(packet_info, 0u, clock_.CurrentTime()); absl::optional sent_packet = adapter_->ProcessSentPacket( rtc::SentPacket(packet.sent_packet.sequence_number, diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc index c7893d71a9..c958a1c3cb 100644 --- a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc @@ -18,7 +18,7 @@ static const size_t kMaxPacketsInHistory = 5000; void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver( std::vector ssrcs, StreamFeedbackObserver* observer) { - rtc::CritScope cs(&observers_lock_); + MutexLock lock(&observers_lock_); RTC_DCHECK(observer); RTC_DCHECK(absl::c_find_if(observers_, [=](const auto& pair) { return pair.second == observer; @@ -28,7 +28,7 @@ void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver( void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver( StreamFeedbackObserver* observer) { - rtc::CritScope cs(&observers_lock_); + MutexLock lock(&observers_lock_); RTC_DCHECK(observer); const auto it = absl::c_find_if( observers_, [=](const auto& pair) { return pair.second == observer; }); @@ -37,8 +37,8 @@ void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver( } void TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) { - rtc::CritScope cs(&lock_); - if (packet_info.has_rtp_sequence_number && packet_info.ssrc != 0) { + MutexLock lock(&lock_); + if (packet_info.ssrc != 0) { StreamFeedbackObserver::StreamPacketInfo info; info.ssrc = packet_info.ssrc; 
info.rtp_sequence_number = packet_info.rtp_sequence_number; @@ -56,7 +56,7 @@ void TransportFeedbackDemuxer::OnTransportFeedback( const rtcp::TransportFeedback& feedback) { std::vector stream_feedbacks; { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); for (const auto& packet : feedback.GetAllPackets()) { int64_t seq_num = seq_num_unwrapper_.UnwrapWithoutUpdate(packet.sequence_number()); @@ -71,7 +71,7 @@ void TransportFeedbackDemuxer::OnTransportFeedback( } } - rtc::CritScope cs(&observers_lock_); + MutexLock lock(&observers_lock_); for (auto& observer : observers_) { std::vector selected_feedback; for (const auto& packet_info : stream_feedbacks) { diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.h b/modules/congestion_controller/rtp/transport_feedback_demuxer.h index bcd25d5835..634a37ea1a 100644 --- a/modules/congestion_controller/rtp/transport_feedback_demuxer.h +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.h @@ -16,7 +16,7 @@ #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -32,7 +32,7 @@ class TransportFeedbackDemuxer : public StreamFeedbackProvider { void OnTransportFeedback(const rtcp::TransportFeedback& feedback); private: - rtc::CriticalSection lock_; + Mutex lock_; SequenceNumberUnwrapper seq_num_unwrapper_ RTC_GUARDED_BY(&lock_); std::map history_ RTC_GUARDED_BY(&lock_); @@ -40,7 +40,7 @@ class TransportFeedbackDemuxer : public StreamFeedbackProvider { // Maps a set of ssrcs to corresponding observer. Vectors are used rather than // set/map to ensure that the processing order is consistent independently of // the randomized ssrcs. 
- rtc::CriticalSection observers_lock_; + Mutex observers_lock_; std::vector, StreamFeedbackObserver*>> observers_ RTC_GUARDED_BY(&observers_lock_); }; diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc index 144e3e135d..6514a4eda7 100644 --- a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc @@ -21,8 +21,10 @@ static constexpr uint32_t kSsrc = 8492; class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver { public: - MOCK_METHOD1(OnPacketFeedbackVector, - void(std::vector packet_feedback_vector)); + MOCK_METHOD(void, + OnPacketFeedbackVector, + (std::vector packet_feedback_vector), + (override)); }; RtpPacketSendInfo CreatePacket(uint32_t ssrc, @@ -32,7 +34,6 @@ RtpPacketSendInfo CreatePacket(uint32_t ssrc, res.ssrc = ssrc; res.transport_sequence_number = transport_sequence_number; res.rtp_sequence_number = rtp_sequence_number; - res.has_rtp_sequence_number = true; return res; } } // namespace diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn index e2554d2eec..1731931554 100644 --- a/modules/desktop_capture/BUILD.gn +++ b/modules/desktop_capture/BUILD.gn @@ -42,7 +42,8 @@ rtc_library("primitives") { ] if (!build_with_mozilla) { - deps += [ "../../rtc_base" ] # TODO(kjellander): Cleanup in bugs.webrtc.org/3806. + deps += [ "../../rtc_base" ] # TODO(kjellander): Cleanup in + # bugs.webrtc.org/3806. 
} } @@ -75,6 +76,9 @@ if (rtc_include_tests) { "window_finder_unittest.cc", ] public_configs = [ ":x11_config" ] + if (is_win) { + deps += [ "../../rtc_base:win32" ] + } } } @@ -114,16 +118,18 @@ if (rtc_include_tests) { ":primitives", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", - "../../system_wrappers:cpu_features_api", + "../../system_wrappers", "../../test:test_support", ] if (rtc_desktop_capture_supported) { sources += [ "screen_capturer_helper_unittest.cc", - "screen_capturer_mac_unittest.cc", "screen_capturer_unittest.cc", "window_capturer_unittest.cc", ] + if (is_mac) { + sources += [ "screen_capturer_mac_unittest.cc" ] + } deps += [ ":desktop_capture_mock" ] public_configs = [ ":x11_config" ] } @@ -139,7 +145,7 @@ if (rtc_include_tests) { "screen_drawer.h", ] - if (is_linux) { + if (is_linux || is_chromeos) { sources += [ "screen_drawer_linux.cc" ] } @@ -185,7 +191,7 @@ if (rtc_include_tests) { } } -if (is_linux) { +if (is_linux || is_chromeos) { if (rtc_use_pipewire) { pkg_config("gio") { packages = [ @@ -224,7 +230,8 @@ if (is_linux) { rtc_source_set("desktop_capture") { visibility = [ "*" ] - public_deps = [ ":desktop_capture_generic" ] # no-presubmit-check TODO(webrtc:8603) + public_deps = # no-presubmit-check TODO(webrtc:8603) + [ ":desktop_capture_generic" ] if (is_mac) { public_deps += [ ":desktop_capture_objc" ] } @@ -256,11 +263,10 @@ if (is_mac) { "../../rtc_base", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", - "../../rtc_base/synchronization:rw_lock_wrapper", "../../rtc_base/system:rtc_export", "../../sdk:helpers_objc", ] - libs = [ + frameworks = [ "AppKit.framework", "IOKit.framework", "IOSurface.framework", @@ -355,7 +361,7 @@ rtc_library("desktop_capture_generic") { "window_capturer_linux.cc", ] - if (build_with_mozilla && is_linux) { + if (build_with_mozilla && (is_linux || is_chromeos)) { sources += [ "app_capturer_linux.cc", "linux/app_capturer_x11.cc", @@ -397,6 +403,8 @@ 
rtc_library("desktop_capture_generic") { "Xext", "Xfixes", "Xrender", + "Xrandr", + "Xtst", ] } @@ -408,6 +416,20 @@ rtc_library("desktop_capture_generic") { ] } + deps = [ + ":primitives", + "../../api:function_view", + "../../api:refcountedbase", + "../../api:scoped_refptr", + "../../rtc_base", # TODO(kjellander): Cleanup in bugs.webrtc.org/3806. + "../../rtc_base:checks", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/system:arch", + "../../rtc_base/system:rtc_export", + "../../system_wrappers", + "../../system_wrappers:metrics", + ] + if (is_win) { sources += [ "cropping_window_capturer_win.cc", @@ -421,6 +443,8 @@ rtc_library("desktop_capture_generic") { "win/d3d_device.h", "win/desktop.cc", "win/desktop.h", + "win/desktop_capture_utils.cc", + "win/desktop_capture_utils.h", "win/display_configuration_monitor.cc", "win/display_configuration_monitor.h", "win/dxgi_adapter_duplicator.cc", @@ -456,6 +480,8 @@ rtc_library("desktop_capture_generic") { "win/selected_window_context.h", "win/window_capture_utils.cc", "win/window_capture_utils.h", + "win/window_capturer_win_gdi.cc", + "win/window_capturer_win_gdi.h", "window_capturer_win.cc", "window_finder_win.cc", "window_finder_win.h", @@ -464,25 +490,18 @@ rtc_library("desktop_capture_generic") { "d3d11.lib", "dxgi.lib", ] + deps += [ "../../rtc_base:win32" ] } - deps = [ - ":primitives", - "../../api:function_view", - "../../api:refcountedbase", - "../../api:scoped_refptr", - "../../rtc_base", # TODO(kjellander): Cleanup in bugs.webrtc.org/3806. 
- "../../rtc_base:checks", - "../../rtc_base/synchronization:rw_lock_wrapper", - "../../rtc_base/system:arch", - "../../rtc_base/system:rtc_export", - "../../system_wrappers", - "../../system_wrappers:cpu_features_api", - "../../system_wrappers:metrics", + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", ] + if (rtc_use_x11_extensions) { + deps += [ "../../rtc_base:sanitizer" ] + } + if (build_with_mozilla) { deps += [ "../../rtc_base:rtc_base_approved" ] } else { @@ -514,6 +533,17 @@ rtc_library("desktop_capture_generic") { deps += [ ":pipewire_stubs" ] } } + + if (rtc_enable_win_wgc) { + sources += [ + "win/wgc_capture_session.cc", + "win/wgc_capture_session.h", + "win/window_capturer_win_wgc.cc", + "win/window_capturer_win_wgc.h", + ] + + defines += [ "RTC_ENABLE_WIN_WGC" ] + } } if (use_desktop_capture_differ_sse2) { diff --git a/modules/desktop_capture/OWNERS b/modules/desktop_capture/OWNERS index cdcfa5d55f..79df492e69 100644 --- a/modules/desktop_capture/OWNERS +++ b/modules/desktop_capture/OWNERS @@ -1,11 +1,2 @@ -# Please send the changes to zijiehe@chromium.org first. jamiewalch@chromium.org -sergeyu@chromium.org -wez@chromium.org -zijiehe@chromium.org -braveyao@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* +joedow@chromium.org diff --git a/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc index e15a2cd344..ca3a89f49b 100644 --- a/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc +++ b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc @@ -63,6 +63,10 @@ bool BlankDetectorDesktopCapturerWrapper::FocusOnSelectedSource() { return capturer_->FocusOnSelectedSource(); } +bool BlankDetectorDesktopCapturerWrapper::IsOccluded(const DesktopVector& pos) { + return capturer_->IsOccluded(pos); +} + void BlankDetectorDesktopCapturerWrapper::OnCaptureResult( Result result, std::unique_ptr frame) { diff --git a/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h index 6ec6b1a82f..46ba5257fe 100644 --- a/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h +++ b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h @@ -46,6 +46,7 @@ class BlankDetectorDesktopCapturerWrapper final bool GetSourceList(SourceList* sources) override; bool SelectSource(SourceId id) override; bool FocusOnSelectedSource() override; + bool IsOccluded(const DesktopVector& pos) override; private: // DesktopCapturer::Callback interface. 
diff --git a/modules/desktop_capture/cropping_window_capturer_win.cc b/modules/desktop_capture/cropping_window_capturer_win.cc index 6e53ca3522..de36adb01e 100644 --- a/modules/desktop_capture/cropping_window_capturer_win.cc +++ b/modules/desktop_capture/cropping_window_capturer_win.cc @@ -154,13 +154,30 @@ class CroppingWindowCapturerWin : public CroppingWindowCapturer { void CroppingWindowCapturerWin::CaptureFrame() { DesktopCapturer* win_capturer = window_capturer(); if (win_capturer) { - // Update the list of available sources and override source to capture if - // FullScreenWindowDetector returns not zero + // Feed the actual list of windows into full screen window detector. if (full_screen_window_detector_) { full_screen_window_detector_->UpdateWindowListIfNeeded( - selected_window(), - [win_capturer](DesktopCapturer::SourceList* sources) { - return win_capturer->GetSourceList(sources); + selected_window(), [this](DesktopCapturer::SourceList* sources) { + // Get the list of top level windows, including ones with empty + // title. win_capturer_->GetSourceList can't be used here + // cause it filters out the windows with empty titles and + // it uses responsiveness check which could lead to performance + // issues. 
+ SourceList result; + if (!webrtc::GetWindowList(GetWindowListFlags::kNone, &result)) + return false; + + // Filter out windows not visible on current desktop + auto it = std::remove_if( + result.begin(), result.end(), [this](const auto& source) { + HWND hwnd = reinterpret_cast(source.id); + return !window_capture_helper_ + .IsWindowVisibleOnCurrentDesktop(hwnd); + }); + result.erase(it, result.end()); + + sources->swap(result); + return true; }); } win_capturer->SelectSource(GetWindowToCapture()); diff --git a/modules/desktop_capture/desktop_and_cursor_composer.cc b/modules/desktop_capture/desktop_and_cursor_composer.cc index 328cceb419..f282c1d500 100644 --- a/modules/desktop_capture/desktop_and_cursor_composer.cc +++ b/modules/desktop_capture/desktop_and_cursor_composer.cc @@ -187,6 +187,22 @@ void DesktopAndCursorComposer::SetExcludedWindow(WindowId window) { desktop_capturer_->SetExcludedWindow(window); } +bool DesktopAndCursorComposer::GetSourceList(SourceList* sources) { + return desktop_capturer_->GetSourceList(sources); +} + +bool DesktopAndCursorComposer::SelectSource(SourceId id) { + return desktop_capturer_->SelectSource(id); +} + +bool DesktopAndCursorComposer::FocusOnSelectedSource() { + return desktop_capturer_->FocusOnSelectedSource(); +} + +bool DesktopAndCursorComposer::IsOccluded(const DesktopVector& pos) { + return desktop_capturer_->IsOccluded(pos); +} + void DesktopAndCursorComposer::OnCaptureResult( DesktopCapturer::Result result, std::unique_ptr frame) { diff --git a/modules/desktop_capture/desktop_and_cursor_composer.h b/modules/desktop_capture/desktop_and_cursor_composer.h index 4219c4da30..8f95721ec2 100644 --- a/modules/desktop_capture/desktop_and_cursor_composer.h +++ b/modules/desktop_capture/desktop_and_cursor_composer.h @@ -53,6 +53,10 @@ class RTC_EXPORT DesktopAndCursorComposer std::unique_ptr shared_memory_factory) override; void CaptureFrame() override; void SetExcludedWindow(WindowId window) override; + bool 
GetSourceList(SourceList* sources) override; + bool SelectSource(SourceId id) override; + bool FocusOnSelectedSource() override; + bool IsOccluded(const DesktopVector& pos) override; // MouseCursorMonitor::Callback interface. void OnMouseCursor(MouseCursor* cursor) override; diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc index 61926a6023..e1fff4ea57 100644 --- a/modules/desktop_capture/desktop_capturer.cc +++ b/modules/desktop_capture/desktop_capturer.cc @@ -20,6 +20,13 @@ #include "modules/desktop_capture/desktop_capture_options.h" #include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" +#if defined(RTC_ENABLE_WIN_WGC) +#include "modules/desktop_capture/win/window_capturer_win_wgc.h" +#include "rtc_base/win/windows_version.h" + +const bool kUseWinWgcCapturer = false; +#endif // defined(RTC_ENABLE_WIN_WGC) + namespace webrtc { DesktopCapturer::~DesktopCapturer() = default; @@ -48,6 +55,16 @@ bool DesktopCapturer::IsOccluded(const DesktopVector& pos) { // static std::unique_ptr DesktopCapturer::CreateWindowCapturer( const DesktopCaptureOptions& options) { +#if defined(RTC_ENABLE_WIN_WGC) + // TODO(bugs.webrtc.org/11760): Add a WebRTC field trial (or similar + // mechanism) check here that leads to use of the WGC capturer once it is + // fully implemented. 
+ if (kUseWinWgcCapturer && + rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN10_RS5) { + return WindowCapturerWinWgc::CreateRawWindowCapturer(options); + } +#endif // defined(RTC_ENABLE_WIN_WGC) + #if defined(WEBRTC_WIN) if (options.allow_cropping_window_capturer()) { return CroppingWindowCapturer::CreateCapturer(options); diff --git a/modules/desktop_capture/desktop_frame.h b/modules/desktop_capture/desktop_frame.h index 3a18b7852d..4ee3680670 100644 --- a/modules/desktop_capture/desktop_frame.h +++ b/modules/desktop_capture/desktop_frame.h @@ -29,7 +29,7 @@ const float kStandardDPI = 96.0f; // DesktopFrame represents a video frame captured from the screen. class RTC_EXPORT DesktopFrame { public: - // DesktopFrame objects always hold RGBA data. + // DesktopFrame objects always hold BGRA data. static const int kBytesPerPixel = 4; virtual ~DesktopFrame(); diff --git a/modules/desktop_capture/desktop_geometry.h b/modules/desktop_capture/desktop_geometry.h index 91608f0c23..09ebefda94 100644 --- a/modules/desktop_capture/desktop_geometry.h +++ b/modules/desktop_capture/desktop_geometry.h @@ -43,6 +43,8 @@ class DesktopVector { return DesktopVector(x() - other.x(), y() - other.y()); } + DesktopVector operator-() const { return DesktopVector(-x_, -y_); } + private: int32_t x_; int32_t y_; diff --git a/modules/desktop_capture/differ_block.cc b/modules/desktop_capture/differ_block.cc index dd9ab457e0..4f0c5430c9 100644 --- a/modules/desktop_capture/differ_block.cc +++ b/modules/desktop_capture/differ_block.cc @@ -35,7 +35,7 @@ bool VectorDifference(const uint8_t* image1, const uint8_t* image2) { // TODO(hclam): Implement a NEON version. diff_proc = &VectorDifference_C; #else - bool have_sse2 = WebRtc_GetCPUInfo(kSSE2) != 0; + bool have_sse2 = GetCPUInfo(kSSE2) != 0; // For x86 processors, check if SSE2 is supported. 
if (have_sse2 && kBlockSize == 32) { diff_proc = &VectorDifference_SSE2_W32; diff --git a/modules/desktop_capture/linux/screen_capturer_x11.cc b/modules/desktop_capture/linux/screen_capturer_x11.cc index 4bb49fbd48..1b17071411 100644 --- a/modules/desktop_capture/linux/screen_capturer_x11.cc +++ b/modules/desktop_capture/linux/screen_capturer_x11.cc @@ -14,6 +14,7 @@ #include #include #include +#include #include #include @@ -30,6 +31,7 @@ #include "modules/desktop_capture/shared_desktop_frame.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/sanitizer.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -45,6 +47,10 @@ ScreenCapturerX11::~ScreenCapturerX11() { options_.x_display()->RemoveEventHandler(damage_event_base_ + XDamageNotify, this); } + if (use_randr_) { + options_.x_display()->RemoveEventHandler( + randr_event_base_ + RRScreenChangeNotify, this); + } DeinitXlib(); } @@ -92,6 +98,11 @@ bool ScreenCapturerX11::Init(const DesktopCaptureOptions& options) { InitXDamage(); } + InitXrandr(); + + // Default source set here so that selected_monitor_rect_ is sized correctly. + SelectSource(kFullDesktopScreenId); + return true; } @@ -136,6 +147,75 @@ void ScreenCapturerX11::InitXDamage() { RTC_LOG(LS_INFO) << "Using XDamage extension."; } +RTC_NO_SANITIZE("cfi-icall") +void ScreenCapturerX11::InitXrandr() { + int major_version = 0; + int minor_version = 0; + int error_base_ignored = 0; + if (XRRQueryExtension(display(), &randr_event_base_, &error_base_ignored) && + XRRQueryVersion(display(), &major_version, &minor_version)) { + if (major_version > 1 || (major_version == 1 && minor_version >= 5)) { + // Dynamically link XRRGetMonitors and XRRFreeMonitors as a workaround + // to avoid a dependency issue with Debian 8. 
+ get_monitors_ = reinterpret_cast( + dlsym(RTLD_DEFAULT, "XRRGetMonitors")); + free_monitors_ = reinterpret_cast( + dlsym(RTLD_DEFAULT, "XRRFreeMonitors")); + if (get_monitors_ && free_monitors_) { + use_randr_ = true; + RTC_LOG(LS_INFO) << "Using XRandR extension v" << major_version << '.' + << minor_version << '.'; + monitors_ = + get_monitors_(display(), root_window_, true, &num_monitors_); + + // Register for screen change notifications + XRRSelectInput(display(), root_window_, RRScreenChangeNotifyMask); + options_.x_display()->AddEventHandler( + randr_event_base_ + RRScreenChangeNotify, this); + } else { + RTC_LOG(LS_ERROR) << "Unable to link XRandR monitor functions."; + } + } else { + RTC_LOG(LS_ERROR) << "XRandR entension is older than v1.5."; + } + } else { + RTC_LOG(LS_ERROR) << "X server does not support XRandR."; + } +} + +RTC_NO_SANITIZE("cfi-icall") +void ScreenCapturerX11::UpdateMonitors() { + if (monitors_) { + free_monitors_(monitors_); + monitors_ = nullptr; + } + + monitors_ = get_monitors_(display(), root_window_, true, &num_monitors_); + + if (selected_monitor_name_) { + if (selected_monitor_name_ == static_cast(kFullDesktopScreenId)) { + selected_monitor_rect_ = + DesktopRect::MakeSize(x_server_pixel_buffer_.window_size()); + return; + } + + for (int i = 0; i < num_monitors_; ++i) { + XRRMonitorInfo& m = monitors_[i]; + if (selected_monitor_name_ == m.name) { + RTC_LOG(LS_INFO) << "XRandR monitor " << m.name << " rect updated."; + selected_monitor_rect_ = + DesktopRect::MakeXYWH(m.x, m.y, m.width, m.height); + return; + } + } + + // The selected monitor is not connected anymore + RTC_LOG(LS_INFO) << "XRandR selected monitor " << selected_monitor_name_ + << " lost."; + selected_monitor_rect_ = DesktopRect::MakeWH(0, 0); + } +} + void ScreenCapturerX11::Start(Callback* callback) { RTC_DCHECK(!callback_); RTC_DCHECK(callback); @@ -163,13 +243,17 @@ void ScreenCapturerX11::CaptureFrame() { return; } - // If the current frame is from an older 
generation then allocate a new one. + // Allocate the current frame buffer only if it is not already allocated. // Note that we can't reallocate other buffers at this point, since the caller // may still be reading from them. if (!queue_.current_frame()) { - queue_.ReplaceCurrentFrame( - SharedDesktopFrame::Wrap(std::unique_ptr( - new BasicDesktopFrame(x_server_pixel_buffer_.window_size())))); + std::unique_ptr frame( + new BasicDesktopFrame(selected_monitor_rect_.size())); + + // We set the top-left of the frame so the mouse cursor will be composited + // properly, and our frame buffer will not be overrun while blitting. + frame->set_top_left(selected_monitor_rect_.top_left()); + queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(std::move(frame))); } std::unique_ptr result = CaptureScreen(); @@ -187,14 +271,52 @@ void ScreenCapturerX11::CaptureFrame() { bool ScreenCapturerX11::GetSourceList(SourceList* sources) { RTC_DCHECK(sources->size() == 0); - // TODO(jiayl): implement screen enumeration. - sources->push_back({0}); + if (!use_randr_) { + sources->push_back({}); + return true; + } + + // Ensure that |monitors_| is updated with changes that may have happened + // between calls to GetSourceList(). + options_.x_display()->ProcessPendingXEvents(); + + for (int i = 0; i < num_monitors_; ++i) { + XRRMonitorInfo& m = monitors_[i]; + char* monitor_title = XGetAtomName(display(), m.name); + + // Note name is an X11 Atom used to id the monitor. + sources->push_back({static_cast(m.name), monitor_title}); + XFree(monitor_title); + } + return true; } bool ScreenCapturerX11::SelectSource(SourceId id) { - // TODO(jiayl): implement screen selection. - return true; + // Prevent the reuse of any frame buffers allocated for a previously selected + // source. This is required to stop crashes, or old data from appearing in + // a captured frame, when the new source is sized differently then the source + // that was selected at the time a reused frame buffer was created. 
+ queue_.Reset(); + + if (!use_randr_ || id == kFullDesktopScreenId) { + selected_monitor_name_ = kFullDesktopScreenId; + selected_monitor_rect_ = + DesktopRect::MakeSize(x_server_pixel_buffer_.window_size()); + return true; + } + + for (int i = 0; i < num_monitors_; ++i) { + if (id == static_cast(monitors_[i].name)) { + RTC_LOG(LS_INFO) << "XRandR selected source: " << id; + XRRMonitorInfo& m = monitors_[i]; + selected_monitor_name_ = m.name; + selected_monitor_rect_ = + DesktopRect::MakeXYWH(m.x, m.y, m.width, m.height); + return true; + } + } + return false; } bool ScreenCapturerX11::HandleXEvent(const XEvent& event) { @@ -205,6 +327,12 @@ bool ScreenCapturerX11::HandleXEvent(const XEvent& event) { return false; RTC_DCHECK(damage_event->level == XDamageReportNonEmpty); return true; + } else if (use_randr_ && + event.type == randr_event_base_ + RRScreenChangeNotify) { + XRRUpdateConfiguration(const_cast(&event)); + UpdateMonitors(); + RTC_LOG(LS_INFO) << "XRandR screen change event received."; + return true; } else if (event.type == ConfigureNotify) { ScreenConfigurationChanged(); return true; @@ -214,11 +342,11 @@ bool ScreenCapturerX11::HandleXEvent(const XEvent& event) { std::unique_ptr ScreenCapturerX11::CaptureScreen() { std::unique_ptr frame = queue_.current_frame()->Share(); - RTC_DCHECK(x_server_pixel_buffer_.window_size().equals(frame->size())); + RTC_DCHECK(selected_monitor_rect_.size().equals(frame->size())); // Pass the screen size to the helper, so it can clip the invalid region if it // expands that region to a grid. - helper_.set_size_most_recent(frame->size()); + helper_.set_size_most_recent(x_server_pixel_buffer_.window_size()); // In the DAMAGE case, ensure the frame is up-to-date with the previous frame // if any. If there isn't a previous frame, that means a screen-resolution @@ -246,12 +374,7 @@ std::unique_ptr ScreenCapturerX11::CaptureScreen() { // Capture the damaged portions of the desktop. 
helper_.TakeInvalidRegion(updated_region); - - // Clip the damaged portions to the current screen size, just in case some - // spurious XDamage notifications were received for a previous (larger) - // screen size. - updated_region->IntersectWith( - DesktopRect::MakeSize(x_server_pixel_buffer_.window_size())); + updated_region->IntersectWith(selected_monitor_rect_); for (DesktopRegion::Iterator it(*updated_region); !it.IsAtEnd(); it.Advance()) { @@ -261,10 +384,11 @@ std::unique_ptr ScreenCapturerX11::CaptureScreen() { } else { // Doing full-screen polling, or this is the first capture after a // screen-resolution change. In either case, need a full-screen capture. - DesktopRect screen_rect = DesktopRect::MakeSize(frame->size()); - if (!x_server_pixel_buffer_.CaptureRect(screen_rect, frame.get())) + if (!x_server_pixel_buffer_.CaptureRect(selected_monitor_rect_, + frame.get())) { return nullptr; - updated_region->SetRect(screen_rect); + } + updated_region->SetRect(selected_monitor_rect_); } return std::move(frame); @@ -281,6 +405,11 @@ void ScreenCapturerX11::ScreenConfigurationChanged() { RTC_LOG(LS_ERROR) << "Failed to initialize pixel buffer after screen " "configuration change."; } + + if (!use_randr_) { + selected_monitor_rect_ = + DesktopRect::MakeSize(x_server_pixel_buffer_.window_size()); + } } void ScreenCapturerX11::SynchronizeFrame() { @@ -299,11 +428,21 @@ void ScreenCapturerX11::SynchronizeFrame() { RTC_DCHECK(current != last); for (DesktopRegion::Iterator it(last_invalid_region_); !it.IsAtEnd(); it.Advance()) { - current->CopyPixelsFrom(*last, it.rect().top_left(), it.rect()); + if (selected_monitor_rect_.ContainsRect(it.rect())) { + DesktopRect r = it.rect(); + r.Translate(-selected_monitor_rect_.top_left()); + current->CopyPixelsFrom(*last, r.top_left(), r); + } } } +RTC_NO_SANITIZE("cfi-icall") void ScreenCapturerX11::DeinitXlib() { + if (monitors_) { + free_monitors_(monitors_); + monitors_ = nullptr; + } + if (gc_) { XFreeGC(display(), gc_); gc_ = 
nullptr; diff --git a/modules/desktop_capture/linux/screen_capturer_x11.h b/modules/desktop_capture/linux/screen_capturer_x11.h index 242c488998..b19e2e46e7 100644 --- a/modules/desktop_capture/linux/screen_capturer_x11.h +++ b/modules/desktop_capture/linux/screen_capturer_x11.h @@ -15,6 +15,7 @@ #include #include #include +#include #include @@ -64,6 +65,8 @@ class ScreenCapturerX11 : public DesktopCapturer, bool HandleXEvent(const XEvent& event) override; void InitXDamage(); + void InitXrandr(); + void UpdateMonitors(); // Capture screen pixels to the current buffer in the queue. In the DAMAGE // case, the ScreenCapturerHelper already holds the list of invalid rectangles @@ -92,6 +95,22 @@ class ScreenCapturerX11 : public DesktopCapturer, GC gc_ = nullptr; Window root_window_ = BadValue; + // XRandR 1.5 monitors. + bool use_randr_ = false; + int randr_event_base_ = 0; + XRRMonitorInfo* monitors_ = nullptr; + int num_monitors_ = 0; + DesktopRect selected_monitor_rect_; + // selected_monitor_name_ will be changed to kFullDesktopScreenId + // by a call to SelectSource() at the end of Init() because + // selected_monitor_rect_ should be updated as well. + // Setting it to kFullDesktopScreenId here might be misleading. + Atom selected_monitor_name_ = 0; + typedef XRRMonitorInfo* (*get_monitors_func)(Display*, Window, Bool, int*); + typedef void (*free_monitors_func)(XRRMonitorInfo*); + get_monitors_func get_monitors_ = nullptr; + free_monitors_func free_monitors_ = nullptr; + // XFixes. 
bool has_xfixes_ = false; int xfixes_event_base_ = -1; diff --git a/modules/desktop_capture/linux/shared_x_display.cc b/modules/desktop_capture/linux/shared_x_display.cc index c475db6e78..f0b35f62d3 100644 --- a/modules/desktop_capture/linux/shared_x_display.cc +++ b/modules/desktop_capture/linux/shared_x_display.cc @@ -11,6 +11,7 @@ #include "modules/desktop_capture/linux/shared_x_display.h" #include +#include #include @@ -86,4 +87,15 @@ void SharedXDisplay::ProcessPendingXEvents() { } } +void SharedXDisplay::IgnoreXServerGrabs() { + int test_event_base = 0; + int test_error_base = 0; + int major = 0; + int minor = 0; + if (XTestQueryExtension(display(), &test_event_base, &test_error_base, &major, + &minor)) { + XTestGrabControl(display(), true); + } +} + } // namespace webrtc diff --git a/modules/desktop_capture/linux/shared_x_display.h b/modules/desktop_capture/linux/shared_x_display.h index 98b6101904..64c498c134 100644 --- a/modules/desktop_capture/linux/shared_x_display.h +++ b/modules/desktop_capture/linux/shared_x_display.h @@ -18,6 +18,7 @@ #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "rtc_base/constructor_magic.h" +#include "rtc_base/system/rtc_export.h" // Including Xlib.h will involve evil defines (Bool, Status, True, False), which // easily conflict with other headers. @@ -27,7 +28,7 @@ typedef union _XEvent XEvent; namespace webrtc { // A ref-counted object to store XDisplay connection. -class SharedXDisplay : public rtc::RefCountedBase { +class RTC_EXPORT SharedXDisplay : public rtc::RefCountedBase { public: class XEventHandler { public: @@ -62,6 +63,8 @@ class SharedXDisplay : public rtc::RefCountedBase { // Processes pending XEvents, calling corresponding event handlers. 
void ProcessPendingXEvents(); + void IgnoreXServerGrabs(); + protected: ~SharedXDisplay() override; diff --git a/modules/desktop_capture/linux/x_server_pixel_buffer.cc b/modules/desktop_capture/linux/x_server_pixel_buffer.cc index 9d8efdd448..d3b568d984 100644 --- a/modules/desktop_capture/linux/x_server_pixel_buffer.cc +++ b/modules/desktop_capture/linux/x_server_pixel_buffer.cc @@ -66,8 +66,12 @@ void FastBlit(XImage* x_image, uint8_t* src_pos, const DesktopRect& rect, DesktopFrame* frame) { + RTC_DCHECK_LE(frame->top_left().x(), rect.left()); + RTC_DCHECK_LE(frame->top_left().y(), rect.top()); + int src_stride = x_image->bytes_per_line; - int dst_x = rect.left(), dst_y = rect.top(); + int dst_x = rect.left() - frame->top_left().x(); + int dst_y = rect.top() - frame->top_left().y(); uint8_t* dst_pos = frame->data() + frame->stride() * dst_y; dst_pos += dst_x * DesktopFrame::kBytesPerPixel; @@ -85,8 +89,12 @@ void SlowBlit(XImage* x_image, uint8_t* src_pos, const DesktopRect& rect, DesktopFrame* frame) { + RTC_DCHECK_LE(frame->top_left().x(), rect.left()); + RTC_DCHECK_LE(frame->top_left().y(), rect.top()); + int src_stride = x_image->bytes_per_line; - int dst_x = rect.left(), dst_y = rect.top(); + int dst_x = rect.left() - frame->top_left().x(); + int dst_y = rect.top() - frame->top_left().y(); int width = rect.width(), height = rect.height(); uint32_t red_mask = x_image->red_mask; diff --git a/modules/desktop_capture/mac/desktop_configuration_monitor.cc b/modules/desktop_capture/mac/desktop_configuration_monitor.cc index e2225cd4a9..048a679ecc 100644 --- a/modules/desktop_capture/mac/desktop_configuration_monitor.cc +++ b/modules/desktop_capture/mac/desktop_configuration_monitor.cc @@ -21,7 +21,7 @@ DesktopConfigurationMonitor::DesktopConfigurationMonitor() { DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this); if (err != kCGErrorSuccess) RTC_LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err; - rtc::CritScope 
cs(&desktop_configuration_lock_); + MutexLock lock(&desktop_configuration_lock_); desktop_configuration_ = MacDesktopConfiguration::GetCurrent( MacDesktopConfiguration::TopLeftOrigin); } @@ -34,7 +34,7 @@ DesktopConfigurationMonitor::~DesktopConfigurationMonitor() { } MacDesktopConfiguration DesktopConfigurationMonitor::desktop_configuration() { - rtc::CritScope crit(&desktop_configuration_lock_); + MutexLock lock(&desktop_configuration_lock_); return desktop_configuration_; } @@ -64,7 +64,7 @@ void DesktopConfigurationMonitor::DisplaysReconfigured( reconfiguring_displays_.erase(display); if (reconfiguring_displays_.empty()) { - rtc::CritScope cs(&desktop_configuration_lock_); + MutexLock lock(&desktop_configuration_lock_); desktop_configuration_ = MacDesktopConfiguration::GetCurrent( MacDesktopConfiguration::TopLeftOrigin); } diff --git a/modules/desktop_capture/mac/desktop_configuration_monitor.h b/modules/desktop_capture/mac/desktop_configuration_monitor.h index 1ed4c6bbcf..46a66d1d4c 100644 --- a/modules/desktop_capture/mac/desktop_configuration_monitor.h +++ b/modules/desktop_capture/mac/desktop_configuration_monitor.h @@ -19,7 +19,7 @@ #include "api/ref_counted_base.h" #include "modules/desktop_capture/mac/desktop_configuration.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -41,7 +41,7 @@ class DesktopConfigurationMonitor : public rtc::RefCountedBase { void DisplaysReconfigured(CGDirectDisplayID display, CGDisplayChangeSummaryFlags flags); - rtc::CriticalSection desktop_configuration_lock_; + Mutex desktop_configuration_lock_; MacDesktopConfiguration desktop_configuration_ RTC_GUARDED_BY(&desktop_configuration_lock_); std::set reconfiguring_displays_; diff --git a/modules/desktop_capture/mac/full_screen_mac_application_handler.cc b/modules/desktop_capture/mac/full_screen_mac_application_handler.cc index 9e6eacce85..36e16cbe54 100644 --- 
a/modules/desktop_capture/mac/full_screen_mac_application_handler.cc +++ b/modules/desktop_capture/mac/full_screen_mac_application_handler.cc @@ -14,6 +14,7 @@ #include #include #include "absl/strings/match.h" +#include "api/function_view.h" #include "modules/desktop_capture/mac/window_list_utils.h" namespace webrtc { @@ -59,17 +60,17 @@ class FullScreenMacApplicationHandler : public FullScreenApplicationHandler { title_predicate_(title_predicate), owner_pid_(GetWindowOwnerPid(sourceId)) {} + protected: + using CachePredicate = + rtc::FunctionView; + void InvalidateCacheIfNeeded(const DesktopCapturer::SourceList& source_list, - int64_t timestamp) const { - // Copy only sources with the same pid + int64_t timestamp, + CachePredicate predicate) const { if (timestamp != cache_timestamp_) { cache_sources_.clear(); std::copy_if(source_list.begin(), source_list.end(), - std::back_inserter(cache_sources_), - [&](const DesktopCapturer::Source& src) { - return src.id != GetSourceId() && - GetWindowOwnerPid(src.id) == owner_pid_; - }); + std::back_inserter(cache_sources_), predicate); cache_timestamp_ = timestamp; } } @@ -77,7 +78,11 @@ class FullScreenMacApplicationHandler : public FullScreenApplicationHandler { WindowId FindFullScreenWindowWithSamePid( const DesktopCapturer::SourceList& source_list, int64_t timestamp) const { - InvalidateCacheIfNeeded(source_list, timestamp); + InvalidateCacheIfNeeded(source_list, timestamp, + [&](const DesktopCapturer::Source& src) { + return src.id != GetSourceId() && + GetWindowOwnerPid(src.id) == owner_pid_; + }); if (cache_sources_.empty()) return kCGNullWindowID; @@ -119,7 +124,7 @@ class FullScreenMacApplicationHandler : public FullScreenApplicationHandler { : FindFullScreenWindowWithSamePid(source_list, timestamp); } - private: + protected: const TitlePredicate title_predicate_; const int owner_pid_; mutable int64_t cache_timestamp_ = 0; @@ -143,6 +148,52 @@ bool slide_show_title_predicate(const std::string& original_title, return 
false; } +class OpenOfficeApplicationHandler : public FullScreenMacApplicationHandler { + public: + OpenOfficeApplicationHandler(DesktopCapturer::SourceId sourceId) + : FullScreenMacApplicationHandler(sourceId, nullptr) {} + + DesktopCapturer::SourceId FindFullScreenWindow( + const DesktopCapturer::SourceList& source_list, + int64_t timestamp) const override { + InvalidateCacheIfNeeded(source_list, timestamp, + [&](const DesktopCapturer::Source& src) { + return GetWindowOwnerPid(src.id) == owner_pid_; + }); + + const auto original_window = GetSourceId(); + const std::string original_title = GetWindowTitle(original_window); + + // Check if we have only one document window, otherwise it's not possible + // to securely match a document window and a slide show window which has + // empty title. + if (std::any_of(cache_sources_.begin(), cache_sources_.end(), + [&original_title](const DesktopCapturer::Source& src) { + return src.title.length() && src.title != original_title; + })) { + return kCGNullWindowID; + } + + MacDesktopConfiguration desktop_config = + MacDesktopConfiguration::GetCurrent( + MacDesktopConfiguration::TopLeftOrigin); + + // Looking for slide show window, + // it must be a full screen window with empty title + const auto slide_show_window = std::find_if( + cache_sources_.begin(), cache_sources_.end(), [&](const auto& src) { + return src.title.empty() && + IsWindowFullScreen(desktop_config, src.id); + }); + + if (slide_show_window == cache_sources_.end()) { + return kCGNullWindowID; + } + + return slide_show_window->id; + } +}; + } // namespace std::unique_ptr @@ -154,6 +205,7 @@ CreateFullScreenMacApplicationHandler(DesktopCapturer::SourceId sourceId) { if (path_length > 0) { const char* last_slash = strrchr(buffer, '/'); const std::string name{last_slash ? 
last_slash + 1 : buffer}; + const std::string owner_name = GetWindowOwnerName(sourceId); FullScreenMacApplicationHandler::TitlePredicate predicate = nullptr; if (name.find("Google Chrome") == 0 || name == "Chromium") { predicate = equal_title_predicate; @@ -161,6 +213,8 @@ CreateFullScreenMacApplicationHandler(DesktopCapturer::SourceId sourceId) { predicate = slide_show_title_predicate; } else if (name == "Keynote") { predicate = equal_title_predicate; + } else if (owner_name == "OpenOffice") { + return std::make_unique(sourceId); } if (predicate) { diff --git a/modules/desktop_capture/mac/window_list_utils.cc b/modules/desktop_capture/mac/window_list_utils.cc index 67cf81c5ce..56d87ceaae 100644 --- a/modules/desktop_capture/mac/window_list_utils.cc +++ b/modules/desktop_capture/mac/window_list_utils.cc @@ -303,7 +303,7 @@ std::string GetWindowOwnerName(CFDictionaryRef window) { std::string GetWindowOwnerName(CGWindowID id) { std::string owner_name; if (GetWindowRef(id, [&owner_name](CFDictionaryRef window) { - owner_name = GetWindowOwnerPid(window); + owner_name = GetWindowOwnerName(window); })) { return owner_name; } diff --git a/modules/desktop_capture/mock_desktop_capturer_callback.h b/modules/desktop_capture/mock_desktop_capturer_callback.h index 659239ab9d..6530dc5542 100644 --- a/modules/desktop_capture/mock_desktop_capturer_callback.h +++ b/modules/desktop_capture/mock_desktop_capturer_callback.h @@ -22,9 +22,10 @@ class MockDesktopCapturerCallback : public DesktopCapturer::Callback { MockDesktopCapturerCallback(); ~MockDesktopCapturerCallback() override; - MOCK_METHOD2(OnCaptureResultPtr, - void(DesktopCapturer::Result result, - std::unique_ptr* frame)); + MOCK_METHOD(void, + OnCaptureResultPtr, + (DesktopCapturer::Result result, + std::unique_ptr* frame)); void OnCaptureResult(DesktopCapturer::Result result, std::unique_ptr frame) final; diff --git a/modules/desktop_capture/screen_capturer_helper.cc b/modules/desktop_capture/screen_capturer_helper.cc 
index 8a23c88be6..535b653c08 100644 --- a/modules/desktop_capture/screen_capturer_helper.cc +++ b/modules/desktop_capture/screen_capturer_helper.cc @@ -14,24 +14,19 @@ namespace webrtc { -ScreenCapturerHelper::ScreenCapturerHelper() - : invalid_region_lock_(RWLockWrapper::CreateRWLock()), log_grid_size_(0) {} - -ScreenCapturerHelper::~ScreenCapturerHelper() {} - void ScreenCapturerHelper::ClearInvalidRegion() { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); + MutexLock scoped_invalid_region_lock(&invalid_region_mutex_); invalid_region_.Clear(); } void ScreenCapturerHelper::InvalidateRegion( const DesktopRegion& invalid_region) { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); + MutexLock scoped_invalid_region_lock(&invalid_region_mutex_); invalid_region_.AddRegion(invalid_region); } void ScreenCapturerHelper::InvalidateScreen(const DesktopSize& size) { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); + MutexLock scoped_invalid_region_lock(&invalid_region_mutex_); invalid_region_.AddRect(DesktopRect::MakeSize(size)); } @@ -39,7 +34,7 @@ void ScreenCapturerHelper::TakeInvalidRegion(DesktopRegion* invalid_region) { invalid_region->Clear(); { - WriteLockScoped scoped_invalid_region_lock(*invalid_region_lock_); + MutexLock scoped_invalid_region_lock(&invalid_region_mutex_); invalid_region->Swap(&invalid_region_); } diff --git a/modules/desktop_capture/screen_capturer_helper.h b/modules/desktop_capture/screen_capturer_helper.h index fc4c85b706..3e658605a1 100644 --- a/modules/desktop_capture/screen_capturer_helper.h +++ b/modules/desktop_capture/screen_capturer_helper.h @@ -16,7 +16,8 @@ #include "modules/desktop_capture/desktop_geometry.h" #include "modules/desktop_capture/desktop_region.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -26,8 +27,8 @@ 
namespace webrtc { // ScreenCapturer that owns it. class ScreenCapturerHelper { public: - ScreenCapturerHelper(); - ~ScreenCapturerHelper(); + ScreenCapturerHelper() = default; + ~ScreenCapturerHelper() = default; // Clear out the invalid region. void ClearInvalidRegion(); @@ -69,10 +70,10 @@ class ScreenCapturerHelper { // A region that has been manually invalidated (through InvalidateRegion). // These will be returned as dirty_region in the capture data during the next // capture. - DesktopRegion invalid_region_; + DesktopRegion invalid_region_ RTC_GUARDED_BY(invalid_region_mutex_); // A lock protecting |invalid_region_| across threads. - std::unique_ptr invalid_region_lock_; + Mutex invalid_region_mutex_; // The size of the most recently captured screen. DesktopSize size_most_recent_; @@ -80,7 +81,7 @@ class ScreenCapturerHelper { // The log (base 2) of the size of the grid to which the invalid region is // expanded. // If the value is <= 0, then the invalid region is not expanded to a grid. 
- int log_grid_size_; + int log_grid_size_ = 0; RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCapturerHelper); }; diff --git a/modules/desktop_capture/win/d3d_device.cc b/modules/desktop_capture/win/d3d_device.cc index b220b138a5..3d46117501 100644 --- a/modules/desktop_capture/win/d3d_device.cc +++ b/modules/desktop_capture/win/d3d_device.cc @@ -12,6 +12,7 @@ #include +#include "modules/desktop_capture/win/desktop_capture_utils.h" #include "rtc_base/logging.h" namespace webrtc { @@ -38,17 +39,15 @@ bool D3dDevice::Initialize(const ComPtr& adapter) { nullptr, 0, D3D11_SDK_VERSION, d3d_device_.GetAddressOf(), &feature_level, context_.GetAddressOf()); if (error.Error() != S_OK || !d3d_device_ || !context_) { - RTC_LOG(LS_WARNING) << "D3D11CreateDeivce returns error " - << error.ErrorMessage() << " with code " - << error.Error(); + RTC_LOG(LS_WARNING) << "D3D11CreateDevice returned: " + << desktop_capture::utils::ComErrorToString(error); return false; } if (feature_level < D3D_FEATURE_LEVEL_11_0) { RTC_LOG(LS_WARNING) - << "D3D11CreateDevice returns an instance without DirectX " - "11 support, level " - << feature_level << ". Following initialization may fail."; + << "D3D11CreateDevice returned an instance without DirectX 11 support, " + << "level " << feature_level << ". Following initialization may fail."; // D3D_FEATURE_LEVEL_11_0 is not officially documented on MSDN to be a // requirement of Dxgi duplicator APIs. } @@ -57,9 +56,9 @@ bool D3dDevice::Initialize(const ComPtr& adapter) { if (error.Error() != S_OK || !dxgi_device_) { RTC_LOG(LS_WARNING) << "ID3D11Device is not an implementation of IDXGIDevice, " - "this usually means the system does not support DirectX " - "11. Error " - << error.ErrorMessage() << " with code " << error.Error(); + << "this usually means the system does not support DirectX " + << "11. 
Error received: " + << desktop_capture::utils::ComErrorToString(error); return false; } @@ -73,7 +72,8 @@ std::vector D3dDevice::EnumDevices() { CreateDXGIFactory1(__uuidof(IDXGIFactory1), reinterpret_cast(factory.GetAddressOf())); if (error.Error() != S_OK || !factory) { - RTC_LOG(LS_WARNING) << "Cannot create IDXGIFactory1."; + RTC_LOG(LS_WARNING) << "Cannot create IDXGIFactory1: " + << desktop_capture::utils::ComErrorToString(error); return std::vector(); } @@ -90,9 +90,8 @@ std::vector D3dDevice::EnumDevices() { break; } else { RTC_LOG(LS_WARNING) - << "IDXGIFactory1::EnumAdapters returns an unexpected " - "error " - << error.ErrorMessage() << " with code " << error.Error(); + << "IDXGIFactory1::EnumAdapters returned an unexpected error: " + << desktop_capture::utils::ComErrorToString(error); } } return result; diff --git a/modules/desktop_capture/win/desktop_capture_utils.cc b/modules/desktop_capture/win/desktop_capture_utils.cc new file mode 100644 index 0000000000..476ddc4aba --- /dev/null +++ b/modules/desktop_capture/win/desktop_capture_utils.cc @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/win/desktop_capture_utils.h" + +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { +namespace desktop_capture { +namespace utils { + +// Generates a human-readable string from a COM error. +std::string ComErrorToString(const _com_error& error) { + char buffer[1024]; + rtc::SimpleStringBuilder string_builder(buffer); + // Use _bstr_t to simplify the wchar to char conversion for ErrorMessage(). 
+ _bstr_t error_message(error.ErrorMessage()); + string_builder.AppendFormat("HRESULT: 0x%08X, Message: %s", error.Error(), + static_cast(error_message)); + return string_builder.str(); +} + +} // namespace utils +} // namespace desktop_capture +} // namespace webrtc diff --git a/modules/desktop_capture/win/desktop_capture_utils.h b/modules/desktop_capture/win/desktop_capture_utils.h new file mode 100644 index 0000000000..ebf31419ce --- /dev/null +++ b/modules/desktop_capture/win/desktop_capture_utils.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_DESKTOP_CAPTURE_WIN_DESKTOP_CAPTURE_UTILS_H_ +#define MODULES_DESKTOP_CAPTURE_WIN_DESKTOP_CAPTURE_UTILS_H_ + +#include + +#include + +namespace webrtc { +namespace desktop_capture { +namespace utils { + +// Generates a human-readable string from a COM error. 
+std::string ComErrorToString(const _com_error& error); + +} // namespace utils +} // namespace desktop_capture +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_WIN_DESKTOP_CAPTURE_UTILS_H_ diff --git a/modules/desktop_capture/win/dxgi_adapter_duplicator.cc b/modules/desktop_capture/win/dxgi_adapter_duplicator.cc index e3f11ac30a..88ec4e25bf 100644 --- a/modules/desktop_capture/win/dxgi_adapter_duplicator.cc +++ b/modules/desktop_capture/win/dxgi_adapter_duplicator.cc @@ -15,6 +15,7 @@ #include +#include "modules/desktop_capture/win/desktop_capture_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -53,17 +54,16 @@ bool DxgiAdapterDuplicator::DoInitialize() { } if (error.Error() == DXGI_ERROR_NOT_CURRENTLY_AVAILABLE) { - RTC_LOG(LS_WARNING) << "IDXGIAdapter::EnumOutputs returns " - "NOT_CURRENTLY_AVAILABLE. This may happen when " - "running in session 0."; + RTC_LOG(LS_WARNING) << "IDXGIAdapter::EnumOutputs returned " + << "NOT_CURRENTLY_AVAILABLE. This may happen when " + << "running in session 0."; break; } if (error.Error() != S_OK || !output) { - RTC_LOG(LS_WARNING) << "IDXGIAdapter::EnumOutputs returns an unexpected " - "result " - << error.ErrorMessage() << " with error code" - << error.Error(); + RTC_LOG(LS_WARNING) << "IDXGIAdapter::EnumOutputs returned an unexpected " + << "result: " + << desktop_capture::utils::ComErrorToString(error); continue; } @@ -75,16 +75,14 @@ bool DxgiAdapterDuplicator::DoInitialize() { error = output.As(&output1); if (error.Error() != S_OK || !output1) { RTC_LOG(LS_WARNING) - << "Failed to convert IDXGIOutput to IDXGIOutput1, " - "this usually means the system does not support " - "DirectX 11"; + << "Failed to convert IDXGIOutput to IDXGIOutput1, this usually " + << "means the system does not support DirectX 11"; continue; } DxgiOutputDuplicator duplicator(device_, output1, desc); if (!duplicator.Initialize()) { RTC_LOG(LS_WARNING) << "Failed to initialize DxgiOutputDuplicator on " - "output " - << 
i; + << "output " << i; continue; } diff --git a/modules/desktop_capture/win/dxgi_duplicator_controller.h b/modules/desktop_capture/win/dxgi_duplicator_controller.h index a24e9781b3..b6f8e78649 100644 --- a/modules/desktop_capture/win/dxgi_duplicator_controller.h +++ b/modules/desktop_capture/win/dxgi_duplicator_controller.h @@ -25,7 +25,7 @@ #include "modules/desktop_capture/win/dxgi_adapter_duplicator.h" #include "modules/desktop_capture/win/dxgi_context.h" #include "modules/desktop_capture/win/dxgi_frame.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" namespace webrtc { @@ -219,7 +219,7 @@ class DxgiDuplicatorController { std::atomic_int refcount_; // This lock must be locked whenever accessing any of the following objects. - rtc::CriticalSection lock_; + rtc::RecursiveCriticalSection lock_; // A self-incremented integer to compare with the one in Context. It ensures // a Context instance is always initialized after DxgiDuplicatorController. 
diff --git a/modules/desktop_capture/win/dxgi_output_duplicator.cc b/modules/desktop_capture/win/dxgi_output_duplicator.cc index db7ba251c2..65a0d77667 100644 --- a/modules/desktop_capture/win/dxgi_output_duplicator.cc +++ b/modules/desktop_capture/win/dxgi_output_duplicator.cc @@ -18,6 +18,7 @@ #include +#include "modules/desktop_capture/win/desktop_capture_utils.h" #include "modules/desktop_capture/win/dxgi_texture_mapping.h" #include "modules/desktop_capture/win/dxgi_texture_staging.h" #include "rtc_base/checks.h" @@ -103,9 +104,8 @@ bool DxgiOutputDuplicator::DuplicateOutput() { output_->DuplicateOutput(static_cast(device_.d3d_device()), duplication_.GetAddressOf()); if (error.Error() != S_OK || !duplication_) { - RTC_LOG(LS_WARNING) - << "Failed to duplicate output from IDXGIOutput1, error " - << error.ErrorMessage() << ", with code " << error.Error(); + RTC_LOG(LS_WARNING) << "Failed to duplicate output from IDXGIOutput1: " + << desktop_capture::utils::ComErrorToString(error); return false; } @@ -113,9 +113,8 @@ bool DxgiOutputDuplicator::DuplicateOutput() { duplication_->GetDesc(&desc_); if (desc_.ModeDesc.Format != DXGI_FORMAT_B8G8R8A8_UNORM) { RTC_LOG(LS_ERROR) << "IDXGIDuplicateOutput does not use RGBA (8 bit) " - "format, which is required by downstream components, " - "format is " - << desc_.ModeDesc.Format; + << "format, which is required by downstream components, " + << "format is " << desc_.ModeDesc.Format; return false; } @@ -123,7 +122,7 @@ bool DxgiOutputDuplicator::DuplicateOutput() { static_cast(desc_.ModeDesc.Height) != desktop_rect_.height()) { RTC_LOG(LS_ERROR) << "IDXGIDuplicateOutput does not return a same size as its " - "IDXGIOutput1, size returned by IDXGIDuplicateOutput is " + << "IDXGIOutput1, size returned by IDXGIDuplicateOutput is " << desc_.ModeDesc.Width << " x " << desc_.ModeDesc.Height << ", size returned by IDXGIOutput1 is " << desktop_rect_.width() << " x " << desktop_rect_.height(); @@ -140,9 +139,8 @@ bool 
DxgiOutputDuplicator::ReleaseFrame() { RTC_DCHECK(duplication_); _com_error error = duplication_->ReleaseFrame(); if (error.Error() != S_OK) { - RTC_LOG(LS_ERROR) << "Failed to release frame from IDXGIOutputDuplication, " - "error" - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to release frame from IDXGIOutputDuplication: " + << desktop_capture::utils::ComErrorToString(error); return false; } return true; @@ -166,8 +164,8 @@ bool DxgiOutputDuplicator::Duplicate(Context* context, _com_error error = duplication_->AcquireNextFrame( kAcquireTimeoutMs, &frame_info, resource.GetAddressOf()); if (error.Error() != S_OK && error.Error() != DXGI_ERROR_WAIT_TIMEOUT) { - RTC_LOG(LS_ERROR) << "Failed to capture frame, error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to capture frame: " + << desktop_capture::utils::ComErrorToString(error); return false; } @@ -269,13 +267,13 @@ bool DxgiOutputDuplicator::DoDetectUpdatedRegion( if (frame_info.TotalMetadataBufferSize == 0) { // This should not happen, since frame_info.AccumulatedFrames > 0. 
RTC_LOG(LS_ERROR) << "frame_info.AccumulatedFrames > 0, " - "but TotalMetadataBufferSize == 0"; + << "but TotalMetadataBufferSize == 0"; return false; } - if (metadata_.capacity() < frame_info.TotalMetadataBufferSize) { + if (metadata_.size() < frame_info.TotalMetadataBufferSize) { metadata_.clear(); // Avoid data copy - metadata_.reserve(frame_info.TotalMetadataBufferSize); + metadata_.resize(frame_info.TotalMetadataBufferSize); } UINT buff_size = 0; @@ -283,10 +281,10 @@ bool DxgiOutputDuplicator::DoDetectUpdatedRegion( reinterpret_cast(metadata_.data()); size_t move_rects_count = 0; _com_error error = duplication_->GetFrameMoveRects( - static_cast(metadata_.capacity()), move_rects, &buff_size); + static_cast(metadata_.size()), move_rects, &buff_size); if (error.Error() != S_OK) { - RTC_LOG(LS_ERROR) << "Failed to get move rectangles, error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to get move rectangles: " + << desktop_capture::utils::ComErrorToString(error); return false; } move_rects_count = buff_size / sizeof(DXGI_OUTDUPL_MOVE_RECT); @@ -294,11 +292,10 @@ bool DxgiOutputDuplicator::DoDetectUpdatedRegion( RECT* dirty_rects = reinterpret_cast(metadata_.data() + buff_size); size_t dirty_rects_count = 0; error = duplication_->GetFrameDirtyRects( - static_cast(metadata_.capacity()) - buff_size, dirty_rects, - &buff_size); + static_cast(metadata_.size()) - buff_size, dirty_rects, &buff_size); if (error.Error() != S_OK) { - RTC_LOG(LS_ERROR) << "Failed to get dirty rectangles, error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to get dirty rectangles: " + << desktop_capture::utils::ComErrorToString(error); return false; } dirty_rects_count = buff_size / sizeof(RECT); diff --git a/modules/desktop_capture/win/dxgi_output_duplicator.h b/modules/desktop_capture/win/dxgi_output_duplicator.h index 5395146042..3079d3967a 100644 --- 
a/modules/desktop_capture/win/dxgi_output_duplicator.h +++ b/modules/desktop_capture/win/dxgi_output_duplicator.h @@ -27,7 +27,6 @@ #include "modules/desktop_capture/win/d3d_device.h" #include "modules/desktop_capture/win/dxgi_context.h" #include "modules/desktop_capture/win/dxgi_texture.h" -#include "rtc_base/critical_section.h" #include "rtc_base/thread_annotations.h" namespace webrtc { diff --git a/modules/desktop_capture/win/dxgi_texture.cc b/modules/desktop_capture/win/dxgi_texture.cc index 2919692c40..b8f5b81f90 100644 --- a/modules/desktop_capture/win/dxgi_texture.cc +++ b/modules/desktop_capture/win/dxgi_texture.cc @@ -15,6 +15,7 @@ #include #include "modules/desktop_capture/desktop_region.h" +#include "modules/desktop_capture/win/desktop_capture_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -49,9 +50,8 @@ bool DxgiTexture::CopyFrom(const DXGI_OUTDUPL_FRAME_INFO& frame_info, __uuidof(ID3D11Texture2D), reinterpret_cast(texture.GetAddressOf())); if (error.Error() != S_OK || !texture) { - RTC_LOG(LS_ERROR) << "Failed to convert IDXGIResource to ID3D11Texture2D, " - "error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to convert IDXGIResource to ID3D11Texture2D: " + << desktop_capture::utils::ComErrorToString(error); return false; } diff --git a/modules/desktop_capture/win/dxgi_texture_mapping.cc b/modules/desktop_capture/win/dxgi_texture_mapping.cc index 9e138d1d6f..7ecf1adc61 100644 --- a/modules/desktop_capture/win/dxgi_texture_mapping.cc +++ b/modules/desktop_capture/win/dxgi_texture_mapping.cc @@ -14,6 +14,7 @@ #include #include +#include "modules/desktop_capture/win/desktop_capture_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -36,9 +37,8 @@ bool DxgiTextureMapping::CopyFromTexture( if (error.Error() != S_OK) { *rect() = {0}; RTC_LOG(LS_ERROR) - << "Failed to map the IDXGIOutputDuplication to a bitmap, " - "error " - << error.ErrorMessage() << ", code " << 
error.Error(); + << "Failed to map the IDXGIOutputDuplication to a bitmap: " + << desktop_capture::utils::ComErrorToString(error); return false; } @@ -48,8 +48,8 @@ bool DxgiTextureMapping::CopyFromTexture( bool DxgiTextureMapping::DoRelease() { _com_error error = duplication_->UnMapDesktopSurface(); if (error.Error() != S_OK) { - RTC_LOG(LS_ERROR) << "Failed to unmap the IDXGIOutputDuplication, error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to unmap the IDXGIOutputDuplication: " + << desktop_capture::utils::ComErrorToString(error); return false; } return true; diff --git a/modules/desktop_capture/win/dxgi_texture_staging.cc b/modules/desktop_capture/win/dxgi_texture_staging.cc index 2bd1eb9a6f..17e8518a7d 100644 --- a/modules/desktop_capture/win/dxgi_texture_staging.cc +++ b/modules/desktop_capture/win/dxgi_texture_staging.cc @@ -15,6 +15,7 @@ #include #include +#include "modules/desktop_capture/win/desktop_capture_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/metrics.h" @@ -64,17 +65,15 @@ bool DxgiTextureStaging::InitializeStage(ID3D11Texture2D* texture) { _com_error error = device_.d3d_device()->CreateTexture2D( &desc, nullptr, stage_.GetAddressOf()); if (error.Error() != S_OK || !stage_) { - RTC_LOG(LS_ERROR) - << "Failed to create a new ID3D11Texture2D as stage, error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to create a new ID3D11Texture2D as stage: " + << desktop_capture::utils::ComErrorToString(error); return false; } error = stage_.As(&surface_); if (error.Error() != S_OK || !surface_) { - RTC_LOG(LS_ERROR) - << "Failed to convert ID3D11Texture2D to IDXGISurface, error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to convert ID3D11Texture2D to IDXGISurface: " + << desktop_capture::utils::ComErrorToString(error); return false; } @@ -110,8 +109,8 @@ bool 
DxgiTextureStaging::CopyFromTexture( _com_error error = surface_->Map(rect(), DXGI_MAP_READ); if (error.Error() != S_OK) { *rect() = {0}; - RTC_LOG(LS_ERROR) << "Failed to map the IDXGISurface to a bitmap, error " - << error.ErrorMessage() << ", code " << error.Error(); + RTC_LOG(LS_ERROR) << "Failed to map the IDXGISurface to a bitmap: " + << desktop_capture::utils::ComErrorToString(error); return false; } diff --git a/modules/desktop_capture/win/full_screen_win_application_handler.cc b/modules/desktop_capture/win/full_screen_win_application_handler.cc index 0b7e3fc437..dd21410b03 100644 --- a/modules/desktop_capture/win/full_screen_win_application_handler.cc +++ b/modules/desktop_capture/win/full_screen_win_application_handler.cc @@ -14,6 +14,9 @@ #include #include #include +#include "absl/strings/match.h" +#include "modules/desktop_capture/win/screen_capture_utils.h" +#include "modules/desktop_capture/win/window_capture_utils.h" #include "rtc_base/arraysize.h" #include "rtc_base/logging.h" // For RTC_LOG_GLE #include "rtc_base/string_utils.h" @@ -21,6 +24,25 @@ namespace webrtc { namespace { +// Utility function to verify that |window| has class name equal to |class_name| +bool CheckWindowClassName(HWND window, const wchar_t* class_name) { + const size_t classNameLength = wcslen(class_name); + + // https://docs.microsoft.com/en-us/windows/win32/api/winuser/ns-winuser-wndclassa + // says lpszClassName field in WNDCLASS is limited by 256 symbols, so we don't + // need to have a buffer bigger than that. 
+ constexpr size_t kMaxClassNameLength = 256; + WCHAR buffer[kMaxClassNameLength]; + + const int length = ::GetClassNameW(window, buffer, kMaxClassNameLength); + if (length <= 0) + return false; + + if (static_cast(length) != classNameLength) + return false; + return wcsncmp(buffer, class_name, classNameLength) == 0; +} + std::string WindowText(HWND window) { size_t len = ::GetWindowTextLength(window); if (len == 0) @@ -146,20 +168,7 @@ class FullScreenPowerPointHandler : public FullScreenApplicationHandler { } bool IsEditorWindow(HWND window) const { - constexpr WCHAR kScreenClassName[] = L"PPTFrameClass"; - constexpr size_t kScreenClassNameLength = arraysize(kScreenClassName) - 1; - - // We need to verify that window class is equal to |kScreenClassName|. - // To do that we need a buffer large enough to include a null terminated - // string one code point bigger than |kScreenClassName|. It will help us to - // check that size of class name string returned by GetClassNameW is equal - // to |kScreenClassNameLength| not being limited by size of buffer (case - // when |kScreenClassName| is a prefix for class name string). 
- WCHAR buffer[arraysize(kScreenClassName) + 3]; - const int length = ::GetClassNameW(window, buffer, arraysize(buffer)); - if (length != kScreenClassNameLength) - return false; - return wcsncmp(buffer, kScreenClassName, kScreenClassNameLength) == 0; + return CheckWindowClassName(window, L"PPTFrameClass"); } bool IsSlideShowWindow(HWND window) const { @@ -170,6 +179,74 @@ class FullScreenPowerPointHandler : public FullScreenApplicationHandler { } }; +class OpenOfficeApplicationHandler : public FullScreenApplicationHandler { + public: + explicit OpenOfficeApplicationHandler(DesktopCapturer::SourceId sourceId) + : FullScreenApplicationHandler(sourceId) {} + + DesktopCapturer::SourceId FindFullScreenWindow( + const DesktopCapturer::SourceList& window_list, + int64_t timestamp) const override { + if (window_list.empty()) + return 0; + + DWORD process_id = WindowProcessId(reinterpret_cast(GetSourceId())); + + DesktopCapturer::SourceList app_windows = + GetProcessWindows(window_list, process_id, nullptr); + + DesktopCapturer::SourceList document_windows; + std::copy_if( + app_windows.begin(), app_windows.end(), + std::back_inserter(document_windows), + [this](const DesktopCapturer::Source& x) { return IsEditorWindow(x); }); + + // Check if we have only one document window, otherwise it's not possible + // to securely match a document window and a slide show window which has + // empty title. + if (document_windows.size() != 1) { + return 0; + } + + // Check if document window has been selected as a source + if (document_windows.front().id != GetSourceId()) { + return 0; + } + + // Check if we have a slide show window. 
+ auto slide_show_window = + std::find_if(app_windows.begin(), app_windows.end(), + [this](const DesktopCapturer::Source& x) { + return IsSlideShowWindow(x); + }); + + if (slide_show_window == app_windows.end()) + return 0; + + return slide_show_window->id; + } + + private: + bool IsEditorWindow(const DesktopCapturer::Source& source) const { + if (source.title.empty()) { + return false; + } + + return CheckWindowClassName(reinterpret_cast(source.id), L"SALFRAME"); + } + + bool IsSlideShowWindow(const DesktopCapturer::Source& source) const { + // Check title size to filter out a Presenter Control window which shares + // window class with Slide Show window but has non empty title. + if (!source.title.empty()) { + return false; + } + + return CheckWindowClassName(reinterpret_cast(source.id), + L"SALTMPSUBFRAME"); + } +}; + std::wstring GetPathByWindowId(HWND window_id) { DWORD process_id = WindowProcessId(window_id); HANDLE process = @@ -193,13 +270,17 @@ std::wstring GetPathByWindowId(HWND window_id) { std::unique_ptr CreateFullScreenWinApplicationHandler(DesktopCapturer::SourceId source_id) { std::unique_ptr result; - std::wstring exe_path = GetPathByWindowId(reinterpret_cast(source_id)); + HWND hwnd = reinterpret_cast(source_id); + std::wstring exe_path = GetPathByWindowId(hwnd); std::wstring file_name = FileNameFromPath(exe_path); std::transform(file_name.begin(), file_name.end(), file_name.begin(), std::towupper); if (file_name == L"POWERPNT.EXE") { result = std::make_unique(source_id); + } else if (file_name == L"SOFFICE.BIN" && + absl::EndsWith(WindowText(hwnd), "OpenOffice Impress")) { + result = std::make_unique(source_id); } return result; diff --git a/modules/desktop_capture/win/scoped_gdi_object.h b/modules/desktop_capture/win/scoped_gdi_object.h index 56abe95a9e..d3ac9b9443 100644 --- a/modules/desktop_capture/win/scoped_gdi_object.h +++ b/modules/desktop_capture/win/scoped_gdi_object.h @@ -58,27 +58,29 @@ class ScopedGDIObject { template class 
DeleteObjectTraits { public: + DeleteObjectTraits() = delete; + DeleteObjectTraits(const DeleteObjectTraits&) = delete; + DeleteObjectTraits& operator=(const DeleteObjectTraits&) = delete; + // Closes the handle. static void Close(T handle) { if (handle) DeleteObject(handle); } - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DeleteObjectTraits); }; // The traits class that uses DestroyCursor() to close a handle. class DestroyCursorTraits { public: + DestroyCursorTraits() = delete; + DestroyCursorTraits(const DestroyCursorTraits&) = delete; + DestroyCursorTraits& operator=(const DestroyCursorTraits&) = delete; + // Closes the handle. static void Close(HCURSOR handle) { if (handle) DestroyCursor(handle); } - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DestroyCursorTraits); }; typedef ScopedGDIObject > ScopedBitmap; diff --git a/modules/desktop_capture/win/selected_window_context.cc b/modules/desktop_capture/win/selected_window_context.cc index 74459571ca..398ea1e53a 100644 --- a/modules/desktop_capture/win/selected_window_context.cc +++ b/modules/desktop_capture/win/selected_window_context.cc @@ -28,20 +28,19 @@ bool SelectedWindowContext::IsSelectedWindowValid() const { } bool SelectedWindowContext::IsWindowOwnedBySelectedWindow(HWND hwnd) const { - // This check works for drop-down menus & dialog pop-up windows. It doesn't - // work for context menus or tooltips, which are handled differently below. + // This check works for drop-down menus & dialog pop-up windows. if (GetAncestor(hwnd, GA_ROOTOWNER) == selected_window_) { return true; } - // Some pop-up windows aren't owned (e.g. context menus, tooltips); treat - // windows that belong to the same thread as owned. 
- DWORD enumerated_window_process_id = 0; - DWORD enumerated_window_thread_id = - GetWindowThreadProcessId(hwnd, &enumerated_window_process_id); - return enumerated_window_thread_id != 0 && - enumerated_window_process_id == selected_window_process_id_ && - enumerated_window_thread_id == selected_window_thread_id_; + // Assume that all other windows are unrelated to the selected window. + // This will cause some windows that are actually related to be missed, + // e.g. context menus and tool-tips, but avoids the risk of capturing + // unrelated windows. Using heuristics such as matching the thread and + // process Ids suffers from false-positives, e.g. in multi-document + // applications. + + return false; } bool SelectedWindowContext::IsWindowOverlappingSelectedWindow(HWND hwnd) const { diff --git a/modules/desktop_capture/win/wgc_capture_session.cc b/modules/desktop_capture/win/wgc_capture_session.cc new file mode 100644 index 0000000000..ee55cf6164 --- /dev/null +++ b/modules/desktop_capture/win/wgc_capture_session.cc @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/desktop_capture/win/wgc_capture_session.h" + +#include + +#include "rtc_base/checks.h" + +using Microsoft::WRL::ComPtr; +namespace webrtc { + +WgcCaptureSession::WgcCaptureSession(ComPtr d3d11_device, + HWND window) + : d3d11_device_(std::move(d3d11_device)), window_(window) {} +WgcCaptureSession::~WgcCaptureSession() = default; + +HRESULT WgcCaptureSession::StartCapture() { + RTC_DCHECK(!is_capture_started_); + RTC_DCHECK(d3d11_device_); + RTC_DCHECK(window_); + + return E_NOTIMPL; +} + +HRESULT WgcCaptureSession::GetMostRecentFrame( + std::unique_ptr* output_frame) { + RTC_DCHECK(is_capture_started_); + + return E_NOTIMPL; +} + +} // namespace webrtc diff --git a/modules/desktop_capture/win/wgc_capture_session.h b/modules/desktop_capture/win/wgc_capture_session.h new file mode 100644 index 0000000000..9f41331c92 --- /dev/null +++ b/modules/desktop_capture/win/wgc_capture_session.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_WIN_WGC_CAPTURE_SESSION_H_ +#define MODULES_DESKTOP_CAPTURE_WIN_WGC_CAPTURE_SESSION_H_ + +#include +#include +#include +#include + +#include "modules/desktop_capture/desktop_frame.h" + +namespace webrtc { + +class WgcCaptureSession final { + public: + WgcCaptureSession(Microsoft::WRL::ComPtr d3d11_device, + HWND window); + + // Disallow copy and assign + WgcCaptureSession(const WgcCaptureSession&) = delete; + WgcCaptureSession& operator=(const WgcCaptureSession&) = delete; + + ~WgcCaptureSession(); + + HRESULT StartCapture(); + HRESULT GetMostRecentFrame(std::unique_ptr* output_frame); + bool IsCaptureStarted() const { return is_capture_started_; } + + private: + // A Direct3D11 Device provided by the caller. We use this to create an + // IDirect3DDevice, and also to create textures that will hold the image data. + Microsoft::WRL::ComPtr d3d11_device_; + HWND window_; + bool is_capture_started_ = false; +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_WIN_WGC_CAPTURE_SESSION_H_ diff --git a/modules/desktop_capture/win/window_capture_utils.cc b/modules/desktop_capture/win/window_capture_utils.cc index 226b564b64..e49c179fd3 100644 --- a/modules/desktop_capture/win/window_capture_utils.cc +++ b/modules/desktop_capture/win/window_capture_utils.cc @@ -13,13 +13,104 @@ // Just for the DWMWINDOWATTRIBUTE enums (DWMWA_CLOAKED). 
#include +#include + #include "modules/desktop_capture/win/scoped_gdi_object.h" +#include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/string_utils.h" #include "rtc_base/win32.h" namespace webrtc { +namespace { + +struct GetWindowListParams { + GetWindowListParams(int flags, DesktopCapturer::SourceList* result) + : ignoreUntitled(flags & GetWindowListFlags::kIgnoreUntitled), + ignoreUnresponsive(flags & GetWindowListFlags::kIgnoreUnresponsive), + result(result) {} + const bool ignoreUntitled; + const bool ignoreUnresponsive; + DesktopCapturer::SourceList* const result; +}; + +BOOL CALLBACK GetWindowListHandler(HWND hwnd, LPARAM param) { + GetWindowListParams* params = reinterpret_cast(param); + DesktopCapturer::SourceList* list = params->result; + + // Skip untitled window if ignoreUntitled specified + if (params->ignoreUntitled && GetWindowTextLength(hwnd) == 0) { + return TRUE; + } + + // Skip invisible and minimized windows + if (!IsWindowVisible(hwnd) || IsIconic(hwnd)) { + return TRUE; + } + + // Skip windows which are not presented in the taskbar, + // namely owned window if they don't have the app window style set + HWND owner = GetWindow(hwnd, GW_OWNER); + LONG exstyle = GetWindowLong(hwnd, GWL_EXSTYLE); + if (owner && !(exstyle & WS_EX_APPWINDOW)) { + return TRUE; + } + + // If ignoreUnresponsive is true then skip unresponsive windows. Set timout + // with 50ms, in case system is under heavy load, the check can wait longer + // but wont' be too long to delay the the enumeration. + const UINT uTimeout = 50; // ms + if (params->ignoreUnresponsive && + !SendMessageTimeout(hwnd, WM_NULL, 0, 0, SMTO_ABORTIFHUNG, uTimeout, + nullptr)) { + return TRUE; + } + + // Capture the window class name, to allow specific window classes to be + // skipped. 
+ // + // https://docs.microsoft.com/en-us/windows/win32/api/winuser/ns-winuser-wndclassa + // says lpszClassName field in WNDCLASS is limited by 256 symbols, so we don't + // need to have a buffer bigger than that. + const size_t kMaxClassNameLength = 256; + WCHAR class_name[kMaxClassNameLength] = L""; + const int class_name_length = + GetClassNameW(hwnd, class_name, kMaxClassNameLength); + if (class_name_length < 1) + return TRUE; + + // Skip Program Manager window. + if (wcscmp(class_name, L"Progman") == 0) + return TRUE; + + // Skip Start button window on Windows Vista, Windows 7. + // On Windows 8, Windows 8.1, Windows 10 Start button is not a top level + // window, so it will not be examined here. + if (wcscmp(class_name, L"Button") == 0) + return TRUE; + + DesktopCapturer::Source window; + window.id = reinterpret_cast(hwnd); + + const size_t kTitleLength = 500; + WCHAR window_title[kTitleLength] = L""; + if (GetWindowTextW(hwnd, window_title, kTitleLength) > 0) { + window.title = rtc::ToUtf8(window_title); + } + + // Skip windows when we failed to convert the title or it is empty. + if (params->ignoreUntitled && window.title.empty()) + return TRUE; + + list->push_back(window); + + return TRUE; +} + +} // namespace + // Prefix used to match the window class for Chrome windows. const wchar_t kChromeWindowClassPrefix[] = L"Chrome_WidgetWin_"; @@ -157,6 +248,16 @@ bool IsWindowMaximized(HWND window, bool* result) { return true; } +bool IsWindowValidAndVisible(HWND window) { + return IsWindow(window) && IsWindowVisible(window) && !IsIconic(window); +} + +bool GetWindowList(int flags, DesktopCapturer::SourceList* windows) { + GetWindowListParams params(flags, windows); + return ::EnumWindows(&GetWindowListHandler, + reinterpret_cast(¶ms)) != 0; +} + // WindowCaptureHelperWin implementation. WindowCaptureHelperWin::WindowCaptureHelperWin() { // Try to load dwmapi.dll dynamically since it is not available on XP. 
@@ -223,12 +324,13 @@ bool WindowCaptureHelperWin::IsWindowChromeNotification(HWND hwnd) { } // |content_rect| is preferred because, -// 1. WindowCapturerWin is using GDI capturer, which cannot capture DX output. +// 1. WindowCapturerWinGdi is using GDI capturer, which cannot capture DX +// output. // So ScreenCapturer should be used as much as possible to avoid // uncapturable cases. Note: lots of new applications are using DX output // (hardware acceleration) to improve the performance which cannot be -// captured by WindowCapturerWin. See bug http://crbug.com/741770. -// 2. WindowCapturerWin is still useful because we do not want to expose the +// captured by WindowCapturerWinGdi. See bug http://crbug.com/741770. +// 2. WindowCapturerWinGdi is still useful because we do not want to expose the // content on other windows if the target window is covered by them. // 3. Shadow and borders should not be considered as "content" on other // windows because they do not expose any useful information. @@ -288,8 +390,8 @@ bool WindowCaptureHelperWin::IsWindowOnCurrentDesktop(HWND hwnd) { } bool WindowCaptureHelperWin::IsWindowVisibleOnCurrentDesktop(HWND hwnd) { - return !::IsIconic(hwnd) && ::IsWindowVisible(hwnd) && - IsWindowOnCurrentDesktop(hwnd) && !IsWindowCloaked(hwnd); + return IsWindowValidAndVisible(hwnd) && IsWindowOnCurrentDesktop(hwnd) && + !IsWindowCloaked(hwnd); } // A cloaked window is composited but not visible to the user. @@ -303,11 +405,30 @@ bool WindowCaptureHelperWin::IsWindowCloaked(HWND hwnd) { int res = 0; if (dwm_get_window_attribute_func_(hwnd, DWMWA_CLOAKED, &res, sizeof(res)) != S_OK) { - // Cannot tell so assume not cloacked for backward compatibility. + // Cannot tell so assume not cloaked for backward compatibility. 
return false; } return res != 0; } +bool WindowCaptureHelperWin::EnumerateCapturableWindows( + DesktopCapturer::SourceList* results) { + if (!webrtc::GetWindowList((GetWindowListFlags::kIgnoreUntitled | + GetWindowListFlags::kIgnoreUnresponsive), + results)) { + return false; + } + + for (auto it = results->begin(); it != results->end();) { + if (!IsWindowVisibleOnCurrentDesktop(reinterpret_cast(it->id))) { + it = results->erase(it); + } else { + ++it; + } + } + + return true; +} + } // namespace webrtc diff --git a/modules/desktop_capture/win/window_capture_utils.h b/modules/desktop_capture/win/window_capture_utils.h index 20a475510b..6e99ee9678 100644 --- a/modules/desktop_capture/win/window_capture_utils.h +++ b/modules/desktop_capture/win/window_capture_utils.h @@ -15,6 +15,7 @@ #include #include +#include "modules/desktop_capture/desktop_capturer.h" #include "modules/desktop_capture/desktop_geometry.h" #include "rtc_base/constructor_magic.h" @@ -40,7 +41,7 @@ bool GetWindowRect(HWND window, DesktopRect* result); // This function should only be used by CroppingWindowCapturerWin. Instead a // DesktopRect CropWindowRect(const DesktopRect& rect) // should be added as a utility function to help CroppingWindowCapturerWin and -// WindowCapturerWin to crop out the borders or shadow according to their +// WindowCapturerWinGdi to crop out the borders or shadow according to their // scenarios. But this function is too generic and easy to be misused. bool GetCroppedWindowRect(HWND window, bool avoid_cropping_border, @@ -66,6 +67,25 @@ bool GetDcSize(HDC hdc, DesktopSize* size); // function returns false if native APIs fail. bool IsWindowMaximized(HWND window, bool* result); +// Checks that the HWND is for a valid window, that window's visibility state is +// visible, and that it is not minimized. 
+bool IsWindowValidAndVisible(HWND window); + +enum GetWindowListFlags { + kNone = 0x00, + kIgnoreUntitled = 1 << 0, + kIgnoreUnresponsive = 1 << 1, +}; + +// Retrieves the list of top-level windows on the screen. +// Some windows will be ignored: +// - Those that are invisible or minimized. +// - Program Manager & Start menu. +// - [with kIgnoreUntitled] windows with no title. +// - [with kIgnoreUnresponsive] windows that unresponsive. +// Returns false if native APIs failed. +bool GetWindowList(int flags, DesktopCapturer::SourceList* windows); + typedef HRESULT(WINAPI* DwmIsCompositionEnabledFunc)(BOOL* enabled); typedef HRESULT(WINAPI* DwmGetWindowAttributeFunc)(HWND hwnd, DWORD flag, @@ -84,6 +104,7 @@ class WindowCaptureHelperWin { bool IsWindowOnCurrentDesktop(HWND hwnd); bool IsWindowVisibleOnCurrentDesktop(HWND hwnd); bool IsWindowCloaked(HWND hwnd); + bool EnumerateCapturableWindows(DesktopCapturer::SourceList* results); private: HMODULE dwmapi_library_ = nullptr; diff --git a/modules/desktop_capture/win/window_capturer_win_gdi.cc b/modules/desktop_capture/win/window_capturer_win_gdi.cc new file mode 100644 index 0000000000..04cd7f667d --- /dev/null +++ b/modules/desktop_capture/win/window_capturer_win_gdi.cc @@ -0,0 +1,378 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/desktop_capture/win/window_capturer_win_gdi.h" + +#include +#include +#include +#include +#include + +#include "modules/desktop_capture/cropped_desktop_frame.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame_win.h" +#include "modules/desktop_capture/win/screen_capture_utils.h" +#include "modules/desktop_capture/win/selected_window_context.h" +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_utils.h" +#include "rtc_base/trace_event.h" +#include "rtc_base/win32.h" + +namespace webrtc { + +// Used to pass input/output data during the EnumWindows call to collect +// owned/pop-up windows that should be captured. +struct OwnedWindowCollectorContext : public SelectedWindowContext { + OwnedWindowCollectorContext(HWND selected_window, + DesktopRect selected_window_rect, + WindowCaptureHelperWin* window_capture_helper, + std::vector* owned_windows) + : SelectedWindowContext(selected_window, + selected_window_rect, + window_capture_helper), + owned_windows(owned_windows) {} + + std::vector* owned_windows; +}; + +// Called via EnumWindows for each root window; adds owned/pop-up windows that +// should be captured to a vector it's passed. +BOOL CALLBACK OwnedWindowCollector(HWND hwnd, LPARAM param) { + OwnedWindowCollectorContext* context = + reinterpret_cast(param); + if (hwnd == context->selected_window()) { + // Windows are enumerated in top-down z-order, so we can stop enumerating + // upon reaching the selected window. + return FALSE; + } + + // Skip windows that aren't visible pop-up windows. + if (!(GetWindowLong(hwnd, GWL_STYLE) & WS_POPUP) || + !context->window_capture_helper()->IsWindowVisibleOnCurrentDesktop( + hwnd)) { + return TRUE; + } + + // Owned windows that intersect the selected window should be captured. 
+ if (context->IsWindowOwnedBySelectedWindow(hwnd) && + context->IsWindowOverlappingSelectedWindow(hwnd)) { + // Skip windows that draw shadows around menus. These "SysShadow" windows + // would otherwise be captured as solid black bars with no transparency + // gradient (since this capturer doesn't detect / respect variations in the + // window alpha channel). Any other semi-transparent owned windows will be + // captured fully-opaque. This seems preferable to excluding them (at least + // when they have content aside from a solid fill color / visual adornment; + // e.g. some tooltips have the transparent style set). + if (GetWindowLong(hwnd, GWL_EXSTYLE) & WS_EX_TRANSPARENT) { + const WCHAR kSysShadow[] = L"SysShadow"; + const size_t kClassLength = arraysize(kSysShadow); + WCHAR class_name[kClassLength]; + const int class_name_length = + GetClassNameW(hwnd, class_name, kClassLength); + if (class_name_length == kClassLength - 1 && + wcscmp(class_name, kSysShadow) == 0) { + return TRUE; + } + } + + context->owned_windows->push_back(hwnd); + } + + return TRUE; +} + +WindowCapturerWinGdi::WindowCapturerWinGdi() {} +WindowCapturerWinGdi::~WindowCapturerWinGdi() {} + +bool WindowCapturerWinGdi::GetSourceList(SourceList* sources) { + if (!window_capture_helper_.EnumerateCapturableWindows(sources)) + return false; + + std::map new_map; + for (const auto& item : *sources) { + HWND hwnd = reinterpret_cast(item.id); + new_map[hwnd] = window_size_map_[hwnd]; + } + window_size_map_.swap(new_map); + + return true; +} + +bool WindowCapturerWinGdi::SelectSource(SourceId id) { + HWND window = reinterpret_cast(id); + if (!IsWindowValidAndVisible(window)) + return false; + + window_ = window; + // When a window is not in the map, window_size_map_[window] will create an + // item with DesktopSize (0, 0). 
+ previous_size_ = window_size_map_[window]; + return true; +} + +bool WindowCapturerWinGdi::FocusOnSelectedSource() { + if (!window_) + return false; + + if (!IsWindowValidAndVisible(window_)) + return false; + + return BringWindowToTop(window_) && SetForegroundWindow(window_); +} + +bool WindowCapturerWinGdi::IsOccluded(const DesktopVector& pos) { + DesktopVector sys_pos = pos.add(GetFullscreenRect().top_left()); + HWND hwnd = + reinterpret_cast(window_finder_.GetWindowUnderPoint(sys_pos)); + + return hwnd != window_ && + std::find(owned_windows_.begin(), owned_windows_.end(), hwnd) == + owned_windows_.end(); +} + +void WindowCapturerWinGdi::Start(Callback* callback) { + RTC_DCHECK(!callback_); + RTC_DCHECK(callback); + + callback_ = callback; +} + +void WindowCapturerWinGdi::CaptureFrame() { + RTC_DCHECK(callback_); + + CaptureResults results = CaptureFrame(/*capture_owned_windows*/ true); + callback_->OnCaptureResult(results.result, std::move(results.frame)); +} + +WindowCapturerWinGdi::CaptureResults WindowCapturerWinGdi::CaptureFrame( + bool capture_owned_windows) { + TRACE_EVENT0("webrtc", "WindowCapturerWinGdi::CaptureFrame"); + + if (!window_) { + RTC_LOG(LS_ERROR) << "Window hasn't been selected: " << GetLastError(); + return {Result::ERROR_PERMANENT, nullptr}; + } + + // Stop capturing if the window has been closed. + if (!IsWindow(window_)) { + RTC_LOG(LS_ERROR) << "Target window has been closed."; + return {Result::ERROR_PERMANENT, nullptr}; + } + + // Determine the window region excluding any resize border, and including + // any visible border if capturing an owned window / dialog. (Don't include + // any visible border for the selected window for consistency with + // CroppingWindowCapturerWin, which would expose a bit of the background + // through the partially-transparent border.) 
+ const bool avoid_cropping_border = !capture_owned_windows; + DesktopRect cropped_rect; + DesktopRect original_rect; + + if (!GetCroppedWindowRect(window_, avoid_cropping_border, &cropped_rect, + &original_rect)) { + RTC_LOG(LS_WARNING) << "Failed to get drawable window area: " + << GetLastError(); + return {Result::ERROR_TEMPORARY, nullptr}; + } + + // Return a 1x1 black frame if the window is minimized or invisible on current + // desktop, to match behavior on mace. Window can be temporarily invisible + // during the transition of full screen mode on/off. + if (original_rect.is_empty() || + !window_capture_helper_.IsWindowVisibleOnCurrentDesktop(window_)) { + std::unique_ptr frame( + new BasicDesktopFrame(DesktopSize(1, 1))); + + previous_size_ = frame->size(); + window_size_map_[window_] = previous_size_; + return {Result::SUCCESS, std::move(frame)}; + } + + HDC window_dc = GetWindowDC(window_); + if (!window_dc) { + RTC_LOG(LS_WARNING) << "Failed to get window DC: " << GetLastError(); + return {Result::ERROR_TEMPORARY, nullptr}; + } + + DesktopRect unscaled_cropped_rect = cropped_rect; + double horizontal_scale = 1.0; + double vertical_scale = 1.0; + + DesktopSize window_dc_size; + if (GetDcSize(window_dc, &window_dc_size)) { + // The |window_dc_size| is used to detect the scaling of the original + // window. If the application does not support high-DPI settings, it will + // be scaled by Windows according to the scaling setting. + // https://www.google.com/search?q=windows+scaling+settings&ie=UTF-8 + // So the size of the |window_dc|, i.e. the bitmap we can retrieve from + // PrintWindow() or BitBlt() function, will be smaller than + // |original_rect| and |cropped_rect|. Part of the captured desktop frame + // will be black. See + // bug https://bugs.chromium.org/p/webrtc/issues/detail?id=8112 for + // details. + + // If |window_dc_size| is smaller than |window_rect|, let's resize both + // |original_rect| and |cropped_rect| according to the scaling factor. 
+ // This will adjust the width and height of the two rects. + horizontal_scale = + static_cast(window_dc_size.width()) / original_rect.width(); + vertical_scale = + static_cast(window_dc_size.height()) / original_rect.height(); + original_rect.Scale(horizontal_scale, vertical_scale); + cropped_rect.Scale(horizontal_scale, vertical_scale); + + // Translate |cropped_rect| to the left so that its position within + // |original_rect| remains accurate after scaling. + // See crbug.com/1083527 for more info. + int translate_left = static_cast(std::round( + (cropped_rect.left() - original_rect.left()) * (horizontal_scale - 1))); + int translate_top = static_cast(std::round( + (cropped_rect.top() - original_rect.top()) * (vertical_scale - 1))); + cropped_rect.Translate(translate_left, translate_top); + } + + std::unique_ptr frame( + DesktopFrameWin::Create(original_rect.size(), nullptr, window_dc)); + if (!frame.get()) { + RTC_LOG(LS_WARNING) << "Failed to create frame."; + ReleaseDC(window_, window_dc); + return {Result::ERROR_TEMPORARY, nullptr}; + } + + HDC mem_dc = CreateCompatibleDC(window_dc); + HGDIOBJ previous_object = SelectObject(mem_dc, frame->bitmap()); + BOOL result = FALSE; + + // When desktop composition (Aero) is enabled each window is rendered to a + // private buffer allowing BitBlt() to get the window content even if the + // window is occluded. PrintWindow() is slower but lets rendering the window + // contents to an off-screen device context when Aero is not available. + // PrintWindow() is not supported by some applications. + // + // If Aero is enabled, we prefer BitBlt() because it's faster and avoids + // window flickering. Otherwise, we prefer PrintWindow() because BitBlt() may + // render occluding windows on top of the desired window. + // + // When composition is enabled the DC returned by GetWindowDC() doesn't always + // have window frame rendered correctly. Windows renders it only once and then + // caches the result between captures. 
We hack it around by calling + // PrintWindow() whenever window size changes, including the first time of + // capturing - it somehow affects what we get from BitBlt() on the subsequent + // captures. + // + // For Windows 8.1 and later, we want to always use PrintWindow when the + // cropping screen capturer falls back to the window capturer. I.e. + // on Windows 8.1 and later, PrintWindow is only used when the window is + // occluded. When the window is not occluded, it is much faster to capture + // the screen and to crop it to the window position and size. + if (rtc::IsWindows8OrLater()) { + // Special flag that makes PrintWindow work on Windows 8.1 and later. + // Indeed certain apps (e.g. those using DirectComposition rendering) can't + // be captured using BitBlt or PrintWindow without this flag. Note that on + // Windows 8.0 this flag is not supported so the block below will fallback + // to the other call to PrintWindow. It seems to be very tricky to detect + // Windows 8.0 vs 8.1 so a try/fallback is more appropriate here. + const UINT flags = PW_RENDERFULLCONTENT; + result = PrintWindow(window_, mem_dc, flags); + } + + if (!result && (!window_capture_helper_.IsAeroEnabled() || + !previous_size_.equals(frame->size()))) { + result = PrintWindow(window_, mem_dc, 0); + } + + // Aero is enabled or PrintWindow() failed, use BitBlt. 
+ if (!result) { + result = BitBlt(mem_dc, 0, 0, frame->size().width(), frame->size().height(), + window_dc, 0, 0, SRCCOPY); + } + + SelectObject(mem_dc, previous_object); + DeleteDC(mem_dc); + ReleaseDC(window_, window_dc); + + previous_size_ = frame->size(); + window_size_map_[window_] = previous_size_; + + frame->mutable_updated_region()->SetRect( + DesktopRect::MakeSize(frame->size())); + frame->set_top_left( + original_rect.top_left().subtract(GetFullscreenRect().top_left())); + + if (!result) { + RTC_LOG(LS_ERROR) << "Both PrintWindow() and BitBlt() failed."; + return {Result::ERROR_TEMPORARY, nullptr}; + } + + // Rect for the data is relative to the first pixel of the frame. + cropped_rect.Translate(-original_rect.left(), -original_rect.top()); + std::unique_ptr cropped_frame = + CreateCroppedDesktopFrame(std::move(frame), cropped_rect); + RTC_DCHECK(cropped_frame); + + if (capture_owned_windows) { + // If any owned/pop-up windows overlap the selected window, capture them + // and copy/composite their contents into the frame. 
+ owned_windows_.clear(); + OwnedWindowCollectorContext context(window_, unscaled_cropped_rect, + &window_capture_helper_, + &owned_windows_); + + if (context.IsSelectedWindowValid()) { + EnumWindows(OwnedWindowCollector, reinterpret_cast(&context)); + + if (!owned_windows_.empty()) { + if (!owned_window_capturer_) { + owned_window_capturer_ = std::make_unique(); + } + + // Owned windows are stored in top-down z-order, so this iterates in + // reverse to capture / draw them in bottom-up z-order + for (auto it = owned_windows_.rbegin(); it != owned_windows_.rend(); + it++) { + HWND hwnd = *it; + if (owned_window_capturer_->SelectSource( + reinterpret_cast(hwnd))) { + CaptureResults results = owned_window_capturer_->CaptureFrame( + /*capture_owned_windows*/ false); + + if (results.result != DesktopCapturer::Result::SUCCESS) { + // Simply log any error capturing an owned/pop-up window without + // bubbling it up to the caller (an expected error here is that + // the owned/pop-up window was closed; any unexpected errors won't + // fail the outer capture). + RTC_LOG(LS_INFO) << "Capturing owned window failed (previous " + "error/warning pertained to that)"; + } else { + // Copy / composite the captured frame into the outer frame. This + // may no-op if they no longer intersect (if the owned window was + // moved outside the owner bounds since scheduled for capture.) 
+ cropped_frame->CopyIntersectingPixelsFrom( + *results.frame, horizontal_scale, vertical_scale); + } + } + } + } + } + } + + return {Result::SUCCESS, std::move(cropped_frame)}; +} + +// static +std::unique_ptr WindowCapturerWinGdi::CreateRawWindowCapturer( + const DesktopCaptureOptions& options) { + return std::unique_ptr(new WindowCapturerWinGdi()); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/win/window_capturer_win_gdi.h b/modules/desktop_capture/win/window_capturer_win_gdi.h new file mode 100644 index 0000000000..c954c230c9 --- /dev/null +++ b/modules/desktop_capture/win/window_capturer_win_gdi.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_GDI_H_ +#define MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_GDI_H_ + +#include +#include +#include + +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/win/window_capture_utils.h" +#include "modules/desktop_capture/window_finder_win.h" + +namespace webrtc { + +class WindowCapturerWinGdi : public DesktopCapturer { + public: + WindowCapturerWinGdi(); + + // Disallow copy and assign + WindowCapturerWinGdi(const WindowCapturerWinGdi&) = delete; + WindowCapturerWinGdi& operator=(const WindowCapturerWinGdi&) = delete; + + ~WindowCapturerWinGdi() override; + + static std::unique_ptr CreateRawWindowCapturer( + const DesktopCaptureOptions& options); + + // DesktopCapturer interface. 
+ void Start(Callback* callback) override; + void CaptureFrame() override; + bool GetSourceList(SourceList* sources) override; + bool SelectSource(SourceId id) override; + bool FocusOnSelectedSource() override; + bool IsOccluded(const DesktopVector& pos) override; + + private: + struct CaptureResults { + Result result; + std::unique_ptr frame; + }; + + CaptureResults CaptureFrame(bool capture_owned_windows); + + Callback* callback_ = nullptr; + + // HWND and HDC for the currently selected window or nullptr if window is not + // selected. + HWND window_ = nullptr; + + DesktopSize previous_size_; + + WindowCaptureHelperWin window_capture_helper_; + + // This map is used to avoid flickering for the case when SelectWindow() calls + // are interleaved with Capture() calls. + std::map window_size_map_; + + WindowFinderWin window_finder_; + + std::vector owned_windows_; + std::unique_ptr owned_window_capturer_; +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_GDI_H_ diff --git a/modules/desktop_capture/win/window_capturer_win_wgc.cc b/modules/desktop_capture/win/window_capturer_win_wgc.cc new file mode 100644 index 0000000000..30a672d9ef --- /dev/null +++ b/modules/desktop_capture/win/window_capturer_win_wgc.cc @@ -0,0 +1,137 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/desktop_capture/win/window_capturer_win_wgc.h" + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +WindowCapturerWinWgc::WindowCapturerWinWgc() = default; +WindowCapturerWinWgc::~WindowCapturerWinWgc() = default; + +bool WindowCapturerWinWgc::GetSourceList(SourceList* sources) { + return window_capture_helper_.EnumerateCapturableWindows(sources); +} + +bool WindowCapturerWinWgc::SelectSource(SourceId id) { + HWND window = reinterpret_cast(id); + if (!IsWindowValidAndVisible(window)) + return false; + + window_ = window; + return true; +} + +void WindowCapturerWinWgc::Start(Callback* callback) { + RTC_DCHECK(!callback_); + RTC_DCHECK(callback); + + callback_ = callback; + + // Create a Direct3D11 device to share amongst the WgcCaptureSessions. Many + // parameters are nullptr as the implementation uses defaults that work well for + // us. + HRESULT hr = D3D11CreateDevice( + /*adapter=*/nullptr, D3D_DRIVER_TYPE_HARDWARE, + /*software_rasterizer=*/nullptr, D3D11_CREATE_DEVICE_BGRA_SUPPORT, + /*feature_levels=*/nullptr, /*feature_levels_size=*/0, D3D11_SDK_VERSION, + &d3d11_device_, /*feature_level=*/nullptr, /*device_context=*/nullptr); + if (hr == DXGI_ERROR_UNSUPPORTED) { + // If a hardware device could not be created, use WARP which is a high speed + // software device. 
+ hr = D3D11CreateDevice( + /*adapter=*/nullptr, D3D_DRIVER_TYPE_WARP, + /*software_rasterizer=*/nullptr, D3D11_CREATE_DEVICE_BGRA_SUPPORT, + /*feature_levels=*/nullptr, /*feature_levels_size=*/0, + D3D11_SDK_VERSION, &d3d11_device_, /*feature_level=*/nullptr, + /*device_context=*/nullptr); + } + + if (FAILED(hr)) { + RTC_LOG(LS_ERROR) << "Failed to create D3D11Device: " << hr; + } +} + +void WindowCapturerWinWgc::CaptureFrame() { + RTC_DCHECK(callback_); + + if (!window_) { + RTC_LOG(LS_ERROR) << "Window hasn't been selected"; + callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_PERMANENT, + /*frame=*/nullptr); + return; + } + + if (!d3d11_device_) { + RTC_LOG(LS_ERROR) << "No D3D11D3evice, cannot capture."; + callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_PERMANENT, + /*frame=*/nullptr); + return; + } + + WgcCaptureSession* capture_session = nullptr; + auto iter = ongoing_captures_.find(window_); + if (iter == ongoing_captures_.end()) { + auto iter_success_pair = ongoing_captures_.emplace( + std::piecewise_construct, std::forward_as_tuple(window_), + std::forward_as_tuple(d3d11_device_, window_)); + if (iter_success_pair.second) { + capture_session = &iter_success_pair.first->second; + } else { + RTC_LOG(LS_ERROR) << "Failed to create new WgcCaptureSession."; + callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_PERMANENT, + /*frame=*/nullptr); + return; + } + } else { + capture_session = &iter->second; + } + + HRESULT hr; + if (!capture_session->IsCaptureStarted()) { + hr = capture_session->StartCapture(); + if (FAILED(hr)) { + RTC_LOG(LS_ERROR) << "Failed to start capture: " << hr; + callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_PERMANENT, + /*frame=*/nullptr); + return; + } + } + + std::unique_ptr frame; + hr = capture_session->GetMostRecentFrame(&frame); + if (FAILED(hr)) { + RTC_LOG(LS_ERROR) << "GetMostRecentFrame failed: " << hr; + callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_PERMANENT, + /*frame=*/nullptr); 
+ return; + } + + if (!frame) { + RTC_LOG(LS_WARNING) << "GetMostRecentFrame returned an empty frame."; + callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_TEMPORARY, + /*frame=*/nullptr); + return; + } + + callback_->OnCaptureResult(DesktopCapturer::Result::SUCCESS, + std::move(frame)); +} + +// static +std::unique_ptr WindowCapturerWinWgc::CreateRawWindowCapturer( + const DesktopCaptureOptions& options) { + return std::unique_ptr(new WindowCapturerWinWgc()); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/win/window_capturer_win_wgc.h b/modules/desktop_capture/win/window_capturer_win_wgc.h new file mode 100644 index 0000000000..7e05b0e541 --- /dev/null +++ b/modules/desktop_capture/win/window_capturer_win_wgc.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_WGC_H_ +#define MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_WGC_H_ + +#include +#include +#include +#include + +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/win/wgc_capture_session.h" +#include "modules/desktop_capture/win/window_capture_utils.h" + +namespace webrtc { + +class WindowCapturerWinWgc final : public DesktopCapturer { + public: + WindowCapturerWinWgc(); + + WindowCapturerWinWgc(const WindowCapturerWinWgc&) = delete; + WindowCapturerWinWgc& operator=(const WindowCapturerWinWgc&) = delete; + + ~WindowCapturerWinWgc() override; + + static std::unique_ptr CreateRawWindowCapturer( + const DesktopCaptureOptions& options); + + // DesktopCapturer interface. + void Start(Callback* callback) override; + void CaptureFrame() override; + bool GetSourceList(SourceList* sources) override; + bool SelectSource(SourceId id) override; + + private: + // The callback that we deliver frames to, synchronously, before CaptureFrame + // returns. + Callback* callback_ = nullptr; + + // HWND for the currently selected window or nullptr if a window is not + // selected. We may be capturing many other windows, but this is the window + // that we will return a frame for when CaptureFrame is called. + HWND window_ = nullptr; + + // This helps us enumerate the list of windows that we can capture. + WindowCaptureHelperWin window_capture_helper_; + + // A Direct3D11 device that is shared amongst the WgcCaptureSessions, who + // require one to perform the capture. + Microsoft::WRL::ComPtr<::ID3D11Device> d3d11_device_; + + // A map of all the windows we are capturing and the associated + // WgcCaptureSession. This is where we will get the frames for the window + // from, when requested. 
+ std::map ongoing_captures_; +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURER_WIN_WGC_H_ diff --git a/modules/desktop_capture/window_capturer_mac.mm b/modules/desktop_capture/window_capturer_mac.mm index 96f89eb14b..cbbc500613 100644 --- a/modules/desktop_capture/window_capturer_mac.mm +++ b/modules/desktop_capture/window_capturer_mac.mm @@ -161,7 +161,19 @@ explicit WindowCapturerMac( if (full_screen_window_detector_) { full_screen_window_detector_->UpdateWindowListIfNeeded( window_id_, [](DesktopCapturer::SourceList* sources) { - return webrtc::GetWindowList(sources, true, false); + // Not using webrtc::GetWindowList(sources, true, false) + // as it doesn't allow to have in the result window with + // empty title along with titled window owned by the same pid. + return webrtc::GetWindowList( + [sources](CFDictionaryRef window) { + WindowId window_id = GetWindowId(window); + if (window_id != kNullWindowId) { + sources->push_back(DesktopCapturer::Source{window_id, GetWindowTitle(window)}); + } + return true; + }, + true, + false); }); CGWindowID full_screen_window = full_screen_window_detector_->FindFullScreenWindow(window_id_); diff --git a/modules/desktop_capture/window_capturer_win.cc b/modules/desktop_capture/window_capturer_win.cc index 4e16c44ced..4bfa09f4d6 100644 --- a/modules/desktop_capture/window_capturer_win.cc +++ b/modules/desktop_capture/window_capturer_win.cc @@ -8,472 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include - -#include - -#include "modules/desktop_capture/cropped_desktop_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" #include "modules/desktop_capture/desktop_capturer.h" -#include "modules/desktop_capture/desktop_frame_win.h" -#include "modules/desktop_capture/win/screen_capture_utils.h" -#include "modules/desktop_capture/win/selected_window_context.h" -#include "modules/desktop_capture/win/window_capture_utils.h" -#include "modules/desktop_capture/window_finder_win.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/logging.h" -#include "rtc_base/string_utils.h" -#include "rtc_base/trace_event.h" -#include "rtc_base/win32.h" +#include "modules/desktop_capture/win/window_capturer_win_gdi.h" namespace webrtc { -namespace { - -BOOL CALLBACK WindowsEnumerationHandler(HWND hwnd, LPARAM param) { - DesktopCapturer::SourceList* list = - reinterpret_cast(param); - - // Skip windows that are invisible, minimized, have no title, or are owned, - // unless they have the app window style set. - int len = GetWindowTextLength(hwnd); - HWND owner = GetWindow(hwnd, GW_OWNER); - LONG exstyle = GetWindowLong(hwnd, GWL_EXSTYLE); - if (len == 0 || IsIconic(hwnd) || !IsWindowVisible(hwnd) || - (owner && !(exstyle & WS_EX_APPWINDOW))) { - return TRUE; - } - // Skip unresponsive windows. Set timout with 50ms, in case system is under - // heavy load, the check can wait longer but wont' be too long to delay the - // the enumeration. - const UINT uTimeout = 50; // ms - if (!SendMessageTimeout(hwnd, WM_NULL, 0, 0, SMTO_ABORTIFHUNG, uTimeout, - nullptr)) { - return TRUE; - } - - // Skip the Program Manager window and the Start button. - const size_t kClassLength = 256; - WCHAR class_name[kClassLength]; - const int class_name_length = GetClassNameW(hwnd, class_name, kClassLength); - if (class_name_length < 1) - return TRUE; - - // Skip Program Manager window and the Start button. 
This is the same logic - // that's used in Win32WindowPicker in libjingle. Consider filtering other - // windows as well (e.g. toolbars). - if (wcscmp(class_name, L"Progman") == 0 || wcscmp(class_name, L"Button") == 0) - return TRUE; - - DesktopCapturer::Source window; - window.id = reinterpret_cast(hwnd); - - const size_t kTitleLength = 500; - WCHAR window_title[kTitleLength]; - // Truncate the title if it's longer than kTitleLength. - GetWindowTextW(hwnd, window_title, kTitleLength); - window.title = rtc::ToUtf8(window_title); - - // Skip windows when we failed to convert the title or it is empty. - if (window.title.empty()) - return TRUE; - - list->push_back(window); - - return TRUE; -} - -// Used to pass input/output data during the EnumWindows call to collect -// owned/pop-up windows that should be captured. -struct OwnedWindowCollectorContext : public SelectedWindowContext { - OwnedWindowCollectorContext(HWND selected_window, - DesktopRect selected_window_rect, - WindowCaptureHelperWin* window_capture_helper, - std::vector* owned_windows) - : SelectedWindowContext(selected_window, - selected_window_rect, - window_capture_helper), - owned_windows(owned_windows) {} - - std::vector* owned_windows; -}; - -// Called via EnumWindows for each root window; adds owned/pop-up windows that -// should be captured to a vector it's passed. -BOOL CALLBACK OwnedWindowCollector(HWND hwnd, LPARAM param) { - OwnedWindowCollectorContext* context = - reinterpret_cast(param); - if (hwnd == context->selected_window()) { - // Windows are enumerated in top-down z-order, so we can stop enumerating - // upon reaching the selected window. - return FALSE; - } - - // Skip windows that aren't visible pop-up windows. - if (!(GetWindowLong(hwnd, GWL_STYLE) & WS_POPUP) || - !context->window_capture_helper()->IsWindowVisibleOnCurrentDesktop( - hwnd)) { - return TRUE; - } - - // Owned windows that intersect the selected window should be captured. 
- if (context->IsWindowOwnedBySelectedWindow(hwnd) && - context->IsWindowOverlappingSelectedWindow(hwnd)) { - // Skip windows that draw shadows around menus. These "SysShadow" windows - // would otherwise be captured as solid black bars with no transparency - // gradient (since this capturer doesn't detect / respect variations in the - // window alpha channel). Any other semi-transparent owned windows will be - // captured fully-opaque. This seems preferable to excluding them (at least - // when they have content aside from a solid fill color / visual adornment; - // e.g. some tooltips have the transparent style set). - if (GetWindowLong(hwnd, GWL_EXSTYLE) & WS_EX_TRANSPARENT) { - const WCHAR kSysShadow[] = L"SysShadow"; - const size_t kClassLength = arraysize(kSysShadow); - WCHAR class_name[kClassLength]; - const int class_name_length = - GetClassNameW(hwnd, class_name, kClassLength); - if (class_name_length == kClassLength - 1 && - wcscmp(class_name, kSysShadow) == 0) { - return TRUE; - } - } - - context->owned_windows->push_back(hwnd); - } - - return TRUE; -} - -class WindowCapturerWin : public DesktopCapturer { - public: - WindowCapturerWin(); - ~WindowCapturerWin() override; - - // DesktopCapturer interface. - void Start(Callback* callback) override; - void CaptureFrame() override; - bool GetSourceList(SourceList* sources) override; - bool SelectSource(SourceId id) override; - bool FocusOnSelectedSource() override; - bool IsOccluded(const DesktopVector& pos) override; - - private: - struct CaptureResults { - Result result; - std::unique_ptr frame; - }; - - CaptureResults CaptureFrame(bool capture_owned_windows); - - Callback* callback_ = nullptr; - - // HWND and HDC for the currently selected window or nullptr if window is not - // selected. 
- HWND window_ = nullptr; - - DesktopSize previous_size_; - - WindowCaptureHelperWin window_capture_helper_; - - // This map is used to avoid flickering for the case when SelectWindow() calls - // are interleaved with Capture() calls. - std::map window_size_map_; - - WindowFinderWin window_finder_; - - std::vector owned_windows_; - std::unique_ptr owned_window_capturer_; - - RTC_DISALLOW_COPY_AND_ASSIGN(WindowCapturerWin); -}; - -WindowCapturerWin::WindowCapturerWin() {} -WindowCapturerWin::~WindowCapturerWin() {} - -bool WindowCapturerWin::GetSourceList(SourceList* sources) { - SourceList result; - LPARAM param = reinterpret_cast(&result); - // EnumWindows only enumerates root windows. - if (!EnumWindows(&WindowsEnumerationHandler, param)) - return false; - - for (auto it = result.begin(); it != result.end();) { - if (!window_capture_helper_.IsWindowVisibleOnCurrentDesktop( - reinterpret_cast(it->id))) { - it = result.erase(it); - } else { - ++it; - } - } - sources->swap(result); - - std::map new_map; - for (const auto& item : *sources) { - HWND hwnd = reinterpret_cast(item.id); - new_map[hwnd] = window_size_map_[hwnd]; - } - window_size_map_.swap(new_map); - - return true; -} - -bool WindowCapturerWin::SelectSource(SourceId id) { - HWND window = reinterpret_cast(id); - if (!IsWindow(window) || !IsWindowVisible(window) || IsIconic(window)) - return false; - window_ = window; - // When a window is not in the map, window_size_map_[window] will create an - // item with DesktopSize (0, 0). 
- previous_size_ = window_size_map_[window]; - return true; -} - -bool WindowCapturerWin::FocusOnSelectedSource() { - if (!window_) - return false; - - if (!IsWindow(window_) || !IsWindowVisible(window_) || IsIconic(window_)) - return false; - - return BringWindowToTop(window_) != FALSE && - SetForegroundWindow(window_) != FALSE; -} - -bool WindowCapturerWin::IsOccluded(const DesktopVector& pos) { - DesktopVector sys_pos = pos.add(GetFullscreenRect().top_left()); - HWND hwnd = - reinterpret_cast(window_finder_.GetWindowUnderPoint(sys_pos)); - - return hwnd != window_ && - std::find(owned_windows_.begin(), owned_windows_.end(), hwnd) == - owned_windows_.end(); -} - -void WindowCapturerWin::Start(Callback* callback) { - assert(!callback_); - assert(callback); - - callback_ = callback; -} - -void WindowCapturerWin::CaptureFrame() { - CaptureResults results = CaptureFrame(/*capture_owned_windows*/ true); - - callback_->OnCaptureResult(results.result, std::move(results.frame)); -} - -WindowCapturerWin::CaptureResults WindowCapturerWin::CaptureFrame( - bool capture_owned_windows) { - TRACE_EVENT0("webrtc", "WindowCapturerWin::CaptureFrame"); - - if (!window_) { - RTC_LOG(LS_ERROR) << "Window hasn't been selected: " << GetLastError(); - return {Result::ERROR_PERMANENT, nullptr}; - } - - // Stop capturing if the window has been closed. - if (!IsWindow(window_)) { - RTC_LOG(LS_ERROR) << "target window has been closed"; - return {Result::ERROR_PERMANENT, nullptr}; - } - - // Determine the window region excluding any resize border, and including - // any visible border if capturing an owned window / dialog. (Don't include - // any visible border for the selected window for consistency with - // CroppingWindowCapturerWin, which would expose a bit of the background - // through the partially-transparent border.) 
- const bool avoid_cropping_border = !capture_owned_windows; - DesktopRect cropped_rect; - DesktopRect original_rect; - - if (!GetCroppedWindowRect(window_, avoid_cropping_border, &cropped_rect, - &original_rect)) { - RTC_LOG(LS_WARNING) << "Failed to get drawable window area: " - << GetLastError(); - return {Result::ERROR_TEMPORARY, nullptr}; - } - - // Return a 1x1 black frame if the window is minimized or invisible on current - // desktop, to match behavior on mace. Window can be temporarily invisible - // during the transition of full screen mode on/off. - if (original_rect.is_empty() || - !window_capture_helper_.IsWindowVisibleOnCurrentDesktop(window_)) { - std::unique_ptr frame( - new BasicDesktopFrame(DesktopSize(1, 1))); - - previous_size_ = frame->size(); - window_size_map_[window_] = previous_size_; - return {Result::SUCCESS, std::move(frame)}; - } - - HDC window_dc = GetWindowDC(window_); - if (!window_dc) { - RTC_LOG(LS_WARNING) << "Failed to get window DC: " << GetLastError(); - return {Result::ERROR_TEMPORARY, nullptr}; - } - - DesktopRect unscaled_cropped_rect = cropped_rect; - double horizontal_scale = 1.0; - double vertical_scale = 1.0; - - DesktopSize window_dc_size; - if (GetDcSize(window_dc, &window_dc_size)) { - // The |window_dc_size| is used to detect the scaling of the original - // window. If the application does not support high-DPI settings, it will - // be scaled by Windows according to the scaling setting. - // https://www.google.com/search?q=windows+scaling+settings&ie=UTF-8 - // So the size of the |window_dc|, i.e. the bitmap we can retrieve from - // PrintWindow() or BitBlt() function, will be smaller than - // |original_rect| and |cropped_rect|. Part of the captured desktop frame - // will be black. See - // bug https://bugs.chromium.org/p/webrtc/issues/detail?id=8112 for - // details. - - // If |window_dc_size| is smaller than |window_rect|, let's resize both - // |original_rect| and |cropped_rect| according to the scaling factor. 
- horizontal_scale = - static_cast(window_dc_size.width()) / original_rect.width(); - vertical_scale = - static_cast(window_dc_size.height()) / original_rect.height(); - original_rect.Scale(horizontal_scale, vertical_scale); - cropped_rect.Scale(horizontal_scale, vertical_scale); - } - - std::unique_ptr frame( - DesktopFrameWin::Create(original_rect.size(), nullptr, window_dc)); - if (!frame.get()) { - RTC_LOG(LS_WARNING) << "Failed to create frame."; - ReleaseDC(window_, window_dc); - return {Result::ERROR_TEMPORARY, nullptr}; - } - - HDC mem_dc = CreateCompatibleDC(window_dc); - HGDIOBJ previous_object = SelectObject(mem_dc, frame->bitmap()); - BOOL result = FALSE; - - // When desktop composition (Aero) is enabled each window is rendered to a - // private buffer allowing BitBlt() to get the window content even if the - // window is occluded. PrintWindow() is slower but lets rendering the window - // contents to an off-screen device context when Aero is not available. - // PrintWindow() is not supported by some applications. - // - // If Aero is enabled, we prefer BitBlt() because it's faster and avoids - // window flickering. Otherwise, we prefer PrintWindow() because BitBlt() may - // render occluding windows on top of the desired window. - // - // When composition is enabled the DC returned by GetWindowDC() doesn't always - // have window frame rendered correctly. Windows renders it only once and then - // caches the result between captures. We hack it around by calling - // PrintWindow() whenever window size changes, including the first time of - // capturing - it somehow affects what we get from BitBlt() on the subsequent - // captures. - // - // For Windows 8.1 and later, we want to always use PrintWindow when the - // cropping screen capturer falls back to the window capturer. I.e. - // on Windows 8.1 and later, PrintWindow is only used when the window is - // occluded. 
When the window is not occluded, it is much faster to capture - // the screen and to crop it to the window position and size. - if (rtc::IsWindows8OrLater()) { - // Special flag that makes PrintWindow to work on Windows 8.1 and later. - // Indeed certain apps (e.g. those using DirectComposition rendering) can't - // be captured using BitBlt or PrintWindow without this flag. Note that on - // Windows 8.0 this flag is not supported so the block below will fallback - // to the other call to PrintWindow. It seems to be very tricky to detect - // Windows 8.0 vs 8.1 so a try/fallback is more approriate here. - const UINT flags = PW_RENDERFULLCONTENT; - result = PrintWindow(window_, mem_dc, flags); - } - - if (!result && (!window_capture_helper_.IsAeroEnabled() || - !previous_size_.equals(frame->size()))) { - result = PrintWindow(window_, mem_dc, 0); - } - - // Aero is enabled or PrintWindow() failed, use BitBlt. - if (!result) { - result = BitBlt(mem_dc, 0, 0, frame->size().width(), frame->size().height(), - window_dc, 0, 0, SRCCOPY); - } - - SelectObject(mem_dc, previous_object); - DeleteDC(mem_dc); - ReleaseDC(window_, window_dc); - - previous_size_ = frame->size(); - window_size_map_[window_] = previous_size_; - - frame->mutable_updated_region()->SetRect( - DesktopRect::MakeSize(frame->size())); - frame->set_top_left( - original_rect.top_left().subtract(GetFullscreenRect().top_left())); - - if (!result) { - RTC_LOG(LS_ERROR) << "Both PrintWindow() and BitBlt() failed."; - return {Result::ERROR_TEMPORARY, nullptr}; - } - - // Rect for the data is relative to the first pixel of the frame. - cropped_rect.Translate(-original_rect.left(), -original_rect.top()); - std::unique_ptr cropped_frame = - CreateCroppedDesktopFrame(std::move(frame), cropped_rect); - RTC_DCHECK(cropped_frame); - - if (capture_owned_windows) { - // If any owned/pop-up windows overlap the selected window, capture them - // and copy/composite their contents into the frame. 
- owned_windows_.clear(); - OwnedWindowCollectorContext context(window_, unscaled_cropped_rect, - &window_capture_helper_, - &owned_windows_); - - if (context.IsSelectedWindowValid()) { - EnumWindows(OwnedWindowCollector, reinterpret_cast(&context)); - - if (!owned_windows_.empty()) { - if (!owned_window_capturer_) { - owned_window_capturer_ = std::make_unique(); - } - - // Owned windows are stored in top-down z-order, so this iterates in - // reverse to capture / draw them in bottom-up z-order - for (auto it = owned_windows_.rbegin(); it != owned_windows_.rend(); - it++) { - HWND hwnd = *it; - if (owned_window_capturer_->SelectSource( - reinterpret_cast(hwnd))) { - CaptureResults results = owned_window_capturer_->CaptureFrame( - /*capture_owned_windows*/ false); - - if (results.result != DesktopCapturer::Result::SUCCESS) { - // Simply log any error capturing an owned/pop-up window without - // bubbling it up to the caller (an expected error here is that - // the owned/pop-up window was closed; any unexpected errors won't - // fail the outer capture). - RTC_LOG(LS_INFO) << "Capturing owned window failed (previous " - "error/warning pertained to that)"; - } else { - // Copy / composite the captured frame into the outer frame. This - // may no-op if they no longer intersect (if the owned window was - // moved outside the owner bounds since scheduled for capture.) 
- cropped_frame->CopyIntersectingPixelsFrom( - *results.frame, horizontal_scale, vertical_scale); - } - } - } - } - } - } - - return {Result::SUCCESS, std::move(cropped_frame)}; -} - -} // namespace - // static std::unique_ptr DesktopCapturer::CreateRawWindowCapturer( const DesktopCaptureOptions& options) { - return std::unique_ptr(new WindowCapturerWin()); + return WindowCapturerWinGdi::CreateRawWindowCapturer(options); } } // namespace webrtc diff --git a/modules/include/module_common_types.cc b/modules/include/module_common_types.cc deleted file mode 100644 index 86f753356d..0000000000 --- a/modules/include/module_common_types.cc +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/include/module_common_types.h" - -#include - -#include -#include - -#include "rtc_base/numerics/safe_conversions.h" - -namespace webrtc { - -RTPFragmentationHeader::RTPFragmentationHeader() - : fragmentationVectorSize(0), - fragmentationOffset(nullptr), - fragmentationLength(nullptr) {} - -RTPFragmentationHeader::RTPFragmentationHeader(RTPFragmentationHeader&& other) - : RTPFragmentationHeader() { - swap(*this, other); -} - -RTPFragmentationHeader& RTPFragmentationHeader::operator=( - RTPFragmentationHeader&& other) { - swap(*this, other); - return *this; -} - -RTPFragmentationHeader::~RTPFragmentationHeader() { - delete[] fragmentationOffset; - delete[] fragmentationLength; -} - -void swap(RTPFragmentationHeader& a, RTPFragmentationHeader& b) { - using std::swap; - swap(a.fragmentationVectorSize, b.fragmentationVectorSize); - swap(a.fragmentationOffset, b.fragmentationOffset); - swap(a.fragmentationLength, b.fragmentationLength); -} - -void RTPFragmentationHeader::CopyFrom(const RTPFragmentationHeader& src) { - if (this == &src) { - return; - } - - if (src.fragmentationVectorSize != fragmentationVectorSize) { - // new size of vectors - - // delete old - delete[] fragmentationOffset; - fragmentationOffset = nullptr; - delete[] fragmentationLength; - fragmentationLength = nullptr; - - if (src.fragmentationVectorSize > 0) { - // allocate new - if (src.fragmentationOffset) { - fragmentationOffset = new size_t[src.fragmentationVectorSize]; - } - if (src.fragmentationLength) { - fragmentationLength = new size_t[src.fragmentationVectorSize]; - } - } - // set new size - fragmentationVectorSize = src.fragmentationVectorSize; - } - - if (src.fragmentationVectorSize > 0) { - // copy values - if (src.fragmentationOffset) { - memcpy(fragmentationOffset, src.fragmentationOffset, - src.fragmentationVectorSize * sizeof(size_t)); - } - if (src.fragmentationLength) { - memcpy(fragmentationLength, src.fragmentationLength, - src.fragmentationVectorSize * 
sizeof(size_t)); - } - } -} - -void RTPFragmentationHeader::Resize(size_t size) { - const uint16_t size16 = rtc::dchecked_cast(size); - if (fragmentationVectorSize < size16) { - uint16_t oldVectorSize = fragmentationVectorSize; - { - // offset - size_t* oldOffsets = fragmentationOffset; - fragmentationOffset = new size_t[size16]; - memset(fragmentationOffset + oldVectorSize, 0, - sizeof(size_t) * (size16 - oldVectorSize)); - // copy old values - memcpy(fragmentationOffset, oldOffsets, sizeof(size_t) * oldVectorSize); - delete[] oldOffsets; - } - // length - { - size_t* oldLengths = fragmentationLength; - fragmentationLength = new size_t[size16]; - memset(fragmentationLength + oldVectorSize, 0, - sizeof(size_t) * (size16 - oldVectorSize)); - memcpy(fragmentationLength, oldLengths, sizeof(size_t) * oldVectorSize); - delete[] oldLengths; - } - fragmentationVectorSize = size16; - } -} - -} // namespace webrtc diff --git a/modules/include/module_common_types.h b/modules/include/module_common_types.h index 3afd7b7d7a..7c9ef39cf0 100644 --- a/modules/include/module_common_types.h +++ b/modules/include/module_common_types.h @@ -11,44 +11,12 @@ #ifndef MODULES_INCLUDE_MODULE_COMMON_TYPES_H_ #define MODULES_INCLUDE_MODULE_COMMON_TYPES_H_ -#include #include #include -#include "rtc_base/system/rtc_export.h" - namespace webrtc { -class RTC_EXPORT RTPFragmentationHeader { - public: - RTPFragmentationHeader(); - RTPFragmentationHeader(const RTPFragmentationHeader&) = delete; - RTPFragmentationHeader(RTPFragmentationHeader&& other); - RTPFragmentationHeader& operator=(const RTPFragmentationHeader& other) = - delete; - RTPFragmentationHeader& operator=(RTPFragmentationHeader&& other); - ~RTPFragmentationHeader(); - - friend void swap(RTPFragmentationHeader& a, RTPFragmentationHeader& b); - - void CopyFrom(const RTPFragmentationHeader& src); - void VerifyAndAllocateFragmentationHeader(size_t size) { Resize(size); } - - void Resize(size_t size); - size_t Size() const { return 
fragmentationVectorSize; } - - size_t Offset(size_t index) const { return fragmentationOffset[index]; } - size_t Length(size_t index) const { return fragmentationLength[index]; } - - // TODO(danilchap): Move all members to private section, - // simplify by replacing raw arrays with single std::vector - uint16_t fragmentationVectorSize; // Number of fragmentations - size_t* fragmentationOffset; // Offset of pointer to data for each - // fragmentation - size_t* fragmentationLength; // Data size for each fragmentation -}; - // Interface used by the CallStats class to distribute call statistics. // Callbacks will be triggered as soon as the class has been registered to a // CallStats object using RegisterStatsObserver. diff --git a/modules/include/module_fec_types.h b/modules/include/module_fec_types.h index 25d6bc5714..f9b35cc288 100644 --- a/modules/include/module_fec_types.h +++ b/modules/include/module_fec_types.h @@ -24,9 +24,9 @@ enum FecMaskType { // Struct containing forward error correction settings. 
struct FecProtectionParams { - int fec_rate; - int max_fec_frames; - FecMaskType fec_mask_type; + int fec_rate = 0; + int max_fec_frames = 0; + FecMaskType fec_mask_type = FecMaskType::kFecMaskRandom; }; } // namespace webrtc diff --git a/modules/pacing/BUILD.gn b/modules/pacing/BUILD.gn index d59d2b93a4..cabcd9300b 100644 --- a/modules/pacing/BUILD.gn +++ b/modules/pacing/BUILD.gn @@ -49,6 +49,7 @@ rtc_library("pacing") { "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_task_queue", "../../rtc_base/experiments:field_trial_parser", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/task_utils:to_queued_task", "../../system_wrappers", @@ -57,7 +58,10 @@ rtc_library("pacing") { "../rtp_rtcp", "../rtp_rtcp:rtp_rtcp_format", "../utility", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -89,6 +93,7 @@ if (rtc_include_tests) { deps = [ ":interval_budget", ":pacing", + "../../api/transport:network_control", "../../api/units:data_rate", "../../api/units:time_delta", "../../modules/utility:mock_process_thread", @@ -98,6 +103,7 @@ if (rtc_include_tests) { "../../rtc_base/experiments:alr_experiment", "../../system_wrappers", "../../system_wrappers:field_trial", + "../../test:explicit_key_value_config", "../../test:field_trial", "../../test:test_support", "../../test/time_controller:time_controller", diff --git a/modules/pacing/OWNERS b/modules/pacing/OWNERS index becab12cc8..0a77688b1e 100644 --- a/modules/pacing/OWNERS +++ b/modules/pacing/OWNERS @@ -4,8 +4,3 @@ asapersson@webrtc.org philipel@webrtc.org srte@webrtc.org sprang@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/modules/pacing/bitrate_prober.cc b/modules/pacing/bitrate_prober.cc index 719a6029ad..381a54d50a 100644 --- a/modules/pacing/bitrate_prober.cc +++ b/modules/pacing/bitrate_prober.cc @@ -26,24 +26,27 @@ namespace { // The min probe packet size is scaled with the bitrate we're probing at. // This defines the max min probe packet size, meaning that on high bitrates // we have a min probe packet size of 200 bytes. -constexpr size_t kMinProbePacketSize = 200; +constexpr DataSize kMinProbePacketSize = DataSize::Bytes(200); -constexpr TimeDelta kProbeClusterTimeout = TimeDelta::Seconds<5>(); +constexpr TimeDelta kProbeClusterTimeout = TimeDelta::Seconds(5); } // namespace BitrateProberConfig::BitrateProberConfig( const WebRtcKeyValueConfig* key_value_config) : min_probe_packets_sent("min_probe_packets_sent", 5), - min_probe_delta("min_probe_delta", TimeDelta::ms(1)), - min_probe_duration("min_probe_duration", TimeDelta::ms(15)), - max_probe_delay("max_probe_delay", TimeDelta::ms(3)) { - ParseFieldTrial({&min_probe_packets_sent, &min_probe_delta, - &min_probe_duration, &max_probe_delay}, - key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); - ParseFieldTrial({&min_probe_packets_sent, &min_probe_delta, - &min_probe_duration, &max_probe_delay}, - key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); + min_probe_delta("min_probe_delta", TimeDelta::Millis(1)), + min_probe_duration("min_probe_duration", TimeDelta::Millis(15)), + max_probe_delay("max_probe_delay", TimeDelta::Millis(10)), + abort_delayed_probes("abort_delayed_probes", true) { + ParseFieldTrial( + {&min_probe_packets_sent, &min_probe_delta, &min_probe_duration, + &max_probe_delay, &abort_delayed_probes}, + key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); + ParseFieldTrial( + {&min_probe_packets_sent, &min_probe_delta, &min_probe_duration, + &max_probe_delay, &abort_delayed_probes}, + key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); } 
BitrateProber::~BitrateProber() { @@ -74,16 +77,11 @@ void BitrateProber::SetEnabled(bool enable) { } } -bool BitrateProber::IsProbing() const { - return probing_state_ == ProbingState::kActive; -} - -void BitrateProber::OnIncomingPacket(size_t packet_size) { +void BitrateProber::OnIncomingPacket(DataSize packet_size) { // Don't initialize probing unless we have something large enough to start // probing. if (probing_state_ == ProbingState::kInactive && !clusters_.empty() && - packet_size >= - std::min(RecommendedMinProbeSize(), kMinProbePacketSize)) { + packet_size >= std::min(RecommendedMinProbeSize(), kMinProbePacketSize)) { // Send next probe right away. next_probe_time_ = Timestamp::MinusInfinity(); probing_state_ = ProbingState::kActive; @@ -129,7 +127,8 @@ Timestamp BitrateProber::NextProbeTime(Timestamp now) const { return Timestamp::PlusInfinity(); } - if (next_probe_time_.IsFinite() && + // Legacy behavior, just warn about late probe and return as if not probing. + if (!config_.abort_delayed_probes && next_probe_time_.IsFinite() && now - next_probe_time_ > config_.max_probe_delay.Get()) { RTC_DLOG(LS_WARNING) << "Probe delay too high" " (next_ms:" @@ -141,9 +140,24 @@ Timestamp BitrateProber::NextProbeTime(Timestamp now) const { return next_probe_time_; } -PacedPacketInfo BitrateProber::CurrentCluster() const { - RTC_DCHECK(!clusters_.empty()); - RTC_DCHECK(probing_state_ == ProbingState::kActive); +absl::optional BitrateProber::CurrentCluster(Timestamp now) { + if (clusters_.empty() || probing_state_ != ProbingState::kActive) { + return absl::nullopt; + } + + if (config_.abort_delayed_probes && next_probe_time_.IsFinite() && + now - next_probe_time_ > config_.max_probe_delay.Get()) { + RTC_DLOG(LS_WARNING) << "Probe delay too high" + " (next_ms:" + << next_probe_time_.ms() << ", now_ms: " << now.ms() + << "), discarding probe cluster."; + clusters_.pop(); + if (clusters_.empty()) { + probing_state_ = ProbingState::kSuspended; + return absl::nullopt; + } 
+ } + PacedPacketInfo info = clusters_.front().pace_info; info.probe_cluster_bytes_sent = clusters_.front().sent_bytes; return info; @@ -152,15 +166,18 @@ PacedPacketInfo BitrateProber::CurrentCluster() const { // Probe size is recommended based on the probe bitrate required. We choose // a minimum of twice |kMinProbeDeltaMs| interval to allow scheduling to be // feasible. -size_t BitrateProber::RecommendedMinProbeSize() const { - RTC_DCHECK(!clusters_.empty()); - return clusters_.front().pace_info.send_bitrate_bps * 2 * - config_.min_probe_delta->ms() / (8 * 1000); +DataSize BitrateProber::RecommendedMinProbeSize() const { + if (clusters_.empty()) { + return DataSize::Zero(); + } + DataRate send_rate = + DataRate::BitsPerSec(clusters_.front().pace_info.send_bitrate_bps); + return 2 * send_rate * config_.min_probe_delta; } -void BitrateProber::ProbeSent(Timestamp now, size_t bytes) { +void BitrateProber::ProbeSent(Timestamp now, DataSize size) { RTC_DCHECK(probing_state_ == ProbingState::kActive); - RTC_DCHECK_GT(bytes, 0); + RTC_DCHECK(!size.IsZero()); if (!clusters_.empty()) { ProbeCluster* cluster = &clusters_.front(); @@ -168,7 +185,7 @@ void BitrateProber::ProbeSent(Timestamp now, size_t bytes) { RTC_DCHECK(cluster->started_at.IsInfinite()); cluster->started_at = now; } - cluster->sent_bytes += static_cast(bytes); + cluster->sent_bytes += size.bytes(); cluster->sent_probes += 1; next_probe_time_ = CalculateNextProbeTime(*cluster); if (cluster->sent_bytes >= cluster->pace_info.probe_cluster_min_bytes && @@ -182,8 +199,9 @@ void BitrateProber::ProbeSent(Timestamp now, size_t bytes) { clusters_.pop(); } - if (clusters_.empty()) + if (clusters_.empty()) { probing_state_ = ProbingState::kSuspended; + } } } @@ -194,8 +212,9 @@ Timestamp BitrateProber::CalculateNextProbeTime( // Compute the time delta from the cluster start to ensure probe bitrate stays // close to the target bitrate. Result is in milliseconds. 
- DataSize sent_bytes = DataSize::bytes(cluster.sent_bytes); - DataRate send_bitrate = DataRate::bps(cluster.pace_info.send_bitrate_bps); + DataSize sent_bytes = DataSize::Bytes(cluster.sent_bytes); + DataRate send_bitrate = + DataRate::BitsPerSec(cluster.pace_info.send_bitrate_bps); TimeDelta delta = sent_bytes / send_bitrate; return cluster.started_at + delta; } diff --git a/modules/pacing/bitrate_prober.h b/modules/pacing/bitrate_prober.h index ec234e8f5f..5a89aac435 100644 --- a/modules/pacing/bitrate_prober.h +++ b/modules/pacing/bitrate_prober.h @@ -35,9 +35,11 @@ struct BitrateProberConfig { FieldTrialParameter min_probe_delta; // The minimum probing duration. FieldTrialParameter min_probe_duration; - // Maximum amount of time each probe can be delayed. Probe cluster is reset - // and retried from the start when this limit is reached. + // Maximum amount of time each probe can be delayed. FieldTrialParameter max_probe_delay; + // If NextProbeTime() is called with a delay higher than specified by + // |max_probe_delay|, abort it. + FieldTrialParameter abort_delayed_probes; }; // Note that this class isn't thread-safe by itself and therefore relies @@ -52,34 +54,34 @@ class BitrateProber { // Returns true if the prober is in a probing session, i.e., it currently // wants packets to be sent out according to the time returned by // TimeUntilNextProbe(). - bool IsProbing() const; + bool is_probing() const { return probing_state_ == ProbingState::kActive; } // Initializes a new probing session if the prober is allowed to probe. Does // not initialize the prober unless the packet size is large enough to probe // with. - void OnIncomingPacket(size_t packet_size); + void OnIncomingPacket(DataSize packet_size); // Create a cluster used to probe for |bitrate_bps| with |num_probes| number // of probes. void CreateProbeCluster(DataRate bitrate, Timestamp now, int cluster_id); - // Returns the at which the next probe should be sent to get accurate probing. 
- // If probing is not desired at this time, Timestamp::PlusInfinity() will be - // returned. + // Returns the time at which the next probe should be sent to get accurate + // probing. If probing is not desired at this time, Timestamp::PlusInfinity() + // will be returned. + // TODO(bugs.webrtc.org/11780): Remove |now| argument when old mode is gone. Timestamp NextProbeTime(Timestamp now) const; // Information about the current probing cluster. - PacedPacketInfo CurrentCluster() const; + absl::optional CurrentCluster(Timestamp now); // Returns the minimum number of bytes that the prober recommends for - // the next probe. - size_t RecommendedMinProbeSize() const; + // the next probe, or zero if not probing. + DataSize RecommendedMinProbeSize() const; // Called to report to the prober that a probe has been sent. In case of // multiple packets per probe, this call would be made at the end of sending - // the last packet in probe. |probe_size| is the total size of all packets - // in probe. - void ProbeSent(Timestamp now, size_t probe_size); + // the last packet in probe. |size| is the total size of all packets in probe. 
+ void ProbeSent(Timestamp now, DataSize size); private: enum class ProbingState { diff --git a/modules/pacing/bitrate_prober_unittest.cc b/modules/pacing/bitrate_prober_unittest.cc index 6f3624f4ab..5627db0519 100644 --- a/modules/pacing/bitrate_prober_unittest.cc +++ b/modules/pacing/bitrate_prober_unittest.cc @@ -12,6 +12,7 @@ #include +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { @@ -19,25 +20,25 @@ namespace webrtc { TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) { const FieldTrialBasedConfig config; BitrateProber prober(config); - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); - Timestamp now = Timestamp::ms(0); + Timestamp now = Timestamp::Millis(0); const Timestamp start_time = now; EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity()); - const DataRate kTestBitrate1 = DataRate::kbps(900); - const DataRate kTestBitrate2 = DataRate::kbps(1800); + const DataRate kTestBitrate1 = DataRate::KilobitsPerSec(900); + const DataRate kTestBitrate2 = DataRate::KilobitsPerSec(1800); const int kClusterSize = 5; - const int kProbeSize = 1000; - const TimeDelta kMinProbeDuration = TimeDelta::ms(15); + const DataSize kProbeSize = DataSize::Bytes(1000); + const TimeDelta kMinProbeDuration = TimeDelta::Millis(15); prober.CreateProbeCluster(kTestBitrate1, now, 0); prober.CreateProbeCluster(kTestBitrate2, now, 1); - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); prober.OnIncomingPacket(kProbeSize); - EXPECT_TRUE(prober.IsProbing()); - EXPECT_EQ(0, prober.CurrentCluster().probe_cluster_id); + EXPECT_TRUE(prober.is_probing()); + EXPECT_EQ(0, prober.CurrentCluster(now)->probe_cluster_id); // First packet should probe as soon as possible. 
EXPECT_EQ(Timestamp::MinusInfinity(), prober.NextProbeTime(now)); @@ -45,14 +46,13 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) { for (int i = 0; i < kClusterSize; ++i) { now = std::max(now, prober.NextProbeTime(now)); EXPECT_EQ(now, std::max(now, prober.NextProbeTime(now))); - EXPECT_EQ(0, prober.CurrentCluster().probe_cluster_id); + EXPECT_EQ(0, prober.CurrentCluster(now)->probe_cluster_id); prober.ProbeSent(now, kProbeSize); } EXPECT_GE(now - start_time, kMinProbeDuration); // Verify that the actual bitrate is withing 10% of the target. - DataRate bitrate = - DataSize::bytes(kProbeSize * (kClusterSize - 1)) / (now - start_time); + DataRate bitrate = kProbeSize * (kClusterSize - 1) / (now - start_time); EXPECT_GT(bitrate, kTestBitrate1 * 0.9); EXPECT_LT(bitrate, kTestBitrate1 * 1.1); @@ -62,65 +62,122 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) { for (int i = 0; i < kClusterSize; ++i) { now = std::max(now, prober.NextProbeTime(now)); EXPECT_EQ(now, std::max(now, prober.NextProbeTime(now))); - EXPECT_EQ(1, prober.CurrentCluster().probe_cluster_id); + EXPECT_EQ(1, prober.CurrentCluster(now)->probe_cluster_id); prober.ProbeSent(now, kProbeSize); } // Verify that the actual bitrate is withing 10% of the target. 
TimeDelta duration = now - probe2_started; EXPECT_GE(duration, kMinProbeDuration); - bitrate = DataSize::bytes(kProbeSize * (kClusterSize - 1)) / duration; + bitrate = (kProbeSize * (kClusterSize - 1)) / duration; EXPECT_GT(bitrate, kTestBitrate2 * 0.9); EXPECT_LT(bitrate, kTestBitrate2 * 1.1); EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity()); - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); } TEST(BitrateProberTest, DoesntProbeWithoutRecentPackets) { const FieldTrialBasedConfig config; BitrateProber prober(config); + const DataSize kProbeSize = DataSize::Bytes(1000); Timestamp now = Timestamp::Zero(); EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity()); - prober.CreateProbeCluster(DataRate::kbps(900), now, 0); - EXPECT_FALSE(prober.IsProbing()); + prober.CreateProbeCluster(DataRate::KilobitsPerSec(900), now, 0); + EXPECT_FALSE(prober.is_probing()); - prober.OnIncomingPacket(1000); - EXPECT_TRUE(prober.IsProbing()); + prober.OnIncomingPacket(kProbeSize); + EXPECT_TRUE(prober.is_probing()); EXPECT_EQ(now, std::max(now, prober.NextProbeTime(now))); - prober.ProbeSent(now, 1000); - // Let time pass, no large enough packets put into prober. - now += TimeDelta::seconds(6); + prober.ProbeSent(now, kProbeSize); +} + +TEST(BitrateProberTest, DoesntDiscardDelayedProbesInLegacyMode) { + const TimeDelta kMaxProbeDelay = TimeDelta::Millis(3); + const test::ExplicitKeyValueConfig trials( + "WebRTC-Bwe-ProbingBehavior/" + "abort_delayed_probes:0," + "max_probe_delay:3ms/"); + BitrateProber prober(trials); + const DataSize kProbeSize = DataSize::Bytes(1000); + + Timestamp now = Timestamp::Zero(); + prober.CreateProbeCluster(DataRate::KilobitsPerSec(900), now, 0); + prober.OnIncomingPacket(kProbeSize); + EXPECT_TRUE(prober.is_probing()); + EXPECT_EQ(prober.CurrentCluster(now)->probe_cluster_id, 0); + // Advance to first probe time and indicate sent probe. 
+ now = std::max(now, prober.NextProbeTime(now)); + prober.ProbeSent(now, kProbeSize); + + // Advance time 1ms past timeout for the next probe. + Timestamp next_probe_time = prober.NextProbeTime(now); + EXPECT_GT(next_probe_time, now); + now += next_probe_time - now + kMaxProbeDelay + TimeDelta::Millis(1); + EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity()); // Check that legacy behaviour where prober is reset in TimeUntilNextProbe is // no longer there. Probes are no longer retried if they are timed out. - prober.OnIncomingPacket(1000); + prober.OnIncomingPacket(kProbeSize); EXPECT_EQ(prober.NextProbeTime(now), Timestamp::PlusInfinity()); } +TEST(BitrateProberTest, DiscardsDelayedProbesWhenNotInLegacyMode) { + const TimeDelta kMaxProbeDelay = TimeDelta::Millis(3); + const test::ExplicitKeyValueConfig trials( + "WebRTC-Bwe-ProbingBehavior/" + "abort_delayed_probes:1," + "max_probe_delay:3ms/"); + BitrateProber prober(trials); + const DataSize kProbeSize = DataSize::Bytes(1000); + + Timestamp now = Timestamp::Zero(); + + // Add two probe clusters. + prober.CreateProbeCluster(DataRate::KilobitsPerSec(900), now, /*id=*/0); + + prober.OnIncomingPacket(kProbeSize); + EXPECT_TRUE(prober.is_probing()); + EXPECT_EQ(prober.CurrentCluster(now)->probe_cluster_id, 0); + // Advance to first probe time and indicate sent probe. + now = std::max(now, prober.NextProbeTime(now)); + prober.ProbeSent(now, kProbeSize); + + // Advance time 1ms past timeout for the next probe. + Timestamp next_probe_time = prober.NextProbeTime(now); + EXPECT_GT(next_probe_time, now); + now += next_probe_time - now + kMaxProbeDelay + TimeDelta::Millis(1); + + // Still indicates the time we wanted to probe at. + EXPECT_EQ(prober.NextProbeTime(now), next_probe_time); + // First and only cluster removed due to timeout. 
+ EXPECT_FALSE(prober.CurrentCluster(now).has_value()); +} + TEST(BitrateProberTest, DoesntInitializeProbingForSmallPackets) { const FieldTrialBasedConfig config; BitrateProber prober(config); prober.SetEnabled(true); - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); - prober.OnIncomingPacket(100); - EXPECT_FALSE(prober.IsProbing()); + prober.OnIncomingPacket(DataSize::Bytes(100)); + EXPECT_FALSE(prober.is_probing()); } TEST(BitrateProberTest, VerifyProbeSizeOnHighBitrate) { const FieldTrialBasedConfig config; BitrateProber prober(config); - const DataRate kHighBitrate = DataRate::kbps(10000); // 10 Mbps + const DataRate kHighBitrate = DataRate::KilobitsPerSec(10000); // 10 Mbps - prober.CreateProbeCluster(kHighBitrate, Timestamp::ms(0), /*cluster_id=*/0); + prober.CreateProbeCluster(kHighBitrate, Timestamp::Millis(0), + /*cluster_id=*/0); // Probe size should ensure a minimum of 1 ms interval. EXPECT_GT(prober.RecommendedMinProbeSize(), - (kHighBitrate * TimeDelta::ms(1)).bytes()); + kHighBitrate * TimeDelta::Millis(1)); } TEST(BitrateProberTest, MinumumNumberOfProbingPackets) { @@ -128,88 +185,88 @@ TEST(BitrateProberTest, MinumumNumberOfProbingPackets) { BitrateProber prober(config); // Even when probing at a low bitrate we expect a minimum number // of packets to be sent. 
- const DataRate kBitrate = DataRate::kbps(100); - const int kPacketSizeBytes = 1000; + const DataRate kBitrate = DataRate::KilobitsPerSec(100); + const DataSize kPacketSize = DataSize::Bytes(1000); - Timestamp now = Timestamp::ms(0); + Timestamp now = Timestamp::Millis(0); prober.CreateProbeCluster(kBitrate, now, 0); - prober.OnIncomingPacket(kPacketSizeBytes); + prober.OnIncomingPacket(kPacketSize); for (int i = 0; i < 5; ++i) { - EXPECT_TRUE(prober.IsProbing()); - prober.ProbeSent(now, kPacketSizeBytes); + EXPECT_TRUE(prober.is_probing()); + prober.ProbeSent(now, kPacketSize); } - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); } TEST(BitrateProberTest, ScaleBytesUsedForProbing) { const FieldTrialBasedConfig config; BitrateProber prober(config); - const DataRate kBitrate = DataRate::kbps(10000); // 10 Mbps. - const int kPacketSizeBytes = 1000; - const int kExpectedBytesSent = (kBitrate * TimeDelta::ms(15)).bytes(); + const DataRate kBitrate = DataRate::KilobitsPerSec(10000); // 10 Mbps. + const DataSize kPacketSize = DataSize::Bytes(1000); + const DataSize kExpectedDataSent = kBitrate * TimeDelta::Millis(15); - Timestamp now = Timestamp::ms(0); + Timestamp now = Timestamp::Millis(0); prober.CreateProbeCluster(kBitrate, now, /*cluster_id=*/0); - prober.OnIncomingPacket(kPacketSizeBytes); - int bytes_sent = 0; - while (bytes_sent < kExpectedBytesSent) { - ASSERT_TRUE(prober.IsProbing()); - prober.ProbeSent(now, kPacketSizeBytes); - bytes_sent += kPacketSizeBytes; + prober.OnIncomingPacket(kPacketSize); + DataSize data_sent = DataSize::Zero(); + while (data_sent < kExpectedDataSent) { + ASSERT_TRUE(prober.is_probing()); + prober.ProbeSent(now, kPacketSize); + data_sent += kPacketSize; } - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); } TEST(BitrateProberTest, HighBitrateProbing) { const FieldTrialBasedConfig config; BitrateProber prober(config); - const DataRate kBitrate = DataRate::kbps(1000000); // 1 Gbps. 
- const int kPacketSizeBytes = 1000; - const int kExpectedBytesSent = (kBitrate * TimeDelta::ms(15)).bytes(); + const DataRate kBitrate = DataRate::KilobitsPerSec(1000000); // 1 Gbps. + const DataSize kPacketSize = DataSize::Bytes(1000); + const DataSize kExpectedDataSent = kBitrate * TimeDelta::Millis(15); - Timestamp now = Timestamp::ms(0); + Timestamp now = Timestamp::Millis(0); prober.CreateProbeCluster(kBitrate, now, 0); - prober.OnIncomingPacket(kPacketSizeBytes); - int bytes_sent = 0; - while (bytes_sent < kExpectedBytesSent) { - ASSERT_TRUE(prober.IsProbing()); - prober.ProbeSent(now, kPacketSizeBytes); - bytes_sent += kPacketSizeBytes; + prober.OnIncomingPacket(kPacketSize); + DataSize data_sent = DataSize::Zero(); + while (data_sent < kExpectedDataSent) { + ASSERT_TRUE(prober.is_probing()); + prober.ProbeSent(now, kPacketSize); + data_sent += kPacketSize; } - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); } TEST(BitrateProberTest, ProbeClusterTimeout) { const FieldTrialBasedConfig config; BitrateProber prober(config); - const DataRate kBitrate = DataRate::kbps(300); - const int kSmallPacketSize = 20; + const DataRate kBitrate = DataRate::KilobitsPerSec(300); + const DataSize kSmallPacketSize = DataSize::Bytes(20); // Expecting two probe clusters of 5 packets each. 
- const int kExpectedBytesSent = 20 * 2 * 5; - const TimeDelta kTimeout = TimeDelta::ms(5000); + const DataSize kExpectedDataSent = kSmallPacketSize * 2 * 5; + const TimeDelta kTimeout = TimeDelta::Millis(5000); - Timestamp now = Timestamp::ms(0); + Timestamp now = Timestamp::Millis(0); prober.CreateProbeCluster(kBitrate, now, /*cluster_id=*/0); prober.OnIncomingPacket(kSmallPacketSize); - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); now += kTimeout; prober.CreateProbeCluster(kBitrate / 10, now, /*cluster_id=*/1); prober.OnIncomingPacket(kSmallPacketSize); - EXPECT_FALSE(prober.IsProbing()); - now += TimeDelta::ms(1); + EXPECT_FALSE(prober.is_probing()); + now += TimeDelta::Millis(1); prober.CreateProbeCluster(kBitrate / 10, now, /*cluster_id=*/2); prober.OnIncomingPacket(kSmallPacketSize); - EXPECT_TRUE(prober.IsProbing()); - int bytes_sent = 0; - while (bytes_sent < kExpectedBytesSent) { - ASSERT_TRUE(prober.IsProbing()); + EXPECT_TRUE(prober.is_probing()); + DataSize data_sent = DataSize::Zero(); + while (data_sent < kExpectedDataSent) { + ASSERT_TRUE(prober.is_probing()); prober.ProbeSent(now, kSmallPacketSize); - bytes_sent += kSmallPacketSize; + data_sent += kSmallPacketSize; } - EXPECT_FALSE(prober.IsProbing()); + EXPECT_FALSE(prober.is_probing()); } } // namespace webrtc diff --git a/modules/pacing/paced_sender.cc b/modules/pacing/paced_sender.cc index 3646952728..a0e76761e7 100644 --- a/modules/pacing/paced_sender.cc +++ b/modules/pacing/paced_sender.cc @@ -15,12 +15,14 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/match.h" #include "api/rtc_event_log/rtc_event_log.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -32,18 +34,18 @@ PacedSender::PacedSender(Clock* clock, RtcEventLog* 
event_log, const WebRtcKeyValueConfig* field_trials, ProcessThread* process_thread) - : process_mode_((field_trials != nullptr && - field_trials->Lookup("WebRTC-Pacer-DynamicProcess") - .find("Enabled") == 0) - ? PacingController::ProcessMode::kDynamic - : PacingController::ProcessMode::kPeriodic), + : process_mode_( + (field_trials != nullptr && + absl::StartsWith(field_trials->Lookup("WebRTC-Pacer-DynamicProcess"), + "Enabled")) + ? PacingController::ProcessMode::kDynamic + : PacingController::ProcessMode::kPeriodic), pacing_controller_(clock, - static_cast(this), + packet_router, event_log, field_trials, process_mode_), clock_(clock), - packet_router_(packet_router), process_thread_(process_thread) { if (process_thread_) process_thread_->RegisterModule(&module_proxy_, RTC_FROM_HERE); @@ -113,8 +115,15 @@ void PacedSender::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) { void PacedSender::EnqueuePackets( std::vector> packets) { { + TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "PacedSender::EnqueuePackets"); rtc::CritScope cs(&critsect_); for (auto& packet : packets) { + TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "PacedSender::EnqueuePackets::Loop", "sequence_number", + packet->SequenceNumber(), "rtp_timestamp", + packet->Timestamp()); + pacing_controller_.EnqueuePacket(std::move(packet)); } } @@ -195,19 +204,4 @@ void PacedSender::SetQueueTimeLimit(TimeDelta limit) { MaybeWakupProcessThread(); } -void PacedSender::SendRtpPacket(std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { - critsect_.Leave(); - packet_router_->SendPacket(std::move(packet), cluster_info); - critsect_.Enter(); -} - -std::vector> PacedSender::GeneratePadding( - DataSize size) { - std::vector> padding_packets; - critsect_.Leave(); - padding_packets = packet_router_->GeneratePadding(size.bytes()); - critsect_.Enter(); - return padding_packets; -} } // namespace webrtc diff --git a/modules/pacing/paced_sender.h b/modules/pacing/paced_sender.h index 
16137dfcd6..d255efdc3b 100644 --- a/modules/pacing/paced_sender.h +++ b/modules/pacing/paced_sender.h @@ -32,7 +32,7 @@ #include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/utility/include/process_thread.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -43,8 +43,7 @@ class RtcEventLog; // updating dependencies. class PacedSender : public Module, public RtpPacketPacer, - public RtpPacketSender, - private PacingController::PacketSender { + public RtpPacketSender { public: // Expected max pacer delay in ms. If ExpectedQueueTime() is higher than // this value, the packet producers should wait (eg drop frames rather than @@ -140,14 +139,6 @@ class PacedSender : public Module, // In dynamic process mode, refreshes the next process time. void MaybeWakupProcessThread(); - // Methods implementing PacedSenderController:PacketSender. - void SendRtpPacket(std::unique_ptr packet, - const PacedPacketInfo& cluster_info) override - RTC_EXCLUSIVE_LOCKS_REQUIRED(critsect_); - - std::vector> GeneratePadding( - DataSize size) override RTC_EXCLUSIVE_LOCKS_REQUIRED(critsect_); - // Private implementation of Module to not expose those implementation details // publicly and control when the class is registered/deregistered. 
class ModuleProxy : public Module { @@ -166,12 +157,11 @@ class PacedSender : public Module, PacedSender* const delegate_; } module_proxy_{this}; - rtc::CriticalSection critsect_; + rtc::RecursiveCriticalSection critsect_; const PacingController::ProcessMode process_mode_; PacingController pacing_controller_ RTC_GUARDED_BY(critsect_); Clock* const clock_; - PacketRouter* const packet_router_; ProcessThread* const process_thread_; }; } // namespace webrtc diff --git a/modules/pacing/paced_sender_unittest.cc b/modules/pacing/paced_sender_unittest.cc index 7d1b4cb92b..53cc1c42ed 100644 --- a/modules/pacing/paced_sender_unittest.cc +++ b/modules/pacing/paced_sender_unittest.cc @@ -39,12 +39,15 @@ constexpr size_t kDefaultPacketSize = 234; // Mock callback implementing the raw api. class MockCallback : public PacketRouter { public: - MOCK_METHOD2(SendPacket, - void(std::unique_ptr packet, - const PacedPacketInfo& cluster_info)); - MOCK_METHOD1( - GeneratePadding, - std::vector>(size_t target_size_bytes)); + MOCK_METHOD(void, + SendPacket, + (std::unique_ptr packet, + const PacedPacketInfo& cluster_info), + (override)); + MOCK_METHOD(std::vector>, + GeneratePadding, + (DataSize target_size), + (override)); }; class ProcessModeTrials : public WebRtcKeyValueConfig { @@ -86,21 +89,21 @@ class PacedSenderTest } protected: - std::unique_ptr BuildRtpPacket(RtpPacketToSend::Type type) { + std::unique_ptr BuildRtpPacket(RtpPacketMediaType type) { auto packet = std::make_unique(nullptr); packet->set_packet_type(type); switch (type) { - case RtpPacketToSend::Type::kAudio: + case RtpPacketMediaType::kAudio: packet->SetSsrc(kAudioSsrc); break; - case RtpPacketToSend::Type::kVideo: + case RtpPacketMediaType::kVideo: packet->SetSsrc(kVideoSsrc); break; - case RtpPacketToSend::Type::kRetransmission: - case RtpPacketToSend::Type::kPadding: + case RtpPacketMediaType::kRetransmission: + case RtpPacketMediaType::kPadding: packet->SetSsrc(kVideoRtxSsrc); break; - case 
RtpPacketToSend::Type::kForwardErrorCorrection: + case RtpPacketMediaType::kForwardErrorCorrection: packet->SetSsrc(kFlexFecSsrc); break; } @@ -120,11 +123,12 @@ class PacedSenderTest TEST_P(PacedSenderTest, PacesPackets) { // Insert a number of packets, covering one second. static constexpr size_t kPacketsToSend = 42; - pacer_->SetPacingRates(DataRate::bps(kDefaultPacketSize * 8 * kPacketsToSend), - DataRate::Zero()); + pacer_->SetPacingRates( + DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend), + DataRate::Zero()); std::vector> packets; for (size_t i = 0; i < kPacketsToSend; ++i) { - packets.emplace_back(BuildRtpPacket(RtpPacketToSend::Type::kVideo)); + packets.emplace_back(BuildRtpPacket(RtpPacketMediaType::kVideo)); } pacer_->EnqueuePackets(std::move(packets)); @@ -145,7 +149,7 @@ TEST_P(PacedSenderTest, PacesPackets) { // Packets should be sent over a period of close to 1s. Expect a little lower // than this since initial probing is a bit quicker. TimeDelta duration = clock_.CurrentTime() - start_time; - EXPECT_GT(duration, TimeDelta::ms(900)); + EXPECT_GT(duration, TimeDelta::Millis(900)); } INSTANTIATE_TEST_SUITE_P( diff --git a/modules/pacing/pacing_controller.cc b/modules/pacing/pacing_controller.cc index 1f3849e8e9..5ffbc903b3 100644 --- a/modules/pacing/pacing_controller.cc +++ b/modules/pacing/pacing_controller.cc @@ -15,10 +15,12 @@ #include #include +#include "absl/strings/match.h" #include "modules/pacing/bitrate_prober.h" #include "modules/pacing/interval_budget.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" @@ -26,60 +28,73 @@ namespace webrtc { namespace { // Time limit in milliseconds between packet bursts. 
-constexpr TimeDelta kDefaultMinPacketLimit = TimeDelta::Millis<5>(); -constexpr TimeDelta kCongestedPacketInterval = TimeDelta::Millis<500>(); +constexpr TimeDelta kDefaultMinPacketLimit = TimeDelta::Millis(5); +constexpr TimeDelta kCongestedPacketInterval = TimeDelta::Millis(500); // TODO(sprang): Consider dropping this limit. // The maximum debt level, in terms of time, capped when sending packets. -constexpr TimeDelta kMaxDebtInTime = TimeDelta::Millis<500>(); -constexpr TimeDelta kMaxElapsedTime = TimeDelta::Seconds<2>(); -constexpr DataSize kDefaultPaddingTarget = DataSize::Bytes<50>(); +constexpr TimeDelta kMaxDebtInTime = TimeDelta::Millis(500); +constexpr TimeDelta kMaxElapsedTime = TimeDelta::Seconds(2); // Upper cap on process interval, in case process has not been called in a long -// time. -constexpr TimeDelta kMaxProcessingInterval = TimeDelta::Millis<30>(); +// time. Applies only to periodic mode. +constexpr TimeDelta kMaxProcessingInterval = TimeDelta::Millis(30); + +// Allow probes to be processed slightly ahead of inteded send time. Currently +// set to 1ms as this is intended to allow times be rounded down to the nearest +// millisecond. 
+constexpr TimeDelta kMaxEarlyProbeProcessing = TimeDelta::Millis(1); constexpr int kFirstPriority = 0; bool IsDisabled(const WebRtcKeyValueConfig& field_trials, absl::string_view key) { - return field_trials.Lookup(key).find("Disabled") == 0; + return absl::StartsWith(field_trials.Lookup(key), "Disabled"); } bool IsEnabled(const WebRtcKeyValueConfig& field_trials, absl::string_view key) { - return field_trials.Lookup(key).find("Enabled") == 0; + return absl::StartsWith(field_trials.Lookup(key), "Enabled"); +} + +TimeDelta GetDynamicPaddingTarget(const WebRtcKeyValueConfig& field_trials) { + FieldTrialParameter padding_target("timedelta", + TimeDelta::Millis(5)); + ParseFieldTrial({&padding_target}, + field_trials.Lookup("WebRTC-Pacer-DynamicPaddingTarget")); + return padding_target.Get(); } -int GetPriorityForType(RtpPacketToSend::Type type) { +int GetPriorityForType(RtpPacketMediaType type) { // Lower number takes priority over higher. switch (type) { - case RtpPacketToSend::Type::kAudio: + case RtpPacketMediaType::kAudio: // Audio is always prioritized over other packet types. return kFirstPriority + 1; - case RtpPacketToSend::Type::kRetransmission: + case RtpPacketMediaType::kRetransmission: // Send retransmissions before new media. return kFirstPriority + 2; - case RtpPacketToSend::Type::kVideo: - case RtpPacketToSend::Type::kForwardErrorCorrection: + case RtpPacketMediaType::kVideo: + case RtpPacketMediaType::kForwardErrorCorrection: // Video has "normal" priority, in the old speak. // Send redundancy concurrently to video. If it is delayed it might have a // lower chance of being useful. return kFirstPriority + 3; - case RtpPacketToSend::Type::kPadding: + case RtpPacketMediaType::kPadding: // Packets that are in themselves likely useless, only sent to keep the // BWE high. 
return kFirstPriority + 4; } + RTC_CHECK_NOTREACHED(); } } // namespace const TimeDelta PacingController::kMaxExpectedQueueLength = - TimeDelta::Millis<2000>(); + TimeDelta::Millis(2000); const float PacingController::kDefaultPaceMultiplier = 2.5f; const TimeDelta PacingController::kPausedProcessInterval = kCongestedPacketInterval; -const TimeDelta PacingController::kMinSleepTime = TimeDelta::Millis<1>(); +const TimeDelta PacingController::kMinSleepTime = TimeDelta::Millis(1); PacingController::PacingController(Clock* clock, PacketSender* packet_sender, @@ -98,9 +113,10 @@ PacingController::PacingController(Clock* clock, IsEnabled(*field_trials_, "WebRTC-Pacer-PadInSilence")), pace_audio_(IsEnabled(*field_trials_, "WebRTC-Pacer-BlockAudio")), small_first_probe_packet_( - IsEnabled(*field_trials_, "WebRTC-Pacer-SmallFirstProbePacket")), + !IsDisabled(*field_trials_, "WebRTC-Pacer-SmallFirstProbePacket")), ignore_transport_overhead_( IsEnabled(*field_trials_, "WebRTC-Pacer-IgnoreTransportOverhead")), + padding_target_duration_(GetDynamicPaddingTarget(*field_trials_)), min_packet_limit_(kDefaultMinPacketLimit), transport_overhead_per_packet_(DataSize::Zero()), last_timestamp_(clock_->CurrentTime()), @@ -130,7 +146,7 @@ PacingController::PacingController(Clock* clock, FieldTrialParameter min_packet_limit_ms("", min_packet_limit_.ms()); ParseFieldTrial({&min_packet_limit_ms}, field_trials_->Lookup("WebRTC-Pacer-MinPacketLimitMs")); - min_packet_limit_ = TimeDelta::ms(min_packet_limit_ms.Get()); + min_packet_limit_ = TimeDelta::Millis(min_packet_limit_ms.Get()); UpdateBudgetWithElapsedTime(min_packet_limit_); } @@ -183,6 +199,10 @@ bool PacingController::Congested() const { return false; } +bool PacingController::IsProbing() const { + return prober_.is_probing(); +} + Timestamp PacingController::CurrentTime() const { Timestamp time = clock_->CurrentTime(); if (time < last_timestamp_) { @@ -242,7 +262,7 @@ void PacingController::SetTransportOverhead(DataSize 
overhead_per_packet) { TimeDelta PacingController::ExpectedQueueTime() const { RTC_DCHECK_GT(pacing_bitrate_, DataRate::Zero()); - return TimeDelta::ms( + return TimeDelta::Millis( (QueueSizeData().bytes() * 8 * rtc::kNumMillisecsPerSec) / pacing_bitrate_.bps()); } @@ -275,7 +295,7 @@ TimeDelta PacingController::OldestPacketWaitTime() const { void PacingController::EnqueuePacketInternal( std::unique_ptr packet, int priority) { - prober_.OnIncomingPacket(packet->payload_size()); + prober_.OnIncomingPacket(DataSize::Bytes(packet->payload_size())); // TODO(sprang): Make sure tests respect this, replace with DCHECK. Timestamp now = CurrentTime(); @@ -284,14 +304,17 @@ void PacingController::EnqueuePacketInternal( } if (mode_ == ProcessMode::kDynamic && packet_queue_.Empty() && - media_debt_ == DataSize::Zero()) { - last_process_time_ = CurrentTime(); + NextSendTime() <= now) { + TimeDelta elapsed_time = UpdateTimeAndGetElapsed(now); + UpdateBudgetWithElapsedTime(elapsed_time); } packet_queue_.Push(priority, now, packet_counter_++, std::move(packet)); } TimeDelta PacingController::UpdateTimeAndGetElapsed(Timestamp now) { - if (last_process_time_.IsMinusInfinity()) { + // If no previous processing, or last process was "in the future" because of + // early probe processing, then there is no elapsed time to add budget for. + if (last_process_time_.IsMinusInfinity() || now < last_process_time_) { return TimeDelta::Zero(); } RTC_DCHECK_GE(now, last_process_time_); @@ -320,14 +343,14 @@ bool PacingController::ShouldSendKeepalive(Timestamp now) const { } Timestamp PacingController::NextSendTime() const { - Timestamp now = CurrentTime(); + const Timestamp now = CurrentTime(); if (paused_) { return last_send_time_ + kPausedProcessInterval; } // If probing is active, that always takes priority. - if (prober_.IsProbing()) { + if (prober_.is_probing()) { Timestamp probe_time = prober_.NextProbeTime(now); // |probe_time| == PlusInfinity indicates no probe scheduled. 
if (probe_time != Timestamp::PlusInfinity() && !probing_send_failure_) { @@ -343,31 +366,35 @@ Timestamp PacingController::NextSendTime() const { // In dynamic mode, figure out when the next packet should be sent, // given the current conditions. - if (Congested() || packet_counter_ == 0) { - // If congested, we only send keep-alive or audio (if audio is - // configured in pass-through mode). - if (!pace_audio_ && packet_queue_.NextPacketIsAudio()) { - return now; + if (!pace_audio_) { + // Not pacing audio, if leading packet is audio its target send + // time is the time at which it was enqueued. + absl::optional audio_enqueue_time = + packet_queue_.LeadingAudioPacketEnqueueTime(); + if (audio_enqueue_time.has_value()) { + return *audio_enqueue_time; } + } + if (Congested() || packet_counter_ == 0) { // We need to at least send keep-alive packets with some interval. return last_send_time_ + kCongestedPacketInterval; } - // Check how long until media buffer has drained. We schedule a call - // for when the last packet in the queue drains as otherwise we may - // be late in starting padding. - if (media_rate_ > DataRate::Zero() && - (!packet_queue_.Empty() || !media_debt_.IsZero())) { + // Check how long until we can send the next media packet. + if (media_rate_ > DataRate::Zero() && !packet_queue_.Empty()) { return std::min(last_send_time_ + kPausedProcessInterval, last_process_time_ + media_debt_ / media_rate_); } // If we _don't_ have pending packets, check how long until we have - // bandwidth for padding packets. + // bandwidth for padding packets. Both media and padding debts must + // have been drained to do this. 
if (padding_rate_ > DataRate::Zero() && packet_queue_.Empty()) { + TimeDelta drain_time = + std::max(media_debt_ / media_rate_, padding_debt_ / padding_rate_); return std::min(last_send_time_ + kPausedProcessInterval, - last_process_time_ + padding_debt_ / padding_rate_); + last_process_time_ + drain_time); } if (send_padding_if_silent_) { @@ -381,10 +408,15 @@ void PacingController::ProcessPackets() { Timestamp target_send_time = now; if (mode_ == ProcessMode::kDynamic) { target_send_time = NextSendTime(); + TimeDelta early_execute_margin = + prober_.is_probing() ? kMaxEarlyProbeProcessing : TimeDelta::Zero(); if (target_send_time.IsMinusInfinity()) { target_send_time = now; - } else if (now < target_send_time) { - // We are too early, abort and regroup! + } else if (now < target_send_time - early_execute_margin) { + // We are too early, but if queue is empty still allow draining some debt. + // Probing is allowed to be sent up to kMinSleepTime early. + TimeDelta elapsed_time = UpdateTimeAndGetElapsed(now); + UpdateBudgetWithElapsedTime(elapsed_time); return; } @@ -415,11 +447,14 @@ void PacingController::ProcessPackets() { } else { DataSize keepalive_data_sent = DataSize::Zero(); std::vector> keepalive_packets = - packet_sender_->GeneratePadding(DataSize::bytes(1)); + packet_sender_->GeneratePadding(DataSize::Bytes(1)); for (auto& packet : keepalive_packets) { keepalive_data_sent += - DataSize::bytes(packet->payload_size() + packet->padding_size()); - packet_sender_->SendRtpPacket(std::move(packet), PacedPacketInfo()); + DataSize::Bytes(packet->payload_size() + packet->padding_size()); + packet_sender_->SendPacket(std::move(packet), PacedPacketInfo()); + for (auto& packet : packet_sender_->FetchFec()) { + EnqueuePacket(std::move(packet)); + } } OnPaddingSent(keepalive_data_sent); } @@ -439,7 +474,7 @@ void PacingController::ProcessPackets() { packet_queue_.UpdateQueueTime(now); if (drain_large_queues_) { TimeDelta avg_time_left = - std::max(TimeDelta::ms(1), + 
std::max(TimeDelta::Millis(1), queue_time_limit - packet_queue_.AverageQueueTime()); DataRate min_rate_needed = queue_size_data / avg_time_left; if (min_rate_needed > target_rate) { @@ -462,13 +497,21 @@ void PacingController::ProcessPackets() { } bool first_packet_in_probe = false; - bool is_probing = prober_.IsProbing(); PacedPacketInfo pacing_info; - absl::optional recommended_probe_size; + DataSize recommended_probe_size = DataSize::Zero(); + bool is_probing = prober_.is_probing(); if (is_probing) { - pacing_info = prober_.CurrentCluster(); - first_packet_in_probe = pacing_info.probe_cluster_bytes_sent == 0; - recommended_probe_size = DataSize::bytes(prober_.RecommendedMinProbeSize()); + // Probe timing is sensitive, and handled explicitly by BitrateProber, so + // use actual send time rather than target. + pacing_info = prober_.CurrentCluster(now).value_or(PacedPacketInfo()); + if (pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe) { + first_packet_in_probe = pacing_info.probe_cluster_bytes_sent == 0; + recommended_probe_size = prober_.RecommendedMinProbeSize(); + RTC_DCHECK_GT(recommended_probe_size, DataSize::Zero()); + } else { + // No valid probe cluster returned, probe might have timed out. + is_probing = false; + } } DataSize data_sent = DataSize::Zero(); @@ -479,7 +522,7 @@ void PacingController::ProcessPackets() { if (small_first_probe_packet_ && first_packet_in_probe) { // If first packet in probe, insert a small padding packet so we have a // more reliable start window for the rate estimation. - auto padding = packet_sender_->GeneratePadding(DataSize::bytes(1)); + auto padding = packet_sender_->GeneratePadding(DataSize::Bytes(1)); // If no RTP modules sending media are registered, we may not get a // padding packet back. 
if (!padding.empty()) { @@ -530,22 +573,29 @@ void PacingController::ProcessPackets() { RTC_DCHECK(rtp_packet); RTC_DCHECK(rtp_packet->packet_type().has_value()); - const RtpPacketToSend::Type packet_type = *rtp_packet->packet_type(); - DataSize packet_size = DataSize::bytes(rtp_packet->payload_size() + + const RtpPacketMediaType packet_type = *rtp_packet->packet_type(); + DataSize packet_size = DataSize::Bytes(rtp_packet->payload_size() + rtp_packet->padding_size()); if (include_overhead_) { - packet_size += DataSize::bytes(rtp_packet->headers_size()) + + packet_size += DataSize::Bytes(rtp_packet->headers_size()) + transport_overhead_per_packet_; } - packet_sender_->SendRtpPacket(std::move(rtp_packet), pacing_info); + packet_sender_->SendPacket(std::move(rtp_packet), pacing_info); + for (auto& packet : packet_sender_->FetchFec()) { + EnqueuePacket(std::move(packet)); + } data_sent += packet_size; // Send done, update send/process time to the target send time. OnPacketSent(packet_type, packet_size, target_send_time); - if (recommended_probe_size && data_sent > *recommended_probe_size) + + // If we are currently probing, we need to stop the send loop when we have + // reached the send target. 
+ if (is_probing && data_sent >= recommended_probe_size) { break; + } if (mode_ == ProcessMode::kDynamic) { // Update target send time in case that are more packets that we are late @@ -559,17 +609,18 @@ void PacingController::ProcessPackets() { } } + last_process_time_ = std::max(last_process_time_, previous_process_time); + if (is_probing) { probing_send_failure_ = data_sent == DataSize::Zero(); if (!probing_send_failure_) { - prober_.ProbeSent(CurrentTime(), data_sent.bytes()); + prober_.ProbeSent(CurrentTime(), data_sent); } } } -DataSize PacingController::PaddingToAdd( - absl::optional recommended_probe_size, - DataSize data_sent) const { +DataSize PacingController::PaddingToAdd(DataSize recommended_probe_size, + DataSize data_sent) const { if (!packet_queue_.Empty()) { // Actual payload available, no need to add padding. return DataSize::Zero(); @@ -586,18 +637,18 @@ DataSize PacingController::PaddingToAdd( return DataSize::Zero(); } - if (recommended_probe_size) { - if (*recommended_probe_size > data_sent) { - return *recommended_probe_size - data_sent; + if (!recommended_probe_size.IsZero()) { + if (recommended_probe_size > data_sent) { + return recommended_probe_size - data_sent; } return DataSize::Zero(); } if (mode_ == ProcessMode::kPeriodic) { - return DataSize::bytes(padding_budget_.bytes_remaining()); + return DataSize::Bytes(padding_budget_.bytes_remaining()); } else if (padding_rate_ > DataRate::Zero() && padding_debt_ == DataSize::Zero()) { - return kDefaultPaddingTarget; + return padding_target_duration_ * padding_rate_; } return DataSize::Zero(); } @@ -613,7 +664,8 @@ std::unique_ptr PacingController::GetPendingPacket( // First, check if there is any reason _not_ to send the next queued packet. // Unpaced audio packets and probes are exempted from send checks. 
- bool unpaced_audio_packet = !pace_audio_ && packet_queue_.NextPacketIsAudio(); + bool unpaced_audio_packet = + !pace_audio_ && packet_queue_.LeadingAudioPacketEnqueueTime().has_value(); bool is_probe = pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe; if (!unpaced_audio_packet && !is_probe) { if (Congested()) { @@ -643,13 +695,13 @@ std::unique_ptr PacingController::GetPendingPacket( return packet_queue_.Pop(); } -void PacingController::OnPacketSent(RtpPacketToSend::Type packet_type, +void PacingController::OnPacketSent(RtpPacketMediaType packet_type, DataSize packet_size, Timestamp send_time) { if (!first_sent_packet_time_) { first_sent_packet_time_ = send_time; } - bool audio_packet = packet_type == RtpPacketToSend::Type::kAudio; + bool audio_packet = packet_type == RtpPacketMediaType::kAudio; if (!audio_packet || account_for_audio_) { // Update media bytes sent. UpdateBudgetWithSentData(packet_size); @@ -662,8 +714,9 @@ void PacingController::OnPaddingSent(DataSize data_sent) { if (data_sent > DataSize::Zero()) { UpdateBudgetWithSentData(data_sent); } - last_send_time_ = CurrentTime(); - last_process_time_ = CurrentTime(); + Timestamp now = CurrentTime(); + last_send_time_ = now; + last_process_time_ = now; } void PacingController::UpdateBudgetWithElapsedTime(TimeDelta delta) { diff --git a/modules/pacing/pacing_controller.h b/modules/pacing/pacing_controller.h index c1b3942dfa..6e0f9bd5b2 100644 --- a/modules/pacing/pacing_controller.h +++ b/modules/pacing/pacing_controller.h @@ -29,8 +29,8 @@ #include "modules/pacing/round_robin_packet_queue.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/rtp_rtcp/include/rtp_packet_sender.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/critical_section.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/thread_annotations.h" @@ -54,8 +54,10 @@ class PacingController { class PacketSender 
{ public: virtual ~PacketSender() = default; - virtual void SendRtpPacket(std::unique_ptr packet, - const PacedPacketInfo& cluster_info) = 0; + virtual void SendPacket(std::unique_ptr packet, + const PacedPacketInfo& cluster_info) = 0; + // Should be called after each call to SendPacket(). + virtual std::vector> FetchFec() = 0; virtual std::vector> GeneratePadding( DataSize size) = 0; }; @@ -145,6 +147,8 @@ class PacingController { bool Congested() const; + bool IsProbing() const; + private: void EnqueuePacketInternal(std::unique_ptr packet, int priority); @@ -155,14 +159,14 @@ class PacingController { void UpdateBudgetWithElapsedTime(TimeDelta delta); void UpdateBudgetWithSentData(DataSize size); - DataSize PaddingToAdd(absl::optional recommended_probe_size, + DataSize PaddingToAdd(DataSize recommended_probe_size, DataSize data_sent) const; std::unique_ptr GetPendingPacket( const PacedPacketInfo& pacing_info, Timestamp target_send_time, Timestamp now); - void OnPacketSent(RtpPacketToSend::Type packet_type, + void OnPacketSent(RtpPacketMediaType packet_type, DataSize packet_size, Timestamp send_time); void OnPaddingSent(DataSize padding_sent); @@ -180,6 +184,9 @@ class PacingController { const bool pace_audio_; const bool small_first_probe_packet_; const bool ignore_transport_overhead_; + // In dynamic mode, indicates the target size when requesting padding, + // expressed as a duration in order to adjust for varying padding rate. 
+ const TimeDelta padding_target_duration_; TimeDelta min_packet_limit_; diff --git a/modules/pacing/pacing_controller_unittest.cc b/modules/pacing/pacing_controller_unittest.cc index 2e4e564b7e..a953d5b439 100644 --- a/modules/pacing/pacing_controller_unittest.cc +++ b/modules/pacing/pacing_controller_unittest.cc @@ -20,6 +20,7 @@ #include "api/units/data_rate.h" #include "modules/pacing/packet_router.h" #include "system_wrappers/include/clock.h" +#include "test/explicit_key_value_config.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -33,13 +34,13 @@ using ::testing::Return; namespace webrtc { namespace test { namespace { -constexpr DataRate kFirstClusterRate = DataRate::KilobitsPerSec<900>(); -constexpr DataRate kSecondClusterRate = DataRate::KilobitsPerSec<1800>(); +constexpr DataRate kFirstClusterRate = DataRate::KilobitsPerSec(900); +constexpr DataRate kSecondClusterRate = DataRate::KilobitsPerSec(1800); // The error stems from truncating the time interval of probe packets to integer // values. This results in probing slightly higher than the target bitrate. // For 1.8 Mbps, this comes to be about 120 kbps with 1200 probe packets. -constexpr DataRate kProbingErrorMargin = DataRate::KilobitsPerSec<150>(); +constexpr DataRate kProbingErrorMargin = DataRate::KilobitsPerSec(150); const float kPaceMultiplier = 2.5f; @@ -48,9 +49,9 @@ constexpr uint32_t kVideoSsrc = 234565; constexpr uint32_t kVideoRtxSsrc = 34567; constexpr uint32_t kFlexFecSsrc = 45678; -constexpr DataRate kTargetRate = DataRate::KilobitsPerSec<800>(); +constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(800); -std::unique_ptr BuildPacket(RtpPacketToSend::Type type, +std::unique_ptr BuildPacket(RtpPacketMediaType type, uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms, @@ -69,12 +70,12 @@ std::unique_ptr BuildPacket(RtpPacketToSend::Type type, // methods that focus on core aspects. 
class MockPacingControllerCallback : public PacingController::PacketSender { public: - void SendRtpPacket(std::unique_ptr packet, - const PacedPacketInfo& cluster_info) override { + void SendPacket(std::unique_ptr packet, + const PacedPacketInfo& cluster_info) override { SendPacket(packet->Ssrc(), packet->SequenceNumber(), packet->capture_time_ms(), - packet->packet_type() == RtpPacketToSend::Type::kRetransmission, - packet->packet_type() == RtpPacketToSend::Type::kPadding); + packet->packet_type() == RtpPacketMediaType::kRetransmission, + packet->packet_type() == RtpPacketMediaType::kPadding); } std::vector> GeneratePadding( @@ -84,30 +85,43 @@ class MockPacingControllerCallback : public PacingController::PacketSender { if (padding_size > 0) { auto packet = std::make_unique(nullptr); packet->SetPayloadSize(padding_size); - packet->set_packet_type(RtpPacketToSend::Type::kPadding); + packet->set_packet_type(RtpPacketMediaType::kPadding); ret.emplace_back(std::move(packet)); } return ret; } - MOCK_METHOD5(SendPacket, - void(uint32_t ssrc, - uint16_t sequence_number, - int64_t capture_timestamp, - bool retransmission, - bool padding)); - MOCK_METHOD1(SendPadding, size_t(size_t target_size)); + MOCK_METHOD(void, + SendPacket, + (uint32_t ssrc, + uint16_t sequence_number, + int64_t capture_timestamp, + bool retransmission, + bool padding)); + MOCK_METHOD(std::vector>, + FetchFec, + (), + (override)); + MOCK_METHOD(size_t, SendPadding, (size_t target_size)); }; // Mock callback implementing the raw api. 
class MockPacketSender : public PacingController::PacketSender { public: - MOCK_METHOD2(SendRtpPacket, - void(std::unique_ptr packet, - const PacedPacketInfo& cluster_info)); - MOCK_METHOD1( - GeneratePadding, - std::vector>(DataSize target_size)); + MOCK_METHOD(void, + SendPacket, + (std::unique_ptr packet, + const PacedPacketInfo& cluster_info), + (override)); + MOCK_METHOD(std::vector>, + FetchFec, + (), + (override)); + + MOCK_METHOD(std::vector>, + GeneratePadding, + (DataSize target_size), + (override)); }; class PacingControllerPadding : public PacingController::PacketSender { @@ -116,11 +130,15 @@ class PacingControllerPadding : public PacingController::PacketSender { PacingControllerPadding() : padding_sent_(0), total_bytes_sent_(0) {} - void SendRtpPacket(std::unique_ptr packet, - const PacedPacketInfo& pacing_info) override { + void SendPacket(std::unique_ptr packet, + const PacedPacketInfo& pacing_info) override { total_bytes_sent_ += packet->payload_size(); } + std::vector> FetchFec() override { + return {}; + } + std::vector> GeneratePadding( DataSize target_size) override { size_t num_packets = @@ -129,7 +147,7 @@ class PacingControllerPadding : public PacingController::PacketSender { for (size_t i = 0; i < num_packets; ++i) { packets.emplace_back(std::make_unique(nullptr)); packets.back()->SetPadding(kPaddingPacketSize); - packets.back()->set_packet_type(RtpPacketToSend::Type::kPadding); + packets.back()->set_packet_type(RtpPacketMediaType::kPadding); padding_sent_ += kPaddingPacketSize; } return packets; @@ -147,25 +165,30 @@ class PacingControllerProbing : public PacingController::PacketSender { public: PacingControllerProbing() : packets_sent_(0), padding_sent_(0) {} - void SendRtpPacket(std::unique_ptr packet, - const PacedPacketInfo& pacing_info) override { - if (packet->packet_type() != RtpPacketToSend::Type::kPadding) { + void SendPacket(std::unique_ptr packet, + const PacedPacketInfo& pacing_info) override { + if (packet->packet_type() != 
RtpPacketMediaType::kPadding) { ++packets_sent_; } + last_pacing_info_ = pacing_info; + } + + std::vector> FetchFec() override { + return {}; } std::vector> GeneratePadding( DataSize target_size) override { // From RTPSender: // Max in the RFC 3550 is 255 bytes, we limit it to be modulus 32 for SRTP. - const DataSize kMaxPadding = DataSize::bytes(224); + const DataSize kMaxPadding = DataSize::Bytes(224); std::vector> packets; while (target_size > DataSize::Zero()) { DataSize padding_size = std::min(kMaxPadding, target_size); packets.emplace_back(std::make_unique(nullptr)); packets.back()->SetPadding(padding_size.bytes()); - packets.back()->set_packet_type(RtpPacketToSend::Type::kPadding); + packets.back()->set_packet_type(RtpPacketMediaType::kPadding); padding_sent_ += padding_size.bytes(); target_size -= padding_size; } @@ -173,18 +196,22 @@ class PacingControllerProbing : public PacingController::PacketSender { } int packets_sent() const { return packets_sent_; } - int padding_sent() const { return padding_sent_; } + int total_packets_sent() const { return packets_sent_ + padding_sent_; } + PacedPacketInfo last_pacing_info() const { return last_pacing_info_; } private: int packets_sent_; int padding_sent_; + PacedPacketInfo last_pacing_info_; }; class PacingControllerTest : public ::testing::TestWithParam { protected: - PacingControllerTest() : clock_(123456) { + PacingControllerTest() : clock_(123456) {} + + void SetUp() override { srand(0); // Need to initialize PacingController after we initialize clock. 
pacer_ = std::make_unique(&clock_, &callback_, nullptr, @@ -208,7 +235,7 @@ class PacingControllerTest clock_.AdvanceTime(TimeUntilNextProcess()); } - void Send(RtpPacketToSend::Type type, + void Send(RtpPacketMediaType type, uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms, @@ -217,34 +244,33 @@ class PacingControllerTest BuildPacket(type, ssrc, sequence_number, capture_time_ms, size)); } - void SendAndExpectPacket(RtpPacketToSend::Type type, + void SendAndExpectPacket(RtpPacketMediaType type, uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms, size_t size) { Send(type, ssrc, sequence_number, capture_time_ms, size); - EXPECT_CALL( - callback_, - SendPacket(ssrc, sequence_number, capture_time_ms, - type == RtpPacketToSend::Type::kRetransmission, false)) + EXPECT_CALL(callback_, + SendPacket(ssrc, sequence_number, capture_time_ms, + type == RtpPacketMediaType::kRetransmission, false)) .Times(1); } - std::unique_ptr BuildRtpPacket(RtpPacketToSend::Type type) { + std::unique_ptr BuildRtpPacket(RtpPacketMediaType type) { auto packet = std::make_unique(nullptr); packet->set_packet_type(type); switch (type) { - case RtpPacketToSend::Type::kAudio: + case RtpPacketMediaType::kAudio: packet->SetSsrc(kAudioSsrc); break; - case RtpPacketToSend::Type::kVideo: + case RtpPacketMediaType::kVideo: packet->SetSsrc(kVideoSsrc); break; - case RtpPacketToSend::Type::kRetransmission: - case RtpPacketToSend::Type::kPadding: + case RtpPacketMediaType::kRetransmission: + case RtpPacketMediaType::kPadding: packet->SetSsrc(kVideoRtxSsrc); break; - case RtpPacketToSend::Type::kForwardErrorCorrection: + case RtpPacketMediaType::kForwardErrorCorrection: packet->SetSsrc(kFlexFecSsrc); break; } @@ -279,8 +305,8 @@ class PacingControllerTest const size_t packets_to_send_per_interval = kTargetRate.bps() * kPaceMultiplier / (8 * kPacketSize * 200); for (size_t i = 0; i < packets_to_send_per_interval; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, kSsrc, - 
sequence_number++, capture_time_ms, kPacketSize); + SendAndExpectPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number++, + capture_time_ms, kPacketSize); } while (pacer_->QueueSizePackets() > 0) { @@ -294,7 +320,7 @@ class PacingControllerTest } SimulatedClock clock_; - MockPacingControllerCallback callback_; + ::testing::NiceMock callback_; std::unique_ptr pacer_; }; @@ -302,7 +328,7 @@ class PacingControllerFieldTrialTest : public ::testing::TestWithParam { protected: struct MediaStream { - const RtpPacketToSend::Type type; + const RtpPacketMediaType type; const uint32_t ssrc; const size_t packet_size; uint16_t seq_num; @@ -318,7 +344,7 @@ class PacingControllerFieldTrialTest } void ProcessNext(PacingController* pacer) { if (GetParam() == PacingController::ProcessMode::kPeriodic) { - TimeDelta process_interval = TimeDelta::ms(5); + TimeDelta process_interval = TimeDelta::Millis(5); clock_.AdvanceTime(process_interval); pacer->ProcessPackets(); return; @@ -330,9 +356,9 @@ class PacingControllerFieldTrialTest clock_.AdvanceTime(wait_time); pacer->ProcessPackets(); } - MediaStream audio{/*type*/ RtpPacketToSend::Type::kAudio, + MediaStream audio{/*type*/ RtpPacketMediaType::kAudio, /*ssrc*/ 3333, /*packet_size*/ 100, /*seq_num*/ 1000}; - MediaStream video{/*type*/ RtpPacketToSend::Type::kVideo, + MediaStream video{/*type*/ RtpPacketMediaType::kVideo, /*ssrc*/ 4444, /*packet_size*/ 1000, /*seq_num*/ 1000}; SimulatedClock clock_; MockPacingControllerCallback callback_; @@ -371,8 +397,8 @@ TEST_P(PacingControllerFieldTrialTest, CongestionWindowAffectsAudioInTrial) { ScopedFieldTrials trial("WebRTC-Pacer-BlockAudio/Enabled/"); EXPECT_CALL(callback_, SendPadding).Times(0); PacingController pacer(&clock_, &callback_, nullptr, nullptr, GetParam()); - pacer.SetPacingRates(DataRate::kbps(10000), DataRate::Zero()); - pacer.SetCongestionWindow(DataSize::bytes(video.packet_size - 100)); + pacer.SetPacingRates(DataRate::KilobitsPerSec(10000), DataRate::Zero()); + 
pacer.SetCongestionWindow(DataSize::Bytes(video.packet_size - 100)); pacer.UpdateOutstandingData(DataSize::Zero()); // Video packet fills congestion window. InsertPacket(&pacer, &video); @@ -398,8 +424,8 @@ TEST_P(PacingControllerFieldTrialTest, DefaultCongestionWindowDoesNotAffectAudio) { EXPECT_CALL(callback_, SendPadding).Times(0); PacingController pacer(&clock_, &callback_, nullptr, nullptr, GetParam()); - pacer.SetPacingRates(DataRate::bps(10000000), DataRate::Zero()); - pacer.SetCongestionWindow(DataSize::bytes(800)); + pacer.SetPacingRates(DataRate::BitsPerSec(10000000), DataRate::Zero()); + pacer.SetCongestionWindow(DataSize::Bytes(800)); pacer.UpdateOutstandingData(DataSize::Zero()); // Video packet fills congestion window. InsertPacket(&pacer, &video); @@ -414,8 +440,8 @@ TEST_P(PacingControllerFieldTrialTest, TEST_P(PacingControllerFieldTrialTest, BudgetAffectsAudioInTrial) { ScopedFieldTrials trial("WebRTC-Pacer-BlockAudio/Enabled/"); PacingController pacer(&clock_, &callback_, nullptr, nullptr, GetParam()); - DataRate pacing_rate = - DataRate::bps(video.packet_size / 3 * 8 * kProcessIntervalsPerSecond); + DataRate pacing_rate = DataRate::BitsPerSec(video.packet_size / 3 * 8 * + kProcessIntervalsPerSecond); pacer.SetPacingRates(pacing_rate, DataRate::Zero()); // Video fills budget for following process periods. InsertPacket(&pacer, &video); @@ -433,20 +459,20 @@ TEST_P(PacingControllerFieldTrialTest, BudgetAffectsAudioInTrial) { ProcessNext(&pacer); } const TimeDelta expected_wait_time = - DataSize::bytes(video.packet_size) / pacing_rate; + DataSize::Bytes(video.packet_size) / pacing_rate; // Verify delay is near expectation, within timing margin. EXPECT_LT(((wait_end_time - wait_start_time) - expected_wait_time).Abs(), GetParam() == PacingController::ProcessMode::kPeriodic - ? TimeDelta::ms(5) + ? 
TimeDelta::Millis(5) : PacingController::kMinSleepTime); } TEST_P(PacingControllerFieldTrialTest, DefaultBudgetDoesNotAffectAudio) { EXPECT_CALL(callback_, SendPadding).Times(0); PacingController pacer(&clock_, &callback_, nullptr, nullptr, GetParam()); - pacer.SetPacingRates( - DataRate::bps(video.packet_size / 3 * 8 * kProcessIntervalsPerSecond), - DataRate::Zero()); + pacer.SetPacingRates(DataRate::BitsPerSec(video.packet_size / 3 * 8 * + kProcessIntervalsPerSecond), + DataRate::Zero()); // Video fills budget for following process periods. InsertPacket(&pacer, &video); EXPECT_CALL(callback_, SendPacket).Times(1); @@ -472,7 +498,7 @@ TEST_P(PacingControllerTest, FirstSentPacketTimeIsSet) { EXPECT_FALSE(pacer_->FirstSentPacketTime().has_value()); for (size_t i = 0; i < kPacketToSend; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, kSsrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number++, clock_.TimeInMilliseconds(), kSizeBytes); clock_.AdvanceTime(TimeUntilNextProcess()); pacer_->ProcessPackets(); @@ -494,14 +520,14 @@ TEST_P(PacingControllerTest, QueuePacket) { const size_t kPacketsToSend = kTargetRate.bps() * kPaceMultiplier / (8 * 250 * 200); for (size_t i = 0; i < kPacketsToSend; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), 250); } EXPECT_CALL(callback_, SendPadding).Times(0); // Enqueue one extra packet. int64_t queued_packet_timestamp = clock_.TimeInMilliseconds(); - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number, queued_packet_timestamp, 250); EXPECT_EQ(kPacketsToSend + 1, pacer_->QueueSizePackets()); @@ -522,10 +548,10 @@ TEST_P(PacingControllerTest, QueuePacket) { // We can send packets_to_send -1 packets of size 250 during the current // interval since one packet has already been sent. 
for (size_t i = 0; i < kPacketsToSend - 1; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), 250); } - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), 250); EXPECT_EQ(kPacketsToSend, pacer_->QueueSizePackets()); pacer_->ProcessPackets(); @@ -540,8 +566,8 @@ TEST_P(PacingControllerTest, QueueAndPacePackets) { const uint32_t kSsrc = 12345; uint16_t sequence_number = 1234; - const DataSize kPackeSize = DataSize::bytes(250); - const TimeDelta kSendInterval = TimeDelta::ms(5); + const DataSize kPackeSize = DataSize::Bytes(250); + const TimeDelta kSendInterval = TimeDelta::Millis(5); // Due to the multiplicative factor we can send 5 packets during a 5ms send // interval. (send interval * network capacity * multiplier / packet size) @@ -549,14 +575,14 @@ TEST_P(PacingControllerTest, QueueAndPacePackets) { kPaceMultiplier / kPackeSize.bytes(); for (size_t i = 0; i < kPacketsToSend; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, kSsrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number++, clock_.TimeInMilliseconds(), kPackeSize.bytes()); } EXPECT_CALL(callback_, SendPadding).Times(0); // Enqueue one extra packet. 
int64_t queued_packet_timestamp = clock_.TimeInMilliseconds(); - Send(RtpPacketToSend::Type::kVideo, kSsrc, sequence_number, + Send(RtpPacketMediaType::kVideo, kSsrc, sequence_number, queued_packet_timestamp, kPackeSize.bytes()); EXPECT_EQ(kPacketsToSend + 1, pacer_->QueueSizePackets()); @@ -587,12 +613,12 @@ TEST_P(PacingControllerTest, PaceQueuedPackets) { const size_t packets_to_send_per_interval = kTargetRate.bps() * kPaceMultiplier / (8 * kPacketSize * 200); for (size_t i = 0; i < packets_to_send_per_interval; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } for (size_t j = 0; j < packets_to_send_per_interval * 10; ++j) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } EXPECT_EQ(packets_to_send_per_interval + packets_to_send_per_interval * 10, @@ -610,7 +636,7 @@ TEST_P(PacingControllerTest, PaceQueuedPackets) { EXPECT_CALL(callback_, SendPacket(ssrc, _, _, false, false)) .Times(pacer_->QueueSizePackets()); const TimeDelta expected_pace_time = - DataSize::bytes(pacer_->QueueSizePackets() * kPacketSize) / + DataSize::Bytes(pacer_->QueueSizePackets() * kPacketSize) / (kPaceMultiplier * kTargetRate); Timestamp start_time = clock_.CurrentTime(); while (pacer_->QueueSizePackets() > 0) { @@ -622,9 +648,9 @@ TEST_P(PacingControllerTest, PaceQueuedPackets) { } } const TimeDelta actual_pace_time = clock_.CurrentTime() - start_time; - EXPECT_LT( - (actual_pace_time - expected_pace_time).Abs(), - PeriodicProcess() ? TimeDelta::ms(5) : PacingController::kMinSleepTime); + EXPECT_LT((actual_pace_time - expected_pace_time).Abs(), + PeriodicProcess() ? 
TimeDelta::Millis(5) + : PacingController::kMinSleepTime); EXPECT_EQ(0u, pacer_->QueueSizePackets()); clock_.AdvanceTime(TimeUntilNextProcess()); @@ -633,7 +659,7 @@ TEST_P(PacingControllerTest, PaceQueuedPackets) { // Send some more packet, just show that we can..? for (size_t i = 0; i < packets_to_send_per_interval; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), 250); } EXPECT_EQ(packets_to_send_per_interval, pacer_->QueueSizePackets()); @@ -654,10 +680,10 @@ TEST_P(PacingControllerTest, RepeatedRetransmissionsAllowed) { constexpr uint16_t sequence_number = 444; constexpr size_t bytes = 250; bool is_retransmission = (i != 0); // Original followed by retransmissions. - SendAndExpectPacket( - is_retransmission ? RtpPacketToSend::Type::kRetransmission - : RtpPacketToSend::Type::kVideo, - ssrc, sequence_number, clock_.TimeInMilliseconds(), bytes); + SendAndExpectPacket(is_retransmission ? RtpPacketMediaType::kRetransmission + : RtpPacketMediaType::kVideo, + ssrc, sequence_number, clock_.TimeInMilliseconds(), + bytes); clock_.AdvanceTimeMilliseconds(5); } if (PeriodicProcess()) { @@ -674,11 +700,11 @@ TEST_P(PacingControllerTest, uint32_t ssrc = 12345; uint16_t sequence_number = 1234; - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number, clock_.TimeInMilliseconds(), 250); // Expect packet on second ssrc to be queued and sent as well. 
- SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc + 1, sequence_number, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc + 1, sequence_number, clock_.TimeInMilliseconds(), 250); clock_.AdvanceTimeMilliseconds(1000); @@ -715,12 +741,11 @@ TEST_P(PacingControllerTest, Padding) { } else { const size_t kPacketsToSend = 20; for (size_t i = 0; i < kPacketsToSend; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, - sequence_number++, clock_.TimeInMilliseconds(), - kPacketSize); + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, + clock_.TimeInMilliseconds(), kPacketSize); } const TimeDelta expected_pace_time = - DataSize::bytes(pacer_->QueueSizePackets() * kPacketSize) / + DataSize::Bytes(pacer_->QueueSizePackets() * kPacketSize) / (kPaceMultiplier * kTargetRate); EXPECT_CALL(callback_, SendPadding).Times(0); // Only the media packets should be sent. @@ -756,7 +781,7 @@ TEST_P(PacingControllerTest, Padding) { // Don't count bytes of last packet, instead just // use this as the time the last packet finished // sending. 
- padding_sent += DataSize::bytes(target_size); + padding_sent += DataSize::Bytes(target_size); } if (first_send_time.IsInfinite()) { first_send_time = clock_.CurrentTime(); @@ -794,7 +819,7 @@ TEST_P(PacingControllerTest, NoPaddingBeforeNormalPacket) { uint16_t sequence_number = 1234; int64_t capture_time_ms = 56789; - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, capture_time_ms, 250); bool padding_sent = false; EXPECT_CALL(callback_, SendPadding).WillOnce([&](size_t padding) { @@ -826,7 +851,7 @@ TEST_P(PacingControllerTest, VerifyPaddingUpToBitrate) { int64_t start_time = clock_.TimeInMilliseconds(); while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, capture_time_ms, 250); EXPECT_CALL(callback_, SendPadding(250)).WillOnce(Return(250)); EXPECT_CALL(callback_, SendPacket(_, _, _, _, true)).Times(1); @@ -840,7 +865,7 @@ TEST_P(PacingControllerTest, VerifyAverageBitrateVaryingMediaPayload) { uint16_t sequence_number = 1234; int64_t capture_time_ms = 56789; const int kTimeStep = 5; - const TimeDelta kAveragingWindowLength = TimeDelta::seconds(10); + const TimeDelta kAveragingWindowLength = TimeDelta::Seconds(10); PacingControllerPadding callback; pacer_ = std::make_unique(&clock_, &callback, nullptr, nullptr, GetParam()); @@ -856,8 +881,8 @@ TEST_P(PacingControllerTest, VerifyAverageBitrateVaryingMediaPayload) { media_bytes < (kTargetRate * (clock_.CurrentTime() - start_time)).bytes()) { size_t media_payload = rand_value % 400 + 800; // [400, 1200] bytes. 
- Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, - capture_time_ms, media_payload); + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, capture_time_ms, + media_payload); media_bytes += media_payload; } @@ -871,7 +896,7 @@ TEST_P(PacingControllerTest, VerifyAverageBitrateVaryingMediaPayload) { EXPECT_NEAR( kTargetRate.bps(), - (DataSize::bytes(callback.total_bytes_sent()) / kAveragingWindowLength) + (DataSize::Bytes(callback.total_bytes_sent()) / kAveragingWindowLength) .bps(), (kTargetRate * 0.01 /* 1% error marging */).bps()); } @@ -886,16 +911,16 @@ TEST_P(PacingControllerTest, Priority) { ConsumeInitialBudget(); // Expect normal and low priority to be queued and high to pass through. - Send(RtpPacketToSend::Type::kVideo, ssrc_low_priority, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc_low_priority, sequence_number++, capture_time_ms_low_priority, 250); const size_t packets_to_send_per_interval = kTargetRate.bps() * kPaceMultiplier / (8 * 250 * 200); for (size_t i = 0; i < packets_to_send_per_interval; ++i) { - Send(RtpPacketToSend::Type::kRetransmission, ssrc, sequence_number++, + Send(RtpPacketMediaType::kRetransmission, ssrc, sequence_number++, capture_time_ms, 250); } - Send(RtpPacketToSend::Type::kAudio, ssrc, sequence_number++, capture_time_ms, + Send(RtpPacketMediaType::kAudio, ssrc, sequence_number++, capture_time_ms, 250); // Expect all high and normal priority to be sent out first. @@ -941,9 +966,9 @@ TEST_P(PacingControllerTest, RetransmissionPriority) { // Alternate retransmissions and normal packets. 
for (size_t i = 0; i < packets_to_send_per_interval; ++i) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, - capture_time_ms, 250); - Send(RtpPacketToSend::Type::kRetransmission, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, capture_time_ms, + 250); + Send(RtpPacketMediaType::kRetransmission, ssrc, sequence_number++, capture_time_ms_retransmission, 250); } EXPECT_EQ(2 * packets_to_send_per_interval, pacer_->QueueSizePackets()); @@ -994,7 +1019,7 @@ TEST_P(PacingControllerTest, HighPrioDoesntAffectBudget) { // a high number of them at once. const size_t kNumAudioPackets = 25; for (size_t i = 0; i < kNumAudioPackets; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kAudio, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kAudio, ssrc, sequence_number++, capture_time_ms, kPacketSize); } pacer_->ProcessPackets(); @@ -1005,7 +1030,7 @@ TEST_P(PacingControllerTest, HighPrioDoesntAffectBudget) { const size_t kPacketsToSendPerInterval = kTargetRate.bps() * kPaceMultiplier / (8 * kPacketSize * 200); for (size_t i = 0; i < kPacketsToSendPerInterval; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } @@ -1023,7 +1048,7 @@ TEST_P(PacingControllerTest, HighPrioDoesntAffectBudget) { // Measure pacing time. Expect only low-prio packets to affect this. TimeDelta pacing_time = clock_.CurrentTime() - start_time; TimeDelta expected_pacing_time = - DataSize::bytes(kPacketsToSendPerInterval * kPacketSize) / + DataSize::Bytes(kPacketsToSendPerInterval * kPacketSize) / (kTargetRate * kPaceMultiplier); EXPECT_NEAR(pacing_time.us(), expected_pacing_time.us(), PeriodicProcess() ? 
5000.0 @@ -1037,11 +1062,11 @@ TEST_P(PacingControllerTest, SendsOnlyPaddingWhenCongested) { int kCongestionWindow = kPacketSize * 10; pacer_->UpdateOutstandingData(DataSize::Zero()); - pacer_->SetCongestionWindow(DataSize::bytes(kCongestionWindow)); + pacer_->SetCongestionWindow(DataSize::Bytes(kCongestionWindow)); int sent_data = 0; while (sent_data < kCongestionWindow) { sent_data += kPacketSize; - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); AdvanceTimeAndProcess(); } @@ -1052,7 +1077,7 @@ TEST_P(PacingControllerTest, SendsOnlyPaddingWhenCongested) { size_t blocked_packets = 0; int64_t expected_time_until_padding = 500; while (expected_time_until_padding > 5) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); blocked_packets++; clock_.AdvanceTimeMilliseconds(5); @@ -1075,33 +1100,34 @@ TEST_P(PacingControllerTest, DoesNotAllowOveruseAfterCongestion) { EXPECT_CALL(callback_, SendPadding).Times(0); // The pacing rate is low enough that the budget should not allow two packets // to be sent in a row. - pacer_->SetPacingRates(DataRate::bps(400 * 8 * 1000 / 5), DataRate::Zero()); + pacer_->SetPacingRates(DataRate::BitsPerSec(400 * 8 * 1000 / 5), + DataRate::Zero()); // The congestion window is small enough to only let one packet through. - pacer_->SetCongestionWindow(DataSize::bytes(800)); + pacer_->SetCongestionWindow(DataSize::Bytes(800)); pacer_->UpdateOutstandingData(DataSize::Zero()); // Not yet budget limited or congested, packet is sent. 
- Send(RtpPacketToSend::Type::kVideo, ssrc, seq_num++, now_ms(), size); + Send(RtpPacketMediaType::kVideo, ssrc, seq_num++, now_ms(), size); EXPECT_CALL(callback_, SendPacket).Times(1); clock_.AdvanceTimeMilliseconds(5); pacer_->ProcessPackets(); // Packet blocked due to congestion. - Send(RtpPacketToSend::Type::kVideo, ssrc, seq_num++, now_ms(), size); + Send(RtpPacketMediaType::kVideo, ssrc, seq_num++, now_ms(), size); EXPECT_CALL(callback_, SendPacket).Times(0); clock_.AdvanceTimeMilliseconds(5); pacer_->ProcessPackets(); // Packet blocked due to congestion. - Send(RtpPacketToSend::Type::kVideo, ssrc, seq_num++, now_ms(), size); + Send(RtpPacketMediaType::kVideo, ssrc, seq_num++, now_ms(), size); EXPECT_CALL(callback_, SendPacket).Times(0); clock_.AdvanceTimeMilliseconds(5); pacer_->ProcessPackets(); // Congestion removed and budget has recovered, packet is sent. - Send(RtpPacketToSend::Type::kVideo, ssrc, seq_num++, now_ms(), size); + Send(RtpPacketMediaType::kVideo, ssrc, seq_num++, now_ms(), size); EXPECT_CALL(callback_, SendPacket).Times(1); clock_.AdvanceTimeMilliseconds(5); pacer_->UpdateOutstandingData(DataSize::Zero()); pacer_->ProcessPackets(); // Should be blocked due to budget limitation as congestion has be removed. 
- Send(RtpPacketToSend::Type::kVideo, ssrc, seq_num++, now_ms(), size); + Send(RtpPacketMediaType::kVideo, ssrc, seq_num++, now_ms(), size); EXPECT_CALL(callback_, SendPacket).Times(0); clock_.AdvanceTimeMilliseconds(5); pacer_->ProcessPackets(); @@ -1116,11 +1142,11 @@ TEST_P(PacingControllerTest, ResumesSendingWhenCongestionEnds) { int64_t kCongestionTimeMs = 1000; pacer_->UpdateOutstandingData(DataSize::Zero()); - pacer_->SetCongestionWindow(DataSize::bytes(kCongestionWindow)); + pacer_->SetCongestionWindow(DataSize::Bytes(kCongestionWindow)); int sent_data = 0; while (sent_data < kCongestionWindow) { sent_data += kPacketSize; - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); clock_.AdvanceTimeMilliseconds(5); pacer_->ProcessPackets(); @@ -1129,7 +1155,7 @@ TEST_P(PacingControllerTest, ResumesSendingWhenCongestionEnds) { EXPECT_CALL(callback_, SendPacket).Times(0); int unacked_packets = 0; for (int duration = 0; duration < kCongestionTimeMs; duration += 5) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); unacked_packets++; clock_.AdvanceTimeMilliseconds(5); @@ -1142,7 +1168,7 @@ TEST_P(PacingControllerTest, ResumesSendingWhenCongestionEnds) { int ack_count = kCongestionCount / 2; EXPECT_CALL(callback_, SendPacket(ssrc, _, _, false, _)).Times(ack_count); pacer_->UpdateOutstandingData( - DataSize::bytes(kCongestionWindow - kPacketSize * ack_count)); + DataSize::Bytes(kCongestionWindow - kPacketSize * ack_count)); for (int duration = 0; duration < kCongestionTimeMs; duration += 5) { clock_.AdvanceTimeMilliseconds(5); @@ -1178,26 +1204,26 @@ TEST_P(PacingControllerTest, Pause) { const size_t packets_to_send_per_interval = kTargetRate.bps() * kPaceMultiplier / (8 * 250 * 200); for (size_t i = 0; i < 
packets_to_send_per_interval; ++i) { - Send(RtpPacketToSend::Type::kVideo, ssrc_low_priority, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc_low_priority, sequence_number++, capture_time_ms, 250); - Send(RtpPacketToSend::Type::kRetransmission, ssrc, sequence_number++, + Send(RtpPacketMediaType::kRetransmission, ssrc, sequence_number++, capture_time_ms, 250); - Send(RtpPacketToSend::Type::kAudio, ssrc_high_priority, sequence_number++, + Send(RtpPacketMediaType::kAudio, ssrc_high_priority, sequence_number++, capture_time_ms, 250); } clock_.AdvanceTimeMilliseconds(10000); int64_t second_capture_time_ms = clock_.TimeInMilliseconds(); for (size_t i = 0; i < packets_to_send_per_interval; ++i) { - Send(RtpPacketToSend::Type::kVideo, ssrc_low_priority, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc_low_priority, sequence_number++, second_capture_time_ms, 250); - Send(RtpPacketToSend::Type::kRetransmission, ssrc, sequence_number++, + Send(RtpPacketMediaType::kRetransmission, ssrc, sequence_number++, second_capture_time_ms, 250); - Send(RtpPacketToSend::Type::kAudio, ssrc_high_priority, sequence_number++, + Send(RtpPacketMediaType::kAudio, ssrc_high_priority, sequence_number++, second_capture_time_ms, 250); } // Expect everything to be queued. - EXPECT_EQ(TimeDelta::ms(second_capture_time_ms - capture_time_ms), + EXPECT_EQ(TimeDelta::Millis(second_capture_time_ms - capture_time_ms), pacer_->OldestPacketWaitTime()); // Process triggers keep-alive packet. @@ -1208,7 +1234,7 @@ TEST_P(PacingControllerTest, Pause) { pacer_->ProcessPackets(); // Verify no packets sent for the rest of the paused process interval. 
- const TimeDelta kProcessInterval = TimeDelta::ms(5); + const TimeDelta kProcessInterval = TimeDelta::Millis(5); TimeDelta expected_time_until_send = PacingController::kPausedProcessInterval; EXPECT_CALL(callback_, SendPadding).Times(0); while (expected_time_until_send >= kProcessInterval) { @@ -1303,7 +1329,7 @@ TEST_P(PacingControllerTest, InactiveFromStart) { (GetParam() == PacingController::ProcessMode::kDynamic ? PacingController::kMinSleepTime : TimeDelta::Zero()) + - TimeDelta::us(1); + TimeDelta::Micros(1); EXPECT_EQ(pacer_->NextSendTime() - start_time, PacingController::kPausedProcessInterval); @@ -1326,16 +1352,16 @@ TEST_P(PacingControllerTest, ExpectedQueueTimeMs) { const int32_t kMaxBitrate = kPaceMultiplier * 30000; EXPECT_EQ(TimeDelta::Zero(), pacer_->OldestPacketWaitTime()); - pacer_->SetPacingRates(DataRate::bps(30000 * kPaceMultiplier), + pacer_->SetPacingRates(DataRate::BitsPerSec(30000 * kPaceMultiplier), DataRate::Zero()); for (size_t i = 0; i < kNumPackets; ++i) { - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } // Queue in ms = 1000 * (bytes in queue) *8 / (bits per second) TimeDelta queue_time = - TimeDelta::ms(1000 * kNumPackets * kPacketSize * 8 / kMaxBitrate); + TimeDelta::Millis(1000 * kNumPackets * kPacketSize * 8 / kMaxBitrate); EXPECT_EQ(queue_time, pacer_->ExpectedQueueTime()); const Timestamp time_start = clock_.CurrentTime(); @@ -1351,7 +1377,7 @@ TEST_P(PacingControllerTest, ExpectedQueueTimeMs) { const TimeDelta deviation = duration - PacingController::kMaxExpectedQueueLength; EXPECT_LT(deviation.Abs(), - TimeDelta::ms(1000 * kPacketSize * 8 / kMaxBitrate)); + TimeDelta::Millis(1000 * kPacketSize * 8 / kMaxBitrate)); } TEST_P(PacingControllerTest, QueueTimeGrowsOverTime) { @@ -1359,13 +1385,13 @@ TEST_P(PacingControllerTest, QueueTimeGrowsOverTime) { uint16_t sequence_number = 1234; 
EXPECT_EQ(TimeDelta::Zero(), pacer_->OldestPacketWaitTime()); - pacer_->SetPacingRates(DataRate::bps(30000 * kPaceMultiplier), + pacer_->SetPacingRates(DataRate::BitsPerSec(30000 * kPaceMultiplier), DataRate::Zero()); - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number, clock_.TimeInMilliseconds(), 1200); clock_.AdvanceTimeMilliseconds(500); - EXPECT_EQ(TimeDelta::ms(500), pacer_->OldestPacketWaitTime()); + EXPECT_EQ(TimeDelta::Millis(500), pacer_->OldestPacketWaitTime()); pacer_->ProcessPackets(); EXPECT_EQ(TimeDelta::Zero(), pacer_->OldestPacketWaitTime()); } @@ -1383,11 +1409,12 @@ TEST_P(PacingControllerTest, ProbingWithInsertedPackets) { /*cluster_id=*/0); pacer_->CreateProbeCluster(kSecondClusterRate, /*cluster_id=*/1); - pacer_->SetPacingRates(DataRate::bps(kInitialBitrateBps * kPaceMultiplier), - DataRate::Zero()); + pacer_->SetPacingRates( + DataRate::BitsPerSec(kInitialBitrateBps * kPaceMultiplier), + DataRate::Zero()); for (int i = 0; i < 10; ++i) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } @@ -1402,7 +1429,8 @@ TEST_P(PacingControllerTest, ProbingWithInsertedPackets) { EXPECT_NEAR((packets_sent - 1) * kPacketSize * 8000 / (clock_.TimeInMilliseconds() - start), kFirstClusterRate.bps(), kProbingErrorMargin.bps()); - EXPECT_EQ(0, packet_sender.padding_sent()); + // Probing always starts with a small padding packet. 
+ EXPECT_EQ(1, packet_sender.padding_sent()); clock_.AdvanceTime(TimeUntilNextProcess()); start = clock_.TimeInMilliseconds(); @@ -1420,62 +1448,119 @@ TEST_P(PacingControllerTest, ProbingWithInsertedPackets) { TEST_P(PacingControllerTest, SkipsProbesWhenProcessIntervalTooLarge) { const size_t kPacketSize = 1200; const int kInitialBitrateBps = 300000; - uint32_t ssrc = 12346; - uint16_t sequence_number = 1234; + const uint32_t ssrc = 12346; + const int kProbeClusterId = 3; - PacingControllerProbing packet_sender; - pacer_ = std::make_unique(&clock_, &packet_sender, nullptr, - nullptr, GetParam()); - pacer_->SetPacingRates(DataRate::bps(kInitialBitrateBps * kPaceMultiplier), - DataRate::Zero()); + // Test with both legacy and new probe discard modes. + // TODO(bugs.webrtc.org/11780): Clean up when legacy is gone. + for (bool abort_delayed_probes : {false, true}) { + uint16_t sequence_number = 1234; - for (int i = 0; i < 10; ++i) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + PacingControllerProbing packet_sender; + + const test::ExplicitKeyValueConfig trials( + abort_delayed_probes ? "WebRTC-Bwe-ProbingBehavior/" + "abort_delayed_probes:1,max_probe_delay:2ms/" + : "WebRTC-Bwe-ProbingBehavior/" + "abort_delayed_probes:0,max_probe_delay:2ms/"); + pacer_ = std::make_unique(&clock_, &packet_sender, + nullptr, &trials, GetParam()); + pacer_->SetPacingRates( + DataRate::BitsPerSec(kInitialBitrateBps * kPaceMultiplier), + DataRate::BitsPerSec(kInitialBitrateBps)); + + for (int i = 0; i < 10; ++i) { + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, + clock_.TimeInMilliseconds(), kPacketSize); + } + while (pacer_->QueueSizePackets() > 0) { + clock_.AdvanceTime(TimeUntilNextProcess()); + pacer_->ProcessPackets(); + } + + // Probe at a very high rate. + pacer_->CreateProbeCluster(DataRate::KilobitsPerSec(10000), // 10 Mbps. + /*cluster_id=*/kProbeClusterId); + // We need one packet to start the probe. 
+ Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); - } - while (pacer_->QueueSizePackets() > 0) { + const int packets_sent_before_probe = packet_sender.packets_sent(); clock_.AdvanceTime(TimeUntilNextProcess()); pacer_->ProcessPackets(); - } + EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 1); - // Probe at a very high rate. - pacer_->CreateProbeCluster(DataRate::kbps(10000), // 10 Mbps. - /*cluster_id=*/3); - // We need one packet to start the probe. - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, - clock_.TimeInMilliseconds(), kPacketSize); - const int packets_sent_before_probe = packet_sender.packets_sent(); - clock_.AdvanceTime(TimeUntilNextProcess()); - pacer_->ProcessPackets(); - EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 1); + // Figure out how long between probe packets. + Timestamp start_time = clock_.CurrentTime(); + clock_.AdvanceTime(TimeUntilNextProcess()); + TimeDelta time_between_probes = clock_.CurrentTime() - start_time; + // Advance that distance again + 1ms. + clock_.AdvanceTime(time_between_probes); - // Figure out how long between probe packets. - Timestamp start_time = clock_.CurrentTime(); - clock_.AdvanceTime(TimeUntilNextProcess()); - TimeDelta time_between_probes = clock_.CurrentTime() - start_time; - // Advance that distance again + 1ms. - clock_.AdvanceTime(time_between_probes); + // Send second probe packet. + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, + clock_.TimeInMilliseconds(), kPacketSize); + pacer_->ProcessPackets(); + EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 2); + PacedPacketInfo last_pacing_info = packet_sender.last_pacing_info(); + EXPECT_EQ(last_pacing_info.probe_cluster_id, kProbeClusterId); + + // We're exactly where we should be for the next probe. 
+ const Timestamp probe_time = clock_.CurrentTime(); + EXPECT_EQ(pacer_->NextSendTime(), clock_.CurrentTime()); + + BitrateProberConfig probing_config(&trials); + EXPECT_GT(probing_config.max_probe_delay.Get(), TimeDelta::Zero()); + // Advance to within max probe delay, should still return same target. + clock_.AdvanceTime(probing_config.max_probe_delay.Get()); + EXPECT_EQ(pacer_->NextSendTime(), probe_time); + + // Too high probe delay, drop it! + clock_.AdvanceTime(TimeDelta::Micros(1)); + + int packets_sent_before_timeout = packet_sender.total_packets_sent(); + if (abort_delayed_probes) { + // Expected next process time is unchanged, but calling should not + // generate new packets. + EXPECT_EQ(pacer_->NextSendTime(), probe_time); + pacer_->ProcessPackets(); + EXPECT_EQ(packet_sender.total_packets_sent(), + packets_sent_before_timeout); - // Send second probe packet. - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, - clock_.TimeInMilliseconds(), kPacketSize); - pacer_->ProcessPackets(); - EXPECT_EQ(packet_sender.packets_sent(), packets_sent_before_probe + 2); - - // We're exactly where we should be for the next probe. - const Timestamp probe_time = clock_.CurrentTime(); - EXPECT_EQ(pacer_->NextSendTime(), clock_.CurrentTime()); - - FieldTrialBasedConfig field_trial_config; - BitrateProberConfig probing_config(&field_trial_config); - EXPECT_GT(probing_config.max_probe_delay.Get(), TimeDelta::Zero()); - // Advance to within max probe delay, should still return same target. - clock_.AdvanceTime(probing_config.max_probe_delay.Get()); - EXPECT_EQ(pacer_->NextSendTime(), probe_time); - - // Too high probe delay, drop it! - clock_.AdvanceTime(TimeDelta::us(1)); - EXPECT_GT(pacer_->NextSendTime(), probe_time); + // Next packet sent is not part of probe. 
+ if (PeriodicProcess()) { + do { + AdvanceTimeAndProcess(); + } while (packet_sender.total_packets_sent() == + packets_sent_before_timeout); + } else { + AdvanceTimeAndProcess(); + } + const int expected_probe_id = PacedPacketInfo::kNotAProbe; + EXPECT_EQ(packet_sender.last_pacing_info().probe_cluster_id, + expected_probe_id); + } else { + // Legacy behaviour, probe "aborted" so send time moved back. Next call to + // ProcessPackets() still results in packets being marked as part of probe + // cluster. + EXPECT_GT(pacer_->NextSendTime(), probe_time); + AdvanceTimeAndProcess(); + EXPECT_GT(packet_sender.total_packets_sent(), + packets_sent_before_timeout); + const int expected_probe_id = last_pacing_info.probe_cluster_id; + EXPECT_EQ(packet_sender.last_pacing_info().probe_cluster_id, + expected_probe_id); + + // Time between sent packets keeps being too large, but we still mark the + // packets as being part of the cluster. + Timestamp a = clock_.CurrentTime(); + AdvanceTimeAndProcess(); + EXPECT_GT(packet_sender.total_packets_sent(), + packets_sent_before_timeout); + EXPECT_EQ(packet_sender.last_pacing_info().probe_cluster_id, + expected_probe_id); + EXPECT_GT(clock_.CurrentTime() - a, time_between_probes); + } + } } TEST_P(PacingControllerTest, ProbingWithPaddingSupport) { @@ -1489,11 +1574,12 @@ TEST_P(PacingControllerTest, ProbingWithPaddingSupport) { nullptr, GetParam()); pacer_->CreateProbeCluster(kFirstClusterRate, /*cluster_id=*/0); - pacer_->SetPacingRates(DataRate::bps(kInitialBitrateBps * kPaceMultiplier), - DataRate::Zero()); + pacer_->SetPacingRates( + DataRate::BitsPerSec(kInitialBitrateBps * kPaceMultiplier), + DataRate::Zero()); for (int i = 0; i < 3; ++i) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } @@ -1522,22 +1608,22 @@ TEST_P(PacingControllerTest, PaddingOveruse) { // Initially no padding rate. 
pacer_->ProcessPackets(); - pacer_->SetPacingRates(DataRate::bps(60000 * kPaceMultiplier), + pacer_->SetPacingRates(DataRate::BitsPerSec(60000 * kPaceMultiplier), DataRate::Zero()); - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); pacer_->ProcessPackets(); // Add 30kbit padding. When increasing budget, media budget will increase from // negative (overuse) while padding budget will increase from 0. clock_.AdvanceTimeMilliseconds(5); - pacer_->SetPacingRates(DataRate::bps(60000 * kPaceMultiplier), - DataRate::bps(30000)); + pacer_->SetPacingRates(DataRate::BitsPerSec(60000 * kPaceMultiplier), + DataRate::BitsPerSec(30000)); - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); - EXPECT_LT(TimeDelta::ms(5), pacer_->ExpectedQueueTime()); + EXPECT_LT(TimeDelta::Millis(5), pacer_->ExpectedQueueTime()); // Don't send padding if queue is non-empty, even if padding budget > 0. EXPECT_CALL(callback_, SendPadding).Times(0); if (PeriodicProcess()) { @@ -1561,13 +1647,13 @@ TEST_P(PacingControllerTest, ProbeClusterId) { pacer_->SetPacingRates(kTargetRate * kPaceMultiplier, kTargetRate); pacer_->SetProbingEnabled(true); for (int i = 0; i < 10; ++i) { - Send(RtpPacketToSend::Type::kVideo, ssrc, sequence_number++, + Send(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } // First probing cluster. EXPECT_CALL(callback, - SendRtpPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 0))) + SendPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 0))) .Times(5); for (int i = 0; i < 5; ++i) { @@ -1576,7 +1662,7 @@ TEST_P(PacingControllerTest, ProbeClusterId) { // Second probing cluster. 
EXPECT_CALL(callback, - SendRtpPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 1))) + SendPacket(_, Field(&PacedPacketInfo::probe_cluster_id, 1))) .Times(5); for (int i = 0; i < 5; ++i) { @@ -1589,12 +1675,12 @@ TEST_P(PacingControllerTest, ProbeClusterId) { EXPECT_CALL(callback, GeneratePadding).WillOnce([&](DataSize padding_size) { std::vector> padding_packets; padding_packets.emplace_back( - BuildPacket(RtpPacketToSend::Type::kPadding, ssrc, sequence_number++, + BuildPacket(RtpPacketMediaType::kPadding, ssrc, sequence_number++, clock_.TimeInMilliseconds(), padding_size.bytes())); return padding_packets; }); bool non_probe_packet_seen = false; - EXPECT_CALL(callback, SendRtpPacket) + EXPECT_CALL(callback, SendPacket) .WillOnce([&](std::unique_ptr packet, const PacedPacketInfo& cluster_info) { EXPECT_EQ(cluster_info.probe_cluster_id, kNotAProbe); @@ -1614,34 +1700,33 @@ TEST_P(PacingControllerTest, OwnedPacketPrioritizedOnType) { // Insert a packet of each type, from low to high priority. Since priority // is weighted higher than insert order, these should come out of the pacer // in backwards order with the exception of FEC and Video. 
- for (RtpPacketToSend::Type type : - {RtpPacketToSend::Type::kPadding, - RtpPacketToSend::Type::kForwardErrorCorrection, - RtpPacketToSend::Type::kVideo, RtpPacketToSend::Type::kRetransmission, - RtpPacketToSend::Type::kAudio}) { + for (RtpPacketMediaType type : + {RtpPacketMediaType::kPadding, + RtpPacketMediaType::kForwardErrorCorrection, RtpPacketMediaType::kVideo, + RtpPacketMediaType::kRetransmission, RtpPacketMediaType::kAudio}) { pacer_->EnqueuePacket(BuildRtpPacket(type)); } ::testing::InSequence seq; EXPECT_CALL( callback, - SendRtpPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kAudioSsrc)), _)); - EXPECT_CALL(callback, - SendRtpPacket( - Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _)); + SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kAudioSsrc)), _)); + EXPECT_CALL( + callback, + SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _)); // FEC and video actually have the same priority, so will come out in // insertion order. - EXPECT_CALL(callback, - SendRtpPacket( - Pointee(Property(&RtpPacketToSend::Ssrc, kFlexFecSsrc)), _)); EXPECT_CALL( callback, - SendRtpPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoSsrc)), _)); + SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kFlexFecSsrc)), _)); + EXPECT_CALL( + callback, + SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoSsrc)), _)); - EXPECT_CALL(callback, - SendRtpPacket( - Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _)); + EXPECT_CALL( + callback, + SendPacket(Pointee(Property(&RtpPacketToSend::Ssrc, kVideoRtxSsrc)), _)); while (pacer_->QueueSizePackets() > 0) { if (PeriodicProcess()) { @@ -1654,7 +1739,6 @@ TEST_P(PacingControllerTest, OwnedPacketPrioritizedOnType) { } TEST_P(PacingControllerTest, SmallFirstProbePacket) { - ScopedFieldTrials trial("WebRTC-Pacer-SmallFirstProbePacket/Enabled/"); MockPacketSender callback; pacer_ = std::make_unique(&clock_, &callback, nullptr, nullptr, GetParam()); @@ -1662,28 +1746,28 @@ 
TEST_P(PacingControllerTest, SmallFirstProbePacket) { pacer_->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero()); // Add high prio media. - pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketToSend::Type::kAudio)); + pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketMediaType::kAudio)); // Expect small padding packet to be requested. - EXPECT_CALL(callback, GeneratePadding(DataSize::bytes(1))) + EXPECT_CALL(callback, GeneratePadding(DataSize::Bytes(1))) .WillOnce([&](DataSize padding_size) { std::vector> padding_packets; padding_packets.emplace_back( - BuildPacket(RtpPacketToSend::Type::kPadding, kAudioSsrc, 1, + BuildPacket(RtpPacketMediaType::kPadding, kAudioSsrc, 1, clock_.TimeInMilliseconds(), 1)); return padding_packets; }); size_t packets_sent = 0; bool media_seen = false; - EXPECT_CALL(callback, SendRtpPacket) + EXPECT_CALL(callback, SendPacket) .Times(::testing::AnyNumber()) .WillRepeatedly([&](std::unique_ptr packet, const PacedPacketInfo& cluster_info) { if (packets_sent == 0) { - EXPECT_EQ(packet->packet_type(), RtpPacketToSend::Type::kPadding); + EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding); } else { - if (packet->packet_type() == RtpPacketToSend::Type::kAudio) { + if (packet->packet_type() == RtpPacketMediaType::kAudio) { media_seen = true; } } @@ -1702,38 +1786,38 @@ TEST_P(PacingControllerTest, TaskLate) { } // Set a low send rate to more easily test timing issues. - DataRate kSendRate = DataRate::kbps(30); + DataRate kSendRate = DataRate::KilobitsPerSec(30); pacer_->SetPacingRates(kSendRate, DataRate::Zero()); // Add four packets of equal size and priority. 
- pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketToSend::Type::kVideo)); - pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketToSend::Type::kVideo)); - pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketToSend::Type::kVideo)); - pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketToSend::Type::kVideo)); + pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketMediaType::kVideo)); + pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketMediaType::kVideo)); + pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketMediaType::kVideo)); + pacer_->EnqueuePacket(BuildRtpPacket(RtpPacketMediaType::kVideo)); // Process packets, only first should be sent. EXPECT_CALL(callback_, SendPacket).Times(1); pacer_->ProcessPackets(); Timestamp next_send_time = pacer_->NextSendTime(); + // Determine time between packets (ca 62ms) const TimeDelta time_between_packets = next_send_time - clock_.CurrentTime(); // Simulate a late process call, executed just before we allow sending the // fourth packet. - clock_.AdvanceTime((time_between_packets * 3) - - (PacingController::kMinSleepTime + TimeDelta::ms(1))); + const TimeDelta kOffset = TimeDelta::Millis(1); + clock_.AdvanceTime((time_between_packets * 3) - kOffset); EXPECT_CALL(callback_, SendPacket).Times(2); pacer_->ProcessPackets(); - // Check that next scheduled send time is within sleep-time + 1ms. + // Check that next scheduled send time is in ca 1ms. next_send_time = pacer_->NextSendTime(); - EXPECT_LE(next_send_time - clock_.CurrentTime(), - PacingController::kMinSleepTime + TimeDelta::ms(1)); + const TimeDelta time_left = next_send_time - clock_.CurrentTime(); + EXPECT_EQ(time_left.RoundTo(TimeDelta::Millis(1)), kOffset); - // Advance to within error margin for execution. 
- clock_.AdvanceTime(TimeDelta::ms(1)); - EXPECT_CALL(callback_, SendPacket).Times(1); + clock_.AdvanceTime(time_left); + EXPECT_CALL(callback_, SendPacket); pacer_->ProcessPackets(); } @@ -1744,14 +1828,14 @@ TEST_P(PacingControllerTest, NoProbingWhilePaused) { pacer_->SetProbingEnabled(true); // Send at least one packet so probing can initate. - SendAndExpectPacket(RtpPacketToSend::Type::kVideo, ssrc, sequence_number, + SendAndExpectPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number, clock_.TimeInMilliseconds(), 250); while (pacer_->QueueSizePackets() > 0) { AdvanceTimeAndProcess(); } // Trigger probing. - pacer_->CreateProbeCluster(DataRate::kbps(10000), // 10 Mbps. + pacer_->CreateProbeCluster(DataRate::KilobitsPerSec(10000), // 10 Mbps. /*cluster_id=*/3); // Time to next send time should be small. @@ -1766,6 +1850,291 @@ TEST_P(PacingControllerTest, NoProbingWhilePaused) { PacingController::kPausedProcessInterval); } +TEST_P(PacingControllerTest, AudioNotPacedEvenWhenAccountedFor) { + const uint32_t kSsrc = 12345; + uint16_t sequence_number = 1234; + const size_t kPacketSize = 123; + + // Account for audio - so that audio packets can cause pushback on other + // types such as video. Audio packet should still be immediated passed + // through though ("WebRTC-Pacer-BlockAudio" needs to be enabled in order + // to pace audio packets). + pacer_->SetAccountForAudioPackets(true); + + // Set pacing rate to 1 packet/s, no padding. + pacer_->SetPacingRates(DataSize::Bytes(kPacketSize) / TimeDelta::Seconds(1), + DataRate::Zero()); + + // Add and send an audio packet. + SendAndExpectPacket(RtpPacketMediaType::kAudio, kSsrc, sequence_number++, + clock_.TimeInMilliseconds(), kPacketSize); + pacer_->ProcessPackets(); + + // Advance time, add another audio packet and process. It should be sent + // immediately. 
+ clock_.AdvanceTimeMilliseconds(5); + SendAndExpectPacket(RtpPacketMediaType::kAudio, kSsrc, sequence_number++, + clock_.TimeInMilliseconds(), kPacketSize); + pacer_->ProcessPackets(); +} + +TEST_P(PacingControllerTest, + PaddingResumesAfterSaturationEvenWithConcurrentAudio) { + const uint32_t kSsrc = 12345; + const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125); + const DataRate kPaddingDataRate = DataRate::KilobitsPerSec(100); + const TimeDelta kMaxBufferInTime = TimeDelta::Millis(500); + const DataSize kPacketSize = DataSize::Bytes(130); + const TimeDelta kAudioPacketInterval = TimeDelta::Millis(20); + + // In this test, we fist send a burst of video in order to saturate the + // padding debt level. + // We then proceed to send audio at a bitrate that is slightly lower than + // the padding rate, meaning there will be a period with audio but no + // padding sent while the debt is draining, then audio and padding will + // be interlieved. + + // Verify both with and without accounting for audio. + for (bool account_for_audio : {false, true}) { + uint16_t sequence_number = 1234; + MockPacketSender callback; + EXPECT_CALL(callback, SendPacket).Times(::testing::AnyNumber()); + pacer_ = std::make_unique(&clock_, &callback, nullptr, + nullptr, GetParam()); + pacer_->SetAccountForAudioPackets(account_for_audio); + + // First, saturate the padding budget. 
+ pacer_->SetPacingRates(kPacingDataRate, kPaddingDataRate); + + const TimeDelta kPaddingSaturationTime = + kMaxBufferInTime * kPaddingDataRate / + (kPacingDataRate - kPaddingDataRate); + const DataSize kVideoToSend = kPaddingSaturationTime * kPacingDataRate; + const DataSize kVideoPacketSize = DataSize::Bytes(1200); + DataSize video_sent = DataSize::Zero(); + while (video_sent < kVideoToSend) { + pacer_->EnqueuePacket( + BuildPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number++, + clock_.TimeInMilliseconds(), kVideoPacketSize.bytes())); + video_sent += kVideoPacketSize; + } + while (pacer_->QueueSizePackets() > 0) { + AdvanceTimeAndProcess(); + } + + // Add a stream of audio packets at a rate slightly lower than the padding + // rate, once the padding debt is paid off we expect padding to be + // generated. + pacer_->SetPacingRates(kPacingDataRate, kPaddingDataRate); + bool padding_seen = false; + EXPECT_CALL(callback, GeneratePadding).WillOnce([&](DataSize padding_size) { + padding_seen = true; + std::vector> padding_packets; + padding_packets.emplace_back( + BuildPacket(RtpPacketMediaType::kPadding, kSsrc, sequence_number++, + clock_.TimeInMilliseconds(), padding_size.bytes())); + return padding_packets; + }); + + Timestamp start_time = clock_.CurrentTime(); + Timestamp last_audio_time = start_time; + while (!padding_seen) { + Timestamp now = clock_.CurrentTime(); + Timestamp next_send_time = pacer_->NextSendTime(); + TimeDelta sleep_time = + std::min(next_send_time, last_audio_time + kAudioPacketInterval) - + now; + clock_.AdvanceTime(sleep_time); + while (clock_.CurrentTime() >= last_audio_time + kAudioPacketInterval) { + pacer_->EnqueuePacket( + BuildPacket(RtpPacketMediaType::kAudio, kSsrc, sequence_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes())); + last_audio_time += kAudioPacketInterval; + } + pacer_->ProcessPackets(); + } + + // Verify how long it took to drain the padding debt. Allow 2% error margin. 
+ const DataRate kAudioDataRate = kPacketSize / kAudioPacketInterval; + const TimeDelta expected_drain_time = + account_for_audio ? (kMaxBufferInTime * kPaddingDataRate / + (kPaddingDataRate - kAudioDataRate)) + : kMaxBufferInTime; + const TimeDelta actual_drain_time = clock_.CurrentTime() - start_time; + EXPECT_NEAR(actual_drain_time.ms(), expected_drain_time.ms(), + expected_drain_time.ms() * 0.02) + << " where account_for_audio = " + << (account_for_audio ? "true" : "false"); + } +} + +TEST_P(PacingControllerTest, AccountsForAudioEnqueuTime) { + if (PeriodicProcess()) { + // This test applies only when NOT using interval budget. + return; + } + + const uint32_t kSsrc = 12345; + const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125); + const DataRate kPaddingDataRate = DataRate::Zero(); + const DataSize kPacketSize = DataSize::Bytes(130); + const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate; + + uint32_t sequnce_number = 1; + // Audio not paced, but still accounted for in budget. + pacer_->SetAccountForAudioPackets(true); + pacer_->SetPacingRates(kPacingDataRate, kPaddingDataRate); + + // Enqueue two audio packets, advance clock to where one packet + // should have drained the buffer already, has they been sent + // immediately. + SendAndExpectPacket(RtpPacketMediaType::kAudio, kSsrc, sequnce_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes()); + SendAndExpectPacket(RtpPacketMediaType::kAudio, kSsrc, sequnce_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes()); + clock_.AdvanceTime(kPacketPacingTime); + // Now process and make sure both packets were sent. + pacer_->ProcessPackets(); + ::testing::Mock::VerifyAndClearExpectations(&callback_); + + // Add a video packet. I can't be sent until debt from audio + // packets have been drained. 
+ Send(RtpPacketMediaType::kVideo, kSsrc + 1, sequnce_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes()); + EXPECT_EQ(pacer_->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime); +} + +TEST_P(PacingControllerTest, NextSendTimeAccountsForPadding) { + if (PeriodicProcess()) { + // This test applies only when NOT using interval budget. + return; + } + + const uint32_t kSsrc = 12345; + const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125); + const DataSize kPacketSize = DataSize::Bytes(130); + const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate; + + uint32_t sequnce_number = 1; + + // Start with no padding. + pacer_->SetPacingRates(kPacingDataRate, DataRate::Zero()); + + // Send a single packet. + SendAndExpectPacket(RtpPacketMediaType::kVideo, kSsrc, sequnce_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes()); + pacer_->ProcessPackets(); + ::testing::Mock::VerifyAndClearExpectations(&callback_); + + // With current conditions, no need to wake until next keep-alive. + EXPECT_EQ(pacer_->NextSendTime() - clock_.CurrentTime(), + PacingController::kPausedProcessInterval); + + // Enqueue a new packet, that can't be sent until previous buffer has + // drained. + SendAndExpectPacket(RtpPacketMediaType::kVideo, kSsrc, sequnce_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes()); + EXPECT_EQ(pacer_->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime); + clock_.AdvanceTime(kPacketPacingTime); + pacer_->ProcessPackets(); + ::testing::Mock::VerifyAndClearExpectations(&callback_); + + // With current conditions, again no need to wake until next keep-alive. + EXPECT_EQ(pacer_->NextSendTime() - clock_.CurrentTime(), + PacingController::kPausedProcessInterval); + + // Set a non-zero padding rate. Padding also can't be sent until + // previous debt has cleared. Since padding was disabled before, there + // currently is no padding debt. 
+ pacer_->SetPacingRates(kPacingDataRate, kPacingDataRate / 2); + EXPECT_EQ(pacer_->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime); + + // Advance time, expect padding. + EXPECT_CALL(callback_, SendPadding).WillOnce(Return(kPacketSize.bytes())); + clock_.AdvanceTime(kPacketPacingTime); + pacer_->ProcessPackets(); + ::testing::Mock::VerifyAndClearExpectations(&callback_); + + // Since padding rate is half of pacing rate, next time we can send + // padding is double the packet pacing time. + EXPECT_EQ(pacer_->NextSendTime() - clock_.CurrentTime(), + kPacketPacingTime * 2); + + // Insert a packet to be sent, this take precedence again. + Send(RtpPacketMediaType::kVideo, kSsrc, sequnce_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes()); + EXPECT_EQ(pacer_->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime); +} + +TEST_P(PacingControllerTest, PaddingTargetAccountsForPaddingRate) { + if (PeriodicProcess()) { + // This test applies only when NOT using interval budget. + return; + } + + // Re-init pacer with an explicitly set padding target of 10ms; + const TimeDelta kPaddingTarget = TimeDelta::Millis(10); + ScopedFieldTrials field_trials( + "WebRTC-Pacer-DynamicPaddingTarget/timedelta:10ms/"); + SetUp(); + + const uint32_t kSsrc = 12345; + const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125); + const DataSize kPacketSize = DataSize::Bytes(130); + + uint32_t sequnce_number = 1; + + // Start with pacing and padding rate equal. + pacer_->SetPacingRates(kPacingDataRate, kPacingDataRate); + + // Send a single packet. 
+ SendAndExpectPacket(RtpPacketMediaType::kVideo, kSsrc, sequnce_number++, + clock_.TimeInMilliseconds(), kPacketSize.bytes()); + AdvanceTimeAndProcess(); + ::testing::Mock::VerifyAndClearExpectations(&callback_); + + size_t expected_padding_target_bytes = + (kPaddingTarget * kPacingDataRate).bytes(); + EXPECT_CALL(callback_, SendPadding(expected_padding_target_bytes)) + .WillOnce(Return(expected_padding_target_bytes)); + AdvanceTimeAndProcess(); + + // Half the padding rate - expect half the padding target. + pacer_->SetPacingRates(kPacingDataRate, kPacingDataRate / 2); + EXPECT_CALL(callback_, SendPadding(expected_padding_target_bytes / 2)) + .WillOnce(Return(expected_padding_target_bytes / 2)); + AdvanceTimeAndProcess(); +} + +TEST_P(PacingControllerTest, SendsFecPackets) { + const uint32_t kSsrc = 12345; + const uint32_t kFlexSsrc = 54321; + uint16_t sequence_number = 1234; + uint16_t flexfec_sequence_number = 4321; + const size_t kPacketSize = 123; + + // Set pacing rate to 1000 packet/s, no padding. 
+ pacer_->SetPacingRates( + DataSize::Bytes(1000 * kPacketSize) / TimeDelta::Seconds(1), + DataRate::Zero()); + + int64_t now = clock_.TimeInMilliseconds(); + Send(RtpPacketMediaType::kVideo, kSsrc, sequence_number, now, kPacketSize); + EXPECT_CALL(callback_, SendPacket(kSsrc, sequence_number, now, false, false)); + EXPECT_CALL(callback_, FetchFec).WillOnce([&]() { + EXPECT_CALL(callback_, SendPacket(kFlexSsrc, flexfec_sequence_number, now, + false, false)); + EXPECT_CALL(callback_, FetchFec); + std::vector> fec_packets; + fec_packets.push_back( + BuildPacket(RtpPacketMediaType::kForwardErrorCorrection, kFlexSsrc, + flexfec_sequence_number, now, kPacketSize)); + return fec_packets; + }); + AdvanceTimeAndProcess(); + AdvanceTimeAndProcess(); +} + INSTANTIATE_TEST_SUITE_P( WithAndWithoutIntervalBudget, PacingControllerTest, diff --git a/modules/pacing/packet_router.cc b/modules/pacing/packet_router.cc index fa64331493..5317f510c9 100644 --- a/modules/pacing/packet_router.cc +++ b/modules/pacing/packet_router.cc @@ -17,13 +17,14 @@ #include #include "absl/types/optional.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" namespace webrtc { namespace { @@ -52,8 +53,9 @@ PacketRouter::~PacketRouter() { RTC_DCHECK(active_remb_module_ == nullptr); } -void PacketRouter::AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate) { - rtc::CritScope cs(&modules_crit_); +void PacketRouter::AddSendRtpModule(RtpRtcpInterface* rtp_module, + bool remb_candidate) { + MutexLock lock(&modules_mutex_); AddSendRtpModuleToMap(rtp_module, rtp_module->SSRC()); if (absl::optional rtx_ssrc = rtp_module->RtxSsrc()) { @@ -72,7 +74,8 @@ void 
PacketRouter::AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate) { } } -void PacketRouter::AddSendRtpModuleToMap(RtpRtcp* rtp_module, uint32_t ssrc) { +void PacketRouter::AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module, + uint32_t ssrc) { RTC_DCHECK(send_modules_map_.find(ssrc) == send_modules_map_.end()); // Always keep the audio modules at the back of the list, so that when we // iterate over the modules in order to find one that can send padding we @@ -93,8 +96,8 @@ void PacketRouter::RemoveSendRtpModuleFromMap(uint32_t ssrc) { send_modules_map_.erase(kv); } -void PacketRouter::RemoveSendRtpModule(RtpRtcp* rtp_module) { - rtc::CritScope cs(&modules_crit_); +void PacketRouter::RemoveSendRtpModule(RtpRtcpInterface* rtp_module) { + MutexLock lock(&modules_mutex_); MaybeRemoveRembModuleCandidate(rtp_module, /* media_sender = */ true); RemoveSendRtpModuleFromMap(rtp_module->SSRC()); @@ -112,7 +115,7 @@ void PacketRouter::RemoveSendRtpModule(RtpRtcp* rtp_module) { void PacketRouter::AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender, bool remb_candidate) { - rtc::CritScope cs(&modules_crit_); + MutexLock lock(&modules_mutex_); RTC_DCHECK(std::find(rtcp_feedback_senders_.begin(), rtcp_feedback_senders_.end(), rtcp_sender) == rtcp_feedback_senders_.end()); @@ -126,7 +129,7 @@ void PacketRouter::AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender, void PacketRouter::RemoveReceiveRtpModule( RtcpFeedbackSenderInterface* rtcp_sender) { - rtc::CritScope cs(&modules_crit_); + MutexLock lock(&modules_mutex_); MaybeRemoveRembModuleCandidate(rtcp_sender, /* media_sender = */ false); auto it = std::find(rtcp_feedback_senders_.begin(), rtcp_feedback_senders_.end(), rtcp_sender); @@ -136,7 +139,11 @@ void PacketRouter::RemoveReceiveRtpModule( void PacketRouter::SendPacket(std::unique_ptr packet, const PacedPacketInfo& cluster_info) { - rtc::CritScope cs(&modules_crit_); + TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), "PacketRouter::SendPacket", + 
"sequence_number", packet->SequenceNumber(), "rtp_timestamp", + packet->Timestamp()); + + MutexLock lock(&modules_mutex_); // With the new pacer code path, transport sequence numbers are only set here, // on the pacer thread. Therefore we don't need atomics/synchronization. if (packet->HasExtension()) { @@ -153,7 +160,7 @@ void PacketRouter::SendPacket(std::unique_ptr packet, return; } - RtpRtcp* rtp_module = kv->second; + RtpRtcpInterface* rtp_module = kv->second; if (!rtp_module->TrySendPacket(packet.get(), cluster_info)) { RTC_LOG(LS_WARNING) << "Failed to send packet, rejected by RTP module."; return; @@ -164,11 +171,26 @@ void PacketRouter::SendPacket(std::unique_ptr packet, // properties needed for payload based padding. Cache it for later use. last_send_module_ = rtp_module; } + + for (auto& packet : rtp_module->FetchFecPackets()) { + pending_fec_packets_.push_back(std::move(packet)); + } +} + +std::vector> PacketRouter::FetchFec() { + MutexLock lock(&modules_mutex_); + std::vector> fec_packets = + std::move(pending_fec_packets_); + pending_fec_packets_.clear(); + return fec_packets; } std::vector> PacketRouter::GeneratePadding( - size_t target_size_bytes) { - rtc::CritScope cs(&modules_crit_); + DataSize size) { + TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "PacketRouter::GeneratePadding", "bytes", size.bytes()); + + MutexLock lock(&modules_mutex_); // First try on the last rtp module to have sent media. 
This increases the // the chance that any payload based padding will be useful as it will be // somewhat distributed over modules according the packet rate, even if it @@ -178,30 +200,38 @@ std::vector> PacketRouter::GeneratePadding( std::vector> padding_packets; if (last_send_module_ != nullptr && last_send_module_->SupportsRtxPayloadPadding()) { - padding_packets = last_send_module_->GeneratePadding(target_size_bytes); - if (!padding_packets.empty()) { - return padding_packets; - } + padding_packets = last_send_module_->GeneratePadding(size.bytes()); } - // Iterate over all modules send module. Video modules will be at the front - // and so will be prioritized. This is important since audio packets may not - // be taken into account by the bandwidth estimator, e.g. in FF. - for (RtpRtcp* rtp_module : send_modules_list_) { - if (rtp_module->SupportsPadding()) { - padding_packets = rtp_module->GeneratePadding(target_size_bytes); - if (!padding_packets.empty()) { - last_send_module_ = rtp_module; - break; + if (padding_packets.empty()) { + // Iterate over all modules send module. Video modules will be at the front + // and so will be prioritized. This is important since audio packets may not + // be taken into account by the bandwidth estimator, e.g. in FF. 
+ for (RtpRtcpInterface* rtp_module : send_modules_list_) { + if (rtp_module->SupportsPadding()) { + padding_packets = rtp_module->GeneratePadding(size.bytes()); + if (!padding_packets.empty()) { + last_send_module_ = rtp_module; + break; + } } } } +#if RTC_TRACE_EVENTS_ENABLED + for (auto& packet : padding_packets) { + TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "PacketRouter::GeneratePadding::Loop", "sequence_number", + packet->SequenceNumber(), "rtp_timestamp", + packet->Timestamp()); + } +#endif + return padding_packets; } uint16_t PacketRouter::CurrentTransportSequenceNumber() const { - rtc::CritScope lock(&modules_crit_); + MutexLock lock(&modules_mutex_); return transport_seq_ & 0xFFFF; } @@ -215,7 +245,7 @@ void PacketRouter::OnReceiveBitrateChanged(const std::vector& ssrcs, int64_t now_ms = rtc::TimeMillis(); { - rtc::CritScope lock(&remb_crit_); + MutexLock lock(&remb_mutex_); // If we already have an estimate, check if the new total estimate is below // kSendThresholdPercent of the previous estimate. @@ -248,7 +278,7 @@ void PacketRouter::OnReceiveBitrateChanged(const std::vector& ssrcs, void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) { RTC_DCHECK_GE(bitrate_bps, 0); { - rtc::CritScope lock(&remb_crit_); + MutexLock lock(&remb_mutex_); max_bitrate_bps_ = bitrate_bps; if (rtc::TimeMillis() - last_remb_time_ms_ < kRembSendIntervalMs && last_send_bitrate_bps_ > 0 && @@ -262,7 +292,7 @@ void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) { bool PacketRouter::SendRemb(int64_t bitrate_bps, const std::vector& ssrcs) { - rtc::CritScope lock(&modules_crit_); + MutexLock lock(&modules_mutex_); if (!active_remb_module_) { return false; @@ -277,10 +307,10 @@ bool PacketRouter::SendRemb(int64_t bitrate_bps, bool PacketRouter::SendCombinedRtcpPacket( std::vector> packets) { - rtc::CritScope cs(&modules_crit_); + MutexLock lock(&modules_mutex_); // Prefer send modules. 
- for (RtpRtcp* rtp_module : send_modules_list_) { + for (RtpRtcpInterface* rtp_module : send_modules_list_) { if (rtp_module->RTCP() == RtcpMode::kOff) { continue; } diff --git a/modules/pacing/packet_router.h b/modules/pacing/packet_router.h index 40b3ad1407..2fa104b4cd 100644 --- a/modules/pacing/packet_router.h +++ b/modules/pacing/packet_router.h @@ -21,17 +21,18 @@ #include #include "api/transport/network_types.h" +#include "modules/pacing/pacing_controller.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { -class RtpRtcp; +class RtpRtcpInterface; // PacketRouter keeps track of rtp send modules to support the pacer. // In addition, it handles feedback messages, which are sent on a send @@ -39,24 +40,25 @@ class RtpRtcp; // (receiver report). For the latter case, we also keep track of the // receive modules. 
class PacketRouter : public RemoteBitrateObserver, - public TransportFeedbackSenderInterface { + public TransportFeedbackSenderInterface, + public PacingController::PacketSender { public: PacketRouter(); explicit PacketRouter(uint16_t start_transport_seq); ~PacketRouter() override; - void AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate); - void RemoveSendRtpModule(RtpRtcp* rtp_module); + void AddSendRtpModule(RtpRtcpInterface* rtp_module, bool remb_candidate); + void RemoveSendRtpModule(RtpRtcpInterface* rtp_module); void AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender, bool remb_candidate); void RemoveReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender); - virtual void SendPacket(std::unique_ptr packet, - const PacedPacketInfo& cluster_info); - - virtual std::vector> GeneratePadding( - size_t target_size_bytes); + void SendPacket(std::unique_ptr packet, + const PacedPacketInfo& cluster_info) override; + std::vector> FetchFec() override; + std::vector> GeneratePadding( + DataSize size) override; uint16_t CurrentTransportSequenceNumber() const; @@ -82,48 +84,54 @@ class PacketRouter : public RemoteBitrateObserver, private: void AddRembModuleCandidate(RtcpFeedbackSenderInterface* candidate_module, bool media_sender) - RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_); void MaybeRemoveRembModuleCandidate( RtcpFeedbackSenderInterface* candidate_module, - bool media_sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_); - void UnsetActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_); - void DetermineActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_); - void AddSendRtpModuleToMap(RtpRtcp* rtp_module, uint32_t ssrc) - RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_); + bool media_sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_); + void UnsetActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_); + void DetermineActiveRembModule() 
RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_); + void AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module, uint32_t ssrc) + RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_); void RemoveSendRtpModuleFromMap(uint32_t ssrc) - RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_); - - rtc::CriticalSection modules_crit_; - // Ssrc to RtpRtcp module; - std::unordered_map send_modules_map_ - RTC_GUARDED_BY(modules_crit_); - std::list send_modules_list_ RTC_GUARDED_BY(modules_crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_); + + mutable Mutex modules_mutex_; + // Ssrc to RtpRtcpInterface module; + std::unordered_map send_modules_map_ + RTC_GUARDED_BY(modules_mutex_); + std::list send_modules_list_ + RTC_GUARDED_BY(modules_mutex_); // The last module used to send media. - RtpRtcp* last_send_module_ RTC_GUARDED_BY(modules_crit_); + RtpRtcpInterface* last_send_module_ RTC_GUARDED_BY(modules_mutex_); // Rtcp modules of the rtp receivers. std::vector rtcp_feedback_senders_ - RTC_GUARDED_BY(modules_crit_); + RTC_GUARDED_BY(modules_mutex_); - // TODO(eladalon): remb_crit_ only ever held from one function, and it's not + // TODO(eladalon): remb_mutex_ only ever held from one function, and it's not // clear if that function can actually be called from more than one thread. - rtc::CriticalSection remb_crit_; + Mutex remb_mutex_; // The last time a REMB was sent. - int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_crit_); - int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_crit_); + int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_mutex_); + int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_); // The last bitrate update. - int64_t bitrate_bps_ RTC_GUARDED_BY(remb_crit_); - int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_crit_); + int64_t bitrate_bps_ RTC_GUARDED_BY(remb_mutex_); + int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_); // Candidates for the REMB module can be RTP sender/receiver modules, with // the sender modules taking precedence. 
std::vector sender_remb_candidates_ - RTC_GUARDED_BY(modules_crit_); + RTC_GUARDED_BY(modules_mutex_); std::vector receiver_remb_candidates_ - RTC_GUARDED_BY(modules_crit_); + RTC_GUARDED_BY(modules_mutex_); RtcpFeedbackSenderInterface* active_remb_module_ - RTC_GUARDED_BY(modules_crit_); + RTC_GUARDED_BY(modules_mutex_); + + uint64_t transport_seq_ RTC_GUARDED_BY(modules_mutex_); - uint64_t transport_seq_ RTC_GUARDED_BY(modules_crit_); + // TODO(bugs.webrtc.org/10809): Replace lock with a sequence checker once the + // process thread is gone. + std::vector> pending_fec_packets_ + RTC_GUARDED_BY(modules_mutex_); RTC_DISALLOW_COPY_AND_ASSIGN(PacketRouter); }; diff --git a/modules/pacing/packet_router_unittest.cc b/modules/pacing/packet_router_unittest.cc index 03e9ae9331..10cf98b3dd 100644 --- a/modules/pacing/packet_router_unittest.cc +++ b/modules/pacing/packet_router_unittest.cc @@ -68,7 +68,7 @@ class PacketRouterTest : public ::testing::Test { }; TEST_F(PacketRouterTest, Sanity_NoModuleRegistered_GeneratePadding) { - constexpr size_t bytes = 300; + constexpr DataSize bytes = DataSize::Bytes(300); const PacedPacketInfo paced_info(1, kProbeMinProbes, kProbeMinBytes); EXPECT_TRUE(packet_router_.GeneratePadding(bytes).empty()); @@ -101,12 +101,12 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) { const uint16_t kSsrc1 = 1234; const uint16_t kSsrc2 = 4567; - NiceMock rtp_1; + NiceMock rtp_1; ON_CALL(rtp_1, RtxSendStatus()).WillByDefault(Return(kRtxRedundantPayloads)); ON_CALL(rtp_1, SSRC()).WillByDefault(Return(kSsrc1)); ON_CALL(rtp_1, SupportsPadding).WillByDefault(Return(false)); - NiceMock rtp_2; + NiceMock rtp_2; ON_CALL(rtp_2, RtxSendStatus()).WillByDefault(Return(kRtxOff)); ON_CALL(rtp_2, SSRC()).WillByDefault(Return(kSsrc2)); ON_CALL(rtp_2, SupportsPadding).WillByDefault(Return(true)); @@ -122,7 +122,8 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) { return std::vector>( kExpectedPaddingPackets); }); - auto generated_padding = 
packet_router_.GeneratePadding(kPaddingSize); + auto generated_padding = + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize)); EXPECT_EQ(generated_padding.size(), kExpectedPaddingPackets); packet_router_.RemoveSendRtpModule(&rtp_1); @@ -142,13 +143,13 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) { kExpectedPaddingPackets); }; - NiceMock audio_module; + NiceMock audio_module; ON_CALL(audio_module, RtxSendStatus()).WillByDefault(Return(kRtxOff)); ON_CALL(audio_module, SSRC()).WillByDefault(Return(kSsrc1)); ON_CALL(audio_module, SupportsPadding).WillByDefault(Return(true)); ON_CALL(audio_module, IsAudioConfigured).WillByDefault(Return(true)); - NiceMock video_module; + NiceMock video_module; ON_CALL(video_module, RtxSendStatus()).WillByDefault(Return(kRtxOff)); ON_CALL(video_module, SSRC()).WillByDefault(Return(kSsrc2)); ON_CALL(video_module, SupportsPadding).WillByDefault(Return(true)); @@ -159,7 +160,7 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) { packet_router_.AddSendRtpModule(&audio_module, false); EXPECT_CALL(audio_module, GeneratePadding(kPaddingSize)) .WillOnce(generate_padding); - packet_router_.GeneratePadding(kPaddingSize); + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize)); // Add the video module, this should now be prioritized since we cannot // guarantee that audio packets will be included in the BWE. @@ -167,7 +168,7 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) { EXPECT_CALL(audio_module, GeneratePadding).Times(0); EXPECT_CALL(video_module, GeneratePadding(kPaddingSize)) .WillOnce(generate_padding); - packet_router_.GeneratePadding(kPaddingSize); + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize)); // Remove and the add audio module again. Module order shouldn't matter; // video should still be prioritized. 
@@ -176,14 +177,14 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) { EXPECT_CALL(audio_module, GeneratePadding).Times(0); EXPECT_CALL(video_module, GeneratePadding(kPaddingSize)) .WillOnce(generate_padding); - packet_router_.GeneratePadding(kPaddingSize); + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize)); // Remove and the video module, we should fall back to padding on the // audio module again. packet_router_.RemoveSendRtpModule(&video_module); EXPECT_CALL(audio_module, GeneratePadding(kPaddingSize)) .WillOnce(generate_padding); - packet_router_.GeneratePadding(kPaddingSize); + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingSize)); packet_router_.RemoveSendRtpModule(&audio_module); } @@ -194,7 +195,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { const uint16_t kSsrc3 = 8901; // First two rtp modules send media and have rtx. - NiceMock rtp_1; + NiceMock rtp_1; EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1)); EXPECT_CALL(rtp_1, SupportsPadding).WillRepeatedly(Return(true)); EXPECT_CALL(rtp_1, SupportsRtxPayloadPadding).WillRepeatedly(Return(true)); @@ -205,7 +206,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { ::testing::Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc1)), _)) .WillRepeatedly(Return(true)); - NiceMock rtp_2; + NiceMock rtp_2; EXPECT_CALL(rtp_2, SSRC()).WillRepeatedly(Return(kSsrc2)); EXPECT_CALL(rtp_2, SupportsPadding).WillRepeatedly(Return(true)); EXPECT_CALL(rtp_2, SupportsRtxPayloadPadding).WillRepeatedly(Return(true)); @@ -217,7 +218,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { .WillRepeatedly(Return(true)); // Third module is sending media, but does not support rtx. 
- NiceMock rtp_3; + NiceMock rtp_3; EXPECT_CALL(rtp_3, SSRC()).WillRepeatedly(Return(kSsrc3)); EXPECT_CALL(rtp_3, SupportsPadding).WillRepeatedly(Return(true)); EXPECT_CALL(rtp_3, SupportsRtxPayloadPadding).WillRepeatedly(Return(false)); @@ -243,7 +244,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { packets.push_back(BuildRtpPacket(kSsrc2)); return packets; }); - packet_router_.GeneratePadding(kPaddingBytes); + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingBytes)); // Send media on first module. Padding should be sent on that module. packet_router_.SendPacket(BuildRtpPacket(kSsrc1), PacedPacketInfo()); @@ -255,7 +256,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { packets.push_back(BuildRtpPacket(kSsrc1)); return packets; }); - packet_router_.GeneratePadding(kPaddingBytes); + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingBytes)); // Send media on second module. Padding should be sent there. packet_router_.SendPacket(BuildRtpPacket(kSsrc2), PacedPacketInfo()); @@ -265,7 +266,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { packet_router_.RemoveSendRtpModule(&rtp_2); // Send on and then remove all remaining modules. 
- RtpRtcp* last_send_module; + RtpRtcpInterface* last_send_module; EXPECT_CALL(rtp_1, GeneratePadding(kPaddingBytes)) .Times(1) .WillOnce([&](size_t target_size_bytes) { @@ -285,7 +286,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { for (int i = 0; i < 2; ++i) { last_send_module = nullptr; - packet_router_.GeneratePadding(kPaddingBytes); + packet_router_.GeneratePadding(DataSize::Bytes(kPaddingBytes)); EXPECT_NE(last_send_module, nullptr); packet_router_.RemoveSendRtpModule(last_send_module); } @@ -297,7 +298,7 @@ TEST_F(PacketRouterTest, AllocatesTransportSequenceNumbers) { const uint16_t kSsrc1 = 1234; PacketRouter packet_router(kStartSeq - 1); - NiceMock rtp_1; + NiceMock rtp_1; EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1)); EXPECT_CALL(rtp_1, TrySendPacket).WillRepeatedly(Return(true)); packet_router.AddSendRtpModule(&rtp_1, false); @@ -315,8 +316,8 @@ TEST_F(PacketRouterTest, AllocatesTransportSequenceNumbers) { } TEST_F(PacketRouterTest, SendTransportFeedback) { - NiceMock rtp_1; - NiceMock rtp_2; + NiceMock rtp_1; + NiceMock rtp_2; ON_CALL(rtp_1, RTCP()).WillByDefault(Return(RtcpMode::kCompound)); ON_CALL(rtp_2, RTCP()).WillByDefault(Return(RtcpMode::kCompound)); @@ -338,7 +339,7 @@ TEST_F(PacketRouterTest, SendTransportFeedback) { TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) { const uint16_t kSsrc1 = 1234; - NiceMock rtp_1; + NiceMock rtp_1; ON_CALL(rtp_1, SendingMedia).WillByDefault(Return(true)); ON_CALL(rtp_1, SSRC).WillByDefault(Return(kSsrc1)); packet_router_.AddSendRtpModule(&rtp_1, false); @@ -361,8 +362,8 @@ TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) { } TEST_F(PacketRouterTest, SendPacketAssignsTransportSequenceNumbers) { - NiceMock rtp_1; - NiceMock rtp_2; + NiceMock rtp_1; + NiceMock rtp_2; const uint16_t kSsrc1 = 1234; const uint16_t kSsrc2 = 2345; @@ -405,8 +406,9 @@ TEST_F(PacketRouterTest, SendPacketAssignsTransportSequenceNumbers) { } #if RTC_DCHECK_IS_ON && 
GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(PacketRouterTest, DoubleRegistrationOfSendModuleDisallowed) { - NiceMock module; +using PacketRouterDeathTest = PacketRouterTest; +TEST_F(PacketRouterDeathTest, DoubleRegistrationOfSendModuleDisallowed) { + NiceMock module; constexpr bool remb_candidate = false; // Value irrelevant. packet_router_.AddSendRtpModule(&module, remb_candidate); @@ -416,8 +418,8 @@ TEST_F(PacketRouterTest, DoubleRegistrationOfSendModuleDisallowed) { packet_router_.RemoveSendRtpModule(&module); } -TEST_F(PacketRouterTest, DoubleRegistrationOfReceiveModuleDisallowed) { - NiceMock module; +TEST_F(PacketRouterDeathTest, DoubleRegistrationOfReceiveModuleDisallowed) { + NiceMock module; constexpr bool remb_candidate = false; // Value irrelevant. packet_router_.AddReceiveRtpModule(&module, remb_candidate); @@ -427,14 +429,14 @@ TEST_F(PacketRouterTest, DoubleRegistrationOfReceiveModuleDisallowed) { packet_router_.RemoveReceiveRtpModule(&module); } -TEST_F(PacketRouterTest, RemovalOfNeverAddedSendModuleDisallowed) { - NiceMock module; +TEST_F(PacketRouterDeathTest, RemovalOfNeverAddedSendModuleDisallowed) { + NiceMock module; EXPECT_DEATH(packet_router_.RemoveSendRtpModule(&module), ""); } -TEST_F(PacketRouterTest, RemovalOfNeverAddedReceiveModuleDisallowed) { - NiceMock module; +TEST_F(PacketRouterDeathTest, RemovalOfNeverAddedReceiveModuleDisallowed) { + NiceMock module; EXPECT_DEATH(packet_router_.RemoveReceiveRtpModule(&module), ""); } @@ -442,7 +444,7 @@ TEST_F(PacketRouterTest, RemovalOfNeverAddedReceiveModuleDisallowed) { TEST(PacketRouterRembTest, LowerEstimateToSendRemb) { rtc::ScopedFakeClock clock; - NiceMock rtp; + NiceMock rtp; PacketRouter packet_router; packet_router.AddSendRtpModule(&rtp, true); @@ -453,7 +455,7 @@ TEST(PacketRouterRembTest, LowerEstimateToSendRemb) { packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Call OnReceiveBitrateChanged twice to get a first estimate. 
- clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); @@ -468,7 +470,7 @@ TEST(PacketRouterRembTest, LowerEstimateToSendRemb) { TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) { rtc::ScopedFakeClock clock; - NiceMock rtp; + NiceMock rtp; PacketRouter packet_router; packet_router.AddSendRtpModule(&rtp, true); @@ -479,7 +481,7 @@ TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) { // Call OnReceiveBitrateChanged twice to get a first estimate. EXPECT_CALL(rtp, SetRemb(bitrate_estimate[0], ssrcs)).Times(1); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate[0]); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate[1] + 100); @@ -493,7 +495,7 @@ TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) { TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) { rtc::ScopedFakeClock clock; - NiceMock rtp; + NiceMock rtp; PacketRouter packet_router; packet_router.AddSendRtpModule(&rtp, true); @@ -504,7 +506,7 @@ TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) { // Call OnReceiveBitrateChanged twice to get a first estimate. EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Increased estimate shouldn't trigger a callback right away. 
@@ -521,8 +523,8 @@ TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) { TEST(PacketRouterRembTest, ChangeSendRtpModule) { rtc::ScopedFakeClock clock; - NiceMock rtp_send; - NiceMock rtp_recv; + NiceMock rtp_send; + NiceMock rtp_recv; PacketRouter packet_router; packet_router.AddSendRtpModule(&rtp_send, true); packet_router.AddReceiveRtpModule(&rtp_recv, true); @@ -533,7 +535,7 @@ TEST(PacketRouterRembTest, ChangeSendRtpModule) { packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Call OnReceiveBitrateChanged twice to get a first estimate. - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); EXPECT_CALL(rtp_send, SetRemb(bitrate_estimate, ssrcs)).Times(1); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); @@ -556,7 +558,7 @@ TEST(PacketRouterRembTest, ChangeSendRtpModule) { TEST(PacketRouterRembTest, OnlyOneRembForRepeatedOnReceiveBitrateChanged) { rtc::ScopedFakeClock clock; - NiceMock rtp; + NiceMock rtp; PacketRouter packet_router; packet_router.AddSendRtpModule(&rtp, true); @@ -566,7 +568,7 @@ TEST(PacketRouterRembTest, OnlyOneRembForRepeatedOnReceiveBitrateChanged) { packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Call OnReceiveBitrateChanged twice to get a first estimate. 
- clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); EXPECT_CALL(rtp, SetRemb(_, _)).Times(1); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); @@ -584,8 +586,8 @@ TEST(PacketRouterRembTest, OnlyOneRembForRepeatedOnReceiveBitrateChanged) { TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateLimitsSetRemb) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - clock.AdvanceTime(TimeDelta::ms(1000)); - NiceMock remb_sender; + clock.AdvanceTime(TimeDelta::Millis(1000)); + NiceMock remb_sender; constexpr bool remb_candidate = true; packet_router.AddSendRtpModule(&remb_sender, remb_candidate); @@ -596,7 +598,7 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateLimitsSetRemb) { const std::vector ssrcs = {1234}; packet_router.SetMaxDesiredReceiveBitrate(cap_bitrate); packet_router.OnReceiveBitrateChanged(ssrcs, cap_bitrate + 5000); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, cap_bitrate - 5000); // Test tear-down. 
@@ -607,8 +609,8 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateTriggersRembWhenMoreRestrictive) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - clock.AdvanceTime(TimeDelta::ms(1000)); - NiceMock remb_sender; + clock.AdvanceTime(TimeDelta::Millis(1000)); + NiceMock remb_sender; constexpr bool remb_candidate = true; packet_router.AddSendRtpModule(&remb_sender, remb_candidate); @@ -629,8 +631,8 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateDoesNotTriggerRembWhenAsRestrictive) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - clock.AdvanceTime(TimeDelta::ms(1000)); - NiceMock remb_sender; + clock.AdvanceTime(TimeDelta::Millis(1000)); + NiceMock remb_sender; constexpr bool remb_candidate = true; packet_router.AddSendRtpModule(&remb_sender, remb_candidate); @@ -651,8 +653,8 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateDoesNotTriggerRembWhenLessRestrictive) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - clock.AdvanceTime(TimeDelta::ms(1000)); - NiceMock remb_sender; + clock.AdvanceTime(TimeDelta::Millis(1000)); + NiceMock remb_sender; constexpr bool remb_candidate = true; packet_router.AddSendRtpModule(&remb_sender, remb_candidate); @@ -673,8 +675,8 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateTriggersRembWhenNoRecentMeasure) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - clock.AdvanceTime(TimeDelta::ms(1000)); - NiceMock remb_sender; + clock.AdvanceTime(TimeDelta::Millis(1000)); + NiceMock remb_sender; constexpr bool remb_candidate = true; packet_router.AddSendRtpModule(&remb_sender, remb_candidate); @@ -683,7 +685,7 @@ TEST(PacketRouterRembTest, const std::vector ssrcs = {1234}; EXPECT_CALL(remb_sender, SetRemb(measured_bitrate_bps, _)); packet_router.OnReceiveBitrateChanged(ssrcs, measured_bitrate_bps); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); EXPECT_CALL(remb_sender, SetRemb(cap_bitrate_bps, _)); 
packet_router.SetMaxDesiredReceiveBitrate(cap_bitrate_bps); @@ -696,8 +698,8 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateTriggersRembWhenNoMeasures) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - clock.AdvanceTime(TimeDelta::ms(1000)); - NiceMock remb_sender; + clock.AdvanceTime(TimeDelta::Millis(1000)); + NiceMock remb_sender; constexpr bool remb_candidate = true; packet_router.AddSendRtpModule(&remb_sender, remb_candidate); @@ -719,7 +721,7 @@ TEST(PacketRouterRembTest, // packet on this one. TEST(PacketRouterRembTest, NoSendingRtpModule) { rtc::ScopedFakeClock clock; - NiceMock rtp; + NiceMock rtp; PacketRouter packet_router; packet_router.AddReceiveRtpModule(&rtp, true); @@ -730,7 +732,7 @@ TEST(PacketRouterRembTest, NoSendingRtpModule) { packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Call OnReceiveBitrateChanged twice to get a first estimate. - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); EXPECT_CALL(rtp, SetRemb(bitrate_estimate, ssrcs)).Times(1); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); @@ -745,7 +747,7 @@ TEST(PacketRouterRembTest, NoSendingRtpModule) { TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - NiceMock module; + NiceMock module; constexpr bool remb_candidate = false; @@ -754,7 +756,7 @@ TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) { constexpr uint32_t bitrate_estimate = 456; const std::vector ssrcs = {1234}; EXPECT_CALL(module, SetRemb(_, _)).Times(0); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Test tear-down @@ -764,7 +766,7 @@ TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) { TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - NiceMock 
module; + NiceMock module; constexpr bool remb_candidate = true; @@ -773,7 +775,7 @@ TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) { constexpr uint32_t bitrate_estimate = 456; const std::vector ssrcs = {1234}; EXPECT_CALL(module, SetRemb(bitrate_estimate, ssrcs)).Times(1); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Test tear-down @@ -783,7 +785,7 @@ TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) { TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - NiceMock module; + NiceMock module; constexpr bool remb_candidate = false; @@ -792,7 +794,7 @@ TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) { constexpr uint32_t bitrate_estimate = 456; const std::vector ssrcs = {1234}; EXPECT_CALL(module, SetRemb(_, _)).Times(0); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Test tear-down @@ -802,7 +804,7 @@ TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) { TEST(PacketRouterRembTest, CandidateReceiveRtpModuleUsedForRemb) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - NiceMock module; + NiceMock module; constexpr bool remb_candidate = true; @@ -811,7 +813,7 @@ TEST(PacketRouterRembTest, CandidateReceiveRtpModuleUsedForRemb) { constexpr uint32_t bitrate_estimate = 456; const std::vector ssrcs = {1234}; EXPECT_CALL(module, SetRemb(bitrate_estimate, ssrcs)).Times(1); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Test tear-down @@ -822,8 +824,8 @@ TEST(PacketRouterRembTest, SendCandidatePreferredOverReceiveCandidate_SendModuleAddedFirst) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - 
NiceMock send_module; - NiceMock receive_module; + NiceMock send_module; + NiceMock receive_module; constexpr bool remb_candidate = true; @@ -838,7 +840,7 @@ TEST(PacketRouterRembTest, EXPECT_CALL(send_module, SetRemb(bitrate_estimate, ssrcs)).Times(1); EXPECT_CALL(receive_module, SetRemb(_, _)).Times(0); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Test tear-down @@ -850,8 +852,8 @@ TEST(PacketRouterRembTest, SendCandidatePreferredOverReceiveCandidate_ReceiveModuleAddedFirst) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - NiceMock send_module; - NiceMock receive_module; + NiceMock send_module; + NiceMock receive_module; constexpr bool remb_candidate = true; @@ -866,7 +868,7 @@ TEST(PacketRouterRembTest, EXPECT_CALL(send_module, SetRemb(bitrate_estimate, ssrcs)).Times(1); EXPECT_CALL(receive_module, SetRemb(_, _)).Times(0); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Test tear-down @@ -877,8 +879,8 @@ TEST(PacketRouterRembTest, TEST(PacketRouterRembTest, ReceiveModuleTakesOverWhenLastSendModuleRemoved) { rtc::ScopedFakeClock clock; PacketRouter packet_router; - NiceMock send_module; - NiceMock receive_module; + NiceMock send_module; + NiceMock receive_module; constexpr bool remb_candidate = true; @@ -893,7 +895,7 @@ TEST(PacketRouterRembTest, ReceiveModuleTakesOverWhenLastSendModuleRemoved) { EXPECT_CALL(send_module, SetRemb(_, _)).Times(0); EXPECT_CALL(receive_module, SetRemb(bitrate_estimate, ssrcs)).Times(1); - clock.AdvanceTime(TimeDelta::ms(1000)); + clock.AdvanceTime(TimeDelta::Millis(1000)); packet_router.OnReceiveBitrateChanged(ssrcs, bitrate_estimate); // Test tear-down diff --git a/modules/pacing/round_robin_packet_queue.cc b/modules/pacing/round_robin_packet_queue.cc index 7c2a7d2ae8..8094ccdc84 100644 --- 
a/modules/pacing/round_robin_packet_queue.cc +++ b/modules/pacing/round_robin_packet_queue.cc @@ -14,11 +14,12 @@ #include #include +#include "absl/strings/match.h" #include "rtc_base/checks.h" namespace webrtc { namespace { -static constexpr DataSize kMaxLeadingSize = DataSize::Bytes<1400>(); +static constexpr DataSize kMaxLeadingSize = DataSize::Bytes(1400); } RoundRobinPacketQueue::QueuedPacket::QueuedPacket(const QueuedPacket& rhs) = @@ -35,7 +36,7 @@ RoundRobinPacketQueue::QueuedPacket::QueuedPacket( enqueue_time_(enqueue_time), enqueue_order_(enqueue_order), is_retransmission_(packet->packet_type() == - RtpPacketToSend::Type::kRetransmission), + RtpPacketMediaType::kRetransmission), enqueue_time_it_(enqueue_time_it), owned_packet_(packet.release()) {} @@ -53,7 +54,7 @@ int RoundRobinPacketQueue::QueuedPacket::Priority() const { return priority_; } -RtpPacketToSend::Type RoundRobinPacketQueue::QueuedPacket::Type() const { +RtpPacketMediaType RoundRobinPacketQueue::QueuedPacket::Type() const { return *owned_packet_->packet_type(); } @@ -66,7 +67,7 @@ Timestamp RoundRobinPacketQueue::QueuedPacket::EnqueueTime() const { } bool RoundRobinPacketQueue::QueuedPacket::IsRetransmission() const { - return Type() == RtpPacketToSend::Type::kRetransmission; + return Type() == RtpPacketMediaType::kRetransmission; } uint64_t RoundRobinPacketQueue::QueuedPacket::EnqueueOrder() const { @@ -77,6 +78,11 @@ RtpPacketToSend* RoundRobinPacketQueue::QueuedPacket::RtpPacket() const { return owned_packet_; } +void RoundRobinPacketQueue::QueuedPacket::UpdateEnqueueTimeIterator( + std::multiset::iterator it) { + enqueue_time_it_ = it; +} + std::multiset::iterator RoundRobinPacketQueue::QueuedPacket::EnqueueTimeIterator() const { return enqueue_time_it_; @@ -105,7 +111,7 @@ bool IsEnabled(const WebRtcKeyValueConfig* field_trials, const char* name) { if (!field_trials) { return false; } - return field_trials->Lookup(name).find("Enabled") == 0; + return 
absl::StartsWith(field_trials->Lookup(name), "Enabled"); } RoundRobinPacketQueue::RoundRobinPacketQueue( @@ -133,11 +139,34 @@ void RoundRobinPacketQueue::Push(int priority, uint64_t enqueue_order, std::unique_ptr packet) { RTC_DCHECK(packet->packet_type().has_value()); - Push(QueuedPacket(priority, enqueue_time, enqueue_order, - enqueue_times_.insert(enqueue_time), std::move(packet))); + if (size_packets_ == 0) { + // Single packet fast-path. + single_packet_queue_.emplace( + QueuedPacket(priority, enqueue_time, enqueue_order, + enqueue_times_.end(), std::move(packet))); + UpdateQueueTime(enqueue_time); + single_packet_queue_->SubtractPauseTime(pause_time_sum_); + size_packets_ = 1; + size_ += PacketSize(*single_packet_queue_); + } else { + MaybePromoteSinglePacketToNormalQueue(); + Push(QueuedPacket(priority, enqueue_time, enqueue_order, + enqueue_times_.insert(enqueue_time), std::move(packet))); + } } std::unique_ptr RoundRobinPacketQueue::Pop() { + if (single_packet_queue_.has_value()) { + RTC_DCHECK(stream_priorities_.empty()); + std::unique_ptr rtp_packet( + single_packet_queue_->RtpPacket()); + single_packet_queue_.reset(); + queue_time_sum_ = TimeDelta::Zero(); + size_packets_ = 0; + size_ = DataSize::Zero(); + return rtp_packet; + } + RTC_DCHECK(!Empty()); Stream* stream = GetHighestPriorityStream(); const QueuedPacket& queued_packet = stream->packet_queue.top(); @@ -162,13 +191,7 @@ std::unique_ptr RoundRobinPacketQueue::Pop() { // case a "budget" will be built up for the stream sending at the lower // rate. To avoid building a too large budget we limit |bytes| to be within // kMaxLeading bytes of the stream that has sent the most amount of bytes. 
- DataSize packet_size = - DataSize::bytes(queued_packet.RtpPacket()->payload_size() + - queued_packet.RtpPacket()->padding_size()); - if (include_overhead_) { - packet_size += DataSize::bytes(queued_packet.RtpPacket()->headers_size()) + - transport_overhead_per_packet_; - } + DataSize packet_size = PacketSize(queued_packet); stream->size = std::max(stream->size + packet_size, max_size_ - kMaxLeadingSize); max_size_ = std::max(max_size_, stream->size); @@ -194,9 +217,12 @@ std::unique_ptr RoundRobinPacketQueue::Pop() { } bool RoundRobinPacketQueue::Empty() const { - RTC_CHECK((!stream_priorities_.empty() && size_packets_ > 0) || - (stream_priorities_.empty() && size_packets_ == 0)); - return stream_priorities_.empty(); + if (size_packets_ == 0) { + RTC_DCHECK(!single_packet_queue_.has_value() && stream_priorities_.empty()); + return true; + } + RTC_DCHECK(single_packet_queue_.has_value() || !stream_priorities_.empty()); + return false; } size_t RoundRobinPacketQueue::SizeInPackets() const { @@ -207,18 +233,32 @@ DataSize RoundRobinPacketQueue::Size() const { return size_; } -bool RoundRobinPacketQueue::NextPacketIsAudio() const { +absl::optional RoundRobinPacketQueue::LeadingAudioPacketEnqueueTime() + const { + if (single_packet_queue_.has_value()) { + if (single_packet_queue_->Type() == RtpPacketMediaType::kAudio) { + return single_packet_queue_->EnqueueTime(); + } + return absl::nullopt; + } + if (stream_priorities_.empty()) { - return false; + return absl::nullopt; } uint32_t ssrc = stream_priorities_.begin()->second; - auto stream_info_it = streams_.find(ssrc); - return stream_info_it->second.packet_queue.top().Type() == - RtpPacketToSend::Type::kAudio; + const auto& top_packet = streams_.find(ssrc)->second.packet_queue.top(); + if (top_packet.Type() == RtpPacketMediaType::kAudio) { + return top_packet.EnqueueTime(); + } + return absl::nullopt; } Timestamp RoundRobinPacketQueue::OldestEnqueueTime() const { + if (single_packet_queue_.has_value()) { + return 
single_packet_queue_->EnqueueTime(); + } + if (Empty()) return Timestamp::MinusInfinity(); RTC_CHECK(!enqueue_times_.empty()); @@ -235,7 +275,7 @@ void RoundRobinPacketQueue::UpdateQueueTime(Timestamp now) { if (paused_) { pause_time_sum_ += delta; } else { - queue_time_sum_ += TimeDelta::us(delta.us() * size_packets_); + queue_time_sum_ += TimeDelta::Micros(delta.us() * size_packets_); } time_last_updated_ = now; @@ -249,17 +289,19 @@ void RoundRobinPacketQueue::SetPauseState(bool paused, Timestamp now) { } void RoundRobinPacketQueue::SetIncludeOverhead() { + MaybePromoteSinglePacketToNormalQueue(); include_overhead_ = true; // We need to update the size to reflect overhead for existing packets. for (const auto& stream : streams_) { for (const QueuedPacket& packet : stream.second.packet_queue) { - size_ += DataSize::bytes(packet.RtpPacket()->headers_size()) + + size_ += DataSize::Bytes(packet.RtpPacket()->headers_size()) + transport_overhead_per_packet_; } } } void RoundRobinPacketQueue::SetTransportOverhead(DataSize overhead_per_packet) { + MaybePromoteSinglePacketToNormalQueue(); if (include_overhead_) { DataSize previous_overhead = transport_overhead_per_packet_; // We need to update the size to reflect overhead for existing packets. @@ -303,26 +345,44 @@ void RoundRobinPacketQueue::Push(QueuedPacket packet) { } RTC_CHECK(stream->priority_it != stream_priorities_.end()); - // In order to figure out how much time a packet has spent in the queue while - // not in a paused state, we subtract the total amount of time the queue has - // been paused so far, and when the packet is popped we subtract the total - // amount of time the queue has been paused at that moment. This way we - // subtract the total amount of time the packet has spent in the queue while - // in a paused state. 
- UpdateQueueTime(packet.EnqueueTime()); - packet.SubtractPauseTime(pause_time_sum_); - - size_packets_ += 1; - size_ += DataSize::bytes(packet.RtpPacket()->payload_size() + - packet.RtpPacket()->padding_size()); - if (include_overhead_) { - size_ += DataSize::bytes(packet.RtpPacket()->headers_size()) + - transport_overhead_per_packet_; + if (packet.EnqueueTimeIterator() == enqueue_times_.end()) { + // Promotion from single-packet queue. Just add to enqueue times. + packet.UpdateEnqueueTimeIterator( + enqueue_times_.insert(packet.EnqueueTime())); + } else { + // In order to figure out how much time a packet has spent in the queue + // while not in a paused state, we subtract the total amount of time the + // queue has been paused so far, and when the packet is popped we subtract + // the total amount of time the queue has been paused at that moment. This + // way we subtract the total amount of time the packet has spent in the + // queue while in a paused state. + UpdateQueueTime(packet.EnqueueTime()); + packet.SubtractPauseTime(pause_time_sum_); + + size_packets_ += 1; + size_ += PacketSize(packet); } stream->packet_queue.push(packet); } +DataSize RoundRobinPacketQueue::PacketSize(const QueuedPacket& packet) const { + DataSize packet_size = DataSize::Bytes(packet.RtpPacket()->payload_size() + + packet.RtpPacket()->padding_size()); + if (include_overhead_) { + packet_size += DataSize::Bytes(packet.RtpPacket()->headers_size()) + + transport_overhead_per_packet_; + } + return packet_size; +} + +void RoundRobinPacketQueue::MaybePromoteSinglePacketToNormalQueue() { + if (single_packet_queue_.has_value()) { + Push(*single_packet_queue_); + single_packet_queue_.reset(); + } +} + RoundRobinPacketQueue::Stream* RoundRobinPacketQueue::GetHighestPriorityStream() { RTC_CHECK(!stream_priorities_.empty()); diff --git a/modules/pacing/round_robin_packet_queue.h b/modules/pacing/round_robin_packet_queue.h index 225e137753..9446a8e174 100644 --- 
a/modules/pacing/round_robin_packet_queue.h +++ b/modules/pacing/round_robin_packet_queue.h @@ -46,7 +46,11 @@ class RoundRobinPacketQueue { bool Empty() const; size_t SizeInPackets() const; DataSize Size() const; - bool NextPacketIsAudio() const; + // If the next packet, that would be returned by Pop() if called + // now, is an audio packet this method returns the enqueue time + // of that packet. If queue is empty or top packet is not audio, + // returns nullopt. + absl::optional LeadingAudioPacketEnqueueTime() const; Timestamp OldestEnqueueTime() const; TimeDelta AverageQueueTime() const; @@ -69,7 +73,7 @@ class RoundRobinPacketQueue { bool operator<(const QueuedPacket& other) const; int Priority() const; - RtpPacketToSend::Type Type() const; + RtpPacketMediaType Type() const; uint32_t Ssrc() const; Timestamp EnqueueTime() const; bool IsRetransmission() const; @@ -77,6 +81,7 @@ class RoundRobinPacketQueue { RtpPacketToSend* RtpPacket() const; std::multiset::iterator EnqueueTimeIterator() const; + void UpdateEnqueueTimeIterator(std::multiset::iterator it); void SubtractPauseTime(TimeDelta pause_time_sum); private: @@ -132,6 +137,9 @@ class RoundRobinPacketQueue { void Push(QueuedPacket packet); + DataSize PacketSize(const QueuedPacket& packet) const; + void MaybePromoteSinglePacketToNormalQueue(); + Stream* GetHighestPriorityStream(); // Just used to verify correctness. @@ -161,6 +169,8 @@ class RoundRobinPacketQueue { // the age of the oldest packet in the queue. 
std::multiset enqueue_times_; + absl::optional single_packet_queue_; + bool include_overhead_; }; } // namespace webrtc diff --git a/modules/pacing/task_queue_paced_sender.cc b/modules/pacing/task_queue_paced_sender.cc index 646af4e95a..69ec5457ad 100644 --- a/modules/pacing/task_queue_paced_sender.cc +++ b/modules/pacing/task_queue_paced_sender.cc @@ -17,16 +17,17 @@ #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/trace_event.h" namespace webrtc { namespace { // If no calls to MaybeProcessPackets() happen, make sure we update stats // at least every |kMaxTimeBetweenStatsUpdates| as long as the pacer isn't // completely drained. -constexpr TimeDelta kMaxTimeBetweenStatsUpdates = TimeDelta::Millis<33>(); +constexpr TimeDelta kMaxTimeBetweenStatsUpdates = TimeDelta::Millis(33); // Don't call UpdateStats() more than |kMinTimeBetweenStatsUpdates| apart, // for performance reasons. -constexpr TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis<1>(); +constexpr TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1); } // namespace TaskQueuePacedSender::TaskQueuePacedSender( @@ -34,11 +35,12 @@ TaskQueuePacedSender::TaskQueuePacedSender( PacketRouter* packet_router, RtcEventLog* event_log, const WebRtcKeyValueConfig* field_trials, - TaskQueueFactory* task_queue_factory) + TaskQueueFactory* task_queue_factory, + TimeDelta hold_back_window) : clock_(clock), - packet_router_(packet_router), + hold_back_window_(hold_back_window), pacing_controller_(clock, - static_cast(this), + packet_router, event_log, field_trials, PacingController::ProcessMode::kDynamic), @@ -120,6 +122,17 @@ void TaskQueuePacedSender::SetPacingRates(DataRate pacing_rate, void TaskQueuePacedSender::EnqueuePackets( std::vector> packets) { +#if RTC_TRACE_EVENTS_ENABLED + TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "TaskQueuePacedSender::EnqueuePackets"); + for (auto& packet : packets) { + 
TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), + "TaskQueuePacedSender::EnqueuePackets::Loop", + "sequence_number", packet->SequenceNumber(), "rtp_timestamp", + packet->Timestamp()); + } +#endif + task_queue_.PostTask([this, packets_ = std::move(packets)]() mutable { RTC_DCHECK_RUN_ON(&task_queue_); for (auto& packet : packets_) { @@ -174,6 +187,11 @@ TimeDelta TaskQueuePacedSender::OldestPacketWaitTime() const { return GetStats().oldest_packet_wait_time; } +void TaskQueuePacedSender::OnStatsUpdated(const Stats& stats) { + MutexLock lock(&stats_mutex_); + current_stats_ = stats; +} + void TaskQueuePacedSender::MaybeProcessPackets( Timestamp scheduled_process_time) { RTC_DCHECK_RUN_ON(&task_queue_); @@ -182,86 +200,120 @@ void TaskQueuePacedSender::MaybeProcessPackets( return; } + // Normally, run ProcessPackets() only if this is the scheduled task. + // If it is not but it is already time to process and there either is + // no scheduled task or the schedule has shifted forward in time, run + // anyway and clear any schedule. + Timestamp next_process_time = pacing_controller_.NextSendTime(); const Timestamp now = clock_->CurrentTime(); - // Run ProcessPackets() only if this is the schedules task, or if there is - // no scheduled task and we need to process immediately. - if ((scheduled_process_time.IsFinite() && - scheduled_process_time == next_process_time_) || - (next_process_time_.IsInfinite() && - pacing_controller_.NextSendTime() <= now)) { - pacing_controller_.ProcessPackets(); + const bool is_scheduled_call = next_process_time_ == scheduled_process_time; + if (is_scheduled_call) { + // Indicate no pending scheduled call. 
next_process_time_ = Timestamp::MinusInfinity(); } + if (is_scheduled_call || + (now >= next_process_time && (next_process_time_.IsInfinite() || + next_process_time < next_process_time_))) { + pacing_controller_.ProcessPackets(); + next_process_time = pacing_controller_.NextSendTime(); + } - Timestamp next_process_time = std::max(now + PacingController::kMinSleepTime, - pacing_controller_.NextSendTime()); - TimeDelta sleep_time = next_process_time - now; - if (next_process_time_.IsMinusInfinity() || - next_process_time <= - next_process_time_ - PacingController::kMinSleepTime) { + absl::optional time_to_next_process; + if (pacing_controller_.IsProbing() && + next_process_time != next_process_time_) { + // If we're probing and there isn't already a wakeup scheduled for the next + // process time, always post a task and just round sleep time down to + // nearest millisecond. + if (next_process_time.IsMinusInfinity()) { + time_to_next_process = TimeDelta::Zero(); + } else { + time_to_next_process = + std::max(TimeDelta::Zero(), + (next_process_time - now).RoundDownTo(TimeDelta::Millis(1))); + } + } else if (next_process_time_.IsMinusInfinity() || + next_process_time <= next_process_time_ - hold_back_window_) { + // Schedule a new task since there is none currently scheduled + // (|next_process_time_| is infinite), or the new process time is at least + // one holdback window earlier than whatever is currently scheduled. + time_to_next_process = std::max(next_process_time - now, hold_back_window_); + } + + if (time_to_next_process) { + // Set a new scheduled process time and post a delayed task. 
next_process_time_ = next_process_time; task_queue_.PostDelayedTask( [this, next_process_time]() { MaybeProcessPackets(next_process_time); }, - sleep_time.ms()); + time_to_next_process->ms()); } MaybeUpdateStats(false); } -std::vector> -TaskQueuePacedSender::GeneratePadding(DataSize size) { - return packet_router_->GeneratePadding(size.bytes()); -} - -void TaskQueuePacedSender::SendRtpPacket( - std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { - packet_router_->SendPacket(std::move(packet), cluster_info); -} - void TaskQueuePacedSender::MaybeUpdateStats(bool is_scheduled_call) { if (is_shutdown_) { + if (is_scheduled_call) { + stats_update_scheduled_ = false; + } return; } Timestamp now = clock_->CurrentTime(); - if (!is_scheduled_call && - now - last_stats_time_ < kMinTimeBetweenStatsUpdates) { - // Too frequent unscheduled stats update, return early. - return; + if (is_scheduled_call) { + // Allow scheduled task to process packets to clear up any remaining debt + // level in an otherwise empty queue. + pacing_controller_.ProcessPackets(); + } else { + if (now - last_stats_time_ < kMinTimeBetweenStatsUpdates) { + // Too frequent unscheduled stats update, return early.
+ return; + } } - rtc::CritScope cs(&stats_crit_); - current_stats_.expected_queue_time = pacing_controller_.ExpectedQueueTime(); - current_stats_.first_sent_packet_time = - pacing_controller_.FirstSentPacketTime(); - current_stats_.oldest_packet_wait_time = - pacing_controller_.OldestPacketWaitTime(); - current_stats_.queue_size = pacing_controller_.QueueSizeData(); + Stats new_stats; + new_stats.expected_queue_time = pacing_controller_.ExpectedQueueTime(); + new_stats.first_sent_packet_time = pacing_controller_.FirstSentPacketTime(); + new_stats.oldest_packet_wait_time = pacing_controller_.OldestPacketWaitTime(); + new_stats.queue_size = pacing_controller_.QueueSizeData(); + OnStatsUpdated(new_stats); + last_stats_time_ = now; bool pacer_drained = pacing_controller_.QueueSizePackets() == 0 && pacing_controller_.CurrentBufferLevel().IsZero(); // If there's anything interesting to get from the pacer and this is a - // scheduled call (no scheduled call in flight), post a new scheduled stats + // scheduled call (or no scheduled call in flight), post a new scheduled stats // update. - if (!pacer_drained && (is_scheduled_call || !stats_update_scheduled_)) { - task_queue_.PostDelayedTask( - [this]() { - RTC_DCHECK_RUN_ON(&task_queue_); - MaybeUpdateStats(true); - }, - kMaxTimeBetweenStatsUpdates.ms()); - stats_update_scheduled_ = true; - } else { + if (!pacer_drained) { + if (!stats_update_scheduled_) { + // There is no pending delayed task to update stats, add one. + // Treat this call as being scheduled in order to bootstrap scheduling + // loop. + stats_update_scheduled_ = true; + is_scheduled_call = true; + } + + // Only if on the scheduled call loop do we want to schedule a new delayed + // task. 
+ if (is_scheduled_call) { + task_queue_.PostDelayedTask( + [this]() { + RTC_DCHECK_RUN_ON(&task_queue_); + MaybeUpdateStats(true); + }, + kMaxTimeBetweenStatsUpdates.ms()); + } + } else if (is_scheduled_call) { + // This is a scheduled call, signing out since there's nothing interesting + // left to check. stats_update_scheduled_ = false; } } TaskQueuePacedSender::Stats TaskQueuePacedSender::GetStats() const { - rtc::CritScope cs(&stats_crit_); + MutexLock lock(&stats_mutex_); return current_stats_; } diff --git a/modules/pacing/task_queue_paced_sender.h b/modules/pacing/task_queue_paced_sender.h index 8b47f5ee3d..ba4f4667b7 100644 --- a/modules/pacing/task_queue_paced_sender.h +++ b/modules/pacing/task_queue_paced_sender.h @@ -29,7 +29,7 @@ #include "modules/pacing/packet_router.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" @@ -38,15 +38,20 @@ namespace webrtc { class Clock; class RtcEventLog; -class TaskQueuePacedSender : public RtpPacketPacer, - public RtpPacketSender, - private PacingController::PacketSender { +class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { public: - TaskQueuePacedSender(Clock* clock, - PacketRouter* packet_router, - RtcEventLog* event_log, - const WebRtcKeyValueConfig* field_trials, - TaskQueueFactory* task_queue_factory); + // The |hold_back_window| parameter sets a lower bound on time to sleep if + // there is currently a pacer queue and packets can't immediately be + // processed. Increasing this reduces thread wakeups at the expense of higher + // latency. + // TODO(bugs.webrtc.org/10809): Remove default value for hold_back_window. 
+ TaskQueuePacedSender( + Clock* clock, + PacketRouter* packet_router, + RtcEventLog* event_log, + const WebRtcKeyValueConfig* field_trials, + TaskQueueFactory* task_queue_factory, + TimeDelta hold_back_window = PacingController::kMinSleepTime); ~TaskQueuePacedSender() override; @@ -99,7 +104,8 @@ class TaskQueuePacedSender : public RtpPacketPacer, // specified by SetPacingRates() if needed to achieve this goal. void SetQueueTimeLimit(TimeDelta limit) override; - private: + protected: + // Exposed as protected for test. struct Stats { Stats() : oldest_packet_wait_time(TimeDelta::Zero()), @@ -110,7 +116,9 @@ class TaskQueuePacedSender : public RtpPacketPacer, TimeDelta expected_queue_time; absl::optional first_sent_packet_time; }; + virtual void OnStatsUpdated(const Stats& stats); + private: // Check if it is time to send packets, or schedule a delayed task if not. // Use Timestamp::MinusInfinity() to indicate that this call has _not_ // been scheduled by the pacing controller. If this is the case, check if @@ -118,20 +126,11 @@ class TaskQueuePacedSender : public RtpPacketPacer, // method again with desired (finite) scheduled process time. void MaybeProcessPackets(Timestamp scheduled_process_time); - // Methods implementing PacedSenderController:PacketSender. - - void SendRtpPacket(std::unique_ptr packet, - const PacedPacketInfo& cluster_info) override - RTC_RUN_ON(task_queue_); - - std::vector> GeneratePadding( - DataSize size) override RTC_RUN_ON(task_queue_); - void MaybeUpdateStats(bool is_scheduled_call) RTC_RUN_ON(task_queue_); Stats GetStats() const; Clock* const clock_; - PacketRouter* const packet_router_ RTC_GUARDED_BY(task_queue_); + const TimeDelta hold_back_window_; PacingController pacing_controller_ RTC_GUARDED_BY(task_queue_); // We want only one (valid) delayed process task in flight at a time. @@ -156,8 +155,8 @@ class TaskQueuePacedSender : public RtpPacketPacer, // never drain. 
bool is_shutdown_ RTC_GUARDED_BY(task_queue_); - rtc::CriticalSection stats_crit_; - Stats current_stats_ RTC_GUARDED_BY(stats_crit_); + mutable Mutex stats_mutex_; + Stats current_stats_ RTC_GUARDED_BY(stats_mutex_); rtc::TaskQueue task_queue_; }; diff --git a/modules/pacing/task_queue_paced_sender_unittest.cc b/modules/pacing/task_queue_paced_sender_unittest.cc index 0c3a092400..d389e271f7 100644 --- a/modules/pacing/task_queue_paced_sender_unittest.cc +++ b/modules/pacing/task_queue_paced_sender_unittest.cc @@ -10,12 +10,14 @@ #include "modules/pacing/task_queue_paced_sender.h" +#include #include #include #include #include #include +#include "api/transport/network_types.h" #include "modules/pacing/packet_router.h" #include "modules/utility/include/mock/mock_process_thread.h" #include "test/field_trial.h" @@ -24,6 +26,7 @@ #include "test/time_controller/simulated_time_controller.h" using ::testing::_; +using ::testing::AtLeast; using ::testing::Return; using ::testing::SaveArg; @@ -37,43 +40,91 @@ constexpr size_t kDefaultPacketSize = 1234; class MockPacketRouter : public PacketRouter { public: - MOCK_METHOD2(SendPacket, - void(std::unique_ptr packet, - const PacedPacketInfo& cluster_info)); - MOCK_METHOD1( - GeneratePadding, - std::vector>(size_t target_size_bytes)); + MOCK_METHOD(void, + SendPacket, + (std::unique_ptr packet, + const PacedPacketInfo& cluster_info), + (override)); + MOCK_METHOD(std::vector>, + FetchFec, + (), + (override)); + MOCK_METHOD(std::vector>, + GeneratePadding, + (DataSize target_size), + (override)); }; + +class StatsUpdateObserver { + public: + StatsUpdateObserver() = default; + virtual ~StatsUpdateObserver() = default; + + virtual void OnStatsUpdated() = 0; +}; + +class TaskQueuePacedSenderForTest : public TaskQueuePacedSender { + public: + TaskQueuePacedSenderForTest(Clock* clock, + PacketRouter* packet_router, + RtcEventLog* event_log, + const WebRtcKeyValueConfig* field_trials, + TaskQueueFactory* task_queue_factory, + TimeDelta 
hold_back_window) + : TaskQueuePacedSender(clock, + packet_router, + event_log, + field_trials, + task_queue_factory, + hold_back_window) {} + + void OnStatsUpdated(const Stats& stats) override { + ++num_stats_updates_; + TaskQueuePacedSender::OnStatsUpdated(stats); + } + + size_t num_stats_updates_ = 0; +}; + +std::vector> GeneratePadding( + DataSize target_size) { + // 224 bytes is the max padding size for plain padding packets generated by + // RTPSender::GeneratePadding(). + const DataSize kMaxPaddingPacketSize = DataSize::Bytes(224); + DataSize padding_generated = DataSize::Zero(); + std::vector> padding_packets; + while (padding_generated < target_size) { + DataSize packet_size = + std::min(target_size - padding_generated, kMaxPaddingPacketSize); + padding_generated += packet_size; + auto padding_packet = + std::make_unique(/*extensions=*/nullptr); + padding_packet->set_packet_type(RtpPacketMediaType::kPadding); + padding_packet->SetPadding(packet_size.bytes()); + padding_packets.push_back(std::move(padding_packet)); + } + return padding_packets; +} + } // namespace namespace test { -class TaskQueuePacedSenderTest : public ::testing::Test { - public: - TaskQueuePacedSenderTest() - : time_controller_(Timestamp::ms(1234)), - pacer_(time_controller_.GetClock(), - &packet_router_, - /*event_log=*/nullptr, - /*field_trials=*/nullptr, - time_controller_.GetTaskQueueFactory()) {} - - protected: - std::unique_ptr BuildRtpPacket(RtpPacketToSend::Type type) { + std::unique_ptr BuildRtpPacket(RtpPacketMediaType type) { auto packet = std::make_unique(nullptr); packet->set_packet_type(type); switch (type) { - case RtpPacketToSend::Type::kAudio: + case RtpPacketMediaType::kAudio: packet->SetSsrc(kAudioSsrc); break; - case RtpPacketToSend::Type::kVideo: + case RtpPacketMediaType::kVideo: packet->SetSsrc(kVideoSsrc); break; - case RtpPacketToSend::Type::kRetransmission: - case RtpPacketToSend::Type::kPadding: + case RtpPacketMediaType::kRetransmission: + case 
RtpPacketMediaType::kPadding: packet->SetSsrc(kVideoRtxSsrc); break; - case RtpPacketToSend::Type::kForwardErrorCorrection: + case RtpPacketMediaType::kForwardErrorCorrection: packet->SetSsrc(kFlexFecSsrc); break; } @@ -83,7 +134,7 @@ class TaskQueuePacedSenderTest : public ::testing::Test { } std::vector> GeneratePackets( - RtpPacketToSend::Type type, + RtpPacketMediaType type, size_t num_packets) { std::vector> packets; for (size_t i = 0; i < num_packets; ++i) { @@ -92,85 +143,414 @@ class TaskQueuePacedSenderTest : public ::testing::Test { return packets; } - Timestamp CurrentTime() { return time_controller_.GetClock()->CurrentTime(); } + TEST(TaskQueuePacedSenderTest, PacesPackets) { + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + PacingController::kMinSleepTime); - GlobalSimulatedTimeController time_controller_; - MockPacketRouter packet_router_; - TaskQueuePacedSender pacer_; -}; + // Insert a number of packets, covering one second. + static constexpr size_t kPacketsToSend = 42; + pacer.SetPacingRates( + DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend), + DataRate::Zero()); + pacer.EnqueuePackets( + GeneratePackets(RtpPacketMediaType::kVideo, kPacketsToSend)); -TEST_F(TaskQueuePacedSenderTest, PacesPackets) { - // Insert a number of packets, covering one second. - static constexpr size_t kPacketsToSend = 42; - pacer_.SetPacingRates(DataRate::bps(kDefaultPacketSize * 8 * kPacketsToSend), - DataRate::Zero()); - pacer_.EnqueuePackets( - GeneratePackets(RtpPacketToSend::Type::kVideo, kPacketsToSend)); - - // Expect all of them to be sent. 
- size_t packets_sent = 0; - Timestamp end_time = Timestamp::PlusInfinity(); - EXPECT_CALL(packet_router_, SendPacket) - .WillRepeatedly([&](std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { - ++packets_sent; - if (packets_sent == kPacketsToSend) { - end_time = time_controller_.GetClock()->CurrentTime(); - } - }); - - const Timestamp start_time = time_controller_.GetClock()->CurrentTime(); - - // Packets should be sent over a period of close to 1s. Expect a little lower - // than this since initial probing is a bit quicker. - time_controller_.AdvanceTime(TimeDelta::seconds(1)); - EXPECT_EQ(packets_sent, kPacketsToSend); - ASSERT_TRUE(end_time.IsFinite()); - EXPECT_NEAR((end_time - start_time).ms(), 1000.0, 50.0); -} + // Expect all of them to be sent. + size_t packets_sent = 0; + Timestamp end_time = Timestamp::PlusInfinity(); + EXPECT_CALL(packet_router, SendPacket) + .WillRepeatedly([&](std::unique_ptr packet, + const PacedPacketInfo& cluster_info) { + ++packets_sent; + if (packets_sent == kPacketsToSend) { + end_time = time_controller.GetClock()->CurrentTime(); + } + }); -TEST_F(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) { - // Insert a number of packets to be sent 200ms apart. - const size_t kPacketsPerSecond = 5; - const DataRate kPacingRate = - DataRate::bps(kDefaultPacketSize * 8 * kPacketsPerSecond); - pacer_.SetPacingRates(kPacingRate, DataRate::Zero()); - - // Send some initial packets to be rid of any probes. - EXPECT_CALL(packet_router_, SendPacket).Times(kPacketsPerSecond); - pacer_.EnqueuePackets( - GeneratePackets(RtpPacketToSend::Type::kVideo, kPacketsPerSecond)); - time_controller_.AdvanceTime(TimeDelta::seconds(1)); - - // Insert three packets, and record send time of each of them. - // After the second packet is sent, double the send rate so we can - // check the third packets is sent after half the wait time. 
- Timestamp first_packet_time = Timestamp::MinusInfinity(); - Timestamp second_packet_time = Timestamp::MinusInfinity(); - Timestamp third_packet_time = Timestamp::MinusInfinity(); - - EXPECT_CALL(packet_router_, SendPacket) - .Times(3) - .WillRepeatedly([&](std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { - if (first_packet_time.IsInfinite()) { - first_packet_time = CurrentTime(); - } else if (second_packet_time.IsInfinite()) { - second_packet_time = CurrentTime(); - pacer_.SetPacingRates(2 * kPacingRate, DataRate::Zero()); - } else { - third_packet_time = CurrentTime(); - } - }); - - pacer_.EnqueuePackets(GeneratePackets(RtpPacketToSend::Type::kVideo, 3)); - time_controller_.AdvanceTime(TimeDelta::ms(500)); - ASSERT_TRUE(third_packet_time.IsFinite()); - EXPECT_NEAR((second_packet_time - first_packet_time).ms(), 200.0, - 1.0); - EXPECT_NEAR((third_packet_time - second_packet_time).ms(), 100.0, - 1.0); -} + const Timestamp start_time = time_controller.GetClock()->CurrentTime(); + + // Packets should be sent over a period of close to 1s. Expect a little + // lower than this since initial probing is a bit quicker. + time_controller.AdvanceTime(TimeDelta::Seconds(1)); + EXPECT_EQ(packets_sent, kPacketsToSend); + ASSERT_TRUE(end_time.IsFinite()); + EXPECT_NEAR((end_time - start_time).ms(), 1000.0, 50.0); + } + + TEST(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) { + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + PacingController::kMinSleepTime); + + // Insert a number of packets to be sent 200ms apart. 
+ const size_t kPacketsPerSecond = 5; + const DataRate kPacingRate = + DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsPerSecond); + pacer.SetPacingRates(kPacingRate, DataRate::Zero()); + + // Send some initial packets to be rid of any probes. + EXPECT_CALL(packet_router, SendPacket).Times(kPacketsPerSecond); + pacer.EnqueuePackets( + GeneratePackets(RtpPacketMediaType::kVideo, kPacketsPerSecond)); + time_controller.AdvanceTime(TimeDelta::Seconds(1)); + + // Insert three packets, and record send time of each of them. + // After the second packet is sent, double the send rate so we can + // check the third packets is sent after half the wait time. + Timestamp first_packet_time = Timestamp::MinusInfinity(); + Timestamp second_packet_time = Timestamp::MinusInfinity(); + Timestamp third_packet_time = Timestamp::MinusInfinity(); + + EXPECT_CALL(packet_router, SendPacket) + .Times(3) + .WillRepeatedly([&](std::unique_ptr packet, + const PacedPacketInfo& cluster_info) { + if (first_packet_time.IsInfinite()) { + first_packet_time = time_controller.GetClock()->CurrentTime(); + } else if (second_packet_time.IsInfinite()) { + second_packet_time = time_controller.GetClock()->CurrentTime(); + pacer.SetPacingRates(2 * kPacingRate, DataRate::Zero()); + } else { + third_packet_time = time_controller.GetClock()->CurrentTime(); + } + }); + + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 3)); + time_controller.AdvanceTime(TimeDelta::Millis(500)); + ASSERT_TRUE(third_packet_time.IsFinite()); + EXPECT_NEAR((second_packet_time - first_packet_time).ms(), 200.0, + 1.0); + EXPECT_NEAR((third_packet_time - second_packet_time).ms(), 100.0, + 1.0); + } + + TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) { + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, 
time_controller.GetTaskQueueFactory(), + PacingController::kMinSleepTime); + + const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125); + const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); + const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate; + + pacer.SetPacingRates(kPacingDataRate, DataRate::Zero()); + + // Add some initial video packets, only one should be sent. + EXPECT_CALL(packet_router, SendPacket); + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10)); + time_controller.AdvanceTime(TimeDelta::Zero()); + ::testing::Mock::VerifyAndClearExpectations(&packet_router); + + // Advance time, but still before next packet should be sent. + time_controller.AdvanceTime(kPacketPacingTime / 2); + + // Insert an audio packet, it should be sent immediately. + EXPECT_CALL(packet_router, SendPacket); + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kAudio, 1)); + time_controller.AdvanceTime(TimeDelta::Zero()); + ::testing::Mock::VerifyAndClearExpectations(&packet_router); + } + + TEST(TaskQueuePacedSenderTest, SleepsDuringCoalscingWindow) { + const TimeDelta kCoalescingWindow = TimeDelta::Millis(5); + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + kCoalescingWindow); + + // Set rates so one packet adds one ms of buffer level. + const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); + const TimeDelta kPacketPacingTime = TimeDelta::Millis(1); + const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime; + + pacer.SetPacingRates(kPacingDataRate, DataRate::Zero()); + + // Add 10 packets. The first should be sent immediately since the buffers + // are clear. 
+ EXPECT_CALL(packet_router, SendPacket); + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10)); + time_controller.AdvanceTime(TimeDelta::Zero()); + ::testing::Mock::VerifyAndClearExpectations(&packet_router); + + // Advance time to 1ms before the coalescing window ends. No packets should + // be sent. + EXPECT_CALL(packet_router, SendPacket).Times(0); + time_controller.AdvanceTime(kCoalescingWindow - TimeDelta::Millis(1)); + + // Advance time to where coalescing window ends. All packets that should + // have been sent up til now will be sent. + EXPECT_CALL(packet_router, SendPacket).Times(5); + time_controller.AdvanceTime(TimeDelta::Millis(1)); + ::testing::Mock::VerifyAndClearExpectations(&packet_router); + } + + TEST(TaskQueuePacedSenderTest, ProbingOverridesCoalescingWindow) { + const TimeDelta kCoalescingWindow = TimeDelta::Millis(5); + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + kCoalescingWindow); + + // Set rates so one packet adds one ms of buffer level. + const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); + const TimeDelta kPacketPacingTime = TimeDelta::Millis(1); + const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime; + + pacer.SetPacingRates(kPacingDataRate, DataRate::Zero()); + + // Add 10 packets. The first should be sent immediately since the buffers + // are clear. This will also trigger the probe to start. + EXPECT_CALL(packet_router, SendPacket).Times(AtLeast(1)); + pacer.CreateProbeCluster(kPacingDataRate * 2, 17); + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10)); + time_controller.AdvanceTime(TimeDelta::Zero()); + ::testing::Mock::VerifyAndClearExpectations(&packet_router); + + // Advance time to 1ms before the coalescing window ends. 
Packets should be + // flying. + EXPECT_CALL(packet_router, SendPacket).Times(AtLeast(1)); + time_controller.AdvanceTime(kCoalescingWindow - TimeDelta::Millis(1)); + } + + TEST(TaskQueuePacedSenderTest, RespectedMinTimeBetweenStatsUpdates) { + const TimeDelta kCoalescingWindow = TimeDelta::Millis(5); + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + kCoalescingWindow); + const DataRate kPacingDataRate = DataRate::KilobitsPerSec(300); + pacer.SetPacingRates(kPacingDataRate, DataRate::Zero()); + + const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1); + + // Nothing inserted, no stats updates yet. + EXPECT_EQ(pacer.num_stats_updates_, 0u); + + // Insert one packet, stats should be updated. + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1)); + time_controller.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(pacer.num_stats_updates_, 1u); + + // Advance time half of the min stats update interval, and trigger a + // refresh - stats should not be updated yet. + time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2); + pacer.EnqueuePackets({}); + time_controller.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(pacer.num_stats_updates_, 1u); + + // Advance time the next half, now stats update is triggered. 
+ time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2); + pacer.EnqueuePackets({}); + time_controller.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(pacer.num_stats_updates_, 2u); + } + + TEST(TaskQueuePacedSenderTest, ThrottlesStatsUpdates) { + const TimeDelta kCoalescingWindow = TimeDelta::Millis(5); + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + kCoalescingWindow); + + // Set rates so one packet adds 10ms of buffer level. + const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); + const TimeDelta kPacketPacingTime = TimeDelta::Millis(10); + const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime; + const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1); + const TimeDelta kMaxTimeBetweenStatsUpdates = TimeDelta::Millis(33); + + // Nothing inserted, no stats updates yet. + size_t num_expected_stats_updates = 0; + EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates); + pacer.SetPacingRates(kPacingDataRate, DataRate::Zero()); + time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates); + // Updating pacing rates refreshes stats. + EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates); + + // Record time when we insert first packet, this triggers the scheduled + // stats updating. + Clock* const clock = time_controller.GetClock(); + const Timestamp start_time = clock->CurrentTime(); + + while (clock->CurrentTime() - start_time <= + kMaxTimeBetweenStatsUpdates - kPacketPacingTime) { + // Enqueue packet, expect stats update. 
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1)); + time_controller.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates); + + // Advance time to halfway through pacing time, expect another stats + // update. + time_controller.AdvanceTime(kPacketPacingTime / 2); + pacer.EnqueuePackets({}); + time_controller.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates); + // Advance time the rest of the way. + time_controller.AdvanceTime(kPacketPacingTime / 2); + } + + // At this point, the pace queue is drained so there is no more interesting + // update to be made - but there is still a scheduled task that should run + // |kMaxTimeBetweenStatsUpdates| after the first update. + time_controller.AdvanceTime(start_time + kMaxTimeBetweenStatsUpdates - + clock->CurrentTime()); + EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates); + + // Advance time a significant time - don't expect any more calls as stats + // updating does not happen when queue is drained. + time_controller.AdvanceTime(TimeDelta::Millis(400)); + EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates); + } + + TEST(TaskQueuePacedSenderTest, SchedulesProbeAtSetTime) { + ScopedFieldTrials trials("WebRTC-Bwe-ProbingBehavior/min_probe_delta:1ms/"); + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + PacingController::kMinSleepTime); + + // Set rates so one packet adds 4ms of buffer level. 
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); + const TimeDelta kPacketPacingTime = TimeDelta::Millis(4); + const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime; + pacer.SetPacingRates(kPacingDataRate, /*padding_rate=*/DataRate::Zero()); + EXPECT_CALL(packet_router, FetchFec).WillRepeatedly([]() { + return std::vector>(); + }); + EXPECT_CALL(packet_router, GeneratePadding(_)) + .WillRepeatedly( + [](DataSize target_size) { return GeneratePadding(target_size); }); + + // Enqueue two packets, only the first is sent immediately and the next + // will be scheduled for sending in 4ms. + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 2)); + const int kNotAProbe = PacedPacketInfo::kNotAProbe; + EXPECT_CALL( + packet_router, + SendPacket(_, ::testing::Field(&PacedPacketInfo::probe_cluster_id, + kNotAProbe))); + // Advance to less than 3ms before next packet send time. + time_controller.AdvanceTime(TimeDelta::Micros(1001)); + + // Trigger a probe at 4x the current pacing rate and insert the number of + // packets the probe needs. + const DataRate kProbeRate = 2 * kPacingDataRate; + const int kProbeClusterId = 1; + pacer.CreateProbeCluster(kProbeRate, kProbeClusterId); + + // Expected size for each probe in a cluster is twice the expected bits + // sent during min_probe_delta. 
+ // Expect one additional call since probe always starts with a small + // padding packet. + const TimeDelta kProbeTimeDelta = TimeDelta::Millis(2); + const DataSize kProbeSize = kProbeRate * kProbeTimeDelta; + const size_t kNumPacketsInProbe = + (kProbeSize + kPacketSize - DataSize::Bytes(1)) / kPacketSize; + EXPECT_CALL( + packet_router, + SendPacket(_, ::testing::Field(&PacedPacketInfo::probe_cluster_id, + kProbeClusterId))) + .Times(kNumPacketsInProbe + 1); + + pacer.EnqueuePackets( + GeneratePackets(RtpPacketMediaType::kVideo, kNumPacketsInProbe)); + time_controller.AdvanceTime(TimeDelta::Zero()); + + // The pacer should have scheduled the next probe to be sent in + // kProbeTimeDelta. That there was an existing scheduled call less than + // PacingController::kMinSleepTime before this should not matter. + + EXPECT_CALL( + packet_router, + SendPacket(_, ::testing::Field(&PacedPacketInfo::probe_cluster_id, + kProbeClusterId))) + .Times(AtLeast(1)); + time_controller.AdvanceTime(TimeDelta::Millis(2)); + } + + TEST(TaskQueuePacedSenderTest, NoMinSleepTimeWhenProbing) { + // Set min_probe_delta to be less than kMinSleepTime (1ms). + const TimeDelta kMinProbeDelta = TimeDelta::Micros(100); + ScopedFieldTrials trials( + "WebRTC-Bwe-ProbingBehavior/min_probe_delta:100us/"); + GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); + MockPacketRouter packet_router; + TaskQueuePacedSenderForTest pacer( + time_controller.GetClock(), &packet_router, + /*event_log=*/nullptr, + /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(), + PacingController::kMinSleepTime); + + // Set rates so one packet adds 4ms of buffer level. 
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); + const TimeDelta kPacketPacingTime = TimeDelta::Millis(4); + const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime; + pacer.SetPacingRates(kPacingDataRate, /*padding_rate=*/DataRate::Zero()); + EXPECT_CALL(packet_router, FetchFec).WillRepeatedly([]() { + return std::vector>(); + }); + EXPECT_CALL(packet_router, GeneratePadding) + .WillRepeatedly( + [](DataSize target_size) { return GeneratePadding(target_size); }); + + // Set a high probe rate. + const int kProbeClusterId = 1; + DataRate kProbingRate = kPacingDataRate * 10; + pacer.CreateProbeCluster(kProbingRate, kProbeClusterId); + + // Advance time less than PacingController::kMinSleepTime, probing packets + // for the first millisecond should be sent immediately. Min delta between + // probes is 2x 100us, meaning 4 times per ms we will get least one call to + // SendPacket(). + DataSize data_sent = DataSize::Zero(); + EXPECT_CALL( + packet_router, + SendPacket(_, ::testing::Field(&PacedPacketInfo::probe_cluster_id, + kProbeClusterId))) + .Times(AtLeast(4)) + .WillRepeatedly([&](std::unique_ptr packet, + const PacedPacketInfo&) { + data_sent += + DataSize::Bytes(packet->payload_size() + packet->padding_size()); + }); + + // Add one packet to kickstart probing, the rest will be padding packets. + pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1)); + time_controller.AdvanceTime(kMinProbeDelta); + + // Verify the amount of probing data sent. + // Probe always starts with a small (1 byte) padding packet that's not + // counted into the probe rate here. + EXPECT_EQ(data_sent, + kProbingRate * TimeDelta::Millis(1) + DataSize::Bytes(1)); + } } // namespace test } // namespace webrtc diff --git a/modules/recording/BUILD.gn b/modules/recording/BUILD.gn new file mode 100644 index 0000000000..9a9ffde5e8 --- /dev/null +++ b/modules/recording/BUILD.gn @@ -0,0 +1,28 @@ +# Copyright (c) 2014 The WebRTC project authors. 
All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_source_set("recording") { + visibility = [ "*" ] + sources = [ + "recorder.h", + "recorder.cc", + ] + deps = [ + ] + + public_configs = [ + "//third_party/ffmpeg:ffmpeg_dependent_config", + ] +} diff --git a/modules/recording/recorder.cc b/modules/recording/recorder.cc new file mode 100644 index 0000000000..f50c10dcec --- /dev/null +++ b/modules/recording/recorder.cc @@ -0,0 +1,357 @@ +#include "modules/recording/recorder.h" + +#include + +extern "C" { +#include +#include +} + +#include "rtc_base/logging.h" + +namespace webrtc { + +static inline int64_t currentTimeMs() { + return std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()).count(); +} + +Recorder::Frame::Frame(const uint8_t* payload, uint32_t length) { + this->payload = new uint8_t[length]; + memcpy(this->payload, payload, length); + this->length = length; + this->timestamp = currentTimeMs(); + this->duration = 0; + this->is_video = false; + this->is_key_frame = false; +} + +Recorder::Frame::~Frame() { + delete[] payload; +} + +Recorder::Recorder(TaskQueueFactory* task_queue_factory) + : got_audio_(false), + sample_rate_(0), + channel_num_(0), + got_video_(false), + width_(0), + height_(0), + stream_opened_(false), + context_(nullptr), + audio_stream_(nullptr), + video_stream_(nullptr), + record_queue_(task_queue_factory->CreateTaskQueue( + "recorder", TaskQueueFactory::Priority::NORMAL)), + timestamp_offset_(0), + added_audio_frames_(0), + added_video_frames_(0), + drained_frames_(0) { 
+} + +Recorder::~Recorder() { Stop(); } + +int32_t Recorder::Start(const std::string& path) { + const char* format_name = "matroska"; + avformat_alloc_output_context2(&context_, nullptr, format_name, + path.c_str()); + if (!context_) { + RTC_LOG(LS_ERROR) << "Recorder::Start error, alloc context fail"; + return -11; + } + int res = avio_open(&context_->pb, context_->url, AVIO_FLAG_WRITE); + if (res < 0) { + RTC_LOG(LS_ERROR) << "Recorder::Start error, open fail " + << av_err2str(res); + avformat_free_context(context_); + context_ = nullptr; + return -12; + } + + RTC_LOG(LS_INFO) << "Recorder::Start success"; + return 0; +} + +void Recorder::AddVideoFrame(const EncodedImage* frame, + VideoCodecType video_codec) { + if (++added_video_frames_ % 125 == 1) { + RTC_LOG(LS_INFO) << "Recorder::AddVideoFrame " << added_video_frames_ + << " times"; + } + if (!got_video_ && frame->_frameType == VideoFrameType::kVideoFrameKey) { + got_video_ = true; + video_codec_ = video_codec; + width_ = frame->_encodedWidth; + height_ = frame->_encodedHeight; + } + + std::shared_ptr media_frame(new Frame(frame->data(), frame->size())); + media_frame->is_video = true; + media_frame->is_key_frame = + frame->_frameType == VideoFrameType::kVideoFrameKey; + + if (!last_video_frame_) { + last_video_frame_ = media_frame; + return; + } + + last_video_frame_->duration = + media_frame->timestamp - last_video_frame_->timestamp; + if (last_video_frame_->duration <= 0) { + last_video_frame_->duration = 1; + media_frame->timestamp = last_video_frame_->timestamp + 1; + } + + if (last_video_frame_->is_key_frame && !video_key_frame_) { + video_key_frame_ = last_video_frame_; + } + + frames_.push(last_video_frame_); + last_video_frame_ = media_frame; + + record_queue_.PostTask([this]() { drainFrames(); }); +} + +void Recorder::AddAudioFrame(int32_t sample_rate, int32_t channel_num, + const uint8_t* frame, uint32_t size, + AudioEncoder::CodecType audio_codec) { + if (++added_audio_frames_ % 500 == 1) { + 
RTC_LOG(LS_INFO) << "Recorder::AddAudioFrame " << added_audio_frames_ + << " times"; + } + if (!frame || !size) { + return; + } + + if (!got_audio_) { + got_audio_ = true; + audio_codec_ = audio_codec; + sample_rate_ = sample_rate; + channel_num_ = channel_num; + } + + std::shared_ptr media_frame(new Frame(frame, size)); + + if (!last_audio_frame_) { + last_audio_frame_ = media_frame; + return; + } + + last_audio_frame_->duration = + media_frame->timestamp - last_audio_frame_->timestamp; + if (last_audio_frame_->duration <= 0) { + last_audio_frame_->duration = 1; + media_frame->timestamp = last_audio_frame_->timestamp + 1; + } + + frames_.push(last_audio_frame_); + last_audio_frame_ = media_frame; + + record_queue_.PostTask([this]() { drainFrames(); }); +} + +void Recorder::Stop() { + if (context_) { + if (audio_stream_ && video_stream_) { + av_write_trailer(context_); + } + avio_close(context_->pb); + avformat_free_context(context_); + context_ = nullptr; + } + audio_stream_ = nullptr; + video_stream_ = nullptr; +} + +void Recorder::openStreams() { + if (got_audio_ && got_video_ && video_key_frame_ && !stream_opened_) { + stream_opened_ = true; + + enum AVCodecID audio_codec_id = AV_CODEC_ID_NONE; + switch (audio_codec_) { + case AudioEncoder::CodecType::kOpus: + audio_codec_id = AV_CODEC_ID_OPUS; + break; + default: + break; + } + enum AVCodecID video_codec_id = AV_CODEC_ID_NONE; + switch (video_codec_) { + case kVideoCodecVP8: + video_codec_id = AV_CODEC_ID_VP8; + break; + case kVideoCodecVP9: + video_codec_id = AV_CODEC_ID_VP9; + break; + case kVideoCodecH264: + video_codec_id = AV_CODEC_ID_H264; + break; +#ifndef DISABLE_H265 + case kVideoCodecH265: + video_codec_id = AV_CODEC_ID_H265; + break; +#endif + default: + break; + } + if (audio_codec_id == AV_CODEC_ID_NONE || + video_codec_id == AV_CODEC_ID_NONE) { + RTC_LOG(LS_ERROR) + << "Recorder::openStreams error, unsupported codec, audio " + << audio_codec_ << ", video " << video_codec_; + return; + } + + 
AVStream* audio_stream = avformat_new_stream(context_, nullptr); + if (!audio_stream) { + RTC_LOG(LS_ERROR) + << "Recorder::openStreams error, open audio stream fail"; + return; + } + + AVCodecParameters* par = audio_stream->codecpar; + par->codec_type = AVMEDIA_TYPE_AUDIO; + par->codec_id = audio_codec_id; + par->sample_rate = sample_rate_; + par->channels = channel_num_; + par->channel_layout = av_get_default_channel_layout(par->channels); + switch (audio_codec_id) { + case AV_CODEC_ID_AAC: // AudioSpecificConfig 48000-2 + par->extradata_size = 2; + par->extradata = (uint8_t*)av_malloc( + par->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE); + par->extradata[0] = 0x11; + par->extradata[1] = 0x90; + break; + case AV_CODEC_ID_OPUS: // OpusHead 48000-2 + par->extradata_size = 19; + par->extradata = (uint8_t*)av_malloc( + par->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE); + par->extradata[0] = 'O'; + par->extradata[1] = 'p'; + par->extradata[2] = 'u'; + par->extradata[3] = 's'; + par->extradata[4] = 'H'; + par->extradata[5] = 'e'; + par->extradata[6] = 'a'; + par->extradata[7] = 'd'; + // Version + par->extradata[8] = 1; + // Channel Count + par->extradata[9] = 2; + // Pre-skip + par->extradata[10] = 0x38; + par->extradata[11] = 0x1; + // Input Sample Rate (Hz) + par->extradata[12] = 0x80; + par->extradata[13] = 0xbb; + par->extradata[14] = 0; + par->extradata[15] = 0; + // Output Gain (Q7.8 in dB) + par->extradata[16] = 0; + par->extradata[17] = 0; + // Mapping Family + par->extradata[18] = 0; + break; + default: + break; + } + + AVStream* video_stream = avformat_new_stream(context_, nullptr); + if (!video_stream) { + RTC_LOG(LS_ERROR) + << "Recorder::openStreams error, open video stream fail"; + return; + } + + par = video_stream->codecpar; + par->codec_type = AVMEDIA_TYPE_VIDEO; + par->codec_id = video_codec_id; + par->width = width_; + par->height = height_; + if (video_codec_id == AV_CODEC_ID_H264 || + video_codec_id == AV_CODEC_ID_H265) { // extradata + 
AVCodecParserContext* parser = av_parser_init(video_codec_id); + if (!parser) { + RTC_LOG(LS_ERROR) + << "Recorder::openStreams error, av_parser_init fail"; + return; + } + + int size = + parser->parser->split(nullptr, video_key_frame_->payload, + video_key_frame_->length); + if (size > 0) { + par->extradata_size = size; + par->extradata = (uint8_t*)av_malloc( + par->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE); + memcpy(par->extradata, video_key_frame_->payload, + par->extradata_size); + } else { + RTC_LOG(LS_WARNING) << "Recorder::openStreams error, can't " + "find video extradata"; + } + + av_parser_close(parser); + } + + if (video_codec_id == AV_CODEC_ID_H265) { + par->codec_tag = 0x31637668; // hvc1 + } + + int res = avformat_write_header(context_, nullptr); + if (res < 0) { + RTC_LOG(LS_ERROR) + << "Recorder::openStreams error, avformat_write_header fail " + << av_err2str(res); + return; + } + + audio_stream_ = audio_stream; + video_stream_ = video_stream; + timestamp_offset_ = currentTimeMs(); + + RTC_LOG(LS_INFO) << "Recorder::openStreams success"; + } +} + +void Recorder::drainFrames() { + openStreams(); + + if (!audio_stream_ || !video_stream_) { + return; + } + + while (!frames_.empty()) { + if (++drained_frames_ % 1000 == 1) { + RTC_LOG(LS_INFO) << "Recorder::drainFrames " << drained_frames_ + << " times"; + } + std::shared_ptr frame = frames_.front(); + frames_.pop(); + + AVStream* stream = frame->is_video ? 
video_stream_ : audio_stream_; + AVPacket pkt; + + av_init_packet(&pkt); + pkt.data = frame->payload; + pkt.size = frame->length; + pkt.dts = (int64_t)((frame->timestamp - timestamp_offset_) / + (av_q2d(stream->time_base) * 1000)); + pkt.pts = pkt.dts; + pkt.duration = + (int64_t)(frame->duration / (av_q2d(stream->time_base) * 1000)); + pkt.stream_index = stream->index; + + if (frame->is_key_frame) { + pkt.flags |= AV_PKT_FLAG_KEY; + } + + int res = av_interleaved_write_frame(context_, &pkt); + if (res < 0) { + RTC_LOG(LS_ERROR) << "Recorder::drainFrames error, " + "av_interleaved_write_frame fail " + << av_err2str(res); + } + } +} +} diff --git a/modules/recording/recorder.h b/modules/recording/recorder.h new file mode 100644 index 0000000000..f24261b026 --- /dev/null +++ b/modules/recording/recorder.h @@ -0,0 +1,82 @@ +// +// Created by Piasy on 2019/12/16. +// + +#pragma once + +#include +#include +#include + +#include "api/audio_codecs/audio_encoder.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/video/encoded_image.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "rtc_base/task_queue.h" + +struct AVFormatContext; +struct AVStream; + +namespace webrtc { +class Recorder { +public: + Recorder(TaskQueueFactory* task_queue_factory); + ~Recorder(); + + int32_t Start(const std::string& path); + + void AddVideoFrame(const EncodedImage* frame, + VideoCodecType video_codec); + void AddAudioFrame(int32_t sample_rate, int32_t channel_num, + const uint8_t* frame, uint32_t size, + AudioEncoder::CodecType audio_codec); + + void Stop(); + +private: + class Frame { + public: + Frame(const uint8_t* payload, uint32_t length); + ~Frame(); + + uint8_t* payload; + uint32_t length; + int64_t timestamp; + int64_t duration; + bool is_video; + bool is_key_frame; + }; + + void openStreams(); + + void drainFrames(); + + std::shared_ptr last_audio_frame_; + std::shared_ptr last_video_frame_; + std::shared_ptr video_key_frame_; + + bool 
got_audio_; + AudioEncoder::CodecType audio_codec_; + int32_t sample_rate_; + int32_t channel_num_; + + bool got_video_; + VideoCodecType video_codec_; + int32_t width_; + int32_t height_; + + bool stream_opened_; + + AVFormatContext* context_; + AVStream* audio_stream_; + AVStream* video_stream_; + + rtc::TaskQueue record_queue_; + std::queue> frames_; + int64_t timestamp_offset_; + + int64_t added_audio_frames_; + int64_t added_video_frames_; + int64_t drained_frames_; +}; +} diff --git a/modules/remote_bitrate_estimator/BUILD.gn b/modules/remote_bitrate_estimator/BUILD.gn index 08233da02d..3f6532519e 100644 --- a/modules/remote_bitrate_estimator/BUILD.gn +++ b/modules/remote_bitrate_estimator/BUILD.gn @@ -56,9 +56,13 @@ rtc_library("remote_bitrate_estimator") { "../../rtc_base:rtc_numerics", "../../rtc_base:safe_minmax", "../../rtc_base/experiments:field_trial_parser", + "../../rtc_base/synchronization:mutex", "../../system_wrappers", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -75,6 +79,8 @@ if (!build_with_chromium) { "../../rtc_base:rtc_base_approved", "../../test:rtp_test_utils", "../rtp_rtcp", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", ] @@ -110,7 +116,6 @@ if (rtc_include_tests) { deps = [ ":remote_bitrate_estimator", "..:module_api_public", - "../..:webrtc_common", "../../api/transport:field_trial_based_config", "../../api/transport:mock_network_control", "../../api/transport:network_control", diff --git a/modules/remote_bitrate_estimator/OWNERS b/modules/remote_bitrate_estimator/OWNERS index 2a4d2fe476..9b97144ac8 100644 --- a/modules/remote_bitrate_estimator/OWNERS +++ b/modules/remote_bitrate_estimator/OWNERS @@ -4,8 +4,3 @@ asapersson@webrtc.org mflodman@webrtc.org philipel@webrtc.org srte@webrtc.org - -# These are for the common case of adding 
or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.cc b/modules/remote_bitrate_estimator/aimd_rate_control.cc index 4d2e58527c..da13176645 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control.cc +++ b/modules/remote_bitrate_estimator/aimd_rate_control.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/strings/match.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" @@ -30,19 +31,19 @@ namespace webrtc { namespace { -constexpr TimeDelta kDefaultRtt = TimeDelta::Millis<200>(); +constexpr TimeDelta kDefaultRtt = TimeDelta::Millis(200); constexpr double kDefaultBackoffFactor = 0.85; constexpr char kBweBackOffFactorExperiment[] = "WebRTC-BweBackOffFactor"; bool IsEnabled(const WebRtcKeyValueConfig& field_trials, absl::string_view key) { - return field_trials.Lookup(key).find("Enabled") == 0; + return absl::StartsWith(field_trials.Lookup(key), "Enabled"); } bool IsNotDisabled(const WebRtcKeyValueConfig& field_trials, absl::string_view key) { - return field_trials.Lookup(key).find("Disabled") != 0; + return !absl::StartsWith(field_trials.Lookup(key), "Disabled"); } double ReadBackoffFactor(const WebRtcKeyValueConfig& key_value_config) { @@ -73,7 +74,7 @@ AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config) AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config, bool send_side) : min_configured_bitrate_(congestion_controller::GetMinBitrate()), - max_configured_bitrate_(DataRate::kbps(30000)), + max_configured_bitrate_(DataRate::KilobitsPerSec(30000)), current_bitrate_(max_configured_bitrate_), latest_estimated_throughput_(current_bitrate_), link_capacity_(), @@ -92,8 +93,6 @@ AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config, 
no_bitrate_increase_in_alr_( IsEnabled(*key_value_config, "WebRTC-DontIncreaseDelayBasedBweInAlr")), - smoothing_experiment_( - IsEnabled(*key_value_config, "WebRTC-Audio-BandwidthSmoothing")), estimate_bounded_backoff_( IsNotDisabled(*key_value_config, "WebRTC-Bwe-EstimateBoundedBackoff")), @@ -101,13 +100,10 @@ AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config, IsNotDisabled(*key_value_config, "WebRTC-Bwe-EstimateBoundedIncrease")), initial_backoff_interval_("initial_backoff_interval"), - low_throughput_threshold_("low_throughput", DataRate::Zero()), link_capacity_fix_("link_capacity_fix") { // E.g - // WebRTC-BweAimdRateControlConfig/initial_backoff_interval:100ms, - // low_throughput:50kbps/ - ParseFieldTrial({&initial_backoff_interval_, &low_throughput_threshold_, - &link_capacity_fix_}, + // WebRTC-BweAimdRateControlConfig/initial_backoff_interval:100ms/ + ParseFieldTrial({&initial_backoff_interval_, &link_capacity_fix_}, key_value_config->Lookup("WebRTC-BweAimdRateControlConfig")); if (initial_backoff_interval_) { RTC_LOG(LS_INFO) << "Using aimd rate control with initial back-off interval" @@ -137,18 +133,18 @@ bool AimdRateControl::ValidEstimate() const { TimeDelta AimdRateControl::GetFeedbackInterval() const { // Estimate how often we can send RTCP if we allocate up to 5% of bandwidth // to feedback. 
- const DataSize kRtcpSize = DataSize::bytes(80); + const DataSize kRtcpSize = DataSize::Bytes(80); const DataRate rtcp_bitrate = current_bitrate_ * 0.05; const TimeDelta interval = kRtcpSize / rtcp_bitrate; - const TimeDelta kMinFeedbackInterval = TimeDelta::ms(200); - const TimeDelta kMaxFeedbackInterval = TimeDelta::ms(1000); + const TimeDelta kMinFeedbackInterval = TimeDelta::Millis(200); + const TimeDelta kMaxFeedbackInterval = TimeDelta::Millis(1000); return interval.Clamped(kMinFeedbackInterval, kMaxFeedbackInterval); } bool AimdRateControl::TimeToReduceFurther(Timestamp at_time, DataRate estimated_throughput) const { const TimeDelta bitrate_reduction_interval = - rtt_.Clamped(TimeDelta::ms(10), TimeDelta::ms(200)); + rtt_.Clamped(TimeDelta::Millis(10), TimeDelta::Millis(200)); if (at_time - time_last_bitrate_change_ >= bitrate_reduction_interval) { return true; } @@ -165,7 +161,7 @@ bool AimdRateControl::InitialTimeToReduceFurther(Timestamp at_time) const { if (!initial_backoff_interval_) { return ValidEstimate() && TimeToReduceFurther(at_time, - LatestEstimate() / 2 - DataRate::bps(1)); + LatestEstimate() / 2 - DataRate::BitsPerSec(1)); } // TODO(terelius): We could use the RTT (clamped to suitable limits) instead // of a fixed bitrate_reduction_interval. @@ -192,7 +188,7 @@ DataRate AimdRateControl::Update(const RateControlInput* input, // second. // TODO(bugs.webrtc.org/9379): The comment above doesn't match to the code. 
if (!bitrate_is_initialized_) { - const TimeDelta kInitializationTime = TimeDelta::seconds(5); + const TimeDelta kInitializationTime = TimeDelta::Seconds(5); RTC_DCHECK_LE(kBitrateWindowMs, kInitializationTime.ms()); if (time_first_throughput_estimate_.IsInfinite()) { if (input->estimated_throughput) @@ -205,7 +201,7 @@ DataRate AimdRateControl::Update(const RateControlInput* input, } } - current_bitrate_ = ChangeBitrate(current_bitrate_, *input, at_time); + ChangeBitrate(*input, at_time); return current_bitrate_; } @@ -216,7 +212,7 @@ void AimdRateControl::SetInApplicationLimitedRegion(bool in_alr) { void AimdRateControl::SetEstimate(DataRate bitrate, Timestamp at_time) { bitrate_is_initialized_ = true; DataRate prev_bitrate = current_bitrate_; - current_bitrate_ = ClampBitrate(bitrate, bitrate); + current_bitrate_ = ClampBitrate(bitrate); time_last_bitrate_change_ = at_time; if (current_bitrate_ < prev_bitrate) { time_last_bitrate_decrease_ = at_time; @@ -230,14 +226,14 @@ void AimdRateControl::SetNetworkStateEstimate( double AimdRateControl::GetNearMaxIncreaseRateBpsPerSecond() const { RTC_DCHECK(!current_bitrate_.IsZero()); - const TimeDelta kFrameInterval = TimeDelta::seconds(1) / 30; + const TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 30; DataSize frame_size = current_bitrate_ * kFrameInterval; - const DataSize kPacketSize = DataSize::bytes(1200); + const DataSize kPacketSize = DataSize::Bytes(1200); double packets_per_frame = std::ceil(frame_size / kPacketSize); DataSize avg_packet_size = frame_size / packets_per_frame; // Approximate the over-use estimator delay to 100 ms. 
- TimeDelta response_time = rtt_ + TimeDelta::ms(100); + TimeDelta response_time = rtt_ + TimeDelta::Millis(100); if (in_experiment_) response_time = response_time * 2; double increase_rate_bps_per_second = @@ -247,23 +243,22 @@ double AimdRateControl::GetNearMaxIncreaseRateBpsPerSecond() const { } TimeDelta AimdRateControl::GetExpectedBandwidthPeriod() const { - const TimeDelta kMinPeriod = - smoothing_experiment_ ? TimeDelta::ms(500) : TimeDelta::seconds(2); - const TimeDelta kDefaultPeriod = TimeDelta::seconds(3); - const TimeDelta kMaxPeriod = TimeDelta::seconds(50); + const TimeDelta kMinPeriod = TimeDelta::Seconds(2); + const TimeDelta kDefaultPeriod = TimeDelta::Seconds(3); + const TimeDelta kMaxPeriod = TimeDelta::Seconds(50); double increase_rate_bps_per_second = GetNearMaxIncreaseRateBpsPerSecond(); if (!last_decrease_) - return smoothing_experiment_ ? kMinPeriod : kDefaultPeriod; + return kDefaultPeriod; double time_to_recover_decrease_seconds = last_decrease_->bps() / increase_rate_bps_per_second; - TimeDelta period = TimeDelta::seconds(time_to_recover_decrease_seconds); + TimeDelta period = TimeDelta::Seconds(time_to_recover_decrease_seconds); return period.Clamped(kMinPeriod, kMaxPeriod); } -DataRate AimdRateControl::ChangeBitrate(DataRate new_bitrate, - const RateControlInput& input, - Timestamp at_time) { +void AimdRateControl::ChangeBitrate(const RateControlInput& input, + Timestamp at_time) { + absl::optional new_bitrate; DataRate estimated_throughput = input.estimated_throughput.value_or(latest_estimated_throughput_); if (input.estimated_throughput) @@ -274,10 +269,16 @@ DataRate AimdRateControl::ChangeBitrate(DataRate new_bitrate, // we will end up with a valid estimate. if (!bitrate_is_initialized_ && input.bw_state != BandwidthUsage::kBwOverusing) - return current_bitrate_; + return; ChangeState(input, at_time); + // We limit the new bitrate based on the troughput to avoid unlimited bitrate + // increases. 
We allow a bit more lag at very low rates to not too easily get + // stuck if the encoder produces uneven outputs. + const DataRate troughput_based_limit = + 1.5 * estimated_throughput + DataRate::KilobitsPerSec(10); + switch (rate_control_state_) { case kRcHold: break; @@ -289,7 +290,11 @@ DataRate AimdRateControl::ChangeBitrate(DataRate new_bitrate, // Do not increase the delay based estimate in alr since the estimator // will not be able to get transport feedback necessary to detect if // the new estimate is correct. - if (!(send_side_ && in_alr_ && no_bitrate_increase_in_alr_)) { + // If we have previously increased above the limit (for instance due to + // probing), we don't allow further changes. + if (current_bitrate_ < troughput_based_limit && + !(send_side_ && in_alr_ && no_bitrate_increase_in_alr_)) { + DataRate increased_bitrate = DataRate::MinusInfinity(); if (link_capacity_.has_estimate()) { // The link_capacity estimate is reset if the measured throughput // is too far from the estimate. We can therefore assume that our @@ -297,56 +302,49 @@ DataRate AimdRateControl::ChangeBitrate(DataRate new_bitrate, // increase. DataRate additive_increase = AdditiveRateIncrease(at_time, time_last_bitrate_change_); - new_bitrate += additive_increase; + increased_bitrate = current_bitrate_ + additive_increase; } else { // If we don't have an estimate of the link capacity, use faster ramp // up to discover the capacity. 
DataRate multiplicative_increase = MultiplicativeRateIncrease( - at_time, time_last_bitrate_change_, new_bitrate); - new_bitrate += multiplicative_increase; + at_time, time_last_bitrate_change_, current_bitrate_); + increased_bitrate = current_bitrate_ + multiplicative_increase; } + new_bitrate = std::min(increased_bitrate, troughput_based_limit); } time_last_bitrate_change_ = at_time; break; - case kRcDecrease: - if (estimated_throughput > low_throughput_threshold_) { - // Set bit rate to something slightly lower than the measured throughput - // to get rid of any self-induced delay. - new_bitrate = estimated_throughput * beta_; - if (new_bitrate > current_bitrate_ && !link_capacity_fix_) { - // TODO(terelius): The link_capacity estimate may be based on old - // throughput measurements. Relying on them may lead to unnecessary - // BWE drops. - if (link_capacity_.has_estimate()) { - new_bitrate = beta_ * link_capacity_.estimate(); - } - } - if (estimate_bounded_backoff_ && network_estimate_) { - new_bitrate = std::max( - new_bitrate, network_estimate_->link_capacity_lower * beta_); - } - } else { - new_bitrate = estimated_throughput; + case kRcDecrease: { + DataRate decreased_bitrate = DataRate::PlusInfinity(); + + // Set bit rate to something slightly lower than the measured throughput + // to get rid of any self-induced delay. + decreased_bitrate = estimated_throughput * beta_; + if (decreased_bitrate > current_bitrate_ && !link_capacity_fix_) { + // TODO(terelius): The link_capacity estimate may be based on old + // throughput measurements. Relying on them may lead to unnecessary + // BWE drops. 
if (link_capacity_.has_estimate()) { - new_bitrate = std::max(new_bitrate, link_capacity_.estimate()); + decreased_bitrate = beta_ * link_capacity_.estimate(); } - new_bitrate = std::min(new_bitrate, low_throughput_threshold_.Get()); } + if (estimate_bounded_backoff_ && network_estimate_) { + decreased_bitrate = std::max( + decreased_bitrate, network_estimate_->link_capacity_lower * beta_); + } + // Avoid increasing the rate when over-using. - new_bitrate = std::min(new_bitrate, current_bitrate_); + if (decreased_bitrate < current_bitrate_) { + new_bitrate = decreased_bitrate; + } if (bitrate_is_initialized_ && estimated_throughput < current_bitrate_) { - constexpr double kDegradationFactor = 0.9; - if (smoothing_experiment_ && - new_bitrate < kDegradationFactor * beta_ * current_bitrate_) { - // If bitrate decreases more than a normal back off after overuse, it - // indicates a real network degradation. We do not let such a decrease - // to determine the bandwidth estimation period. - last_decrease_ = absl::nullopt; + if (!new_bitrate.has_value()) { + last_decrease_ = DataRate::Zero(); } else { - last_decrease_ = current_bitrate_ - new_bitrate; + last_decrease_ = current_bitrate_ - *new_bitrate; } } if (estimated_throughput < link_capacity_.LowerBound()) { @@ -362,30 +360,15 @@ DataRate AimdRateControl::ChangeBitrate(DataRate new_bitrate, time_last_bitrate_change_ = at_time; time_last_bitrate_decrease_ = at_time; break; - + } default: assert(false); } - return ClampBitrate(new_bitrate, estimated_throughput); -} -DataRate AimdRateControl::ClampBitrate(DataRate new_bitrate, - DataRate estimated_throughput) const { - // Allow the estimate to increase as long as alr is not detected to ensure - // that there is no BWE values that can make the estimate stuck at a too - // low bitrate. If an encoder can not produce the bitrate necessary to - // fully use the capacity, alr will sooner or later trigger. 
- if (!(send_side_ && no_bitrate_increase_in_alr_)) { - // Don't change the bit rate if the send side is too far off. - // We allow a bit more lag at very low rates to not too easily get stuck if - // the encoder produces uneven outputs. - const DataRate max_bitrate = - 1.5 * estimated_throughput + DataRate::kbps(10); - if (new_bitrate > current_bitrate_ && new_bitrate > max_bitrate) { - new_bitrate = std::max(current_bitrate_, max_bitrate); - } - } + current_bitrate_ = ClampBitrate(new_bitrate.value_or(current_bitrate_)); +} +DataRate AimdRateControl::ClampBitrate(DataRate new_bitrate) const { if (estimate_bounded_increase_ && network_estimate_) { DataRate upper_bound = network_estimate_->link_capacity_upper; new_bitrate = std::min(new_bitrate, upper_bound); @@ -404,7 +387,7 @@ DataRate AimdRateControl::MultiplicativeRateIncrease( alpha = pow(alpha, std::min(time_since_last_update.seconds(), 1.0)); } DataRate multiplicative_increase = - std::max(current_bitrate * (alpha - 1.0), DataRate::bps(1000)); + std::max(current_bitrate * (alpha - 1.0), DataRate::BitsPerSec(1000)); return multiplicative_increase; } @@ -413,7 +396,7 @@ DataRate AimdRateControl::AdditiveRateIncrease(Timestamp at_time, double time_period_seconds = (at_time - last_time).seconds(); double data_rate_increase_bps = GetNearMaxIncreaseRateBpsPerSecond() * time_period_seconds; - return DataRate::bps(data_rate_increase_bps); + return DataRate::BitsPerSec(data_rate_increase_bps); } void AimdRateControl::ChangeState(const RateControlInput& input, diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.h b/modules/remote_bitrate_estimator/aimd_rate_control.h index 85e4025914..c9e9470c58 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control.h +++ b/modules/remote_bitrate_estimator/aimd_rate_control.h @@ -73,14 +73,9 @@ class AimdRateControl { // in the "decrease" state the bitrate will be decreased to slightly below the // current throughput. 
When in the "hold" state the bitrate will be kept // constant to allow built up queues to drain. - DataRate ChangeBitrate(DataRate current_bitrate, - const RateControlInput& input, - Timestamp at_time); - // Clamps new_bitrate to within the configured min bitrate and a linear - // function of the throughput, so that the new bitrate can't grow too - // large compared to the bitrate actually being received by the other end. - DataRate ClampBitrate(DataRate new_bitrate, - DataRate estimated_throughput) const; + void ChangeBitrate(const RateControlInput& input, Timestamp at_time); + + DataRate ClampBitrate(DataRate new_bitrate) const; DataRate MultiplicativeRateIncrease(Timestamp at_time, Timestamp last_ms, DataRate current_bitrate) const; @@ -107,7 +102,6 @@ class AimdRateControl { // Allow the delay based estimate to only increase as long as application // limited region (alr) is not detected. const bool no_bitrate_increase_in_alr_; - const bool smoothing_experiment_; // Use estimated link capacity lower bound if it is higher than the // acknowledged rate when backing off due to overuse. 
const bool estimate_bounded_backoff_; @@ -116,7 +110,6 @@ class AimdRateControl { const bool estimate_bounded_increase_; absl::optional last_decrease_; FieldTrialOptional initial_backoff_interval_; - FieldTrialParameter low_throughput_threshold_; FieldTrialFlag link_capacity_fix_; }; } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc b/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc index 11ed4b0077..6cbccf6b7b 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc +++ b/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc @@ -21,12 +21,9 @@ namespace { constexpr int64_t kClockInitialTime = 123456; -constexpr int kMinBwePeriodMsSmoothingExp = 500; -constexpr int kMinBwePeriodMsNoSmoothingExp = 2000; -constexpr int kDefaultPeriodMsNoSmoothingExp = 3000; +constexpr int kMinBwePeriodMs = 2000; +constexpr int kDefaultPeriodMs = 3000; constexpr int kMaxBwePeriodMs = 50000; -constexpr char kSmoothingExpFieldTrial[] = - "WebRTC-Audio-BandwidthSmoothing/Enabled/"; // After an overuse, we back off to 85% to the received bitrate. 
constexpr double kFractionAfterOveruse = 0.85; @@ -47,7 +44,7 @@ AimdRateControlStates CreateAimdRateControlStates(bool send_side = false) { absl::optional OptionalRateFromOptionalBps( absl::optional bitrate_bps) { if (bitrate_bps) { - return DataRate::bps(*bitrate_bps); + return DataRate::BitsPerSec(*bitrate_bps); } else { return absl::nullopt; } @@ -58,10 +55,10 @@ void UpdateRateControl(const AimdRateControlStates& states, int64_t now_ms) { RateControlInput input(bandwidth_usage, OptionalRateFromOptionalBps(throughput_estimate)); - states.aimd_rate_control->Update(&input, Timestamp::ms(now_ms)); + states.aimd_rate_control->Update(&input, Timestamp::Millis(now_ms)); } void SetEstimate(const AimdRateControlStates& states, int bitrate_bps) { - states.aimd_rate_control->SetEstimate(DataRate::bps(bitrate_bps), + states.aimd_rate_control->SetEstimate(DataRate::BitsPerSec(bitrate_bps), states.simulated_clock->CurrentTime()); } @@ -87,7 +84,7 @@ TEST(AimdRateControlTest, NearMaxIncreaseRateIs5kbpsOn60kbpsAnd100msRtt) { auto states = CreateAimdRateControlStates(); constexpr int kBitrate = 60000; SetEstimate(states, kBitrate); - states.aimd_rate_control->SetRtt(TimeDelta::ms(100)); + states.aimd_rate_control->SetRtt(TimeDelta::Millis(100)); EXPECT_EQ(5000, states.aimd_rate_control->GetNearMaxIncreaseRateBpsPerSecond()); } @@ -102,22 +99,7 @@ TEST(AimdRateControlTest, GetIncreaseRateAndBandwidthPeriod) { EXPECT_NEAR(14000, states.aimd_rate_control->GetNearMaxIncreaseRateBpsPerSecond(), 1000); - EXPECT_EQ(kDefaultPeriodMsNoSmoothingExp, - states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); -} - -TEST(AimdRateControlTest, GetIncreaseRateAndBandwidthPeriodSmoothingExp) { - // Smoothing experiment enabled - test::ScopedFieldTrials override_field_trials(kSmoothingExpFieldTrial); - auto states = CreateAimdRateControlStates(); - constexpr int kBitrate = 300000; - SetEstimate(states, kBitrate); - UpdateRateControl(states, BandwidthUsage::kBwOverusing, kBitrate, - 
states.simulated_clock->TimeInMilliseconds()); - EXPECT_NEAR(14000, - states.aimd_rate_control->GetNearMaxIncreaseRateBpsPerSecond(), - 1000); - EXPECT_EQ(kMinBwePeriodMsSmoothingExp, + EXPECT_EQ(kDefaultPeriodMs, states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); } @@ -161,27 +143,13 @@ TEST(AimdRateControlTest, BweNotLimitedByDecreasingAckedBitrate) { TEST(AimdRateControlTest, DefaultPeriodUntilFirstOveruse) { // Smoothing experiment disabled auto states = CreateAimdRateControlStates(); - states.aimd_rate_control->SetStartBitrate(DataRate::kbps(300)); - EXPECT_EQ(kDefaultPeriodMsNoSmoothingExp, - states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); - states.simulated_clock->AdvanceTimeMilliseconds(100); - UpdateRateControl(states, BandwidthUsage::kBwOverusing, 280000, - states.simulated_clock->TimeInMilliseconds()); - EXPECT_NE(kDefaultPeriodMsNoSmoothingExp, - states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); -} - -TEST(AimdRateControlTest, MinPeriodUntilFirstOveruseSmoothingExp) { - // Smoothing experiment enabled - test::ScopedFieldTrials override_field_trials(kSmoothingExpFieldTrial); - auto states = CreateAimdRateControlStates(); - states.aimd_rate_control->SetStartBitrate(DataRate::kbps(300)); - EXPECT_EQ(kMinBwePeriodMsSmoothingExp, + states.aimd_rate_control->SetStartBitrate(DataRate::KilobitsPerSec(300)); + EXPECT_EQ(kDefaultPeriodMs, states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); states.simulated_clock->AdvanceTimeMilliseconds(100); UpdateRateControl(states, BandwidthUsage::kBwOverusing, 280000, states.simulated_clock->TimeInMilliseconds()); - EXPECT_NE(kMinBwePeriodMsSmoothingExp, + EXPECT_NE(kDefaultPeriodMs, states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); } @@ -201,22 +169,6 @@ TEST(AimdRateControlTest, ExpectedPeriodAfter20kbpsDropAnd5kbpsIncrease) { EXPECT_EQ(4000, states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); } -TEST(AimdRateControlTest, 
MinPeriodAfterLargeBitrateDecreaseSmoothingExp) { - // Smoothing experiment enabled - test::ScopedFieldTrials override_field_trials(kSmoothingExpFieldTrial); - auto states = CreateAimdRateControlStates(); - constexpr int kInitialBitrate = 110000; - SetEstimate(states, kInitialBitrate); - states.simulated_clock->AdvanceTimeMilliseconds(100); - // Make such a large drop in bitrate that should be treated as network - // degradation. - constexpr int kAckedBitrate = kInitialBitrate * 3 / 4 / kFractionAfterOveruse; - UpdateRateControl(states, BandwidthUsage::kBwOverusing, kAckedBitrate, - states.simulated_clock->TimeInMilliseconds()); - EXPECT_EQ(kMinBwePeriodMsSmoothingExp, - states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); -} - TEST(AimdRateControlTest, BandwidthPeriodIsNotBelowMin) { auto states = CreateAimdRateControlStates(); constexpr int kInitialBitrate = 10000; @@ -225,22 +177,7 @@ TEST(AimdRateControlTest, BandwidthPeriodIsNotBelowMin) { // Make a small (1.5 kbps) bitrate drop to 8.5 kbps. UpdateRateControl(states, BandwidthUsage::kBwOverusing, kInitialBitrate - 1, states.simulated_clock->TimeInMilliseconds()); - EXPECT_EQ(kMinBwePeriodMsNoSmoothingExp, - states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); -} - -TEST(AimdRateControlTest, BandwidthPeriodIsNotAboveMaxSmoothingExp) { - // Smoothing experiment enabled - test::ScopedFieldTrials override_field_trials(kSmoothingExpFieldTrial); - auto states = CreateAimdRateControlStates(); - constexpr int kInitialBitrate = 50000000; - SetEstimate(states, kInitialBitrate); - states.simulated_clock->AdvanceTimeMilliseconds(100); - // Make a large (10 Mbps) bitrate drop to 10 kbps. 
- constexpr int kAckedBitrate = 40000000 / kFractionAfterOveruse; - UpdateRateControl(states, BandwidthUsage::kBwOverusing, kAckedBitrate, - states.simulated_clock->TimeInMilliseconds()); - EXPECT_EQ(kMaxBwePeriodMs, + EXPECT_EQ(kMinBwePeriodMs, states.aimd_rate_control->GetExpectedBandwidthPeriod().ms()); } diff --git a/modules/remote_bitrate_estimator/bwe_defines.cc b/modules/remote_bitrate_estimator/bwe_defines.cc index 1a67faafd0..6afbe133e2 100644 --- a/modules/remote_bitrate_estimator/bwe_defines.cc +++ b/modules/remote_bitrate_estimator/bwe_defines.cc @@ -23,7 +23,7 @@ int GetMinBitrateBps() { } DataRate GetMinBitrate() { - return DataRate::bps(GetMinBitrateBps()); + return DataRate::BitsPerSec(GetMinBitrateBps()); } } // namespace congestion_controller diff --git a/modules/remote_bitrate_estimator/inter_arrival.h b/modules/remote_bitrate_estimator/inter_arrival.h index 1d84970deb..dbc630ff63 100644 --- a/modules/remote_bitrate_estimator/inter_arrival.h +++ b/modules/remote_bitrate_estimator/inter_arrival.h @@ -14,8 +14,6 @@ #include #include -#include "rtc_base/constructor_magic.h" - namespace webrtc { // Helper class to compute the inter-arrival time delta and the size delta @@ -35,6 +33,10 @@ class InterArrival { double timestamp_to_ms_coeff, bool enable_burst_grouping); + InterArrival() = delete; + InterArrival(const InterArrival&) = delete; + InterArrival& operator=(const InterArrival&) = delete; + // This function returns true if a delta was computed, or false if the current // group is still incomplete or if only one group has been completed. // |timestamp| is the timestamp. 
@@ -87,8 +89,6 @@ class InterArrival { double timestamp_to_ms_coeff_; bool burst_grouping_; int num_consecutive_reordered_packets_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(InterArrival); }; } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc index 61dd3e2bfd..e8f835ca6a 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc @@ -27,7 +27,7 @@ namespace { absl::optional OptionalRateFromOptionalBps( absl::optional bitrate_bps) { if (bitrate_bps) { - return DataRate::bps(*bitrate_bps); + return DataRate::BitsPerSec(*bitrate_bps); } else { return absl::nullopt; } @@ -201,8 +201,8 @@ RemoteBitrateEstimatorAbsSendTime::ProcessClusters(int64_t now_ms) { << " bps. Mean send delta: " << best_it->send_mean_ms << " ms, mean recv delta: " << best_it->recv_mean_ms << " ms, num probes: " << best_it->count; - remote_rate_.SetEstimate(DataRate::bps(probe_bitrate_bps), - Timestamp::ms(now_ms)); + remote_rate_.SetEstimate(DataRate::BitsPerSec(probe_bitrate_bps), + Timestamp::Millis(now_ms)); return ProbeResult::kBitrateUpdated; } } @@ -282,7 +282,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( uint32_t target_bitrate_bps = 0; std::vector ssrcs; { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); TimeoutStreams(now_ms); RTC_DCHECK(inter_arrival_.get()); @@ -335,9 +335,9 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( } else if (detector_.State() == BandwidthUsage::kBwOverusing) { absl::optional incoming_rate = incoming_bitrate_.Rate(arrival_time_ms); - if (incoming_rate && - remote_rate_.TimeToReduceFurther(Timestamp::ms(now_ms), - DataRate::bps(*incoming_rate))) { + if (incoming_rate && remote_rate_.TimeToReduceFurther( + Timestamp::Millis(now_ms), + DataRate::BitsPerSec(*incoming_rate))) { 
update_estimate = true; } } @@ -351,7 +351,8 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( detector_.State(), OptionalRateFromOptionalBps(incoming_bitrate_.Rate(arrival_time_ms))); target_bitrate_bps = - remote_rate_.Update(&input, Timestamp::ms(now_ms)).bps(); + remote_rate_.Update(&input, Timestamp::Millis(now_ms)) + .bps(); update_estimate = remote_rate_.ValidEstimate(); ssrcs = Keys(ssrcs_); } @@ -390,12 +391,12 @@ void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) { void RemoteBitrateEstimatorAbsSendTime::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { - rtc::CritScope lock(&crit_); - remote_rate_.SetRtt(TimeDelta::ms(avg_rtt_ms)); + MutexLock lock(&mutex_); + remote_rate_.SetRtt(TimeDelta::Millis(avg_rtt_ms)); } void RemoteBitrateEstimatorAbsSendTime::RemoveStream(uint32_t ssrc) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ssrcs_.erase(ssrc); } @@ -408,7 +409,7 @@ bool RemoteBitrateEstimatorAbsSendTime::LatestEstimate( // thread. RTC_DCHECK(ssrcs); RTC_DCHECK(bitrate_bps); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (!remote_rate_.ValidEstimate()) { return false; } @@ -424,7 +425,7 @@ bool RemoteBitrateEstimatorAbsSendTime::LatestEstimate( void RemoteBitrateEstimatorAbsSendTime::SetMinBitrate(int min_bitrate_bps) { // Called from both the configuration thread and the network thread. Shouldn't // be called from the network thread in the future. 
- rtc::CritScope lock(&crit_); - remote_rate_.SetMinBitrate(DataRate::bps(min_bitrate_bps)); + MutexLock lock(&mutex_); + remote_rate_.SetMinBitrate(DataRate::BitsPerSec(min_bitrate_bps)); } } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h index 9fd4974116..f42a28f8c8 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h @@ -27,10 +27,9 @@ #include "modules/remote_bitrate_estimator/overuse_detector.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -76,6 +75,13 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { public: RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer, Clock* clock); + + RemoteBitrateEstimatorAbsSendTime() = delete; + RemoteBitrateEstimatorAbsSendTime(const RemoteBitrateEstimatorAbsSendTime&) = + delete; + RemoteBitrateEstimatorAbsSendTime& operator=( + const RemoteBitrateEstimatorAbsSendTime&) = delete; + ~RemoteBitrateEstimatorAbsSendTime() override; void IncomingPacket(int64_t arrival_time_ms, @@ -114,12 +120,12 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { // Returns true if a probe which changed the estimate was detected. 
ProbeResult ProcessClusters(int64_t now_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_); bool IsBitrateImproving(int probe_bitrate_bps) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_); - void TimeoutStreams(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_); + void TimeoutStreams(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_); rtc::RaceChecker network_race_; Clock* const clock_; @@ -138,11 +144,9 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { int64_t last_update_ms_; bool uma_recorded_; - rtc::CriticalSection crit_; - Ssrcs ssrcs_ RTC_GUARDED_BY(&crit_); - AimdRateControl remote_rate_ RTC_GUARDED_BY(&crit_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorAbsSendTime); + mutable Mutex mutex_; + Ssrcs ssrcs_ RTC_GUARDED_BY(&mutex_); + AimdRateControl remote_rate_ RTC_GUARDED_BY(&mutex_); }; } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc index 32419dd47a..46d8fbc434 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc @@ -31,7 +31,7 @@ namespace { absl::optional OptionalRateFromOptionalBps( absl::optional bitrate_bps) { if (bitrate_bps) { - return DataRate::bps(*bitrate_bps); + return DataRate::BitsPerSec(*bitrate_bps); } else { return absl::nullopt; } @@ -95,7 +95,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( uint32_t rtp_timestamp = header.timestamp + header.extension.transmissionTimeOffset; int64_t now_ms = clock_->TimeInMilliseconds(); - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.find(ssrc); if (it == overuse_detectors_.end()) { // This is a new SSRC. Adding to map. 
@@ -146,7 +146,8 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( if (incoming_bitrate_bps && (prior_state != BandwidthUsage::kBwOverusing || GetRemoteRate()->TimeToReduceFurther( - Timestamp::ms(now_ms), DataRate::bps(*incoming_bitrate_bps)))) { + Timestamp::Millis(now_ms), + DataRate::BitsPerSec(*incoming_bitrate_bps)))) { // The first overuse should immediately trigger a new estimate. // We also have to update the estimate immediately if we are overusing // and the target bitrate is too high compared to what we are receiving. @@ -157,7 +158,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( void RemoteBitrateEstimatorSingleStream::Process() { { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); UpdateEstimate(clock_->TimeInMilliseconds()); } last_process_time_ = clock_->TimeInMilliseconds(); @@ -167,7 +168,7 @@ int64_t RemoteBitrateEstimatorSingleStream::TimeUntilNextProcess() { if (last_process_time_ < 0) { return 0; } - rtc::CritScope cs_(&crit_sect_); + MutexLock lock_(&mutex_); RTC_DCHECK_GT(process_interval_ms_, 0); return last_process_time_ + process_interval_ms_ - clock_->TimeInMilliseconds(); @@ -203,7 +204,7 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { const RateControlInput input( bw_state, OptionalRateFromOptionalBps(incoming_bitrate_.Rate(now_ms))); uint32_t target_bitrate = - remote_rate->Update(&input, Timestamp::ms(now_ms)).bps(); + remote_rate->Update(&input, Timestamp::Millis(now_ms)).bps(); if (remote_rate->ValidEstimate()) { process_interval_ms_ = remote_rate->GetFeedbackInterval().ms(); RTC_DCHECK_GT(process_interval_ms_, 0); @@ -216,12 +217,12 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { void RemoteBitrateEstimatorSingleStream::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { - rtc::CritScope cs(&crit_sect_); - GetRemoteRate()->SetRtt(TimeDelta::ms(avg_rtt_ms)); + MutexLock lock(&mutex_); + 
GetRemoteRate()->SetRtt(TimeDelta::Millis(avg_rtt_ms)); } void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.find(ssrc); if (it != overuse_detectors_.end()) { delete it->second; @@ -232,7 +233,7 @@ void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { bool RemoteBitrateEstimatorSingleStream::LatestEstimate( std::vector* ssrcs, uint32_t* bitrate_bps) const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); assert(bitrate_bps); if (!remote_rate_->ValidEstimate()) { return false; @@ -263,8 +264,8 @@ AimdRateControl* RemoteBitrateEstimatorSingleStream::GetRemoteRate() { } void RemoteBitrateEstimatorSingleStream::SetMinBitrate(int min_bitrate_bps) { - rtc::CritScope cs(&crit_sect_); - remote_rate_->SetMinBitrate(DataRate::bps(min_bitrate_bps)); + MutexLock lock(&mutex_); + remote_rate_->SetMinBitrate(DataRate::BitsPerSec(min_bitrate_bps)); } } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h index a28109ce99..9fd2f9fc06 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h @@ -21,9 +21,8 @@ #include "api/transport/field_trial_based_config.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -35,6 +34,13 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { public: 
RemoteBitrateEstimatorSingleStream(RemoteBitrateObserver* observer, Clock* clock); + + RemoteBitrateEstimatorSingleStream() = delete; + RemoteBitrateEstimatorSingleStream( + const RemoteBitrateEstimatorSingleStream&) = delete; + RemoteBitrateEstimatorSingleStream& operator=( + const RemoteBitrateEstimatorSingleStream&) = delete; + ~RemoteBitrateEstimatorSingleStream() override; void IncomingPacket(int64_t arrival_time_ms, @@ -54,29 +60,26 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { typedef std::map SsrcOveruseEstimatorMap; // Triggers a new estimate calculation. - void UpdateEstimate(int64_t time_now) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + void UpdateEstimate(int64_t time_now) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void GetSsrcs(std::vector* ssrcs) const - RTC_SHARED_LOCKS_REQUIRED(crit_sect_); + RTC_SHARED_LOCKS_REQUIRED(mutex_); // Returns |remote_rate_| if the pointed to object exists, // otherwise creates it. - AimdRateControl* GetRemoteRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + AimdRateControl* GetRemoteRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* const clock_; const FieldTrialBasedConfig field_trials_; - SsrcOveruseEstimatorMap overuse_detectors_ RTC_GUARDED_BY(crit_sect_); - RateStatistics incoming_bitrate_ RTC_GUARDED_BY(crit_sect_); - uint32_t last_valid_incoming_bitrate_ RTC_GUARDED_BY(crit_sect_); - std::unique_ptr remote_rate_ RTC_GUARDED_BY(crit_sect_); - RemoteBitrateObserver* const observer_ RTC_GUARDED_BY(crit_sect_); - rtc::CriticalSection crit_sect_; + SsrcOveruseEstimatorMap overuse_detectors_ RTC_GUARDED_BY(mutex_); + RateStatistics incoming_bitrate_ RTC_GUARDED_BY(mutex_); + uint32_t last_valid_incoming_bitrate_ RTC_GUARDED_BY(mutex_); + std::unique_ptr remote_rate_ RTC_GUARDED_BY(mutex_); + RemoteBitrateObserver* const observer_ RTC_GUARDED_BY(mutex_); + mutable Mutex mutex_; int64_t last_process_time_; - int64_t process_interval_ms_ RTC_GUARDED_BY(crit_sect_); + int64_t 
process_interval_ms_ RTC_GUARDED_BY(mutex_); bool uma_recorded_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorSingleStream); }; } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc index f66b37046a..a9cc170a35 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc +++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.cc @@ -61,7 +61,7 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, RTC_LOG(LS_WARNING) << "Arrival time out of bounds: " << arrival_time_ms; return; } - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); media_ssrc_ = header.ssrc; int64_t seq = 0; @@ -112,17 +112,17 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, if (network_state_estimator_ && header.extension.hasAbsoluteSendTime) { PacketResult packet_result; - packet_result.receive_time = Timestamp::ms(arrival_time_ms); + packet_result.receive_time = Timestamp::Millis(arrival_time_ms); // Ignore reordering of packets and assume they have approximately the same // send time. abs_send_timestamp_ += std::max( header.extension.GetAbsoluteSendTimeDelta(previous_abs_send_time_), - TimeDelta::ms(0)); + TimeDelta::Millis(0)); previous_abs_send_time_ = header.extension.absoluteSendTime; packet_result.sent_packet.send_time = abs_send_timestamp_; // TODO(webrtc:10742): Take IP header and transport overhead into account. packet_result.sent_packet.size = - DataSize::bytes(header.headerLength + payload_size); + DataSize::Bytes(header.headerLength + payload_size); packet_result.sent_packet.sequence_number = seq; network_state_estimator_->OnReceivedPacket(packet_result); } @@ -134,7 +134,7 @@ bool RemoteEstimatorProxy::LatestEstimate(std::vector* ssrcs, } int64_t RemoteEstimatorProxy::TimeUntilNextProcess() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (!send_periodic_feedback_) { // Wait a day until next process. 
return 24 * 60 * 60 * 1000; @@ -147,7 +147,7 @@ int64_t RemoteEstimatorProxy::TimeUntilNextProcess() { } void RemoteEstimatorProxy::Process() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (!send_periodic_feedback_) { return; } @@ -169,7 +169,7 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { kTwccReportSize * 8.0 * 1000.0 / send_config_.min_interval->ms(); // Let TWCC reports occupy 5% of total bandwidth. - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); send_interval_ms_ = static_cast( 0.5 + kTwccReportSize * 8.0 * 1000.0 / rtc::SafeClamp(send_config_.bandwidth_fraction * bitrate_bps, @@ -178,7 +178,7 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { void RemoteEstimatorProxy::SetSendPeriodicFeedback( bool send_periodic_feedback) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); send_periodic_feedback_ = send_periodic_feedback; } diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/modules/remote_bitrate_estimator/remote_estimator_proxy.h index a772b58dc8..a4adefc5ee 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy.h +++ b/modules/remote_bitrate_estimator/remote_estimator_proxy.h @@ -17,9 +17,9 @@ #include "api/transport/network_control.h" #include "api/transport/webrtc_key_value_config.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" -#include "rtc_base/critical_section.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -56,10 +56,11 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator { private: struct TransportWideFeedbackConfig { - FieldTrialParameter back_window{"wind", TimeDelta::ms(500)}; - FieldTrialParameter min_interval{"min", TimeDelta::ms(50)}; - FieldTrialParameter max_interval{"max", TimeDelta::ms(250)}; - FieldTrialParameter default_interval{"def", TimeDelta::ms(100)}; + 
FieldTrialParameter back_window{"wind", TimeDelta::Millis(500)}; + FieldTrialParameter min_interval{"min", TimeDelta::Millis(50)}; + FieldTrialParameter max_interval{"max", TimeDelta::Millis(250)}; + FieldTrialParameter default_interval{"def", + TimeDelta::Millis(100)}; FieldTrialParameter bandwidth_fraction{"frac", 0.05}; explicit TransportWideFeedbackConfig( const WebRtcKeyValueConfig* key_value_config) { @@ -91,7 +92,7 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator { const TransportWideFeedbackConfig send_config_; int64_t last_process_time_ms_; - rtc::CriticalSection lock_; + Mutex lock_; // |network_state_estimator_| may be null. NetworkStateEstimator* const network_state_estimator_ RTC_PT_GUARDED_BY(&lock_); diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc index 2d2d8af52c..da995922d9 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc +++ b/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc @@ -65,9 +65,10 @@ std::vector TimestampsMs( class MockTransportFeedbackSender : public TransportFeedbackSenderInterface { public: - MOCK_METHOD1( - SendCombinedRtcpPacket, - bool(std::vector> feedback_packets)); + MOCK_METHOD(bool, + SendCombinedRtcpPacket, + (std::vector> feedback_packets), + (override)); }; class RemoteEstimatorProxyTest : public ::testing::Test { @@ -595,10 +596,10 @@ TEST_F(RemoteEstimatorProxyOnRequestTest, } TEST_F(RemoteEstimatorProxyTest, ReportsIncomingPacketToNetworkStateEstimator) { - Timestamp first_send_timestamp = Timestamp::ms(0); + Timestamp first_send_timestamp = Timestamp::Millis(0); EXPECT_CALL(network_state_estimator_, OnReceivedPacket(_)) .WillOnce(Invoke([&first_send_timestamp](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, Timestamp::ms(kBaseTimeMs)); + EXPECT_EQ(packet.receive_time, Timestamp::Millis(kBaseTimeMs)); first_send_timestamp = 
packet.sent_packet.send_time; })); // Incoming packet with abs sendtime but without transport sequence number. @@ -611,7 +612,7 @@ TEST_F(RemoteEstimatorProxyTest, ReportsIncomingPacketToNetworkStateEstimator) { // time as the previous packet due to reordering. EXPECT_CALL(network_state_estimator_, OnReceivedPacket(_)) .WillOnce(Invoke([&first_send_timestamp](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, Timestamp::ms(kBaseTimeMs)); + EXPECT_EQ(packet.receive_time, Timestamp::Millis(kBaseTimeMs)); EXPECT_EQ(packet.sent_packet.send_time, first_send_timestamp); })); proxy_.IncomingPacket( @@ -626,12 +627,12 @@ TEST_F(RemoteEstimatorProxyTest, IncomingPacketHandlesWrapInAbsSendTime) { AbsoluteSendTime::MsTo24Bits((1 << 24) - 30); // Second abs send time has wrapped. const uint32_t kSecondAbsSendTime = AbsoluteSendTime::MsTo24Bits((1 << 24)); - const TimeDelta kExpectedAbsSendTimeDelta = TimeDelta::ms(30); + const TimeDelta kExpectedAbsSendTimeDelta = TimeDelta::Millis(30); - Timestamp first_send_timestamp = Timestamp::ms(0); + Timestamp first_send_timestamp = Timestamp::Millis(0); EXPECT_CALL(network_state_estimator_, OnReceivedPacket(_)) .WillOnce(Invoke([&first_send_timestamp](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, Timestamp::ms(kBaseTimeMs)); + EXPECT_EQ(packet.receive_time, Timestamp::Millis(kBaseTimeMs)); first_send_timestamp = packet.sent_packet.send_time; })); proxy_.IncomingPacket( @@ -641,7 +642,7 @@ TEST_F(RemoteEstimatorProxyTest, IncomingPacketHandlesWrapInAbsSendTime) { EXPECT_CALL(network_state_estimator_, OnReceivedPacket(_)) .WillOnce(Invoke([first_send_timestamp, kExpectedAbsSendTimeDelta](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, Timestamp::ms(kBaseTimeMs + 123)); + EXPECT_EQ(packet.receive_time, Timestamp::Millis(kBaseTimeMs + 123)); EXPECT_EQ(packet.sent_packet.send_time.ms(), (first_send_timestamp + kExpectedAbsSendTimeDelta).ms()); })); diff --git 
a/modules/remote_bitrate_estimator/test/bwe_test_logging.cc b/modules/remote_bitrate_estimator/test/bwe_test_logging.cc index cf44fa070a..f99576f59a 100644 --- a/modules/remote_bitrate_estimator/test/bwe_test_logging.cc +++ b/modules/remote_bitrate_estimator/test/bwe_test_logging.cc @@ -61,27 +61,27 @@ Logging* Logging::GetInstance() { } void Logging::SetGlobalContext(uint32_t name) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); thread_map_[rtc::CurrentThreadId()].global_state.tag = ToString(name); } void Logging::SetGlobalContext(const std::string& name) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); thread_map_[rtc::CurrentThreadId()].global_state.tag = name; } void Logging::SetGlobalContext(const char* name) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); thread_map_[rtc::CurrentThreadId()].global_state.tag = name; } void Logging::SetGlobalEnable(bool enabled) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); thread_map_[rtc::CurrentThreadId()].global_state.enabled = enabled; } void Logging::Log(const char format[], ...) 
{ - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); const State& state = it->second.stack.top(); @@ -118,7 +118,7 @@ void Logging::Plot(int figure, double value, uint32_t ssrc, const std::string& alg_name) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); const State& state = it->second.stack.top(); @@ -132,7 +132,7 @@ void Logging::PlotBar(int figure, const std::string& name, double value, int flow_id) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); const State& state = it->second.stack.top(); @@ -145,7 +145,7 @@ void Logging::PlotBaselineBar(int figure, const std::string& name, double value, int flow_id) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); const State& state = it->second.stack.top(); @@ -161,7 +161,7 @@ void Logging::PlotErrorBar(int figure, double yhigh, const std::string& error_title, int flow_id) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); const State& state = it->second.stack.top(); @@ -180,7 +180,7 @@ void Logging::PlotLimitErrorBar(int figure, double ymax, const std::string& limit_title, int flow_id) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); const State& state = it->second.stack.top(); @@ -195,7 +195,7 @@ void Logging::PlotLabel(int figure, const std::string& title, const std::string& y_label, int num_flows) { - rtc::CritScope 
cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); const State& state = it->second.stack.top(); @@ -229,7 +229,7 @@ void Logging::State::MergePrevious(const State& previous) { void Logging::PushState(const std::string& append_to_tag, int64_t timestamp_ms, bool enabled) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); State new_state(append_to_tag, timestamp_ms, enabled); ThreadState* thread_state = &thread_map_[rtc::CurrentThreadId()]; std::stack* stack = &thread_state->stack; @@ -242,7 +242,7 @@ void Logging::PushState(const std::string& append_to_tag, } void Logging::PopState() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); RTC_DCHECK(it != thread_map_.end()); std::stack* stack = &it->second.stack; diff --git a/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/modules/remote_bitrate_estimator/test/bwe_test_logging.h index a399d0b694..079a7f888d 100644 --- a/modules/remote_bitrate_estimator/test/bwe_test_logging.h +++ b/modules/remote_bitrate_estimator/test/bwe_test_logging.h @@ -129,7 +129,7 @@ #include #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) \ do { \ @@ -263,10 +263,11 @@ class Logging { Context(uint32_t name, int64_t timestamp_ms, bool enabled); Context(const std::string& name, int64_t timestamp_ms, bool enabled); Context(const char* name, int64_t timestamp_ms, bool enabled); - ~Context(); - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Context); + Context() = delete; + Context(const Context&) = delete; + Context& operator=(const Context&) = delete; + ~Context(); }; static Logging* GetInstance(); @@ -345,7 +346,7 @@ class Logging { bool enabled); void PopState(); - rtc::CriticalSection crit_sect_; + Mutex mutex_; ThreadMap 
thread_map_; RTC_DISALLOW_COPY_AND_ASSIGN(Logging); diff --git a/modules/rtp_rtcp/BUILD.gn b/modules/rtp_rtcp/BUILD.gn index 099c0663d2..f9f4518170 100644 --- a/modules/rtp_rtcp/BUILD.gn +++ b/modules/rtp_rtcp/BUILD.gn @@ -52,6 +52,7 @@ rtc_library("rtp_rtcp_format") { "source/rtp_packet.h", "source/rtp_packet_received.h", "source/rtp_packet_to_send.h", + "source/rtp_video_layers_allocation_extension.h", ] sources = [ "include/report_block_data.cc", @@ -95,10 +96,10 @@ rtc_library("rtp_rtcp_format") { "source/rtp_packet.cc", "source/rtp_packet_received.cc", "source/rtp_packet_to_send.cc", + "source/rtp_video_layers_allocation_extension.cc", ] deps = [ - "..:module_api", "..:module_api_public", "../../api:array_view", "../../api:function_view", @@ -109,6 +110,7 @@ rtc_library("rtp_rtcp_format") { "../../api/transport/rtp:dependency_descriptor", "../../api/units:time_delta", "../../api/video:video_frame", + "../../api/video:video_layers_allocation", "../../api/video:video_rtp_headers", "../../common_video", "../../rtc_base:checks", @@ -118,6 +120,8 @@ rtc_library("rtp_rtcp_format") { "../../rtc_base/system:unused", "../../system_wrappers", "../video_coding:codec_globals_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -132,14 +136,18 @@ rtc_library("rtp_rtcp") { "include/flexfec_sender.h", "include/receive_statistics.h", "include/remote_ntp_time_estimator.h", - "include/rtp_rtcp.h", + "include/rtp_rtcp.h", # deprecated "include/ulpfec_receiver.h", "source/absolute_capture_time_receiver.cc", "source/absolute_capture_time_receiver.h", "source/absolute_capture_time_sender.cc", "source/absolute_capture_time_sender.h", + "source/active_decode_targets_helper.cc", + "source/active_decode_targets_helper.h", "source/create_video_rtp_depacketizer.cc", "source/create_video_rtp_depacketizer.h", + "source/deprecated/deprecated_rtp_sender_egress.cc", + 
"source/deprecated/deprecated_rtp_sender_egress.h", "source/dtmf_queue.cc", "source/dtmf_queue.h", "source/fec_private_tables_bursty.cc", @@ -156,8 +164,6 @@ rtc_library("rtp_rtcp") { "source/forward_error_correction_internal.h", "source/packet_loss_stats.cc", "source/packet_loss_stats.h", - "source/playout_delay_oracle.cc", - "source/playout_delay_oracle.h", "source/receive_statistics_impl.cc", "source/receive_statistics_impl.h", "source/remote_ntp_time_estimator.cc", @@ -167,6 +173,8 @@ rtc_library("rtp_rtcp") { "source/rtcp_receiver.h", "source/rtcp_sender.cc", "source/rtcp_sender.h", + "source/rtp_descriptor_authentication.cc", + "source/rtp_descriptor_authentication.h", "source/rtp_format.cc", "source/rtp_format.h", "source/rtp_format_h264.cc", @@ -186,6 +194,9 @@ rtc_library("rtp_rtcp") { "source/rtp_rtcp_config.h", "source/rtp_rtcp_impl.cc", "source/rtp_rtcp_impl.h", + "source/rtp_rtcp_impl2.cc", + "source/rtp_rtcp_impl2.h", + "source/rtp_rtcp_interface.h", "source/rtp_sender.cc", "source/rtp_sender.h", "source/rtp_sender_audio.cc", @@ -194,6 +205,8 @@ rtc_library("rtp_rtcp") { "source/rtp_sender_egress.h", "source/rtp_sender_video.cc", "source/rtp_sender_video.h", + "source/rtp_sender_video_frame_transformer_delegate.cc", + "source/rtp_sender_video_frame_transformer_delegate.h", "source/rtp_sequence_number_map.cc", "source/rtp_sequence_number_map.h", "source/rtp_utility.cc", @@ -210,6 +223,8 @@ rtc_library("rtp_rtcp") { "source/ulpfec_header_reader_writer.h", "source/ulpfec_receiver_impl.cc", "source/ulpfec_receiver_impl.h", + "source/video_fec_generator.h", + "source/video_rtp_depacketizer.cc", "source/video_rtp_depacketizer.h", "source/video_rtp_depacketizer_av1.cc", "source/video_rtp_depacketizer_av1.h", @@ -231,14 +246,27 @@ rtc_library("rtp_rtcp") { defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] } + if (rtc_use_h265) { + sources += [ + "source/rtp_format_h265.cc", + "source/rtp_format_h265.h", + "source/video_rtp_depacketizer_h265.cc", + 
"source/video_rtp_depacketizer_h265.h", + ] + } + + if (!rtc_use_h265) { + defines += ["DISABLE_H265"] + } + deps = [ ":rtp_rtcp_format", ":rtp_video_header", "..:module_api", "..:module_api_public", "..:module_fec_api", - "../..:webrtc_common", "../../api:array_view", + "../../api:frame_transformer_interface", "../../api:function_view", "../../api:libjingle_peerconnection_api", "../../api:rtp_headers", @@ -249,6 +277,7 @@ rtc_library("rtp_rtcp") { "../../api/audio_codecs:audio_codecs_api", "../../api/crypto:frame_encryptor_interface", "../../api/rtc_event_log", + "../../api/task_queue:task_queue", "../../api/transport:field_trial_based_config", "../../api/transport:webrtc_key_value_config", "../../api/transport/rtp:dependency_descriptor", @@ -256,12 +285,14 @@ rtc_library("rtp_rtcp") { "../../api/units:data_rate", "../../api/units:time_delta", "../../api/units:timestamp", + "../../api/video:encoded_frame", "../../api/video:encoded_image", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_frame_type", + "../../api/video:video_layers_allocation", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../call:rtp_interfaces", @@ -277,12 +308,19 @@ rtc_library("rtp_rtcp") { "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_numerics", "../../rtc_base:safe_minmax", + "../../rtc_base/experiments:field_trial_parser", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", + "../../rtc_base/task_utils:pending_task_safety_flag", + "../../rtc_base/task_utils:repeating_task", + "../../rtc_base/task_utils:to_queued_task", "../../rtc_base/time:timestamp_extrapolator", "../../system_wrappers", "../../system_wrappers:metrics", "../remote_bitrate_estimator", "../video_coding:codec_globals_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", 
"//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/container:inlined_vector", @@ -308,17 +346,18 @@ rtc_library("rtcp_transceiver") { deps = [ ":rtp_rtcp", ":rtp_rtcp_format", - "../../:webrtc_common", "../../api:array_view", "../../api:rtp_headers", "../../api:transport_api", + "../../api/task_queue", "../../api/video:video_bitrate_allocation", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", - "../../rtc_base:rtc_task_queue", "../../rtc_base/task_utils:repeating_task", "../../rtc_base/task_utils:to_queued_task", "../../system_wrappers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", @@ -332,12 +371,13 @@ rtc_library("rtp_video_header") { "source/rtp_video_header.h", ] deps = [ - "../../:webrtc_common", "../../api/transport/rtp:dependency_descriptor", "../../api/video:video_frame", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../modules/video_coding:codec_globals_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/types:optional", "//third_party/abseil-cpp/absl/types:variant", @@ -353,7 +393,6 @@ rtc_library("fec_test_helper") { deps = [ ":rtp_rtcp", ":rtp_rtcp_format", - "..:module_api", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", ] @@ -361,12 +400,6 @@ rtc_library("fec_test_helper") { rtc_library("mock_rtp_rtcp") { testonly = true - sources = [ - "mocks/mock_recovered_packet_receiver.cc", - "mocks/mock_rtcp_bandwidth_observer.cc", - "mocks/mock_rtcp_rtt_stats.cc", - "mocks/mock_rtp_rtcp.cc", - ] public = [ "mocks/mock_recovered_packet_receiver.h", "mocks/mock_rtcp_bandwidth_observer.h", @@ -381,8 +414,8 @@ rtc_library("mock_rtp_rtcp") { "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ 
"//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests) { @@ -422,6 +455,7 @@ if (rtc_include_tests) { sources = [ "source/absolute_capture_time_receiver_unittest.cc", "source/absolute_capture_time_sender_unittest.cc", + "source/active_decode_targets_helper_unittest.cc", "source/byte_io_unittest.cc", "source/fec_private_tables_bursty_unittest.cc", "source/flexfec_header_reader_writer_unittest.cc", @@ -429,7 +463,6 @@ if (rtc_include_tests) { "source/flexfec_sender_unittest.cc", "source/nack_rtx_unittest.cc", "source/packet_loss_stats_unittest.cc", - "source/playout_delay_oracle_unittest.cc", "source/receive_statistics_unittest.cc", "source/remote_ntp_time_estimator_unittest.cc", "source/rtcp_nack_stats_unittest.cc", @@ -461,6 +494,7 @@ if (rtc_include_tests) { "source/rtcp_sender_unittest.cc", "source/rtcp_transceiver_impl_unittest.cc", "source/rtcp_transceiver_unittest.cc", + "source/rtp_dependency_descriptor_extension_unittest.cc", "source/rtp_fec_unittest.cc", "source/rtp_format_h264_unittest.cc", "source/rtp_format_unittest.cc", @@ -475,12 +509,14 @@ if (rtc_include_tests) { "source/rtp_packet_history_unittest.cc", "source/rtp_packet_unittest.cc", "source/rtp_packetizer_av1_unittest.cc", + "source/rtp_rtcp_impl2_unittest.cc", "source/rtp_rtcp_impl_unittest.cc", "source/rtp_sender_audio_unittest.cc", "source/rtp_sender_unittest.cc", "source/rtp_sender_video_unittest.cc", "source/rtp_sequence_number_map_unittest.cc", "source/rtp_utility_unittest.cc", + "source/rtp_video_layers_allocation_extension_unittest.cc", "source/source_tracker_unittest.cc", "source/time_util_unittest.cc", "source/ulpfec_generator_unittest.cc", @@ -499,10 +535,9 @@ if (rtc_include_tests) { ":rtcp_transceiver", ":rtp_rtcp", ":rtp_rtcp_format", - "..:module_api", - "../..:webrtc_common", "../../api:array_view", "../../api:libjingle_peerconnection_api", + "../../api:mock_frame_encryptor", "../../api:rtp_headers", "../../api:rtp_packet_info", "../../api:rtp_parameters", @@ 
-517,6 +552,7 @@ if (rtc_include_tests) { "../../api/video:video_bitrate_allocator", "../../api/video:video_codec_constants", "../../api/video:video_frame", + "../../api/video:video_layers_allocation", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../call:rtp_receiver", @@ -530,12 +566,18 @@ if (rtc_include_tests) { "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:rtc_numerics", "../../rtc_base:task_queue_for_test", + "../../rtc_base/task_utils:to_queued_task", "../../system_wrappers", "../../test:field_trial", + "../../test:mock_frame_transformer", + "../../test:mock_transport", "../../test:rtp_test_utils", "../../test:test_common", "../../test:test_support", + "../../test/time_controller:time_controller", "../video_coding:codec_globals_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", diff --git a/modules/rtp_rtcp/OWNERS b/modules/rtp_rtcp/OWNERS index e57898aa30..47d12c401f 100644 --- a/modules/rtp_rtcp/OWNERS +++ b/modules/rtp_rtcp/OWNERS @@ -4,8 +4,3 @@ mflodman@webrtc.org asapersson@webrtc.org danilchap@webrtc.org sprang@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/modules/rtp_rtcp/include/flexfec_sender.h b/modules/rtp_rtcp/include/flexfec_sender.h index 94f3502d31..737593e04c 100644 --- a/modules/rtp_rtcp/include/flexfec_sender.h +++ b/modules/rtp_rtcp/include/flexfec_sender.h @@ -21,7 +21,10 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/ulpfec_generator.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" #include "rtc_base/random.h" +#include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -31,7 +34,7 @@ class RtpPacketToSend; // Note that this class is not thread safe, and thus requires external // synchronization. Currently, this is done using the lock in PayloadRouter. -class FlexfecSender { +class FlexfecSender : public VideoFecGenerator { public: FlexfecSender(int payload_type, uint32_t ssrc, @@ -43,29 +46,31 @@ class FlexfecSender { Clock* clock); ~FlexfecSender(); - uint32_t ssrc() const { return ssrc_; } + FecType GetFecType() const override { + return VideoFecGenerator::FecType::kFlexFec; + } + absl::optional FecSsrc() override { return ssrc_; } // Sets the FEC rate, max frames sent before FEC packets are sent, // and what type of generator matrices are used. - void SetFecParameters(const FecProtectionParams& params); + void SetProtectionParameters(const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) override; // Adds a media packet to the internal buffer. When enough media packets // have been added, the FEC packets are generated and stored internally. // These FEC packets are then obtained by calling GetFecPackets(). - // Returns true if the media packet was successfully added. - bool AddRtpPacketAndGenerateFec(const RtpPacketToSend& packet); - - // Returns true if there are generated FEC packets available. 
- bool FecAvailable() const; + void AddPacketAndGenerateFec(const RtpPacketToSend& packet) override; // Returns generated FlexFEC packets. - std::vector> GetFecPackets(); + std::vector> GetFecPackets() override; // Returns the overhead, per packet, for FlexFEC. - size_t MaxPacketOverhead() const; + size_t MaxPacketOverhead() const override; + + DataRate CurrentFecRate() const override; // Only called on the VideoSendStream queue, after operation has shut down. - RtpState GetRtpState(); + absl::optional GetRtpState() override; private: // Utility. @@ -87,6 +92,9 @@ class FlexfecSender { UlpfecGenerator ulpfec_generator_; const RtpHeaderExtensionMap rtp_header_extension_map_; const size_t header_extensions_size_; + + mutable Mutex mutex_; + RateStatistics fec_bitrate_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/include/remote_ntp_time_estimator.h b/modules/rtp_rtcp/include/remote_ntp_time_estimator.h index dd0e0de362..6112e54ef9 100644 --- a/modules/rtp_rtcp/include/remote_ntp_time_estimator.h +++ b/modules/rtp_rtcp/include/remote_ntp_time_estimator.h @@ -13,6 +13,7 @@ #include +#include "absl/types/optional.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/moving_median_filter.h" #include "system_wrappers/include/rtp_to_ntp_estimator.h" @@ -32,7 +33,7 @@ class RemoteNtpTimeEstimator { ~RemoteNtpTimeEstimator(); // Updates the estimator with round trip time |rtt|, NTP seconds |ntp_secs|, - // NTP fraction |ntp_frac| and RTP timestamp |rtcp_timestamp|. + // NTP fraction |ntp_frac| and RTP timestamp |rtp_timestamp|. bool UpdateRtcpTimestamp(int64_t rtt, uint32_t ntp_secs, uint32_t ntp_frac, @@ -42,6 +43,10 @@ class RemoteNtpTimeEstimator { // Returns the NTP timestamp in ms when success. -1 if failed. int64_t Estimate(uint32_t rtp_timestamp); + // Estimates the offset, in milliseconds, between the remote clock and the + // local one. This is equal to local NTP clock - remote NTP clock. 
+ absl::optional EstimateRemoteToLocalClockOffsetMs(); + private: Clock* clock_; MovingMedianFilter ntp_clocks_offset_estimator_; diff --git a/modules/rtp_rtcp/include/rtp_header_extension_map.h b/modules/rtp_rtcp/include/rtp_header_extension_map.h index 360a619f82..ff2d34d60d 100644 --- a/modules/rtp_rtcp/include/rtp_header_extension_map.h +++ b/modules/rtp_rtcp/include/rtp_header_extension_map.h @@ -51,10 +51,6 @@ class RtpHeaderExtensionMap { return ids_[type]; } - // TODO(danilchap): Remove use of the functions below. - RTC_DEPRECATED int32_t Register(RTPExtensionType type, int id) { - return RegisterByType(id, type) ? 0 : -1; - } int32_t Deregister(RTPExtensionType type); void Deregister(absl::string_view uri); diff --git a/modules/rtp_rtcp/include/rtp_rtcp.h b/modules/rtp_rtcp/include/rtp_rtcp.h index b3cd8f6418..8663296eba 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp.h +++ b/modules/rtp_rtcp/include/rtp_rtcp.h @@ -12,432 +12,70 @@ #define MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_H_ #include -#include #include -#include #include -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" -#include "api/video/video_bitrate_allocation.h" #include "modules/include/module.h" -#include "modules/rtp_rtcp/include/flexfec_sender.h" -#include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/constructor_magic.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/deprecation.h" namespace webrtc { -// Forward declarations. 
-class FrameEncryptorInterface; -class OverheadObserver; -class RateLimiter; -class ReceiveStatisticsProvider; -class RemoteBitrateEstimator; -class RtcEventLog; -class RTPSender; -class Transport; -class VideoBitrateAllocationObserver; - -namespace rtcp { -class TransportFeedback; -} - -class RtpRtcp : public Module, public RtcpFeedbackSenderInterface { +// DEPRECATED. Do not use. +class RtpRtcp : public Module, public RtpRtcpInterface { public: - struct Configuration { - Configuration(); - Configuration(Configuration&& rhs); - - // True for a audio version of the RTP/RTCP module object false will create - // a video version. - bool audio = false; - bool receiver_only = false; - - // The clock to use to read time. If nullptr then system clock will be used. - Clock* clock = nullptr; - - ReceiveStatisticsProvider* receive_statistics = nullptr; - - // Transport object that will be called when packets are ready to be sent - // out on the network. - Transport* outgoing_transport = nullptr; - - // Called when the receiver requests an intra frame. - RtcpIntraFrameObserver* intra_frame_callback = nullptr; - - // Called when the receiver sends a loss notification. - RtcpLossNotificationObserver* rtcp_loss_notification_observer = nullptr; - - // Called when we receive a changed estimate from the receiver of out - // stream. - RtcpBandwidthObserver* bandwidth_callback = nullptr; - - NetworkStateEstimateObserver* network_state_estimate_observer = nullptr; - TransportFeedbackObserver* transport_feedback_callback = nullptr; - VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr; - RtcpRttStats* rtt_stats = nullptr; - RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr; - - // Estimates the bandwidth available for a set of streams from the same - // client. - RemoteBitrateEstimator* remote_bitrate_estimator = nullptr; - - // Spread any bursts of packets into smaller bursts to minimize packet loss. 
- RtpPacketSender* paced_sender = nullptr; - - // Generate FlexFEC packets. - // TODO(brandtr): Remove when FlexfecSender is wired up to PacedSender. - FlexfecSender* flexfec_sender = nullptr; - - BitrateStatisticsObserver* send_bitrate_observer = nullptr; - SendSideDelayObserver* send_side_delay_observer = nullptr; - RtcEventLog* event_log = nullptr; - SendPacketObserver* send_packet_observer = nullptr; - RateLimiter* retransmission_rate_limiter = nullptr; - OverheadObserver* overhead_observer = nullptr; - RtcpAckObserver* ack_observer = nullptr; - StreamDataCountersCallback* rtp_stats_callback = nullptr; - - int rtcp_report_interval_ms = 0; - - // Update network2 instead of pacer_exit field of video timing extension. - bool populate_network2_timestamp = false; - - // E2EE Custom Video Frame Encryption - FrameEncryptorInterface* frame_encryptor = nullptr; - // Require all outgoing frames to be encrypted with a FrameEncryptor. - bool require_frame_encryption = false; - - // Corresponds to extmap-allow-mixed in SDP negotiation. - bool extmap_allow_mixed = false; - - // If set, field trials are read from |field_trials|, otherwise - // defaults to webrtc::FieldTrialBasedConfig. - const WebRtcKeyValueConfig* field_trials = nullptr; - - // SSRCs for media and retransmission, respectively. - // FlexFec SSRC is fetched from |flexfec_sender|. - uint32_t local_media_ssrc = 0; - absl::optional rtx_send_ssrc; - - private: - RTC_DISALLOW_COPY_AND_ASSIGN(Configuration); - }; - - // Creates an RTP/RTCP module object using provided |configuration|. 
- static std::unique_ptr Create(const Configuration& configuration); - - // ************************************************************************** - // Receiver functions - // ************************************************************************** - - virtual void IncomingRtcpPacket(const uint8_t* incoming_packet, - size_t incoming_packet_length) = 0; - - virtual void SetRemoteSSRC(uint32_t ssrc) = 0; + // Instantiates a deprecated version of the RtpRtcp module. + static std::unique_ptr RTC_DEPRECATED + Create(const Configuration& configuration) { + return DEPRECATED_Create(configuration); + } - // ************************************************************************** - // Sender - // ************************************************************************** + static std::unique_ptr DEPRECATED_Create( + const Configuration& configuration); - // Sets the maximum size of an RTP packet, including RTP headers. - virtual void SetMaxRtpPacketSize(size_t size) = 0; + // (TMMBR) Temporary Max Media Bit Rate + RTC_DEPRECATED virtual bool TMMBR() const = 0; - // Returns max RTP packet size. Takes into account RTP headers and - // FEC/ULP/RED overhead (when FEC is enabled). - virtual size_t MaxRtpPacketSize() const = 0; + RTC_DEPRECATED virtual void SetTMMBRStatus(bool enable) = 0; - virtual void RegisterSendPayloadFrequency(int payload_type, - int payload_frequency) = 0; + // Returns -1 on failure else 0. + RTC_DEPRECATED virtual int32_t AddMixedCNAME(uint32_t ssrc, + const char* cname) = 0; - // Unregisters a send payload. - // |payload_type| - payload type of codec // Returns -1 on failure else 0. - virtual int32_t DeRegisterSendPayload(int8_t payload_type) = 0; + RTC_DEPRECATED virtual int32_t RemoveMixedCNAME(uint32_t ssrc) = 0; - virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0; + // Returns remote CName. + // Returns -1 on failure else 0. 
+ RTC_DEPRECATED virtual int32_t RemoteCNAME( + uint32_t remote_ssrc, + char cname[RTCP_CNAME_SIZE]) const = 0; // (De)registers RTP header extension type and id. // Returns -1 on failure else 0. RTC_DEPRECATED virtual int32_t RegisterSendRtpHeaderExtension( RTPExtensionType type, uint8_t id) = 0; - // Register extension by uri, triggers CHECK on falure. - virtual void RegisterRtpHeaderExtension(absl::string_view uri, int id) = 0; - - virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0; - virtual void DeregisterSendRtpHeaderExtension(absl::string_view uri) = 0; - - // Returns true if RTP module is send media, and any of the extensions - // required for bandwidth estimation is registered. - virtual bool SupportsPadding() const = 0; - // Same as SupportsPadding(), but additionally requires that - // SetRtxSendStatus() has been called with the kRtxRedundantPayloads option - // enabled. - virtual bool SupportsRtxPayloadPadding() const = 0; - - // Returns start timestamp. - virtual uint32_t StartTimestamp() const = 0; - - // Sets start timestamp. Start timestamp is set to a random value if this - // function is never called. - virtual void SetStartTimestamp(uint32_t timestamp) = 0; - - // Returns SequenceNumber. - virtual uint16_t SequenceNumber() const = 0; - - // Sets SequenceNumber, default is a random number. - virtual void SetSequenceNumber(uint16_t seq) = 0; - - virtual void SetRtpState(const RtpState& rtp_state) = 0; - virtual void SetRtxState(const RtpState& rtp_state) = 0; - virtual RtpState GetRtpState() const = 0; - virtual RtpState GetRtxState() const = 0; - - // Returns SSRC. - virtual uint32_t SSRC() const = 0; - - // Sets the value for sending in the RID (and Repaired) RTP header extension. - // RIDs are used to identify an RTP stream if SSRCs are not negotiated. - // If the RID and Repaired RID extensions are not registered, the RID will - // not be sent. 
- virtual void SetRid(const std::string& rid) = 0; - - // Sets the value for sending in the MID RTP header extension. - // The MID RTP header extension should be registered for this to do anything. - // Once set, this value can not be changed or removed. - virtual void SetMid(const std::string& mid) = 0; - - // Sets CSRC. - // |csrcs| - vector of CSRCs - virtual void SetCsrcs(const std::vector& csrcs) = 0; - - // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination - // of values of the enumerator RtxMode. - virtual void SetRtxSendStatus(int modes) = 0; - - // Returns status of sending RTX (RFC 4588). The returned value can be - // a combination of values of the enumerator RtxMode. - virtual int RtxSendStatus() const = 0; - - // Returns the SSRC used for RTX if set, otherwise a nullopt. - virtual absl::optional RtxSsrc() const = 0; - - // Sets the payload type to use when sending RTX packets. Note that this - // doesn't enable RTX, only the payload type is set. - virtual void SetRtxSendPayloadType(int payload_type, - int associated_payload_type) = 0; - - // Returns the FlexFEC SSRC, if there is one. - virtual absl::optional FlexfecSsrc() const = 0; - - // Sets sending status. Sends kRtcpByeCode when going from true to false. - // Returns -1 on failure else 0. - virtual int32_t SetSendingStatus(bool sending) = 0; - - // Returns current sending status. - virtual bool Sending() const = 0; - - // Starts/Stops media packets. On by default. - virtual void SetSendingMediaStatus(bool sending) = 0; - - // Returns current media sending status. - virtual bool SendingMedia() const = 0; - - // Returns whether audio is configured (i.e. Configuration::audio = true). - virtual bool IsAudioConfigured() const = 0; - - // Indicate that the packets sent by this module should be counted towards the - // bitrate estimate since the stream participates in the bitrate allocation. 
- virtual void SetAsPartOfAllocation(bool part_of_allocation) = 0; - - // Fetches the current send bitrates in bits/s. - virtual void BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nack_rate) const = 0; - - virtual RTPSender* RtpSender() = 0; - virtual const RTPSender* RtpSender() const = 0; - - // Record that a frame is about to be sent. Returns true on success, and false - // if the module isn't ready to send. - virtual bool OnSendingRtpFrame(uint32_t timestamp, - int64_t capture_time_ms, - int payload_type, - bool force_sender_report) = 0; - - // Try to send the provided packet. Returns true iff packet matches any of - // the SSRCs for this module (media/rtx/fec etc) and was forwarded to the - // transport. - virtual bool TrySendPacket(RtpPacketToSend* packet, - const PacedPacketInfo& pacing_info) = 0; - - virtual void OnPacketsAcknowledged( - rtc::ArrayView sequence_numbers) = 0; - - virtual std::vector> GeneratePadding( - size_t target_size_bytes) = 0; - - // ************************************************************************** - // RTCP - // ************************************************************************** - - // Returns RTCP status. - virtual RtcpMode RTCP() const = 0; - - // Sets RTCP status i.e on(compound or non-compound)/off. - // |method| - RTCP method to use. - virtual void SetRTCPStatus(RtcpMode method) = 0; - - // Sets RTCP CName (i.e unique identifier). - // Returns -1 on failure else 0. - virtual int32_t SetCNAME(const char* cname) = 0; - - // Returns remote CName. - // Returns -1 on failure else 0. - virtual int32_t RemoteCNAME(uint32_t remote_ssrc, - char cname[RTCP_CNAME_SIZE]) const = 0; - - // Returns remote NTP. - // Returns -1 on failure else 0. - virtual int32_t RemoteNTP(uint32_t* received_ntp_secs, - uint32_t* received_ntp_frac, - uint32_t* rtcp_arrival_time_secs, - uint32_t* rtcp_arrival_time_frac, - uint32_t* rtcp_timestamp) const = 0; - - // Returns -1 on failure else 0. 
- virtual int32_t AddMixedCNAME(uint32_t ssrc, const char* cname) = 0; - - // Returns -1 on failure else 0. - virtual int32_t RemoveMixedCNAME(uint32_t ssrc) = 0; - - // Returns current RTT (round-trip time) estimate. - // Returns -1 on failure else 0. - virtual int32_t RTT(uint32_t remote_ssrc, - int64_t* rtt, - int64_t* avg_rtt, - int64_t* min_rtt, - int64_t* max_rtt) const = 0; - - // Returns the estimated RTT, with fallback to a default value. - virtual int64_t ExpectedRetransmissionTimeMs() const = 0; - - // Forces a send of a RTCP packet. Periodic SR and RR are triggered via the - // process function. - // Returns -1 on failure else 0. - virtual int32_t SendRTCP(RTCPPacketType rtcp_packet_type) = 0; - - // Returns statistics of the amount of data sent. - // Returns -1 on failure else 0. - virtual int32_t DataCountersRTP(size_t* bytes_sent, - uint32_t* packets_sent) const = 0; - - // Returns send statistics for the RTP and RTX stream. - virtual void GetSendStreamDataCounters( - StreamDataCounters* rtp_counters, - StreamDataCounters* rtx_counters) const = 0; - - // Returns received RTCP report block. - // Returns -1 on failure else 0. - // TODO(https://crbug.com/webrtc/10678): Remove this in favor of - // GetLatestReportBlockData(). - virtual int32_t RemoteRTCPStat( - std::vector* receive_blocks) const = 0; - // A snapshot of Report Blocks with additional data of interest to statistics. - // Within this list, the sender-source SSRC pair is unique and per-pair the - // ReportBlockData represents the latest Report Block that was received for - // that pair. - virtual std::vector GetLatestReportBlockData() const = 0; // (APP) Sets application specific data. // Returns -1 on failure else 0. - virtual int32_t SetRTCPApplicationSpecificData(uint8_t sub_type, - uint32_t name, - const uint8_t* data, - uint16_t length) = 0; - // (XR) Sets Receiver Reference Time Report (RTTR) status. 
- virtual void SetRtcpXrRrtrStatus(bool enable) = 0; - - // Returns current Receiver Reference Time Report (RTTR) status. - virtual bool RtcpXrRrtrStatus() const = 0; - - // (REMB) Receiver Estimated Max Bitrate. - // Schedules sending REMB on next and following sender/receiver reports. - void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override = 0; - // Stops sending REMB on next and following sender/receiver reports. - void UnsetRemb() override = 0; + RTC_DEPRECATED virtual int32_t SetRTCPApplicationSpecificData( + uint8_t sub_type, + uint32_t name, + const uint8_t* data, + uint16_t length) = 0; - // (TMMBR) Temporary Max Media Bit Rate - virtual bool TMMBR() const = 0; - - virtual void SetTMMBRStatus(bool enable) = 0; - - // (NACK) - - // Sends a Negative acknowledgement packet. + // Returns statistics of the amount of data sent. // Returns -1 on failure else 0. - // TODO(philipel): Deprecate this and start using SendNack instead, mostly - // because we want a function that actually send NACK for the specified - // packets. - virtual int32_t SendNACK(const uint16_t* nack_list, uint16_t size) = 0; - - // Sends NACK for the packets specified. - // Note: This assumes the caller keeps track of timing and doesn't rely on - // the RTP module to do this. - virtual void SendNack(const std::vector& sequence_numbers) = 0; - - // Store the sent packets, needed to answer to a Negative acknowledgment - // requests. - virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0; - - // Returns true if the module is configured to store packets. - virtual bool StorePackets() const = 0; - - // Called on receipt of RTCP report block from remote side. - // TODO(https://crbug.com/webrtc/10678): Remove RtcpStatisticsCallback in - // favor of ReportBlockDataObserver. - // TODO(https://crbug.com/webrtc/10679): Consider whether we want to use only - // getters or only callbacks. 
If we decide on getters, the - // ReportBlockDataObserver should also be removed in favor of - // GetLatestReportBlockData(). - // TODO(nisse): Replace RegisterRtcpStatisticsCallback and - // RegisterRtcpCnameCallback with construction-time settings in - // RtpRtcp::Configuration. - virtual void RegisterRtcpStatisticsCallback( - RtcpStatisticsCallback* callback) = 0; - virtual RtcpStatisticsCallback* GetRtcpStatisticsCallback() = 0; - virtual void RegisterRtcpCnameCallback(RtcpCnameCallback* callback) = 0; - // TODO(https://crbug.com/webrtc/10680): When callbacks are registered at - // construction, remove this setter. - virtual void SetReportBlockDataObserver( - ReportBlockDataObserver* observer) = 0; - virtual void SetVideoBitrateAllocation( - const VideoBitrateAllocation& bitrate) = 0; - - // ************************************************************************** - // Video - // ************************************************************************** + RTC_DEPRECATED virtual int32_t DataCountersRTP( + size_t* bytes_sent, + uint32_t* packets_sent) const = 0; // Requests new key frame. // using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1 void SendPictureLossIndication() { SendRTCP(kRtcpPli); } // using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2 void SendFullIntraRequest() { SendRTCP(kRtcpFir); } - - // Sends a LossNotification RTCP message. - // Returns -1 on failure else 0. 
- virtual int32_t SendLossNotification(uint16_t last_decoded_seq_num, - uint16_t last_received_seq_num, - bool decodability_flag, - bool buffering_allowed) = 0; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.cc b/modules/rtp_rtcp/include/rtp_rtcp_defines.cc index ca128e708a..5aa41fccb3 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp_defines.cc +++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.cc @@ -44,6 +44,12 @@ bool IsLegalRsidName(absl::string_view name) { StreamDataCounters::StreamDataCounters() : first_packet_time_ms(-1) {} +RtpPacketCounter::RtpPacketCounter(const RtpPacket& packet) + : header_bytes(packet.headers_size()), + payload_bytes(packet.payload_size()), + padding_bytes(packet.padding_size()), + packets(1) {} + void RtpPacketCounter::AddPacket(const RtpPacket& packet) { ++packets; header_bytes += packet.headers_size(); diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h index 8cd402e227..cbc2d92111 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -17,6 +17,7 @@ #include #include +#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "absl/types/variant.h" @@ -32,6 +33,7 @@ namespace webrtc { class RtpPacket; +class RtpPacketToSend; namespace rtcp { class TransportFeedback; } @@ -63,14 +65,13 @@ enum RTPExtensionType : int { kRtpExtensionTransportSequenceNumber02, kRtpExtensionPlayoutDelay, kRtpExtensionVideoContentType, + kRtpExtensionVideoLayersAllocation, kRtpExtensionVideoTiming, - kRtpExtensionFrameMarking, kRtpExtensionRtpStreamId, kRtpExtensionRepairedRtpStreamId, kRtpExtensionMid, kRtpExtensionGenericFrameDescriptor00, kRtpExtensionGenericFrameDescriptor = kRtpExtensionGenericFrameDescriptor00, - kRtpExtensionGenericFrameDescriptor01, kRtpExtensionGenericFrameDescriptor02, kRtpExtensionColorSpace, kRtpExtensionNumberOfExtensions // Must be 
the last entity in the enum. @@ -91,7 +92,6 @@ enum RTCPPacketType : uint32_t { kRtcpTmmbr = 0x0100, kRtcpTmmbn = 0x0200, kRtcpSrReq = 0x0400, - kRtcpApp = 0x1000, kRtcpLossNotification = 0x2000, kRtcpRemb = 0x10000, kRtcpTransmissionTimeOffset = 0x20000, @@ -158,14 +158,12 @@ struct RtpState { timestamp(0), capture_time_ms(-1), last_timestamp_time_ms(-1), - media_has_been_sent(false), ssrc_has_acked(false) {} uint16_t sequence_number; uint32_t start_timestamp; uint32_t timestamp; int64_t capture_time_ms; int64_t last_timestamp_time_ms; - bool media_has_been_sent; bool ssrc_has_acked; }; @@ -212,6 +210,17 @@ class RtcpBandwidthObserver { virtual ~RtcpBandwidthObserver() {} }; +// NOTE! |kNumMediaTypes| must be kept in sync with RtpPacketMediaType! +static constexpr size_t kNumMediaTypes = 5; +enum class RtpPacketMediaType : size_t { + kAudio, // Audio media packets. + kVideo, // Video media packets. + kRetransmission, // Retransmisions, sent as response to NACK. + kForwardErrorCorrection, // FEC packets. + kPadding = kNumMediaTypes - 1, // RTX or plain padding sent to maintain BWE. + // Again, don't forget to udate |kNumMediaTypes| if you add another value! +}; + struct RtpPacketSendInfo { public: RtpPacketSendInfo() = default; @@ -219,11 +228,11 @@ struct RtpPacketSendInfo { uint16_t transport_sequence_number = 0; uint32_t ssrc = 0; uint16_t rtp_sequence_number = 0; - // Get rid of this flag when all code paths populate |rtp_sequence_number|. 
- bool has_rtp_sequence_number = false; size_t length = 0; + absl::optional packet_type; PacedPacketInfo pacing_info; }; + class NetworkStateEstimateObserver { public: virtual void OnRemoteNetworkEstimate(NetworkStateEstimate estimate) = 0; @@ -288,6 +297,8 @@ struct RtpPacketCounter { RtpPacketCounter() : header_bytes(0), payload_bytes(0), padding_bytes(0), packets(0) {} + explicit RtpPacketCounter(const RtpPacket& packet); + void Add(const RtpPacketCounter& other) { header_bytes += other.header_bytes; payload_bytes += other.payload_bytes; @@ -306,6 +317,12 @@ struct RtpPacketCounter { packets -= other.packets; } + bool operator==(const RtpPacketCounter& other) const { + return header_bytes == other.header_bytes && + payload_bytes == other.payload_bytes && + padding_bytes == other.padding_bytes && packets == other.packets; + } + // Not inlined, since use of RtpPacket would result in circular includes. void AddPacket(const RtpPacket& packet); @@ -369,6 +386,34 @@ struct StreamDataCounters { RtpPacketCounter fec; // Number of redundancy packets/bytes. }; +class RtpSendRates { + template + constexpr std::array make_zero_array( + std::index_sequence) { + return {{(static_cast(Is), DataRate::Zero())...}}; + } + + public: + RtpSendRates() + : send_rates_( + make_zero_array(std::make_index_sequence())) {} + RtpSendRates(const RtpSendRates& rhs) = default; + RtpSendRates& operator=(const RtpSendRates&) = default; + + DataRate& operator[](RtpPacketMediaType type) { + return send_rates_[static_cast(type)]; + } + const DataRate& operator[](RtpPacketMediaType type) const { + return send_rates_[static_cast(type)]; + } + DataRate Sum() const { + return absl::c_accumulate(send_rates_, DataRate::Zero()); + } + + private: + std::array send_rates_; +}; + // Callback, called whenever byte/packet counts have been updated. 
class StreamDataCountersCallback { public: @@ -392,19 +437,6 @@ struct RtpReceiveStats { RtpPacketCounter packet_counter; }; -class RtcpAckObserver { - public: - // This method is called on received report blocks matching the sender ssrc. - // TODO(nisse): Use of "extended" sequence number is a bit brittle, since the - // observer for this callback typically has its own sequence number unwrapper, - // and there's no guarantee that they are in sync. Change to pass raw sequence - // number, possibly augmented with timestamp (if available) to aid - // disambiguation. - virtual void OnReceivedAck(int64_t extended_highest_sequence_number) = 0; - - virtual ~RtcpAckObserver() = default; -}; - // Callback, used to notify an observer whenever new rates have been estimated. class BitrateStatisticsObserver { public: @@ -436,5 +468,15 @@ class SendPacketObserver { int64_t capture_time_ms, uint32_t ssrc) = 0; }; + +// Interface for a class that can assign RTP sequence numbers for a packet +// to be sent. +class SequenceNumberAssigner { + public: + SequenceNumberAssigner() = default; + virtual ~SequenceNumberAssigner() = default; + + virtual void AssignSequenceNumber(RtpPacketToSend* packet) = 0; +}; } // namespace webrtc #endif // MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_DEFINES_H_ diff --git a/modules/rtp_rtcp/include/ulpfec_receiver.h b/modules/rtp_rtcp/include/ulpfec_receiver.h index eb55deca23..d3981dfac3 100644 --- a/modules/rtp_rtcp/include/ulpfec_receiver.h +++ b/modules/rtp_rtcp/include/ulpfec_receiver.h @@ -16,7 +16,7 @@ #include "api/array_view.h" #include "api/rtp_parameters.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtp_packet.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" namespace webrtc { @@ -44,7 +44,7 @@ class UlpfecReceiver { // // TODO(brandtr): Set |ulpfec_payload_type| during constructor call, // rather than as a parameter here. 
- virtual bool AddReceivedRedPacket(const RtpPacket& rtp_packet, + virtual bool AddReceivedRedPacket(const RtpPacketReceived& rtp_packet, uint8_t ulpfec_payload_type) = 0; // Sends the received packets to the FEC and returns all packets diff --git a/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h b/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h index d6442ad58a..404ded01d8 100644 --- a/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h +++ b/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h @@ -18,9 +18,10 @@ namespace webrtc { class MockRecoveredPacketReceiver : public RecoveredPacketReceiver { public: - MockRecoveredPacketReceiver(); - ~MockRecoveredPacketReceiver(); - MOCK_METHOD2(OnRecoveredPacket, void(const uint8_t* packet, size_t length)); + MOCK_METHOD(void, + OnRecoveredPacket, + (const uint8_t* packet, size_t length), + (override)); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.h b/modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.h index b9a8f79a1d..12f143ae8b 100644 --- a/modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.h +++ b/modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.h @@ -18,12 +18,11 @@ namespace webrtc { class MockRtcpBandwidthObserver : public RtcpBandwidthObserver { public: - MockRtcpBandwidthObserver(); - ~MockRtcpBandwidthObserver(); - - MOCK_METHOD1(OnReceivedEstimatedBitrate, void(uint32_t)); - MOCK_METHOD3(OnReceivedRtcpReceiverReport, - void(const ReportBlockList&, int64_t, int64_t)); + MOCK_METHOD(void, OnReceivedEstimatedBitrate, (uint32_t), (override)); + MOCK_METHOD(void, + OnReceivedRtcpReceiverReport, + (const ReportBlockList&, int64_t, int64_t), + (override)); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_MOCKS_MOCK_RTCP_BANDWIDTH_OBSERVER_H_ diff --git a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h b/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h index 6ccef612c2..e9a7d52691 100644 --- a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h +++ 
b/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h @@ -18,11 +18,8 @@ namespace webrtc { class MockRtcpRttStats : public RtcpRttStats { public: - MockRtcpRttStats(); - ~MockRtcpRttStats(); - - MOCK_METHOD1(OnRttUpdate, void(int64_t rtt)); - MOCK_CONST_METHOD0(LastProcessedRtt, int64_t()); + MOCK_METHOD(void, OnRttUpdate, (int64_t rtt), (override)); + MOCK_METHOD(int64_t, LastProcessedRtt, (), (const, override)); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_MOCKS_MOCK_RTCP_RTT_STATS_H_ diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index 83bc7ccec7..70b073cd79 100644 --- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -20,161 +20,172 @@ #include "absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" #include "modules/include/module.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/checks.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "test/gmock.h" namespace webrtc { -class MockRtpRtcp : public RtpRtcp { +class MockRtpRtcpInterface : public RtpRtcpInterface { public: - MockRtpRtcp(); - ~MockRtpRtcp(); - - MOCK_METHOD2(IncomingRtcpPacket, - void(const uint8_t* incoming_packet, size_t packet_length)); - MOCK_METHOD1(SetRemoteSSRC, void(uint32_t ssrc)); - MOCK_METHOD1(SetMaxRtpPacketSize, void(size_t size)); - MOCK_CONST_METHOD0(MaxRtpPacketSize, size_t()); - MOCK_METHOD2(RegisterSendPayloadFrequency, - void(int payload_type, int frequency)); - MOCK_METHOD1(DeRegisterSendPayload, int32_t(int8_t payload_type)); - MOCK_METHOD1(SetExtmapAllowMixed, void(bool extmap_allow_mixed)); - MOCK_METHOD2(RegisterSendRtpHeaderExtension, - int32_t(RTPExtensionType type, uint8_t id)); - MOCK_METHOD2(RegisterRtpHeaderExtension, void(absl::string_view uri, int 
id)); - MOCK_METHOD1(DeregisterSendRtpHeaderExtension, - int32_t(RTPExtensionType type)); - MOCK_METHOD1(DeregisterSendRtpHeaderExtension, void(absl::string_view uri)); - MOCK_CONST_METHOD0(SupportsPadding, bool()); - MOCK_CONST_METHOD0(SupportsRtxPayloadPadding, bool()); - MOCK_CONST_METHOD0(StartTimestamp, uint32_t()); - MOCK_METHOD1(SetStartTimestamp, void(uint32_t timestamp)); - MOCK_CONST_METHOD0(SequenceNumber, uint16_t()); - MOCK_METHOD1(SetSequenceNumber, void(uint16_t seq)); - MOCK_METHOD1(SetRtpState, void(const RtpState& rtp_state)); - MOCK_METHOD1(SetRtxState, void(const RtpState& rtp_state)); - MOCK_CONST_METHOD0(GetRtpState, RtpState()); - MOCK_CONST_METHOD0(GetRtxState, RtpState()); - MOCK_CONST_METHOD0(SSRC, uint32_t()); - MOCK_METHOD1(SetSSRC, void(uint32_t ssrc)); - MOCK_METHOD1(SetRid, void(const std::string& rid)); - MOCK_METHOD1(SetMid, void(const std::string& mid)); - MOCK_CONST_METHOD1(CSRCs, int32_t(uint32_t csrcs[kRtpCsrcSize])); - MOCK_METHOD1(SetCsrcs, void(const std::vector& csrcs)); - MOCK_METHOD1(SetCSRCStatus, int32_t(bool include)); - MOCK_METHOD1(SetRtxSendStatus, void(int modes)); - MOCK_CONST_METHOD0(RtxSendStatus, int()); - MOCK_CONST_METHOD0(RtxSsrc, absl::optional()); - MOCK_METHOD1(SetRtxSsrc, void(uint32_t)); - MOCK_METHOD2(SetRtxSendPayloadType, void(int, int)); - MOCK_CONST_METHOD0(FlexfecSsrc, absl::optional()); - MOCK_CONST_METHOD0(RtxSendPayloadType, std::pair()); - MOCK_METHOD1(SetSendingStatus, int32_t(bool sending)); - MOCK_CONST_METHOD0(Sending, bool()); - MOCK_METHOD1(SetSendingMediaStatus, void(bool sending)); - MOCK_CONST_METHOD0(SendingMedia, bool()); - MOCK_CONST_METHOD0(IsAudioConfigured, bool()); - MOCK_METHOD1(SetAsPartOfAllocation, void(bool)); - MOCK_CONST_METHOD4(BitrateSent, - void(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nack_rate)); - MOCK_CONST_METHOD1(EstimatedReceiveBandwidth, - int(uint32_t* available_bandwidth)); - MOCK_METHOD4(OnSendingRtpFrame, 
bool(uint32_t, int64_t, int, bool)); - MOCK_METHOD2(TrySendPacket, - bool(RtpPacketToSend* packet, - const PacedPacketInfo& pacing_info)); - MOCK_METHOD1(OnPacketsAcknowledged, void(rtc::ArrayView)); - MOCK_METHOD1( - GeneratePadding, - std::vector>(size_t target_size_bytes)); - MOCK_METHOD2(RegisterRtcpObservers, - void(RtcpIntraFrameObserver* intra_frame_callback, - RtcpBandwidthObserver* bandwidth_callback)); - MOCK_CONST_METHOD0(RTCP, RtcpMode()); - MOCK_METHOD1(SetRTCPStatus, void(RtcpMode method)); - MOCK_METHOD1(SetCNAME, int32_t(const char cname[RTCP_CNAME_SIZE])); - MOCK_CONST_METHOD2(RemoteCNAME, - int32_t(uint32_t remote_ssrc, - char cname[RTCP_CNAME_SIZE])); - MOCK_CONST_METHOD5(RemoteNTP, - int32_t(uint32_t* received_ntp_secs, - uint32_t* received_ntp_frac, - uint32_t* rtcp_arrival_time_secs, - uint32_t* rtcp_arrival_time_frac, - uint32_t* rtcp_timestamp)); - MOCK_METHOD2(AddMixedCNAME, - int32_t(uint32_t ssrc, const char cname[RTCP_CNAME_SIZE])); - MOCK_METHOD1(RemoveMixedCNAME, int32_t(uint32_t ssrc)); - MOCK_CONST_METHOD5(RTT, - int32_t(uint32_t remote_ssrc, - int64_t* rtt, - int64_t* avg_rtt, - int64_t* min_rtt, - int64_t* max_rtt)); - MOCK_CONST_METHOD0(ExpectedRetransmissionTimeMs, int64_t()); - MOCK_METHOD1(SendRTCP, int32_t(RTCPPacketType packet_type)); - MOCK_METHOD1(SendCompoundRTCP, - int32_t(const std::set& packet_types)); - MOCK_CONST_METHOD2(DataCountersRTP, - int32_t(size_t* bytes_sent, uint32_t* packets_sent)); - MOCK_CONST_METHOD2(GetSendStreamDataCounters, - void(StreamDataCounters*, StreamDataCounters*)); - MOCK_CONST_METHOD1(RemoteRTCPStat, - int32_t(std::vector* receive_blocks)); - MOCK_CONST_METHOD0(GetLatestReportBlockData, std::vector()); - MOCK_METHOD4(SetRTCPApplicationSpecificData, - int32_t(uint8_t sub_type, - uint32_t name, - const uint8_t* data, - uint16_t length)); - MOCK_METHOD1(SetRtcpXrRrtrStatus, void(bool enable)); - MOCK_CONST_METHOD0(RtcpXrRrtrStatus, bool()); - MOCK_METHOD2(SetRemb, void(int64_t bitrate, 
std::vector ssrcs)); - MOCK_METHOD0(UnsetRemb, void()); - MOCK_CONST_METHOD0(TMMBR, bool()); - MOCK_METHOD1(SetTMMBRStatus, void(bool enable)); - MOCK_METHOD1(OnBandwidthEstimateUpdate, void(uint16_t bandwidth_kbit)); - MOCK_METHOD2(SendNACK, int32_t(const uint16_t* nack_list, uint16_t size)); - MOCK_METHOD1(SendNack, void(const std::vector& sequence_numbers)); - MOCK_METHOD2(SetStorePacketsStatus, - void(bool enable, uint16_t number_to_store)); - MOCK_CONST_METHOD0(StorePackets, bool()); - MOCK_METHOD1(RegisterRtcpStatisticsCallback, void(RtcpStatisticsCallback*)); - MOCK_METHOD0(GetRtcpStatisticsCallback, RtcpStatisticsCallback*()); - MOCK_METHOD1(RegisterRtcpCnameCallback, void(RtcpCnameCallback*)); - MOCK_METHOD1(SetReportBlockDataObserver, void(ReportBlockDataObserver*)); - MOCK_METHOD1(SendFeedbackPacket, bool(const rtcp::TransportFeedback& packet)); - MOCK_METHOD1(SendNetworkStateEstimatePacket, - bool(const rtcp::RemoteEstimate& packet)); - MOCK_METHOD1( - SendCombinedRtcpPacket, - void(std::vector> rtcp_packets)); - MOCK_METHOD1(SetTargetSendBitrate, void(uint32_t bitrate_bps)); - MOCK_METHOD4(SendLossNotification, - int32_t(uint16_t last_decoded_seq_num, - uint16_t last_received_seq_num, - bool decodability_flag, - bool buffering_allowed)); - MOCK_METHOD0(Process, void()); - MOCK_METHOD1(SetVideoBitrateAllocation, void(const VideoBitrateAllocation&)); - MOCK_METHOD0(RtpSender, RTPSender*()); - MOCK_CONST_METHOD0(RtpSender, const RTPSender*()); - - // Members. - unsigned int remote_ssrc_; - - private: - // Mocking this method is currently not required and having a default - // implementation like MOCK_METHOD0(TimeUntilNextProcess, int64_t()) - // can be dangerous since it can cause a tight loop on a process thread. 
- virtual int64_t TimeUntilNextProcess() { return 0xffffffff; } + MOCK_METHOD(void, + IncomingRtcpPacket, + (const uint8_t* incoming_packet, size_t packet_length), + (override)); + MOCK_METHOD(void, SetRemoteSSRC, (uint32_t ssrc), (override)); + MOCK_METHOD(void, SetMaxRtpPacketSize, (size_t size), (override)); + MOCK_METHOD(size_t, MaxRtpPacketSize, (), (const, override)); + MOCK_METHOD(void, + RegisterSendPayloadFrequency, + (int payload_type, int frequency), + (override)); + MOCK_METHOD(int32_t, + DeRegisterSendPayload, + (int8_t payload_type), + (override)); + MOCK_METHOD(void, SetExtmapAllowMixed, (bool extmap_allow_mixed), (override)); + MOCK_METHOD(void, + RegisterRtpHeaderExtension, + (absl::string_view uri, int id), + (override)); + MOCK_METHOD(int32_t, + DeregisterSendRtpHeaderExtension, + (RTPExtensionType type), + (override)); + MOCK_METHOD(void, + DeregisterSendRtpHeaderExtension, + (absl::string_view uri), + (override)); + MOCK_METHOD(bool, SupportsPadding, (), (const, override)); + MOCK_METHOD(bool, SupportsRtxPayloadPadding, (), (const, override)); + MOCK_METHOD(uint32_t, StartTimestamp, (), (const, override)); + MOCK_METHOD(void, SetStartTimestamp, (uint32_t timestamp), (override)); + MOCK_METHOD(uint16_t, SequenceNumber, (), (const, override)); + MOCK_METHOD(void, SetSequenceNumber, (uint16_t seq), (override)); + MOCK_METHOD(void, SetRtpState, (const RtpState& rtp_state), (override)); + MOCK_METHOD(void, SetRtxState, (const RtpState& rtp_state), (override)); + MOCK_METHOD(RtpState, GetRtpState, (), (const, override)); + MOCK_METHOD(RtpState, GetRtxState, (), (const, override)); + MOCK_METHOD(uint32_t, SSRC, (), (const, override)); + MOCK_METHOD(void, SetRid, (const std::string& rid), (override)); + MOCK_METHOD(void, SetMid, (const std::string& mid), (override)); + MOCK_METHOD(void, SetCsrcs, (const std::vector& csrcs), (override)); + MOCK_METHOD(void, SetRtxSendStatus, (int modes), (override)); + MOCK_METHOD(int, RtxSendStatus, (), (const, 
override)); + MOCK_METHOD(absl::optional, RtxSsrc, (), (const, override)); + MOCK_METHOD(void, SetRtxSendPayloadType, (int, int), (override)); + MOCK_METHOD(absl::optional, FlexfecSsrc, (), (const, override)); + MOCK_METHOD(int32_t, SetSendingStatus, (bool sending), (override)); + MOCK_METHOD(bool, Sending, (), (const, override)); + MOCK_METHOD(void, SetSendingMediaStatus, (bool sending), (override)); + MOCK_METHOD(bool, SendingMedia, (), (const, override)); + MOCK_METHOD(bool, IsAudioConfigured, (), (const, override)); + MOCK_METHOD(void, SetAsPartOfAllocation, (bool), (override)); + MOCK_METHOD(RtpSendRates, GetSendRates, (), (const, override)); + MOCK_METHOD(bool, + OnSendingRtpFrame, + (uint32_t, int64_t, int, bool), + (override)); + MOCK_METHOD(bool, + TrySendPacket, + (RtpPacketToSend * packet, const PacedPacketInfo& pacing_info), + (override)); + MOCK_METHOD(void, + SetFecProtectionParams, + (const FecProtectionParams& delta_params, + const FecProtectionParams& key_params), + (override)); + MOCK_METHOD(std::vector>, + FetchFecPackets, + (), + (override)); + MOCK_METHOD(void, + OnPacketsAcknowledged, + (rtc::ArrayView), + (override)); + MOCK_METHOD(std::vector>, + GeneratePadding, + (size_t target_size_bytes), + (override)); + MOCK_METHOD(std::vector, + GetSentRtpPacketInfos, + (rtc::ArrayView sequence_numbers), + (const, override)); + MOCK_METHOD(size_t, ExpectedPerPacketOverhead, (), (const, override)); + MOCK_METHOD(RtcpMode, RTCP, (), (const, override)); + MOCK_METHOD(void, SetRTCPStatus, (RtcpMode method), (override)); + MOCK_METHOD(int32_t, + SetCNAME, + (const char cname[RTCP_CNAME_SIZE]), + (override)); + MOCK_METHOD(int32_t, + RemoteNTP, + (uint32_t * received_ntp_secs, + uint32_t* received_ntp_frac, + uint32_t* rtcp_arrival_time_secs, + uint32_t* rtcp_arrival_time_frac, + uint32_t* rtcp_timestamp), + (const, override)); + MOCK_METHOD(int32_t, + RTT, + (uint32_t remote_ssrc, + int64_t* rtt, + int64_t* avg_rtt, + int64_t* min_rtt, + int64_t* max_rtt), 
+ (const, override)); + MOCK_METHOD(int64_t, ExpectedRetransmissionTimeMs, (), (const, override)); + MOCK_METHOD(int32_t, SendRTCP, (RTCPPacketType packet_type), (override)); + MOCK_METHOD(void, + GetSendStreamDataCounters, + (StreamDataCounters*, StreamDataCounters*), + (const, override)); + MOCK_METHOD(int32_t, + RemoteRTCPStat, + (std::vector * receive_blocks), + (const, override)); + MOCK_METHOD(std::vector, + GetLatestReportBlockData, + (), + (const, override)); + MOCK_METHOD(void, SetRtcpXrRrtrStatus, (bool enable), (override)); + MOCK_METHOD(bool, RtcpXrRrtrStatus, (), (const, override)); + MOCK_METHOD(void, + SetRemb, + (int64_t bitrate, std::vector ssrcs), + (override)); + MOCK_METHOD(void, UnsetRemb, (), (override)); + MOCK_METHOD(int32_t, + SendNACK, + (const uint16_t* nack_list, uint16_t size), + (override)); + MOCK_METHOD(void, + SendNack, + (const std::vector& sequence_numbers), + (override)); + MOCK_METHOD(void, + SetStorePacketsStatus, + (bool enable, uint16_t number_to_store), + (override)); + MOCK_METHOD(bool, StorePackets, (), (const, override)); + MOCK_METHOD(void, + SendCombinedRtcpPacket, + (std::vector> rtcp_packets), + (override)); + MOCK_METHOD(int32_t, + SendLossNotification, + (uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed), + (override)); + MOCK_METHOD(void, + SetVideoBitrateAllocation, + (const VideoBitrateAllocation&), + (override)); + MOCK_METHOD(RTPSender*, RtpSender, (), (override)); + MOCK_METHOD(const RTPSender*, RtpSender, (), (const, override)); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc b/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc index 62f300d0e5..529ed7eef6 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc +++ b/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc @@ -39,7 +39,7 @@ uint32_t AbsoluteCaptureTimeReceiver::GetSource( void 
AbsoluteCaptureTimeReceiver::SetRemoteToLocalClockOffset( absl::optional value_q32x32) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); remote_to_local_clock_offset_ = value_q32x32; } @@ -52,7 +52,7 @@ AbsoluteCaptureTimeReceiver::OnReceivePacket( const absl::optional& received_extension) { const Timestamp receive_time = clock_->CurrentTime(); - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); AbsoluteCaptureTime extension; if (received_extension == absl::nullopt) { diff --git a/modules/rtp_rtcp/source/absolute_capture_time_receiver.h b/modules/rtp_rtcp/source/absolute_capture_time_receiver.h index 10f9539b2d..ce3442b386 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_receiver.h +++ b/modules/rtp_rtcp/source/absolute_capture_time_receiver.h @@ -15,7 +15,7 @@ #include "api/rtp_headers.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -36,7 +36,7 @@ namespace webrtc { class AbsoluteCaptureTimeReceiver { public: static constexpr TimeDelta kInterpolationMaxInterval = - TimeDelta::Millis<5000>(); + TimeDelta::Millis(5000); explicit AbsoluteCaptureTimeReceiver(Clock* clock); @@ -73,26 +73,26 @@ class AbsoluteCaptureTimeReceiver { uint32_t source, uint32_t rtp_timestamp, uint32_t rtp_clock_frequency) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); absl::optional AdjustEstimatedCaptureClockOffset( absl::optional received_value) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* const clock_; - rtc::CriticalSection crit_; + Mutex mutex_; - absl::optional remote_to_local_clock_offset_ RTC_GUARDED_BY(crit_); + absl::optional remote_to_local_clock_offset_ RTC_GUARDED_BY(mutex_); - Timestamp last_receive_time_ RTC_GUARDED_BY(crit_); + Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_); - 
uint32_t last_source_ RTC_GUARDED_BY(crit_); - uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(crit_); - uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(crit_); - uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(crit_); + uint32_t last_source_ RTC_GUARDED_BY(mutex_); + uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_); + uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_); + uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_); absl::optional last_estimated_capture_clock_offset_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); }; // AbsoluteCaptureTimeReceiver } // namespace webrtc diff --git a/modules/rtp_rtcp/source/absolute_capture_time_sender.cc b/modules/rtp_rtcp/source/absolute_capture_time_sender.cc index f614c0c521..83ba6cac91 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_sender.cc +++ b/modules/rtp_rtcp/source/absolute_capture_time_sender.cc @@ -47,7 +47,7 @@ absl::optional AbsoluteCaptureTimeSender::OnSendPacket( absl::optional estimated_capture_clock_offset) { const Timestamp send_time = clock_->CurrentTime(); - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); if (!ShouldSendExtension(send_time, source, rtp_timestamp, rtp_clock_frequency, absolute_capture_timestamp, diff --git a/modules/rtp_rtcp/source/absolute_capture_time_sender.h b/modules/rtp_rtcp/source/absolute_capture_time_sender.h index 86158a875d..348a28370d 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_sender.h +++ b/modules/rtp_rtcp/source/absolute_capture_time_sender.h @@ -15,7 +15,7 @@ #include "api/rtp_headers.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -41,8 +41,8 @@ namespace webrtc { class AbsoluteCaptureTimeSender { public: static constexpr TimeDelta kInterpolationMaxInterval = - TimeDelta::Millis<1000>(); - static constexpr TimeDelta 
kInterpolationMaxError = TimeDelta::Millis<1>(); + TimeDelta::Millis(1000); + static constexpr TimeDelta kInterpolationMaxError = TimeDelta::Millis(1); explicit AbsoluteCaptureTimeSender(Clock* clock); @@ -67,20 +67,20 @@ class AbsoluteCaptureTimeSender { uint32_t rtp_clock_frequency, uint64_t absolute_capture_timestamp, absl::optional estimated_capture_clock_offset) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* const clock_; - rtc::CriticalSection crit_; + Mutex mutex_; - Timestamp last_send_time_ RTC_GUARDED_BY(crit_); + Timestamp last_send_time_ RTC_GUARDED_BY(mutex_); - uint32_t last_source_ RTC_GUARDED_BY(crit_); - uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(crit_); - uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(crit_); - uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(crit_); + uint32_t last_source_ RTC_GUARDED_BY(mutex_); + uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_); + uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_); + uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_); absl::optional last_estimated_capture_clock_offset_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); }; // AbsoluteCaptureTimeSender } // namespace webrtc diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper.cc b/modules/rtp_rtcp/source/active_decode_targets_helper.cc new file mode 100644 index 0000000000..71e7e8cf78 --- /dev/null +++ b/modules/rtp_rtcp/source/active_decode_targets_helper.cc @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/active_decode_targets_helper.h" + +#include + +#include "api/array_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { + +// Returns mask of ids of chains previous frame is part of. +// Assumes for each chain frames are seen in order and no frame on any chain is +// missing. That assumptions allows a simple detection when previous frame is +// part of a chain. +std::bitset<32> LastSendOnChain(int frame_diff, + rtc::ArrayView chain_diffs) { + std::bitset<32> bitmask = 0; + for (size_t i = 0; i < chain_diffs.size(); ++i) { + if (frame_diff == chain_diffs[i]) { + bitmask.set(i); + } + } + return bitmask; +} + +// Returns bitmask with first `num` bits set to 1. +std::bitset<32> AllActive(size_t num) { + RTC_DCHECK_LE(num, 32); + return (~uint32_t{0}) >> (32 - num); +} + +// Returns bitmask of chains that protect at least one active decode target. +std::bitset<32> ActiveChains( + rtc::ArrayView decode_target_protected_by_chain, + int num_chains, + std::bitset<32> active_decode_targets) { + std::bitset<32> active_chains = 0; + for (size_t dt = 0; dt < decode_target_protected_by_chain.size(); ++dt) { + if (dt < active_decode_targets.size() && !active_decode_targets[dt]) { + continue; + } + int chain_idx = decode_target_protected_by_chain[dt]; + RTC_DCHECK_LT(chain_idx, num_chains); + active_chains.set(chain_idx); + } + return active_chains; +} + +} // namespace + +void ActiveDecodeTargetsHelper::OnFrame( + rtc::ArrayView decode_target_protected_by_chain, + std::bitset<32> active_decode_targets, + bool is_keyframe, + int64_t frame_id, + rtc::ArrayView chain_diffs) { + const int num_chains = chain_diffs.size(); + if (num_chains == 0) { + // Avoid printing the warning + // when already printed the warning for the same active decode targets, or + // when active_decode_targets are not changed from it's default value of + // all are active, including non-existent decode targets. 
+ if (last_active_decode_targets_ != active_decode_targets && + !active_decode_targets.all()) { + RTC_LOG(LS_WARNING) << "No chains are configured, but some decode " + "targets might be inactive. Unsupported."; + } + last_active_decode_targets_ = active_decode_targets; + return; + } + const size_t num_decode_targets = decode_target_protected_by_chain.size(); + RTC_DCHECK_GT(num_decode_targets, 0); + std::bitset<32> all_decode_targets = AllActive(num_decode_targets); + // Default value for active_decode_targets is 'all are active', i.e. all bits + // are set. Default value is set before number of decode targets is known. + // It is up to this helper to make the value cleaner and unset unused bits. + active_decode_targets &= all_decode_targets; + + if (is_keyframe) { + // Key frame resets the state. + last_active_decode_targets_ = all_decode_targets; + last_active_chains_ = AllActive(num_chains); + unsent_on_chain_.reset(); + } else { + // Update state assuming previous frame was sent. + unsent_on_chain_ &= + ~LastSendOnChain(frame_id - last_frame_id_, chain_diffs); + } + // Save for the next call to OnFrame. + // Though usually `frame_id == last_frame_id_ + 1`, it might not be so when + // frame id space is shared by several simulcast rtp streams. + last_frame_id_ = frame_id; + + if (active_decode_targets == last_active_decode_targets_) { + return; + } + last_active_decode_targets_ = active_decode_targets; + + if (active_decode_targets.none()) { + RTC_LOG(LS_ERROR) << "It is invalid to produce a frame (" << frame_id + << ") while there are no active decode targets"; + return; + } + last_active_chains_ = ActiveChains(decode_target_protected_by_chain, + num_chains, active_decode_targets); + // Frames that are part of inactive chains might not be produced by the + // encoder. Thus stop sending `active_decode_target` bitmask when it is sent + // on all active chains rather than on all chains. 
+ unsent_on_chain_ = last_active_chains_; + RTC_DCHECK(!unsent_on_chain_.none()); +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper.h b/modules/rtp_rtcp/source/active_decode_targets_helper.h new file mode 100644 index 0000000000..13755e8d80 --- /dev/null +++ b/modules/rtp_rtcp/source/active_decode_targets_helper.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_ +#define MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_ + +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" + +namespace webrtc { + +// Helper class that decides when active_decode_target_bitmask should be written +// into the dependency descriptor rtp header extension. +// See: https://aomediacodec.github.io/av1-rtp-spec/#a44-switching +// This class is thread-compatible +class ActiveDecodeTargetsHelper { + public: + ActiveDecodeTargetsHelper() = default; + ActiveDecodeTargetsHelper(const ActiveDecodeTargetsHelper&) = delete; + ActiveDecodeTargetsHelper& operator=(const ActiveDecodeTargetsHelper&) = + delete; + ~ActiveDecodeTargetsHelper() = default; + + // Decides if active decode target bitmask should be attached to the frame + // that is about to be sent. + void OnFrame(rtc::ArrayView decode_target_protected_by_chain, + std::bitset<32> active_decode_targets, + bool is_keyframe, + int64_t frame_id, + rtc::ArrayView chain_diffs); + + // Returns active decode target to attach to the dependency descriptor. 
+ absl::optional ActiveDecodeTargetsBitmask() const { + if (unsent_on_chain_.none()) + return absl::nullopt; + return last_active_decode_targets_.to_ulong(); + } + + std::bitset<32> ActiveChainsBitmask() const { return last_active_chains_; } + + private: + // `unsent_on_chain_[i]` indicates last active decode + // target bitmask wasn't attached to a packet on the chain with id `i`. + std::bitset<32> unsent_on_chain_ = 0; + std::bitset<32> last_active_decode_targets_ = 0; + std::bitset<32> last_active_chains_ = 0; + int64_t last_frame_id_ = 0; +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_ diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc b/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc new file mode 100644 index 0000000000..6f64fd1418 --- /dev/null +++ b/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc @@ -0,0 +1,272 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/active_decode_targets_helper.h" + +#include + +#include "absl/types/optional.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { +constexpr std::bitset<32> kAll = ~uint32_t{0}; +} // namespace + +TEST(ActiveDecodeTargetsHelperTest, + ReturnsNulloptOnKeyFrameWhenAllDecodeTargetsAreActive) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b11, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs); + + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); +} + +TEST(ActiveDecodeTargetsHelperTest, + ReturnsNulloptOnKeyFrameWhenAllDecodeTargetsAreActiveAfterDeltaFrame) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs_key[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b11, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key); + int chain_diffs_delta[] = {1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); + + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u); + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b11, + /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key); + + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); +} + +TEST(ActiveDecodeTargetsHelperTest, + ReturnsBitmaskOnKeyFrameWhenSomeDecodeTargetsAreInactive) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs); + + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u); +} + +TEST(ActiveDecodeTargetsHelperTest, + 
ReturnsBitmaskOnKeyFrameWhenSomeDecodeTargetsAreInactiveAfterDeltaFrame) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs_key[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key); + int chain_diffs_delta[] = {1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); + + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key); + + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u); +} + +TEST(ActiveDecodeTargetsHelperTest, + ReturnsNulloptWhenActiveDecodeTargetsAreUnused) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/kAll, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/kAll, + /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); +} + +TEST(ActiveDecodeTargetsHelperTest, + ReturnsNulloptOnDeltaFrameAfterSentOnKeyFrame) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs_key[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key); + int chain_diffs_delta[] = {1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); + + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 
absl::nullopt); +} + +TEST(ActiveDecodeTargetsHelperTest, ReturnsNewBitmaskOnDeltaFrame) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs_key[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b11, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key); + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + int chain_diffs_delta[] = {1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); + + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u); +} + +TEST(ActiveDecodeTargetsHelperTest, + ReturnsBitmaskWhenAllDecodeTargetsReactivatedOnDeltaFrame) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 0}; + ActiveDecodeTargetsHelper helper; + int chain_diffs_key[] = {0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key); + ASSERT_NE(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + int chain_diffs_delta[] = {1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b01, + /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + + // Reactive all the decode targets + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/kAll, + /*is_keyframe=*/false, /*frame_id=*/3, chain_diffs_delta); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b11u); +} + +TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptAfterSentOnAllActiveChains) { + // Active decode targets (0 and 1) are protected by chains 1 and 2. 
+ const std::bitset<32> kSome = 0b011; + constexpr int kDecodeTargetProtectedByChain[] = {2, 1, 0}; + + ActiveDecodeTargetsHelper helper; + int chain_diffs_key[] = {0, 0, 0}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b111, + /*is_keyframe=*/true, + /*frame_id=*/0, chain_diffs_key); + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + + int chain_diffs_delta1[] = {1, 1, 1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/kSome, + /*is_keyframe=*/false, + /*frame_id=*/1, chain_diffs_delta1); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u); + + int chain_diffs_delta2[] = {2, 2, 1}; // Previous frame was part of chain#2 + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/kSome, + /*is_keyframe=*/false, + /*frame_id=*/2, chain_diffs_delta2); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u); + + // active_decode_targets_bitmask was send on chains 1 and 2. It was never sent + // on chain 0, but chain 0 only protects inactive decode target#2 + int chain_diffs_delta3[] = {3, 1, 2}; // Previous frame was part of chain#1 + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/kSome, + /*is_keyframe=*/false, + /*frame_id=*/3, chain_diffs_delta3); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); +} + +TEST(ActiveDecodeTargetsHelperTest, ReturnsBitmaskWhenChanged) { + constexpr int kDecodeTargetProtectedByChain[] = {0, 1, 1}; + + ActiveDecodeTargetsHelper helper; + int chain_diffs_key[] = {0, 0}; + helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b111, + /*is_keyframe=*/true, + /*frame_id=*/0, chain_diffs_key); + int chain_diffs_delta1[] = {1, 1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b011, + /*is_keyframe=*/false, + /*frame_id=*/1, chain_diffs_delta1); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u); + + int chain_diffs_delta2[] = {1, 2}; + 
helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b101, + /*is_keyframe=*/false, + /*frame_id=*/2, chain_diffs_delta2); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b101u); + + // active_decode_target_bitmask was send on chain0, but it was an old one. + int chain_diffs_delta3[] = {2, 1}; + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b101, + /*is_keyframe=*/false, + /*frame_id=*/3, chain_diffs_delta3); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b101u); +} + +TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptWhenChainsAreNotUsed) { + const rtc::ArrayView kDecodeTargetProtectedByChain; + const rtc::ArrayView kNoChainDiffs; + + ActiveDecodeTargetsHelper helper; + helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/kAll, + /*is_keyframe=*/true, + /*frame_id=*/0, kNoChainDiffs); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + + helper.OnFrame(kDecodeTargetProtectedByChain, + /*active_decode_targets=*/0b101, + /*is_keyframe=*/false, + /*frame_id=*/1, kNoChainDiffs); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); +} + +TEST(ActiveDecodeTargetsHelperTest, Supports32DecodeTargets) { + std::bitset<32> some; + std::vector decode_target_protected_by_chain(32); + for (int i = 0; i < 32; ++i) { + decode_target_protected_by_chain[i] = i; + some[i] = i % 2 == 0; + } + + ActiveDecodeTargetsHelper helper; + std::vector chain_diffs_key(32, 0); + helper.OnFrame(decode_target_protected_by_chain, + /*active_decode_targets=*/some, + /*is_keyframe=*/true, + /*frame_id=*/1, chain_diffs_key); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), some.to_ulong()); + std::vector chain_diffs_delta(32, 1); + helper.OnFrame(decode_target_protected_by_chain, + /*active_decode_targets=*/some, + /*is_keyframe=*/false, + /*frame_id=*/2, chain_diffs_delta); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + helper.OnFrame(decode_target_protected_by_chain, + 
/*active_decode_targets=*/kAll, + /*is_keyframe=*/false, + /*frame_id=*/2, chain_diffs_delta); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), kAll.to_ulong()); +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc b/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc index 724ad8c42e..1da19e476c 100644 --- a/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc +++ b/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc @@ -19,7 +19,9 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" - +#ifndef DISABLE_H265 +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" +#endif namespace webrtc { std::unique_ptr CreateVideoRtpDepacketizer( @@ -31,12 +33,17 @@ std::unique_ptr CreateVideoRtpDepacketizer( return std::make_unique(); case kVideoCodecVP9: return std::make_unique(); +#ifndef DISABLE_H265 + case kVideoCodecH265: + return std::make_unique(); +#endif case kVideoCodecAV1: return std::make_unique(); case kVideoCodecGeneric: case kVideoCodecMultiplex: return std::make_unique(); } + RTC_CHECK_NOTREACHED(); } } // namespace webrtc diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc new file mode 100644 index 0000000000..6cb9d9330c --- /dev/null +++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -0,0 +1,472 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h" + +#include +#include +#include + +#include "absl/strings/match.h" +#include "api/transport/field_trial_based_config.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" +#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +constexpr uint32_t kTimestampTicksPerMs = 90; +constexpr int kSendSideDelayWindowMs = 1000; +constexpr int kBitrateStatisticsWindowMs = 1000; +constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; + +bool IsDisabled(absl::string_view name, + const WebRtcKeyValueConfig* field_trials) { + FieldTrialBasedConfig default_trials; + auto& trials = field_trials ? *field_trials : default_trials; + return absl::StartsWith(trials.Lookup(name), "Disabled"); +} +} // namespace + +DEPRECATED_RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender( + DEPRECATED_RtpSenderEgress* sender) + : transport_sequence_number_(0), sender_(sender) {} +DEPRECATED_RtpSenderEgress::NonPacedPacketSender::~NonPacedPacketSender() = + default; + +void DEPRECATED_RtpSenderEgress::NonPacedPacketSender::EnqueuePackets( + std::vector> packets) { + for (auto& packet : packets) { + if (!packet->SetExtension( + ++transport_sequence_number_)) { + --transport_sequence_number_; + } + packet->ReserveExtension(); + packet->ReserveExtension(); + sender_->SendPacket(packet.get(), PacedPacketInfo()); + } +} + +DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress( + const RtpRtcpInterface::Configuration& config, + RtpPacketHistory* packet_history) + : ssrc_(config.local_media_ssrc), + rtx_ssrc_(config.rtx_send_ssrc), + flexfec_ssrc_(config.fec_generator ? 
config.fec_generator->FecSsrc() + : absl::nullopt), + populate_network2_timestamp_(config.populate_network2_timestamp), + send_side_bwe_with_overhead_( + !IsDisabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)), + clock_(config.clock), + packet_history_(packet_history), + transport_(config.outgoing_transport), + event_log_(config.event_log), + is_audio_(config.audio), + need_rtp_packet_infos_(config.need_rtp_packet_infos), + transport_feedback_observer_(config.transport_feedback_callback), + send_side_delay_observer_(config.send_side_delay_observer), + send_packet_observer_(config.send_packet_observer), + rtp_stats_callback_(config.rtp_stats_callback), + bitrate_callback_(config.send_bitrate_observer), + media_has_been_sent_(false), + force_part_of_allocation_(false), + timestamp_offset_(0), + max_delay_it_(send_delays_.end()), + sum_delays_ms_(0), + total_packet_send_delay_ms_(0), + send_rates_(kNumMediaTypes, + {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}), + rtp_sequence_number_map_(need_rtp_packet_infos_ + ? 
std::make_unique( + kRtpSequenceNumberMapMaxEntries) + : nullptr) {} + +void DEPRECATED_RtpSenderEgress::SendPacket( + RtpPacketToSend* packet, + const PacedPacketInfo& pacing_info) { + RTC_DCHECK(packet); + + const uint32_t packet_ssrc = packet->Ssrc(); + RTC_DCHECK(packet->packet_type().has_value()); + RTC_DCHECK(HasCorrectSsrc(*packet)); + int64_t now_ms = clock_->TimeInMilliseconds(); + + if (is_audio_) { +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE + BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms, + GetSendRates().Sum().kbps(), packet_ssrc); + BWE_TEST_LOGGING_PLOT_WITH_SSRC( + 1, "AudioNackBitrate_kbps", now_ms, + GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(), + packet_ssrc); +#endif + } else { +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE + BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms, + GetSendRates().Sum().kbps(), packet_ssrc); + BWE_TEST_LOGGING_PLOT_WITH_SSRC( + 1, "VideoNackBitrate_kbps", now_ms, + GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(), + packet_ssrc); +#endif + } + + PacketOptions options; + { + MutexLock lock(&lock_); + options.included_in_allocation = force_part_of_allocation_; + + if (need_rtp_packet_infos_ && + packet->packet_type() == RtpPacketToSend::Type::kVideo) { + RTC_DCHECK(rtp_sequence_number_map_); + // Last packet of a frame, add it to sequence number info map. + const uint32_t timestamp = packet->Timestamp() - timestamp_offset_; + bool is_first_packet_of_frame = packet->is_first_packet_of_frame(); + bool is_last_packet_of_frame = packet->Marker(); + + rtp_sequence_number_map_->InsertPacket( + packet->SequenceNumber(), + RtpSequenceNumberMap::Info(timestamp, is_first_packet_of_frame, + is_last_packet_of_frame)); + } + } + + // Bug webrtc:7859. While FEC is invoked from rtp_sender_video, and not after + // the pacer, these modifications of the header below are happening after the + // FEC protection packets are calculated. 
This will corrupt recovered packets + // at the same place. It's not an issue for extensions, which are present in + // all the packets (their content just may be incorrect on recovered packets). + // In case of VideoTimingExtension, since it's present not in every packet, + // data after rtp header may be corrupted if these packets are protected by + // the FEC. + int64_t diff_ms = now_ms - packet->capture_time_ms(); + if (packet->HasExtension()) { + packet->SetExtension(kTimestampTicksPerMs * diff_ms); + } + if (packet->HasExtension()) { + packet->SetExtension( + AbsoluteSendTime::MsTo24Bits(now_ms)); + } + + if (packet->HasExtension()) { + if (populate_network2_timestamp_) { + packet->set_network2_time_ms(now_ms); + } else { + packet->set_pacer_exit_time_ms(now_ms); + } + } + + const bool is_media = packet->packet_type() == RtpPacketMediaType::kAudio || + packet->packet_type() == RtpPacketMediaType::kVideo; + + // Downstream code actually uses this flag to distinguish between media and + // everything else. + options.is_retransmit = !is_media; + if (auto packet_id = packet->GetExtension()) { + options.packet_id = *packet_id; + options.included_in_feedback = true; + options.included_in_allocation = true; + AddPacketToTransportFeedback(*packet_id, *packet, pacing_info); + } + + options.application_data.assign(packet->application_data().begin(), + packet->application_data().end()); + + if (packet->packet_type() != RtpPacketMediaType::kPadding && + packet->packet_type() != RtpPacketMediaType::kRetransmission) { + UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc); + UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(), + packet_ssrc); + } + + const bool send_success = SendPacketToNetwork(*packet, options, pacing_info); + + // Put packet in retransmission history or update pending status even if + // actual sending fails. 
+ if (is_media && packet->allow_retransmission()) { + packet_history_->PutRtpPacket(std::make_unique(*packet), + now_ms); + } else if (packet->retransmitted_sequence_number()) { + packet_history_->MarkPacketAsSent(*packet->retransmitted_sequence_number()); + } + + if (send_success) { + MutexLock lock(&lock_); + UpdateRtpStats(*packet); + media_has_been_sent_ = true; + } +} + +void DEPRECATED_RtpSenderEgress::ProcessBitrateAndNotifyObservers() { + if (!bitrate_callback_) + return; + + MutexLock lock(&lock_); + RtpSendRates send_rates = GetSendRatesLocked(); + bitrate_callback_->Notify( + send_rates.Sum().bps(), + send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_); +} + +RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRates() const { + MutexLock lock(&lock_); + return GetSendRatesLocked(); +} + +RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRatesLocked() const { + const int64_t now_ms = clock_->TimeInMilliseconds(); + RtpSendRates current_rates; + for (size_t i = 0; i < kNumMediaTypes; ++i) { + RtpPacketMediaType type = static_cast(i); + current_rates[type] = + DataRate::BitsPerSec(send_rates_[i].Rate(now_ms).value_or(0)); + } + return current_rates; +} + +void DEPRECATED_RtpSenderEgress::GetDataCounters( + StreamDataCounters* rtp_stats, + StreamDataCounters* rtx_stats) const { + MutexLock lock(&lock_); + *rtp_stats = rtp_stats_; + *rtx_stats = rtx_rtp_stats_; +} + +void DEPRECATED_RtpSenderEgress::ForceIncludeSendPacketsInAllocation( + bool part_of_allocation) { + MutexLock lock(&lock_); + force_part_of_allocation_ = part_of_allocation; +} + +bool DEPRECATED_RtpSenderEgress::MediaHasBeenSent() const { + MutexLock lock(&lock_); + return media_has_been_sent_; +} + +void DEPRECATED_RtpSenderEgress::SetMediaHasBeenSent(bool media_sent) { + MutexLock lock(&lock_); + media_has_been_sent_ = media_sent; +} + +void DEPRECATED_RtpSenderEgress::SetTimestampOffset(uint32_t timestamp) { + MutexLock lock(&lock_); + timestamp_offset_ = timestamp; +} + +std::vector 
+DEPRECATED_RtpSenderEgress::GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const { + RTC_DCHECK(!sequence_numbers.empty()); + if (!need_rtp_packet_infos_) { + return std::vector(); + } + + std::vector results; + results.reserve(sequence_numbers.size()); + + MutexLock lock(&lock_); + for (uint16_t sequence_number : sequence_numbers) { + const auto& info = rtp_sequence_number_map_->Get(sequence_number); + if (!info) { + // The empty vector will be returned. We can delay the clearing + // of the vector until after we exit the critical section. + return std::vector(); + } + results.push_back(*info); + } + + return results; +} + +bool DEPRECATED_RtpSenderEgress::HasCorrectSsrc( + const RtpPacketToSend& packet) const { + switch (*packet.packet_type()) { + case RtpPacketMediaType::kAudio: + case RtpPacketMediaType::kVideo: + return packet.Ssrc() == ssrc_; + case RtpPacketMediaType::kRetransmission: + case RtpPacketMediaType::kPadding: + // Both padding and retransmission must be on either the media or the + // RTX stream. + return packet.Ssrc() == rtx_ssrc_ || packet.Ssrc() == ssrc_; + case RtpPacketMediaType::kForwardErrorCorrection: + // FlexFEC is on separate SSRC, ULPFEC uses media SSRC. 
+ return packet.Ssrc() == ssrc_ || packet.Ssrc() == flexfec_ssrc_; + } + return false; +} + +void DEPRECATED_RtpSenderEgress::AddPacketToTransportFeedback( + uint16_t packet_id, + const RtpPacketToSend& packet, + const PacedPacketInfo& pacing_info) { + if (transport_feedback_observer_) { + size_t packet_size = packet.payload_size() + packet.padding_size(); + if (send_side_bwe_with_overhead_) { + packet_size = packet.size(); + } + + RtpPacketSendInfo packet_info; + packet_info.ssrc = ssrc_; + packet_info.transport_sequence_number = packet_id; + packet_info.rtp_sequence_number = packet.SequenceNumber(); + packet_info.length = packet_size; + packet_info.pacing_info = pacing_info; + packet_info.packet_type = packet.packet_type(); + transport_feedback_observer_->OnAddPacket(packet_info); + } +} + +void DEPRECATED_RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, + int64_t now_ms, + uint32_t ssrc) { + if (!send_side_delay_observer_ || capture_time_ms <= 0) + return; + + int avg_delay_ms = 0; + int max_delay_ms = 0; + uint64_t total_packet_send_delay_ms = 0; + { + MutexLock lock(&lock_); + // Compute the max and average of the recent capture-to-send delays. + // The time complexity of the current approach depends on the distribution + // of the delay values. This could be done more efficiently. + + // Remove elements older than kSendSideDelayWindowMs. + auto lower_bound = + send_delays_.lower_bound(now_ms - kSendSideDelayWindowMs); + for (auto it = send_delays_.begin(); it != lower_bound; ++it) { + if (max_delay_it_ == it) { + max_delay_it_ = send_delays_.end(); + } + sum_delays_ms_ -= it->second; + } + send_delays_.erase(send_delays_.begin(), lower_bound); + if (max_delay_it_ == send_delays_.end()) { + // Removed the previous max. Need to recompute. + RecomputeMaxSendDelay(); + } + + // Add the new element. 
+ RTC_DCHECK_GE(now_ms, 0); + RTC_DCHECK_LE(now_ms, std::numeric_limits::max() / 2); + RTC_DCHECK_GE(capture_time_ms, 0); + RTC_DCHECK_LE(capture_time_ms, std::numeric_limits::max() / 2); + int64_t diff_ms = now_ms - capture_time_ms; + RTC_DCHECK_GE(diff_ms, static_cast(0)); + RTC_DCHECK_LE(diff_ms, std::numeric_limits::max()); + int new_send_delay = rtc::dchecked_cast(now_ms - capture_time_ms); + SendDelayMap::iterator it; + bool inserted; + std::tie(it, inserted) = + send_delays_.insert(std::make_pair(now_ms, new_send_delay)); + if (!inserted) { + // TODO(terelius): If we have multiple delay measurements during the same + // millisecond then we keep the most recent one. It is not clear that this + // is the right decision, but it preserves an earlier behavior. + int previous_send_delay = it->second; + sum_delays_ms_ -= previous_send_delay; + it->second = new_send_delay; + if (max_delay_it_ == it && new_send_delay < previous_send_delay) { + RecomputeMaxSendDelay(); + } + } + if (max_delay_it_ == send_delays_.end() || + it->second >= max_delay_it_->second) { + max_delay_it_ = it; + } + sum_delays_ms_ += new_send_delay; + total_packet_send_delay_ms_ += new_send_delay; + total_packet_send_delay_ms = total_packet_send_delay_ms_; + + size_t num_delays = send_delays_.size(); + RTC_DCHECK(max_delay_it_ != send_delays_.end()); + max_delay_ms = rtc::dchecked_cast(max_delay_it_->second); + int64_t avg_ms = (sum_delays_ms_ + num_delays / 2) / num_delays; + RTC_DCHECK_GE(avg_ms, static_cast(0)); + RTC_DCHECK_LE(avg_ms, + static_cast(std::numeric_limits::max())); + avg_delay_ms = + rtc::dchecked_cast((sum_delays_ms_ + num_delays / 2) / num_delays); + } + send_side_delay_observer_->SendSideDelayUpdated( + avg_delay_ms, max_delay_ms, total_packet_send_delay_ms, ssrc); +} + +void DEPRECATED_RtpSenderEgress::RecomputeMaxSendDelay() { + max_delay_it_ = send_delays_.begin(); + for (auto it = send_delays_.begin(); it != send_delays_.end(); ++it) { + if (it->second >= 
max_delay_it_->second) { + max_delay_it_ = it; + } + } +} + +void DEPRECATED_RtpSenderEgress::UpdateOnSendPacket(int packet_id, + int64_t capture_time_ms, + uint32_t ssrc) { + if (!send_packet_observer_ || capture_time_ms <= 0 || packet_id == -1) { + return; + } + + send_packet_observer_->OnSendPacket(packet_id, capture_time_ms, ssrc); +} + +bool DEPRECATED_RtpSenderEgress::SendPacketToNetwork( + const RtpPacketToSend& packet, + const PacketOptions& options, + const PacedPacketInfo& pacing_info) { + int bytes_sent = -1; + if (transport_) { + bytes_sent = transport_->SendRtp(packet.data(), packet.size(), options) + ? static_cast(packet.size()) + : -1; + if (event_log_ && bytes_sent > 0) { + event_log_->Log(std::make_unique( + packet, pacing_info.probe_cluster_id)); + } + } + + if (bytes_sent <= 0) { + RTC_LOG(LS_WARNING) << "Transport failed to send packet."; + return false; + } + return true; +} + +void DEPRECATED_RtpSenderEgress::UpdateRtpStats(const RtpPacketToSend& packet) { + int64_t now_ms = clock_->TimeInMilliseconds(); + + StreamDataCounters* counters = + packet.Ssrc() == rtx_ssrc_ ? 
&rtx_rtp_stats_ : &rtp_stats_; + + if (counters->first_packet_time_ms == -1) { + counters->first_packet_time_ms = now_ms; + } + + if (packet.packet_type() == RtpPacketMediaType::kForwardErrorCorrection) { + counters->fec.AddPacket(packet); + } + + if (packet.packet_type() == RtpPacketMediaType::kRetransmission) { + counters->retransmitted.AddPacket(packet); + } + counters->transmitted.AddPacket(packet); + + RTC_DCHECK(packet.packet_type().has_value()); + send_rates_[static_cast(*packet.packet_type())].Update(packet.size(), + now_ms); + + if (rtp_stats_callback_) { + rtp_stats_callback_->DataCountersUpdated(*counters, packet.Ssrc()); + } +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h new file mode 100644 index 0000000000..742e7d5499 --- /dev/null +++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_ +#define MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/call/transport.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/units/data_rate.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_history.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +class DEPRECATED_RtpSenderEgress { + public: + // Helper class that redirects packets directly to the send part of this class + // without passing through an actual paced sender. + class NonPacedPacketSender : public RtpPacketSender { + public: + explicit NonPacedPacketSender(DEPRECATED_RtpSenderEgress* sender); + virtual ~NonPacedPacketSender(); + + void EnqueuePackets( + std::vector> packets) override; + + private: + uint16_t transport_sequence_number_; + DEPRECATED_RtpSenderEgress* const sender_; + }; + + DEPRECATED_RtpSenderEgress(const RtpRtcpInterface::Configuration& config, + RtpPacketHistory* packet_history); + ~DEPRECATED_RtpSenderEgress() = default; + + void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info) + RTC_LOCKS_EXCLUDED(lock_); + uint32_t Ssrc() const { return ssrc_; } + absl::optional RtxSsrc() const { return rtx_ssrc_; } + absl::optional FlexFecSsrc() const { return flexfec_ssrc_; } + + void ProcessBitrateAndNotifyObservers() RTC_LOCKS_EXCLUDED(lock_); + RtpSendRates GetSendRates() const RTC_LOCKS_EXCLUDED(lock_); + void GetDataCounters(StreamDataCounters* rtp_stats, + StreamDataCounters* rtx_stats) const + RTC_LOCKS_EXCLUDED(lock_); + + void 
ForceIncludeSendPacketsInAllocation(bool part_of_allocation) + RTC_LOCKS_EXCLUDED(lock_); + bool MediaHasBeenSent() const RTC_LOCKS_EXCLUDED(lock_); + void SetMediaHasBeenSent(bool media_sent) RTC_LOCKS_EXCLUDED(lock_); + void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(lock_); + + // For each sequence number in |sequence_number|, recall the last RTP packet + // which bore it - its timestamp and whether it was the first and/or last + // packet in that frame. If all of the given sequence numbers could be + // recalled, return a vector with all of them (in corresponding order). + // If any could not be recalled, return an empty vector. + std::vector GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const + RTC_LOCKS_EXCLUDED(lock_); + + private: + // Maps capture time in milliseconds to send-side delay in milliseconds. + // Send-side delay is the difference between transmission time and capture + // time. + typedef std::map SendDelayMap; + + RtpSendRates GetSendRatesLocked() const RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + bool HasCorrectSsrc(const RtpPacketToSend& packet) const; + void AddPacketToTransportFeedback(uint16_t packet_id, + const RtpPacketToSend& packet, + const PacedPacketInfo& pacing_info); + void UpdateDelayStatistics(int64_t capture_time_ms, + int64_t now_ms, + uint32_t ssrc); + void RecomputeMaxSendDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + void UpdateOnSendPacket(int packet_id, + int64_t capture_time_ms, + uint32_t ssrc); + // Sends packet on to |transport_|, leaving the RTP module. 
+ bool SendPacketToNetwork(const RtpPacketToSend& packet, + const PacketOptions& options, + const PacedPacketInfo& pacing_info); + void UpdateRtpStats(const RtpPacketToSend& packet) + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + + const uint32_t ssrc_; + const absl::optional rtx_ssrc_; + const absl::optional flexfec_ssrc_; + const bool populate_network2_timestamp_; + const bool send_side_bwe_with_overhead_; + Clock* const clock_; + RtpPacketHistory* const packet_history_; + Transport* const transport_; + RtcEventLog* const event_log_; + const bool is_audio_; + const bool need_rtp_packet_infos_; + + TransportFeedbackObserver* const transport_feedback_observer_; + SendSideDelayObserver* const send_side_delay_observer_; + SendPacketObserver* const send_packet_observer_; + StreamDataCountersCallback* const rtp_stats_callback_; + BitrateStatisticsObserver* const bitrate_callback_; + + mutable Mutex lock_; + bool media_has_been_sent_ RTC_GUARDED_BY(lock_); + bool force_part_of_allocation_ RTC_GUARDED_BY(lock_); + uint32_t timestamp_offset_ RTC_GUARDED_BY(lock_); + + SendDelayMap send_delays_ RTC_GUARDED_BY(lock_); + SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_); + // The sum of delays over a kSendSideDelayWindowMs sliding window. + int64_t sum_delays_ms_ RTC_GUARDED_BY(lock_); + uint64_t total_packet_send_delay_ms_ RTC_GUARDED_BY(lock_); + StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_); + StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_); + // One element per value in RtpPacketMediaType, with index matching value. + std::vector send_rates_ RTC_GUARDED_BY(lock_); + + // Maps sent packets' sequence numbers to a tuple consisting of: + // 1. The timestamp, without the randomizing offset mandated by the RFC. + // 2. Whether the packet was the first in its frame. + // 3. Whether the packet was the last in its frame. 
+ const std::unique_ptr rtp_sequence_number_map_ + RTC_GUARDED_BY(lock_); +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_ diff --git a/modules/rtp_rtcp/source/dtmf_queue.cc b/modules/rtp_rtcp/source/dtmf_queue.cc index 10e674789a..df06d2a2f3 100644 --- a/modules/rtp_rtcp/source/dtmf_queue.cc +++ b/modules/rtp_rtcp/source/dtmf_queue.cc @@ -24,7 +24,7 @@ DtmfQueue::DtmfQueue() {} DtmfQueue::~DtmfQueue() {} bool DtmfQueue::AddDtmf(const Event& event) { - rtc::CritScope lock(&dtmf_critsect_); + MutexLock lock(&dtmf_mutex_); if (queue_.size() >= kDtmfOutbandMax) { return false; } @@ -34,7 +34,7 @@ bool DtmfQueue::AddDtmf(const Event& event) { bool DtmfQueue::NextDtmf(Event* event) { RTC_DCHECK(event); - rtc::CritScope lock(&dtmf_critsect_); + MutexLock lock(&dtmf_mutex_); if (queue_.empty()) { return false; } @@ -45,7 +45,7 @@ bool DtmfQueue::NextDtmf(Event* event) { } bool DtmfQueue::PendingDtmf() const { - rtc::CritScope lock(&dtmf_critsect_); + MutexLock lock(&dtmf_mutex_); return !queue_.empty(); } } // namespace webrtc diff --git a/modules/rtp_rtcp/source/dtmf_queue.h b/modules/rtp_rtcp/source/dtmf_queue.h index adb93aa6fa..1d1867fd27 100644 --- a/modules/rtp_rtcp/source/dtmf_queue.h +++ b/modules/rtp_rtcp/source/dtmf_queue.h @@ -15,7 +15,7 @@ #include -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { class DtmfQueue { @@ -35,7 +35,7 @@ class DtmfQueue { bool PendingDtmf() const; private: - rtc::CriticalSection dtmf_critsect_; + mutable Mutex dtmf_mutex_; std::list queue_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/fec_test_helper.cc b/modules/rtp_rtcp/source/fec_test_helper.cc index 1941e213ab..f8579b48ff 100644 --- a/modules/rtp_rtcp/source/fec_test_helper.cc +++ b/modules/rtp_rtcp/source/fec_test_helper.cc @@ -181,9 +181,10 @@ std::unique_ptr FlexfecPacketGenerator::BuildFlexfecPacket( 
UlpfecPacketGenerator::UlpfecPacketGenerator(uint32_t ssrc) : AugmentedPacketGenerator(ssrc) {} -RtpPacket UlpfecPacketGenerator::BuildMediaRedPacket( - const AugmentedPacket& packet) { - RtpPacket red_packet; +RtpPacketReceived UlpfecPacketGenerator::BuildMediaRedPacket( + const AugmentedPacket& packet, + bool is_recovered) { + RtpPacketReceived red_packet; // Copy RTP header. const size_t kHeaderLength = packet.header.headerLength; red_packet.Parse(packet.data.cdata(), kHeaderLength); @@ -196,25 +197,26 @@ RtpPacket UlpfecPacketGenerator::BuildMediaRedPacket( // Copy the payload. memcpy(rtp_payload + 1, packet.data.cdata() + kHeaderLength, packet.data.size() - kHeaderLength); + red_packet.set_recovered(is_recovered); return red_packet; } -RtpPacket UlpfecPacketGenerator::BuildUlpfecRedPacket( +RtpPacketReceived UlpfecPacketGenerator::BuildUlpfecRedPacket( const ForwardErrorCorrection::Packet& packet) { // Create a fake media packet to get a correct header. 1 byte RED header. ++num_packets_; std::unique_ptr fake_packet = NextPacket(0, packet.data.size() + 1); - RtpPacket red_packet; + RtpPacketReceived red_packet; red_packet.Parse(fake_packet->data); red_packet.SetMarker(false); uint8_t* rtp_payload = red_packet.AllocatePayload(packet.data.size() + 1); rtp_payload[0] = kFecPayloadType; red_packet.SetPayloadType(kRedPayloadType); - memcpy(rtp_payload + 1, packet.data.cdata(), packet.data.size()); + red_packet.set_recovered(false); return red_packet; } diff --git a/modules/rtp_rtcp/source/fec_test_helper.h b/modules/rtp_rtcp/source/fec_test_helper.h index e66e6ca0dc..b661fa8300 100644 --- a/modules/rtp_rtcp/source/fec_test_helper.h +++ b/modules/rtp_rtcp/source/fec_test_helper.h @@ -14,6 +14,7 @@ #include #include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/random.h" namespace webrtc { @@ -106,13 +107,15 @@ class UlpfecPacketGenerator : public AugmentedPacketGenerator { explicit 
UlpfecPacketGenerator(uint32_t ssrc); // Creates a new RtpPacket with the RED header added to the packet. - static RtpPacket BuildMediaRedPacket(const AugmentedPacket& packet); + static RtpPacketReceived BuildMediaRedPacket(const AugmentedPacket& packet, + bool is_recovered); // Creates a new RtpPacket with FEC payload and RED header. Does this by // creating a new fake media AugmentedPacket, clears the marker bit and adds a // RED header. Finally replaces the payload with the content of // |packet->data|. - RtpPacket BuildUlpfecRedPacket(const ForwardErrorCorrection::Packet& packet); + RtpPacketReceived BuildUlpfecRedPacket( + const ForwardErrorCorrection::Packet& packet); }; } // namespace fec diff --git a/modules/rtp_rtcp/source/flexfec_sender.cc b/modules/rtp_rtcp/source/flexfec_sender.cc index 70f1666647..f1fe71d198 100644 --- a/modules/rtp_rtcp/source/flexfec_sender.cc +++ b/modules/rtp_rtcp/source/flexfec_sender.cc @@ -91,11 +91,13 @@ FlexfecSender::FlexfecSender( seq_num_(rtp_state ? rtp_state->sequence_number : random_.Rand(1, kMaxInitRtpSeqNumber)), ulpfec_generator_( - ForwardErrorCorrection::CreateFlexfec(ssrc, protected_media_ssrc)), + ForwardErrorCorrection::CreateFlexfec(ssrc, protected_media_ssrc), + clock_), rtp_header_extension_map_( RegisterSupportedExtensions(rtp_header_extensions)), header_extensions_size_( - RtpHeaderExtensionSize(extension_sizes, rtp_header_extension_map_)) { + RtpHeaderExtensionSize(extension_sizes, rtp_header_extension_map_)), + fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) { // This object should not have been instantiated if FlexFEC is disabled. RTC_DCHECK_GE(payload_type, 0); RTC_DCHECK_LE(payload_type, 127); @@ -105,30 +107,30 @@ FlexfecSender::~FlexfecSender() = default; // We are reusing the implementation from UlpfecGenerator for SetFecParameters, // AddRtpPacketAndGenerateFec, and FecAvailable. 
-void FlexfecSender::SetFecParameters(const FecProtectionParams& params) { - ulpfec_generator_.SetFecParameters(params); +void FlexfecSender::SetProtectionParameters( + const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) { + ulpfec_generator_.SetProtectionParameters(delta_params, key_params); } -bool FlexfecSender::AddRtpPacketAndGenerateFec(const RtpPacketToSend& packet) { +void FlexfecSender::AddPacketAndGenerateFec(const RtpPacketToSend& packet) { // TODO(brandtr): Generalize this SSRC check when we support multistream // protection. RTC_DCHECK_EQ(packet.Ssrc(), protected_media_ssrc_); - return ulpfec_generator_.AddRtpPacketAndGenerateFec( - packet.Buffer(), packet.headers_size()) == 0; -} - -bool FlexfecSender::FecAvailable() const { - return ulpfec_generator_.FecAvailable(); + ulpfec_generator_.AddPacketAndGenerateFec(packet); } std::vector> FlexfecSender::GetFecPackets() { + RTC_CHECK_RUNS_SERIALIZED(&ulpfec_generator_.race_checker_); std::vector> fec_packets_to_send; fec_packets_to_send.reserve(ulpfec_generator_.generated_fec_packets_.size()); + size_t total_fec_data_bytes = 0; for (const auto* fec_packet : ulpfec_generator_.generated_fec_packets_) { std::unique_ptr fec_packet_to_send( new RtpPacketToSend(&rtp_header_extension_map_)); fec_packet_to_send->set_packet_type( - RtpPacketToSend::Type::kForwardErrorCorrection); + RtpPacketMediaType::kForwardErrorCorrection); + fec_packet_to_send->set_allow_retransmission(false); // RTP header. 
fec_packet_to_send->SetMarker(false); @@ -157,9 +159,13 @@ std::vector> FlexfecSender::GetFecPackets() { fec_packet_to_send->AllocatePayload(fec_packet->data.size()); memcpy(payload, fec_packet->data.cdata(), fec_packet->data.size()); + total_fec_data_bytes += fec_packet_to_send->size(); fec_packets_to_send.push_back(std::move(fec_packet_to_send)); } - ulpfec_generator_.ResetState(); + + if (!fec_packets_to_send.empty()) { + ulpfec_generator_.ResetState(); + } int64_t now_ms = clock_->TimeInMilliseconds(); if (!fec_packets_to_send.empty() && @@ -170,6 +176,9 @@ std::vector> FlexfecSender::GetFecPackets() { last_generated_packet_ms_ = now_ms; } + MutexLock lock(&mutex_); + fec_bitrate_.Update(total_fec_data_bytes, now_ms); + return fec_packets_to_send; } @@ -178,7 +187,13 @@ size_t FlexfecSender::MaxPacketOverhead() const { return header_extensions_size_ + kFlexfecMaxHeaderSize; } -RtpState FlexfecSender::GetRtpState() { +DataRate FlexfecSender::CurrentFecRate() const { + MutexLock lock(&mutex_); + return DataRate::BitsPerSec( + fec_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0)); +} + +absl::optional FlexfecSender::GetRtpState() { RtpState rtp_state; rtp_state.sequence_number = seq_num_; rtp_state.start_timestamp = timestamp_offset_; diff --git a/modules/rtp_rtcp/source/flexfec_sender_unittest.cc b/modules/rtp_rtcp/source/flexfec_sender_unittest.cc index 10ec2e7495..3ff657476b 100644 --- a/modules/rtp_rtcp/source/flexfec_sender_unittest.cc +++ b/modules/rtp_rtcp/source/flexfec_sender_unittest.cc @@ -55,7 +55,7 @@ std::unique_ptr GenerateSingleFlexfecPacket( params.fec_mask_type = kFecMaskRandom; constexpr size_t kNumPackets = 4; - sender->SetFecParameters(params); + sender->SetProtectionParameters(params, params); AugmentedPacketGenerator packet_generator(kMediaSsrc); packet_generator.NewFrame(kNumPackets); for (size_t i = 0; i < kNumPackets; ++i) { @@ -63,13 +63,12 @@ std::unique_ptr GenerateSingleFlexfecPacket( packet_generator.NextPacket(i, 
kPayloadLength); RtpPacketToSend rtp_packet(nullptr); // No header extensions. rtp_packet.Parse(packet->data); - EXPECT_TRUE(sender->AddRtpPacketAndGenerateFec(rtp_packet)); + sender->AddPacketAndGenerateFec(rtp_packet); } - EXPECT_TRUE(sender->FecAvailable()); std::vector> fec_packets = sender->GetFecPackets(); - EXPECT_FALSE(sender->FecAvailable()); EXPECT_EQ(1U, fec_packets.size()); + EXPECT_TRUE(sender->GetFecPackets().empty()); return std::move(fec_packets.front()); } @@ -82,7 +81,7 @@ TEST(FlexfecSenderTest, Ssrc) { kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */, &clock); - EXPECT_EQ(kFlexfecSsrc, sender.ssrc()); + EXPECT_EQ(kFlexfecSsrc, sender.FecSsrc()); } TEST(FlexfecSenderTest, NoFecAvailableBeforeMediaAdded) { @@ -91,9 +90,7 @@ TEST(FlexfecSenderTest, NoFecAvailableBeforeMediaAdded) { kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */, &clock); - EXPECT_FALSE(sender.FecAvailable()); - auto fec_packets = sender.GetFecPackets(); - EXPECT_EQ(0U, fec_packets.size()); + EXPECT_TRUE(sender.GetFecPackets().empty()); } TEST(FlexfecSenderTest, ProtectOneFrameWithOneFecPacket) { @@ -124,7 +121,7 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithOneFecPacket) { FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */, &clock); - sender.SetFecParameters(params); + sender.SetProtectionParameters(params, params); AugmentedPacketGenerator packet_generator(kMediaSsrc); for (size_t i = 0; i < kNumFrames; ++i) { @@ -134,14 +131,13 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithOneFecPacket) { packet_generator.NextPacket(i, kPayloadLength); RtpPacketToSend rtp_packet(nullptr); rtp_packet.Parse(packet->data); - EXPECT_TRUE(sender.AddRtpPacketAndGenerateFec(rtp_packet)); + sender.AddPacketAndGenerateFec(rtp_packet); } } - EXPECT_TRUE(sender.FecAvailable()); std::vector> fec_packets = sender.GetFecPackets(); - 
EXPECT_FALSE(sender.FecAvailable()); ASSERT_EQ(1U, fec_packets.size()); + EXPECT_TRUE(sender.GetFecPackets().empty()); RtpPacketToSend* fec_packet = fec_packets.front().get(); EXPECT_EQ(kRtpHeaderSize, fec_packet->headers_size()); @@ -164,7 +160,7 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithTwoFecPackets) { FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */, &clock); - sender.SetFecParameters(params); + sender.SetProtectionParameters(params, params); AugmentedPacketGenerator packet_generator(kMediaSsrc); for (size_t i = 0; i < kNumFrames; ++i) { @@ -174,13 +170,12 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithTwoFecPackets) { packet_generator.NextPacket(i, kPayloadLength); RtpPacketToSend rtp_packet(nullptr); rtp_packet.Parse(packet->data); - EXPECT_TRUE(sender.AddRtpPacketAndGenerateFec(rtp_packet)); + sender.AddPacketAndGenerateFec(rtp_packet); } - EXPECT_TRUE(sender.FecAvailable()); std::vector> fec_packets = sender.GetFecPackets(); - EXPECT_FALSE(sender.FecAvailable()); ASSERT_EQ(1U, fec_packets.size()); + EXPECT_TRUE(sender.GetFecPackets().empty()); RtpPacketToSend* fec_packet = fec_packets.front().get(); EXPECT_EQ(kRtpHeaderSize, fec_packet->headers_size()); @@ -331,7 +326,7 @@ TEST(FlexfecSenderTest, SetsAndGetsRtpState) { EXPECT_EQ(initial_rtp_state.start_timestamp + 1 * kVideoPayloadTypeFrequency, fec_packet->Timestamp()); - RtpState updated_rtp_state = sender.GetRtpState(); + RtpState updated_rtp_state = sender.GetRtpState().value(); EXPECT_EQ(initial_rtp_state.sequence_number + 2, updated_rtp_state.sequence_number); EXPECT_EQ(initial_rtp_state.start_timestamp, diff --git a/modules/rtp_rtcp/source/forward_error_correction.h b/modules/rtp_rtcp/source/forward_error_correction.h index 100f532389..566ce7428a 100644 --- a/modules/rtp_rtcp/source/forward_error_correction.h +++ b/modules/rtp_rtcp/source/forward_error_correction.h @@ -85,6 +85,7 @@ class 
ForwardErrorCorrection { bool is_fec; // Set to true if this is an FEC packet and false // otherwise. + bool is_recovered; rtc::scoped_refptr pkt; // Pointer to the packet storage. }; diff --git a/modules/rtp_rtcp/source/h265_sps_parser.cc b/modules/rtp_rtcp/source/h265_sps_parser.cc new file mode 100644 index 0000000000..6e174f6304 --- /dev/null +++ b/modules/rtp_rtcp/source/h265_sps_parser.cc @@ -0,0 +1,189 @@ +/* + * Intel License + */ + +#include "webrtc/modules/rtp_rtcp/source/h265_sps_parser.h" + +#include "webrtc/base/bitbuffer.h" +#include "webrtc/base/bytebuffer.h" +#include "webrtc/base/logging.h" + +#include + +#define RETURN_FALSE_ON_FAIL(x) \ + if (!(x)) { \ + return false; \ + } + +namespace webrtc { + +H265SpsParser::H265SpsParser(const uint8_t* sps, size_t byte_length) + : sps_(sps), byte_length_(byte_length), width_(), height_() { +} + +bool H265SpsParser::Parse() { + // General note: this is based off the 04/2015 version of the H.265 standard. + // You can find it on this page: + // http://www.itu.int/rec/T-REC-H.265 + + const char* sps_bytes = reinterpret_cast(sps_); + // First, parse out rbsp, which is basically the source buffer minus emulation + // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in + // section 7.3.1.1 of the H.265 standard, similar to H264. + rtc::ByteBufferWriter rbsp_buffer; + for (size_t i = 0; i < byte_length_;) { + // Be careful about over/underflow here. byte_length_ - 3 can underflow, and + // i + 3 can overflow, but byte_length_ - i can't, because i < byte_length_ + // above, and that expression will produce the number of bytes left in + // the stream including the byte at i. + if (byte_length_ - i >= 3 && sps_[i] == 0 && sps_[i + 1] == 0 && + sps_[i + 2] == 3) { + // Two rbsp bytes + the emulation byte. + rbsp_buffer.WriteBytes(sps_bytes + i, 2); + i += 3; + } else { + // Single rbsp byte. 
+ rbsp_buffer.WriteBytes(sps_bytes + i, 1); + i++; + } + } + + // Now, we need to use a bit buffer to parse through the actual HEVC SPS + // format. See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the + // H.265 standard for a complete description. + // Since we only care about resolution, we ignore the majority of fields, but + // we still have to actively parse through a lot of the data, since many of + // the fields have variable size. + // Unlike H264, for H265, the picture size is indicated by pic_width_in_luma_samples + // and pic_height_in_luma_samples, if conformance_window_flag !=1; + // When conformance_window_flag is 1, the width is adjusted with conf_win_xx_offset + // + rtc::BitBuffer parser(reinterpret_cast(rbsp_buffer.Data()), + rbsp_buffer.Length()); + + // The golomb values we have to read, not just consume. + uint32_t golomb_ignored; + + // separate_colour_plane_flag is optional (assumed 0), but has implications + // about the ChromaArrayType, which modifies how we treat crop coordinates. + uint32_t separate_colour_plane_flag = 0; + // chroma_format_idc will be ChromaArrayType if separate_colour_plane_flag is + // 0. It defaults to 1, when not specified. + uint32_t chroma_format_idc = 1; + + + // sps_video_parameter_set_id: u(4) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(4)); + // sps_max_sub_layers_minus1: u(3) + uint32_t sps_max_sub_layers_minus1 = 0; + RETURN_FALSE_ON_FAIL(parser.ReadBits(&sps_max_sub_layers_minus1, 3)); + // sps_temporal_id_nesting_flag: u(1) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(1)); + // profile_tier_level(1, sps_max_sub_layers_minus1). We are actually not + // using them, so read/skip over it.
+ // general_profile_space+general_tier_flag+general_profile_idc: u(8) + RETURN_FALSE_ON_FAIL(parser.ConsumeBytes(1)); + // general_profile_compatibility_flag[32] + RETURN_FALSE_ON_FAIL(parser.ConsumeBytes(4)); + // general_progressive_source_flag + interlaced_source_flag+ non-packed_constraint + // flag + frame_only_constraint_flag: u(4) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(4)); + // general_profile_idc decided flags or reserved. u(43) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(43)); + // general_inbld_flag or reserved 0: u(1) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(1)); + // general_level_idc: u(8) + RETURN_FALSE_ON_FAIL(parser.ConsumeBytes(1)); + // if max_sub_layers_minus1 >=1, read the sublayer profile information + std::vector sub_layer_profile_present_flags; + std::vector sub_layer_level_present_flags; + uint32_t sub_layer_profile_present = 0; + uint32_t sub_layer_level_present = 0; + for (uint32_t i = 0; i < sps_max_sub_layers_minus1; i++) { + //sublayer_profile_present_flag and sublayer_level_present_flag: u(2) + RETURN_FALSE_ON_FAIL(parser.ReadBits(&sub_layer_profile_present, 1)); + RETURN_FALSE_ON_FAIL(parser.ReadBits(&sub_layer_level_present, 1)); + sub_layer_profile_present_flags.push_back(sub_layer_profile_present); + sub_layer_level_present_flags.push_back(sub_layer_level_present); + } + if (sps_max_sub_layers_minus1 > 0) { + for (uint32_t j = sps_max_sub_layers_minus1; j < 8; j++) { + // reserved 2 bits: u(2) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(2)); + } + } + for (uint32_t k = 0; k < sps_max_sub_layers_minus1; k++) { + if(sub_layer_profile_present_flags[k]) {// + // sub_layer profile_space/tier_flag/profile_idc. ignored.
u(8) + RETURN_FALSE_ON_FAIL(parser.ConsumeBytes(1)); + // profile_compatibility_flag: u(32) + RETURN_FALSE_ON_FAIL(parser.ConsumeBytes(4)); + // sub_layer progressive_source_flag/interlaced_source_flag/ + // non_packed_constraint_flag/frame_only_constraint_flag: u(4) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(4)); + // following 43-bits are profile_idc specific. We simply read/skip it. u(43) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(43)); + // 1-bit profile_idc specific inbld flag. We simply read/skip it. u(1) + RETURN_FALSE_ON_FAIL(parser.ConsumeBits(1)); + } + if (sub_layer_level_present_flags[k]) { + // sub_layer_level_idc: u(8) + RETURN_FALSE_ON_FAIL(parser.ConsumeBytes(1)); + } + } + //sps_seq_parameter_set_id: ue(v) + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&golomb_ignored)); + // chroma_format_idc: ue(v) + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&chroma_format_idc)); + if (chroma_format_idc == 3) { + // separate_colour_plane_flag: u(1) + RETURN_FALSE_ON_FAIL(parser.ReadBits(&separate_colour_plane_flag, 1)); + } + uint32_t pic_width_in_luma_samples = 0; + uint32_t pic_height_in_luma_samples = 0; + // pic_width_in_luma_samples: ue(v) + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&pic_width_in_luma_samples)); + // pic_height_in_luma_samples: ue(v) + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&pic_height_in_luma_samples)); + // conformance_window_flag: u(1) + uint32_t conformance_window_flag = 0; + RETURN_FALSE_ON_FAIL(parser.ReadBits(&conformance_window_flag, 1)); + + uint32_t conf_win_left_offset = 0; + uint32_t conf_win_right_offset = 0; + uint32_t conf_win_top_offset = 0; + uint32_t conf_win_bottom_offset = 0; + if (conformance_window_flag) { + // conf_win_left_offset: ue(v) + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_left_offset)); + // conf_win_right_offset: ue(v) + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_right_offset)); + // conf_win_top_offset: ue(v) +
RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_top_offset)); + // conf_win_bottom_offset: ue(v) + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_bottom_offset)); + } + + //Enough to get the resolution information. Calculate according to HEVC spec 7.4.3.2 + int width = 0; + int height = 0; + + width = pic_width_in_luma_samples; + height = pic_height_in_luma_samples; + + if (conformance_window_flag) { + int sub_width_c = ((1 == chroma_format_idc) || (2 == chroma_format_idc)) && + (0 == separate_colour_plane_flag) ? 2 : 1; + int sub_height_c = (1 == chroma_format_idc) && (0 == separate_colour_plane_flag) ? 2 : 1; + //the offset includes the pixel within conformance window. so don't need to +1 as per spec + width -= sub_width_c*(conf_win_right_offset + conf_win_left_offset); + height -= sub_height_c*(conf_win_top_offset + conf_win_bottom_offset); + } + + width_ = width; + height_ = height; + return true; + +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/h265_sps_parser.h b/modules/rtp_rtcp/source/h265_sps_parser.h new file mode 100644 index 0000000000..6b08b0959f --- /dev/null +++ b/modules/rtp_rtcp/source/h265_sps_parser.h @@ -0,0 +1,31 @@ +/* + * Intel License + */ + +#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H265_SPS_PARSER_H_ +#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H265_SPS_PARSER_H_ + +#include "webrtc/base/common.h" + +namespace webrtc { + +// A class for parsing out sequence parameter set (SPS) data from an H265 NALU. +// Currently, only the resolution is parsed; all other fields are skipped. +class H265SpsParser { + public: + H265SpsParser(const uint8_t* sps, size_t byte_length); + // Parses the SPS to completion. Returns true if the SPS was parsed correctly.
+ bool Parse(); + uint16_t width() { return width_; } + uint16_t height() { return height_; } + + private: + const uint8_t* const sps_; + const size_t byte_length_; + + uint16_t width_; + uint16_t height_; +}; + +} // namespace webrtc +#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H265_SPS_PARSER_H_ diff --git a/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/modules/rtp_rtcp/source/nack_rtx_unittest.cc index 17601dd966..8afaf3ee61 100644 --- a/modules/rtp_rtcp/source/nack_rtx_unittest.cc +++ b/modules/rtp_rtcp/source/nack_rtx_unittest.cc @@ -19,10 +19,9 @@ #include "call/rtp_stream_receiver_controller.h" #include "call/rtx_receive_stream.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/playout_delay_oracle.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "rtc_base/rate_limiter.h" #include "test/gtest.h" @@ -64,7 +63,9 @@ class RtxLoopBackTransport : public webrtc::Transport { count_rtx_ssrc_(0), module_(NULL) {} - void SetSendModule(RtpRtcp* rtpRtcpModule) { module_ = rtpRtcpModule; } + void SetSendModule(RtpRtcpInterface* rtpRtcpModule) { + module_ = rtpRtcpModule; + } void DropEveryNthPacket(int n) { packet_loss_ = n; } @@ -110,7 +111,7 @@ class RtxLoopBackTransport : public webrtc::Transport { int consecutive_drop_end_; uint32_t rtx_ssrc_; int count_rtx_ssrc_; - RtpRtcp* module_; + RtpRtcpInterface* module_; RtpStreamReceiverController stream_receiver_controller_; std::set expected_sequence_numbers_; }; @@ -126,7 +127,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test { ~RtpRtcpRtxNackTest() override {} void SetUp() override { - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.audio = false; configuration.clock = &fake_clock; 
receive_statistics_ = ReceiveStatistics::Create(&fake_clock); @@ -135,12 +136,11 @@ class RtpRtcpRtxNackTest : public ::testing::Test { configuration.retransmission_rate_limiter = &retransmission_rate_limiter_; configuration.local_media_ssrc = kTestSsrc; configuration.rtx_send_ssrc = kTestRtxSsrc; - rtp_rtcp_module_ = RtpRtcp::Create(configuration); + rtp_rtcp_module_ = ModuleRtpRtcpImpl2::Create(configuration); FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; video_config.clock = &fake_clock; video_config.rtp_sender = rtp_rtcp_module_->RtpSender(); - video_config.playout_delay_oracle = &playout_delay_oracle_; video_config.field_trials = &field_trials; rtp_sender_video_ = std::make_unique(video_config); rtp_rtcp_module_->SetRTCPStatus(RtcpMode::kCompound); @@ -211,7 +211,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test { video_header.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp, - timestamp / 90, payload_data, nullptr, video_header, 0)); + timestamp / 90, payload_data, video_header, 0)); // Min required delay until retransmit = 5 + RTT ms (RTT = 0). fake_clock.AdvanceTimeMilliseconds(5); int length = BuildNackList(nack_list); @@ -226,8 +226,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test { } std::unique_ptr receive_statistics_; - std::unique_ptr rtp_rtcp_module_; - PlayoutDelayOracle playout_delay_oracle_; + std::unique_ptr rtp_rtcp_module_; std::unique_ptr rtp_sender_video_; RtxLoopBackTransport transport_; const std::map rtx_associated_payload_types_ = { @@ -262,7 +261,7 @@ TEST_F(RtpRtcpRtxNackTest, LongNackList) { video_header.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp, - timestamp / 90, payload_data, nullptr, video_header, 0)); + timestamp / 90, payload_data, video_header, 0)); // Prepare next frame. 
timestamp += 3000; fake_clock.AdvanceTimeMilliseconds(33); diff --git a/modules/rtp_rtcp/source/playout_delay_oracle.cc b/modules/rtp_rtcp/source/playout_delay_oracle.cc deleted file mode 100644 index f234759678..0000000000 --- a/modules/rtp_rtcp/source/playout_delay_oracle.cc +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/rtp_rtcp/source/playout_delay_oracle.h" - -#include - -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -PlayoutDelayOracle::PlayoutDelayOracle() = default; - -PlayoutDelayOracle::~PlayoutDelayOracle() = default; - -absl::optional PlayoutDelayOracle::PlayoutDelayToSend( - PlayoutDelay requested_delay) const { - rtc::CritScope lock(&crit_sect_); - if (requested_delay.min_ms > PlayoutDelayLimits::kMaxMs || - requested_delay.max_ms > PlayoutDelayLimits::kMaxMs) { - RTC_DLOG(LS_ERROR) - << "Requested playout delay values out of range, ignored"; - return absl::nullopt; - } - if (requested_delay.max_ms != -1 && - requested_delay.min_ms > requested_delay.max_ms) { - RTC_DLOG(LS_ERROR) << "Requested playout delay values out of order"; - return absl::nullopt; - } - if ((requested_delay.min_ms == -1 || - requested_delay.min_ms == latest_delay_.min_ms) && - (requested_delay.max_ms == -1 || - requested_delay.max_ms == latest_delay_.max_ms)) { - // Unchanged. - return unacked_sequence_number_ ? 
absl::make_optional(latest_delay_) - : absl::nullopt; - } - if (requested_delay.min_ms == -1) { - RTC_DCHECK_GE(requested_delay.max_ms, 0); - requested_delay.min_ms = - std::min(latest_delay_.min_ms, requested_delay.max_ms); - } - if (requested_delay.max_ms == -1) { - requested_delay.max_ms = - std::max(latest_delay_.max_ms, requested_delay.min_ms); - } - return requested_delay; -} - -void PlayoutDelayOracle::OnSentPacket(uint16_t sequence_number, - absl::optional delay) { - rtc::CritScope lock(&crit_sect_); - int64_t unwrapped_sequence_number = unwrapper_.Unwrap(sequence_number); - - if (!delay) { - return; - } - - RTC_DCHECK_LE(0, delay->min_ms); - RTC_DCHECK_LE(delay->max_ms, PlayoutDelayLimits::kMaxMs); - RTC_DCHECK_LE(delay->min_ms, delay->max_ms); - - if (delay->min_ms != latest_delay_.min_ms || - delay->max_ms != latest_delay_.max_ms) { - latest_delay_ = *delay; - unacked_sequence_number_ = unwrapped_sequence_number; - } -} - -// If an ACK is received on the packet containing the playout delay extension, -// we stop sending the extension on future packets. -void PlayoutDelayOracle::OnReceivedAck( - int64_t extended_highest_sequence_number) { - rtc::CritScope lock(&crit_sect_); - if (unacked_sequence_number_ && - extended_highest_sequence_number > *unacked_sequence_number_) { - unacked_sequence_number_ = absl::nullopt; - } -} - -} // namespace webrtc diff --git a/modules/rtp_rtcp/source/playout_delay_oracle.h b/modules/rtp_rtcp/source/playout_delay_oracle.h deleted file mode 100644 index 6451be4cdc..0000000000 --- a/modules/rtp_rtcp/source/playout_delay_oracle.h +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_RTP_RTCP_SOURCE_PLAYOUT_DELAY_ORACLE_H_ -#define MODULES_RTP_RTCP_SOURCE_PLAYOUT_DELAY_ORACLE_H_ - -#include - -#include "absl/types/optional.h" -#include "common_types.h" // NOLINT(build/include) -#include "modules/include/module_common_types_public.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" -#include "rtc_base/thread_annotations.h" - -namespace webrtc { - -// This class tracks the application requests to limit minimum and maximum -// playout delay and makes a decision on whether the current RTP frame -// should include the playout out delay extension header. -// -// Playout delay can be defined in terms of capture and render time as follows: -// -// Render time = Capture time in receiver time + playout delay -// -// The application specifies a minimum and maximum limit for the playout delay -// which are both communicated to the receiver and the receiver can adapt -// the playout delay within this range based on observed network jitter. -class PlayoutDelayOracle : public RtcpAckObserver { - public: - PlayoutDelayOracle(); - ~PlayoutDelayOracle() override; - - // The playout delay to be added to a packet. The input delays are provided by - // the application, with -1 meaning unchanged/unspecified. The output delay - // are the values to be attached to packets on the wire. Presence and value - // depends on the current input, previous inputs, and received acks from the - // remote end. - absl::optional PlayoutDelayToSend( - PlayoutDelay requested_delay) const; - - void OnSentPacket(uint16_t sequence_number, - absl::optional playout_delay); - - void OnReceivedAck(int64_t extended_highest_sequence_number) override; - - private: - // The playout delay information is updated from the encoder thread(s). 
- // The sequence number feedback is updated from the worker thread. - // Guards access to data across multiple threads. - rtc::CriticalSection crit_sect_; - // The oldest sequence number on which the current playout delay values have - // been sent. When set, it means we need to attach extension to sent packets. - absl::optional unacked_sequence_number_ RTC_GUARDED_BY(crit_sect_); - // Sequence number unwrapper for sent packets. - - // TODO(nisse): Could potentially get out of sync with the unwrapper used by - // the caller of OnReceivedAck. - SequenceNumberUnwrapper unwrapper_ RTC_GUARDED_BY(crit_sect_); - // Playout delay values on the next frame if |send_playout_delay_| is set. - PlayoutDelay latest_delay_ RTC_GUARDED_BY(crit_sect_) = {-1, -1}; - - RTC_DISALLOW_COPY_AND_ASSIGN(PlayoutDelayOracle); -}; - -} // namespace webrtc - -#endif // MODULES_RTP_RTCP_SOURCE_PLAYOUT_DELAY_ORACLE_H_ diff --git a/modules/rtp_rtcp/source/playout_delay_oracle_unittest.cc b/modules/rtp_rtcp/source/playout_delay_oracle_unittest.cc deleted file mode 100644 index 3857e9b211..0000000000 --- a/modules/rtp_rtcp/source/playout_delay_oracle_unittest.cc +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/rtp_rtcp/source/playout_delay_oracle.h" - -#include "rtc_base/logging.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { -constexpr int kSequenceNumber = 100; -constexpr int kMinPlayoutDelay = 0; -constexpr int kMaxPlayoutDelay = 150; -} // namespace - -TEST(PlayoutDelayOracleTest, DisabledByDefault) { - PlayoutDelayOracle playout_delay_oracle; - EXPECT_FALSE(playout_delay_oracle.PlayoutDelayToSend({-1, -1})); -} - -TEST(PlayoutDelayOracleTest, SendPlayoutDelayUntilSeqNumberExceeds) { - PlayoutDelayOracle playout_delay_oracle; - PlayoutDelay playout_delay = {kMinPlayoutDelay, kMaxPlayoutDelay}; - playout_delay_oracle.OnSentPacket(kSequenceNumber, playout_delay); - absl::optional delay_to_send = - playout_delay_oracle.PlayoutDelayToSend({-1, -1}); - ASSERT_TRUE(delay_to_send.has_value()); - EXPECT_EQ(kMinPlayoutDelay, delay_to_send->min_ms); - EXPECT_EQ(kMaxPlayoutDelay, delay_to_send->max_ms); - - // Oracle indicates playout delay should be sent if highest sequence number - // acked is lower than the sequence number of the first packet containing - // playout delay. - playout_delay_oracle.OnReceivedAck(kSequenceNumber - 1); - EXPECT_TRUE(playout_delay_oracle.PlayoutDelayToSend({-1, -1})); - - // Oracle indicates playout delay should not be sent if sequence number - // acked on a matching ssrc indicates the receiver has received the playout - // delay values. 
- playout_delay_oracle.OnReceivedAck(kSequenceNumber + 1); - EXPECT_FALSE(playout_delay_oracle.PlayoutDelayToSend({-1, -1})); -} - -} // namespace webrtc diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.cc b/modules/rtp_rtcp/source/receive_statistics_impl.cc index 0c47e08b1e..6ec41a1eb0 100644 --- a/modules/rtp_rtcp/source/receive_statistics_impl.cc +++ b/modules/rtp_rtcp/source/receive_statistics_impl.cc @@ -100,7 +100,7 @@ bool StreamStatisticianImpl::UpdateOutOfOrder(const RtpPacketReceived& packet, } void StreamStatisticianImpl::UpdateCounters(const RtpPacketReceived& packet) { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); RTC_DCHECK_EQ(ssrc_, packet.Ssrc()); int64_t now_ms = clock_->TimeInMilliseconds(); @@ -159,17 +159,17 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet, void StreamStatisticianImpl::SetMaxReorderingThreshold( int max_reordering_threshold) { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); max_reordering_threshold_ = max_reordering_threshold; } void StreamStatisticianImpl::EnableRetransmitDetection(bool enable) { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); enable_retransmit_detection_ = enable; } RtpReceiveStats StreamStatisticianImpl::GetStats() const { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); RtpReceiveStats stats; stats.packets_lost = cumulative_loss_; // TODO(nisse): Can we return a float instead? @@ -183,7 +183,7 @@ RtpReceiveStats StreamStatisticianImpl::GetStats() const { bool StreamStatisticianImpl::GetActiveStatisticsAndReset( RtcpStatistics* statistics) { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); if (clock_->TimeInMilliseconds() - last_receive_time_ms_ >= kStatisticsTimeoutMs) { // Not active. 
@@ -241,7 +241,7 @@ RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() { } absl::optional StreamStatisticianImpl::GetFractionLostInPercent() const { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); if (!ReceivedRtpPacket()) { return absl::nullopt; } @@ -257,12 +257,12 @@ absl::optional StreamStatisticianImpl::GetFractionLostInPercent() const { StreamDataCounters StreamStatisticianImpl::GetReceiveStreamDataCounters() const { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); return receive_counters_; } uint32_t StreamStatisticianImpl::BitrateReceived() const { - rtc::CritScope cs(&stream_lock_); + MutexLock lock(&stream_lock_); return incoming_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0); } @@ -320,7 +320,7 @@ void ReceiveStatisticsImpl::OnRtpPacket(const RtpPacketReceived& packet) { StreamStatisticianImpl* ReceiveStatisticsImpl::GetStatistician( uint32_t ssrc) const { - rtc::CritScope cs(&receive_statistics_lock_); + MutexLock lock(&receive_statistics_lock_); const auto& it = statisticians_.find(ssrc); if (it == statisticians_.end()) return NULL; @@ -329,7 +329,7 @@ StreamStatisticianImpl* ReceiveStatisticsImpl::GetStatistician( StreamStatisticianImpl* ReceiveStatisticsImpl::GetOrCreateStatistician( uint32_t ssrc) { - rtc::CritScope cs(&receive_statistics_lock_); + MutexLock lock(&receive_statistics_lock_); StreamStatisticianImpl*& impl = statisticians_[ssrc]; if (impl == nullptr) { // new element impl = new StreamStatisticianImpl(ssrc, clock_, max_reordering_threshold_); @@ -341,7 +341,7 @@ void ReceiveStatisticsImpl::SetMaxReorderingThreshold( int max_reordering_threshold) { std::map statisticians; { - rtc::CritScope cs(&receive_statistics_lock_); + MutexLock lock(&receive_statistics_lock_); max_reordering_threshold_ = max_reordering_threshold; statisticians = statisticians_; } @@ -366,7 +366,7 @@ std::vector ReceiveStatisticsImpl::RtcpReportBlocks( size_t max_blocks) { std::map statisticians; { - 
rtc::CritScope cs(&receive_statistics_lock_); + MutexLock lock(&receive_statistics_lock_); statisticians = statisticians_; } std::vector result; diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.h b/modules/rtp_rtcp/source/receive_statistics_impl.h index e352ae8787..41830b0b48 100644 --- a/modules/rtp_rtcp/source/receive_statistics_impl.h +++ b/modules/rtp_rtcp/source/receive_statistics_impl.h @@ -18,8 +18,8 @@ #include "absl/types/optional.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "rtc_base/critical_section.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -65,7 +65,7 @@ class StreamStatisticianImpl : public StreamStatistician { const uint32_t ssrc_; Clock* const clock_; - rtc::CriticalSection stream_lock_; + mutable Mutex stream_lock_; RateStatistics incoming_bitrate_ RTC_GUARDED_BY(&stream_lock_); // In number of packets or sequence numbers. 
int max_reordering_threshold_ RTC_GUARDED_BY(&stream_lock_); @@ -123,7 +123,7 @@ class ReceiveStatisticsImpl : public ReceiveStatistics { StreamStatisticianImpl* GetOrCreateStatistician(uint32_t ssrc); Clock* const clock_; - rtc::CriticalSection receive_statistics_lock_; + mutable Mutex receive_statistics_lock_; uint32_t last_returned_ssrc_; int max_reordering_threshold_ RTC_GUARDED_BY(receive_statistics_lock_); std::map statisticians_ diff --git a/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc b/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc index fd19b1383a..6fed7314c0 100644 --- a/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc +++ b/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc @@ -12,14 +12,17 @@ #include +#include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" namespace webrtc { namespace { -static const int kTimingLogIntervalMs = 10000; -static const int kClocksOffsetSmoothingWindow = 100; + +constexpr int kMinimumNumberOfSamples = 2; +constexpr int kTimingLogIntervalMs = 10000; +constexpr int kClocksOffsetSmoothingWindow = 100; } // namespace @@ -35,9 +38,9 @@ RemoteNtpTimeEstimator::~RemoteNtpTimeEstimator() {} bool RemoteNtpTimeEstimator::UpdateRtcpTimestamp(int64_t rtt, uint32_t ntp_secs, uint32_t ntp_frac, - uint32_t rtcp_timestamp) { + uint32_t rtp_timestamp) { bool new_rtcp_sr = false; - if (!rtp_to_ntp_.UpdateMeasurements(ntp_secs, ntp_frac, rtcp_timestamp, + if (!rtp_to_ntp_.UpdateMeasurements(ntp_secs, ntp_frac, rtp_timestamp, &new_rtcp_sr)) { return false; } @@ -47,8 +50,9 @@ bool RemoteNtpTimeEstimator::UpdateRtcpTimestamp(int64_t rtt, } // Update extrapolator with the new arrival time. - // The extrapolator assumes the TimeInMilliseconds time. - int64_t receiver_arrival_time_ms = clock_->TimeInMilliseconds(); + // The extrapolator assumes the ntp time. 
+ int64_t receiver_arrival_time_ms = + clock_->TimeInMilliseconds() + NtpOffsetMs(); int64_t sender_send_time_ms = Clock::NtpToMs(ntp_secs, ntp_frac); int64_t sender_arrival_time_ms = sender_send_time_ms + rtt / 2; int64_t remote_to_local_clocks_offset = @@ -65,21 +69,36 @@ int64_t RemoteNtpTimeEstimator::Estimate(uint32_t rtp_timestamp) { int64_t remote_to_local_clocks_offset = ntp_clocks_offset_estimator_.GetFilteredValue(); - int64_t receiver_capture_ms = + int64_t receiver_capture_ntp_ms = sender_capture_ntp_ms + remote_to_local_clocks_offset; + + // TODO(bugs.webrtc.org/11327): Clock::CurrentNtpInMilliseconds() was + // previously used to calculate the offset between the local and the remote + // clock. However, rtc::TimeMillis() + NtpOffsetMs() is now used as the local + // ntp clock value. To preserve the old behavior of this method, the return + // value is adjusted with the difference between the two local ntp clocks. int64_t now_ms = clock_->TimeInMilliseconds(); - int64_t ntp_offset = clock_->CurrentNtpInMilliseconds() - now_ms; - int64_t receiver_capture_ntp_ms = receiver_capture_ms + ntp_offset; + int64_t offset_between_local_ntp_clocks = + clock_->CurrentNtpInMilliseconds() - now_ms - NtpOffsetMs(); + receiver_capture_ntp_ms += offset_between_local_ntp_clocks; if (now_ms - last_timing_log_ms_ > kTimingLogIntervalMs) { RTC_LOG(LS_INFO) << "RTP timestamp: " << rtp_timestamp << " in NTP clock: " << sender_capture_ntp_ms - << " estimated time in receiver clock: " - << receiver_capture_ms - << " converted to NTP clock: " << receiver_capture_ntp_ms; + << " estimated time in receiver NTP clock: " + << receiver_capture_ntp_ms; last_timing_log_ms_ = now_ms; } return receiver_capture_ntp_ms; } +absl::optional +RemoteNtpTimeEstimator::EstimateRemoteToLocalClockOffsetMs() { + if (ntp_clocks_offset_estimator_.GetNumberOfSamplesStored() < + kMinimumNumberOfSamples) { + return absl::nullopt; + } + return ntp_clocks_offset_estimator_.GetFilteredValue(); +} + } // 
namespace webrtc diff --git a/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc b/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc index c9b9434db4..85f08483ea 100644 --- a/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc +++ b/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc @@ -9,17 +9,21 @@ */ #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" - +#include "absl/types/optional.h" +#include "modules/rtp_rtcp/source/time_util.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" namespace webrtc { -static const int64_t kTestRtt = 10; -static const int64_t kLocalClockInitialTimeMs = 123; -static const int64_t kRemoteClockInitialTimeMs = 345; -static const uint32_t kTimestampOffset = 567; +constexpr int64_t kTestRtt = 10; +constexpr int64_t kLocalClockInitialTimeMs = 123; +constexpr int64_t kRemoteClockInitialTimeMs = 345; +constexpr uint32_t kTimestampOffset = 567; +constexpr int64_t kRemoteToLocalClockOffsetMs = + kLocalClockInitialTimeMs - kRemoteClockInitialTimeMs; class RemoteNtpTimeEstimatorTest : public ::testing::Test { protected: @@ -39,9 +43,13 @@ class RemoteNtpTimeEstimatorTest : public ::testing::Test { kTimestampOffset; } + NtpTime GetRemoteNtpTime() { + return TimeMicrosToNtp(remote_clock_.TimeInMicroseconds()); + } + void SendRtcpSr() { uint32_t rtcp_timestamp = GetRemoteTimestamp(); - NtpTime ntp = remote_clock_.CurrentNtpTime(); + NtpTime ntp = GetRemoteNtpTime(); AdvanceTimeMilliseconds(kTestRtt / 2); ReceiveRtcpSr(kTestRtt, rtcp_timestamp, ntp.seconds(), ntp.fractions()); @@ -53,7 +61,7 @@ class RemoteNtpTimeEstimatorTest : public ::testing::Test { int64_t ntp_error_fractions = ntp_error_ms * static_cast(NtpTime::kFractionsPerSecond) / 1000; - NtpTime ntp(static_cast(remote_clock_.CurrentNtpTime()) + + NtpTime ntp(static_cast(GetRemoteNtpTime()) + ntp_error_fractions); 
AdvanceTimeMilliseconds(kTestRtt / 2 + networking_delay_ms); ReceiveRtcpSr(kTestRtt, rtcp_timestamp, ntp.seconds(), ntp.fractions()); @@ -96,6 +104,7 @@ TEST_F(RemoteNtpTimeEstimatorTest, Estimate) { // Local peer needs at least 2 RTCP SR to calculate the capture time. const int64_t kNotEnoughRtcpSr = -1; EXPECT_EQ(kNotEnoughRtcpSr, estimator_->Estimate(rtp_timestamp)); + EXPECT_EQ(absl::nullopt, estimator_->EstimateRemoteToLocalClockOffsetMs()); AdvanceTimeMilliseconds(800); // Remote sends second RTCP SR. @@ -103,36 +112,24 @@ TEST_F(RemoteNtpTimeEstimatorTest, Estimate) { // Local peer gets enough RTCP SR to calculate the capture time. EXPECT_EQ(capture_ntp_time_ms, estimator_->Estimate(rtp_timestamp)); + EXPECT_EQ(kRemoteToLocalClockOffsetMs, + estimator_->EstimateRemoteToLocalClockOffsetMs()); } TEST_F(RemoteNtpTimeEstimatorTest, AveragesErrorsOut) { // Remote peer sends first 10 RTCP SR without errors. - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); - AdvanceTimeMilliseconds(1000); - SendRtcpSr(); + for (int i = 0; i < 10; ++i) { + AdvanceTimeMilliseconds(1000); + SendRtcpSr(); + } AdvanceTimeMilliseconds(150); uint32_t rtp_timestamp = GetRemoteTimestamp(); int64_t capture_ntp_time_ms = local_clock_.CurrentNtpInMilliseconds(); // Local peer gets enough RTCP SR to calculate the capture time. 
EXPECT_EQ(capture_ntp_time_ms, estimator_->Estimate(rtp_timestamp)); + EXPECT_EQ(kRemoteToLocalClockOffsetMs, + estimator_->EstimateRemoteToLocalClockOffsetMs()); // Remote sends corrupted RTCP SRs AdvanceTimeMilliseconds(1000); @@ -147,6 +144,8 @@ TEST_F(RemoteNtpTimeEstimatorTest, AveragesErrorsOut) { // Errors should be averaged out. EXPECT_EQ(capture_ntp_time_ms, estimator_->Estimate(rtp_timestamp)); + EXPECT_EQ(kRemoteToLocalClockOffsetMs, + estimator_->EstimateRemoteToLocalClockOffsetMs()); } } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc index 5e762335ea..54f3555fc6 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include +#include + #include "rtc_base/checks.h" namespace webrtc { @@ -19,16 +22,16 @@ CompoundPacket::CompoundPacket() = default; CompoundPacket::~CompoundPacket() = default; -void CompoundPacket::Append(RtcpPacket* packet) { +void CompoundPacket::Append(std::unique_ptr packet) { RTC_CHECK(packet); - appended_packets_.push_back(packet); + appended_packets_.push_back(std::move(packet)); } bool CompoundPacket::Create(uint8_t* packet, size_t* index, size_t max_length, PacketReadyCallback callback) const { - for (RtcpPacket* appended : appended_packets_) { + for (const auto& appended : appended_packets_) { if (!appended->Create(packet, index, max_length, callback)) return false; } @@ -37,7 +40,7 @@ bool CompoundPacket::Create(uint8_t* packet, size_t CompoundPacket::BlockLength() const { size_t block_length = 0; - for (RtcpPacket* appended : appended_packets_) { + for (const auto& appended : appended_packets_) { block_length += appended->BlockLength(); } return block_length; diff --git a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h 
b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h index f521c7f921..8bee600692 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h +++ b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h @@ -12,6 +12,7 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_ +#include #include #include "modules/rtp_rtcp/source/rtcp_packet.h" @@ -25,7 +26,7 @@ class CompoundPacket : public RtcpPacket { CompoundPacket(); ~CompoundPacket() override; - void Append(RtcpPacket* packet); + void Append(std::unique_ptr packet); // Size of this packet in bytes (i.e. total size of nested packets). size_t BlockLength() const override; @@ -36,7 +37,7 @@ class CompoundPacket : public RtcpPacket { PacketReadyCallback callback) const override; protected: - std::vector appended_packets_; + std::vector> appended_packets_; private: RTC_DISALLOW_COPY_AND_ASSIGN(CompoundPacket); diff --git a/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc index 02a4f11ac2..9348aee7e4 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" @@ -38,14 +41,14 @@ const uint8_t kSeqNo = 13; TEST(RtcpCompoundPacketTest, AppendPacket) { CompoundPacket compound; - Fir fir; - fir.AddRequestTo(kRemoteSsrc, kSeqNo); + auto fir = std::make_unique(); + fir->AddRequestTo(kRemoteSsrc, kSeqNo); ReportBlock rb; - ReceiverReport rr; - rr.SetSenderSsrc(kSenderSsrc); - EXPECT_TRUE(rr.AddReportBlock(rb)); - compound.Append(&rr); - compound.Append(&fir); + auto rr = std::make_unique(); + rr->SetSenderSsrc(kSenderSsrc); + 
EXPECT_TRUE(rr->AddReportBlock(rb)); + compound.Append(std::move(rr)); + compound.Append(std::move(fir)); rtc::Buffer packet = compound.Build(); RtcpPacketParser parser; @@ -58,21 +61,22 @@ TEST(RtcpCompoundPacketTest, AppendPacket) { TEST(RtcpCompoundPacketTest, AppendPacketWithOwnAppendedPacket) { CompoundPacket root; - CompoundPacket leaf; - Fir fir; - fir.AddRequestTo(kRemoteSsrc, kSeqNo); - Bye bye; + auto leaf = std::make_unique(); + + auto fir = std::make_unique(); + fir->AddRequestTo(kRemoteSsrc, kSeqNo); + auto bye = std::make_unique(); ReportBlock rb; - ReceiverReport rr; - EXPECT_TRUE(rr.AddReportBlock(rb)); - leaf.Append(&rr); - leaf.Append(&fir); + auto rr = std::make_unique(); + EXPECT_TRUE(rr->AddReportBlock(rb)); + leaf->Append(std::move(rr)); + leaf->Append(std::move(fir)); - SenderReport sr; - root.Append(&sr); - root.Append(&bye); - root.Append(&leaf); + auto sr = std::make_unique(); + root.Append(std::move(sr)); + root.Append(std::move(bye)); + root.Append(std::move(leaf)); rtc::Buffer packet = root.Build(); RtcpPacketParser parser; @@ -86,14 +90,14 @@ TEST(RtcpCompoundPacketTest, AppendPacketWithOwnAppendedPacket) { TEST(RtcpCompoundPacketTest, BuildWithInputBuffer) { CompoundPacket compound; - Fir fir; - fir.AddRequestTo(kRemoteSsrc, kSeqNo); + auto fir = std::make_unique(); + fir->AddRequestTo(kRemoteSsrc, kSeqNo); ReportBlock rb; - ReceiverReport rr; - rr.SetSenderSsrc(kSenderSsrc); - EXPECT_TRUE(rr.AddReportBlock(rb)); - compound.Append(&rr); - compound.Append(&fir); + auto rr = std::make_unique(); + rr->SetSenderSsrc(kSenderSsrc); + EXPECT_TRUE(rr->AddReportBlock(rb)); + compound.Append(std::move(rr)); + compound.Append(std::move(fir)); const size_t kRrLength = 8; const size_t kReportBlockLength = 24; @@ -115,14 +119,14 @@ TEST(RtcpCompoundPacketTest, BuildWithInputBuffer) { TEST(RtcpCompoundPacketTest, BuildWithTooSmallBuffer_FragmentedSend) { CompoundPacket compound; - Fir fir; - fir.AddRequestTo(kRemoteSsrc, kSeqNo); + auto fir = 
std::make_unique(); + fir->AddRequestTo(kRemoteSsrc, kSeqNo); ReportBlock rb; - ReceiverReport rr; - rr.SetSenderSsrc(kSenderSsrc); - EXPECT_TRUE(rr.AddReportBlock(rb)); - compound.Append(&rr); - compound.Append(&fir); + auto rr = std::make_unique(); + rr->SetSenderSsrc(kSenderSsrc); + EXPECT_TRUE(rr->AddReportBlock(rb)); + compound.Append(std::move(rr)); + compound.Append(std::move(fir)); const size_t kRrLength = 8; const size_t kReportBlockLength = 24; diff --git a/modules/rtp_rtcp/source/rtcp_packet/remb.cc b/modules/rtp_rtcp/source/rtcp_packet/remb.cc index 93c12d5672..39795fb79c 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remb.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remb.cc @@ -71,7 +71,8 @@ bool Remb::Parse(const CommonHeader& packet) { uint64_t mantissa = (static_cast(payload[13] & 0x03) << 16) | ByteReader::ReadBigEndian(&payload[14]); bitrate_bps_ = (mantissa << exponenta); - bool shift_overflow = (bitrate_bps_ >> exponenta) != mantissa; + bool shift_overflow = + (static_cast(bitrate_bps_) >> exponenta) != mantissa; if (shift_overflow) { RTC_LOG(LS_ERROR) << "Invalid remb bitrate value : " << mantissa << "*2^" << static_cast(exponenta); diff --git a/modules/rtp_rtcp/source/rtcp_packet/remb.h b/modules/rtp_rtcp/source/rtcp_packet/remb.h index 232b25b096..b7075c0f23 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remb.h +++ b/modules/rtp_rtcp/source/rtcp_packet/remb.h @@ -32,9 +32,9 @@ class Remb : public Psfb { bool Parse(const CommonHeader& packet); bool SetSsrcs(std::vector ssrcs); - void SetBitrateBps(uint64_t bitrate_bps) { bitrate_bps_ = bitrate_bps; } + void SetBitrateBps(int64_t bitrate_bps) { bitrate_bps_ = bitrate_bps; } - uint64_t bitrate_bps() const { return bitrate_bps_; } + int64_t bitrate_bps() const { return bitrate_bps_; } const std::vector& ssrcs() const { return ssrcs_; } size_t BlockLength() const override; @@ -51,7 +51,7 @@ class Remb : public Psfb { void SetMediaSsrc(uint32_t); uint32_t media_ssrc() const; - uint64_t 
bitrate_bps_; + int64_t bitrate_bps_; std::vector ssrcs_; }; } // namespace rtcp diff --git a/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc index ed5f48fec6..391a61de89 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc @@ -24,7 +24,7 @@ namespace { const uint32_t kSenderSsrc = 0x12345678; const uint32_t kRemoteSsrcs[] = {0x23456789, 0x2345678a, 0x2345678b}; const uint32_t kBitrateBps = 0x3fb93 * 2; // 522022; -const uint64_t kBitrateBps64bit = 0x3fb93ULL << 30; +const int64_t kBitrateBps64bit = int64_t{0x3fb93} << 30; const uint8_t kPacket[] = {0x8f, 206, 0x00, 0x07, 0x12, 0x34, 0x56, 0x78, 0x00, 0x00, 0x00, 0x00, 'R', 'E', 'M', 'B', 0x03, 0x07, 0xfb, 0x93, 0x23, 0x45, 0x67, 0x89, diff --git a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc index 3f63bd5888..ca59791248 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc @@ -25,7 +25,7 @@ namespace { static constexpr int kFieldValueSize = 3; static constexpr int kFieldSize = 1 + kFieldValueSize; -static constexpr DataRate kDataRateResolution = DataRate::KilobitsPerSec<1>(); +static constexpr DataRate kDataRateResolution = DataRate::KilobitsPerSec(1); constexpr int64_t kMaxEncoded = (1 << (kFieldValueSize * 8)) - 1; class DataRateSerializer { diff --git a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc index bbeb227714..bf0e0e2610 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc @@ -15,8 +15,8 @@ namespace webrtc { namespace rtcp { TEST(RemoteEstimateTest, EncodesCapacityBounds) { NetworkStateEstimate src; - src.link_capacity_lower = DataRate::kbps(10); - 
src.link_capacity_upper = DataRate::kbps(1000000); + src.link_capacity_lower = DataRate::KilobitsPerSec(10); + src.link_capacity_upper = DataRate::KilobitsPerSec(1000000); rtc::Buffer data = GetRemoteEstimateSerializer()->Serialize(src); NetworkStateEstimate dst; EXPECT_TRUE(GetRemoteEstimateSerializer()->Parse(data, &dst)); @@ -28,7 +28,7 @@ TEST(RemoteEstimateTest, ExpandsToPlusInfinity) { NetworkStateEstimate src; // White box testing: We know that the value is stored in an unsigned 24 int // with kbps resolution. We expected it be represented as plus infinity. - src.link_capacity_lower = DataRate::kbps(2 << 24); + src.link_capacity_lower = DataRate::KilobitsPerSec(2 << 24); src.link_capacity_upper = DataRate::PlusInfinity(); rtc::Buffer data = GetRemoteEstimateSerializer()->Serialize(src); @@ -46,10 +46,10 @@ TEST(RemoteEstimateTest, DoesNotEncodeNegative) { // Since MinusInfinity can't be represented, the buffer should be empty. EXPECT_EQ(data.size(), 0u); NetworkStateEstimate dst; - dst.link_capacity_lower = DataRate::kbps(300); + dst.link_capacity_lower = DataRate::KilobitsPerSec(300); EXPECT_TRUE(GetRemoteEstimateSerializer()->Parse(data, &dst)); // The fields will be left unchanged by the parser as they were not encoded. 
- EXPECT_EQ(dst.link_capacity_lower, DataRate::kbps(300)); + EXPECT_EQ(dst.link_capacity_lower, DataRate::KilobitsPerSec(300)); EXPECT_TRUE(dst.link_capacity_upper.IsMinusInfinity()); } } // namespace rtcp diff --git a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc index 2900fcec9e..96c3cb3902 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc @@ -377,7 +377,7 @@ int64_t TransportFeedback::GetBaseTimeUs() const { } TimeDelta TransportFeedback::GetBaseTime() const { - return TimeDelta::us(GetBaseTimeUs()); + return TimeDelta::Micros(GetBaseTimeUs()); } int64_t TransportFeedback::GetBaseDeltaUs(int64_t prev_timestamp_us) const { @@ -393,7 +393,7 @@ int64_t TransportFeedback::GetBaseDeltaUs(int64_t prev_timestamp_us) const { } TimeDelta TransportFeedback::GetBaseDelta(TimeDelta prev_timestamp) const { - return TimeDelta::us(GetBaseDeltaUs(prev_timestamp.us())); + return TimeDelta::Micros(GetBaseDeltaUs(prev_timestamp.us())); } // De-serialize packet. 
diff --git a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h index 090abcc10a..c2a4d4327a 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h +++ b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h @@ -37,7 +37,7 @@ class TransportFeedback : public Rtpfb { uint16_t sequence_number() const { return sequence_number_; } int16_t delta_ticks() const { return delta_ticks_; } int32_t delta_us() const { return delta_ticks_ * kDeltaScaleFactor; } - TimeDelta delta() const { return TimeDelta::us(delta_us()); } + TimeDelta delta() const { return TimeDelta::Micros(delta_us()); } bool received() const { return received_; } private: diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc index 6b64473eea..1db5eeb550 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/modules/rtp_rtcp/source/rtcp_receiver.cc @@ -12,6 +12,7 @@ #include +#include #include #include #include @@ -61,22 +62,42 @@ const int64_t kMaxWarningLogIntervalMs = 10000; const int64_t kRtcpMinFrameLengthMs = 17; // Maximum number of received RRTRs that will be stored. 
-const size_t kMaxNumberOfStoredRrtrs = 200; +const size_t kMaxNumberOfStoredRrtrs = 300; -constexpr int32_t kDefaultVideoReportInterval = 1000; -constexpr int32_t kDefaultAudioReportInterval = 5000; +constexpr TimeDelta kDefaultVideoReportInterval = TimeDelta::Seconds(1); +constexpr TimeDelta kDefaultAudioReportInterval = TimeDelta::Seconds(5); -std::set GetRegisteredSsrcs(const RtpRtcp::Configuration& config) { +std::set GetRegisteredSsrcs( + const RtpRtcpInterface::Configuration& config) { std::set ssrcs; ssrcs.insert(config.local_media_ssrc); if (config.rtx_send_ssrc) { ssrcs.insert(*config.rtx_send_ssrc); } - if (config.flexfec_sender) { - ssrcs.insert(config.flexfec_sender->ssrc()); + if (config.fec_generator) { + absl::optional flexfec_ssrc = config.fec_generator->FecSsrc(); + if (flexfec_ssrc) { + ssrcs.insert(*flexfec_ssrc); + } } return ssrcs; } + +// Returns true if the |timestamp| has exceeded the |interval * +// kRrTimeoutIntervals| period and was reset (set to PlusInfinity()). Returns +// false if the timer was either already reset or if it has not expired. 
+bool ResetTimestampIfExpired(const Timestamp now, + Timestamp& timestamp, + TimeDelta interval) { + if (timestamp.IsInfinite() || + now <= timestamp + interval * kRrTimeoutIntervals) { + return false; + } + + timestamp = Timestamp::PlusInfinity(); + return true; +} + } // namespace struct RTCPReceiver::PacketInformation { @@ -133,7 +154,7 @@ struct RTCPReceiver::LastFirStatus { uint8_t sequence_number; }; -RTCPReceiver::RTCPReceiver(const RtpRtcp::Configuration& config, +RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, ModuleRtpRtcp* owner) : clock_(config.clock), receiver_only_(config.receiver_only), @@ -146,21 +167,19 @@ RTCPReceiver::RTCPReceiver(const RtpRtcp::Configuration& config, network_state_estimate_observer_(config.network_state_estimate_observer), transport_feedback_observer_(config.transport_feedback_callback), bitrate_allocation_observer_(config.bitrate_allocation_observer), - report_interval_ms_(config.rtcp_report_interval_ms > 0 - ? config.rtcp_report_interval_ms - : (config.audio ? kDefaultAudioReportInterval - : kDefaultVideoReportInterval)), + report_interval_(config.rtcp_report_interval_ms > 0 + ? TimeDelta::Millis(config.rtcp_report_interval_ms) + : (config.audio ? kDefaultAudioReportInterval + : kDefaultVideoReportInterval)), // TODO(bugs.webrtc.org/10774): Remove fallback. 
remote_ssrc_(0), remote_sender_rtp_time_(0), xr_rrtr_status_(false), xr_rr_rtt_ms_(0), oldest_tmmbr_info_ms_(0), - last_received_rb_ms_(0), - last_increased_sequence_number_ms_(0), - stats_callback_(nullptr), - cname_callback_(nullptr), - report_block_data_observer_(nullptr), + stats_callback_(config.rtcp_statistics_callback), + cname_callback_(config.rtcp_cname_callback), + report_block_data_observer_(config.report_block_data_observer), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), num_skipped_packets_(0), last_skipped_packets_warning_ms_(clock_->TimeInMilliseconds()) { @@ -169,32 +188,34 @@ RTCPReceiver::RTCPReceiver(const RtpRtcp::Configuration& config, RTCPReceiver::~RTCPReceiver() {} -void RTCPReceiver::IncomingPacket(const uint8_t* packet, size_t packet_size) { - if (packet_size == 0) { +void RTCPReceiver::IncomingPacket(rtc::ArrayView packet) { + if (packet.empty()) { RTC_LOG(LS_WARNING) << "Incoming empty RTCP packet"; return; } PacketInformation packet_information; - if (!ParseCompoundPacket(packet, packet + packet_size, &packet_information)) + if (!ParseCompoundPacket(packet, &packet_information)) return; TriggerCallbacksFromRtcpPacket(packet_information); } +// This method is only used by test and legacy code, so we should be able to +// remove it soon. int64_t RTCPReceiver::LastReceivedReportBlockMs() const { - rtc::CritScope lock(&rtcp_receiver_lock_); - return last_received_rb_ms_; + MutexLock lock(&rtcp_receiver_lock_); + return last_received_rb_.IsFinite() ? last_received_rb_.ms() : 0; } void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); // New SSRC reset old reports. 
last_received_sr_ntp_.Reset(); remote_ssrc_ = ssrc; } uint32_t RTCPReceiver::RemoteSSRC() const { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); return remote_ssrc_; } @@ -203,7 +224,7 @@ int32_t RTCPReceiver::RTT(uint32_t remote_ssrc, int64_t* avg_rtt_ms, int64_t* min_rtt_ms, int64_t* max_rtt_ms) const { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); auto it = received_report_blocks_.find(main_ssrc_); if (it == received_report_blocks_.end()) @@ -236,13 +257,13 @@ int32_t RTCPReceiver::RTT(uint32_t remote_ssrc, } void RTCPReceiver::SetRtcpXrRrtrStatus(bool enable) { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); xr_rrtr_status_ = enable; } bool RTCPReceiver::GetAndResetXrRrRtt(int64_t* rtt_ms) { RTC_DCHECK(rtt_ms); - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); if (xr_rr_rtt_ms_ == 0) { return false; } @@ -251,12 +272,66 @@ bool RTCPReceiver::GetAndResetXrRrRtt(int64_t* rtt_ms) { return true; } +// Called regularly (1/sec) on the worker thread to do rtt calculations. +absl::optional RTCPReceiver::OnPeriodicRttUpdate( + Timestamp newer_than, + bool sending) { + // Running on the worker thread (same as construction thread). + absl::optional rtt; + + if (sending) { + // Check if we've received a report block within the last kRttUpdateInterval + // amount of time. + MutexLock lock(&rtcp_receiver_lock_); + if (last_received_rb_.IsInfinite() || last_received_rb_ > newer_than) { + // Stow away the report block for the main ssrc. We'll use the associated + // data map to look up each sender and check the last_rtt_ms(). 
+ auto main_report_it = received_report_blocks_.find(main_ssrc_); + if (main_report_it != received_report_blocks_.end()) { + const ReportBlockDataMap& main_data_map = main_report_it->second; + int64_t max_rtt = 0; + for (const auto& reports_per_receiver : received_report_blocks_) { + for (const auto& report : reports_per_receiver.second) { + const RTCPReportBlock& block = report.second.report_block(); + auto it_info = main_data_map.find(block.sender_ssrc); + if (it_info != main_data_map.end()) { + const ReportBlockData* report_block_data = &it_info->second; + if (report_block_data->num_rtts() > 0) { + max_rtt = std::max(report_block_data->last_rtt_ms(), max_rtt); + } + } + } + } + if (max_rtt) + rtt.emplace(TimeDelta::Millis(max_rtt)); + } + } + + // Check for expired timers and if so, log and reset. + auto now = clock_->CurrentTime(); + if (RtcpRrTimeoutLocked(now)) { + RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received."; + } else if (RtcpRrSequenceNumberTimeoutLocked(now)) { + RTC_LOG_F(LS_WARNING) << "Timeout: No increase in RTCP RR extended " + "highest sequence number."; + } + } else { + // Report rtt from receiver. 
+ int64_t rtt_ms; + if (GetAndResetXrRrRtt(&rtt_ms)) { + rtt.emplace(TimeDelta::Millis(rtt_ms)); + } + } + + return rtt; +} + bool RTCPReceiver::NTP(uint32_t* received_ntp_secs, uint32_t* received_ntp_frac, uint32_t* rtcp_arrival_time_secs, uint32_t* rtcp_arrival_time_frac, uint32_t* rtcp_timestamp) const { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); if (!last_received_sr_ntp_.Valid()) return false; @@ -281,7 +356,7 @@ bool RTCPReceiver::NTP(uint32_t* received_ntp_secs, std::vector RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); const size_t last_xr_rtis_size = std::min( received_rrtrs_.size(), rtcp::ExtendedReports::kMaxNumberOfDlrrItems); @@ -306,7 +381,7 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { int32_t RTCPReceiver::StatisticsReceived( std::vector* receive_blocks) const { RTC_DCHECK(receive_blocks); - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); for (const auto& reports_per_receiver : received_report_blocks_) for (const auto& report : reports_per_receiver.second) receive_blocks->push_back(report.second.report_block()); @@ -315,25 +390,24 @@ int32_t RTCPReceiver::StatisticsReceived( std::vector RTCPReceiver::GetLatestReportBlockData() const { std::vector result; - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); for (const auto& reports_per_receiver : received_report_blocks_) for (const auto& report : reports_per_receiver.second) result.push_back(report.second); return result; } -bool RTCPReceiver::ParseCompoundPacket(const uint8_t* packet_begin, - const uint8_t* packet_end, +bool RTCPReceiver::ParseCompoundPacket(rtc::ArrayView packet, PacketInformation* packet_information) { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); CommonHeader rtcp_block; - for (const uint8_t* next_block = packet_begin; next_block != 
packet_end; + for (const uint8_t* next_block = packet.begin(); next_block != packet.end(); next_block = rtcp_block.NextPacket()) { - ptrdiff_t remaining_blocks_size = packet_end - next_block; + ptrdiff_t remaining_blocks_size = packet.end() - next_block; RTC_DCHECK_GT(remaining_blocks_size, 0); if (!rtcp_block.Parse(next_block, remaining_blocks_size)) { - if (next_block == packet_begin) { + if (next_block == packet.begin()) { // Failed to parse 1st header, nothing was extracted from this packet. RTC_LOG(LS_WARNING) << "Incoming invalid RTCP packet"; return false; @@ -413,15 +487,16 @@ bool RTCPReceiver::ParseCompoundPacket(const uint8_t* packet_begin, main_ssrc_, packet_type_counter_); } - int64_t now_ms = clock_->TimeInMilliseconds(); - if (now_ms - last_skipped_packets_warning_ms_ >= kMaxWarningLogIntervalMs && - num_skipped_packets_ > 0) { - last_skipped_packets_warning_ms_ = now_ms; - RTC_LOG(LS_WARNING) - << num_skipped_packets_ - << " RTCP blocks were skipped due to being malformed or of " - "unrecognized/unsupported type, during the past " - << (kMaxWarningLogIntervalMs / 1000) << " second period."; + if (num_skipped_packets_ > 0) { + const int64_t now_ms = clock_->TimeInMilliseconds(); + if (now_ms - last_skipped_packets_warning_ms_ >= kMaxWarningLogIntervalMs) { + last_skipped_packets_warning_ms_ = now_ms; + RTC_LOG(LS_WARNING) + << num_skipped_packets_ + << " RTCP blocks were skipped due to being malformed or of " + "unrecognized/unsupported type, during the past " + << (kMaxWarningLogIntervalMs / 1000) << " second period."; + } } return true; @@ -495,7 +570,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, if (registered_ssrcs_.count(report_block.source_ssrc()) == 0) return; - last_received_rb_ms_ = clock_->TimeInMilliseconds(); + last_received_rb_ = clock_->CurrentTime(); ReportBlockData* report_block_data = &received_report_blocks_[report_block.source_ssrc()][remote_ssrc]; @@ -508,7 +583,7 @@ void 
RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, report_block_data->report_block().extended_highest_sequence_number) { // We have successfully delivered new RTP packets to the remote side after // the last RR was sent from the remote side. - last_increased_sequence_number_ms_ = clock_->TimeInMilliseconds(); + last_increased_sequence_number_ = last_received_rb_; } rtcp_report_block.extended_highest_sequence_number = report_block.extended_high_seq_num(); @@ -535,7 +610,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, uint32_t delay_ntp = report_block.delay_since_last_sr(); // Local NTP time. uint32_t receive_time_ntp = - CompactNtp(TimeMicrosToNtp(clock_->TimeInMicroseconds())); + CompactNtp(TimeMicrosToNtp(last_received_rb_.us())); // RTT in 1/(2^16) seconds. uint32_t rtt_ntp = receive_time_ntp - delay_ntp - send_time_ntp; @@ -574,37 +649,22 @@ RTCPReceiver::TmmbrInformation* RTCPReceiver::GetTmmbrInformation( return &it->second; } +// These two methods (RtcpRrTimeout and RtcpRrSequenceNumberTimeout) only exist +// for tests and legacy code (rtp_rtcp_impl.cc). We should be able to to delete +// the methods and require that access to the locked variables only happens on +// the worker thread and thus no locking is needed. bool RTCPReceiver::RtcpRrTimeout() { - rtc::CritScope lock(&rtcp_receiver_lock_); - if (last_received_rb_ms_ == 0) - return false; - - int64_t time_out_ms = kRrTimeoutIntervals * report_interval_ms_; - if (clock_->TimeInMilliseconds() > last_received_rb_ms_ + time_out_ms) { - // Reset the timer to only trigger one log. 
- last_received_rb_ms_ = 0; - return true; - } - return false; + MutexLock lock(&rtcp_receiver_lock_); + return RtcpRrTimeoutLocked(clock_->CurrentTime()); } bool RTCPReceiver::RtcpRrSequenceNumberTimeout() { - rtc::CritScope lock(&rtcp_receiver_lock_); - if (last_increased_sequence_number_ms_ == 0) - return false; - - int64_t time_out_ms = kRrTimeoutIntervals * report_interval_ms_; - if (clock_->TimeInMilliseconds() > - last_increased_sequence_number_ms_ + time_out_ms) { - // Reset the timer to only trigger one log. - last_increased_sequence_number_ms_ = 0; - return true; - } - return false; + MutexLock lock(&rtcp_receiver_lock_); + return RtcpRrSequenceNumberTimeoutLocked(clock_->CurrentTime()); } bool RTCPReceiver::UpdateTmmbrTimers() { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); int64_t now_ms = clock_->TimeInMilliseconds(); int64_t timeout_ms = now_ms - kTmmbrTimeoutIntervalMs; @@ -641,7 +701,7 @@ bool RTCPReceiver::UpdateTmmbrTimers() { } std::vector RTCPReceiver::BoundingSet(bool* tmmbr_owner) { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); TmmbrInformation* tmmbr_info = GetTmmbrInformation(remote_ssrc_); if (!tmmbr_info) return std::vector(); @@ -660,11 +720,8 @@ void RTCPReceiver::HandleSdes(const CommonHeader& rtcp_block, for (const rtcp::Sdes::Chunk& chunk : sdes.chunks()) { received_cnames_[chunk.ssrc] = chunk.cname; - { - rtc::CritScope lock(&feedbacks_lock_); - if (cname_callback_) - cname_callback_->OnCname(chunk.ssrc, chunk.cname); - } + if (cname_callback_) + cname_callback_->OnCname(chunk.ssrc, chunk.cname); } packet_information->packet_type_flags |= kRtcpSdes; } @@ -861,7 +918,9 @@ void RTCPReceiver::HandleTmmbr(const CommonHeader& rtcp_block, auto* entry = &tmmbr_info->tmmbr[sender_ssrc]; entry->tmmbr_item = rtcp::TmmbItem(sender_ssrc, request.bitrate_bps(), request.packet_overhead()); - entry->last_updated_ms = clock_->TimeInMilliseconds(); + // 
FindOrCreateTmmbrInfo always sets |last_time_received_ms| to + // |clock_->TimeInMilliseconds()|. + entry->last_updated_ms = tmmbr_info->last_time_received_ms; packet_information->packet_type_flags |= kRtcpTmmbr; break; @@ -928,6 +987,10 @@ void RTCPReceiver::HandleFir(const CommonHeader& rtcp_block, return; } + if (fir.requests().empty()) + return; + + const int64_t now_ms = clock_->TimeInMilliseconds(); for (const rtcp::Fir::Request& fir_request : fir.requests()) { // Is it our sender that is requested to generate a new keyframe. if (main_ssrc_ != fir_request.ssrc) @@ -935,7 +998,6 @@ void RTCPReceiver::HandleFir(const CommonHeader& rtcp_block, ++packet_type_counter_.fir_packets; - int64_t now_ms = clock_->TimeInMilliseconds(); auto inserted = last_fir_.insert(std::make_pair( fir.sender_ssrc(), LastFirStatus(now_ms, fir_request.seq_nr))); if (!inserted.second) { // There was already an entry. @@ -987,28 +1049,6 @@ void RTCPReceiver::NotifyTmmbrUpdated() { rtp_rtcp_->SetTmmbn(std::move(bounding)); } -void RTCPReceiver::RegisterRtcpStatisticsCallback( - RtcpStatisticsCallback* callback) { - rtc::CritScope cs(&feedbacks_lock_); - stats_callback_ = callback; -} - -RtcpStatisticsCallback* RTCPReceiver::GetRtcpStatisticsCallback() { - rtc::CritScope cs(&feedbacks_lock_); - return stats_callback_; -} - -void RTCPReceiver::RegisterRtcpCnameCallback(RtcpCnameCallback* callback) { - rtc::CritScope cs(&feedbacks_lock_); - cname_callback_ = callback; -} - -void RTCPReceiver::SetReportBlockDataObserver( - ReportBlockDataObserver* observer) { - rtc::CritScope cs(&feedbacks_lock_); - report_block_data_observer_ = observer; -} - // Holding no Critical section. void RTCPReceiver::TriggerCallbacksFromRtcpPacket( const PacketInformation& packet_information) { @@ -1022,7 +1062,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( std::set registered_ssrcs; { // We don't want to hold this critsect when triggering the callbacks below. 
- rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); local_ssrc = main_ssrc_; registered_ssrcs = registered_ssrcs_; } @@ -1112,7 +1152,6 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( } if (!receiver_only_) { - rtc::CritScope cs(&feedbacks_lock_); if (stats_callback_) { for (const auto& report_block : packet_information.report_blocks) { RtcpStatistics stats; @@ -1139,7 +1178,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC, char cName[RTCP_CNAME_SIZE]) const { RTC_DCHECK(cName); - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); auto received_cname_it = received_cnames_.find(remoteSSRC); if (received_cname_it == received_cnames_.end()) return -1; @@ -1150,7 +1189,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC, } std::vector RTCPReceiver::TmmbrReceived() { - rtc::CritScope lock(&rtcp_receiver_lock_); + MutexLock lock(&rtcp_receiver_lock_); std::vector candidates; int64_t now_ms = clock_->TimeInMilliseconds(); @@ -1170,4 +1209,13 @@ std::vector RTCPReceiver::TmmbrReceived() { return candidates; } +bool RTCPReceiver::RtcpRrTimeoutLocked(Timestamp now) { + return ResetTimestampIfExpired(now, last_received_rb_, report_interval_); +} + +bool RTCPReceiver::RtcpRrSequenceNumberTimeoutLocked(Timestamp now) { + return ResetTimestampIfExpired(now, last_increased_sequence_number_, + report_interval_); +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h index 5b92d55609..f97fe61291 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/modules/rtp_rtcp/source/rtcp_receiver.h @@ -17,13 +17,14 @@ #include #include +#include "api/array_view.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_nack_stats.h" #include 
"modules/rtp_rtcp/source/rtcp_packet/dlrr.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/ntp_time.h" @@ -37,7 +38,7 @@ class TargetBitrate; class TmmbItem; } // namespace rtcp -class RTCPReceiver { +class RTCPReceiver final { public: class ModuleRtpRtcp { public: @@ -52,10 +53,14 @@ class RTCPReceiver { virtual ~ModuleRtpRtcp() = default; }; - RTCPReceiver(const RtpRtcp::Configuration& config, ModuleRtpRtcp* owner); - virtual ~RTCPReceiver(); + RTCPReceiver(const RtpRtcpInterface::Configuration& config, + ModuleRtpRtcp* owner); + ~RTCPReceiver(); - void IncomingPacket(const uint8_t* packet, size_t packet_size); + void IncomingPacket(const uint8_t* packet, size_t packet_size) { + IncomingPacket(rtc::MakeArrayView(packet, packet_size)); + } + void IncomingPacket(rtc::ArrayView packet); int64_t LastReceivedReportBlockMs() const; @@ -84,6 +89,11 @@ class RTCPReceiver { void SetRtcpXrRrtrStatus(bool enable); bool GetAndResetXrRrRtt(int64_t* rtt_ms); + // Called once per second on the worker thread to do rtt calculations. + // Returns an optional rtt value if one is available. + absl::optional OnPeriodicRttUpdate(Timestamp newer_than, + bool sending); + // Get statistics. int32_t StatisticsReceived(std::vector* receiveBlocks) const; // A snapshot of Report Blocks with additional data of interest to statistics. @@ -109,11 +119,6 @@ class RTCPReceiver { // Set new bandwidth and notify remote clients about it. 
void NotifyTmmbrUpdated(); - void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback); - void RegisterRtcpCnameCallback(RtcpCnameCallback* callback); - RtcpStatisticsCallback* GetRtcpStatisticsCallback(); - void SetReportBlockDataObserver(ReportBlockDataObserver* observer); - private: struct PacketInformation; struct TmmbrInformation; @@ -124,8 +129,7 @@ class RTCPReceiver { // RTCP report blocks map mapped by source SSRC. using ReportBlockMap = std::map; - bool ParseCompoundPacket(const uint8_t* packet_begin, - const uint8_t* packet_end, + bool ParseCompoundPacket(rtc::ArrayView packet, PacketInformation* packet_information); void TriggerCallbacksFromRtcpPacket( @@ -211,22 +215,27 @@ class RTCPReceiver { PacketInformation* packet_information) RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_); + bool RtcpRrTimeoutLocked(Timestamp now) + RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_); + + bool RtcpRrSequenceNumberTimeoutLocked(Timestamp now) + RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_); + Clock* const clock_; const bool receiver_only_; ModuleRtpRtcp* const rtp_rtcp_; const uint32_t main_ssrc_; const std::set registered_ssrcs_; - rtc::CriticalSection feedbacks_lock_; RtcpBandwidthObserver* const rtcp_bandwidth_observer_; RtcpIntraFrameObserver* const rtcp_intra_frame_observer_; RtcpLossNotificationObserver* const rtcp_loss_notification_observer_; NetworkStateEstimateObserver* const network_state_estimate_observer_; TransportFeedbackObserver* const transport_feedback_observer_; VideoBitrateAllocationObserver* const bitrate_allocation_observer_; - const int report_interval_ms_; + const TimeDelta report_interval_; - rtc::CriticalSection rtcp_receiver_lock_; + mutable Mutex rtcp_receiver_lock_; uint32_t remote_ssrc_ RTC_GUARDED_BY(rtcp_receiver_lock_); // Received sender report. @@ -258,19 +267,19 @@ class RTCPReceiver { RTC_GUARDED_BY(rtcp_receiver_lock_); // The last time we received an RTCP Report block for this module. 
- int64_t last_received_rb_ms_ RTC_GUARDED_BY(rtcp_receiver_lock_); + Timestamp last_received_rb_ RTC_GUARDED_BY(rtcp_receiver_lock_) = + Timestamp::PlusInfinity(); // The time we last received an RTCP RR telling we have successfully // delivered RTP packet to the remote side. - int64_t last_increased_sequence_number_ms_; + Timestamp last_increased_sequence_number_ = Timestamp::PlusInfinity(); - RtcpStatisticsCallback* stats_callback_ RTC_GUARDED_BY(feedbacks_lock_); - RtcpCnameCallback* cname_callback_ RTC_GUARDED_BY(feedbacks_lock_); + RtcpStatisticsCallback* const stats_callback_; + RtcpCnameCallback* const cname_callback_; // TODO(hbos): Remove RtcpStatisticsCallback in favor of // ReportBlockDataObserver; the ReportBlockData contains a superset of the // RtcpStatistics data. - ReportBlockDataObserver* report_block_data_observer_ - RTC_GUARDED_BY(feedbacks_lock_); + ReportBlockDataObserver* const report_block_data_observer_; RtcpPacketTypeCounterObserver* const packet_type_counter_observer_; RtcpPacketTypeCounter packet_type_counter_; diff --git a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc index 41bc153790..0506aedadd 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc @@ -11,6 +11,7 @@ #include "modules/rtp_rtcp/source/rtcp_receiver.h" #include +#include #include "api/array_view.h" #include "api/units/timestamp.h" @@ -62,59 +63,74 @@ using ::testing::UnorderedElementsAre; class MockRtcpPacketTypeCounterObserver : public RtcpPacketTypeCounterObserver { public: - MOCK_METHOD2(RtcpPacketTypesCounterUpdated, - void(uint32_t, const RtcpPacketTypeCounter&)); + MOCK_METHOD(void, + RtcpPacketTypesCounterUpdated, + (uint32_t, const RtcpPacketTypeCounter&), + (override)); }; class MockRtcpIntraFrameObserver : public RtcpIntraFrameObserver { public: - MOCK_METHOD1(OnReceivedIntraFrameRequest, void(uint32_t)); + MOCK_METHOD(void, 
OnReceivedIntraFrameRequest, (uint32_t), (override)); }; class MockRtcpLossNotificationObserver : public RtcpLossNotificationObserver { public: ~MockRtcpLossNotificationObserver() override = default; - MOCK_METHOD4(OnReceivedLossNotification, - void(uint32_t ssrc, - uint16_t seq_num_of_last_decodable, - uint16_t seq_num_of_last_received, - bool decodability_flag)); + MOCK_METHOD(void, + OnReceivedLossNotification, + (uint32_t ssrc, + uint16_t seq_num_of_last_decodable, + uint16_t seq_num_of_last_received, + bool decodability_flag), + (override)); }; class MockRtcpCallbackImpl : public RtcpStatisticsCallback { public: - MOCK_METHOD2(StatisticsUpdated, void(const RtcpStatistics&, uint32_t)); + MOCK_METHOD(void, + StatisticsUpdated, + (const RtcpStatistics&, uint32_t), + (override)); }; class MockCnameCallbackImpl : public RtcpCnameCallback { public: - MOCK_METHOD2(OnCname, void(uint32_t, absl::string_view)); + MOCK_METHOD(void, OnCname, (uint32_t, absl::string_view), (override)); }; class MockReportBlockDataObserverImpl : public ReportBlockDataObserver { public: - MOCK_METHOD1(OnReportBlockDataUpdated, void(ReportBlockData)); + MOCK_METHOD(void, OnReportBlockDataUpdated, (ReportBlockData), (override)); }; class MockTransportFeedbackObserver : public TransportFeedbackObserver { public: - MOCK_METHOD1(OnAddPacket, void(const RtpPacketSendInfo&)); - MOCK_METHOD1(OnTransportFeedback, void(const rtcp::TransportFeedback&)); + MOCK_METHOD(void, OnAddPacket, (const RtpPacketSendInfo&), (override)); + MOCK_METHOD(void, + OnTransportFeedback, + (const rtcp::TransportFeedback&), + (override)); }; class MockModuleRtpRtcp : public RTCPReceiver::ModuleRtpRtcp { public: - MOCK_METHOD1(SetTmmbn, void(std::vector)); - MOCK_METHOD0(OnRequestSendReport, void()); - MOCK_METHOD1(OnReceivedNack, void(const std::vector&)); - MOCK_METHOD1(OnReceivedRtcpReportBlocks, void(const ReportBlockList&)); + MOCK_METHOD(void, SetTmmbn, (std::vector), (override)); + MOCK_METHOD(void, 
OnRequestSendReport, (), (override)); + MOCK_METHOD(void, OnReceivedNack, (const std::vector&), (override)); + MOCK_METHOD(void, + OnReceivedRtcpReportBlocks, + (const ReportBlockList&), + (override)); }; class MockVideoBitrateAllocationObserver : public VideoBitrateAllocationObserver { public: - MOCK_METHOD1(OnBitrateAllocationUpdated, - void(const VideoBitrateAllocation& allocation)); + MOCK_METHOD(void, + OnBitrateAllocationUpdated, + (const VideoBitrateAllocation& allocation), + (override)); }; // SSRC of remote peer, that sends rtcp packet to the rtcp receiver under test. @@ -131,119 +147,116 @@ constexpr int64_t kRtcpIntervalMs = 1000; } // namespace -class RtcpReceiverTest : public ::testing::Test { - protected: - RtcpReceiverTest() - : system_clock_(1335900000), - rtcp_receiver_( - [&] { - RtpRtcp::Configuration config; - config.clock = &system_clock_; - config.receiver_only = false; - config.rtcp_packet_type_counter_observer = - &packet_type_counter_observer_; - config.bandwidth_callback = &bandwidth_observer_; - config.intra_frame_callback = &intra_frame_observer_; - config.rtcp_loss_notification_observer = - &rtcp_loss_notification_observer_; - config.transport_feedback_callback = - &transport_feedback_observer_; - config.bitrate_allocation_observer = - &bitrate_allocation_observer_; - config.rtcp_report_interval_ms = kRtcpIntervalMs; - config.local_media_ssrc = kReceiverMainSsrc; - config.rtx_send_ssrc = kReceiverExtraSsrc; - return config; - }(), - &rtp_rtcp_impl_) {} - void SetUp() { - rtcp_receiver_.SetRemoteSSRC(kSenderSsrc); - } - - void InjectRtcpPacket(rtc::ArrayView raw) { - rtcp_receiver_.IncomingPacket(raw.data(), raw.size()); - } +struct ReceiverMocks { + ReceiverMocks() : clock(1335900000) {} - void InjectRtcpPacket(const rtcp::RtcpPacket& packet) { - rtc::Buffer raw = packet.Build(); - rtcp_receiver_.IncomingPacket(raw.data(), raw.size()); - } - - SimulatedClock system_clock_; + SimulatedClock clock; // Callbacks to 
packet_type_counter_observer are frequent but most of the time // are not interesting. - NiceMock packet_type_counter_observer_; - StrictMock bandwidth_observer_; - StrictMock intra_frame_observer_; - StrictMock rtcp_loss_notification_observer_; - StrictMock transport_feedback_observer_; - StrictMock bitrate_allocation_observer_; - StrictMock rtp_rtcp_impl_; - - RTCPReceiver rtcp_receiver_; + NiceMock packet_type_counter_observer; + StrictMock bandwidth_observer; + StrictMock intra_frame_observer; + StrictMock rtcp_loss_notification_observer; + StrictMock transport_feedback_observer; + StrictMock bitrate_allocation_observer; + StrictMock rtp_rtcp_impl; }; -TEST_F(RtcpReceiverTest, BrokenPacketIsIgnored) { +RtpRtcpInterface::Configuration DefaultConfiguration(ReceiverMocks* mocks) { + RtpRtcpInterface::Configuration config; + config.clock = &mocks->clock; + config.receiver_only = false; + config.rtcp_packet_type_counter_observer = + &mocks->packet_type_counter_observer; + config.bandwidth_callback = &mocks->bandwidth_observer; + config.intra_frame_callback = &mocks->intra_frame_observer; + config.rtcp_loss_notification_observer = + &mocks->rtcp_loss_notification_observer; + config.transport_feedback_callback = &mocks->transport_feedback_observer; + config.bitrate_allocation_observer = &mocks->bitrate_allocation_observer; + config.rtcp_report_interval_ms = kRtcpIntervalMs; + config.local_media_ssrc = kReceiverMainSsrc; + config.rtx_send_ssrc = kReceiverExtraSsrc; + return config; +} + +TEST(RtcpReceiverTest, BrokenPacketIsIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + const uint8_t bad_packet[] = {0, 0, 0, 0}; - EXPECT_CALL(packet_type_counter_observer_, - RtcpPacketTypesCounterUpdated(_, _)) + EXPECT_CALL(mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated) .Times(0); - InjectRtcpPacket(bad_packet); + receiver.IncomingPacket(bad_packet); } -TEST_F(RtcpReceiverTest, 
InvalidFeedbackPacketIsIgnored) { +TEST(RtcpReceiverTest, InvalidFeedbackPacketIsIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + // Too short feedback packet. const uint8_t bad_packet[] = {0x81, rtcp::Rtpfb::kPacketType, 0, 0}; // TODO(danilchap): Add expectation RtcpPacketTypesCounterUpdated // is not called once parser would be adjusted to avoid that callback on // semi-valid packets. - InjectRtcpPacket(bad_packet); + receiver.IncomingPacket(bad_packet); } -TEST_F(RtcpReceiverTest, InjectSrPacket) { - EXPECT_FALSE(rtcp_receiver_.NTP(nullptr, nullptr, nullptr, nullptr, nullptr)); +TEST(RtcpReceiverTest, InjectSrPacket) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); - int64_t now = system_clock_.TimeInMilliseconds(); + EXPECT_FALSE(receiver.NTP(nullptr, nullptr, nullptr, nullptr, nullptr)); + + int64_t now = mocks.clock.TimeInMilliseconds(); rtcp::SenderReport sr; sr.SetSenderSsrc(kSenderSsrc); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(IsEmpty())); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(IsEmpty())); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(IsEmpty(), _, now)); - InjectRtcpPacket(sr); + receiver.IncomingPacket(sr.Build()); - EXPECT_TRUE(rtcp_receiver_.NTP(nullptr, nullptr, nullptr, nullptr, nullptr)); + EXPECT_TRUE(receiver.NTP(nullptr, nullptr, nullptr, nullptr, nullptr)); } -TEST_F(RtcpReceiverTest, InjectSrPacketFromUnknownSender) { - int64_t now = system_clock_.TimeInMilliseconds(); +TEST(RtcpReceiverTest, InjectSrPacketFromUnknownSender) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + int64_t now = mocks.clock.TimeInMilliseconds(); rtcp::SenderReport sr; sr.SetSenderSsrc(kUnknownSenderSsrc); - // The parser 
will handle report blocks in Sender Report from other than his + // The parser will handle report blocks in Sender Report from other than their // expected peer. - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, now)); - InjectRtcpPacket(sr); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, + OnReceivedRtcpReceiverReport(_, _, now)); + receiver.IncomingPacket(sr.Build()); // But will not flag that he's gotten sender information. - EXPECT_FALSE(rtcp_receiver_.NTP(nullptr, nullptr, nullptr, nullptr, nullptr)); + EXPECT_FALSE(receiver.NTP(nullptr, nullptr, nullptr, nullptr, nullptr)); } -TEST_F(RtcpReceiverTest, InjectSrPacketCalculatesRTT) { - Random r(0x0123456789abcdef); - const int64_t kRttMs = r.Rand(1, 9 * 3600 * 1000); - const uint32_t kDelayNtp = r.Rand(0, 0x7fffffff); +TEST(RtcpReceiverTest, InjectSrPacketCalculatesRTT) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + const int64_t kRttMs = 123; + const uint32_t kDelayNtp = 0x4321; const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp); int64_t rtt_ms = 0; - EXPECT_EQ( - -1, rtcp_receiver_.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); + EXPECT_EQ(-1, receiver.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); uint32_t sent_ntp = - CompactNtp(TimeMicrosToNtp(system_clock_.TimeInMicroseconds())); - system_clock_.AdvanceTimeMilliseconds(kRttMs + kDelayMs); + CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds())); + mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs); rtcp::SenderReport sr; sr.SetSenderSsrc(kSenderSsrc); @@ -253,28 +266,29 @@ TEST_F(RtcpReceiverTest, InjectSrPacketCalculatesRTT) { block.SetDelayLastSr(kDelayNtp); sr.AddReportBlock(block); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, 
OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(sr); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(sr.Build()); - EXPECT_EQ( - 0, rtcp_receiver_.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); + EXPECT_EQ(0, receiver.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); EXPECT_NEAR(kRttMs, rtt_ms, 1); } -TEST_F(RtcpReceiverTest, InjectSrPacketCalculatesNegativeRTTAsOne) { - Random r(0x0123456789abcdef); - const int64_t kRttMs = r.Rand(-3600 * 1000, -1); - const uint32_t kDelayNtp = r.Rand(0, 0x7fffffff); +TEST(RtcpReceiverTest, InjectSrPacketCalculatesNegativeRTTAsOne) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + const int64_t kRttMs = -13; + const uint32_t kDelayNtp = 0x4321; const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp); int64_t rtt_ms = 0; - EXPECT_EQ( - -1, rtcp_receiver_.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); + EXPECT_EQ(-1, receiver.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); uint32_t sent_ntp = - CompactNtp(TimeMicrosToNtp(system_clock_.TimeInMicroseconds())); - system_clock_.AdvanceTimeMilliseconds(kRttMs + kDelayMs); + CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds())); + mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs); rtcp::SenderReport sr; sr.SetSenderSsrc(kSenderSsrc); @@ -284,26 +298,28 @@ TEST_F(RtcpReceiverTest, InjectSrPacketCalculatesNegativeRTTAsOne) { block.SetDelayLastSr(kDelayNtp); sr.AddReportBlock(block); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(1))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(1))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(1), _, _)); - InjectRtcpPacket(sr); + receiver.IncomingPacket(sr.Build()); - EXPECT_EQ( - 0, 
rtcp_receiver_.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); + EXPECT_EQ(0, receiver.RTT(kSenderSsrc, &rtt_ms, nullptr, nullptr, nullptr)); EXPECT_EQ(1, rtt_ms); } -TEST_F( - RtcpReceiverTest, - TwoReportBlocksWithLastOneWithoutLastSrCalculatesRttForBandwidthObserver) { +TEST(RtcpReceiverTest, + TwoReportBlocksWithLastOneWithoutLastSrCalculatesRttForBandwidthObserver) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const int64_t kRttMs = 120; const uint32_t kDelayNtp = 123000; const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp); uint32_t sent_ntp = - CompactNtp(TimeMicrosToNtp(system_clock_.TimeInMicroseconds())); - system_clock_.AdvanceTimeMilliseconds(kRttMs + kDelayMs); + CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds())); + mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs); rtcp::SenderReport sr; sr.SetSenderSsrc(kSenderSsrc); @@ -316,48 +332,60 @@ TEST_F( block.SetLastSr(0); sr.AddReportBlock(block); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(2))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(2))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(2), kRttMs, _)); - InjectRtcpPacket(sr); + receiver.IncomingPacket(sr.Build()); } -TEST_F(RtcpReceiverTest, InjectRrPacket) { - int64_t now = system_clock_.TimeInMilliseconds(); +TEST(RtcpReceiverTest, InjectRrPacket) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + int64_t now = mocks.clock.TimeInMilliseconds(); rtcp::ReceiverReport rr; rr.SetSenderSsrc(kSenderSsrc); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(IsEmpty())); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(IsEmpty())); + EXPECT_CALL(mocks.bandwidth_observer, 
OnReceivedRtcpReceiverReport(IsEmpty(), _, now)); - InjectRtcpPacket(rr); + receiver.IncomingPacket(rr.Build()); std::vector report_blocks; - rtcp_receiver_.StatisticsReceived(&report_blocks); + receiver.StatisticsReceived(&report_blocks); EXPECT_TRUE(report_blocks.empty()); } -TEST_F(RtcpReceiverTest, InjectRrPacketWithReportBlockNotToUsIgnored) { - int64_t now = system_clock_.TimeInMilliseconds(); +TEST(RtcpReceiverTest, InjectRrPacketWithReportBlockNotToUsIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + int64_t now = mocks.clock.TimeInMilliseconds(); rtcp::ReportBlock rb; rb.SetMediaSsrc(kNotToUsSsrc); rtcp::ReceiverReport rr; rr.SetSenderSsrc(kSenderSsrc); rr.AddReportBlock(rb); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(IsEmpty())); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(IsEmpty())); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(IsEmpty(), _, now)); - InjectRtcpPacket(rr); + receiver.IncomingPacket(rr.Build()); - EXPECT_EQ(0, rtcp_receiver_.LastReceivedReportBlockMs()); + EXPECT_EQ(0, receiver.LastReceivedReportBlockMs()); std::vector received_blocks; - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_TRUE(received_blocks.empty()); } -TEST_F(RtcpReceiverTest, InjectRrPacketWithOneReportBlock) { - int64_t now = system_clock_.TimeInMilliseconds(); +TEST(RtcpReceiverTest, InjectRrPacketWithOneReportBlock) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + int64_t now = mocks.clock.TimeInMilliseconds(); rtcp::ReportBlock rb; rb.SetMediaSsrc(kReceiverMainSsrc); @@ -365,19 +393,23 @@ TEST_F(RtcpReceiverTest, InjectRrPacketWithOneReportBlock) { rr.SetSenderSsrc(kSenderSsrc); rr.AddReportBlock(rb); - 
EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(1))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(1))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(1), _, now)); - InjectRtcpPacket(rr); + receiver.IncomingPacket(rr.Build()); - EXPECT_EQ(now, rtcp_receiver_.LastReceivedReportBlockMs()); + EXPECT_EQ(now, receiver.LastReceivedReportBlockMs()); std::vector received_blocks; - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_EQ(1u, received_blocks.size()); } -TEST_F(RtcpReceiverTest, InjectSrPacketWithOneReportBlock) { - int64_t now = system_clock_.TimeInMilliseconds(); +TEST(RtcpReceiverTest, InjectSrPacketWithOneReportBlock) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + int64_t now = mocks.clock.TimeInMilliseconds(); rtcp::ReportBlock rb; rb.SetMediaSsrc(kReceiverMainSsrc); @@ -385,22 +417,26 @@ TEST_F(RtcpReceiverTest, InjectSrPacketWithOneReportBlock) { sr.SetSenderSsrc(kSenderSsrc); sr.AddReportBlock(rb); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(1))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(1))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(1), _, now)); - InjectRtcpPacket(sr); + receiver.IncomingPacket(sr.Build()); - EXPECT_EQ(now, rtcp_receiver_.LastReceivedReportBlockMs()); + EXPECT_EQ(now, receiver.LastReceivedReportBlockMs()); std::vector received_blocks; - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_EQ(1u, received_blocks.size()); } -TEST_F(RtcpReceiverTest, InjectRrPacketWithTwoReportBlocks) { +TEST(RtcpReceiverTest, InjectRrPacketWithTwoReportBlocks) { const uint16_t kSequenceNumbers[] = {10, 12423}; const uint32_t 
kCumLost[] = {13, 555}; const uint8_t kFracLost[] = {20, 11}; - int64_t now = system_clock_.TimeInMilliseconds(); + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + int64_t now = mocks.clock.TimeInMilliseconds(); rtcp::ReportBlock rb1; rb1.SetMediaSsrc(kReceiverMainSsrc); @@ -417,14 +453,14 @@ TEST_F(RtcpReceiverTest, InjectRrPacketWithTwoReportBlocks) { rr1.AddReportBlock(rb1); rr1.AddReportBlock(rb2); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(2))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(2))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(2), _, now)); - InjectRtcpPacket(rr1); + receiver.IncomingPacket(rr1.Build()); - EXPECT_EQ(now, rtcp_receiver_.LastReceivedReportBlockMs()); + EXPECT_EQ(now, receiver.LastReceivedReportBlockMs()); std::vector received_blocks; - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_THAT(received_blocks, UnorderedElementsAre(Field(&RTCPReportBlock::fraction_lost, 0), Field(&RTCPReportBlock::fraction_lost, 10))); @@ -448,16 +484,16 @@ TEST_F(RtcpReceiverTest, InjectRrPacketWithTwoReportBlocks) { rr2.AddReportBlock(rb4); // Advance time to make 1st sent time and 2nd sent time different. 
- system_clock_.AdvanceTimeMilliseconds(500); - now = system_clock_.TimeInMilliseconds(); + mocks.clock.AdvanceTimeMilliseconds(500); + now = mocks.clock.TimeInMilliseconds(); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(2))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(2))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(2), _, now)); - InjectRtcpPacket(rr2); + receiver.IncomingPacket(rr2.Build()); received_blocks.clear(); - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_EQ(2u, received_blocks.size()); EXPECT_THAT( received_blocks, @@ -474,11 +510,14 @@ TEST_F(RtcpReceiverTest, InjectRrPacketWithTwoReportBlocks) { kSequenceNumbers[1])))); } -TEST_F(RtcpReceiverTest, InjectRrPacketsFromTwoRemoteSsrcs) { +TEST(RtcpReceiverTest, InjectRrPacketsFromTwoRemoteSsrcs) { const uint32_t kSenderSsrc2 = 0x20304; const uint16_t kSequenceNumbers[] = {10, 12423}; const int32_t kCumLost[] = {13, 555}; const uint8_t kFracLost[] = {20, 11}; + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ReportBlock rb1; rb1.SetMediaSsrc(kReceiverMainSsrc); @@ -489,17 +528,17 @@ TEST_F(RtcpReceiverTest, InjectRrPacketsFromTwoRemoteSsrcs) { rr1.SetSenderSsrc(kSenderSsrc); rr1.AddReportBlock(rb1); - int64_t now = system_clock_.TimeInMilliseconds(); + int64_t now = mocks.clock.TimeInMilliseconds(); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(1))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(1))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(1), _, now)); - InjectRtcpPacket(rr1); + receiver.IncomingPacket(rr1.Build()); - EXPECT_EQ(now, rtcp_receiver_.LastReceivedReportBlockMs()); + EXPECT_EQ(now, 
receiver.LastReceivedReportBlockMs()); std::vector received_blocks; - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_EQ(1u, received_blocks.size()); EXPECT_EQ(kSenderSsrc, received_blocks[0].sender_ssrc); EXPECT_EQ(kReceiverMainSsrc, received_blocks[0].source_ssrc); @@ -517,13 +556,13 @@ TEST_F(RtcpReceiverTest, InjectRrPacketsFromTwoRemoteSsrcs) { rr2.SetSenderSsrc(kSenderSsrc2); rr2.AddReportBlock(rb2); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(SizeIs(1))); - EXPECT_CALL(bandwidth_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks(SizeIs(1))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport(SizeIs(1), _, now)); - InjectRtcpPacket(rr2); + receiver.IncomingPacket(rr2.Build()); received_blocks.clear(); - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); ASSERT_EQ(2u, received_blocks.size()); EXPECT_THAT( received_blocks, @@ -542,12 +581,15 @@ TEST_F(RtcpReceiverTest, InjectRrPacketsFromTwoRemoteSsrcs) { kSequenceNumbers[1])))); } -TEST_F(RtcpReceiverTest, GetRtt) { +TEST(RtcpReceiverTest, GetRtt) { const uint32_t kSentCompactNtp = 0x1234; const uint32_t kDelayCompactNtp = 0x222; + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + // No report block received. 
- EXPECT_EQ( - -1, rtcp_receiver_.RTT(kSenderSsrc, nullptr, nullptr, nullptr, nullptr)); + EXPECT_EQ(-1, receiver.RTT(kSenderSsrc, nullptr, nullptr, nullptr, nullptr)); rtcp::ReportBlock rb; rb.SetMediaSsrc(kReceiverMainSsrc); @@ -557,69 +599,89 @@ TEST_F(RtcpReceiverTest, GetRtt) { rtcp::ReceiverReport rr; rr.SetSenderSsrc(kSenderSsrc); rr.AddReportBlock(rb); - int64_t now = system_clock_.TimeInMilliseconds(); + int64_t now = mocks.clock.TimeInMilliseconds(); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr.Build()); - EXPECT_EQ(now, rtcp_receiver_.LastReceivedReportBlockMs()); - EXPECT_EQ( - 0, rtcp_receiver_.RTT(kSenderSsrc, nullptr, nullptr, nullptr, nullptr)); + EXPECT_EQ(now, receiver.LastReceivedReportBlockMs()); + EXPECT_EQ(0, receiver.RTT(kSenderSsrc, nullptr, nullptr, nullptr, nullptr)); } // Ij packets are ignored. -TEST_F(RtcpReceiverTest, InjectIjWithNoItem) { +TEST(RtcpReceiverTest, InjectIjWithNoItem) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::ExtendedJitterReport ij; - InjectRtcpPacket(ij); + receiver.IncomingPacket(ij.Build()); } // App packets are ignored. 
-TEST_F(RtcpReceiverTest, InjectApp) { +TEST(RtcpReceiverTest, InjectApp) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::App app; app.SetSubType(30); app.SetName(0x17a177e); const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'}; app.SetData(kData, sizeof(kData)); - InjectRtcpPacket(app); + receiver.IncomingPacket(app.Build()); } -TEST_F(RtcpReceiverTest, InjectSdesWithOneChunk) { - const char kCname[] = "alice@host"; +TEST(RtcpReceiverTest, InjectSdesWithOneChunk) { + ReceiverMocks mocks; MockCnameCallbackImpl callback; - rtcp_receiver_.RegisterRtcpCnameCallback(&callback); + RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); + config.rtcp_cname_callback = &callback; + RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + const char kCname[] = "alice@host"; rtcp::Sdes sdes; sdes.AddCName(kSenderSsrc, kCname); EXPECT_CALL(callback, OnCname(kSenderSsrc, StrEq(kCname))); - InjectRtcpPacket(sdes); + receiver.IncomingPacket(sdes.Build()); char cName[RTCP_CNAME_SIZE]; - EXPECT_EQ(0, rtcp_receiver_.CNAME(kSenderSsrc, cName)); + EXPECT_EQ(0, receiver.CNAME(kSenderSsrc, cName)); EXPECT_EQ(0, strncmp(cName, kCname, RTCP_CNAME_SIZE)); } -TEST_F(RtcpReceiverTest, InjectByePacket_RemovesCname) { +TEST(RtcpReceiverTest, InjectByePacket_RemovesCname) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const char kCname[] = "alice@host"; rtcp::Sdes sdes; sdes.AddCName(kSenderSsrc, kCname); - InjectRtcpPacket(sdes); + receiver.IncomingPacket(sdes.Build()); char cName[RTCP_CNAME_SIZE]; - EXPECT_EQ(0, rtcp_receiver_.CNAME(kSenderSsrc, cName)); + EXPECT_EQ(0, receiver.CNAME(kSenderSsrc, cName)); // Verify that BYE removes the CNAME. 
rtcp::Bye bye; bye.SetSenderSsrc(kSenderSsrc); - InjectRtcpPacket(bye); + receiver.IncomingPacket(bye.Build()); - EXPECT_EQ(-1, rtcp_receiver_.CNAME(kSenderSsrc, cName)); + EXPECT_EQ(-1, receiver.CNAME(kSenderSsrc, cName)); } -TEST_F(RtcpReceiverTest, InjectByePacket_RemovesReportBlocks) { +TEST(RtcpReceiverTest, InjectByePacket_RemovesReportBlocks) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::ReportBlock rb1; rb1.SetMediaSsrc(kReceiverMainSsrc); rtcp::ReportBlock rb2; @@ -629,103 +691,131 @@ TEST_F(RtcpReceiverTest, InjectByePacket_RemovesReportBlocks) { rr.AddReportBlock(rb1); rr.AddReportBlock(rb2); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr.Build()); std::vector received_blocks; - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_EQ(2u, received_blocks.size()); // Verify that BYE removes the report blocks. rtcp::Bye bye; bye.SetSenderSsrc(kSenderSsrc); - InjectRtcpPacket(bye); + receiver.IncomingPacket(bye.Build()); received_blocks.clear(); - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_TRUE(received_blocks.empty()); // Inject packet again. 
- EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr.Build()); received_blocks.clear(); - rtcp_receiver_.StatisticsReceived(&received_blocks); + receiver.StatisticsReceived(&received_blocks); EXPECT_EQ(2u, received_blocks.size()); } -TEST_F(RtcpReceiverTest, InjectByePacketRemovesReferenceTimeInfo) { +TEST(RtcpReceiverTest, InjectByePacketRemovesReferenceTimeInfo) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); rtcp::Rrtr rrtr; rrtr.SetNtp(NtpTime(0x10203, 0x40506)); xr.SetRrtr(rrtr); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); rtcp::Bye bye; bye.SetSenderSsrc(kSenderSsrc); - InjectRtcpPacket(bye); + receiver.IncomingPacket(bye.Build()); - EXPECT_THAT(rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(), IsEmpty()); + EXPECT_THAT(receiver.ConsumeReceivedXrReferenceTimeInfo(), IsEmpty()); } -TEST_F(RtcpReceiverTest, InjectPliPacket) { +TEST(RtcpReceiverTest, InjectPliPacket) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::Pli pli; pli.SetMediaSsrc(kReceiverMainSsrc); EXPECT_CALL( - packet_type_counter_observer_, + mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated( kReceiverMainSsrc, Field(&RtcpPacketTypeCounter::pli_packets, 1))); - EXPECT_CALL(intra_frame_observer_, + EXPECT_CALL(mocks.intra_frame_observer, OnReceivedIntraFrameRequest(kReceiverMainSsrc)); - InjectRtcpPacket(pli); + receiver.IncomingPacket(pli.Build()); } -TEST_F(RtcpReceiverTest, PliPacketNotToUsIgnored) { +TEST(RtcpReceiverTest, 
PliPacketNotToUsIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::Pli pli; pli.SetMediaSsrc(kNotToUsSsrc); EXPECT_CALL( - packet_type_counter_observer_, + mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated( kReceiverMainSsrc, Field(&RtcpPacketTypeCounter::pli_packets, 0))); - EXPECT_CALL(intra_frame_observer_, OnReceivedIntraFrameRequest(_)).Times(0); - InjectRtcpPacket(pli); + EXPECT_CALL(mocks.intra_frame_observer, OnReceivedIntraFrameRequest).Times(0); + receiver.IncomingPacket(pli.Build()); } -TEST_F(RtcpReceiverTest, InjectFirPacket) { +TEST(RtcpReceiverTest, InjectFirPacket) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::Fir fir; fir.AddRequestTo(kReceiverMainSsrc, 13); EXPECT_CALL( - packet_type_counter_observer_, + mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated( kReceiverMainSsrc, Field(&RtcpPacketTypeCounter::fir_packets, 1))); - EXPECT_CALL(intra_frame_observer_, + EXPECT_CALL(mocks.intra_frame_observer, OnReceivedIntraFrameRequest(kReceiverMainSsrc)); - InjectRtcpPacket(fir); + receiver.IncomingPacket(fir.Build()); } -TEST_F(RtcpReceiverTest, FirPacketNotToUsIgnored) { +TEST(RtcpReceiverTest, FirPacketNotToUsIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::Fir fir; fir.AddRequestTo(kNotToUsSsrc, 13); - EXPECT_CALL(intra_frame_observer_, OnReceivedIntraFrameRequest(_)).Times(0); - InjectRtcpPacket(fir); + EXPECT_CALL(mocks.intra_frame_observer, OnReceivedIntraFrameRequest).Times(0); + receiver.IncomingPacket(fir.Build()); } -TEST_F(RtcpReceiverTest, ExtendedReportsPacketWithZeroReportBlocksIgnored) { +TEST(RtcpReceiverTest, ExtendedReportsPacketWithZeroReportBlocksIgnored) { + ReceiverMocks mocks; + 
RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); } -TEST_F(RtcpReceiverTest, InjectExtendedReportsReceiverReferenceTimePacket) { +TEST(RtcpReceiverTest, InjectExtendedReportsReceiverReferenceTimePacket) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const NtpTime kNtp(0x10203, 0x40506); rtcp::Rrtr rrtr; rrtr.SetNtp(kNtp); @@ -734,56 +824,68 @@ TEST_F(RtcpReceiverTest, InjectExtendedReportsReceiverReferenceTimePacket) { xr.SetRrtr(rrtr); std::vector last_xr_rtis = - rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + receiver.ConsumeReceivedXrReferenceTimeInfo(); EXPECT_THAT(last_xr_rtis, IsEmpty()); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); - last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + last_xr_rtis = receiver.ConsumeReceivedXrReferenceTimeInfo(); ASSERT_THAT(last_xr_rtis, SizeIs(1)); EXPECT_EQ(kSenderSsrc, last_xr_rtis[0].ssrc); EXPECT_EQ(CompactNtp(kNtp), last_xr_rtis[0].last_rr); EXPECT_EQ(0U, last_xr_rtis[0].delay_since_last_rr); } -TEST_F(RtcpReceiverTest, ExtendedReportsDlrrPacketNotToUsIgnored) { +TEST(RtcpReceiverTest, ExtendedReportsDlrrPacketNotToUsIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + // Allow calculate rtt using dlrr/rrtr, simulating media receiver side. 
- rtcp_receiver_.SetRtcpXrRrtrStatus(true); + receiver.SetRtcpXrRrtrStatus(true); rtcp::ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); xr.AddDlrrItem(ReceiveTimeInfo(kNotToUsSsrc, 0x12345, 0x67890)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); int64_t rtt_ms = 0; - EXPECT_FALSE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_FALSE(receiver.GetAndResetXrRrRtt(&rtt_ms)); } -TEST_F(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithSubBlock) { +TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithSubBlock) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint32_t kLastRR = 0x12345; const uint32_t kDelay = 0x23456; - rtcp_receiver_.SetRtcpXrRrtrStatus(true); + receiver.SetRtcpXrRrtrStatus(true); int64_t rtt_ms = 0; - EXPECT_FALSE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_FALSE(receiver.GetAndResetXrRrRtt(&rtt_ms)); rtcp::ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); xr.AddDlrrItem(ReceiveTimeInfo(kReceiverMainSsrc, kLastRR, kDelay)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); uint32_t compact_ntp_now = - CompactNtp(TimeMicrosToNtp(system_clock_.TimeInMicroseconds())); - EXPECT_TRUE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds())); + EXPECT_TRUE(receiver.GetAndResetXrRrRtt(&rtt_ms)); uint32_t rtt_ntp = compact_ntp_now - kDelay - kLastRR; EXPECT_NEAR(CompactNtpRttToMs(rtt_ntp), rtt_ms, 1); } -TEST_F(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithMultipleSubBlocks) { +TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithMultipleSubBlocks) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint32_t kLastRR = 0x12345; const uint32_t kDelay = 0x56789; - rtcp_receiver_.SetRtcpXrRrtrStatus(true); + receiver.SetRtcpXrRrtrStatus(true); 
rtcp::ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); @@ -791,18 +893,22 @@ TEST_F(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithMultipleSubBlocks) { xr.AddDlrrItem(ReceiveTimeInfo(kReceiverMainSsrc + 1, 0x12345, 0x67890)); xr.AddDlrrItem(ReceiveTimeInfo(kReceiverMainSsrc + 2, 0x12345, 0x67890)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); uint32_t compact_ntp_now = - CompactNtp(TimeMicrosToNtp(system_clock_.TimeInMicroseconds())); + CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds())); int64_t rtt_ms = 0; - EXPECT_TRUE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_TRUE(receiver.GetAndResetXrRrRtt(&rtt_ms)); uint32_t rtt_ntp = compact_ntp_now - kDelay - kLastRR; EXPECT_NEAR(CompactNtpRttToMs(rtt_ntp), rtt_ms, 1); } -TEST_F(RtcpReceiverTest, InjectExtendedReportsPacketWithMultipleReportBlocks) { - rtcp_receiver_.SetRtcpXrRrtrStatus(true); +TEST(RtcpReceiverTest, InjectExtendedReportsPacketWithMultipleReportBlocks) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + receiver.SetRtcpXrRrtrStatus(true); rtcp::Rrtr rrtr; rtcp::ExtendedReports xr; @@ -810,17 +916,21 @@ TEST_F(RtcpReceiverTest, InjectExtendedReportsPacketWithMultipleReportBlocks) { xr.SetRrtr(rrtr); xr.AddDlrrItem(ReceiveTimeInfo(kReceiverMainSsrc, 0x12345, 0x67890)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); std::vector last_xr_rtis = - rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + receiver.ConsumeReceivedXrReferenceTimeInfo(); EXPECT_THAT(last_xr_rtis, SizeIs(1)); int64_t rtt_ms = 0; - EXPECT_TRUE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_TRUE(receiver.GetAndResetXrRrRtt(&rtt_ms)); } -TEST_F(RtcpReceiverTest, InjectExtendedReportsPacketWithUnknownReportBlock) { - rtcp_receiver_.SetRtcpXrRrtrStatus(true); +TEST(RtcpReceiverTest, InjectExtendedReportsPacketWithUnknownReportBlock) { + ReceiverMocks mocks; + RTCPReceiver 
receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + receiver.SetRtcpXrRrtrStatus(true); rtcp::Rrtr rrtr; rtcp::ExtendedReports xr; @@ -832,71 +942,91 @@ TEST_F(RtcpReceiverTest, InjectExtendedReportsPacketWithUnknownReportBlock) { // Modify the DLRR block to have an unsupported block type, from 5 to 6. ASSERT_EQ(5, packet.data()[20]); packet.data()[20] = 6; - InjectRtcpPacket(packet); + receiver.IncomingPacket(packet); // Validate Rrtr was received and processed. std::vector last_xr_rtis = - rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + receiver.ConsumeReceivedXrReferenceTimeInfo(); EXPECT_THAT(last_xr_rtis, SizeIs(1)); // Validate Dlrr report wasn't processed. int64_t rtt_ms = 0; - EXPECT_FALSE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_FALSE(receiver.GetAndResetXrRrRtt(&rtt_ms)); } -TEST_F(RtcpReceiverTest, TestExtendedReportsRrRttInitiallyFalse) { - rtcp_receiver_.SetRtcpXrRrtrStatus(true); +TEST(RtcpReceiverTest, TestExtendedReportsRrRttInitiallyFalse) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + receiver.SetRtcpXrRrtrStatus(true); int64_t rtt_ms; - EXPECT_FALSE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_FALSE(receiver.GetAndResetXrRrRtt(&rtt_ms)); } -TEST_F(RtcpReceiverTest, RttCalculatedAfterExtendedReportsDlrr) { +TEST(RtcpReceiverTest, RttCalculatedAfterExtendedReportsDlrr) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + Random rand(0x0123456789abcdef); const int64_t kRttMs = rand.Rand(1, 9 * 3600 * 1000); const uint32_t kDelayNtp = rand.Rand(0, 0x7fffffff); const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp); - rtcp_receiver_.SetRtcpXrRrtrStatus(true); - NtpTime now = TimeMicrosToNtp(system_clock_.TimeInMicroseconds()); + receiver.SetRtcpXrRrtrStatus(true); + NtpTime now = 
TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()); uint32_t sent_ntp = CompactNtp(now); - system_clock_.AdvanceTimeMilliseconds(kRttMs + kDelayMs); + mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs); rtcp::ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); xr.AddDlrrItem(ReceiveTimeInfo(kReceiverMainSsrc, sent_ntp, kDelayNtp)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); int64_t rtt_ms = 0; - EXPECT_TRUE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_TRUE(receiver.GetAndResetXrRrRtt(&rtt_ms)); EXPECT_NEAR(kRttMs, rtt_ms, 1); } -TEST_F(RtcpReceiverTest, XrDlrrCalculatesNegativeRttAsOne) { +TEST(RtcpReceiverTest, XrDlrrCalculatesNegativeRttAsOne) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + Random rand(0x0123456789abcdef); const int64_t kRttMs = rand.Rand(-3600 * 1000, -1); const uint32_t kDelayNtp = rand.Rand(0, 0x7fffffff); const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp); - NtpTime now = TimeMicrosToNtp(system_clock_.TimeInMicroseconds()); + NtpTime now = TimeMicrosToNtp(mocks.clock.TimeInMicroseconds()); uint32_t sent_ntp = CompactNtp(now); - system_clock_.AdvanceTimeMilliseconds(kRttMs + kDelayMs); - rtcp_receiver_.SetRtcpXrRrtrStatus(true); + mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs); + receiver.SetRtcpXrRrtrStatus(true); rtcp::ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); xr.AddDlrrItem(ReceiveTimeInfo(kReceiverMainSsrc, sent_ntp, kDelayNtp)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); int64_t rtt_ms = 0; - EXPECT_TRUE(rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)); + EXPECT_TRUE(receiver.GetAndResetXrRrRtt(&rtt_ms)); EXPECT_EQ(1, rtt_ms); } -TEST_F(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfoInitiallyEmpty) { - EXPECT_THAT(rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(), IsEmpty()); +TEST(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfoInitiallyEmpty) { + ReceiverMocks mocks; 
+ RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + EXPECT_THAT(receiver.ConsumeReceivedXrReferenceTimeInfo(), IsEmpty()); } -TEST_F(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfo) { +TEST(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfo) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const NtpTime kNtp(0x10203, 0x40506); const uint32_t kNtpMid = CompactNtp(kNtp); @@ -906,20 +1036,24 @@ TEST_F(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfo) { xr.SetSenderSsrc(kSenderSsrc); xr.SetRrtr(rrtr); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); - system_clock_.AdvanceTimeMilliseconds(1000); + mocks.clock.AdvanceTimeMilliseconds(1000); std::vector last_xr_rtis = - rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + receiver.ConsumeReceivedXrReferenceTimeInfo(); ASSERT_THAT(last_xr_rtis, SizeIs(1)); EXPECT_EQ(kSenderSsrc, last_xr_rtis[0].ssrc); EXPECT_EQ(kNtpMid, last_xr_rtis[0].last_rr); EXPECT_EQ(65536U, last_xr_rtis[0].delay_since_last_rr); } -TEST_F(RtcpReceiverTest, - ReceivedRrtrFromSameSsrcUpdatesReceivedReferenceTimeInfo) { +TEST(RtcpReceiverTest, + ReceivedRrtrFromSameSsrcUpdatesReceivedReferenceTimeInfo) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const NtpTime kNtp1(0x10203, 0x40506); const NtpTime kNtp2(0x11223, 0x44556); const int64_t kDelayMs = 2000; @@ -929,23 +1063,27 @@ TEST_F(RtcpReceiverTest, rtcp::Rrtr rrtr1; rrtr1.SetNtp(kNtp1); xr.SetRrtr(rrtr1); - InjectRtcpPacket(xr); - system_clock_.AdvanceTimeMilliseconds(kDelayMs); + receiver.IncomingPacket(xr.Build()); + mocks.clock.AdvanceTimeMilliseconds(kDelayMs); rtcp::Rrtr rrtr2; rrtr2.SetNtp(kNtp2); xr.SetRrtr(rrtr2); - InjectRtcpPacket(xr); - system_clock_.AdvanceTimeMilliseconds(kDelayMs); + 
receiver.IncomingPacket(xr.Build()); + mocks.clock.AdvanceTimeMilliseconds(kDelayMs); std::vector last_xr_rtis = - rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + receiver.ConsumeReceivedXrReferenceTimeInfo(); ASSERT_THAT(last_xr_rtis, SizeIs(1)); EXPECT_EQ(kSenderSsrc, last_xr_rtis[0].ssrc); EXPECT_EQ(CompactNtp(kNtp2), last_xr_rtis[0].last_rr); EXPECT_EQ(kDelayMs * 65536 / 1000, last_xr_rtis[0].delay_since_last_rr); } -TEST_F(RtcpReceiverTest, StoresLastReceivedRrtrPerSsrc) { +TEST(RtcpReceiverTest, StoresLastReceivedRrtrPerSsrc) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const size_t kNumBufferedReports = 1; const size_t kNumReports = rtcp::ExtendedReports::kMaxNumberOfDlrrItems + kNumBufferedReports; @@ -955,12 +1093,12 @@ TEST_F(RtcpReceiverTest, StoresLastReceivedRrtrPerSsrc) { rtcp::Rrtr rrtr; rrtr.SetNtp(NtpTime(i * 200, i * 300)); xr.SetRrtr(rrtr); - InjectRtcpPacket(xr); - system_clock_.AdvanceTimeMilliseconds(1000); + receiver.IncomingPacket(xr.Build()); + mocks.clock.AdvanceTimeMilliseconds(1000); } std::vector last_xr_rtis = - rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + receiver.ConsumeReceivedXrReferenceTimeInfo(); ASSERT_THAT(last_xr_rtis, SizeIs(rtcp::ExtendedReports::kMaxNumberOfDlrrItems)); for (size_t i = 0; i < rtcp::ExtendedReports::kMaxNumberOfDlrrItems; ++i) { @@ -969,17 +1107,21 @@ TEST_F(RtcpReceiverTest, StoresLastReceivedRrtrPerSsrc) { EXPECT_EQ(65536U * (kNumReports - i), last_xr_rtis[i].delay_since_last_rr); } - last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + last_xr_rtis = receiver.ConsumeReceivedXrReferenceTimeInfo(); ASSERT_THAT(last_xr_rtis, SizeIs(kNumBufferedReports)); } -TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { +TEST(RtcpReceiverTest, ReceiveReportTimeout) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + 
receiver.SetRemoteSSRC(kSenderSsrc); + const uint16_t kSequenceNumber = 1234; - system_clock_.AdvanceTimeMilliseconds(3 * kRtcpIntervalMs); + mocks.clock.AdvanceTimeMilliseconds(3 * kRtcpIntervalMs); // No RR received, shouldn't trigger a timeout. - EXPECT_FALSE(rtcp_receiver_.RtcpRrTimeout()); - EXPECT_FALSE(rtcp_receiver_.RtcpRrSequenceNumberTimeout()); + EXPECT_FALSE(receiver.RtcpRrTimeout()); + EXPECT_FALSE(receiver.RtcpRrSequenceNumberTimeout()); // Add a RR and advance the clock just enough to not trigger a timeout. rtcp::ReportBlock rb1; @@ -989,32 +1131,32 @@ TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { rr1.SetSenderSsrc(kSenderSsrc); rr1.AddReportBlock(rb1); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr1); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr1.Build()); - system_clock_.AdvanceTimeMilliseconds(3 * kRtcpIntervalMs - 1); - EXPECT_FALSE(rtcp_receiver_.RtcpRrTimeout()); - EXPECT_FALSE(rtcp_receiver_.RtcpRrSequenceNumberTimeout()); + mocks.clock.AdvanceTimeMilliseconds(3 * kRtcpIntervalMs - 1); + EXPECT_FALSE(receiver.RtcpRrTimeout()); + EXPECT_FALSE(receiver.RtcpRrSequenceNumberTimeout()); // Add a RR with the same extended max as the previous RR to trigger a // sequence number timeout, but not a RR timeout. 
- EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr1); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr1.Build()); - system_clock_.AdvanceTimeMilliseconds(2); - EXPECT_FALSE(rtcp_receiver_.RtcpRrTimeout()); - EXPECT_TRUE(rtcp_receiver_.RtcpRrSequenceNumberTimeout()); + mocks.clock.AdvanceTimeMilliseconds(2); + EXPECT_FALSE(receiver.RtcpRrTimeout()); + EXPECT_TRUE(receiver.RtcpRrSequenceNumberTimeout()); // Advance clock enough to trigger an RR timeout too. - system_clock_.AdvanceTimeMilliseconds(3 * kRtcpIntervalMs); - EXPECT_TRUE(rtcp_receiver_.RtcpRrTimeout()); + mocks.clock.AdvanceTimeMilliseconds(3 * kRtcpIntervalMs); + EXPECT_TRUE(receiver.RtcpRrTimeout()); // We should only get one timeout even though we still haven't received a new // RR. - EXPECT_FALSE(rtcp_receiver_.RtcpRrTimeout()); - EXPECT_FALSE(rtcp_receiver_.RtcpRrSequenceNumberTimeout()); + EXPECT_FALSE(receiver.RtcpRrTimeout()); + EXPECT_FALSE(receiver.RtcpRrSequenceNumberTimeout()); // Add a new RR with increase sequence number to reset timers. 
rtcp::ReportBlock rb2; @@ -1024,130 +1166,155 @@ TEST_F(RtcpReceiverTest, ReceiveReportTimeout) { rr2.SetSenderSsrc(kSenderSsrc); rr2.AddReportBlock(rb2); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr2); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr2.Build()); - EXPECT_FALSE(rtcp_receiver_.RtcpRrTimeout()); - EXPECT_FALSE(rtcp_receiver_.RtcpRrSequenceNumberTimeout()); + EXPECT_FALSE(receiver.RtcpRrTimeout()); + EXPECT_FALSE(receiver.RtcpRrSequenceNumberTimeout()); // Verify we can get a timeout again once we've received new RR. - system_clock_.AdvanceTimeMilliseconds(2 * kRtcpIntervalMs); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr2); + mocks.clock.AdvanceTimeMilliseconds(2 * kRtcpIntervalMs); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr2.Build()); - system_clock_.AdvanceTimeMilliseconds(kRtcpIntervalMs + 1); - EXPECT_FALSE(rtcp_receiver_.RtcpRrTimeout()); - EXPECT_TRUE(rtcp_receiver_.RtcpRrSequenceNumberTimeout()); + mocks.clock.AdvanceTimeMilliseconds(kRtcpIntervalMs + 1); + EXPECT_FALSE(receiver.RtcpRrTimeout()); + EXPECT_TRUE(receiver.RtcpRrSequenceNumberTimeout()); - system_clock_.AdvanceTimeMilliseconds(2 * kRtcpIntervalMs); - EXPECT_TRUE(rtcp_receiver_.RtcpRrTimeout()); + mocks.clock.AdvanceTimeMilliseconds(2 * kRtcpIntervalMs); + EXPECT_TRUE(receiver.RtcpRrTimeout()); } -TEST_F(RtcpReceiverTest, TmmbrReceivedWithNoIncomingPacket) { - EXPECT_EQ(0u, rtcp_receiver_.TmmbrReceived().size()); +TEST(RtcpReceiverTest, TmmbrReceivedWithNoIncomingPacket) { + ReceiverMocks mocks; + RTCPReceiver 
receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + EXPECT_THAT(receiver.TmmbrReceived(), IsEmpty()); } -TEST_F(RtcpReceiverTest, TmmbrPacketAccepted) { +TEST(RtcpReceiverTest, TmmbrPacketAccepted) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint32_t kBitrateBps = 30000; - rtcp::Tmmbr tmmbr; - tmmbr.SetSenderSsrc(kSenderSsrc); - tmmbr.AddTmmbr(rtcp::TmmbItem(kReceiverMainSsrc, kBitrateBps, 0)); - rtcp::SenderReport sr; - sr.SetSenderSsrc(kSenderSsrc); + auto tmmbr = std::make_unique(); + tmmbr->SetSenderSsrc(kSenderSsrc); + tmmbr->AddTmmbr(rtcp::TmmbItem(kReceiverMainSsrc, kBitrateBps, 0)); + auto sr = std::make_unique(); + sr->SetSenderSsrc(kSenderSsrc); rtcp::CompoundPacket compound; - compound.Append(&sr); - compound.Append(&tmmbr); + compound.Append(std::move(sr)); + compound.Append(std::move(tmmbr)); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(rtp_rtcp_impl_, SetTmmbn(SizeIs(1))); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - EXPECT_CALL(bandwidth_observer_, OnReceivedEstimatedBitrate(kBitrateBps)); - InjectRtcpPacket(compound); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.rtp_rtcp_impl, SetTmmbn(SizeIs(1))); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + EXPECT_CALL(mocks.bandwidth_observer, + OnReceivedEstimatedBitrate(kBitrateBps)); + receiver.IncomingPacket(compound.Build()); - std::vector tmmbr_received = rtcp_receiver_.TmmbrReceived(); + std::vector tmmbr_received = receiver.TmmbrReceived(); ASSERT_EQ(1u, tmmbr_received.size()); EXPECT_EQ(kBitrateBps, tmmbr_received[0].bitrate_bps()); EXPECT_EQ(kSenderSsrc, tmmbr_received[0].ssrc()); } -TEST_F(RtcpReceiverTest, TmmbrPacketNotForUsIgnored) { +TEST(RtcpReceiverTest, TmmbrPacketNotForUsIgnored) { + ReceiverMocks mocks; + 
RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint32_t kBitrateBps = 30000; - rtcp::Tmmbr tmmbr; - tmmbr.SetSenderSsrc(kSenderSsrc); - tmmbr.AddTmmbr(rtcp::TmmbItem(kNotToUsSsrc, kBitrateBps, 0)); + auto tmmbr = std::make_unique(); + tmmbr->SetSenderSsrc(kSenderSsrc); + tmmbr->AddTmmbr(rtcp::TmmbItem(kNotToUsSsrc, kBitrateBps, 0)); - rtcp::SenderReport sr; - sr.SetSenderSsrc(kSenderSsrc); + auto sr = std::make_unique(); + sr->SetSenderSsrc(kSenderSsrc); rtcp::CompoundPacket compound; - compound.Append(&sr); - compound.Append(&tmmbr); + compound.Append(std::move(sr)); + compound.Append(std::move(tmmbr)); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - EXPECT_CALL(bandwidth_observer_, OnReceivedEstimatedBitrate(_)).Times(0); - InjectRtcpPacket(compound); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedEstimatedBitrate).Times(0); + receiver.IncomingPacket(compound.Build()); - EXPECT_EQ(0u, rtcp_receiver_.TmmbrReceived().size()); + EXPECT_EQ(0u, receiver.TmmbrReceived().size()); } -TEST_F(RtcpReceiverTest, TmmbrPacketZeroRateIgnored) { - rtcp::Tmmbr tmmbr; - tmmbr.SetSenderSsrc(kSenderSsrc); - tmmbr.AddTmmbr(rtcp::TmmbItem(kReceiverMainSsrc, 0, 0)); - rtcp::SenderReport sr; - sr.SetSenderSsrc(kSenderSsrc); +TEST(RtcpReceiverTest, TmmbrPacketZeroRateIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + + auto tmmbr = std::make_unique(); + tmmbr->SetSenderSsrc(kSenderSsrc); + tmmbr->AddTmmbr(rtcp::TmmbItem(kReceiverMainSsrc, 0, 0)); + auto sr = std::make_unique(); + sr->SetSenderSsrc(kSenderSsrc); rtcp::CompoundPacket compound; - compound.Append(&sr); - 
compound.Append(&tmmbr); + compound.Append(std::move(sr)); + compound.Append(std::move(tmmbr)); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - EXPECT_CALL(bandwidth_observer_, OnReceivedEstimatedBitrate(_)).Times(0); - InjectRtcpPacket(compound); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedEstimatedBitrate).Times(0); + receiver.IncomingPacket(compound.Build()); - EXPECT_EQ(0u, rtcp_receiver_.TmmbrReceived().size()); + EXPECT_EQ(0u, receiver.TmmbrReceived().size()); } -TEST_F(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) { +TEST(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + // Inject 3 packets "from" kSenderSsrc, kSenderSsrc+1, kSenderSsrc+2. // The times of arrival are starttime + 0, starttime + 5 and starttime + 10. 
for (uint32_t ssrc = kSenderSsrc; ssrc < kSenderSsrc + 3; ++ssrc) { - rtcp::Tmmbr tmmbr; - tmmbr.SetSenderSsrc(ssrc); - tmmbr.AddTmmbr(rtcp::TmmbItem(kReceiverMainSsrc, 30000, 0)); - rtcp::SenderReport sr; - sr.SetSenderSsrc(ssrc); + auto tmmbr = std::make_unique(); + tmmbr->SetSenderSsrc(ssrc); + tmmbr->AddTmmbr(rtcp::TmmbItem(kReceiverMainSsrc, 30000, 0)); + auto sr = std::make_unique(); + sr->SetSenderSsrc(ssrc); rtcp::CompoundPacket compound; - compound.Append(&sr); - compound.Append(&tmmbr); + compound.Append(std::move(sr)); + compound.Append(std::move(tmmbr)); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(rtp_rtcp_impl_, SetTmmbn(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - EXPECT_CALL(bandwidth_observer_, OnReceivedEstimatedBitrate(_)); - InjectRtcpPacket(compound); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.rtp_rtcp_impl, SetTmmbn); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedEstimatedBitrate); + receiver.IncomingPacket(compound.Build()); // 5 seconds between each packet. - system_clock_.AdvanceTimeMilliseconds(5000); + mocks.clock.AdvanceTimeMilliseconds(5000); } // It is now starttime + 15. - std::vector candidate_set = rtcp_receiver_.TmmbrReceived(); + std::vector candidate_set = receiver.TmmbrReceived(); ASSERT_EQ(3u, candidate_set.size()); EXPECT_EQ(30000U, candidate_set[0].bitrate_bps()); // We expect the timeout to be 25 seconds. Advance the clock by 12 // seconds, timing out the first packet. 
- system_clock_.AdvanceTimeMilliseconds(12000); - candidate_set = rtcp_receiver_.TmmbrReceived(); + mocks.clock.AdvanceTimeMilliseconds(12000); + candidate_set = receiver.TmmbrReceived(); ASSERT_EQ(2u, candidate_set.size()); EXPECT_EQ(kSenderSsrc + 1, candidate_set[0].ssrc()); } -TEST_F(RtcpReceiverTest, Callbacks) { +TEST(RtcpReceiverTest, Callbacks) { + ReceiverMocks mocks; MockRtcpCallbackImpl callback; - rtcp_receiver_.RegisterRtcpStatisticsCallback(&callback); + RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); + config.rtcp_statistics_callback = &callback; + RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); const uint8_t kFractionLoss = 3; const uint32_t kCumulativeLoss = 7; @@ -1173,34 +1340,19 @@ TEST_F(RtcpReceiverTest, Callbacks) { kSequenceNumber), Field(&RtcpStatistics::jitter, kJitter)), kReceiverMainSsrc)); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - InjectRtcpPacket(rr1); - - rtcp_receiver_.RegisterRtcpStatisticsCallback(nullptr); - - // Add arbitrary numbers, callback should not be called. 
- rtcp::ReportBlock rb2; - rb2.SetMediaSsrc(kReceiverMainSsrc); - rb2.SetExtHighestSeqNum(kSequenceNumber + 1); - rb2.SetFractionLost(42); - rb2.SetCumulativeLost(137); - rb2.SetJitter(4711); - - rtcp::ReceiverReport rr2; - rr2.SetSenderSsrc(kSenderSsrc); - rr2.AddReportBlock(rb2); - - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks(_)); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport(_, _, _)); - EXPECT_CALL(callback, StatisticsUpdated(_, _)).Times(0); - InjectRtcpPacket(rr2); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rr1.Build()); } -TEST_F(RtcpReceiverTest, - VerifyBlockAndTimestampObtainedFromReportBlockDataObserver) { +TEST(RtcpReceiverTest, + VerifyBlockAndTimestampObtainedFromReportBlockDataObserver) { + ReceiverMocks mocks; MockReportBlockDataObserverImpl observer; - rtcp_receiver_.SetReportBlockDataObserver(&observer); + RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); + config.report_block_data_observer = &observer; + RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); const uint8_t kFractionLoss = 3; const uint32_t kCumulativeLoss = 7; @@ -1209,9 +1361,9 @@ TEST_F(RtcpReceiverTest, const int64_t kUtcNowUs = 42; // The "report_block_timestamp_utc_us" is obtained from the global UTC clock - // (not the simulcated |system_clock_|) and requires a scoped fake clock. + // (not the simulcated |mocks.clock|) and requires a scoped fake clock. rtc::ScopedFakeClock fake_clock; - fake_clock.SetTime(Timestamp::us(kUtcNowUs)); + fake_clock.SetTime(Timestamp::Micros(kUtcNowUs)); rtcp::ReportBlock rtcp_block; rtcp_block.SetMediaSsrc(kReceiverMainSsrc); @@ -1238,22 +1390,26 @@ TEST_F(RtcpReceiverTest, // No RTT is calculated in this test. 
EXPECT_EQ(0u, report_block_data.num_rtts()); }); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport); - InjectRtcpPacket(rtcp_report); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rtcp_report.Build()); } -TEST_F(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) { +TEST(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) { + ReceiverMocks mocks; MockReportBlockDataObserverImpl observer; - rtcp_receiver_.SetReportBlockDataObserver(&observer); + RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); + config.report_block_data_observer = &observer; + RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); const int64_t kRttMs = 120; const uint32_t kDelayNtp = 123000; const int64_t kDelayMs = CompactNtpRttToMs(kDelayNtp); uint32_t sent_ntp = - CompactNtp(TimeMicrosToNtp(system_clock_.TimeInMicroseconds())); - system_clock_.AdvanceTimeMilliseconds(kRttMs + kDelayMs); + CompactNtp(TimeMicrosToNtp(mocks.clock.TimeInMicroseconds())); + mocks.clock.AdvanceTimeMilliseconds(kRttMs + kDelayMs); rtcp::SenderReport sr; sr.SetSenderSsrc(kSenderSsrc); @@ -1266,8 +1422,8 @@ TEST_F(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) { block.SetLastSr(0); sr.AddReportBlock(block); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); InSequence sequence; EXPECT_CALL(observer, OnReportBlockDataUpdated) .WillOnce([&](ReportBlockData report_block_data) { @@ -1285,10 +1441,14 @@ TEST_F(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) { report_block_data.report_block().source_ssrc); EXPECT_EQ(0u, 
report_block_data.num_rtts()); }); - InjectRtcpPacket(sr); + receiver.IncomingPacket(sr.Build()); } -TEST_F(RtcpReceiverTest, GetReportBlockDataAfterOneReportBlock) { +TEST(RtcpReceiverTest, GetReportBlockDataAfterOneReportBlock) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint16_t kSequenceNumber = 1234; rtcp::ReportBlock rtcp_block; @@ -1298,11 +1458,11 @@ TEST_F(RtcpReceiverTest, GetReportBlockDataAfterOneReportBlock) { rtcp::ReceiverReport rtcp_report; rtcp_report.SetSenderSsrc(kSenderSsrc); rtcp_report.AddReportBlock(rtcp_block); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport); - InjectRtcpPacket(rtcp_report); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rtcp_report.Build()); - auto report_block_datas = rtcp_receiver_.GetLatestReportBlockData(); + auto report_block_datas = receiver.GetLatestReportBlockData(); ASSERT_THAT(report_block_datas, SizeIs(1)); EXPECT_EQ(kReceiverMainSsrc, report_block_datas[0].report_block().source_ssrc); @@ -1311,7 +1471,11 @@ TEST_F(RtcpReceiverTest, GetReportBlockDataAfterOneReportBlock) { report_block_datas[0].report_block().extended_highest_sequence_number); } -TEST_F(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfSameSsrc) { +TEST(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfSameSsrc) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint16_t kSequenceNumber1 = 1234; const uint16_t kSequenceNumber2 = 1235; @@ -1322,9 +1486,9 @@ TEST_F(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfSameSsrc) { rtcp::ReceiverReport rtcp_report1; rtcp_report1.SetSenderSsrc(kSenderSsrc); 
rtcp_report1.AddReportBlock(rtcp_block1); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport); - InjectRtcpPacket(rtcp_report1); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rtcp_report1.Build()); // Inject a report block with an increased the sequence number for the same // source SSRC. @@ -1335,12 +1499,12 @@ TEST_F(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfSameSsrc) { rtcp::ReceiverReport rtcp_report2; rtcp_report2.SetSenderSsrc(kSenderSsrc); rtcp_report2.AddReportBlock(rtcp_block2); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport); - InjectRtcpPacket(rtcp_report2); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rtcp_report2.Build()); // Only the latest block should be returned. 
- auto report_block_datas = rtcp_receiver_.GetLatestReportBlockData(); + auto report_block_datas = receiver.GetLatestReportBlockData(); ASSERT_THAT(report_block_datas, SizeIs(1)); EXPECT_EQ(kReceiverMainSsrc, report_block_datas[0].report_block().source_ssrc); @@ -1349,8 +1513,11 @@ TEST_F(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfSameSsrc) { report_block_datas[0].report_block().extended_highest_sequence_number); } -TEST_F(RtcpReceiverTest, - GetReportBlockDataAfterTwoReportBlocksOfDifferentSsrcs) { +TEST(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfDifferentSsrcs) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint16_t kSequenceNumber1 = 1234; const uint16_t kSequenceNumber2 = 42; @@ -1361,9 +1528,9 @@ TEST_F(RtcpReceiverTest, rtcp::ReceiverReport rtcp_report1; rtcp_report1.SetSenderSsrc(kSenderSsrc); rtcp_report1.AddReportBlock(rtcp_block1); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport); - InjectRtcpPacket(rtcp_report1); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rtcp_report1.Build()); // Inject a report block for a different source SSRC. rtcp::ReportBlock rtcp_block2; @@ -1373,12 +1540,12 @@ TEST_F(RtcpReceiverTest, rtcp::ReceiverReport rtcp_report2; rtcp_report2.SetSenderSsrc(kSenderSsrc); rtcp_report2.AddReportBlock(rtcp_block2); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedRtcpReportBlocks); - EXPECT_CALL(bandwidth_observer_, OnReceivedRtcpReceiverReport); - InjectRtcpPacket(rtcp_report2); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks); + EXPECT_CALL(mocks.bandwidth_observer, OnReceivedRtcpReceiverReport); + receiver.IncomingPacket(rtcp_report2.Build()); // Both report blocks should be returned. 
- auto report_block_datas = rtcp_receiver_.GetLatestReportBlockData(); + auto report_block_datas = receiver.GetLatestReportBlockData(); ASSERT_THAT(report_block_datas, SizeIs(2)); EXPECT_EQ(kReceiverMainSsrc, report_block_datas[0].report_block().source_ssrc); @@ -1392,7 +1559,11 @@ TEST_F(RtcpReceiverTest, report_block_datas[1].report_block().extended_highest_sequence_number); } -TEST_F(RtcpReceiverTest, ReceivesTransportFeedback) { +TEST(RtcpReceiverTest, ReceivesTransportFeedback) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::TransportFeedback packet; packet.SetMediaSsrc(kReceiverMainSsrc); packet.SetSenderSsrc(kSenderSsrc); @@ -1400,38 +1571,47 @@ TEST_F(RtcpReceiverTest, ReceivesTransportFeedback) { packet.AddReceivedPacket(1, 1000); EXPECT_CALL( - transport_feedback_observer_, + mocks.transport_feedback_observer, OnTransportFeedback(AllOf( Property(&rtcp::TransportFeedback::media_ssrc, kReceiverMainSsrc), Property(&rtcp::TransportFeedback::sender_ssrc, kSenderSsrc)))); - InjectRtcpPacket(packet); + receiver.IncomingPacket(packet.Build()); } -TEST_F(RtcpReceiverTest, ReceivesRemb) { +TEST(RtcpReceiverTest, ReceivesRemb) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint32_t kBitrateBps = 500000; rtcp::Remb remb; remb.SetSenderSsrc(kSenderSsrc); remb.SetBitrateBps(kBitrateBps); - EXPECT_CALL(bandwidth_observer_, OnReceivedEstimatedBitrate(kBitrateBps)); - InjectRtcpPacket(remb); + EXPECT_CALL(mocks.bandwidth_observer, + OnReceivedEstimatedBitrate(kBitrateBps)); + receiver.IncomingPacket(remb.Build()); } -TEST_F(RtcpReceiverTest, HandlesInvalidTransportFeedback) { +TEST(RtcpReceiverTest, HandlesInvalidTransportFeedback) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + 
// Send a compound packet with a TransportFeedback followed by something else. - rtcp::TransportFeedback packet; - packet.SetMediaSsrc(kReceiverMainSsrc); - packet.SetSenderSsrc(kSenderSsrc); - packet.SetBase(1, 1000); - packet.AddReceivedPacket(1, 1000); + auto packet = std::make_unique(); + packet->SetMediaSsrc(kReceiverMainSsrc); + packet->SetSenderSsrc(kSenderSsrc); + packet->SetBase(1, 1000); + packet->AddReceivedPacket(1, 1000); static uint32_t kBitrateBps = 50000; - rtcp::Remb remb; - remb.SetSenderSsrc(kSenderSsrc); - remb.SetBitrateBps(kBitrateBps); + auto remb = std::make_unique(); + remb->SetSenderSsrc(kSenderSsrc); + remb->SetBitrateBps(kBitrateBps); rtcp::CompoundPacket compound; - compound.Append(&packet); - compound.Append(&remb); + compound.Append(std::move(packet)); + compound.Append(std::move(remb)); rtc::Buffer built_packet = compound.Build(); // Modify the TransportFeedback packet so that it is invalid. @@ -1440,13 +1620,18 @@ TEST_F(RtcpReceiverTest, HandlesInvalidTransportFeedback) { 42); // Stress no transport feedback is expected. 
- EXPECT_CALL(transport_feedback_observer_, OnTransportFeedback(_)).Times(0); + EXPECT_CALL(mocks.transport_feedback_observer, OnTransportFeedback).Times(0); // But remb should be processed and cause a callback - EXPECT_CALL(bandwidth_observer_, OnReceivedEstimatedBitrate(kBitrateBps)); - InjectRtcpPacket(built_packet); + EXPECT_CALL(mocks.bandwidth_observer, + OnReceivedEstimatedBitrate(kBitrateBps)); + receiver.IncomingPacket(built_packet); } -TEST_F(RtcpReceiverTest, Nack) { +TEST(RtcpReceiverTest, Nack) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint16_t kNackList1[] = {1, 2, 3, 5}; const uint16_t kNackList23[] = {5, 7, 30, 40, 41, 58, 59, 61, 63}; const size_t kNackListLength2 = 4; @@ -1455,47 +1640,53 @@ TEST_F(RtcpReceiverTest, Nack) { nack_set.insert(std::begin(kNackList1), std::end(kNackList1)); nack_set.insert(std::begin(kNackList23), std::end(kNackList23)); - rtcp::Nack nack1; - nack1.SetSenderSsrc(kSenderSsrc); - nack1.SetMediaSsrc(kReceiverMainSsrc); - nack1.SetPacketIds(kNackList1, arraysize(kNackList1)); + auto nack1 = std::make_unique(); + nack1->SetSenderSsrc(kSenderSsrc); + nack1->SetMediaSsrc(kReceiverMainSsrc); + nack1->SetPacketIds(kNackList1, arraysize(kNackList1)); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedNack(ElementsAreArray(kNackList1))); - EXPECT_CALL(packet_type_counter_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, + OnReceivedNack(ElementsAreArray(kNackList1))); + EXPECT_CALL(mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated( kReceiverMainSsrc, AllOf(Field(&RtcpPacketTypeCounter::nack_requests, arraysize(kNackList1)), Field(&RtcpPacketTypeCounter::unique_nack_requests, arraysize(kNackList1))))); - InjectRtcpPacket(nack1); + receiver.IncomingPacket(nack1->Build()); - rtcp::Nack nack2; - nack2.SetSenderSsrc(kSenderSsrc); - nack2.SetMediaSsrc(kReceiverMainSsrc); - nack2.SetPacketIds(kNackList23, kNackListLength2); + auto 
nack2 = std::make_unique(); + nack2->SetSenderSsrc(kSenderSsrc); + nack2->SetMediaSsrc(kReceiverMainSsrc); + nack2->SetPacketIds(kNackList23, kNackListLength2); - rtcp::Nack nack3; - nack3.SetSenderSsrc(kSenderSsrc); - nack3.SetMediaSsrc(kReceiverMainSsrc); - nack3.SetPacketIds(kNackList23 + kNackListLength2, kNackListLength3); + auto nack3 = std::make_unique(); + nack3->SetSenderSsrc(kSenderSsrc); + nack3->SetMediaSsrc(kReceiverMainSsrc); + nack3->SetPacketIds(kNackList23 + kNackListLength2, kNackListLength3); rtcp::CompoundPacket two_nacks; - two_nacks.Append(&nack2); - two_nacks.Append(&nack3); + two_nacks.Append(std::move(nack2)); + two_nacks.Append(std::move(nack3)); - EXPECT_CALL(rtp_rtcp_impl_, OnReceivedNack(ElementsAreArray(kNackList23))); - EXPECT_CALL(packet_type_counter_observer_, + EXPECT_CALL(mocks.rtp_rtcp_impl, + OnReceivedNack(ElementsAreArray(kNackList23))); + EXPECT_CALL(mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated( kReceiverMainSsrc, AllOf(Field(&RtcpPacketTypeCounter::nack_requests, arraysize(kNackList1) + arraysize(kNackList23)), Field(&RtcpPacketTypeCounter::unique_nack_requests, nack_set.size())))); - InjectRtcpPacket(two_nacks); + receiver.IncomingPacket(two_nacks.Build()); } -TEST_F(RtcpReceiverTest, NackNotForUsIgnored) { +TEST(RtcpReceiverTest, NackNotForUsIgnored) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + const uint16_t kNackList1[] = {1, 2, 3, 5}; const size_t kNackListLength1 = std::end(kNackList1) - std::begin(kNackList1); @@ -1504,22 +1695,30 @@ TEST_F(RtcpReceiverTest, NackNotForUsIgnored) { nack.SetMediaSsrc(kNotToUsSsrc); nack.SetPacketIds(kNackList1, kNackListLength1); - EXPECT_CALL(packet_type_counter_observer_, + EXPECT_CALL(mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated( _, Field(&RtcpPacketTypeCounter::nack_requests, 0))); - InjectRtcpPacket(nack); + 
receiver.IncomingPacket(nack.Build()); } -TEST_F(RtcpReceiverTest, ForceSenderReport) { +TEST(RtcpReceiverTest, ForceSenderReport) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + rtcp::RapidResyncRequest rr; rr.SetSenderSsrc(kSenderSsrc); rr.SetMediaSsrc(kReceiverMainSsrc); - EXPECT_CALL(rtp_rtcp_impl_, OnRequestSendReport()); - InjectRtcpPacket(rr); + EXPECT_CALL(mocks.rtp_rtcp_impl, OnRequestSendReport()); + receiver.IncomingPacket(rr.Build()); } -TEST_F(RtcpReceiverTest, ReceivesTargetBitrate) { +TEST(RtcpReceiverTest, ReceivesTargetBitrate) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + VideoBitrateAllocation expected_allocation; expected_allocation.SetBitrate(0, 0, 10000); expected_allocation.SetBitrate(0, 1, 20000); @@ -1537,19 +1736,23 @@ TEST_F(RtcpReceiverTest, ReceivesTargetBitrate) { // Wrong sender ssrc, target bitrate should be discarded. xr.SetSenderSsrc(kSenderSsrc + 1); - EXPECT_CALL(bitrate_allocation_observer_, + EXPECT_CALL(mocks.bitrate_allocation_observer, OnBitrateAllocationUpdated(expected_allocation)) .Times(0); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); // Set correct ssrc, callback should be called once. 
xr.SetSenderSsrc(kSenderSsrc); - EXPECT_CALL(bitrate_allocation_observer_, + EXPECT_CALL(mocks.bitrate_allocation_observer, OnBitrateAllocationUpdated(expected_allocation)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); } -TEST_F(RtcpReceiverTest, HandlesIncorrectTargetBitrate) { +TEST(RtcpReceiverTest, HandlesIncorrectTargetBitrate) { + ReceiverMocks mocks; + RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + receiver.SetRemoteSSRC(kSenderSsrc); + VideoBitrateAllocation expected_allocation; expected_allocation.SetBitrate(0, 0, 10000); @@ -1562,9 +1765,9 @@ TEST_F(RtcpReceiverTest, HandlesIncorrectTargetBitrate) { xr.SetTargetBitrate(bitrate); xr.SetSenderSsrc(kSenderSsrc); - EXPECT_CALL(bitrate_allocation_observer_, + EXPECT_CALL(mocks.bitrate_allocation_observer, OnBitrateAllocationUpdated(expected_allocation)); - InjectRtcpPacket(xr); + receiver.IncomingPacket(xr.Build()); } } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_sender.cc b/modules/rtp_rtcp/source/rtcp_sender.cc index f06d429fb9..61e6085bb1 100644 --- a/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/modules/rtp_rtcp/source/rtcp_sender.cc @@ -33,11 +33,10 @@ #include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/time_util.h" #include "modules/rtp_rtcp/source/tmmbr_help.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/trace_event.h" @@ -55,10 +54,10 @@ class PacketContainer : public rtcp::CompoundPacket { public: PacketContainer(Transport* transport, RtcEventLog* event_log) : transport_(transport), event_log_(event_log) {} - ~PacketContainer() override { - for (RtcpPacket* 
packet : appended_packets_) - delete packet; - } + + PacketContainer() = delete; + PacketContainer(const PacketContainer&) = delete; + PacketContainer& operator=(const PacketContainer&) = delete; size_t SendPackets(size_t max_payload_length) { size_t bytes_sent = 0; @@ -76,8 +75,6 @@ class PacketContainer : public rtcp::CompoundPacket { private: Transport* transport_; RtcEventLog* const event_log_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(PacketContainer); }; // Helper to put several RTCP packets into lower layer datagram RTCP packet. @@ -123,7 +120,7 @@ RTCPSender::FeedbackState::FeedbackState() last_rr_ntp_secs(0), last_rr_ntp_frac(0), remote_sr(0), - module(nullptr) {} + receiver(nullptr) {} RTCPSender::FeedbackState::FeedbackState(const FeedbackState&) = default; @@ -148,7 +145,7 @@ class RTCPSender::RtcpContext { const int64_t now_us_; }; -RTCPSender::RTCPSender(const RtpRtcp::Configuration& config) +RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config) : audio_(config.audio), ssrc_(config.local_media_ssrc), clock_(config.clock), @@ -176,11 +173,6 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config) packet_oh_send_(0), max_packet_size_(IP_PACKET_SIZE - 28), // IPv4 + UDP by default. 
- app_sub_type_(0), - app_name_(0), - app_data_(nullptr), - app_length_(0), - xr_send_receiver_reference_time_enabled_(false), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), send_video_bitrate_allocation_(false), @@ -194,7 +186,6 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config) builders_[kRtcpFir] = &RTCPSender::BuildFIR; builders_[kRtcpRemb] = &RTCPSender::BuildREMB; builders_[kRtcpBye] = &RTCPSender::BuildBYE; - builders_[kRtcpApp] = &RTCPSender::BuildAPP; builders_[kRtcpLossNotification] = &RTCPSender::BuildLossNotification; builders_[kRtcpTmmbr] = &RTCPSender::BuildTMMBR; builders_[kRtcpTmmbn] = &RTCPSender::BuildTMMBN; @@ -205,12 +196,12 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config) RTCPSender::~RTCPSender() {} RtcpMode RTCPSender::Status() const { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); return method_; } void RTCPSender::SetRTCPStatus(RtcpMode new_method) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); if (method_ == RtcpMode::kOff && new_method != RtcpMode::kOff) { // When switching on, reschedule the next packet @@ -221,7 +212,7 @@ void RTCPSender::SetRTCPStatus(RtcpMode new_method) { } bool RTCPSender::Sending() const { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); return sending_; } @@ -229,7 +220,7 @@ int32_t RTCPSender::SetSendingStatus(const FeedbackState& feedback_state, bool sending) { bool sendRTCPBye = false; { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); if (method_ != RtcpMode::kOff) { if (sending == false && sending_ == true) { @@ -249,7 +240,7 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state, uint16_t last_received_seq_num, bool decodability_flag, bool buffering_allowed) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock 
lock(&mutex_rtcp_sender_); loss_notification_state_.last_decoded_seq_num = last_decoded_seq_num; loss_notification_state_.last_received_seq_num = last_received_seq_num; @@ -262,13 +253,13 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state, return 0; } - return SendCompoundRTCP(feedback_state, - {RTCPPacketType::kRtcpLossNotification}); + return SendCompoundRTCPLocked( + feedback_state, {RTCPPacketType::kRtcpLossNotification}, 0, nullptr); } void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { RTC_CHECK_GE(bitrate_bps, 0); - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); remb_bitrate_ = bitrate_bps; remb_ssrcs_ = std::move(ssrcs); @@ -279,18 +270,18 @@ void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { } void RTCPSender::UnsetRemb() { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); // Stop sending REMB each report until it is reenabled and REMB data set. 
ConsumeFlag(kRtcpRemb, /*forced=*/true); } bool RTCPSender::TMMBR() const { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); return IsFlagPresent(RTCPPacketType::kRtcpTmmbr); } void RTCPSender::SetTMMBRStatus(bool enable) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); if (enable) { SetFlag(RTCPPacketType::kRtcpTmmbr, false); } else { @@ -299,19 +290,19 @@ void RTCPSender::SetTMMBRStatus(bool enable) { } void RTCPSender::SetMaxRtpPacketSize(size_t max_packet_size) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); max_packet_size_ = max_packet_size; } void RTCPSender::SetTimestampOffset(uint32_t timestamp_offset) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); timestamp_offset_ = timestamp_offset; } void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp, int64_t capture_time_ms, int8_t payload_type) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); // For compatibility with clients who don't set payload type correctly on all // calls. 
if (payload_type != -1) { @@ -327,12 +318,12 @@ void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp, } void RTCPSender::SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); rtp_clock_rates_khz_[payload_type] = rtp_clock_rate_hz / 1000; } void RTCPSender::SetRemoteSSRC(uint32_t ssrc) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); remote_ssrc_ = ssrc; } @@ -341,7 +332,7 @@ int32_t RTCPSender::SetCNAME(const char* c_name) { return -1; RTC_DCHECK_LT(strlen(c_name), RTCP_CNAME_SIZE); - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); cname_ = c_name; return 0; } @@ -349,7 +340,7 @@ int32_t RTCPSender::SetCNAME(const char* c_name) { int32_t RTCPSender::AddMixedCNAME(uint32_t SSRC, const char* c_name) { RTC_DCHECK(c_name); RTC_DCHECK_LT(strlen(c_name), RTCP_CNAME_SIZE); - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); // One spot is reserved for ssrc_/cname_. // TODO(danilchap): Add support for more than 30 contributes by sending // several sdes packets. 
@@ -361,7 +352,7 @@ int32_t RTCPSender::AddMixedCNAME(uint32_t SSRC, const char* c_name) { } int32_t RTCPSender::RemoveMixedCNAME(uint32_t SSRC) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); auto it = csrc_cnames_.find(SSRC); if (it == csrc_cnames_.end()) @@ -432,7 +423,7 @@ bool RTCPSender::TimeToSendRTCPReport(bool sendKeyframeBeforeRTP) const { int64_t now = clock_->TimeInMilliseconds(); - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); if (method_ == RtcpMode::kOff) return false; @@ -538,13 +529,13 @@ std::unique_ptr RTCPSender::BuildREMB( } void RTCPSender::SetTargetBitrate(unsigned int target_bitrate) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); tmmbr_send_bps_ = target_bitrate; } std::unique_ptr RTCPSender::BuildTMMBR( const RtcpContext& ctx) { - if (ctx.feedback_state_.module == nullptr) + if (ctx.feedback_state_.receiver == nullptr) return nullptr; // Before sending the TMMBR check the received TMMBN, only an owner is // allowed to raise the bitrate: @@ -554,11 +545,11 @@ std::unique_ptr RTCPSender::BuildTMMBR( // get current bounding set from RTCP receiver bool tmmbr_owner = false; - // holding critical_section_rtcp_sender_ while calling RTCPreceiver which + // holding mutex_rtcp_sender_ while calling RTCPreceiver which // will accuire criticalSectionRTCPReceiver_ is a potental deadlock but // since RTCPreceiver is not doing the reverse we should be fine std::vector candidates = - ctx.feedback_state_.module->BoundingSet(&tmmbr_owner); + ctx.feedback_state_.receiver->BoundingSet(&tmmbr_owner); if (!candidates.empty()) { for (const auto& candidate : candidates) { @@ -614,9 +605,6 @@ std::unique_ptr RTCPSender::BuildTMMBN( std::unique_ptr RTCPSender::BuildAPP(const RtcpContext& ctx) { rtcp::App* app = new rtcp::App(); app->SetSenderSsrc(ssrc_); - app->SetSubType(app_sub_type_); - app->SetName(app_name_); - 
app->SetData(app_data_.get(), app_length_); return std::unique_ptr(app); } @@ -711,53 +699,87 @@ int32_t RTCPSender::SendCompoundRTCP( size_t max_packet_size; { - rtc::CritScope lock(&critical_section_rtcp_sender_); - if (method_ == RtcpMode::kOff) { - RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled."; - return -1; + MutexLock lock(&mutex_rtcp_sender_); + auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types, + nack_size, nack_list, &container); + if (result) { + return *result; } - // Add all flags as volatile. Non volatile entries will not be overwritten. - // All new volatile flags added will be consumed by the end of this call. - SetFlags(packet_types, true); - - // Prevent sending streams to send SR before any media has been sent. - const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0); - if (!can_calculate_rtp_timestamp) { - bool consumed_sr_flag = ConsumeFlag(kRtcpSr); - bool consumed_report_flag = sending_ && ConsumeFlag(kRtcpReport); - bool sender_report = consumed_report_flag || consumed_sr_flag; - if (sender_report && AllVolatileFlagsConsumed()) { - // This call was for Sender Report and nothing else. - return 0; - } - if (sending_ && method_ == RtcpMode::kCompound) { - // Not allowed to send any RTCP packet without sender report. - return -1; - } + max_packet_size = max_packet_size_; + } + + size_t bytes_sent = container.SendPackets(max_packet_size); + return bytes_sent == 0 ? -1 : 0; +} + +int32_t RTCPSender::SendCompoundRTCPLocked( + const FeedbackState& feedback_state, + const std::set& packet_types, + int32_t nack_size, + const uint16_t* nack_list) { + PacketContainer container(transport_, event_log_); + auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types, + nack_size, nack_list, &container); + if (result) { + return *result; + } + size_t bytes_sent = container.SendPackets(max_packet_size_); + return bytes_sent == 0 ? 
-1 : 0; +} + +absl::optional RTCPSender::ComputeCompoundRTCPPacket( + const FeedbackState& feedback_state, + const std::set& packet_types, + int32_t nack_size, + const uint16_t* nack_list, + rtcp::CompoundPacket* out_packet) { + if (method_ == RtcpMode::kOff) { + RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled."; + return -1; + } + // Add all flags as volatile. Non volatile entries will not be overwritten. + // All new volatile flags added will be consumed by the end of this call. + SetFlags(packet_types, true); + + // Prevent sending streams to send SR before any media has been sent. + const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0); + if (!can_calculate_rtp_timestamp) { + bool consumed_sr_flag = ConsumeFlag(kRtcpSr); + bool consumed_report_flag = sending_ && ConsumeFlag(kRtcpReport); + bool sender_report = consumed_report_flag || consumed_sr_flag; + if (sender_report && AllVolatileFlagsConsumed()) { + // This call was for Sender Report and nothing else. + return 0; } + if (sending_ && method_ == RtcpMode::kCompound) { + // Not allowed to send any RTCP packet without sender report. + return -1; + } + } - if (packet_type_counter_.first_packet_time_ms == -1) - packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds(); + if (packet_type_counter_.first_packet_time_ms == -1) + packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds(); - // We need to send our NTP even if we haven't received any reports. - RtcpContext context(feedback_state, nack_size, nack_list, - clock_->TimeInMicroseconds()); + // We need to send our NTP even if we haven't received any reports. 
+ RtcpContext context(feedback_state, nack_size, nack_list, + clock_->TimeInMicroseconds()); - PrepareReport(feedback_state); + PrepareReport(feedback_state); - std::unique_ptr packet_bye; + std::unique_ptr packet_bye; - auto it = report_flags_.begin(); - while (it != report_flags_.end()) { - auto builder_it = builders_.find(it->type); - RTC_DCHECK(builder_it != builders_.end()) - << "Could not find builder for packet type " << it->type; - if (it->is_volatile) { - report_flags_.erase(it++); - } else { - ++it; - } + auto it = report_flags_.begin(); + while (it != report_flags_.end()) { + auto builder_it = builders_.find(it->type); + if (it->is_volatile) { + report_flags_.erase(it++); + } else { + ++it; + } + if (builder_it == builders_.end()) { + RTC_NOTREACHED() << "Could not find builder for packet type " << it->type; + } else { BuilderFunc func = builder_it->second; std::unique_ptr packet = (this->*func)(context); if (packet == nullptr) @@ -767,26 +789,23 @@ int32_t RTCPSender::SendCompoundRTCP( if (builder_it->first == kRtcpBye) { packet_bye = std::move(packet); } else { - container.Append(packet.release()); + out_packet->Append(std::move(packet)); } } + } - // Append the BYE now at the end - if (packet_bye) { - container.Append(packet_bye.release()); - } - - if (packet_type_counter_observer_ != nullptr) { - packet_type_counter_observer_->RtcpPacketTypesCounterUpdated( - remote_ssrc_, packet_type_counter_); - } + // Append the BYE now at the end + if (packet_bye) { + out_packet->Append(std::move(packet_bye)); + } - RTC_DCHECK(AllVolatileFlagsConsumed()); - max_packet_size = max_packet_size_; + if (packet_type_counter_observer_ != nullptr) { + packet_type_counter_observer_->RtcpPacketTypesCounterUpdated( + remote_ssrc_, packet_type_counter_); } - size_t bytes_sent = container.SendPackets(max_packet_size); - return bytes_sent == 0 ? 
-1 : 0; + RTC_DCHECK(AllVolatileFlagsConsumed()); + return absl::nullopt; } void RTCPSender::PrepareReport(const FeedbackState& feedback_state) { @@ -873,41 +892,22 @@ std::vector RTCPSender::CreateReportBlocks( void RTCPSender::SetCsrcs(const std::vector& csrcs) { RTC_DCHECK_LE(csrcs.size(), kRtpCsrcSize); - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); csrcs_ = csrcs; } -int32_t RTCPSender::SetApplicationSpecificData(uint8_t subType, - uint32_t name, - const uint8_t* data, - uint16_t length) { - if (length % 4 != 0) { - RTC_LOG(LS_ERROR) << "Failed to SetApplicationSpecificData."; - return -1; - } - rtc::CritScope lock(&critical_section_rtcp_sender_); - - SetFlag(kRtcpApp, true); - app_sub_type_ = subType; - app_name_ = name; - app_data_.reset(new uint8_t[length]); - app_length_ = length; - memcpy(app_data_.get(), data, length); - return 0; -} - void RTCPSender::SendRtcpXrReceiverReferenceTime(bool enable) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); xr_send_receiver_reference_time_enabled_ = enable; } bool RTCPSender::RtcpXrReceiverReferenceTime() const { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); return xr_send_receiver_reference_time_enabled_; } void RTCPSender::SetTmmbn(std::vector bounding_set) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); tmmbn_to_send_ = std::move(bounding_set); SetFlag(kRtcpTmmbn, true); } @@ -949,7 +949,7 @@ bool RTCPSender::AllVolatileFlagsConsumed() const { void RTCPSender::SetVideoBitrateAllocation( const VideoBitrateAllocation& bitrate) { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); // Check if this allocation is first ever, or has a different set of // spatial/temporal layers signaled and enabled, if so trigger an rtcp report // as soon as possible. 
@@ -997,7 +997,7 @@ void RTCPSender::SendCombinedRtcpPacket( size_t max_packet_size; uint32_t ssrc; { - rtc::CritScope lock(&critical_section_rtcp_sender_); + MutexLock lock(&mutex_rtcp_sender_); if (method_ == RtcpMode::kOff) { RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled."; return; diff --git a/modules/rtp_rtcp/source/rtcp_sender.h b/modules/rtp_rtcp/source/rtcp_sender.h index 32c1e1dbc1..22b2bb7b7c 100644 --- a/modules/rtp_rtcp/source/rtcp_sender.h +++ b/modules/rtp_rtcp/source/rtcp_sender.h @@ -23,24 +23,24 @@ #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_nack_stats.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/random.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { -class ModuleRtpRtcpImpl; +class RTCPReceiver; class RtcEventLog; -class RTCPSender { +class RTCPSender final { public: struct FeedbackState { FeedbackState(); @@ -60,162 +60,194 @@ class RTCPSender { std::vector last_xr_rtis; // Used when generating TMMBR. 
- ModuleRtpRtcpImpl* module; + RTCPReceiver* receiver; }; - explicit RTCPSender(const RtpRtcp::Configuration& config); + explicit RTCPSender(const RtpRtcpInterface::Configuration& config); + + RTCPSender() = delete; + RTCPSender(const RTCPSender&) = delete; + RTCPSender& operator=(const RTCPSender&) = delete; + virtual ~RTCPSender(); - RtcpMode Status() const; - void SetRTCPStatus(RtcpMode method); + RtcpMode Status() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); + void SetRTCPStatus(RtcpMode method) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - bool Sending() const; + bool Sending() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); int32_t SetSendingStatus(const FeedbackState& feedback_state, - bool enabled); // combine the functions + bool enabled) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); // combine the functions - int32_t SetNackStatus(bool enable); + int32_t SetNackStatus(bool enable) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetTimestampOffset(uint32_t timestamp_offset); + void SetTimestampOffset(uint32_t timestamp_offset) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); // TODO(bugs.webrtc.org/6458): Remove default parameter value when all the // depending projects are updated to correctly set payload type. 
void SetLastRtpTime(uint32_t rtp_timestamp, int64_t capture_time_ms, - int8_t payload_type = -1); + int8_t payload_type = -1) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz); + void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); uint32_t SSRC() const { return ssrc_; } - void SetRemoteSSRC(uint32_t ssrc); + void SetRemoteSSRC(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t SetCNAME(const char* cName); + int32_t SetCNAME(const char* cName) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name); + int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t RemoveMixedCNAME(uint32_t SSRC); + int32_t RemoveMixedCNAME(uint32_t SSRC) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const; + bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); int32_t SendRTCP(const FeedbackState& feedback_state, RTCPPacketType packetType, int32_t nackSize = 0, - const uint16_t* nackList = 0); + const uint16_t* nackList = 0) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); int32_t SendCompoundRTCP(const FeedbackState& feedback_state, const std::set& packetTypes, int32_t nackSize = 0, - const uint16_t* nackList = 0); + const uint16_t* nackList = nullptr) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); int32_t SendLossNotification(const FeedbackState& feedback_state, uint16_t last_decoded_seq_num, uint16_t last_received_seq_num, bool decodability_flag, - bool buffering_allowed); + bool buffering_allowed) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetRemb(int64_t bitrate_bps, std::vector ssrcs); + void SetRemb(int64_t bitrate_bps, std::vector ssrcs) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void UnsetRemb(); + void UnsetRemb() 
RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - bool TMMBR() const; + bool TMMBR() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetTMMBRStatus(bool enable); + void SetTMMBRStatus(bool enable) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetMaxRtpPacketSize(size_t max_packet_size); + void SetMaxRtpPacketSize(size_t max_packet_size) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetTmmbn(std::vector bounding_set); + void SetTmmbn(std::vector bounding_set) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t SetApplicationSpecificData(uint8_t subType, - uint32_t name, - const uint8_t* data, - uint16_t length); + void SendRtcpXrReceiverReferenceTime(bool enable) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SendRtcpXrReceiverReferenceTime(bool enable); + bool RtcpXrReceiverReferenceTime() const + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - bool RtcpXrReceiverReferenceTime() const; + void SetCsrcs(const std::vector& csrcs) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetCsrcs(const std::vector& csrcs); - - void SetTargetBitrate(unsigned int target_bitrate); - void SetVideoBitrateAllocation(const VideoBitrateAllocation& bitrate); + void SetTargetBitrate(unsigned int target_bitrate) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); + void SetVideoBitrateAllocation(const VideoBitrateAllocation& bitrate) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); void SendCombinedRtcpPacket( - std::vector> rtcp_packets); + std::vector> rtcp_packets) + RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); private: class RtcpContext; + int32_t SendCompoundRTCPLocked(const FeedbackState& feedback_state, + const std::set& packet_types, + int32_t nack_size, + const uint16_t* nack_list) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); + + absl::optional ComputeCompoundRTCPPacket( + const FeedbackState& feedback_state, + const std::set& packet_types, + int32_t nack_size, + const uint16_t* nack_list, + rtcp::CompoundPacket* out_packet) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); + // Determine which 
RTCP messages should be sent and setup flags. void PrepareReport(const FeedbackState& feedback_state) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::vector CreateReportBlocks( const FeedbackState& feedback_state) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildSR(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildRR(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildSDES(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildPLI(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildREMB(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildTMMBR(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildTMMBN(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildAPP(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildLossNotification( const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildExtendedReports( const RtcpContext& context) - 
RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildBYE(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildFIR(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); std::unique_ptr BuildNACK(const RtcpContext& context) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); private: const bool audio_; const uint32_t ssrc_; Clock* const clock_; - Random random_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - RtcpMode method_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + Random random_ RTC_GUARDED_BY(mutex_rtcp_sender_); + RtcpMode method_ RTC_GUARDED_BY(mutex_rtcp_sender_); RtcEventLog* const event_log_; Transport* const transport_; const int report_interval_ms_; - rtc::CriticalSection critical_section_rtcp_sender_; - bool sending_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + mutable Mutex mutex_rtcp_sender_; + bool sending_ RTC_GUARDED_BY(mutex_rtcp_sender_); - int64_t next_time_to_send_rtcp_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + int64_t next_time_to_send_rtcp_ RTC_GUARDED_BY(mutex_rtcp_sender_); - uint32_t timestamp_offset_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - int64_t last_frame_capture_time_ms_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + uint32_t timestamp_offset_ RTC_GUARDED_BY(mutex_rtcp_sender_); + uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_rtcp_sender_); + int64_t last_frame_capture_time_ms_ RTC_GUARDED_BY(mutex_rtcp_sender_); // SSRC that we receive on our RTP channel - uint32_t remote_ssrc_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - std::string cname_ 
RTC_GUARDED_BY(critical_section_rtcp_sender_); + uint32_t remote_ssrc_ RTC_GUARDED_BY(mutex_rtcp_sender_); + std::string cname_ RTC_GUARDED_BY(mutex_rtcp_sender_); ReceiveStatisticsProvider* receive_statistics_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + RTC_GUARDED_BY(mutex_rtcp_sender_); std::map csrc_cnames_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + RTC_GUARDED_BY(mutex_rtcp_sender_); // send CSRCs - std::vector csrcs_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + std::vector csrcs_ RTC_GUARDED_BY(mutex_rtcp_sender_); // Full intra request - uint8_t sequence_number_fir_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + uint8_t sequence_number_fir_ RTC_GUARDED_BY(mutex_rtcp_sender_); // Loss Notification struct LossNotificationState { @@ -224,59 +256,47 @@ class RTCPSender { bool decodability_flag; }; LossNotificationState loss_notification_state_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + RTC_GUARDED_BY(mutex_rtcp_sender_); // REMB - int64_t remb_bitrate_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - std::vector remb_ssrcs_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); - - std::vector tmmbn_to_send_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); - uint32_t tmmbr_send_bps_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - uint32_t packet_oh_send_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - size_t max_packet_size_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - - // APP - uint8_t app_sub_type_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - uint32_t app_name_ RTC_GUARDED_BY(critical_section_rtcp_sender_); - std::unique_ptr app_data_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); - uint16_t app_length_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + int64_t remb_bitrate_ RTC_GUARDED_BY(mutex_rtcp_sender_); + std::vector remb_ssrcs_ RTC_GUARDED_BY(mutex_rtcp_sender_); + + std::vector tmmbn_to_send_ RTC_GUARDED_BY(mutex_rtcp_sender_); + uint32_t tmmbr_send_bps_ RTC_GUARDED_BY(mutex_rtcp_sender_); + uint32_t packet_oh_send_ 
RTC_GUARDED_BY(mutex_rtcp_sender_); + size_t max_packet_size_ RTC_GUARDED_BY(mutex_rtcp_sender_); // True if sending of XR Receiver reference time report is enabled. bool xr_send_receiver_reference_time_enabled_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + RTC_GUARDED_BY(mutex_rtcp_sender_); RtcpPacketTypeCounterObserver* const packet_type_counter_observer_; - RtcpPacketTypeCounter packet_type_counter_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + RtcpPacketTypeCounter packet_type_counter_ RTC_GUARDED_BY(mutex_rtcp_sender_); - RtcpNackStats nack_stats_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + RtcpNackStats nack_stats_ RTC_GUARDED_BY(mutex_rtcp_sender_); VideoBitrateAllocation video_bitrate_allocation_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); - bool send_video_bitrate_allocation_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + RTC_GUARDED_BY(mutex_rtcp_sender_); + bool send_video_bitrate_allocation_ RTC_GUARDED_BY(mutex_rtcp_sender_); - std::map rtp_clock_rates_khz_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); - int8_t last_payload_type_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + std::map rtp_clock_rates_khz_ RTC_GUARDED_BY(mutex_rtcp_sender_); + int8_t last_payload_type_ RTC_GUARDED_BY(mutex_rtcp_sender_); absl::optional CheckAndUpdateLayerStructure( const VideoBitrateAllocation& bitrate) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); void SetFlag(uint32_t type, bool is_volatile) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); void SetFlags(const std::set& types, bool is_volatile) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); bool IsFlagPresent(uint32_t type) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); bool ConsumeFlag(uint32_t type, bool 
forced = false) - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); bool AllVolatileFlagsConsumed() const - RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); struct ReportFlag { ReportFlag(uint32_t type, bool is_volatile) : type(type), is_volatile(is_volatile) {} @@ -286,15 +306,12 @@ class RTCPSender { const bool is_volatile; }; - std::set report_flags_ - RTC_GUARDED_BY(critical_section_rtcp_sender_); + std::set report_flags_ RTC_GUARDED_BY(mutex_rtcp_sender_); typedef std::unique_ptr (RTCPSender::*BuilderFunc)( const RtcpContext&); // Map from RTCPPacketType to builder. std::map builders_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTCPSender); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc index c732a35bd0..4b6d4a3da9 100644 --- a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc @@ -18,7 +18,7 @@ #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/rate_limiter.h" #include "test/gmock.h" @@ -76,8 +76,8 @@ class RtcpSenderTest : public ::testing::Test { : clock_(1335900000), receive_statistics_(ReceiveStatistics::Create(&clock_)), retransmission_rate_limiter_(&clock_, 1000) { - RtpRtcp::Configuration configuration = GetDefaultConfig(); - rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl(configuration)); + RtpRtcpInterface::Configuration configuration = GetDefaultConfig(); + rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl2(configuration)); rtcp_sender_.reset(new RTCPSender(configuration)); rtcp_sender_->SetRemoteSSRC(kRemoteSsrc); 
rtcp_sender_->SetTimestampOffset(kStartRtpTimestamp); @@ -85,8 +85,8 @@ class RtcpSenderTest : public ::testing::Test { /*payload_type=*/0); } - RtpRtcp::Configuration GetDefaultConfig() { - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration GetDefaultConfig() { + RtpRtcpInterface::Configuration configuration; configuration.audio = false; configuration.clock = &clock_; configuration.outgoing_transport = &test_transport_; @@ -115,7 +115,7 @@ class RtcpSenderTest : public ::testing::Test { SimulatedClock clock_; TestTransport test_transport_; std::unique_ptr receive_statistics_; - std::unique_ptr rtp_rtcp_impl_; + std::unique_ptr rtp_rtcp_impl_; std::unique_ptr rtcp_sender_; RateLimiter retransmission_rate_limiter_; }; @@ -191,7 +191,7 @@ TEST_F(RtcpSenderTest, SendConsecutiveSrWithExactSlope) { } TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) { - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &test_transport_; @@ -213,7 +213,7 @@ TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) { } TEST_F(RtcpSenderTest, DoNotSendCompundBeforeRtp) { - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &test_transport_; @@ -315,47 +315,6 @@ TEST_F(RtcpSenderTest, StopSendingTriggersBye) { EXPECT_EQ(kSenderSsrc, parser()->bye()->sender_ssrc()); } -TEST_F(RtcpSenderTest, SendApp) { - const uint8_t kSubType = 30; - uint32_t name = 'n' << 24; - name += 'a' << 16; - name += 'm' << 8; - name += 'e'; - const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'}; - EXPECT_EQ(0, rtcp_sender_->SetApplicationSpecificData(kSubType, name, kData, - sizeof(kData))); - rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize); - EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpApp)); - EXPECT_EQ(1, 
parser()->app()->num_packets()); - EXPECT_EQ(kSubType, parser()->app()->sub_type()); - EXPECT_EQ(name, parser()->app()->name()); - EXPECT_EQ(sizeof(kData), parser()->app()->data_size()); - EXPECT_EQ(0, memcmp(kData, parser()->app()->data(), sizeof(kData))); -} - -TEST_F(RtcpSenderTest, SendEmptyApp) { - const uint8_t kSubType = 30; - const uint32_t kName = 0x6E616D65; - - EXPECT_EQ( - 0, rtcp_sender_->SetApplicationSpecificData(kSubType, kName, nullptr, 0)); - - rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize); - EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpApp)); - EXPECT_EQ(1, parser()->app()->num_packets()); - EXPECT_EQ(kSubType, parser()->app()->sub_type()); - EXPECT_EQ(kName, parser()->app()->name()); - EXPECT_EQ(0U, parser()->app()->data_size()); -} - -TEST_F(RtcpSenderTest, SetInvalidApplicationSpecificData) { - const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't'}; - const uint16_t kInvalidDataLength = sizeof(kData) / sizeof(kData[0]); - EXPECT_EQ(-1, - rtcp_sender_->SetApplicationSpecificData( - 0, 0, kData, kInvalidDataLength)); // Should by multiple of 4. 
-} - TEST_F(RtcpSenderTest, SendFir) { rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize); EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpFir)); @@ -445,7 +404,7 @@ TEST_F(RtcpSenderTest, RembNotIncludedBeforeSet) { } TEST_F(RtcpSenderTest, RembNotIncludedAfterUnset) { - const uint64_t kBitrate = 261011; + const int64_t kBitrate = 261011; const std::vector kSsrcs = {kRemoteSsrc, kRemoteSsrc + 1}; rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize); rtcp_sender_->SetRemb(kBitrate, kSsrcs); @@ -461,7 +420,7 @@ TEST_F(RtcpSenderTest, RembNotIncludedAfterUnset) { } TEST_F(RtcpSenderTest, SendRemb) { - const uint64_t kBitrate = 261011; + const int64_t kBitrate = 261011; const std::vector kSsrcs = {kRemoteSsrc, kRemoteSsrc + 1}; rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize); rtcp_sender_->SetRemb(kBitrate, kSsrcs); @@ -563,7 +522,7 @@ TEST_F(RtcpSenderTest, TestNoXrRrtrSentIfNotEnabled) { TEST_F(RtcpSenderTest, TestRegisterRtcpPacketTypeObserver) { RtcpPacketTypeCounterObserverImpl observer; - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &test_transport_; @@ -691,7 +650,7 @@ TEST_F(RtcpSenderTest, ByeMustBeLast) { })); // Re-configure rtcp_sender_ with mock_transport_ - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &mock_transport; diff --git a/modules/rtp_rtcp/source/rtcp_transceiver.cc b/modules/rtp_rtcp/source/rtcp_transceiver.cc index 2060b0b5e0..1de581849b 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver.cc @@ -32,9 +32,10 @@ RtcpTransceiver::~RtcpTransceiver() { if (!rtcp_transceiver_) return; auto rtcp_transceiver = std::move(rtcp_transceiver_); - task_queue_->PostTask([rtcp_transceiver = std::move(rtcp_transceiver)] { - 
rtcp_transceiver->StopPeriodicTask(); - }); + task_queue_->PostTask( + ToQueuedTask([rtcp_transceiver = std::move(rtcp_transceiver)] { + rtcp_transceiver->StopPeriodicTask(); + })); RTC_DCHECK(!rtcp_transceiver_); } @@ -54,9 +55,9 @@ void RtcpTransceiver::AddMediaReceiverRtcpObserver( MediaReceiverRtcpObserver* observer) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask([ptr, remote_ssrc, observer] { + task_queue_->PostTask(ToQueuedTask([ptr, remote_ssrc, observer] { ptr->AddMediaReceiverRtcpObserver(remote_ssrc, observer); - }); + })); } void RtcpTransceiver::RemoveMediaReceiverRtcpObserver( @@ -74,36 +75,38 @@ void RtcpTransceiver::RemoveMediaReceiverRtcpObserver( void RtcpTransceiver::SetReadyToSend(bool ready) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask([ptr, ready] { ptr->SetReadyToSend(ready); }); + task_queue_->PostTask( + ToQueuedTask([ptr, ready] { ptr->SetReadyToSend(ready); })); } void RtcpTransceiver::ReceivePacket(rtc::CopyOnWriteBuffer packet) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); int64_t now_us = rtc::TimeMicros(); - task_queue_->PostTask( - [ptr, packet, now_us] { ptr->ReceivePacket(packet, now_us); }); + task_queue_->PostTask(ToQueuedTask( + [ptr, packet, now_us] { ptr->ReceivePacket(packet, now_us); })); } void RtcpTransceiver::SendCompoundPacket() { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask([ptr] { ptr->SendCompoundPacket(); }); + task_queue_->PostTask(ToQueuedTask([ptr] { ptr->SendCompoundPacket(); })); } void RtcpTransceiver::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask([ptr, bitrate_bps, ssrcs = std::move(ssrcs)]() mutable { - ptr->SetRemb(bitrate_bps, std::move(ssrcs)); - }); + task_queue_->PostTask( + 
ToQueuedTask([ptr, bitrate_bps, ssrcs = std::move(ssrcs)]() mutable { + ptr->SetRemb(bitrate_bps, std::move(ssrcs)); + })); } void RtcpTransceiver::UnsetRemb() { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask([ptr] { ptr->UnsetRemb(); }); + task_queue_->PostTask(ToQueuedTask([ptr] { ptr->UnsetRemb(); })); } void RtcpTransceiver::SendCombinedRtcpPacket( @@ -111,25 +114,26 @@ void RtcpTransceiver::SendCombinedRtcpPacket( RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); task_queue_->PostTask( - [ptr, rtcp_packets = std::move(rtcp_packets)]() mutable { + ToQueuedTask([ptr, rtcp_packets = std::move(rtcp_packets)]() mutable { ptr->SendCombinedRtcpPacket(std::move(rtcp_packets)); - }); + })); } void RtcpTransceiver::SendNack(uint32_t ssrc, std::vector sequence_numbers) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask( + task_queue_->PostTask(ToQueuedTask( [ptr, ssrc, sequence_numbers = std::move(sequence_numbers)]() mutable { ptr->SendNack(ssrc, std::move(sequence_numbers)); - }); + })); } void RtcpTransceiver::SendPictureLossIndication(uint32_t ssrc) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask([ptr, ssrc] { ptr->SendPictureLossIndication(ssrc); }); + task_queue_->PostTask( + ToQueuedTask([ptr, ssrc] { ptr->SendPictureLossIndication(ssrc); })); } void RtcpTransceiver::SendFullIntraRequest(std::vector ssrcs) { @@ -140,9 +144,10 @@ void RtcpTransceiver::SendFullIntraRequest(std::vector ssrcs, bool new_request) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); - task_queue_->PostTask([ptr, ssrcs = std::move(ssrcs), new_request] { - ptr->SendFullIntraRequest(ssrcs, new_request); - }); + task_queue_->PostTask( + ToQueuedTask([ptr, ssrcs = std::move(ssrcs), new_request] { + ptr->SendFullIntraRequest(ssrcs, new_request); + })); } } // 
namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_transceiver.h b/modules/rtp_rtcp/source/rtcp_transceiver.h index 8bdb0bf913..2d1f37cd44 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver.h +++ b/modules/rtp_rtcp/source/rtcp_transceiver.h @@ -16,10 +16,10 @@ #include #include +#include "api/task_queue/task_queue_base.h" #include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/task_queue.h" namespace webrtc { // @@ -67,7 +67,8 @@ class RtcpTransceiver : public RtcpFeedbackSenderInterface { void SendCompoundPacket(); // (REMB) Receiver Estimated Max Bitrate. - // Includes REMB in following compound packets. + // Includes REMB in following compound packets and sends a REMB message + // immediately if 'RtcpTransceiverConfig::send_remb_on_change' is set. void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override; // Stops sending REMB in following compound packets. 
void UnsetRemb() override; @@ -92,7 +93,7 @@ class RtcpTransceiver : public RtcpFeedbackSenderInterface { void SendFullIntraRequest(std::vector ssrcs, bool new_request); private: - rtc::TaskQueue* const task_queue_; + TaskQueueBase* const task_queue_; std::unique_ptr rtcp_transceiver_; }; diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_config.cc b/modules/rtp_rtcp/source/rtcp_transceiver_config.cc index 64e034a2e5..214d8fd409 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_config.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_config.cc @@ -58,7 +58,7 @@ bool RtcpTransceiverConfig::Validate() const { << "ms between reports should be positive."; return false; } - if (schedule_periodic_compound_packets && !task_queue) { + if (schedule_periodic_compound_packets && task_queue == nullptr) { RTC_LOG(LS_ERROR) << debug_id << "missing task queue for periodic compound packets"; return false; diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_config.h b/modules/rtp_rtcp/source/rtcp_transceiver_config.h index 01330d0bc7..8a8fd6aed8 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_config.h +++ b/modules/rtp_rtcp/source/rtcp_transceiver_config.h @@ -14,9 +14,9 @@ #include #include "api/rtp_headers.h" +#include "api/task_queue/task_queue_base.h" #include "api/video/video_bitrate_allocation.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/task_queue.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -28,8 +28,8 @@ class MediaReceiverRtcpObserver { public: virtual ~MediaReceiverRtcpObserver() = default; - // All message handlers have default empty implementation. This way user needs - // to implement only those she is interested in. + // All message handlers have default empty implementation. This way users only + // need to implement the ones they are interested in. 
virtual void OnSenderReport(uint32_t sender_ssrc, NtpTime ntp_time, uint32_t rtp_time) {} @@ -65,7 +65,7 @@ struct RtcpTransceiverConfig { Transport* outgoing_transport = nullptr; // Queue for scheduling delayed tasks, e.g. sending periodic compound packets. - rtc::TaskQueue* task_queue = nullptr; + TaskQueueBase* task_queue = nullptr; // Rtcp report block generator for outgoing receiver reports. ReceiveStatisticsProvider* receive_statistics = nullptr; @@ -97,6 +97,10 @@ struct RtcpTransceiverConfig { // Estimate RTT as non-sender as described in // https://tools.ietf.org/html/rfc3611#section-4.4 and #section-4.5 bool non_sender_rtt_measurement = false; + + // Allows a REMB message to be sent immediately when SetRemb is called without + // having to wait for the next compount message to be sent. + bool send_remb_on_change = false; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc b/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc index 6a73a476c2..0102616d59 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc @@ -32,8 +32,8 @@ #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -92,9 +92,9 @@ RtcpTransceiverImpl::RtcpTransceiverImpl(const RtcpTransceiverConfig& config) : config_(config), ready_to_send_(config.initial_ready_to_send) { RTC_CHECK(config_.Validate()); if (ready_to_send_ && config_.schedule_periodic_compound_packets) { - config_.task_queue->PostTask([this] { + config_.task_queue->PostTask(ToQueuedTask([this] { SchedulePeriodicCompoundPackets(config_.initial_report_delay_ms); - }); + })); } } @@ -156,12 +156,22 @@ void RtcpTransceiverImpl::SendCompoundPacket() { void RtcpTransceiverImpl::SetRemb(int64_t bitrate_bps, 
std::vector ssrcs) { RTC_DCHECK_GE(bitrate_bps, 0); + + bool send_now = config_.send_remb_on_change && + (!remb_.has_value() || bitrate_bps != remb_->bitrate_bps()); remb_.emplace(); remb_->SetSsrcs(std::move(ssrcs)); remb_->SetBitrateBps(bitrate_bps); + remb_->SetSenderSsrc(config_.feedback_ssrc); // TODO(bugs.webrtc.org/8239): Move logic from PacketRouter for sending remb // immideately on large bitrate change when there is one RtcpTransceiver per // rtp transport. + if (send_now) { + absl::optional remb; + remb.swap(remb_); + SendImmediateFeedback(*remb); + remb.swap(remb_); + } } void RtcpTransceiverImpl::UnsetRemb() { @@ -332,11 +342,11 @@ void RtcpTransceiverImpl::ReschedulePeriodicCompoundPackets() { void RtcpTransceiverImpl::SchedulePeriodicCompoundPackets(int64_t delay_ms) { periodic_task_handle_ = RepeatingTaskHandle::DelayedStart( - config_.task_queue->Get(), TimeDelta::ms(delay_ms), [this] { + config_.task_queue, TimeDelta::Millis(delay_ms), [this] { RTC_DCHECK(config_.schedule_periodic_compound_packets); RTC_DCHECK(ready_to_send_); SendPeriodicCompoundPacket(); - return TimeDelta::ms(config_.report_period_ms); + return TimeDelta::Millis(config_.report_period_ms); }); } diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc index 7d3f092042..b7694df1e8 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc @@ -10,9 +10,12 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" +#include +#include #include #include "absl/memory/memory.h" +#include "api/rtp_headers.h" #include "api/video/video_bitrate_allocation.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h" @@ -54,15 +57,17 @@ using ::webrtc::test::RtcpPacketParser; class MockReceiveStatisticsProvider : public webrtc::ReceiveStatisticsProvider { public: - 
MOCK_METHOD1(RtcpReportBlocks, std::vector(size_t)); + MOCK_METHOD(std::vector, RtcpReportBlocks, (size_t), (override)); }; class MockMediaReceiverRtcpObserver : public webrtc::MediaReceiverRtcpObserver { public: - MOCK_METHOD3(OnSenderReport, void(uint32_t, NtpTime, uint32_t)); - MOCK_METHOD1(OnBye, void(uint32_t)); - MOCK_METHOD2(OnBitrateAllocation, - void(uint32_t, const VideoBitrateAllocation&)); + MOCK_METHOD(void, OnSenderReport, (uint32_t, NtpTime, uint32_t), (override)); + MOCK_METHOD(void, OnBye, (uint32_t), (override)); + MOCK_METHOD(void, + OnBitrateAllocation, + (uint32_t, const VideoBitrateAllocation&), + (override)); }; // Since some tests will need to wait for this period, make it small to avoid @@ -140,7 +145,7 @@ TEST(RtcpTransceiverImplTest, NeedToStopPeriodicTaskToDestroyOnTaskQueue) { FakeRtcpTransport transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config = DefaultTestConfig(); - config.task_queue = &queue; + config.task_queue = queue.Get(); config.schedule_periodic_compound_packets = true; config.outgoing_transport = &transport; auto* rtcp_transceiver = new RtcpTransceiverImpl(config); @@ -160,7 +165,7 @@ TEST(RtcpTransceiverImplTest, CanDestroyAfterTaskQueue) { FakeRtcpTransport transport; auto* queue = new TaskQueueForTest("rtcp"); RtcpTransceiverConfig config = DefaultTestConfig(); - config.task_queue = queue; + config.task_queue = queue->Get(); config.schedule_periodic_compound_packets = true; config.outgoing_transport = &transport; auto* rtcp_transceiver = new RtcpTransceiverImpl(config); @@ -177,7 +182,7 @@ TEST(RtcpTransceiverImplTest, DelaysSendingFirstCompondPacket) { RtcpTransceiverConfig config; config.outgoing_transport = &transport; config.initial_report_delay_ms = 10; - config.task_queue = &queue; + config.task_queue = queue.Get(); absl::optional rtcp_transceiver; int64_t started_ms = rtc::TimeMillis(); @@ -203,7 +208,7 @@ TEST(RtcpTransceiverImplTest, PeriodicallySendsPackets) { config.outgoing_transport = 
&transport; config.initial_report_delay_ms = 0; config.report_period_ms = kReportPeriodMs; - config.task_queue = &queue; + config.task_queue = queue.Get(); absl::optional rtcp_transceiver; int64_t time_just_before_1st_packet_ms = 0; queue.PostTask([&] { @@ -237,7 +242,7 @@ TEST(RtcpTransceiverImplTest, SendCompoundPacketDelaysPeriodicSendPackets) { config.outgoing_transport = &transport; config.initial_report_delay_ms = 0; config.report_period_ms = kReportPeriodMs; - config.task_queue = &queue; + config.task_queue = queue.Get(); absl::optional rtcp_transceiver; queue.PostTask([&] { rtcp_transceiver.emplace(config); }); @@ -323,7 +328,7 @@ TEST(RtcpTransceiverImplTest, SendsPeriodicRtcpWhenNetworkStateIsUp) { config.schedule_periodic_compound_packets = true; config.initial_ready_to_send = false; config.outgoing_transport = &transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); absl::optional rtcp_transceiver; rtcp_transceiver.emplace(config); @@ -395,7 +400,7 @@ TEST(RtcpTransceiverImplTest, SetRembIncludesRembInNextCompoundPacket) { EXPECT_EQ(rtcp_parser.remb()->num_packets(), 1); EXPECT_EQ(rtcp_parser.remb()->sender_ssrc(), kSenderSsrc); - EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 10000u); + EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 10000); EXPECT_THAT(rtcp_parser.remb()->ssrcs(), ElementsAre(54321, 64321)); } @@ -413,17 +418,61 @@ TEST(RtcpTransceiverImplTest, SetRembUpdatesValuesToSend) { rtcp_transceiver.SendCompoundPacket(); EXPECT_EQ(rtcp_parser.remb()->num_packets(), 1); - EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 10000u); + EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 10000); EXPECT_THAT(rtcp_parser.remb()->ssrcs(), ElementsAre(54321, 64321)); rtcp_transceiver.SetRemb(/*bitrate_bps=*/70000, /*ssrcs=*/{67321}); rtcp_transceiver.SendCompoundPacket(); EXPECT_EQ(rtcp_parser.remb()->num_packets(), 2); - EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 70000u); + EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 70000); 
EXPECT_THAT(rtcp_parser.remb()->ssrcs(), ElementsAre(67321)); } +TEST(RtcpTransceiverImplTest, SetRembSendsImmediatelyIfSendRembOnChange) { + const uint32_t kSenderSsrc = 12345; + RtcpTransceiverConfig config; + config.send_remb_on_change = true; + config.feedback_ssrc = kSenderSsrc; + RtcpPacketParser rtcp_parser; + RtcpParserTransport transport(&rtcp_parser); + config.outgoing_transport = &transport; + config.schedule_periodic_compound_packets = false; + RtcpTransceiverImpl rtcp_transceiver(config); + + rtcp_transceiver.SetRemb(/*bitrate_bps=*/10000, /*ssrcs=*/{}); + EXPECT_EQ(rtcp_parser.remb()->num_packets(), 1); + EXPECT_EQ(rtcp_parser.remb()->sender_ssrc(), kSenderSsrc); + EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 10000); + + // If there is no change, the packet is not sent immediately. + rtcp_transceiver.SetRemb(/*bitrate_bps=*/10000, /*ssrcs=*/{}); + EXPECT_EQ(rtcp_parser.remb()->num_packets(), 1); + + rtcp_transceiver.SetRemb(/*bitrate_bps=*/20000, /*ssrcs=*/{}); + EXPECT_EQ(rtcp_parser.remb()->num_packets(), 2); + EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 20000); +} + +TEST(RtcpTransceiverImplTest, + SetRembSendsImmediatelyIfSendRembOnChangeReducedSize) { + const uint32_t kSenderSsrc = 12345; + RtcpTransceiverConfig config; + config.send_remb_on_change = true; + config.rtcp_mode = webrtc::RtcpMode::kReducedSize; + config.feedback_ssrc = kSenderSsrc; + RtcpPacketParser rtcp_parser; + RtcpParserTransport transport(&rtcp_parser); + config.outgoing_transport = &transport; + config.schedule_periodic_compound_packets = false; + RtcpTransceiverImpl rtcp_transceiver(config); + + rtcp_transceiver.SetRemb(/*bitrate_bps=*/10000, /*ssrcs=*/{}); + EXPECT_EQ(rtcp_parser.remb()->num_packets(), 1); + EXPECT_EQ(rtcp_parser.remb()->sender_ssrc(), kSenderSsrc); + EXPECT_EQ(rtcp_parser.remb()->bitrate_bps(), 10000); +} + TEST(RtcpTransceiverImplTest, SetRembIncludesRembInAllCompoundPackets) { const uint32_t kSenderSsrc = 12345; RtcpTransceiverConfig config; @@ -630,12 
+679,12 @@ TEST(RtcpTransceiverImplTest, CallsObserverOnByeBehindSenderReport) { rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, &observer); CompoundPacket compound; - SenderReport sr; - sr.SetSenderSsrc(kRemoteSsrc); - compound.Append(&sr); - Bye bye; - bye.SetSenderSsrc(kRemoteSsrc); - compound.Append(&bye); + auto sr = std::make_unique(); + sr->SetSenderSsrc(kRemoteSsrc); + compound.Append(std::move(sr)); + auto bye = std::make_unique(); + bye->SetSenderSsrc(kRemoteSsrc); + compound.Append(std::move(bye)); auto raw_packet = compound.Build(); EXPECT_CALL(observer, OnBye(kRemoteSsrc)); @@ -651,11 +700,11 @@ TEST(RtcpTransceiverImplTest, CallsObserverOnByeBehindUnknownRtcpPacket) { CompoundPacket compound; // Use Application-Defined rtcp packet as unknown. - webrtc::rtcp::App app; - compound.Append(&app); - Bye bye; - bye.SetSenderSsrc(kRemoteSsrc); - compound.Append(&bye); + auto app = std::make_unique(); + compound.Append(std::move(app)); + auto bye = std::make_unique(); + bye->SetSenderSsrc(kRemoteSsrc); + compound.Append(std::move(bye)); auto raw_packet = compound.Build(); EXPECT_CALL(observer, OnBye(kRemoteSsrc)); @@ -734,10 +783,10 @@ TEST(RtcpTransceiverImplTest, }; receive_sender_report(kRemoteSsrc1); - clock.AdvanceTime(webrtc::TimeDelta::ms(100)); + clock.AdvanceTime(webrtc::TimeDelta::Millis(100)); receive_sender_report(kRemoteSsrc2); - clock.AdvanceTime(webrtc::TimeDelta::ms(100)); + clock.AdvanceTime(webrtc::TimeDelta::Millis(100)); // Trigger ReceiverReport back. 
rtcp_transceiver.SendCompoundPacket(); diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc index cd35cfb1da..9c181c6526 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc @@ -42,7 +42,10 @@ using ::webrtc::test::RtcpPacketParser; class MockMediaReceiverRtcpObserver : public webrtc::MediaReceiverRtcpObserver { public: - MOCK_METHOD3(OnSenderReport, void(uint32_t, webrtc::NtpTime, uint32_t)); + MOCK_METHOD(void, + OnSenderReport, + (uint32_t, webrtc::NtpTime, uint32_t), + (override)); }; constexpr int kTimeoutMs = 1000; @@ -58,7 +61,7 @@ TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOffTaskQueue) { TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); EXPECT_CALL(outgoing_transport, SendRtcp(_, _)) .WillRepeatedly(InvokeWithoutArgs([&] { EXPECT_TRUE(queue.IsCurrent()); @@ -75,7 +78,7 @@ TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOnTaskQueue) { TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); EXPECT_CALL(outgoing_transport, SendRtcp(_, _)) .WillRepeatedly(InvokeWithoutArgs([&] { EXPECT_TRUE(queue.IsCurrent()); @@ -95,7 +98,7 @@ TEST(RtcpTransceiverTest, CanBeDestroyedOnTaskQueue) { TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); auto rtcp_transceiver = std::make_unique(config); queue.PostTask([&] { @@ -111,7 +114,7 @@ TEST(RtcpTransceiverTest, CanBeDestroyedWithoutBlocking) { NiceMock outgoing_transport; RtcpTransceiverConfig config; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = 
queue.Get(); auto* rtcp_transceiver = new RtcpTransceiver(config); rtcp_transceiver->SendCompoundPacket(); @@ -132,7 +135,7 @@ TEST(RtcpTransceiverTest, MaySendPacketsAfterDestructor) { // i.e. Be careful! TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); auto* rtcp_transceiver = new RtcpTransceiver(config); rtc::Event heavy_task; @@ -163,7 +166,7 @@ TEST(RtcpTransceiverTest, DoesntPostToRtcpObserverAfterCallToRemove) { TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &null_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); RtcpTransceiver rtcp_transceiver(config); rtc::Event observer_deleted; @@ -190,7 +193,7 @@ TEST(RtcpTransceiverTest, RemoveMediaReceiverRtcpObserverIsNonBlocking) { TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &null_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); RtcpTransceiver rtcp_transceiver(config); auto observer = std::make_unique(); rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, observer.get()); @@ -214,7 +217,7 @@ TEST(RtcpTransceiverTest, CanCallSendCompoundPacketFromAnyThread) { TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); EXPECT_CALL(outgoing_transport, SendRtcp(_, _)) // If test is slow, a periodic task may send an extra packet. 
@@ -243,7 +246,7 @@ TEST(RtcpTransceiverTest, DoesntSendPacketsAfterStopCallback) { TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); config.schedule_periodic_compound_packets = true; auto rtcp_transceiver = std::make_unique(config); @@ -265,7 +268,7 @@ TEST(RtcpTransceiverTest, SendsCombinedRtcpPacketOnTaskQueue) { RtcpTransceiverConfig config; config.feedback_ssrc = kSenderSsrc; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); config.schedule_periodic_compound_packets = false; RtcpTransceiver rtcp_transceiver(config); @@ -302,7 +305,7 @@ TEST(RtcpTransceiverTest, SendFrameIntraRequestDefaultsToNewRequest) { RtcpTransceiverConfig config; config.feedback_ssrc = kSenderSsrc; config.outgoing_transport = &outgoing_transport; - config.task_queue = &queue; + config.task_queue = queue.Get(); config.schedule_periodic_compound_packets = false; RtcpTransceiver rtcp_transceiver(config); diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc index 30dedb192f..3b09818576 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc @@ -10,6 +10,7 @@ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include #include #include "api/array_view.h" @@ -23,6 +24,7 @@ namespace webrtc { constexpr RTPExtensionType RtpDependencyDescriptorExtension::kId; constexpr char RtpDependencyDescriptorExtension::kUri[]; +constexpr std::bitset<32> RtpDependencyDescriptorExtension::kAllChainsAreActive; bool RtpDependencyDescriptorExtension::Parse( rtc::ArrayView data, @@ -34,16 +36,20 @@ bool RtpDependencyDescriptorExtension::Parse( size_t RtpDependencyDescriptorExtension::ValueSize( const FrameDependencyStructure& 
structure, + std::bitset<32> active_chains, const DependencyDescriptor& descriptor) { - RtpDependencyDescriptorWriter writer(/*data=*/{}, structure, descriptor); + RtpDependencyDescriptorWriter writer(/*data=*/{}, structure, active_chains, + descriptor); return DivideRoundUp(writer.ValueSizeBits(), 8); } bool RtpDependencyDescriptorExtension::Write( rtc::ArrayView data, const FrameDependencyStructure& structure, + std::bitset<32> active_chains, const DependencyDescriptor& descriptor) { - RtpDependencyDescriptorWriter writer(data, structure, descriptor); + RtpDependencyDescriptorWriter writer(data, structure, active_chains, + descriptor); return writer.Write(); } diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h index d6e080402d..de16eeab2a 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h @@ -10,6 +10,7 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_EXTENSION_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_EXTENSION_H_ +#include #include #include "api/array_view.h" @@ -26,18 +27,32 @@ class RtpDependencyDescriptorExtension { static constexpr RTPExtensionType kId = kRtpExtensionGenericFrameDescriptor02; // TODO(bugs.webrtc.org/10342): Use uri from the spec when there is one. 
static constexpr char kUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/" - "generic-frame-descriptor-02"; + "https://aomediacodec.github.io/av1-rtp-spec/" + "#dependency-descriptor-rtp-header-extension"; static bool Parse(rtc::ArrayView data, const FrameDependencyStructure* structure, DependencyDescriptor* descriptor); static size_t ValueSize(const FrameDependencyStructure& structure, + const DependencyDescriptor& descriptor) { + return ValueSize(structure, kAllChainsAreActive, descriptor); + } + static size_t ValueSize(const FrameDependencyStructure& structure, + std::bitset<32> active_chains, const DependencyDescriptor& descriptor); static bool Write(rtc::ArrayView data, const FrameDependencyStructure& structure, + const DependencyDescriptor& descriptor) { + return Write(data, structure, kAllChainsAreActive, descriptor); + } + static bool Write(rtc::ArrayView data, + const FrameDependencyStructure& structure, + std::bitset<32> active_chains, const DependencyDescriptor& descriptor); + + private: + static constexpr std::bitset<32> kAllChainsAreActive = ~uint32_t{0}; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc new file mode 100644 index 0000000000..11d809693c --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" + +#include "test/gmock.h" + +namespace webrtc { +namespace { + +using ::testing::Each; + +TEST(RtpDependencyDescriptorExtensionTest, Writer3BytesForPerfectTemplate) { + uint8_t buffer[3]; + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.templates = { + FrameDependencyTemplate().Dtis("SR").FrameDiffs({1}).ChainDiffs({2, 2})}; + DependencyDescriptor descriptor; + descriptor.frame_dependencies = structure.templates[0]; + + EXPECT_EQ(RtpDependencyDescriptorExtension::ValueSize(structure, descriptor), + 3u); + EXPECT_TRUE( + RtpDependencyDescriptorExtension::Write(buffer, structure, descriptor)); +} + +TEST(RtpDependencyDescriptorExtensionTest, WriteZeroInUnusedBits) { + uint8_t buffer[32]; + std::memset(buffer, 0xff, sizeof(buffer)); + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.templates = { + FrameDependencyTemplate().Dtis("SR").FrameDiffs({1}).ChainDiffs({1, 1})}; + DependencyDescriptor descriptor; + descriptor.frame_dependencies = structure.templates[0]; + descriptor.frame_dependencies.frame_diffs = {2}; + + // To test unused bytes are zeroed, need a buffer large enough. + size_t value_size = + RtpDependencyDescriptorExtension::ValueSize(structure, descriptor); + ASSERT_LT(value_size, sizeof(buffer)); + + ASSERT_TRUE( + RtpDependencyDescriptorExtension::Write(buffer, structure, descriptor)); + + const uint8_t* unused_bytes = buffer + value_size; + size_t num_unused_bytes = buffer + sizeof(buffer) - unused_bytes; + // Check remaining bytes are zeroed. 
+ EXPECT_THAT(rtc::MakeArrayView(unused_bytes, num_unused_bytes), Each(0)); +} + +// In practice chain diff for inactive chain will grow uboundly because no +// frames are produced for it, that shouldn't block writing the extension. +TEST(RtpDependencyDescriptorExtensionTest, + TemplateMatchingSkipsInactiveChains) { + uint8_t buffer[3]; + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.templates = { + FrameDependencyTemplate().Dtis("SR").ChainDiffs({2, 2})}; + DependencyDescriptor descriptor; + descriptor.frame_dependencies = structure.templates[0]; + + // Set only 1st chain as active. + std::bitset<32> active_chains = 0b01; + descriptor.frame_dependencies.chain_diffs[1] = 1000; + + // Expect perfect template match since the only difference is for an inactive + // chain. Pefect template match consumes 3 bytes. + EXPECT_EQ(RtpDependencyDescriptorExtension::ValueSize( + structure, active_chains, descriptor), + 3u); + EXPECT_TRUE(RtpDependencyDescriptorExtension::Write( + buffer, structure, active_chains, descriptor)); +} + +TEST(RtpDependencyDescriptorExtensionTest, + AcceptsInvalidChainDiffForInactiveChainWhenChainsAreCustom) { + uint8_t buffer[256]; + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.templates = { + FrameDependencyTemplate().Dtis("SR").ChainDiffs({2, 2})}; + DependencyDescriptor descriptor; + descriptor.frame_dependencies = structure.templates[0]; + + // Set only 1st chain as active. + std::bitset<32> active_chains = 0b01; + // Set chain_diff different to the template to make it custom. + descriptor.frame_dependencies.chain_diffs[0] = 1; + // Set chain diff for inactive chain beyound limit of 255 max chain diff. + descriptor.frame_dependencies.chain_diffs[1] = 1000; + + // Because chains are custom, should use more than base 3 bytes. 
+ EXPECT_GT(RtpDependencyDescriptorExtension::ValueSize( + structure, active_chains, descriptor), + 3u); + EXPECT_TRUE(RtpDependencyDescriptorExtension::Write( + buffer, structure, active_chains, descriptor)); +} + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc index 07b6a3b3c3..cba594dc6f 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc @@ -18,13 +18,6 @@ #include "rtc_base/checks.h" namespace webrtc { -namespace { - -constexpr int kMaxTemporalId = 7; -constexpr int kMaxSpatialId = 3; -constexpr int kMaxTemplates = 64; - -} // namespace RtpDependencyDescriptorReader::RtpDependencyDescriptorReader( rtc::ArrayView raw_data, @@ -95,7 +88,7 @@ void RtpDependencyDescriptorReader::ReadTemplateLayers() { int spatial_id = 0; NextLayerIdc next_layer_idc; do { - if (templates.size() == kMaxTemplates) { + if (templates.size() == DependencyDescriptor::kMaxTemplates) { parsing_failed_ = true; break; } @@ -107,14 +100,14 @@ void RtpDependencyDescriptorReader::ReadTemplateLayers() { next_layer_idc = static_cast(ReadBits(2)); if (next_layer_idc == kNextTemporalLayer) { temporal_id++; - if (temporal_id > kMaxTemporalId) { + if (temporal_id >= DependencyDescriptor::kMaxTemporalIds) { parsing_failed_ = true; break; } } else if (next_layer_idc == kNextSpatialLayer) { temporal_id = 0; spatial_id++; - if (spatial_id > kMaxSpatialId) { + if (spatial_id >= DependencyDescriptor::kMaxSpatialIds) { parsing_failed_ = true; break; } @@ -153,7 +146,7 @@ void RtpDependencyDescriptorReader::ReadTemplateChains() { if (structure->num_chains == 0) return; for (int i = 0; i < structure->num_decode_targets; ++i) { - uint32_t protected_by_chain = ReadNonSymmetric(structure->num_chains + 1); + uint32_t protected_by_chain = ReadNonSymmetric(structure->num_chains); 
structure->decode_target_protected_by_chain.push_back(protected_by_chain); } for (FrameDependencyTemplate& frame_template : structure->templates) { @@ -198,9 +191,10 @@ void RtpDependencyDescriptorReader::ReadExtendedFields() { } void RtpDependencyDescriptorReader::ReadFrameDependencyDefinition() { - size_t template_index = (frame_dependency_template_id_ + kMaxTemplates - - structure_->structure_id) % - kMaxTemplates; + size_t template_index = + (frame_dependency_template_id_ + DependencyDescriptor::kMaxTemplates - + structure_->structure_id) % + DependencyDescriptor::kMaxTemplates; if (template_index >= structure_->templates.size()) { parsing_failed_ = true; diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc index 9e1a425666..25d221253b 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc @@ -9,6 +9,7 @@ */ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h" +#include #include #include #include @@ -23,8 +24,6 @@ namespace webrtc { namespace { -constexpr int kMaxTemplates = 64; - enum class NextLayerIdc : uint64_t { kSameLayer = 0, kNextTemporal = 1, @@ -35,12 +34,8 @@ enum class NextLayerIdc : uint64_t { NextLayerIdc GetNextLayerIdc(const FrameDependencyTemplate& previous, const FrameDependencyTemplate& next) { - // TODO(danilchap): Move these constants to header shared between reader and - // writer. 
- static constexpr int kMaxSpatialId = 3; - static constexpr int kMaxTemporalId = 7; - RTC_DCHECK_LE(next.spatial_id, kMaxSpatialId); - RTC_DCHECK_LE(next.temporal_id, kMaxTemporalId); + RTC_DCHECK_LT(next.spatial_id, DependencyDescriptor::kMaxSpatialIds); + RTC_DCHECK_LT(next.temporal_id, DependencyDescriptor::kMaxTemporalIds); if (next.spatial_id == previous.spatial_id && next.temporal_id == previous.temporal_id) { @@ -61,9 +56,11 @@ NextLayerIdc GetNextLayerIdc(const FrameDependencyTemplate& previous, RtpDependencyDescriptorWriter::RtpDependencyDescriptorWriter( rtc::ArrayView data, const FrameDependencyStructure& structure, + std::bitset<32> active_chains, const DependencyDescriptor& descriptor) : descriptor_(descriptor), structure_(structure), + active_chains_(active_chains), bit_writer_(data.data(), data.size()) { FindBestTemplate(); } @@ -74,6 +71,14 @@ bool RtpDependencyDescriptorWriter::Write() { WriteExtendedFields(); WriteFrameDependencyDefinition(); } + size_t remaining_bits = bit_writer_.RemainingBitCount(); + // Zero remaining memory to avoid leaving it uninitialized. 
+ if (remaining_bits % 64 != 0) { + WriteBits(/*val=*/0, remaining_bits % 64); + } + for (size_t i = 0; i < remaining_bits / 64; ++i) { + WriteBits(/*val=*/0, 64); + } return !build_failed_; } @@ -106,8 +111,8 @@ int RtpDependencyDescriptorWriter::StructureSizeBits() const { structure_.num_chains, structure_.num_decode_targets + 1); if (structure_.num_chains > 0) { for (int protected_by : structure_.decode_target_protected_by_chain) { - bits += rtc::BitBufferWriter::SizeNonSymmetricBits( - protected_by, structure_.num_chains + 1); + bits += rtc::BitBufferWriter::SizeNonSymmetricBits(protected_by, + structure_.num_chains); } bits += 4 * structure_.templates.size() * structure_.num_chains; } @@ -126,8 +131,14 @@ RtpDependencyDescriptorWriter::CalculateMatch( result.need_custom_dtis = descriptor_.frame_dependencies.decode_target_indications != frame_template->decode_target_indications; - result.need_custom_chains = - descriptor_.frame_dependencies.chain_diffs != frame_template->chain_diffs; + result.need_custom_chains = false; + for (int i = 0; i < structure_.num_chains; ++i) { + if (active_chains_[i] && descriptor_.frame_dependencies.chain_diffs[i] != + frame_template->chain_diffs[i]) { + result.need_custom_chains = true; + break; + } + } result.extra_size_bits = 0; if (result.need_custom_fdiffs) { @@ -193,7 +204,7 @@ bool RtpDependencyDescriptorWriter::HasExtendedFields() const { uint64_t RtpDependencyDescriptorWriter::TemplateId() const { return (best_template_.template_position - structure_.templates.begin() + structure_.structure_id) % - kMaxTemplates; + DependencyDescriptor::kMaxTemplates; } void RtpDependencyDescriptorWriter::WriteBits(uint64_t val, size_t bit_count) { @@ -209,9 +220,10 @@ void RtpDependencyDescriptorWriter::WriteNonSymmetric(uint32_t value, void RtpDependencyDescriptorWriter::WriteTemplateDependencyStructure() { RTC_DCHECK_GE(structure_.structure_id, 0); - RTC_DCHECK_LT(structure_.structure_id, kMaxTemplates); + 
RTC_DCHECK_LT(structure_.structure_id, DependencyDescriptor::kMaxTemplates); RTC_DCHECK_GT(structure_.num_decode_targets, 0); - RTC_DCHECK_LE(structure_.num_decode_targets, 1 << 5); + RTC_DCHECK_LE(structure_.num_decode_targets, + DependencyDescriptor::kMaxDecodeTargets); WriteBits(structure_.structure_id, 6); WriteBits(structure_.num_decode_targets - 1, 5); @@ -228,7 +240,7 @@ void RtpDependencyDescriptorWriter::WriteTemplateDependencyStructure() { void RtpDependencyDescriptorWriter::WriteTemplateLayers() { const auto& templates = structure_.templates; RTC_DCHECK(!templates.empty()); - RTC_DCHECK_LE(templates.size(), kMaxTemplates); + RTC_DCHECK_LE(templates.size(), DependencyDescriptor::kMaxTemplates); RTC_DCHECK_EQ(templates[0].spatial_id, 0); RTC_DCHECK_EQ(templates[0].temporal_id, 0); @@ -276,8 +288,8 @@ void RtpDependencyDescriptorWriter::WriteTemplateChains() { structure_.num_decode_targets); for (int protected_by : structure_.decode_target_protected_by_chain) { RTC_DCHECK_GE(protected_by, 0); - RTC_DCHECK_LE(protected_by, structure_.num_chains); - WriteNonSymmetric(protected_by, structure_.num_chains + 1); + RTC_DCHECK_LT(protected_by, structure_.num_chains); + WriteNonSymmetric(protected_by, structure_.num_chains); } for (const auto& frame_template : structure_.templates) { RTC_DCHECK_EQ(frame_template.chain_diffs.size(), structure_.num_chains); @@ -363,7 +375,9 @@ void RtpDependencyDescriptorWriter::WriteFrameFdiffs() { void RtpDependencyDescriptorWriter::WriteFrameChains() { RTC_DCHECK_EQ(descriptor_.frame_dependencies.chain_diffs.size(), structure_.num_chains); - for (int chain_diff : descriptor_.frame_dependencies.chain_diffs) { + for (int i = 0; i < structure_.num_chains; ++i) { + int chain_diff = + active_chains_[i] ? 
descriptor_.frame_dependencies.chain_diffs[i] : 0; RTC_DCHECK_GE(chain_diff, 0); RTC_DCHECK_LT(chain_diff, 1 << 8); WriteBits(chain_diff, 8); diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h index 5a823b6e86..99fefecea6 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h @@ -10,6 +10,7 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_WRITER_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_WRITER_H_ +#include #include #include #include @@ -25,6 +26,7 @@ class RtpDependencyDescriptorWriter { // |descriptor| matches the |structure|. RtpDependencyDescriptorWriter(rtc::ArrayView data, const FrameDependencyStructure& structure, + std::bitset<32> active_chains, const DependencyDescriptor& descriptor); // Serializes DependencyDescriptor rtp header extension. @@ -77,6 +79,7 @@ class RtpDependencyDescriptorWriter { bool build_failed_ = false; const DependencyDescriptor& descriptor_; const FrameDependencyStructure& structure_; + std::bitset<32> active_chains_; rtc::BitBufferWriter bit_writer_; TemplateMatch best_template_; }; diff --git a/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc b/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc new file mode 100644 index 0000000000..f4525f0db1 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" + +#include +#include + +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" + +namespace webrtc { + +std::vector RtpDescriptorAuthentication( + const RTPVideoHeader& rtp_video_header) { + if (!rtp_video_header.generic) { + return {}; + } + const RTPVideoHeader::GenericDescriptorInfo& descriptor = + *rtp_video_header.generic; + // Default way of creating additional data for an encrypted frame. + if (descriptor.spatial_index < 0 || descriptor.temporal_index < 0 || + descriptor.spatial_index >= + RtpGenericFrameDescriptor::kMaxSpatialLayers || + descriptor.temporal_index >= + RtpGenericFrameDescriptor::kMaxTemporalLayers || + descriptor.dependencies.size() > + RtpGenericFrameDescriptor::kMaxNumFrameDependencies) { + return {}; + } + RtpGenericFrameDescriptor frame_descriptor; + frame_descriptor.SetFirstPacketInSubFrame(true); + frame_descriptor.SetLastPacketInSubFrame(false); + frame_descriptor.SetTemporalLayer(descriptor.temporal_index); + frame_descriptor.SetSpatialLayersBitmask(1 << descriptor.spatial_index); + frame_descriptor.SetFrameId(descriptor.frame_id & 0xFFFF); + for (int64_t dependency : descriptor.dependencies) { + frame_descriptor.AddFrameDependencyDiff(descriptor.frame_id - dependency); + } + if (descriptor.dependencies.empty()) { + frame_descriptor.SetResolution(rtp_video_header.width, + rtp_video_header.height); + } + std::vector result( + RtpGenericFrameDescriptorExtension00::ValueSize(frame_descriptor)); + RtpGenericFrameDescriptorExtension00::Write(result, frame_descriptor); + return result; +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_descriptor_authentication.h b/modules/rtp_rtcp/source/rtp_descriptor_authentication.h new file mode 100644 index 0000000000..1791abecd8 --- /dev/null +++ 
b/modules/rtp_rtcp/source/rtp_descriptor_authentication.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_DESCRIPTOR_AUTHENTICATION_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_DESCRIPTOR_AUTHENTICATION_H_ + +#include +#include + +#include "modules/rtp_rtcp/source/rtp_video_header.h" + +namespace webrtc { + +// Converts frame dependencies into array of bytes for authentication. +std::vector RtpDescriptorAuthentication( + const RTPVideoHeader& rtp_video_header); + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_RTP_DESCRIPTOR_AUTHENTICATION_H_ diff --git a/modules/rtp_rtcp/source/rtp_format.cc b/modules/rtp_rtcp/source/rtp_format.cc index 28f63f1109..c7a35ee5ab 100644 --- a/modules/rtp_rtcp/source/rtp_format.cc +++ b/modules/rtp_rtcp/source/rtp_format.cc @@ -14,11 +14,17 @@ #include "absl/types/variant.h" #include "modules/rtp_rtcp/source/rtp_format_h264.h" +#ifndef DISABLE_H265 +#include "modules/rtp_rtcp/source/rtp_format_h265.h" +#endif #include "modules/rtp_rtcp/source/rtp_format_video_generic.h" #include "modules/rtp_rtcp/source/rtp_format_vp8.h" #include "modules/rtp_rtcp/source/rtp_format_vp9.h" #include "modules/rtp_rtcp/source/rtp_packetizer_av1.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" +#ifndef DISABLE_H265 +#include "modules/video_coding/codecs/h265/include/h265_globals.h" +#endif #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/checks.h" @@ -30,8 +36,7 @@ std::unique_ptr RtpPacketizer::Create( 
rtc::ArrayView payload, PayloadSizeLimits limits, // Codec-specific details. - const RTPVideoHeader& rtp_video_header, - const RTPFragmentationHeader* fragmentation) { + const RTPVideoHeader& rtp_video_header) { if (!type) { // Use raw packetizer. return std::make_unique(payload, limits); @@ -39,12 +44,19 @@ std::unique_ptr RtpPacketizer::Create( switch (*type) { case kVideoCodecH264: { - RTC_CHECK(fragmentation); const auto& h264 = absl::get(rtp_video_header.video_type_header); - return std::make_unique( - payload, limits, h264.packetization_mode, *fragmentation); + return std::make_unique(payload, limits, + h264.packetization_mode); } +#ifndef DISABLE_H265 + case kVideoCodecH265: { + const auto& h265 = + absl::get(rtp_video_header.video_type_header); + return absl::make_unique( + payload, limits, h265.packetization_mode); + } +#endif case kVideoCodecVP8: { const auto& vp8 = absl::get(rtp_video_header.video_type_header); diff --git a/modules/rtp_rtcp/source/rtp_format.h b/modules/rtp_rtcp/source/rtp_format.h index dca8285b62..b593f29b1d 100644 --- a/modules/rtp_rtcp/source/rtp_format.h +++ b/modules/rtp_rtcp/source/rtp_format.h @@ -18,7 +18,6 @@ #include "absl/types/optional.h" #include "api/array_view.h" -#include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" namespace webrtc { @@ -41,8 +40,7 @@ class RtpPacketizer { rtc::ArrayView payload, PayloadSizeLimits limits, // Codec-specific details. 
- const RTPVideoHeader& rtp_video_header, - const RTPFragmentationHeader* fragmentation); + const RTPVideoHeader& rtp_video_header); virtual ~RtpPacketizer() = default; diff --git a/modules/rtp_rtcp/source/rtp_format_h264.cc b/modules/rtp_rtcp/source/rtp_format_h264.cc index 6f19e38629..6c3966cb93 100644 --- a/modules/rtp_rtcp/source/rtp_format_h264.cc +++ b/modules/rtp_rtcp/source/rtp_format_h264.cc @@ -25,7 +25,6 @@ #include "common_video/h264/pps_parser.h" #include "common_video/h264/sps_parser.h" #include "common_video/h264/sps_vui_rewriter.h" -#include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" @@ -46,19 +45,18 @@ enum FuDefs : uint8_t { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 }; } // namespace -RtpPacketizerH264::RtpPacketizerH264( - rtc::ArrayView payload, - PayloadSizeLimits limits, - H264PacketizationMode packetization_mode, - const RTPFragmentationHeader& fragmentation) +RtpPacketizerH264::RtpPacketizerH264(rtc::ArrayView payload, + PayloadSizeLimits limits, + H264PacketizationMode packetization_mode) : limits_(limits), num_packets_left_(0) { // Guard against uninitialized memory in packetization_mode. 
RTC_CHECK(packetization_mode == H264PacketizationMode::NonInterleaved || packetization_mode == H264PacketizationMode::SingleNalUnit); - for (size_t i = 0; i < fragmentation.fragmentationVectorSize; ++i) { + for (const auto& nalu : + H264::FindNaluIndices(payload.data(), payload.size())) { input_fragments_.push_back( - payload.subview(fragmentation.Offset(i), fragmentation.Length(i))); + payload.subview(nalu.payload_start_offset, nalu.payload_size)); } if (!GeneratePackets(packetization_mode)) { diff --git a/modules/rtp_rtcp/source/rtp_format_h264.h b/modules/rtp_rtcp/source/rtp_format_h264.h index 4661dc2163..7c10dd5754 100644 --- a/modules/rtp_rtcp/source/rtp_format_h264.h +++ b/modules/rtp_rtcp/source/rtp_format_h264.h @@ -19,7 +19,6 @@ #include #include "api/array_view.h" -#include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" @@ -34,8 +33,7 @@ class RtpPacketizerH264 : public RtpPacketizer { // The payload_data must be exactly one encoded H264 frame. 
RtpPacketizerH264(rtc::ArrayView payload, PayloadSizeLimits limits, - H264PacketizationMode packetization_mode, - const RTPFragmentationHeader& fragmentation); + H264PacketizationMode packetization_mode); ~RtpPacketizerH264() override; diff --git a/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc b/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc index bf9771ab9f..9f660b7a74 100644 --- a/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc @@ -13,9 +13,9 @@ #include #include +#include "absl/algorithm/container.h" #include "api/array_view.h" #include "common_video/h264/h264_common.h" -#include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" @@ -56,45 +56,61 @@ enum NalDefs { kFBit = 0x80, kNriMask = 0x60, kTypeMask = 0x1F }; // Bit masks for FU (A and B) headers. enum FuDefs { kSBit = 0x80, kEBit = 0x40, kRBit = 0x20 }; -RTPFragmentationHeader CreateFragmentation(rtc::ArrayView sizes) { - RTPFragmentationHeader fragmentation; - fragmentation.VerifyAndAllocateFragmentationHeader(sizes.size()); - size_t offset = 0; - for (size_t i = 0; i < sizes.size(); ++i) { - fragmentation.fragmentationOffset[i] = offset; - fragmentation.fragmentationLength[i] = sizes[i]; - offset += sizes[i]; +// Creates Buffer that looks like nal unit of given size. +rtc::Buffer GenerateNalUnit(size_t size) { + RTC_CHECK_GT(size, 0); + rtc::Buffer buffer(size); + // Set some valid header. 
+ buffer[0] = kSlice; + for (size_t i = 1; i < size; ++i) { + buffer[i] = static_cast(i); } - return fragmentation; -} - -// Create fragmentation with single fragment of same size as |frame| -RTPFragmentationHeader NoFragmentation(rtc::ArrayView frame) { - size_t frame_size[] = {frame.size()}; - return CreateFragmentation(frame_size); + // Last byte shouldn't be 0, or it may be counted as part of next 4-byte start + // sequence. + buffer[size - 1] |= 0x10; + return buffer; } -// Create frame of given size. -rtc::Buffer CreateFrame(size_t frame_size) { - rtc::Buffer frame(frame_size); - // Set some valid header. - frame[0] = 0x01; - // Generate payload to detect when shifted payload was put into a packet. - for (size_t i = 1; i < frame_size; ++i) - frame[i] = static_cast(i); +// Create frame consisting of nalus of given size. +rtc::Buffer CreateFrame(std::initializer_list nalu_sizes) { + static constexpr int kStartCodeSize = 3; + rtc::Buffer frame(absl::c_accumulate(nalu_sizes, 0) + + kStartCodeSize * nalu_sizes.size()); + size_t offset = 0; + for (size_t nalu_size : nalu_sizes) { + EXPECT_GE(nalu_size, 1u); + // Insert nalu start code + frame[offset] = 0; + frame[offset + 1] = 0; + frame[offset + 2] = 1; + // Set some valid header. + frame[offset + 3] = 1; + // Fill payload avoiding accidental start codes + if (nalu_size > 1) { + memset(frame.data() + offset + 4, 0x3f, nalu_size - 1); + } + offset += (kStartCodeSize + nalu_size); + } return frame; } -// Create frame with size deduced from fragmentation. -rtc::Buffer CreateFrame(const RTPFragmentationHeader& fragmentation) { - size_t last_frame_index = fragmentation.fragmentationVectorSize - 1; - size_t frame_size = fragmentation.fragmentationOffset[last_frame_index] + - fragmentation.fragmentationLength[last_frame_index]; - rtc::Buffer frame = CreateFrame(frame_size); - // Set some headers. - // Tests can expect those are valid but shouln't rely on actual values. 
- for (size_t i = 0; i <= last_frame_index; ++i) { - frame[fragmentation.fragmentationOffset[i]] = i + 1; +// Create frame consisting of given nalus. +rtc::Buffer CreateFrame(rtc::ArrayView nalus) { + static constexpr int kStartCodeSize = 3; + int frame_size = 0; + for (const rtc::Buffer& nalu : nalus) { + frame_size += (kStartCodeSize + nalu.size()); + } + rtc::Buffer frame(frame_size); + size_t offset = 0; + for (const rtc::Buffer& nalu : nalus) { + // Insert nalu start code + frame[offset] = 0; + frame[offset + 1] = 0; + frame[offset + 2] = 1; + // Copy the nalu unit. + memcpy(frame.data() + offset + 3, nalu.data(), nalu.size()); + offset += (kStartCodeSize + nalu.size()); } return frame; } @@ -117,31 +133,28 @@ class RtpPacketizerH264ModeTest : public ::testing::TestWithParam {}; TEST_P(RtpPacketizerH264ModeTest, SingleNalu) { - const uint8_t frame[2] = {kIdr, 0xFF}; + const uint8_t frame[] = {0, 0, 1, kIdr, 0xFF}; - RtpPacketizerH264 packetizer(frame, kNoLimits, GetParam(), - NoFragmentation(frame)); + RtpPacketizerH264 packetizer(frame, kNoLimits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(1)); - EXPECT_THAT(packets[0].payload(), ElementsAreArray(frame)); + EXPECT_THAT(packets[0].payload(), ElementsAre(kIdr, 0xFF)); } TEST_P(RtpPacketizerH264ModeTest, SingleNaluTwoPackets) { RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = kMaxPayloadSize; - const size_t fragment_sizes[] = {kMaxPayloadSize, 100}; - RTPFragmentationHeader fragmentation = CreateFragmentation(fragment_sizes); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer nalus[] = {GenerateNalUnit(kMaxPayloadSize), + GenerateNalUnit(100)}; + rtc::Buffer frame = CreateFrame(nalus); - RtpPacketizerH264 packetizer(frame, limits, GetParam(), fragmentation); + RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(2)); - 
EXPECT_THAT(packets[0].payload(), - ElementsAreArray(frame.data(), kMaxPayloadSize)); - EXPECT_THAT(packets[1].payload(), - ElementsAreArray(frame.data() + kMaxPayloadSize, 100)); + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1])); } TEST_P(RtpPacketizerH264ModeTest, @@ -149,21 +162,18 @@ TEST_P(RtpPacketizerH264ModeTest, RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = 200; limits.first_packet_reduction_len = 5; - const size_t fragments[] = {195, 200, 200}; - - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/195), + GenerateNalUnit(/*size=*/200), + GenerateNalUnit(/*size=*/200)}; + rtc::Buffer frame = CreateFrame(nalus); - RtpPacketizerH264 packetizer(frame, limits, GetParam(), fragmentation); + RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(3)); - const uint8_t* next_fragment = frame.data(); - EXPECT_THAT(packets[0].payload(), ElementsAreArray(next_fragment, 195)); - next_fragment += 195; - EXPECT_THAT(packets[1].payload(), ElementsAreArray(next_fragment, 200)); - next_fragment += 200; - EXPECT_THAT(packets[2].payload(), ElementsAreArray(next_fragment, 200)); + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1])); + EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2])); } TEST_P(RtpPacketizerH264ModeTest, @@ -171,21 +181,18 @@ TEST_P(RtpPacketizerH264ModeTest, RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = 200; limits.last_packet_reduction_len = 5; - const size_t fragments[] = {200, 200, 195}; - - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer nalus[] = 
{GenerateNalUnit(/*size=*/200), + GenerateNalUnit(/*size=*/200), + GenerateNalUnit(/*size=*/195)}; + rtc::Buffer frame = CreateFrame(nalus); - RtpPacketizerH264 packetizer(frame, limits, GetParam(), fragmentation); + RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(3)); - const uint8_t* next_fragment = frame.data(); - EXPECT_THAT(packets[0].payload(), ElementsAreArray(next_fragment, 200)); - next_fragment += 200; - EXPECT_THAT(packets[1].payload(), ElementsAreArray(next_fragment, 200)); - next_fragment += 200; - EXPECT_THAT(packets[2].payload(), ElementsAreArray(next_fragment, 195)); + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1])); + EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2])); } TEST_P(RtpPacketizerH264ModeTest, @@ -194,10 +201,9 @@ TEST_P(RtpPacketizerH264ModeTest, limits.max_payload_len = 200; limits.first_packet_reduction_len = 20; limits.last_packet_reduction_len = 30; - rtc::Buffer frame = CreateFrame(150); + rtc::Buffer frame = CreateFrame({150}); - RtpPacketizerH264 packetizer(frame, limits, GetParam(), - NoFragmentation(frame)); + RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); EXPECT_THAT(packets, SizeIs(1)); @@ -211,19 +217,19 @@ INSTANTIATE_TEST_SUITE_P( // Aggregation tests. 
TEST(RtpPacketizerH264Test, StapA) { - size_t fragments[] = {2, 2, 0x123}; + rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/0x123)}; + rtc::Buffer frame = CreateFrame(nalus); - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); - - RtpPacketizerH264 packetizer( - frame, kNoLimits, H264PacketizationMode::NonInterleaved, fragmentation); + RtpPacketizerH264 packetizer(frame, kNoLimits, + H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(1)); auto payload = packets[0].payload(); EXPECT_EQ(payload.size(), - kNalHeaderSize + 3 * kLengthFieldLength + frame.size()); + kNalHeaderSize + 3 * kLengthFieldLength + 2 + 2 + 0x123); EXPECT_EQ(payload[0], kStapA); payload = payload.subview(kNalHeaderSize); @@ -231,29 +237,26 @@ TEST(RtpPacketizerH264Test, StapA) { EXPECT_THAT(payload.subview(0, kLengthFieldLength), ElementsAre(0, 2)); // Size. EXPECT_THAT(payload.subview(kLengthFieldLength, 2), - ElementsAreArray(frame.data(), 2)); + ElementsAreArray(nalus[0])); payload = payload.subview(kLengthFieldLength + 2); // 2nd fragment. EXPECT_THAT(payload.subview(0, kLengthFieldLength), ElementsAre(0, 2)); // Size. EXPECT_THAT(payload.subview(kLengthFieldLength, 2), - ElementsAreArray(frame.data() + 2, 2)); + ElementsAreArray(nalus[1])); payload = payload.subview(kLengthFieldLength + 2); // 3rd fragment. EXPECT_THAT(payload.subview(0, kLengthFieldLength), ElementsAre(0x1, 0x23)); // Size. - EXPECT_THAT(payload.subview(kLengthFieldLength), - ElementsAreArray(frame.data() + 4, 0x123)); + EXPECT_THAT(payload.subview(kLengthFieldLength), ElementsAreArray(nalus[2])); } TEST(RtpPacketizerH264Test, SingleNalUnitModeHasNoStapA) { // This is the same setup as for the StapA test. 
- size_t fragments[] = {2, 2, 0x123}; - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer frame = CreateFrame({2, 2, 0x123}); - RtpPacketizerH264 packetizer( - frame, kNoLimits, H264PacketizationMode::SingleNalUnit, fragmentation); + RtpPacketizerH264 packetizer(frame, kNoLimits, + H264PacketizationMode::SingleNalUnit); std::vector packets = FetchAllPackets(&packetizer); // The three fragments should be returned as three packets. @@ -269,23 +272,23 @@ TEST(RtpPacketizerH264Test, StapARespectsFirstPacketReduction) { limits.first_packet_reduction_len = 100; const size_t kFirstFragmentSize = limits.max_payload_len - limits.first_packet_reduction_len; - size_t fragments[] = {kFirstFragmentSize, 2, 2}; - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/kFirstFragmentSize), + GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/2)}; + rtc::Buffer frame = CreateFrame(nalus); - RtpPacketizerH264 packetizer( - frame, limits, H264PacketizationMode::NonInterleaved, fragmentation); + RtpPacketizerH264 packetizer(frame, limits, + H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(2)); // Expect 1st packet is single nalu. - EXPECT_THAT(packets[0].payload(), - ElementsAreArray(frame.data(), kFirstFragmentSize)); + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); // Expect 2nd packet is aggregate of last two fragments. 
- const uint8_t* tail = frame.data() + kFirstFragmentSize; - EXPECT_THAT(packets[1].payload(), ElementsAre(kStapA, // - 0, 2, tail[0], tail[1], // - 0, 2, tail[2], tail[3])); + EXPECT_THAT(packets[1].payload(), + ElementsAre(kStapA, // + 0, 2, nalus[1][0], nalus[1][1], // + 0, 2, nalus[2][0], nalus[2][1])); } TEST(RtpPacketizerH264Test, StapARespectsLastPacketReduction) { @@ -294,22 +297,23 @@ TEST(RtpPacketizerH264Test, StapARespectsLastPacketReduction) { limits.last_packet_reduction_len = 100; const size_t kLastFragmentSize = limits.max_payload_len - limits.last_packet_reduction_len; - size_t fragments[] = {2, 2, kLastFragmentSize}; - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/kLastFragmentSize)}; + rtc::Buffer frame = CreateFrame(nalus); - RtpPacketizerH264 packetizer( - frame, limits, H264PacketizationMode::NonInterleaved, fragmentation); + RtpPacketizerH264 packetizer(frame, limits, + H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(2)); // Expect 1st packet is aggregate of 1st two fragments. - EXPECT_THAT(packets[0].payload(), ElementsAre(kStapA, // - 0, 2, frame[0], frame[1], // - 0, 2, frame[2], frame[3])); + EXPECT_THAT(packets[0].payload(), + ElementsAre(kStapA, // + 0, 2, nalus[0][0], nalus[0][1], // + 0, 2, nalus[1][0], nalus[1][1])); // Expect 2nd packet is single nalu. 
- EXPECT_THAT(packets[1].payload(), - ElementsAreArray(frame.data() + 4, kLastFragmentSize)); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2])); } TEST(RtpPacketizerH264Test, TooSmallForStapAHeaders) { @@ -317,22 +321,23 @@ TEST(RtpPacketizerH264Test, TooSmallForStapAHeaders) { limits.max_payload_len = 1000; const size_t kLastFragmentSize = limits.max_payload_len - 3 * kLengthFieldLength - 4; - size_t fragments[] = {2, 2, kLastFragmentSize}; - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/kLastFragmentSize)}; + rtc::Buffer frame = CreateFrame(nalus); - RtpPacketizerH264 packetizer( - frame, limits, H264PacketizationMode::NonInterleaved, fragmentation); + RtpPacketizerH264 packetizer(frame, limits, + H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(2)); // Expect 1st packet is aggregate of 1st two fragments. - EXPECT_THAT(packets[0].payload(), ElementsAre(kStapA, // - 0, 2, frame[0], frame[1], // - 0, 2, frame[2], frame[3])); + EXPECT_THAT(packets[0].payload(), + ElementsAre(kStapA, // + 0, 2, nalus[0][0], nalus[0][1], // + 0, 2, nalus[1][0], nalus[1][1])); // Expect 2nd packet is single nalu. - EXPECT_THAT(packets[1].payload(), - ElementsAreArray(frame.data() + 4, kLastFragmentSize)); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2])); } // Fragmentation + aggregation. 
@@ -342,28 +347,29 @@ TEST(RtpPacketizerH264Test, MixedStapAFUA) { const size_t kFuaPayloadSize = 70; const size_t kFuaNaluSize = kNalHeaderSize + 2 * kFuaPayloadSize; const size_t kStapANaluSize = 20; - size_t fragments[] = {kFuaNaluSize, kStapANaluSize, kStapANaluSize}; - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer nalus[] = {GenerateNalUnit(kFuaNaluSize), + GenerateNalUnit(kStapANaluSize), + GenerateNalUnit(kStapANaluSize)}; + rtc::Buffer frame = CreateFrame(nalus); - RtpPacketizerH264 packetizer( - frame, limits, H264PacketizationMode::NonInterleaved, fragmentation); + RtpPacketizerH264 packetizer(frame, limits, + H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); ASSERT_THAT(packets, SizeIs(3)); - const uint8_t* next_fragment = frame.data() + kNalHeaderSize; // First expect two FU-A packets. EXPECT_THAT(packets[0].payload().subview(0, kFuAHeaderSize), - ElementsAre(kFuA, FuDefs::kSBit | frame[0])); - EXPECT_THAT(packets[0].payload().subview(kFuAHeaderSize), - ElementsAreArray(next_fragment, kFuaPayloadSize)); - next_fragment += kFuaPayloadSize; + ElementsAre(kFuA, FuDefs::kSBit | nalus[0][0])); + EXPECT_THAT( + packets[0].payload().subview(kFuAHeaderSize), + ElementsAreArray(nalus[0].data() + kNalHeaderSize, kFuaPayloadSize)); EXPECT_THAT(packets[1].payload().subview(0, kFuAHeaderSize), - ElementsAre(kFuA, FuDefs::kEBit | frame[0])); - EXPECT_THAT(packets[1].payload().subview(kFuAHeaderSize), - ElementsAreArray(next_fragment, kFuaPayloadSize)); - next_fragment += kFuaPayloadSize; + ElementsAre(kFuA, FuDefs::kEBit | nalus[0][0])); + EXPECT_THAT( + packets[1].payload().subview(kFuAHeaderSize), + ElementsAreArray(nalus[0].data() + kNalHeaderSize + kFuaPayloadSize, + kFuaPayloadSize)); // Then expect one STAP-A packet with two nal units. 
EXPECT_THAT(packets[2].payload()[0], kStapA); @@ -371,13 +377,11 @@ TEST(RtpPacketizerH264Test, MixedStapAFUA) { EXPECT_THAT(payload.subview(0, kLengthFieldLength), ElementsAre(0, kStapANaluSize)); EXPECT_THAT(payload.subview(kLengthFieldLength, kStapANaluSize), - ElementsAreArray(next_fragment, kStapANaluSize)); + ElementsAreArray(nalus[1])); payload = payload.subview(kLengthFieldLength + kStapANaluSize); - next_fragment += kStapANaluSize; EXPECT_THAT(payload.subview(0, kLengthFieldLength), ElementsAre(0, kStapANaluSize)); - EXPECT_THAT(payload.subview(kLengthFieldLength), - ElementsAreArray(next_fragment, kStapANaluSize)); + EXPECT_THAT(payload.subview(kLengthFieldLength), ElementsAreArray(nalus[2])); } TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) { @@ -387,12 +391,10 @@ TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) { limits.last_packet_reduction_len = 20; limits.single_packet_reduction_len = 20; // Actual sizes, which triggered this bug. - size_t fragments[] = {20, 8, 18, 1161}; - RTPFragmentationHeader fragmentation = CreateFragmentation(fragments); - rtc::Buffer frame = CreateFrame(fragmentation); + rtc::Buffer frame = CreateFrame({20, 8, 18, 1161}); - RtpPacketizerH264 packetizer( - frame, limits, H264PacketizationMode::NonInterleaved, fragmentation); + RtpPacketizerH264 packetizer(frame, limits, + H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); // Last packet has to be of correct size. @@ -406,11 +408,11 @@ TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) { // Returns sizes of the payloads excluding fua headers. 
std::vector TestFua(size_t frame_payload_size, const RtpPacketizer::PayloadSizeLimits& limits) { - rtc::Buffer frame = CreateFrame(kNalHeaderSize + frame_payload_size); + rtc::Buffer nalu[] = {GenerateNalUnit(kNalHeaderSize + frame_payload_size)}; + rtc::Buffer frame = CreateFrame(nalu); RtpPacketizerH264 packetizer(frame, limits, - H264PacketizationMode::NonInterleaved, - NoFragmentation(frame)); + H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); EXPECT_GE(packets.size(), 2u); // Single packet indicates it is not FuA. @@ -429,7 +431,7 @@ std::vector TestFua(size_t frame_payload_size, // Clear S and E bits before testing all are duplicating same original header. fua_header.front() &= ~FuDefs::kSBit; fua_header.back() &= ~FuDefs::kEBit; - EXPECT_THAT(fua_header, Each(Eq((kFuA << 8) | frame[0]))); + EXPECT_THAT(fua_header, Each(Eq((kFuA << 8) | nalu[0][0]))); return payload_sizes; } @@ -488,11 +490,10 @@ TEST(RtpPacketizerH264Test, FUABig) { TEST(RtpPacketizerH264Test, RejectsOverlongDataInPacketizationMode0) { RtpPacketizer::PayloadSizeLimits limits; - rtc::Buffer frame = CreateFrame(kMaxPayloadSize + 1); - RTPFragmentationHeader fragmentation = NoFragmentation(frame); + rtc::Buffer frame = CreateFrame({kMaxPayloadSize + 1}); - RtpPacketizerH264 packetizer( - frame, limits, H264PacketizationMode::SingleNalUnit, fragmentation); + RtpPacketizerH264 packetizer(frame, limits, + H264PacketizationMode::SingleNalUnit); std::vector packets = FetchAllPackets(&packetizer); EXPECT_THAT(packets, IsEmpty()); diff --git a/modules/rtp_rtcp/source/rtp_format_h265.cc b/modules/rtp_rtcp/source/rtp_format_h265.cc new file mode 100644 index 0000000000..8608277046 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_format_h265.cc @@ -0,0 +1,368 @@ +/* + * Intel License + */ + +#include + +#include "absl/types/optional.h" +#include "absl/types/variant.h" + +#include "common_video/h264/h264_common.h" +#include "common_video/h265/h265_common.h" 
+#include "common_video/h265/h265_pps_parser.h" +#include "common_video/h265/h265_sps_parser.h" +#include "common_video/h265/h265_vps_parser.h" +#include "modules/include/module_common_types.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtp_format_h265.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/logging.h" + +using namespace rtc; + +namespace webrtc { +namespace { + +enum NaluType { + kTrailN = 0, + kTrailR = 1, + kTsaN = 2, + kTsaR = 3, + kStsaN = 4, + kStsaR = 5, + kRadlN = 6, + kRadlR = 7, + kBlaWLp = 16, + kBlaWRadl = 17, + kBlaNLp = 18, + kIdrWRadl = 19, + kIdrNLp = 20, + kCra = 21, + kVps = 32, + kHevcSps = 33, + kHevcPps = 34, + kHevcAud = 35, + kPrefixSei = 39, + kSuffixSei = 40, + kHevcAp = 48, + kHevcFu = 49 +}; + +/* + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | PayloadHdr (Type=49) | FU header | DONL (cond) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-| +*/ +// Unlike H.264, HEVC NAL header is 2-bytes. +static const size_t kHevcNalHeaderSize = 2; +// H.265's FU is constructed of 2-byte payload header, and 1-byte FU header +static const size_t kHevcFuHeaderSize = 1; +static const size_t kHevcLengthFieldSize = 2; + +enum HevcNalHdrMasks { + kHevcFBit = 0x80, + kHevcTypeMask = 0x7E, + kHevcLayerIDHMask = 0x1, + kHevcLayerIDLMask = 0xF8, + kHevcTIDMask = 0x7, + kHevcTypeMaskN = 0x81, + kHevcTypeMaskInFuHeader = 0x3F +}; + +// Bit masks for FU headers. +enum HevcFuDefs { kHevcSBit = 0x80, kHevcEBit = 0x40, kHevcFuTypeBit = 0x3F }; + +} // namespace + +RtpPacketizerH265::RtpPacketizerH265( + rtc::ArrayView payload, + PayloadSizeLimits limits, + H265PacketizationMode packetization_mode) + : limits_(limits), + num_packets_left_(0) { + // Guard against uninitialized memory in packetization_mode. 
+ RTC_CHECK(packetization_mode == H265PacketizationMode::NonInterleaved || + packetization_mode == H265PacketizationMode::SingleNalUnit); + + for (const auto& nalu : + H264::FindNaluIndices(payload.data(), payload.size())) { + input_fragments_.push_back( + payload.subview(nalu.payload_start_offset, nalu.payload_size)); + } + + if (!GeneratePackets(packetization_mode)) { + // If failed to generate all the packets, discard already generated + // packets in case the caller would ignore return value and still try to + // call NextPacket(). + num_packets_left_ = 0; + while (!packets_.empty()) { + packets_.pop(); + } + } +} + +RtpPacketizerH265::~RtpPacketizerH265() {} + +size_t RtpPacketizerH265::NumPackets() const { + return num_packets_left_; +} + +bool RtpPacketizerH265::GeneratePackets( + H265PacketizationMode packetization_mode) { + // For HEVC we follow non-interleaved mode for the packetization, + // and don't support single-nalu mode at present. + for (size_t i = 0; i < input_fragments_.size();) { + int fragment_len = input_fragments_[i].size(); + int single_packet_capacity = limits_.max_payload_len; + if (input_fragments_.size() == 1) + single_packet_capacity -= limits_.single_packet_reduction_len; + else if (i == 0) + single_packet_capacity -= limits_.first_packet_reduction_len; + else if (i + 1 == input_fragments_.size()) { + // Pretend that last fragment is larger instead of making last packet + // smaller. + single_packet_capacity -= limits_.last_packet_reduction_len; + } + if (fragment_len > single_packet_capacity) { + PacketizeFu(i); + ++i; + } else { + PacketizeSingleNalu(i); + ++i; + } + } + return true; +} + +bool RtpPacketizerH265::PacketizeFu(size_t fragment_index) { + // Fragment payload into packets (FU). + // Strip out the original header and leave room for the FU header. 
+ rtc::ArrayView fragment = input_fragments_[fragment_index]; + PayloadSizeLimits limits = limits_; + limits.max_payload_len -= kHevcFuHeaderSize + kHevcNalHeaderSize; + + // Update single/first/last packet reductions unless it is single/first/last + // fragment. + if (input_fragments_.size() != 1) { + // if this fragment is put into a single packet, it might still be the + // first or the last packet in the whole sequence of packets. + if (fragment_index == input_fragments_.size() - 1) { + limits.single_packet_reduction_len = limits_.last_packet_reduction_len; + } else if (fragment_index == 0) { + limits.single_packet_reduction_len = limits_.first_packet_reduction_len; + } else { + limits.single_packet_reduction_len = 0; + } + } + if (fragment_index != 0) + limits.first_packet_reduction_len = 0; + if (fragment_index != input_fragments_.size() - 1) + limits.last_packet_reduction_len = 0; + + // Strip out the original header. + size_t payload_left = fragment.size() - kHevcNalHeaderSize; + int offset = kHevcNalHeaderSize; + + std::vector payload_sizes = SplitAboutEqually(payload_left, limits); + if (payload_sizes.empty()) + return false; + + for (size_t i = 0; i < payload_sizes.size(); ++i) { + int packet_length = payload_sizes[i]; + RTC_CHECK_GT(packet_length, 0); + uint16_t header = (fragment[0] << 8) | fragment[1]; + packets_.push(PacketUnit(fragment.subview(offset, packet_length), + /*first_fragment=*/i == 0, + /*last_fragment=*/i == payload_sizes.size() - 1, + false, header)); + offset += packet_length; + payload_left -= packet_length; + } + num_packets_left_ += payload_sizes.size(); + RTC_CHECK_EQ(0, payload_left); + return true; +} + + +bool RtpPacketizerH265::PacketizeSingleNalu(size_t fragment_index) { + // Add a single NALU to the queue, no aggregation. 
+ size_t payload_size_left = limits_.max_payload_len; + if (input_fragments_.size() == 1) + payload_size_left -= limits_.single_packet_reduction_len; + else if (fragment_index == 0) + payload_size_left -= limits_.first_packet_reduction_len; + else if (fragment_index + 1 == input_fragments_.size()) + payload_size_left -= limits_.last_packet_reduction_len; + rtc::ArrayView fragment = input_fragments_[fragment_index]; + if (payload_size_left < fragment.size()) { + RTC_LOG(LS_ERROR) << "Failed to fit a fragment to packet in SingleNalu " + "packetization mode. Payload size left " + << payload_size_left << ", fragment length " + << fragment.size() << ", packet capacity " + << limits_.max_payload_len; + return false; + } + RTC_CHECK_GT(fragment.size(), 0u); + packets_.push(PacketUnit(fragment, true /* first */, true /* last */, + false /* aggregated */, fragment[0])); + ++num_packets_left_; + return true; +} + +int RtpPacketizerH265::PacketizeAp(size_t fragment_index) { + // Aggregate fragments into one packet (STAP-A). + size_t payload_size_left = limits_.max_payload_len; + if (input_fragments_.size() == 1) + payload_size_left -= limits_.single_packet_reduction_len; + else if (fragment_index == 0) + payload_size_left -= limits_.first_packet_reduction_len; + int aggregated_fragments = 0; + size_t fragment_headers_length = 0; + rtc::ArrayView fragment = input_fragments_[fragment_index]; + RTC_CHECK_GE(payload_size_left, fragment.size()); + ++num_packets_left_; + + auto payload_size_needed = [&] { + size_t fragment_size = fragment.size() + fragment_headers_length; + if (input_fragments_.size() == 1) { + // Single fragment, single packet, payload_size_left already adjusted + // with limits_.single_packet_reduction_len. + return fragment_size; + } + if (fragment_index == input_fragments_.size() - 1) { + // Last fragment, so StrapA might be the last packet. 
+ return fragment_size + limits_.last_packet_reduction_len; + } + return fragment_size; + }; + + while (payload_size_left >= payload_size_needed()) { + RTC_CHECK_GT(fragment.size(), 0); + packets_.push(PacketUnit(fragment, aggregated_fragments == 0, false, true, + fragment[0])); + payload_size_left -= fragment.size(); + payload_size_left -= fragment_headers_length; + + fragment_headers_length = kHevcLengthFieldSize; + // If we are going to try to aggregate more fragments into this packet + // we need to add the STAP-A NALU header and a length field for the first + // NALU of this packet. + if (aggregated_fragments == 0) + fragment_headers_length += kHevcNalHeaderSize + kHevcLengthFieldSize; + ++aggregated_fragments; + + // Next fragment. + ++fragment_index; + if (fragment_index == input_fragments_.size()) + break; + fragment = input_fragments_[fragment_index]; + } + RTC_CHECK_GT(aggregated_fragments, 0); + packets_.back().last_fragment = true; + return fragment_index; +} + +bool RtpPacketizerH265::NextPacket(RtpPacketToSend* rtp_packet) { + RTC_DCHECK(rtp_packet); + + if (packets_.empty()) { + return false; + } + + PacketUnit packet = packets_.front(); + + if (packet.first_fragment && packet.last_fragment) { + // Single NAL unit packet. 
+ size_t bytes_to_send = packet.source_fragment.size(); + uint8_t* buffer = rtp_packet->AllocatePayload(bytes_to_send); + memcpy(buffer, packet.source_fragment.data(), bytes_to_send); + packets_.pop(); + input_fragments_.pop_front(); + } else if (packet.aggregated) { + bool is_last_packet = num_packets_left_ == 1; + NextAggregatePacket(rtp_packet, is_last_packet); + } else { + NextFragmentPacket(rtp_packet); + } + rtp_packet->SetMarker(packets_.empty()); + --num_packets_left_; + return true; +} + +void RtpPacketizerH265::NextAggregatePacket(RtpPacketToSend* rtp_packet, + bool last) { + size_t payload_capacity = rtp_packet->FreeCapacity(); + RTC_CHECK_GE(payload_capacity, kHevcNalHeaderSize); + uint8_t* buffer = rtp_packet->AllocatePayload(payload_capacity); + RTC_CHECK(buffer); + PacketUnit* packet = &packets_.front(); + RTC_CHECK(packet->first_fragment); + uint8_t payload_hdr_h = packet->header >> 8; + uint8_t payload_hdr_l = packet->header & 0xFF; + uint8_t layer_id_h = payload_hdr_h & kHevcLayerIDHMask; + + payload_hdr_h = + (payload_hdr_h & kHevcTypeMaskN) | (kHevcAp << 1) | layer_id_h; + + buffer[0] = payload_hdr_h; + buffer[1] = payload_hdr_l; + int index = kHevcNalHeaderSize; + bool is_last_fragment = packet->last_fragment; + while (packet->aggregated) { + // Add NAL unit length field. + rtc::ArrayView fragment = packet->source_fragment; + ByteWriter::WriteBigEndian(&buffer[index], fragment.size()); + index += kHevcLengthFieldSize; + // Add NAL unit. + memcpy(&buffer[index], fragment.data(), fragment.size()); + index += fragment.size(); + packets_.pop(); + input_fragments_.pop_front(); + if (is_last_fragment) + break; + packet = &packets_.front(); + is_last_fragment = packet->last_fragment; + } + RTC_CHECK(is_last_fragment); + rtp_packet->SetPayloadSize(index); +} + +void RtpPacketizerH265::NextFragmentPacket(RtpPacketToSend* rtp_packet) { + PacketUnit* packet = &packets_.front(); + // NAL unit fragmented over multiple packets (FU). 
+ // We do not send original NALU header, so it will be replaced by the + // PayloadHdr of the first packet. + uint8_t payload_hdr_h = + packet->header >> 8; // 1-bit F, 6-bit type, 1-bit layerID highest-bit + uint8_t payload_hdr_l = packet->header & 0xFF; + uint8_t layer_id_h = payload_hdr_h & kHevcLayerIDHMask; + uint8_t fu_header = 0; + // S | E |6 bit type. + fu_header |= (packet->first_fragment ? kHevcSBit : 0); + fu_header |= (packet->last_fragment ? kHevcEBit : 0); + uint8_t type = (payload_hdr_h & kHevcTypeMask) >> 1; + fu_header |= type; + // Now update payload_hdr_h with FU type. + payload_hdr_h = + (payload_hdr_h & kHevcTypeMaskN) | (kHevcFu << 1) | layer_id_h; + rtc::ArrayView fragment = packet->source_fragment; + uint8_t* buffer = rtp_packet->AllocatePayload( + kHevcFuHeaderSize + kHevcNalHeaderSize + fragment.size()); + RTC_CHECK(buffer); + buffer[0] = payload_hdr_h; + buffer[1] = payload_hdr_l; + buffer[2] = fu_header; + + if (packet->last_fragment) { + memcpy(buffer + kHevcFuHeaderSize + kHevcNalHeaderSize, fragment.data(), + fragment.size()); + } else { + memcpy(buffer + kHevcFuHeaderSize + kHevcNalHeaderSize, fragment.data(), + fragment.size()); + } + packets_.pop(); +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_format_h265.h b/modules/rtp_rtcp/source/rtp_format_h265.h new file mode 100644 index 0000000000..3a2b4ea206 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_format_h265.h @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H265_H_ +#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H265_H_ + +#include +#include +#include +#include "api/array_view.h" +#include "modules/include/module_common_types.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/video_coding/codecs/h265/include/h265_globals.h" +#include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { + +class RtpPacketizerH265 : public RtpPacketizer { + public: + // Initialize with payload from encoder. + // The payload_data must be exactly one encoded H.265 frame. + RtpPacketizerH265(rtc::ArrayView payload, + PayloadSizeLimits limits, + H265PacketizationMode packetization_mode); + + ~RtpPacketizerH265() override; + + size_t NumPackets() const override; + + // Get the next payload with H.265 payload header. + // buffer is a pointer to where the output will be written. + // bytes_to_send is an output variable that will contain number of bytes + // written to buffer. The parameter last_packet is true for the last packet of + // the frame, false otherwise (i.e., call the function again to get the + // next packet). + // Returns true on success or false if there was no payload to packetize. 
+ bool NextPacket(RtpPacketToSend* rtp_packet) override; + + private: + struct Packet { + Packet(size_t offset, + size_t size, + bool first_fragment, + bool last_fragment, + bool aggregated, + uint16_t header) + : offset(offset), + size(size), + first_fragment(first_fragment), + last_fragment(last_fragment), + aggregated(aggregated), + header(header) {} + + size_t offset; + size_t size; + bool first_fragment; + bool last_fragment; + bool aggregated; + uint16_t header; // Different from H264 + }; + struct PacketUnit { + PacketUnit(rtc::ArrayView source_fragment, + bool first_fragment, + bool last_fragment, + bool aggregated, + uint16_t header) + : source_fragment(source_fragment), + first_fragment(first_fragment), + last_fragment(last_fragment), + aggregated(aggregated), + header(header) {} + + rtc::ArrayView source_fragment; + bool first_fragment; + bool last_fragment; + bool aggregated; + uint16_t header; + }; + typedef std::queue PacketQueue; + std::deque> input_fragments_; + std::queue packets_; + + bool GeneratePackets(H265PacketizationMode packetization_mode); + bool PacketizeFu(size_t fragment_index); + int PacketizeAp(size_t fragment_index); + bool PacketizeSingleNalu(size_t fragment_index); + + void NextAggregatePacket(RtpPacketToSend* rtp_packet, bool last); + void NextFragmentPacket(RtpPacketToSend* rtp_packet); + + const PayloadSizeLimits limits_; + size_t num_packets_left_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerH265); +}; +} // namespace webrtc +#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H265_H_ diff --git a/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc b/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc index 35e7fe7ead..d83c3b03c9 100644 --- a/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc @@ -16,7 +16,6 @@ #include #include "api/array_view.h" -#include "modules/include/module_common_types.h" #include 
"modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" diff --git a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h index 03d4e58576..916d6577f1 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h +++ b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h @@ -10,10 +10,9 @@ // This file contains the class RtpFormatVp8TestHelper. The class is // responsible for setting up a fake VP8 bitstream according to the -// RTPVideoHeaderVP8 header, and partition information. After initialization, -// an RTPFragmentationHeader is provided so that the tester can create a -// packetizer. The packetizer can then be provided to this helper class, which -// will then extract all packets and compare to the expected outcome. +// RTPVideoHeaderVP8 header. The packetizer can then be provided to this helper +// class, which will then extract all packets and compare to the expected +// outcome. #ifndef MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_ diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc index 7a8af09927..465308ec45 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc @@ -97,22 +97,4 @@ bool RtpGenericFrameDescriptor::AddFrameDependencyDiff(uint16_t fdiff) { return true; } -void RtpGenericFrameDescriptor::SetByteRepresentation( - rtc::ArrayView byte_representation) { - RTC_CHECK(!byte_representation.empty()); - byte_representation_.assign(byte_representation.begin(), - byte_representation.end()); - // Clear end_of_subframe bit. - // Because ByteRepresentation is used for frame authentication, bit describing - // position of the packet in the frame shouldn't be part of it. 
- // This match RtpVideoSender where descriptor is passed for authentication - // before end_of_subframe bit is decided and set, i.e. it is always 0. - byte_representation_[0] &= ~0x40; -} - -rtc::ArrayView -RtpGenericFrameDescriptor::GetByteRepresentation() { - return byte_representation_; -} - } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h index 9e79455aff..8760acca2a 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h @@ -38,15 +38,6 @@ class RtpGenericFrameDescriptor { bool LastPacketInSubFrame() const { return end_of_subframe_; } void SetLastPacketInSubFrame(bool last) { end_of_subframe_ = last; } - // Denotes whether the frame is discardable. That is, whether skipping it - // would have no effect on the decodability of subsequent frames. - // An absl::optional is used because version 0 of the extension did not - // support this flag. (The optional aspect is relevant only when parsing.) - // TODO(bugs.webrtc.org/10243): Make this into a plain bool when v00 of - // the extension is deprecated. - absl::optional Discardable() const { return discardable_; } - void SetDiscardable(bool discardable) { discardable_ = discardable; } - // Properties below undefined if !FirstPacketInSubFrame() // Valid range for temporal layer: [0, 7] int TemporalLayer() const; @@ -70,15 +61,10 @@ class RtpGenericFrameDescriptor { // Returns false on failure, i.e. number of dependencies is too large. 
bool AddFrameDependencyDiff(uint16_t fdiff); - void SetByteRepresentation(rtc::ArrayView representation); - rtc::ArrayView GetByteRepresentation(); - private: bool beginning_of_subframe_ = false; bool end_of_subframe_ = false; - absl::optional discardable_; - uint16_t frame_id_ = 0; uint8_t spatial_layers_ = 1; uint8_t temporal_layer_ = 0; @@ -86,8 +72,6 @@ class RtpGenericFrameDescriptor { uint16_t frame_deps_id_diffs_[kMaxNumFrameDependencies]; int width_ = 0; int height_ = 0; - - std::vector byte_representation_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc index a705b5aa7e..ca46fa6217 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc @@ -20,18 +20,16 @@ constexpr uint8_t kFlagEndOfSubframe = 0x40; // In version 00, the flags F and L in the first byte correspond to // kFlagFirstSubframeV00 and kFlagLastSubframeV00. In practice, they were -// always set to |true|. In version 01, these flags are deprecated, and we use -// one of their bits for the discardability flag. +// always set to |true|. constexpr uint8_t kFlagFirstSubframeV00 = 0x20; constexpr uint8_t kFlagLastSubframeV00 = 0x10; -constexpr uint8_t kFlagDiscardableV01 = 0x10; constexpr uint8_t kFlagDependencies = 0x08; constexpr uint8_t kMaskTemporalLayer = 0x07; constexpr uint8_t kFlagMoreDependencies = 0x01; constexpr uint8_t kFlageXtendedOffset = 0x02; - +} // namespace // 0 1 2 3 4 5 6 7 // +-+-+-+-+-+-+-+-+ // |B|E|F|L|D| T | @@ -58,9 +56,10 @@ constexpr uint8_t kFlageXtendedOffset = 0x02; // +---------------+ // | ... 
| // +-+-+-+-+-+-+-+-+ +constexpr RTPExtensionType RtpGenericFrameDescriptorExtension00::kId; +constexpr char RtpGenericFrameDescriptorExtension00::kUri[]; -bool RtpGenericFrameDescriptorExtensionParse( - size_t version, +bool RtpGenericFrameDescriptorExtension00::Parse( rtc::ArrayView data, RtpGenericFrameDescriptor* descriptor) { if (data.empty()) { @@ -71,10 +70,6 @@ bool RtpGenericFrameDescriptorExtensionParse( descriptor->SetFirstPacketInSubFrame(begins_subframe); descriptor->SetLastPacketInSubFrame((data[0] & kFlagEndOfSubframe) != 0); - if (version >= 1) { - descriptor->SetDiscardable((data[0] & kFlagDiscardableV01) != 0); - } - // Parse Subframe details provided in 1st packet of subframe. if (!begins_subframe) { return data.size() == 1; @@ -115,7 +110,7 @@ bool RtpGenericFrameDescriptorExtensionParse( return true; } -size_t RtpGenericFrameDescriptorExtensionValueSize( +size_t RtpGenericFrameDescriptorExtension00::ValueSize( const RtpGenericFrameDescriptor& descriptor) { if (!descriptor.FirstPacketInSubFrame()) return 1; @@ -132,23 +127,15 @@ size_t RtpGenericFrameDescriptorExtensionValueSize( return size; } -bool RtpGenericFrameDescriptorExtensionWrite( - size_t version, +bool RtpGenericFrameDescriptorExtension00::Write( rtc::ArrayView data, const RtpGenericFrameDescriptor& descriptor) { - RTC_CHECK_EQ(data.size(), - - RtpGenericFrameDescriptorExtensionValueSize(descriptor)); + RTC_CHECK_EQ(data.size(), ValueSize(descriptor)); uint8_t base_header = (descriptor.FirstPacketInSubFrame() ? kFlagBeginOfSubframe : 0) | (descriptor.LastPacketInSubFrame() ? kFlagEndOfSubframe : 0); - if (version == 0) { - base_header |= kFlagFirstSubframeV00; - base_header |= kFlagLastSubframeV00; - } else if (version >= 1) { - const absl::optional discardable = descriptor.Discardable(); - base_header |= (discardable.value_or(false) ? 
kFlagDiscardableV01 : 0); - } + base_header |= kFlagFirstSubframeV00; + base_header |= kFlagLastSubframeV00; if (!descriptor.FirstPacketInSubFrame()) { data[0] = base_header; @@ -184,48 +171,4 @@ bool RtpGenericFrameDescriptorExtensionWrite( return true; } -} // namespace - -constexpr RTPExtensionType RtpGenericFrameDescriptorExtension00::kId; -constexpr char RtpGenericFrameDescriptorExtension00::kUri[]; - -bool RtpGenericFrameDescriptorExtension00::Parse( - rtc::ArrayView data, - RtpGenericFrameDescriptor* descriptor) { - return RtpGenericFrameDescriptorExtensionParse(0, data, descriptor); -} - -size_t RtpGenericFrameDescriptorExtension00::ValueSize( - const RtpGenericFrameDescriptor& descriptor) { - // No difference between existing versions. - return RtpGenericFrameDescriptorExtensionValueSize(descriptor); -} - -bool RtpGenericFrameDescriptorExtension00::Write( - rtc::ArrayView data, - const RtpGenericFrameDescriptor& descriptor) { - return RtpGenericFrameDescriptorExtensionWrite(0, data, descriptor); -} - -constexpr RTPExtensionType RtpGenericFrameDescriptorExtension01::kId; -constexpr char RtpGenericFrameDescriptorExtension01::kUri[]; - -bool RtpGenericFrameDescriptorExtension01::Parse( - rtc::ArrayView data, - RtpGenericFrameDescriptor* descriptor) { - return RtpGenericFrameDescriptorExtensionParse(1, data, descriptor); -} - -size_t RtpGenericFrameDescriptorExtension01::ValueSize( - const RtpGenericFrameDescriptor& descriptor) { - // No difference between existing versions. 
- return RtpGenericFrameDescriptorExtensionValueSize(descriptor); -} - -bool RtpGenericFrameDescriptorExtension01::Write( - rtc::ArrayView data, - const RtpGenericFrameDescriptor& descriptor) { - return RtpGenericFrameDescriptorExtensionWrite(1, data, descriptor); -} - } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h index a52588ee3a..ac7afb489b 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h @@ -35,22 +35,6 @@ class RtpGenericFrameDescriptorExtension00 { const RtpGenericFrameDescriptor& descriptor); }; -class RtpGenericFrameDescriptorExtension01 { - public: - using value_type = RtpGenericFrameDescriptor; - static constexpr RTPExtensionType kId = kRtpExtensionGenericFrameDescriptor01; - static constexpr char kUri[] = - "http://www.webrtc.org/experiments/rtp-hdrext/" - "generic-frame-descriptor-01"; - static constexpr int kMaxSizeBytes = 16; - - static bool Parse(rtc::ArrayView data, - RtpGenericFrameDescriptor* descriptor); - static size_t ValueSize(const RtpGenericFrameDescriptor& descriptor); - static bool Write(rtc::ArrayView data, - const RtpGenericFrameDescriptor& descriptor); -}; - } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_GENERIC_FRAME_DESCRIPTOR_EXTENSION_H_ diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc index 3ce46fd8ff..d7f8e1e906 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc @@ -23,87 +23,27 @@ constexpr uint8_t kDeprecatedFlags = 0x30; // TODO(danilchap): Add fuzzer to test for various invalid inputs. 
-class RtpGenericFrameDescriptorExtensionTest - : public ::testing::Test, - public ::testing::WithParamInterface { - public: - RtpGenericFrameDescriptorExtensionTest() : version_(GetParam()) {} - - bool Parse(rtc::ArrayView data, - RtpGenericFrameDescriptor* descriptor) const { - switch (version_) { - case 0: - return RtpGenericFrameDescriptorExtension00::Parse(data, descriptor); - case 1: - return RtpGenericFrameDescriptorExtension01::Parse(data, descriptor); - } - RTC_NOTREACHED(); - return false; - } - - size_t ValueSize(const RtpGenericFrameDescriptor& descriptor) const { - switch (version_) { - case 0: - return RtpGenericFrameDescriptorExtension00::ValueSize(descriptor); - case 1: - return RtpGenericFrameDescriptorExtension01::ValueSize(descriptor); - } - RTC_NOTREACHED(); - return 0; - } - - bool Write(rtc::ArrayView data, - const RtpGenericFrameDescriptor& descriptor) const { - switch (version_) { - case 0: - return RtpGenericFrameDescriptorExtension00::Write(data, descriptor); - case 1: - return RtpGenericFrameDescriptorExtension01::Write(data, descriptor); - } - RTC_NOTREACHED(); - return false; - } - - protected: - const int version_; -}; - -INSTANTIATE_TEST_SUITE_P(All, - RtpGenericFrameDescriptorExtensionTest, - ::testing::Values(0, 1)); - -TEST_P(RtpGenericFrameDescriptorExtensionTest, - ParseFirstPacketOfIndependenSubFrame) { +TEST(RtpGenericFrameDescriptorExtensionTest, + ParseFirstPacketOfIndependenSubFrame) { const int kTemporalLayer = 5; constexpr uint8_t kRaw[] = {0x80 | kTemporalLayer, 0x49, 0x12, 0x34}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); EXPECT_TRUE(descriptor.FirstPacketInSubFrame()); EXPECT_FALSE(descriptor.LastPacketInSubFrame()); - const absl::optional discardable = descriptor.Discardable(); - if (version_ == 0) { - ASSERT_FALSE(discardable.has_value()); - } else { - ASSERT_TRUE(discardable.has_value()); - 
EXPECT_FALSE(discardable.value()); - } - EXPECT_THAT(descriptor.FrameDependenciesDiffs(), IsEmpty()); EXPECT_EQ(descriptor.TemporalLayer(), kTemporalLayer); EXPECT_EQ(descriptor.SpatialLayersBitmask(), 0x49); EXPECT_EQ(descriptor.FrameId(), 0x3412); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, - WriteFirstPacketOfIndependenSubFrame) { +TEST(RtpGenericFrameDescriptorExtensionTest, + WriteFirstPacketOfIndependenSubFrame) { const int kTemporalLayer = 5; - uint8_t kRaw[] = {0x80 | kTemporalLayer, 0x49, 0x12, 0x34}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = {0x80 | kTemporalLayer | kDeprecatedFlags, 0x49, 0x12, 0x34}; RtpGenericFrameDescriptor descriptor; descriptor.SetFirstPacketInSubFrame(true); @@ -111,263 +51,215 @@ TEST_P(RtpGenericFrameDescriptorExtensionTest, descriptor.SetSpatialLayersBitmask(0x49); descriptor.SetFrameId(0x3412); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, ParseLastPacketOfSubFrame) { +TEST(RtpGenericFrameDescriptorExtensionTest, ParseLastPacketOfSubFrame) { constexpr uint8_t kRaw[] = {0x40}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); EXPECT_FALSE(descriptor.FirstPacketInSubFrame()); - - const absl::optional discardable = descriptor.Discardable(); - if (version_ == 0) { - ASSERT_FALSE(discardable.has_value()); - } else { - ASSERT_TRUE(discardable.has_value()); - EXPECT_FALSE(discardable.value()); - } - EXPECT_TRUE(descriptor.LastPacketInSubFrame()); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, WriteLastPacketOfSubFrame) { - uint8_t kRaw[] = {0x40}; - 
if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } +TEST(RtpGenericFrameDescriptorExtensionTest, WriteLastPacketOfSubFrame) { + uint8_t kRaw[] = {0x40 | kDeprecatedFlags}; RtpGenericFrameDescriptor descriptor; descriptor.SetLastPacketInSubFrame(true); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); - uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); - EXPECT_THAT(buffer, ElementsAreArray(kRaw)); -} - -TEST_P(RtpGenericFrameDescriptorExtensionTest, ParseDiscardable) { - if (version_ == 0) { - return; - } - - constexpr uint8_t kRaw[] = {0x10}; - RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); - const absl::optional discardable = descriptor.Discardable(); - ASSERT_TRUE(discardable.has_value()); - EXPECT_TRUE(discardable.value()); -} - -TEST_P(RtpGenericFrameDescriptorExtensionTest, WriteDiscardable) { - if (version_ == 0) { - return; - } - - constexpr uint8_t kRaw[] = {0x10}; - RtpGenericFrameDescriptor descriptor; - descriptor.SetDiscardable(true); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, ParseMinShortFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, ParseMinShortFrameDependencies) { constexpr uint16_t kDiff = 1; constexpr uint8_t kRaw[] = {0x88, 0x01, 0x00, 0x00, 0x04}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); ASSERT_TRUE(descriptor.FirstPacketInSubFrame()); EXPECT_THAT(descriptor.FrameDependenciesDiffs(), ElementsAre(kDiff)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, WriteMinShortFrameDependencies) { 
+TEST(RtpGenericFrameDescriptorExtensionTest, WriteMinShortFrameDependencies) { constexpr uint16_t kDiff = 1; - uint8_t kRaw[] = {0x88, 0x01, 0x00, 0x00, 0x04}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = {0x88 | kDeprecatedFlags, 0x01, 0x00, 0x00, 0x04}; RtpGenericFrameDescriptor descriptor; descriptor.SetFirstPacketInSubFrame(true); descriptor.AddFrameDependencyDiff(kDiff); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, ParseMaxShortFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, ParseMaxShortFrameDependencies) { constexpr uint16_t kDiff = 0x3f; constexpr uint8_t kRaw[] = {0xb8, 0x01, 0x00, 0x00, 0xfc}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); ASSERT_TRUE(descriptor.FirstPacketInSubFrame()); EXPECT_THAT(descriptor.FrameDependenciesDiffs(), ElementsAre(kDiff)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, WriteMaxShortFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, WriteMaxShortFrameDependencies) { constexpr uint16_t kDiff = 0x3f; - uint8_t kRaw[] = {0x88, 0x01, 0x00, 0x00, 0xfc}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = {0x88 | kDeprecatedFlags, 0x01, 0x00, 0x00, 0xfc}; RtpGenericFrameDescriptor descriptor; descriptor.SetFirstPacketInSubFrame(true); descriptor.AddFrameDependencyDiff(kDiff); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + 
EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, ParseMinLongFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, ParseMinLongFrameDependencies) { constexpr uint16_t kDiff = 0x40; constexpr uint8_t kRaw[] = {0xb8, 0x01, 0x00, 0x00, 0x02, 0x01}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); ASSERT_TRUE(descriptor.FirstPacketInSubFrame()); EXPECT_THAT(descriptor.FrameDependenciesDiffs(), ElementsAre(kDiff)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, WriteMinLongFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, WriteMinLongFrameDependencies) { constexpr uint16_t kDiff = 0x40; - uint8_t kRaw[] = {0x88, 0x01, 0x00, 0x00, 0x02, 0x01}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = {0x88 | kDeprecatedFlags, 0x01, 0x00, 0x00, 0x02, 0x01}; RtpGenericFrameDescriptor descriptor; descriptor.SetFirstPacketInSubFrame(true); descriptor.AddFrameDependencyDiff(kDiff); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, - ParseLongFrameDependenciesAsBigEndian) { +TEST(RtpGenericFrameDescriptorExtensionTest, + ParseLongFrameDependenciesAsBigEndian) { constexpr uint16_t kDiff = 0x7654 >> 2; constexpr uint8_t kRaw[] = {0xb8, 0x01, 0x00, 0x00, 0x54 | 0x02, 0x76}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); 
ASSERT_TRUE(descriptor.FirstPacketInSubFrame()); EXPECT_THAT(descriptor.FrameDependenciesDiffs(), ElementsAre(kDiff)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, - WriteLongFrameDependenciesAsBigEndian) { +TEST(RtpGenericFrameDescriptorExtensionTest, + WriteLongFrameDependenciesAsBigEndian) { constexpr uint16_t kDiff = 0x7654 >> 2; - uint8_t kRaw[] = {0x88, 0x01, 0x00, 0x00, 0x54 | 0x02, 0x76}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = { + 0x88 | kDeprecatedFlags, 0x01, 0x00, 0x00, 0x54 | 0x02, 0x76}; RtpGenericFrameDescriptor descriptor; descriptor.SetFirstPacketInSubFrame(true); descriptor.AddFrameDependencyDiff(kDiff); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, ParseMaxLongFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, ParseMaxLongFrameDependencies) { constexpr uint16_t kDiff = 0x3fff; constexpr uint8_t kRaw[] = {0xb8, 0x01, 0x00, 0x00, 0xfe, 0xff}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); ASSERT_TRUE(descriptor.FirstPacketInSubFrame()); EXPECT_THAT(descriptor.FrameDependenciesDiffs(), ElementsAre(kDiff)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, WriteMaxLongFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, WriteMaxLongFrameDependencies) { constexpr uint16_t kDiff = 0x3fff; - uint8_t kRaw[] = {0x88, 0x01, 0x00, 0x00, 0xfe, 0xff}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = {0x88 | kDeprecatedFlags, 0x01, 0x00, 0x00, 0xfe, 0xff}; RtpGenericFrameDescriptor descriptor; 
descriptor.SetFirstPacketInSubFrame(true); descriptor.AddFrameDependencyDiff(kDiff); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, ParseTwoFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, ParseTwoFrameDependencies) { constexpr uint16_t kDiff1 = 9; constexpr uint16_t kDiff2 = 15; constexpr uint8_t kRaw[] = { 0xb8, 0x01, 0x00, 0x00, (kDiff1 << 2) | 0x01, kDiff2 << 2}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); ASSERT_TRUE(descriptor.FirstPacketInSubFrame()); EXPECT_THAT(descriptor.FrameDependenciesDiffs(), ElementsAre(kDiff1, kDiff2)); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, WriteTwoFrameDependencies) { +TEST(RtpGenericFrameDescriptorExtensionTest, WriteTwoFrameDependencies) { constexpr uint16_t kDiff1 = 9; constexpr uint16_t kDiff2 = 15; - uint8_t kRaw[] = {0x88, 0x01, 0x00, 0x00, (kDiff1 << 2) | 0x01, kDiff2 << 2}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = {0x88 | kDeprecatedFlags, 0x01, 0x00, 0x00, + (kDiff1 << 2) | 0x01, kDiff2 << 2}; RtpGenericFrameDescriptor descriptor; descriptor.SetFirstPacketInSubFrame(true); descriptor.AddFrameDependencyDiff(kDiff1); descriptor.AddFrameDependencyDiff(kDiff2); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } 
-TEST_P(RtpGenericFrameDescriptorExtensionTest, - ParseResolutionOnIndependentFrame) { +TEST(RtpGenericFrameDescriptorExtensionTest, + ParseResolutionOnIndependentFrame) { constexpr int kWidth = 0x2468; constexpr int kHeight = 0x6543; constexpr uint8_t kRaw[] = {0xb0, 0x01, 0x00, 0x00, 0x24, 0x68, 0x65, 0x43}; RtpGenericFrameDescriptor descriptor; - ASSERT_TRUE(Parse(kRaw, &descriptor)); + ASSERT_TRUE(RtpGenericFrameDescriptorExtension00::Parse(kRaw, &descriptor)); EXPECT_EQ(descriptor.Width(), kWidth); EXPECT_EQ(descriptor.Height(), kHeight); } -TEST_P(RtpGenericFrameDescriptorExtensionTest, - WriteResolutionOnIndependentFrame) { +TEST(RtpGenericFrameDescriptorExtensionTest, + WriteResolutionOnIndependentFrame) { constexpr int kWidth = 0x2468; constexpr int kHeight = 0x6543; - uint8_t kRaw[] = {0x80, 0x01, 0x00, 0x00, 0x24, 0x68, 0x65, 0x43}; - if (version_ == 0) { - kRaw[0] |= kDeprecatedFlags; - } + uint8_t kRaw[] = { + 0x80 | kDeprecatedFlags, 0x01, 0x00, 0x00, 0x24, 0x68, 0x65, 0x43}; RtpGenericFrameDescriptor descriptor; descriptor.SetFirstPacketInSubFrame(true); descriptor.SetResolution(kWidth, kHeight); - ASSERT_EQ(ValueSize(descriptor), sizeof(kRaw)); + ASSERT_EQ(RtpGenericFrameDescriptorExtension00::ValueSize(descriptor), + sizeof(kRaw)); uint8_t buffer[sizeof(kRaw)]; - EXPECT_TRUE(Write(buffer, descriptor)); + EXPECT_TRUE(RtpGenericFrameDescriptorExtension00::Write(buffer, descriptor)); EXPECT_THAT(buffer, ElementsAreArray(kRaw)); } } // namespace diff --git a/modules/rtp_rtcp/source/rtp_header_extension_map.cc b/modules/rtp_rtcp/source/rtp_header_extension_map.cc index 06f2e928f9..c16dcaf6f7 100644 --- a/modules/rtp_rtcp/source/rtp_header_extension_map.cc +++ b/modules/rtp_rtcp/source/rtp_header_extension_map.cc @@ -13,6 +13,7 @@ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include 
"modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -40,13 +41,12 @@ constexpr ExtensionInfo kExtensions[] = { CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), + CreateExtensionInfo(), CreateExtensionInfo(), - CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), - CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.cc b/modules/rtp_rtcp/source/rtp_header_extensions.cc index fefe6c618f..b540e4b22e 100644 --- a/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -371,7 +371,7 @@ constexpr uint8_t PlayoutDelayLimits::kValueSizeBytes; constexpr const char PlayoutDelayLimits::kUri[]; bool PlayoutDelayLimits::Parse(rtc::ArrayView data, - PlayoutDelay* playout_delay) { + VideoPlayoutDelay* playout_delay) { RTC_DCHECK(playout_delay); if (data.size() != 3) return false; @@ -386,7 +386,7 @@ bool PlayoutDelayLimits::Parse(rtc::ArrayView data, } bool PlayoutDelayLimits::Write(rtc::ArrayView data, - const PlayoutDelay& playout_delay) { + const VideoPlayoutDelay& playout_delay) { RTC_DCHECK_EQ(data.size(), 3); RTC_DCHECK_LE(0, playout_delay.min_ms); RTC_DCHECK_LE(playout_delay.min_ms, playout_delay.max_ms); @@ -525,86 +525,6 @@ bool VideoTimingExtension::Write(rtc::ArrayView data, return true; } -// Frame Marking. -// -// Meta-information about an RTP stream outside the encrypted media payload, -// useful for an RTP switch to do codec-agnostic selective forwarding -// without decrypting the payload. 
-// -// For non-scalable streams: -// 0 1 -// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | ID | L = 0 |S|E|I|D|0 0 0 0| -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// -// For scalable streams: -// 0 1 2 3 -// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | ID | L = 2 |S|E|I|D|B| TID | LID | TL0PICIDX | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ - -constexpr RTPExtensionType FrameMarkingExtension::kId; -constexpr const char FrameMarkingExtension::kUri[]; - -bool FrameMarkingExtension::IsScalable(uint8_t temporal_id, uint8_t layer_id) { - return temporal_id != kNoTemporalIdx || layer_id != kNoSpatialIdx; -} - -bool FrameMarkingExtension::Parse(rtc::ArrayView data, - FrameMarking* frame_marking) { - RTC_DCHECK(frame_marking); - - if (data.size() != 1 && data.size() != 3) - return false; - - frame_marking->start_of_frame = (data[0] & 0x80) != 0; - frame_marking->end_of_frame = (data[0] & 0x40) != 0; - frame_marking->independent_frame = (data[0] & 0x20) != 0; - frame_marking->discardable_frame = (data[0] & 0x10) != 0; - - if (data.size() == 3) { - frame_marking->base_layer_sync = (data[0] & 0x08) != 0; - frame_marking->temporal_id = data[0] & 0x7; - frame_marking->layer_id = data[1]; - frame_marking->tl0_pic_idx = data[2]; - } else { - // non-scalable - frame_marking->base_layer_sync = false; - frame_marking->temporal_id = kNoTemporalIdx; - frame_marking->layer_id = kNoSpatialIdx; - frame_marking->tl0_pic_idx = 0; - } - return true; -} - -size_t FrameMarkingExtension::ValueSize(const FrameMarking& frame_marking) { - if (IsScalable(frame_marking.temporal_id, frame_marking.layer_id)) - return 3; - else - return 1; -} - -bool FrameMarkingExtension::Write(rtc::ArrayView data, - const FrameMarking& frame_marking) { - RTC_DCHECK_GE(data.size(), 1); - RTC_CHECK_LE(frame_marking.temporal_id, 0x07); - data[0] = 
frame_marking.start_of_frame ? 0x80 : 0x00; - data[0] |= frame_marking.end_of_frame ? 0x40 : 0x00; - data[0] |= frame_marking.independent_frame ? 0x20 : 0x00; - data[0] |= frame_marking.discardable_frame ? 0x10 : 0x00; - - if (IsScalable(frame_marking.temporal_id, frame_marking.layer_id)) { - RTC_DCHECK_EQ(data.size(), 3); - data[0] |= frame_marking.base_layer_sync ? 0x08 : 0x00; - data[0] |= frame_marking.temporal_id & 0x07; - data[1] = frame_marking.layer_id; - data[2] = frame_marking.tl0_pic_idx; - } - return true; -} - // Color space including HDR metadata as an optional field. // // RTP header extension to carry color space information and optionally HDR diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.h b/modules/rtp_rtcp/source/rtp_header_extensions.h index f4517bb513..1352611fb1 100644 --- a/modules/rtp_rtcp/source/rtp_header_extensions.h +++ b/modules/rtp_rtcp/source/rtp_header_extensions.h @@ -19,7 +19,6 @@ #include "api/rtp_headers.h" #include "api/video/color_space.h" #include "api/video/video_content_type.h" -#include "api/video/video_frame_marking.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -149,7 +148,7 @@ class VideoOrientation { class PlayoutDelayLimits { public: - using value_type = PlayoutDelay; + using value_type = VideoPlayoutDelay; static constexpr RTPExtensionType kId = kRtpExtensionPlayoutDelay; static constexpr uint8_t kValueSizeBytes = 3; static constexpr const char kUri[] = @@ -163,10 +162,10 @@ class PlayoutDelayLimits { static constexpr int kMaxMs = 0xfff * kGranularityMs; // 40950. 
static bool Parse(rtc::ArrayView data, - PlayoutDelay* playout_delay); - static size_t ValueSize(const PlayoutDelay&) { return kValueSizeBytes; } + VideoPlayoutDelay* playout_delay); + static size_t ValueSize(const VideoPlayoutDelay&) { return kValueSizeBytes; } static bool Write(rtc::ArrayView data, - const PlayoutDelay& playout_delay); + const VideoPlayoutDelay& playout_delay); }; class VideoContentTypeExtension { @@ -217,23 +216,6 @@ class VideoTimingExtension { uint8_t offset); }; -class FrameMarkingExtension { - public: - using value_type = FrameMarking; - static constexpr RTPExtensionType kId = kRtpExtensionFrameMarking; - static constexpr const char kUri[] = - "http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07"; - - static bool Parse(rtc::ArrayView data, - FrameMarking* frame_marking); - static size_t ValueSize(const FrameMarking& frame_marking); - static bool Write(rtc::ArrayView data, - const FrameMarking& frame_marking); - - private: - static bool IsScalable(uint8_t temporal_id, uint8_t layer_id); -}; - class ColorSpaceExtension { public: using value_type = ColorSpace; diff --git a/modules/rtp_rtcp/source/rtp_packet.cc b/modules/rtp_rtcp/source/rtp_packet.cc index 56438283e4..38d29cc2b4 100644 --- a/modules/rtp_rtcp/source/rtp_packet.cc +++ b/modules/rtp_rtcp/source/rtp_packet.cc @@ -188,9 +188,7 @@ void RtpPacket::ZeroMutableExtensions() { case RTPExtensionType::kRtpExtensionAudioLevel: case RTPExtensionType::kRtpExtensionAbsoluteCaptureTime: case RTPExtensionType::kRtpExtensionColorSpace: - case RTPExtensionType::kRtpExtensionFrameMarking: case RTPExtensionType::kRtpExtensionGenericFrameDescriptor00: - case RTPExtensionType::kRtpExtensionGenericFrameDescriptor01: case RTPExtensionType::kRtpExtensionGenericFrameDescriptor02: case RTPExtensionType::kRtpExtensionMid: case RTPExtensionType::kRtpExtensionNumberOfExtensions: @@ -198,6 +196,7 @@ void RtpPacket::ZeroMutableExtensions() { case RTPExtensionType::kRtpExtensionRepairedRtpStreamId: case 
RTPExtensionType::kRtpExtensionRtpStreamId: case RTPExtensionType::kRtpExtensionVideoContentType: + case RTPExtensionType::kRtpExtensionVideoLayersAllocation: case RTPExtensionType::kRtpExtensionVideoRotation: case RTPExtensionType::kRtpExtensionInbandComfortNoise: { // Non-mutable extension. Don't change it. diff --git a/modules/rtp_rtcp/source/rtp_packet_history.cc b/modules/rtp_rtcp/source/rtp_packet_history.cc index 6a2253cd64..1fbfb7651d 100644 --- a/modules/rtp_rtcp/source/rtp_packet_history.cc +++ b/modules/rtp_rtcp/source/rtp_packet_history.cc @@ -56,7 +56,7 @@ void RtpPacketHistory::StoredPacket::IncrementTimesRetransmitted( // Check if this StoredPacket is in the priority set. If so, we need to remove // it before updating |times_retransmitted_| since that is used in sorting, // and then add it back. - const bool in_priority_set = priority_set->erase(this) > 0; + const bool in_priority_set = priority_set && priority_set->erase(this) > 0; ++times_retransmitted_; if (in_priority_set) { auto it = priority_set->insert(this); @@ -80,8 +80,9 @@ bool RtpPacketHistory::MoreUseful::operator()(StoredPacket* lhs, return lhs->insert_order() > rhs->insert_order(); } -RtpPacketHistory::RtpPacketHistory(Clock* clock) +RtpPacketHistory::RtpPacketHistory(Clock* clock, bool enable_padding_prio) : clock_(clock), + enable_padding_prio_(enable_padding_prio), number_to_store_(0), mode_(StorageMode::kDisabled), rtt_ms_(-1), @@ -92,7 +93,7 @@ RtpPacketHistory::~RtpPacketHistory() {} void RtpPacketHistory::SetStorePacketsStatus(StorageMode mode, size_t number_to_store) { RTC_DCHECK_LE(number_to_store, kMaxCapacity); - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mode != StorageMode::kDisabled && mode_ != StorageMode::kDisabled) { RTC_LOG(LS_WARNING) << "Purging packet history in order to re-set status."; } @@ -102,12 +103,12 @@ void RtpPacketHistory::SetStorePacketsStatus(StorageMode mode, } RtpPacketHistory::StorageMode RtpPacketHistory::GetStorageMode() const { - 
rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); return mode_; } void RtpPacketHistory::SetRtt(int64_t rtt_ms) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); RTC_DCHECK_GE(rtt_ms, 0); rtt_ms_ = rtt_ms; // If storage is not disabled, packets will be removed after a timeout @@ -121,7 +122,7 @@ void RtpPacketHistory::SetRtt(int64_t rtt_ms) { void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, absl::optional send_time_ms) { RTC_DCHECK(packet); - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); int64_t now_ms = clock_->TimeInMilliseconds(); if (mode_ == StorageMode::kDisabled) { return; @@ -158,16 +159,18 @@ void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, packet_history_[packet_index] = StoredPacket(std::move(packet), send_time_ms, packets_inserted_++); - if (padding_priority_.size() >= kMaxPaddingtHistory - 1) { - padding_priority_.erase(std::prev(padding_priority_.end())); + if (enable_padding_prio_) { + if (padding_priority_.size() >= kMaxPaddingtHistory - 1) { + padding_priority_.erase(std::prev(padding_priority_.end())); + } + auto prio_it = padding_priority_.insert(&packet_history_[packet_index]); + RTC_DCHECK(prio_it.second) << "Failed to insert packet into prio set."; } - auto prio_it = padding_priority_.insert(&packet_history_[packet_index]); - RTC_DCHECK(prio_it.second) << "Failed to insert packet into prio set."; } std::unique_ptr RtpPacketHistory::GetPacketAndSetSendTime( uint16_t sequence_number) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { return nullptr; } @@ -183,7 +186,8 @@ std::unique_ptr RtpPacketHistory::GetPacketAndSetSendTime( } if (packet->send_time_ms_) { - packet->IncrementTimesRetransmitted(&padding_priority_); + packet->IncrementTimesRetransmitted( + enable_padding_prio_ ? &padding_priority_ : nullptr); } // Update send-time and mark as no long in pacer queue. 
@@ -206,7 +210,7 @@ std::unique_ptr RtpPacketHistory::GetPacketAndMarkAsPending( uint16_t sequence_number, rtc::FunctionView(const RtpPacketToSend&)> encapsulate) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { return nullptr; } @@ -237,7 +241,7 @@ std::unique_ptr RtpPacketHistory::GetPacketAndMarkAsPending( } void RtpPacketHistory::MarkPacketAsSent(uint16_t sequence_number) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { return; } @@ -253,12 +257,13 @@ void RtpPacketHistory::MarkPacketAsSent(uint16_t sequence_number) { // transmission count. packet->send_time_ms_ = clock_->TimeInMilliseconds(); packet->pending_transmission_ = false; - packet->IncrementTimesRetransmitted(&padding_priority_); + packet->IncrementTimesRetransmitted(enable_padding_prio_ ? &padding_priority_ + : nullptr); } absl::optional RtpPacketHistory::GetPacketState( uint16_t sequence_number) const { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { return absl::nullopt; } @@ -306,13 +311,29 @@ std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket() { std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket( rtc::FunctionView(const RtpPacketToSend&)> encapsulate) { - rtc::CritScope cs(&lock_); - if (mode_ == StorageMode::kDisabled || padding_priority_.empty()) { + MutexLock lock(&lock_); + if (mode_ == StorageMode::kDisabled) { + return nullptr; + } + + StoredPacket* best_packet = nullptr; + if (enable_padding_prio_ && !padding_priority_.empty()) { + auto best_packet_it = padding_priority_.begin(); + best_packet = *best_packet_it; + } else if (!enable_padding_prio_ && !packet_history_.empty()) { + // Prioritization not available, pick the last packet. 
+ for (auto it = packet_history_.rbegin(); it != packet_history_.rend(); + ++it) { + if (it->packet_ != nullptr) { + best_packet = &(*it); + break; + } + } + } + if (best_packet == nullptr) { return nullptr; } - auto best_packet_it = padding_priority_.begin(); - StoredPacket* best_packet = *best_packet_it; if (best_packet->pending_transmission_) { // Because PacedSender releases it's lock when it calls // GeneratePadding() there is the potential for a race where a new @@ -328,14 +349,15 @@ std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket( } best_packet->send_time_ms_ = clock_->TimeInMilliseconds(); - best_packet->IncrementTimesRetransmitted(&padding_priority_); + best_packet->IncrementTimesRetransmitted( + enable_padding_prio_ ? &padding_priority_ : nullptr); return padding_packet; } void RtpPacketHistory::CullAcknowledgedPackets( rtc::ArrayView sequence_numbers) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); for (uint16_t sequence_number : sequence_numbers) { int packet_index = GetPacketIndex(sequence_number); if (packet_index < 0 || @@ -347,7 +369,7 @@ void RtpPacketHistory::CullAcknowledgedPackets( } bool RtpPacketHistory::SetPendingTransmission(uint16_t sequence_number) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { return false; } @@ -362,7 +384,7 @@ bool RtpPacketHistory::SetPendingTransmission(uint16_t sequence_number) { } void RtpPacketHistory::Clear() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); Reset(); } @@ -414,7 +436,9 @@ std::unique_ptr RtpPacketHistory::RemovePacket( std::move(packet_history_[packet_index].packet_); // Erase from padding priority set, if eligible. 
- padding_priority_.erase(&packet_history_[packet_index]); + if (enable_padding_prio_) { + padding_priority_.erase(&packet_history_[packet_index]); + } if (packet_index == 0) { while (!packet_history_.empty() && diff --git a/modules/rtp_rtcp/source/rtp_packet_history.h b/modules/rtp_rtcp/source/rtp_packet_history.h index 9253ede4fa..4a2bf91bd7 100644 --- a/modules/rtp_rtcp/source/rtp_packet_history.h +++ b/modules/rtp_rtcp/source/rtp_packet_history.h @@ -19,8 +19,7 @@ #include "api/function_view.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -62,7 +61,12 @@ class RtpPacketHistory { // With kStoreAndCull, always remove packets after 3x max(1000ms, 3x rtt). static constexpr int kPacketCullingDelayFactor = 3; - explicit RtpPacketHistory(Clock* clock); + RtpPacketHistory(Clock* clock, bool enable_padding_prio); + + RtpPacketHistory() = delete; + RtpPacketHistory(const RtpPacketHistory&) = delete; + RtpPacketHistory& operator=(const RtpPacketHistory&) = delete; + ~RtpPacketHistory(); // Set/get storage mode. Note that setting the state will clear the history, @@ -192,7 +196,8 @@ class RtpPacketHistory { const StoredPacket& stored_packet); Clock* const clock_; - rtc::CriticalSection lock_; + const bool enable_padding_prio_; + mutable Mutex lock_; size_t number_to_store_ RTC_GUARDED_BY(lock_); StorageMode mode_ RTC_GUARDED_BY(lock_); int64_t rtt_ms_ RTC_GUARDED_BY(lock_); @@ -210,8 +215,6 @@ class RtpPacketHistory { // Objects from |packet_history_| ordered by "most likely to be useful", used // in GetPayloadPaddingPacket(). 
PacketPrioritySet padding_priority_ RTC_GUARDED_BY(lock_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtpPacketHistory); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_ diff --git a/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc index fdf64d51bf..2331724397 100644 --- a/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc @@ -32,9 +32,11 @@ uint16_t To16u(size_t sequence_number) { using StorageMode = RtpPacketHistory::StorageMode; -class RtpPacketHistoryTest : public ::testing::Test { +class RtpPacketHistoryTest : public ::testing::TestWithParam { protected: - RtpPacketHistoryTest() : fake_clock_(123456), hist_(&fake_clock_) {} + RtpPacketHistoryTest() + : fake_clock_(123456), + hist_(&fake_clock_, /*enable_padding_prio=*/GetParam()) {} SimulatedClock fake_clock_; RtpPacketHistory hist_; @@ -49,7 +51,7 @@ class RtpPacketHistoryTest : public ::testing::Test { } }; -TEST_F(RtpPacketHistoryTest, SetStoreStatus) { +TEST_P(RtpPacketHistoryTest, SetStoreStatus) { EXPECT_EQ(StorageMode::kDisabled, hist_.GetStorageMode()); hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); EXPECT_EQ(StorageMode::kStoreAndCull, hist_.GetStorageMode()); @@ -59,7 +61,7 @@ TEST_F(RtpPacketHistoryTest, SetStoreStatus) { EXPECT_EQ(StorageMode::kDisabled, hist_.GetStorageMode()); } -TEST_F(RtpPacketHistoryTest, ClearsHistoryAfterSetStoreStatus) { +TEST_P(RtpPacketHistoryTest, ClearsHistoryAfterSetStoreStatus) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); // Store a packet, but with send-time. It should then not be removed. 
hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum), absl::nullopt); @@ -70,7 +72,7 @@ TEST_F(RtpPacketHistoryTest, ClearsHistoryAfterSetStoreStatus) { EXPECT_FALSE(hist_.GetPacketState(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, StartSeqResetAfterReset) { +TEST_P(RtpPacketHistoryTest, StartSeqResetAfterReset) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); // Store a packet, but with send-time. It should then not be removed. hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum), absl::nullopt); @@ -96,7 +98,7 @@ TEST_F(RtpPacketHistoryTest, StartSeqResetAfterReset) { EXPECT_TRUE(hist_.GetPacketState(To16u(kStartSeqNum + 2))); } -TEST_F(RtpPacketHistoryTest, NoStoreStatus) { +TEST_P(RtpPacketHistoryTest, NoStoreStatus) { EXPECT_EQ(StorageMode::kDisabled, hist_.GetStorageMode()); std::unique_ptr packet = CreateRtpPacket(kStartSeqNum); hist_.PutRtpPacket(std::move(packet), absl::nullopt); @@ -104,12 +106,12 @@ TEST_F(RtpPacketHistoryTest, NoStoreStatus) { EXPECT_FALSE(hist_.GetPacketState(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, GetRtpPacket_NotStored) { +TEST_P(RtpPacketHistoryTest, GetRtpPacket_NotStored) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); EXPECT_FALSE(hist_.GetPacketState(0)); } -TEST_F(RtpPacketHistoryTest, PutRtpPacket) { +TEST_P(RtpPacketHistoryTest, PutRtpPacket) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); std::unique_ptr packet = CreateRtpPacket(kStartSeqNum); @@ -118,7 +120,7 @@ TEST_F(RtpPacketHistoryTest, PutRtpPacket) { EXPECT_TRUE(hist_.GetPacketState(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, GetRtpPacket) { +TEST_P(RtpPacketHistoryTest, GetRtpPacket) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); int64_t capture_time_ms = 1; std::unique_ptr packet = CreateRtpPacket(kStartSeqNum); @@ -133,7 +135,7 @@ TEST_F(RtpPacketHistoryTest, GetRtpPacket) { EXPECT_EQ(capture_time_ms, packet_out->capture_time_ms()); } -TEST_F(RtpPacketHistoryTest, PacketStateIsCorrect) { 
+TEST_P(RtpPacketHistoryTest, PacketStateIsCorrect) { const uint32_t kSsrc = 92384762; const int64_t kRttMs = 100; hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); @@ -164,7 +166,7 @@ TEST_F(RtpPacketHistoryTest, PacketStateIsCorrect) { EXPECT_EQ(state->times_retransmitted, 1u); } -TEST_F(RtpPacketHistoryTest, MinResendTimeWithPacer) { +TEST_P(RtpPacketHistoryTest, MinResendTimeWithPacer) { static const int64_t kMinRetransmitIntervalMs = 100; hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); @@ -205,7 +207,7 @@ TEST_F(RtpPacketHistoryTest, MinResendTimeWithPacer) { EXPECT_TRUE(hist_.GetPacketAndSetSendTime(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, MinResendTimeWithoutPacer) { +TEST_P(RtpPacketHistoryTest, MinResendTimeWithoutPacer) { static const int64_t kMinRetransmitIntervalMs = 100; hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); @@ -231,7 +233,7 @@ TEST_F(RtpPacketHistoryTest, MinResendTimeWithoutPacer) { EXPECT_TRUE(hist_.GetPacketAndSetSendTime(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, RemovesOldestSentPacketWhenAtMaxSize) { +TEST_P(RtpPacketHistoryTest, RemovesOldestSentPacketWhenAtMaxSize) { const size_t kMaxNumPackets = 10; hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, kMaxNumPackets); @@ -262,7 +264,7 @@ TEST_F(RtpPacketHistoryTest, RemovesOldestSentPacketWhenAtMaxSize) { EXPECT_TRUE(hist_.GetPacketState(To16u(kStartSeqNum + 1))); } -TEST_F(RtpPacketHistoryTest, RemovesOldestPacketWhenAtMaxCapacity) { +TEST_P(RtpPacketHistoryTest, RemovesOldestPacketWhenAtMaxCapacity) { // Tests the absolute upper bound on number of stored packets. Don't allow // storing more than this, even if packets have not yet been sent. 
const size_t kMaxNumPackets = RtpPacketHistory::kMaxCapacity; @@ -290,7 +292,12 @@ TEST_F(RtpPacketHistoryTest, RemovesOldestPacketWhenAtMaxCapacity) { EXPECT_TRUE(hist_.GetPacketState(To16u(kStartSeqNum + 1))); } -TEST_F(RtpPacketHistoryTest, RemovesLowestPrioPaddingWhenAtMaxCapacity) { +TEST_P(RtpPacketHistoryTest, RemovesLowestPrioPaddingWhenAtMaxCapacity) { + if (!GetParam()) { + // Padding prioritization is off, ignore this test. + return; + } + // Tests the absolute upper bound on number of packets in the prioritized // set of potential padding packets. const size_t kMaxNumPackets = RtpPacketHistory::kMaxPaddingtHistory; @@ -322,7 +329,7 @@ TEST_F(RtpPacketHistoryTest, RemovesLowestPrioPaddingWhenAtMaxCapacity) { EXPECT_EQ(packet->SequenceNumber(), To16u(kStartSeqNum + kMaxNumPackets)); } -TEST_F(RtpPacketHistoryTest, DontRemoveUnsentPackets) { +TEST_P(RtpPacketHistoryTest, DontRemoveUnsentPackets) { const size_t kMaxNumPackets = 10; hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, kMaxNumPackets); @@ -355,7 +362,7 @@ TEST_F(RtpPacketHistoryTest, DontRemoveUnsentPackets) { EXPECT_TRUE(hist_.GetPacketState(To16u(kStartSeqNum + 2))); } -TEST_F(RtpPacketHistoryTest, DontRemoveTooRecentlyTransmittedPackets) { +TEST_P(RtpPacketHistoryTest, DontRemoveTooRecentlyTransmittedPackets) { // Set size to remove old packets as soon as possible. 
hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); @@ -380,7 +387,7 @@ TEST_F(RtpPacketHistoryTest, DontRemoveTooRecentlyTransmittedPackets) { EXPECT_TRUE(hist_.GetPacketState(To16u(kStartSeqNum + 1))); } -TEST_F(RtpPacketHistoryTest, DontRemoveTooRecentlyTransmittedPacketsHighRtt) { +TEST_P(RtpPacketHistoryTest, DontRemoveTooRecentlyTransmittedPacketsHighRtt) { const int64_t kRttMs = RtpPacketHistory::kMinPacketDurationMs * 2; const int64_t kPacketTimeoutMs = kRttMs * RtpPacketHistory::kMinPacketDurationRtt; @@ -409,7 +416,7 @@ TEST_F(RtpPacketHistoryTest, DontRemoveTooRecentlyTransmittedPacketsHighRtt) { EXPECT_TRUE(hist_.GetPacketState(To16u(kStartSeqNum + 1))); } -TEST_F(RtpPacketHistoryTest, RemovesOldWithCulling) { +TEST_P(RtpPacketHistoryTest, RemovesOldWithCulling) { const size_t kMaxNumPackets = 10; // Enable culling. Even without feedback, this can trigger early removal. hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, kMaxNumPackets); @@ -432,7 +439,7 @@ TEST_F(RtpPacketHistoryTest, RemovesOldWithCulling) { EXPECT_FALSE(hist_.GetPacketState(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, RemovesOldWithCullingHighRtt) { +TEST_P(RtpPacketHistoryTest, RemovesOldWithCullingHighRtt) { const size_t kMaxNumPackets = 10; const int64_t kRttMs = RtpPacketHistory::kMinPacketDurationMs * 2; // Enable culling. Even without feedback, this can trigger early removal. 
@@ -458,7 +465,7 @@ TEST_F(RtpPacketHistoryTest, RemovesOldWithCullingHighRtt) { EXPECT_FALSE(hist_.GetPacketState(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, CullWithAcks) { +TEST_P(RtpPacketHistoryTest, CullWithAcks) { const int64_t kPacketLifetime = RtpPacketHistory::kMinPacketDurationMs * RtpPacketHistory::kPacketCullingDelayFactor; @@ -511,7 +518,7 @@ TEST_F(RtpPacketHistoryTest, CullWithAcks) { EXPECT_FALSE(hist_.GetPacketState(To16u(kStartSeqNum + 2)).has_value()); } -TEST_F(RtpPacketHistoryTest, SetsPendingTransmissionState) { +TEST_P(RtpPacketHistoryTest, SetsPendingTransmissionState) { const int64_t kRttMs = RtpPacketHistory::kMinPacketDurationMs * 2; hist_.SetRtt(kRttMs); @@ -553,7 +560,7 @@ TEST_F(RtpPacketHistoryTest, SetsPendingTransmissionState) { EXPECT_FALSE(packet_state->pending_transmission); } -TEST_F(RtpPacketHistoryTest, GetPacketAndSetSent) { +TEST_P(RtpPacketHistoryTest, GetPacketAndSetSent) { const int64_t kRttMs = RtpPacketHistory::kMinPacketDurationMs * 2; hist_.SetRtt(kRttMs); @@ -580,7 +587,7 @@ TEST_F(RtpPacketHistoryTest, GetPacketAndSetSent) { EXPECT_TRUE(hist_.GetPacketAndMarkAsPending(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, GetPacketWithEncapsulation) { +TEST_P(RtpPacketHistoryTest, GetPacketWithEncapsulation) { const uint32_t kSsrc = 92384762; const int64_t kRttMs = RtpPacketHistory::kMinPacketDurationMs * 2; hist_.SetRtt(kRttMs); @@ -607,7 +614,7 @@ TEST_F(RtpPacketHistoryTest, GetPacketWithEncapsulation) { EXPECT_EQ(retransmit_packet->Ssrc(), kSsrc + 1); } -TEST_F(RtpPacketHistoryTest, GetPacketWithEncapsulationAbortOnNullptr) { +TEST_P(RtpPacketHistoryTest, GetPacketWithEncapsulationAbortOnNullptr) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum), @@ -617,14 +624,14 @@ TEST_F(RtpPacketHistoryTest, GetPacketWithEncapsulationAbortOnNullptr) { // not suitable for retransmission (bandwidth exhausted?) 
so the retransmit is // aborted and the packet is not marked as pending. EXPECT_FALSE(hist_.GetPacketAndMarkAsPending( - kStartSeqNum, [](const RtpPacketToSend& packet) { return nullptr; })); + kStartSeqNum, [](const RtpPacketToSend&) { return nullptr; })); // New try, this time getting the packet should work, and it should not be // blocked due to any pending status. EXPECT_TRUE(hist_.GetPacketAndMarkAsPending(kStartSeqNum)); } -TEST_F(RtpPacketHistoryTest, DontRemovePendingTransmissions) { +TEST_P(RtpPacketHistoryTest, DontRemovePendingTransmissions) { const int64_t kRttMs = RtpPacketHistory::kMinPacketDurationMs * 2; const int64_t kPacketTimeoutMs = kRttMs * RtpPacketHistory::kMinPacketDurationRtt; @@ -657,7 +664,12 @@ TEST_F(RtpPacketHistoryTest, DontRemovePendingTransmissions) { ASSERT_FALSE(packet_state.has_value()); } -TEST_F(RtpPacketHistoryTest, PrioritizedPayloadPadding) { +TEST_P(RtpPacketHistoryTest, PrioritizedPayloadPadding) { + if (!GetParam()) { + // Padding prioritization is off, ignore this test. + return; + } + hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); // Add two sent packets, one millisecond apart. 
@@ -694,7 +706,7 @@ TEST_F(RtpPacketHistoryTest, PrioritizedPayloadPadding) { EXPECT_EQ(hist_.GetPayloadPaddingPacket(), nullptr); } -TEST_F(RtpPacketHistoryTest, NoPendingPacketAsPadding) { +TEST_P(RtpPacketHistoryTest, NoPendingPacketAsPadding) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum), @@ -712,7 +724,7 @@ TEST_F(RtpPacketHistoryTest, NoPendingPacketAsPadding) { EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), kStartSeqNum); } -TEST_F(RtpPacketHistoryTest, PayloadPaddingWithEncapsulation) { +TEST_P(RtpPacketHistoryTest, PayloadPaddingWithEncapsulation) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum), @@ -720,9 +732,8 @@ TEST_F(RtpPacketHistoryTest, PayloadPaddingWithEncapsulation) { fake_clock_.AdvanceTimeMilliseconds(1); // Aborted padding. - EXPECT_EQ(nullptr, - hist_.GetPayloadPaddingPacket( - [](const RtpPacketToSend& packet) { return nullptr; })); + EXPECT_EQ(nullptr, hist_.GetPayloadPaddingPacket( + [](const RtpPacketToSend&) { return nullptr; })); // Get copy of packet, but with sequence number modified. auto padding_packet = @@ -735,7 +746,7 @@ TEST_F(RtpPacketHistoryTest, PayloadPaddingWithEncapsulation) { EXPECT_EQ(padding_packet->SequenceNumber(), kStartSeqNum + 1); } -TEST_F(RtpPacketHistoryTest, NackAfterAckIsNoop) { +TEST_P(RtpPacketHistoryTest, NackAfterAckIsNoop) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 2); // Add two sent packets. 
hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum), @@ -749,7 +760,7 @@ TEST_F(RtpPacketHistoryTest, NackAfterAckIsNoop) { EXPECT_EQ(packet.get(), nullptr); } -TEST_F(RtpPacketHistoryTest, OutOfOrderInsertRemoval) { +TEST_P(RtpPacketHistoryTest, OutOfOrderInsertRemoval) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); // Insert packets, out of order, including both forwards and backwards @@ -780,4 +791,51 @@ TEST_F(RtpPacketHistoryTest, OutOfOrderInsertRemoval) { expected_time_offset_ms += 33; } } + +TEST_P(RtpPacketHistoryTest, UsesLastPacketAsPaddingWithPrioOff) { + if (GetParam()) { + // Padding prioritization is enabled, ignore this test. + return; + } + + const size_t kHistorySize = 10; + hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, kHistorySize); + + EXPECT_EQ(hist_.GetPayloadPaddingPacket(), nullptr); + + for (size_t i = 0; i < kHistorySize; ++i) { + hist_.PutRtpPacket(CreateRtpPacket(To16u(kStartSeqNum + i)), + fake_clock_.TimeInMilliseconds()); + hist_.MarkPacketAsSent(To16u(kStartSeqNum + i)); + fake_clock_.AdvanceTimeMilliseconds(1); + + // Last packet always returned. + EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), + To16u(kStartSeqNum + i)); + EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), + To16u(kStartSeqNum + i)); + EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), + To16u(kStartSeqNum + i)); + } + + // Remove packets from the end, last in the list should be returned. 
+ for (size_t i = kHistorySize - 1; i > 0; --i) { + hist_.CullAcknowledgedPackets( + std::vector{To16u(kStartSeqNum + i)}); + + EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), + To16u(kStartSeqNum + i - 1)); + EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), + To16u(kStartSeqNum + i - 1)); + EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), + To16u(kStartSeqNum + i - 1)); + } + + hist_.CullAcknowledgedPackets(std::vector{kStartSeqNum}); + EXPECT_EQ(hist_.GetPayloadPaddingPacket(), nullptr); +} + +INSTANTIATE_TEST_SUITE_P(WithAndWithoutPaddingPrio, + RtpPacketHistoryTest, + ::testing::Bool()); } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_received.cc b/modules/rtp_rtcp/source/rtp_packet_received.cc index 56aea8eb5e..feadee1db1 100644 --- a/modules/rtp_rtcp/source/rtp_packet_received.cc +++ b/modules/rtp_rtcp/source/rtp_packet_received.cc @@ -69,8 +69,6 @@ void RtpPacketReceived::GetHeader(RTPHeader* header) const { &header->extension.videoContentType); header->extension.has_video_timing = GetExtension(&header->extension.video_timing); - header->extension.has_frame_marking = - GetExtension(&header->extension.frame_marking); GetExtension(&header->extension.stream_id); GetExtension(&header->extension.repaired_stream_id); GetExtension(&header->extension.mid); diff --git a/modules/rtp_rtcp/source/rtp_packet_to_send.h b/modules/rtp_rtcp/source/rtp_packet_to_send.h index 07e6ea8588..9aaf9a52e6 100644 --- a/modules/rtp_rtcp/source/rtp_packet_to_send.h +++ b/modules/rtp_rtcp/source/rtp_packet_to_send.h @@ -18,6 +18,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/video/video_timing.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" @@ -25,13 +26,8 @@ namespace webrtc { // Class to hold rtp packet with metadata for sender side. 
class RtpPacketToSend : public RtpPacket { public: - enum class Type { - kAudio, // Audio media packets. - kVideo, // Video media packets. - kRetransmission, // RTX (usually) packets send as response to NACK. - kForwardErrorCorrection, // FEC packets. - kPadding // RTX or plain padding sent to maintain BWE. - }; + // RtpPacketToSend::Type is deprecated. Use RtpPacketMediaType directly. + using Type = RtpPacketMediaType; explicit RtpPacketToSend(const ExtensionManager* extensions); RtpPacketToSend(const ExtensionManager* extensions, size_t capacity); @@ -48,8 +44,10 @@ class RtpPacketToSend : public RtpPacket { void set_capture_time_ms(int64_t time) { capture_time_ms_ = time; } - void set_packet_type(Type type) { packet_type_ = type; } - absl::optional packet_type() const { return packet_type_; } + void set_packet_type(RtpPacketMediaType type) { packet_type_ = type; } + absl::optional packet_type() const { + return packet_type_; + } // If this is a retransmission, indicates the sequence number of the original // media packet that this packet represents. If RTX is used this will likely @@ -100,12 +98,35 @@ class RtpPacketToSend : public RtpPacket { VideoTimingExtension::kNetwork2TimestampDeltaOffset); } + // Indicates if packet is the first packet of a video frame. + void set_first_packet_of_frame(bool is_first_packet) { + is_first_packet_of_frame_ = is_first_packet; + } + bool is_first_packet_of_frame() const { return is_first_packet_of_frame_; } + + // Indicates if packet contains payload for a video key-frame. + void set_is_key_frame(bool is_key_frame) { is_key_frame_ = is_key_frame; } + bool is_key_frame() const { return is_key_frame_; } + + // Indicates if packets should be protected by FEC (Forward Error Correction). 
+ void set_fec_protect_packet(bool protect) { fec_protect_packet_ = protect; } + bool fec_protect_packet() const { return fec_protect_packet_; } + + // Indicates if packet is using RED encapsulation, in accordance with + // https://tools.ietf.org/html/rfc2198 + void set_is_red(bool is_red) { is_red_ = is_red; } + bool is_red() const { return is_red_; } + private: int64_t capture_time_ms_ = 0; - absl::optional packet_type_; + absl::optional packet_type_; bool allow_retransmission_ = false; absl::optional retransmitted_sequence_number_; std::vector application_data_; + bool is_first_packet_of_frame_ = false; + bool is_key_frame_ = false; + bool fec_protect_packet_ = false; + bool is_red_ = false; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_unittest.cc b/modules/rtp_rtcp/source/rtp_packet_unittest.cc index 74736a2ab7..f7f21af41d 100644 --- a/modules/rtp_rtcp/source/rtp_packet_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_packet_unittest.cc @@ -249,7 +249,7 @@ TEST(RtpPacketTest, CreateWithTwoByteHeaderExtensionFirst) { packet.SetTimestamp(kTimestamp); packet.SetSsrc(kSsrc); // Set extension that requires two-byte header. - PlayoutDelay playoutDelay = {30, 340}; + VideoPlayoutDelay playoutDelay = {30, 340}; ASSERT_TRUE(packet.SetExtension(playoutDelay)); packet.SetExtension(kTimeOffset); packet.SetExtension(kVoiceActive, kAudioLevel); @@ -273,7 +273,7 @@ TEST(RtpPacketTest, CreateWithTwoByteHeaderExtensionLast) { EXPECT_THAT(kPacketWithTOAndAL, ElementsAreArray(packet.data(), packet.size())); // Set extension that requires two-byte header. 
- PlayoutDelay playoutDelay = {30, 340}; + VideoPlayoutDelay playoutDelay = {30, 340}; ASSERT_TRUE(packet.SetExtension(playoutDelay)); EXPECT_THAT(kPacketWithTwoByteExtensionIdLast, ElementsAreArray(packet.data(), packet.size())); diff --git a/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc b/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc index 5930f4c5a1..0529e98129 100644 --- a/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc @@ -104,7 +104,7 @@ Av1Frame ReassembleFrame(rtc::ArrayView rtp_payloads) { for (size_t i = 0; i < rtp_payloads.size(); ++i) { payloads[i] = rtp_payloads[i]; } - return Av1Frame(VideoRtpDepacketizerAv1::AssembleFrame(payloads)); + return Av1Frame(VideoRtpDepacketizerAv1().AssembleFrame(payloads)); } class Obu { diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index 987ae0ec59..b2268c7d1c 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -39,8 +39,8 @@ const int64_t kDefaultExpectedRetransmissionTimeMs = 125; } // namespace ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext( - const RtpRtcp::Configuration& config) - : packet_history(config.clock), + const RtpRtcpInterface::Configuration& config) + : packet_history(config.clock, config.enable_rtx_padding_prioritization), packet_sender(config, &packet_history), non_paced_sender(&packet_sender), packet_generator( @@ -48,11 +48,11 @@ ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext( &packet_history, config.paced_sender ? 
config.paced_sender : &non_paced_sender) {} -RtpRtcp::Configuration::Configuration() = default; -RtpRtcp::Configuration::Configuration(Configuration&& rhs) = default; - -std::unique_ptr RtpRtcp::Create(const Configuration& configuration) { +std::unique_ptr RtpRtcp::DEPRECATED_Create( + const Configuration& configuration) { RTC_DCHECK(configuration.clock); + RTC_LOG(LS_ERROR) + << "*********** USING WebRTC INTERNAL IMPLEMENTATION DETAILS ***********"; return std::make_unique(configuration); } @@ -68,7 +68,6 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration) nack_last_time_sent_full_ms_(0), nack_last_seq_number_sent_(0), remote_bitrate_(configuration.remote_bitrate_estimator), - ack_observer_(configuration.ack_observer), rtt_stats_(configuration.rtt_stats), rtt_ms_(0) { if (!configuration.receiver_only) { @@ -97,23 +96,34 @@ int64_t ModuleRtpRtcpImpl::TimeUntilNextProcess() { // Process any pending tasks such as timeouts (non time critical events). void ModuleRtpRtcpImpl::Process() { const int64_t now = clock_->TimeInMilliseconds(); + // TODO(bugs.webrtc.org/11581): Figure out why we need to call Process() 200 + // times a second. next_process_time_ = now + kRtpRtcpMaxIdleTimeProcessMs; if (rtp_sender_) { if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) { rtp_sender_->packet_sender.ProcessBitrateAndNotifyObservers(); last_bitrate_process_time_ = now; + // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function, + // next_process_time_ is incremented by 5ms, here we effectively do a + // std::min() of (now + 5ms, now + 10ms). Seems like this is a no-op? next_process_time_ = std::min(next_process_time_, now + kRtpRtcpBitrateProcessTimeMs); } } + // TODO(bugs.webrtc.org/11581): We update the RTT once a second, whereas other + // things that run in this method are updated much more frequently. Move the + // RTT checking over to the worker thread, which matches better with where the + // stats are maintained. 
bool process_rtt = now >= last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs; if (rtcp_sender_.Sending()) { // Process RTT if we have received a report block and we haven't // processed RTT for at least |kRtpRtcpRttProcessTimeMs| milliseconds. - if (rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_ && - process_rtt) { + // Note that LastReceivedReportBlockMs() grabs a lock, so check + // |process_rtt| first. + if (process_rtt && + rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_) { std::vector receive_blocks; rtcp_receiver_.StatisticsReceived(&receive_blocks); int64_t max_rtt = 0; @@ -130,6 +140,12 @@ void ModuleRtpRtcpImpl::Process() { // Verify receiver reports are delivered and the reported sequence number // is increasing. + // TODO(bugs.webrtc.org/11581): The timeout value needs to be checked every + // few seconds (see internals of RtcpRrTimeout). Here, we may be polling it + // a couple of hundred times a second, which isn't great since it grabs a + // lock. Note also that LastReceivedReportBlockMs() (called above) and + // RtcpRrTimeout() both grab the same lock and check the same timer, so + // it should be possible to consolidate that work somehow. if (rtcp_receiver_.RtcpRrTimeout()) { RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received."; } else if (rtcp_receiver_.RtcpRrSequenceNumberTimeout()) { @@ -160,6 +176,9 @@ void ModuleRtpRtcpImpl::Process() { // Get processed rtt. if (process_rtt) { last_rtt_process_time_ = now; + // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function, + // next_process_time_ is incremented by 5ms, here we effectively do a + // std::min() of (now + 5ms, now + 1000ms). Seems like this is a no-op? 
next_process_time_ = std::min( next_process_time_, last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs); if (rtt_stats_) { @@ -225,6 +244,7 @@ uint32_t ModuleRtpRtcpImpl::StartTimestamp() const { void ModuleRtpRtcpImpl::SetStartTimestamp(const uint32_t timestamp) { rtcp_sender_.SetTimestampOffset(timestamp); rtp_sender_->packet_generator.SetTimestampOffset(timestamp); + rtp_sender_->packet_sender.SetTimestampOffset(timestamp); } uint16_t ModuleRtpRtcpImpl::SequenceNumber() const { @@ -238,7 +258,6 @@ void ModuleRtpRtcpImpl::SetSequenceNumber(const uint16_t seq_num) { void ModuleRtpRtcpImpl::SetRtpState(const RtpState& rtp_state) { rtp_sender_->packet_generator.SetRtpState(rtp_state); - rtp_sender_->packet_sender.SetMediaHasBeenSent(rtp_state.media_has_been_sent); rtcp_sender_.SetTimestampOffset(rtp_state.start_timestamp); } @@ -248,7 +267,6 @@ void ModuleRtpRtcpImpl::SetRtxState(const RtpState& rtp_state) { RtpState ModuleRtpRtcpImpl::GetRtpState() const { RtpState state = rtp_sender_->packet_generator.GetRtpState(); - state.media_has_been_sent = rtp_sender_->packet_sender.MediaHasBeenSent(); return state; } @@ -290,9 +308,9 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { state.media_bytes_sent = rtp_stats.transmitted.payload_bytes + rtx_stats.transmitted.payload_bytes; state.send_bitrate = - rtp_sender_->packet_sender.SendBitrate().bps(); + rtp_sender_->packet_sender.GetSendRates().Sum().bps(); } - state.module = this; + state.receiver = &rtcp_receiver_; LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac, &state.remote_sr); @@ -371,6 +389,17 @@ bool ModuleRtpRtcpImpl::TrySendPacket(RtpPacketToSend* packet, return true; } +void ModuleRtpRtcpImpl::SetFecProtectionParams(const FecProtectionParams&, + const FecProtectionParams&) { + // Deferred FEC not supported in deprecated RTP module. +} + +std::vector> +ModuleRtpRtcpImpl::FetchFecPackets() { + // Deferred FEC not supported in deprecated RTP module. 
+ return {}; +} + void ModuleRtpRtcpImpl::OnPacketsAcknowledged( rtc::ArrayView sequence_numbers) { RTC_DCHECK(rtp_sender_); @@ -394,6 +423,20 @@ ModuleRtpRtcpImpl::GeneratePadding(size_t target_size_bytes) { target_size_bytes, rtp_sender_->packet_sender.MediaHasBeenSent()); } +std::vector +ModuleRtpRtcpImpl::GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const { + RTC_DCHECK(rtp_sender_); + return rtp_sender_->packet_sender.GetSentRtpPacketInfos(sequence_numbers); +} + +size_t ModuleRtpRtcpImpl::ExpectedPerPacketOverhead() const { + if (!rtp_sender_) { + return 0; + } + return rtp_sender_->packet_generator.ExpectedPerPacketOverhead(); +} + size_t ModuleRtpRtcpImpl::MaxRtpPacketSize() const { RTC_DCHECK(rtp_sender_); return rtp_sender_->packet_generator.MaxRtpPacketSize(); @@ -489,7 +532,8 @@ int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData( const uint32_t name, const uint8_t* data, const uint16_t length) { - return rtcp_sender_.SetApplicationSpecificData(sub_type, name, data, length); + RTC_NOTREACHED() << "Not implemented"; + return -1; } void ModuleRtpRtcpImpl::SetRtcpXrRrtrStatus(bool enable) { @@ -663,24 +707,6 @@ bool ModuleRtpRtcpImpl::StorePackets() const { RtpPacketHistory::StorageMode::kDisabled; } -void ModuleRtpRtcpImpl::RegisterRtcpStatisticsCallback( - RtcpStatisticsCallback* callback) { - rtcp_receiver_.RegisterRtcpStatisticsCallback(callback); -} - -RtcpStatisticsCallback* ModuleRtpRtcpImpl::GetRtcpStatisticsCallback() { - return rtcp_receiver_.GetRtcpStatisticsCallback(); -} - -void ModuleRtpRtcpImpl::RegisterRtcpCnameCallback(RtcpCnameCallback* callback) { - rtcp_receiver_.RegisterRtcpCnameCallback(callback); -} - -void ModuleRtpRtcpImpl::SetReportBlockDataObserver( - ReportBlockDataObserver* observer) { - return rtcp_receiver_.SetReportBlockDataObserver(observer); -} - void ModuleRtpRtcpImpl::SendCombinedRtcpPacket( std::vector> rtcp_packets) { rtcp_sender_.SendCombinedRtcpPacket(std::move(rtcp_packets)); @@ -701,17 +727,8 @@ 
void ModuleRtpRtcpImpl::SetRemoteSSRC(const uint32_t ssrc) { rtcp_receiver_.SetRemoteSSRC(ssrc); } -// TODO(nisse): Delete video_rate amd fec_rate arguments. -void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nack_rate) const { - *total_rate = rtp_sender_->packet_sender.SendBitrate().bps(); - if (video_rate) - *video_rate = 0; - if (fec_rate) - *fec_rate = 0; - *nack_rate = rtp_sender_->packet_sender.NackOverheadRate().bps(); +RtpSendRates ModuleRtpRtcpImpl::GetSendRates() const { + return rtp_sender_->packet_sender.GetSendRates(); } void ModuleRtpRtcpImpl::OnRequestSendReport() { @@ -736,7 +753,7 @@ void ModuleRtpRtcpImpl::OnReceivedNack( void ModuleRtpRtcpImpl::OnReceivedRtcpReportBlocks( const ReportBlockList& report_blocks) { - if (ack_observer_) { + if (rtp_sender_) { uint32_t ssrc = SSRC(); absl::optional rtx_ssrc; if (rtp_sender_->packet_generator.RtxStatus() != kRtxOff) { @@ -747,8 +764,6 @@ void ModuleRtpRtcpImpl::OnReceivedRtcpReportBlocks( if (ssrc == report_block.source_ssrc) { rtp_sender_->packet_generator.OnReceivedAckOnSsrc( report_block.extended_highest_sequence_number); - ack_observer_->OnReceivedAck( - report_block.extended_highest_sequence_number); } else if (rtx_ssrc && *rtx_ssrc == report_block.source_ssrc) { rtp_sender_->packet_generator.OnReceivedAckOnRtxSsrc( report_block.extended_highest_sequence_number); @@ -774,21 +789,18 @@ bool ModuleRtpRtcpImpl::LastReceivedNTP( return true; } -// Called from RTCPsender. 
-std::vector ModuleRtpRtcpImpl::BoundingSet(bool* tmmbr_owner) { - return rtcp_receiver_.BoundingSet(tmmbr_owner); -} - void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) { - rtc::CritScope cs(&critical_section_rtt_); - rtt_ms_ = rtt_ms; + { + MutexLock lock(&mutex_rtt_); + rtt_ms_ = rtt_ms; + } if (rtp_sender_) { rtp_sender_->packet_history.SetRtt(rtt_ms); } } int64_t ModuleRtpRtcpImpl::rtt_ms() const { - rtc::CritScope cs(&critical_section_rtt_); + MutexLock lock(&mutex_rtt_); return rtt_ms_; } @@ -807,12 +819,13 @@ const RTPSender* ModuleRtpRtcpImpl::RtpSender() const { DataRate ModuleRtpRtcpImpl::SendRate() const { RTC_DCHECK(rtp_sender_); - return rtp_sender_->packet_sender.SendBitrate(); + return rtp_sender_->packet_sender.GetSendRates().Sum(); } DataRate ModuleRtpRtcpImpl::NackOverheadRate() const { RTC_DCHECK(rtp_sender_); - return rtp_sender_->packet_sender.NackOverheadRate(); + return rtp_sender_->packet_sender + .GetSendRates()[RtpPacketMediaType::kRetransmission]; } } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h index 976653a458..7f7df174fd 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -26,15 +26,15 @@ #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType +#include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtcp_receiver.h" #include "modules/rtp_rtcp/source/rtcp_sender.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_sender.h" -#include "modules/rtp_rtcp/source/rtp_sender_egress.h" -#include "rtc_base/critical_section.h" #include "rtc_base/gtest_prod_util.h" +#include 
"rtc_base/synchronization/mutex.h" namespace webrtc { @@ -42,9 +42,11 @@ class Clock; struct PacedPacketInfo; struct RTPVideoHeader; +// DEPRECATED. class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { public: - explicit ModuleRtpRtcpImpl(const RtpRtcp::Configuration& configuration); + explicit ModuleRtpRtcpImpl( + const RtpRtcpInterface::Configuration& configuration); ~ModuleRtpRtcpImpl() override; // Returns the number of milliseconds until the module want a worker thread to @@ -137,12 +139,22 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { bool TrySendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info) override; + void SetFecProtectionParams(const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) override; + + std::vector> FetchFecPackets() override; + void OnPacketsAcknowledged( rtc::ArrayView sequence_numbers) override; std::vector> GeneratePadding( size_t target_size_bytes) override; + std::vector GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const override; + + size_t ExpectedPerPacketOverhead() const override; + // RTCP part. // Get RTCP status. @@ -228,14 +240,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { bool StorePackets() const override; - // Called on receipt of RTCP report block from remote side. 
- void RegisterRtcpStatisticsCallback( - RtcpStatisticsCallback* callback) override; - RtcpStatisticsCallback* GetRtcpStatisticsCallback() override; - void RegisterRtcpCnameCallback(RtcpCnameCallback* callback) override; - - void SetReportBlockDataObserver(ReportBlockDataObserver* observer) override; - void SendCombinedRtcpPacket( std::vector> rtcp_packets) override; @@ -260,12 +264,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { uint32_t* NTPfrac, uint32_t* remote_sr) const; - std::vector BoundingSet(bool* tmmbr_owner); - - void BitrateSent(uint32_t* total_rate, - uint32_t* video_rate, - uint32_t* fec_rate, - uint32_t* nackRate) const override; + RtpSendRates GetSendRates() const override; void OnReceivedNack( const std::vector& nack_sequence_numbers) override; @@ -295,8 +294,13 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { RTCPReceiver* rtcp_receiver() { return &rtcp_receiver_; } const RTCPReceiver* rtcp_receiver() const { return &rtcp_receiver_; } + void SetMediaHasBeenSent(bool media_has_been_sent) { + rtp_sender_->packet_sender.SetMediaHasBeenSent(media_has_been_sent); + } + Clock* clock() const { return clock_; } + // TODO(sprang): Remove when usage is gone. DataRate SendRate() const; DataRate NackOverheadRate() const; @@ -305,14 +309,14 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, RttForReceiverOnly); struct RtpSenderContext { - explicit RtpSenderContext(const RtpRtcp::Configuration& config); + explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config); // Storage of packets, for retransmissions and padding, if applicable. RtpPacketHistory packet_history; // Handles final time timestamping/stats/etc and handover to Transport. 
- RtpSenderEgress packet_sender; + DEPRECATED_RtpSenderEgress packet_sender; // If no paced sender configured, this class will be used to pass packets // from |packet_generator_| to |packet_sender_|. - RtpSenderEgress::NonPacedPacketSender non_paced_sender; + DEPRECATED_RtpSenderEgress::NonPacedPacketSender non_paced_sender; // Handles creation of RTP packets to be sent. RTPSender packet_generator; }; @@ -340,12 +344,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { RemoteBitrateEstimator* const remote_bitrate_; - RtcpAckObserver* const ack_observer_; - RtcpRttStats* const rtt_stats_; // The processed RTT from RtcpRttStats. - rtc::CriticalSection critical_section_rtt_; + mutable Mutex mutex_rtt_; int64_t rtt_ms_; }; diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc new file mode 100644 index 0000000000..88ede3d437 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -0,0 +1,745 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" + +#include + +#include +#include +#include +#include +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +#ifdef _WIN32 +// Disable warning C4355: 'this' : used in base member initializer list. 
+#pragma warning(disable : 4355) +#endif + +namespace webrtc { +namespace { +const int64_t kRtpRtcpMaxIdleTimeProcessMs = 5; +const int64_t kDefaultExpectedRetransmissionTimeMs = 125; + +constexpr TimeDelta kRttUpdateInterval = TimeDelta::Millis(1000); +} // namespace + +ModuleRtpRtcpImpl2::RtpSenderContext::RtpSenderContext( + const RtpRtcpInterface::Configuration& config) + : packet_history(config.clock, config.enable_rtx_padding_prioritization), + packet_sender(config, &packet_history), + non_paced_sender(&packet_sender, this), + packet_generator( + config, + &packet_history, + config.paced_sender ? config.paced_sender : &non_paced_sender) {} +void ModuleRtpRtcpImpl2::RtpSenderContext::AssignSequenceNumber( + RtpPacketToSend* packet) { + packet_generator.AssignSequenceNumber(packet); +} + +ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration) + : worker_queue_(TaskQueueBase::Current()), + rtcp_sender_(configuration), + rtcp_receiver_(configuration, this), + clock_(configuration.clock), + last_rtt_process_time_(clock_->TimeInMilliseconds()), + next_process_time_(clock_->TimeInMilliseconds() + + kRtpRtcpMaxIdleTimeProcessMs), + packet_overhead_(28), // IPV4 UDP. + nack_last_time_sent_full_ms_(0), + nack_last_seq_number_sent_(0), + remote_bitrate_(configuration.remote_bitrate_estimator), + rtt_stats_(configuration.rtt_stats), + rtt_ms_(0) { + RTC_DCHECK(worker_queue_); + process_thread_checker_.Detach(); + if (!configuration.receiver_only) { + rtp_sender_ = std::make_unique(configuration); + // Make sure rtcp sender use same timestamp offset as rtp sender. + rtcp_sender_.SetTimestampOffset( + rtp_sender_->packet_generator.TimestampOffset()); + } + + // Set default packet size limit. + // TODO(nisse): Kind-of duplicates + // webrtc::VideoSendStream::Config::Rtp::kDefaultMaxPacketSize. 
+ const size_t kTcpOverIpv4HeaderSize = 40; + SetMaxRtpPacketSize(IP_PACKET_SIZE - kTcpOverIpv4HeaderSize); + + if (rtt_stats_) { + rtt_update_task_ = RepeatingTaskHandle::DelayedStart( + worker_queue_, kRttUpdateInterval, [this]() { + PeriodicUpdate(); + return kRttUpdateInterval; + }); + } +} + +ModuleRtpRtcpImpl2::~ModuleRtpRtcpImpl2() { + RTC_DCHECK_RUN_ON(worker_queue_); + rtt_update_task_.Stop(); +} + +// static +std::unique_ptr ModuleRtpRtcpImpl2::Create( + const Configuration& configuration) { + RTC_DCHECK(configuration.clock); + RTC_DCHECK(TaskQueueBase::Current()); + return std::make_unique(configuration); +} + +// Returns the number of milliseconds until the module want a worker thread +// to call Process. +int64_t ModuleRtpRtcpImpl2::TimeUntilNextProcess() { + RTC_DCHECK_RUN_ON(&process_thread_checker_); + return std::max(0, + next_process_time_ - clock_->TimeInMilliseconds()); +} + +// Process any pending tasks such as timeouts (non time critical events). +void ModuleRtpRtcpImpl2::Process() { + RTC_DCHECK_RUN_ON(&process_thread_checker_); + + const Timestamp now = clock_->CurrentTime(); + + // TODO(bugs.webrtc.org/11581): Figure out why we need to call Process() 200 + // times a second. + next_process_time_ = now.ms() + kRtpRtcpMaxIdleTimeProcessMs; + + // TODO(bugs.webrtc.org/11581): once we don't use Process() to trigger + // calls to SendRTCP(), the only remaining timer will require remote_bitrate_ + // to be not null. In that case, we can disable the timer when it is null. + if (remote_bitrate_ && rtcp_sender_.Sending() && rtcp_sender_.TMMBR()) { + unsigned int target_bitrate = 0; + std::vector ssrcs; + if (remote_bitrate_->LatestEstimate(&ssrcs, &target_bitrate)) { + if (!ssrcs.empty()) { + target_bitrate = target_bitrate / ssrcs.size(); + } + rtcp_sender_.SetTargetBitrate(target_bitrate); + } + } + + // TODO(bugs.webrtc.org/11581): Run this on a separate set of delayed tasks + // based off of next_time_to_send_rtcp_ in RTCPSender. 
+ if (rtcp_sender_.TimeToSendRTCPReport()) + rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport); +} + +void ModuleRtpRtcpImpl2::SetRtxSendStatus(int mode) { + rtp_sender_->packet_generator.SetRtxStatus(mode); +} + +int ModuleRtpRtcpImpl2::RtxSendStatus() const { + return rtp_sender_ ? rtp_sender_->packet_generator.RtxStatus() : kRtxOff; +} + +void ModuleRtpRtcpImpl2::SetRtxSendPayloadType(int payload_type, + int associated_payload_type) { + rtp_sender_->packet_generator.SetRtxPayloadType(payload_type, + associated_payload_type); +} + +absl::optional ModuleRtpRtcpImpl2::RtxSsrc() const { + return rtp_sender_ ? rtp_sender_->packet_generator.RtxSsrc() : absl::nullopt; +} + +absl::optional ModuleRtpRtcpImpl2::FlexfecSsrc() const { + if (rtp_sender_) { + return rtp_sender_->packet_generator.FlexfecSsrc(); + } + return absl::nullopt; +} + +void ModuleRtpRtcpImpl2::IncomingRtcpPacket(const uint8_t* rtcp_packet, + const size_t length) { + rtcp_receiver_.IncomingPacket(rtcp_packet, length); +} + +void ModuleRtpRtcpImpl2::RegisterSendPayloadFrequency(int payload_type, + int payload_frequency) { + rtcp_sender_.SetRtpClockRate(payload_type, payload_frequency); +} + +int32_t ModuleRtpRtcpImpl2::DeRegisterSendPayload(const int8_t payload_type) { + return 0; +} + +uint32_t ModuleRtpRtcpImpl2::StartTimestamp() const { + return rtp_sender_->packet_generator.TimestampOffset(); +} + +// Configure start timestamp, default is a random number. +void ModuleRtpRtcpImpl2::SetStartTimestamp(const uint32_t timestamp) { + rtcp_sender_.SetTimestampOffset(timestamp); + rtp_sender_->packet_generator.SetTimestampOffset(timestamp); + rtp_sender_->packet_sender.SetTimestampOffset(timestamp); +} + +uint16_t ModuleRtpRtcpImpl2::SequenceNumber() const { + return rtp_sender_->packet_generator.SequenceNumber(); +} + +// Set SequenceNumber, default is a random number. 
+void ModuleRtpRtcpImpl2::SetSequenceNumber(const uint16_t seq_num) { + rtp_sender_->packet_generator.SetSequenceNumber(seq_num); +} + +void ModuleRtpRtcpImpl2::SetRtpState(const RtpState& rtp_state) { + rtp_sender_->packet_generator.SetRtpState(rtp_state); + rtcp_sender_.SetTimestampOffset(rtp_state.start_timestamp); +} + +void ModuleRtpRtcpImpl2::SetRtxState(const RtpState& rtp_state) { + rtp_sender_->packet_generator.SetRtxRtpState(rtp_state); +} + +RtpState ModuleRtpRtcpImpl2::GetRtpState() const { + RtpState state = rtp_sender_->packet_generator.GetRtpState(); + return state; +} + +RtpState ModuleRtpRtcpImpl2::GetRtxState() const { + return rtp_sender_->packet_generator.GetRtxRtpState(); +} + +void ModuleRtpRtcpImpl2::SetRid(const std::string& rid) { + if (rtp_sender_) { + rtp_sender_->packet_generator.SetRid(rid); + } +} + +void ModuleRtpRtcpImpl2::SetMid(const std::string& mid) { + if (rtp_sender_) { + rtp_sender_->packet_generator.SetMid(mid); + } + // TODO(bugs.webrtc.org/4050): If we end up supporting the MID SDES item for + // RTCP, this will need to be passed down to the RTCPSender also. +} + +void ModuleRtpRtcpImpl2::SetCsrcs(const std::vector& csrcs) { + rtcp_sender_.SetCsrcs(csrcs); + rtp_sender_->packet_generator.SetCsrcs(csrcs); +} + +// TODO(pbos): Handle media and RTX streams separately (separate RTCP +// feedbacks). +RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() { + // TODO(bugs.webrtc.org/11581): Called by potentially multiple threads. + // Mostly "Send*" methods. Make sure it's only called on the + // construction thread. + + RTCPSender::FeedbackState state; + // This is called also when receiver_only is true. Hence below + // checks that rtp_sender_ exists. 
+ if (rtp_sender_) { + StreamDataCounters rtp_stats; + StreamDataCounters rtx_stats; + rtp_sender_->packet_sender.GetDataCounters(&rtp_stats, &rtx_stats); + state.packets_sent = + rtp_stats.transmitted.packets + rtx_stats.transmitted.packets; + state.media_bytes_sent = rtp_stats.transmitted.payload_bytes + + rtx_stats.transmitted.payload_bytes; + state.send_bitrate = + rtp_sender_->packet_sender.GetSendRates().Sum().bps(); + } + state.receiver = &rtcp_receiver_; + + LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac, + &state.remote_sr); + + state.last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); + + return state; +} + +// TODO(nisse): This method shouldn't be called for a receive-only +// stream. Delete rtp_sender_ check as soon as all applications are +// updated. +int32_t ModuleRtpRtcpImpl2::SetSendingStatus(const bool sending) { + if (rtcp_sender_.Sending() != sending) { + // Sends RTCP BYE when going from true to false + if (rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending) != 0) { + RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE"; + } + } + return 0; +} + +bool ModuleRtpRtcpImpl2::Sending() const { + return rtcp_sender_.Sending(); +} + +// TODO(nisse): This method shouldn't be called for a receive-only +// stream. Delete rtp_sender_ check as soon as all applications are +// updated. +void ModuleRtpRtcpImpl2::SetSendingMediaStatus(const bool sending) { + if (rtp_sender_) { + rtp_sender_->packet_generator.SetSendingMediaStatus(sending); + } else { + RTC_DCHECK(!sending); + } +} + +bool ModuleRtpRtcpImpl2::SendingMedia() const { + return rtp_sender_ ? rtp_sender_->packet_generator.SendingMedia() : false; +} + +bool ModuleRtpRtcpImpl2::IsAudioConfigured() const { + return rtp_sender_ ? 
rtp_sender_->packet_generator.IsAudioConfigured() + : false; +} + +void ModuleRtpRtcpImpl2::SetAsPartOfAllocation(bool part_of_allocation) { + RTC_CHECK(rtp_sender_); + rtp_sender_->packet_sender.ForceIncludeSendPacketsInAllocation( + part_of_allocation); +} + +bool ModuleRtpRtcpImpl2::OnSendingRtpFrame(uint32_t timestamp, + int64_t capture_time_ms, + int payload_type, + bool force_sender_report) { + if (!Sending()) + return false; + + rtcp_sender_.SetLastRtpTime(timestamp, capture_time_ms, payload_type); + // Make sure an RTCP report isn't queued behind a key frame. + if (rtcp_sender_.TimeToSendRTCPReport(force_sender_report)) + rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport); + + return true; +} + +bool ModuleRtpRtcpImpl2::TrySendPacket(RtpPacketToSend* packet, + const PacedPacketInfo& pacing_info) { + RTC_DCHECK(rtp_sender_); + // TODO(sprang): Consider if we can remove this check. + if (!rtp_sender_->packet_generator.SendingMedia()) { + return false; + } + rtp_sender_->packet_sender.SendPacket(packet, pacing_info); + return true; +} + +void ModuleRtpRtcpImpl2::SetFecProtectionParams( + const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) { + RTC_DCHECK(rtp_sender_); + rtp_sender_->packet_sender.SetFecProtectionParameters(delta_params, + key_params); +} + +std::vector> +ModuleRtpRtcpImpl2::FetchFecPackets() { + RTC_DCHECK(rtp_sender_); + auto fec_packets = rtp_sender_->packet_sender.FetchFecPackets(); + if (!fec_packets.empty()) { + // Don't assign sequence numbers for FlexFEC packets. 
+ const bool generate_sequence_numbers = + !rtp_sender_->packet_sender.FlexFecSsrc().has_value(); + if (generate_sequence_numbers) { + for (auto& fec_packet : fec_packets) { + rtp_sender_->packet_generator.AssignSequenceNumber(fec_packet.get()); + } + } + } + return fec_packets; +} + +void ModuleRtpRtcpImpl2::OnPacketsAcknowledged( + rtc::ArrayView sequence_numbers) { + RTC_DCHECK(rtp_sender_); + rtp_sender_->packet_history.CullAcknowledgedPackets(sequence_numbers); +} + +bool ModuleRtpRtcpImpl2::SupportsPadding() const { + RTC_DCHECK(rtp_sender_); + return rtp_sender_->packet_generator.SupportsPadding(); +} + +bool ModuleRtpRtcpImpl2::SupportsRtxPayloadPadding() const { + RTC_DCHECK(rtp_sender_); + return rtp_sender_->packet_generator.SupportsRtxPayloadPadding(); +} + +std::vector> +ModuleRtpRtcpImpl2::GeneratePadding(size_t target_size_bytes) { + RTC_DCHECK(rtp_sender_); + return rtp_sender_->packet_generator.GeneratePadding( + target_size_bytes, rtp_sender_->packet_sender.MediaHasBeenSent()); +} + +std::vector +ModuleRtpRtcpImpl2::GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const { + RTC_DCHECK(rtp_sender_); + return rtp_sender_->packet_sender.GetSentRtpPacketInfos(sequence_numbers); +} + +size_t ModuleRtpRtcpImpl2::ExpectedPerPacketOverhead() const { + if (!rtp_sender_) { + return 0; + } + return rtp_sender_->packet_generator.ExpectedPerPacketOverhead(); +} + +size_t ModuleRtpRtcpImpl2::MaxRtpPacketSize() const { + RTC_DCHECK(rtp_sender_); + return rtp_sender_->packet_generator.MaxRtpPacketSize(); +} + +void ModuleRtpRtcpImpl2::SetMaxRtpPacketSize(size_t rtp_packet_size) { + RTC_DCHECK_LE(rtp_packet_size, IP_PACKET_SIZE) + << "rtp packet size too large: " << rtp_packet_size; + RTC_DCHECK_GT(rtp_packet_size, packet_overhead_) + << "rtp packet size too small: " << rtp_packet_size; + + rtcp_sender_.SetMaxRtpPacketSize(rtp_packet_size); + if (rtp_sender_) { + rtp_sender_->packet_generator.SetMaxRtpPacketSize(rtp_packet_size); + } +} + +RtcpMode 
ModuleRtpRtcpImpl2::RTCP() const { + return rtcp_sender_.Status(); +} + +// Configure RTCP status i.e on/off. +void ModuleRtpRtcpImpl2::SetRTCPStatus(const RtcpMode method) { + rtcp_sender_.SetRTCPStatus(method); +} + +int32_t ModuleRtpRtcpImpl2::SetCNAME(const char* c_name) { + return rtcp_sender_.SetCNAME(c_name); +} + +int32_t ModuleRtpRtcpImpl2::RemoteNTP(uint32_t* received_ntpsecs, + uint32_t* received_ntpfrac, + uint32_t* rtcp_arrival_time_secs, + uint32_t* rtcp_arrival_time_frac, + uint32_t* rtcp_timestamp) const { + return rtcp_receiver_.NTP(received_ntpsecs, received_ntpfrac, + rtcp_arrival_time_secs, rtcp_arrival_time_frac, + rtcp_timestamp) + ? 0 + : -1; +} + +// TODO(tommi): Check if |avg_rtt_ms|, |min_rtt_ms|, |max_rtt_ms| params are +// actually used in practice (some callers ask for it but don't use it). It +// could be that only |rtt| is needed and if so, then the fast path could be to +// just call rtt_ms() and rely on the calculation being done periodically. +int32_t ModuleRtpRtcpImpl2::RTT(const uint32_t remote_ssrc, + int64_t* rtt, + int64_t* avg_rtt, + int64_t* min_rtt, + int64_t* max_rtt) const { + int32_t ret = rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt); + if (rtt && *rtt == 0) { + // Try to get RTT from RtcpRttStats class. + *rtt = rtt_ms(); + } + return ret; +} + +int64_t ModuleRtpRtcpImpl2::ExpectedRetransmissionTimeMs() const { + int64_t expected_retransmission_time_ms = rtt_ms(); + if (expected_retransmission_time_ms > 0) { + return expected_retransmission_time_ms; + } + // No rtt available (|kRttUpdateInterval| not yet passed?), so try to + // poll avg_rtt_ms directly from rtcp receiver. + if (rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), nullptr, + &expected_retransmission_time_ms, nullptr, + nullptr) == 0) { + return expected_retransmission_time_ms; + } + return kDefaultExpectedRetransmissionTimeMs; +} + +// Force a send of an RTCP packet. +// Normal SR and RR are triggered via the process function. 
+int32_t ModuleRtpRtcpImpl2::SendRTCP(RTCPPacketType packet_type) { + return rtcp_sender_.SendRTCP(GetFeedbackState(), packet_type); +} + +void ModuleRtpRtcpImpl2::SetRtcpXrRrtrStatus(bool enable) { + rtcp_receiver_.SetRtcpXrRrtrStatus(enable); + rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable); +} + +bool ModuleRtpRtcpImpl2::RtcpXrRrtrStatus() const { + return rtcp_sender_.RtcpXrReceiverReferenceTime(); +} + +void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( + StreamDataCounters* rtp_counters, + StreamDataCounters* rtx_counters) const { + rtp_sender_->packet_sender.GetDataCounters(rtp_counters, rtx_counters); +} + +// Received RTCP report. +int32_t ModuleRtpRtcpImpl2::RemoteRTCPStat( + std::vector* receive_blocks) const { + return rtcp_receiver_.StatisticsReceived(receive_blocks); +} + +std::vector ModuleRtpRtcpImpl2::GetLatestReportBlockData() + const { + return rtcp_receiver_.GetLatestReportBlockData(); +} + +// (REMB) Receiver Estimated Max Bitrate. +void ModuleRtpRtcpImpl2::SetRemb(int64_t bitrate_bps, + std::vector ssrcs) { + rtcp_sender_.SetRemb(bitrate_bps, std::move(ssrcs)); +} + +void ModuleRtpRtcpImpl2::UnsetRemb() { + rtcp_sender_.UnsetRemb(); +} + +void ModuleRtpRtcpImpl2::SetExtmapAllowMixed(bool extmap_allow_mixed) { + rtp_sender_->packet_generator.SetExtmapAllowMixed(extmap_allow_mixed); +} + +void ModuleRtpRtcpImpl2::RegisterRtpHeaderExtension(absl::string_view uri, + int id) { + bool registered = + rtp_sender_->packet_generator.RegisterRtpHeaderExtension(uri, id); + RTC_CHECK(registered); +} + +int32_t ModuleRtpRtcpImpl2::DeregisterSendRtpHeaderExtension( + const RTPExtensionType type) { + return rtp_sender_->packet_generator.DeregisterRtpHeaderExtension(type); +} +void ModuleRtpRtcpImpl2::DeregisterSendRtpHeaderExtension( + absl::string_view uri) { + rtp_sender_->packet_generator.DeregisterRtpHeaderExtension(uri); +} + +void ModuleRtpRtcpImpl2::SetTmmbn(std::vector bounding_set) { + rtcp_sender_.SetTmmbn(std::move(bounding_set)); +} + +// 
Send a Negative acknowledgment packet. +int32_t ModuleRtpRtcpImpl2::SendNACK(const uint16_t* nack_list, + const uint16_t size) { + uint16_t nack_length = size; + uint16_t start_id = 0; + int64_t now_ms = clock_->TimeInMilliseconds(); + if (TimeToSendFullNackList(now_ms)) { + nack_last_time_sent_full_ms_ = now_ms; + } else { + // Only send extended list. + if (nack_last_seq_number_sent_ == nack_list[size - 1]) { + // Last sequence number is the same, do not send list. + return 0; + } + // Send new sequence numbers. + for (int i = 0; i < size; ++i) { + if (nack_last_seq_number_sent_ == nack_list[i]) { + start_id = i + 1; + break; + } + } + nack_length = size - start_id; + } + + // Our RTCP NACK implementation is limited to kRtcpMaxNackFields sequence + // numbers per RTCP packet. + if (nack_length > kRtcpMaxNackFields) { + nack_length = kRtcpMaxNackFields; + } + nack_last_seq_number_sent_ = nack_list[start_id + nack_length - 1]; + + return rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpNack, nack_length, + &nack_list[start_id]); +} + +void ModuleRtpRtcpImpl2::SendNack( + const std::vector& sequence_numbers) { + rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpNack, sequence_numbers.size(), + sequence_numbers.data()); +} + +bool ModuleRtpRtcpImpl2::TimeToSendFullNackList(int64_t now) const { + // Use RTT from RtcpRttStats class if provided. + int64_t rtt = rtt_ms(); + if (rtt == 0) { + rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL); + } + + const int64_t kStartUpRttMs = 100; + int64_t wait_time = 5 + ((rtt * 3) >> 1); // 5 + RTT * 1.5. + if (rtt == 0) { + wait_time = kStartUpRttMs; + } + + // Send a full NACK list once within every |wait_time|. + return now - nack_last_time_sent_full_ms_ > wait_time; +} + +// Store the sent packets, needed to answer to Negative acknowledgment requests. 
+void ModuleRtpRtcpImpl2::SetStorePacketsStatus(const bool enable, + const uint16_t number_to_store) { + rtp_sender_->packet_history.SetStorePacketsStatus( + enable ? RtpPacketHistory::StorageMode::kStoreAndCull + : RtpPacketHistory::StorageMode::kDisabled, + number_to_store); +} + +bool ModuleRtpRtcpImpl2::StorePackets() const { + return rtp_sender_->packet_history.GetStorageMode() != + RtpPacketHistory::StorageMode::kDisabled; +} + +void ModuleRtpRtcpImpl2::SendCombinedRtcpPacket( + std::vector> rtcp_packets) { + rtcp_sender_.SendCombinedRtcpPacket(std::move(rtcp_packets)); +} + +int32_t ModuleRtpRtcpImpl2::SendLossNotification(uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) { + return rtcp_sender_.SendLossNotification( + GetFeedbackState(), last_decoded_seq_num, last_received_seq_num, + decodability_flag, buffering_allowed); +} + +void ModuleRtpRtcpImpl2::SetRemoteSSRC(const uint32_t ssrc) { + // Inform about the incoming SSRC. + rtcp_sender_.SetRemoteSSRC(ssrc); + rtcp_receiver_.SetRemoteSSRC(ssrc); +} + +RtpSendRates ModuleRtpRtcpImpl2::GetSendRates() const { + RTC_DCHECK_RUN_ON(worker_queue_); + return rtp_sender_->packet_sender.GetSendRates(); +} + +void ModuleRtpRtcpImpl2::OnRequestSendReport() { + SendRTCP(kRtcpSr); +} + +void ModuleRtpRtcpImpl2::OnReceivedNack( + const std::vector& nack_sequence_numbers) { + if (!rtp_sender_) + return; + + if (!StorePackets() || nack_sequence_numbers.empty()) { + return; + } + // Use RTT from RtcpRttStats class if provided. 
+ int64_t rtt = rtt_ms(); + if (rtt == 0) { + rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL); + } + rtp_sender_->packet_generator.OnReceivedNack(nack_sequence_numbers, rtt); +} + +void ModuleRtpRtcpImpl2::OnReceivedRtcpReportBlocks( + const ReportBlockList& report_blocks) { + if (rtp_sender_) { + uint32_t ssrc = SSRC(); + absl::optional rtx_ssrc; + if (rtp_sender_->packet_generator.RtxStatus() != kRtxOff) { + rtx_ssrc = rtp_sender_->packet_generator.RtxSsrc(); + } + + for (const RTCPReportBlock& report_block : report_blocks) { + if (ssrc == report_block.source_ssrc) { + rtp_sender_->packet_generator.OnReceivedAckOnSsrc( + report_block.extended_highest_sequence_number); + } else if (rtx_ssrc && *rtx_ssrc == report_block.source_ssrc) { + rtp_sender_->packet_generator.OnReceivedAckOnRtxSsrc( + report_block.extended_highest_sequence_number); + } + } + } +} + +bool ModuleRtpRtcpImpl2::LastReceivedNTP( + uint32_t* rtcp_arrival_time_secs, // When we got the last report. + uint32_t* rtcp_arrival_time_frac, + uint32_t* remote_sr) const { + // Remote SR: NTP inside the last received (mid 16 bits from sec and frac). + uint32_t ntp_secs = 0; + uint32_t ntp_frac = 0; + + if (!rtcp_receiver_.NTP(&ntp_secs, &ntp_frac, rtcp_arrival_time_secs, + rtcp_arrival_time_frac, NULL)) { + return false; + } + *remote_sr = + ((ntp_secs & 0x0000ffff) << 16) + ((ntp_frac & 0xffff0000) >> 16); + return true; +} + +void ModuleRtpRtcpImpl2::set_rtt_ms(int64_t rtt_ms) { + RTC_DCHECK_RUN_ON(worker_queue_); + { + MutexLock lock(&mutex_rtt_); + rtt_ms_ = rtt_ms; + } + if (rtp_sender_) { + rtp_sender_->packet_history.SetRtt(rtt_ms); + } +} + +int64_t ModuleRtpRtcpImpl2::rtt_ms() const { + MutexLock lock(&mutex_rtt_); + return rtt_ms_; +} + +void ModuleRtpRtcpImpl2::SetVideoBitrateAllocation( + const VideoBitrateAllocation& bitrate) { + rtcp_sender_.SetVideoBitrateAllocation(bitrate); +} + +RTPSender* ModuleRtpRtcpImpl2::RtpSender() { + return rtp_sender_ ? 
&rtp_sender_->packet_generator : nullptr; +} + +const RTPSender* ModuleRtpRtcpImpl2::RtpSender() const { + return rtp_sender_ ? &rtp_sender_->packet_generator : nullptr; +} + +void ModuleRtpRtcpImpl2::PeriodicUpdate() { + RTC_DCHECK_RUN_ON(worker_queue_); + + Timestamp check_since = clock_->CurrentTime() - kRttUpdateInterval; + absl::optional rtt = + rtcp_receiver_.OnPeriodicRttUpdate(check_since, rtcp_sender_.Sending()); + if (rtt) { + rtt_stats_->OnRttUpdate(rtt->ms()); + set_rtt_ms(rtt->ms()); + } + + // kTmmbrTimeoutIntervalMs is 25 seconds, so an order of seconds. + // Instead of this polling approach, consider having an optional timer in the + // RTCPReceiver class that is started/stopped based on the state of + // rtcp_sender_.TMMBR(). + if (rtcp_sender_.TMMBR() && rtcp_receiver_.UpdateTmmbrTimers()) + rtcp_receiver_.NotifyTmmbrUpdated(); +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h new file mode 100644 index 0000000000..9eb7e3a6d6 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -0,0 +1,324 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_ + +#include +#include + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/rtp_headers.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_bitrate_allocation.h" +#include "modules/include/module_fec_types.h" +#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" +#include "modules/rtp_rtcp/source/rtcp_receiver.h" +#include "modules/rtp_rtcp/source/rtcp_sender.h" +#include "modules/rtp_rtcp/source/rtp_packet_history.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_sender.h" +#include "modules/rtp_rtcp/source/rtp_sender_egress.h" +#include "rtc_base/gtest_prod_util.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/repeating_task.h" + +namespace webrtc { + +class Clock; +struct PacedPacketInfo; +struct RTPVideoHeader; + +class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, + public Module, + public RTCPReceiver::ModuleRtpRtcp { + public: + explicit ModuleRtpRtcpImpl2( + const RtpRtcpInterface::Configuration& configuration); + ~ModuleRtpRtcpImpl2() override; + + // This method is provided to easy with migrating away from the + // RtpRtcp::Create factory method. Since this is an internal implementation + // detail though, creating an instance of ModuleRtpRtcpImpl2 directly should + // be fine. + static std::unique_ptr Create( + const Configuration& configuration); + + // Returns the number of milliseconds until the module want a worker thread to + // call Process. 
+ int64_t TimeUntilNextProcess() override; + + // Process any pending tasks such as timeouts. + void Process() override; + + // Receiver part. + + // Called when we receive an RTCP packet. + void IncomingRtcpPacket(const uint8_t* incoming_packet, + size_t incoming_packet_length) override; + + void SetRemoteSSRC(uint32_t ssrc) override; + + // Sender part. + void RegisterSendPayloadFrequency(int payload_type, + int payload_frequency) override; + + int32_t DeRegisterSendPayload(int8_t payload_type) override; + + void SetExtmapAllowMixed(bool extmap_allow_mixed) override; + + void RegisterRtpHeaderExtension(absl::string_view uri, int id) override; + int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) override; + void DeregisterSendRtpHeaderExtension(absl::string_view uri) override; + + bool SupportsPadding() const override; + bool SupportsRtxPayloadPadding() const override; + + // Get start timestamp. + uint32_t StartTimestamp() const override; + + // Configure start timestamp, default is a random number. + void SetStartTimestamp(uint32_t timestamp) override; + + uint16_t SequenceNumber() const override; + + // Set SequenceNumber, default is a random number. 
+ void SetSequenceNumber(uint16_t seq) override; + + void SetRtpState(const RtpState& rtp_state) override; + void SetRtxState(const RtpState& rtp_state) override; + RtpState GetRtpState() const override; + RtpState GetRtxState() const override; + + uint32_t SSRC() const override { return rtcp_sender_.SSRC(); } + + void SetRid(const std::string& rid) override; + + void SetMid(const std::string& mid) override; + + void SetCsrcs(const std::vector& csrcs) override; + + RTCPSender::FeedbackState GetFeedbackState(); + + void SetRtxSendStatus(int mode) override; + int RtxSendStatus() const override; + absl::optional RtxSsrc() const override; + + void SetRtxSendPayloadType(int payload_type, + int associated_payload_type) override; + + absl::optional FlexfecSsrc() const override; + + // Sends kRtcpByeCode when going from true to false. + int32_t SetSendingStatus(bool sending) override; + + bool Sending() const override; + + // Drops or relays media packets. + void SetSendingMediaStatus(bool sending) override; + + bool SendingMedia() const override; + + bool IsAudioConfigured() const override; + + void SetAsPartOfAllocation(bool part_of_allocation) override; + + bool OnSendingRtpFrame(uint32_t timestamp, + int64_t capture_time_ms, + int payload_type, + bool force_sender_report) override; + + bool TrySendPacket(RtpPacketToSend* packet, + const PacedPacketInfo& pacing_info) override; + + void SetFecProtectionParams(const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) override; + + std::vector> FetchFecPackets() override; + + void OnPacketsAcknowledged( + rtc::ArrayView sequence_numbers) override; + + std::vector> GeneratePadding( + size_t target_size_bytes) override; + + std::vector GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const override; + + size_t ExpectedPerPacketOverhead() const override; + + // RTCP part. + + // Get RTCP status. + RtcpMode RTCP() const override; + + // Configure RTCP status i.e on/off. 
+ void SetRTCPStatus(RtcpMode method) override; + + // Set RTCP CName. + int32_t SetCNAME(const char* c_name) override; + + // Get remote NTP. + int32_t RemoteNTP(uint32_t* received_ntp_secs, + uint32_t* received_ntp_frac, + uint32_t* rtcp_arrival_time_secs, + uint32_t* rtcp_arrival_time_frac, + uint32_t* rtcp_timestamp) const override; + + // Get RoundTripTime. + int32_t RTT(uint32_t remote_ssrc, + int64_t* rtt, + int64_t* avg_rtt, + int64_t* min_rtt, + int64_t* max_rtt) const override; + + int64_t ExpectedRetransmissionTimeMs() const override; + + // Force a send of an RTCP packet. + // Normal SR and RR are triggered via the process function. + int32_t SendRTCP(RTCPPacketType rtcpPacketType) override; + + void GetSendStreamDataCounters( + StreamDataCounters* rtp_counters, + StreamDataCounters* rtx_counters) const override; + + // Get received RTCP report, report block. + int32_t RemoteRTCPStat( + std::vector* receive_blocks) const override; + // A snapshot of the most recent Report Block with additional data of + // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. + // Within this list, the ReportBlockData::RTCPReportBlock::source_ssrc(), + // which is the SSRC of the corresponding outbound RTP stream, is unique. + std::vector GetLatestReportBlockData() const override; + + // (REMB) Receiver Estimated Max Bitrate. + void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override; + void UnsetRemb() override; + + void SetTmmbn(std::vector bounding_set) override; + + size_t MaxRtpPacketSize() const override; + + void SetMaxRtpPacketSize(size_t max_packet_size) override; + + // (NACK) Negative acknowledgment part. + + // Send a Negative acknowledgment packet. + // TODO(philipel): Deprecate SendNACK and use SendNack instead. 
+ int32_t SendNACK(const uint16_t* nack_list, uint16_t size) override; + + void SendNack(const std::vector& sequence_numbers) override; + + // Store the sent packets, needed to answer to a negative acknowledgment + // requests. + void SetStorePacketsStatus(bool enable, uint16_t number_to_store) override; + + bool StorePackets() const override; + + void SendCombinedRtcpPacket( + std::vector> rtcp_packets) override; + + // (XR) Receiver reference time report. + void SetRtcpXrRrtrStatus(bool enable) override; + + bool RtcpXrRrtrStatus() const override; + + // Video part. + int32_t SendLossNotification(uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) override; + + bool LastReceivedNTP(uint32_t* NTPsecs, + uint32_t* NTPfrac, + uint32_t* remote_sr) const; + + RtpSendRates GetSendRates() const override; + + void OnReceivedNack( + const std::vector& nack_sequence_numbers) override; + void OnReceivedRtcpReportBlocks( + const ReportBlockList& report_blocks) override; + void OnRequestSendReport() override; + + void SetVideoBitrateAllocation( + const VideoBitrateAllocation& bitrate) override; + + RTPSender* RtpSender() override; + const RTPSender* RtpSender() const override; + + private: + FRIEND_TEST_ALL_PREFIXES(RtpRtcpImpl2Test, Rtt); + FRIEND_TEST_ALL_PREFIXES(RtpRtcpImpl2Test, RttForReceiverOnly); + + struct RtpSenderContext : public SequenceNumberAssigner { + explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config); + void AssignSequenceNumber(RtpPacketToSend* packet) override; + // Storage of packets, for retransmissions and padding, if applicable. + RtpPacketHistory packet_history; + // Handles final time timestamping/stats/etc and handover to Transport. + RtpSenderEgress packet_sender; + // If no paced sender configured, this class will be used to pass packets + // from |packet_generator_| to |packet_sender_|. 
+ RtpSenderEgress::NonPacedPacketSender non_paced_sender; + // Handles creation of RTP packets to be sent. + RTPSender packet_generator; + }; + + void set_rtt_ms(int64_t rtt_ms); + int64_t rtt_ms() const; + + bool TimeToSendFullNackList(int64_t now) const; + + // Called on a timer, once a second, on the worker_queue_, to update the RTT, + // check if we need to send RTCP report, send TMMBR updates and fire events. + void PeriodicUpdate(); + + TaskQueueBase* const worker_queue_; + SequenceChecker process_thread_checker_; + + std::unique_ptr rtp_sender_; + + RTCPSender rtcp_sender_; + RTCPReceiver rtcp_receiver_; + + Clock* const clock_; + + int64_t last_rtt_process_time_; + int64_t next_process_time_; + uint16_t packet_overhead_; + + // Send side + int64_t nack_last_time_sent_full_ms_; + uint16_t nack_last_seq_number_sent_; + + RemoteBitrateEstimator* const remote_bitrate_; + + RtcpRttStats* const rtt_stats_; + RepeatingTaskHandle rtt_update_task_ RTC_GUARDED_BY(worker_queue_); + + // The processed RTT from RtcpRttStats. + mutable Mutex mutex_rtt_; + int64_t rtt_ms_; +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_ diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc new file mode 100644 index 0000000000..9925c541c3 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc @@ -0,0 +1,641 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" + +#include +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_sender_video.h" +#include "rtc_base/rate_limiter.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/rtcp_packet_parser.h" +#include "test/rtp_header_parser.h" +#include "test/run_loop.h" +#include "test/time_controller/simulated_time_controller.h" + +using ::testing::ElementsAre; + +namespace webrtc { +namespace { +const uint32_t kSenderSsrc = 0x12345; +const uint32_t kReceiverSsrc = 0x23456; +const int64_t kOneWayNetworkDelayMs = 100; +const uint8_t kBaseLayerTid = 0; +const uint8_t kHigherLayerTid = 1; +const uint16_t kSequenceNumber = 100; +const uint8_t kPayloadType = 100; +const int kWidth = 320; +const int kHeight = 100; + +class RtcpRttStatsTestImpl : public RtcpRttStats { + public: + RtcpRttStatsTestImpl() : rtt_ms_(0) {} + ~RtcpRttStatsTestImpl() override = default; + + void OnRttUpdate(int64_t rtt_ms) override { rtt_ms_ = rtt_ms; } + int64_t LastProcessedRtt() const override { return rtt_ms_; } + int64_t rtt_ms_; +}; + +class SendTransport : public Transport { + public: + SendTransport() + : receiver_(nullptr), + time_controller_(nullptr), + delay_ms_(0), + rtp_packets_sent_(0), + rtcp_packets_sent_(0) {} + + void SetRtpRtcpModule(ModuleRtpRtcpImpl2* receiver) { receiver_ = receiver; } + void SimulateNetworkDelay(int64_t delay_ms, TimeController* time_controller) { + time_controller_ = time_controller; + delay_ms_ = delay_ms; + } + bool SendRtp(const uint8_t* data, + size_t len, + const PacketOptions& options) override { + RTPHeader header; + std::unique_ptr parser(RtpHeaderParser::CreateForTest()); + 
EXPECT_TRUE(parser->Parse(static_cast(data), len, &header)); + ++rtp_packets_sent_; + last_rtp_header_ = header; + return true; + } + bool SendRtcp(const uint8_t* data, size_t len) override { + test::RtcpPacketParser parser; + parser.Parse(data, len); + last_nack_list_ = parser.nack()->packet_ids(); + + if (time_controller_) { + time_controller_->AdvanceTime(TimeDelta::Millis(delay_ms_)); + } + EXPECT_TRUE(receiver_); + receiver_->IncomingRtcpPacket(data, len); + ++rtcp_packets_sent_; + return true; + } + + size_t NumRtcpSent() { return rtcp_packets_sent_; } + + ModuleRtpRtcpImpl2* receiver_; + TimeController* time_controller_; + int64_t delay_ms_; + int rtp_packets_sent_; + size_t rtcp_packets_sent_; + RTPHeader last_rtp_header_; + std::vector last_nack_list_; +}; + +class RtpRtcpModule : public RtcpPacketTypeCounterObserver { + public: + RtpRtcpModule(TimeController* time_controller, bool is_sender) + : is_sender_(is_sender), + receive_statistics_( + ReceiveStatistics::Create(time_controller->GetClock())), + time_controller_(time_controller) { + CreateModuleImpl(); + transport_.SimulateNetworkDelay(kOneWayNetworkDelayMs, time_controller); + } + + const bool is_sender_; + RtcpPacketTypeCounter packets_sent_; + RtcpPacketTypeCounter packets_received_; + std::unique_ptr receive_statistics_; + SendTransport transport_; + RtcpRttStatsTestImpl rtt_stats_; + std::unique_ptr impl_; + int rtcp_report_interval_ms_ = 0; + + void RtcpPacketTypesCounterUpdated( + uint32_t ssrc, + const RtcpPacketTypeCounter& packet_counter) override { + counter_map_[ssrc] = packet_counter; + } + + RtcpPacketTypeCounter RtcpSent() { + // RTCP counters for remote SSRC. + return counter_map_[is_sender_ ? kReceiverSsrc : kSenderSsrc]; + } + + RtcpPacketTypeCounter RtcpReceived() { + // Received RTCP stats for (own) local SSRC. 
+ return counter_map_[impl_->SSRC()]; + } + int RtpSent() { return transport_.rtp_packets_sent_; } + uint16_t LastRtpSequenceNumber() { + return transport_.last_rtp_header_.sequenceNumber; + } + std::vector LastNackListSent() { + return transport_.last_nack_list_; + } + void SetRtcpReportIntervalAndReset(int rtcp_report_interval_ms) { + rtcp_report_interval_ms_ = rtcp_report_interval_ms; + CreateModuleImpl(); + } + + private: + void CreateModuleImpl() { + RtpRtcpInterface::Configuration config; + config.audio = false; + config.clock = time_controller_->GetClock(); + config.outgoing_transport = &transport_; + config.receive_statistics = receive_statistics_.get(); + config.rtcp_packet_type_counter_observer = this; + config.rtt_stats = &rtt_stats_; + config.rtcp_report_interval_ms = rtcp_report_interval_ms_; + config.local_media_ssrc = is_sender_ ? kSenderSsrc : kReceiverSsrc; + config.need_rtp_packet_infos = true; + + impl_.reset(new ModuleRtpRtcpImpl2(config)); + impl_->SetRemoteSSRC(is_sender_ ? kReceiverSsrc : kSenderSsrc); + impl_->SetRTCPStatus(RtcpMode::kCompound); + } + + TimeController* const time_controller_; + std::map counter_map_; +}; +} // namespace + +class RtpRtcpImpl2Test : public ::testing::Test { + protected: + RtpRtcpImpl2Test() + : time_controller_(Timestamp::Micros(133590000000000)), + sender_(&time_controller_, /*is_sender=*/true), + receiver_(&time_controller_, /*is_sender=*/false) {} + + void SetUp() override { + // Send module. + EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true)); + sender_.impl_->SetSendingMediaStatus(true); + sender_.impl_->SetSequenceNumber(kSequenceNumber); + sender_.impl_->SetStorePacketsStatus(true, 100); + + FieldTrialBasedConfig field_trials; + RTPSenderVideo::Config video_config; + video_config.clock = time_controller_.GetClock(); + video_config.rtp_sender = sender_.impl_->RtpSender(); + video_config.field_trials = &field_trials; + sender_video_ = std::make_unique(video_config); + + // Receive module. 
+ EXPECT_EQ(0, receiver_.impl_->SetSendingStatus(false)); + receiver_.impl_->SetSendingMediaStatus(false); + // Transport settings. + sender_.transport_.SetRtpRtcpModule(receiver_.impl_.get()); + receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get()); + } + + void AdvanceTimeMs(int64_t milliseconds) { + time_controller_.AdvanceTime(TimeDelta::Millis(milliseconds)); + } + + GlobalSimulatedTimeController time_controller_; + // test::RunLoop loop_; + // SimulatedClock clock_; + RtpRtcpModule sender_; + std::unique_ptr sender_video_; + RtpRtcpModule receiver_; + + void SendFrame(const RtpRtcpModule* module, + RTPSenderVideo* sender, + uint8_t tid) { + RTPVideoHeaderVP8 vp8_header = {}; + vp8_header.temporalIdx = tid; + RTPVideoHeader rtp_video_header; + rtp_video_header.frame_type = VideoFrameType::kVideoFrameKey; + rtp_video_header.width = kWidth; + rtp_video_header.height = kHeight; + rtp_video_header.rotation = kVideoRotation_0; + rtp_video_header.content_type = VideoContentType::UNSPECIFIED; + rtp_video_header.playout_delay = {-1, -1}; + rtp_video_header.is_first_packet_in_frame = true; + rtp_video_header.simulcastIdx = 0; + rtp_video_header.codec = kVideoCodecVP8; + rtp_video_header.video_type_header = vp8_header; + rtp_video_header.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false}; + + const uint8_t payload[100] = {0}; + EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, kPayloadType, true)); + EXPECT_TRUE(sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8, + 0, 0, payload, rtp_video_header, 0)); + } + + void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) { + bool sender = module->impl_->SSRC() == kSenderSsrc; + rtcp::Nack nack; + uint16_t list[1]; + list[0] = sequence_number; + const uint16_t kListLength = sizeof(list) / sizeof(list[0]); + nack.SetSenderSsrc(sender ? kReceiverSsrc : kSenderSsrc); + nack.SetMediaSsrc(sender ? 
kSenderSsrc : kReceiverSsrc); + nack.SetPacketIds(list, kListLength); + rtc::Buffer packet = nack.Build(); + module->impl_->IncomingRtcpPacket(packet.data(), packet.size()); + } +}; + +TEST_F(RtpRtcpImpl2Test, RetransmitsAllLayers) { + // Send frames. + EXPECT_EQ(0, sender_.RtpSent()); + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); // kSequenceNumber + SendFrame(&sender_, sender_video_.get(), + kHigherLayerTid); // kSequenceNumber + 1 + SendFrame(&sender_, sender_video_.get(), + kNoTemporalIdx); // kSequenceNumber + 2 + EXPECT_EQ(3, sender_.RtpSent()); + EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber()); + + // Min required delay until retransmit = 5 + RTT ms (RTT = 0). + AdvanceTimeMs(5); + + // Frame with kBaseLayerTid re-sent. + IncomingRtcpNack(&sender_, kSequenceNumber); + EXPECT_EQ(4, sender_.RtpSent()); + EXPECT_EQ(kSequenceNumber, sender_.LastRtpSequenceNumber()); + // Frame with kHigherLayerTid re-sent. + IncomingRtcpNack(&sender_, kSequenceNumber + 1); + EXPECT_EQ(5, sender_.RtpSent()); + EXPECT_EQ(kSequenceNumber + 1, sender_.LastRtpSequenceNumber()); + // Frame with kNoTemporalIdx re-sent. + IncomingRtcpNack(&sender_, kSequenceNumber + 2); + EXPECT_EQ(6, sender_.RtpSent()); + EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber()); +} + +TEST_F(RtpRtcpImpl2Test, Rtt) { + RtpPacketReceived packet; + packet.SetTimestamp(1); + packet.SetSequenceNumber(123); + packet.SetSsrc(kSenderSsrc); + packet.AllocatePayload(100 - 12); + receiver_.receive_statistics_->OnRtpPacket(packet); + + // Send Frame before sending an SR. + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + // Sender module should send an SR. + EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport)); + + // Receiver module should send a RR with a response to the last received SR. + AdvanceTimeMs(1000); + EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport)); + + // Verify RTT. 
+ int64_t rtt; + int64_t avg_rtt; + int64_t min_rtt; + int64_t max_rtt; + EXPECT_EQ( + 0, sender_.impl_->RTT(kReceiverSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt)); + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, rtt, 1); + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, avg_rtt, 1); + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, min_rtt, 1); + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, max_rtt, 1); + + // No RTT from other ssrc. + EXPECT_EQ(-1, sender_.impl_->RTT(kReceiverSsrc + 1, &rtt, &avg_rtt, &min_rtt, + &max_rtt)); + + // Verify RTT from rtt_stats config. + EXPECT_EQ(0, sender_.rtt_stats_.LastProcessedRtt()); + EXPECT_EQ(0, sender_.impl_->rtt_ms()); + AdvanceTimeMs(1000); + + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.rtt_stats_.LastProcessedRtt(), + 1); + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.impl_->rtt_ms(), 1); +} + +TEST_F(RtpRtcpImpl2Test, SetRtcpXrRrtrStatus) { + EXPECT_FALSE(receiver_.impl_->RtcpXrRrtrStatus()); + receiver_.impl_->SetRtcpXrRrtrStatus(true); + EXPECT_TRUE(receiver_.impl_->RtcpXrRrtrStatus()); +} + +TEST_F(RtpRtcpImpl2Test, RttForReceiverOnly) { + receiver_.impl_->SetRtcpXrRrtrStatus(true); + + // Receiver module should send a Receiver time reference report (RTRR). + EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport)); + + // Sender module should send a response to the last received RTRR (DLRR). + AdvanceTimeMs(1000); + // Send Frame before sending a SR. + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport)); + + // Verify RTT. + EXPECT_EQ(0, receiver_.rtt_stats_.LastProcessedRtt()); + EXPECT_EQ(0, receiver_.impl_->rtt_ms()); + AdvanceTimeMs(1000); + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, + receiver_.rtt_stats_.LastProcessedRtt(), 1); + EXPECT_NEAR(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms(), 1); +} + +TEST_F(RtpRtcpImpl2Test, NoSrBeforeMedia) { + // Ignore fake transport delays in this test. 
+ sender_.transport_.SimulateNetworkDelay(0, &time_controller_); + receiver_.transport_.SimulateNetworkDelay(0, &time_controller_); + + sender_.impl_->Process(); + EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms); + + // Verify no SR is sent before media has been sent, RR should still be sent + // from the receiving module though. + AdvanceTimeMs(2000); + int64_t current_time = time_controller_.GetClock()->TimeInMilliseconds(); + sender_.impl_->Process(); + receiver_.impl_->Process(); + EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms); + EXPECT_EQ(receiver_.RtcpSent().first_packet_time_ms, current_time); + + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, current_time); +} + +TEST_F(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) { + EXPECT_EQ(-1, receiver_.RtcpSent().first_packet_time_ms); + EXPECT_EQ(-1, sender_.RtcpReceived().first_packet_time_ms); + EXPECT_EQ(0U, sender_.RtcpReceived().nack_packets); + EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets); + + // Receive module sends a NACK. + const uint16_t kNackLength = 1; + uint16_t nack_list[kNackLength] = {123}; + EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets); + EXPECT_GT(receiver_.RtcpSent().first_packet_time_ms, -1); + + // Send module receives the NACK. 
+ EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets); + EXPECT_GT(sender_.RtcpReceived().first_packet_time_ms, -1); +} + +TEST_F(RtpRtcpImpl2Test, AddStreamDataCounters) { + StreamDataCounters rtp; + const int64_t kStartTimeMs = 1; + rtp.first_packet_time_ms = kStartTimeMs; + rtp.transmitted.packets = 1; + rtp.transmitted.payload_bytes = 1; + rtp.transmitted.header_bytes = 2; + rtp.transmitted.padding_bytes = 3; + EXPECT_EQ(rtp.transmitted.TotalBytes(), rtp.transmitted.payload_bytes + + rtp.transmitted.header_bytes + + rtp.transmitted.padding_bytes); + + StreamDataCounters rtp2; + rtp2.first_packet_time_ms = -1; + rtp2.transmitted.packets = 10; + rtp2.transmitted.payload_bytes = 10; + rtp2.retransmitted.header_bytes = 4; + rtp2.retransmitted.payload_bytes = 5; + rtp2.retransmitted.padding_bytes = 6; + rtp2.retransmitted.packets = 7; + rtp2.fec.packets = 8; + + StreamDataCounters sum = rtp; + sum.Add(rtp2); + EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms); + EXPECT_EQ(11U, sum.transmitted.packets); + EXPECT_EQ(11U, sum.transmitted.payload_bytes); + EXPECT_EQ(2U, sum.transmitted.header_bytes); + EXPECT_EQ(3U, sum.transmitted.padding_bytes); + EXPECT_EQ(4U, sum.retransmitted.header_bytes); + EXPECT_EQ(5U, sum.retransmitted.payload_bytes); + EXPECT_EQ(6U, sum.retransmitted.padding_bytes); + EXPECT_EQ(7U, sum.retransmitted.packets); + EXPECT_EQ(8U, sum.fec.packets); + EXPECT_EQ(sum.transmitted.TotalBytes(), + rtp.transmitted.TotalBytes() + rtp2.transmitted.TotalBytes()); + + StreamDataCounters rtp3; + rtp3.first_packet_time_ms = kStartTimeMs + 10; + sum.Add(rtp3); + EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms); // Holds oldest time. +} + +TEST_F(RtpRtcpImpl2Test, SendsInitialNackList) { + // Send module sends a NACK. + const uint16_t kNackLength = 1; + uint16_t nack_list[kNackLength] = {123}; + EXPECT_EQ(0U, sender_.RtcpSent().nack_packets); + // Send Frame before sending a compound RTCP that starts with SR. 
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(1U, sender_.RtcpSent().nack_packets); + EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123)); +} + +TEST_F(RtpRtcpImpl2Test, SendsExtendedNackList) { + // Send module sends a NACK. + const uint16_t kNackLength = 1; + uint16_t nack_list[kNackLength] = {123}; + EXPECT_EQ(0U, sender_.RtcpSent().nack_packets); + // Send Frame before sending a compound RTCP that starts with SR. + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(1U, sender_.RtcpSent().nack_packets); + EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123)); + + // Same list not re-send. + EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(1U, sender_.RtcpSent().nack_packets); + EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123)); + + // Only extended list sent. + const uint16_t kNackExtLength = 2; + uint16_t nack_list_ext[kNackExtLength] = {123, 124}; + EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list_ext, kNackExtLength)); + EXPECT_EQ(2U, sender_.RtcpSent().nack_packets); + EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(124)); +} + +TEST_F(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) { + sender_.transport_.SimulateNetworkDelay(0, &time_controller_); + // Send module sends a NACK. + const uint16_t kNackLength = 2; + uint16_t nack_list[kNackLength] = {123, 125}; + EXPECT_EQ(0U, sender_.RtcpSent().nack_packets); + // Send Frame before sending a compound RTCP that starts with SR. + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(1U, sender_.RtcpSent().nack_packets); + EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125)); + + // Same list not re-send, rtt interval has not passed. 
+ const int kStartupRttMs = 100; + AdvanceTimeMs(kStartupRttMs); + EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(1U, sender_.RtcpSent().nack_packets); + + // Rtt interval passed, full list sent. + AdvanceTimeMs(1); + EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(2U, sender_.RtcpSent().nack_packets); + EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125)); +} + +TEST_F(RtpRtcpImpl2Test, UniqueNackRequests) { + receiver_.transport_.SimulateNetworkDelay(0, &time_controller_); + EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets); + EXPECT_EQ(0U, receiver_.RtcpSent().nack_requests); + EXPECT_EQ(0U, receiver_.RtcpSent().unique_nack_requests); + EXPECT_EQ(0, receiver_.RtcpSent().UniqueNackRequestsInPercent()); + + // Receive module sends NACK request. + const uint16_t kNackLength = 4; + uint16_t nack_list[kNackLength] = {10, 11, 13, 18}; + EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength)); + EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets); + EXPECT_EQ(4U, receiver_.RtcpSent().nack_requests); + EXPECT_EQ(4U, receiver_.RtcpSent().unique_nack_requests); + EXPECT_THAT(receiver_.LastNackListSent(), ElementsAre(10, 11, 13, 18)); + + // Send module receives the request. + EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets); + EXPECT_EQ(4U, sender_.RtcpReceived().nack_requests); + EXPECT_EQ(4U, sender_.RtcpReceived().unique_nack_requests); + EXPECT_EQ(100, sender_.RtcpReceived().UniqueNackRequestsInPercent()); + + // Receive module sends new request with duplicated packets. 
+ const int kStartupRttMs = 100; + AdvanceTimeMs(kStartupRttMs + 1); + const uint16_t kNackLength2 = 4; + uint16_t nack_list2[kNackLength2] = {11, 18, 20, 21}; + EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list2, kNackLength2)); + EXPECT_EQ(2U, receiver_.RtcpSent().nack_packets); + EXPECT_EQ(8U, receiver_.RtcpSent().nack_requests); + EXPECT_EQ(6U, receiver_.RtcpSent().unique_nack_requests); + EXPECT_THAT(receiver_.LastNackListSent(), ElementsAre(11, 18, 20, 21)); + + // Send module receives the request. + EXPECT_EQ(2U, sender_.RtcpReceived().nack_packets); + EXPECT_EQ(8U, sender_.RtcpReceived().nack_requests); + EXPECT_EQ(6U, sender_.RtcpReceived().unique_nack_requests); + EXPECT_EQ(75, sender_.RtcpReceived().UniqueNackRequestsInPercent()); +} + +TEST_F(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) { + const int kVideoReportInterval = 3000; + + // Recreate sender impl with new configuration, and redo setup. + sender_.SetRtcpReportIntervalAndReset(kVideoReportInterval); + SetUp(); + + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + + // Initial state + sender_.impl_->Process(); + EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1); + EXPECT_EQ(0u, sender_.transport_.NumRtcpSent()); + + // Move ahead to the last ms before a rtcp is expected, no action. + AdvanceTimeMs(kVideoReportInterval / 2 - 1); + sender_.impl_->Process(); + EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1); + EXPECT_EQ(sender_.transport_.NumRtcpSent(), 0u); + + // Move ahead to the first rtcp. Send RTCP. + AdvanceTimeMs(1); + sender_.impl_->Process(); + EXPECT_GT(sender_.RtcpSent().first_packet_time_ms, -1); + EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u); + + SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); + + // Move ahead to the last possible second before second rtcp is expected. 
+ AdvanceTimeMs(kVideoReportInterval * 1 / 2 - 1); + sender_.impl_->Process(); + EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u); + + // Move ahead into the range of second rtcp, the second rtcp may be sent. + AdvanceTimeMs(1); + sender_.impl_->Process(); + EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u); + + AdvanceTimeMs(kVideoReportInterval / 2); + sender_.impl_->Process(); + EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u); + + // Move out the range of second rtcp, the second rtcp must have been sent. + AdvanceTimeMs(kVideoReportInterval / 2); + sender_.impl_->Process(); + EXPECT_EQ(sender_.transport_.NumRtcpSent(), 2u); +} + +TEST_F(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { + const uint32_t kStartTimestamp = 1u; + SetUp(); + sender_.impl_->SetStartTimestamp(kStartTimestamp); + + PacedPacketInfo pacing_info; + RtpPacketToSend packet(nullptr); + packet.set_packet_type(RtpPacketToSend::Type::kVideo); + packet.SetSsrc(kSenderSsrc); + + // Single-packet frame. + packet.SetTimestamp(1); + packet.SetSequenceNumber(1); + packet.set_first_packet_of_frame(true); + packet.SetMarker(true); + sender_.impl_->TrySendPacket(&packet, pacing_info); + AdvanceTimeMs(1); + + std::vector seqno_info = + sender_.impl_->GetSentRtpPacketInfos(std::vector{1}); + + EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info( + /*timestamp=*/1 - kStartTimestamp, + /*is_first=*/1, + /*is_last=*/1))); + + // Three-packet frame. 
+ packet.SetTimestamp(2); + packet.SetSequenceNumber(2); + packet.set_first_packet_of_frame(true); + packet.SetMarker(false); + sender_.impl_->TrySendPacket(&packet, pacing_info); + + packet.SetSequenceNumber(3); + packet.set_first_packet_of_frame(false); + sender_.impl_->TrySendPacket(&packet, pacing_info); + + packet.SetSequenceNumber(4); + packet.SetMarker(true); + sender_.impl_->TrySendPacket(&packet, pacing_info); + + AdvanceTimeMs(1); + + seqno_info = + sender_.impl_->GetSentRtpPacketInfos(std::vector{2, 3, 4}); + + EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info( + /*timestamp=*/2 - kStartTimestamp, + /*is_first=*/1, + /*is_last=*/0), + RtpSequenceNumberMap::Info( + /*timestamp=*/2 - kStartTimestamp, + /*is_first=*/0, + /*is_last=*/0), + RtpSequenceNumberMap::Info( + /*timestamp=*/2 - kStartTimestamp, + /*is_first=*/0, + /*is_last=*/1))); +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc index 0b681cf183..826e4cb340 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc @@ -15,9 +15,7 @@ #include #include "api/transport/field_trial_based_config.h" -#include "api/video_codecs/video_codec.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/playout_delay_oracle.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -38,6 +36,9 @@ const int64_t kOneWayNetworkDelayMs = 100; const uint8_t kBaseLayerTid = 0; const uint8_t kHigherLayerTid = 1; const uint16_t kSequenceNumber = 100; +const uint8_t kPayloadType = 100; +const int kWidth = 320; +const int kHeight = 100; class RtcpRttStatsTestImpl : public RtcpRttStats { public: @@ -144,7 +145,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver { private: void CreateModuleImpl() { - 
RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.audio = false; config.clock = clock_; config.outgoing_transport = &transport_; @@ -153,6 +154,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver { config.rtt_stats = &rtt_stats_; config.rtcp_report_interval_ms = rtcp_report_interval_ms_; config.local_media_ssrc = is_sender_ ? kSenderSsrc : kReceiverSsrc; + config.need_rtp_packet_infos = true; impl_.reset(new ModuleRtpRtcpImpl(config)); impl_->SetRemoteSSRC(is_sender_ ? kReceiverSsrc : kSenderSsrc); @@ -182,15 +184,9 @@ class RtpRtcpImplTest : public ::testing::Test { RTPSenderVideo::Config video_config; video_config.clock = &clock_; video_config.rtp_sender = sender_.impl_->RtpSender(); - video_config.playout_delay_oracle = &playout_delay_oracle_; video_config.field_trials = &field_trials; sender_video_ = std::make_unique(video_config); - memset(&codec_, 0, sizeof(VideoCodec)); - codec_.plType = 100; - codec_.width = 320; - codec_.height = 180; - // Receive module. 
EXPECT_EQ(0, receiver_.impl_->SetSendingStatus(false)); receiver_.impl_->SetSendingMediaStatus(false); @@ -201,10 +197,8 @@ class RtpRtcpImplTest : public ::testing::Test { SimulatedClock clock_; RtpRtcpModule sender_; - PlayoutDelayOracle playout_delay_oracle_; std::unique_ptr sender_video_; RtpRtcpModule receiver_; - VideoCodec codec_; void SendFrame(const RtpRtcpModule* module, RTPSenderVideo* sender, @@ -213,8 +207,8 @@ class RtpRtcpImplTest : public ::testing::Test { vp8_header.temporalIdx = tid; RTPVideoHeader rtp_video_header; rtp_video_header.frame_type = VideoFrameType::kVideoFrameKey; - rtp_video_header.width = codec_.width; - rtp_video_header.height = codec_.height; + rtp_video_header.width = kWidth; + rtp_video_header.height = kHeight; rtp_video_header.rotation = kVideoRotation_0; rtp_video_header.content_type = VideoContentType::UNSPECIFIED; rtp_video_header.playout_delay = {-1, -1}; @@ -225,9 +219,9 @@ class RtpRtcpImplTest : public ::testing::Test { rtp_video_header.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false}; const uint8_t payload[100] = {0}; - EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, codec_.plType, true)); - EXPECT_TRUE(sender->SendVideo(codec_.plType, VideoCodecType::kVideoCodecVP8, - 0, 0, payload, nullptr, rtp_video_header, 0)); + EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, kPayloadType, true)); + EXPECT_TRUE(sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8, + 0, 0, payload, rtp_video_header, 0)); } void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) { @@ -572,4 +566,61 @@ TEST_F(RtpRtcpImplTest, ConfigurableRtcpReportInterval) { EXPECT_EQ(sender_.transport_.NumRtcpSent(), 2u); } +TEST_F(RtpRtcpImplTest, StoresPacketInfoForSentPackets) { + const uint32_t kStartTimestamp = 1u; + SetUp(); + sender_.impl_->SetStartTimestamp(kStartTimestamp); + + PacedPacketInfo pacing_info; + RtpPacketToSend packet(nullptr); + packet.set_packet_type(RtpPacketToSend::Type::kVideo); + 
packet.SetSsrc(kSenderSsrc); + + // Single-packet frame. + packet.SetTimestamp(1); + packet.SetSequenceNumber(1); + packet.set_first_packet_of_frame(true); + packet.SetMarker(true); + sender_.impl_->TrySendPacket(&packet, pacing_info); + + std::vector seqno_info = + sender_.impl_->GetSentRtpPacketInfos(std::vector{1}); + + EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info( + /*timestamp=*/1 - kStartTimestamp, + /*is_first=*/1, + /*is_last=*/1))); + + // Three-packet frame. + packet.SetTimestamp(2); + packet.SetSequenceNumber(2); + packet.set_first_packet_of_frame(true); + packet.SetMarker(false); + sender_.impl_->TrySendPacket(&packet, pacing_info); + + packet.SetSequenceNumber(3); + packet.set_first_packet_of_frame(false); + sender_.impl_->TrySendPacket(&packet, pacing_info); + + packet.SetSequenceNumber(4); + packet.SetMarker(true); + sender_.impl_->TrySendPacket(&packet, pacing_info); + + seqno_info = + sender_.impl_->GetSentRtpPacketInfos(std::vector{2, 3, 4}); + + EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info( + /*timestamp=*/2 - kStartTimestamp, + /*is_first=*/1, + /*is_last=*/0), + RtpSequenceNumberMap::Info( + /*timestamp=*/2 - kStartTimestamp, + /*is_first=*/0, + /*is_last=*/0), + RtpSequenceNumberMap::Info( + /*timestamp=*/2 - kStartTimestamp, + /*is_first=*/0, + /*is_last=*/1))); +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h new file mode 100644 index 0000000000..3bd5d47053 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -0,0 +1,428 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/video/video_bitrate_allocation.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtp_packet_sender.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { + +// Forward declarations. +class FrameEncryptorInterface; +class RateLimiter; +class RemoteBitrateEstimator; +class RtcEventLog; +class RTPSender; +class Transport; +class VideoBitrateAllocationObserver; + +class RtpRtcpInterface : public RtcpFeedbackSenderInterface { + public: + struct Configuration { + Configuration() = default; + Configuration(Configuration&& rhs) = default; + + // True for a audio version of the RTP/RTCP module object false will create + // a video version. + bool audio = false; + bool receiver_only = false; + + // The clock to use to read time. If nullptr then system clock will be used. + Clock* clock = nullptr; + + ReceiveStatisticsProvider* receive_statistics = nullptr; + + // Transport object that will be called when packets are ready to be sent + // out on the network. + Transport* outgoing_transport = nullptr; + + // Called when the receiver requests an intra frame. + RtcpIntraFrameObserver* intra_frame_callback = nullptr; + + // Called when the receiver sends a loss notification. 
+ RtcpLossNotificationObserver* rtcp_loss_notification_observer = nullptr; + + // Called when we receive a changed estimate from the receiver of out + // stream. + RtcpBandwidthObserver* bandwidth_callback = nullptr; + + NetworkStateEstimateObserver* network_state_estimate_observer = nullptr; + TransportFeedbackObserver* transport_feedback_callback = nullptr; + VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr; + RtcpRttStats* rtt_stats = nullptr; + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr; + // Called on receipt of RTCP report block from remote side. + // TODO(bugs.webrtc.org/10678): Remove RtcpStatisticsCallback in + // favor of ReportBlockDataObserver. + // TODO(bugs.webrtc.org/10679): Consider whether we want to use + // only getters or only callbacks. If we decide on getters, the + // ReportBlockDataObserver should also be removed in favor of + // GetLatestReportBlockData(). + RtcpStatisticsCallback* rtcp_statistics_callback = nullptr; + RtcpCnameCallback* rtcp_cname_callback = nullptr; + ReportBlockDataObserver* report_block_data_observer = nullptr; + + // Estimates the bandwidth available for a set of streams from the same + // client. + RemoteBitrateEstimator* remote_bitrate_estimator = nullptr; + + // Spread any bursts of packets into smaller bursts to minimize packet loss. + RtpPacketSender* paced_sender = nullptr; + + // Generates FEC packets. + // TODO(sprang): Wire up to RtpSenderEgress. + VideoFecGenerator* fec_generator = nullptr; + + BitrateStatisticsObserver* send_bitrate_observer = nullptr; + SendSideDelayObserver* send_side_delay_observer = nullptr; + RtcEventLog* event_log = nullptr; + SendPacketObserver* send_packet_observer = nullptr; + RateLimiter* retransmission_rate_limiter = nullptr; + StreamDataCountersCallback* rtp_stats_callback = nullptr; + + int rtcp_report_interval_ms = 0; + + // Update network2 instead of pacer_exit field of video timing extension. 
+ bool populate_network2_timestamp = false; + + rtc::scoped_refptr frame_transformer; + + // E2EE Custom Video Frame Encryption + FrameEncryptorInterface* frame_encryptor = nullptr; + // Require all outgoing frames to be encrypted with a FrameEncryptor. + bool require_frame_encryption = false; + + // Corresponds to extmap-allow-mixed in SDP negotiation. + bool extmap_allow_mixed = false; + + // If true, the RTP sender will always annotate outgoing packets with + // MID and RID header extensions, if provided and negotiated. + // If false, the RTP sender will stop sending MID and RID header extensions, + // when it knows that the receiver is ready to demux based on SSRC. This is + // done by RTCP RR acking. + bool always_send_mid_and_rid = false; + + // If set, field trials are read from |field_trials|, otherwise + // defaults to webrtc::FieldTrialBasedConfig. + const WebRtcKeyValueConfig* field_trials = nullptr; + + // SSRCs for media and retransmission, respectively. + // FlexFec SSRC is fetched from |flexfec_sender|. + uint32_t local_media_ssrc = 0; + absl::optional rtx_send_ssrc; + + bool need_rtp_packet_infos = false; + + // If true, the RTP packet history will select RTX packets based on + // heuristics such as send time, retransmission count etc, in order to + // make padding potentially more useful. + // If false, the last packet will always be picked. This may reduce CPU + // overhead. 
+ bool enable_rtx_padding_prioritization = true; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(Configuration); + }; + + // ************************************************************************** + // Receiver functions + // ************************************************************************** + + virtual void IncomingRtcpPacket(const uint8_t* incoming_packet, + size_t incoming_packet_length) = 0; + + virtual void SetRemoteSSRC(uint32_t ssrc) = 0; + + // ************************************************************************** + // Sender + // ************************************************************************** + + // Sets the maximum size of an RTP packet, including RTP headers. + virtual void SetMaxRtpPacketSize(size_t size) = 0; + + // Returns max RTP packet size. Takes into account RTP headers and + // FEC/ULP/RED overhead (when FEC is enabled). + virtual size_t MaxRtpPacketSize() const = 0; + + virtual void RegisterSendPayloadFrequency(int payload_type, + int payload_frequency) = 0; + + // Unregisters a send payload. + // |payload_type| - payload type of codec + // Returns -1 on failure else 0. + virtual int32_t DeRegisterSendPayload(int8_t payload_type) = 0; + + virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0; + + // Register extension by uri, triggers CHECK on falure. + virtual void RegisterRtpHeaderExtension(absl::string_view uri, int id) = 0; + + virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0; + virtual void DeregisterSendRtpHeaderExtension(absl::string_view uri) = 0; + + // Returns true if RTP module is send media, and any of the extensions + // required for bandwidth estimation is registered. + virtual bool SupportsPadding() const = 0; + // Same as SupportsPadding(), but additionally requires that + // SetRtxSendStatus() has been called with the kRtxRedundantPayloads option + // enabled. + virtual bool SupportsRtxPayloadPadding() const = 0; + + // Returns start timestamp. 
+ virtual uint32_t StartTimestamp() const = 0; + + // Sets start timestamp. Start timestamp is set to a random value if this + // function is never called. + virtual void SetStartTimestamp(uint32_t timestamp) = 0; + + // Returns SequenceNumber. + virtual uint16_t SequenceNumber() const = 0; + + // Sets SequenceNumber, default is a random number. + virtual void SetSequenceNumber(uint16_t seq) = 0; + + virtual void SetRtpState(const RtpState& rtp_state) = 0; + virtual void SetRtxState(const RtpState& rtp_state) = 0; + virtual RtpState GetRtpState() const = 0; + virtual RtpState GetRtxState() const = 0; + + // Returns SSRC. + virtual uint32_t SSRC() const = 0; + + // Sets the value for sending in the RID (and Repaired) RTP header extension. + // RIDs are used to identify an RTP stream if SSRCs are not negotiated. + // If the RID and Repaired RID extensions are not registered, the RID will + // not be sent. + virtual void SetRid(const std::string& rid) = 0; + + // Sets the value for sending in the MID RTP header extension. + // The MID RTP header extension should be registered for this to do anything. + // Once set, this value can not be changed or removed. + virtual void SetMid(const std::string& mid) = 0; + + // Sets CSRC. + // |csrcs| - vector of CSRCs + virtual void SetCsrcs(const std::vector& csrcs) = 0; + + // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination + // of values of the enumerator RtxMode. + virtual void SetRtxSendStatus(int modes) = 0; + + // Returns status of sending RTX (RFC 4588). The returned value can be + // a combination of values of the enumerator RtxMode. + virtual int RtxSendStatus() const = 0; + + // Returns the SSRC used for RTX if set, otherwise a nullopt. + virtual absl::optional RtxSsrc() const = 0; + + // Sets the payload type to use when sending RTX packets. Note that this + // doesn't enable RTX, only the payload type is set. 
+ virtual void SetRtxSendPayloadType(int payload_type, + int associated_payload_type) = 0; + + // Returns the FlexFEC SSRC, if there is one. + virtual absl::optional FlexfecSsrc() const = 0; + + // Sets sending status. Sends kRtcpByeCode when going from true to false. + // Returns -1 on failure else 0. + virtual int32_t SetSendingStatus(bool sending) = 0; + + // Returns current sending status. + virtual bool Sending() const = 0; + + // Starts/Stops media packets. On by default. + virtual void SetSendingMediaStatus(bool sending) = 0; + + // Returns current media sending status. + virtual bool SendingMedia() const = 0; + + // Returns whether audio is configured (i.e. Configuration::audio = true). + virtual bool IsAudioConfigured() const = 0; + + // Indicate that the packets sent by this module should be counted towards the + // bitrate estimate since the stream participates in the bitrate allocation. + virtual void SetAsPartOfAllocation(bool part_of_allocation) = 0; + + // Returns bitrate sent (post-pacing) per packet type. + virtual RtpSendRates GetSendRates() const = 0; + + virtual RTPSender* RtpSender() = 0; + virtual const RTPSender* RtpSender() const = 0; + + // Record that a frame is about to be sent. Returns true on success, and false + // if the module isn't ready to send. + virtual bool OnSendingRtpFrame(uint32_t timestamp, + int64_t capture_time_ms, + int payload_type, + bool force_sender_report) = 0; + + // Try to send the provided packet. Returns true iff packet matches any of + // the SSRCs for this module (media/rtx/fec etc) and was forwarded to the + // transport. + virtual bool TrySendPacket(RtpPacketToSend* packet, + const PacedPacketInfo& pacing_info) = 0; + + // Update the FEC protection parameters to use for delta- and key-frames. + // Only used when deferred FEC is active. 
+ virtual void SetFecProtectionParams( + const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) = 0; + + // If deferred FEC generation is enabled, this method should be called after + // calling TrySendPacket(). Any generated FEC packets will be removed and + // returned from the FEC generator. + virtual std::vector> FetchFecPackets() = 0; + + virtual void OnPacketsAcknowledged( + rtc::ArrayView sequence_numbers) = 0; + + virtual std::vector> GeneratePadding( + size_t target_size_bytes) = 0; + + virtual std::vector GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const = 0; + + // Returns an expected per packet overhead representing the main RTP header, + // any CSRCs, and the registered header extensions that are expected on all + // packets (i.e. disregarding things like abs capture time which is only + // populated on a subset of packets, but counting MID/RID type extensions + // when we expect to send them). + virtual size_t ExpectedPerPacketOverhead() const = 0; + + // ************************************************************************** + // RTCP + // ************************************************************************** + + // Returns RTCP status. + virtual RtcpMode RTCP() const = 0; + + // Sets RTCP status i.e on(compound or non-compound)/off. + // |method| - RTCP method to use. + virtual void SetRTCPStatus(RtcpMode method) = 0; + + // Sets RTCP CName (i.e unique identifier). + // Returns -1 on failure else 0. + virtual int32_t SetCNAME(const char* cname) = 0; + + // Returns remote NTP. + // Returns -1 on failure else 0. + virtual int32_t RemoteNTP(uint32_t* received_ntp_secs, + uint32_t* received_ntp_frac, + uint32_t* rtcp_arrival_time_secs, + uint32_t* rtcp_arrival_time_frac, + uint32_t* rtcp_timestamp) const = 0; + + // Returns current RTT (round-trip time) estimate. + // Returns -1 on failure else 0. 
+ virtual int32_t RTT(uint32_t remote_ssrc, + int64_t* rtt, + int64_t* avg_rtt, + int64_t* min_rtt, + int64_t* max_rtt) const = 0; + + // Returns the estimated RTT, with fallback to a default value. + virtual int64_t ExpectedRetransmissionTimeMs() const = 0; + + // Forces a send of a RTCP packet. Periodic SR and RR are triggered via the + // process function. + // Returns -1 on failure else 0. + virtual int32_t SendRTCP(RTCPPacketType rtcp_packet_type) = 0; + + // Returns send statistics for the RTP and RTX stream. + virtual void GetSendStreamDataCounters( + StreamDataCounters* rtp_counters, + StreamDataCounters* rtx_counters) const = 0; + + // Returns received RTCP report block. + // Returns -1 on failure else 0. + // TODO(https://crbug.com/webrtc/10678): Remove this in favor of + // GetLatestReportBlockData(). + virtual int32_t RemoteRTCPStat( + std::vector* receive_blocks) const = 0; + // A snapshot of Report Blocks with additional data of interest to statistics. + // Within this list, the sender-source SSRC pair is unique and per-pair the + // ReportBlockData represents the latest Report Block that was received for + // that pair. + virtual std::vector GetLatestReportBlockData() const = 0; + + // (XR) Sets Receiver Reference Time Report (RTTR) status. + virtual void SetRtcpXrRrtrStatus(bool enable) = 0; + + // Returns current Receiver Reference Time Report (RTTR) status. + virtual bool RtcpXrRrtrStatus() const = 0; + + // (REMB) Receiver Estimated Max Bitrate. + // Schedules sending REMB on next and following sender/receiver reports. + void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override = 0; + // Stops sending REMB on next and following sender/receiver reports. + void UnsetRemb() override = 0; + + // (NACK) + + // Sends a Negative acknowledgement packet. + // Returns -1 on failure else 0. + // TODO(philipel): Deprecate this and start using SendNack instead, mostly + // because we want a function that actually send NACK for the specified + // packets. 
+ virtual int32_t SendNACK(const uint16_t* nack_list, uint16_t size) = 0; + + // Sends NACK for the packets specified. + // Note: This assumes the caller keeps track of timing and doesn't rely on + // the RTP module to do this. + virtual void SendNack(const std::vector& sequence_numbers) = 0; + + // Store the sent packets, needed to answer to a Negative acknowledgment + // requests. + virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0; + + // Returns true if the module is configured to store packets. + virtual bool StorePackets() const = 0; + + virtual void SetVideoBitrateAllocation( + const VideoBitrateAllocation& bitrate) = 0; + + // ************************************************************************** + // Video + // ************************************************************************** + + // Requests new key frame. + // using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1 + void SendPictureLossIndication() { SendRTCP(kRtcpPli); } + // using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2 + void SendFullIntraRequest() { SendRTCP(kRtcpFir); } + + // Sends a LossNotification RTCP message. + // Returns -1 on failure else 0. 
+ virtual int32_t SendLossNotification(uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) = 0; +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_ diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc index c993e47c2e..584fced397 100644 --- a/modules/rtp_rtcp/source/rtp_sender.cc +++ b/modules/rtp_rtcp/source/rtp_sender.cc @@ -28,6 +28,7 @@ #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/rate_limiter.h" @@ -81,10 +82,55 @@ constexpr RtpExtensionSize kVideoExtensionSizes[] = { CreateMaxExtensionSize(), {RtpGenericFrameDescriptorExtension00::kId, RtpGenericFrameDescriptorExtension00::kMaxSizeBytes}, - {RtpGenericFrameDescriptorExtension01::kId, - RtpGenericFrameDescriptorExtension01::kMaxSizeBytes}, }; +// Size info for header extensions that might be used in audio packets. +constexpr RtpExtensionSize kAudioExtensionSizes[] = { + CreateExtensionSize(), + CreateExtensionSize(), + CreateExtensionSize(), + CreateExtensionSize(), + CreateExtensionSize(), + CreateExtensionSize(), + CreateMaxExtensionSize(), + CreateMaxExtensionSize(), + CreateMaxExtensionSize(), +}; + +// Non-volatile extensions can be expected on all packets, if registered. +// Volatile ones, such as VideoContentTypeExtension which is only set on +// key-frames, are removed to simplify overhead calculations at the expense of +// some accuracy. 
+bool IsNonVolatile(RTPExtensionType type) { + switch (type) { + case kRtpExtensionTransmissionTimeOffset: + case kRtpExtensionAudioLevel: + case kRtpExtensionAbsoluteSendTime: + case kRtpExtensionTransportSequenceNumber: + case kRtpExtensionTransportSequenceNumber02: + case kRtpExtensionRtpStreamId: + case kRtpExtensionMid: + case kRtpExtensionGenericFrameDescriptor00: + case kRtpExtensionGenericFrameDescriptor02: + return true; + case kRtpExtensionInbandComfortNoise: + case kRtpExtensionAbsoluteCaptureTime: + case kRtpExtensionVideoRotation: + case kRtpExtensionPlayoutDelay: + case kRtpExtensionVideoContentType: + case kRtpExtensionVideoLayersAllocation: + case kRtpExtensionVideoTiming: + case kRtpExtensionRepairedRtpStreamId: + case kRtpExtensionColorSpace: + return false; + case kRtpExtensionNone: + case kRtpExtensionNumberOfExtensions: + RTC_NOTREACHED(); + return false; + } + RTC_CHECK_NOTREACHED(); +} + bool HasBweExtension(const RtpHeaderExtensionMap& extensions_map) { return extensions_map.IsRegistered(kRtpExtensionTransportSequenceNumber) || extensions_map.IsRegistered(kRtpExtensionTransportSequenceNumber02) || @@ -92,9 +138,24 @@ bool HasBweExtension(const RtpHeaderExtensionMap& extensions_map) { extensions_map.IsRegistered(kRtpExtensionTransmissionTimeOffset); } +double GetMaxPaddingSizeFactor(const WebRtcKeyValueConfig* field_trials) { + // Too low factor means RTX payload padding is rarely used and ineffective. + // Too high means we risk interrupting regular media packets. + // In practice, 3x seems to yield reasonable results. 
+ constexpr double kDefaultFactor = 3.0; + if (!field_trials) { + return kDefaultFactor; + } + + FieldTrialOptional factor("factor", kDefaultFactor); + ParseFieldTrial({&factor}, field_trials->Lookup("WebRTC-LimitPaddingSize")); + RTC_CHECK_GE(factor.Value(), 0.0); + return factor.Value(); +} + } // namespace -RTPSender::RTPSender(const RtpRtcp::Configuration& config, +RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history, RtpPacketSender* packet_sender) : clock_(config.clock), @@ -102,17 +163,20 @@ RTPSender::RTPSender(const RtpRtcp::Configuration& config, audio_configured_(config.audio), ssrc_(config.local_media_ssrc), rtx_ssrc_(config.rtx_send_ssrc), - flexfec_ssrc_(config.flexfec_sender - ? absl::make_optional(config.flexfec_sender->ssrc()) - : absl::nullopt), + flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc() + : absl::nullopt), + max_padding_size_factor_(GetMaxPaddingSizeFactor(config.field_trials)), packet_history_(packet_history), paced_sender_(packet_sender), sending_media_(true), // Default to sending media. max_packet_size_(IP_PACKET_SIZE - 28), // Default is IP-v4/UDP. 
last_payload_type_(-1), rtp_header_extension_map_(config.extmap_allow_mixed), + max_media_packet_header_(kRtpHeaderSize), + max_padding_fec_packet_header_(kRtpHeaderSize), // RTP variables sequence_number_forced_(false), + always_send_mid_and_rid_(config.always_send_mid_and_rid), ssrc_has_acked_(false), rtx_ssrc_has_acked_(false), last_rtp_timestamp_(0), @@ -155,48 +219,57 @@ rtc::ArrayView RTPSender::VideoExtensionSizes() { arraysize(kVideoExtensionSizes)); } +rtc::ArrayView RTPSender::AudioExtensionSizes() { + return rtc::MakeArrayView(kAudioExtensionSizes, + arraysize(kAudioExtensionSizes)); +} + void RTPSender::SetExtmapAllowMixed(bool extmap_allow_mixed) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); rtp_header_extension_map_.SetExtmapAllowMixed(extmap_allow_mixed); } int32_t RTPSender::RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); bool registered = rtp_header_extension_map_.RegisterByType(id, type); supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_); + UpdateHeaderSizes(); return registered ? 
0 : -1; } bool RTPSender::RegisterRtpHeaderExtension(absl::string_view uri, int id) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); bool registered = rtp_header_extension_map_.RegisterByUri(id, uri); supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_); + UpdateHeaderSizes(); return registered; } bool RTPSender::IsRtpHeaderExtensionRegistered(RTPExtensionType type) const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return rtp_header_extension_map_.IsRegistered(type); } int32_t RTPSender::DeregisterRtpHeaderExtension(RTPExtensionType type) { - rtc::CritScope lock(&send_critsect_); - int32_t deregistered = rtp_header_extension_map_.Deregister(type); + MutexLock lock(&send_mutex_); + rtp_header_extension_map_.Deregister(type); supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_); - return deregistered; + UpdateHeaderSizes(); + return 0; } void RTPSender::DeregisterRtpHeaderExtension(absl::string_view uri) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); rtp_header_extension_map_.Deregister(uri); supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_); + UpdateHeaderSizes(); } void RTPSender::SetMaxRtpPacketSize(size_t max_packet_size) { RTC_DCHECK_GE(max_packet_size, 100); RTC_DCHECK_LE(max_packet_size, IP_PACKET_SIZE); - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); max_packet_size_ = max_packet_size; } @@ -205,18 +278,18 @@ size_t RTPSender::MaxRtpPacketSize() const { } void RTPSender::SetRtxStatus(int mode) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); rtx_ = mode; } int RTPSender::RtxStatus() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return rtx_; } void RTPSender::SetRtxPayloadType(int payload_type, int associated_payload_type) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); RTC_DCHECK_LE(payload_type, 127); 
RTC_DCHECK_LE(associated_payload_type, 127); if (payload_type < 0) { @@ -266,7 +339,8 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) { if (!packet) { return -1; } - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); + packet->set_fec_protect_packet(false); std::vector> packets; packets.emplace_back(std::move(packet)); paced_sender_->EnqueuePackets(std::move(packets)); @@ -275,13 +349,17 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) { } void RTPSender::OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); + bool update_required = !ssrc_has_acked_; ssrc_has_acked_ = true; + if (update_required) { + UpdateHeaderSizes(); + } } void RTPSender::OnReceivedAckOnRtxSsrc( int64_t extended_highest_sequence_number) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); rtx_ssrc_has_acked_ = true; } @@ -301,12 +379,12 @@ void RTPSender::OnReceivedNack( } bool RTPSender::SupportsPadding() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return sending_media_ && supports_bwe_extension_; } bool RTPSender::SupportsRtxPayloadPadding() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return sending_media_ && supports_bwe_extension_ && (rtx_ & kRtxRedundantPayloads); } @@ -327,6 +405,15 @@ std::vector> RTPSender::GeneratePadding( packet_history_->GetPayloadPaddingPacket( [&](const RtpPacketToSend& packet) -> std::unique_ptr { + // Limit overshoot, generate <= |max_padding_size_factor_| * + // target_size_bytes. 
+ const size_t max_overshoot_bytes = static_cast( + ((max_padding_size_factor_ - 1.0) * target_size_bytes) + + 0.5); + if (packet.payload_size() + kRtxHeaderSize > + max_overshoot_bytes + bytes_left) { + return nullptr; + } return BuildRtxPacket(packet); }); if (!packet) { @@ -334,18 +421,19 @@ std::vector> RTPSender::GeneratePadding( } bytes_left -= std::min(bytes_left, packet->payload_size()); - packet->set_packet_type(RtpPacketToSend::Type::kPadding); + packet->set_packet_type(RtpPacketMediaType::kPadding); padding_packets.push_back(std::move(packet)); } } - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); if (!sending_media_) { return {}; } size_t padding_bytes_in_packet; - const size_t max_payload_size = max_packet_size_ - RtpHeaderLength(); + const size_t max_payload_size = + max_packet_size_ - max_padding_fec_packet_header_; if (audio_configured_) { // Allow smaller padding packets for audio. padding_bytes_in_packet = rtc::SafeClamp( @@ -362,7 +450,7 @@ std::vector> RTPSender::GeneratePadding( while (bytes_left > 0) { auto padding_packet = std::make_unique(&rtp_header_extension_map_); - padding_packet->set_packet_type(RtpPacketToSend::Type::kPadding); + padding_packet->set_packet_type(RtpPacketMediaType::kPadding); padding_packet->SetMarker(false); padding_packet->SetTimestamp(last_rtp_timestamp_); padding_packet->set_capture_time_ms(capture_time_ms_); @@ -399,8 +487,11 @@ std::vector> RTPSender::GeneratePadding( padding_packet->SetTimestamp(padding_packet->Timestamp() + (now_ms - last_timestamp_time_ms_) * kTimestampTicksPerMs); - padding_packet->set_capture_time_ms(padding_packet->capture_time_ms() + - (now_ms - last_timestamp_time_ms_)); + if (padding_packet->capture_time_ms() > 0) { + padding_packet->set_capture_time_ms( + padding_packet->capture_time_ms() + + (now_ms - last_timestamp_time_ms_)); + } } RTC_DCHECK(rtx_ssrc_); padding_packet->SetSsrc(*rtx_ssrc_); @@ -460,24 +551,25 @@ void RTPSender::EnqueuePackets( 
paced_sender_->EnqueuePackets(std::move(packets)); } -size_t RTPSender::RtpHeaderLength() const { - rtc::CritScope lock(&send_critsect_); - size_t rtp_header_length = kRtpHeaderLength; - rtp_header_length += sizeof(uint32_t) * csrcs_.size(); - rtp_header_length += RtpHeaderExtensionSize(kFecOrPaddingExtensionSizes, - rtp_header_extension_map_); - return rtp_header_length; +size_t RTPSender::FecOrPaddingPacketMaxRtpHeaderLength() const { + MutexLock lock(&send_mutex_); + return max_padding_fec_packet_header_; +} + +size_t RTPSender::ExpectedPerPacketOverhead() const { + MutexLock lock(&send_mutex_); + return max_media_packet_header_; } uint16_t RTPSender::AllocateSequenceNumber(uint16_t packets_to_send) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); uint16_t first_allocated_sequence_number = sequence_number_; sequence_number_ += packets_to_send; return first_allocated_sequence_number; } std::unique_ptr RTPSender::AllocatePacket() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); // TODO(danilchap): Find better motivator and value for extra capacity. // RtpPacketizer might slightly miscalulate needed size, // SRTP may benefit from extra space in the buffer and do encryption in place @@ -498,13 +590,15 @@ std::unique_ptr RTPSender::AllocatePacket() const { // in the MID and/or (R)RID header extensions if present. Therefore, the // sender can reduce overhead by omitting these header extensions once it // knows that the receiver has "bound" the SSRC. + // This optimization can be configured by setting + // |always_send_mid_and_rid_| appropriately. // // The algorithm here is fairly simple: Always attach a MID and/or RID (if // configured) to the outgoing packets until an RTCP receiver report comes // back for this SSRC. That feedback indicates the receiver must have // received a packet with the SSRC and header extension(s), so the sender // then stops attaching the MID and RID. 
- if (!ssrc_has_acked_) { + if (always_send_mid_and_rid_ || !ssrc_has_acked_) { // These are no-ops if the corresponding header extension is not registered. if (!mid_.empty()) { packet->SetExtension(mid_); @@ -517,7 +611,7 @@ std::unique_ptr RTPSender::AllocatePacket() const { } bool RTPSender::AssignSequenceNumber(RtpPacketToSend* packet) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); if (!sending_media_) return false; RTC_DCHECK(packet->Ssrc() == ssrc_); @@ -536,12 +630,12 @@ bool RTPSender::AssignSequenceNumber(RtpPacketToSend* packet) { } void RTPSender::SetSendingMediaStatus(bool enabled) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); sending_media_ = enabled; } bool RTPSender::SendingMedia() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return sending_media_; } @@ -550,39 +644,42 @@ bool RTPSender::IsAudioConfigured() const { } void RTPSender::SetTimestampOffset(uint32_t timestamp) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); timestamp_offset_ = timestamp; } uint32_t RTPSender::TimestampOffset() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return timestamp_offset_; } void RTPSender::SetRid(const std::string& rid) { // RID is used in simulcast scenario when multiple layers share the same mid. - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); RTC_DCHECK_LE(rid.length(), RtpStreamId::kMaxValueSizeBytes); rid_ = rid; + UpdateHeaderSizes(); } void RTPSender::SetMid(const std::string& mid) { // This is configured via the API. 
- rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); RTC_DCHECK_LE(mid.length(), RtpMid::kMaxValueSizeBytes); mid_ = mid; + UpdateHeaderSizes(); } void RTPSender::SetCsrcs(const std::vector& csrcs) { RTC_DCHECK_LE(csrcs.size(), kRtpCsrcSize); - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); csrcs_ = csrcs; + UpdateHeaderSizes(); } void RTPSender::SetSequenceNumber(uint16_t seq) { bool updated_sequence_number = false; { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); sequence_number_forced_ = true; if (sequence_number_ != seq) { updated_sequence_number = true; @@ -598,7 +695,7 @@ void RTPSender::SetSequenceNumber(uint16_t seq) { } uint16_t RTPSender::SequenceNumber() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return sequence_number_; } @@ -656,7 +753,7 @@ std::unique_ptr RTPSender::BuildRtxPacket( // Add original RTP header. { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); if (!sending_media_) return nullptr; @@ -687,7 +784,7 @@ std::unique_ptr RTPSender::BuildRtxPacket( // Note that RTX packets must used the RepairedRtpStreamId (RRID) header // extension instead of the RtpStreamId (RID) header extension even though // the payload is identical. - if (!rtx_ssrc_has_acked_) { + if (always_send_mid_and_rid_ || !rtx_ssrc_has_acked_) { // These are no-ops if the corresponding header extension is not // registered. 
if (!mid_.empty()) { @@ -722,7 +819,7 @@ std::unique_ptr RTPSender::BuildRtxPacket( } void RTPSender::SetRtpState(const RtpState& rtp_state) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); sequence_number_ = rtp_state.sequence_number; sequence_number_forced_ = true; timestamp_offset_ = rtp_state.start_timestamp; @@ -730,10 +827,11 @@ void RTPSender::SetRtpState(const RtpState& rtp_state) { capture_time_ms_ = rtp_state.capture_time_ms; last_timestamp_time_ms_ = rtp_state.last_timestamp_time_ms; ssrc_has_acked_ = rtp_state.ssrc_has_acked; + UpdateHeaderSizes(); } RtpState RTPSender::GetRtpState() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); RtpState state; state.sequence_number = sequence_number_; @@ -746,13 +844,13 @@ RtpState RTPSender::GetRtpState() const { } void RTPSender::SetRtxRtpState(const RtpState& rtp_state) { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); sequence_number_rtx_ = rtp_state.sequence_number; rtx_ssrc_has_acked_ = rtp_state.ssrc_has_acked; } RtpState RTPSender::GetRtxRtpState() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); RtpState state; state.sequence_number = sequence_number_rtx_; @@ -763,7 +861,45 @@ RtpState RTPSender::GetRtxRtpState() const { } int64_t RTPSender::LastTimestampTimeMs() const { - rtc::CritScope lock(&send_critsect_); + MutexLock lock(&send_mutex_); return last_timestamp_time_ms_; } + +void RTPSender::UpdateHeaderSizes() { + const size_t rtp_header_length = + kRtpHeaderLength + sizeof(uint32_t) * csrcs_.size(); + + max_padding_fec_packet_header_ = + rtp_header_length + RtpHeaderExtensionSize(kFecOrPaddingExtensionSizes, + rtp_header_extension_map_); + + // RtpStreamId and Mid are treated specially in that we check if they + // currently are being sent. RepairedRtpStreamId is still ignored since we + // assume RTX will not make up large enough bitrate to treat overhead + // differently. 
+ const bool send_mid_rid = always_send_mid_and_rid_ || !ssrc_has_acked_; + std::vector non_volatile_extensions; + for (auto& extension : + audio_configured_ ? AudioExtensionSizes() : VideoExtensionSizes()) { + if (IsNonVolatile(extension.type)) { + switch (extension.type) { + case RTPExtensionType::kRtpExtensionMid: + if (send_mid_rid && !mid_.empty()) { + non_volatile_extensions.push_back(extension); + } + break; + case RTPExtensionType::kRtpExtensionRtpStreamId: + if (send_mid_rid && !rid_.empty()) { + non_volatile_extensions.push_back(extension); + } + break; + default: + non_volatile_extensions.push_back(extension); + } + } + } + max_media_packet_header_ = + rtp_header_length + RtpHeaderExtensionSize(non_volatile_extensions, + rtp_header_extension_map_); +} } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender.h b/modules/rtp_rtcp/source/rtp_sender.h index 8915e39e9e..1580259b36 100644 --- a/modules/rtp_rtcp/source/rtp_sender.h +++ b/modules/rtp_rtcp/source/rtp_sender.h @@ -25,122 +25,153 @@ #include "modules/rtp_rtcp/include/flexfec_sender.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_packet_sender.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/deprecation.h" #include "rtc_base/random.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { class FrameEncryptorInterface; -class OverheadObserver; class RateLimiter; class RtcEventLog; class RtpPacketToSend; class RTPSender { public: - RTPSender(const RtpRtcp::Configuration& config, + RTPSender(const RtpRtcpInterface::Configuration& 
config, RtpPacketHistory* packet_history, RtpPacketSender* packet_sender); + RTPSender() = delete; + RTPSender(const RTPSender&) = delete; + RTPSender& operator=(const RTPSender&) = delete; + ~RTPSender(); - void SetSendingMediaStatus(bool enabled); - bool SendingMedia() const; - bool IsAudioConfigured() const; + void SetSendingMediaStatus(bool enabled) RTC_LOCKS_EXCLUDED(send_mutex_); + bool SendingMedia() const RTC_LOCKS_EXCLUDED(send_mutex_); + bool IsAudioConfigured() const RTC_LOCKS_EXCLUDED(send_mutex_); - uint32_t TimestampOffset() const; - void SetTimestampOffset(uint32_t timestamp); + uint32_t TimestampOffset() const RTC_LOCKS_EXCLUDED(send_mutex_); + void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(send_mutex_); - void SetRid(const std::string& rid); + void SetRid(const std::string& rid) RTC_LOCKS_EXCLUDED(send_mutex_); - void SetMid(const std::string& mid); + void SetMid(const std::string& mid) RTC_LOCKS_EXCLUDED(send_mutex_); - uint16_t SequenceNumber() const; - void SetSequenceNumber(uint16_t seq); + uint16_t SequenceNumber() const RTC_LOCKS_EXCLUDED(send_mutex_); + void SetSequenceNumber(uint16_t seq) RTC_LOCKS_EXCLUDED(send_mutex_); - void SetCsrcs(const std::vector& csrcs); + void SetCsrcs(const std::vector& csrcs) + RTC_LOCKS_EXCLUDED(send_mutex_); - void SetMaxRtpPacketSize(size_t max_packet_size); + void SetMaxRtpPacketSize(size_t max_packet_size) + RTC_LOCKS_EXCLUDED(send_mutex_); - void SetExtmapAllowMixed(bool extmap_allow_mixed); + void SetExtmapAllowMixed(bool extmap_allow_mixed) + RTC_LOCKS_EXCLUDED(send_mutex_); // RTP header extension - int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id); - bool RegisterRtpHeaderExtension(absl::string_view uri, int id); - bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) const; - int32_t DeregisterRtpHeaderExtension(RTPExtensionType type); - void DeregisterRtpHeaderExtension(absl::string_view uri); - - bool SupportsPadding() const; - bool 
SupportsRtxPayloadPadding() const; + int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id) + RTC_LOCKS_EXCLUDED(send_mutex_); + bool RegisterRtpHeaderExtension(absl::string_view uri, int id) + RTC_LOCKS_EXCLUDED(send_mutex_); + bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) const + RTC_LOCKS_EXCLUDED(send_mutex_); + int32_t DeregisterRtpHeaderExtension(RTPExtensionType type) + RTC_LOCKS_EXCLUDED(send_mutex_); + void DeregisterRtpHeaderExtension(absl::string_view uri) + RTC_LOCKS_EXCLUDED(send_mutex_); + + bool SupportsPadding() const RTC_LOCKS_EXCLUDED(send_mutex_); + bool SupportsRtxPayloadPadding() const RTC_LOCKS_EXCLUDED(send_mutex_); std::vector> GeneratePadding( size_t target_size_bytes, - bool media_has_been_sent); + bool media_has_been_sent) RTC_LOCKS_EXCLUDED(send_mutex_); // NACK. void OnReceivedNack(const std::vector& nack_sequence_numbers, - int64_t avg_rtt); + int64_t avg_rtt) RTC_LOCKS_EXCLUDED(send_mutex_); - int32_t ReSendPacket(uint16_t packet_id); + int32_t ReSendPacket(uint16_t packet_id) RTC_LOCKS_EXCLUDED(send_mutex_); // ACK. - void OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number); - void OnReceivedAckOnRtxSsrc(int64_t extended_highest_sequence_number); + void OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number) + RTC_LOCKS_EXCLUDED(send_mutex_); + void OnReceivedAckOnRtxSsrc(int64_t extended_highest_sequence_number) + RTC_LOCKS_EXCLUDED(send_mutex_); // RTX. 
- void SetRtxStatus(int mode); - int RtxStatus() const; - absl::optional RtxSsrc() const { return rtx_ssrc_; } + void SetRtxStatus(int mode) RTC_LOCKS_EXCLUDED(send_mutex_); + int RtxStatus() const RTC_LOCKS_EXCLUDED(send_mutex_); + absl::optional RtxSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { + return rtx_ssrc_; + } - void SetRtxPayloadType(int payload_type, int associated_payload_type); + void SetRtxPayloadType(int payload_type, int associated_payload_type) + RTC_LOCKS_EXCLUDED(send_mutex_); // Size info for header extensions used by FEC packets. - static rtc::ArrayView FecExtensionSizes(); + static rtc::ArrayView FecExtensionSizes() + RTC_LOCKS_EXCLUDED(send_mutex_); // Size info for header extensions used by video packets. - static rtc::ArrayView VideoExtensionSizes(); + static rtc::ArrayView VideoExtensionSizes() + RTC_LOCKS_EXCLUDED(send_mutex_); + + // Size info for header extensions used by audio packets. + static rtc::ArrayView AudioExtensionSizes() + RTC_LOCKS_EXCLUDED(send_mutex_); // Create empty packet, fills ssrc, csrcs and reserve place for header // extensions RtpSender updates before sending. - std::unique_ptr AllocatePacket() const; + std::unique_ptr AllocatePacket() const + RTC_LOCKS_EXCLUDED(send_mutex_); // Allocate sequence number for provided packet. // Save packet's fields to generate padding that doesn't break media stream. // Return false if sending was turned off. - bool AssignSequenceNumber(RtpPacketToSend* packet); - - // Used for padding and FEC packets only. - size_t RtpHeaderLength() const; - uint16_t AllocateSequenceNumber(uint16_t packets_to_send); + bool AssignSequenceNumber(RtpPacketToSend* packet) + RTC_LOCKS_EXCLUDED(send_mutex_); + // Maximum header overhead per fec/padding packet. + size_t FecOrPaddingPacketMaxRtpHeaderLength() const + RTC_LOCKS_EXCLUDED(send_mutex_); + // Expected header overhead per media packet. 
+ size_t ExpectedPerPacketOverhead() const RTC_LOCKS_EXCLUDED(send_mutex_); + uint16_t AllocateSequenceNumber(uint16_t packets_to_send) + RTC_LOCKS_EXCLUDED(send_mutex_); // Including RTP headers. - size_t MaxRtpPacketSize() const; + size_t MaxRtpPacketSize() const RTC_LOCKS_EXCLUDED(send_mutex_); - uint32_t SSRC() const { return ssrc_; } + uint32_t SSRC() const RTC_LOCKS_EXCLUDED(send_mutex_) { return ssrc_; } - absl::optional FlexfecSsrc() const { return flexfec_ssrc_; } + absl::optional FlexfecSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { + return flexfec_ssrc_; + } // Sends packet to |transport_| or to the pacer, depending on configuration. // TODO(bugs.webrtc.org/XXX): Remove in favor of EnqueuePackets(). - bool SendToNetwork(std::unique_ptr packet); + bool SendToNetwork(std::unique_ptr packet) + RTC_LOCKS_EXCLUDED(send_mutex_); // Pass a set of packets to RtpPacketSender instance, for paced or immediate // sending to the network. - void EnqueuePackets(std::vector> packets); + void EnqueuePackets(std::vector> packets) + RTC_LOCKS_EXCLUDED(send_mutex_); - void SetRtpState(const RtpState& rtp_state); - RtpState GetRtpState() const; - void SetRtxRtpState(const RtpState& rtp_state); - RtpState GetRtxRtpState() const; + void SetRtpState(const RtpState& rtp_state) RTC_LOCKS_EXCLUDED(send_mutex_); + RtpState GetRtpState() const RTC_LOCKS_EXCLUDED(send_mutex_); + void SetRtxRtpState(const RtpState& rtp_state) + RTC_LOCKS_EXCLUDED(send_mutex_); + RtpState GetRtxRtpState() const RTC_LOCKS_EXCLUDED(send_mutex_); - int64_t LastTimestampTimeMs() const; + int64_t LastTimestampTimeMs() const RTC_LOCKS_EXCLUDED(send_mutex_); private: std::unique_ptr BuildRtxPacket( @@ -148,54 +179,60 @@ class RTPSender { bool IsFecPacket(const RtpPacketToSend& packet) const; + void UpdateHeaderSizes() RTC_EXCLUSIVE_LOCKS_REQUIRED(send_mutex_); + Clock* const clock_; - Random random_ RTC_GUARDED_BY(send_critsect_); + Random random_ RTC_GUARDED_BY(send_mutex_); const bool audio_configured_; 
const uint32_t ssrc_; const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; + // Limits GeneratePadding() outcome to <= + // |max_padding_size_factor_| * |target_size_bytes| + const double max_padding_size_factor_; RtpPacketHistory* const packet_history_; RtpPacketSender* const paced_sender_; - rtc::CriticalSection send_critsect_; + mutable Mutex send_mutex_; - bool sending_media_ RTC_GUARDED_BY(send_critsect_); + bool sending_media_ RTC_GUARDED_BY(send_mutex_); size_t max_packet_size_; - int8_t last_payload_type_ RTC_GUARDED_BY(send_critsect_); + int8_t last_payload_type_ RTC_GUARDED_BY(send_mutex_); - RtpHeaderExtensionMap rtp_header_extension_map_ - RTC_GUARDED_BY(send_critsect_); + RtpHeaderExtensionMap rtp_header_extension_map_ RTC_GUARDED_BY(send_mutex_); + size_t max_media_packet_header_ RTC_GUARDED_BY(send_mutex_); + size_t max_padding_fec_packet_header_ RTC_GUARDED_BY(send_mutex_); // RTP variables - uint32_t timestamp_offset_ RTC_GUARDED_BY(send_critsect_); - bool sequence_number_forced_ RTC_GUARDED_BY(send_critsect_); - uint16_t sequence_number_ RTC_GUARDED_BY(send_critsect_); - uint16_t sequence_number_rtx_ RTC_GUARDED_BY(send_critsect_); + uint32_t timestamp_offset_ RTC_GUARDED_BY(send_mutex_); + bool sequence_number_forced_ RTC_GUARDED_BY(send_mutex_); + uint16_t sequence_number_ RTC_GUARDED_BY(send_mutex_); + uint16_t sequence_number_rtx_ RTC_GUARDED_BY(send_mutex_); // RID value to send in the RID or RepairedRID header extension. - std::string rid_ RTC_GUARDED_BY(send_critsect_); + std::string rid_ RTC_GUARDED_BY(send_mutex_); // MID value to send in the MID header extension. - std::string mid_ RTC_GUARDED_BY(send_critsect_); + std::string mid_ RTC_GUARDED_BY(send_mutex_); + // Should we send MID/RID even when ACKed? (see below). + const bool always_send_mid_and_rid_; // Track if any ACK has been received on the SSRC and RTX SSRC to indicate // when to stop sending the MID and RID header extensions. 
- bool ssrc_has_acked_ RTC_GUARDED_BY(send_critsect_); - bool rtx_ssrc_has_acked_ RTC_GUARDED_BY(send_critsect_); - uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(send_critsect_); - int64_t capture_time_ms_ RTC_GUARDED_BY(send_critsect_); - int64_t last_timestamp_time_ms_ RTC_GUARDED_BY(send_critsect_); - bool last_packet_marker_bit_ RTC_GUARDED_BY(send_critsect_); - std::vector csrcs_ RTC_GUARDED_BY(send_critsect_); - int rtx_ RTC_GUARDED_BY(send_critsect_); + bool ssrc_has_acked_ RTC_GUARDED_BY(send_mutex_); + bool rtx_ssrc_has_acked_ RTC_GUARDED_BY(send_mutex_); + uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(send_mutex_); + int64_t capture_time_ms_ RTC_GUARDED_BY(send_mutex_); + int64_t last_timestamp_time_ms_ RTC_GUARDED_BY(send_mutex_); + bool last_packet_marker_bit_ RTC_GUARDED_BY(send_mutex_); + std::vector csrcs_ RTC_GUARDED_BY(send_mutex_); + int rtx_ RTC_GUARDED_BY(send_mutex_); // Mapping rtx_payload_type_map_[associated] = rtx. - std::map rtx_payload_type_map_ RTC_GUARDED_BY(send_critsect_); - bool supports_bwe_extension_ RTC_GUARDED_BY(send_critsect_); + std::map rtx_payload_type_map_ RTC_GUARDED_BY(send_mutex_); + bool supports_bwe_extension_ RTC_GUARDED_BY(send_mutex_); RateLimiter* const retransmission_rate_limiter_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTPSender); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.cc b/modules/rtp_rtcp/source/rtp_sender_audio.cc index 4a47d33573..8cf60aaecd 100644 --- a/modules/rtp_rtcp/source/rtp_sender_audio.cc +++ b/modules/rtp_rtcp/source/rtp_sender_audio.cc @@ -46,15 +46,22 @@ const char* FrameTypeToString(AudioFrameType frame_type) { case AudioFrameType::kAudioFrameCN: return "audio_cn"; } + RTC_CHECK_NOTREACHED(); } #endif +constexpr char kIncludeCaptureClockOffset[] = + "WebRTC-IncludeCaptureClockOffset"; + } // namespace RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtp_sender) : clock_(clock), rtp_sender_(rtp_sender), - absolute_capture_time_sender_(clock) { + 
absolute_capture_time_sender_(clock), + include_capture_clock_offset_( + absl::StartsWith(field_trials_.Lookup(kIncludeCaptureClockOffset), + "Enabled")) { RTC_DCHECK(clock_); } @@ -66,7 +73,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name, const size_t channels, const uint32_t rate) { if (absl::EqualsIgnoreCase(payload_name, "cn")) { - rtc::CritScope cs(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); // we can have multiple CNG payload types switch (frequency) { case 8000: @@ -85,14 +92,14 @@ int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name, return -1; } } else if (absl::EqualsIgnoreCase(payload_name, "telephone-event")) { - rtc::CritScope cs(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); // Don't add it to the list // we dont want to allow send with a DTMF payloadtype dtmf_payload_type_ = payload_type; dtmf_payload_freq_ = frequency; return 0; } else if (payload_name == "audio") { - rtc::CritScope cs(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); encoder_rtp_timestamp_frequency_ = frequency; return 0; } @@ -100,7 +107,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name, } bool RTPSenderAudio::MarkerBit(AudioFrameType frame_type, int8_t payload_type) { - rtc::CritScope cs(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); // for audio true for first packet in a speech burst bool marker_bit = false; if (last_payload_type_ != payload_type) { @@ -174,7 +181,7 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, uint32_t dtmf_payload_freq = 0; absl::optional encoder_rtp_timestamp_frequency; { - rtc::CritScope cs(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); audio_level_dbov = audio_level_dbov_; dtmf_payload_freq = dtmf_payload_freq_; encoder_rtp_timestamp_frequency = encoder_rtp_timestamp_frequency_; @@ -280,7 +287,8 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, // absolute capture time 
sending. encoder_rtp_timestamp_frequency.value_or(0), Int64MsToUQ32x32(absolute_capture_timestamp_ms + NtpOffsetMs()), - /*estimated_capture_clock_offset=*/absl::nullopt); + /*estimated_capture_clock_offset=*/ + include_capture_clock_offset_ ? absl::make_optional(0) : absl::nullopt); if (absolute_capture_time) { // It also checks that extension was registered during SDP negotiation. If // not then setter won't do anything. @@ -296,13 +304,13 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, return false; { - rtc::CritScope cs(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); last_payload_type_ = payload_type; } TRACE_EVENT_ASYNC_END2("webrtc", "Audio", rtp_timestamp, "timestamp", packet->Timestamp(), "seqnum", packet->SequenceNumber()); - packet->set_packet_type(RtpPacketToSend::Type::kAudio); + packet->set_packet_type(RtpPacketMediaType::kAudio); packet->set_allow_retransmission(true); bool send_result = rtp_sender_->SendToNetwork(std::move(packet)); if (first_packet_sent_()) { @@ -316,7 +324,7 @@ int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dbov) { if (level_dbov > 127) { return -1; } - rtc::CritScope cs(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); audio_level_dbov_ = level_dbov; return 0; } @@ -327,7 +335,7 @@ int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key, uint8_t level) { DtmfQueue::Event event; { - rtc::CritScope lock(&send_audio_critsect_); + MutexLock lock(&send_audio_mutex_); if (dtmf_payload_type_ < 0) { // TelephoneEvent payloadtype not configured return -1; @@ -387,7 +395,7 @@ bool RTPSenderAudio::SendTelephoneEventPacket(bool ended, dtmfbuffer[1] = E | R | volume; ByteWriter::WriteBigEndian(dtmfbuffer + 2, duration); - packet->set_packet_type(RtpPacketToSend::Type::kAudio); + packet->set_packet_type(RtpPacketMediaType::kAudio); packet->set_allow_retransmission(true); result = rtp_sender_->SendToNetwork(std::move(packet)); send_count--; diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.h 
b/modules/rtp_rtcp/source/rtp_sender_audio.h index c2d8074a60..57b9dd7ce6 100644 --- a/modules/rtp_rtcp/source/rtp_sender_audio.h +++ b/modules/rtp_rtcp/source/rtp_sender_audio.h @@ -17,13 +17,13 @@ #include #include "absl/strings/string_view.h" +#include "api/transport/field_trial_based_config.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" #include "modules/rtp_rtcp/source/dtmf_queue.h" #include "modules/rtp_rtcp/source/rtp_sender.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/one_time_event.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -32,6 +32,11 @@ namespace webrtc { class RTPSenderAudio { public: RTPSenderAudio(Clock* clock, RTPSender* rtp_sender); + + RTPSenderAudio() = delete; + RTPSenderAudio(const RTPSenderAudio&) = delete; + RTPSenderAudio& operator=(const RTPSenderAudio&) = delete; + ~RTPSenderAudio(); int32_t RegisterAudioPayload(absl::string_view payload_name, @@ -74,13 +79,13 @@ class RTPSenderAudio { Clock* const clock_ = nullptr; RTPSender* const rtp_sender_ = nullptr; - rtc::CriticalSection send_audio_critsect_; + Mutex send_audio_mutex_; // DTMF. bool dtmf_event_is_on_ = false; bool dtmf_event_first_packet_sent_ = false; - int8_t dtmf_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1; - uint32_t dtmf_payload_freq_ RTC_GUARDED_BY(send_audio_critsect_) = 8000; + int8_t dtmf_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; + uint32_t dtmf_payload_freq_ RTC_GUARDED_BY(send_audio_mutex_) = 8000; uint32_t dtmf_timestamp_ = 0; uint32_t dtmf_length_samples_ = 0; int64_t dtmf_time_last_sent_ = 0; @@ -89,24 +94,25 @@ class RTPSenderAudio { DtmfQueue dtmf_queue_; // VAD detection, used for marker bit. 
- bool inband_vad_active_ RTC_GUARDED_BY(send_audio_critsect_) = false; - int8_t cngnb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1; - int8_t cngwb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1; - int8_t cngswb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1; - int8_t cngfb_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1; - int8_t last_payload_type_ RTC_GUARDED_BY(send_audio_critsect_) = -1; + bool inband_vad_active_ RTC_GUARDED_BY(send_audio_mutex_) = false; + int8_t cngnb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; + int8_t cngwb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; + int8_t cngswb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; + int8_t cngfb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; + int8_t last_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; // Audio level indication. // (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/) - uint8_t audio_level_dbov_ RTC_GUARDED_BY(send_audio_critsect_) = 0; + uint8_t audio_level_dbov_ RTC_GUARDED_BY(send_audio_mutex_) = 0; OneTimeEvent first_packet_sent_; absl::optional encoder_rtp_timestamp_frequency_ - RTC_GUARDED_BY(send_audio_critsect_); + RTC_GUARDED_BY(send_audio_mutex_); AbsoluteCaptureTimeSender absolute_capture_time_sender_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTPSenderAudio); + const FieldTrialBasedConfig field_trials_; + const bool include_capture_clock_offset_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc index 3e35f42bff..d75f4e8947 100644 --- a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc @@ -18,7 +18,9 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include 
"modules/rtp_rtcp/source/time_util.h" +#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -67,56 +69,58 @@ class RtpSenderAudioTest : public ::testing::Test { public: RtpSenderAudioTest() : fake_clock_(kStartTime), - rtp_module_(RtpRtcp::Create([&] { - RtpRtcp::Configuration config; + rtp_module_(ModuleRtpRtcpImpl2::Create([&] { + RtpRtcpInterface::Configuration config; config.audio = true; config.clock = &fake_clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; return config; }())), - rtp_sender_audio_(&fake_clock_, rtp_module_->RtpSender()) { + rtp_sender_audio_( + std::make_unique(&fake_clock_, + rtp_module_->RtpSender())) { rtp_module_->SetSequenceNumber(kSeqNum); } SimulatedClock fake_clock_; LoopbackTransportTest transport_; - std::unique_ptr rtp_module_; - RTPSenderAudio rtp_sender_audio_; + std::unique_ptr rtp_module_; + std::unique_ptr rtp_sender_audio_; }; TEST_F(RtpSenderAudioTest, SendAudio) { const char payload_name[] = "PAYLOAD_NAME"; const uint8_t payload_type = 127; - ASSERT_EQ(0, rtp_sender_audio_.RegisterAudioPayload( + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( payload_name, payload_type, 48000, 0, 1500)); uint8_t payload[] = {47, 11, 32, 93, 89}; - ASSERT_TRUE(rtp_sender_audio_.SendAudio(AudioFrameType::kAudioFrameCN, - payload_type, 4321, payload, - sizeof(payload), - /*absolute_capture_timestamp_ms=*/0)); + ASSERT_TRUE( + rtp_sender_audio_->SendAudio(AudioFrameType::kAudioFrameCN, payload_type, + 4321, payload, sizeof(payload), + /*absolute_capture_timestamp_ms=*/0)); auto sent_payload = transport_.last_sent_packet().payload(); EXPECT_THAT(sent_payload, ElementsAreArray(payload)); } TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) { - EXPECT_EQ(0, rtp_sender_audio_.SetAudioLevel(kAudioLevel)); + EXPECT_EQ(0, rtp_sender_audio_->SetAudioLevel(kAudioLevel)); rtp_module_->RegisterRtpHeaderExtension(AudioLevel::kUri, kAudioLevelExtensionId); const char payload_name[] 
= "PAYLOAD_NAME"; const uint8_t payload_type = 127; - ASSERT_EQ(0, rtp_sender_audio_.RegisterAudioPayload( + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( payload_name, payload_type, 48000, 0, 1500)); uint8_t payload[] = {47, 11, 32, 93, 89}; - ASSERT_TRUE(rtp_sender_audio_.SendAudio(AudioFrameType::kAudioFrameCN, - payload_type, 4321, payload, - sizeof(payload), - /*absolute_capture_timestamp_ms=*/0)); + ASSERT_TRUE( + rtp_sender_audio_->SendAudio(AudioFrameType::kAudioFrameCN, payload_type, + 4321, payload, sizeof(payload), + /*absolute_capture_timestamp_ms=*/0)); auto sent_payload = transport_.last_sent_packet().payload(); EXPECT_THAT(sent_payload, ElementsAreArray(payload)); @@ -133,11 +137,11 @@ TEST_F(RtpSenderAudioTest, SendAudioWithoutAbsoluteCaptureTime) { constexpr uint32_t kAbsoluteCaptureTimestampMs = 521; const char payload_name[] = "audio"; const uint8_t payload_type = 127; - ASSERT_EQ(0, rtp_sender_audio_.RegisterAudioPayload( + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( payload_name, payload_type, 48000, 0, 1500)); uint8_t payload[] = {47, 11, 32, 93, 89}; - ASSERT_TRUE(rtp_sender_audio_.SendAudio( + ASSERT_TRUE(rtp_sender_audio_->SendAudio( AudioFrameType::kAudioFrameCN, payload_type, 4321, payload, sizeof(payload), kAbsoluteCaptureTimestampMs)); @@ -151,11 +155,11 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAbsoluteCaptureTime) { constexpr uint32_t kAbsoluteCaptureTimestampMs = 521; const char payload_name[] = "audio"; const uint8_t payload_type = 127; - ASSERT_EQ(0, rtp_sender_audio_.RegisterAudioPayload( + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( payload_name, payload_type, 48000, 0, 1500)); uint8_t payload[] = {47, 11, 32, 93, 89}; - ASSERT_TRUE(rtp_sender_audio_.SendAudio( + ASSERT_TRUE(rtp_sender_audio_->SendAudio( AudioFrameType::kAudioFrameCN, payload_type, 4321, payload, sizeof(payload), kAbsoluteCaptureTimestampMs)); @@ -165,6 +169,43 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAbsoluteCaptureTime) { 
 EXPECT_TRUE(absolute_capture_time); EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp, Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs())); + EXPECT_FALSE( + absolute_capture_time->estimated_capture_clock_offset.has_value()); +} + +// Essentially the same test as SendAudioWithAbsoluteCaptureTime but with a +// field trial. Once the field trial experiment has concluded, we will remove +// SendAudioWithAbsoluteCaptureTime. +TEST_F(RtpSenderAudioTest, + SendAudioWithAbsoluteCaptureTimeWithCaptureClockOffset) { + // Recreate rtp_sender_audio_ with the new field trial. + test::ScopedFieldTrials field_trial( + "WebRTC-IncludeCaptureClockOffset/Enabled/"); + rtp_sender_audio_ = + std::make_unique(&fake_clock_, rtp_module_->RtpSender()); + + rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::kUri, + kAbsoluteCaptureTimeExtensionId); + constexpr uint32_t kAbsoluteCaptureTimestampMs = 521; + const char payload_name[] = "audio"; + const uint8_t payload_type = 127; + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( + payload_name, payload_type, 48000, 0, 1500)); + uint8_t payload[] = {47, 11, 32, 93, 89}; + + ASSERT_TRUE(rtp_sender_audio_->SendAudio( + AudioFrameType::kAudioFrameCN, payload_type, 4321, payload, + sizeof(payload), kAbsoluteCaptureTimestampMs)); + + auto absolute_capture_time = + transport_.last_sent_packet() + .GetExtension(); + EXPECT_TRUE(absolute_capture_time); + EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp, + Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs())); + EXPECT_TRUE( + absolute_capture_time->estimated_capture_clock_offset.has_value()); + EXPECT_EQ(0, *absolute_capture_time->estimated_capture_clock_offset); } // As RFC4733, named telephone events are carried as part of the audio stream
- ASSERT_EQ(0, rtp_sender_audio_.RegisterAudioPayload( + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( kDtmfPayloadName, kPayloadType, kPayloadFrequency, 0, 0)); // For Telephone events, payload is not added to the registered payload list, // it will register only the payload used for audio stream. // Registering the payload again for audio stream with different payload name. const char* kPayloadName = "payload_name"; - ASSERT_EQ(0, rtp_sender_audio_.RegisterAudioPayload( + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( kPayloadName, kPayloadType, kPayloadFrequency, 1, 0)); // Start time is arbitrary. uint32_t capture_timestamp = fake_clock_.TimeInMilliseconds(); // DTMF event key=9, duration=500 and attenuationdB=10 - rtp_sender_audio_.SendTelephoneEvent(9, 500, 10); + rtp_sender_audio_->SendTelephoneEvent(9, 500, 10); // During start, it takes the starting timestamp as last sent timestamp. // The duration is calculated as the difference of current and last sent // timestamp. So for first call it will skip since the duration is zero. - ASSERT_TRUE(rtp_sender_audio_.SendAudio( + ASSERT_TRUE(rtp_sender_audio_->SendAudio( AudioFrameType::kEmptyFrame, kPayloadType, capture_timestamp, nullptr, 0, /*absolute_capture_time_ms=0*/ 0)); // DTMF Sample Length is (Frequency/1000) * Duration. // So in this case, it is (8000/1000) * 500 = 4000. // Sending it as two packets. - ASSERT_TRUE(rtp_sender_audio_.SendAudio(AudioFrameType::kEmptyFrame, - kPayloadType, - capture_timestamp + 2000, nullptr, 0, - /*absolute_capture_time_ms=0*/ 0)); + ASSERT_TRUE(rtp_sender_audio_->SendAudio(AudioFrameType::kEmptyFrame, + kPayloadType, + capture_timestamp + 2000, nullptr, 0, + /*absolute_capture_time_ms=0*/ 0)); // Marker Bit should be set to 1 for first packet. 
EXPECT_TRUE(transport_.last_sent_packet().Marker()); - ASSERT_TRUE(rtp_sender_audio_.SendAudio(AudioFrameType::kEmptyFrame, - kPayloadType, - capture_timestamp + 4000, nullptr, 0, - /*absolute_capture_time_ms=0*/ 0)); + ASSERT_TRUE(rtp_sender_audio_->SendAudio(AudioFrameType::kEmptyFrame, + kPayloadType, + capture_timestamp + 4000, nullptr, 0, + /*absolute_capture_time_ms=0*/ 0)); // Marker Bit should be set to 0 for rest of the packets. EXPECT_FALSE(transport_.last_sent_packet().Marker()); } diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.cc b/modules/rtp_rtcp/source/rtp_sender_egress.cc index 2244927291..aba23ddc4b 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -10,6 +10,7 @@ #include "modules/rtp_rtcp/source/rtp_sender_egress.h" +#include #include #include #include @@ -17,102 +18,192 @@ #include "absl/strings/match.h" #include "api/transport/field_trial_based_config.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/logging.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { namespace { constexpr uint32_t kTimestampTicksPerMs = 90; constexpr int kSendSideDelayWindowMs = 1000; constexpr int kBitrateStatisticsWindowMs = 1000; +constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; +constexpr TimeDelta kUpdateInterval = + TimeDelta::Millis(kBitrateStatisticsWindowMs); -bool IsEnabled(absl::string_view name, - const WebRtcKeyValueConfig* field_trials) { +bool IsTrialSetTo(const WebRtcKeyValueConfig* field_trials, + absl::string_view name, + absl::string_view value) { FieldTrialBasedConfig default_trials; auto& trials = field_trials ? 
*field_trials : default_trials; - return absl::StartsWith(trials.Lookup(name), "Enabled"); + return absl::StartsWith(trials.Lookup(name), value); } } // namespace RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender( - RtpSenderEgress* sender) - : transport_sequence_number_(0), sender_(sender) {} + RtpSenderEgress* sender, + SequenceNumberAssigner* sequence_number_assigner) + : transport_sequence_number_(0), + sender_(sender), + sequence_number_assigner_(sequence_number_assigner) { + RTC_DCHECK(sequence_number_assigner_); +} RtpSenderEgress::NonPacedPacketSender::~NonPacedPacketSender() = default; void RtpSenderEgress::NonPacedPacketSender::EnqueuePackets( std::vector> packets) { for (auto& packet : packets) { - if (!packet->SetExtension( - ++transport_sequence_number_)) { - --transport_sequence_number_; - } - packet->ReserveExtension(); - packet->ReserveExtension(); + PrepareForSend(packet.get()); sender_->SendPacket(packet.get(), PacedPacketInfo()); } + auto fec_packets = sender_->FetchFecPackets(); + if (!fec_packets.empty()) { + // Don't generate sequence numbers for flexfec, they are already running on + // an internally maintained sequence. 
+ const bool generate_sequence_numbers = !sender_->FlexFecSsrc().has_value(); + + for (auto& packet : fec_packets) { + if (generate_sequence_numbers) { + sequence_number_assigner_->AssignSequenceNumber(packet.get()); + } + PrepareForSend(packet.get()); + } + EnqueuePackets(std::move(fec_packets)); + } } -RtpSenderEgress::RtpSenderEgress(const RtpRtcp::Configuration& config, +void RtpSenderEgress::NonPacedPacketSender::PrepareForSend( + RtpPacketToSend* packet) { + if (!packet->SetExtension( + ++transport_sequence_number_)) { + --transport_sequence_number_; + } + packet->ReserveExtension(); + packet->ReserveExtension(); +} + +RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history) - : ssrc_(config.local_media_ssrc), + : worker_queue_(TaskQueueBase::Current()), + ssrc_(config.local_media_ssrc), rtx_ssrc_(config.rtx_send_ssrc), - flexfec_ssrc_(config.flexfec_sender - ? absl::make_optional(config.flexfec_sender->ssrc()) - : absl::nullopt), + flexfec_ssrc_(config.fec_generator ? 
config.fec_generator->FecSsrc() + : absl::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), send_side_bwe_with_overhead_( - IsEnabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)), + !IsTrialSetTo(config.field_trials, + "WebRTC-SendSideBwe-WithOverhead", + "Disabled")), clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), event_log_(config.event_log), +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE is_audio_(config.audio), +#endif + need_rtp_packet_infos_(config.need_rtp_packet_infos), + fec_generator_(config.fec_generator), transport_feedback_observer_(config.transport_feedback_callback), send_side_delay_observer_(config.send_side_delay_observer), send_packet_observer_(config.send_packet_observer), - overhead_observer_(config.overhead_observer), rtp_stats_callback_(config.rtp_stats_callback), bitrate_callback_(config.send_bitrate_observer), media_has_been_sent_(false), force_part_of_allocation_(false), + timestamp_offset_(0), max_delay_it_(send_delays_.end()), sum_delays_ms_(0), total_packet_send_delay_ms_(0), - rtp_overhead_bytes_per_packet_(0), - total_bitrate_sent_(kBitrateStatisticsWindowMs, - RateStatistics::kBpsScale), - nack_bitrate_sent_(kBitrateStatisticsWindowMs, - RateStatistics::kBpsScale) {} + send_rates_(kNumMediaTypes, + {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}), + rtp_sequence_number_map_(need_rtp_packet_infos_ + ? 
std::make_unique( + kRtpSequenceNumberMapMaxEntries) + : nullptr) { + RTC_DCHECK(worker_queue_); + pacer_checker_.Detach(); + if (bitrate_callback_) { + update_task_ = RepeatingTaskHandle::DelayedStart(worker_queue_, + kUpdateInterval, [this]() { + PeriodicUpdate(); + return kUpdateInterval; + }); + } +} + +RtpSenderEgress::~RtpSenderEgress() { + RTC_DCHECK_RUN_ON(worker_queue_); + update_task_.Stop(); +} void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info) { + RTC_DCHECK_RUN_ON(&pacer_checker_); RTC_DCHECK(packet); - const uint32_t packet_ssrc = packet->Ssrc(); RTC_DCHECK(packet->packet_type().has_value()); RTC_DCHECK(HasCorrectSsrc(*packet)); - int64_t now_ms = clock_->TimeInMilliseconds(); - if (is_audio_) { -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms, - SendBitrate().kbps(), packet_ssrc); - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioNackBitrate_kbps", now_ms, - NackOverheadRate().kbps(), packet_ssrc); -#endif - } else { + const uint32_t packet_ssrc = packet->Ssrc(); + const int64_t now_ms = clock_->TimeInMilliseconds(); + #if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms, - SendBitrate().kbps(), packet_ssrc); - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoNackBitrate_kbps", now_ms, - NackOverheadRate().kbps(), packet_ssrc); + worker_queue_->PostTask( + ToQueuedTask(task_safety_, [this, now_ms, packet_ssrc]() { + BweTestLoggingPlot(now_ms, packet_ssrc); + })); #endif + + if (need_rtp_packet_infos_ && + packet->packet_type() == RtpPacketToSend::Type::kVideo) { + worker_queue_->PostTask(ToQueuedTask( + task_safety_, + [this, packet_timestamp = packet->Timestamp(), + is_first_packet_of_frame = packet->is_first_packet_of_frame(), + is_last_packet_of_frame = packet->Marker(), + sequence_number = packet->SequenceNumber()]() { + RTC_DCHECK_RUN_ON(worker_queue_); + // Last packet of a frame, add it to sequence 
number info map. + const uint32_t timestamp = packet_timestamp - timestamp_offset_; + rtp_sequence_number_map_->InsertPacket( + sequence_number, + RtpSequenceNumberMap::Info(timestamp, is_first_packet_of_frame, + is_last_packet_of_frame)); + })); } - PacketOptions options; - { - rtc::CritScope lock(&lock_); - options.included_in_allocation = force_part_of_allocation_; + if (fec_generator_ && packet->fec_protect_packet()) { + // This packet should be protected by FEC, add it to packet generator. + RTC_DCHECK(fec_generator_); + RTC_DCHECK(packet->packet_type() == RtpPacketMediaType::kVideo); + absl::optional> + new_fec_params; + { + MutexLock lock(&lock_); + new_fec_params.swap(pending_fec_params_); + } + if (new_fec_params) { + fec_generator_->SetProtectionParameters(new_fec_params->first, + new_fec_params->second); + } + if (packet->is_red()) { + RtpPacketToSend unpacked_packet(*packet); + + const rtc::CopyOnWriteBuffer buffer = packet->Buffer(); + // Grab media payload type from RED header. + const size_t headers_size = packet->headers_size(); + unpacked_packet.SetPayloadType(buffer[headers_size]); + + // Copy the media payload into the unpacked buffer. + uint8_t* payload_buffer = + unpacked_packet.SetPayloadSize(packet->payload_size() - 1); + std::copy(&packet->payload()[0] + 1, + &packet->payload()[0] + packet->payload_size(), payload_buffer); + + fec_generator_->AddPacketAndGenerateFec(unpacked_packet); + } else { + // If not RED encapsulated - we can just insert packet directly. + fec_generator_->AddPacketAndGenerateFec(*packet); + } } // Bug webrtc:7859. 
While FEC is invoked from rtp_sender_video, and not after @@ -140,9 +231,14 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, } } - const bool is_media = - packet->packet_type() == RtpPacketToSend::Type::kAudio || - packet->packet_type() == RtpPacketToSend::Type::kVideo; + const bool is_media = packet->packet_type() == RtpPacketMediaType::kAudio || + packet->packet_type() == RtpPacketMediaType::kVideo; + + PacketOptions options; + { + MutexLock lock(&lock_); + options.included_in_allocation = force_part_of_allocation_; + } // Downstream code actually uses this flag to distinguish between media and // everything else. @@ -157,8 +253,8 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, options.application_data.assign(packet->application_data().begin(), packet->application_data().end()); - if (packet->packet_type() != RtpPacketToSend::Type::kPadding && - packet->packet_type() != RtpPacketToSend::Type::kRetransmission) { + if (packet->packet_type() != RtpPacketMediaType::kPadding && + packet->packet_type() != RtpPacketMediaType::kRetransmission) { UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc); UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(), packet_ssrc); @@ -176,68 +272,127 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, } if (send_success) { - rtc::CritScope lock(&lock_); - UpdateRtpStats(*packet); + // |media_has_been_sent_| is used by RTPSender to figure out if it can send + // padding in the absence of transport-cc or abs-send-time. + // In those cases media must be sent first to set a reference timestamp. 
media_has_been_sent_ = true; - } -} - -void RtpSenderEgress::ProcessBitrateAndNotifyObservers() { - if (!bitrate_callback_) - return; - rtc::CritScope lock(&lock_); - int64_t now_ms = clock_->TimeInMilliseconds(); - bitrate_callback_->Notify(total_bitrate_sent_.Rate(now_ms).value_or(0), - nack_bitrate_sent_.Rate(now_ms).value_or(0), ssrc_); + // TODO(sprang): Add support for FEC protecting all header extensions, add + // media packet to generator here instead. + + RTC_DCHECK(packet->packet_type().has_value()); + RtpPacketMediaType packet_type = *packet->packet_type(); + RtpPacketCounter counter(*packet); + size_t size = packet->size(); + worker_queue_->PostTask( + ToQueuedTask(task_safety_, [this, now_ms, packet_ssrc, packet_type, + counter = std::move(counter), size]() { + RTC_DCHECK_RUN_ON(worker_queue_); + UpdateRtpStats(now_ms, packet_ssrc, packet_type, std::move(counter), + size); + })); + } } -DataRate RtpSenderEgress::SendBitrate() const { - rtc::CritScope cs(&lock_); - return DataRate::bps( - total_bitrate_sent_.Rate(clock_->TimeInMilliseconds()).value_or(0)); +RtpSendRates RtpSenderEgress::GetSendRates() const { + MutexLock lock(&lock_); + const int64_t now_ms = clock_->TimeInMilliseconds(); + return GetSendRatesLocked(now_ms); } -DataRate RtpSenderEgress::NackOverheadRate() const { - rtc::CritScope cs(&lock_); - return DataRate::bps( - nack_bitrate_sent_.Rate(clock_->TimeInMilliseconds()).value_or(0)); +RtpSendRates RtpSenderEgress::GetSendRatesLocked(int64_t now_ms) const { + RtpSendRates current_rates; + for (size_t i = 0; i < kNumMediaTypes; ++i) { + RtpPacketMediaType type = static_cast(i); + current_rates[type] = + DataRate::BitsPerSec(send_rates_[i].Rate(now_ms).value_or(0)); + } + return current_rates; } void RtpSenderEgress::GetDataCounters(StreamDataCounters* rtp_stats, StreamDataCounters* rtx_stats) const { - rtc::CritScope lock(&lock_); + // TODO(bugs.webrtc.org/11581): make sure rtx_rtp_stats_ and rtp_stats_ are + // only touched on the worker 
thread. + MutexLock lock(&lock_); *rtp_stats = rtp_stats_; *rtx_stats = rtx_rtp_stats_; } void RtpSenderEgress::ForceIncludeSendPacketsInAllocation( bool part_of_allocation) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); force_part_of_allocation_ = part_of_allocation; } bool RtpSenderEgress::MediaHasBeenSent() const { - rtc::CritScope lock(&lock_); + RTC_DCHECK_RUN_ON(&pacer_checker_); return media_has_been_sent_; } void RtpSenderEgress::SetMediaHasBeenSent(bool media_sent) { - rtc::CritScope lock(&lock_); + RTC_DCHECK_RUN_ON(&pacer_checker_); media_has_been_sent_ = media_sent; } +void RtpSenderEgress::SetTimestampOffset(uint32_t timestamp) { + RTC_DCHECK_RUN_ON(worker_queue_); + timestamp_offset_ = timestamp; +} + +std::vector RtpSenderEgress::GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const { + RTC_DCHECK_RUN_ON(worker_queue_); + RTC_DCHECK(!sequence_numbers.empty()); + if (!need_rtp_packet_infos_) { + return std::vector(); + } + + std::vector results; + results.reserve(sequence_numbers.size()); + + for (uint16_t sequence_number : sequence_numbers) { + const auto& info = rtp_sequence_number_map_->Get(sequence_number); + if (!info) { + // The empty vector will be returned. We can delay the clearing + // of the vector until after we exit the critical section. + return std::vector(); + } + results.push_back(*info); + } + + return results; +} + +void RtpSenderEgress::SetFecProtectionParameters( + const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) { + // TODO(sprang): Post task to pacer queue instead, one pacer is fully + // migrated to a task queue. 
+ MutexLock lock(&lock_); + pending_fec_params_.emplace(delta_params, key_params); +} + +std::vector> +RtpSenderEgress::FetchFecPackets() { + RTC_DCHECK_RUN_ON(&pacer_checker_); + if (fec_generator_) { + return fec_generator_->GetFecPackets(); + } + return {}; +} + bool RtpSenderEgress::HasCorrectSsrc(const RtpPacketToSend& packet) const { switch (*packet.packet_type()) { - case RtpPacketToSend::Type::kAudio: - case RtpPacketToSend::Type::kVideo: + case RtpPacketMediaType::kAudio: + case RtpPacketMediaType::kVideo: return packet.Ssrc() == ssrc_; - case RtpPacketToSend::Type::kRetransmission: - case RtpPacketToSend::Type::kPadding: + case RtpPacketMediaType::kRetransmission: + case RtpPacketMediaType::kPadding: // Both padding and retransmission must be on either the media or the // RTX stream. return packet.Ssrc() == rtx_ssrc_ || packet.Ssrc() == ssrc_; - case RtpPacketToSend::Type::kForwardErrorCorrection: + case RtpPacketMediaType::kForwardErrorCorrection: // FlexFEC is on separate SSRC, ULPFEC uses media SSRC. return packet.Ssrc() == ssrc_ || packet.Ssrc() == flexfec_ssrc_; } @@ -257,10 +412,10 @@ void RtpSenderEgress::AddPacketToTransportFeedback( RtpPacketSendInfo packet_info; packet_info.ssrc = ssrc_; packet_info.transport_sequence_number = packet_id; - packet_info.has_rtp_sequence_number = true; packet_info.rtp_sequence_number = packet.SequenceNumber(); packet_info.length = packet_size; packet_info.pacing_info = pacing_info; + packet_info.packet_type = packet.packet_type(); transport_feedback_observer_->OnAddPacket(packet_info); } } @@ -275,7 +430,7 @@ void RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, int max_delay_ms = 0; uint64_t total_packet_send_delay_ms = 0; { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); // Compute the max and average of the recent capture-to-send delays. // The time complexity of the current approach depends on the distribution // of the delay values. This could be done more efficiently. 
@@ -365,7 +520,6 @@ bool RtpSenderEgress::SendPacketToNetwork(const RtpPacketToSend& packet, const PacedPacketInfo& pacing_info) { int bytes_sent = -1; if (transport_) { - UpdateRtpOverhead(packet); bytes_sent = transport_->SendRtp(packet.data(), packet.size(), options) ? static_cast(packet.size()) : -1; @@ -382,46 +536,82 @@ bool RtpSenderEgress::SendPacketToNetwork(const RtpPacketToSend& packet, return true; } -void RtpSenderEgress::UpdateRtpOverhead(const RtpPacketToSend& packet) { - if (!overhead_observer_) - return; - size_t overhead_bytes_per_packet; +void RtpSenderEgress::UpdateRtpStats(int64_t now_ms, + uint32_t packet_ssrc, + RtpPacketMediaType packet_type, + RtpPacketCounter counter, + size_t packet_size) { + RTC_DCHECK_RUN_ON(worker_queue_); + + // TODO(bugs.webrtc.org/11581): send_rates_ should be touched only on the + // worker thread. + RtpSendRates send_rates; { - rtc::CritScope lock(&lock_); - if (rtp_overhead_bytes_per_packet_ == packet.headers_size()) { - return; - } - rtp_overhead_bytes_per_packet_ = packet.headers_size(); - overhead_bytes_per_packet = rtp_overhead_bytes_per_packet_; - } - overhead_observer_->OnOverheadChanged(overhead_bytes_per_packet); -} + MutexLock lock(&lock_); -void RtpSenderEgress::UpdateRtpStats(const RtpPacketToSend& packet) { - int64_t now_ms = clock_->TimeInMilliseconds(); + // TODO(bugs.webrtc.org/11581): make sure rtx_rtp_stats_ and rtp_stats_ are + // only touched on the worker thread. + StreamDataCounters* counters = + packet_ssrc == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_; - StreamDataCounters* counters = - packet.Ssrc() == rtx_ssrc_ ? 
&rtx_rtp_stats_ : &rtp_stats_; + if (counters->first_packet_time_ms == -1) { + counters->first_packet_time_ms = now_ms; + } - total_bitrate_sent_.Update(packet.size(), now_ms); + if (packet_type == RtpPacketMediaType::kForwardErrorCorrection) { + counters->fec.Add(counter); + } else if (packet_type == RtpPacketMediaType::kRetransmission) { + counters->retransmitted.Add(counter); + } + counters->transmitted.Add(counter); - if (counters->first_packet_time_ms == -1) { - counters->first_packet_time_ms = now_ms; - } + send_rates_[static_cast(packet_type)].Update(packet_size, now_ms); + if (bitrate_callback_) { + send_rates = GetSendRatesLocked(now_ms); + } - if (packet.packet_type() == RtpPacketToSend::Type::kForwardErrorCorrection) { - counters->fec.AddPacket(packet); + if (rtp_stats_callback_) { + rtp_stats_callback_->DataCountersUpdated(*counters, packet_ssrc); + } } - if (packet.packet_type() == RtpPacketToSend::Type::kRetransmission) { - counters->retransmitted.AddPacket(packet); - nack_bitrate_sent_.Update(packet.size(), now_ms); + // The bitrate_callback_ and rtp_stats_callback_ pointers in practice point + // to the same object, so these callbacks could be consolidated into one. 
+ if (bitrate_callback_) { + bitrate_callback_->Notify( + send_rates.Sum().bps(), + send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_); } - counters->transmitted.AddPacket(packet); +} + +void RtpSenderEgress::PeriodicUpdate() { + RTC_DCHECK_RUN_ON(worker_queue_); + RTC_DCHECK(bitrate_callback_); + RtpSendRates send_rates = GetSendRates(); + bitrate_callback_->Notify( + send_rates.Sum().bps(), + send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_); +} + +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +void RtpSenderEgress::BweTestLoggingPlot(int64_t now_ms, uint32_t packet_ssrc) { + RTC_DCHECK_RUN_ON(worker_queue_); - if (rtp_stats_callback_) { - rtp_stats_callback_->DataCountersUpdated(*counters, packet.Ssrc()); + const auto rates = GetSendRates(); + if (is_audio_) { + BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms, + rates.Sum().kbps(), packet_ssrc); + BWE_TEST_LOGGING_PLOT_WITH_SSRC( + 1, "AudioNackBitrate_kbps", now_ms, + rates[RtpPacketMediaType::kRetransmission].kbps(), packet_ssrc); + } else { + BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms, + rates.Sum().kbps(), packet_ssrc); + BWE_TEST_LOGGING_PLOT_WITH_SSRC( + 1, "VideoNackBitrate_kbps", now_ms, + rates[RtpPacketMediaType::kRetransmission].kbps(), packet_ssrc); } } +#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.h b/modules/rtp_rtcp/source/rtp_sender_egress.h index e72a0cff67..8e36425f29 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress.h +++ b/modules/rtp_rtcp/source/rtp_sender_egress.h @@ -13,18 +13,25 @@ #include #include +#include #include #include "absl/types/optional.h" #include "api/call/transport.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/data_rate.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include 
"modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -35,35 +42,53 @@ class RtpSenderEgress { // without passing through an actual paced sender. class NonPacedPacketSender : public RtpPacketSender { public: - explicit NonPacedPacketSender(RtpSenderEgress* sender); + NonPacedPacketSender(RtpSenderEgress* sender, + SequenceNumberAssigner* sequence_number_assigner); virtual ~NonPacedPacketSender(); void EnqueuePackets( std::vector> packets) override; private: + void PrepareForSend(RtpPacketToSend* packet); uint16_t transport_sequence_number_; RtpSenderEgress* const sender_; + SequenceNumberAssigner* sequence_number_assigner_; }; - RtpSenderEgress(const RtpRtcp::Configuration& config, + RtpSenderEgress(const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history); - ~RtpSenderEgress() = default; + ~RtpSenderEgress(); - void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info); + void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info) + RTC_LOCKS_EXCLUDED(lock_); uint32_t Ssrc() const { return ssrc_; } absl::optional RtxSsrc() const { return rtx_ssrc_; } absl::optional FlexFecSsrc() const { return flexfec_ssrc_; } - void ProcessBitrateAndNotifyObservers(); - DataRate SendBitrate() const; - DataRate NackOverheadRate() const; + RtpSendRates GetSendRates() const RTC_LOCKS_EXCLUDED(lock_); void GetDataCounters(StreamDataCounters* rtp_stats, - 
StreamDataCounters* rtx_stats) const; - - void ForceIncludeSendPacketsInAllocation(bool part_of_allocation); - bool MediaHasBeenSent() const; - void SetMediaHasBeenSent(bool media_sent); + StreamDataCounters* rtx_stats) const + RTC_LOCKS_EXCLUDED(lock_); + + void ForceIncludeSendPacketsInAllocation(bool part_of_allocation) + RTC_LOCKS_EXCLUDED(lock_); + bool MediaHasBeenSent() const RTC_LOCKS_EXCLUDED(lock_); + void SetMediaHasBeenSent(bool media_sent) RTC_LOCKS_EXCLUDED(lock_); + void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(lock_); + + // For each sequence number in |sequence_number|, recall the last RTP packet + // which bore it - its timestamp and whether it was the first and/or last + // packet in that frame. If all of the given sequence numbers could be + // recalled, return a vector with all of them (in corresponding order). + // If any could not be recalled, return an empty vector. + std::vector GetSentRtpPacketInfos( + rtc::ArrayView sequence_numbers) const + RTC_LOCKS_EXCLUDED(lock_); + + void SetFecProtectionParameters(const FecProtectionParams& delta_params, + const FecProtectionParams& key_params); + std::vector> FetchFecPackets(); private: // Maps capture time in milliseconds to send-side delay in milliseconds. @@ -71,6 +96,8 @@ class RtpSenderEgress { // time. 
typedef std::map SendDelayMap; + RtpSendRates GetSendRatesLocked(int64_t now_ms) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); bool HasCorrectSsrc(const RtpPacketToSend& packet) const; void AddPacketToTransportFeedback(uint16_t packet_id, const RtpPacketToSend& packet, @@ -86,10 +113,21 @@ class RtpSenderEgress { bool SendPacketToNetwork(const RtpPacketToSend& packet, const PacketOptions& options, const PacedPacketInfo& pacing_info); - void UpdateRtpOverhead(const RtpPacketToSend& packet); - void UpdateRtpStats(const RtpPacketToSend& packet) - RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + void UpdateRtpStats(int64_t now_ms, + uint32_t packet_ssrc, + RtpPacketMediaType packet_type, + RtpPacketCounter counter, + size_t packet_size); +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE + void BweTestLoggingPlot(int64_t now_ms, uint32_t packet_ssrc); +#endif + + // Called on a timer, once a second, on the worker_queue_. + void PeriodicUpdate(); + + TaskQueueBase* const worker_queue_; + SequenceChecker pacer_checker_; const uint32_t ssrc_; const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; @@ -99,29 +137,43 @@ class RtpSenderEgress { RtpPacketHistory* const packet_history_; Transport* const transport_; RtcEventLog* const event_log_; +#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE const bool is_audio_; +#endif + const bool need_rtp_packet_infos_; + VideoFecGenerator* const fec_generator_ RTC_GUARDED_BY(pacer_checker_); TransportFeedbackObserver* const transport_feedback_observer_; SendSideDelayObserver* const send_side_delay_observer_; SendPacketObserver* const send_packet_observer_; - OverheadObserver* const overhead_observer_; StreamDataCountersCallback* const rtp_stats_callback_; BitrateStatisticsObserver* const bitrate_callback_; - rtc::CriticalSection lock_; - bool media_has_been_sent_ RTC_GUARDED_BY(lock_); + mutable Mutex lock_; + bool media_has_been_sent_ RTC_GUARDED_BY(pacer_checker_); bool force_part_of_allocation_ RTC_GUARDED_BY(lock_); + uint32_t timestamp_offset_ 
RTC_GUARDED_BY(worker_queue_); SendDelayMap send_delays_ RTC_GUARDED_BY(lock_); SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_); // The sum of delays over a kSendSideDelayWindowMs sliding window. int64_t sum_delays_ms_ RTC_GUARDED_BY(lock_); uint64_t total_packet_send_delay_ms_ RTC_GUARDED_BY(lock_); - size_t rtp_overhead_bytes_per_packet_ RTC_GUARDED_BY(lock_); StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_); StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_); - RateStatistics total_bitrate_sent_ RTC_GUARDED_BY(lock_); - RateStatistics nack_bitrate_sent_ RTC_GUARDED_BY(lock_); + // One element per value in RtpPacketMediaType, with index matching value. + std::vector send_rates_ RTC_GUARDED_BY(lock_); + absl::optional> + pending_fec_params_ RTC_GUARDED_BY(lock_); + + // Maps sent packets' sequence numbers to a tuple consisting of: + // 1. The timestamp, without the randomizing offset mandated by the RFC. + // 2. Whether the packet was the first in its frame. + // 3. Whether the packet was the last in its frame. 
+ const std::unique_ptr rtp_sequence_number_map_ + RTC_GUARDED_BY(worker_queue_); + RepeatingTaskHandle update_task_ RTC_GUARDED_BY(worker_queue_); + ScopedTaskSafety task_safety_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_unittest.cc index 5ca4e70de8..38f2d10001 100644 --- a/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -32,13 +32,17 @@ #include "modules/rtp_rtcp/source/rtp_sender_egress.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "modules/rtp_rtcp/source/rtp_utility.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" #include "rtc_base/arraysize.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_transport.h" #include "test/rtp_header_parser.h" +#include "test/time_controller/simulated_time_controller.h" namespace webrtc { @@ -46,8 +50,7 @@ namespace { enum : int { // The first valid value is 1. 
kAbsoluteSendTimeExtensionId = 1, kAudioLevelExtensionId, - kGenericDescriptorId00, - kGenericDescriptorId01, + kGenericDescriptorId, kMidExtensionId, kRepairedRidExtensionId, kRidExtensionId, @@ -75,12 +78,18 @@ const char kNoMid[] = ""; using ::testing::_; using ::testing::AllOf; using ::testing::Contains; +using ::testing::Each; using ::testing::ElementsAreArray; +using ::testing::Eq; using ::testing::Field; +using ::testing::Gt; +using ::testing::IsEmpty; using ::testing::NiceMock; +using ::testing::Not; using ::testing::Pointee; using ::testing::Property; using ::testing::Return; +using ::testing::SizeIs; using ::testing::StrictMock; uint64_t ConvertMsToAbsSendTime(int64_t time_ms) { @@ -102,9 +111,7 @@ class LoopbackTransportTest : public webrtc::Transport { kVideoTimingExtensionId); receivers_extensions_.Register(kMidExtensionId); receivers_extensions_.Register( - kGenericDescriptorId00); - receivers_extensions_.Register( - kGenericDescriptorId01); + kGenericDescriptorId); receivers_extensions_.Register(kRidExtensionId); receivers_extensions_.Register( kRepairedRidExtensionId); @@ -140,51 +147,37 @@ struct TestConfig { bool with_overhead = false; }; -std::string ToFieldTrialString(TestConfig config) { - std::string field_trials; - if (config.with_overhead) { - field_trials += "WebRTC-SendSideBwe-WithOverhead/Enabled/"; - } - return field_trials; -} - class MockRtpPacketPacer : public RtpPacketSender { public: MockRtpPacketPacer() {} virtual ~MockRtpPacketPacer() {} - MOCK_METHOD1(EnqueuePackets, - void(std::vector>)); - - MOCK_METHOD2(CreateProbeCluster, void(int bitrate_bps, int cluster_id)); - - MOCK_METHOD0(Pause, void()); - MOCK_METHOD0(Resume, void()); - MOCK_METHOD1(SetCongestionWindow, - void(absl::optional congestion_window_bytes)); - MOCK_METHOD1(UpdateOutstandingData, void(int64_t outstanding_bytes)); - MOCK_METHOD1(SetAccountForAudioPackets, void(bool account_for_audio)); + MOCK_METHOD(void, + EnqueuePackets, + (std::vector>), + (override)); }; 
class MockSendSideDelayObserver : public SendSideDelayObserver { public: - MOCK_METHOD4(SendSideDelayUpdated, void(int, int, uint64_t, uint32_t)); + MOCK_METHOD(void, + SendSideDelayUpdated, + (int, int, uint64_t, uint32_t), + (override)); }; class MockSendPacketObserver : public SendPacketObserver { public: - MOCK_METHOD3(OnSendPacket, void(uint16_t, int64_t, uint32_t)); + MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override)); }; class MockTransportFeedbackObserver : public TransportFeedbackObserver { public: - MOCK_METHOD1(OnAddPacket, void(const RtpPacketSendInfo&)); - MOCK_METHOD1(OnTransportFeedback, void(const rtcp::TransportFeedback&)); -}; - -class MockOverheadObserver : public OverheadObserver { - public: - MOCK_METHOD1(OnOverheadChanged, void(size_t overhead_bytes_per_packet)); + MOCK_METHOD(void, OnAddPacket, (const RtpPacketSendInfo&), (override)); + MOCK_METHOD(void, + OnTransportFeedback, + (const rtcp::TransportFeedback&), + (override)); }; class StreamDataTestCallback : public StreamDataCountersCallback { @@ -218,31 +211,107 @@ class StreamDataTestCallback : public StreamDataCountersCallback { } }; +class TaskQueuePacketSender : public RtpPacketSender { + public: + TaskQueuePacketSender(TimeController* time_controller, + std::unique_ptr packet_sender) + : time_controller_(time_controller), + packet_sender_(std::move(packet_sender)), + queue_(time_controller_->CreateTaskQueueFactory()->CreateTaskQueue( + "PacerQueue", + TaskQueueFactory::Priority::NORMAL)) {} + + void EnqueuePackets( + std::vector> packets) override { + queue_->PostTask(ToQueuedTask([sender = packet_sender_.get(), + packets_ = std::move(packets)]() mutable { + sender->EnqueuePackets(std::move(packets_)); + })); + // Trigger task we just enqueued to be executed by updating the simulated + // time controller. 
+ time_controller_->AdvanceTime(TimeDelta::Zero()); + } + + TaskQueueBase* task_queue() const { return queue_.get(); } + + TimeController* const time_controller_; + std::unique_ptr packet_sender_; + std::unique_ptr queue_; +}; + // Mimics ModuleRtpRtcp::RtpSenderContext. // TODO(sprang): Split up unit tests and test these components individually // wherever possible. -struct RtpSenderContext { - explicit RtpSenderContext(const RtpRtcp::Configuration& config) - : packet_history_(config.clock), +struct RtpSenderContext : public SequenceNumberAssigner { + RtpSenderContext(const RtpRtcpInterface::Configuration& config, + TimeController* time_controller) + : time_controller_(time_controller), + packet_history_(config.clock, config.enable_rtx_padding_prioritization), packet_sender_(config, &packet_history_), - non_paced_sender_(&packet_sender_), - packet_generator_( - config, - &packet_history_, - config.paced_sender ? config.paced_sender : &non_paced_sender_) {} + pacer_(time_controller, + std::make_unique( + &packet_sender_, + this)), + packet_generator_(config, + &packet_history_, + config.paced_sender ? config.paced_sender : &pacer_) { + } + void AssignSequenceNumber(RtpPacketToSend* packet) override { + packet_generator_.AssignSequenceNumber(packet); + } + // Inject packet straight into RtpSenderEgress without passing through the + // pacer, but while still running on the pacer task queue. 
+ void InjectPacket(std::unique_ptr packet, + const PacedPacketInfo& packet_info) { + pacer_.task_queue()->PostTask( + ToQueuedTask([sender_ = &packet_sender_, packet_ = std::move(packet), + packet_info]() mutable { + sender_->SendPacket(packet_.get(), packet_info); + })); + time_controller_->AdvanceTime(TimeDelta::Zero()); + } + TimeController* time_controller_; RtpPacketHistory packet_history_; RtpSenderEgress packet_sender_; - RtpSenderEgress::NonPacedPacketSender non_paced_sender_; + TaskQueuePacketSender pacer_; RTPSender packet_generator_; }; +class FieldTrialConfig : public WebRtcKeyValueConfig { + public: + FieldTrialConfig() + : overhead_enabled_(false), + max_padding_factor_(1200) {} + ~FieldTrialConfig() override {} + + void SetOverHeadEnabled(bool enabled) { overhead_enabled_ = enabled; } + void SetMaxPaddingFactor(double factor) { max_padding_factor_ = factor; } + + std::string Lookup(absl::string_view key) const override { + if (key == "WebRTC-LimitPaddingSize") { + char string_buf[32]; + rtc::SimpleStringBuilder ssb(string_buf); + ssb << "factor:" << max_padding_factor_; + return ssb.str(); + } else if (key == "WebRTC-SendSideBwe-WithOverhead") { + return overhead_enabled_ ? 
"Enabled" : "Disabled"; + } + return ""; + } + + private: + bool overhead_enabled_; + double max_padding_factor_; +}; + } // namespace class RtpSenderTest : public ::testing::TestWithParam { protected: RtpSenderTest() - : fake_clock_(kStartTime), - retransmission_rate_limiter_(&fake_clock_, 1000), + : time_controller_(Timestamp::Millis(kStartTime)), + clock_(time_controller_.GetClock()), + retransmission_rate_limiter_(clock_, 1000), flexfec_sender_(0, kFlexFecSsrc, kSsrc, @@ -250,11 +319,12 @@ class RtpSenderTest : public ::testing::TestWithParam { std::vector(), std::vector(), nullptr, - &fake_clock_), - kMarkerBit(true), - field_trials_(ToFieldTrialString(GetParam())) {} + clock_), + kMarkerBit(true) { + field_trials_.SetOverHeadEnabled(GetParam().with_overhead); + } - void SetUp() override { SetUpRtpSender(true, false); } + void SetUp() override { SetUpRtpSender(true, false, false); } RTPSender* rtp_sender() { RTC_DCHECK(rtp_sender_context_); @@ -266,25 +336,40 @@ class RtpSenderTest : public ::testing::TestWithParam { return &rtp_sender_context_->packet_sender_; } - void SetUpRtpSender(bool pacer, bool populate_network2) { - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + void SetUpRtpSender(bool pacer, + bool populate_network2, + bool always_send_mid_and_rid) { + SetUpRtpSender(pacer, populate_network2, always_send_mid_and_rid, + &flexfec_sender_); + } + + void SetUpRtpSender(bool pacer, + bool populate_network2, + bool always_send_mid_and_rid, + VideoFecGenerator* fec_generator) { + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.rtx_send_ssrc = kRtxSsrc; - config.flexfec_sender = &flexfec_sender_; + config.fec_generator = fec_generator; config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; config.paced_sender = pacer ? 
&mock_paced_sender_ : nullptr; config.populate_network2_timestamp = populate_network2; config.rtp_stats_callback = &rtp_stats_callback_; - rtp_sender_context_ = std::make_unique(config); + config.always_send_mid_and_rid = always_send_mid_and_rid; + config.field_trials = &field_trials_; + + rtp_sender_context_ = + std::make_unique(config, &time_controller_); rtp_sender()->SetSequenceNumber(kSeqNum); rtp_sender()->SetTimestampOffset(0); } - SimulatedClock fake_clock_; + GlobalSimulatedTimeController time_controller_; + Clock* const clock_; NiceMock mock_rtc_event_log_; MockRtpPacketPacer mock_paced_sender_; StrictMock send_packet_observer_; @@ -296,7 +381,7 @@ class RtpSenderTest : public ::testing::TestWithParam { LoopbackTransportTest transport_; const bool kMarkerBit; - test::ScopedFieldTrials field_trials_; + FieldTrialConfig field_trials_; StreamDataTestCallback rtp_stats_callback_; std::unique_ptr BuildRtpPacket(int payload_type, @@ -305,7 +390,7 @@ class RtpSenderTest : public ::testing::TestWithParam { int64_t capture_time_ms) { auto packet = rtp_sender()->AllocatePacket(); packet->SetPayloadType(payload_type); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); packet->SetMarker(marker_bit); packet->SetTimestamp(timestamp); packet->set_capture_time_ms(capture_time_ms); @@ -328,7 +413,7 @@ class RtpSenderTest : public ::testing::TestWithParam { } std::unique_ptr SendGenericPacket() { - const int64_t kCaptureTimeMs = fake_clock_.TimeInMilliseconds(); + const int64_t kCaptureTimeMs = clock_->TimeInMilliseconds(); return SendPacket(kCaptureTimeMs, sizeof(kPayloadData)); } @@ -337,7 +422,7 @@ class RtpSenderTest : public ::testing::TestWithParam { for (auto& packet : rtp_sender()->GeneratePadding(target_size_bytes, true)) { generated_bytes += packet->payload_size() + packet->padding_size(); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + 
rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); } return generated_bytes; } @@ -378,7 +463,7 @@ class RtpSenderTest : public ::testing::TestWithParam { // default code path. class RtpSenderTestWithoutPacer : public RtpSenderTest { public: - void SetUp() override { SetUpRtpSender(false, false); } + void SetUp() override { SetUpRtpSender(false, false, false); } }; TEST_P(RtpSenderTestWithoutPacer, AllocatePacketSetCsrc) { @@ -460,15 +545,16 @@ TEST_P(RtpSenderTestWithoutPacer, AssignSequenceNumberMayAllowPaddingOnVideo) { TEST_P(RtpSenderTest, AssignSequenceNumberAllowsPaddingOnAudio) { MockTransport transport; - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.audio = true; - config.clock = &fake_clock_; + config.clock = clock_; config.outgoing_transport = &transport; config.paced_sender = &mock_paced_sender_; config.local_media_ssrc = kSsrc; config.event_log = &mock_rtc_event_log_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); rtp_sender()->SetTimestampOffset(0); @@ -508,18 +594,18 @@ TEST_P(RtpSenderTestWithoutPacer, AssignSequenceNumberSetPaddingTimestamps) { TEST_P(RtpSenderTestWithoutPacer, TransportFeedbackObserverGetsCorrectByteCount) { - constexpr int kRtpOverheadBytesPerPacket = 12 + 8; - NiceMock mock_overhead_observer; + constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8; - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.transport_feedback_callback = &feedback_observer_; config.event_log = &mock_rtc_event_log_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - config.overhead_observer = &mock_overhead_observer; - rtp_sender_context_ = std::make_unique(config); + 
config.field_trials = &field_trials_; + rtp_sender_context_ = + std::make_unique(config, &time_controller_); EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( kRtpExtensionTransportSequenceNumber, @@ -540,22 +626,22 @@ TEST_P(RtpSenderTestWithoutPacer, Field(&RtpPacketSendInfo::length, expected_bytes), Field(&RtpPacketSendInfo::pacing_info, PacedPacketInfo())))) .Times(1); - EXPECT_CALL(mock_overhead_observer, - OnOverheadChanged(kRtpOverheadBytesPerPacket)) - .Times(1); + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), + kRtpOverheadBytesPerPacket); SendGenericPacket(); } TEST_P(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) { - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.transport_feedback_callback = &feedback_observer_; config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( kRtpExtensionTransportSequenceNumber, @@ -586,15 +672,16 @@ TEST_P(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) { } TEST_P(RtpSenderTestWithoutPacer, PacketOptionsNoRetransmission) { - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.transport_feedback_callback = &feedback_observer_; config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + 
std::make_unique(config, &time_controller_); SendGenericPacket(); @@ -603,7 +690,7 @@ TEST_P(RtpSenderTestWithoutPacer, PacketOptionsNoRetransmission) { TEST_P(RtpSenderTestWithoutPacer, SetsIncludedInFeedbackWhenTransportSequenceNumberExtensionIsRegistered) { - SetUpRtpSender(false, false); + SetUpRtpSender(false, false, false); rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionTransportSequenceNumber, kTransportSequenceNumberExtensionId); EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1); @@ -614,7 +701,7 @@ TEST_P(RtpSenderTestWithoutPacer, TEST_P( RtpSenderTestWithoutPacer, SetsIncludedInAllocationWhenTransportSequenceNumberExtensionIsRegistered) { - SetUpRtpSender(false, false); + SetUpRtpSender(false, false, false); rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionTransportSequenceNumber, kTransportSequenceNumberExtensionId); EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1); @@ -624,7 +711,7 @@ TEST_P( TEST_P(RtpSenderTestWithoutPacer, SetsIncludedInAllocationWhenForcedAsPartOfAllocation) { - SetUpRtpSender(false, false); + SetUpRtpSender(false, false, false); rtp_egress()->ForceIncludeSendPacketsInAllocation(true); SendGenericPacket(); EXPECT_FALSE(transport_.last_options_.included_in_feedback); @@ -632,7 +719,7 @@ TEST_P(RtpSenderTestWithoutPacer, } TEST_P(RtpSenderTestWithoutPacer, DoesnSetIncludedInAllocationByDefault) { - SetUpRtpSender(false, false); + SetUpRtpSender(false, false, false); SendGenericPacket(); EXPECT_FALSE(transport_.last_options_.included_in_feedback); EXPECT_FALSE(transport_.last_options_.included_in_allocation); @@ -641,20 +728,19 @@ TEST_P(RtpSenderTestWithoutPacer, DoesnSetIncludedInAllocationByDefault) { TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) { StrictMock send_side_delay_observer_; - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; 
config.send_side_delay_observer = &send_side_delay_observer_; config.event_log = &mock_rtc_event_log_; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.playout_delay_oracle = &playout_delay_oracle; video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); @@ -670,12 +756,12 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) { EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(10, 10, 10, kSsrc)) .Times(1); - int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); - fake_clock_.AdvanceTimeMilliseconds(10); + int64_t capture_time_ms = clock_->TimeInMilliseconds(); + time_controller_.AdvanceTime(TimeDelta::Millis(10)); video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp, - capture_time_ms, kPayloadData, nullptr, video_header, + capture_time_ms, kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs)); // Send another packet with 20 ms delay. 
The average, max and total should be @@ -683,11 +769,11 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) { EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(15, 20, 30, kSsrc)) .Times(1); - fake_clock_.AdvanceTimeMilliseconds(10); + time_controller_.AdvanceTime(TimeDelta::Millis(10)); video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp, - capture_time_ms, kPayloadData, nullptr, video_header, + capture_time_ms, kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs)); // Send another packet at the same time, which replaces the last packet. @@ -696,25 +782,25 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) { // TODO(terelius): Is is not clear that this is the right behavior. EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(5, 10, 30, kSsrc)) .Times(1); - capture_time_ms = fake_clock_.TimeInMilliseconds(); + capture_time_ms = clock_->TimeInMilliseconds(); video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp, - capture_time_ms, kPayloadData, nullptr, video_header, + capture_time_ms, kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs)); // Send a packet 1 second later. The earlier packets should have timed // out, so both max and average should be the delay of this packet. The total // keeps increasing. 
- fake_clock_.AdvanceTimeMilliseconds(1000); - capture_time_ms = fake_clock_.TimeInMilliseconds(); - fake_clock_.AdvanceTimeMilliseconds(1); + time_controller_.AdvanceTime(TimeDelta::Millis(1000)); + capture_time_ms = clock_->TimeInMilliseconds(); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); EXPECT_CALL(send_side_delay_observer_, SendSideDelayUpdated(1, 1, 31, kSsrc)) .Times(1); video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, capture_time_ms * kCaptureTimeMsToRtpTimestamp, - capture_time_ms, kPayloadData, nullptr, video_header, + capture_time_ms, kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs)); } @@ -730,8 +816,8 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendPacketUpdated) { } TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) { - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.paced_sender = &mock_paced_sender_; config.local_media_ssrc = kSsrc; @@ -739,7 +825,8 @@ TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) { config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); rtp_sender()->SetSequenceNumber(kSeqNum); rtp_sender_context_->packet_history_.SetStorePacketsStatus( @@ -767,10 +854,10 @@ TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) { Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); auto packet = SendGenericPacket(); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); // Transport sequence number is set by PacketRouter, before SendPacket(). 
packet->SetExtension(kTransportSequenceNumber); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); uint16_t transport_seq_no; EXPECT_TRUE( @@ -785,7 +872,7 @@ TEST_P(RtpSenderTest, WritesPacerExitToTimingExtension) { RtpPacketHistory::StorageMode::kStoreAndCull, 10); EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( kRtpExtensionVideoTiming, kVideoTimingExtensionId)); - int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + int64_t capture_time_ms = clock_->TimeInMilliseconds(); auto packet = rtp_sender()->AllocatePacket(); packet->SetPayloadType(kPayload); packet->SetMarker(true); @@ -797,14 +884,14 @@ TEST_P(RtpSenderTest, WritesPacerExitToTimingExtension) { size_t packet_size = packet->size(); const int kStoredTimeInMs = 100; - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); packet->set_allow_retransmission(true); EXPECT_CALL(mock_paced_sender_, EnqueuePackets(Contains(Pointee(Property( &RtpPacketToSend::Ssrc, kSsrc))))); EXPECT_TRUE( rtp_sender()->SendToNetwork(std::make_unique(*packet))); - fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs)); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(1, transport_.packets_sent()); EXPECT_EQ(packet_size, transport_.last_sent_packet().size()); @@ -815,12 +902,12 @@ TEST_P(RtpSenderTest, WritesPacerExitToTimingExtension) { } TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithPacer) { - SetUpRtpSender(/*pacer=*/true, /*populate_network2=*/true); + SetUpRtpSender(/*pacer=*/true, /*populate_network2=*/true, false); rtp_sender_context_->packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( 
kRtpExtensionVideoTiming, kVideoTimingExtensionId)); - int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + int64_t capture_time_ms = clock_->TimeInMilliseconds(); auto packet = rtp_sender()->AllocatePacket(); packet->SetPayloadType(kPayload); packet->SetMarker(true); @@ -834,41 +921,40 @@ TEST_P(RtpSenderTest, WritesNetwork2ToTimingExtensionWithPacer) { const int kStoredTimeInMs = 100; - packet->set_packet_type(RtpPacketToSend::Type::kVideo); - packet->set_allow_retransmission(true); - EXPECT_CALL(mock_paced_sender_, EnqueuePackets(Contains(Pointee(Property( - &RtpPacketToSend::Ssrc, kSsrc))))); - EXPECT_TRUE(rtp_sender()->SendToNetwork( - std::make_unique(*packet))); - fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kVideo); + packet->set_allow_retransmission(true); + EXPECT_CALL(mock_paced_sender_, EnqueuePackets(Contains(Pointee(Property( + &RtpPacketToSend::Ssrc, kSsrc))))); + EXPECT_TRUE( + rtp_sender()->SendToNetwork(std::make_unique(*packet))); + time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs)); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); - EXPECT_EQ(1, transport_.packets_sent()); - EXPECT_EQ(packet_size, transport_.last_sent_packet().size()); + EXPECT_EQ(1, transport_.packets_sent()); + EXPECT_EQ(packet_size, transport_.last_sent_packet().size()); - VideoSendTiming video_timing; - EXPECT_TRUE( - transport_.last_sent_packet().GetExtension( - &video_timing)); - EXPECT_EQ(kStoredTimeInMs, video_timing.network2_timestamp_delta_ms); - EXPECT_EQ(kPacerExitMs, video_timing.pacer_exit_delta_ms); + VideoSendTiming video_timing; + EXPECT_TRUE(transport_.last_sent_packet().GetExtension( + &video_timing)); + EXPECT_EQ(kStoredTimeInMs, video_timing.network2_timestamp_delta_ms); + EXPECT_EQ(kPacerExitMs, video_timing.pacer_exit_delta_ms); } TEST_P(RtpSenderTest, 
WritesNetwork2ToTimingExtensionWithoutPacer) { - SetUpRtpSender(/*pacer=*/false, /*populate_network2=*/true); + SetUpRtpSender(/*pacer=*/false, /*populate_network2=*/true, false); EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( kRtpExtensionVideoTiming, kVideoTimingExtensionId)); auto packet = rtp_sender()->AllocatePacket(); packet->SetMarker(true); - packet->set_capture_time_ms(fake_clock_.TimeInMilliseconds()); + packet->set_capture_time_ms(clock_->TimeInMilliseconds()); const VideoSendTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true}; packet->SetExtension(kVideoTiming); packet->set_allow_retransmission(true); EXPECT_TRUE(rtp_sender()->AssignSequenceNumber(packet.get())); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); const int kPropagateTimeMs = 10; - fake_clock_.AdvanceTimeMilliseconds(kPropagateTimeMs); + time_controller_.AdvanceTime(TimeDelta::Millis(kPropagateTimeMs)); EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet))); @@ -891,7 +977,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingWithExtensions) { EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId)); - int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + int64_t capture_time_ms = clock_->TimeInMilliseconds(); auto packet = BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, capture_time_ms); size_t packet_size = packet->size(); @@ -902,13 +988,13 @@ TEST_P(RtpSenderTest, TrafficSmoothingWithExtensions) { EnqueuePackets(Contains(AllOf( Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); packet->set_allow_retransmission(true); EXPECT_TRUE( rtp_sender()->SendToNetwork(std::make_unique(*packet))); EXPECT_EQ(0, transport_.packets_sent()); - 
fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs)); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); // Process send bucket. Packet should now be sent. EXPECT_EQ(1, transport_.packets_sent()); @@ -920,7 +1006,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingWithExtensions) { // Verify transmission time offset. EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset); uint64_t expected_send_time = - ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); + ConvertMsToAbsSendTime(clock_->TimeInMilliseconds()); EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); } @@ -936,7 +1022,7 @@ TEST_P(RtpSenderTest, TrafficSmoothingRetransmits) { EXPECT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId)); - int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + int64_t capture_time_ms = clock_->TimeInMilliseconds(); auto packet = BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, capture_time_ms); size_t packet_size = packet->size(); @@ -947,46 +1033,45 @@ TEST_P(RtpSenderTest, TrafficSmoothingRetransmits) { EnqueuePackets(Contains(AllOf( Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); packet->set_allow_retransmission(true); EXPECT_TRUE( rtp_sender()->SendToNetwork(std::make_unique(*packet))); // Immediately process send bucket and send packet. - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::make_unique(*packet), + PacedPacketInfo()); EXPECT_EQ(1, transport_.packets_sent()); // Retransmit packet. 
const int kStoredTimeInMs = 100; - fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs); + time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs)); EXPECT_CALL(mock_rtc_event_log_, LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing))); - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); - packet->set_retransmitted_sequence_number(kSeqNum); - EXPECT_CALL( - mock_paced_sender_, - EnqueuePackets(Contains(AllOf( - Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), - Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); - EXPECT_EQ(static_cast(packet_size), - rtp_sender()->ReSendPacket(kSeqNum)); - EXPECT_EQ(1, transport_.packets_sent()); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); - - // Process send bucket. Packet should now be sent. - EXPECT_EQ(2, transport_.packets_sent()); - EXPECT_EQ(packet_size, transport_.last_sent_packet().size()); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); + packet->set_retransmitted_sequence_number(kSeqNum); + EXPECT_CALL( + mock_paced_sender_, + EnqueuePackets(Contains(AllOf( + Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), + Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); + EXPECT_EQ(static_cast(packet_size), rtp_sender()->ReSendPacket(kSeqNum)); + EXPECT_EQ(1, transport_.packets_sent()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); - webrtc::RTPHeader rtp_header; - transport_.last_sent_packet().GetHeader(&rtp_header); + // Process send bucket. Packet should now be sent. + EXPECT_EQ(2, transport_.packets_sent()); + EXPECT_EQ(packet_size, transport_.last_sent_packet().size()); - // Verify transmission time offset. 
- EXPECT_EQ(kStoredTimeInMs * 90, - rtp_header.extension.transmissionTimeOffset); - uint64_t expected_send_time = - ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); - EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); + webrtc::RTPHeader rtp_header; + transport_.last_sent_packet().GetHeader(&rtp_header); + + // Verify transmission time offset. + EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset); + uint64_t expected_send_time = + ConvertMsToAbsSendTime(clock_->TimeInMilliseconds()); + EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); } // This test sends 1 regular video packet, then 4 padding packets, and then @@ -1014,7 +1099,7 @@ TEST_P(RtpSenderTest, SendPadding) { webrtc::RTPHeader rtp_header; - int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + int64_t capture_time_ms = clock_->TimeInMilliseconds(); auto packet = BuildRtpPacket(kPayload, kMarkerBit, timestamp, capture_time_ms); const uint32_t media_packet_timestamp = timestamp; @@ -1028,13 +1113,13 @@ TEST_P(RtpSenderTest, SendPadding) { EnqueuePackets(Contains(AllOf( Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); packet->set_allow_retransmission(true); EXPECT_TRUE( rtp_sender()->SendToNetwork(std::make_unique(*packet))); EXPECT_EQ(total_packets_sent, transport_.packets_sent()); - fake_clock_.AdvanceTimeMilliseconds(kStoredTimeInMs); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs)); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); ++seq_num; // Packet should now be sent. 
This test doesn't verify the regular video @@ -1066,41 +1151,41 @@ TEST_P(RtpSenderTest, SendPadding) { int offset = timestamp - media_packet_timestamp; EXPECT_EQ(offset, rtp_header.extension.transmissionTimeOffset); uint64_t expected_send_time = - ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); + ConvertMsToAbsSendTime(clock_->TimeInMilliseconds()); EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); - fake_clock_.AdvanceTimeMilliseconds(kPaddingPeriodMs); + time_controller_.AdvanceTime(TimeDelta::Millis(kPaddingPeriodMs)); timestamp += 90 * kPaddingPeriodMs; } // Send a regular video packet again. - capture_time_ms = fake_clock_.TimeInMilliseconds(); + capture_time_ms = clock_->TimeInMilliseconds(); packet = BuildRtpPacket(kPayload, kMarkerBit, timestamp, capture_time_ms); packet_size = packet->size(); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); - packet->set_allow_retransmission(true); - EXPECT_CALL( - mock_paced_sender_, - EnqueuePackets(Contains(AllOf( - Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), - Pointee(Property(&RtpPacketToSend::SequenceNumber, seq_num)))))); - EXPECT_TRUE(rtp_sender()->SendToNetwork( - std::make_unique(*packet))); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kVideo); + packet->set_allow_retransmission(true); + EXPECT_CALL( + mock_paced_sender_, + EnqueuePackets(Contains(AllOf( + Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), + Pointee(Property(&RtpPacketToSend::SequenceNumber, seq_num)))))); + EXPECT_TRUE( + rtp_sender()->SendToNetwork(std::make_unique(*packet))); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); - // Process send bucket. - EXPECT_EQ(++total_packets_sent, transport_.packets_sent()); - EXPECT_EQ(packet_size, transport_.last_sent_packet().size()); - transport_.last_sent_packet().GetHeader(&rtp_header); + // Process send bucket. 
+ EXPECT_EQ(++total_packets_sent, transport_.packets_sent()); + EXPECT_EQ(packet_size, transport_.last_sent_packet().size()); + transport_.last_sent_packet().GetHeader(&rtp_header); - // Verify sequence number and timestamp. - EXPECT_EQ(seq_num, rtp_header.sequenceNumber); - EXPECT_EQ(timestamp, rtp_header.timestamp); - // Verify transmission time offset. This packet is sent without delay. - EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset); - uint64_t expected_send_time = - ConvertMsToAbsSendTime(fake_clock_.TimeInMilliseconds()); - EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); + // Verify sequence number and timestamp. + EXPECT_EQ(seq_num, rtp_header.sequenceNumber); + EXPECT_EQ(timestamp, rtp_header.timestamp); + // Verify transmission time offset. This packet is sent without delay. + EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset); + uint64_t expected_send_time = + ConvertMsToAbsSendTime(clock_->TimeInMilliseconds()); + EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime); } TEST_P(RtpSenderTest, OnSendPacketUpdated) { @@ -1120,9 +1205,9 @@ TEST_P(RtpSenderTest, OnSendPacketUpdated) { Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); auto packet = SendGenericPacket(); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); packet->SetExtension(kTransportSequenceNumber); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(1, transport_.packets_sent()); } @@ -1142,9 +1227,9 @@ TEST_P(RtpSenderTest, OnSendPacketNotUpdatedForRetransmits) { Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc)), Pointee(Property(&RtpPacketToSend::SequenceNumber, kSeqNum)))))); auto packet = SendGenericPacket(); - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); + 
packet->set_packet_type(RtpPacketMediaType::kRetransmission); packet->SetExtension(kTransportSequenceNumber); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(1, transport_.packets_sent()); EXPECT_TRUE(transport_.last_options_.is_retransmit); @@ -1153,12 +1238,10 @@ TEST_P(RtpSenderTest, OnSendPacketNotUpdatedForRetransmits) { TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) { const uint8_t kPayloadType = 127; const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.playout_delay_oracle = &playout_delay_oracle; video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); uint8_t payload[] = {47, 11, 32, 93, 89}; @@ -1167,7 +1250,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) { RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321, - payload, nullptr, video_header, + payload, video_header, kDefaultExpectedRetransmissionTimeMs)); auto sent_payload = transport_.last_sent_packet().payload(); @@ -1183,7 +1266,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendGenericVideo) { video_header.frame_type = VideoFrameType::kVideoFrameDelta; ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321, - payload, nullptr, video_header, + payload, video_header, kDefaultExpectedRetransmissionTimeMs)); sent_payload = transport_.last_sent_packet().payload(); @@ -1197,12 +1280,10 @@ TEST_P(RtpSenderTestWithoutPacer, SendRawVideo) { const uint8_t kPayloadType = 111; const uint8_t payload[] = {11, 22, 33, 44, 55}; - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig 
field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.playout_delay_oracle = &playout_delay_oracle; video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); @@ -1210,7 +1291,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendRawVideo) { RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, absl::nullopt, 1234, - 4321, payload, nullptr, video_header, + 4321, payload, video_header, kDefaultExpectedRetransmissionTimeMs)); auto sent_payload = transport_.last_sent_packet().payload(); @@ -1226,31 +1307,34 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) { const std::vector kNoRtpExtensionSizes; FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid, kNoRtpExtensions, kNoRtpExtensionSizes, - nullptr /* rtp_state */, &fake_clock_); + nullptr /* rtp_state */, clock_); // Reset |rtp_sender_| to use FlexFEC. 
- RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.paced_sender = &mock_paced_sender_; config.local_media_ssrc = kSsrc; - config.flexfec_sender = &flexfec_sender_; + config.fec_generator = &flexfec_sender_; config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + config.field_trials = &field_trials_; + rtp_sender_context_ = + std::make_unique(config, &time_controller_); rtp_sender()->SetSequenceNumber(kSeqNum); rtp_sender_context_->packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.flexfec_sender = &flexfec_sender; - video_config.playout_delay_oracle = &playout_delay_oracle; + video_config.fec_type = flexfec_sender.GetFecType(); + video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead(); + video_config.fec_type = flexfec_sender.GetFecType(); + video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead(); video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); @@ -1259,51 +1343,59 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) { params.fec_rate = 15; params.max_fec_frames = 1; params.fec_mask_type = kFecMaskRandom; - rtp_sender_video.SetFecParameters(params, params); + flexfec_sender.SetProtectionParameters(params, params); uint16_t flexfec_seq_num; RTPVideoHeader video_header; - std::unique_ptr media_packet; - std::unique_ptr fec_packet; - - EXPECT_CALL(mock_paced_sender_, EnqueuePackets) - .WillOnce([&](std::vector> packets) { - for (auto& packet : packets) { 
- if (packet->packet_type() == RtpPacketToSend::Type::kVideo) { - EXPECT_EQ(packet->Ssrc(), kSsrc); - EXPECT_EQ(packet->SequenceNumber(), kSeqNum); - media_packet = std::move(packet); - } else { - EXPECT_EQ(packet->packet_type(), - RtpPacketToSend::Type::kForwardErrorCorrection); - EXPECT_EQ(packet->Ssrc(), kFlexFecSsrc); - fec_packet = std::move(packet); - } + std::unique_ptr media_packet; + std::unique_ptr fec_packet; + + EXPECT_CALL(mock_paced_sender_, EnqueuePackets) + .WillOnce([&](std::vector> packets) { + for (auto& packet : packets) { + if (packet->packet_type() == RtpPacketMediaType::kVideo) { + EXPECT_EQ(packet->Ssrc(), kSsrc); + EXPECT_EQ(packet->SequenceNumber(), kSeqNum); + media_packet = std::move(packet); + + // Simulate RtpSenderEgress adding packet to fec generator. + flexfec_sender.AddPacketAndGenerateFec(*media_packet); + auto fec_packets = flexfec_sender.GetFecPackets(); + EXPECT_EQ(fec_packets.size(), 1u); + fec_packet = std::move(fec_packets[0]); + EXPECT_EQ(fec_packet->packet_type(), + RtpPacketMediaType::kForwardErrorCorrection); + EXPECT_EQ(fec_packet->Ssrc(), kFlexFecSsrc); + } else { + EXPECT_EQ(packet->packet_type(), + RtpPacketMediaType::kForwardErrorCorrection); + fec_packet = std::move(packet); + EXPECT_EQ(fec_packet->Ssrc(), kFlexFecSsrc); } - }); + } + }); - video_header.frame_type = VideoFrameType::kVideoFrameKey; - EXPECT_TRUE(rtp_sender_video.SendVideo( - kMediaPayloadType, kCodecType, kTimestamp, - fake_clock_.TimeInMilliseconds(), kPayloadData, nullptr, video_header, - kDefaultExpectedRetransmissionTimeMs)); - ASSERT_TRUE(media_packet != nullptr); - ASSERT_TRUE(fec_packet != nullptr); + video_header.frame_type = VideoFrameType::kVideoFrameKey; + EXPECT_TRUE(rtp_sender_video.SendVideo( + kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(), + kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs)); + ASSERT_TRUE(media_packet != nullptr); + ASSERT_TRUE(fec_packet != nullptr); - flexfec_seq_num = 
fec_packet->SequenceNumber(); - rtp_egress()->SendPacket(media_packet.get(), PacedPacketInfo()); - rtp_egress()->SendPacket(fec_packet.get(), PacedPacketInfo()); + flexfec_seq_num = fec_packet->SequenceNumber(); + rtp_sender_context_->InjectPacket(std::move(media_packet), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(fec_packet), PacedPacketInfo()); - ASSERT_EQ(2, transport_.packets_sent()); - const RtpPacketReceived& sent_media_packet = transport_.sent_packets_[0]; - EXPECT_EQ(kMediaPayloadType, sent_media_packet.PayloadType()); - EXPECT_EQ(kSeqNum, sent_media_packet.SequenceNumber()); - EXPECT_EQ(kSsrc, sent_media_packet.Ssrc()); - const RtpPacketReceived& sent_flexfec_packet = transport_.sent_packets_[1]; - EXPECT_EQ(kFlexfecPayloadType, sent_flexfec_packet.PayloadType()); - EXPECT_EQ(flexfec_seq_num, sent_flexfec_packet.SequenceNumber()); - EXPECT_EQ(kFlexFecSsrc, sent_flexfec_packet.Ssrc()); + ASSERT_EQ(2, transport_.packets_sent()); + const RtpPacketReceived& sent_media_packet = transport_.sent_packets_[0]; + EXPECT_EQ(kMediaPayloadType, sent_media_packet.PayloadType()); + EXPECT_EQ(kSeqNum, sent_media_packet.SequenceNumber()); + EXPECT_EQ(kSsrc, sent_media_packet.Ssrc()); + const RtpPacketReceived& sent_flexfec_packet = transport_.sent_packets_[1]; + EXPECT_EQ(kFlexfecPayloadType, sent_flexfec_packet.PayloadType()); + EXPECT_EQ(flexfec_seq_num, sent_flexfec_packet.SequenceNumber()); + EXPECT_EQ(kFlexFecSsrc, sent_flexfec_packet.Ssrc()); } TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) { @@ -1315,28 +1407,29 @@ TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) { const std::vector kNoRtpExtensionSizes; FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid, kNoRtpExtensions, kNoRtpExtensionSizes, - nullptr /* rtp_state */, &fake_clock_); + nullptr /* rtp_state */, clock_); // Reset |rtp_sender_| to use FlexFEC. 
- RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; - config.flexfec_sender = &flexfec_sender; + config.fec_generator = &flexfec_sender; config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + config.field_trials = &field_trials_; + rtp_sender_context_ = + std::make_unique(config, &time_controller_); rtp_sender()->SetSequenceNumber(kSeqNum); - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.flexfec_sender = &flexfec_sender; - video_config.playout_delay_oracle = &playout_delay_oracle; + video_config.fec_type = flexfec_sender.GetFecType(); + video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead(); video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); @@ -1345,7 +1438,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) { params.fec_rate = 15; params.max_fec_frames = 1; params.fec_mask_type = kFecMaskRandom; - rtp_sender_video.SetFecParameters(params, params); + rtp_egress()->SetFecProtectionParameters(params, params); EXPECT_CALL(mock_rtc_event_log_, LogProxy(SameRtcEventTypeAs(RtcEvent::Type::RtpPacketOutgoing))) @@ -1353,9 +1446,8 @@ RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video.SendVideo( - kMediaPayloadType, kCodecType, kTimestamp, - fake_clock_.TimeInMilliseconds(), kPayloadData, nullptr, video_header, - kDefaultExpectedRetransmissionTimeMs)); + kMediaPayloadType, kCodecType, kTimestamp,
clock_->TimeInMilliseconds(), + kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs)); ASSERT_EQ(2, transport_.packets_sent()); const RtpPacketReceived& media_packet = transport_.sent_packets_[0]; @@ -1453,6 +1545,27 @@ TEST_P(RtpSenderTestWithoutPacer, MidAndRidNotIncludedOnSentPacketsAfterAck) { EXPECT_FALSE(second_packet.HasExtension()); } +TEST_P(RtpSenderTestWithoutPacer, + MidAndRidAlwaysIncludedOnSentPacketsWhenConfigured) { + SetUpRtpSender(false, false, /*always_send_mid_and_rid=*/true); + const char kMid[] = "mid"; + const char kRid[] = "f"; + EnableMidSending(kMid); + EnableRidSending(kRid); + + // Send two media packets: one before and one after the ack. + auto first_packet = SendGenericPacket(); + rtp_sender()->OnReceivedAckOnSsrc(first_packet->SequenceNumber()); + SendGenericPacket(); + + // Due to the configuration, both sent packets should contain MID and RID. + ASSERT_EQ(2u, transport_.sent_packets_.size()); + for (const RtpPacketReceived& packet : transport_.sent_packets_) { + EXPECT_EQ(packet.GetExtension(), kMid); + EXPECT_EQ(packet.GetExtension(), kRid); + } +} + // Test that the first RTX packet includes both MID and RRID even if the packet // being retransmitted did not have MID or RID. The MID and RID are needed on // the first packets for a given SSRC, and RTX packets are sent on a separate @@ -1530,6 +1643,45 @@ TEST_P(RtpSenderTestWithoutPacer, MidAndRidNotIncludedOnRtxPacketsAfterAck) { EXPECT_FALSE(third_rtx_packet.HasExtension()); } +TEST_P(RtpSenderTestWithoutPacer, + MidAndRidAlwaysIncludedOnRtxPacketsWhenConfigured) { + SetUpRtpSender(false, false, /*always_send_mid_and_rid=*/true); + const char kMid[] = "mid"; + const char kRid[] = "f"; + EnableRtx(); + EnableMidSending(kMid); + EnableRidSending(kRid); + + // Send two media packets: one before and one after the ack. 
+ auto media_packet1 = SendGenericPacket(); + rtp_sender()->OnReceivedAckOnSsrc(media_packet1->SequenceNumber()); + auto media_packet2 = SendGenericPacket(); + + // Send three RTX packets with different combinations of orders w.r.t. the + // media and RTX acks. + ASSERT_LT(0, rtp_sender()->ReSendPacket(media_packet2->SequenceNumber())); + ASSERT_EQ(3u, transport_.sent_packets_.size()); + rtp_sender()->OnReceivedAckOnRtxSsrc( + transport_.sent_packets_[2].SequenceNumber()); + ASSERT_LT(0, rtp_sender()->ReSendPacket(media_packet1->SequenceNumber())); + ASSERT_LT(0, rtp_sender()->ReSendPacket(media_packet2->SequenceNumber())); + + // Due to the configuration, all sent packets should contain MID + // and either RID (media) or RRID (RTX). + ASSERT_EQ(5u, transport_.sent_packets_.size()); + for (const auto& packet : transport_.sent_packets_) { + EXPECT_EQ(packet.GetExtension(), kMid); + } + for (size_t i = 0; i < 2; ++i) { + const RtpPacketReceived& packet = transport_.sent_packets_[i]; + EXPECT_EQ(packet.GetExtension(), kRid); + } + for (size_t i = 2; i < transport_.sent_packets_.size(); ++i) { + const RtpPacketReceived& packet = transport_.sent_packets_[i]; + EXPECT_EQ(packet.GetExtension(), kRid); + } +} + // Test that if the RtpState indicates an ACK has been received on that SSRC // then neither the MID nor RID header extensions will be sent. TEST_P(RtpSenderTestWithoutPacer, @@ -1588,29 +1740,17 @@ TEST_P(RtpSenderTest, FecOverheadRate) { const std::vector kNoRtpExtensionSizes; FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexFecSsrc, kSsrc, kNoMid, kNoRtpExtensions, kNoRtpExtensionSizes, - nullptr /* rtp_state */, &fake_clock_); - - // Reset |rtp_sender_| to use FlexFEC. 
- RtpRtcp::Configuration config; - config.clock = &fake_clock_; - config.outgoing_transport = &transport_; - config.paced_sender = &mock_paced_sender_; - config.local_media_ssrc = kSsrc; - config.flexfec_sender = &flexfec_sender; - config.event_log = &mock_rtc_event_log_; - config.send_packet_observer = &send_packet_observer_; - config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + nullptr /* rtp_state */, clock_); - rtp_sender()->SetSequenceNumber(kSeqNum); + // Reset |rtp_sender_| to use this FlexFEC instance. + SetUpRtpSender(false, false, false, &flexfec_sender); - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.flexfec_sender = &flexfec_sender; - video_config.playout_delay_oracle = &playout_delay_oracle; + video_config.fec_type = flexfec_sender.GetFecType(); + video_config.fec_overhead_bytes = flexfec_sender.MaxPacketOverhead(); video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); // Parameters selected to generate a single FEC packet per media packet. 
@@ -1618,22 +1758,20 @@ TEST_P(RtpSenderTest, FecOverheadRate) { params.fec_rate = 15; params.max_fec_frames = 1; params.fec_mask_type = kFecMaskRandom; - rtp_sender_video.SetFecParameters(params, params); + rtp_egress()->SetFecProtectionParameters(params, params); constexpr size_t kNumMediaPackets = 10; constexpr size_t kNumFecPackets = kNumMediaPackets; constexpr int64_t kTimeBetweenPacketsMs = 10; - EXPECT_CALL(mock_paced_sender_, EnqueuePackets).Times(kNumMediaPackets); for (size_t i = 0; i < kNumMediaPackets; ++i) { RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video.SendVideo( - kMediaPayloadType, kCodecType, kTimestamp, - fake_clock_.TimeInMilliseconds(), kPayloadData, nullptr, video_header, - kDefaultExpectedRetransmissionTimeMs)); + kMediaPayloadType, kCodecType, kTimestamp, clock_->TimeInMilliseconds(), + kPayloadData, video_header, kDefaultExpectedRetransmissionTimeMs)); - fake_clock_.AdvanceTimeMilliseconds(kTimeBetweenPacketsMs); + time_controller_.AdvanceTime(TimeDelta::Millis(kTimeBetweenPacketsMs)); } constexpr size_t kRtpHeaderLength = 12; constexpr size_t kFlexfecHeaderLength = 20; @@ -1641,9 +1779,14 @@ TEST_P(RtpSenderTest, FecOverheadRate) { constexpr size_t kPayloadLength = sizeof(kPayloadData); constexpr size_t kPacketLength = kRtpHeaderLength + kFlexfecHeaderLength + kGenericCodecHeaderLength + kPayloadLength; - EXPECT_NEAR(kNumFecPackets * kPacketLength * 8 / - (kNumFecPackets * kTimeBetweenPacketsMs / 1000.0f), - rtp_sender_video.FecOverheadRate(), 500); + + EXPECT_NEAR( + kNumFecPackets * kPacketLength * 8 / + (kNumFecPackets * kTimeBetweenPacketsMs / 1000.0f), + rtp_egress() + ->GetSendRates()[RtpPacketMediaType::kForwardErrorCorrection] + .bps(), + 500); } TEST_P(RtpSenderTest, BitrateCallbacks) { @@ -1672,20 +1815,19 @@ TEST_P(RtpSenderTest, BitrateCallbacks) { uint32_t retransmit_bitrate_; } callback; - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + 
RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.send_bitrate_observer = &callback; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.playout_delay_oracle = &playout_delay_oracle; video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; @@ -1706,28 +1848,23 @@ TEST_P(RtpSenderTest, BitrateCallbacks) { RtpPacketHistory::StorageMode::kStoreAndCull, 1); uint32_t ssrc = rtp_sender()->SSRC(); - // Initial process call so we get a new time window. - rtp_egress()->ProcessBitrateAndNotifyObservers(); - // Send a few frames. RTPVideoHeader video_header; for (uint32_t i = 0; i < kNumPackets; ++i) { video_header.frame_type = VideoFrameType::kVideoFrameKey; ASSERT_TRUE(rtp_sender_video.SendVideo( - kPayloadType, kCodecType, 1234, 4321, payload, nullptr, video_header, + kPayloadType, kCodecType, 1234, 4321, payload, video_header, kDefaultExpectedRetransmissionTimeMs)); - fake_clock_.AdvanceTimeMilliseconds(kPacketInterval); + time_controller_.AdvanceTime(TimeDelta::Millis(kPacketInterval)); } - rtp_egress()->ProcessBitrateAndNotifyObservers(); - // We get one call for every stats updated, thus two calls since both the // stream stats and the retransmit stats are updated once. 
- EXPECT_EQ(2u, callback.num_calls_); + EXPECT_EQ(kNumPackets, callback.num_calls_); EXPECT_EQ(ssrc, callback.ssrc_); const uint32_t kTotalPacketSize = kPacketOverhead + sizeof(payload); // Bitrate measured over delta between last and first timestamp, plus one. - const uint32_t kExpectedWindowMs = kNumPackets * kPacketInterval + 1; + const uint32_t kExpectedWindowMs = (kNumPackets - 1) * kPacketInterval + 1; const uint32_t kExpectedBitsAccumulated = kTotalPacketSize * kNumPackets * 8; const uint32_t kExpectedRateBps = (kExpectedBitsAccumulated * 1000 + (kExpectedWindowMs / 2)) / @@ -1738,12 +1875,10 @@ TEST_P(RtpSenderTest, BitrateCallbacks) { TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) { const uint8_t kPayloadType = 127; const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; - PlayoutDelayOracle playout_delay_oracle; FieldTrialBasedConfig field_trials; RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.playout_delay_oracle = &playout_delay_oracle; video_config.field_trials = &field_trials; RTPSenderVideo rtp_sender_video(video_config); uint8_t payload[] = {47, 11, 32, 93, 89}; @@ -1755,7 +1890,7 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) { RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321, - payload, nullptr, video_header, + payload, video_header, kDefaultExpectedRetransmissionTimeMs)); StreamDataCounters expected; expected.transmitted.payload_bytes = 6; @@ -1795,15 +1930,16 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacksUlpfec) { const uint8_t kUlpfecPayloadType = 97; const uint8_t kPayloadType = 127; const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; - PlayoutDelayOracle playout_delay_oracle; - FieldTrialBasedConfig field_trials; + + UlpfecGenerator 
ulpfec_generator(kRedPayloadType, kUlpfecPayloadType, clock_); + SetUpRtpSender(false, false, false, &ulpfec_generator); RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock_; + video_config.clock = clock_; video_config.rtp_sender = rtp_sender(); - video_config.playout_delay_oracle = &playout_delay_oracle; - video_config.field_trials = &field_trials; + video_config.field_trials = &field_trials_; video_config.red_payload_type = kRedPayloadType; - video_config.ulpfec_payload_type = kUlpfecPayloadType; + video_config.fec_type = ulpfec_generator.GetFecType(); + video_config.fec_overhead_bytes = ulpfec_generator.MaxPacketOverhead(); RTPSenderVideo rtp_sender_video(video_config); uint8_t payload[] = {47, 11, 32, 93, 89}; rtp_sender_context_->packet_history_.SetStorePacketsStatus( @@ -1818,10 +1954,10 @@ TEST_P(RtpSenderTestWithoutPacer, StreamDataCountersCallbacksUlpfec) { fec_params.fec_mask_type = kFecMaskRandom; fec_params.fec_rate = 1; fec_params.max_fec_frames = 1; - rtp_sender_video.SetFecParameters(fec_params, fec_params); + rtp_egress()->SetFecProtectionParameters(fec_params, fec_params); video_header.frame_type = VideoFrameType::kVideoFrameDelta; ASSERT_TRUE(rtp_sender_video.SendVideo(kPayloadType, kCodecType, 1234, 4321, - payload, nullptr, video_header, + payload, video_header, kDefaultExpectedRetransmissionTimeMs)); expected.transmitted.payload_bytes = 28; expected.transmitted.header_bytes = 24; @@ -1880,12 +2016,12 @@ TEST_P(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) { std::vector sequence_numbers; for (int32_t i = 0; i < kNumPackets; ++i) { sequence_numbers.push_back(kStartSequenceNumber + i); - fake_clock_.AdvanceTimeMilliseconds(1); - SendPacket(fake_clock_.TimeInMilliseconds(), kPacketSize); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); + SendPacket(clock_->TimeInMilliseconds(), kPacketSize); } EXPECT_EQ(kNumPackets, transport_.packets_sent()); - fake_clock_.AdvanceTimeMilliseconds(1000 - kNumPackets); + 
time_controller_.AdvanceTime(TimeDelta::Millis(1000 - kNumPackets)); // Resending should work - brings the bandwidth up to the limit. // NACK bitrate is capped to the same bitrate as the encoder, since the max @@ -1894,140 +2030,192 @@ TEST_P(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) { EXPECT_EQ(kNumPackets * 2, transport_.packets_sent()); // Must be at least 5ms in between retransmission attempts. - fake_clock_.AdvanceTimeMilliseconds(5); + time_controller_.AdvanceTime(TimeDelta::Millis(5)); // Resending should not work, bandwidth exceeded. rtp_sender()->OnReceivedNack(sequence_numbers, 0); EXPECT_EQ(kNumPackets * 2, transport_.packets_sent()); } +TEST_P(RtpSenderTest, UpdatingCsrcsUpdatedOverhead) { + RtpRtcpInterface::Configuration config; + config.clock = clock_; + config.outgoing_transport = &transport_; + config.local_media_ssrc = kSsrc; + config.retransmission_rate_limiter = &retransmission_rate_limiter_; + rtp_sender_context_ = + std::make_unique(config, &time_controller_); + + // Base RTP overhead is 12B. + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u); + + // Adding two csrcs adds 2*4 bytes to the header. + rtp_sender()->SetCsrcs({1, 2}); + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 20u); +} + TEST_P(RtpSenderTest, OnOverheadChanged) { - MockOverheadObserver mock_overhead_observer; - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - config.overhead_observer = &mock_overhead_observer; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); - // RTP overhead is 12B. - EXPECT_CALL(mock_overhead_observer, OnOverheadChanged(12)).Times(1); - SendGenericPacket(); + // Base RTP overhead is 12B. 
+ EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u); rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId); - // TransmissionTimeOffset extension has a size of 8B. - // 12B + 8B = 20B - EXPECT_CALL(mock_overhead_observer, OnOverheadChanged(20)).Times(1); - SendGenericPacket(); + // TransmissionTimeOffset extension has a size of 3B, but with the addition + // of header index and rounding to 4 byte boundary we end up with 20B total. + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 20u); } -TEST_P(RtpSenderTest, DoesNotUpdateOverheadOnEqualSize) { - MockOverheadObserver mock_overhead_observer; - RtpRtcp::Configuration config; - config.clock = &fake_clock_; +TEST_P(RtpSenderTest, CountMidOnlyUntilAcked) { + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.retransmission_rate_limiter = &retransmission_rate_limiter_; - config.overhead_observer = &mock_overhead_observer; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); - EXPECT_CALL(mock_overhead_observer, OnOverheadChanged(_)).Times(1); - SendGenericPacket(); - SendGenericPacket(); + // Base RTP overhead is 12B. + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u); + + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionMid, kMidExtensionId); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionRtpStreamId, + kRidExtensionId); + + // Counted only if set. + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u); + rtp_sender()->SetMid("foo"); + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 36u); + rtp_sender()->SetRid("bar"); + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 52u); + + // Ack received, mid/rid no longer sent. 
+ rtp_sender()->OnReceivedAckOnSsrc(0); + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u); +} + +TEST_P(RtpSenderTest, DontCountVolatileExtensionsIntoOverhead) { + RtpRtcpInterface::Configuration config; + config.clock = clock_; + config.outgoing_transport = &transport_; + config.local_media_ssrc = kSsrc; + config.retransmission_rate_limiter = &retransmission_rate_limiter_; + rtp_sender_context_ = + std::make_unique(config, &time_controller_); + + // Base RTP overhead is 12B. + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u); + + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionInbandComfortNoise, 1); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteCaptureTime, 2); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionVideoRotation, 3); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionPlayoutDelay, 4); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionVideoContentType, 5); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionVideoTiming, 6); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionRepairedRtpStreamId, 7); + rtp_sender()->RegisterRtpHeaderExtension(kRtpExtensionColorSpace, 8); + + // Still only 12B counted since can't count on above being sent. + EXPECT_EQ(rtp_sender()->ExpectedPerPacketOverhead(), 12u); } TEST_P(RtpSenderTest, SendPacketMatchesVideo) { std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_packet_type(RtpPacketMediaType::kVideo); // Verify sent with correct SSRC. 
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kVideo); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 1); } TEST_P(RtpSenderTest, SendPacketMatchesAudio) { std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - packet->set_packet_type(RtpPacketToSend::Type::kAudio); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_packet_type(RtpPacketMediaType::kAudio); // Verify sent with correct SSRC. - packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kAudio); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kAudio); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 1); } TEST_P(RtpSenderTest, SendPacketMatchesRetransmissions) { std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); // Verify sent with correct SSRC (non-RTX). 
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 1); // RTX retransmission. - packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kRtxSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 2); } TEST_P(RtpSenderTest, SendPacketMatchesPadding) { std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - packet->set_packet_type(RtpPacketToSend::Type::kPadding); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_packet_type(RtpPacketMediaType::kPadding); // Verify sent with correct SSRC (non-RTX). - packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kPadding); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kPadding); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 1); // RTX padding. 
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kRtxSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kPadding); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kPadding); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 2); } TEST_P(RtpSenderTest, SendPacketMatchesFlexfec) { std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - packet->set_packet_type(RtpPacketToSend::Type::kForwardErrorCorrection); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); // Verify sent with correct SSRC. - packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kFlexFecSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kForwardErrorCorrection); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 1); } TEST_P(RtpSenderTest, SendPacketMatchesUlpfec) { std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - packet->set_packet_type(RtpPacketToSend::Type::kForwardErrorCorrection); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); // Verify sent with correct SSRC. 
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetSsrc(kSsrc); - packet->set_packet_type(RtpPacketToSend::Type::kForwardErrorCorrection); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.packets_sent(), 1); } @@ -2035,37 +2223,41 @@ TEST_P(RtpSenderTest, SendPacketHandlesRetransmissionHistory) { rtp_sender_context_->packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); + // Ignore calls to EnqueuePackets() for this test. + EXPECT_CALL(mock_paced_sender_, EnqueuePackets).WillRepeatedly(Return()); + // Build a media packet and send it. std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); const uint16_t media_sequence_number = packet->SequenceNumber(); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); packet->set_allow_retransmission(true); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); // Simulate retransmission request. - fake_clock_.AdvanceTimeMilliseconds(30); + time_controller_.AdvanceTime(TimeDelta::Millis(30)); EXPECT_GT(rtp_sender()->ReSendPacket(media_sequence_number), 0); // Packet already pending, retransmission not allowed. - fake_clock_.AdvanceTimeMilliseconds(30); + time_controller_.AdvanceTime(TimeDelta::Millis(30)); EXPECT_EQ(rtp_sender()->ReSendPacket(media_sequence_number), 0); // Packet exiting pacer, mark as not longer pending. 
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); EXPECT_NE(packet->SequenceNumber(), media_sequence_number); - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); packet->SetSsrc(kRtxSsrc); packet->set_retransmitted_sequence_number(media_sequence_number); packet->set_allow_retransmission(false); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + uint16_t seq_no = packet->SequenceNumber(); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); // Retransmissions allowed again. - fake_clock_.AdvanceTimeMilliseconds(30); + time_controller_.AdvanceTime(TimeDelta::Millis(30)); EXPECT_GT(rtp_sender()->ReSendPacket(media_sequence_number), 0); // Retransmission of RTX packet should not be allowed. - EXPECT_EQ(rtp_sender()->ReSendPacket(packet->SequenceNumber()), 0); + EXPECT_EQ(rtp_sender()->ReSendPacket(seq_no), 0); } TEST_P(RtpSenderTest, SendPacketUpdatesExtensions) { @@ -2081,21 +2273,21 @@ TEST_P(RtpSenderTest, SendPacketUpdatesExtensions) { 0); std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - packet->set_packetization_finish_time_ms(fake_clock_.TimeInMilliseconds()); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds()); const int32_t kDiffMs = 10; - fake_clock_.AdvanceTimeMilliseconds(kDiffMs); + time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs)); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kVideo); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); const RtpPacketReceived& received_packet = transport_.last_sent_packet(); EXPECT_EQ(received_packet.GetExtension(), kDiffMs * 
90); EXPECT_EQ(received_packet.GetExtension(), - AbsoluteSendTime::MsTo24Bits(fake_clock_.TimeInMilliseconds())); + AbsoluteSendTime::MsTo24Bits(clock_->TimeInMilliseconds())); VideoSendTiming timing; EXPECT_TRUE(received_packet.GetExtension(&timing)); @@ -2109,12 +2301,12 @@ TEST_P(RtpSenderTest, SendPacketSetsPacketOptions) { kTransportSequenceNumberExtensionId), 0); std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetExtension(kPacketId); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); EXPECT_CALL(send_packet_observer_, OnSendPacket); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_EQ(transport_.last_options_.packet_id, kPacketId); EXPECT_TRUE(transport_.last_options_.included_in_allocation); @@ -2122,10 +2314,10 @@ TEST_P(RtpSenderTest, SendPacketSetsPacketOptions) { EXPECT_FALSE(transport_.last_options_.is_retransmit); // Send another packet as retransmission, verify options are populated. 
- packet = BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + packet = BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->SetExtension(kPacketId + 1); - packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); EXPECT_TRUE(transport_.last_options_.is_retransmit); } @@ -2134,44 +2326,45 @@ TEST_P(RtpSenderTest, SendPacketUpdatesStats) { StrictMock send_side_delay_observer; - RtpRtcp::Configuration config; - config.clock = &fake_clock_; + RtpRtcpInterface::Configuration config; + config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.rtx_send_ssrc = kRtxSsrc; - config.flexfec_sender = &flexfec_sender_; + config.fec_generator = &flexfec_sender_; config.send_side_delay_observer = &send_side_delay_observer; config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; - rtp_sender_context_ = std::make_unique(config); + rtp_sender_context_ = + std::make_unique(config, &time_controller_); ASSERT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( kRtpExtensionTransportSequenceNumber, kTransportSequenceNumberExtensionId)); - const int64_t capture_time_ms = fake_clock_.TimeInMilliseconds(); + const int64_t capture_time_ms = clock_->TimeInMilliseconds(); std::unique_ptr video_packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); - video_packet->set_packet_type(RtpPacketToSend::Type::kVideo); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + video_packet->set_packet_type(RtpPacketMediaType::kVideo); video_packet->SetPayloadSize(kPayloadSize); video_packet->SetExtension(1); std::unique_ptr rtx_packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + BuildRtpPacket(kPayload, true, 0, 
clock_->TimeInMilliseconds()); rtx_packet->SetSsrc(kRtxSsrc); - rtx_packet->set_packet_type(RtpPacketToSend::Type::kRetransmission); + rtx_packet->set_packet_type(RtpPacketMediaType::kRetransmission); rtx_packet->SetPayloadSize(kPayloadSize); rtx_packet->SetExtension(2); std::unique_ptr fec_packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); fec_packet->SetSsrc(kFlexFecSsrc); - fec_packet->set_packet_type(RtpPacketToSend::Type::kForwardErrorCorrection); + fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); fec_packet->SetPayloadSize(kPayloadSize); fec_packet->SetExtension(3); const int64_t kDiffMs = 25; - fake_clock_.AdvanceTimeMilliseconds(kDiffMs); + time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs)); EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(kDiffMs, kDiffMs, kDiffMs, kSsrc)); @@ -2181,15 +2374,15 @@ TEST_P(RtpSenderTest, SendPacketUpdatesStats) { EXPECT_CALL(send_packet_observer_, OnSendPacket(1, capture_time_ms, kSsrc)); - rtp_egress()->SendPacket(video_packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(video_packet), PacedPacketInfo()); // Send packet observer not called for padding/retransmissions. 
EXPECT_CALL(send_packet_observer_, OnSendPacket(2, _, _)).Times(0); - rtp_egress()->SendPacket(rtx_packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(rtx_packet), PacedPacketInfo()); EXPECT_CALL(send_packet_observer_, OnSendPacket(3, capture_time_ms, kFlexFecSsrc)); - rtp_egress()->SendPacket(fec_packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(fec_packet), PacedPacketInfo()); StreamDataCounters rtp_stats; StreamDataCounters rtx_stats; @@ -2199,7 +2392,7 @@ TEST_P(RtpSenderTest, SendPacketUpdatesStats) { EXPECT_EQ(rtx_stats.retransmitted.packets, 1u); } -TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { +TEST_P(RtpSenderTest, GeneratedPaddingHasBweExtensions) { // Min requested size in order to use RTX payload. const size_t kMinPaddingSize = 50; @@ -2218,16 +2411,84 @@ TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { kRtpExtensionTransportSequenceNumber, kTransportSequenceNumberExtensionId)); - const size_t kPayloadPacketSize = 1234; + // Send a payload packet first, to enable padding and populate the packet + // history. + std::unique_ptr packet = + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_allow_retransmission(true); + packet->SetPayloadSize(kMinPaddingSize); + packet->set_packet_type(RtpPacketMediaType::kVideo); + EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); + + // Generate a plain padding packet, check that extensions are registered. 
+ std::vector> generated_packets = + rtp_sender()->GeneratePadding(/*target_size_bytes=*/1, true); + ASSERT_THAT(generated_packets, SizeIs(1)); + auto& plain_padding = generated_packets.front(); + EXPECT_GT(plain_padding->padding_size(), 0u); + EXPECT_TRUE(plain_padding->HasExtension()); + EXPECT_TRUE(plain_padding->HasExtension()); + EXPECT_TRUE(plain_padding->HasExtension()); + + // Verify all header extensions have been written. + rtp_sender_context_->InjectPacket(std::move(plain_padding), + PacedPacketInfo()); + const auto& sent_plain_padding = transport_.last_sent_packet(); + EXPECT_TRUE(sent_plain_padding.HasExtension()); + EXPECT_TRUE(sent_plain_padding.HasExtension()); + EXPECT_TRUE(sent_plain_padding.HasExtension()); + webrtc::RTPHeader rtp_header; + sent_plain_padding.GetHeader(&rtp_header); + EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime); + EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset); + EXPECT_TRUE(rtp_header.extension.hasTransportSequenceNumber); + + // Generate a payload padding packets, check that extensions are registered. + generated_packets = rtp_sender()->GeneratePadding(kMinPaddingSize, true); + ASSERT_EQ(generated_packets.size(), 1u); + auto& payload_padding = generated_packets.front(); + EXPECT_EQ(payload_padding->padding_size(), 0u); + EXPECT_TRUE(payload_padding->HasExtension()); + EXPECT_TRUE(payload_padding->HasExtension()); + EXPECT_TRUE(payload_padding->HasExtension()); + + // Verify all header extensions have been written. 
+ rtp_sender_context_->InjectPacket(std::move(payload_padding), + PacedPacketInfo()); + const auto& sent_payload_padding = transport_.last_sent_packet(); + EXPECT_TRUE(sent_payload_padding.HasExtension()); + EXPECT_TRUE(sent_payload_padding.HasExtension()); + EXPECT_TRUE(sent_payload_padding.HasExtension()); + sent_payload_padding.GetHeader(&rtp_header); + EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime); + EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset); + EXPECT_TRUE(rtp_header.extension.hasTransportSequenceNumber); +} + +TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { + // Min requested size in order to use RTX payload. + const size_t kMinPaddingSize = 50; + + rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads); + rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload); + rtp_sender_context_->packet_history_.SetStorePacketsStatus( + RtpPacketHistory::StorageMode::kStoreAndCull, 1); + + ASSERT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( + kRtpExtensionTransportSequenceNumber, + kTransportSequenceNumberExtensionId)); + + const size_t kPayloadPacketSize = kMinPaddingSize; std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->set_allow_retransmission(true); packet->SetPayloadSize(kPayloadPacketSize); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); // Send a dummy video packet so it ends up in the packet history. EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); // Generated padding has large enough budget that the video packet should be // retransmitted as padding. 
@@ -2235,21 +2496,10 @@ TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { rtp_sender()->GeneratePadding(kMinPaddingSize, true); ASSERT_EQ(generated_packets.size(), 1u); auto& padding_packet = generated_packets.front(); - EXPECT_EQ(padding_packet->packet_type(), RtpPacketToSend::Type::kPadding); + EXPECT_EQ(padding_packet->packet_type(), RtpPacketMediaType::kPadding); EXPECT_EQ(padding_packet->Ssrc(), kRtxSsrc); EXPECT_EQ(padding_packet->payload_size(), kPayloadPacketSize + kRtxHeaderSize); - EXPECT_TRUE(padding_packet->HasExtension()); - EXPECT_TRUE(padding_packet->HasExtension()); - EXPECT_TRUE(padding_packet->HasExtension()); - - // Verify all header extensions are received. - rtp_egress()->SendPacket(padding_packet.get(), PacedPacketInfo()); - webrtc::RTPHeader rtp_header; - transport_.last_sent_packet().GetHeader(&rtp_header); - EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime); - EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset); - EXPECT_TRUE(rtp_header.extension.hasTransportSequenceNumber); // Not enough budged for payload padding, use plain padding instead. const size_t kPaddingBytesRequested = kMinPaddingSize - 1; @@ -2259,28 +2509,60 @@ TEST_P(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { rtp_sender()->GeneratePadding(kPaddingBytesRequested, true); EXPECT_EQ(generated_packets.size(), 1u); for (auto& packet : generated_packets) { - EXPECT_EQ(packet->packet_type(), RtpPacketToSend::Type::kPadding); + EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding); EXPECT_EQ(packet->Ssrc(), kRtxSsrc); EXPECT_EQ(packet->payload_size(), 0u); EXPECT_GT(packet->padding_size(), 0u); padding_bytes_generated += packet->padding_size(); - - EXPECT_TRUE(packet->HasExtension()); - EXPECT_TRUE(packet->HasExtension()); - EXPECT_TRUE(packet->HasExtension()); - - // Verify all header extensions are received. 
- rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); - webrtc::RTPHeader rtp_header; - transport_.last_sent_packet().GetHeader(&rtp_header); - EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime); - EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset); - EXPECT_TRUE(rtp_header.extension.hasTransportSequenceNumber); } EXPECT_EQ(padding_bytes_generated, kMaxPaddingSize); } +TEST_P(RtpSenderTest, LimitsPayloadPaddingSize) { + // Limit RTX payload padding to 2x target size. + const double kFactor = 2.0; + field_trials_.SetMaxPaddingFactor(kFactor); + SetUpRtpSender(true, false, false); + rtp_sender()->SetRtxStatus(kRtxRetransmitted | kRtxRedundantPayloads); + rtp_sender()->SetRtxPayloadType(kRtxPayload, kPayload); + rtp_sender_context_->packet_history_.SetStorePacketsStatus( + RtpPacketHistory::StorageMode::kStoreAndCull, 1); + + ASSERT_EQ(0, rtp_sender()->RegisterRtpHeaderExtension( + kRtpExtensionTransportSequenceNumber, + kTransportSequenceNumberExtensionId)); + + // Send a dummy video packet so it ends up in the packet history. + const size_t kPayloadPacketSize = 1234u; + std::unique_ptr packet = + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); + packet->set_allow_retransmission(true); + packet->SetPayloadSize(kPayloadPacketSize); + packet->set_packet_type(RtpPacketMediaType::kVideo); + EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); + + // Smallest target size that will result in the sent packet being returned as + // padding. + const size_t kMinTargerSizeForPayload = + (kPayloadPacketSize + kRtxHeaderSize) / kFactor; + + // Generated padding has large enough budget that the video packet should be + // retransmitted as padding. 
+ EXPECT_THAT( + rtp_sender()->GeneratePadding(kMinTargerSizeForPayload, true), + AllOf(Not(IsEmpty()), + Each(Pointee(Property(&RtpPacketToSend::padding_size, Eq(0u)))))); + + // If payload padding is > 2x requested size, plain padding is returned + // instead. + EXPECT_THAT( + rtp_sender()->GeneratePadding(kMinTargerSizeForPayload - 1, true), + AllOf(Not(IsEmpty()), + Each(Pointee(Property(&RtpPacketToSend::padding_size, Gt(0u)))))); +} + TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) { rtp_sender_context_->packet_history_.SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 1); @@ -2298,12 +2580,12 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) { // Send a dummy video packet so it ends up in the packet history. Since we // are not using RTX, it should never be used as padding. std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, fake_clock_.TimeInMilliseconds()); + BuildRtpPacket(kPayload, true, 0, clock_->TimeInMilliseconds()); packet->set_allow_retransmission(true); packet->SetPayloadSize(kPayloadPacketSize); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); + packet->set_packet_type(RtpPacketMediaType::kVideo); EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(1); - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); // Payload padding not available without RTX, only generate plain padding on // the media SSRC. 
@@ -2318,7 +2600,7 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) { rtp_sender()->GeneratePadding(kPaddingBytesRequested, true); EXPECT_EQ(padding_packets.size(), kExpectedNumPaddingPackets); for (auto& packet : padding_packets) { - EXPECT_EQ(packet->packet_type(), RtpPacketToSend::Type::kPadding); + EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding); EXPECT_EQ(packet->Ssrc(), kSsrc); EXPECT_EQ(packet->payload_size(), 0u); EXPECT_GT(packet->padding_size(), 0u); @@ -2328,7 +2610,7 @@ TEST_P(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) { EXPECT_TRUE(packet->HasExtension()); // Verify all header extensions are received. - rtp_egress()->SendPacket(packet.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet), PacedPacketInfo()); webrtc::RTPHeader rtp_header; transport_.last_sent_packet().GetHeader(&rtp_header); EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime); @@ -2392,54 +2674,55 @@ TEST_P(RtpSenderTest, SetsCaptureTimeAndPopulatesTransmissionOffset) { const uint32_t kTimestampTicksPerMs = 90; const int64_t kOffsetMs = 10; - auto packet = - BuildRtpPacket(kPayload, kMarkerBit, fake_clock_.TimeInMilliseconds(), - kMissingCaptureTimeMs); - packet->set_packet_type(RtpPacketToSend::Type::kVideo); - packet->ReserveExtension(); - packet->AllocatePayload(sizeof(kPayloadData)); - - std::unique_ptr packet_to_pace; - EXPECT_CALL(mock_paced_sender_, EnqueuePackets) - .WillOnce([&](std::vector> packets) { - EXPECT_EQ(packets.size(), 1u); - EXPECT_GT(packets[0]->capture_time_ms(), 0); - packet_to_pace = std::move(packets[0]); - }); + auto packet = + BuildRtpPacket(kPayload, kMarkerBit, clock_->TimeInMilliseconds(), + kMissingCaptureTimeMs); + packet->set_packet_type(RtpPacketMediaType::kVideo); + packet->ReserveExtension(); + packet->AllocatePayload(sizeof(kPayloadData)); + + std::unique_ptr packet_to_pace; + EXPECT_CALL(mock_paced_sender_, EnqueuePackets) + .WillOnce([&](std::vector> packets) { + 
EXPECT_EQ(packets.size(), 1u); + EXPECT_GT(packets[0]->capture_time_ms(), 0); + packet_to_pace = std::move(packets[0]); + }); - packet->set_allow_retransmission(true); - EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet))); + packet->set_allow_retransmission(true); + EXPECT_TRUE(rtp_sender()->SendToNetwork(std::move(packet))); - fake_clock_.AdvanceTimeMilliseconds(kOffsetMs); + time_controller_.AdvanceTime(TimeDelta::Millis(kOffsetMs)); - rtp_egress()->SendPacket(packet_to_pace.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet_to_pace), + PacedPacketInfo()); - EXPECT_EQ(1, transport_.packets_sent()); - absl::optional transmission_time_extension = - transport_.sent_packets_.back().GetExtension(); - ASSERT_TRUE(transmission_time_extension.has_value()); - EXPECT_EQ(*transmission_time_extension, kOffsetMs * kTimestampTicksPerMs); + EXPECT_EQ(1, transport_.packets_sent()); + absl::optional transmission_time_extension = + transport_.sent_packets_.back().GetExtension(); + ASSERT_TRUE(transmission_time_extension.has_value()); + EXPECT_EQ(*transmission_time_extension, kOffsetMs * kTimestampTicksPerMs); - // Retransmit packet. The RTX packet should get the same capture time as the - // original packet, so offset is delta from original packet to now. - fake_clock_.AdvanceTimeMilliseconds(kOffsetMs); + // Retransmit packet. The RTX packet should get the same capture time as the + // original packet, so offset is delta from original packet to now. 
+ time_controller_.AdvanceTime(TimeDelta::Millis(kOffsetMs)); - std::unique_ptr rtx_packet_to_pace; - EXPECT_CALL(mock_paced_sender_, EnqueuePackets) - .WillOnce([&](std::vector> packets) { - EXPECT_GT(packets[0]->capture_time_ms(), 0); - rtx_packet_to_pace = std::move(packets[0]); - }); + std::unique_ptr rtx_packet_to_pace; + EXPECT_CALL(mock_paced_sender_, EnqueuePackets) + .WillOnce([&](std::vector> packets) { + EXPECT_GT(packets[0]->capture_time_ms(), 0); + rtx_packet_to_pace = std::move(packets[0]); + }); - EXPECT_GT(rtp_sender()->ReSendPacket(kSeqNum), 0); - rtp_egress()->SendPacket(rtx_packet_to_pace.get(), PacedPacketInfo()); + EXPECT_GT(rtp_sender()->ReSendPacket(kSeqNum), 0); + rtp_sender_context_->InjectPacket(std::move(rtx_packet_to_pace), + PacedPacketInfo()); - EXPECT_EQ(2, transport_.packets_sent()); - transmission_time_extension = - transport_.sent_packets_.back().GetExtension(); - ASSERT_TRUE(transmission_time_extension.has_value()); - EXPECT_EQ(*transmission_time_extension, - 2 * kOffsetMs * kTimestampTicksPerMs); + EXPECT_EQ(2, transport_.packets_sent()); + transmission_time_extension = + transport_.sent_packets_.back().GetExtension(); + ASSERT_TRUE(transmission_time_extension.has_value()); + EXPECT_EQ(*transmission_time_extension, 2 * kOffsetMs * kTimestampTicksPerMs); } TEST_P(RtpSenderTestWithoutPacer, ClearHistoryOnSequenceNumberCange) { @@ -2461,13 +2744,13 @@ TEST_P(RtpSenderTestWithoutPacer, ClearHistoryOnSequenceNumberCange) { // Advance time and make sure it can be retransmitted, even if we try to set // the ssrc the what it already is. rtp_sender()->SetSequenceNumber(rtp_sender()->SequenceNumber()); - fake_clock_.AdvanceTimeMilliseconds(kRtt); + time_controller_.AdvanceTime(TimeDelta::Millis(kRtt)); EXPECT_GT(rtp_sender()->ReSendPacket(packet_seqence_number), 0); // Change the sequence number, then move the time and try to retransmit again. // The old packet should now be gone. 
rtp_sender()->SetSequenceNumber(rtp_sender()->SequenceNumber() - 1); - fake_clock_.AdvanceTimeMilliseconds(kRtt); + time_controller_.AdvanceTime(TimeDelta::Millis(kRtt)); EXPECT_EQ(rtp_sender()->ReSendPacket(packet_seqence_number), 0); } @@ -2489,16 +2772,52 @@ TEST_P(RtpSenderTest, IgnoresNackAfterDisablingMedia) { }); SendGenericPacket(); - rtp_egress()->SendPacket(packet_to_pace.get(), PacedPacketInfo()); + rtp_sender_context_->InjectPacket(std::move(packet_to_pace), + PacedPacketInfo()); ASSERT_EQ(1u, transport_.sent_packets_.size()); // Disable media sending and try to retransmit the packet, it should fail. rtp_sender()->SetSendingMediaStatus(false); - fake_clock_.AdvanceTimeMilliseconds(kRtt); + time_controller_.AdvanceTime(TimeDelta::Millis(kRtt)); EXPECT_LT(rtp_sender()->ReSendPacket(kSeqNum), 0); } +TEST_P(RtpSenderTest, DoesntFecProtectRetransmissions) { + // Set up retranmission without RTX, so that a plain copy of the old packet is + // re-sent instead. + const int64_t kRtt = 10; + rtp_sender()->SetSendingMediaStatus(true); + rtp_sender()->SetRtxStatus(kRtxOff); + rtp_sender_context_->packet_history_.SetStorePacketsStatus( + RtpPacketHistory::StorageMode::kStoreAndCull, 10); + rtp_sender_context_->packet_history_.SetRtt(kRtt); + + // Send a packet so it is in the packet history, make sure to mark it for + // FEC protection. + std::unique_ptr packet_to_pace; + EXPECT_CALL(mock_paced_sender_, EnqueuePackets) + .WillOnce([&](std::vector> packets) { + packet_to_pace = std::move(packets[0]); + }); + + SendGenericPacket(); + packet_to_pace->set_fec_protect_packet(true); + rtp_sender_context_->InjectPacket(std::move(packet_to_pace), + PacedPacketInfo()); + + ASSERT_EQ(1u, transport_.sent_packets_.size()); + + // Re-send packet, the retransmitted packet should not have the FEC protection + // flag set. 
+ EXPECT_CALL(mock_paced_sender_, + EnqueuePackets(Each(Pointee( + Property(&RtpPacketToSend::fec_protect_packet, false))))); + + time_controller_.AdvanceTime(TimeDelta::Millis(kRtt)); + EXPECT_GT(rtp_sender()->ReSendPacket(kSeqNum), 0); +} + INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, RtpSenderTest, ::testing::Values(TestConfig{false}, diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc index fc176c96cd..55ba9b0541 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -13,11 +13,13 @@ #include #include +#include #include #include #include #include +#include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" #include "api/crypto/frame_encryptor_interface.h" @@ -27,12 +29,15 @@ #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/trace_event.h" @@ -40,24 +45,9 @@ namespace webrtc { namespace { constexpr size_t kRedForFecHeaderLength = 1; -constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; constexpr int64_t kMaxUnretransmittableFrameIntervalMs = 33 * 4; - -// This is experimental field trial to exclude transport sequence number from -// FEC packets and should only be used in conjunction with datagram transport. 
-// Datagram transport removes transport sequence numbers from RTP packets and -// uses datagram feedback loop to re-generate RTCP feedback packets, but FEC -// contorol packets are calculated before sequence number is removed and as a -// result recovered packets will be corrupt unless we also remove transport -// sequence number during FEC calculation. -// -// TODO(sukhanov): We need to find find better way to implement FEC with -// datagram transport, probably moving FEC to datagram integration layter. We -// should also remove special field trial once we switch datagram path from -// RTCConfiguration flags to field trial and use the same field trial for FEC -// workaround. -const char kExcludeTransportSequenceNumberFromFecFieldTrial[] = - "WebRTC-ExcludeTransportSequenceNumberFromFec"; +constexpr char kIncludeCaptureClockOffset[] = + "WebRTC-IncludeCaptureClockOffset"; void BuildRedPayload(const RtpPacketToSend& media_packet, RtpPacketToSend* red_packet) { @@ -71,123 +61,6 @@ void BuildRedPayload(const RtpPacketToSend& media_packet, media_payload.size()); } -void AddRtpHeaderExtensions( - const RTPVideoHeader& video_header, - const absl::optional& playout_delay, - const absl::optional& absolute_capture_time, - FrameDependencyStructure* video_structure, - bool set_video_rotation, - bool set_color_space, - bool set_frame_marking, - bool first_packet, - bool last_packet, - RtpPacketToSend* packet) { - // Color space requires two-byte header extensions if HDR metadata is - // included. Therefore, it's best to add this extension first so that the - // other extensions in the same packet are written as two-byte headers at - // once. - if (last_packet && set_color_space && video_header.color_space) - packet->SetExtension(video_header.color_space.value()); - - if (last_packet && set_video_rotation) - packet->SetExtension(video_header.rotation); - - // Report content type only for key frames. 
- if (last_packet && - video_header.frame_type == VideoFrameType::kVideoFrameKey && - video_header.content_type != VideoContentType::UNSPECIFIED) - packet->SetExtension(video_header.content_type); - - if (last_packet && - video_header.video_timing.flags != VideoSendTiming::kInvalid) - packet->SetExtension(video_header.video_timing); - - // If transmitted, add to all packets; ack logic depends on this. - if (playout_delay) { - packet->SetExtension(*playout_delay); - } - - if (first_packet && absolute_capture_time) { - packet->SetExtension(*absolute_capture_time); - } - - if (set_frame_marking) { - FrameMarking frame_marking = video_header.frame_marking; - frame_marking.start_of_frame = first_packet; - frame_marking.end_of_frame = last_packet; - packet->SetExtension(frame_marking); - } - - if (video_header.generic) { - bool extension_is_set = false; - if (video_structure != nullptr) { - DependencyDescriptor descriptor; - descriptor.first_packet_in_frame = first_packet; - descriptor.last_packet_in_frame = last_packet; - descriptor.frame_number = video_header.generic->frame_id & 0xFFFF; - descriptor.frame_dependencies.spatial_id = - video_header.generic->spatial_index; - descriptor.frame_dependencies.temporal_id = - video_header.generic->temporal_index; - for (int64_t dep : video_header.generic->dependencies) { - descriptor.frame_dependencies.frame_diffs.push_back( - video_header.generic->frame_id - dep); - } - descriptor.frame_dependencies.decode_target_indications = - video_header.generic->decode_target_indications; - RTC_DCHECK_EQ( - descriptor.frame_dependencies.decode_target_indications.size(), - video_structure->num_decode_targets); - - // To avoid extra structure copy, temporary share ownership of the - // video_structure with the dependency descriptor. 
- if (video_header.frame_type == VideoFrameType::kVideoFrameKey && - first_packet) { - descriptor.attached_structure = absl::WrapUnique(video_structure); - } - extension_is_set = packet->SetExtension( - *video_structure, descriptor); - - // Remove the temporary shared ownership. - descriptor.attached_structure.release(); - } - - // Do not use v0/v1 generic frame descriptor when v2 is stored. - if (!extension_is_set) { - RtpGenericFrameDescriptor generic_descriptor; - generic_descriptor.SetFirstPacketInSubFrame(first_packet); - generic_descriptor.SetLastPacketInSubFrame(last_packet); - generic_descriptor.SetDiscardable(video_header.generic->discardable); - - if (first_packet) { - generic_descriptor.SetFrameId( - static_cast(video_header.generic->frame_id)); - for (int64_t dep : video_header.generic->dependencies) { - generic_descriptor.AddFrameDependencyDiff( - video_header.generic->frame_id - dep); - } - - uint8_t spatial_bimask = 1 << video_header.generic->spatial_index; - generic_descriptor.SetSpatialLayersBitmask(spatial_bimask); - - generic_descriptor.SetTemporalLayer( - video_header.generic->temporal_index); - - if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { - generic_descriptor.SetResolution(video_header.width, - video_header.height); - } - } - - if (!packet->SetExtension( - generic_descriptor)) { - packet->SetExtension( - generic_descriptor); - } - } - } -} - bool MinimizeDescriptor(RTPVideoHeader* video_header) { if (auto* vp8 = absl::get_if(&video_header->video_type_header)) { @@ -240,30 +113,36 @@ const char* FrameTypeToString(VideoFrameType frame_type) { } #endif -} // namespace +bool IsNoopDelay(const VideoPlayoutDelay& delay) { + return delay.min_ms == -1 && delay.max_ms == -1; +} -RTPSenderVideo::RTPSenderVideo(Clock* clock, - RTPSender* rtp_sender, - FlexfecSender* flexfec_sender, - PlayoutDelayOracle* playout_delay_oracle, - FrameEncryptorInterface* frame_encryptor, - bool require_frame_encryption, - bool need_rtp_packet_infos, - 
bool enable_retransmit_all_layers, - const WebRtcKeyValueConfig& field_trials) - : RTPSenderVideo([&] { - Config config; - config.clock = clock; - config.rtp_sender = rtp_sender; - config.flexfec_sender = flexfec_sender; - config.playout_delay_oracle = playout_delay_oracle; - config.frame_encryptor = frame_encryptor; - config.require_frame_encryption = require_frame_encryption; - config.need_rtp_packet_infos = need_rtp_packet_infos; - config.enable_retransmit_all_layers = enable_retransmit_all_layers; - config.field_trials = &field_trials; - return config; - }()) {} +absl::optional LoadVideoPlayoutDelayOverride( + const WebRtcKeyValueConfig* key_value_config) { + RTC_DCHECK(key_value_config); + FieldTrialOptional playout_delay_min_ms("min_ms", absl::nullopt); + FieldTrialOptional playout_delay_max_ms("max_ms", absl::nullopt); + ParseFieldTrial({&playout_delay_max_ms, &playout_delay_min_ms}, + key_value_config->Lookup("WebRTC-ForceSendPlayoutDelay")); + return playout_delay_max_ms && playout_delay_min_ms + ? absl::make_optional(*playout_delay_min_ms, + *playout_delay_max_ms) + : absl::nullopt; +} + +// Some packets can be skipped and the stream can still be decoded. Those +// packets are less likely to be retransmitted if they are lost. +bool PacketWillLikelyBeRequestedForRestransmitionIfLost( + const RTPVideoHeader& video_header) { + return IsBaseLayer(video_header) && + !(video_header.generic.has_value() + ? absl::c_linear_search( + video_header.generic->decode_target_indications, + DecodeTargetIndication::kDiscardable) + : false); +} + +} // namespace RTPSenderVideo::RTPSenderVideo(const Config& config) : rtp_sender_(config.rtp_sender), @@ -274,144 +153,54 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) : (kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers)), last_rotation_(kVideoRotation_0), transmit_color_space_next_frame_(false), - playout_delay_oracle_(config.playout_delay_oracle), - rtp_sequence_number_map_(config.need_rtp_packet_infos - ? 
std::make_unique( - kRtpSequenceNumberMapMaxEntries) - : nullptr), + send_allocation_(false), + current_playout_delay_{-1, -1}, + playout_delay_pending_(false), + forced_playout_delay_(LoadVideoPlayoutDelayOverride(config.field_trials)), red_payload_type_(config.red_payload_type), - ulpfec_payload_type_(config.ulpfec_payload_type), - flexfec_sender_(config.flexfec_sender), - delta_fec_params_{0, 1, kFecMaskRandom}, - key_fec_params_{0, 1, kFecMaskRandom}, - fec_bitrate_(1000, RateStatistics::kBpsScale), - video_bitrate_(1000, RateStatistics::kBpsScale), + fec_type_(config.fec_type), + fec_overhead_bytes_(config.fec_overhead_bytes), packetization_overhead_bitrate_(1000, RateStatistics::kBpsScale), frame_encryptor_(config.frame_encryptor), require_frame_encryption_(config.require_frame_encryption), - generic_descriptor_auth_experiment_( - config.field_trials->Lookup("WebRTC-GenericDescriptorAuth") - .find("Enabled") == 0), - exclude_transport_sequence_number_from_fec_experiment_( - config.field_trials - ->Lookup(kExcludeTransportSequenceNumberFromFecFieldTrial) - .find("Enabled") == 0), - absolute_capture_time_sender_(config.clock) { - RTC_DCHECK(playout_delay_oracle_); -} - -RTPSenderVideo::~RTPSenderVideo() {} - -void RTPSenderVideo::AppendAsRedMaybeWithUlpfec( - std::unique_ptr media_packet, - bool protect_media_packet, - std::vector>* packets) { - std::unique_ptr red_packet( - new RtpPacketToSend(*media_packet)); - BuildRedPayload(*media_packet, red_packet.get()); - red_packet->SetPayloadType(*red_payload_type_); - - std::vector> fec_packets; - if (ulpfec_enabled()) { - if (protect_media_packet) { - if (exclude_transport_sequence_number_from_fec_experiment_) { - // See comments at the top of the file why experiment - // "WebRTC-kExcludeTransportSequenceNumberFromFec" is needed in - // conjunction with datagram transport. - // TODO(sukhanov): We may also need to implement it for flexfec_sender - // if we decide to keep this approach in the future. 
- uint16_t transport_senquence_number; - if (media_packet->GetExtension( - &transport_senquence_number)) { - if (!media_packet->RemoveExtension( - webrtc::TransportSequenceNumber::kId)) { - RTC_NOTREACHED() - << "Failed to remove transport sequence number, packet=" - << media_packet->ToString(); - } - } - } - - ulpfec_generator_.AddRtpPacketAndGenerateFec( - media_packet->Buffer(), media_packet->headers_size()); - } - uint16_t num_fec_packets = ulpfec_generator_.NumAvailableFecPackets(); - if (num_fec_packets > 0) { - uint16_t first_fec_sequence_number = - rtp_sender_->AllocateSequenceNumber(num_fec_packets); - fec_packets = ulpfec_generator_.GetUlpfecPacketsAsRed( - *red_payload_type_, *ulpfec_payload_type_, first_fec_sequence_number); - RTC_DCHECK_EQ(num_fec_packets, fec_packets.size()); - } - } - - // Send |red_packet| instead of |packet| for allocated sequence number. - red_packet->set_packet_type(RtpPacketToSend::Type::kVideo); - red_packet->set_allow_retransmission(media_packet->allow_retransmission()); - packets->emplace_back(std::move(red_packet)); - - for (const auto& fec_packet : fec_packets) { - // TODO(danilchap): Make ulpfec_generator_ generate RtpPacketToSend to avoid - // reparsing them. - std::unique_ptr rtp_packet( - new RtpPacketToSend(*media_packet)); - RTC_CHECK(rtp_packet->Parse(fec_packet->data(), fec_packet->length())); - rtp_packet->set_capture_time_ms(media_packet->capture_time_ms()); - rtp_packet->set_packet_type(RtpPacketToSend::Type::kForwardErrorCorrection); - rtp_packet->set_allow_retransmission(false); - RTC_DCHECK_EQ(fec_packet->length(), rtp_packet->size()); - packets->emplace_back(std::move(rtp_packet)); - } + generic_descriptor_auth_experiment_(!absl::StartsWith( + config.field_trials->Lookup("WebRTC-GenericDescriptorAuth"), + "Disabled")), + absolute_capture_time_sender_(config.clock), + frame_transformer_delegate_( + config.frame_transformer + ? 
new rtc::RefCountedObject< + RTPSenderVideoFrameTransformerDelegate>( + this, + config.frame_transformer, + rtp_sender_->SSRC(), + config.send_transport_queue) + : nullptr), + include_capture_clock_offset_(absl::StartsWith( + config.field_trials->Lookup(kIncludeCaptureClockOffset), + "Enabled")) { + if (frame_transformer_delegate_) + frame_transformer_delegate_->Init(); } -void RTPSenderVideo::GenerateAndAppendFlexfec( - std::vector>* packets) { - RTC_DCHECK(flexfec_sender_); - - if (flexfec_sender_->FecAvailable()) { - std::vector> fec_packets = - flexfec_sender_->GetFecPackets(); - for (auto& fec_packet : fec_packets) { - fec_packet->set_packet_type( - RtpPacketToSend::Type::kForwardErrorCorrection); - fec_packet->set_allow_retransmission(false); - packets->emplace_back(std::move(fec_packet)); - } - } +RTPSenderVideo::~RTPSenderVideo() { + if (frame_transformer_delegate_) + frame_transformer_delegate_->Reset(); } void RTPSenderVideo::LogAndSendToNetwork( std::vector> packets, size_t unpacketized_payload_size) { - int64_t now_ms = clock_->TimeInMilliseconds(); -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - for (const auto& packet : packets) { - if (packet->packet_type() == - RtpPacketToSend::Type::kForwardErrorCorrection) { - const uint32_t ssrc = packet->Ssrc(); - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoFecBitrate_kbps", now_ms, - FecOverheadRate() / 1000, ssrc); - } - } -#endif - { - rtc::CritScope cs(&stats_crit_); + MutexLock lock(&stats_mutex_); size_t packetized_payload_size = 0; for (const auto& packet : packets) { - switch (*packet->packet_type()) { - case RtpPacketToSend::Type::kVideo: - video_bitrate_.Update(packet->size(), now_ms); - packetized_payload_size += packet->payload_size(); - break; - case RtpPacketToSend::Type::kForwardErrorCorrection: - fec_bitrate_.Update(packet->size(), clock_->TimeInMilliseconds()); - break; - default: - continue; + if (*packet->packet_type() == RtpPacketMediaType::kVideo) { + packetized_payload_size += 
packet->payload_size(); } } - // AV1 packetizer may produce less packetized bytes than unpacketized. + // AV1 and H264 packetizers may produce less packetized bytes than + // unpacketized. if (packetized_payload_size >= unpacketized_payload_size) { packetization_overhead_bitrate_.Update( packetized_payload_size - unpacketized_payload_size, @@ -423,42 +212,40 @@ void RTPSenderVideo::LogAndSendToNetwork( } size_t RTPSenderVideo::FecPacketOverhead() const { - if (flexfec_enabled()) - return flexfec_sender_->MaxPacketOverhead(); - - size_t overhead = 0; + size_t overhead = fec_overhead_bytes_; if (red_enabled()) { // The RED overhead is due to a small header. overhead += kRedForFecHeaderLength; - } - if (ulpfec_enabled()) { - // For ULPFEC, the overhead is the FEC headers plus RED for FEC header - // (see above) plus anything in RTP header beyond the 12 bytes base header - // (CSRC list, extensions...) - // This reason for the header extensions to be included here is that - // from an FEC viewpoint, they are part of the payload to be protected. - // (The base RTP header is already protected by the FEC header.) - overhead += ulpfec_generator_.MaxPacketOverhead() + - (rtp_sender_->RtpHeaderLength() - kRtpHeaderSize); + + if (fec_type_ == VideoFecGenerator::FecType::kUlpFec) { + // For ULPFEC, the overhead is the FEC headers plus RED for FEC header + // (see above) plus anything in RTP header beyond the 12 bytes base header + // (CSRC list, extensions...) + // This reason for the header extensions to be included here is that + // from an FEC viewpoint, they are part of the payload to be protected. + // (The base RTP header is already protected by the FEC header.) 
+ overhead += + rtp_sender_->FecOrPaddingPacketMaxRtpHeaderLength() - kRtpHeaderSize; + } } return overhead; } -void RTPSenderVideo::SetFecParameters(const FecProtectionParams& delta_params, - const FecProtectionParams& key_params) { - rtc::CritScope cs(&crit_); - delta_fec_params_ = delta_params; - key_fec_params_ = key_params; +void RTPSenderVideo::SetVideoStructure( + const FrameDependencyStructure* video_structure) { + if (frame_transformer_delegate_) { + frame_transformer_delegate_->SetVideoStructureUnderLock(video_structure); + return; + } + SetVideoStructureInternal(video_structure); } -absl::optional RTPSenderVideo::FlexfecSsrc() const { - if (flexfec_sender_) { - return flexfec_sender_->ssrc(); - } - return absl::nullopt; +void RTPSenderVideo::SetVideoStructureAfterTransformation( + const FrameDependencyStructure* video_structure) { + SetVideoStructureInternal(video_structure); } -void RTPSenderVideo::SetVideoStructure( +void RTPSenderVideo::SetVideoStructureInternal( const FrameDependencyStructure* video_structure) { RTC_DCHECK_RUNS_SERIALIZED(&send_checker_); if (video_structure == nullptr) { @@ -486,8 +273,177 @@ void RTPSenderVideo::SetVideoStructure( video_structure_ = std::make_unique(*video_structure); video_structure_->structure_id = structure_id; - // TODO(bugs.webrtc.org/10342): Support chains. 
- video_structure_->num_chains = 0; +} + +void RTPSenderVideo::SetVideoLayersAllocation( + VideoLayersAllocation allocation) { + if (frame_transformer_delegate_) { + frame_transformer_delegate_->SetVideoLayersAllocationUnderLock( + std::move(allocation)); + return; + } + SetVideoLayersAllocationInternal(std::move(allocation)); +} + +void RTPSenderVideo::SetVideoLayersAllocationAfterTransformation( + VideoLayersAllocation allocation) { + SetVideoLayersAllocationInternal(std::move(allocation)); +} + +void RTPSenderVideo::SetVideoLayersAllocationInternal( + VideoLayersAllocation allocation) { + RTC_DCHECK_RUNS_SERIALIZED(&send_checker_); + allocation_ = std::move(allocation); + send_allocation_ = true; +} + +void RTPSenderVideo::AddRtpHeaderExtensions( + const RTPVideoHeader& video_header, + const absl::optional& absolute_capture_time, + bool first_packet, + bool last_packet, + RtpPacketToSend* packet) const { + // Send color space when changed or if the frame is a key frame. Keep + // sending color space information until the first base layer frame to + // guarantee that the information is retrieved by the receiver. + bool set_color_space = + video_header.color_space != last_color_space_ || + video_header.frame_type == VideoFrameType::kVideoFrameKey || + transmit_color_space_next_frame_; + // Color space requires two-byte header extensions if HDR metadata is + // included. Therefore, it's best to add this extension first so that the + // other extensions in the same packet are written as two-byte headers at + // once. 
+ if (last_packet && set_color_space && video_header.color_space) + packet->SetExtension(video_header.color_space.value()); + + // According to + // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ + // ts_126114v120700p.pdf Section 7.4.5: + // The MTSI client shall add the payload bytes as defined in this clause + // onto the last RTP packet in each group of packets which make up a key + // frame (I-frame or IDR frame in H.264 (AVC), or an IRAP picture in H.265 + // (HEVC)). The MTSI client may also add the payload bytes onto the last RTP + // packet in each group of packets which make up another type of frame + // (e.g. a P-Frame) only if the current value is different from the previous + // value sent. + // Set rotation when key frame or when changed (to follow standard). + // Or when different from 0 (to follow current receiver implementation). + bool set_video_rotation = + video_header.frame_type == VideoFrameType::kVideoFrameKey || + video_header.rotation != last_rotation_ || + video_header.rotation != kVideoRotation_0; + if (last_packet && set_video_rotation) + packet->SetExtension(video_header.rotation); + + // Report content type only for key frames. + if (last_packet && + video_header.frame_type == VideoFrameType::kVideoFrameKey && + video_header.content_type != VideoContentType::UNSPECIFIED) + packet->SetExtension(video_header.content_type); + + if (last_packet && + video_header.video_timing.flags != VideoSendTiming::kInvalid) + packet->SetExtension(video_header.video_timing); + + // If transmitted, add to all packets; ack logic depends on this. 
+ if (playout_delay_pending_) { + packet->SetExtension(current_playout_delay_); + } + + if (first_packet && absolute_capture_time) { + packet->SetExtension(*absolute_capture_time); + } + + if (video_header.generic) { + bool extension_is_set = false; + if (video_structure_ != nullptr) { + DependencyDescriptor descriptor; + descriptor.first_packet_in_frame = first_packet; + descriptor.last_packet_in_frame = last_packet; + descriptor.frame_number = video_header.generic->frame_id & 0xFFFF; + descriptor.frame_dependencies.spatial_id = + video_header.generic->spatial_index; + descriptor.frame_dependencies.temporal_id = + video_header.generic->temporal_index; + for (int64_t dep : video_header.generic->dependencies) { + descriptor.frame_dependencies.frame_diffs.push_back( + video_header.generic->frame_id - dep); + } + descriptor.frame_dependencies.chain_diffs = + video_header.generic->chain_diffs; + descriptor.frame_dependencies.decode_target_indications = + video_header.generic->decode_target_indications; + RTC_DCHECK_EQ( + descriptor.frame_dependencies.decode_target_indications.size(), + video_structure_->num_decode_targets); + + if (first_packet) { + descriptor.active_decode_targets_bitmask = + active_decode_targets_tracker_.ActiveDecodeTargetsBitmask(); + } + // VP9 mark all layer frames of the first picture as kVideoFrameKey, + // Structure should be attached to the descriptor to lowest spatial layer + // when inter layer dependency is used, i.e. L structures; or to all + // layers when inter layer dependency is not used, i.e. S structures. + // Distinguish these two cases by checking if there are any dependencies. + if (video_header.frame_type == VideoFrameType::kVideoFrameKey && + video_header.generic->dependencies.empty() && first_packet) { + // To avoid extra structure copy, temporary share ownership of the + // video_structure with the dependency descriptor. 
+ descriptor.attached_structure = + absl::WrapUnique(video_structure_.get()); + } + extension_is_set = packet->SetExtension( + *video_structure_, + active_decode_targets_tracker_.ActiveChainsBitmask(), descriptor); + + // Remove the temporary shared ownership. + descriptor.attached_structure.release(); + } + + // Do not use generic frame descriptor when dependency descriptor is stored. + if (!extension_is_set) { + RtpGenericFrameDescriptor generic_descriptor; + generic_descriptor.SetFirstPacketInSubFrame(first_packet); + generic_descriptor.SetLastPacketInSubFrame(last_packet); + + if (first_packet) { + generic_descriptor.SetFrameId( + static_cast(video_header.generic->frame_id)); + for (int64_t dep : video_header.generic->dependencies) { + generic_descriptor.AddFrameDependencyDiff( + video_header.generic->frame_id - dep); + } + + uint8_t spatial_bimask = 1 << video_header.generic->spatial_index; + generic_descriptor.SetSpatialLayersBitmask(spatial_bimask); + + generic_descriptor.SetTemporalLayer( + video_header.generic->temporal_index); + + if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { + generic_descriptor.SetResolution(video_header.width, + video_header.height); + } + } + + packet->SetExtension( + generic_descriptor); + } + } + + if (first_packet && send_allocation_) { + if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { + packet->SetExtension( + allocation_.value()); + } else if (PacketWillLikelyBeRequestedForRestransmitionIfLost( + video_header)) { + VideoLayersAllocation allocation = allocation_.value(); + allocation.resolution_and_frame_rate_is_valid = false; + packet->SetExtension(allocation); + } + } } bool RTPSenderVideo::SendVideo( @@ -496,13 +452,13 @@ bool RTPSenderVideo::SendVideo( uint32_t rtp_timestamp, int64_t capture_time_ms, rtc::ArrayView payload, - const RTPFragmentationHeader* fragmentation, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms) { - #if RTC_TRACE_EVENTS_ENABLED + 
absl::optional expected_retransmission_time_ms, + absl::optional estimated_capture_clock_offset_ms) { +#if RTC_TRACE_EVENTS_ENABLED TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type", FrameTypeToString(video_header.frame_type)); - #endif +#endif RTC_CHECK_RUNS_SERIALIZED(&send_checker_); if (video_header.frame_type == VideoFrameType::kEmptyFrame) @@ -517,63 +473,35 @@ bool RTPSenderVideo::SendVideo( retransmission_settings = kRetransmitBaseLayer | kRetransmitHigherLayers; } - bool set_frame_marking = - video_header.codec == kVideoCodecH264 && - video_header.frame_marking.temporal_id != kNoTemporalIdx; - - const absl::optional playout_delay = - playout_delay_oracle_->PlayoutDelayToSend(video_header.playout_delay); - - // According to - // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ - // ts_126114v120700p.pdf Section 7.4.5: - // The MTSI client shall add the payload bytes as defined in this clause - // onto the last RTP packet in each group of packets which make up a key - // frame (I-frame or IDR frame in H.264 (AVC), or an IRAP picture in H.265 - // (HEVC)). The MTSI client may also add the payload bytes onto the last RTP - // packet in each group of packets which make up another type of frame - // (e.g. a P-Frame) only if the current value is different from the previous - // value sent. - // Set rotation when key frame or when changed (to follow standard). - // Or when different from 0 (to follow current receiver implementation). - bool set_video_rotation = - video_header.frame_type == VideoFrameType::kVideoFrameKey || - video_header.rotation != last_rotation_ || - video_header.rotation != kVideoRotation_0; - last_rotation_ = video_header.rotation; - - // Send color space when changed or if the frame is a key frame. Keep - // sending color space information until the first base layer frame to - // guarantee that the information is retrieved by the receiver. 
- bool set_color_space; - if (video_header.color_space != last_color_space_) { - last_color_space_ = video_header.color_space; - set_color_space = true; - transmit_color_space_next_frame_ = !IsBaseLayer(video_header); - } else { - set_color_space = - video_header.frame_type == VideoFrameType::kVideoFrameKey || - transmit_color_space_next_frame_; - transmit_color_space_next_frame_ = - transmit_color_space_next_frame_ ? !IsBaseLayer(video_header) : false; + MaybeUpdateCurrentPlayoutDelay(video_header); + if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { + if (!IsNoopDelay(current_playout_delay_)) { + // Force playout delay on key-frames, if set. + playout_delay_pending_ = true; + } + if (allocation_) { + // Send the bitrate allocation on every key frame. + send_allocation_ = true; + } } - if (flexfec_enabled() || ulpfec_enabled()) { - rtc::CritScope cs(&crit_); - // FEC settings. - const FecProtectionParams& fec_params = - video_header.frame_type == VideoFrameType::kVideoFrameKey - ? key_fec_params_ - : delta_fec_params_; - if (flexfec_enabled()) - flexfec_sender_->SetFecParameters(fec_params); - if (ulpfec_enabled()) - ulpfec_generator_.SetFecParameters(fec_params); + if (video_structure_ != nullptr && video_header.generic) { + active_decode_targets_tracker_.OnFrame( + video_structure_->decode_target_protected_by_chain, + video_header.generic->active_decode_targets, + video_header.frame_type == VideoFrameType::kVideoFrameKey, + video_header.generic->frame_id, video_header.generic->chain_diffs); } + const uint8_t temporal_id = GetTemporalId(video_header); + // No FEC protection for upper temporal layers, if used. + const bool use_fec = fec_type_.has_value() && + (temporal_id == 0 || temporal_id == kNoTemporalIdx); + // Maximum size of packet including rtp headers. // Extra space left in case packet will be resent using fec or rtx. 
- int packet_capacity = rtp_sender_->MaxRtpPacketSize() - FecPacketOverhead() - + int packet_capacity = rtp_sender_->MaxRtpPacketSize() - + (use_fec ? FecPacketOverhead() : 0) - (rtp_sender_->RtxStatus() ? kRtxHeaderSize : 0); std::unique_ptr single_packet = @@ -589,28 +517,26 @@ bool RTPSenderVideo::SendVideo( single_packet->Csrcs()), single_packet->Timestamp(), kVideoPayloadTypeFrequency, Int64MsToUQ32x32(single_packet->capture_time_ms() + NtpOffsetMs()), - /*estimated_capture_clock_offset=*/absl::nullopt); + /*estimated_capture_clock_offset=*/ + include_capture_clock_offset_ ? estimated_capture_clock_offset_ms + : absl::nullopt); auto first_packet = std::make_unique(*single_packet); auto middle_packet = std::make_unique(*single_packet); auto last_packet = std::make_unique(*single_packet); // Simplest way to estimate how much extensions would occupy is to set them. - AddRtpHeaderExtensions(video_header, playout_delay, absolute_capture_time, - video_structure_.get(), set_video_rotation, - set_color_space, set_frame_marking, - /*first=*/true, /*last=*/true, single_packet.get()); - AddRtpHeaderExtensions(video_header, playout_delay, absolute_capture_time, - video_structure_.get(), set_video_rotation, - set_color_space, set_frame_marking, - /*first=*/true, /*last=*/false, first_packet.get()); - AddRtpHeaderExtensions(video_header, playout_delay, absolute_capture_time, - video_structure_.get(), set_video_rotation, - set_color_space, set_frame_marking, - /*first=*/false, /*last=*/false, middle_packet.get()); - AddRtpHeaderExtensions(video_header, playout_delay, absolute_capture_time, - video_structure_.get(), set_video_rotation, - set_color_space, set_frame_marking, - /*first=*/false, /*last=*/true, last_packet.get()); + AddRtpHeaderExtensions(video_header, absolute_capture_time, + /*first_packet=*/true, /*last_packet=*/true, + single_packet.get()); + AddRtpHeaderExtensions(video_header, absolute_capture_time, + /*first_packet=*/true, /*last_packet=*/false, + 
first_packet.get()); + AddRtpHeaderExtensions(video_header, absolute_capture_time, + /*first_packet=*/false, /*last_packet=*/false, + middle_packet.get()); + AddRtpHeaderExtensions(video_header, absolute_capture_time, + /*first_packet=*/false, /*last_packet=*/true, + last_packet.get()); RTC_DCHECK_GT(packet_capacity, single_packet->headers_size()); RTC_DCHECK_GT(packet_capacity, first_packet->headers_size()); @@ -631,30 +557,20 @@ bool RTPSenderVideo::SendVideo( limits.last_packet_reduction_len = last_packet->headers_size() - middle_packet->headers_size(); - rtc::ArrayView generic_descriptor_raw_00 = - first_packet->GetRawExtension(); - rtc::ArrayView generic_descriptor_raw_01 = - first_packet->GetRawExtension(); - - if (!generic_descriptor_raw_00.empty() && - !generic_descriptor_raw_01.empty()) { - RTC_LOG(LS_WARNING) << "Two versions of GFD extension used."; - return false; - } + bool has_generic_descriptor = + first_packet->HasExtension() || + first_packet->HasExtension(); - // Minimiazation of the vp8 descriptor may erase temporal_id, so save it. - const uint8_t temporal_id = GetTemporalId(video_header); - rtc::ArrayView generic_descriptor_raw = - !generic_descriptor_raw_01.empty() ? generic_descriptor_raw_01 - : generic_descriptor_raw_00; - if (!generic_descriptor_raw.empty()) { + // Minimization of the vp8 descriptor may erase temporal_id, so use + // |temporal_id| rather than reference |video_header| beyond this point. + if (has_generic_descriptor) { MinimizeDescriptor(&video_header); } // TODO(benwright@webrtc.org) - Allocate enough to always encrypt inline. rtc::Buffer encrypted_video_payload; if (frame_encryptor_ != nullptr) { - if (generic_descriptor_raw.empty()) { + if (!has_generic_descriptor) { return false; } @@ -665,10 +581,10 @@ bool RTPSenderVideo::SendVideo( size_t bytes_written = 0; - // Only enable header authentication if the field trial is enabled. 
- rtc::ArrayView additional_data; + // Enable header authentication if the field trial isn't disabled. + std::vector additional_data; if (generic_descriptor_auth_experiment_) { - additional_data = generic_descriptor_raw; + additional_data = RtpDescriptorAuthentication(video_header); } if (frame_encryptor_->Encrypt( @@ -685,8 +601,8 @@ bool RTPSenderVideo::SendVideo( "one is required since require_frame_encryptor is set"; } - std::unique_ptr packetizer = RtpPacketizer::Create( - codec_type, payload, limits, video_header, fragmentation); + std::unique_ptr packetizer = + RtpPacketizer::Create(codec_type, payload, limits, video_header); // TODO(bugs.webrtc.org/10714): retransmission_settings_ should generally be // replaced by expected_retransmission_time_ms.has_value(). For now, though, @@ -698,20 +614,9 @@ bool RTPSenderVideo::SendVideo( : false; const size_t num_packets = packetizer->NumPackets(); - size_t unpacketized_payload_size; - if (fragmentation && fragmentation->fragmentationVectorSize > 0) { - unpacketized_payload_size = 0; - for (uint16_t i = 0; i < fragmentation->fragmentationVectorSize; ++i) { - unpacketized_payload_size += fragmentation->fragmentationLength[i]; - } - } else { - unpacketized_payload_size = payload.size(); - } - if (num_packets == 0) return false; - uint16_t first_sequence_number; bool first_frame = first_frame_sent_(); std::vector> rtp_packets; for (size_t i = 0; i < num_packets; ++i) { @@ -735,23 +640,14 @@ bool RTPSenderVideo::SendVideo( expected_payload_capacity = limits.max_payload_len; } + packet->set_first_packet_of_frame(i == 0); + if (!packetizer->NextPacket(packet.get())) return false; RTC_DCHECK_LE(packet->payload_size(), expected_payload_capacity); if (!rtp_sender_->AssignSequenceNumber(packet.get())) return false; - if (rtp_sequence_number_map_ && i == 0) { - first_sequence_number = packet->SequenceNumber(); - } - - if (i == 0) { - playout_delay_oracle_->OnSentPacket(packet->SequenceNumber(), - playout_delay); - } - // No FEC 
protection for upper temporal layers, if used. - bool protect_packet = temporal_id == 0 || temporal_id == kNoTemporalIdx; - packet->set_allow_retransmission(allow_retransmission); // Put packetization finish timestamp into extension. @@ -759,21 +655,23 @@ bool RTPSenderVideo::SendVideo( packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds()); } + packet->set_fec_protect_packet(use_fec); + if (red_enabled()) { - AppendAsRedMaybeWithUlpfec(std::move(packet), protect_packet, - &rtp_packets); + // TODO(sprang): Consider packetizing directly into packets with the RED + // header already in place, to avoid this copy. + std::unique_ptr red_packet(new RtpPacketToSend(*packet)); + BuildRedPayload(*packet, red_packet.get()); + red_packet->SetPayloadType(*red_payload_type_); + red_packet->set_is_red(true); + + // Send |red_packet| instead of |packet| for allocated sequence number. + red_packet->set_packet_type(RtpPacketMediaType::kVideo); + red_packet->set_allow_retransmission(packet->allow_retransmission()); + rtp_packets.emplace_back(std::move(red_packet)); } else { - packet->set_packet_type(RtpPacketToSend::Type::kVideo); - const RtpPacketToSend& media_packet = *packet; + packet->set_packet_type(RtpPacketMediaType::kVideo); rtp_packets.emplace_back(std::move(packet)); - if (flexfec_enabled()) { - // TODO(brandtr): Remove the FlexFEC code path when FlexfecSender - // is wired up to PacedSender instead. - if (protect_packet) { - flexfec_sender_->AddRtpPacketAndGenerateFec(media_packet); - } - GenerateAndAppendFlexfec(&rtp_packets); - } } if (first_frame) { @@ -788,67 +686,56 @@ bool RTPSenderVideo::SendVideo( } } - if (rtp_sequence_number_map_) { - const uint32_t timestamp = rtp_timestamp - rtp_sender_->TimestampOffset(); - rtc::CritScope cs(&crit_); - rtp_sequence_number_map_->InsertFrame(first_sequence_number, num_packets, - timestamp); + LogAndSendToNetwork(std::move(rtp_packets), payload.size()); + + // Update details about the last sent frame. 
+ last_rotation_ = video_header.rotation; + + if (video_header.color_space != last_color_space_) { + last_color_space_ = video_header.color_space; + transmit_color_space_next_frame_ = !IsBaseLayer(video_header); + } else { + transmit_color_space_next_frame_ = + transmit_color_space_next_frame_ ? !IsBaseLayer(video_header) : false; } - LogAndSendToNetwork(std::move(rtp_packets), unpacketized_payload_size); + if (video_header.frame_type == VideoFrameType::kVideoFrameKey || + PacketWillLikelyBeRequestedForRestransmitionIfLost(video_header)) { + // This frame will likely be delivered, no need to populate playout + // delay extensions until it changes again. + playout_delay_pending_ = false; + send_allocation_ = false; + } TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp", rtp_timestamp); return true; } -uint32_t RTPSenderVideo::VideoBitrateSent() const { - rtc::CritScope cs(&stats_crit_); - return video_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0); -} - -uint32_t RTPSenderVideo::FecOverheadRate() const { - rtc::CritScope cs(&stats_crit_); - return fec_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0); +bool RTPSenderVideo::SendEncodedImage( + int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + const EncodedImage& encoded_image, + RTPVideoHeader video_header, + absl::optional expected_retransmission_time_ms) { + if (frame_transformer_delegate_) { + // The frame will be sent async once transformed. 
+ return frame_transformer_delegate_->TransformFrame( + payload_type, codec_type, rtp_timestamp, encoded_image, video_header, + expected_retransmission_time_ms); + } + return SendVideo(payload_type, codec_type, rtp_timestamp, + encoded_image.capture_time_ms_, encoded_image, video_header, + expected_retransmission_time_ms); } uint32_t RTPSenderVideo::PacketizationOverheadBps() const { - rtc::CritScope cs(&stats_crit_); + MutexLock lock(&stats_mutex_); return packetization_overhead_bitrate_.Rate(clock_->TimeInMilliseconds()) .value_or(0); } -std::vector RTPSenderVideo::GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const { - RTC_DCHECK(!sequence_numbers.empty()); - - std::vector results; - if (!rtp_sequence_number_map_) { - return results; - } - results.reserve(sequence_numbers.size()); - - { - rtc::CritScope cs(&crit_); - for (uint16_t sequence_number : sequence_numbers) { - const absl::optional info = - rtp_sequence_number_map_->Get(sequence_number); - if (!info) { - // The empty vector will be returned. We can delay the clearing - // of the vector until after we exit the critical section. - break; - } - results.push_back(*info); - } - } - - if (results.size() != sequence_numbers.size()) { - results.clear(); // Some sequence number was not found. - } - - return results; -} - bool RTPSenderVideo::AllowRetransmission( uint8_t temporal_id, int32_t retransmission_settings, @@ -856,7 +743,7 @@ bool RTPSenderVideo::AllowRetransmission( if (retransmission_settings == kRetransmitOff) return false; - rtc::CritScope cs(&stats_crit_); + MutexLock lock(&stats_mutex_); // Media packet storage. 
if ((retransmission_settings & kConditionallyRetransmitHigherLayers) && UpdateConditionalRetransmit(temporal_id, @@ -886,14 +773,12 @@ uint8_t RTPSenderVideo::GetTemporalId(const RTPVideoHeader& header) { uint8_t operator()(const RTPVideoHeaderLegacyGeneric&) { return kNoTemporalIdx; } +#ifndef DISABLE_H265 + uint8_t operator()(const RTPVideoHeaderH265&) { return kNoTemporalIdx; } +#endif uint8_t operator()(const absl::monostate&) { return kNoTemporalIdx; } }; - switch (header.codec) { - case kVideoCodecH264: - return header.frame_marking.temporal_id; - default: - return absl::visit(TemporalIdGetter(), header.video_type_header); - } + return absl::visit(TemporalIdGetter(), header.video_type_header); } bool RTPSenderVideo::UpdateConditionalRetransmit( @@ -942,4 +827,53 @@ bool RTPSenderVideo::UpdateConditionalRetransmit( return false; } +void RTPSenderVideo::MaybeUpdateCurrentPlayoutDelay( + const RTPVideoHeader& header) { + VideoPlayoutDelay requested_delay = + forced_playout_delay_.value_or(header.playout_delay); + + if (IsNoopDelay(requested_delay)) { + return; + } + + if (requested_delay.min_ms > PlayoutDelayLimits::kMaxMs || + requested_delay.max_ms > PlayoutDelayLimits::kMaxMs) { + RTC_DLOG(LS_ERROR) + << "Requested playout delay values out of range, ignored"; + return; + } + if (requested_delay.max_ms != -1 && + requested_delay.min_ms > requested_delay.max_ms) { + RTC_DLOG(LS_ERROR) << "Requested playout delay values out of order"; + return; + } + + if (!playout_delay_pending_) { + current_playout_delay_ = requested_delay; + playout_delay_pending_ = true; + return; + } + + if ((requested_delay.min_ms == -1 || + requested_delay.min_ms == current_playout_delay_.min_ms) && + (requested_delay.max_ms == -1 || + requested_delay.max_ms == current_playout_delay_.max_ms)) { + // No change, ignore. 
+ return; + } + + if (requested_delay.min_ms == -1) { + RTC_DCHECK_GE(requested_delay.max_ms, 0); + requested_delay.min_ms = + std::min(current_playout_delay_.min_ms, requested_delay.max_ms); + } + if (requested_delay.max_ms == -1) { + requested_delay.max_ms = + std::max(current_playout_delay_.max_ms, requested_delay.min_ms); + } + + current_playout_delay_ = requested_delay; + playout_delay_pending_ = true; +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_video.h b/modules/rtp_rtcp/source/rtp_sender_video.h index 053877ef28..3f431dfec2 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video.h +++ b/modules/rtp_rtcp/source/rtp_sender_video.h @@ -18,23 +18,25 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_codec_type.h" #include "api/video/video_frame_type.h" -#include "modules/include/module_common_types.h" -#include "modules/rtp_rtcp/include/flexfec_sender.h" +#include "api/video/video_layers_allocation.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" -#include "modules/rtp_rtcp/source/playout_delay_oracle.h" +#include "modules/rtp_rtcp/source/active_decode_targets_helper.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_sender.h" -#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "modules/rtp_rtcp/source/ulpfec_generator.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" #include "rtc_base/one_time_event.h" #include "rtc_base/race_checker.h" #include 
"rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" @@ -69,73 +71,76 @@ class RTPSenderVideo { // expected to outlive the RTPSenderVideo object they are passed to. Clock* clock = nullptr; RTPSender* rtp_sender = nullptr; - FlexfecSender* flexfec_sender = nullptr; - PlayoutDelayOracle* playout_delay_oracle = nullptr; + // Some FEC data is duplicated here in preparation of moving FEC to + // the egress stage. + absl::optional fec_type; + size_t fec_overhead_bytes = 0; // Per packet max FEC overhead. FrameEncryptorInterface* frame_encryptor = nullptr; bool require_frame_encryption = false; - bool need_rtp_packet_infos = false; bool enable_retransmit_all_layers = false; absl::optional red_payload_type; - absl::optional ulpfec_payload_type; const WebRtcKeyValueConfig* field_trials = nullptr; + rtc::scoped_refptr frame_transformer; + TaskQueueBase* send_transport_queue = nullptr; }; explicit RTPSenderVideo(const Config& config); - // TODO(bugs.webrtc.org/10809): Remove when downstream usage is gone. - RTPSenderVideo(Clock* clock, - RTPSender* rtpSender, - FlexfecSender* flexfec_sender, - PlayoutDelayOracle* playout_delay_oracle, - FrameEncryptorInterface* frame_encryptor, - bool require_frame_encryption, - bool need_rtp_packet_infos, - bool enable_retransmit_all_layers, - const WebRtcKeyValueConfig& field_trials); virtual ~RTPSenderVideo(); // expected_retransmission_time_ms.has_value() -> retransmission allowed. // Calls to this method is assumed to be externally serialized. + // |estimated_capture_clock_offset_ms| is an estimated clock offset between + // this sender and the original capturer, for this video packet. See + // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time for more + // details. If the sender and the capture has the same clock, it is supposed + // to be zero valued, which is given as the default. 
bool SendVideo(int payload_type, absl::optional codec_type, uint32_t rtp_timestamp, int64_t capture_time_ms, rtc::ArrayView payload, - const RTPFragmentationHeader* fragmentation, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms); + absl::optional expected_retransmission_time_ms, + absl::optional estimated_capture_clock_offset_ms = 0); + + bool SendEncodedImage( + int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + const EncodedImage& encoded_image, + RTPVideoHeader video_header, + absl::optional expected_retransmission_time_ms); + // Configures video structures produced by encoder to send using the // dependency descriptor rtp header extension. Next call to SendVideo should // have video_header.frame_type == kVideoFrameKey. // All calls to SendVideo after this call must use video_header compatible // with the video_structure. void SetVideoStructure(const FrameDependencyStructure* video_structure); - - // FlexFEC/ULPFEC. - // Set FEC rates, max frames before FEC is sent, and type of FEC masks. - // Returns false on failure. - void SetFecParameters(const FecProtectionParams& delta_params, - const FecProtectionParams& key_params); - - // FlexFEC. - absl::optional FlexfecSsrc() const; - - uint32_t VideoBitrateSent() const; - uint32_t FecOverheadRate() const; + // Should only be used by a RTPSenderVideoFrameTransformerDelegate and exists + // to ensure correct syncronization. + void SetVideoStructureAfterTransformation( + const FrameDependencyStructure* video_structure); + + // Sets current active VideoLayersAllocation. The allocation will be sent + // using the rtp video layers allocation extension. The allocation will be + // sent in full on every key frame. The allocation will be sent once on a + // none discardable delta frame per call to this method and will not contain + // resolution and frame rate. 
+ void SetVideoLayersAllocation(VideoLayersAllocation allocation); + // Should only be used by a RTPSenderVideoFrameTransformerDelegate and exists + // to ensure correct syncronization. + void SetVideoLayersAllocationAfterTransformation( + VideoLayersAllocation allocation); // Returns the current packetization overhead rate, in bps. Note that this is // the payload overhead, eg the VP8 payload headers, not the RTP headers // or extension/ + // TODO(sprang): Consider moving this to RtpSenderEgress so it's in the same + // place as the other rate stats. uint32_t PacketizationOverheadBps() const; - // For each sequence number in |sequence_number|, recall the last RTP packet - // which bore it - its timestamp and whether it was the first and/or last - // packet in that frame. If all of the given sequence numbers could be - // recalled, return a vector with all of them (in corresponding order). - // If any could not be recalled, return an empty vector. - std::vector GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const; - protected: static uint8_t GetTemporalId(const RTPVideoHeader& header); bool AllowRetransmission(uint8_t temporal_id, @@ -154,18 +159,19 @@ class RTPSenderVideo { int64_t last_frame_time_ms; }; - size_t FecPacketOverhead() const RTC_EXCLUSIVE_LOCKS_REQUIRED(send_checker_); + void SetVideoStructureInternal( + const FrameDependencyStructure* video_structure); + void SetVideoLayersAllocationInternal(VideoLayersAllocation allocation); - void AppendAsRedMaybeWithUlpfec( - std::unique_ptr media_packet, - bool protect_media_packet, - std::vector>* packets) + void AddRtpHeaderExtensions( + const RTPVideoHeader& video_header, + const absl::optional& absolute_capture_time, + bool first_packet, + bool last_packet, + RtpPacketToSend* packet) const RTC_EXCLUSIVE_LOCKS_REQUIRED(send_checker_); - // TODO(brandtr): Remove the FlexFEC functions when FlexfecSender has been - // moved to PacedSender. 
- void GenerateAndAppendFlexfec( - std::vector>* packets); + size_t FecPacketOverhead() const RTC_EXCLUSIVE_LOCKS_REQUIRED(send_checker_); void LogAndSendToNetwork( std::vector> packets, @@ -173,13 +179,12 @@ class RTPSenderVideo { bool red_enabled() const { return red_payload_type_.has_value(); } - bool ulpfec_enabled() const { return ulpfec_payload_type_.has_value(); } - - bool flexfec_enabled() const { return flexfec_sender_ != nullptr; } - bool UpdateConditionalRetransmit(uint8_t temporal_id, int64_t expected_retransmission_time_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(stats_crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(stats_mutex_); + + void MaybeUpdateCurrentPlayoutDelay(const RTPVideoHeader& header) + RTC_EXCLUSIVE_LOCKS_REQUIRED(send_checker_); RTPSender* const rtp_sender_; Clock* const clock_; @@ -194,44 +199,32 @@ class RTPSenderVideo { bool transmit_color_space_next_frame_ RTC_GUARDED_BY(send_checker_); std::unique_ptr video_structure_ RTC_GUARDED_BY(send_checker_); - - // Tracks the current request for playout delay limits from application - // and decides whether the current RTP frame should include the playout - // delay extension on header. - PlayoutDelayOracle* const playout_delay_oracle_; + absl::optional allocation_ + RTC_GUARDED_BY(send_checker_); + // Flag indicating if we should send |allocation_|. + bool send_allocation_ RTC_GUARDED_BY(send_checker_); + + // Current target playout delay. + VideoPlayoutDelay current_playout_delay_ RTC_GUARDED_BY(send_checker_); + // Flag indicating if we need to send |current_playout_delay_| in order + // to guarantee it gets delivered. + bool playout_delay_pending_; + // Set by the field trial WebRTC-ForceSendPlayoutDelay to override the playout + // delay of outgoing video frames. + const absl::optional forced_playout_delay_; // Should never be held when calling out of this class. - rtc::CriticalSection crit_; - - // Maps sent packets' sequence numbers to a tuple consisting of: - // 1. 
The timestamp, without the randomizing offset mandated by the RFC. - // 2. Whether the packet was the first in its frame. - // 3. Whether the packet was the last in its frame. - const std::unique_ptr rtp_sequence_number_map_ - RTC_PT_GUARDED_BY(crit_); + Mutex mutex_; - // RED/ULPFEC. const absl::optional red_payload_type_; - const absl::optional ulpfec_payload_type_; - UlpfecGenerator ulpfec_generator_ RTC_GUARDED_BY(send_checker_); + absl::optional fec_type_; + const size_t fec_overhead_bytes_; // Per packet max FEC overhead. - // FlexFEC. - FlexfecSender* const flexfec_sender_; - - // FEC parameters, applicable to either ULPFEC or FlexFEC. - FecProtectionParams delta_fec_params_ RTC_GUARDED_BY(crit_); - FecProtectionParams key_fec_params_ RTC_GUARDED_BY(crit_); - - rtc::CriticalSection stats_crit_; - // Bitrate used for FEC payload, RED headers, RTP headers for FEC packets - // and any padding overhead. - RateStatistics fec_bitrate_ RTC_GUARDED_BY(stats_crit_); - // Bitrate used for video payload and RTP headers. - RateStatistics video_bitrate_ RTC_GUARDED_BY(stats_crit_); - RateStatistics packetization_overhead_bitrate_ RTC_GUARDED_BY(stats_crit_); + mutable Mutex stats_mutex_; + RateStatistics packetization_overhead_bitrate_ RTC_GUARDED_BY(stats_mutex_); std::map frame_stats_by_temporal_layer_ - RTC_GUARDED_BY(stats_crit_); + RTC_GUARDED_BY(stats_mutex_); OneTimeEvent first_frame_sent_; @@ -244,9 +237,15 @@ class RTPSenderVideo { // Set to true if the generic descriptor should be authenticated. const bool generic_descriptor_auth_experiment_; - const bool exclude_transport_sequence_number_from_fec_experiment_; - AbsoluteCaptureTimeSender absolute_capture_time_sender_; + // Tracks updates to the active decode targets and decides when active decode + // targets bitmask should be attached to the dependency descriptor. 
+ ActiveDecodeTargetsHelper active_decode_targets_tracker_; + + const rtc::scoped_refptr + frame_transformer_delegate_; + + const bool include_capture_clock_offset_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc new file mode 100644 index 0000000000..074b64086a --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" + +#include +#include + +#include "absl/memory/memory.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "modules/rtp_rtcp/source/rtp_sender_video.h" +#include "rtc_base/task_utils/to_queued_task.h" + +namespace webrtc { +namespace { + +class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { + public: + TransformableVideoSenderFrame( + const EncodedImage& encoded_image, + const RTPVideoHeader& video_header, + int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + absl::optional expected_retransmission_time_ms, + uint32_t ssrc) + : encoded_data_(encoded_image.GetEncodedData()), + header_(video_header), + metadata_(header_), + frame_type_(encoded_image._frameType), + payload_type_(payload_type), + codec_type_(codec_type), + timestamp_(rtp_timestamp), + capture_time_ms_(encoded_image.capture_time_ms_), + expected_retransmission_time_ms_(expected_retransmission_time_ms), + ssrc_(ssrc) {} + + 
~TransformableVideoSenderFrame() override = default; + + // Implements TransformableVideoFrameInterface. + rtc::ArrayView GetData() const override { + return *encoded_data_; + } + + void SetData(rtc::ArrayView data) override { + encoded_data_ = EncodedImageBuffer::Create(data.data(), data.size()); + } + + uint32_t GetTimestamp() const override { return timestamp_; } + uint32_t GetSsrc() const override { return ssrc_; } + + bool IsKeyFrame() const override { + return frame_type_ == VideoFrameType::kVideoFrameKey; + } + + std::vector GetAdditionalData() const override { + return RtpDescriptorAuthentication(header_); + } + + const VideoFrameMetadata& GetMetadata() const override { return metadata_; } + + const RTPVideoHeader& GetHeader() const { return header_; } + int GetPayloadType() const { return payload_type_; } + absl::optional GetCodecType() const { return codec_type_; } + int64_t GetCaptureTimeMs() const { return capture_time_ms_; } + + const absl::optional& GetExpectedRetransmissionTimeMs() const { + return expected_retransmission_time_ms_; + } + + private: + rtc::scoped_refptr encoded_data_; + const RTPVideoHeader header_; + const VideoFrameMetadata metadata_; + const VideoFrameType frame_type_; + const int payload_type_; + const absl::optional codec_type_ = absl::nullopt; + const uint32_t timestamp_; + const int64_t capture_time_ms_; + const absl::optional expected_retransmission_time_ms_; + const uint32_t ssrc_; +}; +} // namespace + +RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( + RTPSenderVideo* sender, + rtc::scoped_refptr frame_transformer, + uint32_t ssrc, + TaskQueueBase* send_transport_queue) + : sender_(sender), + frame_transformer_(std::move(frame_transformer)), + ssrc_(ssrc), + send_transport_queue_(send_transport_queue) {} + +void RTPSenderVideoFrameTransformerDelegate::Init() { + frame_transformer_->RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr(this), ssrc_); +} + +bool 
RTPSenderVideoFrameTransformerDelegate::TransformFrame( + int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + const EncodedImage& encoded_image, + RTPVideoHeader video_header, + absl::optional expected_retransmission_time_ms) { + if (!encoder_queue_) { + // Save the current task queue to post the transformed frame for sending + // once it is transformed. When there is no current task queue, i.e. + // encoding is done on an external thread (for example in the case of + // hardware encoders), use the send transport queue instead. + TaskQueueBase* current = TaskQueueBase::Current(); + encoder_queue_ = current ? current : send_transport_queue_; + } + frame_transformer_->Transform(std::make_unique( + encoded_image, video_header, payload_type, codec_type, rtp_timestamp, + expected_retransmission_time_ms, ssrc_)); + return true; +} + +void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( + std::unique_ptr frame) { + MutexLock lock(&sender_lock_); + + // The encoder queue gets destroyed after the sender; as long as the sender is + // alive, it's safe to post. 
+ if (!sender_) + return; + rtc::scoped_refptr delegate = this; + encoder_queue_->PostTask(ToQueuedTask( + [delegate = std::move(delegate), frame = std::move(frame)]() mutable { + delegate->SendVideo(std::move(frame)); + })); +} + +void RTPSenderVideoFrameTransformerDelegate::SendVideo( + std::unique_ptr transformed_frame) const { + RTC_CHECK(encoder_queue_->IsCurrent()); + MutexLock lock(&sender_lock_); + if (!sender_) + return; + auto* transformed_video_frame = + static_cast(transformed_frame.get()); + sender_->SendVideo( + transformed_video_frame->GetPayloadType(), + transformed_video_frame->GetCodecType(), + transformed_video_frame->GetTimestamp(), + transformed_video_frame->GetCaptureTimeMs(), + transformed_video_frame->GetData(), + transformed_video_frame->GetHeader(), + transformed_video_frame->GetExpectedRetransmissionTimeMs()); +} + +void RTPSenderVideoFrameTransformerDelegate::SetVideoStructureUnderLock( + const FrameDependencyStructure* video_structure) { + MutexLock lock(&sender_lock_); + RTC_CHECK(sender_); + sender_->SetVideoStructureAfterTransformation(video_structure); +} + +void RTPSenderVideoFrameTransformerDelegate::SetVideoLayersAllocationUnderLock( + VideoLayersAllocation allocation) { + MutexLock lock(&sender_lock_); + RTC_CHECK(sender_); + sender_->SetVideoLayersAllocationAfterTransformation(std::move(allocation)); +} + +void RTPSenderVideoFrameTransformerDelegate::Reset() { + frame_transformer_->UnregisterTransformedFrameSinkCallback(ssrc_); + frame_transformer_ = nullptr; + { + MutexLock lock(&sender_lock_); + sender_ = nullptr; + } +} +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h new file mode 100644 index 0000000000..8573869296 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_FRAME_TRANSFORMER_DELEGATE_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_FRAME_TRANSFORMER_DELEGATE_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_layers_allocation.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +class RTPSenderVideo; + +// Delegates calls to FrameTransformerInterface to transform frames, and to +// RTPSenderVideo to send the transformed frames. Ensures thread-safe access to +// the sender. +class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { + public: + RTPSenderVideoFrameTransformerDelegate( + RTPSenderVideo* sender, + rtc::scoped_refptr frame_transformer, + uint32_t ssrc, + TaskQueueBase* send_transport_queue); + + void Init(); + + // Delegates the call to FrameTransformerInterface::TransformFrame. + bool TransformFrame(int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + const EncodedImage& encoded_image, + RTPVideoHeader video_header, + absl::optional expected_retransmission_time_ms); + + // Implements TransformedFrameCallback. Can be called on any thread. Posts + // the transformed frame to be sent on the |encoder_queue_|. + void OnTransformedFrame( + std::unique_ptr frame) override; + + // Delegates the call to RTPSendVideo::SendVideo on the |encoder_queue_|. + void SendVideo(std::unique_ptr frame) const; + + // Delegates the call to RTPSendVideo::SetVideoStructureAfterTransformation + // under |sender_lock_|. 
+ void SetVideoStructureUnderLock( + const FrameDependencyStructure* video_structure); + + // Delegates the call to + // RTPSendVideo::SetVideoLayersAllocationAfterTransformation under + // |sender_lock_|. + void SetVideoLayersAllocationUnderLock(VideoLayersAllocation allocation); + + // Unregisters and releases the |frame_transformer_| reference, and resets + // |sender_| under lock. Called from RTPSenderVideo destructor to prevent the + // |sender_| to dangle. + void Reset(); + + protected: + ~RTPSenderVideoFrameTransformerDelegate() override = default; + + private: + mutable Mutex sender_lock_; + RTPSenderVideo* sender_ RTC_GUARDED_BY(sender_lock_); + rtc::scoped_refptr frame_transformer_; + const uint32_t ssrc_; + TaskQueueBase* encoder_queue_ = nullptr; + TaskQueueBase* send_transport_queue_; +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_FRAME_TRANSFORMER_DELEGATE_H_ diff --git a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc index 867e05b60d..e415bad16f 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc @@ -15,45 +15,60 @@ #include #include +#include "absl/memory/memory.h" +#include "api/test/mock_frame_encryptor.h" +#include "api/transport/field_trial_based_config.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_codec_constants.h" #include "api/video/video_timing.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" #include "modules/rtp_rtcp/source/rtp_format_video_generic.h" #include 
"modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/arraysize.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/task_queue_for_test.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/mock_frame_transformer.h" namespace webrtc { namespace { +using ::testing::_; +using ::testing::ContainerEq; using ::testing::ElementsAre; +using ::testing::ElementsAreArray; using ::testing::IsEmpty; +using ::testing::NiceMock; +using ::testing::Return; +using ::testing::ReturnArg; +using ::testing::SaveArg; using ::testing::SizeIs; +using ::testing::WithArgs; enum : int { // The first valid value is 1. 
kAbsoluteSendTimeExtensionId = 1, - kFrameMarkingExtensionId, - kGenericDescriptorId00, - kGenericDescriptorId01, - kGenericDescriptorId02, + kGenericDescriptorId, + kDependencyDescriptorId, kTransmissionTimeOffsetExtensionId, kTransportSequenceNumberExtensionId, kVideoRotationExtensionId, kVideoTimingExtensionId, kAbsoluteCaptureTimeExtensionId, + kPlayoutDelayExtensionId, + kVideoLayersAllocationExtensionId, }; constexpr int kPayload = 100; @@ -78,15 +93,15 @@ class LoopbackTransportTest : public webrtc::Transport { receivers_extensions_.Register( kVideoTimingExtensionId); receivers_extensions_.Register( - kGenericDescriptorId00); - receivers_extensions_.Register( - kGenericDescriptorId01); + kGenericDescriptorId); receivers_extensions_.Register( - kGenericDescriptorId02); - receivers_extensions_.Register( - kFrameMarkingExtensionId); + kDependencyDescriptorId); receivers_extensions_.Register( kAbsoluteCaptureTimeExtensionId); + receivers_extensions_.Register( + kPlayoutDelayExtensionId); + receivers_extensions_.Register( + kVideoLayersAllocationExtensionId); } bool SendRtp(const uint8_t* data, @@ -108,20 +123,15 @@ class LoopbackTransportTest : public webrtc::Transport { std::vector sent_packets_; }; -} // namespace - class TestRtpSenderVideo : public RTPSenderVideo { public: TestRtpSenderVideo(Clock* clock, RTPSender* rtp_sender, - FlexfecSender* flexfec_sender, const WebRtcKeyValueConfig& field_trials) : RTPSenderVideo([&] { Config config; config.clock = clock; config.rtp_sender = rtp_sender; - config.flexfec_sender = flexfec_sender; - config.playout_delay_oracle = &playout_delay_oracle_; config.field_trials = &field_trials; return config; }()) {} @@ -134,23 +144,30 @@ class TestRtpSenderVideo : public RTPSenderVideo { retransmission_settings, expected_retransmission_time_ms); } - PlayoutDelayOracle playout_delay_oracle_; }; class FieldTrials : public WebRtcKeyValueConfig { public: explicit FieldTrials(bool use_send_side_bwe_with_overhead) - : 
use_send_side_bwe_with_overhead_(use_send_side_bwe_with_overhead) {} + : use_send_side_bwe_with_overhead_(use_send_side_bwe_with_overhead), + include_capture_clock_offset_(false) {} + + void set_include_capture_clock_offset(bool include_capture_clock_offset) { + include_capture_clock_offset_ = include_capture_clock_offset; + } std::string Lookup(absl::string_view key) const override { - return key == "WebRTC-SendSideBwe-WithOverhead" && - use_send_side_bwe_with_overhead_ - ? "Enabled" - : ""; + if (key == "WebRTC-SendSideBwe-WithOverhead") { + return use_send_side_bwe_with_overhead_ ? "Enabled" : ""; + } else if (key == "WebRTC-IncludeCaptureClockOffset") { + return include_capture_clock_offset_ ? "Enabled" : ""; + } + return ""; } private: bool use_send_side_bwe_with_overhead_; + bool include_capture_clock_offset_; }; class RtpSenderVideoTest : public ::testing::TestWithParam { @@ -159,8 +176,8 @@ class RtpSenderVideoTest : public ::testing::TestWithParam { : field_trials_(GetParam()), fake_clock_(kStartTime), retransmission_rate_limiter_(&fake_clock_, 1000), - rtp_module_(RtpRtcp::Create([&] { - RtpRtcp::Configuration config; + rtp_module_(ModuleRtpRtcpImpl2::Create([&] { + RtpRtcpInterface::Configuration config; config.clock = &fake_clock_; config.outgoing_transport = &transport_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; @@ -168,27 +185,25 @@ class RtpSenderVideoTest : public ::testing::TestWithParam { config.local_media_ssrc = kSsrc; return config; }())), - rtp_sender_video_(&fake_clock_, - rtp_module_->RtpSender(), - nullptr, - field_trials_) { + rtp_sender_video_( + std::make_unique(&fake_clock_, + rtp_module_->RtpSender(), + field_trials_)) { rtp_module_->SetSequenceNumber(kSeqNum); rtp_module_->SetStartTimestamp(0); } - void PopulateGenericFrameDescriptor(int version); - void UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed( int version); protected: - const RtpRtcp::Configuration config_; + const 
RtpRtcpInterface::Configuration config_; FieldTrials field_trials_; SimulatedClock fake_clock_; LoopbackTransportTest transport_; RateLimiter retransmission_rate_limiter_; - std::unique_ptr rtp_module_; - TestRtpSenderVideo rtp_sender_video_; + std::unique_ptr rtp_module_; + std::unique_ptr rtp_sender_video_; }; TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) { @@ -199,8 +214,8 @@ TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) { RTPVideoHeader hdr; hdr.rotation = kVideoRotation_0; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr, - hdr, kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); VideoRotation rotation; EXPECT_TRUE( @@ -225,9 +240,9 @@ TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { fake_clock_.AdvanceTimeMilliseconds(kPacketizationTimeMs); hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, kCaptureTimestamp, - kFrame, nullptr, hdr, - kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, kCaptureTimestamp, + kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); VideoSendTiming timing; EXPECT_TRUE(transport_.last_sent_packet().GetExtension( &timing)); @@ -244,15 +259,15 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { RTPVideoHeader hdr; hdr.rotation = kVideoRotation_90; hdr.frame_type = VideoFrameType::kVideoFrameKey; - EXPECT_TRUE(rtp_sender_video_.SendVideo( - kPayload, kType, kTimestamp, 0, kFrame, nullptr, hdr, - kDefaultExpectedRetransmissionTimeMs)); + EXPECT_TRUE( + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs)); hdr.rotation = kVideoRotation_0; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - EXPECT_TRUE(rtp_sender_video_.SendVideo( - kPayload, kType, kTimestamp + 1, 0, 
kFrame, nullptr, hdr, - kDefaultExpectedRetransmissionTimeMs)); + EXPECT_TRUE( + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp + 1, 0, kFrame, + hdr, kDefaultExpectedRetransmissionTimeMs)); VideoRotation rotation; EXPECT_TRUE( @@ -268,14 +283,14 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { RTPVideoHeader hdr; hdr.rotation = kVideoRotation_90; hdr.frame_type = VideoFrameType::kVideoFrameKey; - EXPECT_TRUE(rtp_sender_video_.SendVideo( - kPayload, kType, kTimestamp, 0, kFrame, nullptr, hdr, - kDefaultExpectedRetransmissionTimeMs)); + EXPECT_TRUE( + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs)); hdr.frame_type = VideoFrameType::kVideoFrameDelta; - EXPECT_TRUE(rtp_sender_video_.SendVideo( - kPayload, kType, kTimestamp + 1, 0, kFrame, nullptr, hdr, - kDefaultExpectedRetransmissionTimeMs)); + EXPECT_TRUE( + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp + 1, 0, kFrame, + hdr, kDefaultExpectedRetransmissionTimeMs)); VideoRotation rotation; EXPECT_TRUE( @@ -283,43 +298,6 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { EXPECT_EQ(kVideoRotation_90, rotation); } -TEST_P(RtpSenderVideoTest, CheckH264FrameMarking) { - uint8_t kFrame[kMaxPacketLength]; - rtp_module_->RegisterRtpHeaderExtension(FrameMarkingExtension::kUri, - kFrameMarkingExtensionId); - - RTPFragmentationHeader frag; - frag.VerifyAndAllocateFragmentationHeader(1); - frag.fragmentationOffset[0] = 0; - frag.fragmentationLength[0] = sizeof(kFrame); - - RTPVideoHeader hdr; - hdr.video_type_header.emplace().packetization_mode = - H264PacketizationMode::NonInterleaved; - hdr.codec = kVideoCodecH264; - hdr.frame_marking.temporal_id = kNoTemporalIdx; - hdr.frame_marking.tl0_pic_idx = 99; - hdr.frame_marking.base_layer_sync = true; - hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, &frag, - hdr, kDefaultExpectedRetransmissionTimeMs); - 
- FrameMarking fm; - EXPECT_FALSE( - transport_.last_sent_packet().GetExtension(&fm)); - - hdr.frame_marking.temporal_id = 0; - hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp + 1, 0, kFrame, &frag, - hdr, kDefaultExpectedRetransmissionTimeMs); - - EXPECT_TRUE( - transport_.last_sent_packet().GetExtension(&fm)); - EXPECT_EQ(hdr.frame_marking.temporal_id, fm.temporal_id); - EXPECT_EQ(hdr.frame_marking.tl0_pic_idx, fm.tl0_pic_idx); - EXPECT_EQ(hdr.frame_marking.base_layer_sync, fm.base_layer_sync); -} - // Make sure rotation is parsed correctly when the Camera (C) and Flip (F) bits // are set in the CVO byte. TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) { @@ -345,13 +323,13 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesGeneric) { RTPVideoHeader header; header.codec = kVideoCodecGeneric; - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kConditionallyRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); } @@ -361,25 +339,16 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) { header.video_type_header.emplace().packetization_mode = H264PacketizationMode::NonInterleaved; header.codec = kVideoCodecH264; - header.frame_marking.temporal_id = kNoTemporalIdx; - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitOff, 
kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kConditionallyRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); - - // Test higher level retransmit. - for (int tid = 0; tid <= kMaxTemporalStreams; ++tid) { - header.frame_marking.temporal_id = tid; - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( - header, kRetransmitHigherLayers | kRetransmitBaseLayer, - kDefaultExpectedRetransmissionTimeMs)); - } } TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { @@ -388,19 +357,19 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { auto& vp8_header = header.video_type_header.emplace(); vp8_header.temporalIdx = 0; - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers | kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, 
kConditionallyRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); } @@ -413,13 +382,13 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { for (int tid = 1; tid <= kMaxTemporalStreams; ++tid) { vp8_header.temporalIdx = tid; - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers | kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); } @@ -433,13 +402,13 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesVP9) { for (int tid = 1; tid <= kMaxTemporalStreams; ++tid) { vp9_header.temporal_idx = tid; - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_FALSE(rtp_sender_video_.AllowRetransmission( + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission( + 
EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers | kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); } @@ -463,7 +432,7 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmit) { auto& vp8_header = header.video_type_header.emplace(); for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) { vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)]; - rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs); + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs); fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); } @@ -473,31 +442,34 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmit) { // will not be retransmitted. vp8_header.temporalIdx = 1; EXPECT_FALSE( - rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs)); + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); EXPECT_FALSE( - rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs)); + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); // The TL0 frame did not arrive. So allow retransmission. - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs)); + EXPECT_TRUE( + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); // Insert a frame for TL2. We just had frame in TL1, so the next one there is // in three frames away. TL0 is still too far in the past. So, allow // retransmission. vp8_header.temporalIdx = 2; - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs)); + EXPECT_TRUE( + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); // Another TL2, next in TL1 is two frames away. Allow again. 
- EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs)); + EXPECT_TRUE( + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); // Yet another TL2, next in TL1 is now only one frame away, so don't store // for retransmission. EXPECT_FALSE( - rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs)); + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); } TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) { @@ -519,7 +491,7 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) { for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) { vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)]; - rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs); + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs); fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); } @@ -529,22 +501,23 @@ TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) { // layer, but that last frame in TL1 was a long time ago in absolute terms, // so allow retransmission anyway. 
vp8_header.temporalIdx = 1; - EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs)); + EXPECT_TRUE( + rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); } TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( - RtpDependencyDescriptorExtension::kUri, kGenericDescriptorId02); + RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId); FrameDependencyStructure video_structure; video_structure.num_decode_targets = 2; video_structure.templates = { - GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(), - GenericFrameInfo::Builder().S(1).T(0).Dtis("-S").Build(), - GenericFrameInfo::Builder().S(1).T(1).Dtis("-D").Build(), + FrameDependencyTemplate().S(0).T(0).Dtis("SS"), + FrameDependencyTemplate().S(1).T(0).Dtis("-S"), + FrameDependencyTemplate().S(1).T(1).Dtis("-D"), }; - rtp_sender_video_.SetVideoStructure(&video_structure); + rtp_sender_video_->SetVideoStructure(&video_structure); // Send key frame. 
RTPVideoHeader hdr; @@ -555,8 +528,8 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { generic.decode_target_indications = {DecodeTargetIndication::kSwitch, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr, - hdr, kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); ASSERT_EQ(transport_.packets_sent(), 1); DependencyDescriptor descriptor_key; @@ -581,8 +554,8 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { generic.decode_target_indications = {DecodeTargetIndication::kNotPresent, DecodeTargetIndication::kRequired}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr, - hdr, kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); EXPECT_EQ(transport_.packets_sent(), 2); DependencyDescriptor descriptor_delta; @@ -600,23 +573,90 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { ElementsAre(1, 501)); } +TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { + const int64_t kFrameId = 100000; + uint8_t kFrame[100]; + rtp_module_->RegisterRtpHeaderExtension( + RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId); + FrameDependencyStructure video_structure; + video_structure.num_decode_targets = 2; + video_structure.num_chains = 1; + video_structure.decode_target_protected_by_chain = {0, 0}; + video_structure.templates = { + FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}), + }; + rtp_sender_video_->SetVideoStructure(&video_structure); + + RTPVideoHeader hdr; + RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); + 
generic.frame_id = kFrameId; + generic.decode_target_indications = {DecodeTargetIndication::kSwitch, + DecodeTargetIndication::kSwitch}; + generic.chain_diffs = {2}; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + + ASSERT_EQ(transport_.packets_sent(), 1); + DependencyDescriptor descriptor_key; + ASSERT_TRUE(transport_.last_sent_packet() + .GetExtension( + nullptr, &descriptor_key)); + EXPECT_THAT(descriptor_key.frame_dependencies.chain_diffs, + ContainerEq(generic.chain_diffs)); +} + +TEST_P(RtpSenderVideoTest, + PropagatesActiveDecodeTargetsIntoDependencyDescriptor) { + const int64_t kFrameId = 100000; + uint8_t kFrame[100]; + rtp_module_->RegisterRtpHeaderExtension( + RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId); + FrameDependencyStructure video_structure; + video_structure.num_decode_targets = 2; + video_structure.num_chains = 1; + video_structure.decode_target_protected_by_chain = {0, 0}; + video_structure.templates = { + FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}), + }; + rtp_sender_video_->SetVideoStructure(&video_structure); + + RTPVideoHeader hdr; + RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); + generic.frame_id = kFrameId; + generic.decode_target_indications = {DecodeTargetIndication::kSwitch, + DecodeTargetIndication::kSwitch}; + generic.active_decode_targets = 0b01; + generic.chain_diffs = {1}; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + + ASSERT_EQ(transport_.packets_sent(), 1); + DependencyDescriptor descriptor_key; + ASSERT_TRUE(transport_.last_sent_packet() + .GetExtension( + nullptr, &descriptor_key)); + EXPECT_EQ(descriptor_key.active_decode_targets_bitmask, 0b01u); +} + TEST_P(RtpSenderVideoTest, 
SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; rtp_module_->RegisterRtpHeaderExtension( - RtpDependencyDescriptorExtension::kUri, kGenericDescriptorId02); + RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId); FrameDependencyStructure video_structure1; video_structure1.num_decode_targets = 2; video_structure1.templates = { - GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(), - GenericFrameInfo::Builder().S(0).T(1).Dtis("D-").Build(), + FrameDependencyTemplate().S(0).T(0).Dtis("SS"), + FrameDependencyTemplate().S(0).T(1).Dtis("D-"), }; FrameDependencyStructure video_structure2; video_structure2.num_decode_targets = 2; video_structure2.templates = { - GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(), - GenericFrameInfo::Builder().S(0).T(1).Dtis("R-").Build(), + FrameDependencyTemplate().S(0).T(0).Dtis("SS"), + FrameDependencyTemplate().S(0).T(1).Dtis("R-"), }; // Send 1st key frame. @@ -626,9 +666,9 @@ TEST_P(RtpSenderVideoTest, generic.decode_target_indications = {DecodeTargetIndication::kSwitch, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_.SetVideoStructure(&video_structure1); - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr, - hdr, kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SetVideoStructure(&video_structure1); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); // Parse 1st extension. 
ASSERT_EQ(transport_.packets_sent(), 1); DependencyDescriptor descriptor_key1; @@ -643,8 +683,8 @@ TEST_P(RtpSenderVideoTest, generic.decode_target_indications = {DecodeTargetIndication::kDiscardable, DecodeTargetIndication::kNotPresent}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr, - hdr, kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); ASSERT_EQ(transport_.packets_sent(), 2); RtpPacket delta_packet = transport_.last_sent_packet(); @@ -654,9 +694,9 @@ TEST_P(RtpSenderVideoTest, generic.decode_target_indications = {DecodeTargetIndication::kSwitch, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_.SetVideoStructure(&video_structure2); - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr, - hdr, kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SetVideoStructure(&video_structure2); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); // Parse the 2nd key frame. ASSERT_EQ(transport_.packets_sent(), 3); DependencyDescriptor descriptor_key2; @@ -674,16 +714,55 @@ TEST_P(RtpSenderVideoTest, descriptor_key2.attached_structure.get(), &descriptor_delta)); } -void RtpSenderVideoTest::PopulateGenericFrameDescriptor(int version) { - const absl::string_view ext_uri = - (version == 0) ? RtpGenericFrameDescriptorExtension00::kUri - : RtpGenericFrameDescriptorExtension01::kUri; - const int ext_id = - (version == 0) ? 
kGenericDescriptorId00 : kGenericDescriptorId01; +TEST_P(RtpSenderVideoTest, + AuthenticateVideoHeaderWhenDependencyDescriptorExtensionIsUsed) { + static constexpr size_t kFrameSize = 100; + uint8_t kFrame[kFrameSize] = {1, 2, 3, 4}; + + rtp_module_->RegisterRtpHeaderExtension( + RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId); + rtc::scoped_refptr encryptor( + new rtc::RefCountedObject>); + ON_CALL(*encryptor, GetMaxCiphertextByteSize).WillByDefault(ReturnArg<1>()); + ON_CALL(*encryptor, Encrypt) + .WillByDefault(WithArgs<3, 5>( + [](rtc::ArrayView frame, size_t* bytes_written) { + *bytes_written = frame.size(); + return 0; + })); + RTPSenderVideo::Config config; + config.clock = &fake_clock_; + config.rtp_sender = rtp_module_->RtpSender(); + config.field_trials = &field_trials_; + config.frame_encryptor = encryptor; + RTPSenderVideo rtp_sender_video(config); + + FrameDependencyStructure video_structure; + video_structure.num_decode_targets = 1; + video_structure.templates = {FrameDependencyTemplate().Dtis("S")}; + rtp_sender_video.SetVideoStructure(&video_structure); + + // Send key frame. + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + hdr.generic.emplace().decode_target_indications = + video_structure.templates[0].decode_target_indications; + + EXPECT_CALL(*encryptor, + Encrypt(_, _, Not(IsEmpty()), ElementsAreArray(kFrame), _, _)); + rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + // Double check packet with the dependency descriptor is sent. 
+ ASSERT_EQ(transport_.packets_sent(), 1); + EXPECT_TRUE(transport_.last_sent_packet() + .HasExtension()); +} +TEST_P(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; - rtp_module_->RegisterRtpHeaderExtension(ext_uri, ext_id); + rtp_module_->RegisterRtpHeaderExtension( + RtpGenericFrameDescriptorExtension00::kUri, kGenericDescriptorId); RTPVideoHeader hdr; RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); @@ -693,34 +772,20 @@ void RtpSenderVideoTest::PopulateGenericFrameDescriptor(int version) { generic.dependencies.push_back(kFrameId - 1); generic.dependencies.push_back(kFrameId - 500); hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr, - hdr, kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); RtpGenericFrameDescriptor descriptor_wire; EXPECT_EQ(1, transport_.packets_sent()); - if (version == 0) { - ASSERT_TRUE(transport_.last_sent_packet() - .GetExtension( - &descriptor_wire)); - } else { - ASSERT_TRUE(transport_.last_sent_packet() - .GetExtension( - &descriptor_wire)); - } + ASSERT_TRUE(transport_.last_sent_packet() + .GetExtension( + &descriptor_wire)); EXPECT_EQ(static_cast(generic.frame_id), descriptor_wire.FrameId()); EXPECT_EQ(generic.temporal_index, descriptor_wire.TemporalLayer()); EXPECT_THAT(descriptor_wire.FrameDependenciesDiffs(), ElementsAre(1, 500)); EXPECT_EQ(descriptor_wire.SpatialLayersBitmask(), 0b0000'0100); } -TEST_P(RtpSenderVideoTest, PopulateGenericFrameDescriptor00) { - PopulateGenericFrameDescriptor(0); -} - -TEST_P(RtpSenderVideoTest, PopulateGenericFrameDescriptor01) { - PopulateGenericFrameDescriptor(1); -} - void RtpSenderVideoTest:: UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed( int version) { @@ -728,13 +793,8 @@ void RtpSenderVideoTest:: const size_t 
kFrameSize = 100; uint8_t kFrame[kFrameSize]; - if (version == 0) { - rtp_module_->RegisterRtpHeaderExtension( - RtpGenericFrameDescriptorExtension00::kUri, kGenericDescriptorId00); - } else { - rtp_module_->RegisterRtpHeaderExtension( - RtpGenericFrameDescriptorExtension01::kUri, kGenericDescriptorId01); - } + rtp_module_->RegisterRtpHeaderExtension( + RtpGenericFrameDescriptorExtension00::kUri, kGenericDescriptorId); RTPVideoHeader hdr; hdr.codec = kVideoCodecVP8; @@ -746,9 +806,9 @@ void RtpSenderVideoTest:: RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); generic.frame_id = kFrameId; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_.SendVideo(kPayload, VideoCodecType::kVideoCodecVP8, - kTimestamp, 0, kFrame, nullptr, hdr, - kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, VideoCodecType::kVideoCodecVP8, + kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); ASSERT_EQ(transport_.packets_sent(), 1); // Expect only minimal 1-byte vp8 descriptor was generated. 
@@ -765,6 +825,152 @@ TEST_P(RtpSenderVideoTest, UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(1); } +TEST_P(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { + const size_t kFrameSize = 100; + uint8_t kFrame[kFrameSize]; + rtp_module_->RegisterRtpHeaderExtension( + RtpVideoLayersAllocationExtension::kUri, + kVideoLayersAllocationExtensionId); + + VideoLayersAllocation allocation; + VideoLayersAllocation::SpatialLayer layer; + layer.width = 360; + layer.height = 180; + layer.target_bitrate_per_temporal_layer.push_back( + DataRate::KilobitsPerSec(50)); + allocation.resolution_and_frame_rate_is_valid = true; + allocation.active_spatial_layers.push_back(layer); + rtp_sender_video_->SetVideoLayersAllocation(allocation); + + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + + VideoLayersAllocation sent_allocation; + EXPECT_TRUE( + transport_.last_sent_packet() + .GetExtension(&sent_allocation)); + EXPECT_THAT(sent_allocation.active_spatial_layers, ElementsAre(layer)); + + // Next key frame also have the allocation. 
+ rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + EXPECT_TRUE( + transport_.last_sent_packet() + .GetExtension(&sent_allocation)); +} + +TEST_P(RtpSenderVideoTest, + VideoLayersAllocationWithoutResolutionSentOnDeltaFrames) { + const size_t kFrameSize = 100; + uint8_t kFrame[kFrameSize]; + rtp_module_->RegisterRtpHeaderExtension( + RtpVideoLayersAllocationExtension::kUri, + kVideoLayersAllocationExtensionId); + + VideoLayersAllocation allocation; + VideoLayersAllocation::SpatialLayer layer; + layer.width = 360; + layer.height = 180; + allocation.resolution_and_frame_rate_is_valid = true; + layer.target_bitrate_per_temporal_layer.push_back( + DataRate::KilobitsPerSec(50)); + + allocation.active_spatial_layers.push_back(layer); + rtp_sender_video_->SetVideoLayersAllocation(allocation); + + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameDelta; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + VideoLayersAllocation sent_allocation; + EXPECT_TRUE( + transport_.last_sent_packet() + .GetExtension(&sent_allocation)); + ASSERT_THAT(sent_allocation.active_spatial_layers, SizeIs(1)); + EXPECT_FALSE(sent_allocation.resolution_and_frame_rate_is_valid); + EXPECT_THAT(sent_allocation.active_spatial_layers[0] + .target_bitrate_per_temporal_layer, + SizeIs(1)); +} + +TEST_P(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { + const size_t kFrameSize = 100; + uint8_t kFrame[kFrameSize]; + rtp_module_->RegisterRtpHeaderExtension( + RtpVideoLayersAllocationExtension::kUri, + kVideoLayersAllocationExtensionId); + + VideoLayersAllocation allocation; + VideoLayersAllocation::SpatialLayer layer; + layer.target_bitrate_per_temporal_layer.push_back( + DataRate::KilobitsPerSec(50)); + allocation.active_spatial_layers.push_back(layer); + rtp_sender_video_->SetVideoLayersAllocation(allocation); + + RTPVideoHeader hdr; 
+ hdr.frame_type = VideoFrameType::kVideoFrameDelta; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + + VideoLayersAllocation sent_allocation; + EXPECT_TRUE( + transport_.last_sent_packet() + .GetExtension(&sent_allocation)); + EXPECT_THAT(sent_allocation.active_spatial_layers, SizeIs(1)); + + // VideoLayersAllocation not sent on the next delta frame. + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + EXPECT_FALSE(transport_.last_sent_packet() + .HasExtension()); + + // Update allocation. VideoLayesAllocation should be sent on the next frame. + rtp_sender_video_->SetVideoLayersAllocation(allocation); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + EXPECT_TRUE( + transport_.last_sent_packet() + .GetExtension(&sent_allocation)); +} + +TEST_P(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { + const size_t kFrameSize = 100; + uint8_t kFrame[kFrameSize]; + rtp_module_->RegisterRtpHeaderExtension( + RtpVideoLayersAllocationExtension::kUri, + kVideoLayersAllocationExtensionId); + + VideoLayersAllocation allocation; + VideoLayersAllocation::SpatialLayer layer; + layer.target_bitrate_per_temporal_layer.push_back( + DataRate::KilobitsPerSec(50)); + allocation.active_spatial_layers.push_back(layer); + rtp_sender_video_->SetVideoLayersAllocation(allocation); + + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameDelta; + hdr.codec = VideoCodecType::kVideoCodecVP8; + auto& vp8_header = hdr.video_type_header.emplace(); + vp8_header.temporalIdx = 1; + + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + VideoLayersAllocation sent_allocation; + EXPECT_FALSE( + transport_.last_sent_packet() + .GetExtension(&sent_allocation)); + + // Send a delta frame on tl0. 
+ vp8_header.temporalIdx = 0; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + EXPECT_TRUE( + transport_.last_sent_packet() + .GetExtension(&sent_allocation)); +} + TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) { constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678; uint8_t kFrame[kMaxPacketLength]; @@ -773,12 +979,51 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) { RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, - kAbsoluteCaptureTimestampMs, kFrame, nullptr, hdr, - kDefaultExpectedRetransmissionTimeMs); + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, + kAbsoluteCaptureTimestampMs, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + + // It is expected that one and only one of the packets sent on this video + // frame has absolute capture time header extension. And no absolute capture + // time header extensions include capture clock offset. + int packets_with_abs_capture_time = 0; + for (const RtpPacketReceived& packet : transport_.sent_packets()) { + auto absolute_capture_time = + packet.GetExtension(); + if (absolute_capture_time) { + ++packets_with_abs_capture_time; + EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp, + Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs())); + EXPECT_FALSE( + absolute_capture_time->estimated_capture_clock_offset.has_value()); + } + } + EXPECT_EQ(packets_with_abs_capture_time, 1); +} + +// Essentially the same test as AbsoluteCaptureTime but with a field trial. +// After the field trial is experimented, we will remove AbsoluteCaptureTime. 
+TEST_P(RtpSenderVideoTest, AbsoluteCaptureTimeWithCaptureClockOffset) { + field_trials_.set_include_capture_clock_offset(true); + rtp_sender_video_ = std::make_unique( + &fake_clock_, rtp_module_->RtpSender(), field_trials_); + + constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678; + uint8_t kFrame[kMaxPacketLength]; + rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::kUri, + kAbsoluteCaptureTimeExtensionId); + + RTPVideoHeader hdr; + const absl::optional kExpectedCaptureClockOffset = + absl::make_optional(1234); + hdr.frame_type = VideoFrameType::kVideoFrameKey; + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, kAbsoluteCaptureTimestampMs, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs, kExpectedCaptureClockOffset); // It is expected that one and only one of the packets sent on this video - // frame has absolute capture time header extension. + // frame has absolute capture time header extension. And it includes capture + // clock offset. int packets_with_abs_capture_time = 0; for (const RtpPacketReceived& packet : transport_.sent_packets()) { auto absolute_capture_time = @@ -787,13 +1032,225 @@ TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) { ++packets_with_abs_capture_time; EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp, Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs())); + EXPECT_EQ(kExpectedCaptureClockOffset, + absolute_capture_time->estimated_capture_clock_offset); } } EXPECT_EQ(packets_with_abs_capture_time, 1); } +TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) { + // Single packet frames. + constexpr size_t kPacketSize = 123; + uint8_t kFrame[kPacketSize]; + rtp_module_->RegisterRtpHeaderExtension(PlayoutDelayLimits::kUri, + kPlayoutDelayExtensionId); + const VideoPlayoutDelay kExpectedDelay = {10, 20}; + + // Send initial key-frame without playout delay. 
+ RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + hdr.codec = VideoCodecType::kVideoCodecVP8; + auto& vp8_header = hdr.video_type_header.emplace(); + vp8_header.temporalIdx = 0; + + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + EXPECT_FALSE( + transport_.last_sent_packet().HasExtension()); + + // Set playout delay on a discardable frame. + hdr.playout_delay = kExpectedDelay; + hdr.frame_type = VideoFrameType::kVideoFrameDelta; + vp8_header.temporalIdx = 1; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + VideoPlayoutDelay received_delay = VideoPlayoutDelay(); + ASSERT_TRUE(transport_.last_sent_packet().GetExtension( + &received_delay)); + EXPECT_EQ(received_delay, kExpectedDelay); + + // Set playout delay on a non-discardable frame, the extension should still + // be populated since dilvery wasn't guaranteed on the last one. + hdr.playout_delay = VideoPlayoutDelay(); // Indicates "no change". + vp8_header.temporalIdx = 0; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + ASSERT_TRUE(transport_.last_sent_packet().GetExtension( + &received_delay)); + EXPECT_EQ(received_delay, kExpectedDelay); + + // The next frame does not need the extensions since it's delivery has + // already been guaranteed. + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + EXPECT_FALSE( + transport_.last_sent_packet().HasExtension()); + + // Insert key-frame, we need to refresh the state here. 
+ hdr.frame_type = VideoFrameType::kVideoFrameKey; + rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr, + kDefaultExpectedRetransmissionTimeMs); + ASSERT_TRUE(transport_.last_sent_packet().GetExtension( + &received_delay)); + EXPECT_EQ(received_delay, kExpectedDelay); +} + INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead, RtpSenderVideoTest, ::testing::Bool()); +class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test { + public: + RtpSenderVideoWithFrameTransformerTest() + : fake_clock_(kStartTime), + retransmission_rate_limiter_(&fake_clock_, 1000), + rtp_module_(ModuleRtpRtcpImpl2::Create([&] { + RtpRtcpInterface::Configuration config; + config.clock = &fake_clock_; + config.outgoing_transport = &transport_; + config.retransmission_rate_limiter = &retransmission_rate_limiter_; + config.field_trials = &field_trials_; + config.local_media_ssrc = kSsrc; + return config; + }())) { + rtp_module_->SetSequenceNumber(kSeqNum); + rtp_module_->SetStartTimestamp(0); + } + + std::unique_ptr CreateSenderWithFrameTransformer( + rtc::scoped_refptr transformer) { + RTPSenderVideo::Config config; + config.clock = &fake_clock_; + config.rtp_sender = rtp_module_->RtpSender(); + config.field_trials = &field_trials_; + config.frame_transformer = transformer; + return std::make_unique(config); + } + + protected: + FieldTrialBasedConfig field_trials_; + SimulatedClock fake_clock_; + LoopbackTransportTest transport_; + RateLimiter retransmission_rate_limiter_; + std::unique_ptr rtp_module_; +}; + +std::unique_ptr CreateDefaultEncodedImage() { + const uint8_t data[] = {1, 2, 3, 4}; + auto encoded_image = std::make_unique(); + encoded_image->SetEncodedData( + webrtc::EncodedImageBuffer::Create(data, sizeof(data))); + return encoded_image; +} + +TEST_F(RtpSenderVideoWithFrameTransformerTest, + CreateSenderRegistersFrameTransformer) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + EXPECT_CALL(*mock_frame_transformer, + 
RegisterTransformedFrameSinkCallback(_, kSsrc)); + std::unique_ptr rtp_sender_video = + CreateSenderWithFrameTransformer(mock_frame_transformer); +} + +TEST_F(RtpSenderVideoWithFrameTransformerTest, + DestroySenderUnregistersFrameTransformer) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + std::unique_ptr rtp_sender_video = + CreateSenderWithFrameTransformer(mock_frame_transformer); + EXPECT_CALL(*mock_frame_transformer, + UnregisterTransformedFrameSinkCallback(kSsrc)); + rtp_sender_video = nullptr; +} + +TEST_F(RtpSenderVideoWithFrameTransformerTest, + SendEncodedImageTransformsFrame) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + std::unique_ptr rtp_sender_video = + CreateSenderWithFrameTransformer(mock_frame_transformer); + auto encoded_image = CreateDefaultEncodedImage(); + RTPVideoHeader video_header; + + EXPECT_CALL(*mock_frame_transformer, Transform); + rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + *encoded_image, video_header, + kDefaultExpectedRetransmissionTimeMs); +} + +TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + rtc::scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) + .WillOnce(SaveArg<0>(&callback)); + std::unique_ptr rtp_sender_video = + CreateSenderWithFrameTransformer(mock_frame_transformer); + ASSERT_TRUE(callback); + + auto encoded_image = CreateDefaultEncodedImage(); + RTPVideoHeader video_header; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&callback](std::unique_ptr frame) { + callback->OnTransformedFrame(std::move(frame)); + }); + TaskQueueForTest encoder_queue; + encoder_queue.SendTask( + [&] { + rtp_sender_video->SendEncodedImage( + kPayload, kType, kTimestamp, *encoded_image, video_header, + 
kDefaultExpectedRetransmissionTimeMs); + }, + RTC_FROM_HERE); + encoder_queue.WaitForPreviouslyPostedTasks(); + EXPECT_EQ(transport_.packets_sent(), 1); +} + +TEST_F(RtpSenderVideoWithFrameTransformerTest, + TransformableFrameMetadataHasCorrectValue) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + std::unique_ptr rtp_sender_video = + CreateSenderWithFrameTransformer(mock_frame_transformer); + auto encoded_image = CreateDefaultEncodedImage(); + RTPVideoHeader video_header; + video_header.width = 1280u; + video_header.height = 720u; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.frame_id = 10; + generic.temporal_index = 3; + generic.spatial_index = 2; + generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; + generic.dependencies = {5}; + + // Check that the transformable frame passed to the frame transformer has the + // correct metadata. + EXPECT_CALL(*mock_frame_transformer, Transform) + .WillOnce( + [](std::unique_ptr transformable_frame) { + auto frame = + absl::WrapUnique(static_cast( + transformable_frame.release())); + ASSERT_TRUE(frame); + auto metadata = frame->GetMetadata(); + EXPECT_EQ(metadata.GetWidth(), 1280u); + EXPECT_EQ(metadata.GetHeight(), 720u); + EXPECT_EQ(metadata.GetFrameId(), 10); + EXPECT_EQ(metadata.GetTemporalIndex(), 3); + EXPECT_EQ(metadata.GetSpatialIndex(), 2); + EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5)); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), + ElementsAre(DecodeTargetIndication::kSwitch)); + }); + rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + *encoded_image, video_header, + kDefaultExpectedRetransmissionTimeMs); +} + +} // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_utility.cc b/modules/rtp_rtcp/source/rtp_utility.cc index 75ee052b7c..a3d6d6f7f1 100644 --- a/modules/rtp_rtcp/source/rtp_utility.cc +++ b/modules/rtp_rtcp/source/rtp_utility.cc @@ -17,7 +17,6 
@@ #include "api/array_view.h" #include "api/video/video_content_type.h" -#include "api/video/video_frame_marking.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" @@ -245,10 +244,6 @@ bool RtpHeaderParser::Parse(RTPHeader* header, header->extension.has_video_timing = false; header->extension.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false}; - header->extension.has_frame_marking = false; - header->extension.frame_marking = {false, false, false, false, - false, kNoTemporalIdx, 0, 0}; - if (X) { /* RTP header extension, RFC 3550. 0 1 2 3 @@ -497,15 +492,10 @@ void RtpHeaderParser::ParseOneByteExtensionHeader( &header->extension.video_timing); break; } - case kRtpExtensionFrameMarking: { - if (!FrameMarkingExtension::Parse(rtc::MakeArrayView(ptr, len + 1), - &header->extension.frame_marking)) { - RTC_LOG(LS_WARNING) << "Incorrect frame marking len: " << len; - return; - } - header->extension.has_frame_marking = true; + case kRtpExtensionVideoLayersAllocation: + RTC_LOG(WARNING) << "VideoLayersAllocation extension unsupported by " + "rtp header parser."; break; - } case kRtpExtensionRtpStreamId: { std::string name(reinterpret_cast(ptr), len + 1); if (IsLegalRsidName(name)) { @@ -534,7 +524,6 @@ void RtpHeaderParser::ParseOneByteExtensionHeader( break; } case kRtpExtensionGenericFrameDescriptor00: - case kRtpExtensionGenericFrameDescriptor01: case kRtpExtensionGenericFrameDescriptor02: RTC_LOG(WARNING) << "RtpGenericFrameDescriptor unsupported by rtp header parser."; diff --git a/modules/rtp_rtcp/source/rtp_video_header.h b/modules/rtp_rtcp/source/rtp_video_header.h index 714d1eb08c..5977a1d920 100644 --- a/modules/rtp_rtcp/source/rtp_video_header.h +++ b/modules/rtp_rtcp/source/rtp_video_header.h @@ -10,6 +10,7 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_HEADER_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_HEADER_H_ +#include #include #include "absl/container/inlined_vector.h" @@ -19,12 +20,13 @@ 
#include "api/video/color_space.h" #include "api/video/video_codec_type.h" #include "api/video/video_content_type.h" -#include "api/video/video_frame_marking.h" #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "common_types.h" // NOLINT(build/include) #include "modules/video_coding/codecs/h264/include/h264_globals.h" +#ifndef DISABLE_H265 +#include "modules/video_coding/codecs/h265/include/h265_globals.h" +#endif #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" @@ -36,11 +38,20 @@ struct RTPVideoHeaderLegacyGeneric { uint16_t picture_id; }; +#ifndef DISABLE_H265 using RTPVideoTypeHeader = absl::variant; + RTPVideoHeaderH265, + RTPVideoHeaderLegacyGeneric>; +#else +using RTPVideoTypeHeader = absl::variant; +#endif struct RTPVideoHeader { struct GenericDescriptorInfo { @@ -53,7 +64,8 @@ struct RTPVideoHeader { int temporal_index = 0; absl::InlinedVector decode_target_indications; absl::InlinedVector dependencies; - bool discardable = false; + absl::InlinedVector chain_diffs; + std::bitset<32> active_decode_targets = ~uint32_t{0}; }; RTPVideoHeader(); @@ -73,9 +85,8 @@ struct RTPVideoHeader { uint8_t simulcastIdx = 0; VideoCodecType codec = VideoCodecType::kVideoCodecGeneric; - PlayoutDelay playout_delay = {-1, -1}; + VideoPlayoutDelay playout_delay; VideoSendTiming video_timing; - FrameMarking frame_marking = {false, false, false, false, false, 0xFF, 0, 0}; absl::optional color_space; RTPVideoTypeHeader video_type_header; }; diff --git a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc new file mode 100644 index 0000000000..dbaa36b15c --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc @@ -0,0 +1,260 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" + +#include + +#include "api/video/video_layers_allocation.h" +#include "rtc_base/bit_buffer.h" + +namespace webrtc { + +constexpr RTPExtensionType RtpVideoLayersAllocationExtension::kId; +constexpr const char RtpVideoLayersAllocationExtension::kUri[]; + +namespace { + +// Counts the number of bits used in the binary representation of val. +size_t CountBits(uint64_t val) { + size_t bit_count = 0; + while (val != 0) { + bit_count++; + val >>= 1; + } + return bit_count; +} + +// Counts the number of bits used if `val`is encoded using unsigned exponential +// Golomb encoding. +// TODO(bugs.webrtc.org/12000): Move to bit_buffer.cc if Golomb encoding is used +// in the final version. +size_t SizeExponentialGolomb(uint32_t val) { + if (val == std::numeric_limits::max()) { + return 0; + } + uint64_t val_to_encode = static_cast(val) + 1; + return CountBits(val_to_encode) * 2 - 1; +} + +} // namespace + +// TODO(bugs.webrtc.org/12000): Review and revise the content and encoding of +// this extension. This is an experimental first version. + +// 0 1 2 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | NS|RSID|T|X|Res| Bit encoded data... +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// NS: Number of spatial layers/simulcast streams - 1. 2 bits, thus allowing +// passing number of layers/streams up-to 4. +// RSID: RTP stream id this allocation is sent on, numbered from 0. 2 bits. +// T: indicates if all spatial layers have the same amount of temporal layers. 
+// X: indicates if resolution and frame rate per spatial layer is present. +// Res: 2 bits reserved for future use. +// Bit encoded data: consists of following fields written in order: +// 1) T=1: Nt - 2-bit value of number of temporal layers - 1 +// T=0: NS 2-bit values of numbers of temporal layers - 1 for all spatial +// layers from lower to higher. +// 2) Bitrates: +// One value for each spatial x temporal layer. +// Format: RSID (2-bit) SID(2-bit),folowed by bitrate for all temporal +// layers for the RSID,SID tuple. All bitrates are in kbps. All bitrates are +// total required bitrate to receive the corresponding layer, i.e. in +// simulcast mode they include only corresponding spatial layer, in full-svc +// all lower spatial layers are included. All lower temporal layers are also +// included. All bitrates are written using unsigned Exponential Golomb +// encoding. +// 3) [only if X bit is set]. Encoded width, 16-bit, height, 16-bit, +// max frame rate 8-bit per spatial layer in order from lower to higher. 
+ +bool RtpVideoLayersAllocationExtension::Write( + rtc::ArrayView data, + const VideoLayersAllocation& allocation) { + RTC_DCHECK_LT(allocation.rtp_stream_index, + VideoLayersAllocation::kMaxSpatialIds); + RTC_DCHECK_GE(data.size(), ValueSize(allocation)); + rtc::BitBufferWriter writer(data.data(), data.size()); + + // NS: + if (allocation.active_spatial_layers.empty()) + return false; + writer.WriteBits(allocation.active_spatial_layers.size() - 1, 2); + + // RSID: + writer.WriteBits(allocation.rtp_stream_index, 2); + + // T: + bool num_tls_is_the_same = true; + size_t first_layers_number_of_temporal_layers = + allocation.active_spatial_layers.front() + .target_bitrate_per_temporal_layer.size(); + for (const auto& spatial_layer : allocation.active_spatial_layers) { + if (first_layers_number_of_temporal_layers != + spatial_layer.target_bitrate_per_temporal_layer.size()) { + num_tls_is_the_same = false; + break; + } + } + writer.WriteBits(num_tls_is_the_same ? 1 : 0, 1); + + // X: + writer.WriteBits(allocation.resolution_and_frame_rate_is_valid ? 
1 : 0, 1); + + // RESERVED: + writer.WriteBits(/*val=*/0, /*bit_count=*/2); + + if (num_tls_is_the_same) { + writer.WriteBits(first_layers_number_of_temporal_layers - 1, 2); + } else { + for (const auto& spatial_layer : allocation.active_spatial_layers) { + writer.WriteBits( + spatial_layer.target_bitrate_per_temporal_layer.size() - 1, 2); + } + } + + for (const auto& spatial_layer : allocation.active_spatial_layers) { + writer.WriteBits(spatial_layer.rtp_stream_index, 2); + writer.WriteBits(spatial_layer.spatial_id, 2); + for (const DataRate& bitrate : + spatial_layer.target_bitrate_per_temporal_layer) { + writer.WriteExponentialGolomb(bitrate.kbps()); + } + } + + if (allocation.resolution_and_frame_rate_is_valid) { + for (const auto& spatial_layer : allocation.active_spatial_layers) { + writer.WriteUInt16(spatial_layer.width); + writer.WriteUInt16(spatial_layer.height); + writer.WriteUInt8(spatial_layer.frame_rate_fps); + } + } + return true; +} + +bool RtpVideoLayersAllocationExtension::Parse( + rtc::ArrayView data, + VideoLayersAllocation* allocation) { + if (data.size() == 0) + return false; + rtc::BitBuffer reader(data.data(), data.size()); + if (!allocation) + return false; + allocation->active_spatial_layers.clear(); + + uint32_t val; + // NS: + if (!reader.ReadBits(&val, 2)) + return false; + int active_spatial_layers = val + 1; + + // RSID: + if (!reader.ReadBits(&val, 2)) + return false; + allocation->rtp_stream_index = val; + + // T: + if (!reader.ReadBits(&val, 1)) + return false; + bool num_tls_is_constant = (val == 1); + + // X: + if (!reader.ReadBits(&val, 1)) + return false; + allocation->resolution_and_frame_rate_is_valid = (val == 1); + + // RESERVED: + if (!reader.ReadBits(&val, 2)) + return false; + + int number_of_temporal_layers[VideoLayersAllocation::kMaxSpatialIds]; + if (num_tls_is_constant) { + if (!reader.ReadBits(&val, 2)) + return false; + for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { + 
number_of_temporal_layers[sl_idx] = val + 1; + } + } else { + for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { + if (!reader.ReadBits(&val, 2)) + return false; + number_of_temporal_layers[sl_idx] = val + 1; + if (number_of_temporal_layers[sl_idx] > + VideoLayersAllocation::kMaxTemporalIds) + return false; + } + } + + for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { + allocation->active_spatial_layers.emplace_back(); + auto& spatial_layer = allocation->active_spatial_layers.back(); + auto& temporal_layers = spatial_layer.target_bitrate_per_temporal_layer; + if (!reader.ReadBits(&val, 2)) + return false; + spatial_layer.rtp_stream_index = val; + if (!reader.ReadBits(&val, 2)) + return false; + spatial_layer.spatial_id = val; + for (int tl_idx = 0; tl_idx < number_of_temporal_layers[sl_idx]; ++tl_idx) { + reader.ReadExponentialGolomb(&val); + temporal_layers.push_back(DataRate::KilobitsPerSec(val)); + } + } + + if (allocation->resolution_and_frame_rate_is_valid) { + for (auto& spatial_layer : allocation->active_spatial_layers) { + if (!reader.ReadUInt16(&spatial_layer.width)) + return false; + if (!reader.ReadUInt16(&spatial_layer.height)) + return false; + if (!reader.ReadUInt8(&spatial_layer.frame_rate_fps)) + return false; + } + } + return true; +} + +size_t RtpVideoLayersAllocationExtension::ValueSize( + const VideoLayersAllocation& allocation) { + if (allocation.active_spatial_layers.empty()) { + return 0; + } + size_t size_in_bits = 8; // Fixed first byte.¨ + bool num_tls_is_the_same = true; + size_t first_layers_number_of_temporal_layers = + allocation.active_spatial_layers.front() + .target_bitrate_per_temporal_layer.size(); + for (const auto& spatial_layer : allocation.active_spatial_layers) { + if (first_layers_number_of_temporal_layers != + spatial_layer.target_bitrate_per_temporal_layer.size()) { + num_tls_is_the_same = false; + } + size_in_bits += 4; // RSID, SID tuple. 
+ for (const auto& bitrate : + spatial_layer.target_bitrate_per_temporal_layer) { + size_in_bits += SizeExponentialGolomb(bitrate.kbps()); + } + } + if (num_tls_is_the_same) { + size_in_bits += 2; + } else { + for (const auto& spatial_layer : allocation.active_spatial_layers) { + size_in_bits += + 2 * spatial_layer.target_bitrate_per_temporal_layer.size(); + } + } + if (allocation.resolution_and_frame_rate_is_valid) { + size_in_bits += allocation.active_spatial_layers.size() * 5 * 8; + } + return (size_in_bits + 7) / 8; +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h new file mode 100644 index 0000000000..ff8ea2a7a2 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ + +#include "api/video/video_layers_allocation.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" + +namespace webrtc { + +// TODO(bugs.webrtc.org/12000): Note that this extensions is being developed and +// the wire format will likely change. 
+class RtpVideoLayersAllocationExtension { + public: + using value_type = VideoLayersAllocation; + static constexpr RTPExtensionType kId = kRtpExtensionVideoLayersAllocation; + static constexpr const char kUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-layers-allocation00"; + static bool Parse(rtc::ArrayView data, + VideoLayersAllocation* allocation); + static size_t ValueSize(const VideoLayersAllocation& allocation); + static bool Write(rtc::ArrayView data, + const VideoLayersAllocation& allocation); +}; + +} // namespace webrtc +#endif // MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ diff --git a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension_unittest.cc b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension_unittest.cc new file mode 100644 index 0000000000..e51c6372f6 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension_unittest.cc @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" + +#include "api/video/video_layers_allocation.h" +#include "rtc_base/buffer.h" + +#include "test/gmock.h" + +namespace webrtc { +namespace { + +TEST(RtpVideoLayersAllocationExtension, + WriteEmptyLayersAllocationReturnsFalse) { + VideoLayersAllocation written_allocation; + rtc::Buffer buffer( + RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); + EXPECT_FALSE( + RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); +} + +TEST(RtpVideoLayersAllocationExtension, + CanWriteAndParse2SpatialWith2TemporalLayers) { + VideoLayersAllocation written_allocation; + written_allocation.rtp_stream_index = 1; + written_allocation.active_spatial_layers = { + { + /*rtp_stream_index*/ 0, + /*spatial_id*/ 0, + /*target_bitrate_per_temporal_layer*/ + {DataRate::KilobitsPerSec(25), DataRate::KilobitsPerSec(50)}, + /*width*/ 0, + /*height*/ 0, + /*frame_rate_fps*/ 0, + }, + { + /*rtp_stream_index*/ 1, + /*spatial_id*/ 0, + /*target_bitrate_per_temporal_layer*/ + {DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200)}, + /*width*/ 0, + /*height*/ 0, + /*frame_rate_fps*/ 0, + }, + }; + rtc::Buffer buffer( + RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); + EXPECT_TRUE( + RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); + VideoLayersAllocation parsed_allocation; + EXPECT_TRUE( + RtpVideoLayersAllocationExtension::Parse(buffer, &parsed_allocation)); + EXPECT_EQ(written_allocation, parsed_allocation); +} + +TEST(RtpVideoLayersAllocationExtension, + CanWriteAndParseAllocationWithDifferentNumerOfTemporalLayers) { + VideoLayersAllocation written_allocation; + written_allocation.rtp_stream_index = 1; + written_allocation.active_spatial_layers = { + { + /*rtp_stream_index*/ 0, + /*spatial_id*/ 0, + /*target_bitrate_per_temporal_layer*/ + {DataRate::KilobitsPerSec(25), DataRate::KilobitsPerSec(50)}, + /*width*/ 0, + /*height*/ 0, + 
/*frame_rate_fps*/ 0, + }, + { + /*rtp_stream_index*/ 1, + /*spatial_id*/ 0, + /*target_bitrate_per_temporal_layer*/ {DataRate::KilobitsPerSec(100)}, + /*width*/ 0, + /*height*/ 0, + /*frame_rate_fps*/ 0, + }, + }; + rtc::Buffer buffer( + RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); + EXPECT_TRUE( + RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); + VideoLayersAllocation parsed_allocation; + EXPECT_TRUE( + RtpVideoLayersAllocationExtension::Parse(buffer, &parsed_allocation)); + EXPECT_EQ(written_allocation, parsed_allocation); +} + +TEST(RtpVideoLayersAllocationExtension, + CanWriteAndParseAllocationWithResolution) { + VideoLayersAllocation written_allocation; + written_allocation.rtp_stream_index = 1; + written_allocation.resolution_and_frame_rate_is_valid = true; + written_allocation.active_spatial_layers = { + { + /*rtp_stream_index*/ 0, + /*spatial_id*/ 0, + /*target_bitrate_per_temporal_layer*/ + {DataRate::KilobitsPerSec(25), DataRate::KilobitsPerSec(50)}, + /*width*/ 320, + /*height*/ 240, + /*frame_rate_fps*/ 8, + }, + { + /*rtp_stream_index*/ 0, + /*spatial_id*/ 1, + /*target_bitrate_per_temporal_layer*/ + {DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200)}, + /*width*/ 640, + /*height*/ 320, + /*frame_rate_fps*/ 30, + }, + }; + + rtc::Buffer buffer( + RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); + EXPECT_TRUE( + RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); + VideoLayersAllocation parsed_allocation; + EXPECT_TRUE( + RtpVideoLayersAllocationExtension::Parse(buffer, &parsed_allocation)); + EXPECT_EQ(written_allocation, parsed_allocation); +} + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/source_tracker.cc b/modules/rtp_rtcp/source/source_tracker.cc index 22b887c5d2..d6c744512a 100644 --- a/modules/rtp_rtcp/source/source_tracker.cc +++ b/modules/rtp_rtcp/source/source_tracker.cc @@ -25,7 +25,7 @@ void 
SourceTracker::OnFrameDelivered(const RtpPacketInfos& packet_infos) { } int64_t now_ms = clock_->TimeInMilliseconds(); - rtc::CritScope lock_scope(&lock_); + MutexLock lock_scope(&lock_); for (const auto& packet_info : packet_infos) { for (uint32_t csrc : packet_info.csrcs()) { @@ -54,7 +54,7 @@ std::vector SourceTracker::GetSources() const { std::vector sources; int64_t now_ms = clock_->TimeInMilliseconds(); - rtc::CritScope lock_scope(&lock_); + MutexLock lock_scope(&lock_); PruneEntries(now_ms); diff --git a/modules/rtp_rtcp/source/source_tracker.h b/modules/rtp_rtcp/source/source_tracker.h index fcf99bf8b5..0c7627c41d 100644 --- a/modules/rtp_rtcp/source/source_tracker.h +++ b/modules/rtp_rtcp/source/source_tracker.h @@ -20,7 +20,7 @@ #include "absl/types/optional.h" #include "api/rtp_packet_infos.h" #include "api/transport/rtp/rtp_source.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" @@ -116,7 +116,7 @@ class SourceTracker { void PruneEntries(int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); Clock* const clock_; - rtc::CriticalSection lock_; + mutable Mutex lock_; // Entries are stored in reverse chronological order (i.e. with the most // recently updated entries appearing first). Mutability is needed for timeout diff --git a/modules/rtp_rtcp/source/time_util_unittest.cc b/modules/rtp_rtcp/source/time_util_unittest.cc index 906a458f50..4b469bb956 100644 --- a/modules/rtp_rtcp/source/time_util_unittest.cc +++ b/modules/rtp_rtcp/source/time_util_unittest.cc @@ -21,18 +21,18 @@ TEST(TimeUtilTest, TimeMicrosToNtpDoesntChangeBetweenRuns) { // TimeMicrosToNtp is not pure: it behave differently between different // execution of the program, but should behave same during same execution. 
const int64_t time_us = 12345; - clock.SetTime(Timestamp::us(2)); + clock.SetTime(Timestamp::Micros(2)); NtpTime time_ntp = TimeMicrosToNtp(time_us); - clock.SetTime(Timestamp::us(time_us)); + clock.SetTime(Timestamp::Micros(time_us)); EXPECT_EQ(TimeMicrosToNtp(time_us), time_ntp); - clock.SetTime(Timestamp::us(1000000)); + clock.SetTime(Timestamp::Micros(1000000)); EXPECT_EQ(TimeMicrosToNtp(time_us), time_ntp); } TEST(TimeUtilTest, TimeMicrosToNtpKeepsIntervals) { rtc::ScopedFakeClock clock; NtpTime time_ntp1 = TimeMicrosToNtp(rtc::TimeMicros()); - clock.AdvanceTime(TimeDelta::ms(20)); + clock.AdvanceTime(TimeDelta::Millis(20)); NtpTime time_ntp2 = TimeMicrosToNtp(rtc::TimeMicros()); EXPECT_EQ(time_ntp2.ToMs() - time_ntp1.ToMs(), 20); } diff --git a/modules/rtp_rtcp/source/ulpfec_generator.cc b/modules/rtp_rtcp/source/ulpfec_generator.cc index 92e65df187..76d1bb5d87 100644 --- a/modules/rtp_rtcp/source/ulpfec_generator.cc +++ b/modules/rtp_rtcp/source/ulpfec_generator.cc @@ -22,6 +22,7 @@ #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "modules/rtp_rtcp/source/rtp_utility.h" #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -62,128 +63,119 @@ constexpr uint32_t kUnknownSsrc = 0; } // namespace -RedPacket::RedPacket(size_t length) - : data_(new uint8_t[length]), length_(length), header_length_(0) {} - -RedPacket::~RedPacket() = default; - -void RedPacket::CreateHeader(const uint8_t* rtp_header, - size_t header_length, - int red_payload_type, - int payload_type) { - RTC_DCHECK_LE(header_length + kRedForFecHeaderLength, length_); - memcpy(data_.get(), rtp_header, header_length); - // Replace payload type. 
- data_[1] &= 0x80; - data_[1] += red_payload_type; - // Add RED header - // f-bit always 0 - data_[header_length] = static_cast(payload_type); - header_length_ = header_length + kRedForFecHeaderLength; -} - -void RedPacket::SetSeqNum(int seq_num) { - RTC_DCHECK_GE(seq_num, 0); - RTC_DCHECK_LT(seq_num, 1 << 16); - - ByteWriter::WriteBigEndian(&data_[2], seq_num); -} - -void RedPacket::AssignPayload(const uint8_t* payload, size_t length) { - RTC_DCHECK_LE(header_length_ + length, length_); - memcpy(data_.get() + header_length_, payload, length); -} - -void RedPacket::ClearMarkerBit() { - data_[1] &= 0x7F; -} - -uint8_t* RedPacket::data() const { - return data_.get(); -} - -size_t RedPacket::length() const { - return length_; -} - -UlpfecGenerator::UlpfecGenerator() - : UlpfecGenerator(ForwardErrorCorrection::CreateUlpfec(kUnknownSsrc)) {} - -UlpfecGenerator::UlpfecGenerator(std::unique_ptr fec) - : fec_(std::move(fec)), - last_media_packet_rtp_header_length_(0), +UlpfecGenerator::Params::Params() = default; +UlpfecGenerator::Params::Params(FecProtectionParams delta_params, + FecProtectionParams keyframe_params) + : delta_params(delta_params), keyframe_params(keyframe_params) {} + +UlpfecGenerator::UlpfecGenerator(int red_payload_type, + int ulpfec_payload_type, + Clock* clock) + : red_payload_type_(red_payload_type), + ulpfec_payload_type_(ulpfec_payload_type), + clock_(clock), + fec_(ForwardErrorCorrection::CreateUlpfec(kUnknownSsrc)), num_protected_frames_(0), - min_num_media_packets_(1) { - memset(¶ms_, 0, sizeof(params_)); - memset(&new_params_, 0, sizeof(new_params_)); -} + min_num_media_packets_(1), + keyframe_in_process_(false), + fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) {} + +// Used by FlexFecSender, payload types are unused. 
+UlpfecGenerator::UlpfecGenerator(std::unique_ptr fec, + Clock* clock) + : red_payload_type_(0), + ulpfec_payload_type_(0), + clock_(clock), + fec_(std::move(fec)), + num_protected_frames_(0), + min_num_media_packets_(1), + keyframe_in_process_(false), + fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) {} UlpfecGenerator::~UlpfecGenerator() = default; -void UlpfecGenerator::SetFecParameters(const FecProtectionParams& params) { - RTC_DCHECK_GE(params.fec_rate, 0); - RTC_DCHECK_LE(params.fec_rate, 255); +void UlpfecGenerator::SetProtectionParameters( + const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) { + RTC_DCHECK_GE(delta_params.fec_rate, 0); + RTC_DCHECK_LE(delta_params.fec_rate, 255); + RTC_DCHECK_GE(key_params.fec_rate, 0); + RTC_DCHECK_LE(key_params.fec_rate, 255); // Store the new params and apply them for the next set of FEC packets being // produced. - new_params_ = params; - if (params.fec_rate > kHighProtectionThreshold) { - min_num_media_packets_ = kMinMediaPackets; - } else { - min_num_media_packets_ = 1; - } + MutexLock lock(&mutex_); + pending_params_.emplace(delta_params, key_params); } -int UlpfecGenerator::AddRtpPacketAndGenerateFec( - const rtc::CopyOnWriteBuffer& data_buffer, - size_t rtp_header_length) { +void UlpfecGenerator::AddPacketAndGenerateFec(const RtpPacketToSend& packet) { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); RTC_DCHECK(generated_fec_packets_.empty()); + if (media_packets_.empty()) { - params_ = new_params_; + MutexLock lock(&mutex_); + if (pending_params_) { + current_params_ = *pending_params_; + pending_params_.reset(); + + if (CurrentParams().fec_rate > kHighProtectionThreshold) { + min_num_media_packets_ = kMinMediaPackets; + } else { + min_num_media_packets_ = 1; + } + } + + keyframe_in_process_ = packet.is_key_frame(); } + RTC_DCHECK_EQ(packet.is_key_frame(), keyframe_in_process_); + bool complete_frame = false; - const bool marker_bit = (data_buffer[1] & 
kRtpMarkerBitMask) ? true : false; + const bool marker_bit = packet.Marker(); if (media_packets_.size() < kUlpfecMaxMediaPackets) { // Our packet masks can only protect up to |kUlpfecMaxMediaPackets| packets. - std::unique_ptr packet( - new ForwardErrorCorrection::Packet()); - RTC_DCHECK_GE(data_buffer.size(), rtp_header_length); - packet->data = data_buffer; - media_packets_.push_back(std::move(packet)); - // Keep track of the RTP header length, so we can copy the RTP header - // from |packet| to newly generated ULPFEC+RED packets. - RTC_DCHECK_GE(rtp_header_length, kRtpHeaderSize); - last_media_packet_rtp_header_length_ = rtp_header_length; + auto fec_packet = std::make_unique(); + fec_packet->data = packet.Buffer(); + media_packets_.push_back(std::move(fec_packet)); + + // Keep a copy of the last RTP packet, so we can copy the RTP header + // from it when creating newly generated ULPFEC+RED packets. + RTC_DCHECK_GE(packet.headers_size(), kRtpHeaderSize); + last_media_packet_ = packet; } + if (marker_bit) { ++num_protected_frames_; complete_frame = true; } + + auto params = CurrentParams(); + // Produce FEC over at most |params_.max_fec_frames| frames, or as soon as: // (1) the excess overhead (actual overhead - requested/target overhead) is // less than |kMaxExcessOverhead|, and // (2) at least |min_num_media_packets_| media packets is reached. if (complete_frame && - (num_protected_frames_ == params_.max_fec_frames || + (num_protected_frames_ == params.max_fec_frames || (ExcessOverheadBelowMax() && MinimumMediaPacketsReached()))) { // We are not using Unequal Protection feature of the parity erasure code. 
constexpr int kNumImportantPackets = 0; constexpr bool kUseUnequalProtection = false; - int ret = fec_->EncodeFec(media_packets_, params_.fec_rate, - kNumImportantPackets, kUseUnequalProtection, - params_.fec_mask_type, &generated_fec_packets_); + fec_->EncodeFec(media_packets_, params.fec_rate, kNumImportantPackets, + kUseUnequalProtection, params.fec_mask_type, + &generated_fec_packets_); if (generated_fec_packets_.empty()) { ResetState(); } - return ret; } - return 0; } bool UlpfecGenerator::ExcessOverheadBelowMax() const { - return ((Overhead() - params_.fec_rate) < kMaxExcessOverhead); + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + + return ((Overhead() - CurrentParams().fec_rate) < kMaxExcessOverhead); } bool UlpfecGenerator::MinimumMediaPacketsReached() const { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); float average_num_packets_per_frame = static_cast(media_packets_.size()) / num_protected_frames_; int num_media_packets = static_cast(media_packets_.size()); @@ -196,61 +188,81 @@ bool UlpfecGenerator::MinimumMediaPacketsReached() const { } } -bool UlpfecGenerator::FecAvailable() const { - return !generated_fec_packets_.empty(); -} - -size_t UlpfecGenerator::NumAvailableFecPackets() const { - return generated_fec_packets_.size(); +const FecProtectionParams& UlpfecGenerator::CurrentParams() const { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + return keyframe_in_process_ ? 
current_params_.keyframe_params + : current_params_.delta_params; } size_t UlpfecGenerator::MaxPacketOverhead() const { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); return fec_->MaxPacketOverhead(); } -std::vector> UlpfecGenerator::GetUlpfecPacketsAsRed( - int red_payload_type, - int ulpfec_payload_type, - uint16_t first_seq_num) { - std::vector> red_packets; - red_packets.reserve(generated_fec_packets_.size()); - RTC_DCHECK(!media_packets_.empty()); - ForwardErrorCorrection::Packet* last_media_packet = - media_packets_.back().get(); - uint16_t seq_num = first_seq_num; +std::vector> UlpfecGenerator::GetFecPackets() { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); + if (generated_fec_packets_.empty()) { + return std::vector>(); + } + + // Wrap FEC packet (including FEC headers) in a RED packet. Since the + // FEC packets in |generated_fec_packets_| don't have RTP headers, we + // reuse the header from the last media packet. + RTC_CHECK(last_media_packet_.has_value()); + last_media_packet_->SetPayloadSize(0); + + std::vector> fec_packets; + fec_packets.reserve(generated_fec_packets_.size()); + + size_t total_fec_size_bytes = 0; for (const auto* fec_packet : generated_fec_packets_) { - // Wrap FEC packet (including FEC headers) in a RED packet. Since the - // FEC packets in |generated_fec_packets_| don't have RTP headers, we - // reuse the header from the last media packet. 
- RTC_DCHECK_GT(last_media_packet_rtp_header_length_, 0); - std::unique_ptr red_packet( - new RedPacket(last_media_packet_rtp_header_length_ + - kRedForFecHeaderLength + fec_packet->data.size())); - red_packet->CreateHeader(last_media_packet->data.data(), - last_media_packet_rtp_header_length_, - red_payload_type, ulpfec_payload_type); - red_packet->SetSeqNum(seq_num++); - red_packet->ClearMarkerBit(); - red_packet->AssignPayload(fec_packet->data.data(), fec_packet->data.size()); - red_packets.push_back(std::move(red_packet)); + std::unique_ptr red_packet = + std::make_unique(*last_media_packet_); + red_packet->SetPayloadType(red_payload_type_); + red_packet->SetMarker(false); + uint8_t* payload_buffer = red_packet->SetPayloadSize( + kRedForFecHeaderLength + fec_packet->data.size()); + // Primary RED header with F bit unset. + // See https://tools.ietf.org/html/rfc2198#section-3 + payload_buffer[0] = ulpfec_payload_type_; // RED header. + memcpy(&payload_buffer[1], fec_packet->data.data(), + fec_packet->data.size()); + total_fec_size_bytes += red_packet->size(); + red_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); + red_packet->set_allow_retransmission(false); + red_packet->set_is_red(true); + red_packet->set_fec_protect_packet(false); + fec_packets.push_back(std::move(red_packet)); } ResetState(); - return red_packets; + MutexLock lock(&mutex_); + fec_bitrate_.Update(total_fec_size_bytes, clock_->TimeInMilliseconds()); + + return fec_packets; +} + +DataRate UlpfecGenerator::CurrentFecRate() const { + MutexLock lock(&mutex_); + return DataRate::BitsPerSec( + fec_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0)); } int UlpfecGenerator::Overhead() const { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); RTC_DCHECK(!media_packets_.empty()); int num_fec_packets = - fec_->NumFecPackets(media_packets_.size(), params_.fec_rate); + fec_->NumFecPackets(media_packets_.size(), CurrentParams().fec_rate); + // Return the overhead in Q8. 
return (num_fec_packets << 8) / media_packets_.size(); } void UlpfecGenerator::ResetState() { + RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); media_packets_.clear(); - last_media_packet_rtp_header_length_ = 0; + last_media_packet_.reset(); generated_fec_packets_.clear(); num_protected_frames_ = 0; } diff --git a/modules/rtp_rtcp/source/ulpfec_generator.h b/modules/rtp_rtcp/source/ulpfec_generator.h index cdfa1ff67d..32ddc6c4b9 100644 --- a/modules/rtp_rtcp/source/ulpfec_generator.h +++ b/modules/rtp_rtcp/source/ulpfec_generator.h @@ -20,63 +20,56 @@ #include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { class FlexfecSender; -class RedPacket { - public: - explicit RedPacket(size_t length); - ~RedPacket(); - - void CreateHeader(const uint8_t* rtp_header, - size_t header_length, - int red_payload_type, - int payload_type); - void SetSeqNum(int seq_num); - void AssignPayload(const uint8_t* payload, size_t length); - void ClearMarkerBit(); - uint8_t* data() const; - size_t length() const; - - private: - std::unique_ptr data_; - size_t length_; - size_t header_length_; -}; - -class UlpfecGenerator { +class UlpfecGenerator : public VideoFecGenerator { friend class FlexfecSender; public: - UlpfecGenerator(); + UlpfecGenerator(int red_payload_type, int ulpfec_payload_type, Clock* clock); ~UlpfecGenerator(); - void SetFecParameters(const FecProtectionParams& params); + FecType GetFecType() const override { + return VideoFecGenerator::FecType::kUlpFec; + } + absl::optional FecSsrc() override { return absl::nullopt; } + + void SetProtectionParameters(const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) override; // Adds a media packet to the internal buffer. 
When enough media packets // have been added, the FEC packets are generated and stored internally. // These FEC packets are then obtained by calling GetFecPacketsAsRed(). - int AddRtpPacketAndGenerateFec(const rtc::CopyOnWriteBuffer& data_buffer, - size_t rtp_header_length); + void AddPacketAndGenerateFec(const RtpPacketToSend& packet) override; - // Returns true if there are generated FEC packets available. - bool FecAvailable() const; + // Returns the overhead, per packet, for FEC (and possibly RED). + size_t MaxPacketOverhead() const override; - size_t NumAvailableFecPackets() const; + std::vector> GetFecPackets() override; - // Returns the overhead, per packet, for FEC (and possibly RED). - size_t MaxPacketOverhead() const; + // Current rate of FEC packets generated, including all RTP-level headers. + DataRate CurrentFecRate() const override; - // Returns generated FEC packets with RED headers added. - std::vector> GetUlpfecPacketsAsRed( - int red_payload_type, - int ulpfec_payload_type, - uint16_t first_seq_num); + absl::optional GetRtpState() override { return absl::nullopt; } private: - explicit UlpfecGenerator(std::unique_ptr fec); + struct Params { + Params(); + Params(FecProtectionParams delta_params, + FecProtectionParams keyframe_params); + + FecProtectionParams delta_params; + FecProtectionParams keyframe_params; + }; + + UlpfecGenerator(std::unique_ptr fec, Clock* clock); // Overhead is defined as relative to the number of media packets, and not // relative to total number of packets. This definition is inherited from the @@ -97,16 +90,31 @@ class UlpfecGenerator { // (e.g. (2k,2m) vs (k,m)) are generally more effective at recovering losses. 
bool MinimumMediaPacketsReached() const; + const FecProtectionParams& CurrentParams() const; + void ResetState(); - std::unique_ptr fec_; - ForwardErrorCorrection::PacketList media_packets_; - size_t last_media_packet_rtp_header_length_; - std::list generated_fec_packets_; - int num_protected_frames_; - int min_num_media_packets_; - FecProtectionParams params_; - FecProtectionParams new_params_; + const int red_payload_type_; + const int ulpfec_payload_type_; + Clock* const clock_; + + rtc::RaceChecker race_checker_; + const std::unique_ptr fec_ + RTC_GUARDED_BY(race_checker_); + ForwardErrorCorrection::PacketList media_packets_ + RTC_GUARDED_BY(race_checker_); + absl::optional last_media_packet_ + RTC_GUARDED_BY(race_checker_); + std::list generated_fec_packets_ + RTC_GUARDED_BY(race_checker_); + int num_protected_frames_ RTC_GUARDED_BY(race_checker_); + int min_num_media_packets_ RTC_GUARDED_BY(race_checker_); + Params current_params_ RTC_GUARDED_BY(race_checker_); + bool keyframe_in_process_ RTC_GUARDED_BY(race_checker_); + + mutable Mutex mutex_; + absl::optional pending_params_ RTC_GUARDED_BY(mutex_); + RateStatistics fec_bitrate_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc b/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc index 8c1c7ea396..db005ddb49 100644 --- a/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc +++ b/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc @@ -35,11 +35,8 @@ void VerifyHeader(uint16_t seq_num, uint32_t timestamp, int red_payload_type, int fec_payload_type, - RedPacket* packet, - bool marker_bit) { - EXPECT_GT(packet->length(), kRtpHeaderSize); - EXPECT_TRUE(packet->data() != NULL); - uint8_t* data = packet->data(); + bool marker_bit, + const rtc::CopyOnWriteBuffer& data) { // Marker bit not set. EXPECT_EQ(marker_bit ? 
0x80 : 0, data[1] & 0x80); EXPECT_EQ(red_payload_type, data[1] & 0x7F); @@ -52,8 +49,12 @@ void VerifyHeader(uint16_t seq_num, class UlpfecGeneratorTest : public ::testing::Test { protected: - UlpfecGeneratorTest() : packet_generator_(kMediaSsrc) {} + UlpfecGeneratorTest() + : fake_clock_(1), + ulpfec_generator_(kRedPayloadType, kFecPayloadType, &fake_clock_), + packet_generator_(kMediaSsrc) {} + SimulatedClock fake_clock_; UlpfecGenerator ulpfec_generator_; AugmentedPacketGenerator packet_generator_; }; @@ -81,24 +82,22 @@ TEST_F(UlpfecGeneratorTest, NoEmptyFecWithSeqNumGaps) { protected_packets.push_back({21, 0, 55, 0}); protected_packets.push_back({13, 3, 57, 1}); FecProtectionParams params = {117, 3, kFecMaskBursty}; - ulpfec_generator_.SetFecParameters(params); - uint8_t packet[28] = {0}; + ulpfec_generator_.SetProtectionParameters(params, params); for (Packet p : protected_packets) { - if (p.marker_bit) { - packet[1] |= 0x80; + RtpPacketToSend packet(nullptr); + packet.SetMarker(p.marker_bit); + packet.AllocateExtension(RTPExtensionType::kRtpExtensionMid, + p.header_size - packet.headers_size()); + packet.SetSequenceNumber(p.seq_num); + packet.AllocatePayload(p.payload_size); + ulpfec_generator_.AddPacketAndGenerateFec(packet); + + std::vector> fec_packets = + ulpfec_generator_.GetFecPackets(); + if (!p.marker_bit) { + EXPECT_TRUE(fec_packets.empty()); } else { - packet[1] &= ~0x80; - } - ByteWriter::WriteBigEndian(&packet[2], p.seq_num); - ulpfec_generator_.AddRtpPacketAndGenerateFec( - rtc::CopyOnWriteBuffer(packet, p.payload_size + p.header_size), - p.header_size); - size_t num_fec_packets = ulpfec_generator_.NumAvailableFecPackets(); - if (num_fec_packets > 0) { - std::vector> fec_packets = - ulpfec_generator_.GetUlpfecPacketsAsRed(kRedPayloadType, - kFecPayloadType, 100); - EXPECT_EQ(num_fec_packets, fec_packets.size()); + EXPECT_FALSE(fec_packets.empty()); } } } @@ -113,24 +112,28 @@ TEST_F(UlpfecGeneratorTest, OneFrameFec) { constexpr size_t 
kNumPackets = 4; FecProtectionParams params = {15, 3, kFecMaskRandom}; packet_generator_.NewFrame(kNumPackets); - ulpfec_generator_.SetFecParameters(params); // Expecting one FEC packet. + // Expecting one FEC packet. + ulpfec_generator_.SetProtectionParameters(params, params); uint32_t last_timestamp = 0; for (size_t i = 0; i < kNumPackets; ++i) { std::unique_ptr packet = packet_generator_.NextPacket(i, 10); - EXPECT_EQ(0, ulpfec_generator_.AddRtpPacketAndGenerateFec(packet->data, - kRtpHeaderSize)); + RtpPacketToSend rtp_packet(nullptr); + EXPECT_TRUE(rtp_packet.Parse(packet->data.data(), packet->data.size())); + ulpfec_generator_.AddPacketAndGenerateFec(rtp_packet); last_timestamp = packet->header.timestamp; } - EXPECT_TRUE(ulpfec_generator_.FecAvailable()); - const uint16_t seq_num = packet_generator_.NextPacketSeqNum(); - std::vector> red_packets = - ulpfec_generator_.GetUlpfecPacketsAsRed(kRedPayloadType, kFecPayloadType, - seq_num); - EXPECT_FALSE(ulpfec_generator_.FecAvailable()); - ASSERT_EQ(1u, red_packets.size()); - VerifyHeader(seq_num, last_timestamp, kRedPayloadType, kFecPayloadType, - red_packets.front().get(), false); + std::vector> fec_packets = + ulpfec_generator_.GetFecPackets(); + EXPECT_EQ(fec_packets.size(), 1u); + uint16_t seq_num = packet_generator_.NextPacketSeqNum(); + fec_packets[0]->SetSequenceNumber(seq_num); + EXPECT_TRUE(ulpfec_generator_.GetFecPackets().empty()); + + EXPECT_EQ(fec_packets[0]->headers_size(), kRtpHeaderSize); + + VerifyHeader(seq_num, last_timestamp, kRedPayloadType, kFecPayloadType, false, + fec_packets[0]->Buffer()); } TEST_F(UlpfecGeneratorTest, TwoFrameFec) { @@ -145,27 +148,27 @@ TEST_F(UlpfecGeneratorTest, TwoFrameFec) { constexpr size_t kNumFrames = 2; FecProtectionParams params = {15, 3, kFecMaskRandom}; - ulpfec_generator_.SetFecParameters(params); // Expecting one FEC packet. + // Expecting one FEC packet. 
+ ulpfec_generator_.SetProtectionParameters(params, params); uint32_t last_timestamp = 0; for (size_t i = 0; i < kNumFrames; ++i) { packet_generator_.NewFrame(kNumPackets); for (size_t j = 0; j < kNumPackets; ++j) { std::unique_ptr packet = packet_generator_.NextPacket(i * kNumPackets + j, 10); - EXPECT_EQ(0, ulpfec_generator_.AddRtpPacketAndGenerateFec( - packet->data, kRtpHeaderSize)); + RtpPacketToSend rtp_packet(nullptr); + EXPECT_TRUE(rtp_packet.Parse(packet->data.data(), packet->data.size())); + ulpfec_generator_.AddPacketAndGenerateFec(rtp_packet); last_timestamp = packet->header.timestamp; } } - EXPECT_TRUE(ulpfec_generator_.FecAvailable()); + std::vector> fec_packets = + ulpfec_generator_.GetFecPackets(); + EXPECT_EQ(fec_packets.size(), 1u); const uint16_t seq_num = packet_generator_.NextPacketSeqNum(); - std::vector> red_packets = - ulpfec_generator_.GetUlpfecPacketsAsRed(kRedPayloadType, kFecPayloadType, - seq_num); - EXPECT_FALSE(ulpfec_generator_.FecAvailable()); - ASSERT_EQ(1u, red_packets.size()); - VerifyHeader(seq_num, last_timestamp, kRedPayloadType, kFecPayloadType, - red_packets.front().get(), false); + fec_packets[0]->SetSequenceNumber(seq_num); + VerifyHeader(seq_num, last_timestamp, kRedPayloadType, kFecPayloadType, false, + fec_packets[0]->Buffer()); } TEST_F(UlpfecGeneratorTest, MixedMediaRtpHeaderLengths) { @@ -174,34 +177,43 @@ TEST_F(UlpfecGeneratorTest, MixedMediaRtpHeaderLengths) { // Only one frame required to generate FEC. FecProtectionParams params = {127, 1, kFecMaskRandom}; - ulpfec_generator_.SetFecParameters(params); + ulpfec_generator_.SetProtectionParameters(params, params); // Fill up internal buffer with media packets with short RTP header length. 
packet_generator_.NewFrame(kUlpfecMaxMediaPackets + 1); for (size_t i = 0; i < kUlpfecMaxMediaPackets; ++i) { std::unique_ptr packet = packet_generator_.NextPacket(i, 10); - EXPECT_EQ(0, ulpfec_generator_.AddRtpPacketAndGenerateFec( - packet->data, kShortRtpHeaderLength)); - EXPECT_FALSE(ulpfec_generator_.FecAvailable()); + RtpPacketToSend rtp_packet(nullptr); + EXPECT_TRUE(rtp_packet.Parse(packet->data.data(), packet->data.size())); + EXPECT_EQ(rtp_packet.headers_size(), kShortRtpHeaderLength); + ulpfec_generator_.AddPacketAndGenerateFec(rtp_packet); + EXPECT_TRUE(ulpfec_generator_.GetFecPackets().empty()); } // Kick off FEC generation with media packet with long RTP header length. // Since the internal buffer is full, this packet will not be protected. std::unique_ptr packet = packet_generator_.NextPacket(kUlpfecMaxMediaPackets, 10); - EXPECT_EQ(0, ulpfec_generator_.AddRtpPacketAndGenerateFec( - packet->data, kLongRtpHeaderLength)); - EXPECT_TRUE(ulpfec_generator_.FecAvailable()); + RtpPacketToSend rtp_packet(nullptr); + EXPECT_TRUE(rtp_packet.Parse(packet->data.data(), packet->data.size())); + EXPECT_TRUE(rtp_packet.SetPayloadSize(0) != nullptr); + const uint32_t csrcs[]{1}; + rtp_packet.SetCsrcs(csrcs); + + EXPECT_EQ(rtp_packet.headers_size(), kLongRtpHeaderLength); + + ulpfec_generator_.AddPacketAndGenerateFec(rtp_packet); + std::vector> fec_packets = + ulpfec_generator_.GetFecPackets(); + EXPECT_FALSE(fec_packets.empty()); // Ensure that the RED header is placed correctly, i.e. the correct // RTP header length was used in the RED packet creation. 
- const uint16_t seq_num = packet_generator_.NextPacketSeqNum(); - std::vector> red_packets = - ulpfec_generator_.GetUlpfecPacketsAsRed(kRedPayloadType, kFecPayloadType, - seq_num); - for (const auto& red_packet : red_packets) { - EXPECT_EQ(kFecPayloadType, red_packet->data()[kShortRtpHeaderLength]); + uint16_t seq_num = packet_generator_.NextPacketSeqNum(); + for (const auto& fec_packet : fec_packets) { + fec_packet->SetSequenceNumber(seq_num++); + EXPECT_EQ(kFecPayloadType, fec_packet->data()[kShortRtpHeaderLength]); } } diff --git a/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc b/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc index ea85422ffe..26993cabb6 100644 --- a/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc +++ b/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc @@ -42,7 +42,7 @@ UlpfecReceiverImpl::~UlpfecReceiverImpl() { } FecPacketCounter UlpfecReceiverImpl::GetPacketCounter() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return packet_counter_; } @@ -74,8 +74,9 @@ FecPacketCounter UlpfecReceiverImpl::GetPacketCounter() const { // block length: 10 bits Length in bytes of the corresponding data // block excluding header. -bool UlpfecReceiverImpl::AddReceivedRedPacket(const RtpPacket& rtp_packet, - uint8_t ulpfec_payload_type) { +bool UlpfecReceiverImpl::AddReceivedRedPacket( + const RtpPacketReceived& rtp_packet, + uint8_t ulpfec_payload_type) { if (rtp_packet.Ssrc() != ssrc_) { RTC_LOG(LS_WARNING) << "Received RED packet with different SSRC than expected; dropping."; @@ -86,7 +87,7 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket(const RtpPacket& rtp_packet, "packet size; dropping."; return false; } - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); static constexpr uint8_t kRedHeaderLength = 1; @@ -103,6 +104,7 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket(const RtpPacket& rtp_packet, // Get payload type from RED header and sequence number from RTP header. 
uint8_t payload_type = rtp_packet.payload()[0] & 0x7f; received_packet->is_fec = payload_type == ulpfec_payload_type; + received_packet->is_recovered = rtp_packet.recovered(); received_packet->ssrc = rtp_packet.Ssrc(); received_packet->seq_num = rtp_packet.SequenceNumber(); @@ -148,7 +150,7 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket(const RtpPacket& rtp_packet, // TODO(nisse): Drop always-zero return value. int32_t UlpfecReceiverImpl::ProcessReceivedFec() { - crit_sect_.Enter(); + mutex_.Lock(); // If we iterate over |received_packets_| and it contains a packet that cause // us to recurse back to this function (for example a RED packet encapsulating @@ -165,10 +167,10 @@ int32_t UlpfecReceiverImpl::ProcessReceivedFec() { // Send received media packet to VCM. if (!received_packet->is_fec) { ForwardErrorCorrection::Packet* packet = received_packet->pkt; - crit_sect_.Leave(); + mutex_.Unlock(); recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), packet->data.size()); - crit_sect_.Enter(); + mutex_.Lock(); // Create a packet with the buffer to modify it. RtpPacketReceived rtp_packet; const uint8_t* const original_data = packet->data.cdata(); @@ -185,7 +187,13 @@ int32_t UlpfecReceiverImpl::ProcessReceivedFec() { RTC_DCHECK_EQ(packet->data.cdata(), original_data); } } - fec_->DecodeFec(*received_packet, &recovered_packets_); + if (!received_packet->is_recovered) { + // Do not pass recovered packets to FEC. Recovered packet might have + // different set of the RTP header extensions and thus different byte + // representation than the original packet, That will corrupt + // FEC calculation. + fec_->DecodeFec(*received_packet, &recovered_packets_); + } } // Send any recovered media packets to VCM. @@ -199,13 +207,13 @@ int32_t UlpfecReceiverImpl::ProcessReceivedFec() { // Set this flag first; in case the recovered packet carries a RED // header, OnRecoveredPacket will recurse back here. 
recovered_packet->returned = true; - crit_sect_.Leave(); + mutex_.Unlock(); recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), packet->data.size()); - crit_sect_.Enter(); + mutex_.Lock(); } - crit_sect_.Leave(); + mutex_.Unlock(); return 0; } diff --git a/modules/rtp_rtcp/source/ulpfec_receiver_impl.h b/modules/rtp_rtcp/source/ulpfec_receiver_impl.h index 7223696650..2bed042747 100644 --- a/modules/rtp_rtcp/source/ulpfec_receiver_impl.h +++ b/modules/rtp_rtcp/source/ulpfec_receiver_impl.h @@ -21,8 +21,8 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/include/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" -#include "modules/rtp_rtcp/source/rtp_packet.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -33,7 +33,7 @@ class UlpfecReceiverImpl : public UlpfecReceiver { rtc::ArrayView extensions); ~UlpfecReceiverImpl() override; - bool AddReceivedRedPacket(const RtpPacket& rtp_packet, + bool AddReceivedRedPacket(const RtpPacketReceived& rtp_packet, uint8_t ulpfec_payload_type) override; int32_t ProcessReceivedFec() override; @@ -44,7 +44,7 @@ class UlpfecReceiverImpl : public UlpfecReceiver { const uint32_t ssrc_; const RtpHeaderExtensionMap extensions_; - rtc::CriticalSection crit_sect_; + mutable Mutex mutex_; RecoveredPacketReceiver* recovered_packet_callback_; std::unique_ptr fec_; // TODO(nisse): The AddReceivedRedPacket method adds one or two packets to diff --git a/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc b/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc index 0ef8085b63..4d6aa3d2c9 100644 --- a/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc +++ b/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc @@ -20,6 +20,7 @@ #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/fec_test_helper.h" #include 
"modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "test/gmock.h" #include "test/gtest.h" @@ -66,7 +67,8 @@ class UlpfecReceiverTest : public ::testing::Test { // Build a media packet using |packet_generator_| and add it // to the receiver. - void BuildAndAddRedMediaPacket(AugmentedPacket* packet); + void BuildAndAddRedMediaPacket(AugmentedPacket* packet, + bool is_recovered = false); // Build a FEC packet using |packet_generator_| and add it // to the receiver. @@ -120,13 +122,16 @@ void UlpfecReceiverTest::PacketizeFrame( } } -void UlpfecReceiverTest::BuildAndAddRedMediaPacket(AugmentedPacket* packet) { - RtpPacket red_packet = packet_generator_.BuildMediaRedPacket(*packet); +void UlpfecReceiverTest::BuildAndAddRedMediaPacket(AugmentedPacket* packet, + bool is_recovered) { + RtpPacketReceived red_packet = + packet_generator_.BuildMediaRedPacket(*packet, is_recovered); EXPECT_TRUE(receiver_fec_->AddReceivedRedPacket(red_packet, kFecPayloadType)); } void UlpfecReceiverTest::BuildAndAddRedFecPacket(Packet* packet) { - RtpPacket red_packet = packet_generator_.BuildUlpfecRedPacket(*packet); + RtpPacketReceived red_packet = + packet_generator_.BuildUlpfecRedPacket(*packet); EXPECT_TRUE(receiver_fec_->AddReceivedRedPacket(red_packet, kFecPayloadType)); } @@ -174,7 +179,7 @@ void UlpfecReceiverTest::SurvivesMaliciousPacket(const uint8_t* data, std::unique_ptr receiver_fec( UlpfecReceiver::Create(kMediaSsrc, &null_callback, {})); - RtpPacket rtp_packet; + RtpPacketReceived rtp_packet; ASSERT_TRUE(rtp_packet.Parse(data, length)); receiver_fec->AddReceivedRedPacket(rtp_packet, ulpfec_payload_type); } @@ -217,6 +222,43 @@ TEST_F(UlpfecReceiverTest, TwoMediaOneFec) { EXPECT_EQ(first_packet_time_ms, counter.first_packet_time_ms); } +TEST_F(UlpfecReceiverTest, TwoMediaOneFecNotUsesRecoveredPackets) { + constexpr size_t kNumFecPackets = 1u; + std::list augmented_media_packets; + 
ForwardErrorCorrection::PacketList media_packets; + PacketizeFrame(2, 0, &augmented_media_packets, &media_packets); + std::list fec_packets; + EncodeFec(media_packets, kNumFecPackets, &fec_packets); + + FecPacketCounter counter = receiver_fec_->GetPacketCounter(); + EXPECT_EQ(0u, counter.num_packets); + EXPECT_EQ(-1, counter.first_packet_time_ms); + + // Recovery + auto it = augmented_media_packets.begin(); + BuildAndAddRedMediaPacket(*it, /*is_recovered=*/true); + VerifyReconstructedMediaPacket(**it, 1); + EXPECT_EQ(0, receiver_fec_->ProcessReceivedFec()); + counter = receiver_fec_->GetPacketCounter(); + EXPECT_EQ(1u, counter.num_packets); + EXPECT_EQ(0u, counter.num_fec_packets); + EXPECT_EQ(0u, counter.num_recovered_packets); + const int64_t first_packet_time_ms = counter.first_packet_time_ms; + EXPECT_NE(-1, first_packet_time_ms); + + // Drop one media packet. + auto fec_it = fec_packets.begin(); + BuildAndAddRedFecPacket(*fec_it); + ++it; + EXPECT_EQ(0, receiver_fec_->ProcessReceivedFec()); + + counter = receiver_fec_->GetPacketCounter(); + EXPECT_EQ(2u, counter.num_packets); + EXPECT_EQ(1u, counter.num_fec_packets); + EXPECT_EQ(0u, counter.num_recovered_packets); + EXPECT_EQ(first_packet_time_ms, counter.first_packet_time_ms); +} + TEST_F(UlpfecReceiverTest, InjectGarbageFecHeaderLengthRecovery) { // Byte offset 8 is the 'length recovery' field of the FEC header. InjectGarbagePacketLength(8); diff --git a/modules/rtp_rtcp/source/video_fec_generator.h b/modules/rtp_rtcp/source/video_fec_generator.h new file mode 100644 index 0000000000..38e4103cb6 --- /dev/null +++ b/modules/rtp_rtcp/source/video_fec_generator.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_FEC_GENERATOR_H_ +#define MODULES_RTP_RTCP_SOURCE_VIDEO_FEC_GENERATOR_H_ + +#include +#include + +#include "api/units/data_rate.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" + +namespace webrtc { + +class VideoFecGenerator { + public: + VideoFecGenerator() = default; + virtual ~VideoFecGenerator() = default; + + enum class FecType { kFlexFec, kUlpFec }; + virtual FecType GetFecType() const = 0; + // Returns the SSRC used for FEC packets (i.e. FlexFec SSRC). + virtual absl::optional FecSsrc() = 0; + // Returns the overhead, in bytes per packet, for FEC (and possibly RED). + virtual size_t MaxPacketOverhead() const = 0; + // Current rate of FEC packets generated, including all RTP-level headers. + virtual DataRate CurrentFecRate() const = 0; + // Set FEC rates, max frames before FEC is sent, and type of FEC masks. + virtual void SetProtectionParameters( + const FecProtectionParams& delta_params, + const FecProtectionParams& key_params) = 0; + // Called on new media packet to be protected. The generator may choose + // to generate FEC packets at this time, if so they will be stored in an + // internal buffer. + virtual void AddPacketAndGenerateFec(const RtpPacketToSend& packet) = 0; + // Get (and remove) and FEC packets pending in the generator. These packets + // will lack sequence numbers, that needs to be set externally. + // TODO(bugs.webrtc.org/11340): Actually FlexFec sets seq#, fix that! + virtual std::vector> GetFecPackets() = 0; + // Only called on the VideoSendStream queue, after operation has shut down, + // and only populated if there is an RtpState (e.g. FlexFec). 
+ virtual absl::optional GetRtpState() = 0; +}; + +} // namespace webrtc +#endif // MODULES_RTP_RTCP_SOURCE_VIDEO_FEC_GENERATOR_H_ diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer.cc new file mode 100644 index 0000000000..bb0bf09e90 --- /dev/null +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer.cc @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" + +#include +#include + +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +rtc::scoped_refptr VideoRtpDepacketizer::AssembleFrame( + rtc::ArrayView> rtp_payloads) { + size_t frame_size = 0; + for (rtc::ArrayView payload : rtp_payloads) { + frame_size += payload.size(); + } + + rtc::scoped_refptr bitstream = + EncodedImageBuffer::Create(frame_size); + + uint8_t* write_at = bitstream->data(); + for (rtc::ArrayView payload : rtp_payloads) { + memcpy(write_at, payload.data(), payload.size()); + write_at += payload.size(); + } + RTC_DCHECK_EQ(write_at - bitstream->data(), bitstream->size()); + return bitstream; +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer.h b/modules/rtp_rtcp/source/video_rtp_depacketizer.h index 0420e4e646..2266120799 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer.h @@ -11,7 +11,12 @@ #ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H_ #define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H_ 
+#include + #include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/copy_on_write_buffer.h" @@ -27,6 +32,8 @@ class VideoRtpDepacketizer { virtual ~VideoRtpDepacketizer() = default; virtual absl::optional Parse( rtc::CopyOnWriteBuffer rtp_payload) = 0; + virtual rtc::scoped_refptr AssembleFrame( + rtc::ArrayView> rtp_payloads); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h index 9758d846e6..ac8c7e6d11 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h @@ -30,8 +30,9 @@ class VideoRtpDepacketizerAv1 : public VideoRtpDepacketizer { VideoRtpDepacketizerAv1& operator=(const VideoRtpDepacketizerAv1&) = delete; ~VideoRtpDepacketizerAv1() override = default; - static rtc::scoped_refptr AssembleFrame( - rtc::ArrayView> rtp_payloads); + rtc::scoped_refptr AssembleFrame( + rtc::ArrayView> rtp_payloads) + override; absl::optional Parse( rtc::CopyOnWriteBuffer rtp_payload) override; diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc index 9a3e1f734b..e9ad1a1b8e 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc @@ -118,7 +118,7 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameSetsOBUPayloadSizeWhenAbsent) { 0b0'0110'000, // / Frame 20, 30, 40}; // \ OBU rtc::ArrayView payloads[] = {payload1}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); rtc::ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); @@ -133,7 +133,7 @@ TEST(VideoRtpDepacketizerAv1Test, 
AssembleFrameSetsOBUPayloadSizeWhenPresent) { 30, 40}; // \ obu_payload rtc::ArrayView payloads[] = {payload1}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); rtc::ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); @@ -147,7 +147,7 @@ TEST(VideoRtpDepacketizerAv1Test, 0b010'01'000, // | extension_header 20, 30, 40}; // \ OBU rtc::ArrayView payloads[] = {payload1}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); rtc::ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); @@ -164,7 +164,7 @@ TEST(VideoRtpDepacketizerAv1Test, 30, 40}; // \ obu_payload rtc::ArrayView payloads[] = {payload1}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); rtc::ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); @@ -176,7 +176,7 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromOnePacketWithOneObu) { 0b0'0110'000, // / Frame 20}; // \ OBU rtc::ArrayView payloads[] = {payload1}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0110'010, 1, 20)); @@ -190,7 +190,7 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromOnePacketWithTwoObus) { 0b0'0110'000, // / Frame 20}; // \ OBU rtc::ArrayView payloads[] = {payload1}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0001'010, 1, 10, // Sequence Header OBU @@ -203,7 +203,7 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromTwoPacketsWithOneObu) { const 
uint8_t payload2[] = {0b10'01'0000, // aggregation header 40}; rtc::ArrayView payloads[] = {payload1, payload2}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0110'010, 3, 20, 30, 40)); @@ -220,7 +220,7 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromTwoPacketsWithTwoObu) { const uint8_t payload2[] = {0b10'01'0000, // aggregation header 40}; // rtc::ArrayView payloads[] = {payload1, payload2}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0001'010, 1, 10, // SH @@ -251,7 +251,7 @@ TEST(VideoRtpDepacketizerAv1Test, 70, 80, 90}; // \ tail of the frame OBU rtc::ArrayView payloads[] = {payload1, payload2}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre( // Sequence header OBU @@ -276,7 +276,7 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameWithOneObuFromManyPackets) { rtc::ArrayView payloads[] = {payload1, payload2, payload3, payload4}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0110'010, 8, 11, 12, 13, 14, 15, 16, 17, 18)); @@ -308,7 +308,7 @@ TEST(VideoRtpDepacketizerAv1Test, 33, 34, 35, 36}; rtc::ArrayView payloads[] = {payload1, payload2, payload3, payload4}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0011'010, 2, 11, 12, // Frame header @@ -327,7 +327,7 @@ 
TEST(VideoRtpDepacketizerAv1Test, payload1[3] = 0b0'0110'000; // obu_header with size and extension bits unset. payload1[4 + 42] = 0x42; rtc::ArrayView payloads[] = {payload1}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_EQ(frame->size(), 2 + 127u); rtc::ArrayView frame_view(*frame); @@ -352,7 +352,7 @@ TEST(VideoRtpDepacketizerAv1Test, payload2[2 + 20] = 0x20; rtc::ArrayView payloads[] = {payload1, payload2}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_EQ(frame->size(), 3 + 128u); rtc::ArrayView frame_view(*frame); @@ -370,7 +370,7 @@ TEST(VideoRtpDepacketizerAv1Test, const uint8_t payload2[] = {0b10'01'0000, 0b0'0110'000, 10, 20, 30}; rtc::ArrayView payloads[] = {payload1, payload2}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0110'010, 3, 10, 20, 30)); @@ -382,7 +382,7 @@ TEST(VideoRtpDepacketizerAv1Test, const uint8_t payload2[] = {0b10'01'0000}; rtc::ArrayView payloads[] = {payload1, payload2}; - auto frame = VideoRtpDepacketizerAv1::AssembleFrame(payloads); + auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_THAT(rtc::ArrayView(*frame), ElementsAre(0b0'0110'010, 3, 10, 20, 30)); diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc index d7e6147fd6..d335af0244 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc @@ -16,7 +16,6 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "common_video/h264/h264_common.h" -#include 
"modules/include/module_common_types.h" #include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/copy_on_write_buffer.h" diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc new file mode 100644 index 0000000000..6c2519aa28 --- /dev/null +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc @@ -0,0 +1,354 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" + +#include +#include +#include +#include + +#include "absl/base/macros.h" +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "common_video/h264/h264_common.h" +#include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_pps_parser.h" +#include "common_video/h265/h265_sps_parser.h" +#include "common_video/h265/h265_vps_parser.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { + +enum NaluType { + kTrailN = 0, + kTrailR = 1, + kTsaN = 2, + kTsaR = 3, + kStsaN = 4, + kStsaR = 5, + kRadlN = 6, + kRadlR = 7, + kBlaWLp = 16, + kBlaWRadl = 17, + kBlaNLp = 18, + kIdrWRadl = 19, + kIdrNLp = 20, + kCra = 21, + kVps = 32, + kHevcSps = 33, + kHevcPps = 34, + kHevcAud = 35, + kPrefixSei = 39, + kSuffixSei = 40, + kHevcAp = 48, + kHevcFu = 49 +}; + +/* + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + 
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | PayloadHdr (Type=49) | FU header | DONL (cond) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-| +*/ +// Unlike H.264, HEVC NAL header is 2-bytes. +static const size_t kHevcNalHeaderSize = 2; +// H.265's FU is constructed of 2-byte payload header, and 1-byte FU header +static const size_t kHevcFuHeaderSize = 1; +static const size_t kHevcLengthFieldSize = 2; +static const size_t kHevcApHeaderSize = + kHevcNalHeaderSize + kHevcLengthFieldSize; + +enum HevcNalHdrMasks { + kHevcFBit = 0x80, + kHevcTypeMask = 0x7E, + kHevcLayerIDHMask = 0x1, + kHevcLayerIDLMask = 0xF8, + kHevcTIDMask = 0x7, + kHevcTypeMaskN = 0x81, + kHevcTypeMaskInFuHeader = 0x3F +}; + +// Bit masks for FU headers. +enum HevcFuDefs { kHevcSBit = 0x80, kHevcEBit = 0x40, kHevcFuTypeBit = 0x3F }; + +// TODO(pbos): Avoid parsing this here as well as inside the jitter buffer. +bool ParseApStartOffsets(const uint8_t* nalu_ptr, + size_t length_remaining, + std::vector* offsets) { + size_t offset = 0; + while (length_remaining > 0) { + // Buffer doesn't contain room for additional nalu length. 
+ if (length_remaining < sizeof(uint16_t)) + return false; + uint16_t nalu_size = ByteReader::ReadBigEndian(nalu_ptr); + nalu_ptr += sizeof(uint16_t); + length_remaining -= sizeof(uint16_t); + if (nalu_size > length_remaining) + return false; + nalu_ptr += nalu_size; + length_remaining -= nalu_size; + + offsets->push_back(offset + kHevcApHeaderSize); + offset += kHevcLengthFieldSize + nalu_size; + } + return true; +} + +absl::optional ProcessApOrSingleNalu( + rtc::CopyOnWriteBuffer rtp_payload) { + const uint8_t* const payload_data = rtp_payload.cdata(); + absl::optional parsed_payload( + absl::in_place); + parsed_payload->video_payload = rtp_payload; + parsed_payload->video_header.width = 0; + parsed_payload->video_header.height = 0; + parsed_payload->video_header.codec = kVideoCodecH265; + parsed_payload->video_header.is_first_packet_in_frame = true; + auto& h265_header = parsed_payload->video_header.video_type_header + .emplace(); + + const uint8_t* nalu_start = payload_data + kHevcNalHeaderSize; + const size_t nalu_length = rtp_payload.size() - kHevcNalHeaderSize; + uint8_t nal_type = (payload_data[0] & kHevcTypeMask) >> 1; + std::vector nalu_start_offsets; + if (nal_type == H265::NaluType::kAP) { + // Skip the StapA header (StapA NAL type + length). 
+ if (rtp_payload.size() <= kHevcApHeaderSize) { + RTC_LOG(LS_ERROR) << "AP header truncated."; + return absl::nullopt; + } + + if (!ParseApStartOffsets(nalu_start, nalu_length, &nalu_start_offsets)) { + RTC_LOG(LS_ERROR) << "AP packet with incorrect NALU packet lengths."; + return absl::nullopt; + } + + h265_header.packetization_type = kH265AP; + // nal_type = (payload_data[kHevcApHeaderSize] & kHevcTypeMask) >> 1; + } else { + h265_header.packetization_type = kH265SingleNalu; + nalu_start_offsets.push_back(0); + } + h265_header.nalu_type = nal_type; + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; + + nalu_start_offsets.push_back(rtp_payload.size() + kHevcLengthFieldSize); // End offset. + for (size_t i = 0; i < nalu_start_offsets.size() - 1; ++i) { + size_t start_offset = nalu_start_offsets[i]; + // End offset is actually start offset for next unit, excluding length field + // so remove that from this units length. + size_t end_offset = nalu_start_offsets[i + 1] - kHevcLengthFieldSize; + if (end_offset - start_offset < kHevcNalHeaderSize) { // Same as H.264. + RTC_LOG(LS_ERROR) << "AP packet too short"; + return absl::nullopt; + } + + H265NaluInfo nalu; + nalu.type = (payload_data[start_offset] & kHevcTypeMask) >> 1; + nalu.vps_id = -1; + nalu.sps_id = -1; + nalu.pps_id = -1; + start_offset += kHevcNalHeaderSize; + switch (nalu.type) { + case H265::NaluType::kVps: { + absl::optional vps = H265VpsParser::ParseVps( + &payload_data[start_offset], end_offset - start_offset); + if (vps) { + nalu.vps_id = vps->id; + } else { + RTC_LOG(LS_WARNING) << "Failed to parse VPS id from VPS slice."; + } + break; + } + case H265::NaluType::kSps: { + // TODO: Check if VUI is present in SPS and if it needs to be modified to + // avoid excessive decoder latency. + + // Copy any previous data first (likely just the first header). 
+ std::unique_ptr output_buffer(new rtc::Buffer()); + if (start_offset) + output_buffer->AppendData(payload_data, start_offset); + + absl::optional sps = H265SpsParser::ParseSps( + &payload_data[start_offset], end_offset - start_offset); + + if (sps) { + parsed_payload->video_header.width = sps->width; + parsed_payload->video_header.height = sps->height; + nalu.sps_id = sps->id; + nalu.vps_id = sps->vps_id; + } else { + RTC_LOG(LS_WARNING) + << "Failed to parse SPS and VPS id from SPS slice."; + } + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; + break; + } + case H265::NaluType::kPps: { + uint32_t pps_id; + uint32_t sps_id; + if (H265PpsParser::ParsePpsIds(&payload_data[start_offset], + end_offset - start_offset, &pps_id, + &sps_id)) { + nalu.pps_id = pps_id; + nalu.sps_id = sps_id; + } else { + RTC_LOG(LS_WARNING) + << "Failed to parse PPS id and SPS id from PPS slice."; + } + break; + } + case H265::NaluType::kIdrWRadl: + case H265::NaluType::kIdrNLp: + case H265::NaluType::kCra: + parsed_payload->video_header.frame_type = + VideoFrameType::kVideoFrameKey; + ABSL_FALLTHROUGH_INTENDED; + case H265::NaluType::kTrailN: + case H265::NaluType::kTrailR: { + absl::optional pps_id = + H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( + &payload_data[start_offset], end_offset - start_offset, + nalu.type); + if (pps_id) { + nalu.pps_id = *pps_id; + } else { + RTC_LOG(LS_WARNING) << "Failed to parse PPS id from slice of type: " + << static_cast(nalu.type); + } + break; + } + // Slices below don't contain SPS or PPS ids. 
+ case H265::NaluType::kAud: + case H265::NaluType::kTsaN: + case H265::NaluType::kTsaR: + case H265::NaluType::kStsaN: + case H265::NaluType::kStsaR: + case H265::NaluType::kRadlN: + case H265::NaluType::kRadlR: + case H265::NaluType::kBlaWLp: + case H265::NaluType::kBlaWRadl: + case H265::NaluType::kPrefixSei: + case H265::NaluType::kSuffixSei: + break; + case H265::NaluType::kAP: + case H265::NaluType::kFU: + RTC_LOG(LS_WARNING) << "Unexpected AP or FU received."; + return absl::nullopt; + } + + if (h265_header.nalus_length == kMaxNalusPerPacket) { + RTC_LOG(LS_WARNING) + << "Received packet containing more than " << kMaxNalusPerPacket + << " NAL units. Will not keep track sps and pps ids for all of them."; + } else { + h265_header.nalus[h265_header.nalus_length++] = nalu; + } + } + return parsed_payload; +} + +absl::optional ParseFuNalu( + rtc::CopyOnWriteBuffer rtp_payload) { + if (rtp_payload.size() < kHevcFuHeaderSize + kHevcNalHeaderSize) { + RTC_LOG(LS_ERROR) << "FU-A NAL units truncated."; + return absl::nullopt; + } + absl::optional parsed_payload( + absl::in_place); + + uint8_t f = rtp_payload.cdata()[0] & kHevcFBit; + uint8_t layer_id_h = rtp_payload.cdata()[0] & kHevcLayerIDHMask; + uint8_t layer_id_l_unshifted = rtp_payload.cdata()[1] & kHevcLayerIDLMask; + uint8_t tid = rtp_payload.cdata()[1] & kHevcTIDMask; + + uint8_t original_nal_type = rtp_payload.cdata()[2] & kHevcTypeMaskInFuHeader; + bool first_fragment = rtp_payload.cdata()[2] & kHevcSBit; + H265NaluInfo nalu; + nalu.type = original_nal_type; + nalu.vps_id = -1; + nalu.sps_id = -1; + nalu.pps_id = -1; + if (first_fragment) { + absl::optional pps_id = + H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( + rtp_payload.cdata() + kHevcNalHeaderSize + kHevcFuHeaderSize, + rtp_payload.size() - kHevcFuHeaderSize, nalu.type); + if (pps_id) { + nalu.pps_id = *pps_id; + } else { + RTC_LOG(LS_WARNING) + << "Failed to parse PPS from first fragment of FU NAL " + "unit with original type: " + << 
static_cast(nalu.type); + } + rtp_payload = + rtp_payload.Slice(1, rtp_payload.size() - 1); + rtp_payload[0] = f | original_nal_type << 1 | layer_id_h; + rtp_payload[1] = layer_id_l_unshifted | tid; + parsed_payload->video_payload = std::move(rtp_payload); + } else { + parsed_payload->video_payload = + rtp_payload.Slice(kHevcNalHeaderSize + kHevcFuHeaderSize, + rtp_payload.size() - kHevcNalHeaderSize - kHevcFuHeaderSize); + } + + if (original_nal_type == H265::NaluType::kIdrWRadl + || original_nal_type == H265::NaluType::kIdrNLp + || original_nal_type == H265::NaluType::kCra) { + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; + } else { + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; + } + parsed_payload->video_header.width = 0; + parsed_payload->video_header.height = 0; + parsed_payload->video_header.codec = kVideoCodecH265; + parsed_payload->video_header.is_first_packet_in_frame = first_fragment; + auto& h265_header = parsed_payload->video_header.video_type_header + .emplace(); + h265_header.packetization_type = kH265FU; + h265_header.nalu_type = original_nal_type; + if (first_fragment) { + h265_header.nalus[h265_header.nalus_length] = nalu; + h265_header.nalus_length = 1; + } + return parsed_payload; +} + +} // namespace + +absl::optional +VideoRtpDepacketizerH265::Parse(rtc::CopyOnWriteBuffer rtp_payload) { + if (rtp_payload.size() == 0) { + RTC_LOG(LS_ERROR) << "Empty payload."; + return absl::nullopt; + } + + uint8_t nal_type = (rtp_payload.cdata()[0] & kHevcTypeMask) >> 1; + + if (nal_type == H265::NaluType::kFU) { + // Fragmented NAL units (FU-A). + return ParseFuNalu(std::move(rtp_payload)); + } else { + // We handle STAP-A and single NALU's the same way here. The jitter buffer + // will depacketize the STAP-A into NAL units later. + // TODO(sprang): Parse STAP-A offsets here and store in fragmentation vec. 
+ return ProcessApOrSingleNalu(std::move(rtp_payload)); + } +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h new file mode 100644 index 0000000000..4ae90cb6fe --- /dev/null +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ +#define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ + +#include "absl/types/optional.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "rtc_base/copy_on_write_buffer.h" + +namespace webrtc { +class VideoRtpDepacketizerH265 : public VideoRtpDepacketizer { + public: + ~VideoRtpDepacketizerH265() override = default; + + absl::optional Parse( + rtc::CopyOnWriteBuffer rtp_payload) override; +}; +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ diff --git a/modules/utility/BUILD.gn b/modules/utility/BUILD.gn index 4e85129e43..df6945ab2c 100644 --- a/modules/utility/BUILD.gn +++ b/modules/utility/BUILD.gn @@ -26,7 +26,7 @@ rtc_library("utility") { } if (is_ios) { - libs = [ "AVFoundation.framework" ] + frameworks = [ "AVFoundation.framework" ] } deps = [ @@ -60,6 +60,7 @@ if (rtc_include_tests) { ":utility", "..:module_api", "../../api/task_queue", + "../../api/task_queue:task_queue_test", "../../rtc_base:rtc_base_approved", "../../test:test_support", ] diff --git a/modules/utility/OWNERS b/modules/utility/OWNERS index debeaab6d4..5904b95df7 100644 --- a/modules/utility/OWNERS 
+++ b/modules/utility/OWNERS @@ -1,6 +1 @@ perkj@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/modules/utility/include/mock/mock_process_thread.h b/modules/utility/include/mock/mock_process_thread.h index 6826f5275c..e356bca99f 100644 --- a/modules/utility/include/mock/mock_process_thread.h +++ b/modules/utility/include/mock/mock_process_thread.h @@ -21,23 +21,20 @@ namespace webrtc { class MockProcessThread : public ProcessThread { public: - // TODO(nisse): Valid overrides commented out, because the gmock - // methods don't use any override declarations, and we want to avoid - // warnings from -Winconsistent-missing-override. See - // http://crbug.com/428099. - MOCK_METHOD0(Start, void()); - MOCK_METHOD0(Stop, void()); - MOCK_METHOD1(WakeUp, void(Module* module)); - MOCK_METHOD1(PostTask, void(QueuedTask* task)); - MOCK_METHOD2(RegisterModule, void(Module* module, const rtc::Location&)); - MOCK_METHOD1(DeRegisterModule, void(Module* module)); - - // MOCK_METHOD1 gets confused with mocking this method, so we work around it - // by overriding the method from the interface and forwarding the call to a - // mocked, simpler method. 
- void PostTask(std::unique_ptr task) /*override*/ { - PostTask(task.get()); - } + MOCK_METHOD(void, Start, (), (override)); + MOCK_METHOD(void, Stop, (), (override)); + MOCK_METHOD(void, Delete, (), (override)); + MOCK_METHOD(void, WakeUp, (Module*), (override)); + MOCK_METHOD(void, PostTask, (std::unique_ptr), (override)); + MOCK_METHOD(void, + PostDelayedTask, + (std::unique_ptr, uint32_t), + (override)); + MOCK_METHOD(void, + RegisterModule, + (Module*, const rtc::Location&), + (override)); + MOCK_METHOD(void, DeRegisterModule, (Module*), (override)); }; } // namespace webrtc diff --git a/modules/utility/include/process_thread.h b/modules/utility/include/process_thread.h index 9b02a7ea9a..7786dacf94 100644 --- a/modules/utility/include/process_thread.h +++ b/modules/utility/include/process_thread.h @@ -14,6 +14,7 @@ #include #include "api/task_queue/queued_task.h" +#include "api/task_queue/task_queue_base.h" namespace rtc { class Location; @@ -26,9 +27,9 @@ class Module; // interface. There exists one override besides ProcessThreadImpl, // MockProcessThread, but when looking at how it is used, it seems // a nullptr might suffice (or simply an actual ProcessThread instance). -class ProcessThread { +class ProcessThread : public TaskQueueBase { public: - virtual ~ProcessThread(); + ~ProcessThread() override; static std::unique_ptr Create(const char* thread_name); @@ -45,14 +46,6 @@ class ProcessThread { // Can be called on any thread. virtual void WakeUp(Module* module) = 0; - // Queues a task object to run on the worker thread. Ownership of the - // task object is transferred to the ProcessThread and the object will - // either be deleted after running on the worker thread, or on the - // construction thread of the ProcessThread instance, if the task did not - // get a chance to run (e.g. posting the task while shutting down or when - // the thread never runs). 
- virtual void PostTask(std::unique_ptr task) = 0; - // Adds a module that will start to receive callbacks on the worker thread. // Can be called from any thread. virtual void RegisterModule(Module* module, const rtc::Location& from) = 0; diff --git a/modules/utility/source/process_thread_impl.cc b/modules/utility/source/process_thread_impl.cc index 6739937103..3709306925 100644 --- a/modules/utility/source/process_thread_impl.cc +++ b/modules/utility/source/process_thread_impl.cc @@ -14,6 +14,7 @@ #include "modules/include/module.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -50,12 +51,24 @@ ProcessThreadImpl::~ProcessThreadImpl() { RTC_DCHECK(!thread_.get()); RTC_DCHECK(!stop_); + while (!delayed_tasks_.empty()) { + delete delayed_tasks_.top().task; + delayed_tasks_.pop(); + } + while (!queue_.empty()) { delete queue_.front(); queue_.pop(); } } +void ProcessThreadImpl::Delete() { + RTC_LOG(LS_WARNING) << "Process thread " << thread_name_ + << " is destroyed as a TaskQueue."; + Stop(); + delete this; +} + void ProcessThreadImpl::Start() { RTC_DCHECK(thread_checker_.IsCurrent()); RTC_DCHECK(!thread_.get()); @@ -113,6 +126,21 @@ void ProcessThreadImpl::PostTask(std::unique_ptr task) { wake_up_.Set(); } +void ProcessThreadImpl::PostDelayedTask(std::unique_ptr task, + uint32_t milliseconds) { + int64_t run_at_ms = rtc::TimeMillis() + milliseconds; + bool recalculate_wakeup_time; + { + rtc::CritScope lock(&lock_); + recalculate_wakeup_time = + delayed_tasks_.empty() || run_at_ms < delayed_tasks_.top().run_at_ms; + delayed_tasks_.emplace(run_at_ms, std::move(task)); + } + if (recalculate_wakeup_time) { + wake_up_.Set(); + } +} + void ProcessThreadImpl::RegisterModule(Module* module, const rtc::Location& from) { RTC_DCHECK(thread_checker_.IsCurrent()); @@ -166,6 +194,7 @@ void ProcessThreadImpl::DeRegisterModule(Module* module) { // static void ProcessThreadImpl::Run(void* obj) { 
ProcessThreadImpl* impl = static_cast(obj); + CurrentTaskQueueSetter set_current(impl); while (impl->Process()) { } } @@ -206,12 +235,23 @@ bool ProcessThreadImpl::Process() { next_checkpoint = m.next_callback; } + while (!delayed_tasks_.empty() && delayed_tasks_.top().run_at_ms <= now) { + queue_.push(delayed_tasks_.top().task); + delayed_tasks_.pop(); + } + + if (!delayed_tasks_.empty()) { + next_checkpoint = + std::min(next_checkpoint, delayed_tasks_.top().run_at_ms); + } + while (!queue_.empty()) { QueuedTask* task = queue_.front(); queue_.pop(); lock_.Leave(); - task->Run(); - delete task; + if (task->Run()) { + delete task; + } lock_.Enter(); } } diff --git a/modules/utility/source/process_thread_impl.h b/modules/utility/source/process_thread_impl.h index 8cac5742f5..ed9f5c3bfc 100644 --- a/modules/utility/source/process_thread_impl.h +++ b/modules/utility/source/process_thread_impl.h @@ -20,7 +20,7 @@ #include "api/task_queue/queued_task.h" #include "modules/include/module.h" #include "modules/utility/include/process_thread.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/event.h" #include "rtc_base/location.h" #include "rtc_base/platform_thread.h" @@ -38,6 +38,8 @@ class ProcessThreadImpl : public ProcessThread { void WakeUp(Module* module) override; void PostTask(std::unique_ptr task) override; + void PostDelayedTask(std::unique_ptr task, + uint32_t milliseconds) override; void RegisterModule(Module* module, const rtc::Location& from) override; void DeRegisterModule(Module* module) override; @@ -64,16 +66,34 @@ class ProcessThreadImpl : public ProcessThread { private: ModuleCallback& operator=(ModuleCallback&); }; + struct DelayedTask { + DelayedTask(int64_t run_at_ms, std::unique_ptr task) + : run_at_ms(run_at_ms), task(task.release()) {} + friend bool operator<(const DelayedTask& lhs, const DelayedTask& rhs) { + // Earliest DelayedTask should be at the top of the priority queue. 
+ return lhs.run_at_ms > rhs.run_at_ms; + } + int64_t run_at_ms; + // DelayedTask owns the |task|, but some delayed tasks must be removed from + // the std::priority_queue, but mustn't be deleted. std::priority_queue does + // not give non-const access to the values, so storing unique_ptr would + // delete the task as soon as it is removed from the priority queue. + // Thus lifetime of the |task| is managed manually. + QueuedTask* task; + }; typedef std::list ModuleList; + void Delete() override; + // Warning: For some reason, if |lock_| comes immediately before |modules_| // with the current class layout, we will start to have mysterious crashes // on Mac 10.9 debug. I (Tommi) suspect we're hitting some obscure alignemnt // issues, but I haven't figured out what they are, if there are alignment // requirements for mutexes on Mac or if there's something else to it. // So be careful with changing the layout. - rtc::CriticalSection lock_; // Used to guard modules_, tasks_ and stop_. + rtc::RecursiveCriticalSection + lock_; // Used to guard modules_, tasks_ and stop_. 
rtc::ThreadChecker thread_checker_; rtc::Event wake_up_; @@ -82,6 +102,7 @@ class ProcessThreadImpl : public ProcessThread { ModuleList modules_; std::queue queue_; + std::priority_queue delayed_tasks_ RTC_GUARDED_BY(lock_); bool stop_; const char* thread_name_; }; diff --git a/modules/utility/source/process_thread_impl_unittest.cc b/modules/utility/source/process_thread_impl_unittest.cc index bc893cf2a1..1fef0b6740 100644 --- a/modules/utility/source/process_thread_impl_unittest.cc +++ b/modules/utility/source/process_thread_impl_unittest.cc @@ -14,6 +14,7 @@ #include #include "api/task_queue/queued_task.h" +#include "api/task_queue/task_queue_test.h" #include "modules/include/module.h" #include "rtc_base/location.h" #include "rtc_base/time_utils.h" @@ -36,9 +37,9 @@ static const int kEventWaitTimeout = 500; class MockModule : public Module { public: - MOCK_METHOD0(TimeUntilNextProcess, int64_t()); - MOCK_METHOD0(Process, void()); - MOCK_METHOD1(ProcessThreadAttached, void(ProcessThread*)); + MOCK_METHOD(int64_t, TimeUntilNextProcess, (), (override)); + MOCK_METHOD(void, Process, (), (override)); + MOCK_METHOD(void, ProcessThreadAttached, (ProcessThread*), (override)); }; class RaiseEventTask : public QueuedTask { @@ -310,4 +311,21 @@ TEST(ProcessThreadImpl, PostTask) { thread.Stop(); } +class ProcessThreadFactory : public TaskQueueFactory { + public: + ~ProcessThreadFactory() override = default; + std::unique_ptr CreateTaskQueue( + absl::string_view name, + Priority priority) const override { + ProcessThreadImpl* process_thread = new ProcessThreadImpl("thread"); + process_thread->Start(); + return std::unique_ptr(process_thread); + } +}; + +INSTANTIATE_TEST_SUITE_P( + ProcessThread, + TaskQueueTest, + testing::Values(std::make_unique)); + } // namespace webrtc diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn index 1c02412264..6788bf69c9 100644 --- a/modules/video_capture/BUILD.gn +++ b/modules/video_capture/BUILD.gn @@ -30,17 +30,16 @@ 
rtc_library("video_capture_module") { "..:module_api", "../../api:scoped_refptr", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../common_video", "../../media:rtc_media_base", "../../rtc_base:rtc_base_approved", "../../rtc_base:stringutils", - "../../rtc_base/synchronization:rw_lock_wrapper", + "../../rtc_base/synchronization:mutex", "../../system_wrappers", - "//third_party/abseil-cpp/absl/strings", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (!build_with_chromium) { @@ -51,10 +50,11 @@ if (!build_with_chromium) { "../../api:scoped_refptr", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "../../rtc_base/synchronization:mutex", "../../system_wrappers", ] - if (is_linux) { + if (is_linux || is_chromeos) { sources = [ "linux/device_info_linux.cc", "linux/device_info_linux.h", @@ -76,7 +76,12 @@ if (!build_with_chromium) { "windows/video_capture_factory_windows.cc", ] - libs = [ "strmiids.lib" ] + libs = [ + "ole32.lib", + "oleaut32.lib", + "strmiids.lib", + "user32.lib", + ] if (build_with_mozilla) { sources += [ @@ -106,13 +111,13 @@ if (!build_with_chromium) { rtc_test("video_capture_tests") { sources = [ "test/video_capture_unittest.cc" ] ldflags = [] - if (is_linux || is_mac) { + if (is_linux || is_chromeos || is_mac) { ldflags += [ "-lpthread", "-lm", ] } - if (is_linux) { + if (is_linux || is_chromeos) { ldflags += [ "-lrt", "-lXext", @@ -125,19 +130,19 @@ if (!build_with_chromium) { ":video_capture_module", "../../api:scoped_refptr", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../common_video", "../../rtc_base:rtc_base_approved", + "../../rtc_base/synchronization:mutex", "../../system_wrappers", "../../test:frame_utils", + "../../test:test_main", "../../test:test_support", "../../test:video_test_common", "../utility", "//testing/gtest", 
"//third_party/abseil-cpp/absl/memory", ] - deps += [ "../../test:test_main" ] } } } diff --git a/modules/video_capture/OWNERS b/modules/video_capture/OWNERS index 32e7151453..d728122343 100644 --- a/modules/video_capture/OWNERS +++ b/modules/video_capture/OWNERS @@ -1,10 +1,3 @@ -glaznev@webrtc.org mflodman@webrtc.org perkj@webrtc.org tkchin@webrtc.org - - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/modules/video_capture/device_info_impl.cc b/modules/video_capture/device_info_impl.cc index 91a72326cf..846977e68f 100644 --- a/modules/video_capture/device_info_impl.cc +++ b/modules/video_capture/device_info_impl.cc @@ -25,34 +25,25 @@ namespace webrtc { namespace videocapturemodule { DeviceInfoImpl::DeviceInfoImpl() - : _apiLock(*RWLockWrapper::CreateRWLock()), - _lastUsedDeviceName(NULL), - _lastUsedDeviceNameLength(0) {} + : _lastUsedDeviceName(NULL), _lastUsedDeviceNameLength(0) {} DeviceInfoImpl::~DeviceInfoImpl(void) { - _apiLock.AcquireLockExclusive(); + MutexLock lock(&_apiLock); free(_lastUsedDeviceName); - _apiLock.ReleaseLockExclusive(); - - delete &_apiLock; } int32_t DeviceInfoImpl::NumberOfCapabilities(const char* deviceUniqueIdUTF8) { if (!deviceUniqueIdUTF8) return -1; - _apiLock.AcquireLockShared(); + MutexLock lock(&_apiLock); // Is it the same device that is asked for again. if (absl::EqualsIgnoreCase( deviceUniqueIdUTF8, absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { - _apiLock.ReleaseLockShared(); return static_cast(_captureCapabilities.size()); } - // Need to get exclusive rights to create the new capability map. 
- _apiLock.ReleaseLockShared(); - WriteLockScoped cs2(_apiLock); int32_t ret = CreateCapabilityMap(deviceUniqueIdUTF8); return ret; @@ -63,20 +54,14 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8, VideoCaptureCapability& capability) { assert(deviceUniqueIdUTF8 != NULL); - ReadLockScoped cs(_apiLock); + MutexLock lock(&_apiLock); if (!absl::EqualsIgnoreCase( deviceUniqueIdUTF8, absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { - _apiLock.ReleaseLockShared(); - _apiLock.AcquireLockExclusive(); if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) { - _apiLock.ReleaseLockExclusive(); - _apiLock.AcquireLockShared(); return -1; } - _apiLock.ReleaseLockExclusive(); - _apiLock.AcquireLockShared(); } // Make sure the number is valid @@ -98,17 +83,13 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( if (!deviceUniqueIdUTF8) return -1; - ReadLockScoped cs(_apiLock); + MutexLock lock(&_apiLock); if (!absl::EqualsIgnoreCase( deviceUniqueIdUTF8, absl::string_view(_lastUsedDeviceName, _lastUsedDeviceNameLength))) { - _apiLock.ReleaseLockShared(); - _apiLock.AcquireLockExclusive(); if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8)) { return -1; } - _apiLock.ReleaseLockExclusive(); - _apiLock.AcquireLockShared(); } int32_t bestformatIndex = -1; diff --git a/modules/video_capture/device_info_impl.h b/modules/video_capture/device_info_impl.h index 37a457ce8a..4b47389609 100644 --- a/modules/video_capture/device_info_impl.h +++ b/modules/video_capture/device_info_impl.h @@ -18,7 +18,8 @@ #include "api/video/video_rotation.h" #include "modules/video_capture/video_capture.h" #include "modules/video_capture/video_capture_defines.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { namespace videocapturemodule { @@ -45,15 +46,16 @@ class DeviceInfoImpl : public VideoCaptureModule::DeviceInfo { * Fills the member variable 
_captureCapabilities with capabilities for the * given device name. */ - virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) = 0; + virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock) = 0; protected: // Data members typedef std::vector VideoCaptureCapabilities; - VideoCaptureCapabilities _captureCapabilities; - RWLockWrapper& _apiLock; - char* _lastUsedDeviceName; - uint32_t _lastUsedDeviceNameLength; + VideoCaptureCapabilities _captureCapabilities RTC_GUARDED_BY(_apiLock); + Mutex _apiLock; + char* _lastUsedDeviceName RTC_GUARDED_BY(_apiLock); + uint32_t _lastUsedDeviceNameLength RTC_GUARDED_BY(_apiLock); }; } // namespace videocapturemodule } // namespace webrtc diff --git a/modules/video_capture/linux/device_info_linux.cc b/modules/video_capture/linux/device_info_linux.cc index bac5d4078a..3c8fdd20fa 100644 --- a/modules/video_capture/linux/device_info_linux.cc +++ b/modules/video_capture/linux/device_info_linux.cc @@ -47,11 +47,19 @@ uint32_t DeviceInfoLinux::NumberOfDevices() { uint32_t count = 0; char device[20]; int fd = -1; + struct v4l2_capability cap; /* detect /dev/video [0-63]VideoCaptureModule entries */ for (int n = 0; n < 64; n++) { sprintf(device, "/dev/video%d", n); if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || + !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + close(fd); + continue; + } + close(fd); count++; } @@ -74,9 +82,16 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber, char device[20]; int fd = -1; bool found = false; + struct v4l2_capability cap; for (int n = 0; n < 64; n++) { sprintf(device, "/dev/video%d", n); if ((fd = open(device, O_RDONLY)) != -1) { + // query device capabilities and make sure this is a video capture device + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || + !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + close(fd); + 
continue; + } if (count == deviceNumber) { // Found the device found = true; @@ -92,7 +107,6 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber, return -1; // query device capabilities - struct v4l2_capability cap; if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { RTC_LOG(LS_INFO) << "error in querying the device capability for device " << device << ". errno = " << errno; @@ -153,6 +167,11 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(const char* deviceUniqueIdUTF8) { // query device capabilities struct v4l2_capability cap; if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + // skip devices without video capture capability + if (!(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + continue; + } + if (cap.bus_info[0] != 0) { if (strncmp((const char*)cap.bus_info, (const char*)deviceUniqueIdUTF8, strlen((const char*)deviceUniqueIdUTF8)) == diff --git a/modules/video_capture/linux/device_info_linux.h b/modules/video_capture/linux/device_info_linux.h index a320c36fde..304ae71230 100644 --- a/modules/video_capture/linux/device_info_linux.h +++ b/modules/video_capture/linux/device_info_linux.h @@ -33,13 +33,14 @@ class DeviceInfoLinux : public DeviceInfoImpl { * Fills the membervariable _captureCapabilities with capabilites for the * given device name. 
*/ - int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override; + int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); int32_t DisplayCaptureSettingsDialogBox(const char* /*deviceUniqueIdUTF8*/, const char* /*dialogTitleUTF8*/, void* /*parentWindow*/, uint32_t /*positionX*/, uint32_t /*positionY*/) override; - int32_t FillCapabilities(int fd); + int32_t FillCapabilities(int fd) RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); int32_t Init() override; private: diff --git a/modules/video_capture/linux/video_capture_linux.cc b/modules/video_capture/linux/video_capture_linux.cc index 30865235b6..504565f512 100644 --- a/modules/video_capture/linux/video_capture_linux.cc +++ b/modules/video_capture/linux/video_capture_linux.cc @@ -115,7 +115,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture( } } - rtc::CritScope cs(&_captureCritSect); + MutexLock lock(&capture_lock_); // first open /dev/video device char device[20]; sprintf(device, "/dev/video%d", (int)_deviceId); @@ -264,7 +264,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture( int32_t VideoCaptureModuleV4L2::StopCapture() { if (_captureThread) { { - rtc::CritScope cs(&_captureCritSect); + MutexLock lock(&capture_lock_); quit_ = true; } // Make sure the capture thread stop stop using the critsect. 
@@ -272,7 +272,7 @@ int32_t VideoCaptureModuleV4L2::StopCapture() { _captureThread.reset(); } - rtc::CritScope cs(&_captureCritSect); + MutexLock lock(&capture_lock_); if (_captureStarted) { _captureStarted = false; @@ -387,7 +387,7 @@ bool VideoCaptureModuleV4L2::CaptureProcess() { } { - rtc::CritScope cs(&_captureCritSect); + MutexLock lock(&capture_lock_); if (quit_) { return false; diff --git a/modules/video_capture/linux/video_capture_linux.h b/modules/video_capture/linux/video_capture_linux.h index ac9409e23a..ddb5d5ba87 100644 --- a/modules/video_capture/linux/video_capture_linux.h +++ b/modules/video_capture/linux/video_capture_linux.h @@ -18,8 +18,8 @@ #include "modules/video_capture/video_capture_defines.h" #include "modules/video_capture/video_capture_impl.h" -#include "rtc_base/critical_section.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace videocapturemodule { @@ -43,8 +43,8 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl { // TODO(pbos): Stop using unique_ptr and resetting the thread. 
std::unique_ptr _captureThread; - rtc::CriticalSection _captureCritSect; - bool quit_ RTC_GUARDED_BY(_captureCritSect); + Mutex capture_lock_; + bool quit_ RTC_GUARDED_BY(capture_lock_); int32_t _deviceId; int32_t _deviceFd; diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc index 4a726db8f2..1a0cf2d5da 100644 --- a/modules/video_capture/test/video_capture_unittest.cc +++ b/modules/video_capture/test/video_capture_unittest.cc @@ -23,7 +23,7 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/utility/include/process_thread.h" #include "modules/video_capture/video_capture_factory.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/sleep.h" #include "test/frame_utils.h" @@ -74,7 +74,7 @@ class TestVideoCaptureCallback } void OnFrame(const webrtc::VideoFrame& videoFrame) override { - rtc::CritScope cs(&capture_cs_); + webrtc::MutexLock lock(&capture_lock_); int height = videoFrame.height(); int width = videoFrame.width(); #if defined(WEBRTC_ANDROID) && WEBRTC_ANDROID @@ -106,38 +106,38 @@ class TestVideoCaptureCallback } void SetExpectedCapability(VideoCaptureCapability capability) { - rtc::CritScope cs(&capture_cs_); + webrtc::MutexLock lock(&capture_lock_); capability_ = capability; incoming_frames_ = 0; last_render_time_ms_ = 0; } int incoming_frames() { - rtc::CritScope cs(&capture_cs_); + webrtc::MutexLock lock(&capture_lock_); return incoming_frames_; } int timing_warnings() { - rtc::CritScope cs(&capture_cs_); + webrtc::MutexLock lock(&capture_lock_); return timing_warnings_; } VideoCaptureCapability capability() { - rtc::CritScope cs(&capture_cs_); + webrtc::MutexLock lock(&capture_lock_); return capability_; } bool CompareLastFrame(const webrtc::VideoFrame& frame) { - rtc::CritScope cs(&capture_cs_); + webrtc::MutexLock lock(&capture_lock_); return 
webrtc::test::FrameBufsEqual(last_frame_, frame.video_frame_buffer()); } void SetExpectedCaptureRotation(webrtc::VideoRotation rotation) { - rtc::CritScope cs(&capture_cs_); + webrtc::MutexLock lock(&capture_lock_); rotate_frame_ = rotation; } private: - rtc::CriticalSection capture_cs_; + webrtc::Mutex capture_lock_; VideoCaptureCapability capability_; int64_t last_render_time_ms_; int incoming_frames_; @@ -242,11 +242,6 @@ TEST_F(VideoCaptureTest, MAYBE_CreateDelete) { #define MAYBE_Capabilities Capabilities #endif TEST_F(VideoCaptureTest, MAYBE_Capabilities) { -#ifdef WEBRTC_MAC - printf("Video capture capabilities are not supported on Mac.\n"); - return; -#endif - TestVideoCaptureCallback capture_observer; rtc::scoped_refptr module( diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc index 9d53a91157..6619d15924 100644 --- a/modules/video_capture/video_capture_impl.cc +++ b/modules/video_capture/video_capture_impl.cc @@ -96,12 +96,12 @@ VideoCaptureImpl::~VideoCaptureImpl() { void VideoCaptureImpl::RegisterCaptureDataCallback( rtc::VideoSinkInterface* dataCallBack) { - rtc::CritScope cs(&_apiCs); + MutexLock lock(&api_lock_); _dataCallBack = dataCallBack; } void VideoCaptureImpl::DeRegisterCaptureDataCallback() { - rtc::CritScope cs(&_apiCs); + MutexLock lock(&api_lock_); _dataCallBack = NULL; } int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) { @@ -118,7 +118,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, size_t videoFrameLength, const VideoCaptureCapability& frameInfo, int64_t captureTime /*=0*/) { - rtc::CritScope cs(&_apiCs); + MutexLock lock(&api_lock_); const int32_t width = frameInfo.width; const int32_t height = frameInfo.height; @@ -223,7 +223,7 @@ int32_t VideoCaptureImpl::CaptureSettings( } int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) { - rtc::CritScope cs(&_apiCs); + MutexLock lock(&api_lock_); _rotateFrame = rotation; return 0; } 
diff --git a/modules/video_capture/video_capture_impl.h b/modules/video_capture/video_capture_impl.h index 197bfd387c..cbc99b76c1 100644 --- a/modules/video_capture/video_capture_impl.h +++ b/modules/video_capture/video_capture_impl.h @@ -25,7 +25,7 @@ #include "modules/video_capture/video_capture.h" #include "modules/video_capture/video_capture_config.h" #include "modules/video_capture/video_capture_defines.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -78,7 +78,7 @@ class VideoCaptureImpl : public VideoCaptureModule { ~VideoCaptureImpl() override; char* _deviceUniqueId; // current Device unique name; - rtc::CriticalSection _apiCs; + Mutex api_lock_; VideoCaptureCapability _requestedCapability; // Should be set by platform // dependent code in // StartCapture. diff --git a/modules/video_capture/windows/device_info_ds.cc b/modules/video_capture/windows/device_info_ds.cc index a163579bf1..f43c508bee 100644 --- a/modules/video_capture/windows/device_info_ds.cc +++ b/modules/video_capture/windows/device_info_ds.cc @@ -99,7 +99,7 @@ int32_t DeviceInfoDS::Init() { return 0; } uint32_t DeviceInfoDS::NumberOfDevices() { - ReadLockScoped cs(_apiLock); + MutexLock lock(&_apiLock); return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0); } @@ -110,7 +110,7 @@ int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber, uint32_t deviceUniqueIdUTF8Length, char* productUniqueIdUTF8, uint32_t productUniqueIdUTF8Length) { - ReadLockScoped cs(_apiLock); + MutexLock lock(&_apiLock); const int32_t result = GetDeviceInfo( deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8, deviceUniqueIdUTF8Length, productUniqueIdUTF8, productUniqueIdUTF8Length); @@ -287,7 +287,7 @@ IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8, int32_t DeviceInfoDS::GetWindowsCapability( const int32_t capabilityIndex, VideoCaptureCapabilityWindows& windowsCapability) { - ReadLockScoped cs(_apiLock); + MutexLock 
lock(&_apiLock); if (capabilityIndex < 0 || static_cast(capabilityIndex) >= _captureCapabilitiesWindows.size()) { @@ -584,7 +584,7 @@ int32_t DeviceInfoDS::DisplayCaptureSettingsDialogBox( void* parentWindow, uint32_t positionX, uint32_t positionY) { - ReadLockScoped cs(_apiLock); + MutexLock lock(&_apiLock); HWND window = (HWND)parentWindow; IBaseFilter* filter = GetDeviceFilter(deviceUniqueIdUTF8, NULL, 0); diff --git a/modules/video_capture/windows/device_info_ds.h b/modules/video_capture/windows/device_info_ds.h index d782eb5415..2fda3257f4 100644 --- a/modules/video_capture/windows/device_info_ds.h +++ b/modules/video_capture/windows/device_info_ds.h @@ -85,7 +85,8 @@ class DeviceInfoDS : public DeviceInfoImpl { char* productUniqueIdUTF8, uint32_t productUniqueIdUTF8Length); - int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override; + int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override + RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock); private: ICreateDevEnum* _dsDevEnum; diff --git a/modules/video_capture/windows/video_capture_ds.cc b/modules/video_capture/windows/video_capture_ds.cc index 615a1b56ea..6dca74750c 100644 --- a/modules/video_capture/windows/video_capture_ds.cc +++ b/modules/video_capture/windows/video_capture_ds.cc @@ -130,7 +130,7 @@ int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) { } int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) { - rtc::CritScope cs(&_apiCs); + MutexLock lock(&api_lock_); if (capability != _requestedCapability) { DisconnectGraph(); @@ -148,7 +148,7 @@ int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) { } int32_t VideoCaptureDS::StopCapture() { - rtc::CritScope cs(&_apiCs); + MutexLock lock(&api_lock_); HRESULT hr = _mediaControl->Pause(); if (FAILED(hr)) { diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn index bca50d5f92..aaa33c8009 100644 --- a/modules/video_coding/BUILD.gn +++ 
b/modules/video_coding/BUILD.gn @@ -6,6 +6,7 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. +import("//third_party/libaom/options.gni") import("../../webrtc.gni") rtc_library("encoded_frame") { @@ -19,28 +20,64 @@ rtc_library("encoded_frame") { ":video_codec_interface", "../../api/video:encoded_image", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", - "../../modules:module_api", "../../modules:module_api_public", "../../modules/rtp_rtcp:rtp_video_header", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../rtc_base/experiments:alr_experiment", "../../rtc_base/experiments:rtt_mult_experiment", + "../../rtc_base/system:rtc_export", "../../system_wrappers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional", "//third_party/abseil-cpp/absl/types:variant", ] } +rtc_library("chain_diff_calculator") { + sources = [ + "chain_diff_calculator.cc", + "chain_diff_calculator.h", + ] + + deps = [ + "../../rtc_base:checks", + "../../rtc_base:logging", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("frame_dependencies_calculator") { + sources = [ + "frame_dependencies_calculator.cc", + "frame_dependencies_calculator.h", + ] + + deps = [ + "../../api:array_view", + "../../api/video:video_frame_type", + "../../common_video/generic_frame_descriptor", + "../../rtc_base:checks", + "../../rtc_base:logging", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + rtc_library("nack_module") { - visibility = [ "*" ] sources = [ "histogram.cc", "histogram.h", - "nack_module.cc", - "nack_module.h", + "nack_module2.cc", + "nack_module2.h", ] deps = [ @@ -50,7 +87,11 @@ 
rtc_library("nack_module") { "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_numerics", + "../../rtc_base:rtc_task_queue", "../../rtc_base/experiments:field_trial_parser", + "../../rtc_base/synchronization:sequence_checker", + "../../rtc_base/task_utils:pending_task_safety_flag", + "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", "../../system_wrappers:field_trial", "../utility", @@ -64,13 +105,21 @@ rtc_library("video_coding") { "../../api:array_view", "../../api:scoped_refptr", "../../api/video:encoded_image", + "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator_factory", + "../../rtc_base:deprecation", + "../../rtc_base/task_utils:to_queued_task", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "../rtp_rtcp:rtp_video_header", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/types:variant", ] sources = [ @@ -118,6 +167,13 @@ rtc_library("video_coding") { "video_receiver2.h", ] + if (rtc_use_h265) { + sources += [ + "h265_vps_sps_pps_tracker.cc", + "h265_vps_sps_pps_tracker.h", + ] + } + deps += [ ":codec_globals_headers", ":encoded_frame", @@ -133,9 +189,9 @@ rtc_library("video_coding") { "../../api/units:time_delta", "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:encoded_frame", + "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocator", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", @@ -151,6 +207,7 @@ rtc_library("video_coding") { "../../rtc_base/experiments:min_video_bitrate_experiment", 
"../../rtc_base/experiments:rate_control_settings", "../../rtc_base/experiments:rtt_mult_experiment", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/task_utils:repeating_task", "../../rtc_base/third_party/base64", @@ -158,9 +215,6 @@ rtc_library("video_coding") { "../../system_wrappers", "../rtp_rtcp", "../rtp_rtcp:rtp_rtcp_format", - "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", ] } @@ -181,9 +235,10 @@ rtc_library("video_codec_interface") { "../../api/video_codecs:video_codecs_api", "../../common_video", "../../common_video/generic_frame_descriptor", + "../../rtc_base:deprecation", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_coding_legacy") { @@ -191,6 +246,8 @@ rtc_library("video_coding_legacy") { sources = [ "decoding_state.cc", "decoding_state.h", + "event_wrapper.cc", + "event_wrapper.h", "frame_buffer.cc", "frame_buffer.h", "include/video_coding.h", @@ -214,7 +271,6 @@ rtc_library("video_coding_legacy") { ":video_coding", "..:module_api", "..:module_api_public", - "../../:webrtc_common", "../../api:rtp_headers", "../../api:rtp_packet_info", "../../api/video:encoded_image", @@ -227,12 +283,15 @@ rtc_library("video_coding_legacy") { "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_event", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../system_wrappers", - "../../system_wrappers:event_wrapper", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", "../utility", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", "//third_party/abseil-cpp/absl/types:variant", @@ -248,9 +307,7 @@ 
rtc_source_set("codec_globals_headers") { "codecs/vp9/include/vp9_globals.h", ] - deps = [ - "../../rtc_base:checks", - ] + deps = [ "../../rtc_base:checks" ] } rtc_library("video_coding_utility") { @@ -258,8 +315,6 @@ rtc_library("video_coding_utility") { sources = [ "utility/decoded_frames_history.cc", "utility/decoded_frames_history.h", - "utility/default_video_bitrate_allocator.cc", - "utility/default_video_bitrate_allocator.h", "utility/frame_dropper.cc", "utility/frame_dropper.h", "utility/framerate_controller.cc", @@ -282,11 +337,13 @@ rtc_library("video_coding_utility") { deps = [ ":video_codec_interface", - "..:module_api", + "../../api:scoped_refptr", "../../api/video:encoded_frame", "../../api/video:encoded_image", + "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", + "../../api/video:video_frame", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../modules/rtp_rtcp", @@ -294,7 +351,7 @@ rtc_library("video_coding_utility") { "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_numerics", "../../rtc_base:rtc_task_queue", - "../../rtc_base/experiments:experimental_screenshare_settings", + "../../rtc_base:weak_ptr", "../../rtc_base/experiments:quality_scaler_settings", "../../rtc_base/experiments:quality_scaling_experiment", "../../rtc_base/experiments:rate_control_settings", @@ -303,10 +360,11 @@ rtc_library("video_coding_utility") { "../../rtc_base/system:arch", "../../rtc_base/system:file_wrapper", "../../rtc_base/task_utils:repeating_task", + "../../rtc_base/task_utils:to_queued_task", "../../system_wrappers:field_trial", "../rtp_rtcp:rtp_rtcp_format", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("webrtc_h264") { @@ -328,7 +386,6 @@ rtc_library("webrtc_h264") { ":video_coding_utility", "../../api/video:video_frame", "../../api/video:video_frame_i010", - 
"../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../common_video", @@ -339,9 +396,11 @@ rtc_library("webrtc_h264") { "../../rtc_base/system:rtc_export", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", + "//third_party/libyuv", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", - "//third_party/libyuv", ] if (rtc_use_h264) { @@ -370,18 +429,17 @@ rtc_library("webrtc_multiplex") { deps = [ ":video_codec_interface", ":video_coding_utility", - "..:module_api", "../../api:fec_controller_api", "../../api:scoped_refptr", "../../api/video:encoded_image", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../media:rtc_media_base", "../../rtc_base", "../../rtc_base:checks", + "../../rtc_base/synchronization:mutex", "../rtp_rtcp:rtp_rtcp_format", ] } @@ -405,29 +463,26 @@ rtc_library("webrtc_vp8") { ":video_codec_interface", ":video_coding_utility", ":webrtc_vp8_temporal_layers", - "..:module_api", - "../..:webrtc_common", "../../api:fec_controller_api", "../../api:scoped_refptr", "../../api/video:encoded_image", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:vp8_temporal_layers_factory", "../../common_video", "../../rtc_base:checks", + "../../rtc_base:deprecation", "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_numerics", "../../rtc_base/experiments:cpu_speed_experiment", - "../../rtc_base/experiments:experimental_screenshare_settings", "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/experiments:rate_control_settings", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/types:optional", 
"//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] } @@ -449,8 +504,6 @@ rtc_library("webrtc_vp8_temporal_layers") { ":codec_globals_headers", ":video_codec_interface", ":video_coding_utility", - "..:module_api", - "../..:webrtc_common", "../../api:fec_controller_api", "../../api/video_codecs:video_codecs_api", "../../rtc_base:checks", @@ -458,8 +511,8 @@ rtc_library("webrtc_vp8_temporal_layers") { "../../rtc_base:rtc_numerics", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } # This target includes VP9 files that may be used for any VP9 codec, internal SW or external HW. @@ -467,23 +520,23 @@ rtc_library("webrtc_vp9_helpers") { sources = [ "codecs/vp9/svc_config.cc", "codecs/vp9/svc_config.h", - "codecs/vp9/svc_rate_allocator.cc", "codecs/vp9/svc_rate_allocator.h", ] deps = [ ":codec_globals_headers", ":video_codec_interface", - "../..:webrtc_common", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_codec_constants", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../rtc_base:checks", + "../../rtc_base:logging", "../../rtc_base/experiments:stable_target_rate_experiment", - "//third_party/abseil-cpp/absl/container:inlined_vector", + "svc:svc_rate_allocator", ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } rtc_library("webrtc_vp9") { @@ -502,10 +555,10 @@ rtc_library("webrtc_vp9") { ":video_codec_interface", ":video_coding_utility", ":webrtc_vp9_helpers", - "..:module_api", - "../..:webrtc_common", "../../api:fec_controller_api", "../../api:scoped_refptr", + "../../api/transport:field_trial_based_config", + "../../api/transport:webrtc_key_value_config", "../../api/video:video_frame", "../../api/video:video_frame_i010", 
"../../api/video:video_rtp_headers", @@ -515,10 +568,18 @@ rtc_library("webrtc_vp9") { "../../media:rtc_vp9_profile", "../../rtc_base", "../../rtc_base:checks", + "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/experiments:rate_control_settings", + "../../rtc_base/synchronization:mutex", "../../system_wrappers:field_trial", "../rtp_rtcp:rtp_rtcp_format", + "svc:scalability_structures", + "svc:scalable_video_controller", + "//third_party/libyuv", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:strings", ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] @@ -559,7 +620,6 @@ if (rtc_include_tests) { "../../api/video_codecs:video_codecs_api", "../../media:rtc_audio_video", "../../media:rtc_media_base", - "../../modules:module_api", "../../rtc_base:rtc_base_approved", "../../sdk:native_api", "../../sdk:peerconnectionfactory_base_objc", @@ -570,6 +630,25 @@ if (rtc_include_tests) { } } + rtc_library("encoded_video_frame_producer") { + testonly = true + sources = [ + "codecs/test/encoded_video_frame_producer.cc", + "codecs/test/encoded_video_frame_producer.h", + ] + deps = [ + ":video_codec_interface", + "../../api:create_frame_generator", + "../../api:frame_generator_api", + "../../api/transport/rtp:dependency_descriptor", + "../../api/video:encoded_image", + "../../api/video:video_frame", + "../../api/video:video_frame_type", + "../../api/video_codecs:video_codecs_api", + "../../rtc_base:checks", + ] + } + rtc_library("simulcast_test_fixture_impl") { testonly = true sources = [ @@ -581,13 +660,11 @@ if (rtc_include_tests) { ":video_codec_interface", ":video_coding", ":video_coding_utility", - "../../:webrtc_common", "../../api:mock_video_decoder", "../../api:mock_video_encoder", "../../api:simulcast_test_fixture_api", "../../api/video:encoded_image", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", 
"../../common_video", @@ -625,22 +702,22 @@ if (rtc_include_tests) { "../../api/video:video_bitrate_allocator", "../../api/video:video_bitrate_allocator_factory", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_task_queue", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/task_utils:to_queued_task", "../../test:test_support", "../../test:video_test_common", "../../test:video_test_support", "../rtp_rtcp:rtp_rtcp_format", - "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } video_coding_modules_tests_resources = [] @@ -671,9 +748,7 @@ if (rtc_include_tests) { bundle_data("video_coding_modules_tests_resources_bundle_data") { testonly = true sources = video_coding_modules_tests_resources - outputs = [ - "{{bundle_resources_dir}}/{{source_file_part}}", - ] + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] } } } @@ -691,10 +766,10 @@ if (rtc_include_tests) { ":video_coding_utility", ":videocodec_test_stats_impl", ":webrtc_vp9_helpers", - "../..:webrtc_common", "../../api:array_view", "../../api:videocodec_test_fixture_api", "../../api/test/video:function_video_factory", + "../../api/transport:field_trial_based_config", "../../api/video:video_bitrate_allocation", "../../api/video_codecs:video_codecs_api", "../../call:video_stream_api", @@ -713,8 +788,8 @@ if (rtc_include_tests) { "../../test:test_support", "../../test:video_test_common", "../../test:video_test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("videocodec_test_stats_impl") { @@ -725,6 +800,7 @@ if (rtc_include_tests) { ] deps = [ 
"../../api:videocodec_test_fixture_api", + "../../api/numerics", "../../rtc_base:checks", "../../rtc_base:rtc_numerics", "../../rtc_base:stringutils", @@ -746,11 +822,17 @@ if (rtc_include_tests) { "codecs/vp8/test/vp8_impl_unittest.cc", "codecs/vp9/test/vp9_impl_unittest.cc", ] + + # TODO(jianj): Fix crash on iOS and re-enable + if (enable_libaom && !is_ios) { + sources += [ "codecs/test/videocodec_test_libaom.cc" ] + } if (rtc_use_h264) { sources += [ "codecs/test/videocodec_test_openh264.cc" ] } deps = [ + ":encoded_video_frame_producer", ":video_codec_interface", ":video_codecs_test_framework", ":video_coding_utility", @@ -760,7 +842,6 @@ if (rtc_include_tests) { ":webrtc_vp8", ":webrtc_vp9", ":webrtc_vp9_helpers", - "../..:webrtc_common", "../../api:create_frame_generator", "../../api:create_videocodec_test_fixture_api", "../../api:frame_generator_api", @@ -772,7 +853,6 @@ if (rtc_include_tests) { "../../api/test/video:function_video_factory", "../../api/video:encoded_image", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../api/video_codecs:rtc_software_fallback_wrappers", "../../api/video_codecs:video_codecs_api", @@ -789,9 +869,9 @@ if (rtc_include_tests) { "../../test:test_support", "../../test:video_test_common", "../rtp_rtcp:rtp_rtcp_format", - "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] data = video_coding_modules_tests_resources @@ -820,6 +900,7 @@ if (rtc_include_tests) { testonly = true sources = [ + "chain_diff_calculator_unittest.cc", "codecs/test/videocodec_test_fixture_config_unittest.cc", "codecs/test/videocodec_test_stats_impl_unittest.cc", "codecs/test/videoprocessor_unittest.cc", @@ -827,18 +908,18 @@ if (rtc_include_tests) { "codecs/vp8/libvpx_vp8_simulcast_test.cc", "codecs/vp8/screenshare_layers_unittest.cc", "codecs/vp9/svc_config_unittest.cc", - 
"codecs/vp9/svc_rate_allocator_unittest.cc", "decoding_state_unittest.cc", "fec_controller_unittest.cc", "frame_buffer2_unittest.cc", + "frame_dependencies_calculator_unittest.cc", "generic_decoder_unittest.cc", "h264_sprop_parameter_sets_unittest.cc", "h264_sps_pps_tracker_unittest.cc", "histogram_unittest.cc", - "include/mock/mock_vcm_callbacks.h", "jitter_buffer_unittest.cc", "jitter_estimator_tests.cc", "loss_notification_controller_unittest.cc", + "nack_module2_unittest.cc", "nack_module_unittest.cc", "packet_buffer_unittest.cc", "receiver_unittest.cc", @@ -849,7 +930,6 @@ if (rtc_include_tests) { "timing_unittest.cc", "unique_timestamp_counter_unittest.cc", "utility/decoded_frames_history_unittest.cc", - "utility/default_video_bitrate_allocator_unittest.cc", "utility/frame_dropper_unittest.cc", "utility/framerate_controller_unittest.cc", "utility/ivf_file_reader_unittest.cc", @@ -867,8 +947,10 @@ if (rtc_include_tests) { } deps = [ + ":chain_diff_calculator", ":codec_globals_headers", ":encoded_frame", + ":frame_dependencies_calculator", ":nack_module", ":simulcast_test_fixture_impl", ":video_codec_interface", @@ -883,7 +965,6 @@ if (rtc_include_tests) { ":webrtc_vp8_temporal_layers", ":webrtc_vp9", ":webrtc_vp9_helpers", - "..:module_api", "..:module_fec_api", "../../api:array_view", "../../api:create_simulcast_test_fixture_api", @@ -897,15 +978,17 @@ if (rtc_include_tests) { "../../api/task_queue:default_task_queue_factory", "../../api/test/video:function_video_factory", "../../api/video:builtin_video_bitrate_allocator_factory", + "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_bitrate_allocator_factory", "../../api/video:video_frame", - "../../api/video:video_frame_i420", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:vp8_temporal_layers_factory", 
"../../common_video", + "../../common_video/generic_frame_descriptor", "../../common_video/test:utilities", "../../media:rtc_media_base", "../../rtc_base", @@ -916,8 +999,8 @@ if (rtc_include_tests) { "../../rtc_base:rtc_task_queue", "../../rtc_base:task_queue_for_test", "../../rtc_base/experiments:jitter_upper_bound_experiment", + "../../rtc_base/synchronization:mutex", "../../system_wrappers", - "../../system_wrappers:event_wrapper", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "../../test:fake_video_codecs", @@ -930,6 +1013,12 @@ if (rtc_include_tests) { "../../test/time_controller:time_controller", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", + "codecs/av1:video_coding_codecs_av1_tests", + "deprecated:nack_module", + "svc:scalability_structure_tests", + "svc:svc_rate_allocator_tests", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", "//third_party/abseil-cpp/absl/types:variant", diff --git a/modules/video_coding/DEPS b/modules/video_coding/DEPS index 1b8a98bb26..3a7629e84b 100644 --- a/modules/video_coding/DEPS +++ b/modules/video_coding/DEPS @@ -6,6 +6,7 @@ include_rules = [ "+system_wrappers", "+rtc_tools", "+third_party/libyuv", + "+rtc_base/system/rtc_export.h", ] specific_include_rules = { diff --git a/modules/video_coding/OWNERS b/modules/video_coding/OWNERS index 99d6c1161d..2e4d968c98 100644 --- a/modules/video_coding/OWNERS +++ b/modules/video_coding/OWNERS @@ -5,8 +5,3 @@ marpan@webrtc.org philipel@webrtc.org sprang@webrtc.org stefan@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/modules/video_coding/chain_diff_calculator.cc b/modules/video_coding/chain_diff_calculator.cc new file mode 100644 index 0000000000..5f852717b5 --- /dev/null +++ b/modules/video_coding/chain_diff_calculator.cc @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/chain_diff_calculator.h" + +#include +#include + +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +void ChainDiffCalculator::Reset(const std::vector& chains) { + last_frame_in_chain_.resize(chains.size()); + for (size_t i = 0; i < chains.size(); ++i) { + if (chains[i]) { + last_frame_in_chain_[i] = absl::nullopt; + } + } +} + +absl::InlinedVector ChainDiffCalculator::ChainDiffs( + int64_t frame_id) const { + absl::InlinedVector result; + result.reserve(last_frame_in_chain_.size()); + for (const auto& frame_id_in_chain : last_frame_in_chain_) { + result.push_back(frame_id_in_chain ? 
(frame_id - *frame_id_in_chain) : 0); + } + return result; +} + +absl::InlinedVector ChainDiffCalculator::From( + int64_t frame_id, + const std::vector& chains) { + auto result = ChainDiffs(frame_id); + if (chains.size() != last_frame_in_chain_.size()) { + RTC_LOG(LS_ERROR) << "Insconsistent chain configuration for frame#" + << frame_id << ": expected " + << last_frame_in_chain_.size() << " chains, found " + << chains.size(); + } + size_t num_chains = std::min(last_frame_in_chain_.size(), chains.size()); + for (size_t i = 0; i < num_chains; ++i) { + if (chains[i]) { + last_frame_in_chain_[i] = frame_id; + } + } + return result; +} + +} // namespace webrtc diff --git a/modules/video_coding/chain_diff_calculator.h b/modules/video_coding/chain_diff_calculator.h new file mode 100644 index 0000000000..bca7340c6f --- /dev/null +++ b/modules/video_coding/chain_diff_calculator.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_ +#define MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_ + +#include + +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" + +namespace webrtc { + +// This class is thread compatible. +class ChainDiffCalculator { + public: + ChainDiffCalculator() = default; + ChainDiffCalculator(const ChainDiffCalculator&) = default; + ChainDiffCalculator& operator=(const ChainDiffCalculator&) = default; + + // Restarts chains, i.e. for position where chains[i] == true next chain_diff + // will be 0. Saves chains.size() as number of chains in the stream. 
+ void Reset(const std::vector& chains); + + // Returns chain diffs based on flags if frame is part of the chain. + absl::InlinedVector From(int64_t frame_id, + const std::vector& chains); + + private: + absl::InlinedVector ChainDiffs(int64_t frame_id) const; + + absl::InlinedVector, 4> last_frame_in_chain_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_ diff --git a/modules/video_coding/chain_diff_calculator_unittest.cc b/modules/video_coding/chain_diff_calculator_unittest.cc new file mode 100644 index 0000000000..efd09bd888 --- /dev/null +++ b/modules/video_coding/chain_diff_calculator_unittest.cc @@ -0,0 +1,126 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/chain_diff_calculator.h" + +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::SizeIs; + +TEST(ChainDiffCalculatorTest, SingleChain) { + // Simulate a stream with 2 temporal layer where chain + // protects temporal layer 0. + ChainDiffCalculator calculator; + // Key frame. + calculator.Reset({true}); + EXPECT_THAT(calculator.From(1, {true}), ElementsAre(0)); + // T1 delta frame. + EXPECT_THAT(calculator.From(2, {false}), ElementsAre(1)); + // T0 delta frame. + EXPECT_THAT(calculator.From(3, {true}), ElementsAre(2)); +} + +TEST(ChainDiffCalculatorTest, TwoChainsFullSvc) { + // Simulate a full svc stream with 2 spatial and 2 temporal layers. + // chains are protecting temporal layers 0. + ChainDiffCalculator calculator; + // S0 Key frame. 
+ calculator.Reset({true, true}); + EXPECT_THAT(calculator.From(1, {true, true}), ElementsAre(0, 0)); + // S1 Key frame. + EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 1)); + // S0T1 delta frame. + EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1)); + // S1T1 delta frame. + EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2)); + // S0T0 delta frame. + EXPECT_THAT(calculator.From(5, {true, true}), ElementsAre(4, 3)); + // S1T0 delta frame. + EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 1)); +} + +TEST(ChainDiffCalculatorTest, TwoChainsKSvc) { + // Simulate a k-svc stream with 2 spatial and 2 temporal layers. + // chains are protecting temporal layers 0. + ChainDiffCalculator calculator; + // S0 Key frame. + calculator.Reset({true, true}); + EXPECT_THAT(calculator.From(1, {true, true}), ElementsAre(0, 0)); + // S1 Key frame. + EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 1)); + // S0T1 delta frame. + EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1)); + // S1T1 delta frame. + EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2)); + // S0T0 delta frame. + EXPECT_THAT(calculator.From(5, {true, false}), ElementsAre(4, 3)); + // S1T0 delta frame. + EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 4)); +} + +TEST(ChainDiffCalculatorTest, TwoChainsSimulcast) { + // Simulate a k-svc stream with 2 spatial and 2 temporal layers. + // chains are protecting temporal layers 0. + ChainDiffCalculator calculator; + // S0 Key frame. + calculator.Reset({true, false}); + EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0)); + // S1 Key frame. + calculator.Reset({false, true}); + EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 0)); + // S0T1 delta frame. + EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1)); + // S1T1 delta frame. + EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2)); + // S0T0 delta frame. 
+ EXPECT_THAT(calculator.From(5, {true, false}), ElementsAre(4, 3)); + // S1T0 delta frame. + EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 4)); +} + +TEST(ChainDiffCalculatorTest, ResilentToAbsentChainConfig) { + ChainDiffCalculator calculator; + // Key frame. + calculator.Reset({true, false}); + EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0)); + // Forgot to set chains. should still return 2 chain_diffs. + EXPECT_THAT(calculator.From(2, {}), ElementsAre(1, 0)); + // chain diffs for next frame(s) are undefined, but still there should be + // correct number of them. + EXPECT_THAT(calculator.From(3, {true, false}), SizeIs(2)); + EXPECT_THAT(calculator.From(4, {false, true}), SizeIs(2)); + // Since previous two frames updated all the chains, can expect what + // chain_diffs would be. + EXPECT_THAT(calculator.From(5, {false, false}), ElementsAre(2, 1)); +} + +TEST(ChainDiffCalculatorTest, ResilentToTooMainChains) { + ChainDiffCalculator calculator; + // Key frame. + calculator.Reset({true, false}); + EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0)); + // Set wrong number of chains. Expect number of chain_diffs is not changed. + EXPECT_THAT(calculator.From(2, {true, true, true}), ElementsAre(1, 0)); + // chain diffs for next frame(s) are undefined, but still there should be + // correct number of them. + EXPECT_THAT(calculator.From(3, {true, false}), SizeIs(2)); + EXPECT_THAT(calculator.From(4, {false, true}), SizeIs(2)); + // Since previous two frames updated all the chains, can expect what + // chain_diffs would be. + EXPECT_THAT(calculator.From(5, {false, false}), ElementsAre(2, 1)); +} + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/codecs/av1/BUILD.gn b/modules/video_coding/codecs/av1/BUILD.gn new file mode 100644 index 0000000000..27b22a0a59 --- /dev/null +++ b/modules/video_coding/codecs/av1/BUILD.gn @@ -0,0 +1,95 @@ +# Copyright (c) 2020 The WebRTC project authors. 
All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("//third_party/libaom/options.gni") +import("../../../../webrtc.gni") + +rtc_library("libaom_av1_decoder") { + visibility = [ "*" ] + poisonous = [ "software_video_codecs" ] + public = [ "libaom_av1_decoder.h" ] + deps = [ "../../../../api/video_codecs:video_codecs_api" ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] + + if (enable_libaom) { + sources = [ "libaom_av1_decoder.cc" ] + deps += [ + "../..:video_codec_interface", + "../../../../api:scoped_refptr", + "../../../../api/video:encoded_image", + "../../../../api/video:video_frame", + "../../../../common_video", + "../../../../rtc_base:logging", + "//third_party/libaom", + "//third_party/libyuv", + ] + absl_deps += [ "//third_party/abseil-cpp/absl/types:optional" ] + } else { + sources = [ "libaom_av1_decoder_absent.cc" ] + } +} + +rtc_library("libaom_av1_encoder") { + visibility = [ "*" ] + poisonous = [ "software_video_codecs" ] + public = [ "libaom_av1_encoder.h" ] + deps = [ + "../../../../api/video_codecs:video_codecs_api", + "../../svc:scalability_structures", + "../../svc:scalable_video_controller", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/types:optional", + ] + + if (enable_libaom) { + sources = [ "libaom_av1_encoder.cc" ] + deps += [ + "../..:video_codec_interface", + "../../../../api:scoped_refptr", + "../../../../api/video:encoded_image", + "../../../../api/video:video_frame", + "../../../../common_video", + "../../../../rtc_base:checks", + "../../../../rtc_base:logging", + "//third_party/libaom", + ] + } 
else { + sources = [ "libaom_av1_encoder_absent.cc" ] + } +} + +if (rtc_include_tests) { + rtc_library("video_coding_codecs_av1_tests") { + testonly = true + + if (enable_libaom) { + sources = [ + "libaom_av1_encoder_unittest.cc", + "libaom_av1_unittest.cc", + ] + deps = [ + ":libaom_av1_decoder", + ":libaom_av1_encoder", + "../..:encoded_video_frame_producer", + "../..:video_codec_interface", + "../../../../api:mock_video_encoder", + "../../../../api/units:data_size", + "../../../../api/units:time_delta", + "../../../../api/video:video_frame", + "../../../../api/video_codecs:video_codecs_api", + "../../../../test:test_support", + "../../svc:scalability_structures", + "../../svc:scalable_video_controller", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + } +} diff --git a/modules/video_coding/codecs/av1/DEPS b/modules/video_coding/codecs/av1/DEPS new file mode 100644 index 0000000000..25779919a7 --- /dev/null +++ b/modules/video_coding/codecs/av1/DEPS @@ -0,0 +1,3 @@ +include_rules = [ + "+third_party/libaom", +] diff --git a/modules/video_coding/codecs/av1/libaom_av1_decoder.cc b/modules/video_coding/codecs/av1/libaom_av1_decoder.cc new file mode 100644 index 0000000000..bedb51937a --- /dev/null +++ b/modules/video_coding/codecs/av1/libaom_av1_decoder.cc @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" +#include "common_video/include/video_frame_buffer_pool.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "rtc_base/logging.h" +#include "third_party/libaom/source/libaom/aom/aom_decoder.h" +#include "third_party/libaom/source/libaom/aom/aomdx.h" +#include "third_party/libyuv/include/libyuv/convert.h" + +namespace webrtc { +namespace { + +constexpr int kConfigLowBitDepth = 1; // 8-bits per luma/chroma sample. +constexpr int kDecFlags = 0; // 0 signals no post processing. + +class LibaomAv1Decoder final : public VideoDecoder { + public: + LibaomAv1Decoder(); + LibaomAv1Decoder(const LibaomAv1Decoder&) = delete; + LibaomAv1Decoder& operator=(const LibaomAv1Decoder&) = delete; + ~LibaomAv1Decoder(); + + // Implements VideoDecoder. + int32_t InitDecode(const VideoCodec* codec_settings, + int number_of_cores) override; + + // Decode an encoded video frame. + int32_t Decode(const EncodedImage& encoded_image, + bool missing_frames, + int64_t render_time_ms) override; + + int32_t RegisterDecodeCompleteCallback( + DecodedImageCallback* callback) override; + + int32_t Release() override; + + const char* ImplementationName() const override; + + private: + aom_codec_ctx_t context_; + bool inited_; + // Pool of memory buffers to store decoded image data for application access. + VideoFrameBufferPool buffer_pool_; + DecodedImageCallback* decode_complete_callback_; +}; + +LibaomAv1Decoder::LibaomAv1Decoder() + : context_(), // Force value initialization instead of default one. 
+ inited_(false), + buffer_pool_(false, /*max_number_of_buffers=*/150), + decode_complete_callback_(nullptr) {} + +LibaomAv1Decoder::~LibaomAv1Decoder() { + Release(); +} + +int32_t LibaomAv1Decoder::InitDecode(const VideoCodec* codec_settings, + int number_of_cores) { + aom_codec_dec_cfg_t config = { + static_cast(number_of_cores), // Max # of threads. + 0, // Frame width set after decode. + 0, // Frame height set after decode. + kConfigLowBitDepth}; // Enable low-bit-depth code path. + + aom_codec_err_t ret = + aom_codec_dec_init(&context_, aom_codec_av1_dx(), &config, kDecFlags); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::InitDecode returned " << ret + << " on aom_codec_dec_init."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + inited_ = true; + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t LibaomAv1Decoder::Decode(const EncodedImage& encoded_image, + bool missing_frames, + int64_t /*render_time_ms*/) { + if (!inited_) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + if (decode_complete_callback_ == nullptr) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + + // Decode one video frame. + aom_codec_err_t ret = + aom_codec_decode(&context_, encoded_image.data(), encoded_image.size(), + /*user_priv=*/nullptr); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned " << ret + << " on aom_codec_decode."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Get decoded frame data. + int corrupted_frame = 0; + aom_codec_iter_t iter = nullptr; + while (aom_image_t* decoded_image = aom_codec_get_frame(&context_, &iter)) { + if (aom_codec_control(&context_, AOMD_GET_FRAME_CORRUPTED, + &corrupted_frame)) { + RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode " + "AOM_GET_FRAME_CORRUPTED."; + } + // Check that decoded image format is I420 and has 8-bit depth. 
+ if (decoded_image->fmt != AOM_IMG_FMT_I420) { + RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode invalid image format"; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Return decoded frame data. + int qp; + ret = aom_codec_control(&context_, AOMD_GET_LAST_QUANTIZER, &qp); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned " << ret + << " on control AOME_GET_LAST_QUANTIZER."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Allocate memory for decoded frame. + rtc::scoped_refptr buffer = + buffer_pool_.CreateI420Buffer(decoded_image->d_w, decoded_image->d_h); + if (!buffer.get()) { + // Pool has too many pending frames. + RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned due to lack of" + " space in decoded frame buffer pool."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Copy decoded_image to decoded_frame. + libyuv::I420Copy( + decoded_image->planes[AOM_PLANE_Y], decoded_image->stride[AOM_PLANE_Y], + decoded_image->planes[AOM_PLANE_U], decoded_image->stride[AOM_PLANE_U], + decoded_image->planes[AOM_PLANE_V], decoded_image->stride[AOM_PLANE_V], + buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), + buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), + decoded_image->d_w, decoded_image->d_h); + VideoFrame decoded_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_timestamp_rtp(encoded_image.Timestamp()) + .set_ntp_time_ms(encoded_image.ntp_time_ms_) + .set_color_space(encoded_image.ColorSpace()) + .build(); + + decode_complete_callback_->Decoded(decoded_frame, absl::nullopt, + absl::nullopt); + } + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t LibaomAv1Decoder::RegisterDecodeCompleteCallback( + DecodedImageCallback* decode_complete_callback) { + decode_complete_callback_ = decode_complete_callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t LibaomAv1Decoder::Release() { + if (aom_codec_destroy(&context_) != AOM_CODEC_OK) { + return WEBRTC_VIDEO_CODEC_MEMORY; + } + 
buffer_pool_.Release(); + inited_ = false; + return WEBRTC_VIDEO_CODEC_OK; +} + +const char* LibaomAv1Decoder::ImplementationName() const { + return "libaom"; +} + +} // namespace + +const bool kIsLibaomAv1DecoderSupported = true; + +std::unique_ptr CreateLibaomAv1Decoder() { + return std::make_unique(); +} + +} // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_decoder.h b/modules/video_coding/codecs/av1/libaom_av1_decoder.h new file mode 100644 index 0000000000..9b01285c73 --- /dev/null +++ b/modules/video_coding/codecs/av1/libaom_av1_decoder.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_DECODER_H_ +#define MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_DECODER_H_ + +#include + +#include "absl/base/attributes.h" +#include "api/video_codecs/video_decoder.h" + +namespace webrtc { + +ABSL_CONST_INIT extern const bool kIsLibaomAv1DecoderSupported; + +std::unique_ptr CreateLibaomAv1Decoder(); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_DECODER_H_ diff --git a/test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc b/modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc similarity index 55% rename from test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc rename to modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc index f7403b9567..b97b68b33f 100644 --- a/test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -7,17 +7,18 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" -#include -#include +#include -#include "api/array_view.h" -#include "call/rtp_rtcp_demuxer_helper.h" +#include "api/video_codecs/video_decoder.h" namespace webrtc { -void FuzzOneInput(const uint8_t* data, size_t size) { - ParseRtcpPacketSenderSsrc(rtc::MakeArrayView(data, size)); +const bool kIsLibaomAv1DecoderSupported = false; + +std::unique_ptr CreateLibaomAv1Decoder() { + return nullptr; } } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc new file mode 100644 index 0000000000..d3d4f84a9f --- /dev/null +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -0,0 +1,648 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" + +#include +#include + +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/base/macros.h" +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "third_party/libaom/source/libaom/aom/aom_codec.h" +#include "third_party/libaom/source/libaom/aom/aom_encoder.h" +#include "third_party/libaom/source/libaom/aom/aomcx.h" + +namespace webrtc { +namespace { + +// Encoder configuration parameters +constexpr int kQpMin = 10; +constexpr int kUsageProfile = 1; // 0 = good quality; 1 = real-time. +constexpr int kMinQindex = 58; // Min qindex threshold for QP scaling. +constexpr int kMaxQindex = 180; // Max qindex threshold for QP scaling. +constexpr int kBitDepth = 8; +constexpr int kLagInFrames = 0; // No look ahead. +constexpr int kRtpTicksPerSecond = 90000; +constexpr float kMinimumFrameRate = 1.0; + +// Only positive speeds, range for real-time coding currently is: 6 - 8. +// Lower means slower/better quality, higher means fastest/lower quality. +int GetCpuSpeed(int width, int height, int number_of_cores) { + // For smaller resolutions, use lower speed setting (get some coding gain at + // the cost of increased encoding complexity). 
+ if (number_of_cores > 2 && width * height <= 320 * 180) + return 6; + else if (width * height >= 1280 * 720) + return 8; + else + return 7; +} + + +class LibaomAv1Encoder final : public VideoEncoder { + public: + explicit LibaomAv1Encoder( + std::unique_ptr svc_controller); + ~LibaomAv1Encoder(); + + int InitEncode(const VideoCodec* codec_settings, + const Settings& settings) override; + + int32_t RegisterEncodeCompleteCallback( + EncodedImageCallback* encoded_image_callback) override; + + int32_t Release() override; + + int32_t Encode(const VideoFrame& frame, + const std::vector* frame_types) override; + + void SetRates(const RateControlParameters& parameters) override; + + EncoderInfo GetEncoderInfo() const override; + + private: + bool SvcEnabled() const { return svc_params_.has_value(); } + // Fills svc_params_ memeber value. Returns false on error. + bool SetSvcParams(ScalableVideoController::StreamLayersConfig svc_config); + // Configures the encoder with layer for the next frame. + void SetSvcLayerId( + const ScalableVideoController::LayerFrameConfig& layer_frame); + // Configures the encoder which buffers next frame updates and can reference. + void SetSvcRefFrameConfig( + const ScalableVideoController::LayerFrameConfig& layer_frame); + + std::unique_ptr svc_controller_; + bool inited_; + absl::optional svc_params_; + VideoCodec encoder_settings_; + aom_image_t* frame_for_encode_; + aom_codec_ctx_t ctx_; + aom_codec_enc_cfg_t cfg_; + EncodedImageCallback* encoded_image_callback_; + // jianlin: HW AV1 decoder seems not handling decoding without key frame + // correctly. This is a workaround to force key frame. + int aom_frame_count = 0; +}; + +int32_t VerifyCodecSettings(const VideoCodec& codec_settings) { + if (codec_settings.width < 1) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (codec_settings.height < 1) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + // maxBitrate == 0 represents an unspecified maxBitRate. 
+ if (codec_settings.maxBitrate > 0 && + codec_settings.minBitrate > codec_settings.maxBitrate) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (codec_settings.maxBitrate > 0 && + codec_settings.startBitrate > codec_settings.maxBitrate) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (codec_settings.startBitrate < codec_settings.minBitrate) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (codec_settings.maxFramerate < 1) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + return WEBRTC_VIDEO_CODEC_OK; +} + +LibaomAv1Encoder::LibaomAv1Encoder( + std::unique_ptr svc_controller) + : svc_controller_(std::move(svc_controller)), + inited_(false), + frame_for_encode_(nullptr), + encoded_image_callback_(nullptr) { + RTC_DCHECK(svc_controller_); +} + +LibaomAv1Encoder::~LibaomAv1Encoder() { + Release(); +} + +int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, + const Settings& settings) { + if (codec_settings == nullptr) { + RTC_LOG(LS_WARNING) << "No codec settings provided to " + "LibaomAv1Encoder."; + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (settings.number_of_cores < 1) { + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (inited_) { + RTC_LOG(LS_WARNING) << "Initing LibaomAv1Encoder without first releasing."; + Release(); + } + encoder_settings_ = *codec_settings; + + // Sanity checks for encoder configuration. + const int32_t result = VerifyCodecSettings(encoder_settings_); + if (result < 0) { + RTC_LOG(LS_WARNING) << "Incorrect codec settings provided to " + "LibaomAv1Encoder."; + return result; + } + if (encoder_settings_.numberOfSimulcastStreams > 1) { + RTC_LOG(LS_WARNING) << "Simulcast is not implemented by LibaomAv1Encoder."; + return result; + } + absl::string_view scalability_mode = encoder_settings_.ScalabilityMode(); + // When scalability_mode is not set, keep using svc_controller_ created + // at construction of the encoder. 
+ if (!scalability_mode.empty()) { + svc_controller_ = CreateScalabilityStructure(scalability_mode); + } + if (svc_controller_ == nullptr) { + RTC_LOG(LS_WARNING) << "Failed to set scalability mode " + << scalability_mode; + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + + if (!SetSvcParams(svc_controller_->StreamConfig())) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Initialize encoder configuration structure with default values + aom_codec_err_t ret = + aom_codec_enc_config_default(aom_codec_av1_cx(), &cfg_, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on aom_codec_enc_config_default."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Overwrite default config with input encoder settings & RTC-relevant values. + cfg_.g_w = encoder_settings_.width; + cfg_.g_h = encoder_settings_.height; + cfg_.g_threads = settings.number_of_cores; + cfg_.g_timebase.num = 1; + cfg_.g_timebase.den = kRtpTicksPerSecond; + cfg_.rc_target_bitrate = encoder_settings_.maxBitrate; // kilobits/sec. + cfg_.g_input_bit_depth = kBitDepth; + cfg_.kf_mode = AOM_KF_DISABLED; + cfg_.rc_min_quantizer = kQpMin; + cfg_.rc_max_quantizer = encoder_settings_.qpMax; + cfg_.g_usage = kUsageProfile; + cfg_.g_error_resilient = 0; + // Low-latency settings. + cfg_.rc_end_usage = AOM_CBR; // Constant Bit Rate (CBR) mode + cfg_.g_pass = AOM_RC_ONE_PASS; // One-pass rate control + cfg_.g_lag_in_frames = kLagInFrames; // No look ahead when lag equals 0. + + // Creating a wrapper to the image - setting image data to nullptr. Actual + // pointer will be set in encode. Setting align to 1, as it is meaningless + // (actual memory is not allocated). + frame_for_encode_ = + aom_img_alloc(nullptr, AOM_IMG_FMT_I420, cfg_.g_w, cfg_.g_h, 1); + + // Flag options: AOM_CODEC_USE_PSNR and AOM_CODEC_USE_HIGHBITDEPTH + aom_codec_flags_t flags = 0; + + // Initialize an encoder instance. 
+ ret = aom_codec_enc_init(&ctx_, aom_codec_av1_cx(), &cfg_, flags); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on aom_codec_enc_init."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + inited_ = true; + + // Set control parameters + ret = aom_codec_control( + &ctx_, AOME_SET_CPUUSED, + GetCpuSpeed(cfg_.g_w, cfg_.g_h, settings.number_of_cores)); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_CPUUSED."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_CDEF, 1); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_CDEF."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_TPL_MODEL, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_TPL_MODEL."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_DELTAQ_MODE, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_DELTAQ_MODE."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_ORDER_HINT, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_ORDER_HINT."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_AQ_MODE, 3); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_AQ_MODE."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + if (SvcEnabled()) { + ret = aom_codec_control(&ctx_, AV1E_SET_SVC_PARAMS, &*svc_params_); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAV1Encoder::EncodeInit returned " << 
ret + << " on control AV1E_SET_SVC_PARAMS."; + return false; + } + } + + ret = aom_codec_control(&ctx_, AOME_SET_MAX_INTRA_BITRATE_PCT, 300); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_MAX_INTRA_BITRATE_PCT."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_COEFF_COST_UPD_FREQ, 2); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_COEFF_COST_UPD_FREQ."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_MODE_COST_UPD_FREQ, 2); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_MODE_COST_UPD_FREQ."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + ret = aom_codec_control(&ctx_, AV1E_SET_MV_COST_UPD_FREQ, 3); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_MV_COST_UPD_FREQ."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + return WEBRTC_VIDEO_CODEC_OK; +} + +bool LibaomAv1Encoder::SetSvcParams( + ScalableVideoController::StreamLayersConfig svc_config) { + bool svc_enabled = + svc_config.num_spatial_layers > 1 || svc_config.num_temporal_layers > 1; + if (!svc_enabled) { + svc_params_ = absl::nullopt; + return true; + } + if (svc_config.num_spatial_layers < 1 || svc_config.num_spatial_layers > 4) { + RTC_LOG(LS_WARNING) << "Av1 supports up to 4 spatial layers. " + << svc_config.num_spatial_layers << " configured."; + return false; + } + if (svc_config.num_temporal_layers < 1 || + svc_config.num_temporal_layers > 8) { + RTC_LOG(LS_WARNING) << "Av1 supports up to 8 temporal layers. 
" + << svc_config.num_temporal_layers << " configured."; + return false; + } + aom_svc_params_t& svc_params = svc_params_.emplace(); + svc_params.number_spatial_layers = svc_config.num_spatial_layers; + svc_params.number_temporal_layers = svc_config.num_temporal_layers; + + int num_layers = + svc_config.num_spatial_layers * svc_config.num_temporal_layers; + for (int i = 0; i < num_layers; ++i) { + svc_params.min_quantizers[i] = kQpMin; + svc_params.max_quantizers[i] = encoder_settings_.qpMax; + } + + // Assume each temporal layer doubles framerate. + for (int tid = 0; tid < svc_config.num_temporal_layers; ++tid) { + svc_params.framerate_factor[tid] = + 1 << (svc_config.num_temporal_layers - tid - 1); + } + + for (int sid = 0; sid < svc_config.num_spatial_layers; ++sid) { + svc_params.scaling_factor_num[sid] = svc_config.scaling_factor_num[sid]; + svc_params.scaling_factor_den[sid] = svc_config.scaling_factor_den[sid]; + } + + return true; +} + +void LibaomAv1Encoder::SetSvcLayerId( + const ScalableVideoController::LayerFrameConfig& layer_frame) { + aom_svc_layer_id_t layer_id = {}; + layer_id.spatial_layer_id = layer_frame.SpatialId(); + layer_id.temporal_layer_id = layer_frame.TemporalId(); + aom_codec_err_t ret = + aom_codec_control(&ctx_, AV1E_SET_SVC_LAYER_ID, &layer_id); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret + << " on control AV1E_SET_SVC_LAYER_ID."; + } +} + +void LibaomAv1Encoder::SetSvcRefFrameConfig( + const ScalableVideoController::LayerFrameConfig& layer_frame) { + // Buffer name to use for each layer_frame.buffers position. In particular + // when there are 2 buffers are referenced, prefer name them last and golden, + // because av1 bitstream format has dedicated fields for these two names. 
+ // See last_frame_idx and golden_frame_idx in the av1 spec + // https://aomediacodec.github.io/av1-spec/av1-spec.pdf + static constexpr int kPreferedSlotName[] = {0, // Last + 3, // Golden + 1, 2, 4, 5, 6}; + static constexpr int kAv1NumBuffers = 8; + + aom_svc_ref_frame_config_t ref_frame_config = {}; + RTC_CHECK_LE(layer_frame.Buffers().size(), ABSL_ARRAYSIZE(kPreferedSlotName)); + for (size_t i = 0; i < layer_frame.Buffers().size(); ++i) { + const CodecBufferUsage& buffer = layer_frame.Buffers()[i]; + int slot_name = kPreferedSlotName[i]; + RTC_CHECK_GE(buffer.id, 0); + RTC_CHECK_LT(buffer.id, kAv1NumBuffers); + ref_frame_config.ref_idx[slot_name] = buffer.id; + if (buffer.referenced) { + ref_frame_config.reference[slot_name] = 1; + } + if (buffer.updated) { + ref_frame_config.refresh[buffer.id] = 1; + } + } + aom_codec_err_t ret = aom_codec_control(&ctx_, AV1E_SET_SVC_REF_FRAME_CONFIG, + &ref_frame_config); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret + << " on control AV1_SET_SVC_REF_FRAME_CONFIG."; + } +} + +int32_t LibaomAv1Encoder::RegisterEncodeCompleteCallback( + EncodedImageCallback* encoded_image_callback) { + encoded_image_callback_ = encoded_image_callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t LibaomAv1Encoder::Release() { + if (frame_for_encode_ != nullptr) { + aom_img_free(frame_for_encode_); + frame_for_encode_ = nullptr; + } + if (inited_) { + if (aom_codec_destroy(&ctx_)) { + return WEBRTC_VIDEO_CODEC_MEMORY; + } + inited_ = false; + } + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t LibaomAv1Encoder::Encode( + const VideoFrame& frame, + const std::vector* frame_types) { + if (!inited_ || encoded_image_callback_ == nullptr) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + + bool keyframe_required = + frame_types != nullptr && + absl::c_linear_search(*frame_types, VideoFrameType::kVideoFrameKey); + keyframe_required |= (aom_frame_count++ % 60 == 0)? 
true : false; + + std::vector layer_frames = + svc_controller_->NextFrameConfig(keyframe_required); + + if (layer_frames.empty()) { + RTC_LOG(LS_ERROR) << "SVCController returned no configuration for a frame."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Convert input frame to I420, if needed. + VideoFrame prepped_input_frame = frame; + if (prepped_input_frame.video_frame_buffer()->type() != + VideoFrameBuffer::Type::kI420) { + rtc::scoped_refptr converted_buffer( + prepped_input_frame.video_frame_buffer()->ToI420()); + prepped_input_frame = VideoFrame(converted_buffer, frame.timestamp(), + frame.render_time_ms(), frame.rotation()); + } + + // Set frame_for_encode_ data pointers and strides. + auto i420_buffer = prepped_input_frame.video_frame_buffer()->GetI420(); + frame_for_encode_->planes[AOM_PLANE_Y] = + const_cast(i420_buffer->DataY()); + frame_for_encode_->planes[AOM_PLANE_U] = + const_cast(i420_buffer->DataU()); + frame_for_encode_->planes[AOM_PLANE_V] = + const_cast(i420_buffer->DataV()); + frame_for_encode_->stride[AOM_PLANE_Y] = i420_buffer->StrideY(); + frame_for_encode_->stride[AOM_PLANE_U] = i420_buffer->StrideU(); + frame_for_encode_->stride[AOM_PLANE_V] = i420_buffer->StrideV(); + + const uint32_t duration = + kRtpTicksPerSecond / static_cast(encoder_settings_.maxFramerate); + + for (size_t i = 0; i < layer_frames.size(); ++i) { + ScalableVideoController::LayerFrameConfig& layer_frame = layer_frames[i]; + const bool end_of_picture = i == layer_frames.size() - 1; + + aom_enc_frame_flags_t flags = + layer_frame.IsKeyframe() ? AOM_EFLAG_FORCE_KF : 0; + + if (SvcEnabled()) { + SetSvcLayerId(layer_frame); + SetSvcRefFrameConfig(layer_frame); + } + + // Encode a frame. 
+ aom_codec_err_t ret = aom_codec_encode(&ctx_, frame_for_encode_, + frame.timestamp(), duration, flags); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret + << " on aom_codec_encode."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + // Get encoded image data. + EncodedImage encoded_image; + aom_codec_iter_t iter = nullptr; + int data_pkt_count = 0; + while (const aom_codec_cx_pkt_t* pkt = + aom_codec_get_cx_data(&ctx_, &iter)) { + if (pkt->kind == AOM_CODEC_CX_FRAME_PKT && pkt->data.frame.sz > 0) { + if (data_pkt_count > 0) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encoder returned more than " + "one data packet for an input video frame."; + Release(); + } + encoded_image.SetEncodedData(EncodedImageBuffer::Create( + /*data=*/static_cast(pkt->data.frame.buf), + /*size=*/pkt->data.frame.sz)); + + if ((pkt->data.frame.flags & AOM_EFLAG_FORCE_KF) != 0) { + layer_frame.Keyframe(); + } + encoded_image._frameType = layer_frame.IsKeyframe() + ? VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta; + encoded_image.SetTimestamp(frame.timestamp()); + encoded_image.capture_time_ms_ = frame.render_time_ms(); + encoded_image.rotation_ = frame.rotation(); + encoded_image.content_type_ = VideoContentType::UNSPECIFIED; + // If encoded image width/height info are added to aom_codec_cx_pkt_t, + // use those values in lieu of the values in frame. + encoded_image._encodedHeight = frame.height(); + encoded_image._encodedWidth = frame.width(); + encoded_image.timing_.flags = VideoSendTiming::kInvalid; + int qp = -1; + ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret + << " on control AOME_GET_LAST_QUANTIZER."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + encoded_image.qp_ = qp; + encoded_image.SetColorSpace(frame.color_space()); + ++data_pkt_count; + } + } + + // Deliver encoded image data. 
+ if (encoded_image.size() > 0) { + CodecSpecificInfo codec_specific_info; + codec_specific_info.codecType = kVideoCodecAV1; + codec_specific_info.end_of_picture = end_of_picture; + bool is_keyframe = layer_frame.IsKeyframe(); + codec_specific_info.generic_frame_info = + svc_controller_->OnEncodeDone(std::move(layer_frame)); + if (is_keyframe && codec_specific_info.generic_frame_info) { + codec_specific_info.template_structure = + svc_controller_->DependencyStructure(); + auto& resolutions = codec_specific_info.template_structure->resolutions; + if (SvcEnabled()) { + resolutions.resize(svc_params_->number_spatial_layers); + for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) { + int n = svc_params_->scaling_factor_num[sid]; + int d = svc_params_->scaling_factor_den[sid]; + resolutions[sid] = + RenderResolution(cfg_.g_w * n / d, cfg_.g_h * n / d); + } + } else { + resolutions = {RenderResolution(cfg_.g_w, cfg_.g_h)}; + } + } + encoded_image_callback_->OnEncodedImage(encoded_image, + &codec_specific_info); + } + } + + return WEBRTC_VIDEO_CODEC_OK; +} + +void LibaomAv1Encoder::SetRates(const RateControlParameters& parameters) { + if (!inited_) { + RTC_LOG(LS_WARNING) << "SetRates() while encoder is not initialized"; + return; + } + if (parameters.framerate_fps < kMinimumFrameRate) { + RTC_LOG(LS_WARNING) << "Unsupported framerate (must be >= " + << kMinimumFrameRate + << " ): " << parameters.framerate_fps; + return; + } + if (parameters.bitrate.get_sum_bps() == 0) { + RTC_LOG(LS_WARNING) << "Attempt to set target bit rate to zero"; + return; + } + + // Check input target bit rate value. + uint32_t rc_target_bitrate_kbps = parameters.bitrate.get_sum_kbps(); + if (encoder_settings_.maxBitrate > 0) + RTC_DCHECK_LE(rc_target_bitrate_kbps, encoder_settings_.maxBitrate); + RTC_DCHECK_GE(rc_target_bitrate_kbps, encoder_settings_.minBitrate); + + svc_controller_->OnRatesUpdated(parameters.bitrate); + // Set target bit rate. 
+ cfg_.rc_target_bitrate = rc_target_bitrate_kbps; + + if (SvcEnabled()) { + for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) { + // libaom bitrate for spatial id S and temporal id T means bitrate + // of frames with spatial_id=S and temporal_id<=T + // while `parameters.bitrate` provdies bitrate of frames with + // spatial_id=S and temporal_id=T + int accumulated_bitrate_bps = 0; + for (int tid = 0; tid < svc_params_->number_temporal_layers; ++tid) { + int layer_index = sid * svc_params_->number_temporal_layers + tid; + accumulated_bitrate_bps += parameters.bitrate.GetBitrate(sid, tid); + // `svc_params.layer_target_bitrate` expects bitrate in kbps. + svc_params_->layer_target_bitrate[layer_index] = + accumulated_bitrate_bps / 1000; + } + } + aom_codec_control(&ctx_, AV1E_SET_SVC_PARAMS, &*svc_params_); + } + + // Set frame rate to closest integer value. + encoder_settings_.maxFramerate = + static_cast(parameters.framerate_fps + 0.5); + + // Update encoder context. + aom_codec_err_t error_code = aom_codec_enc_config_set(&ctx_, &cfg_); + if (error_code != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "Error configuring encoder, error code: " + << error_code; + } +} + +VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { + EncoderInfo info; + info.supports_native_handle = false; + info.implementation_name = "libaom"; + info.has_trusted_rate_controller = true; + info.is_hardware_accelerated = false; + info.scaling_settings = VideoEncoder::ScalingSettings(kMinQindex, kMaxQindex); + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420}; + return info; +} + +} // namespace + +const bool kIsLibaomAv1EncoderSupported = true; + +std::unique_ptr CreateLibaomAv1Encoder() { + return std::make_unique( + std::make_unique()); +} + +std::unique_ptr CreateLibaomAv1Encoder( + std::unique_ptr svc_controller) { + return std::make_unique(std::move(svc_controller)); +} + +} // namespace webrtc diff --git 
a/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/modules/video_coding/codecs/av1/libaom_av1_encoder.h new file mode 100644 index 0000000000..04a2b65f54 --- /dev/null +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_H_ +#define MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_H_ + +#include + +#include "absl/base/attributes.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +ABSL_CONST_INIT extern const bool kIsLibaomAv1EncoderSupported; + +std::unique_ptr CreateLibaomAv1Encoder(); +std::unique_ptr CreateLibaomAv1Encoder( + std::unique_ptr controller); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_H_ diff --git a/modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc similarity index 55% rename from modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.cc rename to modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc index abaf129d8b..f394260865 100644 --- a/modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -7,12 +7,18 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" -#include "modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.h" +#include + +#include "api/video_codecs/video_encoder.h" namespace webrtc { -MockRtcpBandwidthObserver::MockRtcpBandwidthObserver() = default; -MockRtcpBandwidthObserver::~MockRtcpBandwidthObserver() = default; +const bool kIsLibaomAv1EncoderSupported = false; + +std::unique_ptr CreateLibaomAv1Encoder() { + return nullptr; +} } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc new file mode 100644 index 0000000000..1e457dfbf2 --- /dev/null +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" + +#include +#include + +#include "absl/types/optional.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalability_structure_l1t2.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::SizeIs; + +VideoCodec DefaultCodecSettings() { + VideoCodec codec_settings; + codec_settings.width = 320; + codec_settings.height = 180; + codec_settings.maxFramerate = 30; + codec_settings.maxBitrate = 1000; + codec_settings.qpMax = 63; + return codec_settings; +} + +VideoEncoder::Settings DefaultEncoderSettings() { + return VideoEncoder::Settings( + VideoEncoder::Capabilities(/*loss_notification=*/false), + /*number_of_cores=*/1, /*max_payload_size=*/1200); +} + +TEST(LibaomAv1EncoderTest, CanCreate) { + std::unique_ptr encoder = CreateLibaomAv1Encoder(); + EXPECT_TRUE(encoder); +} + +TEST(LibaomAv1EncoderTest, InitAndRelease) { + std::unique_ptr encoder = CreateLibaomAv1Encoder(); + ASSERT_TRUE(encoder); + VideoCodec codec_settings = DefaultCodecSettings(); + EXPECT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK); +} + +TEST(LibaomAv1EncoderTest, NoBitrateOnTopLayerRefecltedInActiveDecodeTargets) { + // Configure encoder with 2 temporal layers. 
+ std::unique_ptr encoder = + CreateLibaomAv1Encoder(std::make_unique()); + VideoCodec codec_settings = DefaultCodecSettings(); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + + VideoEncoder::RateControlParameters rate_parameters; + rate_parameters.framerate_fps = 30; + rate_parameters.bitrate.SetBitrate(0, /*temporal_index=*/0, 300'000); + rate_parameters.bitrate.SetBitrate(0, /*temporal_index=*/1, 0); + encoder->SetRates(rate_parameters); + + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder).SetNumInputFrames(1).Encode(); + ASSERT_THAT(encoded_frames, SizeIs(1)); + ASSERT_NE(encoded_frames[0].codec_specific_info.generic_frame_info, + absl::nullopt); + // Assuming L1T2 structure uses 1st decode target for T0 and 2nd decode target + // for T0+T1 frames, expect only 1st decode target is active. + EXPECT_EQ(encoded_frames[0] + .codec_specific_info.generic_frame_info->active_decode_targets, + 0b01); +} + +TEST(LibaomAv1EncoderTest, SetsEndOfPictureForLastFrameInTemporalUnit) { + std::unique_ptr encoder = CreateLibaomAv1Encoder(); + VideoCodec codec_settings = DefaultCodecSettings(); + // Configure encoder with 3 spatial layers. 
+ codec_settings.SetScalabilityMode("L3T1"); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder).SetNumInputFrames(2).Encode(); + ASSERT_THAT(encoded_frames, SizeIs(6)); + EXPECT_FALSE(encoded_frames[0].codec_specific_info.end_of_picture); + EXPECT_FALSE(encoded_frames[1].codec_specific_info.end_of_picture); + EXPECT_TRUE(encoded_frames[2].codec_specific_info.end_of_picture); + EXPECT_FALSE(encoded_frames[3].codec_specific_info.end_of_picture); + EXPECT_FALSE(encoded_frames[4].codec_specific_info.end_of_picture); + EXPECT_TRUE(encoded_frames[5].codec_specific_info.end_of_picture); +} + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc new file mode 100644 index 0000000000..78725ab626 --- /dev/null +++ b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc @@ -0,0 +1,331 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include + +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ContainerEq; +using ::testing::Each; +using ::testing::ElementsAreArray; +using ::testing::Ge; +using ::testing::IsEmpty; +using ::testing::Not; +using ::testing::NotNull; +using ::testing::Pointwise; +using ::testing::SizeIs; +using ::testing::Truly; +using ::testing::Values; + +// Use small resolution for this test to make it faster. 
+constexpr int kWidth = 320; +constexpr int kHeight = 180; +constexpr int kFramerate = 30; + +VideoCodec DefaultCodecSettings() { + VideoCodec codec_settings; + codec_settings.width = kWidth; + codec_settings.height = kHeight; + codec_settings.maxFramerate = kFramerate; + codec_settings.maxBitrate = 1000; + codec_settings.qpMax = 63; + return codec_settings; +} +VideoEncoder::Settings DefaultEncoderSettings() { + return VideoEncoder::Settings( + VideoEncoder::Capabilities(/*loss_notification=*/false), + /*number_of_cores=*/1, /*max_payload_size=*/1200); +} + +class TestAv1Decoder { + public: + explicit TestAv1Decoder(int decoder_id) + : decoder_id_(decoder_id), decoder_(CreateLibaomAv1Decoder()) { + if (decoder_ == nullptr) { + ADD_FAILURE() << "Failed to create a decoder#" << decoder_id_; + return; + } + EXPECT_EQ(decoder_->InitDecode(/*codec_settings=*/nullptr, + /*number_of_cores=*/1), + WEBRTC_VIDEO_CODEC_OK); + EXPECT_EQ(decoder_->RegisterDecodeCompleteCallback(&callback_), + WEBRTC_VIDEO_CODEC_OK); + } + // This class requires pointer stability and thus not copyable nor movable. + TestAv1Decoder(const TestAv1Decoder&) = delete; + TestAv1Decoder& operator=(const TestAv1Decoder&) = delete; + + void Decode(int64_t frame_id, const EncodedImage& image) { + ASSERT_THAT(decoder_, NotNull()); + int32_t error = decoder_->Decode(image, /*missing_frames=*/false, + /*render_time_ms=*/image.capture_time_ms_); + if (error != WEBRTC_VIDEO_CODEC_OK) { + ADD_FAILURE() << "Failed to decode frame id " << frame_id + << " with error code " << error << " by decoder#" + << decoder_id_; + return; + } + decoded_ids_.push_back(frame_id); + } + + const std::vector& decoded_frame_ids() const { return decoded_ids_; } + size_t num_output_frames() const { return callback_.num_called(); } + + private: + // Decoder callback that only counts how many times it was called. 
+ // While it is tempting to replace it with a simple mock, that one requires + // to set expectation on number of calls in advance. Tests below unsure about + // expected number of calls until after calls are done. + class DecoderCallback : public DecodedImageCallback { + public: + size_t num_called() const { return num_called_; } + + private: + int32_t Decoded(VideoFrame& /*decoded_image*/) override { + ++num_called_; + return 0; + } + void Decoded(VideoFrame& /*decoded_image*/, + absl::optional /*decode_time_ms*/, + absl::optional /*qp*/) override { + ++num_called_; + } + + int num_called_ = 0; + }; + + const int decoder_id_; + std::vector decoded_ids_; + DecoderCallback callback_; + const std::unique_ptr decoder_; +}; + +TEST(LibaomAv1Test, EncodeDecode) { + TestAv1Decoder decoder(0); + std::unique_ptr encoder = CreateLibaomAv1Encoder(); + VideoCodec codec_settings = DefaultCodecSettings(); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder).SetNumInputFrames(4).Encode(); + for (size_t frame_id = 0; frame_id < encoded_frames.size(); ++frame_id) { + decoder.Decode(static_cast(frame_id), + encoded_frames[frame_id].encoded_image); + } + + // Check encoder produced some frames for decoder to decode. + ASSERT_THAT(encoded_frames, Not(IsEmpty())); + // Check decoder found all of them valid. + EXPECT_THAT(decoder.decoded_frame_ids(), SizeIs(encoded_frames.size())); + // Check each of them produced an output frame. 
+ EXPECT_EQ(decoder.num_output_frames(), decoder.decoded_frame_ids().size()); +} + +struct LayerId { + friend bool operator==(const LayerId& lhs, const LayerId& rhs) { + return std::tie(lhs.spatial_id, lhs.temporal_id) == + std::tie(rhs.spatial_id, rhs.temporal_id); + } + friend bool operator<(const LayerId& lhs, const LayerId& rhs) { + return std::tie(lhs.spatial_id, lhs.temporal_id) < + std::tie(rhs.spatial_id, rhs.temporal_id); + } + friend std::ostream& operator<<(std::ostream& s, const LayerId& layer) { + return s << "S" << layer.spatial_id << "T" << layer.temporal_id; + } + + int spatial_id = 0; + int temporal_id = 0; +}; + +struct SvcTestParam { + std::string name; + int num_frames_to_generate; + std::map configured_bitrates; +}; + +class LibaomAv1SvcTest : public ::testing::TestWithParam {}; + +TEST_P(LibaomAv1SvcTest, EncodeAndDecodeAllDecodeTargets) { + size_t num_decode_targets = CreateScalabilityStructure(GetParam().name) + ->DependencyStructure() + .num_decode_targets; + + std::unique_ptr encoder = CreateLibaomAv1Encoder(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.SetScalabilityMode(GetParam().name); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(GetParam().num_frames_to_generate) + .SetResolution({kWidth, kHeight}) + .Encode(); + + ASSERT_THAT( + encoded_frames, + Each(Truly([&](const EncodedVideoFrameProducer::EncodedFrame& frame) { + return frame.codec_specific_info.generic_frame_info && + frame.codec_specific_info.generic_frame_info + ->decode_target_indications.size() == num_decode_targets; + }))); + + for (size_t dt = 0; dt < num_decode_targets; ++dt) { + TestAv1Decoder decoder(dt); + std::vector requested_ids; + for (int64_t frame_id = 0; + frame_id < static_cast(encoded_frames.size()); ++frame_id) { + const EncodedVideoFrameProducer::EncodedFrame& frame = + 
encoded_frames[frame_id]; + if (frame.codec_specific_info.generic_frame_info + ->decode_target_indications[dt] != + DecodeTargetIndication::kNotPresent) { + requested_ids.push_back(frame_id); + decoder.Decode(frame_id, frame.encoded_image); + } + } + + ASSERT_THAT(requested_ids, SizeIs(Ge(2u))); + // Check decoder found all of them valid. + EXPECT_THAT(decoder.decoded_frame_ids(), ContainerEq(requested_ids)) + << "Decoder#" << dt; + // Check each of them produced an output frame. + EXPECT_EQ(decoder.num_output_frames(), decoder.decoded_frame_ids().size()) + << "Decoder#" << dt; + } +} + +MATCHER(SameLayerIdAndBitrateIsNear, "") { + // First check if layer id is the same. + return std::get<0>(arg).first == std::get<1>(arg).first && + // check measured bitrate is not much lower than requested. + std::get<0>(arg).second >= std::get<1>(arg).second * 0.8 && + // check measured bitrate is not much larger than requested. + std::get<0>(arg).second <= std::get<1>(arg).second * 1.1; +} + +TEST_P(LibaomAv1SvcTest, SetRatesMatchMeasuredBitrate) { + const SvcTestParam param = GetParam(); + if (param.configured_bitrates.empty()) { + // Rates are not configured for this particular structure, skip the test. 
+ return; + } + constexpr TimeDelta kDuration = TimeDelta::Seconds(5); + + VideoBitrateAllocation allocation; + for (const auto& kv : param.configured_bitrates) { + allocation.SetBitrate(kv.first.spatial_id, kv.first.temporal_id, + kv.second.bps()); + } + + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateScalabilityStructure(param.name)); + ASSERT_TRUE(encoder); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.maxBitrate = allocation.get_sum_kbps(); + codec_settings.maxFramerate = 30; + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + + encoder->SetRates(VideoEncoder::RateControlParameters( + allocation, codec_settings.maxFramerate)); + + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(codec_settings.maxFramerate * kDuration.seconds()) + .SetResolution({codec_settings.width, codec_settings.height}) + .SetFramerateFps(codec_settings.maxFramerate) + .Encode(); + + // Calculate size of each layer. + std::map layer_size; + for (const auto& frame : encoded_frames) { + ASSERT_TRUE(frame.codec_specific_info.generic_frame_info); + const auto& layer = *frame.codec_specific_info.generic_frame_info; + LayerId layer_id = {layer.spatial_id, layer.temporal_id}; + // This is almost same as + // layer_size[layer_id] += DataSize::Bytes(frame.encoded_image.size()); + // but avoids calling deleted default constructor for DataSize. + layer_size.emplace(layer_id, DataSize::Zero()).first->second += + DataSize::Bytes(frame.encoded_image.size()); + } + // Convert size of the layer into bitrate of that layer. 
+ std::vector> measured_bitrates; + for (const auto& kv : layer_size) { + measured_bitrates.emplace_back(kv.first, kv.second / kDuration); + } + EXPECT_THAT(measured_bitrates, Pointwise(SameLayerIdAndBitrateIsNear(), + param.configured_bitrates)); +} + +INSTANTIATE_TEST_SUITE_P( + Svc, + LibaomAv1SvcTest, + Values(SvcTestParam{"NONE", /*num_frames_to_generate=*/4}, + SvcTestParam{"L1T2", + /*num_frames_to_generate=*/4, + /*configured_bitrates=*/ + {{{0, 0}, DataRate::KilobitsPerSec(60)}, + {{0, 1}, DataRate::KilobitsPerSec(40)}}}, + SvcTestParam{"L1T3", /*num_frames_to_generate=*/8}, + SvcTestParam{"L2T1", + /*num_frames_to_generate=*/3, + /*configured_bitrates=*/ + {{{0, 0}, DataRate::KilobitsPerSec(30)}, + {{1, 0}, DataRate::KilobitsPerSec(70)}}}, + SvcTestParam{"L2T1h", + /*num_frames_to_generate=*/3, + /*configured_bitrates=*/ + {{{0, 0}, DataRate::KilobitsPerSec(30)}, + {{1, 0}, DataRate::KilobitsPerSec(70)}}}, + SvcTestParam{"L2T1_KEY", /*num_frames_to_generate=*/3}, + SvcTestParam{"L3T1", /*num_frames_to_generate=*/3}, + SvcTestParam{"L3T3", /*num_frames_to_generate=*/8}, + SvcTestParam{"S2T1", /*num_frames_to_generate=*/3}, + SvcTestParam{"L2T2", /*num_frames_to_generate=*/4}, + SvcTestParam{"L2T2_KEY", /*num_frames_to_generate=*/4}, + SvcTestParam{"L2T2_KEY_SHIFT", + /*num_frames_to_generate=*/4, + /*configured_bitrates=*/ + {{{0, 0}, DataRate::KilobitsPerSec(70)}, + {{0, 1}, DataRate::KilobitsPerSec(30)}, + {{1, 0}, DataRate::KilobitsPerSec(110)}, + {{1, 1}, DataRate::KilobitsPerSec(80)}}}), + [](const testing::TestParamInfo& info) { + return info.param.name; + }); + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/codecs/h264/h264.cc b/modules/video_coding/codecs/h264/h264.cc index 147e964b54..be5b031e88 100644 --- a/modules/video_coding/codecs/h264/h264.cc +++ b/modules/video_coding/codecs/h264/h264.cc @@ -16,7 +16,6 @@ #include "absl/types/optional.h" #include "api/video_codecs/sdp_video_format.h" -#include 
"media/base/h264_profile_level_id.h" #include "media/base/media_constants.h" #if defined(WEBRTC_USE_H264) @@ -44,6 +43,8 @@ bool IsH264CodecSupported() { #endif } +} // namespace + SdpVideoFormat CreateH264Format(H264::Profile profile, H264::Level level, const std::string& packetization_mode) { @@ -57,8 +58,6 @@ SdpVideoFormat CreateH264Format(H264::Profile profile, {cricket::kH264FmtpPacketizationMode, packetization_mode}}); } -} // namespace - void DisableRtcUseH264() { #if defined(WEBRTC_USE_H264) g_rtc_use_h264 = false; diff --git a/modules/video_coding/codecs/h264/h264_color_space.h b/modules/video_coding/codecs/h264/h264_color_space.h index 392ccaf563..aec76efbac 100644 --- a/modules/video_coding/codecs/h264/h264_color_space.h +++ b/modules/video_coding/codecs/h264/h264_color_space.h @@ -16,10 +16,6 @@ // #ifdef unless needed and tested. #ifdef WEBRTC_USE_H264 -#if defined(WEBRTC_WIN) && !defined(__clang__) -#error "See: bugs.webrtc.org/9213#c13." -#endif - #include "api/video/color_space.h" extern "C" { diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc index 6725a3b7c7..9002b87461 100644 --- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -32,11 +32,11 @@ extern "C" { #include "common_video/include/video_frame_buffer.h" #include "modules/video_coding/codecs/h264/h264_color_space.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" +#include "third_party/libyuv/include/libyuv/convert.h" namespace webrtc { @@ -104,7 +104,7 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // TODO(nisse): Delete that feature from the video pool, instead add // an explicit call to InitializeData here. 
rtc::scoped_refptr frame_buffer = - decoder->pool_.CreateBuffer(width, height); + decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); int y_size = width * height; int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight(); @@ -151,10 +151,13 @@ void H264DecoderImpl::AVFreeBuffer2(void* opaque, uint8_t* data) { } H264DecoderImpl::H264DecoderImpl() - : pool_(true), + : ffmpeg_buffer_pool_(true), decoded_image_callback_(nullptr), has_reported_init_(false), - has_reported_error_(false) {} + has_reported_error_(false), + preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode") + ? VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} H264DecoderImpl::~H264DecoderImpl() { Release(); @@ -220,7 +223,8 @@ int32_t H264DecoderImpl::InitDecode(const VideoCodec* codec_settings, av_frame_.reset(av_frame_alloc()); if (codec_settings && codec_settings->buffer_pool_size) { - if (!pool_.Resize(*codec_settings->buffer_pool_size)) { + if (!ffmpeg_buffer_pool_.Resize(*codec_settings->buffer_pool_size) || + !output_buffer_pool_.Resize(*codec_settings->buffer_pool_size)) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } } @@ -260,7 +264,9 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, AVPacket packet; av_init_packet(&packet); - packet.data = input_image.mutable_data(); + // packet.data has a non-const type, but isn't modified by + // avcodec_send_packet. 
+ packet.data = const_cast(input_image.data()); if (input_image.size() > static_cast(std::numeric_limits::max())) { ReportError(); @@ -324,12 +330,25 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, i420_buffer->DataV() + i420_buffer->StrideV() * i420_buffer->height() / 2); - auto cropped_buffer = WrapI420Buffer( + rtc::scoped_refptr cropped_buffer = WrapI420Buffer( av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], av_frame_->linesize[kVPlaneIndex], rtc::KeepRefUntilDone(i420_buffer)); + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420(); + auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer( + cropped_i420->width(), cropped_i420->height()); + libyuv::I420ToNV12(cropped_i420->DataY(), cropped_i420->StrideY(), + cropped_i420->DataU(), cropped_i420->StrideU(), + cropped_i420->DataV(), cropped_i420->StrideV(), + nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), + i420_buffer->width(), i420_buffer->height()); + cropped_buffer = nv12_buffer; + } + // Pass on color space from input frame if explicitly specified. const ColorSpace& color_space = input_image.ColorSpace() ? *input_image.ColorSpace() diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.h b/modules/video_coding/codecs/h264/h264_decoder_impl.h index 3c038e6425..47af12c8ca 100644 --- a/modules/video_coding/codecs/h264/h264_decoder_impl.h +++ b/modules/video_coding/codecs/h264/h264_decoder_impl.h @@ -17,10 +17,6 @@ // #ifdef unless needed and tested. #ifdef WEBRTC_USE_H264 -#if defined(WEBRTC_WIN) && !defined(__clang__) -#error "See: bugs.webrtc.org/9213#c13." 
-#endif - #include #include "modules/video_coding/codecs/h264/include/h264.h" @@ -44,7 +40,7 @@ extern "C" { } // extern "C" #include "common_video/h264/h264_bitstream_parser.h" -#include "common_video/include/i420_buffer_pool.h" +#include "common_video/include/video_frame_buffer_pool.h" namespace webrtc { @@ -92,7 +88,10 @@ class H264DecoderImpl : public H264Decoder { void ReportInit(); void ReportError(); - I420BufferPool pool_; + // Used by ffmpeg via |AVGetBuffer2()| to allocate I420 images. + VideoFrameBufferPool ffmpeg_buffer_pool_; + // Used to allocate NV12 images if NV12 output is preferred. + VideoFrameBufferPool output_buffer_pool_; std::unique_ptr av_context_; std::unique_ptr av_frame_; @@ -102,6 +101,9 @@ class H264DecoderImpl : public H264Decoder { bool has_reported_error_; webrtc::H264BitstreamParser h264_bitstream_parser_; + + // Decoder should produce this format if possible. + const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc index 53fac77b9b..ccf0bc515a 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -87,19 +87,15 @@ VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) { } // namespace // Helper method used by H264EncoderImpl::Encode. -// Copies the encoded bytes from |info| to |encoded_image| and updates the -// fragmentation information of |frag_header|. The |encoded_image->_buffer| may -// be deleted and reallocated if a bigger buffer is required. +// Copies the encoded bytes from |info| to |encoded_image|. The +// |encoded_image->_buffer| may be deleted and reallocated if a bigger buffer is +// required. // // After OpenH264 encoding, the encoded bytes are stored in |info| spread out // over a number of layers and "NAL units". 
Each NAL unit is a fragment starting // with the four-byte start code {0,0,0,1}. All of this data (including the -// start codes) is copied to the |encoded_image->_buffer| and the |frag_header| -// is updated to point to each fragment, with offsets and lengths set as to -// exclude the start codes. -static void RtpFragmentize(EncodedImage* encoded_image, - SFrameBSInfo* info, - RTPFragmentationHeader* frag_header) { +// start codes) is copied to the |encoded_image->_buffer|. +static void RtpFragmentize(EncodedImage* encoded_image, SFrameBSInfo* info) { // Calculate minimum buffer size required to hold encoded data. size_t required_capacity = 0; size_t fragments_count = 0; @@ -114,12 +110,12 @@ static void RtpFragmentize(EncodedImage* encoded_image, } } // TODO(nisse): Use a cache or buffer pool to avoid allocation? - encoded_image->SetEncodedData(EncodedImageBuffer::Create(required_capacity)); + auto buffer = EncodedImageBuffer::Create(required_capacity); + encoded_image->SetEncodedData(buffer); // Iterate layers and NAL units, note each NAL unit as a fragment and copy // the data to |encoded_image->_buffer|. const uint8_t start_code[4] = {0, 0, 0, 1}; - frag_header->VerifyAndAllocateFragmentationHeader(fragments_count); size_t frag = 0; encoded_image->set_size(0); for (int layer = 0; layer < info->iLayerNum; ++layer) { @@ -134,15 +130,10 @@ static void RtpFragmentize(EncodedImage* encoded_image, RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 1], start_code[1]); RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 2], start_code[2]); RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 3], start_code[3]); - frag_header->fragmentationOffset[frag] = - encoded_image->size() + layer_len + sizeof(start_code); - frag_header->fragmentationLength[frag] = - layerInfo.pNalLengthInByte[nal] - sizeof(start_code); layer_len += layerInfo.pNalLengthInByte[nal]; } // Copy the entire layer's data (including start codes). 
- memcpy(encoded_image->data() + encoded_image->size(), layerInfo.pBsBuf, - layer_len); + memcpy(buffer->data() + encoded_image->size(), layerInfo.pBsBuf, layer_len); encoded_image->set_size(encoded_image->size() + layer_len); } } @@ -284,7 +275,6 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width, codec_.simulcastStream[idx].height); encoded_images_[i].SetEncodedData(EncodedImageBuffer::Create(new_capacity)); - encoded_images_[i]._completeFrame = true; encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width; encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height; encoded_images_[i].set_size(0); @@ -295,7 +285,7 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, SimulcastRateAllocator init_allocator(codec_); VideoBitrateAllocation allocation = init_allocator.Allocate(VideoBitrateAllocationParameters( - DataRate::kbps(codec_.startBitrate), codec_.maxFramerate)); + DataRate::KilobitsPerSec(codec_.startBitrate), codec_.maxFramerate)); SetRates(RateControlParameters(allocation, codec_.maxFramerate)); return WEBRTC_VIDEO_CODEC_OK; } @@ -485,8 +475,7 @@ int32_t H264EncoderImpl::Encode( // Split encoded image up into fragments. This also updates // |encoded_image_|. - RTPFragmentationHeader frag_header; - RtpFragmentize(&encoded_images_[i], &info, &frag_header); + RtpFragmentize(&encoded_images_[i], &info); // Encoder can skip frames to save bandwidth in which case // |encoded_images_[i]._length| == 0. @@ -518,7 +507,7 @@ int32_t H264EncoderImpl::Encode( } } encoded_image_callback_->OnEncodedImage(encoded_images_[i], - &codec_specific, &frag_header); + &codec_specific); } } return WEBRTC_VIDEO_CODEC_OK; @@ -539,10 +528,21 @@ SEncParamExt H264EncoderImpl::CreateEncoderParams(size_t i) const { } else { RTC_NOTREACHED(); } + + // Reuse SPS id if possible. This helps to avoid reset of chromium HW decoder + // on each key-frame. 
+ // Note that WebRTC resets encoder on resolution change which makes all + // EParameterSetStrategy modes except INCREASING_ID (default) essentially + // equivalent to CONSTANT_ID. + //https://bugs.chromium.org/p/chromium/issues/detail?id=1111273 + encoder_params.eSpsPpsIdStrategy = SPS_LISTING; + encoder_params.iPicWidth = configurations_[i].width; encoder_params.iPicHeight = configurations_[i].height; encoder_params.iTargetBitrate = configurations_[i].target_bps; - encoder_params.iMaxBitrate = configurations_[i].max_bps; + // Keep unspecified. WebRTC's max codec bitrate is not the same setting + // as OpenH264's iMaxBitrate. More details in https://crbug.com/webrtc/11543 + encoder_params.iMaxBitrate = UNSPECIFIED_BIT_RATE; // Rate Control mode encoder_params.iRCMode = RC_BITRATE_MODE; encoder_params.fMaxFrameRate = configurations_[i].max_frame_rate; @@ -553,6 +553,12 @@ SEncParamExt H264EncoderImpl::CreateEncoderParams(size_t i) const { // |uiIntraPeriod| - multiple of GOP size // |keyFrameInterval| - number of frames encoder_params.uiIntraPeriod = configurations_[i].key_frame_interval; + // Reuse SPS id if possible. This helps to avoid reset of chromium HW decoder + // on each key-frame. + // Note that WebRTC resets encoder on resolution change which makes all + // EParameterSetStrategy modes except INCREASING_ID (default) essentially + // equivalent to CONSTANT_ID. + encoder_params.eSpsPpsIdStrategy = SPS_LISTING; encoder_params.uiMaxNalSize = 0; // Threading model: use auto. // 0: auto (dynamic imp. 
internal encoder) @@ -623,6 +629,7 @@ VideoEncoder::EncoderInfo H264EncoderImpl::GetEncoderInfo() const { info.is_hardware_accelerated = false; info.has_internal_source = false; info.supports_simulcast = true; + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420}; return info; } diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.h b/modules/video_coding/codecs/h264/h264_encoder_impl.h index ba996366a3..4eb4ad38c8 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.h +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.h @@ -17,10 +17,6 @@ // #ifdef unless needed and tested. #ifdef WEBRTC_USE_H264 -#if defined(WEBRTC_WIN) && !defined(__clang__) -#error "See: bugs.webrtc.org/9213#c13." -#endif - #include #include @@ -72,7 +68,7 @@ class H264EncoderImpl : public H264Encoder { EncodedImageCallback* callback) override; void SetRates(const RateControlParameters& parameters) override; - // The result of encoding - an EncodedImage and RTPFragmentationHeader - are + // The result of encoding - an EncodedImage and CodecSpecificInfo - are // passed to the encode complete callback. int32_t Encode(const VideoFrame& frame, const std::vector* frame_types) override; diff --git a/modules/video_coding/codecs/h264/include/h264.h b/modules/video_coding/codecs/h264/include/h264.h index f5cebcfe62..70ca817988 100644 --- a/modules/video_coding/codecs/h264/include/h264.h +++ b/modules/video_coding/codecs/h264/include/h264.h @@ -13,9 +13,11 @@ #define MODULES_VIDEO_CODING_CODECS_H264_INCLUDE_H264_H_ #include +#include #include #include "media/base/codec.h" +#include "media/base/h264_profile_level_id.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/system/rtc_export.h" @@ -23,6 +25,12 @@ namespace webrtc { struct SdpVideoFormat; +// Creates an H264 SdpVideoFormat entry with specified paramters. 
+RTC_EXPORT SdpVideoFormat +CreateH264Format(H264::Profile profile, + H264::Level level, + const std::string& packetization_mode); + // Set to disable the H.264 encoder/decoder implementations that are provided if // |rtc_use_h264| build flag is true (if false, this function does nothing). // This function should only be called before or during WebRTC initialization diff --git a/modules/video_coding/codecs/h265/include/h265_globals.h b/modules/video_coding/codecs/h265/include/h265_globals.h new file mode 100644 index 0000000000..bc0eef236c --- /dev/null +++ b/modules/video_coding/codecs/h265/include/h265_globals.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains codec dependent definitions that are needed in +// order to compile the WebRTC codebase, even if this codec is not used. + +#ifndef MODULES_VIDEO_CODING_CODECS_H265_INCLUDE_H265_GLOBALS_H_ +#define MODULES_VIDEO_CODING_CODECS_H265_INCLUDE_H265_GLOBALS_H_ + +#ifndef DISABLE_H265 + +#include "modules/video_coding/codecs/h264/include/h264_globals.h" + +namespace webrtc { + +// The packetization types that we support: single, aggregated, and fragmented. +enum H265PacketizationTypes { + kH265SingleNalu, // This packet contains a single NAL unit. + kH265AP, // This packet contains aggregation Packet. + // If this packet has an associated NAL unit type, + // it'll be for the first such aggregated packet. + kH265FU, // This packet contains a FU (fragmentation + // unit) packet, meaning it is a part of a frame + // that was too large to fit into a single packet. 
+}; + +struct H265NaluInfo { + uint8_t type; + int vps_id; + int sps_id; + int pps_id; +}; + +enum class H265PacketizationMode { + NonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed + SingleNalUnit // Mode 0 - only single NALU allowed +}; + +struct RTPVideoHeaderH265 { + // The NAL unit type. If this is a header for a fragmented packet, it's the + // NAL unit type of the original data. If this is the header for an aggregated + // packet, it's the NAL unit type of the first NAL unit in the packet. + uint8_t nalu_type; + H265PacketizationTypes packetization_type; + H265NaluInfo nalus[kMaxNalusPerPacket]; + size_t nalus_length; + // The packetization type of this buffer - single, aggregated or fragmented. + H265PacketizationMode packetization_mode; +}; + +} // namespace webrtc + +#endif + +#endif // MODULES_VIDEO_CODING_CODECS_H265_INCLUDE_H265_GLOBALS_H_ diff --git a/modules/video_coding/codecs/multiplex/OWNERS b/modules/video_coding/codecs/multiplex/OWNERS deleted file mode 100644 index 6b72be3336..0000000000 --- a/modules/video_coding/codecs/multiplex/OWNERS +++ /dev/null @@ -1 +0,0 @@ -emircan@webrtc.org diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h index 9e718303b7..c43109e460 100644 --- a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h +++ b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h @@ -21,7 +21,7 @@ #include "api/video_codecs/video_encoder_factory.h" #include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -57,8 +57,7 @@ class MultiplexEncoderAdapter : public VideoEncoder { EncodedImageCallback::Result OnEncodedImage( AlphaCodecStream stream_idx, const EncodedImage& encodedImage, - const 
CodecSpecificInfo* codecSpecificInfo, - const RTPFragmentationHeader* fragmentation); + const CodecSpecificInfo* codecSpecificInfo); private: // Wrapper class that redirects OnEncodedImage() calls. @@ -71,7 +70,7 @@ class MultiplexEncoderAdapter : public VideoEncoder { EncodedImageCallback* encoded_complete_callback_; std::map stashed_images_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); uint16_t picture_index_ = 0; std::vector multiplex_dummy_planes_; @@ -79,7 +78,7 @@ class MultiplexEncoderAdapter : public VideoEncoder { int key_frame_interval_; EncodedImage combined_image_; - rtc::CriticalSection crit_; + Mutex mutex_; const bool supports_augmented_data_; int augmenting_data_size_ = 0; diff --git a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc index cd39e72c29..39c14e412c 100644 --- a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc +++ b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc @@ -76,23 +76,26 @@ struct MultiplexDecoderAdapter::DecodedImageData { decoded_image_(decoded_image), decode_time_ms_(decode_time_ms), qp_(qp) {} + + DecodedImageData() = delete; + DecodedImageData(const DecodedImageData&) = delete; + DecodedImageData& operator=(const DecodedImageData&) = delete; + const AlphaCodecStream stream_idx_; VideoFrame decoded_image_; const absl::optional decode_time_ms_; const absl::optional qp_; - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DecodedImageData); }; struct MultiplexDecoderAdapter::AugmentingData { AugmentingData(std::unique_ptr augmenting_data, uint16_t data_size) : data_(std::move(augmenting_data)), size_(data_size) {} + AugmentingData() = delete; + AugmentingData(const AugmentingData&) = delete; + AugmentingData& operator=(const AugmentingData&) = delete; + std::unique_ptr data_; const uint16_t size_; - - private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AugmentingData); }; 
MultiplexDecoderAdapter::MultiplexDecoderAdapter( diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc index 38f16d7f2f..6bc306dda8 100644 --- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc +++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc @@ -193,7 +193,7 @@ EncodedImage MultiplexEncodedImagePacker::PackAndRelease( combined_image.SetEncodedData(buffer); // header - header_offset = PackHeader(combined_image.data(), header); + header_offset = PackHeader(buffer->data(), header); RTC_DCHECK_EQ(header.first_component_header_offset, kMultiplexImageHeaderSize); diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h index 4a913fe502..9f9f39ce05 100644 --- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h +++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h @@ -16,6 +16,7 @@ #include #include "api/video/encoded_image.h" +#include "api/video_codecs/video_codec.h" namespace webrtc { diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc index 70ffb4254a..0fbbc4271f 100644 --- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc +++ b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc @@ -17,7 +17,6 @@ #include "common_video/include/video_frame_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "media/base/video_common.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" #include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" @@ -35,12 +34,11 @@ class MultiplexEncoderAdapter::AdapterEncodedImageCallback 
EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { + const CodecSpecificInfo* codec_specific_info) override { if (!adapter_) return Result(Result::OK); return adapter_->OnEncodedImage(stream_idx_, encoded_image, - codec_specific_info, fragmentation); + codec_specific_info); } private: @@ -96,6 +94,12 @@ int MultiplexEncoderAdapter::InitEncode( key_frame_interval_ = video_codec.H264()->keyFrameInterval; video_codec.H264()->keyFrameInterval = 0; break; +#ifndef DISABLE_H265 + case kVideoCodecH265: + key_frame_interval_ = video_codec.H265()->keyFrameInterval; + video_codec.H265()->keyFrameInterval = 0; + break; +#endif default: break; } @@ -103,6 +107,7 @@ int MultiplexEncoderAdapter::InitEncode( encoder_info_ = EncoderInfo(); encoder_info_.implementation_name = "MultiplexEncoderAdapter ("; encoder_info_.requested_resolution_alignment = 1; + encoder_info_.apply_alignment_to_all_simulcast_layers = false; // This needs to be false so that we can do the split in Encode(). 
encoder_info_.supports_native_handle = false; @@ -139,6 +144,10 @@ int MultiplexEncoderAdapter::InitEncode( encoder_info_.requested_resolution_alignment, encoder_impl_info.requested_resolution_alignment); + if (encoder_impl_info.apply_alignment_to_all_simulcast_layers) { + encoder_info_.apply_alignment_to_all_simulcast_layers = true; + } + encoder_info_.has_internal_source = false; encoders_.emplace_back(std::move(encoder)); @@ -180,7 +189,7 @@ int MultiplexEncoderAdapter::Encode( } { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); stashed_images_.emplace( std::piecewise_construct, std::forward_as_tuple(input_image.timestamp()), @@ -242,7 +251,7 @@ void MultiplexEncoderAdapter::SetRates( bitrate_allocation, static_cast(encoders_.size() * parameters.framerate_fps), parameters.bandwidth_allocation - - DataRate::bps(augmenting_data_size_))); + DataRate::BitsPerSec(augmenting_data_size_))); } } @@ -273,7 +282,7 @@ int MultiplexEncoderAdapter::Release() { } encoders_.clear(); adapter_callbacks_.clear(); - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); stashed_images_.clear(); return WEBRTC_VIDEO_CODEC_OK; @@ -286,8 +295,7 @@ VideoEncoder::EncoderInfo MultiplexEncoderAdapter::GetEncoderInfo() const { EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage( AlphaCodecStream stream_idx, const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codecSpecificInfo) { // Save the image MultiplexImageComponent image_component; image_component.component_index = stream_idx; @@ -298,7 +306,7 @@ EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage( // If we don't already own the buffer, make a copy. 
image_component.encoded_image.Retain(); - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto& stashed_image_itr = stashed_images_.find(encodedImage.Timestamp()); const auto& stashed_image_next_itr = std::next(stashed_image_itr, 1); @@ -324,8 +332,7 @@ EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage( CodecSpecificInfo codec_info = *codecSpecificInfo; codec_info.codecType = kVideoCodecMultiplex; - encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info, - fragmentation); + encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info); } stashed_images_.erase(stashed_images_.begin(), stashed_image_next_itr); diff --git a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc index 8983fb022a..770d8b596c 100644 --- a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc +++ b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc @@ -180,21 +180,17 @@ class TestMultiplexAdapter : public VideoCodecUnitTest, private: void SetUp() override { - EXPECT_CALL(*decoder_factory_, Die()); + EXPECT_CALL(*decoder_factory_, Die); // The decoders/encoders will be owned by the caller of // CreateVideoDecoder()/CreateVideoEncoder(). 
- VideoDecoder* decoder1 = VP9Decoder::Create().release(); - VideoDecoder* decoder2 = VP9Decoder::Create().release(); - EXPECT_CALL(*decoder_factory_, CreateVideoDecoderProxy(_)) - .WillOnce(Return(decoder1)) - .WillOnce(Return(decoder2)); - - EXPECT_CALL(*encoder_factory_, Die()); - VideoEncoder* encoder1 = VP9Encoder::Create().release(); - VideoEncoder* encoder2 = VP9Encoder::Create().release(); - EXPECT_CALL(*encoder_factory_, CreateVideoEncoderProxy(_)) - .WillOnce(Return(encoder1)) - .WillOnce(Return(encoder2)); + EXPECT_CALL(*decoder_factory_, CreateVideoDecoder) + .Times(2) + .WillRepeatedly([] { return VP9Decoder::Create(); }); + + EXPECT_CALL(*encoder_factory_, Die); + EXPECT_CALL(*encoder_factory_, CreateVideoEncoder) + .Times(2) + .WillRepeatedly([] { return VP9Encoder::Create(); }); VideoCodecUnitTest::SetUp(); } diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.cc b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc new file mode 100644 index 0000000000..899826eee4 --- /dev/null +++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc @@ -0,0 +1,77 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h" + +#include +#include + +#include "api/test/create_frame_generator.h" +#include "api/test/frame_generator_interface.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +class EncoderCallback : public EncodedImageCallback { + public: + explicit EncoderCallback( + std::vector& output_frames) + : output_frames_(output_frames) {} + + private: + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) override { + output_frames_.push_back({encoded_image, *codec_specific_info}); + return Result(Result::Error::OK); + } + + std::vector& output_frames_; +}; + +} // namespace + +std::vector +EncodedVideoFrameProducer::Encode() { + std::unique_ptr frame_buffer_generator = + test::CreateSquareFrameGenerator( + resolution_.Width(), resolution_.Height(), + test::FrameGeneratorInterface::OutputType::kI420, absl::nullopt); + + std::vector encoded_frames; + EncoderCallback encoder_callback(encoded_frames); + RTC_CHECK_EQ(encoder_.RegisterEncodeCompleteCallback(&encoder_callback), + WEBRTC_VIDEO_CODEC_OK); + + uint32_t rtp_tick = 90000 / framerate_fps_; + for (int i = 0; i < num_input_frames_; ++i) { + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(frame_buffer_generator->NextFrame().buffer) + .set_timestamp_rtp(rtp_timestamp_) + .build(); + rtp_timestamp_ += rtp_tick; + RTC_CHECK_EQ(encoder_.Encode(frame, &next_frame_type_), + WEBRTC_VIDEO_CODEC_OK); + next_frame_type_[0] = VideoFrameType::kVideoFrameDelta; + } + + RTC_CHECK_EQ(encoder_.RegisterEncodeCompleteCallback(nullptr), + WEBRTC_VIDEO_CODEC_OK); + return 
encoded_frames; +} + +} // namespace webrtc diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.h b/modules/video_coding/codecs/test/encoded_video_frame_producer.h new file mode 100644 index 0000000000..2216287b92 --- /dev/null +++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.h @@ -0,0 +1,92 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_ +#define MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_ + +#include + +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/encoded_image.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/include/video_codec_interface.h" + +namespace webrtc { + +// Wrapper around VideoEncoder::Encode for convenient input (generates frames) +// and output (returns encoded frames instead of passing them to callback) +class EncodedVideoFrameProducer { + public: + struct EncodedFrame { + EncodedImage encoded_image; + CodecSpecificInfo codec_specific_info; + }; + + // `encoder` should be initialized, but shouldn't have `EncoderCallback` set. + explicit EncodedVideoFrameProducer(VideoEncoder& encoder) + : encoder_(encoder) {} + EncodedVideoFrameProducer(const EncodedVideoFrameProducer&) = delete; + EncodedVideoFrameProducer& operator=(const EncodedVideoFrameProducer&) = + delete; + + // Number of the input frames to pass to the encoder. + EncodedVideoFrameProducer& SetNumInputFrames(int value); + // Encode next frame as key frame. 
+ EncodedVideoFrameProducer& ForceKeyFrame(); + // Resolution of the input frames. + EncodedVideoFrameProducer& SetResolution(RenderResolution value); + + EncodedVideoFrameProducer& SetFramerateFps(int value); + + // Generates input video frames and encodes them with `encoder` provided in + // the constructor. Returns frame passed to the `OnEncodedImage` by wraping + // `EncodedImageCallback` underneath. + std::vector Encode(); + + private: + VideoEncoder& encoder_; + + uint32_t rtp_timestamp_ = 1000; + int num_input_frames_ = 1; + int framerate_fps_ = 30; + RenderResolution resolution_ = {320, 180}; + std::vector next_frame_type_ = { + VideoFrameType::kVideoFrameKey}; +}; + +inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetNumInputFrames( + int value) { + RTC_DCHECK_GT(value, 0); + num_input_frames_ = value; + return *this; +} + +inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::ForceKeyFrame() { + next_frame_type_ = {VideoFrameType::kVideoFrameKey}; + return *this; +} + +inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetResolution( + RenderResolution value) { + resolution_ = value; + return *this; +} + +inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetFramerateFps( + int value) { + RTC_DCHECK_GT(value, 0); + framerate_fps_ = value; + return *this; +} + +} // namespace webrtc +#endif // MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_ diff --git a/modules/video_coding/codecs/test/objc_codec_factory_helper.mm b/modules/video_coding/codecs/test/objc_codec_factory_helper.mm index df12fd9689..ed82376251 100644 --- a/modules/video_coding/codecs/test/objc_codec_factory_helper.mm +++ b/modules/video_coding/codecs/test/objc_codec_factory_helper.mm @@ -19,11 +19,11 @@ namespace test { std::unique_ptr CreateObjCEncoderFactory() { - return ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]); + return ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] 
init]); } std::unique_ptr CreateObjCDecoderFactory() { - return ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]); + return ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]); } } // namespace test diff --git a/modules/video_coding/codecs/test/plot_webrtc_test_logs.py b/modules/video_coding/codecs/test/plot_webrtc_test_logs.py index a0cdc0637f..29e2d6f65a 100755 --- a/modules/video_coding/codecs/test/plot_webrtc_test_logs.py +++ b/modules/video_coding/codecs/test/plot_webrtc_test_logs.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Plots statistics from WebRTC integration test logs. Usage: $ python plot_webrtc_test_logs.py filename.txt @@ -52,43 +51,43 @@ # Settings. SETTINGS = [ - WIDTH, - HEIGHT, - FILENAME, - NUM_FRAMES, + WIDTH, + HEIGHT, + FILENAME, + NUM_FRAMES, ] # Settings, options for x-axis. X_SETTINGS = [ - CORES, - FRAMERATE, - DENOISING, - RESILIENCE, - ERROR_CONCEALMENT, - BITRATE, # TODO(asapersson): Needs to be last. + CORES, + FRAMERATE, + DENOISING, + RESILIENCE, + ERROR_CONCEALMENT, + BITRATE, # TODO(asapersson): Needs to be last. ] # Settings, options for subplots. SUBPLOT_SETTINGS = [ - CODEC_TYPE, - ENCODER_IMPLEMENTATION_NAME, - DECODER_IMPLEMENTATION_NAME, - CODEC_IMPLEMENTATION_NAME, + CODEC_TYPE, + ENCODER_IMPLEMENTATION_NAME, + DECODER_IMPLEMENTATION_NAME, + CODEC_IMPLEMENTATION_NAME, ] + X_SETTINGS # Results. 
RESULTS = [ - PSNR, - SSIM, - ENC_BITRATE, - NUM_DROPPED_FRAMES, - TIME_TO_TARGET, - ENCODE_SPEED_FPS, - DECODE_SPEED_FPS, - QP, - CPU_USAGE, - AVG_KEY_FRAME_SIZE, - AVG_DELTA_FRAME_SIZE, + PSNR, + SSIM, + ENC_BITRATE, + NUM_DROPPED_FRAMES, + TIME_TO_TARGET, + ENCODE_SPEED_FPS, + DECODE_SPEED_FPS, + QP, + CPU_USAGE, + AVG_KEY_FRAME_SIZE, + AVG_DELTA_FRAME_SIZE, ] METRICS_TO_PARSE = SETTINGS + SUBPLOT_SETTINGS + RESULTS @@ -102,7 +101,7 @@ def ParseSetting(filename, setting): - """Parses setting from file. + """Parses setting from file. Args: filename: The name of the file. @@ -111,36 +110,36 @@ def ParseSetting(filename, setting): Returns: A list holding parsed settings, e.g. ['width: 128.0', 'width: 160.0'] """ - settings = [] - - settings_file = open(filename) - while True: - line = settings_file.readline() - if not line: - break - if re.search(r'%s' % EVENT_START, line): - # Parse event. - parsed = {} - while True: + settings = [] + + settings_file = open(filename) + while True: line = settings_file.readline() if not line: - break - if re.search(r'%s' % EVENT_END, line): - # Add parsed setting to list. - if setting in parsed: - s = setting + ': ' + str(parsed[setting]) - if s not in settings: - settings.append(s) - break - - TryFindMetric(parsed, line) - - settings_file.close() - return settings + break + if re.search(r'%s' % EVENT_START, line): + # Parse event. + parsed = {} + while True: + line = settings_file.readline() + if not line: + break + if re.search(r'%s' % EVENT_END, line): + # Add parsed setting to list. + if setting in parsed: + s = setting + ': ' + str(parsed[setting]) + if s not in settings: + settings.append(s) + break + + TryFindMetric(parsed, line) + + settings_file.close() + return settings def ParseMetrics(filename, setting1, setting2): - """Parses metrics from file. + """Parses metrics from file. Args: filename: The name of the file. 
@@ -175,82 +174,82 @@ def ParseMetrics(filename, setting1, setting2): } } """ - metrics = {} - - # Parse events. - settings_file = open(filename) - while True: - line = settings_file.readline() - if not line: - break - if re.search(r'%s' % EVENT_START, line): - # Parse event. - parsed = {} - while True: + metrics = {} + + # Parse events. + settings_file = open(filename) + while True: line = settings_file.readline() if not line: - break - if re.search(r'%s' % EVENT_END, line): - # Add parsed values to metrics. - key1 = setting1 + ': ' + str(parsed[setting1]) - key2 = setting2 + ': ' + str(parsed[setting2]) - if key1 not in metrics: - metrics[key1] = {} - if key2 not in metrics[key1]: - metrics[key1][key2] = {} - - for label in parsed: - if label not in metrics[key1][key2]: - metrics[key1][key2][label] = [] - metrics[key1][key2][label].append(parsed[label]) - - break - - TryFindMetric(parsed, line) - - settings_file.close() - return metrics + break + if re.search(r'%s' % EVENT_START, line): + # Parse event. + parsed = {} + while True: + line = settings_file.readline() + if not line: + break + if re.search(r'%s' % EVENT_END, line): + # Add parsed values to metrics. 
+ key1 = setting1 + ': ' + str(parsed[setting1]) + key2 = setting2 + ': ' + str(parsed[setting2]) + if key1 not in metrics: + metrics[key1] = {} + if key2 not in metrics[key1]: + metrics[key1][key2] = {} + + for label in parsed: + if label not in metrics[key1][key2]: + metrics[key1][key2][label] = [] + metrics[key1][key2][label].append(parsed[label]) + + break + + TryFindMetric(parsed, line) + + settings_file.close() + return metrics def TryFindMetric(parsed, line): - for metric in METRICS_TO_PARSE: - name = metric[0] - label = metric[1] - if re.search(r'%s' % name, line): - found, value = GetMetric(name, line) - if found: - parsed[label] = value - return + for metric in METRICS_TO_PARSE: + name = metric[0] + label = metric[1] + if re.search(r'%s' % name, line): + found, value = GetMetric(name, line) + if found: + parsed[label] = value + return def GetMetric(name, string): - # Float (e.g. bitrate = 98.8253). - pattern = r'%s\s*[:=]\s*([+-]?\d+\.*\d*)' % name - m = re.search(r'%s' % pattern, string) - if m is not None: - return StringToFloat(m.group(1)) - - # Alphanumeric characters (e.g. codec type : VP8). - pattern = r'%s\s*[:=]\s*(\w+)' % name - m = re.search(r'%s' % pattern, string) - if m is not None: - return True, m.group(1) + # Float (e.g. bitrate = 98.8253). + pattern = r'%s\s*[:=]\s*([+-]?\d+\.*\d*)' % name + m = re.search(r'%s' % pattern, string) + if m is not None: + return StringToFloat(m.group(1)) + + # Alphanumeric characters (e.g. codec type : VP8). 
+ pattern = r'%s\s*[:=]\s*(\w+)' % name + m = re.search(r'%s' % pattern, string) + if m is not None: + return True, m.group(1) - return False, -1 + return False, -1 def StringToFloat(value): - try: - value = float(value) - except ValueError: - print "Not a float, skipped %s" % value - return False, -1 + try: + value = float(value) + except ValueError: + print "Not a float, skipped %s" % value + return False, -1 - return True, value + return True, value def Plot(y_metric, x_metric, metrics): - """Plots y_metric vs x_metric per key in metrics. + """Plots y_metric vs x_metric per key in metrics. For example: y_metric = 'PSNR (dB)' @@ -266,26 +265,31 @@ def Plot(y_metric, x_metric, metrics): }, } """ - for key in sorted(metrics): - data = metrics[key] - if y_metric not in data: - print "Failed to find metric: %s" % y_metric - continue + for key in sorted(metrics): + data = metrics[key] + if y_metric not in data: + print "Failed to find metric: %s" % y_metric + continue - y = numpy.array(data[y_metric]) - x = numpy.array(data[x_metric]) - if len(y) != len(x): - print "Length mismatch for %s, %s" % (y, x) - continue + y = numpy.array(data[y_metric]) + x = numpy.array(data[x_metric]) + if len(y) != len(x): + print "Length mismatch for %s, %s" % (y, x) + continue - label = y_metric + ' - ' + str(key) + label = y_metric + ' - ' + str(key) - plt.plot(x, y, label=label, linewidth=1.5, marker='o', markersize=5, - markeredgewidth=0.0) + plt.plot(x, + y, + label=label, + linewidth=1.5, + marker='o', + markersize=5, + markeredgewidth=0.0) def PlotFigure(settings, y_metrics, x_metric, metrics, title): - """Plots metrics in y_metrics list. One figure is plotted and each entry + """Plots metrics in y_metrics list. One figure is plotted and each entry in the list is plotted in a subplot (and sorted per settings). 
For example: @@ -295,136 +299,140 @@ def PlotFigure(settings, y_metrics, x_metric, metrics, title): """ - plt.figure() - plt.suptitle(title, fontsize='large', fontweight='bold') - settings.sort() - rows = len(settings) - cols = 1 - pos = 1 - while pos <= rows: - plt.rc('grid', color=GRID_COLOR) - ax = plt.subplot(rows, cols, pos) - plt.grid() - plt.setp(ax.get_xticklabels(), visible=(pos == rows), fontsize='large') - plt.setp(ax.get_yticklabels(), fontsize='large') - setting = settings[pos - 1] - Plot(y_metrics[pos - 1], x_metric, metrics[setting]) - if setting.startswith(WIDTH[1]): - plt.title(setting, fontsize='medium') - plt.legend(fontsize='large', loc='best') - pos += 1 - - plt.xlabel(x_metric, fontsize='large') - plt.subplots_adjust(left=0.06, right=0.98, bottom=0.05, top=0.94, hspace=0.08) + plt.figure() + plt.suptitle(title, fontsize='large', fontweight='bold') + settings.sort() + rows = len(settings) + cols = 1 + pos = 1 + while pos <= rows: + plt.rc('grid', color=GRID_COLOR) + ax = plt.subplot(rows, cols, pos) + plt.grid() + plt.setp(ax.get_xticklabels(), visible=(pos == rows), fontsize='large') + plt.setp(ax.get_yticklabels(), fontsize='large') + setting = settings[pos - 1] + Plot(y_metrics[pos - 1], x_metric, metrics[setting]) + if setting.startswith(WIDTH[1]): + plt.title(setting, fontsize='medium') + plt.legend(fontsize='large', loc='best') + pos += 1 + + plt.xlabel(x_metric, fontsize='large') + plt.subplots_adjust(left=0.06, + right=0.98, + bottom=0.05, + top=0.94, + hspace=0.08) def GetTitle(filename, setting): - title = '' - if setting != CODEC_IMPLEMENTATION_NAME[1] and setting != CODEC_TYPE[1]: - codec_types = ParseSetting(filename, CODEC_TYPE[1]) - for i in range(0, len(codec_types)): - title += codec_types[i] + ', ' + title = '' + if setting != CODEC_IMPLEMENTATION_NAME[1] and setting != CODEC_TYPE[1]: + codec_types = ParseSetting(filename, CODEC_TYPE[1]) + for i in range(0, len(codec_types)): + title += codec_types[i] + ', ' - if setting != 
CORES[1]: - cores = ParseSetting(filename, CORES[1]) - for i in range(0, len(cores)): - title += cores[i].split('.')[0] + ', ' + if setting != CORES[1]: + cores = ParseSetting(filename, CORES[1]) + for i in range(0, len(cores)): + title += cores[i].split('.')[0] + ', ' - if setting != FRAMERATE[1]: - framerate = ParseSetting(filename, FRAMERATE[1]) - for i in range(0, len(framerate)): - title += framerate[i].split('.')[0] + ', ' + if setting != FRAMERATE[1]: + framerate = ParseSetting(filename, FRAMERATE[1]) + for i in range(0, len(framerate)): + title += framerate[i].split('.')[0] + ', ' - if (setting != CODEC_IMPLEMENTATION_NAME[1] and - setting != ENCODER_IMPLEMENTATION_NAME[1]): - enc_names = ParseSetting(filename, ENCODER_IMPLEMENTATION_NAME[1]) - for i in range(0, len(enc_names)): - title += enc_names[i] + ', ' + if (setting != CODEC_IMPLEMENTATION_NAME[1] + and setting != ENCODER_IMPLEMENTATION_NAME[1]): + enc_names = ParseSetting(filename, ENCODER_IMPLEMENTATION_NAME[1]) + for i in range(0, len(enc_names)): + title += enc_names[i] + ', ' - if (setting != CODEC_IMPLEMENTATION_NAME[1] and - setting != DECODER_IMPLEMENTATION_NAME[1]): - dec_names = ParseSetting(filename, DECODER_IMPLEMENTATION_NAME[1]) - for i in range(0, len(dec_names)): - title += dec_names[i] + ', ' + if (setting != CODEC_IMPLEMENTATION_NAME[1] + and setting != DECODER_IMPLEMENTATION_NAME[1]): + dec_names = ParseSetting(filename, DECODER_IMPLEMENTATION_NAME[1]) + for i in range(0, len(dec_names)): + title += dec_names[i] + ', ' - filenames = ParseSetting(filename, FILENAME[1]) - title += filenames[0].split('_')[0] + filenames = ParseSetting(filename, FILENAME[1]) + title += filenames[0].split('_')[0] - num_frames = ParseSetting(filename, NUM_FRAMES[1]) - for i in range(0, len(num_frames)): - title += ' (' + num_frames[i].split('.')[0] + ')' + num_frames = ParseSetting(filename, NUM_FRAMES[1]) + for i in range(0, len(num_frames)): + title += ' (' + num_frames[i].split('.')[0] + ')' - return 
title + return title def ToString(input_list): - return ToStringWithoutMetric(input_list, ('', '')) + return ToStringWithoutMetric(input_list, ('', '')) def ToStringWithoutMetric(input_list, metric): - i = 1 - output_str = "" - for m in input_list: - if m != metric: - output_str = output_str + ("%s. %s\n" % (i, m[1])) - i += 1 - return output_str + i = 1 + output_str = "" + for m in input_list: + if m != metric: + output_str = output_str + ("%s. %s\n" % (i, m[1])) + i += 1 + return output_str def GetIdx(text_list): - return int(raw_input(text_list)) - 1 + return int(raw_input(text_list)) - 1 def main(): - filename = sys.argv[1] - - # Setup. - idx_metric = GetIdx("Choose metric:\n0. All\n%s" % ToString(RESULTS)) - if idx_metric == -1: - # Plot all metrics. One subplot for each metric. - # Per subplot: metric vs bitrate (per resolution). - cores = ParseSetting(filename, CORES[1]) - setting1 = CORES[1] - setting2 = WIDTH[1] - sub_keys = [cores[0]] * len(Y_METRICS) - y_metrics = Y_METRICS - x_metric = BITRATE[1] - else: - resolutions = ParseSetting(filename, WIDTH[1]) - idx = GetIdx("Select metric for x-axis:\n%s" % ToString(X_SETTINGS)) - if X_SETTINGS[idx] == BITRATE: - idx = GetIdx("Plot per:\n%s" % ToStringWithoutMetric(SUBPLOT_SETTINGS, - BITRATE)) - idx_setting = METRICS_TO_PARSE.index(SUBPLOT_SETTINGS[idx]) - # Plot one metric. One subplot for each resolution. - # Per subplot: metric vs bitrate (per setting). - setting1 = WIDTH[1] - setting2 = METRICS_TO_PARSE[idx_setting][1] - sub_keys = resolutions - y_metrics = [RESULTS[idx_metric][1]] * len(sub_keys) - x_metric = BITRATE[1] + filename = sys.argv[1] + + # Setup. + idx_metric = GetIdx("Choose metric:\n0. All\n%s" % ToString(RESULTS)) + if idx_metric == -1: + # Plot all metrics. One subplot for each metric. + # Per subplot: metric vs bitrate (per resolution). 
+ cores = ParseSetting(filename, CORES[1]) + setting1 = CORES[1] + setting2 = WIDTH[1] + sub_keys = [cores[0]] * len(Y_METRICS) + y_metrics = Y_METRICS + x_metric = BITRATE[1] else: - # Plot one metric. One subplot for each resolution. - # Per subplot: metric vs setting (per bitrate). - setting1 = WIDTH[1] - setting2 = BITRATE[1] - sub_keys = resolutions - y_metrics = [RESULTS[idx_metric][1]] * len(sub_keys) - x_metric = X_SETTINGS[idx][1] - - metrics = ParseMetrics(filename, setting1, setting2) - - # Stretch fig size. - figsize = plt.rcParams["figure.figsize"] - figsize[0] *= FIG_SIZE_SCALE_FACTOR_X - figsize[1] *= FIG_SIZE_SCALE_FACTOR_Y - plt.rcParams["figure.figsize"] = figsize - - PlotFigure(sub_keys, y_metrics, x_metric, metrics, - GetTitle(filename, setting2)) - - plt.show() + resolutions = ParseSetting(filename, WIDTH[1]) + idx = GetIdx("Select metric for x-axis:\n%s" % ToString(X_SETTINGS)) + if X_SETTINGS[idx] == BITRATE: + idx = GetIdx("Plot per:\n%s" % + ToStringWithoutMetric(SUBPLOT_SETTINGS, BITRATE)) + idx_setting = METRICS_TO_PARSE.index(SUBPLOT_SETTINGS[idx]) + # Plot one metric. One subplot for each resolution. + # Per subplot: metric vs bitrate (per setting). + setting1 = WIDTH[1] + setting2 = METRICS_TO_PARSE[idx_setting][1] + sub_keys = resolutions + y_metrics = [RESULTS[idx_metric][1]] * len(sub_keys) + x_metric = BITRATE[1] + else: + # Plot one metric. One subplot for each resolution. + # Per subplot: metric vs setting (per bitrate). + setting1 = WIDTH[1] + setting2 = BITRATE[1] + sub_keys = resolutions + y_metrics = [RESULTS[idx_metric][1]] * len(sub_keys) + x_metric = X_SETTINGS[idx][1] + + metrics = ParseMetrics(filename, setting1, setting2) + + # Stretch fig size. 
+ figsize = plt.rcParams["figure.figsize"] + figsize[0] *= FIG_SIZE_SCALE_FACTOR_X + figsize[1] *= FIG_SIZE_SCALE_FACTOR_Y + plt.rcParams["figure.figsize"] = figsize + + PlotFigure(sub_keys, y_metrics, x_metric, metrics, + GetTitle(filename, setting2)) + + plt.show() if __name__ == '__main__': - main() + main() diff --git a/modules/video_coding/codecs/test/video_codec_unittest.cc b/modules/video_coding/codecs/test/video_codec_unittest.cc index c6cf1add94..ff09231b62 100644 --- a/modules/video_coding/codecs/test/video_codec_unittest.cc +++ b/modules/video_coding/codecs/test/video_codec_unittest.cc @@ -35,9 +35,8 @@ const VideoEncoder::Capabilities kCapabilities(false); EncodedImageCallback::Result VideoCodecUnitTest::FakeEncodeCompleteCallback::OnEncodedImage( const EncodedImage& frame, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { - rtc::CritScope lock(&test_->encoded_frame_section_); + const CodecSpecificInfo* codec_specific_info) { + MutexLock lock(&test_->encoded_frame_section_); test_->encoded_frames_.push_back(frame); RTC_DCHECK(codec_specific_info); test_->codec_specific_infos_.push_back(*codec_specific_info); @@ -58,7 +57,7 @@ void VideoCodecUnitTest::FakeDecodeCompleteCallback::Decoded( VideoFrame& frame, absl::optional decode_time_ms, absl::optional qp) { - rtc::CritScope lock(&test_->decoded_frame_section_); + MutexLock lock(&test_->decoded_frame_section_); test_->decoded_frame_.emplace(frame); test_->decoded_qp_ = qp; test_->decoded_frame_event_.Set(); @@ -126,7 +125,7 @@ bool VideoCodecUnitTest::WaitForEncodedFrame( } void VideoCodecUnitTest::SetWaitForEncodedFramesThreshold(size_t num_frames) { - rtc::CritScope lock(&encoded_frame_section_); + MutexLock lock(&encoded_frame_section_); wait_for_encoded_frames_threshold_ = num_frames; } @@ -136,7 +135,7 @@ bool VideoCodecUnitTest::WaitForEncodedFrames( EXPECT_TRUE(encoded_frame_event_.Wait(kEncodeTimeoutMs)) << "Timed out while waiting for encoded 
frame."; // This becomes unsafe if there are multiple threads waiting for frames. - rtc::CritScope lock(&encoded_frame_section_); + MutexLock lock(&encoded_frame_section_); EXPECT_FALSE(encoded_frames_.empty()); EXPECT_FALSE(codec_specific_infos_.empty()); EXPECT_EQ(encoded_frames_.size(), codec_specific_infos_.size()); @@ -157,7 +156,7 @@ bool VideoCodecUnitTest::WaitForDecodedFrame(std::unique_ptr* frame, bool ret = decoded_frame_event_.Wait(kDecodeTimeoutMs); EXPECT_TRUE(ret) << "Timed out while waiting for a decoded frame."; // This becomes unsafe if there are multiple threads waiting for frames. - rtc::CritScope lock(&decoded_frame_section_); + MutexLock lock(&decoded_frame_section_); EXPECT_TRUE(decoded_frame_); if (decoded_frame_) { frame->reset(new VideoFrame(std::move(*decoded_frame_))); @@ -170,7 +169,7 @@ bool VideoCodecUnitTest::WaitForDecodedFrame(std::unique_ptr* frame, } size_t VideoCodecUnitTest::GetNumEncodedFrames() { - rtc::CritScope lock(&encoded_frame_section_); + MutexLock lock(&encoded_frame_section_); return encoded_frames_.size(); } diff --git a/modules/video_coding/codecs/test/video_codec_unittest.h b/modules/video_coding/codecs/test/video_codec_unittest.h index 1ce37a7ed5..adab3558aa 100644 --- a/modules/video_coding/codecs/test/video_codec_unittest.h +++ b/modules/video_coding/codecs/test/video_codec_unittest.h @@ -20,8 +20,8 @@ #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "test/gtest.h" @@ -42,8 +42,7 @@ class VideoCodecUnitTest : public ::testing::Test { : test_(test) {} Result OnEncodedImage(const EncodedImage& frame, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation); + const 
CodecSpecificInfo* codec_specific_info); private: VideoCodecUnitTest* const test_; @@ -108,7 +107,7 @@ class VideoCodecUnitTest : public ::testing::Test { FakeDecodeCompleteCallback decode_complete_callback_; rtc::Event encoded_frame_event_; - rtc::CriticalSection encoded_frame_section_; + Mutex encoded_frame_section_; size_t wait_for_encoded_frames_threshold_; std::vector encoded_frames_ RTC_GUARDED_BY(encoded_frame_section_); @@ -116,7 +115,7 @@ class VideoCodecUnitTest : public ::testing::Test { RTC_GUARDED_BY(encoded_frame_section_); rtc::Event decoded_frame_event_; - rtc::CriticalSection decoded_frame_section_; + Mutex decoded_frame_section_; absl::optional decoded_frame_ RTC_GUARDED_BY(decoded_frame_section_); absl::optional decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_); diff --git a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc index 0eb256ea99..63803d67cd 100644 --- a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc +++ b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc @@ -22,6 +22,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/transport/field_trial_based_config.h" #include "api/video/video_bitrate_allocation.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" @@ -61,13 +62,14 @@ const int kMaxFramerateFps = 30; const int kMaxQp = 56; void ConfigureSimulcast(VideoCodec* codec_settings) { + FieldTrialBasedConfig trials; const std::vector streams = cricket::GetSimulcastConfig( - codec_settings->numberOfSimulcastStreams, codec_settings->width, - codec_settings->height, kBitratePriority, kMaxQp, - /* is_screenshare = */ false, true); + /*min_layer=*/1, codec_settings->numberOfSimulcastStreams, + codec_settings->width, codec_settings->height, kBitratePriority, kMaxQp, + /* is_screenshare = */ false, true, trials); for (size_t i = 0; i < streams.size(); ++i) { - 
SimulcastStream* ss = &codec_settings->simulcastStream[i]; + SpatialLayer* ss = &codec_settings->simulcastStream[i]; ss->width = static_cast(streams[i].width); ss->height = static_cast(streams[i].height); ss->numberOfTemporalLayers = @@ -85,7 +87,7 @@ void ConfigureSvc(VideoCodec* codec_settings) { const std::vector layers = GetSvcConfig( codec_settings->width, codec_settings->height, kMaxFramerateFps, - codec_settings->VP9()->numberOfSpatialLayers, + /*first_active_layer=*/0, codec_settings->VP9()->numberOfSpatialLayers, codec_settings->VP9()->numberOfTemporalLayers, /* is_screen_sharing = */ false); ASSERT_EQ(codec_settings->VP9()->numberOfSpatialLayers, layers.size()) @@ -201,10 +203,14 @@ void VideoCodecTestFixtureImpl::Config::SetCodecSettings( codec_settings.VP9()->denoisingOn = denoising_on; codec_settings.VP9()->frameDroppingOn = frame_dropper_on; codec_settings.VP9()->keyFrameInterval = kBaseKeyFrameInterval; + codec_settings.VP9()->keyFrameInterval = 60; codec_settings.VP9()->automaticResizeOn = spatial_resize_on; codec_settings.VP9()->numberOfSpatialLayers = static_cast(num_spatial_layers); break; + case kVideoCodecAV1: + codec_settings.qpMax = 63; + break; case kVideoCodecH264: codec_settings.H264()->frameDroppingOn = frame_dropper_on; codec_settings.H264()->keyFrameInterval = kBaseKeyFrameInterval; @@ -274,8 +280,7 @@ std::string VideoCodecTestFixtureImpl::Config::ToString() const { if (codec_settings.numberOfSimulcastStreams > 1) { for (int i = 0; i < codec_settings.numberOfSimulcastStreams; ++i) { ss << "\n\n--> codec_settings.simulcastStream[" << i << "]"; - const SimulcastStream& simulcast_stream = - codec_settings.simulcastStream[i]; + const SpatialLayer& simulcast_stream = codec_settings.simulcastStream[i]; ss << "\nwidth: " << simulcast_stream.width; ss << "\nheight: " << simulcast_stream.height; ss << "\nnum_temporal_layers: " @@ -446,6 +451,8 @@ void VideoCodecTestFixtureImpl::ProcessAllFrames( } } + task_queue->PostTask([this] { 
processor_->Finalize(); }); + // Wait until we know that the last frame has been sent for encode. task_queue->SendTask([] {}, RTC_FROM_HERE); diff --git a/modules/video_coding/codecs/test/videocodec_test_libaom.cc b/modules/video_coding/codecs/test/videocodec_test_libaom.cc new file mode 100644 index 0000000000..45730aa09e --- /dev/null +++ b/modules/video_coding/codecs/test/videocodec_test_libaom.cc @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "api/test/create_videocodec_test_fixture.h" +#include "api/test/video/function_video_encoder_factory.h" +#include "api/video_codecs/sdp_video_format.h" +#include "media/base/media_constants.h" +#include "media/engine/internal_decoder_factory.h" +#include "media/engine/internal_encoder_factory.h" +#include "media/engine/simulcast_encoder_adapter.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace test { +namespace { +// Test clips settings. 
+constexpr int kCifWidth = 352; +constexpr int kCifHeight = 288; +constexpr int kNumFramesLong = 300; + +VideoCodecTestFixture::Config CreateConfig(std::string filename) { + VideoCodecTestFixture::Config config; + config.filename = filename; + config.filepath = ResourcePath(config.filename, "yuv"); + config.num_frames = kNumFramesLong; + config.use_single_core = true; + return config; +} + +TEST(VideoCodecTestLibaom, HighBitrateAV1) { + auto config = CreateConfig("foreman_cif"); + config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true, + kCifWidth, kCifHeight); + config.num_frames = kNumFramesLong; + auto fixture = CreateVideoCodecTestFixture(config); + + std::vector rate_profiles = {{500, 30, 0}}; + + std::vector rc_thresholds = { + {12, 1, 0, 1, 0.3, 0.1, 0, 1}}; + + std::vector quality_thresholds = {{37, 34, 0.94, 0.92}}; + + fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr); +} + +TEST(VideoCodecTestLibaom, VeryLowBitrateAV1) { + auto config = CreateConfig("foreman_cif"); + config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true, + kCifWidth, kCifHeight); + auto fixture = CreateVideoCodecTestFixture(config); + + std::vector rate_profiles = {{50, 30, 0}}; + + std::vector rc_thresholds = { + {15, 8, 75, 2, 2, 2, 2, 1}}; + + std::vector quality_thresholds = {{28, 25, 0.70, 0.62}}; + + fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr); +} + +#if !defined(WEBRTC_ANDROID) +constexpr int kHdWidth = 1280; +constexpr int kHdHeight = 720; +TEST(VideoCodecTestLibaom, HdAV1) { + auto config = CreateConfig("ConferenceMotion_1280_720_50"); + config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true, + kHdWidth, kHdHeight); + config.num_frames = kNumFramesLong; + auto fixture = CreateVideoCodecTestFixture(config); + + std::vector rate_profiles = {{1000, 50, 0}}; + + std::vector rc_thresholds = { + {13, 3, 0, 1, 0.3, 0.1, 0, 1}}; + + std::vector quality_thresholds = 
{{36, 32, 0.93, 0.87}}; + + fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr); +} +#endif + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc index 244dbc4121..8076e40fd4 100644 --- a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc +++ b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc @@ -188,7 +188,7 @@ TEST(VideoCodecTestLibvpx, VeryLowBitrateVP9) { std::vector rate_profiles = {{50, 30, 0}}; std::vector rc_thresholds = { - {15, 3, 75, 1, 0.5, 0.4, 1, 1}}; + {15, 3, 75, 1, 0.5, 0.4, 2, 1}}; std::vector quality_thresholds = {{28, 25, 0.80, 0.65}}; @@ -358,7 +358,7 @@ TEST(VideoCodecTestLibvpx, MAYBE_MultiresVP8) { std::vector rate_profiles = {{1500, 30, 0}}; #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) std::vector rc_thresholds = { - {4.1, 1.04, 6, 0.18, 0.14, 0.08, 0, 1}}; + {4.1, 1.04, 7, 0.18, 0.14, 0.08, 0, 1}}; #else std::vector rc_thresholds = { {5, 1, 5, 1, 0.3, 0.1, 0, 1}}; diff --git a/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc b/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc index e5d6d2aaad..aa0ff0b9c8 100644 --- a/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc +++ b/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc @@ -179,20 +179,20 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( VideoStatistics video_stat; float buffer_level_bits = 0.0f; - RunningStatistics buffer_level_sec; + webrtc_impl::RunningStatistics buffer_level_sec; - RunningStatistics key_frame_size_bytes; - RunningStatistics delta_frame_size_bytes; + webrtc_impl::RunningStatistics key_frame_size_bytes; + webrtc_impl::RunningStatistics delta_frame_size_bytes; - RunningStatistics frame_encoding_time_us; - RunningStatistics frame_decoding_time_us; + webrtc_impl::RunningStatistics frame_encoding_time_us; 
+ webrtc_impl::RunningStatistics frame_decoding_time_us; - RunningStatistics psnr_y; - RunningStatistics psnr_u; - RunningStatistics psnr_v; - RunningStatistics psnr; - RunningStatistics ssim; - RunningStatistics qp; + webrtc_impl::RunningStatistics psnr_y; + webrtc_impl::RunningStatistics psnr_u; + webrtc_impl::RunningStatistics psnr_v; + webrtc_impl::RunningStatistics psnr; + webrtc_impl::RunningStatistics ssim; + webrtc_impl::RunningStatistics qp; size_t rtp_timestamp_first_frame = 0; size_t rtp_timestamp_prev_frame = 0; @@ -252,12 +252,6 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( video_stat.height = std::max(video_stat.height, frame_stat.decoded_height); - psnr_y.AddSample(frame_stat.psnr_y); - psnr_u.AddSample(frame_stat.psnr_u); - psnr_v.AddSample(frame_stat.psnr_v); - psnr.AddSample(frame_stat.psnr); - ssim.AddSample(frame_stat.ssim); - if (video_stat.num_decoded_frames > 1) { if (last_successfully_decoded_frame.decoded_width != frame_stat.decoded_width || @@ -271,6 +265,14 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( last_successfully_decoded_frame = frame_stat; } + if (frame_stat.quality_analysis_successful) { + psnr_y.AddSample(frame_stat.psnr_y); + psnr_u.AddSample(frame_stat.psnr_u); + psnr_v.AddSample(frame_stat.psnr_v); + psnr.AddSample(frame_stat.psnr); + ssim.AddSample(frame_stat.ssim); + } + if (video_stat.num_input_frames > 0) { if (video_stat.time_to_reach_target_bitrate_sec == 0.0f) { RTC_CHECK_GT(time_since_first_frame_sec, 0); @@ -329,10 +331,10 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( ? 
1000000.0f / mean_decode_time_us : std::numeric_limits::max(); - auto MaxDelaySec = - [target_bitrate_kbps](const RunningStatistics& stats) { - return 8 * stats.GetMax().value_or(0) / 1000 / target_bitrate_kbps; - }; + auto MaxDelaySec = [target_bitrate_kbps]( + const webrtc_impl::RunningStatistics& stats) { + return 8 * stats.GetMax().value_or(0) / 1000 / target_bitrate_kbps; + }; video_stat.avg_delay_sec = buffer_level_sec.GetMean().value_or(0); video_stat.max_key_frame_delay_sec = MaxDelaySec(key_frame_size_bytes); diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc index 8fbbe4a04e..1532695b23 100644 --- a/modules/video_coding/codecs/test/videoprocessor.cc +++ b/modules/video_coding/codecs/test/videoprocessor.cc @@ -41,8 +41,6 @@ namespace webrtc { namespace test { -using FrameStatistics = VideoCodecTestStats::FrameStatistics; - namespace { const int kMsToRtpTimestamp = kVideoPayloadTypeFrequency / 1000; const int kMaxBufferedInputFrames = 20; @@ -86,34 +84,9 @@ int GetElapsedTimeMicroseconds(int64_t start_ns, int64_t stop_ns) { return static_cast(diff_us); } -void ExtractI420BufferWithSize(const VideoFrame& image, - int width, - int height, - rtc::Buffer* buffer) { - if (image.width() != width || image.height() != height) { - EXPECT_DOUBLE_EQ(static_cast(width) / height, - static_cast(image.width()) / image.height()); - // Same aspect ratio, no cropping needed. - rtc::scoped_refptr scaled(I420Buffer::Create(width, height)); - scaled->ScaleFrom(*image.video_frame_buffer()->ToI420()); - - size_t length = - CalcBufferSize(VideoType::kI420, scaled->width(), scaled->height()); - buffer->SetSize(length); - RTC_CHECK_NE(ExtractBuffer(scaled, length, buffer->data()), -1); - return; - } - - // No resize. 
- size_t length = - CalcBufferSize(VideoType::kI420, image.width(), image.height()); - buffer->SetSize(length); - RTC_CHECK_NE(ExtractBuffer(image, length, buffer->data()), -1); -} - void CalculateFrameQuality(const I420BufferInterface& ref_buffer, const I420BufferInterface& dec_buffer, - FrameStatistics* frame_stat, + VideoCodecTestStats::FrameStatistics* frame_stat, bool calc_ssim) { if (ref_buffer.width() != dec_buffer.width() || ref_buffer.height() != dec_buffer.height()) { @@ -174,6 +147,7 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder, num_simulcast_or_spatial_layers_( std::max(config_.NumberOfSimulcastStreams(), config_.NumberOfSpatialLayers())), + analyze_frame_quality_(!config_.measure_cpu), stats_(stats), encoder_(encoder), decoders_(decoders), @@ -192,8 +166,9 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder, last_encoded_frame_num_(num_simulcast_or_spatial_layers_), first_decoded_frame_(num_simulcast_or_spatial_layers_, true), last_decoded_frame_num_(num_simulcast_or_spatial_layers_), - decoded_frame_buffer_(num_simulcast_or_spatial_layers_), - post_encode_time_ns_(0) { + last_decoded_frame_buffer_(num_simulcast_or_spatial_layers_), + post_encode_time_ns_(0), + is_finalized_(false) { // Sanity checks. RTC_CHECK(TaskQueueBase::Current()) << "VideoProcessor must be run on a task queue."; @@ -234,6 +209,10 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder, VideoProcessor::~VideoProcessor() { RTC_DCHECK_RUN_ON(&sequence_checker_); + if (!is_finalized_) { + Finalize(); + } + // Explicitly reset codecs, in case they don't do that themselves when they // go out of scope. RTC_CHECK_EQ(encoder_->Release(), WEBRTC_VIDEO_CODEC_OK); @@ -249,6 +228,8 @@ VideoProcessor::~VideoProcessor() { void VideoProcessor::ProcessFrame() { RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!is_finalized_); + const size_t frame_number = last_inputed_frame_num_++; // Get input frame and store for future quality calculation. 
@@ -304,6 +285,8 @@ void VideoProcessor::ProcessFrame() { void VideoProcessor::SetRates(size_t bitrate_kbps, double framerate_fps) { RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!is_finalized_); + framerate_fps_ = framerate_fps; bitrate_allocation_ = bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( @@ -390,13 +373,11 @@ void VideoProcessor::FrameEncoded( frame_stat->max_nalu_size_bytes = GetMaxNaluSizeBytes(encoded_image, config_); frame_stat->qp = encoded_image.qp_; - bool end_of_picture = false; if (codec_type == kVideoCodecVP9) { const CodecSpecificInfoVP9& vp9_info = codec_specific.codecSpecific.VP9; frame_stat->inter_layer_predicted = vp9_info.inter_layer_predicted; frame_stat->non_ref_for_inter_layer_pred = vp9_info.non_ref_for_inter_layer_pred; - end_of_picture = vp9_info.end_of_picture; } else { frame_stat->inter_layer_predicted = false; frame_stat->non_ref_for_inter_layer_pred = true; @@ -414,7 +395,7 @@ void VideoProcessor::FrameEncoded( if (config_.decode) { DecodeFrame(*encoded_image_for_decode, spatial_idx); - if (end_of_picture && num_spatial_layers > 1) { + if (codec_specific.end_of_picture && num_spatial_layers > 1) { // If inter-layer prediction is enabled and upper layer was dropped then // base layer should be passed to upper layer decoder. Otherwise decoder // won't be able to decode next superframe. @@ -460,6 +441,56 @@ void VideoProcessor::FrameEncoded( } } +void VideoProcessor::CalcFrameQuality(const I420BufferInterface& decoded_frame, + FrameStatistics* frame_stat) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + const auto reference_frame = input_frames_.find(frame_stat->frame_number); + RTC_CHECK(reference_frame != input_frames_.cend()) + << "The codecs are either buffering too much, dropping too much, or " + "being too slow relative to the input frame rate."; + + // SSIM calculation is not optimized. Skip it in real-time mode. 
+ const bool calc_ssim = !config_.encode_in_real_time; + CalculateFrameQuality(*reference_frame->second.video_frame_buffer()->ToI420(), + decoded_frame, frame_stat, calc_ssim); + + frame_stat->quality_analysis_successful = true; +} + +void VideoProcessor::WriteDecodedFrame(const I420BufferInterface& decoded_frame, + FrameWriter& frame_writer) { + int input_video_width = config_.codec_settings.width; + int input_video_height = config_.codec_settings.height; + + rtc::scoped_refptr scaled_buffer; + const I420BufferInterface* scaled_frame; + + if (decoded_frame.width() == input_video_width && + decoded_frame.height() == input_video_height) { + scaled_frame = &decoded_frame; + } else { + EXPECT_DOUBLE_EQ( + static_cast(input_video_width) / input_video_height, + static_cast(decoded_frame.width()) / decoded_frame.height()); + + scaled_buffer = I420Buffer::Create(input_video_width, input_video_height); + scaled_buffer->ScaleFrom(decoded_frame); + + scaled_frame = scaled_buffer; + } + + // Ensure there is no padding. + RTC_CHECK_EQ(scaled_frame->StrideY(), input_video_width); + RTC_CHECK_EQ(scaled_frame->StrideU(), input_video_width / 2); + RTC_CHECK_EQ(scaled_frame->StrideV(), input_video_width / 2); + + RTC_CHECK_EQ(3 * input_video_width * input_video_height / 2, + frame_writer.FrameLength()); + + RTC_CHECK(frame_writer.WriteFrame(scaled_frame->DataY())); +} + void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame, size_t spatial_idx) { RTC_DCHECK_RUN_ON(&sequence_checker_); @@ -472,13 +503,24 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame, stats_->GetFrameWithTimestamp(decoded_frame.timestamp(), spatial_idx); const size_t frame_number = frame_stat->frame_number; - if (decoded_frame_writers_ && !first_decoded_frame_[spatial_idx]) { - // Fill drops with last decoded frame to make them look like freeze at - // playback and to keep decoded layers in sync. 
- for (size_t i = last_decoded_frame_num_[spatial_idx] + 1; i < frame_number; - ++i) { - RTC_CHECK(decoded_frame_writers_->at(spatial_idx) - ->WriteFrame(decoded_frame_buffer_[spatial_idx].data())); + if (!first_decoded_frame_[spatial_idx]) { + for (size_t dropped_frame_number = last_decoded_frame_num_[spatial_idx] + 1; + dropped_frame_number < frame_number; ++dropped_frame_number) { + FrameStatistics* dropped_frame_stat = + stats_->GetFrame(dropped_frame_number, spatial_idx); + + if (analyze_frame_quality_ && config_.analyze_quality_of_dropped_frames) { + // Calculate frame quality comparing input frame with last decoded one. + CalcFrameQuality(*last_decoded_frame_buffer_[spatial_idx], + dropped_frame_stat); + } + + if (decoded_frame_writers_ != nullptr) { + // Fill drops with last decoded frame to make them look like freeze at + // playback and to keep decoded layers in sync. + WriteDecodedFrame(*last_decoded_frame_buffer_[spatial_idx], + *decoded_frame_writers_->at(spatial_idx)); + } } } @@ -497,41 +539,40 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame, frame_stat->decoded_height = decoded_frame.height(); // Skip quality metrics calculation to not affect CPU usage. - if (!config_.measure_cpu) { - const auto reference_frame = input_frames_.find(frame_number); - RTC_CHECK(reference_frame != input_frames_.cend()) - << "The codecs are either buffering too much, dropping too much, or " - "being too slow relative the input frame rate."; - - // SSIM calculation is not optimized. Skip it in real-time mode. - const bool calc_ssim = !config_.encode_in_real_time; - CalculateFrameQuality( - *reference_frame->second.video_frame_buffer()->ToI420(), - *decoded_frame.video_frame_buffer()->ToI420(), frame_stat, calc_ssim); - - // Erase all buffered input frames that we have moved past for all - // simulcast/spatial layers. 
Never buffer more than - // |kMaxBufferedInputFrames| frames, to protect against long runs of - // consecutive frame drops for a particular layer. - const auto min_last_decoded_frame_num = std::min_element( - last_decoded_frame_num_.cbegin(), last_decoded_frame_num_.cend()); - const size_t min_buffered_frame_num = std::max( - 0, static_cast(frame_number) - kMaxBufferedInputFrames + 1); - RTC_CHECK(min_last_decoded_frame_num != last_decoded_frame_num_.cend()); - const auto input_frames_erase_before = input_frames_.lower_bound( - std::max(*min_last_decoded_frame_num, min_buffered_frame_num)); - input_frames_.erase(input_frames_.cbegin(), input_frames_erase_before); + if (analyze_frame_quality_ || decoded_frame_writers_) { + // Save last decoded frame to handle possible future drops. + rtc::scoped_refptr i420buffer = + decoded_frame.video_frame_buffer()->ToI420(); + + // Copy decoded frame to a buffer without padding/stride such that we can + // dump Y, U and V planes into a file in one shot. 
+ last_decoded_frame_buffer_[spatial_idx] = I420Buffer::Copy( + i420buffer->width(), i420buffer->height(), i420buffer->DataY(), + i420buffer->StrideY(), i420buffer->DataU(), i420buffer->StrideU(), + i420buffer->DataV(), i420buffer->StrideV()); + } + + if (analyze_frame_quality_) { + CalcFrameQuality(*decoded_frame.video_frame_buffer()->ToI420(), frame_stat); } - if (decoded_frame_writers_) { - ExtractI420BufferWithSize(decoded_frame, config_.codec_settings.width, - config_.codec_settings.height, - &decoded_frame_buffer_[spatial_idx]); - RTC_CHECK_EQ(decoded_frame_buffer_[spatial_idx].size(), - decoded_frame_writers_->at(spatial_idx)->FrameLength()); - RTC_CHECK(decoded_frame_writers_->at(spatial_idx) - ->WriteFrame(decoded_frame_buffer_[spatial_idx].data())); + if (decoded_frame_writers_ != nullptr) { + WriteDecodedFrame(*last_decoded_frame_buffer_[spatial_idx], + *decoded_frame_writers_->at(spatial_idx)); } + + // Erase all buffered input frames that we have moved past for all + // simulcast/spatial layers. Never buffer more than + // |kMaxBufferedInputFrames| frames, to protect against long runs of + // consecutive frame drops for a particular layer. 
+ const auto min_last_decoded_frame_num = std::min_element( + last_decoded_frame_num_.cbegin(), last_decoded_frame_num_.cend()); + const size_t min_buffered_frame_num = + std::max(0, static_cast(frame_number) - kMaxBufferedInputFrames + 1); + RTC_CHECK(min_last_decoded_frame_num != last_decoded_frame_num_.cend()); + const auto input_frames_erase_before = input_frames_.lower_bound( + std::max(*min_last_decoded_frame_num, min_buffered_frame_num)); + input_frames_.erase(input_frames_.cbegin(), input_frames_erase_before); } void VideoProcessor::DecodeFrame(const EncodedImage& encoded_image, @@ -571,16 +612,16 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe( } const size_t payload_size_bytes = base_image.size() + encoded_image.size(); - EncodedImage copied_image = encoded_image; - copied_image.SetEncodedData(EncodedImageBuffer::Create(payload_size_bytes)); + auto buffer = EncodedImageBuffer::Create(payload_size_bytes); if (base_image.size()) { RTC_CHECK(base_image.data()); - memcpy(copied_image.data(), base_image.data(), base_image.size()); + memcpy(buffer->data(), base_image.data(), base_image.size()); } - memcpy(copied_image.data() + base_image.size(), encoded_image.data(), + memcpy(buffer->data() + base_image.size(), encoded_image.data(), encoded_image.size()); - copied_image.set_size(payload_size_bytes); + EncodedImage copied_image = encoded_image; + copied_image.SetEncodedData(buffer); // Replace previous EncodedImage for this spatial layer. 
merged_encoded_frames_.at(spatial_idx) = std::move(copied_image); @@ -588,5 +629,41 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe( return &merged_encoded_frames_.at(spatial_idx); } +void VideoProcessor::Finalize() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!is_finalized_); + is_finalized_ = true; + + if (!(analyze_frame_quality_ && config_.analyze_quality_of_dropped_frames) && + decoded_frame_writers_ == nullptr) { + return; + } + + for (size_t spatial_idx = 0; spatial_idx < num_simulcast_or_spatial_layers_; + ++spatial_idx) { + if (first_decoded_frame_[spatial_idx]) { + continue; // No decoded frames on this spatial layer. + } + + for (size_t dropped_frame_number = last_decoded_frame_num_[spatial_idx] + 1; + dropped_frame_number < last_inputed_frame_num_; + ++dropped_frame_number) { + FrameStatistics* frame_stat = + stats_->GetFrame(dropped_frame_number, spatial_idx); + + RTC_DCHECK(!frame_stat->decoding_successful); + + if (analyze_frame_quality_ && config_.analyze_quality_of_dropped_frames) { + CalcFrameQuality(*last_decoded_frame_buffer_[spatial_idx], frame_stat); + } + + if (decoded_frame_writers_ != nullptr) { + WriteDecodedFrame(*last_decoded_frame_buffer_[spatial_idx], + *decoded_frame_writers_->at(spatial_idx)); + } + } + } +} + } // namespace test } // namespace webrtc diff --git a/modules/video_coding/codecs/test/videoprocessor.h b/modules/video_coding/codecs/test/videoprocessor.h index bed65bdb2e..cd755ea0e0 100644 --- a/modules/video_coding/codecs/test/videoprocessor.h +++ b/modules/video_coding/codecs/test/videoprocessor.h @@ -24,6 +24,7 @@ #include "api/task_queue/task_queue_base.h" #include "api/test/videocodec_test_fixture.h" #include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_frame.h" @@ -58,6 +59,7 @@ class VideoProcessor { // TODO(brandtr): Consider changing 
FrameWriterList to be a FrameWriterMap, // to be able to save different TLs separately. using FrameWriterList = std::vector>; + using FrameStatistics = VideoCodecTestStats::FrameStatistics; VideoProcessor(webrtc::VideoEncoder* encoder, VideoDecoderList* decoders, @@ -77,6 +79,11 @@ class VideoProcessor { // Updates the encoder with target rates. Must be called at least once. void SetRates(size_t bitrate_kbps, double framerate_fps); + // Signals processor to finalize frame processing and handle possible tail + // drops. If not called expelicitly, this will be called in dtor. It is + // unexpected to get ProcessFrame() or SetRates() calls after Finalize(). + void Finalize(); + private: class VideoProcessorEncodeCompleteCallback : public webrtc::EncodedImageCallback { @@ -91,8 +98,7 @@ class VideoProcessor { Result OnEncodedImage( const webrtc::EncodedImage& encoded_image, - const webrtc::CodecSpecificInfo* codec_specific_info, - const webrtc::RTPFragmentationHeader* fragmentation) override { + const webrtc::CodecSpecificInfo* codec_specific_info) override { RTC_CHECK(codec_specific_info); // Post the callback to the right task queue, if needed. @@ -183,9 +189,20 @@ class VideoProcessor { size_t simulcast_svc_idx, bool inter_layer_predicted) RTC_RUN_ON(sequence_checker_); - // Test input/output. - VideoCodecTestFixture::Config config_ RTC_GUARDED_BY(sequence_checker_); + void CalcFrameQuality(const I420BufferInterface& decoded_frame, + FrameStatistics* frame_stat); + + void WriteDecodedFrame(const I420BufferInterface& decoded_frame, + FrameWriter& frame_writer); + + void HandleTailDrops(); + + // Test config. + const VideoCodecTestFixture::Config config_; const size_t num_simulcast_or_spatial_layers_; + const bool analyze_frame_quality_; + + // Frame statistics. VideoCodecTestStatsImpl* const stats_; // Codecs. @@ -241,7 +258,7 @@ class VideoProcessor { // simulcast_svc_idx -> frame_number. 
std::vector last_decoded_frame_num_ RTC_GUARDED_BY(sequence_checker_); // simulcast_svc_idx -> buffer. - std::vector decoded_frame_buffer_ + std::vector> last_decoded_frame_buffer_ RTC_GUARDED_BY(sequence_checker_); // Time spent in frame encode callback. It is accumulated for layers and @@ -249,6 +266,9 @@ class VideoProcessor { // is substracted from measured encode time. Thus we get pure encode time. int64_t post_encode_time_ns_ RTC_GUARDED_BY(sequence_checker_); + // Indicates whether Finalize() was called or not. + bool is_finalized_ RTC_GUARDED_BY(sequence_checker_); + // This class must be operated on a TaskQueue. SequenceChecker sequence_checker_; diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/modules/video_coding/codecs/vp8/default_temporal_layers.cc index 426ee76779..b5652593ae 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers.cc +++ b/modules/video_coding/codecs/vp8/default_temporal_layers.cc @@ -164,6 +164,12 @@ DefaultTemporalLayers::GetDependencyInfo(size_t num_layers) { // TL1 references 'last' and references and updates 'golden'. // TL2 references both 'last' & 'golden' and references and updates // 'arf'. + // 2-------2 2-------2 2 + // / __/ / __/ / + // / __1 / __1 / + // /___/ /___/ / + // 0---------------0---------------0----- + // 0 1 2 3 4 5 6 7 8 9 ... return {{"SSS", {kReferenceAndUpdate, kNone, kNone}}, {"--S", {kReference, kNone, kUpdate}}, {"-DR", {kReference, kUpdate, kNone}}, @@ -174,6 +180,12 @@ DefaultTemporalLayers::GetDependencyInfo(size_t num_layers) { // TL0 also references and updates the 'last' buffer. // TL1 also references 'last' and references and updates 'golden'. // TL2 references both 'last' and 'golden' but updates no buffer. + // 2 __2 _____2 __2 2 + // / /____/ / / / + // / 1---------/-----1 / + // /_____/ /_____/ / + // 0---------------0---------------0----- + // 0 1 2 3 4 5 6 7 8 9 ... 
return {{"SSS", {kReferenceAndUpdate, kNone, kNone}}, {"--D", {kReference, kNone, kNone, kFreezeEntropy}}, {"-SS", {kReference, kUpdate, kNone}}, @@ -554,10 +566,14 @@ void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, // subsequent frames. if (is_keyframe) { info->template_structure = GetTemplateStructure(num_layers_); + generic_frame_info.decode_target_indications = + temporal_pattern_.front().decode_target_indications; + generic_frame_info.temporal_id = 0; + } else { + generic_frame_info.decode_target_indications = + frame.dependency_info.decode_target_indications; + generic_frame_info.temporal_id = frame_config.packetizer_temporal_idx; } - generic_frame_info.decode_target_indications = - frame.dependency_info.decode_target_indications; - generic_frame_info.temporal_id = frame_config.packetizer_temporal_idx; if (!frame.expired) { for (Vp8BufferReference buffer : kAllBuffers) { @@ -592,48 +608,52 @@ FrameDependencyStructure DefaultTemporalLayers::GetTemplateStructure( FrameDependencyStructure template_structure; template_structure.num_decode_targets = num_layers; - using Builder = GenericFrameInfo::Builder; switch (num_layers) { case 1: { - template_structure.templates = { - Builder().T(0).Dtis("S").Build(), - Builder().T(0).Dtis("S").Fdiffs({1}).Build(), - }; + template_structure.templates.resize(2); + template_structure.templates[0].T(0).Dtis("S"); + template_structure.templates[1].T(0).Dtis("S").FrameDiffs({1}); return template_structure; } case 2: { - template_structure.templates = { - Builder().T(0).Dtis("SS").Build(), - Builder().T(0).Dtis("SS").Fdiffs({2}).Build(), - Builder().T(0).Dtis("SR").Fdiffs({2}).Build(), - Builder().T(1).Dtis("-S").Fdiffs({1}).Build(), - Builder().T(1).Dtis("-D").Fdiffs({1, 2}).Build(), - }; + template_structure.templates.resize(5); + template_structure.templates[0].T(0).Dtis("SS"); + template_structure.templates[1].T(0).Dtis("SS").FrameDiffs({2}); + template_structure.templates[2].T(0).Dtis("SR").FrameDiffs({2}); + 
template_structure.templates[3].T(1).Dtis("-S").FrameDiffs({1}); + template_structure.templates[4].T(1).Dtis("-D").FrameDiffs({2, 1}); return template_structure; } case 3: { - template_structure.templates = { - Builder().T(0).Dtis("SSS").Build(), - Builder().T(0).Dtis("SSS").Fdiffs({4}).Build(), - Builder().T(0).Dtis("SRR").Fdiffs({4}).Build(), - Builder().T(1).Dtis("-SR").Fdiffs({2}).Build(), - Builder().T(1).Dtis("-DR").Fdiffs({2, 4}).Build(), - Builder().T(2).Dtis("--D").Fdiffs({1}).Build(), - Builder().T(2).Dtis("--D").Fdiffs({1, 3}).Build(), - }; + if (field_trial::IsEnabled("WebRTC-UseShortVP8TL3Pattern")) { + template_structure.templates.resize(5); + template_structure.templates[0].T(0).Dtis("SSS"); + template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4}); + template_structure.templates[2].T(1).Dtis("-DR").FrameDiffs({2}); + template_structure.templates[3].T(2).Dtis("--S").FrameDiffs({1}); + template_structure.templates[4].T(2).Dtis("--D").FrameDiffs({2, 1}); + } else { + template_structure.templates.resize(7); + template_structure.templates[0].T(0).Dtis("SSS"); + template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4}); + template_structure.templates[2].T(0).Dtis("SRR").FrameDiffs({4}); + template_structure.templates[3].T(1).Dtis("-SS").FrameDiffs({2}); + template_structure.templates[4].T(1).Dtis("-DS").FrameDiffs({4, 2}); + template_structure.templates[5].T(2).Dtis("--D").FrameDiffs({1}); + template_structure.templates[6].T(2).Dtis("--D").FrameDiffs({3, 1}); + } return template_structure; } case 4: { - template_structure.templates = { - Builder().T(0).Dtis("SSSS").Build(), - Builder().T(0).Dtis("SSSS").Fdiffs({8}).Build(), - Builder().T(1).Dtis("-SRR").Fdiffs({4}).Build(), - Builder().T(1).Dtis("-SRR").Fdiffs({4, 8}).Build(), - Builder().T(2).Dtis("--SR").Fdiffs({2}).Build(), - Builder().T(2).Dtis("--SR").Fdiffs({2, 4}).Build(), - Builder().T(3).Dtis("---D").Fdiffs({1}).Build(), - Builder().T(3).Dtis("---D").Fdiffs({1, 3}).Build(), - }; + 
template_structure.templates.resize(8); + template_structure.templates[0].T(0).Dtis("SSSS"); + template_structure.templates[1].T(0).Dtis("SSSS").FrameDiffs({8}); + template_structure.templates[2].T(1).Dtis("-SRR").FrameDiffs({4}); + template_structure.templates[3].T(1).Dtis("-SRR").FrameDiffs({4, 8}); + template_structure.templates[4].T(2).Dtis("--SR").FrameDiffs({2}); + template_structure.templates[5].T(2).Dtis("--SR").FrameDiffs({2, 4}); + template_structure.templates[6].T(3).Dtis("---D").FrameDiffs({1}); + template_structure.templates[7].T(3).Dtis("---D").FrameDiffs({1, 3}); return template_structure; } default: diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.h b/modules/video_coding/codecs/vp8/default_temporal_layers.h index 29cfcf0489..d127d8056d 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers.h +++ b/modules/video_coding/codecs/vp8/default_temporal_layers.h @@ -75,7 +75,7 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController { DependencyInfo(absl::string_view indication_symbols, Vp8FrameConfig frame_config) : decode_target_indications( - GenericFrameInfo::DecodeTargetInfo(indication_symbols)), + webrtc_impl::StringToDecodeTargetIndications(indication_symbols)), frame_config(frame_config) {} absl::InlinedVector decode_target_indications; diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc index 54f091da5b..64ad40ab76 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc +++ b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc @@ -20,6 +20,7 @@ #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "test/field_trial.h" +#include "test/gmock.h" #include "test/gtest.h" #include "vpx/vp8cx.h" @@ -28,6 +29,9 @@ namespace webrtc { namespace test { namespace { + +using ::testing::Each; 
+ enum { kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF, @@ -674,6 +678,11 @@ TEST_F(TemporalLayersTest, KeyFrame) { << "Key frame should be marked layer sync."; EXPECT_EQ(0, info.codecSpecific.VP8.temporalIdx) << "Key frame should always be packetized as layer 0"; + EXPECT_EQ(0, info.generic_frame_info->temporal_id) + << "Key frame should always be packetized as layer 0"; + EXPECT_THAT(info.generic_frame_info->decode_target_indications, + Each(DecodeTargetIndication::kSwitch)) + << "Key frame is universal switch"; EXPECT_TRUE(checker.CheckTemporalConfig(true, tl_config)); } } diff --git a/modules/video_coding/codecs/vp8/include/vp8.h b/modules/video_coding/codecs/vp8/include/vp8.h index fc2c123874..44efbeeb3b 100644 --- a/modules/video_coding/codecs/vp8/include/vp8.h +++ b/modules/video_coding/codecs/vp8/include/vp8.h @@ -6,33 +6,50 @@ * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. - * - * WEBRTC VP8 wrapper interface */ #ifndef MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_ #define MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_ #include +#include +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "modules/video_coding/include/video_codec_interface.h" +#include "rtc_base/deprecation.h" namespace webrtc { +// TODO(brandtr): Move these interfaces to the api/ folder. class VP8Encoder { public: + struct Settings { + // Allows for overriding the Vp8FrameBufferController used by the encoder. + // If unset, a default Vp8FrameBufferController will be instantiated + // internally. + std::unique_ptr + frame_buffer_controller_factory = nullptr; + + // Allows for overriding the resolution/bitrate limits exposed through + // VideoEncoder::GetEncoderInfo(). No override is done if empty. 
+ std::vector + resolution_bitrate_limits = {}; + }; + static std::unique_ptr Create(); + static std::unique_ptr Create(Settings settings); - static std::unique_ptr Create( + RTC_DEPRECATED static std::unique_ptr Create( std::unique_ptr frame_buffer_controller_factory); -}; // end of VP8Encoder class +}; class VP8Decoder { public: static std::unique_ptr Create(); -}; // end of VP8Decoder class +}; + } // namespace webrtc #endif // MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_ diff --git a/modules/video_coding/codecs/vp8/libvpx_interface.cc b/modules/video_coding/codecs/vp8/libvpx_interface.cc index 1a3df403ae..7bf6117157 100644 --- a/modules/video_coding/codecs/vp8/libvpx_interface.cc +++ b/modules/video_coding/codecs/vp8/libvpx_interface.cc @@ -195,6 +195,10 @@ class LibvpxVp8Facade : public LibvpxInterface { vpx_codec_iter_t* iter) const override { return ::vpx_codec_get_cx_data(ctx, iter); } + + const char* codec_error_detail(vpx_codec_ctx_t* ctx) const override { + return ::vpx_codec_error_detail(ctx); + } }; } // namespace diff --git a/modules/video_coding/codecs/vp8/libvpx_interface.h b/modules/video_coding/codecs/vp8/libvpx_interface.h index fe40dedeca..3da38ea24a 100644 --- a/modules/video_coding/codecs/vp8/libvpx_interface.h +++ b/modules/video_coding/codecs/vp8/libvpx_interface.h @@ -93,6 +93,8 @@ class LibvpxInterface { vpx_codec_ctx_t* ctx, vpx_codec_iter_t* iter) const = 0; + virtual const char* codec_error_detail(vpx_codec_ctx_t* ctx) const = 0; + // Returns interface wrapping the actual libvpx functions. 
static std::unique_ptr CreateEncoder(); }; diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc index a3ee2c0c41..af48c92535 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc @@ -44,26 +44,48 @@ constexpr int kVp8ErrorPropagationTh = 30; constexpr long kDecodeDeadlineRealtime = 1; // NOLINT const char kVp8PostProcArmFieldTrial[] = "WebRTC-VP8-Postproc-Config-Arm"; +const char kVp8PostProcFieldTrial[] = "WebRTC-VP8-Postproc-Config"; -void GetPostProcParamsFromFieldTrialGroup( - LibvpxVp8Decoder::DeblockParams* deblock_params) { - std::string group = - webrtc::field_trial::FindFullName(kVp8PostProcArmFieldTrial); - if (group.empty()) - return; +#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \ + defined(WEBRTC_ANDROID) +constexpr bool kIsArm = true; +#else +constexpr bool kIsArm = false; +#endif + +absl::optional DefaultDeblockParams() { + if (kIsArm) { + // For ARM, this is only called when deblocking is explicitly enabled, and + // the default strength is set by the ctor. + return LibvpxVp8Decoder::DeblockParams(); + } + // For non-arm, don't use the explicit deblocking settings by default. + return absl::nullopt; +} + +absl::optional +GetPostProcParamsFromFieldTrialGroup() { + std::string group = webrtc::field_trial::FindFullName( + kIsArm ? 
kVp8PostProcArmFieldTrial : kVp8PostProcFieldTrial); + if (group.empty()) { + return DefaultDeblockParams(); + } LibvpxVp8Decoder::DeblockParams params; if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &params.max_level, - &params.min_qp, &params.degrade_qp) != 3) - return; + &params.min_qp, &params.degrade_qp) != 3) { + return DefaultDeblockParams(); + } - if (params.max_level < 0 || params.max_level > 16) - return; + if (params.max_level < 0 || params.max_level > 16) { + return DefaultDeblockParams(); + } - if (params.min_qp < 0 || params.degrade_qp <= params.min_qp) - return; + if (params.min_qp < 0 || params.degrade_qp <= params.min_qp) { + return DefaultDeblockParams(); + } - *deblock_params = params; + return params; } } // namespace @@ -97,8 +119,9 @@ class LibvpxVp8Decoder::QpSmoother { }; LibvpxVp8Decoder::LibvpxVp8Decoder() - : use_postproc_arm_( - webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial)), + : use_postproc_( + kIsArm ? webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial) + : true), buffer_pool_(false, 300 /* max_number_of_buffers*/), decode_complete_callback_(NULL), inited_(false), @@ -107,10 +130,12 @@ LibvpxVp8Decoder::LibvpxVp8Decoder() last_frame_width_(0), last_frame_height_(0), key_frame_required_(true), - qp_smoother_(use_postproc_arm_ ? new QpSmoother() : nullptr) { - if (use_postproc_arm_) - GetPostProcParamsFromFieldTrialGroup(&deblock_); -} + deblock_params_(use_postproc_ ? GetPostProcParamsFromFieldTrialGroup() + : absl::nullopt), + qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr), + preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode") + ? 
VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} LibvpxVp8Decoder::~LibvpxVp8Decoder() { inited_ = true; // in order to do the actual release @@ -131,12 +156,7 @@ int LibvpxVp8Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) { cfg.threads = 1; cfg.h = cfg.w = 0; // set after decode -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \ - defined(WEBRTC_ANDROID) - vpx_codec_flags_t flags = use_postproc_arm_ ? VPX_CODEC_USE_POSTPROC : 0; -#else - vpx_codec_flags_t flags = VPX_CODEC_USE_POSTPROC; -#endif + vpx_codec_flags_t flags = use_postproc_ ? VPX_CODEC_USE_POSTPROC : 0; if (vpx_codec_dec_init(decoder_, vpx_codec_vp8_dx(), &cfg, flags)) { delete decoder_; @@ -174,63 +194,60 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image, } // Post process configurations. -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \ - defined(WEBRTC_ANDROID) - if (use_postproc_arm_) { + if (use_postproc_) { vp8_postproc_cfg_t ppcfg; + // MFQE enabled to reduce key frame popping. ppcfg.post_proc_flag = VP8_MFQE; - // For low resolutions, use stronger deblocking filter. - int last_width_x_height = last_frame_width_ * last_frame_height_; - if (last_width_x_height > 0 && last_width_x_height <= 320 * 240) { - // Enable the deblock and demacroblocker based on qp thresholds. - RTC_DCHECK(qp_smoother_); - int qp = qp_smoother_->GetAvg(); - if (qp > deblock_.min_qp) { - int level = deblock_.max_level; - if (qp < deblock_.degrade_qp) { - // Use lower level. - level = deblock_.max_level * (qp - deblock_.min_qp) / - (deblock_.degrade_qp - deblock_.min_qp); + + if (kIsArm) { + RTC_DCHECK(deblock_params_.has_value()); + } + if (deblock_params_.has_value()) { + // For low resolutions, use stronger deblocking filter. + int last_width_x_height = last_frame_width_ * last_frame_height_; + if (last_width_x_height > 0 && last_width_x_height <= 320 * 240) { + // Enable the deblock and demacroblocker based on qp thresholds. 
+ RTC_DCHECK(qp_smoother_); + int qp = qp_smoother_->GetAvg(); + if (qp > deblock_params_->min_qp) { + int level = deblock_params_->max_level; + if (qp < deblock_params_->degrade_qp) { + // Use lower level. + level = deblock_params_->max_level * + (qp - deblock_params_->min_qp) / + (deblock_params_->degrade_qp - deblock_params_->min_qp); + } + // Deblocking level only affects VP8_DEMACROBLOCK. + ppcfg.deblocking_level = std::max(level, 1); + ppcfg.post_proc_flag |= VP8_DEBLOCK | VP8_DEMACROBLOCK; } - // Deblocking level only affects VP8_DEMACROBLOCK. - ppcfg.deblocking_level = std::max(level, 1); - ppcfg.post_proc_flag |= VP8_DEBLOCK | VP8_DEMACROBLOCK; } + } else { + // Non-arm with no explicit deblock params set. + ppcfg.post_proc_flag |= VP8_DEBLOCK; + // For VGA resolutions and lower, enable the demacroblocker postproc. + if (last_frame_width_ * last_frame_height_ <= 640 * 360) { + ppcfg.post_proc_flag |= VP8_DEMACROBLOCK; + } + // Strength of deblocking filter. Valid range:[0,16] + ppcfg.deblocking_level = 3; } + vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg); } -#else - vp8_postproc_cfg_t ppcfg; - // MFQE enabled to reduce key frame popping. - ppcfg.post_proc_flag = VP8_MFQE | VP8_DEBLOCK; - // For VGA resolutions and lower, enable the demacroblocker postproc. - if (last_frame_width_ * last_frame_height_ <= 640 * 360) { - ppcfg.post_proc_flag |= VP8_DEMACROBLOCK; - } - // Strength of deblocking filter. Valid range:[0,16] - ppcfg.deblocking_level = 3; - vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg); -#endif // Always start with a complete key frame. if (key_frame_required_) { if (input_image._frameType != VideoFrameType::kVideoFrameKey) return WEBRTC_VIDEO_CODEC_ERROR; - // We have a key frame - is it complete? - if (input_image._completeFrame) { - key_frame_required_ = false; - } else { - return WEBRTC_VIDEO_CODEC_ERROR; - } + key_frame_required_ = false; } // Restrict error propagation using key frame requests. // Reset on a key frame refresh. 
- if (input_image._frameType == VideoFrameType::kVideoFrameKey && - input_image._completeFrame) { + if (input_image._frameType == VideoFrameType::kVideoFrameKey) { propagation_cnt_ = -1; // Start count on first loss. - } else if ((!input_image._completeFrame || missing_frames) && - propagation_cnt_ == -1) { + } else if (missing_frames && propagation_cnt_ == -1) { propagation_cnt_ = 0; } if (propagation_cnt_ >= 0) { @@ -307,8 +324,39 @@ int LibvpxVp8Decoder::ReturnFrame( last_frame_width_ = img->d_w; last_frame_height_ = img->d_h; // Allocate memory for decoded image. - rtc::scoped_refptr buffer = - buffer_pool_.CreateBuffer(img->d_w, img->d_h); + rtc::scoped_refptr buffer; + + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + // Convert instead of making a copy. + // Note: libvpx doesn't support creating NV12 image directly. + // Due to the bitstream structure such a change would just hide the + // conversion operation inside the decode call. + rtc::scoped_refptr nv12_buffer = + buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); + buffer = nv12_buffer; + if (nv12_buffer.get()) { + libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), + img->d_w, img->d_h); + } + } else { + rtc::scoped_refptr i420_buffer = + buffer_pool_.CreateI420Buffer(img->d_w, img->d_h); + buffer = i420_buffer; + if (i420_buffer.get()) { + libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + img->d_w, img->d_h); + } + } + if (!buffer.get()) { // Pool has too 
many pending frames. RTC_HISTOGRAM_BOOLEAN("WebRTC.Video.LibvpxVp8Decoder.TooManyPendingFrames", @@ -316,14 +364,6 @@ int LibvpxVp8Decoder::ReturnFrame( return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } - libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - buffer->MutableDataY(), buffer->StrideY(), - buffer->MutableDataU(), buffer->StrideU(), - buffer->MutableDataV(), buffer->StrideV(), img->d_w, - img->d_h); - VideoFrame decoded_image = VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_timestamp_rtp(timestamp) diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h index d9bfee81c1..cf699f1833 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h @@ -13,9 +13,10 @@ #include +#include "absl/types/optional.h" #include "api/video/encoded_image.h" #include "api/video_codecs/video_decoder.h" -#include "common_video/include/i420_buffer_pool.h" +#include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_codec_interface.h" #include "vpx/vp8dx.h" @@ -51,9 +52,9 @@ class LibvpxVp8Decoder : public VideoDecoder { uint32_t timeStamp, int qp, const webrtc::ColorSpace* explicit_color_space); - const bool use_postproc_arm_; + const bool use_postproc_; - I420BufferPool buffer_pool_; + VideoFrameBufferPool buffer_pool_; DecodedImageCallback* decode_complete_callback_; bool inited_; vpx_codec_ctx_t* decoder_; @@ -61,8 +62,11 @@ class LibvpxVp8Decoder : public VideoDecoder { int last_frame_width_; int last_frame_height_; bool key_frame_required_; - DeblockParams deblock_; + const absl::optional deblock_params_; const std::unique_ptr qp_smoother_; + + // Decoder should produce this format if possible. 
+ const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index e3776aac92..340817658d 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -33,7 +33,6 @@ #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/utility/simulcast_utility.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/experimental_screenshare_settings.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" #include "rtc_base/logging.h" @@ -45,12 +44,17 @@ namespace webrtc { namespace { #if defined(WEBRTC_IOS) -const char kVP8IosMaxNumberOfThreadFieldTrial[] = +constexpr char kVP8IosMaxNumberOfThreadFieldTrial[] = "WebRTC-VP8IosMaxNumberOfThread"; -const char kVP8IosMaxNumberOfThreadFieldTrialParameter[] = "max_thread"; +constexpr char kVP8IosMaxNumberOfThreadFieldTrialParameter[] = "max_thread"; #endif -const char kVp8ForcePartitionResilience[] = +constexpr char kVp8GetEncoderInfoOverrideFieldTrial[] = + "WebRTC-VP8-GetEncoderInfoOverride"; +constexpr char kVp8RequestedResolutionAlignmentFieldTrialParameter[] = + "requested_resolution_alignment"; + +constexpr char kVp8ForcePartitionResilience[] = "WebRTC-VP8-ForcePartitionResilience"; // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the @@ -64,9 +68,6 @@ constexpr uint32_t kVp832ByteAlign = 32u; constexpr int kRtpTicksPerSecond = 90000; constexpr int kRtpTicksPerMs = kRtpTicksPerSecond / 1000; -constexpr double kLowRateFactor = 1.0; -constexpr double kHighRateFactor = 2.0; - // VP8 denoiser states. enum denoiserState : uint32_t { kDenoiserOff, @@ -78,15 +79,6 @@ enum denoiserState : uint32_t { kDenoiserOnAdaptive }; -// These settings correspond to the settings in vpx_codec_enc_cfg. 
-struct Vp8RateSettings { - uint32_t rc_undershoot_pct; - uint32_t rc_overshoot_pct; - uint32_t rc_buf_sz; - uint32_t rc_buf_optimal_sz; - uint32_t rc_dropframe_thresh; -}; - // Greatest common divisior int GCD(int a, int b) { int c = a % b; @@ -98,56 +90,6 @@ int GCD(int a, int b) { return b; } -uint32_t Interpolate(uint32_t low, - uint32_t high, - double bandwidth_headroom_factor) { - RTC_DCHECK_GE(bandwidth_headroom_factor, kLowRateFactor); - RTC_DCHECK_LE(bandwidth_headroom_factor, kHighRateFactor); - - // |factor| is between 0.0 and 1.0. - const double factor = bandwidth_headroom_factor - kLowRateFactor; - - return static_cast(((1.0 - factor) * low) + (factor * high) + 0.5); -} - -Vp8RateSettings GetRateSettings(double bandwidth_headroom_factor) { - static const Vp8RateSettings low_settings{1000u, 0u, 100u, 30u, 40u}; - static const Vp8RateSettings high_settings{100u, 15u, 1000u, 600u, 5u}; - - if (bandwidth_headroom_factor <= kLowRateFactor) { - return low_settings; - } else if (bandwidth_headroom_factor >= kHighRateFactor) { - return high_settings; - } - - Vp8RateSettings settings; - settings.rc_undershoot_pct = - Interpolate(low_settings.rc_undershoot_pct, - high_settings.rc_undershoot_pct, bandwidth_headroom_factor); - settings.rc_overshoot_pct = - Interpolate(low_settings.rc_overshoot_pct, high_settings.rc_overshoot_pct, - bandwidth_headroom_factor); - settings.rc_buf_sz = - Interpolate(low_settings.rc_buf_sz, high_settings.rc_buf_sz, - bandwidth_headroom_factor); - settings.rc_buf_optimal_sz = - Interpolate(low_settings.rc_buf_optimal_sz, - high_settings.rc_buf_optimal_sz, bandwidth_headroom_factor); - settings.rc_dropframe_thresh = - Interpolate(low_settings.rc_dropframe_thresh, - high_settings.rc_dropframe_thresh, bandwidth_headroom_factor); - return settings; -} - -void UpdateRateSettings(vpx_codec_enc_cfg_t* config, - const Vp8RateSettings& new_settings) { - config->rc_undershoot_pct = new_settings.rc_undershoot_pct; - config->rc_overshoot_pct = 
new_settings.rc_overshoot_pct; - config->rc_buf_sz = new_settings.rc_buf_sz; - config->rc_buf_optimal_sz = new_settings.rc_buf_optimal_sz; - config->rc_dropframe_thresh = new_settings.rc_dropframe_thresh; -} - static_assert(Vp8EncoderConfig::TemporalLayerConfig::kMaxPeriodicity == VPX_TS_MAX_PERIODICITY, "Vp8EncoderConfig::kMaxPeriodicity must be kept in sync with the " @@ -203,6 +145,11 @@ void ApplyVp8EncoderConfigToVpxConfig(const Vp8EncoderConfig& encoder_config, vpx_config->ts_periodicity = ts_config.ts_periodicity; std::copy(ts_config.ts_layer_id.begin(), ts_config.ts_layer_id.end(), std::begin(vpx_config->ts_layer_id)); + } else { + vpx_config->ts_number_layers = 1; + vpx_config->ts_rate_decimator[0] = 1; + vpx_config->ts_periodicity = 1; + vpx_config->ts_layer_id[0] = 0; } if (encoder_config.rc_target_bitrate.has_value()) { @@ -218,17 +165,36 @@ void ApplyVp8EncoderConfigToVpxConfig(const Vp8EncoderConfig& encoder_config, } } +absl::optional GetRequestedResolutionAlignmentOverride() { + const std::string trial_string = + field_trial::FindFullName(kVp8GetEncoderInfoOverrideFieldTrial); + FieldTrialOptional requested_resolution_alignment( + kVp8RequestedResolutionAlignmentFieldTrialParameter); + ParseFieldTrial({&requested_resolution_alignment}, trial_string); + return requested_resolution_alignment.GetOptional(); +} + } // namespace std::unique_ptr VP8Encoder::Create() { - return VP8Encoder::Create(nullptr); + return std::make_unique(LibvpxInterface::CreateEncoder(), + VP8Encoder::Settings()); +} + +std::unique_ptr VP8Encoder::Create( + VP8Encoder::Settings settings) { + return std::make_unique(LibvpxInterface::CreateEncoder(), + std::move(settings)); } std::unique_ptr VP8Encoder::Create( std::unique_ptr frame_buffer_controller_factory) { - return std::make_unique( - std::move(frame_buffer_controller_factory)); + VP8Encoder::Settings settings; + settings.frame_buffer_controller_factory = + std::move(frame_buffer_controller_factory); + return 
std::make_unique(LibvpxInterface::CreateEncoder(), + std::move(settings)); } vpx_enc_frame_flags_t LibvpxVp8Encoder::EncodeFlags( @@ -260,42 +226,19 @@ vpx_enc_frame_flags_t LibvpxVp8Encoder::EncodeFlags( return flags; } -LibvpxVp8Encoder::LibvpxVp8Encoder() - : LibvpxVp8Encoder(nullptr, LibvpxInterface::CreateEncoder()) {} - -LibvpxVp8Encoder::LibvpxVp8Encoder( - std::unique_ptr - frame_buffer_controller_factory) - : LibvpxVp8Encoder(std::move(frame_buffer_controller_factory), - LibvpxInterface::CreateEncoder()) {} - -LibvpxVp8Encoder::LibvpxVp8Encoder(std::unique_ptr interface) - : LibvpxVp8Encoder(nullptr, std::move(interface)) {} - -LibvpxVp8Encoder::LibvpxVp8Encoder( - std::unique_ptr - frame_buffer_controller_factory, - std::unique_ptr interface) +LibvpxVp8Encoder::LibvpxVp8Encoder(std::unique_ptr interface, + VP8Encoder::Settings settings) : libvpx_(std::move(interface)), - experimental_cpu_speed_config_arm_(CpuSpeedExperiment::GetConfigs()), rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), - screenshare_max_qp_( - ExperimentalScreenshareSettings::ParseFromFieldTrials().MaxQp()), - encoded_complete_callback_(nullptr), - inited_(false), - timestamp_(0), - qp_max_(56), // Setting for max quantizer. 
- cpu_speed_default_(-6), - number_of_cores_(0), - rc_max_intra_target_(0), + requested_resolution_alignment_override_( + GetRequestedResolutionAlignmentOverride()), frame_buffer_controller_factory_( - std::move(frame_buffer_controller_factory)), + std::move(settings.frame_buffer_controller_factory)), + resolution_bitrate_limits_(std::move(settings.resolution_bitrate_limits)), key_frame_request_(kMaxSimulcastStreams, false), variable_framerate_experiment_(ParseVariableFramerateConfig( "WebRTC-VP8VariableFramerateScreenshare")), - framerate_controller_(variable_framerate_experiment_.framerate_limit), - num_steady_state_frames_(0), - fec_controller_override_(nullptr) { + framerate_controller_(variable_framerate_experiment_.framerate_limit) { // TODO(eladalon/ilnik): These reservations might be wasting memory. // InitEncode() is resizing to the actual size, which might be smaller. raw_images_.reserve(kMaxSimulcastStreams); @@ -402,18 +345,12 @@ void LibvpxVp8Encoder::SetRates(const RateControlParameters& parameters) { UpdateVpxConfiguration(stream_idx); - if (rate_control_settings_.Vp8DynamicRateSettings()) { - // Tweak rate control settings based on available network headroom. 
- UpdateRateSettings( - &vpx_configs_[i], - GetRateSettings(parameters.bandwidth_allocation.bps() / - parameters.bitrate.get_sum_bps())); - } - vpx_codec_err_t err = libvpx_->codec_enc_config_set(&encoders_[i], &vpx_configs_[i]); if (err != VPX_CODEC_OK) { - RTC_LOG(LS_WARNING) << "Error configuring codec, error code: " << err; + RTC_LOG(LS_WARNING) << "Error configuring codec, error code: " << err + << ", details: " + << libvpx_->codec_error_detail(&encoders_[i]); } } } @@ -475,9 +412,25 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, if (settings.number_of_cores < 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) { + + num_active_streams_ = 0; + for (int i = 0; i < inst->numberOfSimulcastStreams; ++i) { + if (inst->simulcastStream[i].active) { + ++num_active_streams_; + } + } + if (inst->numberOfSimulcastStreams == 0 && inst->active) { + num_active_streams_ = 1; + } + + if (inst->VP8().automaticResizeOn && num_active_streams_ > 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } + + // Use the previous pixel format to avoid extra image allocations. + vpx_img_fmt_t pixel_format = + raw_images_.empty() ? 
VPX_IMG_FMT_I420 : raw_images_[0].fmt; + int retVal = Release(); if (retVal < 0) { return retVal; @@ -535,9 +488,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, downsampling_factors_[number_of_streams - 1].num = 1; downsampling_factors_[number_of_streams - 1].den = 1; } - for (int i = 0; i < number_of_streams; ++i) { - encoded_images_[i]._completeFrame = true; - } + // populate encoder configuration with default values if (libvpx_->codec_enc_config_default(vpx_codec_vp8_cx(), &vpx_configs_[0], 0)) { @@ -580,9 +531,6 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, qp_max_ = std::max(rate_control_settings_.LibvpxVp8QpMax().value(), static_cast(vpx_configs_[0].rc_min_quantizer)); } - if (codec_.mode == VideoCodecMode::kScreensharing && screenshare_max_qp_) { - qp_max_ = *screenshare_max_qp_; - } vpx_configs_[0].rc_max_quantizer = qp_max_; vpx_configs_[0].rc_undershoot_pct = 100; vpx_configs_[0].rc_overshoot_pct = 15; @@ -634,8 +582,8 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, // Creating a wrapper to the image - setting image data to NULL. // Actual pointer will be set in encode. Setting align to 1, as it // is meaningless (no memory allocation is done here). - libvpx_->img_wrap(&raw_images_[0], VPX_IMG_FMT_I420, inst->width, - inst->height, 1, NULL); + libvpx_->img_wrap(&raw_images_[0], pixel_format, inst->width, inst->height, 1, + NULL); // Note the order we use is different from webm, we have lowest resolution // at position 0 and they have highest resolution at position 0. @@ -683,10 +631,9 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, // Setting alignment to 32 - as that ensures at least 16 for all // planes (32 for Y, 16 for U,V). Libvpx sets the requested stride for // the y plane, but only half of it to the u and v planes. 
- libvpx_->img_alloc(&raw_images_[i], VPX_IMG_FMT_I420, - inst->simulcastStream[stream_idx].width, - inst->simulcastStream[stream_idx].height, - kVp832ByteAlign); + libvpx_->img_alloc( + &raw_images_[i], pixel_format, inst->simulcastStream[stream_idx].width, + inst->simulcastStream[stream_idx].height, kVp832ByteAlign); SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx); vpx_configs_[i].rc_target_bitrate = stream_bitrates[stream_idx]; if (stream_bitrates[stream_idx] > 0) { @@ -713,14 +660,17 @@ int LibvpxVp8Encoder::GetCpuSpeed(int width, int height) { // On mobile platform, use a lower speed setting for lower resolutions for // CPUs with 4 or more cores. RTC_DCHECK_GT(number_of_cores_, 0); + if (experimental_cpu_speed_config_arm_ + .GetValue(width * height, number_of_cores_) + .has_value()) { + return experimental_cpu_speed_config_arm_ + .GetValue(width * height, number_of_cores_) + .value(); + } + if (number_of_cores_ <= 3) return -12; - if (experimental_cpu_speed_config_arm_) { - return CpuSpeedExperiment::GetValue(width * height, - *experimental_cpu_speed_config_arm_); - } - if (width * height <= 352 * 288) return -8; else if (width * height <= 640 * 480) @@ -880,7 +830,7 @@ size_t LibvpxVp8Encoder::SteadyStateSize(int sid, int tid) { const int encoder_id = encoders_.size() - 1 - sid; size_t bitrate_bps; float fps; - if (SimulcastUtility::IsConferenceModeScreenshare(codec_) || + if ((SimulcastUtility::IsConferenceModeScreenshare(codec_) && sid == 0) || vpx_configs_[encoder_id].ts_number_layers <= 1) { // In conference screenshare there's no defined per temporal layer bitrate // and framerate. @@ -995,26 +945,31 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, flags[i] = send_key_frame ? 
VPX_EFLAG_FORCE_KF : EncodeFlags(tl_configs[i]); } - rtc::scoped_refptr input_image = - frame.video_frame_buffer()->ToI420(); + rtc::scoped_refptr input_image = frame.video_frame_buffer(); // Since we are extracting raw pointers from |input_image| to // |raw_images_[0]|, the resolution of these frames must match. RTC_DCHECK_EQ(input_image->width(), raw_images_[0].d_w); RTC_DCHECK_EQ(input_image->height(), raw_images_[0].d_h); - - // Image in vpx_image_t format. - // Input image is const. VP8's raw image is not defined as const. - raw_images_[0].planes[VPX_PLANE_Y] = - const_cast(input_image->DataY()); - raw_images_[0].planes[VPX_PLANE_U] = - const_cast(input_image->DataU()); - raw_images_[0].planes[VPX_PLANE_V] = - const_cast(input_image->DataV()); - - raw_images_[0].stride[VPX_PLANE_Y] = input_image->StrideY(); - raw_images_[0].stride[VPX_PLANE_U] = input_image->StrideU(); - raw_images_[0].stride[VPX_PLANE_V] = input_image->StrideV(); - + switch (input_image->type()) { + case VideoFrameBuffer::Type::kI420: + PrepareI420Image(input_image->GetI420()); + break; + case VideoFrameBuffer::Type::kNV12: + PrepareNV12Image(input_image->GetNV12()); + break; + default: { + rtc::scoped_refptr i420_image = + input_image->ToI420(); + if (!i420_image) { + RTC_LOG(LS_ERROR) << "Failed to convert " + << VideoFrameBufferTypeToString(input_image->type()) + << " image to I420. 
Can't encode frame."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + input_image = i420_image; + PrepareI420Image(i420_image); + } + } struct CleanUpOnExit { explicit CleanUpOnExit(vpx_image_t& raw_image) : raw_image_(raw_image) {} ~CleanUpOnExit() { @@ -1025,22 +980,6 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, vpx_image_t& raw_image_; } clean_up_on_exit(raw_images_[0]); - for (size_t i = 1; i < encoders_.size(); ++i) { - // Scale the image down a number of times by downsampling factor - libyuv::I420Scale( - raw_images_[i - 1].planes[VPX_PLANE_Y], - raw_images_[i - 1].stride[VPX_PLANE_Y], - raw_images_[i - 1].planes[VPX_PLANE_U], - raw_images_[i - 1].stride[VPX_PLANE_U], - raw_images_[i - 1].planes[VPX_PLANE_V], - raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w, - raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], - raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], - raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V], - raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w, - raw_images_[i].d_h, libyuv::kFilterBilinear); - } - if (send_key_frame) { // Adapt the size of the key frame when in screenshare with 1 temporal // layer. 
@@ -1199,7 +1138,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, &qp_128); encoded_images_[encoder_idx].qp_ = qp_128; encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx], - &codec_specific, nullptr); + &codec_specific); const size_t steady_state_size = SteadyStateSize( stream_idx, codec_specific.codecSpecific.VP8.temporalIdx); if (qp_128 > variable_framerate_experiment_.steady_state_qp || @@ -1231,10 +1170,19 @@ VideoEncoder::EncoderInfo LibvpxVp8Encoder::GetEncoderInfo() const { info.is_hardware_accelerated = false; info.has_internal_source = false; info.supports_simulcast = true; + if (!resolution_bitrate_limits_.empty()) { + info.resolution_bitrate_limits = resolution_bitrate_limits_; + } + if (requested_resolution_alignment_override_) { + info.requested_resolution_alignment = + *requested_resolution_alignment_override_; + } + + const bool enable_scaling = + num_active_streams_ == 1 && + (vpx_configs_.empty() || vpx_configs_[0].rc_dropframe_thresh > 0) && + codec_.VP8().automaticResizeOn; - const bool enable_scaling = encoders_.size() == 1 && - vpx_configs_[0].rc_dropframe_thresh > 0 && - codec_.VP8().automaticResizeOn; info.scaling_settings = enable_scaling ? VideoEncoder::ScalingSettings( kLowVp8QpThreshold, kHighVp8QpThreshold) @@ -1243,28 +1191,33 @@ VideoEncoder::EncoderInfo LibvpxVp8Encoder::GetEncoderInfo() const { info.scaling_settings.min_pixels_per_frame = rate_control_settings_.LibvpxVp8MinPixels().value(); } - // |encoder_idx| is libvpx index where 0 is highest resolution. - // |si| is simulcast index, where 0 is lowest resolution. 
- for (size_t si = 0, encoder_idx = encoders_.size() - 1; si < encoders_.size(); - ++si, --encoder_idx) { - info.fps_allocation[si].clear(); - if ((codec_.numberOfSimulcastStreams > si && - !codec_.simulcastStream[si].active) || - (si == 0 && SimulcastUtility::IsConferenceModeScreenshare(codec_))) { - // No defined frame rate fractions if not active or if using - // ScreenshareLayers, leave vector empty and continue; - continue; - } - if (vpx_configs_[encoder_idx].ts_number_layers <= 1) { - info.fps_allocation[si].push_back(EncoderInfo::kMaxFramerateFraction); - } else { - for (size_t ti = 0; ti < vpx_configs_[encoder_idx].ts_number_layers; - ++ti) { - RTC_DCHECK_GT(vpx_configs_[encoder_idx].ts_rate_decimator[ti], 0); - info.fps_allocation[si].push_back(rtc::saturated_cast( - EncoderInfo::kMaxFramerateFraction / - vpx_configs_[encoder_idx].ts_rate_decimator[ti] + - 0.5)); + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; + + if (inited_) { + // |encoder_idx| is libvpx index where 0 is highest resolution. + // |si| is simulcast index, where 0 is lowest resolution. 
+ for (size_t si = 0, encoder_idx = encoders_.size() - 1; + si < encoders_.size(); ++si, --encoder_idx) { + info.fps_allocation[si].clear(); + if ((codec_.numberOfSimulcastStreams > si && + !codec_.simulcastStream[si].active) || + (si == 0 && SimulcastUtility::IsConferenceModeScreenshare(codec_))) { + // No defined frame rate fractions if not active or if using + // ScreenshareLayers, leave vector empty and continue; + continue; + } + if (vpx_configs_[encoder_idx].ts_number_layers <= 1) { + info.fps_allocation[si].push_back(EncoderInfo::kMaxFramerateFraction); + } else { + for (size_t ti = 0; ti < vpx_configs_[encoder_idx].ts_number_layers; + ++ti) { + RTC_DCHECK_GT(vpx_configs_[encoder_idx].ts_rate_decimator[ti], 0); + info.fps_allocation[si].push_back(rtc::saturated_cast( + EncoderInfo::kMaxFramerateFraction / + vpx_configs_[encoder_idx].ts_rate_decimator[ti] + + 0.5)); + } } } } @@ -1278,17 +1231,99 @@ int LibvpxVp8Encoder::RegisterEncodeCompleteCallback( return WEBRTC_VIDEO_CODEC_OK; } +void LibvpxVp8Encoder::MaybeUpdatePixelFormat(vpx_img_fmt fmt) { + RTC_DCHECK(!raw_images_.empty()); + if (raw_images_[0].fmt == fmt) { + RTC_DCHECK(std::all_of( + std::next(raw_images_.begin()), raw_images_.end(), + [fmt](const vpx_image_t& raw_img) { return raw_img.fmt == fmt; })) + << "Not all raw images had the right format!"; + return; + } + RTC_LOG(INFO) << "Updating vp8 encoder pixel format to " + << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420"); + for (size_t i = 0; i < raw_images_.size(); ++i) { + vpx_image_t& img = raw_images_[i]; + auto d_w = img.d_w; + auto d_h = img.d_h; + libvpx_->img_free(&img); + // First image is wrapping the input frame, the rest are allocated. 
+ if (i == 0) { + libvpx_->img_wrap(&img, fmt, d_w, d_h, 1, NULL); + } else { + libvpx_->img_alloc(&img, fmt, d_w, d_h, kVp832ByteAlign); + } + } +} + +void LibvpxVp8Encoder::PrepareI420Image(const I420BufferInterface* frame) { + RTC_DCHECK(!raw_images_.empty()); + MaybeUpdatePixelFormat(VPX_IMG_FMT_I420); + // Image in vpx_image_t format. + // Input image is const. VP8's raw image is not defined as const. + raw_images_[0].planes[VPX_PLANE_Y] = const_cast(frame->DataY()); + raw_images_[0].planes[VPX_PLANE_U] = const_cast(frame->DataU()); + raw_images_[0].planes[VPX_PLANE_V] = const_cast(frame->DataV()); + + raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY(); + raw_images_[0].stride[VPX_PLANE_U] = frame->StrideU(); + raw_images_[0].stride[VPX_PLANE_V] = frame->StrideV(); + + for (size_t i = 1; i < encoders_.size(); ++i) { + // Scale the image down a number of times by downsampling factor + libyuv::I420Scale( + raw_images_[i - 1].planes[VPX_PLANE_Y], + raw_images_[i - 1].stride[VPX_PLANE_Y], + raw_images_[i - 1].planes[VPX_PLANE_U], + raw_images_[i - 1].stride[VPX_PLANE_U], + raw_images_[i - 1].planes[VPX_PLANE_V], + raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w, + raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], + raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], + raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V], + raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w, + raw_images_[i].d_h, libyuv::kFilterBilinear); + } +} + +void LibvpxVp8Encoder::PrepareNV12Image(const NV12BufferInterface* frame) { + RTC_DCHECK(!raw_images_.empty()); + MaybeUpdatePixelFormat(VPX_IMG_FMT_NV12); + // Image in vpx_image_t format. + // Input image is const. VP8's raw image is not defined as const. 
+ raw_images_[0].planes[VPX_PLANE_Y] = const_cast(frame->DataY()); + raw_images_[0].planes[VPX_PLANE_U] = const_cast(frame->DataUV()); + raw_images_[0].planes[VPX_PLANE_V] = raw_images_[0].planes[VPX_PLANE_U] + 1; + raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY(); + raw_images_[0].stride[VPX_PLANE_U] = frame->StrideUV(); + raw_images_[0].stride[VPX_PLANE_V] = frame->StrideUV(); + + for (size_t i = 1; i < encoders_.size(); ++i) { + // Scale the image down a number of times by downsampling factor + libyuv::NV12Scale( + raw_images_[i - 1].planes[VPX_PLANE_Y], + raw_images_[i - 1].stride[VPX_PLANE_Y], + raw_images_[i - 1].planes[VPX_PLANE_U], + raw_images_[i - 1].stride[VPX_PLANE_U], raw_images_[i - 1].d_w, + raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], + raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], + raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].d_w, + raw_images_[i].d_h, libyuv::kFilterBilinear); + raw_images_[i].planes[VPX_PLANE_V] = raw_images_[i].planes[VPX_PLANE_U] + 1; + } +} + // static LibvpxVp8Encoder::VariableFramerateExperiment LibvpxVp8Encoder::ParseVariableFramerateConfig(std::string group_name) { - FieldTrialFlag enabled = FieldTrialFlag("Enabled"); + FieldTrialFlag disabled = FieldTrialFlag("Disabled"); FieldTrialParameter framerate_limit("min_fps", 5.0); FieldTrialParameter qp("min_qp", 15); FieldTrialParameter undershoot_percentage("undershoot", 30); - ParseFieldTrial({&enabled, &framerate_limit, &qp, &undershoot_percentage}, + ParseFieldTrial({&disabled, &framerate_limit, &qp, &undershoot_percentage}, field_trial::FindFullName(group_name)); VariableFramerateExperiment config; - config.enabled = enabled.Get(); + config.enabled = !disabled.Get(); config.framerate_limit = framerate_limit.Get(); config.steady_state_qp = qp.Get(); config.steady_state_undershoot_percentage = undershoot_percentage.Get(); diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h 
b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h index 675d386456..c08b9b0883 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h @@ -34,13 +34,8 @@ namespace webrtc { class LibvpxVp8Encoder : public VideoEncoder { public: - LibvpxVp8Encoder(); - explicit LibvpxVp8Encoder(std::unique_ptr - frame_buffer_controller_factory); - explicit LibvpxVp8Encoder(std::unique_ptr interface); - LibvpxVp8Encoder(std::unique_ptr - frame_buffer_controller_factory, - std::unique_ptr interface); + LibvpxVp8Encoder(std::unique_ptr interface, + VP8Encoder::Settings settings); ~LibvpxVp8Encoder() override; int Release() override; @@ -98,24 +93,32 @@ class LibvpxVp8Encoder : public VideoEncoder { bool UpdateVpxConfiguration(size_t stream_index); + void MaybeUpdatePixelFormat(vpx_img_fmt fmt); + void PrepareI420Image(const I420BufferInterface* frame); + void PrepareNV12Image(const NV12BufferInterface* frame); + const std::unique_ptr libvpx_; - const absl::optional> - experimental_cpu_speed_config_arm_; + const CpuSpeedExperiment experimental_cpu_speed_config_arm_; const RateControlSettings rate_control_settings_; - const absl::optional screenshare_max_qp_; - EncodedImageCallback* encoded_complete_callback_; + // EncoderInfo::requested_resolution_alignment override from field trial. 
+ const absl::optional requested_resolution_alignment_override_; + + EncodedImageCallback* encoded_complete_callback_ = nullptr; VideoCodec codec_; - bool inited_; - int64_t timestamp_; - int qp_max_; - int cpu_speed_default_; - int number_of_cores_; - uint32_t rc_max_intra_target_; + bool inited_ = false; + int64_t timestamp_ = 0; + int qp_max_ = 56; + int cpu_speed_default_ = -6; + int number_of_cores_ = 0; + uint32_t rc_max_intra_target_ = 0; + int num_active_streams_ = 0; const std::unique_ptr frame_buffer_controller_factory_; std::unique_ptr frame_buffer_controller_; + const std::vector + resolution_bitrate_limits_; std::vector key_frame_request_; std::vector send_stream_; std::vector cpu_speed_; @@ -140,9 +143,9 @@ class LibvpxVp8Encoder : public VideoEncoder { static VariableFramerateExperiment ParseVariableFramerateConfig( std::string group_name); FramerateController framerate_controller_; - int num_steady_state_frames_; + int num_steady_state_frames_ = 0; - FecControllerOverride* fec_controller_override_; + FecControllerOverride* fec_controller_override_ = nullptr; }; } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.cc b/modules/video_coding/codecs/vp8/screenshare_layers.cc index b5b963e2a9..caccb4246c 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers.cc +++ b/modules/video_coding/codecs/vp8/screenshare_layers.cc @@ -36,6 +36,7 @@ constexpr int kMinTimeBetweenSyncs = kOneSecond90Khz * 2; constexpr int kMaxTimeBetweenSyncs = kOneSecond90Khz * 4; constexpr int kQpDeltaThresholdForSync = 8; constexpr int kMinBitrateKbpsForQpBoost = 500; +constexpr auto kSwitch = DecodeTargetIndication::kSwitch; } // namespace const double ScreenshareLayers::kMaxTL0FpsReduction = 2.5; @@ -319,8 +320,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index, if (number_of_temporal_layers_ == 1) { vp8_info.temporalIdx = kNoTemporalIdx; vp8_info.layerSync = false; - generic_frame_info.decode_target_indications = - 
GenericFrameInfo::DecodeTargetInfo("S"); + generic_frame_info.decode_target_indications = {kSwitch}; generic_frame_info.encoder_buffers.emplace_back( 0, /*referenced=*/!is_keyframe, /*updated=*/true); } else { @@ -333,8 +333,6 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index, dependency_info->decode_target_indications; } else { RTC_DCHECK(is_keyframe); - generic_frame_info.decode_target_indications = - GenericFrameInfo::DecodeTargetInfo("SS"); } if (is_keyframe) { @@ -346,6 +344,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index, active_layer_ = 1; info->template_structure = GetTemplateStructure(number_of_temporal_layers_); + generic_frame_info.decode_target_indications = {kSwitch, kSwitch}; } else if (active_layer_ >= 0 && layers_[active_layer_].state == TemporalLayer::State::kKeyFrame) { layers_[active_layer_].state = TemporalLayer::State::kNormal; @@ -429,21 +428,18 @@ FrameDependencyStructure ScreenshareLayers::GetTemplateStructure( FrameDependencyStructure template_structure; template_structure.num_decode_targets = num_layers; - using Builder = GenericFrameInfo::Builder; switch (num_layers) { case 1: { - template_structure.templates = { - Builder().T(0).Dtis("S").Build(), - Builder().T(0).Dtis("S").Fdiffs({1}).Build(), - }; + template_structure.templates.resize(2); + template_structure.templates[0].T(0).Dtis("S"); + template_structure.templates[1].T(0).Dtis("S").FrameDiffs({1}); return template_structure; } case 2: { - template_structure.templates = { - Builder().T(0).Dtis("SS").Build(), - Builder().T(0).Dtis("SS").Fdiffs({1}).Build(), - Builder().T(1).Dtis("-S").Fdiffs({1}).Build(), - }; + template_structure.templates.resize(3); + template_structure.templates[0].T(0).Dtis("SS"); + template_structure.templates[1].T(0).Dtis("SS").FrameDiffs({1}); + template_structure.templates[2].T(1).Dtis("-S").FrameDiffs({1}); return template_structure; } default: diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.h 
b/modules/video_coding/codecs/vp8/screenshare_layers.h index 5270ffe81c..39477f12f1 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers.h +++ b/modules/video_coding/codecs/vp8/screenshare_layers.h @@ -78,7 +78,7 @@ class ScreenshareLayers final : public Vp8FrameBufferController { DependencyInfo(absl::string_view indication_symbols, Vp8FrameConfig frame_config) : decode_target_indications( - GenericFrameInfo::DecodeTargetInfo(indication_symbols)), + webrtc_impl::StringToDecodeTargetIndications(indication_symbols)), frame_config(frame_config) {} absl::InlinedVector decode_target_indications; diff --git a/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc b/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc index 9ef29dbc75..88ef9b8c14 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc +++ b/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc @@ -113,7 +113,7 @@ class ScreenshareLayerTest : public ::testing::Test { Vp8FrameConfig NextFrameConfig(size_t stream_index, uint32_t timestamp) { int64_t timestamp_ms = timestamp / 90; - clock_.AdvanceTime(TimeDelta::ms(timestamp_ms - rtc::TimeMillis())); + clock_.AdvanceTime(TimeDelta::Millis(timestamp_ms - rtc::TimeMillis())); return layers_->NextFrameConfig(stream_index, timestamp); } @@ -563,7 +563,7 @@ TEST_F(ScreenshareLayerTest, UpdatesHistograms) { } else { RTC_NOTREACHED() << "Unexpected flags"; } - clock_.AdvanceTime(TimeDelta::ms(1000 / 5)); + clock_.AdvanceTime(TimeDelta::Millis(1000 / 5)); } EXPECT_TRUE(overshoot); @@ -626,7 +626,7 @@ TEST_F(ScreenshareLayerTest, RespectsConfiguredFramerate) { IgnoredCodecSpecificInfo()); } timestamp += kFrameIntervalsMs * 90; - clock_.AdvanceTime(TimeDelta::ms(kFrameIntervalsMs)); + clock_.AdvanceTime(TimeDelta::Millis(kFrameIntervalsMs)); ++num_input_frames; } @@ -644,7 +644,7 @@ TEST_F(ScreenshareLayerTest, RespectsConfiguredFramerate) { IgnoredCodecSpecificInfo()); } timestamp += kFrameIntervalsMs * 90 / 2; 
- clock_.AdvanceTime(TimeDelta::ms(kFrameIntervalsMs)); + clock_.AdvanceTime(TimeDelta::Millis(kFrameIntervalsMs)); ++num_input_frames; } diff --git a/modules/video_coding/codecs/vp8/temporal_layers_checker.cc b/modules/video_coding/codecs/vp8/temporal_layers_checker.cc index 53a68bd5e8..5aebd2c526 100644 --- a/modules/video_coding/codecs/vp8/temporal_layers_checker.cc +++ b/modules/video_coding/codecs/vp8/temporal_layers_checker.cc @@ -29,6 +29,7 @@ TemporalLayersChecker::CreateTemporalLayersChecker(Vp8TemporalLayersType type, // Conference mode temporal layering for screen content in base stream. return std::make_unique(num_temporal_layers); } + RTC_CHECK_NOTREACHED(); } TemporalLayersChecker::TemporalLayersChecker(int num_temporal_layers) diff --git a/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h b/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h index dcff1e6a18..697b44b9d5 100644 --- a/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h +++ b/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h @@ -19,74 +19,90 @@ namespace webrtc { class MockLibvpxVp8Interface : public LibvpxInterface { public: - MOCK_CONST_METHOD5(img_alloc, - vpx_image_t*(vpx_image_t*, - vpx_img_fmt_t, - unsigned int, - unsigned int, - unsigned int)); - MOCK_CONST_METHOD6(img_wrap, - vpx_image_t*(vpx_image_t*, - vpx_img_fmt_t, - unsigned int, - unsigned int, - unsigned int, - unsigned char*)); - MOCK_CONST_METHOD1(img_free, void(vpx_image_t* img)); - MOCK_CONST_METHOD2(codec_enc_config_set, - vpx_codec_err_t(vpx_codec_ctx_t*, - const vpx_codec_enc_cfg_t*)); - MOCK_CONST_METHOD3(codec_enc_config_default, - vpx_codec_err_t(vpx_codec_iface_t*, - vpx_codec_enc_cfg_t*, - unsigned int)); - MOCK_CONST_METHOD4(codec_enc_init, - vpx_codec_err_t(vpx_codec_ctx_t*, - vpx_codec_iface_t*, - const vpx_codec_enc_cfg_t*, - vpx_codec_flags_t)); - MOCK_CONST_METHOD6(codec_enc_init_multi, - vpx_codec_err_t(vpx_codec_ctx_t*, - vpx_codec_iface_t*, - 
vpx_codec_enc_cfg_t*, - int, - vpx_codec_flags_t, - vpx_rational_t*)); - MOCK_CONST_METHOD1(codec_destroy, vpx_codec_err_t(vpx_codec_ctx_t*)); - MOCK_CONST_METHOD3(codec_control, - vpx_codec_err_t(vpx_codec_ctx_t*, - vp8e_enc_control_id, - uint32_t)); - MOCK_CONST_METHOD3(codec_control, - vpx_codec_err_t(vpx_codec_ctx_t*, - vp8e_enc_control_id, - int)); - MOCK_CONST_METHOD3(codec_control, - vpx_codec_err_t(vpx_codec_ctx_t*, - vp8e_enc_control_id, - int*)); - MOCK_CONST_METHOD3(codec_control, - vpx_codec_err_t(vpx_codec_ctx_t*, - vp8e_enc_control_id, - vpx_roi_map*)); - MOCK_CONST_METHOD3(codec_control, - vpx_codec_err_t(vpx_codec_ctx_t*, - vp8e_enc_control_id, - vpx_active_map*)); - MOCK_CONST_METHOD3(codec_control, - vpx_codec_err_t(vpx_codec_ctx_t*, - vp8e_enc_control_id, - vpx_scaling_mode*)); - MOCK_CONST_METHOD6(codec_encode, - vpx_codec_err_t(vpx_codec_ctx_t*, - const vpx_image_t*, - vpx_codec_pts_t, - uint64_t, - vpx_enc_frame_flags_t, - uint64_t)); - MOCK_CONST_METHOD2(codec_get_cx_data, - const vpx_codec_cx_pkt_t*(vpx_codec_ctx_t*, - vpx_codec_iter_t*)); + MOCK_METHOD( + vpx_image_t*, + img_alloc, + (vpx_image_t*, vpx_img_fmt_t, unsigned int, unsigned int, unsigned int), + (const, override)); + MOCK_METHOD(vpx_image_t*, + img_wrap, + (vpx_image_t*, + vpx_img_fmt_t, + unsigned int, + unsigned int, + unsigned int, + unsigned char*), + (const, override)); + MOCK_METHOD(void, img_free, (vpx_image_t * img), (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_config_set, + (vpx_codec_ctx_t*, const vpx_codec_enc_cfg_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_config_default, + (vpx_codec_iface_t*, vpx_codec_enc_cfg_t*, unsigned int), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_init, + (vpx_codec_ctx_t*, + vpx_codec_iface_t*, + const vpx_codec_enc_cfg_t*, + vpx_codec_flags_t), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_init_multi, + (vpx_codec_ctx_t*, + vpx_codec_iface_t*, + 
vpx_codec_enc_cfg_t*, + int, + vpx_codec_flags_t, + vpx_rational_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_destroy, + (vpx_codec_ctx_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, uint32_t), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, int), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, int*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_roi_map*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_active_map*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_scaling_mode*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_encode, + (vpx_codec_ctx_t*, + const vpx_image_t*, + vpx_codec_pts_t, + uint64_t, + vpx_enc_frame_flags_t, + uint64_t), + (const, override)); + MOCK_METHOD(const vpx_codec_cx_pkt_t*, + codec_get_cx_data, + (vpx_codec_ctx_t*, vpx_codec_iter_t*), + (const, override)); + MOCK_METHOD(const char*, + codec_error_detail, + (vpx_codec_ctx_t*), + (const, override)); }; } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc index a597dc40b9..f22b9179d2 100644 --- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc +++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc @@ -94,10 +94,6 @@ class TestVp8Impl : public VideoCodecUnitTest { encoder_->Encode(input_frame, &frame_types)); ASSERT_TRUE(WaitForEncodedFrame(encoded_frame, codec_specific_info)); VerifyQpParser(*encoded_frame); - VideoEncoder::EncoderInfo encoder_info = encoder_->GetEncoderInfo(); - EXPECT_EQ("libvpx", encoder_info.implementation_name); - 
EXPECT_EQ(false, encoder_info.is_hardware_accelerated); - EXPECT_EQ(false, encoder_info.has_internal_source); EXPECT_EQ(kVideoCodecVP8, codec_specific_info->codecType); EXPECT_EQ(0, encoded_frame->SpatialIndex()); } @@ -124,7 +120,8 @@ TEST_F(TestVp8Impl, ErrorResilienceDisabledForNoTemporalLayers) { codec_settings_.simulcastStream[0].numberOfTemporalLayers = 1; auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx))); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); EXPECT_CALL(*vpx, codec_enc_init( _, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, 0), _)); @@ -137,7 +134,8 @@ TEST_F(TestVp8Impl, DefaultErrorResilienceEnabledForTemporalLayers) { codec_settings_.VP8()->numberOfTemporalLayers = 2; auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx))); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); EXPECT_CALL(*vpx, codec_enc_init(_, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, @@ -155,7 +153,8 @@ TEST_F(TestVp8Impl, codec_settings_.VP8()->numberOfTemporalLayers = 2; auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx))); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); EXPECT_CALL(*vpx, codec_enc_init(_, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, @@ -167,7 +166,8 @@ TEST_F(TestVp8Impl, TEST_F(TestVp8Impl, SetRates) { auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx))); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder.InitEncode(&codec_settings_, VideoEncoder::Settings(kCapabilities, 1, 1000))); @@ -190,73 +190,51 @@ TEST_F(TestVp8Impl, SetRates) { bitrate_allocation, static_cast(codec_settings_.maxFramerate))); } -TEST_F(TestVp8Impl, DynamicSetRates) { - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/vp8_dynamic_rate:true/"); - auto* const vpx = new 
NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx))); +TEST_F(TestVp8Impl, EncodeFrameAndRelease) { + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder.InitEncode(&codec_settings_, - VideoEncoder::Settings(kCapabilities, 1, 1000))); + encoder_->InitEncode(&codec_settings_, kSettings)); - const uint32_t kBitrateBps = 300000; - VideoEncoder::RateControlParameters rate_settings; - rate_settings.bitrate.SetBitrate(0, 0, kBitrateBps); - rate_settings.framerate_fps = - static_cast(codec_settings_.maxFramerate); + EncodedImage encoded_frame; + CodecSpecificInfo codec_specific_info; + EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); - // Set rates with no headroom. - rate_settings.bandwidth_allocation = DataRate::bps(kBitrateBps); - EXPECT_CALL( - *vpx, - codec_enc_config_set( - _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate, - kBitrateBps / 1000), - Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 1000u), - Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 0u), - Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 100u), - Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 30u), - Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 40u)))) - .WillOnce(Return(VPX_CODEC_OK)); - encoder.SetRates(rate_settings); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED, + encoder_->Encode(NextInputFrame(), nullptr)); +} - // Set rates with max headroom. 
- rate_settings.bandwidth_allocation = DataRate::bps(kBitrateBps * 2); - EXPECT_CALL( - *vpx, codec_enc_config_set( - _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate, - kBitrateBps / 1000), - Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 100u), - Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 15u), - Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 1000u), - Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 600u), - Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 5u)))) - .WillOnce(Return(VPX_CODEC_OK)); - encoder.SetRates(rate_settings); +TEST_F(TestVp8Impl, EncodeNv12FrameSimulcast) { + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->InitEncode(&codec_settings_, kSettings)); - // Set rates with headroom half way. - rate_settings.bandwidth_allocation = DataRate::bps((3 * kBitrateBps) / 2); - EXPECT_CALL( - *vpx, - codec_enc_config_set( - _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate, - kBitrateBps / 1000), - Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 550u), - Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 8u), - Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 550u), - Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 315u), - Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 23u)))) - .WillOnce(Return(VPX_CODEC_OK)); - encoder.SetRates(rate_settings); + EncodedImage encoded_frame; + CodecSpecificInfo codec_specific_info; + input_frame_generator_ = test::CreateSquareFrameGenerator( + kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12, + absl::nullopt); + EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED, + encoder_->Encode(NextInputFrame(), nullptr)); } -TEST_F(TestVp8Impl, EncodeFrameAndRelease) { +TEST_F(TestVp8Impl, EncodeI420FrameAfterNv12Frame) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, 
encoder_->InitEncode(&codec_settings_, kSettings)); EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; + input_frame_generator_ = test::CreateSquareFrameGenerator( + kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12, + absl::nullopt); + EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); + input_frame_generator_ = test::CreateSquareFrameGenerator( + kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420, + absl::nullopt); EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); @@ -398,36 +376,6 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { EXPECT_EQ(kInitialTimestampRtp, decoded_frame->timestamp()); } -#if defined(WEBRTC_ANDROID) -#define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame -#else -#define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame -#endif -TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) { - VideoFrame input_frame = NextInputFrame(); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info); - - // Setting complete to false -> should return an error. - encoded_frame._completeFrame = false; - EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, - decoder_->Decode(encoded_frame, false, -1)); - // Setting complete back to true. Forcing a delta frame. - encoded_frame._frameType = VideoFrameType::kVideoFrameDelta; - encoded_frame._completeFrame = true; - EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR, - decoder_->Decode(encoded_frame, false, -1)); - // Now setting a key frame. 
- encoded_frame._frameType = VideoFrameType::kVideoFrameKey; - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, -1)); - std::unique_ptr decoded_frame; - absl::optional decoded_qp; - ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); - ASSERT_TRUE(decoded_frame); - EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36); -} - TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) { codec_settings_.VP8()->numberOfTemporalLayers = 2; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, @@ -513,13 +461,15 @@ TEST_F(TestVp8Impl, DontDropKeyframes) { TEST_F(TestVp8Impl, KeepsTimestampOnReencode) { auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx))); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); // Settings needed to trigger ScreenshareLayers usage, which is required for // overshoot-drop-reencode logic. codec_settings_.maxBitrate = 1000; codec_settings_.mode = VideoCodecMode::kScreensharing; codec_settings_.VP8()->numberOfTemporalLayers = 2; + codec_settings_.legacy_conference_mode = true; EXPECT_CALL(*vpx, img_wrap(_, _, _, _, _, _)) .WillOnce(Invoke([](vpx_image_t* img, vpx_img_fmt_t fmt, unsigned int d_w, @@ -549,6 +499,67 @@ TEST_F(TestVp8Impl, KeepsTimestampOnReencode) { encoder.Encode(NextInputFrame(), &delta_frame); } +TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) { + auto* const vpx = new NiceMock(); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); + + const auto info = encoder.GetEncoderInfo(); + + EXPECT_FALSE(info.supports_native_handle); + EXPECT_FALSE(info.is_hardware_accelerated); + EXPECT_FALSE(info.has_internal_source); + EXPECT_TRUE(info.supports_simulcast); + EXPECT_EQ(info.implementation_name, "libvpx"); + EXPECT_EQ(info.requested_resolution_alignment, 1); + EXPECT_THAT(info.preferred_pixel_formats, + testing::UnorderedElementsAre(VideoFrameBuffer::Type::kNV12, + VideoFrameBuffer::Type::kI420)); +} + +TEST(LibvpxVp8EncoderTest, 
RequestedResolutionAlignmentFromFieldTrial) { + test::ScopedFieldTrials field_trials( + "WebRTC-VP8-GetEncoderInfoOverride/" + "requested_resolution_alignment:10/"); + + auto* const vpx = new NiceMock(); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); + + EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10); +} + +TEST(LibvpxVp8EncoderTest, + GetEncoderInfoReturnsEmptyResolutionBitrateLimitsByDefault) { + auto* const vpx = new NiceMock(); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + VP8Encoder::Settings()); + + const auto info = encoder.GetEncoderInfo(); + + EXPECT_TRUE(info.resolution_bitrate_limits.empty()); +} + +TEST(LibvpxVp8EncoderTest, + GetEncoderInfoReturnsResolutionBitrateLimitsAsConfigured) { + std::vector resolution_bitrate_limits = + {VideoEncoder::ResolutionBitrateLimits(/*frame_size_pixels=*/640 * 360, + /*min_start_bitrate_bps=*/300, + /*min_bitrate_bps=*/100, + /*max_bitrate_bps=*/1000), + VideoEncoder::ResolutionBitrateLimits(320 * 180, 100, 30, 500)}; + VP8Encoder::Settings settings; + settings.resolution_bitrate_limits = resolution_bitrate_limits; + + auto* const vpx = new NiceMock(); + LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), + std::move(settings)); + + const auto info = encoder.GetEncoderInfo(); + + EXPECT_EQ(info.resolution_bitrate_limits, resolution_bitrate_limits); +} + TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationNoLayers) { FramerateFractions expected_fps_allocation[kMaxSpatialLayers] = { FramerateFractions(1, EncoderInfo::kMaxFramerateFraction)}; @@ -605,6 +616,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationScreenshareLayers) { codec_settings_.simulcastStream[0].maxBitrate = kLegacyScreenshareTl1BitrateKbps; codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2; + codec_settings_.legacy_conference_mode = true; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_settings_, kSettings)); @@ -644,6 +656,28 @@ TEST_F(TestVp8Impl, 
GetEncoderInfoFpsAllocationSimulcastVideo) { expected_fps_allocation[2] = expected_fps_allocation[0]; EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, ::testing::ElementsAreArray(expected_fps_allocation)); + + // Release encoder and re-init without temporal layers. + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); + + // Sanity check fps allocation when not inited. + FramerateFractions default_fps_fraction[kMaxSpatialLayers]; + default_fps_fraction[0].push_back(EncoderInfo::kMaxFramerateFraction); + EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, + ::testing::ElementsAreArray(default_fps_fraction)); + + for (int i = 0; i < codec_settings_.numberOfSimulcastStreams; ++i) { + codec_settings_.simulcastStream[i].numberOfTemporalLayers = 1; + } + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->InitEncode(&codec_settings_, kSettings)); + + for (size_t i = 0; i < 3; ++i) { + expected_fps_allocation[i].clear(); + expected_fps_allocation[i].push_back(EncoderInfo::kMaxFramerateFraction); + } + EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, + ::testing::ElementsAreArray(expected_fps_allocation)); } } // namespace webrtc diff --git a/modules/video_coding/codecs/vp9/include/vp9.h b/modules/video_coding/codecs/vp9/include/vp9.h index 8091cacec9..7cf1c2ebd1 100644 --- a/modules/video_coding/codecs/vp9/include/vp9.h +++ b/modules/video_coding/codecs/vp9/include/vp9.h @@ -25,6 +25,10 @@ namespace webrtc { // negotiate in SDP, in order of preference. std::vector SupportedVP9Codecs(); +// Returns a vector with all supported internal VP9 decode profiles in order of +// preference. These will be availble for receive-only connections. +std::vector SupportedVP9DecoderCodecs(); + class VP9Encoder : public VideoEncoder { public: // Deprecated. Returns default implementation using VP9 Profile 0. 
diff --git a/modules/video_coding/codecs/vp9/include/vp9_globals.h b/modules/video_coding/codecs/vp9/include/vp9_globals.h index c6853127ac..6f9d09933f 100644 --- a/modules/video_coding/codecs/vp9/include/vp9_globals.h +++ b/modules/video_coding/codecs/vp9/include/vp9_globals.h @@ -30,8 +30,8 @@ const size_t kMaxVp9RefPics = 3; const size_t kMaxVp9FramesInGof = 0xFF; // 8 bits const size_t kMaxVp9NumberOfSpatialLayers = 8; -const size_t kMinVp9SpatialLayerWidth = 320; -const size_t kMinVp9SpatialLayerHeight = 180; +const size_t kMinVp9SpatialLayerWidth = 240; +const size_t kMinVp9SpatialLayerHeight = 135; enum TemporalStructureMode { kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP... diff --git a/modules/video_coding/codecs/vp9/svc_config.cc b/modules/video_coding/codecs/vp9/svc_config.cc index a3bf56d90c..cc7743ad25 100644 --- a/modules/video_coding/codecs/vp9/svc_config.cc +++ b/modules/video_coding/codecs/vp9/svc_config.cc @@ -16,6 +16,7 @@ #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" namespace webrtc { @@ -61,8 +62,10 @@ std::vector ConfigureSvcScreenSharing(size_t input_width, std::vector ConfigureSvcNormalVideo(size_t input_width, size_t input_height, float max_framerate_fps, + size_t first_active_layer, size_t num_spatial_layers, size_t num_temporal_layers) { + RTC_DCHECK_LT(first_active_layer, num_spatial_layers); std::vector spatial_layers; // Limit number of layers for given resolution. 
@@ -72,10 +75,25 @@ std::vector ConfigureSvcNormalVideo(size_t input_width, const size_t num_layers_fit_vert = static_cast( std::floor(1 + std::max(0.0f, std::log2(1.0f * input_height / kMinVp9SpatialLayerHeight)))); - num_spatial_layers = - std::min({num_spatial_layers, num_layers_fit_horz, num_layers_fit_vert}); + const size_t limited_num_spatial_layers = + std::min(num_layers_fit_horz, num_layers_fit_vert); + if (limited_num_spatial_layers < num_spatial_layers) { + RTC_LOG(LS_WARNING) << "Reducing number of spatial layers from " + << num_spatial_layers << " to " + << limited_num_spatial_layers + << " due to low input resolution."; + num_spatial_layers = limited_num_spatial_layers; + } + // First active layer must be configured. + num_spatial_layers = std::max(num_spatial_layers, first_active_layer + 1); - for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { + // Ensure top layer is even enough. + int required_divisiblity = 1 << (num_spatial_layers - first_active_layer - 1); + input_width = input_width - input_width % required_divisiblity; + input_height = input_height - input_height % required_divisiblity; + + for (size_t sl_idx = first_active_layer; sl_idx < num_spatial_layers; + ++sl_idx) { SpatialLayer spatial_layer = {0}; spatial_layer.width = input_width >> (num_spatial_layers - sl_idx - 1); spatial_layer.height = input_height >> (num_spatial_layers - sl_idx - 1); @@ -103,12 +121,26 @@ std::vector ConfigureSvcNormalVideo(size_t input_width, spatial_layers.push_back(spatial_layer); } + // A workaround for sitiation when single HD layer is left with minBitrate + // about 500kbps. This would mean that there will always be at least 500kbps + // allocated to video regardless of how low is the actual BWE. + // Also, boost maxBitrate for the first layer to account for lost ability to + // predict from previous layers. 
+ if (first_active_layer > 0) { + spatial_layers[0].minBitrate = kMinVp9SvcBitrateKbps; + // TODO(ilnik): tune this value or come up with a different formula to + // ensure that all singlecast configurations look good and not too much + // bitrate is added. + spatial_layers[0].maxBitrate *= 1.1; + } + return spatial_layers; } std::vector GetSvcConfig(size_t input_width, size_t input_height, float max_framerate_fps, + size_t first_active_layer, size_t num_spatial_layers, size_t num_temporal_layers, bool is_screen_sharing) { @@ -122,7 +154,8 @@ std::vector GetSvcConfig(size_t input_width, max_framerate_fps, num_spatial_layers); } else { return ConfigureSvcNormalVideo(input_width, input_height, max_framerate_fps, - num_spatial_layers, num_temporal_layers); + first_active_layer, num_spatial_layers, + num_temporal_layers); } } diff --git a/modules/video_coding/codecs/vp9/svc_config.h b/modules/video_coding/codecs/vp9/svc_config.h index 6e9ae9b2e5..f6b562e189 100644 --- a/modules/video_coding/codecs/vp9/svc_config.h +++ b/modules/video_coding/codecs/vp9/svc_config.h @@ -14,13 +14,14 @@ #include -#include "common_types.h" // NOLINT(build/include) +#include "api/video_codecs/spatial_layer.h" namespace webrtc { std::vector GetSvcConfig(size_t input_width, size_t input_height, float max_framerate_fps, + size_t first_active_layer, size_t num_spatial_layers, size_t num_temporal_layers, bool is_screen_sharing); diff --git a/modules/video_coding/codecs/vp9/svc_config_unittest.cc b/modules/video_coding/codecs/vp9/svc_config_unittest.cc index bda6a5573c..1891628921 100644 --- a/modules/video_coding/codecs/vp9/svc_config_unittest.cc +++ b/modules/video_coding/codecs/vp9/svc_config_unittest.cc @@ -19,22 +19,74 @@ namespace webrtc { TEST(SvcConfig, NumSpatialLayers) { const size_t max_num_spatial_layers = 6; + const size_t first_active_layer = 0; const size_t num_spatial_layers = 2; std::vector spatial_layers = GetSvcConfig(kMinVp9SpatialLayerWidth << (num_spatial_layers - 1), 
kMinVp9SpatialLayerHeight << (num_spatial_layers - 1), 30, - max_num_spatial_layers, 1, false); + first_active_layer, max_num_spatial_layers, 1, false); EXPECT_EQ(spatial_layers.size(), num_spatial_layers); } +TEST(SvcConfig, AlwaysSendsAtLeastOneLayer) { + const size_t max_num_spatial_layers = 6; + const size_t first_active_layer = 5; + + std::vector spatial_layers = + GetSvcConfig(kMinVp9SpatialLayerWidth, kMinVp9SpatialLayerHeight, 30, + first_active_layer, max_num_spatial_layers, 1, false); + EXPECT_EQ(spatial_layers.size(), 1u); + EXPECT_EQ(spatial_layers.back().width, kMinVp9SpatialLayerWidth); +} + +TEST(SvcConfig, EnforcesMinimalRequiredParity) { + const size_t max_num_spatial_layers = 3; + const size_t kOddSize = 1023; + + std::vector spatial_layers = + GetSvcConfig(kOddSize, kOddSize, 30, + /*first_active_layer=*/1, max_num_spatial_layers, 1, false); + // Since there are 2 layers total (1, 2), divisiblity by 2 is required. + EXPECT_EQ(spatial_layers.back().width, kOddSize - 1); + EXPECT_EQ(spatial_layers.back().width, kOddSize - 1); + + spatial_layers = + GetSvcConfig(kOddSize, kOddSize, 30, + /*first_active_layer=*/0, max_num_spatial_layers, 1, false); + // Since there are 3 layers total (0, 1, 2), divisiblity by 4 is required. + EXPECT_EQ(spatial_layers.back().width, kOddSize - 3); + EXPECT_EQ(spatial_layers.back().width, kOddSize - 3); + + spatial_layers = + GetSvcConfig(kOddSize, kOddSize, 30, + /*first_active_layer=*/2, max_num_spatial_layers, 1, false); + // Since there is only 1 layer active (2), divisiblity by 1 is required. 
+ EXPECT_EQ(spatial_layers.back().width, kOddSize); + EXPECT_EQ(spatial_layers.back().width, kOddSize); +} + +TEST(SvcConfig, SkipsInactiveLayers) { + const size_t num_spatial_layers = 4; + const size_t first_active_layer = 2; + + std::vector spatial_layers = + GetSvcConfig(kMinVp9SpatialLayerWidth << (num_spatial_layers - 1), + kMinVp9SpatialLayerHeight << (num_spatial_layers - 1), 30, + first_active_layer, num_spatial_layers, 1, false); + EXPECT_EQ(spatial_layers.size(), 2u); + EXPECT_EQ(spatial_layers.back().width, + kMinVp9SpatialLayerWidth << (num_spatial_layers - 1)); +} + TEST(SvcConfig, BitrateThresholds) { + const size_t first_active_layer = 0; const size_t num_spatial_layers = 3; std::vector spatial_layers = GetSvcConfig(kMinVp9SpatialLayerWidth << (num_spatial_layers - 1), kMinVp9SpatialLayerHeight << (num_spatial_layers - 1), 30, - num_spatial_layers, 1, false); + first_active_layer, num_spatial_layers, 1, false); EXPECT_EQ(spatial_layers.size(), num_spatial_layers); @@ -47,7 +99,7 @@ TEST(SvcConfig, BitrateThresholds) { TEST(SvcConfig, ScreenSharing) { std::vector spatial_layers = - GetSvcConfig(1920, 1080, 30, 3, 3, true); + GetSvcConfig(1920, 1080, 30, 1, 3, 3, true); EXPECT_EQ(spatial_layers.size(), 3UL); diff --git a/modules/video_coding/codecs/vp9/svc_rate_allocator.h b/modules/video_coding/codecs/vp9/svc_rate_allocator.h index a4e0c28cc0..fa53a155ab 100644 --- a/modules/video_coding/codecs/vp9/svc_rate_allocator.h +++ b/modules/video_coding/codecs/vp9/svc_rate_allocator.h @@ -11,52 +11,7 @@ #ifndef MODULES_VIDEO_CODING_CODECS_VP9_SVC_RATE_ALLOCATOR_H_ #define MODULES_VIDEO_CODING_CODECS_VP9_SVC_RATE_ALLOCATOR_H_ -#include -#include - -#include "absl/container/inlined_vector.h" -#include "api/video/video_bitrate_allocation.h" -#include "api/video/video_bitrate_allocator.h" -#include "api/video/video_codec_constants.h" -#include "api/video_codecs/video_codec.h" -#include "rtc_base/experiments/stable_target_rate_experiment.h" - -namespace webrtc { - 
-class SvcRateAllocator : public VideoBitrateAllocator { - public: - explicit SvcRateAllocator(const VideoCodec& codec); - - VideoBitrateAllocation Allocate( - VideoBitrateAllocationParameters parameters) override; - - static DataRate GetMaxBitrate(const VideoCodec& codec); - static DataRate GetPaddingBitrate(const VideoCodec& codec); - static absl::InlinedVector GetLayerStartBitrates( - const VideoCodec& codec); - - private: - VideoBitrateAllocation GetAllocationNormalVideo( - DataRate total_bitrate, - size_t first_active_layer, - size_t num_spatial_layers) const; - - VideoBitrateAllocation GetAllocationScreenSharing( - DataRate total_bitrate, - size_t first_active_layer, - size_t num_spatial_layers) const; - - // Returns the number of layers that are active and have enough bitrate to - // actually be enabled. - size_t FindNumEnabledLayers(DataRate target_rate) const; - - const VideoCodec codec_; - const StableTargetRateExperiment experiment_settings_; - const absl::InlinedVector - cumulative_layer_start_bitrates_; - size_t last_active_layer_count_; -}; - -} // namespace webrtc +// TODO(danilchap): Update dependent includes and remove this forwarding header. 
+#include "modules/video_coding/svc/svc_rate_allocator.h" #endif // MODULES_VIDEO_CODING_CODECS_VP9_SVC_RATE_ALLOCATOR_H_ diff --git a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc index ed15ee0a2c..31401f801f 100644 --- a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc +++ b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc @@ -16,6 +16,7 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "media/base/vp9_profile.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h" #include "modules/video_coding/codecs/test/video_codec_unittest.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/svc_config.h" @@ -25,20 +26,50 @@ #include "test/video_codec_settings.h" namespace webrtc { +namespace { +using ::testing::ElementsAre; using ::testing::ElementsAreArray; +using ::testing::IsEmpty; +using ::testing::SizeIs; +using ::testing::UnorderedElementsAreArray; using EncoderInfo = webrtc::VideoEncoder::EncoderInfo; using FramerateFractions = absl::InlinedVector; -namespace { -const size_t kWidth = 1280; -const size_t kHeight = 720; +constexpr size_t kWidth = 1280; +constexpr size_t kHeight = 720; const VideoEncoder::Capabilities kCapabilities(false); const VideoEncoder::Settings kSettings(kCapabilities, /*number_of_cores=*/1, /*max_payload_size=*/0); + +VideoCodec DefaultCodecSettings() { + VideoCodec codec_settings; + webrtc::test::CodecSettings(kVideoCodecVP9, &codec_settings); + codec_settings.width = kWidth; + codec_settings.height = kHeight; + codec_settings.VP9()->numberOfTemporalLayers = 1; + codec_settings.VP9()->numberOfSpatialLayers = 1; + return codec_settings; +} + +void ConfigureSvc(VideoCodec& codec_settings, + int num_spatial_layers, + int num_temporal_layers = 1) { + codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers; + 
codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers; + codec_settings.VP9()->frameDroppingOn = false; + + std::vector layers = GetSvcConfig( + codec_settings.width, codec_settings.height, codec_settings.maxFramerate, + /*first_active_layer=*/0, num_spatial_layers, num_temporal_layers, false); + for (size_t i = 0; i < layers.size(); ++i) { + codec_settings.spatialLayers[i] = layers[i]; + } +} + } // namespace class TestVp9Impl : public VideoCodecUnitTest { @@ -58,75 +89,25 @@ class TestVp9Impl : public VideoCodecUnitTest { codec_settings->VP9()->numberOfTemporalLayers = 1; codec_settings->VP9()->numberOfSpatialLayers = 1; } +}; - void ExpectFrameWith(uint8_t temporal_idx) { - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(temporal_idx, codec_specific_info.codecSpecific.VP9.temporal_idx); - } - - void ExpectFrameWith(size_t num_spatial_layers, - uint8_t temporal_idx, - bool temporal_up_switch, - uint8_t num_ref_pics, - const std::vector& p_diff) { - std::vector encoded_frame; - std::vector codec_specific; - ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific)); - for (size_t spatial_idx = 0; spatial_idx < num_spatial_layers; - ++spatial_idx) { - const CodecSpecificInfoVP9& vp9 = - codec_specific[spatial_idx].codecSpecific.VP9; - if (vp9.temporal_idx == kNoTemporalIdx) { - EXPECT_EQ(temporal_idx, 0); - } else { - EXPECT_EQ(vp9.temporal_idx, temporal_idx); - } - if (num_spatial_layers == 1) { - EXPECT_FALSE(encoded_frame[spatial_idx].SpatialIndex()); - } else { - EXPECT_EQ(encoded_frame[spatial_idx].SpatialIndex(), - static_cast(spatial_idx)); - } - EXPECT_EQ(vp9.temporal_up_switch, temporal_up_switch); - - // Ensure there are no duplicates in reference list. 
- std::vector vp9_p_diff(vp9.p_diff, - vp9.p_diff + vp9.num_ref_pics); - std::sort(vp9_p_diff.begin(), vp9_p_diff.end()); - EXPECT_EQ(std::unique(vp9_p_diff.begin(), vp9_p_diff.end()), - vp9_p_diff.end()); - - for (size_t ref_pic_num = 0; ref_pic_num < num_ref_pics; ++ref_pic_num) { - EXPECT_NE( - std::find(p_diff.begin(), p_diff.end(), vp9.p_diff[ref_pic_num]), - p_diff.end()); - } - } - } - - void ConfigureSvc(size_t num_spatial_layers, size_t num_temporal_layers = 1) { - codec_settings_.VP9()->numberOfSpatialLayers = - static_cast(num_spatial_layers); - codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers; - codec_settings_.VP9()->frameDroppingOn = false; - - std::vector layers = - GetSvcConfig(codec_settings_.width, codec_settings_.height, - codec_settings_.maxFramerate, num_spatial_layers, - num_temporal_layers, false); - for (size_t i = 0; i < layers.size(); ++i) { - codec_settings_.spatialLayers[i] = layers[i]; - } +class TestVp9ImplForPixelFormat + : public TestVp9Impl, + public ::testing::WithParamInterface< + test::FrameGeneratorInterface::OutputType> { + protected: + void SetUp() override { + input_frame_generator_ = test::CreateSquareFrameGenerator( + kWidth, kHeight, GetParam(), absl::optional()); + TestVp9Impl::SetUp(); } }; // Disabled on ios as flake, see https://crbug.com/webrtc/7057 #if defined(WEBRTC_IOS) -TEST_F(TestVp9Impl, DISABLED_EncodeDecode) { +TEST_P(TestVp9ImplForPixelFormat, DISABLED_EncodeDecode) { #else -TEST_F(TestVp9Impl, EncodeDecode) { +TEST_P(TestVp9ImplForPixelFormat, EncodeDecode) { #endif VideoFrame input_frame = NextInputFrame(); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr)); @@ -153,7 +134,7 @@ TEST_F(TestVp9Impl, EncodeDecode) { color_space.chroma_siting_vertical()); } -TEST_F(TestVp9Impl, DecodedColorSpaceFromBitstream) { +TEST_P(TestVp9ImplForPixelFormat, DecodedColorSpaceFromBitstream) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); EncodedImage 
encoded_frame; CodecSpecificInfo codec_specific_info; @@ -171,7 +152,7 @@ TEST_F(TestVp9Impl, DecodedColorSpaceFromBitstream) { EXPECT_FALSE(decoded_frame->color_space()->hdr_metadata()); } -TEST_F(TestVp9Impl, DecodedQpEqualsEncodedQp) { +TEST_P(TestVp9ImplForPixelFormat, DecodedQpEqualsEncodedQp) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; @@ -187,57 +168,166 @@ TEST_F(TestVp9Impl, DecodedQpEqualsEncodedQp) { EXPECT_EQ(encoded_frame.qp_, *decoded_qp); } -TEST_F(TestVp9Impl, ParserQpEqualsEncodedQp) { +TEST_F(TestVp9Impl, SwitchInputPixelFormatsWithoutReconfigure) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); + // Change the input frame type from I420 to NV12, encoding should still work. + input_frame_generator_ = test::CreateSquareFrameGenerator( + kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12, + absl::optional()); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); + ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); + + // Flipping back to I420, encoding should still work. 
+ input_frame_generator_ = test::CreateSquareFrameGenerator( + kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420, + absl::optional()); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); + ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); +} + +TEST(Vp9ImplTest, ParserQpEqualsEncodedQp) { + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + encoder->InitEncode(&codec_settings, kSettings); + + std::vector frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(1) + .SetResolution({kWidth, kHeight}) + .Encode(); + ASSERT_THAT(frames, SizeIs(1)); + const auto& encoded_frame = frames.front().encoded_image; int qp = 0; ASSERT_TRUE(vp9::GetQp(encoded_frame.data(), encoded_frame.size(), &qp)); EXPECT_EQ(encoded_frame.qp_, qp); } -TEST_F(TestVp9Impl, EncoderWith2TemporalLayers) { - // Override default settings. - codec_settings_.VP9()->numberOfTemporalLayers = 2; - // Tl0PidIdx is only used in non-flexible mode. - codec_settings_.VP9()->flexibleMode = false; - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->InitEncode(&codec_settings_, kSettings)); - - // Temporal layer 0. - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(0, codec_specific_info.codecSpecific.VP9.temporal_idx); - - // Temporal layer 1. 
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); - ExpectFrameWith(1); +TEST(Vp9ImplTest, EncodeAttachesTemplateStructureWithSvcController) { + test::ScopedFieldTrials override_field_trials( + "WebRTC-Vp9DependencyDescriptor/Enabled/"); + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); + + std::vector frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(2) + .SetResolution({kWidth, kHeight}) + .Encode(); + + ASSERT_THAT(frames, SizeIs(2)); + EXPECT_TRUE(frames[0].codec_specific_info.template_structure); + EXPECT_TRUE(frames[0].codec_specific_info.generic_frame_info); + + EXPECT_FALSE(frames[1].codec_specific_info.template_structure); + EXPECT_TRUE(frames[1].codec_specific_info.generic_frame_info); +} - // Temporal layer 0. - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); - ExpectFrameWith(0); +TEST(Vp9ImplTest, EncoderWith2TemporalLayers) { + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.VP9()->numberOfTemporalLayers = 2; + // Tl0PidIdx is only used in non-flexible mode. + codec_settings.VP9()->flexibleMode = false; + EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); + + std::vector frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(4) + .SetResolution({kWidth, kHeight}) + .Encode(); + + ASSERT_THAT(frames, SizeIs(4)); + EXPECT_EQ(frames[0].codec_specific_info.codecSpecific.VP9.temporal_idx, 0); + EXPECT_EQ(frames[1].codec_specific_info.codecSpecific.VP9.temporal_idx, 1); + EXPECT_EQ(frames[2].codec_specific_info.codecSpecific.VP9.temporal_idx, 0); + EXPECT_EQ(frames[3].codec_specific_info.codecSpecific.VP9.temporal_idx, 1); +} - // Temporal layer 1. 
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); - ExpectFrameWith(1); +TEST(Vp9ImplTest, EncodeTemporalLayersWithSvcController) { + test::ScopedFieldTrials override_field_trials( + "WebRTC-Vp9DependencyDescriptor/Enabled/"); + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.VP9()->numberOfTemporalLayers = 2; + EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); + + std::vector frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(4) + .SetResolution({kWidth, kHeight}) + .Encode(); + + ASSERT_THAT(frames, SizeIs(4)); + EXPECT_EQ(frames[0].codec_specific_info.codecSpecific.VP9.temporal_idx, 0); + EXPECT_EQ(frames[1].codec_specific_info.codecSpecific.VP9.temporal_idx, 1); + EXPECT_EQ(frames[2].codec_specific_info.codecSpecific.VP9.temporal_idx, 0); + EXPECT_EQ(frames[3].codec_specific_info.codecSpecific.VP9.temporal_idx, 1); + // Verify codec agnostic part + ASSERT_TRUE(frames[0].codec_specific_info.generic_frame_info); + ASSERT_TRUE(frames[1].codec_specific_info.generic_frame_info); + ASSERT_TRUE(frames[2].codec_specific_info.generic_frame_info); + ASSERT_TRUE(frames[3].codec_specific_info.generic_frame_info); + EXPECT_EQ(frames[0].codec_specific_info.generic_frame_info->temporal_id, 0); + EXPECT_EQ(frames[1].codec_specific_info.generic_frame_info->temporal_id, 1); + EXPECT_EQ(frames[2].codec_specific_info.generic_frame_info->temporal_id, 0); + EXPECT_EQ(frames[3].codec_specific_info.generic_frame_info->temporal_id, 1); } -TEST_F(TestVp9Impl, EncoderWith2SpatialLayers) { - codec_settings_.VP9()->numberOfSpatialLayers = 2; - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->InitEncode(&codec_settings_, kSettings)); +TEST(Vp9ImplTest, EncoderWith2SpatialLayers) { + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.VP9()->numberOfSpatialLayers = 2; + 
EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); + + std::vector frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(1) + .SetResolution({kWidth, kHeight}) + .Encode(); + + ASSERT_THAT(frames, SizeIs(2)); + EXPECT_EQ(frames[0].encoded_image.SpatialIndex(), 0); + EXPECT_EQ(frames[1].encoded_image.SpatialIndex(), 1); +} - SetWaitForEncodedFramesThreshold(2); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); - std::vector encoded_frame; - std::vector codec_info; - ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_info)); - EXPECT_EQ(encoded_frame[0].SpatialIndex(), 0); - EXPECT_EQ(encoded_frame[1].SpatialIndex(), 1); +TEST(Vp9ImplTest, EncodeSpatialLayersWithSvcController) { + test::ScopedFieldTrials override_field_trials( + "WebRTC-Vp9DependencyDescriptor/Enabled/"); + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.VP9()->numberOfSpatialLayers = 2; + EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); + + std::vector frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(2) + .SetResolution({kWidth, kHeight}) + .Encode(); + + ASSERT_THAT(frames, SizeIs(4)); + EXPECT_EQ(frames[0].encoded_image.SpatialIndex(), 0); + EXPECT_EQ(frames[1].encoded_image.SpatialIndex(), 1); + EXPECT_EQ(frames[2].encoded_image.SpatialIndex(), 0); + EXPECT_EQ(frames[3].encoded_image.SpatialIndex(), 1); + // Verify codec agnostic part + ASSERT_TRUE(frames[0].codec_specific_info.generic_frame_info); + ASSERT_TRUE(frames[1].codec_specific_info.generic_frame_info); + ASSERT_TRUE(frames[2].codec_specific_info.generic_frame_info); + ASSERT_TRUE(frames[3].codec_specific_info.generic_frame_info); + EXPECT_EQ(frames[0].codec_specific_info.generic_frame_info->spatial_id, 0); + EXPECT_EQ(frames[1].codec_specific_info.generic_frame_info->spatial_id, 1); + 
EXPECT_EQ(frames[2].codec_specific_info.generic_frame_info->spatial_id, 0); + EXPECT_EQ(frames[3].codec_specific_info.generic_frame_info->spatial_id, 1); } TEST_F(TestVp9Impl, EncoderExplicitLayering) { @@ -300,7 +390,7 @@ TEST_F(TestVp9Impl, EnableDisableSpatialLayers) { const size_t num_spatial_layers = 3; const size_t num_frames_to_encode = 5; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); codec_settings_.VP9()->frameDroppingOn = true; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, @@ -346,6 +436,68 @@ TEST_F(TestVp9Impl, EnableDisableSpatialLayers) { } } +TEST(Vp9ImplTest, EnableDisableSpatialLayersWithSvcController) { + test::ScopedFieldTrials override_field_trials( + "WebRTC-Vp9DependencyDescriptor/Enabled/"); + const int num_spatial_layers = 3; + // Configure encoder to produce 3 spatial layers. Encode frames of layer 0 + // then enable layer 1 and encode more frames and so on. + // Then disable layers one by one in the same way. + // Note: bit rate allocation is high to avoid frame dropping due to rate + // control, the encoder should always produce a frame. A dropped + // frame indicates a problem and the test will fail. + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + ConfigureSvc(codec_settings, num_spatial_layers); + codec_settings.VP9()->frameDroppingOn = true; + EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); + + EncodedVideoFrameProducer producer(*encoder); + producer.SetResolution({kWidth, kHeight}); + + // Encode a key frame to validate all other frames are delta frames. 
+ std::vector frames = + producer.SetNumInputFrames(1).Encode(); + ASSERT_THAT(frames, Not(IsEmpty())); + EXPECT_TRUE(frames[0].codec_specific_info.template_structure); + + const size_t num_frames_to_encode = 5; + + VideoBitrateAllocation bitrate_allocation; + for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { + // Allocate high bit rate to avoid frame dropping due to rate control. + bitrate_allocation.SetBitrate( + sl_idx, 0, + codec_settings.spatialLayers[sl_idx].targetBitrate * 1000 * 2); + encoder->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings.maxFramerate)); + + frames = producer.SetNumInputFrames(num_frames_to_encode).Encode(); + // With (sl_idx+1) spatial layers expect (sl_idx+1) frames per input frame. + ASSERT_THAT(frames, SizeIs(num_frames_to_encode * (sl_idx + 1))); + for (size_t i = 0; i < frames.size(); ++i) { + EXPECT_TRUE(frames[i].codec_specific_info.generic_frame_info); + EXPECT_FALSE(frames[i].codec_specific_info.template_structure); + } + } + + for (int sl_idx = num_spatial_layers - 1; sl_idx > 0; --sl_idx) { + bitrate_allocation.SetBitrate(sl_idx, 0, 0); + encoder->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings.maxFramerate)); + + frames = producer.SetNumInputFrames(num_frames_to_encode).Encode(); + // With |sl_idx| spatial layer disabled, there are |sl_idx| spatial layers + // left. + ASSERT_THAT(frames, SizeIs(num_frames_to_encode * sl_idx)); + for (size_t i = 0; i < frames.size(); ++i) { + EXPECT_TRUE(frames[i].codec_specific_info.generic_frame_info); + EXPECT_FALSE(frames[i].codec_specific_info.template_structure); + } + } +} + TEST_F(TestVp9Impl, DisableEnableBaseLayerTriggersKeyFrame) { // Configure encoder to produce N spatial layers. Encode frames for all // layers. Then disable all but the last layer. Then reenable all back again. 
@@ -356,7 +508,7 @@ TEST_F(TestVp9Impl, DisableEnableBaseLayerTriggersKeyFrame) { // Must not be multiple of temporal period to exercise all code paths. const size_t num_frames_to_encode = 5; - ConfigureSvc(num_spatial_layers, num_temporal_layers); + ConfigureSvc(codec_settings_, num_spatial_layers, num_temporal_layers); codec_settings_.VP9()->frameDroppingOn = false; codec_settings_.VP9()->flexibleMode = false; codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOnKeyPic; @@ -502,13 +654,134 @@ TEST_F(TestVp9Impl, DisableEnableBaseLayerTriggersKeyFrame) { } } +TEST(Vp9ImplTest, DisableEnableBaseLayerWithSvcControllerTriggersKeyFrame) { + // Configure encoder to produce N spatial layers. Encode frames for all + // layers. Then disable all but the last layer. Then reenable all back again. + test::ScopedFieldTrials override_field_trials( + "WebRTC-Vp9DependencyDescriptor/Enabled/"); + const size_t num_spatial_layers = 3; + const size_t num_temporal_layers = 3; + // Must not be multiple of temporal period to exercise all code paths. + const size_t num_frames_to_encode = 5; + + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + ConfigureSvc(codec_settings, num_spatial_layers, num_temporal_layers); + codec_settings.VP9()->frameDroppingOn = false; + codec_settings.VP9()->flexibleMode = false; + codec_settings.VP9()->interLayerPred = InterLayerPredMode::kOnKeyPic; + codec_settings.mode = VideoCodecMode::kRealtimeVideo; + + EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); + + VideoBitrateAllocation bitrate_allocation; + for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { + for (size_t tl_idx = 0; tl_idx < num_temporal_layers; ++tl_idx) { + // Allocate high bit rate to avoid frame dropping due to rate control. 
+ bitrate_allocation.SetBitrate( + sl_idx, tl_idx, + codec_settings.spatialLayers[sl_idx].targetBitrate * 1000 * 2); + } + } + encoder->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings.maxFramerate)); + + EncodedVideoFrameProducer producer(*encoder); + producer.SetResolution({kWidth, kHeight}); + + std::vector frames = + producer.SetNumInputFrames(num_frames_to_encode).Encode(); + ASSERT_THAT(frames, SizeIs(num_frames_to_encode * num_spatial_layers)); + + // Disable all but top spatial layer. + for (size_t sl_idx = 0; sl_idx < num_spatial_layers - 1; ++sl_idx) { + for (size_t tl_idx = 0; tl_idx < num_temporal_layers; ++tl_idx) { + bitrate_allocation.SetBitrate(sl_idx, tl_idx, 0); + } + } + encoder->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings.maxFramerate)); + + frames = producer.SetNumInputFrames(num_frames_to_encode).Encode(); + EXPECT_THAT(frames, SizeIs(num_frames_to_encode)); + for (const auto& frame : frames) { + // Expect no key-frames generated. + EXPECT_FALSE(frame.codec_specific_info.template_structure); + ASSERT_TRUE(frame.codec_specific_info.generic_frame_info); + EXPECT_EQ(frame.codec_specific_info.generic_frame_info->spatial_id, 2); + } + + frames = producer.ForceKeyFrame().SetNumInputFrames(1).Encode(); + ASSERT_THAT(frames, SizeIs(1)); + // Key-frame should be produced. 
+ EXPECT_EQ(frames[0].encoded_image._frameType, VideoFrameType::kVideoFrameKey); + ASSERT_TRUE(frames[0].codec_specific_info.template_structure); + ASSERT_TRUE(frames[0].codec_specific_info.generic_frame_info); + EXPECT_EQ(frames[0].codec_specific_info.generic_frame_info->spatial_id, 2); + + frames = producer.SetNumInputFrames(num_frames_to_encode).Encode(); + ASSERT_THAT(frames, SizeIs(num_frames_to_encode)); + for (const auto& frame : frames) { + EXPECT_EQ(frame.encoded_image._frameType, VideoFrameType::kVideoFrameDelta); + EXPECT_FALSE(frame.codec_specific_info.template_structure); + ASSERT_TRUE(frame.codec_specific_info.generic_frame_info); + EXPECT_EQ(frame.codec_specific_info.generic_frame_info->spatial_id, 2); + } + + // Enable the second layer back. + // Allocate high bit rate to avoid frame dropping due to rate control. + for (size_t tl_idx = 0; tl_idx < num_temporal_layers; ++tl_idx) { + bitrate_allocation.SetBitrate( + 1, tl_idx, codec_settings.spatialLayers[0].targetBitrate * 1000 * 2); + } + encoder->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings.maxFramerate)); + + frames = producer.SetNumInputFrames(num_frames_to_encode).Encode(); + ASSERT_THAT(frames, SizeIs(num_frames_to_encode * 2)); + EXPECT_EQ(frames[0].encoded_image._frameType, VideoFrameType::kVideoFrameKey); + EXPECT_TRUE(frames[0].codec_specific_info.template_structure); + ASSERT_TRUE(frames[0].codec_specific_info.generic_frame_info); + EXPECT_EQ(frames[0].codec_specific_info.generic_frame_info->spatial_id, 1); + for (size_t i = 1; i < frames.size(); ++i) { + EXPECT_EQ(frames[i].encoded_image._frameType, + VideoFrameType::kVideoFrameDelta); + EXPECT_FALSE(frames[i].codec_specific_info.template_structure); + ASSERT_TRUE(frames[i].codec_specific_info.generic_frame_info); + EXPECT_EQ(frames[i].codec_specific_info.generic_frame_info->spatial_id, + 1 + static_cast(i % 2)); + } + + // Enable the first layer back. 
+ // Allocate high bit rate to avoid frame dropping due to rate control. + for (size_t tl_idx = 0; tl_idx < num_temporal_layers; ++tl_idx) { + bitrate_allocation.SetBitrate( + 0, tl_idx, codec_settings.spatialLayers[1].targetBitrate * 1000 * 2); + } + encoder->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings.maxFramerate)); + + frames = producer.SetNumInputFrames(num_frames_to_encode).Encode(); + ASSERT_THAT(frames, SizeIs(num_frames_to_encode * 3)); + EXPECT_TRUE(frames[0].codec_specific_info.template_structure); + ASSERT_TRUE(frames[0].codec_specific_info.generic_frame_info); + EXPECT_EQ(frames[0].codec_specific_info.generic_frame_info->spatial_id, 0); + for (size_t i = 1; i < frames.size(); ++i) { + EXPECT_FALSE(frames[i].codec_specific_info.template_structure); + ASSERT_TRUE(frames[i].codec_specific_info.generic_frame_info); + EXPECT_EQ(frames[i].codec_specific_info.generic_frame_info->spatial_id, + static_cast(i % 3)); + } +} + TEST_F(TestVp9Impl, DisableEnableBaseLayerTriggersKeyFrameForScreenshare) { // Configure encoder to produce N spatial layers. Encode frames for all // layers. Then disable all but the last layer. Then reenable all back again. 
const size_t num_spatial_layers = 3; const size_t num_frames_to_encode = 5; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); codec_settings_.VP9()->frameDroppingOn = false; codec_settings_.mode = VideoCodecMode::kScreensharing; codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn; @@ -626,7 +899,7 @@ TEST_F(TestVp9Impl, DisableEnableBaseLayerTriggersKeyFrameForScreenshare) { TEST_F(TestVp9Impl, EndOfPicture) { const size_t num_spatial_layers = 2; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_settings_, kSettings)); @@ -646,8 +919,8 @@ TEST_F(TestVp9Impl, EndOfPicture) { std::vector frames; std::vector codec_specific; ASSERT_TRUE(WaitForEncodedFrames(&frames, &codec_specific)); - EXPECT_FALSE(codec_specific[0].codecSpecific.VP9.end_of_picture); - EXPECT_TRUE(codec_specific[1].codecSpecific.VP9.end_of_picture); + EXPECT_FALSE(codec_specific[0].end_of_picture); + EXPECT_TRUE(codec_specific[1].end_of_picture); // Encode only base layer. Check that end-of-superframe flag is // set on base layer frame. 
@@ -662,12 +935,12 @@ TEST_F(TestVp9Impl, EndOfPicture) { ASSERT_TRUE(WaitForEncodedFrames(&frames, &codec_specific)); EXPECT_FALSE(frames[0].SpatialIndex()); - EXPECT_TRUE(codec_specific[0].codecSpecific.VP9.end_of_picture); + EXPECT_TRUE(codec_specific[0].end_of_picture); } TEST_F(TestVp9Impl, InterLayerPred) { const size_t num_spatial_layers = 2; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); codec_settings_.VP9()->frameDroppingOn = false; VideoBitrateAllocation bitrate_allocation; @@ -742,7 +1015,7 @@ TEST_F(TestVp9Impl, const size_t num_spatial_layers = 3; const size_t num_frames_to_encode = 2; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); codec_settings_.VP9()->frameDroppingOn = false; const std::vector inter_layer_pred_modes = { @@ -799,7 +1072,7 @@ TEST_F(TestVp9Impl, const size_t num_spatial_layers = 3; const size_t num_frames_to_encode = 2; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); codec_settings_.VP9()->frameDroppingOn = false; codec_settings_.VP9()->flexibleMode = false; @@ -854,7 +1127,7 @@ TEST_F(TestVp9Impl, EnablingDisablingUpperLayerInTheSameGof) { const size_t num_spatial_layers = 2; const size_t num_temporal_layers = 2; - ConfigureSvc(num_spatial_layers, num_temporal_layers); + ConfigureSvc(codec_settings_, num_spatial_layers, num_temporal_layers); codec_settings_.VP9()->frameDroppingOn = false; codec_settings_.VP9()->flexibleMode = false; @@ -926,7 +1199,7 @@ TEST_F(TestVp9Impl, EnablingDisablingUpperLayerAccrossGof) { const size_t num_spatial_layers = 2; const size_t num_temporal_layers = 2; - ConfigureSvc(num_spatial_layers, num_temporal_layers); + ConfigureSvc(codec_settings_, num_spatial_layers, num_temporal_layers); codec_settings_.VP9()->frameDroppingOn = false; codec_settings_.VP9()->flexibleMode = false; @@ -1006,7 +1279,7 @@ TEST_F(TestVp9Impl, EnablingNewLayerInScreenshareForcesAllLayersWithSS) { const 
size_t num_frames_to_encode_before_drop = 1; codec_settings_.maxFramerate = 30; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); codec_settings_.spatialLayers[0].maxFramerate = 5.0; // use 30 for the SL 1 instead of 10, so even if SL 0 frame is dropped due to // framerate capping we would still get back at least a middle layer. It @@ -1065,7 +1338,7 @@ TEST_F(TestVp9Impl, ScreenshareFrameDropping) { const int num_frames_to_detect_drops = 2; codec_settings_.maxFramerate = 30; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); // use 30 for the SL0 and SL1 because it simplifies the test. codec_settings_.spatialLayers[0].maxFramerate = 30.0; codec_settings_.spatialLayers[1].maxFramerate = 30.0; @@ -1155,7 +1428,7 @@ TEST_F(TestVp9Impl, RemovingLayerIsNotDelayedInScreenshareAndAddsSsInfo) { const size_t num_dropped_frames = 5; codec_settings_.maxFramerate = 30; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); codec_settings_.spatialLayers[0].maxFramerate = 5.0; // use 30 for the SL 1 instead of 5, so even if SL 0 frame is dropped due to // framerate capping we would still get back at least a middle layer. It @@ -1242,7 +1515,7 @@ TEST_F(TestVp9Impl, DisableNewLayerInVideoDelaysSsInfoTillTL0) { const size_t num_temporal_layers = 2; // Chosen by hand, the 2nd frame is dropped with configured per-layer max // framerate. 
- ConfigureSvc(num_spatial_layers, num_temporal_layers); + ConfigureSvc(codec_settings_, num_spatial_layers, num_temporal_layers); codec_settings_.VP9()->frameDroppingOn = false; codec_settings_.mode = VideoCodecMode::kRealtimeVideo; codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOnKeyPic; @@ -1301,7 +1574,7 @@ TEST_F(TestVp9Impl, DisableNewLayerInVideoDelaysSsInfoTillTL0) { TEST_F(TestVp9Impl, LowLayerMarkedAsRefIfHighLayerNotEncodedAndInterLayerPredIsEnabled) { - ConfigureSvc(3); + ConfigureSvc(codec_settings_, 3); codec_settings_.VP9()->frameDroppingOn = false; codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn; @@ -1333,6 +1606,12 @@ TEST_F(TestVp9Impl, ScalabilityStructureIsAvailableInFlexibleMode) { EXPECT_TRUE(codec_specific_info.codecSpecific.VP9.ss_data_available); } +TEST_F(TestVp9Impl, Profile0PreferredPixelFormats) { + EXPECT_THAT(encoder_->GetEncoderInfo().preferred_pixel_formats, + testing::UnorderedElementsAre(VideoFrameBuffer::Type::kNV12, + VideoFrameBuffer::Type::kI420)); +} + TEST_F(TestVp9Impl, EncoderInfoFpsAllocation) { const uint8_t kNumSpatialLayers = 3; const uint8_t kNumTemporalLayers = 3; @@ -1376,6 +1655,7 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocationFlexibleMode) { codec_settings_.VP9()->numberOfTemporalLayers = 1; codec_settings_.VP9()->flexibleMode = true; + VideoEncoder::RateControlParameters rate_params; for (uint8_t sl_idx = 0; sl_idx < kNumSpatialLayers; ++sl_idx) { codec_settings_.spatialLayers[sl_idx].width = codec_settings_.width; codec_settings_.spatialLayers[sl_idx].height = codec_settings_.height; @@ -1390,7 +1670,12 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocationFlexibleMode) { // fraction is correct. 
codec_settings_.spatialLayers[sl_idx].maxFramerate = codec_settings_.maxFramerate / (kNumSpatialLayers - sl_idx); + rate_params.bitrate.SetBitrate(sl_idx, 0, + codec_settings_.startBitrate * 1000); } + rate_params.bandwidth_allocation = + DataRate::BitsPerSec(rate_params.bitrate.get_sum_bps()); + rate_params.framerate_fps = codec_settings_.maxFramerate; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_settings_, kSettings)); @@ -1402,69 +1687,47 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocationFlexibleMode) { expected_fps_allocation[2].push_back(EncoderInfo::kMaxFramerateFraction); EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, ::testing::ElementsAreArray(expected_fps_allocation)); + + // SetRates with current fps does not alter outcome. + encoder_->SetRates(rate_params); + EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, + ::testing::ElementsAreArray(expected_fps_allocation)); + + // Higher fps than the codec wants, should still not affect outcome. + rate_params.framerate_fps *= 2; + encoder_->SetRates(rate_params); + EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, + ::testing::ElementsAreArray(expected_fps_allocation)); } -class TestVp9ImplWithLayering - : public TestVp9Impl, - public ::testing::WithParamInterface<::testing::tuple> { +class Vp9ImplWithLayeringTest + : public ::testing::TestWithParam> { protected: - TestVp9ImplWithLayering() - : num_spatial_layers_(::testing::get<0>(GetParam())), - num_temporal_layers_(::testing::get<1>(GetParam())) {} + Vp9ImplWithLayeringTest() + : num_spatial_layers_(std::get<0>(GetParam())), + num_temporal_layers_(std::get<1>(GetParam())), + override_field_trials_(std::get<2>(GetParam()) + ? 
"WebRTC-Vp9ExternalRefCtrl/Enabled/" + : "") {} const uint8_t num_spatial_layers_; const uint8_t num_temporal_layers_; + const test::ScopedFieldTrials override_field_trials_; }; -TEST_P(TestVp9ImplWithLayering, FlexibleMode) { +TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) { // In flexible mode encoder wrapper obtains actual list of references from // encoder and writes it into RTP payload descriptor. Check that reference // list in payload descriptor matches the predefined one, which is used // in non-flexible mode. - codec_settings_.VP9()->flexibleMode = true; - codec_settings_.VP9()->frameDroppingOn = false; - codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_; - codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_; - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->InitEncode(&codec_settings_, kSettings)); - - GofInfoVP9 gof; - if (num_temporal_layers_ == 1) { - gof.SetGofInfoVP9(kTemporalStructureMode1); - } else if (num_temporal_layers_ == 2) { - gof.SetGofInfoVP9(kTemporalStructureMode2); - } else if (num_temporal_layers_ == 3) { - gof.SetGofInfoVP9(kTemporalStructureMode3); - } - - // Encode at least (num_frames_in_gof + 1) frames to verify references - // of non-key frame with gof_idx = 0. - for (size_t frame_num = 0; frame_num < gof.num_frames_in_gof + 1; - ++frame_num) { - SetWaitForEncodedFramesThreshold(num_spatial_layers_); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->Encode(NextInputFrame(), nullptr)); - - const bool is_key_frame = frame_num == 0; - const size_t gof_idx = frame_num % gof.num_frames_in_gof; - const std::vector p_diff(std::begin(gof.pid_diff[gof_idx]), - std::end(gof.pid_diff[gof_idx])); - - ExpectFrameWith(num_spatial_layers_, gof.temporal_idx[gof_idx], - gof.temporal_up_switch[gof_idx], - is_key_frame ? 
0 : gof.num_ref_pics[gof_idx], p_diff); - } -} - -TEST_P(TestVp9ImplWithLayering, ExternalRefControl) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-Vp9ExternalRefCtrl/Enabled/"); - codec_settings_.VP9()->flexibleMode = true; - codec_settings_.VP9()->frameDroppingOn = false; - codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_; - codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_; - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->InitEncode(&codec_settings_, kSettings)); + std::unique_ptr encoder = VP9Encoder::Create(); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.VP9()->flexibleMode = true; + codec_settings.VP9()->frameDroppingOn = false; + codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers_; + codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers_; + EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), + WEBRTC_VIDEO_CODEC_OK); GofInfoVP9 gof; if (num_temporal_layers_ == 1) { @@ -1477,27 +1740,48 @@ TEST_P(TestVp9ImplWithLayering, ExternalRefControl) { // Encode at least (num_frames_in_gof + 1) frames to verify references // of non-key frame with gof_idx = 0. - for (size_t frame_num = 0; frame_num < gof.num_frames_in_gof + 1; - ++frame_num) { - SetWaitForEncodedFramesThreshold(num_spatial_layers_); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->Encode(NextInputFrame(), nullptr)); - - const bool is_key_frame = frame_num == 0; - const size_t gof_idx = frame_num % gof.num_frames_in_gof; - const std::vector p_diff(std::begin(gof.pid_diff[gof_idx]), - std::end(gof.pid_diff[gof_idx])); - - ExpectFrameWith(num_spatial_layers_, gof.temporal_idx[gof_idx], - gof.temporal_up_switch[gof_idx], - is_key_frame ? 
0 : gof.num_ref_pics[gof_idx], p_diff); + int num_input_frames = gof.num_frames_in_gof + 1; + std::vector frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(num_input_frames) + .SetResolution({kWidth, kHeight}) + .Encode(); + ASSERT_THAT(frames, SizeIs(num_input_frames * num_spatial_layers_)); + + for (size_t i = 0; i < frames.size(); ++i) { + const EncodedVideoFrameProducer::EncodedFrame& frame = frames[i]; + const size_t picture_idx = i / num_spatial_layers_; + const size_t gof_idx = picture_idx % gof.num_frames_in_gof; + + const CodecSpecificInfoVP9& vp9 = + frame.codec_specific_info.codecSpecific.VP9; + EXPECT_EQ(frame.encoded_image.SpatialIndex(), + num_spatial_layers_ == 1 + ? absl::nullopt + : absl::optional(i % num_spatial_layers_)) + << "Frame " << i; + EXPECT_EQ(vp9.temporal_idx, num_temporal_layers_ == 1 + ? kNoTemporalIdx + : gof.temporal_idx[gof_idx]) + << "Frame " << i; + EXPECT_EQ(vp9.temporal_up_switch, gof.temporal_up_switch[gof_idx]) + << "Frame " << i; + if (picture_idx == 0) { + EXPECT_EQ(vp9.num_ref_pics, 0) << "Frame " << i; + } else { + EXPECT_THAT(rtc::MakeArrayView(vp9.p_diff, vp9.num_ref_pics), + UnorderedElementsAreArray(gof.pid_diff[gof_idx], + gof.num_ref_pics[gof_idx])) + << "Frame " << i; + } } } INSTANTIATE_TEST_SUITE_P(All, - TestVp9ImplWithLayering, + Vp9ImplWithLayeringTest, ::testing::Combine(::testing::Values(1, 2, 3), - ::testing::Values(1, 2, 3))); + ::testing::Values(1, 2, 3), + ::testing::Bool())); class TestVp9ImplFrameDropping : public TestVp9Impl { protected: @@ -1667,7 +1951,7 @@ TEST_F(TestVp9Impl, EncodeWithDynamicRate) { // Set 300kbps target with 100% headroom. 
VideoEncoder::RateControlParameters params; - params.bandwidth_allocation = DataRate::bps(300000); + params.bandwidth_allocation = DataRate::BitsPerSec(300000); params.bitrate.SetBitrate(0, 0, params.bandwidth_allocation.bps()); params.framerate_fps = 30.0; @@ -1695,7 +1979,7 @@ TEST_F(TestVp9Impl, ReenablingUpperLayerAfterKFWithInterlayerPredIsEnabled) { // Force low frame-rate, so all layers are present for all frames. codec_settings_.maxFramerate = 5; - ConfigureSvc(num_spatial_layers); + ConfigureSvc(codec_settings_, num_spatial_layers); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_settings_, kSettings)); @@ -1757,4 +2041,20 @@ TEST_F(TestVp9Impl, ReenablingUpperLayerAfterKFWithInterlayerPredIsEnabled) { EXPECT_EQ(encoded_frames[0]._frameType, VideoFrameType::kVideoFrameDelta); } +TEST_F(TestVp9Impl, HandlesEmptyInitDecode) { + std::unique_ptr decoder = CreateDecoder(); + // Check that nullptr settings are ok for decoder. + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + decoder->InitDecode(/*codec_settings=*/nullptr, 1)); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder->Release()); +} + +INSTANTIATE_TEST_SUITE_P( + TestVp9ImplForPixelFormat, + TestVp9ImplForPixelFormat, + ::testing::Values(test::FrameGeneratorInterface::OutputType::kI420, + test::FrameGeneratorInterface::OutputType::kNV12), + [](const auto& info) { + return test::FrameGeneratorInterface::OutputTypeToString(info.param); + }); } // namespace webrtc diff --git a/modules/video_coding/codecs/vp9/vp9.cc b/modules/video_coding/codecs/vp9/vp9.cc index 527bce7729..9b0585c059 100644 --- a/modules/video_coding/codecs/vp9/vp9.cc +++ b/modules/video_coding/codecs/vp9/vp9.cc @@ -39,6 +39,22 @@ std::vector SupportedVP9Codecs() { cricket::kVp9CodecName, {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}})); } + + return supported_formats; +#else + return std::vector(); +#endif +} + +std::vector SupportedVP9DecoderCodecs() { +#ifdef RTC_ENABLE_VP9 + std::vector supported_formats = 
SupportedVP9Codecs(); + // The WebRTC internal decoder supports VP9 profile 1. However, there's + // currently no way of sending VP9 profile 1 using the internal encoder. + // It would require extended support for I444, I422, and I440 buffers. + supported_formats.push_back(SdpVideoFormat( + cricket::kVp9CodecName, + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}})); return supported_formats; #else return std::vector(); diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc index 551ace22a2..4d0a6983ac 100644 --- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc +++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc @@ -58,7 +58,7 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) { RTC_DCHECK_GT(min_size, 0); rtc::scoped_refptr available_buffer = nullptr; { - rtc::CritScope cs(&buffers_lock_); + MutexLock lock(&buffers_lock_); // Do we have a buffer we can recycle? for (const auto& buffer : allocated_buffers_) { if (buffer->HasOneRef()) { @@ -91,7 +91,7 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) { int Vp9FrameBufferPool::GetNumBuffersInUse() const { int num_buffers_in_use = 0; - rtc::CritScope cs(&buffers_lock_); + MutexLock lock(&buffers_lock_); for (const auto& buffer : allocated_buffers_) { if (!buffer->HasOneRef()) ++num_buffers_in_use; @@ -100,7 +100,7 @@ int Vp9FrameBufferPool::GetNumBuffersInUse() const { } bool Vp9FrameBufferPool::Resize(size_t max_number_of_buffers) { - rtc::CritScope cs(&buffers_lock_); + MutexLock lock(&buffers_lock_); size_t used_buffers_count = 0; for (const auto& buffer : allocated_buffers_) { // If the buffer is in use, the ref count will be >= 2, one from the list we @@ -130,7 +130,7 @@ bool Vp9FrameBufferPool::Resize(size_t max_number_of_buffers) { } void Vp9FrameBufferPool::ClearPool() { - rtc::CritScope cs(&buffers_lock_); + MutexLock lock(&buffers_lock_); allocated_buffers_.clear(); } diff --git 
a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h index 02d2b26273..d37a9fc0e2 100644 --- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h +++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h @@ -18,8 +18,8 @@ #include "api/scoped_refptr.h" #include "rtc_base/buffer.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ref_count.h" +#include "rtc_base/synchronization/mutex.h" struct vpx_codec_ctx; struct vpx_codec_frame_buffer; @@ -119,7 +119,7 @@ class Vp9FrameBufferPool { private: // Protects |allocated_buffers_|. - rtc::CriticalSection buffers_lock_; + mutable Mutex buffers_lock_; // All buffers, in use or ready to be recycled. std::vector> allocated_buffers_ RTC_GUARDED_BY(buffers_lock_); diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc index fe6c912917..74fe565d10 100644 --- a/modules/video_coding/codecs/vp9/vp9_impl.cc +++ b/modules/video_coding/codecs/vp9/vp9_impl.cc @@ -19,19 +19,28 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/match.h" +#include "api/transport/field_trial_based_config.h" #include "api/video/color_space.h" #include "api/video/i010_buffer.h" #include "common_video/include/video_frame_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_list.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/keep_ref_until_done.h" 
#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" +#include "third_party/libyuv/include/libyuv/convert.h" #include "vpx/vp8cx.h" #include "vpx/vp8dx.h" #include "vpx/vpx_decoder.h" @@ -45,37 +54,18 @@ namespace { uint8_t kRefBufIdx[4] = {0, 0, 0, 1}; uint8_t kUpdBufIdx[4] = {0, 0, 1, 0}; -int kMaxNumTiles4kVideo = 8; - // Maximum allowed PID difference for differnet per-layer frame-rate case. const int kMaxAllowedPidDiff = 30; -constexpr double kLowRateFactor = 1.0; -constexpr double kHighRateFactor = 2.0; - -// These settings correspond to the settings in vpx_codec_enc_cfg. -struct Vp9RateSettings { - uint32_t rc_undershoot_pct; - uint32_t rc_overshoot_pct; - uint32_t rc_buf_sz; - uint32_t rc_buf_optimal_sz; - uint32_t rc_dropframe_thresh; -}; - -// Only positive speeds, range for real-time coding currently is: 5 - 8. -// Lower means slower/better quality, higher means fastest/lower quality. -int GetCpuSpeed(int width, int height) { -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) - return 8; -#else - // For smaller resolutions, use lower speed setting (get some coding gain at - // the cost of increased encoding complexity). - if (width * height <= 352 * 288) - return 5; - else - return 7; -#endif -} +// TODO(ilink): Tune these thresholds further. +// Selected using ConverenceMotion_1280_720_50.yuv clip. +// No toggling observed on any link capacity from 100-2000kbps. +// HD was reached consistently when link capacity was 1500kbps. +// Set resolutions are a bit more conservative than svc_config.cc sets, e.g. +// for 300kbps resolution converged to 270p instead of 360p. +constexpr int kLowVp9QpThreshold = 149; +constexpr int kHighVp9QpThreshold = 205; + // Helper class for extracting VP9 colorspace. 
ColorSpace ExtractVP9ColorSpace(vpx_color_space_t space_t, vpx_color_range_t range_t, @@ -153,54 +143,110 @@ std::pair GetActiveLayers( return {0, 0}; } -uint32_t Interpolate(uint32_t low, - uint32_t high, - double bandwidth_headroom_factor) { - RTC_DCHECK_GE(bandwidth_headroom_factor, kLowRateFactor); - RTC_DCHECK_LE(bandwidth_headroom_factor, kHighRateFactor); - - // |factor| is between 0.0 and 1.0. - const double factor = bandwidth_headroom_factor - kLowRateFactor; +std::unique_ptr CreateVp9ScalabilityStructure( + const VideoCodec& codec) { + int num_spatial_layers = codec.VP9().numberOfSpatialLayers; + int num_temporal_layers = + std::max(1, int{codec.VP9().numberOfTemporalLayers}); + if (num_spatial_layers == 1 && num_temporal_layers == 1) { + return std::make_unique(); + } + + char name[20]; + rtc::SimpleStringBuilder ss(name); + if (codec.mode == VideoCodecMode::kScreensharing) { + // TODO(bugs.webrtc.org/11999): Compose names of the structures when they + // are implemented. + return nullptr; + } else if (codec.VP9().interLayerPred == InterLayerPredMode::kOn || + num_spatial_layers == 1) { + ss << "L" << num_spatial_layers << "T" << num_temporal_layers; + } else if (codec.VP9().interLayerPred == InterLayerPredMode::kOnKeyPic) { + ss << "L" << num_spatial_layers << "T" << num_temporal_layers << "_KEY"; + } else { + RTC_DCHECK_EQ(codec.VP9().interLayerPred, InterLayerPredMode::kOff); + ss << "S" << num_spatial_layers << "T" << num_temporal_layers; + } - return static_cast(((1.0 - factor) * low) + (factor * high) + 0.5); -} + // Check spatial ratio. + if (num_spatial_layers > 1 && codec.spatialLayers[0].targetBitrate > 0) { + if (codec.width != codec.spatialLayers[num_spatial_layers - 1].width || + codec.height != codec.spatialLayers[num_spatial_layers - 1].height) { + RTC_LOG(LS_WARNING) + << "Top layer resolution expected to match overall resolution"; + return nullptr; + } + // Check if the ratio is one of the supported. 
+ int numerator; + int denominator; + if (codec.spatialLayers[1].width == 2 * codec.spatialLayers[0].width) { + numerator = 1; + denominator = 2; + // no suffix for 1:2 ratio. + } else if (2 * codec.spatialLayers[1].width == + 3 * codec.spatialLayers[0].width) { + numerator = 2; + denominator = 3; + ss << "h"; + } else { + RTC_LOG(LS_WARNING) << "Unsupported scalability ratio " + << codec.spatialLayers[0].width << ":" + << codec.spatialLayers[1].width; + return nullptr; + } + // Validate ratio is consistent for all spatial layer transitions. + for (int sid = 1; sid < num_spatial_layers; ++sid) { + if (codec.spatialLayers[sid].width * numerator != + codec.spatialLayers[sid - 1].width * denominator || + codec.spatialLayers[sid].height * numerator != + codec.spatialLayers[sid - 1].height * denominator) { + RTC_LOG(LS_WARNING) << "Inconsistent scalability ratio " << numerator + << ":" << denominator; + return nullptr; + } + } + } -Vp9RateSettings GetRateSettings(double bandwidth_headroom_factor) { - static const Vp9RateSettings low_settings{100u, 0u, 100u, 33u, 40u}; - static const Vp9RateSettings high_settings{50u, 50u, 1000u, 700u, 5u}; - - if (bandwidth_headroom_factor <= kLowRateFactor) { - return low_settings; - } else if (bandwidth_headroom_factor >= kHighRateFactor) { - return high_settings; - } - - Vp9RateSettings settings; - settings.rc_undershoot_pct = - Interpolate(low_settings.rc_undershoot_pct, - high_settings.rc_undershoot_pct, bandwidth_headroom_factor); - settings.rc_overshoot_pct = - Interpolate(low_settings.rc_overshoot_pct, high_settings.rc_overshoot_pct, - bandwidth_headroom_factor); - settings.rc_buf_sz = - Interpolate(low_settings.rc_buf_sz, high_settings.rc_buf_sz, - bandwidth_headroom_factor); - settings.rc_buf_optimal_sz = - Interpolate(low_settings.rc_buf_optimal_sz, - high_settings.rc_buf_optimal_sz, bandwidth_headroom_factor); - settings.rc_dropframe_thresh = - Interpolate(low_settings.rc_dropframe_thresh, - 
high_settings.rc_dropframe_thresh, bandwidth_headroom_factor); - return settings; + auto scalability_structure_controller = CreateScalabilityStructure(name); + if (scalability_structure_controller == nullptr) { + RTC_LOG(LS_WARNING) << "Unsupported scalability structure " << name; + } else { + RTC_LOG(LS_INFO) << "Created scalability structure " << name; + } + return scalability_structure_controller; } -void UpdateRateSettings(vpx_codec_enc_cfg_t* config, - const Vp9RateSettings& new_settings) { - config->rc_undershoot_pct = new_settings.rc_undershoot_pct; - config->rc_overshoot_pct = new_settings.rc_overshoot_pct; - config->rc_buf_sz = new_settings.rc_buf_sz; - config->rc_buf_optimal_sz = new_settings.rc_buf_optimal_sz; - config->rc_dropframe_thresh = new_settings.rc_dropframe_thresh; +vpx_svc_ref_frame_config_t Vp9References( + rtc::ArrayView layers) { + vpx_svc_ref_frame_config_t ref_config = {}; + for (const ScalableVideoController::LayerFrameConfig& layer_frame : layers) { + const auto& buffers = layer_frame.Buffers(); + RTC_DCHECK_LE(buffers.size(), 3); + int sid = layer_frame.SpatialId(); + if (!buffers.empty()) { + ref_config.lst_fb_idx[sid] = buffers[0].id; + ref_config.reference_last[sid] = buffers[0].referenced; + if (buffers[0].updated) { + ref_config.update_buffer_slot[sid] |= (1 << buffers[0].id); + } + } + if (buffers.size() > 1) { + ref_config.gld_fb_idx[sid] = buffers[1].id; + ref_config.reference_golden[sid] = buffers[1].referenced; + if (buffers[1].updated) { + ref_config.update_buffer_slot[sid] |= (1 << buffers[1].id); + } + } + if (buffers.size() > 2) { + ref_config.alt_fb_idx[sid] = buffers[2].id; + ref_config.reference_alt_ref[sid] = buffers[2].referenced; + if (buffers[2].updated) { + ref_config.update_buffer_slot[sid] |= (1 << buffers[2].id); + } + } + } + // TODO(bugs.webrtc.org/11999): Fill ref_config.duration + return ref_config; } } // namespace @@ -212,13 +258,16 @@ void 
VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, } VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec) + : VP9EncoderImpl(codec, FieldTrialBasedConfig()) {} + +VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec, + const WebRtcKeyValueConfig& trials) : encoded_image_(), encoded_complete_callback_(nullptr), profile_( ParseSdpForVP9Profile(codec.params).value_or(VP9Profile::kProfile0)), inited_(false), timestamp_(0), - cpu_speed_(3), rc_max_intra_target_(0), encoder_(nullptr), config_(nullptr), @@ -230,26 +279,34 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec) num_spatial_layers_(0), num_active_spatial_layers_(0), first_active_layer_(0), - layer_deactivation_requires_key_frame_( - field_trial::IsEnabled("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation")), + layer_deactivation_requires_key_frame_(absl::StartsWith( + trials.Lookup("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation"), + "Enabled")), is_svc_(false), inter_layer_pred_(InterLayerPredMode::kOn), external_ref_control_(false), // Set in InitEncode because of tests. 
- trusted_rate_controller_(RateControlSettings::ParseFromFieldTrials() - .LibvpxVp9TrustedRateController()), - dynamic_rate_settings_( - RateControlSettings::ParseFromFieldTrials().Vp9DynamicRateSettings()), + trusted_rate_controller_( + RateControlSettings::ParseFromKeyValueConfig(&trials) + .LibvpxVp9TrustedRateController()), layer_buffering_(false), full_superframe_drop_(true), first_frame_in_picture_(true), ss_info_needed_(false), force_all_active_layers_(false), + use_svc_controller_( + absl::StartsWith(trials.Lookup("WebRTC-Vp9DependencyDescriptor"), + "Enabled")), is_flexible_mode_(false), - variable_framerate_experiment_(ParseVariableFramerateConfig( - "WebRTC-VP9VariableFramerateScreenshare")), + variable_framerate_experiment_(ParseVariableFramerateConfig(trials)), variable_framerate_controller_( variable_framerate_experiment_.framerate_limit), - num_steady_state_frames_(0) { + quality_scaler_experiment_(ParseQualityScalerConfig(trials)), + external_ref_ctrl_( + !absl::StartsWith(trials.Lookup("WebRTC-Vp9ExternalRefCtrl"), + "Disabled")), + performance_flags_(ParsePerformanceFlagsFromTrials(trials)), + num_steady_state_frames_(0), + config_changed_(true) { codec_ = {}; memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); } @@ -258,8 +315,7 @@ VP9EncoderImpl::~VP9EncoderImpl() { Release(); } -void VP9EncoderImpl::SetFecControllerOverride( - FecControllerOverride* fec_controller_override) { +void VP9EncoderImpl::SetFecControllerOverride(FecControllerOverride*) { // Ignored. 
} @@ -398,7 +454,6 @@ bool VP9EncoderImpl::SetSvcRates( expect_no_more_active_layers = seen_active_layer; } } - RTC_DCHECK_GT(num_active_spatial_layers_, 0); if (higher_layers_enabled && !force_key_frame_) { // Prohibit drop of all layers for the next frame, so newly enabled @@ -409,7 +464,20 @@ bool VP9EncoderImpl::SetSvcRates( force_all_active_layers_ = true; } + if (svc_controller_) { + VideoBitrateAllocation allocation; + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + allocation.SetBitrate( + sid, tid, + config_->layer_target_bitrate[sid * num_temporal_layers_ + tid] * + 1000); + } + } + svc_controller_->OnRatesUpdated(allocation); + } current_bitrate_allocation_ = bitrate_allocation; + config_changed_ = true; return true; } @@ -430,15 +498,9 @@ void VP9EncoderImpl::SetRates(const RateControlParameters& parameters) { codec_.maxFramerate = static_cast(parameters.framerate_fps + 0.5); - if (dynamic_rate_settings_) { - // Tweak rate control settings based on available network headroom. - UpdateRateSettings( - config_, GetRateSettings(parameters.bandwidth_allocation.bps() / - parameters.bitrate.get_sum_bps())); - } - bool res = SetSvcRates(parameters.bitrate); RTC_DCHECK(res) << "Failed to set new bitrate allocation"; + config_changed_ = true; } // TODO(eladalon): s/inst/codec_settings/g. @@ -468,6 +530,9 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } + absl::optional previous_img_fmt = + raw_ ? 
absl::make_optional(raw_->fmt) : absl::nullopt; + int ret_val = Release(); if (ret_val < 0) { return ret_val; @@ -482,6 +547,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, if (&codec_ != inst) { codec_ = *inst; } + memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); force_key_frame_ = true; pics_since_key_ = 0; @@ -493,12 +559,14 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, num_temporal_layers_ = 1; } + if (use_svc_controller_) { + svc_controller_ = CreateVp9ScalabilityStructure(*inst); + } framerate_controller_ = std::vector( num_spatial_layers_, FramerateController(codec_.maxFramerate)); is_svc_ = (num_spatial_layers_ > 1 || num_temporal_layers_ > 1); - encoded_image_._completeFrame = true; // Populate encoder configuration with default values. if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { return WEBRTC_VIDEO_CODEC_ERROR; @@ -508,12 +576,17 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, unsigned int bits_for_storage = 8; switch (profile_) { case VP9Profile::kProfile0: - img_fmt = VPX_IMG_FMT_I420; + img_fmt = previous_img_fmt.value_or(VPX_IMG_FMT_I420); bits_for_storage = 8; config_->g_bit_depth = VPX_BITS_8; config_->g_profile = 0; config_->g_input_bit_depth = 8; break; + case VP9Profile::kProfile1: + // Encoding of profile 1 is not implemented. It would require extended + // support for I444, I422, and I440 buffers. + RTC_NOTREACHED(); + break; case VP9Profile::kProfile2: img_fmt = VPX_IMG_FMT_I42016; bits_for_storage = 16; @@ -559,13 +632,17 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, // put some key-frames at will even in VPX_KF_DISABLED kf_mode. config_->kf_max_dist = inst->VP9().keyFrameInterval; config_->kf_min_dist = config_->kf_max_dist; - config_->rc_resize_allowed = inst->VP9().automaticResizeOn ? 1 : 0; + if (quality_scaler_experiment_.enabled) { + // In that experiment webrtc wide quality scaler is used instead of libvpx + // internal scaler. 
+ config_->rc_resize_allowed = 0; + } else { + config_->rc_resize_allowed = inst->VP9().automaticResizeOn ? 1 : 0; + } // Determine number of threads based on the image size and #cores. config_->g_threads = NumberOfThreads(config_->g_w, config_->g_h, settings.number_of_cores); - cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h); - is_flexible_mode_ = inst->VP9().flexibleMode; inter_layer_pred_ = inst->VP9().interLayerPred; @@ -579,7 +656,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, // External reference control is required for different frame rate on spatial // layers because libvpx generates rtp incompatible references in this case. - external_ref_control_ = field_trial::IsEnabled("WebRTC-Vp9ExternalRefCtrl") || + external_ref_control_ = external_ref_ctrl_ || (num_spatial_layers_ > 1 && codec_.mode == VideoCodecMode::kScreensharing) || inter_layer_pred_ == InterLayerPredMode::kOn; @@ -660,7 +737,13 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { svc_params_.min_quantizers[i] = config_->rc_min_quantizer; } config_->ss_number_layers = num_spatial_layers_; - if (ExplicitlyConfiguredSpatialLayers()) { + if (svc_controller_) { + auto stream_config = svc_controller_->StreamConfig(); + for (int i = 0; i < stream_config.num_spatial_layers; ++i) { + svc_params_.scaling_factor_num[i] = stream_config.scaling_factor_num[i]; + svc_params_.scaling_factor_den[i] = stream_config.scaling_factor_den[i]; + } + } else if (ExplicitlyConfiguredSpatialLayers()) { for (int i = 0; i < num_spatial_layers_; ++i) { const auto& layer = codec_.spatialLayers[i]; RTC_CHECK_GT(layer.width, 0); @@ -719,7 +802,19 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { RTC_LOG(LS_ERROR) << "Init error: " << vpx_codec_err_to_string(rv); return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_); + + UpdatePerformanceFlags(); + RTC_DCHECK_EQ(performance_flags_by_spatial_index_.size(), + 
static_cast(num_spatial_layers_)); + if (performance_flags_.use_per_layer_speed) { + for (int si = 0; si < num_spatial_layers_; ++si) { + svc_params_.speed_per_layer[si] = + performance_flags_by_spatial_index_[si].base_layer_speed; + svc_params_.loopfilter_ctrl[si] = + performance_flags_by_spatial_index_[si].deblock_mode; + } + } + vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT, rc_max_intra_target_); vpx_codec_control(encoder_, VP9E_SET_AQ_MODE, @@ -732,6 +827,11 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { vpx_codec_control(encoder_, VP9E_SET_SVC, 1); vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, &svc_params_); } + if (!performance_flags_.use_per_layer_speed) { + vpx_codec_control( + encoder_, VP8E_SET_CPUUSED, + performance_flags_by_spatial_index_.rbegin()->base_layer_speed); + } if (num_spatial_layers_ > 1) { switch (inter_layer_pred_) { @@ -814,6 +914,7 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { // Enable encoder skip of static/low content blocks. vpx_codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 1); inited_ = true; + config_changed_ = true; return WEBRTC_VIDEO_CODEC_OK; } @@ -857,6 +958,13 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, force_key_frame_ = true; } + if (svc_controller_) { + layer_frames_ = svc_controller_->NextFrameConfig(force_key_frame_); + if (layer_frames_.empty()) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + } + vpx_svc_layer_id_t layer_id = {0}; if (!force_key_frame_) { const size_t gof_idx = (pics_since_key_ + 1) % gof_.num_frames_in_gof; @@ -917,7 +1025,10 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, } } - for (int sl_idx = 0; sl_idx < num_active_spatial_layers_; ++sl_idx) { + // Need to set temporal layer id on ALL layers, even disabled ones. 
+ // Otherwise libvpx might produce frames on a disabled layer: + // http://crbug.com/1051476 + for (int sl_idx = 0; sl_idx < num_spatial_layers_; ++sl_idx) { layer_id.temporal_layer_id_per_spatial[sl_idx] = layer_id.temporal_layer_id; } @@ -925,6 +1036,33 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, layer_id.spatial_layer_id = first_active_layer_; } + if (svc_controller_) { + layer_id.spatial_layer_id = layer_frames_.front().SpatialId(); + layer_id.temporal_layer_id = layer_frames_.front().TemporalId(); + for (const auto& layer : layer_frames_) { + layer_id.temporal_layer_id_per_spatial[layer.SpatialId()] = + layer.TemporalId(); + } + } + + if (is_svc_ && performance_flags_.use_per_layer_speed) { + // Update speed settings that might depend on temporal index. + bool speed_updated = false; + for (int sl_idx = 0; sl_idx < num_spatial_layers_; ++sl_idx) { + const int target_speed = + layer_id.temporal_layer_id_per_spatial[sl_idx] == 0 + ? performance_flags_by_spatial_index_[sl_idx].base_layer_speed + : performance_flags_by_spatial_index_[sl_idx].high_layer_speed; + if (svc_params_.speed_per_layer[sl_idx] != target_speed) { + svc_params_.speed_per_layer[sl_idx] = target_speed; + speed_updated = true; + } + } + if (speed_updated) { + vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, &svc_params_); + } + } + vpx_codec_control(encoder_, VP9E_SET_SVC_LAYER_ID, &layer_id); if (num_spatial_layers_ > 1) { @@ -933,8 +1071,30 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, &svc_drop_frame_); } - if (vpx_codec_enc_config_set(encoder_, config_)) { - return WEBRTC_VIDEO_CODEC_ERROR; + if (config_changed_) { + if (vpx_codec_enc_config_set(encoder_, config_)) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + + if (!performance_flags_.use_per_layer_speed) { + // Not setting individual speeds per layer, find the highest active + // resolution instead and base the speed on that. 
+ for (int i = num_spatial_layers_ - 1; i >= 0; --i) { + if (config_->ss_target_bitrate[i] > 0) { + int width = (svc_params_.scaling_factor_num[i] * config_->g_w) / + svc_params_.scaling_factor_den[i]; + int height = (svc_params_.scaling_factor_num[i] * config_->g_h) / + svc_params_.scaling_factor_den[i]; + int speed = + std::prev(performance_flags_.settings_by_resolution.lower_bound( + width * height)) + ->second.base_layer_speed; + vpx_codec_control(encoder_, VP8E_SET_CPUUSED, speed); + break; + } + } + } + config_changed_ = false; } RTC_DCHECK_EQ(input_image.width(), raw_->d_w); @@ -947,20 +1107,41 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, input_image_ = &input_image; // Keep reference to buffer until encode completes. - rtc::scoped_refptr i420_buffer; + rtc::scoped_refptr video_frame_buffer; const I010BufferInterface* i010_buffer; rtc::scoped_refptr i010_copy; switch (profile_) { case VP9Profile::kProfile0: { - i420_buffer = input_image.video_frame_buffer()->ToI420(); - // Image in vpx_image_t format. - // Input image is const. VPX's raw image is not defined as const. 
- raw_->planes[VPX_PLANE_Y] = const_cast(i420_buffer->DataY()); - raw_->planes[VPX_PLANE_U] = const_cast(i420_buffer->DataU()); - raw_->planes[VPX_PLANE_V] = const_cast(i420_buffer->DataV()); - raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY(); - raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU(); - raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV(); + if (input_image.video_frame_buffer()->type() == + VideoFrameBuffer::Type::kNV12) { + const NV12BufferInterface* nv12_buffer = + input_image.video_frame_buffer()->GetNV12(); + video_frame_buffer = nv12_buffer; + MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12); + raw_->planes[VPX_PLANE_Y] = const_cast(nv12_buffer->DataY()); + raw_->planes[VPX_PLANE_U] = const_cast(nv12_buffer->DataUV()); + raw_->planes[VPX_PLANE_V] = raw_->planes[VPX_PLANE_U] + 1; + raw_->stride[VPX_PLANE_Y] = nv12_buffer->StrideY(); + raw_->stride[VPX_PLANE_U] = nv12_buffer->StrideUV(); + raw_->stride[VPX_PLANE_V] = nv12_buffer->StrideUV(); + } else { + rtc::scoped_refptr i420_buffer = + input_image.video_frame_buffer()->ToI420(); + video_frame_buffer = i420_buffer; + MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420); + // Image in vpx_image_t format. + // Input image is const. VPX's raw image is not defined as const. 
+ raw_->planes[VPX_PLANE_Y] = const_cast(i420_buffer->DataY()); + raw_->planes[VPX_PLANE_U] = const_cast(i420_buffer->DataU()); + raw_->planes[VPX_PLANE_V] = const_cast(i420_buffer->DataV()); + raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY(); + raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU(); + raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV(); + } + break; + } + case VP9Profile::kProfile1: { + RTC_NOTREACHED(); break; } case VP9Profile::kProfile2: { @@ -995,7 +1176,10 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, flags = VPX_EFLAG_FORCE_KF; } - if (external_ref_control_) { + if (svc_controller_) { + vpx_svc_ref_frame_config_t ref_config = Vp9References(layer_frames_); + vpx_codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, &ref_config); + } else if (external_ref_control_) { vpx_svc_ref_frame_config_t ref_config = SetReferences(force_key_frame_, layer_id.spatial_layer_id); @@ -1161,6 +1345,31 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, } first_frame_in_picture_ = false; + + // Populate codec-agnostic section in the codec specific structure. 
+ if (svc_controller_) { + auto it = absl::c_find_if( + layer_frames_, + [&](const ScalableVideoController::LayerFrameConfig& config) { + return config.SpatialId() == spatial_idx->value_or(0); + }); + RTC_CHECK(it != layer_frames_.end()) + << "Failed to find spatial id " << spatial_idx->value_or(0); + codec_specific->generic_frame_info = svc_controller_->OnEncodeDone(*it); + if (is_key_frame) { + codec_specific->template_structure = + svc_controller_->DependencyStructure(); + auto& resolutions = codec_specific->template_structure->resolutions; + resolutions.resize(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + resolutions[sid] = RenderResolution( + /*width=*/codec_.width * svc_params_.scaling_factor_num[sid] / + svc_params_.scaling_factor_den[sid], + /*height=*/codec_.height * svc_params_.scaling_factor_num[sid] / + svc_params_.scaling_factor_den[sid]); + } + } + } } void VP9EncoderImpl::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, @@ -1449,7 +1658,6 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { encoded_image_._frameType = VideoFrameType::kVideoFrameKey; force_key_frame_ = false; } - RTC_DCHECK_LE(encoded_image_.size(), encoded_image_.capacity()); codec_specific_ = {}; absl::optional spatial_index; @@ -1488,17 +1696,10 @@ void VP9EncoderImpl::DeliverBufferedFrame(bool end_of_picture) { } } - codec_specific_.codecSpecific.VP9.end_of_picture = end_of_picture; + codec_specific_.end_of_picture = end_of_picture; - // No data partitioning in VP9, so 1 partition only. 
- int part_idx = 0; - RTPFragmentationHeader frag_info; - frag_info.VerifyAndAllocateFragmentationHeader(1); - frag_info.fragmentationOffset[part_idx] = 0; - frag_info.fragmentationLength[part_idx] = encoded_image_.size(); - - encoded_complete_callback_->OnEncodedImage(encoded_image_, &codec_specific_, - &frag_info); + encoded_complete_callback_->OnEncodedImage(encoded_image_, + &codec_specific_); if (codec_.mode == VideoCodecMode::kScreensharing) { const uint8_t spatial_idx = encoded_image_.SpatialIndex().value_or(0); @@ -1536,24 +1737,47 @@ VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const { EncoderInfo info; info.supports_native_handle = false; info.implementation_name = "libvpx"; - info.scaling_settings = VideoEncoder::ScalingSettings::kOff; + if (quality_scaler_experiment_.enabled && inited_ && + codec_.VP9().automaticResizeOn) { + info.scaling_settings = VideoEncoder::ScalingSettings( + quality_scaler_experiment_.low_qp, quality_scaler_experiment_.high_qp); + } else { + info.scaling_settings = VideoEncoder::ScalingSettings::kOff; + } info.has_trusted_rate_controller = trusted_rate_controller_; info.is_hardware_accelerated = false; info.has_internal_source = false; - for (size_t si = 0; si < num_spatial_layers_; ++si) { - info.fps_allocation[si].clear(); - if (!codec_.spatialLayers[si].active) { - continue; + if (inited_) { + // Find the max configured fps of any active spatial layer. + float max_fps = 0.0; + for (size_t si = 0; si < num_spatial_layers_; ++si) { + if (codec_.spatialLayers[si].active && + codec_.spatialLayers[si].maxFramerate > max_fps) { + max_fps = codec_.spatialLayers[si].maxFramerate; + } } - // This spatial layer may already use a fraction of the total frame rate. - const float sl_fps_fraction = - codec_.spatialLayers[si].maxFramerate / codec_.maxFramerate; - for (size_t ti = 0; ti < num_temporal_layers_; ++ti) { - const uint32_t decimator = - num_temporal_layers_ <= 1 ? 
1 : config_->ts_rate_decimator[ti]; - RTC_DCHECK_GT(decimator, 0); - info.fps_allocation[si].push_back(rtc::saturated_cast( - EncoderInfo::kMaxFramerateFraction * (sl_fps_fraction / decimator))); + + for (size_t si = 0; si < num_spatial_layers_; ++si) { + info.fps_allocation[si].clear(); + if (!codec_.spatialLayers[si].active) { + continue; + } + + // This spatial layer may already use a fraction of the total frame rate. + const float sl_fps_fraction = + codec_.spatialLayers[si].maxFramerate / max_fps; + for (size_t ti = 0; ti < num_temporal_layers_; ++ti) { + const uint32_t decimator = + num_temporal_layers_ <= 1 ? 1 : config_->ts_rate_decimator[ti]; + RTC_DCHECK_GT(decimator, 0); + info.fps_allocation[si].push_back( + rtc::saturated_cast(EncoderInfo::kMaxFramerateFraction * + (sl_fps_fraction / decimator))); + } + } + if (profile_ == VP9Profile::kProfile0) { + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; } } return info; @@ -1576,7 +1800,8 @@ size_t VP9EncoderImpl::SteadyStateSize(int sid, int tid) { // static VP9EncoderImpl::VariableFramerateExperiment -VP9EncoderImpl::ParseVariableFramerateConfig(std::string group_name) { +VP9EncoderImpl::ParseVariableFramerateConfig( + const WebRtcKeyValueConfig& trials) { FieldTrialFlag enabled = FieldTrialFlag("Enabled"); FieldTrialParameter framerate_limit("min_fps", 5.0); FieldTrialParameter qp("min_qp", 32); @@ -1585,7 +1810,7 @@ VP9EncoderImpl::ParseVariableFramerateConfig(std::string group_name) { "frames_before_steady_state", 5); ParseFieldTrial({&enabled, &framerate_limit, &qp, &undershoot_percentage, &frames_before_steady_state}, - field_trial::FindFullName(group_name)); + trials.Lookup("WebRTC-VP9VariableFramerateScreenshare")); VariableFramerateExperiment config; config.enabled = enabled.Get(); config.framerate_limit = framerate_limit.Get(); @@ -1596,16 +1821,139 @@ VP9EncoderImpl::ParseVariableFramerateConfig(std::string group_name) { return config; } 
-VP9DecoderImpl::VP9DecoderImpl() +// static +VP9EncoderImpl::QualityScalerExperiment +VP9EncoderImpl::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { + FieldTrialFlag disabled = FieldTrialFlag("Disabled"); + FieldTrialParameter low_qp("low_qp", kLowVp9QpThreshold); + FieldTrialParameter high_qp("hihg_qp", kHighVp9QpThreshold); + ParseFieldTrial({&disabled, &low_qp, &high_qp}, + trials.Lookup("WebRTC-VP9QualityScaler")); + QualityScalerExperiment config; + config.enabled = !disabled.Get(); + RTC_LOG(LS_INFO) << "Webrtc quality scaler for vp9 is " + << (config.enabled ? "enabled." : "disabled"); + config.low_qp = low_qp.Get(); + config.high_qp = high_qp.Get(); + + return config; +} + +void VP9EncoderImpl::UpdatePerformanceFlags() { + const auto find_speed = [&](int min_pixel_count) { + RTC_DCHECK(!performance_flags_.settings_by_resolution.empty()); + auto it = + performance_flags_.settings_by_resolution.upper_bound(min_pixel_count); + return std::prev(it)->second; + }; + + performance_flags_by_spatial_index_.clear(); + if (is_svc_) { + for (int si = 0; si < num_spatial_layers_; ++si) { + performance_flags_by_spatial_index_.push_back(find_speed( + codec_.spatialLayers[si].width * codec_.spatialLayers[si].height)); + } + } else { + performance_flags_by_spatial_index_.push_back( + find_speed(codec_.width * codec_.height)); + } +} + +// static +VP9EncoderImpl::PerformanceFlags +VP9EncoderImpl::ParsePerformanceFlagsFromTrials( + const WebRtcKeyValueConfig& trials) { + struct Params : public PerformanceFlags::ParameterSet { + int min_pixel_count = 0; + }; + + FieldTrialStructList trials_list( + {FieldTrialStructMember("min_pixel_count", + [](Params* p) { return &p->min_pixel_count; }), + FieldTrialStructMember("high_layer_speed", + [](Params* p) { return &p->high_layer_speed; }), + FieldTrialStructMember("base_layer_speed", + [](Params* p) { return &p->base_layer_speed; }), + FieldTrialStructMember("deblock_mode", + [](Params* p) { return &p->deblock_mode; 
})}, + {}); + + FieldTrialFlag per_layer_speed("use_per_layer_speed"); + + ParseFieldTrial({&trials_list, &per_layer_speed}, + trials.Lookup("WebRTC-VP9-PerformanceFlags")); + + PerformanceFlags flags; + flags.use_per_layer_speed = per_layer_speed.Get(); + + constexpr int kMinSpeed = 1; + constexpr int kMaxSpeed = 9; + for (auto& f : trials_list.Get()) { + if (f.base_layer_speed < kMinSpeed || f.base_layer_speed > kMaxSpeed || + f.high_layer_speed < kMinSpeed || f.high_layer_speed > kMaxSpeed || + f.deblock_mode < 0 || f.deblock_mode > 2) { + RTC_LOG(LS_WARNING) << "Ignoring invalid performance flags: " + << "min_pixel_count = " << f.min_pixel_count + << ", high_layer_speed = " << f.high_layer_speed + << ", base_layer_speed = " << f.base_layer_speed + << ", deblock_mode = " << f.deblock_mode; + continue; + } + flags.settings_by_resolution[f.min_pixel_count] = f; + } + + if (flags.settings_by_resolution.empty()) { + return GetDefaultPerformanceFlags(); + } + + return flags; +} + +// static +VP9EncoderImpl::PerformanceFlags VP9EncoderImpl::GetDefaultPerformanceFlags() { + PerformanceFlags flags; + flags.use_per_layer_speed = false; +#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) + // Speed 8 on all layers for all resolutions. + flags.settings_by_resolution[0] = {8, 8, 0}; +#else + // For smaller resolutions, use lower speed setting (get some coding gain at + // the cost of increased encoding complexity). + flags.settings_by_resolution[0] = {5, 5, 0}; + + // Use speed 7 for QCIF and above. + flags.settings_by_resolution[352 * 288] = {7, 7, 0}; +#endif + return flags; +} + +void VP9EncoderImpl::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { + if (!raw_) { + raw_ = vpx_img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, nullptr); + } else if (raw_->fmt != fmt) { + RTC_LOG(INFO) << "Switching VP9 encoder pixel format to " + << (fmt == VPX_IMG_FMT_NV12 ? 
"NV12" : "I420"); + vpx_img_free(raw_); + raw_ = vpx_img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, nullptr); + } + // else no-op since the image is already in the right format. +} + +VP9DecoderImpl::VP9DecoderImpl() : VP9DecoderImpl(FieldTrialBasedConfig()) {} +VP9DecoderImpl::VP9DecoderImpl(const WebRtcKeyValueConfig& trials) : decode_complete_callback_(nullptr), inited_(false), decoder_(nullptr), - key_frame_required_(true) {} + key_frame_required_(true), + preferred_output_format_( + absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled") + ? VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} VP9DecoderImpl::~VP9DecoderImpl() { inited_ = true; // in order to do the actual release Release(); - int num_buffers_in_use = frame_buffer_pool_.GetNumBuffersInUse(); + int num_buffers_in_use = libvpx_buffer_pool_.GetNumBuffersInUse(); if (num_buffers_in_use > 0) { // The frame buffers are reference counted and frames are exposed after // decoding. There may be valid usage cases where previous frames are still @@ -1635,20 +1983,38 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { // errors earlier than the multi-threads version. // - Make peak CPU usage under control (not depending on input) cfg.threads = 1; - (void)kMaxNumTiles4kVideo; // unused #else - // We want to use multithreading when decoding high resolution videos. But, - // since we don't know resolution of input stream at this stage, we always - // enable it. - cfg.threads = std::min(number_of_cores, kMaxNumTiles4kVideo); + if (!inst) { + // No config provided - don't know resolution to decode yet. + // Set thread count to one in the meantime. + cfg.threads = 1; + } else { + // We want to use multithreading when decoding high resolution videos. But + // not too many in order to avoid overhead when many stream are decoded + // concurrently. 
+ // Set 2 thread as target for 1280x720 pixel count, and then scale up + // linearly from there - but cap at physical core count. + // For common resolutions this results in: + // 1 for 360p + // 2 for 720p + // 4 for 1080p + // 8 for 1440p + // 18 for 4K + int num_threads = + std::max(1, 2 * (inst->width * inst->height) / (1280 * 720)); + cfg.threads = std::min(number_of_cores, num_threads); + current_codec_ = *inst; + } #endif + num_cores_ = number_of_cores; + vpx_codec_flags_t flags = 0; if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { return WEBRTC_VIDEO_CODEC_MEMORY; } - if (!frame_buffer_pool_.InitializeVpxUsePool(decoder_)) { + if (!libvpx_buffer_pool_.InitializeVpxUsePool(decoder_)) { return WEBRTC_VIDEO_CODEC_MEMORY; } @@ -1656,10 +2022,20 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { // Always start with a complete key frame. key_frame_required_ = true; if (inst && inst->buffer_pool_size) { - if (!frame_buffer_pool_.Resize(*inst->buffer_pool_size)) { + if (!libvpx_buffer_pool_.Resize(*inst->buffer_pool_size) || + !output_buffer_pool_.Resize(*inst->buffer_pool_size)) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } } + + vpx_codec_err_t status = + vpx_codec_control(decoder_, VP9D_SET_LOOP_FILTER_OPT, 1); + if (status != VPX_CODEC_OK) { + RTC_LOG(LS_ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. 
" + << vpx_codec_error(decoder_); + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + return WEBRTC_VIDEO_CODEC_OK; } @@ -1672,16 +2048,34 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image, if (decode_complete_callback_ == nullptr) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } + + if (input_image._frameType == VideoFrameType::kVideoFrameKey) { + absl::optional frame_info = + vp9::ParseIntraFrameInfo(input_image.data(), input_image.size()); + if (frame_info) { + if (frame_info->frame_width != current_codec_.width || + frame_info->frame_height != current_codec_.height) { + // Resolution has changed, tear down and re-init a new decoder in + // order to get correct sizing. + Release(); + current_codec_.width = frame_info->frame_width; + current_codec_.height = frame_info->frame_height; + int reinit_status = InitDecode(¤t_codec_, num_cores_); + if (reinit_status != WEBRTC_VIDEO_CODEC_OK) { + RTC_LOG(LS_WARNING) << "Failed to re-init decoder."; + return reinit_status; + } + } + } else { + RTC_LOG(LS_WARNING) << "Failed to parse VP9 header from key-frame."; + } + } + // Always start with a complete key frame. if (key_frame_required_) { if (input_image._frameType != VideoFrameType::kVideoFrameKey) return WEBRTC_VIDEO_CODEC_ERROR; - // We have a key frame - is it complete? - if (input_image._completeFrame) { - key_frame_required_ = false; - } else { - return WEBRTC_VIDEO_CODEC_ERROR; - } + key_frame_required_ = false; } vpx_codec_iter_t iter = nullptr; vpx_image_t* img; @@ -1689,8 +2083,9 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image, if (input_image.size() == 0) { buffer = nullptr; // Triggers full frame concealment. } - // During decode libvpx may get and release buffers from |frame_buffer_pool_|. - // In practice libvpx keeps a few (~3-4) buffers alive at a time. + // During decode libvpx may get and release buffers from + // |libvpx_buffer_pool_|. In practice libvpx keeps a few (~3-4) buffers alive + // at a time. 
if (vpx_codec_decode(decoder_, buffer, static_cast(input_image.size()), 0, VPX_DL_REALTIME)) { @@ -1733,17 +2128,35 @@ int VP9DecoderImpl::ReturnFrame( rtc::scoped_refptr img_wrapped_buffer; switch (img->bit_depth) { case 8: - RTC_DCHECK(img->fmt == VPX_IMG_FMT_I420 || img->fmt == VPX_IMG_FMT_I444); if (img->fmt == VPX_IMG_FMT_I420) { - img_wrapped_buffer = WrapI420Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI420Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release |img_buffer|. - rtc::KeepRefUntilDone(img_buffer)); + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + rtc::scoped_refptr nv12_buffer = + output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); + if (!nv12_buffer.get()) { + // Buffer pool is full. + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + img_wrapped_buffer = nv12_buffer; + libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + nv12_buffer->MutableDataY(), + nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), + nv12_buffer->StrideUV(), img->d_w, img->d_h); + // No holding onto img_buffer as it's no longer needed and can be + // reused. + } else { + img_wrapped_buffer = WrapI420Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI420Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release |img_buffer|. 
+ rtc::KeepRefUntilDone(img_buffer)); + } } else if (img->fmt == VPX_IMG_FMT_I444) { img_wrapped_buffer = WrapI444Buffer( img->d_w, img->d_h, img->planes[VPX_PLANE_Y], @@ -1754,6 +2167,11 @@ int VP9DecoderImpl::ReturnFrame( // frame buffer is through a callback function. This is where we // should release |img_buffer|. rtc::KeepRefUntilDone(img_buffer)); + } else { + RTC_LOG(LS_ERROR) + << "Unsupported pixel format produced by the decoder: " + << static_cast(img->fmt); + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; } break; case 10: @@ -1799,7 +2217,7 @@ int VP9DecoderImpl::Release() { if (decoder_ != nullptr) { if (inited_) { // When a codec is destroyed libvpx will release any buffers of - // |frame_buffer_pool_| it is currently using. + // |libvpx_buffer_pool_| it is currently using. if (vpx_codec_destroy(decoder_)) { ret_val = WEBRTC_VIDEO_CODEC_MEMORY; } @@ -1810,7 +2228,8 @@ int VP9DecoderImpl::Release() { // Releases buffers from the pool. Any buffers not in use are deleted. Buffers // still referenced externally are deleted once fully released, not returning // to the pool. 
- frame_buffer_pool_.ClearPool(); + libvpx_buffer_pool_.ClearPool(); + output_buffer_pool_.Release(); inited_ = false; return ret_val; } diff --git a/modules/video_coding/codecs/vp9/vp9_impl.h b/modules/video_coding/codecs/vp9/vp9_impl.h index a5f2f35336..075a214628 100644 --- a/modules/video_coding/codecs/vp9/vp9_impl.h +++ b/modules/video_coding/codecs/vp9/vp9_impl.h @@ -20,10 +20,13 @@ #include #include "api/fec_controller_override.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_encoder.h" +#include "common_video/include/video_frame_buffer_pool.h" #include "media/base/vp9_profile.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/framerate_controller.h" #include "vpx/vp8cx.h" #include "vpx/vpx_decoder.h" @@ -34,6 +37,8 @@ namespace webrtc { class VP9EncoderImpl : public VP9Encoder { public: explicit VP9EncoderImpl(const cricket::VideoCodec& codec); + VP9EncoderImpl(const cricket::VideoCodec& codec, + const WebRtcKeyValueConfig& trials); ~VP9EncoderImpl() override; @@ -98,6 +103,8 @@ class VP9EncoderImpl : public VP9Encoder { size_t SteadyStateSize(int sid, int tid); + void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt); + EncodedImage encoded_image_; CodecSpecificInfo codec_specific_; EncodedImageCallback* encoded_complete_callback_; @@ -105,7 +112,6 @@ class VP9EncoderImpl : public VP9Encoder { const VP9Profile profile_; bool inited_; int64_t timestamp_; - int cpu_speed_; uint32_t rc_max_intra_target_; vpx_codec_ctx_t* encoder_; vpx_codec_enc_cfg_t* config_; @@ -125,7 +131,6 @@ class VP9EncoderImpl : public VP9Encoder { InterLayerPredMode inter_layer_pred_; bool external_ref_control_; const bool trusted_rate_controller_; - const bool dynamic_rate_settings_; bool layer_buffering_; const bool full_superframe_drop_; vpx_svc_frame_drop_t 
svc_drop_frame_; @@ -133,7 +138,9 @@ class VP9EncoderImpl : public VP9Encoder { VideoBitrateAllocation current_bitrate_allocation_; bool ss_info_needed_; bool force_all_active_layers_; + const bool use_svc_controller_; + std::unique_ptr svc_controller_; std::vector framerate_controller_; // Used for flexible mode. @@ -157,6 +164,7 @@ class VP9EncoderImpl : public VP9Encoder { size_t temporal_layer_id = 0; }; std::map ref_buf_; + std::vector layer_frames_; // Variable frame-rate related fields and methods. const struct VariableFramerateExperiment { @@ -173,14 +181,62 @@ class VP9EncoderImpl : public VP9Encoder { int frames_before_steady_state; } variable_framerate_experiment_; static VariableFramerateExperiment ParseVariableFramerateConfig( - std::string group_name); + const WebRtcKeyValueConfig& trials); FramerateController variable_framerate_controller_; + + const struct QualityScalerExperiment { + int low_qp; + int high_qp; + bool enabled; + } quality_scaler_experiment_; + static QualityScalerExperiment ParseQualityScalerConfig( + const WebRtcKeyValueConfig& trials); + const bool external_ref_ctrl_; + + // Flags that can affect speed vs quality tradeoff, and are configureable per + // resolution ranges. + struct PerformanceFlags { + // If false, a lookup will be made in |settings_by_resolution| base on the + // highest currently active resolution, and the overall speed then set to + // to the |base_layer_speed| matching that entry. + // If true, each active resolution will have it's speed and deblock_mode set + // based on it resolution, and the high layer speed configured for non + // base temporal layer frames. + bool use_per_layer_speed = false; + + struct ParameterSet { + int base_layer_speed = -1; // Speed setting for TL0. + int high_layer_speed = -1; // Speed setting for TL1-TL3. 
+ // 0 = deblock all temporal layers (TL) + // 1 = disable deblock for top-most TL + // 2 = disable deblock for all TLs + int deblock_mode = 0; + }; + // Map from min pixel count to settings for that resolution and above. + // E.g. if you want some settings A if below wvga (640x360) and some other + // setting B at wvga and above, you'd use map {{0, A}, {230400, B}}. + std::map settings_by_resolution; + }; + // Performance flags, ordered by |min_pixel_count|. + const PerformanceFlags performance_flags_; + // Caching of of |speed_configs_|, where index i maps to the resolution as + // specified in |codec_.spatialLayer[i]|. + std::vector + performance_flags_by_spatial_index_; + void UpdatePerformanceFlags(); + static PerformanceFlags ParsePerformanceFlagsFromTrials( + const WebRtcKeyValueConfig& trials); + static PerformanceFlags GetDefaultPerformanceFlags(); + int num_steady_state_frames_; + // Only set config when this flag is set. + bool config_changed_; }; class VP9DecoderImpl : public VP9Decoder { public: VP9DecoderImpl(); + explicit VP9DecoderImpl(const WebRtcKeyValueConfig& trials); virtual ~VP9DecoderImpl(); @@ -203,11 +259,18 @@ class VP9DecoderImpl : public VP9Decoder { const webrtc::ColorSpace* explicit_color_space); // Memory pool used to share buffers between libvpx and webrtc. - Vp9FrameBufferPool frame_buffer_pool_; + Vp9FrameBufferPool libvpx_buffer_pool_; + // Buffer pool used to allocate additionally needed NV12 buffers. + VideoFrameBufferPool output_buffer_pool_; DecodedImageCallback* decode_complete_callback_; bool inited_; vpx_codec_ctx_t* decoder_; bool key_frame_required_; + VideoCodec current_codec_; + int num_cores_; + + // Decoder should produce this format if possible. 
+ const VideoFrameBuffer::Type preferred_output_format_; }; } // namespace webrtc diff --git a/modules/video_coding/decoder_database.cc b/modules/video_coding/decoder_database.cc index c203721e5d..a7a4b8f75d 100644 --- a/modules/video_coding/decoder_database.cc +++ b/modules/video_coding/decoder_database.cc @@ -15,12 +15,8 @@ namespace webrtc { -VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings, - int number_of_cores, - bool require_key_frame) - : settings(settings), - number_of_cores(number_of_cores), - require_key_frame(require_key_frame) { +VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings, int number_of_cores) + : settings(settings), number_of_cores(number_of_cores) { RTC_DCHECK_GE(number_of_cores, 0); } @@ -33,7 +29,10 @@ VCMExtDecoderMapItem::VCMExtDecoderMapItem( VCMDecoderMapItem::~VCMDecoderMapItem() {} VCMDecoderDataBase::VCMDecoderDataBase() - : receive_codec_(), dec_map_(), dec_external_map_() {} + : current_payload_type_(0), + receive_codec_(), + dec_map_(), + dec_external_map_() {} VCMDecoderDataBase::~VCMDecoderDataBase() { ptr_decoder_.reset(); @@ -74,17 +73,17 @@ void VCMDecoderDataBase::RegisterExternalDecoder(VideoDecoder* external_decoder, dec_external_map_[payload_type] = ext_decoder; } -bool VCMDecoderDataBase::RegisterReceiveCodec(const VideoCodec* receive_codec, - int number_of_cores, - bool require_key_frame) { +bool VCMDecoderDataBase::RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receive_codec, + int number_of_cores) { if (number_of_cores < 0) { return false; } // If payload value already exists, erase old and insert new. 
- DeregisterReceiveCodec(receive_codec->plType); + DeregisterReceiveCodec(payload_type); VideoCodec* new_receive_codec = new VideoCodec(*receive_codec); - dec_map_[receive_codec->plType] = new VCMDecoderMapItem( - new_receive_codec, number_of_cores, require_key_frame); + dec_map_[payload_type] = + new VCMDecoderMapItem(new_receive_codec, number_of_cores); return true; } @@ -95,9 +94,10 @@ bool VCMDecoderDataBase::DeregisterReceiveCodec(uint8_t payload_type) { } delete it->second; dec_map_.erase(it); - if (receive_codec_.plType == payload_type) { + if (payload_type == current_payload_type_) { // This codec is currently in use. - memset(&receive_codec_, 0, sizeof(VideoCodec)); + receive_codec_ = {}; + current_payload_type_ = 0; } return true; } @@ -107,24 +107,27 @@ VCMGenericDecoder* VCMDecoderDataBase::GetDecoder( VCMDecodedFrameCallback* decoded_frame_callback) { RTC_DCHECK(decoded_frame_callback->UserReceiveCallback()); uint8_t payload_type = frame.PayloadType(); - if (payload_type == receive_codec_.plType || payload_type == 0) { + if (payload_type == current_payload_type_ || payload_type == 0) { return ptr_decoder_.get(); } // If decoder exists - delete. 
if (ptr_decoder_) { ptr_decoder_.reset(); - memset(&receive_codec_, 0, sizeof(VideoCodec)); + receive_codec_ = {}; + current_payload_type_ = 0; } ptr_decoder_ = CreateAndInitDecoder(frame, &receive_codec_); if (!ptr_decoder_) { return nullptr; } + current_payload_type_ = frame.PayloadType(); VCMReceiveCallback* callback = decoded_frame_callback->UserReceiveCallback(); - callback->OnIncomingPayloadType(receive_codec_.plType); + callback->OnIncomingPayloadType(current_payload_type_); if (ptr_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback) < 0) { ptr_decoder_.reset(); - memset(&receive_codec_, 0, sizeof(VideoCodec)); + receive_codec_ = {}; + current_payload_type_ = 0; return nullptr; } return ptr_decoder_.get(); @@ -169,11 +172,13 @@ std::unique_ptr VCMDecoderDataBase::CreateAndInitDecoder( decoder_item->settings->width = frame.EncodedImage()._encodedWidth; decoder_item->settings->height = frame.EncodedImage()._encodedHeight; } - if (ptr_decoder->InitDecode(decoder_item->settings.get(), - decoder_item->number_of_cores) < 0) { + int err = ptr_decoder->InitDecode(decoder_item->settings.get(), + decoder_item->number_of_cores); + if (err < 0) { + RTC_LOG(LS_ERROR) << "Failed to initialize decoder. 
Error code: " << err; return nullptr; } - memcpy(new_codec, decoder_item->settings.get(), sizeof(VideoCodec)); + *new_codec = *decoder_item->settings.get(); return ptr_decoder; } diff --git a/modules/video_coding/decoder_database.h b/modules/video_coding/decoder_database.h index 8c96b41efd..abfd81e342 100644 --- a/modules/video_coding/decoder_database.h +++ b/modules/video_coding/decoder_database.h @@ -20,14 +20,11 @@ namespace webrtc { struct VCMDecoderMapItem { public: - VCMDecoderMapItem(VideoCodec* settings, - int number_of_cores, - bool require_key_frame); + VCMDecoderMapItem(VideoCodec* settings, int number_of_cores); ~VCMDecoderMapItem(); std::unique_ptr settings; int number_of_cores; - bool require_key_frame; }; struct VCMExtDecoderMapItem { @@ -48,9 +45,9 @@ class VCMDecoderDataBase { void RegisterExternalDecoder(VideoDecoder* external_decoder, uint8_t payload_type); - bool RegisterReceiveCodec(const VideoCodec* receive_codec, - int number_of_cores, - bool require_key_frame); + bool RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receive_codec, + int number_of_cores); bool DeregisterReceiveCodec(uint8_t payload_type); // Returns a decoder specified by frame.PayloadType. The decoded frame @@ -79,6 +76,7 @@ class VCMDecoderDataBase { const VCMExtDecoderMapItem* FindExternalDecoderItem( uint8_t payload_type) const; + uint8_t current_payload_type_; // Corresponding to receive_codec_. VideoCodec receive_codec_; std::unique_ptr ptr_decoder_; DecoderMap dec_map_; diff --git a/modules/video_coding/deprecated/BUILD.gn b/modules/video_coding/deprecated/BUILD.gn new file mode 100644 index 0000000000..fd3a5fa5fc --- /dev/null +++ b/modules/video_coding/deprecated/BUILD.gn @@ -0,0 +1,34 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. 
An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") + +rtc_library("nack_module") { + sources = [ + "nack_module.cc", + "nack_module.h", + ] + + deps = [ + "..:nack_module", + "../..:module_api", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../rtc_base:checks", + "../../../rtc_base:criticalsection", + "../../../rtc_base:deprecation", + "../../../rtc_base:logging", + "../../../rtc_base:macromagic", + "../../../rtc_base:rtc_numerics", + "../../../rtc_base/experiments:field_trial_parser", + "../../../rtc_base/synchronization:mutex", + "../../../system_wrappers", + "../../../system_wrappers:field_trial", + "../../utility", + ] +} diff --git a/modules/video_coding/nack_module.cc b/modules/video_coding/deprecated/nack_module.cc similarity index 81% rename from modules/video_coding/nack_module.cc rename to modules/video_coding/deprecated/nack_module.cc index e6fd9f3f70..f8cfd3440b 100644 --- a/modules/video_coding/nack_module.cc +++ b/modules/video_coding/deprecated/nack_module.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/nack_module.h" +#include "modules/video_coding/deprecated/nack_module.h" #include #include @@ -45,31 +45,31 @@ int64_t GetSendNackDelay() { } } // namespace -NackModule::NackInfo::NackInfo() +DEPRECATED_NackModule::NackInfo::NackInfo() : seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {} -NackModule::NackInfo::NackInfo(uint16_t seq_num, - uint16_t send_at_seq_num, - int64_t created_at_time) +DEPRECATED_NackModule::NackInfo::NackInfo(uint16_t seq_num, + uint16_t send_at_seq_num, + int64_t created_at_time) : seq_num(seq_num), send_at_seq_num(send_at_seq_num), created_at_time(created_at_time), sent_at_time(-1), retries(0) {} -NackModule::BackoffSettings::BackoffSettings(TimeDelta min_retry, - TimeDelta max_rtt, - double base) +DEPRECATED_NackModule::BackoffSettings::BackoffSettings(TimeDelta min_retry, + TimeDelta max_rtt, + double base) : min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {} -absl::optional -NackModule::BackoffSettings::ParseFromFieldTrials() { +absl::optional +DEPRECATED_NackModule::BackoffSettings::ParseFromFieldTrials() { // Matches magic number in RTPSender::OnReceivedNack(). - const TimeDelta kDefaultMinRetryInterval = TimeDelta::ms(5); + const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5); // Upper bound on link-delay considered for exponential backoff. // Selected so that cumulative delay with 1.25 base and 10 retries ends up // below 3s, since above that there will be a FIR generated instead. - const TimeDelta kDefaultMaxRtt = TimeDelta::ms(160); + const TimeDelta kDefaultMaxRtt = TimeDelta::Millis(160); // Default base for exponential backoff, adds 25% RTT delay for each retry. 
const double kDefaultBase = 1.25; @@ -82,15 +82,16 @@ NackModule::BackoffSettings::ParseFromFieldTrials() { field_trial::FindFullName("WebRTC-ExponentialNackBackoff")); if (enabled) { - return NackModule::BackoffSettings(min_retry.Get(), max_rtt.Get(), - base.Get()); + return DEPRECATED_NackModule::BackoffSettings(min_retry.Get(), + max_rtt.Get(), base.Get()); } return absl::nullopt; } -NackModule::NackModule(Clock* clock, - NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender) +DEPRECATED_NackModule::DEPRECATED_NackModule( + Clock* clock, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender) : clock_(clock), nack_sender_(nack_sender), keyframe_request_sender_(keyframe_request_sender), @@ -106,14 +107,15 @@ NackModule::NackModule(Clock* clock, RTC_DCHECK(keyframe_request_sender_); } -int NackModule::OnReceivedPacket(uint16_t seq_num, bool is_keyframe) { +int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num, + bool is_keyframe) { return OnReceivedPacket(seq_num, is_keyframe, false); } -int NackModule::OnReceivedPacket(uint16_t seq_num, - bool is_keyframe, - bool is_recovered) { - rtc::CritScope lock(&crit_); +int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num, + bool is_keyframe, + bool is_recovered) { + MutexLock lock(&mutex_); // TODO(philipel): When the packet includes information whether it is // retransmitted or not, use that value instead. 
For // now set it to true, which will cause the reordering @@ -181,8 +183,8 @@ int NackModule::OnReceivedPacket(uint16_t seq_num, return 0; } -void NackModule::ClearUpTo(uint16_t seq_num) { - rtc::CritScope lock(&crit_); +void DEPRECATED_NackModule::ClearUpTo(uint16_t seq_num) { + MutexLock lock(&mutex_); nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num)); keyframe_list_.erase(keyframe_list_.begin(), keyframe_list_.lower_bound(seq_num)); @@ -190,28 +192,28 @@ void NackModule::ClearUpTo(uint16_t seq_num) { recovered_list_.lower_bound(seq_num)); } -void NackModule::UpdateRtt(int64_t rtt_ms) { - rtc::CritScope lock(&crit_); +void DEPRECATED_NackModule::UpdateRtt(int64_t rtt_ms) { + MutexLock lock(&mutex_); rtt_ms_ = rtt_ms; } -void NackModule::Clear() { - rtc::CritScope lock(&crit_); +void DEPRECATED_NackModule::Clear() { + MutexLock lock(&mutex_); nack_list_.clear(); keyframe_list_.clear(); recovered_list_.clear(); } -int64_t NackModule::TimeUntilNextProcess() { +int64_t DEPRECATED_NackModule::TimeUntilNextProcess() { return std::max(next_process_time_ms_ - clock_->TimeInMilliseconds(), 0); } -void NackModule::Process() { +void DEPRECATED_NackModule::Process() { if (nack_sender_) { std::vector nack_batch; { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); nack_batch = GetNackBatch(kTimeOnly); } @@ -236,7 +238,7 @@ void NackModule::Process() { } } -bool NackModule::RemovePacketsUntilKeyFrame() { +bool DEPRECATED_NackModule::RemovePacketsUntilKeyFrame() { while (!keyframe_list_.empty()) { auto it = nack_list_.lower_bound(*keyframe_list_.begin()); @@ -254,8 +256,8 @@ bool NackModule::RemovePacketsUntilKeyFrame() { return false; } -void NackModule::AddPacketsToNack(uint16_t seq_num_start, - uint16_t seq_num_end) { +void DEPRECATED_NackModule::AddPacketsToNack(uint16_t seq_num_start, + uint16_t seq_num_end) { // Remove old packets. 
auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge); nack_list_.erase(nack_list_.begin(), it); @@ -289,20 +291,21 @@ void NackModule::AddPacketsToNack(uint16_t seq_num_start, } } -std::vector NackModule::GetNackBatch(NackFilterOptions options) { +std::vector DEPRECATED_NackModule::GetNackBatch( + NackFilterOptions options) { bool consider_seq_num = options != kTimeOnly; bool consider_timestamp = options != kSeqNumOnly; Timestamp now = clock_->CurrentTime(); std::vector nack_batch; auto it = nack_list_.begin(); while (it != nack_list_.end()) { - TimeDelta resend_delay = TimeDelta::ms(rtt_ms_); + TimeDelta resend_delay = TimeDelta::Millis(rtt_ms_); if (backoff_settings_) { resend_delay = std::max(resend_delay, backoff_settings_->min_retry_interval); if (it->second.retries > 1) { TimeDelta exponential_backoff = - std::min(TimeDelta::ms(rtt_ms_), backoff_settings_->max_rtt) * + std::min(TimeDelta::Millis(rtt_ms_), backoff_settings_->max_rtt) * std::pow(backoff_settings_->base, it->second.retries - 1); resend_delay = std::max(resend_delay, exponential_backoff); } @@ -334,13 +337,13 @@ std::vector NackModule::GetNackBatch(NackFilterOptions options) { return nack_batch; } -void NackModule::UpdateReorderingStatistics(uint16_t seq_num) { +void DEPRECATED_NackModule::UpdateReorderingStatistics(uint16_t seq_num) { RTC_DCHECK(AheadOf(newest_seq_num_, seq_num)); uint16_t diff = ReverseDiff(newest_seq_num_, seq_num); reordering_histogram_.Add(diff); } -int NackModule::WaitNumberOfPackets(float probability) const { +int DEPRECATED_NackModule::WaitNumberOfPackets(float probability) const { if (reordering_histogram_.NumValues() == 0) return 0; return reordering_histogram_.InverseCdf(probability); diff --git a/modules/video_coding/nack_module.h b/modules/video_coding/deprecated/nack_module.h similarity index 77% rename from modules/video_coding/nack_module.h rename to modules/video_coding/deprecated/nack_module.h index d4f705b351..f9580ae80c 100644 --- 
a/modules/video_coding/nack_module.h +++ b/modules/video_coding/deprecated/nack_module.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_NACK_MODULE_H_ -#define MODULES_VIDEO_CODING_NACK_MODULE_H_ +#ifndef MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_ +#define MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_ #include @@ -21,18 +21,19 @@ #include "modules/include/module.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/histogram.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecation.h" #include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" namespace webrtc { -class NackModule : public Module { +class DEPRECATED_NackModule : public Module { public: - NackModule(Clock* clock, - NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender); + DEPRECATED_NackModule(Clock* clock, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender); int OnReceivedPacket(uint16_t seq_num, bool is_keyframe); int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered); @@ -79,24 +80,24 @@ class NackModule : public Module { }; void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Removes packets from the nack list until the next keyframe. Returns true // if packets were removed. - bool RemovePacketsUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + bool RemovePacketsUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); std::vector GetNackBatch(NackFilterOptions options) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Update the reordering distribution. 
void UpdateReorderingStatistics(uint16_t seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns how many packets we have to wait in order to receive the packet // with probability |probabilty| or higher. int WaitNumberOfPackets(float probability) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - rtc::CriticalSection crit_; + Mutex mutex_; Clock* const clock_; NackSender* const nack_sender_; KeyFrameRequestSender* const keyframe_request_sender_; @@ -105,15 +106,15 @@ class NackModule : public Module { // known thread (e.g. see |initialized_|). Those probably do not need // synchronized access. std::map> nack_list_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); std::set> keyframe_list_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); std::set> recovered_list_ - RTC_GUARDED_BY(crit_); - video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(crit_); - bool initialized_ RTC_GUARDED_BY(crit_); - int64_t rtt_ms_ RTC_GUARDED_BY(crit_); - uint16_t newest_seq_num_ RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); + video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(mutex_); + bool initialized_ RTC_GUARDED_BY(mutex_); + int64_t rtt_ms_ RTC_GUARDED_BY(mutex_); + uint16_t newest_seq_num_ RTC_GUARDED_BY(mutex_); // Only touched on the process thread. 
int64_t next_process_time_ms_; @@ -124,6 +125,8 @@ class NackModule : public Module { const absl::optional backoff_settings_; }; +using NackModule = RTC_DEPRECATED DEPRECATED_NackModule; + } // namespace webrtc -#endif // MODULES_VIDEO_CODING_NACK_MODULE_H_ +#endif // MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_ diff --git a/modules/video_coding/encoded_frame.cc b/modules/video_coding/encoded_frame.cc index 1e9e374c64..4638771b21 100644 --- a/modules/video_coding/encoded_frame.cc +++ b/modules/video_coding/encoded_frame.cc @@ -43,7 +43,6 @@ void VCMEncodedFrame::Reset() { _frameType = VideoFrameType::kVideoFrameDelta; _encodedWidth = 0; _encodedHeight = 0; - _completeFrame = false; _missingFrame = false; set_size(0); _codecSpecificInfo.codecType = kVideoCodecGeneric; @@ -135,22 +134,14 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { } case kVideoCodecH264: { _codecSpecificInfo.codecType = kVideoCodecH264; - - // The following H264 codec specific data are not used elsewhere. - // Instead they are read directly from the frame marking extension. - // These codec specific data structures should be removed - // when frame marking is used. 
- _codecSpecificInfo.codecSpecific.H264.temporal_idx = kNoTemporalIdx; - if (header->frame_marking.temporal_id != kNoTemporalIdx) { - _codecSpecificInfo.codecSpecific.H264.temporal_idx = - header->frame_marking.temporal_id; - _codecSpecificInfo.codecSpecific.H264.base_layer_sync = - header->frame_marking.base_layer_sync; - _codecSpecificInfo.codecSpecific.H264.idr_frame = - header->frame_marking.independent_frame; - } break; } +#ifndef DISABLE_H265 + case kVideoCodecH265: { + _codecSpecificInfo.codecType = kVideoCodecH265; + break; + } +#endif default: { _codecSpecificInfo.codecType = kVideoCodecGeneric; break; diff --git a/modules/video_coding/encoded_frame.h b/modules/video_coding/encoded_frame.h index 798c005e5d..a77d42eec7 100644 --- a/modules/video_coding/encoded_frame.h +++ b/modules/video_coding/encoded_frame.h @@ -17,10 +17,11 @@ #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { -class VCMEncodedFrame : protected EncodedImage { +class RTC_EXPORT VCMEncodedFrame : protected EncodedImage { public: VCMEncodedFrame(); VCMEncodedFrame(const VCMEncodedFrame&); @@ -33,15 +34,9 @@ class VCMEncodedFrame : protected EncodedImage { _renderTimeMs = renderTimeMs; } - /** - * Set the encoded frame size - */ - void SetEncodedSize(uint32_t width, uint32_t height) { - _encodedWidth = width; - _encodedHeight = height; - } + VideoPlayoutDelay PlayoutDelay() const { return playout_delay_; } - void SetPlayoutDelay(PlayoutDelay playout_delay) { + void SetPlayoutDelay(VideoPlayoutDelay playout_delay) { playout_delay_ = playout_delay; } @@ -55,6 +50,7 @@ class VCMEncodedFrame : protected EncodedImage { using EncodedImage::ColorSpace; using EncodedImage::data; using EncodedImage::GetEncodedData; + using EncodedImage::NtpTimeMs; using EncodedImage::PacketInfos; using EncodedImage::Retain; using 
EncodedImage::set_size; @@ -96,10 +92,6 @@ class VCMEncodedFrame : protected EncodedImage { */ EncodedImage::Timing video_timing() const { return timing_; } EncodedImage::Timing* video_timing_mutable() { return &timing_; } - /** - * True if this frame is complete, false otherwise - */ - bool Complete() const { return _completeFrame; } /** * True if there's a frame missing before this frame */ diff --git a/system_wrappers/source/event.cc b/modules/video_coding/event_wrapper.cc similarity index 61% rename from system_wrappers/source/event.cc rename to modules/video_coding/event_wrapper.cc index 0c4ce10157..e6a4752401 100644 --- a/system_wrappers/source/event.cc +++ b/modules/video_coding/event_wrapper.cc @@ -8,13 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "system_wrappers/include/event_wrapper.h" - -#if defined(_WIN32) -#include -#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) -#include -#endif +#include "modules/video_coding/event_wrapper.h" #include "rtc_base/event.h" @@ -29,11 +23,8 @@ class EventWrapperImpl : public EventWrapper { return true; } - EventTypeWrapper Wait(unsigned long max_time) override { - int to_wait = max_time == WEBRTC_EVENT_INFINITE - ? rtc::Event::kForever - : static_cast(max_time); - return event_.Wait(to_wait) ? kEventSignaled : kEventTimeout; + EventTypeWrapper Wait(int max_time_ms) override { + return event_.Wait(max_time_ms) ? kEventSignaled : kEventTimeout; } private: diff --git a/system_wrappers/include/event_wrapper.h b/modules/video_coding/event_wrapper.h similarity index 81% rename from system_wrappers/include/event_wrapper.h rename to modules/video_coding/event_wrapper.h index 989e7929b3..77ca7b506c 100644 --- a/system_wrappers/include/event_wrapper.h +++ b/modules/video_coding/event_wrapper.h @@ -8,14 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef SYSTEM_WRAPPERS_INCLUDE_EVENT_WRAPPER_H_ -#define SYSTEM_WRAPPERS_INCLUDE_EVENT_WRAPPER_H_ +#ifndef MODULES_VIDEO_CODING_EVENT_WRAPPER_H_ +#define MODULES_VIDEO_CODING_EVENT_WRAPPER_H_ namespace webrtc { enum EventTypeWrapper { kEventSignaled = 1, kEventTimeout = 2 }; -#define WEBRTC_EVENT_INFINITE 0xffffffff - class EventWrapper { public: // Factory method. Constructor disabled. @@ -39,11 +37,10 @@ class EventWrapper { // be released. It is possible that multiple (random) threads are released // Depending on timing. // - // |max_time| is the maximum time to wait in milliseconds or - // WEBRTC_EVENT_INFINITE to wait infinitely. - virtual EventTypeWrapper Wait(unsigned long max_time) = 0; + // |max_time_ms| is the maximum time to wait in milliseconds. + virtual EventTypeWrapper Wait(int max_time_ms) = 0; }; } // namespace webrtc -#endif // SYSTEM_WRAPPERS_INCLUDE_EVENT_WRAPPER_H_ +#endif // MODULES_VIDEO_CODING_EVENT_WRAPPER_H_ diff --git a/modules/video_coding/fec_controller_default.cc b/modules/video_coding/fec_controller_default.cc index 97919f5315..827c853541 100644 --- a/modules/video_coding/fec_controller_default.cc +++ b/modules/video_coding/fec_controller_default.cc @@ -20,7 +20,6 @@ #include "system_wrappers/include/field_trial.h" namespace webrtc { -using rtc::CritScope; const float kProtectionOverheadRateThreshold = 0.5; @@ -54,7 +53,7 @@ void FecControllerDefault::SetEncodingData(size_t width, size_t height, size_t num_temporal_layers, size_t max_payload_size) { - CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); loss_prot_logic_->UpdateFrameSize(width, height); loss_prot_logic_->UpdateNumLayers(num_temporal_layers); max_payload_size_ = max_payload_size; @@ -94,7 +93,7 @@ uint32_t FecControllerDefault::UpdateFecRates( FecProtectionParams delta_fec_params; FecProtectionParams key_fec_params; { - CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); loss_prot_logic_->UpdateBitRate(target_bitrate_kbps); 
loss_prot_logic_->UpdateRtt(round_trip_time_ms); // Update frame rate for the loss protection logic class: frame rate should @@ -175,7 +174,7 @@ void FecControllerDefault::SetProtectionMethod(bool enable_fec, } else if (enable_fec) { method = media_optimization::kFec; } - CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); loss_prot_logic_->SetMethod(method); } @@ -183,7 +182,7 @@ void FecControllerDefault::UpdateWithEncodedData( const size_t encoded_image_length, const VideoFrameType encoded_image_frametype) { const size_t encoded_length = encoded_image_length; - CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); if (encoded_length > 0) { const bool delta_frame = encoded_image_frametype != VideoFrameType::kVideoFrameKey; diff --git a/modules/video_coding/fec_controller_default.h b/modules/video_coding/fec_controller_default.h index 02c0ec0d37..6b9e8eb8e5 100644 --- a/modules/video_coding/fec_controller_default.h +++ b/modules/video_coding/fec_controller_default.h @@ -20,7 +20,7 @@ #include "api/fec_controller.h" #include "modules/video_coding/media_opt_util.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -54,10 +54,10 @@ class FecControllerDefault : public FecController { enum { kBitrateAverageWinMs = 1000 }; Clock* const clock_; VCMProtectionCallback* protection_callback_; - rtc::CriticalSection crit_sect_; + Mutex mutex_; std::unique_ptr loss_prot_logic_ - RTC_GUARDED_BY(crit_sect_); - size_t max_payload_size_ RTC_GUARDED_BY(crit_sect_); + RTC_GUARDED_BY(mutex_); + size_t max_payload_size_ RTC_GUARDED_BY(mutex_); RTC_DISALLOW_COPY_AND_ASSIGN(FecControllerDefault); const float overhead_threshold_; }; diff --git a/modules/video_coding/frame_buffer.cc b/modules/video_coding/frame_buffer.cc index 755acb2940..0f64ab1449 100644 --- a/modules/video_coding/frame_buffer.cc +++ 
b/modules/video_coding/frame_buffer.cc @@ -70,11 +70,6 @@ void VCMFrameBuffer::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) { gof_info.temporal_up_switch[idx]; } -bool VCMFrameBuffer::IsSessionComplete() const { - TRACE_EVENT0("webrtc", "VCMFrameBuffer::IsSessionComplete"); - return _sessionInfo.complete(); -} - // Insert packet VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet, int64_t timeInMs, @@ -98,15 +93,16 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet, } } + size_t oldSize = encoded_image_buffer_ ? encoded_image_buffer_->size() : 0; uint32_t requiredSizeBytes = size() + packet.sizeBytes + (packet.insertStartCode ? kH264StartCodeLengthBytes : 0); - if (requiredSizeBytes > capacity()) { + if (requiredSizeBytes > oldSize) { const uint8_t* prevBuffer = data(); const uint32_t increments = requiredSizeBytes / kBufferIncStepSizeBytes + (requiredSizeBytes % kBufferIncStepSizeBytes > 0); - const uint32_t newSize = capacity() + increments * kBufferIncStepSizeBytes; + const uint32_t newSize = oldSize + increments * kBufferIncStepSizeBytes; if (newSize > kMaxJBFrameSizeBytes) { RTC_LOG(LS_ERROR) << "Failed to insert packet due to frame being too " "big."; @@ -133,7 +129,9 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet, if (packet.sizeBytes > 0) CopyCodecSpecific(&packet.video_header); - int retVal = _sessionInfo.InsertPacket(packet, data(), frame_data); + int retVal = _sessionInfo.InsertPacket( + packet, encoded_image_buffer_ ? encoded_image_buffer_->data() : nullptr, + frame_data); if (retVal == -1) { return kSizeError; } else if (retVal == -2) { @@ -262,7 +260,6 @@ void VCMFrameBuffer::PrepareForDecode(bool continuous) { // Transfer frame information to EncodedFrame and create any codec // specific information. 
_frameType = _sessionInfo.FrameType(); - _completeFrame = _sessionInfo.complete(); _missingFrame = !continuous; } diff --git a/modules/video_coding/frame_buffer2.cc b/modules/video_coding/frame_buffer2.cc index 5239d6bd9d..afce787664 100644 --- a/modules/video_coding/frame_buffer2.cc +++ b/modules/video_coding/frame_buffer2.cc @@ -63,20 +63,26 @@ FrameBuffer::FrameBuffer(Clock* clock, last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs), add_rtt_to_playout_delay_( webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")), - rtt_mult_settings_(RttMultExperiment::GetRttMultValue()) {} + rtt_mult_settings_(RttMultExperiment::GetRttMultValue()) { + callback_checker_.Detach(); +} -FrameBuffer::~FrameBuffer() {} +FrameBuffer::~FrameBuffer() { + RTC_DCHECK_RUN_ON(&construction_checker_); +} void FrameBuffer::NextFrame( int64_t max_wait_time_ms, bool keyframe_required, rtc::TaskQueue* callback_queue, std::function, ReturnReason)> handler) { - RTC_DCHECK_RUN_ON(callback_queue); + RTC_DCHECK_RUN_ON(&callback_checker_); + RTC_DCHECK(callback_queue->IsCurrent()); TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame"); int64_t latest_return_time_ms = clock_->TimeInMilliseconds() + max_wait_time_ms; - rtc::CritScope lock(&crit_); + + MutexLock lock(&mutex_); if (stopped_) { return; } @@ -92,10 +98,11 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() { RTC_DCHECK(!callback_task_.Running()); int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds()); callback_task_ = RepeatingTaskHandle::DelayedStart( - callback_queue_->Get(), TimeDelta::ms(wait_ms), [this] { + callback_queue_->Get(), TimeDelta::Millis(wait_ms), [this] { + RTC_DCHECK_RUN_ON(&callback_checker_); // If this task has not been cancelled, we did not get any new frames // while waiting. Continue with frame delivery. - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (!frames_to_decode_.empty()) { // We have frames, deliver! 
frame_handler_(absl::WrapUnique(GetNextFrame()), kFrameFound); @@ -111,7 +118,7 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() { // means that the frame buffer was cleared between creation and // execution of this task. Continue waiting for the remaining time. int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds()); - return TimeDelta::ms(wait_ms); + return TimeDelta::Millis(wait_ms); } }); } @@ -211,6 +218,7 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) { } EncodedFrame* FrameBuffer::GetNextFrame() { + RTC_DCHECK_RUN_ON(&callback_checker_); int64_t now_ms = clock_->TimeInMilliseconds(); // TODO(ilnik): remove |frames_out| use frames_to_decode_ directly. std::vector frames_out; @@ -321,30 +329,32 @@ bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame, void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) { TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode"); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); protection_mode_ = mode; } -void FrameBuffer::Start() { - TRACE_EVENT0("webrtc", "FrameBuffer::Start"); - rtc::CritScope lock(&crit_); - stopped_ = false; -} - void FrameBuffer::Stop() { TRACE_EVENT0("webrtc", "FrameBuffer::Stop"); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); + if (stopped_) + return; stopped_ = true; + CancelCallback(); } void FrameBuffer::Clear() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ClearFramesAndHistory(); } +int FrameBuffer::Size() { + MutexLock lock(&mutex_); + return frames_.size(); +} + void FrameBuffer::UpdateRtt(int64_t rtt_ms) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); jitter_estimator_.UpdateRtt(rtt_ms); } @@ -366,9 +376,11 @@ bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const { } void FrameBuffer::CancelCallback() { + // Called from the callback queue or from within Stop(). 
frame_handler_ = {}; callback_task_.Stop(); callback_queue_ = nullptr; + callback_checker_.Detach(); } bool FrameBuffer::IsCompleteSuperFrame(const EncodedFrame& frame) { @@ -418,7 +430,7 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr frame) { TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame"); RTC_DCHECK(frame); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); const VideoLayerFrameId& id = frame->id; int64_t last_continuous_picture_id = @@ -491,10 +503,6 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr frame) { auto info = frames_.emplace(id, FrameInfo()).first; if (info->second.frame) { - RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" - << id.picture_id << ":" - << static_cast(id.spatial_layer) - << ") already inserted, dropping frame."; return last_continuous_picture_id; } @@ -520,7 +528,7 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr frame) { // to return from NextFrame. if (callback_queue_) { callback_queue_->PostTask([this] { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (!callback_task_.Running()) return; RTC_CHECK(frame_handler_); diff --git a/modules/video_coding/frame_buffer2.h b/modules/video_coding/frame_buffer2.h index 51f3820d31..2ed21c4f70 100644 --- a/modules/video_coding/frame_buffer2.h +++ b/modules/video_coding/frame_buffer2.h @@ -23,11 +23,11 @@ #include "modules/video_coding/inter_frame_delay.h" #include "modules/video_coding/jitter_estimator.h" #include "modules/video_coding/utility/decoded_frames_history.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/experiments/rtt_mult_experiment.h" #include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -49,6 +49,10 @@ class FrameBuffer { VCMTiming* 
timing, VCMReceiveStatisticsCallback* stats_callback); + FrameBuffer() = delete; + FrameBuffer(const FrameBuffer&) = delete; + FrameBuffer& operator=(const FrameBuffer&) = delete; + virtual ~FrameBuffer(); // Insert a frame into the frame buffer. Returns the picture id @@ -70,10 +74,6 @@ class FrameBuffer { // implemented. void SetProtectionMode(VCMVideoProtection mode); - // Start the frame buffer, has no effect if the frame buffer is started. - // The frame buffer is started upon construction. - void Start(); - // Stop the frame buffer, causing any sleeping thread in NextFrame to // return immediately. void Stop(); @@ -84,6 +84,8 @@ class FrameBuffer { // Clears the FrameBuffer, removing all the buffered frames. void Clear(); + int Size(); + private: struct FrameInfo { FrameInfo(); @@ -117,40 +119,40 @@ class FrameBuffer { // Check that the references of |frame| are valid. bool ValidReferences(const EncodedFrame& frame) const; - int64_t FindNextFrame(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); - EncodedFrame* GetNextFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + int64_t FindNextFrame(int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + EncodedFrame* GetNextFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void StartWaitForNextFrameOnQueue() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); - void CancelCallback() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + void StartWaitForNextFrameOnQueue() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void CancelCallback() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Update all directly dependent and indirectly dependent frames and mark // them as continuous if all their references has been fulfilled. void PropagateContinuity(FrameMap::iterator start) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Marks the frame as decoded and updates all directly dependent frames. 
void PropagateDecodability(const FrameInfo& info) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Update the corresponding FrameInfo of |frame| and all FrameInfos that // |frame| references. // Return false if |frame| will never be decodable, true otherwise. bool UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame, FrameMap::iterator info) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void UpdateJitterDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + void UpdateJitterDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void UpdateTimingFrameInfo() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + void UpdateTimingFrameInfo() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Checks if the superframe, which current frame belongs to, is complete. bool IsCompleteSuperFrame(const EncodedFrame& frame) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); bool HasBadRenderTiming(const EncodedFrame& frame, int64_t now_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // The cleaner solution would be to have the NextFrame function return a // vector of frames, but until the decoding pipeline can support decoding @@ -159,37 +161,38 @@ class FrameBuffer { EncodedFrame* CombineAndDeleteFrames( const std::vector& frames) const; + SequenceChecker construction_checker_; + SequenceChecker callback_checker_; + // Stores only undecoded frames. 
- FrameMap frames_ RTC_GUARDED_BY(crit_); - DecodedFramesHistory decoded_frames_history_ RTC_GUARDED_BY(crit_); + FrameMap frames_ RTC_GUARDED_BY(mutex_); + DecodedFramesHistory decoded_frames_history_ RTC_GUARDED_BY(mutex_); - rtc::CriticalSection crit_; + Mutex mutex_; Clock* const clock_; - rtc::TaskQueue* callback_queue_ RTC_GUARDED_BY(crit_); - RepeatingTaskHandle callback_task_ RTC_GUARDED_BY(crit_); + rtc::TaskQueue* callback_queue_ RTC_GUARDED_BY(mutex_); + RepeatingTaskHandle callback_task_ RTC_GUARDED_BY(mutex_); std::function, ReturnReason)> - frame_handler_ RTC_GUARDED_BY(crit_); - int64_t latest_return_time_ms_ RTC_GUARDED_BY(crit_); - bool keyframe_required_ RTC_GUARDED_BY(crit_); + frame_handler_ RTC_GUARDED_BY(mutex_); + int64_t latest_return_time_ms_ RTC_GUARDED_BY(mutex_); + bool keyframe_required_ RTC_GUARDED_BY(mutex_); - VCMJitterEstimator jitter_estimator_ RTC_GUARDED_BY(crit_); - VCMTiming* const timing_ RTC_GUARDED_BY(crit_); - VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(crit_); + VCMJitterEstimator jitter_estimator_ RTC_GUARDED_BY(mutex_); + VCMTiming* const timing_ RTC_GUARDED_BY(mutex_); + VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(mutex_); absl::optional last_continuous_frame_ - RTC_GUARDED_BY(crit_); - std::vector frames_to_decode_ RTC_GUARDED_BY(crit_); - bool stopped_ RTC_GUARDED_BY(crit_); - VCMVideoProtection protection_mode_ RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); + std::vector frames_to_decode_ RTC_GUARDED_BY(mutex_); + bool stopped_ RTC_GUARDED_BY(mutex_); + VCMVideoProtection protection_mode_ RTC_GUARDED_BY(mutex_); VCMReceiveStatisticsCallback* const stats_callback_; - int64_t last_log_non_decoded_ms_ RTC_GUARDED_BY(crit_); + int64_t last_log_non_decoded_ms_ RTC_GUARDED_BY(mutex_); const bool add_rtt_to_playout_delay_; // rtt_mult experiment settings. 
const absl::optional rtt_mult_settings_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameBuffer); }; } // namespace video_coding diff --git a/modules/video_coding/frame_buffer2_unittest.cc b/modules/video_coding/frame_buffer2_unittest.cc index 2c342d0b39..c05fe089c5 100644 --- a/modules/video_coding/frame_buffer2_unittest.cc +++ b/modules/video_coding/frame_buffer2_unittest.cc @@ -108,21 +108,26 @@ class FrameObjectFake : public EncodedFrame { class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback { public: - MOCK_METHOD3(OnCompleteFrame, - void(bool is_keyframe, - size_t size_bytes, - VideoContentType content_type)); - MOCK_METHOD1(OnDroppedFrames, void(uint32_t frames_dropped)); - MOCK_METHOD1(OnDiscardedPacketsUpdated, void(int discarded_packets)); - MOCK_METHOD1(OnFrameCountsUpdated, void(const FrameCounts& frame_counts)); - MOCK_METHOD6(OnFrameBufferTimingsUpdated, - void(int max_decode_ms, - int current_delay_ms, - int target_delay_ms, - int jitter_buffer_ms, - int min_playout_delay_ms, - int render_delay_ms)); - MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info)); + MOCK_METHOD(void, + OnCompleteFrame, + (bool is_keyframe, + size_t size_bytes, + VideoContentType content_type), + (override)); + MOCK_METHOD(void, OnDroppedFrames, (uint32_t frames_dropped), (override)); + MOCK_METHOD(void, + OnFrameBufferTimingsUpdated, + (int max_decode_ms, + int current_delay_ms, + int target_delay_ms, + int jitter_buffer_ms, + int min_playout_delay_ms, + int render_delay_ms), + (override)); + MOCK_METHOD(void, + OnTimingFrameInfoUpdated, + (const TimingFrameInfo& info), + (override)); }; class TestFrameBuffer2 : public ::testing::Test { @@ -135,7 +140,7 @@ class TestFrameBuffer2 : public ::testing::Test { TestFrameBuffer2() : trial_("WebRTC-AddRttToPlayoutDelay/Enabled/"), - time_controller_(Timestamp::seconds(0)), + time_controller_(Timestamp::Seconds(0)), time_task_queue_( time_controller_.GetTaskQueueFactory()->CreateTaskQueue( 
"extract queue", @@ -206,7 +211,7 @@ class TestFrameBuffer2 : public ::testing::Test { }); }); if (max_wait_time == 0) { - time_controller_.AdvanceTime(TimeDelta::ms(0)); + time_controller_.AdvanceTime(TimeDelta::Millis(0)); } } @@ -256,7 +261,7 @@ TEST_F(TestFrameBuffer2, WaitForFrame) { ExtractFrame(50); InsertFrame(pid, 0, ts, false, true, kFrameSize); - time_controller_.AdvanceTime(TimeDelta::ms(50)); + time_controller_.AdvanceTime(TimeDelta::Millis(50)); CheckFrame(0, pid, 0); } @@ -275,7 +280,7 @@ TEST_F(TestFrameBuffer2, ZeroPlayoutDelay) { VCMTiming timing(time_controller_.GetClock()); buffer_.reset( new FrameBuffer(time_controller_.GetClock(), &timing, &stats_callback_)); - const PlayoutDelay kPlayoutDelayMs = {0, 0}; + const VideoPlayoutDelay kPlayoutDelayMs = {0, 0}; std::unique_ptr test_frame(new FrameObjectFake()); test_frame->id.picture_id = 0; test_frame->SetPlayoutDelay(kPlayoutDelayMs); @@ -293,7 +298,7 @@ TEST_F(TestFrameBuffer2, DISABLED_OneUnorderedSuperFrame) { ExtractFrame(50); InsertFrame(pid, 1, ts, true, true, kFrameSize); InsertFrame(pid, 0, ts, false, false, kFrameSize); - time_controller_.AdvanceTime(TimeDelta::ms(0)); + time_controller_.AdvanceTime(TimeDelta::Millis(0)); CheckFrame(0, pid, 0); CheckFrame(1, pid, 1); @@ -310,10 +315,10 @@ TEST_F(TestFrameBuffer2, DISABLED_OneLayerStreamReordered) { ExtractFrame(50); InsertFrame(pid + i + 1, 0, ts + (i + 1) * kFps10, false, true, kFrameSize, pid + i); - time_controller_.AdvanceTime(TimeDelta::ms(kFps10)); + time_controller_.AdvanceTime(TimeDelta::Millis(kFps10)); InsertFrame(pid + i, 0, ts + i * kFps10, false, true, kFrameSize, pid + i - 1); - time_controller_.AdvanceTime(TimeDelta::ms(kFps10)); + time_controller_.AdvanceTime(TimeDelta::Millis(kFps10)); ExtractFrame(); CheckFrame(i, pid + i, 0); CheckFrame(i + 1, pid + i + 1, 0); @@ -352,7 +357,7 @@ TEST_F(TestFrameBuffer2, OneLayerStream) { InsertFrame(pid + i, 0, ts + i * kFps10, false, true, kFrameSize, pid + i - 1); ExtractFrame(); - 
time_controller_.AdvanceTime(TimeDelta::ms(kFps10)); + time_controller_.AdvanceTime(TimeDelta::Millis(kFps10)); CheckFrame(i, pid + i, 0); } } @@ -374,7 +379,7 @@ TEST_F(TestFrameBuffer2, DropTemporalLayerSlowDecoder) { for (int i = 0; i < 10; ++i) { ExtractFrame(); - time_controller_.AdvanceTime(TimeDelta::ms(70)); + time_controller_.AdvanceTime(TimeDelta::Millis(70)); } CheckFrame(0, pid, 0); @@ -400,7 +405,7 @@ TEST_F(TestFrameBuffer2, DropFramesIfSystemIsStalled) { ExtractFrame(); // Jump forward in time, simulating the system being stalled for some reason. - time_controller_.AdvanceTime(TimeDelta::ms(3) * kFps10); + time_controller_.AdvanceTime(TimeDelta::Millis(3) * kFps10); // Extract one more frame, expect second and third frame to be dropped. EXPECT_CALL(stats_callback_, OnDroppedFrames(2)).Times(1); ExtractFrame(); @@ -683,7 +688,7 @@ TEST_F(TestFrameBuffer2, HigherSpatialLayerNonDecodable) { InsertFrame(pid + 2, 0, ts + kFps10, false, false, kFrameSize, pid); InsertFrame(pid + 2, 1, ts + kFps10, true, true, kFrameSize, pid + 1); - time_controller_.AdvanceTime(TimeDelta::ms(1000)); + time_controller_.AdvanceTime(TimeDelta::Millis(1000)); // Frame pid+1 is decodable but too late. // In superframe pid+2 frame sid=0 is decodable, but frame sid=1 is not. // Incorrect implementation might skip pid+1 frame and output undecodable diff --git a/modules/video_coding/frame_dependencies_calculator.cc b/modules/video_coding/frame_dependencies_calculator.cc new file mode 100644 index 0000000000..6de5081b94 --- /dev/null +++ b/modules/video_coding/frame_dependencies_calculator.cc @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/frame_dependencies_calculator.h" + +#include + +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/container/inlined_vector.h" +#include "api/array_view.h" +#include "api/video/video_frame_type.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +absl::InlinedVector FrameDependenciesCalculator::FromBuffersUsage( + VideoFrameType frame_type, + int64_t frame_id, + rtc::ArrayView buffers_usage) { + absl::InlinedVector dependencies; + RTC_DCHECK_GT(buffers_usage.size(), 0); + for (const CodecBufferUsage& buffer_usage : buffers_usage) { + RTC_CHECK_GE(buffer_usage.id, 0); + if (buffers_.size() <= static_cast(buffer_usage.id)) { + buffers_.resize(buffer_usage.id + 1); + } + } + std::set direct_depenendencies; + std::set indirect_depenendencies; + if (frame_type == VideoFrameType::kVideoFrameDelta) { + for (const CodecBufferUsage& buffer_usage : buffers_usage) { + if (!buffer_usage.referenced) { + continue; + } + const BufferUsage& buffer = buffers_[buffer_usage.id]; + if (buffer.frame_id == absl::nullopt) { + RTC_LOG(LS_ERROR) << "Odd configuration: frame " << frame_id + << " references buffer #" << buffer_usage.id + << " that was never updated."; + continue; + } + direct_depenendencies.insert(*buffer.frame_id); + indirect_depenendencies.insert(buffer.dependencies.begin(), + buffer.dependencies.end()); + } + // Reduce references: if frame #3 depends on frame #2 and #1, and frame #2 + // depends on frame #1, then frame #3 needs to depend just on frame #2. + // Though this set diff removes only 1 level of indirection, it seems + // enough for all currently used structures. + absl::c_set_difference(direct_depenendencies, indirect_depenendencies, + std::back_inserter(dependencies)); + } + + // Update buffers. 
+ for (const CodecBufferUsage& buffer_usage : buffers_usage) { + if (!buffer_usage.updated) { + continue; + } + BufferUsage& buffer = buffers_[buffer_usage.id]; + buffer.frame_id = frame_id; + buffer.dependencies.assign(direct_depenendencies.begin(), + direct_depenendencies.end()); + } + + return dependencies; +} + +} // namespace webrtc diff --git a/modules/video_coding/frame_dependencies_calculator.h b/modules/video_coding/frame_dependencies_calculator.h new file mode 100644 index 0000000000..b70eddfc53 --- /dev/null +++ b/modules/video_coding/frame_dependencies_calculator.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_FRAME_DEPENDENCIES_CALCULATOR_H_ +#define MODULES_VIDEO_CODING_FRAME_DEPENDENCIES_CALCULATOR_H_ + +#include + +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/video/video_frame_type.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" + +namespace webrtc { + +// This class is thread compatible. +class FrameDependenciesCalculator { + public: + FrameDependenciesCalculator() = default; + FrameDependenciesCalculator(const FrameDependenciesCalculator&) = default; + FrameDependenciesCalculator& operator=(const FrameDependenciesCalculator&) = + default; + + // Calculates frame dependencies based on previous encoder buffer usage. 
+ absl::InlinedVector FromBuffersUsage( + VideoFrameType frame_type, + int64_t frame_id, + rtc::ArrayView buffers_usage); + + private: + struct BufferUsage { + absl::optional frame_id; + absl::InlinedVector dependencies; + }; + + absl::InlinedVector buffers_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_FRAME_DEPENDENCIES_CALCULATOR_H_ diff --git a/modules/video_coding/frame_dependencies_calculator_unittest.cc b/modules/video_coding/frame_dependencies_calculator_unittest.cc new file mode 100644 index 0000000000..81f774b227 --- /dev/null +++ b/modules/video_coding/frame_dependencies_calculator_unittest.cc @@ -0,0 +1,153 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/frame_dependencies_calculator.h" + +#include "api/video/video_frame_type.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; +using ::testing::UnorderedElementsAre; + +constexpr VideoFrameType kVideoFrameKey = VideoFrameType::kVideoFrameKey; +constexpr VideoFrameType kVideoFrameDelta = VideoFrameType::kVideoFrameDelta; + +constexpr CodecBufferUsage ReferenceAndUpdate(int id) { + return CodecBufferUsage(id, /*referenced=*/true, /*updated=*/true); +} +constexpr CodecBufferUsage Reference(int id) { + return CodecBufferUsage(id, /*referenced=*/true, /*updated=*/false); +} +constexpr CodecBufferUsage Update(int id) { + return CodecBufferUsage(id, /*referenced=*/false, /*updated=*/true); +} + +TEST(FrameDependenciesCalculatorTest, SingleLayer) { + CodecBufferUsage pattern[] = {ReferenceAndUpdate(0)}; + FrameDependenciesCalculator calculator; + + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameKey, /*frame_id=*/1, pattern), + IsEmpty()); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/3, pattern), + ElementsAre(1)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/6, pattern), + ElementsAre(3)); +} + +TEST(FrameDependenciesCalculatorTest, TwoTemporalLayers) { + // Shortened 4-frame pattern: + // T1: 2---4 6---8 ... + // / / / / + // T0: 1---3---5---7 ... 
+ CodecBufferUsage pattern0[] = {ReferenceAndUpdate(0)}; + CodecBufferUsage pattern1[] = {Reference(0), Update(1)}; + CodecBufferUsage pattern2[] = {ReferenceAndUpdate(0)}; + CodecBufferUsage pattern3[] = {Reference(0), Reference(1)}; + FrameDependenciesCalculator calculator; + + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameKey, /*frame_id=*/1, pattern0), + IsEmpty()); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/2, pattern1), + ElementsAre(1)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/3, pattern2), + ElementsAre(1)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/4, pattern3), + UnorderedElementsAre(2, 3)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/5, pattern0), + ElementsAre(3)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/6, pattern1), + ElementsAre(5)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/7, pattern2), + ElementsAre(5)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/8, pattern3), + UnorderedElementsAre(6, 7)); +} + +TEST(FrameDependenciesCalculatorTest, ThreeTemporalLayers4FramePattern) { + // T2: 2---4 6---8 ... + // / / / / + // T1: | 3 | 7 ... + // /_/ /_/ + // T0: 1-------5----- ... 
+ CodecBufferUsage pattern0[] = {ReferenceAndUpdate(0)}; + CodecBufferUsage pattern1[] = {Reference(0), Update(2)}; + CodecBufferUsage pattern2[] = {Reference(0), Update(1)}; + CodecBufferUsage pattern3[] = {Reference(0), Reference(1), Reference(2)}; + FrameDependenciesCalculator calculator; + + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameKey, /*frame_id=*/1, pattern0), + IsEmpty()); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/2, pattern1), + ElementsAre(1)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/3, pattern2), + ElementsAre(1)); + // Note that frame#4 references buffer#0 that is updated by frame#1, + // yet there is no direct dependency from frame#4 to frame#1. + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/4, pattern3), + UnorderedElementsAre(2, 3)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/5, pattern0), + ElementsAre(1)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/6, pattern1), + ElementsAre(5)); +} + +TEST(FrameDependenciesCalculatorTest, SimulcastWith2Layers) { + // S1: 2---4---6- ... + // + // S0: 1---3---5- ... 
+ CodecBufferUsage pattern0[] = {ReferenceAndUpdate(0)}; + CodecBufferUsage pattern1[] = {ReferenceAndUpdate(1)}; + FrameDependenciesCalculator calculator; + + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameKey, /*frame_id=*/1, pattern0), + IsEmpty()); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameKey, /*frame_id=*/2, pattern1), + IsEmpty()); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/3, pattern0), + ElementsAre(1)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/4, pattern1), + ElementsAre(2)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/5, pattern0), + ElementsAre(3)); + EXPECT_THAT( + calculator.FromBuffersUsage(kVideoFrameDelta, /*frame_id=*/6, pattern1), + ElementsAre(4)); +} + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/frame_object.cc b/modules/video_coding/frame_object.cc index 682ce17f9c..25fd23234c 100644 --- a/modules/video_coding/frame_object.cc +++ b/modules/video_coding/frame_object.cc @@ -17,7 +17,6 @@ #include "api/video/encoded_image.h" #include "api/video/video_timing.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" namespace webrtc { namespace video_coding { @@ -37,15 +36,14 @@ RtpFrameObject::RtpFrameObject( VideoContentType content_type, const RTPVideoHeader& video_header, const absl::optional& color_space, - const absl::optional& generic_descriptor, RtpPacketInfos packet_infos, rtc::scoped_refptr image_buffer) - : first_seq_num_(first_seq_num), + : image_buffer_(image_buffer), + first_seq_num_(first_seq_num), last_seq_num_(last_seq_num), last_packet_received_time_(last_packet_received_time), times_nacked_(times_nacked) { rtp_video_header_ = video_header; - rtp_generic_frame_descriptor_ = generic_descriptor; // EncodedFrame members codec_type_ = codec; @@ -53,7 +51,6 @@ RtpFrameObject::RtpFrameObject( // TODO(philipel): Remove when encoded image is replaced by EncodedFrame. 
// VCMEncodedFrame members CopyCodecSpecific(&rtp_video_header_); - _completeFrame = true; _payloadType = payload_type; SetTimestamp(rtp_timestamp); ntp_time_ms_ = ntp_time_ms; @@ -63,7 +60,7 @@ RtpFrameObject::RtpFrameObject( // as of the first packet's. SetPlayoutDelay(rtp_video_header_.playout_delay); - SetEncodedData(std::move(image_buffer)); + SetEncodedData(image_buffer_); _encodedWidth = rtp_video_header_.width; _encodedHeight = rtp_video_header_.height; @@ -131,14 +128,5 @@ const RTPVideoHeader& RtpFrameObject::GetRtpVideoHeader() const { return rtp_video_header_; } -const absl::optional& -RtpFrameObject::GetGenericFrameDescriptor() const { - return rtp_generic_frame_descriptor_; -} - -const FrameMarking& RtpFrameObject::GetFrameMarking() const { - return rtp_video_header_.frame_marking; -} - } // namespace video_coding } // namespace webrtc diff --git a/modules/video_coding/frame_object.h b/modules/video_coding/frame_object.h index b3cee20ae4..d812b8fd2e 100644 --- a/modules/video_coding/frame_object.h +++ b/modules/video_coding/frame_object.h @@ -13,32 +13,29 @@ #include "absl/types/optional.h" #include "api/video/encoded_frame.h" -#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" namespace webrtc { namespace video_coding { class RtpFrameObject : public EncodedFrame { public: - RtpFrameObject( - uint16_t first_seq_num, - uint16_t last_seq_num, - bool markerBit, - int times_nacked, - int64_t first_packet_received_time, - int64_t last_packet_received_time, - uint32_t rtp_timestamp, - int64_t ntp_time_ms, - const VideoSendTiming& timing, - uint8_t payload_type, - VideoCodecType codec, - VideoRotation rotation, - VideoContentType content_type, - const RTPVideoHeader& video_header, - const absl::optional& color_space, - const absl::optional& generic_descriptor, - RtpPacketInfos packet_infos, - rtc::scoped_refptr image_buffer); + RtpFrameObject(uint16_t first_seq_num, + uint16_t last_seq_num, + bool markerBit, + int times_nacked, + int64_t 
first_packet_received_time, + int64_t last_packet_received_time, + uint32_t rtp_timestamp, + int64_t ntp_time_ms, + const VideoSendTiming& timing, + uint8_t payload_type, + VideoCodecType codec, + VideoRotation rotation, + VideoContentType content_type, + const RTPVideoHeader& video_header, + const absl::optional& color_space, + RtpPacketInfos packet_infos, + rtc::scoped_refptr image_buffer); ~RtpFrameObject() override; uint16_t first_seq_num() const; @@ -50,13 +47,13 @@ class RtpFrameObject : public EncodedFrame { int64_t RenderTime() const override; bool delayed_by_retransmission() const override; const RTPVideoHeader& GetRtpVideoHeader() const; - const absl::optional& GetGenericFrameDescriptor() - const; - const FrameMarking& GetFrameMarking() const; + + uint8_t* mutable_data() { return image_buffer_->data(); } private: + // Reference for mutable access. + rtc::scoped_refptr image_buffer_; RTPVideoHeader rtp_video_header_; - absl::optional rtp_generic_frame_descriptor_; VideoCodecType codec_type_; uint16_t first_seq_num_; uint16_t last_seq_num_; diff --git a/modules/video_coding/generic_decoder.cc b/modules/video_coding/generic_decoder.cc index 100686d336..79057926fc 100644 --- a/modules/video_coding/generic_decoder.cc +++ b/modules/video_coding/generic_decoder.cc @@ -13,6 +13,7 @@ #include #include +#include #include "api/video/video_timing.h" #include "modules/video_coding/include/video_error_codes.h" @@ -31,12 +32,18 @@ VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming* timing, : _clock(clock), _timing(timing), _timestampMap(kDecoderFrameMemoryLength), - _extra_decode_time("t", absl::nullopt) { + _extra_decode_time("t", absl::nullopt), + low_latency_renderer_enabled_("enabled", true), + low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", + true) { ntp_offset_ = _clock->CurrentNtpInMilliseconds() - _clock->TimeInMilliseconds(); ParseFieldTrial({&_extra_decode_time}, field_trial::FindFullName("WebRTC-SlowDownDecoder")); + 
ParseFieldTrial({&low_latency_renderer_enabled_, + &low_latency_renderer_include_predecode_buffer_}, + field_trial::FindFullName("WebRTC-LowLatencyRenderer")); } VCMDecodedFrameCallback::~VCMDecodedFrameCallback() {} @@ -57,6 +64,8 @@ VCMReceiveCallback* VCMDecodedFrameCallback::UserReceiveCallback() { } int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage) { + // This function may be called on the decode TaskQueue, but may also be called + // on an OS provided queue such as on iOS (see e.g. b/153465112). return Decoded(decodedImage, -1); } @@ -83,9 +92,11 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). VCMFrameInformation* frameInfo; + int timestamp_map_size = 0; { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); frameInfo = _timestampMap.Pop(decodedImage.timestamp()); + timestamp_map_size = _timestampMap.Size(); } if (frameInfo == NULL) { @@ -99,13 +110,30 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, decodedImage.set_packet_infos(frameInfo->packet_infos); decodedImage.set_rotation(frameInfo->rotation); - const Timestamp now = _clock->CurrentTime(); - RTC_DCHECK(frameInfo->decodeStart); - if (!decode_time_ms) { - decode_time_ms = (now - *frameInfo->decodeStart).ms(); + if (low_latency_renderer_enabled_ && frameInfo->playout_delay.min_ms == 0 && + frameInfo->playout_delay.max_ms > 0) { + absl::optional max_composition_delay_in_frames = + _timing->MaxCompositionDelayInFrames(); + if (max_composition_delay_in_frames) { + // Subtract frames that are in flight. 
+ if (low_latency_renderer_include_predecode_buffer_) { + *max_composition_delay_in_frames -= timestamp_map_size; + *max_composition_delay_in_frames = + std::max(0, *max_composition_delay_in_frames); + } + decodedImage.set_max_composition_delay_in_frames( + max_composition_delay_in_frames); + } } - _timing->StopDecodeTimer(*decode_time_ms, now.ms()); - decodedImage.set_processing_time({*frameInfo->decodeStart, now}); + + RTC_DCHECK(frameInfo->decodeStart); + const Timestamp now = _clock->CurrentTime(); + const TimeDelta decode_time = decode_time_ms + ? TimeDelta::Millis(*decode_time_ms) + : now - *frameInfo->decodeStart; + _timing->StopDecodeTimer(decode_time.ms(), now.ms()); + decodedImage.set_processing_time( + {*frameInfo->decodeStart, *frameInfo->decodeStart + decode_time}); // Report timing information. TimingFrameInfo timing_frame_info; @@ -159,7 +187,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, decodedImage.set_timestamp_us(frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec); - _receiveCallback->FrameToRender(decodedImage, qp, *decode_time_ms, + _receiveCallback->FrameToRender(decodedImage, qp, decode_time.ms(), frameInfo->content_type); } @@ -170,12 +198,12 @@ void VCMDecodedFrameCallback::OnDecoderImplementationName( void VCMDecodedFrameCallback::Map(uint32_t timestamp, VCMFrameInformation* frameInfo) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); _timestampMap.Add(timestamp, frameInfo); } int32_t VCMDecodedFrameCallback::Pop(uint32_t timestamp) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (_timestampMap.Pop(timestamp) == NULL) { return VCM_GENERAL_ERROR; } @@ -209,7 +237,10 @@ int32_t VCMGenericDecoder::InitDecode(const VideoCodec* settings, TRACE_EVENT0("webrtc", "VCMGenericDecoder::InitDecode"); _codecType = settings->codecType; - return decoder_->InitDecode(settings, numberOfCores); + int err = decoder_->InitDecode(settings, numberOfCores); + implementation_name_ = decoder_->ImplementationName(); + 
RTC_LOG(LS_INFO) << "Decoder implementation: " << implementation_name_; + return err; } int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { @@ -218,6 +249,7 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { _frameInfos[_nextFrameInfoIdx].decodeStart = now; _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs(); _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation(); + _frameInfos[_nextFrameInfoIdx].playout_delay = frame.PlayoutDelay(); _frameInfos[_nextFrameInfoIdx].timing = frame.video_timing(); _frameInfos[_nextFrameInfoIdx].ntp_time_ms = frame.EncodedImage().ntp_time_ms_; @@ -237,8 +269,13 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength; int32_t ret = decoder_->Decode(frame.EncodedImage(), frame.MissingFrame(), frame.RenderTimeMs()); - - _callback->OnDecoderImplementationName(decoder_->ImplementationName()); + const char* new_implementation_name = decoder_->ImplementationName(); + if (new_implementation_name != implementation_name_) { + implementation_name_ = new_implementation_name; + RTC_LOG(LS_INFO) << "Changed decoder implementation to: " + << new_implementation_name; + } + _callback->OnDecoderImplementationName(implementation_name_.c_str()); if (ret < WEBRTC_VIDEO_CODEC_OK) { RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp " << frame.Timestamp() << ", error code: " << ret; diff --git a/modules/video_coding/generic_decoder.h b/modules/video_coding/generic_decoder.h index 4b4d83ecd5..8481fdc15d 100644 --- a/modules/video_coding/generic_decoder.h +++ b/modules/video_coding/generic_decoder.h @@ -12,14 +12,15 @@ #define MODULES_VIDEO_CODING_GENERIC_DECODER_H_ #include +#include #include "api/units/time_delta.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/include/video_codec_interface.h" #include 
"modules/video_coding/timestamp_map.h" #include "modules/video_coding/timing.h" -#include "rtc_base/critical_section.h" #include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" namespace webrtc { @@ -34,6 +35,7 @@ struct VCMFrameInformation { void* userData; VideoRotation rotation; VideoContentType content_type; + PlayoutDelay playout_delay; EncodedImage::Timing timing; int64_t ntp_time_ms; RtpPacketInfos packet_infos; @@ -69,11 +71,21 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { // from the same thread, and therfore a lock is not required to access it. VCMReceiveCallback* _receiveCallback = nullptr; VCMTiming* _timing; - rtc::CriticalSection lock_; + Mutex lock_; VCMTimestampMap _timestampMap RTC_GUARDED_BY(lock_); int64_t ntp_offset_; // Set by the field trial WebRTC-SlowDownDecoder to simulate a slow decoder. FieldTrialOptional _extra_decode_time; + + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter |enabled| + // determines if the low-latency renderer algorithm should be used for the + // case min playout delay=0 and max playout delay>0. + FieldTrialParameter low_latency_renderer_enabled_; + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter + // |include_predecode_buffer| determines if the predecode buffer should be + // taken into account when calculating maximum number of frames in composition + // queue. 
+ FieldTrialParameter low_latency_renderer_include_predecode_buffer_; }; class VCMGenericDecoder { @@ -112,6 +124,7 @@ class VCMGenericDecoder { VideoCodecType _codecType; const bool _isExternal; VideoContentType _last_keyframe_content_type; + std::string implementation_name_; }; } // namespace webrtc diff --git a/modules/video_coding/generic_decoder_unittest.cc b/modules/video_coding/generic_decoder_unittest.cc index 3e07a2a81c..a4cc5b0ded 100644 --- a/modules/video_coding/generic_decoder_unittest.cc +++ b/modules/video_coding/generic_decoder_unittest.cc @@ -16,8 +16,8 @@ #include "api/task_queue/default_task_queue_factory.h" #include "common_video/test/utilities.h" #include "modules/video_coding/timing.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" #include "test/fake_decoder.h" #include "test/gmock.h" @@ -33,7 +33,7 @@ class ReceiveCallback : public VCMReceiveCallback { int32_t decode_time_ms, VideoContentType content_type) override { { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); last_frame_ = videoFrame; } received_frame_event_.Set(); @@ -41,13 +41,13 @@ class ReceiveCallback : public VCMReceiveCallback { } absl::optional GetLastFrame() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); return last_frame_; } absl::optional WaitForFrame(int64_t wait_ms) { if (received_frame_event_.Wait(wait_ms)) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); return last_frame_; } else { return absl::nullopt; @@ -55,7 +55,7 @@ class ReceiveCallback : public VCMReceiveCallback { } private: - rtc::CriticalSection lock_; + Mutex lock_; rtc::Event received_frame_event_; absl::optional last_frame_ RTC_GUARDED_BY(lock_); }; @@ -115,5 +115,29 @@ TEST_F(GenericDecoderTest, PassesPacketInfosForDelayedDecoders) { EXPECT_EQ(decoded_frame->packet_infos().size(), 3U); } +TEST_F(GenericDecoderTest, MaxCompositionDelayNotSetByDefault) { + VCMEncodedFrame 
encoded_frame; + generic_decoder_.Decode(encoded_frame, clock_.CurrentTime()); + absl::optional decoded_frame = user_callback_.WaitForFrame(10); + ASSERT_TRUE(decoded_frame.has_value()); + EXPECT_FALSE(decoded_frame->max_composition_delay_in_frames()); +} + +TEST_F(GenericDecoderTest, MaxCompositionDelayActivatedByPlayoutDelay) { + VCMEncodedFrame encoded_frame; + // VideoReceiveStream2 would set MaxCompositionDelayInFrames if playout delay + // is specified as X,Y, where X=0, Y>0. + const VideoPlayoutDelay kPlayoutDelay = {0, 50}; + constexpr int kMaxCompositionDelayInFrames = 3; // ~50 ms at 60 fps. + encoded_frame.SetPlayoutDelay(kPlayoutDelay); + timing_.SetMaxCompositionDelayInFrames( + absl::make_optional(kMaxCompositionDelayInFrames)); + generic_decoder_.Decode(encoded_frame, clock_.CurrentTime()); + absl::optional decoded_frame = user_callback_.WaitForFrame(10); + ASSERT_TRUE(decoded_frame.has_value()); + EXPECT_EQ(kMaxCompositionDelayInFrames, + decoded_frame->max_composition_delay_in_frames()); +} + } // namespace video_coding } // namespace webrtc diff --git a/modules/video_coding/h264_sps_pps_tracker.cc b/modules/video_coding/h264_sps_pps_tracker.cc index 3965b28e8e..4becdb7608 100644 --- a/modules/video_coding/h264_sps_pps_tracker.cc +++ b/modules/video_coding/h264_sps_pps_tracker.cc @@ -49,6 +49,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream( RTPVideoHeader* video_header) { RTC_DCHECK(video_header); RTC_DCHECK(video_header->codec == kVideoCodecH264); + RTC_DCHECK_GT(bitstream.size(), 0); auto& h264_header = absl::get(video_header->video_type_header); @@ -128,7 +129,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream( if (h264_header.packetization_type == kH264StapA) { const uint8_t* nalu_ptr = bitstream.data() + 1; - while (nalu_ptr < bitstream.data() + bitstream.size()) { + while (nalu_ptr < bitstream.data() + bitstream.size() - 1) { RTC_DCHECK(video_header->is_first_packet_in_frame); 
required_size += sizeof(start_code_h264); @@ -180,7 +181,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream( // Copy the rest of the bitstream and insert start codes. if (h264_header.packetization_type == kH264StapA) { const uint8_t* nalu_ptr = bitstream.data() + 1; - while (nalu_ptr < bitstream.data() + bitstream.size()) { + while (nalu_ptr < bitstream.data() + bitstream.size() - 1) { fixed.bitstream.AppendData(start_code_h264); // The first two bytes describe the length of a segment. diff --git a/modules/video_coding/h265_vps_sps_pps_tracker.cc b/modules/video_coding/h265_vps_sps_pps_tracker.cc new file mode 100644 index 0000000000..84a6c35771 --- /dev/null +++ b/modules/video_coding/h265_vps_sps_pps_tracker.cc @@ -0,0 +1,315 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/h265_vps_sps_pps_tracker.h" + +#include +#include +#include + +#include "absl/types/variant.h" +#include "common_video/h264/h264_common.h" +#include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_pps_parser.h" +#include "common_video/h265/h265_sps_parser.h" +#include "common_video/h265/h265_vps_parser.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/h265/include/h265_globals.h" +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/packet_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace video_coding { + +namespace { +const uint8_t start_code_h265[] = {0, 0, 0, 1}; +} // namespace + +H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream( + rtc::ArrayView bitstream, + RTPVideoHeader* video_header) { + RTC_DCHECK(video_header); + RTC_DCHECK(video_header->codec == kVideoCodecH265); + + auto& h265_header = + absl::get(video_header->video_type_header); + + bool append_vps_sps_pps = false; + auto vps = vps_data_.end(); + auto sps = sps_data_.end(); + auto pps = pps_data_.end(); + + for (size_t i = 0; i < h265_header.nalus_length; ++i) { + const H265NaluInfo& nalu = h265_header.nalus[i]; + switch (nalu.type) { + case H265::NaluType::kVps: { + vps_data_[nalu.vps_id].size = 0; + break; + } + case H265::NaluType::kSps: { + sps_data_[nalu.sps_id].vps_id = nalu.vps_id; + sps_data_[nalu.sps_id].width = video_header->width; + sps_data_[nalu.sps_id].height = video_header->height; + break; + } + case H265::NaluType::kPps: { + pps_data_[nalu.pps_id].sps_id = nalu.sps_id; + break; + } + case H265::NaluType::kIdrWRadl: + case H265::NaluType::kIdrNLp: + case H265::NaluType::kCra: { + // If this is the first packet of an IDR, make sure we have the required + // SPS/PPS and also calculate how much extra space we need in the buffer + // to prepend the SPS/PPS to the 
bitstream with start codes. + if (video_header->is_first_packet_in_frame) { + if (nalu.pps_id == -1) { + RTC_LOG(LS_WARNING) << "No PPS id in IDR nalu."; + return {kRequestKeyframe}; + } + + pps = pps_data_.find(nalu.pps_id); + if (pps == pps_data_.end()) { + RTC_LOG(LS_WARNING) + << "No PPS with id " << nalu.pps_id << " received"; + return {kRequestKeyframe}; + } + + sps = sps_data_.find(pps->second.sps_id); + if (sps == sps_data_.end()) { + RTC_LOG(LS_WARNING) + << "No SPS with id " << pps->second.sps_id << " received"; + return {kRequestKeyframe}; + } + + vps = vps_data_.find(sps->second.vps_id); + if (vps == vps_data_.end()) { + RTC_LOG(LS_WARNING) + << "No VPS with id " << sps->second.vps_id << " received"; + return {kRequestKeyframe}; + } + + // Since the first packet of every keyframe should have its width and + // height set we set it here in the case of it being supplied out of + // band. + video_header->width = sps->second.width; + video_header->height = sps->second.height; + + // If the VPS/SPS/PPS was supplied out of band then we will have saved + // the actual bitstream in |data|. + // This branch is not verified. + if (vps->second.data && sps->second.data && pps->second.data) { + RTC_DCHECK_GT(vps->second.size, 0); + RTC_DCHECK_GT(sps->second.size, 0); + RTC_DCHECK_GT(pps->second.size, 0); + append_vps_sps_pps = true; + } + } + break; + } + default: + break; + } + } + + RTC_CHECK(!append_vps_sps_pps || + (sps != sps_data_.end() && pps != pps_data_.end())); + + // Calculate how much space we need for the rest of the bitstream. 
+ size_t required_size = 0; + + if (append_vps_sps_pps) { + required_size += vps->second.size + sizeof(start_code_h265); + required_size += sps->second.size + sizeof(start_code_h265); + required_size += pps->second.size + sizeof(start_code_h265); + } + + if (h265_header.packetization_type == kH265AP) { + const uint8_t* nalu_ptr = bitstream.data() + 1; + while (nalu_ptr < bitstream.data() + bitstream.size()) { + RTC_DCHECK(video_header->is_first_packet_in_frame); + required_size += sizeof(start_code_h265); + + // The first two bytes describe the length of a segment. + uint16_t segment_length = nalu_ptr[0] << 8 | nalu_ptr[1]; + nalu_ptr += 2; + + required_size += segment_length; + nalu_ptr += segment_length; + } + } else { + // TODO: in h.264 this is "h264_header.nalus_length > 0" + if (video_header->is_first_packet_in_frame) + required_size += sizeof(start_code_h265); + required_size += bitstream.size(); + } + + // Then we copy to the new buffer. + H265VpsSpsPpsTracker::FixedBitstream fixed; + fixed.bitstream.EnsureCapacity(required_size); + + if (append_vps_sps_pps) { + // Insert VPS. + fixed.bitstream.AppendData(start_code_h265); + fixed.bitstream.AppendData(vps->second.data.get(), vps->second.size); + + // Insert SPS. + fixed.bitstream.AppendData(start_code_h265); + fixed.bitstream.AppendData(sps->second.data.get(), sps->second.size); + + // Insert PPS. + fixed.bitstream.AppendData(start_code_h265); + fixed.bitstream.AppendData(pps->second.data.get(), pps->second.size); + + // Update codec header to reflect the newly added SPS and PPS. 
+ H265NaluInfo vps_info; + vps_info.type = H265::NaluType::kVps; + vps_info.vps_id = vps->first; + vps_info.sps_id = -1; + vps_info.pps_id = -1; + H265NaluInfo sps_info; + sps_info.type = H265::NaluType::kSps; + sps_info.vps_id = vps->first; + sps_info.sps_id = sps->first; + sps_info.pps_id = -1; + H265NaluInfo pps_info; + pps_info.type = H265::NaluType::kPps; + pps_info.vps_id = vps->first; + pps_info.sps_id = sps->first; + pps_info.pps_id = pps->first; + if (h265_header.nalus_length + 2 <= kMaxNalusPerPacket) { + h265_header.nalus[h265_header.nalus_length++] = vps_info; + h265_header.nalus[h265_header.nalus_length++] = sps_info; + h265_header.nalus[h265_header.nalus_length++] = pps_info; + } else { + RTC_LOG(LS_WARNING) << "Not enough space in H.265 codec header to insert " + "SPS/PPS provided out-of-band."; + } + } + + // Copy the rest of the bitstream and insert start codes. + if (h265_header.packetization_type == kH265AP) { + const uint8_t* nalu_ptr = bitstream.data() + 1; + while (nalu_ptr < bitstream.data() + bitstream.size()) { + fixed.bitstream.AppendData(start_code_h265); + + // The first two bytes describe the length of a segment. 
+ uint16_t segment_length = nalu_ptr[0] << 8 | nalu_ptr[1]; + nalu_ptr += 2; + + size_t copy_end = nalu_ptr - bitstream.data() + segment_length; + if (copy_end > bitstream.size()) { + return {kDrop}; + } + + fixed.bitstream.AppendData(nalu_ptr, segment_length); + nalu_ptr += segment_length; + } + } else { + // For h.264 it is "h264_header.nalus_length > 0" + if (video_header->is_first_packet_in_frame) { + fixed.bitstream.AppendData(start_code_h265); + } + fixed.bitstream.AppendData(bitstream.data(), bitstream.size()); + } + + fixed.action = kInsert; + return fixed; +} + +void H265VpsSpsPpsTracker::InsertVpsSpsPpsNalus( + const std::vector& vps, + const std::vector& sps, + const std::vector& pps) { + constexpr size_t kNaluHeaderOffset = 1; + if (vps.size() < kNaluHeaderOffset) { + RTC_LOG(LS_WARNING) << "VPS size " << vps.size() << " is smaller than " + << kNaluHeaderOffset; + return; + } + if ((vps[0] & 0x7e) >> 1 != H265::NaluType::kVps) { + RTC_LOG(LS_WARNING) << "VPS Nalu header missing"; + return; + } + if (sps.size() < kNaluHeaderOffset) { + RTC_LOG(LS_WARNING) << "SPS size " << sps.size() << " is smaller than " + << kNaluHeaderOffset; + return; + } + if ((sps[0] & 0x7e) >> 1 != H265::NaluType::kSps) { + RTC_LOG(LS_WARNING) << "SPS Nalu header missing"; + return; + } + if (pps.size() < kNaluHeaderOffset) { + RTC_LOG(LS_WARNING) << "PPS size " << pps.size() << " is smaller than " + << kNaluHeaderOffset; + return; + } + if ((pps[0] & 0x7e) >> 1 != H265::NaluType::kPps) { + RTC_LOG(LS_WARNING) << "PPS Nalu header missing"; + return; + } + absl::optional parsed_vps = H265VpsParser::ParseVps( + vps.data() + kNaluHeaderOffset, vps.size() - kNaluHeaderOffset); + absl::optional parsed_sps = H265SpsParser::ParseSps( + sps.data() + kNaluHeaderOffset, sps.size() - kNaluHeaderOffset); + absl::optional parsed_pps = H265PpsParser::ParsePps( + pps.data() + kNaluHeaderOffset, pps.size() - kNaluHeaderOffset); + + if (!parsed_vps) { + RTC_LOG(LS_WARNING) << "Failed to parse 
VPS."; + } + + if (!parsed_sps) { + RTC_LOG(LS_WARNING) << "Failed to parse SPS."; + } + + if (!parsed_pps) { + RTC_LOG(LS_WARNING) << "Failed to parse PPS."; + } + + if (!parsed_vps || !parsed_pps || !parsed_sps) { + return; + } + + VpsInfo vps_info; + vps_info.size = vps.size(); + uint8_t* vps_data = new uint8_t[vps_info.size]; + memcpy(vps_data, vps.data(), vps_info.size); + vps_info.data.reset(vps_data); + vps_data_[parsed_vps->id] = std::move(vps_info); + + SpsInfo sps_info; + sps_info.size = sps.size(); + sps_info.width = parsed_sps->width; + sps_info.height = parsed_sps->height; + sps_info.vps_id = parsed_sps->vps_id; + uint8_t* sps_data = new uint8_t[sps_info.size]; + memcpy(sps_data, sps.data(), sps_info.size); + sps_info.data.reset(sps_data); + sps_data_[parsed_sps->id] = std::move(sps_info); + + PpsInfo pps_info; + pps_info.size = pps.size(); + pps_info.sps_id = parsed_pps->sps_id; + uint8_t* pps_data = new uint8_t[pps_info.size]; + memcpy(pps_data, pps.data(), pps_info.size); + pps_info.data.reset(pps_data); + pps_data_[parsed_pps->id] = std::move(pps_info); + + RTC_LOG(LS_INFO) << "Inserted SPS id " << parsed_sps->id << " and PPS id " + << parsed_pps->id << " (referencing SPS " + << parsed_pps->sps_id << ")"; +} + +} // namespace video_coding +} // namespace webrtc diff --git a/modules/video_coding/h265_vps_sps_pps_tracker.h b/modules/video_coding/h265_vps_sps_pps_tracker.h new file mode 100644 index 0000000000..1aa22d76ad --- /dev/null +++ b/modules/video_coding/h265_vps_sps_pps_tracker.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_H265_VPS_SPS_PPS_TRACKER_H_ +#define MODULES_VIDEO_CODING_H265_VPS_SPS_PPS_TRACKER_H_ + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "rtc_base/copy_on_write_buffer.h" + +namespace webrtc { +namespace video_coding { + +class H265VpsSpsPpsTracker { + public: + enum PacketAction { kInsert, kDrop, kRequestKeyframe }; + struct FixedBitstream { + PacketAction action; + rtc::CopyOnWriteBuffer bitstream; + }; + + FixedBitstream CopyAndFixBitstream(rtc::ArrayView bitstream, + RTPVideoHeader* video_header); + + void InsertVpsSpsPpsNalus(const std::vector& vps, + const std::vector& sps, + const std::vector& pps); + + private: + struct VpsInfo { + size_t size = 0; + std::unique_ptr data; + }; + + struct PpsInfo { + int sps_id = -1; + size_t size = 0; + std::unique_ptr data; + }; + + struct SpsInfo { + int vps_id = -1; + size_t size = 0; + int width = -1; + int height = -1; + std::unique_ptr data; + }; + + std::map vps_data_; + std::map pps_data_; + std::map sps_data_; +}; + +} // namespace video_coding +} // namespace webrtc + +#endif  // MODULES_VIDEO_CODING_H265_VPS_SPS_PPS_TRACKER_H_ diff --git a/modules/video_coding/include/mock/mock_vcm_callbacks.h b/modules/video_coding/include/mock/mock_vcm_callbacks.h deleted file mode 100644 index 76fc561d63..0000000000 --- a/modules/video_coding/include/mock/mock_vcm_callbacks.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VCM_CALLBACKS_H_ -#define MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VCM_CALLBACKS_H_ - -#include "modules/video_coding/include/video_coding_defines.h" -#include "test/gmock.h" - -namespace webrtc { - -class MockPacketRequestCallback : public VCMPacketRequestCallback { - public: - MOCK_METHOD2(ResendPackets, - int32_t(const uint16_t* sequenceNumbers, uint16_t length)); -}; - -class MockVCMReceiveCallback : public VCMReceiveCallback { - public: - MockVCMReceiveCallback() {} - virtual ~MockVCMReceiveCallback() {} - - MOCK_METHOD4( - FrameToRender, - int32_t(VideoFrame&, absl::optional, int32_t, VideoContentType)); - MOCK_METHOD1(OnIncomingPayloadType, void(int)); - MOCK_METHOD1(OnDecoderImplementationName, void(const char*)); -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_INCLUDE_MOCK_MOCK_VCM_CALLBACKS_H_ diff --git a/modules/video_coding/include/video_codec_interface.h b/modules/video_coding/include/video_codec_interface.h index c7b116f4ae..c7834a2721 100644 --- a/modules/video_coding/include/video_codec_interface.h +++ b/modules/video_coding/include/video_codec_interface.h @@ -20,8 +20,12 @@ #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" +#ifndef DISABLE_H265 +#include "modules/video_coding/codecs/h265/include/h265_globals.h" +#endif #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/include/video_error_codes.h" +#include "rtc_base/deprecation.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -79,7 +83,7 @@ struct CodecSpecificInfoVP9 { uint8_t num_ref_pics; uint8_t p_diff[kMaxVp9RefPics]; - bool end_of_picture; + RTC_DEPRECATED bool end_of_picture; }; static_assert(std::is_pod::value, ""); @@ -90,12 +94,23 @@ struct CodecSpecificInfoH264 { bool base_layer_sync; bool idr_frame; }; + +#ifndef 
DISABLE_H265 +struct CodecSpecificInfoH265 { + H265PacketizationMode packetization_mode; + bool idr_frame; +}; +#endif + static_assert(std::is_pod::value, ""); union CodecSpecificInfoUnion { CodecSpecificInfoVP8 VP8; CodecSpecificInfoVP9 VP9; CodecSpecificInfoH264 H264; +#ifndef DISABLE_H265 + CodecSpecificInfoH265 H265; +#endif }; static_assert(std::is_pod::value, ""); @@ -109,6 +124,7 @@ struct RTC_EXPORT CodecSpecificInfo { VideoCodecType codecType; CodecSpecificInfoUnion codecSpecific; + bool end_of_picture = true; absl::optional generic_frame_info; absl::optional template_structure; }; diff --git a/modules/video_coding/include/video_coding.h b/modules/video_coding/include/video_coding.h index acaa73bbf2..a7cb50ef9c 100644 --- a/modules/video_coding/include/video_coding.h +++ b/modules/video_coding/include/video_coding.h @@ -42,19 +42,16 @@ class VideoCodingModule : public Module { // needed. // // Input: + // - payload_type : RTP payload type // - receiveCodec : Settings for the codec to be registered. // - numberOfCores : Number of CPU cores that the decoder is allowed // to use. - // - requireKeyFrame : Set this to true if you don't want any delta - // frames - // to be decoded until the first key frame has been - // decoded. // // Return value : VCM_OK, on success. // < 0, on error. - virtual int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec, - int32_t numberOfCores, - bool requireKeyFrame = false) = 0; + virtual int32_t RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receiveCodec, + int32_t numberOfCores) = 0; // Register an external decoder object. 
// diff --git a/modules/video_coding/include/video_coding_defines.h b/modules/video_coding/include/video_coding_defines.h index ff9b7d6a66..641e7121ef 100644 --- a/modules/video_coding/include/video_coding_defines.h +++ b/modules/video_coding/include/video_coding_defines.h @@ -41,9 +41,7 @@ enum { }; enum VCMVideoProtection { - kProtectionNone, kProtectionNack, - kProtectionFEC, kProtectionNackFEC, }; diff --git a/modules/video_coding/jitter_buffer.cc b/modules/video_coding/jitter_buffer.cc index 0873285f39..772098a738 100644 --- a/modules/video_coding/jitter_buffer.cc +++ b/modules/video_coding/jitter_buffer.cc @@ -153,7 +153,7 @@ VCMJitterBuffer::~VCMJitterBuffer() { } void VCMJitterBuffer::Start() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); running_ = true; num_consecutive_old_packets_ = 0; @@ -172,7 +172,7 @@ void VCMJitterBuffer::Start() { } void VCMJitterBuffer::Stop() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); running_ = false; last_decoded_state_.Reset(); @@ -181,12 +181,12 @@ void VCMJitterBuffer::Stop() { } bool VCMJitterBuffer::Running() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return running_; } void VCMJitterBuffer::Flush() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); decodable_frames_.Reset(&free_frames_); incomplete_frames_.Reset(&free_frames_); last_decoded_state_.Reset(); // TODO(mikhal): sync reset. @@ -202,21 +202,20 @@ void VCMJitterBuffer::Flush() { } int VCMJitterBuffer::num_packets() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return num_packets_; } int VCMJitterBuffer::num_duplicated_packets() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return num_duplicated_packets_; } // Returns immediately or a |max_wait_time_ms| ms event hang waiting for a // complete frame, |max_wait_time_ms| decided by caller. 
VCMEncodedFrame* VCMJitterBuffer::NextCompleteFrame(uint32_t max_wait_time_ms) { - crit_sect_.Enter(); + MutexLock lock(&mutex_); if (!running_) { - crit_sect_.Leave(); return nullptr; } CleanUpOldOrEmptyFrames(); @@ -227,14 +226,13 @@ VCMEncodedFrame* VCMJitterBuffer::NextCompleteFrame(uint32_t max_wait_time_ms) { clock_->TimeInMilliseconds() + max_wait_time_ms; int64_t wait_time_ms = max_wait_time_ms; while (wait_time_ms > 0) { - crit_sect_.Leave(); + mutex_.Unlock(); const EventTypeWrapper ret = frame_event_->Wait(static_cast(wait_time_ms)); - crit_sect_.Enter(); + mutex_.Lock(); if (ret == kEventSignaled) { // Are we shutting down the jitter buffer? if (!running_) { - crit_sect_.Leave(); return nullptr; } // Finding oldest frame ready for decoder. @@ -252,16 +250,13 @@ VCMEncodedFrame* VCMJitterBuffer::NextCompleteFrame(uint32_t max_wait_time_ms) { } if (decodable_frames_.empty() || decodable_frames_.Front()->GetState() != kStateComplete) { - crit_sect_.Leave(); return nullptr; } - VCMEncodedFrame* encoded_frame = decodable_frames_.Front(); - crit_sect_.Leave(); - return encoded_frame; + return decodable_frames_.Front(); } VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); if (!running_) { return NULL; } @@ -303,8 +298,7 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) { last_decoded_state_.SetState(frame); DropPacketsFromNackList(last_decoded_state_.sequence_num()); - if ((*frame).IsSessionComplete()) - UpdateAveragePacketsPerFrame(frame->NumPackets()); + UpdateAveragePacketsPerFrame(frame->NumPackets()); return frame; } @@ -313,7 +307,7 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) { // frames from within the jitter buffer. 
void VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame) { RTC_CHECK(frame != nullptr); - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); VCMFrameBuffer* frame_buffer = static_cast(frame); RecycleFrameBuffer(frame_buffer); } @@ -354,7 +348,7 @@ VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet, int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame, bool* retransmitted) const { assert(retransmitted); - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); const VCMFrameBuffer* frame_buffer = static_cast(frame); *retransmitted = (frame_buffer->GetNackCount() > 0); @@ -363,7 +357,7 @@ int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame, VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet, bool* retransmitted) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ++num_packets_; // Does this packet belong to an old frame? @@ -577,7 +571,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState( } uint32_t VCMJitterBuffer::EstimatedJitterMs() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); const double rtt_mult = 1.0f; return jitter_estimate_.GetJitterEstimate(rtt_mult, absl::nullopt); } @@ -585,7 +579,7 @@ uint32_t VCMJitterBuffer::EstimatedJitterMs() { void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size, int max_packet_age_to_nack, int max_incomplete_time_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); assert(max_packet_age_to_nack >= 0); assert(max_incomplete_time_ms_ >= 0); max_nack_list_size_ = max_nack_list_size; @@ -616,7 +610,7 @@ uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber( } std::vector VCMJitterBuffer::GetNackList(bool* request_key_frame) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); *request_key_frame = false; if (last_decoded_state_.in_initial_state()) { VCMFrameBuffer* next_frame = NextFrame(); @@ -827,7 +821,7 @@ void VCMJitterBuffer::UpdateAveragePacketsPerFrame(int 
current_number_packets) { } } -// Must be called under the critical section |crit_sect_|. +// Must be called under the critical section |mutex_|. void VCMJitterBuffer::CleanUpOldOrEmptyFrames() { decodable_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_, &free_frames_); @@ -838,13 +832,13 @@ void VCMJitterBuffer::CleanUpOldOrEmptyFrames() { } } -// Must be called from within |crit_sect_|. +// Must be called from within |mutex_|. bool VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const { return missing_sequence_numbers_.find(packet.seqNum) != missing_sequence_numbers_.end(); } -// Must be called under the critical section |crit_sect_|. Should never be +// Must be called under the critical section |mutex_|. Should never be // called with retransmitted frames, they must be filtered out before this // function is called. void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample, @@ -856,7 +850,7 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample, sample.frame_size, incomplete_frame); } -// Must be called under the critical section crit_sect_. Should never be +// Must be called under the critical section mutex_. Should never be // called with retransmitted frames, they must be filtered out before this // function is called. void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame, @@ -870,7 +864,7 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame, frame.size(), incomplete_frame); } -// Must be called under the critical section |crit_sect_|. Should never be +// Must be called under the critical section |mutex_|. Should never be // called with retransmitted frames, they must be filtered out before this // function is called. 
void VCMJitterBuffer::UpdateJitterEstimate(int64_t latest_packet_time_ms, diff --git a/modules/video_coding/jitter_buffer.h b/modules/video_coding/jitter_buffer.h index 1070c379bb..b15ca75ffa 100644 --- a/modules/video_coding/jitter_buffer.h +++ b/modules/video_coding/jitter_buffer.h @@ -21,15 +21,15 @@ #include "modules/include/module_common_types_public.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/decoding_state.h" +#include "modules/video_coding/event_wrapper.h" #include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/inter_frame_delay.h" #include "modules/video_coding/jitter_buffer_common.h" #include "modules/video_coding/jitter_estimator.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/event_wrapper.h" namespace webrtc { @@ -143,66 +143,66 @@ class VCMJitterBuffer { VCMFrameBufferEnum GetFrame(const VCMPacket& packet, VCMFrameBuffer** frame, FrameList** frame_list) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns true if |frame| is continuous in |decoding_state|, not taking // decodable frames into account. bool IsContinuousInState(const VCMFrameBuffer& frame, const VCMDecodingState& decoding_state) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns true if |frame| is continuous in the |last_decoded_state_|, taking // all decodable frames into account. bool IsContinuous(const VCMFrameBuffer& frame) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Looks for frames in |incomplete_frames_| which are continuous in the // provided |decoded_state|. Starts the search from the timestamp of // |decoded_state|. 
void FindAndInsertContinuousFramesWithState( const VCMDecodingState& decoded_state) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Looks for frames in |incomplete_frames_| which are continuous in // |last_decoded_state_| taking all decodable frames into account. Starts // the search from |new_frame|. void FindAndInsertContinuousFrames(const VCMFrameBuffer& new_frame) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); - VCMFrameBuffer* NextFrame() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + VCMFrameBuffer* NextFrame() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns true if the NACK list was updated to cover sequence numbers up to // |sequence_number|. If false a key frame is needed to get into a state where // we can continue decoding. bool UpdateNackList(uint16_t sequence_number) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); bool TooLargeNackList() const; // Returns true if the NACK list was reduced without problem. If false a key // frame is needed to get into a state where we can continue decoding. - bool HandleTooLargeNackList() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + bool HandleTooLargeNackList() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); bool MissingTooOldPacket(uint16_t latest_sequence_number) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns true if the too old packets was successfully removed from the NACK // list. If false, a key frame is needed to get into a state where we can // continue decoding. bool HandleTooOldPackets(uint16_t latest_sequence_number) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Drops all packets in the NACK list up until |last_decoded_sequence_number|. void DropPacketsFromNackList(uint16_t last_decoded_sequence_number); // Gets an empty frame, creating a new frame if necessary (i.e. increases // jitter buffer size). 
- VCMFrameBuffer* GetEmptyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + VCMFrameBuffer* GetEmptyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Attempts to increase the size of the jitter buffer. Returns true on // success, false otherwise. - bool TryToIncreaseJitterBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + bool TryToIncreaseJitterBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Recycles oldest frames until a key frame is found. Used if jitter buffer is // completely full. Returns true if a key frame was found. - bool RecycleFramesUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + bool RecycleFramesUntilKeyFrame() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Update rolling average of packets per frame. void UpdateAveragePacketsPerFrame(int current_number_packets_); // Cleans the frame list in the JB from old/empty frames. // Should only be called prior to actual use. - void CleanUpOldOrEmptyFrames() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + void CleanUpOldOrEmptyFrames() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns true if |packet| is likely to have been retransmitted. bool IsPacketRetransmitted(const VCMPacket& packet) const; @@ -217,35 +217,34 @@ class VCMJitterBuffer { unsigned int frame_size, bool incomplete_frame); - int NonContinuousOrIncompleteDuration() - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + int NonContinuousOrIncompleteDuration() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); uint16_t EstimatedLowSequenceNumber(const VCMFrameBuffer& frame) const; // Reset frame buffer and return it to free_frames_. void RecycleFrameBuffer(VCMFrameBuffer* frame) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* clock_; // If we are running (have started) or not. bool running_; - rtc::CriticalSection crit_sect_; + mutable Mutex mutex_; // Event to signal when we have a frame ready for decoder. std::unique_ptr frame_event_; // Number of allocated frames. 
int max_number_of_frames_; - UnorderedFrameList free_frames_ RTC_GUARDED_BY(crit_sect_); - FrameList decodable_frames_ RTC_GUARDED_BY(crit_sect_); - FrameList incomplete_frames_ RTC_GUARDED_BY(crit_sect_); - VCMDecodingState last_decoded_state_ RTC_GUARDED_BY(crit_sect_); + UnorderedFrameList free_frames_ RTC_GUARDED_BY(mutex_); + FrameList decodable_frames_ RTC_GUARDED_BY(mutex_); + FrameList incomplete_frames_ RTC_GUARDED_BY(mutex_); + VCMDecodingState last_decoded_state_ RTC_GUARDED_BY(mutex_); bool first_packet_since_reset_; // Number of packets in a row that have been too old. int num_consecutive_old_packets_; // Number of packets received. - int num_packets_ RTC_GUARDED_BY(crit_sect_); + int num_packets_ RTC_GUARDED_BY(mutex_); // Number of duplicated packets received. - int num_duplicated_packets_ RTC_GUARDED_BY(crit_sect_); + int num_duplicated_packets_ RTC_GUARDED_BY(mutex_); // Jitter estimation. // Filter for estimating jitter. diff --git a/modules/video_coding/jitter_buffer_common.h b/modules/video_coding/jitter_buffer_common.h index 6ccfe39199..5492ee5b41 100644 --- a/modules/video_coding/jitter_buffer_common.h +++ b/modules/video_coding/jitter_buffer_common.h @@ -54,6 +54,9 @@ enum VCMFrameBufferStateEnum { }; enum { kH264StartCodeLengthBytes = 4 }; +#ifndef DISABLE_H265 +enum { kH265StartCodeLengthBytes = 4 }; +#endif } // namespace webrtc #endif // MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_ diff --git a/modules/video_coding/jitter_estimator.cc b/modules/video_coding/jitter_estimator.cc index cd505835d1..44e2a9811e 100644 --- a/modules/video_coding/jitter_estimator.cc +++ b/modules/video_coding/jitter_estimator.cc @@ -23,6 +23,7 @@ #include "rtc_base/experiments/jitter_upper_bound_experiment.h" #include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -50,6 +51,8 @@ VCMJitterEstimator::VCMJitterEstimator(Clock* clock) 
time_deviation_upper_bound_( JitterUpperBoundExperiment::GetUpperBoundSigmas().value_or( kDefaultMaxTimestampDeviationInSigmas)), + enable_reduced_delay_( + !field_trial::IsEnabled("WebRTC-ReducedJitterDelayKillSwitch")), clock_(clock) { Reset(); } @@ -395,22 +398,25 @@ int VCMJitterEstimator::GetJitterEstimate( } } - static const double kJitterScaleLowThreshold = 5.0; - static const double kJitterScaleHighThreshold = 10.0; - double fps = GetFrameRate(); - // Ignore jitter for very low fps streams. - if (fps < kJitterScaleLowThreshold) { - if (fps == 0.0) { - return rtc::checked_cast(std::max(0.0, jitterMS) + 0.5); + if (enable_reduced_delay_) { + static const double kJitterScaleLowThreshold = 5.0; + static const double kJitterScaleHighThreshold = 10.0; + double fps = GetFrameRate(); + // Ignore jitter for very low fps streams. + if (fps < kJitterScaleLowThreshold) { + if (fps == 0.0) { + return rtc::checked_cast(std::max(0.0, jitterMS) + 0.5); + } + return 0; } - return 0; - } - // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at - // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold. - if (fps < kJitterScaleHighThreshold) { - jitterMS = (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) * - (fps - kJitterScaleLowThreshold) * jitterMS; + // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at + // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold. 
+ if (fps < kJitterScaleHighThreshold) { + jitterMS = + (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) * + (fps - kJitterScaleLowThreshold) * jitterMS; + } } return rtc::checked_cast(std::max(0.0, jitterMS) + 0.5); diff --git a/modules/video_coding/jitter_estimator.h b/modules/video_coding/jitter_estimator.h index d9798b40a1..1d69b95769 100644 --- a/modules/video_coding/jitter_estimator.h +++ b/modules/video_coding/jitter_estimator.h @@ -150,6 +150,7 @@ class VCMJitterEstimator { rtc::RollingAccumulator fps_counter_; const double time_deviation_upper_bound_; + const bool enable_reduced_delay_; Clock* clock_; }; diff --git a/modules/video_coding/jitter_estimator_tests.cc b/modules/video_coding/jitter_estimator_tests.cc index 1ad9abb56f..14baae7e81 100644 --- a/modules/video_coding/jitter_estimator_tests.cc +++ b/modules/video_coding/jitter_estimator_tests.cc @@ -72,6 +72,22 @@ TEST_F(TestVCMJitterEstimator, TestLowRate) { } } +TEST_F(TestVCMJitterEstimator, TestLowRateDisabled) { + test::ScopedFieldTrials field_trials( + "WebRTC-ReducedJitterDelayKillSwitch/Enabled/"); + SetUp(); + + ValueGenerator gen(10); + uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 5; + for (int i = 0; i < 60; ++i) { + estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize()); + AdvanceClock(time_delta_us); + if (i > 2) + EXPECT_GT(estimator_->GetJitterEstimate(0, absl::nullopt), 0); + gen.Advance(); + } +} + TEST_F(TestVCMJitterEstimator, TestUpperBound) { struct TestContext { TestContext() diff --git a/modules/video_coding/nack_module2.cc b/modules/video_coding/nack_module2.cc new file mode 100644 index 0000000000..8a3a731ed0 --- /dev/null +++ b/modules/video_coding/nack_module2.cc @@ -0,0 +1,343 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/nack_module2.h" + +#include +#include + +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/logging.h" +#include "rtc_base/task_queue.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { + +namespace { +const int kMaxPacketAge = 10000; +const int kMaxNackPackets = 1000; +const int kDefaultRttMs = 100; +const int kMaxNackRetries = 10; +const int kMaxReorderedPackets = 128; +const int kNumReorderingBuckets = 10; +const int kDefaultSendNackDelayMs = 0; + +int64_t GetSendNackDelay() { + int64_t delay_ms = strtol( + webrtc::field_trial::FindFullName("WebRTC-SendNackDelayMs").c_str(), + nullptr, 10); + if (delay_ms > 0 && delay_ms <= 20) { + RTC_LOG(LS_INFO) << "SendNackDelay is set to " << delay_ms; + return delay_ms; + } + return kDefaultSendNackDelayMs; +} +} // namespace + +constexpr TimeDelta NackModule2::kUpdateInterval; + +NackModule2::NackInfo::NackInfo() + : seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {} + +NackModule2::NackInfo::NackInfo(uint16_t seq_num, + uint16_t send_at_seq_num, + int64_t created_at_time) + : seq_num(seq_num), + send_at_seq_num(send_at_seq_num), + created_at_time(created_at_time), + sent_at_time(-1), + retries(0) {} + +NackModule2::BackoffSettings::BackoffSettings(TimeDelta min_retry, + TimeDelta max_rtt, + double base) + : min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {} + +absl::optional +NackModule2::BackoffSettings::ParseFromFieldTrials() { + // Matches magic number in RTPSender::OnReceivedNack(). + const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5); + // Upper bound on link-delay considered for exponential backoff. 
+ // Selected so that cumulative delay with 1.25 base and 10 retries ends up + // below 3s, since above that there will be a FIR generated instead. + const TimeDelta kDefaultMaxRtt = TimeDelta::Millis(160); + // Default base for exponential backoff, adds 25% RTT delay for each retry. + const double kDefaultBase = 1.25; + + FieldTrialParameter enabled("enabled", false); + FieldTrialParameter min_retry("min_retry", + kDefaultMinRetryInterval); + FieldTrialParameter max_rtt("max_rtt", kDefaultMaxRtt); + FieldTrialParameter base("base", kDefaultBase); + ParseFieldTrial({&enabled, &min_retry, &max_rtt, &base}, + field_trial::FindFullName("WebRTC-ExponentialNackBackoff")); + + if (enabled) { + return NackModule2::BackoffSettings(min_retry.Get(), max_rtt.Get(), + base.Get()); + } + return absl::nullopt; +} + +NackModule2::NackModule2(TaskQueueBase* current_queue, + Clock* clock, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender, + TimeDelta update_interval /*= kUpdateInterval*/) + : worker_thread_(current_queue), + update_interval_(update_interval), + clock_(clock), + nack_sender_(nack_sender), + keyframe_request_sender_(keyframe_request_sender), + reordering_histogram_(kNumReorderingBuckets, kMaxReorderedPackets), + initialized_(false), + rtt_ms_(kDefaultRttMs), + newest_seq_num_(0), + send_nack_delay_ms_(GetSendNackDelay()), + backoff_settings_(BackoffSettings::ParseFromFieldTrials()) { + RTC_DCHECK(clock_); + RTC_DCHECK(nack_sender_); + RTC_DCHECK(keyframe_request_sender_); + RTC_DCHECK_GT(update_interval.ms(), 0); + RTC_DCHECK(worker_thread_); + RTC_DCHECK(worker_thread_->IsCurrent()); + + repeating_task_ = RepeatingTaskHandle::DelayedStart( + TaskQueueBase::Current(), update_interval_, + [this]() { + RTC_DCHECK_RUN_ON(worker_thread_); + std::vector nack_batch = GetNackBatch(kTimeOnly); + if (!nack_batch.empty()) { + // This batch of NACKs is triggered externally; there is no external + // initiator who can batch them with other feedback 
messages. + nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/false); + } + return update_interval_; + }, + clock_); +} + +NackModule2::~NackModule2() { + RTC_DCHECK_RUN_ON(worker_thread_); + repeating_task_.Stop(); +} + +int NackModule2::OnReceivedPacket(uint16_t seq_num, bool is_keyframe) { + RTC_DCHECK_RUN_ON(worker_thread_); + return OnReceivedPacket(seq_num, is_keyframe, false); +} + +int NackModule2::OnReceivedPacket(uint16_t seq_num, + bool is_keyframe, + bool is_recovered) { + RTC_DCHECK_RUN_ON(worker_thread_); + // TODO(philipel): When the packet includes information whether it is + // retransmitted or not, use that value instead. For + // now set it to true, which will cause the reordering + // statistics to never be updated. + bool is_retransmitted = true; + + if (!initialized_) { + newest_seq_num_ = seq_num; + if (is_keyframe) + keyframe_list_.insert(seq_num); + initialized_ = true; + return 0; + } + + // Since the |newest_seq_num_| is a packet we have actually received we know + // that packet has never been Nacked. + if (seq_num == newest_seq_num_) + return 0; + + if (AheadOf(newest_seq_num_, seq_num)) { + // An out of order packet has been received. + auto nack_list_it = nack_list_.find(seq_num); + int nacks_sent_for_packet = 0; + if (nack_list_it != nack_list_.end()) { + nacks_sent_for_packet = nack_list_it->second.retries; + nack_list_.erase(nack_list_it); + } + if (!is_retransmitted) + UpdateReorderingStatistics(seq_num); + return nacks_sent_for_packet; + } + + // Keep track of new keyframes. + if (is_keyframe) + keyframe_list_.insert(seq_num); + + // And remove old ones so we don't accumulate keyframes. + auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge); + if (it != keyframe_list_.begin()) + keyframe_list_.erase(keyframe_list_.begin(), it); + + if (is_recovered) { + recovered_list_.insert(seq_num); + + // Remove old ones so we don't accumulate recovered packets. 
+ auto it = recovered_list_.lower_bound(seq_num - kMaxPacketAge); + if (it != recovered_list_.begin()) + recovered_list_.erase(recovered_list_.begin(), it); + + // Do not send nack for packets recovered by FEC or RTX. + return 0; + } + + AddPacketsToNack(newest_seq_num_ + 1, seq_num); + newest_seq_num_ = seq_num; + + // Are there any nacks that are waiting for this seq_num. + std::vector nack_batch = GetNackBatch(kSeqNumOnly); + if (!nack_batch.empty()) { + // This batch of NACKs is triggered externally; the initiator can + // batch them with other feedback messages. + nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/true); + } + + return 0; +} + +void NackModule2::ClearUpTo(uint16_t seq_num) { + // Called via RtpVideoStreamReceiver2::FrameContinuous on the network thread. + worker_thread_->PostTask(ToQueuedTask(task_safety_, [seq_num, this]() { + RTC_DCHECK_RUN_ON(worker_thread_); + nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num)); + keyframe_list_.erase(keyframe_list_.begin(), + keyframe_list_.lower_bound(seq_num)); + recovered_list_.erase(recovered_list_.begin(), + recovered_list_.lower_bound(seq_num)); + })); +} + +void NackModule2::UpdateRtt(int64_t rtt_ms) { + RTC_DCHECK_RUN_ON(worker_thread_); + rtt_ms_ = rtt_ms; +} + +bool NackModule2::RemovePacketsUntilKeyFrame() { + // Called on worker_thread_. + while (!keyframe_list_.empty()) { + auto it = nack_list_.lower_bound(*keyframe_list_.begin()); + + if (it != nack_list_.begin()) { + // We have found a keyframe that actually is newer than at least one + // packet in the nack list. + nack_list_.erase(nack_list_.begin(), it); + return true; + } + + // If this keyframe is so old it does not remove any packets from the list, + // remove it from the list of keyframes and try the next keyframe. + keyframe_list_.erase(keyframe_list_.begin()); + } + return false; +} + +void NackModule2::AddPacketsToNack(uint16_t seq_num_start, + uint16_t seq_num_end) { + // Called on worker_thread_. 
+ // Remove old packets. + auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge); + nack_list_.erase(nack_list_.begin(), it); + + // If the nack list is too large, remove packets from the nack list until + // the latest first packet of a keyframe. If the list is still too large, + // clear it and request a keyframe. + uint16_t num_new_nacks = ForwardDiff(seq_num_start, seq_num_end); + if (nack_list_.size() + num_new_nacks > kMaxNackPackets) { + while (RemovePacketsUntilKeyFrame() && + nack_list_.size() + num_new_nacks > kMaxNackPackets) { + } + + if (nack_list_.size() + num_new_nacks > kMaxNackPackets) { + nack_list_.clear(); + RTC_LOG(LS_WARNING) << "NACK list full, clearing NACK" + " list and requesting keyframe."; + keyframe_request_sender_->RequestKeyFrame(); + return; + } + } + + for (uint16_t seq_num = seq_num_start; seq_num != seq_num_end; ++seq_num) { + // Do not send nack for packets that are already recovered by FEC or RTX + if (recovered_list_.find(seq_num) != recovered_list_.end()) + continue; + NackInfo nack_info(seq_num, seq_num + WaitNumberOfPackets(0.5), + clock_->TimeInMilliseconds()); + RTC_DCHECK(nack_list_.find(seq_num) == nack_list_.end()); + nack_list_[seq_num] = nack_info; + } +} + +std::vector NackModule2::GetNackBatch(NackFilterOptions options) { + // Called on worker_thread_. 
+ + bool consider_seq_num = options != kTimeOnly; + bool consider_timestamp = options != kSeqNumOnly; + Timestamp now = clock_->CurrentTime(); + std::vector nack_batch; + auto it = nack_list_.begin(); + while (it != nack_list_.end()) { + TimeDelta resend_delay = TimeDelta::Millis(rtt_ms_); + if (backoff_settings_) { + resend_delay = + std::max(resend_delay, backoff_settings_->min_retry_interval); + if (it->second.retries > 1) { + TimeDelta exponential_backoff = + std::min(TimeDelta::Millis(rtt_ms_), backoff_settings_->max_rtt) * + std::pow(backoff_settings_->base, it->second.retries - 1); + resend_delay = std::max(resend_delay, exponential_backoff); + } + } + + bool delay_timed_out = + now.ms() - it->second.created_at_time >= send_nack_delay_ms_; + bool nack_on_rtt_passed = + now.ms() - it->second.sent_at_time >= resend_delay.ms(); + bool nack_on_seq_num_passed = + it->second.sent_at_time == -1 && + AheadOrAt(newest_seq_num_, it->second.send_at_seq_num); + if (delay_timed_out && ((consider_seq_num && nack_on_seq_num_passed) || + (consider_timestamp && nack_on_rtt_passed))) { + nack_batch.emplace_back(it->second.seq_num); + ++it->second.retries; + it->second.sent_at_time = now.ms(); + if (it->second.retries >= kMaxNackRetries) { + RTC_LOG(LS_WARNING) << "Sequence number " << it->second.seq_num + << " removed from NACK list due to max retries."; + it = nack_list_.erase(it); + } else { + ++it; + } + continue; + } + ++it; + } + return nack_batch; +} + +void NackModule2::UpdateReorderingStatistics(uint16_t seq_num) { + // Running on worker_thread_. 
+ RTC_DCHECK(AheadOf(newest_seq_num_, seq_num)); + uint16_t diff = ReverseDiff(newest_seq_num_, seq_num); + reordering_histogram_.Add(diff); +} + +int NackModule2::WaitNumberOfPackets(float probability) const { + // Called on worker_thread_; + if (reordering_histogram_.NumValues() == 0) + return 0; + return reordering_histogram_.InverseCdf(probability); +} + +} // namespace webrtc diff --git a/modules/video_coding/nack_module2.h b/modules/video_coding/nack_module2.h new file mode 100644 index 0000000000..89dd082192 --- /dev/null +++ b/modules/video_coding/nack_module2.h @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_NACK_MODULE2_H_ +#define MODULES_VIDEO_CODING_NACK_MODULE2_H_ + +#include + +#include +#include +#include + +#include "api/units/time_delta.h" +#include "modules/include/module_common_types.h" +#include "modules/video_coding/histogram.h" +#include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +// TODO(bugs.webrtc.org/11594): This class no longer implements the Module +// interface and therefore "NackModule" may not be a descriptive name anymore. +// Consider renaming to e.g. NackTracker or NackRequester. 
+class NackModule2 final { + public: + static constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(20); + + NackModule2(TaskQueueBase* current_queue, + Clock* clock, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender, + TimeDelta update_interval = kUpdateInterval); + ~NackModule2(); + + int OnReceivedPacket(uint16_t seq_num, bool is_keyframe); + int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered); + + void ClearUpTo(uint16_t seq_num); + void UpdateRtt(int64_t rtt_ms); + + private: + // Which fields to consider when deciding which packet to nack in + // GetNackBatch. + enum NackFilterOptions { kSeqNumOnly, kTimeOnly, kSeqNumAndTime }; + + // This class holds the sequence number of the packet that is in the nack list + // as well as the meta data about when it should be nacked and how many times + // we have tried to nack this packet. + struct NackInfo { + NackInfo(); + NackInfo(uint16_t seq_num, + uint16_t send_at_seq_num, + int64_t created_at_time); + + uint16_t seq_num; + uint16_t send_at_seq_num; + int64_t created_at_time; + int64_t sent_at_time; + int retries; + }; + + struct BackoffSettings { + BackoffSettings(TimeDelta min_retry, TimeDelta max_rtt, double base); + static absl::optional ParseFromFieldTrials(); + + // Min time between nacks. + const TimeDelta min_retry_interval; + // Upper bound on link-delay considered for exponential backoff. + const TimeDelta max_rtt; + // Base for the exponential backoff. + const double base; + }; + + void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end) + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); + + // Removes packets from the nack list until the next keyframe. Returns true + // if packets were removed. + bool RemovePacketsUntilKeyFrame() + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); + std::vector GetNackBatch(NackFilterOptions options) + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); + + // Update the reordering distribution. 
+ void UpdateReorderingStatistics(uint16_t seq_num) + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); + + // Returns how many packets we have to wait in order to receive the packet + with probability |probability| or higher. + int WaitNumberOfPackets(float probability) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); + + TaskQueueBase* const worker_thread_; + + // Used to regularly call SendNack if needed. + RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(worker_thread_); + const TimeDelta update_interval_; + + Clock* const clock_; + NackSender* const nack_sender_; + KeyFrameRequestSender* const keyframe_request_sender_; + + // TODO(philipel): Some of the variables below are consistently used on a + // known thread (e.g. see |initialized_|). Those probably do not need + // synchronized access. + std::map> nack_list_ + RTC_GUARDED_BY(worker_thread_); + std::set> keyframe_list_ + RTC_GUARDED_BY(worker_thread_); + std::set> recovered_list_ + RTC_GUARDED_BY(worker_thread_); + video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(worker_thread_); + bool initialized_ RTC_GUARDED_BY(worker_thread_); + int64_t rtt_ms_ RTC_GUARDED_BY(worker_thread_); + uint16_t newest_seq_num_ RTC_GUARDED_BY(worker_thread_); + + // Adds a delay before sending a NACK for a received packet. + const int64_t send_nack_delay_ms_; + + const absl::optional backoff_settings_; + + // Used to signal destruction to potentially pending tasks. + ScopedTaskSafety task_safety_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_NACK_MODULE2_H_ diff --git a/modules/video_coding/nack_module2_unittest.cc b/modules/video_coding/nack_module2_unittest.cc new file mode 100644 index 0000000000..acd1eead01 --- /dev/null +++ b/modules/video_coding/nack_module2_unittest.cc @@ -0,0 +1,411 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
 + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/nack_module2.h" + +#include +#include +#include +#include + +#include "system_wrappers/include/clock.h" +#include "test/field_trial.h" +#include "test/gtest.h" +#include "test/run_loop.h" + +namespace webrtc { +// TODO(bugs.webrtc.org/11594): Use the GlobalSimulatedTimeController +// instead of RunLoop. At the moment we mix use of the Clock and the underlying +// implementation of RunLoop, which is realtime. +class TestNackModule2 : public ::testing::TestWithParam, + public NackSender, + public KeyFrameRequestSender { + protected: + TestNackModule2() + : clock_(new SimulatedClock(0)), + field_trial_(GetParam() + ? 
"WebRTC-ExponentialNackBackoff/enabled:true/" + : "WebRTC-ExponentialNackBackoff/enabled:false/"), + keyframes_requested_(0) {} + + void SetUp() override {} + + void SendNack(const std::vector& sequence_numbers, + bool buffering_allowed) override { + sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(), + sequence_numbers.end()); + if (waiting_for_send_nack_) { + waiting_for_send_nack_ = false; + loop_.Quit(); + } + } + + void RequestKeyFrame() override { ++keyframes_requested_; } + + void Flush() { + // nack_module.Process(); + loop_.Flush(); + } + + bool WaitForSendNack() { + if (timed_out_) { + RTC_NOTREACHED(); + return false; + } + + RTC_DCHECK(!waiting_for_send_nack_); + + waiting_for_send_nack_ = true; + loop_.PostDelayedTask( + [this]() { + timed_out_ = true; + loop_.Quit(); + }, + 1000); + + loop_.Run(); + + if (timed_out_) + return false; + + RTC_DCHECK(!waiting_for_send_nack_); + return true; + } + + NackModule2& CreateNackModule( + TimeDelta interval = NackModule2::kUpdateInterval) { + RTC_DCHECK(!nack_module_.get()); + nack_module_ = std::make_unique( + TaskQueueBase::Current(), clock_.get(), this, this, interval); + nack_module_->UpdateRtt(kDefaultRttMs); + return *nack_module_.get(); + } + + static constexpr int64_t kDefaultRttMs = 20; + test::RunLoop loop_; + std::unique_ptr clock_; + test::ScopedFieldTrials field_trial_; + std::unique_ptr nack_module_; + std::vector sent_nacks_; + int keyframes_requested_; + bool waiting_for_send_nack_ = false; + bool timed_out_ = false; +}; + +TEST_P(TestNackModule2, NackOnePacket) { + NackModule2& nack_module = CreateNackModule(); + nack_module.OnReceivedPacket(1, false, false); + nack_module.OnReceivedPacket(3, false, false); + ASSERT_EQ(1u, sent_nacks_.size()); + EXPECT_EQ(2, sent_nacks_[0]); +} + +TEST_P(TestNackModule2, WrappingSeqNum) { + NackModule2& nack_module = CreateNackModule(); + nack_module.OnReceivedPacket(0xfffe, false, false); + nack_module.OnReceivedPacket(1, false, false); + 
 ASSERT_EQ(2u, sent_nacks_.size()); + EXPECT_EQ(0xffff, sent_nacks_[0]); + EXPECT_EQ(0, sent_nacks_[1]); +} + +TEST_P(TestNackModule2, WrappingSeqNumClearToKeyframe) { + NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(10)); + nack_module.OnReceivedPacket(0xfffe, false, false); + nack_module.OnReceivedPacket(1, false, false); + ASSERT_EQ(2u, sent_nacks_.size()); + EXPECT_EQ(0xffff, sent_nacks_[0]); + EXPECT_EQ(0, sent_nacks_[1]); + + sent_nacks_.clear(); + nack_module.OnReceivedPacket(2, true, false); + ASSERT_EQ(0u, sent_nacks_.size()); + + nack_module.OnReceivedPacket(501, true, false); + ASSERT_EQ(498u, sent_nacks_.size()); + for (int seq_num = 3; seq_num < 501; ++seq_num) + EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]); + + sent_nacks_.clear(); + nack_module.OnReceivedPacket(1001, false, false); + EXPECT_EQ(499u, sent_nacks_.size()); + for (int seq_num = 502; seq_num < 1001; ++seq_num) + EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]); + + sent_nacks_.clear(); + clock_->AdvanceTimeMilliseconds(100); + ASSERT_TRUE(WaitForSendNack()); + ASSERT_EQ(999u, sent_nacks_.size()); + EXPECT_EQ(0xffff, sent_nacks_[0]); + EXPECT_EQ(0, sent_nacks_[1]); + for (int seq_num = 3; seq_num < 501; ++seq_num) + EXPECT_EQ(seq_num, sent_nacks_[seq_num - 1]); + for (int seq_num = 502; seq_num < 1001; ++seq_num) + EXPECT_EQ(seq_num, sent_nacks_[seq_num - 2]); + + // Adding packet 1004 will cause the nack list to reach its max limit. + // It will then clear all nacks up to the next keyframe (seq num 2), + // thus removing 0xffff and 0 from the nack list. 
+ sent_nacks_.clear(); + nack_module.OnReceivedPacket(1004, false, false); + ASSERT_EQ(2u, sent_nacks_.size()); + EXPECT_EQ(1002, sent_nacks_[0]); + EXPECT_EQ(1003, sent_nacks_[1]); + + sent_nacks_.clear(); + clock_->AdvanceTimeMilliseconds(100); + ASSERT_TRUE(WaitForSendNack()); + ASSERT_EQ(999u, sent_nacks_.size()); + for (int seq_num = 3; seq_num < 501; ++seq_num) + EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]); + for (int seq_num = 502; seq_num < 1001; ++seq_num) + EXPECT_EQ(seq_num, sent_nacks_[seq_num - 4]); + + // Adding packet 1007 will cause the nack module to overflow again, thus + // clearing everything up to 501 which is the next keyframe. + nack_module.OnReceivedPacket(1007, false, false); + sent_nacks_.clear(); + clock_->AdvanceTimeMilliseconds(100); + ASSERT_TRUE(WaitForSendNack()); + ASSERT_EQ(503u, sent_nacks_.size()); + for (int seq_num = 502; seq_num < 1001; ++seq_num) + EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]); + EXPECT_EQ(1005, sent_nacks_[501]); + EXPECT_EQ(1006, sent_nacks_[502]); +} + +TEST_P(TestNackModule2, ResendNack) { + NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1)); + nack_module.OnReceivedPacket(1, false, false); + nack_module.OnReceivedPacket(3, false, false); + size_t expected_nacks_sent = 1; + ASSERT_EQ(expected_nacks_sent, sent_nacks_.size()); + EXPECT_EQ(2, sent_nacks_[0]); + + if (GetParam()) { + // Retry has to wait at least 5ms by default. + nack_module.UpdateRtt(1); + clock_->AdvanceTimeMilliseconds(4); + Flush(); // Too early. + EXPECT_EQ(expected_nacks_sent, sent_nacks_.size()); + + clock_->AdvanceTimeMilliseconds(1); + WaitForSendNack(); // Now allowed. + EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size()); + } else { + nack_module.UpdateRtt(1); + clock_->AdvanceTimeMilliseconds(1); + WaitForSendNack(); // Fast retransmit allowed. + EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size()); + } + + // N:th try has to wait b^(N-1) * rtt by default. + const double b = GetParam() ? 
1.25 : 1.0; + for (int i = 2; i < 10; ++i) { + // Change RTT, above the 40ms max for exponential backoff. + TimeDelta rtt = TimeDelta::Millis(160); // + (i * 10 - 40) + nack_module.UpdateRtt(rtt.ms()); + + // RTT gets capped at 160ms in backoff calculations. + TimeDelta expected_backoff_delay = + std::pow(b, i - 1) * std::min(rtt, TimeDelta::Millis(160)); + + // Move to one millisecond before next allowed NACK. + clock_->AdvanceTimeMilliseconds(expected_backoff_delay.ms() - 1); + Flush(); + EXPECT_EQ(expected_nacks_sent, sent_nacks_.size()); + + // Move to one millisecond after next allowed NACK. + // After rather than on to avoid rounding errors. + clock_->AdvanceTimeMilliseconds(2); + WaitForSendNack(); // Now allowed. + EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size()); + } + + // Giving up after 10 tries. + clock_->AdvanceTimeMilliseconds(3000); + Flush(); + EXPECT_EQ(expected_nacks_sent, sent_nacks_.size()); +} + +TEST_P(TestNackModule2, ResendPacketMaxRetries) { + NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1)); + nack_module.OnReceivedPacket(1, false, false); + nack_module.OnReceivedPacket(3, false, false); + ASSERT_EQ(1u, sent_nacks_.size()); + EXPECT_EQ(2, sent_nacks_[0]); + + int backoff_factor = 1; + for (size_t retries = 1; retries < 10; ++retries) { + // Exponential backoff, so that we don't reject NACK because of time. 
+ clock_->AdvanceTimeMilliseconds(backoff_factor * kDefaultRttMs); + backoff_factor *= 2; + WaitForSendNack(); + EXPECT_EQ(retries + 1, sent_nacks_.size()); + } + + clock_->AdvanceTimeMilliseconds(backoff_factor * kDefaultRttMs); + Flush(); + EXPECT_EQ(10u, sent_nacks_.size()); +} + +TEST_P(TestNackModule2, TooLargeNackList) { + NackModule2& nack_module = CreateNackModule(); + nack_module.OnReceivedPacket(0, false, false); + nack_module.OnReceivedPacket(1001, false, false); + EXPECT_EQ(1000u, sent_nacks_.size()); + EXPECT_EQ(0, keyframes_requested_); + nack_module.OnReceivedPacket(1003, false, false); + EXPECT_EQ(1000u, sent_nacks_.size()); + EXPECT_EQ(1, keyframes_requested_); + nack_module.OnReceivedPacket(1004, false, false); + EXPECT_EQ(1000u, sent_nacks_.size()); + EXPECT_EQ(1, keyframes_requested_); +} + +TEST_P(TestNackModule2, TooLargeNackListWithKeyFrame) { + NackModule2& nack_module = CreateNackModule(); + nack_module.OnReceivedPacket(0, false, false); + nack_module.OnReceivedPacket(1, true, false); + nack_module.OnReceivedPacket(1001, false, false); + EXPECT_EQ(999u, sent_nacks_.size()); + EXPECT_EQ(0, keyframes_requested_); + nack_module.OnReceivedPacket(1003, false, false); + EXPECT_EQ(1000u, sent_nacks_.size()); + EXPECT_EQ(0, keyframes_requested_); + nack_module.OnReceivedPacket(1005, false, false); + EXPECT_EQ(1000u, sent_nacks_.size()); + EXPECT_EQ(1, keyframes_requested_); +} + +TEST_P(TestNackModule2, ClearUpTo) { + NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1)); + nack_module.OnReceivedPacket(0, false, false); + nack_module.OnReceivedPacket(100, false, false); + EXPECT_EQ(99u, sent_nacks_.size()); + + sent_nacks_.clear(); + clock_->AdvanceTimeMilliseconds(100); + nack_module.ClearUpTo(50); + WaitForSendNack(); + ASSERT_EQ(50u, sent_nacks_.size()); + EXPECT_EQ(50, sent_nacks_[0]); +} + +TEST_P(TestNackModule2, ClearUpToWrap) { + NackModule2& nack_module = CreateNackModule(); + nack_module.OnReceivedPacket(0xfff0, false, false); 
+ nack_module.OnReceivedPacket(0xf, false, false); + EXPECT_EQ(30u, sent_nacks_.size()); + + sent_nacks_.clear(); + clock_->AdvanceTimeMilliseconds(100); + nack_module.ClearUpTo(0); + WaitForSendNack(); + ASSERT_EQ(15u, sent_nacks_.size()); + EXPECT_EQ(0, sent_nacks_[0]); +} + +TEST_P(TestNackModule2, PacketNackCount) { + NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1)); + EXPECT_EQ(0, nack_module.OnReceivedPacket(0, false, false)); + EXPECT_EQ(0, nack_module.OnReceivedPacket(2, false, false)); + EXPECT_EQ(1, nack_module.OnReceivedPacket(1, false, false)); + + sent_nacks_.clear(); + nack_module.UpdateRtt(100); + EXPECT_EQ(0, nack_module.OnReceivedPacket(5, false, false)); + clock_->AdvanceTimeMilliseconds(100); + WaitForSendNack(); + EXPECT_EQ(4u, sent_nacks_.size()); + + clock_->AdvanceTimeMilliseconds(125); + WaitForSendNack(); + + EXPECT_EQ(6u, sent_nacks_.size()); + + EXPECT_EQ(3, nack_module.OnReceivedPacket(3, false, false)); + EXPECT_EQ(3, nack_module.OnReceivedPacket(4, false, false)); + EXPECT_EQ(0, nack_module.OnReceivedPacket(4, false, false)); +} + +TEST_P(TestNackModule2, NackListFullAndNoOverlapWithKeyframes) { + NackModule2& nack_module = CreateNackModule(); + const int kMaxNackPackets = 1000; + const unsigned int kFirstGap = kMaxNackPackets - 20; + const unsigned int kSecondGap = 200; + uint16_t seq_num = 0; + nack_module.OnReceivedPacket(seq_num++, true, false); + seq_num += kFirstGap; + nack_module.OnReceivedPacket(seq_num++, true, false); + EXPECT_EQ(kFirstGap, sent_nacks_.size()); + sent_nacks_.clear(); + seq_num += kSecondGap; + nack_module.OnReceivedPacket(seq_num, true, false); + EXPECT_EQ(kSecondGap, sent_nacks_.size()); +} + +TEST_P(TestNackModule2, HandleFecRecoveredPacket) { + NackModule2& nack_module = CreateNackModule(); + nack_module.OnReceivedPacket(1, false, false); + nack_module.OnReceivedPacket(4, false, true); + EXPECT_EQ(0u, sent_nacks_.size()); + nack_module.OnReceivedPacket(5, false, false); + EXPECT_EQ(2u, 
sent_nacks_.size()); +} + +TEST_P(TestNackModule2, SendNackWithoutDelay) { + NackModule2& nack_module = CreateNackModule(); + nack_module.OnReceivedPacket(0, false, false); + nack_module.OnReceivedPacket(100, false, false); + EXPECT_EQ(99u, sent_nacks_.size()); +} + +INSTANTIATE_TEST_SUITE_P(WithAndWithoutBackoff, + TestNackModule2, + ::testing::Values(true, false)); + +class TestNackModule2WithFieldTrial : public ::testing::Test, + public NackSender, + public KeyFrameRequestSender { + protected: + TestNackModule2WithFieldTrial() + : nack_delay_field_trial_("WebRTC-SendNackDelayMs/10/"), + clock_(new SimulatedClock(0)), + nack_module_(TaskQueueBase::Current(), clock_.get(), this, this), + keyframes_requested_(0) {} + + void SendNack(const std::vector& sequence_numbers, + bool buffering_allowed) override { + sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(), + sequence_numbers.end()); + } + + void RequestKeyFrame() override { ++keyframes_requested_; } + + test::ScopedFieldTrials nack_delay_field_trial_; + std::unique_ptr clock_; + NackModule2 nack_module_; + std::vector sent_nacks_; + int keyframes_requested_; +}; + +TEST_F(TestNackModule2WithFieldTrial, SendNackWithDelay) { + nack_module_.OnReceivedPacket(0, false, false); + nack_module_.OnReceivedPacket(100, false, false); + EXPECT_EQ(0u, sent_nacks_.size()); + clock_->AdvanceTimeMilliseconds(10); + nack_module_.OnReceivedPacket(106, false, false); + EXPECT_EQ(99u, sent_nacks_.size()); + clock_->AdvanceTimeMilliseconds(10); + nack_module_.OnReceivedPacket(109, false, false); + EXPECT_EQ(104u, sent_nacks_.size()); +} +} // namespace webrtc diff --git a/modules/video_coding/nack_module_unittest.cc b/modules/video_coding/nack_module_unittest.cc index c9a2023104..f91eb750f0 100644 --- a/modules/video_coding/nack_module_unittest.cc +++ b/modules/video_coding/nack_module_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/nack_module.h" +#include "modules/video_coding/deprecated/nack_module.h" #include #include @@ -45,7 +45,7 @@ class TestNackModule : public ::testing::TestWithParam, static constexpr int64_t kDefaultRttMs = 20; std::unique_ptr clock_; test::ScopedFieldTrials field_trial_; - NackModule nack_module_; + DEPRECATED_NackModule nack_module_; std::vector sent_nacks_; int keyframes_requested_; }; @@ -184,12 +184,12 @@ TEST_P(TestNackModule, ResendNack) { const double b = GetParam() ? 1.25 : 1.0; for (int i = 2; i < 10; ++i) { // Change RTT, above the 40ms max for exponential backoff. - TimeDelta rtt = TimeDelta::ms(160); // + (i * 10 - 40) + TimeDelta rtt = TimeDelta::Millis(160); // + (i * 10 - 40) nack_module_.UpdateRtt(rtt.ms()); // RTT gets capped at 160ms in backoff calculations. TimeDelta expected_backoff_delay = - std::pow(b, i - 1) * std::min(rtt, TimeDelta::ms(160)); + std::pow(b, i - 1) * std::min(rtt, TimeDelta::Millis(160)); // Move to one millisecond before next allowed NACK. 
clock_->AdvanceTimeMilliseconds(expected_backoff_delay.ms() - 1); @@ -352,7 +352,7 @@ class TestNackModuleWithFieldTrial : public ::testing::Test, test::ScopedFieldTrials nack_delay_field_trial_; std::unique_ptr clock_; - NackModule nack_module_; + DEPRECATED_NackModule nack_module_; std::vector sent_nacks_; int keyframes_requested_; }; diff --git a/modules/video_coding/packet.cc b/modules/video_coding/packet.cc index 0c4a658b8f..c32f890f47 100644 --- a/modules/video_coding/packet.cc +++ b/modules/video_coding/packet.cc @@ -44,8 +44,13 @@ VCMPacket::VCMPacket(const uint8_t* ptr, markerBit(rtp_header.markerBit), timesNacked(-1), completeNALU(kNaluIncomplete), +#ifndef DISABLE_H265 + insertStartCode((videoHeader.codec == kVideoCodecH264 || videoHeader.codec == kVideoCodecH265) && + videoHeader.is_first_packet_in_frame), +#else insertStartCode(videoHeader.codec == kVideoCodecH264 && videoHeader.is_first_packet_in_frame), +#endif video_header(videoHeader), packet_info(rtp_header, receive_time_ms) { if (is_first_packet_in_frame() && markerBit) { diff --git a/modules/video_coding/packet_buffer.cc b/modules/video_coding/packet_buffer.cc index 6ebb9c4c9b..df689caa5a 100644 --- a/modules/video_coding/packet_buffer.cc +++ b/modules/video_coding/packet_buffer.cc @@ -21,20 +21,22 @@ #include "absl/types/variant.h" #include "api/array_view.h" #include "api/rtp_packet_info.h" -#include "api/video/encoded_frame.h" #include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" +#ifndef DISABLE_H265 +#include "common_video/h265/h265_common.h" +#endif #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" -#include "modules/video_coding/frame_object.h" +#ifndef DISABLE_H265 +#include 
"modules/video_coding/codecs/h265/include/h265_globals.h" +#endif #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/mod_ops.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace video_coding { @@ -66,8 +68,7 @@ PacketBuffer::PacketBuffer(Clock* clock, first_packet_received_(false), is_cleared_to_first_seq_num_(false), buffer_(start_buffer_size), - sps_pps_idr_is_h264_keyframe_( - field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + sps_pps_idr_is_h264_keyframe_(false) { RTC_DCHECK_LE(start_buffer_size, max_buffer_size); // Buffer size must always be a power of 2. RTC_DCHECK((start_buffer_size & (start_buffer_size - 1)) == 0); @@ -81,7 +82,7 @@ PacketBuffer::~PacketBuffer() { PacketBuffer::InsertResult PacketBuffer::InsertPacket( std::unique_ptr packet) { PacketBuffer::InsertResult result; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); uint16_t seq_num = packet->seq_num; size_t index = seq_num % buffer_.size(); @@ -99,23 +100,23 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( first_seq_num_ = seq_num; } - if (buffer_[index].used()) { + if (buffer_[index] != nullptr) { // Duplicate packet, just delete the payload. - if (buffer_[index].seq_num() == packet->seq_num) { + if (buffer_[index]->seq_num == packet->seq_num) { return result; } // The packet buffer is full, try to expand the buffer. - while (ExpandBufferSize() && buffer_[seq_num % buffer_.size()].used()) { + while (ExpandBufferSize() && buffer_[seq_num % buffer_.size()] != nullptr) { } index = seq_num % buffer_.size(); // Packet buffer is still full since we were unable to expand the buffer. - if (buffer_[index].used()) { + if (buffer_[index] != nullptr) { // Clear the buffer, delete payload, and return false to signal that a // new keyframe is needed. 
RTC_LOG(LS_WARNING) << "Clear PacketBuffer and request key frame."; - Clear(); + ClearInternal(); result.buffer_cleared = true; return result; } @@ -129,18 +130,17 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( last_received_keyframe_rtp_timestamp_ = packet->timestamp; } - StoredPacket& new_entry = buffer_[index]; - new_entry.continuous = false; - new_entry.packet = std::move(packet); + packet->continuous = false; + buffer_[index] = std::move(packet); UpdateMissingPackets(seq_num); - result.frames = FindFrames(seq_num); + result.packets = FindFrames(seq_num); return result; } void PacketBuffer::ClearTo(uint16_t seq_num) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // We have already cleared past this sequence number, no need to do anything. if (is_cleared_to_first_seq_num_ && AheadOf(first_seq_num_, seq_num)) { @@ -157,9 +157,9 @@ void PacketBuffer::ClearTo(uint16_t seq_num) { size_t diff = ForwardDiff(first_seq_num_, seq_num); size_t iterations = std::min(diff, buffer_.size()); for (size_t i = 0; i < iterations; ++i) { - StoredPacket& stored = buffer_[first_seq_num_ % buffer_.size()]; - if (stored.used() && AheadOf(seq_num, stored.seq_num())) { - stored.packet = nullptr; + auto& stored = buffer_[first_seq_num_ % buffer_.size()]; + if (stored != nullptr && AheadOf(seq_num, stored->seq_num)) { + stored = nullptr; } ++first_seq_num_; } @@ -176,51 +176,43 @@ void PacketBuffer::ClearTo(uint16_t seq_num) { } } -void PacketBuffer::ClearInterval(uint16_t start_seq_num, - uint16_t stop_seq_num) { - size_t iterations = ForwardDiff(start_seq_num, stop_seq_num + 1); - RTC_DCHECK_LE(iterations, buffer_.size()); - uint16_t seq_num = start_seq_num; - for (size_t i = 0; i < iterations; ++i) { - size_t index = seq_num % buffer_.size(); - RTC_DCHECK_EQ(buffer_[index].seq_num(), seq_num); - buffer_[index].packet = nullptr; - - ++seq_num; - } -} - void PacketBuffer::Clear() { - rtc::CritScope lock(&crit_); - for (StoredPacket& entry : buffer_) { - 
entry.packet = nullptr; - } - - first_packet_received_ = false; - is_cleared_to_first_seq_num_ = false; - last_received_packet_ms_.reset(); - last_received_keyframe_packet_ms_.reset(); - newest_inserted_seq_num_.reset(); - missing_packets_.clear(); + MutexLock lock(&mutex_); + ClearInternal(); } PacketBuffer::InsertResult PacketBuffer::InsertPadding(uint16_t seq_num) { PacketBuffer::InsertResult result; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); UpdateMissingPackets(seq_num); - result.frames = FindFrames(static_cast(seq_num + 1)); + result.packets = FindFrames(static_cast(seq_num + 1)); return result; } absl::optional PacketBuffer::LastReceivedPacketMs() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return last_received_packet_ms_; } absl::optional PacketBuffer::LastReceivedKeyframePacketMs() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return last_received_keyframe_packet_ms_; } +void PacketBuffer::ForceSpsPpsIdrIsH264Keyframe() { + sps_pps_idr_is_h264_keyframe_ = true; +} +void PacketBuffer::ClearInternal() { + for (auto& entry : buffer_) { + entry = nullptr; + } + + first_packet_received_ = false; + is_cleared_to_first_seq_num_ = false; + last_received_packet_ms_.reset(); + last_received_keyframe_packet_ms_.reset(); + newest_inserted_seq_num_.reset(); + missing_packets_.clear(); +} bool PacketBuffer::ExpandBufferSize() { if (buffer_.size() == max_size_) { @@ -230,10 +222,10 @@ bool PacketBuffer::ExpandBufferSize() { } size_t new_size = std::min(max_size_, 2 * buffer_.size()); - std::vector new_buffer(new_size); - for (StoredPacket& entry : buffer_) { - if (entry.used()) { - new_buffer[entry.seq_num() % new_size] = std::move(entry); + std::vector> new_buffer(new_size); + for (std::unique_ptr& entry : buffer_) { + if (entry != nullptr) { + new_buffer[entry->seq_num % new_size] = std::move(entry); } } buffer_ = std::move(new_buffer); @@ -244,62 +236,72 @@ bool PacketBuffer::ExpandBufferSize() { bool 
PacketBuffer::PotentialNewFrame(uint16_t seq_num) const { size_t index = seq_num % buffer_.size(); int prev_index = index > 0 ? index - 1 : buffer_.size() - 1; - const StoredPacket& entry = buffer_[index]; - const StoredPacket& prev_entry = buffer_[prev_index]; + const auto& entry = buffer_[index]; + const auto& prev_entry = buffer_[prev_index]; - if (!entry.used()) + if (entry == nullptr) return false; - if (entry.seq_num() != seq_num) + if (entry->seq_num != seq_num) return false; - if (entry.frame_begin()) + if (entry->is_first_packet_in_frame()) return true; - if (!prev_entry.used()) + if (prev_entry == nullptr) return false; - if (prev_entry.seq_num() != static_cast(entry.seq_num() - 1)) + if (prev_entry->seq_num != static_cast(entry->seq_num - 1)) return false; - if (prev_entry.packet->timestamp != entry.packet->timestamp) + if (prev_entry->timestamp != entry->timestamp) return false; - if (prev_entry.continuous) + if (prev_entry->continuous) return true; return false; } -std::vector> PacketBuffer::FindFrames( +std::vector> PacketBuffer::FindFrames( uint16_t seq_num) { - std::vector> found_frames; + std::vector> found_frames; for (size_t i = 0; i < buffer_.size() && PotentialNewFrame(seq_num); ++i) { size_t index = seq_num % buffer_.size(); - buffer_[index].continuous = true; + buffer_[index]->continuous = true; // If all packets of the frame is continuous, find the first packet of the - // frame and create an RtpFrameObject. - if (buffer_[index].frame_end()) { + // frame and add all packets of the frame to the returned packets. + if (buffer_[index]->is_last_packet_in_frame()) { uint16_t start_seq_num = seq_num; // Find the start index by searching backward until the packet with // the |frame_begin| flag is set. int start_index = index; size_t tested_packets = 0; - int64_t frame_timestamp = buffer_[start_index].packet->timestamp; + int64_t frame_timestamp = buffer_[start_index]->timestamp; // Identify H.264 keyframes by means of SPS, PPS, and IDR. 
- bool is_h264 = buffer_[start_index].packet->codec() == kVideoCodecH264; + bool is_h264 = buffer_[start_index]->codec() == kVideoCodecH264; bool has_h264_sps = false; bool has_h264_pps = false; bool has_h264_idr = false; bool is_h264_keyframe = false; + + bool is_h265 = false; +#ifndef DISABLE_H265 + is_h265 = buffer_[start_index]->codec() == kVideoCodecH265; + bool has_h265_sps = false; + bool has_h265_pps = false; + bool has_h265_idr = false; + bool is_h265_keyframe = false; +#endif + int idr_width = -1; int idr_height = -1; while (true) { ++tested_packets; - if (!is_h264 && buffer_[start_index].frame_begin()) + if (!is_h264 && !is_h265 && buffer_[start_index]->is_first_packet_in_frame()) break; if (is_h264) { const auto* h264_header = absl::get_if( - &buffer_[start_index].packet->video_header.video_type_header); + &buffer_[start_index]->video_header.video_type_header); if (!h264_header || h264_header->nalus_length >= kMaxNalusPerPacket) return found_frames; @@ -320,13 +322,40 @@ std::vector> PacketBuffer::FindFrames( // smallest index and valid resolution; typically its IDR or SPS // packet; there may be packet preceeding this packet, IDR's // resolution will be applied to them. 
- if (buffer_[start_index].packet->width() > 0 && - buffer_[start_index].packet->height() > 0) { - idr_width = buffer_[start_index].packet->width(); - idr_height = buffer_[start_index].packet->height(); + if (buffer_[start_index]->width() > 0 && + buffer_[start_index]->height() > 0) { + idr_width = buffer_[start_index]->width(); + idr_height = buffer_[start_index]->height(); + } + } + } +#ifndef DISABLE_H265 + if (is_h265 && !is_h265_keyframe) { + const auto* h265_header = absl::get_if( + &buffer_[start_index]->video_header.video_type_header); + if (!h265_header || h265_header->nalus_length >= kMaxNalusPerPacket) + return found_frames; + for (size_t j = 0; j < h265_header->nalus_length; ++j) { + if (h265_header->nalus[j].type == H265::NaluType::kSps) { + has_h265_sps = true; + } else if (h265_header->nalus[j].type == H265::NaluType::kPps) { + has_h265_pps = true; + } else if (h265_header->nalus[j].type == H265::NaluType::kIdrWRadl + || h265_header->nalus[j].type == H265::NaluType::kIdrNLp + || h265_header->nalus[j].type == H265::NaluType::kCra) { + has_h265_idr = true; + } + } + if (has_h265_sps && has_h265_pps && has_h265_idr) { + is_h265_keyframe = true; + if (buffer_[start_index]->width() > 0 && + buffer_[start_index]->height() > 0) { + idr_width = buffer_[start_index]->width(); + idr_height = buffer_[start_index]->height(); } } } +#endif if (tested_packets == buffer_.size()) break; @@ -339,9 +368,8 @@ std::vector> PacketBuffer::FindFrames( // the timestamp of that packet is the same as this one. This may cause // the PacketBuffer to hand out incomplete frames. 
// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=7106 - if (is_h264 && - (!buffer_[start_index].used() || - buffer_[start_index].packet->timestamp != frame_timestamp)) { + if ((is_h264 || is_h265) && (buffer_[start_index] == nullptr || + buffer_[start_index]->timestamp != frame_timestamp)) { break; } @@ -365,126 +393,86 @@ std::vector> PacketBuffer::FindFrames( // determines if the RtpFrameObject is a key frame or delta frame. const size_t first_packet_index = start_seq_num % buffer_.size(); if (is_h264_keyframe) { - buffer_[first_packet_index].packet->video_header.frame_type = + buffer_[first_packet_index]->video_header.frame_type = VideoFrameType::kVideoFrameKey; if (idr_width > 0 && idr_height > 0) { // IDR frame was finalized and we have the correct resolution for // IDR; update first packet to have same resolution as IDR. - buffer_[first_packet_index].packet->video_header.width = idr_width; - buffer_[first_packet_index].packet->video_header.height = - idr_height; + buffer_[first_packet_index]->video_header.width = idr_width; + buffer_[first_packet_index]->video_header.height = idr_height; } } else { - buffer_[first_packet_index].packet->video_header.frame_type = + buffer_[first_packet_index]->video_header.frame_type = VideoFrameType::kVideoFrameDelta; } - // With IPPP, if this is not a keyframe, make sure there are no gaps - // in the packet sequence numbers up until this point. - const uint8_t h264tid = - buffer_[start_index].used() - ? buffer_[start_index] - .packet->video_header.frame_marking.temporal_id - : kNoTemporalIdx; - if (h264tid == kNoTemporalIdx && !is_h264_keyframe && - missing_packets_.upper_bound(start_seq_num) != - missing_packets_.begin()) { + // If this is not a keyframe, make sure there are no gaps in the packet + // sequence numbers up until this point. 
+ if (!is_h264_keyframe && missing_packets_.upper_bound(start_seq_num) != + missing_packets_.begin()) { return found_frames; } } - if (auto frame = AssembleFrame(start_seq_num, seq_num)) { - found_frames.push_back(std::move(frame)); - } else { - RTC_LOG(LS_ERROR) << "Failed to assemble frame from packets " - << start_seq_num << "-" << seq_num; +#ifndef DISABLE_H265 + if (is_h265) { + // Warn if this is an unsafe frame. + if (has_h265_idr && (!has_h265_sps || !has_h265_pps)) { + RTC_LOG(LS_WARNING) + << "Received H.265-IDR frame " + << "(SPS: " << has_h265_sps << ", PPS: " << has_h265_pps << "). " + << "Treating as delta frame since " + << "WebRTC-SpsPpsIdrIsH265Keyframe is always enabled."; + } + + // Now that we have decided whether to treat this frame as a key frame + // or delta frame in the frame buffer, we update the field that + // determines if the RtpFrameObject is a key frame or delta frame. + const size_t first_packet_index = start_seq_num % buffer_.size(); + if (is_h265_keyframe) { + buffer_[first_packet_index]->video_header.frame_type = + VideoFrameType::kVideoFrameKey; + if (idr_width > 0 && idr_height > 0) { + // IDR frame was finalized and we have the correct resolution for + // IDR; update first packet to have same resolution as IDR. + buffer_[first_packet_index]->video_header.width = idr_width; + buffer_[first_packet_index]->video_header.height = idr_height; + } + } else { + buffer_[first_packet_index]->video_header.frame_type = + VideoFrameType::kVideoFrameDelta; + } + + // If this is not a key frame, make sure there are no gaps in the + // packet sequence numbers up until this point. + if (!is_h265_keyframe && missing_packets_.upper_bound(start_seq_num) != + missing_packets_.begin()) { + return found_frames; + } + } +#endif + const uint16_t end_seq_num = seq_num + 1; + // Use uint16_t type to handle sequence number wrap around case. 
+ uint16_t num_packets = end_seq_num - start_seq_num; + found_frames.reserve(found_frames.size() + num_packets); + for (uint16_t i = start_seq_num; i != end_seq_num; ++i) { + std::unique_ptr& packet = buffer_[i % buffer_.size()]; + RTC_DCHECK(packet); + RTC_DCHECK_EQ(i, packet->seq_num); + // Ensure frame boundary flags are properly set. + packet->video_header.is_first_packet_in_frame = (i == start_seq_num); + packet->video_header.is_last_packet_in_frame = (i == seq_num); + found_frames.push_back(std::move(packet)); } missing_packets_.erase(missing_packets_.begin(), missing_packets_.upper_bound(seq_num)); - ClearInterval(start_seq_num, seq_num); } ++seq_num; } return found_frames; } -std::unique_ptr PacketBuffer::AssembleFrame( - uint16_t first_seq_num, - uint16_t last_seq_num) { - const uint16_t end_seq_num = last_seq_num + 1; - const uint16_t num_packets = end_seq_num - first_seq_num; - int max_nack_count = -1; - int64_t min_recv_time = std::numeric_limits::max(); - int64_t max_recv_time = std::numeric_limits::min(); - size_t frame_size = 0; - - std::vector> payloads; - RtpPacketInfos::vector_type packet_infos; - payloads.reserve(num_packets); - packet_infos.reserve(num_packets); - - for (uint16_t seq_num = first_seq_num; seq_num != end_seq_num; ++seq_num) { - const Packet& packet = GetPacket(seq_num); - - max_nack_count = std::max(max_nack_count, packet.times_nacked); - min_recv_time = - std::min(min_recv_time, packet.packet_info.receive_time_ms()); - max_recv_time = - std::max(max_recv_time, packet.packet_info.receive_time_ms()); - frame_size += packet.video_payload.size(); - payloads.emplace_back(packet.video_payload); - packet_infos.push_back(packet.packet_info); - } - - const Packet& first_packet = GetPacket(first_seq_num); - rtc::scoped_refptr bitstream; - // TODO(danilchap): Hide codec-specific code paths behind an interface. 
- if (first_packet.codec() == VideoCodecType::kVideoCodecAV1) { - bitstream = VideoRtpDepacketizerAv1::AssembleFrame(payloads); - if (!bitstream) { - // Failed to assemble a frame. Discard and continue. - return nullptr; - } - } else { - bitstream = EncodedImageBuffer::Create(frame_size); - - uint8_t* write_at = bitstream->data(); - for (rtc::ArrayView payload : payloads) { - memcpy(write_at, payload.data(), payload.size()); - write_at += payload.size(); - } - RTC_DCHECK_EQ(write_at - bitstream->data(), bitstream->size()); - } - const Packet& last_packet = GetPacket(last_seq_num); - return std::make_unique( - first_seq_num, // - last_seq_num, // - last_packet.marker_bit, // - max_nack_count, // - min_recv_time, // - max_recv_time, // - first_packet.timestamp, // - first_packet.ntp_time_ms, // - last_packet.video_header.video_timing, // - first_packet.payload_type, // - first_packet.codec(), // - last_packet.video_header.rotation, // - last_packet.video_header.content_type, // - first_packet.video_header, // - last_packet.video_header.color_space, // - first_packet.generic_descriptor, // - RtpPacketInfos(std::move(packet_infos)), // - std::move(bitstream)); -} - -const PacketBuffer::Packet& PacketBuffer::GetPacket(uint16_t seq_num) const { - const StoredPacket& entry = buffer_[seq_num % buffer_.size()]; - RTC_DCHECK(entry.used()); - RTC_DCHECK_EQ(seq_num, entry.seq_num()); - return *entry.packet; -} - void PacketBuffer::UpdateMissingPackets(uint16_t seq_num) { if (!newest_inserted_seq_num_) newest_inserted_seq_num_ = seq_num; diff --git a/modules/video_coding/packet_buffer.h b/modules/video_coding/packet_buffer.h index f78147c78e..e34f7040b5 100644 --- a/modules/video_coding/packet_buffer.h +++ b/modules/video_coding/packet_buffer.h @@ -19,13 +19,11 @@ #include "absl/base/attributes.h" #include "api/rtp_packet_info.h" #include "api/video/encoded_image.h" -#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include 
"modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "modules/video_coding/frame_object.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/critical_section.h" #include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -57,6 +55,9 @@ class PacketBuffer { return video_header.is_last_packet_in_frame; } + // If all its previous packets have been inserted into the packet buffer. + // Set and used internally by the PacketBuffer. + bool continuous = false; bool marker_bit = false; uint8_t payload_type = 0; uint16_t seq_num = 0; @@ -67,12 +68,11 @@ class PacketBuffer { rtc::CopyOnWriteBuffer video_payload; RTPVideoHeader video_header; - absl::optional generic_descriptor; RtpPacketInfo packet_info; }; struct InsertResult { - std::vector> frames; + std::vector> packets; // Indicates if the packet buffer was cleared, which means that a key // frame request should be sent. bool buffer_cleared = false; @@ -82,97 +82,73 @@ class PacketBuffer { PacketBuffer(Clock* clock, size_t start_buffer_size, size_t max_buffer_size); ~PacketBuffer(); - InsertResult InsertPacket(std::unique_ptr packet) - ABSL_MUST_USE_RESULT; - InsertResult InsertPadding(uint16_t seq_num) ABSL_MUST_USE_RESULT; - void ClearTo(uint16_t seq_num); - void Clear(); + ABSL_MUST_USE_RESULT InsertResult InsertPacket(std::unique_ptr packet) + RTC_LOCKS_EXCLUDED(mutex_); + ABSL_MUST_USE_RESULT InsertResult InsertPadding(uint16_t seq_num) + RTC_LOCKS_EXCLUDED(mutex_); + void ClearTo(uint16_t seq_num) RTC_LOCKS_EXCLUDED(mutex_); + void Clear() RTC_LOCKS_EXCLUDED(mutex_); // Timestamp (not RTP timestamp) of the last received packet/keyframe packet. 
- absl::optional LastReceivedPacketMs() const; - absl::optional LastReceivedKeyframePacketMs() const; + absl::optional LastReceivedPacketMs() const + RTC_LOCKS_EXCLUDED(mutex_); + absl::optional LastReceivedKeyframePacketMs() const + RTC_LOCKS_EXCLUDED(mutex_); + void ForceSpsPpsIdrIsH264Keyframe(); private: - struct StoredPacket { - uint16_t seq_num() const { return packet->seq_num; } - - // If this is the first packet of the frame. - bool frame_begin() const { return packet->is_first_packet_in_frame(); } - - // If this is the last packet of the frame. - bool frame_end() const { return packet->is_last_packet_in_frame(); } - - // If this slot is currently used. - bool used() const { return packet != nullptr; } - - // If all its previous packets have been inserted into the packet buffer. - bool continuous = false; - - std::unique_ptr packet; - }; - Clock* const clock_; + // Clears with |mutex_| taken. + void ClearInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // Tries to expand the buffer. - bool ExpandBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + bool ExpandBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Test if all previous packets has arrived for the given sequence number. bool PotentialNewFrame(uint16_t seq_num) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); - - // Test if all packets of a frame has arrived, and if so, creates a frame. - // Returns a vector of received frames. - std::vector> FindFrames(uint16_t seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); - - std::unique_ptr AssembleFrame(uint16_t first_seq_num, - uint16_t last_seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); - - // Get the packet with sequence number |seq_num|. - const Packet& GetPacket(uint16_t seq_num) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - // Clears the packet buffer from |start_seq_num| to |stop_seq_num| where the - // endpoints are inclusive. 
- void ClearInterval(uint16_t start_seq_num, uint16_t stop_seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + // Test if all packets of a frame has arrived, and if so, returns packets to + // create frames. + std::vector> FindFrames(uint16_t seq_num) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateMissingPackets(uint16_t seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - rtc::CriticalSection crit_; + mutable Mutex mutex_; // buffer_.size() and max_size_ must always be a power of two. const size_t max_size_; // The fist sequence number currently in the buffer. - uint16_t first_seq_num_ RTC_GUARDED_BY(crit_); + uint16_t first_seq_num_ RTC_GUARDED_BY(mutex_); // If the packet buffer has received its first packet. - bool first_packet_received_ RTC_GUARDED_BY(crit_); + bool first_packet_received_ RTC_GUARDED_BY(mutex_); // If the buffer is cleared to |first_seq_num_|. - bool is_cleared_to_first_seq_num_ RTC_GUARDED_BY(crit_); + bool is_cleared_to_first_seq_num_ RTC_GUARDED_BY(mutex_); // Buffer that holds the the inserted packets and information needed to // determine continuity between them. - std::vector buffer_ RTC_GUARDED_BY(crit_); + std::vector> buffer_ RTC_GUARDED_BY(mutex_); // Timestamp of the last received packet/keyframe packet. - absl::optional last_received_packet_ms_ RTC_GUARDED_BY(crit_); + absl::optional last_received_packet_ms_ RTC_GUARDED_BY(mutex_); absl::optional last_received_keyframe_packet_ms_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); absl::optional last_received_keyframe_rtp_timestamp_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); - absl::optional newest_inserted_seq_num_ RTC_GUARDED_BY(crit_); + absl::optional newest_inserted_seq_num_ RTC_GUARDED_BY(mutex_); std::set> missing_packets_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); // Indicates if we should require SPS, PPS, and IDR for a particular // RTP timestamp to treat the corresponding frame as a keyframe. 
- const bool sps_pps_idr_is_h264_keyframe_; + bool sps_pps_idr_is_h264_keyframe_; }; } // namespace video_coding diff --git a/modules/video_coding/packet_buffer_unittest.cc b/modules/video_coding/packet_buffer_unittest.cc index 7779999fc1..a01b480398 100644 --- a/modules/video_coding/packet_buffer_unittest.cc +++ b/modules/video_coding/packet_buffer_unittest.cc @@ -40,29 +40,39 @@ constexpr int kMaxSize = 64; void IgnoreResult(PacketBuffer::InsertResult /*result*/) {} +// Validates frame boundaries are valid and returns first sequence_number for +// each frame. std::vector StartSeqNums( - rtc::ArrayView> frames) { + rtc::ArrayView> packets) { std::vector result; - for (const auto& frame : frames) { - result.push_back(frame->first_seq_num()); + bool frame_boundary = true; + for (const auto& packet : packets) { + EXPECT_EQ(frame_boundary, packet->is_first_packet_in_frame()); + if (packet->is_first_packet_in_frame()) { + result.push_back(packet->seq_num); + } + frame_boundary = packet->is_last_packet_in_frame(); } + EXPECT_TRUE(frame_boundary); return result; } MATCHER_P(StartSeqNumsAre, seq_num, "") { - return Matches(ElementsAre(seq_num))(StartSeqNums(arg.frames)); + return Matches(ElementsAre(seq_num))(StartSeqNums(arg.packets)); } MATCHER_P2(StartSeqNumsAre, seq_num1, seq_num2, "") { - return Matches(ElementsAre(seq_num1, seq_num2))(StartSeqNums(arg.frames)); + return Matches(ElementsAre(seq_num1, seq_num2))(StartSeqNums(arg.packets)); } MATCHER(KeyFrame, "") { - return arg->frame_type() == VideoFrameType::kVideoFrameKey; + return arg->is_first_packet_in_frame() && + arg->video_header.frame_type == VideoFrameType::kVideoFrameKey; } MATCHER(DeltaFrame, "") { - return arg->frame_type() == VideoFrameType::kVideoFrameDelta; + return arg->is_first_packet_in_frame() && + arg->video_header.frame_type == VideoFrameType::kVideoFrameDelta; } struct PacketBufferInsertResult : public PacketBuffer::InsertResult { @@ -72,18 +82,15 @@ struct PacketBufferInsertResult : public 
PacketBuffer::InsertResult { void PrintTo(const PacketBufferInsertResult& result, std::ostream* os) { *os << "frames: { "; - for (size_t i = 0; i < result.frames.size(); ++i) { - const RtpFrameObject& frame = *result.frames[i]; - if (i > 0) { - *os << ", "; - } - *os << "{sn: "; - if (frame.first_seq_num() == frame.last_seq_num()) { - *os << frame.first_seq_num(); - } else { - *os << "[" << frame.first_seq_num() << "-" << frame.last_seq_num() << "]"; + for (const auto& packet : result.packets) { + if (packet->is_first_packet_in_frame() && + packet->is_last_packet_in_frame()) { + *os << "{sn: " << packet->seq_num << " }"; + } else if (packet->is_first_packet_in_frame()) { + *os << "{sn: [" << packet->seq_num << "-"; + } else if (packet->is_last_packet_in_frame()) { + *os << packet->seq_num << "] }, "; } - *os << "}"; } *os << " }"; if (result.buffer_cleared) { @@ -93,9 +100,8 @@ void PrintTo(const PacketBufferInsertResult& result, std::ostream* os) { class PacketBufferTest : public ::testing::Test { protected: - explicit PacketBufferTest(std::string field_trials = "") - : scoped_field_trials_(field_trials), - rand_(0x7732213), + PacketBufferTest() + : rand_(0x7732213), clock_(0), packet_buffer_(&clock_, kStartSize, kMaxSize) {} @@ -126,7 +132,6 @@ class PacketBufferTest : public ::testing::Test { packet_buffer_.InsertPacket(std::move(packet))); } - const test::ScopedFieldTrials scoped_field_trials_; Random rand_; SimulatedClock clock_; PacketBuffer packet_buffer_; @@ -134,23 +139,23 @@ class PacketBufferTest : public ::testing::Test { TEST_F(PacketBufferTest, InsertOnePacket) { const uint16_t seq_num = Rand(); - EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast).frames, SizeIs(1)); + EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); } TEST_F(PacketBufferTest, InsertMultiplePackets) { const uint16_t seq_num = Rand(); - EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast).frames, SizeIs(1)); - EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, 
kFirst, kLast).frames, SizeIs(1)); - EXPECT_THAT(Insert(seq_num + 2, kKeyFrame, kFirst, kLast).frames, SizeIs(1)); - EXPECT_THAT(Insert(seq_num + 3, kKeyFrame, kFirst, kLast).frames, SizeIs(1)); + EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); + EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); + EXPECT_THAT(Insert(seq_num + 2, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); + EXPECT_THAT(Insert(seq_num + 3, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); } TEST_F(PacketBufferTest, InsertDuplicatePacket) { const uint16_t seq_num = Rand(); - EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).frames, IsEmpty()); - EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).frames, IsEmpty()); - EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kNotFirst, kLast).frames, - SizeIs(1)); + EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); + EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); + EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kNotFirst, kLast).packets, + SizeIs(2)); } TEST_F(PacketBufferTest, SeqNumWrapOneFrame) { @@ -166,57 +171,17 @@ TEST_F(PacketBufferTest, SeqNumWrapTwoFrames) { } TEST_F(PacketBufferTest, InsertOldPackets) { - EXPECT_THAT(Insert(100, kKeyFrame, kFirst, kNotLast).frames, IsEmpty()); - EXPECT_THAT(Insert(102, kDeltaFrame, kFirst, kLast).frames, SizeIs(1)); - EXPECT_THAT(Insert(101, kKeyFrame, kNotFirst, kLast).frames, SizeIs(1)); + EXPECT_THAT(Insert(100, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); + EXPECT_THAT(Insert(102, kDeltaFrame, kFirst, kLast).packets, SizeIs(1)); + EXPECT_THAT(Insert(101, kKeyFrame, kNotFirst, kLast).packets, SizeIs(2)); - EXPECT_THAT(Insert(100, kKeyFrame, kFirst, kNotLast).frames, IsEmpty()); - EXPECT_THAT(Insert(100, kKeyFrame, kFirst, kNotLast).frames, IsEmpty()); - EXPECT_THAT(Insert(102, kDeltaFrame, kFirst, kLast).frames, SizeIs(1)); + EXPECT_THAT(Insert(100, kKeyFrame, kFirst, kNotLast).packets, 
IsEmpty()); + EXPECT_THAT(Insert(100, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); + EXPECT_THAT(Insert(102, kDeltaFrame, kFirst, kLast).packets, SizeIs(1)); packet_buffer_.ClearTo(102); - EXPECT_THAT(Insert(102, kDeltaFrame, kFirst, kLast).frames, IsEmpty()); - EXPECT_THAT(Insert(103, kDeltaFrame, kFirst, kLast).frames, SizeIs(1)); -} - -TEST_F(PacketBufferTest, NackCount) { - const uint16_t seq_num = Rand(); - - auto packet = std::make_unique(); - packet->video_header.codec = kVideoCodecGeneric; - packet->seq_num = seq_num; - packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; - packet->video_header.is_first_packet_in_frame = true; - packet->video_header.is_last_packet_in_frame = false; - packet->times_nacked = 0; - IgnoreResult(packet_buffer_.InsertPacket(std::move(packet))); - - packet = std::make_unique(); - packet->seq_num = seq_num + 1; - packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; - packet->video_header.is_first_packet_in_frame = false; - packet->video_header.is_last_packet_in_frame = false; - packet->times_nacked = 1; - IgnoreResult(packet_buffer_.InsertPacket(std::move(packet))); - - packet = std::make_unique(); - packet->seq_num = seq_num + 2; - packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; - packet->video_header.is_first_packet_in_frame = false; - packet->video_header.is_last_packet_in_frame = false; - packet->times_nacked = 3; - IgnoreResult(packet_buffer_.InsertPacket(std::move(packet))); - - packet = std::make_unique(); - packet->seq_num = seq_num + 3; - packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; - packet->video_header.is_first_packet_in_frame = false; - packet->video_header.is_last_packet_in_frame = true; - packet->times_nacked = 1; - auto frames = packet_buffer_.InsertPacket(std::move(packet)).frames; - - ASSERT_THAT(frames, SizeIs(1)); - EXPECT_EQ(frames.front()->times_nacked(), 3); + EXPECT_THAT(Insert(102, kDeltaFrame, kFirst, kLast).packets, IsEmpty()); + 
EXPECT_THAT(Insert(103, kDeltaFrame, kFirst, kLast).packets, SizeIs(1)); } TEST_F(PacketBufferTest, FrameSize) { @@ -229,8 +194,11 @@ TEST_F(PacketBufferTest, FrameSize) { Insert(seq_num, kKeyFrame, kFirst, kNotLast, data1); Insert(seq_num + 1, kKeyFrame, kNotFirst, kNotLast, data2); Insert(seq_num + 2, kKeyFrame, kNotFirst, kNotLast, data3); - EXPECT_THAT(Insert(seq_num + 3, kKeyFrame, kNotFirst, kLast, data4).frames, - ElementsAre(Pointee(SizeIs(20)))); + auto packets = + Insert(seq_num + 3, kKeyFrame, kNotFirst, kLast, data4).packets; + // Expect one frame of 4 packets. + EXPECT_THAT(StartSeqNums(packets), ElementsAre(seq_num)); + EXPECT_THAT(packets, SizeIs(4)); } TEST_F(PacketBufferTest, ExpandBuffer) { @@ -289,7 +257,7 @@ TEST_F(PacketBufferTest, TwoPacketsTwoFrames) { TEST_F(PacketBufferTest, TwoPacketsOneFrames) { const uint16_t seq_num = Rand(); - EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).frames, IsEmpty()); + EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kNotFirst, kLast), StartSeqNumsAre(seq_num)); } @@ -297,8 +265,8 @@ TEST_F(PacketBufferTest, TwoPacketsOneFrames) { TEST_F(PacketBufferTest, ThreePacketReorderingOneFrame) { const uint16_t seq_num = Rand(); - EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).frames, IsEmpty()); - EXPECT_THAT(Insert(seq_num + 2, kKeyFrame, kNotFirst, kLast).frames, + EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); + EXPECT_THAT(Insert(seq_num + 2, kKeyFrame, kNotFirst, kLast).packets, IsEmpty()); EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kNotFirst, kNotLast), StartSeqNumsAre(seq_num)); @@ -343,7 +311,7 @@ TEST_F(PacketBufferTest, DontClearNewerPacket) { packet_buffer_.ClearTo(0); EXPECT_THAT(Insert(2 * kStartSize, kKeyFrame, kFirst, kLast), StartSeqNumsAre(2 * kStartSize)); - EXPECT_THAT(Insert(3 * kStartSize + 1, kKeyFrame, kFirst, kNotLast).frames, + EXPECT_THAT(Insert(3 * kStartSize + 1, 
kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); packet_buffer_.ClearTo(2 * kStartSize); EXPECT_THAT(Insert(3 * kStartSize + 2, kKeyFrame, kNotFirst, kLast), @@ -353,10 +321,11 @@ TEST_F(PacketBufferTest, DontClearNewerPacket) { TEST_F(PacketBufferTest, OneIncompleteFrame) { const uint16_t seq_num = Rand(); - EXPECT_THAT(Insert(seq_num, kDeltaFrame, kFirst, kNotLast).frames, IsEmpty()); + EXPECT_THAT(Insert(seq_num, kDeltaFrame, kFirst, kNotLast).packets, + IsEmpty()); EXPECT_THAT(Insert(seq_num + 1, kDeltaFrame, kNotFirst, kLast), StartSeqNumsAre(seq_num)); - EXPECT_THAT(Insert(seq_num - 1, kDeltaFrame, kNotFirst, kLast).frames, + EXPECT_THAT(Insert(seq_num - 1, kDeltaFrame, kNotFirst, kLast).packets, IsEmpty()); } @@ -365,8 +334,9 @@ TEST_F(PacketBufferTest, TwoIncompleteFramesFullBuffer) { for (int i = 1; i < kMaxSize - 1; ++i) Insert(seq_num + i, kDeltaFrame, kNotFirst, kNotLast); - EXPECT_THAT(Insert(seq_num, kDeltaFrame, kFirst, kNotLast).frames, IsEmpty()); - EXPECT_THAT(Insert(seq_num - 1, kDeltaFrame, kNotFirst, kLast).frames, + EXPECT_THAT(Insert(seq_num, kDeltaFrame, kFirst, kNotLast).packets, + IsEmpty()); + EXPECT_THAT(Insert(seq_num - 1, kDeltaFrame, kNotFirst, kLast).packets, IsEmpty()); } @@ -383,111 +353,6 @@ TEST_F(PacketBufferTest, FramesReordered) { StartSeqNumsAre(seq_num + 2)); } -TEST_F(PacketBufferTest, GetBitstream) { - // "many bitstream, such data" with null termination. 
- uint8_t many[] = {0x6d, 0x61, 0x6e, 0x79, 0x20}; - uint8_t bitstream[] = {0x62, 0x69, 0x74, 0x73, 0x74, 0x72, - 0x65, 0x61, 0x6d, 0x2c, 0x20}; - uint8_t such[] = {0x73, 0x75, 0x63, 0x68, 0x20}; - uint8_t data[] = {0x64, 0x61, 0x74, 0x61, 0x0}; - - const uint16_t seq_num = Rand(); - - Insert(seq_num, kKeyFrame, kFirst, kNotLast, many); - Insert(seq_num + 1, kDeltaFrame, kNotFirst, kNotLast, bitstream); - Insert(seq_num + 2, kDeltaFrame, kNotFirst, kNotLast, such); - auto frames = Insert(seq_num + 3, kDeltaFrame, kNotFirst, kLast, data).frames; - - ASSERT_THAT(frames, SizeIs(1)); - EXPECT_EQ(frames[0]->first_seq_num(), seq_num); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), frames[0]->size()), - ElementsAreArray("many bitstream, such data")); -} - -TEST_F(PacketBufferTest, GetBitstreamOneFrameOnePacket) { - uint8_t bitstream[] = "All the bitstream data for this frame!"; - - auto frames = Insert(0, kKeyFrame, kFirst, kLast, bitstream).frames; - ASSERT_THAT(StartSeqNums(frames), ElementsAre(0)); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), frames[0]->size()), - ElementsAreArray(bitstream)); -} - -TEST_F(PacketBufferTest, GetBitstreamOneFrameFullBuffer) { - uint8_t data_arr[kStartSize][1]; - uint8_t expected[kStartSize]; - - for (uint8_t i = 0; i < kStartSize; ++i) { - data_arr[i][0] = i; - expected[i] = i; - } - - Insert(0, kKeyFrame, kFirst, kNotLast, data_arr[0]); - for (uint8_t i = 1; i < kStartSize - 1; ++i) - Insert(i, kKeyFrame, kNotFirst, kNotLast, data_arr[i]); - auto frames = Insert(kStartSize - 1, kKeyFrame, kNotFirst, kLast, - data_arr[kStartSize - 1]) - .frames; - - ASSERT_THAT(StartSeqNums(frames), ElementsAre(0)); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), frames[0]->size()), - ElementsAreArray(expected)); -} - -TEST_F(PacketBufferTest, GetBitstreamAv1) { - const uint8_t data1[] = {0b01'01'0000, 0b0'0100'000, 'm', 'a', 'n', 'y', ' '}; - const uint8_t data2[] = {0b10'01'0000, 'b', 'i', 't', 's', 0}; - - auto packet1 = 
std::make_unique(); - packet1->video_header.codec = kVideoCodecAV1; - packet1->seq_num = 13; - packet1->video_header.is_first_packet_in_frame = true; - packet1->video_header.is_last_packet_in_frame = false; - packet1->video_payload = data1; - auto frames = packet_buffer_.InsertPacket(std::move(packet1)).frames; - EXPECT_THAT(frames, IsEmpty()); - - auto packet2 = std::make_unique(); - packet2->video_header.codec = kVideoCodecAV1; - packet2->seq_num = 14; - packet2->video_header.is_first_packet_in_frame = false; - packet2->video_header.is_last_packet_in_frame = true; - packet2->video_payload = data2; - frames = packet_buffer_.InsertPacket(std::move(packet2)).frames; - - ASSERT_THAT(frames, SizeIs(1)); - EXPECT_EQ(frames[0]->first_seq_num(), 13); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), 2), - ElementsAre(0b0'0100'010, 10)); // obu_header and obu_size. - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data() + 2, frames[0]->size() - 2), - ElementsAreArray("many bits")); -} - -TEST_F(PacketBufferTest, GetBitstreamInvalidAv1) { - // Two av1 payloads that can't be combined into proper frame. 
- const uint8_t data1[] = {0b01'01'0000, 0b0'0100'000, 'm', 'a', 'n', 'y', ' '}; - const uint8_t data2[] = {0b00'01'0000, 'b', 'i', 't', 's', 0}; - - auto packet1 = std::make_unique(); - packet1->video_header.codec = kVideoCodecAV1; - packet1->seq_num = 13; - packet1->video_header.is_first_packet_in_frame = true; - packet1->video_header.is_last_packet_in_frame = false; - packet1->video_payload = data1; - auto frames = packet_buffer_.InsertPacket(std::move(packet1)).frames; - EXPECT_THAT(frames, IsEmpty()); - - auto packet2 = std::make_unique(); - packet2->video_header.codec = kVideoCodecAV1; - packet2->seq_num = 14; - packet2->video_header.is_first_packet_in_frame = false; - packet2->video_header.is_last_packet_in_frame = true; - packet2->video_payload = data2; - frames = packet_buffer_.InsertPacket(std::move(packet2)).frames; - - EXPECT_THAT(frames, IsEmpty()); -} - TEST_F(PacketBufferTest, InsertPacketAfterSequenceNumberWrapAround) { uint16_t kFirstSeqNum = 0; uint32_t kTimestampDelta = 100; @@ -510,9 +375,11 @@ TEST_F(PacketBufferTest, InsertPacketAfterSequenceNumberWrapAround) { for (int i = 0; i < 5; ++i) { Insert(seq_num++, kKeyFrame, kNotFirst, kNotLast, {}, timestamp); } - EXPECT_THAT( - Insert(seq_num++, kKeyFrame, kNotFirst, kLast, {}, timestamp).frames, - SizeIs(1)); + auto packets = + Insert(seq_num++, kKeyFrame, kNotFirst, kLast, {}, timestamp).packets; + // One frame of 7 packets. + EXPECT_THAT(StartSeqNums(packets), SizeIs(1)); + EXPECT_THAT(packets, SizeIs(7)); } // If |sps_pps_idr_is_keyframe| is true, we require keyframes to contain @@ -522,10 +389,11 @@ TEST_F(PacketBufferTest, InsertPacketAfterSequenceNumberWrapAround) { class PacketBufferH264Test : public PacketBufferTest { protected: explicit PacketBufferH264Test(bool sps_pps_idr_is_keyframe) - : PacketBufferTest(sps_pps_idr_is_keyframe - ? 
"WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/" - : ""), - sps_pps_idr_is_keyframe_(sps_pps_idr_is_keyframe) {} + : PacketBufferTest(), sps_pps_idr_is_keyframe_(sps_pps_idr_is_keyframe) { + if (sps_pps_idr_is_keyframe) { + packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); + } + } PacketBufferInsertResult InsertH264( uint16_t seq_num, // packet sequence number @@ -614,7 +482,7 @@ TEST_P(PacketBufferH264ParameterizedTest, DontRemoveMissingPacketOnClearTo) { InsertH264(2, kDeltaFrame, kFirst, kNotLast, 2); packet_buffer_.ClearTo(0); // Expect no frame because of missing of packet #1 - EXPECT_THAT(InsertH264(3, kDeltaFrame, kNotFirst, kLast, 2).frames, + EXPECT_THAT(InsertH264(3, kDeltaFrame, kNotFirst, kLast, 2).packets, IsEmpty()); } @@ -632,17 +500,19 @@ TEST_P(PacketBufferH264ParameterizedTest, GetBitstreamOneFrameFullBuffer) { InsertH264(i, kKeyFrame, kNotFirst, kNotLast, 1, data_arr[i]); } - auto frames = InsertH264(kStartSize - 1, kKeyFrame, kNotFirst, kLast, 1, - data_arr[kStartSize - 1]) - .frames; - ASSERT_THAT(StartSeqNums(frames), ElementsAre(0)); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), frames[0]->size()), - ElementsAreArray(expected)); + auto packets = InsertH264(kStartSize - 1, kKeyFrame, kNotFirst, kLast, 1, + data_arr[kStartSize - 1]) + .packets; + ASSERT_THAT(StartSeqNums(packets), ElementsAre(0)); + EXPECT_THAT(packets, SizeIs(kStartSize)); + for (size_t i = 0; i < packets.size(); ++i) { + EXPECT_THAT(packets[i]->video_payload, SizeIs(1)) << "Packet #" << i; + } } TEST_P(PacketBufferH264ParameterizedTest, GetBitstreamBufferPadding) { uint16_t seq_num = Rand(); - uint8_t data[] = "some plain old data"; + rtc::CopyOnWriteBuffer data = "some plain old data"; auto packet = std::make_unique(); auto& h264_header = @@ -655,14 +525,11 @@ TEST_P(PacketBufferH264ParameterizedTest, GetBitstreamBufferPadding) { packet->video_payload = data; packet->video_header.is_first_packet_in_frame = true; packet->video_header.is_last_packet_in_frame = true; - auto frames 
= packet_buffer_.InsertPacket(std::move(packet)).frames; + auto frames = packet_buffer_.InsertPacket(std::move(packet)).packets; ASSERT_THAT(frames, SizeIs(1)); - EXPECT_EQ(frames[0]->first_seq_num(), seq_num); - EXPECT_EQ(frames[0]->EncodedImage().size(), sizeof(data)); - EXPECT_EQ(frames[0]->EncodedImage().capacity(), sizeof(data)); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), frames[0]->size()), - ElementsAreArray(data)); + EXPECT_EQ(frames[0]->seq_num, seq_num); + EXPECT_EQ(frames[0]->video_payload, data); } TEST_P(PacketBufferH264ParameterizedTest, FrameResolution) { @@ -672,15 +539,13 @@ TEST_P(PacketBufferH264ParameterizedTest, FrameResolution) { uint32_t height = 360; uint32_t timestamp = 1000; - auto frames = InsertH264(seq_num, kKeyFrame, kFirst, kLast, timestamp, data, - width, height) - .frames; + auto packets = InsertH264(seq_num, kKeyFrame, kFirst, kLast, timestamp, data, + width, height) + .packets; - ASSERT_THAT(frames, SizeIs(1)); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), frames[0]->size()), - ElementsAreArray(data)); - EXPECT_EQ(frames[0]->EncodedImage()._encodedWidth, width); - EXPECT_EQ(frames[0]->EncodedImage()._encodedHeight, height); + ASSERT_THAT(packets, SizeIs(1)); + EXPECT_EQ(packets[0]->video_header.width, width); + EXPECT_EQ(packets[0]->video_header.height, height); } TEST_P(PacketBufferH264ParameterizedTest, FrameResolutionNaluBeforeSPS) { @@ -690,18 +555,13 @@ TEST_P(PacketBufferH264ParameterizedTest, FrameResolutionNaluBeforeSPS) { uint32_t height = 360; uint32_t timestamp = 1000; - auto frames = InsertH264KeyFrameWithAud(seq_num, kKeyFrame, kFirst, kLast, - timestamp, data, width, height) - .frames; - - ASSERT_THAT(StartSeqNums(frames), ElementsAre(seq_num)); + auto packets = InsertH264KeyFrameWithAud(seq_num, kKeyFrame, kFirst, kLast, + timestamp, data, width, height) + .packets; - EXPECT_EQ(frames[0]->EncodedImage().size(), sizeof(data)); - EXPECT_EQ(frames[0]->EncodedImage().capacity(), sizeof(data)); - 
EXPECT_EQ(frames[0]->EncodedImage()._encodedWidth, width); - EXPECT_EQ(frames[0]->EncodedImage()._encodedHeight, height); - EXPECT_THAT(rtc::MakeArrayView(frames[0]->data(), frames[0]->size()), - ElementsAreArray(data)); + ASSERT_THAT(StartSeqNums(packets), ElementsAre(seq_num)); + EXPECT_EQ(packets[0]->video_header.width, width); + EXPECT_EQ(packets[0]->video_header.height, height); } TEST_F(PacketBufferTest, FreeSlotsOnFrameCreation) { @@ -740,20 +600,20 @@ TEST_F(PacketBufferTest, FramesAfterClear) { Insert(9025, kDeltaFrame, kFirst, kLast); Insert(9024, kKeyFrame, kFirst, kLast); packet_buffer_.ClearTo(9025); - EXPECT_THAT(Insert(9057, kDeltaFrame, kFirst, kLast).frames, SizeIs(1)); - EXPECT_THAT(Insert(9026, kDeltaFrame, kFirst, kLast).frames, SizeIs(1)); + EXPECT_THAT(Insert(9057, kDeltaFrame, kFirst, kLast).packets, SizeIs(1)); + EXPECT_THAT(Insert(9026, kDeltaFrame, kFirst, kLast).packets, SizeIs(1)); } TEST_F(PacketBufferTest, SameFrameDifferentTimestamps) { Insert(0, kKeyFrame, kFirst, kNotLast, {}, 1000); - EXPECT_THAT(Insert(1, kKeyFrame, kNotFirst, kLast, {}, 1001).frames, + EXPECT_THAT(Insert(1, kKeyFrame, kNotFirst, kLast, {}, 1001).packets, IsEmpty()); } TEST_F(PacketBufferTest, ContinuousSeqNumDoubleMarkerBit) { Insert(2, kKeyFrame, kNotFirst, kNotLast); Insert(1, kKeyFrame, kFirst, kLast); - EXPECT_THAT(Insert(3, kKeyFrame, kNotFirst, kLast).frames, IsEmpty()); + EXPECT_THAT(Insert(3, kKeyFrame, kNotFirst, kLast).packets, IsEmpty()); } TEST_F(PacketBufferTest, PacketTimestamps) { @@ -826,7 +686,8 @@ TEST_F(PacketBufferTest, IncomingCodecChange) { packet->timestamp = 1; packet->seq_num = 1; packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, SizeIs(1)); + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, + SizeIs(1)); packet = std::make_unique(); packet->video_header.is_first_packet_in_frame = true; @@ -838,7 +699,8 @@ TEST_F(PacketBufferTest, 
IncomingCodecChange) { packet->timestamp = 3; packet->seq_num = 3; packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, IsEmpty()); + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, + IsEmpty()); packet = std::make_unique(); packet->video_header.is_first_packet_in_frame = true; @@ -848,7 +710,8 @@ TEST_F(PacketBufferTest, IncomingCodecChange) { packet->timestamp = 2; packet->seq_num = 2; packet->video_header.frame_type = VideoFrameType::kVideoFrameDelta; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, SizeIs(2)); + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, + SizeIs(2)); } TEST_F(PacketBufferTest, TooManyNalusInPacket) { @@ -862,7 +725,8 @@ TEST_F(PacketBufferTest, TooManyNalusInPacket) { auto& h264_header = packet->video_header.video_type_header.emplace(); h264_header.nalus_length = kMaxNalusPerPacket; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, IsEmpty()); + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, + IsEmpty()); } TEST_P(PacketBufferH264ParameterizedTest, OneFrameFillBuffer) { @@ -874,7 +738,7 @@ TEST_P(PacketBufferH264ParameterizedTest, OneFrameFillBuffer) { } TEST_P(PacketBufferH264ParameterizedTest, CreateFramesAfterFilledBuffer) { - EXPECT_THAT(InsertH264(kStartSize - 2, kKeyFrame, kFirst, kLast, 0).frames, + EXPECT_THAT(InsertH264(kStartSize - 2, kKeyFrame, kFirst, kLast, 0).packets, SizeIs(1)); InsertH264(kStartSize, kDeltaFrame, kFirst, kNotLast, 2000); @@ -882,7 +746,7 @@ TEST_P(PacketBufferH264ParameterizedTest, CreateFramesAfterFilledBuffer) { InsertH264(kStartSize + i, kDeltaFrame, kNotFirst, kNotLast, 2000); EXPECT_THAT( InsertH264(kStartSize + kStartSize, kDeltaFrame, kNotFirst, kLast, 2000) - .frames, + .packets, IsEmpty()); EXPECT_THAT(InsertH264(kStartSize - 1, kKeyFrame, kFirst, kLast, 1000), @@ -908,7 +772,7 @@ 
TEST_P(PacketBufferH264ParameterizedTest, ClearMissingPacketsOnKeyframe) { TEST_P(PacketBufferH264ParameterizedTest, FindFramesOnPadding) { EXPECT_THAT(InsertH264(0, kKeyFrame, kFirst, kLast, 1000), StartSeqNumsAre(0)); - EXPECT_THAT(InsertH264(2, kDeltaFrame, kFirst, kLast, 1000).frames, + EXPECT_THAT(InsertH264(2, kDeltaFrame, kFirst, kLast, 1000).packets, IsEmpty()); EXPECT_THAT(packet_buffer_.InsertPadding(1), StartSeqNumsAre(2)); @@ -945,7 +809,7 @@ TEST_F(PacketBufferH264IdrIsKeyframeTest, IdrIsKeyframe) { packet->video_header.video_type_header.emplace(); h264_header.nalus[0].type = H264::NaluType::kIdr; h264_header.nalus_length = 1; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(KeyFrame())); } @@ -958,7 +822,7 @@ TEST_F(PacketBufferH264IdrIsKeyframeTest, SpsPpsIdrIsKeyframe) { h264_header.nalus[2].type = H264::NaluType::kIdr; h264_header.nalus_length = 3; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(KeyFrame())); } @@ -976,7 +840,7 @@ TEST_F(PacketBufferH264SpsPpsIdrIsKeyframeTest, IdrIsNotKeyframe) { h264_header.nalus[0].type = H264::NaluType::kIdr; h264_header.nalus_length = 1; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(DeltaFrame())); } @@ -988,7 +852,7 @@ TEST_F(PacketBufferH264SpsPpsIdrIsKeyframeTest, SpsPpsIsNotKeyframe) { h264_header.nalus[1].type = H264::NaluType::kPps; h264_header.nalus_length = 2; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(DeltaFrame())); } @@ -1001,7 +865,7 @@ TEST_F(PacketBufferH264SpsPpsIdrIsKeyframeTest, SpsPpsIdrIsKeyframe) { h264_header.nalus[2].type = H264::NaluType::kIdr; h264_header.nalus_length = 3; - 
EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).frames, + EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(KeyFrame())); } diff --git a/modules/video_coding/receiver.cc b/modules/video_coding/receiver.cc index 2db4e211b1..6b942fbe57 100644 --- a/modules/video_coding/receiver.cc +++ b/modules/video_coding/receiver.cc @@ -161,18 +161,6 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, frame->SetRenderTime(render_time_ms); TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->Timestamp(), "SetRenderTS", "render_time", frame->RenderTimeMs()); - if (!frame->Complete()) { - // Update stats for incomplete frames. - bool retransmitted = false; - const int64_t last_packet_time_ms = - jitter_buffer_.LastPacketTime(frame, &retransmitted); - if (last_packet_time_ms >= 0 && !retransmitted) { - // We don't want to include timestamps which have suffered from - // retransmission here, since we compensate with extra retransmission - // delay within the jitter estimate. 
- timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms); - } - } return frame; } diff --git a/modules/video_coding/receiver.h b/modules/video_coding/receiver.h index a3033e643a..8f6b041a5a 100644 --- a/modules/video_coding/receiver.h +++ b/modules/video_coding/receiver.h @@ -14,13 +14,12 @@ #include #include +#include "modules/video_coding/event_wrapper.h" #include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/jitter_buffer.h" #include "modules/video_coding/packet.h" #include "modules/video_coding/timing.h" -#include "rtc_base/critical_section.h" -#include "system_wrappers/include/event_wrapper.h" namespace webrtc { diff --git a/modules/video_coding/receiver_unittest.cc b/modules/video_coding/receiver_unittest.cc index 2d9a92e6bd..2585056023 100644 --- a/modules/video_coding/receiver_unittest.cc +++ b/modules/video_coding/receiver_unittest.cc @@ -350,8 +350,8 @@ class FrameInjectEvent : public EventWrapper { bool Set() override { return true; } - EventTypeWrapper Wait(unsigned long max_time) override { // NOLINT - if (clock_->AdvanceTimeMilliseconds(max_time, stop_on_frame_) && + EventTypeWrapper Wait(int max_time_ms) override { + if (clock_->AdvanceTimeMilliseconds(max_time_ms, stop_on_frame_) && stop_on_frame_) { return EventTypeWrapper::kEventSignaled; } else { diff --git a/modules/video_coding/rtp_frame_reference_finder.cc b/modules/video_coding/rtp_frame_reference_finder.cc index 3767161e36..2a43c275d6 100644 --- a/modules/video_coding/rtp_frame_reference_finder.cc +++ b/modules/video_coding/rtp_frame_reference_finder.cc @@ -108,8 +108,6 @@ RtpFrameReferenceFinder::ManageFrameInternal(RtpFrameObject* frame) { return ManageFrameVp8(frame); case kVideoCodecVP9: return ManageFrameVp9(frame); - case kVideoCodecH264: - return ManageFrameH264(frame); case kVideoCodecGeneric: if (auto* generic_header = absl::get_if( &frame->GetRtpVideoHeader().video_type_header)) 
{ @@ -173,9 +171,9 @@ void RtpFrameReferenceFinder::UpdateLastPictureIdWithPadding(uint16_t seq_num) { // the keyframe they belong to due to wrapping sequence number. In order // to prevent this we advance the picture id of the keyframe every so often. if (ForwardDiff(gop_seq_num_it->first, seq_num) > 10000) { - RTC_DCHECK_EQ(1ul, last_seq_num_gop_.size()); - last_seq_num_gop_[seq_num] = gop_seq_num_it->second; - last_seq_num_gop_.erase(gop_seq_num_it); + auto save = gop_seq_num_it->second; + last_seq_num_gop_.clear(); + last_seq_num_gop_[seq_num] = save; } } @@ -291,6 +289,16 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp8( if (last_picture_id_ == -1) last_picture_id_ = frame->id.picture_id; + // Clean up info about not yet received frames that are too old. + uint16_t old_picture_id = + Subtract(frame->id.picture_id, kMaxNotYetReceivedFrames); + auto clean_frames_to = not_yet_received_frames_.lower_bound(old_picture_id); + not_yet_received_frames_.erase(not_yet_received_frames_.begin(), + clean_frames_to); + // Avoid re-adding picture ids that were just erased. + if (AheadOf(old_picture_id, last_picture_id_)) { + last_picture_id_ = old_picture_id; + } // Find if there has been a gap in fully received frames and save the picture // id of those frames in |not_yet_received_frames_|. if (AheadOf(frame->id.picture_id, last_picture_id_)) { @@ -307,13 +315,6 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp8( auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx); layer_info_.erase(layer_info_.begin(), clean_layer_info_to); - // Clean up info about not yet received frames that are too old. 
- uint16_t old_picture_id = - Subtract(frame->id.picture_id, kMaxNotYetReceivedFrames); - auto clean_frames_to = not_yet_received_frames_.lower_bound(old_picture_id); - not_yet_received_frames_.erase(not_yet_received_frames_.begin(), - clean_frames_to); - if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { if (codec_header.temporalIdx != 0) { return kDrop; @@ -338,7 +339,16 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp8( layer_info_it = layer_info_.emplace(unwrapped_tl0, layer_info_it->second).first; frame->num_references = 1; - frame->references[0] = layer_info_it->second[0]; + int64_t last_pid_on_layer = layer_info_it->second[0]; + + // Is this an old frame that has already been used to update the state? If + // so, drop it. + if (AheadOrAt(last_pid_on_layer, + frame->id.picture_id)) { + return kDrop; + } + + frame->references[0] = last_pid_on_layer; UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); return kHandOff; } @@ -346,8 +356,17 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp8( // Layer sync frame, this frame only references its base layer frame. if (codec_header.layerSync) { frame->num_references = 1; - frame->references[0] = layer_info_it->second[0]; + int64_t last_pid_on_layer = layer_info_it->second[codec_header.temporalIdx]; + + // Is this an old frame that has already been used to update the state? If + // so, drop it. + if (last_pid_on_layer != -1 && + AheadOrAt(last_pid_on_layer, + frame->id.picture_id)) { + return kDrop; + } + frame->references[0] = layer_info_it->second[0]; UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); return kHandOff; } @@ -432,7 +451,8 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp9( } // Protect against corrupted packets with arbitrary large temporal idx. 
- if (codec_header.temporal_idx >= kMaxTemporalLayers) + if (codec_header.temporal_idx >= kMaxTemporalLayers || + codec_header.spatial_idx >= kMaxSpatialLayers) return kDrop; frame->id.spatial_layer = codec_header.spatial_idx; @@ -474,6 +494,12 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp9( return kDrop; } + for (size_t i = 0; i < codec_header.gof.num_frames_in_gof; ++i) { + if (codec_header.gof.num_ref_pics[i] > kMaxVp9RefPics) { + return kDrop; + } + } + GofInfoVP9 gof = codec_header.gof; if (gof.num_frames_in_gof == 0) { RTC_LOG(LS_WARNING) << "Number of frames in GOF is zero. Assume " @@ -560,6 +586,9 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp9( frame->id.picture_id); size_t gof_idx = diff % info->gof->num_frames_in_gof; + if (info->gof->num_ref_pics[gof_idx] > EncodedFrame::kMaxFrameReferences) { + return kDrop; + } // Populate references according to the scalability structure. frame->num_references = info->gof->num_ref_pics[gof_idx]; for (size_t i = 0; i < frame->num_references; ++i) { @@ -684,130 +713,6 @@ void RtpFrameReferenceFinder::UnwrapPictureIds(RtpFrameObject* frame) { frame->id.picture_id = unwrapper_.Unwrap(frame->id.picture_id); } -RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameH264( - RtpFrameObject* frame) { - const FrameMarking& rtp_frame_marking = frame->GetFrameMarking(); - - uint8_t tid = rtp_frame_marking.temporal_id; - bool blSync = rtp_frame_marking.base_layer_sync; - - if (tid == kNoTemporalIdx) - return ManageFramePidOrSeqNum(std::move(frame), kNoPictureId); - - // Protect against corrupted packets with arbitrary large temporal idx. - if (tid >= kMaxTemporalLayers) - return kDrop; - - frame->id.picture_id = frame->last_seq_num(); - - if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - // For H264, use last_seq_num_gop_ to simply store last picture id - // as a pair of unpadded and padded sequence numbers. 
- if (last_seq_num_gop_.empty()) { - last_seq_num_gop_.insert(std::make_pair( - 0, std::make_pair(frame->id.picture_id, frame->id.picture_id))); - } - } - - // Stash if we have no keyframe yet. - if (last_seq_num_gop_.empty()) - return kStash; - - // Check for gap in sequence numbers. Store in |not_yet_received_seq_num_|. - if (frame->frame_type() == VideoFrameType::kVideoFrameDelta) { - uint16_t last_pic_id_padded = last_seq_num_gop_.begin()->second.second; - if (AheadOf(frame->id.picture_id, last_pic_id_padded)) { - do { - last_pic_id_padded = last_pic_id_padded + 1; - not_yet_received_seq_num_.insert(last_pic_id_padded); - } while (last_pic_id_padded != frame->id.picture_id); - } - } - - int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(rtp_frame_marking.tl0_pic_idx); - - // Clean up info for base layers that are too old. - int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo; - auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx); - layer_info_.erase(layer_info_.begin(), clean_layer_info_to); - - // Clean up info about not yet received frames that are too old. - uint16_t old_picture_id = frame->id.picture_id - kMaxNotYetReceivedFrames * 2; - auto clean_frames_to = not_yet_received_seq_num_.lower_bound(old_picture_id); - not_yet_received_seq_num_.erase(not_yet_received_seq_num_.begin(), - clean_frames_to); - - if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - frame->num_references = 0; - layer_info_[unwrapped_tl0].fill(-1); - UpdateDataH264(frame, unwrapped_tl0, tid); - return kHandOff; - } - - auto layer_info_it = - layer_info_.find(tid == 0 ? unwrapped_tl0 - 1 : unwrapped_tl0); - - // Stash if we have no base layer frame yet. - if (layer_info_it == layer_info_.end()) - return kStash; - - // Base layer frame. Copy layer info from previous base layer frame. 
- if (tid == 0) { - layer_info_it = - layer_info_.insert(std::make_pair(unwrapped_tl0, layer_info_it->second)) - .first; - frame->num_references = 1; - frame->references[0] = layer_info_it->second[0]; - UpdateDataH264(frame, unwrapped_tl0, tid); - return kHandOff; - } - - // This frame only references its base layer frame. - if (blSync) { - frame->num_references = 1; - frame->references[0] = layer_info_it->second[0]; - UpdateDataH264(frame, unwrapped_tl0, tid); - return kHandOff; - } - - // Find all references for general frame. - frame->num_references = 0; - for (uint8_t layer = 0; layer <= tid; ++layer) { - // Stash if we have not yet received frames on this temporal layer. - if (layer_info_it->second[layer] == -1) - return kStash; - - // Drop if the last frame on this layer is ahead of this frame. A layer sync - // frame was received after this frame for the same base layer frame. - uint16_t last_frame_in_layer = layer_info_it->second[layer]; - if (AheadOf(last_frame_in_layer, frame->id.picture_id)) - return kDrop; - - // Stash and wait for missing frame between this frame and the reference - auto not_received_seq_num_it = - not_yet_received_seq_num_.upper_bound(last_frame_in_layer); - if (not_received_seq_num_it != not_yet_received_seq_num_.end() && - AheadOf(frame->id.picture_id, *not_received_seq_num_it)) { - return kStash; - } - - if (!(AheadOf(frame->id.picture_id, last_frame_in_layer))) { - RTC_LOG(LS_WARNING) << "Frame with picture id " << frame->id.picture_id - << " and packet range [" << frame->first_seq_num() - << ", " << frame->last_seq_num() - << "] already received, " - " dropping frame."; - return kDrop; - } - - ++frame->num_references; - frame->references[layer] = last_frame_in_layer; - } - - UpdateDataH264(frame, unwrapped_tl0, tid); - return kHandOff; -} - void RtpFrameReferenceFinder::UpdateLastPictureIdWithPaddingH264() { auto seq_num_it = last_seq_num_gop_.begin(); diff --git a/modules/video_coding/rtp_frame_reference_finder.h 
b/modules/video_coding/rtp_frame_reference_finder.h index d9c7c72d1e..ed67b91fed 100644 --- a/modules/video_coding/rtp_frame_reference_finder.h +++ b/modules/video_coding/rtp_frame_reference_finder.h @@ -21,7 +21,6 @@ #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" -#include "rtc_base/critical_section.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/thread_annotations.h" diff --git a/modules/video_coding/rtp_frame_reference_finder_unittest.cc b/modules/video_coding/rtp_frame_reference_finder_unittest.cc index 29fdba53a5..0c08ddd302 100644 --- a/modules/video_coding/rtp_frame_reference_finder_unittest.cc +++ b/modules/video_coding/rtp_frame_reference_finder_unittest.cc @@ -32,13 +32,11 @@ std::unique_ptr CreateFrame( uint16_t seq_num_end, bool keyframe, VideoCodecType codec, - const RTPVideoTypeHeader& video_type_header, - const FrameMarking& frame_markings) { + const RTPVideoTypeHeader& video_type_header) { RTPVideoHeader video_header; video_header.frame_type = keyframe ? 
VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; video_header.video_type_header = video_type_header; - video_header.frame_marking = frame_markings; // clang-format off return std::make_unique( @@ -57,7 +55,6 @@ std::unique_ptr CreateFrame( VideoContentType::UNSPECIFIED, video_header, /*color_space=*/absl::nullopt, - /*generic_descriptor=*/absl::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on @@ -93,7 +90,7 @@ class TestRtpFrameReferenceFinder : public ::testing::Test, bool keyframe) { std::unique_ptr frame = CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecGeneric, - RTPVideoTypeHeader(), FrameMarking()); + RTPVideoTypeHeader()); reference_finder_->ManageFrame(std::move(frame)); } @@ -111,9 +108,8 @@ class TestRtpFrameReferenceFinder : public ::testing::Test, vp8_header.tl0PicIdx = tl0; vp8_header.layerSync = sync; - std::unique_ptr frame = - CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP8, - vp8_header, FrameMarking()); + std::unique_ptr frame = CreateFrame( + seq_num_start, seq_num_end, keyframe, kVideoCodecVP8, vp8_header); reference_finder_->ManageFrame(std::move(frame)); } @@ -141,9 +137,8 @@ class TestRtpFrameReferenceFinder : public ::testing::Test, vp9_header.gof = *ss; } - std::unique_ptr frame = - CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, - vp9_header, FrameMarking()); + std::unique_ptr frame = CreateFrame( + seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, vp9_header); reference_finder_->ManageFrame(std::move(frame)); } @@ -167,26 +162,15 @@ class TestRtpFrameReferenceFinder : public ::testing::Test, for (size_t i = 0; i < refs.size(); ++i) vp9_header.pid_diff[i] = refs[i]; - std::unique_ptr frame = - CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, - vp9_header, FrameMarking()); + std::unique_ptr frame = CreateFrame( + seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, vp9_header); 
reference_finder_->ManageFrame(std::move(frame)); } - void InsertH264(uint16_t seq_num_start, - uint16_t seq_num_end, - bool keyframe, - uint8_t tid = kNoTemporalIdx, - int32_t tl0 = kNoTl0PicIdx, - bool sync = false) { - FrameMarking frame_marking{}; - frame_marking.temporal_id = tid; - frame_marking.tl0_pic_idx = tl0; - frame_marking.base_layer_sync = sync; - + void InsertH264(uint16_t seq_num_start, uint16_t seq_num_end, bool keyframe) { std::unique_ptr frame = CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecH264, - RTPVideoTypeHeader(), frame_marking); + RTPVideoTypeHeader()); reference_finder_->ManageFrame(std::move(frame)); } @@ -309,6 +293,12 @@ TEST_F(TestRtpFrameReferenceFinder, AdvanceSavedKeyframe) { EXPECT_EQ(6UL, frames_from_callback_.size()); } +TEST_F(TestRtpFrameReferenceFinder, AdvanceSavedKeyframeBigJump) { + InsertVp9Flex(0, 0, true); + InsertVp9Flex(1, 1, true); + reference_finder_->PaddingReceived(32768); +} + TEST_F(TestRtpFrameReferenceFinder, ClearTo) { uint16_t sn = Rand(); @@ -407,6 +397,50 @@ TEST_F(TestRtpFrameReferenceFinder, Vp8KeyFrameReferences) { CheckReferencesVp8(sn); } +TEST_F(TestRtpFrameReferenceFinder, Vp8RepeatedFrame_0) { + uint16_t pid = Rand(); + uint16_t sn = Rand(); + + InsertVp8(sn, sn, true, pid, 0, 1); + InsertVp8(sn + 1, sn + 1, false, pid + 1, 0, 2); + InsertVp8(sn + 1, sn + 1, false, pid + 1, 0, 2); + + ASSERT_EQ(2UL, frames_from_callback_.size()); + CheckReferencesVp8(pid); + CheckReferencesVp8(pid + 1, pid); +} + +TEST_F(TestRtpFrameReferenceFinder, Vp8RepeatedFrameLayerSync_01) { + uint16_t pid = Rand(); + uint16_t sn = Rand(); + + InsertVp8(sn, sn, true, pid, 0, 1); + InsertVp8(sn + 1, sn + 1, false, pid + 1, 1, 1, true); + ASSERT_EQ(2UL, frames_from_callback_.size()); + InsertVp8(sn + 1, sn + 1, false, pid + 1, 1, 1, true); + + ASSERT_EQ(2UL, frames_from_callback_.size()); + CheckReferencesVp8(pid); + CheckReferencesVp8(pid + 1, pid); +} + +TEST_F(TestRtpFrameReferenceFinder, Vp8RepeatedFrame_01) 
{ + uint16_t pid = Rand(); + uint16_t sn = Rand(); + + InsertVp8(sn, sn, true, pid, 0, 1); + InsertVp8(sn + 1, sn + 1, false, pid + 1, 0, 2, true); + InsertVp8(sn + 2, sn + 2, false, pid + 2, 0, 3); + InsertVp8(sn + 3, sn + 3, false, pid + 3, 0, 4); + InsertVp8(sn + 3, sn + 3, false, pid + 3, 0, 4); + + ASSERT_EQ(4UL, frames_from_callback_.size()); + CheckReferencesVp8(pid); + CheckReferencesVp8(pid + 1, pid); + CheckReferencesVp8(pid + 2, pid + 1); + CheckReferencesVp8(pid + 3, pid + 2); +} + // Test with 1 temporal layer. TEST_F(TestRtpFrameReferenceFinder, Vp8TemporalLayers_0) { uint16_t pid = Rand(); @@ -1391,53 +1425,46 @@ TEST_F(TestRtpFrameReferenceFinder, H264KeyFrameReferences) { CheckReferencesH264(sn); } -// Test with 1 temporal layer. -TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_0) { - uint16_t sn = Rand(); +TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrap) { + uint16_t sn = 0xFFFF; - InsertH264(sn, sn, true, 0, 1); - InsertH264(sn + 1, sn + 1, false, 0, 2); - InsertH264(sn + 2, sn + 2, false, 0, 3); - InsertH264(sn + 3, sn + 3, false, 0, 4); + InsertH264(sn - 1, sn - 1, true); + InsertH264(sn, sn, false); + InsertH264(sn + 1, sn + 1, false); + InsertH264(sn + 2, sn + 2, false); ASSERT_EQ(4UL, frames_from_callback_.size()); - CheckReferencesH264(sn); + CheckReferencesH264(sn - 1); + CheckReferencesH264(sn, sn - 1); CheckReferencesH264(sn + 1, sn); CheckReferencesH264(sn + 2, sn + 1); - CheckReferencesH264(sn + 3, sn + 2); } -TEST_F(TestRtpFrameReferenceFinder, H264DuplicateTl1Frames) { +TEST_F(TestRtpFrameReferenceFinder, H264Frames) { uint16_t sn = Rand(); - InsertH264(sn, sn, true, 0, 0); - InsertH264(sn + 1, sn + 1, false, 1, 0, true); - InsertH264(sn + 2, sn + 2, false, 0, 1); - InsertH264(sn + 3, sn + 3, false, 1, 1); - InsertH264(sn + 3, sn + 3, false, 1, 1); - InsertH264(sn + 4, sn + 4, false, 0, 2); - InsertH264(sn + 5, sn + 5, false, 1, 2); + InsertH264(sn, sn, true); + InsertH264(sn + 1, sn + 1, false); + InsertH264(sn + 
2, sn + 2, false); + InsertH264(sn + 3, sn + 3, false); - ASSERT_EQ(6UL, frames_from_callback_.size()); + ASSERT_EQ(4UL, frames_from_callback_.size()); CheckReferencesH264(sn); CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn); - CheckReferencesH264(sn + 3, sn + 1, sn + 2); - CheckReferencesH264(sn + 4, sn + 2); - CheckReferencesH264(sn + 5, sn + 3, sn + 4); + CheckReferencesH264(sn + 2, sn + 1); + CheckReferencesH264(sn + 3, sn + 2); } -// Test with 1 temporal layer. -TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0) { +TEST_F(TestRtpFrameReferenceFinder, H264Reordering) { uint16_t sn = Rand(); - InsertH264(sn, sn, true, 0, 1); - InsertH264(sn + 1, sn + 1, false, 0, 2); - InsertH264(sn + 3, sn + 3, false, 0, 4); - InsertH264(sn + 2, sn + 2, false, 0, 3); - InsertH264(sn + 5, sn + 5, false, 0, 6); - InsertH264(sn + 6, sn + 6, false, 0, 7); - InsertH264(sn + 4, sn + 4, false, 0, 5); + InsertH264(sn, sn, true); + InsertH264(sn + 1, sn + 1, false); + InsertH264(sn + 3, sn + 3, false); + InsertH264(sn + 2, sn + 2, false); + InsertH264(sn + 5, sn + 5, false); + InsertH264(sn + 6, sn + 6, false); + InsertH264(sn + 4, sn + 4, false); ASSERT_EQ(7UL, frames_from_callback_.size()); CheckReferencesH264(sn); @@ -1449,258 +1476,13 @@ TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0) { CheckReferencesH264(sn + 6, sn + 5); } -// Test with 2 temporal layers in a 01 pattern. 
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_01) { - uint16_t sn = Rand(); - - InsertH264(sn, sn, true, 0, 255); - InsertH264(sn + 1, sn + 1, false, 1, 255, true); - InsertH264(sn + 2, sn + 2, false, 0, 0); - InsertH264(sn + 3, sn + 3, false, 1, 0); - - ASSERT_EQ(4UL, frames_from_callback_.size()); - CheckReferencesH264(sn); - CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn); - CheckReferencesH264(sn + 3, sn + 1, sn + 2); -} - -TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersMultiSn_01) { - uint16_t sn = Rand(); - - InsertH264(sn, sn + 3, true, 0, 255); - InsertH264(sn + 4, sn + 5, false, 1, 255, true); - InsertH264(sn + 6, sn + 8, false, 0, 0); - InsertH264(sn + 9, sn + 9, false, 1, 0); - - ASSERT_EQ(4UL, frames_from_callback_.size()); - CheckReferencesH264(sn + 3); - CheckReferencesH264(sn + 5, sn + 3); - CheckReferencesH264(sn + 8, sn + 3); - CheckReferencesH264(sn + 9, sn + 5, sn + 8); -} - -// Test with 2 temporal layers in a 01 pattern. -TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_01) { - uint16_t sn = Rand(); - - InsertH264(sn + 1, sn + 1, false, 1, 255, true); - InsertH264(sn, sn, true, 0, 255); - InsertH264(sn + 3, sn + 3, false, 1, 0); - InsertH264(sn + 5, sn + 5, false, 1, 1); - InsertH264(sn + 2, sn + 2, false, 0, 0); - InsertH264(sn + 4, sn + 4, false, 0, 1); - InsertH264(sn + 6, sn + 6, false, 0, 2); - InsertH264(sn + 7, sn + 7, false, 1, 2); - - ASSERT_EQ(8UL, frames_from_callback_.size()); - CheckReferencesH264(sn); - CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn); - CheckReferencesH264(sn + 3, sn + 1, sn + 2); - CheckReferencesH264(sn + 4, sn + 2); - CheckReferencesH264(sn + 5, sn + 3, sn + 4); - CheckReferencesH264(sn + 6, sn + 4); - CheckReferencesH264(sn + 7, sn + 5, sn + 6); -} - -// Test with 3 temporal layers in a 0212 pattern. 
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_0212) { - uint16_t sn = Rand(); - - InsertH264(sn, sn, true, 0, 55); - InsertH264(sn + 1, sn + 1, false, 2, 55, true); - InsertH264(sn + 2, sn + 2, false, 1, 55, true); - InsertH264(sn + 3, sn + 3, false, 2, 55); - InsertH264(sn + 4, sn + 4, false, 0, 56); - InsertH264(sn + 5, sn + 5, false, 2, 56, true); - InsertH264(sn + 6, sn + 6, false, 1, 56, true); - InsertH264(sn + 7, sn + 7, false, 2, 56); - InsertH264(sn + 8, sn + 8, false, 0, 57); - InsertH264(sn + 9, sn + 9, false, 2, 57, true); - InsertH264(sn + 10, sn + 10, false, 1, 57, true); - InsertH264(sn + 11, sn + 11, false, 2, 57); - - ASSERT_EQ(12UL, frames_from_callback_.size()); - CheckReferencesH264(sn); - CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn); - CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2); - CheckReferencesH264(sn + 4, sn); - CheckReferencesH264(sn + 5, sn + 4); - CheckReferencesH264(sn + 6, sn + 4); - CheckReferencesH264(sn + 7, sn + 4, sn + 5, sn + 6); - CheckReferencesH264(sn + 8, sn + 4); - CheckReferencesH264(sn + 9, sn + 8); - CheckReferencesH264(sn + 10, sn + 8); - CheckReferencesH264(sn + 11, sn + 8, sn + 9, sn + 10); -} - -// Test with 3 temporal layers in a 0212 pattern. -TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersMissingFrame_0212) { - uint16_t sn = Rand(); - - InsertH264(sn, sn, true, 0, 55, false); - InsertH264(sn + 2, sn + 2, false, 1, 55, true); - InsertH264(sn + 3, sn + 3, false, 2, 55, false); - - ASSERT_EQ(2UL, frames_from_callback_.size()); - CheckReferencesH264(sn); - CheckReferencesH264(sn + 2, sn); -} - -// Test with 3 temporal layers in a 0212 pattern. 
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0212) { - uint16_t sn = Rand(); - - InsertH264(sn + 1, sn + 1, false, 2, 55, true); - InsertH264(sn, sn, true, 0, 55, false); - InsertH264(sn + 2, sn + 2, false, 1, 55, true); - InsertH264(sn + 4, sn + 4, false, 0, 56, false); - InsertH264(sn + 5, sn + 5, false, 2, 56, false); - InsertH264(sn + 3, sn + 3, false, 2, 55, false); - InsertH264(sn + 7, sn + 7, false, 2, 56, false); - InsertH264(sn + 9, sn + 9, false, 2, 57, true); - InsertH264(sn + 6, sn + 6, false, 1, 56, false); - InsertH264(sn + 8, sn + 8, false, 0, 57, false); - InsertH264(sn + 11, sn + 11, false, 2, 57, false); - InsertH264(sn + 10, sn + 10, false, 1, 57, true); - - ASSERT_EQ(12UL, frames_from_callback_.size()); - CheckReferencesH264(sn); - CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn); - CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2); - CheckReferencesH264(sn + 4, sn); - CheckReferencesH264(sn + 5, sn + 2, sn + 3, sn + 4); - CheckReferencesH264(sn + 6, sn + 2, sn + 4); - CheckReferencesH264(sn + 7, sn + 4, sn + 5, sn + 6); - CheckReferencesH264(sn + 8, sn + 4); - CheckReferencesH264(sn + 9, sn + 8); - CheckReferencesH264(sn + 10, sn + 8); - CheckReferencesH264(sn + 11, sn + 8, sn + 9, sn + 10); -} - -TEST_F(TestRtpFrameReferenceFinder, H264InsertManyFrames_0212) { - uint16_t sn = Rand(); - - const int keyframes_to_insert = 50; - const int frames_per_keyframe = 120; // Should be a multiple of 4. 
- uint8_t tl0 = 128; - - for (int k = 0; k < keyframes_to_insert; ++k) { - InsertH264(sn, sn, true, 0, tl0, false); - InsertH264(sn + 1, sn + 1, false, 2, tl0, true); - InsertH264(sn + 2, sn + 2, false, 1, tl0, true); - InsertH264(sn + 3, sn + 3, false, 2, tl0, false); - CheckReferencesH264(sn); - CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn); - CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2); - frames_from_callback_.clear(); - ++tl0; - - for (int f = 4; f < frames_per_keyframe; f += 4) { - uint16_t sf = sn + f; - - InsertH264(sf, sf, false, 0, tl0, false); - InsertH264(sf + 1, sf + 1, false, 2, tl0, false); - InsertH264(sf + 2, sf + 2, false, 1, tl0, false); - InsertH264(sf + 3, sf + 3, false, 2, tl0, false); - CheckReferencesH264(sf, sf - 4); - CheckReferencesH264(sf + 1, sf, sf - 1, sf - 2); - CheckReferencesH264(sf + 2, sf, sf - 2); - CheckReferencesH264(sf + 3, sf, sf + 1, sf + 2); - frames_from_callback_.clear(); - ++tl0; - } - - sn += frames_per_keyframe; - } -} - -TEST_F(TestRtpFrameReferenceFinder, H264LayerSync) { - uint16_t sn = Rand(); - - InsertH264(sn, sn, true, 0, 0, false); - InsertH264(sn + 1, sn + 1, false, 1, 0, true); - InsertH264(sn + 2, sn + 2, false, 0, 1, false); - ASSERT_EQ(3UL, frames_from_callback_.size()); - - InsertH264(sn + 4, sn + 4, false, 0, 2, false); - InsertH264(sn + 5, sn + 5, false, 1, 2, true); - InsertH264(sn + 6, sn + 6, false, 0, 3, false); - InsertH264(sn + 7, sn + 7, false, 1, 3, false); - - ASSERT_EQ(7UL, frames_from_callback_.size()); - CheckReferencesH264(sn); - CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn); - CheckReferencesH264(sn + 4, sn + 2); - CheckReferencesH264(sn + 5, sn + 4); - CheckReferencesH264(sn + 6, sn + 4); - CheckReferencesH264(sn + 7, sn + 6, sn + 5); -} - -TEST_F(TestRtpFrameReferenceFinder, H264Tl1SyncFrameAfterTl1Frame) { - InsertH264(1000, 1000, true, 0, 247, true); - InsertH264(1001, 1001, false, 0, 248, false); - InsertH264(1002, 1002, false, 1, 248, 
false); // Will be dropped - InsertH264(1003, 1003, false, 1, 248, true); // due to this frame. - - ASSERT_EQ(3UL, frames_from_callback_.size()); - CheckReferencesH264(1000); - CheckReferencesH264(1001, 1000); - CheckReferencesH264(1003, 1001); -} - -TEST_F(TestRtpFrameReferenceFinder, H264DetectMissingFrame_0212) { - InsertH264(1, 1, true, 0, 1, false); - InsertH264(2, 2, false, 2, 1, true); - InsertH264(3, 3, false, 1, 1, true); - InsertH264(4, 4, false, 2, 1, false); - - InsertH264(6, 6, false, 2, 2, false); - InsertH264(7, 7, false, 1, 2, false); - InsertH264(8, 8, false, 2, 2, false); - ASSERT_EQ(4UL, frames_from_callback_.size()); - - InsertH264(5, 5, false, 0, 2, false); - ASSERT_EQ(8UL, frames_from_callback_.size()); - - CheckReferencesH264(1); - CheckReferencesH264(2, 1); - CheckReferencesH264(3, 1); - CheckReferencesH264(4, 3, 2, 1); - - CheckReferencesH264(5, 1); - CheckReferencesH264(6, 5, 4, 3); - CheckReferencesH264(7, 5, 3); - CheckReferencesH264(8, 7, 6, 5); -} - -TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrap) { - uint16_t sn = 0xFFFF; - - InsertH264(sn - 1, sn - 1, true, 0, 1); - InsertH264(sn, sn, false, 0, 2); - InsertH264(sn + 1, sn + 1, false, 0, 3); - InsertH264(sn + 2, sn + 2, false, 0, 4); - - ASSERT_EQ(4UL, frames_from_callback_.size()); - CheckReferencesH264(sn - 1); - CheckReferencesH264(sn, sn - 1); - CheckReferencesH264(sn + 1, sn); - CheckReferencesH264(sn + 2, sn + 1); -} - TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrapMulti) { uint16_t sn = 0xFFFF; - InsertH264(sn - 3, sn - 2, true, 0, 1); - InsertH264(sn - 1, sn + 1, false, 0, 2); - InsertH264(sn + 2, sn + 3, false, 0, 3); - InsertH264(sn + 4, sn + 7, false, 0, 4); + InsertH264(sn - 3, sn - 2, true); + InsertH264(sn - 1, sn + 1, false); + InsertH264(sn + 2, sn + 3, false); + InsertH264(sn + 4, sn + 7, false); ASSERT_EQ(4UL, frames_from_callback_.size()); CheckReferencesH264(sn - 2); @@ -1709,35 +1491,5 @@ TEST_F(TestRtpFrameReferenceFinder, 
H264SequenceNumberWrapMulti) { CheckReferencesH264(sn + 7, sn + 3); } -TEST_F(TestRtpFrameReferenceFinder, H264Tl0PicIdxWrap) { - int numTl0Wraps = 1000; - int64_t sn = Rand(); - - for (int i = 0; i < numTl0Wraps; i++) { - for (int tl0 = 0; tl0 < 256; tl0 += 16, sn += 16) { - InsertH264(sn, sn, true, 0, tl0); - reference_finder_->ClearTo(sn); // Too many stashed frames cause errors. - - for (int k = 1; k < 8; k++) { - InsertH264(sn + k, sn + k, false, 0, tl0 + k); - } - - // Skip a TL0 index. - for (int k = 9; k < 16; k++) { - InsertH264(sn + k, sn + k, false, 0, tl0 + k); - } - - ASSERT_EQ(8UL, frames_from_callback_.size()); - - CheckReferencesH264(sn); - for (int k = 1; k < 8; k++) { - CheckReferencesH264(sn + k, sn + k - 1); - } - - frames_from_callback_.clear(); - } - } -} - } // namespace video_coding } // namespace webrtc diff --git a/modules/video_coding/session_info.cc b/modules/video_coding/session_info.cc index e51d293607..6a350afb25 100644 --- a/modules/video_coding/session_info.cc +++ b/modules/video_coding/session_info.cc @@ -95,8 +95,6 @@ int VCMSessionInfo::TemporalId() const { return absl::get( packets_.front().video_header.video_type_header) .temporal_idx; - } else if (packets_.front().video_header.codec == kVideoCodecH264) { - return packets_.front().video_header.frame_marking.temporal_id; } else { return kNoTemporalIdx; } @@ -113,8 +111,6 @@ bool VCMSessionInfo::LayerSync() const { return absl::get( packets_.front().video_header.video_type_header) .temporal_up_switch; - } else if (packets_.front().video_header.codec == kVideoCodecH264) { - return packets_.front().video_header.frame_marking.base_layer_sync; } else { return false; } @@ -131,8 +127,6 @@ int VCMSessionInfo::Tl0PicId() const { return absl::get( packets_.front().video_header.video_type_header) .tl0_pic_idx; - } else if (packets_.front().video_header.codec == kVideoCodecH264) { - return packets_.front().video_header.frame_marking.tl0_pic_idx; } else { return kNoTl0PicIdx; } @@ -152,6 
+146,21 @@ std::vector VCMSessionInfo::GetNaluInfos() const { } return nalu_infos; } +#ifndef DISABLE_H265 +std::vector VCMSessionInfo::GetH265NaluInfos() const { + if (packets_.empty() || packets_.front().video_header.codec != kVideoCodecH265) + return std::vector(); + std::vector nalu_infos; + for (const VCMPacket& packet : packets_) { + const auto& h265 = + absl::get(packet.video_header.video_type_header); + for (size_t i = 0; i < h265.nalus_length; ++i) { + nalu_infos.push_back(h265.nalus[i]); + } + } + return nalu_infos; +} +#endif void VCMSessionInfo::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) { if (packets_.empty()) @@ -211,6 +220,11 @@ size_t VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer, // TODO(pbos): Remove H264 parsing from this step and use a fragmentation // header supplied by the H264 depacketizer. const size_t kH264NALHeaderLengthInBytes = 1; +#ifndef DISABLE_H265 + const size_t kH265NALHeaderLengthInBytes = 2; + const auto* h265 = + absl::get_if(&packet.video_header.video_type_header); +#endif const size_t kLengthFieldLength = 2; const auto* h264 = absl::get_if(&packet.video_header.video_type_header); @@ -236,6 +250,36 @@ size_t VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer, packet.sizeBytes = required_length; return packet.sizeBytes; } +#ifndef DISABLE_H265 + else if (h265 && h265->packetization_type == kH265AP) { + // Similar to H264, for H265 aggregation packets, we rely on jitter buffer + // to remove the two length bytes between each NAL unit, and potentially add + // start codes. + size_t required_length = 0; + const uint8_t* nalu_ptr = + packet_buffer + kH265NALHeaderLengthInBytes; // skip payloadhdr + while (nalu_ptr < packet_buffer + packet.sizeBytes) { + size_t length = BufferToUWord16(nalu_ptr); + required_length += + length + (packet.insertStartCode ? 
kH265StartCodeLengthBytes : 0); + nalu_ptr += kLengthFieldLength + length; + } + ShiftSubsequentPackets(packet_it, required_length); + nalu_ptr = packet_buffer + kH265NALHeaderLengthInBytes; + uint8_t* frame_buffer_ptr = frame_buffer + offset; + while (nalu_ptr < packet_buffer + packet.sizeBytes) { + size_t length = BufferToUWord16(nalu_ptr); + nalu_ptr += kLengthFieldLength; + // since H265 shares the same start code as H264, use the same Insert + // function to handle start code. + frame_buffer_ptr += Insert(nalu_ptr, length, packet.insertStartCode, + const_cast(frame_buffer_ptr)); + nalu_ptr += length; + } + packet.sizeBytes = required_length; + return packet.sizeBytes; + } +#endif ShiftSubsequentPackets( packet_it, packet.sizeBytes + (packet.insertStartCode ? kH264StartCodeLengthBytes : 0)); @@ -462,6 +506,20 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet, IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_))) { last_packet_seq_num_ = packet.seqNum; } +#ifndef DISABLE_H265 + } else if (packet.codec() == kVideoCodecH265) { + frame_type_ = packet.video_header.frame_type; + if (packet.is_first_packet_in_frame() && + (first_packet_seq_num_ == -1 || + IsNewerSequenceNumber(first_packet_seq_num_, packet.seqNum))) { + first_packet_seq_num_ = packet.seqNum; + } + if (packet.markerBit && + (last_packet_seq_num_ == -1 || + IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_))) { + last_packet_seq_num_ = packet.seqNum; + } +#endif } else { // Only insert media packets between first and last packets (when // available). 
diff --git a/modules/video_coding/session_info.h b/modules/video_coding/session_info.h index 06a348ef72..b72f9df8b0 100644 --- a/modules/video_coding/session_info.h +++ b/modules/video_coding/session_info.h @@ -65,6 +65,9 @@ class VCMSessionInfo { int Tl0PicId() const; std::vector GetNaluInfos() const; +#ifndef DISABLE_H265 + std::vector GetH265NaluInfos() const; +#endif void SetGofInfo(const GofInfoVP9& gof_info, size_t idx); diff --git a/modules/video_coding/svc/BUILD.gn b/modules/video_coding/svc/BUILD.gn new file mode 100644 index 0000000000..3e93b897b4 --- /dev/null +++ b/modules/video_coding/svc/BUILD.gn @@ -0,0 +1,125 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") + +rtc_source_set("scalable_video_controller") { + sources = [ + "scalable_video_controller.h", + "scalable_video_controller_no_layering.cc", + "scalable_video_controller_no_layering.h", + ] + deps = [ + "../../../api/transport/rtp:dependency_descriptor", + "../../../api/video:video_bitrate_allocation", + "../../../common_video/generic_frame_descriptor", + "../../../rtc_base:checks", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("scalability_structures") { + sources = [ + "create_scalability_structure.cc", + "create_scalability_structure.h", + "scalability_structure_full_svc.cc", + "scalability_structure_full_svc.h", + "scalability_structure_key_svc.cc", + "scalability_structure_key_svc.h", + "scalability_structure_l1t2.cc", + "scalability_structure_l1t2.h", + "scalability_structure_l1t3.cc", + "scalability_structure_l1t3.h", + "scalability_structure_l2t1.cc", + "scalability_structure_l2t1.h", + "scalability_structure_l2t1h.cc", + "scalability_structure_l2t1h.h", + "scalability_structure_l2t2.cc", + "scalability_structure_l2t2.h", + "scalability_structure_l2t2_key_shift.cc", + "scalability_structure_l2t2_key_shift.h", + "scalability_structure_l3t1.cc", + "scalability_structure_l3t1.h", + "scalability_structure_l3t3.cc", + "scalability_structure_l3t3.h", + "scalability_structure_s2t1.cc", + "scalability_structure_s2t1.h", + ] + deps = [ + ":scalable_video_controller", + "../../../api/transport/rtp:dependency_descriptor", + "../../../api/video:video_bitrate_allocation", + "../../../common_video/generic_frame_descriptor", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("svc_rate_allocator") { + sources = [ + 
"svc_rate_allocator.cc", + "svc_rate_allocator.h", + ] + deps = [ + ":scalability_structures", + "../../../api/video:video_bitrate_allocation", + "../../../api/video:video_bitrate_allocator", + "../../../api/video:video_codec_constants", + "../../../api/video_codecs:video_codecs_api", + "../../../rtc_base:checks", + "../../../rtc_base/experiments:stable_target_rate_experiment", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] +} + +if (rtc_include_tests) { + rtc_source_set("scalability_structure_tests") { + testonly = true + sources = [ + "scalability_structure_key_svc_unittest.cc", + "scalability_structure_l2t2_key_shift_unittest.cc", + "scalability_structure_l3t3_unittest.cc", + "scalability_structure_test_helpers.cc", + "scalability_structure_test_helpers.h", + "scalability_structure_unittest.cc", + ] + deps = [ + ":scalability_structures", + ":scalable_video_controller", + "..:chain_diff_calculator", + "..:frame_dependencies_calculator", + "../../../api:array_view", + "../../../api/transport/rtp:dependency_descriptor", + "../../../api/video:video_bitrate_allocation", + "../../../api/video:video_frame_type", + "../../../common_video/generic_frame_descriptor", + "../../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + + rtc_source_set("svc_rate_allocator_tests") { + testonly = true + sources = [ "svc_rate_allocator_unittest.cc" ] + deps = [ + ":svc_rate_allocator", + "..:webrtc_vp9_helpers", + "../../../rtc_base:checks", + "../../../test:field_trial", + "../../../test:test_support", + ] + } +} diff --git a/modules/video_coding/svc/create_scalability_structure.cc b/modules/video_coding/svc/create_scalability_structure.cc new file mode 100644 index 0000000000..4b4a23ed24 --- /dev/null +++ b/modules/video_coding/svc/create_scalability_structure.cc @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/create_scalability_structure.h" + +#include + +#include "absl/strings/string_view.h" +#include "modules/video_coding/svc/scalability_structure_key_svc.h" +#include "modules/video_coding/svc/scalability_structure_l1t2.h" +#include "modules/video_coding/svc/scalability_structure_l1t3.h" +#include "modules/video_coding/svc/scalability_structure_l2t1.h" +#include "modules/video_coding/svc/scalability_structure_l2t1h.h" +#include "modules/video_coding/svc/scalability_structure_l2t2.h" +#include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" +#include "modules/video_coding/svc/scalability_structure_l3t1.h" +#include "modules/video_coding/svc/scalability_structure_l3t3.h" +#include "modules/video_coding/svc/scalability_structure_s2t1.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +struct NamedStructureFactory { + absl::string_view name; + // Use function pointer to make NamedStructureFactory trivally destructable. + std::unique_ptr (*factory)(); +}; + +// Wrap std::make_unique function to have correct return type. 
+template +std::unique_ptr Create() { + return std::make_unique(); +} + +constexpr NamedStructureFactory kFactories[] = { + {"NONE", Create}, + {"L1T2", Create}, + {"L1T3", Create}, + {"L2T1", Create}, + {"L2T1h", Create}, + {"L2T1_KEY", Create}, + {"L2T2", Create}, + {"L2T2_KEY", Create}, + {"L2T2_KEY_SHIFT", Create}, + {"L3T1", Create}, + {"L3T3", Create}, + {"L3T3_KEY", Create}, + {"S2T1", Create}, +}; + +} // namespace + +std::unique_ptr CreateScalabilityStructure( + absl::string_view name) { + RTC_DCHECK(!name.empty()); + for (const auto& entry : kFactories) { + if (entry.name == name) { + return entry.factory(); + } + } + return nullptr; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/create_scalability_structure.h b/modules/video_coding/svc/create_scalability_structure.h new file mode 100644 index 0000000000..9a14221fd2 --- /dev/null +++ b/modules/video_coding/svc/create_scalability_structure.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_CREATE_SCALABILITY_STRUCTURE_H_ +#define MODULES_VIDEO_CODING_SVC_CREATE_SCALABILITY_STRUCTURE_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +// Creates a structure by name according to +// https://w3c.github.io/webrtc-svc/#scalabilitymodes* +// Returns nullptr for unknown name. 
+std::unique_ptr CreateScalabilityStructure( + absl::string_view name); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_CREATE_SCALABILITY_STRUCTURE_H_ diff --git a/modules/video_coding/svc/scalability_structure_full_svc.cc b/modules/video_coding/svc/scalability_structure_full_svc.cc new file mode 100644 index 0000000000..c489b60502 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_full_svc.cc @@ -0,0 +1,285 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +enum : int { kKey, kDelta }; +} // namespace + +constexpr int ScalabilityStructureFullSvc::kMaxNumSpatialLayers; +constexpr int ScalabilityStructureFullSvc::kMaxNumTemporalLayers; +constexpr absl::string_view ScalabilityStructureFullSvc::kFramePatternNames[]; + +ScalabilityStructureFullSvc::ScalabilityStructureFullSvc( + int num_spatial_layers, + int num_temporal_layers) + : num_spatial_layers_(num_spatial_layers), + num_temporal_layers_(num_temporal_layers), + active_decode_targets_( + (uint32_t{1} << (num_spatial_layers * num_temporal_layers)) - 1) { + RTC_DCHECK_LE(num_spatial_layers, kMaxNumSpatialLayers); + RTC_DCHECK_LE(num_temporal_layers, kMaxNumTemporalLayers); +} + +ScalabilityStructureFullSvc::~ScalabilityStructureFullSvc() = default; + +ScalabilityStructureFullSvc::StreamLayersConfig 
+ScalabilityStructureFullSvc::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = num_spatial_layers_; + result.num_temporal_layers = num_temporal_layers_; + result.scaling_factor_num[num_spatial_layers_ - 1] = 1; + result.scaling_factor_den[num_spatial_layers_ - 1] = 1; + for (int sid = num_spatial_layers_ - 1; sid > 0; --sid) { + result.scaling_factor_num[sid - 1] = 1; + result.scaling_factor_den[sid - 1] = 2 * result.scaling_factor_den[sid]; + } + return result; +} + +bool ScalabilityStructureFullSvc::TemporalLayerIsActive(int tid) const { + if (tid >= num_temporal_layers_) { + return false; + } + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (DecodeTargetIsActive(sid, tid)) { + return true; + } + } + return false; +} + +DecodeTargetIndication ScalabilityStructureFullSvc::Dti( + int sid, + int tid, + const LayerFrameConfig& config) { + if (sid < config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (sid == config.SpatialId()) { + if (tid == 0) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return DecodeTargetIndication::kSwitch; + } + if (tid == config.TemporalId()) { + return DecodeTargetIndication::kDiscardable; + } + if (tid > config.TemporalId()) { + RTC_DCHECK_GT(tid, config.TemporalId()); + return DecodeTargetIndication::kSwitch; + } + } + RTC_DCHECK_GT(sid, config.SpatialId()); + RTC_DCHECK_GE(tid, config.TemporalId()); + if (config.IsKeyframe() || config.Id() == kKey) { + return DecodeTargetIndication::kSwitch; + } + return DecodeTargetIndication::kRequired; +} + +ScalabilityStructureFullSvc::FramePattern +ScalabilityStructureFullSvc::NextPattern() const { + switch (last_pattern_) { + case kNone: + case kDeltaT2B: + return kDeltaT0; + case kDeltaT2A: + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + case kDeltaT1: + if (TemporalLayerIsActive(2)) { + return kDeltaT2B; + } + return kDeltaT0; + case kDeltaT0: + if 
(TemporalLayerIsActive(2)) { + return kDeltaT2A; + } + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + } +} + +std::vector +ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { + std::vector configs; + if (active_decode_targets_.none()) { + last_pattern_ = kNone; + return configs; + } + configs.reserve(num_spatial_layers_); + + if (last_pattern_ == kNone || restart) { + can_reference_t0_frame_for_spatial_id_.reset(); + last_pattern_ = kNone; + } + FramePattern current_pattern = NextPattern(); + + absl::optional spatial_dependency_buffer_id; + switch (current_pattern) { + case kDeltaT0: + // Disallow temporal references cross T0 on higher temporal layers. + can_reference_t1_frame_for_spatial_id_.reset(); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/0)) { + // Next frame from the spatial layer `sid` shouldn't depend on + // potentially old previous frame from the spatial layer `sid`. + can_reference_t0_frame_for_spatial_id_.reset(sid); + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(last_pattern_ == kNone ? 
kKey : kDelta).S(sid).T(0); + + if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } else if (last_pattern_ == kNone) { + config.Keyframe(); + } + + if (can_reference_t0_frame_for_spatial_id_[sid]) { + config.ReferenceAndUpdate(BufferIndex(sid, /*tid=*/0)); + } else { + // TODO(bugs.webrtc.org/11999): Propagate chain restart on delta frame + // to ChainDiffCalculator + config.Update(BufferIndex(sid, /*tid=*/0)); + } + + can_reference_t0_frame_for_spatial_id_.set(sid); + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/0); + } + break; + case kDeltaT1: + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/1) || + !can_reference_t0_frame_for_spatial_id_[sid]) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(1); + // Temporal reference. + config.Reference(BufferIndex(sid, /*tid=*/0)); + // Spatial reference unless this is the lowest active spatial layer. + if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } + // No frame reference top layer frame, so no need save it into a buffer. + if (num_temporal_layers_ > 2 || sid < num_spatial_layers_ - 1) { + config.Update(BufferIndex(sid, /*tid=*/1)); + can_reference_t1_frame_for_spatial_id_.set(sid); + } + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/1); + } + break; + case kDeltaT2A: + case kDeltaT2B: + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/2) || + !can_reference_t0_frame_for_spatial_id_[sid]) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(2); + // Temporal reference. 
+ if (current_pattern == kDeltaT2B && + can_reference_t1_frame_for_spatial_id_[sid]) { + config.Reference(BufferIndex(sid, /*tid=*/1)); + } else { + config.Reference(BufferIndex(sid, /*tid=*/0)); + } + // Spatial reference unless this is the lowest active spatial layer. + if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } + // No frame reference top layer frame, so no need save it into a buffer. + if (sid < num_spatial_layers_ - 1) { + config.Update(BufferIndex(sid, /*tid=*/2)); + } + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/2); + } + break; + case kNone: + RTC_NOTREACHED(); + break; + } + + if (configs.empty() && !restart) { + RTC_LOG(LS_WARNING) << "Failed to generate configuration for L" + << num_spatial_layers_ << "T" << num_temporal_layers_ + << " with active decode targets " + << active_decode_targets_.to_string('-').substr( + active_decode_targets_.size() - + num_spatial_layers_ * num_temporal_layers_) + << " and transition from " + << kFramePatternNames[last_pattern_] << " to " + << kFramePatternNames[current_pattern] + << ". 
Resetting."; + return NextFrameConfig(/*restart=*/true); + } + + last_pattern_ = current_pattern; + return configs; +} + +GenericFrameInfo ScalabilityStructureFullSvc::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + frame_info.decode_target_indications.reserve(num_spatial_layers_ * + num_temporal_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + frame_info.decode_target_indications.push_back(Dti(sid, tid, config)); + } + } + if (config.TemporalId() == 0) { + frame_info.part_of_chain.resize(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + frame_info.part_of_chain[sid] = config.SpatialId() <= sid; + } + } else { + frame_info.part_of_chain.assign(num_spatial_layers_, false); + } + frame_info.active_decode_targets = active_decode_targets_; + return frame_info; +} + +void ScalabilityStructureFullSvc::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + // Enable/disable spatial layers independetely. + bool active = true; + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + // To enable temporal layer, require bitrates for lower temporal layers. + active = active && bitrates.GetBitrate(sid, tid) > 0; + SetDecodeTargetIsActive(sid, tid, active); + } + } +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_full_svc.h b/modules/video_coding/svc/scalability_structure_full_svc.h new file mode 100644 index 0000000000..d490d6e4a1 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_full_svc.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_FULL_SVC_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_FULL_SVC_H_ + +#include +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +class ScalabilityStructureFullSvc : public ScalableVideoController { + public: + ScalabilityStructureFullSvc(int num_spatial_layers, int num_temporal_layers); + ~ScalabilityStructureFullSvc() override; + + StreamLayersConfig StreamConfig() const override; + + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; + + private: + enum FramePattern { + kNone, + kDeltaT2A, + kDeltaT1, + kDeltaT2B, + kDeltaT0, + }; + static constexpr absl::string_view kFramePatternNames[] = { + "None", "DeltaT2A", "DeltaT1", "DeltaT2B", "DeltaT0"}; + static constexpr int kMaxNumSpatialLayers = 3; + static constexpr int kMaxNumTemporalLayers = 3; + + // Index of the buffer to store last frame for layer (`sid`, `tid`) + int BufferIndex(int sid, int tid) const { + return tid * num_spatial_layers_ + sid; + } + bool DecodeTargetIsActive(int sid, int tid) const { + return active_decode_targets_[sid * num_temporal_layers_ + tid]; + } + void SetDecodeTargetIsActive(int sid, int tid, bool value) { + active_decode_targets_.set(sid * num_temporal_layers_ + tid, value); + } + FramePattern NextPattern() const; + bool TemporalLayerIsActive(int tid) const; + 
static DecodeTargetIndication Dti(int sid, + int tid, + const LayerFrameConfig& frame); + + const int num_spatial_layers_; + const int num_temporal_layers_; + + FramePattern last_pattern_ = kNone; + std::bitset can_reference_t0_frame_for_spatial_id_ = 0; + std::bitset can_reference_t1_frame_for_spatial_id_ = 0; + std::bitset<32> active_decode_targets_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_FULL_SVC_H_ diff --git a/modules/video_coding/svc/scalability_structure_key_svc.cc b/modules/video_coding/svc/scalability_structure_key_svc.cc new file mode 100644 index 0000000000..cfc89a3794 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_key_svc.cc @@ -0,0 +1,336 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_key_svc.h" + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +// Values to use as LayerFrameConfig::Id +enum : int { kKey, kDelta }; + +DecodeTargetIndication +Dti(int sid, int tid, const ScalableVideoController::LayerFrameConfig& config) { + if (config.IsKeyframe() || config.Id() == kKey) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return sid < config.SpatialId() ? 
DecodeTargetIndication::kNotPresent + : DecodeTargetIndication::kSwitch; + } + + if (sid != config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (tid == config.TemporalId() && tid > 0) { + return DecodeTargetIndication::kDiscardable; + } + return DecodeTargetIndication::kSwitch; +} + +} // namespace + +constexpr int ScalabilityStructureKeySvc::kMaxNumSpatialLayers; +constexpr int ScalabilityStructureKeySvc::kMaxNumTemporalLayers; + +ScalabilityStructureKeySvc::ScalabilityStructureKeySvc(int num_spatial_layers, + int num_temporal_layers) + : num_spatial_layers_(num_spatial_layers), + num_temporal_layers_(num_temporal_layers), + active_decode_targets_( + (uint32_t{1} << (num_spatial_layers * num_temporal_layers)) - 1) { + // There is no point to use this structure without spatial scalability. + RTC_DCHECK_GT(num_spatial_layers, 1); + RTC_DCHECK_LE(num_spatial_layers, kMaxNumSpatialLayers); + RTC_DCHECK_LE(num_temporal_layers, kMaxNumTemporalLayers); +} + +ScalabilityStructureKeySvc::~ScalabilityStructureKeySvc() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureKeySvc::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = num_spatial_layers_; + result.num_temporal_layers = num_temporal_layers_; + result.scaling_factor_num[num_spatial_layers_ - 1] = 1; + result.scaling_factor_den[num_spatial_layers_ - 1] = 1; + for (int sid = num_spatial_layers_ - 1; sid > 0; --sid) { + result.scaling_factor_num[sid - 1] = 1; + result.scaling_factor_den[sid - 1] = 2 * result.scaling_factor_den[sid]; + } + return result; +} + +bool ScalabilityStructureKeySvc::TemporalLayerIsActive(int tid) const { + if (tid >= num_temporal_layers_) { + return false; + } + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (DecodeTargetIsActive(sid, tid)) { + return true; + } + } + return false; +} + +std::vector +ScalabilityStructureKeySvc::KeyframeConfig() { + std::vector configs; + 
configs.reserve(num_spatial_layers_); + absl::optional spatial_dependency_buffer_id; + spatial_id_is_enabled_.reset(); + // Disallow temporal references cross T0 on higher temporal layers. + can_reference_t1_frame_for_spatial_id_.reset(); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/0)) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kKey).S(sid).T(0); + + if (spatial_dependency_buffer_id) { + config.Reference(*spatial_dependency_buffer_id); + } else { + config.Keyframe(); + } + config.Update(BufferIndex(sid, /*tid=*/0)); + + spatial_id_is_enabled_.set(sid); + spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/0); + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::T0Config() { + std::vector configs; + configs.reserve(num_spatial_layers_); + // Disallow temporal references cross T0 on higher temporal layers. + can_reference_t1_frame_for_spatial_id_.reset(); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/0)) { + spatial_id_is_enabled_.reset(sid); + continue; + } + configs.emplace_back(); + configs.back().Id(kDelta).S(sid).T(0).ReferenceAndUpdate( + BufferIndex(sid, /*tid=*/0)); + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::T1Config() { + std::vector configs; + configs.reserve(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/1)) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(1).Reference(BufferIndex(sid, /*tid=*/0)); + if (num_temporal_layers_ > 2) { + config.Update(BufferIndex(sid, /*tid=*/1)); + can_reference_t1_frame_for_spatial_id_.set(sid); + } + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::T2Config() { + std::vector configs; + 
configs.reserve(num_spatial_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/2)) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(kDelta).S(sid).T(2); + if (can_reference_t1_frame_for_spatial_id_[sid]) { + config.Reference(BufferIndex(sid, /*tid=*/1)); + } else { + config.Reference(BufferIndex(sid, /*tid=*/0)); + } + } + return configs; +} + +std::vector +ScalabilityStructureKeySvc::NextFrameConfig(bool restart) { + if (active_decode_targets_.none()) { + last_pattern_ = kNone; + return {}; + } + + if (restart) { + last_pattern_ = kNone; + } + + switch (last_pattern_) { + case kNone: + last_pattern_ = kDeltaT0; + return KeyframeConfig(); + case kDeltaT2B: + last_pattern_ = kDeltaT0; + return T0Config(); + case kDeltaT2A: + if (TemporalLayerIsActive(1)) { + last_pattern_ = kDeltaT1; + return T1Config(); + } + last_pattern_ = kDeltaT0; + return T0Config(); + case kDeltaT1: + if (TemporalLayerIsActive(2)) { + last_pattern_ = kDeltaT2B; + return T2Config(); + } + last_pattern_ = kDeltaT0; + return T0Config(); + case kDeltaT0: + if (TemporalLayerIsActive(2)) { + last_pattern_ = kDeltaT2A; + return T2Config(); + } else if (TemporalLayerIsActive(1)) { + last_pattern_ = kDeltaT1; + return T1Config(); + } + last_pattern_ = kDeltaT0; + return T0Config(); + } + RTC_NOTREACHED(); + return {}; +} + +GenericFrameInfo ScalabilityStructureKeySvc::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + frame_info.decode_target_indications.reserve(num_spatial_layers_ * + num_temporal_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + frame_info.decode_target_indications.push_back(Dti(sid, tid, config)); + } + } 
+ frame_info.part_of_chain.assign(num_spatial_layers_, false); + if (config.IsKeyframe() || config.Id() == kKey) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + for (int sid = config.SpatialId(); sid < num_spatial_layers_; ++sid) { + frame_info.part_of_chain[sid] = true; + } + } else if (config.TemporalId() == 0) { + frame_info.part_of_chain[config.SpatialId()] = true; + } + frame_info.active_decode_targets = active_decode_targets_; + return frame_info; +} + +void ScalabilityStructureKeySvc::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + // Enable/disable spatial layers independetely. + bool active = bitrates.GetBitrate(sid, /*tid=*/0) > 0; + SetDecodeTargetIsActive(sid, /*tid=*/0, active); + if (!spatial_id_is_enabled_[sid] && active) { + // Key frame is required to reenable any spatial layer. + last_pattern_ = kNone; + } + + for (int tid = 1; tid < num_temporal_layers_; ++tid) { + // To enable temporal layer, require bitrates for lower temporal layers. 
+ active = active && bitrates.GetBitrate(sid, tid) > 0; + SetDecodeTargetIsActive(sid, tid, active); + } + } +} + +ScalabilityStructureL2T1Key::~ScalabilityStructureL2T1Key() = default; + +FrameDependencyStructure ScalabilityStructureL2T1Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); + structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); + return structure; +} + +ScalabilityStructureL2T2Key::~ScalabilityStructureL2T2Key() = default; + +FrameDependencyStructure ScalabilityStructureL2T2Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + structure.templates.resize(6); + auto& templates = structure.templates; + templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({4, 3}).FrameDiffs({4}); + templates[2].S(0).T(1).Dtis("-D--").ChainDiffs({2, 1}).FrameDiffs({2}); + templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 4}).FrameDiffs({4}); + templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2}); + return structure; +} + +ScalabilityStructureL3T3Key::~ScalabilityStructureL3T3Key() = default; + +FrameDependencyStructure ScalabilityStructureL3T3Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 9; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; + auto& t = 
structure.templates; + t.resize(15); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. Indexes are written in hex for nicer alignment. + t[0x0].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0}); + t[0x5].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[0x3].S(0).T(2).Dtis("--D------").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[0x8].S(1).T(2).Dtis("-----D---").ChainDiffs({4, 3, 2}).FrameDiffs({3}); + t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3}); + t[0x2].S(0).T(1).Dtis("-DS------").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[0x7].S(1).T(1).Dtis("----DS---").ChainDiffs({7, 6, 5}).FrameDiffs({6}); + t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6}); + t[0x4].S(0).T(2).Dtis("--D------").ChainDiffs({9, 8, 7}).FrameDiffs({3}); + t[0x9].S(1).T(2).Dtis("-----D---").ChainDiffs({10, 9, 8}).FrameDiffs({3}); + t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3}); + t[0x1].S(0).T(0).Dtis("SSS------").ChainDiffs({12, 11, 10}).FrameDiffs({12}); + t[0x6].S(1).T(0).Dtis("---SSS---").ChainDiffs({1, 12, 11}).FrameDiffs({12}); + t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 12}).FrameDiffs({12}); + return structure; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_key_svc.h b/modules/video_coding/svc/scalability_structure_key_svc.h new file mode 100644 index 0000000000..1d3277b5cd --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_key_svc.h @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_KEY_SVC_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_KEY_SVC_H_ + +#include +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +class ScalabilityStructureKeySvc : public ScalableVideoController { + public: + ScalabilityStructureKeySvc(int num_spatial_layers, int num_temporal_layers); + ~ScalabilityStructureKeySvc() override; + + StreamLayersConfig StreamConfig() const override; + + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; + + private: + enum FramePattern { + kNone, + kDeltaT0, + kDeltaT2A, + kDeltaT1, + kDeltaT2B, + }; + static constexpr int kMaxNumSpatialLayers = 3; + static constexpr int kMaxNumTemporalLayers = 3; + + // Index of the buffer to store last frame for layer (`sid`, `tid`) + int BufferIndex(int sid, int tid) const { + return tid * num_spatial_layers_ + sid; + } + bool DecodeTargetIsActive(int sid, int tid) const { + return active_decode_targets_[sid * num_temporal_layers_ + tid]; + } + void SetDecodeTargetIsActive(int sid, int tid, bool value) { + active_decode_targets_.set(sid * num_temporal_layers_ + tid, value); + } + bool TemporalLayerIsActive(int tid) const; + std::vector KeyframeConfig(); + std::vector T0Config(); + std::vector T1Config(); + std::vector T2Config(); + + 
const int num_spatial_layers_; + const int num_temporal_layers_; + + FramePattern last_pattern_ = kNone; + std::bitset spatial_id_is_enabled_; + std::bitset can_reference_t1_frame_for_spatial_id_; + std::bitset<32> active_decode_targets_; +}; + +// S1 0--0--0- +// | ... +// S0 0--0--0- +class ScalabilityStructureL2T1Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL2T1Key() : ScalabilityStructureKeySvc(2, 1) {} + ~ScalabilityStructureL2T1Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// S1T1 0 0 +// / / / +// S1T0 0---0---0 +// | ... +// S0T1 | 0 0 +// |/ / / +// S0T0 0---0---0 +// Time-> 0 1 2 3 4 +class ScalabilityStructureL2T2Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL2T2Key() : ScalabilityStructureKeySvc(2, 2) {} + ~ScalabilityStructureL2T2Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +class ScalabilityStructureL3T3Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL3T3Key() : ScalabilityStructureKeySvc(3, 3) {} + ~ScalabilityStructureL3T3Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_KEY_SVC_H_ diff --git a/modules/video_coding/svc/scalability_structure_key_svc_unittest.cc b/modules/video_coding/svc/scalability_structure_key_svc_unittest.cc new file mode 100644 index 0000000000..752f710eb6 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_key_svc_unittest.cc @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_key_svc.h" + +#include + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalability_structure_test_helpers.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +TEST(ScalabilityStructureL3T3KeyTest, + SkipingT1FrameOnOneSpatialLayerKeepsStructureValid) { + ScalabilityStructureL3T3Key structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/3, /*s1=*/3)); + wrapper.GenerateFrames(/*num_temporal_units=*/2, frames); + EXPECT_THAT(frames, SizeIs(4)); + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/3, /*s1=*/1)); + wrapper.GenerateFrames(/*num_temporal_units=*/1, frames); + EXPECT_THAT(frames, SizeIs(5)); + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/3, /*s1=*/3)); + wrapper.GenerateFrames(/*num_temporal_units=*/1, frames); + ASSERT_THAT(frames, SizeIs(7)); + + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[1].temporal_id, 0); + EXPECT_EQ(frames[2].temporal_id, 2); + EXPECT_EQ(frames[3].temporal_id, 2); + EXPECT_EQ(frames[4].temporal_id, 1); + EXPECT_EQ(frames[5].temporal_id, 2); + EXPECT_EQ(frames[6].temporal_id, 2); + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +TEST(ScalabilityStructureL3T3KeyTest, + ReenablingSpatialLayerBeforeMissedT0FrameDoesntTriggerAKeyFrame) { + ScalabilityStructureL3T3Key structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/2)); + wrapper.GenerateFrames(1, frames); + EXPECT_THAT(frames, SizeIs(2)); + // 
Drop a spatial layer. + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/0)); + wrapper.GenerateFrames(1, frames); + EXPECT_THAT(frames, SizeIs(3)); + // Reenable a spatial layer before T0 frame is encoded. + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/2)); + wrapper.GenerateFrames(1, frames); + EXPECT_THAT(frames, SizeIs(5)); + + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[1].temporal_id, 0); + EXPECT_EQ(frames[2].temporal_id, 1); + EXPECT_EQ(frames[3].temporal_id, 0); + EXPECT_EQ(frames[4].temporal_id, 0); + EXPECT_THAT(frames[3].frame_diffs, SizeIs(1)); + EXPECT_THAT(frames[4].frame_diffs, SizeIs(1)); + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +TEST(ScalabilityStructureL3T3KeyTest, ReenablingSpatialLayerTriggersKeyFrame) { + ScalabilityStructureL3T3Key structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + // Start with all spatial layers enabled. + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/2, /*s2=*/2)); + wrapper.GenerateFrames(3, frames); + EXPECT_THAT(frames, SizeIs(9)); + // Drop a spatial layer. Two remaining spatial layers should just continue. + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/0, /*s2=*/2)); + wrapper.GenerateFrames(2, frames); + EXPECT_THAT(frames, SizeIs(13)); + // Reenable spatial layer, expect a full restart. + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/2, /*s2=*/2)); + wrapper.GenerateFrames(1, frames); + ASSERT_THAT(frames, SizeIs(16)); + + // First 3 temporal units with all spatial layers enabled. + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[3].temporal_id, 1); + EXPECT_EQ(frames[6].temporal_id, 0); + // 2 temporal units with spatial layer 1 disabled. + EXPECT_EQ(frames[9].spatial_id, 0); + EXPECT_EQ(frames[9].temporal_id, 1); + EXPECT_EQ(frames[10].spatial_id, 2); + EXPECT_EQ(frames[10].temporal_id, 1); + // T0 frames were encoded while spatial layer 1 is disabled. 
+ EXPECT_EQ(frames[11].spatial_id, 0); + EXPECT_EQ(frames[11].temporal_id, 0); + EXPECT_EQ(frames[12].spatial_id, 2); + EXPECT_EQ(frames[12].temporal_id, 0); + // Key frame to reenable spatial layer 1. + EXPECT_THAT(frames[13].frame_diffs, IsEmpty()); + EXPECT_THAT(frames[14].frame_diffs, ElementsAre(1)); + EXPECT_THAT(frames[15].frame_diffs, ElementsAre(1)); + EXPECT_EQ(frames[13].temporal_id, 0); + EXPECT_EQ(frames[14].temporal_id, 0); + EXPECT_EQ(frames[15].temporal_id, 0); + auto all_frames = rtc::MakeArrayView(frames.data(), frames.size()); + EXPECT_TRUE(wrapper.FrameReferencesAreValid(all_frames.subview(0, 13))); + // Frames starting from the frame#13 should not reference any earlier frames. + EXPECT_TRUE(wrapper.FrameReferencesAreValid(all_frames.subview(13))); +} + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l1t2.cc b/modules/video_coding/svc/scalability_structure_l1t2.cc new file mode 100644 index 0000000000..f639e2da6e --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l1t2.cc @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l1t2.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL1T2::~ScalabilityStructureL1T2() = default; + +FrameDependencyStructure ScalabilityStructureL1T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 1; + structure.decode_target_protected_by_chain = {0, 0}; + structure.templates.resize(3); + structure.templates[0].T(0).Dtis("SS").ChainDiffs({0}); + structure.templates[1].T(0).Dtis("SS").ChainDiffs({2}).FrameDiffs({2}); + structure.templates[2].T(1).Dtis("-D").ChainDiffs({1}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l1t2.h b/modules/video_coding/svc/scalability_structure_l1t2.h new file mode 100644 index 0000000000..d2f81aa113 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l1t2.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +class ScalabilityStructureL1T2 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL1T2() : ScalabilityStructureFullSvc(1, 2) {} + ~ScalabilityStructureL1T2() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ diff --git a/modules/video_coding/svc/scalability_structure_l1t3.cc b/modules/video_coding/svc/scalability_structure_l1t3.cc new file mode 100644 index 0000000000..17073344c3 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l1t3.cc @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l1t3.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL1T3::~ScalabilityStructureL1T3() = default; + +FrameDependencyStructure ScalabilityStructureL1T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 1; + structure.decode_target_protected_by_chain = {0, 0, 0}; + structure.templates.resize(5); + structure.templates[0].T(0).Dtis("SSS").ChainDiffs({0}); + structure.templates[1].T(0).Dtis("SSS").ChainDiffs({4}).FrameDiffs({4}); + structure.templates[2].T(1).Dtis("-DS").ChainDiffs({2}).FrameDiffs({2}); + structure.templates[3].T(2).Dtis("--D").ChainDiffs({1}).FrameDiffs({1}); + structure.templates[4].T(2).Dtis("--D").ChainDiffs({3}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l1t3.h b/modules/video_coding/svc/scalability_structure_l1t3.h new file mode 100644 index 0000000000..00e48ccc47 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l1t3.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// T2 0 0 0 0 +// | / | / +// T1 / 0 / 0 ... 
+// |_/ |_/ +// T0 0-------0------ +// Time-> 0 1 2 3 4 5 6 7 +class ScalabilityStructureL1T3 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL1T3() : ScalabilityStructureFullSvc(1, 3) {} + ~ScalabilityStructureL1T3() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ diff --git a/modules/video_coding/svc/scalability_structure_l2t1.cc b/modules/video_coding/svc/scalability_structure_l2t1.cc new file mode 100644 index 0000000000..efd7516657 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t1.cc @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l2t1.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL2T1::~ScalabilityStructureL2T1() = default; + +FrameDependencyStructure ScalabilityStructureL2T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("SR").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({2, 1}); + structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l2t1.h b/modules/video_coding/svc/scalability_structure_l2t1.h new file mode 100644 index 0000000000..96a0da56df --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t1.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// S1 0--0--0- +// | | | ... 
+// S0 0--0--0- +class ScalabilityStructureL2T1 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL2T1() : ScalabilityStructureFullSvc(2, 1) {} + ~ScalabilityStructureL2T1() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ diff --git a/modules/video_coding/svc/scalability_structure_l2t1h.cc b/modules/video_coding/svc/scalability_structure_l2t1h.cc new file mode 100644 index 0000000000..c4682764ae --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t1h.cc @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l2t1h.h" + +#include +#include + +#include "absl/base/macros.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +ScalabilityStructureL2T1h::~ScalabilityStructureL2T1h() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureL2T1h::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = 2; + result.num_temporal_layers = 1; + // 1.5:1 scaling, see https://w3c.github.io/webrtc-svc/#scalabilitymodes* + result.scaling_factor_num[0] = 2; + result.scaling_factor_den[0] = 3; + return result; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l2t1h.h b/modules/video_coding/svc/scalability_structure_l2t1h.h new file mode 100644 index 0000000000..7200a10843 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t1h.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ + +#include "modules/video_coding/svc/scalability_structure_l2t1.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +class ScalabilityStructureL2T1h : public ScalabilityStructureL2T1 { + public: + ~ScalabilityStructureL2T1h() override; + + StreamLayersConfig StreamConfig() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ diff --git a/modules/video_coding/svc/scalability_structure_l2t2.cc b/modules/video_coding/svc/scalability_structure_l2t2.cc new file mode 100644 index 0000000000..a381ad080a --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t2.cc @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l2t2.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL2T2::~ScalabilityStructureL2T2() = default; + +FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + structure.templates.resize(6); + auto& templates = structure.templates; + templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SSRR").ChainDiffs({4, 3}).FrameDiffs({4}); + templates[2].S(0).T(1).Dtis("-D-R").ChainDiffs({2, 1}).FrameDiffs({2}); + templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({4, 1}); + templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2, 1}); + return structure; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l2t2.h b/modules/video_coding/svc/scalability_structure_l2t2.h new file mode 100644 index 0000000000..781ea7e60d --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t2.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// S1T1 0 0 +// /| /| / +// S1T0 0-+-0-+-0 +// | | | | | ... +// S0T1 | 0 | 0 | +// |/ |/ |/ +// S0T0 0---0---0-- +// Time-> 0 1 2 3 4 +class ScalabilityStructureL2T2 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL2T2() : ScalabilityStructureFullSvc(2, 2) {} + ~ScalabilityStructureL2T2() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ diff --git a/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc new file mode 100644 index 0000000000..c53ff8f07b --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" + +#include +#include + +#include "absl/base/macros.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { + +DecodeTargetIndication +Dti(int sid, int tid, const ScalableVideoController::LayerFrameConfig& config) { + if (config.IsKeyframe()) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return sid < config.SpatialId() ? 
DecodeTargetIndication::kNotPresent + : DecodeTargetIndication::kSwitch; + } + + if (sid != config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (tid == config.TemporalId() && tid > 0) { + return DecodeTargetIndication::kDiscardable; + } + return DecodeTargetIndication::kSwitch; +} + +} // namespace + +constexpr int ScalabilityStructureL2T2KeyShift::kNumSpatialLayers; +constexpr int ScalabilityStructureL2T2KeyShift::kNumTemporalLayers; + +ScalabilityStructureL2T2KeyShift::~ScalabilityStructureL2T2KeyShift() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureL2T2KeyShift::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = 2; + result.num_temporal_layers = 2; + result.scaling_factor_num[0] = 1; + result.scaling_factor_den[0] = 2; + return result; +} + +FrameDependencyStructure ScalabilityStructureL2T2KeyShift::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + structure.templates.resize(7); + auto& templates = structure.templates; + templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({2, 1}).FrameDiffs({2}); + templates[2].S(0).T(0).Dtis("SS--").ChainDiffs({4, 1}).FrameDiffs({4}); + templates[3].S(0).T(1).Dtis("-D--").ChainDiffs({2, 3}).FrameDiffs({2}); + templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[5].S(1).T(0).Dtis("--SS").ChainDiffs({3, 4}).FrameDiffs({4}); + templates[6].S(1).T(1).Dtis("---D").ChainDiffs({1, 2}).FrameDiffs({2}); + return structure; +} + +std::vector +ScalabilityStructureL2T2KeyShift::NextFrameConfig(bool restart) { + std::vector configs; + configs.reserve(2); + if (restart) { + next_pattern_ = kKey; + } + + // Buffer0 keeps latest S0T0 frame, + // Buffer1 keeps latest S1T0 frame. 
+ switch (next_pattern_) { + case kKey: + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(0).T(0).Update(0).Keyframe(); + } + if (DecodeTargetIsActive(/*sid=*/1, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(1).T(0).Update(1); + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.back().Reference(0); + } else { + configs.back().Keyframe(); + } + } + next_pattern_ = kDelta0; + break; + case kDelta0: + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(0).T(0).ReferenceAndUpdate(0); + } + if (DecodeTargetIsActive(/*sid=*/1, /*tid=*/1)) { + configs.emplace_back(); + configs.back().S(1).T(1).Reference(1); + } + if (configs.empty() && DecodeTargetIsActive(/*sid=*/1, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(1).T(0).ReferenceAndUpdate(1); + } + next_pattern_ = kDelta1; + break; + case kDelta1: + if (DecodeTargetIsActive(/*sid=*/0, /*tid=*/1)) { + configs.emplace_back(); + configs.back().S(0).T(1).Reference(0); + } + if (DecodeTargetIsActive(/*sid=*/1, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(1).T(0).ReferenceAndUpdate(1); + } + if (configs.empty() && DecodeTargetIsActive(/*sid=*/0, /*tid=*/0)) { + configs.emplace_back(); + configs.back().S(0).T(0).ReferenceAndUpdate(0); + } + next_pattern_ = kDelta0; + break; + } + + RTC_DCHECK(!configs.empty() || active_decode_targets_.none()); + return configs; +} + +GenericFrameInfo ScalabilityStructureL2T2KeyShift::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + for (int sid = 0; sid < kNumSpatialLayers; ++sid) { + for (int tid = 0; tid < kNumTemporalLayers; ++tid) { + frame_info.decode_target_indications.push_back(Dti(sid, tid, config)); + } + } + if (config.IsKeyframe()) { + frame_info.part_of_chain = {true, 
true}; + } else if (config.TemporalId() == 0) { + frame_info.part_of_chain = {config.SpatialId() == 0, + config.SpatialId() == 1}; + } else { + frame_info.part_of_chain = {false, false}; + } + return frame_info; +} + +void ScalabilityStructureL2T2KeyShift::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + for (int sid = 0; sid < kNumSpatialLayers; ++sid) { + // Enable/disable spatial layers independently. + bool active = bitrates.GetBitrate(sid, /*tid=*/0) > 0; + if (!DecodeTargetIsActive(sid, /*tid=*/0) && active) { + // Key frame is required to reenable any spatial layer. + next_pattern_ = kKey; + } + + SetDecodeTargetIsActive(sid, /*tid=*/0, active); + SetDecodeTargetIsActive(sid, /*tid=*/1, + active && bitrates.GetBitrate(sid, /*tid=*/1) > 0); + } +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h new file mode 100644 index 0000000000..26d1afcb29 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ + +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +// S1T1 0 0 +// / / / +// S1T0 0---0---0 +// | ... 
+// S0T1 | 0 0 +// | / / +// S0T0 0-0---0-- +// Time-> 0 1 2 3 4 +class ScalabilityStructureL2T2KeyShift : public ScalableVideoController { + public: + ~ScalabilityStructureL2T2KeyShift() override; + + StreamLayersConfig StreamConfig() const override; + FrameDependencyStructure DependencyStructure() const override; + + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; + + private: + enum FramePattern { + kKey, + kDelta0, + kDelta1, + }; + + static constexpr int kNumSpatialLayers = 2; + static constexpr int kNumTemporalLayers = 2; + + bool DecodeTargetIsActive(int sid, int tid) const { + return active_decode_targets_[sid * kNumTemporalLayers + tid]; + } + void SetDecodeTargetIsActive(int sid, int tid, bool value) { + active_decode_targets_.set(sid * kNumTemporalLayers + tid, value); + } + + FramePattern next_pattern_ = kKey; + std::bitset<32> active_decode_targets_ = 0b1111; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ diff --git a/modules/video_coding/svc/scalability_structure_l2t2_key_shift_unittest.cc b/modules/video_coding/svc/scalability_structure_l2t2_key_shift_unittest.cc new file mode 100644 index 0000000000..40fecf1812 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l2t2_key_shift_unittest.cc @@ -0,0 +1,358 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" + +#include + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalability_structure_test_helpers.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +// S1T1 3 7 +// / / +// S1T0 1---5---9 +// | +// S0T1 | 4 8 +// | / / +// S0T0 0-2---6 +// Time-> 0 1 2 3 4 +TEST(ScalabilityStructureL2T2KeyShiftTest, DecodeTargetsAreEnabledByDefault) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + wrapper.GenerateFrames(/*num_temporal_units=*/5, frames); + ASSERT_THAT(frames, SizeIs(10)); + + EXPECT_EQ(frames[0].spatial_id, 0); + EXPECT_EQ(frames[1].spatial_id, 1); + EXPECT_EQ(frames[2].spatial_id, 0); + EXPECT_EQ(frames[3].spatial_id, 1); + EXPECT_EQ(frames[4].spatial_id, 0); + EXPECT_EQ(frames[5].spatial_id, 1); + EXPECT_EQ(frames[6].spatial_id, 0); + EXPECT_EQ(frames[7].spatial_id, 1); + EXPECT_EQ(frames[8].spatial_id, 0); + EXPECT_EQ(frames[9].spatial_id, 1); + + // spatial_id = 0 has the temporal shift. + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[2].temporal_id, 0); + EXPECT_EQ(frames[4].temporal_id, 1); + EXPECT_EQ(frames[6].temporal_id, 0); + EXPECT_EQ(frames[8].temporal_id, 1); + + // spatial_id = 1 hasn't temporal shift. + EXPECT_EQ(frames[1].temporal_id, 0); + EXPECT_EQ(frames[3].temporal_id, 1); + EXPECT_EQ(frames[5].temporal_id, 0); + EXPECT_EQ(frames[7].temporal_id, 1); + EXPECT_EQ(frames[9].temporal_id, 0); + + // Key frame diff. 
+ EXPECT_THAT(frames[0].frame_diffs, IsEmpty()); + EXPECT_THAT(frames[1].frame_diffs, ElementsAre(1)); + // S0T0 frame diffs + EXPECT_THAT(frames[2].frame_diffs, ElementsAre(2)); + EXPECT_THAT(frames[6].frame_diffs, ElementsAre(4)); + // S1T0 frame diffs + EXPECT_THAT(frames[5].frame_diffs, ElementsAre(4)); + EXPECT_THAT(frames[9].frame_diffs, ElementsAre(4)); + // T1 frames refer T0 frame of same spatial layer which is 2 frame ids away. + EXPECT_THAT(frames[3].frame_diffs, ElementsAre(2)); + EXPECT_THAT(frames[4].frame_diffs, ElementsAre(2)); + EXPECT_THAT(frames[7].frame_diffs, ElementsAre(2)); + EXPECT_THAT(frames[8].frame_diffs, ElementsAre(2)); +} + +// S1T0 1---4---7 +// | +// S0T1 | 3 6 +// | / / +// S0T0 0-2---5-- +// Time-> 0 1 2 3 4 +TEST(ScalabilityStructureL2T2KeyShiftTest, DisableS1T1Layer) { + ScalabilityStructureL2T2KeyShift structure; + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/1)); + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + wrapper.GenerateFrames(/*num_temporal_units=*/5, frames); + ASSERT_THAT(frames, SizeIs(8)); + + EXPECT_EQ(frames[0].spatial_id, 0); + EXPECT_EQ(frames[1].spatial_id, 1); + EXPECT_EQ(frames[2].spatial_id, 0); + EXPECT_EQ(frames[3].spatial_id, 0); + EXPECT_EQ(frames[4].spatial_id, 1); + EXPECT_EQ(frames[5].spatial_id, 0); + EXPECT_EQ(frames[6].spatial_id, 0); + EXPECT_EQ(frames[7].spatial_id, 1); + + // spatial_id = 0 has the temporal shift. + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[2].temporal_id, 0); + EXPECT_EQ(frames[3].temporal_id, 1); + EXPECT_EQ(frames[5].temporal_id, 0); + EXPECT_EQ(frames[6].temporal_id, 1); + + // spatial_id = 1 has single temporal layer. 
+ EXPECT_EQ(frames[1].temporal_id, 0); + EXPECT_EQ(frames[4].temporal_id, 0); + EXPECT_EQ(frames[5].temporal_id, 0); +} + +// S1T1 3 | +// / | +// S1T0 1---5+--7 +// | | +// S0T1 | 4| +// | / | +// S0T0 0-2--+6---8 +// Time-> 0 1 2 3 4 5 +TEST(ScalabilityStructureL2T2KeyShiftTest, DisableT1LayersAfterFewFrames) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + EXPECT_THAT(frames, SizeIs(6)); + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/1, /*s1=*/1)); + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + ASSERT_THAT(frames, SizeIs(9)); + + // Skip validation before T1 was disabled as that is covered by the test + // where no layers are disabled. + EXPECT_EQ(frames[6].spatial_id, 0); + EXPECT_EQ(frames[7].spatial_id, 1); + EXPECT_EQ(frames[8].spatial_id, 0); + + EXPECT_EQ(frames[6].temporal_id, 0); + EXPECT_EQ(frames[7].temporal_id, 0); + EXPECT_EQ(frames[8].temporal_id, 0); + + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +// S1T1 1 3 +// / / +// S1T0 0---2 +// Time-> 0 1 2 3 4 5 +TEST(ScalabilityStructureL2T2KeyShiftTest, DisableS0FromTheStart) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/0, /*s1=*/2)); + wrapper.GenerateFrames(/*num_temporal_units=*/4, frames); + EXPECT_THAT(frames, SizeIs(4)); + + EXPECT_EQ(frames[0].spatial_id, 1); + EXPECT_EQ(frames[1].spatial_id, 1); + EXPECT_EQ(frames[2].spatial_id, 1); + EXPECT_EQ(frames[3].spatial_id, 1); + + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[1].temporal_id, 1); + EXPECT_EQ(frames[2].temporal_id, 0); + EXPECT_EQ(frames[3].temporal_id, 1); + + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +// S1T1 3 |6 8 +// / / / +// S1T0 1---5+--7 +// | | +// S0T1 | 4| +// | / | +// S0T0 0-2 | +// Time-> 0 1 2 3 
4 5 +TEST(ScalabilityStructureL2T2KeyShiftTest, DisableS0AfterFewFrames) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + EXPECT_THAT(frames, SizeIs(6)); + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/0, /*s1=*/2)); + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + ASSERT_THAT(frames, SizeIs(9)); + + // Expect frame[6] is delta frame. + EXPECT_THAT(frames[6].frame_diffs, ElementsAre(1)); + // Skip validation before S0 was disabled as that should be covered by + // test where no layers are disabled. + EXPECT_EQ(frames[6].spatial_id, 1); + EXPECT_EQ(frames[7].spatial_id, 1); + EXPECT_EQ(frames[8].spatial_id, 1); + + EXPECT_EQ(frames[6].temporal_id, 1); + EXPECT_EQ(frames[7].temporal_id, 0); + EXPECT_EQ(frames[8].temporal_id, 1); + + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +// S1T1 3| | 8 +// / | | / +// S1T0 1 | |6 +// | | || +// S0T1 | |4|| +// | / || +// S0T0 0-2| |5-7 +// Time-> 0 1 2 3 4 5 +TEST(ScalabilityStructureL2T2KeyShiftTest, ReenableS1TriggersKeyFrame) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + wrapper.GenerateFrames(/*num_temporal_units=*/2, frames); + EXPECT_THAT(frames, SizeIs(4)); + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/0)); + wrapper.GenerateFrames(/*num_temporal_units=*/1, frames); + EXPECT_THAT(frames, SizeIs(5)); + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/2)); + wrapper.GenerateFrames(/*num_temporal_units=*/2, frames); + ASSERT_THAT(frames, SizeIs(9)); + + EXPECT_THAT(frames[4].spatial_id, 0); + EXPECT_THAT(frames[4].temporal_id, 1); + + // Expect frame[5] to be a key frame. 
+ EXPECT_TRUE(wrapper.FrameReferencesAreValid( + rtc::MakeArrayView(frames.data() + 5, 4))); + + EXPECT_THAT(frames[5].spatial_id, 0); + EXPECT_THAT(frames[6].spatial_id, 1); + EXPECT_THAT(frames[7].spatial_id, 0); + EXPECT_THAT(frames[8].spatial_id, 1); + + // S0 should do temporal shift after the key frame. + EXPECT_THAT(frames[5].temporal_id, 0); + EXPECT_THAT(frames[7].temporal_id, 0); + + // No temporal shift for the top spatial layer. + EXPECT_THAT(frames[6].temporal_id, 0); + EXPECT_THAT(frames[8].temporal_id, 1); +} + +TEST(ScalabilityStructureL2T2KeyShiftTest, EnableOnlyS0T0FromTheStart) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/1, /*s1=*/0)); + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + ASSERT_THAT(frames, SizeIs(3)); + + EXPECT_EQ(frames[0].spatial_id, 0); + EXPECT_EQ(frames[1].spatial_id, 0); + EXPECT_EQ(frames[2].spatial_id, 0); + + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[1].temporal_id, 0); + EXPECT_EQ(frames[2].temporal_id, 0); + + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +// S1T1 3| +// / | +// S1T0 1 | +// | | +// S0T1 | | +// | | +// S0T0 0-2+4-5-6 +// Time-> 0 1 2 3 4 +TEST(ScalabilityStructureL2T2KeyShiftTest, EnableOnlyS0T0AfterFewFrames) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + wrapper.GenerateFrames(/*num_temporal_units=*/2, frames); + EXPECT_THAT(frames, SizeIs(4)); + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/1, /*s1=*/0)); + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + ASSERT_THAT(frames, SizeIs(7)); + + EXPECT_EQ(frames[4].spatial_id, 0); + EXPECT_EQ(frames[5].spatial_id, 0); + EXPECT_EQ(frames[6].spatial_id, 0); + + EXPECT_EQ(frames[4].temporal_id, 0); + EXPECT_EQ(frames[5].temporal_id, 0); + EXPECT_EQ(frames[6].temporal_id, 0); + + 
EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +TEST(ScalabilityStructureL2T2KeyShiftTest, EnableOnlyS1T0FromTheStart) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/0, /*s1=*/1)); + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + ASSERT_THAT(frames, SizeIs(3)); + + EXPECT_EQ(frames[0].spatial_id, 1); + EXPECT_EQ(frames[1].spatial_id, 1); + EXPECT_EQ(frames[2].spatial_id, 1); + + EXPECT_EQ(frames[0].temporal_id, 0); + EXPECT_EQ(frames[1].temporal_id, 0); + EXPECT_EQ(frames[2].temporal_id, 0); + + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +// S1T1 3| +// / | +// S1T0 1--+4-5-6 +// | | +// S0T1 | | +// | | +// S0T0 0-2| +// Time-> 0 1 2 3 4 +TEST(ScalabilityStructureL2T2KeyShiftTest, EnableOnlyS1T0AfterFewFrames) { + ScalabilityStructureL2T2KeyShift structure; + ScalabilityStructureWrapper wrapper(structure); + std::vector frames; + + wrapper.GenerateFrames(/*num_temporal_units=*/2, frames); + EXPECT_THAT(frames, SizeIs(4)); + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/0, /*s1=*/1)); + wrapper.GenerateFrames(/*num_temporal_units=*/3, frames); + ASSERT_THAT(frames, SizeIs(7)); + + EXPECT_EQ(frames[4].spatial_id, 1); + EXPECT_EQ(frames[5].spatial_id, 1); + EXPECT_EQ(frames[6].spatial_id, 1); + + EXPECT_EQ(frames[4].temporal_id, 0); + EXPECT_EQ(frames[5].temporal_id, 0); + EXPECT_EQ(frames[6].temporal_id, 0); + + EXPECT_TRUE(wrapper.FrameReferencesAreValid(frames)); +} + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l3t1.cc b/modules/video_coding/svc/scalability_structure_l3t1.cc new file mode 100644 index 0000000000..d7a5324465 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l3t1.cc @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_l3t1.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL3T1::~ScalabilityStructureL3T1() = default; + +FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 1, 2}; + auto& templates = structure.templates; + templates.resize(6); + templates[0].S(0).Dtis("SRR").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + templates[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0}); + templates[2].S(1).Dtis("-SR").ChainDiffs({1, 1, 1}).FrameDiffs({3, 1}); + templates[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + templates[4].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({3, 1}); + templates[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + return structure; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l3t1.h b/modules/video_coding/svc/scalability_structure_l3t1.h new file mode 100644 index 0000000000..dea40e96b8 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l3t1.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// S2 0-0-0- +// | | | +// S1 0-0-0-... +// | | | +// S0 0-0-0- +// Time-> 0 1 2 +class ScalabilityStructureL3T1 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL3T1() : ScalabilityStructureFullSvc(3, 1) {} + ~ScalabilityStructureL3T1() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ diff --git a/modules/video_coding/svc/scalability_structure_l3t3.cc b/modules/video_coding/svc/scalability_structure_l3t3.cc new file mode 100644 index 0000000000..932056b0d3 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l3t3.cc @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_l3t3.h" + +#include + +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +ScalabilityStructureL3T3::~ScalabilityStructureL3T3() = default; + +FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 9; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; + auto& t = structure.templates; + t.resize(15); + // Templates are shown in the order frames following them appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. Indexes are written in hex for nicer alignment. + t[0x1].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0}); + t[0x6].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[0x3].S(0).T(2).Dtis("--D--R--R").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[0x8].S(1).T(2).Dtis("-----D--R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1}); + t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1}); + t[0x2].S(0).T(1).Dtis("-DS-RR-RR").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[0x7].S(1).T(1).Dtis("----DS-RR").ChainDiffs({7, 6, 5}).FrameDiffs({6, 1}); + t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6, 1}); + t[0x4].S(0).T(2).Dtis("--D--R--R").ChainDiffs({9, 8, 7}).FrameDiffs({3}); + t[0x9].S(1).T(2).Dtis("-----D--R").ChainDiffs({10, 9, 8}).FrameDiffs({3, 1}); + t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3, 1}); + t[0x0].S(0).T(0).Dtis("SSSRRRRRR").ChainDiffs({12, 11, 10}).FrameDiffs({12}); + t[0x5].S(1).T(0).Dtis("---SSSRRR").ChainDiffs({1, 1, 1}).FrameDiffs({12, 1}); + t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({12, 1}); + return 
structure; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_l3t3.h b/modules/video_coding/svc/scalability_structure_l3t3.h new file mode 100644 index 0000000000..3f42726cc1 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l3t3.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ + +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" + +namespace webrtc { + +// https://aomediacodec.github.io/av1-rtp-spec/#a63-l3t3-full-svc +class ScalabilityStructureL3T3 : public ScalabilityStructureFullSvc { + public: + ScalabilityStructureL3T3() : ScalabilityStructureFullSvc(3, 3) {} + ~ScalabilityStructureL3T3() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ diff --git a/modules/video_coding/svc/scalability_structure_l3t3_unittest.cc b/modules/video_coding/svc/scalability_structure_l3t3_unittest.cc new file mode 100644 index 0000000000..1a3dc8b60d --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_l3t3_unittest.cc @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_l3t3.h" + +#include "modules/video_coding/svc/scalability_structure_test_helpers.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::IsEmpty; +using ::testing::SizeIs; + +TEST(ScalabilityStructureL3T3Test, SkipS1T1FrameKeepsStructureValid) { + ScalabilityStructureL3T3 structure; + ScalabilityStructureWrapper wrapper(structure); + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/3, /*s1=*/3)); + auto frames = wrapper.GenerateFrames(/*num_temporal_units=*/1); + EXPECT_THAT(frames, SizeIs(2)); + EXPECT_EQ(frames[0].temporal_id, 0); + + frames = wrapper.GenerateFrames(/*num_temporal_units=*/1); + EXPECT_THAT(frames, SizeIs(2)); + EXPECT_EQ(frames[0].temporal_id, 2); + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/3, /*s1=*/0)); + frames = wrapper.GenerateFrames(/*num_temporal_units=*/1); + EXPECT_THAT(frames, SizeIs(1)); + EXPECT_EQ(frames[0].temporal_id, 1); + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/3, /*s1=*/3)); + // Rely on checks inside GenerateFrames frame references are valid. 
+ frames = wrapper.GenerateFrames(/*num_temporal_units=*/1); + EXPECT_THAT(frames, SizeIs(2)); + EXPECT_EQ(frames[0].temporal_id, 2); +} + +TEST(ScalabilityStructureL3T3Test, SwitchSpatialLayerBeforeT1Frame) { + ScalabilityStructureL3T3 structure; + ScalabilityStructureWrapper wrapper(structure); + + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/2, /*s1=*/0)); + EXPECT_THAT(wrapper.GenerateFrames(1), SizeIs(1)); + structure.OnRatesUpdated(EnableTemporalLayers(/*s0=*/0, /*s1=*/2)); + auto frames = wrapper.GenerateFrames(1); + ASSERT_THAT(frames, SizeIs(1)); + EXPECT_THAT(frames[0].frame_diffs, IsEmpty()); + EXPECT_EQ(frames[0].temporal_id, 0); +} + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_s2t1.cc b/modules/video_coding/svc/scalability_structure_s2t1.cc new file mode 100644 index 0000000000..618deb4b37 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_s2t1.cc @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_s2t1.h" + +#include +#include + +#include "absl/base/macros.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +constexpr int ScalabilityStructureS2T1::kNumSpatialLayers; + +ScalabilityStructureS2T1::~ScalabilityStructureS2T1() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureS2T1::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = kNumSpatialLayers; + result.num_temporal_layers = 1; + result.scaling_factor_num[0] = 1; + result.scaling_factor_den[0] = 2; + return result; +} + +FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = kNumSpatialLayers; + structure.num_chains = kNumSpatialLayers; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("S-").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); + structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 0}); + return structure; +} + +std::vector +ScalabilityStructureS2T1::NextFrameConfig(bool restart) { + if (restart) { + can_reference_frame_for_spatial_id_.reset(); + } + std::vector configs; + configs.reserve(kNumSpatialLayers); + for (int sid = 0; sid < kNumSpatialLayers; ++sid) { + if (!active_decode_targets_[sid]) { + can_reference_frame_for_spatial_id_.reset(sid); + continue; + } + configs.emplace_back(); + LayerFrameConfig& config = configs.back().S(sid); + if (can_reference_frame_for_spatial_id_[sid]) { + config.ReferenceAndUpdate(sid); + } else { + config.Keyframe().Update(sid); + can_reference_frame_for_spatial_id_.set(sid); + } + } + + return configs; +} + +GenericFrameInfo 
ScalabilityStructureS2T1::OnEncodeDone( + const LayerFrameConfig& config) { + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = config.Buffers(); + frame_info.decode_target_indications = { + config.SpatialId() == 0 ? DecodeTargetIndication::kSwitch + : DecodeTargetIndication::kNotPresent, + config.SpatialId() == 1 ? DecodeTargetIndication::kSwitch + : DecodeTargetIndication::kNotPresent, + }; + frame_info.part_of_chain = {config.SpatialId() == 0, config.SpatialId() == 1}; + frame_info.active_decode_targets = active_decode_targets_; + return frame_info; +} + +void ScalabilityStructureS2T1::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + active_decode_targets_.set(0, bitrates.GetBitrate(/*sid=*/0, /*tid=*/0) > 0); + active_decode_targets_.set(1, bitrates.GetBitrate(/*sid=*/1, /*tid=*/0) > 0); +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_s2t1.h b/modules/video_coding/svc/scalability_structure_s2t1.h new file mode 100644 index 0000000000..0f27e480fa --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_s2t1.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ + +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +// S1 0--0--0- +// ... +// S0 0--0--0- +class ScalabilityStructureS2T1 : public ScalableVideoController { + public: + ~ScalabilityStructureS2T1() override; + + StreamLayersConfig StreamConfig() const override; + FrameDependencyStructure DependencyStructure() const override; + + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; + + private: + static constexpr int kNumSpatialLayers = 2; + + std::bitset can_reference_frame_for_spatial_id_; + std::bitset<32> active_decode_targets_ = 0b11; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ diff --git a/modules/video_coding/svc/scalability_structure_test_helpers.cc b/modules/video_coding/svc/scalability_structure_test_helpers.cc new file mode 100644 index 0000000000..2b0393f9cf --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_test_helpers.cc @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/video_coding/svc/scalability_structure_test_helpers.h" + +#include + +#include +#include + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_frame_type.h" +#include "modules/video_coding/chain_diff_calculator.h" +#include "modules/video_coding/frame_dependencies_calculator.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "test/gtest.h" + +namespace webrtc { + +VideoBitrateAllocation EnableTemporalLayers(int s0, int s1, int s2) { + VideoBitrateAllocation bitrate; + for (int tid = 0; tid < s0; ++tid) { + bitrate.SetBitrate(0, tid, 1'000'000); + } + for (int tid = 0; tid < s1; ++tid) { + bitrate.SetBitrate(1, tid, 1'000'000); + } + for (int tid = 0; tid < s2; ++tid) { + bitrate.SetBitrate(2, tid, 1'000'000); + } + return bitrate; +} + +void ScalabilityStructureWrapper::GenerateFrames( + int num_temporal_units, + std::vector& frames) { + for (int i = 0; i < num_temporal_units; ++i) { + for (auto& layer_frame : + structure_controller_.NextFrameConfig(/*restart=*/false)) { + int64_t frame_id = ++frame_id_; + bool is_keyframe = layer_frame.IsKeyframe(); + + GenericFrameInfo frame_info = + structure_controller_.OnEncodeDone(layer_frame); + if (is_keyframe) { + chain_diff_calculator_.Reset(frame_info.part_of_chain); + } + frame_info.chain_diffs = + chain_diff_calculator_.From(frame_id, frame_info.part_of_chain); + for (int64_t base_frame_id : frame_deps_calculator_.FromBuffersUsage( + is_keyframe ? VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta, + frame_id, frame_info.encoder_buffers)) { + frame_info.frame_diffs.push_back(frame_id - base_frame_id); + } + + frames.push_back(std::move(frame_info)); + } + } +} + +bool ScalabilityStructureWrapper::FrameReferencesAreValid( + rtc::ArrayView frames) const { + bool valid = true; + // VP9 and AV1 supports up to 8 buffers. 
Expect no more buffers are not used. + std::bitset<8> buffer_contains_frame; + for (size_t i = 0; i < frames.size(); ++i) { + const GenericFrameInfo& frame = frames[i]; + for (const CodecBufferUsage& buffer_usage : frame.encoder_buffers) { + if (buffer_usage.id < 0 || buffer_usage.id >= 8) { + ADD_FAILURE() << "Invalid buffer id " << buffer_usage.id + << " for frame#" << i + << ". Up to 8 buffers are supported."; + valid = false; + continue; + } + if (buffer_usage.referenced && !buffer_contains_frame[buffer_usage.id]) { + ADD_FAILURE() << "buffer " << buffer_usage.id << " for frame#" << i + << " was reference before updated."; + valid = false; + } + if (buffer_usage.updated) { + buffer_contains_frame.set(buffer_usage.id); + } + } + for (int fdiff : frame.frame_diffs) { + if (fdiff <= 0 || static_cast(fdiff) > i) { + ADD_FAILURE() << "Invalid frame diff " << fdiff << " for frame#" << i; + valid = false; + } + } + } + return valid; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalability_structure_test_helpers.h b/modules/video_coding/svc/scalability_structure_test_helpers.h new file mode 100644 index 0000000000..d183be4766 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_test_helpers.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_TEST_HELPERS_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_TEST_HELPERS_H_ + +#include + +#include + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/chain_diff_calculator.h" +#include "modules/video_coding/frame_dependencies_calculator.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +// Creates bitrate allocation with non-zero bitrate for given number of temporal +// layers for each spatial layer. +VideoBitrateAllocation EnableTemporalLayers(int s0, int s1 = 0, int s2 = 0); + +class ScalabilityStructureWrapper { + public: + explicit ScalabilityStructureWrapper(ScalableVideoController& structure) + : structure_controller_(structure) {} + + std::vector GenerateFrames(int num_temporal_units) { + std::vector frames; + GenerateFrames(num_temporal_units, frames); + return frames; + } + void GenerateFrames(int num_temporal_units, + std::vector& frames); + + // Returns false and ADD_FAILUREs for frames with invalid references. + // In particular validates no frame frame reference to frame before frames[0]. + // In error messages frames are indexed starting with 0. 
+ bool FrameReferencesAreValid( + rtc::ArrayView frames) const; + + private: + ScalableVideoController& structure_controller_; + FrameDependenciesCalculator frame_deps_calculator_; + ChainDiffCalculator chain_diff_calculator_; + int64_t frame_id_ = 0; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_TEST_HELPERS_H_ diff --git a/modules/video_coding/svc/scalability_structure_unittest.cc b/modules/video_coding/svc/scalability_structure_unittest.cc new file mode 100644 index 0000000000..d6766f0d50 --- /dev/null +++ b/modules/video_coding/svc/scalability_structure_unittest.cc @@ -0,0 +1,311 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalability_structure_test_helpers.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::AllOf; +using ::testing::Contains; +using ::testing::Each; +using ::testing::Field; +using ::testing::Ge; +using ::testing::IsEmpty; +using ::testing::Le; +using ::testing::Lt; +using ::testing::Not; +using ::testing::SizeIs; +using ::testing::TestWithParam; +using ::testing::Values; + +struct SvcTestParam { + friend std::ostream& operator<<(std::ostream& os, const SvcTestParam& param) { + return os << param.name; + } + + std::string name; + int num_temporal_units; +}; + +class ScalabilityStructureTest : public TestWithParam {}; + +TEST_P(ScalabilityStructureTest, + NumberOfDecodeTargetsAndChainsAreInRangeAndConsistent) { + FrameDependencyStructure structure = + CreateScalabilityStructure(GetParam().name)->DependencyStructure(); + EXPECT_GT(structure.num_decode_targets, 0); + EXPECT_LE(structure.num_decode_targets, + DependencyDescriptor::kMaxDecodeTargets); + EXPECT_GE(structure.num_chains, 0); + EXPECT_LE(structure.num_chains, structure.num_decode_targets); + if (structure.num_chains == 0) { + EXPECT_THAT(structure.decode_target_protected_by_chain, IsEmpty()); + } else { + EXPECT_THAT(structure.decode_target_protected_by_chain, + AllOf(SizeIs(structure.num_decode_targets), Each(Ge(0)), + Each(Lt(structure.num_chains)))); + } + EXPECT_THAT(structure.templates, + SizeIs(Lt(size_t{DependencyDescriptor::kMaxTemplates}))); +} + +TEST_P(ScalabilityStructureTest, TemplatesAreSortedByLayerId) { + FrameDependencyStructure structure = + CreateScalabilityStructure(GetParam().name)->DependencyStructure(); + 
ASSERT_THAT(structure.templates, Not(IsEmpty())); + const auto& first_templates = structure.templates.front(); + EXPECT_EQ(first_templates.spatial_id, 0); + EXPECT_EQ(first_templates.temporal_id, 0); + for (size_t i = 1; i < structure.templates.size(); ++i) { + const auto& prev_template = structure.templates[i - 1]; + const auto& next_template = structure.templates[i]; + if (next_template.spatial_id == prev_template.spatial_id && + next_template.temporal_id == prev_template.temporal_id) { + // Same layer, next_layer_idc == 0 + } else if (next_template.spatial_id == prev_template.spatial_id && + next_template.temporal_id == prev_template.temporal_id + 1) { + // Next temporal layer, next_layer_idc == 1 + } else if (next_template.spatial_id == prev_template.spatial_id + 1 && + next_template.temporal_id == 0) { + // Next spatial layer, next_layer_idc == 2 + } else { + // everything else is invalid. + ADD_FAILURE() << "Invalid templates order. Template #" << i + << " with layer (" << next_template.spatial_id << "," + << next_template.temporal_id + << ") follows template with layer (" + << prev_template.spatial_id << "," + << prev_template.temporal_id << ")."; + } + } +} + +TEST_P(ScalabilityStructureTest, TemplatesMatchNumberOfDecodeTargetsAndChains) { + FrameDependencyStructure structure = + CreateScalabilityStructure(GetParam().name)->DependencyStructure(); + EXPECT_THAT( + structure.templates, + Each(AllOf(Field(&FrameDependencyTemplate::decode_target_indications, + SizeIs(structure.num_decode_targets)), + Field(&FrameDependencyTemplate::chain_diffs, + SizeIs(structure.num_chains))))); +} + +TEST_P(ScalabilityStructureTest, FrameInfoMatchesFrameDependencyStructure) { + std::unique_ptr svc_controller = + CreateScalabilityStructure(GetParam().name); + FrameDependencyStructure structure = svc_controller->DependencyStructure(); + std::vector frame_infos = + ScalabilityStructureWrapper(*svc_controller) + .GenerateFrames(GetParam().num_temporal_units); + for (size_t 
frame_id = 0; frame_id < frame_infos.size(); ++frame_id) { + const auto& frame = frame_infos[frame_id]; + EXPECT_GE(frame.spatial_id, 0) << " for frame " << frame_id; + EXPECT_GE(frame.temporal_id, 0) << " for frame " << frame_id; + EXPECT_THAT(frame.decode_target_indications, + SizeIs(structure.num_decode_targets)) + << " for frame " << frame_id; + EXPECT_THAT(frame.part_of_chain, SizeIs(structure.num_chains)) + << " for frame " << frame_id; + } +} + +TEST_P(ScalabilityStructureTest, ThereIsAPerfectTemplateForEachFrame) { + std::unique_ptr svc_controller = + CreateScalabilityStructure(GetParam().name); + FrameDependencyStructure structure = svc_controller->DependencyStructure(); + std::vector frame_infos = + ScalabilityStructureWrapper(*svc_controller) + .GenerateFrames(GetParam().num_temporal_units); + for (size_t frame_id = 0; frame_id < frame_infos.size(); ++frame_id) { + EXPECT_THAT(structure.templates, Contains(frame_infos[frame_id])) + << " for frame " << frame_id; + } +} + +TEST_P(ScalabilityStructureTest, FrameDependsOnSameOrLowerLayer) { + std::unique_ptr svc_controller = + CreateScalabilityStructure(GetParam().name); + std::vector frame_infos = + ScalabilityStructureWrapper(*svc_controller) + .GenerateFrames(GetParam().num_temporal_units); + int64_t num_frames = frame_infos.size(); + + for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) { + const auto& frame = frame_infos[frame_id]; + for (int frame_diff : frame.frame_diffs) { + int64_t base_frame_id = frame_id - frame_diff; + const auto& base_frame = frame_infos[base_frame_id]; + EXPECT_GE(frame.spatial_id, base_frame.spatial_id) + << "Frame " << frame_id << " depends on frame " << base_frame_id; + EXPECT_GE(frame.temporal_id, base_frame.temporal_id) + << "Frame " << frame_id << " depends on frame " << base_frame_id; + } + } +} + +TEST_P(ScalabilityStructureTest, NoFrameDependsOnDiscardableOrNotPresent) { + std::unique_ptr svc_controller = + CreateScalabilityStructure(GetParam().name); + 
std::vector frame_infos = + ScalabilityStructureWrapper(*svc_controller) + .GenerateFrames(GetParam().num_temporal_units); + int64_t num_frames = frame_infos.size(); + FrameDependencyStructure structure = svc_controller->DependencyStructure(); + + for (int dt = 0; dt < structure.num_decode_targets; ++dt) { + for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) { + const auto& frame = frame_infos[frame_id]; + if (frame.decode_target_indications[dt] == + DecodeTargetIndication::kNotPresent) { + continue; + } + for (int frame_diff : frame.frame_diffs) { + int64_t base_frame_id = frame_id - frame_diff; + const auto& base_frame = frame_infos[base_frame_id]; + EXPECT_NE(base_frame.decode_target_indications[dt], + DecodeTargetIndication::kNotPresent) + << "Frame " << frame_id << " depends on frame " << base_frame_id + << " that is not part of decode target#" << dt; + EXPECT_NE(base_frame.decode_target_indications[dt], + DecodeTargetIndication::kDiscardable) + << "Frame " << frame_id << " depends on frame " << base_frame_id + << " that is discardable for decode target#" << dt; + } + } + } +} + +TEST_P(ScalabilityStructureTest, NoFrameDependsThroughSwitchIndication) { + std::unique_ptr svc_controller = + CreateScalabilityStructure(GetParam().name); + FrameDependencyStructure structure = svc_controller->DependencyStructure(); + std::vector frame_infos = + ScalabilityStructureWrapper(*svc_controller) + .GenerateFrames(GetParam().num_temporal_units); + int64_t num_frames = frame_infos.size(); + std::vector> full_deps(num_frames); + + // For each frame calculate set of all frames it depends on, both directly and + // indirectly. 
+ for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) { + std::set all_base_frames; + for (int frame_diff : frame_infos[frame_id].frame_diffs) { + int64_t base_frame_id = frame_id - frame_diff; + all_base_frames.insert(base_frame_id); + const auto& indirect = full_deps[base_frame_id]; + all_base_frames.insert(indirect.begin(), indirect.end()); + } + full_deps[frame_id] = std::move(all_base_frames); + } + + // Now check the switch indication: frames after the switch indication mustn't + // depend on any addition frames before the switch indications. + for (int dt = 0; dt < structure.num_decode_targets; ++dt) { + for (int64_t switch_frame_id = 0; switch_frame_id < num_frames; + ++switch_frame_id) { + if (frame_infos[switch_frame_id].decode_target_indications[dt] != + DecodeTargetIndication::kSwitch) { + continue; + } + for (int64_t later_frame_id = switch_frame_id + 1; + later_frame_id < num_frames; ++later_frame_id) { + if (frame_infos[later_frame_id].decode_target_indications[dt] == + DecodeTargetIndication::kNotPresent) { + continue; + } + for (int frame_diff : frame_infos[later_frame_id].frame_diffs) { + int64_t early_frame_id = later_frame_id - frame_diff; + if (early_frame_id < switch_frame_id) { + EXPECT_THAT(full_deps[switch_frame_id], Contains(early_frame_id)) + << "For decode target #" << dt << " frame " << later_frame_id + << " depends on the frame " << early_frame_id + << " that switch indication frame " << switch_frame_id + << " doesn't directly on indirectly depend on."; + } + } + } + } + } +} + +TEST_P(ScalabilityStructureTest, ProduceNoFrameForDisabledLayers) { + std::unique_ptr svc_controller = + CreateScalabilityStructure(GetParam().name); + ScalableVideoController::StreamLayersConfig structure = + svc_controller->StreamConfig(); + + VideoBitrateAllocation all_bitrates; + for (int sid = 0; sid < structure.num_spatial_layers; ++sid) { + for (int tid = 0; tid < structure.num_temporal_layers; ++tid) { + all_bitrates.SetBitrate(sid, tid, 
100'000); + } + } + + svc_controller->OnRatesUpdated(all_bitrates); + ScalabilityStructureWrapper wrapper(*svc_controller); + std::vector frames = + wrapper.GenerateFrames(GetParam().num_temporal_units); + + for (int sid = 0; sid < structure.num_spatial_layers; ++sid) { + for (int tid = 0; tid < structure.num_temporal_layers; ++tid) { + // When all layers were enabled, expect there was a frame for each layer. + EXPECT_THAT(frames, + Contains(AllOf(Field(&GenericFrameInfo::spatial_id, sid), + Field(&GenericFrameInfo::temporal_id, tid)))) + << "For layer (" << sid << "," << tid << ")"; + // Restore bitrates for all layers before disabling single layer. + VideoBitrateAllocation bitrates = all_bitrates; + bitrates.SetBitrate(sid, tid, 0); + svc_controller->OnRatesUpdated(bitrates); + // With layer (sid, tid) disabled, expect no frames are produced for it. + EXPECT_THAT( + wrapper.GenerateFrames(GetParam().num_temporal_units), + Not(Contains(AllOf(Field(&GenericFrameInfo::spatial_id, sid), + Field(&GenericFrameInfo::temporal_id, tid))))) + << "For layer (" << sid << "," << tid << ")"; + } + } +} + +INSTANTIATE_TEST_SUITE_P( + Svc, + ScalabilityStructureTest, + Values(SvcTestParam{"L1T2", /*num_temporal_units=*/4}, + SvcTestParam{"L1T3", /*num_temporal_units=*/8}, + SvcTestParam{"L2T1", /*num_temporal_units=*/3}, + SvcTestParam{"L2T1_KEY", /*num_temporal_units=*/3}, + SvcTestParam{"L3T1", /*num_temporal_units=*/3}, + SvcTestParam{"L3T3", /*num_temporal_units=*/8}, + SvcTestParam{"S2T1", /*num_temporal_units=*/3}, + SvcTestParam{"L2T2", /*num_temporal_units=*/4}, + SvcTestParam{"L2T2_KEY", /*num_temporal_units=*/4}, + SvcTestParam{"L2T2_KEY_SHIFT", /*num_temporal_units=*/4}, + SvcTestParam{"L3T3_KEY", /*num_temporal_units=*/8}), + [](const testing::TestParamInfo& info) { + return info.param.name; + }); + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/svc/scalable_video_controller.h b/modules/video_coding/svc/scalable_video_controller.h new file 
mode 100644 index 0000000000..d2d8486863 --- /dev/null +++ b/modules/video_coding/svc/scalable_video_controller.h @@ -0,0 +1,137 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" + +namespace webrtc { + +// Controls how video should be encoded to be scalable. Outputs results as +// buffer usage configuration for encoder and enough details to communicate the +// scalability structure via dependency descriptor rtp header extension. +class ScalableVideoController { + public: + struct StreamLayersConfig { + int num_spatial_layers = 1; + int num_temporal_layers = 1; + // Spatial layers scaling. Frames with spatial_id = i expected to be encoded + // with original_resolution * scaling_factor_num[i] / scaling_factor_den[i]. + int scaling_factor_num[DependencyDescriptor::kMaxSpatialIds] = {1, 1, 1, 1}; + int scaling_factor_den[DependencyDescriptor::kMaxSpatialIds] = {1, 1, 1, 1}; + }; + class LayerFrameConfig { + public: + // Builders/setters. + LayerFrameConfig& Id(int value); + LayerFrameConfig& Keyframe(); + LayerFrameConfig& S(int value); + LayerFrameConfig& T(int value); + LayerFrameConfig& Reference(int buffer_id); + LayerFrameConfig& Update(int buffer_id); + LayerFrameConfig& ReferenceAndUpdate(int buffer_id); + + // Getters. 
+ int Id() const { return id_; } + bool IsKeyframe() const { return is_keyframe_; } + int SpatialId() const { return spatial_id_; } + int TemporalId() const { return temporal_id_; } + const absl::InlinedVector& Buffers() + const { + return buffers_; + } + + private: + // Id to match configuration returned by NextFrameConfig with + // (possibly modified) configuration passed back via OnEncoderDone. + // The meaning of the id is an implementation detail of + // the ScalableVideoController. + int id_ = 0; + + // Indication frame should be encoded as a key frame. In particular when + // `is_keyframe=true` property `CodecBufferUsage::referenced` should be + // ignored and treated as false. + bool is_keyframe_ = false; + + int spatial_id_ = 0; + int temporal_id_ = 0; + // Describes how encoder which buffers encoder allowed to reference and + // which buffers encoder should update. + absl::InlinedVector buffers_; + }; + + virtual ~ScalableVideoController() = default; + + // Returns video structure description for encoder to configure itself. + virtual StreamLayersConfig StreamConfig() const = 0; + + // Returns video structure description in format compatible with + // dependency descriptor rtp header extension. + virtual FrameDependencyStructure DependencyStructure() const = 0; + + // Notifies Controller with updated bitrates per layer. In particular notifies + // when certain layers should be disabled. + // Controller shouldn't produce LayerFrameConfig for disabled layers. + virtual void OnRatesUpdated(const VideoBitrateAllocation& bitrates) = 0; + + // When `restart` is true, first `LayerFrameConfig` should have `is_keyframe` + // set to true. + // Returned vector shouldn't be empty. + virtual std::vector NextFrameConfig(bool restart) = 0; + + // Returns configuration to pass to EncoderCallback. + virtual GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) = 0; +}; + +// Below are implementation details. 
+inline ScalableVideoController::LayerFrameConfig& +ScalableVideoController::LayerFrameConfig::Id(int value) { + id_ = value; + return *this; +} +inline ScalableVideoController::LayerFrameConfig& +ScalableVideoController::LayerFrameConfig::Keyframe() { + is_keyframe_ = true; + return *this; +} +inline ScalableVideoController::LayerFrameConfig& +ScalableVideoController::LayerFrameConfig::S(int value) { + spatial_id_ = value; + return *this; +} +inline ScalableVideoController::LayerFrameConfig& +ScalableVideoController::LayerFrameConfig::T(int value) { + temporal_id_ = value; + return *this; +} +inline ScalableVideoController::LayerFrameConfig& +ScalableVideoController::LayerFrameConfig::Reference(int buffer_id) { + buffers_.emplace_back(buffer_id, /*referenced=*/true, /*updated=*/false); + return *this; +} +inline ScalableVideoController::LayerFrameConfig& +ScalableVideoController::LayerFrameConfig::Update(int buffer_id) { + buffers_.emplace_back(buffer_id, /*referenced=*/false, /*updated=*/true); + return *this; +} +inline ScalableVideoController::LayerFrameConfig& +ScalableVideoController::LayerFrameConfig::ReferenceAndUpdate(int buffer_id) { + buffers_.emplace_back(buffer_id, /*referenced=*/true, /*updated=*/true); + return *this; +} + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_H_ diff --git a/modules/video_coding/svc/scalable_video_controller_no_layering.cc b/modules/video_coding/svc/scalable_video_controller_no_layering.cc new file mode 100644 index 0000000000..6d8e6e8fc6 --- /dev/null +++ b/modules/video_coding/svc/scalable_video_controller_no_layering.cc @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" + +#include +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +ScalableVideoControllerNoLayering::~ScalableVideoControllerNoLayering() = + default; + +ScalableVideoController::StreamLayersConfig +ScalableVideoControllerNoLayering::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = 1; + result.num_temporal_layers = 1; + return result; +} + +FrameDependencyStructure +ScalableVideoControllerNoLayering::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 1; + FrameDependencyTemplate a_template; + a_template.decode_target_indications = {DecodeTargetIndication::kSwitch}; + structure.templates.push_back(a_template); + return structure; +} + +std::vector +ScalableVideoControllerNoLayering::NextFrameConfig(bool restart) { + std::vector result(1); + if (restart || start_) { + result[0].Id(0).Keyframe().Update(0); + } else { + result[0].Id(0).ReferenceAndUpdate(0); + } + start_ = false; + return result; +} + +GenericFrameInfo ScalableVideoControllerNoLayering::OnEncodeDone( + const LayerFrameConfig& config) { + RTC_DCHECK_EQ(config.Id(), 0); + GenericFrameInfo frame_info; + frame_info.encoder_buffers = config.Buffers(); + if (config.IsKeyframe()) { + for (auto& buffer : frame_info.encoder_buffers) { + buffer.referenced = false; + } + } + frame_info.decode_target_indications = {DecodeTargetIndication::kSwitch}; + return frame_info; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/scalable_video_controller_no_layering.h b/modules/video_coding/svc/scalable_video_controller_no_layering.h new file mode 100644 index 0000000000..e253ffe841 --- /dev/null +++ b/modules/video_coding/svc/scalable_video_controller_no_layering.h @@ -0,0 +1,39 
@@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ + +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +class ScalableVideoControllerNoLayering : public ScalableVideoController { + public: + ~ScalableVideoControllerNoLayering() override; + + StreamLayersConfig StreamConfig() const override; + FrameDependencyStructure DependencyStructure() const override; + + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override {} + + private: + bool start_ = true; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_ diff --git a/modules/video_coding/codecs/vp9/svc_rate_allocator.cc b/modules/video_coding/svc/svc_rate_allocator.cc similarity index 74% rename from modules/video_coding/codecs/vp9/svc_rate_allocator.cc rename to modules/video_coding/svc/svc_rate_allocator.cc index 7d5c724e30..a51bdb05dd 100644 --- a/modules/video_coding/codecs/vp9/svc_rate_allocator.cc +++ b/modules/video_coding/svc/svc_rate_allocator.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" +#include "modules/video_coding/svc/svc_rate_allocator.h" #include #include @@ -17,40 +17,38 @@ #include #include "absl/container/inlined_vector.h" +#include "modules/video_coding/svc/create_scalability_structure.h" #include "rtc_base/checks.h" namespace webrtc { namespace { -const float kSpatialLayeringRateScalingFactor = 0.55f; -const float kTemporalLayeringRateScalingFactor = 0.55f; +constexpr float kSpatialLayeringRateScalingFactor = 0.55f; +constexpr float kTemporalLayeringRateScalingFactor = 0.55f; -// Returns numberOfSpatialLayers if no layers are active. -size_t GetFirstActiveLayer(const VideoCodec& codec) { - RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); - RTC_DCHECK_GT(codec.VP9().numberOfSpatialLayers, 0u); - size_t layer = 0; - for (; layer < codec.VP9().numberOfSpatialLayers; ++layer) { - if (codec.spatialLayers[layer].active) { +struct ActiveSpatialLayers { + size_t first = 0; + size_t num = 0; +}; + +ActiveSpatialLayers GetActiveSpatialLayers(const VideoCodec& codec, + size_t num_spatial_layers) { + ActiveSpatialLayers active; + for (active.first = 0; active.first < num_spatial_layers; ++active.first) { + if (codec.spatialLayers[active.first].active) { break; } } - return layer; -} -static size_t GetNumActiveSpatialLayers(const VideoCodec& codec) { - RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); - RTC_DCHECK_GT(codec.VP9().numberOfSpatialLayers, 0u); - - const size_t first_active_layer = GetFirstActiveLayer(codec); - size_t last_active_layer = first_active_layer; - for (; last_active_layer < codec.VP9().numberOfSpatialLayers; - ++last_active_layer) { + size_t last_active_layer = active.first; + for (; last_active_layer < num_spatial_layers; ++last_active_layer) { if (!codec.spatialLayers[last_active_layer].active) { break; } } - return last_active_layer - first_active_layer; + active.num = last_active_layer - active.first; + + return active; } std::vector AdjustAndVerify( @@ 
-62,9 +60,9 @@ std::vector AdjustAndVerify( // max bitrate constraint, try to pass it forward to the next one. DataRate excess_rate = DataRate::Zero(); for (size_t sl_idx = 0; sl_idx < spatial_layer_rates.size(); ++sl_idx) { - DataRate min_rate = DataRate::kbps( + DataRate min_rate = DataRate::KilobitsPerSec( codec.spatialLayers[first_active_layer + sl_idx].minBitrate); - DataRate max_rate = DataRate::kbps( + DataRate max_rate = DataRate::KilobitsPerSec( codec.spatialLayers[first_active_layer + sl_idx].maxBitrate); DataRate layer_rate = spatial_layer_rates[sl_idx] + excess_rate; @@ -125,7 +123,7 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec, size_t first_active_layer, size_t num_active_layers) { if (num_active_layers == 1) { - return DataRate::kbps(codec.spatialLayers[0].minBitrate); + return DataRate::KilobitsPerSec(codec.spatialLayers[0].minBitrate); } if (codec.mode == VideoCodecMode::kRealtimeVideo) { @@ -133,19 +131,20 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec, DataRate upper_bound = DataRate::Zero(); if (num_active_layers > 1) { for (size_t i = 0; i < num_active_layers - 1; ++i) { - lower_bound += DataRate::kbps( + lower_bound += DataRate::KilobitsPerSec( codec.spatialLayers[first_active_layer + i].minBitrate); - upper_bound += DataRate::kbps( + upper_bound += DataRate::KilobitsPerSec( codec.spatialLayers[first_active_layer + i].maxBitrate); } } - upper_bound += - DataRate::kbps(codec.spatialLayers[num_active_layers - 1].minBitrate); + upper_bound += DataRate::KilobitsPerSec( + codec.spatialLayers[first_active_layer + num_active_layers - 1] + .minBitrate); // Do a binary search until upper and lower bound is the highest bitrate for // |num_active_layers| - 1 layers and lowest bitrate for |num_active_layers| // layers respectively. 
- while (upper_bound - lower_bound > DataRate::bps(1)) { + while (upper_bound - lower_bound > DataRate::BitsPerSec(1)) { DataRate try_rate = (lower_bound + upper_bound) / 2; if (AdjustAndVerify(codec, first_active_layer, SplitBitrate(num_active_layers, try_rate, @@ -160,10 +159,10 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec, } else { DataRate toggling_rate = DataRate::Zero(); for (size_t i = 0; i < num_active_layers - 1; ++i) { - toggling_rate += DataRate::kbps( + toggling_rate += DataRate::KilobitsPerSec( codec.spatialLayers[first_active_layer + i].targetBitrate); } - toggling_rate += DataRate::kbps( + toggling_rate += DataRate::KilobitsPerSec( codec.spatialLayers[first_active_layer + num_active_layers - 1] .minBitrate); return toggling_rate; @@ -172,16 +171,39 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec, } // namespace +SvcRateAllocator::NumLayers SvcRateAllocator::GetNumLayers( + const VideoCodec& codec) { + NumLayers layers; + if (!codec.ScalabilityMode().empty()) { + if (auto structure = CreateScalabilityStructure(codec.ScalabilityMode())) { + ScalableVideoController::StreamLayersConfig config = + structure->StreamConfig(); + layers.spatial = config.num_spatial_layers; + layers.temporal = config.num_temporal_layers; + return layers; + } + } + if (codec.codecType == kVideoCodecVP9) { + layers.spatial = codec.VP9().numberOfSpatialLayers; + layers.temporal = codec.VP9().numberOfTemporalLayers; + return layers; + } + layers.spatial = 1; + layers.temporal = 1; + return layers; +} + SvcRateAllocator::SvcRateAllocator(const VideoCodec& codec) : codec_(codec), + num_layers_(GetNumLayers(codec)), experiment_settings_(StableTargetRateExperiment::ParseFromFieldTrials()), cumulative_layer_start_bitrates_(GetLayerStartBitrates(codec)), last_active_layer_count_(0) { - RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); - RTC_DCHECK_GT(codec.VP9().numberOfSpatialLayers, 0u); - RTC_DCHECK_GT(codec.VP9().numberOfTemporalLayers, 0u); - for 
(size_t layer_idx = 0; layer_idx < codec.VP9().numberOfSpatialLayers; - ++layer_idx) { + RTC_DCHECK_GT(num_layers_.spatial, 0); + RTC_DCHECK_LE(num_layers_.spatial, kMaxSpatialLayers); + RTC_DCHECK_GT(num_layers_.temporal, 0); + RTC_DCHECK_LE(num_layers_.temporal, 3); + for (size_t layer_idx = 0; layer_idx < num_layers_.spatial; ++layer_idx) { // Verify min <= target <= max. if (codec.spatialLayers[layer_idx].active) { RTC_DCHECK_GT(codec.spatialLayers[layer_idx].maxBitrate, 0); @@ -199,20 +221,21 @@ VideoBitrateAllocation SvcRateAllocator::Allocate( VideoBitrateAllocationParameters parameters) { DataRate total_bitrate = parameters.total_bitrate; if (codec_.maxBitrate != 0) { - total_bitrate = std::min(total_bitrate, DataRate::kbps(codec_.maxBitrate)); + total_bitrate = + std::min(total_bitrate, DataRate::KilobitsPerSec(codec_.maxBitrate)); } if (codec_.spatialLayers[0].targetBitrate == 0) { - // Delegate rate distribution to VP9 encoder wrapper if bitrate thresholds + // Delegate rate distribution to encoder wrapper if bitrate thresholds // are not set. VideoBitrateAllocation bitrate_allocation; bitrate_allocation.SetBitrate(0, 0, total_bitrate.bps()); return bitrate_allocation; } - const size_t first_active_layer = GetFirstActiveLayer(codec_); - const size_t num_active_layers = GetNumActiveSpatialLayers(codec_); - size_t num_spatial_layers = num_active_layers; + const ActiveSpatialLayers active_layers = + GetActiveSpatialLayers(codec_, num_layers_.spatial); + size_t num_spatial_layers = active_layers.num; if (num_spatial_layers == 0) { return VideoBitrateAllocation(); // All layers are deactivated. 
@@ -247,13 +270,13 @@ VideoBitrateAllocation SvcRateAllocator::Allocate( VideoBitrateAllocation allocation; if (codec_.mode == VideoCodecMode::kRealtimeVideo) { - allocation = GetAllocationNormalVideo(total_bitrate, first_active_layer, + allocation = GetAllocationNormalVideo(total_bitrate, active_layers.first, num_spatial_layers); } else { - allocation = GetAllocationScreenSharing(total_bitrate, first_active_layer, + allocation = GetAllocationScreenSharing(total_bitrate, active_layers.first, num_spatial_layers); } - allocation.set_bw_limited(num_spatial_layers < num_active_layers); + allocation.set_bw_limited(num_spatial_layers < active_layers.num); return allocation; } @@ -277,25 +300,24 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationNormalVideo( VideoBitrateAllocation bitrate_allocation; - const size_t num_temporal_layers = codec_.VP9().numberOfTemporalLayers; for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { std::vector temporal_layer_rates = - SplitBitrate(num_temporal_layers, spatial_layer_rates[sl_idx], + SplitBitrate(num_layers_.temporal, spatial_layer_rates[sl_idx], kTemporalLayeringRateScalingFactor); // Distribute rate across temporal layers. Allocate more bits to lower // layers since they are used for prediction of higher layers and their // references are far apart. 
- if (num_temporal_layers == 1) { + if (num_layers_.temporal == 1) { bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, temporal_layer_rates[0].bps()); - } else if (num_temporal_layers == 2) { + } else if (num_layers_.temporal == 2) { bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, temporal_layer_rates[1].bps()); bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 1, temporal_layer_rates[0].bps()); } else { - RTC_CHECK_EQ(num_temporal_layers, 3); + RTC_CHECK_EQ(num_layers_.temporal, 3); // In case of three temporal layers the high layer has two frames and the // middle layer has one frame within GOP (in between two consecutive low // layer frames). Thus high layer requires more bits (comparing pure @@ -324,7 +346,8 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationScreenSharing( if (num_spatial_layers == 0 || total_bitrate < - DataRate::kbps(codec_.spatialLayers[first_active_layer].minBitrate)) { + DataRate::KilobitsPerSec( + codec_.spatialLayers[first_active_layer].minBitrate)) { // Always enable at least one layer. bitrate_allocation.SetBitrate(first_active_layer, 0, total_bitrate.bps()); return bitrate_allocation; @@ -336,9 +359,9 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationScreenSharing( for (sl_idx = first_active_layer; sl_idx < first_active_layer + num_spatial_layers; ++sl_idx) { const DataRate min_rate = - DataRate::kbps(codec_.spatialLayers[sl_idx].minBitrate); + DataRate::KilobitsPerSec(codec_.spatialLayers[sl_idx].minBitrate); const DataRate target_rate = - DataRate::kbps(codec_.spatialLayers[sl_idx].targetBitrate); + DataRate::KilobitsPerSec(codec_.spatialLayers[sl_idx].targetBitrate); if (allocated_rate + min_rate > total_bitrate) { // Use stable rate to determine if layer should be enabled. @@ -352,9 +375,9 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationScreenSharing( if (sl_idx > 0 && total_bitrate - allocated_rate > DataRate::Zero()) { // Add leftover to the last allocated layer. 
- top_layer_rate = - std::min(top_layer_rate + (total_bitrate - allocated_rate), - DataRate::kbps(codec_.spatialLayers[sl_idx - 1].maxBitrate)); + top_layer_rate = std::min( + top_layer_rate + (total_bitrate - allocated_rate), + DataRate::KilobitsPerSec(codec_.spatialLayers[sl_idx - 1].maxBitrate)); bitrate_allocation.SetBitrate(sl_idx - 1, 0, top_layer_rate.bps()); } @@ -380,17 +403,19 @@ size_t SvcRateAllocator::FindNumEnabledLayers(DataRate target_rate) const { } DataRate SvcRateAllocator::GetMaxBitrate(const VideoCodec& codec) { - const size_t first_active_layer = GetFirstActiveLayer(codec); - const size_t num_spatial_layers = GetNumActiveSpatialLayers(codec); + const NumLayers num_layers = GetNumLayers(codec); + const ActiveSpatialLayers active_layers = + GetActiveSpatialLayers(codec, num_layers.spatial); DataRate max_bitrate = DataRate::Zero(); - for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { - max_bitrate += DataRate::kbps( - codec.spatialLayers[first_active_layer + sl_idx].maxBitrate); + for (size_t sl_idx = 0; sl_idx < active_layers.num; ++sl_idx) { + max_bitrate += DataRate::KilobitsPerSec( + codec.spatialLayers[active_layers.first + sl_idx].maxBitrate); } if (codec.maxBitrate != 0) { - max_bitrate = std::min(max_bitrate, DataRate::kbps(codec.maxBitrate)); + max_bitrate = + std::min(max_bitrate, DataRate::KilobitsPerSec(codec.maxBitrate)); } return max_bitrate; @@ -408,12 +433,13 @@ DataRate SvcRateAllocator::GetPaddingBitrate(const VideoCodec& codec) { absl::InlinedVector SvcRateAllocator::GetLayerStartBitrates(const VideoCodec& codec) { absl::InlinedVector start_bitrates; - const size_t first_active_layer = GetFirstActiveLayer(codec); - const size_t num_layers = GetNumActiveSpatialLayers(codec); + const NumLayers num_layers = GetNumLayers(codec); + const ActiveSpatialLayers active_layers = + GetActiveSpatialLayers(codec, num_layers.spatial); DataRate last_rate = DataRate::Zero(); - for (size_t i = 1; i <= num_layers; ++i) { + for 
(size_t i = 1; i <= active_layers.num; ++i) { DataRate layer_toggling_rate = - FindLayerTogglingThreshold(codec, first_active_layer, i); + FindLayerTogglingThreshold(codec, active_layers.first, i); start_bitrates.push_back(layer_toggling_rate); RTC_DCHECK_LE(last_rate, layer_toggling_rate); last_rate = layer_toggling_rate; diff --git a/modules/video_coding/svc/svc_rate_allocator.h b/modules/video_coding/svc/svc_rate_allocator.h new file mode 100644 index 0000000000..bd75fca284 --- /dev/null +++ b/modules/video_coding/svc/svc_rate_allocator.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_SVC_SVC_RATE_ALLOCATOR_H_ +#define MODULES_VIDEO_CODING_SVC_SVC_RATE_ALLOCATOR_H_ + +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/video_codec.h" +#include "rtc_base/experiments/stable_target_rate_experiment.h" + +namespace webrtc { + +class SvcRateAllocator : public VideoBitrateAllocator { + public: + explicit SvcRateAllocator(const VideoCodec& codec); + + VideoBitrateAllocation Allocate( + VideoBitrateAllocationParameters parameters) override; + + static DataRate GetMaxBitrate(const VideoCodec& codec); + static DataRate GetPaddingBitrate(const VideoCodec& codec); + static absl::InlinedVector GetLayerStartBitrates( + const VideoCodec& codec); + + private: + struct NumLayers { + size_t spatial = 1; + size_t temporal = 1; + }; + + static NumLayers GetNumLayers(const VideoCodec& codec); + 
VideoBitrateAllocation GetAllocationNormalVideo( + DataRate total_bitrate, + size_t first_active_layer, + size_t num_spatial_layers) const; + + VideoBitrateAllocation GetAllocationScreenSharing( + DataRate total_bitrate, + size_t first_active_layer, + size_t num_spatial_layers) const; + + // Returns the number of layers that are active and have enough bitrate to + // actually be enabled. + size_t FindNumEnabledLayers(DataRate target_rate) const; + + const VideoCodec codec_; + const NumLayers num_layers_; + const StableTargetRateExperiment experiment_settings_; + const absl::InlinedVector + cumulative_layer_start_bitrates_; + size_t last_active_layer_count_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SVC_RATE_ALLOCATOR_H_ diff --git a/modules/video_coding/codecs/vp9/svc_rate_allocator_unittest.cc b/modules/video_coding/svc/svc_rate_allocator_unittest.cc similarity index 78% rename from modules/video_coding/codecs/vp9/svc_rate_allocator_unittest.cc rename to modules/video_coding/svc/svc_rate_allocator_unittest.cc index 6a677a2a6f..fd22acd85d 100644 --- a/modules/video_coding/codecs/vp9/svc_rate_allocator_unittest.cc +++ b/modules/video_coding/svc/svc_rate_allocator_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" +#include "modules/video_coding/svc/svc_rate_allocator.h" #include #include @@ -34,8 +34,8 @@ static VideoCodec Configure(size_t width, : VideoCodecMode::kRealtimeVideo; std::vector spatial_layers = - GetSvcConfig(width, height, 30, num_spatial_layers, num_temporal_layers, - is_screen_sharing); + GetSvcConfig(width, height, 30, /*first_active_layer=*/0, + num_spatial_layers, num_temporal_layers, is_screen_sharing); RTC_CHECK_LE(spatial_layers.size(), kMaxSpatialLayers); codec.VP9()->numberOfSpatialLayers = @@ -259,8 +259,8 @@ TEST(SvcRateAllocatorTest, FindLayerTogglingThreshold) { // Predetermined constants indicating the min bitrate needed for two and three // layers to be enabled respectively, using the config from Configure() with // 1280x720 resolution and three spatial layers. - const DataRate kTwoLayerMinRate = DataRate::bps(299150); - const DataRate kThreeLayerMinRate = DataRate::bps(891052); + const DataRate kTwoLayerMinRate = DataRate::BitsPerSec(299150); + const DataRate kThreeLayerMinRate = DataRate::BitsPerSec(891052); VideoCodec codec = Configure(1280, 720, 3, 1, false); absl::InlinedVector layer_start_bitrates = @@ -270,6 +270,97 @@ TEST(SvcRateAllocatorTest, FindLayerTogglingThreshold) { EXPECT_EQ(layer_start_bitrates[2], kThreeLayerMinRate); } +TEST(SvcRateAllocatorTest, SupportsAv1) { + VideoCodec codec; + codec.width = 640; + codec.height = 360; + codec.codecType = kVideoCodecAV1; + codec.SetScalabilityMode("L3T3"); + codec.spatialLayers[0].active = true; + codec.spatialLayers[0].minBitrate = 30; + codec.spatialLayers[0].targetBitrate = 51; + codec.spatialLayers[0].maxBitrate = 73; + codec.spatialLayers[1].active = true; + codec.spatialLayers[1].minBitrate = 49; + codec.spatialLayers[1].targetBitrate = 64; + codec.spatialLayers[1].maxBitrate = 97; + codec.spatialLayers[2].active = true; + codec.spatialLayers[2].minBitrate = 193; + codec.spatialLayers[2].targetBitrate = 305; + 
codec.spatialLayers[2].maxBitrate = 418; + + SvcRateAllocator allocator(codec); + + VideoBitrateAllocation allocation = + allocator.Allocate(VideoBitrateAllocationParameters(1'000'000, 30)); + + EXPECT_GT(allocation.GetSpatialLayerSum(0), 0u); + EXPECT_GT(allocation.GetSpatialLayerSum(1), 0u); + EXPECT_GT(allocation.GetSpatialLayerSum(2), 0u); +} + +TEST(SvcRateAllocatorTest, SupportsAv1WithSkippedLayer) { + VideoCodec codec; + codec.width = 640; + codec.height = 360; + codec.codecType = kVideoCodecAV1; + codec.SetScalabilityMode("L3T3"); + codec.spatialLayers[0].active = false; + codec.spatialLayers[0].minBitrate = 30; + codec.spatialLayers[0].targetBitrate = 51; + codec.spatialLayers[0].maxBitrate = 73; + codec.spatialLayers[1].active = true; + codec.spatialLayers[1].minBitrate = 49; + codec.spatialLayers[1].targetBitrate = 64; + codec.spatialLayers[1].maxBitrate = 97; + codec.spatialLayers[2].active = true; + codec.spatialLayers[2].minBitrate = 193; + codec.spatialLayers[2].targetBitrate = 305; + codec.spatialLayers[2].maxBitrate = 418; + + SvcRateAllocator allocator(codec); + + VideoBitrateAllocation allocation = + allocator.Allocate(VideoBitrateAllocationParameters(1'000'000, 30)); + + EXPECT_EQ(allocation.GetSpatialLayerSum(0), 0u); + EXPECT_GT(allocation.GetSpatialLayerSum(1), 0u); + EXPECT_GT(allocation.GetSpatialLayerSum(2), 0u); +} + +TEST(SvcRateAllocatorTest, UsesScalabilityModeToGetNumberOfLayers) { + VideoCodec codec; + codec.width = 640; + codec.height = 360; + codec.codecType = kVideoCodecAV1; + codec.SetScalabilityMode("L2T2"); + codec.spatialLayers[0].active = true; + codec.spatialLayers[0].minBitrate = 30; + codec.spatialLayers[0].targetBitrate = 51; + codec.spatialLayers[0].maxBitrate = 73; + codec.spatialLayers[1].active = true; + codec.spatialLayers[1].minBitrate = 49; + codec.spatialLayers[1].targetBitrate = 64; + codec.spatialLayers[1].maxBitrate = 97; + codec.spatialLayers[2].active = true; + codec.spatialLayers[2].minBitrate = 193; + 
codec.spatialLayers[2].targetBitrate = 305; + codec.spatialLayers[2].maxBitrate = 418; + + SvcRateAllocator allocator(codec); + VideoBitrateAllocation allocation = + allocator.Allocate(VideoBitrateAllocationParameters(1'000'000, 30)); + + // Expect bitrates for 2 temporal layers. + EXPECT_TRUE(allocation.HasBitrate(1, /*temporal_index=*/0)); + EXPECT_TRUE(allocation.HasBitrate(1, /*temporal_index=*/1)); + EXPECT_FALSE(allocation.HasBitrate(1, /*temporal_index=*/2)); + + // expect codec.spatialLayers[2].active is ignored because scability mode uses + // just 2 spatial layers. + EXPECT_EQ(allocation.GetSpatialLayerSum(2), 0u); +} + class SvcRateAllocatorTestParametrizedContentType : public ::testing::Test, public ::testing::WithParamInterface { @@ -283,14 +374,14 @@ class SvcRateAllocatorTestParametrizedContentType TEST_P(SvcRateAllocatorTestParametrizedContentType, MaxBitrate) { VideoCodec codec = Configure(1280, 720, 3, 1, is_screen_sharing_); EXPECT_EQ(SvcRateAllocator::GetMaxBitrate(codec), - DataRate::kbps(codec.spatialLayers[0].maxBitrate + - codec.spatialLayers[1].maxBitrate + - codec.spatialLayers[2].maxBitrate)); + DataRate::KilobitsPerSec(codec.spatialLayers[0].maxBitrate + + codec.spatialLayers[1].maxBitrate + + codec.spatialLayers[2].maxBitrate)); // Deactivate middle layer. This causes deactivation of top layer as well. codec.spatialLayers[1].active = false; EXPECT_EQ(SvcRateAllocator::GetMaxBitrate(codec), - DataRate::kbps(codec.spatialLayers[0].maxBitrate)); + DataRate::KilobitsPerSec(codec.spatialLayers[0].maxBitrate)); } TEST_P(SvcRateAllocatorTestParametrizedContentType, PaddingBitrate) { @@ -349,12 +440,13 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, StableBitrate) { const DataRate min_rate_three_layers = start_rates[2]; const DataRate max_rate_one_layer = - DataRate::kbps(codec.spatialLayers[0].maxBitrate); + DataRate::KilobitsPerSec(codec.spatialLayers[0].maxBitrate); const DataRate max_rate_two_layers = - is_screen_sharing_ ? 
DataRate::kbps(codec.spatialLayers[0].targetBitrate + - codec.spatialLayers[1].maxBitrate) - : DataRate::kbps(codec.spatialLayers[0].maxBitrate + - codec.spatialLayers[1].maxBitrate); + is_screen_sharing_ + ? DataRate::KilobitsPerSec(codec.spatialLayers[0].targetBitrate + + codec.spatialLayers[1].maxBitrate) + : DataRate::KilobitsPerSec(codec.spatialLayers[0].maxBitrate + + codec.spatialLayers[1].maxBitrate); SvcRateAllocator allocator = SvcRateAllocator(codec); @@ -368,12 +460,12 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, StableBitrate) { // Two layers, stable bitrate too low for two layers. allocation = allocator.Allocate(VideoBitrateAllocationParameters( /*total_bitrate=*/min_rate_two_layers, - /*stable_bitrate=*/min_rate_two_layers - DataRate::bps(1), + /*stable_bitrate=*/min_rate_two_layers - DataRate::BitsPerSec(1), /*fps=*/30.0)); EXPECT_FALSE(allocation.IsSpatialLayerUsed(1)); - EXPECT_EQ( - DataRate::bps(allocation.get_sum_bps()), - std::min(min_rate_two_layers - DataRate::bps(1), max_rate_one_layer)); + EXPECT_EQ(DataRate::BitsPerSec(allocation.get_sum_bps()), + std::min(min_rate_two_layers - DataRate::BitsPerSec(1), + max_rate_one_layer)); // Three layers, stable and target equal. allocation = allocator.Allocate(VideoBitrateAllocationParameters( @@ -385,12 +477,12 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, StableBitrate) { // Three layers, stable bitrate too low for three layers. 
allocation = allocator.Allocate(VideoBitrateAllocationParameters( /*total_bitrate=*/min_rate_three_layers, - /*stable_bitrate=*/min_rate_three_layers - DataRate::bps(1), + /*stable_bitrate=*/min_rate_three_layers - DataRate::BitsPerSec(1), /*fps=*/30.0)); EXPECT_FALSE(allocation.IsSpatialLayerUsed(2)); - EXPECT_EQ( - DataRate::bps(allocation.get_sum_bps()), - std::min(min_rate_three_layers - DataRate::bps(1), max_rate_two_layers)); + EXPECT_EQ(DataRate::BitsPerSec(allocation.get_sum_bps()), + std::min(min_rate_three_layers - DataRate::BitsPerSec(1), + max_rate_two_layers)); } TEST_P(SvcRateAllocatorTestParametrizedContentType, @@ -444,7 +536,8 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, // Going below min for two layers, second layer should turn off again. allocation = allocator.Allocate(VideoBitrateAllocationParameters( /*total_bitrate=*/max_bitrate, - /*stable_bitrate=*/min_rate_two_layers - DataRate::bps(1), /*fps=*/30.0)); + /*stable_bitrate=*/min_rate_two_layers - DataRate::BitsPerSec(1), + /*fps=*/30.0)); EXPECT_TRUE(allocation.IsSpatialLayerUsed(0)); EXPECT_FALSE(allocation.IsSpatialLayerUsed(1)); EXPECT_FALSE(allocation.IsSpatialLayerUsed(2)); @@ -476,7 +569,7 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, // Going below min for three layers, third layer should turn off again. 
allocation = allocator.Allocate(VideoBitrateAllocationParameters( /*total_bitrate=*/max_bitrate, - /*stable_bitrate=*/min_rate_three_layers - DataRate::bps(1), + /*stable_bitrate=*/min_rate_three_layers - DataRate::BitsPerSec(1), /*fps=*/30.0)); EXPECT_TRUE(allocation.IsSpatialLayerUsed(0)); EXPECT_TRUE(allocation.IsSpatialLayerUsed(1)); diff --git a/modules/video_coding/timestamp_map.cc b/modules/video_coding/timestamp_map.cc index d93293704d..d79075ff21 100644 --- a/modules/video_coding/timestamp_map.cc +++ b/modules/video_coding/timestamp_map.cc @@ -60,4 +60,13 @@ VCMFrameInformation* VCMTimestampMap::Pop(uint32_t timestamp) { bool VCMTimestampMap::IsEmpty() const { return (next_add_idx_ == next_pop_idx_); } + +size_t VCMTimestampMap::Size() const { + // The maximum number of elements in the list is |capacity_| - 1. The list is + // empty if the add and pop indices are equal. + return next_add_idx_ >= next_pop_idx_ + ? next_add_idx_ - next_pop_idx_ + : next_add_idx_ + capacity_ - next_pop_idx_; +} + } // namespace webrtc diff --git a/modules/video_coding/timestamp_map.h b/modules/video_coding/timestamp_map.h index c85666c9aa..cfa12573ec 100644 --- a/modules/video_coding/timestamp_map.h +++ b/modules/video_coding/timestamp_map.h @@ -24,6 +24,7 @@ class VCMTimestampMap { void Add(uint32_t timestamp, VCMFrameInformation* data); VCMFrameInformation* Pop(uint32_t timestamp); + size_t Size() const; private: struct TimestampDataTuple { diff --git a/modules/video_coding/timing.cc b/modules/video_coding/timing.cc index c62c848c09..eddac4f5de 100644 --- a/modules/video_coding/timing.cc +++ b/modules/video_coding/timing.cc @@ -14,16 +14,18 @@ #include +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/time/timestamp_extrapolator.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" namespace webrtc { -VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing) +VCMTiming::VCMTiming(Clock* clock) : 
clock_(clock), - master_(false), - ts_extrapolator_(), - codec_timer_(new VCMCodecTimer()), + ts_extrapolator_(std::make_unique( + clock_->TimeInMilliseconds())), + codec_timer_(std::make_unique()), render_delay_ms_(kDefaultRenderDelayMs), min_playout_delay_ms_(0), max_playout_delay_ms_(10000), @@ -31,25 +33,16 @@ VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing) current_delay_ms_(0), prev_frame_timestamp_(0), timing_frame_info_(), - num_decoded_frames_(0) { - if (master_timing == NULL) { - master_ = true; - ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds()); - } else { - ts_extrapolator_ = master_timing->ts_extrapolator_; - } -} - -VCMTiming::~VCMTiming() { - if (master_) { - delete ts_extrapolator_; - } + num_decoded_frames_(0), + low_latency_renderer_enabled_("enabled", true) { + ParseFieldTrial({&low_latency_renderer_enabled_}, + field_trial::FindFullName("WebRTC-LowLatencyRenderer")); } void VCMTiming::Reset() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ts_extrapolator_->Reset(clock_->TimeInMilliseconds()); - codec_timer_.reset(new VCMCodecTimer()); + codec_timer_ = std::make_unique(); render_delay_ms_ = kDefaultRenderDelayMs; min_playout_delay_ms_ = 0; jitter_delay_ms_ = 0; @@ -58,32 +51,32 @@ void VCMTiming::Reset() { } void VCMTiming::set_render_delay(int render_delay_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); render_delay_ms_ = render_delay_ms; } void VCMTiming::set_min_playout_delay(int min_playout_delay_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); min_playout_delay_ms_ = min_playout_delay_ms; } int VCMTiming::min_playout_delay() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return min_playout_delay_ms_; } void VCMTiming::set_max_playout_delay(int max_playout_delay_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); max_playout_delay_ms_ = max_playout_delay_ms; } int VCMTiming::max_playout_delay() { - rtc::CritScope 
cs(&crit_sect_); + MutexLock lock(&mutex_); return max_playout_delay_ms_; } void VCMTiming::SetJitterDelay(int jitter_delay_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); if (jitter_delay_ms != jitter_delay_ms_) { jitter_delay_ms_ = jitter_delay_ms; // When in initial state, set current delay to minimum delay. @@ -94,7 +87,7 @@ void VCMTiming::SetJitterDelay(int jitter_delay_ms) { } void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); int target_delay_ms = TargetDelayInternal(); if (current_delay_ms_ == 0) { @@ -135,7 +128,7 @@ void VCMTiming::UpdateCurrentDelay(uint32_t frame_timestamp) { void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms, int64_t actual_decode_time_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); uint32_t target_delay_ms = TargetDelayInternal(); int64_t delayed_ms = actual_decode_time_ms - @@ -158,29 +151,35 @@ void VCMTiming::StopDecodeTimer(uint32_t /*time_stamp*/, } void VCMTiming::StopDecodeTimer(int32_t decode_time_ms, int64_t now_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); codec_timer_->AddTiming(decode_time_ms, now_ms); assert(decode_time_ms >= 0); ++num_decoded_frames_; } void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); ts_extrapolator_->Update(now_ms, time_stamp); } int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return RenderTimeMsInternal(frame_timestamp, now_ms); } int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const { - if (min_playout_delay_ms_ == 0 && max_playout_delay_ms_ == 0) { - // Render as soon as possible. 
+ constexpr int kLowLatencyRendererMaxPlayoutDelayMs = 500; + if (min_playout_delay_ms_ == 0 && + (max_playout_delay_ms_ == 0 || + (low_latency_renderer_enabled_ && + max_playout_delay_ms_ <= kLowLatencyRendererMaxPlayoutDelayMs))) { + // Render as soon as possible or with low-latency renderer algorithm. return 0; } + // Note that TimestampExtrapolator::ExtrapolateLocalTime is not a const + // method; it mutates the object's wraparound state. int64_t estimated_complete_time_ms = ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp); if (estimated_complete_time_ms == -1) { @@ -202,7 +201,7 @@ int VCMTiming::RequiredDecodeTimeMs() const { int64_t VCMTiming::MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); const int64_t max_wait_time_ms = render_time_ms - now_ms - RequiredDecodeTimeMs() - render_delay_ms_; @@ -211,7 +210,7 @@ int64_t VCMTiming::MaxWaitingTime(int64_t render_time_ms, } int VCMTiming::TargetVideoDelay() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return TargetDelayInternal(); } @@ -226,7 +225,7 @@ bool VCMTiming::GetTimings(int* max_decode_ms, int* jitter_buffer_ms, int* min_playout_delay_ms, int* render_delay_ms) const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); *max_decode_ms = RequiredDecodeTimeMs(); *current_delay_ms = current_delay_ms_; *target_delay_ms = TargetDelayInternal(); @@ -237,13 +236,24 @@ bool VCMTiming::GetTimings(int* max_decode_ms, } void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); timing_frame_info_.emplace(info); } absl::optional VCMTiming::GetTimingFrameInfo() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return timing_frame_info_; } +void VCMTiming::SetMaxCompositionDelayInFrames( + absl::optional max_composition_delay_in_frames) { + MutexLock lock(&mutex_); + max_composition_delay_in_frames_ = 
max_composition_delay_in_frames; +} + +absl::optional VCMTiming::MaxCompositionDelayInFrames() const { + MutexLock lock(&mutex_); + return max_composition_delay_in_frames_; +} + } // namespace webrtc diff --git a/modules/video_coding/timing.h b/modules/video_coding/timing.h index c9efcb13b0..736b5e9ae4 100644 --- a/modules/video_coding/timing.h +++ b/modules/video_coding/timing.h @@ -16,8 +16,10 @@ #include "absl/types/optional.h" #include "api/video/video_timing.h" #include "modules/video_coding/codec_timer.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" +#include "rtc_base/time/timestamp_extrapolator.h" namespace webrtc { @@ -26,10 +28,8 @@ class TimestampExtrapolator; class VCMTiming { public: - // The primary timing component should be passed - // if this is the dual timing component. - explicit VCMTiming(Clock* clock, VCMTiming* master_timing = NULL); - virtual ~VCMTiming(); + explicit VCMTiming(Clock* clock); + virtual ~VCMTiming() = default; // Resets the timing to the initial state. 
void Reset(); @@ -100,34 +100,45 @@ class VCMTiming { void SetTimingFrameInfo(const TimingFrameInfo& info); absl::optional GetTimingFrameInfo(); + void SetMaxCompositionDelayInFrames( + absl::optional max_composition_delay_in_frames); + absl::optional MaxCompositionDelayInFrames() const; + enum { kDefaultRenderDelayMs = 10 }; enum { kDelayMaxChangeMsPerS = 100 }; protected: - int RequiredDecodeTimeMs() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + int RequiredDecodeTimeMs() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int64_t RenderTimeMsInternal(uint32_t frame_timestamp, int64_t now_ms) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); - int TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int TargetDelayInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); private: - rtc::CriticalSection crit_sect_; + mutable Mutex mutex_; Clock* const clock_; - bool master_ RTC_GUARDED_BY(crit_sect_); - TimestampExtrapolator* ts_extrapolator_ RTC_GUARDED_BY(crit_sect_); - std::unique_ptr codec_timer_ RTC_GUARDED_BY(crit_sect_); - int render_delay_ms_ RTC_GUARDED_BY(crit_sect_); + const std::unique_ptr ts_extrapolator_ + RTC_PT_GUARDED_BY(mutex_); + std::unique_ptr codec_timer_ RTC_GUARDED_BY(mutex_) + RTC_PT_GUARDED_BY(mutex_); + int render_delay_ms_ RTC_GUARDED_BY(mutex_); // Best-effort playout delay range for frames from capture to render. // The receiver tries to keep the delay between |min_playout_delay_ms_| // and |max_playout_delay_ms_| taking the network jitter into account. // A special case is where min_playout_delay_ms_ = max_playout_delay_ms_ = 0, // in which case the receiver tries to play the frames as they arrive. 
- int min_playout_delay_ms_ RTC_GUARDED_BY(crit_sect_); - int max_playout_delay_ms_ RTC_GUARDED_BY(crit_sect_); - int jitter_delay_ms_ RTC_GUARDED_BY(crit_sect_); - int current_delay_ms_ RTC_GUARDED_BY(crit_sect_); - uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(crit_sect_); - absl::optional timing_frame_info_ RTC_GUARDED_BY(crit_sect_); - size_t num_decoded_frames_ RTC_GUARDED_BY(crit_sect_); + int min_playout_delay_ms_ RTC_GUARDED_BY(mutex_); + int max_playout_delay_ms_ RTC_GUARDED_BY(mutex_); + int jitter_delay_ms_ RTC_GUARDED_BY(mutex_); + int current_delay_ms_ RTC_GUARDED_BY(mutex_); + uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(mutex_); + absl::optional timing_frame_info_ RTC_GUARDED_BY(mutex_); + size_t num_decoded_frames_ RTC_GUARDED_BY(mutex_); + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter enabled + // determines if the low-latency renderer algorithm should be used for the + // case min playout delay=0 and max playout delay>0. + FieldTrialParameter low_latency_renderer_enabled_ + RTC_GUARDED_BY(mutex_); + absl::optional max_composition_delay_in_frames_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/modules/video_coding/utility/default_video_bitrate_allocator.cc b/modules/video_coding/utility/default_video_bitrate_allocator.cc deleted file mode 100644 index bbb15cdd76..0000000000 --- a/modules/video_coding/utility/default_video_bitrate_allocator.cc +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/utility/default_video_bitrate_allocator.h" - -#include -#include - -#include - -namespace webrtc { - -DefaultVideoBitrateAllocator::DefaultVideoBitrateAllocator( - const VideoCodec& codec) - : codec_(codec) {} - -DefaultVideoBitrateAllocator::~DefaultVideoBitrateAllocator() {} - -// TODO(http://crbug.com/webrtc/9671): Do not split bitrate between simulcast -// streams, but allocate everything to the first stream. -VideoBitrateAllocation DefaultVideoBitrateAllocator::Allocate( - VideoBitrateAllocationParameters parameters) { - VideoBitrateAllocation allocation; - if (parameters.total_bitrate.IsZero() || !codec_.active) - return allocation; - - uint32_t allocated_bitrate_bps = parameters.total_bitrate.bps(); - allocated_bitrate_bps = - std::max(allocated_bitrate_bps, codec_.minBitrate * 1000); - if (codec_.maxBitrate > 0) { - allocated_bitrate_bps = - std::min(allocated_bitrate_bps, codec_.maxBitrate * 1000); - } - size_t num_simulcast_streams = - std::max(1, codec_.numberOfSimulcastStreams); - // The bitrate is split between all the streams in proportion of powers of 2 - // e.g. 1:2, 1:2:4, etc. - for (size_t i = 0; i < num_simulcast_streams; i++) { - allocation.SetBitrate( - i, 0, - allocated_bitrate_bps * (1 << i) / ((1 << num_simulcast_streams) - 1)); - } - - return allocation; -} - -} // namespace webrtc diff --git a/modules/video_coding/utility/default_video_bitrate_allocator.h b/modules/video_coding/utility/default_video_bitrate_allocator.h deleted file mode 100644 index c3eb67036a..0000000000 --- a/modules/video_coding/utility/default_video_bitrate_allocator.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_UTILITY_DEFAULT_VIDEO_BITRATE_ALLOCATOR_H_ -#define MODULES_VIDEO_CODING_UTILITY_DEFAULT_VIDEO_BITRATE_ALLOCATOR_H_ - -#include - -#include "api/video/video_bitrate_allocation.h" -#include "api/video/video_bitrate_allocator.h" -#include "api/video_codecs/video_codec.h" - -namespace webrtc { - -class DefaultVideoBitrateAllocator : public VideoBitrateAllocator { - public: - explicit DefaultVideoBitrateAllocator(const VideoCodec& codec); - ~DefaultVideoBitrateAllocator() override; - - VideoBitrateAllocation Allocate( - VideoBitrateAllocationParameters parameters) override; - - private: - const VideoCodec codec_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_UTILITY_DEFAULT_VIDEO_BITRATE_ALLOCATOR_H_ diff --git a/modules/video_coding/utility/default_video_bitrate_allocator_unittest.cc b/modules/video_coding/utility/default_video_bitrate_allocator_unittest.cc deleted file mode 100644 index ed0cb5f3ee..0000000000 --- a/modules/video_coding/utility/default_video_bitrate_allocator_unittest.cc +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/utility/default_video_bitrate_allocator.h" - -#include -#include - -#include "test/gtest.h" - -namespace webrtc { -namespace { -uint32_t kMaxBitrateBps = 1000000; -uint32_t kMinBitrateBps = 50000; -uint32_t kMaxFramerate = 30; -} // namespace - -class DefaultVideoBitrateAllocatorTest : public ::testing::Test { - public: - DefaultVideoBitrateAllocatorTest() {} - virtual ~DefaultVideoBitrateAllocatorTest() {} - - void SetUp() override { - codec_.codecType = kVideoCodecVP8; - codec_.minBitrate = kMinBitrateBps / 1000; - codec_.maxBitrate = kMaxBitrateBps / 1000; - codec_.maxFramerate = kMaxFramerate; - allocator_.reset(new DefaultVideoBitrateAllocator(codec_)); - } - - protected: - VideoCodec codec_; - std::unique_ptr allocator_; -}; - -TEST_F(DefaultVideoBitrateAllocatorTest, ZeroIsOff) { - VideoBitrateAllocation allocation = - allocator_->Allocate(VideoBitrateAllocationParameters(0, kMaxFramerate)); - EXPECT_EQ(0u, allocation.get_sum_bps()); -} - -TEST_F(DefaultVideoBitrateAllocatorTest, Inactive) { - codec_.active = false; - allocator_.reset(new DefaultVideoBitrateAllocator(codec_)); - VideoBitrateAllocation allocation = - allocator_->Allocate(VideoBitrateAllocationParameters(1, kMaxFramerate)); - EXPECT_EQ(0u, allocation.get_sum_bps()); -} - -TEST_F(DefaultVideoBitrateAllocatorTest, CapsToMin) { - VideoBitrateAllocation allocation = - allocator_->Allocate(VideoBitrateAllocationParameters(1, kMaxFramerate)); - EXPECT_EQ(kMinBitrateBps, allocation.get_sum_bps()); - - allocation = allocator_->Allocate( - VideoBitrateAllocationParameters(kMinBitrateBps - 1, kMaxFramerate)); - EXPECT_EQ(kMinBitrateBps, allocation.get_sum_bps()); - - allocation = allocator_->Allocate( - VideoBitrateAllocationParameters(kMinBitrateBps, kMaxFramerate)); - EXPECT_EQ(kMinBitrateBps, allocation.get_sum_bps()); -} - -TEST_F(DefaultVideoBitrateAllocatorTest, CapsToMax) { - VideoBitrateAllocation allocation = allocator_->Allocate( - 
VideoBitrateAllocationParameters(kMaxBitrateBps, kMaxFramerate)); - EXPECT_EQ(kMaxBitrateBps, allocation.get_sum_bps()); - - allocation = allocator_->Allocate( - VideoBitrateAllocationParameters(kMaxBitrateBps + 1, kMaxFramerate)); - EXPECT_EQ(kMaxBitrateBps, allocation.get_sum_bps()); - - allocation = allocator_->Allocate(VideoBitrateAllocationParameters( - std::numeric_limits::max(), kMaxFramerate)); - EXPECT_EQ(kMaxBitrateBps, allocation.get_sum_bps()); -} - -TEST_F(DefaultVideoBitrateAllocatorTest, GoodInBetween) { - VideoBitrateAllocation allocation = allocator_->Allocate( - VideoBitrateAllocationParameters(kMinBitrateBps + 1, kMaxFramerate)); - EXPECT_EQ(kMinBitrateBps + 1, allocation.get_sum_bps()); - - allocation = allocator_->Allocate( - VideoBitrateAllocationParameters(kMaxBitrateBps - 1, kMaxFramerate)); - EXPECT_EQ(kMaxBitrateBps - 1, allocation.get_sum_bps()); -} -} // namespace webrtc diff --git a/modules/video_coding/utility/ivf_file_reader.cc b/modules/video_coding/utility/ivf_file_reader.cc index 9667bb7cec..e3c249947d 100644 --- a/modules/video_coding/utility/ivf_file_reader.cc +++ b/modules/video_coding/utility/ivf_file_reader.cc @@ -27,6 +27,7 @@ constexpr int kCodecTypeBytesCount = 4; constexpr uint8_t kFileHeaderStart[kCodecTypeBytesCount] = {'D', 'K', 'I', 'F'}; constexpr uint8_t kVp8Header[kCodecTypeBytesCount] = {'V', 'P', '8', '0'}; constexpr uint8_t kVp9Header[kCodecTypeBytesCount] = {'V', 'P', '9', '0'}; +constexpr uint8_t kAv1Header[kCodecTypeBytesCount] = {'A', 'V', '0', '1'}; constexpr uint8_t kH264Header[kCodecTypeBytesCount] = {'H', '2', '6', '4'}; } // namespace @@ -170,7 +171,6 @@ absl::optional IvfFileReader::NextFrame() { if (is_first_frame) { image._frameType = VideoFrameType::kVideoFrameKey; } - image._completeFrame = true; return image; } @@ -191,6 +191,9 @@ absl::optional IvfFileReader::ParseCodecType(uint8_t* buffer, if (memcmp(&buffer[start_pos], kVp9Header, kCodecTypeBytesCount) == 0) { return 
VideoCodecType::kVideoCodecVP9; } + if (memcmp(&buffer[start_pos], kAv1Header, kCodecTypeBytesCount) == 0) { + return VideoCodecType::kVideoCodecAV1; + } if (memcmp(&buffer[start_pos], kH264Header, kCodecTypeBytesCount) == 0) { return VideoCodecType::kVideoCodecH264; } diff --git a/modules/video_coding/utility/ivf_file_reader.h b/modules/video_coding/utility/ivf_file_reader.h index eb5a21d55d..5e0634f9fd 100644 --- a/modules/video_coding/utility/ivf_file_reader.h +++ b/modules/video_coding/utility/ivf_file_reader.h @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "api/video/encoded_image.h" +#include "api/video_codecs/video_codec.h" #include "rtc_base/system/file_wrapper.h" namespace webrtc { diff --git a/modules/video_coding/utility/ivf_file_reader_unittest.cc b/modules/video_coding/utility/ivf_file_reader_unittest.cc index 6ff580511b..58a808840d 100644 --- a/modules/video_coding/utility/ivf_file_reader_unittest.cc +++ b/modules/video_coding/utility/ivf_file_reader_unittest.cc @@ -145,6 +145,16 @@ TEST_F(IvfFileReaderTest, BasicVP9FileMsTimestamp) { ValidateContent(kVideoCodecVP9, true, 1); } +TEST_F(IvfFileReaderTest, BasicAv1FileNtpTimestamp) { + CreateTestFile(kVideoCodecAV1, false, 1); + ValidateContent(kVideoCodecAV1, false, 1); +} + +TEST_F(IvfFileReaderTest, BasicAv1FileMsTimestamp) { + CreateTestFile(kVideoCodecAV1, true, 1); + ValidateContent(kVideoCodecAV1, true, 1); +} + TEST_F(IvfFileReaderTest, BasicH264FileNtpTimestamp) { CreateTestFile(kVideoCodecH264, false, 1); ValidateContent(kVideoCodecH264, false, 1); @@ -165,6 +175,11 @@ TEST_F(IvfFileReaderTest, MultilayerVP9FileNtpTimestamp) { ValidateContent(kVideoCodecVP9, false, 3); } +TEST_F(IvfFileReaderTest, MultilayerAv1FileNtpTimestamp) { + CreateTestFile(kVideoCodecAV1, false, 3); + ValidateContent(kVideoCodecAV1, false, 3); +} + TEST_F(IvfFileReaderTest, MultilayerH264FileNtpTimestamp) { CreateTestFile(kVideoCodecH264, false, 3); ValidateContent(kVideoCodecH264, false, 3); diff --git 
a/modules/video_coding/utility/ivf_file_writer.cc b/modules/video_coding/utility/ivf_file_writer.cc index 46b8e87ba3..496da894a1 100644 --- a/modules/video_coding/utility/ivf_file_writer.cc +++ b/modules/video_coding/utility/ivf_file_writer.cc @@ -75,6 +75,12 @@ bool IvfFileWriter::WriteHeader() { ivf_header[10] = '9'; ivf_header[11] = '0'; break; + case kVideoCodecAV1: + ivf_header[8] = 'A'; + ivf_header[9] = 'V'; + ivf_header[10] = '0'; + ivf_header[11] = '1'; + break; case kVideoCodecH264: ivf_header[8] = 'H'; ivf_header[9] = '2'; diff --git a/modules/video_coding/utility/ivf_file_writer.h b/modules/video_coding/utility/ivf_file_writer.h index 5de67acdb2..140b9c06ff 100644 --- a/modules/video_coding/utility/ivf_file_writer.h +++ b/modules/video_coding/utility/ivf_file_writer.h @@ -17,6 +17,7 @@ #include #include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/time_utils.h" diff --git a/modules/video_coding/utility/ivf_file_writer_unittest.cc b/modules/video_coding/utility/ivf_file_writer_unittest.cc index 49e0459ba6..8e781a7b22 100644 --- a/modules/video_coding/utility/ivf_file_writer_unittest.cc +++ b/modules/video_coding/utility/ivf_file_writer_unittest.cc @@ -147,6 +147,16 @@ TEST_F(IvfFileWriterTest, WritesBasicVP9FileMsTimestamp) { RunBasicFileStructureTest(kVideoCodecVP9, fourcc, true); } +TEST_F(IvfFileWriterTest, WritesBasicAv1FileNtpTimestamp) { + const uint8_t fourcc[4] = {'A', 'V', '0', '1'}; + RunBasicFileStructureTest(kVideoCodecAV1, fourcc, false); +} + +TEST_F(IvfFileWriterTest, WritesBasicAv1FileMsTimestamp) { + const uint8_t fourcc[4] = {'A', 'V', '0', '1'}; + RunBasicFileStructureTest(kVideoCodecAV1, fourcc, true); +} + TEST_F(IvfFileWriterTest, WritesBasicH264FileNtpTimestamp) { const uint8_t fourcc[4] = {'H', '2', '6', '4'}; RunBasicFileStructureTest(kVideoCodecH264, fourcc, false); diff --git 
a/modules/video_coding/utility/quality_scaler.cc b/modules/video_coding/utility/quality_scaler.cc index b7aca9a4fb..2859ac2e22 100644 --- a/modules/video_coding/utility/quality_scaler.cc +++ b/modules/video_coding/utility/quality_scaler.cc @@ -13,11 +13,14 @@ #include #include +#include "api/video/video_adaptation_reason.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/quality_scaler_settings.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/weak_ptr.h" // TODO(kthelgason): Some versions of Android have issues with log2. // See https://code.google.com/p/android/issues/detail?id=212634 for details @@ -68,24 +71,138 @@ class QualityScaler::QpSmoother { rtc::ExpFilter smoother_; }; -QualityScaler::QualityScaler(AdaptationObserverInterface* observer, +// The QualityScaler checks for QP periodically by queuing CheckQpTasks. The +// task will either run to completion and trigger a new task being queued, or it +// will be destroyed because the QualityScaler is destroyed. +// +// When high or low QP is reported, the task will be pending until a callback is +// invoked. This lets the QualityScalerQpUsageHandlerInterface react to QP usage +// asynchronously and prevents checking for QP until the stream has potentially +// been reconfigured. +class QualityScaler::CheckQpTask { + public: + // The result of one CheckQpTask may influence the delay of the next + // CheckQpTask. 
+ struct Result { + bool observed_enough_frames = false; + bool qp_usage_reported = false; + }; + + CheckQpTask(QualityScaler* quality_scaler, Result previous_task_result) + : quality_scaler_(quality_scaler), + state_(State::kNotStarted), + previous_task_result_(previous_task_result), + weak_ptr_factory_(this) {} + + void StartDelayedTask() { + RTC_DCHECK_EQ(state_, State::kNotStarted); + state_ = State::kCheckingQp; + TaskQueueBase::Current()->PostDelayedTask( + ToQueuedTask([this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), this] { + if (!this_weak_ptr) { + // The task has been cancelled through destruction. + return; + } + RTC_DCHECK_EQ(state_, State::kCheckingQp); + RTC_DCHECK_RUN_ON(&quality_scaler_->task_checker_); + switch (quality_scaler_->CheckQp()) { + case QualityScaler::CheckQpResult::kInsufficientSamples: { + result_.observed_enough_frames = false; + // After this line, |this| may be deleted. + break; + } + case QualityScaler::CheckQpResult::kNormalQp: { + result_.observed_enough_frames = true; + break; + } + case QualityScaler::CheckQpResult::kHighQp: { + result_.observed_enough_frames = true; + result_.qp_usage_reported = true; + quality_scaler_->fast_rampup_ = false; + quality_scaler_->handler_->OnReportQpUsageHigh(); + quality_scaler_->ClearSamples(); + break; + } + case QualityScaler::CheckQpResult::kLowQp: { + result_.observed_enough_frames = true; + result_.qp_usage_reported = true; + quality_scaler_->handler_->OnReportQpUsageLow(); + quality_scaler_->ClearSamples(); + break; + } + } + state_ = State::kCompleted; + // Starting the next task deletes the pending task. After this line, + // |this| has been deleted. 
+ quality_scaler_->StartNextCheckQpTask(); + }), + GetCheckingQpDelayMs()); + } + + bool HasCompletedTask() const { return state_ == State::kCompleted; } + + Result result() const { + RTC_DCHECK(HasCompletedTask()); + return result_; + } + + private: + enum class State { + kNotStarted, + kCheckingQp, + kCompleted, + }; + + // Determines the sampling period of CheckQpTasks. + int64_t GetCheckingQpDelayMs() const { + RTC_DCHECK_RUN_ON(&quality_scaler_->task_checker_); + if (quality_scaler_->fast_rampup_) { + return quality_scaler_->sampling_period_ms_; + } + if (quality_scaler_->experiment_enabled_ && + !previous_task_result_.observed_enough_frames) { + // Use half the interval while waiting for enough frames. + return quality_scaler_->sampling_period_ms_ / 2; + } + if (quality_scaler_->scale_factor_ && + !previous_task_result_.qp_usage_reported) { + // Last CheckQp did not call AdaptDown/Up, possibly reduce interval. + return quality_scaler_->sampling_period_ms_ * + quality_scaler_->scale_factor_.value(); + } + return quality_scaler_->sampling_period_ms_ * + quality_scaler_->initial_scale_factor_; + } + + QualityScaler* const quality_scaler_; + State state_; + const Result previous_task_result_; + Result result_; + + rtc::WeakPtrFactory weak_ptr_factory_; +}; + +QualityScaler::QualityScaler(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds) - : QualityScaler(observer, thresholds, kMeasureMs) {} + : QualityScaler(handler, thresholds, kMeasureMs) {} // Protected ctor, should not be called directly. 
-QualityScaler::QualityScaler(AdaptationObserverInterface* observer, +QualityScaler::QualityScaler(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds, - int64_t sampling_period_ms) - : observer_(observer), + int64_t default_sampling_period_ms) + : handler_(handler), thresholds_(thresholds), - sampling_period_ms_(sampling_period_ms), + sampling_period_ms_(QualityScalerSettings::ParseFromFieldTrials() + .SamplingPeriodMs() + .value_or(default_sampling_period_ms)), fast_rampup_(true), // Arbitrarily choose size based on 30 fps for 5 seconds. - average_qp_(5 * 30), + average_qp_(QualityScalerSettings::ParseFromFieldTrials() + .AverageQpWindow() + .value_or(5 * 30)), framedrop_percent_media_opt_(5 * 30), framedrop_percent_all_(5 * 30), experiment_enabled_(QualityScalingExperiment::Enabled()), - observed_enough_frames_(false), min_frames_needed_( QualityScalerSettings::ParseFromFieldTrials().MinFrames().value_or( kMinFramesNeededToScale)), @@ -93,48 +210,33 @@ QualityScaler::QualityScaler(AdaptationObserverInterface* observer, .InitialScaleFactor() .value_or(kSamplePeriodScaleFactor)), scale_factor_( - QualityScalerSettings::ParseFromFieldTrials().ScaleFactor()), - adapt_called_(false), - adapt_failed_(false) { + QualityScalerSettings::ParseFromFieldTrials().ScaleFactor()) { RTC_DCHECK_RUN_ON(&task_checker_); if (experiment_enabled_) { config_ = QualityScalingExperiment::GetConfig(); qp_smoother_high_.reset(new QpSmoother(config_.alpha_high)); qp_smoother_low_.reset(new QpSmoother(config_.alpha_low)); } - RTC_DCHECK(observer_ != nullptr); - check_qp_task_ = RepeatingTaskHandle::DelayedStart( - TaskQueueBase::Current(), TimeDelta::ms(GetSamplingPeriodMs()), [this]() { - CheckQp(); - return TimeDelta::ms(GetSamplingPeriodMs()); - }); + RTC_DCHECK(handler_ != nullptr); + StartNextCheckQpTask(); RTC_LOG(LS_INFO) << "QP thresholds: low: " << thresholds_.low << ", high: " << thresholds_.high; } QualityScaler::~QualityScaler() { 
RTC_DCHECK_RUN_ON(&task_checker_); - check_qp_task_.Stop(); } -int64_t QualityScaler::GetSamplingPeriodMs() const { +void QualityScaler::StartNextCheckQpTask() { RTC_DCHECK_RUN_ON(&task_checker_); - if (fast_rampup_) { - return sampling_period_ms_; - } - if (experiment_enabled_ && !observed_enough_frames_) { - // Use half the interval while waiting for enough frames. - return sampling_period_ms_ / 2; - } - if (adapt_failed_) { - // Check shortly again. - return sampling_period_ms_ / 8; - } - if (scale_factor_ && !adapt_called_) { - // Last CheckQp did not call AdaptDown/Up, possibly reduce interval. - return sampling_period_ms_ * scale_factor_.value(); + RTC_DCHECK(!pending_qp_task_ || pending_qp_task_->HasCompletedTask()) + << "A previous CheckQpTask has not completed yet!"; + CheckQpTask::Result previous_task_result; + if (pending_qp_task_) { + previous_task_result = pending_qp_task_->result(); } - return sampling_period_ms_ * initial_scale_factor_; + pending_qp_task_ = std::make_unique(this, previous_task_result); + pending_qp_task_->StartDelayedTask(); } void QualityScaler::SetQpThresholds(VideoEncoder::QpThresholds thresholds) { @@ -179,12 +281,10 @@ bool QualityScaler::QpFastFilterLow() const { return (avg_qp_high) ? (avg_qp_high.value() <= thresholds_.low) : false; } -void QualityScaler::CheckQp() { +QualityScaler::CheckQpResult QualityScaler::CheckQp() const { RTC_DCHECK_RUN_ON(&task_checker_); // Should be set through InitEncode -> Should be set by now. RTC_DCHECK_GE(thresholds_.low, 0); - adapt_failed_ = false; - adapt_called_ = false; // If we have not observed at least this many frames we can't make a good // scaling decision. @@ -192,10 +292,8 @@ void QualityScaler::CheckQp() { ? 
framedrop_percent_all_.Size() : framedrop_percent_media_opt_.Size(); if (frames < min_frames_needed_) { - observed_enough_frames_ = false; - return; + return CheckQpResult::kInsufficientSamples; } - observed_enough_frames_ = true; // Check if we should scale down due to high frame drop. const absl::optional drop_rate = @@ -204,8 +302,7 @@ void QualityScaler::CheckQp() { : framedrop_percent_media_opt_.GetAverageRoundedDown(); if (drop_rate && *drop_rate >= kFramedropPercentThreshold) { RTC_LOG(LS_INFO) << "Reporting high QP, framedrop percent " << *drop_rate; - ReportQpHigh(); - return; + return CheckQpResult::kHighQp; } // Check if we should scale up or down based on QP. @@ -219,39 +316,14 @@ void QualityScaler::CheckQp() { RTC_LOG(LS_INFO) << "Checking average QP " << *avg_qp_high << " (" << *avg_qp_low << ")."; if (*avg_qp_high > thresholds_.high) { - ReportQpHigh(); - return; + return CheckQpResult::kHighQp; } if (*avg_qp_low <= thresholds_.low) { // QP has been low. We want to try a higher resolution. - ReportQpLow(); - return; + return CheckQpResult::kLowQp; } } -} - -void QualityScaler::ReportQpLow() { - RTC_DCHECK_RUN_ON(&task_checker_); - ClearSamples(); - observer_->AdaptUp(AdaptationObserverInterface::AdaptReason::kQuality); - adapt_called_ = true; -} - -void QualityScaler::ReportQpHigh() { - RTC_DCHECK_RUN_ON(&task_checker_); - - if (observer_->AdaptDown( - AdaptationObserverInterface::AdaptReason::kQuality)) { - ClearSamples(); - } else { - adapt_failed_ = true; - } - - // If we've scaled down, wait longer before scaling up again. 
- if (fast_rampup_) { - fast_rampup_ = false; - } - adapt_called_ = true; + return CheckQpResult::kNormalQp; } void QualityScaler::ClearSamples() { @@ -264,4 +336,7 @@ void QualityScaler::ClearSamples() { if (qp_smoother_low_) qp_smoother_low_->Reset(); } + +QualityScalerQpUsageHandlerInterface::~QualityScalerQpUsageHandlerInterface() {} + } // namespace webrtc diff --git a/modules/video_coding/utility/quality_scaler.h b/modules/video_coding/utility/quality_scaler.h index 9556a58358..28f225f397 100644 --- a/modules/video_coding/utility/quality_scaler.h +++ b/modules/video_coding/utility/quality_scaler.h @@ -17,43 +17,30 @@ #include #include "absl/types/optional.h" +#include "api/scoped_refptr.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/experiments/quality_scaling_experiment.h" #include "rtc_base/numerics/moving_average.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/ref_counted_object.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/repeating_task.h" namespace webrtc { -// An interface for signaling requests to limit or increase the resolution or -// framerate of the captured video stream. -class AdaptationObserverInterface { - public: - // Indicates if the adaptation is due to overuse of the CPU resources, or if - // the quality of the encoded frames have dropped too low. - enum AdaptReason : size_t { kQuality = 0, kCpu = 1 }; - static const size_t kScaleReasonSize = 2; - // Called to signal that we can handle larger or more frequent frames. - virtual void AdaptUp(AdaptReason reason) = 0; - // Called to signal that the source should reduce the resolution or framerate. - // Returns false if a downgrade was requested but the request did not result - // in a new limiting resolution or fps. 
- virtual bool AdaptDown(AdaptReason reason) = 0; - - protected: - virtual ~AdaptationObserverInterface() {} -}; +class QualityScalerQpUsageHandlerCallbackInterface; +class QualityScalerQpUsageHandlerInterface; // QualityScaler runs asynchronously and monitors QP values of encoded frames. -// It holds a reference to an AdaptationObserverInterface implementation to -// signal an intent to scale up or down. +// It holds a reference to a QualityScalerQpUsageHandlerInterface implementation +// to signal an overuse or underuse of QP (which indicate a desire to scale the +// video stream down or up). class QualityScaler { public: - // Construct a QualityScaler with given |thresholds| and |observer|. + // Construct a QualityScaler with given |thresholds| and |handler|. // This starts the quality scaler periodically checking what the average QP // has been recently. - QualityScaler(AdaptationObserverInterface* observer, + QualityScaler(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds); virtual ~QualityScaler(); // Should be called each time a frame is dropped at encoding. @@ -67,21 +54,34 @@ class QualityScaler { // The following members declared protected for testing purposes. protected: - QualityScaler(AdaptationObserverInterface* observer, + QualityScaler(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds, int64_t sampling_period_ms); private: class QpSmoother; - - void CheckQp(); + class CheckQpTask; + class CheckQpTaskHandlerCallback; + + enum class CheckQpResult { + kInsufficientSamples, + kNormalQp, + kHighQp, + kLowQp, + }; + + // Starts checking for QP in a delayed task. When the resulting CheckQpTask + // completes, it will invoke this method again, ensuring that we always + // periodically check for QP. See CheckQpTask for more details. We never run + // more than one CheckQpTask at a time. 
+ void StartNextCheckQpTask(); + + CheckQpResult CheckQp() const; void ClearSamples(); - void ReportQpLow(); - void ReportQpHigh(); - int64_t GetSamplingPeriodMs() const; - RepeatingTaskHandle check_qp_task_ RTC_GUARDED_BY(&task_checker_); - AdaptationObserverInterface* const observer_ RTC_GUARDED_BY(&task_checker_); + std::unique_ptr pending_qp_task_ RTC_GUARDED_BY(&task_checker_); + QualityScalerQpUsageHandlerInterface* const handler_ + RTC_GUARDED_BY(&task_checker_); SequenceChecker task_checker_; VideoEncoder::QpThresholds thresholds_ RTC_GUARDED_BY(&task_checker_); @@ -97,14 +97,25 @@ class QualityScaler { QualityScalingExperiment::Config config_ RTC_GUARDED_BY(&task_checker_); std::unique_ptr qp_smoother_high_ RTC_GUARDED_BY(&task_checker_); std::unique_ptr qp_smoother_low_ RTC_GUARDED_BY(&task_checker_); - bool observed_enough_frames_ RTC_GUARDED_BY(&task_checker_); const size_t min_frames_needed_; const double initial_scale_factor_; const absl::optional scale_factor_; - bool adapt_called_ RTC_GUARDED_BY(&task_checker_); - bool adapt_failed_ RTC_GUARDED_BY(&task_checker_); }; + +// Reacts to QP being too high or too low. For best quality, when QP is high it +// is desired to decrease the resolution or frame rate of the stream and when QP +// is low it is desired to increase the resolution or frame rate of the stream. +// Whether to reconfigure the stream is ultimately up to the handler, which is +// able to respond asynchronously. 
+class QualityScalerQpUsageHandlerInterface { + public: + virtual ~QualityScalerQpUsageHandlerInterface(); + + virtual void OnReportQpUsageHigh() = 0; + virtual void OnReportQpUsageLow() = 0; +}; + } // namespace webrtc #endif // MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_ diff --git a/modules/video_coding/utility/quality_scaler_unittest.cc b/modules/video_coding/utility/quality_scaler_unittest.cc index 6f16dc81bf..d5b22a8a29 100644 --- a/modules/video_coding/utility/quality_scaler_unittest.cc +++ b/modules/video_coding/utility/quality_scaler_unittest.cc @@ -28,18 +28,19 @@ static const int kMinFramesNeededToScale = 60; // From quality_scaler.cc. static const size_t kDefaultTimeoutMs = 150; } // namespace -class MockAdaptationObserver : public AdaptationObserverInterface { +class FakeQpUsageHandler : public QualityScalerQpUsageHandlerInterface { public: - virtual ~MockAdaptationObserver() {} + ~FakeQpUsageHandler() override = default; - void AdaptUp(AdaptReason r) override { - adapt_up_events_++; + // QualityScalerQpUsageHandlerInterface implementation. + void OnReportQpUsageHigh() override { + adapt_down_events_++; event.Set(); } - bool AdaptDown(AdaptReason r) override { - adapt_down_events_++; + + void OnReportQpUsageLow() override { + adapt_up_events_++; event.Set(); - return true; } rtc::Event event; @@ -50,9 +51,9 @@ class MockAdaptationObserver : public AdaptationObserverInterface { // Pass a lower sampling period to speed up the tests. 
class QualityScalerUnderTest : public QualityScaler { public: - explicit QualityScalerUnderTest(AdaptationObserverInterface* observer, + explicit QualityScalerUnderTest(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds) - : QualityScaler(observer, thresholds, 5) {} + : QualityScaler(handler, thresholds, 5) {} }; class QualityScalerTest : public ::testing::Test, @@ -69,16 +70,16 @@ class QualityScalerTest : public ::testing::Test, QualityScalerTest() : scoped_field_trial_(GetParam()), task_queue_("QualityScalerTestQueue"), - observer_(new MockAdaptationObserver()) { + handler_(std::make_unique()) { task_queue_.SendTask( [this] { qs_ = std::unique_ptr(new QualityScalerUnderTest( - observer_.get(), VideoEncoder::QpThresholds(kLowQp, kHighQp))); + handler_.get(), VideoEncoder::QpThresholds(kLowQp, kHighQp))); }, RTC_FROM_HERE); } - ~QualityScalerTest() { + ~QualityScalerTest() override { task_queue_.SendTask([this] { qs_ = nullptr; }, RTC_FROM_HERE); } @@ -107,7 +108,7 @@ class QualityScalerTest : public ::testing::Test, test::ScopedFieldTrials scoped_field_trial_; TaskQueueForTest task_queue_; std::unique_ptr qs_; - std::unique_ptr observer_; + std::unique_ptr handler_; }; INSTANTIATE_TEST_SUITE_P( @@ -119,25 +120,25 @@ INSTANTIATE_TEST_SUITE_P( TEST_P(QualityScalerTest, DownscalesAfterContinuousFramedrop) { task_queue_.SendTask([this] { TriggerScale(kScaleDown); }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(1, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(1, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, KeepsScaleAtHighQp) { task_queue_.SendTask([this] { TriggerScale(kKeepScaleAtHighQp); }, RTC_FROM_HERE); - EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(0, observer_->adapt_down_events_); - EXPECT_EQ(0, 
observer_->adapt_up_events_); + EXPECT_FALSE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(0, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, DownscalesAboveHighQp) { task_queue_.SendTask([this] { TriggerScale(kScaleDownAboveHighQp); }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(1, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(1, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) { @@ -150,9 +151,9 @@ TEST_P(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) { } }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(1, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(1, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) { @@ -164,9 +165,9 @@ TEST_P(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) { } }, RTC_FROM_HERE); - EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(0, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_FALSE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(0, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, DownscalesAfterTwoThirdsIfFieldTrialEnabled) { @@ -180,35 +181,35 @@ TEST_P(QualityScalerTest, DownscalesAfterTwoThirdsIfFieldTrialEnabled) { } }, RTC_FROM_HERE); - EXPECT_EQ(kDownScaleExpected, observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(kDownScaleExpected ? 
1 : 0, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_EQ(kDownScaleExpected, handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(kDownScaleExpected ? 1 : 0, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, KeepsScaleOnNormalQp) { task_queue_.SendTask([this] { TriggerScale(kKeepScaleAboveLowQp); }, RTC_FROM_HERE); - EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(0, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_FALSE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(0, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, UpscalesAfterLowQp) { task_queue_.SendTask([this] { TriggerScale(kScaleUp); }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(0, observer_->adapt_down_events_); - EXPECT_EQ(1, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(0, handler_->adapt_down_events_); + EXPECT_EQ(1, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, ScalesDownAndBackUp) { task_queue_.SendTask([this] { TriggerScale(kScaleDown); }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(1, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(1, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); task_queue_.SendTask([this] { TriggerScale(kScaleUp); }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(1, observer_->adapt_down_events_); - EXPECT_EQ(1, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(1, handler_->adapt_down_events_); + EXPECT_EQ(1, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, DoesNotScaleUntilEnoughFramesObserved) { @@ -220,7 +221,7 
@@ TEST_P(QualityScalerTest, DoesNotScaleUntilEnoughFramesObserved) { } }, RTC_FROM_HERE); - EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs)); + EXPECT_FALSE(handler_->event.Wait(kDefaultTimeoutMs)); task_queue_.SendTask( [this] { // Send 1 more. Enough frames observed, should result in an adapt @@ -228,9 +229,9 @@ TEST_P(QualityScalerTest, DoesNotScaleUntilEnoughFramesObserved) { qs_->ReportQp(kLowQp, 0); }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(0, observer_->adapt_down_events_); - EXPECT_EQ(1, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(0, handler_->adapt_down_events_); + EXPECT_EQ(1, handler_->adapt_up_events_); // Samples should be cleared after an adapt request. task_queue_.SendTask( @@ -239,9 +240,9 @@ TEST_P(QualityScalerTest, DoesNotScaleUntilEnoughFramesObserved) { qs_->ReportQp(kLowQp, 0); }, RTC_FROM_HERE); - EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(0, observer_->adapt_down_events_); - EXPECT_EQ(1, observer_->adapt_up_events_); + EXPECT_FALSE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(0, handler_->adapt_down_events_); + EXPECT_EQ(1, handler_->adapt_up_events_); } TEST_P(QualityScalerTest, ScalesDownAndBackUpWithMinFramesNeeded) { @@ -252,9 +253,9 @@ TEST_P(QualityScalerTest, ScalesDownAndBackUpWithMinFramesNeeded) { } }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(1, observer_->adapt_down_events_); - EXPECT_EQ(0, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(1, handler_->adapt_down_events_); + EXPECT_EQ(0, handler_->adapt_up_events_); // Samples cleared. 
task_queue_.SendTask( [this] { @@ -263,9 +264,9 @@ TEST_P(QualityScalerTest, ScalesDownAndBackUpWithMinFramesNeeded) { } }, RTC_FROM_HERE); - EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs)); - EXPECT_EQ(1, observer_->adapt_down_events_); - EXPECT_EQ(1, observer_->adapt_up_events_); + EXPECT_TRUE(handler_->event.Wait(kDefaultTimeoutMs)); + EXPECT_EQ(1, handler_->adapt_down_events_); + EXPECT_EQ(1, handler_->adapt_up_events_); } } // namespace webrtc diff --git a/modules/video_coding/utility/simulcast_rate_allocator.cc b/modules/video_coding/utility/simulcast_rate_allocator.cc index f2a90120dd..39e39abca1 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator.cc +++ b/modules/video_coding/utility/simulcast_rate_allocator.cc @@ -61,7 +61,8 @@ float SimulcastRateAllocator::GetTemporalRateAllocation( SimulcastRateAllocator::SimulcastRateAllocator(const VideoCodec& codec) : codec_(codec), stable_rate_settings_(StableTargetRateExperiment::ParseFromFieldTrials()), - rate_control_settings_(RateControlSettings::ParseFromFieldTrials()) {} + rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), + legacy_conference_mode_(false) {} SimulcastRateAllocator::~SimulcastRateAllocator() = default; @@ -87,7 +88,7 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( DataRate left_in_stable_allocation = stable_bitrate; if (codec_.maxBitrate) { - DataRate max_rate = DataRate::kbps(codec_.maxBitrate); + DataRate max_rate = DataRate::KilobitsPerSec(codec_.maxBitrate); left_in_total_allocation = std::min(left_in_total_allocation, max_rate); left_in_stable_allocation = std::min(left_in_stable_allocation, max_rate); } @@ -97,7 +98,8 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( if (codec_.active) { allocated_bitrates->SetBitrate( 0, 0, - std::max(DataRate::kbps(codec_.minBitrate), left_in_total_allocation) + std::max(DataRate::KilobitsPerSec(codec_.minBitrate), + left_in_total_allocation) .bps()); } return; @@ -129,7 
+131,7 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( // Always allocate enough bitrate for the minimum bitrate of the first // active layer. Suspending below min bitrate is controlled outside the // codec implementation and is not overridden by this. - DataRate min_rate = DataRate::kbps( + DataRate min_rate = DataRate::KilobitsPerSec( codec_.simulcastStream[layer_index[active_layer]].minBitrate); left_in_total_allocation = std::max(left_in_total_allocation, min_rate); left_in_stable_allocation = std::max(left_in_stable_allocation, min_rate); @@ -149,7 +151,7 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( size_t top_active_layer = active_layer; // Allocate up to the target bitrate for each active simulcast layer. for (; active_layer < codec_.numberOfSimulcastStreams; ++active_layer) { - const SimulcastStream& stream = + const SpatialLayer& stream = codec_.simulcastStream[layer_index[active_layer]]; if (!stream.active) { stream_enabled_[layer_index[active_layer]] = false; @@ -157,8 +159,8 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( } // If we can't allocate to the current layer we can't allocate to higher // layers because they require a higher minimum bitrate. - DataRate min_bitrate = DataRate::kbps(stream.minBitrate); - DataRate target_bitrate = DataRate::kbps(stream.targetBitrate); + DataRate min_bitrate = DataRate::KilobitsPerSec(stream.minBitrate); + DataRate target_bitrate = DataRate::KilobitsPerSec(stream.targetBitrate); double hysteresis_factor = codec_.mode == VideoCodecMode::kRealtimeVideo ? stable_rate_settings_.GetVideoHysteresisFactor() @@ -192,12 +194,12 @@ void SimulcastRateAllocator::DistributeAllocationToSimulcastLayers( // TODO(sprang): Allocate up to max bitrate for all layers once we have a // better idea of possible performance implications. 
if (left_in_total_allocation > DataRate::Zero()) { - const SimulcastStream& stream = codec_.simulcastStream[top_active_layer]; - DataRate initial_layer_rate = - DataRate::bps(allocated_bitrates->GetSpatialLayerSum(top_active_layer)); - DataRate additional_allocation = - std::min(left_in_total_allocation, - DataRate::kbps(stream.maxBitrate) - initial_layer_rate); + const SpatialLayer& stream = codec_.simulcastStream[top_active_layer]; + DataRate initial_layer_rate = DataRate::BitsPerSec( + allocated_bitrates->GetSpatialLayerSum(top_active_layer)); + DataRate additional_allocation = std::min( + left_in_total_allocation, + DataRate::KilobitsPerSec(stream.maxBitrate) - initial_layer_rate); allocated_bitrates->SetBitrate( top_active_layer, 0, (initial_layer_rate + additional_allocation).bps()); @@ -227,12 +229,8 @@ void SimulcastRateAllocator::DistributeAllocationToTemporalLayers( uint32_t max_bitrate_kbps; // Legacy temporal-layered only screenshare, or simulcast screenshare // with legacy mode for simulcast stream 0. - const bool conference_screenshare_mode = - codec_.mode == VideoCodecMode::kScreensharing && - ((num_spatial_streams == 1 && num_temporal_streams == 2) || // Legacy. - (num_spatial_streams > 1 && simulcast_id == 0 && - num_temporal_streams == 2)); // Simulcast. - if (conference_screenshare_mode) { + if (codec_.mode == VideoCodecMode::kScreensharing && + legacy_conference_mode_ && simulcast_id == 0) { // TODO(holmer): This is a "temporary" hack for screensharing, where we // interpret the startBitrate as the encoder target bitrate. 
This is // to allow for a different max bitrate, so if the codec can't meet @@ -252,7 +250,8 @@ void SimulcastRateAllocator::DistributeAllocationToTemporalLayers( if (num_temporal_streams == 1) { tl_allocation.push_back(target_bitrate_kbps); } else { - if (conference_screenshare_mode) { + if (codec_.mode == VideoCodecMode::kScreensharing && + legacy_conference_mode_ && simulcast_id == 0) { tl_allocation = ScreenshareTemporalLayerAllocation( target_bitrate_kbps, max_bitrate_kbps, simulcast_id); } else { @@ -337,4 +336,8 @@ int SimulcastRateAllocator::NumTemporalStreams(size_t simulcast_id) const { : codec_.simulcastStream[simulcast_id].numberOfTemporalLayers); } +void SimulcastRateAllocator::SetLegacyConferenceMode(bool enabled) { + legacy_conference_mode_ = enabled; +} + } // namespace webrtc diff --git a/modules/video_coding/utility/simulcast_rate_allocator.h b/modules/video_coding/utility/simulcast_rate_allocator.h index d9d9627352..9b2f9696e6 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator.h +++ b/modules/video_coding/utility/simulcast_rate_allocator.h @@ -38,6 +38,8 @@ class SimulcastRateAllocator : public VideoBitrateAllocator { int temporal_id, bool base_heavy_tl3_alloc); + void SetLegacyConferenceMode(bool mode) override; + private: void DistributeAllocationToSimulcastLayers( DataRate total_bitrate, @@ -58,6 +60,7 @@ class SimulcastRateAllocator : public VideoBitrateAllocator { const StableTargetRateExperiment stable_rate_settings_; const RateControlSettings rate_control_settings_; std::vector stream_enabled_; + bool legacy_conference_mode_; RTC_DISALLOW_COPY_AND_ASSIGN(SimulcastRateAllocator); }; diff --git a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc index e85ae3bc29..24d7c58bcd 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc +++ b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc @@ -35,24 +35,28 @@ 
constexpr uint32_t kLegacyScreenshareMaxBitrateKbps = 1000; // Bitrates for upper simulcast screenshare layer. constexpr uint32_t kSimulcastScreenshareMinBitrateKbps = 600; constexpr uint32_t kSimulcastScreenshareMaxBitrateKbps = 1250; +// Default video hysteresis factor: allocatable bitrate for next layer must +// exceed 20% of min setting in order to be initially turned on. +const double kDefaultHysteresis = 1.2; class MockTemporalLayers : public Vp8FrameBufferController { public: - MOCK_METHOD2(NextFrameConfig, Vp8FrameConfig(size_t, uint32_t)); - MOCK_METHOD3(OnRatesUpdated, void(size_t, const std::vector&, int)); - MOCK_METHOD1(UpdateConfiguration, Vp8EncoderConfig(size_t)); - MOCK_METHOD6(OnEncodeDone, - void(size_t, uint32_t, size_t, bool, int, CodecSpecificInfo*)); - MOCK_METHOD4(FrameEncoded, void(size_t, uint32_t, size_t, int)); - MOCK_CONST_METHOD0(Tl0PicIdx, uint8_t()); - MOCK_CONST_METHOD1(GetTemporalLayerId, int(const Vp8FrameConfig&)); + MOCK_METHOD(Vp8FrameConfig, NextFrameConfig, (size_t, uint32_t), (override)); + MOCK_METHOD(void, + OnRatesUpdated, + (size_t, const std::vector&, int), + (override)); + MOCK_METHOD(Vp8EncoderConfig, UpdateConfiguration, (size_t), (override)); + MOCK_METHOD(void, + OnEncodeDone, + (size_t, uint32_t, size_t, bool, int, CodecSpecificInfo*), + (override)); }; } // namespace class SimulcastRateAllocatorTest : public ::testing::TestWithParam { public: SimulcastRateAllocatorTest() { - memset(&codec_, 0, sizeof(VideoCodec)); codec_.codecType = kVideoCodecVP8; codec_.minBitrate = kMinBitrateKbps; codec_.maxBitrate = kLegacyScreenshareMaxBitrateKbps; @@ -86,8 +90,9 @@ class SimulcastRateAllocatorTest : public ::testing::TestWithParam { EXPECT_EQ(sum, actual.get_sum_bps()); } - void CreateAllocator() { + void CreateAllocator(bool legacy_conference_mode = false) { allocator_.reset(new SimulcastRateAllocator(codec_)); + allocator_->SetLegacyConferenceMode(legacy_conference_mode); } void SetupCodec3SL3TL(const std::vector& 
active_streams) { @@ -133,7 +138,7 @@ class SimulcastRateAllocatorTest : public ::testing::TestWithParam { VideoBitrateAllocation GetAllocation(uint32_t target_bitrate) { return allocator_->Allocate(VideoBitrateAllocationParameters( - DataRate::kbps(target_bitrate), kDefaultFrameRate)); + DataRate::KilobitsPerSec(target_bitrate), kDefaultFrameRate)); } VideoBitrateAllocation GetAllocation(DataRate target_rate, @@ -143,15 +148,18 @@ class SimulcastRateAllocatorTest : public ::testing::TestWithParam { } DataRate MinRate(size_t layer_index) const { - return DataRate::kbps(codec_.simulcastStream[layer_index].minBitrate); + return DataRate::KilobitsPerSec( + codec_.simulcastStream[layer_index].minBitrate); } DataRate TargetRate(size_t layer_index) const { - return DataRate::kbps(codec_.simulcastStream[layer_index].targetBitrate); + return DataRate::KilobitsPerSec( + codec_.simulcastStream[layer_index].targetBitrate); } DataRate MaxRate(size_t layer_index) const { - return DataRate::kbps(codec_.simulcastStream[layer_index].maxBitrate); + return DataRate::KilobitsPerSec( + codec_.simulcastStream[layer_index].maxBitrate); } protected: @@ -224,6 +232,7 @@ TEST_F(SimulcastRateAllocatorTest, SingleSimulcastBelowMin) { TEST_F(SimulcastRateAllocatorTest, SignalsBwLimited) { // Enough to enable all layers. const int kVeryBigBitrate = 100000; + // With simulcast, use the min bitrate from the ss spec instead of the global. 
SetupCodec3SL3TL({true, true, true}); CreateAllocator(); @@ -235,10 +244,13 @@ TEST_F(SimulcastRateAllocatorTest, SignalsBwLimited) { EXPECT_TRUE(GetAllocation(codec_.simulcastStream[0].targetBitrate + codec_.simulcastStream[1].minBitrate) .is_bw_limited()); - EXPECT_FALSE(GetAllocation(codec_.simulcastStream[0].targetBitrate + - codec_.simulcastStream[1].targetBitrate + - codec_.simulcastStream[2].minBitrate) - .is_bw_limited()); + EXPECT_FALSE( + GetAllocation( + codec_.simulcastStream[0].targetBitrate + + codec_.simulcastStream[1].targetBitrate + + static_cast( + codec_.simulcastStream[2].minBitrate * kDefaultHysteresis + 0.5)) + .is_bw_limited()); EXPECT_FALSE(GetAllocation(kVeryBigBitrate).is_bw_limited()); } @@ -334,20 +346,23 @@ TEST_F(SimulcastRateAllocatorTest, OneToThreeStreams) { ExpectEqual(expected, GetAllocation(bitrate)); } + uint32_t kMinInitialRateTwoLayers = + codec_.simulcastStream[0].targetBitrate + + static_cast(codec_.simulcastStream[1].minBitrate * + kDefaultHysteresis); { // Bitrate above target for first stream, but below min for the next one. - const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate + - codec_.simulcastStream[1].minBitrate - 1; + const uint32_t bitrate = kMinInitialRateTwoLayers - 1; uint32_t expected[] = {bitrate, 0, 0}; ExpectEqual(expected, GetAllocation(bitrate)); } { // Just enough for two streams. 
- const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate + - codec_.simulcastStream[1].minBitrate; - uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate, - codec_.simulcastStream[1].minBitrate, 0}; + const uint32_t bitrate = kMinInitialRateTwoLayers; + uint32_t expected[] = { + codec_.simulcastStream[0].targetBitrate, + kMinInitialRateTwoLayers - codec_.simulcastStream[0].targetBitrate, 0}; ExpectEqual(expected, GetAllocation(bitrate)); } @@ -360,11 +375,15 @@ TEST_F(SimulcastRateAllocatorTest, OneToThreeStreams) { ExpectEqual(expected, GetAllocation(bitrate)); } + uint32_t kMinInitialRateThreeLayers = + codec_.simulcastStream[0].targetBitrate + + codec_.simulcastStream[1].targetBitrate + + static_cast(codec_.simulcastStream[2].minBitrate * + kDefaultHysteresis); { // First two streams maxed out, but not enough for third. Nowhere to put // remaining bits. - const uint32_t bitrate = codec_.simulcastStream[0].maxBitrate + - codec_.simulcastStream[1].maxBitrate + 499; + const uint32_t bitrate = kMinInitialRateThreeLayers - 1; uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate, codec_.simulcastStream[1].maxBitrate, 0}; ExpectEqual(expected, GetAllocation(bitrate)); @@ -372,12 +391,12 @@ TEST_F(SimulcastRateAllocatorTest, OneToThreeStreams) { { // Just enough for all three streams. 
- const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate + - codec_.simulcastStream[1].targetBitrate + - codec_.simulcastStream[2].minBitrate; - uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate, - codec_.simulcastStream[1].targetBitrate, - codec_.simulcastStream[2].minBitrate}; + const uint32_t bitrate = kMinInitialRateThreeLayers; + uint32_t expected[] = { + codec_.simulcastStream[0].targetBitrate, + codec_.simulcastStream[1].targetBitrate, + static_cast(codec_.simulcastStream[2].minBitrate * + kDefaultHysteresis)}; ExpectEqual(expected, GetAllocation(bitrate)); } @@ -590,8 +609,8 @@ TEST_F(SimulcastRateAllocatorTest, StableRate) { // Let stable rate go to a bitrate below what is needed for two streams. uint32_t expected[] = {MaxRate(0).kbps(), 0}; ExpectEqual(expected, - GetAllocation(volatile_rate, - TargetRate(0) + MinRate(1) - DataRate::bps(1))); + GetAllocation(volatile_rate, TargetRate(0) + MinRate(1) - + DataRate::BitsPerSec(1))); } { @@ -664,9 +683,9 @@ INSTANTIATE_TEST_SUITE_P(ScreenshareTest, ScreenshareRateAllocationTest, ::testing::Bool()); -TEST_P(ScreenshareRateAllocationTest, BitrateBelowTl0) { +TEST_P(ScreenshareRateAllocationTest, ConferenceBitrateBelowTl0) { SetupConferenceScreenshare(GetParam()); - CreateAllocator(); + CreateAllocator(true); VideoBitrateAllocation allocation = allocator_->Allocate(VideoBitrateAllocationParameters( @@ -679,9 +698,9 @@ TEST_P(ScreenshareRateAllocationTest, BitrateBelowTl0) { EXPECT_EQ(allocation.is_bw_limited(), GetParam()); } -TEST_P(ScreenshareRateAllocationTest, BitrateAboveTl0) { +TEST_P(ScreenshareRateAllocationTest, ConferenceBitrateAboveTl0) { SetupConferenceScreenshare(GetParam()); - CreateAllocator(); + CreateAllocator(true); uint32_t target_bitrate_kbps = (kLegacyScreenshareTargetBitrateKbps + kLegacyScreenshareMaxBitrateKbps) / @@ -699,10 +718,10 @@ TEST_P(ScreenshareRateAllocationTest, BitrateAboveTl0) { EXPECT_EQ(allocation.is_bw_limited(), GetParam()); } 
-TEST_F(ScreenshareRateAllocationTest, BitrateAboveTl1) { +TEST_F(ScreenshareRateAllocationTest, ConferenceBitrateAboveTl1) { // This test is only for the non-simulcast case. SetupConferenceScreenshare(false); - CreateAllocator(); + CreateAllocator(true); VideoBitrateAllocation allocation = allocator_->Allocate(VideoBitrateAllocationParameters( diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/utility/simulcast_test_fixture_impl.cc index f157734192..a9af643446 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc +++ b/modules/video_coding/utility/simulcast_test_fixture_impl.cc @@ -75,27 +75,18 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback } Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { + const CodecSpecificInfo* codec_specific_info) override { bool is_vp8 = (codec_specific_info->codecType == kVideoCodecVP8); bool is_h264 = (codec_specific_info->codecType == kVideoCodecH264); // Only store the base layer. if (encoded_image.SpatialIndex().value_or(0) == 0) { if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) { - // TODO(nisse): Why not size() ? 
- encoded_key_frame_.SetEncodedData( - EncodedImageBuffer::Create(encoded_image.capacity())); - encoded_key_frame_.set_size(encoded_image.size()); + encoded_key_frame_.SetEncodedData(EncodedImageBuffer::Create( + encoded_image.data(), encoded_image.size())); encoded_key_frame_._frameType = VideoFrameType::kVideoFrameKey; - encoded_key_frame_._completeFrame = encoded_image._completeFrame; - memcpy(encoded_key_frame_.data(), encoded_image.data(), - encoded_image.size()); } else { - encoded_frame_.SetEncodedData( - EncodedImageBuffer::Create(encoded_image.capacity())); - encoded_frame_.set_size(encoded_image.size()); - memcpy(encoded_frame_.data(), encoded_image.data(), - encoded_image.size()); + encoded_frame_.SetEncodedData(EncodedImageBuffer::Create( + encoded_image.data(), encoded_image.size())); } } if (is_vp8) { @@ -197,7 +188,7 @@ void ConfigureStream(int width, int min_bitrate, int target_bitrate, float max_framerate, - SimulcastStream* stream, + SpatialLayer* stream, int num_temporal_layers) { assert(stream); stream->width = width; @@ -221,10 +212,8 @@ void SimulcastTestFixtureImpl::DefaultSettings( VideoCodecType codec_type, bool reverse_layer_order) { RTC_CHECK(settings); - memset(settings, 0, sizeof(VideoCodec)); + *settings = {}; settings->codecType = codec_type; - // 96 to 127 dynamic payload types for video codecs - settings->plType = 120; settings->startBitrate = 300; settings->minBitrate = 30; settings->maxBitrate = 0; @@ -354,7 +343,7 @@ void SimulcastTestFixtureImpl::ExpectStreams( AllOf(Field(&EncodedImage::_frameType, frame_type), Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4), Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)), - _, _)) + _)) .Times(1) .WillRepeatedly(Return( EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); @@ -366,7 +355,7 @@ void SimulcastTestFixtureImpl::ExpectStreams( AllOf(Field(&EncodedImage::_frameType, frame_type), Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2), 
Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)), - _, _)) + _)) .Times(1) .WillRepeatedly(Return( EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); @@ -377,7 +366,7 @@ void SimulcastTestFixtureImpl::ExpectStreams( AllOf(Field(&EncodedImage::_frameType, frame_type), Field(&EncodedImage::_encodedWidth, kDefaultWidth), Field(&EncodedImage::_encodedHeight, kDefaultHeight)), - _, _)) + _)) .Times(1) .WillRepeatedly(Return( EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); @@ -645,7 +634,7 @@ void SimulcastTestFixtureImpl::SwitchingToOneStream(int width, int height) { VideoFrameType::kVideoFrameKey), Field(&EncodedImage::_encodedWidth, width), Field(&EncodedImage::_encodedHeight, height)), - _, _)) + _)) .Times(1) .WillRepeatedly(Return( EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0))); @@ -868,23 +857,17 @@ void SimulcastTestFixtureImpl::TestDecodeWidthHeightSet() { encoder_->RegisterEncodeCompleteCallback(&encoder_callback); decoder_->RegisterDecodeCompleteCallback(&decoder_callback); - EXPECT_CALL(encoder_callback, OnEncodedImage(_, _, _)) + EXPECT_CALL(encoder_callback, OnEncodedImage(_, _)) .Times(3) .WillRepeatedly( ::testing::Invoke([&](const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codec_specific_info) { EXPECT_EQ(encoded_image._frameType, VideoFrameType::kVideoFrameKey); size_t index = encoded_image.SpatialIndex().value_or(0); - // TODO(nisse): Why not size() - encoded_frame[index].SetEncodedData( - EncodedImageBuffer::Create(encoded_image.capacity())); - encoded_frame[index].set_size(encoded_image.size()); + encoded_frame[index].SetEncodedData(EncodedImageBuffer::Create( + encoded_image.data(), encoded_image.size())); encoded_frame[index]._frameType = encoded_image._frameType; - encoded_frame[index]._completeFrame = encoded_image._completeFrame; - memcpy(encoded_frame[index].data(), 
encoded_image.data(), - encoded_image.size()); return EncodedImageCallback::Result( EncodedImageCallback::Result::OK, 0); })); @@ -918,5 +901,15 @@ void SimulcastTestFixtureImpl::TestDecodeWidthHeightSet() { EXPECT_EQ(0, decoder_->Decode(encoded_frame[2], false, 0)); } +void SimulcastTestFixtureImpl:: + TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation() { + VideoEncoder::EncoderInfo encoder_info = encoder_->GetEncoderInfo(); + EXPECT_EQ(encoder_info.fps_allocation[0].size(), + static_cast(kDefaultTemporalLayerProfile[0])); + EXPECT_EQ(encoder_info.fps_allocation[1].size(), + static_cast(kDefaultTemporalLayerProfile[1])); + EXPECT_EQ(encoder_info.fps_allocation[2].size(), + static_cast(kDefaultTemporalLayerProfile[2])); +} } // namespace test } // namespace webrtc diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.h b/modules/video_coding/utility/simulcast_test_fixture_impl.h index 3b55898ccf..a3d3fc66a8 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.h +++ b/modules/video_coding/utility/simulcast_test_fixture_impl.h @@ -50,6 +50,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { void TestSpatioTemporalLayers321PatternEncoder() override; void TestStrideEncodeDecode() override; void TestDecodeWidthHeightSet() override; + void TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation() override; static void DefaultSettings(VideoCodec* settings, const int* temporal_layer_profile, diff --git a/modules/video_coding/utility/simulcast_utility.cc b/modules/video_coding/utility/simulcast_utility.cc index 3c3e235896..a407483edd 100644 --- a/modules/video_coding/utility/simulcast_utility.cc +++ b/modules/video_coding/utility/simulcast_utility.cc @@ -14,7 +14,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/experiments/experimental_screenshare_settings.h" namespace webrtc { @@ -85,24 +84,8 @@ bool SimulcastUtility::ValidSimulcastParameters(const VideoCodec& codec, } bool 
SimulcastUtility::IsConferenceModeScreenshare(const VideoCodec& codec) { - if (codec.mode != VideoCodecMode::kScreensharing || - NumberOfTemporalLayers(codec, 0) != 2) { - return false; - } - - if (codec.numberOfSimulcastStreams > 0 && - ExperimentalScreenshareSettings::ParseFromFieldTrials() - .DefaultTlInBaseLayer() - .value_or(false)) { - // Don't use ScreenshareLayers for base layer, regardless of flags. - return false; - } - - // Fixed default bitrates for legacy screenshare layers mode. - return (codec.numberOfSimulcastStreams == 0 && codec.maxBitrate == 1000) || - (codec.numberOfSimulcastStreams >= 1 && - codec.simulcastStream[0].maxBitrate == 1000 && - codec.simulcastStream[0].targetBitrate == 200); + return codec.mode == VideoCodecMode::kScreensharing && + codec.legacy_conference_mode; } int SimulcastUtility::NumberOfTemporalLayers(const VideoCodec& codec, diff --git a/modules/video_coding/utility/vp8_header_parser.cc b/modules/video_coding/utility/vp8_header_parser.cc index 6b65fad400..80026f9a0f 100644 --- a/modules/video_coding/utility/vp8_header_parser.cc +++ b/modules/video_coding/utility/vp8_header_parser.cc @@ -18,143 +18,136 @@ namespace vp8 { namespace { const size_t kCommonPayloadHeaderLength = 3; const size_t kKeyPayloadHeaderLength = 10; -} // namespace - -static uint32_t BSwap32(uint32_t x) { - return (x >> 24) | ((x >> 8) & 0xff00) | ((x << 8) & 0xff0000) | (x << 24); -} - -static void VP8LoadFinalBytes(VP8BitReader* const br) { - // Only read 8bits at a time. - if (br->buf_ < br->buf_end_) { - br->bits_ += 8; - br->value_ = static_cast(*br->buf_++) | (br->value_ << 8); - } else if (!br->eof_) { - br->value_ <<= 8; - br->bits_ += 8; - br->eof_ = 1; - } -} +const int kMbFeatureTreeProbs = 3; +const int kNumMbSegments = 4; +const int kNumRefLfDeltas = 4; +const int kNumModeLfDeltas = 4; -static void VP8LoadNewBytes(VP8BitReader* const br) { - int BITS = 24; - // Read 'BITS' bits at a time. 
- if (br->buf_ + sizeof(uint32_t) <= br->buf_end_) { - uint32_t bits; - const uint32_t in_bits = *(const uint32_t*)(br->buf_); - br->buf_ += BITS >> 3; -#if defined(WEBRTC_ARCH_BIG_ENDIAN) - bits = static_cast(in_bits); - if (BITS != 8 * sizeof(uint32_t)) - bits >>= (8 * sizeof(uint32_t) - BITS); -#else - bits = BSwap32(in_bits); - bits >>= 32 - BITS; -#endif - br->value_ = bits | (br->value_ << BITS); - br->bits_ += BITS; - } else { - VP8LoadFinalBytes(br); - } -} +} // namespace -static void VP8InitBitReader(VP8BitReader* const br, - const uint8_t* const start, - const uint8_t* const end) { - br->range_ = 255 - 1; +// Bitstream parser according to +// https://tools.ietf.org/html/rfc6386#section-7.3 +void VP8InitBitReader(VP8BitReader* const br, + const uint8_t* start, + const uint8_t* end) { + br->range_ = 255; br->buf_ = start; br->buf_end_ = end; br->value_ = 0; - br->bits_ = -8; // To load the very first 8bits. - br->eof_ = 0; - VP8LoadNewBytes(br); + br->bits_ = 0; + + // Read 2 bytes. + int i = 0; + while (++i <= 2) { + if (br->buf_ != br->buf_end_) { + br->value_ = br->value_ << 8 | *br->buf_++; + } else { + br->value_ = br->value_ << 8; + } + } } -// Read a bit with proba 'prob'. -static int VP8GetBit(VP8BitReader* const br, int prob) { - uint8_t range = br->range_; - if (br->bits_ < 0) { - VP8LoadNewBytes(br); - if (br->eof_) - return 0; - } - const int pos = br->bits_; - const uint8_t split = (range * prob) >> 8; - const uint8_t value = static_cast(br->value_ >> pos); - int bit; - if (value > split) { - range -= split + 1; - br->value_ -= static_cast(split + 1) << pos; - bit = 1; +// Bit decoder according to https://tools.ietf.org/html/rfc6386#section-7.3 +// Reads one bit from the bitstream, given that it has probability prob/256 to +// be 1. 
+int Vp8BitReaderGetBool(VP8BitReader* br, int prob) { + uint32_t split = 1 + (((br->range_ - 1) * prob) >> 8); + uint32_t split_hi = split << 8; + int retval = 0; + if (br->value_ >= split_hi) { + retval = 1; + br->range_ -= split; + br->value_ -= split_hi; } else { - range = split; - bit = 0; + retval = 0; + br->range_ = split; } - if (range <= static_cast(0x7e)) { - const int shift = kVP8Log2Range[range]; - range = kVP8NewRange[range]; - br->bits_ -= shift; + + while (br->range_ < 128) { + br->value_ <<= 1; + br->range_ <<= 1; + if (++br->bits_ == 8) { + br->bits_ = 0; + if (br->buf_ != br->buf_end_) { + br->value_ |= *br->buf_++; + } + } } - br->range_ = range; - return bit; + return retval; } -static uint32_t VP8GetValue(VP8BitReader* const br, int bits) { +uint32_t VP8GetValue(VP8BitReader* br, int num_bits) { uint32_t v = 0; - while (bits-- > 0) { - v |= VP8GetBit(br, 0x80) << bits; + while (num_bits--) { + // According to https://tools.ietf.org/html/rfc6386 + // Probability 128/256 is used to encode header fields. + v = (v << 1) | Vp8BitReaderGetBool(br, 128); } return v; } -static uint32_t VP8Get(VP8BitReader* const br) { - return VP8GetValue(br, 1); -} - -static int32_t VP8GetSignedValue(VP8BitReader* const br, int bits) { - const int value = VP8GetValue(br, bits); - return VP8Get(br) ? -value : value; +// Not a read_signed_literal() from RFC 6386! +// This one is used to read e.g. quantizer_update, which is written as: +// L(num_bits), sign-bit. +int32_t VP8GetSignedValue(VP8BitReader* br, int num_bits) { + int v = VP8GetValue(br, num_bits); + int sign = VP8GetValue(br, 1); + return sign ? -v : v; } static void ParseSegmentHeader(VP8BitReader* br) { - int use_segment = VP8Get(br); + int use_segment = VP8GetValue(br, 1); if (use_segment) { - int update_map = VP8Get(br); - if (VP8Get(br)) { + int update_map = VP8GetValue(br, 1); + if (VP8GetValue(br, 1)) { // update_segment_feature_data. + VP8GetValue(br, 1); // segment_feature_mode. 
int s; - VP8Get(br); - for (s = 0; s < NUM_MB_SEGMENTS; ++s) { - VP8Get(br) ? VP8GetSignedValue(br, 7) : 0; + for (s = 0; s < kNumMbSegments; ++s) { + bool quantizer_update = VP8GetValue(br, 1); + if (quantizer_update) { + VP8GetSignedValue(br, 7); + } } - for (s = 0; s < NUM_MB_SEGMENTS; ++s) { - VP8Get(br) ? VP8GetSignedValue(br, 6) : 0; + for (s = 0; s < kNumMbSegments; ++s) { + bool loop_filter_update = VP8GetValue(br, 1); + if (loop_filter_update) { + VP8GetSignedValue(br, 6); + } } } if (update_map) { int s; - for (s = 0; s < MB_FEATURE_TREE_PROBS; ++s) { - VP8Get(br) ? VP8GetValue(br, 8) : 255; + for (s = 0; s < kMbFeatureTreeProbs; ++s) { + bool segment_prob_update = VP8GetValue(br, 1); + if (segment_prob_update) { + VP8GetValue(br, 8); + } } } } } static void ParseFilterHeader(VP8BitReader* br) { - VP8Get(br); - VP8GetValue(br, 6); - VP8GetValue(br, 3); - int use_lf_delta = VP8Get(br); - if (use_lf_delta) { - if (VP8Get(br)) { + VP8GetValue(br, 1); // filter_type. + VP8GetValue(br, 6); // loop_filter_level. + VP8GetValue(br, 3); // sharpness_level. + + // mb_lf_adjustments. + int loop_filter_adj_enable = VP8GetValue(br, 1); + if (loop_filter_adj_enable) { + int mode_ref_lf_delta_update = VP8GetValue(br, 1); + if (mode_ref_lf_delta_update) { int i; - for (i = 0; i < NUM_REF_LF_DELTAS; ++i) { - if (VP8Get(br)) { - VP8GetSignedValue(br, 6); + for (i = 0; i < kNumRefLfDeltas; ++i) { + int ref_frame_delta_update_flag = VP8GetValue(br, 1); + if (ref_frame_delta_update_flag) { + VP8GetSignedValue(br, 6); // delta_magnitude. } } - for (i = 0; i < NUM_MODE_LF_DELTAS; ++i) { - if (VP8Get(br)) { - VP8GetSignedValue(br, 6); + for (i = 0; i < kNumModeLfDeltas; ++i) { + int mb_mode_delta_update_flag = VP8GetValue(br, 1); + if (mb_mode_delta_update_flag) { + VP8GetSignedValue(br, 6); // delta_magnitude. 
} } } @@ -184,17 +177,18 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) { VP8InitBitReader(&br, buf, buf + partition_length); if (key_frame) { // Color space and pixel type. - VP8Get(&br); - VP8Get(&br); + VP8GetValue(&br, 1); + VP8GetValue(&br, 1); } ParseSegmentHeader(&br); ParseFilterHeader(&br); - // Number of coefficient data partitions. + // Parse log2_nbr_of_dct_partitions value. VP8GetValue(&br, 2); // Base QP. const int base_q0 = VP8GetValue(&br, 7); - if (br.eof_ == 1) { - RTC_LOG(LS_WARNING) << "Failed to get QP, end of file reached."; + if (br.buf_ == br.buf_end_) { + RTC_LOG(LS_WARNING) << "Failed to get QP, bitstream is truncated or" + " corrupted."; return false; } *qp = base_q0; diff --git a/modules/video_coding/utility/vp8_header_parser.h b/modules/video_coding/utility/vp8_header_parser.h index 637bc5c87b..dbad999dc8 100644 --- a/modules/video_coding/utility/vp8_header_parser.h +++ b/modules/video_coding/utility/vp8_header_parser.h @@ -18,45 +18,17 @@ namespace webrtc { namespace vp8 { -enum { - MB_FEATURE_TREE_PROBS = 3, - NUM_MB_SEGMENTS = 4, - NUM_REF_LF_DELTAS = 4, - NUM_MODE_LF_DELTAS = 4, -}; - typedef struct VP8BitReader VP8BitReader; struct VP8BitReader { // Boolean decoder. - uint32_t value_; // Current value. - uint32_t range_; // Current range minus 1. In [127, 254] interval. - int bits_; // Number of valid bits left. + uint32_t value_; // Current value (2 bytes). + uint32_t range_; // Current range (always in [128..255] interval). + int bits_; // Number of bits shifted out of value, at most 7. // Read buffer. const uint8_t* buf_; // Next byte to be read. const uint8_t* buf_end_; // End of read buffer. - int eof_; // True if input is exhausted. 
}; -const uint8_t kVP8Log2Range[128] = { - 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0}; - -// range = ((range - 1) << kVP8Log2Range[range]) + 1 -const uint8_t kVP8NewRange[128] = { - 127, 127, 191, 127, 159, 191, 223, 127, 143, 159, 175, 191, 207, 223, 239, - 127, 135, 143, 151, 159, 167, 175, 183, 191, 199, 207, 215, 223, 231, 239, - 247, 127, 131, 135, 139, 143, 147, 151, 155, 159, 163, 167, 171, 175, 179, - 183, 187, 191, 195, 199, 203, 207, 211, 215, 219, 223, 227, 231, 235, 239, - 243, 247, 251, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, - 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179, - 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209, - 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, - 241, 243, 245, 247, 249, 251, 253, 127}; - // Gets the QP, QP range: [0, 127]. // Returns true on success, false otherwise. bool GetQp(const uint8_t* buf, size_t length, int* qp); diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc index 9c89235fe2..f8ddd4db41 100644 --- a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc +++ b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc @@ -52,40 +52,65 @@ bool Vp9ReadSyncCode(rtc::BitBuffer* br) { return true; } -bool Vp9ReadColorConfig(rtc::BitBuffer* br, uint8_t profile) { - if (profile == 2 || profile == 3) { - // Bitdepth. 
- RETURN_FALSE_IF_ERROR(br->ConsumeBits(1)); +bool Vp9ReadColorConfig(rtc::BitBuffer* br, + uint8_t profile, + FrameInfo* frame_info) { + if (profile == 0 || profile == 1) { + frame_info->bit_detph = BitDept::k8Bit; + } else if (profile == 2 || profile == 3) { + uint32_t ten_or_twelve_bits; + RETURN_FALSE_IF_ERROR(br->ReadBits(&ten_or_twelve_bits, 1)); + frame_info->bit_detph = + ten_or_twelve_bits ? BitDept::k12Bit : BitDept::k10Bit; } uint32_t color_space; RETURN_FALSE_IF_ERROR(br->ReadBits(&color_space, 3)); + frame_info->color_space = static_cast(color_space); // SRGB is 7. if (color_space != 7) { - // YUV range flag. - RETURN_FALSE_IF_ERROR(br->ConsumeBits(1)); + uint32_t color_range; + RETURN_FALSE_IF_ERROR(br->ReadBits(&color_range, 1)); + frame_info->color_range = + color_range ? ColorRange::kFull : ColorRange::kStudio; + if (profile == 1 || profile == 3) { - // 1 bit: subsampling x. - // 1 bit: subsampling y. - RETURN_FALSE_IF_ERROR(br->ConsumeBits(2)); + uint32_t subsampling_x; + uint32_t subsampling_y; + RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_x, 1)); + RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_y, 1)); + if (subsampling_x) { + frame_info->sub_sampling = + subsampling_y ? YuvSubsampling::k420 : YuvSubsampling::k422; + } else { + frame_info->sub_sampling = + subsampling_y ? YuvSubsampling::k440 : YuvSubsampling::k444; + } + uint32_t reserved_bit; RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1)); if (reserved_bit) { - RTC_LOG(LS_WARNING) << "Failed to get QP. Reserved bit set."; + RTC_LOG(LS_WARNING) << "Failed to parse header. Reserved bit set."; return false; } + } else { + // Profile 0 or 2. + frame_info->sub_sampling = YuvSubsampling::k420; } } else { + // SRGB + frame_info->color_range = ColorRange::kFull; if (profile == 1 || profile == 3) { + frame_info->sub_sampling = YuvSubsampling::k444; uint32_t reserved_bit; RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1)); if (reserved_bit) { - RTC_LOG(LS_WARNING) << "Failed to get QP. 
Reserved bit set."; + RTC_LOG(LS_WARNING) << "Failed to parse header. Reserved bit set."; return false; } } else { - RTC_LOG(LS_WARNING) << "Failed to get QP. 4:4:4 color not supported in " - "profile 0 or 2."; + RTC_LOG(LS_WARNING) << "Failed to parse header. 4:4:4 color not supported" + " in profile 0 or 2."; return false; } } @@ -93,24 +118,38 @@ bool Vp9ReadColorConfig(rtc::BitBuffer* br, uint8_t profile) { return true; } -bool Vp9ReadFrameSize(rtc::BitBuffer* br) { - // 2 bytes: frame width. - // 2 bytes: frame height. - return br->ConsumeBytes(4); +bool Vp9ReadFrameSize(rtc::BitBuffer* br, FrameInfo* frame_info) { + // 16 bits: frame width - 1. + uint16_t frame_width_minus_one; + RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_width_minus_one)); + // 16 bits: frame height - 1. + uint16_t frame_height_minus_one; + RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_height_minus_one)); + frame_info->frame_width = frame_width_minus_one + 1; + frame_info->frame_height = frame_height_minus_one + 1; + return true; } -bool Vp9ReadRenderSize(rtc::BitBuffer* br) { - uint32_t bit; - RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1)); - if (bit) { - // 2 bytes: render width. - // 2 bytes: render height. - RETURN_FALSE_IF_ERROR(br->ConsumeBytes(4)); +bool Vp9ReadRenderSize(rtc::BitBuffer* br, FrameInfo* frame_info) { + uint32_t render_and_frame_size_different; + RETURN_FALSE_IF_ERROR(br->ReadBits(&render_and_frame_size_different, 1)); + if (render_and_frame_size_different) { + // 16 bits: render width - 1. + uint16_t render_width_minus_one; + RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_width_minus_one)); + // 16 bits: render height - 1. 
+ uint16_t render_height_minus_one; + RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_height_minus_one)); + frame_info->render_width = render_width_minus_one + 1; + frame_info->render_height = render_height_minus_one + 1; + } else { + frame_info->render_width = frame_info->frame_width; + frame_info->render_height = frame_info->frame_height; } return true; } -bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br) { +bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br, FrameInfo* frame_info) { uint32_t found_ref = 0; for (size_t i = 0; i < kVp9NumRefsPerFrame; i++) { // Size in refs. @@ -120,11 +159,11 @@ bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br) { } if (!found_ref) { - if (!Vp9ReadFrameSize(br)) { + if (!Vp9ReadFrameSize(br, frame_info)) { return false; } } - return Vp9ReadRenderSize(br); + return Vp9ReadRenderSize(br, frame_info); } bool Vp9ReadInterpolationFilter(rtc::BitBuffer* br) { @@ -166,14 +205,14 @@ bool Vp9ReadLoopfilter(rtc::BitBuffer* br) { } } // namespace -bool GetQp(const uint8_t* buf, size_t length, int* qp) { +bool Parse(const uint8_t* buf, size_t length, int* qp, FrameInfo* frame_info) { rtc::BitBuffer br(buf, length); // Frame marker. uint32_t frame_marker; RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_marker, 2)); if (frame_marker != 0x2) { - RTC_LOG(LS_WARNING) << "Failed to get QP. Frame marker should be 2."; + RTC_LOG(LS_WARNING) << "Failed to parse header. Frame marker should be 2."; return false; } @@ -181,6 +220,7 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) { uint8_t profile; if (!Vp9ReadProfile(&br, &profile)) return false; + frame_info->profile = profile; // Show existing frame. 
uint32_t show_existing_frame; @@ -195,18 +235,21 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) { RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_type, 1)); RETURN_FALSE_IF_ERROR(br.ReadBits(&show_frame, 1)); RETURN_FALSE_IF_ERROR(br.ReadBits(&error_resilient, 1)); + frame_info->show_frame = show_frame; + frame_info->error_resilient = error_resilient; - if (!frame_type) { + if (frame_type == 0) { + // Key-frame. if (!Vp9ReadSyncCode(&br)) return false; - if (!Vp9ReadColorConfig(&br, profile)) + if (!Vp9ReadColorConfig(&br, profile, frame_info)) return false; - if (!Vp9ReadFrameSize(&br)) + if (!Vp9ReadFrameSize(&br, frame_info)) return false; - if (!Vp9ReadRenderSize(&br)) + if (!Vp9ReadRenderSize(&br, frame_info)) return false; - } else { + } else { + // Non-keyframe. uint32_t intra_only = 0; if (!show_frame) RETURN_FALSE_IF_ERROR(br.ReadBits(&intra_only, 1)); @@ -218,14 +261,14 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) { return false; if (profile > 0) { - if (!Vp9ReadColorConfig(&br, profile)) + if (!Vp9ReadColorConfig(&br, profile, frame_info)) return false; } // Refresh frame flags. RETURN_FALSE_IF_ERROR(br.ConsumeBits(8)); - if (!Vp9ReadFrameSize(&br)) + if (!Vp9ReadFrameSize(&br, frame_info)) return false; - if (!Vp9ReadRenderSize(&br)) + if (!Vp9ReadRenderSize(&br, frame_info)) return false; } else { // Refresh frame flags. @@ -237,7 +280,7 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) { RETURN_FALSE_IF_ERROR(br.ConsumeBits(4)); } - if (!Vp9ReadFrameSizeFromRefs(&br)) + if (!Vp9ReadFrameSizeFromRefs(&br, frame_info)) return false; // Allow high precision mv.
@@ -267,6 +310,20 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) { return true; } -} // namespace vp9 +bool GetQp(const uint8_t* buf, size_t length, int* qp) { + FrameInfo frame_info; + return Parse(buf, length, qp, &frame_info); +} +absl::optional ParseIntraFrameInfo(const uint8_t* buf, + size_t length) { + int qp = 0; + FrameInfo frame_info; + if (Parse(buf, length, &qp, &frame_info) && frame_info.frame_width > 0) { + return frame_info; + } + return absl::nullopt; +} + +} // namespace vp9 } // namespace webrtc diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser.h b/modules/video_coding/utility/vp9_uncompressed_header_parser.h index 69e8de87df..a7f04670d2 100644 --- a/modules/video_coding/utility/vp9_uncompressed_header_parser.h +++ b/modules/video_coding/utility/vp9_uncompressed_header_parser.h @@ -13,6 +13,7 @@ #include #include +#include "absl/types/optional.h" namespace webrtc { @@ -22,6 +23,65 @@ namespace vp9 { // Returns true on success, false otherwise. bool GetQp(const uint8_t* buf, size_t length, int* qp); +// Bit depth per channel. Support varies by profile. +enum class BitDept : uint8_t { + k8Bit = 8, + k10Bit = 10, + k12Bit = 12, +}; + +enum class ColorSpace : uint8_t { + CS_UNKNOWN = 0, // Unknown (in this case the color space must be signaled + // outside the VP9 bitstream). + CS_BT_601 = 1, // CS_BT_601 Rec. ITU-R BT.601-7 + CS_BT_709 = 2, // Rec. ITU-R BT.709-6 + CS_SMPTE_170 = 3, // SMPTE-170 + CS_SMPTE_240 = 4, // SMPTE-240 + CS_BT_2020 = 5, // Rec. ITU-R BT.2020-2 + CS_RESERVED = 6, // Reserved + CS_RGB = 7, // sRGB (IEC 61966-2-1) +}; + +enum class ColorRange { + kStudio, // Studio swing: + // For BitDepth equals 8: + // Y is between 16 and 235 inclusive. + // U and V are between 16 and 240 inclusive. + // For BitDepth equals 10: + // Y is between 64 and 940 inclusive. + // U and V are between 64 and 960 inclusive. + // For BitDepth equals 12: + // Y is between 256 and 3760. 
+ // U and V are between 256 and 3840 inclusive. + kFull // Full swing; no restriction on Y, U, V values. +}; + +enum class YuvSubsampling { + k444, + k440, + k422, + k420, +}; + +struct FrameInfo { + int profile = 0; // Profile 0-3 are valid. + bool show_frame = false; + bool error_resilient = false; + BitDept bit_detph = BitDept::k8Bit; + ColorSpace color_space = ColorSpace::CS_UNKNOWN; + ColorRange color_range; + YuvSubsampling sub_sampling; + int frame_width = 0; + int frame_height = 0; + int render_width = 0; + int render_height = 0; +}; + +// Parses frame information for a VP9 key-frame or all-intra frame from a +// bitstream. Returns nullopt on failure or if not a key-frame. +absl::optional ParseIntraFrameInfo(const uint8_t* buf, + size_t length); + } // namespace vp9 } // namespace webrtc diff --git a/modules/video_coding/video_codec_initializer.cc b/modules/video_coding/video_codec_initializer.cc index ea5de23a8f..36401ff244 100644 --- a/modules/video_coding/video_codec_initializer.cc +++ b/modules/video_coding/video_codec_initializer.cc @@ -68,14 +68,17 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( break; } - // TODO(nisse): The plType field should be deleted. Luckily, our - // callers don't need it. - video_codec.plType = 0; + video_codec.legacy_conference_mode = + config.content_type == VideoEncoderConfig::ContentType::kScreen && + config.legacy_conference_mode; + video_codec.numberOfSimulcastStreams = static_cast(streams.size()); video_codec.minBitrate = streams[0].min_bitrate_bps / 1000; bool codec_active = false; - for (const VideoStream& stream : streams) { + // Active configuration might not be fully copied to |streams| for SVC yet. + // Therefore the |config| is checked here. 
+ for (const VideoStream& stream : config.simulcast_layers) { if (stream.active) { codec_active = true; break; @@ -92,7 +95,7 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( int max_framerate = 0; for (size_t i = 0; i < streams.size(); ++i) { - SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; + SpatialLayer* sim_stream = &video_codec.simulcastStream[i]; RTC_DCHECK_GT(streams[i].width, 0); RTC_DCHECK_GT(streams[i].height, 0); RTC_DCHECK_GT(streams[i].max_framerate, 0); @@ -156,6 +159,9 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( break; } case kVideoCodecVP9: { + // Force the first stream to always be active. + video_codec.simulcastStream[0].active = codec_active; + if (!config.encoder_specific_settings) { *video_codec.VP9() = VideoEncoder::GetDefaultVp9Settings(); } @@ -176,9 +182,18 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( // Layering is set explicitly. spatial_layers = config.spatial_layers; } else { + size_t first_active_layer = 0; + for (size_t spatial_idx = 0; + spatial_idx < config.simulcast_layers.size(); ++spatial_idx) { + if (config.simulcast_layers[spatial_idx].active) { + first_active_layer = spatial_idx; + break; + } + } + spatial_layers = GetSvcConfig( video_codec.width, video_codec.height, video_codec.maxFramerate, - video_codec.VP9()->numberOfSpatialLayers, + first_active_layer, video_codec.VP9()->numberOfSpatialLayers, video_codec.VP9()->numberOfTemporalLayers, video_codec.mode == VideoCodecMode::kScreensharing); @@ -193,11 +208,11 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( spatial_layers.back().maxBitrate = video_codec.maxBitrate; } - for (size_t spatial_idx = 0; + for (size_t spatial_idx = first_active_layer; spatial_idx < config.simulcast_layers.size() && - spatial_idx < spatial_layers.size(); + spatial_idx < spatial_layers.size() + first_active_layer; ++spatial_idx) { - spatial_layers[spatial_layers.size() - spatial_idx - 1].active = 
+ spatial_layers[spatial_idx - first_active_layer].active = config.simulcast_layers[spatial_idx].active; } } @@ -207,6 +222,14 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( video_codec.spatialLayers[i] = spatial_layers[i]; } + // The top spatial layer dimensions may not be equal to the input + // resolution because of the rounding or explicit configuration. + // This difference must be propagated to the stream configuration. + video_codec.width = spatial_layers.back().width; + video_codec.height = spatial_layers.back().height; + video_codec.simulcastStream[0].width = spatial_layers.back().width; + video_codec.simulcastStream[0].height = spatial_layers.back().height; + // Update layering settings. video_codec.VP9()->numberOfSpatialLayers = static_cast(spatial_layers.size()); diff --git a/modules/video_coding/video_codec_initializer_unittest.cc b/modules/video_coding/video_codec_initializer_unittest.cc index ca1da2cc8c..1ea145e14f 100644 --- a/modules/video_coding/video_codec_initializer_unittest.cc +++ b/modules/video_coding/video_codec_initializer_unittest.cc @@ -42,7 +42,8 @@ static const uint32_t kDefaultTargetBitrateBps = 2000000; static const uint32_t kDefaultMaxBitrateBps = 2000000; static const uint32_t kDefaultMinTransmitBitrateBps = 400000; static const int kDefaultMaxQp = 48; -static const uint32_t kScreenshareTl0BitrateBps = 200000; +static const uint32_t kScreenshareTl0BitrateBps = 120000; +static const uint32_t kScreenshareConferenceTl0BitrateBps = 200000; static const uint32_t kScreenshareCodecTargetBitrateBps = 200000; static const uint32_t kScreenshareDefaultFramerate = 5; // Bitrates for the temporal layers of the higher screenshare simulcast stream. 
@@ -126,7 +127,7 @@ class VideoCodecInitializerTest : public ::testing::Test { VideoStream DefaultScreenshareStream() { VideoStream stream = DefaultStream(); stream.min_bitrate_bps = 30000; - stream.target_bitrate_bps = kScreenshareTl0BitrateBps; + stream.target_bitrate_bps = kScreenshareCodecTargetBitrateBps; stream.max_bitrate_bps = 1000000; stream.max_framerate = kScreenshareDefaultFramerate; stream.num_temporal_layers = 2; @@ -174,6 +175,23 @@ TEST_F(VideoCodecInitializerTest, SingleStreamVp8ScreenshareInactive) { EXPECT_EQ(0U, bitrate_allocation.get_sum_bps()); } +TEST_F(VideoCodecInitializerTest, TemporalLayeredVp8ScreenshareConference) { + SetUpFor(VideoCodecType::kVideoCodecVP8, 1, 2, true); + streams_.push_back(DefaultScreenshareStream()); + EXPECT_TRUE(InitializeCodec()); + bitrate_allocator_->SetLegacyConferenceMode(true); + + EXPECT_EQ(1u, codec_out_.numberOfSimulcastStreams); + EXPECT_EQ(2u, codec_out_.VP8()->numberOfTemporalLayers); + VideoBitrateAllocation bitrate_allocation = + bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( + kScreenshareCodecTargetBitrateBps, kScreenshareDefaultFramerate)); + EXPECT_EQ(kScreenshareCodecTargetBitrateBps, + bitrate_allocation.get_sum_bps()); + EXPECT_EQ(kScreenshareConferenceTl0BitrateBps, + bitrate_allocation.GetBitrate(0, 0)); +} + TEST_F(VideoCodecInitializerTest, TemporalLayeredVp8Screenshare) { SetUpFor(VideoCodecType::kVideoCodecVP8, 1, 2, true); streams_.push_back(DefaultScreenshareStream()); @@ -346,13 +364,17 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[1].active = true; config_.simulcast_layers[2].active = true; EXPECT_TRUE(InitializeCodec()); + EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_TRUE(codec_out_.spatialLayers[1].active); EXPECT_TRUE(codec_out_.spatialLayers[2].active); // Deactivate top layer. 
- config_.simulcast_layers[0].active = false; + config_.simulcast_layers[0].active = true; + config_.simulcast_layers[1].active = true; + config_.simulcast_layers[2].active = false; EXPECT_TRUE(InitializeCodec()); + EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_TRUE(codec_out_.spatialLayers[1].active); EXPECT_FALSE(codec_out_.spatialLayers[2].active); @@ -360,10 +382,48 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { // Deactivate middle layer. config_.simulcast_layers[0].active = true; config_.simulcast_layers[1].active = false; + config_.simulcast_layers[2].active = true; EXPECT_TRUE(InitializeCodec()); + EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_FALSE(codec_out_.spatialLayers[1].active); EXPECT_TRUE(codec_out_.spatialLayers[2].active); + + // Deactivate first layer. + config_.simulcast_layers[0].active = false; + config_.simulcast_layers[1].active = true; + config_.simulcast_layers[2].active = true; + EXPECT_TRUE(InitializeCodec()); + EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 2); + EXPECT_TRUE(codec_out_.spatialLayers[0].active); + EXPECT_TRUE(codec_out_.spatialLayers[1].active); + + // HD singlecast. + config_.simulcast_layers[0].active = false; + config_.simulcast_layers[1].active = false; + config_.simulcast_layers[2].active = true; + EXPECT_TRUE(InitializeCodec()); + EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 1); + EXPECT_TRUE(codec_out_.spatialLayers[0].active); + + // VGA singlecast. + config_.simulcast_layers[0].active = false; + config_.simulcast_layers[1].active = true; + config_.simulcast_layers[2].active = false; + EXPECT_TRUE(InitializeCodec()); + EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 2); + EXPECT_TRUE(codec_out_.spatialLayers[0].active); + EXPECT_FALSE(codec_out_.spatialLayers[1].active); + + // QVGA singlecast. 
+ config_.simulcast_layers[0].active = true; + config_.simulcast_layers[1].active = false; + config_.simulcast_layers[2].active = false; + EXPECT_TRUE(InitializeCodec()); + EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); + EXPECT_TRUE(codec_out_.spatialLayers[0].active); + EXPECT_FALSE(codec_out_.spatialLayers[1].active); + EXPECT_FALSE(codec_out_.spatialLayers[2].active); } } // namespace webrtc diff --git a/modules/video_coding/video_coding_impl.cc b/modules/video_coding/video_coding_impl.cc index 1d12ac93f0..049695d753 100644 --- a/modules/video_coding/video_coding_impl.cc +++ b/modules/video_coding/video_coding_impl.cc @@ -16,7 +16,6 @@ #include "api/video/encoded_image.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/timing.h" -#include "rtc_base/critical_section.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" @@ -57,11 +56,11 @@ class VideoCodingModuleImpl : public VideoCodingModule { void Process() override { receiver_.Process(); } - int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec, - int32_t numberOfCores, - bool requireKeyFrame) override { - return receiver_.RegisterReceiveCodec(receiveCodec, numberOfCores, - requireKeyFrame); + int32_t RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receiveCodec, + int32_t numberOfCores) override { + return receiver_.RegisterReceiveCodec(payload_type, receiveCodec, + numberOfCores); } void RegisterExternalDecoder(VideoDecoder* externalDecoder, diff --git a/modules/video_coding/video_coding_impl.h b/modules/video_coding/video_coding_impl.h index 89353640d6..aee6337e50 100644 --- a/modules/video_coding/video_coding_impl.h +++ b/modules/video_coding/video_coding_impl.h @@ -24,6 +24,7 @@ #include "modules/video_coding/receiver.h" #include "modules/video_coding/timing.h" #include "rtc_base/one_time_event.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" 
#include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" @@ -59,9 +60,9 @@ class VideoReceiver : public Module { VideoReceiver(Clock* clock, VCMTiming* timing); ~VideoReceiver() override; - int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec, - int32_t numberOfCores, - bool requireKeyFrame); + int32_t RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receiveCodec, + int32_t numberOfCores); void RegisterExternalDecoder(VideoDecoder* externalDecoder, uint8_t payloadType); @@ -100,7 +101,7 @@ class VideoReceiver : public Module { rtc::ThreadChecker decoder_thread_checker_; rtc::ThreadChecker module_thread_checker_; Clock* const clock_; - rtc::CriticalSection process_crit_; + Mutex process_mutex_; VCMTiming* _timing; VCMReceiver _receiver; VCMDecodedFrameCallback _decodedFrameCallback; @@ -111,8 +112,8 @@ class VideoReceiver : public Module { VCMPacketRequestCallback* _packetRequestCallback; // Used on both the module and decoder thread. - bool _scheduleKeyRequest RTC_GUARDED_BY(process_crit_); - bool drop_frames_until_keyframe_ RTC_GUARDED_BY(process_crit_); + bool _scheduleKeyRequest RTC_GUARDED_BY(process_mutex_); + bool drop_frames_until_keyframe_ RTC_GUARDED_BY(process_mutex_); // Modified on the construction thread while not attached to the process // thread. Once attached to the process thread, its value is only read @@ -124,7 +125,6 @@ class VideoReceiver : public Module { // over to the decoder thread. 
VCMDecoderDataBase _codecDataBase; - VCMProcessTimer _receiveStatsTimer RTC_GUARDED_BY(module_thread_checker_); VCMProcessTimer _retransmissionTimer RTC_GUARDED_BY(module_thread_checker_); VCMProcessTimer _keyRequestTimer RTC_GUARDED_BY(module_thread_checker_); ThreadUnsafeOneTimeEvent first_frame_received_ diff --git a/modules/video_coding/video_receiver.cc b/modules/video_coding/video_receiver.cc index 44d5526a0a..c2c8f8aa13 100644 --- a/modules/video_coding/video_receiver.cc +++ b/modules/video_coding/video_receiver.cc @@ -16,7 +16,6 @@ #include "api/rtp_headers.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" -#include "modules/include/module_common_types.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/encoded_frame.h" @@ -31,7 +30,6 @@ #include "modules/video_coding/timing.h" #include "modules/video_coding/video_coding_impl.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/one_time_event.h" @@ -53,7 +51,6 @@ VideoReceiver::VideoReceiver(Clock* clock, VCMTiming* timing) drop_frames_until_keyframe_(false), max_nack_list_size_(0), _codecDataBase(), - _receiveStatsTimer(1000, clock_), _retransmissionTimer(10, clock_), _keyRequestTimer(500, clock_) { decoder_thread_checker_.Detach(); @@ -66,20 +63,13 @@ VideoReceiver::~VideoReceiver() { void VideoReceiver::Process() { RTC_DCHECK_RUN_ON(&module_thread_checker_); - // Receive-side statistics - - // TODO(philipel): Remove this if block when we know what to do with - // ReceiveStatisticsProxy::QualitySample. 
- if (_receiveStatsTimer.TimeUntilProcess() == 0) { - _receiveStatsTimer.Processed(); - } // Key frame requests if (_keyRequestTimer.TimeUntilProcess() == 0) { _keyRequestTimer.Processed(); bool request_key_frame = _frameTypeCallback != nullptr; if (request_key_frame) { - rtc::CritScope cs(&process_crit_); + MutexLock lock(&process_mutex_); request_key_frame = _scheduleKeyRequest; } if (request_key_frame) @@ -102,7 +92,7 @@ void VideoReceiver::Process() { ret = RequestKeyFrame(); } if (ret == VCM_OK && !nackList.empty()) { - rtc::CritScope cs(&process_crit_); + MutexLock lock(&process_mutex_); if (_packetRequestCallback != nullptr) { _packetRequestCallback->ResendPackets(&nackList[0], nackList.size()); } @@ -124,11 +114,7 @@ void VideoReceiver::ProcessThreadAttached(ProcessThread* process_thread) { int64_t VideoReceiver::TimeUntilNextProcess() { RTC_DCHECK_RUN_ON(&module_thread_checker_); - int64_t timeUntilNextProcess = _receiveStatsTimer.TimeUntilProcess(); - // We need a Process call more often if we are relying on - // retransmissions - timeUntilNextProcess = - VCM_MIN(timeUntilNextProcess, _retransmissionTimer.TimeUntilProcess()); + int64_t timeUntilNextProcess = _retransmissionTimer.TimeUntilProcess(); timeUntilNextProcess = VCM_MIN(timeUntilNextProcess, _keyRequestTimer.TimeUntilProcess()); @@ -195,7 +181,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) { bool drop_frame = false; { - rtc::CritScope cs(&process_crit_); + MutexLock lock(&process_mutex_); if (drop_frames_until_keyframe_) { // Still getting delta frames, schedule another keyframe request as if // decode failed. @@ -222,9 +208,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) { clock_->TimeInMilliseconds()); if (first_frame_received_()) { - RTC_LOG(LS_INFO) << "Received first " - << (frame->Complete() ? 
"complete" : "incomplete") - << " decodable video frame"; + RTC_LOG(LS_INFO) << "Received first complete decodable video frame"; } const int32_t ret = Decode(*frame); @@ -241,7 +225,7 @@ int32_t VideoReceiver::RequestKeyFrame() { if (ret < 0) { return ret; } - rtc::CritScope cs(&process_crit_); + MutexLock lock(&process_mutex_); _scheduleKeyRequest = false; } else { return VCM_MISSING_CALLBACK; @@ -263,15 +247,15 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) { } // Register possible receive codecs, can be called multiple times -int32_t VideoReceiver::RegisterReceiveCodec(const VideoCodec* receiveCodec, - int32_t numberOfCores, - bool requireKeyFrame) { +int32_t VideoReceiver::RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receiveCodec, + int32_t numberOfCores) { RTC_DCHECK_RUN_ON(&construction_thread_checker_); if (receiveCodec == nullptr) { return VCM_PARAMETER_ERROR; } - if (!_codecDataBase.RegisterReceiveCodec(receiveCodec, numberOfCores, - requireKeyFrame)) { + if (!_codecDataBase.RegisterReceiveCodec(payload_type, receiveCodec, + numberOfCores)) { return -1; } return 0; @@ -303,7 +287,7 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload, // request scheduling to throttle the requests. 
if (ret == VCM_FLUSH_INDICATOR) { { - rtc::CritScope cs(&process_crit_); + MutexLock lock(&process_mutex_); drop_frames_until_keyframe_ = true; } RequestKeyFrame(); diff --git a/modules/video_coding/video_receiver2.cc b/modules/video_coding/video_receiver2.cc index 8eaefbb8da..6b3cb63679 100644 --- a/modules/video_coding/video_receiver2.cc +++ b/modules/video_coding/video_receiver2.cc @@ -95,16 +95,16 @@ int32_t VideoReceiver2::Decode(const VCMEncodedFrame* frame) { } // Register possible receive codecs, can be called multiple times -int32_t VideoReceiver2::RegisterReceiveCodec(const VideoCodec* receiveCodec, - int32_t numberOfCores, - bool requireKeyFrame) { +int32_t VideoReceiver2::RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receiveCodec, + int32_t numberOfCores) { RTC_DCHECK_RUN_ON(&construction_thread_checker_); RTC_DCHECK(!IsDecoderThreadRunning()); if (receiveCodec == nullptr) { return VCM_PARAMETER_ERROR; } - if (!codecDataBase_.RegisterReceiveCodec(receiveCodec, numberOfCores, - requireKeyFrame)) { + if (!codecDataBase_.RegisterReceiveCodec(payload_type, receiveCodec, + numberOfCores)) { return -1; } return 0; diff --git a/modules/video_coding/video_receiver2.h b/modules/video_coding/video_receiver2.h index 202072a560..c7b7b80b6d 100644 --- a/modules/video_coding/video_receiver2.h +++ b/modules/video_coding/video_receiver2.h @@ -30,9 +30,9 @@ class VideoReceiver2 { VideoReceiver2(Clock* clock, VCMTiming* timing); ~VideoReceiver2(); - int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec, - int32_t numberOfCores, - bool requireKeyFrame); + int32_t RegisterReceiveCodec(uint8_t payload_type, + const VideoCodec* receiveCodec, + int32_t numberOfCores); void RegisterExternalDecoder(VideoDecoder* externalDecoder, uint8_t payloadType); diff --git a/modules/video_coding/video_receiver_unittest.cc b/modules/video_coding/video_receiver_unittest.cc index 7526691587..fcd4f449ca 100644 --- a/modules/video_coding/video_receiver_unittest.cc +++ 
b/modules/video_coding/video_receiver_unittest.cc @@ -9,7 +9,6 @@ */ #include "api/test/mock_video_decoder.h" -#include "modules/video_coding/include/mock/mock_vcm_callbacks.h" #include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/timing.h" #include "modules/video_coding/video_coding_impl.h" @@ -25,6 +24,27 @@ namespace webrtc { namespace vcm { namespace { +class MockPacketRequestCallback : public VCMPacketRequestCallback { + public: + MOCK_METHOD(int32_t, + ResendPackets, + (const uint16_t* sequenceNumbers, uint16_t length), + (override)); +}; + +class MockVCMReceiveCallback : public VCMReceiveCallback { + public: + MockVCMReceiveCallback() {} + virtual ~MockVCMReceiveCallback() {} + + MOCK_METHOD(int32_t, + FrameToRender, + (VideoFrame&, absl::optional, int32_t, VideoContentType), + (override)); + MOCK_METHOD(void, OnIncomingPayloadType, (int), (override)); + MOCK_METHOD(void, OnDecoderImplementationName, (const char*), (override)); +}; + class TestVideoReceiver : public ::testing::Test { protected: static const int kUnusedPayloadType = 10; @@ -37,8 +57,8 @@ class TestVideoReceiver : public ::testing::Test { // Register decoder. receiver_.RegisterExternalDecoder(&decoder_, kUnusedPayloadType); webrtc::test::CodecSettings(kVideoCodecVP8, &settings_); - settings_.plType = kUnusedPayloadType; - EXPECT_EQ(0, receiver_.RegisterReceiveCodec(&settings_, 1, true)); + EXPECT_EQ( + 0, receiver_.RegisterReceiveCodec(kUnusedPayloadType, &settings_, 1)); // Set protection mode. 
const size_t kMaxNackListSize = 250; diff --git a/modules/video_processing/BUILD.gn b/modules/video_processing/BUILD.gn index 4354454111..871012ae18 100644 --- a/modules/video_processing/BUILD.gn +++ b/modules/video_processing/BUILD.gn @@ -29,7 +29,6 @@ rtc_library("video_processing") { "..:module_api", "../../api:scoped_refptr", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../common_audio", "../../common_video", @@ -37,7 +36,7 @@ rtc_library("video_processing") { "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../rtc_base/system:arch", - "../../system_wrappers:cpu_features_api", + "../../system_wrappers", "//third_party/libyuv", ] if (build_video_processing_sse2) { @@ -100,7 +99,6 @@ if (rtc_include_tests) { ":video_processing", "../../api:scoped_refptr", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../common_video", "../../test:fileutils", diff --git a/modules/video_processing/OWNERS b/modules/video_processing/OWNERS index fb23261b15..07c2987707 100644 --- a/modules/video_processing/OWNERS +++ b/modules/video_processing/OWNERS @@ -1,7 +1,2 @@ stefan@webrtc.org marpan@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/modules/video_processing/util/denoiser_filter.cc b/modules/video_processing/util/denoiser_filter.cc index d6b5094a5b..0e1570114a 100644 --- a/modules/video_processing/util/denoiser_filter.cc +++ b/modules/video_processing/util/denoiser_filter.cc @@ -41,7 +41,7 @@ std::unique_ptr DenoiserFilter::Create( filter.reset(new DenoiserFilterSSE2()); #else // x86 CPU detection required. 
- if (WebRtc_GetCPUInfo(kSSE2)) { + if (GetCPUInfo(kSSE2)) { filter.reset(new DenoiserFilterSSE2()); } else { filter.reset(new DenoiserFilterC()); diff --git a/modules/video_processing/video_denoiser.cc b/modules/video_processing/video_denoiser.cc index 40568a5ec6..3a18125146 100644 --- a/modules/video_processing/video_denoiser.cc +++ b/modules/video_processing/video_denoiser.cc @@ -235,7 +235,7 @@ rtc::scoped_refptr VideoDenoiser::DenoiseFrame( const uint8_t* y_src = frame->DataY(); int stride_y_src = frame->StrideY(); rtc::scoped_refptr dst = - buffer_pool_.CreateBuffer(width_, height_); + buffer_pool_.CreateI420Buffer(width_, height_); uint8_t* y_dst = dst->MutableDataY(); int stride_y_dst = dst->StrideY(); diff --git a/modules/video_processing/video_denoiser.h b/modules/video_processing/video_denoiser.h index 37d624bb25..eb98c5bc53 100644 --- a/modules/video_processing/video_denoiser.h +++ b/modules/video_processing/video_denoiser.h @@ -15,7 +15,7 @@ #include "api/scoped_refptr.h" #include "api/video/video_frame_buffer.h" -#include "common_video/include/i420_buffer_pool.h" +#include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_processing/util/denoiser_filter.h" #include "modules/video_processing/util/noise_estimation.h" #include "modules/video_processing/util/skin_detection.h" @@ -77,7 +77,7 @@ class VideoDenoiser { std::unique_ptr y_density_; // Save the return values by MbDenoise for each block. std::unique_ptr mb_filter_decision_; - I420BufferPool buffer_pool_; + VideoFrameBufferPool buffer_pool_; rtc::scoped_refptr prev_buffer_; }; diff --git a/native-api.md b/native-api.md index 18d074ea9b..2c193274ad 100644 --- a/native-api.md +++ b/native-api.md @@ -106,3 +106,11 @@ argument `rtc_exclude_metrics_default` to true and GN will define the macro for you. 
[metrics_h]: https://webrtc.googlesource.com/src/+/master/system_wrappers/include/metrics.h + +## `WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR` +The transient suppressor functionality in the audio processing module is not +always used. If you wish to exclude it from the build in order to preserve +binary size, then define the preprocessor macro +`WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR`. If you use GN, you can just set the GN +argument `rtc_exclude_transient_suppressor` to true and GN will define the macro +for you. diff --git a/p2p/BUILD.gn b/p2p/BUILD.gn index ae49deb264..76c8273fba 100644 --- a/p2p/BUILD.gn +++ b/p2p/BUILD.gn @@ -99,19 +99,24 @@ rtc_library("rtc_p2p") { "../rtc_base:checks", "../rtc_base:rtc_numerics", "../rtc_base/experiments:field_trial_parser", - "//third_party/abseil-cpp/absl/memory", + "../rtc_base/synchronization:sequence_checker", # Needed by pseudo_tcp, which should move to a separate target. "../rtc_base:safe_minmax", "../rtc_base:weak_ptr", "../rtc_base/memory:fifo_buffer", "../rtc_base/network:sent_packet", + "../rtc_base/synchronization:mutex", "../rtc_base/system:rtc_export", + "../rtc_base/task_utils:to_queued_task", "../rtc_base/third_party/base64", "../rtc_base/third_party/sigslot", "../system_wrappers:field_trial", "../system_wrappers:metrics", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -127,6 +132,8 @@ if (rtc_include_tests) { "../api:libjingle_peerconnection_api", "../rtc_base", "../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/types:optional", ] @@ -169,6 +176,8 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_tests_utils", "../rtc_base/third_party/sigslot", "../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", 
"//third_party/abseil-cpp/absl/types:optional", ] @@ -221,8 +230,11 @@ if (rtc_include_tests) { "../test:field_trial", "../test:test_support", "//testing/gtest", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -243,8 +255,8 @@ rtc_library("p2p_server_utils") { "../rtc_base:checks", "../rtc_base:rtc_base_tests_utils", "../rtc_base/third_party/sigslot", - "//third_party/abseil-cpp/absl/algorithm:container", ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("libstunprober") { diff --git a/p2p/OWNERS b/p2p/OWNERS index 673cbe3aee..639ef44e68 100644 --- a/p2p/OWNERS +++ b/p2p/OWNERS @@ -1,15 +1,9 @@ -honghaiz@webrtc.org hta@webrtc.org juberti@webrtc.org mflodman@webrtc.org perkj@webrtc.org qingsi@webrtc.org -emadomara@webrtc.org -steveanton@webrtc.org sergeyu@chromium.org tommi@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* +deadbeef@webrtc.org +jonaso@webrtc.org diff --git a/p2p/base/async_stun_tcp_socket_unittest.cc b/p2p/base/async_stun_tcp_socket_unittest.cc index 4f693a5cbe..d1dfac10d2 100644 --- a/p2p/base/async_stun_tcp_socket_unittest.cc +++ b/p2p/base/async_stun_tcp_socket_unittest.cc @@ -106,10 +106,10 @@ class AsyncStunTCPSocketTest : public ::testing::Test, bool Send(const void* data, size_t len) { rtc::PacketOptions options; - size_t ret = + int ret = send_socket_->Send(reinterpret_cast(data), len, options); vss_->ProcessMessagesUntilIdle(); - return (ret == len); + return (ret == static_cast(len)); } bool CheckData(const void* data, int len) { @@ -224,10 +224,6 @@ TEST_F(AsyncStunTCPSocketTest, TestTooSmallMessageBuffer) { // Verifying a legal large turn message. 
TEST_F(AsyncStunTCPSocketTest, TestMaximumSizeTurnPacket) { - // We have problem in getting the SignalWriteEvent from the virtual socket - // server. So increasing the send buffer to 64k. - // TODO(mallinath) - Remove this setting after we fix vss issue. - vss_->set_send_buffer_capacity(64 * 1024); unsigned char packet[65539]; packet[0] = 0x40; packet[1] = 0x00; @@ -238,10 +234,6 @@ TEST_F(AsyncStunTCPSocketTest, TestMaximumSizeTurnPacket) { // Verifying a legal large stun message. TEST_F(AsyncStunTCPSocketTest, TestMaximumSizeStunPacket) { - // We have problem in getting the SignalWriteEvent from the virtual socket - // server. So increasing the send buffer to 64k. - // TODO(mallinath) - Remove this setting after we fix vss issue. - vss_->set_send_buffer_capacity(64 * 1024); unsigned char packet[65552]; packet[0] = 0x00; packet[1] = 0x01; @@ -250,8 +242,9 @@ TEST_F(AsyncStunTCPSocketTest, TestMaximumSizeStunPacket) { EXPECT_TRUE(Send(packet, sizeof(packet))); } -// Investigate why WriteEvent is not signaled from VSS. -TEST_F(AsyncStunTCPSocketTest, DISABLED_TestWithSmallSendBuffer) { +// Test that a turn message is sent completely even if it exceeds the socket +// send buffer capacity. 
+TEST_F(AsyncStunTCPSocketTest, TestWithSmallSendBuffer) { vss_->set_send_buffer_capacity(1); Send(kTurnChannelDataMessageWithOddLength, sizeof(kTurnChannelDataMessageWithOddLength)); diff --git a/p2p/base/basic_async_resolver_factory_unittest.cc b/p2p/base/basic_async_resolver_factory_unittest.cc index 0c21c682fb..8242146bae 100644 --- a/p2p/base/basic_async_resolver_factory_unittest.cc +++ b/p2p/base/basic_async_resolver_factory_unittest.cc @@ -30,6 +30,7 @@ class BasicAsyncResolverFactoryTest : public ::testing::Test, rtc::SocketAddress address("", 0); resolver->Start(address); ASSERT_TRUE_WAIT(address_resolved_, 10000 /*ms*/); + resolver->Destroy(false); } void SetAddressResolved(rtc::AsyncResolverInterface* resolver) { diff --git a/p2p/base/basic_ice_controller.cc b/p2p/base/basic_ice_controller.cc index 09bc4f1f5f..aa20025b2c 100644 --- a/p2p/base/basic_ice_controller.cc +++ b/p2p/base/basic_ice_controller.cc @@ -92,7 +92,7 @@ bool BasicIceController::HasPingableConnection() const { }); } -std::pair BasicIceController::SelectConnectionToPing( +IceControllerInterface::PingResult BasicIceController::SelectConnectionToPing( int64_t last_ping_sent_ms) { // When the selected connection is not receiving or not writable, or any // active connection has not been pinged enough times, use the weak ping @@ -110,8 +110,8 @@ std::pair BasicIceController::SelectConnectionToPing( if (rtc::TimeMillis() >= last_ping_sent_ms + ping_interval) { conn = FindNextPingableConnection(); } - int delay = std::min(ping_interval, check_receiving_interval()); - return std::make_pair(const_cast(conn), delay); + PingResult res(conn, std::min(ping_interval, check_receiving_interval())); + return res; } void BasicIceController::MarkConnectionPinged(const Connection* conn) { diff --git a/p2p/base/basic_ice_controller.h b/p2p/base/basic_ice_controller.h index ae1339fc03..2e462720f3 100644 --- a/p2p/base/basic_ice_controller.h +++ b/p2p/base/basic_ice_controller.h @@ -40,8 +40,8 @@ class 
BasicIceController : public IceControllerInterface { bool HasPingableConnection() const override; - std::pair SelectConnectionToPing( - int64_t last_ping_sent_ms) override; + PingResult SelectConnectionToPing(int64_t last_ping_sent_ms) override; + bool GetUseCandidateAttr(const Connection* conn, NominationMode mode, IceMode remote_ice_mode) const override; diff --git a/p2p/base/basic_packet_socket_factory.cc b/p2p/base/basic_packet_socket_factory.cc index 1476939a3d..8be9079338 100644 --- a/p2p/base/basic_packet_socket_factory.cc +++ b/p2p/base/basic_packet_socket_factory.cc @@ -157,7 +157,7 @@ AsyncPacketSocket* BasicPacketSocketFactory::CreateClientTcpSocket( socket = ssl_adapter; - if (ssl_adapter->StartSSL(remote_address.hostname().c_str(), false) != 0) { + if (ssl_adapter->StartSSL(remote_address.hostname().c_str()) != 0) { delete ssl_adapter; return NULL; } diff --git a/p2p/base/connection.cc b/p2p/base/connection.cc index e50390901f..fe6042102c 100644 --- a/p2p/base/connection.cc +++ b/p2p/base/connection.cc @@ -106,18 +106,24 @@ webrtc::IceCandidatePairAddressFamily GetAddressFamilyByInt( } webrtc::IceCandidateNetworkType ConvertNetworkType(rtc::AdapterType type) { - if (type == rtc::ADAPTER_TYPE_ETHERNET) { - return webrtc::IceCandidateNetworkType::kEthernet; - } else if (type == rtc::ADAPTER_TYPE_LOOPBACK) { - return webrtc::IceCandidateNetworkType::kLoopback; - } else if (type == rtc::ADAPTER_TYPE_WIFI) { - return webrtc::IceCandidateNetworkType::kWifi; - } else if (type == rtc::ADAPTER_TYPE_VPN) { - return webrtc::IceCandidateNetworkType::kVpn; - } else if (type == rtc::ADAPTER_TYPE_CELLULAR) { - return webrtc::IceCandidateNetworkType::kCellular; + switch (type) { + case rtc::ADAPTER_TYPE_ETHERNET: + return webrtc::IceCandidateNetworkType::kEthernet; + case rtc::ADAPTER_TYPE_LOOPBACK: + return webrtc::IceCandidateNetworkType::kLoopback; + case rtc::ADAPTER_TYPE_WIFI: + return webrtc::IceCandidateNetworkType::kWifi; + case rtc::ADAPTER_TYPE_VPN: + return 
webrtc::IceCandidateNetworkType::kVpn; + case rtc::ADAPTER_TYPE_CELLULAR: + case rtc::ADAPTER_TYPE_CELLULAR_2G: + case rtc::ADAPTER_TYPE_CELLULAR_3G: + case rtc::ADAPTER_TYPE_CELLULAR_4G: + case rtc::ADAPTER_TYPE_CELLULAR_5G: + return webrtc::IceCandidateNetworkType::kCellular; + default: + return webrtc::IceCandidateNetworkType::kUnknown; } - return webrtc::IceCandidateNetworkType::kUnknown; } // When we don't have any RTT data, we have to pick something reasonable. We @@ -181,13 +187,13 @@ void ConnectionRequest::Prepare(StunMessage* request) { uint32_t network_info = connection_->port()->Network()->id(); network_info = (network_info << 16) | connection_->port()->network_cost(); request->AddAttribute(std::make_unique( - STUN_ATTR_NETWORK_INFO, network_info)); + STUN_ATTR_GOOG_NETWORK_INFO, network_info)); if (webrtc::field_trial::IsEnabled( "WebRTC-PiggybackIceCheckAcknowledgement") && connection_->last_ping_id_received()) { request->AddAttribute(std::make_unique( - STUN_ATTR_LAST_ICE_CHECK_RECEIVED, + STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED, connection_->last_ping_id_received().value())); } @@ -455,6 +461,7 @@ void Connection::OnReadPacket(const char* data, last_data_received_ = rtc::TimeMillis(); UpdateReceiving(last_data_received_); recv_rate_tracker_.AddSamples(size); + stats_.packets_received++; SignalReadPacket(this, data, size, packet_time_us); // If timed out sending writability checks, start up again @@ -609,7 +616,7 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { // Note: If packets are re-ordered, we may get incorrect network cost // temporarily, but it should get the correct value shortly after that. 
const StunUInt32Attribute* network_attr = - msg->GetUInt32(STUN_ATTR_NETWORK_INFO); + msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); if (network_attr) { uint32_t network_info = network_attr->value(); uint16_t network_cost = static_cast(network_info); @@ -861,7 +868,7 @@ void Connection::HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg) { RTC_DCHECK(msg->type() == STUN_BINDING_REQUEST || msg->type() == GOOG_PING_REQUEST); const StunByteStringAttribute* last_ice_check_received_attr = - msg->GetByteString(STUN_ATTR_LAST_ICE_CHECK_RECEIVED); + msg->GetByteString(STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED); if (last_ice_check_received_attr) { const std::string request_id = last_ice_check_received_attr->GetString(); auto iter = absl::c_find_if( @@ -912,12 +919,31 @@ void Connection::ReceivedPingResponse( bool Connection::dead(int64_t now) const { if (last_received() > 0) { - // If it has ever received anything, we keep it alive until it hasn't - // received anything for DEAD_CONNECTION_RECEIVE_TIMEOUT. This covers the - // normal case of a successfully used connection that stops working. This - // also allows a remote peer to continue pinging over a locally inactive - // (pruned) connection. - return (now > (last_received() + DEAD_CONNECTION_RECEIVE_TIMEOUT)); + // If it has ever received anything, we keep it alive + // - if it has recevied last DEAD_CONNECTION_RECEIVE_TIMEOUT (30s) + // - if it has a ping outstanding shorter than + // DEAD_CONNECTION_RECEIVE_TIMEOUT (30s) + // - else if IDLE let it live field_trials_->dead_connection_timeout_ms + // + // This covers the normal case of a successfully used connection that stops + // working. This also allows a remote peer to continue pinging over a + // locally inactive (pruned) connection. This also allows the local agent to + // ping with longer interval than 30s as long as it shorter than + // |dead_connection_timeout_ms|. 
+ if (now <= (last_received() + DEAD_CONNECTION_RECEIVE_TIMEOUT)) { + // Not dead since we have received the last 30s. + return false; + } + if (!pings_since_last_response_.empty()) { + // Outstanding pings: let it live until the ping is unreplied for + // DEAD_CONNECTION_RECEIVE_TIMEOUT. + return now > (pings_since_last_response_[0].sent_time + + DEAD_CONNECTION_RECEIVE_TIMEOUT); + } + + // No outstanding pings: let it live until + // field_trials_->dead_connection_timeout_ms has passed. + return now > (last_received() + field_trials_->dead_connection_timeout_ms); } if (active()) { @@ -1265,24 +1291,16 @@ void Connection::MaybeUpdateLocalCandidate(ConnectionRequest* request, const uint32_t priority = priority_attr->value(); std::string id = rtc::CreateRandomString(8); - Candidate new_local_candidate; + // Create a peer-reflexive candidate based on the local candidate. + Candidate new_local_candidate(local_candidate()); new_local_candidate.set_id(id); - new_local_candidate.set_component(local_candidate().component()); new_local_candidate.set_type(PRFLX_PORT_TYPE); - new_local_candidate.set_protocol(local_candidate().protocol()); new_local_candidate.set_address(addr->GetAddress()); new_local_candidate.set_priority(priority); - new_local_candidate.set_username(local_candidate().username()); - new_local_candidate.set_password(local_candidate().password()); - new_local_candidate.set_network_name(local_candidate().network_name()); - new_local_candidate.set_network_type(local_candidate().network_type()); new_local_candidate.set_related_address(local_candidate().address()); - new_local_candidate.set_generation(local_candidate().generation()); new_local_candidate.set_foundation(Port::ComputeFoundation( PRFLX_PORT_TYPE, local_candidate().protocol(), local_candidate().relay_protocol(), local_candidate().address())); - new_local_candidate.set_network_id(local_candidate().network_id()); - new_local_candidate.set_network_cost(local_candidate().network_cost()); // Change the 
local candidate of this Connection to the new prflx candidate. RTC_LOG(LS_INFO) << ToString() << ": Updating local candidate type to prflx."; @@ -1334,6 +1352,15 @@ bool Connection::ShouldSendGoogPing(const StunMessage* message) { return false; } +void Connection::ForgetLearnedState() { + RTC_LOG(LS_INFO) << ToString() << ": Connection forget learned state"; + requests_.Clear(); + receiving_ = false; + write_state_ = STATE_WRITE_INIT; + rtt_estimate_.Reset(); + pings_since_last_response_.clear(); +} + ProxyConnection::ProxyConnection(Port* port, size_t index, const Candidate& remote_candidate) diff --git a/p2p/base/connection.h b/p2p/base/connection.h index 9f3ad21125..88e930c216 100644 --- a/p2p/base/connection.h +++ b/p2p/base/connection.h @@ -65,13 +65,13 @@ class ConnectionRequest : public StunRequest { int resend_delay() override; private: - Connection* connection_; + Connection* const connection_; }; // Represents a communication link between a port on the local client and a // port on the remote client. class Connection : public CandidatePairInterface, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: struct SentPing { @@ -86,7 +86,7 @@ class Connection : public CandidatePairInterface, ~Connection() override; // A unique ID assigned when the connection is created. - uint32_t id() { return id_; } + uint32_t id() const { return id_; } // Implementation of virtual methods in CandidatePairInterface. // Returns the description of the local port @@ -303,6 +303,20 @@ class Connection : public CandidatePairInterface, return rtt_estimate_; } + // Reset the connection to a state of a newly connected. 
+ // - STATE_WRITE_INIT + // - receving = false + // - throw away all pending request + // - reset RttEstimate + // + // Keep the following unchanged: + // - connected + // - remote_candidate + // - statistics + // + // Does not trigger SignalStateChange + void ForgetLearnedState(); + void SendStunBindingResponse(const StunMessage* request); void SendGoogPingResponse(const StunMessage* request); void SendResponseMessage(const StunMessage& response); diff --git a/p2p/base/connection_info.cc b/p2p/base/connection_info.cc index a4f8036769..ebea2ab5b0 100644 --- a/p2p/base/connection_info.cc +++ b/p2p/base/connection_info.cc @@ -28,6 +28,7 @@ ConnectionInfo::ConnectionInfo() sent_ping_responses(0), recv_total_bytes(0), recv_bytes_second(0), + packets_received(0), recv_ping_requests(0), recv_ping_responses(0), key(nullptr), diff --git a/p2p/base/connection_info.h b/p2p/base/connection_info.h index a62e8aec00..b5e1c14433 100644 --- a/p2p/base/connection_info.h +++ b/p2p/base/connection_info.h @@ -54,6 +54,7 @@ struct ConnectionInfo { size_t recv_total_bytes; // Total bytes received on this connection. size_t recv_bytes_second; // Bps over the last measurement interval. + size_t packets_received; // Number of packets that were received. size_t recv_ping_requests; // Number of STUN ping request received. size_t recv_ping_responses; // Number of STUN ping response received. Candidate local_candidate; // The local candidate for this connection. 
diff --git a/p2p/base/default_ice_transport_factory.cc b/p2p/base/default_ice_transport_factory.cc index f4b182efdf..d4395423ce 100644 --- a/p2p/base/default_ice_transport_factory.cc +++ b/p2p/base/default_ice_transport_factory.cc @@ -41,12 +41,13 @@ DefaultIceTransport::~DefaultIceTransport() { rtc::scoped_refptr DefaultIceTransportFactory::CreateIceTransport( const std::string& transport_name, + cricket::MediaType media_type, int component, IceTransportInit init) { BasicIceControllerFactory factory; return new rtc::RefCountedObject( std::make_unique( - transport_name, component, init.port_allocator(), + transport_name, media_type, component, init.port_allocator(), init.async_resolver_factory(), init.event_log(), &factory)); } diff --git a/p2p/base/default_ice_transport_factory.h b/p2p/base/default_ice_transport_factory.h index 4834c9ada7..41f37689ea 100644 --- a/p2p/base/default_ice_transport_factory.h +++ b/p2p/base/default_ice_transport_factory.h @@ -49,6 +49,7 @@ class DefaultIceTransportFactory : public IceTransportFactory { // Must be called on the network thread and returns a DefaultIceTransport. rtc::scoped_refptr CreateIceTransport( const std::string& transport_name, + cricket::MediaType media_type, int component, IceTransportInit init) override; }; diff --git a/p2p/base/dtls_transport.cc b/p2p/base/dtls_transport.cc index 9d49c09894..d45cb1b279 100644 --- a/p2p/base/dtls_transport.cc +++ b/p2p/base/dtls_transport.cc @@ -14,6 +14,7 @@ #include #include +#include "absl/memory/memory.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" #include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" @@ -36,7 +37,10 @@ static const size_t kMinRtpPacketLen = 12; // Maximum number of pending packets in the queue. Packets are read immediately // after they have been written, so a capacity of "1" is sufficient. 
-static const size_t kMaxPendingPackets = 1; +// +// However, this bug seems to indicate that's not the case: crbug.com/1063834 +// So, temporarily increasing it to 2 to see if that makes a difference. +static const size_t kMaxPendingPackets = 2; // Minimum and maximum values for the initial DTLS handshake timeout. We'll pick // an initial timeout based on ICE RTT estimates, but clamp it to this range. @@ -69,6 +73,8 @@ rtc::StreamResult StreamInterfaceChannel::Read(void* buffer, size_t buffer_len, size_t* read, int* error) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (state_ == rtc::SS_CLOSED) return rtc::SR_EOS; if (state_ == rtc::SS_OPENING) @@ -85,6 +91,7 @@ rtc::StreamResult StreamInterfaceChannel::Write(const void* data, size_t data_len, size_t* written, int* error) { + RTC_DCHECK_RUN_ON(&sequence_checker_); // Always succeeds, since this is an unreliable transport anyway. // TODO(zhihuang): Should this block if ice_transport_'s temporarily // unwritable? @@ -98,20 +105,29 @@ rtc::StreamResult StreamInterfaceChannel::Write(const void* data, } bool StreamInterfaceChannel::OnPacketReceived(const char* data, size_t size) { - // We force a read event here to ensure that we don't overflow our queue. - bool ret = packets_.WriteBack(data, size, NULL); - RTC_CHECK(ret) << "Failed to write packet to queue."; - if (ret) { - SignalEvent(this, rtc::SE_READ, 0); + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (packets_.size() > 0) { + RTC_LOG(LS_WARNING) << "Packet already in queue."; } + bool ret = packets_.WriteBack(data, size, NULL); + if (!ret) { + // Somehow we received another packet before the SSLStreamAdapter read the + // previous one out of our temporary buffer. In this case, we'll log an + // error and still signal the read event, hoping that it will read the + // packet currently in packets_. 
+ RTC_LOG(LS_ERROR) << "Failed to write packet to queue."; + } + SignalEvent(this, rtc::SE_READ, 0); return ret; } rtc::StreamState StreamInterfaceChannel::GetState() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); return state_; } void StreamInterfaceChannel::Close() { + RTC_DCHECK_RUN_ON(&sequence_checker_); packets_.Clear(); state_ = rtc::SS_CLOSED; } @@ -120,6 +136,7 @@ DtlsTransport::DtlsTransport(IceTransportInternal* ice_transport, const webrtc::CryptoOptions& crypto_options, webrtc::RtcEventLog* event_log) : transport_name_(ice_transport->transport_name()), + media_type_(ice_transport->media_type()), component_(ice_transport->component()), ice_transport_(ice_transport), downward_(NULL), @@ -145,6 +162,10 @@ const std::string& DtlsTransport::transport_name() const { return transport_name_; } +cricket::MediaType DtlsTransport::media_type() const { + return media_type_; +} + int DtlsTransport::component() const { return component_; } @@ -325,18 +346,19 @@ bool DtlsTransport::ExportKeyingMaterial(const std::string& label, bool DtlsTransport::SetupDtls() { RTC_DCHECK(dtls_role_); - StreamInterfaceChannel* downward = new StreamInterfaceChannel(ice_transport_); + { + auto downward = std::make_unique(ice_transport_); + StreamInterfaceChannel* downward_ptr = downward.get(); - dtls_.reset(rtc::SSLStreamAdapter::Create(downward)); - if (!dtls_) { - RTC_LOG(LS_ERROR) << ToString() << ": Failed to create DTLS adapter."; - delete downward; - return false; + dtls_ = rtc::SSLStreamAdapter::Create(std::move(downward)); + if (!dtls_) { + RTC_LOG(LS_ERROR) << ToString() << ": Failed to create DTLS adapter."; + return false; + } + downward_ = downward_ptr; } - downward_ = downward; - - dtls_->SetIdentity(local_certificate_->identity()->GetReference()); + dtls_->SetIdentity(local_certificate_->identity()->Clone()); dtls_->SetMode(rtc::SSL_MODE_DTLS); dtls_->SetMaxProtocolVersion(ssl_max_version_); dtls_->SetServerRole(*dtls_role_); diff --git a/p2p/base/dtls_transport.h 
b/p2p/base/dtls_transport.h index 89156a15d1..889f2f07dc 100644 --- a/p2p/base/dtls_transport.h +++ b/p2p/base/dtls_transport.h @@ -24,6 +24,7 @@ #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_checker.h" namespace rtc { @@ -54,9 +55,10 @@ class StreamInterfaceChannel : public rtc::StreamInterface { int* error) override; private: - IceTransportInternal* ice_transport_; // owned by DtlsTransport - rtc::StreamState state_; - rtc::BufferQueue packets_; + webrtc::SequenceChecker sequence_checker_; + IceTransportInternal* const ice_transport_; // owned by DtlsTransport + rtc::StreamState state_ RTC_GUARDED_BY(sequence_checker_); + rtc::BufferQueue packets_ RTC_GUARDED_BY(sequence_checker_); RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterfaceChannel); }; @@ -108,6 +110,8 @@ class DtlsTransport : public DtlsTransportInternal { const webrtc::CryptoOptions& crypto_options() const override; DtlsTransportState dtls_state() const override; const std::string& transport_name() const override; + cricket::MediaType media_type() const override; + int component() const override; // DTLS is active if a local certificate was set. Otherwise this acts in a @@ -223,6 +227,7 @@ class DtlsTransport : public DtlsTransportInternal { rtc::ThreadChecker thread_checker_; std::string transport_name_; + cricket::MediaType media_type_; int component_; DtlsTransportState dtls_state_ = DTLS_TRANSPORT_NEW; // Underlying ice_transport, not owned by this class. 
diff --git a/p2p/base/dtls_transport_unittest.cc b/p2p/base/dtls_transport_unittest.cc index 287264da9e..6dab872bc6 100644 --- a/p2p/base/dtls_transport_unittest.cc +++ b/p2p/base/dtls_transport_unittest.cc @@ -66,8 +66,7 @@ class DtlsTestClient : public sigslot::has_slots<> { explicit DtlsTestClient(const std::string& name) : name_(name) {} void CreateCertificate(rtc::KeyType key_type) { certificate_ = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate(name_, key_type))); + rtc::RTCCertificate::Create(rtc::SSLIdentity::Create(name_, key_type)); } const rtc::scoped_refptr& certificate() { return certificate_; @@ -77,7 +76,7 @@ class DtlsTestClient : public sigslot::has_slots<> { } // Set up fake ICE transport and real DTLS transport under test. void SetupTransports(IceRole role, int async_delay_ms = 0) { - fake_ice_transport_.reset(new FakeIceTransport("fake", 0)); + fake_ice_transport_.reset(new FakeIceTransport("fake", cricket::MEDIA_TYPE_VIDEO, 0)); fake_ice_transport_->SetAsync(true); fake_ice_transport_->SetAsyncDelay(async_delay_ms); fake_ice_transport_->SetIceRole(role); @@ -584,9 +583,10 @@ TEST_F(DtlsTransportTest, TestRetransmissionSchedule) { // millisecond before the expected time and verify that no unexpected // retransmissions were sent. Then advance it the final millisecond and // verify that the expected retransmission was sent. 
- fake_clock_.AdvanceTime(webrtc::TimeDelta::ms(timeout_schedule_ms[i] - 1)); + fake_clock_.AdvanceTime( + webrtc::TimeDelta::Millis(timeout_schedule_ms[i] - 1)); EXPECT_EQ(expected_hellos, client1_.received_dtls_client_hellos()); - fake_clock_.AdvanceTime(webrtc::TimeDelta::ms(1)); + fake_clock_.AdvanceTime(webrtc::TimeDelta::Millis(1)); EXPECT_EQ(++expected_hellos, client1_.received_dtls_client_hellos()); } } diff --git a/p2p/base/fake_ice_transport.h b/p2p/base/fake_ice_transport.h index d0fa1ea8cc..8c53b7d8ce 100644 --- a/p2p/base/fake_ice_transport.h +++ b/p2p/base/fake_ice_transport.h @@ -28,9 +28,11 @@ namespace cricket { class FakeIceTransport : public IceTransportInternal { public: explicit FakeIceTransport(const std::string& name, + cricket::MediaType media_type, int component, rtc::Thread* network_thread = nullptr) : name_(name), + media_type_(media_type), component_(component), network_thread_(network_thread ? network_thread : rtc::Thread::Current()) {} @@ -109,13 +111,22 @@ class FakeIceTransport : public IceTransportInternal { // Fake IceTransportInternal implementation. 
const std::string& transport_name() const override { return name_; } + cricket::MediaType media_type() const override { return media_type_; } int component() const override { return component_; } uint64_t IceTiebreaker() const { return tiebreaker_; } IceMode remote_ice_mode() const { return remote_ice_mode_; } - const std::string& ice_ufrag() const { return ice_ufrag_; } - const std::string& ice_pwd() const { return ice_pwd_; } - const std::string& remote_ice_ufrag() const { return remote_ice_ufrag_; } - const std::string& remote_ice_pwd() const { return remote_ice_pwd_; } + const std::string& ice_ufrag() const { return ice_parameters_.ufrag; } + const std::string& ice_pwd() const { return ice_parameters_.pwd; } + const std::string& remote_ice_ufrag() const { + return remote_ice_parameters_.ufrag; + } + const std::string& remote_ice_pwd() const { + return remote_ice_parameters_.pwd; + } + const IceParameters& ice_parameters() const { return ice_parameters_; } + const IceParameters& remote_ice_parameters() const { + return remote_ice_parameters_; + } IceTransportState GetState() const override { if (legacy_transport_state_) { @@ -157,12 +168,10 @@ class FakeIceTransport : public IceTransportInternal { tiebreaker_ = tiebreaker; } void SetIceParameters(const IceParameters& ice_params) override { - ice_ufrag_ = ice_params.ufrag; - ice_pwd_ = ice_params.pwd; + ice_parameters_ = ice_params; } void SetRemoteIceParameters(const IceParameters& params) override { - remote_ice_ufrag_ = params.ufrag; - remote_ice_pwd_ = params.pwd; + remote_ice_parameters_ = params; } void SetRemoteIceMode(IceMode mode) override { remote_ice_mode_ = mode; } @@ -304,6 +313,7 @@ class FakeIceTransport : public IceTransportInternal { rtc::AsyncInvoker invoker_; std::string name_; + cricket::MediaType media_type_; int component_; FakeIceTransport* dest_ = nullptr; bool async_ = false; @@ -312,10 +322,8 @@ class FakeIceTransport : public IceTransportInternal { IceConfig ice_config_; IceRole role_ = 
ICEROLE_UNKNOWN; uint64_t tiebreaker_ = 0; - std::string ice_ufrag_; - std::string ice_pwd_; - std::string remote_ice_ufrag_; - std::string remote_ice_pwd_; + IceParameters ice_parameters_; + IceParameters remote_ice_parameters_; IceMode remote_ice_mode_ = ICEMODE_FULL; size_t connection_count_ = 0; absl::optional transport_state_; diff --git a/p2p/base/fake_port_allocator.h b/p2p/base/fake_port_allocator.h index 266bb7956b..e8dbf9205d 100644 --- a/p2p/base/fake_port_allocator.h +++ b/p2p/base/fake_port_allocator.h @@ -79,10 +79,12 @@ class FakePortAllocatorSession : public PortAllocatorSession { rtc::Thread* network_thread, rtc::PacketSocketFactory* factory, const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd) : PortAllocatorSession(content_name, + media_type, component, ice_ufrag, ice_pwd, @@ -231,11 +233,12 @@ class FakePortAllocator : public cricket::PortAllocator { cricket::PortAllocatorSession* CreateSessionInternal( const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd) override { return new FakePortAllocatorSession(this, network_thread_, factory_, - content_name, component, ice_ufrag, + content_name, media_type, component, ice_ufrag, ice_pwd); } diff --git a/p2p/base/ice_controller_factory_interface.h b/p2p/base/ice_controller_factory_interface.h index a859c07be9..bae8b8f19d 100644 --- a/p2p/base/ice_controller_factory_interface.h +++ b/p2p/base/ice_controller_factory_interface.h @@ -12,6 +12,7 @@ #define P2P_BASE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ #include +#include #include "p2p/base/ice_controller_interface.h" #include "p2p/base/ice_transport_internal.h" @@ -24,6 +25,7 @@ struct IceControllerFactoryArgs { std::function ice_role_func; std::function is_connection_pruned_func; const IceFieldTrials* ice_field_trials; + std::string ice_controller_field_trials; }; class 
IceControllerFactoryInterface { diff --git a/p2p/base/ice_controller_interface.h b/p2p/base/ice_controller_interface.h index 43bb88471b..d5dc29e782 100644 --- a/p2p/base/ice_controller_interface.h +++ b/p2p/base/ice_controller_interface.h @@ -51,12 +51,20 @@ struct IceControllerEvent { // - which connection to ping // - which connection to use // - which connection to prune +// - which connection to forget learned state on // -// P2PTransportChannel creates a |Connection| and adds a const pointer -// to the IceController using |AddConnection|, i.e the IceController -// should not call any non-const methods on a Connection. +// The P2PTransportChannel owns (creates and destroys) Connections, +// but P2PTransportChannel gives const pointers to the IceController using +// |AddConnection|, i.e the IceController should not call any non-const methods +// on a Connection but signal back in the interface if any mutable function +// shall be called. // -// The IceController shall keeps track of all connections added +// Currently these are limited to: +// Connection::Ping - returned in PingResult +// Connection::Prune - returned in PruneConnections +// Connection::ForgetLearnedState - returned in SwitchResult +// +// The IceController shall keep track of all connections added // (and not destroyed) and give them back using the connections()-function- // // When a Connection gets destroyed @@ -71,6 +79,27 @@ class IceControllerInterface { // An optional recheck event for when a Switch() should be attempted again. absl::optional recheck_event; + + // A vector with connections to run ForgetLearnedState on. + std::vector connections_to_forget_state_on; + }; + + // This represents the result of a call to SelectConnectionToPing. + struct PingResult { + PingResult(const Connection* conn, int _recheck_delay_ms) + : connection(conn), recheck_delay_ms(_recheck_delay_ms) {} + + // Connection that we should (optionally) ping.
+ const absl::optional connection; + + // The delay before P2PTransportChannel shall call SelectConnectionToPing() + // again. + // + // Since the IceController determines which connection to ping and + // only returns one connection at a time, the recheck_delay_ms does not have + // any obvious implication on bitrate for pings. E.g the recheck_delay_ms + // will be shorter if there are more connections available. + const int recheck_delay_ms = 0; }; virtual ~IceControllerInterface() = default; @@ -90,8 +119,7 @@ class IceControllerInterface { virtual bool HasPingableConnection() const = 0; // Select a connection to Ping, or nullptr if none. - virtual std::pair SelectConnectionToPing( - int64_t last_ping_sent_ms) = 0; + virtual PingResult SelectConnectionToPing(int64_t last_ping_sent_ms) = 0; // Compute the "STUN_ATTR_USE_CANDIDATE" for |conn|. virtual bool GetUseCandidateAttr(const Connection* conn, diff --git a/p2p/base/mock_async_resolver.h b/p2p/base/mock_async_resolver.h index 7d3be5b0b0..8bc0eb9cff 100644 --- a/p2p/base/mock_async_resolver.h +++ b/p2p/base/mock_async_resolver.h @@ -29,14 +29,17 @@ class MockAsyncResolver : public AsyncResolverInterface { } ~MockAsyncResolver() = default; - MOCK_METHOD1(Start, void(const rtc::SocketAddress&)); - MOCK_CONST_METHOD2(GetResolvedAddress, bool(int family, SocketAddress* addr)); - MOCK_CONST_METHOD0(GetError, int()); + MOCK_METHOD(void, Start, (const rtc::SocketAddress&), (override)); + MOCK_METHOD(bool, + GetResolvedAddress, + (int family, SocketAddress* addr), + (const, override)); + MOCK_METHOD(int, GetError, (), (const, override)); // Note that this won't delete the object like AsyncResolverInterface says in // order to avoid sanitizer failures caused by this being a synchronous // implementation. The test code should delete the object instead. 
- MOCK_METHOD1(Destroy, void(bool)); + MOCK_METHOD(void, Destroy, (bool), (override)); }; } // namespace rtc @@ -45,7 +48,7 @@ namespace webrtc { class MockAsyncResolverFactory : public AsyncResolverFactory { public: - MOCK_METHOD0(Create, rtc::AsyncResolverInterface*()); + MOCK_METHOD(rtc::AsyncResolverInterface*, Create, (), (override)); }; } // namespace webrtc diff --git a/p2p/base/mock_ice_transport.h b/p2p/base/mock_ice_transport.h index 1436cacb50..f863158cf8 100644 --- a/p2p/base/mock_ice_transport.h +++ b/p2p/base/mock_ice_transport.h @@ -32,15 +32,20 @@ class MockIceTransport : public IceTransportInternal { SignalWritableState(this); } - MOCK_METHOD4(SendPacket, - int(const char* data, - size_t len, - const rtc::PacketOptions& options, - int flags)); - MOCK_METHOD2(SetOption, int(rtc::Socket::Option opt, int value)); - MOCK_METHOD0(GetError, int()); - MOCK_CONST_METHOD0(GetIceRole, cricket::IceRole()); - MOCK_METHOD1(GetStats, bool(cricket::IceTransportStats* ice_transport_stats)); + MOCK_METHOD(int, + SendPacket, + (const char* data, + size_t len, + const rtc::PacketOptions& options, + int flags), + (override)); + MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override)); + MOCK_METHOD(int, GetError, (), (override)); + MOCK_METHOD(cricket::IceRole, GetIceRole, (), (const, override)); + MOCK_METHOD(bool, + GetStats, + (cricket::IceTransportStats * ice_transport_stats), + (override)); IceTransportState GetState() const override { return IceTransportState::STATE_INIT; @@ -50,6 +55,7 @@ class MockIceTransport : public IceTransportInternal { } const std::string& transport_name() const override { return transport_name_; } + cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_VIDEO; } int component() const override { return 0; } void SetIceRole(IceRole role) override {} void SetIceTiebreaker(uint64_t tiebreaker) override {} diff --git a/p2p/base/p2p_transport_channel.cc b/p2p/base/p2p_transport_channel.cc index 
4e18cd43cb..92eff3f791 100644 --- a/p2p/base/p2p_transport_channel.cc +++ b/p2p/base/p2p_transport_channel.cc @@ -30,6 +30,7 @@ #include "rtc_base/net_helper.h" #include "rtc_base/net_helpers.h" #include "rtc_base/string_encode.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" @@ -57,6 +58,51 @@ uint32_t GetWeakPingIntervalInFieldTrial() { return cricket::WEAK_PING_INTERVAL; } +rtc::AdapterType GuessAdapterTypeFromNetworkCost(int network_cost) { + // The current network costs have been unchanged since they were added + // to webrtc. If they ever were to change we would need to reconsider + // this method. + switch (network_cost) { + case rtc::kNetworkCostMin: + return rtc::ADAPTER_TYPE_ETHERNET; + case rtc::kNetworkCostLow: + return rtc::ADAPTER_TYPE_WIFI; + case rtc::kNetworkCostCellular: + return rtc::ADAPTER_TYPE_CELLULAR; + case rtc::kNetworkCostCellular2G: + return rtc::ADAPTER_TYPE_CELLULAR_2G; + case rtc::kNetworkCostCellular3G: + return rtc::ADAPTER_TYPE_CELLULAR_3G; + case rtc::kNetworkCostCellular4G: + return rtc::ADAPTER_TYPE_CELLULAR_4G; + case rtc::kNetworkCostCellular5G: + return rtc::ADAPTER_TYPE_CELLULAR_5G; + case rtc::kNetworkCostUnknown: + return rtc::ADAPTER_TYPE_UNKNOWN; + case rtc::kNetworkCostMax: + return rtc::ADAPTER_TYPE_ANY; + } + return rtc::ADAPTER_TYPE_UNKNOWN; +} + +rtc::RouteEndpoint CreateRouteEndpointFromCandidate( + bool local, + const cricket::Candidate& candidate, + bool uses_turn) { + auto adapter_type = candidate.network_type(); + if (!local && adapter_type == rtc::ADAPTER_TYPE_UNKNOWN) { + adapter_type = GuessAdapterTypeFromNetworkCost(candidate.network_cost()); + } + + // TODO(bugs.webrtc.org/9446) : Rewrite if information about remote network + // adapter becomes available. The implication of this implementation is that + // we will only ever report 1 adapter per type. 
In practice this is probably + // fine, since the endpoint also contains network-id. + uint16_t adapter_id = static_cast(adapter_type); + return rtc::RouteEndpoint(adapter_type, adapter_id, candidate.network_id(), + uses_turn); +} + } // unnamed namespace namespace cricket { @@ -76,9 +122,11 @@ bool IceCredentialsChanged(const std::string& old_ufrag, } P2PTransportChannel::P2PTransportChannel(const std::string& transport_name, + cricket::MediaType media_type, int component, PortAllocator* allocator) : P2PTransportChannel(transport_name, + media_type, component, allocator, nullptr, @@ -86,12 +134,14 @@ P2PTransportChannel::P2PTransportChannel(const std::string& transport_name, P2PTransportChannel::P2PTransportChannel( const std::string& transport_name, + cricket::MediaType media_type, int component, PortAllocator* allocator, webrtc::AsyncResolverFactory* async_resolver_factory, webrtc::RtcEventLog* event_log, IceControllerFactoryInterface* ice_controller_factory) : transport_name_(transport_name), + media_type_(media_type), component_(component), allocator_(allocator), async_resolver_factory_(async_resolver_factory), @@ -129,8 +179,7 @@ P2PTransportChannel::P2PTransportChannel( ice_event_log_.set_event_log(event_log); IceControllerFactoryArgs args{ - [this] { return GetState(); }, - [this] { return GetIceRole(); }, + [this] { return GetState(); }, [this] { return GetIceRole(); }, [this](const Connection* connection) { // TODO(webrtc:10647/jonaso): Figure out a way to remove friendship // between P2PTransportChannel and Connection. 
@@ -138,7 +187,7 @@ P2PTransportChannel::P2PTransportChannel( IsRemoteCandidatePruned(connection->remote_candidate()); }, &field_trials_, - }; + webrtc::field_trial::FindFullName("WebRTC-IceControllerFieldTrials")}; if (ice_controller_factory != nullptr) { ice_controller_ = ice_controller_factory->Create(args); } else { @@ -230,8 +279,7 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection( if (result.connection.has_value()) { RTC_LOG(LS_INFO) << "Switching selected connection due to: " << reason.ToString(); - SwitchSelectedConnection(const_cast(*result.connection), - reason); + SwitchSelectedConnection(FromIceController(*result.connection), reason); } if (result.recheck_event.has_value()) { @@ -246,6 +294,10 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection( result.recheck_event->recheck_delay_ms); } + for (const auto* con : result.connections_to_forget_state_on) { + FromIceController(con)->ForgetLearnedState(); + } + return result.connection.has_value(); } @@ -295,6 +347,11 @@ const std::string& P2PTransportChannel::transport_name() const { return transport_name_; } +cricket::MediaType P2PTransportChannel::media_type() const { + RTC_DCHECK_RUN_ON(network_thread_); + return media_type_; +} + int P2PTransportChannel::component() const { RTC_DCHECK_RUN_ON(network_thread_); return component_; @@ -644,9 +701,27 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { // Use goog ping if remote support it. "enable_goog_ping", &field_trials_.enable_goog_ping, // How fast does a RTT sample decay. - "rtt_estimate_halftime_ms", &field_trials_.rtt_estimate_halftime_ms) + "rtt_estimate_halftime_ms", &field_trials_.rtt_estimate_halftime_ms, + // Make sure that the nomination reaches the ICE-controlled side ASAP. + "send_ping_on_switch_ice_controlling", + &field_trials_.send_ping_on_switch_ice_controlling, + // Make sure that the nomination reaches the ICE-controlled side ASAP.
+ "send_ping_on_selected_ice_controlling", + &field_trials_.send_ping_on_selected_ice_controlling, + // Reply to nomination ASAP. + "send_ping_on_nomination_ice_controlled", + &field_trials_.send_ping_on_nomination_ice_controlled, + // Allow connections to live untouched longer than 30s. + "dead_connection_timeout_ms", &field_trials_.dead_connection_timeout_ms) ->Parse(webrtc::field_trial::FindFullName("WebRTC-IceFieldTrials")); + if (field_trials_.dead_connection_timeout_ms < 30000) { + RTC_LOG(LS_WARNING) << "dead_connection_timeout_ms set to " + << field_trials_.dead_connection_timeout_ms + << " increasing it to 30000"; + field_trials_.dead_connection_timeout_ms = 30000; + } + if (field_trials_.skip_relay_to_non_relay_connections) { RTC_LOG(LS_INFO) << "Set skip_relay_to_non_relay_connections"; } @@ -792,7 +867,7 @@ void P2PTransportChannel::MaybeStartGathering() { } } else { AddAllocatorSession(allocator_->CreateSession( - transport_name(), component(), ice_parameters_.ufrag, + transport_name(), media_type(), component(), ice_parameters_.ufrag, ice_parameters_.pwd)); allocator_sessions_.back()->StartGettingPorts(); } @@ -950,7 +1025,7 @@ void P2PTransportChannel::OnUnknownAddress(PortInterface* port, uint16_t network_id = 0; uint16_t network_cost = 0; const StunUInt32Attribute* network_attr = - stun_msg->GetUInt32(STUN_ATTR_NETWORK_INFO); + stun_msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); if (network_attr) { uint32_t network_info = network_attr->value(); network_id = static_cast(network_info >> 16); @@ -965,6 +1040,9 @@ void P2PTransportChannel::OnUnknownAddress(PortInterface* port, component(), ProtoToString(proto), address, remote_candidate_priority, remote_username, remote_password, PRFLX_PORT_TYPE, remote_generation, "", network_id, network_cost); + if (proto == PROTO_TCP) { + remote_candidate.set_tcptype(TCPTYPE_ACTIVE_STR); + } // From RFC 5245, section-7.2.1.3: // The foundation of the candidate is set to an arbitrary value, different @@ -1060,6
+1138,11 @@ void P2PTransportChannel::OnNominated(Connection* conn) { return; } + if (field_trials_.send_ping_on_nomination_ice_controlled && conn != nullptr) { + PingConnection(conn); + MarkConnectionPinged(conn); + } + // TODO(qingsi): RequestSortAndStateUpdate will eventually call // MaybeSwitchSelectedConnection again. Rewrite this logic. if (MaybeSwitchSelectedConnection( @@ -1127,7 +1210,12 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) { } if (new_remote_candidate.address().IsUnresolvedIP()) { - ResolveHostnameCandidate(new_remote_candidate); + // Don't do DNS lookups if the IceTransportPolicy is "none" or "relay". + bool sharing_host = ((allocator_->candidate_filter() & CF_HOST) != 0); + bool sharing_stun = ((allocator_->candidate_filter() & CF_REFLEXIVE) != 0); + if (sharing_host || sharing_stun) { + ResolveHostnameCandidate(new_remote_candidate); + } return; } @@ -1156,9 +1244,8 @@ void P2PTransportChannel::OnCandidateResolved( Candidate candidate = p->candidate_; resolvers_.erase(p); AddRemoteCandidateWithResolver(candidate, resolver); - invoker_.AsyncInvoke( - RTC_FROM_HERE, thread(), - rtc::Bind(&rtc::AsyncResolverInterface::Destroy, resolver, false)); + thread()->PostTask( + webrtc::ToQueuedTask([] {}, [resolver] { resolver->Destroy(false); })); } void P2PTransportChannel::AddRemoteCandidateWithResolver( @@ -1331,7 +1418,7 @@ bool P2PTransportChannel::CreateConnection(PortInterface* port, return false; } -bool P2PTransportChannel::FindConnection(Connection* connection) const { +bool P2PTransportChannel::FindConnection(const Connection* connection) const { RTC_DCHECK_RUN_ON(network_thread_); return absl::c_linear_search(connections(), connection); } @@ -1637,7 +1724,7 @@ void P2PTransportChannel::PruneConnections() { std::vector connections_to_prune = ice_controller_->PruneConnections(); for (const Connection* conn : connections_to_prune) { - const_cast(conn)->Prune(); + FromIceController(conn)->Prune(); } } @@ -1676,18 
+1763,31 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, network_route_.emplace(rtc::NetworkRoute()); network_route_->connected = ReadyToSend(selected_connection_); - network_route_->local_network_id = - selected_connection_->local_candidate().network_id(); - network_route_->remote_network_id = - selected_connection_->remote_candidate().network_id(); + network_route_->local = CreateRouteEndpointFromCandidate( + /* local= */ true, selected_connection_->local_candidate(), + /* uses_turn= */ selected_connection_->port()->Type() == + RELAY_PORT_TYPE); + network_route_->remote = CreateRouteEndpointFromCandidate( + /* local= */ false, selected_connection_->remote_candidate(), + /* uses_turn= */ selected_connection_->remote_candidate().type() == + RELAY_PORT_TYPE); + network_route_->last_sent_packet_id = last_sent_packet_id_; network_route_->packet_overhead = - GetIpOverhead( - selected_connection_->local_candidate().address().family()) + + selected_connection_->local_candidate().address().ipaddr().overhead() + GetProtocolOverhead(selected_connection_->local_candidate().protocol()); } else { RTC_LOG(LS_INFO) << ToString() << ": No selected connection"; } + + if (conn != nullptr && ice_role_ == ICEROLE_CONTROLLING && + ((field_trials_.send_ping_on_switch_ice_controlling && + old_selected_connection != nullptr) || + field_trials_.send_ping_on_selected_ice_controlling)) { + PingConnection(conn); + MarkConnectionPinged(conn); + } + SignalNetworkRouteChanged(network_route_); // Create event for candidate pair change. 
@@ -1697,6 +1797,15 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, pair_change.selected_candidate_pair = *GetSelectedCandidatePair(); pair_change.last_data_received_ms = selected_connection_->last_data_received(); + + if (old_selected_connection) { + pair_change.estimated_disconnected_time_ms = + ComputeEstimatedDisconnectedTimeMs(rtc::TimeMillis(), + old_selected_connection); + } else { + pair_change.estimated_disconnected_time_ms = 0; + } + SignalCandidatePairChanged(pair_change); } @@ -1705,6 +1814,16 @@ void P2PTransportChannel::SwitchSelectedConnection(Connection* conn, ice_controller_->SetSelectedConnection(selected_connection_); } +int64_t P2PTransportChannel::ComputeEstimatedDisconnectedTimeMs( + int64_t now_ms, + Connection* old_connection) { + // TODO(jonaso): nicer keeps estimate of how frequently data _should_ be + // received, this could be used to give better estimate (if needed). + int64_t last_data_or_old_ping = + std::max(old_connection->last_received(), last_data_received_ms_); + return (now_ms - last_data_or_old_ping); +} + // Warning: UpdateState should eventually be called whenever a connection // is added, deleted, or the write state of any connection changes so that the // transport controller will get the up-to-date channel state. However it @@ -1828,10 +1947,10 @@ void P2PTransportChannel::CheckAndPing() { UpdateConnectionStates(); auto result = ice_controller_->SelectConnectionToPing(last_ping_sent_ms_); - Connection* conn = result.first; - int delay = result.second; + int delay = result.recheck_delay_ms; - if (conn) { + if (result.connection.value_or(nullptr)) { + Connection* conn = FromIceController(*result.connection); PingConnection(conn); MarkConnectionPinged(conn); } @@ -1844,7 +1963,12 @@ void P2PTransportChannel::CheckAndPing() { // This method is only for unit testing. 
Connection* P2PTransportChannel::FindNextPingableConnection() { RTC_DCHECK_RUN_ON(network_thread_); - return const_cast(ice_controller_->FindNextPingableConnection()); + auto* conn = ice_controller_->FindNextPingableConnection(); + if (conn) { + return FromIceController(conn); + } else { + return nullptr; + } } // A connection is considered a backup connection if the channel state @@ -2018,6 +2142,9 @@ void P2PTransportChannel::OnReadPacket(Connection* connection, if (connection == selected_connection_) { // Let the client know of an incoming packet + RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); + last_data_received_ms_ = + std::max(last_data_received_ms_, connection->last_data_received()); SignalReadPacket(this, data, len, packet_time_us, 0); return; } @@ -2026,6 +2153,10 @@ void P2PTransportChannel::OnReadPacket(Connection* connection, if (!FindConnection(connection)) return; + RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); + last_data_received_ms_ = + std::max(last_data_received_ms_, connection->last_data_received()); + // Let the client know of an incoming packet SignalReadPacket(this, data, len, packet_time_us, 0); diff --git a/p2p/base/p2p_transport_channel.h b/p2p/base/p2p_transport_channel.h index 3d6c86f031..808caac45d 100644 --- a/p2p/base/p2p_transport_channel.h +++ b/p2p/base/p2p_transport_channel.h @@ -41,6 +41,7 @@ #include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" #include "p2p/base/regathering_controller.h" +#include "pc/session_description.h" #include "rtc_base/async_invoker.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/constructor_magic.h" @@ -85,10 +86,12 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // For testing only. // TODO(zstein): Remove once AsyncResolverFactory is required. 
P2PTransportChannel(const std::string& transport_name, + cricket::MediaType media_type, int component, PortAllocator* allocator); P2PTransportChannel( const std::string& transport_name, + cricket::MediaType media_type, int component, PortAllocator* allocator, webrtc::AsyncResolverFactory* async_resolver_factory, @@ -101,6 +104,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { webrtc::IceTransportState GetIceTransportState() const override; const std::string& transport_name() const override; + cricket::MediaType media_type() const override; int component() const override; bool writable() const override; bool receiving() const override; @@ -245,7 +249,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { bool CreateConnection(PortInterface* port, const Candidate& remote_candidate, PortInterface* origin_port); - bool FindConnection(Connection* connection) const; + bool FindConnection(const Connection* connection) const; uint32_t GetRemoteCandidateGeneration(const Candidate& candidate); bool IsDuplicateRemoteCandidate(const Candidate& candidate); @@ -348,7 +352,21 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // 2. Peer-reflexive remote candidates. Candidate SanitizeRemoteCandidate(const Candidate& c) const; + // Cast a Connection returned from IceController and verify that it exists. + // (P2P owns all Connections, and only gives const pointers to IceController, + // see IceControllerInterface). + Connection* FromIceController(const Connection* conn) { + // Verify that IceController does not return a connection + // that we have destroyed. 
+ RTC_DCHECK(FindConnection(conn)); + return const_cast(conn); + } + + int64_t ComputeEstimatedDisconnectedTimeMs(int64_t now, + Connection* old_connection); + std::string transport_name_ RTC_GUARDED_BY(network_thread_); + cricket::MediaType media_type_ RTC_GUARDED_BY(network_thread_); int component_ RTC_GUARDED_BY(network_thread_); PortAllocator* allocator_ RTC_GUARDED_BY(network_thread_); webrtc::AsyncResolverFactory* async_resolver_factory_ @@ -430,6 +448,10 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // Number of times the selected_connection_ has been modified. uint32_t selected_candidate_pair_changes_ = 0; + // When was last data received on an existing connection, + // from connection->last_data_received() that uses rtc::TimeMillis(). + int64_t last_data_received_ms_ = 0; + IceFieldTrials field_trials_; RTC_DISALLOW_COPY_AND_ASSIGN(P2PTransportChannel); diff --git a/p2p/base/p2p_transport_channel_ice_field_trials.h b/p2p/base/p2p_transport_channel_ice_field_trials.h index e55f7ce918..00e1151ba3 100644 --- a/p2p/base/p2p_transport_channel_ice_field_trials.h +++ b/p2p/base/p2p_transport_channel_ice_field_trials.h @@ -42,6 +42,22 @@ struct IceFieldTrials { // Decay rate for RTT estimate using EventBasedExponentialMovingAverage // expressed as halving time. int rtt_estimate_halftime_ms = 500; + + // Sending a PING directly after a switch on ICE_CONTROLLING-side. + // TODO(jonaso): Deprecate this in favor of + // |send_ping_on_selected_ice_controlling|. + bool send_ping_on_switch_ice_controlling = false; + + // Sending a PING directly after selecting a connection + // (i.e. either a switch or the initial selection). + bool send_ping_on_selected_ice_controlling = false; + + // Sending a PING directly after a nomination on ICE_CONTROLLED-side. + bool send_ping_on_nomination_ice_controlled = false; + + // The timeout after which the connection will be considered dead if no + // traffic is received.
+ int dead_connection_timeout_ms = 30000; }; } // namespace cricket diff --git a/p2p/base/p2p_transport_channel_unittest.cc b/p2p/base/p2p_transport_channel_unittest.cc index 5f84aa3cf1..1aae04d22e 100644 --- a/p2p/base/p2p_transport_channel_unittest.cc +++ b/p2p/base/p2p_transport_channel_unittest.cc @@ -177,14 +177,14 @@ cricket::BasicPortAllocator* CreateBasicPortAllocator( class MockIceControllerFactory : public cricket::IceControllerFactoryInterface { public: - ~MockIceControllerFactory() = default; + ~MockIceControllerFactory() override = default; std::unique_ptr Create( - const cricket::IceControllerFactoryArgs& args) { + const cricket::IceControllerFactoryArgs& args) override { RecordIceControllerCreated(); return std::make_unique(args); } - MOCK_METHOD0(RecordIceControllerCreated, void()); + MOCK_METHOD(void, RecordIceControllerCreated, ()); }; } // namespace @@ -207,7 +207,7 @@ namespace cricket { // Note that this class is a base class for use by other tests, who will provide // specialized test behavior. 
class P2PTransportChannelTestBase : public ::testing::Test, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: P2PTransportChannelTestBase() @@ -378,9 +378,9 @@ class P2PTransportChannelTestBase : public ::testing::Test, IceParamsWithRenomination(kIceParams[0], renomination); IceParameters ice_ep2_cd1_ch = IceParamsWithRenomination(kIceParams[1], renomination); - ep1_.cd1_.ch_.reset(CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT, + ep1_.cd1_.ch_.reset(CreateChannel(0, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, ice_ep1_cd1_ch, ice_ep2_cd1_ch)); - ep2_.cd1_.ch_.reset(CreateChannel(1, ICE_CANDIDATE_COMPONENT_DEFAULT, + ep2_.cd1_.ch_.reset(CreateChannel(1, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, ice_ep2_cd1_ch, ice_ep1_cd1_ch)); ep1_.cd1_.ch_->SetIceConfig(ep1_config); ep2_.cd1_.ch_->SetIceConfig(ep2_config); @@ -398,11 +398,12 @@ class P2PTransportChannelTestBase : public ::testing::Test, } P2PTransportChannel* CreateChannel(int endpoint, + cricket::MediaType media_type, int component, const IceParameters& local_ice, const IceParameters& remote_ice) { P2PTransportChannel* channel = new P2PTransportChannel( - "test content name", component, GetAllocator(endpoint), + "test content name", media_type, component, GetAllocator(endpoint), GetEndpoint(endpoint)->async_resolver_factory_); channel->SignalReadyToSend.connect( this, &P2PTransportChannelTestBase::OnReadyToSend); @@ -1284,6 +1285,7 @@ TEST_F(P2PTransportChannelTest, GetStats) { ep2_ch1()->receiving() && ep2_ch1()->writable(), kMediumTimeout, clock); + // Sends and receives 10 packets. 
TestSendRecv(&clock); IceTransportStats ice_transport_stats; ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats)); @@ -1306,6 +1308,7 @@ TEST_F(P2PTransportChannelTest, GetStats) { EXPECT_EQ(0U, best_conn_info->sent_discarded_packets); EXPECT_EQ(10 * 36U, best_conn_info->sent_total_bytes); EXPECT_EQ(10 * 36U, best_conn_info->recv_total_bytes); + EXPECT_EQ(10U, best_conn_info->packets_received); DestroyChannels(); } @@ -1480,7 +1483,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) { PauseCandidates(1); // Wait until the callee becomes writable to make sure that a ping request is - // received by the caller before his remote ICE credentials are set. + // received by the caller before their remote ICE credentials are set. ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout); // Add two sets of remote ICE credentials, so that the ones used by the // candidate will be generation 1 instead of 0. @@ -1588,7 +1591,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) { PauseCandidates(1); // Wait until the callee becomes writable to make sure that a ping request is - // received by the caller before his remote ICE credentials are set. + // received by the caller before their remote ICE credentials are set. ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout); // Add two sets of remote ICE credentials, so that the ones used by the // candidate will be generation 1 instead of 0. @@ -1802,6 +1805,29 @@ TEST_F(P2PTransportChannelTest, TestTcpConnectionsFromActiveToPassive) { DestroyChannels(); } +// Test that tcptype is set on all candidates for a connection running over TCP. +TEST_F(P2PTransportChannelTest, TestTcpConnectionTcptypeSet) { + rtc::ScopedFakeClock clock; + ConfigureEndpoints(BLOCK_UDP_AND_INCOMING_TCP, OPEN, + PORTALLOCATOR_ENABLE_SHARED_SOCKET, + PORTALLOCATOR_ENABLE_SHARED_SOCKET); + + SetAllowTcpListen(0, false); // active. 
+ SetAllowTcpListen(1, true); // actpass. + CreateChannels(); + + EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), + kMediumTimeout, clock); + SIMULATED_WAIT(false, kDefaultTimeout, clock); + + EXPECT_EQ(RemoteCandidate(ep1_ch1())->tcptype(), "passive"); + EXPECT_EQ(LocalCandidate(ep1_ch1())->tcptype(), "active"); + EXPECT_EQ(RemoteCandidate(ep2_ch1())->tcptype(), "active"); + EXPECT_EQ(LocalCandidate(ep2_ch1())->tcptype(), "passive"); + + DestroyChannels(); +} + TEST_F(P2PTransportChannelTest, TestIceRoleConflict) { AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); @@ -2055,7 +2081,7 @@ TEST_F(P2PTransportChannelTest, TurnToTurnPresumedWritable) { // Only configure one channel so we can control when the remote candidate // is added. GetEndpoint(0)->cd1_.ch_.reset(CreateChannel( - 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); + 0, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); IceConfig config; config.presume_writable_when_fully_relayed = true; ep1_ch1()->SetIceConfig(config); @@ -2104,9 +2130,9 @@ TEST_F(P2PTransportChannelTest, TurnToPrflxPresumedWritable) { IceConfig config; config.presume_writable_when_fully_relayed = true; GetEndpoint(0)->cd1_.ch_.reset(CreateChannel( - 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); + 0, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); GetEndpoint(1)->cd1_.ch_.reset(CreateChannel( - 1, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0])); + 1, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0])); ep1_ch1()->SetIceConfig(config); ep2_ch1()->SetIceConfig(config); // Don't signal candidates from channel 2, so that channel 1 sees the TURN @@ -2143,9 +2169,9 @@ TEST_F(P2PTransportChannelTest, PresumedWritablePreferredOverUnreliable) { IceConfig config; config.presume_writable_when_fully_relayed = true; 
GetEndpoint(0)->cd1_.ch_.reset(CreateChannel( - 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); + 0, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); GetEndpoint(1)->cd1_.ch_.reset(CreateChannel( - 1, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0])); + 1, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0])); ep1_ch1()->SetIceConfig(config); ep2_ch1()->SetIceConfig(config); ep1_ch1()->MaybeStartGathering(); @@ -2181,7 +2207,7 @@ TEST_F(P2PTransportChannelTest, SignalReadyToSendWithPresumedWritable) { // Only test one endpoint, so we can ensure the connection doesn't receive a // binding response and advance beyond being "presumed" writable. GetEndpoint(0)->cd1_.ch_.reset(CreateChannel( - 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); + 0, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); IceConfig config; config.presume_writable_when_fully_relayed = true; ep1_ch1()->SetIceConfig(config); @@ -2234,9 +2260,9 @@ TEST_F(P2PTransportChannelTest, // explicitly installed permission for. test_turn_server()->set_enable_permission_checks(false); GetEndpoint(0)->cd1_.ch_.reset(CreateChannel( - 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); + 0, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); GetEndpoint(1)->cd1_.ch_.reset(CreateChannel( - 1, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0])); + 1, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0])); // Don't signal candidates from channel 2, so that channel 1 sees the TURN // candidate as peer reflexive. 
PauseCandidates(1); @@ -3177,7 +3203,7 @@ class P2PTransportChannelPingTest : public ::testing::Test, } if (piggyback_ping_id) { msg.AddAttribute(std::make_unique( - STUN_ATTR_LAST_ICE_CHECK_RECEIVED, piggyback_ping_id.value())); + STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED, piggyback_ping_id.value())); } msg.SetTransactionID(rtc::CreateRandomString(kStunTransactionIdLength)); msg.AddMessageIntegrity(conn->local_candidate().password()); @@ -3221,9 +3247,9 @@ class P2PTransportChannelPingTest : public ::testing::Test, return !last_network_route_.has_value(); } else { return pair->local_candidate().network_id() == - last_network_route_->local_network_id && + last_network_route_->local.network_id() && pair->remote_candidate().network_id() == - last_network_route_->remote_network_id; + last_network_route_->remote.network_id(); } } @@ -3243,6 +3269,14 @@ class P2PTransportChannelPingTest : public ::testing::Test, } } + int64_t LastEstimatedDisconnectedTimeMs() const { + if (!last_candidate_change_event_.has_value()) { + return 0; + } else { + return last_candidate_change_event_->estimated_disconnected_time_ms; + } + } + private: std::unique_ptr vss_; rtc::AutoSocketServerThread thread_; @@ -3256,7 +3290,7 @@ class P2PTransportChannelPingTest : public ::testing::Test, TEST_F(P2PTransportChannelPingTest, TestTriggeredChecks) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("trigger checks", 1, &pa); + P2PTransportChannel ch("trigger checks", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); @@ -3280,7 +3314,7 @@ TEST_F(P2PTransportChannelPingTest, TestTriggeredChecks) { TEST_F(P2PTransportChannelPingTest, TestAllConnectionsPingedSufficiently) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("ping sufficiently", 1, &pa); + P2PTransportChannel ch("ping sufficiently", cricket::MEDIA_TYPE_VIDEO, 1, &pa); 
PrepareChannel(&ch); ch.MaybeStartGathering(); ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); @@ -3308,7 +3342,7 @@ TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { int RTT_RANGE = 10; FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("TestChannel", 1, &pa); + P2PTransportChannel ch("TestChannel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); @@ -3399,7 +3433,7 @@ TEST_F(P2PTransportChannelPingTest, PingingStartedAsSoonAsPossible) { rtc::ScopedFakeClock clock; FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("TestChannel", 1, &pa); + P2PTransportChannel ch("TestChannel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); ch.SetIceRole(ICEROLE_CONTROLLING); ch.SetIceParameters(kIceParams[0]); ch.MaybeStartGathering(); @@ -3436,7 +3470,7 @@ TEST_F(P2PTransportChannelPingTest, PingingStartedAsSoonAsPossible) { TEST_F(P2PTransportChannelPingTest, TestNoTriggeredChecksWhenWritable) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("trigger checks", 1, &pa); + P2PTransportChannel ch("trigger checks", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); @@ -3461,7 +3495,7 @@ TEST_F(P2PTransportChannelPingTest, TestNoTriggeredChecksWhenWritable) { TEST_F(P2PTransportChannelPingTest, TestFailedConnectionNotPingable) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("Do not ping failed connections", 1, &pa); + P2PTransportChannel ch("Do not ping failed connections", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); @@ -3478,7 +3512,7 @@ TEST_F(P2PTransportChannelPingTest, 
TestFailedConnectionNotPingable) { TEST_F(P2PTransportChannelPingTest, TestSignalStateChanged) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("state change", 1, &pa); + P2PTransportChannel ch("state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); @@ -3499,7 +3533,7 @@ TEST_F(P2PTransportChannelPingTest, TestSignalStateChanged) { // ufrag, its pwd and generation will be set properly. TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithVariousUfrags) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("add candidate", 1, &pa); + P2PTransportChannel ch("add candidate", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); // Add a candidate with a future ufrag. @@ -3551,7 +3585,7 @@ TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithVariousUfrags) { TEST_F(P2PTransportChannelPingTest, ConnectionResurrection) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("connection resurrection", 1, &pa); + P2PTransportChannel ch("connection resurrection", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); @@ -3605,7 +3639,7 @@ TEST_F(P2PTransportChannelPingTest, ConnectionResurrection) { TEST_F(P2PTransportChannelPingTest, TestReceivingStateChange) { rtc::ScopedFakeClock clock; FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("receiving state change", 1, &pa); + P2PTransportChannel ch("receiving state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); // Default receiving timeout and checking receiving interval should not be too // small. 
@@ -3619,7 +3653,7 @@ TEST_F(P2PTransportChannelPingTest, TestReceivingStateChange) { Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); conn1->ReceivedPing(); conn1->OnReadPacket("ABC", 3, rtc::TimeMicros()); EXPECT_TRUE_SIMULATED_WAIT(ch.receiving(), kShortTimeout, clock); @@ -3634,7 +3668,7 @@ TEST_F(P2PTransportChannelPingTest, TestReceivingStateChange) { // selected connection is writable. TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("receiving state change", 1, &pa); + P2PTransportChannel ch("receiving state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -3716,6 +3750,110 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { EXPECT_TRUE(channel_ready_to_send()); } +// Test the field trial send_ping_on_nomination_ice_controlled +// that sends a ping directly when a connection has been nominated +// i.e on the ICE_CONTROLLED-side. +TEST_F(P2PTransportChannelPingTest, TestPingOnNomination) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-IceFieldTrials/send_ping_on_nomination_ice_controlled:true/"); + FakePortAllocator pa(rtc::Thread::Current(), nullptr); + P2PTransportChannel ch("receiving state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); + PrepareChannel(&ch); + ch.SetIceConfig(ch.config()); + ch.SetIceRole(ICEROLE_CONTROLLED); + ch.MaybeStartGathering(); + ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); + ASSERT_TRUE(conn1 != nullptr); + + // A connection needs to be writable before it is selected for transmission. 
+ conn1->ReceivedPingResponse(LOW_RTT, "id"); + EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); + + // When a higher priority candidate comes in, the new connection is chosen + // as the selected connection. + ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 10)); + Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); + ASSERT_TRUE(conn2 != nullptr); + conn2->ReceivedPingResponse(LOW_RTT, "id"); + EXPECT_EQ_WAIT(conn2, ch.selected_connection(), kDefaultTimeout); + EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); + + // Now nominate conn1 (low prio), it shall be choosen. + const int before = conn1->num_pings_sent(); + NominateConnection(conn1); + ASSERT_EQ(conn1, ch.selected_connection()); + EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); + + // And the additional ping should have been sent directly. + EXPECT_EQ(conn1->num_pings_sent(), before + 1); +} + +// Test the field trial send_ping_on_switch_ice_controlling +// that sends a ping directly when switching to a new connection +// on the ICE_CONTROLLING-side. +TEST_F(P2PTransportChannelPingTest, TestPingOnSwitch) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-IceFieldTrials/send_ping_on_switch_ice_controlling:true/"); + FakePortAllocator pa(rtc::Thread::Current(), nullptr); + P2PTransportChannel ch("receiving state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); + PrepareChannel(&ch); + ch.SetIceConfig(ch.config()); + ch.SetIceRole(ICEROLE_CONTROLLING); + ch.MaybeStartGathering(); + ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); + ASSERT_TRUE(conn1 != nullptr); + + // A connection needs to be writable before it is selected for transmission. 
+ conn1->ReceivedPingResponse(LOW_RTT, "id"); + EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); + + // When a higher priority candidate comes in, the new connection is chosen + // as the selected connection. + ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 10)); + Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); + ASSERT_TRUE(conn2 != nullptr); + + const int before = conn2->num_pings_sent(); + + conn2->ReceivedPingResponse(LOW_RTT, "id"); + EXPECT_EQ_WAIT(conn2, ch.selected_connection(), kDefaultTimeout); + EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); + + // And the additional ping should have been sent directly. + EXPECT_EQ(conn2->num_pings_sent(), before + 1); +} + +// Test the field trial send_ping_on_switch_ice_controlling +// that sends a ping directly when selecteing a new connection +// on the ICE_CONTROLLING-side (i.e also initial selection). +TEST_F(P2PTransportChannelPingTest, TestPingOnSelected) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-IceFieldTrials/send_ping_on_selected_ice_controlling:true/"); + FakePortAllocator pa(rtc::Thread::Current(), nullptr); + P2PTransportChannel ch("receiving state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); + PrepareChannel(&ch); + ch.SetIceConfig(ch.config()); + ch.SetIceRole(ICEROLE_CONTROLLING); + ch.MaybeStartGathering(); + ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); + ASSERT_TRUE(conn1 != nullptr); + + const int before = conn1->num_pings_sent(); + + // A connection needs to be writable before it is selected for transmission. + conn1->ReceivedPingResponse(LOW_RTT, "id"); + EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); + + // And the additional ping should have been sent directly. 
+ EXPECT_EQ(conn1->num_pings_sent(), before + 1); +} + // The controlled side will select a connection as the "selected connection" // based on requests from an unknown address before the controlling side // nominates a connection, and will nominate a connection from an unknown @@ -3724,7 +3862,7 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { // appropriately. TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("receiving state change", 1, &pa); + P2PTransportChannel ch("receiving state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -3802,7 +3940,7 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) { // the "selected connection". TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBasedOnMediaReceived) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("receiving state change", 1, &pa); + P2PTransportChannel ch("receiving state change", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -3854,9 +3992,9 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBasedOnMediaReceived) { TEST_F(P2PTransportChannelPingTest, TestControlledAgentDataReceivingTakesHigherPrecedenceThanPriority) { rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa); + P2PTransportChannel ch("SwitchSelectedConnection", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -3902,10 +4040,10 @@ TEST_F(P2PTransportChannelPingTest, TEST_F(P2PTransportChannelPingTest, 
TestControlledAgentNominationTakesHigherPrecedenceThanDataReceiving) { rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa); + P2PTransportChannel ch("SwitchSelectedConnection", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -3942,10 +4080,10 @@ TEST_F(P2PTransportChannelPingTest, TEST_F(P2PTransportChannelPingTest, TestControlledAgentSelectsConnectionWithHigherNomination) { rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test", 1, &pa); + P2PTransportChannel ch("test", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -3986,13 +4124,71 @@ TEST_F(P2PTransportChannelPingTest, EXPECT_EQ(0, reset_selected_candidate_pair_switches()); } +TEST_F(P2PTransportChannelPingTest, TestEstimatedDisconnectedTime) { + rtc::ScopedFakeClock clock; + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + + FakePortAllocator pa(rtc::Thread::Current(), nullptr); + P2PTransportChannel ch("test", cricket::MEDIA_TYPE_VIDEO, 1, &pa); + PrepareChannel(&ch); + ch.SetIceRole(ICEROLE_CONTROLLED); + ch.MaybeStartGathering(); + // The connections have decreasing priority. 
+ Connection* conn1 = + CreateConnectionWithCandidate(&ch, &clock, "1.1.1.1", /* port= */ 1, + /* priority= */ 10, /* writable= */ true); + ASSERT_TRUE(conn1 != nullptr); + Connection* conn2 = + CreateConnectionWithCandidate(&ch, &clock, "2.2.2.2", /* port= */ 2, + /* priority= */ 9, /* writable= */ true); + ASSERT_TRUE(conn2 != nullptr); + + // conn1 is the selected connection because it has a higher priority, + EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), kDefaultTimeout, + clock); + EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); + // No estimateded disconnect time at first connect <=> value is 0. + EXPECT_EQ(LastEstimatedDisconnectedTimeMs(), 0); + + // Use nomination to force switching of selected connection. + int nomination = 1; + + { + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + // This will not parse as STUN, and is considered data + conn1->OnReadPacket("XYZ", 3, rtc::TimeMicros()); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(2)); + + // conn2 is nominated; it becomes selected. + NominateConnection(conn2, nomination++); + EXPECT_EQ(conn2, ch.selected_connection()); + // We got data 2s ago...guess that we lost 2s of connectivity. + EXPECT_EQ(LastEstimatedDisconnectedTimeMs(), 2000); + } + + { + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + conn2->OnReadPacket("XYZ", 3, rtc::TimeMicros()); + + clock.AdvanceTime(webrtc::TimeDelta::Seconds(2)); + ReceivePingOnConnection(conn2, kIceUfrag[1], 1, nomination++); + + clock.AdvanceTime(webrtc::TimeDelta::Millis(500)); + + ReceivePingOnConnection(conn1, kIceUfrag[1], 1, nomination++); + EXPECT_EQ(conn1, ch.selected_connection()); + // We got ping 500ms ago...guess that we lost 500ms of connectivity. 
+ EXPECT_EQ(LastEstimatedDisconnectedTimeMs(), 500); + } +} + TEST_F(P2PTransportChannelPingTest, TestControlledAgentIgnoresSmallerNomination) { rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test", 1, &pa); + P2PTransportChannel ch("test", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -4010,7 +4206,7 @@ TEST_F(P2PTransportChannelPingTest, rtc::ScopedFakeClock clock; FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa); + P2PTransportChannel ch("SwitchSelectedConnection", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -4051,7 +4247,7 @@ TEST_F(P2PTransportChannelPingTest, // an old one, it will be used to create a new connection. TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithAddressReuse) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("candidate reuse", 1, &pa); + P2PTransportChannel ch("candidate reuse", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); const std::string host_address = "1.1.1.1"; @@ -4089,9 +4285,9 @@ TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithAddressReuse) { // will be pruned. Otherwise, lower-priority connections are kept. 
TEST_F(P2PTransportChannelPingTest, TestDontPruneWhenWeak) { rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -4127,7 +4323,7 @@ TEST_F(P2PTransportChannelPingTest, TestDontPruneWhenWeak) { TEST_F(P2PTransportChannelPingTest, TestDontPruneHighPriorityConnections) { rtc::ScopedFakeClock clock; FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -4149,9 +4345,9 @@ TEST_F(P2PTransportChannelPingTest, TestDontPruneHighPriorityConnections) { // Test that GetState returns the state correctly. TEST_F(P2PTransportChannelPingTest, TestGetState) { rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); EXPECT_EQ(webrtc::IceTransportState::kNew, ch.GetIceTransportState()); PrepareChannel(&ch); ch.MaybeStartGathering(); @@ -4189,10 +4385,10 @@ TEST_F(P2PTransportChannelPingTest, TestGetState) { // right away, and it can become active and be pruned again. 
TEST_F(P2PTransportChannelPingTest, TestConnectionPrunedAgain) { rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); IceConfig config = CreateIceConfig(1000, GATHER_ONCE); config.receiving_switching_delay = 800; @@ -4242,7 +4438,7 @@ TEST_F(P2PTransportChannelPingTest, TestConnectionPrunedAgain) { TEST_F(P2PTransportChannelPingTest, TestDeleteConnectionsIfAllWriteTimedout) { rtc::ScopedFakeClock clock; FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.MaybeStartGathering(); // Have one connection only but later becomes write-time-out. @@ -4274,7 +4470,7 @@ TEST_F(P2PTransportChannelPingTest, TestDeleteConnectionsIfAllWriteTimedout) { // the current port allocator session. TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceConfig(CreateIceConfig(2000, GATHER_ONCE)); ch.MaybeStartGathering(); @@ -4307,7 +4503,7 @@ TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) { // the connections on it may still receive stun pings. TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnRemovedPort) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", ICE_CANDIDATE_COMPONENT_DEFAULT, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, &pa); // Starts with ICEROLE_CONTROLLING. 
PrepareChannel(&ch); IceConfig config = CreateIceConfig(1000, GATHER_CONTINUALLY); @@ -4332,7 +4528,7 @@ TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnRemovedPort) { // connections. TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnPortAfterIceRestart) { FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", ICE_CANDIDATE_COMPONENT_DEFAULT, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, &pa); // Starts with ICEROLE_CONTROLLING. PrepareChannel(&ch); ch.MaybeStartGathering(); @@ -4356,7 +4552,7 @@ TEST_F(P2PTransportChannelPingTest, TestPortDestroyedAfterTimeoutAndPruned) { rtc::ScopedFakeClock fake_clock; FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", ICE_CANDIDATE_COMPONENT_DEFAULT, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, &pa); PrepareChannel(&ch); ch.SetIceRole(ICEROLE_CONTROLLED); ch.MaybeStartGathering(); @@ -4368,7 +4564,7 @@ TEST_F(P2PTransportChannelPingTest, TestPortDestroyedAfterTimeoutAndPruned) { // Simulate 2 minutes going by. This should be enough time for the port to // time out. for (int second = 0; second < 120; ++second) { - fake_clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + fake_clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); } EXPECT_EQ(nullptr, GetConnectionTo(&ch, "1.1.1.1", 1)); // Port will not be removed because it is not pruned yet. 
@@ -4385,7 +4581,7 @@ TEST_F(P2PTransportChannelPingTest, TestMaxOutstandingPingsFieldTrial) { webrtc::test::ScopedFieldTrials field_trials( "WebRTC-IceFieldTrials/max_outstanding_pings:3/"); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("max", 1, &pa); + P2PTransportChannel ch("max", cricket::MEDIA_TYPE_VIDEO, 1, &pa); ch.SetIceConfig(ch.config()); PrepareChannel(&ch); ch.MaybeStartGathering(); @@ -4421,7 +4617,7 @@ class P2PTransportChannelMostLikelyToWorkFirstTest P2PTransportChannel& StartTransportChannel( bool prioritize_most_likely_to_work, int stable_writable_connection_ping_interval) { - channel_.reset(new P2PTransportChannel("checks", 1, allocator())); + channel_.reset(new P2PTransportChannel("checks", cricket::MEDIA_TYPE_VIDEO, 1, allocator())); IceConfig config = channel_->config(); config.prioritize_most_likely_candidate_pairs = prioritize_most_likely_to_work; @@ -4653,7 +4849,7 @@ TEST(P2PTransportChannelResolverTest, HostnameCandidateIsResolved) { .WillOnce(Return(&mock_async_resolver)); FakePortAllocator allocator(rtc::Thread::Current(), nullptr); - P2PTransportChannel channel("tn", 0, &allocator, + P2PTransportChannel channel("tn", cricket::MEDIA_TYPE_VIDEO, 0, &allocator, &mock_async_resolver_factory); Candidate hostname_candidate; SocketAddress hostname_address("fake.test", 1000); @@ -4744,10 +4940,13 @@ TEST_F(P2PTransportChannelTest, // address after the resolution completes. 
TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateDuringResolvingHostCandidateWithMdnsName) { - NiceMock mock_async_resolver; + auto mock_async_resolver = new NiceMock(); + ON_CALL(*mock_async_resolver, Destroy).WillByDefault([mock_async_resolver] { + delete mock_async_resolver; + }); webrtc::MockAsyncResolverFactory mock_async_resolver_factory; EXPECT_CALL(mock_async_resolver_factory, Create()) - .WillOnce(Return(&mock_async_resolver)); + .WillOnce(Return(mock_async_resolver)); // ep1 and ep2 will only gather host candidates with addresses // kPublicAddrs[0] and kPublicAddrs[1], respectively. @@ -4774,7 +4973,7 @@ TEST_F(P2PTransportChannelTest, bool mock_async_resolver_started = false; // Not signaling done yet, and only make sure we are in the process of // resolution. - EXPECT_CALL(mock_async_resolver, Start(_)) + EXPECT_CALL(*mock_async_resolver, Start(_)) .WillOnce(InvokeWithoutArgs([&mock_async_resolver_started]() { mock_async_resolver_started = true; })); @@ -4787,7 +4986,7 @@ TEST_F(P2PTransportChannelTest, ResumeCandidates(1); ASSERT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kMediumTimeout); // Let the mock resolver of ep2 receives the correct resolution. - EXPECT_CALL(mock_async_resolver, GetResolvedAddress(_, _)) + EXPECT_CALL(*mock_async_resolver, GetResolvedAddress(_, _)) .WillOnce(DoAll(SetArgPointee<1>(local_address), Return(true))); // Upon receiving a ping from ep1, ep2 adds a prflx candidate from the // unknown address and establishes a connection. @@ -4799,7 +4998,7 @@ TEST_F(P2PTransportChannelTest, ep2_ch1()->selected_connection()->remote_candidate().type()); // ep2 should also be able resolve the hostname candidate. The resolved remote // host candidate should be merged with the prflx remote candidate. 
- mock_async_resolver.SignalDone(&mock_async_resolver); + mock_async_resolver->SignalDone(mock_async_resolver); EXPECT_EQ_WAIT(LOCAL_PORT_TYPE, ep2_ch1()->selected_connection()->remote_candidate().type(), kMediumTimeout); @@ -5108,7 +5307,7 @@ TEST_F(P2PTransportChannelTest, // this endpoint only gathers relay candidates. ConfigureEndpoints(OPEN, OPEN, kOnlyRelayPorts, kDefaultPortAllocatorFlags); GetEndpoint(0)->cd1_.ch_.reset(CreateChannel( - 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); + 0, cricket::MEDIA_TYPE_VIDEO, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1])); IceConfig config; // Start gathering and we should have only a single relay port. ep1_ch1()->SetIceConfig(config); @@ -5155,10 +5354,14 @@ TEST_F(P2PTransportChannelTest, class MockMdnsResponder : public webrtc::MdnsResponderInterface { public: - MOCK_METHOD2(CreateNameForAddress, - void(const rtc::IPAddress&, NameCreatedCallback)); - MOCK_METHOD2(RemoveNameForAddress, - void(const rtc::IPAddress&, NameRemovedCallback)); + MOCK_METHOD(void, + CreateNameForAddress, + (const rtc::IPAddress&, NameCreatedCallback), + (override)); + MOCK_METHOD(void, + RemoveNameForAddress, + (const rtc::IPAddress&, NameRemovedCallback), + (override)); }; TEST_F(P2PTransportChannelTest, @@ -5439,16 +5642,86 @@ TEST_F(P2PTransportChannelTest, DestroyChannels(); } +// Verify that things break unless +// - both parties use the surface_ice_candidates_on_ice_transport_type_changed +// - both parties loosen candidate filter at the same time (approx.). +// +// i.e surface_ice_candidates_on_ice_transport_type_changed requires +// coordination outside of webrtc to function properly. 
+TEST_F(P2PTransportChannelTest, SurfaceRequiresCoordination) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/"); + rtc::ScopedFakeClock clock; + + ConfigureEndpoints( + OPEN, OPEN, + kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET, + kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET); + auto* ep1 = GetEndpoint(0); + auto* ep2 = GetEndpoint(1); + ep1->allocator_->SetCandidateFilter(CF_RELAY); + ep2->allocator_->SetCandidateFilter(CF_ALL); + // Enable continual gathering and also resurfacing gathered candidates upon + // the candidate filter changed in the ICE configuration. + IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY); + ice_config.surface_ice_candidates_on_ice_transport_type_changed = true; + // Pause candidates gathering so we can gather all types of candidates. See + // P2PTransportChannel::OnConnectionStateChange, where we would stop the + // gathering when we have a strongly connected candidate pair. + PauseCandidates(0); + PauseCandidates(1); + CreateChannels(ice_config, ice_config); + + // On the caller we only have relay, + // on the callee we have host, srflx and relay. + EXPECT_TRUE_SIMULATED_WAIT(ep1->saved_candidates_.size() == 1u, + kDefaultTimeout, clock); + EXPECT_TRUE_SIMULATED_WAIT(ep2->saved_candidates_.size() == 3u, + kDefaultTimeout, clock); + + ResumeCandidates(0); + ResumeCandidates(1); + ASSERT_TRUE_SIMULATED_WAIT( + ep1_ch1()->selected_connection() != nullptr && + RELAY_PORT_TYPE == + ep1_ch1()->selected_connection()->local_candidate().type() && + ep2_ch1()->selected_connection() != nullptr && + RELAY_PORT_TYPE == + ep1_ch1()->selected_connection()->remote_candidate().type(), + kDefaultTimeout, clock); + ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr, + kDefaultTimeout, clock); + + // Wait until the callee discards it's candidates + // since they don't manage to connect. 
+ SIMULATED_WAIT(false, 300000, clock); + + // And then loosen caller candidate filter. + ep1->allocator_->SetCandidateFilter(CF_ALL); + + SIMULATED_WAIT(false, kDefaultTimeout, clock); + + // No p2p connection will be made, it will remain on relay. + EXPECT_TRUE(ep1_ch1()->selected_connection() != nullptr && + RELAY_PORT_TYPE == + ep1_ch1()->selected_connection()->local_candidate().type() && + ep2_ch1()->selected_connection() != nullptr && + RELAY_PORT_TYPE == + ep1_ch1()->selected_connection()->remote_candidate().type()); + + DestroyChannels(); +} + TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampening0) { webrtc::test::ScopedFieldTrials field_trials( "WebRTC-IceFieldTrials/initial_select_dampening:0/"); constexpr int kMargin = 10; rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); @@ -5469,10 +5742,10 @@ TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampening) { constexpr int kMargin = 10; rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); @@ -5493,10 +5766,10 @@ TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampeningPingReceived) { constexpr int kMargin = 10; rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel 
ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); @@ -5520,10 +5793,10 @@ TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampeningBoth) { constexpr int kMargin = 10; rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::seconds(1)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); FakePortAllocator pa(rtc::Thread::Current(), nullptr); - P2PTransportChannel ch("test channel", 1, &pa); + P2PTransportChannel ch("test channel", cricket::MEDIA_TYPE_VIDEO, 1, &pa); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); @@ -5546,9 +5819,165 @@ TEST(P2PTransportChannel, InjectIceController) { EXPECT_CALL(factory, RecordIceControllerCreated()).Times(1); auto dummy = std::make_unique( "transport_name", + cricket::MEDIA_TYPE_VIDEO, /* component= */ 77, &pa, /* async_resolver_factory = */ nullptr, /* event_log = */ nullptr, &factory); } +class ForgetLearnedStateController : public cricket::BasicIceController { + public: + explicit ForgetLearnedStateController( + const cricket::IceControllerFactoryArgs& args) + : cricket::BasicIceController(args) {} + + SwitchResult SortAndSwitchConnection(IceControllerEvent reason) override { + auto result = cricket::BasicIceController::SortAndSwitchConnection(reason); + if (forget_connnection_) { + result.connections_to_forget_state_on.push_back(forget_connnection_); + forget_connnection_ = nullptr; + } + result.recheck_event = + IceControllerEvent(IceControllerEvent::ICE_CONTROLLER_RECHECK); + result.recheck_event->recheck_delay_ms = 100; + return result; + } + + void ForgetThisConnectionNextTimeSortAndSwitchConnectionIsCalled( + Connection* con) { + forget_connnection_ = con; + } + + private: + Connection* forget_connnection_ = nullptr; +}; + +class ForgetLearnedStateControllerFactory + : public cricket::IceControllerFactoryInterface { + public: + std::unique_ptr 
Create( + const cricket::IceControllerFactoryArgs& args) override { + auto controller = std::make_unique(args); + // Keep a pointer to allow modifying calls. + // Must not be used after the p2ptransportchannel has been destructed. + controller_ = controller.get(); + return controller; + } + virtual ~ForgetLearnedStateControllerFactory() = default; + + ForgetLearnedStateController* controller_; +}; + +TEST_F(P2PTransportChannelPingTest, TestForgetLearnedState) { + ForgetLearnedStateControllerFactory factory; + FakePortAllocator pa(rtc::Thread::Current(), nullptr); + P2PTransportChannel ch("ping sufficiently", cricket::MEDIA_TYPE_VIDEO, 1, &pa, nullptr, nullptr, + &factory); + PrepareChannel(&ch); + ch.MaybeStartGathering(); + ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + + Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); + Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); + ASSERT_TRUE(conn1 != nullptr); + ASSERT_TRUE(conn2 != nullptr); + + // Wait for conn1 to be selected. + conn1->ReceivedPingResponse(LOW_RTT, "id"); + EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kMediumTimeout); + + conn2->ReceivedPingResponse(LOW_RTT, "id"); + EXPECT_TRUE(conn2->writable()); + + // Now let the ice controller signal to P2PTransportChannel that it + // should Forget conn2. + factory.controller_ + ->ForgetThisConnectionNextTimeSortAndSwitchConnectionIsCalled(conn2); + + // We don't have a mock Connection, so verify this by checking that it + // is no longer writable. 
+ EXPECT_EQ_WAIT(false, conn2->writable(), kMediumTimeout); +} + +TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyRelay) { + ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, + kDefaultPortAllocatorFlags); + auto* ep1 = GetEndpoint(0); + ep1->allocator_->SetCandidateFilter(CF_RELAY); + + rtc::MockAsyncResolver mock_async_resolver; + webrtc::MockAsyncResolverFactory mock_async_resolver_factory; + ON_CALL(mock_async_resolver_factory, Create()) + .WillByDefault(Return(&mock_async_resolver)); + ep1->async_resolver_factory_ = &mock_async_resolver_factory; + + bool lookup_started = false; + ON_CALL(mock_async_resolver, Start(_)) + .WillByDefault(Assign(&lookup_started, true)); + + CreateChannels(); + + ep1_ch1()->AddRemoteCandidate( + CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100)); + + EXPECT_FALSE(lookup_started); + + DestroyChannels(); +} + +TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyNone) { + ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, + kDefaultPortAllocatorFlags); + auto* ep1 = GetEndpoint(0); + ep1->allocator_->SetCandidateFilter(CF_NONE); + + rtc::MockAsyncResolver mock_async_resolver; + webrtc::MockAsyncResolverFactory mock_async_resolver_factory; + ON_CALL(mock_async_resolver_factory, Create()) + .WillByDefault(Return(&mock_async_resolver)); + ep1->async_resolver_factory_ = &mock_async_resolver_factory; + + bool lookup_started = false; + ON_CALL(mock_async_resolver, Start(_)) + .WillByDefault(Assign(&lookup_started, true)); + + CreateChannels(); + + ep1_ch1()->AddRemoteCandidate( + CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100)); + + EXPECT_FALSE(lookup_started); + + DestroyChannels(); +} + +TEST_F(P2PTransportChannelTest, EnableDnsLookupsWithTransportPolicyNoHost) { + ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, + kDefaultPortAllocatorFlags); + auto* ep1 = GetEndpoint(0); + ep1->allocator_->SetCandidateFilter(CF_ALL & ~CF_HOST); + + 
rtc::MockAsyncResolver mock_async_resolver; + webrtc::MockAsyncResolverFactory mock_async_resolver_factory; + EXPECT_CALL(mock_async_resolver_factory, Create()) + .WillOnce(Return(&mock_async_resolver)); + EXPECT_CALL(mock_async_resolver, Destroy(_)); + + ep1->async_resolver_factory_ = &mock_async_resolver_factory; + + bool lookup_started = false; + EXPECT_CALL(mock_async_resolver, Start(_)) + .WillOnce(Assign(&lookup_started, true)); + + CreateChannels(); + + ep1_ch1()->AddRemoteCandidate( + CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100)); + + EXPECT_TRUE(lookup_started); + + DestroyChannels(); +} + } // namespace cricket diff --git a/p2p/base/packet_transport_internal.h b/p2p/base/packet_transport_internal.h index f65d7f4981..186c6f1547 100644 --- a/p2p/base/packet_transport_internal.h +++ b/p2p/base/packet_transport_internal.h @@ -14,6 +14,7 @@ #include #include +#include "api/media_types.h" #include "absl/types/optional.h" #include "p2p/base/port.h" #include "rtc_base/async_packet_socket.h" @@ -30,6 +31,8 @@ class RTC_EXPORT PacketTransportInternal : public sigslot::has_slots<> { public: virtual const std::string& transport_name() const = 0; + virtual cricket::MediaType media_type() const = 0; + // The transport has been established. 
virtual bool writable() const = 0; diff --git a/p2p/base/port.cc b/p2p/base/port.cc index a6eb333923..035d3d4bb3 100644 --- a/p2p/base/port.cc +++ b/p2p/base/port.cc @@ -254,20 +254,6 @@ Connection* Port::GetConnection(const rtc::SocketAddress& remote_addr) { return NULL; } -void Port::AddAddress(const rtc::SocketAddress& address, - const rtc::SocketAddress& base_address, - const rtc::SocketAddress& related_address, - const std::string& protocol, - const std::string& relay_protocol, - const std::string& tcptype, - const std::string& type, - uint32_t type_preference, - uint32_t relay_preference, - bool is_final) { - AddAddress(address, base_address, related_address, protocol, relay_protocol, - tcptype, type, type_preference, relay_preference, "", is_final); -} - void Port::AddAddress(const rtc::SocketAddress& address, const rtc::SocketAddress& base_address, const rtc::SocketAddress& related_address, @@ -469,6 +455,12 @@ bool Port::GetStunMessage(const char* data, return false; } + // Get list of attributes in the "comprehension-required" range that were not + // comprehended. If one or more is found, the behavior differs based on the + // type of the incoming message; see below. + std::vector unknown_attributes = + stun_msg->GetNonComprehendedAttributes(); + if (stun_msg->type() == STUN_BINDING_REQUEST) { // Check for the presence of USERNAME and MESSAGE-INTEGRITY (if ICE) first. // If not present, fail with a 400 Bad Request. @@ -507,6 +499,15 @@ bool Port::GetStunMessage(const char* data, STUN_ERROR_REASON_UNAUTHORIZED); return true; } + + // If a request contains unknown comprehension-required attributes, reply + // with an error. See RFC5389 section 7.3.1. 
+ if (!unknown_attributes.empty()) { + SendUnknownAttributesErrorResponse(stun_msg.get(), addr, + unknown_attributes); + return true; + } + out_username->assign(remote_ufrag); } else if ((stun_msg->type() == STUN_BINDING_RESPONSE) || (stun_msg->type() == STUN_BINDING_ERROR_RESPONSE)) { @@ -527,6 +528,15 @@ bool Port::GetStunMessage(const char* data, return true; } } + // If a response contains unknown comprehension-required attributes, it's + // simply discarded and the transaction is considered failed. See RFC5389 + // sections 7.3.3 and 7.3.4. + if (!unknown_attributes.empty()) { + RTC_LOG(LS_ERROR) << ToString() + << ": Discarding STUN response due to unknown " + "comprehension-required attribute"; + return true; + } // NOTE: Username should not be used in verifying response messages. out_username->clear(); } else if (stun_msg->type() == STUN_BINDING_INDICATION) { @@ -534,6 +544,15 @@ bool Port::GetStunMessage(const char* data, << StunMethodToString(stun_msg->type()) << ": from " << addr.ToSensitiveString(); out_username->clear(); + + // If an indication contains unknown comprehension-required attributes,[] + // it's simply discarded. See RFC5389 section 7.3.2. + if (!unknown_attributes.empty()) { + RTC_LOG(LS_ERROR) << ToString() + << ": Discarding STUN indication due to " + "unknown comprehension-required attribute"; + return true; + } // No stun attributes will be verified, if it's stun indication message. // Returning from end of the this method. } else if (stun_msg->type() == GOOG_PING_REQUEST) { @@ -749,6 +768,44 @@ void Port::SendBindingErrorResponse(StunMessage* request, << addr.ToSensitiveString(); } +void Port::SendUnknownAttributesErrorResponse( + StunMessage* request, + const rtc::SocketAddress& addr, + const std::vector& unknown_types) { + RTC_DCHECK(request->type() == STUN_BINDING_REQUEST); + + // Fill in the response message. 
+ StunMessage response; + response.SetType(STUN_BINDING_ERROR_RESPONSE); + response.SetTransactionID(request->transaction_id()); + + auto error_attr = StunAttribute::CreateErrorCode(); + error_attr->SetCode(STUN_ERROR_UNKNOWN_ATTRIBUTE); + error_attr->SetReason(STUN_ERROR_REASON_UNKNOWN_ATTRIBUTE); + response.AddAttribute(std::move(error_attr)); + + std::unique_ptr unknown_attr = + StunAttribute::CreateUnknownAttributes(); + for (uint16_t type : unknown_types) { + unknown_attr->AddType(type); + } + response.AddAttribute(std::move(unknown_attr)); + + response.AddMessageIntegrity(password_); + response.AddFingerprint(); + + // Send the response message. + rtc::ByteBufferWriter buf; + response.Write(&buf); + rtc::PacketOptions options(StunDscpValue()); + options.info_signaled_after_sent.packet_type = + rtc::PacketType::kIceConnectivityCheckResponse; + SendTo(buf.Data(), buf.Length(), addr, options, false); + RTC_LOG(LS_ERROR) << ToString() << ": Sending STUN binding error: reason=" + << STUN_ERROR_UNKNOWN_ATTRIBUTE << " to " + << addr.ToSensitiveString(); +} + void Port::KeepAliveUntilPruned() { // If it is pruned, we won't bring it up again. if (state_ == State::INIT) { diff --git a/p2p/base/port.h b/p2p/base/port.h index 4200bed096..1e20d13462 100644 --- a/p2p/base/port.h +++ b/p2p/base/port.h @@ -150,6 +150,8 @@ struct CandidatePairChangeEvent { CandidatePair selected_candidate_pair; int64_t last_data_received_ms; std::string reason; + // How long do we estimate that we've been disconnected. + int64_t estimated_disconnected_time_ms; }; typedef std::set ServerAddresses; @@ -158,7 +160,7 @@ typedef std::set ServerAddresses; // connections to similar mechanisms of the other client. Subclasses of this // one add support for specific mechanisms like local UDP ports. class Port : public PortInterface, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: // INIT: The state when a port is just created. 
@@ -295,6 +297,10 @@ class Port : public PortInterface, const rtc::SocketAddress& addr, int error_code, const std::string& reason) override; + void SendUnknownAttributesErrorResponse( + StunMessage* request, + const rtc::SocketAddress& addr, + const std::vector& unknown_types); void set_proxy(const std::string& user_agent, const rtc::ProxyInfo& proxy) { user_agent_ = user_agent; @@ -366,19 +372,6 @@ class Port : public PortInterface, void set_type(const std::string& type) { type_ = type; } - // Deprecated. Use the AddAddress() method below with "url" instead. - // TODO(zhihuang): Remove this after downstream applications stop using it. - void AddAddress(const rtc::SocketAddress& address, - const rtc::SocketAddress& base_address, - const rtc::SocketAddress& related_address, - const std::string& protocol, - const std::string& relay_protocol, - const std::string& tcptype, - const std::string& type, - uint32_t type_preference, - uint32_t relay_preference, - bool is_final); - void AddAddress(const rtc::SocketAddress& address, const rtc::SocketAddress& base_address, const rtc::SocketAddress& related_address, diff --git a/p2p/base/port_allocator.cc b/p2p/base/port_allocator.cc index b13896c4bc..6e1be65dc9 100644 --- a/p2p/base/port_allocator.cc +++ b/p2p/base/port_allocator.cc @@ -58,6 +58,7 @@ RelayServerConfig::RelayServerConfig(const RelayServerConfig&) = default; RelayServerConfig::~RelayServerConfig() = default; PortAllocatorSession::PortAllocatorSession(const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd, @@ -65,6 +66,7 @@ PortAllocatorSession::PortAllocatorSession(const std::string& content_name, : flags_(flags), generation_(0), content_name_(content_name), + media_type_(media_type), component_(component), ice_ufrag_(ice_ufrag), ice_pwd_(ice_pwd) { @@ -95,6 +97,14 @@ PortAllocator::PortAllocator() : flags_(kDefaultPortAllocatorFlags), min_port_(0), max_port_(0), + 
min_audio_port_(0), + max_audio_port_(0), + min_video_port_(0), + max_video_port_(0), + min_screen_port_(0), + max_screen_port_(0), + min_data_port_(0), + max_data_port_(0), max_ipv6_networks_(kDefaultMaxIPv6Networks), step_delay_(kDefaultStepDelay), allow_tcp_listen_(true), @@ -196,7 +206,7 @@ bool PortAllocator::SetConfiguration( IceParameters iceCredentials = IceCredentialsIterator::CreateRandomIceCredentials(); PortAllocatorSession* pooled_session = - CreateSessionInternal("", 0, iceCredentials.ufrag, iceCredentials.pwd); + CreateSessionInternal("", cricket::MediaType::MEDIA_TYPE_AUDIO, 0, iceCredentials.ufrag, iceCredentials.pwd); pooled_session->set_pooled(true); pooled_session->StartGettingPorts(); pooled_sessions_.push_back( @@ -207,12 +217,13 @@ bool PortAllocator::SetConfiguration( std::unique_ptr PortAllocator::CreateSession( const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd) { CheckRunOnValidThreadAndInitialized(); auto session = std::unique_ptr( - CreateSessionInternal(content_name, component, ice_ufrag, ice_pwd)); + CreateSessionInternal(content_name, media_type, component, ice_ufrag, ice_pwd)); session->SetCandidateFilter(candidate_filter()); return session; } diff --git a/p2p/base/port_allocator.h b/p2p/base/port_allocator.h index 4bbe56c0b5..4979e7984b 100644 --- a/p2p/base/port_allocator.h +++ b/p2p/base/port_allocator.h @@ -17,6 +17,7 @@ #include #include "api/transport/enums.h" +#include "api/media_types.h" #include "p2p/base/port.h" #include "p2p/base/port_interface.h" #include "rtc_base/helpers.h" @@ -189,6 +190,7 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { public: // Content name passed in mostly for logging and debugging. 
PortAllocatorSession(const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd, @@ -200,6 +202,7 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { uint32_t flags() const { return flags_; } void set_flags(uint32_t flags) { flags_ = flags; } std::string content_name() const { return content_name_; } + cricket::MediaType media_type() const { return media_type_; } int component() const { return component_; } const std::string& ice_ufrag() const { return ice_ufrag_; } const std::string& ice_pwd() const { return ice_pwd_; } @@ -316,6 +319,7 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { uint32_t flags_; uint32_t generation_; std::string content_name_; + cricket::MediaType media_type_; int component_; std::string ice_ufrag_; std::string ice_pwd_; @@ -403,6 +407,7 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { std::unique_ptr CreateSession( const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd); @@ -474,16 +479,47 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { } // Gets/Sets the port range to use when choosing client ports. 
- int min_port() const { + int min_audio_port() const { CheckRunOnValidThreadIfInitialized(); - return min_port_; + return min_audio_port_; } - int max_port() const { + int min_video_port() const { CheckRunOnValidThreadIfInitialized(); - return max_port_; + return min_video_port_; } + int min_screen_port() const { + CheckRunOnValidThreadIfInitialized(); + return min_screen_port_; + } + + int min_data_port() const { + CheckRunOnValidThreadIfInitialized(); + return min_data_port_; + } + + int max_audio_port() const { + CheckRunOnValidThreadIfInitialized(); + return max_audio_port_; + } + + int max_video_port() const { + CheckRunOnValidThreadIfInitialized(); + return max_video_port_; + } + + int max_screen_port() const { + CheckRunOnValidThreadIfInitialized(); + return max_screen_port_; + } + + int max_data_port() const { + CheckRunOnValidThreadIfInitialized(); + return max_data_port_; + } + + bool SetPortRange(int min_port, int max_port) { CheckRunOnValidThreadIfInitialized(); if (min_port > max_port) { @@ -495,6 +531,53 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { return true; } + bool SetAudioPortRange(int min_port, int max_port) { + CheckRunOnValidThreadIfInitialized(); + if (min_port > max_port) { + return false; + } + + min_audio_port_ = min_port; + max_audio_port_ = max_port; + + return true; + } + + bool SetVideoPortRange(int min_port, int max_port) { + CheckRunOnValidThreadIfInitialized(); + if (min_port > max_port) { + return false; + } + + min_video_port_ = min_port; + max_video_port_ = max_port; + + return true; + } + + bool SetScreenPortRange(int min_port, int max_port) { + CheckRunOnValidThreadIfInitialized(); + if (min_port > max_port) { + return false; + } + + min_screen_port_ = min_port; + max_screen_port_ = max_port; + + return true; + } + + bool SetDataPortRange(int min_port, int max_port) { + CheckRunOnValidThreadIfInitialized(); + if (min_port > max_port) { + return false; + } + + min_data_port_ = min_port; + max_data_port_ = 
max_port; + + return true; + } // Can be used to change the default numer of IPv6 network interfaces used // (5). Can set to INT_MAX to effectively disable the limit. // @@ -606,6 +689,7 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { protected: virtual PortAllocatorSession* CreateSessionInternal( const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd) = 0; @@ -633,6 +717,14 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { rtc::ProxyInfo proxy_; int min_port_; int max_port_; + int min_audio_port_; + int max_audio_port_; + int min_video_port_; + int max_video_port_; + int min_screen_port_; + int max_screen_port_; + int min_data_port_; + int max_data_port_; int max_ipv6_networks_; uint32_t step_delay_; bool allow_tcp_listen_; diff --git a/p2p/base/port_allocator_unittest.cc b/p2p/base/port_allocator_unittest.cc index 70946a3d81..69aaa8e915 100644 --- a/p2p/base/port_allocator_unittest.cc +++ b/p2p/base/port_allocator_unittest.cc @@ -48,13 +48,14 @@ class PortAllocatorTest : public ::testing::Test, public sigslot::has_slots<> { std::unique_ptr CreateSession( const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd) { return std::unique_ptr( static_cast( allocator_ - ->CreateSession(content_name, component, ice_ufrag, ice_pwd) + ->CreateSession(content_name, media_type, component, ice_ufrag, ice_pwd) .release())); } @@ -102,7 +103,7 @@ TEST_F(PortAllocatorTest, TestDefaults) { // candidate filter are applied as expected. 
TEST_F(PortAllocatorTest, CreateSession) { allocator_->SetCandidateFilter(cricket::CF_RELAY); - auto session = CreateSession(kContentName, 1, kIceUfrag, kIcePwd); + auto session = CreateSession(kContentName, cricket::MEDIA_TYPE_VIDEO, 1, kIceUfrag, kIcePwd); ASSERT_NE(nullptr, session); EXPECT_EQ(cricket::CF_RELAY, session->candidate_filter()); EXPECT_EQ(kContentName, session->content_name()); diff --git a/p2p/base/port_unittest.cc b/p2p/base/port_unittest.cc index e2dde8358c..2cab407901 100644 --- a/p2p/base/port_unittest.cc +++ b/p2p/base/port_unittest.cc @@ -13,6 +13,7 @@ #include #include +#include #include #include #include @@ -63,6 +64,7 @@ #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" +#include "test/field_trial.h" #include "test/gtest.h" using rtc::AsyncPacketSocket; @@ -255,7 +257,7 @@ static void SendPingAndReceiveResponse(Connection* lconn, ASSERT_TRUE(lport->last_stun_buf()); rconn->OnReadPacket(lport->last_stun_buf()->data(), lport->last_stun_buf()->size(), /* packet_time_us */ -1); - clock->AdvanceTime(webrtc::TimeDelta::ms(ms)); + clock->AdvanceTime(webrtc::TimeDelta::Millis(ms)); ASSERT_TRUE_WAIT(rport->last_stun_msg(), kDefaultTimeout); ASSERT_TRUE(rport->last_stun_buf()); lconn->OnReadPacket(rport->last_stun_buf()->data(), @@ -1297,6 +1299,77 @@ TEST_F(PortTest, TestConnectionDead) { EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout); } +TEST_F(PortTest, TestConnectionDeadWithDeadConnectionTimeout) { + TestChannel ch1(CreateUdpPort(kLocalAddr1)); + TestChannel ch2(CreateUdpPort(kLocalAddr2)); + // Acquire address. + ch1.Start(); + ch2.Start(); + ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout); + + // Note: set field trials manually since they are parsed by + // P2PTransportChannel but P2PTransportChannel is not used in this test. 
+ IceFieldTrials field_trials; + field_trials.dead_connection_timeout_ms = 90000; + + // Create a connection again and receive a ping. + ch1.CreateConnection(GetCandidate(ch2.port())); + auto conn = ch1.conn(); + conn->SetIceFieldTrials(&field_trials); + + ASSERT_NE(conn, nullptr); + int64_t before_last_receiving = rtc::TimeMillis(); + conn->ReceivedPing(); + int64_t after_last_receiving = rtc::TimeMillis(); + // The connection will be dead after 90s + conn->UpdateState(before_last_receiving + 90000 - 1); + rtc::Thread::Current()->ProcessMessages(100); + EXPECT_TRUE(ch1.conn() != nullptr); + conn->UpdateState(after_last_receiving + 90000 + 1); + EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout); +} + +TEST_F(PortTest, TestConnectionDeadOutstandingPing) { + auto port1 = CreateUdpPort(kLocalAddr1); + port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceTiebreaker(kTiebreaker1); + auto port2 = CreateUdpPort(kLocalAddr2); + port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceTiebreaker(kTiebreaker2); + + TestChannel ch1(std::move(port1)); + TestChannel ch2(std::move(port2)); + // Acquire address. + ch1.Start(); + ch2.Start(); + ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout); + + // Note: set field trials manually since they are parsed by + // P2PTransportChannel but P2PTransportChannel is not used in this test. + IceFieldTrials field_trials; + field_trials.dead_connection_timeout_ms = 360000; + + // Create a connection again and receive a ping and then send + // a ping and keep it outstanding. + ch1.CreateConnection(GetCandidate(ch2.port())); + auto conn = ch1.conn(); + conn->SetIceFieldTrials(&field_trials); + + ASSERT_NE(conn, nullptr); + conn->ReceivedPing(); + int64_t send_ping_timestamp = rtc::TimeMillis(); + conn->Ping(send_ping_timestamp); + + // The connection will be dead 30s after the ping was sent. 
+ conn->UpdateState(send_ping_timestamp + DEAD_CONNECTION_RECEIVE_TIMEOUT - 1); + rtc::Thread::Current()->ProcessMessages(100); + EXPECT_TRUE(ch1.conn() != nullptr); + conn->UpdateState(send_ping_timestamp + DEAD_CONNECTION_RECEIVE_TIMEOUT + 1); + EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout); +} + // This test case verifies standard ICE features in STUN messages. Currently it // verifies Message Integrity attribute in STUN messages and username in STUN // binding request will have colon (":") between remote and local username. @@ -1979,7 +2052,7 @@ TEST_F(PortTest, TestNetworkInfoAttribute) { ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); IceMessage* msg = lport->last_stun_msg(); const StunUInt32Attribute* network_info_attr = - msg->GetUInt32(STUN_ATTR_NETWORK_INFO); + msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); ASSERT_TRUE(network_info_attr != NULL); uint32_t network_info = network_info_attr->value(); EXPECT_EQ(lnetwork_id, network_info >> 16); @@ -1996,7 +2069,7 @@ TEST_F(PortTest, TestNetworkInfoAttribute) { rconn->Ping(0); ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); msg = rport->last_stun_msg(); - network_info_attr = msg->GetUInt32(STUN_ATTR_NETWORK_INFO); + network_info_attr = msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); ASSERT_TRUE(network_info_attr != NULL); network_info = network_info_attr->value(); EXPECT_EQ(rnetwork_id, network_info >> 16); @@ -2221,6 +2294,110 @@ TEST_F(PortTest, TestHandleStunMessageBadFingerprint) { EXPECT_EQ(0, port->last_stun_error_code()); } +// Test handling a STUN message with unknown attributes in the +// "comprehension-required" range. Should respond with an error with the +// unknown attributes' IDs. +TEST_F(PortTest, + TestHandleStunRequestWithUnknownComprehensionRequiredAttribute) { + // Our port will act as the "remote" port. 
+ std::unique_ptr port(CreateTestPort(kLocalAddr2, "rfrag", "rpass")); + + std::unique_ptr in_msg, out_msg; + auto buf = std::make_unique(); + rtc::SocketAddress addr(kLocalAddr1); + std::string username; + + // Build ordinary message with valid ufrag/pass. + in_msg = CreateStunMessageWithUsername(STUN_BINDING_REQUEST, "rfrag:lfrag"); + in_msg->AddMessageIntegrity("rpass"); + // Add a couple attributes with ID in comprehension-required range. + in_msg->AddAttribute(StunAttribute::CreateUInt32(0x7777)); + in_msg->AddAttribute(StunAttribute::CreateUInt32(0x4567)); + // ... And one outside the range. + in_msg->AddAttribute(StunAttribute::CreateUInt32(0xdead)); + in_msg->AddFingerprint(); + WriteStunMessage(*in_msg, buf.get()); + ASSERT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, + &username)); + IceMessage* error_response = port->last_stun_msg(); + ASSERT_NE(nullptr, error_response); + + // Verify that the "unknown attribute" error response has the right error + // code, and includes an attribute that lists out the unrecognized attribute + // types. + EXPECT_EQ(STUN_ERROR_UNKNOWN_ATTRIBUTE, error_response->GetErrorCodeValue()); + const StunUInt16ListAttribute* unknown_attributes = + error_response->GetUnknownAttributes(); + ASSERT_NE(nullptr, unknown_attributes); + ASSERT_EQ(2u, unknown_attributes->Size()); + EXPECT_EQ(0x7777, unknown_attributes->GetType(0)); + EXPECT_EQ(0x4567, unknown_attributes->GetType(1)); +} + +// Similar to the above, but with a response instead of a request. In this +// case the response should just be ignored and transaction treated is failed. +TEST_F(PortTest, + TestHandleStunResponseWithUnknownComprehensionRequiredAttribute) { + // Generic setup. 
+ auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); + lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); + rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + lport->PrepareAddress(); + rport->PrepareAddress(); + ASSERT_FALSE(lport->Candidates().empty()); + ASSERT_FALSE(rport->Candidates().empty()); + Connection* lconn = + lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE); + Connection* rconn = + rport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); + + // Send request. + lconn->Ping(0); + ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + rconn->OnReadPacket(lport->last_stun_buf()->data(), + lport->last_stun_buf()->size(), /* packet_time_us */ -1); + + // Intercept request and add comprehension required attribute. + ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); + auto modified_response = rport->last_stun_msg()->Clone(); + modified_response->AddAttribute(StunAttribute::CreateUInt32(0x7777)); + modified_response->RemoveAttribute(STUN_ATTR_FINGERPRINT); + modified_response->AddFingerprint(); + ByteBufferWriter buf; + WriteStunMessage(*modified_response, &buf); + lconn->OnReadPacket(buf.Data(), buf.Length(), /* packet_time_us */ -1); + // Response should have been ignored, leaving us unwritable still. + EXPECT_FALSE(lconn->writable()); +} + +// Similar to the above, but with an indication. As with a response, it should +// just be ignored. +TEST_F(PortTest, + TestHandleStunIndicationWithUnknownComprehensionRequiredAttribute) { + // Generic set up. 
+ auto lport = CreateTestPort(kLocalAddr2, "lfrag", "lpass"); + lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); + rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + lport->PrepareAddress(); + rport->PrepareAddress(); + ASSERT_FALSE(lport->Candidates().empty()); + ASSERT_FALSE(rport->Candidates().empty()); + Connection* lconn = + lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE); + + // Generate indication with comprehension required attribute and verify it + // doesn't update last_ping_received. + auto in_msg = CreateStunMessage(STUN_BINDING_INDICATION); + in_msg->AddAttribute(StunAttribute::CreateUInt32(0x7777)); + in_msg->AddFingerprint(); + ByteBufferWriter buf; + WriteStunMessage(*in_msg, &buf); + lconn->OnReadPacket(buf.Data(), buf.Length(), /* packet_time_us */ -1); + EXPECT_EQ(0u, lconn->last_ping_received()); +} + // Test handling of STUN binding indication messages . STUN binding // indications are allowed only to the connection which is in read mode. TEST_F(PortTest, TestHandleStunBindingIndication) { @@ -3363,4 +3540,158 @@ TEST_F(PortTest, TestAddConnectionWithSameAddress) { EXPECT_TRUE(port->GetConnection(address) != nullptr); } +// TODO(webrtc:11463) : Move Connection tests into separate unit test +// splitting out shared test code as needed. 
+ +class ConnectionTest : public PortTest { + public: + ConnectionTest() { + lport_ = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); + rport_ = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); + lport_->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport_->SetIceTiebreaker(kTiebreaker1); + rport_->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport_->SetIceTiebreaker(kTiebreaker2); + + lport_->PrepareAddress(); + rport_->PrepareAddress(); + } + + rtc::ScopedFakeClock clock_; + int num_state_changes_ = 0; + + Connection* CreateConnection(IceRole role) { + Connection* conn; + if (role == cricket::ICEROLE_CONTROLLING) { + conn = lport_->CreateConnection(rport_->Candidates()[0], + Port::ORIGIN_MESSAGE); + } else { + conn = rport_->CreateConnection(lport_->Candidates()[0], + Port::ORIGIN_MESSAGE); + } + conn->SignalStateChange.connect(this, + &ConnectionTest::OnConnectionStateChange); + return conn; + } + + void SendPingAndCaptureReply(Connection* lconn, + Connection* rconn, + int64_t ms, + rtc::BufferT* reply) { + TestPort* lport = + lconn->PortForTest() == lport_.get() ? lport_.get() : rport_.get(); + TestPort* rport = + rconn->PortForTest() == rport_.get() ? 
rport_.get() : lport_.get(); + lconn->Ping(rtc::TimeMillis()); + ASSERT_TRUE_WAIT(lport->last_stun_msg(), kDefaultTimeout); + ASSERT_TRUE(lport->last_stun_buf()); + rconn->OnReadPacket(lport->last_stun_buf()->data(), + lport->last_stun_buf()->size(), + /* packet_time_us */ -1); + clock_.AdvanceTime(webrtc::TimeDelta::Millis(ms)); + ASSERT_TRUE_WAIT(rport->last_stun_msg(), kDefaultTimeout); + ASSERT_TRUE(rport->last_stun_buf()); + *reply = std::move(*rport->last_stun_buf()); + } + + void SendPingAndReceiveResponse(Connection* lconn, + Connection* rconn, + int64_t ms) { + rtc::BufferT reply; + SendPingAndCaptureReply(lconn, rconn, ms, &reply); + lconn->OnReadPacket(reply.data(), reply.size(), + /* packet_time_us */ -1); + } + + void OnConnectionStateChange(Connection* connection) { num_state_changes_++; } + + private: + std::unique_ptr lport_; + std::unique_ptr rport_; +}; + +TEST_F(ConnectionTest, ConnectionForgetLearnedState) { + Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + + EXPECT_FALSE(lconn->writable()); + EXPECT_FALSE(lconn->receiving()); + EXPECT_TRUE(std::isnan(lconn->GetRttEstimate().GetAverage())); + EXPECT_EQ(lconn->GetRttEstimate().GetVariance(), + std::numeric_limits::infinity()); + + SendPingAndReceiveResponse(lconn, rconn, 10); + + EXPECT_TRUE(lconn->writable()); + EXPECT_TRUE(lconn->receiving()); + EXPECT_EQ(lconn->GetRttEstimate().GetAverage(), 10); + EXPECT_EQ(lconn->GetRttEstimate().GetVariance(), + std::numeric_limits::infinity()); + + SendPingAndReceiveResponse(lconn, rconn, 11); + + EXPECT_TRUE(lconn->writable()); + EXPECT_TRUE(lconn->receiving()); + EXPECT_NEAR(lconn->GetRttEstimate().GetAverage(), 10, 0.5); + EXPECT_LT(lconn->GetRttEstimate().GetVariance(), + std::numeric_limits::infinity()); + + lconn->ForgetLearnedState(); + + EXPECT_FALSE(lconn->writable()); + EXPECT_FALSE(lconn->receiving()); + EXPECT_TRUE(std::isnan(lconn->GetRttEstimate().GetAverage())); + 
EXPECT_EQ(lconn->GetRttEstimate().GetVariance(), + std::numeric_limits::infinity()); +} + +TEST_F(ConnectionTest, ConnectionForgetLearnedStateDiscardsPendingPings) { + Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + + SendPingAndReceiveResponse(lconn, rconn, 10); + + EXPECT_TRUE(lconn->writable()); + EXPECT_TRUE(lconn->receiving()); + + rtc::BufferT reply; + SendPingAndCaptureReply(lconn, rconn, 10, &reply); + + lconn->ForgetLearnedState(); + + EXPECT_FALSE(lconn->writable()); + EXPECT_FALSE(lconn->receiving()); + + lconn->OnReadPacket(reply.data(), reply.size(), + /* packet_time_us */ -1); + + // That reply was discarded due to the ForgetLearnedState() while it was + // outstanding. + EXPECT_FALSE(lconn->writable()); + EXPECT_FALSE(lconn->receiving()); + + // But sending a new ping and getting a reply works. + SendPingAndReceiveResponse(lconn, rconn, 11); + EXPECT_TRUE(lconn->writable()); + EXPECT_TRUE(lconn->receiving()); +} + +TEST_F(ConnectionTest, ConnectionForgetLearnedStateDoesNotTriggerStateChange) { + Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + + EXPECT_EQ(num_state_changes_, 0); + SendPingAndReceiveResponse(lconn, rconn, 10); + + EXPECT_TRUE(lconn->writable()); + EXPECT_TRUE(lconn->receiving()); + EXPECT_EQ(num_state_changes_, 2); + + lconn->ForgetLearnedState(); + + EXPECT_FALSE(lconn->writable()); + EXPECT_FALSE(lconn->receiving()); + EXPECT_EQ(num_state_changes_, 2); +} + } // namespace cricket diff --git a/p2p/base/pseudo_tcp.cc b/p2p/base/pseudo_tcp.cc index 56e6b9b6ad..13e7a2214f 100644 --- a/p2p/base/pseudo_tcp.cc +++ b/p2p/base/pseudo_tcp.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include @@ -402,9 +403,7 @@ uint32_t PseudoTcp::GetBytesInFlight() const { } uint32_t PseudoTcp::GetBytesBufferedNotSent() const { - size_t buffered_bytes = 0; - m_sbuf.GetBuffered(&buffered_bytes); - return 
static_cast(m_snd_una + buffered_bytes - m_snd_nxt); + return static_cast(m_snd_una + m_sbuf.GetBuffered() - m_snd_nxt); } uint32_t PseudoTcp::GetRoundTripTimeEstimateMs() const { @@ -422,15 +421,11 @@ int PseudoTcp::Recv(char* buffer, size_t len) { } size_t read = 0; - rtc::StreamResult result = m_rbuf.Read(buffer, len, &read, NULL); - - // If there's no data in |m_rbuf|. - if (result == rtc::SR_BLOCK) { + if (!m_rbuf.Read(buffer, len, &read)) { m_bReadEnable = true; m_error = EWOULDBLOCK; return SOCKET_ERROR; } - RTC_DCHECK(result == rtc::SR_SUCCESS); size_t available_space = 0; m_rbuf.GetWriteRemaining(&available_space); @@ -497,14 +492,13 @@ uint32_t PseudoTcp::queue(const char* data, uint32_t len, bool bCtrl) { (m_slist.back().xmit == 0)) { m_slist.back().len += len; } else { - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); - SSegment sseg(static_cast(m_snd_una + snd_buffered), len, bCtrl); + SSegment sseg(static_cast(m_snd_una + m_sbuf.GetBuffered()), len, + bCtrl); m_slist.push_back(sseg); } size_t written = 0; - m_sbuf.Write(data, len, &written, NULL); + m_sbuf.Write(data, len, &written); return static_cast(written); } @@ -532,9 +526,9 @@ IPseudoTcpNotify::WriteResult PseudoTcp::packet(uint32_t seq, if (len) { size_t bytes_read = 0; - rtc::StreamResult result = + bool result = m_sbuf.ReadOffset(buffer.get() + HEADER_SIZE, len, offset, &bytes_read); - RTC_DCHECK(result == rtc::SR_SUCCESS); + RTC_DCHECK(result); RTC_DCHECK(static_cast(bytes_read) == len); } @@ -601,11 +595,9 @@ bool PseudoTcp::clock_check(uint32_t now, long& nTimeout) { if (m_shutdown == SD_FORCEFUL) return false; - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); if ((m_shutdown == SD_GRACEFUL) && ((m_state != TCP_ESTABLISHED) || - ((snd_buffered == 0) && (m_t_ack == 0)))) { + ((m_sbuf.GetBuffered() == 0) && (m_t_ack == 0)))) { return false; } @@ -830,10 +822,8 @@ bool PseudoTcp::process(Segment& seg) { // The goal it to make sure we always have at least enough 
data to fill the // window. We'd like to notify the app when we are halfway to that point. const uint32_t kIdealRefillSize = (m_sbuf_len + m_rbuf_len) / 2; - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); if (m_bWriteEnable && - static_cast(snd_buffered) < kIdealRefillSize) { + static_cast(m_sbuf.GetBuffered()) < kIdealRefillSize) { m_bWriteEnable = false; if (m_notify) { m_notify->OnTcpWriteable(this); @@ -912,8 +902,7 @@ bool PseudoTcp::process(Segment& seg) { // there's not already data ready to read, but this should always be // true in the problematic scenario, since control frames are always // sent first in the stream. - size_t rcv_buffered; - if (m_rbuf.GetBuffered(&rcv_buffered) && rcv_buffered == 0) { + if (m_rbuf.GetBuffered() == 0) { m_rbuf.ConsumeWriteBuffer(seg.len); m_rbuf.ConsumeReadData(seg.len); // After shifting the position in the buffer, we may have @@ -924,15 +913,11 @@ bool PseudoTcp::process(Segment& seg) { } else { uint32_t nOffset = seg.seq - m_rcv_nxt; - rtc::StreamResult result = - m_rbuf.WriteOffset(seg.data, seg.len, nOffset, NULL); - if (result == rtc::SR_BLOCK) { + if (!m_rbuf.WriteOffset(seg.data, seg.len, nOffset, NULL)) { // Ignore incoming packets outside of the receive window. return false; } - RTC_DCHECK(result == rtc::SR_SUCCESS); - if (seg.seq == m_rcv_nxt) { m_rbuf.ConsumeWriteBuffer(seg.len); m_rcv_nxt += seg.len; @@ -1078,8 +1063,7 @@ void PseudoTcp::attemptSend(SendFlags sflags) { uint32_t nInFlight = m_snd_nxt - m_snd_una; uint32_t nUseable = (nInFlight < nWindow) ? 
(nWindow - nInFlight) : 0; - size_t snd_buffered = 0; - m_sbuf.GetBuffered(&snd_buffered); + size_t snd_buffered = m_sbuf.GetBuffered(); uint32_t nAvailable = std::min(static_cast(snd_buffered) - nInFlight, m_mss); @@ -1300,4 +1284,149 @@ void PseudoTcp::resizeReceiveBuffer(uint32_t new_size) { m_rcv_wnd = static_cast(available_space); } +PseudoTcp::LockedFifoBuffer::LockedFifoBuffer(size_t size) + : buffer_(new char[size]), + buffer_length_(size), + data_length_(0), + read_position_(0) {} + +PseudoTcp::LockedFifoBuffer::~LockedFifoBuffer() {} + +size_t PseudoTcp::LockedFifoBuffer::GetBuffered() const { + webrtc::MutexLock lock(&mutex_); + return data_length_; +} + +bool PseudoTcp::LockedFifoBuffer::SetCapacity(size_t size) { + webrtc::MutexLock lock(&mutex_); + if (data_length_ > size) + return false; + + if (size != buffer_length_) { + char* buffer = new char[size]; + const size_t copy = data_length_; + const size_t tail_copy = std::min(copy, buffer_length_ - read_position_); + memcpy(buffer, &buffer_[read_position_], tail_copy); + memcpy(buffer + tail_copy, &buffer_[0], copy - tail_copy); + buffer_.reset(buffer); + read_position_ = 0; + buffer_length_ = size; + } + + return true; +} + +bool PseudoTcp::LockedFifoBuffer::ReadOffset(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read) { + webrtc::MutexLock lock(&mutex_); + return ReadOffsetLocked(buffer, bytes, offset, bytes_read); +} + +bool PseudoTcp::LockedFifoBuffer::WriteOffset(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written) { + webrtc::MutexLock lock(&mutex_); + return WriteOffsetLocked(buffer, bytes, offset, bytes_written); +} + +bool PseudoTcp::LockedFifoBuffer::Read(void* buffer, + size_t bytes, + size_t* bytes_read) { + webrtc::MutexLock lock(&mutex_); + size_t copy = 0; + if (!ReadOffsetLocked(buffer, bytes, 0, ©)) + return false; + + // If read was successful then adjust the read position and number of + // bytes buffered. 
+ read_position_ = (read_position_ + copy) % buffer_length_; + data_length_ -= copy; + if (bytes_read) + *bytes_read = copy; + + return true; +} + +bool PseudoTcp::LockedFifoBuffer::Write(const void* buffer, + size_t bytes, + size_t* bytes_written) { + webrtc::MutexLock lock(&mutex_); + size_t copy = 0; + if (!WriteOffsetLocked(buffer, bytes, 0, ©)) + return false; + + // If write was successful then adjust the number of readable bytes. + data_length_ += copy; + if (bytes_written) { + *bytes_written = copy; + } + + return true; +} + +void PseudoTcp::LockedFifoBuffer::ConsumeReadData(size_t size) { + webrtc::MutexLock lock(&mutex_); + RTC_DCHECK(size <= data_length_); + read_position_ = (read_position_ + size) % buffer_length_; + data_length_ -= size; +} + +void PseudoTcp::LockedFifoBuffer::ConsumeWriteBuffer(size_t size) { + webrtc::MutexLock lock(&mutex_); + RTC_DCHECK(size <= buffer_length_ - data_length_); + data_length_ += size; +} + +bool PseudoTcp::LockedFifoBuffer::GetWriteRemaining(size_t* size) const { + webrtc::MutexLock lock(&mutex_); + *size = buffer_length_ - data_length_; + return true; +} + +bool PseudoTcp::LockedFifoBuffer::ReadOffsetLocked(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read) { + if (offset >= data_length_) + return false; + + const size_t available = data_length_ - offset; + const size_t read_position = (read_position_ + offset) % buffer_length_; + const size_t copy = std::min(bytes, available); + const size_t tail_copy = std::min(copy, buffer_length_ - read_position); + char* const p = static_cast(buffer); + memcpy(p, &buffer_[read_position], tail_copy); + memcpy(p + tail_copy, &buffer_[0], copy - tail_copy); + + if (bytes_read) + *bytes_read = copy; + + return true; +} + +bool PseudoTcp::LockedFifoBuffer::WriteOffsetLocked(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written) { + if (data_length_ + offset >= buffer_length_) + return false; + + const size_t available = buffer_length_ - 
data_length_ - offset; + const size_t write_position = + (read_position_ + data_length_ + offset) % buffer_length_; + const size_t copy = std::min(bytes, available); + const size_t tail_copy = std::min(copy, buffer_length_ - write_position); + const char* const p = static_cast(buffer); + memcpy(&buffer_[write_position], p, tail_copy); + memcpy(&buffer_[0], p + tail_copy, copy - tail_copy); + + if (bytes_written) + *bytes_written = copy; + + return true; +} + } // namespace cricket diff --git a/p2p/base/pseudo_tcp.h b/p2p/base/pseudo_tcp.h index cb6d974496..74ffee631c 100644 --- a/p2p/base/pseudo_tcp.h +++ b/p2p/base/pseudo_tcp.h @@ -15,8 +15,9 @@ #include #include +#include -#include "rtc_base/memory/fifo_buffer.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" namespace cricket { @@ -196,6 +197,50 @@ class RTC_EXPORT PseudoTcp { // window scale factor |m_swnd_scale| accordingly. void resizeReceiveBuffer(uint32_t new_size); + class LockedFifoBuffer final { + public: + explicit LockedFifoBuffer(size_t size); + ~LockedFifoBuffer(); + + size_t GetBuffered() const; + bool SetCapacity(size_t size); + bool ReadOffset(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read); + bool WriteOffset(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written); + bool Read(void* buffer, size_t bytes, size_t* bytes_read); + bool Write(const void* buffer, size_t bytes, size_t* bytes_written); + void ConsumeReadData(size_t size); + void ConsumeWriteBuffer(size_t size); + bool GetWriteRemaining(size_t* size) const; + + private: + bool ReadOffsetLocked(void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_read) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + bool WriteOffsetLocked(const void* buffer, + size_t bytes, + size_t offset, + size_t* bytes_written) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + // the allocated buffer + std::unique_ptr buffer_ RTC_GUARDED_BY(mutex_); + // size of the allocated buffer + size_t 
buffer_length_ RTC_GUARDED_BY(mutex_); + // amount of readable data in the buffer + size_t data_length_ RTC_GUARDED_BY(mutex_); + // offset to the readable data + size_t read_position_ RTC_GUARDED_BY(mutex_); + mutable webrtc::Mutex mutex_; + }; + IPseudoTcpNotify* m_notify; enum Shutdown { SD_NONE, SD_GRACEFUL, SD_FORCEFUL } m_shutdown; int m_error; @@ -211,13 +256,13 @@ class RTC_EXPORT PseudoTcp { RList m_rlist; uint32_t m_rbuf_len, m_rcv_nxt, m_rcv_wnd, m_lastrecv; uint8_t m_rwnd_scale; // Window scale factor. - rtc::FifoBuffer m_rbuf; + LockedFifoBuffer m_rbuf; // Outgoing data SList m_slist; uint32_t m_sbuf_len, m_snd_nxt, m_snd_wnd, m_lastsend, m_snd_una; uint8_t m_swnd_scale; // Window scale factor. - rtc::FifoBuffer m_sbuf; + LockedFifoBuffer m_sbuf; // Maximum segment size, estimated protocol level, largest segment sent uint32_t m_mss, m_msslevel, m_largest, m_mtu_advise; diff --git a/p2p/base/pseudo_tcp_unittest.cc b/p2p/base/pseudo_tcp_unittest.cc index a7fc9b3e69..ecafec9fb6 100644 --- a/p2p/base/pseudo_tcp_unittest.cc +++ b/p2p/base/pseudo_tcp_unittest.cc @@ -44,7 +44,7 @@ class PseudoTcpForTest : public cricket::PseudoTcp { }; class PseudoTcpTestBase : public ::testing::Test, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public cricket::IPseudoTcpNotify { public: PseudoTcpTestBase() diff --git a/p2p/base/regathering_controller_unittest.cc b/p2p/base/regathering_controller_unittest.cc index 1617b92894..f2c91a0f0e 100644 --- a/p2p/base/regathering_controller_unittest.cc +++ b/p2p/base/regathering_controller_unittest.cc @@ -73,7 +73,7 @@ class RegatheringControllerTest : public ::testing::Test, allocator_->SetConfiguration(stun_servers, turn_servers, 0 /* pool size */, webrtc::NO_PRUNE); allocator_session_ = allocator_->CreateSession( - "test", cricket::ICE_CANDIDATE_COMPONENT_RTP, kIceUfrag, kIcePwd); + "test", cricket::MEDIA_TYPE_VIDEO, cricket::ICE_CANDIDATE_COMPONENT_RTP, kIceUfrag, kIcePwd); // The gathering will take place 
on the current thread and the following // call of StartGettingPorts is blocking. We will not ClearGettingPorts // prematurely. diff --git a/p2p/base/stun_port_unittest.cc b/p2p/base/stun_port_unittest.cc index dfc72362ce..2804ac03af 100644 --- a/p2p/base/stun_port_unittest.cc +++ b/p2p/base/stun_port_unittest.cc @@ -412,24 +412,29 @@ class MockAsyncPacketSocket : public rtc::AsyncPacketSocket { public: ~MockAsyncPacketSocket() = default; - MOCK_CONST_METHOD0(GetLocalAddress, SocketAddress()); - MOCK_CONST_METHOD0(GetRemoteAddress, SocketAddress()); - MOCK_METHOD3(Send, - int(const void* pv, - size_t cb, - const rtc::PacketOptions& options)); - - MOCK_METHOD4(SendTo, - int(const void* pv, - size_t cb, - const SocketAddress& addr, - const rtc::PacketOptions& options)); - MOCK_METHOD0(Close, int()); - MOCK_CONST_METHOD0(GetState, State()); - MOCK_METHOD2(GetOption, int(rtc::Socket::Option opt, int* value)); - MOCK_METHOD2(SetOption, int(rtc::Socket::Option opt, int value)); - MOCK_CONST_METHOD0(GetError, int()); - MOCK_METHOD1(SetError, void(int error)); + MOCK_METHOD(SocketAddress, GetLocalAddress, (), (const, override)); + MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override)); + MOCK_METHOD(int, + Send, + (const void* pv, size_t cb, const rtc::PacketOptions& options), + (override)); + + MOCK_METHOD(int, + SendTo, + (const void* pv, + size_t cb, + const SocketAddress& addr, + const rtc::PacketOptions& options), + (override)); + MOCK_METHOD(int, Close, (), (override)); + MOCK_METHOD(State, GetState, (), (const, override)); + MOCK_METHOD(int, + GetOption, + (rtc::Socket::Option opt, int* value), + (override)); + MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override)); + MOCK_METHOD(int, GetError, (), (const, override)); + MOCK_METHOD(void, SetError, (int error), (override)); }; // Test that outbound packets inherit the dscp value assigned to the socket. 
diff --git a/p2p/base/stun_request.cc b/p2p/base/stun_request.cc index b4dba7d3a0..44376ced95 100644 --- a/p2p/base/stun_request.cc +++ b/p2p/base/stun_request.cc @@ -35,7 +35,6 @@ const int STUN_INITIAL_RTO = 250; // milliseconds // RFC 5389 says SHOULD retransmit 7 times. // This has been 8 for years (not sure why). const int STUN_MAX_RETRANSMISSIONS = 8; // Total sends: 9 -const int STUN_MAX_RETRANSMISSIONS_RFC_5389 = 6; // Total sends: 7 // We also cap the doubling, even though the standard doesn't say to. // This has been 1.6 seconds for years, but for networks that @@ -43,10 +42,6 @@ const int STUN_MAX_RETRANSMISSIONS_RFC_5389 = 6; // Total sends: 7 // work well. const int STUN_MAX_RTO = 8000; // milliseconds, or 5 doublings -namespace { -const char kRfc5389StunRetransmissions[] = "WebRTC-Rfc5389StunRetransmissions"; -} // namespace - StunRequestManager::StunRequestManager(rtc::Thread* thread) : thread_(thread) {} StunRequestManager::~StunRequestManager() { @@ -125,7 +120,15 @@ bool StunRequestManager::CheckResponse(StunMessage* msg) { } StunRequest* request = iter->second; - if (msg->type() == GetStunSuccessResponseType(request->type())) { + if (!msg->GetNonComprehendedAttributes().empty()) { + // If a response contains unknown comprehension-required attributes, it's + // simply discarded and the transaction is considered failed. See RFC5389 + // sections 7.3.3 and 7.3.4. 
+ RTC_LOG(LS_ERROR) << ": Discarding response due to unknown " + "comprehension-required attribute."; + delete request; + return false; + } else if (msg->type() == GetStunSuccessResponseType(request->type())) { request->OnResponse(msg); } else if (msg->type() == GetStunErrorResponseType(request->type())) { request->OnErrorResponse(msg); @@ -175,20 +178,12 @@ StunRequest::StunRequest() timeout_(false), manager_(0), msg_(new StunMessage()), - tstamp_(0), - in_rfc5389_retransmission_experiment_( - webrtc::field_trial::IsEnabled(kRfc5389StunRetransmissions)) { + tstamp_(0) { msg_->SetTransactionID(rtc::CreateRandomString(kStunTransactionIdLength)); } StunRequest::StunRequest(StunMessage* request) - : count_(0), - timeout_(false), - manager_(0), - msg_(request), - tstamp_(0), - in_rfc5389_retransmission_experiment_( - webrtc::field_trial::IsEnabled(kRfc5389StunRetransmissions)) { + : count_(0), timeout_(false), manager_(0), msg_(request), tstamp_(0) { msg_->SetTransactionID(rtc::CreateRandomString(kStunTransactionIdLength)); } @@ -258,9 +253,7 @@ void StunRequest::OnMessage(rtc::Message* pmsg) { void StunRequest::OnSent() { count_ += 1; int retransmissions = (count_ - 1); - if (retransmissions >= STUN_MAX_RETRANSMISSIONS || - (in_rfc5389_retransmission_experiment_ && - retransmissions >= STUN_MAX_RETRANSMISSIONS_RFC_5389)) { + if (retransmissions >= STUN_MAX_RETRANSMISSIONS) { timeout_ = true; } RTC_LOG(LS_VERBOSE) << "Sent STUN request " << count_ diff --git a/p2p/base/stun_request.h b/p2p/base/stun_request.h index 9a2c3a99d8..39f928eaf4 100644 --- a/p2p/base/stun_request.h +++ b/p2p/base/stun_request.h @@ -76,7 +76,7 @@ class StunRequestManager { private: typedef std::map RequestMap; - rtc::Thread* thread_; + rtc::Thread* const thread_; RequestMap requests_; std::string origin_; @@ -148,7 +148,6 @@ class StunRequest : public rtc::MessageHandler { StunRequestManager* manager_; StunMessage* msg_; int64_t tstamp_; - bool in_rfc5389_retransmission_experiment_; friend 
class StunRequestManager; }; diff --git a/p2p/base/stun_request_unittest.cc b/p2p/base/stun_request_unittest.cc index 1f48c19ad7..ce573f087d 100644 --- a/p2p/base/stun_request_unittest.cc +++ b/p2p/base/stun_request_unittest.cc @@ -198,4 +198,22 @@ TEST_F(StunRequestTest, TestNoEmptyRequest) { delete res; } +// If the response contains an attribute in the "comprehension required" range +// which is not recognized, the transaction should be considered a failure and +// the response should be ignored. +TEST_F(StunRequestTest, TestUnrecognizedComprehensionRequiredAttribute) { + StunMessage* req = CreateStunMessage(STUN_BINDING_REQUEST, NULL); + + manager_.Send(new StunRequestThunker(req, this)); + StunMessage* res = CreateStunMessage(STUN_BINDING_ERROR_RESPONSE, req); + res->AddAttribute(StunAttribute::CreateUInt32(0x7777)); + EXPECT_FALSE(manager_.CheckResponse(res)); + + EXPECT_EQ(nullptr, response_); + EXPECT_FALSE(success_); + EXPECT_FALSE(failure_); + EXPECT_FALSE(timeout_); + delete res; +} + } // namespace cricket diff --git a/p2p/base/tcp_port.cc b/p2p/base/tcp_port.cc index e07361acf7..efbf62e496 100644 --- a/p2p/base/tcp_port.cc +++ b/p2p/base/tcp_port.cc @@ -122,7 +122,8 @@ Connection* TCPPort::CreateConnection(const Candidate& address, return NULL; } - if (address.tcptype() == TCPTYPE_ACTIVE_STR || + if ((address.tcptype() == TCPTYPE_ACTIVE_STR && + address.type() != PRFLX_PORT_TYPE) || (address.tcptype().empty() && address.address().port() == 0)) { // It's active only candidate, we should not try to create connections // for these candidates. 
diff --git a/p2p/base/test_turn_server.h b/p2p/base/test_turn_server.h index 3a9da85f08..d438a83301 100644 --- a/p2p/base/test_turn_server.h +++ b/p2p/base/test_turn_server.h @@ -109,7 +109,7 @@ class TestTurnServer : public TurnAuthInterface { rtc::SSLAdapter* adapter = rtc::SSLAdapter::Create(socket); adapter->SetRole(rtc::SSL_SERVER); adapter->SetIdentity( - rtc::SSLIdentity::Generate(common_name, rtc::KeyParams())); + rtc::SSLIdentity::Create(common_name, rtc::KeyParams())); adapter->SetIgnoreBadCert(ignore_bad_cert); socket = adapter; } diff --git a/p2p/base/transport_description.cc b/p2p/base/transport_description.cc index dd7e38e5a8..96fb9597e0 100644 --- a/p2p/base/transport_description.cc +++ b/p2p/base/transport_description.cc @@ -14,6 +14,7 @@ #include "absl/strings/match.h" #include "p2p/base/p2p_constants.h" #include "rtc_base/arraysize.h" +#include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" using webrtc::RTCError; @@ -24,10 +25,20 @@ namespace cricket { namespace { bool IsIceChar(char c) { + // Note: '-', '=', '#' and '_' are *not* valid ice-chars but temporarily + // permitted in order to allow external software to upgrade. + if (c == '-' || c == '=' || c == '#' || c == '_') { + RTC_LOG(LS_WARNING) + << "'-', '=', '#' and '-' are not valid ice-char and thus not " + << "permitted in ufrag or pwd. This is a protocol violation that " + << "is permitted to allow upgrading but will be rejected in " + << "the future. 
See https://crbug.com/1053756"; + return true; + } return absl::ascii_isalnum(c) || c == '+' || c == '/'; } -RTCErrorOr ParseIceUfrag(absl::string_view raw_ufrag) { +RTCError ValidateIceUfrag(absl::string_view raw_ufrag) { if (!(ICE_UFRAG_MIN_LENGTH <= raw_ufrag.size() && raw_ufrag.size() <= ICE_UFRAG_MAX_LENGTH)) { rtc::StringBuilder sb; @@ -42,10 +53,10 @@ RTCErrorOr ParseIceUfrag(absl::string_view raw_ufrag) { "ICE ufrag must contain only alphanumeric characters, '+', and '/'."); } - return std::string(raw_ufrag); + return RTCError::OK(); } -RTCErrorOr ParseIcePwd(absl::string_view raw_pwd) { +RTCError ValidateIcePwd(absl::string_view raw_pwd) { if (!(ICE_PWD_MIN_LENGTH <= raw_pwd.size() && raw_pwd.size() <= ICE_PWD_MAX_LENGTH)) { rtc::StringBuilder sb; @@ -60,35 +71,41 @@ RTCErrorOr ParseIcePwd(absl::string_view raw_pwd) { "ICE pwd must contain only alphanumeric characters, '+', and '/'."); } - return std::string(raw_pwd); + return RTCError::OK(); } } // namespace -// static RTCErrorOr IceParameters::Parse(absl::string_view raw_ufrag, absl::string_view raw_pwd) { + IceParameters parameters(std::string(raw_ufrag), std::string(raw_pwd), + /* renomination= */ false); + auto result = parameters.Validate(); + if (!result.ok()) { + return result; + } + return parameters; +} + +RTCError IceParameters::Validate() const { // For legacy protocols. // TODO(zhihuang): Remove this once the legacy protocol is no longer // supported. 
- if (raw_ufrag.empty() && raw_pwd.empty()) { - return IceParameters(); + if (ufrag.empty() && pwd.empty()) { + return RTCError::OK(); } - auto ufrag_result = ParseIceUfrag(raw_ufrag); + auto ufrag_result = ValidateIceUfrag(ufrag); if (!ufrag_result.ok()) { - return ufrag_result.MoveError(); + return ufrag_result; } - auto pwd_result = ParseIcePwd(raw_pwd); + auto pwd_result = ValidateIcePwd(pwd); if (!pwd_result.ok()) { - return pwd_result.MoveError(); + return pwd_result; } - IceParameters parameters; - parameters.ufrag = ufrag_result.MoveValue(); - parameters.pwd = pwd_result.MoveValue(); - return parameters; + return RTCError::OK(); } bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role) { @@ -155,8 +172,7 @@ TransportDescription::TransportDescription(const TransportDescription& from) ice_pwd(from.ice_pwd), ice_mode(from.ice_mode), connection_role(from.connection_role), - identity_fingerprint(CopyFingerprint(from.identity_fingerprint.get())), - opaque_parameters(from.opaque_parameters) {} + identity_fingerprint(CopyFingerprint(from.identity_fingerprint.get())) {} TransportDescription::~TransportDescription() = default; @@ -173,7 +189,6 @@ TransportDescription& TransportDescription::operator=( connection_role = from.connection_role; identity_fingerprint.reset(CopyFingerprint(from.identity_fingerprint.get())); - opaque_parameters = from.opaque_parameters; return *this; } diff --git a/p2p/base/transport_description.h b/p2p/base/transport_description.h index d7eedf15ef..32fdb5c9b3 100644 --- a/p2p/base/transport_description.h +++ b/p2p/base/transport_description.h @@ -83,6 +83,10 @@ struct IceParameters { bool operator!=(const IceParameters& other) const { return !(*this == other); } + + // Validate IceParameters, returns a SyntaxError if the ufrag or pwd are + // malformed. 
+ webrtc::RTCError Validate() const; }; extern const char CONNECTIONROLE_ACTIVE_STR[]; @@ -96,28 +100,6 @@ constexpr auto* ICE_OPTION_RENOMINATION = "renomination"; bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role); bool ConnectionRoleToString(const ConnectionRole& role, std::string* role_str); -// Parameters for an opaque transport protocol which may be plugged into WebRTC. -struct OpaqueTransportParameters { - // Protocol used by this opaque transport. Two endpoints that support the - // same protocol are expected to be able to understand the contents of each - // others' |parameters| fields. If those parameters are compatible, the - // endpoints are expected to use this transport protocol. - std::string protocol; - - // Opaque parameters for this transport. These parameters are serialized in a - // manner determined by the |protocol|. They can be parsed and understood by - // the plugin that supports |protocol|. - std::string parameters; - - bool operator==(const OpaqueTransportParameters& other) const { - return protocol == other.protocol && parameters == other.parameters; - } - - bool operator!=(const OpaqueTransportParameters& other) const { - return !(*this == other); - } -}; - struct TransportDescription { TransportDescription(); TransportDescription(const std::vector& transport_options, @@ -142,7 +124,7 @@ struct TransportDescription { } bool secure() const { return identity_fingerprint != nullptr; } - IceParameters GetIceParameters() { + IceParameters GetIceParameters() const { return IceParameters(ice_ufrag, ice_pwd, HasOption(ICE_OPTION_RENOMINATION)); } @@ -164,7 +146,6 @@ struct TransportDescription { ConnectionRole connection_role; std::unique_ptr identity_fingerprint; - absl::optional opaque_parameters; }; } // namespace cricket diff --git a/p2p/base/transport_description_factory.cc b/p2p/base/transport_description_factory.cc index 17152d1a04..5cce2ac09d 100644 --- a/p2p/base/transport_description_factory.cc +++ 
b/p2p/base/transport_description_factory.cc @@ -55,8 +55,6 @@ std::unique_ptr TransportDescriptionFactory::CreateOffer( } } - desc->opaque_parameters = options.opaque_parameters; - return desc; } @@ -110,13 +108,6 @@ std::unique_ptr TransportDescriptionFactory::CreateAnswer( return NULL; } - // Answers may only attach opaque parameters if the offer contained them as - // well. The answer's parameters may differ, and it's up to the opaque - // transport implementation to decide if the difference is acceptable. - if (offer->opaque_parameters && options.opaque_parameters) { - desc->opaque_parameters = options.opaque_parameters; - } - return desc; } diff --git a/p2p/base/transport_description_factory.h b/p2p/base/transport_description_factory.h index d0813dc541..c1656a0fac 100644 --- a/p2p/base/transport_description_factory.h +++ b/p2p/base/transport_description_factory.h @@ -29,9 +29,6 @@ struct TransportOptions { // If true, ICE renomination is supported and will be used if it is also // supported by the remote side. bool enable_ice_renomination = false; - - // Opaque parameters for plug-in transports. - absl::optional opaque_parameters; }; // Creates transport descriptions according to the supplied configuration. 
diff --git a/p2p/base/transport_description_factory_unittest.cc b/p2p/base/transport_description_factory_unittest.cc index 8359ffc1c9..f7675ae643 100644 --- a/p2p/base/transport_description_factory_unittest.cc +++ b/p2p/base/transport_description_factory_unittest.cc @@ -26,7 +26,6 @@ #include "test/gmock.h" #include "test/gtest.h" -using cricket::OpaqueTransportParameters; using cricket::TransportDescription; using cricket::TransportDescriptionFactory; using cricket::TransportOptions; @@ -210,73 +209,6 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsReofferDtls) { CheckDesc(desc.get(), "", old_desc->ice_ufrag, old_desc->ice_pwd, digest_alg); } -TEST_F(TransportDescriptionFactoryTest, TestOfferOpaqueTransportParameters) { - OpaqueTransportParameters params; - params.protocol = "fake"; - params.parameters = "foobar"; - - TransportOptions options; - options.opaque_parameters = params; - - std::unique_ptr desc = - f1_.CreateOffer(options, NULL, &ice_credentials_); - - CheckDesc(desc.get(), "", "", "", ""); - EXPECT_EQ(desc->opaque_parameters, params); -} - -TEST_F(TransportDescriptionFactoryTest, TestAnswerOpaqueTransportParameters) { - OpaqueTransportParameters params; - params.protocol = "fake"; - params.parameters = "foobar"; - - TransportOptions options; - options.opaque_parameters = params; - - std::unique_ptr offer = - f1_.CreateOffer(options, NULL, &ice_credentials_); - std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), options, true, NULL, &ice_credentials_); - - CheckDesc(answer.get(), "", "", "", ""); - EXPECT_EQ(answer->opaque_parameters, params); -} - -TEST_F(TransportDescriptionFactoryTest, TestAnswerNoOpaqueTransportParameters) { - OpaqueTransportParameters params; - params.protocol = "fake"; - params.parameters = "foobar"; - - TransportOptions options; - options.opaque_parameters = params; - - std::unique_ptr offer = - f1_.CreateOffer(options, NULL, &ice_credentials_); - std::unique_ptr answer = f2_.CreateAnswer( - offer.get(), 
TransportOptions(), true, NULL, &ice_credentials_); - - CheckDesc(answer.get(), "", "", "", ""); - EXPECT_EQ(answer->opaque_parameters, absl::nullopt); -} - -TEST_F(TransportDescriptionFactoryTest, - TestAnswerNoOpaqueTransportParametersInOffer) { - std::unique_ptr offer = - f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); - - OpaqueTransportParameters params; - params.protocol = "fake"; - params.parameters = "foobar"; - - TransportOptions options; - options.opaque_parameters = params; - std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), options, true, NULL, &ice_credentials_); - - CheckDesc(answer.get(), "", "", "", ""); - EXPECT_EQ(answer->opaque_parameters, absl::nullopt); -} - TEST_F(TransportDescriptionFactoryTest, TestAnswerDefault) { std::unique_ptr offer = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); diff --git a/p2p/base/turn_port.cc b/p2p/base/turn_port.cc index 2e8024dcb6..4d39f207b4 100644 --- a/p2p/base/turn_port.cc +++ b/p2p/base/turn_port.cc @@ -367,7 +367,7 @@ void TurnPort::PrepareAddress() { << server_address_.address.ToSensitiveString(); if (!CreateTurnClientSocket()) { RTC_LOG(LS_ERROR) << "Failed to create TURN client socket"; - OnAllocateError(STUN_ERROR_GLOBAL_FAILURE, + OnAllocateError(SERVER_NOT_REACHABLE_ERROR, "Failed to create TURN client socket."); return; } @@ -883,12 +883,17 @@ void TurnPort::OnAllocateError(int error_code, const std::string& reason) { // port initialization. This way it will not be blocking other port // creation. 
thread()->Post(RTC_FROM_HERE, this, MSG_ALLOCATE_ERROR); + std::string address = GetLocalAddress().HostAsSensitiveURIString(); + int port = GetLocalAddress().port(); + if (server_address_.proto == PROTO_TCP && + server_address_.address.IsPrivateIP()) { + address.clear(); + port = 0; + } SignalCandidateError( - this, - IceCandidateErrorEvent(GetLocalAddress().HostAsSensitiveURIString(), - GetLocalAddress().port(), - ReconstructedServerUrl(true /* use_hostname */), - error_code, reason)); + this, IceCandidateErrorEvent( + address, port, ReconstructedServerUrl(true /* use_hostname */), + error_code, reason)); } void TurnPort::OnRefreshError() { diff --git a/p2p/base/turn_port.h b/p2p/base/turn_port.h index 8247dbc777..a9ec434194 100644 --- a/p2p/base/turn_port.h +++ b/p2p/base/turn_port.h @@ -33,6 +33,8 @@ class TurnCustomizer; namespace cricket { +const int kMaxTurnUsernameLength = 509; // RFC 8489 section 14.3 + extern const int STUN_ATTR_TURN_LOGGING_ID; extern const char TURN_PORT_TYPE[]; class TurnAllocateRequest; @@ -61,6 +63,10 @@ class TurnPort : public Port { int server_priority, const std::string& origin, webrtc::TurnCustomizer* customizer) { + // Do basic parameter validation. + if (credentials.username.size() > kMaxTurnUsernameLength) { + return nullptr; + } // Using `new` to access a non-public constructor. return absl::WrapUnique(new TurnPort( thread, factory, network, socket, username, password, server_address, @@ -102,6 +108,10 @@ class TurnPort : public Port { const std::vector& tls_elliptic_curves, webrtc::TurnCustomizer* customizer, rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr) { + // Do basic parameter validation. + if (credentials.username.size() > kMaxTurnUsernameLength) { + return nullptr; + } // Using `new` to access a non-public constructor. 
return absl::WrapUnique( new TurnPort(thread, factory, network, min_port, max_port, username, diff --git a/p2p/base/turn_port_unittest.cc b/p2p/base/turn_port_unittest.cc index e8c1a6e0f3..e8c9b5c8ad 100644 --- a/p2p/base/turn_port_unittest.cc +++ b/p2p/base/turn_port_unittest.cc @@ -148,7 +148,7 @@ class TestConnectionWrapper : public sigslot::has_slots<> { // (between local port and TURN server) of kSimulatedRtt. class TurnPortTest : public ::testing::Test, public sigslot::has_slots<>, - public rtc::MessageHandler { + public rtc::MessageHandlerAutoCleanup { public: TurnPortTest() : ss_(new TurnPortTestVirtualSocketServer()), @@ -166,7 +166,7 @@ class TurnPortTest : public ::testing::Test, // Some code uses "last received time == 0" to represent "nothing received // so far", so we need to start the fake clock at a nonzero time... // TODO(deadbeef): Fix this. - fake_clock_.AdvanceTime(webrtc::TimeDelta::seconds(1)); + fake_clock_.AdvanceTime(webrtc::TimeDelta::Seconds(1)); } virtual void OnMessage(rtc::Message* msg) { @@ -236,43 +236,43 @@ class TurnPortTest : public ::testing::Test, return &networks_.back(); } - void CreateTurnPort(const std::string& username, + bool CreateTurnPort(const std::string& username, const std::string& password, const ProtocolAddress& server_address) { - CreateTurnPortWithAllParams(MakeNetwork(kLocalAddr1), username, password, - server_address, std::string()); + return CreateTurnPortWithAllParams(MakeNetwork(kLocalAddr1), username, + password, server_address, std::string()); } - void CreateTurnPort(const rtc::SocketAddress& local_address, + bool CreateTurnPort(const rtc::SocketAddress& local_address, const std::string& username, const std::string& password, const ProtocolAddress& server_address) { - CreateTurnPortWithAllParams(MakeNetwork(local_address), username, password, - server_address, std::string()); + return CreateTurnPortWithAllParams(MakeNetwork(local_address), username, + password, server_address, std::string()); } // Should 
be identical to CreateTurnPort but specifies an origin value // when creating the instance of TurnPort. - void CreateTurnPortWithOrigin(const rtc::SocketAddress& local_address, + bool CreateTurnPortWithOrigin(const rtc::SocketAddress& local_address, const std::string& username, const std::string& password, const ProtocolAddress& server_address, const std::string& origin) { - CreateTurnPortWithAllParams(MakeNetwork(local_address), username, password, - server_address, origin); + return CreateTurnPortWithAllParams(MakeNetwork(local_address), username, + password, server_address, origin); } - void CreateTurnPortWithNetwork(rtc::Network* network, + bool CreateTurnPortWithNetwork(rtc::Network* network, const std::string& username, const std::string& password, const ProtocolAddress& server_address) { - CreateTurnPortWithAllParams(network, username, password, server_address, - std::string()); + return CreateTurnPortWithAllParams(network, username, password, + server_address, std::string()); } // Version of CreateTurnPort that takes all possible parameters; all other // helper methods call this, such that "SetIceRole" and "ConnectSignals" (and // possibly other things in the future) only happen in one place. - void CreateTurnPortWithAllParams(rtc::Network* network, + bool CreateTurnPortWithAllParams(rtc::Network* network, const std::string& username, const std::string& password, const ProtocolAddress& server_address, @@ -281,6 +281,9 @@ class TurnPortTest : public ::testing::Test, turn_port_ = TurnPort::Create( &main_, &socket_factory_, network, 0, 0, kIceUfrag1, kIcePwd1, server_address, credentials, 0, origin, {}, {}, turn_customizer_.get()); + if (!turn_port_) { + return false; + } // This TURN port will be the controlling. 
turn_port_->SetIceRole(ICEROLE_CONTROLLING); ConnectSignals(); @@ -292,6 +295,7 @@ class TurnPortTest : public ::testing::Test, turn_port_->SetTlsCertPolicy( TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK); } + return true; } void CreateSharedTurnPort(const std::string& username, @@ -1774,4 +1778,11 @@ TEST_F(TurnPortTest, TestTurnCustomizerAddAttribute) { turn_port_.reset(nullptr); } +TEST_F(TurnPortTest, TestOverlongUsername) { + std::string overlong_username(513, 'x'); + RelayCredentials credentials(overlong_username, kTurnPassword); + EXPECT_FALSE( + CreateTurnPort(overlong_username, kTurnPassword, kTurnTlsProtoAddr)); +} + } // namespace cricket diff --git a/p2p/base/turn_server.cc b/p2p/base/turn_server.cc index 3a4784ac52..17a49e403d 100644 --- a/p2p/base/turn_server.cc +++ b/p2p/base/turn_server.cc @@ -59,7 +59,7 @@ enum { // Encapsulates a TURN permission. // The object is created when a create permission request is received by an // allocation, and self-deletes when its lifetime timer expires. -class TurnServerAllocation::Permission : public rtc::MessageHandler { +class TurnServerAllocation::Permission : public rtc::MessageHandlerAutoCleanup { public: Permission(rtc::Thread* thread, const rtc::IPAddress& peer); ~Permission() override; @@ -79,7 +79,7 @@ class TurnServerAllocation::Permission : public rtc::MessageHandler { // Encapsulates a TURN channel binding. // The object is created when a channel bind request is received by an // allocation, and self-deletes when its lifetime timer expires. 
-class TurnServerAllocation::Channel : public rtc::MessageHandler { +class TurnServerAllocation::Channel : public rtc::MessageHandlerAutoCleanup { public: Channel(rtc::Thread* thread, int id, const rtc::SocketAddress& peer); ~Channel() override; diff --git a/p2p/base/turn_server.h b/p2p/base/turn_server.h index 0f4fefea84..ca856448b3 100644 --- a/p2p/base/turn_server.h +++ b/p2p/base/turn_server.h @@ -66,7 +66,7 @@ class TurnServerConnection { // handles TURN messages (via HandleTurnMessage) and channel data messages // (via HandleChannelData) for this allocation when received by the server. // The object self-deletes and informs the server if its lifetime timer expires. -class TurnServerAllocation : public rtc::MessageHandler, +class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: TurnServerAllocation(TurnServer* server_, diff --git a/p2p/client/basic_port_allocator.cc b/p2p/client/basic_port_allocator.cc index 8aeef9361d..d1c4bedc3a 100644 --- a/p2p/client/basic_port_allocator.cc +++ b/p2p/client/basic_port_allocator.cc @@ -220,12 +220,13 @@ void BasicPortAllocator::SetNetworkIgnoreMask(int network_ignore_mask) { PortAllocatorSession* BasicPortAllocator::CreateSessionInternal( const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd) { CheckRunOnValidThreadAndInitialized(); PortAllocatorSession* session = new BasicPortAllocatorSession( - this, content_name, component, ice_ufrag, ice_pwd); + this, content_name, media_type, component, ice_ufrag, ice_pwd); session->SignalIceRegathering.connect(this, &BasicPortAllocator::OnIceRegathering); return session; @@ -253,10 +254,12 @@ void BasicPortAllocator::InitRelayPortFactory( BasicPortAllocatorSession::BasicPortAllocatorSession( BasicPortAllocator* allocator, const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& 
ice_pwd) : PortAllocatorSession(content_name, + media_type, component, ice_ufrag, ice_pwd, @@ -979,8 +982,11 @@ void BasicPortAllocatorSession::OnCandidateError( const IceCandidateErrorEvent& event) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(FindPort(port)); - - SignalCandidateError(this, event); + if (event.address.empty()) { + candidate_error_events_.push_back(event); + } else { + SignalCandidateError(this, event); + } } Port* BasicPortAllocatorSession::GetBestTurnPortForNetwork( @@ -1140,6 +1146,10 @@ void BasicPortAllocatorSession::MaybeSignalCandidatesAllocationDone() { RTC_LOG(LS_INFO) << "All candidates gathered for " << content_name() << ":" << component() << ":" << generation(); } + for (const auto& event : candidate_error_events_) { + SignalCandidateError(this, event); + } + candidate_error_events_.clear(); SignalCandidatesAllocationDone(this); } } @@ -1226,10 +1236,32 @@ AllocationSequence::AllocationSequence(BasicPortAllocatorSession* session, phase_(0) {} void AllocationSequence::Init() { + int min_port = 0, max_port = 0; + + switch (session_->media_type()) { + case cricket::MediaType::MEDIA_TYPE_AUDIO: + min_port = session_->allocator_->min_audio_port(); + max_port = session_->allocator_->max_audio_port(); + break; + case cricket::MediaType::MEDIA_TYPE_VIDEO: + min_port = session_->allocator_->min_video_port(); + max_port = session_->allocator_->max_video_port(); + break; + case cricket::MediaType::MEDIA_TYPE_DATA: + min_port = session_->allocator_->min_data_port(); + max_port = session_->allocator_->max_data_port(); + break; + case cricket::MediaType::MEDIA_TYPE_SCREEN: + min_port = session_->allocator_->min_screen_port(); + max_port = session_->allocator_->max_screen_port(); + break; + default: + break; + } if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { udp_socket_.reset(session_->socket_factory()->CreateUdpSocket( rtc::SocketAddress(network_->GetBestIP(), 0), - session_->allocator()->min_port(), session_->allocator()->max_port())); + 
min_port, max_port)); if (udp_socket_) { udp_socket_->SignalReadPacket.connect(this, &AllocationSequence::OnReadPacket); @@ -1403,9 +1435,31 @@ void AllocationSequence::CreateUDPPorts() { session_->allocator()->origin(), emit_local_candidate_for_anyaddress, session_->allocator()->stun_candidate_keepalive_interval()); } else { + uint16_t min_port = 0, max_port = 0; + + switch (session_->media_type()) { + case cricket::MediaType::MEDIA_TYPE_AUDIO: + min_port = session_->allocator_->min_audio_port(); + max_port = session_->allocator_->max_audio_port(); + break; + case cricket::MediaType::MEDIA_TYPE_VIDEO: + min_port = session_->allocator_->min_video_port(); + max_port = session_->allocator_->max_video_port(); + break; + case cricket::MediaType::MEDIA_TYPE_DATA: + min_port = session_->allocator_->min_data_port(); + max_port = session_->allocator_->max_data_port(); + break; + case cricket::MediaType::MEDIA_TYPE_SCREEN: + min_port = session_->allocator_->min_screen_port(); + max_port = session_->allocator_->max_screen_port(); + break; + default: + break; + } port = UDPPort::Create( session_->network_thread(), session_->socket_factory(), network_, - session_->allocator()->min_port(), session_->allocator()->max_port(), + min_port, max_port, session_->username(), session_->password(), session_->allocator()->origin(), emit_local_candidate_for_anyaddress, session_->allocator()->stun_candidate_keepalive_interval()); @@ -1438,10 +1492,31 @@ void AllocationSequence::CreateTCPPorts() { RTC_LOG(LS_VERBOSE) << "AllocationSequence: TCP ports disabled, skipping."; return; } + uint16_t min_port = 0, max_port = 0; + switch (session_->media_type()) { + case cricket::MediaType::MEDIA_TYPE_AUDIO: + min_port = session_->allocator_->min_audio_port(); + max_port = session_->allocator_->max_audio_port(); + break; + case cricket::MediaType::MEDIA_TYPE_VIDEO: + min_port = session_->allocator_->min_video_port(); + max_port = session_->allocator_->max_video_port(); + break; + case 
cricket::MediaType::MEDIA_TYPE_DATA: + min_port = session_->allocator_->min_data_port(); + max_port = session_->allocator_->max_data_port(); + break; + case cricket::MediaType::MEDIA_TYPE_SCREEN: + min_port = session_->allocator_->min_screen_port(); + max_port = session_->allocator_->max_screen_port(); + break; + default: + break; + } std::unique_ptr port = TCPPort::Create( session_->network_thread(), session_->socket_factory(), network_, - session_->allocator()->min_port(), session_->allocator()->max_port(), + min_port, max_port, session_->username(), session_->password(), session_->allocator()->allow_tcp_listen()); if (port) { @@ -1466,10 +1541,31 @@ void AllocationSequence::CreateStunPorts() { << "AllocationSequence: No STUN server configured, skipping."; return; } + uint16_t min_port = 0, max_port = 0; + switch (session_->media_type()) { + case cricket::MediaType::MEDIA_TYPE_AUDIO: + min_port = session_->allocator_->min_audio_port(); + max_port = session_->allocator_->max_audio_port(); + break; + case cricket::MediaType::MEDIA_TYPE_VIDEO: + min_port = session_->allocator_->min_video_port(); + max_port = session_->allocator_->max_video_port(); + break; + case cricket::MediaType::MEDIA_TYPE_DATA: + min_port = session_->allocator_->min_data_port(); + max_port = session_->allocator_->max_data_port(); + break; + case cricket::MediaType::MEDIA_TYPE_SCREEN: + min_port = session_->allocator_->min_screen_port(); + max_port = session_->allocator_->max_screen_port(); + break; + default: + break; + } std::unique_ptr port = StunPort::Create( session_->network_thread(), session_->socket_factory(), network_, - session_->allocator()->min_port(), session_->allocator()->max_port(), + min_port, max_port, session_->username(), session_->password(), config_->StunServers(), session_->allocator()->origin(), session_->allocator()->stun_candidate_keepalive_interval()); @@ -1557,9 +1653,30 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config) { // remove entrt from 
it's map. port->SignalDestroyed.connect(this, &AllocationSequence::OnPortDestroyed); } else { + uint16_t min_port = 0, max_port = 0; + + switch (session_->media_type()) { + case cricket::MediaType::MEDIA_TYPE_AUDIO: + min_port = session_->allocator_->min_audio_port(); + max_port = session_->allocator_->max_audio_port(); + break; + case cricket::MediaType::MEDIA_TYPE_VIDEO: + min_port = session_->allocator_->min_video_port(); + max_port = session_->allocator_->max_video_port(); + break; + case cricket::MediaType::MEDIA_TYPE_DATA: + min_port = session_->allocator_->min_data_port(); + max_port = session_->allocator_->max_data_port(); + break; + case cricket::MediaType::MEDIA_TYPE_SCREEN: + min_port = session_->allocator_->min_screen_port(); + max_port = session_->allocator_->max_screen_port(); + break; + default: + break; + } port = session_->allocator()->relay_port_factory()->Create( - args, session_->allocator()->min_port(), - session_->allocator()->max_port()); + args, min_port, max_port); if (!port) { RTC_LOG(LS_WARNING) << "Failed to create relay port with " diff --git a/p2p/client/basic_port_allocator.h b/p2p/client/basic_port_allocator.h index b9f2b2ebd2..59959bde12 100644 --- a/p2p/client/basic_port_allocator.h +++ b/p2p/client/basic_port_allocator.h @@ -63,6 +63,7 @@ class RTC_EXPORT BasicPortAllocator : public PortAllocator { PortAllocatorSession* CreateSessionInternal( const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd) override; @@ -106,11 +107,13 @@ enum class SessionState { // process will be started. 
}; -class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession, - public rtc::MessageHandler { +class RTC_EXPORT BasicPortAllocatorSession + : public PortAllocatorSession, + public rtc::MessageHandlerAutoCleanup { public: BasicPortAllocatorSession(BasicPortAllocator* allocator, const std::string& content_name, + cricket::MediaType media_type, int component, const std::string& ice_ufrag, const std::string& ice_pwd); @@ -269,6 +272,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession, std::vector configs_; std::vector sequences_; std::vector ports_; + std::vector candidate_error_events_; uint32_t candidate_filter_ = CF_ALL; // Policy on how to prune turn ports, taken from the port allocator. webrtc::PortPrunePolicy turn_port_prune_policy_; @@ -322,7 +326,7 @@ class TurnPort; // Performs the allocation of ports, in a sequenced (timed) manner, for a given // network and IP address. -class AllocationSequence : public rtc::MessageHandler, +class AllocationSequence : public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: enum State { diff --git a/p2p/client/basic_port_allocator_unittest.cc b/p2p/client/basic_port_allocator_unittest.cc index 00551ef190..ffacd714a6 100644 --- a/p2p/client/basic_port_allocator_unittest.cc +++ b/p2p/client/basic_port_allocator_unittest.cc @@ -261,17 +261,18 @@ class BasicPortAllocatorTestBase : public ::testing::Test, const std::string& sid, const std::string& content_name, int component) { - return CreateSession(sid, content_name, component, kIceUfrag0, kIcePwd0); + return CreateSession(sid, cricket::MEDIA_TYPE_VIDEO, content_name, component, kIceUfrag0, kIcePwd0); } std::unique_ptr CreateSession( const std::string& sid, + cricket::MediaType media_type, const std::string& content_name, int component, const std::string& ice_ufrag, const std::string& ice_pwd) { std::unique_ptr session = - allocator_->CreateSession(content_name, component, ice_ufrag, ice_pwd); + 
allocator_->CreateSession(content_name, media_type, component, ice_ufrag, ice_pwd); session->SignalPortReady.connect(this, &BasicPortAllocatorTestBase::OnPortReady); session->SignalPortsPruned.connect( @@ -1058,7 +1059,6 @@ TEST_F(BasicPortAllocatorTest, TestSameNetworkDownAndUpWhenSessionNotStopped) { AddInterface(kClientAddr, if_name); ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout, fake_clock); - // TODO(nisse): Needs fixing, test fails with sizes == 0. EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); } @@ -1373,10 +1373,6 @@ TEST_F(BasicPortAllocatorTest, TestDisableUdpTurn) { EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); } -// Disable for asan, see -// https://code.google.com/p/webrtc/issues/detail?id=4743 for details. -#if !defined(ADDRESS_SANITIZER) - // Test that we can get OnCandidatesAllocationDone callback when all the ports // are disabled. TEST_F(BasicPortAllocatorTest, TestDisableAllPorts) { @@ -1402,8 +1398,6 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoUdpSockets) { EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); } -#endif // if !defined(ADDRESS_SANITIZER) - // Test that we don't crash or malfunction if we can't create UDP sockets or // listen on TCP sockets. We still give out a local TCP address, since // apparently this is needed for the remote side to accept our connection. 
diff --git a/p2p/client/turn_port_factory.cc b/p2p/client/turn_port_factory.cc index de4b9e6a09..fd3420c016 100644 --- a/p2p/client/turn_port_factory.cc +++ b/p2p/client/turn_port_factory.cc @@ -28,6 +28,8 @@ std::unique_ptr TurnPortFactory::Create( args.username, args.password, *args.server_address, args.config->credentials, args.config->priority, args.origin, args.turn_customizer); + if (!port) + return nullptr; port->SetTlsCertPolicy(args.config->tls_cert_policy); port->SetTurnLoggingId(args.config->turn_logging_id); return std::move(port); @@ -42,6 +44,8 @@ std::unique_ptr TurnPortFactory::Create(const CreateRelayPortArgs& args, args.config->credentials, args.config->priority, args.origin, args.config->tls_alpn_protocols, args.config->tls_elliptic_curves, args.turn_customizer, args.config->tls_cert_verifier); + if (!port) + return nullptr; port->SetTlsCertPolicy(args.config->tls_cert_policy); port->SetTurnLoggingId(args.config->turn_logging_id); return std::move(port); diff --git a/pc/BUILD.gn b/pc/BUILD.gn index 8f6ef59900..10c5c7b7ab 100644 --- a/pc/BUILD.gn +++ b/pc/BUILD.gn @@ -32,12 +32,8 @@ rtc_library("rtc_pc_base") { "channel_interface.h", "channel_manager.cc", "channel_manager.h", - "composite_data_channel_transport.cc", - "composite_data_channel_transport.h", "composite_rtp_transport.cc", "composite_rtp_transport.h", - "datagram_rtp_transport.cc", - "datagram_rtp_transport.h", "dtls_srtp_transport.cc", "dtls_srtp_transport.h", "dtls_transport.cc", @@ -88,6 +84,7 @@ rtc_library("rtc_pc_base") { "../api:function_view", "../api:ice_transport_factory", "../api:libjingle_peerconnection_api", + "../api:priority", "../api:rtc_error", "../api:rtp_headers", "../api:rtp_parameters", @@ -96,7 +93,6 @@ rtc_library("rtc_pc_base") { "../api/crypto:options", "../api/rtc_event_log", "../api/transport:datagram_transport_interface", - "../api/transport/media:media_transport_interface", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:video_frame", 
"../api/video:video_rtp_headers", @@ -114,16 +110,21 @@ rtc_library("rtc_pc_base") { "../modules/rtp_rtcp:rtp_rtcp_format", "../p2p:rtc_p2p", "../rtc_base", + "../rtc_base:callback_list", "../rtc_base:checks", "../rtc_base:deprecation", "../rtc_base:rtc_task_queue", "../rtc_base:stringutils", + "../rtc_base/synchronization:mutex", + "../rtc_base/synchronization:sequence_checker", "../rtc_base/system:file_wrapper", "../rtc_base/system:rtc_export", "../rtc_base/third_party/base64", "../rtc_base/third_party/sigslot", "../system_wrappers:field_trial", "../system_wrappers:metrics", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", @@ -158,50 +159,33 @@ rtc_library("peerconnection") { visibility = [ "*" ] cflags = [] sources = [ - "audio_rtp_receiver.cc", - "audio_rtp_receiver.h", - "audio_track.cc", - "audio_track.h", - "data_channel.cc", - "data_channel.h", "data_channel_controller.cc", "data_channel_controller.h", - "dtmf_sender.cc", - "dtmf_sender.h", + "data_channel_utils.cc", + "data_channel_utils.h", "ice_server_parsing.cc", "ice_server_parsing.h", - "jitter_buffer_delay.cc", - "jitter_buffer_delay.h", - "jitter_buffer_delay_interface.h", - "jitter_buffer_delay_proxy.h", "jsep_ice_candidate.cc", "jsep_session_description.cc", "local_audio_source.cc", "local_audio_source.h", - "media_stream.cc", - "media_stream.h", "media_stream_observer.cc", "media_stream_observer.h", - "media_stream_track.h", "peer_connection.cc", "peer_connection.h", "peer_connection_factory.cc", "peer_connection_factory.h", "peer_connection_internal.h", - "remote_audio_source.cc", - "remote_audio_source.h", "rtc_stats_collector.cc", "rtc_stats_collector.h", "rtc_stats_traversal.cc", "rtc_stats_traversal.h", - "rtp_parameters_conversion.cc", - "rtp_parameters_conversion.h", - "rtp_receiver.cc", - "rtp_receiver.h", - "rtp_sender.cc", - "rtp_sender.h", - "rtp_transceiver.cc", - 
"rtp_transceiver.h", + "rtp_data_channel.cc", + "rtp_data_channel.h", + "sctp_data_channel.cc", + "sctp_data_channel.h", + "sdp_offer_answer.cc", # TODO: Make separate target when not circular + "sdp_offer_answer.h", # dependent on peerconnection.h "sdp_serializer.cc", "sdp_serializer.h", "sdp_utils.cc", @@ -211,14 +195,6 @@ rtc_library("peerconnection") { "stream_collection.h", "track_media_info_map.cc", "track_media_info_map.h", - "video_rtp_receiver.cc", - "video_rtp_receiver.h", - "video_rtp_track_source.cc", - "video_rtp_track_source.h", - "video_track.cc", - "video_track.h", - "video_track_source.cc", - "video_track_source.h", "webrtc_sdp.cc", "webrtc_sdp.h", "webrtc_session_description_factory.cc", @@ -226,28 +202,66 @@ rtc_library("peerconnection") { ] deps = [ + ":audio_rtp_receiver", + ":audio_track", + ":connection_context", + ":dtmf_sender", + ":jitter_buffer_delay", + ":jitter_buffer_delay_interface", + ":jitter_buffer_delay_proxy", + ":media_protocol_names", + ":media_stream", + ":peer_connection_message_handler", + ":remote_audio_source", ":rtc_pc_base", + ":rtp_parameters_conversion", + ":rtp_receiver", + ":rtp_sender", + ":rtp_transceiver", + ":rtp_transmission_manager", + ":sdp_state_provider", + ":stats_collector_interface", + ":transceiver_list", + ":usage_pattern", + ":video_rtp_receiver", + ":video_track", + ":video_track_source", "../api:array_view", "../api:audio_options_api", "../api:call_api", + "../api:callfactory_api", "../api:fec_controller_api", + "../api:frame_transformer_interface", "../api:ice_transport_factory", + "../api:libjingle_logging_api", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:network_state_predictor_api", + "../api:packet_socket_factory", + "../api:priority", "../api:rtc_error", "../api:rtc_event_log_output_file", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_transceiver_direction", "../api:scoped_refptr", + "../api/adaptation:resource_adaptation_api", 
"../api/crypto:frame_decryptor_interface", + "../api/crypto:options", + "../api/neteq:neteq_api", "../api/rtc_event_log", "../api/task_queue", + "../api/transport:bitrate_settings", "../api/transport:datagram_transport_interface", + "../api/transport:enums", "../api/transport:field_trial_based_config", - "../api/transport/media:media_transport_interface", + "../api/transport:network_control", + "../api/transport:sctp_transport_factory_interface", + "../api/transport:webrtc_key_value_config", "../api/units:data_rate", "../api/video:builtin_video_bitrate_allocator_factory", + "../api/video:video_bitrate_allocator_factory", + "../api/video:video_codec_constants", "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", @@ -256,29 +270,465 @@ rtc_library("peerconnection") { "../logging:ice_log", "../media:rtc_data", "../media:rtc_media_base", + "../media:rtc_media_config", "../modules/rtp_rtcp:rtp_rtcp_format", "../p2p:rtc_p2p", "../rtc_base", + "../rtc_base:callback_list", "../rtc_base:checks", + "../rtc_base:deprecation", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_operations_chain", "../rtc_base:safe_minmax", "../rtc_base:weak_ptr", "../rtc_base/experiments:field_trial_parser", + "../rtc_base/network:sent_packet", + "../rtc_base/synchronization:mutex", + "../rtc_base/synchronization:sequence_checker", "../rtc_base/system:file_wrapper", "../rtc_base/system:rtc_export", + "../rtc_base/task_utils:pending_task_safety_flag", + "../rtc_base/task_utils:to_queued_task", "../rtc_base/third_party/base64", "../rtc_base/third_party/sigslot", "../stats", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("connection_context") { + sources = [ + "connection_context.cc", + "connection_context.h", + ] + deps = [ 
+ ":rtc_pc_base", + "../api:callfactory_api", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:scoped_refptr", + "../api/neteq:neteq_api", + "../api/transport:field_trial_based_config", + "../api/transport:sctp_transport_factory_interface", + "../api/transport:webrtc_key_value_config", + "../media:rtc_data", + "../media:rtc_media_base", + "../p2p:rtc_p2p", + "../rtc_base", + "../rtc_base:checks", + ] +} + +rtc_library("peer_connection_message_handler") { + sources = [ + "peer_connection_message_handler.cc", + "peer_connection_message_handler.h", + ] + deps = [ + ":stats_collector_interface", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtc_error", + "../rtc_base", + "../rtc_base/synchronization:sequence_checker", + ] +} + +rtc_library("usage_pattern") { + sources = [ + "usage_pattern.cc", + "usage_pattern.h", + ] + deps = [ + "../api:libjingle_peerconnection_api", + "../rtc_base:logging", + "../system_wrappers:metrics", + ] +} + +rtc_library("rtp_transceiver") { + sources = [ + "rtp_transceiver.cc", + "rtp_transceiver.h", + ] + deps = [ + ":rtc_pc_base", + ":rtp_parameters_conversion", + ":rtp_receiver", + ":rtp_sender", + "../api:libjingle_peerconnection_api", + "../api:rtp_parameters", + "../rtc_base:checks", + "../rtc_base:logging", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("rtp_transmission_manager") { + sources = [ + "rtp_transmission_manager.cc", + "rtp_transmission_manager.h", + ] + deps = [ + ":audio_rtp_receiver", + ":rtc_pc_base", + ":rtp_receiver", + ":rtp_sender", + ":rtp_transceiver", + ":stats_collector_interface", + ":transceiver_list", + ":usage_pattern", + ":video_rtp_receiver", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtc_error", + "../api:rtp_parameters", + 
"../api:rtp_transceiver_direction", + "../api:scoped_refptr", + "../media:rtc_media_base", + "../rtc_base", + "../rtc_base:checks", + "../rtc_base/third_party/sigslot", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } +rtc_library("transceiver_list") { + sources = [ + "transceiver_list.cc", + "transceiver_list.h", + ] + deps = [ ":rtp_transceiver" ] +} + +rtc_library("rtp_receiver") { + sources = [ + "rtp_receiver.cc", + "rtp_receiver.h", + ] + deps = [ + ":media_stream", + ":video_track_source", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api/crypto:frame_decryptor_interface", + "../api/video:video_frame", + "../media:rtc_media_base", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:rtc_base", + "../rtc_base:rtc_base_approved", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_rtp_receiver") { + sources = [ + "audio_rtp_receiver.cc", + "audio_rtp_receiver.h", + ] + deps = [ + ":audio_track", + ":jitter_buffer_delay", + ":jitter_buffer_delay_interface", + ":jitter_buffer_delay_proxy", + ":media_stream", + ":remote_audio_source", + ":rtp_receiver", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api/crypto:frame_decryptor_interface", + "../media:rtc_media_base", + "../rtc_base", + "../rtc_base:checks", + "../rtc_base:refcount", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("video_rtp_receiver") { + sources = [ + "video_rtp_receiver.cc", + "video_rtp_receiver.h", + ] + deps = [ 
+ ":jitter_buffer_delay", + ":jitter_buffer_delay_interface", + ":jitter_buffer_delay_proxy", + ":media_stream", + ":rtp_receiver", + ":video_rtp_track_source", + ":video_track", + "../api:frame_transformer_interface", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api/crypto:frame_decryptor_interface", + "../api/video:video_frame", + "../media:rtc_media_base", + "../rtc_base", + "../rtc_base:checks", + "../rtc_base:rtc_base_approved", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("video_rtp_track_source") { + sources = [ + "video_rtp_track_source.cc", + "video_rtp_track_source.h", + ] + deps = [ + ":video_track_source", + "../media:rtc_media_base", + "../rtc_base", + "../rtc_base/synchronization:mutex", + ] +} + +rtc_library("audio_track") { + sources = [ + "audio_track.cc", + "audio_track.h", + ] + deps = [ + "../api:media_stream_interface", + "../api:scoped_refptr", + "../rtc_base:checks", + "../rtc_base:refcount", + "../rtc_base:thread_checker", + ] +} + +rtc_library("video_track") { + sources = [ + "video_track.cc", + "video_track.h", + ] + deps = [ + "../api:media_stream_interface", + "../api:scoped_refptr", + "../api/video:video_frame", + "../media:rtc_media_base", + "../rtc_base", + "../rtc_base:checks", + "../rtc_base:refcount", + "../rtc_base:rtc_base_approved", + ] +} + +rtc_source_set("jitter_buffer_delay_interface") { + sources = [ "jitter_buffer_delay_interface.h" ] + deps = [ + "../media:rtc_media_base", + "../rtc_base:refcount", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("sdp_state_provider") { + sources = [ "sdp_state_provider.h" ] + deps = [ + ":rtc_pc_base", + 
"../api:libjingle_peerconnection_api", + ] +} + +rtc_source_set("jitter_buffer_delay_proxy") { + sources = [ "jitter_buffer_delay_proxy.h" ] + deps = [ + ":jitter_buffer_delay_interface", + "../api:libjingle_peerconnection_api", + "../media:rtc_media_base", + ] +} + +rtc_library("jitter_buffer_delay") { + sources = [ + "jitter_buffer_delay.cc", + "jitter_buffer_delay.h", + ] + deps = [ + ":jitter_buffer_delay_interface", + "../media:rtc_media_base", + "../rtc_base", + "../rtc_base:checks", + "../rtc_base:refcount", + "../rtc_base:safe_minmax", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("remote_audio_source") { + sources = [ + "remote_audio_source.cc", + "remote_audio_source.h", + ] + deps = [ + ":rtc_pc_base", + "../api:call_api", + "../api:media_stream_interface", + "../api:scoped_refptr", + "../rtc_base", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:rtc_base_approved", + "../rtc_base:safe_conversions", + "../rtc_base:stringutils", + "../rtc_base/synchronization:mutex", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("rtp_sender") { + sources = [ + "rtp_sender.cc", + "rtp_sender.h", + ] + deps = [ + ":dtmf_sender", + ":stats_collector_interface", + "../api:audio_options_api", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../media:rtc_media_base", + "../rtc_base:checks", + "../rtc_base:rtc_base", + "../rtc_base/synchronization:mutex", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("rtp_parameters_conversion") { + sources = [ + "rtp_parameters_conversion.cc", + "rtp_parameters_conversion.h", 
+ ] + deps = [ + ":rtc_pc_base", + "../api:array_view", + "../api:libjingle_peerconnection_api", + "../api:rtc_error", + "../api:rtp_parameters", + "../media:rtc_media_base", + "../rtc_base:checks", + "../rtc_base:rtc_base", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("dtmf_sender") { + sources = [ + "dtmf_sender.cc", + "dtmf_sender.h", + ] + deps = [ + "../api:libjingle_peerconnection_api", + "../rtc_base:checks", + "../rtc_base:rtc_base", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("media_stream") { + sources = [ + "media_stream.cc", + "media_stream.h", + ] + deps = [ + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:scoped_refptr", + "../rtc_base:checks", + "../rtc_base:refcount", + "../rtc_base:rtc_base", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("video_track_source") { + sources = [ + "video_track_source.cc", + "video_track_source.h", + ] + deps = [ + "../api:media_stream_interface", + "../api/video:video_frame", + "../media:rtc_media_base", + "../rtc_base:checks", + "../rtc_base:rtc_base_approved", + "../rtc_base/system:rtc_export", + ] +} + +rtc_source_set("stats_collector_interface") { + sources = [ "stats_collector_interface.h" ] + deps = [ + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + ] +} + rtc_source_set("libjingle_peerconnection") { visibility = [ "*" ] deps = [ @@ -326,16 +776,14 @@ if (rtc_include_tests) { ":peerconnection", ":rtc_pc", ":rtc_pc_base", + ":video_rtp_receiver", "../api:array_view", "../api:audio_options_api", - 
"../api:fake_media_transport", "../api:ice_transport_factory", "../api:libjingle_peerconnection_api", - "../api:loopback_media_transport", "../api:rtc_error", "../api:rtp_headers", "../api:rtp_parameters", - "../api/transport/media:media_transport_interface", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video/test:mock_recordable_encoded_frame", "../call:rtp_interfaces", @@ -359,6 +807,7 @@ if (rtc_include_tests) { "../test:test_support", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", ] if (rtc_build_libsrtp) { @@ -402,8 +851,8 @@ if (rtc_include_tests) { "../system_wrappers", "../test:perf_test", "../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("peerconnection_wrapper") { @@ -440,7 +889,6 @@ if (rtc_include_tests) { "test/fake_periodic_video_source.h", "test/fake_periodic_video_track_source.h", "test/fake_rtc_certificate_generator.h", - "test/fake_sctp_transport.h", "test/fake_video_track_renderer.h", "test/fake_video_track_source.h", "test/frame_generator_capturer_video_track_source.h", @@ -457,9 +905,14 @@ if (rtc_include_tests) { ] deps = [ + ":jitter_buffer_delay", + ":jitter_buffer_delay_interface", ":libjingle_peerconnection", ":peerconnection", ":rtc_pc_base", + ":rtp_receiver", + ":rtp_sender", + ":video_track_source", "../api:audio_options_api", "../api:create_frame_generator", "../api:create_peerconnection_factory", @@ -495,12 +948,14 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_approved", "../rtc_base:rtc_task_queue", "../rtc_base:task_queue_for_test", + "../rtc_base/synchronization:mutex", + "../rtc_base/synchronization:sequence_checker", "../rtc_base/task_utils:repeating_task", "../rtc_base/third_party/sigslot", "../test:test_support", "../test:video_test_common", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ 
"//third_party/abseil-cpp/absl/types:optional" ] } rtc_test("peerconnection_unittests") { @@ -513,11 +968,13 @@ if (rtc_include_tests) { "jsep_session_description_unittest.cc", "local_audio_source_unittest.cc", "media_stream_unittest.cc", + "peer_connection_adaptation_integrationtest.cc", "peer_connection_bundle_unittest.cc", "peer_connection_crypto_unittest.cc", "peer_connection_data_channel_unittest.cc", "peer_connection_end_to_end_unittest.cc", "peer_connection_factory_unittest.cc", + "peer_connection_header_extension_unittest.cc", "peer_connection_histogram_unittest.cc", "peer_connection_ice_unittest.cc", "peer_connection_integrationtest.cc", @@ -553,8 +1010,24 @@ if (rtc_include_tests) { } deps = [ + ":audio_rtp_receiver", + ":audio_track", + ":dtmf_sender", + ":jitter_buffer_delay", + ":jitter_buffer_delay_interface", + ":media_stream", ":peerconnection", + ":remote_audio_source", ":rtc_pc_base", + ":rtp_parameters_conversion", + ":rtp_receiver", + ":rtp_sender", + ":rtp_transceiver", + ":usage_pattern", + ":video_rtp_receiver", + ":video_rtp_track_source", + ":video_track", + ":video_track_source", "../api:array_view", "../api:audio_options_api", "../api:create_peerconnection_factory", @@ -563,7 +1036,6 @@ if (rtc_include_tests) { "../api:function_view", "../api:libjingle_logging_api", "../api:libjingle_peerconnection_api", - "../api:loopback_media_transport", "../api:media_stream_interface", "../api:mock_rtp", "../api:rtc_error", @@ -575,15 +1047,17 @@ if (rtc_include_tests) { "../api/rtc_event_log", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue:default_task_queue_factory", - "../api/transport/media:media_transport_interface", + "../api/transport:field_trial_based_config", "../api/transport/rtp:rtp_source", "../api/units:time_delta", "../api/video:builtin_video_bitrate_allocator_factory", + "../call/adaptation:resource_adaptation_test_utilities", "../logging:fake_rtc_event_log", "../media:rtc_media_config", 
"../media:rtc_media_engine_defaults", "../modules/audio_device:audio_device_api", "../modules/audio_processing:audio_processing_statistics", + "../modules/audio_processing:audioproc_test_utils", "../modules/rtp_rtcp:rtp_rtcp_format", "../p2p:fake_ice_transport", "../p2p:fake_port_allocator", @@ -591,15 +1065,19 @@ if (rtc_include_tests) { "../rtc_base:gunit_helpers", "../rtc_base:rtc_base_tests_utils", "../rtc_base:rtc_json", + "../rtc_base/synchronization:mutex", "../rtc_base/third_party/base64", "../rtc_base/third_party/sigslot", "../system_wrappers:metrics", "../test:field_trial", "../test:fileutils", "../test:rtp_test_utils", + "../test/pc/sctp:fake_sctp_transport", + "./scenario_tests:pc_scenario_tests", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", ] if (is_android) { deps += [ ":android_black_magic" ] @@ -624,7 +1102,8 @@ if (rtc_include_tests) { "../api/video_codecs:video_codecs_api", "../call:call_interfaces", "../media:rtc_audio_video", - "../media:rtc_data", # TODO(phoglund): AFAIK only used for one sctp constant. + "../media:rtc_data", # TODO(phoglund): AFAIK only used for one sctp + # constant. "../media:rtc_media_base", "../media:rtc_media_tests_utils", "../modules/audio_processing", @@ -640,7 +1119,6 @@ if (rtc_include_tests) { "../test:audio_codec_mocks", "../test:test_main", "../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] if (is_android) { diff --git a/pc/OWNERS b/pc/OWNERS index 5519686efc..493114f43f 100644 --- a/pc/OWNERS +++ b/pc/OWNERS @@ -1,14 +1,6 @@ -amithi@webrtc.org -honghaiz@webrtc.org hbos@webrtc.org hta@webrtc.org juberti@webrtc.org perkj@webrtc.org -shampson@webrtc.org -steveanton@webrtc.org tommi@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* +deadbeef@webrtc.org diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc index e83e558c7b..8ff685d8e2 100644 --- a/pc/audio_rtp_receiver.cc +++ b/pc/audio_rtp_receiver.cc @@ -42,8 +42,9 @@ AudioRtpReceiver::AudioRtpReceiver( : worker_thread_(worker_thread), id_(receiver_id), source_(new rtc::RefCountedObject(worker_thread)), - track_(AudioTrackProxy::Create(rtc::Thread::Current(), - AudioTrack::Create(receiver_id, source_))), + track_(AudioTrackProxyWithInternal::Create( + rtc::Thread::Current(), + AudioTrack::Create(receiver_id, source_))), cached_track_enabled_(track_->enabled()), attachment_id_(GenerateUniqueId()), delay_(JitterBufferDelayProxy::Create( @@ -146,6 +147,11 @@ void AudioRtpReceiver::Stop() { stopped_ = true; } +void AudioRtpReceiver::StopAndEndTrack() { + Stop(); + track_->internal()->set_ended(); +} + void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK(media_channel_); if (!stopped_ && ssrc_ == ssrc) { @@ -225,6 +231,19 @@ std::vector AudioRtpReceiver::GetSources() const { RTC_FROM_HERE, [&] { return media_channel_->GetSources(*ssrc_); }); } +void AudioRtpReceiver::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + worker_thread_->Invoke( + RTC_FROM_HERE, [this, frame_transformer = std::move(frame_transformer)] { + RTC_DCHECK_RUN_ON(worker_thread_); + frame_transformer_ = frame_transformer; + if (media_channel_ && ssrc_.has_value() && !stopped_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + *ssrc_, frame_transformer); + } + }); +} + void AudioRtpReceiver::Reconfigure() { if (!media_channel_ || stopped_) { RTC_LOG(LS_ERROR) @@ -237,6 +256,16 @@ void AudioRtpReceiver::Reconfigure() { // Reattach the frame decryptor if we were reconfigured. 
MaybeAttachFrameDecryptorToMediaChannel( ssrc_, worker_thread_, frame_decryptor_, media_channel_, stopped_); + + if (media_channel_ && ssrc_.has_value() && !stopped_) { + worker_thread_->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!frame_transformer_) + return; + media_channel_->SetDepacketizerToDecoderFrameTransformer( + *ssrc_, frame_transformer_); + }); + } } void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) { diff --git a/pc/audio_rtp_receiver.h b/pc/audio_rtp_receiver.h index 908cb647fd..f4b821068e 100644 --- a/pc/audio_rtp_receiver.h +++ b/pc/audio_rtp_receiver.h @@ -19,10 +19,12 @@ #include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/media_stream_interface.h" +#include "api/media_stream_track_proxy.h" #include "api/media_types.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "media/base/media_channel.h" +#include "pc/audio_track.h" #include "pc/jitter_buffer_delay_interface.h" #include "pc/remote_audio_source.h" #include "pc/rtp_receiver.h" @@ -84,6 +86,7 @@ class AudioRtpReceiver : public ObserverInterface, // RtpReceiverInternal implementation. 
void Stop() override; + void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override { return ssrc_.value_or(0); } @@ -104,6 +107,9 @@ class AudioRtpReceiver : public ObserverInterface, std::vector GetSources() const override; int AttachmentId() const override { return attachment_id_; } + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) + override; private: void RestartMediaChannel(absl::optional ssrc); @@ -113,7 +119,7 @@ class AudioRtpReceiver : public ObserverInterface, rtc::Thread* const worker_thread_; const std::string id_; const rtc::scoped_refptr source_; - const rtc::scoped_refptr track_; + const rtc::scoped_refptr> track_; cricket::VoiceMediaChannel* media_channel_ = nullptr; absl::optional ssrc_; std::vector> streams_; @@ -128,6 +134,8 @@ class AudioRtpReceiver : public ObserverInterface, // Allows to thread safely change playout delay. Handles caching cases if // |SetJitterBufferMinimumDelay| is called before start. rtc::scoped_refptr delay_; + rtc::scoped_refptr frame_transformer_ + RTC_GUARDED_BY(worker_thread_); }; } // namespace webrtc diff --git a/pc/audio_track.cc b/pc/audio_track.cc index ff680652c9..4f4c6b4757 100644 --- a/pc/audio_track.cc +++ b/pc/audio_track.cc @@ -39,7 +39,6 @@ AudioTrack::~AudioTrack() { } std::string AudioTrack::kind() const { - RTC_DCHECK(thread_checker_.IsCurrent()); return kAudioKind; } diff --git a/pc/audio_track.h b/pc/audio_track.h index f89bbcdd1d..8cff79e8b9 100644 --- a/pc/audio_track.h +++ b/pc/audio_track.h @@ -14,9 +14,8 @@ #include #include "api/media_stream_interface.h" +#include "api/media_stream_track.h" #include "api/scoped_refptr.h" -#include "pc/media_stream_track.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/thread_checker.h" namespace webrtc { @@ -27,6 +26,11 @@ class AudioTrack : public MediaStreamTrack, // Protected ctor to force use of factory method. 
AudioTrack(const std::string& label, const rtc::scoped_refptr& source); + + AudioTrack() = delete; + AudioTrack(const AudioTrack&) = delete; + AudioTrack& operator=(const AudioTrack&) = delete; + ~AudioTrack() override; public: @@ -34,10 +38,10 @@ class AudioTrack : public MediaStreamTrack, const std::string& id, const rtc::scoped_refptr& source); - private: // MediaStreamTrack implementation. std::string kind() const override; + private: // AudioTrackInterface implementation. AudioSourceInterface* GetSource() const override; @@ -50,7 +54,6 @@ class AudioTrack : public MediaStreamTrack, private: const rtc::scoped_refptr audio_source_; rtc::ThreadChecker thread_checker_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioTrack); }; } // namespace webrtc diff --git a/pc/channel.cc b/pc/channel.cc index 03e63b0224..02ee9d2492 100644 --- a/pc/channel.cc +++ b/pc/channel.cc @@ -16,7 +16,6 @@ #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "api/call/audio_sink.h" -#include "api/transport/media/media_transport_config.h" #include "media/base/media_constants.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -31,6 +30,7 @@ #include "rtc_base/logging.h" #include "rtc_base/network_route.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/trace_event.h" namespace cricket { @@ -101,13 +101,8 @@ void RtpParametersFromMediaDescription( bool is_stream_active, RtpParameters* params) { params->is_stream_active = is_stream_active; - // TODO(pthatcher): Remove this once we're sure no one will give us - // a description without codecs. Currently the ORTC implementation is relying - // on this. - if (desc->has_codecs()) { - params->codecs = desc->codecs(); - } - // TODO(pthatcher): See if we really need + params->codecs = desc->codecs(); + // TODO(bugs.webrtc.org/11513): See if we really need // rtp_header_extensions_set() and remove it if we don't. 
if (desc->rtp_header_extensions_set()) { params->extensions = extensions; @@ -147,7 +142,7 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread, RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(ssrc_generator_); demuxer_criteria_.mid = content_name; - RTC_LOG(LS_INFO) << "Created channel for " << content_name; + RTC_LOG(LS_INFO) << "Created channel: " << ToString(); } BaseChannel::~BaseChannel() { @@ -161,12 +156,23 @@ BaseChannel::~BaseChannel() { // the media channel may try to send on the dead transport channel. NULLing // is not an effective strategy since the sends will come on another thread. media_channel_.reset(); - RTC_LOG(LS_INFO) << "Destroyed channel: " << content_name_; + RTC_LOG(LS_INFO) << "Destroyed channel: " << ToString(); +} + +std::string BaseChannel::ToString() const { + rtc::StringBuilder sb; + sb << "{mid: " << content_name_; + if (media_channel_) { + sb << ", media_type: " << MediaTypeToString(media_channel_->media_type()); + } + sb << "}"; + return sb.Release(); } bool BaseChannel::ConnectToRtpTransport() { RTC_DCHECK(rtp_transport_); if (!RegisterRtpDemuxerSink()) { + RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); return false; } rtp_transport_->SignalReadyToSend.connect( @@ -189,24 +195,20 @@ void BaseChannel::DisconnectFromRtpTransport() { rtp_transport_->SignalSentPacket.disconnect(this); } -void BaseChannel::Init_w( - webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config) { +void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { RTC_DCHECK_RUN_ON(worker_thread_); - media_transport_config_ = media_transport_config; network_thread_->Invoke( RTC_FROM_HERE, [this, rtp_transport] { SetRtpTransport(rtp_transport); }); // Both RTP and RTCP channels should be set, we can call SetInterface on // the media channel and it can set network options. 
- media_channel_->SetInterface(this, media_transport_config); + media_channel_->SetInterface(this); } void BaseChannel::Deinit() { - RTC_DCHECK(worker_thread_->IsCurrent()); - media_channel_->SetInterface(/*iface=*/nullptr, - webrtc::MediaTransportConfig()); + RTC_DCHECK_RUN_ON(worker_thread()); + media_channel_->SetInterface(/*iface=*/nullptr); // Packets arrive on the network thread, processing packets calls virtual // functions, so need to stop this process in Deinit that is called in // derived classes destructor. @@ -242,7 +244,8 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { transport_name_ = rtp_transport_->transport_name(); if (!ConnectToRtpTransport()) { - RTC_LOG(LS_ERROR) << "Failed to connect to the new RtpTransport."; + RTC_LOG(LS_ERROR) << "Failed to connect to the new RtpTransport for " + << ToString() << "."; return false; } OnTransportReadyToSend(rtp_transport_->IsReadyToSend()); @@ -287,6 +290,13 @@ bool BaseChannel::SetRemoteContent(const MediaContentDescription* content, Bind(&BaseChannel::SetRemoteContent_w, this, content, type, error_desc)); } +bool BaseChannel::SetPayloadTypeDemuxingEnabled(bool enabled) { + TRACE_EVENT0("webrtc", "BaseChannel::SetPayloadTypeDemuxingEnabled"); + return InvokeOnWorker( + RTC_FROM_HERE, + Bind(&BaseChannel::SetPayloadTypeDemuxingEnabled_w, this, enabled)); +} + bool BaseChannel::IsReadyToReceiveMedia_w() const { // Receive data if we are enabled and have local content, return enabled() && @@ -328,7 +338,7 @@ int BaseChannel::SetOption(SocketType type, int BaseChannel::SetOption_n(SocketType type, rtc::Socket::Option opt, int value) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(rtp_transport_); switch (type) { case ST_RTP: @@ -344,7 +354,7 @@ int BaseChannel::SetOption_n(SocketType type, } void BaseChannel::OnWritableState(bool writable) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); if 
(writable) { ChannelWritable_n(); } else { @@ -354,9 +364,9 @@ void BaseChannel::OnWritableState(bool writable) { void BaseChannel::OnNetworkRouteChanged( absl::optional network_route) { - RTC_LOG(LS_INFO) << "Network route was changed."; + RTC_LOG(LS_INFO) << "Network route for " << ToString() << " was changed."; - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); rtc::NetworkRoute new_route; if (network_route) { new_route = *(network_route); @@ -370,6 +380,18 @@ void BaseChannel::OnNetworkRouteChanged( }); } +sigslot::signal1& BaseChannel::SignalFirstPacketReceived() { + RTC_DCHECK_RUN_ON(signaling_thread_); + return SignalFirstPacketReceived_; +} + +sigslot::signal1& BaseChannel::SignalSentPacket() { + // TODO(bugs.webrtc.org/11994): Uncomment this check once callers have been + // fixed to access this variable from the correct thread. + // RTC_DCHECK_RUN_ON(worker_thread_); + return SignalSentPacket_; +} + void BaseChannel::OnTransportReadyToSend(bool ready) { invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [=] { media_channel_->OnReadyToSend(ready); }); @@ -409,7 +431,7 @@ bool BaseChannel::SendPacket(bool rtcp, // Protect ourselves against crazy data. if (!IsValidRtpPacketSize(packet_type, packet->size())) { - RTC_LOG(LS_ERROR) << "Dropping outgoing " << content_name_ << " " + RTC_LOG(LS_ERROR) << "Dropping outgoing " << ToString() << " " << RtpPacketTypeToString(packet_type) << " packet: wrong size=" << packet->size(); return false; @@ -425,16 +447,16 @@ bool BaseChannel::SendPacket(bool rtcp, } // However, there shouldn't be any RTP packets sent before SRTP is set up // (and SetSend(true) is called). - RTC_LOG(LS_ERROR) - << "Can't send outgoing RTP packet when SRTP is inactive" - " and crypto is required"; + RTC_LOG(LS_ERROR) << "Can't send outgoing RTP packet for " << ToString() + << " when SRTP is inactive and crypto is required"; RTC_NOTREACHED(); return false; } std::string packet_type = rtcp ? 
"RTCP" : "RTP"; - RTC_LOG(LS_WARNING) << "Sending an " << packet_type - << " packet without encryption."; + RTC_DLOG(LS_WARNING) << "Sending an " << packet_type + << " packet without encryption for " << ToString() + << "."; } // Bon voyage. @@ -468,7 +490,8 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { // for us to just eat packets here. This is all sidestepped if RTCP mux // is used anyway. RTC_LOG(LS_WARNING) << "Can't process incoming RTP packet when " - "SRTP is inactive and crypto is required"; + "SRTP is inactive and crypto is required " + << ToString(); return; } @@ -476,7 +499,7 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { invoker_.AsyncInvoke( RTC_FROM_HERE, worker_thread_, [this, packet_buffer, packet_time_us] { - RTC_DCHECK(worker_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread()); media_channel_->OnPacketReceived(packet_buffer, packet_time_us); }); } @@ -509,7 +532,7 @@ void BaseChannel::EnableMedia_w() { if (enabled_) return; - RTC_LOG(LS_INFO) << "Channel enabled"; + RTC_LOG(LS_INFO) << "Channel enabled: " << ToString(); enabled_ = true; UpdateMediaSendRecvState_w(); } @@ -519,7 +542,7 @@ void BaseChannel::DisableMedia_w() { if (!enabled_) return; - RTC_LOG(LS_INFO) << "Channel disabled"; + RTC_LOG(LS_INFO) << "Channel disabled: " << ToString(); enabled_ = false; UpdateMediaSendRecvState_w(); } @@ -534,12 +557,12 @@ void BaseChannel::UpdateWritableState_n() { } void BaseChannel::ChannelWritable_n() { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); if (writable_) { return; } - RTC_LOG(LS_INFO) << "Channel writable (" << content_name_ << ")" + RTC_LOG(LS_INFO) << "Channel writable (" << ToString() << ")" << (was_ever_writable_ ? 
"" : " for the first time"); was_ever_writable_ = true; @@ -548,11 +571,11 @@ void BaseChannel::ChannelWritable_n() { } void BaseChannel::ChannelNotWritable_n() { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); if (!writable_) return; - RTC_LOG(LS_INFO) << "Channel not writable (" << content_name_ << ")"; + RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")"; writable_ = false; UpdateMediaSendRecvState(); } @@ -572,6 +595,37 @@ void BaseChannel::ResetUnsignaledRecvStream_w() { media_channel()->ResetUnsignaledRecvStream(); } +bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { + RTC_DCHECK_RUN_ON(worker_thread()); + if (enabled == payload_type_demuxing_enabled_) { + return true; + } + payload_type_demuxing_enabled_ = enabled; + if (!enabled) { + // TODO(crbug.com/11477): This will remove *all* unsignaled streams (those + // without an explicitly signaled SSRC), which may include streams that + // were matched to this channel by MID or RID. Ideally we'd remove only the + // streams that were matched based on payload type alone, but currently + // there is no straightforward way to identify those streams. 
+ media_channel()->ResetUnsignaledRecvStream(); + demuxer_criteria_.payload_types.clear(); + if (!RegisterRtpDemuxerSink()) { + RTC_LOG(LS_ERROR) << "Failed to disable payload type demuxing for " + << ToString(); + return false; + } + } else if (!payload_types_.empty()) { + demuxer_criteria_.payload_types.insert(payload_types_.begin(), + payload_types_.end()); + if (!RegisterRtpDemuxerSink()) { + RTC_LOG(LS_ERROR) << "Failed to enable payload type demuxing for " + << ToString(); + return false; + } + } + return true; +} + bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, SdpType type, std::string* error_desc) { @@ -596,7 +650,8 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, if (!media_channel()->RemoveSendStream(old_stream.first_ssrc())) { rtc::StringBuilder desc; desc << "Failed to remove send stream with ssrc " - << old_stream.first_ssrc() << "."; + << old_stream.first_ssrc() << " from m-section with mid='" + << content_name() << "'."; SafeSetError(desc.str(), error_desc); ret = false; } @@ -622,7 +677,8 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, if (new_stream.has_ssrcs() && new_stream.has_rids()) { rtc::StringBuilder desc; desc << "Failed to add send stream: " << new_stream.first_ssrc() - << ". Stream has both SSRCs and RIDs."; + << " into m-section with mid='" << content_name() + << "'. 
Stream has both SSRCs and RIDs."; SafeSetError(desc.str(), error_desc); ret = false; continue; @@ -637,10 +693,12 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, } if (media_channel()->AddSendStream(new_stream)) { - RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0]; + RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0] + << " into " << ToString(); } else { rtc::StringBuilder desc; - desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc(); + desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc() + << " into m-section with mid='" << content_name() << "'"; SafeSetError(desc.str(), error_desc); ret = false; } @@ -660,15 +718,18 @@ bool BaseChannel::UpdateRemoteStreams_w( // the unsignaled stream params that are cached. if (!old_stream.has_ssrcs() && !HasStreamWithNoSsrcs(streams)) { ResetUnsignaledRecvStream_w(); - RTC_LOG(LS_INFO) << "Reset unsignaled remote stream."; + RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString() + << "."; } else if (old_stream.has_ssrcs() && !GetStreamBySsrc(streams, old_stream.first_ssrc())) { if (RemoveRecvStream_w(old_stream.first_ssrc())) { - RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc(); + RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc() + << " from " << ToString() << "."; } else { rtc::StringBuilder desc; desc << "Failed to remove remote stream with ssrc " - << old_stream.first_ssrc() << "."; + << old_stream.first_ssrc() << " from m-section with mid='" + << content_name() << "'."; SafeSetError(desc.str(), error_desc); ret = false; } @@ -686,13 +747,15 @@ bool BaseChannel::UpdateRemoteStreams_w( RTC_LOG(LS_INFO) << "Add remote ssrc: " << (new_stream.has_ssrcs() ? std::to_string(new_stream.first_ssrc()) - : "unsignaled"); + : "unsignaled") + << " to " << ToString(); } else { rtc::StringBuilder desc; desc << "Failed to add remote stream ssrc: " << (new_stream.has_ssrcs() ? 
std::to_string(new_stream.first_ssrc()) - : "unsignaled"); + : "unsignaled") + << " to " << ToString(); SafeSetError(desc.str(), error_desc); ret = false; } @@ -702,7 +765,10 @@ bool BaseChannel::UpdateRemoteStreams_w( new_stream.ssrcs.end()); } // Re-register the sink to update the receiving ssrcs. - RegisterRtpDemuxerSink(); + if (!RegisterRtpDemuxerSink()) { + RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); + ret = false; + } remote_streams_ = streams; return ret; } @@ -727,7 +793,7 @@ void BaseChannel::OnMessage(rtc::Message* pmsg) { switch (pmsg->message_id) { case MSG_SEND_RTP_PACKET: case MSG_SEND_RTCP_PACKET: { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); SendPacketMessageData* data = static_cast(pmsg->pdata); bool rtcp = pmsg->message_id == MSG_SEND_RTCP_PACKET; @@ -736,24 +802,31 @@ void BaseChannel::OnMessage(rtc::Message* pmsg) { break; } case MSG_FIRSTPACKETRECEIVED: { + RTC_DCHECK_RUN_ON(signaling_thread_); SignalFirstPacketReceived_(this); break; } } } -void BaseChannel::AddHandledPayloadType(int payload_type) { - demuxer_criteria_.payload_types.insert(static_cast(payload_type)); +void BaseChannel::MaybeAddHandledPayloadType(int payload_type) { + if (payload_type_demuxing_enabled_) { + demuxer_criteria_.payload_types.insert(static_cast(payload_type)); + } + // Even if payload type demuxing is currently disabled, we need to remember + // the payload types in case it's re-enabled later. + payload_types_.insert(static_cast(payload_type)); } void BaseChannel::ClearHandledPayloadTypes() { demuxer_criteria_.payload_types.clear(); + payload_types_.clear(); } void BaseChannel::FlushRtcpMessages_n() { // Flush all remaining RTCP messages. This should only be called in // destructor. 
- RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); rtc::MessageList rtcp_messages; network_thread_->Clear(this, MSG_SEND_RTCP_PACKET, &rtcp_messages); for (const auto& message : rtcp_messages) { @@ -763,11 +836,11 @@ void BaseChannel::FlushRtcpMessages_n() { } void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [this, sent_packet] { - RTC_DCHECK(worker_thread_->IsCurrent()); - SignalSentPacket(sent_packet); + RTC_DCHECK_RUN_ON(worker_thread()); + SignalSentPacket()(sent_packet); }); } @@ -796,15 +869,13 @@ VoiceChannel::~VoiceChannel() { } void BaseChannel::UpdateMediaSendRecvState() { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread()); invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [this] { UpdateMediaSendRecvState_w(); }); } -void VoiceChannel::Init_w( - webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config) { - BaseChannel::Init_w(rtp_transport, media_transport_config); +void VoiceChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { + BaseChannel::Init_w(rtp_transport); } void VoiceChannel::UpdateMediaSendRecvState_w() { @@ -818,7 +889,8 @@ void VoiceChannel::UpdateMediaSendRecvState_w() { bool send = IsReadyToSendMedia_w(); media_channel()->SetSend(send); - RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send; + RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send + << " for " << ToString(); } bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, @@ -826,7 +898,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, std::string* error_desc) { TRACE_EVENT0("webrtc", "VoiceChannel::SetLocalContent_w"); RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting local voice 
description"; + RTC_LOG(LS_INFO) << "Setting local voice description for " << ToString(); RTC_DCHECK(content); if (!content) { @@ -846,18 +918,21 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, audio, rtp_header_extensions, webrtc::RtpTransceiverDirectionHasRecv(audio->direction()), &recv_params); if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError("Failed to set local audio description recv parameters.", - error_desc); + SafeSetError( + "Failed to set local audio description recv parameters for m-section " + "with mid='" + + content_name() + "'.", + error_desc); return false; } if (webrtc::RtpTransceiverDirectionHasRecv(audio->direction())) { for (const AudioCodec& codec : audio->codecs()) { - AddHandledPayloadType(codec.id); + MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. if (!RegisterRtpDemuxerSink()) { - RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing."; + RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing for " << ToString(); return false; } } @@ -869,7 +944,11 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, // description too (without a remote description, we won't be able // to send them anyway). 
if (!UpdateLocalStreams_w(audio->streams(), type, error_desc)) { - SafeSetError("Failed to set local audio description streams.", error_desc); + SafeSetError( + "Failed to set local audio description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } @@ -883,7 +962,7 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, std::string* error_desc) { TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w"); RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting remote voice description"; + RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString(); RTC_DCHECK(content); if (!content) { @@ -904,18 +983,22 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, bool parameters_applied = media_channel()->SetSendParameters(send_params); if (!parameters_applied) { - SafeSetError("Failed to set remote audio description send parameters.", - error_desc); + SafeSetError( + "Failed to set remote audio description send parameters for m-section " + "with mid='" + + content_name() + "'.", + error_desc); return false; } last_send_params_ = send_params; if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - " - "disable payload type demuxing"; + "disable payload type demuxing for " + << ToString(); ClearHandledPayloadTypes(); if (!RegisterRtpDemuxerSink()) { - RTC_LOG(LS_ERROR) << "Failed to update audio demuxing."; + RTC_LOG(LS_ERROR) << "Failed to update audio demuxing for " << ToString(); return false; } } @@ -925,7 +1008,11 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, // description too (without a local description, we won't be able to // recv them anyway). 
if (!UpdateRemoteStreams_w(audio->streams(), type, error_desc)) { - SafeSetError("Failed to set remote audio description streams.", error_desc); + SafeSetError( + "Failed to set remote audio description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } @@ -963,11 +1050,12 @@ void VideoChannel::UpdateMediaSendRecvState_w() { // and we have had some form of connectivity. bool send = IsReadyToSendMedia_w(); if (!media_channel()->SetSend(send)) { - RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel"; + RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel: " + ToString(); // TODO(gangji): Report error back to server. } - RTC_LOG(LS_INFO) << "Changing video state, send=" << send; + RTC_LOG(LS_INFO) << "Changing video state, send=" << send << " for " + << ToString(); } void VideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) { @@ -980,7 +1068,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, std::string* error_desc) { TRACE_EVENT0("webrtc", "VideoChannel::SetLocalContent_w"); RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting local video description"; + RTC_LOG(LS_INFO) << "Setting local video description for " << ToString(); RTC_DCHECK(content); if (!content) { @@ -1012,7 +1100,9 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, needs_send_params_update = true; } else if (recv_codec->packetization != send_codec.packetization) { SafeSetError( - "Failed to set local answer due to invalid codec packetization.", + "Failed to set local answer due to invalid codec packetization " + "specified in m-section with mid='" + + content_name() + "'.", error_desc); return false; } @@ -1021,18 +1111,21 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, } if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError("Failed to set local video description recv parameters.", - error_desc); + SafeSetError( + 
"Failed to set local video description recv parameters for m-section " + "with mid='" + + content_name() + "'.", + error_desc); return false; } if (webrtc::RtpTransceiverDirectionHasRecv(video->direction())) { for (const VideoCodec& codec : video->codecs()) { - AddHandledPayloadType(codec.id); + MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. if (!RegisterRtpDemuxerSink()) { - RTC_LOG(LS_ERROR) << "Failed to set up video demuxing."; + RTC_LOG(LS_ERROR) << "Failed to set up video demuxing for " << ToString(); return false; } } @@ -1041,7 +1134,9 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, if (needs_send_params_update) { if (!media_channel()->SetSendParameters(send_params)) { - SafeSetError("Failed to set send parameters.", error_desc); + SafeSetError("Failed to set send parameters for m-section with mid='" + + content_name() + "'.", + error_desc); return false; } last_send_params_ = send_params; @@ -1052,7 +1147,11 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, // description too (without a remote description, we won't be able // to send them anyway). 
if (!UpdateLocalStreams_w(video->streams(), type, error_desc)) { - SafeSetError("Failed to set local video description streams.", error_desc); + SafeSetError( + "Failed to set local video description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } @@ -1066,7 +1165,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, std::string* error_desc) { TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w"); RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting remote video description"; + RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString(); RTC_DCHECK(content); if (!content) { @@ -1100,7 +1199,9 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, needs_recv_params_update = true; } else if (send_codec->packetization != recv_codec.packetization) { SafeSetError( - "Failed to set remote answer due to invalid codec packetization.", + "Failed to set remote answer due to invalid codec packetization " + "specifid in m-section with mid='" + + content_name() + "'.", error_desc); return false; } @@ -1109,15 +1210,20 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, } if (!media_channel()->SetSendParameters(send_params)) { - SafeSetError("Failed to set remote video description send parameters.", - error_desc); + SafeSetError( + "Failed to set remote video description send parameters for m-section " + "with mid='" + + content_name() + "'.", + error_desc); return false; } last_send_params_ = send_params; if (needs_recv_params_update) { if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError("Failed to set recv parameters.", error_desc); + SafeSetError("Failed to set recv parameters for m-section with mid='" + + content_name() + "'.", + error_desc); return false; } last_recv_params_ = recv_params; @@ -1125,10 +1231,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, if 
(!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - " - "disable payload type demuxing"; + "disable payload type demuxing for " + << ToString(); ClearHandledPayloadTypes(); if (!RegisterRtpDemuxerSink()) { - RTC_LOG(LS_ERROR) << "Failed to update video demuxing."; + RTC_LOG(LS_ERROR) << "Failed to update video demuxing for " << ToString(); return false; } } @@ -1138,7 +1245,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, // description too (without a local description, we won't be able to // recv them anyway). if (!UpdateRemoteStreams_w(video->streams(), type, error_desc)) { - SafeSetError("Failed to set remote video description streams.", error_desc); + SafeSetError( + "Failed to set remote video description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } set_remote_content_direction(content->direction()); @@ -1170,10 +1281,8 @@ RtpDataChannel::~RtpDataChannel() { Deinit(); } -void RtpDataChannel::Init_w( - webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config) { - BaseChannel::Init_w(rtp_transport, media_transport_config); +void RtpDataChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { + BaseChannel::Init_w(rtp_transport); media_channel()->SignalDataReceived.connect(this, &RtpDataChannel::OnDataReceived); media_channel()->SignalReadyToSend.connect( @@ -1208,7 +1317,7 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content, std::string* error_desc) { TRACE_EVENT0("webrtc", "RtpDataChannel::SetLocalContent_w"); RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting local data description"; + RTC_LOG(LS_INFO) << "Setting local data description for " << ToString(); RTC_DCHECK(content); if (!content) { @@ -1229,16 +1338,19 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* 
content, data, rtp_header_extensions, webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &recv_params); if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError("Failed to set remote data description recv parameters.", - error_desc); + SafeSetError( + "Failed to set remote data description recv parameters for m-section " + "with mid='" + + content_name() + "'.", + error_desc); return false; } for (const DataCodec& codec : data->codecs()) { - AddHandledPayloadType(codec.id); + MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. if (!RegisterRtpDemuxerSink()) { - RTC_LOG(LS_ERROR) << "Failed to set up data demuxing."; + RTC_LOG(LS_ERROR) << "Failed to set up data demuxing for " << ToString(); return false; } @@ -1249,7 +1361,11 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content, // description too (without a remote description, we won't be able // to send them anyway). if (!UpdateLocalStreams_w(data->streams(), type, error_desc)) { - SafeSetError("Failed to set local data description streams.", error_desc); + SafeSetError( + "Failed to set local data description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } @@ -1263,7 +1379,7 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content, std::string* error_desc) { TRACE_EVENT0("webrtc", "RtpDataChannel::SetRemoteContent_w"); RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting remote data description"; + RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString(); RTC_DCHECK(content); if (!content) { @@ -1285,14 +1401,17 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content, RtpHeaderExtensions rtp_header_extensions = GetFilteredRtpHeaderExtensions(data->rtp_header_extensions()); - RTC_LOG(LS_INFO) << "Setting remote data description"; + RTC_LOG(LS_INFO) << "Setting remote data description 
for " << ToString(); DataSendParameters send_params = last_send_params_; RtpSendParametersFromMediaDescription( data, rtp_header_extensions, webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &send_params); if (!media_channel()->SetSendParameters(send_params)) { - SafeSetError("Failed to set remote data description send parameters.", - error_desc); + SafeSetError( + "Failed to set remote data description send parameters for m-section " + "with mid='" + + content_name() + "'.", + error_desc); return false; } last_send_params_ = send_params; @@ -1302,7 +1421,11 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content, // description too (without a local description, we won't be able to // recv them anyway). if (!UpdateRemoteStreams_w(data->streams(), type, error_desc)) { - SafeSetError("Failed to set remote data description streams.", error_desc); + SafeSetError( + "Failed to set remote data description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } @@ -1316,20 +1439,21 @@ void RtpDataChannel::UpdateMediaSendRecvState_w() { // content. We receive data on the default channel and multiplexed streams. bool recv = IsReadyToReceiveMedia_w(); if (!media_channel()->SetReceive(recv)) { - RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel"; + RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel: " << ToString(); } // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. bool send = IsReadyToSendMedia_w(); if (!media_channel()->SetSend(send)) { - RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel"; + RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel: " << ToString(); } // Trigger SignalReadyToSendData asynchronously. 
OnDataChannelReadyToSend(send); - RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send; + RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send + << " for " << ToString(); } void RtpDataChannel::OnMessage(rtc::Message* pmsg) { diff --git a/pc/channel.h b/pc/channel.h index 238a8e20fe..51cc40fc53 100644 --- a/pc/channel.h +++ b/pc/channel.h @@ -22,7 +22,6 @@ #include "api/function_view.h" #include "api/jsep.h" #include "api/rtp_receiver_interface.h" -#include "api/transport/media/media_transport_config.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "call/rtp_packet_sink_interface.h" @@ -39,14 +38,14 @@ #include "pc/srtp_transport.h" #include "rtc_base/async_invoker.h" #include "rtc_base/async_udp_socket.h" -#include "rtc_base/critical_section.h" #include "rtc_base/network.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/unique_id_generator.h" namespace webrtc { class AudioSinkInterface; -class MediaTransportInterface; } // namespace webrtc namespace cricket { @@ -72,7 +71,7 @@ struct CryptoParams; // NetworkInterface. class BaseChannel : public ChannelInterface, - public rtc::MessageHandler, + public rtc::MessageHandlerAutoCleanup, public sigslot::has_slots<>, public MediaChannel::NetworkInterface, public webrtc::RtpPacketSinkInterface { @@ -92,9 +91,7 @@ class BaseChannel : public ChannelInterface, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); virtual ~BaseChannel(); - virtual void Init_w( - webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config); + virtual void Init_w(webrtc::RtpTransportInternal* rtp_transport); // Deinit may be called multiple times and is simply ignored if it's already // done. 
@@ -129,6 +126,15 @@ class BaseChannel : public ChannelInterface, bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, std::string* error_desc) override; + // Controls whether this channel will receive packets on the basis of + // matching payload type alone. This is needed for legacy endpoints that + // don't signal SSRCs or use MID/RID, but doesn't make sense if there is + // more than one channel of a specific media type, as that creates an ambiguity. + // + // This method will also remove any existing streams that were bound to this + // channel on the basis of payload type, since one of these streams might + // actually belong to a new channel. See: crbug.com/webrtc/11477 + bool SetPayloadTypeDemuxingEnabled(bool enabled) override; bool Enable(bool enable) override; @@ -139,22 +145,11 @@ class BaseChannel : public ChannelInterface, return remote_streams_; } - sigslot::signal2 SignalDtlsSrtpSetupFailure; - void SignalDtlsSrtpSetupFailure_n(bool rtcp); - void SignalDtlsSrtpSetupFailure_s(bool rtcp); - // Used for latency measurements. - sigslot::signal1& SignalFirstPacketReceived() override { - return SignalFirstPacketReceived_; - } + sigslot::signal1& SignalFirstPacketReceived() override; // Forward SignalSentPacket to worker thread. - sigslot::signal1 SignalSentPacket; - - // Emitted whenever rtcp-mux is fully negotiated and the rtcp-transport can - // be destroyed. - // Fired on the network thread.
- sigslot::signal1 SignalRtcpMuxFullyActive; + sigslot::signal1& SignalSentPacket(); // From RtpTransport - public for testing only void OnTransportReadyToSend(bool ready); @@ -229,6 +224,7 @@ class BaseChannel : public ChannelInterface, bool AddRecvStream_w(const StreamParams& sp); bool RemoveRecvStream_w(uint32_t ssrc); void ResetUnsignaledRecvStream_w(); + bool SetPayloadTypeDemuxingEnabled_w(bool enabled); bool AddSendStream_w(const StreamParams& sp); bool RemoveSendStream_w(uint32_t ssrc); @@ -266,15 +262,20 @@ class BaseChannel : public ChannelInterface, return worker_thread_->Invoke(posted_from, functor); } - void AddHandledPayloadType(int payload_type); + // Add |payload_type| to |demuxer_criteria_| if payload type demuxing is + // enabled. + void MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread()); - void ClearHandledPayloadTypes(); + void ClearHandledPayloadTypes() RTC_RUN_ON(worker_thread()); void UpdateRtpHeaderExtensionMap( const RtpHeaderExtensions& header_extensions); bool RegisterRtpDemuxerSink(); + // Return description of media channel to facilitate logging + std::string ToString() const; + bool has_received_packet_ = false; private: @@ -287,7 +288,10 @@ class BaseChannel : public ChannelInterface, rtc::Thread* const network_thread_; rtc::Thread* const signaling_thread_; rtc::AsyncInvoker invoker_; - sigslot::signal1 SignalFirstPacketReceived_; + sigslot::signal1 SignalFirstPacketReceived_ + RTC_GUARDED_BY(signaling_thread_); + sigslot::signal1 SignalSentPacket_ + RTC_GUARDED_BY(worker_thread_); const std::string content_name_; @@ -296,9 +300,6 @@ class BaseChannel : public ChannelInterface, webrtc::RtpTransportInternal* rtp_transport_ = nullptr; - // Optional media transport configuration (experimental). 
- webrtc::MediaTransportConfig media_transport_config_; - std::vector > socket_options_; std::vector > rtcp_socket_options_; bool writable_ = false; @@ -313,6 +314,7 @@ class BaseChannel : public ChannelInterface, // well, but it can be changed only when signaling thread does a synchronous // call to the worker thread, so it should be safe. bool enabled_ = false; + bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true; std::vector local_streams_; std::vector remote_streams_; webrtc::RtpTransceiverDirection local_content_direction_ = @@ -320,6 +322,8 @@ class BaseChannel : public ChannelInterface, webrtc::RtpTransceiverDirection remote_content_direction_ = webrtc::RtpTransceiverDirection::kInactive; + // Cached list of payload types, used if payload type demuxing is re-enabled. + std::set payload_types_ RTC_GUARDED_BY(worker_thread()); webrtc::RtpDemuxerCriteria demuxer_criteria_; // This generator is used to generate SSRCs for local streams. // This is needed in cases where SSRCs are not negotiated or set explicitly @@ -350,9 +354,7 @@ class VoiceChannel : public BaseChannel { cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_AUDIO; } - void Init_w( - webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config) override; + void Init_w(webrtc::RtpTransportInternal* rtp_transport) override; private: // overrides from BaseChannel @@ -432,9 +434,7 @@ class RtpDataChannel : public BaseChannel { DtlsTransportInternal* rtcp_dtls_transport, rtc::PacketTransportInternal* rtp_packet_transport, rtc::PacketTransportInternal* rtcp_packet_transport); - void Init_w( - webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config) override; + void Init_w(webrtc::RtpTransportInternal* rtp_transport) override; virtual bool SendData(const SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, diff --git a/pc/channel_interface.h 
b/pc/channel_interface.h index cd29ed4f84..68b6486304 100644 --- a/pc/channel_interface.h +++ b/pc/channel_interface.h @@ -52,6 +52,7 @@ class ChannelInterface { virtual bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, std::string* error_desc) = 0; + virtual bool SetPayloadTypeDemuxingEnabled(bool enabled) = 0; // Access to the local and remote streams that were set on the channel. virtual const std::vector& local_streams() const = 0; diff --git a/pc/channel_manager.cc b/pc/channel_manager.cc index 16814bd493..9d5adcad42 100644 --- a/pc/channel_manager.cc +++ b/pc/channel_manager.cc @@ -79,14 +79,6 @@ void ChannelManager::GetSupportedAudioReceiveCodecs( *codecs = media_engine_->voice().recv_codecs(); } -void ChannelManager::GetSupportedAudioRtpHeaderExtensions( - RtpHeaderExtensions* ext) const { - if (!media_engine_) { - return; - } - *ext = media_engine_->voice().GetCapabilities().header_extensions; -} - void ChannelManager::GetSupportedVideoSendCodecs( std::vector* codecs) const { if (!media_engine_) { @@ -121,14 +113,6 @@ void ChannelManager::GetSupportedVideoReceiveCodecs( } } -void ChannelManager::GetSupportedVideoRtpHeaderExtensions( - RtpHeaderExtensions* ext) const { - if (!media_engine_) { - return; - } - *ext = media_engine_->video().GetCapabilities().header_extensions; -} - void ChannelManager::GetSupportedDataCodecs( std::vector* codecs) const { *codecs = data_engine_->data_codecs(); @@ -157,6 +141,34 @@ bool ChannelManager::Init() { return initialized_; } +RtpHeaderExtensions ChannelManager::GetDefaultEnabledAudioRtpHeaderExtensions() + const { + if (!media_engine_) + return {}; + return GetDefaultEnabledRtpHeaderExtensions(media_engine_->voice()); +} + +std::vector +ChannelManager::GetSupportedAudioRtpHeaderExtensions() const { + if (!media_engine_) + return {}; + return media_engine_->voice().GetRtpHeaderExtensions(); +} + +RtpHeaderExtensions ChannelManager::GetDefaultEnabledVideoRtpHeaderExtensions() + const { + 
if (!media_engine_) + return {}; + return GetDefaultEnabledRtpHeaderExtensions(media_engine_->video()); +} + +std::vector +ChannelManager::GetSupportedVideoRtpHeaderExtensions() const { + if (!media_engine_) + return {}; + return media_engine_->video().GetRtpHeaderExtensions(); +} + void ChannelManager::Terminate() { RTC_DCHECK(initialized_); if (!initialized_) { @@ -175,19 +187,20 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( webrtc::Call* call, const cricket::MediaConfig& media_config, webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config, rtc::Thread* signaling_thread, const std::string& content_name, bool srtp_required, const webrtc::CryptoOptions& crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator, const AudioOptions& options) { + // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in + // PeerConnection and add the expectation that we're already on the right + // thread. if (!worker_thread_->IsCurrent()) { return worker_thread_->Invoke(RTC_FROM_HERE, [&] { return CreateVoiceChannel(call, media_config, rtp_transport, - media_transport_config, signaling_thread, - content_name, srtp_required, crypto_options, - ssrc_generator, options); + signaling_thread, content_name, srtp_required, + crypto_options, ssrc_generator, options); }); } @@ -209,7 +222,7 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( absl::WrapUnique(media_channel), content_name, srtp_required, crypto_options, ssrc_generator); - voice_channel->Init_w(rtp_transport, media_transport_config); + voice_channel->Init_w(rtp_transport); VoiceChannel* voice_channel_ptr = voice_channel.get(); voice_channels_.push_back(std::move(voice_channel)); @@ -245,7 +258,6 @@ VideoChannel* ChannelManager::CreateVideoChannel( webrtc::Call* call, const cricket::MediaConfig& media_config, webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config, rtc::Thread* signaling_thread, const 
std::string& content_name, bool srtp_required, @@ -253,12 +265,15 @@ VideoChannel* ChannelManager::CreateVideoChannel( rtc::UniqueRandomIdGenerator* ssrc_generator, const VideoOptions& options, webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { + // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in + // PeerConnection and add the expectation that we're already on the right + // thread. if (!worker_thread_->IsCurrent()) { return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return CreateVideoChannel( - call, media_config, rtp_transport, media_transport_config, - signaling_thread, content_name, srtp_required, crypto_options, - ssrc_generator, options, video_bitrate_allocator_factory); + return CreateVideoChannel(call, media_config, rtp_transport, + signaling_thread, content_name, srtp_required, + crypto_options, ssrc_generator, options, + video_bitrate_allocator_factory); }); } @@ -281,7 +296,7 @@ VideoChannel* ChannelManager::CreateVideoChannel( absl::WrapUnique(media_channel), content_name, srtp_required, crypto_options, ssrc_generator); - video_channel->Init_w(rtp_transport, media_transport_config); + video_channel->Init_w(rtp_transport); VideoChannel* video_channel_ptr = video_channel.get(); video_channels_.push_back(std::move(video_channel)); @@ -343,7 +358,7 @@ RtpDataChannel* ChannelManager::CreateRtpDataChannel( crypto_options, ssrc_generator); // Media Transports are not supported with Rtp Data Channel. 
- data_channel->Init_w(rtp_transport, webrtc::MediaTransportConfig()); + data_channel->Init_w(rtp_transport); RtpDataChannel* data_channel_ptr = data_channel.get(); data_channels_.push_back(std::move(data_channel)); diff --git a/pc/channel_manager.h b/pc/channel_manager.h index f66ad4bfc1..ba2c260099 100644 --- a/pc/channel_manager.h +++ b/pc/channel_manager.h @@ -19,7 +19,6 @@ #include "api/audio_options.h" #include "api/crypto/crypto_options.h" -#include "api/transport/media/media_transport_config.h" #include "call/call.h" #include "media/base/codec.h" #include "media/base/media_channel.h" @@ -75,11 +74,15 @@ class ChannelManager final { // Can be called before starting the media engine. void GetSupportedAudioSendCodecs(std::vector* codecs) const; void GetSupportedAudioReceiveCodecs(std::vector* codecs) const; - void GetSupportedAudioRtpHeaderExtensions(RtpHeaderExtensions* ext) const; void GetSupportedVideoSendCodecs(std::vector* codecs) const; void GetSupportedVideoReceiveCodecs(std::vector* codecs) const; - void GetSupportedVideoRtpHeaderExtensions(RtpHeaderExtensions* ext) const; void GetSupportedDataCodecs(std::vector* codecs) const; + RtpHeaderExtensions GetDefaultEnabledAudioRtpHeaderExtensions() const; + std::vector + GetSupportedAudioRtpHeaderExtensions() const; + RtpHeaderExtensions GetDefaultEnabledVideoRtpHeaderExtensions() const; + std::vector + GetSupportedVideoRtpHeaderExtensions() const; // Indicates whether the media engine is started. bool initialized() const { return initialized_; } @@ -93,17 +96,15 @@ class ChannelManager final { // call the appropriate Destroy*Channel method when done. // Creates a voice channel, to be associated with the specified session. 
- VoiceChannel* CreateVoiceChannel( - webrtc::Call* call, - const cricket::MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const AudioOptions& options); + VoiceChannel* CreateVoiceChannel(webrtc::Call* call, + const cricket::MediaConfig& media_config, + webrtc::RtpTransportInternal* rtp_transport, + rtc::Thread* signaling_thread, + const std::string& content_name, + bool srtp_required, + const webrtc::CryptoOptions& crypto_options, + rtc::UniqueRandomIdGenerator* ssrc_generator, + const AudioOptions& options); // Destroys a voice channel created by CreateVoiceChannel. void DestroyVoiceChannel(VoiceChannel* voice_channel); @@ -114,7 +115,6 @@ class ChannelManager final { webrtc::Call* call, const cricket::MediaConfig& media_config, webrtc::RtpTransportInternal* rtp_transport, - const webrtc::MediaTransportConfig& media_transport_config, rtc::Thread* signaling_thread, const std::string& content_name, bool srtp_required, diff --git a/pc/channel_manager_unittest.cc b/pc/channel_manager_unittest.cc index 6f3128ebde..610d7979ab 100644 --- a/pc/channel_manager_unittest.cc +++ b/pc/channel_manager_unittest.cc @@ -13,7 +13,6 @@ #include #include "api/rtc_error.h" -#include "api/transport/media/media_transport_config.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "media/base/fake_media_engine.h" #include "media/base/test_utils.h" @@ -73,20 +72,17 @@ class ChannelManagerTest : public ::testing::Test { return dtls_srtp_transport; } - void TestCreateDestroyChannels( - webrtc::RtpTransportInternal* rtp_transport, - webrtc::MediaTransportConfig media_transport_config) { + void TestCreateDestroyChannels(webrtc::RtpTransportInternal* rtp_transport) { cricket::VoiceChannel* 
voice_channel = cm_->CreateVoiceChannel( &fake_call_, cricket::MediaConfig(), rtp_transport, - media_transport_config, rtc::Thread::Current(), cricket::CN_AUDIO, - kDefaultSrtpRequired, webrtc::CryptoOptions(), &ssrc_generator_, - AudioOptions()); + rtc::Thread::Current(), cricket::CN_AUDIO, kDefaultSrtpRequired, + webrtc::CryptoOptions(), &ssrc_generator_, AudioOptions()); EXPECT_TRUE(voice_channel != nullptr); cricket::VideoChannel* video_channel = cm_->CreateVideoChannel( &fake_call_, cricket::MediaConfig(), rtp_transport, - media_transport_config, rtc::Thread::Current(), cricket::CN_VIDEO, - kDefaultSrtpRequired, webrtc::CryptoOptions(), &ssrc_generator_, - VideoOptions(), video_bitrate_allocator_factory_.get()); + rtc::Thread::Current(), cricket::CN_VIDEO, kDefaultSrtpRequired, + webrtc::CryptoOptions(), &ssrc_generator_, VideoOptions(), + video_bitrate_allocator_factory_.get()); EXPECT_TRUE(video_channel != nullptr); cricket::RtpDataChannel* rtp_data_channel = cm_->CreateRtpDataChannel( cricket::MediaConfig(), rtp_transport, rtc::Thread::Current(), @@ -183,8 +179,7 @@ TEST_F(ChannelManagerTest, SetVideoRtxEnabled) { TEST_F(ChannelManagerTest, CreateDestroyChannels) { EXPECT_TRUE(cm_->Init()); auto rtp_transport = CreateDtlsSrtpTransport(); - TestCreateDestroyChannels(rtp_transport.get(), - webrtc::MediaTransportConfig()); + TestCreateDestroyChannels(rtp_transport.get()); } TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) { @@ -194,8 +189,7 @@ TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) { EXPECT_TRUE(cm_->set_network_thread(network_.get())); EXPECT_TRUE(cm_->Init()); auto rtp_transport = CreateDtlsSrtpTransport(); - TestCreateDestroyChannels(rtp_transport.get(), - webrtc::MediaTransportConfig()); + TestCreateDestroyChannels(rtp_transport.get()); } } // namespace cricket diff --git a/pc/channel_unittest.cc b/pc/channel_unittest.cc index 8ce3729f45..4eef70f4ea 100644 --- a/pc/channel_unittest.cc +++ b/pc/channel_unittest.cc @@ -17,7 +17,6 
@@ #include "api/array_view.h" #include "api/audio_options.h" #include "api/rtp_parameters.h" -#include "api/transport/media/media_transport_config.h" #include "media/base/codec.h" #include "media/base/fake_media_engine.h" #include "media/base/fake_rtp.h" @@ -179,9 +178,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { rtcp1 = fake_rtcp_dtls_transport1_.get(); } if (flags1 & DTLS) { - auto cert1 = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + auto cert1 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); fake_rtp_dtls_transport1_->SetLocalCertificate(cert1); if (fake_rtcp_dtls_transport1_) { fake_rtcp_dtls_transport1_->SetLocalCertificate(cert1); @@ -209,9 +207,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { rtcp2 = fake_rtcp_dtls_transport2_.get(); } if (flags2 & DTLS) { - auto cert2 = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT))); + auto cert2 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session2", rtc::KT_DEFAULT)); fake_rtp_dtls_transport2_->SetLocalCertificate(cert2); if (fake_rtcp_dtls_transport2_) { fake_rtcp_dtls_transport2_->SetLocalCertificate(cert2); @@ -231,10 +228,6 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { rtp_transport1_.get(), flags1); channel2_ = CreateChannel(worker_thread, network_thread_, std::move(ch2), rtp_transport2_.get(), flags2); - channel1_->SignalRtcpMuxFullyActive.connect( - this, &ChannelTest::OnRtcpMuxFullyActive1); - channel2_->SignalRtcpMuxFullyActive.connect( - this, &ChannelTest::OnRtcpMuxFullyActive2); CreateContent(flags1, kPcmuCodec, kH264Codec, &local_media_content1_); CreateContent(flags2, kPcmuCodec, kH264Codec, &local_media_content2_); CopyContent(local_media_content1_, &remote_media_content1_); @@ -493,13 +486,6 @@ class ChannelTest : public 
::testing::Test, public sigslot::has_slots<> { return false; // overridden in specialized classes } - void OnRtcpMuxFullyActive1(const std::string&) { - rtcp_mux_activated_callbacks1_++; - } - void OnRtcpMuxFullyActive2(const std::string&) { - rtcp_mux_activated_callbacks2_++; - } - cricket::CandidatePairInterface* last_selected_candidate_pair() { return last_selected_candidate_pair_; } @@ -600,29 +586,6 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel2_->SetRemoteContent(&content, SdpType::kAnswer, NULL)); } - // Test that SetLocalContent and SetRemoteContent properly set RTCP - // mux when a provisional answer is received. - void TestSetContentsRtcpMuxWithPrAnswer() { - CreateChannels(0, 0); - typename T::Content content; - CreateContent(0, kPcmuCodec, kH264Codec, &content); - content.set_rtcp_mux(true); - EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, NULL)); - EXPECT_TRUE( - channel1_->SetRemoteContent(&content, SdpType::kPrAnswer, NULL)); - // Both sides agree on mux. Should signal RTCP mux as fully activated. - EXPECT_EQ(0, rtcp_mux_activated_callbacks1_); - EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, NULL)); - EXPECT_EQ(1, rtcp_mux_activated_callbacks1_); - // Only initiator supports mux. Should still have a separate RTCP channel. - EXPECT_TRUE(channel2_->SetLocalContent(&content, SdpType::kOffer, NULL)); - content.set_rtcp_mux(false); - EXPECT_TRUE( - channel2_->SetRemoteContent(&content, SdpType::kPrAnswer, NULL)); - EXPECT_TRUE(channel2_->SetRemoteContent(&content, SdpType::kAnswer, NULL)); - EXPECT_EQ(0, rtcp_mux_activated_callbacks2_); - } - // Test that SetLocalContent and SetRemoteContent properly // handles adding and removing StreamParams when the action is a full // SdpType::kOffer / SdpType::kAnswer. 
@@ -843,7 +806,6 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { rtc::NetworkRoute network_route; // The transport channel becomes disconnected. fake_rtp_dtls_transport1_->ice_transport()->SignalNetworkRouteChanged( - absl::optional(network_route)); }); WaitForThreads(); @@ -854,8 +816,10 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { network_thread_->Invoke(RTC_FROM_HERE, [this] { rtc::NetworkRoute network_route; network_route.connected = true; - network_route.local_network_id = kLocalNetId; - network_route.remote_network_id = kRemoteNetId; + network_route.local = + rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId); + network_route.remote = + rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId); network_route.last_sent_packet_id = kLastPacketId; network_route.packet_overhead = kTransportOverheadPerPacket; // The transport channel becomes connected. @@ -867,9 +831,9 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_EQ(1, media_channel1->num_network_route_changes()); EXPECT_TRUE(media_channel1->last_network_route().connected); EXPECT_EQ(kLocalNetId, - media_channel1->last_network_route().local_network_id); + media_channel1->last_network_route().local.network_id()); EXPECT_EQ(kRemoteNetId, - media_channel1->last_network_route().remote_network_id); + media_channel1->last_network_route().remote.network_id()); EXPECT_EQ(kLastPacketId, media_channel1->last_network_route().last_sent_packet_id); EXPECT_EQ(kTransportOverheadPerPacket + kSrtpOverheadPerPacket, @@ -1312,7 +1276,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { void CreateSimulcastContent(const std::vector& rids, typename T::Content* content) { std::vector rid_descriptions; - for (const std::string name : rids) { + for (const std::string& name : rids) { rid_descriptions.push_back(RidDescription(name, RidDirection::kSend)); } @@ -1414,8 +1378,6 @@ class ChannelTest : public 
::testing::Test, public sigslot::has_slots<> { // The RTP and RTCP packets to send in the tests. rtc::Buffer rtp_packet_; rtc::Buffer rtcp_packet_; - int rtcp_mux_activated_callbacks1_ = 0; - int rtcp_mux_activated_callbacks2_ = 0; cricket::CandidatePairInterface* last_selected_candidate_pair_; rtc::UniqueRandomIdGenerator ssrc_generator_; }; @@ -1432,7 +1394,7 @@ std::unique_ptr ChannelTest::CreateChannel( worker_thread, network_thread, signaling_thread, std::move(ch), cricket::CN_AUDIO, (flags & DTLS) != 0, webrtc::CryptoOptions(), &ssrc_generator_); - channel->Init_w(rtp_transport, webrtc::MediaTransportConfig()); + channel->Init_w(rtp_transport); return channel; } @@ -1515,7 +1477,7 @@ std::unique_ptr ChannelTest::CreateChannel( worker_thread, network_thread, signaling_thread, std::move(ch), cricket::CN_VIDEO, (flags & DTLS) != 0, webrtc::CryptoOptions(), &ssrc_generator_); - channel->Init_w(rtp_transport, webrtc::MediaTransportConfig()); + channel->Init_w(rtp_transport); return channel; } @@ -2302,7 +2264,7 @@ std::unique_ptr ChannelTest::CreateChannel( worker_thread, network_thread, signaling_thread, std::move(ch), cricket::CN_DATA, (flags & DTLS) != 0, webrtc::CryptoOptions(), &ssrc_generator_); - channel->Init_w(rtp_transport, webrtc::MediaTransportConfig()); + channel->Init_w(rtp_transport); return channel; } diff --git a/pc/composite_data_channel_transport.cc b/pc/composite_data_channel_transport.cc deleted file mode 100644 index e66febc12b..0000000000 --- a/pc/composite_data_channel_transport.cc +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/composite_data_channel_transport.h" - -#include - -#include "absl/algorithm/container.h" - -namespace webrtc { - -CompositeDataChannelTransport::CompositeDataChannelTransport( - std::vector transports) - : transports_(std::move(transports)) { - for (auto transport : transports_) { - transport->SetDataSink(this); - } -} - -CompositeDataChannelTransport::~CompositeDataChannelTransport() { - for (auto transport : transports_) { - transport->SetDataSink(nullptr); - } -} - -void CompositeDataChannelTransport::SetSendTransport( - DataChannelTransportInterface* send_transport) { - if (!absl::c_linear_search(transports_, send_transport)) { - return; - } - send_transport_ = send_transport; - // NB: OnReadyToSend() checks if we're actually ready to send, and signals - // |sink_| if appropriate. This signal is required upon setting the sink. - OnReadyToSend(); -} - -void CompositeDataChannelTransport::RemoveTransport( - DataChannelTransportInterface* transport) { - RTC_DCHECK(transport != send_transport_) << "Cannot remove send transport"; - - auto it = absl::c_find(transports_, transport); - if (it == transports_.end()) { - return; - } - - transport->SetDataSink(nullptr); - transports_.erase(it); -} - -RTCError CompositeDataChannelTransport::OpenChannel(int channel_id) { - RTCError error = RTCError::OK(); - for (auto transport : transports_) { - RTCError e = transport->OpenChannel(channel_id); - if (!e.ok()) { - error = std::move(e); - } - } - return error; -} - -RTCError CompositeDataChannelTransport::SendData( - int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - if (send_transport_) { - return send_transport_->SendData(channel_id, params, buffer); - } - return RTCError(RTCErrorType::NETWORK_ERROR, "Send transport is not ready"); -} - -RTCError CompositeDataChannelTransport::CloseChannel(int channel_id) { - if (send_transport_) { - return send_transport_->CloseChannel(channel_id); - } - return 
RTCError(RTCErrorType::NETWORK_ERROR, "Send transport is not ready"); -} - -void CompositeDataChannelTransport::SetDataSink(DataChannelSink* sink) { - sink_ = sink; - // NB: OnReadyToSend() checks if we're actually ready to send, and signals - // |sink_| if appropriate. This signal is required upon setting the sink. - OnReadyToSend(); -} - -bool CompositeDataChannelTransport::IsReadyToSend() const { - return send_transport_ && send_transport_->IsReadyToSend(); -} - -void CompositeDataChannelTransport::OnDataReceived( - int channel_id, - DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) { - if (sink_) { - sink_->OnDataReceived(channel_id, type, buffer); - } -} - -void CompositeDataChannelTransport::OnChannelClosing(int channel_id) { - if (sink_) { - sink_->OnChannelClosing(channel_id); - } -} - -void CompositeDataChannelTransport::OnChannelClosed(int channel_id) { - if (sink_) { - sink_->OnChannelClosed(channel_id); - } -} - -void CompositeDataChannelTransport::OnReadyToSend() { - if (sink_ && send_transport_ && send_transport_->IsReadyToSend()) { - sink_->OnReadyToSend(); - } -} - -} // namespace webrtc diff --git a/pc/composite_data_channel_transport.h b/pc/composite_data_channel_transport.h deleted file mode 100644 index 97633cb6ed..0000000000 --- a/pc/composite_data_channel_transport.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef PC_COMPOSITE_DATA_CHANNEL_TRANSPORT_H_ -#define PC_COMPOSITE_DATA_CHANNEL_TRANSPORT_H_ - -#include - -#include "api/transport/data_channel_transport_interface.h" -#include "rtc_base/critical_section.h" - -namespace webrtc { - -// Composite implementation of DataChannelTransportInterface. Allows users to -// receive data channel messages over multiple transports and send over one of -// those transports. -class CompositeDataChannelTransport : public DataChannelTransportInterface, - public DataChannelSink { - public: - explicit CompositeDataChannelTransport( - std::vector transports); - ~CompositeDataChannelTransport() override; - - // Specifies which transport to be used for sending. Must be called before - // sending data. - void SetSendTransport(DataChannelTransportInterface* send_transport); - - // Removes a given transport from the composite, if present. - void RemoveTransport(DataChannelTransportInterface* transport); - - // DataChannelTransportInterface overrides. - RTCError OpenChannel(int channel_id) override; - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override; - RTCError CloseChannel(int channel_id) override; - void SetDataSink(DataChannelSink* sink) override; - bool IsReadyToSend() const override; - - // DataChannelSink overrides. 
- void OnDataReceived(int channel_id, - DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) override; - void OnChannelClosing(int channel_id) override; - void OnChannelClosed(int channel_id) override; - void OnReadyToSend() override; - - private: - std::vector transports_; - DataChannelTransportInterface* send_transport_ = nullptr; - DataChannelSink* sink_ = nullptr; -}; - -} // namespace webrtc - -#endif // PC_COMPOSITE_DATA_CHANNEL_TRANSPORT_H_ diff --git a/pc/composite_rtp_transport_test.cc b/pc/composite_rtp_transport_test.cc index 02480844a0..fee8c215b2 100644 --- a/pc/composite_rtp_transport_test.cc +++ b/pc/composite_rtp_transport_test.cc @@ -229,17 +229,17 @@ TEST_F(CompositeRtpTransportTest, NetworkRouteChange) { SetupRtpTransports(/*rtcp_mux=*/true); rtc::NetworkRoute route; - route.local_network_id = 7; + route.local = rtc::RouteEndpoint::CreateWithNetworkId(7); packet_transport_1_->SetNetworkRoute(route); EXPECT_EQ(1, network_route_count_); - EXPECT_EQ(7, last_network_route_->local_network_id); + EXPECT_EQ(7, last_network_route_->local.network_id()); - route.local_network_id = 8; + route.local = rtc::RouteEndpoint::CreateWithNetworkId(8); packet_transport_2_->SetNetworkRoute(route); EXPECT_EQ(2, network_route_count_); - EXPECT_EQ(8, last_network_route_->local_network_id); + EXPECT_EQ(8, last_network_route_->local.network_id()); } TEST_F(CompositeRtpTransportTest, RemoveTransport) { @@ -249,7 +249,7 @@ TEST_F(CompositeRtpTransportTest, RemoveTransport) { // Check that signals are disconnected. rtc::NetworkRoute route; - route.local_network_id = 7; + route.local = rtc::RouteEndpoint::CreateWithNetworkId(7); packet_transport_1_->SetNetworkRoute(route); EXPECT_EQ(0, network_route_count_); diff --git a/pc/connection_context.cc b/pc/connection_context.cc new file mode 100644 index 0000000000..727fbd6542 --- /dev/null +++ b/pc/connection_context.cc @@ -0,0 +1,147 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/connection_context.h" + +#include +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "media/base/rtp_data_engine.h" +#include "rtc_base/helpers.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +namespace { + +rtc::Thread* MaybeStartThread(rtc::Thread* old_thread, + const std::string& thread_name, + bool with_socket_server, + std::unique_ptr& thread_holder) { + if (old_thread) { + return old_thread; + } + if (with_socket_server) { + thread_holder = rtc::Thread::CreateWithSocketServer(); + } else { + thread_holder = rtc::Thread::Create(); + } + thread_holder->SetName(thread_name, nullptr); + thread_holder->Start(); + return thread_holder.get(); +} + +rtc::Thread* MaybeWrapThread(rtc::Thread* signaling_thread, + bool& wraps_current_thread) { + wraps_current_thread = false; + if (signaling_thread) { + return signaling_thread; + } + auto this_thread = rtc::Thread::Current(); + if (!this_thread) { + // If this thread isn't already wrapped by an rtc::Thread, create a + // wrapper and own it in this class. 
+ this_thread = rtc::ThreadManager::Instance()->WrapCurrentThread(); + wraps_current_thread = true; + } + return this_thread; +} + +std::unique_ptr MaybeCreateSctpFactory( + std::unique_ptr factory, + rtc::Thread* network_thread) { + if (factory) { + return factory; + } +#ifdef HAVE_SCTP + return std::make_unique(network_thread); +#else + return nullptr; +#endif +} + +} // namespace + +// Static +rtc::scoped_refptr ConnectionContext::Create( + PeerConnectionFactoryDependencies* dependencies) { + auto context = new rtc::RefCountedObject(dependencies); + if (!context->channel_manager_->Init()) { + return nullptr; + } + return context; +} + +ConnectionContext::ConnectionContext( + PeerConnectionFactoryDependencies* dependencies) + : network_thread_(MaybeStartThread(dependencies->network_thread, + "pc_network_thread", + true, + owned_network_thread_)), + worker_thread_(MaybeStartThread(dependencies->worker_thread, + "pc_worker_thread", + false, + owned_worker_thread_)), + signaling_thread_(MaybeWrapThread(dependencies->signaling_thread, + wraps_current_thread_)), + network_monitor_factory_( + std::move(dependencies->network_monitor_factory)), + call_factory_(std::move(dependencies->call_factory)), + media_engine_(std::move(dependencies->media_engine)), + sctp_factory_( + MaybeCreateSctpFactory(std::move(dependencies->sctp_factory), + network_thread())), + trials_(dependencies->trials + ? std::move(dependencies->trials) + : std::make_unique()) { + signaling_thread_->AllowInvokesToThread(worker_thread_); + signaling_thread_->AllowInvokesToThread(network_thread_); + worker_thread_->AllowInvokesToThread(network_thread_); + network_thread_->DisallowAllInvokes(); + + RTC_DCHECK_RUN_ON(signaling_thread_); + rtc::InitRandom(rtc::Time32()); + + // If network_monitor_factory_ is non-null, it will be used to create a + // network monitor while on the network thread. 
+ default_network_manager_ = std::make_unique( + network_monitor_factory_.get()); + + default_socket_factory_ = + std::make_unique(network_thread()); + + channel_manager_ = std::make_unique( + std::move(media_engine_), std::make_unique(), + worker_thread(), network_thread()); + + channel_manager_->SetVideoRtxEnabled(true); +} + +ConnectionContext::~ConnectionContext() { + RTC_DCHECK_RUN_ON(signaling_thread_); + channel_manager_.reset(nullptr); + + // Make sure |worker_thread()| and |signaling_thread()| outlive + // |default_socket_factory_| and |default_network_manager_|. + default_socket_factory_ = nullptr; + default_network_manager_ = nullptr; + + if (wraps_current_thread_) + rtc::ThreadManager::Instance()->UnwrapCurrentThread(); +} + +cricket::ChannelManager* ConnectionContext::channel_manager() const { + return channel_manager_.get(); +} + +} // namespace webrtc diff --git a/pc/connection_context.h b/pc/connection_context.h new file mode 100644 index 0000000000..02d08a191e --- /dev/null +++ b/pc/connection_context.h @@ -0,0 +1,134 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef PC_CONNECTION_CONTEXT_H_ +#define PC_CONNECTION_CONTEXT_H_ + +#include +#include + +#include "api/call/call_factory_interface.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/sctp_transport_factory_interface.h" +#include "api/transport/webrtc_key_value_config.h" +#include "media/base/media_engine.h" +#include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "pc/channel_manager.h" +#include "rtc_base/checks.h" +#include "rtc_base/network.h" +#include "rtc_base/network_monitor_factory.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace rtc { +class BasicNetworkManager; +class BasicPacketSocketFactory; +} // namespace rtc + +namespace webrtc { + +class RtcEventLog; + +// This class contains resources needed by PeerConnection and associated +// objects. A reference to this object is passed to each PeerConnection. The +// methods on this object are assumed not to change the state in any way that +// interferes with the operation of other PeerConnections. +// +// This class must be created and destroyed on the signaling thread. +class ConnectionContext : public rtc::RefCountInterface { + public: + // Creates a ConnectionContext. May return null if initialization fails. + // The Dependencies class allows simple management of all new dependencies + // being added to the ConnectionContext. + static rtc::scoped_refptr Create( + PeerConnectionFactoryDependencies* dependencies); + + // This class is not copyable or movable. 
+ ConnectionContext(const ConnectionContext&) = delete; + ConnectionContext& operator=(const ConnectionContext&) = delete; + + // Functions called from PeerConnection and friends + SctpTransportFactoryInterface* sctp_transport_factory() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return sctp_factory_.get(); + } + + cricket::ChannelManager* channel_manager() const; + + rtc::Thread* signaling_thread() { return signaling_thread_; } + const rtc::Thread* signaling_thread() const { return signaling_thread_; } + rtc::Thread* worker_thread() { return worker_thread_; } + const rtc::Thread* worker_thread() const { return worker_thread_; } + rtc::Thread* network_thread() { return network_thread_; } + const rtc::Thread* network_thread() const { return network_thread_; } + + const WebRtcKeyValueConfig& trials() const { return *trials_.get(); } + + // Accessors only used from the PeerConnectionFactory class + rtc::BasicNetworkManager* default_network_manager() { + RTC_DCHECK_RUN_ON(signaling_thread_); + return default_network_manager_.get(); + } + rtc::BasicPacketSocketFactory* default_socket_factory() { + RTC_DCHECK_RUN_ON(signaling_thread_); + return default_socket_factory_.get(); + } + CallFactoryInterface* call_factory() { + RTC_DCHECK_RUN_ON(worker_thread_); + return call_factory_.get(); + } + + protected: + explicit ConnectionContext(PeerConnectionFactoryDependencies* dependencies); + + virtual ~ConnectionContext(); + + private: + // The following three variables are used to communicate between the + // constructor and the destructor, and are never exposed externally. + bool wraps_current_thread_; + // Note: Since owned_network_thread_ and owned_worker_thread_ are used + // in the initialization of network_thread_ and worker_thread_, they + // must be declared before them, so that they are initialized first. 
+ std::unique_ptr owned_network_thread_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr owned_worker_thread_ + RTC_GUARDED_BY(signaling_thread_); + rtc::Thread* const network_thread_; + rtc::Thread* const worker_thread_; + rtc::Thread* const signaling_thread_; + // channel_manager is accessed both on signaling thread and worker thread. + std::unique_ptr channel_manager_; + std::unique_ptr const network_monitor_factory_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr default_network_manager_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr const call_factory_ + RTC_GUARDED_BY(worker_thread_); + + std::unique_ptr default_socket_factory_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr media_engine_ + RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr const sctp_factory_ + RTC_GUARDED_BY(signaling_thread_); + // Accessed both on signaling thread and worker thread. + std::unique_ptr const trials_; +}; + +} // namespace webrtc + +#endif // PC_CONNECTION_CONTEXT_H_ diff --git a/pc/data_channel.h b/pc/data_channel.h deleted file mode 100644 index c1de7c7a7a..0000000000 --- a/pc/data_channel.h +++ /dev/null @@ -1,340 +0,0 @@ -/* - * Copyright 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef PC_DATA_CHANNEL_H_ -#define PC_DATA_CHANNEL_H_ - -#include -#include -#include -#include - -#include "api/data_channel_interface.h" -#include "api/proxy.h" -#include "api/scoped_refptr.h" -#include "media/base/media_channel.h" -#include "pc/channel.h" -#include "rtc_base/async_invoker.h" -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace webrtc { - -class DataChannel; - -// TODO(deadbeef): Once RTP data channels go away, get rid of this and have -// DataChannel depend on SctpTransportInternal (pure virtual SctpTransport -// interface) instead. -class DataChannelProviderInterface { - public: - // Sends the data to the transport. - virtual bool SendData(const cricket::SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - cricket::SendDataResult* result) = 0; - // Connects to the transport signals. - virtual bool ConnectDataChannel(DataChannel* data_channel) = 0; - // Disconnects from the transport signals. - virtual void DisconnectDataChannel(DataChannel* data_channel) = 0; - // Adds the data channel SID to the transport for SCTP. - virtual void AddSctpDataStream(int sid) = 0; - // Begins the closing procedure by sending an outgoing stream reset. Still - // need to wait for callbacks to tell when this completes. - virtual void RemoveSctpDataStream(int sid) = 0; - // Returns true if the transport channel is ready to send data. - virtual bool ReadyToSendData() const = 0; - - protected: - virtual ~DataChannelProviderInterface() {} -}; - -struct InternalDataChannelInit : public DataChannelInit { - enum OpenHandshakeRole { kOpener, kAcker, kNone }; - // The default role is kOpener because the default |negotiated| is false. 
- InternalDataChannelInit() : open_handshake_role(kOpener) {} - explicit InternalDataChannelInit(const DataChannelInit& base); - OpenHandshakeRole open_handshake_role; -}; - -// Helper class to allocate unique IDs for SCTP DataChannels -class SctpSidAllocator { - public: - // Gets the first unused odd/even id based on the DTLS role. If |role| is - // SSL_CLIENT, the allocated id starts from 0 and takes even numbers; - // otherwise, the id starts from 1 and takes odd numbers. - // Returns false if no ID can be allocated. - bool AllocateSid(rtc::SSLRole role, int* sid); - - // Attempts to reserve a specific sid. Returns false if it's unavailable. - bool ReserveSid(int sid); - - // Indicates that |sid| isn't in use any more, and is thus available again. - void ReleaseSid(int sid); - - private: - // Checks if |sid| is available to be assigned to a new SCTP data channel. - bool IsSidAvailable(int sid) const; - - std::set used_sids_; -}; - -// DataChannel is a an implementation of the DataChannelInterface based on -// libjingle's data engine. It provides an implementation of unreliable or -// reliabledata channels. Currently this class is specifically designed to use -// both RtpDataChannel and SctpTransport. - -// DataChannel states: -// kConnecting: The channel has been created the transport might not yet be -// ready. -// kOpen: The channel have a local SSRC set by a call to UpdateSendSsrc -// and a remote SSRC set by call to UpdateReceiveSsrc and the transport -// has been writable once. -// kClosing: DataChannelInterface::Close has been called or UpdateReceiveSsrc -// has been called with SSRC==0 -// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc has been called with -// SSRC==0. -// -// How the closing procedure works for SCTP: -// 1. Alice calls Close(), state changes to kClosing. -// 2. Alice finishes sending any queued data. -// 3. Alice calls RemoveSctpDataStream, sends outgoing stream reset. -// 4. 
Bob receives incoming stream reset; OnClosingProcedureStartedRemotely -// called. -// 5. Bob sends outgoing stream reset. 6. Alice receives incoming reset, -// Bob receives acknowledgement. Both receive OnClosingProcedureComplete -// callback and transition to kClosed. -class DataChannel : public DataChannelInterface, public sigslot::has_slots<> { - public: - static rtc::scoped_refptr Create( - DataChannelProviderInterface* provider, - cricket::DataChannelType dct, - const std::string& label, - const InternalDataChannelInit& config); - - static bool IsSctpLike(cricket::DataChannelType type); - - virtual void RegisterObserver(DataChannelObserver* observer); - virtual void UnregisterObserver(); - - virtual std::string label() const { return label_; } - virtual bool reliable() const; - virtual bool ordered() const { return config_.ordered; } - // Backwards compatible accessors - virtual uint16_t maxRetransmitTime() const { - return config_.maxRetransmitTime ? *config_.maxRetransmitTime - : static_cast(-1); - } - virtual uint16_t maxRetransmits() const { - return config_.maxRetransmits ? 
*config_.maxRetransmits - : static_cast(-1); - } - virtual absl::optional maxPacketLifeTime() const { - return config_.maxRetransmitTime; - } - virtual absl::optional maxRetransmitsOpt() const { - return config_.maxRetransmits; - } - virtual std::string protocol() const { return config_.protocol; } - virtual bool negotiated() const { return config_.negotiated; } - virtual int id() const { return config_.id; } - virtual int internal_id() const { return internal_id_; } - virtual uint64_t buffered_amount() const; - virtual void Close(); - virtual DataState state() const { return state_; } - virtual RTCError error() const; - virtual uint32_t messages_sent() const { return messages_sent_; } - virtual uint64_t bytes_sent() const { return bytes_sent_; } - virtual uint32_t messages_received() const { return messages_received_; } - virtual uint64_t bytes_received() const { return bytes_received_; } - virtual bool Send(const DataBuffer& buffer); - - // Close immediately, ignoring any queued data or closing procedure. - // This is called for RTP data channels when SDP indicates a channel should - // be removed, or SCTP data channels when the underlying SctpTransport is - // being destroyed. - // It is also called by the PeerConnection if SCTP ID assignment fails. - void CloseAbruptlyWithError(RTCError error); - // Specializations of CloseAbruptlyWithError - void CloseAbruptlyWithDataChannelFailure(const std::string& message); - void CloseAbruptlyWithSctpCauseCode(const std::string& message, - uint16_t cause_code); - - // Called when the channel's ready to use. That can happen when the - // underlying DataMediaChannel becomes ready, or when this channel is a new - // stream on an existing DataMediaChannel, and we've finished negotiation. - void OnChannelReady(bool writable); - - // Slots for provider to connect signals to. 
- void OnDataReceived(const cricket::ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& payload); - - /******************************************** - * The following methods are for SCTP only. * - ********************************************/ - - // Sets the SCTP sid and adds to transport layer if not set yet. Should only - // be called once. - void SetSctpSid(int sid); - // The remote side started the closing procedure by resetting its outgoing - // stream (our incoming stream). Sets state to kClosing. - void OnClosingProcedureStartedRemotely(int sid); - // The closing procedure is complete; both incoming and outgoing stream - // resets are done and the channel can transition to kClosed. Called - // asynchronously after RemoveSctpDataStream. - void OnClosingProcedureComplete(int sid); - // Called when the transport channel is created. - // Only needs to be called for SCTP data channels. - void OnTransportChannelCreated(); - // Called when the transport channel is unusable. - // This method makes sure the DataChannel is disconnected and changes state - // to kClosed. - void OnTransportChannelClosed(); - - /******************************************* - * The following methods are for RTP only. * - *******************************************/ - - // The remote peer requested that this channel should be closed. - void RemotePeerRequestClose(); - // Set the SSRC this channel should use to send data on the - // underlying data engine. |send_ssrc| == 0 means that the channel is no - // longer part of the session negotiation. - void SetSendSsrc(uint32_t send_ssrc); - // Set the SSRC this channel should use to receive data from the - // underlying data engine. - void SetReceiveSsrc(uint32_t receive_ssrc); - - cricket::DataChannelType data_channel_type() const { - return data_channel_type_; - } - - // Emitted when state transitions to kOpen. - sigslot::signal1 SignalOpened; - // Emitted when state transitions to kClosed. 
- // In the case of SCTP channels, this signal can be used to tell when the - // channel's sid is free. - sigslot::signal1 SignalClosed; - - // Reset the allocator for internal ID values for testing, so that - // the internal IDs generated are predictable. Test only. - static void ResetInternalIdAllocatorForTesting(int new_value); - - protected: - DataChannel(DataChannelProviderInterface* client, - cricket::DataChannelType dct, - const std::string& label); - virtual ~DataChannel(); - - private: - // A packet queue which tracks the total queued bytes. Queued packets are - // owned by this class. - class PacketQueue final { - public: - size_t byte_count() const { return byte_count_; } - - bool Empty() const; - - std::unique_ptr PopFront(); - - void PushFront(std::unique_ptr packet); - void PushBack(std::unique_ptr packet); - - void Clear(); - - void Swap(PacketQueue* other); - - private: - std::deque> packets_; - size_t byte_count_ = 0; - }; - - // The OPEN(_ACK) signaling state. - enum HandshakeState { - kHandshakeInit, - kHandshakeShouldSendOpen, - kHandshakeShouldSendAck, - kHandshakeWaitingForAck, - kHandshakeReady - }; - - bool Init(const InternalDataChannelInit& config); - void UpdateState(); - void SetState(DataState state); - void DisconnectFromProvider(); - - void DeliverQueuedReceivedData(); - - void SendQueuedDataMessages(); - bool SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked); - bool QueueSendDataMessage(const DataBuffer& buffer); - - void SendQueuedControlMessages(); - void QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer); - bool SendControlMessage(const rtc::CopyOnWriteBuffer& buffer); - - const int internal_id_; - std::string label_; - InternalDataChannelInit config_; - DataChannelObserver* observer_; - DataState state_; - RTCError error_; - uint32_t messages_sent_; - uint64_t bytes_sent_; - uint32_t messages_received_; - uint64_t bytes_received_; - // Number of bytes of data that have been queued using Send(). 
Increased - // before each transport send and decreased after each successful send. - uint64_t buffered_amount_; - cricket::DataChannelType data_channel_type_; - DataChannelProviderInterface* provider_; - HandshakeState handshake_state_; - bool connected_to_provider_; - bool send_ssrc_set_; - bool receive_ssrc_set_; - bool writable_; - // Did we already start the graceful SCTP closing procedure? - bool started_closing_procedure_ = false; - uint32_t send_ssrc_; - uint32_t receive_ssrc_; - // Control messages that always have to get sent out before any queued - // data. - PacketQueue queued_control_data_; - PacketQueue queued_received_data_; - PacketQueue queued_send_data_; - rtc::AsyncInvoker invoker_; -}; - -// Define proxy for DataChannelInterface. -BEGIN_SIGNALING_PROXY_MAP(DataChannel) -PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*) -PROXY_METHOD0(void, UnregisterObserver) -PROXY_CONSTMETHOD0(std::string, label) -PROXY_CONSTMETHOD0(bool, reliable) -PROXY_CONSTMETHOD0(bool, ordered) -PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime) -PROXY_CONSTMETHOD0(uint16_t, maxRetransmits) -PROXY_CONSTMETHOD0(absl::optional, maxRetransmitsOpt) -PROXY_CONSTMETHOD0(absl::optional, maxPacketLifeTime) -PROXY_CONSTMETHOD0(std::string, protocol) -PROXY_CONSTMETHOD0(bool, negotiated) -PROXY_CONSTMETHOD0(int, id) -PROXY_CONSTMETHOD0(DataState, state) -PROXY_CONSTMETHOD0(RTCError, error) -PROXY_CONSTMETHOD0(uint32_t, messages_sent) -PROXY_CONSTMETHOD0(uint64_t, bytes_sent) -PROXY_CONSTMETHOD0(uint32_t, messages_received) -PROXY_CONSTMETHOD0(uint64_t, bytes_received) -PROXY_CONSTMETHOD0(uint64_t, buffered_amount) -PROXY_METHOD0(void, Close) -PROXY_METHOD1(bool, Send, const DataBuffer&) -END_PROXY_MAP() - -} // namespace webrtc - -#endif // PC_DATA_CHANNEL_H_ diff --git a/pc/data_channel_controller.cc b/pc/data_channel_controller.cc index 2800992ab6..9fabe13cc7 100644 --- a/pc/data_channel_controller.cc +++ b/pc/data_channel_controller.cc 
@@ -25,88 +25,74 @@ bool DataChannelController::HasDataChannels() const { bool DataChannelController::SendData(const cricket::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) { - // RTC_DCHECK_RUN_ON(signaling_thread()); - if (data_channel_transport()) { - SendDataParams send_params; - send_params.type = ToWebrtcDataMessageType(params.type); - send_params.ordered = params.ordered; - if (params.max_rtx_count >= 0) { - send_params.max_rtx_count = params.max_rtx_count; - } else if (params.max_rtx_ms >= 0) { - send_params.max_rtx_ms = params.max_rtx_ms; - } - - RTCError error = network_thread()->Invoke( - RTC_FROM_HERE, [this, params, send_params, payload] { - return data_channel_transport()->SendData(params.sid, send_params, - payload); - }); - - if (error.ok()) { - *result = cricket::SendDataResult::SDR_SUCCESS; - return true; - } else if (error.type() == RTCErrorType::RESOURCE_EXHAUSTED) { - // SCTP transport uses RESOURCE_EXHAUSTED when it's blocked. - // TODO(mellem): Stop using RTCError here and get rid of the mapping. - *result = cricket::SendDataResult::SDR_BLOCK; - return false; - } - *result = cricket::SendDataResult::SDR_ERROR; - return false; - } else if (rtp_data_channel()) { + if (data_channel_transport()) + return DataChannelSendData(params, payload, result); + if (rtp_data_channel()) return rtp_data_channel()->SendData(params, payload, result); - } RTC_LOG(LS_ERROR) << "SendData called before transport is ready"; return false; } bool DataChannelController::ConnectDataChannel( - DataChannel* webrtc_data_channel) { + RtpDataChannel* webrtc_data_channel) { RTC_DCHECK_RUN_ON(signaling_thread()); - if (!rtp_data_channel() && !data_channel_transport()) { + if (!rtp_data_channel()) { // Don't log an error here, because DataChannels are expected to call // ConnectDataChannel in this state. It's the only way to initially tell // whether or not the underlying transport is ready. 
return false; } - if (data_channel_transport()) { - SignalDataChannelTransportWritable_s.connect(webrtc_data_channel, - &DataChannel::OnChannelReady); - SignalDataChannelTransportReceivedData_s.connect( - webrtc_data_channel, &DataChannel::OnDataReceived); - SignalDataChannelTransportChannelClosing_s.connect( - webrtc_data_channel, &DataChannel::OnClosingProcedureStartedRemotely); - SignalDataChannelTransportChannelClosed_s.connect( - webrtc_data_channel, &DataChannel::OnClosingProcedureComplete); - } - if (rtp_data_channel()) { - rtp_data_channel()->SignalReadyToSendData.connect( - webrtc_data_channel, &DataChannel::OnChannelReady); - rtp_data_channel()->SignalDataReceived.connect( - webrtc_data_channel, &DataChannel::OnDataReceived); - } + rtp_data_channel()->SignalReadyToSendData.connect( + webrtc_data_channel, &RtpDataChannel::OnChannelReady); + rtp_data_channel()->SignalDataReceived.connect( + webrtc_data_channel, &RtpDataChannel::OnDataReceived); return true; } void DataChannelController::DisconnectDataChannel( - DataChannel* webrtc_data_channel) { + RtpDataChannel* webrtc_data_channel) { RTC_DCHECK_RUN_ON(signaling_thread()); - if (!rtp_data_channel() && !data_channel_transport()) { + if (!rtp_data_channel()) { RTC_LOG(LS_ERROR) - << "DisconnectDataChannel called when rtp_data_channel_ and " - "sctp_transport_ are NULL."; + << "DisconnectDataChannel called when rtp_data_channel_ is NULL."; return; } - if (data_channel_transport()) { - SignalDataChannelTransportWritable_s.disconnect(webrtc_data_channel); - SignalDataChannelTransportReceivedData_s.disconnect(webrtc_data_channel); - SignalDataChannelTransportChannelClosing_s.disconnect(webrtc_data_channel); - SignalDataChannelTransportChannelClosed_s.disconnect(webrtc_data_channel); + rtp_data_channel()->SignalReadyToSendData.disconnect(webrtc_data_channel); + rtp_data_channel()->SignalDataReceived.disconnect(webrtc_data_channel); +} + +bool DataChannelController::ConnectDataChannel( + SctpDataChannel* 
webrtc_data_channel) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!data_channel_transport()) { + // Don't log an error here, because DataChannels are expected to call + // ConnectDataChannel in this state. It's the only way to initially tell + // whether or not the underlying transport is ready. + return false; } - if (rtp_data_channel()) { - rtp_data_channel()->SignalReadyToSendData.disconnect(webrtc_data_channel); - rtp_data_channel()->SignalDataReceived.disconnect(webrtc_data_channel); + SignalDataChannelTransportWritable_s.connect( + webrtc_data_channel, &SctpDataChannel::OnTransportReady); + SignalDataChannelTransportReceivedData_s.connect( + webrtc_data_channel, &SctpDataChannel::OnDataReceived); + SignalDataChannelTransportChannelClosing_s.connect( + webrtc_data_channel, &SctpDataChannel::OnClosingProcedureStartedRemotely); + SignalDataChannelTransportChannelClosed_s.connect( + webrtc_data_channel, &SctpDataChannel::OnClosingProcedureComplete); + return true; +} + +void DataChannelController::DisconnectDataChannel( + SctpDataChannel* webrtc_data_channel) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!data_channel_transport()) { + RTC_LOG(LS_ERROR) + << "DisconnectDataChannel called when sctp_transport_ is NULL."; + return; } + SignalDataChannelTransportWritable_s.disconnect(webrtc_data_channel); + SignalDataChannelTransportReceivedData_s.disconnect(webrtc_data_channel); + SignalDataChannelTransportChannelClosing_s.disconnect(webrtc_data_channel); + SignalDataChannelTransportChannelClosed_s.disconnect(webrtc_data_channel); } void DataChannelController::AddSctpDataStream(int sid) { @@ -143,9 +129,17 @@ void DataChannelController::OnDataReceived( cricket::ReceiveDataParams params; params.sid = channel_id; params.type = ToCricketDataMessageType(type); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this, params, buffer] { RTC_DCHECK_RUN_ON(signaling_thread()); + // 
TODO(bugs.webrtc.org/11547): The data being received should be + // delivered on the network thread. The way HandleOpenMessage_s works + // right now is that it's called for all types of buffers and operates + // as a selector function. Change this so that it's only called for + // buffers that it should be able to handle. Once we do that, we can + // deliver all other buffers on the network thread (change + // SignalDataChannelTransportReceivedData_s to + // SignalDataChannelTransportReceivedData_n). if (!HandleOpenMessage_s(params, buffer)) { SignalDataChannelTransportReceivedData_s(params, buffer); } @@ -154,7 +148,7 @@ void DataChannelController::OnDataReceived( void DataChannelController::OnChannelClosing(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this, channel_id] { RTC_DCHECK_RUN_ON(signaling_thread()); SignalDataChannelTransportChannelClosing_s(channel_id); @@ -163,7 +157,7 @@ void DataChannelController::OnChannelClosing(int channel_id) { void DataChannelController::OnChannelClosed(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this, channel_id] { RTC_DCHECK_RUN_ON(signaling_thread()); SignalDataChannelTransportChannelClosed_s(channel_id); @@ -172,7 +166,7 @@ void DataChannelController::OnChannelClosed(int channel_id) { void DataChannelController::OnReadyToSend() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this] { RTC_DCHECK_RUN_ON(signaling_thread()); data_channel_transport_ready_to_send_ = true; @@ -183,7 +177,7 @@ void DataChannelController::OnReadyToSend() { void DataChannelController::OnTransportClosed() { RTC_DCHECK_RUN_ON(network_thread()); - 
data_channel_transport_invoker_->AsyncInvoke( + data_channel_transport_invoker_.AsyncInvoke( RTC_FROM_HERE, signaling_thread(), [this] { RTC_DCHECK_RUN_ON(signaling_thread()); OnTransportChannelClosed(); @@ -192,12 +186,15 @@ void DataChannelController::OnTransportClosed() { void DataChannelController::SetupDataChannelTransport_n() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_ = std::make_unique(); + + // There's a new data channel transport. This needs to be signaled to the + // |sctp_data_channels_| so that they can reopen and reconnect. This is + // necessary when bundling is applied. + NotifyDataChannelsOfTransportCreated(); } void DataChannelController::TeardownDataChannelTransport_n() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_ = nullptr; if (data_channel_transport()) { data_channel_transport()->SetDataSink(nullptr); } @@ -219,17 +216,21 @@ void DataChannelController::OnTransportChanged( // There's a new data channel transport. This needs to be signaled to the // |sctp_data_channels_| so that they can reopen and reconnect. This is // necessary when bundling is applied. 
- data_channel_transport_invoker_->AsyncInvoke( - RTC_FROM_HERE, signaling_thread(), [this] { - RTC_DCHECK_RUN_ON(signaling_thread()); - for (auto channel : sctp_data_channels_) { - channel->OnTransportChannelCreated(); - } - }); + NotifyDataChannelsOfTransportCreated(); } } } +std::vector DataChannelController::GetDataChannelStats() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector stats; + stats.reserve(sctp_data_channels_.size()); + for (const auto& channel : sctp_data_channels_) + stats.push_back(channel->GetStats()); + return stats; +} + bool DataChannelController::HandleOpenMessage_s( const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { @@ -254,21 +255,19 @@ bool DataChannelController::HandleOpenMessage_s( void DataChannelController::OnDataChannelOpenMessage( const std::string& label, const InternalDataChannelInit& config) { - rtc::scoped_refptr channel( - InternalCreateDataChannel(label, &config)); + rtc::scoped_refptr channel( + InternalCreateDataChannelWithProxy(label, &config)); if (!channel.get()) { RTC_LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message."; return; } - rtc::scoped_refptr proxy_channel = - DataChannelProxy::Create(signaling_thread(), channel); - pc_->Observer()->OnDataChannel(std::move(proxy_channel)); + pc_->Observer()->OnDataChannel(std::move(channel)); pc_->NoteDataAddedEvent(); } -rtc::scoped_refptr -DataChannelController::InternalCreateDataChannel( +rtc::scoped_refptr +DataChannelController::InternalCreateDataChannelWithProxy( const std::string& label, const InternalDataChannelInit* config) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -280,51 +279,78 @@ DataChannelController::InternalCreateDataChannel( << "InternalCreateDataChannel: Data is not supported in this call."; return nullptr; } - InternalDataChannelInit new_config = - config ? 
(*config) : InternalDataChannelInit(); - if (DataChannel::IsSctpLike(data_channel_type_)) { - if (new_config.id < 0) { - rtc::SSLRole role; - if ((pc_->GetSctpSslRole(&role)) && - !sid_allocator_.AllocateSid(role, &new_config.id)) { - RTC_LOG(LS_ERROR) - << "No id can be allocated for the SCTP data channel."; - return nullptr; - } - } else if (!sid_allocator_.ReserveSid(new_config.id)) { - RTC_LOG(LS_ERROR) << "Failed to create a SCTP data channel " - "because the id is already in use or out of range."; - return nullptr; + if (IsSctpLike(data_channel_type())) { + rtc::scoped_refptr channel = + InternalCreateSctpDataChannel(label, config); + if (channel) { + return SctpDataChannel::CreateProxy(channel); + } + } else if (data_channel_type() == cricket::DCT_RTP) { + rtc::scoped_refptr channel = + InternalCreateRtpDataChannel(label, config); + if (channel) { + return RtpDataChannel::CreateProxy(channel); } } - rtc::scoped_refptr channel( - DataChannel::Create(this, data_channel_type(), label, new_config)); + return nullptr; +} + +rtc::scoped_refptr +DataChannelController::InternalCreateRtpDataChannel( + const std::string& label, + const DataChannelInit* config) { + RTC_DCHECK_RUN_ON(signaling_thread()); + DataChannelInit new_config = config ? 
(*config) : DataChannelInit(); + rtc::scoped_refptr channel( + RtpDataChannel::Create(this, label, new_config, signaling_thread())); if (!channel) { - sid_allocator_.ReleaseSid(new_config.id); return nullptr; } + if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) { + RTC_LOG(LS_ERROR) << "DataChannel with label " << channel->label() + << " already exists."; + return nullptr; + } + rtp_data_channels_[channel->label()] = channel; + SignalRtpDataChannelCreated_(channel.get()); + return channel; +} - if (channel->data_channel_type() == cricket::DCT_RTP) { - if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) { - RTC_LOG(LS_ERROR) << "DataChannel with label " << channel->label() - << " already exists."; +rtc::scoped_refptr +DataChannelController::InternalCreateSctpDataChannel( + const std::string& label, + const InternalDataChannelInit* config) { + RTC_DCHECK_RUN_ON(signaling_thread()); + InternalDataChannelInit new_config = + config ? (*config) : InternalDataChannelInit(); + if (new_config.id < 0) { + rtc::SSLRole role; + if ((pc_->GetSctpSslRole(&role)) && + !sid_allocator_.AllocateSid(role, &new_config.id)) { + RTC_LOG(LS_ERROR) << "No id can be allocated for the SCTP data channel."; return nullptr; } - rtp_data_channels_[channel->label()] = channel; - } else { - RTC_DCHECK(DataChannel::IsSctpLike(data_channel_type_)); - sctp_data_channels_.push_back(channel); - channel->SignalClosed.connect(pc_, - &PeerConnection::OnSctpDataChannelClosed); + } else if (!sid_allocator_.ReserveSid(new_config.id)) { + RTC_LOG(LS_ERROR) << "Failed to create a SCTP data channel " + "because the id is already in use or out of range."; + return nullptr; + } + rtc::scoped_refptr channel(SctpDataChannel::Create( + this, label, new_config, signaling_thread(), network_thread())); + if (!channel) { + sid_allocator_.ReleaseSid(new_config.id); + return nullptr; } - SignalDataChannelCreated_(channel.get()); + sctp_data_channels_.push_back(channel); + 
channel->SignalClosed.connect(pc_, &PeerConnection::OnSctpDataChannelClosed); + SignalSctpDataChannelCreated_(channel.get()); return channel; } void DataChannelController::AllocateSctpSids(rtc::SSLRole role) { RTC_DCHECK_RUN_ON(signaling_thread()); - std::vector> channels_to_close; + std::vector> channels_to_close; for (const auto& channel : sctp_data_channels_) { if (channel->id() < 0) { int sid; @@ -343,7 +369,7 @@ void DataChannelController::AllocateSctpSids(rtc::SSLRole role) { } } -void DataChannelController::OnSctpDataChannelClosed(DataChannel* channel) { +void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) { RTC_DCHECK_RUN_ON(signaling_thread()); for (auto it = sctp_data_channels_.begin(); it != sctp_data_channels_.end(); ++it) { @@ -373,20 +399,20 @@ void DataChannelController::OnTransportChannelClosed() { RTC_DCHECK_RUN_ON(signaling_thread()); // Use a temporary copy of the RTP/SCTP DataChannel list because the // DataChannel may callback to us and try to modify the list. - std::map> temp_rtp_dcs; + std::map> temp_rtp_dcs; temp_rtp_dcs.swap(rtp_data_channels_); for (const auto& kv : temp_rtp_dcs) { kv.second->OnTransportChannelClosed(); } - std::vector> temp_sctp_dcs; + std::vector> temp_sctp_dcs; temp_sctp_dcs.swap(sctp_data_channels_); for (const auto& channel : temp_sctp_dcs) { channel->OnTransportChannelClosed(); } } -DataChannel* DataChannelController::FindDataChannelBySid(int sid) const { +SctpDataChannel* DataChannelController::FindDataChannelBySid(int sid) const { RTC_DCHECK_RUN_ON(signaling_thread()); for (const auto& channel : sctp_data_channels_) { if (channel->id() == sid) { @@ -424,9 +450,10 @@ void DataChannelController::UpdateLocalRtpDataChannels( void DataChannelController::UpdateRemoteRtpDataChannels( const cricket::StreamParamsVec& streams) { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector existing_channels; - RTC_DCHECK_RUN_ON(signaling_thread()); // Find new and active data channels. 
for (const cricket::StreamParams& params : streams) { // The data channel label is either the mslabel or the SSRC if the mslabel @@ -447,12 +474,44 @@ void DataChannelController::UpdateRemoteRtpDataChannels( UpdateClosingRtpDataChannels(existing_channels, false); } +cricket::DataChannelType DataChannelController::data_channel_type() const { + // TODO(bugs.webrtc.org/9987): Should be restricted to the signaling thread. + // RTC_DCHECK_RUN_ON(signaling_thread()); + return data_channel_type_; +} + +void DataChannelController::set_data_channel_type( + cricket::DataChannelType type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + data_channel_type_ = type; +} + +DataChannelTransportInterface* DataChannelController::data_channel_transport() + const { + // TODO(bugs.webrtc.org/11547): Only allow this accessor to be called on the + // network thread. + // RTC_DCHECK_RUN_ON(network_thread()); + return data_channel_transport_; +} + +void DataChannelController::set_data_channel_transport( + DataChannelTransportInterface* transport) { + RTC_DCHECK_RUN_ON(network_thread()); + data_channel_transport_ = transport; +} + +const std::map>* +DataChannelController::rtp_data_channels() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return &rtp_data_channels_; +} + void DataChannelController::UpdateClosingRtpDataChannels( const std::vector& active_channels, bool is_local_update) { auto it = rtp_data_channels_.begin(); while (it != rtp_data_channels_.end()) { - DataChannel* data_channel = it->second; + RtpDataChannel* data_channel = it->second; if (absl::c_linear_search(active_channels, data_channel->label())) { ++it; continue; @@ -464,7 +523,7 @@ void DataChannelController::UpdateClosingRtpDataChannels( data_channel->RemotePeerRequestClose(); } - if (data_channel->state() == DataChannel::kClosed) { + if (data_channel->state() == RtpDataChannel::kClosed) { rtp_data_channels_.erase(it); it = rtp_data_channels_.begin(); } else { @@ -475,8 +534,11 @@ void 
DataChannelController::UpdateClosingRtpDataChannels( void DataChannelController::CreateRemoteRtpDataChannel(const std::string& label, uint32_t remote_ssrc) { - rtc::scoped_refptr channel( - InternalCreateDataChannel(label, nullptr)); + if (data_channel_type() != cricket::DCT_RTP) { + return; + } + rtc::scoped_refptr channel( + InternalCreateRtpDataChannel(label, nullptr)); if (!channel.get()) { RTC_LOG(LS_WARNING) << "Remote peer requested a DataChannel but" "CreateDataChannel failed."; @@ -484,10 +546,59 @@ void DataChannelController::CreateRemoteRtpDataChannel(const std::string& label, } channel->SetReceiveSsrc(remote_ssrc); rtc::scoped_refptr proxy_channel = - DataChannelProxy::Create(signaling_thread(), channel); + RtpDataChannel::CreateProxy(std::move(channel)); pc_->Observer()->OnDataChannel(std::move(proxy_channel)); } +bool DataChannelController::DataChannelSendData( + const cricket::SendDataParams& params, + const rtc::CopyOnWriteBuffer& payload, + cricket::SendDataResult* result) { + // TODO(bugs.webrtc.org/11547): Expect method to be called on the network + // thread instead. Remove the Invoke() below and move assocated state to + // the network thread. + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(data_channel_transport()); + + SendDataParams send_params; + send_params.type = ToWebrtcDataMessageType(params.type); + send_params.ordered = params.ordered; + if (params.max_rtx_count >= 0) { + send_params.max_rtx_count = params.max_rtx_count; + } else if (params.max_rtx_ms >= 0) { + send_params.max_rtx_ms = params.max_rtx_ms; + } + + RTCError error = network_thread()->Invoke( + RTC_FROM_HERE, [this, params, send_params, payload] { + return data_channel_transport()->SendData(params.sid, send_params, + payload); + }); + + if (error.ok()) { + *result = cricket::SendDataResult::SDR_SUCCESS; + return true; + } else if (error.type() == RTCErrorType::RESOURCE_EXHAUSTED) { + // SCTP transport uses RESOURCE_EXHAUSTED when it's blocked. 
+ // TODO(mellem): Stop using RTCError here and get rid of the mapping. + *result = cricket::SendDataResult::SDR_BLOCK; + return false; + } + *result = cricket::SendDataResult::SDR_ERROR; + return false; +} + +void DataChannelController::NotifyDataChannelsOfTransportCreated() { + RTC_DCHECK_RUN_ON(network_thread()); + data_channel_transport_invoker_.AsyncInvoke( + RTC_FROM_HERE, signaling_thread(), [this] { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& channel : sctp_data_channels_) { + channel->OnTransportChannelCreated(); + } + }); +} + rtc::Thread* DataChannelController::network_thread() const { return pc_->network_thread(); } diff --git a/pc/data_channel_controller.h b/pc/data_channel_controller.h index 60bcbb32a8..6759288825 100644 --- a/pc/data_channel_controller.h +++ b/pc/data_channel_controller.h @@ -17,14 +17,16 @@ #include #include "pc/channel.h" -#include "pc/data_channel.h" +#include "pc/rtp_data_channel.h" +#include "pc/sctp_data_channel.h" #include "rtc_base/weak_ptr.h" namespace webrtc { class PeerConnection; -class DataChannelController : public DataChannelProviderInterface, +class DataChannelController : public RtpDataChannelProviderInterface, + public SctpDataChannelProviderInterface, public DataChannelSink { public: explicit DataChannelController(PeerConnection* pc) : pc_(pc) {} @@ -35,12 +37,15 @@ class DataChannelController : public DataChannelProviderInterface, DataChannelController(DataChannelController&&) = delete; DataChannelController& operator=(DataChannelController&& other) = delete; - // Implements DataChannelProviderInterface. + // Implements RtpDataChannelProviderInterface/ + // SctpDataChannelProviderInterface. 
bool SendData(const cricket::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override; - bool ConnectDataChannel(DataChannel* webrtc_data_channel) override; - void DisconnectDataChannel(DataChannel* webrtc_data_channel) override; + bool ConnectDataChannel(RtpDataChannel* webrtc_data_channel) override; + void DisconnectDataChannel(RtpDataChannel* webrtc_data_channel) override; + bool ConnectDataChannel(SctpDataChannel* webrtc_data_channel) override; + void DisconnectDataChannel(SctpDataChannel* webrtc_data_channel) override; void AddSctpDataStream(int sid) override; void RemoveSctpDataStream(int sid) override; bool ReadyToSendData() const override; @@ -64,15 +69,18 @@ class DataChannelController : public DataChannelProviderInterface, void OnTransportChanged( DataChannelTransportInterface* data_channel_transport); + // Called from PeerConnection::GetDataChannelStats on the signaling thread. + std::vector GetDataChannelStats() const; + // Creates channel and adds it to the collection of DataChannels that will - // be offered in a SessionDescription. - rtc::scoped_refptr InternalCreateDataChannel( + // be offered in a SessionDescription, and wraps it in a proxy object. + rtc::scoped_refptr InternalCreateDataChannelWithProxy( const std::string& label, const InternalDataChannelInit* config) /* RTC_RUN_ON(signaling_thread()) */; void AllocateSctpSids(rtc::SSLRole role); - DataChannel* FindDataChannelBySid(int sid) const; + SctpDataChannel* FindDataChannelBySid(int sid) const; // Checks if any data channel has been added. 
bool HasDataChannels() const; @@ -89,45 +97,42 @@ class DataChannelController : public DataChannelProviderInterface, void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams); // Accessors - cricket::DataChannelType data_channel_type() const { - return data_channel_type_; - } - void set_data_channel_type(cricket::DataChannelType type) { - data_channel_type_ = type; - } + cricket::DataChannelType data_channel_type() const; + void set_data_channel_type(cricket::DataChannelType type); cricket::RtpDataChannel* rtp_data_channel() const { return rtp_data_channel_; } void set_rtp_data_channel(cricket::RtpDataChannel* channel) { rtp_data_channel_ = channel; } - DataChannelTransportInterface* data_channel_transport() const { - return data_channel_transport_; - } - void set_data_channel_transport(DataChannelTransportInterface* transport) { - data_channel_transport_ = transport; - } - const std::map>* - rtp_data_channels() const { + DataChannelTransportInterface* data_channel_transport() const; + void set_data_channel_transport(DataChannelTransportInterface* transport); + const std::map>* + rtp_data_channels() const; + + sigslot::signal1& SignalRtpDataChannelCreated() { RTC_DCHECK_RUN_ON(signaling_thread()); - return &rtp_data_channels_; + return SignalRtpDataChannelCreated_; } - const std::vector>* sctp_data_channels() - const { + sigslot::signal1& SignalSctpDataChannelCreated() { RTC_DCHECK_RUN_ON(signaling_thread()); - return &sctp_data_channels_; - } - - sigslot::signal1& SignalDataChannelCreated() { - RTC_DCHECK_RUN_ON(signaling_thread()); - return SignalDataChannelCreated_; + return SignalSctpDataChannelCreated_; } // Called when the transport for the data channels is closed or destroyed. 
void OnTransportChannelClosed(); - void OnSctpDataChannelClosed(DataChannel* channel); + void OnSctpDataChannelClosed(SctpDataChannel* channel); private: + rtc::scoped_refptr InternalCreateRtpDataChannel( + const std::string& label, + const DataChannelInit* config) /* RTC_RUN_ON(signaling_thread()) */; + + rtc::scoped_refptr InternalCreateSctpDataChannel( + const std::string& label, + const InternalDataChannelInit* + config) /* RTC_RUN_ON(signaling_thread()) */; + // Parses and handles open messages. Returns true if the message is an open // message, false otherwise. bool HandleOpenMessage_s(const cricket::ReceiveDataParams& params, @@ -146,6 +151,15 @@ class DataChannelController : public DataChannelProviderInterface, const std::vector& active_channels, bool is_local_update) RTC_RUN_ON(signaling_thread()); + // Called from SendData when data_channel_transport() is true. + bool DataChannelSendData(const cricket::SendDataParams& params, + const rtc::CopyOnWriteBuffer& payload, + cricket::SendDataResult* result); + + // Called when all data channels need to be notified of a transport channel + // (calls OnTransportChannelCreated on the signaling thread). + void NotifyDataChannelsOfTransportCreated(); + rtc::Thread* network_thread() const; rtc::Thread* signaling_thread() const; @@ -178,17 +192,19 @@ class DataChannelController : public DataChannelProviderInterface, // signaling and some other thread. SctpSidAllocator sid_allocator_ /* RTC_GUARDED_BY(signaling_thread()) */; - std::vector> sctp_data_channels_ + std::vector> sctp_data_channels_ RTC_GUARDED_BY(signaling_thread()); - std::vector> sctp_data_channels_to_free_ + std::vector> sctp_data_channels_to_free_ RTC_GUARDED_BY(signaling_thread()); // Map of label -> DataChannel - std::map> rtp_data_channels_ + std::map> rtp_data_channels_ RTC_GUARDED_BY(signaling_thread()); // Signals from |data_channel_transport_|. These are invoked on the // signaling thread. 
+ // TODO(bugs.webrtc.org/11547): These '_s' signals likely all belong on the + // network thread. sigslot::signal1 SignalDataChannelTransportWritable_s RTC_GUARDED_BY(signaling_thread()); sigslot::signal2 SignalDataChannelTransportChannelClosed_s RTC_GUARDED_BY(signaling_thread()); - sigslot::signal1 SignalDataChannelCreated_ + sigslot::signal1 SignalRtpDataChannelCreated_ + RTC_GUARDED_BY(signaling_thread()); + sigslot::signal1 SignalSctpDataChannelCreated_ RTC_GUARDED_BY(signaling_thread()); - // Used to invoke data channel transport signals on the signaling thread. - std::unique_ptr data_channel_transport_invoker_ + // Used from the network thread to invoke data channel transport signals on + // the signaling thread. + rtc::AsyncInvoker data_channel_transport_invoker_ RTC_GUARDED_BY(network_thread()); // Owning PeerConnection. diff --git a/pc/data_channel_unittest.cc b/pc/data_channel_unittest.cc index 6bb8f7e5c7..7048dc82b7 100644 --- a/pc/data_channel_unittest.cc +++ b/pc/data_channel_unittest.cc @@ -8,20 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "pc/data_channel.h" - #include #include #include +#include "pc/sctp_data_channel.h" #include "pc/sctp_utils.h" #include "pc/test/fake_data_channel_provider.h" #include "rtc_base/gunit.h" #include "rtc_base/numerics/safe_conversions.h" #include "test/gtest.h" -using webrtc::DataChannel; +using webrtc::DataChannelInterface; +using webrtc::SctpDataChannel; using webrtc::SctpSidAllocator; static constexpr int kDefaultTimeout = 10000; @@ -64,14 +64,16 @@ class FakeDataChannelObserver : public webrtc::DataChannelObserver { // TODO(deadbeef): The fact that these tests use a fake provider makes them not // too valuable. Should rewrite using the // peerconnection_datachannel_unittest.cc infrastructure. +// TODO(bugs.webrtc.org/11547): Incorporate a dedicated network thread. 
class SctpDataChannelTest : public ::testing::Test { protected: SctpDataChannelTest() : provider_(new FakeDataChannelProvider()), - webrtc_data_channel_(DataChannel::Create(provider_.get(), - cricket::DCT_SCTP, - "test", - init_)) {} + webrtc_data_channel_(SctpDataChannel::Create(provider_.get(), + "test", + init_, + rtc::Thread::Current(), + rtc::Thread::Current())) {} void SetChannelReady() { provider_->set_transport_available(true); @@ -90,7 +92,7 @@ class SctpDataChannelTest : public ::testing::Test { webrtc::InternalDataChannelInit init_; std::unique_ptr provider_; std::unique_ptr observer_; - rtc::scoped_refptr webrtc_data_channel_; + rtc::scoped_refptr webrtc_data_channel_; }; class StateSignalsListener : public sigslot::has_slots<> { @@ -98,9 +100,9 @@ class StateSignalsListener : public sigslot::has_slots<> { int opened_count() const { return opened_count_; } int closed_count() const { return closed_count_; } - void OnSignalOpened(DataChannel* data_channel) { ++opened_count_; } + void OnSignalOpened(DataChannelInterface* data_channel) { ++opened_count_; } - void OnSignalClosed(DataChannel* data_channel) { ++closed_count_; } + void OnSignalClosed(DataChannelInterface* data_channel) { ++closed_count_; } private: int opened_count_ = 0; @@ -110,8 +112,9 @@ class StateSignalsListener : public sigslot::has_slots<> { // Verifies that the data channel is connected to the transport after creation. TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) { provider_->set_transport_available(true); - rtc::scoped_refptr dc = - DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init_); + rtc::scoped_refptr dc = + SctpDataChannel::Create(provider_.get(), "test1", init_, + rtc::Thread::Current(), rtc::Thread::Current()); EXPECT_TRUE(provider_->IsConnected(dc.get())); // The sid is not set yet, so it should not have added the streams. 
@@ -304,8 +307,9 @@ TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) { SetChannelReady(); webrtc::InternalDataChannelInit init; init.id = 1; - rtc::scoped_refptr dc = - DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init); + rtc::scoped_refptr dc = + SctpDataChannel::Create(provider_.get(), "test1", init, + rtc::Thread::Current(), rtc::Thread::Current()); EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, dc->state()); EXPECT_TRUE_WAIT(webrtc::DataChannelInterface::kOpen == dc->state(), 1000); } @@ -317,8 +321,9 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) { webrtc::InternalDataChannelInit init; init.id = 1; init.ordered = false; - rtc::scoped_refptr dc = - DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init); + rtc::scoped_refptr dc = + SctpDataChannel::Create(provider_.get(), "test1", init, + rtc::Thread::Current(), rtc::Thread::Current()); EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000); @@ -347,8 +352,9 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) { webrtc::InternalDataChannelInit init; init.id = 1; init.ordered = false; - rtc::scoped_refptr dc = - DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init); + rtc::scoped_refptr dc = + SctpDataChannel::Create(provider_.get(), "test1", init, + rtc::Thread::Current(), rtc::Thread::Current()); EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000); @@ -448,8 +454,9 @@ TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) { config.open_handshake_role = webrtc::InternalDataChannelInit::kNone; SetChannelReady(); - rtc::scoped_refptr dc = - DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config); + rtc::scoped_refptr dc = + SctpDataChannel::Create(provider_.get(), "test1", config, + rtc::Thread::Current(), rtc::Thread::Current()); EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000); EXPECT_EQ(0U, 
provider_->last_send_data_params().ssrc); @@ -511,8 +518,9 @@ TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) { config.open_handshake_role = webrtc::InternalDataChannelInit::kAcker; SetChannelReady(); - rtc::scoped_refptr dc = - DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config); + rtc::scoped_refptr dc = + SctpDataChannel::Create(provider_.get(), "test1", config, + rtc::Thread::Current(), rtc::Thread::Current()); EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000); @@ -630,9 +638,9 @@ TEST_F(SctpDataChannelTest, TransportDestroyedWhileDataBuffered) { EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed, webrtc_data_channel_->state(), kDefaultTimeout); EXPECT_FALSE(webrtc_data_channel_->error().ok()); - EXPECT_EQ(webrtc::RTCErrorType::NETWORK_ERROR, + EXPECT_EQ(webrtc::RTCErrorType::OPERATION_ERROR_WITH_DATA, webrtc_data_channel_->error().type()); - EXPECT_EQ(webrtc::RTCErrorDetailType::NONE, + EXPECT_EQ(webrtc::RTCErrorDetailType::SCTP_FAILURE, webrtc_data_channel_->error().error_detail()); } diff --git a/pc/data_channel_utils.cc b/pc/data_channel_utils.cc new file mode 100644 index 0000000000..51d6af941f --- /dev/null +++ b/pc/data_channel_utils.cc @@ -0,0 +1,54 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/data_channel_utils.h" + +namespace webrtc { + +bool PacketQueue::Empty() const { + return packets_.empty(); +} + +std::unique_ptr PacketQueue::PopFront() { + RTC_DCHECK(!packets_.empty()); + byte_count_ -= packets_.front()->size(); + std::unique_ptr packet = std::move(packets_.front()); + packets_.pop_front(); + return packet; +} + +void PacketQueue::PushFront(std::unique_ptr packet) { + byte_count_ += packet->size(); + packets_.push_front(std::move(packet)); +} + +void PacketQueue::PushBack(std::unique_ptr packet) { + byte_count_ += packet->size(); + packets_.push_back(std::move(packet)); +} + +void PacketQueue::Clear() { + packets_.clear(); + byte_count_ = 0; +} + +void PacketQueue::Swap(PacketQueue* other) { + size_t other_byte_count = other->byte_count_; + other->byte_count_ = byte_count_; + byte_count_ = other_byte_count; + + other->packets_.swap(packets_); +} + +bool IsSctpLike(cricket::DataChannelType type) { + return type == cricket::DCT_SCTP; +} + +} // namespace webrtc diff --git a/pc/data_channel_utils.h b/pc/data_channel_utils.h new file mode 100644 index 0000000000..13c6620cd8 --- /dev/null +++ b/pc/data_channel_utils.h @@ -0,0 +1,62 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_DATA_CHANNEL_UTILS_H_ +#define PC_DATA_CHANNEL_UTILS_H_ + +#include +#include +#include +#include + +#include "api/data_channel_interface.h" +#include "media/base/media_engine.h" + +namespace webrtc { + +// A packet queue which tracks the total queued bytes. Queued packets are +// owned by this class. 
+class PacketQueue final { + public: + size_t byte_count() const { return byte_count_; } + + bool Empty() const; + + std::unique_ptr PopFront(); + + void PushFront(std::unique_ptr packet); + void PushBack(std::unique_ptr packet); + + void Clear(); + + void Swap(PacketQueue* other); + + private: + std::deque> packets_; + size_t byte_count_ = 0; +}; + +struct DataChannelStats { + int internal_id; + int id; + std::string label; + std::string protocol; + DataChannelInterface::DataState state; + uint32_t messages_sent; + uint32_t messages_received; + uint64_t bytes_sent; + uint64_t bytes_received; +}; + +bool IsSctpLike(cricket::DataChannelType type); + +} // namespace webrtc + +#endif // PC_DATA_CHANNEL_UTILS_H_ diff --git a/pc/datagram_rtp_transport.cc b/pc/datagram_rtp_transport.cc deleted file mode 100644 index 388a92090a..0000000000 --- a/pc/datagram_rtp_transport.cc +++ /dev/null @@ -1,385 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/datagram_rtp_transport.h" - -#include -#include -#include - -#include "absl/memory/memory.h" -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/rtc_error.h" -#include "media/base/rtp_utils.h" -#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "modules/rtp_rtcp/source/rtp_packet.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/packet_transport_internal.h" -#include "rtc_base/buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/dscp.h" -#include "rtc_base/logging.h" -#include "rtc_base/rtc_certificate.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/stream.h" -#include "rtc_base/thread.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { - -namespace { - -// Field trials. -// Disable datagram to RTCP feedback translation and enable RTCP feedback loop -// on top of datagram feedback loop. Note that two -// feedback loops add unneccesary overhead, so it's preferable to use feedback -// loop provided by datagram transport and convert datagram ACKs to RTCP ACKs, -// but enabling RTCP feedback loop may be useful in tests and experiments. -const char kDisableDatagramToRtcpFeebackTranslationFieldTrial[] = - "WebRTC-kDisableDatagramToRtcpFeebackTranslation"; - -} // namespace - -// Maximum packet size of RTCP feedback packet for allocation. We re-create RTCP -// feedback packets when we get ACK notifications from datagram transport. Our -// rtcp feedback packets contain only 1 ACK, so they are much smaller than 1250. 
-constexpr size_t kMaxRtcpFeedbackPacketSize = 1250; - -DatagramRtpTransport::DatagramRtpTransport( - const std::vector& rtp_header_extensions, - cricket::IceTransportInternal* ice_transport, - DatagramTransportInterface* datagram_transport) - : ice_transport_(ice_transport), - datagram_transport_(datagram_transport), - disable_datagram_to_rtcp_feeback_translation_(field_trial::IsEnabled( - kDisableDatagramToRtcpFeebackTranslationFieldTrial)) { - // Save extension map for parsing RTP packets (we only need transport - // sequence numbers). - const RtpExtension* transport_sequence_number_extension = - RtpExtension::FindHeaderExtensionByUri(rtp_header_extensions, - TransportSequenceNumber::kUri); - - if (transport_sequence_number_extension != nullptr) { - rtp_header_extension_map_.Register( - transport_sequence_number_extension->id); - } else { - RTC_LOG(LS_ERROR) << "Transport sequence numbers are not supported in " - "datagram transport connection"; - } - - // TODO(sukhanov): Add CHECK to make sure that field trial - // WebRTC-ExcludeTransportSequenceNumberFromFecFieldTrial is enabled. - // If feedback loop is translation is enabled, FEC packets must exclude - // transport sequence numbers, otherwise recovered packets will be corrupt. - - RTC_DCHECK(ice_transport_); - RTC_DCHECK(datagram_transport_); - - ice_transport_->SignalNetworkRouteChanged.connect( - this, &DatagramRtpTransport::OnNetworkRouteChanged); - // Subscribe to DatagramTransport to read incoming packets. - datagram_transport_->SetDatagramSink(this); - datagram_transport_->SetTransportStateCallback(this); -} - -DatagramRtpTransport::~DatagramRtpTransport() { - // Unsubscribe from DatagramTransport sinks. 
- datagram_transport_->SetDatagramSink(nullptr); - datagram_transport_->SetTransportStateCallback(nullptr); -} - -bool DatagramRtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) { - RTC_DCHECK_RUN_ON(&thread_checker_); - - // Assign and increment datagram_id. - const DatagramId datagram_id = current_datagram_id_++; - - // Send as is (without extracting transport sequence number) for - // RTP packets if we are not doing datagram => RTCP feedback translation. - if (disable_datagram_to_rtcp_feeback_translation_) { - // Even if we are not extracting transport sequence number we need to - // propagate "Sent" notification for both RTP and RTCP packets. For this - // reason we need save options.packet_id in packet map. - sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id); - - return SendDatagram(*packet, datagram_id); - } - - // Parse RTP packet. - RtpPacket rtp_packet(&rtp_header_extension_map_); - // TODO(mellem): Verify that this doesn't mangle something (it shouldn't). - if (!rtp_packet.Parse(*packet)) { - RTC_NOTREACHED() << "Failed to parse outgoing RtpPacket, len=" - << packet->size() - << ", options.packet_id=" << options.packet_id; - return -1; - } - - // Try to get transport sequence number. - uint16_t transport_senquence_number; - if (!rtp_packet.GetExtension( - &transport_senquence_number)) { - // Save packet info without transport sequence number. - sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id); - - RTC_LOG(LS_VERBOSE) - << "Sending rtp packet without transport sequence number, packet=" - << rtp_packet.ToString(); - - return SendDatagram(*packet, datagram_id); - } - - // Save packet info with sequence number and ssrc so we could reconstruct - // RTCP feedback packet when we receive datagram ACK. 
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo( - options.packet_id, rtp_packet.Ssrc(), transport_senquence_number); - - // Since datagram transport provides feedback and timestamps, we do not need - // to send transport sequence number, so we remove it from RTP packet. Later - // when we get Ack for sent datagram, we will re-create RTCP feedback packet. - if (!rtp_packet.RemoveExtension(TransportSequenceNumber::kId)) { - RTC_NOTREACHED() << "Failed to remove transport sequence number, packet=" - << rtp_packet.ToString(); - return -1; - } - - RTC_LOG(LS_VERBOSE) << "Removed transport_senquence_number=" - << transport_senquence_number - << " from packet=" << rtp_packet.ToString() - << ", saved bytes=" << packet->size() - rtp_packet.size(); - - return SendDatagram( - rtc::ArrayView(rtp_packet.data(), rtp_packet.size()), - datagram_id); -} - -bool DatagramRtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) { - RTC_DCHECK_RUN_ON(&thread_checker_); - - // Assign and increment datagram_id. - const DatagramId datagram_id = current_datagram_id_++; - - // Even if we are not extracting transport sequence number we need to - // propagate "Sent" notification for both RTP and RTCP packets. For this - // reason we need save options.packet_id in packet map. 
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id); - return SendDatagram(*packet, datagram_id); -} - -bool DatagramRtpTransport::SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) { - return datagram_transport_->SendDatagram(data, datagram_id).ok(); -} - -void DatagramRtpTransport::OnDatagramReceived( - rtc::ArrayView data) { - RTC_DCHECK_RUN_ON(&thread_checker_); - - rtc::ArrayView cdata(reinterpret_cast(data.data()), - data.size()); - if (cricket::InferRtpPacketType(cdata) == cricket::RtpPacketType::kRtcp) { - rtc::CopyOnWriteBuffer buffer(data.data(), data.size()); - SignalRtcpPacketReceived(&buffer, /*packet_time_us=*/-1); - return; - } - - // TODO(sukhanov): I am not filling out time, but on my video quality - // test in WebRTC the time was not set either and higher layers of the stack - // overwrite -1 with current current rtc time. Leaveing comment for now to - // make sure it works as expected. - RtpPacketReceived parsed_packet(&rtp_header_extension_map_); - if (!parsed_packet.Parse(data)) { - RTC_LOG(LS_ERROR) << "Failed to parse incoming RTP packet"; - return; - } - if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) { - RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: " - << RtpDemuxer::DescribePacket(parsed_packet); - } -} - -void DatagramRtpTransport::OnDatagramSent(DatagramId datagram_id) { - RTC_DCHECK_RUN_ON(&thread_checker_); - - // Find packet_id and propagate OnPacketSent notification. - const auto& it = sent_rtp_packet_map_.find(datagram_id); - if (it == sent_rtp_packet_map_.end()) { - RTC_NOTREACHED() << "Did not find sent packet info for sent datagram_id=" - << datagram_id; - return; - } - - // Also see how DatagramRtpTransport::OnSentPacket handles OnSentPacket - // notification from ICE in bypass mode. 
- rtc::SentPacket sent_packet(/*packet_id=*/it->second.packet_id, - rtc::TimeMillis()); - - SignalSentPacket(sent_packet); -} - -bool DatagramRtpTransport::GetAndRemoveSentPacketInfo( - DatagramId datagram_id, - SentPacketInfo* sent_packet_info) { - RTC_CHECK(sent_packet_info != nullptr); - - const auto& it = sent_rtp_packet_map_.find(datagram_id); - if (it == sent_rtp_packet_map_.end()) { - return false; - } - - *sent_packet_info = it->second; - sent_rtp_packet_map_.erase(it); - return true; -} - -void DatagramRtpTransport::OnDatagramAcked(const DatagramAck& ack) { - RTC_DCHECK_RUN_ON(&thread_checker_); - - SentPacketInfo sent_packet_info; - if (!GetAndRemoveSentPacketInfo(ack.datagram_id, &sent_packet_info)) { - // TODO(sukhanov): If OnDatagramAck() can come after OnDatagramLost(), - // datagram_id is already deleted and we may need to relax the CHECK below. - // It's probably OK to ignore such datagrams, because it's been a few RTTs - // anyway since they were sent. - RTC_NOTREACHED() << "Did not find sent packet info for datagram_id=" - << ack.datagram_id; - return; - } - - RTC_LOG(LS_VERBOSE) << "Datagram acked, ack.datagram_id=" << ack.datagram_id - << ", sent_packet_info.packet_id=" - << sent_packet_info.packet_id - << ", sent_packet_info.transport_sequence_number=" - << sent_packet_info.transport_sequence_number.value_or(-1) - << ", sent_packet_info.ssrc=" - << sent_packet_info.ssrc.value_or(-1) - << ", receive_timestamp_ms=" - << ack.receive_timestamp.ms(); - - // If transport sequence number was not present in RTP packet, we do not need - // to propagate RTCP feedback. - if (!sent_packet_info.transport_sequence_number) { - return; - } - - // TODO(sukhanov): We noticed that datagram transport implementations can - // return zero timestamps in the middle of the call. This is workaround to - // avoid propagating zero timestamps, but we need to understand why we have - // them in the first place. 
- int64_t receive_timestamp_us = ack.receive_timestamp.us(); - - if (receive_timestamp_us == 0) { - receive_timestamp_us = previous_nonzero_timestamp_us_; - } else { - previous_nonzero_timestamp_us_ = receive_timestamp_us; - } - - // Ssrc must be provided in packet info if transport sequence number is set, - // which is guaranteed by SentPacketInfo constructor. - RTC_CHECK(sent_packet_info.ssrc); - - // Recreate RTCP feedback packet. - rtcp::TransportFeedback feedback_packet; - feedback_packet.SetMediaSsrc(*sent_packet_info.ssrc); - - const uint16_t transport_sequence_number = - sent_packet_info.transport_sequence_number.value(); - - feedback_packet.SetBase(transport_sequence_number, receive_timestamp_us); - feedback_packet.AddReceivedPacket(transport_sequence_number, - receive_timestamp_us); - - rtc::CopyOnWriteBuffer buffer(kMaxRtcpFeedbackPacketSize); - size_t index = 0; - if (!feedback_packet.Create(buffer.data(), &index, buffer.capacity(), - nullptr)) { - RTC_NOTREACHED() << "Failed to create RTCP feedback packet"; - return; - } - - RTC_CHECK_GT(index, 0); - RTC_CHECK_LE(index, kMaxRtcpFeedbackPacketSize); - - // Propagage created RTCP packet as normal incoming packet. 
- buffer.SetSize(index); - SignalRtcpPacketReceived(&buffer, /*packet_time_us=*/-1); -} - -void DatagramRtpTransport::OnDatagramLost(DatagramId datagram_id) { - RTC_DCHECK_RUN_ON(&thread_checker_); - - RTC_LOG(LS_INFO) << "Datagram lost, datagram_id=" << datagram_id; - - SentPacketInfo sent_packet_info; - if (!GetAndRemoveSentPacketInfo(datagram_id, &sent_packet_info)) { - RTC_NOTREACHED() << "Did not find sent packet info for lost datagram_id=" - << datagram_id; - } -} - -void DatagramRtpTransport::OnStateChanged(MediaTransportState state) { - state_ = state; - SignalWritableState(state_ == MediaTransportState::kWritable); - if (state_ == MediaTransportState::kWritable) { - SignalReadyToSend(true); - } -} - -const std::string& DatagramRtpTransport::transport_name() const { - return ice_transport_->transport_name(); -} - -int DatagramRtpTransport::SetRtpOption(rtc::Socket::Option opt, int value) { - return ice_transport_->SetOption(opt, value); -} - -int DatagramRtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) { - return -1; -} - -bool DatagramRtpTransport::IsReadyToSend() const { - return state_ == MediaTransportState::kWritable; -} - -bool DatagramRtpTransport::IsWritable(bool /*rtcp*/) const { - return state_ == MediaTransportState::kWritable; -} - -void DatagramRtpTransport::UpdateRtpHeaderExtensionMap( - const cricket::RtpHeaderExtensions& header_extensions) { - rtp_header_extension_map_ = RtpHeaderExtensionMap(header_extensions); -} - -bool DatagramRtpTransport::RegisterRtpDemuxerSink( - const RtpDemuxerCriteria& criteria, - RtpPacketSinkInterface* sink) { - rtp_demuxer_.RemoveSink(sink); - return rtp_demuxer_.AddSink(criteria, sink); -} - -bool DatagramRtpTransport::UnregisterRtpDemuxerSink( - RtpPacketSinkInterface* sink) { - return rtp_demuxer_.RemoveSink(sink); -} - -void DatagramRtpTransport::OnNetworkRouteChanged( - absl::optional network_route) { - RTC_DCHECK_RUN_ON(&thread_checker_); - SignalNetworkRouteChanged(network_route); -} - -} 
// namespace webrtc diff --git a/pc/datagram_rtp_transport.h b/pc/datagram_rtp_transport.h deleted file mode 100644 index f9684c69c0..0000000000 --- a/pc/datagram_rtp_transport.h +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef PC_DATAGRAM_RTP_TRANSPORT_H_ -#define PC_DATAGRAM_RTP_TRANSPORT_H_ - -#include -#include -#include -#include - -#include "api/crypto/crypto_options.h" -#include "api/transport/datagram_transport_interface.h" -#include "api/transport/media/media_transport_interface.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "p2p/base/ice_transport_internal.h" -#include "p2p/base/packet_transport_internal.h" -#include "pc/rtp_transport_internal.h" -#include "rtc_base/buffer.h" -#include "rtc_base/buffer_queue.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/stream.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/thread_checker.h" - -namespace webrtc { - -constexpr int kDatagramDtlsAdaptorComponent = -1; - -// RTP transport which uses the DatagramTransportInterface to send and receive -// packets. 
-class DatagramRtpTransport : public RtpTransportInternal, - public webrtc::DatagramSinkInterface, - public webrtc::MediaTransportStateCallback { - public: - DatagramRtpTransport( - const std::vector& rtp_header_extensions, - cricket::IceTransportInternal* ice_transport, - DatagramTransportInterface* datagram_transport); - - ~DatagramRtpTransport() override; - - // ===================================================== - // Overrides for webrtc::DatagramTransportSinkInterface - // and MediaTransportStateCallback - // ===================================================== - void OnDatagramReceived(rtc::ArrayView data) override; - - void OnDatagramSent(webrtc::DatagramId datagram_id) override; - - void OnDatagramAcked(const webrtc::DatagramAck& ack) override; - - void OnDatagramLost(webrtc::DatagramId datagram_id) override; - - void OnStateChanged(webrtc::MediaTransportState state) override; - - // ===================================================== - // RtpTransportInternal overrides - // ===================================================== - bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) override; - - bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) override; - - const std::string& transport_name() const override; - - // Datagram transport always muxes RTCP. 
- bool rtcp_mux_enabled() const override { return true; } - void SetRtcpMuxEnabled(bool enable) override {} - - int SetRtpOption(rtc::Socket::Option opt, int value) override; - int SetRtcpOption(rtc::Socket::Option opt, int value) override; - - bool IsReadyToSend() const override; - - bool IsWritable(bool rtcp) const override; - - bool IsSrtpActive() const override { return false; } - - void UpdateRtpHeaderExtensionMap( - const cricket::RtpHeaderExtensions& header_extensions) override; - - bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria, - RtpPacketSinkInterface* sink) override; - - bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override; - - private: - // RTP/RTCP packet info stored for each sent packet. - struct SentPacketInfo { - // RTP packet info with ssrc and transport sequence number. - SentPacketInfo(int64_t packet_id, - uint32_t ssrc, - uint16_t transport_sequence_number) - : ssrc(ssrc), - transport_sequence_number(transport_sequence_number), - packet_id(packet_id) {} - - // Packet info without SSRC and transport sequence number used for RTCP - // packets, RTP packets when transport sequence number is not provided or - // when feedback translation is disabled. - explicit SentPacketInfo(int64_t packet_id) : packet_id(packet_id) {} - - SentPacketInfo() = default; - - absl::optional ssrc; - - // Transport sequence number (if it was provided in outgoing RTP packet). - // It is used to re-create RTCP feedback packets from datagram ACKs. - absl::optional transport_sequence_number; - - // Packet id from rtc::PacketOptions. It is required to propagage sent - // notification up the stack (SignalSentPacket). - int64_t packet_id = 0; - }; - - // Finds SentPacketInfo for given |datagram_id| and removes map entry. - // Returns false if entry was not found. - bool GetAndRemoveSentPacketInfo(webrtc::DatagramId datagram_id, - SentPacketInfo* sent_packet_info); - - // Sends datagram to datagram_transport. 
- bool SendDatagram(rtc::ArrayView data, - webrtc::DatagramId datagram_id); - - // Propagates network route changes from ICE. - void OnNetworkRouteChanged(absl::optional network_route); - - rtc::ThreadChecker thread_checker_; - cricket::IceTransportInternal* ice_transport_; - webrtc::DatagramTransportInterface* datagram_transport_; - - RtpDemuxer rtp_demuxer_; - - MediaTransportState state_ = MediaTransportState::kPending; - - // Extension map for parsing transport sequence numbers. - webrtc::RtpHeaderExtensionMap rtp_header_extension_map_; - - // Keeps information about sent RTP packet until they are Acked or Lost. - std::map sent_rtp_packet_map_; - - // Current datagram_id, incremented after each sent RTP packets. - // Datagram id is passed to datagram transport when we send datagram and we - // get it back in notifications about Sent, Acked and Lost datagrams. - int64_t current_datagram_id_ = 0; - - // TODO(sukhanov): Previous nonzero timestamp is required for workaround for - // zero timestamps received, which sometimes are received from datagram - // transport. Investigate if we can eliminate zero timestamps. - int64_t previous_nonzero_timestamp_us_ = 0; - - // Disable datagram to RTCP feedback translation and enable RTCP feedback - // loop (note that having both RTCP and datagram feedback loops is - // inefficient, but can be useful in tests and experiments). 
- const bool disable_datagram_to_rtcp_feeback_translation_; -}; - -} // namespace webrtc - -#endif // PC_DATAGRAM_RTP_TRANSPORT_H_ diff --git a/pc/dtls_srtp_transport_unittest.cc b/pc/dtls_srtp_transport_unittest.cc index 770c140ce7..6952159a01 100644 --- a/pc/dtls_srtp_transport_unittest.cc +++ b/pc/dtls_srtp_transport_unittest.cc @@ -97,11 +97,11 @@ class DtlsSrtpTransportTest : public ::testing::Test, void CompleteDtlsHandshake(FakeDtlsTransport* fake_dtls1, FakeDtlsTransport* fake_dtls2) { - auto cert1 = rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + auto cert1 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); fake_dtls1->SetLocalCertificate(cert1); - auto cert2 = rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + auto cert2 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); fake_dtls2->SetLocalCertificate(cert2); fake_dtls1->SetDestination(fake_dtls2); } diff --git a/pc/dtls_transport.cc b/pc/dtls_transport.cc index 8abfcae53f..550ede790d 100644 --- a/pc/dtls_transport.cc +++ b/pc/dtls_transport.cc @@ -22,20 +22,16 @@ DtlsTransportState TranslateState(cricket::DtlsTransportState internal_state) { switch (internal_state) { case cricket::DTLS_TRANSPORT_NEW: return DtlsTransportState::kNew; - break; case cricket::DTLS_TRANSPORT_CONNECTING: return DtlsTransportState::kConnecting; - break; case cricket::DTLS_TRANSPORT_CONNECTED: return DtlsTransportState::kConnected; - break; case cricket::DTLS_TRANSPORT_CLOSED: return DtlsTransportState::kClosed; - break; case cricket::DTLS_TRANSPORT_FAILED: return DtlsTransportState::kFailed; - break; } + RTC_CHECK_NOTREACHED(); } } // namespace @@ -61,7 +57,7 @@ DtlsTransport::~DtlsTransport() { } DtlsTransportInformation DtlsTransport::Information() { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); return info_; } @@ -90,7 
+86,7 @@ void DtlsTransport::Clear() { // into DtlsTransport, so we can't hold the lock while releasing. std::unique_ptr transport_to_release; { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); transport_to_release = std::move(internal_dtls_transport_); ice_transport_->Clear(); } @@ -114,7 +110,7 @@ void DtlsTransport::OnInternalDtlsState( void DtlsTransport::UpdateInformation() { RTC_DCHECK_RUN_ON(owner_thread_); - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); if (internal_dtls_transport_) { if (internal_dtls_transport_->dtls_state() == cricket::DTLS_TRANSPORT_CONNECTED) { diff --git a/pc/dtls_transport.h b/pc/dtls_transport.h index b5caae5212..ff8108ca90 100644 --- a/pc/dtls_transport.h +++ b/pc/dtls_transport.h @@ -17,6 +17,7 @@ #include "api/ice_transport_interface.h" #include "api/scoped_refptr.h" #include "p2p/base/dtls_transport.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -42,12 +43,12 @@ class DtlsTransport : public DtlsTransportInterface, void Clear(); cricket::DtlsTransportInternal* internal() { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); return internal_dtls_transport_.get(); } const cricket::DtlsTransportInternal* internal() const { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); return internal_dtls_transport_.get(); } @@ -61,7 +62,7 @@ class DtlsTransport : public DtlsTransportInterface, DtlsTransportObserverInterface* observer_ = nullptr; rtc::Thread* owner_thread_; - rtc::CriticalSection lock_; + mutable Mutex lock_; DtlsTransportInformation info_ RTC_GUARDED_BY(lock_); std::unique_ptr internal_dtls_transport_ RTC_GUARDED_BY(lock_); diff --git a/pc/dtls_transport_unittest.cc b/pc/dtls_transport_unittest.cc index f7d7a88d1e..a3f0a7ce8b 100644 --- a/pc/dtls_transport_unittest.cc +++ b/pc/dtls_transport_unittest.cc @@ -70,11 +70,11 @@ class DtlsTransportTest : public ::testing::Test { auto fake_dtls1 = static_cast(transport_->internal()); auto fake_dtls2 = std::make_unique( "audio", 
cricket::ICE_CANDIDATE_COMPONENT_RTP); - auto cert1 = rtc::RTCCertificate::Create(absl::WrapUnique( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + auto cert1 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); fake_dtls1->SetLocalCertificate(cert1); - auto cert2 = rtc::RTCCertificate::Create(absl::WrapUnique( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + auto cert2 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); fake_dtls2->SetLocalCertificate(cert2); fake_dtls1->SetDestination(fake_dtls2.get()); } diff --git a/pc/ice_transport.h b/pc/ice_transport.h index 69b69e41d8..c1529de6b7 100644 --- a/pc/ice_transport.h +++ b/pc/ice_transport.h @@ -12,7 +12,6 @@ #define PC_ICE_TRANSPORT_H_ #include "api/ice_transport_interface.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/thread.h" #include "rtc_base/thread_checker.h" @@ -29,6 +28,10 @@ class IceTransportWithPointer : public IceTransportInterface { RTC_DCHECK(internal_); } + IceTransportWithPointer() = delete; + IceTransportWithPointer(const IceTransportWithPointer&) = delete; + IceTransportWithPointer& operator=(const IceTransportWithPointer&) = delete; + cricket::IceTransportInternal* internal() override; // This call will ensure that the pointer passed at construction is // no longer in use by this object. 
Later calls to internal() will return @@ -39,7 +42,6 @@ class IceTransportWithPointer : public IceTransportInterface { ~IceTransportWithPointer() override; private: - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(IceTransportWithPointer); const rtc::Thread* creator_thread_; cricket::IceTransportInternal* internal_ RTC_GUARDED_BY(creator_thread_); }; diff --git a/pc/jsep_transport.cc b/pc/jsep_transport.cc index bc380402b1..2f7615ab3b 100644 --- a/pc/jsep_transport.cc +++ b/pc/jsep_transport.cc @@ -38,16 +38,12 @@ JsepTransportDescription::JsepTransportDescription( const std::vector& cryptos, const std::vector& encrypted_header_extension_ids, int rtp_abs_sendtime_extn_id, - const TransportDescription& transport_desc, - absl::optional media_alt_protocol, - absl::optional data_alt_protocol) + const TransportDescription& transport_desc) : rtcp_mux_enabled(rtcp_mux_enabled), cryptos(cryptos), encrypted_header_extension_ids(encrypted_header_extension_ids), rtp_abs_sendtime_extn_id(rtp_abs_sendtime_extn_id), - transport_desc(transport_desc), - media_alt_protocol(media_alt_protocol), - data_alt_protocol(data_alt_protocol) {} + transport_desc(transport_desc) {} JsepTransportDescription::JsepTransportDescription( const JsepTransportDescription& from) @@ -55,9 +51,7 @@ JsepTransportDescription::JsepTransportDescription( cryptos(from.cryptos), encrypted_header_extension_ids(from.encrypted_header_extension_ids), rtp_abs_sendtime_extn_id(from.rtp_abs_sendtime_extn_id), - transport_desc(from.transport_desc), - media_alt_protocol(from.media_alt_protocol), - data_alt_protocol(from.data_alt_protocol) {} + transport_desc(from.transport_desc) {} JsepTransportDescription::~JsepTransportDescription() = default; @@ -71,8 +65,6 @@ JsepTransportDescription& JsepTransportDescription::operator=( encrypted_header_extension_ids = from.encrypted_header_extension_ids; rtp_abs_sendtime_extn_id = from.rtp_abs_sendtime_extn_id; transport_desc = from.transport_desc; - media_alt_protocol = 
from.media_alt_protocol; - data_alt_protocol = from.data_alt_protocol; return *this; } @@ -88,9 +80,7 @@ JsepTransport::JsepTransport( std::unique_ptr datagram_rtp_transport, std::unique_ptr rtp_dtls_transport, std::unique_ptr rtcp_dtls_transport, - std::unique_ptr sctp_transport, - std::unique_ptr datagram_transport, - webrtc::DataChannelTransportInterface* data_channel_transport) + std::unique_ptr sctp_transport) : network_thread_(rtc::Thread::Current()), mid_(mid), local_certificate_(local_certificate), @@ -115,10 +105,7 @@ JsepTransport::JsepTransport( sctp_transport_(sctp_transport ? new rtc::RefCountedObject( std::move(sctp_transport)) - : nullptr), - datagram_transport_(std::move(datagram_transport)), - datagram_rtp_transport_(std::move(datagram_rtp_transport)), - data_channel_transport_(data_channel_transport) { + : nullptr) { RTC_DCHECK(ice_transport_); RTC_DCHECK(rtp_dtls_transport_); // |rtcp_ice_transport_| must be present iff |rtcp_dtls_transport_| is @@ -147,13 +134,6 @@ JsepTransport::JsepTransport( std::vector{ datagram_rtp_transport_.get(), default_rtp_transport()}); } - - if (data_channel_transport_ && sctp_data_channel_transport_) { - composite_data_channel_transport_ = - std::make_unique( - std::vector{ - data_channel_transport_, sctp_data_channel_transport_.get()}); - } } JsepTransport::~JsepTransport() { @@ -178,16 +158,15 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( RTC_DCHECK_RUN_ON(network_thread_); - webrtc::RTCErrorOr ice_parameters_result = - IceParameters::Parse(jsep_description.transport_desc.ice_ufrag, - jsep_description.transport_desc.ice_pwd); + IceParameters ice_parameters = + jsep_description.transport_desc.GetIceParameters(); + webrtc::RTCError ice_parameters_result = ice_parameters.Validate(); if (!ice_parameters_result.ok()) { rtc::StringBuilder sb; - sb << "Invalid ICE parameters: " << ice_parameters_result.error().message(); + sb << "Invalid ICE parameters: " << ice_parameters_result.message(); return 
webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, sb.Release()); } - IceParameters ice_parameters = ice_parameters_result.MoveValue(); if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type, ContentSource::CS_LOCAL)) { @@ -248,7 +227,6 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( // If PRANSWER/ANSWER is set, we should decide transport protocol type. if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { error = NegotiateAndSetDtlsParameters(type); - NegotiateDatagramTransport(type); } if (!error.ok()) { local_description_.reset(); @@ -273,17 +251,16 @@ webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription( RTC_DCHECK_RUN_ON(network_thread_); - webrtc::RTCErrorOr ice_parameters_result = - IceParameters::Parse(jsep_description.transport_desc.ice_ufrag, - jsep_description.transport_desc.ice_pwd); + IceParameters ice_parameters = + jsep_description.transport_desc.GetIceParameters(); + webrtc::RTCError ice_parameters_result = ice_parameters.Validate(); if (!ice_parameters_result.ok()) { remote_description_.reset(); rtc::StringBuilder sb; - sb << "Invalid ICE parameters: " << ice_parameters_result.error().message(); + sb << "Invalid ICE parameters: " << ice_parameters_result.message(); return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, sb.Release()); } - IceParameters ice_parameters = ice_parameters_result.MoveValue(); if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type, ContentSource::CS_REMOTE)) { @@ -327,7 +304,6 @@ webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription( // If PRANSWER/ANSWER is set, we should decide transport protocol type. 
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { error = NegotiateAndSetDtlsParameters(SdpType::kOffer); - NegotiateDatagramTransport(type); } if (!error.ok()) { remote_description_.reset(); @@ -385,18 +361,6 @@ absl::optional JsepTransport::GetDtlsRole() const { return absl::optional(dtls_role); } -absl::optional -JsepTransport::GetTransportParameters() const { - rtc::CritScope scope(&accessor_lock_); - if (!datagram_transport()) { - return absl::nullopt; - } - - OpaqueTransportParameters params; - params.parameters = datagram_transport()->GetTransportParameters(); - return params; -} - bool JsepTransport::GetStats(TransportStats* stats) { RTC_DCHECK_RUN_ON(network_thread_); rtc::CritScope scope(&accessor_lock_); @@ -464,7 +428,6 @@ webrtc::RTCError JsepTransport::SetNegotiatedDtlsParameters( DtlsTransportInternal* dtls_transport, absl::optional dtls_role, rtc::SSLFingerprint* remote_fingerprint) { - RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(dtls_transport); // Set SSL role. Role must be set before fingerprint is applied, which // initiates DTLS setup. @@ -537,7 +500,7 @@ void JsepTransport::ActivateRtcpMux() { RTC_DCHECK(dtls_srtp_transport_); RTC_DCHECK(!unencrypted_rtp_transport_); RTC_DCHECK(!sdes_transport_); - dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport(), + dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport_locked(), /*rtcp_dtls_transport=*/nullptr); } rtcp_dtls_transport_ = nullptr; // Destroy this reference. 
@@ -551,7 +514,6 @@ bool JsepTransport::SetSdes(const std::vector& cryptos, webrtc::SdpType type, ContentSource source) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::CritScope scope(&accessor_lock_); bool ret = false; ret = sdes_negotiator_.Process(cryptos, type, source); if (!ret) { @@ -736,7 +698,6 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole( bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport, TransportStats* stats) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::CritScope scope(&accessor_lock_); RTC_DCHECK(dtls_transport); TransportChannelStats substats; if (rtcp_dtls_transport_) { @@ -758,106 +719,4 @@ bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport, return true; } -void JsepTransport::NegotiateDatagramTransport(SdpType type) { - RTC_DCHECK(type == SdpType::kAnswer || type == SdpType::kPrAnswer); - rtc::CritScope lock(&accessor_lock_); - if (!datagram_transport_) { - return; // No need to negotiate the use of datagram transport. - } - - bool compatible_datagram_transport = false; - if (datagram_transport_ && - local_description_->transport_desc.opaque_parameters && - remote_description_->transport_desc.opaque_parameters) { - // If both descriptions have datagram transport parameters, and the remote - // parameters are accepted by the datagram transport, then use the datagram - // transport. Otherwise, fall back to RTP. 
- compatible_datagram_transport = - datagram_transport_ - ->SetRemoteTransportParameters(remote_description_->transport_desc - .opaque_parameters->parameters) - .ok(); - } - - bool use_datagram_transport_for_media = - compatible_datagram_transport && - remote_description_->media_alt_protocol == - remote_description_->transport_desc.opaque_parameters->protocol && - remote_description_->media_alt_protocol == - local_description_->media_alt_protocol; - - bool use_datagram_transport_for_data = - compatible_datagram_transport && - remote_description_->data_alt_protocol == - remote_description_->transport_desc.opaque_parameters->protocol && - remote_description_->data_alt_protocol == - local_description_->data_alt_protocol; - - RTC_LOG(LS_INFO) - << "Negotiating datagram transport, use_datagram_transport_for_media=" - << use_datagram_transport_for_media - << ", use_datagram_transport_for_data=" << use_datagram_transport_for_data - << " answer type=" << (type == SdpType::kAnswer ? "answer" : "pr_answer"); - - // A provisional or full or answer lets the peer start sending on one of the - // transports. - if (composite_rtp_transport_) { - composite_rtp_transport_->SetSendTransport( - use_datagram_transport_for_media ? datagram_rtp_transport_.get() - : default_rtp_transport()); - } - if (composite_data_channel_transport_) { - composite_data_channel_transport_->SetSendTransport( - use_datagram_transport_for_data ? data_channel_transport_ - : sctp_data_channel_transport_.get()); - } - - if (type != SdpType::kAnswer) { - return; - } - - if (composite_rtp_transport_) { - if (use_datagram_transport_for_media) { - // Negotiated use of datagram transport for RTP, so remove the - // non-datagram RTP transport. 
- composite_rtp_transport_->RemoveTransport(default_rtp_transport()); - if (unencrypted_rtp_transport_) { - unencrypted_rtp_transport_ = nullptr; - } else if (sdes_transport_) { - sdes_transport_ = nullptr; - } else { - dtls_srtp_transport_ = nullptr; - } - } else { - composite_rtp_transport_->RemoveTransport(datagram_rtp_transport_.get()); - datagram_rtp_transport_ = nullptr; - } - } - - if (composite_data_channel_transport_) { - if (use_datagram_transport_for_data) { - // Negotiated use of datagram transport for data channels, so remove the - // non-datagram data channel transport. - composite_data_channel_transport_->RemoveTransport( - sctp_data_channel_transport_.get()); - sctp_data_channel_transport_ = nullptr; - sctp_transport_ = nullptr; - } else { - composite_data_channel_transport_->RemoveTransport( - data_channel_transport_); - data_channel_transport_ = nullptr; - } - } else if (data_channel_transport_ && !use_datagram_transport_for_data) { - // The datagram transport has been rejected without a fallback. We still - // need to inform the application and delete it. - SignalDataChannelTransportNegotiated(this, nullptr); - data_channel_transport_ = nullptr; - } - - if (!use_datagram_transport_for_media && !use_datagram_transport_for_data) { - // Datagram transport is not being used for anything, so clean it up. 
- datagram_transport_ = nullptr; - } -} - } // namespace cricket diff --git a/pc/jsep_transport.h b/pc/jsep_transport.h index 6d88deff07..11c8168d9e 100644 --- a/pc/jsep_transport.h +++ b/pc/jsep_transport.h @@ -20,12 +20,11 @@ #include "api/candidate.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" -#include "api/transport/datagram_transport_interface.h" +#include "api/transport/data_channel_transport_interface.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/dtls_transport.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/transport_info.h" -#include "pc/composite_data_channel_transport.h" #include "pc/composite_rtp_transport.h" #include "pc/dtls_srtp_transport.h" #include "pc/dtls_transport.h" @@ -54,9 +53,7 @@ struct JsepTransportDescription { const std::vector& cryptos, const std::vector& encrypted_header_extension_ids, int rtp_abs_sendtime_extn_id, - const TransportDescription& transport_description, - absl::optional media_alt_protocol, - absl::optional data_alt_protocol); + const TransportDescription& transport_description); JsepTransportDescription(const JsepTransportDescription& from); ~JsepTransportDescription(); @@ -69,14 +66,6 @@ struct JsepTransportDescription { // TODO(zhihuang): Add the ICE and DTLS related variables and methods from // TransportDescription and remove this extra layer of abstraction. TransportDescription transport_desc; - - // Alt-protocols that apply to this JsepTransport. Presence indicates a - // request to use an alternative protocol for media and/or data. The - // alt-protocol is handled by a datagram transport. If one or both of these - // values are present, JsepTransport will attempt to negotiate use of the - // datagram transport for media and/or data. 
- absl::optional media_alt_protocol; - absl::optional data_alt_protocol; }; // Helper class used by JsepTransportController that processes @@ -103,9 +92,7 @@ class JsepTransport : public sigslot::has_slots<> { std::unique_ptr datagram_rtp_transport, std::unique_ptr rtp_dtls_transport, std::unique_ptr rtcp_dtls_transport, - std::unique_ptr sctp_transport, - std::unique_ptr datagram_transport, - webrtc::DataChannelTransportInterface* data_channel_transport); + std::unique_ptr sctp_transport); ~JsepTransport() override; @@ -128,14 +115,15 @@ class JsepTransport : public sigslot::has_slots<> { webrtc::RTCError SetLocalJsepTransportDescription( const JsepTransportDescription& jsep_description, - webrtc::SdpType type); + webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_); // Set the remote TransportDescription to be used by DTLS and ICE channels // that are part of this Transport. webrtc::RTCError SetRemoteJsepTransportDescription( const JsepTransportDescription& jsep_description, - webrtc::SdpType type); - webrtc::RTCError AddRemoteCandidates(const Candidates& candidates); + webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_); + webrtc::RTCError AddRemoteCandidates(const Candidates& candidates) + RTC_LOCKS_EXCLUDED(accessor_lock_); // Set the "needs-ice-restart" flag as described in JSEP. After the flag is // set, offers should generate new ufrags/passwords until an ICE restart @@ -143,23 +131,22 @@ class JsepTransport : public sigslot::has_slots<> { // // This and the below method can be called safely from any thread as long as // SetXTransportDescription is not in progress. - void SetNeedsIceRestartFlag(); + void SetNeedsIceRestartFlag() RTC_LOCKS_EXCLUDED(accessor_lock_); // Returns true if the ICE restart flag above was set, and no ICE restart has // occurred yet for this transport (by applying a local description with // changed ufrag/password). 
- bool needs_ice_restart() const { + bool needs_ice_restart() const RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); return needs_ice_restart_; } // Returns role if negotiated, or empty absl::optional if it hasn't been // negotiated yet. - absl::optional GetDtlsRole() const; - - absl::optional GetTransportParameters() const; + absl::optional GetDtlsRole() const + RTC_LOCKS_EXCLUDED(accessor_lock_); // TODO(deadbeef): Make this const. See comment in transportcontroller.h. - bool GetStats(TransportStats* stats); + bool GetStats(TransportStats* stats) RTC_LOCKS_EXCLUDED(accessor_lock_); const JsepTransportDescription* local_description() const { RTC_DCHECK_RUN_ON(network_thread_); @@ -171,7 +158,8 @@ class JsepTransport : public sigslot::has_slots<> { return remote_description_.get(); } - webrtc::RtpTransportInternal* rtp_transport() const { + webrtc::RtpTransportInternal* rtp_transport() const + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); if (composite_rtp_transport_) { return composite_rtp_transport_.get(); @@ -182,7 +170,8 @@ class JsepTransport : public sigslot::has_slots<> { } } - const DtlsTransportInternal* rtp_dtls_transport() const { + const DtlsTransportInternal* rtp_dtls_transport() const + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); if (rtp_dtls_transport_) { return rtp_dtls_transport_->internal(); @@ -191,16 +180,14 @@ class JsepTransport : public sigslot::has_slots<> { } } - DtlsTransportInternal* rtp_dtls_transport() { + DtlsTransportInternal* rtp_dtls_transport() + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); - if (rtp_dtls_transport_) { - return rtp_dtls_transport_->internal(); - } else { - return nullptr; - } + return rtp_dtls_transport_locked(); } - const DtlsTransportInternal* rtcp_dtls_transport() const { + const DtlsTransportInternal* rtcp_dtls_transport() const + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope 
scope(&accessor_lock_); if (rtcp_dtls_transport_) { return rtcp_dtls_transport_->internal(); @@ -209,7 +196,8 @@ class JsepTransport : public sigslot::has_slots<> { } } - DtlsTransportInternal* rtcp_dtls_transport() { + DtlsTransportInternal* rtcp_dtls_transport() + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); if (rtcp_dtls_transport_) { return rtcp_dtls_transport_->internal(); @@ -218,30 +206,27 @@ class JsepTransport : public sigslot::has_slots<> { } } - rtc::scoped_refptr RtpDtlsTransport() { + rtc::scoped_refptr RtpDtlsTransport() + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); return rtp_dtls_transport_; } - rtc::scoped_refptr SctpTransport() const { + rtc::scoped_refptr SctpTransport() const + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); return sctp_transport_; } - webrtc::DataChannelTransportInterface* data_channel_transport() const { + // TODO(bugs.webrtc.org/9719): Delete method, update callers to use + // SctpTransport() instead. + webrtc::DataChannelTransportInterface* data_channel_transport() const + RTC_LOCKS_EXCLUDED(accessor_lock_) { rtc::CritScope scope(&accessor_lock_); - if (composite_data_channel_transport_) { - return composite_data_channel_transport_.get(); - } else if (sctp_data_channel_transport_) { + if (sctp_data_channel_transport_) { return sctp_data_channel_transport_.get(); } - return data_channel_transport_; - } - - // Returns datagram transport, if available. - webrtc::DatagramTransportInterface* datagram_transport() const { - rtc::CritScope scope(&accessor_lock_); - return datagram_transport_.get(); + return nullptr; } // This is signaled when RTCP-mux becomes active and @@ -249,15 +234,6 @@ class JsepTransport : public sigslot::has_slots<> { // handle the signal and update the aggregate transport states. sigslot::signal<> SignalRtcpMuxActive; - // Signals that a data channel transport was negotiated and may be used to - // send data. 
The first parameter is |this|. The second parameter is the - // transport that was negotiated, or null if negotiation rejected the data - // channel transport. The third parameter (bool) indicates whether the - // negotiation was provisional or final. If true, it is provisional, if - // false, it is final. - sigslot::signal2 - SignalDataChannelTransportNegotiated; - // TODO(deadbeef): The methods below are only public for testing. Should make // them utility functions or objects so they can be tested independently from // this class. @@ -271,6 +247,15 @@ class JsepTransport : public sigslot::has_slots<> { void SetActiveResetSrtpParams(bool active_reset_srtp_params); private: + DtlsTransportInternal* rtp_dtls_transport_locked() + RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_) { + if (rtp_dtls_transport_) { + return rtp_dtls_transport_->internal(); + } else { + return nullptr; + } + } + bool SetRtcpMux(bool enable, webrtc::SdpType type, ContentSource source); void ActivateRtcpMux(); @@ -278,7 +263,8 @@ class JsepTransport : public sigslot::has_slots<> { bool SetSdes(const std::vector& cryptos, const std::vector& encrypted_extension_ids, webrtc::SdpType type, - ContentSource source); + ContentSource source) + RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_); // Negotiates and sets the DTLS parameters based on the current local and // remote transport description, such as the DTLS role to use, and whether @@ -295,26 +281,22 @@ class JsepTransport : public sigslot::has_slots<> { webrtc::SdpType local_description_type, ConnectionRole local_connection_role, ConnectionRole remote_connection_role, - absl::optional* negotiated_dtls_role); + absl::optional* negotiated_dtls_role) + RTC_LOCKS_EXCLUDED(accessor_lock_); // Pushes down the ICE parameters from the remote description. void SetRemoteIceParameters(const IceParameters& ice_parameters, IceTransportInternal* ice); // Pushes down the DTLS parameters obtained via negotiation. 
- webrtc::RTCError SetNegotiatedDtlsParameters( + static webrtc::RTCError SetNegotiatedDtlsParameters( DtlsTransportInternal* dtls_transport, absl::optional dtls_role, rtc::SSLFingerprint* remote_fingerprint); bool GetTransportStats(DtlsTransportInternal* dtls_transport, - TransportStats* stats); - - // Deactivates, signals removal, and deletes |composite_rtp_transport_| if the - // current state of negotiation is sufficient to determine which rtp_transport - // and data channel transport to use. - void NegotiateDatagramTransport(webrtc::SdpType type) - RTC_RUN_ON(network_thread_); + TransportStats* stats) + RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_); // Returns the default (non-datagram) rtp transport, if any. webrtc::RtpTransportInternal* default_rtp_transport() const @@ -334,7 +316,7 @@ class JsepTransport : public sigslot::has_slots<> { const rtc::Thread* const network_thread_; // Critical scope for fields accessed off-thread // TODO(https://bugs.webrtc.org/10300): Stop doing this. - rtc::CriticalSection accessor_lock_; + rtc::RecursiveCriticalSection accessor_lock_; const std::string mid_; // needs-ice-restart bit as described in JSEP. bool needs_ice_restart_ RTC_GUARDED_BY(accessor_lock_) = false; @@ -387,22 +369,9 @@ class JsepTransport : public sigslot::has_slots<> { absl::optional> recv_extension_ids_ RTC_GUARDED_BY(network_thread_); - // Optional datagram transport (experimental). - std::unique_ptr datagram_transport_ - RTC_GUARDED_BY(accessor_lock_); - std::unique_ptr datagram_rtp_transport_ RTC_GUARDED_BY(accessor_lock_); - // Non-SCTP data channel transport. Set to |datagram_transport_| if that - // transport should be used for data chanels. Unset otherwise. - webrtc::DataChannelTransportInterface* data_channel_transport_ - RTC_GUARDED_BY(accessor_lock_) = nullptr; - - // Composite data channel transport, used during negotiation. 
- std::unique_ptr - composite_data_channel_transport_ RTC_GUARDED_BY(accessor_lock_); - RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransport); }; diff --git a/pc/jsep_transport_controller.cc b/pc/jsep_transport_controller.cc index bc7000f451..6ec305297e 100644 --- a/pc/jsep_transport_controller.cc +++ b/pc/jsep_transport_controller.cc @@ -15,11 +15,8 @@ #include "absl/algorithm/container.h" #include "api/ice_transport_factory.h" -#include "api/transport/datagram_transport_interface.h" -#include "api/transport/media/media_transport_interface.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/port.h" -#include "pc/datagram_rtp_transport.h" #include "pc/srtp_filter.h" #include "rtc_base/bind.h" #include "rtc_base/checks.h" @@ -140,26 +137,6 @@ RtpTransportInternal* JsepTransportController::GetRtpTransport( return jsep_transport->rtp_transport(); } -MediaTransportConfig JsepTransportController::GetMediaTransportConfig( - const std::string& mid) const { - auto jsep_transport = GetJsepTransportForMid(mid); - if (!jsep_transport) { - return MediaTransportConfig(); - } - - DatagramTransportInterface* datagram_transport = nullptr; - if (config_.use_datagram_transport) { - datagram_transport = jsep_transport->datagram_transport(); - } - - if (datagram_transport) { - return MediaTransportConfig( - /*rtp_max_packet_size=*/datagram_transport->GetLargestDatagramSize()); - } else { - return MediaTransportConfig(); - } -} - DataChannelTransportInterface* JsepTransportController::GetDataChannelTransport( const std::string& mid) const { auto jsep_transport = GetJsepTransportForMid(mid); @@ -425,34 +402,24 @@ void JsepTransportController::SetActiveResetSrtpParams( } } -void JsepTransportController::SetMediaTransportSettings( - bool use_datagram_transport, - bool use_datagram_transport_for_data_channels, - bool use_datagram_transport_for_data_channels_receive_only) { - config_.use_datagram_transport = use_datagram_transport; - config_.use_datagram_transport_for_data_channels = - 
use_datagram_transport_for_data_channels; - config_.use_datagram_transport_for_data_channels_receive_only = - use_datagram_transport_for_data_channels_receive_only; -} - -void JsepTransportController::RollbackTransportForMids( - const std::vector& mids) { +void JsepTransportController::RollbackTransports() { if (!network_thread_->IsCurrent()) { - network_thread_->Invoke(RTC_FROM_HERE, - [=] { RollbackTransportForMids(mids); }); + network_thread_->Invoke(RTC_FROM_HERE, [=] { RollbackTransports(); }); return; } - for (auto&& mid : mids) { + RTC_DCHECK_RUN_ON(network_thread_); + for (auto&& mid : pending_mids_) { RemoveTransportForMid(mid); } - for (auto&& mid : mids) { + for (auto&& mid : pending_mids_) { MaybeDestroyJsepTransport(mid); } + pending_mids_.clear(); } rtc::scoped_refptr JsepTransportController::CreateIceTransport(const std::string& transport_name, + cricket::MediaType media_type, bool rtcp) { int component = rtcp ? cricket::ICE_CANDIDATE_COMPONENT_RTCP : cricket::ICE_CANDIDATE_COMPONENT_RTP; @@ -462,22 +429,18 @@ JsepTransportController::CreateIceTransport(const std::string& transport_name, init.set_async_resolver_factory(async_resolver_factory_); init.set_event_log(config_.event_log); return config_.ice_transport_factory->CreateIceTransport( - transport_name, component, std::move(init)); + transport_name, media_type, component, std::move(init)); } std::unique_ptr JsepTransportController::CreateDtlsTransport( const cricket::ContentInfo& content_info, - cricket::IceTransportInternal* ice, - DatagramTransportInterface* datagram_transport) { + cricket::IceTransportInternal* ice) { RTC_DCHECK(network_thread_->IsCurrent()); std::unique_ptr dtls; - if (datagram_transport) { - RTC_DCHECK(config_.use_datagram_transport || - config_.use_datagram_transport_for_data_channels); - } else if (config_.dtls_transport_factory) { + if (config_.dtls_transport_factory) { dtls = config_.dtls_transport_factory->CreateDtlsTransport( ice, config_.crypto_options); } else { @@ 
-598,7 +561,7 @@ RTCError JsepTransportController::ApplyDescription_n( bool local, SdpType type, const cricket::SessionDescription* description) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(description); if (local) { @@ -614,16 +577,9 @@ RTCError JsepTransportController::ApplyDescription_n( } std::vector merged_encrypted_extension_ids; - absl::optional bundle_media_alt_protocol; - absl::optional bundle_data_alt_protocol; if (bundle_group_) { merged_encrypted_extension_ids = MergeEncryptedHeaderExtensionIdsForBundle(description); - error = GetAltProtocolsForBundle(description, &bundle_media_alt_protocol, - &bundle_data_alt_protocol); - if (!error.ok()) { - return error; - } } for (const cricket::ContentInfo& content_info : description->contents()) { @@ -642,8 +598,6 @@ RTCError JsepTransportController::ApplyDescription_n( description->transport_infos().size()); for (size_t i = 0; i < description->contents().size(); ++i) { const cricket::ContentInfo& content_info = description->contents()[i]; - const cricket::MediaContentDescription* media_description = - content_info.media_description(); const cricket::TransportInfo& transport_info = description->transport_infos()[i]; if (content_info.rejected) { @@ -654,7 +608,8 @@ RTCError JsepTransportController::ApplyDescription_n( if (IsBundled(content_info.name) && content_info.name != *bundled_mid()) { if (!HandleBundledContent(content_info)) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "Failed to process the bundled m= section."); + "Failed to process the bundled m= section with mid='" + + content_info.name + "'."); } continue; } @@ -665,23 +620,10 @@ RTCError JsepTransportController::ApplyDescription_n( } std::vector extension_ids; - absl::optional media_alt_protocol; - absl::optional data_alt_protocol; if (bundled_mid() && content_info.name == *bundled_mid()) { extension_ids = merged_encrypted_extension_ids; - media_alt_protocol = bundle_media_alt_protocol; - 
data_alt_protocol = bundle_data_alt_protocol; } else { extension_ids = GetEncryptedHeaderExtensionIds(content_info); - switch (media_description->type()) { - case cricket::MEDIA_TYPE_AUDIO: - case cricket::MEDIA_TYPE_VIDEO: - media_alt_protocol = media_description->alt_protocol(); - break; - case cricket::MEDIA_TYPE_DATA: - data_alt_protocol = media_description->alt_protocol(); - break; - } } int rtp_abs_sendtime_extn_id = @@ -695,8 +637,7 @@ RTCError JsepTransportController::ApplyDescription_n( cricket::JsepTransportDescription jsep_description = CreateJsepTransportDescription(content_info, transport_info, - extension_ids, rtp_abs_sendtime_extn_id, - media_alt_protocol, data_alt_protocol); + extension_ids, rtp_abs_sendtime_extn_id); if (local) { error = transport->SetLocalJsepTransportDescription(jsep_description, type); @@ -706,11 +647,15 @@ RTCError JsepTransportController::ApplyDescription_n( } if (!error.ok()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Failed to apply the description for " + - content_info.name + ": " + error.message()); + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "Failed to apply the description for m= section with mid='" + + content_info.name + "': " + error.message()); } } + if (type == SdpType::kAnswer) { + pending_mids_.clear(); + } return RTCError::OK(); } @@ -724,11 +669,11 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup( // The BUNDLE group containing a MID that no m= section has is invalid. 
if (new_bundle_group) { - for (const auto& content_name : new_bundle_group->content_names()) { + for (const std::string& content_name : new_bundle_group->content_names()) { if (!description->GetContentByName(content_name)) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "The BUNDLE group contains MID:" + content_name + - " matching no m= section."); + "The BUNDLE group contains MID='" + content_name + + "' matching no m= section."); } } } @@ -740,18 +685,21 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup( if (new_bundle_group) { // The BUNDLE group in answer should be a subset of offered group. - for (const auto& content_name : new_bundle_group->content_names()) { + for (const std::string& content_name : + new_bundle_group->content_names()) { if (!offered_bundle_group || !offered_bundle_group->HasContentName(content_name)) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "The BUNDLE group in answer contains a MID that was " - "not in the offered group."); + "The BUNDLE group in answer contains a MID='" + + content_name + + "' that was " + "not in the offered group."); } } } if (bundle_group_) { - for (const auto& content_name : bundle_group_->content_names()) { + for (const std::string& content_name : bundle_group_->content_names()) { // An answer that removes m= sections from pre-negotiated BUNDLE group // without rejecting it, is invalid. 
if (!new_bundle_group || @@ -759,8 +707,9 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup( auto* content_info = description->GetContentByName(content_name); if (!content_info || !content_info->rejected) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "Answer cannot remove m= section " + content_name + - " from already-established BUNDLE group."); + "Answer cannot remove m= section with mid='" + + content_name + + "' from already-established BUNDLE group."); } } } @@ -795,9 +744,9 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup( for (const auto& content_name : bundle_group_->content_names()) { auto other_content = description->GetContentByName(content_name); if (!other_content->rejected) { - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "The m= section:" + content_name + " should be rejected."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "The m= section with mid='" + content_name + + "' should be rejected."); } } } @@ -812,8 +761,8 @@ RTCError JsepTransportController::ValidateContent( content_info.type == cricket::MediaProtocolType::kRtp && !content_info.media_description()->rtcp_mux()) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "The m= section:" + content_info.name + - " is invalid. RTCP-MUX is not " + "The m= section with mid='" + content_info.name + + "' is invalid. 
RTCP-MUX is not " "enabled when it is required."); } return RTCError::OK(); @@ -867,7 +816,8 @@ bool JsepTransportController::SetTransportForMid( if (mid_to_transport_[mid] == jsep_transport) { return true; } - + RTC_DCHECK_RUN_ON(network_thread_); + pending_mids_.push_back(mid); mid_to_transport_[mid] = jsep_transport; return config_.transport_observer->OnTransportChanged( mid, jsep_transport->rtp_transport(), jsep_transport->RtpDtlsTransport(), @@ -888,9 +838,7 @@ JsepTransportController::CreateJsepTransportDescription( const cricket::ContentInfo& content_info, const cricket::TransportInfo& transport_info, const std::vector& encrypted_extension_ids, - int rtp_abs_sendtime_extn_id, - absl::optional media_alt_protocol, - absl::optional data_alt_protocol) { + int rtp_abs_sendtime_extn_id) { const cricket::MediaContentDescription* content_desc = content_info.media_description(); RTC_DCHECK(content_desc); @@ -900,8 +848,7 @@ JsepTransportController::CreateJsepTransportDescription( return cricket::JsepTransportDescription( rtcp_mux_enabled, content_desc->cryptos(), encrypted_extension_ids, - rtp_abs_sendtime_extn_id, transport_info.description, media_alt_protocol, - data_alt_protocol); + rtp_abs_sendtime_extn_id, transport_info.description); } bool JsepTransportController::ShouldUpdateBundleGroup( @@ -967,55 +914,6 @@ JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundle( return merged_ids; } -RTCError JsepTransportController::GetAltProtocolsForBundle( - const cricket::SessionDescription* description, - absl::optional* media_alt_protocol, - absl::optional* data_alt_protocol) { - RTC_DCHECK(description); - RTC_DCHECK(bundle_group_); - RTC_DCHECK(media_alt_protocol); - RTC_DCHECK(data_alt_protocol); - - bool found_media = false; - bool found_data = false; - for (const cricket::ContentInfo& content : description->contents()) { - if (bundle_group_->HasContentName(content.name)) { - const cricket::MediaContentDescription* media_description = - 
content.media_description(); - switch (media_description->type()) { - case cricket::MEDIA_TYPE_AUDIO: - case cricket::MEDIA_TYPE_VIDEO: - if (found_media && - *media_alt_protocol != media_description->alt_protocol()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "The BUNDLE group contains conflicting " - "alt-protocols for media ('" + - media_alt_protocol->value_or("") + "' and '" + - media_description->alt_protocol().value_or("") + - "')"); - } - found_media = true; - *media_alt_protocol = media_description->alt_protocol(); - break; - case cricket::MEDIA_TYPE_DATA: - if (found_data && - *data_alt_protocol != media_description->alt_protocol()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "The BUNDLE group contains conflicting " - "alt-protocols for data ('" + - data_alt_protocol->value_or("") + "' and '" + - media_description->alt_protocol().value_or("") + - "')"); - } - found_data = true; - *data_alt_protocol = media_description->alt_protocol(); - break; - } - } - } - return RTCError::OK(); -} - int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId( const cricket::ContentInfo& content_info) { if (!config_.enable_external_auth) { @@ -1056,83 +954,6 @@ cricket::JsepTransport* JsepTransportController::GetJsepTransportByName( return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get(); } -// TODO(sukhanov): Refactor to avoid code duplication for Media and Datagram -// transports setup. -std::unique_ptr -JsepTransportController::MaybeCreateDatagramTransport( - const cricket::ContentInfo& content_info, - const cricket::SessionDescription& description, - bool local) { - if (config_.media_transport_factory == nullptr) { - return nullptr; - } - - if (!(config_.use_datagram_transport || - config_.use_datagram_transport_for_data_channels)) { - return nullptr; - } - - // Caller (offerer) datagram transport. 
- if (offer_datagram_transport_) { - RTC_DCHECK(local); - RTC_LOG(LS_INFO) << "Offered datagram transport has now been activated."; - return std::move(offer_datagram_transport_); - } - - const cricket::TransportDescription* transport_description = - description.GetTransportDescriptionByName(content_info.mid()); - RTC_DCHECK(transport_description) - << "Missing transport description for mid=" << content_info.mid(); - - if (!transport_description->opaque_parameters) { - RTC_LOG(LS_INFO) - << "No opaque transport parameters, not creating datagram transport"; - return nullptr; - } - - if (transport_description->opaque_parameters->protocol != - config_.media_transport_factory->GetTransportName()) { - RTC_LOG(LS_INFO) << "Opaque transport parameters for protocol=" - << transport_description->opaque_parameters->protocol - << ", which does not match supported protocol=" - << config_.media_transport_factory->GetTransportName(); - return nullptr; - } - - RTC_DCHECK(!local); - // When bundle is enabled, two JsepTransports are created, and then - // the second transport is destroyed (right away). - // For datagram transport, we don't want to create the second - // datagram transport in the first place. - RTC_LOG(LS_INFO) << "Returning new, client datagram transport."; - - MediaTransportSettings settings; - settings.is_caller = local; - settings.remote_transport_parameters = - transport_description->opaque_parameters->parameters; - settings.event_log = config_.event_log; - - auto datagram_transport_result = - config_.media_transport_factory->CreateDatagramTransport(network_thread_, - settings); - - if (!datagram_transport_result.ok()) { - // Datagram transport negotiation will fail and we'll fall back to RTP. - return nullptr; - } - - if (!datagram_transport_result.value() - ->SetRemoteTransportParameters( - transport_description->opaque_parameters->parameters) - .ok()) { - // Datagram transport negotiation failed (parameters are incompatible). - // Fall back to RTP. 
- return nullptr; - } - - return datagram_transport_result.MoveValue(); -} - RTCError JsepTransportController::MaybeCreateJsepTransport( bool local, const cricket::ContentInfo& content_info, @@ -1150,18 +971,17 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( "SDES and DTLS-SRTP cannot be enabled at the same time."); } - rtc::scoped_refptr ice = - CreateIceTransport(content_info.name, /*rtcp=*/false); - RTC_DCHECK(ice); + // jianlin: force MEDIA_TYPE_SCREEN if priority is high(10). + cricket::MediaType media_type = content_desc->type(); + if (media_type == cricket::MEDIA_TYPE_VIDEO && content_desc->quality() == 10) + media_type = cricket::MEDIA_TYPE_SCREEN; - std::unique_ptr datagram_transport = - MaybeCreateDatagramTransport(content_info, description, local); - if (datagram_transport) { - datagram_transport->Connect(ice->internal()); - } + rtc::scoped_refptr ice = CreateIceTransport( + content_info.name, media_type, /*rtcp=*/false); + RTC_DCHECK(ice); std::unique_ptr rtp_dtls_transport = - CreateDtlsTransport(content_info, ice->internal(), nullptr); + CreateDtlsTransport(content_info, ice->internal()); std::unique_ptr rtcp_dtls_transport; std::unique_ptr unencrypted_rtp_transport; @@ -1173,29 +993,9 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( if (config_.rtcp_mux_policy != PeerConnectionInterface::kRtcpMuxPolicyRequire && content_info.type == cricket::MediaProtocolType::kRtp) { - RTC_DCHECK(datagram_transport == nullptr); - rtcp_ice = CreateIceTransport(content_info.name, /*rtcp=*/true); + rtcp_ice = CreateIceTransport(content_info.name, content_desc->type(), /*rtcp=*/true); rtcp_dtls_transport = - CreateDtlsTransport(content_info, rtcp_ice->internal(), - /*datagram_transport=*/nullptr); - } - - // Only create a datagram RTP transport if the datagram transport should be - // used for RTP. 
- if (datagram_transport && config_.use_datagram_transport) { - // TODO(sukhanov): We use unencrypted RTP transport over DatagramTransport, - // because MediaTransport encrypts. In the future we may want to - // implement our own version of RtpTransport over MediaTransport, because - // it will give us more control over things like: - // - Fusing - // - Rtp header compression - // - Handling Rtcp feedback. - RTC_LOG(LS_INFO) << "Creating UnencryptedRtpTransport, because datagram " - "transport is used."; - RTC_DCHECK(!rtcp_dtls_transport); - datagram_rtp_transport = std::make_unique( - content_info.media_description()->rtp_header_extensions(), - ice->internal(), datagram_transport.get()); + CreateDtlsTransport(content_info, rtcp_ice->internal()); } if (config_.disable_encryption) { @@ -1219,27 +1019,19 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( config_.sctp_factory->CreateSctpTransport(rtp_dtls_transport.get()); } - DataChannelTransportInterface* data_channel_transport = nullptr; - if (config_.use_datagram_transport_for_data_channels) { - data_channel_transport = datagram_transport.get(); - } - std::unique_ptr jsep_transport = std::make_unique( content_info.name, certificate_, std::move(ice), std::move(rtcp_ice), std::move(unencrypted_rtp_transport), std::move(sdes_transport), std::move(dtls_srtp_transport), std::move(datagram_rtp_transport), std::move(rtp_dtls_transport), std::move(rtcp_dtls_transport), - std::move(sctp_transport), std::move(datagram_transport), - data_channel_transport); + std::move(sctp_transport)); jsep_transport->rtp_transport()->SignalRtcpPacketReceived.connect( this, &JsepTransportController::OnRtcpPacketReceived_n); jsep_transport->SignalRtcpMuxActive.connect( this, &JsepTransportController::UpdateAggregateStates_n); - jsep_transport->SignalDataChannelTransportNegotiated.connect( - this, &JsepTransportController::OnDataChannelTransportNegotiated_n); SetTransportForMid(content_info.name, jsep_transport.get()); 
jsep_transports_by_name_[content_info.name] = std::move(jsep_transport); @@ -1308,28 +1100,6 @@ cricket::IceRole JsepTransportController::DetermineIceRole( tdesc.ice_mode == cricket::ICEMODE_FULL) { ice_role = cricket::ICEROLE_CONTROLLING; } - - // Older versions of Chrome expect the ICE role to be re-determined when an - // ICE restart occurs, and also don't perform conflict resolution correctly, - // so for now we can't safely stop doing this, unless the application opts - // in by setting |config_.redetermine_role_on_ice_restart_| to false. See: - // https://bugs.chromium.org/p/chromium/issues/detail?id=628676 - // TODO(deadbeef): Remove this when these old versions of Chrome reach a low - // enough population. - if (config_.redetermine_role_on_ice_restart && - jsep_transport->local_description() && - cricket::IceCredentialsChanged( - jsep_transport->local_description()->transport_desc.ice_ufrag, - jsep_transport->local_description()->transport_desc.ice_pwd, - tdesc.ice_ufrag, tdesc.ice_pwd) && - // Don't change the ICE role if the remote endpoint is ICE lite; we - // should always be controlling in that case. - (!jsep_transport->remote_description() || - jsep_transport->remote_description()->transport_desc.ice_mode != - cricket::ICEMODE_LITE)) { - ice_role = (type == SdpType::kOffer) ? 
cricket::ICEROLE_CONTROLLING - : cricket::ICEROLE_CONTROLLED; - } } else { // If our role is cricket::ICEROLE_CONTROLLED and the remote endpoint // supports only ice_lite, this local endpoint should take the CONTROLLING @@ -1442,18 +1212,6 @@ void JsepTransportController::OnTransportStateChanged_n( UpdateAggregateStates_n(); } -void JsepTransportController::OnDataChannelTransportNegotiated_n( - cricket::JsepTransport* transport, - DataChannelTransportInterface* data_channel_transport) { - for (auto it : mid_to_transport_) { - if (it.second == transport) { - config_.transport_observer->OnTransportChanged( - it.first, transport->rtp_transport(), transport->RtpDtlsTransport(), - data_channel_transport); - } - } -} - void JsepTransportController::UpdateAggregateStates_n() { RTC_DCHECK(network_thread_->IsCurrent()); @@ -1504,10 +1262,11 @@ void JsepTransportController::UpdateAggregateStates_n() { } if (ice_connection_state_ != new_connection_state) { ice_connection_state_ = new_connection_state; - invoker_.AsyncInvoke(RTC_FROM_HERE, signaling_thread_, - [this, new_connection_state] { - SignalIceConnectionState(new_connection_state); - }); + + invoker_.AsyncInvoke( + RTC_FROM_HERE, signaling_thread_, [this, new_connection_state] { + SignalIceConnectionState.Send(new_connection_state); + }); } // Compute the current RTCIceConnectionState as described in @@ -1629,7 +1388,10 @@ void JsepTransportController::UpdateAggregateStates_n() { }); } - if (all_done_gathering) { + // Compute the gathering state. 
+ if (dtls_transports.empty()) { + new_gathering_state = cricket::kIceGatheringNew; + } else if (all_done_gathering) { new_gathering_state = cricket::kIceGatheringComplete; } else if (any_gathering) { new_gathering_state = cricket::kIceGatheringGathering; @@ -1655,54 +1417,4 @@ void JsepTransportController::OnDtlsHandshakeError( SignalDtlsHandshakeError(error); } -absl::optional -JsepTransportController::GetTransportParameters(const std::string& mid) { - if (!(config_.use_datagram_transport || - config_.use_datagram_transport_for_data_channels)) { - return absl::nullopt; - } - - cricket::JsepTransport* transport = GetJsepTransportForMid(mid); - if (transport) { - absl::optional params = - transport->GetTransportParameters(); - if (params) { - params->protocol = config_.media_transport_factory->GetTransportName(); - } - return params; - } - - RTC_DCHECK(!local_desc_ && !remote_desc_) - << "JsepTransport should exist for every mid once any description is set"; - - if (config_.use_datagram_transport_for_data_channels_receive_only) { - return absl::nullopt; - } - - // Need to generate a transport for the offer. - if (!offer_datagram_transport_) { - webrtc::MediaTransportSettings settings; - settings.is_caller = true; - settings.pre_shared_key = rtc::CreateRandomString(32); - settings.event_log = config_.event_log; - auto datagram_transport_or_error = - config_.media_transport_factory->CreateDatagramTransport( - network_thread_, settings); - - if (datagram_transport_or_error.ok()) { - offer_datagram_transport_ = - std::move(datagram_transport_or_error.value()); - } else { - RTC_LOG(LS_INFO) << "Unable to create datagram transport, error=" - << datagram_transport_or_error.error().message(); - } - } - - // We have prepared a transport for the offer, and can now use its parameters. 
- cricket::OpaqueTransportParameters params; - params.parameters = offer_datagram_transport_->GetTransportParameters(); - params.protocol = config_.media_transport_factory->GetTransportName(); - return params; -} - } // namespace webrtc diff --git a/pc/jsep_transport_controller.h b/pc/jsep_transport_controller.h index 9c3f691302..7a30c62c54 100644 --- a/pc/jsep_transport_controller.h +++ b/pc/jsep_transport_controller.h @@ -22,7 +22,6 @@ #include "api/ice_transport_factory.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/transport/media/media_transport_config.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/dtls_transport.h" #include "p2p/base/dtls_transport_factory.h" @@ -36,6 +35,7 @@ #include "rtc_base/async_invoker.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/callback_list.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -102,32 +102,7 @@ class JsepTransportController : public sigslot::has_slots<> { RtcEventLog* event_log = nullptr; // Factory for SCTP transports. - cricket::SctpTransportInternalFactory* sctp_factory = nullptr; - - // Whether an RtpMediaTransport should be created as default, when no - // MediaTransportFactory is provided. - bool use_rtp_media_transport = false; - - // Use encrypted datagram transport to send packets. - bool use_datagram_transport = false; - - // Use datagram transport's implementation of data channels instead of SCTP. - bool use_datagram_transport_for_data_channels = false; - - // Whether |use_datagram_transport_for_data_channels| applies to outgoing - // calls. If true, |use_datagram_transport_for_data_channels| applies only - // to incoming calls. - bool use_datagram_transport_for_data_channels_receive_only = false; - - // Optional media transport factory (experimental). 
If provided it will be - // used to create datagram_transport (as long as either - // |use_datagram_transport| or - // |use_datagram_transport_for_data_channels| is set to true). However, - // whether it will be used to send / receive audio and video frames instead - // of RTP is determined by |use_datagram_transport|. Note that currently - // datagram_transport co-exists with RTP / RTCP transports and may use the - // same underlying ICE transport. - MediaTransportFactory* media_transport_factory = nullptr; + SctpTransportFactoryInterface* sctp_factory = nullptr; }; // The ICE related events are signaled on the |signaling_thread|. @@ -161,8 +136,6 @@ class JsepTransportController : public sigslot::has_slots<> { rtc::scoped_refptr GetSctpTransport( const std::string& mid) const; - MediaTransportConfig GetMediaTransportConfig(const std::string& mid) const; - DataChannelTransportInterface* GetDataChannelTransport( const std::string& mid) const; @@ -215,25 +188,9 @@ class JsepTransportController : public sigslot::has_slots<> { void SetActiveResetSrtpParams(bool active_reset_srtp_params); - // Allows to overwrite the settings from config. You may set or reset the - // media transport configuration on the jsep transport controller, as long as - // you did not call 'GetMediaTransport' or 'MaybeCreateJsepTransport'. Once - // Jsep transport is created, you can't change this setting. - void SetMediaTransportSettings( - bool use_datagram_transport, - bool use_datagram_transport_for_data_channels, - bool use_datagram_transport_for_data_channels_receive_only); - - // TODO(elrello): For now the rollback only removes mid to transport mappings + // For now the rollback only removes mid to transport mappings // and deletes unused transports, but doesn't consider anything more complex. - void RollbackTransportForMids(const std::vector& mids); - - // Gets the transport parameters for the transport identified by |mid|. 
- // If |mid| is bundled, returns the parameters for the bundled transport. - // If the transport for |mid| has not been created yet, it may be allocated in - // order to generate transport parameters. - absl::optional GetTransportParameters( - const std::string& mid); + void RollbackTransports(); // All of these signals are fired on the signaling thread. @@ -241,10 +198,11 @@ class JsepTransportController : public sigslot::has_slots<> { // Else if all completed => completed, // Else if all connected => connected, // Else => connecting - sigslot::signal1 SignalIceConnectionState; + CallbackList SignalIceConnectionState; sigslot::signal1 SignalConnectionState; + sigslot::signal1 SignalStandardizedIceConnectionState; @@ -290,9 +248,7 @@ class JsepTransportController : public sigslot::has_slots<> { const cricket::ContentInfo& content_info, const cricket::TransportInfo& transport_info, const std::vector& encrypted_extension_ids, - int rtp_abs_sendtime_extn_id, - absl::optional media_alt_protocol, - absl::optional data_alt_protocol); + int rtp_abs_sendtime_extn_id); absl::optional bundled_mid() const { absl::optional bundled_mid; @@ -314,12 +270,6 @@ class JsepTransportController : public sigslot::has_slots<> { std::vector GetEncryptedHeaderExtensionIds( const cricket::ContentInfo& content_info); - // Extracts the alt-protocol settings that apply to the bundle group. - RTCError GetAltProtocolsForBundle( - const cricket::SessionDescription* description, - absl::optional* media_alt_protocol, - absl::optional* data_alt_protocol); - int GetRtpAbsSendTimeHeaderExtensionId( const cricket::ContentInfo& content_info); @@ -347,16 +297,6 @@ class JsepTransportController : public sigslot::has_slots<> { const cricket::ContentInfo& content_info, const cricket::SessionDescription& description); - // Creates datagram transport if config wants to use it, and a=x-mt line is - // present for the current media transport. 
Returned - // DatagramTransportInterface is not connected, and must be connected to ICE. - // You must call |GenerateOrGetLastMediaTransportOffer| on the caller before - // calling MaybeCreateDatagramTransport. - std::unique_ptr - MaybeCreateDatagramTransport(const cricket::ContentInfo& content_info, - const cricket::SessionDescription& description, - bool local); - void MaybeDestroyJsepTransport(const std::string& mid); void DestroyAllJsepTransports_n(); @@ -370,10 +310,10 @@ class JsepTransportController : public sigslot::has_slots<> { std::unique_ptr CreateDtlsTransport( const cricket::ContentInfo& content_info, - cricket::IceTransportInternal* ice, - DatagramTransportInterface* datagram_transport); + cricket::IceTransportInternal* ice); rtc::scoped_refptr CreateIceTransport( const std::string& transport_name, + cricket::MediaType media_type, bool rtcp); std::unique_ptr CreateUnencryptedRtpTransport( @@ -409,10 +349,6 @@ class JsepTransportController : public sigslot::has_slots<> { void OnTransportStateChanged_n(cricket::IceTransportInternal* transport); void OnTransportCandidatePairChanged_n( const cricket::CandidatePairChangeEvent& event); - void OnDataChannelTransportNegotiated_n( - cricket::JsepTransport* transport, - DataChannelTransportInterface* data_channel_transport); - void UpdateAggregateStates_n(); void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer* packet, @@ -430,7 +366,8 @@ class JsepTransportController : public sigslot::has_slots<> { // This keeps track of the mapping between media section // (BaseChannel/SctpTransport) and the JsepTransport underneath. std::map mid_to_transport_; - + // Keep track of mids that have been mapped to transports. Used for rollback. + std::vector pending_mids_ RTC_GUARDED_BY(network_thread_); // Aggregate states for Transports. 
// standardized_ice_connection_state_ is intended to replace // ice_connection_state, see bugs.webrtc.org/9308 @@ -445,17 +382,6 @@ class JsepTransportController : public sigslot::has_slots<> { Config config_; - // Early on in the call we don't know if datagram transport is going to be - // used, but we need to get the server-supported parameters to add to an SDP. - // This server datagram transport will be promoted to the used datagram - // transport after the local description is set, and the ownership will be - // transferred to the actual JsepTransport. This "offer" datagram transport is - // not created if it's done on the party that provides answer. This offer - // datagram transport is only created once at the beginning of the connection, - // and never again. - std::unique_ptr offer_datagram_transport_ = - nullptr; - const cricket::SessionDescription* local_desc_ = nullptr; const cricket::SessionDescription* remote_desc_ = nullptr; absl::optional initial_offerer_; diff --git a/pc/jsep_transport_controller_unittest.cc b/pc/jsep_transport_controller_unittest.cc index 18fdc209d1..40dc23e535 100644 --- a/pc/jsep_transport_controller_unittest.cc +++ b/pc/jsep_transport_controller_unittest.cc @@ -13,9 +13,6 @@ #include #include -#include "api/test/fake_media_transport.h" -#include "api/test/loopback_media_transport.h" -#include "api/transport/media/media_transport_interface.h" #include "p2p/base/dtls_transport_factory.h" #include "p2p/base/fake_dtls_transport.h" #include "p2p/base/fake_ice_transport.h" @@ -44,20 +41,6 @@ static const char kDataMid1[] = "data1"; namespace webrtc { -namespace { - -// Media transport factory requires crypto settings to be present in order to -// create media transport. 
-void AddCryptoSettings(cricket::SessionDescription* description) { - for (auto& content : description->contents()) { - content.media_description()->AddCrypto(cricket::CryptoParams( - /*t=*/0, std::string(rtc::CS_AES_CM_128_HMAC_SHA1_80), - "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2", "")); - } -} - -} // namespace - class FakeIceTransportFactory : public webrtc::IceTransportFactory { public: ~FakeIceTransportFactory() override = default; @@ -106,8 +89,10 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, } void ConnectTransportControllerSignals() { - transport_controller_->SignalIceConnectionState.connect( - this, &JsepTransportControllerTest::OnConnectionState); + transport_controller_->SignalIceConnectionState.AddReceiver( + [this](cricket::IceConnectionState s) { + JsepTransportControllerTest::OnConnectionState(s); + }); transport_controller_->SignalStandardizedIceConnectionState.connect( this, &JsepTransportControllerTest::OnStandardizedIceConnectionState); transport_controller_->SignalConnectionState.connect( @@ -440,96 +425,6 @@ TEST_F(JsepTransportControllerTest, GetDtlsTransportWithRtcpMux) { EXPECT_EQ(nullptr, transport_controller_->GetRtcpDtlsTransport(kVideoMid1)); } -TEST_F(JsepTransportControllerTest, - DtlsIsStillCreatedIfDatagramTransportIsOnlyUsedForDataChannels) { - FakeMediaTransportFactory fake_media_transport_factory("transport_params"); - JsepTransportController::Config config; - - config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - config.media_transport_factory = &fake_media_transport_factory; - config.use_datagram_transport_for_data_channels = true; - CreateJsepTransportController(config); - - auto description = CreateSessionDescriptionWithBundledData(); - AddCryptoSettings(description.get()); - - absl::optional params = - transport_controller_->GetTransportParameters(kAudioMid1); - for (auto& info : 
description->transport_infos()) { - info.description.opaque_parameters = params; - } - for (cricket::ContentInfo& content_info : description->contents()) { - if (content_info.media_description()->type() == cricket::MEDIA_TYPE_DATA) { - content_info.media_description()->set_alt_protocol(params->protocol); - } - } - - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, description.get()) - .ok()); - - FakeDatagramTransport* datagram_transport = - static_cast( - transport_controller_->GetDataChannelTransport(kAudioMid1)); - - ASSERT_NE(nullptr, datagram_transport); - - EXPECT_EQ(cricket::ICE_CANDIDATE_COMPONENT_RTP, - transport_controller_->GetDtlsTransport(kAudioMid1)->component()) - << "Datagram transport for media was not enabled, and so DTLS transport " - "should be created."; - - // Datagram transport is not used for media, so no max packet size is - // specified. - EXPECT_EQ(transport_controller_->GetMediaTransportConfig(kAudioMid1) - .rtp_max_packet_size, - absl::nullopt); - - // Since datagram transport is not used for RTP, setting it to writable should - // not make the RTP transport writable. - datagram_transport->set_state(MediaTransportState::kWritable); - EXPECT_FALSE(transport_controller_->GetRtpTransport(kAudioMid1) - ->IsWritable(/*rtcp=*/false)); -} - -// An offer that bundles different alt-protocols should be rejected. 
-TEST_F(JsepTransportControllerTest, CannotBundleDifferentAltProtocols) { - FakeMediaTransportFactory fake_media_transport_factory("transport_params"); - JsepTransportController::Config config; - config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - config.media_transport_factory = &fake_media_transport_factory; - config.use_datagram_transport = true; - config.use_datagram_transport_for_data_channels = true; - CreateJsepTransportController(config); - - auto description = CreateSessionDescriptionWithBundledData(); - AddCryptoSettings(description.get()); - - absl::optional params = - transport_controller_->GetTransportParameters(kAudioMid1); - for (auto& info : description->transport_infos()) { - info.description.opaque_parameters = params; - } - - // Append a different alt-protocol to each of the sections. - for (cricket::ContentInfo& content_info : description->contents()) { - content_info.media_description()->set_alt_protocol(params->protocol + "-" + - content_info.name); - } - - EXPECT_FALSE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); - EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, description.get()) - .ok()); -} - TEST_F(JsepTransportControllerTest, SetIceConfig) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); @@ -637,8 +532,8 @@ TEST_F(JsepTransportControllerTest, SetAndGetLocalCertificate) { CreateJsepTransportController(JsepTransportController::Config()); rtc::scoped_refptr certificate1 = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); rtc::scoped_refptr returned_certificate; auto description = std::make_unique(); @@ -662,8 +557,8 @@ 
TEST_F(JsepTransportControllerTest, SetAndGetLocalCertificate) { // Shouldn't be able to change the identity once set. rtc::scoped_refptr certificate2 = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session2", rtc::KT_DEFAULT)); EXPECT_FALSE(transport_controller_->SetLocalCertificate(certificate2)); } @@ -691,12 +586,10 @@ TEST_F(JsepTransportControllerTest, GetRemoteSSLCertChain) { TEST_F(JsepTransportControllerTest, GetDtlsRole) { CreateJsepTransportController(JsepTransportController::Config()); - auto offer_certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("offer", rtc::KT_DEFAULT))); - auto answer_certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("answer", rtc::KT_DEFAULT))); + auto offer_certificate = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("offer", rtc::KT_DEFAULT)); + auto answer_certificate = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("answer", rtc::KT_DEFAULT)); transport_controller_->SetLocalCertificate(offer_certificate); auto offer_desc = std::make_unique(); @@ -1002,49 +895,6 @@ TEST_F(JsepTransportControllerTest, IceSignalingOccursOnSignalingThread) { EXPECT_TRUE(!signaled_on_non_signaling_thread_); } -// Older versions of Chrome expect the ICE role to be re-determined when an -// ICE restart occurs, and also don't perform conflict resolution correctly, -// so for now we can't safely stop doing this. -// See: https://bugs.chromium.org/p/chromium/issues/detail?id=628676 -// TODO(deadbeef): Remove this when these old versions of Chrome reach a low -// enough population. -TEST_F(JsepTransportControllerTest, IceRoleRedeterminedOnIceRestartByDefault) { - CreateJsepTransportController(JsepTransportController::Config()); - // Let the |transport_controller_| be the controlled side initially. 
- auto remote_offer = std::make_unique(); - AddAudioSection(remote_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - auto local_answer = std::make_unique(); - AddAudioSection(local_answer.get(), kAudioMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); - - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kOffer, remote_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kAnswer, local_answer.get()) - .ok()); - - auto fake_dtls = static_cast( - transport_controller_->GetDtlsTransport(kAudioMid1)); - EXPECT_EQ(cricket::ICEROLE_CONTROLLED, - fake_dtls->fake_ice_transport()->GetIceRole()); - - // New offer will trigger the ICE restart. - auto restart_local_offer = std::make_unique(); - AddAudioSection(restart_local_offer.get(), kAudioMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - EXPECT_TRUE( - transport_controller_ - ->SetLocalDescription(SdpType::kOffer, restart_local_offer.get()) - .ok()); - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, - fake_dtls->fake_ice_transport()->GetIceRole()); -} - // Test that if the TransportController was created with the // |redetermine_role_on_ice_restart| parameter set to false, the role is *not* // redetermined on an ICE restart. @@ -1695,423 +1545,4 @@ TEST_F(JsepTransportControllerTest, ChangeTaggedMediaSectionMaxBundle) { .ok()); } -constexpr char kFakeTransportParameters[] = "fake-params"; - -// Test fixture that provides common setup and helpers for tests related to the -// datagram transport. 
-class JsepTransportControllerDatagramTest - : public JsepTransportControllerTest, - public testing::WithParamInterface { - public: - JsepTransportControllerDatagramTest() - : JsepTransportControllerTest(), - fake_media_transport_factory_(kFakeTransportParameters) { - JsepTransportController::Config config; - config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - config.media_transport_factory = &fake_media_transport_factory_; - config.use_datagram_transport = true; - CreateJsepTransportController(config); - } - - // Whether the JsepTransportController under test acts as the offerer or - // answerer in this test. - bool IsOfferer() { return GetParam(); } - - // Sets a description as local or remote based on type and current - // perspective. - RTCError SetDescription(SdpType type, - const cricket::SessionDescription* description) { - if (IsOfferer() == (type == SdpType::kOffer)) { - return transport_controller_->SetLocalDescription(type, description); - } else { - return transport_controller_->SetRemoteDescription(type, description); - } - } - - // Creates a session description with the settings necessary for datagram - // transport (bundle + crypto) and the given |transport_params|. - std::unique_ptr - CreateSessionDescriptionForDatagramTransport( - absl::optional transport_params) { - auto description = CreateSessionDescriptionWithBundleGroup(); - AddCryptoSettings(description.get()); - - for (auto& info : description->transport_infos()) { - info.description.opaque_parameters = transport_params; - } - if (transport_params) { - for (auto& content_info : description->contents()) { - content_info.media_description()->set_alt_protocol( - transport_params->protocol); - } - } - return description; - } - - // Creates transport parameters with |protocol| and |parameters| - // matching what |fake_media_transport_factory_| provides. 
- cricket::OpaqueTransportParameters CreateTransportParameters() { - cricket::OpaqueTransportParameters params; - params.protocol = fake_media_transport_factory_.GetTransportName(); - params.parameters = "fake-params"; - return params; - } - - protected: - FakeMediaTransportFactory fake_media_transport_factory_; -}; - -TEST_P(JsepTransportControllerDatagramTest, InitDatagramTransport) { - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - // Getting transport parameters is allowed before setting a description. - // This is necessary so that the offerer can include these params. - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - // Setting a description activates the datagram transport without changing - // transport parameters. - auto description = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok()); - - // After setting an offer with transport parameters, those parameters are - // reflected by the controller. - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); -} - -TEST_P(JsepTransportControllerDatagramTest, - OfferMissingDatagramTransportParams) { - if (IsOfferer()) { - // This test doesn't make sense from the offerer's perspective, as the offer - // must contain datagram transport params if the offerer supports it. - return; - } - - auto description = - CreateSessionDescriptionForDatagramTransport(absl::nullopt); - EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok()); - - // The offer didn't contain any datagram transport parameters, so the answer - // won't either. 
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - absl::nullopt); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - absl::nullopt); -} - -TEST_P(JsepTransportControllerDatagramTest, OfferHasWrongTransportName) { - if (IsOfferer()) { - // This test doesn't make sense from the offerer's perspective, as the - // offerer cannot offer itself the wrong transport. - return; - } - - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - fake_params.protocol = "wrong-name"; - - auto description = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok()); - - // The offerer and answerer support different datagram transports, so the - // answerer rejects the offered parameters. - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - absl::nullopt); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - absl::nullopt); -} - -TEST_P(JsepTransportControllerDatagramTest, IncompatibleAnswer) { - // Transport will claim that no parameters are compatible, even if they match - // exactly. - fake_media_transport_factory_.set_transport_parameters_comparison( - [](absl::string_view, absl::string_view) { return false; }); - - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - auto answer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok()); - - // The offerer and answerer have incompatible parameters, so the answerer - // rejects the offered parameters. 
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - absl::nullopt); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - absl::nullopt); -} - -TEST_P(JsepTransportControllerDatagramTest, CompatibleAnswer) { - // Transport will claim that no parameters are compatible, even if they are - // completely different. - fake_media_transport_factory_.set_transport_parameters_comparison( - [](absl::string_view, absl::string_view) { return true; }); - - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - cricket::OpaqueTransportParameters answer_params; - answer_params.protocol = fake_params.protocol; - answer_params.parameters = "something different from offer"; - auto answer = CreateSessionDescriptionForDatagramTransport(answer_params); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok()); - - // The offerer and answerer have compatible parameters, so the answerer - // accepts the offered parameters. 
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); -} - -TEST_P(JsepTransportControllerDatagramTest, AnswerRejectsDatagram) { - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok()); - - // The answer rejected datagram transport, so its parameters are empty. 
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - absl::nullopt); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - absl::nullopt); -} - -TEST_P(JsepTransportControllerDatagramTest, AnswerAcceptsDatagram) { - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - auto answer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok()); - - // The answer accepted datagram transport, so it is present. 
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); -} - -TEST_P(JsepTransportControllerDatagramTest, PrAnswerRejectsDatagram) { - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt); - EXPECT_TRUE(SetDescription(SdpType::kPrAnswer, answer.get()).ok()); - - // The answer rejected datagram transport, but it's provisional, so the - // transport is kept around for now. 
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); -} - -TEST_P(JsepTransportControllerDatagramTest, PrAnswerAcceptsDatagram) { - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - auto answer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kPrAnswer, answer.get()).ok()); - - // The answer provisionally accepted datagram transport, so it's kept. 
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); -} - -TEST_P(JsepTransportControllerDatagramTest, RenegotiationCannotAddDatagram) { - auto offer = CreateSessionDescriptionForDatagramTransport(absl::nullopt); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - absl::nullopt); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - absl::nullopt); - - auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - absl::nullopt); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - absl::nullopt); - - // Attempting to add a datagram transport on a re-offer does not cause an - // error, but also does not add a datagram transport. 
- auto reoffer = - CreateSessionDescriptionForDatagramTransport(CreateTransportParameters()); - EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - absl::nullopt); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - absl::nullopt); -} - -TEST_P(JsepTransportControllerDatagramTest, RenegotiationCannotRemoveDatagram) { - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - auto answer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - // Attempting to remove a datagram transport on a re-offer does not cause an - // error, but also does not remove the datagram transport. 
- auto reoffer = CreateSessionDescriptionForDatagramTransport(absl::nullopt); - EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); -} - -TEST_P(JsepTransportControllerDatagramTest, - RenegotiationKeepsDatagramTransport) { - cricket::OpaqueTransportParameters fake_params = CreateTransportParameters(); - if (IsOfferer()) { - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - } - - auto offer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - auto answer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - // Attempting to remove a datagram transport on a re-offer does not cause an - // error, but also does not remove the datagram transport. 
- auto reoffer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); - - auto reanswer = CreateSessionDescriptionForDatagramTransport(fake_params); - EXPECT_TRUE(SetDescription(SdpType::kAnswer, reanswer.get()).ok()); - - EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1), - fake_params); - EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1), - fake_params); -} - -INSTANTIATE_TEST_SUITE_P( - JsepTransportControllerDatagramTests, - JsepTransportControllerDatagramTest, - testing::Values(true, false), - // The parameter value is the local perspective (offerer or answerer). - [](const testing::TestParamInfo& info) { - return info.param ? "Offerer" : "Answerer"; - }); - } // namespace webrtc diff --git a/pc/jsep_transport_unittest.cc b/pc/jsep_transport_unittest.cc index c4193e5974..d8f2fff621 100644 --- a/pc/jsep_transport_unittest.cc +++ b/pc/jsep_transport_unittest.cc @@ -120,9 +120,7 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { std::move(sdes_transport), std::move(dtls_srtp_transport), /*datagram_rtp_transport=*/nullptr, std::move(rtp_dtls_transport), std::move(rtcp_dtls_transport), - /*sctp_transport=*/nullptr, - /*datagram_transport=*/nullptr, - /*data_channel_transport=*/nullptr); + /*sctp_transport=*/nullptr); signal_rtcp_mux_active_received_ = false; jsep_transport->SignalRtcpMuxActive.connect( @@ -225,11 +223,11 @@ TEST_P(JsepTransport2WithRtcpMux, SetDtlsParameters) { // Create certificates. 
rtc::scoped_refptr local_cert = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("local", rtc::KT_DEFAULT))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("local", rtc::KT_DEFAULT)); rtc::scoped_refptr remote_cert = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("remote", rtc::KT_DEFAULT))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("remote", rtc::KT_DEFAULT)); jsep_transport_->SetLocalCertificate(local_cert); // Apply offer. @@ -276,11 +274,11 @@ TEST_P(JsepTransport2WithRtcpMux, SetDtlsParametersWithPassiveAnswer) { // Create certificates. rtc::scoped_refptr local_cert = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("local", rtc::KT_DEFAULT))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("local", rtc::KT_DEFAULT)); rtc::scoped_refptr remote_cert = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("remote", rtc::KT_DEFAULT))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("remote", rtc::KT_DEFAULT)); jsep_transport_->SetLocalCertificate(local_cert); // Apply offer. @@ -393,8 +391,8 @@ TEST_P(JsepTransport2WithRtcpMux, VerifyCertificateFingerprint) { for (auto& key_type : key_types) { rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", key_type))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", key_type)); ASSERT_NE(nullptr, certificate); std::string digest_algorithm; @@ -433,8 +431,8 @@ TEST_P(JsepTransport2WithRtcpMux, ValidDtlsRoleNegotiation) { // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. 
rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", rtc::KT_ECDSA))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); JsepTransportDescription local_description = MakeJsepTransportDescription( rtcp_mux_enabled, kIceUfrag1, kIcePwd1, certificate); @@ -532,8 +530,8 @@ TEST_P(JsepTransport2WithRtcpMux, InvalidDtlsRoleNegotiation) { // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", rtc::KT_ECDSA))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); JsepTransportDescription local_description = MakeJsepTransportDescription( rtcp_mux_enabled, kIceUfrag1, kIcePwd1, certificate); @@ -663,8 +661,8 @@ TEST_F(JsepTransport2Test, ValidDtlsReofferFromAnswerer) { // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", rtc::KT_ECDSA))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); bool rtcp_mux_enabled = true; jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); jsep_transport_->SetLocalCertificate(certificate); @@ -710,8 +708,8 @@ TEST_F(JsepTransport2Test, InvalidDtlsReofferFromAnswerer) { // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. 
rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", rtc::KT_ECDSA))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); bool rtcp_mux_enabled = true; jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); jsep_transport_->SetLocalCertificate(certificate); @@ -756,8 +754,8 @@ TEST_F(JsepTransport2Test, InvalidDtlsReofferFromAnswerer) { // since JSEP requires generating "actpass". TEST_F(JsepTransport2Test, RemoteOfferWithCurrentNegotiatedDtlsRole) { rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", rtc::KT_ECDSA))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); bool rtcp_mux_enabled = true; jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); jsep_transport_->SetLocalCertificate(certificate); @@ -801,8 +799,8 @@ TEST_F(JsepTransport2Test, RemoteOfferWithCurrentNegotiatedDtlsRole) { // role is rejected. TEST_F(JsepTransport2Test, RemoteOfferThatChangesNegotiatedDtlsRole) { rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", rtc::KT_ECDSA))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); bool rtcp_mux_enabled = true; jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); jsep_transport_->SetLocalCertificate(certificate); @@ -846,8 +844,8 @@ TEST_F(JsepTransport2Test, RemoteOfferThatChangesNegotiatedDtlsRole) { // interpreted as having an active role. 
TEST_F(JsepTransport2Test, DtlsSetupWithLegacyAsAnswerer) { rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("testing", rtc::KT_ECDSA))); + rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); bool rtcp_mux_enabled = true; jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); jsep_transport_->SetLocalCertificate(certificate); @@ -1052,13 +1050,11 @@ class JsepTransport2HeaderExtensionTest this, &JsepTransport2HeaderExtensionTest::OnReadPacket2); if (mode == SrtpMode::kDtlsSrtp) { - auto cert1 = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + auto cert1 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); jsep_transport1_->rtp_dtls_transport()->SetLocalCertificate(cert1); - auto cert2 = - rtc::RTCCertificate::Create(std::unique_ptr( - rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))); + auto cert2 = rtc::RTCCertificate::Create( + rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); jsep_transport2_->rtp_dtls_transport()->SetLocalCertificate(cert2); } } @@ -1256,5 +1252,39 @@ INSTANTIATE_TEST_SUITE_P( std::make_tuple(Scenario::kDtlsBeforeCallerSendOffer, false), std::make_tuple(Scenario::kDtlsBeforeCallerSetAnswer, false), std::make_tuple(Scenario::kDtlsAfterCallerSetAnswer, false))); + +// This test verifies the ICE parameters are properly applied to the transports. 
+TEST_F(JsepTransport2Test, SetIceParametersWithRenomination) { + jsep_transport_ = + CreateJsepTransport2(/* rtcp_mux_enabled= */ true, SrtpMode::kDtlsSrtp); + + JsepTransportDescription jsep_description; + jsep_description.transport_desc = TransportDescription(kIceUfrag1, kIcePwd1); + jsep_description.transport_desc.AddOption(ICE_OPTION_RENOMINATION); + ASSERT_TRUE( + jsep_transport_ + ->SetLocalJsepTransportDescription(jsep_description, SdpType::kOffer) + .ok()); + auto fake_ice_transport = static_cast( + jsep_transport_->rtp_dtls_transport()->ice_transport()); + EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); + EXPECT_EQ(kIceUfrag1, fake_ice_transport->ice_ufrag()); + EXPECT_EQ(kIcePwd1, fake_ice_transport->ice_pwd()); + EXPECT_TRUE(fake_ice_transport->ice_parameters().renomination); + + jsep_description.transport_desc = TransportDescription(kIceUfrag2, kIcePwd2); + jsep_description.transport_desc.AddOption(ICE_OPTION_RENOMINATION); + ASSERT_TRUE(jsep_transport_ + ->SetRemoteJsepTransportDescription(jsep_description, + SdpType::kAnswer) + .ok()); + fake_ice_transport = static_cast( + jsep_transport_->rtp_dtls_transport()->ice_transport()); + EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); + EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_ufrag()); + EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_pwd()); + EXPECT_TRUE(fake_ice_transport->remote_ice_parameters().renomination); +} + } // namespace } // namespace cricket diff --git a/pc/media_session.cc b/pc/media_session.cc index e764101eef..cbff21635e 100644 --- a/pc/media_session.cc +++ b/pc/media_session.cc @@ -55,6 +55,57 @@ void GetSupportedSdesCryptoSuiteNames( } } +webrtc::RtpExtension RtpExtensionFromCapability( + const webrtc::RtpHeaderExtensionCapability& capability) { + return webrtc::RtpExtension(capability.uri, + capability.preferred_id.value_or(1)); +} + +cricket::RtpHeaderExtensions RtpHeaderExtensionsFromCapabilities( + const std::vector& capabilities) { + 
cricket::RtpHeaderExtensions exts; + for (const auto& capability : capabilities) { + exts.push_back(RtpExtensionFromCapability(capability)); + } + return exts; +} + +std::vector +UnstoppedRtpHeaderExtensionCapabilities( + std::vector capabilities) { + capabilities.erase( + std::remove_if( + capabilities.begin(), capabilities.end(), + [](const webrtc::RtpHeaderExtensionCapability& capability) { + return capability.direction == RtpTransceiverDirection::kStopped; + }), + capabilities.end()); + return capabilities; +} + +bool IsCapabilityPresent(const webrtc::RtpHeaderExtensionCapability& capability, + const cricket::RtpHeaderExtensions& extensions) { + return std::find_if(extensions.begin(), extensions.end(), + [&capability](const webrtc::RtpExtension& extension) { + return capability.uri == extension.uri; + }) != extensions.end(); +} + +cricket::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions( + const std::vector& capabilities, + const cricket::RtpHeaderExtensions& unencrypted, + const cricket::RtpHeaderExtensions& encrypted) { + cricket::RtpHeaderExtensions extensions; + for (const auto& capability : capabilities) { + if (capability.direction != RtpTransceiverDirection::kStopped || + IsCapabilityPresent(capability, unencrypted) || + IsCapabilityPresent(capability, encrypted)) { + extensions.push_back(RtpExtensionFromCapability(capability)); + } + } + return extensions; +} + } // namespace namespace cricket { @@ -182,14 +233,14 @@ bool FindMatchingCrypto(const CryptoParamsVec& cryptos, void GetSupportedAudioSdesCryptoSuites( const webrtc::CryptoOptions& crypto_options, std::vector* crypto_suites) { - if (crypto_options.srtp.enable_gcm_crypto_suites) { - crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM); - crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM); - } if (crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher) { crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_32); } crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80); + if 
(crypto_options.srtp.enable_gcm_crypto_suites) { + crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM); + crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM); + } } void GetSupportedAudioSdesCryptoSuiteNames( @@ -202,11 +253,11 @@ void GetSupportedAudioSdesCryptoSuiteNames( void GetSupportedVideoSdesCryptoSuites( const webrtc::CryptoOptions& crypto_options, std::vector* crypto_suites) { + crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80); if (crypto_options.srtp.enable_gcm_crypto_suites) { crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM); crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM); } - crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80); } void GetSupportedVideoSdesCryptoSuiteNames( @@ -219,11 +270,11 @@ void GetSupportedVideoSdesCryptoSuiteNames( void GetSupportedDataSdesCryptoSuites( const webrtc::CryptoOptions& crypto_options, std::vector* crypto_suites) { + crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80); if (crypto_options.srtp.enable_gcm_crypto_suites) { crypto_suites->push_back(rtc::SRTP_AEAD_AES_256_GCM); crypto_suites->push_back(rtc::SRTP_AEAD_AES_128_GCM); } - crypto_suites->push_back(rtc::SRTP_AES128_CM_SHA1_80); } void GetSupportedDataSdesCryptoSuiteNames( @@ -268,19 +319,6 @@ static StreamParamsVec GetCurrentStreamParams( return stream_params; } -// Filters the data codecs for the data channel type. -void FilterDataCodecs(std::vector* codecs, bool sctp) { - // Filter RTP codec for SCTP and vice versa. - const char* codec_name = - sctp ? 
kGoogleRtpDataCodecName : kGoogleSctpDataCodecName; - codecs->erase(std::remove_if(codecs->begin(), codecs->end(), - [&codec_name](const DataCodec& codec) { - return absl::EqualsIgnoreCase(codec.name, - codec_name); - }), - codecs->end()); -} - static StreamParams CreateStreamParamsForNewSenderWithSsrcs( const SenderOptions& sender, const std::string& rtcp_cname, @@ -452,15 +490,12 @@ static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group, selected_transport_info->description.ice_pwd; ConnectionRole selected_connection_role = selected_transport_info->description.connection_role; - const absl::optional& selected_opaque_parameters = - selected_transport_info->description.opaque_parameters; for (TransportInfo& transport_info : sdesc->transport_infos()) { if (bundle_group.HasContentName(transport_info.content_name) && transport_info.content_name != selected_content_name) { transport_info.description.ice_ufrag = selected_ufrag; transport_info.description.ice_pwd = selected_pwd; transport_info.description.connection_role = selected_connection_role; - transport_info.description.opaque_parameters = selected_opaque_parameters; } } return true; @@ -646,7 +681,21 @@ static bool CreateContentOffer( if (offer->type() == cricket::MEDIA_TYPE_VIDEO) { offer->set_rtcp_reduced_size(true); } - offer->set_rtp_header_extensions(rtp_extensions); + + // Build the vector of header extensions with directions for this + // media_description's options. + RtpHeaderExtensions extensions; + for (auto extension_with_id : rtp_extensions) { + for (const auto& extension : media_description_options.header_extensions) { + if (extension_with_id.uri == extension.uri) { + // TODO(crbug.com/1051821): Configure the extension direction from + // the information in the media_description_options extension + // capability. 
+ extensions.push_back(extension_with_id); + } + } + } + offer->set_rtp_header_extensions(extensions); AddSimulcastToMediaDescription(media_description_options, offer); @@ -661,8 +710,6 @@ static bool CreateContentOffer( } } - offer->set_alt_protocol(media_description_options.alt_protocol); - if (secure_policy == SEC_REQUIRED && offer->cryptos().empty()) { return false; } @@ -961,13 +1008,13 @@ static bool FindByUri(const RtpHeaderExtensions& extensions, static bool FindByUriWithEncryptionPreference( const RtpHeaderExtensions& extensions, - const webrtc::RtpExtension& ext_to_match, + absl::string_view uri_to_match, bool encryption_preference, webrtc::RtpExtension* found_extension) { const webrtc::RtpExtension* unencrypted_extension = nullptr; for (const webrtc::RtpExtension& extension : extensions) { // We assume that all URIs are given in a canonical format. - if (extension.uri == ext_to_match.uri) { + if (extension.uri == uri_to_match) { if (!encryption_preference || extension.encrypt) { if (found_extension) { *found_extension = extension; @@ -1037,7 +1084,7 @@ static void AddEncryptedVersionsOfHdrExts(RtpHeaderExtensions* extensions, // extensions. 
if (extension.encrypt || !webrtc::RtpExtension::IsEncryptionSupported(extension.uri) || - (FindByUriWithEncryptionPreference(*extensions, extension, true, + (FindByUriWithEncryptionPreference(*extensions, extension.uri, true, &existing) && existing.encrypt)) { continue; @@ -1073,11 +1120,21 @@ static void NegotiateRtpHeaderExtensions( offered_extensions, webrtc::RtpExtension::kTransportSequenceNumberV2Uri); + bool frame_descriptor_in_local = false; + bool dependency_descriptor_in_local = false; + bool abs_capture_time_in_local = false; + for (const webrtc::RtpExtension& ours : local_extensions) { + if (ours.uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00) + frame_descriptor_in_local = true; + else if (ours.uri == webrtc::RtpExtension::kDependencyDescriptorUri) + dependency_descriptor_in_local = true; + else if (ours.uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri) + abs_capture_time_in_local = true; webrtc::RtpExtension theirs; if (FindByUriWithEncryptionPreference( - offered_extensions, ours, enable_encrypted_rtp_header_extensions, - &theirs)) { + offered_extensions, ours.uri, + enable_encrypted_rtp_header_extensions, &theirs)) { if (transport_sequence_number_v2_offer && ours.uri == webrtc::RtpExtension::kTransportSequenceNumberUri) { // Don't respond to @@ -1096,6 +1153,32 @@ static void NegotiateRtpHeaderExtensions( // Respond that we support kTransportSequenceNumberV2Uri. negotiated_extensions->push_back(*transport_sequence_number_v2_offer); } + + // Frame descriptors support. If the extension is not present locally, but is + // in the offer, we add it to the list. 
+ webrtc::RtpExtension theirs; + if (!dependency_descriptor_in_local && + FindByUriWithEncryptionPreference( + offered_extensions, webrtc::RtpExtension::kDependencyDescriptorUri, + enable_encrypted_rtp_header_extensions, &theirs)) { + negotiated_extensions->push_back(theirs); + } + if (!frame_descriptor_in_local && + FindByUriWithEncryptionPreference( + offered_extensions, + webrtc::RtpExtension::kGenericFrameDescriptorUri00, + enable_encrypted_rtp_header_extensions, &theirs)) { + negotiated_extensions->push_back(theirs); + } + + // Absolute capture time support. If the extension is not present locally, but + // is in the offer, we add it to the list. + if (!abs_capture_time_in_local && + FindByUriWithEncryptionPreference( + offered_extensions, webrtc::RtpExtension::kAbsoluteCaptureTimeUri, + enable_encrypted_rtp_header_extensions, &theirs)) { + negotiated_extensions->push_back(theirs); + } } static void StripCNCodecs(AudioCodecs* audio_codecs) { @@ -1142,7 +1225,7 @@ static bool CreateMediaContentAnswer( const MediaSessionOptions& session_options, const SecurePolicy& sdes_policy, const CryptoParamsVec* current_cryptos, - const RtpHeaderExtensions& local_rtp_extenstions, + const RtpHeaderExtensions& local_rtp_extensions, UniqueRandomIdGenerator* ssrc_generator, bool enable_encrypted_rtp_header_extensions, StreamParamsVec* current_streams, @@ -1151,7 +1234,7 @@ static bool CreateMediaContentAnswer( answer->set_extmap_allow_mixed_enum(offer->extmap_allow_mixed_enum()); RtpHeaderExtensions negotiated_rtp_extensions; NegotiateRtpHeaderExtensions( - local_rtp_extenstions, offer->rtp_header_extensions(), + local_rtp_extensions, offer->rtp_header_extensions(), enable_encrypted_rtp_header_extensions, &negotiated_rtp_extensions); answer->set_rtp_header_extensions(negotiated_rtp_extensions); @@ -1182,9 +1265,6 @@ static bool CreateMediaContentAnswer( answer->set_direction(NegotiateRtpTransceiverDirection( offer->direction(), media_description_options.direction)); - if 
(offer->alt_protocol() == media_description_options.alt_protocol) { - answer->set_alt_protocol(media_description_options.alt_protocol); - } return true; } @@ -1329,10 +1409,8 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( : MediaSessionDescriptionFactory(transport_desc_factory, ssrc_generator) { channel_manager->GetSupportedAudioSendCodecs(&audio_send_codecs_); channel_manager->GetSupportedAudioReceiveCodecs(&audio_recv_codecs_); - channel_manager->GetSupportedAudioRtpHeaderExtensions(&audio_rtp_extensions_); channel_manager->GetSupportedVideoSendCodecs(&video_send_codecs_); channel_manager->GetSupportedVideoReceiveCodecs(&video_recv_codecs_); - channel_manager->GetSupportedVideoRtpHeaderExtensions(&video_rtp_extensions_); channel_manager->GetSupportedDataCodecs(&rtp_data_codecs_); ComputeAudioCodecsIntersectionAndUnion(); ComputeVideoCodecsIntersectionAndUnion(); @@ -1395,22 +1473,11 @@ static void RemoveUnifiedPlanExtensions(RtpHeaderExtensions* extensions) { } RtpHeaderExtensions -MediaSessionDescriptionFactory::audio_rtp_header_extensions() const { - RtpHeaderExtensions extensions = audio_rtp_extensions_; - if (!is_unified_plan_) { - RemoveUnifiedPlanExtensions(&extensions); - } - - return extensions; -} - -RtpHeaderExtensions -MediaSessionDescriptionFactory::video_rtp_header_extensions() const { - RtpHeaderExtensions extensions = video_rtp_extensions_; +MediaSessionDescriptionFactory::filtered_rtp_header_extensions( + RtpHeaderExtensions extensions) const { if (!is_unified_plan_) { RemoveUnifiedPlanExtensions(&extensions); } - return extensions; } @@ -1440,19 +1507,14 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( RtpDataCodecs offer_rtp_data_codecs; GetCodecsForOffer(current_active_contents, &offer_audio_codecs, &offer_video_codecs, &offer_rtp_data_codecs); - if (!session_options.vad_enabled) { // If application doesn't want CN codecs in offer. 
StripCNCodecs(&offer_audio_codecs); } - FilterDataCodecs(&offer_rtp_data_codecs, - session_options.data_channel_type == DCT_SCTP); - - RtpHeaderExtensions audio_rtp_extensions; - RtpHeaderExtensions video_rtp_extensions; - GetRtpHdrExtsToOffer(current_active_contents, - session_options.offer_extmap_allow_mixed, - &audio_rtp_extensions, &video_rtp_extensions); + AudioVideoRtpHeaderExtensions extensions_with_ids = + GetOfferedRtpHeaderExtensionsWithIds( + current_active_contents, session_options.offer_extmap_allow_mixed, + session_options.media_description_options); auto offer = std::make_unique(); @@ -1472,18 +1534,20 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( } switch (media_description_options.type) { case MEDIA_TYPE_AUDIO: - if (!AddAudioContentForOffer( - media_description_options, session_options, current_content, - current_description, audio_rtp_extensions, offer_audio_codecs, - ¤t_streams, offer.get(), &ice_credentials)) { + if (!AddAudioContentForOffer(media_description_options, session_options, + current_content, current_description, + extensions_with_ids.audio, + offer_audio_codecs, ¤t_streams, + offer.get(), &ice_credentials)) { return nullptr; } break; case MEDIA_TYPE_VIDEO: - if (!AddVideoContentForOffer( - media_description_options, session_options, current_content, - current_description, video_rtp_extensions, offer_video_codecs, - ¤t_streams, offer.get(), &ice_credentials)) { + if (!AddVideoContentForOffer(media_description_options, session_options, + current_content, current_description, + extensions_with_ids.video, + offer_video_codecs, ¤t_streams, + offer.get(), &ice_credentials)) { return nullptr; } break; @@ -1495,6 +1559,13 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( return nullptr; } break; + case MEDIA_TYPE_UNSUPPORTED: + if (!AddUnsupportedContentForOffer( + media_description_options, session_options, current_content, + current_description, offer.get(), &ice_credentials)) { + return nullptr; + } + 
break; default: RTC_NOTREACHED(); } @@ -1590,8 +1661,6 @@ MediaSessionDescriptionFactory::CreateAnswer( // If application doesn't want CN codecs in answer. StripCNCodecs(&answer_audio_codecs); } - FilterDataCodecs(&answer_rtp_data_codecs, - session_options.data_channel_type == DCT_SCTP); auto answer = std::make_unique(); @@ -1620,13 +1689,16 @@ MediaSessionDescriptionFactory::CreateAnswer( msection_index < current_description->contents().size()) { current_content = ¤t_description->contents()[msection_index]; } + RtpHeaderExtensions header_extensions = RtpHeaderExtensionsFromCapabilities( + UnstoppedRtpHeaderExtensionCapabilities( + media_description_options.header_extensions)); switch (media_description_options.type) { case MEDIA_TYPE_AUDIO: if (!AddAudioContentForAnswer( media_description_options, session_options, offer_content, offer, current_content, current_description, - bundle_transport.get(), answer_audio_codecs, ¤t_streams, - answer.get(), &ice_credentials)) { + bundle_transport.get(), answer_audio_codecs, header_extensions, + ¤t_streams, answer.get(), &ice_credentials)) { return nullptr; } break; @@ -1634,8 +1706,8 @@ MediaSessionDescriptionFactory::CreateAnswer( if (!AddVideoContentForAnswer( media_description_options, session_options, offer_content, offer, current_content, current_description, - bundle_transport.get(), answer_video_codecs, ¤t_streams, - answer.get(), &ice_credentials)) { + bundle_transport.get(), answer_video_codecs, header_extensions, + ¤t_streams, answer.get(), &ice_credentials)) { return nullptr; } break; @@ -1648,6 +1720,14 @@ MediaSessionDescriptionFactory::CreateAnswer( return nullptr; } break; + case MEDIA_TYPE_UNSUPPORTED: + if (!AddUnsupportedContentForAnswer( + media_description_options, session_options, offer_content, + offer, current_content, current_description, + bundle_transport.get(), answer.get(), &ice_credentials)) { + return nullptr; + } + break; default: RTC_NOTREACHED(); } @@ -1730,6 +1810,7 @@ const AudioCodecs& 
MediaSessionDescriptionFactory::GetAudioCodecsForOffer( switch (direction) { // If stream is inactive - generate list as if sendrecv. case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: case RtpTransceiverDirection::kInactive: return audio_sendrecv_codecs_; case RtpTransceiverDirection::kSendOnly: @@ -1737,8 +1818,7 @@ const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForOffer( case RtpTransceiverDirection::kRecvOnly: return audio_recv_codecs_; } - RTC_NOTREACHED(); - return audio_sendrecv_codecs_; + RTC_CHECK_NOTREACHED(); } const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForAnswer( @@ -1748,6 +1828,7 @@ const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForAnswer( // For inactive and sendrecv answers, generate lists as if we were to accept // the offer's direction. See RFC 3264 Section 6.1. case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: case RtpTransceiverDirection::kInactive: return GetAudioCodecsForOffer( webrtc::RtpTransceiverDirectionReversed(offer)); @@ -1756,8 +1837,7 @@ const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForAnswer( case RtpTransceiverDirection::kRecvOnly: return audio_recv_codecs_; } - RTC_NOTREACHED(); - return audio_sendrecv_codecs_; + RTC_CHECK_NOTREACHED(); } const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForOffer( @@ -1765,6 +1845,7 @@ const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForOffer( switch (direction) { // If stream is inactive - generate list as if sendrecv. 
case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: case RtpTransceiverDirection::kInactive: return video_sendrecv_codecs_; case RtpTransceiverDirection::kSendOnly: @@ -1772,8 +1853,7 @@ const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForOffer( case RtpTransceiverDirection::kRecvOnly: return video_recv_codecs_; } - RTC_NOTREACHED(); - return video_sendrecv_codecs_; + RTC_CHECK_NOTREACHED(); } const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForAnswer( @@ -1783,6 +1863,7 @@ const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForAnswer( // For inactive and sendrecv answers, generate lists as if we were to accept // the offer's direction. See RFC 3264 Section 6.1. case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: case RtpTransceiverDirection::kInactive: return GetVideoCodecsForOffer( webrtc::RtpTransceiverDirectionReversed(offer)); @@ -1791,8 +1872,7 @@ const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForAnswer( case RtpTransceiverDirection::kRecvOnly: return video_recv_codecs_; } - RTC_NOTREACHED(); - return video_sendrecv_codecs_; + RTC_CHECK_NOTREACHED(); } void MergeCodecsFromDescription( @@ -1924,11 +2004,12 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( &used_pltypes); } -void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer( +MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions +MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds( const std::vector& current_active_contents, bool extmap_allow_mixed, - RtpHeaderExtensions* offer_audio_extensions, - RtpHeaderExtensions* offer_video_extensions) const { + const std::vector& media_description_options) + const { // All header extensions allocated from the same range to avoid potential // issues when using BUNDLE. 
@@ -1942,6 +2023,7 @@ void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer( RtpHeaderExtensions all_regular_extensions; RtpHeaderExtensions all_encrypted_extensions; + AudioVideoRtpHeaderExtensions offered_extensions; // First - get all extensions from the current description if the media type // is used. // Add them to |used_ids| so the local ids are not reused if a new media @@ -1950,36 +2032,45 @@ void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer( if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) { const AudioContentDescription* audio = content->media_description()->as_audio(); - MergeRtpHdrExts(audio->rtp_header_extensions(), offer_audio_extensions, + MergeRtpHdrExts(audio->rtp_header_extensions(), &offered_extensions.audio, &all_regular_extensions, &all_encrypted_extensions, &used_ids); } else if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) { const VideoContentDescription* video = content->media_description()->as_video(); - MergeRtpHdrExts(video->rtp_header_extensions(), offer_video_extensions, + MergeRtpHdrExts(video->rtp_header_extensions(), &offered_extensions.video, &all_regular_extensions, &all_encrypted_extensions, &used_ids); } } - // Add our default RTP header extensions that are not in the current - // description. - MergeRtpHdrExts(audio_rtp_header_extensions(), offer_audio_extensions, - &all_regular_extensions, &all_encrypted_extensions, - &used_ids); - MergeRtpHdrExts(video_rtp_header_extensions(), offer_video_extensions, - &all_regular_extensions, &all_encrypted_extensions, - &used_ids); + // Add all encountered header extensions in the media description options that + // are not in the current description. 
+ for (const auto& entry : media_description_options) { + RtpHeaderExtensions filtered_extensions = + filtered_rtp_header_extensions(UnstoppedOrPresentRtpHeaderExtensions( + entry.header_extensions, all_regular_extensions, + all_encrypted_extensions)); + if (entry.type == MEDIA_TYPE_AUDIO) + MergeRtpHdrExts(filtered_extensions, &offered_extensions.audio, + &all_regular_extensions, &all_encrypted_extensions, + &used_ids); + else if (entry.type == MEDIA_TYPE_VIDEO) + MergeRtpHdrExts(filtered_extensions, &offered_extensions.video, + &all_regular_extensions, &all_encrypted_extensions, + &used_ids); + } // TODO(jbauch): Support adding encrypted header extensions to existing // sessions. if (enable_encrypted_rtp_header_extensions_ && current_active_contents.empty()) { - AddEncryptedVersionsOfHdrExts(offer_audio_extensions, + AddEncryptedVersionsOfHdrExts(&offered_extensions.audio, &all_encrypted_extensions, &used_ids); - AddEncryptedVersionsOfHdrExts(offer_video_extensions, + AddEncryptedVersionsOfHdrExts(&offered_extensions.video, &all_encrypted_extensions, &used_ids); } + return offered_extensions; } bool MediaSessionDescriptionFactory::AddTransportOffer( @@ -2255,7 +2346,7 @@ bool MediaSessionDescriptionFactory::AddSctpDataContentForOffer( } desc->AddContent(media_description_options.mid, MediaProtocolType::kSctp, - std::move(data)); + media_description_options.stopped, std::move(data)); if (!AddTransportOffer(media_description_options.mid, media_description_options.transport_options, current_description, desc, ice_credentials)) { @@ -2290,7 +2381,7 @@ bool MediaSessionDescriptionFactory::AddRtpDataContentForOffer( return false; } - data->set_bandwidth(kDataMaxBandwidth); + data->set_bandwidth(kRtpDataMaxBandwidth); SetMediaProtocol(secure_transport, data.get()); desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp, media_description_options.stopped, std::move(data)); @@ -2311,9 +2402,7 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer( 
StreamParamsVec* current_streams, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const { - bool is_sctp = - (session_options.data_channel_type == DCT_SCTP || - session_options.data_channel_type == DCT_DATA_CHANNEL_TRANSPORT_SCTP); + bool is_sctp = (session_options.data_channel_type == DCT_SCTP); // If the DataChannel type is not specified, use the DataChannel type in // the current description. if (session_options.data_channel_type == DCT_NONE && current_content) { @@ -2333,6 +2422,31 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer( } } +bool MediaSessionDescriptionFactory::AddUnsupportedContentForOffer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* current_content, + const SessionDescription* current_description, + SessionDescription* desc, + IceCredentialsIterator* ice_credentials) const { + RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_UNSUPPORTED)); + + const UnsupportedContentDescription* current_unsupported_description = + current_content->media_description()->as_unsupported(); + auto unsupported = std::make_unique( + current_unsupported_description->media_type()); + unsupported->set_protocol(current_content->media_description()->protocol()); + desc->AddContent(media_description_options.mid, MediaProtocolType::kOther, + /*rejected=*/true, std::move(unsupported)); + + if (!AddTransportOffer(media_description_options.mid, + media_description_options.transport_options, + current_description, desc, ice_credentials)) { + return false; + } + return true; +} + // |audio_codecs| = set of all possible codecs that can be used, with correct // payload type mappings // @@ -2354,6 +2468,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( const SessionDescription* current_description, const TransportInfo* bundle_transport, const AudioCodecs& audio_codecs, + const RtpHeaderExtensions& default_audio_rtp_header_extensions, 
StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const { @@ -2426,9 +2541,9 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( if (!CreateMediaContentAnswer( offer_audio_description, media_description_options, session_options, sdes_policy, GetCryptos(current_content), - audio_rtp_header_extensions(), ssrc_generator_, - enable_encrypted_rtp_header_extensions_, current_streams, - bundle_enabled, audio_answer.get())) { + filtered_rtp_header_extensions(default_audio_rtp_header_extensions), + ssrc_generator_, enable_encrypted_rtp_header_extensions_, + current_streams, bundle_enabled, audio_answer.get())) { return false; // Fails the session setup. } @@ -2464,6 +2579,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( const SessionDescription* current_description, const TransportInfo* bundle_transport, const VideoCodecs& video_codecs, + const RtpHeaderExtensions& default_video_rtp_header_extensions, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const { @@ -2544,9 +2660,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( if (!CreateMediaContentAnswer( offer_video_description, media_description_options, session_options, sdes_policy, GetCryptos(current_content), - video_rtp_header_extensions(), ssrc_generator_, - enable_encrypted_rtp_header_extensions_, current_streams, - bundle_enabled, video_answer.get())) { + filtered_rtp_header_extensions(default_video_rtp_header_extensions), + ssrc_generator_, enable_encrypted_rtp_header_extensions_, + current_streams, bundle_enabled, video_answer.get())) { return false; // Failed the sessin setup. } bool secure = bundle_transport ? 
bundle_transport->description.secure() @@ -2661,8 +2777,8 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( return false; } - if (!rejected) { - data_answer->set_bandwidth(kDataMaxBandwidth); + if (!rejected && session_options.data_channel_type == DCT_RTP) { + data_answer->set_bandwidth(kRtpDataMaxBandwidth); } else { // RFC 3264 // The answer MUST contain the same number of m-lines as the offer. @@ -2673,6 +2789,42 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( return true; } +bool MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* offer_content, + const SessionDescription* offer_description, + const ContentInfo* current_content, + const SessionDescription* current_description, + const TransportInfo* bundle_transport, + SessionDescription* answer, + IceCredentialsIterator* ice_credentials) const { + std::unique_ptr unsupported_transport = + CreateTransportAnswer(media_description_options.mid, offer_description, + media_description_options.transport_options, + current_description, bundle_transport != nullptr, + ice_credentials); + if (!unsupported_transport) { + return false; + } + RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_UNSUPPORTED)); + + const UnsupportedContentDescription* offer_unsupported_description = + offer_content->media_description()->as_unsupported(); + std::unique_ptr unsupported_answer = + std::make_unique( + offer_unsupported_description->media_type()); + unsupported_answer->set_protocol(offer_unsupported_description->protocol()); + + if (!AddTransportAnswer(media_description_options.mid, + *(unsupported_transport.get()), answer)) { + return false; + } + answer->AddContent(media_description_options.mid, offer_content->type, + /*rejected=*/true, std::move(unsupported_answer)); + return true; +} + void 
MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() { audio_sendrecv_codecs_.clear(); all_audio_codecs_.clear(); @@ -2750,6 +2902,10 @@ bool IsDataContent(const ContentInfo* content) { return IsMediaContentOfType(content, MEDIA_TYPE_DATA); } +bool IsUnsupportedContent(const ContentInfo* content) { + return IsMediaContentOfType(content, MEDIA_TYPE_UNSUPPORTED); +} + const ContentInfo* GetFirstMediaContent(const ContentInfos& contents, MediaType media_type) { for (const ContentInfo& content : contents) { diff --git a/pc/media_session.h b/pc/media_session.h index ef83834318..58a31a2ab2 100644 --- a/pc/media_session.h +++ b/pc/media_session.h @@ -78,7 +78,7 @@ struct MediaDescriptionOptions { // stream information goes in the local descriptions. std::vector sender_options; std::vector codec_preferences; - absl::optional alt_protocol; + std::vector header_extensions; private: // Doesn't DCHECK on |type|. @@ -147,19 +147,13 @@ class MediaSessionDescriptionFactory { const AudioCodecs& audio_recv_codecs() const; void set_audio_codecs(const AudioCodecs& send_codecs, const AudioCodecs& recv_codecs); - void set_audio_rtp_header_extensions(const RtpHeaderExtensions& extensions) { - audio_rtp_extensions_ = extensions; - } - RtpHeaderExtensions audio_rtp_header_extensions() const; const VideoCodecs& video_sendrecv_codecs() const; const VideoCodecs& video_send_codecs() const; const VideoCodecs& video_recv_codecs() const; void set_video_codecs(const VideoCodecs& send_codecs, const VideoCodecs& recv_codecs); - void set_video_rtp_header_extensions(const RtpHeaderExtensions& extensions) { - video_rtp_extensions_ = extensions; - } - RtpHeaderExtensions video_rtp_header_extensions() const; + RtpHeaderExtensions filtered_rtp_header_extensions( + RtpHeaderExtensions extensions) const; const RtpDataCodecs& rtp_data_codecs() const { return rtp_data_codecs_; } void set_rtp_data_codecs(const RtpDataCodecs& codecs) { rtp_data_codecs_ = codecs; @@ -184,6 +178,11 @@ class 
MediaSessionDescriptionFactory { const SessionDescription* current_description) const; private: + struct AudioVideoRtpHeaderExtensions { + RtpHeaderExtensions audio; + RtpHeaderExtensions video; + }; + const AudioCodecs& GetAudioCodecsForOffer( const webrtc::RtpTransceiverDirection& direction) const; const AudioCodecs& GetAudioCodecsForAnswer( @@ -205,11 +204,11 @@ class MediaSessionDescriptionFactory { AudioCodecs* audio_codecs, VideoCodecs* video_codecs, RtpDataCodecs* rtp_data_codecs) const; - void GetRtpHdrExtsToOffer( + AudioVideoRtpHeaderExtensions GetOfferedRtpHeaderExtensionsWithIds( const std::vector& current_active_contents, bool extmap_allow_mixed, - RtpHeaderExtensions* audio_extensions, - RtpHeaderExtensions* video_extensions) const; + const std::vector& media_description_options) + const; bool AddTransportOffer(const std::string& content_name, const TransportOptions& transport_options, const SessionDescription* current_desc, @@ -284,6 +283,14 @@ class MediaSessionDescriptionFactory { SessionDescription* desc, IceCredentialsIterator* ice_credentials) const; + bool AddUnsupportedContentForOffer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* current_content, + const SessionDescription* current_description, + SessionDescription* desc, + IceCredentialsIterator* ice_credentials) const; + bool AddAudioContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, @@ -293,6 +300,7 @@ class MediaSessionDescriptionFactory { const SessionDescription* current_description, const TransportInfo* bundle_transport, const AudioCodecs& audio_codecs, + const RtpHeaderExtensions& default_audio_rtp_header_extensions, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; @@ -306,6 +314,7 @@ class MediaSessionDescriptionFactory { const SessionDescription* current_description, const 
TransportInfo* bundle_transport, const VideoCodecs& video_codecs, + const RtpHeaderExtensions& default_video_rtp_header_extensions, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; @@ -323,6 +332,17 @@ class MediaSessionDescriptionFactory { SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; + bool AddUnsupportedContentForAnswer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* offer_content, + const SessionDescription* offer_description, + const ContentInfo* current_content, + const SessionDescription* current_description, + const TransportInfo* bundle_transport, + SessionDescription* answer, + IceCredentialsIterator* ice_credentials) const; + void ComputeAudioCodecsIntersectionAndUnion(); void ComputeVideoCodecsIntersectionAndUnion(); @@ -334,14 +354,12 @@ class MediaSessionDescriptionFactory { AudioCodecs audio_sendrecv_codecs_; // Union of send and recv. AudioCodecs all_audio_codecs_; - RtpHeaderExtensions audio_rtp_extensions_; VideoCodecs video_send_codecs_; VideoCodecs video_recv_codecs_; // Intersection of send and recv. VideoCodecs video_sendrecv_codecs_; // Union of send and recv. VideoCodecs all_video_codecs_; - RtpHeaderExtensions video_rtp_extensions_; RtpDataCodecs rtp_data_codecs_; // This object is not owned by the channel so it must outlive it. 
rtc::UniqueRandomIdGenerator* const ssrc_generator_; @@ -357,6 +375,7 @@ bool IsMediaContent(const ContentInfo* content); bool IsAudioContent(const ContentInfo* content); bool IsVideoContent(const ContentInfo* content); bool IsDataContent(const ContentInfo* content); +bool IsUnsupportedContent(const ContentInfo* content); const ContentInfo* GetFirstMediaContent(const ContentInfos& contents, MediaType media_type); const ContentInfo* GetFirstAudioContent(const ContentInfos& contents); diff --git a/pc/media_session_unittest.cc b/pc/media_session_unittest.cc index a901dedb70..5a9002bc4e 100644 --- a/pc/media_session_unittest.cc +++ b/pc/media_session_unittest.cc @@ -18,6 +18,7 @@ #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/strings/match.h" #include "media/base/codec.h" #include "media/base/test_utils.h" #include "media/sctp/sctp_transport_internal.h" @@ -88,6 +89,7 @@ using rtc::CS_AES_CM_128_HMAC_SHA1_80; using rtc::UniqueRandomIdGenerator; using ::testing::Contains; using ::testing::Each; +using ::testing::ElementsAre; using ::testing::ElementsAreArray; using ::testing::Eq; using ::testing::Field; @@ -238,6 +240,12 @@ static const RtpExtension kRtpExtensionTransportSequenceNumber02[] = { 2), }; +static const RtpExtension kRtpExtensionGenericFrameDescriptorUri00[] = { + RtpExtension("http://www.webrtc.org/experiments/rtp-hdrext/" + "generic-frame-descriptor-00", + 3), +}; + static const uint32_t kSimulcastParamsSsrc[] = {10, 11, 20, 21, 30, 31}; static const uint32_t kSimSsrc[] = {10, 20, 30}; static const uint32_t kFec1Ssrc[] = {10, 11}; @@ -405,6 +413,17 @@ static MediaSessionOptions CreatePlanBMediaSessionOptions() { return session_options; } +// prefers GCM SDES crypto suites by removing non-GCM defaults. 
+void PreferGcmCryptoParameters(CryptoParamsVec* cryptos) { + cryptos->erase( + std::remove_if(cryptos->begin(), cryptos->end(), + [](const cricket::CryptoParams& crypto) { + return crypto.cipher_suite != CS_AEAD_AES_256_GCM && + crypto.cipher_suite != CS_AEAD_AES_128_GCM; + }), + cryptos->end()); +} + // TODO(zhihuang): Most of these tests were written while MediaSessionOptions // was designed for Plan B SDP, where only one audio "m=" section and one video // "m=" section could be generated, and ordering couldn't be controlled. Many of @@ -522,9 +541,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ( media_desc_options_it->transport_options.enable_ice_renomination, GetIceRenomination(ti_audio)); - EXPECT_EQ(media_desc_options_it->transport_options.opaque_parameters, - ti_audio->description.opaque_parameters); - } else { EXPECT_TRUE(ti_audio == NULL); } @@ -537,8 +553,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ(ti_audio->description.ice_ufrag, ti_video->description.ice_ufrag); EXPECT_EQ(ti_audio->description.ice_pwd, ti_video->description.ice_pwd); - EXPECT_EQ(ti_audio->description.opaque_parameters, - ti_video->description.opaque_parameters); } else { if (has_current_desc) { EXPECT_EQ(current_video_ufrag, ti_video->description.ice_ufrag); @@ -549,8 +563,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ(static_cast(cricket::ICE_PWD_LENGTH), ti_video->description.ice_pwd.size()); } - EXPECT_EQ(media_desc_options_it->transport_options.opaque_parameters, - ti_video->description.opaque_parameters); } EXPECT_EQ( media_desc_options_it->transport_options.enable_ice_renomination, @@ -690,6 +702,13 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { std::unique_ptr offer = f1_.CreateOffer(offer_opts, NULL); ASSERT_TRUE(offer.get() != NULL); + if (gcm_offer && gcm_answer) { + for (cricket::ContentInfo& content : offer->contents()) { + auto cryptos = 
content.media_description()->cryptos(); + PreferGcmCryptoParameters(&cryptos); + content.media_description()->set_cryptos(cryptos); + } + } std::unique_ptr answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL); const ContentInfo* ac = answer->GetContentByName("audio"); @@ -728,13 +747,10 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { const cricket::RtpHeaderExtensions& expectedAnswer) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - f1_.set_audio_rtp_header_extensions(offered); - f1_.set_video_rtp_header_extensions(offered); - f2_.set_audio_rtp_header_extensions(local); - f2_.set_video_rtp_header_extensions(local); - + SetAudioVideoRtpHeaderExtensions(offered, offered, &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); ASSERT_TRUE(offer.get() != NULL); + SetAudioVideoRtpHeaderExtensions(local, local, &opts); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); @@ -746,6 +762,38 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { GetFirstVideoContentDescription(answer.get())->rtp_header_extensions()); } + std::vector + HeaderExtensionCapabilitiesFromRtpExtensions( + cricket::RtpHeaderExtensions extensions) { + std::vector capabilities; + for (const auto& extension : extensions) { + webrtc::RtpHeaderExtensionCapability capability( + extension.uri, extension.id, + webrtc::RtpTransceiverDirection::kSendRecv); + capabilities.push_back(capability); + } + return capabilities; + } + + void SetAudioVideoRtpHeaderExtensions(cricket::RtpHeaderExtensions audio_exts, + cricket::RtpHeaderExtensions video_exts, + MediaSessionOptions* opts) { + auto audio_caps = HeaderExtensionCapabilitiesFromRtpExtensions(audio_exts); + auto video_caps = HeaderExtensionCapabilitiesFromRtpExtensions(video_exts); + for (auto& entry : opts->media_description_options) { + switch (entry.type) { + case MEDIA_TYPE_AUDIO: + entry.header_extensions = audio_caps; + break; + case 
MEDIA_TYPE_VIDEO: + entry.header_extensions = video_caps; + break; + default: + break; + } + } + } + protected: UniqueRandomIdGenerator ssrc_generator1; UniqueRandomIdGenerator ssrc_generator2; @@ -910,7 +958,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateRtpDataOffer) { EXPECT_EQ(MEDIA_TYPE_DATA, dcd->type()); EXPECT_EQ(f1_.rtp_data_codecs(), dcd->codecs()); EXPECT_EQ(0U, dcd->first_ssrc()); // no sender is attached. - EXPECT_EQ(cricket::kDataMaxBandwidth, + EXPECT_EQ(cricket::kRtpDataMaxBandwidth, dcd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(dcd->rtcp_mux()); // rtcp-mux defaults on ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite); @@ -1229,6 +1277,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerGcm) { opts.crypto_options.srtp.enable_gcm_crypto_suites = true; std::unique_ptr offer = f1_.CreateOffer(opts, NULL); ASSERT_TRUE(offer.get() != NULL); + for (cricket::ContentInfo& content : offer->contents()) { + auto cryptos = content.media_description()->cryptos(); + PreferGcmCryptoParameters(&cryptos); + content.media_description()->set_cryptos(cryptos); + } std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); const ContentInfo* ac = answer->GetContentByName("audio"); @@ -1335,6 +1388,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerGcm) { f2_.set_secure(SEC_ENABLED); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); ASSERT_TRUE(offer.get() != NULL); + for (cricket::ContentInfo& content : offer->contents()) { + auto cryptos = content.media_description()->cryptos(); + PreferGcmCryptoParameters(&cryptos); + content.media_description()->set_cryptos(cryptos); + } std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); const ContentInfo* ac = answer->GetContentByName("audio"); @@ -1623,13 +1681,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, AudioOfferAnswerWithCryptoDisabled) { TEST_F(MediaSessionDescriptionFactoryTest, TestOfferAnswerWithRtpExtensions) { MediaSessionOptions 
opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1)); - f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1)); - f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2)); - f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2)); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), + MAKE_VECTOR(kVideoRtpExtension1), &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); ASSERT_TRUE(offer.get() != NULL); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), + MAKE_VECTOR(kVideoRtpExtension2), &opts); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); @@ -1673,6 +1731,347 @@ TEST_F(MediaSessionDescriptionFactoryTest, MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02)); // Expected answer. } +TEST_F(MediaSessionDescriptionFactoryTest, + TestNegotiateFrameDescriptorWhenUnexposedLocally) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), + MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts); + std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), &opts); + std::unique_ptr answer = + f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + TestNegotiateFrameDescriptorWhenExposedLocally) { + MediaSessionOptions opts; + 
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), + MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts); + std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr answer = + f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + NegotiateDependencyDescriptorWhenUnexposedLocally) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7); + SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts); + std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + RtpExtension local_tsn(RtpExtension::kTransportSequenceNumberUri, 5); + SetAudioVideoRtpHeaderExtensions({}, {local_tsn}, &opts); + std::unique_ptr answer = + f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + ElementsAre(offer_dd)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + NegotiateDependencyDescriptorWhenExposedLocally) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7); + RtpExtension local_dd(RtpExtension::kDependencyDescriptorUri, 5); + SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts); + std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + SetAudioVideoRtpHeaderExtensions({}, {local_dd}, &opts); + std::unique_ptr answer = + f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + 
GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + ElementsAre(offer_dd)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + NegotiateAbsoluteCaptureTimeWhenUnexposedLocally) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + const cricket::RtpHeaderExtensions offered_extensions = { + RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)}; + const cricket::RtpHeaderExtensions local_extensions = { + RtpExtension(RtpExtension::kTransportSequenceNumberUri, 5)}; + SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions, + &opts); + std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts); + std::unique_ptr answer = + f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(offered_extensions)); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(offered_extensions)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + NegotiateAbsoluteCaptureTimeWhenExposedLocally) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + const cricket::RtpHeaderExtensions offered_extensions = { + RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)}; + const cricket::RtpHeaderExtensions local_extensions = { + RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)}; + SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions, + &opts); + std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts); + std::unique_ptr answer = + f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(offered_extensions)); + EXPECT_THAT( + 
GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + ElementsAreArray(offered_extensions)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + DoNotNegotiateAbsoluteCaptureTimeWhenNotOffered) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + const cricket::RtpHeaderExtensions offered_extensions = { + RtpExtension(RtpExtension::kTransportSequenceNumberUri, 7)}; + const cricket::RtpHeaderExtensions local_extensions = { + RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)}; + SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions, + &opts); + std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts); + std::unique_ptr answer = + f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + IsEmpty()); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + IsEmpty()); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + OffersUnstoppedExtensionsWithAudioVideoExtensionStopped) { + MediaSessionOptions opts; + AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kStopped), + webrtc::RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kSendOnly)}; + AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kStopped), + webrtc::RtpHeaderExtensionCapability("uri3", 7, + RtpTransceiverDirection::kSendOnly)}; + auto offer = f1_.CreateOffer(opts, nullptr); + EXPECT_THAT( + offer->contents(), + ElementsAre( 
+ Property(&ContentInfo::media_description, + Pointee(Property( + &MediaContentDescription::rtp_header_extensions, + ElementsAre(Field(&RtpExtension::uri, "uri2"))))), + Property(&ContentInfo::media_description, + Pointee(Property( + &MediaContentDescription::rtp_header_extensions, + ElementsAre(Field(&RtpExtension::uri, "uri3"))))))); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + OffersUnstoppedExtensionsWithAudioExtensionStopped) { + MediaSessionOptions opts; + AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendOnly), + webrtc::RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kStopped)}; + AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri42", 42, + RtpTransceiverDirection::kSendRecv), + webrtc::RtpHeaderExtensionCapability("uri3", 7, + RtpTransceiverDirection::kSendOnly)}; + auto offer = f1_.CreateOffer(opts, nullptr); + EXPECT_THAT( + offer->contents(), + ElementsAre( + Property(&ContentInfo::media_description, + Pointee(Property( + &MediaContentDescription::rtp_header_extensions, + ElementsAre(Field(&RtpExtension::uri, "uri1"))))), + Property( + &ContentInfo::media_description, + Pointee(Property( + &MediaContentDescription::rtp_header_extensions, + UnorderedElementsAre(Field(&RtpExtension::uri, "uri3"), + Field(&RtpExtension::uri, "uri42"))))))); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + OffersUnstoppedExtensionsWithVideoExtensionStopped) { + MediaSessionOptions opts; + AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + 
webrtc::RtpHeaderExtensionCapability("uri1", 5, + RtpTransceiverDirection::kSendOnly), + webrtc::RtpHeaderExtensionCapability("uri2", 7, + RtpTransceiverDirection::kSendRecv)}; + AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri42", 42, + RtpTransceiverDirection::kSendRecv), + webrtc::RtpHeaderExtensionCapability("uri3", 7, + RtpTransceiverDirection::kStopped)}; + auto offer = f1_.CreateOffer(opts, nullptr); + EXPECT_THAT( + offer->contents(), + ElementsAre( + Property( + &ContentInfo::media_description, + Pointee(Property( + &MediaContentDescription::rtp_header_extensions, + UnorderedElementsAre(Field(&RtpExtension::uri, "uri1"), + Field(&RtpExtension::uri, "uri2"))))), + Property(&ContentInfo::media_description, + Pointee(Property( + &MediaContentDescription::rtp_header_extensions, + ElementsAre(Field(&RtpExtension::uri, "uri42"))))))); +} + +TEST_F(MediaSessionDescriptionFactoryTest, AnswersUnstoppedExtensions) { + MediaSessionOptions opts; + AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 4, + RtpTransceiverDirection::kStopped), + webrtc::RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kSendOnly), + webrtc::RtpHeaderExtensionCapability("uri3", 2, + RtpTransceiverDirection::kRecvOnly), + webrtc::RtpHeaderExtensionCapability("uri4", 1, + RtpTransceiverDirection::kSendRecv)}; + auto offer = f1_.CreateOffer(opts, nullptr); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 4, + RtpTransceiverDirection::kSendOnly), + webrtc::RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kRecvOnly), + webrtc::RtpHeaderExtensionCapability("uri3", 2, + 
RtpTransceiverDirection::kStopped), + webrtc::RtpHeaderExtensionCapability("uri4", 1, + RtpTransceiverDirection::kSendRecv)}; + auto answer = f2_.CreateAnswer(offer.get(), opts, nullptr); + EXPECT_THAT( + answer->contents(), + ElementsAre(Property( + &ContentInfo::media_description, + Pointee(Property(&MediaContentDescription::rtp_header_extensions, + ElementsAre(Field(&RtpExtension::uri, "uri2"), + Field(&RtpExtension::uri, "uri4"))))))); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + AppendsUnstoppedExtensionsToCurrentDescription) { + MediaSessionOptions opts; + AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendRecv)}; + auto offer = f1_.CreateOffer(opts, nullptr); + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 2, + RtpTransceiverDirection::kSendRecv), + webrtc::RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kRecvOnly), + webrtc::RtpHeaderExtensionCapability("uri3", 5, + RtpTransceiverDirection::kStopped), + webrtc::RtpHeaderExtensionCapability("uri4", 6, + RtpTransceiverDirection::kSendRecv)}; + auto offer2 = f1_.CreateOffer(opts, offer.get()); + EXPECT_THAT( + offer2->contents(), + ElementsAre(Property( + &ContentInfo::media_description, + Pointee(Property(&MediaContentDescription::rtp_header_extensions, + ElementsAre(Field(&RtpExtension::uri, "uri1"), + Field(&RtpExtension::uri, "uri2"), + Field(&RtpExtension::uri, "uri4"))))))); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + AppendsStoppedExtensionIfKnownAndPresentInTheOffer) { + MediaSessionOptions opts; + AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + opts.media_description_options.back().header_extensions = { + 
webrtc::RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendRecv), + webrtc::RtpHeaderExtensionCapability("uri2", 1, + RtpTransceiverDirection::kSendRecv)}; + auto offer = f1_.CreateOffer(opts, nullptr); + + // Now add "uri2" as stopped to the options verify that the offer contains + // uri2 since it's already present since before. + opts.media_description_options.back().header_extensions = { + webrtc::RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendRecv), + webrtc::RtpHeaderExtensionCapability("uri2", 2, + RtpTransceiverDirection::kStopped)}; + auto offer2 = f1_.CreateOffer(opts, offer.get()); + EXPECT_THAT( + offer2->contents(), + ElementsAre(Property( + &ContentInfo::media_description, + Pointee(Property(&MediaContentDescription::rtp_header_extensions, + ElementsAre(Field(&RtpExtension::uri, "uri1"), + Field(&RtpExtension::uri, "uri2"))))))); +} + TEST_F(MediaSessionDescriptionFactoryTest, TestOfferAnswerWithEncryptedRtpExtensionsBoth) { MediaSessionOptions opts; @@ -1681,13 +2080,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, f1_.set_enable_encrypted_rtp_header_extensions(true); f2_.set_enable_encrypted_rtp_header_extensions(true); - f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1)); - f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1)); - f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2)); - f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2)); - + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), + MAKE_VECTOR(kVideoRtpExtension1), &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); ASSERT_TRUE(offer.get() != NULL); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), + MAKE_VECTOR(kVideoRtpExtension2), &opts); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); @@ -1712,13 +2110,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, f1_.set_enable_encrypted_rtp_header_extensions(true); - 
f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1)); - f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1)); - f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2)); - f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2)); - + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), + MAKE_VECTOR(kVideoRtpExtension1), &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); ASSERT_TRUE(offer.get() != NULL); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), + MAKE_VECTOR(kVideoRtpExtension2), &opts); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); @@ -1743,13 +2140,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, f2_.set_enable_encrypted_rtp_header_extensions(true); - f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1)); - f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1)); - f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2)); - f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2)); - + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), + MAKE_VECTOR(kVideoRtpExtension1), &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); ASSERT_TRUE(offer.get() != NULL); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), + MAKE_VECTOR(kVideoRtpExtension2), &opts); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); @@ -2087,7 +2483,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) { ASSERT_EQ(1U, data_streams[1].ssrcs.size()); EXPECT_NE(0U, data_streams[1].ssrcs[0]); - EXPECT_EQ(cricket::kDataMaxBandwidth, + EXPECT_EQ(cricket::kRtpDataMaxBandwidth, dcd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(dcd->rtcp_mux()); // rtcp-mux defaults on ASSERT_CRYPTO(dcd, 1U, kDefaultSrtpCryptoSuite); @@ -2447,7 +2843,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) { 
ASSERT_EQ(1U, data_streams[1].ssrcs.size()); EXPECT_NE(0U, data_streams[1].ssrcs[0]); - EXPECT_EQ(cricket::kDataMaxBandwidth, + EXPECT_EQ(cricket::kRtpDataMaxBandwidth, dcd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(dcd->rtcp_mux()); // rtcp-mux defaults on @@ -2901,7 +3297,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtxWithoutApt) { VideoContentDescription* desc = media_desc->as_video(); std::vector codecs = desc->codecs(); for (VideoCodec& codec : codecs) { - if (codec.name.find(cricket::kRtxCodecName) == 0) { + if (absl::StartsWith(codec.name, cricket::kRtxCodecName)) { codec.params.clear(); } } @@ -3151,12 +3547,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1)); - f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1)); - f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2)); - f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2)); - + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), + MAKE_VECTOR(kVideoRtpExtension1), &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), + MAKE_VECTOR(kVideoRtpExtension2), &opts); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), opts, NULL); @@ -3207,9 +3602,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReused) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension3)); - f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension3)); - + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension3), + MAKE_VECTOR(kVideoRtpExtension3), &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); // Since the audio extensions used ID 3 for "both_audio_and_video", so should @@ 
-3246,11 +3640,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReusedEncrypted) { f1_.set_enable_encrypted_rtp_header_extensions(true); f2_.set_enable_encrypted_rtp_header_extensions(true); - f1_.set_audio_rtp_header_extensions( - MAKE_VECTOR(kAudioRtpExtension3ForEncryption)); - f1_.set_video_rtp_header_extensions( - MAKE_VECTOR(kVideoRtpExtension3ForEncryption)); - + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kAudioRtpExtension3ForEncryption), + MAKE_VECTOR(kVideoRtpExtension3ForEncryption), &opts); std::unique_ptr offer = f1_.CreateOffer(opts, NULL); // The extensions that are shared between audio and video should use the same @@ -3288,18 +3680,12 @@ TEST(MediaSessionDescription, CopySessionDescription) { std::make_unique(); acd->set_codecs(MAKE_VECTOR(kAudioCodecs1)); acd->AddLegacyStream(1); - std::unique_ptr acd_passed = - absl::WrapUnique(acd->Copy()); - source.AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp, - std::move(acd_passed)); + source.AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp, acd->Clone()); std::unique_ptr vcd = std::make_unique(); vcd->set_codecs(MAKE_VECTOR(kVideoCodecs1)); vcd->AddLegacyStream(2); - std::unique_ptr vcd_passed = - absl::WrapUnique(vcd->Copy()); - source.AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp, - std::move(vcd_passed)); + source.AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp, vcd->Clone()); std::unique_ptr copy = source.Clone(); ASSERT_TRUE(copy.get() != NULL); @@ -3448,164 +3834,6 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfo(false, options, true); } -TEST_F(MediaSessionDescriptionFactoryTest, - TestTransportInfoOfferBundlesTransportOptions) { - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - - cricket::OpaqueTransportParameters audio_params; - audio_params.protocol = "audio-transport"; - audio_params.parameters = "audio-params"; - FindFirstMediaDescriptionByMid("audio", &options) - 
->transport_options.opaque_parameters = audio_params; - - cricket::OpaqueTransportParameters video_params; - video_params.protocol = "video-transport"; - video_params.parameters = "video-params"; - FindFirstMediaDescriptionByMid("video", &options) - ->transport_options.opaque_parameters = video_params; - - TestTransportInfo(/*offer=*/true, options, /*has_current_desc=*/false); -} - -TEST_F(MediaSessionDescriptionFactoryTest, - TestTransportInfoAnswerBundlesTransportOptions) { - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - - cricket::OpaqueTransportParameters audio_params; - audio_params.protocol = "audio-transport"; - audio_params.parameters = "audio-params"; - FindFirstMediaDescriptionByMid("audio", &options) - ->transport_options.opaque_parameters = audio_params; - - cricket::OpaqueTransportParameters video_params; - video_params.protocol = "video-transport"; - video_params.parameters = "video-params"; - FindFirstMediaDescriptionByMid("video", &options) - ->transport_options.opaque_parameters = video_params; - - TestTransportInfo(/*offer=*/false, options, /*has_current_desc=*/false); -} - -TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolAddedToOffer) { - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly, - &options); - - FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo"; - FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar"; - FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz"; - - std::unique_ptr offer = f1_.CreateOffer(options, nullptr); - - EXPECT_EQ(offer->GetContentDescriptionByName("audio")->alt_protocol(), "foo"); - EXPECT_EQ(offer->GetContentDescriptionByName("video")->alt_protocol(), "bar"); - EXPECT_EQ(offer->GetContentDescriptionByName("data")->alt_protocol(), "baz"); -} - 
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolAddedToAnswer) { - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly, - &options); - - FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo"; - FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar"; - FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz"; - - std::unique_ptr offer = f1_.CreateOffer(options, nullptr); - std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), options, nullptr); - - EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(), - "foo"); - EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(), - "bar"); - EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(), "baz"); -} - -TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolNotInOffer) { - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly, - &options); - - std::unique_ptr offer = f1_.CreateOffer(options, nullptr); - - FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo"; - FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar"; - FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz"; - - std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), options, nullptr); - - EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(), - absl::nullopt); - EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(), - absl::nullopt); - EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(), - absl::nullopt); -} - -TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolDifferentInOffer) { - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - 
AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly, - &options); - - FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "not-foo"; - FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "not-bar"; - FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "not-baz"; - - std::unique_ptr offer = f1_.CreateOffer(options, nullptr); - - FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo"; - FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar"; - FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz"; - - std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), options, nullptr); - - EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(), - absl::nullopt); - EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(), - absl::nullopt); - EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(), - absl::nullopt); -} - -TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolNotInAnswer) { - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly, - &options); - - FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo"; - FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar"; - FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz"; - - std::unique_ptr offer = f1_.CreateOffer(options, nullptr); - - FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = - absl::nullopt; - FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = - absl::nullopt; - FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = - absl::nullopt; - - std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), options, nullptr); - - EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(), - absl::nullopt); - 
EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(), - absl::nullopt); - EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(), - absl::nullopt); -} - // Create an offer with bundle enabled and verify the crypto parameters are // the common set of the available cryptos. TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoWithOfferBundle) { @@ -4416,7 +4644,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestSetAudioCodecs) { // properly. send_codecs[1].channels = 0; - // Alther iLBC receive codec to be lowercase, to test that case conversions + // Alter iLBC receive codec to be lowercase, to test that case conversions // are handled properly. recv_codecs[2].name = "ilbc"; @@ -4635,6 +4863,9 @@ void TestAudioCodecsAnswer(RtpTransceiverDirection offer_direction, kResultSendrecv_SendrecvCodecs); } break; + case RtpTransceiverDirection::kStopped: + // This does not happen in any current test. + RTC_NOTREACHED(); } auto format_codecs = [](const std::vector& codecs) { diff --git a/pc/media_stream.h b/pc/media_stream.h index 34299f46e3..6f16bea1d9 100644 --- a/pc/media_stream.h +++ b/pc/media_stream.h @@ -48,7 +48,7 @@ class MediaStream : public Notifier { template bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track); - std::string id_; + const std::string id_; AudioTrackVector audio_tracks_; VideoTrackVector video_tracks_; }; diff --git a/pc/media_stream_unittest.cc b/pc/media_stream_unittest.cc index b70171dfcb..55226992e0 100644 --- a/pc/media_stream_unittest.cc +++ b/pc/media_stream_unittest.cc @@ -46,7 +46,7 @@ class MockObserver : public ObserverInterface { } } - MOCK_METHOD0(OnChanged, void()); + MOCK_METHOD(void, OnChanged, (), (override)); private: NotifierInterface* notifier_; diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index cf4189b896..d0b785b0f3 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -10,55 +10,54 @@ #include "pc/peer_connection.h" +#include +#include #include 
-#include #include -#include #include #include -#include #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "api/jsep_ice_candidate.h" -#include "api/jsep_session_description.h" -#include "api/media_stream_proxy.h" -#include "api/media_stream_track_proxy.h" -#include "api/rtc_error.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/rtc_event_log_output_file.h" #include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/task_queue/queued_task.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/uma_metrics.h" -#include "api/video/builtin_video_bitrate_allocator_factory.h" -#include "call/call.h" -#include "logging/rtc_event_log/ice_logger.h" +#include "api/video/video_codec_constants.h" +#include "call/audio_state.h" +#include "call/packet_receiver.h" +#include "media/base/media_channel.h" +#include "media/base/media_config.h" #include "media/base/rid_description.h" -#include "media/sctp/sctp_transport.h" -#include "pc/audio_rtp_receiver.h" -#include "pc/audio_track.h" -#include "pc/channel.h" -#include "pc/channel_manager.h" -#include "pc/dtmf_sender.h" -#include "pc/media_stream.h" -#include "pc/media_stream_observer.h" -#include "pc/remote_audio_source.h" -#include "pc/rtp_media_utils.h" +#include "media/base/stream_params.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "p2p/base/connection.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/dtls_transport_internal.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/transport_info.h" +#include "pc/ice_server_parsing.h" #include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/sctp_transport.h" -#include "pc/sctp_utils.h" -#include "pc/sdp_utils.h" -#include "pc/stream_collection.h" -#include "pc/video_rtp_receiver.h" -#include "pc/video_track.h" +#include "pc/simulcast_description.h" +#include 
"pc/webrtc_session_description_factory.h" #include "rtc_base/bind.h" -#include "rtc_base/checks.h" +#include "rtc_base/helpers.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/location.h" #include "rtc_base/logging.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/callback_list.h" +#include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" -#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/unique_id_generator.h" #include "system_wrappers/include/metrics.h" using cricket::ContentInfo; @@ -81,165 +80,14 @@ using cricket::STUN_PORT_TYPE; namespace webrtc { -// Error messages -const char kBundleWithoutRtcpMux[] = - "rtcp-mux must be enabled when BUNDLE " - "is enabled."; -const char kInvalidCandidates[] = "Description contains invalid candidates."; -const char kInvalidSdp[] = "Invalid session description."; -const char kMlineMismatchInAnswer[] = - "The order of m-lines in answer doesn't match order in offer. Rejecting " - "answer."; -const char kMlineMismatchInSubsequentOffer[] = - "The order of m-lines in subsequent offer doesn't match order from " - "previous offer/answer."; -const char kSdpWithoutDtlsFingerprint[] = - "Called with SDP without DTLS fingerprint."; -const char kSdpWithoutSdesCrypto[] = "Called with SDP without SDES crypto."; -const char kSdpWithoutIceUfragPwd[] = - "Called with SDP without ice-ufrag and ice-pwd."; -const char kSessionError[] = "Session error code: "; -const char kSessionErrorDesc[] = "Session error description: "; -const char kDtlsSrtpSetupFailureRtp[] = - "Couldn't set up DTLS-SRTP on RTP channel."; -const char kDtlsSrtpSetupFailureRtcp[] = - "Couldn't set up DTLS-SRTP on RTCP channel."; - namespace { -// Field trials. -// Controls datagram transport support. 
-const char kDatagramTransportFieldTrial[] = "WebRTC-DatagramTransport"; -// Controls datagram transport data channel support. -const char kDatagramTransportDataChannelFieldTrial[] = - "WebRTC-DatagramTransportDataChannels"; - // UMA metric names. -const char kSimulcastVersionApplyLocalDescription[] = - "WebRTC.PeerConnection.Simulcast.ApplyLocalDescription"; -const char kSimulcastVersionApplyRemoteDescription[] = - "WebRTC.PeerConnection.Simulcast.ApplyRemoteDescription"; const char kSimulcastNumberOfEncodings[] = "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings"; -const char kSimulcastDisabled[] = "WebRTC.PeerConnection.Simulcast.Disabled"; - -static const char kDefaultStreamId[] = "default"; -static const char kDefaultAudioSenderId[] = "defaulta0"; -static const char kDefaultVideoSenderId[] = "defaultv0"; - -// The length of RTCP CNAMEs. -static const int kRtcpCnameLength = 16; - -enum { - MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0, - MSG_SET_SESSIONDESCRIPTION_FAILED, - MSG_CREATE_SESSIONDESCRIPTION_FAILED, - MSG_GETSTATS, - MSG_REPORT_USAGE_PATTERN, -}; static const int REPORT_USAGE_PATTERN_DELAY_MS = 60000; -struct SetSessionDescriptionMsg : public rtc::MessageData { - explicit SetSessionDescriptionMsg( - webrtc::SetSessionDescriptionObserver* observer) - : observer(observer) {} - - rtc::scoped_refptr observer; - RTCError error; -}; - -struct CreateSessionDescriptionMsg : public rtc::MessageData { - explicit CreateSessionDescriptionMsg( - webrtc::CreateSessionDescriptionObserver* observer) - : observer(observer) {} - - rtc::scoped_refptr observer; - RTCError error; -}; - -struct GetStatsMsg : public rtc::MessageData { - GetStatsMsg(webrtc::StatsObserver* observer, - webrtc::MediaStreamTrackInterface* track) - : observer(observer), track(track) {} - rtc::scoped_refptr observer; - rtc::scoped_refptr track; -}; - -// Check if we can send |new_stream| on a PeerConnection. 
-bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, - webrtc::MediaStreamInterface* new_stream) { - if (!new_stream || !current_streams) { - return false; - } - if (current_streams->find(new_stream->id()) != nullptr) { - RTC_LOG(LS_ERROR) << "MediaStream with ID " << new_stream->id() - << " is already added."; - return false; - } - return true; -} - -// If the direction is "recvonly" or "inactive", treat the description -// as containing no streams. -// See: https://code.google.com/p/webrtc/issues/detail?id=5054 -std::vector GetActiveStreams( - const cricket::MediaContentDescription* desc) { - return RtpTransceiverDirectionHasSend(desc->direction()) - ? desc->streams() - : std::vector(); -} - -bool IsValidOfferToReceiveMedia(int value) { - typedef PeerConnectionInterface::RTCOfferAnswerOptions Options; - return (value >= Options::kUndefined) && - (value <= Options::kMaxOfferToReceiveMedia); -} - -// Add options to |[audio/video]_media_description_options| from |senders|. -void AddPlanBRtpSenderOptions( - const std::vector>>& senders, - cricket::MediaDescriptionOptions* audio_media_description_options, - cricket::MediaDescriptionOptions* video_media_description_options, - int num_sim_layers) { - for (const auto& sender : senders) { - if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { - if (audio_media_description_options) { - audio_media_description_options->AddAudioSender( - sender->id(), sender->internal()->stream_ids()); - } - } else { - RTC_DCHECK(sender->media_type() == cricket::MEDIA_TYPE_VIDEO); - if (video_media_description_options) { - video_media_description_options->AddVideoSender( - sender->id(), sender->internal()->stream_ids(), {}, - SimulcastLayerList(), num_sim_layers); - } - } - } -} - -// Add options to |session_options| from |rtp_data_channels|. 
-void AddRtpDataChannelOptions( - const std::map>& - rtp_data_channels, - cricket::MediaDescriptionOptions* data_media_description_options) { - if (!data_media_description_options) { - return; - } - // Check for data channels. - for (const auto& kv : rtp_data_channels) { - const DataChannel* channel = kv.second; - if (channel->state() == DataChannel::kConnecting || - channel->state() == DataChannel::kOpen) { - // Legacy RTP data channels are signaled with the track/stream ID set to - // the data channel's label. - data_media_description_options->AddRtpDataChannel(channel->label(), - channel->label()); - } - } -} uint32_t ConvertIceTransportTypeToCandidateFilter( PeerConnectionInterface::IceTransportsType type) { @@ -258,26 +106,6 @@ uint32_t ConvertIceTransportTypeToCandidateFilter( return cricket::CF_NONE; } -std::string GetSignalingStateString( - PeerConnectionInterface::SignalingState state) { - switch (state) { - case PeerConnectionInterface::kStable: - return "kStable"; - case PeerConnectionInterface::kHaveLocalOffer: - return "kHaveLocalOffer"; - case PeerConnectionInterface::kHaveLocalPrAnswer: - return "kHavePrAnswer"; - case PeerConnectionInterface::kHaveRemoteOffer: - return "kHaveRemoteOffer"; - case PeerConnectionInterface::kHaveRemotePrAnswer: - return "kHaveRemotePrAnswer"; - case PeerConnectionInterface::kClosed: - return "kClosed"; - } - RTC_NOTREACHED(); - return ""; -} - IceCandidatePairType GetIceCandidatePairCounter( const cricket::Candidate& local, const cricket::Candidate& remote) { @@ -351,253 +179,6 @@ IceCandidatePairType GetIceCandidatePairCounter( return kIceCandidatePairMax; } -// Logic to decide if an m= section can be recycled. This means that the new -// m= section is not rejected, but the old local or remote m= section is -// rejected. |old_content_one| and |old_content_two| refer to the m= section -// of the old remote and old local descriptions in no particular order. 
-// We need to check both the old local and remote because either -// could be the most current from the latest negotation. -bool IsMediaSectionBeingRecycled(SdpType type, - const ContentInfo& content, - const ContentInfo* old_content_one, - const ContentInfo* old_content_two) { - return type == SdpType::kOffer && !content.rejected && - ((old_content_one && old_content_one->rejected) || - (old_content_two && old_content_two->rejected)); -} - -// Verify that the order of media sections in |new_desc| matches -// |current_desc|. The number of m= sections in |new_desc| should be no -// less than |current_desc|. In the case of checking an answer's -// |new_desc|, the |current_desc| is the last offer that was set as the -// local or remote. In the case of checking an offer's |new_desc| we -// check against the local and remote descriptions stored from the last -// negotiation, because either of these could be the most up to date for -// possible rejected m sections. These are the |current_desc| and -// |secondary_current_desc|. -bool MediaSectionsInSameOrder(const SessionDescription& current_desc, - const SessionDescription* secondary_current_desc, - const SessionDescription& new_desc, - const SdpType type) { - if (current_desc.contents().size() > new_desc.contents().size()) { - return false; - } - - for (size_t i = 0; i < current_desc.contents().size(); ++i) { - const cricket::ContentInfo* secondary_content_info = nullptr; - if (secondary_current_desc && - i < secondary_current_desc->contents().size()) { - secondary_content_info = &secondary_current_desc->contents()[i]; - } - if (IsMediaSectionBeingRecycled(type, new_desc.contents()[i], - ¤t_desc.contents()[i], - secondary_content_info)) { - // For new offer descriptions, if the media section can be recycled, it's - // valid for the MID and media type to change. 
- continue; - } - if (new_desc.contents()[i].name != current_desc.contents()[i].name) { - return false; - } - const MediaContentDescription* new_desc_mdesc = - new_desc.contents()[i].media_description(); - const MediaContentDescription* current_desc_mdesc = - current_desc.contents()[i].media_description(); - if (new_desc_mdesc->type() != current_desc_mdesc->type()) { - return false; - } - } - return true; -} - -bool MediaSectionsHaveSameCount(const SessionDescription& desc1, - const SessionDescription& desc2) { - return desc1.contents().size() == desc2.contents().size(); -} - -void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type, - cricket::MediaType media_type) { - // Array of structs needed to map {KeyExchangeProtocolType, - // cricket::MediaType} to KeyExchangeProtocolMedia without using std::map in - // order to avoid -Wglobal-constructors and -Wexit-time-destructors. - static constexpr struct { - KeyExchangeProtocolType protocol_type; - cricket::MediaType media_type; - KeyExchangeProtocolMedia protocol_media; - } kEnumCounterKeyProtocolMediaMap[] = { - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_AUDIO, - kEnumCounterKeyProtocolMediaTypeDtlsAudio}, - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_VIDEO, - kEnumCounterKeyProtocolMediaTypeDtlsVideo}, - {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_DATA, - kEnumCounterKeyProtocolMediaTypeDtlsData}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_AUDIO, - kEnumCounterKeyProtocolMediaTypeSdesAudio}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_VIDEO, - kEnumCounterKeyProtocolMediaTypeSdesVideo}, - {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_DATA, - kEnumCounterKeyProtocolMediaTypeSdesData}, - }; - - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocol", protocol_type, - kEnumCounterKeyProtocolMax); - - for (const auto& i : kEnumCounterKeyProtocolMediaMap) { - if (i.protocol_type == protocol_type && i.media_type == media_type) { - 
RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocolByMedia", - i.protocol_media, - kEnumCounterKeyProtocolMediaTypeMax); - } - } -} - -void NoteAddIceCandidateResult(int result) { - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.AddIceCandidate", result, - kAddIceCandidateMax); -} - -// Checks that each non-rejected content has SDES crypto keys or a DTLS -// fingerprint, unless it's in a BUNDLE group, in which case only the -// BUNDLE-tag section (first media section/description in the BUNDLE group) -// needs a ufrag and pwd. Mismatches, such as replying with a DTLS fingerprint -// to SDES keys, will be caught in JsepTransport negotiation, and backstopped -// by Channel's |srtp_required| check. -RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) { - const cricket::ContentGroup* bundle = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - for (const cricket::ContentInfo& content_info : desc->contents()) { - if (content_info.rejected) { - continue; - } - // Note what media is used with each crypto protocol, for all sections. - NoteKeyProtocolAndMedia(dtls_enabled ? webrtc::kEnumCounterKeyProtocolDtls - : webrtc::kEnumCounterKeyProtocolSdes, - content_info.media_description()->type()); - const std::string& mid = content_info.name; - if (bundle && bundle->HasContentName(mid) && - mid != *(bundle->FirstContentName())) { - // This isn't the first media section in the BUNDLE group, so it's not - // required to have crypto attributes, since only the crypto attributes - // from the first section actually get used. - continue; - } - - // If the content isn't rejected or bundled into another m= section, crypto - // must be present. - const MediaContentDescription* media = content_info.media_description(); - const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); - if (!media || !tinfo) { - // Something is not right. 
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); - } - if (dtls_enabled) { - if (!tinfo->description.identity_fingerprint) { - RTC_LOG(LS_WARNING) - << "Session description must have DTLS fingerprint if " - "DTLS enabled."; - return RTCError(RTCErrorType::INVALID_PARAMETER, - kSdpWithoutDtlsFingerprint); - } - } else { - if (media->cryptos().empty()) { - RTC_LOG(LS_WARNING) - << "Session description must have SDES when DTLS disabled."; - return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutSdesCrypto); - } - } - } - return RTCError::OK(); -} - -// Checks that each non-rejected content has ice-ufrag and ice-pwd set, unless -// it's in a BUNDLE group, in which case only the BUNDLE-tag section (first -// media section/description in the BUNDLE group) needs a ufrag and pwd. -bool VerifyIceUfragPwdPresent(const SessionDescription* desc) { - const cricket::ContentGroup* bundle = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - for (const cricket::ContentInfo& content_info : desc->contents()) { - if (content_info.rejected) { - continue; - } - const std::string& mid = content_info.name; - if (bundle && bundle->HasContentName(mid) && - mid != *(bundle->FirstContentName())) { - // This isn't the first media section in the BUNDLE group, so it's not - // required to have ufrag/password, since only the ufrag/password from - // the first section actually get used. - continue; - } - - // If the content isn't rejected or bundled into another m= section, - // ice-ufrag and ice-pwd must be present. - const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); - if (!tinfo) { - // Something is not right. - RTC_LOG(LS_ERROR) << kInvalidSdp; - return false; - } - if (tinfo->description.ice_ufrag.empty() || - tinfo->description.ice_pwd.empty()) { - RTC_LOG(LS_ERROR) << "Session description must have ice ufrag and pwd."; - return false; - } - } - return true; -} - -// Returns true if |new_desc| requests an ICE restart (i.e., new ufrag/pwd). 
-bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc, - const SessionDescriptionInterface* new_desc, - const std::string& content_name) { - if (!old_desc) { - return false; - } - const SessionDescription* new_sd = new_desc->description(); - const SessionDescription* old_sd = old_desc->description(); - const ContentInfo* cinfo = new_sd->GetContentByName(content_name); - if (!cinfo || cinfo->rejected) { - return false; - } - // If the content isn't rejected, check if ufrag and password has changed. - const cricket::TransportDescription* new_transport_desc = - new_sd->GetTransportDescriptionByName(content_name); - const cricket::TransportDescription* old_transport_desc = - old_sd->GetTransportDescriptionByName(content_name); - if (!new_transport_desc || !old_transport_desc) { - // No transport description exists. This is not an ICE restart. - return false; - } - if (cricket::IceCredentialsChanged( - old_transport_desc->ice_ufrag, old_transport_desc->ice_pwd, - new_transport_desc->ice_ufrag, new_transport_desc->ice_pwd)) { - RTC_LOG(LS_INFO) << "Remote peer requests ICE restart for " << content_name - << "."; - return true; - } - return false; -} - -// Generates a string error message for SetLocalDescription/SetRemoteDescription -// from an RTCError. -std::string GetSetDescriptionErrorMessage(cricket::ContentSource source, - SdpType type, - const RTCError& error) { - rtc::StringBuilder oss; - oss << "Failed to set " << (source == cricket::CS_LOCAL ? 
"local" : "remote") - << " " << SdpTypeToString(type) << " sdp: " << error.message(); - return oss.Release(); -} - -std::string GetStreamIdsString(rtc::ArrayView stream_ids) { - std::string output = "streams=["; - const char* separator = ""; - for (const auto& stream_id : stream_ids) { - output.append(separator).append(stream_id); - separator = ", "; - } - output.append("]"); - return output; -} absl::optional RTCConfigurationToIceConfigOptionalInt( int rtc_configuration_parameter) { @@ -608,247 +189,83 @@ absl::optional RTCConfigurationToIceConfigOptionalInt( return rtc_configuration_parameter; } -void ReportSimulcastApiVersion(const char* name, - const SessionDescription& session) { - bool has_legacy = false; - bool has_spec_compliant = false; - for (const ContentInfo& content : session.contents()) { - if (!content.media_description()) { - continue; - } - has_spec_compliant |= content.media_description()->HasSimulcast(); - for (const StreamParams& sp : content.media_description()->streams()) { - has_legacy |= sp.has_ssrc_group(cricket::kSimSsrcGroupSemantics); - } +// Check if the changes of IceTransportsType motives an ice restart. 
+bool NeedIceRestart(bool surface_ice_candidates_on_ice_transport_type_changed, + PeerConnectionInterface::IceTransportsType current, + PeerConnectionInterface::IceTransportsType modified) { + if (current == modified) { + return false; } - if (has_legacy) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionLegacy, - kSimulcastApiVersionMax); - } - if (has_spec_compliant) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionSpecCompliant, - kSimulcastApiVersionMax); - } - if (!has_legacy && !has_spec_compliant) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionNone, - kSimulcastApiVersionMax); + if (!surface_ice_candidates_on_ice_transport_type_changed) { + return true; } -} -const ContentInfo* FindTransceiverMSection( - RtpTransceiverProxyWithInternal* transceiver, - const SessionDescriptionInterface* session_description) { - return transceiver->mid() - ? session_description->description()->GetContentByName( - *transceiver->mid()) - : nullptr; + auto current_filter = ConvertIceTransportTypeToCandidateFilter(current); + auto modified_filter = ConvertIceTransportTypeToCandidateFilter(modified); + + // If surface_ice_candidates_on_ice_transport_type_changed is true and we + // extend the filter, then no ice restart is needed. + return (current_filter & modified_filter) != current_filter; } -// Wraps a CreateSessionDescriptionObserver and an OperationsChain operation -// complete callback. When the observer is invoked, the wrapped observer is -// invoked followed by invoking the completion callback. 
-class CreateSessionDescriptionObserverOperationWrapper - : public CreateSessionDescriptionObserver { - public: - CreateSessionDescriptionObserverOperationWrapper( - rtc::scoped_refptr observer, - std::function operation_complete_callback) - : observer_(std::move(observer)), - operation_complete_callback_(std::move(operation_complete_callback)) { - RTC_DCHECK(observer_); - } - ~CreateSessionDescriptionObserverOperationWrapper() override { - RTC_DCHECK(was_called_); +cricket::IceConfig ParseIceConfig( + const PeerConnectionInterface::RTCConfiguration& config) { + cricket::ContinualGatheringPolicy gathering_policy; + switch (config.continual_gathering_policy) { + case PeerConnectionInterface::GATHER_ONCE: + gathering_policy = cricket::GATHER_ONCE; + break; + case PeerConnectionInterface::GATHER_CONTINUALLY: + gathering_policy = cricket::GATHER_CONTINUALLY; + break; + default: + RTC_NOTREACHED(); + gathering_policy = cricket::GATHER_ONCE; } - void OnSuccess(SessionDescriptionInterface* desc) override { - RTC_DCHECK(!was_called_); -#ifdef RTC_DCHECK_IS_ON - was_called_ = true; -#endif // RTC_DCHECK_IS_ON - // Completing the operation before invoking the observer allows the observer - // to execute SetLocalDescription() without delay. 
- operation_complete_callback_(); - observer_->OnSuccess(desc); - } + cricket::IceConfig ice_config; + ice_config.receiving_timeout = RTCConfigurationToIceConfigOptionalInt( + config.ice_connection_receiving_timeout); + ice_config.prioritize_most_likely_candidate_pairs = + config.prioritize_most_likely_ice_candidate_pairs; + ice_config.backup_connection_ping_interval = + RTCConfigurationToIceConfigOptionalInt( + config.ice_backup_candidate_pair_ping_interval); + ice_config.continual_gathering_policy = gathering_policy; + ice_config.presume_writable_when_fully_relayed = + config.presume_writable_when_fully_relayed; + ice_config.surface_ice_candidates_on_ice_transport_type_changed = + config.surface_ice_candidates_on_ice_transport_type_changed; + ice_config.ice_check_interval_strong_connectivity = + config.ice_check_interval_strong_connectivity; + ice_config.ice_check_interval_weak_connectivity = + config.ice_check_interval_weak_connectivity; + ice_config.ice_check_min_interval = config.ice_check_min_interval; + ice_config.ice_unwritable_timeout = config.ice_unwritable_timeout; + ice_config.ice_unwritable_min_checks = config.ice_unwritable_min_checks; + ice_config.ice_inactive_timeout = config.ice_inactive_timeout; + ice_config.stun_keepalive_interval = config.stun_candidate_keepalive_interval; + ice_config.network_preference = config.network_preference; + return ice_config; +} - void OnFailure(RTCError error) override { - RTC_DCHECK(!was_called_); -#ifdef RTC_DCHECK_IS_ON - was_called_ = true; -#endif // RTC_DCHECK_IS_ON - operation_complete_callback_(); - observer_->OnFailure(std::move(error)); - } +// Ensures the configuration doesn't have any parameters with invalid values, +// or values that conflict with other parameters. +// +// Returns RTCError::OK() if there are no issues. 
+RTCError ValidateConfiguration( + const PeerConnectionInterface::RTCConfiguration& config) { + return cricket::P2PTransportChannel::ValidateIceConfig( + ParseIceConfig(config)); +} - private: -#ifdef RTC_DCHECK_IS_ON - bool was_called_ = false; -#endif // RTC_DCHECK_IS_ON - rtc::scoped_refptr observer_; - std::function operation_complete_callback_; -}; +bool HasRtcpMuxEnabled(const cricket::ContentInfo* content) { + return content->media_description()->rtcp_mux(); +} } // namespace -// Used by parameterless SetLocalDescription() to create an offer or answer. -// Upon completion of creating the session description, SetLocalDescription() is -// invoked with the result. -// For consistency with DoSetLocalDescription(), if the PeerConnection is -// destroyed midst operation, we DO NOT inform the -// |set_local_description_observer| that the operation failed. -// TODO(hbos): If/when we process SLD messages in ~PeerConnection, the -// consistent thing would be to inform the observer here. -class PeerConnection::ImplicitCreateSessionDescriptionObserver - : public CreateSessionDescriptionObserver { - public: - ImplicitCreateSessionDescriptionObserver( - rtc::WeakPtr pc, - rtc::scoped_refptr - set_local_description_observer) - : pc_(std::move(pc)), - set_local_description_observer_( - std::move(set_local_description_observer)) {} - ~ImplicitCreateSessionDescriptionObserver() override { - RTC_DCHECK(was_called_); - } - - void SetOperationCompleteCallback( - std::function operation_complete_callback) { - operation_complete_callback_ = std::move(operation_complete_callback); - } - - bool was_called() const { return was_called_; } - - void OnSuccess(SessionDescriptionInterface* desc_ptr) override { - RTC_DCHECK(!was_called_); - std::unique_ptr desc(desc_ptr); - was_called_ = true; - - // Abort early if |pc_| is no longer valid. 
- if (!pc_) { - operation_complete_callback_(); - return; - } - // DoSetLocalDescription() is currently implemented as a synchronous - // operation but where the |set_local_description_observer_|'s callbacks are - // invoked asynchronously in a post to PeerConnection::OnMessage(). - pc_->DoSetLocalDescription(std::move(desc), - std::move(set_local_description_observer_)); - // For backwards-compatability reasons, we declare the operation as - // completed here (rather than in PeerConnection::OnMessage()). This ensures - // that subsequent offer/answer operations can start immediately (without - // waiting for OnMessage()). - operation_complete_callback_(); - } - - void OnFailure(RTCError error) override { - RTC_DCHECK(!was_called_); - was_called_ = true; - - // Abort early if |pc_| is no longer valid. - if (!pc_) { - operation_complete_callback_(); - return; - } - // DoSetLocalDescription() reports its failures in a post. We do the - // same thing here for consistency. - pc_->PostSetSessionDescriptionFailure( - set_local_description_observer_, - RTCError(error.type(), - std::string("SetLocalDescription failed to create " - "session description - ") + - error.message())); - operation_complete_callback_(); - } - - private: - bool was_called_ = false; - rtc::WeakPtr pc_; - rtc::scoped_refptr - set_local_description_observer_; - std::function operation_complete_callback_; -}; - -class PeerConnection::LocalIceCredentialsToReplace { - public: - // Sets the ICE credentials that need restarting to the ICE credentials of - // the current and pending descriptions. 
-  void SetIceCredentialsFromLocalDescriptions(
-      const SessionDescriptionInterface* current_local_description,
-      const SessionDescriptionInterface* pending_local_description) {
-    ice_credentials_.clear();
-    if (current_local_description) {
-      AppendIceCredentialsFromSessionDescription(*current_local_description);
-    }
-    if (pending_local_description) {
-      AppendIceCredentialsFromSessionDescription(*pending_local_description);
-    }
-  }
-
-  void ClearIceCredentials() { ice_credentials_.clear(); }
-
-  // Returns true if we have ICE credentials that need restarting.
-  bool HasIceCredentials() const { return !ice_credentials_.empty(); }
-
-  // Returns true if |local_description| shares no ICE credentials with the
-  // ICE credentials that need restarting.
-  bool SatisfiesIceRestart(
-      const SessionDescriptionInterface& local_description) const {
-    for (const auto& transport_info :
-         local_description.description()->transport_infos()) {
-      if (ice_credentials_.find(std::make_pair(
-              transport_info.description.ice_ufrag,
-              transport_info.description.ice_pwd)) != ice_credentials_.end()) {
-        return false;
-      }
-    }
-    return true;
-  }
-
- private:
-  void AppendIceCredentialsFromSessionDescription(
-      const SessionDescriptionInterface& desc) {
-    for (const auto& transport_info : desc.description()->transport_infos()) {
-      ice_credentials_.insert(
-          std::make_pair(transport_info.description.ice_ufrag,
-                         transport_info.description.ice_pwd));
-    }
-  }
-
-  std::set<std::pair<std::string, std::string>> ice_credentials_;
-};
-
-// Upon completion, posts a task to execute the callback of the
-// SetSessionDescriptionObserver asynchronously on the same thread. At this
-// point, the state of the peer connection might no longer reflect the effects
-// of the SetRemoteDescription operation, as the peer connection could have been
-// modified during the post.
-// TODO(hbos): Remove this class once we remove the version of -// PeerConnectionInterface::SetRemoteDescription() that takes a -// SetSessionDescriptionObserver as an argument. -class PeerConnection::SetRemoteDescriptionObserverAdapter - : public rtc::RefCountedObject { - public: - SetRemoteDescriptionObserverAdapter( - rtc::scoped_refptr pc, - rtc::scoped_refptr wrapper) - : pc_(std::move(pc)), wrapper_(std::move(wrapper)) {} - - // SetRemoteDescriptionObserverInterface implementation. - void OnSetRemoteDescriptionComplete(RTCError error) override { - if (error.ok()) - pc_->PostSetSessionDescriptionSuccess(wrapper_); - else - pc_->PostSetSessionDescriptionFailure(wrapper_, std::move(error)); - } - - private: - rtc::scoped_refptr pc_; - rtc::scoped_refptr wrapper_; -}; - bool PeerConnectionInterface::RTCConfiguration::operator==( const PeerConnectionInterface::RTCConfiguration& o) const { // This static_assert prevents us from accidentally breaking operator==. @@ -896,16 +313,12 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( SdpSemantics sdp_semantics; absl::optional network_preference; bool active_reset_srtp_params; - bool use_media_transport; - bool use_media_transport_for_data_channels; - absl::optional use_datagram_transport; - absl::optional use_datagram_transport_for_data_channels; - absl::optional use_datagram_transport_for_data_channels_receive_only; absl::optional crypto_options; bool offer_extmap_allow_mixed; std::string turn_logging_id; bool enable_implicit_rollback; absl::optional allow_codec_switching; + absl::optional report_usage_pattern_delay_ms; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -961,19 +374,12 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( sdp_semantics == o.sdp_semantics && network_preference == o.network_preference && active_reset_srtp_params == o.active_reset_srtp_params && - use_media_transport == 
o.use_media_transport && - use_media_transport_for_data_channels == - o.use_media_transport_for_data_channels && - use_datagram_transport == o.use_datagram_transport && - use_datagram_transport_for_data_channels == - o.use_datagram_transport_for_data_channels && - use_datagram_transport_for_data_channels_receive_only == - o.use_datagram_transport_for_data_channels_receive_only && crypto_options == o.crypto_options && offer_extmap_allow_mixed == o.offer_extmap_allow_mixed && turn_logging_id == o.turn_logging_id && enable_implicit_rollback == o.enable_implicit_rollback && - allow_codec_switching == o.allow_codec_switching; + allow_codec_switching == o.allow_codec_switching && + report_usage_pattern_delay_ms == o.report_usage_pattern_delay_ms; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -981,86 +387,84 @@ bool PeerConnectionInterface::RTCConfiguration::operator!=( return !(*this == o); } -void PeerConnection::TransceiverStableState::set_newly_created() { - RTC_DCHECK(!has_m_section_); - newly_created_ = true; -} - -void PeerConnection::TransceiverStableState::SetMSectionIfUnset( - absl::optional mid, - absl::optional mline_index) { - if (!has_m_section_) { - mid_ = mid; - mline_index_ = mline_index; - has_m_section_ = true; +rtc::scoped_refptr PeerConnection::Create( + rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + std::unique_ptr event_log, + std::unique_ptr call, + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies) { + RTCError config_error = cricket::P2PTransportChannel::ValidateIceConfig( + ParseIceConfig(configuration)); + if (!config_error.ok()) { + RTC_LOG(LS_ERROR) << "Invalid configuration: " << config_error.message(); + return nullptr; } -} -void PeerConnection::TransceiverStableState::SetRemoteStreamIdsIfUnset( - const std::vector& ids) { - if (!remote_stream_ids_.has_value()) { - remote_stream_ids_ = ids; + if (!dependencies.allocator) { + 
RTC_LOG(LS_ERROR) + << "PeerConnection initialized without a PortAllocator? " + "This shouldn't happen if using PeerConnectionFactory."; + return nullptr; } -} -// Generate a RTCP CNAME when a PeerConnection is created. -std::string GenerateRtcpCname() { - std::string cname; - if (!rtc::CreateRandomString(kRtcpCnameLength, &cname)) { - RTC_LOG(LS_ERROR) << "Failed to generate CNAME."; - RTC_NOTREACHED(); + if (!dependencies.observer) { + // TODO(deadbeef): Why do we do this? + RTC_LOG(LS_ERROR) << "PeerConnection initialized without a " + "PeerConnectionObserver"; + return nullptr; } - return cname; -} - -bool ValidateOfferAnswerOptions( - const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options) { - return IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) && - IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video); -} -// From |rtc_options|, fill parts of |session_options| shared by all generated -// m= sections (in other words, nothing that involves a map/array). -void ExtractSharedMediaSessionOptions( - const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options, - cricket::MediaSessionOptions* session_options) { - session_options->vad_enabled = rtc_options.voice_activity_detection; - session_options->bundle_enabled = rtc_options.use_rtp_mux; - session_options->raw_packetization_for_video = - rtc_options.raw_packetization_for_video; + bool is_unified_plan = + configuration.sdp_semantics == SdpSemantics::kUnifiedPlan; + // The PeerConnection constructor consumes some, but not all, dependencies. 
+ rtc::scoped_refptr pc( + new rtc::RefCountedObject( + context, options, is_unified_plan, std::move(event_log), + std::move(call), dependencies)); + if (!pc->Initialize(configuration, std::move(dependencies))) { + return nullptr; + } + return pc; } -PeerConnection::PeerConnection(PeerConnectionFactory* factory, - std::unique_ptr event_log, - std::unique_ptr call) - : factory_(factory), +PeerConnection::PeerConnection( + rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + bool is_unified_plan, + std::unique_ptr event_log, + std::unique_ptr call, + PeerConnectionDependencies& dependencies) + : context_(context), + options_(options), + observer_(dependencies.observer), + is_unified_plan_(is_unified_plan), event_log_(std::move(event_log)), event_log_ptr_(event_log_.get()), - operations_chain_(rtc::OperationsChain::Create()), - datagram_transport_config_( - field_trial::FindFullName(kDatagramTransportFieldTrial)), - datagram_transport_data_channel_config_( - field_trial::FindFullName(kDatagramTransportDataChannelFieldTrial)), - rtcp_cname_(GenerateRtcpCname()), - local_streams_(StreamCollection::Create()), - remote_streams_(StreamCollection::Create()), + async_resolver_factory_(std::move(dependencies.async_resolver_factory)), + port_allocator_(std::move(dependencies.allocator)), + ice_transport_factory_(std::move(dependencies.ice_transport_factory)), + tls_cert_verifier_(std::move(dependencies.tls_cert_verifier)), call_(std::move(call)), call_ptr_(call_.get()), - local_ice_credentials_to_replace_(new LocalIceCredentialsToReplace()), data_channel_controller_(this), - weak_ptr_factory_(this) {} + message_handler_(signaling_thread()) {} PeerConnection::~PeerConnection() { TRACE_EVENT0("webrtc", "PeerConnection::~PeerConnection"); RTC_DCHECK_RUN_ON(signaling_thread()); - weak_ptr_factory_.InvalidateWeakPtrs(); + if (sdp_handler_) { + sdp_handler_->PrepareForShutdown(); + } // Need to stop transceivers before destroying the stats 
collector because // AudioRtpSender has a reference to the StatsCollector it will update when // stopping. - for (const auto& transceiver : transceivers_) { - transceiver->Stop(); + if (rtp_manager()) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + transceiver->StopInternal(); + } } stats_.reset(nullptr); @@ -1069,14 +473,15 @@ PeerConnection::~PeerConnection() { stats_collector_ = nullptr; } - // Don't destroy BaseChannels until after stats has been cleaned up so that - // the last stats request can still read from the channels. - DestroyAllChannels(); + if (sdp_handler_) { + // Don't destroy BaseChannels until after stats has been cleaned up so that + // the last stats request can still read from the channels. + sdp_handler_->DestroyAllChannels(); - RTC_LOG(LS_INFO) << "Session: " << session_id() << " is destroyed."; + RTC_LOG(LS_INFO) << "Session: " << session_id() << " is destroyed."; - webrtc_session_desc_factory_.reset(); - sctp_factory_.reset(); + sdp_handler_->ResetSessionDescFactory(); + } transport_controller_.reset(); // port_allocator_ lives on the network thread and should be destroyed there. @@ -1087,43 +492,11 @@ PeerConnection::~PeerConnection() { // call_ and event_log_ must be destroyed on the worker thread. worker_thread()->Invoke(RTC_FROM_HERE, [this] { RTC_DCHECK_RUN_ON(worker_thread()); + call_safety_.reset(); call_.reset(); // The event log must outlive call (and any other object that uses it). event_log_.reset(); }); - - // Process all pending notifications in the message queue. If we don't do - // this, requests will linger and not know they succeeded or failed. - rtc::MessageList list; - signaling_thread()->Clear(this, rtc::MQID_ANY, &list); - for (auto& msg : list) { - if (msg.message_id == MSG_CREATE_SESSIONDESCRIPTION_FAILED) { - // Processing CreateOffer() and CreateAnswer() messages ensures their - // observers are invoked even if the PeerConnection is destroyed early. 
- OnMessage(&msg); - } else { - // TODO(hbos): Consider processing all pending messages. This would mean - // that SetLocalDescription() and SetRemoteDescription() observers are - // informed of successes and failures; this is currently NOT the case. - delete msg.pdata; - } - } -} - -void PeerConnection::DestroyAllChannels() { - // Destroy video channels first since they may have a pointer to a voice - // channel. - for (const auto& transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - DestroyTransceiverChannel(transceiver); - } - } - for (const auto& transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - DestroyTransceiverChannel(transceiver); - } - } - DestroyDataChannelTransport(); } bool PeerConnection::Initialize( @@ -1132,32 +505,6 @@ bool PeerConnection::Initialize( RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "PeerConnection::Initialize"); - RTCError config_error = ValidateConfiguration(configuration); - if (!config_error.ok()) { - RTC_LOG(LS_ERROR) << "Invalid configuration: " << config_error.message(); - return false; - } - - if (!dependencies.allocator) { - RTC_LOG(LS_ERROR) - << "PeerConnection initialized without a PortAllocator? " - "This shouldn't happen if using PeerConnectionFactory."; - return false; - } - - if (!dependencies.observer) { - // TODO(deadbeef): Why do we do this? 
- RTC_LOG(LS_ERROR) << "PeerConnection initialized without a " - "PeerConnectionObserver"; - return false; - } - - observer_ = dependencies.observer; - async_resolver_factory_ = std::move(dependencies.async_resolver_factory); - port_allocator_ = std::move(dependencies.allocator); - ice_transport_factory_ = std::move(dependencies.ice_transport_factory); - tls_cert_verifier_ = std::move(dependencies.tls_cert_verifier); - cricket::ServerAddresses stun_servers; std::vector turn_servers; @@ -1180,8 +527,7 @@ bool PeerConnection::Initialize( rtc::Bind(&PeerConnection::InitializePortAllocator_n, this, stun_servers, turn_servers, configuration)); - // If initialization was successful, note if STUN or TURN servers - // were supplied. + // Note if STUN or TURN servers were supplied. if (!stun_servers.empty()) { NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); } @@ -1199,8 +545,6 @@ bool PeerConnection::Initialize( RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family, kPeerConnectionAddressFamilyCounter_Max); - const PeerConnectionFactoryInterface::Options& options = factory_->options(); - // RFC 3264: The numeric value of the session id and version in the // o line MUST be representable with a "64 bit signed integer". // Due to this constraint session id |session_id_| is max limited to @@ -1209,117 +553,45 @@ bool PeerConnection::Initialize( JsepTransportController::Config config; config.redetermine_role_on_ice_restart = configuration.redetermine_role_on_ice_restart; - config.ssl_max_version = factory_->options().ssl_max_version; - config.disable_encryption = options.disable_encryption; + config.ssl_max_version = options_.ssl_max_version; + config.disable_encryption = options_.disable_encryption; config.bundle_policy = configuration.bundle_policy; config.rtcp_mux_policy = configuration.rtcp_mux_policy; - // TODO(bugs.webrtc.org/9891) - Remove options.crypto_options then remove this - // stub. 
+ // TODO(bugs.webrtc.org/9891) - Remove options_.crypto_options then remove + // this stub. config.crypto_options = configuration.crypto_options.has_value() ? *configuration.crypto_options - : options.crypto_options; + : options_.crypto_options; config.transport_observer = this; - // It's safe to pass |this| and using |rtcp_invoker_| and the |call_| pointer - // since the JsepTransportController instance is owned by this PeerConnection - // instance and is destroyed before both |rtcp_invoker_| and the |call_| - // pointer. - config.rtcp_handler = [this](const rtc::CopyOnWriteBuffer& packet, - int64_t packet_time_us) { - RTC_DCHECK_RUN_ON(network_thread()); - rtcp_invoker_.AsyncInvoke( - RTC_FROM_HERE, worker_thread(), [this, packet, packet_time_us] { - RTC_DCHECK_RUN_ON(worker_thread()); - // |call_| is reset on the worker thread in the PeerConnection - // destructor, so we check that it's still valid before propagating - // the packet. - if (call_) { - call_->Receiver()->DeliverPacket(MediaType::ANY, packet, - packet_time_us); - } - }); - }; + config.rtcp_handler = InitializeRtcpCallback(); config.event_log = event_log_ptr_; #if defined(ENABLE_EXTERNAL_AUTH) config.enable_external_auth = true; #endif config.active_reset_srtp_params = configuration.active_reset_srtp_params; - use_datagram_transport_ = datagram_transport_config_.enabled && - configuration.use_datagram_transport.value_or( - datagram_transport_config_.default_value); - use_datagram_transport_for_data_channels_ = - datagram_transport_data_channel_config_.enabled && - configuration.use_datagram_transport_for_data_channels.value_or( - datagram_transport_data_channel_config_.default_value); - use_datagram_transport_for_data_channels_receive_only_ = - configuration.use_datagram_transport_for_data_channels_receive_only - .value_or(datagram_transport_data_channel_config_.receive_only); - if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) { - if (!factory_->media_transport_factory()) { 
- RTC_DCHECK(false) - << "PeerConnecton is initialized with use_datagram_transport = true " - "or use_datagram_transport_for_data_channels = true " - "but media transport factory is not set in PeerConnectionFactory"; - return false; - } - - config.use_datagram_transport = use_datagram_transport_; - config.use_datagram_transport_for_data_channels = - use_datagram_transport_for_data_channels_; - config.use_datagram_transport_for_data_channels_receive_only = - use_datagram_transport_for_data_channels_receive_only_; - config.media_transport_factory = factory_->media_transport_factory(); - } - - // Obtain a certificate from RTCConfiguration if any were provided (optional). - rtc::scoped_refptr certificate; - if (!configuration.certificates.empty()) { - // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of - // just picking the first one. The decision should be made based on the DTLS - // handshake. The DTLS negotiations need to know about all certificates. - certificate = configuration.certificates[0]; - } - - if (options.disable_encryption) { + if (options_.disable_encryption) { dtls_enabled_ = false; } else { // Enable DTLS by default if we have an identity store or a certificate. - dtls_enabled_ = (dependencies.cert_generator || certificate); + dtls_enabled_ = + (dependencies.cert_generator || !configuration.certificates.empty()); // |configuration| can override the default |dtls_enabled_| value. 
if (configuration.enable_dtls_srtp) { dtls_enabled_ = *(configuration.enable_dtls_srtp); } } - sctp_factory_ = factory_->CreateSctpTransportInternalFactory(); - - if (use_datagram_transport_for_data_channels_) { - if (configuration.enable_rtp_data_channel) { - RTC_LOG(LS_ERROR) << "enable_rtp_data_channel and " - "use_datagram_transport_for_data_channels are " - "incompatible and cannot both be set to true"; - return false; - } - if (configuration.enable_dtls_srtp && !*configuration.enable_dtls_srtp) { - RTC_LOG(LS_INFO) << "Using data channel transport with no fallback"; - data_channel_controller_.set_data_channel_type( - cricket::DCT_DATA_CHANNEL_TRANSPORT); - } else { - RTC_LOG(LS_INFO) << "Using data channel transport with fallback to SCTP"; - data_channel_controller_.set_data_channel_type( - cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP); - config.sctp_factory = sctp_factory_.get(); - } - } else if (configuration.enable_rtp_data_channel) { + if (configuration.enable_rtp_data_channel) { // Enable creation of RTP data channels if the kEnableRtpDataChannels is // set. It takes precendence over the disable_sctp_data_channels // PeerConnectionFactoryInterface::Options. data_channel_controller_.set_data_channel_type(cricket::DCT_RTP); } else { // DTLS has to be enabled to use SCTP. 
- if (!options.disable_sctp_data_channels && dtls_enabled_) { + if (!options_.disable_sctp_data_channels && dtls_enabled_) { data_channel_controller_.set_data_channel_type(cricket::DCT_SCTP); - config.sctp_factory = sctp_factory_.get(); + config.sctp_factory = context_->sctp_transport_factory(); } } @@ -1328,8 +600,6 @@ bool PeerConnection::Initialize( transport_controller_.reset(new JsepTransportController( signaling_thread(), network_thread(), port_allocator_.get(), async_resolver_factory_.get(), config)); - transport_controller_->SignalIceConnectionState.connect( - this, &PeerConnection::OnTransportControllerConnectionState); transport_controller_->SignalStandardizedIceConnectionState.connect( this, &PeerConnection::SetStandardizedIceConnectionState); transport_controller_->SignalConnectionState.connect( @@ -1347,83 +617,50 @@ bool PeerConnection::Initialize( transport_controller_->SignalIceCandidatePairChanged.connect( this, &PeerConnection::OnTransportControllerCandidateChanged); - stats_.reset(new StatsCollector(this)); - stats_collector_ = RTCStatsCollector::Create(this); + transport_controller_->SignalIceConnectionState.AddReceiver( + [this](cricket::IceConnectionState s) { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerConnectionState(s); + }); configuration_ = configuration; transport_controller_->SetIceConfig(ParseIceConfig(configuration)); - video_options_.screencast_min_bitrate_kbps = - configuration.screencast_min_bitrate; - audio_options_.combined_audio_video_bwe = - configuration.combined_audio_video_bwe; - - audio_options_.audio_jitter_buffer_max_packets = - configuration.audio_jitter_buffer_max_packets; - - audio_options_.audio_jitter_buffer_fast_accelerate = - configuration.audio_jitter_buffer_fast_accelerate; - - audio_options_.audio_jitter_buffer_min_delay_ms = - configuration.audio_jitter_buffer_min_delay_ms; - - audio_options_.audio_jitter_buffer_enable_rtx_handling = - configuration.audio_jitter_buffer_enable_rtx_handling; - 
- // Whether the certificate generator/certificate is null or not determines - // what PeerConnectionDescriptionFactory will do, so make sure that we give it - // the right instructions by clearing the variables if needed. - if (!dtls_enabled_) { - dependencies.cert_generator.reset(); - certificate = nullptr; - } else if (certificate) { - // Favor generated certificate over the certificate generator. - dependencies.cert_generator.reset(); - } - - webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory( - signaling_thread(), channel_manager(), this, session_id(), - std::move(dependencies.cert_generator), certificate, &ssrc_generator_)); - webrtc_session_desc_factory_->SignalCertificateReady.connect( - this, &PeerConnection::OnCertificateReady); + stats_ = std::make_unique(this); + stats_collector_ = RTCStatsCollector::Create(this); - if (options.disable_encryption) { - webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED); - } + sdp_handler_ = + SdpOfferAnswerHandler::Create(this, configuration, dependencies); - webrtc_session_desc_factory_->set_enable_encrypted_rtp_header_extensions( - GetCryptoOptions().srtp.enable_encrypted_rtp_header_extensions); - webrtc_session_desc_factory_->set_is_unified_plan(IsUnifiedPlan()); + rtp_manager_ = std::make_unique( + IsUnifiedPlan(), signaling_thread(), worker_thread(), channel_manager(), + &usage_pattern_, observer_, stats_.get(), [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + sdp_handler_->UpdateNegotiationNeeded(); + }); // Add default audio/video transceivers for Plan B SDP. 
if (!IsUnifiedPlan()) { - transceivers_.push_back( + rtp_manager()->transceivers()->Add( RtpTransceiverProxyWithInternal::Create( signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO))); - transceivers_.push_back( + rtp_manager()->transceivers()->Add( RtpTransceiverProxyWithInternal::Create( signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO))); } - int delay_ms = - return_histogram_very_quickly_ ? 0 : REPORT_USAGE_PATTERN_DELAY_MS; - signaling_thread()->PostDelayed(RTC_FROM_HERE, delay_ms, this, - MSG_REPORT_USAGE_PATTERN, nullptr); - if (dependencies.video_bitrate_allocator_factory) { - video_bitrate_allocator_factory_ = - std::move(dependencies.video_bitrate_allocator_factory); - } else { - video_bitrate_allocator_factory_ = - CreateBuiltinVideoBitrateAllocatorFactory(); - } - return true; -} + int delay_ms = configuration.report_usage_pattern_delay_ms + ? *configuration.report_usage_pattern_delay_ms + : REPORT_USAGE_PATTERN_DELAY_MS; + message_handler_.RequestUsagePatternReport( + [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ReportUsagePattern(); + }, + delay_ms); -RTCError PeerConnection::ValidateConfiguration( - const RTCConfiguration& config) const { - return cricket::P2PTransportChannel::ValidateIceConfig( - ParseIceConfig(config)); + return true; } rtc::scoped_refptr PeerConnection::local_streams() { @@ -1431,7 +668,7 @@ rtc::scoped_refptr PeerConnection::local_streams() { RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified " "Plan SdpSemantics. Please use GetSenders " "instead."; - return local_streams_; + return sdp_handler_->local_streams(); } rtc::scoped_refptr PeerConnection::remote_streams() { @@ -1439,7 +676,7 @@ rtc::scoped_refptr PeerConnection::remote_streams() { RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified " "Plan SdpSemantics. 
Please use GetReceivers " "instead."; - return remote_streams_; + return sdp_handler_->remote_streams(); } bool PeerConnection::AddStream(MediaStreamInterface* local_stream) { @@ -1447,35 +684,7 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) { RTC_CHECK(!IsUnifiedPlan()) << "AddStream is not available with Unified Plan " "SdpSemantics. Please use AddTrack instead."; TRACE_EVENT0("webrtc", "PeerConnection::AddStream"); - if (IsClosed()) { - return false; - } - if (!CanAddLocalMediaStream(local_streams_, local_stream)) { - return false; - } - - local_streams_->AddStream(local_stream); - MediaStreamObserver* observer = new MediaStreamObserver(local_stream); - observer->SignalAudioTrackAdded.connect(this, - &PeerConnection::OnAudioTrackAdded); - observer->SignalAudioTrackRemoved.connect( - this, &PeerConnection::OnAudioTrackRemoved); - observer->SignalVideoTrackAdded.connect(this, - &PeerConnection::OnVideoTrackAdded); - observer->SignalVideoTrackRemoved.connect( - this, &PeerConnection::OnVideoTrackRemoved); - stream_observers_.push_back(std::unique_ptr(observer)); - - for (const auto& track : local_stream->GetAudioTracks()) { - AddAudioTrack(track.get(), local_stream); - } - for (const auto& track : local_stream->GetVideoTracks()) { - AddVideoTrack(track.get(), local_stream); - } - - stats_->AddStream(local_stream); - UpdateNegotiationNeeded(); - return true; + return sdp_handler_->AddStream(local_stream); } void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { @@ -1484,27 +693,7 @@ void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { "Plan SdpSemantics. 
Please use RemoveTrack " "instead."; TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream"); - if (!IsClosed()) { - for (const auto& track : local_stream->GetAudioTracks()) { - RemoveAudioTrack(track.get(), local_stream); - } - for (const auto& track : local_stream->GetVideoTracks()) { - RemoveVideoTrack(track.get(), local_stream); - } - } - local_streams_->RemoveStream(local_stream); - stream_observers_.erase( - std::remove_if( - stream_observers_.begin(), stream_observers_.end(), - [local_stream](const std::unique_ptr& observer) { - return observer->stream()->id().compare(local_stream->id()) == 0; - }), - stream_observers_.end()); - - if (IsClosed()) { - return; - } - UpdateNegotiationNeeded(); + sdp_handler_->RemoveStream(local_stream); } RTCErrorOr> PeerConnection::AddTrack( @@ -1524,121 +713,19 @@ RTCErrorOr> PeerConnection::AddTrack( LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "PeerConnection is closed."); } - if (FindSenderForTrack(track)) { + if (rtp_manager()->FindSenderForTrack(track)) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "Sender already exists for track " + track->id() + "."); } - auto sender_or_error = - (IsUnifiedPlan() ? 
AddTrackUnifiedPlan(track, stream_ids) - : AddTrackPlanB(track, stream_ids)); + auto sender_or_error = rtp_manager()->AddTrack(track, stream_ids); if (sender_or_error.ok()) { - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); stats_->AddTrack(track); } return sender_or_error; } -RTCErrorOr> -PeerConnection::AddTrackPlanB( - rtc::scoped_refptr track, - const std::vector& stream_ids) { - if (stream_ids.size() > 1u) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, - "AddTrack with more than one stream is not " - "supported with Plan B semantics."); - } - std::vector adjusted_stream_ids = stream_ids; - if (adjusted_stream_ids.empty()) { - adjusted_stream_ids.push_back(rtc::CreateRandomUuid()); - } - cricket::MediaType media_type = - (track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO); - auto new_sender = - CreateSender(media_type, track->id(), track, adjusted_stream_ids, {}); - if (track->kind() == MediaStreamTrackInterface::kAudioKind) { - new_sender->internal()->SetMediaChannel(voice_media_channel()); - GetAudioTransceiver()->internal()->AddSender(new_sender); - const RtpSenderInfo* sender_info = - FindSenderInfo(local_audio_sender_infos_, - new_sender->internal()->stream_ids()[0], track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } - } else { - RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind()); - new_sender->internal()->SetMediaChannel(video_media_channel()); - GetVideoTransceiver()->internal()->AddSender(new_sender); - const RtpSenderInfo* sender_info = - FindSenderInfo(local_video_sender_infos_, - new_sender->internal()->stream_ids()[0], track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } - } - return rtc::scoped_refptr(new_sender); -} - -RTCErrorOr> -PeerConnection::AddTrackUnifiedPlan( - rtc::scoped_refptr track, - const std::vector& stream_ids) { - auto 
transceiver = FindFirstTransceiverForAddedTrack(track); - if (transceiver) { - RTC_LOG(LS_INFO) << "Reusing an existing " - << cricket::MediaTypeToString(transceiver->media_type()) - << " transceiver for AddTrack."; - if (transceiver->direction() == RtpTransceiverDirection::kRecvOnly) { - transceiver->internal()->set_direction( - RtpTransceiverDirection::kSendRecv); - } else if (transceiver->direction() == RtpTransceiverDirection::kInactive) { - transceiver->internal()->set_direction( - RtpTransceiverDirection::kSendOnly); - } - transceiver->sender()->SetTrack(track); - transceiver->internal()->sender_internal()->set_stream_ids(stream_ids); - transceiver->internal()->set_reused_for_addtrack(true); - } else { - cricket::MediaType media_type = - (track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO); - RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type) - << " transceiver in response to a call to AddTrack."; - std::string sender_id = track->id(); - // Avoid creating a sender with an existing ID by generating a random ID. - // This can happen if this is the second time AddTrack has created a sender - // for this track. 
- if (FindSenderById(sender_id)) { - sender_id = rtc::CreateRandomUuid(); - } - auto sender = CreateSender(media_type, sender_id, track, stream_ids, {}); - auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); - transceiver = CreateAndAddTransceiver(sender, receiver); - transceiver->internal()->set_created_by_addtrack(true); - transceiver->internal()->set_direction(RtpTransceiverDirection::kSendRecv); - } - return transceiver->sender(); -} - -rtc::scoped_refptr> -PeerConnection::FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track) { - RTC_DCHECK(track); - for (auto transceiver : transceivers_) { - if (!transceiver->sender()->track() && - cricket::MediaTypeToString(transceiver->media_type()) == - track->kind() && - !transceiver->internal()->has_ever_been_used_to_send() && - !transceiver->stopped()) { - return transceiver; - } - } - return nullptr; -} - bool PeerConnection::RemoveTrack(RtpSenderInterface* sender) { TRACE_EVENT0("webrtc", "PeerConnection::RemoveTrack"); return RemoveTrackNew(sender).ok(); @@ -1670,10 +757,12 @@ RTCError PeerConnection::RemoveTrackNew( } else { bool removed; if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { - removed = GetAudioTransceiver()->internal()->RemoveSender(sender); + removed = rtp_manager()->GetAudioTransceiver()->internal()->RemoveSender( + sender); } else { RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, sender->media_type()); - removed = GetVideoTransceiver()->internal()->RemoveSender(sender); + removed = rtp_manager()->GetVideoTransceiver()->internal()->RemoveSender( + sender); } if (!removed) { LOG_AND_RETURN_ERROR( @@ -1681,19 +770,14 @@ RTCError PeerConnection::RemoveTrackNew( "Couldn't find sender " + sender->id() + " to remove."); } } - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); return RTCError::OK(); } rtc::scoped_refptr> PeerConnection::FindTransceiverBySender( rtc::scoped_refptr sender) { - for (auto transceiver : transceivers_) { - if (transceiver->sender() == 
sender) { - return transceiver; - } - } - return nullptr; + return rtp_manager()->transceivers()->FindBySender(sender); } RTCErrorOr> @@ -1749,6 +833,7 @@ PeerConnection::AddTransceiver( rtc::scoped_refptr track, const RtpTransceiverInit& init, bool update_negotiation_needed) { + RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK((media_type == cricket::MEDIA_TYPE_AUDIO || media_type == cricket::MEDIA_TYPE_VIDEO)); if (track) { @@ -1828,96 +913,27 @@ PeerConnection::AddTransceiver( << " transceiver in response to a call to AddTransceiver."; // Set the sender ID equal to the track ID if the track is specified unless // that sender ID is already in use. - std::string sender_id = - (track && !FindSenderById(track->id()) ? track->id() - : rtc::CreateRandomUuid()); - auto sender = CreateSender(media_type, sender_id, track, init.stream_ids, - parameters.encodings); - auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); - auto transceiver = CreateAndAddTransceiver(sender, receiver); + std::string sender_id = (track && !rtp_manager()->FindSenderById(track->id()) + ? 
track->id() + : rtc::CreateRandomUuid()); + auto sender = rtp_manager()->CreateSender( + media_type, sender_id, track, init.stream_ids, parameters.encodings); + auto receiver = + rtp_manager()->CreateReceiver(media_type, rtc::CreateRandomUuid()); + auto transceiver = rtp_manager()->CreateAndAddTransceiver(sender, receiver); transceiver->internal()->set_direction(init.direction); if (update_negotiation_needed) { - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); } return rtc::scoped_refptr(transceiver); } -rtc::scoped_refptr> -PeerConnection::CreateSender( - cricket::MediaType media_type, - const std::string& id, - rtc::scoped_refptr track, - const std::vector& stream_ids, - const std::vector& send_encodings) { - RTC_DCHECK_RUN_ON(signaling_thread()); - rtc::scoped_refptr> sender; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - RTC_DCHECK(!track || - (track->kind() == MediaStreamTrackInterface::kAudioKind)); - sender = RtpSenderProxyWithInternal::Create( - signaling_thread(), - AudioRtpSender::Create(worker_thread(), id, stats_.get(), this)); - NoteUsageEvent(UsageEvent::AUDIO_ADDED); - } else { - RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); - RTC_DCHECK(!track || - (track->kind() == MediaStreamTrackInterface::kVideoKind)); - sender = RtpSenderProxyWithInternal::Create( - signaling_thread(), VideoRtpSender::Create(worker_thread(), id, this)); - NoteUsageEvent(UsageEvent::VIDEO_ADDED); - } - bool set_track_succeeded = sender->SetTrack(track); - RTC_DCHECK(set_track_succeeded); - sender->internal()->set_stream_ids(stream_ids); - sender->internal()->set_init_send_encodings(send_encodings); - return sender; -} - -rtc::scoped_refptr> -PeerConnection::CreateReceiver(cricket::MediaType media_type, - const std::string& receiver_id) { - rtc::scoped_refptr> - receiver; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), new AudioRtpReceiver(worker_thread(), receiver_id, - 
std::vector({}))); - NoteUsageEvent(UsageEvent::AUDIO_ADDED); - } else { - RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); - receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), new VideoRtpReceiver(worker_thread(), receiver_id, - std::vector({}))); - NoteUsageEvent(UsageEvent::VIDEO_ADDED); - } - return receiver; -} - -rtc::scoped_refptr> -PeerConnection::CreateAndAddTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> - receiver) { - // Ensure that the new sender does not have an ID that is already in use by - // another sender. - // Allow receiver IDs to conflict since those come from remote SDP (which - // could be invalid, but should not cause a crash). - RTC_DCHECK(!FindSenderById(sender->id())); - auto transceiver = RtpTransceiverProxyWithInternal::Create( - signaling_thread(), - new RtpTransceiver(sender, receiver, channel_manager())); - transceivers_.push_back(transceiver); - transceiver->internal()->SignalNegotiationNeeded.connect( - this, &PeerConnection::OnNegotiationNeeded); - return transceiver; -} - void PeerConnection::OnNegotiationNeeded() { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsClosed()); - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); } rtc::scoped_refptr PeerConnection::CreateSender( @@ -1948,18 +964,18 @@ rtc::scoped_refptr PeerConnection::CreateSender( rtc::scoped_refptr> new_sender; if (kind == MediaStreamTrackInterface::kAudioKind) { auto audio_sender = AudioRtpSender::Create( - worker_thread(), rtc::CreateRandomUuid(), stats_.get(), this); - audio_sender->SetMediaChannel(voice_media_channel()); + worker_thread(), rtc::CreateRandomUuid(), stats_.get(), rtp_manager()); + audio_sender->SetMediaChannel(rtp_manager()->voice_media_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), audio_sender); - GetAudioTransceiver()->internal()->AddSender(new_sender); + rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender); } else if (kind 
== MediaStreamTrackInterface::kVideoKind) { - auto video_sender = - VideoRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(), this); - video_sender->SetMediaChannel(video_media_channel()); + auto video_sender = VideoRtpSender::Create( + worker_thread(), rtc::CreateRandomUuid(), rtp_manager()); + video_sender->SetMediaChannel(rtp_manager()->video_media_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), video_sender); - GetVideoTransceiver()->internal()->AddSender(new_sender); + rtp_manager()->GetVideoTransceiver()->internal()->AddSender(new_sender); } else { RTC_LOG(LS_ERROR) << "CreateSender called with invalid kind: " << kind; return nullptr; @@ -1973,54 +989,29 @@ std::vector> PeerConnection::GetSenders() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> ret; - for (const auto& sender : GetSendersInternal()) { + for (const auto& sender : rtp_manager()->GetSendersInternal()) { ret.push_back(sender); } return ret; } -std::vector>> -PeerConnection::GetSendersInternal() const { - std::vector>> - all_senders; - for (const auto& transceiver : transceivers_) { - auto senders = transceiver->internal()->senders(); - all_senders.insert(all_senders.end(), senders.begin(), senders.end()); - } - return all_senders; -} - std::vector> PeerConnection::GetReceivers() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> ret; - for (const auto& receiver : GetReceiversInternal()) { + for (const auto& receiver : rtp_manager()->GetReceiversInternal()) { ret.push_back(receiver); } return ret; } -std::vector< - rtc::scoped_refptr>> -PeerConnection::GetReceiversInternal() const { - std::vector< - rtc::scoped_refptr>> - all_receivers; - for (const auto& transceiver : transceivers_) { - auto receivers = transceiver->internal()->receivers(); - all_receivers.insert(all_receivers.end(), receivers.begin(), - receivers.end()); - } - return all_receivers; -} - std::vector> PeerConnection::GetTransceivers() const { 
RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(IsUnifiedPlan()) << "GetTransceivers is only supported with Unified Plan SdpSemantics."; std::vector> all_transceivers; - for (const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { all_transceivers.push_back(transceiver); } return all_transceivers; @@ -2044,8 +1035,7 @@ bool PeerConnection::GetStats(StatsObserver* observer, << track->id(); return false; } - signaling_thread()->Post(RTC_FROM_HERE, this, MSG_GETSTATS, - new GetStatsMsg(observer, track)); + message_handler_.PostGetStats(observer, stats_.get(), track); return true; } @@ -2066,7 +1056,8 @@ void PeerConnection::GetStats( RTC_DCHECK(stats_collector_); rtc::scoped_refptr internal_sender; if (selector) { - for (const auto& proxy_transceiver : transceivers_) { + for (const auto& proxy_transceiver : + rtp_manager()->transceivers()->List()) { for (const auto& proxy_sender : proxy_transceiver->internal()->senders()) { if (proxy_sender == selector) { @@ -2095,7 +1086,8 @@ void PeerConnection::GetStats( RTC_DCHECK(stats_collector_); rtc::scoped_refptr internal_receiver; if (selector) { - for (const auto& proxy_transceiver : transceivers_) { + for (const auto& proxy_transceiver : + rtp_manager()->transceivers()->List()) { for (const auto& proxy_receiver : proxy_transceiver->internal()->receivers()) { if (proxy_receiver == selector) { @@ -2117,7 +1109,7 @@ void PeerConnection::GetStats( PeerConnectionInterface::SignalingState PeerConnection::signaling_state() { RTC_DCHECK_RUN_ON(signaling_thread()); - return signaling_state_; + return sdp_handler_->signaling_state(); } PeerConnectionInterface::IceConnectionState @@ -2144,6 +1136,23 @@ PeerConnection::ice_gathering_state() { return ice_gathering_state_; } +absl::optional PeerConnection::can_trickle_ice_candidates() { + RTC_DCHECK_RUN_ON(signaling_thread()); + const SessionDescriptionInterface* description = current_remote_description(); + if (!description) 
{ + description = pending_remote_description(); + } + if (!description) { + return absl::nullopt; + } + // TODO(bugs.webrtc.org/7443): Change to retrieve from session-level option. + if (description->description()->transport_infos().size() < 1) { + return absl::nullopt; + } + return description->description()->transport_infos()[0].description.HasOption( + "trickle"); +} + rtc::scoped_refptr PeerConnection::CreateDataChannel( const std::string& label, const DataChannelInit* config) { @@ -2157,7 +1166,7 @@ rtc::scoped_refptr PeerConnection::CreateDataChannel( internal_config.reset(new InternalDataChannelInit(*config)); } rtc::scoped_refptr channel( - data_channel_controller_.InternalCreateDataChannel( + data_channel_controller_.InternalCreateDataChannelWithProxy( label, internal_config.get())); if (!channel.get()) { return nullptr; @@ -2166,3614 +1175,598 @@ rtc::scoped_refptr PeerConnection::CreateDataChannel( // Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or // the first SCTP DataChannel. if (data_channel_type() == cricket::DCT_RTP || first_datachannel) { - UpdateNegotiationNeeded(); + sdp_handler_->UpdateNegotiationNeeded(); } NoteUsageEvent(UsageEvent::DATA_ADDED); - return DataChannelProxy::Create(signaling_thread(), channel.get()); + return channel; } void PeerConnection::RestartIce() { RTC_DCHECK_RUN_ON(signaling_thread()); - local_ice_credentials_to_replace_->SetIceCredentialsFromLocalDescriptions( - current_local_description_.get(), pending_local_description_.get()); - UpdateNegotiationNeeded(); + sdp_handler_->RestartIce(); } void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. 
- operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - options](std::function operations_chain_callback) { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - observer_refptr->OnFailure( - RTCError(RTCErrorType::INTERNAL_ERROR, - "CreateOffer failed because the session was shut down")); - operations_chain_callback(); - return; - } - // The operation completes asynchronously when the wrapper is invoked. - rtc::scoped_refptr - observer_wrapper(new rtc::RefCountedObject< - CreateSessionDescriptionObserverOperationWrapper>( - std::move(observer_refptr), - std::move(operations_chain_callback))); - this_weak_ptr->DoCreateOffer(options, observer_wrapper); - }); + sdp_handler_->CreateOffer(observer, options); } -void PeerConnection::DoCreateOffer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer) { +void PeerConnection::CreateAnswer(CreateSessionDescriptionObserver* observer, + const RTCOfferAnswerOptions& options) { RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoCreateOffer"); - - if (!observer) { - RTC_LOG(LS_ERROR) << "CreateOffer - observer is NULL."; - return; - } - - if (IsClosed()) { - std::string error = "CreateOffer called when PeerConnection is closed."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); - return; - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. 
- if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "CreateOffer: " << error_message; - PostCreateSessionDescriptionFailure( - observer, - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - - if (!ValidateOfferAnswerOptions(options)) { - std::string error = "CreateOffer called with invalid options."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_PARAMETER, std::move(error))); - return; - } - - // Legacy handling for offer_to_receive_audio and offer_to_receive_video. - // Specified in WebRTC section 4.4.3.2 "Legacy configuration extensions". - if (IsUnifiedPlan()) { - RTCError error = HandleLegacyOfferOptions(options); - if (!error.ok()) { - PostCreateSessionDescriptionFailure(observer, std::move(error)); - return; - } - } - - cricket::MediaSessionOptions session_options; - GetOptionsForOffer(options, &session_options); - webrtc_session_desc_factory_->CreateOffer(observer, options, session_options); -} - -RTCError PeerConnection::HandleLegacyOfferOptions( - const RTCOfferAnswerOptions& options) { - RTC_DCHECK(IsUnifiedPlan()); - - if (options.offer_to_receive_audio == 0) { - RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MEDIA_TYPE_AUDIO); - } else if (options.offer_to_receive_audio == 1) { - AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_AUDIO); - } else if (options.offer_to_receive_audio > 1) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, - "offer_to_receive_audio > 1 is not supported."); - } - - if (options.offer_to_receive_video == 0) { - RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MEDIA_TYPE_VIDEO); - } else if (options.offer_to_receive_video == 1) { - AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); - } else if (options.offer_to_receive_video > 1) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, - 
"offer_to_receive_video > 1 is not supported."); - } - - return RTCError::OK(); + sdp_handler_->CreateAnswer(observer, options); } -void PeerConnection::RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MediaType media_type) { - for (const auto& transceiver : GetReceivingTransceiversOfType(media_type)) { - RtpTransceiverDirection new_direction = - RtpTransceiverDirectionWithRecvSet(transceiver->direction(), false); - if (new_direction != transceiver->direction()) { - RTC_LOG(LS_INFO) << "Changing " << cricket::MediaTypeToString(media_type) - << " transceiver (MID=" - << transceiver->mid().value_or("") << ") from " - << RtpTransceiverDirectionToString( - transceiver->direction()) - << " to " - << RtpTransceiverDirectionToString(new_direction) - << " since CreateOffer specified offer_to_receive=0"; - transceiver->internal()->set_direction(new_direction); - } - } +void PeerConnection::SetLocalDescription( + SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc_ptr) { + RTC_DCHECK_RUN_ON(signaling_thread()); + sdp_handler_->SetLocalDescription(observer, desc_ptr); } -void PeerConnection::AddUpToOneReceivingTransceiverOfType( - cricket::MediaType media_type) { +void PeerConnection::SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - if (GetReceivingTransceiversOfType(media_type).empty()) { - RTC_LOG(LS_INFO) - << "Adding one recvonly " << cricket::MediaTypeToString(media_type) - << " transceiver since CreateOffer specified offer_to_receive=1"; - RtpTransceiverInit init; - init.direction = RtpTransceiverDirection::kRecvOnly; - AddTransceiver(media_type, nullptr, init, - /*update_negotiation_needed=*/false); - } -} - -std::vector>> -PeerConnection::GetReceivingTransceiversOfType(cricket::MediaType media_type) { - std::vector< - rtc::scoped_refptr>> - receiving_transceivers; - for (const auto& transceiver : transceivers_) { - if (!transceiver->stopped() && 
transceiver->media_type() == media_type && - RtpTransceiverDirectionHasRecv(transceiver->direction())) { - receiving_transceivers.push_back(transceiver); - } - } - return receiving_transceivers; + sdp_handler_->SetLocalDescription(std::move(desc), observer); } -void PeerConnection::CreateAnswer(CreateSessionDescriptionObserver* observer, - const RTCOfferAnswerOptions& options) { +void PeerConnection::SetLocalDescription( + SetSessionDescriptionObserver* observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - options](std::function operations_chain_callback) { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - observer_refptr->OnFailure(RTCError( - RTCErrorType::INTERNAL_ERROR, - "CreateAnswer failed because the session was shut down")); - operations_chain_callback(); - return; - } - // The operation completes asynchronously when the wrapper is invoked. 
- rtc::scoped_refptr - observer_wrapper(new rtc::RefCountedObject< - CreateSessionDescriptionObserverOperationWrapper>( - std::move(observer_refptr), - std::move(operations_chain_callback))); - this_weak_ptr->DoCreateAnswer(options, observer_wrapper); - }); + sdp_handler_->SetLocalDescription(observer); } -void PeerConnection::DoCreateAnswer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer) { +void PeerConnection::SetLocalDescription( + rtc::scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoCreateAnswer"); - if (!observer) { - RTC_LOG(LS_ERROR) << "CreateAnswer - observer is NULL."; - return; - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. - if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "CreateAnswer: " << error_message; - PostCreateSessionDescriptionFailure( - observer, - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - - if (!(signaling_state_ == kHaveRemoteOffer || - signaling_state_ == kHaveLocalPrAnswer)) { - std::string error = - "PeerConnection cannot create an answer in a state other than " - "have-remote-offer or have-local-pranswer."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailure( - observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); - return; - } - - // The remote description should be set if we're in the right state. - RTC_DCHECK(remote_description()); - - if (IsUnifiedPlan()) { - if (options.offer_to_receive_audio != RTCOfferAnswerOptions::kUndefined) { - RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_audio is not " - "supported with Unified Plan semantics. 
Use the " - "RtpTransceiver API instead."; - } - if (options.offer_to_receive_video != RTCOfferAnswerOptions::kUndefined) { - RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_video is not " - "supported with Unified Plan semantics. Use the " - "RtpTransceiver API instead."; - } - } - - cricket::MediaSessionOptions session_options; - GetOptionsForAnswer(options, &session_options); - - webrtc_session_desc_factory_->CreateAnswer(observer, session_options); + sdp_handler_->SetLocalDescription(observer); } -void PeerConnection::SetLocalDescription( +void PeerConnection::SetRemoteDescription( SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc_ptr) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - desc = std::unique_ptr(desc_ptr)]( - std::function operations_chain_callback) mutable { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - // For consistency with DoSetLocalDescription(), we DO NOT inform the - // |observer_refptr| that the operation failed in this case. - // TODO(hbos): If/when we process SLD messages in ~PeerConnection, - // the consistent thing would be to inform the observer here. - operations_chain_callback(); - return; - } - this_weak_ptr->DoSetLocalDescription(std::move(desc), - std::move(observer_refptr)); - // DoSetLocalDescription() is currently implemented as a synchronous - // operation but where the |observer|'s callbacks are invoked - // asynchronously in a post to OnMessage(). - // For backwards-compatability reasons, we declare the operation as - // completed here (rather than in OnMessage()). 
This ensures that - // subsequent offer/answer operations can start immediately (without - // waiting for OnMessage()). - operations_chain_callback(); - }); + sdp_handler_->SetRemoteDescription(observer, desc_ptr); } -void PeerConnection::SetLocalDescription( - SetSessionDescriptionObserver* observer) { +void PeerConnection::SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - // The |create_sdp_observer| handles performing DoSetLocalDescription() with - // the resulting description as well as completing the operation. - rtc::scoped_refptr - create_sdp_observer( - new rtc::RefCountedObject( - weak_ptr_factory_.GetWeakPtr(), - rtc::scoped_refptr(observer))); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - create_sdp_observer](std::function operations_chain_callback) { - // The |create_sdp_observer| is responsible for completing the - // operation. - create_sdp_observer->SetOperationCompleteCallback( - std::move(operations_chain_callback)); - // Abort early if |this_weak_ptr| is no longer valid. This triggers the - // same code path as if DoCreateOffer() or DoCreateAnswer() failed. - if (!this_weak_ptr) { - create_sdp_observer->OnFailure(RTCError( - RTCErrorType::INTERNAL_ERROR, - "SetLocalDescription failed because the session was shut down")); - return; - } - switch (this_weak_ptr->signaling_state()) { - case PeerConnectionInterface::kStable: - case PeerConnectionInterface::kHaveLocalOffer: - case PeerConnectionInterface::kHaveRemotePrAnswer: - // TODO(hbos): If [LastCreatedOffer] exists and still represents the - // current state of the system, use that instead of creating another - // offer. 
- this_weak_ptr->DoCreateOffer(RTCOfferAnswerOptions(), - create_sdp_observer); - break; - case PeerConnectionInterface::kHaveLocalPrAnswer: - case PeerConnectionInterface::kHaveRemoteOffer: - // TODO(hbos): If [LastCreatedAnswer] exists and still represents - // the current state of the system, use that instead of creating - // another answer. - this_weak_ptr->DoCreateAnswer(RTCOfferAnswerOptions(), - create_sdp_observer); - break; - case PeerConnectionInterface::kClosed: - create_sdp_observer->OnFailure(RTCError( - RTCErrorType::INVALID_STATE, - "SetLocalDescription called when PeerConnection is closed.")); - break; - } - }); + sdp_handler_->SetRemoteDescription(std::move(desc), observer); } -void PeerConnection::DoSetLocalDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) { +PeerConnectionInterface::RTCConfiguration PeerConnection::GetConfiguration() { RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoSetLocalDescription"); + return configuration_; +} - if (!observer) { - RTC_LOG(LS_ERROR) << "SetLocalDescription - observer is NULL."; - return; +RTCError PeerConnection::SetConfiguration( + const RTCConfiguration& configuration) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "PeerConnection::SetConfiguration"); + if (IsClosed()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, + "SetConfiguration: PeerConnection is closed."); } - if (!desc) { - PostSetSessionDescriptionFailure( - observer, - RTCError(RTCErrorType::INTERNAL_ERROR, "SessionDescription is NULL.")); - return; + // According to JSEP, after setLocalDescription, changing the candidate pool + // size is not allowed, and changing the set of ICE servers will not result + // in new candidates being gathered. 
+ if (local_description() && configuration.ice_candidate_pool_size != + configuration_.ice_candidate_pool_size) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "Can't change candidate pool size after calling " + "SetLocalDescription."); } - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. - if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "SetLocalDescription: " << error_message; - PostSetSessionDescriptionFailure( - observer, - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; + if (local_description() && + configuration.crypto_options != configuration_.crypto_options) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "Can't change crypto_options after calling " + "SetLocalDescription."); } - // For SLD we support only explicit rollback. - if (desc->GetType() == SdpType::kRollback) { - if (IsUnifiedPlan()) { - RTCError error = Rollback(desc->GetType()); - if (error.ok()) { - PostSetSessionDescriptionSuccess(observer); - } else { - PostSetSessionDescriptionFailure(observer, std::move(error)); - } - } else { - PostSetSessionDescriptionFailure( - observer, RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Rollback not supported in Plan B")); - } - return; + // The simplest (and most future-compatible) way to tell if the config was + // modified in an invalid way is to copy each property we do support + // modifying, then use operator==. There are far more properties we don't + // support modifying than those we do, and more could be added. 
+ RTCConfiguration modified_config = configuration_; + modified_config.servers = configuration.servers; + modified_config.type = configuration.type; + modified_config.ice_candidate_pool_size = + configuration.ice_candidate_pool_size; + modified_config.prune_turn_ports = configuration.prune_turn_ports; + modified_config.turn_port_prune_policy = configuration.turn_port_prune_policy; + modified_config.surface_ice_candidates_on_ice_transport_type_changed = + configuration.surface_ice_candidates_on_ice_transport_type_changed; + modified_config.ice_check_min_interval = configuration.ice_check_min_interval; + modified_config.ice_check_interval_strong_connectivity = + configuration.ice_check_interval_strong_connectivity; + modified_config.ice_check_interval_weak_connectivity = + configuration.ice_check_interval_weak_connectivity; + modified_config.ice_unwritable_timeout = configuration.ice_unwritable_timeout; + modified_config.ice_unwritable_min_checks = + configuration.ice_unwritable_min_checks; + modified_config.ice_inactive_timeout = configuration.ice_inactive_timeout; + modified_config.stun_candidate_keepalive_interval = + configuration.stun_candidate_keepalive_interval; + modified_config.turn_customizer = configuration.turn_customizer; + modified_config.network_preference = configuration.network_preference; + modified_config.active_reset_srtp_params = + configuration.active_reset_srtp_params; + modified_config.turn_logging_id = configuration.turn_logging_id; + modified_config.allow_codec_switching = configuration.allow_codec_switching; + if (configuration != modified_config) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "Modifying the configuration in an unsupported way."); } - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL); - if (!error.ok()) { - std::string error_message = GetSetDescriptionErrorMessage( - cricket::CS_LOCAL, desc->GetType(), error); - RTC_LOG(LS_ERROR) << error_message; - PostSetSessionDescriptionFailure( 
- observer, - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; + // Validate the modified configuration. + RTCError validate_error = ValidateConfiguration(modified_config); + if (!validate_error.ok()) { + return validate_error; } - // Grab the description type before moving ownership to ApplyLocalDescription, - // which may destroy it before returning. - const SdpType type = desc->GetType(); - - error = ApplyLocalDescription(std::move(desc)); - // |desc| may be destroyed at this point. - - if (!error.ok()) { - // If ApplyLocalDescription fails, the PeerConnection could be in an - // inconsistent state, so act conservatively here and set the session error - // so that future calls to SetLocalDescription/SetRemoteDescription fail. - SetSessionError(SessionError::kContent, error.message()); - std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_LOCAL, type, error); - RTC_LOG(LS_ERROR) << error_message; - PostSetSessionDescriptionFailure( - observer, - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; + // Note that this isn't possible through chromium, since it's an unsigned + // short in WebIDL. + if (configuration.ice_candidate_pool_size < 0 || + configuration.ice_candidate_pool_size > static_cast(UINT16_MAX)) { + return RTCError(RTCErrorType::INVALID_RANGE); } - RTC_DCHECK(local_description()); - - PostSetSessionDescriptionSuccess(observer); - // MaybeStartGathering needs to be called after posting - // MSG_SET_SESSIONDESCRIPTION_SUCCESS, so that we don't signal any candidates - // before signaling that SetLocalDescription completed. - transport_controller_->MaybeStartGathering(); - - if (local_description()->GetType() == SdpType::kAnswer) { - // TODO(deadbeef): We already had to hop to the network thread for - // MaybeStartGathering... 
- network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator_.get())); - // Make UMA notes about what was agreed to. - ReportNegotiatedSdpSemantics(*local_description()); + // Parse ICE servers before hopping to network thread. + cricket::ServerAddresses stun_servers; + std::vector turn_servers; + RTCErrorType parse_error = + ParseIceServers(configuration.servers, &stun_servers, &turn_servers); + if (parse_error != RTCErrorType::NONE) { + return RTCError(parse_error); } - - if (IsUnifiedPlan()) { - bool was_negotiation_needed = is_negotiation_needed_; - UpdateNegotiationNeeded(); - if (signaling_state() == kStable && was_negotiation_needed && - is_negotiation_needed_) { - Observer()->OnRenegotiationNeeded(); - } + // Add the turn logging id to all turn servers + for (cricket::RelayServerConfig& turn_server : turn_servers) { + turn_server.turn_logging_id = configuration.turn_logging_id; } - NoteUsageEvent(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED); -} - -RTCError PeerConnection::ApplyLocalDescription( - std::unique_ptr desc) { - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(desc); - - // Update stats here so that we have the most recent stats for tracks and - // streams that might be removed by updating the session description. - stats_->UpdateStats(kStatsOutputLevelStandard); - - // Take a reference to the old local description since it's used below to - // compare against the new local description. When setting the new local - // description, grab ownership of the replaced session description in case it - // is the same as |old_local_description|, to keep it alive for the duration - // of the method. - const SessionDescriptionInterface* old_local_description = - local_description(); - std::unique_ptr replaced_local_description; - SdpType type = desc->GetType(); - if (type == SdpType::kAnswer) { - replaced_local_description = pending_local_description_ - ? 
std::move(pending_local_description_) - : std::move(current_local_description_); - current_local_description_ = std::move(desc); - pending_local_description_ = nullptr; - current_remote_description_ = std::move(pending_remote_description_); - } else { - replaced_local_description = std::move(pending_local_description_); - pending_local_description_ = std::move(desc); + // Note if STUN or TURN servers were supplied. + if (!stun_servers.empty()) { + NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); } - // The session description to apply now must be accessed by - // |local_description()|. - RTC_DCHECK(local_description()); - - // Report statistics about any use of simulcast. - ReportSimulcastApiVersion(kSimulcastVersionApplyLocalDescription, - *local_description()->description()); - - if (!is_caller_) { - if (remote_description()) { - // Remote description was applied first, so this PC is the callee. - is_caller_ = false; - } else { - // Local description is applied first, so this PC is the caller. - is_caller_ = true; - } - } - - RTCError error = PushdownTransportDescription(cricket::CS_LOCAL, type); - if (!error.ok()) { - return error; - } - - if (IsUnifiedPlan()) { - RTCError error = UpdateTransceiversAndDataChannels( - cricket::CS_LOCAL, *local_description(), old_local_description, - remote_description()); - if (!error.ok()) { - return error; - } - std::vector> remove_list; - std::vector> removed_streams; - for (const auto& transceiver : transceivers_) { - // 2.2.7.1.1.(6-9): Set sender and receiver's transport slots. - // Note that code paths that don't set MID won't be able to use - // information about DTLS transports. 
- if (transceiver->mid()) { - auto dtls_transport = - LookupDtlsTransportByMidInternal(*transceiver->mid()); - transceiver->internal()->sender_internal()->set_transport( - dtls_transport); - transceiver->internal()->receiver_internal()->set_transport( - dtls_transport); - } - - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, local_description()); - if (!content) { - continue; - } - const MediaContentDescription* media_desc = content->media_description(); - // 2.2.7.1.6: If description is of type "answer" or "pranswer", then run - // the following steps: - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // 2.2.7.1.6.1: If direction is "sendonly" or "inactive", and - // transceiver's [[FiredDirection]] slot is either "sendrecv" or - // "recvonly", process the removal of a remote track for the media - // description, given transceiver, removeList, and muteTracks. - if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) && - (transceiver->internal()->fired_direction() && - RtpTransceiverDirectionHasRecv( - *transceiver->internal()->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver, &remove_list, - &removed_streams); - } - // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and - // [[FiredDirection]] slots to direction. - transceiver->internal()->set_current_direction(media_desc->direction()); - transceiver->internal()->set_fired_direction(media_desc->direction()); - } - } - auto observer = Observer(); - for (const auto& transceiver : remove_list) { - observer->OnRemoveTrack(transceiver->receiver()); - } - for (const auto& stream : removed_streams) { - observer->OnRemoveStream(stream); - } - } else { - // Media channels will be created only when offer is set. These may use new - // transports just created by PushdownTransportDescription. - if (type == SdpType::kOffer) { - // TODO(bugs.webrtc.org/4676) - Handle CreateChannel failure, as new local - // description is applied. 
Restore back to old description. - RTCError error = CreateChannels(*local_description()->description()); - if (!error.ok()) { - return error; - } - } - // Remove unused channels if MediaContentDescription is rejected. - RemoveUnusedChannels(local_description()->description()); + if (!turn_servers.empty()) { + NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED); } - error = UpdateSessionState(type, cricket::CS_LOCAL, - local_description()->description()); - if (!error.ok()) { - return error; + // In theory this shouldn't fail. + if (!network_thread()->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnection::ReconfigurePortAllocator_n, this, + stun_servers, turn_servers, modified_config.type, + modified_config.ice_candidate_pool_size, + modified_config.GetTurnPortPrunePolicy(), + modified_config.turn_customizer, + modified_config.stun_candidate_keepalive_interval, + static_cast(local_description())))) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to apply configuration to PortAllocator."); } - if (remote_description()) { - // Now that we have a local description, we can push down remote candidates. - UseCandidatesInSessionDescription(remote_description()); + // As described in JSEP, calling setConfiguration with new ICE servers or + // candidate policy must set a "needs-ice-restart" bit so that the next offer + // triggers an ICE restart which will pick up the changes. 
+ if (modified_config.servers != configuration_.servers || + NeedIceRestart( + configuration_.surface_ice_candidates_on_ice_transport_type_changed, + configuration_.type, modified_config.type) || + modified_config.GetTurnPortPrunePolicy() != + configuration_.GetTurnPortPrunePolicy()) { + transport_controller_->SetNeedsIceRestartFlag(); } - pending_ice_restarts_.clear(); - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } + transport_controller_->SetIceConfig(ParseIceConfig(modified_config)); - // If setting the description decided our SSL role, allocate any necessary - // SCTP sids. - rtc::SSLRole role; - if (DataChannel::IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) { - data_channel_controller_.AllocateSctpSids(role); + if (configuration_.active_reset_srtp_params != + modified_config.active_reset_srtp_params) { + transport_controller_->SetActiveResetSrtpParams( + modified_config.active_reset_srtp_params); } - if (IsUnifiedPlan()) { - for (const auto& transceiver : transceivers_) { - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, local_description()); - if (!content) { + if (modified_config.allow_codec_switching.has_value()) { + std::vector channels; + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) continue; - } - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (content->rejected || !channel || channel->local_streams().empty()) { - // 0 is a special value meaning "this sender has no associated send - // stream". Need to call this so the sender won't attempt to configure - // a no longer existing stream and run into DCHECKs in the lower - // layers. - transceiver->internal()->sender_internal()->SetSsrc(0); - } else { - // Get the StreamParams from the channel which could generate SSRCs. 
- const std::vector& streams = channel->local_streams(); - transceiver->internal()->sender_internal()->set_stream_ids( - streams[0].stream_ids()); - transceiver->internal()->sender_internal()->SetSsrc( - streams[0].first_ssrc()); - } - } - } else { - // Plan B semantics. - - // Update state and SSRC of local MediaStreams and DataChannels based on the - // local session description. - const cricket::ContentInfo* audio_content = - GetFirstAudioContent(local_description()->description()); - if (audio_content) { - if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); - } else { - const cricket::AudioContentDescription* audio_desc = - audio_content->media_description()->as_audio(); - UpdateLocalSenders(audio_desc->streams(), audio_desc->type()); - } - } - - const cricket::ContentInfo* video_content = - GetFirstVideoContent(local_description()->description()); - if (video_content) { - if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); - } else { - const cricket::VideoContentDescription* video_desc = - video_content->media_description()->as_video(); - UpdateLocalSenders(video_desc->streams(), video_desc->type()); - } - } - } - const cricket::ContentInfo* data_content = - GetFirstDataContent(local_description()->description()); - if (data_content) { - const cricket::RtpDataContentDescription* rtp_data_desc = - data_content->media_description()->as_rtp_data(); - // rtp_data_desc will be null if this is an SCTP description. 
- if (rtp_data_desc) { - data_channel_controller_.UpdateLocalRtpDataChannels( - rtp_data_desc->streams()); + auto* video_channel = static_cast( + transceiver->internal()->channel()); + if (video_channel) + channels.push_back(video_channel->media_channel()); } - } - if (type == SdpType::kAnswer && - local_ice_credentials_to_replace_->SatisfiesIceRestart( - *current_local_description_)) { - local_ice_credentials_to_replace_->ClearIceCredentials(); + worker_thread()->Invoke( + RTC_FROM_HERE, + [channels = std::move(channels), + allow_codec_switching = *modified_config.allow_codec_switching]() { + for (auto* ch : channels) + ch->SetVideoCodecSwitchingEnabled(allow_codec_switching); + }); } + configuration_ = modified_config; return RTCError::OK(); } -// The SDP parser used to populate these values by default for the 'content -// name' if an a=mid line was absent. -static absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { - switch (media_type) { - case cricket::MEDIA_TYPE_AUDIO: - return cricket::CN_AUDIO; - case cricket::MEDIA_TYPE_VIDEO: - return cricket::CN_VIDEO; - case cricket::MEDIA_TYPE_DATA: - return cricket::CN_DATA; - } - RTC_NOTREACHED(); - return ""; -} - -void PeerConnection::FillInMissingRemoteMids( - cricket::SessionDescription* new_remote_description) { - RTC_DCHECK(new_remote_description); - const cricket::ContentInfos no_infos; - const cricket::ContentInfos& local_contents = - (local_description() ? local_description()->description()->contents() - : no_infos); - const cricket::ContentInfos& remote_contents = - (remote_description() ? 
remote_description()->description()->contents() - : no_infos); - for (size_t i = 0; i < new_remote_description->contents().size(); ++i) { - cricket::ContentInfo& content = new_remote_description->contents()[i]; - if (!content.name.empty()) { - continue; - } - std::string new_mid; - absl::string_view source_explanation; - if (IsUnifiedPlan()) { - if (i < local_contents.size()) { - new_mid = local_contents[i].name; - source_explanation = "from the matching local media section"; - } else if (i < remote_contents.size()) { - new_mid = remote_contents[i].name; - source_explanation = "from the matching previous remote media section"; - } else { - new_mid = mid_generator_(); - source_explanation = "generated just now"; - } - } else { - new_mid = std::string( - GetDefaultMidForPlanB(content.media_description()->type())); - source_explanation = "to match pre-existing behavior"; - } - RTC_DCHECK(!new_mid.empty()); - content.name = new_mid; - new_remote_description->transport_infos()[i].content_name = new_mid; - RTC_LOG(LS_INFO) << "SetRemoteDescription: Remote media section at i=" << i - << " is missing an a=mid line. Filling in the value '" - << new_mid << "' " << source_explanation << "."; - } -} - -void PeerConnection::SetRemoteDescription( - SetSessionDescriptionObserver* observer, - SessionDescriptionInterface* desc_ptr) { - RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), - desc = std::unique_ptr(desc_ptr)]( - std::function operations_chain_callback) mutable { - // Abort early if |this_weak_ptr| is no longer valid. 
- if (!this_weak_ptr) { - // For consistency with SetRemoteDescriptionObserverAdapter, we DO NOT - // inform the |observer_refptr| that the operation failed in this - // case. - // TODO(hbos): If/when we process SRD messages in ~PeerConnection, - // the consistent thing would be to inform the observer here. - operations_chain_callback(); - return; - } - this_weak_ptr->DoSetRemoteDescription( - std::move(desc), - rtc::scoped_refptr( - new SetRemoteDescriptionObserverAdapter( - this_weak_ptr.get(), std::move(observer_refptr)))); - // DoSetRemoteDescription() is currently implemented as a synchronous - // operation but where SetRemoteDescriptionObserverAdapter ensures that - // the |observer|'s callbacks are invoked asynchronously in a post to - // OnMessage(). - // For backwards-compatability reasons, we declare the operation as - // completed here (rather than in OnMessage()). This ensures that - // subsequent offer/answer operations can start immediately (without - // waiting for OnMessage()). - operations_chain_callback(); - }); -} - -void PeerConnection::SetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) { +bool PeerConnection::AddIceCandidate( + const IceCandidateInterface* ice_candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, - desc = std::move(desc)]( - std::function operations_chain_callback) mutable { - // Abort early if |this_weak_ptr| is no longer valid. - if (!this_weak_ptr) { - // For consistency with DoSetRemoteDescription(), we DO inform the - // |observer| that the operation failed in this case. 
- observer->OnSetRemoteDescriptionComplete(RTCError( - RTCErrorType::INVALID_STATE, - "Failed to set remote offer sdp: failed because the session was " - "shut down")); - operations_chain_callback(); - return; - } - this_weak_ptr->DoSetRemoteDescription(std::move(desc), - std::move(observer)); - // DoSetRemoteDescription() is currently implemented as a synchronous - // operation. The |observer| will already have been informed that it - // completed, and we can mark this operation as complete without any - // loose ends. - operations_chain_callback(); - }); + return sdp_handler_->AddIceCandidate(ice_candidate); } -void PeerConnection::DoSetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) { +void PeerConnection::AddIceCandidate( + std::unique_ptr candidate, + std::function callback) { RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::DoSetRemoteDescription"); - - if (!observer) { - RTC_LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; - return; - } - - if (!desc) { - observer->OnSetRemoteDescriptionComplete(RTCError( - RTCErrorType::INVALID_PARAMETER, "SessionDescription is NULL.")); - return; - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. - if (session_error() != SessionError::kNone) { - std::string error_message = GetSessionErrorMsg(); - RTC_LOG(LS_ERROR) << "SetRemoteDescription: " << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); - return; - } - if (IsUnifiedPlan()) { - if (configuration_.enable_implicit_rollback) { - if (desc->GetType() == SdpType::kOffer && - signaling_state() == kHaveLocalOffer) { - Rollback(desc->GetType()); - } - } - // Explicit rollback. 
- if (desc->GetType() == SdpType::kRollback) { - observer->OnSetRemoteDescriptionComplete(Rollback(desc->GetType())); - return; - } - } else if (desc->GetType() == SdpType::kRollback) { - observer->OnSetRemoteDescriptionComplete( - RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Rollback not supported in Plan B")); - return; - } - if (desc->GetType() == SdpType::kOffer) { - // Report to UMA the format of the received offer. - ReportSdpFormatReceived(*desc); - } - - // Handle remote descriptions missing a=mid lines for interop with legacy end - // points. - FillInMissingRemoteMids(desc->description()); - - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE); - if (!error.ok()) { - std::string error_message = GetSetDescriptionErrorMessage( - cricket::CS_REMOTE, desc->GetType(), error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(error.type(), std::move(error_message))); - return; - } - - // Grab the description type before moving ownership to - // ApplyRemoteDescription, which may destroy it before returning. - const SdpType type = desc->GetType(); - - error = ApplyRemoteDescription(std::move(desc)); - // |desc| may be destroyed at this point. - - if (!error.ok()) { - // If ApplyRemoteDescription fails, the PeerConnection could be in an - // inconsistent state, so act conservatively here and set the session error - // so that future calls to SetLocalDescription/SetRemoteDescription fail. - SetSessionError(SessionError::kContent, error.message()); - std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type, error); - RTC_LOG(LS_ERROR) << error_message; - observer->OnSetRemoteDescriptionComplete( - RTCError(error.type(), std::move(error_message))); - return; - } - RTC_DCHECK(remote_description()); - - if (type == SdpType::kAnswer) { - // TODO(deadbeef): We already had to hop to the network thread for - // MaybeStartGathering... 
- network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator_.get())); - // Make UMA notes about what was agreed to. - ReportNegotiatedSdpSemantics(*remote_description()); - } - - if (IsUnifiedPlan()) { - bool was_negotiation_needed = is_negotiation_needed_; - UpdateNegotiationNeeded(); - if (signaling_state() == kStable && was_negotiation_needed && - is_negotiation_needed_) { - Observer()->OnRenegotiationNeeded(); - } - } - - observer->OnSetRemoteDescriptionComplete(RTCError::OK()); - NoteUsageEvent(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED); + sdp_handler_->AddIceCandidate(std::move(candidate), callback); } -RTCError PeerConnection::ApplyRemoteDescription( - std::unique_ptr desc) { +bool PeerConnection::RemoveIceCandidates( + const std::vector& candidates) { + TRACE_EVENT0("webrtc", "PeerConnection::RemoveIceCandidates"); RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(desc); - - // Update stats here so that we have the most recent stats for tracks and - // streams that might be removed by updating the session description. - stats_->UpdateStats(kStatsOutputLevelStandard); - - // Take a reference to the old remote description since it's used below to - // compare against the new remote description. When setting the new remote - // description, grab ownership of the replaced session description in case it - // is the same as |old_remote_description|, to keep it alive for the duration - // of the method. - const SessionDescriptionInterface* old_remote_description = - remote_description(); - std::unique_ptr replaced_remote_description; - SdpType type = desc->GetType(); - if (type == SdpType::kAnswer) { - replaced_remote_description = pending_remote_description_ - ? 
std::move(pending_remote_description_) - : std::move(current_remote_description_); - current_remote_description_ = std::move(desc); - pending_remote_description_ = nullptr; - current_local_description_ = std::move(pending_local_description_); - } else { - replaced_remote_description = std::move(pending_remote_description_); - pending_remote_description_ = std::move(desc); - } - // The session description to apply now must be accessed by - // |remote_description()|. - RTC_DCHECK(remote_description()); - - // Report statistics about any use of simulcast. - ReportSimulcastApiVersion(kSimulcastVersionApplyRemoteDescription, - *remote_description()->description()); - - RTCError error = PushdownTransportDescription(cricket::CS_REMOTE, type); - if (!error.ok()) { - return error; - } - // Transport and Media channels will be created only when offer is set. - if (IsUnifiedPlan()) { - RTCError error = UpdateTransceiversAndDataChannels( - cricket::CS_REMOTE, *remote_description(), local_description(), - old_remote_description); - if (!error.ok()) { - return error; - } - } else { - // Media channels will be created only when offer is set. These may use new - // transports just created by PushdownTransportDescription. - if (type == SdpType::kOffer) { - // TODO(mallinath) - Handle CreateChannel failure, as new local - // description is applied. Restore back to old description. - RTCError error = CreateChannels(*remote_description()->description()); - if (!error.ok()) { - return error; - } - } - // Remove unused channels if MediaContentDescription is rejected. - RemoveUnusedChannels(remote_description()->description()); - } - - // NOTE: Candidates allocation will be initiated only when - // SetLocalDescription is called. 
- error = UpdateSessionState(type, cricket::CS_REMOTE, - remote_description()->description()); - if (!error.ok()) { - return error; - } - - if (local_description() && - !UseCandidatesInSessionDescription(remote_description())) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidCandidates); - } - - if (old_remote_description) { - for (const cricket::ContentInfo& content : - old_remote_description->description()->contents()) { - // Check if this new SessionDescription contains new ICE ufrag and - // password that indicates the remote peer requests an ICE restart. - // TODO(deadbeef): When we start storing both the current and pending - // remote description, this should reset pending_ice_restarts and compare - // against the current description. - if (CheckForRemoteIceRestart(old_remote_description, remote_description(), - content.name)) { - if (type == SdpType::kOffer) { - pending_ice_restarts_.insert(content.name); - } - } else { - // We retain all received candidates only if ICE is not restarted. - // When ICE is restarted, all previous candidates belong to an old - // generation and should not be kept. - // TODO(deadbeef): This goes against the W3C spec which says the remote - // description should only contain candidates from the last set remote - // description plus any candidates added since then. We should remove - // this once we're sure it won't break anything. - WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( - old_remote_description, content.name, mutable_remote_description()); - } - } - } - - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } - - // Set the the ICE connection state to connecting since the connection may - // become writable with peer reflexive candidates before any remote candidate - // is signaled. - // TODO(pthatcher): This is a short-term solution for crbug/446908. 
A real fix - // is to have a new signal the indicates a change in checking state from the - // transport and expose a new checking() member from transport that can be - // read to determine the current checking state. The existing SignalConnecting - // actually means "gathering candidates", so cannot be be used here. - if (remote_description()->GetType() != SdpType::kOffer && - remote_description()->number_of_mediasections() > 0u && - ice_connection_state() == PeerConnectionInterface::kIceConnectionNew) { - SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); - } - - // If setting the description decided our SSL role, allocate any necessary - // SCTP sids. - rtc::SSLRole role; - if (DataChannel::IsSctpLike(data_channel_type()) && GetSctpSslRole(&role)) { - data_channel_controller_.AllocateSctpSids(role); - } - - if (IsUnifiedPlan()) { - std::vector> - now_receiving_transceivers; - std::vector> remove_list; - std::vector> added_streams; - std::vector> removed_streams; - for (const auto& transceiver : transceivers_) { - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, remote_description()); - if (!content) { - continue; - } - const MediaContentDescription* media_desc = content->media_description(); - RtpTransceiverDirection local_direction = - RtpTransceiverDirectionReversed(media_desc->direction()); - // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the - // RTCSessionDescription: Set the associated remote streams given - // transceiver.[[Receiver]], msids, addList, and removeList". - // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription - if (RtpTransceiverDirectionHasRecv(local_direction)) { - std::vector stream_ids; - if (!media_desc->streams().empty()) { - // The remote description has signaled the stream IDs. 
- stream_ids = media_desc->streams()[0].stream_ids(); - } - transceiver_stable_states_by_transceivers_[transceiver] - .SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); - - RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name - << " (" << GetStreamIdsString(stream_ids) << ")."; - SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), - stream_ids, &added_streams, - &removed_streams); - // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 - // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, - // and transceiver's current direction is neither sendrecv nor recvonly, - // process the addition of a remote track for the media description. - if (!transceiver->fired_direction() || - !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { - RTC_LOG(LS_INFO) - << "Processing the addition of a remote track for MID=" - << content->name << "."; - now_receiving_transceivers.push_back(transceiver); - } - } - // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's - // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the - // removal of a remote track for the media description, given transceiver, - // removeList, and muteTracks. - if (!RtpTransceiverDirectionHasRecv(local_direction) && - (transceiver->fired_direction() && - RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver, &remove_list, - &removed_streams); - } - // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. - transceiver->internal()->set_fired_direction(local_direction); - // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run - // the following steps: - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to - // direction. 
- transceiver->internal()->set_current_direction(local_direction); - // 2.2.8.1.11.[3-6]: Set the transport internal slots. - if (transceiver->mid()) { - auto dtls_transport = - LookupDtlsTransportByMidInternal(*transceiver->mid()); - transceiver->internal()->sender_internal()->set_transport( - dtls_transport); - transceiver->internal()->receiver_internal()->set_transport( - dtls_transport); - } - } - // 2.2.8.1.12: If the media description is rejected, and transceiver is - // not already stopped, stop the RTCRtpTransceiver transceiver. - if (content->rejected && !transceiver->stopped()) { - RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name - << " since the media section was rejected."; - transceiver->Stop(); - } - if (!content->rejected && - RtpTransceiverDirectionHasRecv(local_direction)) { - if (!media_desc->streams().empty() && - media_desc->streams()[0].has_ssrcs()) { - uint32_t ssrc = media_desc->streams()[0].first_ssrc(); - transceiver->internal()->receiver_internal()->SetupMediaChannel(ssrc); - } else { - transceiver->internal() - ->receiver_internal() - ->SetupUnsignaledMediaChannel(); - } - } - } - // Once all processing has finished, fire off callbacks. 
- auto observer = Observer(); - for (const auto& transceiver : now_receiving_transceivers) { - stats_->AddTrack(transceiver->receiver()->track()); - observer->OnTrack(transceiver); - observer->OnAddTrack(transceiver->receiver(), - transceiver->receiver()->streams()); - } - for (const auto& stream : added_streams) { - observer->OnAddStream(stream); - } - for (const auto& transceiver : remove_list) { - observer->OnRemoveTrack(transceiver->receiver()); - } - for (const auto& stream : removed_streams) { - observer->OnRemoveStream(stream); - } - } - - const cricket::ContentInfo* audio_content = - GetFirstAudioContent(remote_description()->description()); - const cricket::ContentInfo* video_content = - GetFirstVideoContent(remote_description()->description()); - const cricket::AudioContentDescription* audio_desc = - GetFirstAudioContentDescription(remote_description()->description()); - const cricket::VideoContentDescription* video_desc = - GetFirstVideoContentDescription(remote_description()->description()); - const cricket::RtpDataContentDescription* rtp_data_desc = - GetFirstRtpDataContentDescription(remote_description()->description()); - - // Check if the descriptions include streams, just in case the peer supports - // MSID, but doesn't indicate so with "a=msid-semantic". - if (remote_description()->description()->msid_supported() || - (audio_desc && !audio_desc->streams().empty()) || - (video_desc && !video_desc->streams().empty())) { - remote_peer_supports_msid_ = true; - } - - // We wait to signal new streams until we finish processing the description, - // since only at that point will new streams have all their tracks. - rtc::scoped_refptr new_streams(StreamCollection::Create()); - - if (!IsUnifiedPlan()) { - // TODO(steveanton): When removing RTP senders/receivers in response to a - // rejected media section, there is some cleanup logic that expects the - // voice/ video channel to still be set. 
But in this method the voice/video - // channel would have been destroyed by the SetRemoteDescription caller - // above so the cleanup that relies on them fails to run. The RemoveSenders - // calls should be moved to right before the DestroyChannel calls to fix - // this. - - // Find all audio rtp streams and create corresponding remote AudioTracks - // and MediaStreams. - if (audio_content) { - if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); - } else { - bool default_audio_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(audio_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(audio_desc), - default_audio_track_needed, audio_desc->type(), - new_streams); - } - } - - // Find all video rtp streams and create corresponding remote VideoTracks - // and MediaStreams. - if (video_content) { - if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); - } else { - bool default_video_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(video_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(video_desc), - default_video_track_needed, video_desc->type(), - new_streams); - } - } - - // If this is an RTP data transport, update the DataChannels with the - // information from the remote peer. - if (rtp_data_desc) { - data_channel_controller_.UpdateRemoteRtpDataChannels( - GetActiveStreams(rtp_data_desc)); - } - - // Iterate new_streams and notify the observer about new MediaStreams. 
- auto observer = Observer(); - for (size_t i = 0; i < new_streams->count(); ++i) { - MediaStreamInterface* new_stream = new_streams->at(i); - stats_->AddStream(new_stream); - observer->OnAddStream( - rtc::scoped_refptr(new_stream)); - } - - UpdateEndedRemoteMediaStreams(); - } - - if (type == SdpType::kAnswer && - local_ice_credentials_to_replace_->SatisfiesIceRestart( - *current_local_description_)) { - local_ice_credentials_to_replace_->ClearIceCredentials(); - } - - return RTCError::OK(); -} - -void PeerConnection::SetAssociatedRemoteStreams( - rtc::scoped_refptr receiver, - const std::vector& stream_ids, - std::vector>* added_streams, - std::vector>* removed_streams) { - std::vector> media_streams; - for (const std::string& stream_id : stream_ids) { - rtc::scoped_refptr stream = - remote_streams_->find(stream_id); - if (!stream) { - stream = MediaStreamProxy::Create(rtc::Thread::Current(), - MediaStream::Create(stream_id)); - remote_streams_->AddStream(stream); - added_streams->push_back(stream); - } - media_streams.push_back(stream); - } - // Special case: "a=msid" missing, use random stream ID. - if (media_streams.empty() && - !(remote_description()->description()->msid_signaling() & - cricket::kMsidSignalingMediaSection)) { - if (!missing_msid_default_stream_) { - missing_msid_default_stream_ = MediaStreamProxy::Create( - rtc::Thread::Current(), MediaStream::Create(rtc::CreateRandomUuid())); - added_streams->push_back(missing_msid_default_stream_); - } - media_streams.push_back(missing_msid_default_stream_); - } - std::vector> previous_streams = - receiver->streams(); - // SetStreams() will add/remove the receiver's track to/from the streams. This - // differs from the spec - the spec uses an "addList" and "removeList" to - // update the stream-track relationships in a later step. We do this earlier, - // changing the order of things, but the end-result is the same. - // TODO(hbos): When we remove remote_streams(), use set_stream_ids() - // instead. 
https://crbug.com/webrtc/9480 - receiver->SetStreams(media_streams); - RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); -} - -void PeerConnection::ProcessRemovalOfRemoteTrack( - rtc::scoped_refptr> - transceiver, - std::vector>* remove_list, - std::vector>* removed_streams) { - RTC_DCHECK(transceiver->mid()); - RTC_LOG(LS_INFO) << "Processing the removal of a track for MID=" - << *transceiver->mid(); - std::vector> previous_streams = - transceiver->internal()->receiver_internal()->streams(); - // This will remove the remote track from the streams. - transceiver->internal()->receiver_internal()->set_stream_ids({}); - remove_list->push_back(transceiver); - RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); -} - -void PeerConnection::RemoveRemoteStreamsIfEmpty( - const std::vector>& remote_streams, - std::vector>* removed_streams) { - // TODO(https://crbug.com/webrtc/9480): When we use stream IDs instead of - // streams, see if the stream was removed by checking if this was the last - // receiver with that stream ID. 
- for (const auto& remote_stream : remote_streams) { - if (remote_stream->GetAudioTracks().empty() && - remote_stream->GetVideoTracks().empty()) { - remote_streams_->RemoveStream(remote_stream); - removed_streams->push_back(remote_stream); - } - } -} - -RTCError PeerConnection::UpdateTransceiversAndDataChannels( - cricket::ContentSource source, - const SessionDescriptionInterface& new_session, - const SessionDescriptionInterface* old_local_description, - const SessionDescriptionInterface* old_remote_description) { - RTC_DCHECK(IsUnifiedPlan()); - - const cricket::ContentGroup* bundle_group = nullptr; - if (new_session.GetType() == SdpType::kOffer) { - auto bundle_group_or_error = - GetEarlyBundleGroup(*new_session.description()); - if (!bundle_group_or_error.ok()) { - return bundle_group_or_error.MoveError(); - } - bundle_group = bundle_group_or_error.MoveValue(); - } - - const ContentInfos& new_contents = new_session.description()->contents(); - for (size_t i = 0; i < new_contents.size(); ++i) { - const cricket::ContentInfo& new_content = new_contents[i]; - cricket::MediaType media_type = new_content.media_description()->type(); - mid_generator_.AddKnownId(new_content.name); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - const cricket::ContentInfo* old_local_content = nullptr; - if (old_local_description && - i < old_local_description->description()->contents().size()) { - old_local_content = - &old_local_description->description()->contents()[i]; - } - const cricket::ContentInfo* old_remote_content = nullptr; - if (old_remote_description && - i < old_remote_description->description()->contents().size()) { - old_remote_content = - &old_remote_description->description()->contents()[i]; - } - auto transceiver_or_error = - AssociateTransceiver(source, new_session.GetType(), i, new_content, - old_local_content, old_remote_content); - if (!transceiver_or_error.ok()) { - return transceiver_or_error.MoveError(); - } - auto 
transceiver = transceiver_or_error.MoveValue(); - RTCError error = - UpdateTransceiverChannel(transceiver, new_content, bundle_group); - if (!error.ok()) { - return error; - } - } else if (media_type == cricket::MEDIA_TYPE_DATA) { - if (GetDataMid() && new_content.name != *GetDataMid()) { - // Ignore all but the first data section. - RTC_LOG(LS_INFO) << "Ignoring data media section with MID=" - << new_content.name; - continue; - } - RTCError error = UpdateDataChannel(source, new_content, bundle_group); - if (!error.ok()) { - return error; - } - } else { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Unknown section type."); - } - } - - return RTCError::OK(); -} - -RTCError PeerConnection::UpdateTransceiverChannel( - rtc::scoped_refptr> - transceiver, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) { - RTC_DCHECK(IsUnifiedPlan()); - RTC_DCHECK(transceiver); - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (content.rejected) { - if (channel) { - transceiver->internal()->SetChannel(nullptr); - DestroyChannelInterface(channel); - } - } else { - if (!channel) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - channel = CreateVoiceChannel(content.name); - } else { - RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->media_type()); - channel = CreateVideoChannel(content.name); - } - if (!channel) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INTERNAL_ERROR, - "Failed to create channel for mid=" + content.name); - } - transceiver->internal()->SetChannel(channel); - } - } - return RTCError::OK(); -} - -RTCError PeerConnection::UpdateDataChannel( - cricket::ContentSource source, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) { - if (data_channel_type() == cricket::DCT_NONE) { - // If data channels are disabled, ignore this media section. CreateAnswer - // will take care of rejecting it. 
- return RTCError::OK(); - } - if (content.rejected) { - RTC_LOG(LS_INFO) << "Rejected data channel, mid=" << content.mid(); - DestroyDataChannelTransport(); - } else { - if (!data_channel_controller_.rtp_data_channel() && - !data_channel_controller_.data_channel_transport()) { - RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid(); - if (!CreateDataChannel(content.name)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); - } - } - if (source == cricket::CS_REMOTE) { - const MediaContentDescription* data_desc = content.media_description(); - if (data_desc && cricket::IsRtpProtocol(data_desc->protocol())) { - data_channel_controller_.UpdateRemoteRtpDataChannels( - GetActiveStreams(data_desc)); - } - } - } - return RTCError::OK(); + return sdp_handler_->RemoveIceCandidates(candidates); } -// This method will extract any send encodings that were sent by the remote -// connection. This is currently only relevant for Simulcast scenario (where -// the number of layers may be communicated by the server). -static std::vector GetSendEncodingsFromRemoteDescription( - const MediaContentDescription& desc) { - if (!desc.HasSimulcast()) { - return {}; - } - std::vector result; - const SimulcastDescription& simulcast = desc.simulcast_description(); - - // This is a remote description, the parameters we are after should appear - // as receive streams. - for (const auto& alternatives : simulcast.receive_layers()) { - RTC_DCHECK(!alternatives.empty()); - // There is currently no way to specify or choose from alternatives. - // We will always use the first alternative, which is the most preferred. 
- const SimulcastLayer& layer = alternatives[0]; - RtpEncodingParameters parameters; - parameters.rid = layer.rid; - parameters.active = !layer.is_paused; - result.push_back(parameters); - } - - return result; -} - -static RTCError UpdateSimulcastLayerStatusInSender( - const std::vector& layers, - rtc::scoped_refptr sender) { - RTC_DCHECK(sender); - RtpParameters parameters = sender->GetParametersInternal(); - std::vector disabled_layers; - - // The simulcast envelope cannot be changed, only the status of the streams. - // So we will iterate over the send encodings rather than the layers. - for (RtpEncodingParameters& encoding : parameters.encodings) { - auto iter = std::find_if(layers.begin(), layers.end(), - [&encoding](const SimulcastLayer& layer) { - return layer.rid == encoding.rid; - }); - // A layer that cannot be found may have been removed by the remote party. - if (iter == layers.end()) { - disabled_layers.push_back(encoding.rid); - continue; - } - - encoding.active = !iter->is_paused; - } - - RTCError result = sender->SetParametersInternal(parameters); - if (result.ok()) { - result = sender->DisableEncodingLayers(disabled_layers); - } - - return result; -} - -static bool SimulcastIsRejected( - const ContentInfo* local_content, - const MediaContentDescription& answer_media_desc) { - bool simulcast_offered = local_content && - local_content->media_description() && - local_content->media_description()->HasSimulcast(); - bool simulcast_answered = answer_media_desc.HasSimulcast(); - bool rids_supported = RtpExtension::FindHeaderExtensionByUri( - answer_media_desc.rtp_header_extensions(), RtpExtension::kRidUri); - return simulcast_offered && (!simulcast_answered || !rids_supported); -} - -static RTCError DisableSimulcastInSender( - rtc::scoped_refptr sender) { - RTC_DCHECK(sender); - RtpParameters parameters = sender->GetParametersInternal(); - if (parameters.encodings.size() <= 1) { - return RTCError::OK(); - } - - std::vector disabled_layers; - 
std::transform( - parameters.encodings.begin() + 1, parameters.encodings.end(), - std::back_inserter(disabled_layers), - [](const RtpEncodingParameters& encoding) { return encoding.rid; }); - return sender->DisableEncodingLayers(disabled_layers); -} - -RTCErrorOr>> -PeerConnection::AssociateTransceiver(cricket::ContentSource source, - SdpType type, - size_t mline_index, - const ContentInfo& content, - const ContentInfo* old_local_content, - const ContentInfo* old_remote_content) { - RTC_DCHECK(IsUnifiedPlan()); - // If this is an offer then the m= section might be recycled. If the m= - // section is being recycled (defined as: rejected in the current local or - // remote description and not rejected in new description), dissociate the - // currently associated RtpTransceiver by setting its mid property to null, - // and discard the mapping between the transceiver and its m= section index. - if (IsMediaSectionBeingRecycled(type, content, old_local_content, - old_remote_content)) { - // We want to dissociate the transceiver that has the rejected mid. - const std::string& old_mid = - (old_local_content && old_local_content->rejected) - ? old_local_content->name - : old_remote_content->name; - auto old_transceiver = GetAssociatedTransceiver(old_mid); - if (old_transceiver) { - RTC_LOG(LS_INFO) << "Dissociating transceiver for MID=" << old_mid - << " since the media section is being recycled."; - old_transceiver->internal()->set_mid(absl::nullopt); - old_transceiver->internal()->set_mline_index(absl::nullopt); - } - } - const MediaContentDescription* media_desc = content.media_description(); - auto transceiver = GetAssociatedTransceiver(content.name); - if (source == cricket::CS_LOCAL) { - // Find the RtpTransceiver that corresponds to this m= section, using the - // mapping between transceivers and m= section indices established when - // creating the offer. 
- if (!transceiver) { - transceiver = GetTransceiverByMLineIndex(mline_index); - } - if (!transceiver) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Unknown transceiver"); - } - } else { - RTC_DCHECK_EQ(source, cricket::CS_REMOTE); - // If the m= section is sendrecv or recvonly, and there are RtpTransceivers - // of the same type... - // When simulcast is requested, a transceiver cannot be associated because - // AddTrack cannot be called to initialize it. - if (!transceiver && - RtpTransceiverDirectionHasRecv(media_desc->direction()) && - !media_desc->HasSimulcast()) { - transceiver = FindAvailableTransceiverToReceive(media_desc->type()); - } - // If no RtpTransceiver was found in the previous step, create one with a - // recvonly direction. - if (!transceiver) { - RTC_LOG(LS_INFO) << "Adding " - << cricket::MediaTypeToString(media_desc->type()) - << " transceiver for MID=" << content.name - << " at i=" << mline_index - << " in response to the remote description."; - std::string sender_id = rtc::CreateRandomUuid(); - std::vector send_encodings = - GetSendEncodingsFromRemoteDescription(*media_desc); - auto sender = CreateSender(media_desc->type(), sender_id, nullptr, {}, - send_encodings); - std::string receiver_id; - if (!media_desc->streams().empty()) { - receiver_id = media_desc->streams()[0].id; - } else { - receiver_id = rtc::CreateRandomUuid(); - } - auto receiver = CreateReceiver(media_desc->type(), receiver_id); - transceiver = CreateAndAddTransceiver(sender, receiver); - transceiver->internal()->set_direction( - RtpTransceiverDirection::kRecvOnly); - if (type == SdpType::kOffer) { - transceiver_stable_states_by_transceivers_[transceiver] - .set_newly_created(); - } - } - // Check if the offer indicated simulcast but the answer rejected it. - // This can happen when simulcast is not supported on the remote party. 
- if (SimulcastIsRejected(old_local_content, *media_desc)) { - RTC_HISTOGRAM_BOOLEAN(kSimulcastDisabled, true); - RTCError error = - DisableSimulcastInSender(transceiver->internal()->sender_internal()); - if (!error.ok()) { - RTC_LOG(LS_ERROR) << "Failed to remove rejected simulcast."; - return std::move(error); - } - } - } - RTC_DCHECK(transceiver); - if (transceiver->media_type() != media_desc->type()) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Transceiver type does not match media description type."); - } - if (media_desc->HasSimulcast()) { - std::vector layers = - source == cricket::CS_LOCAL - ? media_desc->simulcast_description().send_layers().GetAllLayers() - : media_desc->simulcast_description() - .receive_layers() - .GetAllLayers(); - RTCError error = UpdateSimulcastLayerStatusInSender( - layers, transceiver->internal()->sender_internal()); - if (!error.ok()) { - RTC_LOG(LS_ERROR) << "Failed updating status for simulcast layers."; - return std::move(error); - } - } - if (type == SdpType::kOffer) { - bool state_changes = transceiver->internal()->mid() != content.name || - transceiver->internal()->mline_index() != mline_index; - if (state_changes) { - transceiver_stable_states_by_transceivers_[transceiver] - .SetMSectionIfUnset(transceiver->internal()->mid(), - transceiver->internal()->mline_index()); - } - } - // Associate the found or created RtpTransceiver with the m= section by - // setting the value of the RtpTransceiver's mid property to the MID of the m= - // section, and establish a mapping between the transceiver and the index of - // the m= section. 
- transceiver->internal()->set_mid(content.name); - transceiver->internal()->set_mline_index(mline_index); - return std::move(transceiver); -} - -rtc::scoped_refptr> -PeerConnection::GetAssociatedTransceiver(const std::string& mid) const { - RTC_DCHECK(IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->mid() == mid) { - return transceiver; - } - } - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::GetTransceiverByMLineIndex(size_t mline_index) const { - RTC_DCHECK(IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->internal()->mline_index() == mline_index) { - return transceiver; - } - } - return nullptr; -} - -rtc::scoped_refptr> -PeerConnection::FindAvailableTransceiverToReceive( - cricket::MediaType media_type) const { - RTC_DCHECK(IsUnifiedPlan()); - // From JSEP section 5.10 (Applying a Remote Description): - // If the m= section is sendrecv or recvonly, and there are RtpTransceivers of - // the same type that were added to the PeerConnection by addTrack and are not - // associated with any m= section and are not stopped, find the first such - // RtpTransceiver. - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == media_type && - transceiver->internal()->created_by_addtrack() && !transceiver->mid() && - !transceiver->stopped()) { - return transceiver; - } - } - return nullptr; -} - -const cricket::ContentInfo* PeerConnection::FindMediaSectionForTransceiver( - rtc::scoped_refptr> - transceiver, - const SessionDescriptionInterface* sdesc) const { - RTC_DCHECK(transceiver); - RTC_DCHECK(sdesc); - if (IsUnifiedPlan()) { - if (!transceiver->internal()->mid()) { - // This transceiver is not associated with a media section yet. - return nullptr; - } - return sdesc->description()->GetContentByName( - *transceiver->internal()->mid()); - } else { - // Plan B only allows at most one audio and one video section, so use the - // first media section of that type. 
- return cricket::GetFirstMediaContent(sdesc->description()->contents(), - transceiver->media_type()); - } -} - -PeerConnectionInterface::RTCConfiguration PeerConnection::GetConfiguration() { - RTC_DCHECK_RUN_ON(signaling_thread()); - return configuration_; -} - -RTCError PeerConnection::SetConfiguration( - const RTCConfiguration& configuration) { - RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::SetConfiguration"); - if (IsClosed()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, - "SetConfiguration: PeerConnection is closed."); - } - - // According to JSEP, after setLocalDescription, changing the candidate pool - // size is not allowed, and changing the set of ICE servers will not result - // in new candidates being gathered. - if (local_description() && configuration.ice_candidate_pool_size != - configuration_.ice_candidate_pool_size) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Can't change candidate pool size after calling " - "SetLocalDescription."); - } - - if (local_description() && - configuration.crypto_options != configuration_.crypto_options) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Can't change crypto_options after calling " - "SetLocalDescription."); - } - - if (local_description() && configuration.use_datagram_transport != - configuration_.use_datagram_transport) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Can't change use_datagram_transport " - "after calling SetLocalDescription."); - } - - if (remote_description() && configuration.use_datagram_transport != - configuration_.use_datagram_transport) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Can't change use_datagram_transport " - "after calling SetRemoteDescription."); - } - - if (local_description() && - configuration.use_datagram_transport_for_data_channels != - configuration_.use_datagram_transport_for_data_channels) { - LOG_AND_RETURN_ERROR( - 
RTCErrorType::INVALID_MODIFICATION, - "Can't change use_datagram_transport_for_data_channels " - "after calling SetLocalDescription."); - } - - if (remote_description() && - configuration.use_datagram_transport_for_data_channels != - configuration_.use_datagram_transport_for_data_channels) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_MODIFICATION, - "Can't change use_datagram_transport_for_data_channels " - "after calling SetRemoteDescription."); - } - - if (local_description() && - configuration.use_datagram_transport_for_data_channels_receive_only != - configuration_ - .use_datagram_transport_for_data_channels_receive_only) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_MODIFICATION, - "Can't change use_datagram_transport_for_data_channels_receive_only " - "after calling SetLocalDescription."); - } - - if (remote_description() && - configuration.use_datagram_transport_for_data_channels_receive_only != - configuration_ - .use_datagram_transport_for_data_channels_receive_only) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_MODIFICATION, - "Can't change use_datagram_transport_for_data_channels_receive_only " - "after calling SetRemoteDescription."); - } - - if ((configuration.use_datagram_transport && - *configuration.use_datagram_transport) || - (configuration.use_datagram_transport_for_data_channels && - *configuration.use_datagram_transport_for_data_channels)) { - RTC_CHECK(configuration.bundle_policy == kBundlePolicyMaxBundle) - << "Media transport requires MaxBundle policy."; - } - - // The simplest (and most future-compatible) way to tell if the config was - // modified in an invalid way is to copy each property we do support - // modifying, then use operator==. There are far more properties we don't - // support modifying than those we do, and more could be added. 
- RTCConfiguration modified_config = configuration_; - modified_config.servers = configuration.servers; - modified_config.type = configuration.type; - modified_config.ice_candidate_pool_size = - configuration.ice_candidate_pool_size; - modified_config.prune_turn_ports = configuration.prune_turn_ports; - modified_config.turn_port_prune_policy = configuration.turn_port_prune_policy; - modified_config.surface_ice_candidates_on_ice_transport_type_changed = - configuration.surface_ice_candidates_on_ice_transport_type_changed; - modified_config.ice_check_min_interval = configuration.ice_check_min_interval; - modified_config.ice_check_interval_strong_connectivity = - configuration.ice_check_interval_strong_connectivity; - modified_config.ice_check_interval_weak_connectivity = - configuration.ice_check_interval_weak_connectivity; - modified_config.ice_unwritable_timeout = configuration.ice_unwritable_timeout; - modified_config.ice_unwritable_min_checks = - configuration.ice_unwritable_min_checks; - modified_config.ice_inactive_timeout = configuration.ice_inactive_timeout; - modified_config.stun_candidate_keepalive_interval = - configuration.stun_candidate_keepalive_interval; - modified_config.turn_customizer = configuration.turn_customizer; - modified_config.network_preference = configuration.network_preference; - modified_config.active_reset_srtp_params = - configuration.active_reset_srtp_params; - modified_config.use_datagram_transport = configuration.use_datagram_transport; - modified_config.use_datagram_transport_for_data_channels = - configuration.use_datagram_transport_for_data_channels; - modified_config.use_datagram_transport_for_data_channels_receive_only = - configuration.use_datagram_transport_for_data_channels_receive_only; - modified_config.turn_logging_id = configuration.turn_logging_id; - modified_config.allow_codec_switching = configuration.allow_codec_switching; - if (configuration != modified_config) { - 
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Modifying the configuration in an unsupported way."); - } - - // Validate the modified configuration. - RTCError validate_error = ValidateConfiguration(modified_config); - if (!validate_error.ok()) { - return validate_error; - } - - // Note that this isn't possible through chromium, since it's an unsigned - // short in WebIDL. - if (configuration.ice_candidate_pool_size < 0 || - configuration.ice_candidate_pool_size > static_cast(UINT16_MAX)) { - return RTCError(RTCErrorType::INVALID_RANGE); - } - - // Parse ICE servers before hopping to network thread. - cricket::ServerAddresses stun_servers; - std::vector turn_servers; - RTCErrorType parse_error = - ParseIceServers(configuration.servers, &stun_servers, &turn_servers); - if (parse_error != RTCErrorType::NONE) { - return RTCError(parse_error); - } - // Add the turn logging id to all turn servers - for (cricket::RelayServerConfig& turn_server : turn_servers) { - turn_server.turn_logging_id = configuration.turn_logging_id; - } - - // Note if STUN or TURN servers were supplied. - if (!stun_servers.empty()) { - NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); - } - if (!turn_servers.empty()) { - NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED); - } - - // In theory this shouldn't fail. 
- if (!network_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::ReconfigurePortAllocator_n, this, - stun_servers, turn_servers, modified_config.type, - modified_config.ice_candidate_pool_size, - modified_config.GetTurnPortPrunePolicy(), - modified_config.turn_customizer, - modified_config.stun_candidate_keepalive_interval, - static_cast(local_description())))) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to apply configuration to PortAllocator."); - } - - // As described in JSEP, calling setConfiguration with new ICE servers or - // candidate policy must set a "needs-ice-restart" bit so that the next offer - // triggers an ICE restart which will pick up the changes. - if (modified_config.servers != configuration_.servers || - modified_config.type != configuration_.type || - modified_config.GetTurnPortPrunePolicy() != - configuration_.GetTurnPortPrunePolicy()) { - transport_controller_->SetNeedsIceRestartFlag(); - } - - transport_controller_->SetIceConfig(ParseIceConfig(modified_config)); - - use_datagram_transport_ = datagram_transport_config_.enabled && - modified_config.use_datagram_transport.value_or( - datagram_transport_config_.default_value); - use_datagram_transport_for_data_channels_ = - datagram_transport_data_channel_config_.enabled && - modified_config.use_datagram_transport_for_data_channels.value_or( - datagram_transport_data_channel_config_.default_value); - use_datagram_transport_for_data_channels_receive_only_ = - modified_config.use_datagram_transport_for_data_channels_receive_only - .value_or(datagram_transport_data_channel_config_.receive_only); - transport_controller_->SetMediaTransportSettings( - use_datagram_transport_, use_datagram_transport_for_data_channels_, - use_datagram_transport_for_data_channels_receive_only_); - - if (configuration_.active_reset_srtp_params != - modified_config.active_reset_srtp_params) { - transport_controller_->SetActiveResetSrtpParams( - modified_config.active_reset_srtp_params); 
- } - - if (modified_config.allow_codec_switching.has_value()) { - cricket::VideoMediaChannel* video_channel = video_media_channel(); - if (video_channel) { - video_channel->SetVideoCodecSwitchingEnabled( - *modified_config.allow_codec_switching); - } - } - - configuration_ = modified_config; - return RTCError::OK(); -} - -bool PeerConnection::AddIceCandidate( - const IceCandidateInterface* ice_candidate) { - RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::AddIceCandidate"); - if (IsClosed()) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: PeerConnection is closed."; - NoteAddIceCandidateResult(kAddIceCandidateFailClosed); - return false; - } - - if (!remote_description()) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: ICE candidates can't be added " - "without any remote session description."; - NoteAddIceCandidateResult(kAddIceCandidateFailNoRemoteDescription); - return false; - } - - if (!ice_candidate) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate is null."; - NoteAddIceCandidateResult(kAddIceCandidateFailNullCandidate); - return false; - } - - bool valid = false; - bool ready = ReadyToUseRemoteCandidate(ice_candidate, nullptr, &valid); - if (!valid) { - NoteAddIceCandidateResult(kAddIceCandidateFailNotValid); - return false; - } - - // Add this candidate to the remote session description. 
- if (!mutable_remote_description()->AddCandidate(ice_candidate)) { - RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate cannot be used."; - NoteAddIceCandidateResult(kAddIceCandidateFailInAddition); - return false; - } - - if (ready) { - bool result = UseCandidate(ice_candidate); - if (result) { - NoteUsageEvent(UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED); - NoteAddIceCandidateResult(kAddIceCandidateSuccess); - } else { - NoteAddIceCandidateResult(kAddIceCandidateFailNotUsable); - } - return result; - } else { - RTC_LOG(LS_INFO) << "AddIceCandidate: Not ready to use candidate."; - NoteAddIceCandidateResult(kAddIceCandidateFailNotReady); - return true; - } -} - -void PeerConnection::AddIceCandidate( - std::unique_ptr candidate, - std::function callback) { - RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the chain, - // this operation will be queued to be invoked, otherwise the contents of the - // lambda will execute immediately. - operations_chain_->ChainOperation( - [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - candidate = std::move(candidate), callback = std::move(callback)]( - std::function operations_chain_callback) { - if (!this_weak_ptr) { - operations_chain_callback(); - callback(RTCError( - RTCErrorType::INVALID_STATE, - "AddIceCandidate failed because the session was shut down")); - return; - } - if (!this_weak_ptr->AddIceCandidate(candidate.get())) { - operations_chain_callback(); - // Fail with an error type and message consistent with Chromium. - // TODO(hbos): Fail with error types according to spec. 
- callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Error processing ICE candidate")); - return; - } - operations_chain_callback(); - callback(RTCError::OK()); - }); -} - -bool PeerConnection::RemoveIceCandidates( - const std::vector& candidates) { - TRACE_EVENT0("webrtc", "PeerConnection::RemoveIceCandidates"); - RTC_DCHECK_RUN_ON(signaling_thread()); - if (IsClosed()) { - RTC_LOG(LS_ERROR) << "RemoveIceCandidates: PeerConnection is closed."; - return false; - } - - if (!remote_description()) { - RTC_LOG(LS_ERROR) << "RemoveIceCandidates: ICE candidates can't be removed " - "without any remote session description."; - return false; - } - - if (candidates.empty()) { - RTC_LOG(LS_ERROR) << "RemoveIceCandidates: candidates are empty."; - return false; - } - - size_t number_removed = - mutable_remote_description()->RemoveCandidates(candidates); - if (number_removed != candidates.size()) { - RTC_LOG(LS_ERROR) - << "RemoveIceCandidates: Failed to remove candidates. Requested " - << candidates.size() << " but only " << number_removed - << " are removed."; - } - - // Remove the candidates from the transport controller. 
- RTCError error = transport_controller_->RemoveRemoteCandidates(candidates); - if (!error.ok()) { - RTC_LOG(LS_ERROR) - << "RemoveIceCandidates: Error when removing remote candidates: " - << error.message(); - } - return true; -} - -RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) { - if (!worker_thread()->IsCurrent()) { - return worker_thread()->Invoke( - RTC_FROM_HERE, [&]() { return SetBitrate(bitrate); }); - } - RTC_DCHECK_RUN_ON(worker_thread()); - - const bool has_min = bitrate.min_bitrate_bps.has_value(); - const bool has_start = bitrate.start_bitrate_bps.has_value(); - const bool has_max = bitrate.max_bitrate_bps.has_value(); - if (has_min && *bitrate.min_bitrate_bps < 0) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "min_bitrate_bps <= 0"); - } - if (has_start) { - if (has_min && *bitrate.start_bitrate_bps < *bitrate.min_bitrate_bps) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "start_bitrate_bps < min_bitrate_bps"); - } else if (*bitrate.start_bitrate_bps < 0) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "curent_bitrate_bps < 0"); - } - } - if (has_max) { - if (has_start && *bitrate.max_bitrate_bps < *bitrate.start_bitrate_bps) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "max_bitrate_bps < start_bitrate_bps"); - } else if (has_min && *bitrate.max_bitrate_bps < *bitrate.min_bitrate_bps) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "max_bitrate_bps < min_bitrate_bps"); - } else if (*bitrate.max_bitrate_bps < 0) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "max_bitrate_bps < 0"); - } - } - - RTC_DCHECK(call_.get()); - call_->SetClientBitratePreferences(bitrate); - - return RTCError::OK(); -} - -void PeerConnection::SetAudioPlayout(bool playout) { - if (!worker_thread()->IsCurrent()) { - worker_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::SetAudioPlayout, this, playout)); - return; - } - auto audio_state = - 
factory_->channel_manager()->media_engine()->voice().GetAudioState(); - audio_state->SetPlayout(playout); -} - -void PeerConnection::SetAudioRecording(bool recording) { - if (!worker_thread()->IsCurrent()) { - worker_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::SetAudioRecording, this, recording)); - return; - } - auto audio_state = - factory_->channel_manager()->media_engine()->voice().GetAudioState(); - audio_state->SetRecording(recording); -} - -std::unique_ptr -PeerConnection::GetRemoteAudioSSLCertificate() { - std::unique_ptr chain = GetRemoteAudioSSLCertChain(); - if (!chain || !chain->GetSize()) { - return nullptr; - } - return chain->Get(0).Clone(); -} - -std::unique_ptr -PeerConnection::GetRemoteAudioSSLCertChain() { - RTC_DCHECK_RUN_ON(signaling_thread()); - auto audio_transceiver = GetFirstAudioTransceiver(); - if (!audio_transceiver || !audio_transceiver->internal()->channel()) { - return nullptr; - } - return transport_controller_->GetRemoteSSLCertChain( - audio_transceiver->internal()->channel()->transport_name()); -} - -rtc::scoped_refptr> -PeerConnection::GetFirstAudioTransceiver() const { - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - return transceiver; - } - } - return nullptr; -} - -bool PeerConnection::StartRtcEventLog(std::unique_ptr output, - int64_t output_period_ms) { - return worker_thread()->Invoke( - RTC_FROM_HERE, - [this, output = std::move(output), output_period_ms]() mutable { - return StartRtcEventLog_w(std::move(output), output_period_ms); - }); -} - -bool PeerConnection::StartRtcEventLog( - std::unique_ptr output) { - int64_t output_period_ms = webrtc::RtcEventLog::kImmediateOutput; - if (field_trial::IsEnabled("WebRTC-RtcEventLogNewFormat")) { - output_period_ms = 5000; - } - return StartRtcEventLog(std::move(output), output_period_ms); -} - -void PeerConnection::StopRtcEventLog() { - worker_thread()->Invoke( - RTC_FROM_HERE, 
rtc::Bind(&PeerConnection::StopRtcEventLog_w, this)); -} - -rtc::scoped_refptr -PeerConnection::LookupDtlsTransportByMid(const std::string& mid) { - RTC_DCHECK_RUN_ON(signaling_thread()); - return transport_controller_->LookupDtlsTransportByMid(mid); -} - -rtc::scoped_refptr -PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) { - RTC_DCHECK_RUN_ON(signaling_thread()); - return transport_controller_->LookupDtlsTransportByMid(mid); -} - -rtc::scoped_refptr PeerConnection::GetSctpTransport() - const { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (!sctp_mid_) { - return nullptr; - } - return transport_controller_->GetSctpTransport(*sctp_mid_); -} - -const SessionDescriptionInterface* PeerConnection::local_description() const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_local_description_ ? pending_local_description_.get() - : current_local_description_.get(); -} - -const SessionDescriptionInterface* PeerConnection::remote_description() const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_remote_description_ ? 
pending_remote_description_.get() - : current_remote_description_.get(); -} - -const SessionDescriptionInterface* PeerConnection::current_local_description() - const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return current_local_description_.get(); -} - -const SessionDescriptionInterface* PeerConnection::current_remote_description() - const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return current_remote_description_.get(); -} - -const SessionDescriptionInterface* PeerConnection::pending_local_description() - const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_local_description_.get(); -} - -const SessionDescriptionInterface* PeerConnection::pending_remote_description() - const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_remote_description_.get(); -} - -void PeerConnection::Close() { - RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::Close"); - // Update stats here so that we have the most recent stats for tracks and - // streams before the channels are closed. - stats_->UpdateStats(kStatsOutputLevelStandard); - - ChangeSignalingState(PeerConnectionInterface::kClosed); - NoteUsageEvent(UsageEvent::CLOSE_CALLED); - - for (const auto& transceiver : transceivers_) { - transceiver->Stop(); - } - - // Ensure that all asynchronous stats requests are completed before destroying - // the transport controller below. - if (stats_collector_) { - stats_collector_->WaitForPendingRequest(); - } - - // Don't destroy BaseChannels until after stats has been cleaned up so that - // the last stats request can still read from the channels. - DestroyAllChannels(); - - // The event log is used in the transport controller, which must be outlived - // by the former. CreateOffer by the peer connection is implemented - // asynchronously and if the peer connection is closed without resetting the - // WebRTC session description factory, the session description factory would - // call the transport controller. 
- webrtc_session_desc_factory_.reset(); - transport_controller_.reset(); - - network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator_.get())); - - worker_thread()->Invoke(RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(worker_thread()); - call_.reset(); - // The event log must outlive call (and any other object that uses it). - event_log_.reset(); - }); - ReportUsagePattern(); - // The .h file says that observer can be discarded after close() returns. - // Make sure this is true. - observer_ = nullptr; -} - -void PeerConnection::OnMessage(rtc::Message* msg) { - RTC_DCHECK_RUN_ON(signaling_thread()); - switch (msg->message_id) { - case MSG_SET_SESSIONDESCRIPTION_SUCCESS: { - SetSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnSuccess(); - delete param; - break; - } - case MSG_SET_SESSIONDESCRIPTION_FAILED: { - SetSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnFailure(std::move(param->error)); - delete param; - break; - } - case MSG_CREATE_SESSIONDESCRIPTION_FAILED: { - CreateSessionDescriptionMsg* param = - static_cast(msg->pdata); - param->observer->OnFailure(std::move(param->error)); - delete param; - break; - } - case MSG_GETSTATS: { - GetStatsMsg* param = static_cast(msg->pdata); - StatsReports reports; - stats_->GetStats(param->track, &reports); - param->observer->OnComplete(reports); - delete param; - break; - } - case MSG_REPORT_USAGE_PATTERN: { - ReportUsagePattern(); - break; - } - default: - RTC_NOTREACHED() << "Not implemented"; - break; - } -} - -cricket::VoiceMediaChannel* PeerConnection::voice_media_channel() const { - RTC_DCHECK(!IsUnifiedPlan()); - auto* voice_channel = static_cast( - GetAudioTransceiver()->internal()->channel()); - if (voice_channel) { - return voice_channel->media_channel(); - } else { - return nullptr; - } -} - -cricket::VideoMediaChannel* PeerConnection::video_media_channel() const { - 
RTC_DCHECK(!IsUnifiedPlan()); - auto* video_channel = static_cast( - GetVideoTransceiver()->internal()->channel()); - if (video_channel) { - return video_channel->media_channel(); - } else { - return nullptr; - } -} - -void PeerConnection::CreateAudioReceiver( - MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) { - std::vector> streams; - streams.push_back(rtc::scoped_refptr(stream)); - // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use - // the constructor taking stream IDs instead. - auto* audio_receiver = new AudioRtpReceiver( - worker_thread(), remote_sender_info.sender_id, streams); - audio_receiver->SetMediaChannel(voice_media_channel()); - if (remote_sender_info.sender_id == kDefaultAudioSenderId) { - audio_receiver->SetupUnsignaledMediaChannel(); - } else { - audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); - } - auto receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), audio_receiver); - GetAudioTransceiver()->internal()->AddReceiver(receiver); - Observer()->OnAddTrack(receiver, streams); - NoteUsageEvent(UsageEvent::AUDIO_ADDED); -} - -void PeerConnection::CreateVideoReceiver( - MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) { - std::vector> streams; - streams.push_back(rtc::scoped_refptr(stream)); - // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use - // the constructor taking stream IDs instead. 
- auto* video_receiver = new VideoRtpReceiver( - worker_thread(), remote_sender_info.sender_id, streams); - video_receiver->SetMediaChannel(video_media_channel()); - if (remote_sender_info.sender_id == kDefaultVideoSenderId) { - video_receiver->SetupUnsignaledMediaChannel(); - } else { - video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); - } - auto receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), video_receiver); - GetVideoTransceiver()->internal()->AddReceiver(receiver); - Observer()->OnAddTrack(receiver, streams); - NoteUsageEvent(UsageEvent::VIDEO_ADDED); -} - -// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote -// description. -rtc::scoped_refptr PeerConnection::RemoveAndStopReceiver( - const RtpSenderInfo& remote_sender_info) { - auto receiver = FindReceiverById(remote_sender_info.sender_id); - if (!receiver) { - RTC_LOG(LS_WARNING) << "RtpReceiver for track with id " - << remote_sender_info.sender_id << " doesn't exist."; - return nullptr; - } - if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - GetAudioTransceiver()->internal()->RemoveReceiver(receiver); - } else { - GetVideoTransceiver()->internal()->RemoveReceiver(receiver); - } - return receiver; -} - -void PeerConnection::AddAudioTrack(AudioTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - RTC_DCHECK(track); - RTC_DCHECK(stream); - auto sender = FindSenderForTrack(track); - if (sender) { - // We already have a sender for this track, so just change the stream_id - // so that it's correct in the next call to CreateOffer. - sender->internal()->set_stream_ids({stream->id()}); - return; - } - - // Normal case; we've never seen this track before. 
- auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), track, - {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(voice_media_channel()); - GetAudioTransceiver()->internal()->AddSender(new_sender); - // If the sender has already been configured in SDP, we call SetSsrc, - // which will connect the sender to the underlying transport. This can - // occur if a local session description that contains the ID of the sender - // is set before AddStream is called. It can also occur if the local - // session description is not changed and RemoveStream is called, and - // later AddStream is called again with the same stream. - const RtpSenderInfo* sender_info = - FindSenderInfo(local_audio_sender_infos_, stream->id(), track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } -} - -// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around -// indefinitely, when we have unified plan SDP. -void PeerConnection::RemoveAudioTrack(AudioTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - auto sender = FindSenderForTrack(track); - if (!sender) { - RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() - << " doesn't exist."; - return; - } - GetAudioTransceiver()->internal()->RemoveSender(sender); -} - -void PeerConnection::AddVideoTrack(VideoTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - RTC_DCHECK(track); - RTC_DCHECK(stream); - auto sender = FindSenderForTrack(track); - if (sender) { - // We already have a sender for this track, so just change the stream_id - // so that it's correct in the next call to CreateOffer. - sender->internal()->set_stream_ids({stream->id()}); - return; - } - - // Normal case; we've never seen this track before. 
- auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), track, - {stream->id()}, {}); - new_sender->internal()->SetMediaChannel(video_media_channel()); - GetVideoTransceiver()->internal()->AddSender(new_sender); - const RtpSenderInfo* sender_info = - FindSenderInfo(local_video_sender_infos_, stream->id(), track->id()); - if (sender_info) { - new_sender->internal()->SetSsrc(sender_info->first_ssrc); - } -} - -void PeerConnection::RemoveVideoTrack(VideoTrackInterface* track, - MediaStreamInterface* stream) { - RTC_DCHECK(!IsClosed()); - auto sender = FindSenderForTrack(track); - if (!sender) { - RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() - << " doesn't exist."; - return; - } - GetVideoTransceiver()->internal()->RemoveSender(sender); -} - -void PeerConnection::SetIceConnectionState(IceConnectionState new_state) { - if (ice_connection_state_ == new_state) { - return; - } - - // After transitioning to "closed", ignore any additional states from - // TransportController (such as "disconnected"). 
- if (IsClosed()) { - return; - } - - RTC_LOG(LS_INFO) << "Changing IceConnectionState " << ice_connection_state_ - << " => " << new_state; - RTC_DCHECK(ice_connection_state_ != - PeerConnectionInterface::kIceConnectionClosed); - - ice_connection_state_ = new_state; - Observer()->OnIceConnectionChange(ice_connection_state_); -} - -void PeerConnection::SetStandardizedIceConnectionState( - PeerConnectionInterface::IceConnectionState new_state) { - if (standardized_ice_connection_state_ == new_state) { - return; - } - - if (IsClosed()) { - return; - } - - RTC_LOG(LS_INFO) << "Changing standardized IceConnectionState " - << standardized_ice_connection_state_ << " => " << new_state; - - standardized_ice_connection_state_ = new_state; - Observer()->OnStandardizedIceConnectionChange(new_state); -} - -void PeerConnection::SetConnectionState( - PeerConnectionInterface::PeerConnectionState new_state) { - if (connection_state_ == new_state) - return; - if (IsClosed()) - return; - connection_state_ = new_state; - Observer()->OnConnectionChange(new_state); -} - -void PeerConnection::OnIceGatheringChange( - PeerConnectionInterface::IceGatheringState new_state) { - if (IsClosed()) { - return; - } - ice_gathering_state_ = new_state; - Observer()->OnIceGatheringChange(ice_gathering_state_); -} - -void PeerConnection::OnIceCandidate( - std::unique_ptr candidate) { - if (IsClosed()) { - return; - } - ReportIceCandidateCollected(candidate->candidate()); - Observer()->OnIceCandidate(candidate.get()); -} - -void PeerConnection::OnIceCandidateError(const std::string& address, - int port, - const std::string& url, - int error_code, - const std::string& error_text) { - if (IsClosed()) { - return; - } - Observer()->OnIceCandidateError(address, port, url, error_code, error_text); - // Leftover not to break wpt test during migration to the new API. 
- Observer()->OnIceCandidateError(address + ":", url, error_code, error_text); -} - -void PeerConnection::OnIceCandidatesRemoved( - const std::vector& candidates) { - if (IsClosed()) { - return; - } - Observer()->OnIceCandidatesRemoved(candidates); -} - -void PeerConnection::OnSelectedCandidatePairChanged( - const cricket::CandidatePairChangeEvent& event) { - if (IsClosed()) { - return; - } - - if (event.selected_candidate_pair.local_candidate().type() == - LOCAL_PORT_TYPE && - event.selected_candidate_pair.remote_candidate().type() == - LOCAL_PORT_TYPE) { - NoteUsageEvent(UsageEvent::DIRECT_CONNECTION_SELECTED); - } - - Observer()->OnIceSelectedCandidatePairChanged(event); -} - -void PeerConnection::ChangeSignalingState( - PeerConnectionInterface::SignalingState signaling_state) { - if (signaling_state_ == signaling_state) { - return; - } - RTC_LOG(LS_INFO) << "Session: " << session_id() << " Old state: " - << GetSignalingStateString(signaling_state_) - << " New state: " - << GetSignalingStateString(signaling_state); - signaling_state_ = signaling_state; - if (signaling_state == kClosed) { - ice_connection_state_ = kIceConnectionClosed; - Observer()->OnIceConnectionChange(ice_connection_state_); - standardized_ice_connection_state_ = - PeerConnectionInterface::IceConnectionState::kIceConnectionClosed; - connection_state_ = PeerConnectionInterface::PeerConnectionState::kClosed; - Observer()->OnConnectionChange(connection_state_); - if (ice_gathering_state_ != kIceGatheringComplete) { - ice_gathering_state_ = kIceGatheringComplete; - Observer()->OnIceGatheringChange(ice_gathering_state_); - } - } - Observer()->OnSignalingChange(signaling_state_); -} - -void PeerConnection::OnAudioTrackAdded(AudioTrackInterface* track, - MediaStreamInterface* stream) { - if (IsClosed()) { - return; - } - AddAudioTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::OnAudioTrackRemoved(AudioTrackInterface* track, - MediaStreamInterface* stream) { - if 
(IsClosed()) { - return; - } - RemoveAudioTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::OnVideoTrackAdded(VideoTrackInterface* track, - MediaStreamInterface* stream) { - if (IsClosed()) { - return; - } - AddVideoTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::OnVideoTrackRemoved(VideoTrackInterface* track, - MediaStreamInterface* stream) { - if (IsClosed()) { - return; - } - RemoveVideoTrack(track, stream); - UpdateNegotiationNeeded(); -} - -void PeerConnection::PostSetSessionDescriptionSuccess( - SetSessionDescriptionObserver* observer) { - SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg); -} - -void PeerConnection::PostSetSessionDescriptionFailure( - SetSessionDescriptionObserver* observer, - RTCError&& error) { - RTC_DCHECK(!error.ok()); - SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); - msg->error = std::move(error); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_SET_SESSIONDESCRIPTION_FAILED, msg); -} - -void PeerConnection::PostCreateSessionDescriptionFailure( - CreateSessionDescriptionObserver* observer, - RTCError error) { - RTC_DCHECK(!error.ok()); - CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer); - msg->error = std::move(error); - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg); -} - -void PeerConnection::GetOptionsForOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - ExtractSharedMediaSessionOptions(offer_answer_options, session_options); - - if (IsUnifiedPlan()) { - GetOptionsForUnifiedPlanOffer(offer_answer_options, session_options); - } else { - GetOptionsForPlanBOffer(offer_answer_options, session_options); - } - - // Intentionally unset the data channel type for RTP data channel with the 
- // second condition. Otherwise the RTP data channels would be successfully - // negotiated by default and the unit tests in WebRtcDataBrowserTest will fail - // when building with chromium. We want to leave RTP data channels broken, so - // people won't try to use them. - if (data_channel_controller_.HasRtpDataChannels() || - data_channel_type() != cricket::DCT_RTP) { - session_options->data_channel_type = data_channel_type(); +RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) { + if (!worker_thread()->IsCurrent()) { + return worker_thread()->Invoke( + RTC_FROM_HERE, [&]() { return SetBitrate(bitrate); }); } + RTC_DCHECK_RUN_ON(worker_thread()); - // Apply ICE restart flag and renomination flag. - bool ice_restart = offer_answer_options.ice_restart || - local_ice_credentials_to_replace_->HasIceCredentials(); - for (auto& options : session_options->media_description_options) { - options.transport_options.ice_restart = ice_restart; - options.transport_options.enable_ice_renomination = - configuration_.enable_ice_renomination; + const bool has_min = bitrate.min_bitrate_bps.has_value(); + const bool has_start = bitrate.start_bitrate_bps.has_value(); + const bool has_max = bitrate.max_bitrate_bps.has_value(); + if (has_min && *bitrate.min_bitrate_bps < 0) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "min_bitrate_bps <= 0"); } - - session_options->rtcp_cname = rtcp_cname_; - session_options->crypto_options = GetCryptoOptions(); - session_options->pooled_ice_credentials = - network_thread()->Invoke>( - RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, - port_allocator_.get())); - session_options->offer_extmap_allow_mixed = - configuration_.offer_extmap_allow_mixed; - - // If datagram transport is in use, add opaque transport parameters. 
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) { - for (auto& options : session_options->media_description_options) { - absl::optional params = - transport_controller_->GetTransportParameters(options.mid); - if (!params) { - continue; - } - options.transport_options.opaque_parameters = params; - if ((use_datagram_transport_ && - (options.type == cricket::MEDIA_TYPE_AUDIO || - options.type == cricket::MEDIA_TYPE_VIDEO)) || - (use_datagram_transport_for_data_channels_ && - options.type == cricket::MEDIA_TYPE_DATA)) { - options.alt_protocol = params->protocol; - } + if (has_start) { + if (has_min && *bitrate.start_bitrate_bps < *bitrate.min_bitrate_bps) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "start_bitrate_bps < min_bitrate_bps"); + } else if (*bitrate.start_bitrate_bps < 0) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "curent_bitrate_bps < 0"); } } - - // Allow fallback for using obsolete SCTP syntax. - // Note that the default in |session_options| is true, while - // the default in |options| is false. - session_options->use_obsolete_sctp_sdp = - offer_answer_options.use_obsolete_sctp_sdp; -} - -void PeerConnection::GetOptionsForPlanBOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Figure out transceiver directional preferences. - bool send_audio = HasRtpSender(cricket::MEDIA_TYPE_AUDIO); - bool send_video = HasRtpSender(cricket::MEDIA_TYPE_VIDEO); - - // By default, generate sendrecv/recvonly m= sections. - bool recv_audio = true; - bool recv_video = true; - - // By default, only offer a new m= section if we have media to send with it. - bool offer_new_audio_description = send_audio; - bool offer_new_video_description = send_video; - bool offer_new_data_description = data_channel_controller_.HasDataChannels(); - - // The "offer_to_receive_X" options allow those defaults to be overridden. 
- if (offer_answer_options.offer_to_receive_audio != - RTCOfferAnswerOptions::kUndefined) { - recv_audio = (offer_answer_options.offer_to_receive_audio > 0); - offer_new_audio_description = - offer_new_audio_description || - (offer_answer_options.offer_to_receive_audio > 0); - } - if (offer_answer_options.offer_to_receive_video != - RTCOfferAnswerOptions::kUndefined) { - recv_video = (offer_answer_options.offer_to_receive_video > 0); - offer_new_video_description = - offer_new_video_description || - (offer_answer_options.offer_to_receive_video > 0); - } - - absl::optional audio_index; - absl::optional video_index; - absl::optional data_index; - // If a current description exists, generate m= sections in the same order, - // using the first audio/video/data section that appears and rejecting - // extraneous ones. - if (local_description()) { - GenerateMediaDescriptionOptions( - local_description(), - RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), - RtpTransceiverDirectionFromSendRecv(send_video, recv_video), - &audio_index, &video_index, &data_index, session_options); - } - - // Add audio/video/data m= sections to the end if needed. 
- if (!audio_index && offer_new_audio_description) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO, - RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), - false)); - audio_index = session_options->media_description_options.size() - 1; - } - if (!video_index && offer_new_video_description) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO, - RtpTransceiverDirectionFromSendRecv(send_video, recv_video), - false)); - video_index = session_options->media_description_options.size() - 1; - } - if (!data_index && offer_new_data_description) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(cricket::CN_DATA)); - data_index = session_options->media_description_options.size() - 1; - } - - cricket::MediaDescriptionOptions* audio_media_description_options = - !audio_index ? nullptr - : &session_options->media_description_options[*audio_index]; - cricket::MediaDescriptionOptions* video_media_description_options = - !video_index ? nullptr - : &session_options->media_description_options[*video_index]; - - AddPlanBRtpSenderOptions(GetSendersInternal(), - audio_media_description_options, - video_media_description_options, - offer_answer_options.num_simulcast_layers); -} - -static cricket::MediaDescriptionOptions -GetMediaDescriptionOptionsForTransceiver( - rtc::scoped_refptr> - transceiver, - const std::string& mid) { - cricket::MediaDescriptionOptions media_description_options( - transceiver->media_type(), mid, transceiver->direction(), - transceiver->stopped()); - media_description_options.codec_preferences = - transceiver->codec_preferences(); - // This behavior is specified in JSEP. The gist is that: - // 1. The MSID is included if the RtpTransceiver's direction is sendonly or - // sendrecv. - // 2. 
If the MSID is included, then it must be included in any subsequent - // offer/answer exactly the same until the RtpTransceiver is stopped. - if (transceiver->stopped() || - (!RtpTransceiverDirectionHasSend(transceiver->direction()) && - !transceiver->internal()->has_ever_been_used_to_send())) { - return media_description_options; - } - - cricket::SenderOptions sender_options; - sender_options.track_id = transceiver->sender()->id(); - sender_options.stream_ids = transceiver->sender()->stream_ids(); - - // The following sets up RIDs and Simulcast. - // RIDs are included if Simulcast is requested or if any RID was specified. - RtpParameters send_parameters = - transceiver->internal()->sender_internal()->GetParametersInternal(); - bool has_rids = std::any_of(send_parameters.encodings.begin(), - send_parameters.encodings.end(), - [](const RtpEncodingParameters& encoding) { - return !encoding.rid.empty(); - }); - - std::vector send_rids; - SimulcastLayerList send_layers; - for (const RtpEncodingParameters& encoding : send_parameters.encodings) { - if (encoding.rid.empty()) { - continue; + if (has_max) { + if (has_start && *bitrate.max_bitrate_bps < *bitrate.start_bitrate_bps) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "max_bitrate_bps < start_bitrate_bps"); + } else if (has_min && *bitrate.max_bitrate_bps < *bitrate.min_bitrate_bps) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "max_bitrate_bps < min_bitrate_bps"); + } else if (*bitrate.max_bitrate_bps < 0) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "max_bitrate_bps < 0"); } - send_rids.push_back(RidDescription(encoding.rid, RidDirection::kSend)); - send_layers.AddLayer(SimulcastLayer(encoding.rid, !encoding.active)); - } - - if (has_rids) { - sender_options.rids = send_rids; } - sender_options.simulcast_layers = send_layers; - // When RIDs are configured, we must set num_sim_layers to 0 to. 
- // Otherwise, num_sim_layers must be 1 because either there is no - // simulcast, or simulcast is acheived by munging the SDP. - sender_options.num_sim_layers = has_rids ? 0 : 1; - media_description_options.sender_options.push_back(sender_options); + RTC_DCHECK(call_.get()); + call_->SetClientBitratePreferences(bitrate); - return media_description_options; + return RTCError::OK(); } -// Returns the ContentInfo at mline index |i|, or null if none exists. -static const ContentInfo* GetContentByIndex( - const SessionDescriptionInterface* sdesc, - size_t i) { - if (!sdesc) { - return nullptr; - } - const ContentInfos& contents = sdesc->description()->contents(); - return (i < contents.size() ? &contents[i] : nullptr); -} - -void PeerConnection::GetOptionsForUnifiedPlanOffer( - const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Rules for generating an offer are dictated by JSEP sections 5.2.1 (Initial - // Offers) and 5.2.2 (Subsequent Offers). - RTC_DCHECK_EQ(session_options->media_description_options.size(), 0); - const ContentInfos no_infos; - const ContentInfos& local_contents = - (local_description() ? local_description()->description()->contents() - : no_infos); - const ContentInfos& remote_contents = - (remote_description() ? remote_description()->description()->contents() - : no_infos); - // The mline indices that can be recycled. New transceivers should reuse these - // slots first. - std::queue recycleable_mline_indices; - // First, go through each media section that exists in either the local or - // remote description and generate a media section in this offer for the - // associated transceiver. If a media section can be recycled, generate a - // default, rejected media section here that can be later overwritten. - for (size_t i = 0; - i < std::max(local_contents.size(), remote_contents.size()); ++i) { - // Either |local_content| or |remote_content| is non-null. 
- const ContentInfo* local_content = - (i < local_contents.size() ? &local_contents[i] : nullptr); - const ContentInfo* current_local_content = - GetContentByIndex(current_local_description(), i); - const ContentInfo* remote_content = - (i < remote_contents.size() ? &remote_contents[i] : nullptr); - const ContentInfo* current_remote_content = - GetContentByIndex(current_remote_description(), i); - bool had_been_rejected = - (current_local_content && current_local_content->rejected) || - (current_remote_content && current_remote_content->rejected); - const std::string& mid = - (local_content ? local_content->name : remote_content->name); - cricket::MediaType media_type = - (local_content ? local_content->media_description()->type() - : remote_content->media_description()->type()); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - auto transceiver = GetAssociatedTransceiver(mid); - RTC_CHECK(transceiver); - // A media section is considered eligible for recycling if it is marked as - // rejected in either the current local or current remote description. - if (had_been_rejected && transceiver->stopped()) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(transceiver->media_type(), mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); - recycleable_mline_indices.push(i); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForTransceiver(transceiver, mid)); - // CreateOffer shouldn't really cause any state changes in - // PeerConnection, but we need a way to match new transceivers to new - // media sections in SetLocalDescription and JSEP specifies this is done - // by recording the index of the media section generated for the - // transceiver in the offer. 
- transceiver->internal()->set_mline_index(i); - } - } else { - RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); - RTC_CHECK(GetDataMid()); - if (had_been_rejected || mid != *GetDataMid()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(mid)); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(mid)); - } - } - } - - // Next, look for transceivers that are newly added (that is, are not stopped - // and not associated). Reuse media sections marked as recyclable first, - // otherwise append to the end of the offer. New media sections should be - // added in the order they were added to the PeerConnection. - for (const auto& transceiver : transceivers_) { - if (transceiver->mid() || transceiver->stopped()) { - continue; - } - size_t mline_index; - if (!recycleable_mline_indices.empty()) { - mline_index = recycleable_mline_indices.front(); - recycleable_mline_indices.pop(); - session_options->media_description_options[mline_index] = - GetMediaDescriptionOptionsForTransceiver(transceiver, - mid_generator_()); - } else { - mline_index = session_options->media_description_options.size(); - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForTransceiver(transceiver, - mid_generator_())); - } - // See comment above for why CreateOffer changes the transceiver's state. - transceiver->internal()->set_mline_index(mline_index); - } - // Lastly, add a m-section if we have local data channels and an m section - // does not already exist. 
- if (!GetDataMid() && data_channel_controller_.HasDataChannels()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(mid_generator_())); +void PeerConnection::SetAudioPlayout(bool playout) { + if (!worker_thread()->IsCurrent()) { + worker_thread()->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnection::SetAudioPlayout, this, playout)); + return; } + auto audio_state = + context_->channel_manager()->media_engine()->voice().GetAudioState(); + audio_state->SetPlayout(playout); } -void PeerConnection::GetOptionsForAnswer( - const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - ExtractSharedMediaSessionOptions(offer_answer_options, session_options); - - if (IsUnifiedPlan()) { - GetOptionsForUnifiedPlanAnswer(offer_answer_options, session_options); - } else { - GetOptionsForPlanBAnswer(offer_answer_options, session_options); +void PeerConnection::SetAudioRecording(bool recording) { + if (!worker_thread()->IsCurrent()) { + worker_thread()->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnection::SetAudioRecording, this, recording)); + return; } + auto audio_state = + context_->channel_manager()->media_engine()->voice().GetAudioState(); + audio_state->SetRecording(recording); +} - // Intentionally unset the data channel type for RTP data channel. Otherwise - // the RTP data channels would be successfully negotiated by default and the - // unit tests in WebRtcDataBrowserTest will fail when building with chromium. - // We want to leave RTP data channels broken, so people won't try to use them. 
- if (data_channel_controller_.HasRtpDataChannels() || - data_channel_type() != cricket::DCT_RTP) { - session_options->data_channel_type = data_channel_type(); +void PeerConnection::AddAdaptationResource( + rtc::scoped_refptr resource) { + if (!worker_thread()->IsCurrent()) { + return worker_thread()->Invoke(RTC_FROM_HERE, [this, resource]() { + return AddAdaptationResource(resource); + }); } - - // Apply ICE renomination flag. - for (auto& options : session_options->media_description_options) { - options.transport_options.enable_ice_renomination = - configuration_.enable_ice_renomination; + RTC_DCHECK_RUN_ON(worker_thread()); + if (!call_) { + // The PeerConnection has been closed. + return; } + call_->AddAdaptationResource(resource); +} - session_options->rtcp_cname = rtcp_cname_; - session_options->crypto_options = GetCryptoOptions(); - session_options->pooled_ice_credentials = - network_thread()->Invoke>( - RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, - port_allocator_.get())); - - // If datagram transport is in use, add opaque transport parameters. 
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) { - for (auto& options : session_options->media_description_options) { - absl::optional params = - transport_controller_->GetTransportParameters(options.mid); - if (!params) { - continue; - } - options.transport_options.opaque_parameters = params; - if ((use_datagram_transport_ && - (options.type == cricket::MEDIA_TYPE_AUDIO || - options.type == cricket::MEDIA_TYPE_VIDEO)) || - (use_datagram_transport_for_data_channels_ && - options.type == cricket::MEDIA_TYPE_DATA)) { - options.alt_protocol = params->protocol; - } - } - } +bool PeerConnection::StartRtcEventLog(std::unique_ptr output, + int64_t output_period_ms) { + return worker_thread()->Invoke( + RTC_FROM_HERE, + [this, output = std::move(output), output_period_ms]() mutable { + return StartRtcEventLog_w(std::move(output), output_period_ms); + }); } -void PeerConnection::GetOptionsForPlanBAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Figure out transceiver directional preferences. - bool send_audio = HasRtpSender(cricket::MEDIA_TYPE_AUDIO); - bool send_video = HasRtpSender(cricket::MEDIA_TYPE_VIDEO); - - // By default, generate sendrecv/recvonly m= sections. The direction is also - // restricted by the direction in the offer. - bool recv_audio = true; - bool recv_video = true; - - // The "offer_to_receive_X" options allow those defaults to be overridden. - if (offer_answer_options.offer_to_receive_audio != - RTCOfferAnswerOptions::kUndefined) { - recv_audio = (offer_answer_options.offer_to_receive_audio > 0); - } - if (offer_answer_options.offer_to_receive_video != - RTCOfferAnswerOptions::kUndefined) { - recv_video = (offer_answer_options.offer_to_receive_video > 0); - } - - absl::optional audio_index; - absl::optional video_index; - absl::optional data_index; - - // Generate m= sections that match those in the offer. 
- // Note that mediasession.cc will handle intersection our preferred - // direction with the offered direction. - GenerateMediaDescriptionOptions( - remote_description(), - RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), - RtpTransceiverDirectionFromSendRecv(send_video, recv_video), &audio_index, - &video_index, &data_index, session_options); - - cricket::MediaDescriptionOptions* audio_media_description_options = - !audio_index ? nullptr - : &session_options->media_description_options[*audio_index]; - cricket::MediaDescriptionOptions* video_media_description_options = - !video_index ? nullptr - : &session_options->media_description_options[*video_index]; - - AddPlanBRtpSenderOptions(GetSendersInternal(), - audio_media_description_options, - video_media_description_options, - offer_answer_options.num_simulcast_layers); -} - -void PeerConnection::GetOptionsForUnifiedPlanAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { - // Rules for generating an answer are dictated by JSEP sections 5.3.1 (Initial - // Answers) and 5.3.2 (Subsequent Answers). - RTC_DCHECK(remote_description()); - RTC_DCHECK(remote_description()->GetType() == SdpType::kOffer); - for (const ContentInfo& content : - remote_description()->description()->contents()) { - cricket::MediaType media_type = content.media_description()->type(); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - auto transceiver = GetAssociatedTransceiver(content.name); - RTC_CHECK(transceiver); - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForTransceiver(transceiver, content.name)); - } else { - RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); - // Reject all data sections if data channels are disabled. - // Reject a data section if it has already been rejected. - // Reject all data sections except for the first one. 
- if (data_channel_type() == cricket::DCT_NONE || content.rejected || - content.name != *GetDataMid()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(content.name)); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(content.name)); - } - } +bool PeerConnection::StartRtcEventLog( + std::unique_ptr output) { + int64_t output_period_ms = webrtc::RtcEventLog::kImmediateOutput; + if (absl::StartsWith(context_->trials().Lookup("WebRTC-RtcEventLogNewFormat"), + "Enabled")) { + output_period_ms = 5000; } + return StartRtcEventLog(std::move(output), output_period_ms); } -void PeerConnection::GenerateMediaDescriptionOptions( - const SessionDescriptionInterface* session_desc, - RtpTransceiverDirection audio_direction, - RtpTransceiverDirection video_direction, - absl::optional* audio_index, - absl::optional* video_index, - absl::optional* data_index, - cricket::MediaSessionOptions* session_options) { - for (const cricket::ContentInfo& content : - session_desc->description()->contents()) { - if (IsAudioContent(&content)) { - // If we already have an audio m= section, reject this extra one. - if (*audio_index) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_AUDIO, content.name, - RtpTransceiverDirection::kInactive, /*stopped=*/true)); - } else { - bool stopped = (audio_direction == RtpTransceiverDirection::kInactive); - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_AUDIO, - content.name, audio_direction, - stopped)); - *audio_index = session_options->media_description_options.size() - 1; - } - } else if (IsVideoContent(&content)) { - // If we already have an video m= section, reject this extra one. 
- if (*video_index) { - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_VIDEO, content.name, - RtpTransceiverDirection::kInactive, /*stopped=*/true)); - } else { - bool stopped = (video_direction == RtpTransceiverDirection::kInactive); - session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_VIDEO, - content.name, video_direction, - stopped)); - *video_index = session_options->media_description_options.size() - 1; - } - } else { - RTC_DCHECK(IsDataContent(&content)); - // If we already have an data m= section, reject this extra one. - if (*data_index) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(content.name)); - } else { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(content.name)); - *data_index = session_options->media_description_options.size() - 1; - } - } - } +void PeerConnection::StopRtcEventLog() { + worker_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&PeerConnection::StopRtcEventLog_w, this)); } -cricket::MediaDescriptionOptions -PeerConnection::GetMediaDescriptionOptionsForActiveData( - const std::string& mid) const { - // Direction for data sections is meaningless, but legacy endpoints might - // expect sendrecv. 
- cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, - RtpTransceiverDirection::kSendRecv, - /*stopped=*/false); - AddRtpDataChannelOptions(*data_channel_controller_.rtp_data_channels(), - &options); - return options; +rtc::scoped_refptr +PeerConnection::LookupDtlsTransportByMid(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + return transport_controller_->LookupDtlsTransportByMid(mid); } -cricket::MediaDescriptionOptions -PeerConnection::GetMediaDescriptionOptionsForRejectedData( - const std::string& mid) const { - cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true); - AddRtpDataChannelOptions(*data_channel_controller_.rtp_data_channels(), - &options); - return options; +rtc::scoped_refptr +PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + return transport_controller_->LookupDtlsTransportByMid(mid); } -absl::optional PeerConnection::GetDataMid() const { - switch (data_channel_type()) { - case cricket::DCT_RTP: - if (!data_channel_controller_.rtp_data_channel()) { - return absl::nullopt; - } - return data_channel_controller_.rtp_data_channel()->content_name(); - case cricket::DCT_SCTP: - case cricket::DCT_DATA_CHANNEL_TRANSPORT: - case cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP: - return sctp_mid_; - default: - return absl::nullopt; +rtc::scoped_refptr PeerConnection::GetSctpTransport() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!sctp_mid_s_) { + return nullptr; } + return transport_controller_->GetSctpTransport(*sctp_mid_s_); } -void PeerConnection::RemoveSenders(cricket::MediaType media_type) { - UpdateLocalSenders(std::vector(), media_type); - UpdateRemoteSendersList(std::vector(), false, - media_type, nullptr); +const SessionDescriptionInterface* PeerConnection::local_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return 
sdp_handler_->local_description(); } -void PeerConnection::UpdateRemoteSendersList( - const cricket::StreamParamsVec& streams, - bool default_sender_needed, - cricket::MediaType media_type, - StreamCollection* new_streams) { - RTC_DCHECK(!IsUnifiedPlan()); - - std::vector* current_senders = - GetRemoteSenderInfos(media_type); - - // Find removed senders. I.e., senders where the sender id or ssrc don't match - // the new StreamParam. - for (auto sender_it = current_senders->begin(); - sender_it != current_senders->end(); - /* incremented manually */) { - const RtpSenderInfo& info = *sender_it; - const cricket::StreamParams* params = - cricket::GetStreamBySsrc(streams, info.first_ssrc); - std::string params_stream_id; - if (params) { - params_stream_id = - (!params->first_stream_id().empty() ? params->first_stream_id() - : kDefaultStreamId); - } - bool sender_exists = params && params->id == info.sender_id && - params_stream_id == info.stream_id; - // If this is a default track, and we still need it, don't remove it. - if ((info.stream_id == kDefaultStreamId && default_sender_needed) || - sender_exists) { - ++sender_it; - } else { - OnRemoteSenderRemoved(info, media_type); - sender_it = current_senders->erase(sender_it); - } - } +const SessionDescriptionInterface* PeerConnection::remote_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sdp_handler_->remote_description(); +} - // Find new and active senders. - for (const cricket::StreamParams& params : streams) { - if (!params.has_ssrcs()) { - // The remote endpoint has streams, but didn't signal ssrcs. For an active - // sender, this means it is coming from a Unified Plan endpoint,so we just - // create a default. 
- default_sender_needed = true; - break; - } +const SessionDescriptionInterface* PeerConnection::current_local_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sdp_handler_->current_local_description(); +} - // |params.id| is the sender id and the stream id uses the first of - // |params.stream_ids|. The remote description could come from a Unified - // Plan endpoint, with multiple or no stream_ids() signaled. Since this is - // not supported in Plan B, we just take the first here and create the - // default stream ID if none is specified. - const std::string& stream_id = - (!params.first_stream_id().empty() ? params.first_stream_id() - : kDefaultStreamId); - const std::string& sender_id = params.id; - uint32_t ssrc = params.first_ssrc(); - - rtc::scoped_refptr stream = - remote_streams_->find(stream_id); - if (!stream) { - // This is a new MediaStream. Create a new remote MediaStream. - stream = MediaStreamProxy::Create(rtc::Thread::Current(), - MediaStream::Create(stream_id)); - remote_streams_->AddStream(stream); - new_streams->AddStream(stream); - } +const SessionDescriptionInterface* PeerConnection::current_remote_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sdp_handler_->current_remote_description(); +} - const RtpSenderInfo* sender_info = - FindSenderInfo(*current_senders, stream_id, sender_id); - if (!sender_info) { - current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); - OnRemoteSenderAdded(current_senders->back(), media_type); - } - } +const SessionDescriptionInterface* PeerConnection::pending_local_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sdp_handler_->pending_local_description(); +} - // Add default sender if necessary. - if (default_sender_needed) { - rtc::scoped_refptr default_stream = - remote_streams_->find(kDefaultStreamId); - if (!default_stream) { - // Create the new default MediaStream. 
- default_stream = MediaStreamProxy::Create( - rtc::Thread::Current(), MediaStream::Create(kDefaultStreamId)); - remote_streams_->AddStream(default_stream); - new_streams->AddStream(default_stream); - } - std::string default_sender_id = (media_type == cricket::MEDIA_TYPE_AUDIO) - ? kDefaultAudioSenderId - : kDefaultVideoSenderId; - const RtpSenderInfo* default_sender_info = - FindSenderInfo(*current_senders, kDefaultStreamId, default_sender_id); - if (!default_sender_info) { - current_senders->push_back( - RtpSenderInfo(kDefaultStreamId, default_sender_id, /*ssrc=*/0)); - OnRemoteSenderAdded(current_senders->back(), media_type); - } - } +const SessionDescriptionInterface* PeerConnection::pending_remote_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sdp_handler_->pending_remote_description(); } -void PeerConnection::OnRemoteSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - RTC_LOG(LS_INFO) << "Creating " << cricket::MediaTypeToString(media_type) - << " receiver for track_id=" << sender_info.sender_id - << " and stream_id=" << sender_info.stream_id; +void PeerConnection::Close() { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "PeerConnection::Close"); - MediaStreamInterface* stream = remote_streams_->find(sender_info.stream_id); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - CreateAudioReceiver(stream, sender_info); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - CreateVideoReceiver(stream, sender_info); - } else { - RTC_NOTREACHED() << "Invalid media type"; + if (IsClosed()) { + return; } -} + // Update stats here so that we have the most recent stats for tracks and + // streams before the channels are closed. 
+ stats_->UpdateStats(kStatsOutputLevelStandard); -void PeerConnection::OnRemoteSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - RTC_LOG(LS_INFO) << "Removing " << cricket::MediaTypeToString(media_type) - << " receiver for track_id=" << sender_info.sender_id - << " and stream_id=" << sender_info.stream_id; + ice_connection_state_ = PeerConnectionInterface::kIceConnectionClosed; + Observer()->OnIceConnectionChange(ice_connection_state_); + standardized_ice_connection_state_ = + PeerConnectionInterface::IceConnectionState::kIceConnectionClosed; + connection_state_ = PeerConnectionInterface::PeerConnectionState::kClosed; + Observer()->OnConnectionChange(connection_state_); - MediaStreamInterface* stream = remote_streams_->find(sender_info.stream_id); + sdp_handler_->Close(); - rtc::scoped_refptr receiver; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - // When the MediaEngine audio channel is destroyed, the RemoteAudioSource - // will be notified which will end the AudioRtpReceiver::track(). - receiver = RemoveAndStopReceiver(sender_info); - rtc::scoped_refptr audio_track = - stream->FindAudioTrack(sender_info.sender_id); - if (audio_track) { - stream->RemoveTrack(audio_track); - } - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - // Stopping or destroying a VideoRtpReceiver will end the - // VideoRtpReceiver::track(). - receiver = RemoveAndStopReceiver(sender_info); - rtc::scoped_refptr video_track = - stream->FindVideoTrack(sender_info.sender_id); - if (video_track) { - // There's no guarantee the track is still available, e.g. the track may - // have been removed from the stream by an application. 
- stream->RemoveTrack(video_track); - } - } else { - RTC_NOTREACHED() << "Invalid media type"; - } - if (receiver) { - Observer()->OnRemoveTrack(receiver); - } -} + NoteUsageEvent(UsageEvent::CLOSE_CALLED); -void PeerConnection::UpdateEndedRemoteMediaStreams() { - std::vector> streams_to_remove; - for (size_t i = 0; i < remote_streams_->count(); ++i) { - MediaStreamInterface* stream = remote_streams_->at(i); - if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { - streams_to_remove.push_back(stream); - } + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + transceiver->internal()->SetPeerConnectionClosed(); + if (!transceiver->stopped()) + transceiver->StopInternal(); } - for (auto& stream : streams_to_remove) { - remote_streams_->RemoveStream(stream); - Observer()->OnRemoveStream(std::move(stream)); + // Ensure that all asynchronous stats requests are completed before destroying + // the transport controller below. + if (stats_collector_) { + stats_collector_->WaitForPendingRequest(); } -} -void PeerConnection::UpdateLocalSenders( - const std::vector& streams, - cricket::MediaType media_type) { - std::vector* current_senders = GetLocalSenderInfos(media_type); + // Don't destroy BaseChannels until after stats has been cleaned up so that + // the last stats request can still read from the channels. + sdp_handler_->DestroyAllChannels(); - // Find removed tracks. I.e., tracks where the track id, stream id or ssrc - // don't match the new StreamParam. 
- for (auto sender_it = current_senders->begin(); - sender_it != current_senders->end(); - /* incremented manually */) { - const RtpSenderInfo& info = *sender_it; - const cricket::StreamParams* params = - cricket::GetStreamBySsrc(streams, info.first_ssrc); - if (!params || params->id != info.sender_id || - params->first_stream_id() != info.stream_id) { - OnLocalSenderRemoved(info, media_type); - sender_it = current_senders->erase(sender_it); - } else { - ++sender_it; - } - } + // The event log is used in the transport controller, which must be outlived + // by the former. CreateOffer by the peer connection is implemented + // asynchronously and if the peer connection is closed without resetting the + // WebRTC session description factory, the session description factory would + // call the transport controller. + sdp_handler_->ResetSessionDescFactory(); + transport_controller_.reset(); + rtp_manager_->Close(); - // Find new and active senders. - for (const cricket::StreamParams& params : streams) { - // The sync_label is the MediaStream label and the |stream.id| is the - // sender id. - const std::string& stream_id = params.first_stream_id(); - const std::string& sender_id = params.id; - uint32_t ssrc = params.first_ssrc(); - const RtpSenderInfo* sender_info = - FindSenderInfo(*current_senders, stream_id, sender_id); - if (!sender_info) { - current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); - OnLocalSenderAdded(current_senders->back(), media_type); - } - } + network_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, + port_allocator_.get())); + + worker_thread()->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(worker_thread()); + call_safety_.reset(); + call_.reset(); + // The event log must outlive call (and any other object that uses it). + event_log_.reset(); + }); + ReportUsagePattern(); + // The .h file says that observer can be discarded after close() returns. + // Make sure this is true. 
+ observer_ = nullptr; } -void PeerConnection::OnLocalSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - RTC_DCHECK(!IsUnifiedPlan()); - auto sender = FindSenderById(sender_info.sender_id); - if (!sender) { - RTC_LOG(LS_WARNING) << "An unknown RtpSender with id " - << sender_info.sender_id - << " has been configured in the local description."; +void PeerConnection::SetIceConnectionState(IceConnectionState new_state) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (ice_connection_state_ == new_state) { return; } - if (sender->media_type() != media_type) { - RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" - " description with an unexpected media type."; + // After transitioning to "closed", ignore any additional states from + // TransportController (such as "disconnected"). + if (IsClosed()) { return; } - sender->internal()->set_stream_ids({sender_info.stream_id}); - sender->internal()->SetSsrc(sender_info.first_ssrc); + RTC_LOG(LS_INFO) << "Changing IceConnectionState " << ice_connection_state_ + << " => " << new_state; + RTC_DCHECK(ice_connection_state_ != + PeerConnectionInterface::kIceConnectionClosed); + + ice_connection_state_ = new_state; + Observer()->OnIceConnectionChange(ice_connection_state_); } -void PeerConnection::OnLocalSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { - auto sender = FindSenderById(sender_info.sender_id); - if (!sender) { - // This is the normal case. I.e., RemoveStream has been called and the - // SessionDescriptions has been renegotiated. +void PeerConnection::SetStandardizedIceConnectionState( + PeerConnectionInterface::IceConnectionState new_state) { + if (standardized_ice_connection_state_ == new_state) { return; } - // A sender has been removed from the SessionDescription but it's still - // associated with the PeerConnection. This only occurs if the SDP doesn't - // match with the calls to CreateSender, AddStream and RemoveStream. 
- if (sender->media_type() != media_type) { - RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" - " description with an unexpected media type."; + if (IsClosed()) { return; } - sender->internal()->SetSsrc(0); + RTC_LOG(LS_INFO) << "Changing standardized IceConnectionState " + << standardized_ice_connection_state_ << " => " << new_state; + + standardized_ice_connection_state_ = new_state; + Observer()->OnStandardizedIceConnectionChange(new_state); } -void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) { - // Since data_channel_controller doesn't do signals, this - // signal is relayed here. - data_channel_controller_.OnSctpDataChannelClosed(channel); +void PeerConnection::SetConnectionState( + PeerConnectionInterface::PeerConnectionState new_state) { + if (connection_state_ == new_state) + return; + if (IsClosed()) + return; + connection_state_ = new_state; + Observer()->OnConnectionChange(new_state); } -rtc::scoped_refptr> -PeerConnection::GetAudioTransceiver() const { - // This method only works with Plan B SDP, where there is a single - // audio/video transceiver. - RTC_DCHECK(!IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - return transceiver; - } +void PeerConnection::OnIceGatheringChange( + PeerConnectionInterface::IceGatheringState new_state) { + if (IsClosed()) { + return; } - RTC_NOTREACHED(); - return nullptr; + ice_gathering_state_ = new_state; + Observer()->OnIceGatheringChange(ice_gathering_state_); } -rtc::scoped_refptr> -PeerConnection::GetVideoTransceiver() const { - // This method only works with Plan B SDP, where there is a single - // audio/video transceiver. 
- RTC_DCHECK(!IsUnifiedPlan()); - for (auto transceiver : transceivers_) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - return transceiver; - } +void PeerConnection::OnIceCandidate( + std::unique_ptr candidate) { + if (IsClosed()) { + return; } - RTC_NOTREACHED(); - return nullptr; + ReportIceCandidateCollected(candidate->candidate()); + Observer()->OnIceCandidate(candidate.get()); } -// TODO(bugs.webrtc.org/7600): Remove this when multiple transceivers with -// individual transceiver directions are supported. -bool PeerConnection::HasRtpSender(cricket::MediaType type) const { - switch (type) { - case cricket::MEDIA_TYPE_AUDIO: - return !GetAudioTransceiver()->internal()->senders().empty(); - case cricket::MEDIA_TYPE_VIDEO: - return !GetVideoTransceiver()->internal()->senders().empty(); - case cricket::MEDIA_TYPE_DATA: - return false; - } - RTC_NOTREACHED(); - return false; +void PeerConnection::OnIceCandidateError(const std::string& address, + int port, + const std::string& url, + int error_code, + const std::string& error_text) { + if (IsClosed()) { + return; + } + Observer()->OnIceCandidateError(address, port, url, error_code, error_text); + // Leftover not to break wpt test during migration to the new API. 
+ Observer()->OnIceCandidateError(address + ":", url, error_code, error_text); } -rtc::scoped_refptr> -PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) const { - for (const auto& transceiver : transceivers_) { - for (auto sender : transceiver->internal()->senders()) { - if (sender->track() == track) { - return sender; - } - } +void PeerConnection::OnIceCandidatesRemoved( + const std::vector& candidates) { + if (IsClosed()) { + return; } - return nullptr; + Observer()->OnIceCandidatesRemoved(candidates); } -rtc::scoped_refptr> -PeerConnection::FindSenderById(const std::string& sender_id) const { - for (const auto& transceiver : transceivers_) { - for (auto sender : transceiver->internal()->senders()) { - if (sender->id() == sender_id) { - return sender; - } - } +void PeerConnection::OnSelectedCandidatePairChanged( + const cricket::CandidatePairChangeEvent& event) { + if (IsClosed()) { + return; } - return nullptr; -} -rtc::scoped_refptr> -PeerConnection::FindReceiverById(const std::string& receiver_id) const { - for (const auto& transceiver : transceivers_) { - for (auto receiver : transceiver->internal()->receivers()) { - if (receiver->id() == receiver_id) { - return receiver; - } - } + if (event.selected_candidate_pair.local_candidate().type() == + LOCAL_PORT_TYPE && + event.selected_candidate_pair.remote_candidate().type() == + LOCAL_PORT_TYPE) { + NoteUsageEvent(UsageEvent::DIRECT_CONNECTION_SELECTED); } - return nullptr; -} -std::vector* -PeerConnection::GetRemoteSenderInfos(cricket::MediaType media_type) { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO); - return (media_type == cricket::MEDIA_TYPE_AUDIO) - ? 
&remote_audio_sender_infos_ - : &remote_video_sender_infos_; + Observer()->OnIceSelectedCandidatePairChanged(event); } -std::vector* PeerConnection::GetLocalSenderInfos( - cricket::MediaType media_type) { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO); - return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &local_audio_sender_infos_ - : &local_video_sender_infos_; +absl::optional PeerConnection::GetDataMid() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + switch (data_channel_type()) { + case cricket::DCT_RTP: + if (!data_channel_controller_.rtp_data_channel()) { + return absl::nullopt; + } + return data_channel_controller_.rtp_data_channel()->content_name(); + case cricket::DCT_SCTP: + return sctp_mid_s_; + default: + return absl::nullopt; + } } -const PeerConnection::RtpSenderInfo* PeerConnection::FindSenderInfo( - const std::vector& infos, - const std::string& stream_id, - const std::string sender_id) const { - for (const RtpSenderInfo& sender_info : infos) { - if (sender_info.stream_id == stream_id && - sender_info.sender_id == sender_id) { - return &sender_info; - } - } - return nullptr; +void PeerConnection::OnSctpDataChannelClosed(DataChannelInterface* channel) { + // Since data_channel_controller doesn't do signals, this + // signal is relayed here. + data_channel_controller_.OnSctpDataChannelClosed( + static_cast(channel)); } -DataChannel* PeerConnection::FindDataChannelBySid(int sid) const { +SctpDataChannel* PeerConnection::FindDataChannelBySid(int sid) const { return data_channel_controller_.FindDataChannelBySid(sid); } @@ -5795,11 +1788,10 @@ PeerConnection::InitializePortAllocator_n( // by experiment. 
if (configuration.disable_ipv6) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); - } else if (webrtc::field_trial::FindFullName("WebRTC-IPv6Default") - .find("Disabled") == 0) { + } else if (absl::StartsWith(context_->trials().Lookup("WebRTC-IPv6Default"), + "Disabled")) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); } - if (configuration.disable_ipv6_on_wifi) { port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); RTC_LOG(LS_INFO) << "IPv6 candidates on Wi-Fi are disabled."; @@ -5854,6 +1846,7 @@ bool PeerConnection::ReconfigurePortAllocator_n( webrtc::TurnCustomizer* turn_customizer, absl::optional stun_candidate_keepalive_interval, bool have_local_description) { + RTC_DCHECK_RUN_ON(network_thread()); port_allocator_->SetCandidateFilter( ConvertIceTransportTypeToCandidateFilter(type)); // According to JSEP, after setLocalDescription, changing the candidate pool @@ -5876,7 +1869,7 @@ bool PeerConnection::ReconfigurePortAllocator_n( } cricket::ChannelManager* PeerConnection::channel_manager() const { - return factory_->channel_manager(); + return context_->channel_manager(); } bool PeerConnection::StartRtcEventLog_w( @@ -5898,7 +1891,7 @@ void PeerConnection::StopRtcEventLog_w() { cricket::ChannelInterface* PeerConnection::GetChannel( const std::string& content_name) { - for (const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (channel && channel->content_name() == content_name) { return channel; @@ -5926,10 +1919,11 @@ bool PeerConnection::GetSctpSslRole(rtc::SSLRole* role) { } absl::optional dtls_role; - if (sctp_mid_) { - dtls_role = transport_controller_->GetDtlsRole(*sctp_mid_); - if (!dtls_role && is_caller_.has_value()) { - dtls_role = *is_caller_ ? 
rtc::SSL_SERVER : rtc::SSL_CLIENT; + if (sctp_mid_s_) { + dtls_role = transport_controller_->GetDtlsRole(*sctp_mid_s_); + if (!dtls_role && sdp_handler_->is_caller().has_value()) { + dtls_role = + *sdp_handler_->is_caller() ? rtc::SSL_SERVER : rtc::SSL_CLIENT; } *role = *dtls_role; return true; @@ -5955,152 +1949,25 @@ bool PeerConnection::GetSslRole(const std::string& content_name, return false; } -void PeerConnection::SetSessionError(SessionError error, - const std::string& error_desc) { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (error != session_error_) { - session_error_ = error; - session_error_desc_ = error_desc; - } -} - -RTCError PeerConnection::UpdateSessionState( - SdpType type, - cricket::ContentSource source, - const cricket::SessionDescription* description) { - RTC_DCHECK_RUN_ON(signaling_thread()); - - // If there's already a pending error then no state transition should happen. - // But all call-sites should be verifying this before calling us! - RTC_DCHECK(session_error() == SessionError::kNone); - - // If this is answer-ish we're ready to let media flow. - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - EnableSending(); - } - - // Update the signaling state according to the specified state machine (see - // https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum). - if (type == SdpType::kOffer) { - ChangeSignalingState(source == cricket::CS_LOCAL - ? PeerConnectionInterface::kHaveLocalOffer - : PeerConnectionInterface::kHaveRemoteOffer); - } else if (type == SdpType::kPrAnswer) { - ChangeSignalingState(source == cricket::CS_LOCAL - ? PeerConnectionInterface::kHaveLocalPrAnswer - : PeerConnectionInterface::kHaveRemotePrAnswer); - } else { - RTC_DCHECK(type == SdpType::kAnswer); - ChangeSignalingState(PeerConnectionInterface::kStable); - transceiver_stable_states_by_transceivers_.clear(); - } - - // Update internal objects according to the session description's media - // descriptions. 
- RTCError error = PushdownMediaDescription(type, source); - if (!error.ok()) { - return error; - } - - return RTCError::OK(); -} - -RTCError PeerConnection::PushdownMediaDescription( - SdpType type, - cricket::ContentSource source) { - const SessionDescriptionInterface* sdesc = - (source == cricket::CS_LOCAL ? local_description() - : remote_description()); - RTC_DCHECK(sdesc); - - // Push down the new SDP media section for each audio/video transceiver. - for (const auto& transceiver : transceivers_) { - const ContentInfo* content_info = - FindMediaSectionForTransceiver(transceiver, sdesc); - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (!channel || !content_info || content_info->rejected) { - continue; - } - const MediaContentDescription* content_desc = - content_info->media_description(); - if (!content_desc) { - continue; - } - std::string error; - bool success = (source == cricket::CS_LOCAL) - ? channel->SetLocalContent(content_desc, type, &error) - : channel->SetRemoteContent(content_desc, type, &error); - if (!success) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); - } - } - - // If using the RtpDataChannel, push down the new SDP section for it too. - if (data_channel_controller_.rtp_data_channel()) { - const ContentInfo* data_content = - cricket::GetFirstDataContent(sdesc->description()); - if (data_content && !data_content->rejected) { - const MediaContentDescription* data_desc = - data_content->media_description(); - if (data_desc) { - std::string error; - bool success = - (source == cricket::CS_LOCAL) - ? 
data_channel_controller_.rtp_data_channel()->SetLocalContent( - data_desc, type, &error) - : data_channel_controller_.rtp_data_channel()->SetRemoteContent( - data_desc, type, &error); - if (!success) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); - } - } - } - } - - // Need complete offer/answer with an SCTP m= section before starting SCTP, - // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19 - if (sctp_mid_ && local_description() && remote_description()) { - rtc::scoped_refptr sctp_transport = - transport_controller_->GetSctpTransport(*sctp_mid_); - auto local_sctp_description = cricket::GetFirstSctpDataContentDescription( - local_description()->description()); - auto remote_sctp_description = cricket::GetFirstSctpDataContentDescription( - remote_description()->description()); - if (sctp_transport && local_sctp_description && remote_sctp_description) { - int max_message_size; - // A remote max message size of zero means "any size supported". - // We configure the connection with our own max message size. 
- if (remote_sctp_description->max_message_size() == 0) { - max_message_size = local_sctp_description->max_message_size(); - } else { - max_message_size = - std::min(local_sctp_description->max_message_size(), - remote_sctp_description->max_message_size()); - } - sctp_transport->Start(local_sctp_description->port(), - remote_sctp_description->port(), max_message_size); - } +int32_t PeerConnection::StartRecorder(int32_t dir, std::string path) { + if (!worker_thread()->IsCurrent()) { + return worker_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&PeerConnection::StartRecorder, this, dir, + path)); } - - return RTCError::OK(); + RTC_DCHECK_RUN_ON(worker_thread()); + RTC_DCHECK(call_); + return call_->StartRecorder(dir, path); } -RTCError PeerConnection::PushdownTransportDescription( - cricket::ContentSource source, - SdpType type) { - RTC_DCHECK_RUN_ON(signaling_thread()); - - if (source == cricket::CS_LOCAL) { - const SessionDescriptionInterface* sdesc = local_description(); - RTC_DCHECK(sdesc); - return transport_controller_->SetLocalDescription(type, - sdesc->description()); - } else { - const SessionDescriptionInterface* sdesc = remote_description(); - RTC_DCHECK(sdesc); - return transport_controller_->SetRemoteDescription(type, - sdesc->description()); +int32_t PeerConnection::StopRecorder(int32_t dir) { + if (!worker_thread()->IsCurrent()) { + return worker_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&PeerConnection::StopRecorder, this, dir)); } + RTC_DCHECK_RUN_ON(worker_thread()); + RTC_DCHECK(call_); + return call_->StopRecorder(dir); } bool PeerConnection::GetTransportDescription( @@ -6119,51 +1986,15 @@ bool PeerConnection::GetTransportDescription( return true; } -cricket::IceConfig PeerConnection::ParseIceConfig( - const PeerConnectionInterface::RTCConfiguration& config) const { - cricket::ContinualGatheringPolicy gathering_policy; - switch (config.continual_gathering_policy) { - case PeerConnectionInterface::GATHER_ONCE: - gathering_policy = 
cricket::GATHER_ONCE; - break; - case PeerConnectionInterface::GATHER_CONTINUALLY: - gathering_policy = cricket::GATHER_CONTINUALLY; - break; - default: - RTC_NOTREACHED(); - gathering_policy = cricket::GATHER_ONCE; - } - - cricket::IceConfig ice_config; - ice_config.receiving_timeout = RTCConfigurationToIceConfigOptionalInt( - config.ice_connection_receiving_timeout); - ice_config.prioritize_most_likely_candidate_pairs = - config.prioritize_most_likely_ice_candidate_pairs; - ice_config.backup_connection_ping_interval = - RTCConfigurationToIceConfigOptionalInt( - config.ice_backup_candidate_pair_ping_interval); - ice_config.continual_gathering_policy = gathering_policy; - ice_config.presume_writable_when_fully_relayed = - config.presume_writable_when_fully_relayed; - ice_config.surface_ice_candidates_on_ice_transport_type_changed = - config.surface_ice_candidates_on_ice_transport_type_changed; - ice_config.ice_check_interval_strong_connectivity = - config.ice_check_interval_strong_connectivity; - ice_config.ice_check_interval_weak_connectivity = - config.ice_check_interval_weak_connectivity; - ice_config.ice_check_min_interval = config.ice_check_min_interval; - ice_config.ice_unwritable_timeout = config.ice_unwritable_timeout; - ice_config.ice_unwritable_min_checks = config.ice_unwritable_min_checks; - ice_config.ice_inactive_timeout = config.ice_inactive_timeout; - ice_config.stun_keepalive_interval = config.stun_candidate_keepalive_interval; - ice_config.network_preference = config.network_preference; - return ice_config; +std::vector PeerConnection::GetDataChannelStats() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return data_channel_controller_.GetDataChannelStats(); } absl::optional PeerConnection::sctp_transport_name() const { RTC_DCHECK_RUN_ON(signaling_thread()); - if (sctp_mid_ && transport_controller_) { - auto dtls_transport = transport_controller_->GetDtlsTransport(*sctp_mid_); + if (sctp_mid_s_ && transport_controller_) { + auto dtls_transport = 
transport_controller_->GetDtlsTransport(*sctp_mid_s_); if (dtls_transport) { return dtls_transport->transport_name(); } @@ -6185,7 +2016,7 @@ std::map PeerConnection::GetTransportNamesByMid() const { RTC_DCHECK_RUN_ON(signaling_thread()); std::map transport_names_by_mid; - for (const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (channel) { transport_names_by_mid[channel->content_name()] = @@ -6200,7 +2031,7 @@ std::map PeerConnection::GetTransportNamesByMid() if (data_channel_controller_.data_channel_transport()) { absl::optional transport_name = sctp_transport_name(); RTC_DCHECK(transport_name); - transport_names_by_mid[*sctp_mid_] = *transport_name; + transport_names_by_mid[*sctp_mid_s_] = *transport_name; } return transport_names_by_mid; } @@ -6251,24 +2082,13 @@ cricket::DataChannelType PeerConnection::data_channel_type() const { bool PeerConnection::IceRestartPending(const std::string& content_name) const { RTC_DCHECK_RUN_ON(signaling_thread()); - return pending_ice_restarts_.find(content_name) != - pending_ice_restarts_.end(); + return sdp_handler_->IceRestartPending(content_name); } bool PeerConnection::NeedsIceRestart(const std::string& content_name) const { return transport_controller_->NeedsIceRestart(content_name); } -void PeerConnection::OnCertificateReady( - const rtc::scoped_refptr& certificate) { - transport_controller_->SetLocalCertificate(certificate); -} - -void PeerConnection::OnDtlsSrtpSetupFailure(cricket::BaseChannel*, bool rtcp) { - SetSessionError(SessionError::kTransport, - rtcp ? kDtlsSrtpSetupFailureRtcp : kDtlsSrtpSetupFailureRtp); -} - void PeerConnection::OnTransportControllerConnectionState( cricket::IceConnectionState state) { switch (state) { @@ -6330,341 +2150,62 @@ void PeerConnection::OnTransportControllerCandidatesGathered( // Use transport_name as the candidate media id. 
std::unique_ptr candidate( new JsepIceCandidate(transport_name, sdp_mline_index, *citer)); - if (local_description()) { - mutable_local_description()->AddCandidate(candidate.get()); - } - OnIceCandidate(std::move(candidate)); - } -} - -void PeerConnection::OnTransportControllerCandidateError( - const cricket::IceCandidateErrorEvent& event) { - OnIceCandidateError(event.address, event.port, event.url, event.error_code, - event.error_text); -} - -void PeerConnection::OnTransportControllerCandidatesRemoved( - const std::vector& candidates) { - // Sanity check. - for (const cricket::Candidate& candidate : candidates) { - if (candidate.transport_name().empty()) { - RTC_LOG(LS_ERROR) << "OnTransportControllerCandidatesRemoved: " - "empty content name in candidate " - << candidate.ToString(); - return; - } - } - - if (local_description()) { - mutable_local_description()->RemoveCandidates(candidates); - } - OnIceCandidatesRemoved(candidates); -} - -void PeerConnection::OnTransportControllerCandidateChanged( - const cricket::CandidatePairChangeEvent& event) { - OnSelectedCandidatePairChanged(event); -} - -void PeerConnection::OnTransportControllerDtlsHandshakeError( - rtc::SSLHandshakeError error) { - RTC_HISTOGRAM_ENUMERATION( - "WebRTC.PeerConnection.DtlsHandshakeError", static_cast(error), - static_cast(rtc::SSLHandshakeError::MAX_VALUE)); -} - -void PeerConnection::EnableSending() { - for (const auto& transceiver : transceivers_) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel && !channel->enabled()) { - channel->Enable(true); - } - } - - if (data_channel_controller_.rtp_data_channel() && - !data_channel_controller_.rtp_data_channel()->enabled()) { - data_channel_controller_.rtp_data_channel()->Enable(true); - } -} - -// Returns the media index for a local ice candidate given the content name. 
-bool PeerConnection::GetLocalCandidateMediaIndex( - const std::string& content_name, - int* sdp_mline_index) { - if (!local_description() || !sdp_mline_index) { - return false; - } - - bool content_found = false; - const ContentInfos& contents = local_description()->description()->contents(); - for (size_t index = 0; index < contents.size(); ++index) { - if (contents[index].name == content_name) { - *sdp_mline_index = static_cast(index); - content_found = true; - break; - } - } - return content_found; -} - -bool PeerConnection::UseCandidatesInSessionDescription( - const SessionDescriptionInterface* remote_desc) { - if (!remote_desc) { - return true; - } - bool ret = true; - - for (size_t m = 0; m < remote_desc->number_of_mediasections(); ++m) { - const IceCandidateCollection* candidates = remote_desc->candidates(m); - for (size_t n = 0; n < candidates->count(); ++n) { - const IceCandidateInterface* candidate = candidates->at(n); - bool valid = false; - if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) { - if (valid) { - RTC_LOG(LS_INFO) - << "UseCandidatesInSessionDescription: Not ready to use " - "candidate."; - } - continue; - } - ret = UseCandidate(candidate); - if (!ret) { - break; - } - } - } - return ret; -} - -bool PeerConnection::UseCandidate(const IceCandidateInterface* candidate) { - RTCErrorOr result = - FindContentInfo(remote_description(), candidate); - if (!result.ok()) { - RTC_LOG(LS_ERROR) << "UseCandidate: Invalid candidate. " - << result.error().message(); - return false; - } - std::vector candidates; - candidates.push_back(candidate->candidate()); - // Invoking BaseSession method to handle remote candidates. - RTCError error = transport_controller_->AddRemoteCandidates( - result.value()->name, candidates); - if (error.ok()) { - ReportRemoteIceCandidateAdded(candidate->candidate()); - // Candidates successfully submitted for checking. 
- if (ice_connection_state_ == PeerConnectionInterface::kIceConnectionNew || - ice_connection_state_ == - PeerConnectionInterface::kIceConnectionDisconnected) { - // If state is New, then the session has just gotten its first remote ICE - // candidates, so go to Checking. - // If state is Disconnected, the session is re-using old candidates or - // receiving additional ones, so go to Checking. - // If state is Connected, stay Connected. - // TODO(bemasc): If state is Connected, and the new candidates are for a - // newly added transport, then the state actually _should_ move to - // checking. Add a way to distinguish that case. - SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); - } - // TODO(bemasc): If state is Completed, go back to Connected. - } else { - RTC_LOG(LS_WARNING) << error.message(); - } - return true; -} - -RTCErrorOr PeerConnection::FindContentInfo( - const SessionDescriptionInterface* description, - const IceCandidateInterface* candidate) { - if (candidate->sdp_mline_index() >= 0) { - size_t mediacontent_index = - static_cast(candidate->sdp_mline_index()); - size_t content_size = description->description()->contents().size(); - if (mediacontent_index < content_size) { - return &description->description()->contents()[mediacontent_index]; - } else { - return RTCError(RTCErrorType::INVALID_RANGE, - "Media line index (" + - rtc::ToString(candidate->sdp_mline_index()) + - ") out of range (number of mlines: " + - rtc::ToString(content_size) + ")."); - } - } else if (!candidate->sdp_mid().empty()) { - auto& contents = description->description()->contents(); - auto it = absl::c_find_if( - contents, [candidate](const cricket::ContentInfo& content_info) { - return content_info.mid() == candidate->sdp_mid(); - }); - if (it == contents.end()) { - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "Mid " + candidate->sdp_mid() + - " specified but no media section with that mid found."); - } else { - return &*it; - } - } - - return 
RTCError(RTCErrorType::INVALID_PARAMETER, - "Neither sdp_mline_index nor sdp_mid specified."); -} - -void PeerConnection::RemoveUnusedChannels(const SessionDescription* desc) { - // Destroy video channel first since it may have a pointer to the - // voice channel. - const cricket::ContentInfo* video_info = cricket::GetFirstVideoContent(desc); - if (!video_info || video_info->rejected) { - DestroyTransceiverChannel(GetVideoTransceiver()); - } - - const cricket::ContentInfo* audio_info = cricket::GetFirstAudioContent(desc); - if (!audio_info || audio_info->rejected) { - DestroyTransceiverChannel(GetAudioTransceiver()); - } - - const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc); - if (!data_info || data_info->rejected) { - DestroyDataChannelTransport(); + sdp_handler_->AddLocalIceCandidate(candidate.get()); + OnIceCandidate(std::move(candidate)); } } -RTCErrorOr PeerConnection::GetEarlyBundleGroup( - const SessionDescription& desc) const { - const cricket::ContentGroup* bundle_group = nullptr; - if (configuration_.bundle_policy == - PeerConnectionInterface::kBundlePolicyMaxBundle) { - bundle_group = desc.GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - if (!bundle_group) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "max-bundle configured but session description " - "has no BUNDLE group"); - } - } - return bundle_group; +void PeerConnection::OnTransportControllerCandidateError( + const cricket::IceCandidateErrorEvent& event) { + OnIceCandidateError(event.address, event.port, event.url, event.error_code, + event.error_text); } -RTCError PeerConnection::CreateChannels(const SessionDescription& desc) { - // Creating the media channels. Transports should already have been created - // at this point. 
- const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(&desc); - if (voice && !voice->rejected && - !GetAudioTransceiver()->internal()->channel()) { - cricket::VoiceChannel* voice_channel = CreateVoiceChannel(voice->name); - if (!voice_channel) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create voice channel."); - } - GetAudioTransceiver()->internal()->SetChannel(voice_channel); - } - - const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc); - if (video && !video->rejected && - !GetVideoTransceiver()->internal()->channel()) { - cricket::VideoChannel* video_channel = CreateVideoChannel(video->name); - if (!video_channel) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create video channel."); - } - GetVideoTransceiver()->internal()->SetChannel(video_channel); - } - - const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); - if (data_channel_type() != cricket::DCT_NONE && data && !data->rejected && - !data_channel_controller_.rtp_data_channel() && - !data_channel_controller_.data_channel_transport()) { - if (!CreateDataChannel(data->name)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); +void PeerConnection::OnTransportControllerCandidatesRemoved( + const std::vector& candidates) { + // Sanity check. + for (const cricket::Candidate& candidate : candidates) { + if (candidate.transport_name().empty()) { + RTC_LOG(LS_ERROR) << "OnTransportControllerCandidatesRemoved: " + "empty content name in candidate " + << candidate.ToString(); + return; } } - - return RTCError::OK(); + sdp_handler_->RemoveLocalIceCandidates(candidates); + OnIceCandidatesRemoved(candidates); } -// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. 
-cricket::VoiceChannel* PeerConnection::CreateVoiceChannel( - const std::string& mid) { - RtpTransportInternal* rtp_transport = GetRtpTransport(mid); - MediaTransportConfig media_transport_config = - transport_controller_->GetMediaTransportConfig(mid); - - cricket::VoiceChannel* voice_channel = channel_manager()->CreateVoiceChannel( - call_ptr_, configuration_.media_config, rtp_transport, - media_transport_config, signaling_thread(), mid, SrtpRequired(), - GetCryptoOptions(), &ssrc_generator_, audio_options_); - if (!voice_channel) { - return nullptr; - } - voice_channel->SignalDtlsSrtpSetupFailure.connect( - this, &PeerConnection::OnDtlsSrtpSetupFailure); - voice_channel->SignalSentPacket.connect(this, - &PeerConnection::OnSentPacket_w); - voice_channel->SetRtpTransport(rtp_transport); - - return voice_channel; +void PeerConnection::OnTransportControllerCandidateChanged( + const cricket::CandidatePairChangeEvent& event) { + OnSelectedCandidatePairChanged(event); } -// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. 
-cricket::VideoChannel* PeerConnection::CreateVideoChannel( - const std::string& mid) { - RtpTransportInternal* rtp_transport = GetRtpTransport(mid); - MediaTransportConfig media_transport_config = - transport_controller_->GetMediaTransportConfig(mid); - - cricket::VideoChannel* video_channel = channel_manager()->CreateVideoChannel( - call_ptr_, configuration_.media_config, rtp_transport, - media_transport_config, signaling_thread(), mid, SrtpRequired(), - GetCryptoOptions(), &ssrc_generator_, video_options_, - video_bitrate_allocator_factory_.get()); - if (!video_channel) { - return nullptr; - } - video_channel->SignalDtlsSrtpSetupFailure.connect( - this, &PeerConnection::OnDtlsSrtpSetupFailure); - video_channel->SignalSentPacket.connect(this, - &PeerConnection::OnSentPacket_w); - video_channel->SetRtpTransport(rtp_transport); - - return video_channel; +void PeerConnection::OnTransportControllerDtlsHandshakeError( + rtc::SSLHandshakeError error) { + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.DtlsHandshakeError", static_cast(error), + static_cast(rtc::SSLHandshakeError::MAX_VALUE)); } -bool PeerConnection::CreateDataChannel(const std::string& mid) { - switch (data_channel_type()) { - case cricket::DCT_SCTP: - case cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP: - case cricket::DCT_DATA_CHANNEL_TRANSPORT: - if (!network_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::SetupDataChannelTransport_n, this, - mid))) { - return false; - } +// Returns the media index for a local ice candidate given the content name. +bool PeerConnection::GetLocalCandidateMediaIndex( + const std::string& content_name, + int* sdp_mline_index) { + if (!local_description() || !sdp_mline_index) { + return false; + } - // All non-RTP data channels must initialize |sctp_data_channels_|. 
- for (const auto& channel : - *data_channel_controller_.sctp_data_channels()) { - channel->OnTransportChannelCreated(); - } - return true; - case cricket::DCT_RTP: - default: - RtpTransportInternal* rtp_transport = GetRtpTransport(mid); - data_channel_controller_.set_rtp_data_channel( - channel_manager()->CreateRtpDataChannel( - configuration_.media_config, rtp_transport, signaling_thread(), - mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_)); - if (!data_channel_controller_.rtp_data_channel()) { - return false; - } - data_channel_controller_.rtp_data_channel() - ->SignalDtlsSrtpSetupFailure.connect( - this, &PeerConnection::OnDtlsSrtpSetupFailure); - data_channel_controller_.rtp_data_channel()->SignalSentPacket.connect( - this, &PeerConnection::OnSentPacket_w); - data_channel_controller_.rtp_data_channel()->SetRtpTransport( - rtp_transport); - return true; + bool content_found = false; + const ContentInfos& contents = local_description()->description()->contents(); + for (size_t index = 0; index < contents.size(); ++index) { + if (contents[index].name == content_name) { + *sdp_mline_index = static_cast(index); + content_found = true; + break; + } } - return false; + return content_found; } Call::Stats PeerConnection::GetCallStats() { @@ -6673,6 +2214,7 @@ Call::Stats PeerConnection::GetCallStats() { RTC_FROM_HERE, rtc::Bind(&PeerConnection::GetCallStats, this)); } RTC_DCHECK_RUN_ON(worker_thread()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (call_) { return call_->GetStats(); } else { @@ -6693,7 +2235,7 @@ bool PeerConnection::SetupDataChannelTransport_n(const std::string& mid) { data_channel_controller_.set_data_channel_transport(transport); data_channel_controller_.SetupDataChannelTransport_n(); - sctp_mid_ = mid; + sctp_mid_n_ = mid; // Note: setting the data sink and checking initial state must be done last, // after setting up the data channel. 
Setting the data sink may trigger @@ -6704,15 +2246,15 @@ bool PeerConnection::SetupDataChannelTransport_n(const std::string& mid) { } void PeerConnection::TeardownDataChannelTransport_n() { - if (!sctp_mid_ && !data_channel_controller_.data_channel_transport()) { + if (!sctp_mid_n_ && !data_channel_controller_.data_channel_transport()) { return; } RTC_LOG(LS_INFO) << "Tearing down data channel transport for mid=" - << *sctp_mid_; + << *sctp_mid_n_; // |sctp_mid_| may still be active through an SCTP transport. If not, unset // it. - sctp_mid_.reset(); + sctp_mid_n_.reset(); data_channel_controller_.TeardownDataChannelTransport_n(); } @@ -6741,178 +2283,6 @@ bool PeerConnection::ValidateBundleSettings(const SessionDescription* desc) { return true; } -bool PeerConnection::HasRtcpMuxEnabled(const cricket::ContentInfo* content) { - return content->media_description()->rtcp_mux(); -} - -static RTCError ValidateMids(const cricket::SessionDescription& description) { - std::set mids; - for (const cricket::ContentInfo& content : description.contents()) { - if (content.name.empty()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "A media section is missing a MID attribute."); - } - if (!mids.insert(content.name).second) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Duplicate a=mid value '" + content.name + "'."); - } - } - return RTCError::OK(); -} - -RTCError PeerConnection::ValidateSessionDescription( - const SessionDescriptionInterface* sdesc, - cricket::ContentSource source) { - if (session_error() != SessionError::kNone) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); - } - - if (!sdesc || !sdesc->description()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); - } - - SdpType type = sdesc->GetType(); - if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || - (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { - LOG_AND_RETURN_ERROR( - 
RTCErrorType::INVALID_STATE, - "Called in wrong state: " + GetSignalingStateString(signaling_state())); - } - - RTCError error = ValidateMids(*sdesc->description()); - if (!error.ok()) { - return error; - } - - // Verify crypto settings. - std::string crypto_error; - if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED || - dtls_enabled_) { - RTCError crypto_error = VerifyCrypto(sdesc->description(), dtls_enabled_); - if (!crypto_error.ok()) { - return crypto_error; - } - } - - // Verify ice-ufrag and ice-pwd. - if (!VerifyIceUfragPwdPresent(sdesc->description())) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kSdpWithoutIceUfragPwd); - } - - if (!ValidateBundleSettings(sdesc->description())) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kBundleWithoutRtcpMux); - } - - // TODO(skvlad): When the local rtcp-mux policy is Require, reject any - // m-lines that do not rtcp-mux enabled. - - // Verify m-lines in Answer when compared against Offer. - if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // With an answer we want to compare the new answer session description with - // the offer's session description from the current negotiation. - const cricket::SessionDescription* offer_desc = - (source == cricket::CS_LOCAL) ? remote_description()->description() - : local_description()->description(); - if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || - !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), - type)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInAnswer); - } - } else { - // The re-offers should respect the order of m= sections in current - // description. See RFC3264 Section 8 paragraph 4 for more details. - // With a re-offer, either the current local or current remote descriptions - // could be the most up to date, so we would like to check against both of - // them if they exist. 
It could be the case that one of them has a 0 port - // for a media section, but the other does not. This is important to check - // against in the case that we are recycling an m= section. - const cricket::SessionDescription* current_desc = nullptr; - const cricket::SessionDescription* secondary_current_desc = nullptr; - if (local_description()) { - current_desc = local_description()->description(); - if (remote_description()) { - secondary_current_desc = remote_description()->description(); - } - } else if (remote_description()) { - current_desc = remote_description()->description(); - } - if (current_desc && - !MediaSectionsInSameOrder(*current_desc, secondary_current_desc, - *sdesc->description(), type)) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInSubsequentOffer); - } - } - - if (IsUnifiedPlan()) { - // Ensure that each audio and video media section has at most one - // "StreamParams". This will return an error if receiving a session - // description from a "Plan B" endpoint which adds multiple tracks of the - // same type. With Unified Plan, there can only be at most one track per - // media section. 
- for (const ContentInfo& content : sdesc->description()->contents()) { - const MediaContentDescription& desc = *content.media_description(); - if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || - desc.type() == cricket::MEDIA_TYPE_VIDEO) && - desc.streams().size() > 1u) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Media section has more than one track specified " - "with a=ssrc lines which is not supported with " - "Unified Plan."); - } - } - } - - return RTCError::OK(); -} - -bool PeerConnection::ExpectSetLocalDescription(SdpType type) { - PeerConnectionInterface::SignalingState state = signaling_state(); - if (type == SdpType::kOffer) { - return (state == PeerConnectionInterface::kStable) || - (state == PeerConnectionInterface::kHaveLocalOffer); - } else { - RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); - return (state == PeerConnectionInterface::kHaveRemoteOffer) || - (state == PeerConnectionInterface::kHaveLocalPrAnswer); - } -} - -bool PeerConnection::ExpectSetRemoteDescription(SdpType type) { - PeerConnectionInterface::SignalingState state = signaling_state(); - if (type == SdpType::kOffer) { - return (state == PeerConnectionInterface::kStable) || - (state == PeerConnectionInterface::kHaveRemoteOffer); - } else { - RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); - return (state == PeerConnectionInterface::kHaveLocalOffer) || - (state == PeerConnectionInterface::kHaveRemotePrAnswer); - } -} - -const char* PeerConnection::SessionErrorToString(SessionError error) const { - switch (error) { - case SessionError::kNone: - return "ERROR_NONE"; - case SessionError::kContent: - return "ERROR_CONTENT"; - case SessionError::kTransport: - return "ERROR_TRANSPORT"; - } - RTC_NOTREACHED(); - return ""; -} - -std::string PeerConnection::GetSessionErrorMsg() { - rtc::StringBuilder desc; - desc << kSessionError << SessionErrorToString(session_error()) << ". 
"; - desc << kSessionErrorDesc << session_error_desc() << "."; - return desc.Release(); -} - void PeerConnection::ReportSdpFormatReceived( const SessionDescriptionInterface& remote_offer) { int num_audio_mlines = 0; @@ -6939,8 +2309,21 @@ void PeerConnection::ReportSdpFormatReceived( } else if (num_audio_tracks > 0 || num_video_tracks > 0) { format = kSdpFormatReceivedSimple; } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceived", format, - kSdpFormatReceivedMax); + switch (remote_offer.GetType()) { + case SdpType::kOffer: + // Historically only offers were counted. + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceived", + format, kSdpFormatReceivedMax); + break; + case SdpType::kAnswer: + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceivedAnswer", + format, kSdpFormatReceivedMax); + break; + default: + RTC_LOG(LS_ERROR) << "Can not report SdpFormatReceived for " + << SdpTypeToString(remote_offer.GetType()); + break; + } } void PeerConnection::ReportIceCandidateCollected( @@ -6957,111 +2340,19 @@ void PeerConnection::ReportIceCandidateCollected( } } -void PeerConnection::ReportRemoteIceCandidateAdded( - const cricket::Candidate& candidate) { - NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED); - if (candidate.address().IsPrivateIP()) { - NoteUsageEvent(UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED); - } - if (candidate.address().IsUnresolvedIP()) { - NoteUsageEvent(UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED); - } - if (candidate.address().family() == AF_INET6) { - NoteUsageEvent(UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED); - } -} - void PeerConnection::NoteUsageEvent(UsageEvent event) { RTC_DCHECK_RUN_ON(signaling_thread()); - usage_event_accumulator_ |= static_cast(event); + usage_pattern_.NoteUsageEvent(event); } void PeerConnection::ReportUsagePattern() const { - RTC_DLOG(LS_INFO) << "Usage signature is " << usage_event_accumulator_; - RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.PeerConnection.UsagePattern", - 
usage_event_accumulator_, - static_cast(UsageEvent::MAX_VALUE)); - const int bad_bits = - static_cast(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED) | - static_cast(UsageEvent::CANDIDATE_COLLECTED); - const int good_bits = - static_cast(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED) | - static_cast(UsageEvent::REMOTE_CANDIDATE_ADDED) | - static_cast(UsageEvent::ICE_STATE_CONNECTED); - if ((usage_event_accumulator_ & bad_bits) == bad_bits && - (usage_event_accumulator_ & good_bits) == 0) { - // If called after close(), we can't report, because observer may have - // been deallocated, and therefore pointer is null. Write to log instead. - if (observer_) { - Observer()->OnInterestingUsage(usage_event_accumulator_); - } else { - RTC_LOG(LS_INFO) << "Interesting usage signature " - << usage_event_accumulator_ - << " observed after observer shutdown"; - } - } -} - -void PeerConnection::ReportNegotiatedSdpSemantics( - const SessionDescriptionInterface& answer) { - SdpSemanticNegotiated semantics_negotiated; - switch (answer.description()->msid_signaling()) { - case 0: - semantics_negotiated = kSdpSemanticNegotiatedNone; - break; - case cricket::kMsidSignalingMediaSection: - semantics_negotiated = kSdpSemanticNegotiatedUnifiedPlan; - break; - case cricket::kMsidSignalingSsrcAttribute: - semantics_negotiated = kSdpSemanticNegotiatedPlanB; - break; - case cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute: - semantics_negotiated = kSdpSemanticNegotiatedMixed; - break; - default: - RTC_NOTREACHED(); - } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpSemanticNegotiated", - semantics_negotiated, kSdpSemanticNegotiatedMax); -} - -// We need to check the local/remote description for the Transport instead of -// the session, because a new Transport added during renegotiation may have -// them unset while the session has them set from the previous negotiation. 
-// Not doing so may trigger the auto generation of transport description and -// mess up DTLS identity information, ICE credential, etc. -bool PeerConnection::ReadyToUseRemoteCandidate( - const IceCandidateInterface* candidate, - const SessionDescriptionInterface* remote_desc, - bool* valid) { - *valid = true; - - const SessionDescriptionInterface* current_remote_desc = - remote_desc ? remote_desc : remote_description(); - - if (!current_remote_desc) { - return false; - } - - RTCErrorOr result = - FindContentInfo(current_remote_desc, candidate); - if (!result.ok()) { - RTC_LOG(LS_ERROR) << "ReadyToUseRemoteCandidate: Invalid candidate. " - << result.error().message(); - - *valid = false; - return false; - } - - std::string transport_name = GetTransportName(result.value()->name); - return !transport_name.empty(); + usage_pattern_.ReportUsagePattern(observer_); } bool PeerConnection::SrtpRequired() const { - return !use_datagram_transport_ && - (dtls_enabled_ || - webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED); + return (dtls_enabled_ || + sdp_handler_->webrtc_session_desc_factory()->SdesPolicy() == + cricket::SEC_REQUIRED); } void PeerConnection::OnTransportControllerGatheringState( @@ -7071,13 +2362,18 @@ void PeerConnection::OnTransportControllerGatheringState( OnIceGatheringChange(PeerConnectionInterface::kIceGatheringGathering); } else if (state == cricket::kIceGatheringComplete) { OnIceGatheringChange(PeerConnectionInterface::kIceGatheringComplete); + } else if (state == cricket::kIceGatheringNew) { + OnIceGatheringChange(PeerConnectionInterface::kIceGatheringNew); + } else { + RTC_LOG(LS_ERROR) << "Unknown state received: " << state; + RTC_NOTREACHED(); } } void PeerConnection::ReportTransportStats() { std::map> media_types_by_transport_name; - for (const auto& transceiver : transceivers_) { + for (const auto& transceiver : rtp_manager()->transceivers()->List()) { if (transceiver->internal()->channel()) { const std::string& 
transport_name = transceiver->internal()->channel()->transport_name(); @@ -7133,7 +2429,7 @@ void PeerConnection::ReportBestConnectionState( GetIceCandidatePairCounter(local, remote), kIceCandidatePairMax); } else { - RTC_CHECK(0); + RTC_CHECK_NOTREACHED(); } // Increment the counter for IP type. @@ -7226,78 +2522,6 @@ void PeerConnection::OnSentPacket_w(const rtc::SentPacket& sent_packet) { call_->OnSentPacket(sent_packet); } -const std::string PeerConnection::GetTransportName( - const std::string& content_name) { - cricket::ChannelInterface* channel = GetChannel(content_name); - if (channel) { - return channel->transport_name(); - } - if (data_channel_controller_.data_channel_transport()) { - RTC_DCHECK(sctp_mid_); - if (content_name == *sctp_mid_) { - return *sctp_transport_name(); - } - } - // Return an empty string if failed to retrieve the transport name. - return ""; -} - -void PeerConnection::DestroyTransceiverChannel( - rtc::scoped_refptr> - transceiver) { - RTC_DCHECK(transceiver); - - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel) { - transceiver->internal()->SetChannel(nullptr); - DestroyChannelInterface(channel); - } -} - -void PeerConnection::DestroyDataChannelTransport() { - if (data_channel_controller_.rtp_data_channel()) { - data_channel_controller_.OnTransportChannelClosed(); - DestroyChannelInterface(data_channel_controller_.rtp_data_channel()); - data_channel_controller_.set_rtp_data_channel(nullptr); - } - - // Note: Cannot use rtc::Bind to create a functor to invoke because it will - // grab a reference to this PeerConnection. If this is called from the - // PeerConnection destructor, the RefCountedObject vtable will have already - // been destroyed (since it is a subclass of PeerConnection) and using - // rtc::Bind will cause "Pure virtual function called" error to appear. 
- - if (sctp_mid_) { - data_channel_controller_.OnTransportChannelClosed(); - network_thread()->Invoke(RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(network_thread()); - TeardownDataChannelTransport_n(); - }); - } -} - -void PeerConnection::DestroyChannelInterface( - cricket::ChannelInterface* channel) { - RTC_DCHECK(channel); - switch (channel->media_type()) { - case cricket::MEDIA_TYPE_AUDIO: - channel_manager()->DestroyVoiceChannel( - static_cast(channel)); - break; - case cricket::MEDIA_TYPE_VIDEO: - channel_manager()->DestroyVideoChannel( - static_cast(channel)); - break; - case cricket::MEDIA_TYPE_DATA: - channel_manager()->DestroyRtpDataChannel( - static_cast(channel)); - break; - default: - RTC_NOTREACHED() << "Unknown media type: " << channel->media_type(); - break; - } -} - bool PeerConnection::OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, @@ -7309,18 +2533,12 @@ bool PeerConnection::OnTransportChanged( if (base_channel) { ret = base_channel->SetRtpTransport(rtp_transport); } - if (mid == sctp_mid_) { + if (mid == sctp_mid_n_) { data_channel_controller_.OnTransportChanged(data_channel_transport); } return ret; } -void PeerConnection::OnSetStreams() { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (IsUnifiedPlan()) - UpdateNegotiationNeeded(); -} - PeerConnectionObserver* PeerConnection::Observer() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(observer_); @@ -7328,11 +2546,12 @@ PeerConnectionObserver* PeerConnection::Observer() const { } CryptoOptions PeerConnection::GetCryptoOptions() { + RTC_DCHECK_RUN_ON(signaling_thread()); // TODO(bugs.webrtc.org/9891) - Remove PeerConnectionFactory::CryptoOptions // after it has been removed. return configuration_.crypto_options.has_value() ? 
*configuration_.crypto_options - : factory_->options().crypto_options; + : options_.crypto_options; } void PeerConnection::ClearStatsCache() { @@ -7342,275 +2561,52 @@ void PeerConnection::ClearStatsCache() { } } -void PeerConnection::RequestUsagePatternReportForTesting() { - signaling_thread()->Post(RTC_FROM_HERE, this, MSG_REPORT_USAGE_PATTERN, - nullptr); -} - -void PeerConnection::UpdateNegotiationNeeded() { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (!IsUnifiedPlan()) { - Observer()->OnRenegotiationNeeded(); - return; - } - - // If connection's [[IsClosed]] slot is true, abort these steps. - if (IsClosed()) - return; - - // If connection's signaling state is not "stable", abort these steps. - if (signaling_state() != kStable) - return; - - // NOTE - // The negotiation-needed flag will be updated once the state transitions to - // "stable", as part of the steps for setting an RTCSessionDescription. - - // If the result of checking if negotiation is needed is false, clear the - // negotiation-needed flag by setting connection's [[NegotiationNeeded]] slot - // to false, and abort these steps. - bool is_negotiation_needed = CheckIfNegotiationIsNeeded(); - if (!is_negotiation_needed) { - is_negotiation_needed_ = false; - return; - } - - // If connection's [[NegotiationNeeded]] slot is already true, abort these - // steps. - if (is_negotiation_needed_) - return; - - // Set connection's [[NegotiationNeeded]] slot to true. - is_negotiation_needed_ = true; - - // Queue a task that runs the following steps: - // If connection's [[IsClosed]] slot is true, abort these steps. - // If connection's [[NegotiationNeeded]] slot is false, abort these steps. - // Fire an event named negotiationneeded at connection. - Observer()->OnRenegotiationNeeded(); -} - -bool PeerConnection::CheckIfNegotiationIsNeeded() { +bool PeerConnection::ShouldFireNegotiationNeededEvent(uint32_t event_id) { RTC_DCHECK_RUN_ON(signaling_thread()); - // 1. 
If any implementation-specific negotiation is required, as described at - // the start of this section, return true. - - // 2. If connection's [[RestartIce]] internal slot is true, return true. - if (local_ice_credentials_to_replace_->HasIceCredentials()) { - return true; - } - - // 3. Let description be connection.[[CurrentLocalDescription]]. - const SessionDescriptionInterface* description = current_local_description(); - if (!description) - return true; - - // 4. If connection has created any RTCDataChannels, and no m= section in - // description has been negotiated yet for data, return true. - if (data_channel_controller_.HasSctpDataChannels()) { - if (!cricket::GetFirstDataContent(description->description()->contents())) - return true; - } - - // 5. For each transceiver in connection's set of transceivers, perform the - // following checks: - for (const auto& transceiver : transceivers_) { - const ContentInfo* current_local_msection = - FindTransceiverMSection(transceiver.get(), description); - - const ContentInfo* current_remote_msection = FindTransceiverMSection( - transceiver.get(), current_remote_description()); - - // 5.3 If transceiver is stopped and is associated with an m= section, - // but the associated m= section is not yet rejected in - // connection.[[CurrentLocalDescription]] or - // connection.[[CurrentRemoteDescription]], return true. - if (transceiver->stopped()) { - if (current_local_msection && !current_local_msection->rejected && - ((current_remote_msection && !current_remote_msection->rejected) || - !current_remote_msection)) { - return true; - } - continue; - } - - // 5.1 If transceiver isn't stopped and isn't yet associated with an m= - // section in description, return true. 
- if (!current_local_msection) - return true; - - const MediaContentDescription* current_local_media_description = - current_local_msection->media_description(); - // 5.2 If transceiver isn't stopped and is associated with an m= section - // in description then perform the following checks: - - // 5.2.1 If transceiver.[[Direction]] is "sendrecv" or "sendonly", and the - // associated m= section in description either doesn't contain a single - // "a=msid" line, or the number of MSIDs from the "a=msid" lines in this - // m= section, or the MSID values themselves, differ from what is in - // transceiver.sender.[[AssociatedMediaStreamIds]], return true. - if (RtpTransceiverDirectionHasSend(transceiver->direction())) { - if (current_local_media_description->streams().size() == 0) - return true; - - std::vector msection_msids; - for (const auto& stream : current_local_media_description->streams()) { - for (const std::string& msid : stream.stream_ids()) - msection_msids.push_back(msid); - } - - std::vector transceiver_msids = - transceiver->sender()->stream_ids(); - if (msection_msids.size() != transceiver_msids.size()) - return true; - - absl::c_sort(transceiver_msids); - absl::c_sort(msection_msids); - if (transceiver_msids != msection_msids) - return true; - } - - // 5.2.2 If description is of type "offer", and the direction of the - // associated m= section in neither connection.[[CurrentLocalDescription]] - // nor connection.[[CurrentRemoteDescription]] matches - // transceiver.[[Direction]], return true. 
- if (description->GetType() == SdpType::kOffer) { - if (!current_remote_description()) - return true; - - if (!current_remote_msection) - return true; - - RtpTransceiverDirection current_local_direction = - current_local_media_description->direction(); - RtpTransceiverDirection current_remote_direction = - current_remote_msection->media_description()->direction(); - if (transceiver->direction() != current_local_direction && - transceiver->direction() != - RtpTransceiverDirectionReversed(current_remote_direction)) { - return true; - } - } - - // 5.2.3 If description is of type "answer", and the direction of the - // associated m= section in the description does not match - // transceiver.[[Direction]] intersected with the offered direction (as - // described in [JSEP] (section 5.3.1.)), return true. - if (description->GetType() == SdpType::kAnswer) { - if (!remote_description()) - return true; - - const ContentInfo* offered_remote_msection = - FindTransceiverMSection(transceiver.get(), remote_description()); - - RtpTransceiverDirection offered_direction = - offered_remote_msection - ? offered_remote_msection->media_description()->direction() - : RtpTransceiverDirection::kInactive; - - if (current_local_media_description->direction() != - (RtpTransceiverDirectionIntersection( - transceiver->direction(), - RtpTransceiverDirectionReversed(offered_direction)))) { - return true; - } - } - } - - // If all the preceding checks were performed and true was not returned, - // nothing remains to be negotiated; return false. 
- return false; + return sdp_handler_->ShouldFireNegotiationNeededEvent(event_id); } -RTCError PeerConnection::Rollback(SdpType sdp_type) { - auto state = signaling_state(); - if (state != PeerConnectionInterface::kHaveLocalOffer && - state != PeerConnectionInterface::kHaveRemoteOffer) { - return RTCError(RTCErrorType::INVALID_STATE, - "Called in wrong signalingState: " + - GetSignalingStateString(signaling_state())); - } - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(IsUnifiedPlan()); - std::vector mids; - std::vector> all_added_streams; - std::vector> all_removed_streams; - std::vector> removed_receivers; - - for (auto&& transceivers_stable_state_pair : - transceiver_stable_states_by_transceivers_) { - auto transceiver = transceivers_stable_state_pair.first; - auto state = transceivers_stable_state_pair.second; - - if (state.remote_stream_ids()) { - std::vector> added_streams; - std::vector> removed_streams; - SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), - state.remote_stream_ids().value(), - &added_streams, &removed_streams); - all_added_streams.insert(all_added_streams.end(), added_streams.begin(), - added_streams.end()); - all_removed_streams.insert(all_removed_streams.end(), - removed_streams.begin(), - removed_streams.end()); - if (!state.has_m_section() && !state.newly_created()) { - continue; - } - } - - RTC_DCHECK(transceiver->internal()->mid().has_value()); - std::string mid = transceiver->internal()->mid().value(); - mids.push_back(mid); - DestroyTransceiverChannel(transceiver); - - if (signaling_state() == PeerConnectionInterface::kHaveRemoteOffer && - transceiver->receiver()) { - removed_receivers.push_back(transceiver->receiver()); - } - if (state.newly_created()) { - if (transceiver->internal()->reused_for_addtrack()) { - transceiver->internal()->set_created_by_addtrack(true); - } else { - int remaining_transceiver_count = 0; - for (auto&& t : transceivers_) { - if (t != transceiver) { - 
transceivers_[remaining_transceiver_count++] = t; - } - } - transceivers_.resize(remaining_transceiver_count); - } - } - transceiver->internal()->sender_internal()->set_transport(nullptr); - transceiver->internal()->receiver_internal()->set_transport(nullptr); - transceiver->internal()->set_mid(state.mid()); - transceiver->internal()->set_mline_index(state.mline_index()); - } - transport_controller_->RollbackTransportForMids(mids); - transceiver_stable_states_by_transceivers_.clear(); - pending_local_description_.reset(); - pending_remote_description_.reset(); - ChangeSignalingState(PeerConnectionInterface::kStable); - - // Once all processing has finished, fire off callbacks. - for (const auto& receiver : removed_receivers) { - Observer()->OnRemoveTrack(receiver); - } - for (const auto& stream : all_added_streams) { - Observer()->OnAddStream(stream); - } - for (const auto& stream : all_removed_streams) { - Observer()->OnRemoveStream(stream); - } - - // The assumption is that in case of implicit rollback UpdateNegotiationNeeded - // gets called in SetRemoteDescription. 
- if (sdp_type == SdpType::kRollback) { - UpdateNegotiationNeeded(); - if (is_negotiation_needed_) { - Observer()->OnRenegotiationNeeded(); - } - } - return RTCError::OK(); +void PeerConnection::RequestUsagePatternReportForTesting() { + message_handler_.RequestUsagePatternReport( + [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ReportUsagePattern(); + }, + /* delay_ms= */ 0); +} + +std::function +PeerConnection::InitializeRtcpCallback() { + RTC_DCHECK_RUN_ON(signaling_thread()); + + auto flag = + worker_thread()->Invoke>( + RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(worker_thread()); + if (!call_) + return rtc::scoped_refptr(); + if (!call_safety_) + call_safety_.reset(new ScopedTaskSafety()); + return call_safety_->flag(); + }); + + if (!flag) + return [](const rtc::CopyOnWriteBuffer&, int64_t) {}; + + return [this, flag = std::move(flag)](const rtc::CopyOnWriteBuffer& packet, + int64_t packet_time_us) { + RTC_DCHECK_RUN_ON(network_thread()); + // TODO(bugs.webrtc.org/11993): We should actually be delivering this call + // directly to the Call class somehow directly on the network thread and not + // incur this hop here. The DeliverPacket() method will eventually just have + // to hop back over to the network thread. 
+ worker_thread()->PostTask(ToQueuedTask(flag, [this, packet, + packet_time_us] { + RTC_DCHECK_RUN_ON(worker_thread()); + call_->Receiver()->DeliverPacket(MediaType::ANY, packet, packet_time_us); + })); + }; } } // namespace webrtc diff --git a/pc/peer_connection.h b/pc/peer_connection.h index 9065982f00..5cec7501fc 100644 --- a/pc/peer_connection.h +++ b/pc/peer_connection.h @@ -11,6 +11,8 @@ #ifndef PC_PEER_CONNECTION_H_ #define PC_PEER_CONNECTION_H_ +#include +#include #include #include #include @@ -18,33 +20,86 @@ #include #include +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/async_resolver_factory.h" +#include "api/audio_options.h" +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/packet_socket_factory.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/transport/bitrate_settings.h" #include "api/transport/data_channel_transport_interface.h" +#include "api/transport/enums.h" #include "api/turn_customizer.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "call/call.h" +#include "media/base/media_channel.h" +#include "media/base/media_engine.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/port.h" +#include 
"p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" +#include "pc/channel.h" +#include "pc/channel_interface.h" +#include "pc/channel_manager.h" +#include "pc/connection_context.h" #include "pc/data_channel_controller.h" -#include "pc/ice_server_parsing.h" +#include "pc/data_channel_utils.h" +#include "pc/dtls_transport.h" #include "pc/jsep_transport_controller.h" -#include "pc/peer_connection_factory.h" #include "pc/peer_connection_internal.h" +#include "pc/peer_connection_message_handler.h" #include "pc/rtc_stats_collector.h" +#include "pc/rtp_data_channel.h" +#include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/rtp_transport_internal.h" +#include "pc/sctp_data_channel.h" #include "pc/sctp_transport.h" +#include "pc/sdp_offer_answer.h" +#include "pc/session_description.h" #include "pc/stats_collector.h" #include "pc/stream_collection.h" -#include "pc/webrtc_session_description_factory.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/operations_chain.h" -#include "rtc_base/race_checker.h" +#include "pc/transceiver_list.h" +#include "pc/transport_stats.h" +#include "pc/usage_pattern.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/deprecation.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/unique_id_generator.h" -#include "rtc_base/weak_ptr.h" namespace webrtc { -class MediaStreamObserver; -class VideoRtpReceiver; -class RtcEventLog; - // PeerConnection is the implementation of the PeerConnection object as 
defined // by the PeerConnectionInterface API surface. // The class currently is solely responsible for the following: @@ -61,62 +116,19 @@ class RtcEventLog; // - Generating stats. class PeerConnection : public PeerConnectionInternal, public JsepTransportController::Observer, - public RtpSenderBase::SetStreamsObserver, - public rtc::MessageHandler, public sigslot::has_slots<> { public: - // A bit in the usage pattern is registered when its defining event occurs at - // least once. - enum class UsageEvent : int { - TURN_SERVER_ADDED = 0x01, - STUN_SERVER_ADDED = 0x02, - DATA_ADDED = 0x04, - AUDIO_ADDED = 0x08, - VIDEO_ADDED = 0x10, - // |SetLocalDescription| returns successfully. - SET_LOCAL_DESCRIPTION_SUCCEEDED = 0x20, - // |SetRemoteDescription| returns successfully. - SET_REMOTE_DESCRIPTION_SUCCEEDED = 0x40, - // A local candidate (with type host, server-reflexive, or relay) is - // collected. - CANDIDATE_COLLECTED = 0x80, - // A remote candidate is successfully added via |AddIceCandidate|. - ADD_ICE_CANDIDATE_SUCCEEDED = 0x100, - ICE_STATE_CONNECTED = 0x200, - CLOSE_CALLED = 0x400, - // A local candidate with private IP is collected. - PRIVATE_CANDIDATE_COLLECTED = 0x800, - // A remote candidate with private IP is added, either via AddiceCandidate - // or from the remote description. - REMOTE_PRIVATE_CANDIDATE_ADDED = 0x1000, - // A local mDNS candidate is collected. - MDNS_CANDIDATE_COLLECTED = 0x2000, - // A remote mDNS candidate is added, either via AddIceCandidate or from the - // remote description. - REMOTE_MDNS_CANDIDATE_ADDED = 0x4000, - // A local candidate with IPv6 address is collected. - IPV6_CANDIDATE_COLLECTED = 0x8000, - // A remote candidate with IPv6 address is added, either via AddIceCandidate - // or from the remote description. - REMOTE_IPV6_CANDIDATE_ADDED = 0x10000, - // A remote candidate (with type host, server-reflexive, or relay) is - // successfully added, either via AddIceCandidate or from the remote - // description. 
- REMOTE_CANDIDATE_ADDED = 0x20000, - // An explicit host-host candidate pair is selected, i.e. both the local and - // the remote candidates have the host type. This does not include candidate - // pairs formed with equivalent prflx remote candidates, e.g. a host-prflx - // pair where the prflx candidate has the same base as a host candidate of - // the remote peer. - DIRECT_CONNECTION_SELECTED = 0x40000, - MAX_VALUE = 0x80000, - }; - - explicit PeerConnection(PeerConnectionFactory* factory, - std::unique_ptr event_log, - std::unique_ptr call); - - bool Initialize( + // Creates a PeerConnection and initializes it with the given values. + // If the initialization fails, the function releases the PeerConnection + // and returns nullptr. + // + // Note that the function takes ownership of dependencies, and will + // either use them or release them, whether it succeeds or fails. + static rtc::scoped_refptr Create( + rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + std::unique_ptr event_log, + std::unique_ptr call, const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies); @@ -143,18 +155,6 @@ class PeerConnection : public PeerConnectionInternal, cricket::MediaType media_type, const RtpTransceiverInit& init) override; - // Gets the DTLS SSL certificate associated with the audio transport on the - // remote side. This will become populated once the DTLS connection with the - // peer has been completed, as indicated by the ICE connection state - // transitioning to kIceConnectionCompleted. - // Note that this will be removed once we implement RTCDtlsTransport which - // has standardized method for getting this information. - // See https://www.w3.org/TR/webrtc/#rtcdtlstransport-interface - std::unique_ptr GetRemoteAudioSSLCertificate(); - - // Version of the above method that returns the full certificate chain. 
- std::unique_ptr GetRemoteAudioSSLCertChain(); - rtc::scoped_refptr CreateSender( const std::string& kind, const std::string& stream_id) override; @@ -189,6 +189,7 @@ class PeerConnection : public PeerConnectionInternal, IceConnectionState standardized_ice_connection_state() override; PeerConnectionState peer_connection_state() override; IceGatheringState ice_gathering_state() override; + absl::optional can_trickle_ice_candidates() override; const SessionDescriptionInterface* local_description() const override; const SessionDescriptionInterface* remote_description() const override; @@ -206,15 +207,29 @@ class PeerConnection : public PeerConnectionInternal, const RTCOfferAnswerOptions& options) override; void CreateAnswer(CreateSessionDescriptionObserver* observer, const RTCOfferAnswerOptions& options) override; + + void SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) + override; + void SetLocalDescription( + rtc::scoped_refptr observer) + override; + // TODO(https://crbug.com/webrtc/11798): Delete these methods in favor of the + // ones taking SetLocalDescriptionObserverInterface as argument. void SetLocalDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) override; void SetLocalDescription(SetSessionDescriptionObserver* observer) override; - void SetRemoteDescription(SetSessionDescriptionObserver* observer, - SessionDescriptionInterface* desc) override; + void SetRemoteDescription( std::unique_ptr desc, rtc::scoped_refptr observer) override; + // TODO(https://crbug.com/webrtc/11798): Delete this methods in favor of the + // ones taking SetRemoteDescriptionObserverInterface as argument. 
+ void SetRemoteDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) override; + PeerConnectionInterface::RTCConfiguration GetConfiguration() override; RTCError SetConfiguration( const PeerConnectionInterface::RTCConfiguration& configuration) override; @@ -236,6 +251,8 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr GetSctpTransport() const override; + void AddAdaptationResource(rtc::scoped_refptr resource) override; + bool StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) override; bool StartRtcEventLog(std::unique_ptr output) override; @@ -243,14 +260,15 @@ class PeerConnection : public PeerConnectionInternal, void Close() override; + rtc::Thread* signaling_thread() const final { + return context_->signaling_thread(); + } + // PeerConnectionInternal implementation. rtc::Thread* network_thread() const final { - return factory_->network_thread(); - } - rtc::Thread* worker_thread() const final { return factory_->worker_thread(); } - rtc::Thread* signaling_thread() const final { - return factory_->signaling_thread(); + return context_->network_thread(); } + rtc::Thread* worker_thread() const final { return context_->worker_thread(); } std::string session_id() const override { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -266,27 +284,22 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr>> GetTransceiversInternal() const override { RTC_DCHECK_RUN_ON(signaling_thread()); - return transceivers_; + return rtp_manager()->transceivers()->List(); } - sigslot::signal1& SignalDataChannelCreated() override { - return data_channel_controller_.SignalDataChannelCreated(); + sigslot::signal1& SignalRtpDataChannelCreated() override { + return data_channel_controller_.SignalRtpDataChannelCreated(); } - cricket::RtpDataChannel* rtp_data_channel() const override { - return data_channel_controller_.rtp_data_channel(); + sigslot::signal1& SignalSctpDataChannelCreated() override { + 
return data_channel_controller_.SignalSctpDataChannelCreated(); } - std::vector> sctp_data_channels() - const override { - RTC_DCHECK_RUN_ON(signaling_thread()); - return *data_channel_controller_.sctp_data_channels(); + cricket::RtpDataChannel* rtp_data_channel() const override { + return data_channel_controller_.rtp_data_channel(); } - absl::optional sctp_content_name() const override { - RTC_DCHECK_RUN_ON(signaling_thread()); - return sctp_mid_; - } + std::vector GetDataChannelStats() const override; absl::optional sctp_transport_name() const override; @@ -305,227 +318,103 @@ class PeerConnection : public PeerConnectionInternal, bool NeedsIceRestart(const std::string& content_name) const override; bool GetSslRole(const std::string& content_name, rtc::SSLRole* role) override; + int32_t StartRecorder(int32_t dir, std::string path) override; + int32_t StopRecorder(int32_t dir) override; + // Functions needed by DataChannelController void NoteDataAddedEvent() { NoteUsageEvent(UsageEvent::DATA_ADDED); } // Returns the observer. Will crash on CHECK if the observer is removed. PeerConnectionObserver* Observer() const; bool IsClosed() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return signaling_state_ == PeerConnectionInterface::kClosed; + return sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed; } // Get current SSL role used by SCTP's underlying transport. bool GetSctpSslRole(rtc::SSLRole* role); // Handler for the "channel closed" signal - void OnSctpDataChannelClosed(DataChannel* channel); + void OnSctpDataChannelClosed(DataChannelInterface* channel); - // Functions made public for testing. 
- void ReturnHistogramVeryQuicklyForTesting() { + bool ShouldFireNegotiationNeededEvent(uint32_t event_id) override; + + // Functions needed by SdpOfferAnswerHandler + StatsCollector* stats() { RTC_DCHECK_RUN_ON(signaling_thread()); - return_histogram_very_quickly_ = true; + return stats_.get(); + } + DataChannelController* data_channel_controller() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return &data_channel_controller_; + } + bool dtls_enabled() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return dtls_enabled_; + } + const PeerConnectionInterface::RTCConfiguration* configuration() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return &configuration_; + } + absl::optional sctp_mid() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sctp_mid_s_; + } + PeerConnectionMessageHandler* message_handler() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return &message_handler_; } - void RequestUsagePatternReportForTesting(); - - protected: - ~PeerConnection() override; - - private: - class ImplicitCreateSessionDescriptionObserver; - friend class ImplicitCreateSessionDescriptionObserver; - class SetRemoteDescriptionObserverAdapter; - friend class SetRemoteDescriptionObserverAdapter; - - // Represents the [[LocalIceCredentialsToReplace]] internal slot in the spec. - // It makes the next CreateOffer() produce new ICE credentials even if - // RTCOfferAnswerOptions::ice_restart is false. - // https://w3c.github.io/webrtc-pc/#dfn-localufragstoreplace - // TODO(hbos): When JsepTransportController/JsepTransport supports rollback, - // move this type of logic to JsepTransportController/JsepTransport. 
- class LocalIceCredentialsToReplace; - - struct RtpSenderInfo { - RtpSenderInfo() : first_ssrc(0) {} - RtpSenderInfo(const std::string& stream_id, - const std::string sender_id, - uint32_t ssrc) - : stream_id(stream_id), sender_id(sender_id), first_ssrc(ssrc) {} - bool operator==(const RtpSenderInfo& other) { - return this->stream_id == other.stream_id && - this->sender_id == other.sender_id && - this->first_ssrc == other.first_ssrc; - } - std::string stream_id; - std::string sender_id; - // An RtpSender can have many SSRCs. The first one is used as a sort of ID - // for communicating with the lower layers. - uint32_t first_ssrc; - }; - - // Field-trial based configuration for datagram transport. - struct DatagramTransportConfig { - explicit DatagramTransportConfig(const std::string& field_trial) - : enabled("enabled", true), default_value("default_value", false) { - ParseFieldTrial({&enabled, &default_value}, field_trial); - } - - // Whether datagram transport support is enabled at all. Defaults to true, - // allowing datagram transport to be used if (a) the application provides a - // factory for it and (b) the configuration specifies its use. This flag - // provides a kill-switch to force-disable datagram transport across all - // applications, without code changes. - FieldTrialFlag enabled; - - // Whether the datagram transport is enabled or disabled by default. - // Defaults to false, meaning that applications must configure use of - // datagram transport through RTCConfiguration. If set to true, - // applications will use the datagram transport by default (but may still - // explicitly configure themselves not to use it through RTCConfiguration). - FieldTrialFlag default_value; - }; - - // Field-trial based configuration for datagram transport data channels. 
- struct DatagramTransportDataChannelConfig { - explicit DatagramTransportDataChannelConfig(const std::string& field_trial) - : enabled("enabled", true), - default_value("default_value", false), - receive_only("receive_only", false) { - ParseFieldTrial({&enabled, &default_value, &receive_only}, field_trial); - } - - // Whether datagram transport data channel support is enabled at all. - // Defaults to true, allowing datagram transport to be used if (a) the - // application provides a factory for it and (b) the configuration specifies - // its use. This flag provides a kill-switch to force-disable datagram - // transport across all applications, without code changes. - FieldTrialFlag enabled; - - // Whether the datagram transport data channels are enabled or disabled by - // default. Defaults to false, meaning that applications must configure use - // of datagram transport through RTCConfiguration. If set to true, - // applications will use the datagram transport by default (but may still - // explicitly configure themselves not to use it through RTCConfiguration). - FieldTrialFlag default_value; - - // Whether the datagram transport is enabled in receive-only mode. If true, - // and if the datagram transport is enabled, it will only be used when - // receiving incoming calls, not when placing outgoing calls. - FieldTrialFlag receive_only; - }; - - // Captures partial state to be used for rollback. Applicable only in - // Unified Plan. 
- class TransceiverStableState { - public: - TransceiverStableState() {} - void set_newly_created(); - void SetMSectionIfUnset(absl::optional mid, - absl::optional mline_index); - void SetRemoteStreamIdsIfUnset(const std::vector& ids); - absl::optional mid() const { return mid_; } - absl::optional mline_index() const { return mline_index_; } - absl::optional> remote_stream_ids() const { - return remote_stream_ids_; - } - bool has_m_section() const { return has_m_section_; } - bool newly_created() const { return newly_created_; } - - private: - absl::optional mid_; - absl::optional mline_index_; - absl::optional> remote_stream_ids_; - // Indicates that mid value from stable state has been captured and - // that rollback has to restore the transceiver. Also protects against - // subsequent overwrites. - bool has_m_section_ = false; - // Indicates that the transceiver was created as part of applying a - // description to track potential need for removing transceiver during - // rollback. - bool newly_created_ = false; - }; - - // Implements MessageHandler. - void OnMessage(rtc::Message* msg) override; - - // Plan B helpers for getting the voice/video media channels for the single - // audio/video transceiver, if it exists. 
- cricket::VoiceMediaChannel* voice_media_channel() const - RTC_RUN_ON(signaling_thread()); - cricket::VideoMediaChannel* video_media_channel() const - RTC_RUN_ON(signaling_thread()); - - std::vector>> - GetSendersInternal() const RTC_RUN_ON(signaling_thread()); - std::vector< - rtc::scoped_refptr>> - GetReceiversInternal() const RTC_RUN_ON(signaling_thread()); - rtc::scoped_refptr> - GetAudioTransceiver() const RTC_RUN_ON(signaling_thread()); - rtc::scoped_refptr> - GetVideoTransceiver() const RTC_RUN_ON(signaling_thread()); + RtpTransmissionManager* rtp_manager() { return rtp_manager_.get(); } + const RtpTransmissionManager* rtp_manager() const { + return rtp_manager_.get(); + } + cricket::ChannelManager* channel_manager() const; - rtc::scoped_refptr> - GetFirstAudioTransceiver() const RTC_RUN_ON(signaling_thread()); - - // Implementation of the offer/answer exchange operations. These are chained - // onto the |operations_chain_| when the public CreateOffer(), CreateAnswer(), - // SetLocalDescription() and SetRemoteDescription() methods are invoked. 
- void DoCreateOffer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer); - void DoCreateAnswer( - const RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer); - void DoSetLocalDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer); - void DoSetRemoteDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer); + JsepTransportController* transport_controller() { + return transport_controller_.get(); + } + cricket::PortAllocator* port_allocator() { return port_allocator_.get(); } + Call* call_ptr() { return call_ptr_; } - void CreateAudioReceiver(MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) - RTC_RUN_ON(signaling_thread()); + ConnectionContext* context() { return context_.get(); } + const PeerConnectionFactoryInterface::Options* options() const { + return &options_; + } + cricket::DataChannelType data_channel_type() const; + void SetIceConnectionState(IceConnectionState new_state); + void NoteUsageEvent(UsageEvent event); - void CreateVideoReceiver(MediaStreamInterface* stream, - const RtpSenderInfo& remote_sender_info) - RTC_RUN_ON(signaling_thread()); - rtc::scoped_refptr RemoveAndStopReceiver( - const RtpSenderInfo& remote_sender_info) RTC_RUN_ON(signaling_thread()); + // Report the UMA metric SdpFormatReceived for the given remote offer. + void ReportSdpFormatReceived(const SessionDescriptionInterface& remote_offer); - // May be called either by AddStream/RemoveStream, or when a track is - // added/removed from a stream previously added via AddStream. 
- void AddAudioTrack(AudioTrackInterface* track, MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void RemoveAudioTrack(AudioTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void AddVideoTrack(VideoTrackInterface* track, MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void RemoveVideoTrack(VideoTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); + // Returns true if the PeerConnection is configured to use Unified Plan + // semantics for creating offers/answers and setting local/remote + // descriptions. If this is true the RtpTransceiver API will also be available + // to the user. If this is false, Plan B semantics are assumed. + // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once + // sufficient time has passed. + bool IsUnifiedPlan() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return is_unified_plan_; + } + bool ValidateBundleSettings(const cricket::SessionDescription* desc); - // AddTrack implementation when Unified Plan is specified. - RTCErrorOr> AddTrackUnifiedPlan( - rtc::scoped_refptr track, - const std::vector& stream_ids) - RTC_RUN_ON(signaling_thread()); - // AddTrack implementation when Plan B is specified. - RTCErrorOr> AddTrackPlanB( - rtc::scoped_refptr track, - const std::vector& stream_ids) - RTC_RUN_ON(signaling_thread()); + // Returns the MID for the data section associated with either the + // RtpDataChannel or SCTP data channel, if it has been set. If no data + // channels are configured this will return nullopt. + absl::optional GetDataMid() const; - // Returns the first RtpTransceiver suitable for a newly added track, if such - // transceiver is available. 
- rtc::scoped_refptr> - FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track) - RTC_RUN_ON(signaling_thread()); + void SetSctpDataMid(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_mid_s_ = mid; + } + void ResetSctpDataMid() { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_mid_s_.reset(); + } - rtc::scoped_refptr> - FindTransceiverBySender(rtc::scoped_refptr sender) - RTC_RUN_ON(signaling_thread()); + // Returns the CryptoOptions for this PeerConnection. This will always + // return the RTCConfiguration.crypto_options if set and will only default + // back to the PeerConnectionFactory settings if nothing was set. + CryptoOptions GetCryptoOptions(); // Internal implementation for AddTransceiver family of methods. If // |fire_callback| is set, fires OnRenegotiationNeeded callback if successful. @@ -533,28 +422,54 @@ class PeerConnection : public PeerConnectionInternal, cricket::MediaType media_type, rtc::scoped_refptr track, const RtpTransceiverInit& init, - bool fire_callback = true) RTC_RUN_ON(signaling_thread()); + bool fire_callback = true); - rtc::scoped_refptr> - CreateSender(cricket::MediaType media_type, - const std::string& id, - rtc::scoped_refptr track, - const std::vector& stream_ids, - const std::vector& send_encodings); + // Returns rtp transport, result can not be nullptr. + RtpTransportInternal* GetRtpTransport(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + auto rtp_transport = transport_controller_->GetRtpTransport(mid); + RTC_DCHECK(rtp_transport); + return rtp_transport; + } - rtc::scoped_refptr> - CreateReceiver(cricket::MediaType media_type, const std::string& receiver_id); + // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by + // this session. + bool SrtpRequired() const RTC_RUN_ON(signaling_thread()); - // Create a new RtpTransceiver of the given type and add it to the list of - // transceivers. 
- rtc::scoped_refptr> - CreateAndAddTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> - receiver) RTC_RUN_ON(signaling_thread()); + void OnSentPacket_w(const rtc::SentPacket& sent_packet); + + bool SetupDataChannelTransport_n(const std::string& mid) + RTC_RUN_ON(network_thread()); + void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread()); + cricket::ChannelInterface* GetChannel(const std::string& content_name); + + // Functions made public for testing. + void ReturnHistogramVeryQuicklyForTesting() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return_histogram_very_quickly_ = true; + } + void RequestUsagePatternReportForTesting(); + + protected: + // Available for rtc::scoped_refptr creation + PeerConnection(rtc::scoped_refptr context, + const PeerConnectionFactoryInterface::Options& options, + bool is_unified_plan, + std::unique_ptr event_log, + std::unique_ptr call, + PeerConnectionDependencies& dependencies); + + ~PeerConnection() override; + + private: + bool Initialize( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies); - void SetIceConnectionState(IceConnectionState new_state) + rtc::scoped_refptr> + FindTransceiverBySender(rtc::scoped_refptr sender) RTC_RUN_ON(signaling_thread()); + void SetStandardizedIceConnectionState( PeerConnectionInterface::IceConnectionState new_state) RTC_RUN_ON(signaling_thread()); @@ -583,312 +498,13 @@ class PeerConnection : public PeerConnectionInternal, const cricket::CandidatePairChangeEvent& event) RTC_RUN_ON(signaling_thread()); - // Update the state, signaling if necessary. - void ChangeSignalingState(SignalingState signaling_state) - RTC_RUN_ON(signaling_thread()); - - // Signals from MediaStreamObserver. 
- void OnAudioTrackAdded(AudioTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void OnAudioTrackRemoved(AudioTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void OnVideoTrackAdded(VideoTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - void OnVideoTrackRemoved(VideoTrackInterface* track, - MediaStreamInterface* stream) - RTC_RUN_ON(signaling_thread()); - - void PostSetSessionDescriptionSuccess( - SetSessionDescriptionObserver* observer); - void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer, - RTCError&& error); - void PostCreateSessionDescriptionFailure( - CreateSessionDescriptionObserver* observer, - RTCError error); - - // Synchronous implementations of SetLocalDescription/SetRemoteDescription - // that return an RTCError instead of invoking a callback. - RTCError ApplyLocalDescription( - std::unique_ptr desc); - RTCError ApplyRemoteDescription( - std::unique_ptr desc); - - // Updates the local RtpTransceivers according to the JSEP rules. Called as - // part of setting the local/remote description. - RTCError UpdateTransceiversAndDataChannels( - cricket::ContentSource source, - const SessionDescriptionInterface& new_session, - const SessionDescriptionInterface* old_local_description, - const SessionDescriptionInterface* old_remote_description) - RTC_RUN_ON(signaling_thread()); - - // Either creates or destroys the transceiver's BaseChannel according to the - // given media section. - RTCError UpdateTransceiverChannel( - rtc::scoped_refptr> - transceiver, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread()); - - // Either creates or destroys the local data channel according to the given - // media section. 
- RTCError UpdateDataChannel(cricket::ContentSource source, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) - RTC_RUN_ON(signaling_thread()); - - // Associate the given transceiver according to the JSEP rules. - RTCErrorOr< - rtc::scoped_refptr>> - AssociateTransceiver(cricket::ContentSource source, - SdpType type, - size_t mline_index, - const cricket::ContentInfo& content, - const cricket::ContentInfo* old_local_content, - const cricket::ContentInfo* old_remote_content) - RTC_RUN_ON(signaling_thread()); - - // Returns the RtpTransceiver, if found, that is associated to the given MID. - rtc::scoped_refptr> - GetAssociatedTransceiver(const std::string& mid) const - RTC_RUN_ON(signaling_thread()); - - // Returns the RtpTransceiver, if found, that was assigned to the given mline - // index in CreateOffer. - rtc::scoped_refptr> - GetTransceiverByMLineIndex(size_t mline_index) const - RTC_RUN_ON(signaling_thread()); - - // Returns an RtpTransciever, if available, that can be used to receive the - // given media type according to JSEP rules. - rtc::scoped_refptr> - FindAvailableTransceiverToReceive(cricket::MediaType media_type) const - RTC_RUN_ON(signaling_thread()); - - // Returns the media section in the given session description that is - // associated with the RtpTransceiver. Returns null if none found or this - // RtpTransceiver is not associated. Logic varies depending on the - // SdpSemantics specified in the configuration. - const cricket::ContentInfo* FindMediaSectionForTransceiver( - rtc::scoped_refptr> - transceiver, - const SessionDescriptionInterface* sdesc) const - RTC_RUN_ON(signaling_thread()); - - // Runs the algorithm **set the associated remote streams** specified in - // https://w3c.github.io/webrtc-pc/#set-associated-remote-streams. 
- void SetAssociatedRemoteStreams( - rtc::scoped_refptr receiver, - const std::vector& stream_ids, - std::vector>* added_streams, - std::vector>* removed_streams) - RTC_RUN_ON(signaling_thread()); - - // Runs the algorithm **process the removal of a remote track** specified in - // the WebRTC specification. - // This method will update the following lists: - // |remove_list| is the list of transceivers for which the receiving track is - // being removed. - // |removed_streams| is the list of streams which no longer have a receiving - // track so should be removed. - // https://w3c.github.io/webrtc-pc/#process-remote-track-removal - void ProcessRemovalOfRemoteTrack( - rtc::scoped_refptr> - transceiver, - std::vector>* remove_list, - std::vector>* removed_streams) - RTC_RUN_ON(signaling_thread()); - - void RemoveRemoteStreamsIfEmpty( - const std::vector>& - remote_streams, - std::vector>* removed_streams) - RTC_RUN_ON(signaling_thread()); void OnNegotiationNeeded(); - // Returns a MediaSessionOptions struct with options decided by |options|, - // the local MediaStreams and DataChannels. 
- void GetOptionsForOffer(const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForPlanBOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForUnifiedPlanOffer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - - RTCError HandleLegacyOfferOptions(const RTCOfferAnswerOptions& options) - RTC_RUN_ON(signaling_thread()); - void RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MediaType media_type) RTC_RUN_ON(signaling_thread()); - void AddUpToOneReceivingTransceiverOfType(cricket::MediaType media_type); - std::vector< - rtc::scoped_refptr>> - GetReceivingTransceiversOfType(cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Returns a MediaSessionOptions struct with options decided by - // |constraints|, the local MediaStreams and DataChannels. - void GetOptionsForAnswer(const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForPlanBAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - void GetOptionsForUnifiedPlanAnswer( - const PeerConnectionInterface::RTCOfferAnswerOptions& - offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - - // Generates MediaDescriptionOptions for the |session_opts| based on existing - // local description or remote description. 
- void GenerateMediaDescriptionOptions( - const SessionDescriptionInterface* session_desc, - RtpTransceiverDirection audio_direction, - RtpTransceiverDirection video_direction, - absl::optional* audio_index, - absl::optional* video_index, - absl::optional* data_index, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); - - // Generates the active MediaDescriptionOptions for the local data channel - // given the specified MID. - cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForActiveData( - const std::string& mid) const RTC_RUN_ON(signaling_thread()); - - // Generates the rejected MediaDescriptionOptions for the local data channel - // given the specified MID. - cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData( - const std::string& mid) const RTC_RUN_ON(signaling_thread()); - - // Returns the MID for the data section associated with either the - // RtpDataChannel or SCTP data channel, if it has been set. If no data - // channels are configured this will return nullopt. - absl::optional GetDataMid() const RTC_RUN_ON(signaling_thread()); - - // Remove all local and remote senders of type |media_type|. - // Called when a media type is rejected (m-line set to port 0). - void RemoveSenders(cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Makes sure a MediaStreamTrack is created for each StreamParam in |streams|, - // and existing MediaStreamTracks are removed if there is no corresponding - // StreamParam. If |default_track_needed| is true, a default MediaStreamTrack - // is created if it doesn't exist; if false, it's removed if it exists. - // |media_type| is the type of the |streams| and can be either audio or video. - // If a new MediaStream is created it is added to |new_streams|. 
- void UpdateRemoteSendersList( - const std::vector& streams, - bool default_track_needed, - cricket::MediaType media_type, - StreamCollection* new_streams) RTC_RUN_ON(signaling_thread()); - - // Triggered when a remote sender has been seen for the first time in a remote - // session description. It creates a remote MediaStreamTrackInterface - // implementation and triggers CreateAudioReceiver or CreateVideoReceiver. - void OnRemoteSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Triggered when a remote sender has been removed from a remote session - // description. It removes the remote sender with id |sender_id| from a remote - // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver. - void OnRemoteSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Finds remote MediaStreams without any tracks and removes them from - // |remote_streams_| and notifies the observer that the MediaStreams no longer - // exist. - void UpdateEndedRemoteMediaStreams() RTC_RUN_ON(signaling_thread()); - - // Loops through the vector of |streams| and finds added and removed - // StreamParams since last time this method was called. - // For each new or removed StreamParam, OnLocalSenderSeen or - // OnLocalSenderRemoved is invoked. - void UpdateLocalSenders(const std::vector& streams, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Triggered when a local sender has been seen for the first time in a local - // session description. 
- // This method triggers CreateAudioSender or CreateVideoSender if the rtp - // streams in the local SessionDescription can be mapped to a MediaStreamTrack - // in a MediaStream in |local_streams_| - void OnLocalSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Triggered when a local sender has been removed from a local session - // description. - // This method triggers DestroyAudioSender or DestroyVideoSender if a stream - // has been removed from the local SessionDescription and the stream can be - // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|. - void OnLocalSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type) - RTC_RUN_ON(signaling_thread()); - - // Returns true if the PeerConnection is configured to use Unified Plan - // semantics for creating offers/answers and setting local/remote - // descriptions. If this is true the RtpTransceiver API will also be available - // to the user. If this is false, Plan B semantics are assumed. - // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once - // sufficient time has passed. - bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread()) { - return configuration_.sdp_semantics == SdpSemantics::kUnifiedPlan; - } - - // The offer/answer machinery assumes the media section MID is present and - // unique. To support legacy end points that do not supply a=mid lines, this - // method will modify the session description to add MIDs generated according - // to the SDP semantics. - void FillInMissingRemoteMids(cricket::SessionDescription* remote_description) - RTC_RUN_ON(signaling_thread()); - - // Is there an RtpSender of the given type? - bool HasRtpSender(cricket::MediaType type) const - RTC_RUN_ON(signaling_thread()); - - // Return the RtpSender with the given track attached. 
- rtc::scoped_refptr> - FindSenderForTrack(MediaStreamTrackInterface* track) const - RTC_RUN_ON(signaling_thread()); - - // Return the RtpSender with the given id, or null if none exists. - rtc::scoped_refptr> - FindSenderById(const std::string& sender_id) const - RTC_RUN_ON(signaling_thread()); - - // Return the RtpReceiver with the given id, or null if none exists. - rtc::scoped_refptr> - FindReceiverById(const std::string& receiver_id) const - RTC_RUN_ON(signaling_thread()); - - std::vector* GetRemoteSenderInfos( - cricket::MediaType media_type); - std::vector* GetLocalSenderInfos( - cricket::MediaType media_type); - const RtpSenderInfo* FindSenderInfo(const std::vector& infos, - const std::string& stream_id, - const std::string sender_id) const; // Returns the specified SCTP DataChannel in sctp_data_channels_, // or nullptr if not found. - DataChannel* FindDataChannelBySid(int sid) const + SctpDataChannel* FindDataChannelBySid(int sid) const RTC_RUN_ON(signaling_thread()); // Called when first configuring the port allocator. @@ -920,66 +536,6 @@ class PeerConnection : public PeerConnectionInternal, // This function should only be called from the worker thread. void StopRtcEventLog_w(); - // Ensures the configuration doesn't have any parameters with invalid values, - // or values that conflict with other parameters. - // - // Returns RTCError::OK() if there are no issues. - RTCError ValidateConfiguration(const RTCConfiguration& config) const; - - cricket::ChannelManager* channel_manager() const; - - enum class SessionError { - kNone, // No error. - kContent, // Error in BaseChannel SetLocalContent/SetRemoteContent. - kTransport, // Error from the underlying transport. - }; - - // Returns the last error in the session. See the enum above for details. 
- SessionError session_error() const RTC_RUN_ON(signaling_thread()) { - return session_error_; - } - const std::string& session_error_desc() const { return session_error_desc_; } - - cricket::ChannelInterface* GetChannel(const std::string& content_name); - - cricket::IceConfig ParseIceConfig( - const PeerConnectionInterface::RTCConfiguration& config) const; - - cricket::DataChannelType data_channel_type() const; - - // Called when an RTCCertificate is generated or retrieved by - // WebRTCSessionDescriptionFactory. Should happen before setLocalDescription. - void OnCertificateReady( - const rtc::scoped_refptr& certificate); - void OnDtlsSrtpSetupFailure(cricket::BaseChannel*, bool rtcp); - - // Non-const versions of local_description()/remote_description(), for use - // internally. - SessionDescriptionInterface* mutable_local_description() - RTC_RUN_ON(signaling_thread()) { - return pending_local_description_ ? pending_local_description_.get() - : current_local_description_.get(); - } - SessionDescriptionInterface* mutable_remote_description() - RTC_RUN_ON(signaling_thread()) { - return pending_remote_description_ ? pending_remote_description_.get() - : current_remote_description_.get(); - } - - // Updates the error state, signaling if necessary. - void SetSessionError(SessionError error, const std::string& error_desc); - - RTCError UpdateSessionState(SdpType type, - cricket::ContentSource source, - const cricket::SessionDescription* description); - // Push the media parts of the local or remote session description - // down to all of the channels. - RTCError PushdownMediaDescription(SdpType type, cricket::ContentSource source) - RTC_RUN_ON(signaling_thread()); - - RTCError PushdownTransportDescription(cricket::ContentSource source, - SdpType type); - // Returns true and the TransportInfo of the given |content_name| // from |description|. Returns false if it's not available. 
static bool GetTransportDescription( @@ -987,89 +543,12 @@ class PeerConnection : public PeerConnectionInternal, const std::string& content_name, cricket::TransportDescription* info); - // Enables media channels to allow sending of media. - // This enables media to flow on all configured audio/video channels and the - // RtpDataChannel. - void EnableSending() RTC_RUN_ON(signaling_thread()); - - // Destroys all BaseChannels and destroys the SCTP data channel, if present. - void DestroyAllChannels() RTC_RUN_ON(signaling_thread()); - // Returns the media index for a local ice candidate given the content name. // Returns false if the local session description does not have a media // content called |content_name|. bool GetLocalCandidateMediaIndex(const std::string& content_name, int* sdp_mline_index) RTC_RUN_ON(signaling_thread()); - // Uses all remote candidates in |remote_desc| in this session. - bool UseCandidatesInSessionDescription( - const SessionDescriptionInterface* remote_desc) - RTC_RUN_ON(signaling_thread()); - // Uses |candidate| in this session. - bool UseCandidate(const IceCandidateInterface* candidate) - RTC_RUN_ON(signaling_thread()); - RTCErrorOr FindContentInfo( - const SessionDescriptionInterface* description, - const IceCandidateInterface* candidate) RTC_RUN_ON(signaling_thread()); - // Deletes the corresponding channel of contents that don't exist in |desc|. - // |desc| can be null. This means that all channels are deleted. - void RemoveUnusedChannels(const cricket::SessionDescription* desc) - RTC_RUN_ON(signaling_thread()); - - // Allocates media channels based on the |desc|. If |desc| doesn't have - // the BUNDLE option, this method will disable BUNDLE in PortAllocator. - // This method will also delete any existing media channels before creating. 
- RTCError CreateChannels(const cricket::SessionDescription& desc) - RTC_RUN_ON(signaling_thread()); - - // If the BUNDLE policy is max-bundle, then we know for sure that all - // transports will be bundled from the start. This method returns the BUNDLE - // group if that's the case, or null if BUNDLE will be negotiated later. An - // error is returned if max-bundle is specified but the session description - // does not have a BUNDLE group. - RTCErrorOr GetEarlyBundleGroup( - const cricket::SessionDescription& desc) const - RTC_RUN_ON(signaling_thread()); - - // Helper methods to create media channels. - cricket::VoiceChannel* CreateVoiceChannel(const std::string& mid) - RTC_RUN_ON(signaling_thread()); - cricket::VideoChannel* CreateVideoChannel(const std::string& mid) - RTC_RUN_ON(signaling_thread()); - bool CreateDataChannel(const std::string& mid) RTC_RUN_ON(signaling_thread()); - - bool SetupDataChannelTransport_n(const std::string& mid) - RTC_RUN_ON(network_thread()); - void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread()); - - bool ValidateBundleSettings(const cricket::SessionDescription* desc); - bool HasRtcpMuxEnabled(const cricket::ContentInfo* content); - // Below methods are helper methods which verifies SDP. - RTCError ValidateSessionDescription(const SessionDescriptionInterface* sdesc, - cricket::ContentSource source) - RTC_RUN_ON(signaling_thread()); - - // Check if a call to SetLocalDescription is acceptable with a session - // description of the given type. - bool ExpectSetLocalDescription(SdpType type); - // Check if a call to SetRemoteDescription is acceptable with a session - // description of the given type. - bool ExpectSetRemoteDescription(SdpType type); - // Verifies a=setup attribute as per RFC 5763. - bool ValidateDtlsSetupAttribute(const cricket::SessionDescription* desc, - SdpType type); - - // Returns true if we are ready to push down the remote candidate. 
- // |remote_desc| is the new remote description, or NULL if the current remote - // description should be used. Output |valid| is true if the candidate media - // index is valid. - bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate, - const SessionDescriptionInterface* remote_desc, - bool* valid) RTC_RUN_ON(signaling_thread()); - - // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by - // this session. - bool SrtpRequired() const RTC_RUN_ON(signaling_thread()); // JsepTransportController signal handlers. void OnTransportControllerConnectionState(cricket::IceConnectionState state) @@ -1091,16 +570,6 @@ class PeerConnection : public PeerConnectionInternal, RTC_RUN_ON(signaling_thread()); void OnTransportControllerDtlsHandshakeError(rtc::SSLHandshakeError error); - const char* SessionErrorToString(SessionError error) const; - std::string GetSessionErrorMsg() RTC_RUN_ON(signaling_thread()); - - // Report the UMA metric SdpFormatReceived for the given remote offer. - void ReportSdpFormatReceived(const SessionDescriptionInterface& remote_offer); - - // Report inferred negotiated SDP semantics from a local/remote answer to the - // UMA observer. - void ReportNegotiatedSdpSemantics(const SessionDescriptionInterface& answer); - // Invoked when TransportController connection completion is signaled. // Reports stats for all transports in use. 
void ReportTransportStats() RTC_RUN_ON(signaling_thread()); @@ -1113,35 +582,9 @@ class PeerConnection : public PeerConnectionInternal, RTC_RUN_ON(signaling_thread()); void ReportIceCandidateCollected(const cricket::Candidate& candidate) RTC_RUN_ON(signaling_thread()); - void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate) - RTC_RUN_ON(signaling_thread()); - void NoteUsageEvent(UsageEvent event); void ReportUsagePattern() const RTC_RUN_ON(signaling_thread()); - void OnSentPacket_w(const rtc::SentPacket& sent_packet); - - const std::string GetTransportName(const std::string& content_name) - RTC_RUN_ON(signaling_thread()); - - // Functions for dealing with transports. - // Note that cricket code uses the term "channel" for what other code - // refers to as "transport". - - // Destroys and clears the BaseChannel associated with the given transceiver, - // if such channel is set. - void DestroyTransceiverChannel( - rtc::scoped_refptr> - transceiver); - - // Destroys the RTP data channel transport and/or the SCTP data channel - // transport and clears it. - void DestroyDataChannelTransport() RTC_RUN_ON(signaling_thread()); - - // Destroys the given ChannelInterface. - // The channel cannot be accessed after this method is called. - void DestroyChannelInterface(cricket::ChannelInterface* channel); - // JsepTransportController::Observer override. // // Called by |transport_controller_| when processing transport information @@ -1154,38 +597,17 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) override; - // RtpSenderBase::SetStreamsObserver override. - void OnSetStreams() override; - - // Returns the CryptoOptions for this PeerConnection. This will always - // return the RTCConfiguration.crypto_options if set and will only default - // back to the PeerConnectionFactory settings if nothing was set. 
- CryptoOptions GetCryptoOptions() RTC_RUN_ON(signaling_thread()); - - // Returns rtp transport, result can not be nullptr. - RtpTransportInternal* GetRtpTransport(const std::string& mid) - RTC_RUN_ON(signaling_thread()) { - auto rtp_transport = transport_controller_->GetRtpTransport(mid); - RTC_DCHECK(rtp_transport); - return rtp_transport; - } - - void UpdateNegotiationNeeded(); - bool CheckIfNegotiationIsNeeded(); - - // | sdp_type | is the type of the SDP that caused the rollback. - RTCError Rollback(SdpType sdp_type); + std::function + InitializeRtcpCallback(); - // Storing the factory as a scoped reference pointer ensures that the memory - // in the PeerConnectionFactoryImpl remains available as long as the - // PeerConnection is running. It is passed to PeerConnection as a raw pointer. - // However, since the reference counting is done in the - // PeerConnectionFactoryInterface all instances created using the raw pointer - // will refer to the same reference count. - const rtc::scoped_refptr factory_; + const rtc::scoped_refptr context_; + const PeerConnectionFactoryInterface::Options options_; PeerConnectionObserver* observer_ RTC_GUARDED_BY(signaling_thread()) = nullptr; + const bool is_unified_plan_; + // The EventLog needs to outlive |call_| (and any other object that uses it). std::unique_ptr event_log_ RTC_GUARDED_BY(worker_thread()); @@ -1193,15 +615,6 @@ class PeerConnection : public PeerConnectionInternal, // pointer (but not touch the object) from any thread. RtcEventLog* const event_log_ptr_ RTC_PT_GUARDED_BY(worker_thread()); - // The operations chain is used by the offer/answer exchange methods to ensure - // they are executed in the right order. For example, if - // SetRemoteDescription() is invoked while CreateOffer() is still pending, the - // SRD operation will not start until CreateOffer() has completed. See - // https://w3c.github.io/webrtc-pc/#dfn-operations-chain. 
- rtc::scoped_refptr operations_chain_ - RTC_GUARDED_BY(signaling_thread()); - - SignalingState signaling_state_ RTC_GUARDED_BY(signaling_thread()) = kStable; IceConnectionState ice_connection_state_ RTC_GUARDED_BY(signaling_thread()) = kIceConnectionNew; PeerConnectionInterface::IceConnectionState standardized_ice_connection_state_ @@ -1214,179 +627,73 @@ class PeerConnection : public PeerConnectionInternal, PeerConnectionInterface::RTCConfiguration configuration_ RTC_GUARDED_BY(signaling_thread()); - // Field-trial based configuration for datagram transport. - const DatagramTransportConfig datagram_transport_config_; - - // Field-trial based configuration for datagram transport data channels. - const DatagramTransportDataChannelConfig - datagram_transport_data_channel_config_; - - // Final, resolved value for whether datagram transport is in use. - bool use_datagram_transport_ RTC_GUARDED_BY(signaling_thread()) = false; - - // Equivalent of |use_datagram_transport_|, but for its use with data - // channels. - bool use_datagram_transport_for_data_channels_ - RTC_GUARDED_BY(signaling_thread()) = false; - - // Resolved value of whether to use data channels only for incoming calls. - bool use_datagram_transport_for_data_channels_receive_only_ - RTC_GUARDED_BY(signaling_thread()) = false; - // TODO(zstein): |async_resolver_factory_| can currently be nullptr if it // is not injected. It should be required once chromium supplies it. - std::unique_ptr async_resolver_factory_ + const std::unique_ptr async_resolver_factory_ RTC_GUARDED_BY(signaling_thread()); std::unique_ptr port_allocator_; // TODO(bugs.webrtc.org/9987): Accessed on both // signaling and network thread. - std::unique_ptr + const std::unique_ptr ice_transport_factory_; // TODO(bugs.webrtc.org/9987): Accessed on the // signaling thread but the underlying raw // pointer is given to // |jsep_transport_controller_| and used on the // network thread. 
- std::unique_ptr - tls_cert_verifier_; // TODO(bugs.webrtc.org/9987): Accessed on both - // signaling and network thread. - - // One PeerConnection has only one RTCP CNAME. - // https://tools.ietf.org/html/draft-ietf-rtcweb-rtp-usage-26#section-4.9 - const std::string rtcp_cname_; - - // Streams added via AddStream. - const rtc::scoped_refptr local_streams_ - RTC_GUARDED_BY(signaling_thread()); - // Streams created as a result of SetRemoteDescription. - const rtc::scoped_refptr remote_streams_ - RTC_GUARDED_BY(signaling_thread()); - - std::vector> stream_observers_ - RTC_GUARDED_BY(signaling_thread()); - - // These lists store sender info seen in local/remote descriptions. - std::vector remote_audio_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - std::vector remote_video_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - std::vector local_audio_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - std::vector local_video_sender_infos_ - RTC_GUARDED_BY(signaling_thread()); - - bool remote_peer_supports_msid_ RTC_GUARDED_BY(signaling_thread()) = false; + const std::unique_ptr tls_cert_verifier_ + RTC_GUARDED_BY(network_thread()); // The unique_ptr belongs to the worker thread, but the Call object manages // its own thread safety. std::unique_ptr call_ RTC_GUARDED_BY(worker_thread()); - - rtc::AsyncInvoker rtcp_invoker_ RTC_GUARDED_BY(network_thread()); + std::unique_ptr call_safety_ + RTC_GUARDED_BY(worker_thread()); // Points to the same thing as `call_`. Since it's const, we may read the // pointer from any thread. + // TODO(bugs.webrtc.org/11992): Remove this workaround (and potential dangling + // pointer). Call* const call_ptr_; std::unique_ptr stats_ RTC_GUARDED_BY(signaling_thread()); // A pointer is passed to senders_ rtc::scoped_refptr stats_collector_ RTC_GUARDED_BY(signaling_thread()); - // Holds changes made to transceivers during applying descriptors for - // potential rollback. Gets cleared once signaling state goes to stable. 
- std::map>, - TransceiverStableState> - transceiver_stable_states_by_transceivers_; - // Holds remote stream ids for transceivers from stable state. - std::map>, - std::vector> - remote_stream_ids_by_transceivers_; - std::vector< - rtc::scoped_refptr>> - transceivers_; // TODO(bugs.webrtc.org/9987): Accessed on both signaling - // and network thread. - - // In Unified Plan, if we encounter remote SDP that does not contain an a=msid - // line we create and use a stream with a random ID for our receivers. This is - // to support legacy endpoints that do not support the a=msid attribute (as - // opposed to streamless tracks with "a=msid:-"). - rtc::scoped_refptr missing_msid_default_stream_ - RTC_GUARDED_BY(signaling_thread()); - // MIDs will be generated using this generator which will keep track of - // all the MIDs that have been seen over the life of the PeerConnection. - rtc::UniqueStringGenerator mid_generator_ RTC_GUARDED_BY(signaling_thread()); - - SessionError session_error_ RTC_GUARDED_BY(signaling_thread()) = - SessionError::kNone; - std::string session_error_desc_ RTC_GUARDED_BY(signaling_thread()); std::string session_id_ RTC_GUARDED_BY(signaling_thread()); std::unique_ptr transport_controller_; // TODO(bugs.webrtc.org/9987): Accessed on both // signaling and network thread. - std::unique_ptr - sctp_factory_; // TODO(bugs.webrtc.org/9987): Accessed on both - // signaling and network thread. // |sctp_mid_| is the content name (MID) in SDP. // Note: this is used as the data channel MID by both SCTP and data channel // transports. It is set when either transport is initialized and unset when // both transports are deleted. - absl::optional - sctp_mid_; // TODO(bugs.webrtc.org/9987): Accessed on both signaling - // and network thread. - - // Whether this peer is the caller. Set when the local description is applied. 
- absl::optional is_caller_ RTC_GUARDED_BY(signaling_thread()); - - - - std::unique_ptr current_local_description_ - RTC_GUARDED_BY(signaling_thread()); - std::unique_ptr pending_local_description_ - RTC_GUARDED_BY(signaling_thread()); - std::unique_ptr current_remote_description_ + // There is one copy on the signaling thread and another copy on the + // networking thread. Changes are always initiated from the signaling + // thread, but applied first on the networking thread via an invoke(). + absl::optional sctp_mid_s_ RTC_GUARDED_BY(signaling_thread()); + absl::optional sctp_mid_n_ RTC_GUARDED_BY(network_thread()); + + // The machinery for handling offers and answers. Const after initialization. + std::unique_ptr sdp_handler_ RTC_GUARDED_BY(signaling_thread()); - std::unique_ptr pending_remote_description_ - RTC_GUARDED_BY(signaling_thread()); - bool dtls_enabled_ RTC_GUARDED_BY(signaling_thread()) = false; - // List of content names for which the remote side triggered an ICE restart. - std::set pending_ice_restarts_ - RTC_GUARDED_BY(signaling_thread()); - - std::unique_ptr webrtc_session_desc_factory_ - RTC_GUARDED_BY(signaling_thread()); - - // Member variables for caching global options. - cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); - cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); + bool dtls_enabled_ RTC_GUARDED_BY(signaling_thread()) = false; - int usage_event_accumulator_ RTC_GUARDED_BY(signaling_thread()) = 0; + UsagePattern usage_pattern_ RTC_GUARDED_BY(signaling_thread()); bool return_histogram_very_quickly_ RTC_GUARDED_BY(signaling_thread()) = false; - // This object should be used to generate any SSRC that is not explicitly - // specified by the user (or by the remote party). - // The generator is not used directly, instead it is passed on to the - // channel manager and the session description factory. 
- rtc::UniqueRandomIdGenerator ssrc_generator_ - RTC_GUARDED_BY(signaling_thread()); - - // A video bitrate allocator factory. - // This can injected using the PeerConnectionDependencies, - // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called. - // Note that one can still choose to override this in a MediaEngine - // if one wants too. - std::unique_ptr - video_bitrate_allocator_factory_; + DataChannelController data_channel_controller_; - std::unique_ptr - local_ice_credentials_to_replace_ RTC_GUARDED_BY(signaling_thread()); - bool is_negotiation_needed_ RTC_GUARDED_BY(signaling_thread()) = false; + // Machinery for handling messages posted to oneself + PeerConnectionMessageHandler message_handler_; - DataChannelController data_channel_controller_; - rtc::WeakPtrFactory weak_ptr_factory_ - RTC_GUARDED_BY(signaling_thread()); + // Administration of senders, receivers and transceivers + // Accessed on both signaling and network thread. Const after Initialize(). + std::unique_ptr rtp_manager_; }; } // namespace webrtc diff --git a/pc/peer_connection_adaptation_integrationtest.cc b/pc/peer_connection_adaptation_integrationtest.cc new file mode 100644 index 0000000000..71d054eb90 --- /dev/null +++ b/pc/peer_connection_adaptation_integrationtest.cc @@ -0,0 +1,161 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include + +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "call/adaptation/test/fake_resource.h" +#include "pc/test/fake_periodic_video_source.h" +#include "pc/test/fake_periodic_video_track_source.h" +#include "pc/test/peer_connection_test_wrapper.h" +#include "rtc_base/checks.h" +#include "rtc_base/gunit.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/thread.h" +#include "rtc_base/virtual_socket_server.h" +#include "test/gtest.h" + +namespace webrtc { + +const int64_t kDefaultTimeoutMs = 5000; + +struct TrackWithPeriodicSource { + rtc::scoped_refptr track; + rtc::scoped_refptr periodic_track_source; +}; + +// Performs an O/A exchange and waits until the signaling state is stable again. +void Negotiate(rtc::scoped_refptr caller, + rtc::scoped_refptr callee) { + // Wire up callbacks and listeners such that a full O/A is performed in + // response to CreateOffer(). + PeerConnectionTestWrapper::Connect(caller.get(), callee.get()); + caller->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions()); + caller->WaitForNegotiation(); +} + +TrackWithPeriodicSource CreateTrackWithPeriodicSource( + rtc::scoped_refptr factory) { + FakePeriodicVideoSource::Config periodic_track_source_config; + periodic_track_source_config.frame_interval_ms = 100; + periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis(); + rtc::scoped_refptr periodic_track_source = + new rtc::RefCountedObject( + periodic_track_source_config, /* remote */ false); + TrackWithPeriodicSource track_with_source; + track_with_source.track = + factory->CreateVideoTrack("PeriodicTrack", periodic_track_source); + track_with_source.periodic_track_source = periodic_track_source; + return track_with_source; +} + +// Triggers overuse and obtains VideoSinkWants. 
Adaptation processing happens in +// parallel and this function makes no guarantee that the returned VideoSinkWants +// have yet to reflect the overuse signal. Used together with EXPECT_TRUE_WAIT +// to "spam overuse until a change is observed". +rtc::VideoSinkWants TriggerOveruseAndGetSinkWants( + rtc::scoped_refptr fake_resource, + const FakePeriodicVideoSource& source) { + fake_resource->SetUsageState(ResourceUsageState::kOveruse); + return source.wants(); +} + +class PeerConnectionAdaptationIntegrationTest : public ::testing::Test { + public: + PeerConnectionAdaptationIntegrationTest() + : virtual_socket_server_(), + network_thread_(new rtc::Thread(&virtual_socket_server_)), + worker_thread_(rtc::Thread::Create()) { + RTC_CHECK(network_thread_->Start()); + RTC_CHECK(worker_thread_->Start()); + } + + rtc::scoped_refptr CreatePcWrapper( + const char* name) { + rtc::scoped_refptr pc_wrapper = + new rtc::RefCountedObject( + name, network_thread_.get(), worker_thread_.get()); + PeerConnectionInterface::RTCConfiguration config; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + EXPECT_TRUE(pc_wrapper->CreatePc(config, CreateBuiltinAudioEncoderFactory(), + CreateBuiltinAudioDecoderFactory())); + return pc_wrapper; + } + + protected: + rtc::VirtualSocketServer virtual_socket_server_; + std::unique_ptr network_thread_; + std::unique_ptr worker_thread_; +}; + +TEST_F(PeerConnectionAdaptationIntegrationTest, + ResouceInjectedAfterNegotiationCausesReductionInResolution) { + auto caller_wrapper = CreatePcWrapper("caller"); + auto caller = caller_wrapper->pc(); + auto callee_wrapper = CreatePcWrapper("callee"); + + // Adding a track and negotiating ensures that a VideoSendStream exists. + TrackWithPeriodicSource track_with_source = + CreateTrackWithPeriodicSource(caller_wrapper->pc_factory()); + auto sender = caller->AddTrack(track_with_source.track, {}).value(); + Negotiate(caller_wrapper, callee_wrapper); + // Prefer degrading resolution.
+ auto parameters = sender->GetParameters(); + parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; + sender->SetParameters(parameters); + + const auto& source = + track_with_source.periodic_track_source->fake_periodic_source(); + int pixel_count_before_overuse = source.wants().max_pixel_count; + + // Inject a fake resource and spam kOveruse until resolution becomes limited. + auto fake_resource = FakeResource::Create("FakeResource"); + caller->AddAdaptationResource(fake_resource); + EXPECT_TRUE_WAIT( + TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < + pixel_count_before_overuse, + kDefaultTimeoutMs); +} + +TEST_F(PeerConnectionAdaptationIntegrationTest, + ResouceInjectedBeforeNegotiationCausesReductionInResolution) { + auto caller_wrapper = CreatePcWrapper("caller"); + auto caller = caller_wrapper->pc(); + auto callee_wrapper = CreatePcWrapper("callee"); + + // Inject a fake resource before adding any tracks or negotiating. + auto fake_resource = FakeResource::Create("FakeResource"); + caller->AddAdaptationResource(fake_resource); + + // Adding a track and negotiating ensures that a VideoSendStream exists. + TrackWithPeriodicSource track_with_source = + CreateTrackWithPeriodicSource(caller_wrapper->pc_factory()); + auto sender = caller->AddTrack(track_with_source.track, {}).value(); + Negotiate(caller_wrapper, callee_wrapper); + // Prefer degrading resolution. + auto parameters = sender->GetParameters(); + parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; + sender->SetParameters(parameters); + + const auto& source = + track_with_source.periodic_track_source->fake_periodic_source(); + int pixel_count_before_overuse = source.wants().max_pixel_count; + + // Spam kOveruse until resolution becomes limited. 
+ EXPECT_TRUE_WAIT( + TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < + pixel_count_before_overuse, + kDefaultTimeoutMs); +} + +} // namespace webrtc diff --git a/pc/peer_connection_bundle_unittest.cc b/pc/peer_connection_bundle_unittest.cc index 543c9be81a..c544db396f 100644 --- a/pc/peer_connection_bundle_unittest.cc +++ b/pc/peer_connection_bundle_unittest.cc @@ -873,7 +873,7 @@ TEST_F(PeerConnectionBundleTestUnifiedPlan, // Stop all transceivers, causing all m= sections to be rejected. for (const auto& transceiver : callee->pc()->GetTransceivers()) { - transceiver->Stop(); + transceiver->StopInternal(); } EXPECT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); diff --git a/pc/peer_connection_crypto_unittest.cc b/pc/peer_connection_crypto_unittest.cc index 99eb5cd7ac..32e8cbd74c 100644 --- a/pc/peer_connection_crypto_unittest.cc +++ b/pc/peer_connection_crypto_unittest.cc @@ -149,9 +149,12 @@ SdpContentPredicate HaveSdesGcmCryptos(size_t num_crypto_suites) { if (cryptos.size() != num_crypto_suites) { return false; } - const cricket::CryptoParams first_params = cryptos[0]; - return first_params.key_params.size() == 67U && - first_params.cipher_suite == "AEAD_AES_256_GCM"; + for (size_t i = 0; i < cryptos.size(); ++i) { + if (cryptos[i].key_params.size() == 67U && + cryptos[i].cipher_suite == "AEAD_AES_256_GCM") + return true; + } + return false; }; } @@ -333,7 +336,14 @@ TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWithSdesAndGcm) { auto caller = CreatePeerConnectionWithAudioVideo(config); auto callee = CreatePeerConnectionWithAudioVideo(config); - callee->SetRemoteDescription(caller->CreateOffer()); + auto offer = caller->CreateOffer(); + for (cricket::ContentInfo& content : offer->description()->contents()) { + auto cryptos = content.media_description()->cryptos(); + cryptos.erase(cryptos.begin()); // Assumes that non-GCM is the default. 
+ content.media_description()->set_cryptos(cryptos); + } + + callee->SetRemoteDescription(std::move(offer)); auto answer = callee->CreateAnswer(); ASSERT_TRUE(answer); diff --git a/pc/peer_connection_data_channel_unittest.cc b/pc/peer_connection_data_channel_unittest.cc index b063c39307..6c51f01594 100644 --- a/pc/peer_connection_data_channel_unittest.cc +++ b/pc/peer_connection_data_channel_unittest.cc @@ -45,8 +45,8 @@ #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif -#include "pc/test/fake_sctp_transport.h" #include "rtc_base/virtual_socket_server.h" +#include "test/pc/sctp/fake_sctp_transport.h" namespace webrtc { @@ -58,46 +58,20 @@ using ::testing::Values; namespace { -PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies( - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - rtc::Thread* signaling_thread, - std::unique_ptr media_engine, - std::unique_ptr call_factory) { +PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies() { PeerConnectionFactoryDependencies deps; - deps.network_thread = network_thread; - deps.worker_thread = worker_thread; - deps.signaling_thread = signaling_thread; + deps.network_thread = rtc::Thread::Current(); + deps.worker_thread = rtc::Thread::Current(); + deps.signaling_thread = rtc::Thread::Current(); deps.task_queue_factory = CreateDefaultTaskQueueFactory(); - deps.media_engine = std::move(media_engine); - deps.call_factory = std::move(call_factory); + deps.media_engine = std::make_unique(); + deps.call_factory = CreateCallFactory(); + deps.sctp_factory = std::make_unique(); return deps; } } // namespace -class PeerConnectionFactoryForDataChannelTest - : public rtc::RefCountedObject { - public: - PeerConnectionFactoryForDataChannelTest() - : rtc::RefCountedObject( - CreatePeerConnectionFactoryDependencies( - rtc::Thread::Current(), - rtc::Thread::Current(), - rtc::Thread::Current(), - std::make_unique(), - CreateCallFactory())) {} - - std::unique_ptr - 
CreateSctpTransportInternalFactory() { - auto factory = std::make_unique(); - last_fake_sctp_transport_factory_ = factory.get(); - return factory; - } - - FakeSctpTransportFactory* last_fake_sctp_transport_factory_ = nullptr; -}; - class PeerConnectionWrapperForDataChannelTest : public PeerConnectionWrapper { public: using PeerConnectionWrapper::PeerConnectionWrapper; @@ -111,8 +85,8 @@ class PeerConnectionWrapperForDataChannelTest : public PeerConnectionWrapper { sctp_transport_factory_ = sctp_transport_factory; } - absl::optional sctp_content_name() { - return GetInternalPeerConnection()->sctp_content_name(); + absl::optional sctp_mid() { + return GetInternalPeerConnection()->sctp_mid(); } absl::optional sctp_transport_name() { @@ -155,10 +129,12 @@ class PeerConnectionDataChannelBaseTest : public ::testing::Test { WrapperPtr CreatePeerConnection( const RTCConfiguration& config, const PeerConnectionFactoryInterface::Options factory_options) { - rtc::scoped_refptr pc_factory( - new PeerConnectionFactoryForDataChannelTest()); + auto factory_deps = CreatePeerConnectionFactoryDependencies(); + FakeSctpTransportFactory* fake_sctp_transport_factory = + static_cast(factory_deps.sctp_factory.get()); + rtc::scoped_refptr pc_factory = + CreateModularPeerConnectionFactory(std::move(factory_deps)); pc_factory->SetOptions(factory_options); - RTC_CHECK(pc_factory->Initialize()); auto observer = std::make_unique(); RTCConfiguration modified_config = config; modified_config.sdp_semantics = sdp_semantics_; @@ -171,9 +147,7 @@ class PeerConnectionDataChannelBaseTest : public ::testing::Test { observer->SetPeerConnectionInterface(pc.get()); auto wrapper = std::make_unique( pc_factory, pc, std::move(observer)); - RTC_DCHECK(pc_factory->last_fake_sctp_transport_factory_); - wrapper->set_sctp_transport_factory( - pc_factory->last_fake_sctp_transport_factory_); + wrapper->set_sctp_transport_factory(fake_sctp_transport_factory); return wrapper; } @@ -212,6 +186,13 @@ class 
PeerConnectionDataChannelTest : PeerConnectionDataChannelBaseTest(GetParam()) {} }; +class PeerConnectionDataChannelUnifiedPlanTest + : public PeerConnectionDataChannelBaseTest { + protected: + PeerConnectionDataChannelUnifiedPlanTest() + : PeerConnectionDataChannelBaseTest(SdpSemantics::kUnifiedPlan) {} +}; + TEST_P(PeerConnectionDataChannelTest, NoSctpTransportCreatedIfRtpDataChannelEnabled) { RTCConfiguration config; @@ -248,14 +229,14 @@ TEST_P(PeerConnectionDataChannelTest, InternalSctpTransportDeletedOnTeardown) { nullptr); } -// Test that sctp_content_name/sctp_transport_name (used for stats) are correct +// Test that sctp_mid/sctp_transport_name (used for stats) are correct // before and after BUNDLE is negotiated. TEST_P(PeerConnectionDataChannelTest, SctpContentAndTransportNameSetCorrectly) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); // Initially these fields should be empty. - EXPECT_FALSE(caller->sctp_content_name()); + EXPECT_FALSE(caller->sctp_mid()); EXPECT_FALSE(caller->sctp_transport_name()); // Create offer with audio/video/data. 
@@ -278,8 +259,8 @@ TEST_P(PeerConnectionDataChannelTest, SctpContentAndTransportNameSetCorrectly) { caller->SetLocalDescription(CloneSessionDescription(offer.get()))); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); - ASSERT_TRUE(caller->sctp_content_name()); - EXPECT_EQ(data_mid, *caller->sctp_content_name()); + ASSERT_TRUE(caller->sctp_mid()); + EXPECT_EQ(data_mid, *caller->sctp_mid()); ASSERT_TRUE(caller->sctp_transport_name()); EXPECT_EQ(data_mid, *caller->sctp_transport_name()); @@ -290,8 +271,8 @@ TEST_P(PeerConnectionDataChannelTest, SctpContentAndTransportNameSetCorrectly) { ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - ASSERT_TRUE(caller->sctp_content_name()); - EXPECT_EQ(data_mid, *caller->sctp_content_name()); + ASSERT_TRUE(caller->sctp_mid()); + EXPECT_EQ(data_mid, *caller->sctp_mid()); ASSERT_TRUE(caller->sctp_transport_name()); EXPECT_EQ(audio_mid, *caller->sctp_transport_name()); } @@ -411,4 +392,28 @@ INSTANTIATE_TEST_SUITE_P(PeerConnectionDataChannelTest, Values(SdpSemantics::kPlanB, SdpSemantics::kUnifiedPlan)); +TEST_F(PeerConnectionDataChannelUnifiedPlanTest, + ReOfferAfterPeerRejectsDataChannel) { + auto caller = CreatePeerConnectionWithDataChannel(); + PeerConnectionFactoryInterface::Options options; + options.disable_sctp_data_channels = true; + auto callee = CreatePeerConnection(RTCConfiguration(), options); + + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + + auto offer = caller->CreateOffer(); + ASSERT_TRUE(offer); + const auto& contents = offer->description()->contents(); + ASSERT_EQ(1u, contents.size()); + EXPECT_TRUE(contents[0].rejected); + + ASSERT_TRUE( + caller->SetLocalDescription(CloneSessionDescription(offer.get()))); + ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); + + auto answer = callee->CreateAnswerAndSetAsLocal(); + ASSERT_TRUE(answer); + EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer))); +} + } // namespace webrtc diff --git 
a/pc/peer_connection_factory.cc b/pc/peer_connection_factory.cc index c8bb22e43e..3ab907fb09 100644 --- a/pc/peer_connection_factory.cc +++ b/pc/peer_connection_factory.cc @@ -10,24 +10,27 @@ #include "pc/peer_connection_factory.h" +#include #include +#include #include -#include +#include "absl/strings/match.h" +#include "api/async_resolver_factory.h" +#include "api/call/call_factory_interface.h" #include "api/fec_controller.h" +#include "api/ice_transport_interface.h" #include "api/media_stream_proxy.h" #include "api/media_stream_track_proxy.h" #include "api/network_state_predictor.h" +#include "api/packet_socket_factory.h" #include "api/peer_connection_factory_proxy.h" #include "api/peer_connection_proxy.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/transport/field_trial_based_config.h" -#include "api/transport/media/media_transport_interface.h" -#include "api/turn_customizer.h" +#include "api/transport/bitrate_settings.h" #include "api/units/data_rate.h" -#include "api/video_track_source_proxy.h" -#include "media/base/rtp_data_engine.h" -#include "media/sctp/sctp_transport.h" +#include "call/audio_state.h" +#include "media/base/media_engine.h" #include "p2p/base/basic_async_resolver_factory.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/default_ice_transport_factory.h" @@ -37,12 +40,17 @@ #include "pc/media_stream.h" #include "pc/peer_connection.h" #include "pc/rtp_parameters_conversion.h" +#include "pc/session_description.h" #include "pc/video_track.h" #include "rtc_base/bind.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/system/file_wrapper.h" namespace webrtc { @@ -50,180 +58,148 @@ namespace webrtc { 
rtc::scoped_refptr CreateModularPeerConnectionFactory( PeerConnectionFactoryDependencies dependencies) { - rtc::scoped_refptr pc_factory( - new rtc::RefCountedObject( - std::move(dependencies))); - // Call Initialize synchronously but make sure it is executed on - // |signaling_thread|. - MethodCall call( - pc_factory.get(), &PeerConnectionFactory::Initialize); - bool result = call.Marshal(RTC_FROM_HERE, pc_factory->signaling_thread()); - - if (!result) { + // The PeerConnectionFactory must be created on the signaling thread. + if (dependencies.signaling_thread && + !dependencies.signaling_thread->IsCurrent()) { + return dependencies.signaling_thread + ->Invoke>( + RTC_FROM_HERE, [&dependencies] { + return CreateModularPeerConnectionFactory( + std::move(dependencies)); + }); + } + + auto pc_factory = PeerConnectionFactory::Create(std::move(dependencies)); + if (!pc_factory) { return nullptr; } + // Verify that the invocation and the initialization ended up agreeing on the + // thread. + RTC_DCHECK_RUN_ON(pc_factory->signaling_thread()); return PeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(), pc_factory); } +// Static +rtc::scoped_refptr PeerConnectionFactory::Create( + PeerConnectionFactoryDependencies dependencies) { + auto context = ConnectionContext::Create(&dependencies); + if (!context) { + return nullptr; + } + return new rtc::RefCountedObject(context, + &dependencies); +} + PeerConnectionFactory::PeerConnectionFactory( - PeerConnectionFactoryDependencies dependencies) - : wraps_current_thread_(false), - network_thread_(dependencies.network_thread), - worker_thread_(dependencies.worker_thread), - signaling_thread_(dependencies.signaling_thread), - task_queue_factory_(std::move(dependencies.task_queue_factory)), - media_engine_(std::move(dependencies.media_engine)), - call_factory_(std::move(dependencies.call_factory)), - event_log_factory_(std::move(dependencies.event_log_factory)), - 
fec_controller_factory_(std::move(dependencies.fec_controller_factory)), + rtc::scoped_refptr context, + PeerConnectionFactoryDependencies* dependencies) + : context_(context), + task_queue_factory_(std::move(dependencies->task_queue_factory)), + event_log_factory_(std::move(dependencies->event_log_factory)), + fec_controller_factory_(std::move(dependencies->fec_controller_factory)), network_state_predictor_factory_( - std::move(dependencies.network_state_predictor_factory)), + std::move(dependencies->network_state_predictor_factory)), injected_network_controller_factory_( - std::move(dependencies.network_controller_factory)), - media_transport_factory_(std::move(dependencies.media_transport_factory)), - neteq_factory_(std::move(dependencies.neteq_factory)), - trials_(dependencies.trials ? std::move(dependencies.trials) - : std::make_unique()) { - if (!network_thread_) { - owned_network_thread_ = rtc::Thread::CreateWithSocketServer(); - owned_network_thread_->SetName("pc_network_thread", nullptr); - owned_network_thread_->Start(); - network_thread_ = owned_network_thread_.get(); - } - - if (!worker_thread_) { - owned_worker_thread_ = rtc::Thread::Create(); - owned_worker_thread_->SetName("pc_worker_thread", nullptr); - owned_worker_thread_->Start(); - worker_thread_ = owned_worker_thread_.get(); - } + std::move(dependencies->network_controller_factory)), + neteq_factory_(std::move(dependencies->neteq_factory)) {} - if (!signaling_thread_) { - signaling_thread_ = rtc::Thread::Current(); - if (!signaling_thread_) { - // If this thread isn't already wrapped by an rtc::Thread, create a - // wrapper and own it in this class. 
- signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread(); - wraps_current_thread_ = true; - } - } -} +PeerConnectionFactory::PeerConnectionFactory( + PeerConnectionFactoryDependencies dependencies) + : PeerConnectionFactory(ConnectionContext::Create(&dependencies), + &dependencies) {} PeerConnectionFactory::~PeerConnectionFactory() { - RTC_DCHECK(signaling_thread_->IsCurrent()); - channel_manager_.reset(nullptr); - - // Make sure |worker_thread_| and |signaling_thread_| outlive - // |default_socket_factory_| and |default_network_manager_|. - default_socket_factory_ = nullptr; - default_network_manager_ = nullptr; - - if (wraps_current_thread_) - rtc::ThreadManager::Instance()->UnwrapCurrentThread(); -} - -bool PeerConnectionFactory::Initialize() { - RTC_DCHECK(signaling_thread_->IsCurrent()); - rtc::InitRandom(rtc::Time32()); - - default_network_manager_.reset(new rtc::BasicNetworkManager()); - if (!default_network_manager_) { - return false; - } - - default_socket_factory_.reset( - new rtc::BasicPacketSocketFactory(network_thread_)); - if (!default_socket_factory_) { - return false; - } - - channel_manager_ = std::make_unique( - std::move(media_engine_), std::make_unique(), - worker_thread_, network_thread_); - - channel_manager_->SetVideoRtxEnabled(true); - if (!channel_manager_->Init()) { - return false; - } - - return true; + RTC_DCHECK_RUN_ON(signaling_thread()); } void PeerConnectionFactory::SetOptions(const Options& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); options_ = options; } RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities( cricket::MediaType kind) const { - RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK_RUN_ON(signaling_thread()); switch (kind) { case cricket::MEDIA_TYPE_AUDIO: { cricket::AudioCodecs cricket_codecs; - cricket::RtpHeaderExtensions cricket_extensions; - channel_manager_->GetSupportedAudioSendCodecs(&cricket_codecs); - channel_manager_->GetSupportedAudioRtpHeaderExtensions( - 
&cricket_extensions); - return ToRtpCapabilities(cricket_codecs, cricket_extensions); + channel_manager()->GetSupportedAudioSendCodecs(&cricket_codecs); + return ToRtpCapabilities( + cricket_codecs, + channel_manager()->GetDefaultEnabledAudioRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_VIDEO: { cricket::VideoCodecs cricket_codecs; - cricket::RtpHeaderExtensions cricket_extensions; - channel_manager_->GetSupportedVideoSendCodecs(&cricket_codecs); - channel_manager_->GetSupportedVideoRtpHeaderExtensions( - &cricket_extensions); - return ToRtpCapabilities(cricket_codecs, cricket_extensions); + channel_manager()->GetSupportedVideoSendCodecs(&cricket_codecs); + return ToRtpCapabilities( + cricket_codecs, + channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); + } + case cricket::MEDIA_TYPE_SCREEN: { + cricket::VideoCodecs cricket_codecs; + channel_manager()->GetSupportedVideoSendCodecs(&cricket_codecs); + return ToRtpCapabilities( + cricket_codecs, + channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_DATA: return RtpCapabilities(); + case cricket::MEDIA_TYPE_UNSUPPORTED: + return RtpCapabilities(); } - // Not reached; avoids compile warning. 
- FATAL(); + RTC_CHECK_NOTREACHED(); } RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities( cricket::MediaType kind) const { - RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_DCHECK_RUN_ON(signaling_thread()); switch (kind) { case cricket::MEDIA_TYPE_AUDIO: { cricket::AudioCodecs cricket_codecs; - cricket::RtpHeaderExtensions cricket_extensions; - channel_manager_->GetSupportedAudioReceiveCodecs(&cricket_codecs); - channel_manager_->GetSupportedAudioRtpHeaderExtensions( - &cricket_extensions); - return ToRtpCapabilities(cricket_codecs, cricket_extensions); + channel_manager()->GetSupportedAudioReceiveCodecs(&cricket_codecs); + return ToRtpCapabilities( + cricket_codecs, + channel_manager()->GetDefaultEnabledAudioRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_VIDEO: { cricket::VideoCodecs cricket_codecs; - cricket::RtpHeaderExtensions cricket_extensions; - channel_manager_->GetSupportedVideoReceiveCodecs(&cricket_codecs); - channel_manager_->GetSupportedVideoRtpHeaderExtensions( - &cricket_extensions); - return ToRtpCapabilities(cricket_codecs, cricket_extensions); + channel_manager()->GetSupportedVideoReceiveCodecs(&cricket_codecs); + return ToRtpCapabilities( + cricket_codecs, + channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); + } + case cricket::MEDIA_TYPE_SCREEN: { + cricket::VideoCodecs cricket_codecs; + channel_manager()->GetSupportedVideoReceiveCodecs(&cricket_codecs); + return ToRtpCapabilities( + cricket_codecs, + channel_manager()->GetDefaultEnabledVideoRtpHeaderExtensions()); } case cricket::MEDIA_TYPE_DATA: return RtpCapabilities(); + case cricket::MEDIA_TYPE_UNSUPPORTED: + return RtpCapabilities(); } - // Not reached; avoids compile warning. 
- FATAL(); + RTC_CHECK_NOTREACHED(); } rtc::scoped_refptr PeerConnectionFactory::CreateAudioSource(const cricket::AudioOptions& options) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK(signaling_thread()->IsCurrent()); rtc::scoped_refptr source( LocalAudioSource::Create(&options)); return source; } bool PeerConnectionFactory::StartAecDump(FILE* file, int64_t max_size_bytes) { - RTC_DCHECK(signaling_thread_->IsCurrent()); - return channel_manager_->StartAecDump(FileWrapper(file), max_size_bytes); + RTC_DCHECK(signaling_thread()->IsCurrent()); + return channel_manager()->StartAecDump(FileWrapper(file), max_size_bytes); } void PeerConnectionFactory::StopAecDump() { - RTC_DCHECK(signaling_thread_->IsCurrent()); - channel_manager_->StopAecDump(); + RTC_DCHECK(signaling_thread()->IsCurrent()); + channel_manager()->StopAecDump(); } rtc::scoped_refptr @@ -244,7 +220,7 @@ rtc::scoped_refptr PeerConnectionFactory::CreatePeerConnection( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!(dependencies.allocator && dependencies.packet_socket_factory)) << "You can't set both allocator and packet_socket_factory; " "the former is going away (see bugs.webrtc.org/7447"; @@ -252,23 +228,19 @@ PeerConnectionFactory::CreatePeerConnection( // Set internal defaults if optional dependencies are not set. 
if (!dependencies.cert_generator) { dependencies.cert_generator = - std::make_unique(signaling_thread_, - network_thread_); + std::make_unique(signaling_thread(), + network_thread()); } if (!dependencies.allocator) { rtc::PacketSocketFactory* packet_socket_factory; if (dependencies.packet_socket_factory) packet_socket_factory = dependencies.packet_socket_factory.get(); else - packet_socket_factory = default_socket_factory_.get(); - - network_thread_->Invoke(RTC_FROM_HERE, [this, &configuration, - &dependencies, - &packet_socket_factory]() { - dependencies.allocator = std::make_unique( - default_network_manager_.get(), packet_socket_factory, - configuration.turn_customizer); - }); + packet_socket_factory = context_->default_socket_factory(); + + dependencies.allocator = std::make_unique( + context_->default_network_manager(), packet_socket_factory, + configuration.turn_customizer); } if (!dependencies.async_resolver_factory) { @@ -281,25 +253,21 @@ PeerConnectionFactory::CreatePeerConnection( std::make_unique(); } - network_thread_->Invoke( - RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::SetNetworkIgnoreMask, - dependencies.allocator.get(), options_.network_ignore_mask)); + dependencies.allocator->SetNetworkIgnoreMask(options().network_ignore_mask); std::unique_ptr event_log = - worker_thread_->Invoke>( + worker_thread()->Invoke>( RTC_FROM_HERE, rtc::Bind(&PeerConnectionFactory::CreateRtcEventLog_w, this)); - std::unique_ptr call = worker_thread_->Invoke>( + std::unique_ptr call = worker_thread()->Invoke>( RTC_FROM_HERE, rtc::Bind(&PeerConnectionFactory::CreateCall_w, this, event_log.get())); - rtc::scoped_refptr pc( - new rtc::RefCountedObject(this, std::move(event_log), - std::move(call))); - ActionsBeforeInitializeForTesting(pc); - if (!pc->Initialize(configuration, std::move(dependencies))) { + rtc::scoped_refptr pc = PeerConnection::Create( + context_, options_, std::move(event_log), std::move(call), configuration, + std::move(dependencies)); + if (!pc) { 
return nullptr; } return PeerConnectionProxy::Create(signaling_thread(), pc); @@ -307,43 +275,34 @@ PeerConnectionFactory::CreatePeerConnection( rtc::scoped_refptr PeerConnectionFactory::CreateLocalMediaStream(const std::string& stream_id) { - RTC_DCHECK(signaling_thread_->IsCurrent()); - return MediaStreamProxy::Create(signaling_thread_, + RTC_DCHECK(signaling_thread()->IsCurrent()); + return MediaStreamProxy::Create(signaling_thread(), MediaStream::Create(stream_id)); } rtc::scoped_refptr PeerConnectionFactory::CreateVideoTrack( const std::string& id, VideoTrackSourceInterface* source) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK(signaling_thread()->IsCurrent()); rtc::scoped_refptr track( - VideoTrack::Create(id, source, worker_thread_)); - return VideoTrackProxy::Create(signaling_thread_, worker_thread_, track); + VideoTrack::Create(id, source, worker_thread())); + return VideoTrackProxy::Create(signaling_thread(), worker_thread(), track); } rtc::scoped_refptr PeerConnectionFactory::CreateAudioTrack( const std::string& id, AudioSourceInterface* source) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK(signaling_thread()->IsCurrent()); rtc::scoped_refptr track(AudioTrack::Create(id, source)); - return AudioTrackProxy::Create(signaling_thread_, track); -} - -std::unique_ptr -PeerConnectionFactory::CreateSctpTransportInternalFactory() { -#ifdef HAVE_SCTP - return std::make_unique(network_thread()); -#else - return nullptr; -#endif + return AudioTrackProxy::Create(signaling_thread(), track); } cricket::ChannelManager* PeerConnectionFactory::channel_manager() { - return channel_manager_.get(); + return context_->channel_manager(); } std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(worker_thread()); auto encoding_type = RtcEventLog::EncodingType::Legacy; if (IsTrialEnabled("WebRTC-RtcEventLogNewFormat")) @@ -355,20 +314,23 @@ std::unique_ptr 
PeerConnectionFactory::CreateRtcEventLog_w() { std::unique_ptr PeerConnectionFactory::CreateCall_w( RtcEventLog* event_log) { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(worker_thread()); webrtc::Call::Config call_config(event_log); - if (!channel_manager_->media_engine() || !call_factory_) { + if (!channel_manager()->media_engine() || !context_->call_factory()) { return nullptr; } call_config.audio_state = - channel_manager_->media_engine()->voice().GetAudioState(); - - FieldTrialParameter min_bandwidth("min", DataRate::kbps(30)); - FieldTrialParameter start_bandwidth("start", DataRate::kbps(300)); - FieldTrialParameter max_bandwidth("max", DataRate::kbps(2000)); + channel_manager()->media_engine()->voice().GetAudioState(); + + FieldTrialParameter min_bandwidth("min", + DataRate::KilobitsPerSec(30)); + FieldTrialParameter start_bandwidth("start", + DataRate::KilobitsPerSec(300)); + FieldTrialParameter max_bandwidth("max", + DataRate::KilobitsPerSec(2000)); ParseFieldTrial({&min_bandwidth, &start_bandwidth, &max_bandwidth}, - trials_->Lookup("WebRTC-PcFactoryDefaultBitrates")); + trials().Lookup("WebRTC-PcFactoryDefaultBitrates")); call_config.bitrate_config.min_bitrate_bps = rtc::saturated_cast(min_bandwidth->bps()); @@ -391,14 +353,14 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( RTC_LOG(LS_INFO) << "Using default network controller factory"; } - call_config.trials = trials_.get(); + call_config.trials = &trials(); - return std::unique_ptr(call_factory_->CreateCall(call_config)); + return std::unique_ptr( + context_->call_factory()->CreateCall(call_config)); } bool PeerConnectionFactory::IsTrialEnabled(absl::string_view key) const { - RTC_DCHECK(trials_); - return trials_->Lookup(key).find("Enabled") == 0; + return absl::StartsWith(trials().Lookup(key), "Enabled"); } } // namespace webrtc diff --git a/pc/peer_connection_factory.h b/pc/peer_connection_factory.h index 962b08c7c9..427207f9cc 100644 --- a/pc/peer_connection_factory.h +++ 
b/pc/peer_connection_factory.h @@ -12,15 +12,32 @@ #ifndef PC_PEER_CONNECTION_FACTORY_H_ #define PC_PEER_CONNECTION_FACTORY_H_ +#include +#include #include #include +#include "absl/strings/string_view.h" +#include "api/audio_options.h" +#include "api/fec_controller.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/neteq/neteq_factory.h" +#include "api/network_state_predictor.h" #include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/rtp_parameters.h" #include "api/scoped_refptr.h" -#include "api/transport/media/media_transport_interface.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/transport/network_control.h" +#include "api/transport/sctp_transport_factory_interface.h" +#include "api/transport/webrtc_key_value_config.h" +#include "call/call.h" #include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/port_allocator.h" #include "pc/channel_manager.h" +#include "pc/connection_context.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" @@ -35,6 +52,14 @@ class RtcEventLog; class PeerConnectionFactory : public PeerConnectionFactoryInterface { public: + // Creates a PeerConnectionFactory. It returns nullptr on initialization + // error. + // + // The Dependencies structure allows simple management of all new + // dependencies being added to the PeerConnectionFactory. 
+ static rtc::scoped_refptr Create( + PeerConnectionFactoryDependencies dependencies); + void SetOptions(const Options& options) override; rtc::scoped_refptr CreatePeerConnection( @@ -47,8 +72,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) override; - bool Initialize(); - RtpCapabilities GetRtpSenderCapabilities( cricket::MediaType kind) const override; @@ -72,65 +95,60 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { bool StartAecDump(FILE* file, int64_t max_size_bytes) override; void StopAecDump() override; - virtual std::unique_ptr - CreateSctpTransportInternalFactory(); + SctpTransportFactoryInterface* sctp_transport_factory() { + return context_->sctp_transport_factory(); + } virtual cricket::ChannelManager* channel_manager(); - rtc::Thread* signaling_thread() { + rtc::Thread* signaling_thread() const { // This method can be called on a different thread when the factory is // created in CreatePeerConnectionFactory(). - return signaling_thread_; + return context_->signaling_thread(); } - rtc::Thread* worker_thread() { return worker_thread_; } - rtc::Thread* network_thread() { return network_thread_; } - - const Options& options() const { return options_; } - MediaTransportFactory* media_transport_factory() { - return media_transport_factory_.get(); + const Options& options() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return options_; } + const WebRtcKeyValueConfig& trials() const { return context_->trials(); } + protected: - // This structure allows simple management of all new dependencies being added - // to the PeerConnectionFactory. + // Constructor used by the static Create() method. Modifies the dependencies. + PeerConnectionFactory(rtc::scoped_refptr context, + PeerConnectionFactoryDependencies* dependencies); + + // Constructor for use in testing. 
Ignores the possibility of initialization + // failure. The dependencies are passed in by std::move(). explicit PeerConnectionFactory( PeerConnectionFactoryDependencies dependencies); - // Hook to let testing framework insert actions between - // "new RTCPeerConnection" and "pc.Initialize" - virtual void ActionsBeforeInitializeForTesting(PeerConnectionInterface*) {} - virtual ~PeerConnectionFactory(); private: + rtc::Thread* worker_thread() const { return context_->worker_thread(); } + rtc::Thread* network_thread() const { return context_->network_thread(); } + bool IsTrialEnabled(absl::string_view key) const; + const cricket::ChannelManager* channel_manager() const { + return context_->channel_manager(); + } std::unique_ptr CreateRtcEventLog_w(); std::unique_ptr CreateCall_w(RtcEventLog* event_log); - bool wraps_current_thread_; - rtc::Thread* network_thread_; - rtc::Thread* worker_thread_; - rtc::Thread* signaling_thread_; - std::unique_ptr owned_network_thread_; - std::unique_ptr owned_worker_thread_; - const std::unique_ptr task_queue_factory_; - Options options_; - std::unique_ptr channel_manager_; - std::unique_ptr default_network_manager_; - std::unique_ptr default_socket_factory_; - std::unique_ptr media_engine_; - std::unique_ptr call_factory_; + rtc::scoped_refptr context_; + PeerConnectionFactoryInterface::Options options_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr task_queue_factory_; std::unique_ptr event_log_factory_; std::unique_ptr fec_controller_factory_; std::unique_ptr network_state_predictor_factory_; std::unique_ptr injected_network_controller_factory_; - std::unique_ptr media_transport_factory_; std::unique_ptr neteq_factory_; - const std::unique_ptr trials_; }; } // namespace webrtc diff --git a/pc/peer_connection_header_extension_unittest.cc b/pc/peer_connection_header_extension_unittest.cc new file mode 100644 index 0000000000..62fda59212 --- /dev/null +++ b/pc/peer_connection_header_extension_unittest.cc @@ -0,0 +1,189 @@ +/* 
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "media/base/fake_media_engine.h" +#include "p2p/base/fake_port_allocator.h" +#include "pc/media_session.h" +#include "pc/peer_connection_wrapper.h" +#include "rtc_base/gunit.h" +#include "rtc_base/strings/string_builder.h" +#include "test/gmock.h" + +namespace webrtc { + +using ::testing::Combine; +using ::testing::ElementsAre; +using ::testing::Field; +using ::testing::Return; +using ::testing::Values; + +class PeerConnectionHeaderExtensionTest + : public ::testing::TestWithParam< + std::tuple> { + protected: + PeerConnectionHeaderExtensionTest() + : extensions_( + {RtpHeaderExtensionCapability("uri1", + 1, + RtpTransceiverDirection::kStopped), + RtpHeaderExtensionCapability("uri2", + 2, + RtpTransceiverDirection::kSendOnly), + RtpHeaderExtensionCapability("uri3", + 3, + RtpTransceiverDirection::kRecvOnly), + RtpHeaderExtensionCapability( + "uri4", + 4, + RtpTransceiverDirection::kSendRecv)}) {} + + std::unique_ptr CreatePeerConnection( + cricket::MediaType media_type, + absl::optional semantics) { + auto voice = std::make_unique(); + auto video = std::make_unique(); + if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) + voice->SetRtpHeaderExtensions(extensions_); + else + video->SetRtpHeaderExtensions(extensions_); + auto media_engine = std::make_unique( + std::move(voice), std::move(video)); + PeerConnectionFactoryDependencies factory_dependencies; + factory_dependencies.network_thread = rtc::Thread::Current(); + 
factory_dependencies.worker_thread = rtc::Thread::Current(); + factory_dependencies.signaling_thread = rtc::Thread::Current(); + factory_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); + factory_dependencies.media_engine = std::move(media_engine); + factory_dependencies.call_factory = CreateCallFactory(); + factory_dependencies.event_log_factory = + std::make_unique( + factory_dependencies.task_queue_factory.get()); + + auto pc_factory = + CreateModularPeerConnectionFactory(std::move(factory_dependencies)); + + auto fake_port_allocator = std::make_unique( + rtc::Thread::Current(), nullptr); + auto observer = std::make_unique(); + PeerConnectionInterface::RTCConfiguration config; + if (semantics) + config.sdp_semantics = *semantics; + auto pc = pc_factory->CreatePeerConnection( + config, std::move(fake_port_allocator), nullptr, observer.get()); + observer->SetPeerConnectionInterface(pc.get()); + return std::make_unique(pc_factory, pc, + std::move(observer)); + } + + std::vector extensions_; +}; + +TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) { + cricket::MediaType media_type; + SdpSemantics semantics; + std::tie(media_type, semantics) = GetParam(); + if (semantics != SdpSemantics::kUnifiedPlan) + return; + std::unique_ptr wrapper = + CreatePeerConnection(media_type, semantics); + auto transceiver = wrapper->AddTransceiver(media_type); + EXPECT_EQ(transceiver->HeaderExtensionsToOffer(), extensions_); +} + +TEST_P(PeerConnectionHeaderExtensionTest, + SenderReceiverCapabilitiesReturnNotStoppedExtensions) { + cricket::MediaType media_type; + SdpSemantics semantics; + std::tie(media_type, semantics) = GetParam(); + std::unique_ptr wrapper = + CreatePeerConnection(media_type, semantics); + EXPECT_THAT(wrapper->pc_factory() + ->GetRtpSenderCapabilities(media_type) + .header_extensions, + ElementsAre(Field(&RtpHeaderExtensionCapability::uri, "uri2"), + Field(&RtpHeaderExtensionCapability::uri, "uri3"), + 
Field(&RtpHeaderExtensionCapability::uri, "uri4"))); + EXPECT_EQ(wrapper->pc_factory() + ->GetRtpReceiverCapabilities(media_type) + .header_extensions, + wrapper->pc_factory() + ->GetRtpSenderCapabilities(media_type) + .header_extensions); +} + +TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedDefaultExtensions) { + cricket::MediaType media_type; + SdpSemantics semantics; + std::tie(media_type, semantics) = GetParam(); + if (semantics != SdpSemantics::kUnifiedPlan) + return; + std::unique_ptr wrapper = + CreatePeerConnection(media_type, semantics); + auto transceiver = wrapper->AddTransceiver(media_type); + auto session_description = wrapper->CreateOffer(); + EXPECT_THAT(session_description->description() + ->contents()[0] + .media_description() + ->rtp_header_extensions(), + ElementsAre(Field(&RtpExtension::uri, "uri2"), + Field(&RtpExtension::uri, "uri3"), + Field(&RtpExtension::uri, "uri4"))); +} + +TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedModifiedExtensions) { + cricket::MediaType media_type; + SdpSemantics semantics; + std::tie(media_type, semantics) = GetParam(); + if (semantics != SdpSemantics::kUnifiedPlan) + return; + std::unique_ptr wrapper = + CreatePeerConnection(media_type, semantics); + auto transceiver = wrapper->AddTransceiver(media_type); + auto modified_extensions = transceiver->HeaderExtensionsToOffer(); + modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv; + modified_extensions[3].direction = RtpTransceiverDirection::kStopped; + EXPECT_TRUE( + transceiver->SetOfferedRtpHeaderExtensions(modified_extensions).ok()); + auto session_description = wrapper->CreateOffer(); + EXPECT_THAT(session_description->description() + ->contents()[0] + .media_description() + ->rtp_header_extensions(), + ElementsAre(Field(&RtpExtension::uri, "uri1"), + Field(&RtpExtension::uri, "uri2"), + Field(&RtpExtension::uri, "uri3"))); +} + +INSTANTIATE_TEST_SUITE_P( + , + PeerConnectionHeaderExtensionTest, + 
Combine(Values(SdpSemantics::kPlanB, SdpSemantics::kUnifiedPlan), + Values(cricket::MediaType::MEDIA_TYPE_AUDIO, + cricket::MediaType::MEDIA_TYPE_VIDEO)), + [](const testing::TestParamInfo< + PeerConnectionHeaderExtensionTest::ParamType>& info) { + cricket::MediaType media_type; + SdpSemantics semantics; + std::tie(media_type, semantics) = info.param; + return (rtc::StringBuilder("With") + << (semantics == SdpSemantics::kPlanB ? "PlanB" : "UnifiedPlan") + << "And" + << (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO ? "Voice" + : "Video") + << "Engine") + .str(); + }); + +} // namespace webrtc diff --git a/pc/peer_connection_histogram_unittest.cc b/pc/peer_connection_histogram_unittest.cc index e36b29a23a..8730ac4bb4 100644 --- a/pc/peer_connection_histogram_unittest.cc +++ b/pc/peer_connection_histogram_unittest.cc @@ -32,6 +32,7 @@ #include "pc/peer_connection_wrapper.h" #include "pc/sdp_utils.h" #include "pc/test/mock_peer_connection_observers.h" +#include "pc/usage_pattern.h" #include "pc/webrtc_sdp.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" @@ -61,7 +62,7 @@ static const rtc::SocketAddress kPrivateLocalAddress("10.1.1.1", 0); static const rtc::SocketAddress kPrivateIpv6LocalAddress("fd12:3456:789a:1::1", 0); -int MakeUsageFingerprint(std::set events) { +int MakeUsageFingerprint(std::set events) { int signature = 0; for (const auto it : events) { signature |= static_cast(it); @@ -84,18 +85,6 @@ class PeerConnectionFactoryForUsageHistogramTest dependencies.call_factory = CreateCallFactory(); return dependencies; }()) {} - - void ActionsBeforeInitializeForTesting(PeerConnectionInterface* pc) override { - PeerConnection* internal_pc = static_cast(pc); - if (return_histogram_very_quickly_) { - internal_pc->ReturnHistogramVeryQuicklyForTesting(); - } - } - - void ReturnHistogramVeryQuickly() { return_histogram_very_quickly_ = true; } - - private: - bool return_histogram_very_quickly_ = false; }; class 
PeerConnectionWrapperForUsageHistogramTest; @@ -254,14 +243,13 @@ class PeerConnectionUsageHistogramTest : public ::testing::Test { } WrapperPtr CreatePeerConnection() { - return CreatePeerConnection(RTCConfiguration(), - PeerConnectionFactoryInterface::Options(), - nullptr, false); + return CreatePeerConnection( + RTCConfiguration(), PeerConnectionFactoryInterface::Options(), nullptr); } WrapperPtr CreatePeerConnection(const RTCConfiguration& config) { return CreatePeerConnection( - config, PeerConnectionFactoryInterface::Options(), nullptr, false); + config, PeerConnectionFactoryInterface::Options(), nullptr); } WrapperPtr CreatePeerConnectionWithMdns(const RTCConfiguration& config) { @@ -281,15 +269,15 @@ class PeerConnectionUsageHistogramTest : public ::testing::Test { deps.async_resolver_factory = std::move(resolver_factory); deps.allocator = std::move(port_allocator); - return CreatePeerConnection(config, - PeerConnectionFactoryInterface::Options(), - std::move(deps), false); + return CreatePeerConnection( + config, PeerConnectionFactoryInterface::Options(), std::move(deps)); } WrapperPtr CreatePeerConnectionWithImmediateReport() { - return CreatePeerConnection(RTCConfiguration(), - PeerConnectionFactoryInterface::Options(), - nullptr, true); + RTCConfiguration configuration; + configuration.report_usage_pattern_delay_ms = 0; + return CreatePeerConnection( + configuration, PeerConnectionFactoryInterface::Options(), nullptr); } WrapperPtr CreatePeerConnectionWithPrivateLocalAddresses() { @@ -299,10 +287,9 @@ class PeerConnectionUsageHistogramTest : public ::testing::Test { auto port_allocator = std::make_unique(fake_network); - return CreatePeerConnection(RTCConfiguration(), PeerConnectionFactoryInterface::Options(), - std::move(port_allocator), false); + std::move(port_allocator)); } WrapperPtr CreatePeerConnectionWithPrivateIpv6LocalAddresses() { @@ -315,33 +302,26 @@ class PeerConnectionUsageHistogramTest : public ::testing::Test { return 
CreatePeerConnection(RTCConfiguration(), PeerConnectionFactoryInterface::Options(), - std::move(port_allocator), false); + std::move(port_allocator)); } WrapperPtr CreatePeerConnection( const RTCConfiguration& config, const PeerConnectionFactoryInterface::Options factory_options, - std::unique_ptr allocator, - bool immediate_report) { + std::unique_ptr allocator) { PeerConnectionDependencies deps(nullptr); deps.allocator = std::move(allocator); - return CreatePeerConnection(config, factory_options, std::move(deps), - immediate_report); + return CreatePeerConnection(config, factory_options, std::move(deps)); } WrapperPtr CreatePeerConnection( const RTCConfiguration& config, const PeerConnectionFactoryInterface::Options factory_options, - PeerConnectionDependencies deps, - bool immediate_report) { + PeerConnectionDependencies deps) { rtc::scoped_refptr pc_factory( new PeerConnectionFactoryForUsageHistogramTest()); pc_factory->SetOptions(factory_options); - RTC_CHECK(pc_factory->Initialize()); - if (immediate_report) { - pc_factory->ReturnHistogramVeryQuickly(); - } // If no allocator is provided, one will be created using a network manager // that uses the host network. This doesn't work on all trybots. 
@@ -418,16 +398,12 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintAudioVideo) { caller->pc()->Close(); callee->pc()->Close(); int expected_fingerprint = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::VIDEO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED, + UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, + UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); // In this case, we may or may not have PRIVATE_CANDIDATE_COLLECTED, // depending on the machine configuration. 
EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); @@ -437,9 +413,7 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintAudioVideo) { webrtc::metrics::NumEvents( kUsagePatternMetric, expected_fingerprint | - static_cast( - PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == - 2); + static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == 2); } // Test getting the usage fingerprint when the caller collects an mDNS @@ -458,32 +432,24 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithMdnsCaller) { callee->pc()->Close(); int expected_fingerprint_caller = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::VIDEO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::MDNS_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED, + UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::MDNS_CANDIDATE_COLLECTED, + UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); // Without a resolver, the callee cannot resolve the received mDNS candidate // but can still connect with the caller via a prflx candidate. As a result, // the bit for the direct connection should not be logged. 
int expected_fingerprint_callee = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::VIDEO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED, + UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, + UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); @@ -509,29 +475,21 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithMdnsCallee) { // Similar to the test above, the caller connects with the callee via a prflx // candidate. 
int expected_fingerprint_caller = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::VIDEO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED, + UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, + UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::CLOSE_CALLED}); int expected_fingerprint_callee = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::VIDEO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::MDNS_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED, + UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::MDNS_CANDIDATE_COLLECTED, + UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::DIRECT_CONNECTION_SELECTED, 
UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); @@ -549,15 +507,11 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintDataOnly) { caller->pc()->Close(); callee->pc()->Close(); int expected_fingerprint = MakeUsageFingerprint( - {PeerConnection::UsageEvent::DATA_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, + UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_TRUE( webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint) == @@ -565,9 +519,7 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintDataOnly) { webrtc::metrics::NumEvents( kUsagePatternMetric, expected_fingerprint | - static_cast( - PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == - 2); + static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == 2); } #endif // HAVE_SCTP #endif // WEBRTC_ANDROID @@ -584,10 +536,9 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintStunTurn) { auto caller = CreatePeerConnection(configuration); ASSERT_TRUE(caller); caller->pc()->Close(); - int expected_fingerprint = - MakeUsageFingerprint({PeerConnection::UsageEvent::STUN_SERVER_ADDED, 
- PeerConnection::UsageEvent::TURN_SERVER_ADDED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + int expected_fingerprint = MakeUsageFingerprint( + {UsageEvent::STUN_SERVER_ADDED, UsageEvent::TURN_SERVER_ADDED, + UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ( 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); @@ -606,10 +557,9 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintStunTurnInReconfiguration) { ASSERT_TRUE(caller); ASSERT_TRUE(caller->pc()->SetConfiguration(configuration).ok()); caller->pc()->Close(); - int expected_fingerprint = - MakeUsageFingerprint({PeerConnection::UsageEvent::STUN_SERVER_ADDED, - PeerConnection::UsageEvent::TURN_SERVER_ADDED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + int expected_fingerprint = MakeUsageFingerprint( + {UsageEvent::STUN_SERVER_ADDED, UsageEvent::TURN_SERVER_ADDED, + UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ( 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); @@ -624,28 +574,20 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIPCaller) { callee->pc()->Close(); int expected_fingerprint_caller = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + 
UsageEvent::CANDIDATE_COLLECTED, UsageEvent::PRIVATE_CANDIDATE_COLLECTED, + UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); int expected_fingerprint_callee = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, + UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, + UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); @@ -662,30 +604,22 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIpv6Callee) { callee->pc()->Close(); int expected_fingerprint_caller = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - 
PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, + UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, + UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); int expected_fingerprint_callee = MakeUsageFingerprint( - {PeerConnection::UsageEvent::AUDIO_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::IPV6_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::PRIVATE_CANDIDATE_COLLECTED, + UsageEvent::IPV6_CANDIDATE_COLLECTED, + UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, + UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); @@ -747,27 +681,20 @@ TEST_F(PeerConnectionUsageHistogramTest, // with the callee via a prflx candidate 
and hence no direct connection bit // should be set. int expected_fingerprint_caller = MakeUsageFingerprint( - {PeerConnection::UsageEvent::DATA_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::IPV6_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::PRIVATE_CANDIDATE_COLLECTED, + UsageEvent::IPV6_CANDIDATE_COLLECTED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::CLOSE_CALLED}); int expected_fingerprint_callee = MakeUsageFingerprint( - {PeerConnection::UsageEvent::DATA_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::MDNS_CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::REMOTE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, - PeerConnection::UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED, - PeerConnection::UsageEvent::ICE_STATE_CONNECTED, - PeerConnection::UsageEvent::DIRECT_CONNECTION_SELECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::MDNS_CANDIDATE_COLLECTED, + UsageEvent::REMOTE_CANDIDATE_ADDED, + UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, + UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, + UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); 
EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); @@ -781,16 +708,13 @@ TEST_F(PeerConnectionUsageHistogramTest, NotableUsageNoted) { caller->GenerateOfferAndCollectCandidates(); caller->pc()->Close(); int expected_fingerprint = MakeUsageFingerprint( - {PeerConnection::UsageEvent::DATA_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_TRUE( expected_fingerprint == ObservedFingerprint() || (expected_fingerprint | - static_cast( - PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == + static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == ObservedFingerprint()); EXPECT_METRIC_EQ(absl::make_optional(ObservedFingerprint()), caller->observer()->interesting_usage_detected()); @@ -801,9 +725,8 @@ TEST_F(PeerConnectionUsageHistogramTest, NotableUsageOnEventFiring) { caller->CreateDataChannel("foo"); caller->GenerateOfferAndCollectCandidates(); int expected_fingerprint = MakeUsageFingerprint( - {PeerConnection::UsageEvent::DATA_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED}); + {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED}); EXPECT_METRIC_EQ(0, webrtc::metrics::NumSamples(kUsagePatternMetric)); caller->GetInternalPeerConnection()->RequestUsagePatternReportForTesting(); EXPECT_METRIC_EQ_WAIT(1, webrtc::metrics::NumSamples(kUsagePatternMetric), @@ -811,8 +734,7 @@ TEST_F(PeerConnectionUsageHistogramTest, NotableUsageOnEventFiring) { EXPECT_METRIC_TRUE( expected_fingerprint == ObservedFingerprint() || (expected_fingerprint | - static_cast( - 
PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == + static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == ObservedFingerprint()); EXPECT_METRIC_EQ(absl::make_optional(ObservedFingerprint()), caller->observer()->interesting_usage_detected()); @@ -824,10 +746,8 @@ TEST_F(PeerConnectionUsageHistogramTest, caller->CreateDataChannel("foo"); caller->GenerateOfferAndCollectCandidates(); int expected_fingerprint = MakeUsageFingerprint( - {PeerConnection::UsageEvent::DATA_ADDED, - PeerConnection::UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, - PeerConnection::UsageEvent::CANDIDATE_COLLECTED, - PeerConnection::UsageEvent::CLOSE_CALLED}); + {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, + UsageEvent::CANDIDATE_COLLECTED, UsageEvent::CLOSE_CALLED}); EXPECT_METRIC_EQ(0, webrtc::metrics::NumSamples(kUsagePatternMetric)); caller->pc()->Close(); EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); @@ -838,8 +758,7 @@ TEST_F(PeerConnectionUsageHistogramTest, EXPECT_METRIC_TRUE( expected_fingerprint == ObservedFingerprint() || (expected_fingerprint | - static_cast( - PeerConnection::UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == + static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == ObservedFingerprint()); // After close, the usage-detection callback should NOT have been called. 
EXPECT_METRIC_FALSE(caller->observer()->interesting_usage_detected()); diff --git a/pc/peer_connection_ice_unittest.cc b/pc/peer_connection_ice_unittest.cc index ab5a8f40ca..8c1a764398 100644 --- a/pc/peer_connection_ice_unittest.cc +++ b/pc/peer_connection_ice_unittest.cc @@ -1041,9 +1041,11 @@ TEST_P(PeerConnectionIceTest, RestartIceCausesNegotiationNeeded) { auto callee = CreatePeerConnectionWithAudioVideo(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); caller->pc()->RestartIce(); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); } // In Unified Plan, "onnegotiationneeded" is spec-compliant, including not @@ -1064,14 +1066,17 @@ TEST_F(PeerConnectionIceTestUnifiedPlan, ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); // ICE restart becomes needed while an O/A is pending and |caller| is the // offerer. - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); caller->pc()->RestartIce(); // In Unified Plan, the event should not fire until we are back in the stable // signaling state. 
- EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionIceTestUnifiedPlan, @@ -1084,14 +1089,17 @@ TEST_F(PeerConnectionIceTestUnifiedPlan, ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateOfferAndSetAsLocal())); // ICE restart becomes needed while an O/A is pending and |caller| is the // answerer. - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); caller->pc()->RestartIce(); // In Unified Plan, the event should not fire until we are back in the stable // signaling state. - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE( callee->SetRemoteDescription(caller->CreateAnswerAndSetAsLocal())); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionIceTestUnifiedPlan, @@ -1102,14 +1110,16 @@ TEST_F(PeerConnectionIceTestUnifiedPlan, ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); // Local restart. caller->pc()->RestartIce(); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); // Remote restart and O/A exchange with |caller| as the answerer should // restart ICE locally as well. 
callee->pc()->RestartIce(); ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get())); // Having restarted ICE by the remote offer, we do not need to renegotiate ICE // credentials when back in the stable signaling state. - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionIceTestUnifiedPlan, @@ -1119,10 +1129,13 @@ TEST_F(PeerConnectionIceTestUnifiedPlan, ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); caller->pc()->RestartIce(); - EXPECT_TRUE(caller->observer()->negotiation_needed()); - caller->observer()->clear_negotiation_needed(); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); caller->pc()->RestartIce(); - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } // In Plan B, "onnegotiationneeded" is not spec-compliant, firing based on if @@ -1140,15 +1153,19 @@ TEST_F(PeerConnectionIceTestPlanB, auto callee = CreatePeerConnectionWithAudioVideo(); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); caller->pc()->RestartIce(); - EXPECT_TRUE(caller->observer()->negotiation_needed()); - caller->observer()->clear_negotiation_needed(); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); + caller->observer()->clear_legacy_renegotiation_needed(); + 
caller->observer()->clear_latest_negotiation_needed_event(); ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); // In Plan B, the event fired early so we don't expect it to fire now. This is // not spec-compliant but follows the pattern of existing Plan B behavior. - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionIceTestPlanB, @@ -1157,15 +1174,19 @@ TEST_F(PeerConnectionIceTestPlanB, auto callee = CreatePeerConnectionWithAudioVideo(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); caller->pc()->RestartIce(); - EXPECT_TRUE(caller->observer()->negotiation_needed()); - caller->observer()->clear_negotiation_needed(); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); caller->pc()->RestartIce(); // In Plan B, the event fires every time something changed, even if we have // already fired the event. This is not spec-compliant but follows the same // pattern of existing Plan B behavior. 
- EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); } // The following parameterized test verifies that if an offer is sent with a @@ -1404,4 +1425,13 @@ TEST_P(PeerConnectionIceTest, IceCredentialsCreateAnswer) { } } +// Regression test for https://bugs.chromium.org/p/webrtc/issues/detail?id=4728 +TEST_P(PeerConnectionIceTest, CloseDoesNotTransitionGatheringStateToComplete) { + auto pc = CreatePeerConnectionWithAudioVideo(); + pc->pc()->Close(); + EXPECT_FALSE(pc->IsIceGatheringDone()); + EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew, + pc->pc()->ice_gathering_state()); +} + } // namespace webrtc diff --git a/pc/peer_connection_integrationtest.cc b/pc/peer_connection_integrationtest.cc index d184a92eae..53e0f6d7c9 100644 --- a/pc/peer_connection_integrationtest.cc +++ b/pc/peer_connection_integrationtest.cc @@ -14,6 +14,7 @@ #include +#include #include #include #include @@ -28,7 +29,7 @@ #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/rtp_receiver_interface.h" #include "api/task_queue/default_task_queue_factory.h" -#include "api/test/loopback_media_transport.h" +#include "api/transport/field_trial_based_config.h" #include "api/uma_metrics.h" #include "api/video_codecs/sdp_video_format.h" #include "call/call.h" @@ -36,6 +37,7 @@ #include "media/engine/fake_webrtc_video_engine.h" #include "media/engine/webrtc_media_engine.h" #include "media/engine/webrtc_media_engine_defaults.h" +#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "p2p/base/fake_ice_transport.h" #include "p2p/base/mock_async_resolver.h" #include "p2p/base/p2p_constants.h" @@ -214,7 +216,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, dependencies.cert_generator = std::move(cert_generator); if (!client->Init(nullptr, nullptr, std::move(dependencies), network_thread, 
worker_thread, nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false)) { delete client; @@ -594,6 +595,10 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, pc()->CreateOffer(observer, offer_answer_options_); return WaitForDescriptionFromObserver(observer); } + bool Rollback() { + return SetRemoteDescription( + webrtc::CreateSessionDescription(SdpType::kRollback, "")); + } private: explicit PeerConnectionWrapper(const std::string& debug_name) @@ -606,7 +611,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, rtc::Thread* network_thread, rtc::Thread* worker_thread, std::unique_ptr event_log_factory, - std::unique_ptr media_transport_factory, bool reset_encoder_factory, bool reset_decoder_factory) { // There's an error in this test code if Init ends up being called twice. @@ -631,6 +635,7 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, pc_factory_dependencies.signaling_thread = signaling_thread; pc_factory_dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); + pc_factory_dependencies.trials = std::make_unique(); cricket::MediaEngineDependencies media_deps; media_deps.task_queue_factory = pc_factory_dependencies.task_queue_factory.get(); @@ -644,6 +649,14 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, media_deps.video_decoder_factory.reset(); } + if (!media_deps.audio_processing) { + // If the standard Creation method for APM returns a null pointer, instead + // use the builder for testing to create an APM object. 
+ media_deps.audio_processing = AudioProcessingBuilderForTesting().Create(); + } + + media_deps.trials = pc_factory_dependencies.trials.get(); + pc_factory_dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); pc_factory_dependencies.call_factory = webrtc::CreateCallFactory(); @@ -655,10 +668,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, std::make_unique( pc_factory_dependencies.task_queue_factory.get()); } - if (media_transport_factory) { - pc_factory_dependencies.media_transport_factory = - std::move(media_transport_factory); - } peer_connection_factory_ = webrtc::CreateModularPeerConnectionFactory( std::move(pc_factory_dependencies)); @@ -795,9 +804,7 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, EXPECT_TRUE(desc->ToString(&sdp)); RTC_LOG(LS_INFO) << debug_name_ << ": local SDP contents=\n" << sdp; pc()->SetLocalDescription(observer, desc.release()); - if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) { - RemoveUnusedVideoRenderers(); - } + RemoveUnusedVideoRenderers(); // As mentioned above, we need to send the message immediately after // SetLocalDescription. SendSdpMessage(type, sdp); @@ -810,9 +817,7 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, new rtc::RefCountedObject()); RTC_LOG(LS_INFO) << debug_name_ << ": SetRemoteDescription"; pc()->SetRemoteDescription(observer, desc.release()); - if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) { - RemoveUnusedVideoRenderers(); - } + RemoveUnusedVideoRenderers(); EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); return observer->result(); } @@ -820,29 +825,26 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, // This is a work around to remove unused fake_video_renderers from // transceivers that have either stopped or are no longer receiving. 
void RemoveUnusedVideoRenderers() { + if (sdp_semantics_ != SdpSemantics::kUnifiedPlan) { + return; + } auto transceivers = pc()->GetTransceivers(); + std::set active_renderers; for (auto& transceiver : transceivers) { - if (transceiver->receiver()->media_type() != cricket::MEDIA_TYPE_VIDEO) { - continue; - } - // Remove fake video renderers from any stopped transceivers. - if (transceiver->stopped()) { - auto it = - fake_video_renderers_.find(transceiver->receiver()->track()->id()); - if (it != fake_video_renderers_.end()) { - fake_video_renderers_.erase(it); - } + // Note - we don't check for direction here. This function is called + // before direction is set, and in that case, we should not remove + // the renderer. + if (transceiver->receiver()->media_type() == cricket::MEDIA_TYPE_VIDEO) { + active_renderers.insert(transceiver->receiver()->track()->id()); } - // Remove fake video renderers from any transceivers that are no longer - // receiving. - if ((transceiver->current_direction() && - !webrtc::RtpTransceiverDirectionHasRecv( - *transceiver->current_direction()))) { - auto it = - fake_video_renderers_.find(transceiver->receiver()->track()->id()); - if (it != fake_video_renderers_.end()) { - fake_video_renderers_.erase(it); - } + } + for (auto it = fake_video_renderers_.begin(); + it != fake_video_renderers_.end();) { + // Remove fake video renderers belonging to any non-active transceivers. 
+ if (!active_renderers.count(it->first)) { + it = fake_video_renderers_.erase(it); + } else { + it++; } } } @@ -932,8 +934,11 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, rtc::scoped_refptr receiver) override { if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { auto it = fake_video_renderers_.find(receiver->track()->id()); - RTC_DCHECK(it != fake_video_renderers_.end()); - fake_video_renderers_.erase(it); + if (it != fake_video_renderers_.end()) { + fake_video_renderers_.erase(it); + } else { + RTC_LOG(LS_ERROR) << "OnRemoveTrack called for non-active renderer"; + } } } void OnRenegotiationNeeded() override {} @@ -1071,8 +1076,8 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver, class MockRtcEventLogOutput : public webrtc::RtcEventLogOutput { public: virtual ~MockRtcEventLogOutput() = default; - MOCK_CONST_METHOD0(IsActive, bool()); - MOCK_METHOD1(Write, bool(const std::string&)); + MOCK_METHOD(bool, IsActive, (), (const, override)); + MOCK_METHOD(bool, Write, (const std::string&), (override)); }; // This helper object is used for both specifying how many audio/video frames @@ -1205,7 +1210,7 @@ class MockIceTransportFactory : public IceTransportFactory { return new rtc::RefCountedObject(transport_name, component); } - MOCK_METHOD0(RecordIceTransportCreated, void()); + MOCK_METHOD(void, RecordIceTransportCreated, ()); }; // Tests two PeerConnections connecting to each other end-to-end, using a @@ -1219,8 +1224,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { ss_(new rtc::VirtualSocketServer()), fss_(new rtc::FirewallSocketServer(ss_.get())), network_thread_(new rtc::Thread(fss_.get())), - worker_thread_(rtc::Thread::Create()), - loopback_media_transports_(network_thread_.get()) { + worker_thread_(rtc::Thread::Create()) { network_thread_->SetName("PCNetworkThread", this); worker_thread_->SetName("PCWorkerThread", this); RTC_CHECK(network_thread_->Start()); @@ -1277,7 +1281,6 @@ class 
PeerConnectionIntegrationBaseTest : public ::testing::Test { const RTCConfiguration* config, webrtc::PeerConnectionDependencies dependencies, std::unique_ptr event_log_factory, - std::unique_ptr media_transport_factory, bool reset_encoder_factory, bool reset_decoder_factory) { RTCConfiguration modified_config; @@ -1294,8 +1297,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { if (!client->Init(options, &modified_config, std::move(dependencies), network_thread_.get(), worker_thread_.get(), - std::move(event_log_factory), - std::move(media_transport_factory), reset_encoder_factory, + std::move(event_log_factory), reset_encoder_factory, reset_decoder_factory)) { return nullptr; } @@ -1310,11 +1312,11 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { webrtc::PeerConnectionDependencies dependencies) { std::unique_ptr event_log_factory( new webrtc::FakeRtcEventLogFactory(rtc::Thread::Current())); - return CreatePeerConnectionWrapper( - debug_name, options, config, std::move(dependencies), - std::move(event_log_factory), - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + return CreatePeerConnectionWrapper(debug_name, options, config, + std::move(dependencies), + std::move(event_log_factory), + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); } bool CreatePeerConnectionWrappers() { @@ -1335,13 +1337,13 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { sdp_semantics_ = caller_semantics; caller_ = CreatePeerConnectionWrapper( "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, /*media_transport_factory=*/nullptr, + nullptr, /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); sdp_semantics_ = callee_semantics; callee_ = CreatePeerConnectionWrapper( "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, /*media_transport_factory=*/nullptr, + nullptr, 
/*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); sdp_semantics_ = original_semantics; @@ -1354,30 +1356,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { caller_ = CreatePeerConnectionWrapper( "Caller", nullptr, &caller_config, webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, &callee_config, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - return caller_ && callee_; - } - - bool CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - const PeerConnectionInterface::RTCConfiguration& caller_config, - const PeerConnectionInterface::RTCConfiguration& callee_config, - std::unique_ptr caller_factory, - std::unique_ptr callee_factory) { - caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, &caller_config, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - std::move(caller_factory), /*reset_encoder_factory=*/false, + /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); callee_ = CreatePeerConnectionWrapper( "Callee", nullptr, &callee_config, webrtc::PeerConnectionDependencies(nullptr), nullptr, - std::move(callee_factory), /*reset_encoder_factory=*/false, + /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); return caller_ && callee_; } @@ -1387,16 +1371,16 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { webrtc::PeerConnectionDependencies caller_dependencies, const PeerConnectionInterface::RTCConfiguration& callee_config, webrtc::PeerConnectionDependencies callee_dependencies) { - caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, &caller_config, std::move(caller_dependencies), - nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, - 
/*reset_decoder_factory=*/false); - callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, &callee_config, std::move(callee_dependencies), - nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + caller_ = + CreatePeerConnectionWrapper("Caller", nullptr, &caller_config, + std::move(caller_dependencies), nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); + callee_ = + CreatePeerConnectionWrapper("Callee", nullptr, &callee_config, + std::move(callee_dependencies), nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); return caller_ && callee_; } @@ -1406,12 +1390,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { caller_ = CreatePeerConnectionWrapper( "Caller", &caller_options, nullptr, webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, + /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); callee_ = CreatePeerConnectionWrapper( "Callee", &callee_options, nullptr, webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, + /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); return caller_ && callee_; } @@ -1435,21 +1419,21 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { webrtc::PeerConnectionDependencies dependencies(nullptr); dependencies.cert_generator = std::move(cert_generator); - return CreatePeerConnectionWrapper( - "New Peer", nullptr, nullptr, std::move(dependencies), nullptr, - /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + return CreatePeerConnectionWrapper("New Peer", nullptr, nullptr, + std::move(dependencies), nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); } bool CreateOneDirectionalPeerConnectionWrappers(bool caller_to_callee) 
{ caller_ = CreatePeerConnectionWrapper( "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, /*media_transport_factory=*/nullptr, + nullptr, /*reset_encoder_factory=*/!caller_to_callee, /*reset_decoder_factory=*/caller_to_callee); callee_ = CreatePeerConnectionWrapper( "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, /*media_transport_factory=*/nullptr, + nullptr, /*reset_encoder_factory=*/caller_to_callee, /*reset_decoder_factory=*/!caller_to_callee); return caller_ && callee_; @@ -1540,10 +1524,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { rtc::VirtualSocketServer* virtual_socket_server() { return ss_.get(); } - webrtc::MediaTransportPair* loopback_media_transports() { - return &loopback_media_transports_; - } - PeerConnectionWrapper* caller() { return caller_.get(); } // Set the |caller_| to the |wrapper| passed in and return the @@ -1582,6 +1562,9 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { // |media_expectations|. Returns false if any of the expectations were // not met. bool ExpectNewFrames(const MediaExpectations& media_expectations) { + // Make sure there are no bogus tracks confusing the issue. + caller()->RemoveUnusedVideoRenderers(); + callee()->RemoveUnusedVideoRenderers(); // First initialize the expected frame counts based upon the current // frame count. int total_caller_audio_frames_expected = caller()->audio_frames_received(); @@ -1741,7 +1724,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { // on the network thread. std::vector> turn_servers_; std::vector> turn_customizers_; - webrtc::MediaTransportPair loopback_media_transports_; std::unique_ptr caller_; std::unique_ptr callee_; }; @@ -1766,7 +1748,7 @@ class FakeClockForTest : public rtc::ScopedFakeClock { // Some things use a time of "0" as a special value, so we need to start out // the fake clock at a nonzero time. // TODO(deadbeef): Fix this. 
- AdvanceTime(webrtc::TimeDelta::seconds(1)); + AdvanceTime(webrtc::TimeDelta::Seconds(1)); } // Explicit handle. @@ -1968,76 +1950,6 @@ TEST_P(PeerConnectionIntegrationTest, ASSERT_TRUE(ExpectNewFrames(media_expectations)); } -// Tests that the GetRemoteAudioSSLCertificate method returns the remote DTLS -// certificate once the DTLS handshake has finished. -TEST_P(PeerConnectionIntegrationTest, - GetRemoteAudioSSLCertificateReturnsExchangedCertificate) { - auto GetRemoteAudioSSLCertificate = [](PeerConnectionWrapper* wrapper) { - auto pci = reinterpret_cast(wrapper->pc()); - auto pc = reinterpret_cast(pci->internal()); - return pc->GetRemoteAudioSSLCertificate(); - }; - auto GetRemoteAudioSSLCertChain = [](PeerConnectionWrapper* wrapper) { - auto pci = reinterpret_cast(wrapper->pc()); - auto pc = reinterpret_cast(pci->internal()); - return pc->GetRemoteAudioSSLCertChain(); - }; - - auto caller_cert = rtc::RTCCertificate::FromPEM(kRsaPems[0]); - auto callee_cert = rtc::RTCCertificate::FromPEM(kRsaPems[1]); - - // Configure each side with a known certificate so they can be compared later. - PeerConnectionInterface::RTCConfiguration caller_config; - caller_config.enable_dtls_srtp.emplace(true); - caller_config.certificates.push_back(caller_cert); - PeerConnectionInterface::RTCConfiguration callee_config; - callee_config.enable_dtls_srtp.emplace(true); - callee_config.certificates.push_back(callee_cert); - ASSERT_TRUE( - CreatePeerConnectionWrappersWithConfig(caller_config, callee_config)); - ConnectFakeSignaling(); - - // When first initialized, there should not be a remote SSL certificate (and - // calling this method should not crash). 
- EXPECT_EQ(nullptr, GetRemoteAudioSSLCertificate(caller())); - EXPECT_EQ(nullptr, GetRemoteAudioSSLCertificate(callee())); - EXPECT_EQ(nullptr, GetRemoteAudioSSLCertChain(caller())); - EXPECT_EQ(nullptr, GetRemoteAudioSSLCertChain(callee())); - - caller()->AddAudioTrack(); - callee()->AddAudioTrack(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - - // Once DTLS has been connected, each side should return the other's SSL - // certificate when calling GetRemoteAudioSSLCertificate. - - auto caller_remote_cert = GetRemoteAudioSSLCertificate(caller()); - ASSERT_TRUE(caller_remote_cert); - EXPECT_EQ(callee_cert->GetSSLCertificate().ToPEMString(), - caller_remote_cert->ToPEMString()); - - auto callee_remote_cert = GetRemoteAudioSSLCertificate(callee()); - ASSERT_TRUE(callee_remote_cert); - EXPECT_EQ(caller_cert->GetSSLCertificate().ToPEMString(), - callee_remote_cert->ToPEMString()); - - auto caller_remote_cert_chain = GetRemoteAudioSSLCertChain(caller()); - ASSERT_TRUE(caller_remote_cert_chain); - ASSERT_EQ(1U, caller_remote_cert_chain->GetSize()); - auto remote_cert = &caller_remote_cert_chain->Get(0); - EXPECT_EQ(callee_cert->GetSSLCertificate().ToPEMString(), - remote_cert->ToPEMString()); - - auto callee_remote_cert_chain = GetRemoteAudioSSLCertChain(callee()); - ASSERT_TRUE(callee_remote_cert_chain); - ASSERT_EQ(1U, callee_remote_cert_chain->GetSize()); - remote_cert = &callee_remote_cert_chain->Get(0); - EXPECT_EQ(caller_cert->GetSSLCertificate().ToPEMString(), - remote_cert->ToPEMString()); -} - // This test sets up a call between two parties with a source resolution of // 1280x720 and verifies that a 16:9 aspect ratio is received. 
TEST_P(PeerConnectionIntegrationTest, @@ -2259,7 +2171,9 @@ TEST_P(PeerConnectionIntegrationTest, AudioToVideoUpgrade) { callee()->SetOfferAnswerOptions(options); } else { callee()->SetRemoteOfferHandler([this] { - callee()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)->Stop(); + callee() + ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->StopInternal(); }); } // Do offer/answer and make sure audio is still received end-to-end. @@ -2291,11 +2205,12 @@ TEST_P(PeerConnectionIntegrationTest, AudioToVideoUpgrade) { // The caller creates a new transceiver to receive video on when receiving // the offer, but by default it is send only. auto transceivers = caller()->pc()->GetTransceivers(); - ASSERT_EQ(3U, transceivers.size()); + ASSERT_EQ(2U, transceivers.size()); ASSERT_EQ(cricket::MEDIA_TYPE_VIDEO, - transceivers[2]->receiver()->media_type()); - transceivers[2]->sender()->SetTrack(caller()->CreateLocalVideoTrack()); - transceivers[2]->SetDirection(RtpTransceiverDirection::kSendRecv); + transceivers[1]->receiver()->media_type()); + transceivers[1]->sender()->SetTrack(caller()->CreateLocalVideoTrack()); + transceivers[1]->SetDirectionWithError( + RtpTransceiverDirection::kSendRecv); }); } callee()->CreateAndSetAndSignalOffer(); @@ -2507,7 +2422,9 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioSection) { // Stopping the audio RtpTransceiver will cause the media section to be // rejected in the answer. 
callee()->SetRemoteOfferHandler([this] { - callee()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)->Stop(); + callee() + ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO) + ->StopInternal(); }); } callee()->AddTrack(callee()->CreateLocalVideoTrack()); @@ -2526,10 +2443,10 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioSection) { ASSERT_NE(nullptr, callee_audio_content); EXPECT_TRUE(callee_audio_content->rejected); if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) { - // The caller's transceiver should have stopped after receiving the answer. - EXPECT_TRUE(caller() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO) - ->stopped()); + // The caller's transceiver should have stopped after receiving the answer, + // and thus no longer listed in transceivers. + EXPECT_EQ(nullptr, + caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)); } } @@ -2549,7 +2466,9 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsVideoSection) { // Stopping the video RtpTransceiver will cause the media section to be // rejected in the answer. callee()->SetRemoteOfferHandler([this] { - callee()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)->Stop(); + callee() + ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->StopInternal(); }); } callee()->AddTrack(callee()->CreateLocalAudioTrack()); @@ -2568,10 +2487,10 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsVideoSection) { ASSERT_NE(nullptr, callee_video_content); EXPECT_TRUE(callee_video_content->rejected); if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) { - // The caller's transceiver should have stopped after receiving the answer. - EXPECT_TRUE(caller() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) - ->stopped()); + // The caller's transceiver should have stopped after receiving the answer, + // and thus is no longer present. 
+ EXPECT_EQ(nullptr, + caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)); } } @@ -2595,7 +2514,7 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioAndVideoSections) { callee()->SetRemoteOfferHandler([this] { // Stopping all transceivers will cause all media sections to be rejected. for (const auto& transceiver : callee()->pc()->GetTransceivers()) { - transceiver->Stop(); + transceiver->StopInternal(); } }); } @@ -2642,7 +2561,9 @@ TEST_P(PeerConnectionIntegrationTest, VideoRejectedInSubsequentOffer) { } }); } else { - caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)->Stop(); + caller() + ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->StopInternal(); } caller()->CreateAndSetAndSignalOffer(); ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); @@ -2757,7 +2678,7 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); // Add receive direction. - video_sender->SetDirection(RtpTransceiverDirection::kSendRecv); + video_sender->SetDirectionWithError(RtpTransceiverDirection::kSendRecv); rtc::scoped_refptr callee_track = callee()->CreateLocalVideoTrack(); @@ -2796,6 +2717,106 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, EXPECT_TRUE(ExpectNewFrames(media_expectations)); } +// Used for the test below. 
+void RemoveBundleGroupSsrcsAndMidExtension(cricket::SessionDescription* desc) { + RemoveSsrcsAndKeepMsids(desc); + desc->RemoveGroupByName("BUNDLE"); + for (ContentInfo& content : desc->contents()) { + cricket::MediaContentDescription* media = content.media_description(); + cricket::RtpHeaderExtensions extensions = media->rtp_header_extensions(); + extensions.erase(std::remove_if(extensions.begin(), extensions.end(), + [](const RtpExtension& extension) { + return extension.uri == + RtpExtension::kMidUri; + }), + extensions.end()); + media->set_rtp_header_extensions(extensions); + } +} + +// Tests that video flows between multiple video tracks when BUNDLE is not used, +// SSRCs are not signaled and the MID RTP header extension is not used. This +// relies on demuxing by payload type, which normally doesn't work if you have +// multiple media sections using the same payload type, but which should work as +// long as the media sections aren't bundled. +// Regression test for: http://crbug.com/webrtc/12023 +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + EndToEndCallWithTwoVideoTracksNoBundleNoSignaledSsrcAndNoMid) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddVideoTrack(); + caller()->AddVideoTrack(); + callee()->AddVideoTrack(); + callee()->AddVideoTrack(); + caller()->SetReceivedSdpMunger(&RemoveBundleGroupSsrcsAndMidExtension); + callee()->SetReceivedSdpMunger(&RemoveBundleGroupSsrcsAndMidExtension); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_EQ(2u, caller()->pc()->GetReceivers().size()); + ASSERT_EQ(2u, callee()->pc()->GetReceivers().size()); + // Make sure we are not bundled. + ASSERT_NE(caller()->pc()->GetSenders()[0]->dtls_transport(), + caller()->pc()->GetSenders()[1]->dtls_transport()); + + // Expect video to be received in both directions on both tracks. 
+ MediaExpectations media_expectations; + media_expectations.ExpectBidirectionalVideo(); + EXPECT_TRUE(ExpectNewFrames(media_expectations)); +} + +// Used for the test below. +void ModifyPayloadTypesAndRemoveMidExtension( + cricket::SessionDescription* desc) { + int pt = 96; + for (ContentInfo& content : desc->contents()) { + cricket::MediaContentDescription* media = content.media_description(); + cricket::RtpHeaderExtensions extensions = media->rtp_header_extensions(); + extensions.erase(std::remove_if(extensions.begin(), extensions.end(), + [](const RtpExtension& extension) { + return extension.uri == + RtpExtension::kMidUri; + }), + extensions.end()); + media->set_rtp_header_extensions(extensions); + cricket::VideoContentDescription* video = media->as_video(); + ASSERT_TRUE(video != nullptr); + std::vector codecs = {{pt++, "VP8"}}; + video->set_codecs(codecs); + } +} + +// Tests that two video tracks can be demultiplexed by payload type alone, by +// using different payload types for the same codec in different m= sections. +// This practice is discouraged but historically has been supported. +// Regression test for: http://crbug.com/webrtc/12029 +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + EndToEndCallWithTwoVideoTracksDemultiplexedByPayloadType) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddVideoTrack(); + caller()->AddVideoTrack(); + callee()->AddVideoTrack(); + callee()->AddVideoTrack(); + caller()->SetGeneratedSdpMunger(&ModifyPayloadTypesAndRemoveMidExtension); + callee()->SetGeneratedSdpMunger(&ModifyPayloadTypesAndRemoveMidExtension); + // We can't remove SSRCs from the generated SDP because then no send streams + // would be created. 
+ caller()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); + callee()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_EQ(2u, caller()->pc()->GetReceivers().size()); + ASSERT_EQ(2u, callee()->pc()->GetReceivers().size()); + // Make sure we are bundled. + ASSERT_EQ(caller()->pc()->GetSenders()[0]->dtls_transport(), + caller()->pc()->GetSenders()[1]->dtls_transport()); + + // Expect video to be received in both directions on both tracks. + MediaExpectations media_expectations; + media_expectations.ExpectBidirectionalVideo(); + EXPECT_TRUE(ExpectNewFrames(media_expectations)); +} + TEST_F(PeerConnectionIntegrationTestUnifiedPlan, NoStreamsMsidLinePresent) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); @@ -3048,7 +3069,7 @@ TEST_P(PeerConnectionIntegrationTest, NewGetStatsManyAudioAndManyVideoStreams) { ASSERT_TRUE(caller_report); auto outbound_stream_stats = caller_report->GetStatsOfType(); - ASSERT_EQ(4u, outbound_stream_stats.size()); + ASSERT_EQ(outbound_stream_stats.size(), 4u); std::vector outbound_track_ids; for (const auto& stat : outbound_stream_stats) { ASSERT_TRUE(stat->bytes_sent.is_defined()); @@ -3482,6 +3503,31 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithRtpDataChannel) { kDefaultTimeout); } +TEST_P(PeerConnectionIntegrationTest, RtpDataChannelWorksAfterRollback) { + PeerConnectionInterface::RTCConfiguration rtc_config; + rtc_config.enable_rtp_data_channel = true; + rtc_config.enable_dtls_srtp = false; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(rtc_config, rtc_config)); + ConnectFakeSignaling(); + auto data_channel = caller()->pc()->CreateDataChannel("label_1", nullptr); + ASSERT_TRUE(data_channel.get() != nullptr); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + + caller()->CreateDataChannel("label_2", nullptr); + rtc::scoped_refptr 
observer( + new rtc::RefCountedObject()); + caller()->pc()->SetLocalDescription(observer, + caller()->CreateOfferAndWait().release()); + EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); + caller()->Rollback(); + + std::string data = "hello world"; + SendRtpDataWithRetries(data_channel, data, 5); + EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), + kDefaultTimeout); +} + // Ensure that an RTP data channel is signaled as closed for the caller when // the callee rejects it in a subsequent offer. TEST_P(PeerConnectionIntegrationTest, @@ -3839,960 +3885,91 @@ TEST_P(PeerConnectionIntegrationTest, kDefaultTimeout); } -// Tests that the datagram transport to SCTP fallback works correctly when -// datagram transport negotiation fails. -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelFallbackToSctp) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - - // Configure one endpoint to use datagram transport for data channels while - // the other does not. - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, RTCConfiguration(), - loopback_media_transports()->first_factory(), nullptr)); - ConnectFakeSignaling(); +#endif // HAVE_SCTP - // The caller offers a data channel using either datagram transport or SCTP. - caller()->CreateDataChannel(); +// Test that the ICE connection and gathering states eventually reach +// "complete". +TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + // Do normal offer/answer. 
caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, + caller()->ice_gathering_state(), kMaxWaitForFramesMs); + EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, + callee()->ice_gathering_state(), kMaxWaitForFramesMs); + // After the best candidate pair is selected and all candidates are signaled, + // the ICE connection state should reach "complete". + // TODO(deadbeef): Currently, the ICE "controlled" agent (the + // answerer/"callee" by default) only reaches "connected". When this is + // fixed, this test should be updated. + EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, + caller()->ice_connection_state(), kDefaultTimeout); + EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, + callee()->ice_connection_state(), kDefaultTimeout); +} - // Negotiation should fallback to SCTP, allowing the data channel to be - // established. - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); +constexpr int kOnlyLocalPorts = cricket::PORTALLOCATOR_DISABLE_STUN | + cricket::PORTALLOCATOR_DISABLE_RELAY | + cricket::PORTALLOCATOR_DISABLE_TCP; - // Ensure data can be sent in both directions. 
- std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); +// Use a mock resolver to resolve the hostname back to the original IP on both +// sides and check that the ICE connection connects. +TEST_P(PeerConnectionIntegrationTest, + IceStatesReachCompletionWithRemoteHostname) { + auto caller_resolver_factory = + std::make_unique>(); + auto callee_resolver_factory = + std::make_unique>(); + NiceMock callee_async_resolver; + NiceMock caller_async_resolver; - // Ensure that failure of the datagram negotiation doesn't impede media flow. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} + // This also verifies that the injected AsyncResolverFactory is used by + // P2PTransportChannel. + EXPECT_CALL(*caller_resolver_factory, Create()) + .WillOnce(Return(&caller_async_resolver)); + webrtc::PeerConnectionDependencies caller_deps(nullptr); + caller_deps.async_resolver_factory = std::move(caller_resolver_factory); -// Tests that the data channel transport works correctly when datagram transport -// negotiation succeeds and does not fall back to SCTP. -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelDoesNotFallbackToSctp) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - - // Configure one endpoint to use datagram transport for data channels while - // the other does not. 
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); + EXPECT_CALL(*callee_resolver_factory, Create()) + .WillOnce(Return(&callee_async_resolver)); + webrtc::PeerConnectionDependencies callee_deps(nullptr); + callee_deps.async_resolver_factory = std::move(callee_resolver_factory); - // The caller offers a data channel using either datagram transport or SCTP. - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + PeerConnectionInterface::RTCConfiguration config; + config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; + config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( + config, std::move(caller_deps), config, std::move(callee_deps))); - // Negotiation should succeed, allowing the data channel to be established. - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + caller()->SetRemoteAsyncResolver(&callee_async_resolver); + callee()->SetRemoteAsyncResolver(&caller_async_resolver); - // Ensure data can be sent in both directions. 
- std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); + // Enable hostname candidates with mDNS names. + caller()->SetMdnsResponder( + std::make_unique(network_thread())); + callee()->SetMdnsResponder( + std::make_unique(network_thread())); - // Ensure that failure of the datagram negotiation doesn't impede media flow. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} + SetPortAllocatorFlags(kOnlyLocalPorts, kOnlyLocalPorts); -// Tests that the datagram transport to SCTP fallback works correctly when -// datagram transports do not advertise compatible transport parameters. -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportIncompatibleParametersFallsBackToSctp) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - - // By default, only equal parameters are compatible. - loopback_media_transports()->SetFirstDatagramTransportParameters("foo"); - loopback_media_transports()->SetSecondDatagramTransportParameters("bar"); - - // Configure one endpoint to use datagram transport for data channels while - // the other does not. - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); ConnectFakeSignaling(); - - // The caller offers a data channel using either datagram transport or SCTP. 
- caller()->CreateDataChannel(); caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, + caller()->ice_connection_state(), kDefaultTimeout); + EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, + callee()->ice_connection_state(), kDefaultTimeout); - // Negotiation should fallback to SCTP, allowing the data channel to be - // established. - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use SCTP for data channels. - EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Ensure that failure of the datagram negotiation doesn't impede media flow. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Tests that the datagram transport to SCTP fallback works correctly when -// only the answerer believes datagram transport parameters are incompatible. 
-TEST_P(PeerConnectionIntegrationTest, - DatagramTransportIncompatibleParametersOnAnswererFallsBackToSctp) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - - // By default, only equal parameters are compatible. - loopback_media_transports()->SetFirstDatagramTransportParameters("foo"); - loopback_media_transports()->SetSecondDatagramTransportParameters("bar"); - - // Set the offerer to accept different parameters, while the answerer rejects - // them. - loopback_media_transports()->SetFirstDatagramTransportParametersComparison( - [](absl::string_view a, absl::string_view b) { return true; }); - loopback_media_transports()->SetSecondDatagramTransportParametersComparison( - [](absl::string_view a, absl::string_view b) { return false; }); - - // Configure one endpoint to use datagram transport for data channels while - // the other does not. - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // The caller offers a data channel using either datagram transport or SCTP. - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Negotiation should fallback to SCTP, allowing the data channel to be - // established. 
- ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use SCTP for data channels. - EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Ensure that failure of the datagram negotiation doesn't impede media flow. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Tests that the data channel transport works correctly when datagram -// transports provide different, but compatible, transport parameters. -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportCompatibleParametersDoNotFallbackToSctp) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - - // By default, only equal parameters are compatible. - loopback_media_transports()->SetFirstDatagramTransportParameters("foo"); - loopback_media_transports()->SetSecondDatagramTransportParameters("bar"); - - // Change the comparison used to treat these transport parameters are - // compatible (on both sides). 
- loopback_media_transports()->SetFirstDatagramTransportParametersComparison( - [](absl::string_view a, absl::string_view b) { return true; }); - loopback_media_transports()->SetSecondDatagramTransportParametersComparison( - [](absl::string_view a, absl::string_view b) { return true; }); - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // The caller offers a data channel using either datagram transport or SCTP. - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - // Negotiation should succeed, allowing the data channel to be established. - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use datagram transport for data channels. - EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Ensure that failure of the datagram negotiation doesn't impede media flow. 
- MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelWithMediaOnCaller) { - // Configure the caller to attempt use of datagram transport for media and - // data channels. - PeerConnectionInterface::RTCConfiguration offerer_config; - offerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - offerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - offerer_config.use_datagram_transport_for_data_channels = true; - offerer_config.use_datagram_transport = true; - - // Configure the callee to only use datagram transport for data channels. - PeerConnectionInterface::RTCConfiguration answerer_config; - answerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - answerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - answerer_config.use_datagram_transport_for_data_channels = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - offerer_config, answerer_config, - loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Offer both media and data. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. 
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use datagram transport for data channels. - EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Media flow should not be impacted. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportMediaWithDataChannelOnCaller) { - // Configure the caller to attempt use of datagram transport for media and - // data channels. - PeerConnectionInterface::RTCConfiguration offerer_config; - offerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - offerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - offerer_config.use_datagram_transport_for_data_channels = true; - offerer_config.use_datagram_transport = true; - - // Configure the callee to only use datagram transport for media. 
- PeerConnectionInterface::RTCConfiguration answerer_config; - answerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - answerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - answerer_config.use_datagram_transport = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - offerer_config, answerer_config, - loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Offer both media and data. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use SCTP for data channels. - EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Media flow should not be impacted. 
- MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelWithMediaOnCallee) { - // Configure the caller to attempt use of datagram transport for data - // channels. - PeerConnectionInterface::RTCConfiguration offerer_config; - offerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - offerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - offerer_config.use_datagram_transport_for_data_channels = true; - - // Configure the callee to use datagram transport for data channels and media. - PeerConnectionInterface::RTCConfiguration answerer_config; - answerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - answerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - answerer_config.use_datagram_transport_for_data_channels = true; - answerer_config.use_datagram_transport = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - offerer_config, answerer_config, - loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Offer both media and data. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. 
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use datagram transport for data channels. - EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Media flow should not be impacted. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportMediaWithDataChannelOnCallee) { - // Configure the caller to attempt use of datagram transport for media. - PeerConnectionInterface::RTCConfiguration offerer_config; - offerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - offerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - offerer_config.use_datagram_transport = true; - - // Configure the callee to only use datagram transport for media and data - // channels. 
- PeerConnectionInterface::RTCConfiguration answerer_config; - answerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - answerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - answerer_config.use_datagram_transport = true; - answerer_config.use_datagram_transport_for_data_channels = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - offerer_config, answerer_config, - loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Offer both media and data. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use SCTP for data channels. - EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Media flow should not be impacted. 
- MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelAndMedia) { - // Configure the caller to use datagram transport for data channels and media. - PeerConnectionInterface::RTCConfiguration offerer_config; - offerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - offerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - offerer_config.use_datagram_transport_for_data_channels = true; - offerer_config.use_datagram_transport = true; - - // Configure the callee to use datagram transport for data channels and media. - PeerConnectionInterface::RTCConfiguration answerer_config; - answerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - answerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - answerer_config.use_datagram_transport_for_data_channels = true; - answerer_config.use_datagram_transport = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - offerer_config, answerer_config, - loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Offer both media and data. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. 
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Both endpoints should agree to use datagram transport for data channels. - EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport()); - EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport()); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); - - // Media flow should not be impacted. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Tests that data channels use SCTP instead of datagram transport if datagram -// transport is configured in receive-only mode on the caller. 
-TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelReceiveOnlyOnCallerUsesSctp) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - rtc_config.use_datagram_transport_for_data_channels_receive_only = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // The caller should offer a data channel using SCTP. - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // SCTP transports should be present, since they are in use. - EXPECT_NE(caller()->pc()->GetSctpTransport(), nullptr); - EXPECT_NE(callee()->pc()->GetSctpTransport(), nullptr); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -#endif // HAVE_SCTP - -// Tests that a callee configured for receive-only use of datagram transport -// data channels accepts them on incoming calls. 
-TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelReceiveOnlyOnCallee) { - PeerConnectionInterface::RTCConfiguration offerer_config; - offerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - offerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - offerer_config.use_datagram_transport_for_data_channels = true; - - PeerConnectionInterface::RTCConfiguration answerer_config; - answerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - answerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - answerer_config.use_datagram_transport_for_data_channels = true; - answerer_config.use_datagram_transport_for_data_channels_receive_only = true; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - offerer_config, answerer_config, - loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // SCTP transports should not be present, since datagram transport is used. - EXPECT_EQ(caller()->pc()->GetSctpTransport(), nullptr); - EXPECT_EQ(callee()->pc()->GetSctpTransport(), nullptr); - - // Ensure data can be sent in both directions. 
- std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -// This test sets up a call between two parties with a datagram transport data -// channel. -TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelEndToEnd) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Expect that data channel created on caller side will show up for callee as - // well. - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - // Caller data channel should already exist (it created one). Callee data - // channel may not exist yet, since negotiation happens in-band, not in SDP. - ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Ensure data can be sent in both directions. 
- std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -// Tests that 'zero-rtt' data channel transports (which are ready-to-send as -// soon as they're created) work correctly. -TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelZeroRtt) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - rtc_config.use_datagram_transport_for_data_channels = true; - rtc_config.enable_dtls_srtp = false; // SDES is required for media transport. - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Ensure that the callee's media transport is ready-to-send immediately. - // Note that only the callee can become writable in zero RTTs. The caller - // must wait for the callee's answer. - loopback_media_transports()->SetSecondStateAfterConnect( - webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - // Expect that data channel created on caller side will show up for callee as - // well. - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - loopback_media_transports()->SetFirstState( - webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - // Caller data channel should already exist (it created one). Callee data - // channel may not exist yet, since negotiation happens in-band, not in SDP. 
- ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Ensure data can be sent in both directions. - std::string data = "hello world"; - caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); - callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); -} - -// Ensures that when the callee closes a datagram transport data channel, the -// closing procedure results in the data channel being closed for the caller -// as well. -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelCalleeCloses) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.use_datagram_transport_for_data_channels = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Create a data channel on the caller and signal it to the callee. - caller()->CreateDataChannel(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - // Data channels exist and open on both ends of the connection. 
- ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - - // Close the data channel on the callee side, and wait for it to reach the - // "closed" state on both sides. - callee()->data_channel()->Close(); - EXPECT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout); -} - -// Tests that datagram transport data channels can do in-band negotiation. -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelConfigSentToOtherSide) { - PeerConnectionInterface::RTCConfiguration rtc_config; - rtc_config.use_datagram_transport_for_data_channels = true; - rtc_config.enable_dtls_srtp = false; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - rtc_config, rtc_config, loopback_media_transports()->first_factory(), - loopback_media_transports()->second_factory())); - ConnectFakeSignaling(); - - // Create a data channel with a non-default configuration and signal it to the - // callee. - webrtc::DataChannelInit init; - init.id = 53; - init.maxRetransmits = 52; - caller()->CreateDataChannel("data-channel", &init); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Ensure that the data channel transport is ready. - loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); - loopback_media_transports()->FlushAsyncInvokes(); - - // Ensure that the data channel exists on the callee with the correct - // configuration. - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); - // Since "negotiate" is false, the "id" parameter is ignored. 
- EXPECT_NE(init.id, callee()->data_channel()->id()); - EXPECT_EQ("data-channel", callee()->data_channel()->label()); - EXPECT_EQ(init.maxRetransmits, callee()->data_channel()->maxRetransmits()); - EXPECT_FALSE(callee()->data_channel()->negotiated()); -} - -TEST_P(PeerConnectionIntegrationTest, - DatagramTransportDataChannelRejectedWithNoFallback) { - PeerConnectionInterface::RTCConfiguration offerer_config; - offerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - offerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - offerer_config.use_datagram_transport_for_data_channels = true; - // Disabling DTLS precludes a fallback to SCTP. - offerer_config.enable_dtls_srtp = false; - - PeerConnectionInterface::RTCConfiguration answerer_config; - answerer_config.rtcp_mux_policy = - PeerConnectionInterface::kRtcpMuxPolicyRequire; - answerer_config.bundle_policy = - PeerConnectionInterface::kBundlePolicyMaxBundle; - // Both endpoints must disable DTLS or SetRemoteDescription will fail. - answerer_config.enable_dtls_srtp = false; - - // Configure one endpoint to use datagram transport for data channels while - // the other does not. - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( - offerer_config, answerer_config, - loopback_media_transports()->first_factory(), nullptr)); - ConnectFakeSignaling(); - - // The caller offers a data channel using either datagram transport or SCTP. - caller()->CreateDataChannel(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Caller data channel should already exist (it created one). Callee data - // channel should not exist, since negotiation happens in-band, not in SDP. 
- EXPECT_NE(nullptr, caller()->data_channel()); - EXPECT_EQ(nullptr, callee()->data_channel()); - - // The caller's data channel should close when the datagram transport is - // rejected. - EXPECT_FALSE(caller()->data_observer()->IsOpen()); - - // Media flow should not be impacted by the failed data channel. - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} - -// Test that the ICE connection and gathering states eventually reach -// "complete". -TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - // Do normal offer/answer. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, - caller()->ice_gathering_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, - callee()->ice_gathering_state(), kMaxWaitForFramesMs); - // After the best candidate pair is selected and all candidates are signaled, - // the ICE connection state should reach "complete". - // TODO(deadbeef): Currently, the ICE "controlled" agent (the - // answerer/"callee" by default) only reaches "connected". When this is - // fixed, this test should be updated. 
- EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); -} - -constexpr int kOnlyLocalPorts = cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP; - -// Use a mock resolver to resolve the hostname back to the original IP on both -// sides and check that the ICE connection connects. -TEST_P(PeerConnectionIntegrationTest, - IceStatesReachCompletionWithRemoteHostname) { - auto caller_resolver_factory = - std::make_unique>(); - auto callee_resolver_factory = - std::make_unique>(); - NiceMock callee_async_resolver; - NiceMock caller_async_resolver; - - // This also verifies that the injected AsyncResolverFactory is used by - // P2PTransportChannel. - EXPECT_CALL(*caller_resolver_factory, Create()) - .WillOnce(Return(&caller_async_resolver)); - webrtc::PeerConnectionDependencies caller_deps(nullptr); - caller_deps.async_resolver_factory = std::move(caller_resolver_factory); - - EXPECT_CALL(*callee_resolver_factory, Create()) - .WillOnce(Return(&callee_async_resolver)); - webrtc::PeerConnectionDependencies callee_deps(nullptr); - callee_deps.async_resolver_factory = std::move(callee_resolver_factory); - - PeerConnectionInterface::RTCConfiguration config; - config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; - config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; - - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( - config, std::move(caller_deps), config, std::move(callee_deps))); - - caller()->SetRemoteAsyncResolver(&callee_async_resolver); - callee()->SetRemoteAsyncResolver(&caller_async_resolver); - - // Enable hostname candidates with mDNS names. 
- caller()->SetMdnsResponder( - std::make_unique(network_thread())); - callee()->SetMdnsResponder( - std::make_unique(network_thread())); - - SetPortAllocatorFlags(kOnlyLocalPorts, kOnlyLocalPorts); - - ConnectFakeSignaling(); - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); - - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.CandidatePairType_UDP", - webrtc::kIceCandidatePairHostNameHostName)); + EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( + "WebRTC.PeerConnection.CandidatePairType_UDP", + webrtc::kIceCandidatePairHostNameHostName)); } // Test that firewalling the ICE connection causes the clients to identify the @@ -5211,7 +4388,9 @@ TEST_P(PeerConnectionIntegrationTest, callee()->SetOfferAnswerOptions(options); } else { callee()->SetRemoteOfferHandler([this] { - callee()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)->Stop(); + callee() + ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->StopInternal(); }); } caller()->CreateAndSetAndSignalOffer(); @@ -5232,7 +4411,7 @@ TEST_P(PeerConnectionIntegrationTest, // The caller's transceiver is stopped, so we need to add another track. 
auto caller_transceiver = caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); - EXPECT_TRUE(caller_transceiver->stopped()); + EXPECT_EQ(nullptr, caller_transceiver.get()); caller()->AddVideoTrack(); } callee()->AddVideoTrack(); @@ -5295,9 +4474,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto caller_video_sender = video_result.MoveValue()->sender(); callee()->SetRemoteOfferHandler([this] { ASSERT_EQ(2u, callee()->pc()->GetTransceivers().size()); - callee()->pc()->GetTransceivers()[0]->SetDirection( + callee()->pc()->GetTransceivers()[0]->SetDirectionWithError( RtpTransceiverDirection::kSendRecv); - callee()->pc()->GetTransceivers()[1]->SetDirection( + callee()->pc()->GetTransceivers()[1]->SetDirectionWithError( RtpTransceiverDirection::kSendRecv); }); caller()->CreateAndSetAndSignalOffer(); @@ -5666,10 +4845,10 @@ TEST_P(PeerConnectionIntegrationTest, IceTransportFactoryUsedForConnections) { auto ice_transport_factory = std::make_unique(); EXPECT_CALL(*ice_transport_factory, RecordIceTransportCreated()).Times(1); dependencies.ice_transport_factory = std::move(ice_transport_factory); - auto wrapper = CreatePeerConnectionWrapper( - "Caller", nullptr, &default_config, std::move(dependencies), nullptr, - nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + auto wrapper = CreatePeerConnectionWrapper("Caller", nullptr, &default_config, + std::move(dependencies), nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); ASSERT_TRUE(wrapper); wrapper->CreateDataChannel(); rtc::scoped_refptr observer( @@ -6098,6 +5277,23 @@ TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) { callee()->pc()->SetConfiguration(callee_config); EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE, callee()->last_candidate_gathered().type(), kDefaultTimeout); + + // Create an offer and verify that it does not contain an ICE restart (i.e new + // ice credentials). 
+ std::string caller_ufrag_pre_offer = caller() + ->pc() + ->local_description() + ->description() + ->transport_infos()[0] + .description.ice_ufrag; + caller()->CreateAndSetAndSignalOffer(); + std::string caller_ufrag_post_offer = caller() + ->pc() + ->local_description() + ->description() + ->transport_infos()[0] + .description.ice_ufrag; + EXPECT_EQ(caller_ufrag_pre_offer, caller_ufrag_post_offer); } TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) { @@ -6137,6 +5333,35 @@ TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) { EXPECT_NE(caller()->error_event().address, ""); } +TEST_P(PeerConnectionIntegrationTest, OnIceCandidateErrorWithEmptyAddress) { + webrtc::PeerConnectionInterface::IceServer ice_server; + ice_server.urls.push_back("turn:127.0.0.1:3478?transport=tcp"); + ice_server.username = "test"; + ice_server.password = "test"; + + PeerConnectionInterface::RTCConfiguration caller_config; + caller_config.servers.push_back(ice_server); + caller_config.type = webrtc::PeerConnectionInterface::kRelay; + caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; + + PeerConnectionInterface::RTCConfiguration callee_config; + callee_config.servers.push_back(ice_server); + callee_config.type = webrtc::PeerConnectionInterface::kRelay; + callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; + + ASSERT_TRUE( + CreatePeerConnectionWrappersWithConfig(caller_config, callee_config)); + + // Do normal offer/answer and wait for ICE to complete. 
+ ConnectFakeSignaling(); + caller()->AddAudioVideoTracks(); + callee()->AddAudioVideoTracks(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + EXPECT_EQ_WAIT(701, caller()->error_event().error_code, kDefaultTimeout); + EXPECT_EQ(caller()->error_event().address, ""); +} + TEST_F(PeerConnectionIntegrationTestUnifiedPlan, AudioKeepsFlowingAfterImplicitRollback) { PeerConnectionInterface::RTCConfiguration config; @@ -6192,6 +5417,49 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, PeerConnectionInterface::kHaveRemoteOffer)); } +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + H264FmtpSpsPpsIdrInKeyframeParameterUsage) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddVideoTrack(); + callee()->AddVideoTrack(); + auto munger = [](cricket::SessionDescription* desc) { + cricket::VideoContentDescription* video = + GetFirstVideoContentDescription(desc); + auto codecs = video->codecs(); + for (auto&& codec : codecs) { + if (codec.name == "H264") { + std::string value; + // The parameter is not supposed to be present in SDP by default. + EXPECT_FALSE( + codec.GetParam(cricket::kH264FmtpSpsPpsIdrInKeyframe, &value)); + codec.SetParam(std::string(cricket::kH264FmtpSpsPpsIdrInKeyframe), + std::string("")); + } + } + video->set_codecs(codecs); + }; + // Munge local offer for SLD. + caller()->SetGeneratedSdpMunger(munger); + // Munge remote answer for SRD. + caller()->SetReceivedSdpMunger(munger); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + // Observe that after munging the parameter is present in generated SDP. 
+ caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) { + cricket::VideoContentDescription* video = + GetFirstVideoContentDescription(desc); + for (auto&& codec : video->codecs()) { + if (codec.name == "H264") { + std::string value; + EXPECT_TRUE( + codec.GetParam(cricket::kH264FmtpSpsPpsIdrInKeyframe, &value)); + } + } + }); + caller()->CreateOfferAndWait(); +} + INSTANTIATE_TEST_SUITE_P(PeerConnectionIntegrationTest, PeerConnectionIntegrationTest, Values(SdpSemantics::kPlanB, @@ -6372,7 +5640,7 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, ASSERT_TRUE(ExpectNewFrames(media_expectations)); } - audio_transceiver->Stop(); + audio_transceiver->StopInternal(); caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack()); caller()->CreateAndSetAndSignalOffer(); @@ -6384,6 +5652,104 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, } } +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + StopTransceiverRemovesDtlsTransports) { + RTCConfiguration config; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + auto audio_transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack()); + ASSERT_TRUE(audio_transceiver_or_error.ok()); + auto audio_transceiver = audio_transceiver_or_error.MoveValue(); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + + audio_transceiver->StopStandard(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_EQ(0U, caller()->pc()->GetTransceivers().size()); + EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew, + caller()->pc()->ice_gathering_state()); + EXPECT_THAT(caller()->ice_gathering_state_history(), + ElementsAre(PeerConnectionInterface::kIceGatheringGathering, + PeerConnectionInterface::kIceGatheringComplete, + PeerConnectionInterface::kIceGatheringNew)); +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + 
StopTransceiverStopsAndRemovesTransceivers) { + RTCConfiguration config; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + auto audio_transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack()); + ASSERT_TRUE(audio_transceiver_or_error.ok()); + auto caller_transceiver = audio_transceiver_or_error.MoveValue(); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + caller_transceiver->StopStandard(); + + auto callee_transceiver = callee()->pc()->GetTransceivers()[0]; + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + EXPECT_EQ(0U, caller()->pc()->GetTransceivers().size()); + EXPECT_EQ(0U, callee()->pc()->GetTransceivers().size()); + EXPECT_EQ(0U, caller()->pc()->GetSenders().size()); + EXPECT_EQ(0U, callee()->pc()->GetSenders().size()); + EXPECT_EQ(0U, caller()->pc()->GetReceivers().size()); + EXPECT_EQ(0U, callee()->pc()->GetReceivers().size()); + EXPECT_TRUE(caller_transceiver->stopped()); + EXPECT_TRUE(callee_transceiver->stopped()); +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + StopTransceiverEndsIncomingAudioTrack) { + RTCConfiguration config; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + auto audio_transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack()); + ASSERT_TRUE(audio_transceiver_or_error.ok()); + auto audio_transceiver = audio_transceiver_or_error.MoveValue(); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + auto caller_track = audio_transceiver->receiver()->track(); + auto callee_track = callee()->pc()->GetReceivers()[0]->track(); + audio_transceiver->StopStandard(); + EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, + caller_track->state()); + caller()->CreateAndSetAndSignalOffer(); + 
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, + callee_track->state()); +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + StopTransceiverEndsIncomingVideoTrack) { + RTCConfiguration config; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + auto audio_transceiver_or_error = + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack()); + ASSERT_TRUE(audio_transceiver_or_error.ok()); + auto audio_transceiver = audio_transceiver_or_error.MoveValue(); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + auto caller_track = audio_transceiver->receiver()->track(); + auto callee_track = callee()->pc()->GetReceivers()[0]->track(); + audio_transceiver->StopStandard(); + EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, + caller_track->state()); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, + callee_track->state()); +} + #ifdef HAVE_SCTP TEST_F(PeerConnectionIntegrationTestUnifiedPlan, diff --git a/pc/peer_connection_interface_unittest.cc b/pc/peer_connection_interface_unittest.cc index 8db8751b64..abedf48688 100644 --- a/pc/peer_connection_interface_unittest.cc +++ b/pc/peer_connection_interface_unittest.cc @@ -43,6 +43,7 @@ #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" #include "api/video_codecs/builtin_video_decoder_factory.h" #include "api/video_codecs/builtin_video_encoder_factory.h" #include "api/video_codecs/video_decoder_factory.h" @@ -627,7 +628,7 @@ class MockTrackObserver : public ObserverInterface { } } - MOCK_METHOD0(OnChanged, void()); + MOCK_METHOD(void, OnChanged, (), (override)); private: NotifierInterface* notifier_; 
@@ -646,12 +647,14 @@ class PeerConnectionFactoryForTest : public webrtc::PeerConnectionFactory { dependencies.network_thread = rtc::Thread::Current(); dependencies.signaling_thread = rtc::Thread::Current(); dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); + dependencies.trials = std::make_unique(); cricket::MediaEngineDependencies media_deps; media_deps.task_queue_factory = dependencies.task_queue_factory.get(); // Use fake audio device module since we're only testing the interface // level, and using a real one could make tests flaky when run in parallel. media_deps.adm = FakeAudioCaptureModule::Create(); SetMediaEngineDefaults(&media_deps); + media_deps.trials = dependencies.trials.get(); dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); dependencies.call_factory = webrtc::CreateCallFactory(); @@ -696,7 +699,6 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { ASSERT_TRUE(pc_factory_); pc_factory_for_test_ = PeerConnectionFactoryForTest::CreatePeerConnectionFactoryForTest(); - pc_factory_for_test_->Initialize(); } void CreatePeerConnection() { @@ -1421,15 +1423,11 @@ TEST_P(PeerConnectionInterfaceTest, GetConfigurationAfterSetConfiguration) { PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration(); config.type = PeerConnectionInterface::kRelay; - config.use_datagram_transport = true; - config.use_datagram_transport_for_data_channels = true; EXPECT_TRUE(pc_->SetConfiguration(config).ok()); PeerConnectionInterface::RTCConfiguration returned_config = pc_->GetConfiguration(); EXPECT_EQ(PeerConnectionInterface::kRelay, returned_config.type); - EXPECT_TRUE(returned_config.use_datagram_transport); - EXPECT_TRUE(returned_config.use_datagram_transport_for_data_channels); } TEST_P(PeerConnectionInterfaceTest, SetConfigurationFailsAfterClose) { @@ -2672,23 +2670,24 @@ TEST_P(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) { EXPECT_EQ(1u, pc_->local_streams()->count()); 
EXPECT_EQ(1u, pc_->remote_streams()->count()); } else { - // Verify that the RtpTransceivers are still present but all stopped. + // Verify that the RtpTransceivers are still returned. EXPECT_EQ(2u, pc_->GetTransceivers().size()); - for (const auto& transceiver : pc_->GetTransceivers()) { - EXPECT_TRUE(transceiver->stopped()); - } } auto audio_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_AUDIO); - ASSERT_TRUE(audio_receiver); auto video_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_VIDEO); - ASSERT_TRUE(video_receiver); - - // Track state may be updated asynchronously. - EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, - audio_receiver->track()->state(), kTimeout); - EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, - video_receiver->track()->state(), kTimeout); + if (sdp_semantics_ == SdpSemantics::kPlanB) { + ASSERT_TRUE(audio_receiver); + ASSERT_TRUE(video_receiver); + // Track state may be updated asynchronously. + EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, + audio_receiver->track()->state(), kTimeout); + EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, + video_receiver->track()->state(), kTimeout); + } else { + ASSERT_FALSE(audio_receiver); + ASSERT_FALSE(video_receiver); + } } // Test that PeerConnection methods fails gracefully after @@ -3485,7 +3484,10 @@ TEST_P(PeerConnectionInterfaceTest, OffersAndAnswersHaveTrickleIceOption) { EXPECT_TRUE(desc->transport_infos()[1].description.HasOption("trickle")); // Apply the offer as a remote description, then create an answer. 
+ EXPECT_FALSE(pc_->can_trickle_ice_candidates()); EXPECT_TRUE(DoSetRemoteDescription(std::move(offer))); + ASSERT_TRUE(pc_->can_trickle_ice_candidates()); + EXPECT_TRUE(*(pc_->can_trickle_ice_candidates())); std::unique_ptr answer; ASSERT_TRUE(DoCreateAnswer(&answer, &options)); desc = answer->description(); @@ -3616,44 +3618,44 @@ TEST_P(PeerConnectionInterfaceTest, TEST_P(PeerConnectionInterfaceTest, SetBitrateWithoutMinSucceeds) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters bitrate; - bitrate.current_bitrate_bps = 100000; + BitrateSettings bitrate; + bitrate.start_bitrate_bps = 100000; EXPECT_TRUE(pc_->SetBitrate(bitrate).ok()); } TEST_P(PeerConnectionInterfaceTest, SetBitrateNegativeMinFails) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters bitrate; + BitrateSettings bitrate; bitrate.min_bitrate_bps = -1; EXPECT_FALSE(pc_->SetBitrate(bitrate).ok()); } TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentLessThanMinFails) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters bitrate; + BitrateSettings bitrate; bitrate.min_bitrate_bps = 5; - bitrate.current_bitrate_bps = 3; + bitrate.start_bitrate_bps = 3; EXPECT_FALSE(pc_->SetBitrate(bitrate).ok()); } TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentNegativeFails) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters bitrate; - bitrate.current_bitrate_bps = -1; + BitrateSettings bitrate; + bitrate.start_bitrate_bps = -1; EXPECT_FALSE(pc_->SetBitrate(bitrate).ok()); } TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanCurrentFails) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters bitrate; - bitrate.current_bitrate_bps = 10; + BitrateSettings bitrate; + bitrate.start_bitrate_bps = 10; bitrate.max_bitrate_bps = 8; EXPECT_FALSE(pc_->SetBitrate(bitrate).ok()); } TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanMinFails) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters 
bitrate; + BitrateSettings bitrate; bitrate.min_bitrate_bps = 10; bitrate.max_bitrate_bps = 8; EXPECT_FALSE(pc_->SetBitrate(bitrate).ok()); @@ -3661,7 +3663,7 @@ TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanMinFails) { TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxNegativeFails) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters bitrate; + BitrateSettings bitrate; bitrate.max_bitrate_bps = -1; EXPECT_FALSE(pc_->SetBitrate(bitrate).ok()); } @@ -3672,8 +3674,8 @@ TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxNegativeFails) { // be clamped succeeds. TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentLessThanImplicitMin) { CreatePeerConnection(); - PeerConnectionInterface::BitrateParameters bitrate; - bitrate.current_bitrate_bps = 1; + BitrateSettings bitrate; + bitrate.start_bitrate_bps = 1; EXPECT_TRUE(pc_->SetBitrate(bitrate).ok()); } @@ -3920,7 +3922,6 @@ class PeerConnectionMediaConfigTest : public ::testing::Test { protected: void SetUp() override { pcf_ = PeerConnectionFactoryForTest::CreatePeerConnectionFactoryForTest(); - pcf_->Initialize(); } const cricket::MediaConfig TestCreatePeerConnection( const RTCConfiguration& config) { diff --git a/pc/peer_connection_internal.h b/pc/peer_connection_internal.h index a51ba4b8f6..029febab2d 100644 --- a/pc/peer_connection_internal.h +++ b/pc/peer_connection_internal.h @@ -19,8 +19,9 @@ #include "api/peer_connection_interface.h" #include "call/call.h" -#include "pc/data_channel.h" +#include "pc/rtp_data_channel.h" #include "pc/rtp_transceiver.h" +#include "pc/sctp_data_channel.h" namespace webrtc { @@ -29,7 +30,6 @@ class PeerConnectionInternal : public PeerConnectionInterface { public: virtual rtc::Thread* network_thread() const = 0; virtual rtc::Thread* worker_thread() const = 0; - virtual rtc::Thread* signaling_thread() const = 0; // The SDP session ID as defined by RFC 3264. 
virtual std::string session_id() const = 0; @@ -41,15 +41,19 @@ class PeerConnectionInternal : public PeerConnectionInterface { rtc::scoped_refptr>> GetTransceiversInternal() const = 0; - virtual sigslot::signal1& SignalDataChannelCreated() = 0; + virtual sigslot::signal1& SignalRtpDataChannelCreated() = 0; + virtual sigslot::signal1& + SignalSctpDataChannelCreated() = 0; // Only valid when using deprecated RTP data channels. virtual cricket::RtpDataChannel* rtp_data_channel() const = 0; - virtual std::vector> sctp_data_channels() - const = 0; + // Call on the network thread to fetch stats for all the data channels. + // TODO(tommi): Make pure virtual after downstream updates. + virtual std::vector GetDataChannelStats() const { + return {}; + } - virtual absl::optional sctp_content_name() const = 0; virtual absl::optional sctp_transport_name() const = 0; virtual cricket::CandidateStatsList GetPooledCandidateStats() const = 0; diff --git a/pc/peer_connection_jsep_unittest.cc b/pc/peer_connection_jsep_unittest.cc index 3186e8f39b..c3e093617b 100644 --- a/pc/peer_connection_jsep_unittest.cc +++ b/pc/peer_connection_jsep_unittest.cc @@ -11,6 +11,7 @@ #include #include "api/task_queue/default_task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" #include "media/engine/webrtc_media_engine.h" #include "media/engine/webrtc_media_engine_defaults.h" #include "pc/media_session.h" @@ -21,10 +22,10 @@ #include "pc/test/android_test_initializer.h" #endif #include "pc/test/fake_audio_capture_module.h" -#include "pc/test/fake_sctp_transport.h" #include "rtc_base/gunit.h" #include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" +#include "test/pc/sctp/fake_sctp_transport.h" // This file contains tests that ensure the PeerConnection's implementation of // CreateOffer/CreateAnswer/SetLocalDescription/SetRemoteDescription conform @@ -41,30 +42,23 @@ using ::testing::ElementsAre; using ::testing::UnorderedElementsAre; using ::testing::Values; -class 
PeerConnectionFactoryForJsepTest : public PeerConnectionFactory { - public: - PeerConnectionFactoryForJsepTest() - : PeerConnectionFactory([] { - PeerConnectionFactoryDependencies dependencies; - dependencies.worker_thread = rtc::Thread::Current(); - dependencies.network_thread = rtc::Thread::Current(); - dependencies.signaling_thread = rtc::Thread::Current(); - dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = dependencies.task_queue_factory.get(); - media_deps.adm = FakeAudioCaptureModule::Create(); - SetMediaEngineDefaults(&media_deps); - dependencies.media_engine = - cricket::CreateMediaEngine(std::move(media_deps)); - dependencies.call_factory = CreateCallFactory(); - return dependencies; - }()) {} - - std::unique_ptr - CreateSctpTransportInternalFactory() { - return std::make_unique(); - } -}; +PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies() { + PeerConnectionFactoryDependencies dependencies; + dependencies.worker_thread = rtc::Thread::Current(); + dependencies.network_thread = rtc::Thread::Current(); + dependencies.signaling_thread = rtc::Thread::Current(); + dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); + dependencies.trials = std::make_unique(); + cricket::MediaEngineDependencies media_deps; + media_deps.task_queue_factory = dependencies.task_queue_factory.get(); + media_deps.adm = FakeAudioCaptureModule::Create(); + media_deps.trials = dependencies.trials.get(); + SetMediaEngineDefaults(&media_deps); + dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); + dependencies.call_factory = CreateCallFactory(); + dependencies.sctp_factory = std::make_unique(); + return dependencies; +} class PeerConnectionJsepTest : public ::testing::Test { protected: @@ -84,9 +78,9 @@ class PeerConnectionJsepTest : public ::testing::Test { } WrapperPtr CreatePeerConnection(const RTCConfiguration& config) { - 
rtc::scoped_refptr pc_factory( - new rtc::RefCountedObject()); - RTC_CHECK(pc_factory->Initialize()); + rtc::scoped_refptr pc_factory = + CreateModularPeerConnectionFactory( + CreatePeerConnectionFactoryDependencies()); auto observer = std::make_unique(); auto pc = pc_factory->CreatePeerConnection(config, nullptr, nullptr, observer.get()); @@ -212,7 +206,7 @@ TEST_F(PeerConnectionJsepTest, StoppedTransceiverHasNoMediaSectionInInitialOffer) { auto caller = CreatePeerConnection(); auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - transceiver->Stop(); + transceiver->StopInternal(); auto offer = caller->CreateOffer(); EXPECT_EQ(0u, offer->description()->contents().size()); @@ -300,7 +294,7 @@ TEST_F(PeerConnectionJsepTest, auto caller = CreatePeerConnection(); caller->AddAudioTrack("a"); auto caller_audio = caller->pc()->GetTransceivers()[0]; - caller_audio->SetDirection(RtpTransceiverDirection::kSendOnly); + caller_audio->SetDirectionWithError(RtpTransceiverDirection::kSendOnly); auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); @@ -358,16 +352,18 @@ TEST_F(PeerConnectionJsepTest, SetRemoteOfferDoesNotReuseStoppedTransceiver) { caller->AddAudioTrack("a"); auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); - callee->pc()->GetTransceivers()[0]->Stop(); + callee->pc()->GetTransceivers()[0]->StopInternal(); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); auto transceivers = callee->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); - EXPECT_EQ(absl::nullopt, transceivers[0]->mid()); - EXPECT_TRUE(transceivers[0]->stopped()); - EXPECT_EQ(caller->pc()->GetTransceivers()[0]->mid(), transceivers[1]->mid()); - EXPECT_FALSE(transceivers[1]->stopped()); + // The stopped transceiver is removed in SetLocalDescription(answer) + ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer())); + transceivers = callee->pc()->GetTransceivers(); + ASSERT_EQ(1u, transceivers.size()); + 
EXPECT_EQ(caller->pc()->GetTransceivers()[0]->mid(), transceivers[0]->mid()); + EXPECT_FALSE(transceivers[0]->stopped()); } // Test that audio and video transceivers created on the remote side with @@ -432,7 +428,7 @@ TEST_F(PeerConnectionJsepTest, CreateAnswerRejectsStoppedTransceiver) { ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - callee->pc()->GetTransceivers()[0]->Stop(); + callee->pc()->GetTransceivers()[0]->StopInternal(); auto answer = callee->CreateAnswer(); auto contents = answer->description()->contents(); @@ -469,7 +465,7 @@ TEST_F(PeerConnectionJsepTest, CreateAnswerNegotiatesDirection) { TEST_F(PeerConnectionJsepTest, SetLocalAnswerUpdatesCurrentDirection) { auto caller = CreatePeerConnection(); auto caller_audio = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - caller_audio->SetDirection(RtpTransceiverDirection::kRecvOnly); + caller_audio->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly); auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); @@ -494,7 +490,7 @@ TEST_F(PeerConnectionJsepTest, SetRemoteAnswerUpdatesCurrentDirection) { auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); auto callee_audio = callee->pc()->GetTransceivers()[0]; - callee_audio->SetDirection(RtpTransceiverDirection::kSendOnly); + callee_audio->SetDirectionWithError(RtpTransceiverDirection::kSendOnly); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); ASSERT_TRUE( @@ -518,7 +514,7 @@ TEST_F(PeerConnectionJsepTest, SettingTransceiverInactiveDoesNotStopIt) { caller->AddAudioTrack("a"); auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); - callee->pc()->GetTransceivers()[0]->SetDirection( + callee->pc()->GetTransceivers()[0]->SetDirectionWithError( RtpTransceiverDirection::kInactive); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); @@ -543,7 +539,7 @@ TEST_F(PeerConnectionJsepTest, 
caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); ASSERT_TRUE(transceiver->mid()); - transceiver->Stop(); + transceiver->StopInternal(); auto reoffer = caller->CreateOffer(); auto contents = reoffer->description()->contents(); @@ -564,13 +560,15 @@ TEST_F(PeerConnectionJsepTest, ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - transceiver->Stop(); + transceiver->StopInternal(); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); auto transceivers = callee->pc()->GetTransceivers(); - EXPECT_TRUE(transceivers[0]->stopped()); - EXPECT_TRUE(transceivers[0]->mid()); + EXPECT_EQ(1u, transceivers.size()); + ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer())); + transceivers = callee->pc()->GetTransceivers(); + EXPECT_EQ(0u, transceivers.size()); } // Test that CreateOffer will only generate a recycled media section if the @@ -586,7 +584,7 @@ TEST_F(PeerConnectionJsepTest, caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); auto second_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - first_transceiver->Stop(); + first_transceiver->StopInternal(); auto reoffer = caller->CreateOffer(); auto contents = reoffer->description()->contents(); @@ -605,14 +603,17 @@ TEST_F(PeerConnectionJsepTest, auto callee = CreatePeerConnection(); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - callee->pc()->GetTransceivers()[0]->Stop(); + std::string first_mid = *first_transceiver->mid(); + ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); + callee->pc()->GetTransceivers()[0]->StopInternal(); + ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); EXPECT_TRUE(first_transceiver->stopped()); - // First transceivers aren't dissociated yet. 
- ASSERT_NE(absl::nullopt, first_transceiver->mid()); - std::string first_mid = *first_transceiver->mid(); - EXPECT_EQ(first_mid, callee->pc()->GetTransceivers()[0]->mid()); + // First transceivers are dissociated on caller side. + ASSERT_EQ(absl::nullopt, first_transceiver->mid()); + // They are disassociated on callee side. + ASSERT_EQ(0u, callee->pc()->GetTransceivers().size()); // New offer exchange with new transceivers that recycles the m section // correctly. @@ -630,10 +631,11 @@ TEST_F(PeerConnectionJsepTest, ASSERT_TRUE( caller->SetLocalDescription(CloneSessionDescription(offer.get()))); EXPECT_EQ(absl::nullopt, first_transceiver->mid()); - EXPECT_EQ(second_mid, caller->pc()->GetTransceivers()[1]->mid()); + ASSERT_EQ(1u, caller->pc()->GetTransceivers().size()); + EXPECT_EQ(second_mid, caller->pc()->GetTransceivers()[0]->mid()); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); - EXPECT_EQ(absl::nullopt, callee->pc()->GetTransceivers()[0]->mid()); - EXPECT_EQ(second_mid, callee->pc()->GetTransceivers()[1]->mid()); + ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); + EXPECT_EQ(second_mid, callee->pc()->GetTransceivers()[0]->mid()); // The new answer should also recycle the m section correctly. 
auto answer = callee->CreateAnswer(); @@ -647,13 +649,11 @@ TEST_F(PeerConnectionJsepTest, callee->SetLocalDescription(CloneSessionDescription(answer.get()))); ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); auto caller_transceivers = caller->pc()->GetTransceivers(); - ASSERT_EQ(2u, caller_transceivers.size()); - EXPECT_EQ(absl::nullopt, caller_transceivers[0]->mid()); - EXPECT_EQ(second_mid, caller_transceivers[1]->mid()); + ASSERT_EQ(1u, caller_transceivers.size()); + EXPECT_EQ(second_mid, caller_transceivers[0]->mid()); auto callee_transceivers = callee->pc()->GetTransceivers(); - ASSERT_EQ(2u, callee_transceivers.size()); - EXPECT_EQ(absl::nullopt, callee_transceivers[0]->mid()); - EXPECT_EQ(second_mid, callee_transceivers[1]->mid()); + ASSERT_EQ(1u, callee_transceivers.size()); + EXPECT_EQ(second_mid, callee_transceivers[0]->mid()); } // Test that creating/setting a local offer that recycles an m= section is @@ -664,7 +664,7 @@ TEST_F(PeerConnectionJsepTest, CreateOfferRecyclesWhenOfferingTwice) { auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); auto callee = CreatePeerConnection(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - first_transceiver->Stop(); + first_transceiver->StopInternal(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); caller->AddAudioTrack("audio2"); @@ -675,7 +675,8 @@ TEST_F(PeerConnectionJsepTest, CreateOfferRecyclesWhenOfferingTwice) { ASSERT_EQ(1u, offer_contents.size()); EXPECT_FALSE(offer_contents[0].rejected); ASSERT_TRUE(caller->SetLocalDescription(std::move(offer))); - EXPECT_FALSE(caller->pc()->GetTransceivers()[1]->stopped()); + ASSERT_EQ(1u, caller->pc()->GetTransceivers().size()); + EXPECT_FALSE(caller->pc()->GetTransceivers()[0]->stopped()); std::string second_mid = offer_contents[0].name; // Create another new offer and set the local description again without the @@ -690,10 +691,9 @@ TEST_F(PeerConnectionJsepTest, CreateOfferRecyclesWhenOfferingTwice) { 
ASSERT_TRUE(caller->SetLocalDescription(std::move(second_offer))); // Make sure that the caller's transceivers are associated correctly. auto caller_transceivers = caller->pc()->GetTransceivers(); - ASSERT_EQ(2u, caller_transceivers.size()); - EXPECT_EQ(absl::nullopt, caller_transceivers[0]->mid()); - EXPECT_EQ(second_mid, caller_transceivers[1]->mid()); - EXPECT_FALSE(caller_transceivers[1]->stopped()); + ASSERT_EQ(1u, caller_transceivers.size()); + EXPECT_EQ(second_mid, caller_transceivers[0]->mid()); + EXPECT_FALSE(caller_transceivers[0]->stopped()); } // Test that the offer/answer and transceivers for both the caller and callee @@ -729,7 +729,7 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalAndCurrentRemoteRejected) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); std::string first_mid = *first_transceiver->mid(); - first_transceiver->Stop(); + first_transceiver->StopInternal(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -756,11 +756,9 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalAndCurrentRemoteRejected) { // create a new transceiver for the media section. ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); auto callee_transceivers = callee->pc()->GetTransceivers(); - ASSERT_EQ(2u, callee_transceivers.size()); - EXPECT_EQ(absl::nullopt, callee_transceivers[0]->mid()); - EXPECT_EQ(first_type_, callee_transceivers[0]->media_type()); - EXPECT_EQ(second_mid, callee_transceivers[1]->mid()); - EXPECT_EQ(second_type_, callee_transceivers[1]->media_type()); + ASSERT_EQ(1u, callee_transceivers.size()); + EXPECT_EQ(second_mid, callee_transceivers[0]->mid()); + EXPECT_EQ(second_type_, callee_transceivers[0]->media_type()); // The answer should have only one media section for the new transceiver. auto answer = callee->CreateAnswer(); @@ -777,8 +775,8 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalAndCurrentRemoteRejected) { // Setting the remote answer should succeed and not create any new // transceivers. 
ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); - ASSERT_EQ(2u, caller->pc()->GetTransceivers().size()); - ASSERT_EQ(2u, callee->pc()->GetTransceivers().size()); + ASSERT_EQ(1u, caller->pc()->GetTransceivers().size()); + ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); } // Test that recycling works properly when a new transceiver recycles an m= @@ -793,7 +791,7 @@ TEST_P(RecycleMediaSectionTest, CurrentRemoteOnlyRejected) { std::string first_mid = *caller_first_transceiver->mid(); ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); auto callee_first_transceiver = callee->pc()->GetTransceivers()[0]; - callee_first_transceiver->Stop(); + callee_first_transceiver->StopInternal(); // The answer will have a rejected m= section. ASSERT_TRUE( @@ -821,11 +819,9 @@ TEST_P(RecycleMediaSectionTest, CurrentRemoteOnlyRejected) { // create a new transceiver for the media section. ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); auto callee_transceivers = callee->pc()->GetTransceivers(); - ASSERT_EQ(2u, callee_transceivers.size()); - EXPECT_EQ(absl::nullopt, callee_transceivers[0]->mid()); - EXPECT_EQ(first_type_, callee_transceivers[0]->media_type()); - EXPECT_EQ(second_mid, callee_transceivers[1]->mid()); - EXPECT_EQ(second_type_, callee_transceivers[1]->media_type()); + ASSERT_EQ(1u, callee_transceivers.size()); + EXPECT_EQ(second_mid, callee_transceivers[0]->mid()); + EXPECT_EQ(second_type_, callee_transceivers[0]->media_type()); // The answer should have only one media section for the new transceiver. auto answer = callee->CreateAnswer(); @@ -842,8 +838,8 @@ TEST_P(RecycleMediaSectionTest, CurrentRemoteOnlyRejected) { // Setting the remote answer should succeed and not create any new // transceivers. 
ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); - ASSERT_EQ(2u, caller->pc()->GetTransceivers().size()); - ASSERT_EQ(2u, callee->pc()->GetTransceivers().size()); + ASSERT_EQ(1u, caller->pc()->GetTransceivers().size()); + ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); } // Test that recycling works properly when a new transceiver recycles an m= @@ -858,7 +854,7 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalOnlyRejected) { std::string first_mid = *caller_first_transceiver->mid(); ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); auto callee_first_transceiver = callee->pc()->GetTransceivers()[0]; - callee_first_transceiver->Stop(); + callee_first_transceiver->StopInternal(); // The answer will have a rejected m= section. ASSERT_TRUE( @@ -886,11 +882,9 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalOnlyRejected) { // create a new transceiver for the media section. ASSERT_TRUE(caller->SetRemoteDescription(std::move(offer))); auto caller_transceivers = caller->pc()->GetTransceivers(); - ASSERT_EQ(2u, caller_transceivers.size()); - EXPECT_EQ(absl::nullopt, caller_transceivers[0]->mid()); - EXPECT_EQ(first_type_, caller_transceivers[0]->media_type()); - EXPECT_EQ(second_mid, caller_transceivers[1]->mid()); - EXPECT_EQ(second_type_, caller_transceivers[1]->media_type()); + ASSERT_EQ(1u, caller_transceivers.size()); + EXPECT_EQ(second_mid, caller_transceivers[0]->mid()); + EXPECT_EQ(second_type_, caller_transceivers[0]->media_type()); // The answer should have only one media section for the new transceiver. auto answer = caller->CreateAnswer(); @@ -907,8 +901,8 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalOnlyRejected) { // Setting the remote answer should succeed and not create any new // transceivers. 
ASSERT_TRUE(callee->SetRemoteDescription(std::move(answer))); - ASSERT_EQ(2u, callee->pc()->GetTransceivers().size()); - ASSERT_EQ(2u, caller->pc()->GetTransceivers().size()); + ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); + ASSERT_EQ(1u, caller->pc()->GetTransceivers().size()); } // Test that a m= section is *not* recycled if the media section is only @@ -921,7 +915,7 @@ TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNoRemote) { ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); std::string first_mid = *caller_first_transceiver->mid(); - caller_first_transceiver->Stop(); + caller_first_transceiver->StopInternal(); // The reoffer will have a rejected m= section. ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); @@ -959,7 +953,7 @@ TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNotRejectedRemote) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); std::string first_mid = *caller_first_transceiver->mid(); - caller_first_transceiver->Stop(); + caller_first_transceiver->StopInternal(); // The reoffer will have a rejected m= section. ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); @@ -999,7 +993,7 @@ TEST_P(RecycleMediaSectionTest, PendingRemoteRejectedAndNoLocal) { ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); auto callee_first_transceiver = callee->pc()->GetTransceivers()[0]; std::string first_mid = *callee_first_transceiver->mid(); - caller_first_transceiver->Stop(); + caller_first_transceiver->StopInternal(); // The reoffer will have a rejected m= section. 
ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); @@ -1036,7 +1030,7 @@ TEST_P(RecycleMediaSectionTest, PendingRemoteRejectedAndNotRejectedLocal) { ASSERT_EQ(1u, callee->pc()->GetTransceivers().size()); auto callee_first_transceiver = callee->pc()->GetTransceivers()[0]; std::string first_mid = *callee_first_transceiver->mid(); - caller_first_transceiver->Stop(); + caller_first_transceiver->StopInternal(); // The reoffer will have a rejected m= section. ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); @@ -1080,7 +1074,7 @@ TEST_F(PeerConnectionJsepTest, DataChannelDoesNotRecycleMediaSection) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - transceiver->Stop(); + transceiver->StopInternal(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -1367,7 +1361,7 @@ TEST_F(PeerConnectionJsepTest, IncludeMsidEvenIfDirectionHasChanged) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->pc()->GetTransceivers()[0]->SetDirection( + caller->pc()->GetTransceivers()[0]->SetDirectionWithError( RtpTransceiverDirection::kInactive); // The transceiver direction on both sides will turn to inactive. 
@@ -1395,7 +1389,7 @@ TEST_F(PeerConnectionJsepTest, RemoveMsidIfTransceiverStopped) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - transceiver->Stop(); + transceiver->StopInternal(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -1552,8 +1546,9 @@ TEST_F(PeerConnectionJsepTest, CurrentDirectionResetWhenRtpTransceiverStopped) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); ASSERT_TRUE(transceiver->current_direction()); - transceiver->Stop(); - EXPECT_FALSE(transceiver->current_direction()); + transceiver->StopInternal(); + EXPECT_EQ(transceiver->current_direction(), + RtpTransceiverDirection::kStopped); } // Test that you can't set an answer on a PeerConnection before setting the @@ -1797,7 +1792,8 @@ TEST_F(PeerConnectionJsepTest, RollbackImplicitly) { EXPECT_EQ(callee->signaling_state(), PeerConnectionInterface::kHaveRemoteOffer); EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal()); - EXPECT_FALSE(callee->observer()->negotiation_needed()); + EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(callee->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionJsepTest, RollbackImplicitlyNegotatiationNotNeeded) { @@ -1809,13 +1805,15 @@ TEST_F(PeerConnectionJsepTest, RollbackImplicitlyNegotatiationNotNeeded) { caller->AddAudioTrack("a"); callee->AddAudioTrack("b"); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); - callee->observer()->clear_negotiation_needed(); + callee->observer()->clear_legacy_renegotiation_needed(); + callee->observer()->clear_latest_negotiation_needed_event(); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_EQ(callee->signaling_state(), PeerConnectionInterface::kHaveRemoteOffer); EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal()); // No negotiation needed as track got attached in the answer. 
- EXPECT_FALSE(callee->observer()->negotiation_needed()); + EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(callee->observer()->has_negotiation_needed_event()); EXPECT_EQ(callee->observer()->remove_track_events_.size(), 0u); } @@ -1827,13 +1825,16 @@ TEST_F(PeerConnectionJsepTest, RollbackImplicitlyAndNegotiationNeeded) { auto callee = CreatePeerConnection(config); callee->AddAudioTrack("a"); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); - callee->observer()->clear_negotiation_needed(); + callee->observer()->clear_legacy_renegotiation_needed(); + callee->observer()->clear_latest_negotiation_needed_event(); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_EQ(callee->signaling_state(), PeerConnectionInterface::kHaveRemoteOffer); - EXPECT_FALSE(callee->observer()->negotiation_needed()); + EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(callee->observer()->has_negotiation_needed_event()); EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal()); - EXPECT_TRUE(callee->observer()->negotiation_needed()); + EXPECT_TRUE(callee->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(callee->observer()->has_negotiation_needed_event()); EXPECT_EQ(callee->observer()->remove_track_events_.size(), 0u); } @@ -1944,7 +1945,8 @@ TEST_F(PeerConnectionJsepTest, RollbackHasNoEffectOnStableTransceivers) { EXPECT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); // In stable don't add or remove anything. 
- callee->observer()->clear_negotiation_needed(); + callee->observer()->clear_legacy_renegotiation_needed(); + callee->observer()->clear_latest_negotiation_needed_event(); size_t transceiver_count = callee->pc()->GetTransceivers().size(); auto mid_0 = callee->pc()->GetTransceivers()[0]->mid(); auto mid_1 = callee->pc()->GetTransceivers()[1]->mid(); @@ -1954,7 +1956,8 @@ TEST_F(PeerConnectionJsepTest, RollbackHasNoEffectOnStableTransceivers) { EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), mid_0); EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(), mid_1); EXPECT_EQ(callee->observer()->remove_track_events_.size(), 0u); - EXPECT_FALSE(callee->observer()->negotiation_needed()); + EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(callee->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionJsepTest, ImplicitlyRollbackTransceiversWithSameMids) { @@ -2039,7 +2042,7 @@ TEST_F(PeerConnectionJsepTest, RollbackLocalDirectionChange) { EXPECT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); callee->AddAudioTrack("a"); - callee->pc()->GetTransceivers()[0]->SetDirection( + callee->pc()->GetTransceivers()[0]->SetDirectionWithError( RtpTransceiverDirection::kSendOnly); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u); @@ -2063,7 +2066,7 @@ TEST_F(PeerConnectionJsepTest, RollbackRemoteDirectionChange) { EXPECT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); // In stable make remote audio receive only. - caller_transceiver->SetDirection(RtpTransceiverDirection::kRecvOnly); + caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u); // The direction attribute is not modified by the offer. 
@@ -2089,9 +2092,11 @@ TEST_F(PeerConnectionJsepTest, RollbackAfterMultipleSLD) { EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); - callee->observer()->clear_negotiation_needed(); + callee->observer()->clear_legacy_renegotiation_needed(); + callee->observer()->clear_latest_negotiation_needed_event(); EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); - EXPECT_TRUE(callee->observer()->negotiation_needed()); + EXPECT_TRUE(callee->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(callee->observer()->has_negotiation_needed_event()); EXPECT_EQ(callee->pc()->GetTransceivers().size(), 2u); EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt); EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(), absl::nullopt); @@ -2129,4 +2134,86 @@ TEST_F(PeerConnectionJsepTest, RollbackMultipleStreamChanges) { "id_1"); } +TEST_F(PeerConnectionJsepTest, DataChannelImplicitRollback) { + RTCConfiguration config; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + config.enable_implicit_rollback = true; + auto caller = CreatePeerConnection(config); + caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + auto callee = CreatePeerConnection(config); + callee->CreateDataChannel("dummy"); + EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); + EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); + EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal()); + EXPECT_TRUE(callee->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(callee->observer()->has_negotiation_needed_event()); + EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); +} + +TEST_F(PeerConnectionJsepTest, RollbackRemoteDataChannelThenAddTransceiver) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + caller->CreateDataChannel("dummy"); + EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); + 
EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); + callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); +} + +TEST_F(PeerConnectionJsepTest, + RollbackRemoteDataChannelThenAddTransceiverAndDataChannel) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + caller->CreateDataChannel("dummy"); + EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); + EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); + callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + callee->CreateDataChannel("dummy"); + EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); +} + +TEST_F(PeerConnectionJsepTest, RollbackRemoteDataChannelThenAddDataChannel) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + caller->CreateDataChannel("dummy"); + EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); + EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); + callee->CreateDataChannel("dummy"); + EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); +} + +TEST_F(PeerConnectionJsepTest, RollbackRemoteTransceiverThenAddDataChannel) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); + EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); + callee->CreateDataChannel("dummy"); + EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); +} + +TEST_F(PeerConnectionJsepTest, + RollbackRemoteTransceiverThenAddDataChannelAndTransceiver) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); + EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); + callee->CreateDataChannel("dummy"); + 
callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); +} + +TEST_F(PeerConnectionJsepTest, RollbackRtpDataChannel) { + RTCConfiguration config; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + config.enable_rtp_data_channel = true; + auto pc = CreatePeerConnection(config); + pc->CreateDataChannel("dummy"); + auto offer = pc->CreateOffer(); + EXPECT_TRUE(pc->CreateOfferAndSetAsLocal()); + EXPECT_TRUE(pc->SetRemoteDescription(pc->CreateRollback())); + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer))); +} + } // namespace webrtc diff --git a/pc/peer_connection_media_unittest.cc b/pc/peer_connection_media_unittest.cc index c9ffd776d9..f078144d4f 100644 --- a/pc/peer_connection_media_unittest.cc +++ b/pc/peer_connection_media_unittest.cc @@ -290,8 +290,8 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, // Stop both audio and video transceivers on the caller. auto transceivers = caller->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); - transceivers[0]->Stop(); - transceivers[1]->Stop(); + transceivers[0]->StopInternal(); + transceivers[1]->StopInternal(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -388,8 +388,8 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, // Stop both audio and video transceivers on the callee. 
auto transceivers = callee->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); - transceivers[0]->Stop(); - transceivers[1]->Stop(); + transceivers[0]->StopInternal(); + transceivers[1]->StopInternal(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -825,8 +825,10 @@ TEST_P(PeerConnectionMediaTest, AnswerHasDifferentDirectionsForAudioVideo) { } void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) { - const cricket::AudioCodec kComfortNoiseCodec8k(102, "CN", 8000, 0, 1); - const cricket::AudioCodec kComfortNoiseCodec16k(103, "CN", 16000, 0, 1); + const cricket::AudioCodec kComfortNoiseCodec8k(102, cricket::kCnCodecName, + 8000, 0, 1); + const cricket::AudioCodec kComfortNoiseCodec16k(103, cricket::kCnCodecName, + 16000, 0, 1); auto codecs = media_engine->voice().send_codecs(); codecs.push_back(kComfortNoiseCodec8k); @@ -837,7 +839,7 @@ void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) { bool HasAnyComfortNoiseCodecs(const cricket::SessionDescription* desc) { const auto* audio_desc = cricket::GetFirstAudioContentDescription(desc); for (const auto& codec : audio_desc->codecs()) { - if (codec.name == "CN") { + if (codec.name == cricket::kCnCodecName) { return true; } } @@ -1118,10 +1120,11 @@ TEST_P(PeerConnectionMediaTest, MediaEngineErrorPropagatedToClients) { std::string error; ASSERT_FALSE(caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(), &error)); - EXPECT_EQ( - "Failed to set remote answer sdp: Failed to set remote video description " - "send parameters.", - error); + EXPECT_EQ(std::string("Failed to set remote answer sdp: Failed to set remote " + "video description " + "send parameters for m-section with mid='") + + (IsUnifiedPlan() ? 
"1" : "video") + "'.", + error); } // Tests that if the underlying video encoder fails once then subsequent diff --git a/pc/peer_connection_message_handler.cc b/pc/peer_connection_message_handler.cc new file mode 100644 index 0000000000..b3ffcf888d --- /dev/null +++ b/pc/peer_connection_message_handler.cc @@ -0,0 +1,176 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/peer_connection_message_handler.h" + +#include + +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "pc/stats_collector_interface.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +namespace { + +enum { + MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0, + MSG_SET_SESSIONDESCRIPTION_FAILED, + MSG_CREATE_SESSIONDESCRIPTION_FAILED, + MSG_GETSTATS, + MSG_REPORT_USAGE_PATTERN, +}; + +struct SetSessionDescriptionMsg : public rtc::MessageData { + explicit SetSessionDescriptionMsg( + webrtc::SetSessionDescriptionObserver* observer) + : observer(observer) {} + + rtc::scoped_refptr observer; + RTCError error; +}; + +struct CreateSessionDescriptionMsg : public rtc::MessageData { + explicit CreateSessionDescriptionMsg( + webrtc::CreateSessionDescriptionObserver* observer) + : observer(observer) {} + + rtc::scoped_refptr observer; + RTCError error; +}; + +struct GetStatsMsg : public rtc::MessageData { + GetStatsMsg(webrtc::StatsObserver* observer, + StatsCollectorInterface* stats, + webrtc::MediaStreamTrackInterface* track) + : observer(observer), stats(stats), track(track) {} + rtc::scoped_refptr observer; + StatsCollectorInterface* stats; + 
rtc::scoped_refptr track; +}; + +struct RequestUsagePatternMsg : public rtc::MessageData { + explicit RequestUsagePatternMsg(std::function func) + : function(func) {} + std::function function; +}; + +} // namespace + +PeerConnectionMessageHandler::~PeerConnectionMessageHandler() { + // Process all pending notifications in the message queue. If we don't do + // this, requests will linger and not know they succeeded or failed. + rtc::MessageList list; + signaling_thread()->Clear(this, rtc::MQID_ANY, &list); + for (auto& msg : list) { + if (msg.message_id == MSG_CREATE_SESSIONDESCRIPTION_FAILED) { + // Processing CreateOffer() and CreateAnswer() messages ensures their + // observers are invoked even if the PeerConnection is destroyed early. + OnMessage(&msg); + } else { + // TODO(hbos): Consider processing all pending messages. This would mean + // that SetLocalDescription() and SetRemoteDescription() observers are + // informed of successes and failures; this is currently NOT the case. + delete msg.pdata; + } + } +} + +void PeerConnectionMessageHandler::OnMessage(rtc::Message* msg) { + RTC_DCHECK_RUN_ON(signaling_thread()); + switch (msg->message_id) { + case MSG_SET_SESSIONDESCRIPTION_SUCCESS: { + SetSessionDescriptionMsg* param = + static_cast(msg->pdata); + param->observer->OnSuccess(); + delete param; + break; + } + case MSG_SET_SESSIONDESCRIPTION_FAILED: { + SetSessionDescriptionMsg* param = + static_cast(msg->pdata); + param->observer->OnFailure(std::move(param->error)); + delete param; + break; + } + case MSG_CREATE_SESSIONDESCRIPTION_FAILED: { + CreateSessionDescriptionMsg* param = + static_cast(msg->pdata); + param->observer->OnFailure(std::move(param->error)); + delete param; + break; + } + case MSG_GETSTATS: { + GetStatsMsg* param = static_cast(msg->pdata); + StatsReports reports; + param->stats->GetStats(param->track, &reports); + param->observer->OnComplete(reports); + delete param; + break; + } + case MSG_REPORT_USAGE_PATTERN: { + 
RequestUsagePatternMsg* param = + static_cast(msg->pdata); + param->function(); + delete param; + break; + } + default: + RTC_NOTREACHED() << "Not implemented"; + break; + } +} + +void PeerConnectionMessageHandler::PostSetSessionDescriptionSuccess( + SetSessionDescriptionObserver* observer) { + SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); + signaling_thread()->Post(RTC_FROM_HERE, this, + MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg); +} + +void PeerConnectionMessageHandler::PostSetSessionDescriptionFailure( + SetSessionDescriptionObserver* observer, + RTCError&& error) { + RTC_DCHECK(!error.ok()); + SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer); + msg->error = std::move(error); + signaling_thread()->Post(RTC_FROM_HERE, this, + MSG_SET_SESSIONDESCRIPTION_FAILED, msg); +} + +void PeerConnectionMessageHandler::PostCreateSessionDescriptionFailure( + CreateSessionDescriptionObserver* observer, + RTCError error) { + RTC_DCHECK(!error.ok()); + CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer); + msg->error = std::move(error); + signaling_thread()->Post(RTC_FROM_HERE, this, + MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg); +} + +void PeerConnectionMessageHandler::PostGetStats( + StatsObserver* observer, + StatsCollectorInterface* stats, + MediaStreamTrackInterface* track) { + signaling_thread()->Post(RTC_FROM_HERE, this, MSG_GETSTATS, + new GetStatsMsg(observer, stats, track)); +} + +void PeerConnectionMessageHandler::RequestUsagePatternReport( + std::function func, + int delay_ms) { + signaling_thread()->PostDelayed(RTC_FROM_HERE, delay_ms, this, + MSG_REPORT_USAGE_PATTERN, + new RequestUsagePatternMsg(func)); +} + +} // namespace webrtc diff --git a/pc/peer_connection_message_handler.h b/pc/peer_connection_message_handler.h new file mode 100644 index 0000000000..027fbea6c3 --- /dev/null +++ b/pc/peer_connection_message_handler.h @@ -0,0 +1,55 @@ +/* + * Copyright 2020 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ +#define PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ + +#include "api/rtc_error.h" +#include "api/stats_types.h" +#include "rtc_base/message_handler.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +class CreateSessionDescriptionObserver; +class SetSessionDescriptionObserver; +class StatsCollectorInterface; +class StatsObserver; +class MediaStreamTrackInterface; + +class PeerConnectionMessageHandler : public rtc::MessageHandler { + public: + explicit PeerConnectionMessageHandler(rtc::Thread* signaling_thread) + : signaling_thread_(signaling_thread) {} + ~PeerConnectionMessageHandler(); + + // Implements MessageHandler. 
+ void OnMessage(rtc::Message* msg) override; + void PostSetSessionDescriptionSuccess( + SetSessionDescriptionObserver* observer); + void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer, + RTCError&& error); + void PostCreateSessionDescriptionFailure( + CreateSessionDescriptionObserver* observer, + RTCError error); + void PostGetStats(StatsObserver* observer, + StatsCollectorInterface* stats, + MediaStreamTrackInterface* track); + void RequestUsagePatternReport(std::function, int delay_ms); + + private: + rtc::Thread* signaling_thread() const { return signaling_thread_; } + + rtc::Thread* const signaling_thread_; +}; + +} // namespace webrtc + +#endif // PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ diff --git a/pc/peer_connection_rampup_tests.cc b/pc/peer_connection_rampup_tests.cc index b50489d534..cf3b0a27f5 100644 --- a/pc/peer_connection_rampup_tests.cc +++ b/pc/peer_connection_rampup_tests.cc @@ -333,7 +333,7 @@ class PeerConnectionRampUpTest : public ::testing::Test { std::unique_ptr callee_; }; -TEST_F(PeerConnectionRampUpTest, TurnOverTCP) { +TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverTCP) { CreateTurnServer(cricket::ProtocolType::PROTO_TCP); PeerConnectionInterface::IceServer ice_server; std::string ice_server_url = "turn:" + std::string(kTurnInternalAddress) + @@ -354,7 +354,7 @@ TEST_F(PeerConnectionRampUpTest, TurnOverTCP) { RunTest("turn_over_tcp"); } -TEST_F(PeerConnectionRampUpTest, TurnOverUDP) { +TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverUDP) { CreateTurnServer(cricket::ProtocolType::PROTO_UDP); PeerConnectionInterface::IceServer ice_server; std::string ice_server_url = "turn:" + std::string(kTurnInternalAddress) + @@ -375,7 +375,7 @@ TEST_F(PeerConnectionRampUpTest, TurnOverUDP) { RunTest("turn_over_udp"); } -TEST_F(PeerConnectionRampUpTest, TurnOverTLS) { +TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverTLS) { CreateTurnServer(cricket::ProtocolType::PROTO_TLS, kTurnInternalAddress); 
PeerConnectionInterface::IceServer ice_server; std::string ice_server_url = "turns:" + std::string(kTurnInternalAddress) + @@ -397,7 +397,7 @@ TEST_F(PeerConnectionRampUpTest, TurnOverTLS) { RunTest("turn_over_tls"); } -TEST_F(PeerConnectionRampUpTest, UDPPeerToPeer) { +TEST_F(PeerConnectionRampUpTest, Bwe_After_UDPPeerToPeer) { PeerConnectionInterface::RTCConfiguration client_1_config; client_1_config.tcp_candidate_policy = PeerConnection::kTcpCandidatePolicyDisabled; @@ -410,7 +410,7 @@ TEST_F(PeerConnectionRampUpTest, UDPPeerToPeer) { RunTest("udp_peer_to_peer"); } -TEST_F(PeerConnectionRampUpTest, TCPPeerToPeer) { +TEST_F(PeerConnectionRampUpTest, Bwe_After_TCPPeerToPeer) { firewall_socket_server()->set_udp_sockets_enabled(false); ASSERT_TRUE(CreatePeerConnectionWrappers( PeerConnectionInterface::RTCConfiguration(), diff --git a/pc/peer_connection_rtp_unittest.cc b/pc/peer_connection_rtp_unittest.cc index 9e4a816a45..4d6da66943 100644 --- a/pc/peer_connection_rtp_unittest.cc +++ b/pc/peer_connection_rtp_unittest.cc @@ -164,6 +164,28 @@ class PeerConnectionRtpTestUnifiedPlan : public PeerConnectionRtpBaseTest { protected: PeerConnectionRtpTestUnifiedPlan() : PeerConnectionRtpBaseTest(SdpSemantics::kUnifiedPlan) {} + + // Helper to emulate an SFU that rejects an offered media section + // in answer. + bool ExchangeOfferAnswerWhereRemoteStopsTransceiver( + PeerConnectionWrapper* caller, + PeerConnectionWrapper* callee, + size_t mid_to_stop) { + auto offer = caller->CreateOffer(); + caller->SetLocalDescription(CloneSessionDescription(offer.get())); + callee->SetRemoteDescription(std::move(offer)); + EXPECT_LT(mid_to_stop, callee->pc()->GetTransceivers().size()); + // Must use StopInternal in order to do instant reject. 
+ callee->pc()->GetTransceivers()[mid_to_stop]->StopInternal(); + auto answer = callee->CreateAnswer(); + EXPECT_TRUE(answer); + bool set_local_answer = + callee->SetLocalDescription(CloneSessionDescription(answer.get())); + EXPECT_TRUE(set_local_answer); + bool set_remote_answer = caller->SetRemoteDescription(std::move(answer)); + EXPECT_TRUE(set_remote_answer); + return set_remote_answer; + } }; // These tests cover |webrtc::PeerConnectionObserver| callbacks firing upon @@ -370,19 +392,25 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, SetDirectionCallsOnTrack) { auto callee = CreatePeerConnection(); auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - transceiver->SetDirection(RtpTransceiverDirection::kInactive); + EXPECT_TRUE( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive) + .ok()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size()); EXPECT_EQ(0u, callee->observer()->on_track_transceivers_.size()); - transceiver->SetDirection(RtpTransceiverDirection::kSendOnly); + EXPECT_TRUE( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendOnly) + .ok()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size()); EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size()); // If the direction changes but it is still receiving on the remote side, then // OnTrack should not be fired again. 
- transceiver->SetDirection(RtpTransceiverDirection::kSendRecv); + EXPECT_TRUE( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv) + .ok()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size()); EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size()); @@ -401,8 +429,10 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, SetDirectionHoldCallsOnTrackTwice) { EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size()); // Put the call on hold by no longer receiving the track. - callee->pc()->GetTransceivers()[0]->SetDirection( - RtpTransceiverDirection::kInactive); + EXPECT_TRUE(callee->pc() + ->GetTransceivers()[0] + ->SetDirectionWithError(RtpTransceiverDirection::kInactive) + .ok()); ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get())); EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size()); @@ -410,8 +440,10 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, SetDirectionHoldCallsOnTrackTwice) { // Resume the call by changing the direction to recvonly. This should call // OnTrack again on the callee side. 
- callee->pc()->GetTransceivers()[0]->SetDirection( - RtpTransceiverDirection::kRecvOnly); + EXPECT_TRUE(callee->pc() + ->GetTransceivers()[0] + ->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly) + .ok()); ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get())); EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size()); @@ -470,7 +502,9 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, EXPECT_EQ(0u, callee->observer()->remove_track_events_.size()); auto callee_transceiver = callee->pc()->GetTransceivers()[0]; - callee_transceiver->SetDirection(RtpTransceiverDirection::kSendOnly); + EXPECT_TRUE(callee_transceiver + ->SetDirectionWithError(RtpTransceiverDirection::kSendOnly) + .ok()); ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer())); EXPECT_EQ(1u, callee->observer()->add_track_events_.size()); @@ -1133,12 +1167,15 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kInactive; auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); ASSERT_TRUE(caller->AddAudioTrack("a")); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); EXPECT_EQ(RtpTransceiverDirection::kSendOnly, transceiver->direction()); } @@ -1153,12 +1190,15 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kRecvOnly; auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); - 
EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); ASSERT_TRUE(caller->AddAudioTrack("a")); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, transceiver->direction()); } @@ -1182,10 +1222,12 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackErrorIfClosed) { auto audio_track = caller->CreateAudioTrack("a"); caller->pc()->Close(); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); auto result = caller->pc()->AddTrack(audio_track, std::vector()); EXPECT_EQ(RTCErrorType::INVALID_STATE, result.error().type()); - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackErrorIfTrackAlreadyHasSender) { @@ -1194,10 +1236,12 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackErrorIfTrackAlreadyHasSender) { auto audio_track = caller->CreateAudioTrack("a"); ASSERT_TRUE(caller->AddTrack(audio_track)); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); auto result = caller->pc()->AddTrack(audio_track, std::vector()); EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - 
EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } // Unified Plan RemoveTrack tests. @@ -1224,13 +1268,16 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, init.direction = RtpTransceiverDirection::kSendRecv; auto transceiver = caller->AddTransceiver(caller->CreateAudioTrack("a"), init); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); ASSERT_TRUE(caller->pc()->RemoveTrack(transceiver->sender())); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, transceiver->direction()); } @@ -1246,13 +1293,16 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, init.direction = RtpTransceiverDirection::kSendOnly; auto transceiver = caller->AddTransceiver(caller->CreateAudioTrack("a"), init); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); ASSERT_TRUE(caller->pc()->RemoveTrack(transceiver->sender())); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + 
EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); EXPECT_EQ(RtpTransceiverDirection::kInactive, transceiver->direction()); } @@ -1266,9 +1316,11 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RemoveTrackWithNullSenderTrackIsNoOp) { auto transceiver = caller->pc()->GetTransceivers()[0]; ASSERT_TRUE(sender->SetTrack(nullptr)); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); ASSERT_TRUE(caller->pc()->RemoveTrack(sender)); - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, transceiver->direction()); } @@ -1281,9 +1333,11 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RemoveTrackErrorIfClosed) { auto sender = caller->AddAudioTrack("a"); caller->pc()->Close(); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); EXPECT_FALSE(caller->pc()->RemoveTrack(sender)); - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } TEST_F(PeerConnectionRtpTestUnifiedPlan, @@ -1293,9 +1347,11 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, auto sender = caller->AddAudioTrack("a"); ASSERT_TRUE(caller->pc()->RemoveTrack(sender)); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); EXPECT_TRUE(caller->pc()->RemoveTrack(sender)); - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + 
EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } // Test that setting offers that add/remove/add a track repeatedly without @@ -1401,16 +1457,20 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RenegotiationNeededAfterTransceiverSetDirection) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - EXPECT_FALSE(caller->observer()->negotiation_needed()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); - transceiver->SetDirection(RtpTransceiverDirection::kInactive); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); } // Test that OnRenegotiationNeeded is not fired if SetDirection is called on an @@ -1421,9 +1481,11 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - caller->observer()->clear_negotiation_needed(); - transceiver->SetDirection(transceiver->direction()); - EXPECT_FALSE(caller->observer()->negotiation_needed()); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); + transceiver->SetDirectionWithError(transceiver->direction()); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + 
EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); } // Test that OnRenegotiationNeeded is not fired if SetDirection is called on a @@ -1433,11 +1495,140 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, auto caller = CreatePeerConnection(); auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - transceiver->Stop(); + transceiver->StopInternal(); + + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); + transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive); + EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); +} + +// Test that currentDirection returnes "stopped" if the transceiver was stopped. +TEST_F(PeerConnectionRtpTestUnifiedPlan, + CheckStoppedCurrentDirectionOnStoppedTransceiver) { + auto caller = CreatePeerConnection(); + + auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + transceiver->StopInternal(); + + EXPECT_TRUE(transceiver->stopping()); + EXPECT_TRUE(transceiver->stopped()); + EXPECT_EQ(RtpTransceiverDirection::kStopped, + transceiver->current_direction()); +} + +// Test that InvalidState is thrown on a stopping transceiver. +TEST_F(PeerConnectionRtpTestUnifiedPlan, + CheckForInvalidStateOnStoppingTransceiver) { + auto caller = CreatePeerConnection(); + + auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + transceiver->StopStandard(); + + EXPECT_TRUE(transceiver->stopping()); + EXPECT_FALSE(transceiver->stopped()); + EXPECT_EQ( + RTCErrorType::INVALID_STATE, + transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive) + .type()); +} + +// Test that InvalidState is thrown on a stopped transceiver. 
+TEST_F(PeerConnectionRtpTestUnifiedPlan, + CheckForInvalidStateOnStoppedTransceiver) { + auto caller = CreatePeerConnection(); + + auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + transceiver->StopInternal(); + + EXPECT_TRUE(transceiver->stopping()); + EXPECT_TRUE(transceiver->stopped()); + EXPECT_EQ( + RTCErrorType::INVALID_STATE, + transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive) + .type()); +} + +// Test that TypeError is thrown if the direction is set to "stopped". +TEST_F(PeerConnectionRtpTestUnifiedPlan, + CheckForTypeErrorForStoppedOnTransceiver) { + auto caller = CreatePeerConnection(); + + auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + EXPECT_EQ( + RTCErrorType::INVALID_PARAMETER, + transceiver->SetDirectionWithError(RtpTransceiverDirection::kStopped) + .type()); +} + +// Test that you can do createOffer/setLocalDescription with a stopped +// media section. +TEST_F(PeerConnectionRtpTestUnifiedPlan, + SetLocalDescriptionWithStoppedMediaSection) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + callee->pc()->GetTransceivers()[0]->StopStandard(); + ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get())); + EXPECT_EQ(RtpTransceiverDirection::kStopped, + transceiver->current_direction()); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); +} + +TEST_F(PeerConnectionRtpTestUnifiedPlan, + StopAndNegotiateCausesTransceiverToDisappear) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + callee->pc()->GetTransceivers()[0]->StopStandard(); + ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get())); + EXPECT_EQ(RtpTransceiverDirection::kStopped, + 
transceiver->current_direction()); + EXPECT_EQ(0U, caller->pc()->GetTransceivers().size()); + EXPECT_EQ(0U, callee->pc()->GetTransceivers().size()); + EXPECT_EQ(0U, caller->pc()->GetSenders().size()); + EXPECT_EQ(0U, callee->pc()->GetSenders().size()); + EXPECT_EQ(0U, caller->pc()->GetReceivers().size()); + EXPECT_EQ(0U, callee->pc()->GetReceivers().size()); +} - caller->observer()->clear_negotiation_needed(); - transceiver->SetDirection(RtpTransceiverDirection::kInactive); - EXPECT_FALSE(caller->observer()->negotiation_needed()); +TEST_F(PeerConnectionRtpTestUnifiedPlan, + SetLocalDescriptionWorksAfterRepeatedAddRemove) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + auto video_track = caller->CreateVideoTrack("v"); + auto track = caller->CreateAudioTrack("a"); + caller->AddTransceiver(video_track); + auto transceiver = caller->AddTransceiver(track); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + caller->pc()->RemoveTrack(transceiver->sender()); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + caller->AddTrack(track); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + caller->pc()->RemoveTrack(transceiver->sender()); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); +} + +// This is a repro of Chromium bug https://crbug.com/1134686 +TEST_F(PeerConnectionRtpTestUnifiedPlan, + SetLocalDescriptionWorksAfterRepeatedAddRemoveWithRemoteReject) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + auto video_track = caller->CreateVideoTrack("v"); + auto track = caller->CreateAudioTrack("a"); + caller->AddTransceiver(video_track); + auto transceiver = caller->AddTransceiver(track); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + caller->pc()->RemoveTrack(transceiver->sender()); + ExchangeOfferAnswerWhereRemoteStopsTransceiver(caller.get(), callee.get(), 1); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + 
caller->AddTrack(track); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + caller->pc()->RemoveTrack(transceiver->sender()); + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); } // Test that AddTransceiver fails if trying to use unimplemented RTP encoding @@ -1722,6 +1913,19 @@ TEST_F(SdpFormatReceivedTest, ComplexPlanBIsReportedAsComplexPlanB) { ElementsAre(Pair(kSdpFormatReceivedComplexPlanB, 1))); } +TEST_F(SdpFormatReceivedTest, AnswerIsReported) { + auto caller = CreatePeerConnectionWithPlanB(); + caller->AddAudioTrack("audio"); + caller->AddVideoTrack("video"); + auto callee = CreatePeerConnectionWithUnifiedPlan(); + + ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); + ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateAnswer())); + EXPECT_METRIC_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpFormatReceivedAnswer"), + ElementsAre(Pair(kSdpFormatReceivedSimple, 1))); +} + // Sender setups in a call. TEST_P(PeerConnectionRtpTest, CreateTwoSendersWithSameTrack) { @@ -1759,13 +1963,16 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, init.direction = RtpTransceiverDirection::kSendRecv; auto transceiver = caller->AddTransceiver(caller->CreateAudioTrack("a"), init); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - caller->observer()->clear_negotiation_needed(); + caller->observer()->clear_legacy_renegotiation_needed(); + caller->observer()->clear_latest_negotiation_needed_event(); transceiver->sender()->SetStreams({"stream3", "stream4", "stream5"}); - EXPECT_TRUE(caller->observer()->negotiation_needed()); + EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); 
ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); auto callee_streams = callee->pc()->GetReceivers()[0]->streams(); diff --git a/pc/peer_connection_signaling_unittest.cc b/pc/peer_connection_signaling_unittest.cc index 30b11ceaa7..605a1338c6 100644 --- a/pc/peer_connection_signaling_unittest.cc +++ b/pc/peer_connection_signaling_unittest.cc @@ -17,12 +17,14 @@ #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" +#include "api/jsep_session_description.h" #include "api/peer_connection_proxy.h" #include "api/video_codecs/builtin_video_decoder_factory.h" #include "api/video_codecs/builtin_video_encoder_factory.h" #include "pc/peer_connection.h" #include "pc/peer_connection_wrapper.h" #include "pc/sdp_utils.h" +#include "pc/webrtc_sdp.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif @@ -565,30 +567,102 @@ TEST_P(PeerConnectionSignalingTest, CloseCreateOfferAndShutdown) { EXPECT_TRUE(observer->called()); } -TEST_P(PeerConnectionSignalingTest, ImplicitCreateOfferAndShutdown) { +TEST_P(PeerConnectionSignalingTest, + ImplicitCreateOfferAndShutdownWithOldObserver) { auto caller = CreatePeerConnection(); auto observer = MockSetSessionDescriptionObserver::Create(); + caller->pc()->SetLocalDescription(observer.get()); + caller.reset(nullptr); + // The old observer does not get invoked because posted messages are lost. + EXPECT_FALSE(observer->called()); +} + +TEST_P(PeerConnectionSignalingTest, ImplicitCreateOfferAndShutdown) { + auto caller = CreatePeerConnection(); + rtc::scoped_refptr observer( + new FakeSetLocalDescriptionObserver()); caller->pc()->SetLocalDescription(observer); caller.reset(nullptr); + // The new observer gets invoked because it is called immediately. 
+ EXPECT_TRUE(observer->called()); + EXPECT_FALSE(observer->error().ok()); +} + +TEST_P(PeerConnectionSignalingTest, + CloseBeforeImplicitCreateOfferAndShutdownWithOldObserver) { + auto caller = CreatePeerConnection(); + auto observer = MockSetSessionDescriptionObserver::Create(); + caller->pc()->Close(); + caller->pc()->SetLocalDescription(observer.get()); + caller.reset(nullptr); + // The old observer does not get invoked because posted messages are lost. EXPECT_FALSE(observer->called()); } TEST_P(PeerConnectionSignalingTest, CloseBeforeImplicitCreateOfferAndShutdown) { auto caller = CreatePeerConnection(); - auto observer = MockSetSessionDescriptionObserver::Create(); + rtc::scoped_refptr observer( + new FakeSetLocalDescriptionObserver()); caller->pc()->Close(); caller->pc()->SetLocalDescription(observer); caller.reset(nullptr); + // The new observer gets invoked because it is called immediately. + EXPECT_TRUE(observer->called()); + EXPECT_FALSE(observer->error().ok()); +} + +TEST_P(PeerConnectionSignalingTest, + CloseAfterImplicitCreateOfferAndShutdownWithOldObserver) { + auto caller = CreatePeerConnection(); + auto observer = MockSetSessionDescriptionObserver::Create(); + caller->pc()->SetLocalDescription(observer.get()); + caller->pc()->Close(); + caller.reset(nullptr); + // The old observer does not get invoked because posted messages are lost. EXPECT_FALSE(observer->called()); } TEST_P(PeerConnectionSignalingTest, CloseAfterImplicitCreateOfferAndShutdown) { auto caller = CreatePeerConnection(); - auto observer = MockSetSessionDescriptionObserver::Create(); + rtc::scoped_refptr observer( + new FakeSetLocalDescriptionObserver()); caller->pc()->SetLocalDescription(observer); caller->pc()->Close(); caller.reset(nullptr); + // The new observer gets invoked because it is called immediately. 
+ EXPECT_TRUE(observer->called()); + EXPECT_FALSE(observer->error().ok()); +} + +TEST_P(PeerConnectionSignalingTest, + SetLocalDescriptionNewObserverIsInvokedImmediately) { + auto caller = CreatePeerConnection(); + auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); + + rtc::scoped_refptr observer( + new FakeSetLocalDescriptionObserver()); + caller->pc()->SetLocalDescription(std::move(offer), observer); + // The new observer is invoked immediately. + EXPECT_TRUE(observer->called()); + EXPECT_TRUE(observer->error().ok()); +} + +TEST_P(PeerConnectionSignalingTest, + SetLocalDescriptionOldObserverIsInvokedInAPostedMessage) { + auto caller = CreatePeerConnection(); + auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); + + auto observer = MockSetSessionDescriptionObserver::Create(); + caller->pc()->SetLocalDescription(observer, offer.release()); + // The old observer is not invoked immediately. EXPECT_FALSE(observer->called()); + // Process all currently pending messages by waiting for a posted task to run. + bool checkpoint_reached = false; + rtc::Thread::Current()->PostTask( + RTC_FROM_HERE, [&checkpoint_reached] { checkpoint_reached = true; }); + EXPECT_TRUE_WAIT(checkpoint_reached, kWaitTimeout); + // If resolving the observer was pending, it must now have been called. + EXPECT_TRUE(observer->called()); } TEST_P(PeerConnectionSignalingTest, SetRemoteDescriptionExecutesImmediately) { @@ -601,7 +675,7 @@ TEST_P(PeerConnectionSignalingTest, SetRemoteDescriptionExecutesImmediately) { // By not waiting for the observer's callback we can verify that the operation // executed immediately. callee->pc()->SetRemoteDescription(std::move(offer), - new MockSetRemoteDescriptionObserver()); + new FakeSetRemoteDescriptionObserver()); EXPECT_EQ(2u, callee->pc()->GetReceivers().size()); } @@ -620,7 +694,7 @@ TEST_P(PeerConnectionSignalingTest, CreateOfferBlocksSetRemoteDescription) { // asynchronously, when CreateOffer() completes. 
callee->pc()->CreateOffer(offer_observer, RTCOfferAnswerOptions()); callee->pc()->SetRemoteDescription(std::move(offer), - new MockSetRemoteDescriptionObserver()); + new FakeSetRemoteDescriptionObserver()); // CreateOffer() is asynchronous; without message processing this operation // should not have completed. EXPECT_FALSE(offer_observer->called()); @@ -639,7 +713,7 @@ TEST_P(PeerConnectionSignalingTest, auto caller = CreatePeerConnectionWithAudioVideo(); auto observer = MockSetSessionDescriptionObserver::Create(); - caller->pc()->SetLocalDescription(observer); + caller->pc()->SetLocalDescription(observer.get()); // The offer is created asynchronously; message processing is needed for it to // complete. @@ -665,7 +739,7 @@ TEST_P(PeerConnectionSignalingTest, EXPECT_EQ(PeerConnection::kHaveRemoteOffer, callee->signaling_state()); auto observer = MockSetSessionDescriptionObserver::Create(); - callee->pc()->SetLocalDescription(observer); + callee->pc()->SetLocalDescription(observer.get()); // The answer is created asynchronously; message processing is needed for it // to complete. @@ -687,28 +761,27 @@ TEST_P(PeerConnectionSignalingTest, auto callee = CreatePeerConnectionWithAudioVideo(); // SetLocalDescription(), implicitly creating an offer. 
- rtc::scoped_refptr - caller_set_local_description_observer( - new rtc::RefCountedObject()); - caller->pc()->SetLocalDescription(caller_set_local_description_observer); + auto caller_set_local_description_observer = + MockSetSessionDescriptionObserver::Create(); + caller->pc()->SetLocalDescription( + caller_set_local_description_observer.get()); EXPECT_TRUE_WAIT(caller_set_local_description_observer->called(), kWaitTimeout); ASSERT_TRUE(caller->pc()->pending_local_description()); // SetRemoteDescription(offer) - rtc::scoped_refptr - callee_set_remote_description_observer( - new rtc::RefCountedObject()); + auto callee_set_remote_description_observer = + MockSetSessionDescriptionObserver::Create(); callee->pc()->SetRemoteDescription( - callee_set_remote_description_observer.get(), + callee_set_remote_description_observer, CloneSessionDescription(caller->pc()->pending_local_description()) .release()); // SetLocalDescription(), implicitly creating an answer. - rtc::scoped_refptr - callee_set_local_description_observer( - new rtc::RefCountedObject()); - callee->pc()->SetLocalDescription(callee_set_local_description_observer); + auto callee_set_local_description_observer = + MockSetSessionDescriptionObserver::Create(); + callee->pc()->SetLocalDescription( + callee_set_local_description_observer.get()); EXPECT_TRUE_WAIT(callee_set_local_description_observer->called(), kWaitTimeout); // Chaining guarantees SetRemoteDescription() happened before @@ -717,9 +790,8 @@ TEST_P(PeerConnectionSignalingTest, EXPECT_TRUE(callee->pc()->current_local_description()); // SetRemoteDescription(answer) - rtc::scoped_refptr - caller_set_remote_description_observer( - new rtc::RefCountedObject()); + auto caller_set_remote_description_observer = + MockSetSessionDescriptionObserver::Create(); caller->pc()->SetRemoteDescription( caller_set_remote_description_observer, CloneSessionDescription(callee->pc()->current_local_description()) @@ -737,7 +809,7 @@ TEST_P(PeerConnectionSignalingTest, auto 
observer = MockSetSessionDescriptionObserver::Create(); caller->pc()->Close(); - caller->pc()->SetLocalDescription(observer); + caller->pc()->SetLocalDescription(observer.get()); // The operation should fail asynchronously. EXPECT_FALSE(observer->called()); @@ -756,7 +828,7 @@ TEST_P(PeerConnectionSignalingTest, auto caller = CreatePeerConnectionWithAudioVideo(); auto observer = MockSetSessionDescriptionObserver::Create(); - caller->pc()->SetLocalDescription(observer); + caller->pc()->SetLocalDescription(observer.get()); caller->pc()->Close(); // The operation should fail asynchronously. @@ -771,6 +843,64 @@ TEST_P(PeerConnectionSignalingTest, observer->error()); } +TEST_P(PeerConnectionSignalingTest, UnsupportedContentType) { + auto caller = CreatePeerConnection(); + + // Call setRemoteDescription with a m= line we don't understand. + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=bogus 9 FOO 0 8\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=mid:bogusmid\r\n"; + std::unique_ptr remote_description = + webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr); + + EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description))); + + // Assert we respond back with something meaningful. 
+ auto answer = caller->CreateAnswer(); + ASSERT_EQ(answer->description()->contents().size(), 1u); + EXPECT_NE(answer->description() + ->contents()[0] + .media_description() + ->as_unsupported(), + nullptr); + EXPECT_EQ(answer->description() + ->contents()[0] + .media_description() + ->as_unsupported() + ->media_type(), + "bogus"); + EXPECT_TRUE(answer->description()->contents()[0].rejected); + EXPECT_EQ(answer->description()->contents()[0].mid(), "bogusmid"); + EXPECT_EQ( + answer->description()->contents()[0].media_description()->protocol(), + "FOO"); + EXPECT_FALSE( + answer->description()->contents()[0].media_description()->has_codecs()); + + EXPECT_TRUE(caller->SetLocalDescription(std::move(answer))); + + // Assert we keep this in susequent offers. + auto offer = caller->CreateOffer(); + EXPECT_EQ(offer->description() + ->contents()[0] + .media_description() + ->as_unsupported() + ->media_type(), + "bogus"); + EXPECT_TRUE(offer->description()->contents()[0].rejected); + EXPECT_EQ(offer->description()->contents()[0].media_description()->protocol(), + "FOO"); + EXPECT_EQ(offer->description()->contents()[0].mid(), "bogusmid"); + EXPECT_FALSE( + offer->description()->contents()[0].media_description()->has_codecs()); + EXPECT_TRUE(caller->SetLocalDescription(std::move(offer))); +} + INSTANTIATE_TEST_SUITE_P(PeerConnectionSignalingTest, PeerConnectionSignalingTest, Values(SdpSemantics::kPlanB, @@ -788,14 +918,15 @@ class PeerConnectionSignalingUnifiedPlanTest // unique to Unified Plan, but the transceivers used to verify this are only // available in Unified Plan. TEST_F(PeerConnectionSignalingUnifiedPlanTest, - SetLocalDescriptionExecutesImmediately) { + SetLocalDescriptionExecutesImmediatelyUsingOldObserver) { auto caller = CreatePeerConnectionWithAudioVideo(); // This offer will cause transceiver mids to get assigned. 
auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); // By not waiting for the observer's callback we can verify that the operation - // executed immediately. + // executed immediately. The old observer is invoked in a posted message, so + // waiting for it would not ensure synchronicity. RTC_DCHECK(!caller->pc()->GetTransceivers()[0]->mid().has_value()); caller->pc()->SetLocalDescription( new rtc::RefCountedObject(), @@ -803,6 +934,22 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, EXPECT_TRUE(caller->pc()->GetTransceivers()[0]->mid().has_value()); } +TEST_F(PeerConnectionSignalingUnifiedPlanTest, + SetLocalDescriptionExecutesImmediatelyUsingNewObserver) { + auto caller = CreatePeerConnectionWithAudioVideo(); + + // This offer will cause transceiver mids to get assigned. + auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); + + // Verify that mids were assigned without waiting for the observer. (However, + // the new observer should also be invoked synchronously - as is ensured by + // other tests.) 
+ RTC_DCHECK(!caller->pc()->GetTransceivers()[0]->mid().has_value()); + caller->pc()->SetLocalDescription(std::move(offer), + new FakeSetLocalDescriptionObserver()); + EXPECT_TRUE(caller->pc()->GetTransceivers()[0]->mid().has_value()); +} + TEST_F(PeerConnectionSignalingUnifiedPlanTest, SetLocalDescriptionExecutesImmediatelyInsideCreateOfferCallback) { auto caller = CreatePeerConnectionWithAudioVideo(); @@ -889,4 +1036,68 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, ASSERT_EQ(SignalingState::kStable, caller->signaling_state()); } +TEST_F(PeerConnectionSignalingUnifiedPlanTest, + ShouldFireNegotiationNeededWhenNoChangesArePending) { + auto caller = CreatePeerConnection(); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); + auto transceiver = + caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); + EXPECT_TRUE(caller->pc()->ShouldFireNegotiationNeededEvent( + caller->observer()->latest_negotiation_needed_event())); +} + +TEST_F(PeerConnectionSignalingUnifiedPlanTest, + SuppressNegotiationNeededWhenOperationChainIsNotEmpty) { + auto caller = CreatePeerConnection(); + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); + auto transceiver = + caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit()); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); + + rtc::scoped_refptr observer = + new rtc::RefCountedObject(); + caller->pc()->CreateOffer(observer, RTCOfferAnswerOptions()); + // For this test to work, the operation has to be pending, i.e. the observer + // has not yet been invoked. + EXPECT_FALSE(observer->called()); + // Because the Operations Chain is not empty, the event is now suppressed. 
+ EXPECT_FALSE(caller->pc()->ShouldFireNegotiationNeededEvent( + caller->observer()->latest_negotiation_needed_event())); + caller->observer()->clear_latest_negotiation_needed_event(); + + // When the Operations Chain becomes empty again, a new negotiation needed + // event will be generated that is not suppressed. + EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); + EXPECT_TRUE(caller->pc()->ShouldFireNegotiationNeededEvent( + caller->observer()->latest_negotiation_needed_event())); +} + +TEST_F(PeerConnectionSignalingUnifiedPlanTest, + SuppressNegotiationNeededWhenSignalingStateIsNotStable) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); + + EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); + auto transceiver = + callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit()); + EXPECT_TRUE(callee->observer()->has_negotiation_needed_event()); + + // Change signaling state (to "have-remote-offer") by setting a remote offer. + callee->SetRemoteDescription(std::move(offer)); + // Because the signaling state is not "stable", the event is now suppressed. + EXPECT_FALSE(callee->pc()->ShouldFireNegotiationNeededEvent( + callee->observer()->latest_negotiation_needed_event())); + callee->observer()->clear_latest_negotiation_needed_event(); + + // Upon rolling back to "stable", a new negotiation needed event will be + // generated that is not suppressed. 
+ callee->SetLocalDescription(CreateSessionDescription(SdpType::kRollback, "")); + EXPECT_TRUE(callee->observer()->has_negotiation_needed_event()); + EXPECT_TRUE(callee->pc()->ShouldFireNegotiationNeededEvent( + callee->observer()->latest_negotiation_needed_event())); +} + } // namespace webrtc diff --git a/pc/peer_connection_simulcast_unittest.cc b/pc/peer_connection_simulcast_unittest.cc index 42bdae17b9..8822a980f7 100644 --- a/pc/peer_connection_simulcast_unittest.cc +++ b/pc/peer_connection_simulcast_unittest.cc @@ -455,7 +455,7 @@ TEST_F(PeerConnectionSimulcastTests, ServerSendsOfferToReceiveSimulcast) { std::string error; EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error; auto transceiver = remote->pc()->GetTransceivers()[0]; - transceiver->SetDirection(RtpTransceiverDirection::kSendRecv); + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv); EXPECT_TRUE(remote->CreateAnswerAndSetAsLocal()); ValidateTransceiverParameters(transceiver, layers); } @@ -478,7 +478,7 @@ TEST_F(PeerConnectionSimulcastTests, TransceiverIsNotRecycledWithSimulcast) { auto transceivers = remote->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); auto transceiver = transceivers[1]; - transceiver->SetDirection(RtpTransceiverDirection::kSendRecv); + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv); EXPECT_TRUE(remote->CreateAnswerAndSetAsLocal()); ValidateTransceiverParameters(transceiver, layers); } @@ -611,7 +611,7 @@ TEST_F(PeerConnectionSimulcastMetricsTests, IncomingSimulcastIsLogged) { ElementsAre(Pair(kSimulcastApiVersionSpecCompliant, 1))); auto transceiver = remote->pc()->GetTransceivers()[0]; - transceiver->SetDirection(RtpTransceiverDirection::kSendRecv); + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv); EXPECT_TRUE(remote->CreateAnswerAndSetAsLocal()); EXPECT_THAT(LocalDescriptionSamples(), ElementsAre(Pair(kSimulcastApiVersionSpecCompliant, 2))); diff --git 
a/pc/peer_connection_wrapper.cc b/pc/peer_connection_wrapper.cc index 7c0b3391d0..328f5795e2 100644 --- a/pc/peer_connection_wrapper.cc +++ b/pc/peer_connection_wrapper.cc @@ -166,8 +166,8 @@ bool PeerConnectionWrapper::SetRemoteDescription( bool PeerConnectionWrapper::SetRemoteDescription( std::unique_ptr desc, RTCError* error_out) { - rtc::scoped_refptr observer = - new MockSetRemoteDescriptionObserver(); + rtc::scoped_refptr observer = + new FakeSetRemoteDescriptionObserver(); pc()->SetRemoteDescription(std::move(desc), observer); EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); bool ok = observer->error().ok(); diff --git a/pc/proxy_unittest.cc b/pc/proxy_unittest.cc index a00b47ff6b..500828a03e 100644 --- a/pc/proxy_unittest.cc +++ b/pc/proxy_unittest.cc @@ -46,16 +46,16 @@ class Fake : public FakeInterface { return new rtc::RefCountedObject(); } // Used to verify destructor is called on the correct thread. - MOCK_METHOD0(Destroy, void()); + MOCK_METHOD(void, Destroy, ()); - MOCK_METHOD0(VoidMethod0, void()); - MOCK_METHOD0(Method0, std::string()); - MOCK_CONST_METHOD0(ConstMethod0, std::string()); + MOCK_METHOD(void, VoidMethod0, (), (override)); + MOCK_METHOD(std::string, Method0, (), (override)); + MOCK_METHOD(std::string, ConstMethod0, (), (const, override)); - MOCK_METHOD1(Method1, std::string(std::string)); - MOCK_CONST_METHOD1(ConstMethod1, std::string(std::string)); + MOCK_METHOD(std::string, Method1, (std::string), (override)); + MOCK_METHOD(std::string, ConstMethod1, (std::string), (const, override)); - MOCK_METHOD2(Method2, std::string(std::string, std::string)); + MOCK_METHOD(std::string, Method2, (std::string, std::string), (override)); protected: Fake() {} @@ -266,7 +266,7 @@ class FooInterface { class Foo : public FooInterface { public: Foo() {} - MOCK_METHOD0(Bar, void()); + MOCK_METHOD(void, Bar, (), (override)); }; BEGIN_OWNED_PROXY_MAP(Foo) diff --git a/pc/remote_audio_source.cc b/pc/remote_audio_source.cc index 
da00402e41..8ae0612541 100644 --- a/pc/remote_audio_source.cc +++ b/pc/remote_audio_source.cc @@ -18,10 +18,10 @@ #include "absl/algorithm/container.h" #include "api/scoped_refptr.h" #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/strings/string_format.h" #include "rtc_base/thread.h" #include "rtc_base/thread_checker.h" @@ -35,6 +35,11 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface { explicit AudioDataProxy(RemoteAudioSource* source) : source_(source) { RTC_DCHECK(source); } + + AudioDataProxy() = delete; + AudioDataProxy(const AudioDataProxy&) = delete; + AudioDataProxy& operator=(const AudioDataProxy&) = delete; + ~AudioDataProxy() override { source_->OnAudioChannelGone(); } // AudioSinkInterface implementation. @@ -44,8 +49,6 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface { private: const rtc::scoped_refptr source_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioDataProxy); }; RemoteAudioSource::RemoteAudioSource(rtc::Thread* worker_thread) @@ -102,6 +105,8 @@ bool RemoteAudioSource::remote() const { void RemoteAudioSource::SetVolume(double volume) { RTC_DCHECK_GE(volume, 0); RTC_DCHECK_LE(volume, 10); + RTC_LOG(LS_INFO) << rtc::StringFormat("RAS::%s({volume=%.2f})", __func__, + volume); for (auto* observer : audio_observers_) { observer->OnSetVolume(volume); } @@ -127,7 +132,7 @@ void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) { return; } - rtc::CritScope lock(&sink_lock_); + MutexLock lock(&sink_lock_); RTC_DCHECK(!absl::c_linear_search(sinks_, sink)); sinks_.push_back(sink); } @@ -136,13 +141,13 @@ void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) { RTC_DCHECK(main_thread_->IsCurrent()); RTC_DCHECK(sink); - rtc::CritScope lock(&sink_lock_); + MutexLock lock(&sink_lock_); sinks_.remove(sink); } void RemoteAudioSource::OnData(const 
AudioSinkInterface::Data& audio) { // Called on the externally-owned audio callback thread, via/from webrtc. - rtc::CritScope lock(&sink_lock_); + MutexLock lock(&sink_lock_); for (auto* sink : sinks_) { // When peerconnection acts as an audio source, it should not provide // absolute capture timestamp. diff --git a/pc/remote_audio_source.h b/pc/remote_audio_source.h index 15dc75b511..9ec09165cf 100644 --- a/pc/remote_audio_source.h +++ b/pc/remote_audio_source.h @@ -18,8 +18,8 @@ #include "api/call/audio_sink.h" #include "api/notifier.h" #include "pc/channel.h" -#include "rtc_base/critical_section.h" #include "rtc_base/message_handler.h" +#include "rtc_base/synchronization/mutex.h" namespace rtc { struct Message; @@ -69,7 +69,7 @@ class RemoteAudioSource : public Notifier, rtc::Thread* const main_thread_; rtc::Thread* const worker_thread_; std::list audio_observers_; - rtc::CriticalSection sink_lock_; + Mutex sink_lock_; std::list sinks_; SourceState state_; }; diff --git a/pc/rtc_stats_collector.cc b/pc/rtc_stats_collector.cc index 116b4ba497..085af02ee2 100644 --- a/pc/rtc_stats_collector.cc +++ b/pc/rtc_stats_collector.cc @@ -10,6 +10,7 @@ #include "pc/rtc_stats_collector.h" +#include #include #include #include @@ -24,6 +25,7 @@ #include "p2p/base/port.h" #include "pc/peer_connection.h" #include "pc/rtc_stats_traversal.h" +#include "pc/webrtc_sdp.h" #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" @@ -182,6 +184,10 @@ const char* DtlsTransportStateToRTCDtlsTransportState( const char* NetworkAdapterTypeToStatsType(rtc::AdapterType type) { switch (type) { case rtc::ADAPTER_TYPE_CELLULAR: + case rtc::ADAPTER_TYPE_CELLULAR_2G: + case rtc::ADAPTER_TYPE_CELLULAR_3G: + case rtc::ADAPTER_TYPE_CELLULAR_4G: + case rtc::ADAPTER_TYPE_CELLULAR_5G: return RTCNetworkType::kCellular; case rtc::ADAPTER_TYPE_ETHERNET: return RTCNetworkType::kEthernet; @@ -210,6 +216,7 @@ const char* 
QualityLimitationReasonToRTCQualityLimitationReason( case QualityLimitationReason::kOther: return RTCQualityLimitationReason::kOther; } + RTC_CHECK_NOTREACHED(); } double DoubleAudioLevelFromIntAudioLevel(int audio_level) { @@ -235,6 +242,14 @@ std::unique_ptr CodecStatsFromRtpCodecParameters( if (codec_params.clock_rate) { codec_stats->clock_rate = static_cast(*codec_params.clock_rate); } + if (codec_params.num_channels) { + codec_stats->channels = *codec_params.num_channels; + } + + rtc::StringBuilder fmtp; + if (WriteFmtpParameters(codec_params.parameters, &fmtp)) { + codec_stats->sdp_fmtp_line = fmtp.Release(); + } return codec_stats; } @@ -277,6 +292,27 @@ void SetInboundRTPStreamStatsFromVoiceReceiverInfo( } inbound_audio->jitter = static_cast(voice_receiver_info.jitter_ms) / rtc::kNumMillisecsPerSec; + inbound_audio->jitter_buffer_delay = + voice_receiver_info.jitter_buffer_delay_seconds; + inbound_audio->jitter_buffer_emitted_count = + voice_receiver_info.jitter_buffer_emitted_count; + inbound_audio->total_samples_received = + voice_receiver_info.total_samples_received; + inbound_audio->concealed_samples = voice_receiver_info.concealed_samples; + inbound_audio->silent_concealed_samples = + voice_receiver_info.silent_concealed_samples; + inbound_audio->concealment_events = voice_receiver_info.concealment_events; + inbound_audio->inserted_samples_for_deceleration = + voice_receiver_info.inserted_samples_for_deceleration; + inbound_audio->removed_samples_for_acceleration = + voice_receiver_info.removed_samples_for_acceleration; + if (voice_receiver_info.audio_level >= 0) { + inbound_audio->audio_level = + DoubleAudioLevelFromIntAudioLevel(voice_receiver_info.audio_level); + } + inbound_audio->total_audio_energy = voice_receiver_info.total_output_energy; + inbound_audio->total_samples_duration = + voice_receiver_info.total_output_duration; // |fir_count|, |pli_count| and |sli_count| are only valid for video and are // purposefully left undefined for audio. 
if (voice_receiver_info.last_packet_received_timestamp_ms) { @@ -313,8 +349,22 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( static_cast(video_receiver_info.plis_sent); inbound_video->nack_count = static_cast(video_receiver_info.nacks_sent); + inbound_video->frames_received = video_receiver_info.frames_received; inbound_video->frames_decoded = video_receiver_info.frames_decoded; + inbound_video->frames_dropped = video_receiver_info.frames_dropped; + inbound_video->frames_rendered = video_receiver_info.frames_rendered; inbound_video->key_frames_decoded = video_receiver_info.key_frames_decoded; + if (video_receiver_info.frame_width > 0) { + inbound_video->frame_width = + static_cast(video_receiver_info.frame_width); + } + if (video_receiver_info.frame_height > 0) { + inbound_video->frame_height = + static_cast(video_receiver_info.frame_height); + } + if (video_receiver_info.framerate_rcvd > 0) { + inbound_video->frames_per_second = video_receiver_info.framerate_rcvd; + } if (video_receiver_info.qp_sum) inbound_video->qp_sum = *video_receiver_info.qp_sum; inbound_video->total_decode_time = @@ -407,6 +457,19 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( rtc::kNumMillisecsPerSec; outbound_video->total_encoded_bytes_target = video_sender_info.total_encoded_bytes_target; + if (video_sender_info.send_frame_width > 0) { + outbound_video->frame_width = + static_cast(video_sender_info.send_frame_width); + } + if (video_sender_info.send_frame_height > 0) { + outbound_video->frame_height = + static_cast(video_sender_info.send_frame_height); + } + if (video_sender_info.framerate_sent > 0) { + outbound_video->frames_per_second = video_sender_info.framerate_sent; + } + outbound_video->frames_sent = video_sender_info.frames_sent; + outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent; outbound_video->total_packet_send_delay = static_cast(video_sender_info.total_packet_send_delay_ms) / rtc::kNumMillisecsPerSec; @@ -423,13 +486,16 @@ void 
SetOutboundRTPStreamStatsFromVideoSenderInfo( outbound_video->encoder_implementation = video_sender_info.encoder_implementation_name; } + if (video_sender_info.rid) { + outbound_video->rid = *video_sender_info.rid; + } } std::unique_ptr ProduceRemoteInboundRtpStreamStatsFromReportBlockData( const ReportBlockData& report_block_data, cricket::MediaType media_type, - std::map outbound_rtps, + const std::map& outbound_rtps, const RTCStatsReport& report) { const auto& report_block = report_block_data.report_block(); // RTCStats' timestamp generally refers to when the metric was sampled, but @@ -638,6 +704,8 @@ ProduceMediaStreamTrackStatsFromVoiceReceiverInfo( voice_receiver_info.delayed_packet_outage_samples; audio_track_stats->relative_packet_arrival_delay = voice_receiver_info.relative_packet_arrival_delay_seconds; + audio_track_stats->jitter_buffer_target_delay = + voice_receiver_info.jitter_buffer_target_delay_seconds; audio_track_stats->interruption_count = voice_receiver_info.interruption_count >= 0 ? voice_receiver_info.interruption_count @@ -950,8 +1018,10 @@ RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc, RTC_DCHECK(worker_thread_); RTC_DCHECK(network_thread_); RTC_DCHECK_GE(cache_lifetime_us_, 0); - pc_->SignalDataChannelCreated().connect( - this, &RTCStatsCollector::OnDataChannelCreated); + pc_->SignalRtpDataChannelCreated().connect( + this, &RTCStatsCollector::OnRtpDataChannelCreated); + pc_->SignalSctpDataChannelCreated().connect( + this, &RTCStatsCollector::OnSctpDataChannelCreated); } RTCStatsCollector::~RTCStatsCollector() { @@ -1008,7 +1078,7 @@ void RTCStatsCollector::GetStatsReportInternal( // Prepare |transceiver_stats_infos_| for use in // |ProducePartialResultsOnNetworkThread| and // |ProducePartialResultsOnSignalingThread|. 
- transceiver_stats_infos_ = PrepareTransceiverStatsInfos_s(); + transceiver_stats_infos_ = PrepareTransceiverStatsInfos_s_w(); // Prepare |transport_names_| for use in // |ProducePartialResultsOnNetworkThread|. transport_names_ = PrepareTransportNames_s(); @@ -1017,6 +1087,10 @@ void RTCStatsCollector::GetStatsReportInternal( // thread. // TODO(holmer): To avoid the hop we could move BWE and BWE stats to the // network thread, where it more naturally belongs. + // TODO(https://crbug.com/webrtc/11767): In the meantime we can piggyback on + // the blocking-invoke that is already performed in + // PrepareTransceiverStatsInfos_s_w() so that we can call GetCallStats() + // without additional blocking-invokes. call_stats_ = pc_->GetCallStats(); // Don't touch |network_report_| on the signaling thread until @@ -1046,6 +1120,8 @@ void RTCStatsCollector::WaitForPendingRequest() { void RTCStatsCollector::ProducePartialResultsOnSignalingThread( int64_t timestamp_us) { RTC_DCHECK(signaling_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + partial_report_ = RTCStatsReport::Create(timestamp_us); ProducePartialResultsOnSignalingThreadImpl(timestamp_us, @@ -1063,6 +1139,8 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl( int64_t timestamp_us, RTCStatsReport* partial_report) { RTC_DCHECK(signaling_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + ProduceDataChannelStats_s(timestamp_us, partial_report); ProduceMediaStreamStats_s(timestamp_us, partial_report); ProduceMediaStreamTrackStats_s(timestamp_us, partial_report); @@ -1073,6 +1151,8 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl( void RTCStatsCollector::ProducePartialResultsOnNetworkThread( int64_t timestamp_us) { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + // Touching |network_report_| on this thread is safe by this method because // 
|network_report_event_| is reset before this method is invoked. network_report_ = RTCStatsReport::Create(timestamp_us); @@ -1100,6 +1180,8 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( const std::map& transport_cert_stats, RTCStatsReport* partial_report) { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + ProduceCertificateStats_n(timestamp_us, transport_cert_stats, partial_report); ProduceCodecStats_n(timestamp_us, transceiver_stats_infos_, partial_report); ProduceIceCandidateAndPairStats_n(timestamp_us, transport_stats_by_name, @@ -1186,6 +1268,8 @@ void RTCStatsCollector::ProduceCertificateStats_n( const std::map& transport_cert_stats, RTCStatsReport* report) const { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const auto& transport_cert_stats_pair : transport_cert_stats) { if (transport_cert_stats_pair.second.local) { ProduceCertificateStatsFromSSLCertificateStats( @@ -1203,6 +1287,8 @@ void RTCStatsCollector::ProduceCodecStats_n( const std::vector& transceiver_stats_infos, RTCStatsReport* report) const { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const auto& stats : transceiver_stats_infos) { if (!stats.mid) { continue; @@ -1243,22 +1329,22 @@ void RTCStatsCollector::ProduceCodecStats_n( void RTCStatsCollector::ProduceDataChannelStats_s( int64_t timestamp_us, RTCStatsReport* report) const { - RTC_DCHECK(signaling_thread_->IsCurrent()); - for (const rtc::scoped_refptr& data_channel : - pc_->sctp_data_channels()) { + RTC_DCHECK_RUN_ON(signaling_thread_); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::vector data_stats = pc_->GetDataChannelStats(); + for (const auto& stats : data_stats) { std::unique_ptr data_channel_stats( new RTCDataChannelStats( - "RTCDataChannel_" + rtc::ToString(data_channel->internal_id()), + "RTCDataChannel_" 
+ rtc::ToString(stats.internal_id), timestamp_us)); - data_channel_stats->label = data_channel->label(); - data_channel_stats->protocol = data_channel->protocol(); - data_channel_stats->datachannelid = data_channel->id(); - data_channel_stats->state = - DataStateToRTCDataChannelState(data_channel->state()); - data_channel_stats->messages_sent = data_channel->messages_sent(); - data_channel_stats->bytes_sent = data_channel->bytes_sent(); - data_channel_stats->messages_received = data_channel->messages_received(); - data_channel_stats->bytes_received = data_channel->bytes_received(); + data_channel_stats->label = std::move(stats.label); + data_channel_stats->protocol = std::move(stats.protocol); + data_channel_stats->data_channel_identifier = stats.id; + data_channel_stats->state = DataStateToRTCDataChannelState(stats.state); + data_channel_stats->messages_sent = stats.messages_sent; + data_channel_stats->bytes_sent = stats.bytes_sent; + data_channel_stats->messages_received = stats.messages_received; + data_channel_stats->bytes_received = stats.bytes_received; report->AddStats(std::move(data_channel_stats)); } } @@ -1270,6 +1356,8 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( const Call::Stats& call_stats, RTCStatsReport* report) const { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const auto& entry : transport_stats_by_name) { const std::string& transport_name = entry.first; const cricket::TransportStats& transport_stats = entry.second; @@ -1350,6 +1438,7 @@ void RTCStatsCollector::ProduceMediaStreamStats_s( int64_t timestamp_us, RTCStatsReport* report) const { RTC_DCHECK(signaling_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map> track_ids; @@ -1386,6 +1475,8 @@ void RTCStatsCollector::ProduceMediaStreamTrackStats_s( int64_t timestamp_us, RTCStatsReport* report) const { RTC_DCHECK(signaling_thread_->IsCurrent()); + 
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos_) { std::vector> senders; for (const auto& sender : stats.transceiver->senders()) { @@ -1407,6 +1498,8 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( int64_t timestamp_us, RTCStatsReport* report) const { RTC_DCHECK(signaling_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const RtpTransceiverStatsInfo& transceiver_stats_info : transceiver_stats_infos_) { const auto& track_media_info_map = @@ -1488,6 +1581,8 @@ void RTCStatsCollector::ProducePeerConnectionStats_s( int64_t timestamp_us, RTCStatsReport* report) const { RTC_DCHECK(signaling_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::unique_ptr stats( new RTCPeerConnectionStats("RTCPeerConnection", timestamp_us)); stats->data_channels_opened = internal_record_.data_channels_opened; @@ -1500,6 +1595,7 @@ void RTCStatsCollector::ProduceRTPStreamStats_n( const std::vector& transceiver_stats_infos, RTCStatsReport* report) const { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos) { if (stats.media_type == cricket::MEDIA_TYPE_AUDIO) { @@ -1516,6 +1612,9 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( int64_t timestamp_us, const RtpTransceiverStatsInfo& stats, RTCStatsReport* report) const { + RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + if (!stats.mid || !stats.transport_name) { return; } @@ -1584,8 +1683,8 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( track_media_info_map.voice_media_info()->senders) { for (const auto& report_block_data : voice_sender_info.report_block_datas) { report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData( - report_block_data, cricket::MEDIA_TYPE_AUDIO, - 
std::move(audio_outbound_rtps), *report)); + report_block_data, cricket::MEDIA_TYPE_AUDIO, audio_outbound_rtps, + *report)); } } } @@ -1594,6 +1693,9 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( int64_t timestamp_us, const RtpTransceiverStatsInfo& stats, RTCStatsReport* report) const { + RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + if (!stats.mid || !stats.transport_name) { return; } @@ -1661,8 +1763,8 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( track_media_info_map.video_media_info()->senders) { for (const auto& report_block_data : video_sender_info.report_block_datas) { report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData( - report_block_data, cricket::MEDIA_TYPE_VIDEO, - std::move(video_outbound_rtps), *report)); + report_block_data, cricket::MEDIA_TYPE_VIDEO, video_outbound_rtps, + *report)); } } } @@ -1674,6 +1776,8 @@ void RTCStatsCollector::ProduceTransportStats_n( const std::map& transport_cert_stats, RTCStatsReport* report) const { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const auto& entry : transport_stats_by_name) { const std::string& transport_name = entry.first; const cricket::TransportStats& transport_stats = entry.second; @@ -1713,7 +1817,9 @@ void RTCStatsCollector::ProduceTransportStats_n( transport_name, channel_stats.component), timestamp_us)); transport_stats->bytes_sent = 0; + transport_stats->packets_sent = 0; transport_stats->bytes_received = 0; + transport_stats->packets_received = 0; transport_stats->dtls_state = DtlsTransportStateToRTCDtlsTransportState(channel_stats.dtls_state); transport_stats->selected_candidate_pair_changes = @@ -1721,7 +1827,10 @@ void RTCStatsCollector::ProduceTransportStats_n( for (const cricket::ConnectionInfo& info : channel_stats.ice_transport_stats.connection_infos) { *transport_stats->bytes_sent += info.sent_total_bytes; + 
*transport_stats->packets_sent += + info.sent_total_packets - info.sent_discarded_packets; *transport_stats->bytes_received += info.recv_total_bytes; + *transport_stats->packets_received += info.packets_received; if (info.best_connection) { transport_stats->selected_candidate_pair_id = RTCIceCandidatePairStatsIDFromConnectionInfo(info); @@ -1765,6 +1874,8 @@ RTCStatsCollector::PrepareTransportCertificateStats_n( const std::map& transport_stats_by_name) const { RTC_DCHECK(network_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::map transport_cert_stats; for (const auto& entry : transport_stats_by_name) { const std::string& transport_name = entry.first; @@ -1789,9 +1900,10 @@ RTCStatsCollector::PrepareTransportCertificateStats_n( } std::vector -RTCStatsCollector::PrepareTransceiverStatsInfos_s() const { - std::vector transceiver_stats_infos; +RTCStatsCollector::PrepareTransceiverStatsInfos_s_w() const { + RTC_DCHECK(signaling_thread_->IsCurrent()); + std::vector transceiver_stats_infos; // These are used to invoke GetStats for all the media channels together in // one worker thread hop. std::map> video_stats; - for (const auto& transceiver : pc_->GetTransceiversInternal()) { - cricket::MediaType media_type = transceiver->media_type(); - - // Prepare stats entry. The TrackMediaInfoMap will be filled in after the - // stats have been fetched on the worker thread. - transceiver_stats_infos.emplace_back(); - RtpTransceiverStatsInfo& stats = transceiver_stats_infos.back(); - stats.transceiver = transceiver->internal(); - stats.media_type = media_type; - - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (!channel) { - // The remaining fields require a BaseChannel. 
- continue; - } + { + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - stats.mid = channel->content_name(); - stats.transport_name = channel->transport_name(); - - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - auto* voice_channel = static_cast(channel); - RTC_DCHECK(voice_stats.find(voice_channel->media_channel()) == - voice_stats.end()); - voice_stats[voice_channel->media_channel()] = - std::make_unique(); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - auto* video_channel = static_cast(channel); - RTC_DCHECK(video_stats.find(video_channel->media_channel()) == - video_stats.end()); - video_stats[video_channel->media_channel()] = - std::make_unique(); - } else { - RTC_NOTREACHED(); + for (const auto& transceiver : pc_->GetTransceiversInternal()) { + cricket::MediaType media_type = transceiver->media_type(); + + // Prepare stats entry. The TrackMediaInfoMap will be filled in after the + // stats have been fetched on the worker thread. + transceiver_stats_infos.emplace_back(); + RtpTransceiverStatsInfo& stats = transceiver_stats_infos.back(); + stats.transceiver = transceiver->internal(); + stats.media_type = media_type; + + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (!channel) { + // The remaining fields require a BaseChannel. 
+ continue; + } + + stats.mid = channel->content_name(); + stats.transport_name = channel->transport_name(); + + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + auto* voice_channel = static_cast(channel); + RTC_DCHECK(voice_stats.find(voice_channel->media_channel()) == + voice_stats.end()); + voice_stats[voice_channel->media_channel()] = + std::make_unique(); + } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + auto* video_channel = static_cast(channel); + RTC_DCHECK(video_stats.find(video_channel->media_channel()) == + video_stats.end()); + video_stats[video_channel->media_channel()] = + std::make_unique(); + } else { + RTC_NOTREACHED(); + } } } - // Call GetStats for all media channels together on the worker thread in one - // hop. + // We jump to the worker thread and call GetStats() on each media channel. At + // the same time we construct the TrackMediaInfoMaps, which also needs info + // from the worker thread. This minimizes the number of thread jumps. worker_thread_->Invoke(RTC_FROM_HERE, [&] { + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + for (const auto& entry : voice_stats) { - if (!entry.first->GetStats(entry.second.get())) { + if (!entry.first->GetStats(entry.second.get(), + /*get_and_clear_legacy_stats=*/false)) { RTC_LOG(LS_WARNING) << "Failed to get voice stats."; } } @@ -1850,46 +1970,49 @@ RTCStatsCollector::PrepareTransceiverStatsInfos_s() const { RTC_LOG(LS_WARNING) << "Failed to get video stats."; } } - }); - // Create the TrackMediaInfoMap for each transceiver stats object. 
- for (auto& stats : transceiver_stats_infos) { - auto transceiver = stats.transceiver; - std::unique_ptr voice_media_info; - std::unique_ptr video_media_info; - if (transceiver->channel()) { - cricket::MediaType media_type = transceiver->media_type(); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - auto* voice_channel = - static_cast(transceiver->channel()); - RTC_DCHECK(voice_stats[voice_channel->media_channel()]); - voice_media_info = - std::move(voice_stats[voice_channel->media_channel()]); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - auto* video_channel = - static_cast(transceiver->channel()); - RTC_DCHECK(video_stats[video_channel->media_channel()]); - video_media_info = - std::move(video_stats[video_channel->media_channel()]); + // Create the TrackMediaInfoMap for each transceiver stats object. + for (auto& stats : transceiver_stats_infos) { + auto transceiver = stats.transceiver; + std::unique_ptr voice_media_info; + std::unique_ptr video_media_info; + if (transceiver->channel()) { + cricket::MediaType media_type = transceiver->media_type(); + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + auto* voice_channel = + static_cast(transceiver->channel()); + RTC_DCHECK(voice_stats[voice_channel->media_channel()]); + voice_media_info = + std::move(voice_stats[voice_channel->media_channel()]); + } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + auto* video_channel = + static_cast(transceiver->channel()); + RTC_DCHECK(video_stats[video_channel->media_channel()]); + video_media_info = + std::move(video_stats[video_channel->media_channel()]); + } } + std::vector> senders; + for (const auto& sender : transceiver->senders()) { + senders.push_back(sender->internal()); + } + std::vector> receivers; + for (const auto& receiver : transceiver->receivers()) { + receivers.push_back(receiver->internal()); + } + stats.track_media_info_map = std::make_unique( + std::move(voice_media_info), std::move(video_media_info), senders, + receivers); } - std::vector> 
senders; - for (const auto& sender : transceiver->senders()) { - senders.push_back(sender->internal()); - } - std::vector> receivers; - for (const auto& receiver : transceiver->receivers()) { - receivers.push_back(receiver->internal()); - } - stats.track_media_info_map = std::make_unique( - std::move(voice_media_info), std::move(video_media_info), senders, - receivers); - } + }); return transceiver_stats_infos; } std::set RTCStatsCollector::PrepareTransportNames_s() const { + RTC_DCHECK(signaling_thread_->IsCurrent()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::set transport_names; for (const auto& transceiver : pc_->GetTransceiversInternal()) { if (transceiver->internal()->channel()) { @@ -1906,12 +2029,17 @@ std::set RTCStatsCollector::PrepareTransportNames_s() const { return transport_names; } -void RTCStatsCollector::OnDataChannelCreated(DataChannel* channel) { +void RTCStatsCollector::OnRtpDataChannelCreated(RtpDataChannel* channel) { + channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened); + channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed); +} + +void RTCStatsCollector::OnSctpDataChannelCreated(SctpDataChannel* channel) { channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened); channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed); } -void RTCStatsCollector::OnDataChannelOpened(DataChannel* channel) { +void RTCStatsCollector::OnDataChannelOpened(DataChannelInterface* channel) { RTC_DCHECK(signaling_thread_->IsCurrent()); bool result = internal_record_.opened_data_channels .insert(reinterpret_cast(channel)) @@ -1920,7 +2048,7 @@ void RTCStatsCollector::OnDataChannelOpened(DataChannel* channel) { RTC_DCHECK(result); } -void RTCStatsCollector::OnDataChannelClosed(DataChannel* channel) { +void RTCStatsCollector::OnDataChannelClosed(DataChannelInterface* channel) { RTC_DCHECK(signaling_thread_->IsCurrent()); // Only channels that have been fully 
opened (and have increased the // |data_channels_opened_| counter) increase the closed counter. diff --git a/pc/rtc_stats_collector.h b/pc/rtc_stats_collector.h index cd5ec21041..e1bc27d9e5 100644 --- a/pc/rtc_stats_collector.h +++ b/pc/rtc_stats_collector.h @@ -24,7 +24,7 @@ #include "api/stats/rtcstats_objects.h" #include "call/call.h" #include "media/base/media_channel.h" -#include "pc/data_channel.h" +#include "pc/data_channel_utils.h" #include "pc/peer_connection_internal.h" #include "pc/track_media_info_map.h" #include "rtc_base/event.h" @@ -215,7 +215,7 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface, PrepareTransportCertificateStats_n( const std::map& transport_stats_by_name) const; - std::vector PrepareTransceiverStatsInfos_s() const; + std::vector PrepareTransceiverStatsInfos_s_w() const; std::set PrepareTransportNames_s() const; // Stats gathering on a particular thread. @@ -226,10 +226,11 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface, void MergeNetworkReport_s(); // Slots for signals (sigslot) that are wired up to |pc_|. - void OnDataChannelCreated(DataChannel* channel); + void OnRtpDataChannelCreated(RtpDataChannel* channel); + void OnSctpDataChannelCreated(SctpDataChannel* channel); // Slots for signals (sigslot) that are wired up to |channel|. 
- void OnDataChannelOpened(DataChannel* channel); - void OnDataChannelClosed(DataChannel* channel); + void OnDataChannelOpened(DataChannelInterface* channel); + void OnDataChannelClosed(DataChannelInterface* channel); PeerConnectionInternal* const pc_; rtc::Thread* const signaling_thread_; diff --git a/pc/rtc_stats_collector_unittest.cc b/pc/rtc_stats_collector_unittest.cc index 59d2e5d10a..7c19c3cbb6 100644 --- a/pc/rtc_stats_collector_unittest.cc +++ b/pc/rtc_stats_collector_unittest.cc @@ -22,6 +22,7 @@ #include "absl/memory/memory.h" #include "absl/strings/str_replace.h" +#include "api/media_stream_track.h" #include "api/rtp_parameters.h" #include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" @@ -31,7 +32,7 @@ #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" #include "pc/media_stream.h" -#include "pc/media_stream_track.h" +#include "pc/test/fake_data_channel_provider.h" #include "pc/test/fake_peer_connection_for_stats.h" #include "pc/test/mock_data_channel.h" #include "pc/test/mock_rtp_receiver_internal.h" @@ -43,6 +44,7 @@ #include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/strings/json.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" using ::testing::AtLeast; @@ -248,6 +250,12 @@ class FakeVideoTrackSourceForStats : public VideoTrackSourceInterface { void AddOrUpdateSink(rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) override {} void RemoveSink(rtc::VideoSinkInterface* sink) override {} + bool SupportsEncodedOutput() const override { return false; } + void GenerateKeyFrame() override {} + void AddEncodedSink( + rtc::VideoSinkInterface* sink) override {} + void RemoveEncodedSink( + rtc::VideoSinkInterface* sink) override {} private: int input_width_; @@ -519,6 +527,7 @@ class RTCStatsCollectorWrapper { MediaStreamTrackInterface::kVideoKind); video_media_info.senders.push_back(video_sender_info); + 
video_media_info.aggregated_senders.push_back(video_sender_info); rtc::scoped_refptr rtp_sender = CreateMockSender( cricket::MEDIA_TYPE_VIDEO, rtc::scoped_refptr(local_video_track), @@ -641,6 +650,7 @@ class RTCStatsCollectorTest : public ::testing::Test { cricket::SsrcSenderInfo()); video_media_info.senders[0].local_stats[0].ssrc = 3; video_media_info.senders[0].codec_payload_type = send_codec.payload_type; + video_media_info.aggregated_senders.push_back(video_media_info.senders[0]); // inbound-rtp graph.inbound_rtp_id = "RTCInboundRTPVideoStream_4"; video_media_info.receivers.push_back(cricket::VideoReceiverInfo()); @@ -748,7 +758,7 @@ TEST_F(RTCStatsCollectorTest, CachedStatsReports) { rtc::scoped_refptr c = stats_->GetStatsReport(); EXPECT_NE(b.get(), c.get()); // Invalidate cache by advancing time. - fake_clock_.AdvanceTime(TimeDelta::ms(51)); + fake_clock_.AdvanceTime(TimeDelta::Millis(51)); rtc::scoped_refptr d = stats_->GetStatsReport(); EXPECT_TRUE(d); EXPECT_NE(c.get(), d.get()); @@ -759,7 +769,7 @@ TEST_F(RTCStatsCollectorTest, MultipleCallbacksWithInvalidatedCacheInBetween) { stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&a)); stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&b)); // Cache is invalidated after 50 ms. 
- fake_clock_.AdvanceTime(TimeDelta::ms(51)); + fake_clock_.AdvanceTime(TimeDelta::Millis(51)); stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&c)); EXPECT_TRUE_WAIT(a, kGetStatsReportTimeoutMs); EXPECT_TRUE_WAIT(b, kGetStatsReportTimeoutMs); @@ -813,6 +823,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStats) { inbound_audio_codec.kind = cricket::MEDIA_TYPE_AUDIO; inbound_audio_codec.name = "opus"; inbound_audio_codec.clock_rate = 1337; + inbound_audio_codec.num_channels = 1; + inbound_audio_codec.parameters = {{"minptime", "10"}, {"useinbandfec", "1"}}; voice_media_info.receive_codecs.insert( std::make_pair(inbound_audio_codec.payload_type, inbound_audio_codec)); @@ -821,6 +833,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStats) { outbound_audio_codec.kind = cricket::MEDIA_TYPE_AUDIO; outbound_audio_codec.name = "isac"; outbound_audio_codec.clock_rate = 1338; + outbound_audio_codec.num_channels = 2; voice_media_info.send_codecs.insert( std::make_pair(outbound_audio_codec.payload_type, outbound_audio_codec)); @@ -835,6 +848,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStats) { inbound_video_codec.kind = cricket::MEDIA_TYPE_VIDEO; inbound_video_codec.name = "H264"; inbound_video_codec.clock_rate = 1339; + inbound_video_codec.parameters = {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "42001f"}}; video_media_info.receive_codecs.insert( std::make_pair(inbound_video_codec.payload_type, inbound_video_codec)); @@ -856,18 +872,23 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStats) { expected_inbound_audio_codec.payload_type = 1; expected_inbound_audio_codec.mime_type = "audio/opus"; expected_inbound_audio_codec.clock_rate = 1337; + expected_inbound_audio_codec.channels = 1; + expected_inbound_audio_codec.sdp_fmtp_line = "minptime=10;useinbandfec=1"; RTCCodecStats expected_outbound_audio_codec("RTCCodec_AudioMid_Outbound_2", report->timestamp_us()); expected_outbound_audio_codec.payload_type = 
2; expected_outbound_audio_codec.mime_type = "audio/isac"; expected_outbound_audio_codec.clock_rate = 1338; + expected_outbound_audio_codec.channels = 2; RTCCodecStats expected_inbound_video_codec("RTCCodec_VideoMid_Inbound_3", report->timestamp_us()); expected_inbound_video_codec.payload_type = 3; expected_inbound_video_codec.mime_type = "video/H264"; expected_inbound_video_codec.clock_rate = 1339; + expected_inbound_video_codec.sdp_fmtp_line = + "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42001f"; RTCCodecStats expected_outbound_video_codec("RTCCodec_VideoMid_Outbound_4", report->timestamp_us()); @@ -956,9 +977,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsChain) { TEST_F(RTCStatsCollectorTest, CollectTwoRTCDataChannelStatsWithPendingId) { pc_->AddSctpDataChannel( - new MockDataChannel(/*id=*/-1, DataChannelInterface::kConnecting)); + new MockSctpDataChannel(/*id=*/-1, DataChannelInterface::kConnecting)); pc_->AddSctpDataChannel( - new MockDataChannel(/*id=*/-1, DataChannelInterface::kConnecting)); + new MockSctpDataChannel(/*id=*/-1, DataChannelInterface::kConnecting)); rtc::scoped_refptr report = stats_->GetStatsReport(); } @@ -967,52 +988,53 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { // Note: The test assumes data channel IDs are predictable. // This is not a safe assumption, but in order to make it work for // the test, we reset the ID allocator at test start. 
- DataChannel::ResetInternalIdAllocatorForTesting(-1); - pc_->AddSctpDataChannel(new MockDataChannel(0, "MockDataChannel0", - DataChannelInterface::kConnecting, - "udp", 1, 2, 3, 4)); + SctpDataChannel::ResetInternalIdAllocatorForTesting(-1); + pc_->AddSctpDataChannel(new MockSctpDataChannel( + 0, "MockSctpDataChannel0", DataChannelInterface::kConnecting, "udp", 1, 2, + 3, 4)); RTCDataChannelStats expected_data_channel0("RTCDataChannel_0", 0); - expected_data_channel0.label = "MockDataChannel0"; + expected_data_channel0.label = "MockSctpDataChannel0"; expected_data_channel0.protocol = "udp"; - expected_data_channel0.datachannelid = 0; + expected_data_channel0.data_channel_identifier = 0; expected_data_channel0.state = "connecting"; expected_data_channel0.messages_sent = 1; expected_data_channel0.bytes_sent = 2; expected_data_channel0.messages_received = 3; expected_data_channel0.bytes_received = 4; - pc_->AddSctpDataChannel(new MockDataChannel( - 1, "MockDataChannel1", DataChannelInterface::kOpen, "tcp", 5, 6, 7, 8)); + pc_->AddSctpDataChannel(new MockSctpDataChannel(1, "MockSctpDataChannel1", + DataChannelInterface::kOpen, + "tcp", 5, 6, 7, 8)); RTCDataChannelStats expected_data_channel1("RTCDataChannel_1", 0); - expected_data_channel1.label = "MockDataChannel1"; + expected_data_channel1.label = "MockSctpDataChannel1"; expected_data_channel1.protocol = "tcp"; - expected_data_channel1.datachannelid = 1; + expected_data_channel1.data_channel_identifier = 1; expected_data_channel1.state = "open"; expected_data_channel1.messages_sent = 5; expected_data_channel1.bytes_sent = 6; expected_data_channel1.messages_received = 7; expected_data_channel1.bytes_received = 8; - pc_->AddSctpDataChannel(new MockDataChannel(2, "MockDataChannel2", - DataChannelInterface::kClosing, - "udp", 9, 10, 11, 12)); + pc_->AddSctpDataChannel(new MockSctpDataChannel( + 2, "MockSctpDataChannel2", DataChannelInterface::kClosing, "udp", 9, 10, + 11, 12)); RTCDataChannelStats 
expected_data_channel2("RTCDataChannel_2", 0); - expected_data_channel2.label = "MockDataChannel2"; + expected_data_channel2.label = "MockSctpDataChannel2"; expected_data_channel2.protocol = "udp"; - expected_data_channel2.datachannelid = 2; + expected_data_channel2.data_channel_identifier = 2; expected_data_channel2.state = "closing"; expected_data_channel2.messages_sent = 9; expected_data_channel2.bytes_sent = 10; expected_data_channel2.messages_received = 11; expected_data_channel2.bytes_received = 12; - pc_->AddSctpDataChannel(new MockDataChannel(3, "MockDataChannel3", - DataChannelInterface::kClosed, - "tcp", 13, 14, 15, 16)); + pc_->AddSctpDataChannel(new MockSctpDataChannel(3, "MockSctpDataChannel3", + DataChannelInterface::kClosed, + "tcp", 13, 14, 15, 16)); RTCDataChannelStats expected_data_channel3("RTCDataChannel_3", 0); - expected_data_channel3.label = "MockDataChannel3"; + expected_data_channel3.label = "MockSctpDataChannel3"; expected_data_channel3.protocol = "tcp"; - expected_data_channel3.datachannelid = 3; + expected_data_channel3.data_channel_identifier = 3; expected_data_channel3.state = "closed"; expected_data_channel3.messages_sent = 13; expected_data_channel3.bytes_sent = 14; @@ -1379,12 +1401,16 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { report->Get("RTCPeerConnection")->cast_to()); } - rtc::scoped_refptr dummy_channel_a = DataChannel::Create( - nullptr, cricket::DCT_NONE, "DummyChannelA", InternalDataChannelInit()); - pc_->SignalDataChannelCreated()(dummy_channel_a.get()); - rtc::scoped_refptr dummy_channel_b = DataChannel::Create( - nullptr, cricket::DCT_NONE, "DummyChannelB", InternalDataChannelInit()); - pc_->SignalDataChannelCreated()(dummy_channel_b.get()); + // TODO(bugs.webrtc.org/11547): Supply a separate network thread. 
+ FakeDataChannelProvider provider; + rtc::scoped_refptr dummy_channel_a = SctpDataChannel::Create( + &provider, "DummyChannelA", InternalDataChannelInit(), + rtc::Thread::Current(), rtc::Thread::Current()); + pc_->SignalSctpDataChannelCreated()(dummy_channel_a.get()); + rtc::scoped_refptr dummy_channel_b = SctpDataChannel::Create( + &provider, "DummyChannelB", InternalDataChannelInit(), + rtc::Thread::Current(), rtc::Thread::Current()); + pc_->SignalSctpDataChannelCreated()(dummy_channel_b.get()); dummy_channel_a->SignalOpened(dummy_channel_a.get()); // Closing a channel that is not opened should not affect the counts. @@ -1525,7 +1551,7 @@ TEST_F(RTCStatsCollectorTest, cricket::VoiceReceiverInfo voice_receiver_info; voice_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo()); voice_receiver_info.local_stats[0].ssrc = 3; - voice_receiver_info.audio_level = 16383; + voice_receiver_info.audio_level = 16383; // [0,32767] voice_receiver_info.total_output_energy = 0.125; voice_receiver_info.total_samples_received = 4567; voice_receiver_info.total_output_duration = 0.25; @@ -1536,6 +1562,7 @@ TEST_F(RTCStatsCollectorTest, voice_receiver_info.silent_concealed_samples = 765; voice_receiver_info.jitter_buffer_delay_seconds = 3456; voice_receiver_info.jitter_buffer_emitted_count = 13; + voice_receiver_info.jitter_buffer_target_delay_seconds = 7.894; voice_receiver_info.jitter_buffer_flushes = 7; voice_receiver_info.delayed_packet_outage_samples = 15; voice_receiver_info.relative_packet_arrival_delay_seconds = 16; @@ -1569,7 +1596,7 @@ TEST_F(RTCStatsCollectorTest, expected_remote_audio_track.remote_source = true; expected_remote_audio_track.ended = false; expected_remote_audio_track.detached = false; - expected_remote_audio_track.audio_level = 16383.0 / 32767.0; + expected_remote_audio_track.audio_level = 16383.0 / 32767.0; // [0,1] expected_remote_audio_track.total_audio_energy = 0.125; expected_remote_audio_track.total_samples_received = 4567; 
expected_remote_audio_track.total_samples_duration = 0.25; @@ -1580,6 +1607,7 @@ TEST_F(RTCStatsCollectorTest, expected_remote_audio_track.silent_concealed_samples = 765; expected_remote_audio_track.jitter_buffer_delay = 3456; expected_remote_audio_track.jitter_buffer_emitted_count = 13; + expected_remote_audio_track.jitter_buffer_target_delay = 7.894; expected_remote_audio_track.jitter_buffer_flushes = 7; expected_remote_audio_track.delayed_packet_outage_samples = 15; expected_remote_audio_track.relative_packet_arrival_delay = 16; @@ -1755,6 +1783,18 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Audio) { voice_media_info.receivers[0].header_and_padding_bytes_rcvd = 4; voice_media_info.receivers[0].codec_payload_type = 42; voice_media_info.receivers[0].jitter_ms = 4500; + voice_media_info.receivers[0].jitter_buffer_delay_seconds = 1.0; + voice_media_info.receivers[0].jitter_buffer_emitted_count = 2; + voice_media_info.receivers[0].total_samples_received = 3; + voice_media_info.receivers[0].concealed_samples = 4; + voice_media_info.receivers[0].silent_concealed_samples = 5; + voice_media_info.receivers[0].concealment_events = 6; + voice_media_info.receivers[0].inserted_samples_for_deceleration = 7; + voice_media_info.receivers[0].removed_samples_for_acceleration = 8; + voice_media_info.receivers[0].audio_level = 14442; // [0,32767] + voice_media_info.receivers[0].total_output_energy = 10.0; + voice_media_info.receivers[0].total_output_duration = 11.0; + voice_media_info.receivers[0].last_packet_received_timestamp_ms = absl::nullopt; @@ -1793,6 +1833,18 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Audio) { expected_audio.packets_lost = -1; // |expected_audio.last_packet_received_timestamp| should be undefined. 
expected_audio.jitter = 4.5; + expected_audio.jitter_buffer_delay = 1.0; + expected_audio.jitter_buffer_emitted_count = 2; + expected_audio.total_samples_received = 3; + expected_audio.concealed_samples = 4; + expected_audio.silent_concealed_samples = 5; + expected_audio.concealment_events = 6; + expected_audio.inserted_samples_for_deceleration = 7; + expected_audio.removed_samples_for_acceleration = 8; + expected_audio.audio_level = 14442.0 / 32767.0; // [0,1] + expected_audio.total_audio_energy = 10.0; + expected_audio.total_samples_duration = 11.0; + ASSERT_TRUE(report->Get(expected_audio.id())); EXPECT_EQ( report->Get(expected_audio.id())->cast_to(), @@ -1831,8 +1883,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) { video_media_info.receivers[0].firs_sent = 5; video_media_info.receivers[0].plis_sent = 6; video_media_info.receivers[0].nacks_sent = 7; - video_media_info.receivers[0].frames_decoded = 8; + video_media_info.receivers[0].frames_received = 8; + video_media_info.receivers[0].frames_decoded = 9; video_media_info.receivers[0].key_frames_decoded = 3; + video_media_info.receivers[0].frames_dropped = 13; video_media_info.receivers[0].qp_sum = absl::nullopt; video_media_info.receivers[0].total_decode_time_ms = 9000; video_media_info.receivers[0].total_inter_frame_delay = 0.123; @@ -1876,8 +1930,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRTPStreamStats_Video) { expected_video.bytes_received = 3; expected_video.header_bytes_received = 12; expected_video.packets_lost = 42; - expected_video.frames_decoded = 8; + expected_video.frames_received = 8; + expected_video.frames_decoded = 9; expected_video.key_frames_decoded = 3; + expected_video.frames_dropped = 13; // |expected_video.qp_sum| should be undefined. 
expected_video.total_decode_time = 9.0; expected_video.total_inter_frame_delay = 0.123; @@ -2001,7 +2057,12 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { video_media_info.senders[0].qp_sum = absl::nullopt; video_media_info.senders[0].content_type = VideoContentType::UNSPECIFIED; video_media_info.senders[0].encoder_implementation_name = ""; - + video_media_info.senders[0].send_frame_width = 200; + video_media_info.senders[0].send_frame_height = 100; + video_media_info.senders[0].framerate_sent = 10; + video_media_info.senders[0].frames_sent = 5; + video_media_info.senders[0].huge_frames_sent = 2; + video_media_info.aggregated_senders.push_back(video_media_info.senders[0]); RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO; @@ -2049,6 +2110,11 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { expected_video.total_packet_send_delay = 10.0; expected_video.quality_limitation_reason = "bandwidth"; expected_video.quality_limitation_resolution_changes = 56u; + expected_video.frame_width = 200u; + expected_video.frame_height = 100u; + expected_video.frames_per_second = 10.0; + expected_video.frames_sent = 5; + expected_video.huge_frames_sent = 2; // |expected_video.content_type| should be undefined. // |expected_video.qp_sum| should be undefined. // |expected_video.encoder_implementation| should be undefined. 
@@ -2064,6 +2130,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) { video_media_info.senders[0].content_type = VideoContentType::SCREENSHARE; expected_video.content_type = "screenshare"; video_media_info.senders[0].encoder_implementation_name = "libfooencoder"; + video_media_info.aggregated_senders[0] = video_media_info.senders[0]; expected_video.encoder_implementation = "libfooencoder"; video_media_channel->SetStats(video_media_info); @@ -2104,6 +2171,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { rtp_connection_info.remote_candidate = *rtp_remote_candidate.get(); rtp_connection_info.sent_total_bytes = 42; rtp_connection_info.recv_total_bytes = 1337; + rtp_connection_info.sent_total_packets = 3; + rtp_connection_info.sent_discarded_packets = 2; + rtp_connection_info.packets_received = 4; cricket::TransportChannelStats rtp_transport_channel_stats; rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back( @@ -2121,7 +2191,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP), report->timestamp_us()); expected_rtp_transport.bytes_sent = 42; + expected_rtp_transport.packets_sent = 1; expected_rtp_transport.bytes_received = 1337; + expected_rtp_transport.packets_received = 4; expected_rtp_transport.dtls_state = RTCDtlsTransportState::kNew; expected_rtp_transport.selected_candidate_pair_changes = 1; @@ -2136,6 +2208,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { rtcp_connection_info.remote_candidate = *rtcp_remote_candidate.get(); rtcp_connection_info.sent_total_bytes = 1337; rtcp_connection_info.recv_total_bytes = 42; + rtcp_connection_info.sent_total_packets = 3; + rtcp_connection_info.sent_discarded_packets = 2; + rtcp_connection_info.packets_received = 4; cricket::TransportChannelStats rtcp_transport_channel_stats; rtcp_transport_channel_stats.component = 
cricket::ICE_CANDIDATE_COMPONENT_RTCP; @@ -2153,7 +2228,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTCP), report->timestamp_us()); expected_rtcp_transport.bytes_sent = 1337; + expected_rtcp_transport.packets_sent = 1; expected_rtcp_transport.bytes_received = 42; + expected_rtcp_transport.packets_received = 4; expected_rtcp_transport.dtls_state = RTCDtlsTransportState::kConnecting; expected_rtcp_transport.selected_candidate_pair_changes = 0; @@ -2247,6 +2324,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) { rtp_connection_info.remote_candidate = *rtp_remote_candidate.get(); rtp_connection_info.sent_total_bytes = 42; rtp_connection_info.recv_total_bytes = 1337; + rtp_connection_info.sent_total_packets = 3; + rtp_connection_info.sent_discarded_packets = 2; + rtp_connection_info.packets_received = 4; cricket::TransportChannelStats rtp_transport_channel_stats; rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back( @@ -2269,7 +2349,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) { rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP), report->timestamp_us()); expected_rtp_transport.bytes_sent = 42; + expected_rtp_transport.packets_sent = 1; expected_rtp_transport.bytes_received = 1337; + expected_rtp_transport.packets_received = 4; expected_rtp_transport.dtls_state = RTCDtlsTransportState::kConnected; expected_rtp_transport.selected_candidate_pair_changes = 1; // Crypto parameters @@ -2377,10 +2459,15 @@ TEST_F(RTCStatsCollectorTest, RTCVideoSourceStatsCollectedForSenderWithTrack) { const int kVideoSourceHeight = 34; cricket::VideoMediaInfo video_media_info; + video_media_info.aggregated_senders.push_back(cricket::VideoSenderInfo()); video_media_info.senders.push_back(cricket::VideoSenderInfo()); 
video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); video_media_info.senders[0].local_stats[0].ssrc = kSsrc; video_media_info.senders[0].framerate_input = 29; + video_media_info.aggregated_senders[0].local_stats.push_back( + cricket::SsrcSenderInfo()); + video_media_info.aggregated_senders[0].local_stats[0].ssrc = kSsrc; + video_media_info.aggregated_senders[0].framerate_input = 29; auto* video_media_channel = pc_->AddVideoChannel("VideoMid", "TransportName"); video_media_channel->SetStats(video_media_info); @@ -2510,6 +2597,7 @@ class RTCStatsCollectorTestWithParamKind case cricket::MEDIA_TYPE_VIDEO: return "Video"; case cricket::MEDIA_TYPE_DATA: + case cricket::MEDIA_TYPE_UNSUPPORTED: RTC_NOTREACHED(); return ""; } @@ -2523,47 +2611,52 @@ class RTCStatsCollectorTestWithParamKind // Adds a sender and channel of the appropriate kind, creating a sender info // with the report block's |source_ssrc| and report block data. - void AddSenderInfoAndMediaChannel(std::string transport_name, - ReportBlockData report_block_data, - absl::optional codec) { + void AddSenderInfoAndMediaChannel( + std::string transport_name, + const std::vector& report_block_datas, + absl::optional codec) { switch (media_type_) { case cricket::MEDIA_TYPE_AUDIO: { cricket::VoiceMediaInfo voice_media_info; - voice_media_info.senders.push_back(cricket::VoiceSenderInfo()); - voice_media_info.senders[0].local_stats.push_back( - cricket::SsrcSenderInfo()); - voice_media_info.senders[0].local_stats[0].ssrc = - report_block_data.report_block().source_ssrc; - if (codec.has_value()) { - voice_media_info.senders[0].codec_payload_type = codec->payload_type; - voice_media_info.send_codecs.insert( - std::make_pair(codec->payload_type, *codec)); + for (const auto& report_block_data : report_block_datas) { + cricket::VoiceSenderInfo sender; + sender.local_stats.push_back(cricket::SsrcSenderInfo()); + sender.local_stats[0].ssrc = + report_block_data.report_block().source_ssrc; + if 
(codec.has_value()) { + sender.codec_payload_type = codec->payload_type; + voice_media_info.send_codecs.insert( + std::make_pair(codec->payload_type, *codec)); + } + sender.report_block_datas.push_back(report_block_data); + voice_media_info.senders.push_back(sender); } - voice_media_info.senders[0].report_block_datas.push_back( - report_block_data); auto* voice_media_channel = pc_->AddVoiceChannel("mid", transport_name); voice_media_channel->SetStats(voice_media_info); return; } case cricket::MEDIA_TYPE_VIDEO: { cricket::VideoMediaInfo video_media_info; - video_media_info.senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders[0].local_stats.push_back( - cricket::SsrcSenderInfo()); - video_media_info.senders[0].local_stats[0].ssrc = - report_block_data.report_block().source_ssrc; - if (codec.has_value()) { - video_media_info.senders[0].codec_payload_type = codec->payload_type; - video_media_info.send_codecs.insert( - std::make_pair(codec->payload_type, *codec)); + for (const auto& report_block_data : report_block_datas) { + cricket::VideoSenderInfo sender; + sender.local_stats.push_back(cricket::SsrcSenderInfo()); + sender.local_stats[0].ssrc = + report_block_data.report_block().source_ssrc; + if (codec.has_value()) { + sender.codec_payload_type = codec->payload_type; + video_media_info.send_codecs.insert( + std::make_pair(codec->payload_type, *codec)); + } + sender.report_block_datas.push_back(report_block_data); + video_media_info.aggregated_senders.push_back(sender); + video_media_info.senders.push_back(sender); } - video_media_info.senders[0].report_block_datas.push_back( - report_block_data); auto* video_media_channel = pc_->AddVideoChannel("mid", transport_name); video_media_channel->SetStats(video_media_info); return; } case cricket::MEDIA_TYPE_DATA: + case cricket::MEDIA_TYPE_UNSUPPORTED: RTC_NOTREACHED(); } } @@ -2582,56 +2675,62 @@ TEST_P(RTCStatsCollectorTestWithParamKind, // The report block's timestamp cannot be from the future, set 
the fake clock // to match. - fake_clock_.SetTime(Timestamp::us(kReportBlockTimestampUtcUs)); - - RTCPReportBlock report_block; - // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the - // |source_ssrc|, "SSRC of the RTP packet sender". - report_block.source_ssrc = 12; - report_block.packets_lost = 7; - ReportBlockData report_block_data; - report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs); - report_block_data.AddRoundTripTimeSample(1234); - // Only the last sample should be exposed as the - // |RTCRemoteInboundRtpStreamStats::round_trip_time|. - report_block_data.AddRoundTripTimeSample(kRoundTripTimeMs); - - AddSenderInfoAndMediaChannel("TransportName", report_block_data, + fake_clock_.SetTime(Timestamp::Micros(kReportBlockTimestampUtcUs)); + auto ssrcs = {12, 13}; + std::vector report_block_datas; + for (auto ssrc : ssrcs) { + RTCPReportBlock report_block; + // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the + // |source_ssrc|, "SSRC of the RTP packet sender". + report_block.source_ssrc = ssrc; + report_block.packets_lost = 7; + ReportBlockData report_block_data; + report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs); + report_block_data.AddRoundTripTimeSample(1234); + // Only the last sample should be exposed as the + // |RTCRemoteInboundRtpStreamStats::round_trip_time|. 
+ report_block_data.AddRoundTripTimeSample(kRoundTripTimeMs); + report_block_datas.push_back(report_block_data); + } + AddSenderInfoAndMediaChannel("TransportName", report_block_datas, absl::nullopt); rtc::scoped_refptr report = stats_->GetStatsReport(); - - RTCRemoteInboundRtpStreamStats expected_remote_inbound_rtp( - "RTCRemoteInboundRtp" + MediaTypeUpperCase() + "Stream_12", - kReportBlockTimestampUtcUs); - expected_remote_inbound_rtp.ssrc = 12; - expected_remote_inbound_rtp.kind = MediaTypeLowerCase(); - expected_remote_inbound_rtp.transport_id = - "RTCTransport_TransportName_1"; // 1 for RTP (we have no RTCP transport) - expected_remote_inbound_rtp.packets_lost = 7; - expected_remote_inbound_rtp.local_id = - "RTCOutboundRTP" + MediaTypeUpperCase() + "Stream_12"; - expected_remote_inbound_rtp.round_trip_time = kRoundTripTimeSeconds; - // This test does not set up RTCCodecStats, so |codec_id| and |jitter| are - // expected to be missing. These are tested separately. - - ASSERT_TRUE(report->Get(expected_remote_inbound_rtp.id())); - EXPECT_EQ(report->Get(expected_remote_inbound_rtp.id()) - ->cast_to(), - expected_remote_inbound_rtp); - EXPECT_TRUE(report->Get(*expected_remote_inbound_rtp.transport_id)); - ASSERT_TRUE(report->Get(*expected_remote_inbound_rtp.local_id)); - // Lookup works in both directions. 
- EXPECT_EQ(*report->Get(*expected_remote_inbound_rtp.local_id) - ->cast_to() - .remote_id, - expected_remote_inbound_rtp.id()); + for (auto ssrc : ssrcs) { + std::string stream_id = "Stream_" + std::to_string(ssrc); + RTCRemoteInboundRtpStreamStats expected_remote_inbound_rtp( + "RTCRemoteInboundRtp" + MediaTypeUpperCase() + stream_id, + kReportBlockTimestampUtcUs); + expected_remote_inbound_rtp.ssrc = ssrc; + expected_remote_inbound_rtp.kind = MediaTypeLowerCase(); + expected_remote_inbound_rtp.transport_id = + "RTCTransport_TransportName_1"; // 1 for RTP (we have no RTCP + // transport) + expected_remote_inbound_rtp.packets_lost = 7; + expected_remote_inbound_rtp.local_id = + "RTCOutboundRTP" + MediaTypeUpperCase() + stream_id; + expected_remote_inbound_rtp.round_trip_time = kRoundTripTimeSeconds; + // This test does not set up RTCCodecStats, so |codec_id| and |jitter| are + // expected to be missing. These are tested separately. + + ASSERT_TRUE(report->Get(expected_remote_inbound_rtp.id())); + EXPECT_EQ(report->Get(expected_remote_inbound_rtp.id()) + ->cast_to(), + expected_remote_inbound_rtp); + EXPECT_TRUE(report->Get(*expected_remote_inbound_rtp.transport_id)); + ASSERT_TRUE(report->Get(*expected_remote_inbound_rtp.local_id)); + // Lookup works in both directions. 
+ EXPECT_EQ(*report->Get(*expected_remote_inbound_rtp.local_id) + ->cast_to() + .remote_id, + expected_remote_inbound_rtp.id()); + } } TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsWithTimestampFromReportBlock) { const int64_t kReportBlockTimestampUtcUs = 123456789; - fake_clock_.SetTime(Timestamp::us(kReportBlockTimestampUtcUs)); + fake_clock_.SetTime(Timestamp::Micros(kReportBlockTimestampUtcUs)); RTCPReportBlock report_block; // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the @@ -2640,11 +2739,11 @@ TEST_P(RTCStatsCollectorTestWithParamKind, ReportBlockData report_block_data; report_block_data.SetReportBlock(report_block, kReportBlockTimestampUtcUs); - AddSenderInfoAndMediaChannel("TransportName", report_block_data, + AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, absl::nullopt); // Advance time, it should be OK to have fresher reports than report blocks. - fake_clock_.AdvanceTime(TimeDelta::us(1234)); + fake_clock_.AdvanceTime(TimeDelta::Micros(1234)); rtc::scoped_refptr report = stats_->GetStatsReport(); @@ -2663,7 +2762,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind, TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsWithCodecBasedMembers) { const int64_t kReportBlockTimestampUtcUs = 123456789; - fake_clock_.SetTime(Timestamp::us(kReportBlockTimestampUtcUs)); + fake_clock_.SetTime(Timestamp::Micros(kReportBlockTimestampUtcUs)); RTCPReportBlock report_block; // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the @@ -2678,7 +2777,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind, codec.kind = media_type_; codec.clock_rate = 1000; - AddSenderInfoAndMediaChannel("TransportName", report_block_data, codec); + AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, codec); rtc::scoped_refptr report = stats_->GetStatsReport(); @@ -2700,7 +2799,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind, TEST_P(RTCStatsCollectorTestWithParamKind, 
RTCRemoteInboundRtpStreamStatsWithRtcpTransport) { const int64_t kReportBlockTimestampUtcUs = 123456789; - fake_clock_.SetTime(Timestamp::us(kReportBlockTimestampUtcUs)); + fake_clock_.SetTime(Timestamp::Micros(kReportBlockTimestampUtcUs)); RTCPReportBlock report_block; // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the @@ -2718,7 +2817,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind, rtcp_transport_channel_stats.dtls_state = cricket::DTLS_TRANSPORT_NEW; pc_->SetTransportStats("TransportName", {rtp_transport_channel_stats, rtcp_transport_channel_stats}); - AddSenderInfoAndMediaChannel("TransportName", report_block_data, + AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, absl::nullopt); rtc::scoped_refptr report = stats_->GetStatsReport(); @@ -2935,7 +3034,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector, void OnStatsDelivered( const rtc::scoped_refptr& report) override { EXPECT_TRUE(signaling_thread_->IsCurrent()); - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); delivered_report_ = report; } @@ -2946,7 +3045,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector, bool HasVerifiedResults() { EXPECT_TRUE(signaling_thread_->IsCurrent()); - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (!delivered_report_) return false; EXPECT_EQ(produced_on_signaling_thread_, 1); @@ -2973,7 +3072,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector, RTCStatsReport* partial_report) override { EXPECT_TRUE(signaling_thread_->IsCurrent()); { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); EXPECT_FALSE(delivered_report_); ++produced_on_signaling_thread_; } @@ -2989,7 +3088,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector, RTCStatsReport* partial_report) override { EXPECT_TRUE(network_thread_->IsCurrent()); { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); EXPECT_FALSE(delivered_report_); ++produced_on_network_thread_; } @@ -3003,7 +3102,7 @@ class FakeRTCStatsCollector : public 
RTCStatsCollector, rtc::Thread* const worker_thread_; rtc::Thread* const network_thread_; - rtc::CriticalSection lock_; + Mutex lock_; rtc::scoped_refptr delivered_report_; int produced_on_signaling_thread_ = 0; int produced_on_network_thread_ = 0; diff --git a/pc/rtc_stats_integrationtest.cc b/pc/rtc_stats_integrationtest.cc index e3f522bd47..e627d45e2e 100644 --- a/pc/rtc_stats_integrationtest.cc +++ b/pc/rtc_stats_integrationtest.cc @@ -445,8 +445,14 @@ class RTCStatsReportVerifier { verifier.TestMemberIsDefined(codec.payload_type); verifier.TestMemberIsDefined(codec.mime_type); verifier.TestMemberIsPositive(codec.clock_rate); - verifier.TestMemberIsUndefined(codec.channels); - verifier.TestMemberIsUndefined(codec.sdp_fmtp_line); + + if (codec.mime_type->rfind("audio", 0) == 0) + verifier.TestMemberIsPositive(codec.channels); + else + verifier.TestMemberIsUndefined(codec.channels); + + // sdp_fmtp_line is an optional field. + verifier.MarkMemberTested(codec.sdp_fmtp_line, true); return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -454,7 +460,7 @@ class RTCStatsReportVerifier { RTCStatsVerifier verifier(report_, &data_channel); verifier.TestMemberIsDefined(data_channel.label); verifier.TestMemberIsDefined(data_channel.protocol); - verifier.TestMemberIsDefined(data_channel.datachannelid); + verifier.TestMemberIsDefined(data_channel.data_channel_identifier); verifier.TestMemberIsDefined(data_channel.state); verifier.TestMemberIsNonNegative(data_channel.messages_sent); verifier.TestMemberIsNonNegative(data_channel.bytes_sent); @@ -644,6 +650,8 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(media_stream_track.interruption_count); verifier.TestMemberIsUndefined( media_stream_track.total_interruption_duration); + verifier.TestMemberIsUndefined( + media_stream_track.jitter_buffer_target_delay); } else { RTC_DCHECK_EQ(*media_stream_track.kind, RTCMediaStreamTrackKind::kAudio); // The type of the referenced media source depends on kind. 
@@ -654,6 +662,8 @@ class RTCStatsReportVerifier { media_stream_track.jitter_buffer_delay); verifier.TestMemberIsNonNegative( media_stream_track.jitter_buffer_emitted_count); + verifier.TestMemberIsNonNegative( + media_stream_track.jitter_buffer_target_delay); verifier.TestMemberIsPositive(media_stream_track.audio_level); verifier.TestMemberIsPositive( media_stream_track.total_audio_energy); @@ -688,6 +698,8 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(media_stream_track.jitter_buffer_delay); verifier.TestMemberIsUndefined( media_stream_track.jitter_buffer_emitted_count); + verifier.TestMemberIsUndefined( + media_stream_track.jitter_buffer_target_delay); verifier.TestMemberIsUndefined(media_stream_track.audio_level); verifier.TestMemberIsUndefined(media_stream_track.total_audio_energy); verifier.TestMemberIsUndefined( @@ -806,13 +818,62 @@ class RTCStatsReportVerifier { // this test. See RFC 3550. verifier.TestMemberIsNonNegative(inbound_stream.packets_lost); verifier.TestMemberIsDefined(inbound_stream.last_packet_received_timestamp); + if (inbound_stream.frames_received.ValueOrDefault(0) > 0) { + verifier.TestMemberIsNonNegative(inbound_stream.frame_width); + verifier.TestMemberIsNonNegative(inbound_stream.frame_height); + } else { + verifier.TestMemberIsUndefined(inbound_stream.frame_width); + verifier.TestMemberIsUndefined(inbound_stream.frame_height); + } + if (inbound_stream.frames_per_second.is_defined()) { + verifier.TestMemberIsNonNegative( + inbound_stream.frames_per_second); + } else { + verifier.TestMemberIsUndefined(inbound_stream.frames_per_second); + } + verifier.TestMemberIsUndefined(inbound_stream.frame_bit_depth); if (inbound_stream.media_type.is_defined() && *inbound_stream.media_type == "video") { verifier.TestMemberIsUndefined(inbound_stream.jitter); + verifier.TestMemberIsUndefined(inbound_stream.jitter_buffer_delay); + verifier.TestMemberIsUndefined( + inbound_stream.jitter_buffer_emitted_count); + 
verifier.TestMemberIsUndefined(inbound_stream.total_samples_received); + verifier.TestMemberIsUndefined(inbound_stream.concealed_samples); + verifier.TestMemberIsUndefined(inbound_stream.silent_concealed_samples); + verifier.TestMemberIsUndefined(inbound_stream.concealment_events); + verifier.TestMemberIsUndefined( + inbound_stream.inserted_samples_for_deceleration); + verifier.TestMemberIsUndefined( + inbound_stream.removed_samples_for_acceleration); + verifier.TestMemberIsUndefined(inbound_stream.audio_level); + verifier.TestMemberIsUndefined(inbound_stream.total_audio_energy); + verifier.TestMemberIsUndefined(inbound_stream.total_samples_duration); + verifier.TestMemberIsNonNegative(inbound_stream.frames_received); } else { verifier.TestMemberIsNonNegative(inbound_stream.jitter); + verifier.TestMemberIsNonNegative( + inbound_stream.jitter_buffer_delay); + verifier.TestMemberIsNonNegative( + inbound_stream.jitter_buffer_emitted_count); + verifier.TestMemberIsPositive( + inbound_stream.total_samples_received); + verifier.TestMemberIsNonNegative( + inbound_stream.concealed_samples); + verifier.TestMemberIsNonNegative( + inbound_stream.silent_concealed_samples); + verifier.TestMemberIsNonNegative( + inbound_stream.concealment_events); + verifier.TestMemberIsNonNegative( + inbound_stream.inserted_samples_for_deceleration); + verifier.TestMemberIsNonNegative( + inbound_stream.removed_samples_for_acceleration); + verifier.TestMemberIsPositive(inbound_stream.audio_level); + verifier.TestMemberIsPositive(inbound_stream.total_audio_energy); + verifier.TestMemberIsPositive( + inbound_stream.total_samples_duration); + verifier.TestMemberIsUndefined(inbound_stream.frames_received); } - verifier.TestMemberIsUndefined(inbound_stream.round_trip_time); verifier.TestMemberIsUndefined(inbound_stream.packets_discarded); verifier.TestMemberIsUndefined(inbound_stream.packets_repaired); @@ -831,6 +892,7 @@ class RTCStatsReportVerifier { *inbound_stream.media_type == "video") { 
verifier.TestMemberIsDefined(inbound_stream.frames_decoded); verifier.TestMemberIsDefined(inbound_stream.key_frames_decoded); + verifier.TestMemberIsNonNegative(inbound_stream.frames_dropped); verifier.TestMemberIsNonNegative( inbound_stream.total_decode_time); verifier.TestMemberIsNonNegative( @@ -843,6 +905,7 @@ class RTCStatsReportVerifier { } else { verifier.TestMemberIsUndefined(inbound_stream.frames_decoded); verifier.TestMemberIsUndefined(inbound_stream.key_frames_decoded); + verifier.TestMemberIsUndefined(inbound_stream.frames_dropped); verifier.TestMemberIsUndefined(inbound_stream.total_decode_time); verifier.TestMemberIsUndefined(inbound_stream.total_inter_frame_delay); verifier.TestMemberIsUndefined( @@ -860,7 +923,11 @@ class RTCStatsReportVerifier { *outbound_stream.media_type == "video") { verifier.TestMemberIsIDReference(outbound_stream.media_source_id, RTCVideoSourceStats::kType); - verifier.TestMemberIsNonNegative(outbound_stream.qp_sum); + if (*outbound_stream.frames_encoded > 0) { + verifier.TestMemberIsNonNegative(outbound_stream.qp_sum); + } else { + verifier.TestMemberIsUndefined(outbound_stream.qp_sum); + } } else { verifier.TestMemberIsIDReference(outbound_stream.media_source_id, RTCAudioSourceStats::kType); @@ -894,6 +961,23 @@ class RTCStatsReportVerifier { // this to be present. verifier.MarkMemberTested(outbound_stream.content_type, true); verifier.TestMemberIsDefined(outbound_stream.encoder_implementation); + // Unless an implementation-specific amount of time has passed and at + // least one frame has been encoded, undefined is reported. Because it + // is hard to tell what is the case here, we treat FPS as optional. + // TODO(hbos): Update the tests to run until all implemented metrics + // should be populated. 
+ if (outbound_stream.frames_per_second.is_defined()) { + verifier.TestMemberIsNonNegative( + outbound_stream.frames_per_second); + } else { + verifier.TestMemberIsUndefined(outbound_stream.frames_per_second); + } + verifier.TestMemberIsNonNegative(outbound_stream.frame_height); + verifier.TestMemberIsNonNegative(outbound_stream.frame_width); + verifier.TestMemberIsNonNegative(outbound_stream.frames_sent); + verifier.TestMemberIsNonNegative( + outbound_stream.huge_frames_sent); + verifier.MarkMemberTested(outbound_stream.rid, true); } else { verifier.TestMemberIsUndefined(outbound_stream.frames_encoded); verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded); @@ -908,6 +992,12 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(outbound_stream.content_type); // TODO(hbos): Implement for audio as well. verifier.TestMemberIsUndefined(outbound_stream.encoder_implementation); + verifier.TestMemberIsUndefined(outbound_stream.rid); + verifier.TestMemberIsUndefined(outbound_stream.frames_per_second); + verifier.TestMemberIsUndefined(outbound_stream.frame_height); + verifier.TestMemberIsUndefined(outbound_stream.frame_width); + verifier.TestMemberIsUndefined(outbound_stream.frames_sent); + verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent); } return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -975,7 +1065,9 @@ class RTCStatsReportVerifier { bool VerifyRTCTransportStats(const RTCTransportStats& transport) { RTCStatsVerifier verifier(report_, &transport); verifier.TestMemberIsNonNegative(transport.bytes_sent); + verifier.TestMemberIsNonNegative(transport.packets_sent); verifier.TestMemberIsNonNegative(transport.bytes_received); + verifier.TestMemberIsNonNegative(transport.packets_received); verifier.TestMemberIsOptionalIDReference(transport.rtcp_transport_stats_id, RTCTransportStats::kType); verifier.TestMemberIsDefined(transport.dtls_state); @@ -1004,7 +1096,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCaller) { 
rtc::scoped_refptr report = GetStatsFromCaller(); RTCStatsReportVerifier(report.get()).VerifyReport({}); - #if RTC_TRACE_EVENTS_ENABLED +#if RTC_TRACE_EVENTS_ENABLED EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace()); #endif } @@ -1015,7 +1107,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) { rtc::scoped_refptr report = GetStatsFromCallee(); RTCStatsReportVerifier(report.get()).VerifyReport({}); - #if RTC_TRACE_EVENTS_ENABLED +#if RTC_TRACE_EVENTS_ENABLED EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace()); #endif } diff --git a/pc/rtp_data_channel.cc b/pc/rtp_data_channel.cc new file mode 100644 index 0000000000..b08b2b2ffb --- /dev/null +++ b/pc/rtp_data_channel.cc @@ -0,0 +1,394 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/rtp_data_channel.h" + +#include +#include +#include + +#include "api/proxy.h" +#include "rtc_base/checks.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +namespace { + +static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024; + +static std::atomic g_unique_id{0}; + +int GenerateUniqueId() { + return ++g_unique_id; +} + +// Define proxy for DataChannelInterface. 
+BEGIN_SIGNALING_PROXY_MAP(DataChannel) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*) +PROXY_METHOD0(void, UnregisterObserver) +BYPASS_PROXY_CONSTMETHOD0(std::string, label) +BYPASS_PROXY_CONSTMETHOD0(bool, reliable) +BYPASS_PROXY_CONSTMETHOD0(bool, ordered) +BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime) +BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits) +BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxRetransmitsOpt) +BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxPacketLifeTime) +BYPASS_PROXY_CONSTMETHOD0(std::string, protocol) +BYPASS_PROXY_CONSTMETHOD0(bool, negotiated) +// Can't bypass the proxy since the id may change. +PROXY_CONSTMETHOD0(int, id) +BYPASS_PROXY_CONSTMETHOD0(Priority, priority) +PROXY_CONSTMETHOD0(DataState, state) +PROXY_CONSTMETHOD0(RTCError, error) +PROXY_CONSTMETHOD0(uint32_t, messages_sent) +PROXY_CONSTMETHOD0(uint64_t, bytes_sent) +PROXY_CONSTMETHOD0(uint32_t, messages_received) +PROXY_CONSTMETHOD0(uint64_t, bytes_received) +PROXY_CONSTMETHOD0(uint64_t, buffered_amount) +PROXY_METHOD0(void, Close) +// TODO(bugs.webrtc.org/11547): Change to run on the network thread. 
+PROXY_METHOD1(bool, Send, const DataBuffer&) +END_PROXY_MAP() + +} // namespace + +rtc::scoped_refptr RtpDataChannel::Create( + RtpDataChannelProviderInterface* provider, + const std::string& label, + const DataChannelInit& config, + rtc::Thread* signaling_thread) { + rtc::scoped_refptr channel( + new rtc::RefCountedObject(config, provider, label, + signaling_thread)); + if (!channel->Init()) { + return nullptr; + } + return channel; +} + +// static +rtc::scoped_refptr RtpDataChannel::CreateProxy( + rtc::scoped_refptr channel) { + return DataChannelProxy::Create(channel->signaling_thread_, channel.get()); +} + +RtpDataChannel::RtpDataChannel(const DataChannelInit& config, + RtpDataChannelProviderInterface* provider, + const std::string& label, + rtc::Thread* signaling_thread) + : signaling_thread_(signaling_thread), + internal_id_(GenerateUniqueId()), + label_(label), + config_(config), + provider_(provider) { + RTC_DCHECK_RUN_ON(signaling_thread_); +} + +bool RtpDataChannel::Init() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (config_.reliable || config_.id != -1 || config_.maxRetransmits || + config_.maxRetransmitTime) { + RTC_LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to " + "invalid DataChannelInit."; + return false; + } + + return true; +} + +RtpDataChannel::~RtpDataChannel() { + RTC_DCHECK_RUN_ON(signaling_thread_); +} + +void RtpDataChannel::RegisterObserver(DataChannelObserver* observer) { + RTC_DCHECK_RUN_ON(signaling_thread_); + observer_ = observer; + DeliverQueuedReceivedData(); +} + +void RtpDataChannel::UnregisterObserver() { + RTC_DCHECK_RUN_ON(signaling_thread_); + observer_ = nullptr; +} + +void RtpDataChannel::Close() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (state_ == kClosed) + return; + send_ssrc_ = 0; + send_ssrc_set_ = false; + SetState(kClosing); + UpdateState(); +} + +RtpDataChannel::DataState RtpDataChannel::state() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return state_; +} + +RTCError 
RtpDataChannel::error() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return error_; +} + +uint32_t RtpDataChannel::messages_sent() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return messages_sent_; +} + +uint64_t RtpDataChannel::bytes_sent() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return bytes_sent_; +} + +uint32_t RtpDataChannel::messages_received() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return messages_received_; +} + +uint64_t RtpDataChannel::bytes_received() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return bytes_received_; +} + +bool RtpDataChannel::Send(const DataBuffer& buffer) { + RTC_DCHECK_RUN_ON(signaling_thread_); + + if (state_ != kOpen) { + return false; + } + + // TODO(jiayl): the spec is unclear about if the remote side should get the + // onmessage event. We need to figure out the expected behavior and change the + // code accordingly. + if (buffer.size() == 0) { + return true; + } + + return SendDataMessage(buffer); +} + +void RtpDataChannel::SetReceiveSsrc(uint32_t receive_ssrc) { + RTC_DCHECK_RUN_ON(signaling_thread_); + + if (receive_ssrc_set_) { + return; + } + receive_ssrc_ = receive_ssrc; + receive_ssrc_set_ = true; + UpdateState(); +} + +void RtpDataChannel::OnTransportChannelClosed() { + RTCError error = RTCError(RTCErrorType::OPERATION_ERROR_WITH_DATA, + "Transport channel closed"); + CloseAbruptlyWithError(std::move(error)); +} + +DataChannelStats RtpDataChannel::GetStats() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + DataChannelStats stats{internal_id_, id(), label(), + protocol(), state(), messages_sent(), + messages_received(), bytes_sent(), bytes_received()}; + return stats; +} + +// The remote peer request that this channel shall be closed. +void RtpDataChannel::RemotePeerRequestClose() { + // Close with error code explicitly set to OK. 
+ CloseAbruptlyWithError(RTCError()); +} + +void RtpDataChannel::SetSendSsrc(uint32_t send_ssrc) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (send_ssrc_set_) { + return; + } + send_ssrc_ = send_ssrc; + send_ssrc_set_ = true; + UpdateState(); +} + +void RtpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, + const rtc::CopyOnWriteBuffer& payload) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (params.ssrc != receive_ssrc_) { + return; + } + + RTC_DCHECK(params.type == cricket::DMT_BINARY || + params.type == cricket::DMT_TEXT); + + RTC_LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = " + << params.sid; + + bool binary = (params.type == cricket::DMT_BINARY); + auto buffer = std::make_unique(payload, binary); + if (state_ == kOpen && observer_) { + ++messages_received_; + bytes_received_ += buffer->size(); + observer_->OnMessage(*buffer.get()); + } else { + if (queued_received_data_.byte_count() + payload.size() > + kMaxQueuedReceivedDataBytes) { + RTC_LOG(LS_ERROR) << "Queued received data exceeds the max buffer size."; + + queued_received_data_.Clear(); + CloseAbruptlyWithError( + RTCError(RTCErrorType::RESOURCE_EXHAUSTED, + "Queued received data exceeds the max buffer size.")); + + return; + } + queued_received_data_.PushBack(std::move(buffer)); + } +} + +void RtpDataChannel::OnChannelReady(bool writable) { + RTC_DCHECK_RUN_ON(signaling_thread_); + + writable_ = writable; + if (!writable) { + return; + } + + UpdateState(); +} + +void RtpDataChannel::CloseAbruptlyWithError(RTCError error) { + RTC_DCHECK_RUN_ON(signaling_thread_); + + if (state_ == kClosed) { + return; + } + + if (connected_to_provider_) { + DisconnectFromProvider(); + } + + // Still go to "kClosing" before "kClosed", since observers may be expecting + // that. 
+ SetState(kClosing); + error_ = std::move(error); + SetState(kClosed); +} + +void RtpDataChannel::UpdateState() { + RTC_DCHECK_RUN_ON(signaling_thread_); + // UpdateState determines what to do from a few state variables. Include + // all conditions required for each state transition here for + // clarity. + switch (state_) { + case kConnecting: { + if (send_ssrc_set_ == receive_ssrc_set_) { + if (!connected_to_provider_) { + connected_to_provider_ = provider_->ConnectDataChannel(this); + } + if (connected_to_provider_ && writable_) { + SetState(kOpen); + // If we have received buffers before the channel got writable. + // Deliver them now. + DeliverQueuedReceivedData(); + } + } + break; + } + case kOpen: { + break; + } + case kClosing: { + // For RTP data channels, we can go to "closed" after we finish + // sending data and the send/recv SSRCs are unset. + if (connected_to_provider_) { + DisconnectFromProvider(); + } + if (!send_ssrc_set_ && !receive_ssrc_set_) { + SetState(kClosed); + } + break; + } + case kClosed: + break; + } +} + +void RtpDataChannel::SetState(DataState state) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (state_ == state) { + return; + } + + state_ = state; + if (observer_) { + observer_->OnStateChange(); + } + if (state_ == kOpen) { + SignalOpened(this); + } else if (state_ == kClosed) { + SignalClosed(this); + } +} + +void RtpDataChannel::DisconnectFromProvider() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (!connected_to_provider_) + return; + + provider_->DisconnectDataChannel(this); + connected_to_provider_ = false; +} + +void RtpDataChannel::DeliverQueuedReceivedData() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (!observer_) { + return; + } + + while (!queued_received_data_.Empty()) { + std::unique_ptr buffer = queued_received_data_.PopFront(); + ++messages_received_; + bytes_received_ += buffer->size(); + observer_->OnMessage(*buffer); + } +} + +bool RtpDataChannel::SendDataMessage(const DataBuffer& buffer) { + 
RTC_DCHECK_RUN_ON(signaling_thread_); + cricket::SendDataParams send_params; + + send_params.ssrc = send_ssrc_; + send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT; + + cricket::SendDataResult send_result = cricket::SDR_SUCCESS; + bool success = provider_->SendData(send_params, buffer.data, &send_result); + + if (success) { + ++messages_sent_; + bytes_sent_ += buffer.size(); + if (observer_ && buffer.size() > 0) { + observer_->OnBufferedAmountChange(buffer.size()); + } + return true; + } + + return false; +} + +// static +void RtpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) { + g_unique_id = new_value; +} + +} // namespace webrtc diff --git a/pc/rtp_data_channel.h b/pc/rtp_data_channel.h new file mode 100644 index 0000000000..ea2de49b5a --- /dev/null +++ b/pc/rtp_data_channel.h @@ -0,0 +1,198 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_RTP_DATA_CHANNEL_H_ +#define PC_RTP_DATA_CHANNEL_H_ + +#include +#include + +#include "api/data_channel_interface.h" +#include "api/priority.h" +#include "api/scoped_refptr.h" +#include "api/transport/data_channel_transport_interface.h" +#include "media/base/media_channel.h" +#include "pc/channel.h" +#include "pc/data_channel_utils.h" +#include "rtc_base/async_invoker.h" +#include "rtc_base/third_party/sigslot/sigslot.h" + +namespace webrtc { + +class RtpDataChannel; + +// TODO(deadbeef): Once RTP data channels go away, get rid of this and have +// DataChannel depend on SctpTransportInternal (pure virtual SctpTransport +// interface) instead. 
+class RtpDataChannelProviderInterface { + public: + // Sends the data to the transport. + virtual bool SendData(const cricket::SendDataParams& params, + const rtc::CopyOnWriteBuffer& payload, + cricket::SendDataResult* result) = 0; + // Connects to the transport signals. + virtual bool ConnectDataChannel(RtpDataChannel* data_channel) = 0; + // Disconnects from the transport signals. + virtual void DisconnectDataChannel(RtpDataChannel* data_channel) = 0; + // Returns true if the transport channel is ready to send data. + virtual bool ReadyToSendData() const = 0; + + protected: + virtual ~RtpDataChannelProviderInterface() {} +}; + +// RtpDataChannel is an implementation of the DataChannelInterface based on +// libjingle's data engine. It provides an implementation of unreliable data +// channels. + +// DataChannel states: +// kConnecting: The channel has been created the transport might not yet be +// ready. +// kOpen: The channel have a local SSRC set by a call to UpdateSendSsrc +// and a remote SSRC set by call to UpdateReceiveSsrc and the transport +// has been writable once. +// kClosing: DataChannelInterface::Close has been called or UpdateReceiveSsrc +// has been called with SSRC==0 +// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc has been called with +// SSRC==0. +class RtpDataChannel : public DataChannelInterface, + public sigslot::has_slots<> { + public: + static rtc::scoped_refptr Create( + RtpDataChannelProviderInterface* provider, + const std::string& label, + const DataChannelInit& config, + rtc::Thread* signaling_thread); + + // Instantiates an API proxy for a DataChannel instance that will be handed + // out to external callers. 
+ static rtc::scoped_refptr CreateProxy( + rtc::scoped_refptr channel); + + void RegisterObserver(DataChannelObserver* observer) override; + void UnregisterObserver() override; + + std::string label() const override { return label_; } + bool reliable() const override { return false; } + bool ordered() const override { return config_.ordered; } + // Backwards compatible accessors + uint16_t maxRetransmitTime() const override { + return config_.maxRetransmitTime ? *config_.maxRetransmitTime + : static_cast(-1); + } + uint16_t maxRetransmits() const override { + return config_.maxRetransmits ? *config_.maxRetransmits + : static_cast(-1); + } + absl::optional maxPacketLifeTime() const override { + return config_.maxRetransmitTime; + } + absl::optional maxRetransmitsOpt() const override { + return config_.maxRetransmits; + } + std::string protocol() const override { return config_.protocol; } + bool negotiated() const override { return config_.negotiated; } + int id() const override { return config_.id; } + Priority priority() const override { + return config_.priority ? *config_.priority : Priority::kLow; + } + + virtual int internal_id() const { return internal_id_; } + + uint64_t buffered_amount() const override { return 0; } + void Close() override; + DataState state() const override; + RTCError error() const override; + uint32_t messages_sent() const override; + uint64_t bytes_sent() const override; + uint32_t messages_received() const override; + uint64_t bytes_received() const override; + bool Send(const DataBuffer& buffer) override; + + // Close immediately, ignoring any queued data or closing procedure. + // This is called when SDP indicates a channel should be removed. + void CloseAbruptlyWithError(RTCError error); + + // Called when the channel's ready to use. That can happen when the + // underlying DataMediaChannel becomes ready, or when this channel is a new + // stream on an existing DataMediaChannel, and we've finished negotiation. 
+ void OnChannelReady(bool writable); + + // Slots for provider to connect signals to. + void OnDataReceived(const cricket::ReceiveDataParams& params, + const rtc::CopyOnWriteBuffer& payload); + + // Called when the transport channel is unusable. + // This method makes sure the DataChannel is disconnected and changes state + // to kClosed. + void OnTransportChannelClosed(); + + DataChannelStats GetStats() const; + + // The remote peer requested that this channel should be closed. + void RemotePeerRequestClose(); + // Set the SSRC this channel should use to send data on the + // underlying data engine. |send_ssrc| == 0 means that the channel is no + // longer part of the session negotiation. + void SetSendSsrc(uint32_t send_ssrc); + // Set the SSRC this channel should use to receive data from the + // underlying data engine. + void SetReceiveSsrc(uint32_t receive_ssrc); + + // Emitted when state transitions to kOpen. + sigslot::signal1 SignalOpened; + // Emitted when state transitions to kClosed. + sigslot::signal1 SignalClosed; + + // Reset the allocator for internal ID values for testing, so that + // the internal IDs generated are predictable. Test only. 
+ static void ResetInternalIdAllocatorForTesting(int new_value); + + protected: + RtpDataChannel(const DataChannelInit& config, + RtpDataChannelProviderInterface* client, + const std::string& label, + rtc::Thread* signaling_thread); + ~RtpDataChannel() override; + + private: + bool Init(); + void UpdateState(); + void SetState(DataState state); + void DisconnectFromProvider(); + + void DeliverQueuedReceivedData(); + + bool SendDataMessage(const DataBuffer& buffer); + + rtc::Thread* const signaling_thread_; + const int internal_id_; + const std::string label_; + const DataChannelInit config_; + DataChannelObserver* observer_ RTC_GUARDED_BY(signaling_thread_) = nullptr; + DataState state_ RTC_GUARDED_BY(signaling_thread_) = kConnecting; + RTCError error_ RTC_GUARDED_BY(signaling_thread_); + uint32_t messages_sent_ RTC_GUARDED_BY(signaling_thread_) = 0; + uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_) = 0; + uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_) = 0; + uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_) = 0; + RtpDataChannelProviderInterface* const provider_; + bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_) = false; + bool send_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false; + bool receive_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false; + bool writable_ RTC_GUARDED_BY(signaling_thread_) = false; + uint32_t send_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0; + uint32_t receive_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0; + PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_); +}; + +} // namespace webrtc + +#endif // PC_RTP_DATA_CHANNEL_H_ diff --git a/pc/rtp_media_utils.cc b/pc/rtp_media_utils.cc index 6e8be589c7..c5d642b685 100644 --- a/pc/rtp_media_utils.cc +++ b/pc/rtp_media_utils.cc @@ -42,14 +42,16 @@ RtpTransceiverDirection RtpTransceiverDirectionReversed( switch (direction) { case RtpTransceiverDirection::kSendRecv: case RtpTransceiverDirection::kInactive: + case 
RtpTransceiverDirection::kStopped: return direction; case RtpTransceiverDirection::kSendOnly: return RtpTransceiverDirection::kRecvOnly; case RtpTransceiverDirection::kRecvOnly: return RtpTransceiverDirection::kSendOnly; + default: + RTC_NOTREACHED(); + return direction; } - RTC_NOTREACHED(); - return direction; } RtpTransceiverDirection RtpTransceiverDirectionWithSendSet( @@ -76,6 +78,8 @@ const char* RtpTransceiverDirectionToString(RtpTransceiverDirection direction) { return "kRecvOnly"; case RtpTransceiverDirection::kInactive: return "kInactive"; + case RtpTransceiverDirection::kStopped: + return "kStopped"; } RTC_NOTREACHED(); return ""; diff --git a/pc/rtp_parameters_conversion.cc b/pc/rtp_parameters_conversion.cc index 93f28f1815..68a948ea8e 100644 --- a/pc/rtp_parameters_conversion.cc +++ b/pc/rtp_parameters_conversion.cc @@ -76,8 +76,7 @@ RTCErrorOr ToCricketFeedbackParam( } return cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc); } - // Not reached; avoids compile warning. - FATAL(); + RTC_CHECK_NOTREACHED(); } template @@ -164,7 +163,7 @@ RTCErrorOr ToCricketCodec(const RtpCodecParameters& codec) { } cricket_codec.AddFeedbackParam(result.MoveValue()); } - cricket_codec.params.insert(codec.parameters.begin(), codec.parameters.end()); + cricket_codec.params = codec.parameters; return std::move(cricket_codec); } @@ -366,8 +365,7 @@ RtpCodecParameters ToRtpCodecParameters(const C& cricket_codec) { } } ToRtpCodecParametersTypeSpecific(cricket_codec, &codec_param); - codec_param.parameters.insert(cricket_codec.params.begin(), - cricket_codec.params.end()); + codec_param.parameters = cricket_codec.params; return codec_param; } diff --git a/pc/rtp_receiver.h b/pc/rtp_receiver.h index 84c2ff723b..2cfccd4e63 100644 --- a/pc/rtp_receiver.h +++ b/pc/rtp_receiver.h @@ -41,7 +41,11 @@ namespace webrtc { // Internal class used by PeerConnection. class RtpReceiverInternal : public RtpReceiverInterface { public: + // Stops receiving. The track may be reactivated. 
virtual void Stop() = 0; + // Stops the receiver permanently. + // Causes the associated track to enter kEnded state. Cannot be reversed. + virtual void StopAndEndTrack() = 0; // Sets the underlying MediaEngine channel associated with this RtpSender. // A VoiceMediaChannel should be used for audio RtpSenders and diff --git a/pc/rtp_sender.cc b/pc/rtp_sender.cc index 73cfcd045a..0da6dfca80 100644 --- a/pc/rtp_sender.cc +++ b/pc/rtp_sender.cc @@ -17,8 +17,7 @@ #include "api/audio_options.h" #include "api/media_stream_interface.h" #include "media/base/media_engine.h" -#include "pc/peer_connection.h" -#include "pc/stats_collector.h" +#include "pc/stats_collector_interface.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" #include "rtc_base/location.h" @@ -50,7 +49,7 @@ int GenerateUniqueId() { bool PerSenderRtpEncodingParameterHasValue( const RtpEncodingParameters& encoding_params) { if (encoding_params.bitrate_priority != kDefaultBitratePriority || - encoding_params.network_priority != kDefaultBitratePriority) { + encoding_params.network_priority != Priority::kLow) { return true; } return false; @@ -184,6 +183,15 @@ RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters"); + if (is_transceiver_stopped_) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_STATE, + "Cannot set parameters on sender of a stopped transceiver."); + } + if (stopped_) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, + "Cannot set parameters on a stopped sender."); + } if (stopped_) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "Cannot set parameters on a stopped sender."); @@ -297,6 +305,9 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { if (frame_encryptor_) { SetFrameEncryptor(frame_encryptor_); } + if (frame_transformer_) { + SetEncoderToPacketizerFrameTransformer(frame_transformer_); + } } void RtpSenderBase::Stop() { 
@@ -364,10 +375,21 @@ RTCError RtpSenderBase::DisableEncodingLayers( return result; } +void RtpSenderBase::SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr frame_transformer) { + frame_transformer_ = std::move(frame_transformer); + if (media_channel_ && ssrc_ && !stopped_) { + worker_thread_->Invoke(RTC_FROM_HERE, [&] { + media_channel_->SetEncoderToPacketizerFrameTransformer( + ssrc_, frame_transformer_); + }); + } +} + LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {} LocalAudioSinkAdapter::~LocalAudioSinkAdapter() { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); if (sink_) sink_->OnClose(); } @@ -379,7 +401,7 @@ void LocalAudioSinkAdapter::OnData( size_t number_of_channels, size_t number_of_frames, absl::optional absolute_capture_timestamp_ms) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); if (sink_) { sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, number_of_frames, absolute_capture_timestamp_ms); @@ -387,7 +409,7 @@ void LocalAudioSinkAdapter::OnData( } void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); RTC_DCHECK(!sink || !sink_); sink_ = sink; } @@ -395,7 +417,7 @@ void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) { rtc::scoped_refptr AudioRtpSender::Create( rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer) { return rtc::scoped_refptr( new rtc::RefCountedObject(worker_thread, id, stats, @@ -404,7 +426,7 @@ rtc::scoped_refptr AudioRtpSender::Create( AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer) : RtpSenderBase(worker_thread, id, set_streams_observer), stats_(stats), @@ -592,6 +614,7 @@ void VideoRtpSender::SetSend() { options.is_screencast = 
source->is_screencast(); options.video_noise_reduction = source->needs_denoising(); } + options.content_hint = cached_track_content_hint_; switch (cached_track_content_hint_) { case VideoTrackInterface::ContentHint::kNone: break; diff --git a/pc/rtp_sender.h b/pc/rtp_sender.h index fcf8448e4c..c2fe91f01d 100644 --- a/pc/rtp_sender.h +++ b/pc/rtp_sender.h @@ -24,11 +24,11 @@ #include "media/base/audio_source.h" #include "media/base/media_channel.h" #include "pc/dtmf_sender.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { -class StatsCollector; +class StatsCollectorInterface; bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters); @@ -69,6 +69,8 @@ class RtpSenderInternal : public RtpSenderInterface { // If the specified list is empty, this is a no-op. virtual RTCError DisableEncodingLayers( const std::vector& rid) = 0; + + virtual void SetTransceiverAsStopped() = 0; }; // Shared implementation for RtpSenderInternal interface. @@ -149,6 +151,11 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // If the specified list is empty, this is a no-op. RTCError DisableEncodingLayers(const std::vector& rid) override; + void SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr frame_transformer) override; + + void SetTransceiverAsStopped() override { is_transceiver_stopped_ = true; } + protected: // If |set_streams_observer| is not null, it is invoked when SetStreams() // is called. |set_streams_observer| is not owned by this object. 
If not @@ -177,6 +184,7 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { rtc::Thread* worker_thread_; uint32_t ssrc_ = 0; bool stopped_ = false; + bool is_transceiver_stopped_ = false; int attachment_id_ = 0; const std::string id_; @@ -197,6 +205,8 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { std::vector disabled_rids_; SetStreamsObserver* set_streams_observer_ = nullptr; + + rtc::scoped_refptr frame_transformer_; }; // LocalAudioSinkAdapter receives data callback as a sink to the local @@ -232,7 +242,7 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface, cricket::AudioSource::Sink* sink_; // Critical section protecting |sink_|. - rtc::CriticalSection lock_; + Mutex lock_; }; class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { @@ -247,7 +257,7 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { static rtc::scoped_refptr Create( rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer); virtual ~AudioRtpSender(); @@ -271,7 +281,7 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { protected: AudioRtpSender(rtc::Thread* worker_thread, const std::string& id, - StatsCollector* stats, + StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer); void SetSend() override; @@ -293,7 +303,7 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { } sigslot::signal0<> SignalDestroyed; - StatsCollector* stats_ = nullptr; + StatsCollectorInterface* stats_ = nullptr; rtc::scoped_refptr dtmf_sender_proxy_; bool cached_track_enabled_ = false; diff --git a/pc/rtp_sender_receiver_unittest.cc b/pc/rtp_sender_receiver_unittest.cc index 9736f183b5..364e87a89f 100644 --- a/pc/rtp_sender_receiver_unittest.cc +++ b/pc/rtp_sender_receiver_unittest.cc @@ -88,7 +88,7 @@ static const int kDefaultTimeout = 10000; 
// 10 seconds. class MockSetStreamsObserver : public webrtc::RtpSenderBase::SetStreamsObserver { public: - MOCK_METHOD0(OnSetStreams, void()); + MOCK_METHOD(void, OnSetStreams, (), (override)); }; } // namespace @@ -123,14 +123,13 @@ class RtpSenderReceiverTest voice_channel_ = channel_manager_.CreateVoiceChannel( &fake_call_, cricket::MediaConfig(), rtp_transport_.get(), - MediaTransportConfig(), rtc::Thread::Current(), cricket::CN_AUDIO, - srtp_required, webrtc::CryptoOptions(), &ssrc_generator_, - cricket::AudioOptions()); + rtc::Thread::Current(), cricket::CN_AUDIO, srtp_required, + webrtc::CryptoOptions(), &ssrc_generator_, cricket::AudioOptions()); video_channel_ = channel_manager_.CreateVideoChannel( &fake_call_, cricket::MediaConfig(), rtp_transport_.get(), - MediaTransportConfig(), rtc::Thread::Current(), cricket::CN_VIDEO, - srtp_required, webrtc::CryptoOptions(), &ssrc_generator_, - cricket::VideoOptions(), video_bitrate_allocator_factory_.get()); + rtc::Thread::Current(), cricket::CN_VIDEO, srtp_required, + webrtc::CryptoOptions(), &ssrc_generator_, cricket::VideoOptions(), + video_bitrate_allocator_factory_.get()); voice_channel_->Enable(true); video_channel_->Enable(true); voice_media_channel_ = media_engine_->GetVoiceChannel(0); diff --git a/pc/rtp_transceiver.cc b/pc/rtp_transceiver.cc index fcb54b54c2..6b3032e27f 100644 --- a/pc/rtp_transceiver.cc +++ b/pc/rtp_transceiver.cc @@ -11,8 +11,10 @@ #include "pc/rtp_transceiver.h" #include +#include #include "absl/algorithm/container.h" +#include "api/rtp_parameters.h" #include "pc/channel_manager.h" #include "pc/rtp_media_utils.h" #include "pc/rtp_parameters_conversion.h" @@ -25,7 +27,7 @@ template RTCError VerifyCodecPreferences(const std::vector& codecs, const std::vector& send_codecs, const std::vector& recv_codecs) { - // 6. 
If the intersection between codecs and + // If the intersection between codecs and // RTCRtpSender.getCapabilities(kind).codecs or the intersection between // codecs and RTCRtpReceiver.getCapabilities(kind).codecs only contains RTX, // RED or FEC codecs or is an empty set, throw InvalidModificationError. @@ -58,11 +60,10 @@ RTCError VerifyCodecPreferences(const std::vector& codecs, "codec capabilities."); } - // 7. Let codecCapabilities be the union of + // Let codecCapabilities be the union of // RTCRtpSender.getCapabilities(kind).codecs and - // RTCRtpReceiver.getCapabilities(kind).codecs. 8.1 For each codec in - // codecs, If codec is not in codecCapabilities, throw - // InvalidModificationError. + // RTCRtpReceiver.getCapabilities(kind).codecs. For each codec in codecs, If + // codec is not in codecCapabilities, throw InvalidModificationError. for (const auto& codec_preference : codecs) { bool is_recv_codec = absl::c_any_of(recv_codecs, [&codec_preference](const T& codec) { @@ -96,10 +97,19 @@ RTCError VerifyCodecPreferences(const std::vector& codecs, return RTCError::OK(); } +TaskQueueBase* GetCurrentTaskQueueOrThread() { + TaskQueueBase* current = TaskQueueBase::Current(); + if (!current) + current = rtc::ThreadManager::Instance()->CurrentThread(); + return current; +} + } // namespace RtpTransceiver::RtpTransceiver(cricket::MediaType media_type) - : unified_plan_(false), media_type_(media_type) { + : thread_(GetCurrentTaskQueueOrThread()), + unified_plan_(false), + media_type_(media_type) { RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || media_type == cricket::MEDIA_TYPE_VIDEO); } @@ -108,10 +118,15 @@ RtpTransceiver::RtpTransceiver( rtc::scoped_refptr> sender, rtc::scoped_refptr> receiver, - cricket::ChannelManager* channel_manager) - : unified_plan_(true), + cricket::ChannelManager* channel_manager, + std::vector header_extensions_offered, + std::function on_negotiation_needed) + : thread_(GetCurrentTaskQueueOrThread()), + unified_plan_(true), 
media_type_(sender->media_type()), - channel_manager_(channel_manager) { + channel_manager_(channel_manager), + header_extensions_to_offer_(std::move(header_extensions_offered)), + on_negotiation_needed_(std::move(on_negotiation_needed)) { RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO || media_type_ == cricket::MEDIA_TYPE_VIDEO); RTC_DCHECK_EQ(sender->media_type(), receiver->media_type()); @@ -120,7 +135,7 @@ RtpTransceiver::RtpTransceiver( } RtpTransceiver::~RtpTransceiver() { - Stop(); + StopInternal(); } void RtpTransceiver::SetChannel(cricket::ChannelInterface* channel) { @@ -274,23 +289,43 @@ bool RtpTransceiver::stopped() const { return stopped_; } +bool RtpTransceiver::stopping() const { + RTC_DCHECK_RUN_ON(thread_); + return stopping_; +} + RtpTransceiverDirection RtpTransceiver::direction() const { + if (unified_plan_ && stopping()) + return webrtc::RtpTransceiverDirection::kStopped; + return direction_; } -void RtpTransceiver::SetDirection(RtpTransceiverDirection new_direction) { - if (stopped()) { - return; +RTCError RtpTransceiver::SetDirectionWithError( + RtpTransceiverDirection new_direction) { + if (unified_plan_ && stopping()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, + "Cannot set direction on a stopping transceiver."); } - if (new_direction == direction_) { - return; + if (new_direction == direction_) + return RTCError::OK(); + + if (new_direction == RtpTransceiverDirection::kStopped) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "The set direction 'stopped' is invalid."); } + direction_ = new_direction; - SignalNegotiationNeeded(); + on_negotiation_needed_(); + + return RTCError::OK(); } absl::optional RtpTransceiver::current_direction() const { + if (unified_plan_ && stopped()) + return webrtc::RtpTransceiverDirection::kStopped; + return current_direction_; } @@ -299,14 +334,78 @@ absl::optional RtpTransceiver::fired_direction() return fired_direction_; } -void RtpTransceiver::Stop() { - for (const auto& sender : 
senders_) { +void RtpTransceiver::StopSendingAndReceiving() { + // 1. Let sender be transceiver.[[Sender]]. + // 2. Let receiver be transceiver.[[Receiver]]. + // + // 3. Stop sending media with sender. + // + // 4. Send an RTCP BYE for each RTP stream that was being sent by sender, as + // specified in [RFC3550]. + RTC_DCHECK_RUN_ON(thread_); + for (const auto& sender : senders_) sender->internal()->Stop(); + + // 5. Stop receiving media with receiver. + for (const auto& receiver : receivers_) + receiver->internal()->StopAndEndTrack(); + + stopping_ = true; + direction_ = webrtc::RtpTransceiverDirection::kInactive; +} + +RTCError RtpTransceiver::StopStandard() { + RTC_DCHECK_RUN_ON(thread_); + // If we're on Plan B, do what Stop() used to do there. + if (!unified_plan_) { + StopInternal(); + return RTCError::OK(); } - for (const auto& receiver : receivers_) { - receiver->internal()->Stop(); + // 1. Let transceiver be the RTCRtpTransceiver object on which the method is + // invoked. + // + // 2. Let connection be the RTCPeerConnection object associated with + // transceiver. + // + // 3. If connection.[[IsClosed]] is true, throw an InvalidStateError. + if (is_pc_closed_) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, + "PeerConnection is closed."); } + + // 4. If transceiver.[[Stopping]] is true, abort these steps. + if (stopping_) + return RTCError::OK(); + + // 5. Stop sending and receiving given transceiver, and update the + // negotiation-needed flag for connection. + StopSendingAndReceiving(); + on_negotiation_needed_(); + + return RTCError::OK(); +} + +void RtpTransceiver::StopInternal() { + StopTransceiverProcedure(); +} + +void RtpTransceiver::StopTransceiverProcedure() { + RTC_DCHECK_RUN_ON(thread_); + // As specified in the "Stop the RTCRtpTransceiver" procedure + // 1. If transceiver.[[Stopping]] is false, stop sending and receiving given + // transceiver. + if (!stopping_) + StopSendingAndReceiving(); + + // 2. 
Set transceiver.[[Stopped]] to true. stopped_ = true; + + // Signal the updated change to the senders. + for (const auto& sender : senders_) + sender->internal()->SetTransceiverAsStopped(); + + // 3. Set transceiver.[[Receptive]] to false. + // 4. Set transceiver.[[CurrentDirection]] to null. current_direction_ = absl::nullopt; } @@ -328,6 +427,7 @@ RTCError RtpTransceiver::SetCodecPreferences( return absl::c_linear_search(codecs, codec); }); + // 6. to 8. RTCError result; if (media_type_ == cricket::MEDIA_TYPE_AUDIO) { std::vector recv_codecs, send_codecs; @@ -350,4 +450,57 @@ RTCError RtpTransceiver::SetCodecPreferences( return result; } +std::vector +RtpTransceiver::HeaderExtensionsToOffer() const { + return header_extensions_to_offer_; +} + +RTCError RtpTransceiver::SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer) { + for (const auto& entry : header_extensions_to_offer) { + // Handle unsupported requests for mandatory extensions as per + // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface. + // Note: + // - We do not handle setOfferedRtpHeaderExtensions algorithm step 2.1, + // this has to be checked on a higher level. We naturally error out + // in the handling of Step 2.2 if an unset URI is encountered. + + // Step 2.2. + // Handle unknown extensions. + auto it = std::find_if( + header_extensions_to_offer_.begin(), header_extensions_to_offer_.end(), + [&entry](const auto& offered) { return entry.uri == offered.uri; }); + if (it == header_extensions_to_offer_.end()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Attempted to modify an unoffered extension."); + } + + // Step 2.4-2.5. + // - Use of the transceiver interface indicates unified plan is in effect, + // hence the MID extension needs to be enabled. + // - Also handle the mandatory video orientation extensions. 
+ if ((entry.uri == RtpExtension::kMidUri || + entry.uri == RtpExtension::kVideoRotationUri) && + entry.direction != RtpTransceiverDirection::kSendRecv) { + return RTCError(RTCErrorType::INVALID_MODIFICATION, + "Attempted to stop a mandatory extension."); + } + } + + // Apply mutation after error checking. + for (const auto& entry : header_extensions_to_offer) { + auto it = std::find_if( + header_extensions_to_offer_.begin(), header_extensions_to_offer_.end(), + [&entry](const auto& offered) { return entry.uri == offered.uri; }); + it->direction = entry.direction; + } + + return RTCError::OK(); +} + +void RtpTransceiver::SetPeerConnectionClosed() { + is_pc_closed_ = true; +} + } // namespace webrtc diff --git a/pc/rtp_transceiver.h b/pc/rtp_transceiver.h index 7ab9e9849a..4d9716c89b 100644 --- a/pc/rtp_transceiver.h +++ b/pc/rtp_transceiver.h @@ -64,11 +64,15 @@ class RtpTransceiver final // Construct a Unified Plan-style RtpTransceiver with the given sender and // receiver. The media type will be derived from the media types of the sender // and receiver. The sender and receiver should have the same media type. + // |HeaderExtensionsToOffer| is used for initializing the return value of + // HeaderExtensionsToOffer(). RtpTransceiver( rtc::scoped_refptr> sender, rtc::scoped_refptr> receiver, - cricket::ChannelManager* channel_manager); + cricket::ChannelManager* channel_manager, + std::vector HeaderExtensionsToOffer, + std::function on_negotiation_needed); ~RtpTransceiver() override; // Returns the Voice/VideoChannel set for this transceiver. May be null if @@ -170,9 +174,17 @@ class RtpTransceiver final return has_ever_been_used_to_send_; } + // Informs the transceiver that its owning + // PeerConnection is closed. + void SetPeerConnectionClosed(); + + // Executes the "stop the RTCRtpTransceiver" procedure from + // the webrtc-pc specification, described under the stop() method. 
+ void StopTransceiverProcedure(); + // Fired when the RtpTransceiver state changes such that negotiation is now // needed (e.g., in response to a direction change). - sigslot::signal0<> SignalNegotiationNeeded; + // sigslot::signal0<> SignalNegotiationNeeded; // RtpTransceiverInterface implementation. cricket::MediaType media_type() const override; @@ -180,20 +192,31 @@ class RtpTransceiver final rtc::scoped_refptr sender() const override; rtc::scoped_refptr receiver() const override; bool stopped() const override; + bool stopping() const override; RtpTransceiverDirection direction() const override; - void SetDirection(RtpTransceiverDirection new_direction) override; + RTCError SetDirectionWithError( + RtpTransceiverDirection new_direction) override; absl::optional current_direction() const override; absl::optional fired_direction() const override; - void Stop() override; + RTCError StopStandard() override; + void StopInternal() override; RTCError SetCodecPreferences( rtc::ArrayView codecs) override; std::vector codec_preferences() const override { return codec_preferences_; } + std::vector HeaderExtensionsToOffer() + const override; + RTCError SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer) override; private: void OnFirstPacketReceived(cricket::ChannelInterface* channel); + void StopSendingAndReceiving(); + // Enforce that this object is created, used and destroyed on one thread. 
+ const TaskQueueBase* thread_; const bool unified_plan_; const cricket::MediaType media_type_; std::vector>> @@ -203,6 +226,8 @@ class RtpTransceiver final receivers_; bool stopped_ = false; + bool stopping_ RTC_GUARDED_BY(thread_) = false; + bool is_pc_closed_ = false; RtpTransceiverDirection direction_ = RtpTransceiverDirection::kInactive; absl::optional current_direction_; absl::optional fired_direction_; @@ -215,24 +240,33 @@ class RtpTransceiver final cricket::ChannelInterface* channel_ = nullptr; cricket::ChannelManager* channel_manager_ = nullptr; std::vector codec_preferences_; + std::vector header_extensions_to_offer_; + const std::function on_negotiation_needed_; }; BEGIN_SIGNALING_PROXY_MAP(RtpTransceiver) PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) PROXY_CONSTMETHOD0(absl::optional, mid) PROXY_CONSTMETHOD0(rtc::scoped_refptr, sender) PROXY_CONSTMETHOD0(rtc::scoped_refptr, receiver) PROXY_CONSTMETHOD0(bool, stopped) +PROXY_CONSTMETHOD0(bool, stopping) PROXY_CONSTMETHOD0(RtpTransceiverDirection, direction) -PROXY_METHOD1(void, SetDirection, RtpTransceiverDirection) +PROXY_METHOD1(webrtc::RTCError, SetDirectionWithError, RtpTransceiverDirection) PROXY_CONSTMETHOD0(absl::optional, current_direction) PROXY_CONSTMETHOD0(absl::optional, fired_direction) -PROXY_METHOD0(void, Stop) +PROXY_METHOD0(webrtc::RTCError, StopStandard) +PROXY_METHOD0(void, StopInternal) PROXY_METHOD1(webrtc::RTCError, SetCodecPreferences, rtc::ArrayView) PROXY_CONSTMETHOD0(std::vector, codec_preferences) +PROXY_CONSTMETHOD0(std::vector, + HeaderExtensionsToOffer) +PROXY_METHOD1(webrtc::RTCError, + SetOfferedRtpHeaderExtensions, + rtc::ArrayView) END_PROXY_MAP() } // namespace webrtc diff --git a/pc/rtp_transceiver_unittest.cc b/pc/rtp_transceiver_unittest.cc index 885a5a10c8..96e38b0b23 100644 --- a/pc/rtp_transceiver_unittest.cc +++ b/pc/rtp_transceiver_unittest.cc @@ -12,10 +12,20 
@@ #include "pc/rtp_transceiver.h" +#include + +#include "media/base/fake_media_engine.h" #include "pc/test/mock_channel_interface.h" +#include "pc/test/mock_rtp_receiver_internal.h" +#include "pc/test/mock_rtp_sender_internal.h" #include "test/gmock.h" #include "test/gtest.h" +using ::testing::ElementsAre; +using ::testing::Eq; +using ::testing::Field; +using ::testing::Not; +using ::testing::Property; using ::testing::Return; using ::testing::ReturnRef; @@ -35,7 +45,7 @@ TEST(RtpTransceiverTest, CannotSetChannelOnStoppedTransceiver) { EXPECT_EQ(&channel1, transceiver.channel()); // Stop the transceiver. - transceiver.Stop(); + transceiver.StopInternal(); EXPECT_EQ(&channel1, transceiver.channel()); cricket::MockChannelInterface channel2; @@ -61,7 +71,7 @@ TEST(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) { EXPECT_EQ(&channel, transceiver.channel()); // Stop the transceiver. - transceiver.Stop(); + transceiver.StopInternal(); EXPECT_EQ(&channel, transceiver.channel()); // Set the channel to |nullptr|. 
@@ -69,4 +79,131 @@ TEST(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) { EXPECT_EQ(nullptr, transceiver.channel()); } +class RtpTransceiverUnifiedPlanTest : public ::testing::Test { + public: + RtpTransceiverUnifiedPlanTest() + : channel_manager_(std::make_unique(), + std::make_unique(), + rtc::Thread::Current(), + rtc::Thread::Current()), + transceiver_(RtpSenderProxyWithInternal::Create( + rtc::Thread::Current(), + new rtc::RefCountedObject()), + RtpReceiverProxyWithInternal::Create( + rtc::Thread::Current(), + new rtc::RefCountedObject()), + &channel_manager_, + channel_manager_.GetSupportedAudioRtpHeaderExtensions(), + /* on_negotiation_needed= */ [] {}) {} + + cricket::ChannelManager channel_manager_; + RtpTransceiver transceiver_; +}; + +// Basic tests for Stop() +TEST_F(RtpTransceiverUnifiedPlanTest, StopSetsDirection) { + EXPECT_EQ(RtpTransceiverDirection::kInactive, transceiver_.direction()); + EXPECT_FALSE(transceiver_.current_direction()); + transceiver_.StopStandard(); + EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver_.direction()); + EXPECT_FALSE(transceiver_.current_direction()); + transceiver_.StopTransceiverProcedure(); + EXPECT_TRUE(transceiver_.current_direction()); + EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver_.direction()); + EXPECT_EQ(RtpTransceiverDirection::kStopped, + *transceiver_.current_direction()); +} + +class RtpTransceiverTestForHeaderExtensions : public ::testing::Test { + public: + RtpTransceiverTestForHeaderExtensions() + : channel_manager_(std::make_unique(), + std::make_unique(), + rtc::Thread::Current(), + rtc::Thread::Current()), + extensions_( + {RtpHeaderExtensionCapability("uri1", + 1, + RtpTransceiverDirection::kSendOnly), + RtpHeaderExtensionCapability("uri2", + 2, + RtpTransceiverDirection::kRecvOnly), + RtpHeaderExtensionCapability(RtpExtension::kMidUri, + 3, + RtpTransceiverDirection::kSendRecv), + RtpHeaderExtensionCapability(RtpExtension::kVideoRotationUri, + 4, + 
RtpTransceiverDirection::kSendRecv)}), + transceiver_(RtpSenderProxyWithInternal::Create( + rtc::Thread::Current(), + new rtc::RefCountedObject()), + RtpReceiverProxyWithInternal::Create( + rtc::Thread::Current(), + new rtc::RefCountedObject()), + &channel_manager_, + extensions_, + /* on_negotiation_needed= */ [] {}) {} + + cricket::ChannelManager channel_manager_; + std::vector extensions_; + RtpTransceiver transceiver_; +}; + +TEST_F(RtpTransceiverTestForHeaderExtensions, OffersChannelManagerList) { + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_); +} + +TEST_F(RtpTransceiverTestForHeaderExtensions, ModifiesDirection) { + auto modified_extensions = extensions_; + modified_extensions[0].direction = RtpTransceiverDirection::kSendOnly; + EXPECT_TRUE( + transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok()); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions); + modified_extensions[0].direction = RtpTransceiverDirection::kRecvOnly; + EXPECT_TRUE( + transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok()); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions); + modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv; + EXPECT_TRUE( + transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok()); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions); + modified_extensions[0].direction = RtpTransceiverDirection::kInactive; + EXPECT_TRUE( + transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok()); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions); +} + +TEST_F(RtpTransceiverTestForHeaderExtensions, AcceptsStoppedExtension) { + auto modified_extensions = extensions_; + modified_extensions[0].direction = RtpTransceiverDirection::kStopped; + EXPECT_TRUE( + transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok()); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions); +} + 
+TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsUnsupportedExtension) { + std::vector modified_extensions( + {RtpHeaderExtensionCapability("uri3", 1, + RtpTransceiverDirection::kSendRecv)}); + EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions), + Property(&RTCError::type, RTCErrorType::INVALID_PARAMETER)); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_); +} + +TEST_F(RtpTransceiverTestForHeaderExtensions, + RejectsStoppedMandatoryExtensions) { + std::vector modified_extensions = extensions_; + // Attempting to stop the mandatory MID extension. + modified_extensions[2].direction = RtpTransceiverDirection::kStopped; + EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions), + Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION)); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_); + modified_extensions = extensions_; + // Attempting to stop the mandatory video orientation extension. + modified_extensions[3].direction = RtpTransceiverDirection::kStopped; + EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions), + Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION)); + EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_); +} + } // namespace webrtc diff --git a/pc/rtp_transmission_manager.cc b/pc/rtp_transmission_manager.cc new file mode 100644 index 0000000000..e796f9b1b1 --- /dev/null +++ b/pc/rtp_transmission_manager.cc @@ -0,0 +1,685 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/rtp_transmission_manager.h" + +#include + +#include "absl/types/optional.h" +#include "api/peer_connection_interface.h" +#include "api/rtp_transceiver_direction.h" +#include "pc/audio_rtp_receiver.h" +#include "pc/channel.h" +#include "pc/stats_collector_interface.h" +#include "pc/video_rtp_receiver.h" +#include "rtc_base/checks.h" +#include "rtc_base/helpers.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { + +static const char kDefaultAudioSenderId[] = "defaulta0"; +static const char kDefaultVideoSenderId[] = "defaultv0"; + +} // namespace + +RtpTransmissionManager::RtpTransmissionManager( + bool is_unified_plan, + rtc::Thread* signaling_thread, + rtc::Thread* worker_thread, + cricket::ChannelManager* channel_manager, + UsagePattern* usage_pattern, + PeerConnectionObserver* observer, + StatsCollectorInterface* stats, + std::function on_negotiation_needed) + : is_unified_plan_(is_unified_plan), + signaling_thread_(signaling_thread), + worker_thread_(worker_thread), + channel_manager_(channel_manager), + usage_pattern_(usage_pattern), + observer_(observer), + stats_(stats), + on_negotiation_needed_(on_negotiation_needed), + weak_ptr_factory_(this) {} + +void RtpTransmissionManager::Close() { + closed_ = true; + observer_ = nullptr; +} + +// Implementation of SetStreamsObserver +void RtpTransmissionManager::OnSetStreams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (IsUnifiedPlan()) + OnNegotiationNeeded(); +} + +// Function to call back to the PeerConnection when negotiation is needed +void RtpTransmissionManager::OnNegotiationNeeded() { + on_negotiation_needed_(); +} + +// Function that returns the currently valid observer +PeerConnectionObserver* RtpTransmissionManager::Observer() const { + RTC_DCHECK(!closed_); + RTC_DCHECK(observer_); + return observer_; +} + +cricket::VoiceMediaChannel* RtpTransmissionManager::voice_media_channel() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + 
auto* voice_channel = static_cast( + GetAudioTransceiver()->internal()->channel()); + if (voice_channel) { + return voice_channel->media_channel(); + } else { + return nullptr; + } +} + +cricket::VideoMediaChannel* RtpTransmissionManager::video_media_channel() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto* video_channel = static_cast( + GetVideoTransceiver()->internal()->channel()); + if (video_channel) { + return video_channel->media_channel(); + } else { + return nullptr; + } +} + +RTCErrorOr> +RtpTransmissionManager::AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + return (IsUnifiedPlan() ? AddTrackUnifiedPlan(track, stream_ids) + : AddTrackPlanB(track, stream_ids)); +} + +RTCErrorOr> +RtpTransmissionManager::AddTrackPlanB( + rtc::scoped_refptr track, + const std::vector& stream_ids) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (stream_ids.size() > 1u) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "AddTrack with more than one stream is not " + "supported with Plan B semantics."); + } + std::vector adjusted_stream_ids = stream_ids; + if (adjusted_stream_ids.empty()) { + adjusted_stream_ids.push_back(rtc::CreateRandomUuid()); + } + cricket::MediaType media_type = + (track->kind() == MediaStreamTrackInterface::kAudioKind + ? 
cricket::MEDIA_TYPE_AUDIO + : cricket::MEDIA_TYPE_VIDEO); + auto new_sender = + CreateSender(media_type, track->id(), track, adjusted_stream_ids, {}); + if (track->kind() == MediaStreamTrackInterface::kAudioKind) { + new_sender->internal()->SetMediaChannel(voice_media_channel()); + GetAudioTransceiver()->internal()->AddSender(new_sender); + const RtpSenderInfo* sender_info = + FindSenderInfo(local_audio_sender_infos_, + new_sender->internal()->stream_ids()[0], track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } + } else { + RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind()); + new_sender->internal()->SetMediaChannel(video_media_channel()); + GetVideoTransceiver()->internal()->AddSender(new_sender); + const RtpSenderInfo* sender_info = + FindSenderInfo(local_video_sender_infos_, + new_sender->internal()->stream_ids()[0], track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } + } + return rtc::scoped_refptr(new_sender); +} + +RTCErrorOr> +RtpTransmissionManager::AddTrackUnifiedPlan( + rtc::scoped_refptr track, + const std::vector& stream_ids) { + auto transceiver = FindFirstTransceiverForAddedTrack(track); + if (transceiver) { + RTC_LOG(LS_INFO) << "Reusing an existing " + << cricket::MediaTypeToString(transceiver->media_type()) + << " transceiver for AddTrack."; + if (transceiver->stopping()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "The existing transceiver is stopping."); + } + + if (transceiver->direction() == RtpTransceiverDirection::kRecvOnly) { + transceiver->internal()->set_direction( + RtpTransceiverDirection::kSendRecv); + } else if (transceiver->direction() == RtpTransceiverDirection::kInactive) { + transceiver->internal()->set_direction( + RtpTransceiverDirection::kSendOnly); + } + transceiver->sender()->SetTrack(track); + transceiver->internal()->sender_internal()->set_stream_ids(stream_ids); + 
transceiver->internal()->set_reused_for_addtrack(true); + } else { + cricket::MediaType media_type = + (track->kind() == MediaStreamTrackInterface::kAudioKind + ? cricket::MEDIA_TYPE_AUDIO + : cricket::MEDIA_TYPE_VIDEO); + RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type) + << " transceiver in response to a call to AddTrack."; + std::string sender_id = track->id(); + // Avoid creating a sender with an existing ID by generating a random ID. + // This can happen if this is the second time AddTrack has created a sender + // for this track. + if (FindSenderById(sender_id)) { + sender_id = rtc::CreateRandomUuid(); + } + auto sender = CreateSender(media_type, sender_id, track, stream_ids, {}); + auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); + transceiver = CreateAndAddTransceiver(sender, receiver); + transceiver->internal()->set_created_by_addtrack(true); + transceiver->internal()->set_direction(RtpTransceiverDirection::kSendRecv); + } + return transceiver->sender(); +} + +rtc::scoped_refptr> +RtpTransmissionManager::CreateSender( + cricket::MediaType media_type, + const std::string& id, + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& send_encodings) { + RTC_DCHECK_RUN_ON(signaling_thread()); + rtc::scoped_refptr> sender; + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + RTC_DCHECK(!track || + (track->kind() == MediaStreamTrackInterface::kAudioKind)); + sender = RtpSenderProxyWithInternal::Create( + signaling_thread(), + AudioRtpSender::Create(worker_thread(), id, stats_, this)); + NoteUsageEvent(UsageEvent::AUDIO_ADDED); + } else { + RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK(!track || + (track->kind() == MediaStreamTrackInterface::kVideoKind)); + sender = RtpSenderProxyWithInternal::Create( + signaling_thread(), VideoRtpSender::Create(worker_thread(), id, this)); + NoteUsageEvent(UsageEvent::VIDEO_ADDED); + } + bool set_track_succeeded = sender->SetTrack(track); + 
RTC_DCHECK(set_track_succeeded); + sender->internal()->set_stream_ids(stream_ids); + sender->internal()->set_init_send_encodings(send_encodings); + return sender; +} + +rtc::scoped_refptr> +RtpTransmissionManager::CreateReceiver(cricket::MediaType media_type, + const std::string& receiver_id) { + RTC_DCHECK_RUN_ON(signaling_thread()); + rtc::scoped_refptr> + receiver; + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), new AudioRtpReceiver(worker_thread(), receiver_id, + std::vector({}))); + NoteUsageEvent(UsageEvent::AUDIO_ADDED); + } else { + RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); + receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), new VideoRtpReceiver(worker_thread(), receiver_id, + std::vector({}))); + NoteUsageEvent(UsageEvent::VIDEO_ADDED); + } + return receiver; +} + +rtc::scoped_refptr> +RtpTransmissionManager::CreateAndAddTransceiver( + rtc::scoped_refptr> sender, + rtc::scoped_refptr> + receiver) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Ensure that the new sender does not have an ID that is already in use by + // another sender. + // Allow receiver IDs to conflict since those come from remote SDP (which + // could be invalid, but should not cause a crash). + RTC_DCHECK(!FindSenderById(sender->id())); + auto transceiver = RtpTransceiverProxyWithInternal::Create( + signaling_thread(), + new RtpTransceiver( + sender, receiver, channel_manager(), + sender->media_type() == cricket::MEDIA_TYPE_AUDIO + ? 
channel_manager()->GetSupportedAudioRtpHeaderExtensions() + : channel_manager()->GetSupportedVideoRtpHeaderExtensions(), + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() { + if (this_weak_ptr) { + this_weak_ptr->OnNegotiationNeeded(); + } + })); + transceivers()->Add(transceiver); + return transceiver; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindFirstTransceiverForAddedTrack( + rtc::scoped_refptr track) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(track); + for (auto transceiver : transceivers()->List()) { + if (!transceiver->sender()->track() && + cricket::MediaTypeToString(transceiver->media_type()) == + track->kind() && + !transceiver->internal()->has_ever_been_used_to_send() && + !transceiver->stopped()) { + return transceiver; + } + } + return nullptr; +} + +std::vector>> +RtpTransmissionManager::GetSendersInternal() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector>> + all_senders; + for (const auto& transceiver : transceivers_.List()) { + if (IsUnifiedPlan() && transceiver->internal()->stopped()) + continue; + + auto senders = transceiver->internal()->senders(); + all_senders.insert(all_senders.end(), senders.begin(), senders.end()); + } + return all_senders; +} + +std::vector< + rtc::scoped_refptr>> +RtpTransmissionManager::GetReceiversInternal() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector< + rtc::scoped_refptr>> + all_receivers; + for (const auto& transceiver : transceivers_.List()) { + if (IsUnifiedPlan() && transceiver->internal()->stopped()) + continue; + + auto receivers = transceiver->internal()->receivers(); + all_receivers.insert(all_receivers.end(), receivers.begin(), + receivers.end()); + } + return all_receivers; +} + +rtc::scoped_refptr> +RtpTransmissionManager::GetAudioTransceiver() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + // This method only works with Plan B SDP, where there is a single + // audio/video transceiver. 
+ RTC_DCHECK(!IsUnifiedPlan()); + for (auto transceiver : transceivers_.List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + return transceiver; + } + } + RTC_NOTREACHED(); + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::GetVideoTransceiver() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + // This method only works with Plan B SDP, where there is a single + // audio/video transceiver. + RTC_DCHECK(!IsUnifiedPlan()); + for (auto transceiver : transceivers_.List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { + return transceiver; + } + } + RTC_NOTREACHED(); + return nullptr; +} + +void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(track); + RTC_DCHECK(stream); + auto sender = FindSenderForTrack(track); + if (sender) { + // We already have a sender for this track, so just change the stream_id + // so that it's correct in the next call to CreateOffer. + sender->internal()->set_stream_ids({stream->id()}); + return; + } + + // Normal case; we've never seen this track before. + auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), track, + {stream->id()}, {}); + new_sender->internal()->SetMediaChannel(voice_media_channel()); + GetAudioTransceiver()->internal()->AddSender(new_sender); + // If the sender has already been configured in SDP, we call SetSsrc, + // which will connect the sender to the underlying transport. This can + // occur if a local session description that contains the ID of the sender + // is set before AddStream is called. It can also occur if the local + // session description is not changed and RemoveStream is called, and + // later AddStream is called again with the same stream. 
+ const RtpSenderInfo* sender_info = + FindSenderInfo(local_audio_sender_infos_, stream->id(), track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } +} + +// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around +// indefinitely, when we have unified plan SDP. +void RtpTransmissionManager::RemoveAudioTrack(AudioTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto sender = FindSenderForTrack(track); + if (!sender) { + RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() + << " doesn't exist."; + return; + } + GetAudioTransceiver()->internal()->RemoveSender(sender); +} + +void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(track); + RTC_DCHECK(stream); + auto sender = FindSenderForTrack(track); + if (sender) { + // We already have a sender for this track, so just change the stream_id + // so that it's correct in the next call to CreateOffer. + sender->internal()->set_stream_ids({stream->id()}); + return; + } + + // Normal case; we've never seen this track before. 
+ auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), track, + {stream->id()}, {}); + new_sender->internal()->SetMediaChannel(video_media_channel()); + GetVideoTransceiver()->internal()->AddSender(new_sender); + const RtpSenderInfo* sender_info = + FindSenderInfo(local_video_sender_infos_, stream->id(), track->id()); + if (sender_info) { + new_sender->internal()->SetSsrc(sender_info->first_ssrc); + } +} + +void RtpTransmissionManager::RemoveVideoTrack(VideoTrackInterface* track, + MediaStreamInterface* stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto sender = FindSenderForTrack(track); + if (!sender) { + RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id() + << " doesn't exist."; + return; + } + GetVideoTransceiver()->internal()->RemoveSender(sender); +} + +void RtpTransmissionManager::CreateAudioReceiver( + MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) { + RTC_DCHECK(!closed_); + std::vector> streams; + streams.push_back(rtc::scoped_refptr(stream)); + // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use + // the constructor taking stream IDs instead. 
+ auto* audio_receiver = new AudioRtpReceiver( + worker_thread(), remote_sender_info.sender_id, streams); + audio_receiver->SetMediaChannel(voice_media_channel()); + if (remote_sender_info.sender_id == kDefaultAudioSenderId) { + audio_receiver->SetupUnsignaledMediaChannel(); + } else { + audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); + } + auto receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), audio_receiver); + GetAudioTransceiver()->internal()->AddReceiver(receiver); + Observer()->OnAddTrack(receiver, streams); + NoteUsageEvent(UsageEvent::AUDIO_ADDED); +} + +void RtpTransmissionManager::CreateVideoReceiver( + MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) { + RTC_DCHECK(!closed_); + std::vector> streams; + streams.push_back(rtc::scoped_refptr(stream)); + // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use + // the constructor taking stream IDs instead. + auto* video_receiver = new VideoRtpReceiver( + worker_thread(), remote_sender_info.sender_id, streams); + video_receiver->SetMediaChannel(video_media_channel()); + if (remote_sender_info.sender_id == kDefaultVideoSenderId) { + video_receiver->SetupUnsignaledMediaChannel(); + } else { + video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); + } + auto receiver = RtpReceiverProxyWithInternal::Create( + signaling_thread(), video_receiver); + GetVideoTransceiver()->internal()->AddReceiver(receiver); + Observer()->OnAddTrack(receiver, streams); + NoteUsageEvent(UsageEvent::VIDEO_ADDED); +} + +// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote +// description. 
+rtc::scoped_refptr +RtpTransmissionManager::RemoveAndStopReceiver( + const RtpSenderInfo& remote_sender_info) { + auto receiver = FindReceiverById(remote_sender_info.sender_id); + if (!receiver) { + RTC_LOG(LS_WARNING) << "RtpReceiver for track with id " + << remote_sender_info.sender_id << " doesn't exist."; + return nullptr; + } + if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + GetAudioTransceiver()->internal()->RemoveReceiver(receiver); + } else { + GetVideoTransceiver()->internal()->RemoveReceiver(receiver); + } + return receiver; +} + +void RtpTransmissionManager::OnRemoteSenderAdded( + const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_LOG(LS_INFO) << "Creating " << cricket::MediaTypeToString(media_type) + << " receiver for track_id=" << sender_info.sender_id + << " and stream_id=" << sender_info.stream_id; + + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + CreateAudioReceiver(stream, sender_info); + } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + CreateVideoReceiver(stream, sender_info); + } else { + RTC_NOTREACHED() << "Invalid media type"; + } +} + +void RtpTransmissionManager::OnRemoteSenderRemoved( + const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_LOG(LS_INFO) << "Removing " << cricket::MediaTypeToString(media_type) + << " receiver for track_id=" << sender_info.sender_id + << " and stream_id=" << sender_info.stream_id; + + rtc::scoped_refptr receiver; + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + // When the MediaEngine audio channel is destroyed, the RemoteAudioSource + // will be notified which will end the AudioRtpReceiver::track(). 
+ receiver = RemoveAndStopReceiver(sender_info); + rtc::scoped_refptr audio_track = + stream->FindAudioTrack(sender_info.sender_id); + if (audio_track) { + stream->RemoveTrack(audio_track); + } + } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + // Stopping or destroying a VideoRtpReceiver will end the + // VideoRtpReceiver::track(). + receiver = RemoveAndStopReceiver(sender_info); + rtc::scoped_refptr video_track = + stream->FindVideoTrack(sender_info.sender_id); + if (video_track) { + // There's no guarantee the track is still available, e.g. the track may + // have been removed from the stream by an application. + stream->RemoveTrack(video_track); + } + } else { + RTC_NOTREACHED() << "Invalid media type"; + } + if (receiver) { + RTC_DCHECK(!closed_); + Observer()->OnRemoveTrack(receiver); + } +} + +void RtpTransmissionManager::OnLocalSenderAdded( + const RtpSenderInfo& sender_info, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + auto sender = FindSenderById(sender_info.sender_id); + if (!sender) { + RTC_LOG(LS_WARNING) << "An unknown RtpSender with id " + << sender_info.sender_id + << " has been configured in the local description."; + return; + } + + if (sender->media_type() != media_type) { + RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" + " description with an unexpected media type."; + return; + } + + sender->internal()->set_stream_ids({sender_info.stream_id}); + sender->internal()->SetSsrc(sender_info.first_ssrc); +} + +void RtpTransmissionManager::OnLocalSenderRemoved( + const RtpSenderInfo& sender_info, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + auto sender = FindSenderById(sender_info.sender_id); + if (!sender) { + // This is the normal case. I.e., RemoveStream has been called and the + // SessionDescriptions has been renegotiated. 
+ return; + } + + // A sender has been removed from the SessionDescription but it's still + // associated with the PeerConnection. This only occurs if the SDP doesn't + // match with the calls to CreateSender, AddStream and RemoveStream. + if (sender->media_type() != media_type) { + RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local" + " description with an unexpected media type."; + return; + } + + sender->internal()->SetSsrc(0); +} + +std::vector* RtpTransmissionManager::GetRemoteSenderInfos( + cricket::MediaType media_type) { + RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO); + return (media_type == cricket::MEDIA_TYPE_AUDIO) + ? &remote_audio_sender_infos_ + : &remote_video_sender_infos_; +} + +std::vector* RtpTransmissionManager::GetLocalSenderInfos( + cricket::MediaType media_type) { + RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO); + return (media_type == cricket::MEDIA_TYPE_AUDIO) ? 
&local_audio_sender_infos_ + : &local_video_sender_infos_; +} + +const RtpSenderInfo* RtpTransmissionManager::FindSenderInfo( + const std::vector& infos, + const std::string& stream_id, + const std::string sender_id) const { + for (const RtpSenderInfo& sender_info : infos) { + if (sender_info.stream_id == stream_id && + sender_info.sender_id == sender_id) { + return &sender_info; + } + } + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindSenderForTrack( + MediaStreamTrackInterface* track) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers_.List()) { + for (auto sender : transceiver->internal()->senders()) { + if (sender->track() == track) { + return sender; + } + } + } + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindSenderById(const std::string& sender_id) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers_.List()) { + for (auto sender : transceiver->internal()->senders()) { + if (sender->id() == sender_id) { + return sender; + } + } + } + return nullptr; +} + +rtc::scoped_refptr> +RtpTransmissionManager::FindReceiverById(const std::string& receiver_id) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers_.List()) { + for (auto receiver : transceiver->internal()->receivers()) { + if (receiver->id() == receiver_id) { + return receiver; + } + } + } + return nullptr; +} + +} // namespace webrtc diff --git a/pc/rtp_transmission_manager.h b/pc/rtp_transmission_manager.h new file mode 100644 index 0000000000..731c3b74dd --- /dev/null +++ b/pc/rtp_transmission_manager.h @@ -0,0 +1,267 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_RTP_TRANSMISSION_MANAGER_H_ +#define PC_RTP_TRANSMISSION_MANAGER_H_ + +#include +#include +#include +#include + +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "media/base/media_channel.h" +#include "pc/channel_manager.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_transceiver.h" +#include "pc/stats_collector_interface.h" +#include "pc/transceiver_list.h" +#include "pc/usage_pattern.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace rtc { +class Thread; +} + +namespace webrtc { + +// This class contains information about +// an RTPSender, used for things like looking it up by SSRC. +struct RtpSenderInfo { + RtpSenderInfo() : first_ssrc(0) {} + RtpSenderInfo(const std::string& stream_id, + const std::string sender_id, + uint32_t ssrc) + : stream_id(stream_id), sender_id(sender_id), first_ssrc(ssrc) {} + bool operator==(const RtpSenderInfo& other) { + return this->stream_id == other.stream_id && + this->sender_id == other.sender_id && + this->first_ssrc == other.first_ssrc; + } + std::string stream_id; + std::string sender_id; + // An RtpSender can have many SSRCs. The first one is used as a sort of ID + // for communicating with the lower layers. + uint32_t first_ssrc; +}; + +// The RtpTransmissionManager class is responsible for managing the lifetime +// and relationships between objects of type RtpSender, RtpReceiver and +// RtpTransceiver. 
+class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { + public: + RtpTransmissionManager(bool is_unified_plan, + rtc::Thread* signaling_thread, + rtc::Thread* worker_thread, + cricket::ChannelManager* channel_manager, + UsagePattern* usage_pattern, + PeerConnectionObserver* observer, + StatsCollectorInterface* stats_, + std::function on_negotiation_needed); + + // No move or copy permitted. + RtpTransmissionManager(const RtpTransmissionManager&) = delete; + RtpTransmissionManager& operator=(const RtpTransmissionManager&) = delete; + + // Stop activity. In particular, don't call observer_ any more. + void Close(); + + // RtpSenderBase::SetStreamsObserver override. + void OnSetStreams() override; + + // Add a new track, creating transceiver if required. + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids); + + // Create a new RTP sender. Does not associate with a transceiver. + rtc::scoped_refptr> + CreateSender(cricket::MediaType media_type, + const std::string& id, + rtc::scoped_refptr track, + const std::vector& stream_ids, + const std::vector& send_encodings); + + // Create a new RTP receiver. Does not associate with a transceiver. + rtc::scoped_refptr> + CreateReceiver(cricket::MediaType media_type, const std::string& receiver_id); + + // Create a new RtpTransceiver of the given type and add it to the list of + // registered transceivers. + rtc::scoped_refptr> + CreateAndAddTransceiver( + rtc::scoped_refptr> sender, + rtc::scoped_refptr> + receiver); + + // Returns the first RtpTransceiver suitable for a newly added track, if such + // transceiver is available. 
+ rtc::scoped_refptr> + FindFirstTransceiverForAddedTrack( + rtc::scoped_refptr track); + + // Returns the list of senders currently associated with some + // registered transceiver + std::vector>> + GetSendersInternal() const; + + // Returns the list of receivers currently associated with a transceiver + std::vector< + rtc::scoped_refptr>> + GetReceiversInternal() const; + + // Plan B: Get the transceiver containing all audio senders and receivers + rtc::scoped_refptr> + GetAudioTransceiver() const; + // Plan B: Get the transceiver containing all video senders and receivers + rtc::scoped_refptr> + GetVideoTransceiver() const; + + // Add an audio track, reusing or creating the sender. + void AddAudioTrack(AudioTrackInterface* track, MediaStreamInterface* stream); + // Plan B: Remove an audio track, removing the sender. + void RemoveAudioTrack(AudioTrackInterface* track, + MediaStreamInterface* stream); + // Add a video track, reusing or creating the sender. + void AddVideoTrack(VideoTrackInterface* track, MediaStreamInterface* stream); + // Plan B: Remove a video track, removing the sender. + void RemoveVideoTrack(VideoTrackInterface* track, + MediaStreamInterface* stream); + + // Triggered when a remote sender has been seen for the first time in a remote + // session description. It creates a remote MediaStreamTrackInterface + // implementation and triggers CreateAudioReceiver or CreateVideoReceiver. + void OnRemoteSenderAdded(const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type); + + // Triggered when a remote sender has been removed from a remote session + // description. It removes the remote sender with id |sender_id| from a remote + // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver. 
+ void OnRemoteSenderRemoved(const RtpSenderInfo& sender_info, + MediaStreamInterface* stream, + cricket::MediaType media_type); + + // Triggered when a local sender has been seen for the first time in a local + // session description. + // This method triggers CreateAudioSender or CreateVideoSender if the rtp + // streams in the local SessionDescription can be mapped to a MediaStreamTrack + // in a MediaStream in |local_streams_| + void OnLocalSenderAdded(const RtpSenderInfo& sender_info, + cricket::MediaType media_type); + + // Triggered when a local sender has been removed from a local session + // description. + // This method triggers DestroyAudioSender or DestroyVideoSender if a stream + // has been removed from the local SessionDescription and the stream can be + // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|. + void OnLocalSenderRemoved(const RtpSenderInfo& sender_info, + cricket::MediaType media_type); + + std::vector* GetRemoteSenderInfos( + cricket::MediaType media_type); + std::vector* GetLocalSenderInfos( + cricket::MediaType media_type); + const RtpSenderInfo* FindSenderInfo(const std::vector& infos, + const std::string& stream_id, + const std::string sender_id) const; + + // Return the RtpSender with the given track attached. + rtc::scoped_refptr> + FindSenderForTrack(MediaStreamTrackInterface* track) const; + + // Return the RtpSender with the given id, or null if none exists. + rtc::scoped_refptr> + FindSenderById(const std::string& sender_id) const; + + // Return the RtpReceiver with the given id, or null if none exists. + rtc::scoped_refptr> + FindReceiverById(const std::string& receiver_id) const; + + TransceiverList* transceivers() { return &transceivers_; } + const TransceiverList* transceivers() const { return &transceivers_; } + + // Plan B helpers for getting the voice/video media channels for the single + // audio/video transceiver, if it exists. 
+ cricket::VoiceMediaChannel* voice_media_channel() const; + cricket::VideoMediaChannel* video_media_channel() const; + + private: + rtc::Thread* signaling_thread() const { return signaling_thread_; } + rtc::Thread* worker_thread() const { return worker_thread_; } + cricket::ChannelManager* channel_manager() const { return channel_manager_; } + bool IsUnifiedPlan() const { return is_unified_plan_; } + void NoteUsageEvent(UsageEvent event) { + usage_pattern_->NoteUsageEvent(event); + } + + // AddTrack implementation when Unified Plan is specified. + RTCErrorOr> AddTrackUnifiedPlan( + rtc::scoped_refptr track, + const std::vector& stream_ids); + // AddTrack implementation when Plan B is specified. + RTCErrorOr> AddTrackPlanB( + rtc::scoped_refptr track, + const std::vector& stream_ids); + + // Create an RtpReceiver that sources an audio track. + void CreateAudioReceiver(MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) + RTC_RUN_ON(signaling_thread()); + + // Create an RtpReceiver that sources a video track. + void CreateVideoReceiver(MediaStreamInterface* stream, + const RtpSenderInfo& remote_sender_info) + RTC_RUN_ON(signaling_thread()); + rtc::scoped_refptr RemoveAndStopReceiver( + const RtpSenderInfo& remote_sender_info) RTC_RUN_ON(signaling_thread()); + + PeerConnectionObserver* Observer() const; + void OnNegotiationNeeded(); + + TransceiverList transceivers_; + + // These lists store sender info seen in local/remote descriptions. 
+ std::vector remote_audio_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + std::vector remote_video_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + std::vector local_audio_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + std::vector local_video_sender_infos_ + RTC_GUARDED_BY(signaling_thread()); + + bool closed_ = false; + bool const is_unified_plan_; + rtc::Thread* signaling_thread_; + rtc::Thread* worker_thread_; + cricket::ChannelManager* channel_manager_; + UsagePattern* usage_pattern_; + PeerConnectionObserver* observer_; + StatsCollectorInterface* const stats_; + std::function on_negotiation_needed_; + rtc::WeakPtrFactory weak_ptr_factory_ + RTC_GUARDED_BY(signaling_thread()); +}; + +} // namespace webrtc + +#endif // PC_RTP_TRANSMISSION_MANAGER_H_ diff --git a/pc/rtp_transport_unittest.cc b/pc/rtp_transport_unittest.cc index 03e8820c30..b3bd1db2e5 100644 --- a/pc/rtp_transport_unittest.cc +++ b/pc/rtp_transport_unittest.cc @@ -155,16 +155,16 @@ TEST(RtpTransportTest, SetRtpTransportWithNetworkRouteChanged) { rtc::NetworkRoute network_route; // Set a non-null RTP transport with a new network route. 
network_route.connected = true; - network_route.local_network_id = kLocalNetId; - network_route.remote_network_id = kRemoteNetId; + network_route.local = rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId); + network_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId); network_route.last_sent_packet_id = kLastPacketId; network_route.packet_overhead = kTransportOverheadPerPacket; fake_rtp.SetNetworkRoute(absl::optional(network_route)); transport.SetRtpPacketTransport(&fake_rtp); ASSERT_TRUE(observer.network_route()); EXPECT_TRUE(observer.network_route()->connected); - EXPECT_EQ(kLocalNetId, observer.network_route()->local_network_id); - EXPECT_EQ(kRemoteNetId, observer.network_route()->remote_network_id); + EXPECT_EQ(kLocalNetId, observer.network_route()->local.network_id()); + EXPECT_EQ(kRemoteNetId, observer.network_route()->remote.network_id()); EXPECT_EQ(kTransportOverheadPerPacket, observer.network_route()->packet_overhead); EXPECT_EQ(kLastPacketId, observer.network_route()->last_sent_packet_id); @@ -184,16 +184,16 @@ TEST(RtpTransportTest, SetRtcpTransportWithNetworkRouteChanged) { rtc::NetworkRoute network_route; // Set a non-null RTCP transport with a new network route. 
network_route.connected = true; - network_route.local_network_id = kLocalNetId; - network_route.remote_network_id = kRemoteNetId; + network_route.local = rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId); + network_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId); network_route.last_sent_packet_id = kLastPacketId; network_route.packet_overhead = kTransportOverheadPerPacket; fake_rtcp.SetNetworkRoute(absl::optional(network_route)); transport.SetRtcpPacketTransport(&fake_rtcp); ASSERT_TRUE(observer.network_route()); EXPECT_TRUE(observer.network_route()->connected); - EXPECT_EQ(kLocalNetId, observer.network_route()->local_network_id); - EXPECT_EQ(kRemoteNetId, observer.network_route()->remote_network_id); + EXPECT_EQ(kLocalNetId, observer.network_route()->local.network_id()); + EXPECT_EQ(kRemoteNetId, observer.network_route()->remote.network_id()); EXPECT_EQ(kTransportOverheadPerPacket, observer.network_route()->packet_overhead); EXPECT_EQ(kLastPacketId, observer.network_route()->last_sent_packet_id); diff --git a/pc/scenario_tests/BUILD.gn b/pc/scenario_tests/BUILD.gn new file mode 100644 index 0000000000..bcb69b9129 --- /dev/null +++ b/pc/scenario_tests/BUILD.gn @@ -0,0 +1,25 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +if (rtc_include_tests) { + rtc_library("pc_scenario_tests") { + testonly = true + sources = [ "goog_cc_test.cc" ] + deps = [ + "../../api:rtc_stats_api", + "../../modules/rtp_rtcp:rtp_rtcp", + "../../pc:pc_test_utils", + "../../pc:rtc_pc_base", + "../../test:field_trial", + "../../test:test_support", + "../../test/peer_scenario:peer_scenario", + ] + } +} diff --git a/pc/scenario_tests/goog_cc_test.cc b/pc/scenario_tests/goog_cc_test.cc new file mode 100644 index 0000000000..fba617dd5c --- /dev/null +++ b/pc/scenario_tests/goog_cc_test.cc @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/stats/rtcstats_objects.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "test/field_trial.h" +#include "test/gtest.h" +#include "test/peer_scenario/peer_scenario.h" +#include "test/peer_scenario/peer_scenario_client.h" + +namespace webrtc { +namespace test { + +// TODO(terelius): Use fake encoder and enable on Android once +// https://bugs.chromium.org/p/webrtc/issues/detail?id=11408 is fixed. +#if defined(WEBRTC_ANDROID) +#define MAYBE_NoBweChangeFromVideoUnmute DISABLED_NoBweChangeFromVideoUnmute +#else +#define MAYBE_NoBweChangeFromVideoUnmute NoBweChangeFromVideoUnmute +#endif +TEST(GoogCcPeerScenarioTest, MAYBE_NoBweChangeFromVideoUnmute) { + // If transport wide sequence numbers are used for audio, and the call + // switches from audio only to video only, there will be a sharp change in + // packets sizes. 
This will create a change in propagation time which might be + // detected as an overuse. Using separate overuse detectors for audio and + // video avoids the issue. + std::string audio_twcc_trials( + "WebRTC-Audio-SendSideBwe/Enabled/" // + "WebRTC-SendSideBwe-WithOverhead/Enabled/" // + "WebRTC-Audio-AlrProbing/Disabled/"); + std::string separate_audio_video( + "WebRTC-Bwe-SeparateAudioPackets/" + "enabled:true,packet_threshold:15,time_threshold:1000ms/"); + ScopedFieldTrials field_trial(audio_twcc_trials + separate_audio_video); + PeerScenario s(*test_info_); + auto* caller = s.CreateClient(PeerScenarioClient::Config()); + auto* callee = s.CreateClient(PeerScenarioClient::Config()); + + BuiltInNetworkBehaviorConfig net_conf; + net_conf.link_capacity_kbps = 350; + net_conf.queue_delay_ms = 50; + auto send_node = s.net()->CreateEmulatedNode(net_conf); + auto ret_node = s.net()->CreateEmulatedNode(net_conf); + + PeerScenarioClient::VideoSendTrackConfig video_conf; + video_conf.generator.squares_video->framerate = 15; + auto video = caller->CreateVideo("VIDEO", video_conf); + auto audio = caller->CreateAudio("AUDIO", cricket::AudioOptions()); + + // Start ICE and exchange SDP. + s.SimpleConnection(caller, callee, {send_node}, {ret_node}); + + // Limit the encoder bitrate to ensure that there are no actual BWE overuses. + ASSERT_EQ(caller->pc()->GetSenders().size(), 2u); // 2 senders. + int num_video_streams = 0; + for (auto& rtp_sender : caller->pc()->GetSenders()) { + auto parameters = rtp_sender->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); // 1 stream per sender. + for (auto& encoding_parameters : parameters.encodings) { + if (encoding_parameters.ssrc == video.sender->ssrc()) { + num_video_streams++; + encoding_parameters.max_bitrate_bps = 220000; + encoding_parameters.max_framerate = 15; + } + } + rtp_sender->SetParameters(parameters); + } + ASSERT_EQ(num_video_streams, 1); // Exactly 1 video stream. 
+ + auto get_bwe = [&] { + rtc::scoped_refptr callback( + new rtc::RefCountedObject()); + caller->pc()->GetStats(callback); + s.net()->time_controller()->Wait([&] { return callback->called(); }); + auto stats = + callback->report()->GetStatsOfType()[0]; + return DataRate::BitsPerSec(*stats->available_outgoing_bitrate); + }; + + s.ProcessMessages(TimeDelta::Seconds(15)); + const DataRate initial_bwe = get_bwe(); + EXPECT_GE(initial_bwe, DataRate::KilobitsPerSec(300)); + + // 10 seconds audio only. Bandwidth should not drop. + video.capturer->Stop(); + s.ProcessMessages(TimeDelta::Seconds(10)); + EXPECT_GE(get_bwe(), initial_bwe); + + // Resume video but stop audio. Bandwidth should not drop. + video.capturer->Start(); + RTCError status = caller->pc()->RemoveTrackNew(audio.sender); + ASSERT_TRUE(status.ok()); + audio.track->set_enabled(false); + for (int i = 0; i < 10; i++) { + s.ProcessMessages(TimeDelta::Seconds(1)); + EXPECT_GE(get_bwe(), initial_bwe); + } +} + +} // namespace test +} // namespace webrtc diff --git a/pc/data_channel.cc b/pc/sctp_data_channel.cc similarity index 54% rename from pc/data_channel.cc rename to pc/sctp_data_channel.cc index 4f871b4d50..c4357a8da6 100644 --- a/pc/data_channel.cc +++ b/pc/sctp_data_channel.cc @@ -1,5 +1,5 @@ /* - * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * Copyright 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,33 +8,64 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "pc/data_channel.h" +#include "pc/sctp_data_channel.h" #include #include #include +#include "api/proxy.h" #include "media/sctp/sctp_transport_internal.h" #include "pc/sctp_utils.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { +namespace { + static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024; static size_t kMaxQueuedSendDataBytes = 16 * 1024 * 1024; -namespace { - static std::atomic g_unique_id{0}; int GenerateUniqueId() { return ++g_unique_id; } +// Define proxy for DataChannelInterface. +BEGIN_SIGNALING_PROXY_MAP(DataChannel) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*) +PROXY_METHOD0(void, UnregisterObserver) +BYPASS_PROXY_CONSTMETHOD0(std::string, label) +BYPASS_PROXY_CONSTMETHOD0(bool, reliable) +BYPASS_PROXY_CONSTMETHOD0(bool, ordered) +BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime) +BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits) +BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxRetransmitsOpt) +BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxPacketLifeTime) +BYPASS_PROXY_CONSTMETHOD0(std::string, protocol) +BYPASS_PROXY_CONSTMETHOD0(bool, negotiated) +// Can't bypass the proxy since the id may change. +PROXY_CONSTMETHOD0(int, id) +BYPASS_PROXY_CONSTMETHOD0(Priority, priority) +PROXY_CONSTMETHOD0(DataState, state) +PROXY_CONSTMETHOD0(RTCError, error) +PROXY_CONSTMETHOD0(uint32_t, messages_sent) +PROXY_CONSTMETHOD0(uint64_t, bytes_sent) +PROXY_CONSTMETHOD0(uint32_t, messages_received) +PROXY_CONSTMETHOD0(uint64_t, bytes_received) +PROXY_CONSTMETHOD0(uint64_t, buffered_amount) +PROXY_METHOD0(void, Close) +// TODO(bugs.webrtc.org/11547): Change to run on the network thread. 
+PROXY_METHOD1(bool, Send, const DataBuffer&) +END_PROXY_MAP() + } // namespace InternalDataChannelInit::InternalDataChannelInit(const DataChannelInit& base) @@ -98,175 +129,167 @@ bool SctpSidAllocator::IsSidAvailable(int sid) const { return used_sids_.find(sid) == used_sids_.end(); } -bool DataChannel::PacketQueue::Empty() const { - return packets_.empty(); -} - -std::unique_ptr DataChannel::PacketQueue::PopFront() { - RTC_DCHECK(!packets_.empty()); - byte_count_ -= packets_.front()->size(); - std::unique_ptr packet = std::move(packets_.front()); - packets_.pop_front(); - return packet; -} - -void DataChannel::PacketQueue::PushFront(std::unique_ptr packet) { - byte_count_ += packet->size(); - packets_.push_front(std::move(packet)); -} - -void DataChannel::PacketQueue::PushBack(std::unique_ptr packet) { - byte_count_ += packet->size(); - packets_.push_back(std::move(packet)); -} - -void DataChannel::PacketQueue::Clear() { - packets_.clear(); - byte_count_ = 0; -} - -void DataChannel::PacketQueue::Swap(PacketQueue* other) { - size_t other_byte_count = other->byte_count_; - other->byte_count_ = byte_count_; - byte_count_ = other_byte_count; - - other->packets_.swap(packets_); -} - -rtc::scoped_refptr DataChannel::Create( - DataChannelProviderInterface* provider, - cricket::DataChannelType dct, +rtc::scoped_refptr SctpDataChannel::Create( + SctpDataChannelProviderInterface* provider, const std::string& label, - const InternalDataChannelInit& config) { - rtc::scoped_refptr channel( - new rtc::RefCountedObject(provider, dct, label)); - if (!channel->Init(config)) { - return NULL; + const InternalDataChannelInit& config, + rtc::Thread* signaling_thread, + rtc::Thread* network_thread) { + rtc::scoped_refptr channel( + new rtc::RefCountedObject( + config, provider, label, signaling_thread, network_thread)); + if (!channel->Init()) { + return nullptr; } return channel; } -bool DataChannel::IsSctpLike(cricket::DataChannelType type) { - return type == cricket::DCT_SCTP || 
type == cricket::DCT_MEDIA_TRANSPORT || - type == cricket::DCT_DATA_CHANNEL_TRANSPORT || - type == cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP; -} - -DataChannel::DataChannel(DataChannelProviderInterface* provider, - cricket::DataChannelType dct, - const std::string& label) - : internal_id_(GenerateUniqueId()), +// static +rtc::scoped_refptr SctpDataChannel::CreateProxy( + rtc::scoped_refptr channel) { + // TODO(bugs.webrtc.org/11547): incorporate the network thread in the proxy. + // Also, consider allowing the proxy object to own the reference (std::move). + // As is, the proxy has a raw pointer and no reference to the channel object + // and trusting that the lifetime management aligns with the + // sctp_data_channels_ array in SctpDataChannelController. + return DataChannelProxy::Create(channel->signaling_thread_, channel.get()); +} + +SctpDataChannel::SctpDataChannel(const InternalDataChannelInit& config, + SctpDataChannelProviderInterface* provider, + const std::string& label, + rtc::Thread* signaling_thread, + rtc::Thread* network_thread) + : signaling_thread_(signaling_thread), + network_thread_(network_thread), + internal_id_(GenerateUniqueId()), label_(label), + config_(config), observer_(nullptr), - state_(kConnecting), - messages_sent_(0), - bytes_sent_(0), - messages_received_(0), - bytes_received_(0), - buffered_amount_(0), - data_channel_type_(dct), - provider_(provider), - handshake_state_(kHandshakeInit), - connected_to_provider_(false), - send_ssrc_set_(false), - receive_ssrc_set_(false), - writable_(false), - send_ssrc_(0), - receive_ssrc_(0) {} - -bool DataChannel::Init(const InternalDataChannelInit& config) { - if (data_channel_type_ == cricket::DCT_RTP) { - if (config.reliable || config.id != -1 || config.maxRetransmits || - config.maxRetransmitTime) { - RTC_LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to " - "invalid DataChannelInit."; - return false; - } - handshake_state_ = kHandshakeReady; - } else if 
(IsSctpLike(data_channel_type_)) { - if (config.id < -1 || - (config.maxRetransmits && *config.maxRetransmits < 0) || - (config.maxRetransmitTime && *config.maxRetransmitTime < 0)) { - RTC_LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to " - "invalid DataChannelInit."; - return false; - } - if (config.maxRetransmits && config.maxRetransmitTime) { - RTC_LOG(LS_ERROR) - << "maxRetransmits and maxRetransmitTime should not be both set."; - return false; - } - config_ = config; - - switch (config_.open_handshake_role) { - case webrtc::InternalDataChannelInit::kNone: // pre-negotiated - handshake_state_ = kHandshakeReady; - break; - case webrtc::InternalDataChannelInit::kOpener: - handshake_state_ = kHandshakeShouldSendOpen; - break; - case webrtc::InternalDataChannelInit::kAcker: - handshake_state_ = kHandshakeShouldSendAck; - break; - } + provider_(provider) { + RTC_DCHECK_RUN_ON(signaling_thread_); +} - // Try to connect to the transport in case the transport channel already - // exists. - OnTransportChannelCreated(); - - // Checks if the transport is ready to send because the initial channel - // ready signal may have been sent before the DataChannel creation. - // This has to be done async because the upper layer objects (e.g. - // Chrome glue and WebKit) are not wired up properly until after this - // function returns. 
- if (provider_->ReadyToSendData()) { - invoker_.AsyncInvoke(RTC_FROM_HERE, rtc::Thread::Current(), - [this] { OnChannelReady(true); }); - } +bool SctpDataChannel::Init() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (config_.id < -1 || + (config_.maxRetransmits && *config_.maxRetransmits < 0) || + (config_.maxRetransmitTime && *config_.maxRetransmitTime < 0)) { + RTC_LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to " + "invalid DataChannelInit."; + return false; + } + if (config_.maxRetransmits && config_.maxRetransmitTime) { + RTC_LOG(LS_ERROR) + << "maxRetransmits and maxRetransmitTime should not be both set."; + return false; + } + + switch (config_.open_handshake_role) { + case webrtc::InternalDataChannelInit::kNone: // pre-negotiated + handshake_state_ = kHandshakeReady; + break; + case webrtc::InternalDataChannelInit::kOpener: + handshake_state_ = kHandshakeShouldSendOpen; + break; + case webrtc::InternalDataChannelInit::kAcker: + handshake_state_ = kHandshakeShouldSendAck; + break; + } + + // Try to connect to the transport in case the transport channel already + // exists. + OnTransportChannelCreated(); + + // Checks if the transport is ready to send because the initial channel + // ready signal may have been sent before the DataChannel creation. + // This has to be done async because the upper layer objects (e.g. + // Chrome glue and WebKit) are not wired up properly until after this + // function returns. 
+ if (provider_->ReadyToSendData()) { + AddRef(); + rtc::Thread::Current()->PostTask(ToQueuedTask( + [this] { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (state_ != kClosed) + OnTransportReady(true); + }, + [this] { Release(); })); } return true; } -DataChannel::~DataChannel() {} +SctpDataChannel::~SctpDataChannel() { + RTC_DCHECK_RUN_ON(signaling_thread_); +} -void DataChannel::RegisterObserver(DataChannelObserver* observer) { +void SctpDataChannel::RegisterObserver(DataChannelObserver* observer) { + RTC_DCHECK_RUN_ON(signaling_thread_); observer_ = observer; DeliverQueuedReceivedData(); } -void DataChannel::UnregisterObserver() { - observer_ = NULL; +void SctpDataChannel::UnregisterObserver() { + RTC_DCHECK_RUN_ON(signaling_thread_); + observer_ = nullptr; } -bool DataChannel::reliable() const { - if (data_channel_type_ == cricket::DCT_RTP) { - return false; - } else { - return !config_.maxRetransmits && !config_.maxRetransmitTime; - } +bool SctpDataChannel::reliable() const { + // May be called on any thread. + return !config_.maxRetransmits && !config_.maxRetransmitTime; } -uint64_t DataChannel::buffered_amount() const { +uint64_t SctpDataChannel::buffered_amount() const { + RTC_DCHECK_RUN_ON(signaling_thread_); return buffered_amount_; } -void DataChannel::Close() { +void SctpDataChannel::Close() { + RTC_DCHECK_RUN_ON(signaling_thread_); if (state_ == kClosed) return; - send_ssrc_ = 0; - send_ssrc_set_ = false; SetState(kClosing); // Will send queued data before beginning the underlying closing procedure. 
UpdateState(); } -RTCError DataChannel::error() const { +SctpDataChannel::DataState SctpDataChannel::state() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return state_; +} + +RTCError SctpDataChannel::error() const { + RTC_DCHECK_RUN_ON(signaling_thread_); return error_; } -bool DataChannel::Send(const DataBuffer& buffer) { - buffered_amount_ += buffer.size(); +uint32_t SctpDataChannel::messages_sent() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return messages_sent_; +} + +uint64_t SctpDataChannel::bytes_sent() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return bytes_sent_; +} + +uint32_t SctpDataChannel::messages_received() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return messages_received_; +} + +uint64_t SctpDataChannel::bytes_received() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + return bytes_received_; +} + +bool SctpDataChannel::Send(const DataBuffer& buffer) { + RTC_DCHECK_RUN_ON(signaling_thread_); + // TODO(bugs.webrtc.org/11547): Expect this method to be called on the network + // thread. Bring buffer management etc to the network thread and keep the + // operational state management on the signaling thread. + if (state_ != kOpen) { return false; } @@ -278,12 +301,11 @@ bool DataChannel::Send(const DataBuffer& buffer) { return true; } + buffered_amount_ += buffer.size(); + // If the queue is non-empty, we're waiting for SignalReadyToSend, // so just add to the end of the queue and keep waiting. if (!queued_send_data_.Empty()) { - // Only SCTP DataChannel queues the outgoing data when the transport is - // blocked. 
- RTC_DCHECK(IsSctpLike(data_channel_type_)); if (!QueueSendDataMessage(buffer)) { RTC_LOG(LS_ERROR) << "Closing the DataChannel due to a failure to queue " "additional data."; @@ -295,41 +317,30 @@ bool DataChannel::Send(const DataBuffer& buffer) { return true; } - bool success = SendDataMessage(buffer, true); - if (data_channel_type_ == cricket::DCT_RTP) { - return success; - } + SendDataMessage(buffer, true); // Always return true for SCTP DataChannel per the spec. return true; } -void DataChannel::SetReceiveSsrc(uint32_t receive_ssrc) { - RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP); - - if (receive_ssrc_set_) { - return; - } - receive_ssrc_ = receive_ssrc; - receive_ssrc_set_ = true; - UpdateState(); -} - -void DataChannel::SetSctpSid(int sid) { +void SctpDataChannel::SetSctpSid(int sid) { + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK_LT(config_.id, 0); RTC_DCHECK_GE(sid, 0); - RTC_DCHECK(IsSctpLike(data_channel_type_)); + RTC_DCHECK_NE(handshake_state_, kHandshakeWaitingForAck); + RTC_DCHECK_EQ(state_, kConnecting); + if (config_.id == sid) { return; } - config_.id = sid; + const_cast(config_).id = sid; provider_->AddSctpDataStream(sid); } -void DataChannel::OnClosingProcedureStartedRemotely(int sid) { - if (IsSctpLike(data_channel_type_) && sid == config_.id && - state_ != kClosing && state_ != kClosed) { +void SctpDataChannel::OnClosingProcedureStartedRemotely(int sid) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (sid == config_.id && state_ != kClosing && state_ != kClosed) { // Don't bother sending queued data since the side that initiated the // closure wouldn't receive it anyway. See crbug.com/559394 for a lengthy // discussion about this. 
@@ -343,8 +354,9 @@ void DataChannel::OnClosingProcedureStartedRemotely(int sid) { } } -void DataChannel::OnClosingProcedureComplete(int sid) { - if (IsSctpLike(data_channel_type_) && sid == config_.id) { +void SctpDataChannel::OnClosingProcedureComplete(int sid) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (sid == config_.id) { // If the closing procedure is complete, we should have finished sending // all pending data and transitioned to kClosing already. RTC_DCHECK_EQ(state_, kClosing); @@ -354,8 +366,8 @@ void DataChannel::OnClosingProcedureComplete(int sid) { } } -void DataChannel::OnTransportChannelCreated() { - RTC_DCHECK(IsSctpLike(data_channel_type_)); +void SctpDataChannel::OnTransportChannelCreated() { + RTC_DCHECK_RUN_ON(signaling_thread_); if (!connected_to_provider_) { connected_to_provider_ = provider_->ConnectDataChannel(this); } @@ -366,44 +378,32 @@ void DataChannel::OnTransportChannelCreated() { } } -void DataChannel::OnTransportChannelClosed() { +void SctpDataChannel::OnTransportChannelClosed() { // The SctpTransport is unusable (for example, because the SCTP m= section // was rejected, or because the DTLS transport closed), so we need to close // abruptly. - // Note: this needs to differentiate between normal close and error close. - // https://w3c.github.io/webrtc-pc/#announcing-a-data-channel-as-closed - CloseAbruptlyWithError( - RTCError(RTCErrorType::NETWORK_ERROR, "Transport channel closed")); + RTCError error = RTCError(RTCErrorType::OPERATION_ERROR_WITH_DATA, + "Transport channel closed"); + error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE); + CloseAbruptlyWithError(std::move(error)); } -// The remote peer request that this channel shall be closed. -void DataChannel::RemotePeerRequestClose() { - RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP); - // Close with error code explicitly set to OK. 
- CloseAbruptlyWithError(RTCError()); +DataChannelStats SctpDataChannel::GetStats() const { + RTC_DCHECK_RUN_ON(signaling_thread_); + DataChannelStats stats{internal_id_, id(), label(), + protocol(), state(), messages_sent(), + messages_received(), bytes_sent(), bytes_received()}; + return stats; } -void DataChannel::SetSendSsrc(uint32_t send_ssrc) { - RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP); - if (send_ssrc_set_) { - return; - } - send_ssrc_ = send_ssrc; - send_ssrc_set_ = true; - UpdateState(); -} - -void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& payload) { - if (data_channel_type_ == cricket::DCT_RTP && params.ssrc != receive_ssrc_) { - return; - } - if (IsSctpLike(data_channel_type_) && params.sid != config_.id) { +void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, + const rtc::CopyOnWriteBuffer& payload) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (params.sid != config_.id) { return; } if (params.type == cricket::DMT_CONTROL) { - RTC_DCHECK(IsSctpLike(data_channel_type_)); if (handshake_state_ != kHandshakeWaitingForAck) { // Ignore it if we are not expecting an ACK message. 
RTC_LOG(LS_WARNING) @@ -448,11 +448,9 @@ void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, RTC_LOG(LS_ERROR) << "Queued received data exceeds the max buffer size."; queued_received_data_.Clear(); - if (data_channel_type_ != cricket::DCT_RTP) { - CloseAbruptlyWithError( - RTCError(RTCErrorType::RESOURCE_EXHAUSTED, - "Queued received data exceeds the max buffer size.")); - } + CloseAbruptlyWithError( + RTCError(RTCErrorType::RESOURCE_EXHAUSTED, + "Queued received data exceeds the max buffer size.")); return; } @@ -460,7 +458,9 @@ void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, } } -void DataChannel::OnChannelReady(bool writable) { +void SctpDataChannel::OnTransportReady(bool writable) { + RTC_DCHECK_RUN_ON(signaling_thread_); + writable_ = writable; if (!writable) { return; @@ -468,10 +468,13 @@ void DataChannel::OnChannelReady(bool writable) { SendQueuedControlMessages(); SendQueuedDataMessages(); + UpdateState(); } -void DataChannel::CloseAbruptlyWithError(RTCError error) { +void SctpDataChannel::CloseAbruptlyWithError(RTCError error) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (state_ == kClosed) { return; } @@ -481,8 +484,9 @@ void DataChannel::CloseAbruptlyWithError(RTCError error) { } // Closing abruptly means any queued data gets thrown away. 
- queued_send_data_.Clear(); buffered_amount_ = 0; + + queued_send_data_.Clear(); queued_control_data_.Clear(); // Still go to "kClosing" before "kClosed", since observers may be expecting @@ -492,41 +496,38 @@ void DataChannel::CloseAbruptlyWithError(RTCError error) { SetState(kClosed); } -void DataChannel::CloseAbruptlyWithDataChannelFailure( +void SctpDataChannel::CloseAbruptlyWithDataChannelFailure( const std::string& message) { RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, message); error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE); CloseAbruptlyWithError(std::move(error)); } -void DataChannel::UpdateState() { - // UpdateState determines what to do from a few state variables. Include +void SctpDataChannel::UpdateState() { + RTC_DCHECK_RUN_ON(signaling_thread_); + // UpdateState determines what to do from a few state variables. Include // all conditions required for each state transition here for - // clarity. OnChannelReady(true) will send any queued data and then invoke + // clarity. OnTransportReady(true) will send any queued data and then invoke // UpdateState(). 
+ switch (state_) { case kConnecting: { - if (send_ssrc_set_ == receive_ssrc_set_) { - if (data_channel_type_ == cricket::DCT_RTP && !connected_to_provider_) { - connected_to_provider_ = provider_->ConnectDataChannel(this); + if (connected_to_provider_) { + if (handshake_state_ == kHandshakeShouldSendOpen) { + rtc::CopyOnWriteBuffer payload; + WriteDataChannelOpenMessage(label_, config_, &payload); + SendControlMessage(payload); + } else if (handshake_state_ == kHandshakeShouldSendAck) { + rtc::CopyOnWriteBuffer payload; + WriteDataChannelOpenAckMessage(&payload); + SendControlMessage(payload); } - if (connected_to_provider_) { - if (handshake_state_ == kHandshakeShouldSendOpen) { - rtc::CopyOnWriteBuffer payload; - WriteDataChannelOpenMessage(label_, config_, &payload); - SendControlMessage(payload); - } else if (handshake_state_ == kHandshakeShouldSendAck) { - rtc::CopyOnWriteBuffer payload; - WriteDataChannelOpenAckMessage(&payload); - SendControlMessage(payload); - } - if (writable_ && (handshake_state_ == kHandshakeReady || - handshake_state_ == kHandshakeWaitingForAck)) { - SetState(kOpen); - // If we have received buffers before the channel got writable. - // Deliver them now. - DeliverQueuedReceivedData(); - } + if (writable_ && (handshake_state_ == kHandshakeReady || + handshake_state_ == kHandshakeWaitingForAck)) { + SetState(kOpen); + // If we have received buffers before the channel got writable. + // Deliver them now. + DeliverQueuedReceivedData(); } } break; @@ -538,25 +539,14 @@ void DataChannel::UpdateState() { // Wait for all queued data to be sent before beginning the closing // procedure. if (queued_send_data_.Empty() && queued_control_data_.Empty()) { - if (data_channel_type_ == cricket::DCT_RTP) { - // For RTP data channels, we can go to "closed" after we finish - // sending data and the send/recv SSRCs are unset. 
- if (connected_to_provider_) { - DisconnectFromProvider(); - } - if (!send_ssrc_set_ && !receive_ssrc_set_) { - SetState(kClosed); - } - } else { - // For SCTP data channels, we need to wait for the closing procedure - // to complete; after calling RemoveSctpDataStream, - // OnClosingProcedureComplete will end up called asynchronously - // afterwards. - if (connected_to_provider_ && !started_closing_procedure_ && - config_.id >= 0) { - started_closing_procedure_ = true; - provider_->RemoveSctpDataStream(config_.id); - } + // For SCTP data channels, we need to wait for the closing procedure + // to complete; after calling RemoveSctpDataStream, + // OnClosingProcedureComplete will end up called asynchronously + // afterwards. + if (connected_to_provider_ && !started_closing_procedure_ && + config_.id >= 0) { + started_closing_procedure_ = true; + provider_->RemoveSctpDataStream(config_.id); } } break; @@ -566,7 +556,8 @@ void DataChannel::UpdateState() { } } -void DataChannel::SetState(DataState state) { +void SctpDataChannel::SetState(DataState state) { + RTC_DCHECK_RUN_ON(signaling_thread_); if (state_ == state) { return; } @@ -582,7 +573,8 @@ void DataChannel::SetState(DataState state) { } } -void DataChannel::DisconnectFromProvider() { +void SctpDataChannel::DisconnectFromProvider() { + RTC_DCHECK_RUN_ON(signaling_thread_); if (!connected_to_provider_) return; @@ -590,7 +582,8 @@ void DataChannel::DisconnectFromProvider() { connected_to_provider_ = false; } -void DataChannel::DeliverQueuedReceivedData() { +void SctpDataChannel::DeliverQueuedReceivedData() { + RTC_DCHECK_RUN_ON(signaling_thread_); if (!observer_) { return; } @@ -603,7 +596,8 @@ void DataChannel::DeliverQueuedReceivedData() { } } -void DataChannel::SendQueuedDataMessages() { +void SctpDataChannel::SendQueuedDataMessages() { + RTC_DCHECK_RUN_ON(signaling_thread_); if (queued_send_data_.Empty()) { return; } @@ -620,28 +614,25 @@ void DataChannel::SendQueuedDataMessages() { } } -bool 
DataChannel::SendDataMessage(const DataBuffer& buffer, - bool queue_if_blocked) { +bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer, + bool queue_if_blocked) { + RTC_DCHECK_RUN_ON(signaling_thread_); cricket::SendDataParams send_params; - if (IsSctpLike(data_channel_type_)) { - send_params.ordered = config_.ordered; - // Send as ordered if it is still going through OPEN/ACK signaling. - if (handshake_state_ != kHandshakeReady && !config_.ordered) { - send_params.ordered = true; - RTC_LOG(LS_VERBOSE) - << "Sending data as ordered for unordered DataChannel " - "because the OPEN_ACK message has not been received."; - } - - send_params.max_rtx_count = - config_.maxRetransmits ? *config_.maxRetransmits : -1; - send_params.max_rtx_ms = - config_.maxRetransmitTime ? *config_.maxRetransmitTime : -1; - send_params.sid = config_.id; - } else { - send_params.ssrc = send_ssrc_; + send_params.ordered = config_.ordered; + // Send as ordered if it is still going through OPEN/ACK signaling. + if (handshake_state_ != kHandshakeReady && !config_.ordered) { + send_params.ordered = true; + RTC_LOG(LS_VERBOSE) + << "Sending data as ordered for unordered DataChannel " + "because the OPEN_ACK message has not been received."; } + + send_params.max_rtx_count = + config_.maxRetransmits ? *config_.maxRetransmits : -1; + send_params.max_rtx_ms = + config_.maxRetransmitTime ? *config_.maxRetransmitTime : -1; + send_params.sid = config_.id; send_params.type = buffer.binary ? 
cricket::DMT_BINARY : cricket::DMT_TEXT; cricket::SendDataResult send_result = cricket::SDR_SUCCESS; @@ -659,10 +650,6 @@ bool DataChannel::SendDataMessage(const DataBuffer& buffer, return true; } - if (!IsSctpLike(data_channel_type_)) { - return false; - } - if (send_result == cricket::SDR_BLOCK) { if (!queue_if_blocked || QueueSendDataMessage(buffer)) { return false; @@ -679,7 +666,8 @@ bool DataChannel::SendDataMessage(const DataBuffer& buffer, return false; } -bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) { +bool SctpDataChannel::QueueSendDataMessage(const DataBuffer& buffer) { + RTC_DCHECK_RUN_ON(signaling_thread_); size_t start_buffered_amount = queued_send_data_.byte_count(); if (start_buffered_amount + buffer.size() > kMaxQueuedSendDataBytes) { RTC_LOG(LS_ERROR) << "Can't buffer any more data for the data channel."; @@ -689,7 +677,8 @@ bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) { return true; } -void DataChannel::SendQueuedControlMessages() { +void SctpDataChannel::SendQueuedControlMessages() { + RTC_DCHECK_RUN_ON(signaling_thread_); PacketQueue control_packets; control_packets.Swap(&queued_control_data_); @@ -699,16 +688,18 @@ void DataChannel::SendQueuedControlMessages() { } } -void DataChannel::QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer) { +void SctpDataChannel::QueueControlMessage( + const rtc::CopyOnWriteBuffer& buffer) { + RTC_DCHECK_RUN_ON(signaling_thread_); queued_control_data_.PushBack(std::make_unique(buffer, true)); } -bool DataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { - bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen; - - RTC_DCHECK(IsSctpLike(data_channel_type_)); +bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(writable_); RTC_DCHECK_GE(config_.id, 0); + + bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen; RTC_DCHECK(!is_open_message || 
!config_.negotiated); cricket::SendDataParams send_params; @@ -742,7 +733,7 @@ bool DataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { } // static -void DataChannel::ResetInternalIdAllocatorForTesting(int new_value) { +void SctpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) { g_unique_id = new_value; } diff --git a/pc/sctp_data_channel.h b/pc/sctp_data_channel.h new file mode 100644 index 0000000000..6d121e6f80 --- /dev/null +++ b/pc/sctp_data_channel.h @@ -0,0 +1,283 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_SCTP_DATA_CHANNEL_H_ +#define PC_SCTP_DATA_CHANNEL_H_ + +#include +#include +#include + +#include "api/data_channel_interface.h" +#include "api/priority.h" +#include "api/scoped_refptr.h" +#include "api/transport/data_channel_transport_interface.h" +#include "media/base/media_channel.h" +#include "pc/data_channel_utils.h" +#include "rtc_base/ssl_stream_adapter.h" // For SSLRole +#include "rtc_base/third_party/sigslot/sigslot.h" + +namespace webrtc { + +class SctpDataChannel; + +// TODO(deadbeef): Get rid of this and have SctpDataChannel depend on +// SctpTransportInternal (pure virtual SctpTransport interface) instead. +class SctpDataChannelProviderInterface { + public: + // Sends the data to the transport. + virtual bool SendData(const cricket::SendDataParams& params, + const rtc::CopyOnWriteBuffer& payload, + cricket::SendDataResult* result) = 0; + // Connects to the transport signals. + virtual bool ConnectDataChannel(SctpDataChannel* data_channel) = 0; + // Disconnects from the transport signals. 
+ virtual void DisconnectDataChannel(SctpDataChannel* data_channel) = 0; + // Adds the data channel SID to the transport for SCTP. + virtual void AddSctpDataStream(int sid) = 0; + // Begins the closing procedure by sending an outgoing stream reset. Still + // need to wait for callbacks to tell when this completes. + virtual void RemoveSctpDataStream(int sid) = 0; + // Returns true if the transport channel is ready to send data. + virtual bool ReadyToSendData() const = 0; + + protected: + virtual ~SctpDataChannelProviderInterface() {} +}; + +// TODO(tommi): Change to not inherit from DataChannelInit but to have it as +// a const member. Block access to the 'id' member since it cannot be const. +struct InternalDataChannelInit : public DataChannelInit { + enum OpenHandshakeRole { kOpener, kAcker, kNone }; + // The default role is kOpener because the default |negotiated| is false. + InternalDataChannelInit() : open_handshake_role(kOpener) {} + explicit InternalDataChannelInit(const DataChannelInit& base); + OpenHandshakeRole open_handshake_role; +}; + +// Helper class to allocate unique IDs for SCTP DataChannels. +class SctpSidAllocator { + public: + // Gets the first unused odd/even id based on the DTLS role. If |role| is + // SSL_CLIENT, the allocated id starts from 0 and takes even numbers; + // otherwise, the id starts from 1 and takes odd numbers. + // Returns false if no ID can be allocated. + bool AllocateSid(rtc::SSLRole role, int* sid); + + // Attempts to reserve a specific sid. Returns false if it's unavailable. + bool ReserveSid(int sid); + + // Indicates that |sid| isn't in use any more, and is thus available again. + void ReleaseSid(int sid); + + private: + // Checks if |sid| is available to be assigned to a new SCTP data channel. + bool IsSidAvailable(int sid) const; + + std::set used_sids_; +}; + +// SctpDataChannel is an implementation of the DataChannelInterface based on +// SctpTransport. 
It provides an implementation of unreliable or +// reliabledata channels. + +// DataChannel states: +// kConnecting: The channel has been created the transport might not yet be +// ready. +// kOpen: The open handshake has been performed (if relevant) and the data +// channel is able to send messages. +// kClosing: DataChannelInterface::Close has been called, or the remote side +// initiated the closing procedure, but the closing procedure has not +// yet finished. +// kClosed: The closing handshake is finished (possibly initiated from this, +// side, possibly from the peer). +// +// How the closing procedure works for SCTP: +// 1. Alice calls Close(), state changes to kClosing. +// 2. Alice finishes sending any queued data. +// 3. Alice calls RemoveSctpDataStream, sends outgoing stream reset. +// 4. Bob receives incoming stream reset; OnClosingProcedureStartedRemotely +// called. +// 5. Bob sends outgoing stream reset. +// 6. Alice receives incoming reset, Bob receives acknowledgement. Both receive +// OnClosingProcedureComplete callback and transition to kClosed. +class SctpDataChannel : public DataChannelInterface, + public sigslot::has_slots<> { + public: + static rtc::scoped_refptr Create( + SctpDataChannelProviderInterface* provider, + const std::string& label, + const InternalDataChannelInit& config, + rtc::Thread* signaling_thread, + rtc::Thread* network_thread); + + // Instantiates an API proxy for a SctpDataChannel instance that will be + // handed out to external callers. + static rtc::scoped_refptr CreateProxy( + rtc::scoped_refptr channel); + + void RegisterObserver(DataChannelObserver* observer) override; + void UnregisterObserver() override; + + std::string label() const override { return label_; } + bool reliable() const override; + bool ordered() const override { return config_.ordered; } + // Backwards compatible accessors + uint16_t maxRetransmitTime() const override { + return config_.maxRetransmitTime ? 
*config_.maxRetransmitTime + : static_cast(-1); + } + uint16_t maxRetransmits() const override { + return config_.maxRetransmits ? *config_.maxRetransmits + : static_cast(-1); + } + absl::optional maxPacketLifeTime() const override { + return config_.maxRetransmitTime; + } + absl::optional maxRetransmitsOpt() const override { + return config_.maxRetransmits; + } + std::string protocol() const override { return config_.protocol; } + bool negotiated() const override { return config_.negotiated; } + int id() const override { return config_.id; } + Priority priority() const override { + return config_.priority ? *config_.priority : Priority::kLow; + } + + virtual int internal_id() const { return internal_id_; } + + uint64_t buffered_amount() const override; + void Close() override; + DataState state() const override; + RTCError error() const override; + uint32_t messages_sent() const override; + uint64_t bytes_sent() const override; + uint32_t messages_received() const override; + uint64_t bytes_received() const override; + bool Send(const DataBuffer& buffer) override; + + // Close immediately, ignoring any queued data or closing procedure. + // This is called when the underlying SctpTransport is being destroyed. + // It is also called by the PeerConnection if SCTP ID assignment fails. + void CloseAbruptlyWithError(RTCError error); + // Specializations of CloseAbruptlyWithError + void CloseAbruptlyWithDataChannelFailure(const std::string& message); + void CloseAbruptlyWithSctpCauseCode(const std::string& message, + uint16_t cause_code); + + // Slots for provider to connect signals to. + // + // TODO(deadbeef): Make these private once we're hooking up signals ourselves, + // instead of relying on SctpDataChannelProviderInterface. + + // Called when the SctpTransport's ready to use. That can happen when we've + // finished negotiation, or if the channel was created after negotiation has + // already finished. 
+ void OnTransportReady(bool writable); + + void OnDataReceived(const cricket::ReceiveDataParams& params, + const rtc::CopyOnWriteBuffer& payload); + + // Sets the SCTP sid and adds to transport layer if not set yet. Should only + // be called once. + void SetSctpSid(int sid); + // The remote side started the closing procedure by resetting its outgoing + // stream (our incoming stream). Sets state to kClosing. + void OnClosingProcedureStartedRemotely(int sid); + // The closing procedure is complete; both incoming and outgoing stream + // resets are done and the channel can transition to kClosed. Called + // asynchronously after RemoveSctpDataStream. + void OnClosingProcedureComplete(int sid); + // Called when the transport channel is created. + // Only needs to be called for SCTP data channels. + void OnTransportChannelCreated(); + // Called when the transport channel is unusable. + // This method makes sure the DataChannel is disconnected and changes state + // to kClosed. + void OnTransportChannelClosed(); + + DataChannelStats GetStats() const; + + // Emitted when state transitions to kOpen. + sigslot::signal1 SignalOpened; + // Emitted when state transitions to kClosed. + // This signal can be used to tell when the channel's sid is free. + sigslot::signal1 SignalClosed; + + // Reset the allocator for internal ID values for testing, so that + // the internal IDs generated are predictable. Test only. + static void ResetInternalIdAllocatorForTesting(int new_value); + + protected: + SctpDataChannel(const InternalDataChannelInit& config, + SctpDataChannelProviderInterface* client, + const std::string& label, + rtc::Thread* signaling_thread, + rtc::Thread* network_thread); + ~SctpDataChannel() override; + + private: + // The OPEN(_ACK) signaling state. 
+ enum HandshakeState { + kHandshakeInit, + kHandshakeShouldSendOpen, + kHandshakeShouldSendAck, + kHandshakeWaitingForAck, + kHandshakeReady + }; + + bool Init(); + void UpdateState(); + void SetState(DataState state); + void DisconnectFromProvider(); + + void DeliverQueuedReceivedData(); + + void SendQueuedDataMessages(); + bool SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked); + bool QueueSendDataMessage(const DataBuffer& buffer); + + void SendQueuedControlMessages(); + void QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer); + bool SendControlMessage(const rtc::CopyOnWriteBuffer& buffer); + + rtc::Thread* const signaling_thread_; + rtc::Thread* const network_thread_; + const int internal_id_; + const std::string label_; + const InternalDataChannelInit config_; + DataChannelObserver* observer_ RTC_GUARDED_BY(signaling_thread_) = nullptr; + DataState state_ RTC_GUARDED_BY(signaling_thread_) = kConnecting; + RTCError error_ RTC_GUARDED_BY(signaling_thread_); + uint32_t messages_sent_ RTC_GUARDED_BY(signaling_thread_) = 0; + uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_) = 0; + uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_) = 0; + uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_) = 0; + // Number of bytes of data that have been queued using Send(). Increased + // before each transport send and decreased after each successful send. + uint64_t buffered_amount_ RTC_GUARDED_BY(signaling_thread_) = 0; + SctpDataChannelProviderInterface* const provider_ + RTC_GUARDED_BY(signaling_thread_); + HandshakeState handshake_state_ RTC_GUARDED_BY(signaling_thread_) = + kHandshakeInit; + bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_) = false; + bool writable_ RTC_GUARDED_BY(signaling_thread_) = false; + // Did we already start the graceful SCTP closing procedure? 
+ bool started_closing_procedure_ RTC_GUARDED_BY(signaling_thread_) = false; + // Control messages that always have to get sent out before any queued + // data. + PacketQueue queued_control_data_ RTC_GUARDED_BY(signaling_thread_); + PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_); + PacketQueue queued_send_data_ RTC_GUARDED_BY(signaling_thread_); +}; + +} // namespace webrtc + +#endif // PC_SCTP_DATA_CHANNEL_H_ diff --git a/pc/sctp_transport.cc b/pc/sctp_transport.cc index 532e91c67d..ea1165f94a 100644 --- a/pc/sctp_transport.cc +++ b/pc/sctp_transport.cc @@ -39,7 +39,7 @@ SctpTransport::~SctpTransport() { } SctpTransportInformation SctpTransport::Information() const { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); return info_; } @@ -66,7 +66,7 @@ void SctpTransport::Clear() { RTC_DCHECK_RUN_ON(owner_thread_); RTC_DCHECK(internal()); { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); // Note that we delete internal_sctp_transport_, but // only drop the reference to dtls_transport_. dtls_transport_ = nullptr; @@ -80,7 +80,7 @@ void SctpTransport::SetDtlsTransport( RTC_DCHECK_RUN_ON(owner_thread_); SctpTransportState next_state; { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); next_state = info_.state(); dtls_transport_ = transport; if (internal_sctp_transport_) { @@ -103,7 +103,7 @@ void SctpTransport::Start(int local_port, int remote_port, int max_message_size) { { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); // Record max message size on calling thread. 
info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(), max_message_size, info_.MaxChannels()); @@ -125,7 +125,7 @@ void SctpTransport::UpdateInformation(SctpTransportState state) { bool must_send_update; SctpTransportInformation info_copy(SctpTransportState::kNew); { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); must_send_update = (state != info_.state()); // TODO(https://bugs.webrtc.org/10358): Update max channels from internal // SCTP transport when available. @@ -149,7 +149,7 @@ void SctpTransport::UpdateInformation(SctpTransportState state) { void SctpTransport::OnAssociationChangeCommunicationUp() { RTC_DCHECK_RUN_ON(owner_thread_); { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); RTC_DCHECK(internal_sctp_transport_); if (internal_sctp_transport_->max_outbound_streams() && internal_sctp_transport_->max_inbound_streams()) { diff --git a/pc/sctp_transport.h b/pc/sctp_transport.h index a13a58c68e..a902ff02e8 100644 --- a/pc/sctp_transport.h +++ b/pc/sctp_transport.h @@ -17,6 +17,7 @@ #include "api/sctp_transport_interface.h" #include "media/sctp/sctp_transport.h" #include "pc/dtls_transport.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -47,12 +48,12 @@ class SctpTransport : public SctpTransportInterface, // internal() to be functions on the webrtc::SctpTransport interface, // and make the internal() function private. cricket::SctpTransportInternal* internal() { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); return internal_sctp_transport_.get(); } const cricket::SctpTransportInternal* internal() const { - rtc::CritScope scope(&lock_); + MutexLock lock(&lock_); return internal_sctp_transport_.get(); } @@ -71,7 +72,7 @@ class SctpTransport : public SctpTransportInterface, // Note - owner_thread never changes, but can't be const if we do // Invoke() on it. 
rtc::Thread* owner_thread_; - rtc::CriticalSection lock_; + mutable Mutex lock_; // Variables accessible off-thread, guarded by lock_ SctpTransportInformation info_ RTC_GUARDED_BY(lock_); std::unique_ptr internal_sctp_transport_ diff --git a/pc/sctp_utils.cc b/pc/sctp_utils.cc index 9cdff0e4da..1882a1525f 100644 --- a/pc/sctp_utils.cc +++ b/pc/sctp_utils.cc @@ -13,6 +13,7 @@ #include #include +#include "api/priority.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" @@ -34,6 +35,15 @@ enum DataChannelOpenMessageChannelType { DCOMCT_UNORDERED_PARTIAL_TIME = 0x82, }; +// Values of priority in the DC open protocol message. +// These are compared against an integer, so are enum, not enum class. +enum DataChannelPriority { + DCO_PRIORITY_VERY_LOW = 128, + DCO_PRIORITY_LOW = 256, + DCO_PRIORITY_MEDIUM = 512, + DCO_PRIORITY_HIGH = 1024, +}; + bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload) { // Format defined at // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04 @@ -76,6 +86,18 @@ bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload, << "Could not read OPEN message reliabilility prioirty."; return false; } + // Parse priority as defined in + // https://w3c.github.io/webrtc-priority/#rtcdatachannel-processing-steps + if (priority <= DCO_PRIORITY_VERY_LOW) { + config->priority = Priority::kVeryLow; + } else if (priority <= DCO_PRIORITY_LOW) { + config->priority = Priority::kLow; + } else if (priority <= DCO_PRIORITY_MEDIUM) { + config->priority = Priority::kMedium; + } else { + config->priority = Priority::kHigh; + } + uint32_t reliability_param; if (!buffer.ReadUInt32(&reliability_param)) { RTC_LOG(LS_WARNING) << "Could not read OPEN message reliabilility param."; @@ -146,6 +168,24 @@ bool WriteDataChannelOpenMessage(const std::string& label, uint8_t channel_type = 0; uint32_t reliability_param = 0; uint16_t priority = 0; + // Set priority according to + // 
https://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-12#section-6.4 + if (config.priority) { + switch (*config.priority) { + case Priority::kVeryLow: + priority = DCO_PRIORITY_VERY_LOW; + break; + case Priority::kLow: + priority = DCO_PRIORITY_LOW; + break; + case Priority::kMedium: + priority = DCO_PRIORITY_MEDIUM; + break; + case Priority::kHigh: + priority = DCO_PRIORITY_HIGH; + break; + } + } if (config.ordered) { if (config.maxRetransmits) { channel_type = DCOMCT_ORDERED_PARTIAL_RTXS; diff --git a/pc/sctp_utils_unittest.cc b/pc/sctp_utils_unittest.cc index 70c627714d..690a9dc523 100644 --- a/pc/sctp_utils_unittest.cc +++ b/pc/sctp_utils_unittest.cc @@ -45,6 +45,13 @@ class SctpUtilsTest : public ::testing::Test { } ASSERT_TRUE(buffer.ReadUInt16(&priority)); + if (config.priority) { + // Exact values are checked by round-trip conversion, but + // all values defined are greater than zero. + EXPECT_GT(priority, 0); + } else { + EXPECT_EQ(priority, 0); + } ASSERT_TRUE(buffer.ReadUInt32(&reliability)); if (config.maxRetransmits || config.maxRetransmitTime) { @@ -136,6 +143,27 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) { EXPECT_FALSE(output_config.maxRetransmitTime); } +TEST_F(SctpUtilsTest, WriteParseOpenMessageWithPriority) { + webrtc::DataChannelInit config; + std::string label = "abc"; + config.protocol = "y"; + config.priority = webrtc::Priority::kVeryLow; + + rtc::CopyOnWriteBuffer packet; + ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet)); + + VerifyOpenMessageFormat(packet, label, config); + + std::string output_label; + webrtc::DataChannelInit output_config; + ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(packet, &output_label, + &output_config)); + + EXPECT_EQ(label, output_label); + ASSERT_TRUE(output_config.priority); + EXPECT_EQ(*config.priority, *output_config.priority); +} + TEST_F(SctpUtilsTest, WriteParseAckMessage) { rtc::CopyOnWriteBuffer packet; 
webrtc::WriteDataChannelOpenAckMessage(&packet); diff --git a/pc/sdp_offer_answer.cc b/pc/sdp_offer_answer.cc new file mode 100644 index 0000000000..bd37a02d66 --- /dev/null +++ b/pc/sdp_offer_answer.cc @@ -0,0 +1,5029 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/sdp_offer_answer.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/media_stream_proxy.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/uma_metrics.h" +#include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "media/base/codec.h" +#include "media/base/media_engine.h" +#include "media/base/rid_description.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/port.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_description_factory.h" +#include "p2p/base/transport_info.h" +#include "pc/connection_context.h" +#include "pc/data_channel_utils.h" +#include "pc/media_protocol_names.h" +#include "pc/media_stream.h" +#include "pc/peer_connection.h" +#include "pc/peer_connection_message_handler.h" +#include "pc/rtp_data_channel.h" +#include "pc/rtp_media_utils.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_transport_internal.h" +#include "pc/sctp_transport.h" +#include 
"pc/simulcast_description.h" +#include "pc/stats_collector.h" +#include "pc/usage_pattern.h" +#include "pc/webrtc_session_description_factory.h" +#include "rtc_base/bind.h" +#include "rtc_base/helpers.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/trace_event.h" +#include "system_wrappers/include/metrics.h" + +using cricket::ContentInfo; +using cricket::ContentInfos; +using cricket::MediaContentDescription; +using cricket::MediaProtocolType; +using cricket::RidDescription; +using cricket::RidDirection; +using cricket::SessionDescription; +using cricket::SimulcastDescription; +using cricket::SimulcastLayer; +using cricket::SimulcastLayerList; +using cricket::StreamParams; +using cricket::TransportInfo; + +using cricket::LOCAL_PORT_TYPE; +using cricket::PRFLX_PORT_TYPE; +using cricket::RELAY_PORT_TYPE; +using cricket::STUN_PORT_TYPE; + +namespace webrtc { + +namespace { + +typedef webrtc::PeerConnectionInterface::RTCOfferAnswerOptions + RTCOfferAnswerOptions; + +// Error messages +const char kInvalidSdp[] = "Invalid session description."; +const char kInvalidCandidates[] = "Description contains invalid candidates."; +const char kBundleWithoutRtcpMux[] = + "rtcp-mux must be enabled when BUNDLE " + "is enabled."; +const char kMlineMismatchInAnswer[] = + "The order of m-lines in answer doesn't match order in offer. 
Rejecting " + "answer."; +const char kMlineMismatchInSubsequentOffer[] = + "The order of m-lines in subsequent offer doesn't match order from " + "previous offer/answer."; +const char kSdpWithoutIceUfragPwd[] = + "Called with SDP without ice-ufrag and ice-pwd."; +const char kSdpWithoutDtlsFingerprint[] = + "Called with SDP without DTLS fingerprint."; +const char kSdpWithoutSdesCrypto[] = "Called with SDP without SDES crypto."; + +const char kSessionError[] = "Session error code: "; +const char kSessionErrorDesc[] = "Session error description: "; + +// UMA metric names. +const char kSimulcastVersionApplyLocalDescription[] = + "WebRTC.PeerConnection.Simulcast.ApplyLocalDescription"; +const char kSimulcastVersionApplyRemoteDescription[] = + "WebRTC.PeerConnection.Simulcast.ApplyRemoteDescription"; +const char kSimulcastDisabled[] = "WebRTC.PeerConnection.Simulcast.Disabled"; + +// The length of RTCP CNAMEs. +static const int kRtcpCnameLength = 16; + +const char kDefaultStreamId[] = "default"; +// NOTE: Duplicated in peer_connection.cc: +static const char kDefaultAudioSenderId[] = "defaulta0"; +static const char kDefaultVideoSenderId[] = "defaultv0"; + +void NoteAddIceCandidateResult(int result) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.AddIceCandidate", result, + kAddIceCandidateMax); +} + +void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type, + cricket::MediaType media_type) { + // Array of structs needed to map {KeyExchangeProtocolType, + // cricket::MediaType} to KeyExchangeProtocolMedia without using std::map in + // order to avoid -Wglobal-constructors and -Wexit-time-destructors. 
+ static constexpr struct { + KeyExchangeProtocolType protocol_type; + cricket::MediaType media_type; + KeyExchangeProtocolMedia protocol_media; + } kEnumCounterKeyProtocolMediaMap[] = { + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_AUDIO, + kEnumCounterKeyProtocolMediaTypeDtlsAudio}, + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_VIDEO, + kEnumCounterKeyProtocolMediaTypeDtlsVideo}, + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_DATA, + kEnumCounterKeyProtocolMediaTypeDtlsData}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_AUDIO, + kEnumCounterKeyProtocolMediaTypeSdesAudio}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_VIDEO, + kEnumCounterKeyProtocolMediaTypeSdesVideo}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_DATA, + kEnumCounterKeyProtocolMediaTypeSdesData}, + }; + + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocol", protocol_type, + kEnumCounterKeyProtocolMax); + + for (const auto& i : kEnumCounterKeyProtocolMediaMap) { + if (i.protocol_type == protocol_type && i.media_type == media_type) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocolByMedia", + i.protocol_media, + kEnumCounterKeyProtocolMediaTypeMax); + } + } +} + +// Returns true if |new_desc| requests an ICE restart (i.e., new ufrag/pwd). +bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc, + const SessionDescriptionInterface* new_desc, + const std::string& content_name) { + if (!old_desc) { + return false; + } + const SessionDescription* new_sd = new_desc->description(); + const SessionDescription* old_sd = old_desc->description(); + const ContentInfo* cinfo = new_sd->GetContentByName(content_name); + if (!cinfo || cinfo->rejected) { + return false; + } + // If the content isn't rejected, check if ufrag and password has changed. 
+ const cricket::TransportDescription* new_transport_desc = + new_sd->GetTransportDescriptionByName(content_name); + const cricket::TransportDescription* old_transport_desc = + old_sd->GetTransportDescriptionByName(content_name); + if (!new_transport_desc || !old_transport_desc) { + // No transport description exists. This is not an ICE restart. + return false; + } + if (cricket::IceCredentialsChanged( + old_transport_desc->ice_ufrag, old_transport_desc->ice_pwd, + new_transport_desc->ice_ufrag, new_transport_desc->ice_pwd)) { + RTC_LOG(LS_INFO) << "Remote peer requests ICE restart for " << content_name + << "."; + return true; + } + return false; +} + +// Generates a string error message for SetLocalDescription/SetRemoteDescription +// from an RTCError. +std::string GetSetDescriptionErrorMessage(cricket::ContentSource source, + SdpType type, + const RTCError& error) { + rtc::StringBuilder oss; + oss << "Failed to set " << (source == cricket::CS_LOCAL ? "local" : "remote") + << " " << SdpTypeToString(type) << " sdp: " << error.message(); + return oss.Release(); +} + +std::string GetStreamIdsString(rtc::ArrayView stream_ids) { + std::string output = "streams=["; + const char* separator = ""; + for (const auto& stream_id : stream_ids) { + output.append(separator).append(stream_id); + separator = ", "; + } + output.append("]"); + return output; +} + +void ReportSimulcastApiVersion(const char* name, + const SessionDescription& session) { + bool has_legacy = false; + bool has_spec_compliant = false; + for (const ContentInfo& content : session.contents()) { + if (!content.media_description()) { + continue; + } + has_spec_compliant |= content.media_description()->HasSimulcast(); + for (const StreamParams& sp : content.media_description()->streams()) { + has_legacy |= sp.has_ssrc_group(cricket::kSimSsrcGroupSemantics); + } + } + + if (has_legacy) { + RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionLegacy, + kSimulcastApiVersionMax); + } + if (has_spec_compliant) { + 
RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionSpecCompliant, + kSimulcastApiVersionMax); + } + if (!has_legacy && !has_spec_compliant) { + RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionNone, + kSimulcastApiVersionMax); + } +} + +const ContentInfo* FindTransceiverMSection( + RtpTransceiverProxyWithInternal* transceiver, + const SessionDescriptionInterface* session_description) { + return transceiver->mid() + ? session_description->description()->GetContentByName( + *transceiver->mid()) + : nullptr; +} + +// If the direction is "recvonly" or "inactive", treat the description +// as containing no streams. +// See: https://code.google.com/p/webrtc/issues/detail?id=5054 +std::vector GetActiveStreams( + const cricket::MediaContentDescription* desc) { + return RtpTransceiverDirectionHasSend(desc->direction()) + ? desc->streams() + : std::vector(); +} + +// Logic to decide if an m= section can be recycled. This means that the new +// m= section is not rejected, but the old local or remote m= section is +// rejected. |old_content_one| and |old_content_two| refer to the m= section +// of the old remote and old local descriptions in no particular order. +// We need to check both the old local and remote because either +// could be the most current from the latest negotation. +bool IsMediaSectionBeingRecycled(SdpType type, + const ContentInfo& content, + const ContentInfo* old_content_one, + const ContentInfo* old_content_two) { + return type == SdpType::kOffer && !content.rejected && + ((old_content_one && old_content_one->rejected) || + (old_content_two && old_content_two->rejected)); +} + +// Verify that the order of media sections in |new_desc| matches +// |current_desc|. The number of m= sections in |new_desc| should be no +// less than |current_desc|. In the case of checking an answer's +// |new_desc|, the |current_desc| is the last offer that was set as the +// local or remote. 
In the case of checking an offer's |new_desc| we +// check against the local and remote descriptions stored from the last +// negotiation, because either of these could be the most up to date for +// possible rejected m sections. These are the |current_desc| and +// |secondary_current_desc|. +bool MediaSectionsInSameOrder(const SessionDescription& current_desc, + const SessionDescription* secondary_current_desc, + const SessionDescription& new_desc, + const SdpType type) { + if (current_desc.contents().size() > new_desc.contents().size()) { + return false; + } + + for (size_t i = 0; i < current_desc.contents().size(); ++i) { + const cricket::ContentInfo* secondary_content_info = nullptr; + if (secondary_current_desc && + i < secondary_current_desc->contents().size()) { + secondary_content_info = &secondary_current_desc->contents()[i]; + } + if (IsMediaSectionBeingRecycled(type, new_desc.contents()[i], + ¤t_desc.contents()[i], + secondary_content_info)) { + // For new offer descriptions, if the media section can be recycled, it's + // valid for the MID and media type to change. + continue; + } + if (new_desc.contents()[i].name != current_desc.contents()[i].name) { + return false; + } + const MediaContentDescription* new_desc_mdesc = + new_desc.contents()[i].media_description(); + const MediaContentDescription* current_desc_mdesc = + current_desc.contents()[i].media_description(); + if (new_desc_mdesc->type() != current_desc_mdesc->type()) { + return false; + } + } + return true; +} + +bool MediaSectionsHaveSameCount(const SessionDescription& desc1, + const SessionDescription& desc2) { + return desc1.contents().size() == desc2.contents().size(); +} +// Checks that each non-rejected content has SDES crypto keys or a DTLS +// fingerprint, unless it's in a BUNDLE group, in which case only the +// BUNDLE-tag section (first media section/description in the BUNDLE group) +// needs a ufrag and pwd. 
Mismatches, such as replying with a DTLS fingerprint +// to SDES keys, will be caught in JsepTransport negotiation, and backstopped +// by Channel's |srtp_required| check. +RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) { + const cricket::ContentGroup* bundle = + desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + for (const cricket::ContentInfo& content_info : desc->contents()) { + if (content_info.rejected) { + continue; + } + // Note what media is used with each crypto protocol, for all sections. + NoteKeyProtocolAndMedia(dtls_enabled ? webrtc::kEnumCounterKeyProtocolDtls + : webrtc::kEnumCounterKeyProtocolSdes, + content_info.media_description()->type()); + const std::string& mid = content_info.name; + if (bundle && bundle->HasContentName(mid) && + mid != *(bundle->FirstContentName())) { + // This isn't the first media section in the BUNDLE group, so it's not + // required to have crypto attributes, since only the crypto attributes + // from the first section actually get used. + continue; + } + + // If the content isn't rejected or bundled into another m= section, crypto + // must be present. + const MediaContentDescription* media = content_info.media_description(); + const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); + if (!media || !tinfo) { + // Something is not right. 
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); + } + if (dtls_enabled) { + if (!tinfo->description.identity_fingerprint) { + RTC_LOG(LS_WARNING) + << "Session description must have DTLS fingerprint if " + "DTLS enabled."; + return RTCError(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutDtlsFingerprint); + } + } else { + if (media->cryptos().empty()) { + RTC_LOG(LS_WARNING) + << "Session description must have SDES when DTLS disabled."; + return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutSdesCrypto); + } + } + } + return RTCError::OK(); +} + +// Checks that each non-rejected content has ice-ufrag and ice-pwd set, unless +// it's in a BUNDLE group, in which case only the BUNDLE-tag section (first +// media section/description in the BUNDLE group) needs a ufrag and pwd. +bool VerifyIceUfragPwdPresent(const SessionDescription* desc) { + const cricket::ContentGroup* bundle = + desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + for (const cricket::ContentInfo& content_info : desc->contents()) { + if (content_info.rejected) { + continue; + } + const std::string& mid = content_info.name; + if (bundle && bundle->HasContentName(mid) && + mid != *(bundle->FirstContentName())) { + // This isn't the first media section in the BUNDLE group, so it's not + // required to have ufrag/password, since only the ufrag/password from + // the first section actually get used. + continue; + } + + // If the content isn't rejected or bundled into another m= section, + // ice-ufrag and ice-pwd must be present. + const TransportInfo* tinfo = desc->GetTransportInfoByName(mid); + if (!tinfo) { + // Something is not right. 
+ RTC_LOG(LS_ERROR) << kInvalidSdp; + return false; + } + if (tinfo->description.ice_ufrag.empty() || + tinfo->description.ice_pwd.empty()) { + RTC_LOG(LS_ERROR) << "Session description must have ice ufrag and pwd."; + return false; + } + } + return true; +} + +static RTCError ValidateMids(const cricket::SessionDescription& description) { + std::set mids; + for (const cricket::ContentInfo& content : description.contents()) { + if (content.name.empty()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "A media section is missing a MID attribute."); + } + if (!mids.insert(content.name).second) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Duplicate a=mid value '" + content.name + "'."); + } + } + return RTCError::OK(); +} + +bool IsValidOfferToReceiveMedia(int value) { + typedef PeerConnectionInterface::RTCOfferAnswerOptions Options; + return (value >= Options::kUndefined) && + (value <= Options::kMaxOfferToReceiveMedia); +} + +bool ValidateOfferAnswerOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options) { + return IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) && + IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video); +} + +// Map internal signaling state name to spec name: +// https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum +std::string GetSignalingStateString( + PeerConnectionInterface::SignalingState state) { + switch (state) { + case PeerConnectionInterface::kStable: + return "stable"; + case PeerConnectionInterface::kHaveLocalOffer: + return "have-local-offer"; + case PeerConnectionInterface::kHaveLocalPrAnswer: + return "have-local-pranswer"; + case PeerConnectionInterface::kHaveRemoteOffer: + return "have-remote-offer"; + case PeerConnectionInterface::kHaveRemotePrAnswer: + return "have-remote-pranswer"; + case PeerConnectionInterface::kClosed: + return "closed"; + } + RTC_NOTREACHED(); + return ""; +} + +// This method will extract any send encodings that were sent by the 
remote +// connection. This is currently only relevant for Simulcast scenario (where +// the number of layers may be communicated by the server). +static std::vector GetSendEncodingsFromRemoteDescription( + const MediaContentDescription& desc) { + if (!desc.HasSimulcast()) { + return {}; + } + std::vector result; + const SimulcastDescription& simulcast = desc.simulcast_description(); + + // This is a remote description, the parameters we are after should appear + // as receive streams. + for (const auto& alternatives : simulcast.receive_layers()) { + RTC_DCHECK(!alternatives.empty()); + // There is currently no way to specify or choose from alternatives. + // We will always use the first alternative, which is the most preferred. + const SimulcastLayer& layer = alternatives[0]; + RtpEncodingParameters parameters; + parameters.rid = layer.rid; + parameters.active = !layer.is_paused; + result.push_back(parameters); + } + + return result; +} + +static RTCError UpdateSimulcastLayerStatusInSender( + const std::vector& layers, + rtc::scoped_refptr sender) { + RTC_DCHECK(sender); + RtpParameters parameters = sender->GetParametersInternal(); + std::vector disabled_layers; + + // The simulcast envelope cannot be changed, only the status of the streams. + // So we will iterate over the send encodings rather than the layers. + for (RtpEncodingParameters& encoding : parameters.encodings) { + auto iter = std::find_if(layers.begin(), layers.end(), + [&encoding](const SimulcastLayer& layer) { + return layer.rid == encoding.rid; + }); + // A layer that cannot be found may have been removed by the remote party. 
+ if (iter == layers.end()) { + disabled_layers.push_back(encoding.rid); + continue; + } + + encoding.active = !iter->is_paused; + } + + RTCError result = sender->SetParametersInternal(parameters); + if (result.ok()) { + result = sender->DisableEncodingLayers(disabled_layers); + } + + return result; +} + +static bool SimulcastIsRejected( + const ContentInfo* local_content, + const MediaContentDescription& answer_media_desc) { + bool simulcast_offered = local_content && + local_content->media_description() && + local_content->media_description()->HasSimulcast(); + bool simulcast_answered = answer_media_desc.HasSimulcast(); + bool rids_supported = RtpExtension::FindHeaderExtensionByUri( + answer_media_desc.rtp_header_extensions(), RtpExtension::kRidUri); + return simulcast_offered && (!simulcast_answered || !rids_supported); +} + +static RTCError DisableSimulcastInSender( + rtc::scoped_refptr sender) { + RTC_DCHECK(sender); + RtpParameters parameters = sender->GetParametersInternal(); + if (parameters.encodings.size() <= 1) { + return RTCError::OK(); + } + + std::vector disabled_layers; + std::transform( + parameters.encodings.begin() + 1, parameters.encodings.end(), + std::back_inserter(disabled_layers), + [](const RtpEncodingParameters& encoding) { return encoding.rid; }); + return sender->DisableEncodingLayers(disabled_layers); +} + +// The SDP parser used to populate these values by default for the 'content +// name' if an a=mid line was absent. 
+static absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { + switch (media_type) { + case cricket::MEDIA_TYPE_AUDIO: + return cricket::CN_AUDIO; + case cricket::MEDIA_TYPE_VIDEO: + return cricket::CN_VIDEO; + case cricket::MEDIA_TYPE_DATA: + return cricket::CN_DATA; + case cricket::MEDIA_TYPE_SCREEN: + return cricket::CN_VIDEO; + case cricket::MEDIA_TYPE_UNSUPPORTED: + return "not supported"; + } + RTC_NOTREACHED(); + return ""; +} + +// Add options to |[audio/video]_media_description_options| from |senders|. +void AddPlanBRtpSenderOptions( + const std::vector>>& senders, + cricket::MediaDescriptionOptions* audio_media_description_options, + cricket::MediaDescriptionOptions* video_media_description_options, + int num_sim_layers) { + for (const auto& sender : senders) { + if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (audio_media_description_options) { + audio_media_description_options->AddAudioSender( + sender->id(), sender->internal()->stream_ids()); + } + } else { + RTC_DCHECK(sender->media_type() == cricket::MEDIA_TYPE_VIDEO); + if (video_media_description_options) { + video_media_description_options->AddVideoSender( + sender->id(), sender->internal()->stream_ids(), {}, + SimulcastLayerList(), num_sim_layers); + } + } + } +} + +static cricket::MediaDescriptionOptions +GetMediaDescriptionOptionsForTransceiver( + rtc::scoped_refptr> + transceiver, + const std::string& mid, + bool is_create_offer) { + // NOTE: a stopping transceiver should be treated as a stopped one in + // createOffer as specified in + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. + bool stopped = + is_create_offer ? 
transceiver->stopping() : transceiver->stopped(); + cricket::MediaDescriptionOptions media_description_options( + transceiver->media_type(), mid, transceiver->direction(), stopped); + media_description_options.codec_preferences = + transceiver->codec_preferences(); + media_description_options.header_extensions = + transceiver->HeaderExtensionsToOffer(); + // This behavior is specified in JSEP. The gist is that: + // 1. The MSID is included if the RtpTransceiver's direction is sendonly or + // sendrecv. + // 2. If the MSID is included, then it must be included in any subsequent + // offer/answer exactly the same until the RtpTransceiver is stopped. + if (stopped || (!RtpTransceiverDirectionHasSend(transceiver->direction()) && + !transceiver->internal()->has_ever_been_used_to_send())) { + return media_description_options; + } + + cricket::SenderOptions sender_options; + sender_options.track_id = transceiver->sender()->id(); + sender_options.stream_ids = transceiver->sender()->stream_ids(); + + // The following sets up RIDs and Simulcast. + // RIDs are included if Simulcast is requested or if any RID was specified. + RtpParameters send_parameters = + transceiver->internal()->sender_internal()->GetParametersInternal(); + bool has_rids = std::any_of(send_parameters.encodings.begin(), + send_parameters.encodings.end(), + [](const RtpEncodingParameters& encoding) { + return !encoding.rid.empty(); + }); + + std::vector send_rids; + SimulcastLayerList send_layers; + for (const RtpEncodingParameters& encoding : send_parameters.encodings) { + if (encoding.rid.empty()) { + continue; + } + send_rids.push_back(RidDescription(encoding.rid, RidDirection::kSend)); + send_layers.AddLayer(SimulcastLayer(encoding.rid, !encoding.active)); + } + + if (has_rids) { + sender_options.rids = send_rids; + } + + sender_options.simulcast_layers = send_layers; + // When RIDs are configured, we must set num_sim_layers to 0 to. 
+ // Otherwise, num_sim_layers must be 1 because either there is no + // simulcast, or simulcast is acheived by munging the SDP. + sender_options.num_sim_layers = has_rids ? 0 : 1; + media_description_options.sender_options.push_back(sender_options); + + return media_description_options; +} + +// Returns the ContentInfo at mline index |i|, or null if none exists. +static const ContentInfo* GetContentByIndex( + const SessionDescriptionInterface* sdesc, + size_t i) { + if (!sdesc) { + return nullptr; + } + const ContentInfos& contents = sdesc->description()->contents(); + return (i < contents.size() ? &contents[i] : nullptr); +} + +// From |rtc_options|, fill parts of |session_options| shared by all generated +// m= sectionss (in other words, nothing that involves a map/array). +void ExtractSharedMediaSessionOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options, + cricket::MediaSessionOptions* session_options) { + session_options->vad_enabled = rtc_options.voice_activity_detection; + session_options->bundle_enabled = rtc_options.use_rtp_mux; + session_options->raw_packetization_for_video = + rtc_options.raw_packetization_for_video; +} + +// Generate a RTCP CNAME when a PeerConnection is created. +std::string GenerateRtcpCname() { + std::string cname; + if (!rtc::CreateRandomString(kRtcpCnameLength, &cname)) { + RTC_LOG(LS_ERROR) << "Failed to generate CNAME."; + RTC_NOTREACHED(); + } + return cname; +} + +// Add options to |session_options| from |rtp_data_channels|. +void AddRtpDataChannelOptions( + const std::map>& + rtp_data_channels, + cricket::MediaDescriptionOptions* data_media_description_options) { + if (!data_media_description_options) { + return; + } + // Check for data channels. 
+ for (const auto& kv : rtp_data_channels) { + const RtpDataChannel* channel = kv.second; + if (channel->state() == RtpDataChannel::kConnecting || + channel->state() == RtpDataChannel::kOpen) { + // Legacy RTP data channels are signaled with the track/stream ID set to + // the data channel's label. + data_media_description_options->AddRtpDataChannel(channel->label(), + channel->label()); + } + } +} + +// Check if we can send |new_stream| on a PeerConnection. +bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, + webrtc::MediaStreamInterface* new_stream) { + if (!new_stream || !current_streams) { + return false; + } + if (current_streams->find(new_stream->id()) != nullptr) { + RTC_LOG(LS_ERROR) << "MediaStream with ID " << new_stream->id() + << " is already added."; + return false; + } + return true; +} + +} // namespace + +// Used by parameterless SetLocalDescription() to create an offer or answer. +// Upon completion of creating the session description, SetLocalDescription() is +// invoked with the result. +class SdpOfferAnswerHandler::ImplicitCreateSessionDescriptionObserver + : public CreateSessionDescriptionObserver { + public: + ImplicitCreateSessionDescriptionObserver( + rtc::WeakPtr sdp_handler, + rtc::scoped_refptr + set_local_description_observer) + : sdp_handler_(std::move(sdp_handler)), + set_local_description_observer_( + std::move(set_local_description_observer)) {} + ~ImplicitCreateSessionDescriptionObserver() override { + RTC_DCHECK(was_called_); + } + + void SetOperationCompleteCallback( + std::function operation_complete_callback) { + operation_complete_callback_ = std::move(operation_complete_callback); + } + + bool was_called() const { return was_called_; } + + void OnSuccess(SessionDescriptionInterface* desc_ptr) override { + RTC_DCHECK(!was_called_); + std::unique_ptr desc(desc_ptr); + was_called_ = true; + + // Abort early if |pc_| is no longer valid. 
+ if (!sdp_handler_) { + operation_complete_callback_(); + return; + } + // DoSetLocalDescription() is a synchronous operation that invokes + // |set_local_description_observer_| with the result. + sdp_handler_->DoSetLocalDescription( + std::move(desc), std::move(set_local_description_observer_)); + operation_complete_callback_(); + } + + void OnFailure(RTCError error) override { + RTC_DCHECK(!was_called_); + was_called_ = true; + set_local_description_observer_->OnSetLocalDescriptionComplete(RTCError( + error.type(), std::string("SetLocalDescription failed to create " + "session description - ") + + error.message())); + operation_complete_callback_(); + } + + private: + bool was_called_ = false; + rtc::WeakPtr sdp_handler_; + rtc::scoped_refptr + set_local_description_observer_; + std::function operation_complete_callback_; +}; + +// Wraps a CreateSessionDescriptionObserver and an OperationsChain operation +// complete callback. When the observer is invoked, the wrapped observer is +// invoked followed by invoking the completion callback. +class CreateSessionDescriptionObserverOperationWrapper + : public CreateSessionDescriptionObserver { + public: + CreateSessionDescriptionObserverOperationWrapper( + rtc::scoped_refptr observer, + std::function operation_complete_callback) + : observer_(std::move(observer)), + operation_complete_callback_(std::move(operation_complete_callback)) { + RTC_DCHECK(observer_); + } + ~CreateSessionDescriptionObserverOperationWrapper() override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(was_called_); +#endif + } + + void OnSuccess(SessionDescriptionInterface* desc) override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(!was_called_); + was_called_ = true; +#endif // RTC_DCHECK_IS_ON + // Completing the operation before invoking the observer allows the observer + // to execute SetLocalDescription() without delay. 
+ operation_complete_callback_(); + observer_->OnSuccess(desc); + } + + void OnFailure(RTCError error) override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(!was_called_); + was_called_ = true; +#endif // RTC_DCHECK_IS_ON + operation_complete_callback_(); + observer_->OnFailure(std::move(error)); + } + + private: +#if RTC_DCHECK_IS_ON + bool was_called_ = false; +#endif // RTC_DCHECK_IS_ON + rtc::scoped_refptr observer_; + std::function operation_complete_callback_; +}; + +// Wrapper for SetSessionDescriptionObserver that invokes the success or failure +// callback in a posted message handled by the peer connection. This introduces +// a delay that prevents recursive API calls by the observer, but this also +// means that the PeerConnection can be modified before the observer sees the +// result of the operation. This is ill-advised for synchronizing states. +// +// Implements both the SetLocalDescriptionObserverInterface and the +// SetRemoteDescriptionObserverInterface. +class SdpOfferAnswerHandler::SetSessionDescriptionObserverAdapter + : public SetLocalDescriptionObserverInterface, + public SetRemoteDescriptionObserverInterface { + public: + SetSessionDescriptionObserverAdapter( + rtc::WeakPtr handler, + rtc::scoped_refptr inner_observer) + : handler_(std::move(handler)), + inner_observer_(std::move(inner_observer)) {} + + // SetLocalDescriptionObserverInterface implementation. + void OnSetLocalDescriptionComplete(RTCError error) override { + OnSetDescriptionComplete(std::move(error)); + } + // SetRemoteDescriptionObserverInterface implementation. 
+ void OnSetRemoteDescriptionComplete(RTCError error) override { + OnSetDescriptionComplete(std::move(error)); + } + + private: + void OnSetDescriptionComplete(RTCError error) { + if (!handler_) + return; + if (error.ok()) { + handler_->pc_->message_handler()->PostSetSessionDescriptionSuccess( + inner_observer_); + } else { + handler_->pc_->message_handler()->PostSetSessionDescriptionFailure( + inner_observer_, std::move(error)); + } + } + + rtc::WeakPtr handler_; + rtc::scoped_refptr inner_observer_; +}; + +class SdpOfferAnswerHandler::LocalIceCredentialsToReplace { + public: + // Sets the ICE credentials that need restarting to the ICE credentials of + // the current and pending descriptions. + void SetIceCredentialsFromLocalDescriptions( + const SessionDescriptionInterface* current_local_description, + const SessionDescriptionInterface* pending_local_description) { + ice_credentials_.clear(); + if (current_local_description) { + AppendIceCredentialsFromSessionDescription(*current_local_description); + } + if (pending_local_description) { + AppendIceCredentialsFromSessionDescription(*pending_local_description); + } + } + + void ClearIceCredentials() { ice_credentials_.clear(); } + + // Returns true if we have ICE credentials that need restarting. + bool HasIceCredentials() const { return !ice_credentials_.empty(); } + + // Returns true if |local_description| shares no ICE credentials with the + // ICE credentials that need restarting. 
+ bool SatisfiesIceRestart( + const SessionDescriptionInterface& local_description) const { + for (const auto& transport_info : + local_description.description()->transport_infos()) { + if (ice_credentials_.find(std::make_pair( + transport_info.description.ice_ufrag, + transport_info.description.ice_pwd)) != ice_credentials_.end()) { + return false; + } + } + return true; + } + + private: + void AppendIceCredentialsFromSessionDescription( + const SessionDescriptionInterface& desc) { + for (const auto& transport_info : desc.description()->transport_infos()) { + ice_credentials_.insert( + std::make_pair(transport_info.description.ice_ufrag, + transport_info.description.ice_pwd)); + } + } + + std::set> ice_credentials_; +}; + +SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnection* pc) + : pc_(pc), + local_streams_(StreamCollection::Create()), + remote_streams_(StreamCollection::Create()), + operations_chain_(rtc::OperationsChain::Create()), + rtcp_cname_(GenerateRtcpCname()), + local_ice_credentials_to_replace_(new LocalIceCredentialsToReplace()), + weak_ptr_factory_(this) { + operations_chain_->SetOnChainEmptyCallback( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() { + if (!this_weak_ptr) + return; + this_weak_ptr->OnOperationsChainEmpty(); + }); +} + +SdpOfferAnswerHandler::~SdpOfferAnswerHandler() {} + +// Static +std::unique_ptr SdpOfferAnswerHandler::Create( + PeerConnection* pc, + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies) { + auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc)); + handler->Initialize(configuration, dependencies); + return handler; +} + +void SdpOfferAnswerHandler::Initialize( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies) { + RTC_DCHECK_RUN_ON(signaling_thread()); + video_options_.screencast_min_bitrate_kbps = + configuration.screencast_min_bitrate; + audio_options_.combined_audio_video_bwe = + 
configuration.combined_audio_video_bwe; + + audio_options_.audio_jitter_buffer_max_packets = + configuration.audio_jitter_buffer_max_packets; + + audio_options_.audio_jitter_buffer_fast_accelerate = + configuration.audio_jitter_buffer_fast_accelerate; + + audio_options_.audio_jitter_buffer_min_delay_ms = + configuration.audio_jitter_buffer_min_delay_ms; + + audio_options_.audio_jitter_buffer_enable_rtx_handling = + configuration.audio_jitter_buffer_enable_rtx_handling; + + // Obtain a certificate from RTCConfiguration if any were provided (optional). + rtc::scoped_refptr certificate; + if (!configuration.certificates.empty()) { + // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of + // just picking the first one. The decision should be made based on the DTLS + // handshake. The DTLS negotiations need to know about all certificates. + certificate = configuration.certificates[0]; + } + + webrtc_session_desc_factory_ = + std::make_unique( + signaling_thread(), channel_manager(), this, pc_->session_id(), + pc_->dtls_enabled(), std::move(dependencies.cert_generator), + certificate, &ssrc_generator_, + [this](const rtc::scoped_refptr& certificate) { + transport_controller()->SetLocalCertificate(certificate); + }); + + if (pc_->options()->disable_encryption) { + webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED); + } + + webrtc_session_desc_factory_->set_enable_encrypted_rtp_header_extensions( + pc_->GetCryptoOptions().srtp.enable_encrypted_rtp_header_extensions); + webrtc_session_desc_factory_->set_is_unified_plan(IsUnifiedPlan()); + + if (dependencies.video_bitrate_allocator_factory) { + video_bitrate_allocator_factory_ = + std::move(dependencies.video_bitrate_allocator_factory); + } else { + video_bitrate_allocator_factory_ = + CreateBuiltinVideoBitrateAllocatorFactory(); + } +} + +// ================================================================== +// Access to pc_ variables +cricket::ChannelManager* 
SdpOfferAnswerHandler::channel_manager() const { + return pc_->channel_manager(); +} +TransceiverList* SdpOfferAnswerHandler::transceivers() { + if (!pc_->rtp_manager()) { + return nullptr; + } + return pc_->rtp_manager()->transceivers(); +} +const TransceiverList* SdpOfferAnswerHandler::transceivers() const { + if (!pc_->rtp_manager()) { + return nullptr; + } + return pc_->rtp_manager()->transceivers(); +} +JsepTransportController* SdpOfferAnswerHandler::transport_controller() { + return pc_->transport_controller(); +} +const JsepTransportController* SdpOfferAnswerHandler::transport_controller() + const { + return pc_->transport_controller(); +} +DataChannelController* SdpOfferAnswerHandler::data_channel_controller() { + return pc_->data_channel_controller(); +} +const DataChannelController* SdpOfferAnswerHandler::data_channel_controller() + const { + return pc_->data_channel_controller(); +} +cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() { + return pc_->port_allocator(); +} +const cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() const { + return pc_->port_allocator(); +} +RtpTransmissionManager* SdpOfferAnswerHandler::rtp_manager() { + return pc_->rtp_manager(); +} +const RtpTransmissionManager* SdpOfferAnswerHandler::rtp_manager() const { + return pc_->rtp_manager(); +} + +// =================================================================== + +void SdpOfferAnswerHandler::PrepareForShutdown() { + RTC_DCHECK_RUN_ON(signaling_thread()); + weak_ptr_factory_.InvalidateWeakPtrs(); +} + +void SdpOfferAnswerHandler::Close() { + ChangeSignalingState(PeerConnectionInterface::kClosed); +} + +void SdpOfferAnswerHandler::RestartIce() { + RTC_DCHECK_RUN_ON(signaling_thread()); + local_ice_credentials_to_replace_->SetIceCredentialsFromLocalDescriptions( + current_local_description(), pending_local_description()); + UpdateNegotiationNeeded(); +} + +rtc::Thread* SdpOfferAnswerHandler::signaling_thread() const { + return pc_->signaling_thread(); 
+} + +void SdpOfferAnswerHandler::CreateOffer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + options](std::function operations_chain_callback) { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + observer_refptr->OnFailure( + RTCError(RTCErrorType::INTERNAL_ERROR, + "CreateOffer failed because the session was shut down")); + operations_chain_callback(); + return; + } + // The operation completes asynchronously when the wrapper is invoked. + rtc::scoped_refptr + observer_wrapper(new rtc::RefCountedObject< + CreateSessionDescriptionObserverOperationWrapper>( + std::move(observer_refptr), + std::move(operations_chain_callback))); + this_weak_ptr->DoCreateOffer(options, observer_wrapper); + }); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc_ptr) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + desc = std::unique_ptr(desc_ptr)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. 
+ if (!this_weak_ptr) { + // For consistency with SetSessionDescriptionObserverAdapter whose + // posted messages doesn't get processed when the PC is destroyed, we + // do not inform |observer_refptr| that the operation failed. + operations_chain_callback(); + return; + } + // SetSessionDescriptionObserverAdapter takes care of making sure the + // |observer_refptr| is invoked in a posted message. + this_weak_ptr->DoSetLocalDescription( + std::move(desc), + rtc::scoped_refptr( + new rtc::RefCountedObject( + this_weak_ptr, observer_refptr))); + // For backwards-compatability reasons, we declare the operation as + // completed here (rather than in a post), so that the operation chain + // is not blocked by this operation when the observer is invoked. This + // allows the observer to trigger subsequent offer/answer operations + // synchronously if the operation chain is now empty. + operations_chain_callback(); + }); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, + desc = std::move(desc)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + observer->OnSetLocalDescriptionComplete(RTCError( + RTCErrorType::INTERNAL_ERROR, + "SetLocalDescription failed because the session was shut down")); + operations_chain_callback(); + return; + } + this_weak_ptr->DoSetLocalDescription(std::move(desc), observer); + // DoSetLocalDescription() is implemented as a synchronous operation. 
+ // The |observer| will already have been informed that it completed, and + // we can mark this operation as complete without any loose ends. + operations_chain_callback(); + }); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + SetSessionDescriptionObserver* observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + SetLocalDescription( + new rtc::RefCountedObject( + weak_ptr_factory_.GetWeakPtr(), observer)); +} + +void SdpOfferAnswerHandler::SetLocalDescription( + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // The |create_sdp_observer| handles performing DoSetLocalDescription() with + // the resulting description as well as completing the operation. + rtc::scoped_refptr + create_sdp_observer( + new rtc::RefCountedObject( + weak_ptr_factory_.GetWeakPtr(), observer)); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + create_sdp_observer](std::function operations_chain_callback) { + // The |create_sdp_observer| is responsible for completing the + // operation. + create_sdp_observer->SetOperationCompleteCallback( + std::move(operations_chain_callback)); + // Abort early if |this_weak_ptr| is no longer valid. This triggers the + // same code path as if DoCreateOffer() or DoCreateAnswer() failed. 
+ if (!this_weak_ptr) { + create_sdp_observer->OnFailure(RTCError( + RTCErrorType::INTERNAL_ERROR, + "SetLocalDescription failed because the session was shut down")); + return; + } + switch (this_weak_ptr->signaling_state()) { + case PeerConnectionInterface::kStable: + case PeerConnectionInterface::kHaveLocalOffer: + case PeerConnectionInterface::kHaveRemotePrAnswer: + // TODO(hbos): If [LastCreatedOffer] exists and still represents the + // current state of the system, use that instead of creating another + // offer. + this_weak_ptr->DoCreateOffer( + PeerConnectionInterface::RTCOfferAnswerOptions(), + create_sdp_observer); + break; + case PeerConnectionInterface::kHaveLocalPrAnswer: + case PeerConnectionInterface::kHaveRemoteOffer: + // TODO(hbos): If [LastCreatedAnswer] exists and still represents + // the current state of the system, use that instead of creating + // another answer. + this_weak_ptr->DoCreateAnswer( + PeerConnectionInterface::RTCOfferAnswerOptions(), + create_sdp_observer); + break; + case PeerConnectionInterface::kClosed: + create_sdp_observer->OnFailure(RTCError( + RTCErrorType::INVALID_STATE, + "SetLocalDescription called when PeerConnection is closed.")); + break; + } + }); +} + +RTCError SdpOfferAnswerHandler::ApplyLocalDescription( + std::unique_ptr desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(desc); + + // Update stats here so that we have the most recent stats for tracks and + // streams that might be removed by updating the session description. + pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); + + // Take a reference to the old local description since it's used below to + // compare against the new local description. When setting the new local + // description, grab ownership of the replaced session description in case it + // is the same as |old_local_description|, to keep it alive for the duration + // of the method. 
+ const SessionDescriptionInterface* old_local_description = + local_description(); + std::unique_ptr replaced_local_description; + SdpType type = desc->GetType(); + if (type == SdpType::kAnswer) { + replaced_local_description = pending_local_description_ + ? std::move(pending_local_description_) + : std::move(current_local_description_); + current_local_description_ = std::move(desc); + pending_local_description_ = nullptr; + current_remote_description_ = std::move(pending_remote_description_); + } else { + replaced_local_description = std::move(pending_local_description_); + pending_local_description_ = std::move(desc); + } + // The session description to apply now must be accessed by + // |local_description()|. + RTC_DCHECK(local_description()); + + // Report statistics about any use of simulcast. + ReportSimulcastApiVersion(kSimulcastVersionApplyLocalDescription, + *local_description()->description()); + + if (!is_caller_) { + if (remote_description()) { + // Remote description was applied first, so this PC is the callee. + is_caller_ = false; + } else { + // Local description is applied first, so this PC is the caller. + is_caller_ = true; + } + } + + RTCError error = PushdownTransportDescription(cricket::CS_LOCAL, type); + if (!error.ok()) { + return error; + } + + if (IsUnifiedPlan()) { + RTCError error = UpdateTransceiversAndDataChannels( + cricket::CS_LOCAL, *local_description(), old_local_description, + remote_description()); + if (!error.ok()) { + return error; + } + std::vector> remove_list; + std::vector> removed_streams; + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->stopped()) { + continue; + } + + // 2.2.7.1.1.(6-9): Set sender and receiver's transport slots. + // Note that code paths that don't set MID won't be able to use + // information about DTLS transports. 
+ if (transceiver->mid()) { + auto dtls_transport = transport_controller()->LookupDtlsTransportByMid( + *transceiver->mid()); + transceiver->internal()->sender_internal()->set_transport( + dtls_transport); + transceiver->internal()->receiver_internal()->set_transport( + dtls_transport); + } + + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, local_description()); + if (!content) { + continue; + } + const MediaContentDescription* media_desc = content->media_description(); + // 2.2.7.1.6: If description is of type "answer" or "pranswer", then run + // the following steps: + if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // 2.2.7.1.6.1: If direction is "sendonly" or "inactive", and + // transceiver's [[FiredDirection]] slot is either "sendrecv" or + // "recvonly", process the removal of a remote track for the media + // description, given transceiver, removeList, and muteTracks. + if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) && + (transceiver->internal()->fired_direction() && + RtpTransceiverDirectionHasRecv( + *transceiver->internal()->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver, &remove_list, + &removed_streams); + } + // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and + // [[FiredDirection]] slots to direction. + transceiver->internal()->set_current_direction(media_desc->direction()); + transceiver->internal()->set_fired_direction(media_desc->direction()); + } + } + auto observer = pc_->Observer(); + for (const auto& transceiver : remove_list) { + observer->OnRemoveTrack(transceiver->receiver()); + } + for (const auto& stream : removed_streams) { + observer->OnRemoveStream(stream); + } + } else { + // Media channels will be created only when offer is set. These may use new + // transports just created by PushdownTransportDescription. + if (type == SdpType::kOffer) { + // TODO(bugs.webrtc.org/4676) - Handle CreateChannel failure, as new local + // description is applied. 
Restore back to old description. + RTCError error = CreateChannels(*local_description()->description()); + if (!error.ok()) { + return error; + } + } + // Remove unused channels if MediaContentDescription is rejected. + RemoveUnusedChannels(local_description()->description()); + } + + error = UpdateSessionState(type, cricket::CS_LOCAL, + local_description()->description()); + if (!error.ok()) { + return error; + } + + if (remote_description()) { + // Now that we have a local description, we can push down remote candidates. + UseCandidatesInSessionDescription(remote_description()); + } + + pending_ice_restarts_.clear(); + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } + + // If setting the description decided our SSL role, allocate any necessary + // SCTP sids. + rtc::SSLRole role; + if (IsSctpLike(pc_->data_channel_type()) && pc_->GetSctpSslRole(&role)) { + data_channel_controller()->AllocateSctpSids(role); + } + + if (IsUnifiedPlan()) { + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->stopped()) { + continue; + } + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, local_description()); + if (!content) { + continue; + } + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (content->rejected || !channel || channel->local_streams().empty()) { + // 0 is a special value meaning "this sender has no associated send + // stream". Need to call this so the sender won't attempt to configure + // a no longer existing stream and run into DCHECKs in the lower + // layers. + transceiver->internal()->sender_internal()->SetSsrc(0); + } else { + // Get the StreamParams from the channel which could generate SSRCs. 
+ const std::vector& streams = channel->local_streams(); + transceiver->internal()->sender_internal()->set_stream_ids( + streams[0].stream_ids()); + transceiver->internal()->sender_internal()->SetSsrc( + streams[0].first_ssrc()); + } + } + } else { + // Plan B semantics. + + // Update state and SSRC of local MediaStreams and DataChannels based on the + // local session description. + const cricket::ContentInfo* audio_content = + GetFirstAudioContent(local_description()->description()); + if (audio_content) { + if (audio_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + } else { + const cricket::AudioContentDescription* audio_desc = + audio_content->media_description()->as_audio(); + UpdateLocalSenders(audio_desc->streams(), audio_desc->type()); + } + } + + const cricket::ContentInfo* video_content = + GetFirstVideoContent(local_description()->description()); + if (video_content) { + if (video_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + } else { + const cricket::VideoContentDescription* video_desc = + video_content->media_description()->as_video(); + UpdateLocalSenders(video_desc->streams(), video_desc->type()); + } + } + } + + const cricket::ContentInfo* data_content = + GetFirstDataContent(local_description()->description()); + if (data_content) { + const cricket::RtpDataContentDescription* rtp_data_desc = + data_content->media_description()->as_rtp_data(); + // rtp_data_desc will be null if this is an SCTP description. 
+ if (rtp_data_desc) { + data_channel_controller()->UpdateLocalRtpDataChannels( + rtp_data_desc->streams()); + } + } + + if (type == SdpType::kAnswer && + local_ice_credentials_to_replace_->SatisfiesIceRestart( + *current_local_description_)) { + local_ice_credentials_to_replace_->ClearIceCredentials(); + } + + return RTCError::OK(); +} + +void SdpOfferAnswerHandler::SetRemoteDescription( + SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc_ptr) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + desc = std::unique_ptr(desc_ptr)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + // For consistency with SetSessionDescriptionObserverAdapter whose + // posted messages doesn't get processed when the PC is destroyed, we + // do not inform |observer_refptr| that the operation failed. + operations_chain_callback(); + return; + } + // SetSessionDescriptionObserverAdapter takes care of making sure the + // |observer_refptr| is invoked in a posted message. + this_weak_ptr->DoSetRemoteDescription( + std::move(desc), + rtc::scoped_refptr( + new rtc::RefCountedObject( + this_weak_ptr, observer_refptr))); + // For backwards-compatability reasons, we declare the operation as + // completed here (rather than in a post), so that the operation chain + // is not blocked by this operation when the observer is invoked. This + // allows the observer to trigger subsequent offer/answer operations + // synchronously if the operation chain is now empty. 
+ operations_chain_callback(); + }); +} + +void SdpOfferAnswerHandler::SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, + desc = std::move(desc)]( + std::function operations_chain_callback) mutable { + // Abort early if |this_weak_ptr| is no longer valid. + if (!this_weak_ptr) { + observer->OnSetRemoteDescriptionComplete(RTCError( + RTCErrorType::INTERNAL_ERROR, + "SetRemoteDescription failed because the session was shut down")); + operations_chain_callback(); + return; + } + this_weak_ptr->DoSetRemoteDescription(std::move(desc), + std::move(observer)); + // DoSetRemoteDescription() is implemented as a synchronous operation. + // The |observer| will already have been informed that it completed, and + // we can mark this operation as complete without any loose ends. + operations_chain_callback(); + }); +} + +RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( + std::unique_ptr desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(desc); + + // Update stats here so that we have the most recent stats for tracks and + // streams that might be removed by updating the session description. + pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); + + // Take a reference to the old remote description since it's used below to + // compare against the new remote description. When setting the new remote + // description, grab ownership of the replaced session description in case it + // is the same as |old_remote_description|, to keep it alive for the duration + // of the method. 
+ const SessionDescriptionInterface* old_remote_description = + remote_description(); + std::unique_ptr replaced_remote_description; + SdpType type = desc->GetType(); + if (type == SdpType::kAnswer) { + replaced_remote_description = pending_remote_description_ + ? std::move(pending_remote_description_) + : std::move(current_remote_description_); + current_remote_description_ = std::move(desc); + pending_remote_description_ = nullptr; + current_local_description_ = std::move(pending_local_description_); + } else { + replaced_remote_description = std::move(pending_remote_description_); + pending_remote_description_ = std::move(desc); + } + // The session description to apply now must be accessed by + // |remote_description()|. + RTC_DCHECK(remote_description()); + + // Report statistics about any use of simulcast. + ReportSimulcastApiVersion(kSimulcastVersionApplyRemoteDescription, + *remote_description()->description()); + + RTCError error = PushdownTransportDescription(cricket::CS_REMOTE, type); + if (!error.ok()) { + return error; + } + // Transport and Media channels will be created only when offer is set. + if (IsUnifiedPlan()) { + RTCError error = UpdateTransceiversAndDataChannels( + cricket::CS_REMOTE, *remote_description(), local_description(), + old_remote_description); + if (!error.ok()) { + return error; + } + } else { + // Media channels will be created only when offer is set. These may use new + // transports just created by PushdownTransportDescription. + if (type == SdpType::kOffer) { + // TODO(mallinath) - Handle CreateChannel failure, as new local + // description is applied. Restore back to old description. + RTCError error = CreateChannels(*remote_description()->description()); + if (!error.ok()) { + return error; + } + } + // Remove unused channels if MediaContentDescription is rejected. 
+ RemoveUnusedChannels(remote_description()->description()); + } + + // NOTE: Candidates allocation will be initiated only when + // SetLocalDescription is called. + error = UpdateSessionState(type, cricket::CS_REMOTE, + remote_description()->description()); + if (!error.ok()) { + return error; + } + + if (local_description() && + !UseCandidatesInSessionDescription(remote_description())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidCandidates); + } + + if (old_remote_description) { + for (const cricket::ContentInfo& content : + old_remote_description->description()->contents()) { + // Check if this new SessionDescription contains new ICE ufrag and + // password that indicates the remote peer requests an ICE restart. + // TODO(deadbeef): When we start storing both the current and pending + // remote description, this should reset pending_ice_restarts and compare + // against the current description. + if (CheckForRemoteIceRestart(old_remote_description, remote_description(), + content.name)) { + if (type == SdpType::kOffer) { + pending_ice_restarts_.insert(content.name); + } + } else { + // We retain all received candidates only if ICE is not restarted. + // When ICE is restarted, all previous candidates belong to an old + // generation and should not be kept. + // TODO(deadbeef): This goes against the W3C spec which says the remote + // description should only contain candidates from the last set remote + // description plus any candidates added since then. We should remove + // this once we're sure it won't break anything. 
+ WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( + old_remote_description, content.name, mutable_remote_description()); + } + } + } + + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } + + // Set the the ICE connection state to connecting since the connection may + // become writable with peer reflexive candidates before any remote candidate + // is signaled. + // TODO(pthatcher): This is a short-term solution for crbug/446908. A real fix + // is to have a new signal the indicates a change in checking state from the + // transport and expose a new checking() member from transport that can be + // read to determine the current checking state. The existing SignalConnecting + // actually means "gathering candidates", so cannot be be used here. + if (remote_description()->GetType() != SdpType::kOffer && + remote_description()->number_of_mediasections() > 0u && + pc_->ice_connection_state() == + PeerConnectionInterface::kIceConnectionNew) { + pc_->SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); + } + + // If setting the description decided our SSL role, allocate any necessary + // SCTP sids. 
+ rtc::SSLRole role; + if (IsSctpLike(pc_->data_channel_type()) && pc_->GetSctpSslRole(&role)) { + data_channel_controller()->AllocateSctpSids(role); + } + + if (IsUnifiedPlan()) { + std::vector> + now_receiving_transceivers; + std::vector> remove_list; + std::vector> added_streams; + std::vector> removed_streams; + for (const auto& transceiver : transceivers()->List()) { + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, remote_description()); + if (!content) { + continue; + } + const MediaContentDescription* media_desc = content->media_description(); + RtpTransceiverDirection local_direction = + RtpTransceiverDirectionReversed(media_desc->direction()); + // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the + // RTCSessionDescription: Set the associated remote streams given + // transceiver.[[Receiver]], msids, addList, and removeList". + // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription + if (RtpTransceiverDirectionHasRecv(local_direction)) { + std::vector stream_ids; + if (!media_desc->streams().empty()) { + // The remote description has signaled the stream IDs. + stream_ids = media_desc->streams()[0].stream_ids(); + } + transceivers() + ->StableState(transceiver) + ->SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); + + RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name + << " (" << GetStreamIdsString(stream_ids) << ")."; + SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), + stream_ids, &added_streams, + &removed_streams); + // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 + // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, + // and transceiver's current direction is neither sendrecv nor recvonly, + // process the addition of a remote track for the media description. 
+ if (!transceiver->fired_direction() || + !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { + RTC_LOG(LS_INFO) + << "Processing the addition of a remote track for MID=" + << content->name << "."; + now_receiving_transceivers.push_back(transceiver); + } + } + // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's + // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the + // removal of a remote track for the media description, given transceiver, + // removeList, and muteTracks. + if (!RtpTransceiverDirectionHasRecv(local_direction) && + (transceiver->fired_direction() && + RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver, &remove_list, + &removed_streams); + } + // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. + transceiver->internal()->set_fired_direction(local_direction); + // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run + // the following steps: + if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to + // direction. + transceiver->internal()->set_current_direction(local_direction); + // 2.2.8.1.11.[3-6]: Set the transport internal slots. + if (transceiver->mid()) { + auto dtls_transport = + transport_controller()->LookupDtlsTransportByMid( + *transceiver->mid()); + transceiver->internal()->sender_internal()->set_transport( + dtls_transport); + transceiver->internal()->receiver_internal()->set_transport( + dtls_transport); + } + } + // 2.2.8.1.12: If the media description is rejected, and transceiver is + // not already stopped, stop the RTCRtpTransceiver transceiver. 
+ if (content->rejected && !transceiver->stopped()) { + RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name + << " since the media section was rejected."; + transceiver->internal()->StopTransceiverProcedure(); + } + if (!content->rejected && + RtpTransceiverDirectionHasRecv(local_direction)) { + if (!media_desc->streams().empty() && + media_desc->streams()[0].has_ssrcs()) { + uint32_t ssrc = media_desc->streams()[0].first_ssrc(); + transceiver->internal()->receiver_internal()->SetupMediaChannel(ssrc); + } else { + transceiver->internal() + ->receiver_internal() + ->SetupUnsignaledMediaChannel(); + } + } + } + // Once all processing has finished, fire off callbacks. + auto observer = pc_->Observer(); + for (const auto& transceiver : now_receiving_transceivers) { + pc_->stats()->AddTrack(transceiver->receiver()->track()); + observer->OnTrack(transceiver); + observer->OnAddTrack(transceiver->receiver(), + transceiver->receiver()->streams()); + } + for (const auto& stream : added_streams) { + observer->OnAddStream(stream); + } + for (const auto& transceiver : remove_list) { + observer->OnRemoveTrack(transceiver->receiver()); + } + for (const auto& stream : removed_streams) { + observer->OnRemoveStream(stream); + } + } + + const cricket::ContentInfo* audio_content = + GetFirstAudioContent(remote_description()->description()); + const cricket::ContentInfo* video_content = + GetFirstVideoContent(remote_description()->description()); + const cricket::AudioContentDescription* audio_desc = + GetFirstAudioContentDescription(remote_description()->description()); + const cricket::VideoContentDescription* video_desc = + GetFirstVideoContentDescription(remote_description()->description()); + const cricket::RtpDataContentDescription* rtp_data_desc = + GetFirstRtpDataContentDescription(remote_description()->description()); + + // Check if the descriptions include streams, just in case the peer supports + // MSID, but doesn't indicate so with "a=msid-semantic". 
+ if (remote_description()->description()->msid_supported() || + (audio_desc && !audio_desc->streams().empty()) || + (video_desc && !video_desc->streams().empty())) { + remote_peer_supports_msid_ = true; + } + + // We wait to signal new streams until we finish processing the description, + // since only at that point will new streams have all their tracks. + rtc::scoped_refptr new_streams(StreamCollection::Create()); + + if (!IsUnifiedPlan()) { + // TODO(steveanton): When removing RTP senders/receivers in response to a + // rejected media section, there is some cleanup logic that expects the + // voice/ video channel to still be set. But in this method the voice/video + // channel would have been destroyed by the SetRemoteDescription caller + // above so the cleanup that relies on them fails to run. The RemoveSenders + // calls should be moved to right before the DestroyChannel calls to fix + // this. + + // Find all audio rtp streams and create corresponding remote AudioTracks + // and MediaStreams. + if (audio_content) { + if (audio_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + } else { + bool default_audio_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(audio_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(audio_desc), + default_audio_track_needed, audio_desc->type(), + new_streams); + } + } + + // Find all video rtp streams and create corresponding remote VideoTracks + // and MediaStreams. + if (video_content) { + if (video_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + } else { + bool default_video_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(video_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(video_desc), + default_video_track_needed, video_desc->type(), + new_streams); + } + } + + // If this is an RTP data transport, update the DataChannels with the + // information from the remote peer. 
+ if (rtp_data_desc) { + data_channel_controller()->UpdateRemoteRtpDataChannels( + GetActiveStreams(rtp_data_desc)); + } + + // Iterate new_streams and notify the observer about new MediaStreams. + auto observer = pc_->Observer(); + for (size_t i = 0; i < new_streams->count(); ++i) { + MediaStreamInterface* new_stream = new_streams->at(i); + pc_->stats()->AddStream(new_stream); + observer->OnAddStream( + rtc::scoped_refptr(new_stream)); + } + + UpdateEndedRemoteMediaStreams(); + } + + if (type == SdpType::kAnswer && + local_ice_credentials_to_replace_->SatisfiesIceRestart( + *current_local_description_)) { + local_ice_credentials_to_replace_->ClearIceCredentials(); + } + + return RTCError::OK(); +} + +void SdpOfferAnswerHandler::DoSetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetLocalDescription"); + + if (!observer) { + RTC_LOG(LS_ERROR) << "SetLocalDescription - observer is NULL."; + return; + } + + if (!desc) { + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, "SessionDescription is NULL.")); + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "SetLocalDescription: " << error_message; + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + // For SLD we support only explicit rollback. 
+ if (desc->GetType() == SdpType::kRollback) { + if (IsUnifiedPlan()) { + observer->OnSetLocalDescriptionComplete(Rollback(desc->GetType())); + } else { + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Rollback not supported in Plan B")); + } + return; + } + + RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL); + if (!error.ok()) { + std::string error_message = GetSetDescriptionErrorMessage( + cricket::CS_LOCAL, desc->GetType(), error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + // Grab the description type before moving ownership to ApplyLocalDescription, + // which may destroy it before returning. + const SdpType type = desc->GetType(); + + error = ApplyLocalDescription(std::move(desc)); + // |desc| may be destroyed at this point. + + if (!error.ok()) { + // If ApplyLocalDescription fails, the PeerConnection could be in an + // inconsistent state, so act conservatively here and set the session error + // so that future calls to SetLocalDescription/SetRemoteDescription fail. + SetSessionError(SessionError::kContent, error.message()); + std::string error_message = + GetSetDescriptionErrorMessage(cricket::CS_LOCAL, type, error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + RTC_DCHECK(local_description()); + + if (local_description()->GetType() == SdpType::kAnswer) { + RemoveStoppedTransceivers(); + + // TODO(deadbeef): We already had to hop to the network thread for + // MaybeStartGathering... + pc_->network_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, + port_allocator())); + // Make UMA notes about what was agreed to. 
+ ReportNegotiatedSdpSemantics(*local_description()); + } + + observer->OnSetLocalDescriptionComplete(RTCError::OK()); + pc_->NoteUsageEvent(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED); + + // Check if negotiation is needed. We must do this after informing the + // observer that SetLocalDescription() has completed to ensure negotiation is + // not needed prior to the promise resolving. + if (IsUnifiedPlan()) { + bool was_negotiation_needed = is_negotiation_needed_; + UpdateNegotiationNeeded(); + if (signaling_state() == PeerConnectionInterface::kStable && + was_negotiation_needed && is_negotiation_needed_) { + // Legacy version. + pc_->Observer()->OnRenegotiationNeeded(); + // Spec-compliant version; the event may get invalidated before firing. + GenerateNegotiationNeededEvent(); + } + } + + // MaybeStartGathering needs to be called after informing the observer so that + // we don't signal any candidates before signaling that SetLocalDescription + // completed. + transport_controller()->MaybeStartGathering(); +} + +void SdpOfferAnswerHandler::DoCreateOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateOffer"); + + if (!observer) { + RTC_LOG(LS_ERROR) << "CreateOffer - observer is NULL."; + return; + } + + if (pc_->IsClosed()) { + std::string error = "CreateOffer called when PeerConnection is closed."; + RTC_LOG(LS_ERROR) << error; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. 
+ if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "CreateOffer: " << error_message; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + if (!ValidateOfferAnswerOptions(options)) { + std::string error = "CreateOffer called with invalid options."; + RTC_LOG(LS_ERROR) << error; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, RTCError(RTCErrorType::INVALID_PARAMETER, std::move(error))); + return; + } + + // Legacy handling for offer_to_receive_audio and offer_to_receive_video. + // Specified in WebRTC section 4.4.3.2 "Legacy configuration extensions". + if (IsUnifiedPlan()) { + RTCError error = HandleLegacyOfferOptions(options); + if (!error.ok()) { + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, std::move(error)); + return; + } + } + + cricket::MediaSessionOptions session_options; + GetOptionsForOffer(options, &session_options); + webrtc_session_desc_factory_->CreateOffer(observer, options, session_options); +} + +void SdpOfferAnswerHandler::CreateAnswer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + observer_refptr = + rtc::scoped_refptr(observer), + options](std::function operations_chain_callback) { + // Abort early if |this_weak_ptr| is no longer valid. 
+ if (!this_weak_ptr) { + observer_refptr->OnFailure(RTCError( + RTCErrorType::INTERNAL_ERROR, + "CreateAnswer failed because the session was shut down")); + operations_chain_callback(); + return; + } + // The operation completes asynchronously when the wrapper is invoked. + rtc::scoped_refptr + observer_wrapper(new rtc::RefCountedObject< + CreateSessionDescriptionObserverOperationWrapper>( + std::move(observer_refptr), + std::move(operations_chain_callback))); + this_weak_ptr->DoCreateAnswer(options, observer_wrapper); + }); +} + +void SdpOfferAnswerHandler::DoCreateAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateAnswer"); + if (!observer) { + RTC_LOG(LS_ERROR) << "CreateAnswer - observer is NULL."; + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "CreateAnswer: " << error_message; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + + if (!(signaling_state_ == PeerConnectionInterface::kHaveRemoteOffer || + signaling_state_ == PeerConnectionInterface::kHaveLocalPrAnswer)) { + std::string error = + "PeerConnection cannot create an answer in a state other than " + "have-remote-offer or have-local-pranswer."; + RTC_LOG(LS_ERROR) << error; + pc_->message_handler()->PostCreateSessionDescriptionFailure( + observer, RTCError(RTCErrorType::INVALID_STATE, std::move(error))); + return; + } + + // The remote description should be set if we're in the right state. 
+ RTC_DCHECK(remote_description()); + + if (IsUnifiedPlan()) { + if (options.offer_to_receive_audio != + PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { + RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_audio is not " + "supported with Unified Plan semantics. Use the " + "RtpTransceiver API instead."; + } + if (options.offer_to_receive_video != + PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { + RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_video is not " + "supported with Unified Plan semantics. Use the " + "RtpTransceiver API instead."; + } + } + + cricket::MediaSessionOptions session_options; + GetOptionsForAnswer(options, &session_options); + webrtc_session_desc_factory_->CreateAnswer(observer, session_options); +} + +void SdpOfferAnswerHandler::DoSetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetRemoteDescription"); + + if (!observer) { + RTC_LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; + return; + } + + if (!desc) { + observer->OnSetRemoteDescriptionComplete(RTCError( + RTCErrorType::INVALID_PARAMETER, "SessionDescription is NULL.")); + return; + } + + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "SetRemoteDescription: " << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + return; + } + if (IsUnifiedPlan()) { + if (pc_->configuration()->enable_implicit_rollback) { + if (desc->GetType() == SdpType::kOffer && + signaling_state() == PeerConnectionInterface::kHaveLocalOffer) { + Rollback(desc->GetType()); + } + } + // Explicit rollback. 
+ if (desc->GetType() == SdpType::kRollback) { + observer->OnSetRemoteDescriptionComplete(Rollback(desc->GetType())); + return; + } + } else if (desc->GetType() == SdpType::kRollback) { + observer->OnSetRemoteDescriptionComplete( + RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Rollback not supported in Plan B")); + return; + } + if (desc->GetType() == SdpType::kOffer || + desc->GetType() == SdpType::kAnswer) { + // Report to UMA the format of the received offer or answer. + pc_->ReportSdpFormatReceived(*desc); + } + + // Handle remote descriptions missing a=mid lines for interop with legacy end + // points. + FillInMissingRemoteMids(desc->description()); + + RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE); + if (!error.ok()) { + std::string error_message = GetSetDescriptionErrorMessage( + cricket::CS_REMOTE, desc->GetType(), error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(error.type(), std::move(error_message))); + return; + } + + // Grab the description type before moving ownership to + // ApplyRemoteDescription, which may destroy it before returning. + const SdpType type = desc->GetType(); + + error = ApplyRemoteDescription(std::move(desc)); + // |desc| may be destroyed at this point. + + if (!error.ok()) { + // If ApplyRemoteDescription fails, the PeerConnection could be in an + // inconsistent state, so act conservatively here and set the session error + // so that future calls to SetLocalDescription/SetRemoteDescription fail. 
+ SetSessionError(SessionError::kContent, error.message()); + std::string error_message = + GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type, error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(error.type(), std::move(error_message))); + return; + } + RTC_DCHECK(remote_description()); + + if (type == SdpType::kAnswer) { + RemoveStoppedTransceivers(); + // TODO(deadbeef): We already had to hop to the network thread for + // MaybeStartGathering... + pc_->network_thread()->Invoke( + RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, + port_allocator())); + // Make UMA notes about what was agreed to. + ReportNegotiatedSdpSemantics(*remote_description()); + } + + observer->OnSetRemoteDescriptionComplete(RTCError::OK()); + pc_->NoteUsageEvent(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED); + + // Check if negotiation is needed. We must do this after informing the + // observer that SetRemoteDescription() has completed to ensure negotiation is + // not needed prior to the promise resolving. + if (IsUnifiedPlan()) { + bool was_negotiation_needed = is_negotiation_needed_; + UpdateNegotiationNeeded(); + if (signaling_state() == PeerConnectionInterface::kStable && + was_negotiation_needed && is_negotiation_needed_) { + // Legacy version. + pc_->Observer()->OnRenegotiationNeeded(); + // Spec-compliant version; the event may get invalidated before firing. 
+ GenerateNegotiationNeededEvent(); + } + } +} + +void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( + rtc::scoped_refptr receiver, + const std::vector& stream_ids, + std::vector>* added_streams, + std::vector>* removed_streams) { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector> media_streams; + for (const std::string& stream_id : stream_ids) { + rtc::scoped_refptr stream = + remote_streams_->find(stream_id); + if (!stream) { + stream = MediaStreamProxy::Create(rtc::Thread::Current(), + MediaStream::Create(stream_id)); + remote_streams_->AddStream(stream); + added_streams->push_back(stream); + } + media_streams.push_back(stream); + } + // Special case: "a=msid" missing, use random stream ID. + if (media_streams.empty() && + !(remote_description()->description()->msid_signaling() & + cricket::kMsidSignalingMediaSection)) { + if (!missing_msid_default_stream_) { + missing_msid_default_stream_ = MediaStreamProxy::Create( + rtc::Thread::Current(), MediaStream::Create(rtc::CreateRandomUuid())); + added_streams->push_back(missing_msid_default_stream_); + } + media_streams.push_back(missing_msid_default_stream_); + } + std::vector> previous_streams = + receiver->streams(); + // SetStreams() will add/remove the receiver's track to/from the streams. This + // differs from the spec - the spec uses an "addList" and "removeList" to + // update the stream-track relationships in a later step. We do this earlier, + // changing the order of things, but the end-result is the same. + // TODO(hbos): When we remove remote_streams(), use set_stream_ids() + // instead. 
https://crbug.com/webrtc/9480 + receiver->SetStreams(media_streams); + RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); +} + +bool SdpOfferAnswerHandler::AddIceCandidate( + const IceCandidateInterface* ice_candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate"); + if (pc_->IsClosed()) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: PeerConnection is closed."; + NoteAddIceCandidateResult(kAddIceCandidateFailClosed); + return false; + } + + if (!remote_description()) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: ICE candidates can't be added " + "without any remote session description."; + NoteAddIceCandidateResult(kAddIceCandidateFailNoRemoteDescription); + return false; + } + + if (!ice_candidate) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate is null."; + NoteAddIceCandidateResult(kAddIceCandidateFailNullCandidate); + return false; + } + + bool valid = false; + bool ready = ReadyToUseRemoteCandidate(ice_candidate, nullptr, &valid); + if (!valid) { + NoteAddIceCandidateResult(kAddIceCandidateFailNotValid); + return false; + } + + // Add this candidate to the remote session description. 
+ if (!mutable_remote_description()->AddCandidate(ice_candidate)) { + RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate cannot be used."; + NoteAddIceCandidateResult(kAddIceCandidateFailInAddition); + return false; + } + + if (ready) { + bool result = UseCandidate(ice_candidate); + if (result) { + pc_->NoteUsageEvent(UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED); + NoteAddIceCandidateResult(kAddIceCandidateSuccess); + } else { + NoteAddIceCandidateResult(kAddIceCandidateFailNotUsable); + } + return result; + } else { + RTC_LOG(LS_INFO) << "AddIceCandidate: Not ready to use candidate."; + NoteAddIceCandidateResult(kAddIceCandidateFailNotReady); + return true; + } +} + +void SdpOfferAnswerHandler::AddIceCandidate( + std::unique_ptr candidate, + std::function callback) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. + operations_chain_->ChainOperation( + [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), + candidate = std::move(candidate), callback = std::move(callback)]( + std::function operations_chain_callback) { + if (!this_weak_ptr) { + operations_chain_callback(); + callback(RTCError( + RTCErrorType::INVALID_STATE, + "AddIceCandidate failed because the session was shut down")); + return; + } + if (!this_weak_ptr->AddIceCandidate(candidate.get())) { + operations_chain_callback(); + // Fail with an error type and message consistent with Chromium. + // TODO(hbos): Fail with error types according to spec. 
+ callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Error processing ICE candidate")); + return; + } + operations_chain_callback(); + callback(RTCError::OK()); + }); +} + +bool SdpOfferAnswerHandler::RemoveIceCandidates( + const std::vector& candidates) { + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::RemoveIceCandidates"); + RTC_DCHECK_RUN_ON(signaling_thread()); + if (pc_->IsClosed()) { + RTC_LOG(LS_ERROR) << "RemoveIceCandidates: PeerConnection is closed."; + return false; + } + + if (!remote_description()) { + RTC_LOG(LS_ERROR) << "RemoveIceCandidates: ICE candidates can't be removed " + "without any remote session description."; + return false; + } + + if (candidates.empty()) { + RTC_LOG(LS_ERROR) << "RemoveIceCandidates: candidates are empty."; + return false; + } + + size_t number_removed = + mutable_remote_description()->RemoveCandidates(candidates); + if (number_removed != candidates.size()) { + RTC_LOG(LS_ERROR) + << "RemoveIceCandidates: Failed to remove candidates. Requested " + << candidates.size() << " but only " << number_removed + << " are removed."; + } + + // Remove the candidates from the transport controller. 
+ RTCError error = transport_controller()->RemoveRemoteCandidates(candidates); + if (!error.ok()) { + RTC_LOG(LS_ERROR) + << "RemoveIceCandidates: Error when removing remote candidates: " + << error.message(); + } + return true; +} + +void SdpOfferAnswerHandler::AddLocalIceCandidate( + const JsepIceCandidate* candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (local_description()) { + mutable_local_description()->AddCandidate(candidate); + } +} + +void SdpOfferAnswerHandler::RemoveLocalIceCandidates( + const std::vector& candidates) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (local_description()) { + mutable_local_description()->RemoveCandidates(candidates); + } +} + +const SessionDescriptionInterface* SdpOfferAnswerHandler::local_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_local_description_ ? pending_local_description_.get() + : current_local_description_.get(); +} + +const SessionDescriptionInterface* SdpOfferAnswerHandler::remote_description() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_remote_description_ ? 
pending_remote_description_.get() + : current_remote_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::current_local_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return current_local_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::current_remote_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return current_remote_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::pending_local_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_local_description_.get(); +} + +const SessionDescriptionInterface* +SdpOfferAnswerHandler::pending_remote_description() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_remote_description_.get(); +} + +PeerConnectionInterface::SignalingState SdpOfferAnswerHandler::signaling_state() + const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return signaling_state_; +} + +void SdpOfferAnswerHandler::ChangeSignalingState( + PeerConnectionInterface::SignalingState signaling_state) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (signaling_state_ == signaling_state) { + return; + } + RTC_LOG(LS_INFO) << "Session: " << pc_->session_id() << " Old state: " + << GetSignalingStateString(signaling_state_) + << " New state: " + << GetSignalingStateString(signaling_state); + signaling_state_ = signaling_state; + pc_->Observer()->OnSignalingChange(signaling_state_); +} + +RTCError SdpOfferAnswerHandler::UpdateSessionState( + SdpType type, + cricket::ContentSource source, + const cricket::SessionDescription* description) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + // If there's already a pending error then no state transition should happen. + // But all call-sites should be verifying this before calling us! + RTC_DCHECK(session_error() == SessionError::kNone); + + // If this is answer-ish we're ready to let media flow. 
+ if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + EnableSending(); + } + + // Update the signaling state according to the specified state machine (see + // https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum). + if (type == SdpType::kOffer) { + ChangeSignalingState(source == cricket::CS_LOCAL + ? PeerConnectionInterface::kHaveLocalOffer + : PeerConnectionInterface::kHaveRemoteOffer); + } else if (type == SdpType::kPrAnswer) { + ChangeSignalingState(source == cricket::CS_LOCAL + ? PeerConnectionInterface::kHaveLocalPrAnswer + : PeerConnectionInterface::kHaveRemotePrAnswer); + } else { + RTC_DCHECK(type == SdpType::kAnswer); + ChangeSignalingState(PeerConnectionInterface::kStable); + transceivers()->DiscardStableStates(); + have_pending_rtp_data_channel_ = false; + } + + // Update internal objects according to the session description's media + // descriptions. + RTCError error = PushdownMediaDescription(type, source); + if (!error.ok()) { + return error; + } + + return RTCError::OK(); +} + +bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent( + uint32_t event_id) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Plan B? Always fire to conform with useless legacy behavior. + if (!IsUnifiedPlan()) { + return true; + } + // The event ID has been invalidated. Either negotiation is no longer needed + // or a newer negotiation needed event has been generated. + if (event_id != negotiation_needed_event_id_) { + return false; + } + // The chain is no longer empty, update negotiation needed when it becomes + // empty. This should generate a newer negotiation needed event, making this + // one obsolete. + if (!operations_chain_->IsEmpty()) { + // Since we just suppressed an event that would have been fired, if + // negotiation is still needed by the time the chain becomes empty again, we + // must make sure to generate another event if negotiation is needed then. 
+ // This happens when |is_negotiation_needed_| goes from false to true, so we + // set it to false until UpdateNegotiationNeeded() is called. + is_negotiation_needed_ = false; + update_negotiation_needed_on_empty_chain_ = true; + return false; + } + // We must not fire if the signaling state is no longer "stable". If + // negotiation is still needed when we return to "stable", a new negotiation + // needed event will be generated, so this one can safely be suppressed. + if (signaling_state_ != PeerConnectionInterface::kStable) { + return false; + } + // All checks have passed - please fire "negotiationneeded" now! + return true; +} + +rtc::scoped_refptr +SdpOfferAnswerHandler::local_streams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified " + "Plan SdpSemantics. Please use GetSenders " + "instead."; + return local_streams_; +} + +rtc::scoped_refptr +SdpOfferAnswerHandler::remote_streams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified " + "Plan SdpSemantics. Please use GetReceivers " + "instead."; + return remote_streams_; +} + +bool SdpOfferAnswerHandler::AddStream(MediaStreamInterface* local_stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "AddStream is not available with Unified Plan " + "SdpSemantics. 
Please use AddTrack instead."; + if (pc_->IsClosed()) { + return false; + } + if (!CanAddLocalMediaStream(local_streams_, local_stream)) { + return false; + } + + local_streams_->AddStream(local_stream); + MediaStreamObserver* observer = new MediaStreamObserver(local_stream); + observer->SignalAudioTrackAdded.connect( + this, &SdpOfferAnswerHandler::OnAudioTrackAdded); + observer->SignalAudioTrackRemoved.connect( + this, &SdpOfferAnswerHandler::OnAudioTrackRemoved); + observer->SignalVideoTrackAdded.connect( + this, &SdpOfferAnswerHandler::OnVideoTrackAdded); + observer->SignalVideoTrackRemoved.connect( + this, &SdpOfferAnswerHandler::OnVideoTrackRemoved); + stream_observers_.push_back(std::unique_ptr(observer)); + + for (const auto& track : local_stream->GetAudioTracks()) { + rtp_manager()->AddAudioTrack(track.get(), local_stream); + } + for (const auto& track : local_stream->GetVideoTracks()) { + rtp_manager()->AddVideoTrack(track.get(), local_stream); + } + + pc_->stats()->AddStream(local_stream); + UpdateNegotiationNeeded(); + return true; +} + +void SdpOfferAnswerHandler::RemoveStream(MediaStreamInterface* local_stream) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_CHECK(!IsUnifiedPlan()) << "RemoveStream is not available with Unified " + "Plan SdpSemantics. 
Please use RemoveTrack " + "instead."; + TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream"); + if (!pc_->IsClosed()) { + for (const auto& track : local_stream->GetAudioTracks()) { + rtp_manager()->RemoveAudioTrack(track.get(), local_stream); + } + for (const auto& track : local_stream->GetVideoTracks()) { + rtp_manager()->RemoveVideoTrack(track.get(), local_stream); + } + } + local_streams_->RemoveStream(local_stream); + stream_observers_.erase( + std::remove_if( + stream_observers_.begin(), stream_observers_.end(), + [local_stream](const std::unique_ptr& observer) { + return observer->stream()->id().compare(local_stream->id()) == 0; + }), + stream_observers_.end()); + + if (pc_->IsClosed()) { + return; + } + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnAudioTrackAdded(AudioTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->AddAudioTrack(track, stream); + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnAudioTrackRemoved(AudioTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->RemoveAudioTrack(track, stream); + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnVideoTrackAdded(VideoTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->AddVideoTrack(track, stream); + UpdateNegotiationNeeded(); +} + +void SdpOfferAnswerHandler::OnVideoTrackRemoved(VideoTrackInterface* track, + MediaStreamInterface* stream) { + if (pc_->IsClosed()) { + return; + } + rtp_manager()->RemoveVideoTrack(track, stream); + UpdateNegotiationNeeded(); +} + +RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { + auto state = signaling_state(); + if (state != PeerConnectionInterface::kHaveLocalOffer && + state != PeerConnectionInterface::kHaveRemoteOffer) { + return RTCError(RTCErrorType::INVALID_STATE, + "Called in wrong signalingState: " + + 
GetSignalingStateString(signaling_state())); + } + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + std::vector> all_added_streams; + std::vector> all_removed_streams; + std::vector> removed_receivers; + + for (auto&& transceivers_stable_state_pair : transceivers()->StableStates()) { + auto transceiver = transceivers_stable_state_pair.first; + auto state = transceivers_stable_state_pair.second; + + if (state.remote_stream_ids()) { + std::vector> added_streams; + std::vector> removed_streams; + SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), + state.remote_stream_ids().value(), + &added_streams, &removed_streams); + all_added_streams.insert(all_added_streams.end(), added_streams.begin(), + added_streams.end()); + all_removed_streams.insert(all_removed_streams.end(), + removed_streams.begin(), + removed_streams.end()); + if (!state.has_m_section() && !state.newly_created()) { + continue; + } + } + + RTC_DCHECK(transceiver->internal()->mid().has_value()); + DestroyTransceiverChannel(transceiver); + + if (signaling_state() == PeerConnectionInterface::kHaveRemoteOffer && + transceiver->receiver()) { + removed_receivers.push_back(transceiver->receiver()); + } + if (state.newly_created()) { + if (transceiver->internal()->reused_for_addtrack()) { + transceiver->internal()->set_created_by_addtrack(true); + } else { + transceivers()->Remove(transceiver); + } + } + transceiver->internal()->sender_internal()->set_transport(nullptr); + transceiver->internal()->receiver_internal()->set_transport(nullptr); + transceiver->internal()->set_mid(state.mid()); + transceiver->internal()->set_mline_index(state.mline_index()); + } + transport_controller()->RollbackTransports(); + if (have_pending_rtp_data_channel_) { + DestroyDataChannelTransport(); + have_pending_rtp_data_channel_ = false; + } + transceivers()->DiscardStableStates(); + pending_local_description_.reset(); + pending_remote_description_.reset(); + 
ChangeSignalingState(PeerConnectionInterface::kStable); + + // Once all processing has finished, fire off callbacks. + for (const auto& receiver : removed_receivers) { + pc_->Observer()->OnRemoveTrack(receiver); + } + for (const auto& stream : all_added_streams) { + pc_->Observer()->OnAddStream(stream); + } + for (const auto& stream : all_removed_streams) { + pc_->Observer()->OnRemoveStream(stream); + } + + // The assumption is that in case of implicit rollback UpdateNegotiationNeeded + // gets called in SetRemoteDescription. + if (desc_type == SdpType::kRollback) { + UpdateNegotiationNeeded(); + if (is_negotiation_needed_) { + // Legacy version. + pc_->Observer()->OnRenegotiationNeeded(); + // Spec-compliant version; the event may get invalidated before firing. + GenerateNegotiationNeededEvent(); + } + } + return RTCError::OK(); +} + +bool SdpOfferAnswerHandler::IsUnifiedPlan() const { + return pc_->IsUnifiedPlan(); +} + +void SdpOfferAnswerHandler::OnOperationsChainEmpty() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (pc_->IsClosed() || !update_negotiation_needed_on_empty_chain_) + return; + update_negotiation_needed_on_empty_chain_ = false; + // Firing when chain is empty is only supported in Unified Plan to avoid Plan + // B regressions. (In Plan B, onnegotiationneeded is already broken anyway, so + // firing it even more might just be confusing.) 
+ if (IsUnifiedPlan()) { + UpdateNegotiationNeeded(); + } +} + +absl::optional SdpOfferAnswerHandler::is_caller() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return is_caller_; +} + +bool SdpOfferAnswerHandler::HasNewIceCredentials() { + RTC_DCHECK_RUN_ON(signaling_thread()); + return local_ice_credentials_to_replace_->HasIceCredentials(); +} + +bool SdpOfferAnswerHandler::IceRestartPending( + const std::string& content_name) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return pending_ice_restarts_.find(content_name) != + pending_ice_restarts_.end(); +} + +bool SdpOfferAnswerHandler::NeedsIceRestart( + const std::string& content_name) const { + return transport_controller()->NeedsIceRestart(content_name); +} + +absl::optional SdpOfferAnswerHandler::GetDtlsRole( + const std::string& mid) const { + return transport_controller()->GetDtlsRole(mid); +} + +void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!IsUnifiedPlan()) { + pc_->Observer()->OnRenegotiationNeeded(); + GenerateNegotiationNeededEvent(); + return; + } + + // In the spec, a task is queued here to run the following steps - this is + // meant to ensure we do not fire onnegotiationneeded prematurely if multiple + // changes are being made at once. In order to support Chromium's + // implementation where the JavaScript representation of the PeerConnection + // lives on a separate thread though, the queuing of a task is instead + // performed by the PeerConnectionObserver posting from the signaling thread + // to the JavaScript main thread that negotiation is needed. And because the + // Operations Chain lives on the WebRTC signaling thread, + // ShouldFireNegotiationNeededEvent() must be called before firing the event + // to ensure the Operations Chain is still empty and the event has not been + // invalidated. + + // If connection's [[IsClosed]] slot is true, abort these steps. 
+ if (pc_->IsClosed()) + return; + + // If connection's signaling state is not "stable", abort these steps. + if (signaling_state() != PeerConnectionInterface::kStable) + return; + + // NOTE + // The negotiation-needed flag will be updated once the state transitions to + // "stable", as part of the steps for setting an RTCSessionDescription. + + // If the result of checking if negotiation is needed is false, clear the + // negotiation-needed flag by setting connection's [[NegotiationNeeded]] slot + // to false, and abort these steps. + bool is_negotiation_needed = CheckIfNegotiationIsNeeded(); + if (!is_negotiation_needed) { + is_negotiation_needed_ = false; + // Invalidate any negotiation needed event that may previosuly have been + // generated. + ++negotiation_needed_event_id_; + return; + } + + // If connection's [[NegotiationNeeded]] slot is already true, abort these + // steps. + if (is_negotiation_needed_) + return; + + // Set connection's [[NegotiationNeeded]] slot to true. + is_negotiation_needed_ = true; + + // Queue a task that runs the following steps: + // If connection's [[IsClosed]] slot is true, abort these steps. + // If connection's [[NegotiationNeeded]] slot is false, abort these steps. + // Fire an event named negotiationneeded at connection. + pc_->Observer()->OnRenegotiationNeeded(); + // Fire the spec-compliant version; when ShouldFireNegotiationNeededEvent() is + // used in the task queued by the observer, this event will only fire when the + // chain is empty. + GenerateNegotiationNeededEvent(); +} + +bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { + RTC_DCHECK_RUN_ON(signaling_thread()); + // 1. If any implementation-specific negotiation is required, as described at + // the start of this section, return true. + + // 2. If connection.[[LocalIceCredentialsToReplace]] is not empty, return + // true. + if (local_ice_credentials_to_replace_->HasIceCredentials()) { + return true; + } + + // 3. 
Let description be connection.[[CurrentLocalDescription]]. + const SessionDescriptionInterface* description = current_local_description(); + if (!description) + return true; + + // 4. If connection has created any RTCDataChannels, and no m= section in + // description has been negotiated yet for data, return true. + if (data_channel_controller()->HasSctpDataChannels()) { + if (!cricket::GetFirstDataContent(description->description()->contents())) + return true; + } + + // 5. For each transceiver in connection's set of transceivers, perform the + // following checks: + for (const auto& transceiver : transceivers()->List()) { + const ContentInfo* current_local_msection = + FindTransceiverMSection(transceiver.get(), description); + + const ContentInfo* current_remote_msection = FindTransceiverMSection( + transceiver.get(), current_remote_description()); + + // 5.4 If transceiver is stopped and is associated with an m= section, + // but the associated m= section is not yet rejected in + // connection.[[CurrentLocalDescription]] or + // connection.[[CurrentRemoteDescription]], return true. + if (transceiver->stopped()) { + RTC_DCHECK(transceiver->stopping()); + if (current_local_msection && !current_local_msection->rejected && + ((current_remote_msection && !current_remote_msection->rejected) || + !current_remote_msection)) { + return true; + } + continue; + } + + // 5.1 If transceiver.[[Stopping]] is true and transceiver.[[Stopped]] is + // false, return true. + if (transceiver->stopping() && !transceiver->stopped()) + return true; + + // 5.2 If transceiver isn't stopped and isn't yet associated with an m= + // section in description, return true. 
+ if (!current_local_msection) + return true; + + const MediaContentDescription* current_local_media_description = + current_local_msection->media_description(); + // 5.3 If transceiver isn't stopped and is associated with an m= section + // in description then perform the following checks: + + // 5.3.1 If transceiver.[[Direction]] is "sendrecv" or "sendonly", and the + // associated m= section in description either doesn't contain a single + // "a=msid" line, or the number of MSIDs from the "a=msid" lines in this + // m= section, or the MSID values themselves, differ from what is in + // transceiver.sender.[[AssociatedMediaStreamIds]], return true. + if (RtpTransceiverDirectionHasSend(transceiver->direction())) { + if (current_local_media_description->streams().size() == 0) + return true; + + std::vector msection_msids; + for (const auto& stream : current_local_media_description->streams()) { + for (const std::string& msid : stream.stream_ids()) + msection_msids.push_back(msid); + } + + std::vector transceiver_msids = + transceiver->sender()->stream_ids(); + if (msection_msids.size() != transceiver_msids.size()) + return true; + + absl::c_sort(transceiver_msids); + absl::c_sort(msection_msids); + if (transceiver_msids != msection_msids) + return true; + } + + // 5.3.2 If description is of type "offer", and the direction of the + // associated m= section in neither connection.[[CurrentLocalDescription]] + // nor connection.[[CurrentRemoteDescription]] matches + // transceiver.[[Direction]], return true. 
+ if (description->GetType() == SdpType::kOffer) { + if (!current_remote_description()) + return true; + + if (!current_remote_msection) + return true; + + RtpTransceiverDirection current_local_direction = + current_local_media_description->direction(); + RtpTransceiverDirection current_remote_direction = + current_remote_msection->media_description()->direction(); + if (transceiver->direction() != current_local_direction && + transceiver->direction() != + RtpTransceiverDirectionReversed(current_remote_direction)) { + return true; + } + } + + // 5.3.3 If description is of type "answer", and the direction of the + // associated m= section in the description does not match + // transceiver.[[Direction]] intersected with the offered direction (as + // described in [JSEP] (section 5.3.1.)), return true. + if (description->GetType() == SdpType::kAnswer) { + if (!remote_description()) + return true; + + const ContentInfo* offered_remote_msection = + FindTransceiverMSection(transceiver.get(), remote_description()); + + RtpTransceiverDirection offered_direction = + offered_remote_msection + ? offered_remote_msection->media_description()->direction() + : RtpTransceiverDirection::kInactive; + + if (current_local_media_description->direction() != + (RtpTransceiverDirectionIntersection( + transceiver->direction(), + RtpTransceiverDirectionReversed(offered_direction)))) { + return true; + } + } + } + + // If all the preceding checks were performed and true was not returned, + // nothing remains to be negotiated; return false. 
+ return false; +} + +void SdpOfferAnswerHandler::GenerateNegotiationNeededEvent() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ++negotiation_needed_event_id_; + pc_->Observer()->OnNegotiationNeededEvent(negotiation_needed_event_id_); +} + +RTCError SdpOfferAnswerHandler::ValidateSessionDescription( + const SessionDescriptionInterface* sdesc, + cricket::ContentSource source) { + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } + + if (!sdesc || !sdesc->description()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); + } + + SdpType type = sdesc->GetType(); + if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || + (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_STATE, + "Called in wrong state: " + GetSignalingStateString(signaling_state())); + } + + RTCError error = ValidateMids(*sdesc->description()); + if (!error.ok()) { + return error; + } + + // Verify crypto settings. + std::string crypto_error; + if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED || + pc_->dtls_enabled()) { + RTCError crypto_error = + VerifyCrypto(sdesc->description(), pc_->dtls_enabled()); + if (!crypto_error.ok()) { + return crypto_error; + } + } + + // Verify ice-ufrag and ice-pwd. + if (!VerifyIceUfragPwdPresent(sdesc->description())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutIceUfragPwd); + } + + if (!pc_->ValidateBundleSettings(sdesc->description())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kBundleWithoutRtcpMux); + } + + // TODO(skvlad): When the local rtcp-mux policy is Require, reject any + // m-lines that do not rtcp-mux enabled. + + // Verify m-lines in Answer when compared against Offer. 
+ if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // With an answer we want to compare the new answer session description with + // the offer's session description from the current negotiation. + const cricket::SessionDescription* offer_desc = + (source == cricket::CS_LOCAL) ? remote_description()->description() + : local_description()->description(); + if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || + !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), + type)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInAnswer); + } + } else { + // The re-offers should respect the order of m= sections in current + // description. See RFC3264 Section 8 paragraph 4 for more details. + // With a re-offer, either the current local or current remote descriptions + // could be the most up to date, so we would like to check against both of + // them if they exist. It could be the case that one of them has a 0 port + // for a media section, but the other does not. This is important to check + // against in the case that we are recycling an m= section. + const cricket::SessionDescription* current_desc = nullptr; + const cricket::SessionDescription* secondary_current_desc = nullptr; + if (local_description()) { + current_desc = local_description()->description(); + if (remote_description()) { + secondary_current_desc = remote_description()->description(); + } + } else if (remote_description()) { + current_desc = remote_description()->description(); + } + if (current_desc && + !MediaSectionsInSameOrder(*current_desc, secondary_current_desc, + *sdesc->description(), type)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInSubsequentOffer); + } + } + + if (IsUnifiedPlan()) { + // Ensure that each audio and video media section has at most one + // "StreamParams". 
This will return an error if receiving a session + // description from a "Plan B" endpoint which adds multiple tracks of the + // same type. With Unified Plan, there can only be at most one track per + // media section. + for (const ContentInfo& content : sdesc->description()->contents()) { + const MediaContentDescription& desc = *content.media_description(); + if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || + desc.type() == cricket::MEDIA_TYPE_VIDEO) && + desc.streams().size() > 1u) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Media section has more than one track specified " + "with a=ssrc lines which is not supported with " + "Unified Plan."); + } + } + } + + return RTCError::OK(); +} + +RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( + cricket::ContentSource source, + const SessionDescriptionInterface& new_session, + const SessionDescriptionInterface* old_local_description, + const SessionDescriptionInterface* old_remote_description) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + + const cricket::ContentGroup* bundle_group = nullptr; + if (new_session.GetType() == SdpType::kOffer) { + auto bundle_group_or_error = + GetEarlyBundleGroup(*new_session.description()); + if (!bundle_group_or_error.ok()) { + return bundle_group_or_error.MoveError(); + } + bundle_group = bundle_group_or_error.MoveValue(); + } + + const ContentInfos& new_contents = new_session.description()->contents(); + for (size_t i = 0; i < new_contents.size(); ++i) { + const cricket::ContentInfo& new_content = new_contents[i]; + cricket::MediaType media_type = new_content.media_description()->type(); + mid_generator_.AddKnownId(new_content.name); + if (media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO) { + const cricket::ContentInfo* old_local_content = nullptr; + if (old_local_description && + i < old_local_description->description()->contents().size()) { + old_local_content = + 
&old_local_description->description()->contents()[i]; + } + const cricket::ContentInfo* old_remote_content = nullptr; + if (old_remote_description && + i < old_remote_description->description()->contents().size()) { + old_remote_content = + &old_remote_description->description()->contents()[i]; + } + auto transceiver_or_error = + AssociateTransceiver(source, new_session.GetType(), i, new_content, + old_local_content, old_remote_content); + if (!transceiver_or_error.ok()) { + // In the case where a transceiver is rejected locally, we don't + // expect to find a transceiver, but might find it in the case + // where state is still "stopping", not "stopped". + if (new_content.rejected) { + continue; + } + return transceiver_or_error.MoveError(); + } + auto transceiver = transceiver_or_error.MoveValue(); + RTCError error = + UpdateTransceiverChannel(transceiver, new_content, bundle_group); + if (!error.ok()) { + return error; + } + } else if (media_type == cricket::MEDIA_TYPE_DATA) { + if (pc_->GetDataMid() && new_content.name != *(pc_->GetDataMid())) { + // Ignore all but the first data section. + RTC_LOG(LS_INFO) << "Ignoring data media section with MID=" + << new_content.name; + continue; + } + RTCError error = UpdateDataChannel(source, new_content, bundle_group); + if (!error.ok()) { + return error; + } + } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + RTC_LOG(LS_INFO) << "Ignoring unsupported media type"; + } else { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Unknown section type."); + } + } + + return RTCError::OK(); +} + +RTCErrorOr>> +SdpOfferAnswerHandler::AssociateTransceiver( + cricket::ContentSource source, + SdpType type, + size_t mline_index, + const ContentInfo& content, + const ContentInfo* old_local_content, + const ContentInfo* old_remote_content) { + RTC_DCHECK(IsUnifiedPlan()); +#if RTC_DCHECK_IS_ON + // If this is an offer then the m= section might be recycled. 
If the m= + // section is being recycled (defined as: rejected in the current local or + // remote description and not rejected in new description), the transceiver + // should have been removed by RemoveStoppedtransceivers()-> + if (IsMediaSectionBeingRecycled(type, content, old_local_content, + old_remote_content)) { + const std::string& old_mid = + (old_local_content && old_local_content->rejected) + ? old_local_content->name + : old_remote_content->name; + auto old_transceiver = transceivers()->FindByMid(old_mid); + // The transceiver should be disassociated in RemoveStoppedTransceivers() + RTC_DCHECK(!old_transceiver); + } +#endif + + const MediaContentDescription* media_desc = content.media_description(); + auto transceiver = transceivers()->FindByMid(content.name); + if (source == cricket::CS_LOCAL) { + // Find the RtpTransceiver that corresponds to this m= section, using the + // mapping between transceivers and m= section indices established when + // creating the offer. + if (!transceiver) { + transceiver = transceivers()->FindByMLineIndex(mline_index); + } + if (!transceiver) { + // This may happen normally when media sections are rejected. + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Transceiver not found based on m-line index"); + } + } else { + RTC_DCHECK_EQ(source, cricket::CS_REMOTE); + // If the m= section is sendrecv or recvonly, and there are RtpTransceivers + // of the same type... + // When simulcast is requested, a transceiver cannot be associated because + // AddTrack cannot be called to initialize it. + if (!transceiver && + RtpTransceiverDirectionHasRecv(media_desc->direction()) && + !media_desc->HasSimulcast()) { + transceiver = FindAvailableTransceiverToReceive(media_desc->type()); + } + // If no RtpTransceiver was found in the previous step, create one with a + // recvonly direction. 
+ if (!transceiver) { + RTC_LOG(LS_INFO) << "Adding " + << cricket::MediaTypeToString(media_desc->type()) + << " transceiver for MID=" << content.name + << " at i=" << mline_index + << " in response to the remote description."; + std::string sender_id = rtc::CreateRandomUuid(); + std::vector send_encodings = + GetSendEncodingsFromRemoteDescription(*media_desc); + auto sender = rtp_manager()->CreateSender(media_desc->type(), sender_id, + nullptr, {}, send_encodings); + std::string receiver_id; + if (!media_desc->streams().empty()) { + receiver_id = media_desc->streams()[0].id; + } else { + receiver_id = rtc::CreateRandomUuid(); + } + auto receiver = + rtp_manager()->CreateReceiver(media_desc->type(), receiver_id); + transceiver = rtp_manager()->CreateAndAddTransceiver(sender, receiver); + transceiver->internal()->set_direction( + RtpTransceiverDirection::kRecvOnly); + if (type == SdpType::kOffer) { + transceivers()->StableState(transceiver)->set_newly_created(); + } + } + + RTC_DCHECK(transceiver); + + // Check if the offer indicated simulcast but the answer rejected it. + // This can happen when simulcast is not supported on the remote party. + if (SimulcastIsRejected(old_local_content, *media_desc)) { + RTC_HISTOGRAM_BOOLEAN(kSimulcastDisabled, true); + RTCError error = + DisableSimulcastInSender(transceiver->internal()->sender_internal()); + if (!error.ok()) { + RTC_LOG(LS_ERROR) << "Failed to remove rejected simulcast."; + return std::move(error); + } + } + } + + if (transceiver->media_type() != media_desc->type()) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "Transceiver type does not match media description type."); + } + + if (media_desc->HasSimulcast()) { + std::vector layers = + source == cricket::CS_LOCAL + ? 
media_desc->simulcast_description().send_layers().GetAllLayers() + : media_desc->simulcast_description() + .receive_layers() + .GetAllLayers(); + RTCError error = UpdateSimulcastLayerStatusInSender( + layers, transceiver->internal()->sender_internal()); + if (!error.ok()) { + RTC_LOG(LS_ERROR) << "Failed updating status for simulcast layers."; + return std::move(error); + } + } + if (type == SdpType::kOffer) { + bool state_changes = transceiver->internal()->mid() != content.name || + transceiver->internal()->mline_index() != mline_index; + if (state_changes) { + transceivers() + ->StableState(transceiver) + ->SetMSectionIfUnset(transceiver->internal()->mid(), + transceiver->internal()->mline_index()); + } + } + // Associate the found or created RtpTransceiver with the m= section by + // setting the value of the RtpTransceiver's mid property to the MID of the m= + // section, and establish a mapping between the transceiver and the index of + // the m= section. + transceiver->internal()->set_mid(content.name); + transceiver->internal()->set_mline_index(mline_index); + return std::move(transceiver); +} + +RTCErrorOr +SdpOfferAnswerHandler::GetEarlyBundleGroup( + const SessionDescription& desc) const { + const cricket::ContentGroup* bundle_group = nullptr; + if (pc_->configuration()->bundle_policy == + PeerConnectionInterface::kBundlePolicyMaxBundle) { + bundle_group = desc.GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + if (!bundle_group) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "max-bundle configured but session description " + "has no BUNDLE group"); + } + } + return bundle_group; +} + +RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( + rtc::scoped_refptr> + transceiver, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) { + RTC_DCHECK(IsUnifiedPlan()); + RTC_DCHECK(transceiver); + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (content.rejected) { + if (channel) { + 
transceiver->internal()->SetChannel(nullptr); + DestroyChannelInterface(channel); + } + } else { + if (!channel) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + channel = CreateVoiceChannel(content.name); + } else { + RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->media_type()); + channel = CreateVideoChannel(content.name); + } + if (!channel) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "Failed to create channel for mid=" + content.name); + } + transceiver->internal()->SetChannel(channel); + } + } + return RTCError::OK(); +} + +RTCError SdpOfferAnswerHandler::UpdateDataChannel( + cricket::ContentSource source, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) { + if (pc_->data_channel_type() == cricket::DCT_NONE) { + // If data channels are disabled, ignore this media section. CreateAnswer + // will take care of rejecting it. + return RTCError::OK(); + } + if (content.rejected) { + RTC_LOG(LS_INFO) << "Rejected data channel, mid=" << content.mid(); + DestroyDataChannelTransport(); + } else { + if (!data_channel_controller()->rtp_data_channel() && + !data_channel_controller()->data_channel_transport()) { + RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid(); + if (!CreateDataChannel(content.name)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); + } + } + if (source == cricket::CS_REMOTE) { + const MediaContentDescription* data_desc = content.media_description(); + if (data_desc && cricket::IsRtpProtocol(data_desc->protocol())) { + data_channel_controller()->UpdateRemoteRtpDataChannels( + GetActiveStreams(data_desc)); + } + } + } + return RTCError::OK(); +} + +bool SdpOfferAnswerHandler::ExpectSetLocalDescription(SdpType type) { + PeerConnectionInterface::SignalingState state = signaling_state(); + if (type == SdpType::kOffer) { + return (state == PeerConnectionInterface::kStable) || + (state == 
PeerConnectionInterface::kHaveLocalOffer); + } else { + RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); + return (state == PeerConnectionInterface::kHaveRemoteOffer) || + (state == PeerConnectionInterface::kHaveLocalPrAnswer); + } +} + +bool SdpOfferAnswerHandler::ExpectSetRemoteDescription(SdpType type) { + PeerConnectionInterface::SignalingState state = signaling_state(); + if (type == SdpType::kOffer) { + return (state == PeerConnectionInterface::kStable) || + (state == PeerConnectionInterface::kHaveRemoteOffer); + } else { + RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer); + return (state == PeerConnectionInterface::kHaveLocalOffer) || + (state == PeerConnectionInterface::kHaveRemotePrAnswer); + } +} + +void SdpOfferAnswerHandler::FillInMissingRemoteMids( + cricket::SessionDescription* new_remote_description) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(new_remote_description); + const cricket::ContentInfos no_infos; + const cricket::ContentInfos& local_contents = + (local_description() ? local_description()->description()->contents() + : no_infos); + const cricket::ContentInfos& remote_contents = + (remote_description() ? 
remote_description()->description()->contents() + : no_infos); + for (size_t i = 0; i < new_remote_description->contents().size(); ++i) { + cricket::ContentInfo& content = new_remote_description->contents()[i]; + if (!content.name.empty()) { + continue; + } + std::string new_mid; + absl::string_view source_explanation; + if (IsUnifiedPlan()) { + if (i < local_contents.size()) { + new_mid = local_contents[i].name; + source_explanation = "from the matching local media section"; + } else if (i < remote_contents.size()) { + new_mid = remote_contents[i].name; + source_explanation = "from the matching previous remote media section"; + } else { + new_mid = mid_generator_.GenerateString(); + source_explanation = "generated just now"; + } + } else { + new_mid = std::string( + GetDefaultMidForPlanB(content.media_description()->type())); + source_explanation = "to match pre-existing behavior"; + } + RTC_DCHECK(!new_mid.empty()); + content.name = new_mid; + new_remote_description->transport_infos()[i].content_name = new_mid; + RTC_LOG(LS_INFO) << "SetRemoteDescription: Remote media section at i=" << i + << " is missing an a=mid line. Filling in the value '" + << new_mid << "' " << source_explanation << "."; + } +} + +rtc::scoped_refptr> +SdpOfferAnswerHandler::FindAvailableTransceiverToReceive( + cricket::MediaType media_type) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + // From JSEP section 5.10 (Applying a Remote Description): + // If the m= section is sendrecv or recvonly, and there are RtpTransceivers of + // the same type that were added to the PeerConnection by addTrack and are not + // associated with any m= section and are not stopped, find the first such + // RtpTransceiver. 
+ for (auto transceiver : transceivers()->List()) { + if (transceiver->media_type() == media_type && + transceiver->internal()->created_by_addtrack() && !transceiver->mid() && + !transceiver->stopped()) { + return transceiver; + } + } + return nullptr; +} + +const cricket::ContentInfo* +SdpOfferAnswerHandler::FindMediaSectionForTransceiver( + rtc::scoped_refptr> + transceiver, + const SessionDescriptionInterface* sdesc) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(transceiver); + RTC_DCHECK(sdesc); + if (IsUnifiedPlan()) { + if (!transceiver->internal()->mid()) { + // This transceiver is not associated with a media section yet. + return nullptr; + } + return sdesc->description()->GetContentByName( + *transceiver->internal()->mid()); + } else { + // Plan B only allows at most one audio and one video section, so use the + // first media section of that type. + return cricket::GetFirstMediaContent(sdesc->description()->contents(), + transceiver->media_type()); + } +} + +void SdpOfferAnswerHandler::GetOptionsForOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + ExtractSharedMediaSessionOptions(offer_answer_options, session_options); + + if (IsUnifiedPlan()) { + GetOptionsForUnifiedPlanOffer(offer_answer_options, session_options); + } else { + GetOptionsForPlanBOffer(offer_answer_options, session_options); + } + + // Intentionally unset the data channel type for RTP data channel with the + // second condition. Otherwise the RTP data channels would be successfully + // negotiated by default and the unit tests in WebRtcDataBrowserTest will fail + // when building with chromium. We want to leave RTP data channels broken, so + // people won't try to use them. 
+ if (data_channel_controller()->HasRtpDataChannels() || + pc_->data_channel_type() != cricket::DCT_RTP) { + session_options->data_channel_type = pc_->data_channel_type(); + } + + // Apply ICE restart flag and renomination flag. + bool ice_restart = offer_answer_options.ice_restart || HasNewIceCredentials(); + for (auto& options : session_options->media_description_options) { + options.transport_options.ice_restart = ice_restart; + options.transport_options.enable_ice_renomination = + pc_->configuration()->enable_ice_renomination; + } + + session_options->rtcp_cname = rtcp_cname_; + session_options->crypto_options = pc_->GetCryptoOptions(); + session_options->pooled_ice_credentials = + pc_->network_thread()->Invoke>( + RTC_FROM_HERE, + rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, + port_allocator())); + session_options->offer_extmap_allow_mixed = + pc_->configuration()->offer_extmap_allow_mixed; + + // Allow fallback for using obsolete SCTP syntax. + // Note that the default in |session_options| is true, while + // the default in |options| is false. + session_options->use_obsolete_sctp_sdp = + offer_answer_options.use_obsolete_sctp_sdp; +} + +void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Figure out transceiver directional preferences. + bool send_audio = + !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); + bool send_video = + !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); + + // By default, generate sendrecv/recvonly m= sections. + bool recv_audio = true; + bool recv_video = true; + + // By default, only offer a new m= section if we have media to send with it. 
+ bool offer_new_audio_description = send_audio; + bool offer_new_video_description = send_video; + bool offer_new_data_description = + data_channel_controller()->HasDataChannels(); + + // The "offer_to_receive_X" options allow those defaults to be overridden. + if (offer_answer_options.offer_to_receive_audio != + PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) { + recv_audio = (offer_answer_options.offer_to_receive_audio > 0); + offer_new_audio_description = + offer_new_audio_description || + (offer_answer_options.offer_to_receive_audio > 0); + } + if (offer_answer_options.offer_to_receive_video != + RTCOfferAnswerOptions::kUndefined) { + recv_video = (offer_answer_options.offer_to_receive_video > 0); + offer_new_video_description = + offer_new_video_description || + (offer_answer_options.offer_to_receive_video > 0); + } + + absl::optional audio_index; + absl::optional video_index; + absl::optional data_index; + // If a current description exists, generate m= sections in the same order, + // using the first audio/video/data section that appears and rejecting + // extraneous ones. + if (local_description()) { + GenerateMediaDescriptionOptions( + local_description(), + RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), + RtpTransceiverDirectionFromSendRecv(send_video, recv_video), + &audio_index, &video_index, &data_index, session_options); + } + + // Add audio/video/data m= sections to the end if needed. 
+ if (!audio_index && offer_new_audio_description) { + cricket::MediaDescriptionOptions options( + cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO, + RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false); + options.header_extensions = + channel_manager()->GetSupportedAudioRtpHeaderExtensions(); + session_options->media_description_options.push_back(options); + audio_index = session_options->media_description_options.size() - 1; + } + if (!video_index && offer_new_video_description) { + cricket::MediaDescriptionOptions options( + cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO, + RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false); + options.header_extensions = + channel_manager()->GetSupportedVideoRtpHeaderExtensions(); + session_options->media_description_options.push_back(options); + video_index = session_options->media_description_options.size() - 1; + } + if (!data_index && offer_new_data_description) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(cricket::CN_DATA)); + data_index = session_options->media_description_options.size() - 1; + } + + cricket::MediaDescriptionOptions* audio_media_description_options = + !audio_index ? nullptr + : &session_options->media_description_options[*audio_index]; + cricket::MediaDescriptionOptions* video_media_description_options = + !video_index ? nullptr + : &session_options->media_description_options[*video_index]; + + AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), + audio_media_description_options, + video_media_description_options, + offer_answer_options.num_simulcast_layers); +} + +void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( + const RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Rules for generating an offer are dictated by JSEP sections 5.2.1 (Initial + // Offers) and 5.2.2 (Subsequent Offers). 
+ RTC_DCHECK_EQ(session_options->media_description_options.size(), 0); + const ContentInfos no_infos; + const ContentInfos& local_contents = + (local_description() ? local_description()->description()->contents() + : no_infos); + const ContentInfos& remote_contents = + (remote_description() ? remote_description()->description()->contents() + : no_infos); + // The mline indices that can be recycled. New transceivers should reuse these + // slots first. + std::queue recycleable_mline_indices; + // First, go through each media section that exists in either the local or + // remote description and generate a media section in this offer for the + // associated transceiver. If a media section can be recycled, generate a + // default, rejected media section here that can be later overwritten. + for (size_t i = 0; + i < std::max(local_contents.size(), remote_contents.size()); ++i) { + // Either |local_content| or |remote_content| is non-null. + const ContentInfo* local_content = + (i < local_contents.size() ? &local_contents[i] : nullptr); + const ContentInfo* current_local_content = + GetContentByIndex(current_local_description(), i); + const ContentInfo* remote_content = + (i < remote_contents.size() ? &remote_contents[i] : nullptr); + const ContentInfo* current_remote_content = + GetContentByIndex(current_remote_description(), i); + bool had_been_rejected = + (current_local_content && current_local_content->rejected) || + (current_remote_content && current_remote_content->rejected); + const std::string& mid = + (local_content ? local_content->name : remote_content->name); + cricket::MediaType media_type = + (local_content ? local_content->media_description()->type() + : remote_content->media_description()->type()); + if (media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO) { + // A media section is considered eligible for recycling if it is marked as + // rejected in either the current local or current remote description. 
+ auto transceiver = transceivers()->FindByMid(mid); + if (!transceiver) { + // No associated transceiver. The media section has been stopped. + recycleable_mline_indices.push(i); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + // NOTE: a stopping transceiver should be treated as a stopped one in + // createOffer as specified in + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. + if (had_been_rejected && transceiver->stopping()) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions( + transceiver->media_type(), mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + recycleable_mline_indices.push(i); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid, + /*is_create_offer=*/true)); + // CreateOffer shouldn't really cause any state changes in + // PeerConnection, but we need a way to match new transceivers to new + // media sections in SetLocalDescription and JSEP specifies this is + // done by recording the index of the media section generated for the + // transceiver in the offer. 
+ transceiver->internal()->set_mline_index(i); + } + } + } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + RTC_DCHECK(local_content->rejected); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); + if (had_been_rejected) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(mid)); + } else { + RTC_CHECK(pc_->GetDataMid()); + if (mid == *(pc_->GetDataMid())) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(mid)); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(mid)); + } + } + } + } + + // Next, look for transceivers that are newly added (that is, are not stopped + // and not associated). Reuse media sections marked as recyclable first, + // otherwise append to the end of the offer. New media sections should be + // added in the order they were added to the PeerConnection. + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->mid() || transceiver->stopping()) { + continue; + } + size_t mline_index; + if (!recycleable_mline_indices.empty()) { + mline_index = recycleable_mline_indices.front(); + recycleable_mline_indices.pop(); + session_options->media_description_options[mline_index] = + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid_generator_.GenerateString(), + /*is_create_offer=*/true); + } else { + mline_index = session_options->media_description_options.size(); + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForTransceiver( + transceiver, mid_generator_.GenerateString(), + /*is_create_offer=*/true)); + } + // See comment above for why CreateOffer changes the transceiver's state. 
+ transceiver->internal()->set_mline_index(mline_index); + } + // Lastly, add a m-section if we have local data channels and an m section + // does not already exist. + if (!pc_->GetDataMid() && data_channel_controller()->HasDataChannels()) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData( + mid_generator_.GenerateString())); + } +} + +void SdpOfferAnswerHandler::GetOptionsForAnswer( + const RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + ExtractSharedMediaSessionOptions(offer_answer_options, session_options); + + if (IsUnifiedPlan()) { + GetOptionsForUnifiedPlanAnswer(offer_answer_options, session_options); + } else { + GetOptionsForPlanBAnswer(offer_answer_options, session_options); + } + + // Intentionally unset the data channel type for RTP data channel. Otherwise + // the RTP data channels would be successfully negotiated by default and the + // unit tests in WebRtcDataBrowserTest will fail when building with chromium. + // We want to leave RTP data channels broken, so people won't try to use them. + if (data_channel_controller()->HasRtpDataChannels() || + pc_->data_channel_type() != cricket::DCT_RTP) { + session_options->data_channel_type = pc_->data_channel_type(); + } + + // Apply ICE renomination flag. 
+ for (auto& options : session_options->media_description_options) { + options.transport_options.enable_ice_renomination = + pc_->configuration()->enable_ice_renomination; + } + + session_options->rtcp_cname = rtcp_cname_; + session_options->crypto_options = pc_->GetCryptoOptions(); + session_options->pooled_ice_credentials = + pc_->network_thread()->Invoke>( + RTC_FROM_HERE, + rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, + port_allocator())); +} + +void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Figure out transceiver directional preferences. + bool send_audio = + !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty(); + bool send_video = + !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty(); + + // By default, generate sendrecv/recvonly m= sections. The direction is also + // restricted by the direction in the offer. + bool recv_audio = true; + bool recv_video = true; + + // The "offer_to_receive_X" options allow those defaults to be overridden. + if (offer_answer_options.offer_to_receive_audio != + RTCOfferAnswerOptions::kUndefined) { + recv_audio = (offer_answer_options.offer_to_receive_audio > 0); + } + if (offer_answer_options.offer_to_receive_video != + RTCOfferAnswerOptions::kUndefined) { + recv_video = (offer_answer_options.offer_to_receive_video > 0); + } + + absl::optional audio_index; + absl::optional video_index; + absl::optional data_index; + + // Generate m= sections that match those in the offer. + // Note that mediasession.cc will handle intersection our preferred + // direction with the offered direction. 
+ GenerateMediaDescriptionOptions( + remote_description(), + RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), + RtpTransceiverDirectionFromSendRecv(send_video, recv_video), &audio_index, + &video_index, &data_index, session_options); + + cricket::MediaDescriptionOptions* audio_media_description_options = + !audio_index ? nullptr + : &session_options->media_description_options[*audio_index]; + cricket::MediaDescriptionOptions* video_media_description_options = + !video_index ? nullptr + : &session_options->media_description_options[*video_index]; + + AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(), + audio_media_description_options, + video_media_description_options, + offer_answer_options.num_simulcast_layers); +} + +void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, + cricket::MediaSessionOptions* session_options) { + // Rules for generating an answer are dictated by JSEP sections 5.3.1 (Initial + // Answers) and 5.3.2 (Subsequent Answers). + RTC_DCHECK(remote_description()); + RTC_DCHECK(remote_description()->GetType() == SdpType::kOffer); + for (const ContentInfo& content : + remote_description()->description()->contents()) { + cricket::MediaType media_type = content.media_description()->type(); + if (media_type == cricket::MEDIA_TYPE_AUDIO || + media_type == cricket::MEDIA_TYPE_VIDEO) { + auto transceiver = transceivers()->FindByMid(content.name); + if (transceiver) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForTransceiver( + transceiver, content.name, + /*is_create_offer=*/false)); + } else { + // This should only happen with rejected transceivers. 
+ RTC_DCHECK(content.rejected); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, content.name, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } + } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + RTC_DCHECK(content.rejected); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(media_type, content.name, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); + // Reject all data sections if data channels are disabled. + // Reject a data section if it has already been rejected. + // Reject all data sections except for the first one. + if (pc_->data_channel_type() == cricket::DCT_NONE || content.rejected || + content.name != *(pc_->GetDataMid())) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(content.name)); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(content.name)); + } + } + } +} + +const char* SdpOfferAnswerHandler::SessionErrorToString( + SessionError error) const { + switch (error) { + case SessionError::kNone: + return "ERROR_NONE"; + case SessionError::kContent: + return "ERROR_CONTENT"; + case SessionError::kTransport: + return "ERROR_TRANSPORT"; + } + RTC_NOTREACHED(); + return ""; +} + +std::string SdpOfferAnswerHandler::GetSessionErrorMsg() { + RTC_DCHECK_RUN_ON(signaling_thread()); + rtc::StringBuilder desc; + desc << kSessionError << SessionErrorToString(session_error()) << ". 
"; + desc << kSessionErrorDesc << session_error_desc() << "."; + return desc.Release(); +} + +void SdpOfferAnswerHandler::SetSessionError(SessionError error, + const std::string& error_desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (error != session_error_) { + session_error_ = error; + session_error_desc_ = error_desc; + } +} + +RTCError SdpOfferAnswerHandler::HandleLegacyOfferOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(IsUnifiedPlan()); + + if (options.offer_to_receive_audio == 0) { + RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MEDIA_TYPE_AUDIO); + } else if (options.offer_to_receive_audio == 1) { + AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_AUDIO); + } else if (options.offer_to_receive_audio > 1) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, + "offer_to_receive_audio > 1 is not supported."); + } + + if (options.offer_to_receive_video == 0) { + RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MEDIA_TYPE_VIDEO); + } else if (options.offer_to_receive_video == 1) { + AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); + } else if (options.offer_to_receive_video > 1) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, + "offer_to_receive_video > 1 is not supported."); + } + + return RTCError::OK(); +} + +void SdpOfferAnswerHandler::RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MediaType media_type) { + for (const auto& transceiver : GetReceivingTransceiversOfType(media_type)) { + RtpTransceiverDirection new_direction = + RtpTransceiverDirectionWithRecvSet(transceiver->direction(), false); + if (new_direction != transceiver->direction()) { + RTC_LOG(LS_INFO) << "Changing " << cricket::MediaTypeToString(media_type) + << " transceiver (MID=" + << transceiver->mid().value_or("") << ") from " + << RtpTransceiverDirectionToString( + transceiver->direction()) + << " to " + << 
RtpTransceiverDirectionToString(new_direction) + << " since CreateOffer specified offer_to_receive=0"; + transceiver->internal()->set_direction(new_direction); + } + } +} + +void SdpOfferAnswerHandler::AddUpToOneReceivingTransceiverOfType( + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (GetReceivingTransceiversOfType(media_type).empty()) { + RTC_LOG(LS_INFO) + << "Adding one recvonly " << cricket::MediaTypeToString(media_type) + << " transceiver since CreateOffer specified offer_to_receive=1"; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kRecvOnly; + pc_->AddTransceiver(media_type, nullptr, init, + /*update_negotiation_needed=*/false); + } +} + +std::vector>> +SdpOfferAnswerHandler::GetReceivingTransceiversOfType( + cricket::MediaType media_type) { + std::vector< + rtc::scoped_refptr>> + receiving_transceivers; + for (const auto& transceiver : transceivers()->List()) { + if (!transceiver->stopped() && transceiver->media_type() == media_type && + RtpTransceiverDirectionHasRecv(transceiver->direction())) { + receiving_transceivers.push_back(transceiver); + } + } + return receiving_transceivers; +} + +void SdpOfferAnswerHandler::ProcessRemovalOfRemoteTrack( + rtc::scoped_refptr> + transceiver, + std::vector>* remove_list, + std::vector>* removed_streams) { + RTC_DCHECK(transceiver->mid()); + RTC_LOG(LS_INFO) << "Processing the removal of a track for MID=" + << *transceiver->mid(); + std::vector> previous_streams = + transceiver->internal()->receiver_internal()->streams(); + // This will remove the remote track from the streams. 
+ transceiver->internal()->receiver_internal()->set_stream_ids({}); + remove_list->push_back(transceiver); + RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams); +} + +void SdpOfferAnswerHandler::RemoveRemoteStreamsIfEmpty( + const std::vector>& remote_streams, + std::vector>* removed_streams) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // TODO(https://crbug.com/webrtc/9480): When we use stream IDs instead of + // streams, see if the stream was removed by checking if this was the last + // receiver with that stream ID. + for (const auto& remote_stream : remote_streams) { + if (remote_stream->GetAudioTracks().empty() && + remote_stream->GetVideoTracks().empty()) { + remote_streams_->RemoveStream(remote_stream); + removed_streams->push_back(remote_stream); + } + } +} + +void SdpOfferAnswerHandler::RemoveSenders(cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + UpdateLocalSenders(std::vector(), media_type); + UpdateRemoteSendersList(std::vector(), false, + media_type, nullptr); +} + +void SdpOfferAnswerHandler::UpdateLocalSenders( + const std::vector& streams, + cricket::MediaType media_type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector* current_senders = + rtp_manager()->GetLocalSenderInfos(media_type); + + // Find removed tracks. I.e., tracks where the track id, stream id or ssrc + // don't match the new StreamParam. + for (auto sender_it = current_senders->begin(); + sender_it != current_senders->end(); + /* incremented manually */) { + const RtpSenderInfo& info = *sender_it; + const cricket::StreamParams* params = + cricket::GetStreamBySsrc(streams, info.first_ssrc); + if (!params || params->id != info.sender_id || + params->first_stream_id() != info.stream_id) { + rtp_manager()->OnLocalSenderRemoved(info, media_type); + sender_it = current_senders->erase(sender_it); + } else { + ++sender_it; + } + } + + // Find new and active senders. 
+ for (const cricket::StreamParams& params : streams) { + // The sync_label is the MediaStream label and the |stream.id| is the + // sender id. + const std::string& stream_id = params.first_stream_id(); + const std::string& sender_id = params.id; + uint32_t ssrc = params.first_ssrc(); + const RtpSenderInfo* sender_info = + rtp_manager()->FindSenderInfo(*current_senders, stream_id, sender_id); + if (!sender_info) { + current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); + rtp_manager()->OnLocalSenderAdded(current_senders->back(), media_type); + } + } +} + +void SdpOfferAnswerHandler::UpdateRemoteSendersList( + const cricket::StreamParamsVec& streams, + bool default_sender_needed, + cricket::MediaType media_type, + StreamCollection* new_streams) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(!IsUnifiedPlan()); + + std::vector* current_senders = + rtp_manager()->GetRemoteSenderInfos(media_type); + + // Find removed senders. I.e., senders where the sender id or ssrc don't match + // the new StreamParam. + for (auto sender_it = current_senders->begin(); + sender_it != current_senders->end(); + /* incremented manually */) { + const RtpSenderInfo& info = *sender_it; + const cricket::StreamParams* params = + cricket::GetStreamBySsrc(streams, info.first_ssrc); + std::string params_stream_id; + if (params) { + params_stream_id = + (!params->first_stream_id().empty() ? params->first_stream_id() + : kDefaultStreamId); + } + bool sender_exists = params && params->id == info.sender_id && + params_stream_id == info.stream_id; + // If this is a default track, and we still need it, don't remove it. + if ((info.stream_id == kDefaultStreamId && default_sender_needed) || + sender_exists) { + ++sender_it; + } else { + rtp_manager()->OnRemoteSenderRemoved( + info, remote_streams_->find(info.stream_id), media_type); + sender_it = current_senders->erase(sender_it); + } + } + + // Find new and active senders. 
+ for (const cricket::StreamParams& params : streams) { + if (!params.has_ssrcs()) { + // The remote endpoint has streams, but didn't signal ssrcs. For an active + // sender, this means it is coming from a Unified Plan endpoint,so we just + // create a default. + default_sender_needed = true; + break; + } + + // |params.id| is the sender id and the stream id uses the first of + // |params.stream_ids|. The remote description could come from a Unified + // Plan endpoint, with multiple or no stream_ids() signaled. Since this is + // not supported in Plan B, we just take the first here and create the + // default stream ID if none is specified. + const std::string& stream_id = + (!params.first_stream_id().empty() ? params.first_stream_id() + : kDefaultStreamId); + const std::string& sender_id = params.id; + uint32_t ssrc = params.first_ssrc(); + + rtc::scoped_refptr stream = + remote_streams_->find(stream_id); + if (!stream) { + // This is a new MediaStream. Create a new remote MediaStream. + stream = MediaStreamProxy::Create(rtc::Thread::Current(), + MediaStream::Create(stream_id)); + remote_streams_->AddStream(stream); + new_streams->AddStream(stream); + } + + const RtpSenderInfo* sender_info = + rtp_manager()->FindSenderInfo(*current_senders, stream_id, sender_id); + if (!sender_info) { + current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc)); + rtp_manager()->OnRemoteSenderAdded(current_senders->back(), stream, + media_type); + } + } + + // Add default sender if necessary. + if (default_sender_needed) { + rtc::scoped_refptr default_stream = + remote_streams_->find(kDefaultStreamId); + if (!default_stream) { + // Create the new default MediaStream. + default_stream = MediaStreamProxy::Create( + rtc::Thread::Current(), MediaStream::Create(kDefaultStreamId)); + remote_streams_->AddStream(default_stream); + new_streams->AddStream(default_stream); + } + std::string default_sender_id = (media_type == cricket::MEDIA_TYPE_AUDIO) + ? 
kDefaultAudioSenderId + : kDefaultVideoSenderId; + const RtpSenderInfo* default_sender_info = rtp_manager()->FindSenderInfo( + *current_senders, kDefaultStreamId, default_sender_id); + if (!default_sender_info) { + current_senders->push_back( + RtpSenderInfo(kDefaultStreamId, default_sender_id, /*ssrc=*/0)); + rtp_manager()->OnRemoteSenderAdded(current_senders->back(), + default_stream, media_type); + } + } +} + +void SdpOfferAnswerHandler::EnableSending() { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const auto& transceiver : transceivers()->List()) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (channel && !channel->enabled()) { + channel->Enable(true); + } + } + + if (data_channel_controller()->rtp_data_channel() && + !data_channel_controller()->rtp_data_channel()->enabled()) { + data_channel_controller()->rtp_data_channel()->Enable(true); + } +} + +RTCError SdpOfferAnswerHandler::PushdownMediaDescription( + SdpType type, + cricket::ContentSource source) { + const SessionDescriptionInterface* sdesc = + (source == cricket::CS_LOCAL ? local_description() + : remote_description()); + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(sdesc); + + if (!UpdatePayloadTypeDemuxingState(source)) { + // Note that this is never expected to fail, since RtpDemuxer doesn't return + // an error when changing payload type demux criteria, which is all this + // does. + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to update payload type demuxing state."); + } + + // Push down the new SDP media section for each audio/video transceiver. 
+ for (const auto& transceiver : transceivers()->List()) { + const ContentInfo* content_info = + FindMediaSectionForTransceiver(transceiver, sdesc); + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (!channel || !content_info || content_info->rejected) { + continue; + } + const MediaContentDescription* content_desc = + content_info->media_description(); + if (!content_desc) { + continue; + } + std::string error; + bool success = (source == cricket::CS_LOCAL) + ? channel->SetLocalContent(content_desc, type, &error) + : channel->SetRemoteContent(content_desc, type, &error); + if (!success) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); + } + } + + // If using the RtpDataChannel, push down the new SDP section for it too. + if (data_channel_controller()->rtp_data_channel()) { + const ContentInfo* data_content = + cricket::GetFirstDataContent(sdesc->description()); + if (data_content && !data_content->rejected) { + const MediaContentDescription* data_desc = + data_content->media_description(); + if (data_desc) { + std::string error; + bool success = (source == cricket::CS_LOCAL) + ? 
data_channel_controller() + ->rtp_data_channel() + ->SetLocalContent(data_desc, type, &error) + : data_channel_controller() + ->rtp_data_channel() + ->SetRemoteContent(data_desc, type, &error); + if (!success) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); + } + } + } + } + + // Need complete offer/answer with an SCTP m= section before starting SCTP, + // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19 + if (pc_->sctp_mid() && local_description() && remote_description()) { + rtc::scoped_refptr sctp_transport = + transport_controller()->GetSctpTransport(*(pc_->sctp_mid())); + auto local_sctp_description = cricket::GetFirstSctpDataContentDescription( + local_description()->description()); + auto remote_sctp_description = cricket::GetFirstSctpDataContentDescription( + remote_description()->description()); + if (sctp_transport && local_sctp_description && remote_sctp_description) { + int max_message_size; + // A remote max message size of zero means "any size supported". + // We configure the connection with our own max message size. 
+ if (remote_sctp_description->max_message_size() == 0) { + max_message_size = local_sctp_description->max_message_size(); + } else { + max_message_size = + std::min(local_sctp_description->max_message_size(), + remote_sctp_description->max_message_size()); + } + sctp_transport->Start(local_sctp_description->port(), + remote_sctp_description->port(), max_message_size); + } + } + + return RTCError::OK(); +} + +RTCError SdpOfferAnswerHandler::PushdownTransportDescription( + cricket::ContentSource source, + SdpType type) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + if (source == cricket::CS_LOCAL) { + const SessionDescriptionInterface* sdesc = local_description(); + RTC_DCHECK(sdesc); + return transport_controller()->SetLocalDescription(type, + sdesc->description()); + } else { + const SessionDescriptionInterface* sdesc = remote_description(); + RTC_DCHECK(sdesc); + return transport_controller()->SetRemoteDescription(type, + sdesc->description()); + } +} + +void SdpOfferAnswerHandler::RemoveStoppedTransceivers() { + RTC_DCHECK_RUN_ON(signaling_thread()); + // 3.2.10.1: For each transceiver in the connection's set of transceivers + // run the following steps: + if (!IsUnifiedPlan()) + return; + // Traverse a copy of the transceiver list. + auto transceiver_list = transceivers()->List(); + for (auto transceiver : transceiver_list) { + // 3.2.10.1.1: If transceiver is stopped, associated with an m= section + // and the associated m= section is rejected in + // connection.[[CurrentLocalDescription]] or + // connection.[[CurrentRemoteDescription]], remove the + // transceiver from the connection's set of transceivers. 
+ if (!transceiver->stopped()) { + continue; + } + const ContentInfo* local_content = + FindMediaSectionForTransceiver(transceiver, local_description()); + const ContentInfo* remote_content = + FindMediaSectionForTransceiver(transceiver, remote_description()); + if ((local_content && local_content->rejected) || + (remote_content && remote_content->rejected)) { + RTC_LOG(LS_INFO) << "Dissociating transceiver" + << " since the media section is being recycled."; + transceiver->internal()->set_mid(absl::nullopt); + transceiver->internal()->set_mline_index(absl::nullopt); + transceivers()->Remove(transceiver); + continue; + } + if (!local_content && !remote_content) { + // TODO(bugs.webrtc.org/11973): Consider if this should be removed already + // See https://github.com/w3c/webrtc-pc/issues/2576 + RTC_LOG(LS_INFO) + << "Dropping stopped transceiver that was never associated"; + transceivers()->Remove(transceiver); + continue; + } + } +} + +void SdpOfferAnswerHandler::RemoveUnusedChannels( + const SessionDescription* desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Destroy video channel first since it may have a pointer to the + // voice channel. 
+ const cricket::ContentInfo* video_info = cricket::GetFirstVideoContent(desc); + if (!video_info || video_info->rejected) { + DestroyTransceiverChannel(rtp_manager()->GetVideoTransceiver()); + } + + const cricket::ContentInfo* audio_info = cricket::GetFirstAudioContent(desc); + if (!audio_info || audio_info->rejected) { + DestroyTransceiverChannel(rtp_manager()->GetAudioTransceiver()); + } + + const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc); + if (!data_info || data_info->rejected) { + DestroyDataChannelTransport(); + } +} + +void SdpOfferAnswerHandler::ReportNegotiatedSdpSemantics( + const SessionDescriptionInterface& answer) { + SdpSemanticNegotiated semantics_negotiated; + switch (answer.description()->msid_signaling()) { + case 0: + semantics_negotiated = kSdpSemanticNegotiatedNone; + break; + case cricket::kMsidSignalingMediaSection: + semantics_negotiated = kSdpSemanticNegotiatedUnifiedPlan; + break; + case cricket::kMsidSignalingSsrcAttribute: + semantics_negotiated = kSdpSemanticNegotiatedPlanB; + break; + case cricket::kMsidSignalingMediaSection | + cricket::kMsidSignalingSsrcAttribute: + semantics_negotiated = kSdpSemanticNegotiatedMixed; + break; + default: + RTC_NOTREACHED(); + } + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpSemanticNegotiated", + semantics_negotiated, kSdpSemanticNegotiatedMax); +} + +void SdpOfferAnswerHandler::UpdateEndedRemoteMediaStreams() { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector> streams_to_remove; + for (size_t i = 0; i < remote_streams_->count(); ++i) { + MediaStreamInterface* stream = remote_streams_->at(i); + if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { + streams_to_remove.push_back(stream); + } + } + + for (auto& stream : streams_to_remove) { + remote_streams_->RemoveStream(stream); + pc_->Observer()->OnRemoveStream(std::move(stream)); + } +} + +bool SdpOfferAnswerHandler::UseCandidatesInSessionDescription( + const SessionDescriptionInterface* 
remote_desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!remote_desc) { + return true; + } + bool ret = true; + + for (size_t m = 0; m < remote_desc->number_of_mediasections(); ++m) { + const IceCandidateCollection* candidates = remote_desc->candidates(m); + for (size_t n = 0; n < candidates->count(); ++n) { + const IceCandidateInterface* candidate = candidates->at(n); + bool valid = false; + if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) { + if (valid) { + RTC_LOG(LS_INFO) + << "UseCandidatesInSessionDescription: Not ready to use " + "candidate."; + } + continue; + } + ret = UseCandidate(candidate); + if (!ret) { + break; + } + } + } + return ret; +} + +bool SdpOfferAnswerHandler::UseCandidate( + const IceCandidateInterface* candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTCErrorOr result = + FindContentInfo(remote_description(), candidate); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "UseCandidate: Invalid candidate. " + << result.error().message(); + return false; + } + std::vector candidates; + candidates.push_back(candidate->candidate()); + // Invoking BaseSession method to handle remote candidates. + RTCError error = transport_controller()->AddRemoteCandidates( + result.value()->name, candidates); + if (error.ok()) { + ReportRemoteIceCandidateAdded(candidate->candidate()); + // Candidates successfully submitted for checking. + if (pc_->ice_connection_state() == + PeerConnectionInterface::kIceConnectionNew || + pc_->ice_connection_state() == + PeerConnectionInterface::kIceConnectionDisconnected) { + // If state is New, then the session has just gotten its first remote ICE + // candidates, so go to Checking. + // If state is Disconnected, the session is re-using old candidates or + // receiving additional ones, so go to Checking. + // If state is Connected, stay Connected. + // TODO(bemasc): If state is Connected, and the new candidates are for a + // newly added transport, then the state actually _should_ move to + // checking. 
Add a way to distinguish that case. + pc_->SetIceConnectionState( + PeerConnectionInterface::kIceConnectionChecking); + } + // TODO(bemasc): If state is Completed, go back to Connected. + } else { + RTC_LOG(LS_WARNING) << error.message(); + } + return true; +} + +// We need to check the local/remote description for the Transport instead of +// the session, because a new Transport added during renegotiation may have +// them unset while the session has them set from the previous negotiation. +// Not doing so may trigger the auto generation of transport description and +// mess up DTLS identity information, ICE credential, etc. +bool SdpOfferAnswerHandler::ReadyToUseRemoteCandidate( + const IceCandidateInterface* candidate, + const SessionDescriptionInterface* remote_desc, + bool* valid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + *valid = true; + + const SessionDescriptionInterface* current_remote_desc = + remote_desc ? remote_desc : remote_description(); + + if (!current_remote_desc) { + return false; + } + + RTCErrorOr result = + FindContentInfo(current_remote_desc, candidate); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "ReadyToUseRemoteCandidate: Invalid candidate. 
" + << result.error().message(); + + *valid = false; + return false; + } + + std::string transport_name = GetTransportName(result.value()->name); + return !transport_name.empty(); +} + +void SdpOfferAnswerHandler::ReportRemoteIceCandidateAdded( + const cricket::Candidate& candidate) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED); + if (candidate.address().IsPrivateIP()) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED); + } + if (candidate.address().IsUnresolvedIP()) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED); + } + if (candidate.address().family() == AF_INET6) { + pc_->NoteUsageEvent(UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED); + } +} + +RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( + const SessionDescriptionInterface* description, + const IceCandidateInterface* candidate) { + if (candidate->sdp_mline_index() >= 0) { + size_t mediacontent_index = + static_cast(candidate->sdp_mline_index()); + size_t content_size = description->description()->contents().size(); + if (mediacontent_index < content_size) { + return &description->description()->contents()[mediacontent_index]; + } else { + return RTCError(RTCErrorType::INVALID_RANGE, + "Media line index (" + + rtc::ToString(candidate->sdp_mline_index()) + + ") out of range (number of mlines: " + + rtc::ToString(content_size) + ")."); + } + } else if (!candidate->sdp_mid().empty()) { + auto& contents = description->description()->contents(); + auto it = absl::c_find_if( + contents, [candidate](const cricket::ContentInfo& content_info) { + return content_info.mid() == candidate->sdp_mid(); + }); + if (it == contents.end()) { + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "Mid " + candidate->sdp_mid() + + " specified but no media section with that mid found."); + } else { + return &*it; + } + } + + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Neither sdp_mline_index nor sdp_mid specified."); +} + +RTCError SdpOfferAnswerHandler::CreateChannels(const 
SessionDescription& desc) { + // Creating the media channels. Transports should already have been created + // at this point. + RTC_DCHECK_RUN_ON(signaling_thread()); + const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(&desc); + if (voice && !voice->rejected && + !rtp_manager()->GetAudioTransceiver()->internal()->channel()) { + cricket::VoiceChannel* voice_channel = CreateVoiceChannel(voice->name); + if (!voice_channel) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create voice channel."); + } + rtp_manager()->GetAudioTransceiver()->internal()->SetChannel(voice_channel); + } + + const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc); + if (video && !video->rejected && + !rtp_manager()->GetVideoTransceiver()->internal()->channel()) { + cricket::VideoChannel* video_channel = CreateVideoChannel(video->name); + if (!video_channel) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create video channel."); + } + rtp_manager()->GetVideoTransceiver()->internal()->SetChannel(video_channel); + } + + const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); + if (pc_->data_channel_type() != cricket::DCT_NONE && data && + !data->rejected && !data_channel_controller()->rtp_data_channel() && + !data_channel_controller()->data_channel_transport()) { + if (!CreateDataChannel(data->name)) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); + } + } + + return RTCError::OK(); +} + +// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. +cricket::VoiceChannel* SdpOfferAnswerHandler::CreateVoiceChannel( + const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + + // TODO(bugs.webrtc.org/11992): CreateVoiceChannel internally switches to the + // worker thread. 
We shouldn't be using the |call_ptr_| hack here but simply + // be on the worker thread and use |call_| (update upstream code). + cricket::VoiceChannel* voice_channel; + { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + voice_channel = channel_manager()->CreateVoiceChannel( + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, audio_options()); + } + if (!voice_channel) { + return nullptr; + } + voice_channel->SignalSentPacket().connect(pc_, + &PeerConnection::OnSentPacket_w); + voice_channel->SetRtpTransport(rtp_transport); + + return voice_channel; +} + +// TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. +cricket::VideoChannel* SdpOfferAnswerHandler::CreateVideoChannel( + const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + + // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to the + // worker thread. We shouldn't be using the |call_ptr_| hack here but simply + // be on the worker thread and use |call_| (update upstream code). 
+ cricket::VideoChannel* video_channel; + { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + video_channel = channel_manager()->CreateVideoChannel( + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, video_options(), + video_bitrate_allocator_factory_.get()); + } + if (!video_channel) { + return nullptr; + } + video_channel->SignalSentPacket().connect(pc_, + &PeerConnection::OnSentPacket_w); + video_channel->SetRtpTransport(rtp_transport); + + return video_channel; +} + +bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + switch (pc_->data_channel_type()) { + case cricket::DCT_SCTP: + if (pc_->network_thread()->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnection::SetupDataChannelTransport_n, pc_, + mid))) { + pc_->SetSctpDataMid(mid); + } else { + return false; + } + return true; + case cricket::DCT_RTP: + default: + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + // TODO(bugs.webrtc.org/9987): set_rtp_data_channel() should be called on + // the network thread like set_data_channel_transport is. 
+ { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + data_channel_controller()->set_rtp_data_channel( + channel_manager()->CreateRtpDataChannel( + pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), + pc_->GetCryptoOptions(), &ssrc_generator_)); + } + if (!data_channel_controller()->rtp_data_channel()) { + return false; + } + data_channel_controller()->rtp_data_channel()->SignalSentPacket().connect( + pc_, &PeerConnection::OnSentPacket_w); + data_channel_controller()->rtp_data_channel()->SetRtpTransport( + rtp_transport); + SetHavePendingRtpDataChannel(); + return true; + } + return false; +} + +void SdpOfferAnswerHandler::DestroyTransceiverChannel( + rtc::scoped_refptr> + transceiver) { + RTC_DCHECK(transceiver); + + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (channel) { + transceiver->internal()->SetChannel(nullptr); + DestroyChannelInterface(channel); + } +} + +void SdpOfferAnswerHandler::DestroyDataChannelTransport() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (data_channel_controller()->rtp_data_channel()) { + data_channel_controller()->OnTransportChannelClosed(); + DestroyChannelInterface(data_channel_controller()->rtp_data_channel()); + data_channel_controller()->set_rtp_data_channel(nullptr); + } + + // Note: Cannot use rtc::Bind to create a functor to invoke because it will + // grab a reference to this PeerConnection. If this is called from the + // PeerConnection destructor, the RefCountedObject vtable will have already + // been destroyed (since it is a subclass of PeerConnection) and using + // rtc::Bind will cause "Pure virtual function called" error to appear. 
+ + if (pc_->sctp_mid()) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + data_channel_controller()->OnTransportChannelClosed(); + pc_->network_thread()->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(pc_->network_thread()); + pc_->TeardownDataChannelTransport_n(); + }); + pc_->ResetSctpDataMid(); + } +} + +void SdpOfferAnswerHandler::DestroyChannelInterface( + cricket::ChannelInterface* channel) { + // TODO(bugs.webrtc.org/11992): All the below methods should be called on the + // worker thread. (they switch internally anyway). Change + // DestroyChannelInterface to either be called on the worker thread, or do + // this asynchronously on the worker. + RTC_DCHECK(channel); + switch (channel->media_type()) { + case cricket::MEDIA_TYPE_AUDIO: + channel_manager()->DestroyVoiceChannel( + static_cast(channel)); + break; + case cricket::MEDIA_TYPE_VIDEO: + channel_manager()->DestroyVideoChannel( + static_cast(channel)); + break; + case cricket::MEDIA_TYPE_DATA: + channel_manager()->DestroyRtpDataChannel( + static_cast(channel)); + break; + default: + RTC_NOTREACHED() << "Unknown media type: " << channel->media_type(); + break; + } +} + +void SdpOfferAnswerHandler::DestroyAllChannels() { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!transceivers()) { + return; + } + // Destroy video channels first since they may have a pointer to a voice + // channel. 
+ for (const auto& transceiver : transceivers()->List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { + DestroyTransceiverChannel(transceiver); + } + } + for (const auto& transceiver : transceivers()->List()) { + if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + DestroyTransceiverChannel(transceiver); + } + } + DestroyDataChannelTransport(); +} + +void SdpOfferAnswerHandler::GenerateMediaDescriptionOptions( + const SessionDescriptionInterface* session_desc, + RtpTransceiverDirection audio_direction, + RtpTransceiverDirection video_direction, + absl::optional* audio_index, + absl::optional* video_index, + absl::optional* data_index, + cricket::MediaSessionOptions* session_options) { + RTC_DCHECK_RUN_ON(signaling_thread()); + for (const cricket::ContentInfo& content : + session_desc->description()->contents()) { + if (IsAudioContent(&content)) { + // If we already have an audio m= section, reject this extra one. + if (*audio_index) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions( + cricket::MEDIA_TYPE_AUDIO, content.name, + RtpTransceiverDirection::kInactive, /*stopped=*/true)); + } else { + bool stopped = (audio_direction == RtpTransceiverDirection::kInactive); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_AUDIO, + content.name, audio_direction, + stopped)); + *audio_index = session_options->media_description_options.size() - 1; + } + session_options->media_description_options.back().header_extensions = + channel_manager()->GetSupportedAudioRtpHeaderExtensions(); + } else if (IsVideoContent(&content)) { + // If we already have an video m= section, reject this extra one. 
+ if (*video_index) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions( + cricket::MEDIA_TYPE_VIDEO, content.name, + RtpTransceiverDirection::kInactive, /*stopped=*/true)); + } else { + bool stopped = (video_direction == RtpTransceiverDirection::kInactive); + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_VIDEO, + content.name, video_direction, + stopped)); + *video_index = session_options->media_description_options.size() - 1; + } + session_options->media_description_options.back().header_extensions = + channel_manager()->GetSupportedVideoRtpHeaderExtensions(); + } else if (IsUnsupportedContent(&content)) { + session_options->media_description_options.push_back( + cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_UNSUPPORTED, + content.name, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); + } else { + RTC_DCHECK(IsDataContent(&content)); + // If we already have an data m= section, reject this extra one. + if (*data_index) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForRejectedData(content.name)); + } else { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData(content.name)); + *data_index = session_options->media_description_options.size() - 1; + } + } + } +} + +cricket::MediaDescriptionOptions +SdpOfferAnswerHandler::GetMediaDescriptionOptionsForActiveData( + const std::string& mid) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + // Direction for data sections is meaningless, but legacy endpoints might + // expect sendrecv. 
+ cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, + RtpTransceiverDirection::kSendRecv, + /*stopped=*/false); + AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()), + &options); + return options; +} + +cricket::MediaDescriptionOptions +SdpOfferAnswerHandler::GetMediaDescriptionOptionsForRejectedData( + const std::string& mid) const { + RTC_DCHECK_RUN_ON(signaling_thread()); + cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true); + AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()), + &options); + return options; +} + +const std::string SdpOfferAnswerHandler::GetTransportName( + const std::string& content_name) { + RTC_DCHECK_RUN_ON(signaling_thread()); + cricket::ChannelInterface* channel = pc_->GetChannel(content_name); + if (channel) { + return channel->transport_name(); + } + if (data_channel_controller()->data_channel_transport()) { + RTC_DCHECK(pc_->sctp_mid()); + if (content_name == *(pc_->sctp_mid())) { + return *(pc_->sctp_transport_name()); + } + } + // Return an empty string if failed to retrieve the transport name. + return ""; +} + +bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( + cricket::ContentSource source) { + RTC_DCHECK_RUN_ON(signaling_thread()); + // We may need to delete any created default streams and disable creation of + // new ones on the basis of payload type. This is needed to avoid SSRC + // collisions in Call's RtpDemuxer, in the case that a transceiver has + // created a default stream, and then some other channel gets the SSRC + // signaled in the corresponding Unified Plan "m=" section. Specifically, we + // need to disable payload type based demuxing when two bundled "m=" sections + // are using the same payload type(s). 
For more context + // see https://bugs.chromium.org/p/webrtc/issues/detail?id=11477 + const SessionDescriptionInterface* sdesc = + (source == cricket::CS_LOCAL ? local_description() + : remote_description()); + const cricket::ContentGroup* bundle_group = + sdesc->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + std::set audio_payload_types; + std::set video_payload_types; + bool pt_demuxing_enabled_audio = true; + bool pt_demuxing_enabled_video = true; + for (auto& content_info : sdesc->description()->contents()) { + // If this m= section isn't bundled, it's safe to demux by payload type + // since other m= sections using the same payload type will also be using + // different transports. + if (!bundle_group || !bundle_group->HasContentName(content_info.name)) { + continue; + } + if (content_info.rejected || + (source == cricket::ContentSource::CS_LOCAL && + !RtpTransceiverDirectionHasRecv( + content_info.media_description()->direction())) || + (source == cricket::ContentSource::CS_REMOTE && + !RtpTransceiverDirectionHasSend( + content_info.media_description()->direction()))) { + // Ignore transceivers that are not receiving. + continue; + } + switch (content_info.media_description()->type()) { + case cricket::MediaType::MEDIA_TYPE_AUDIO: { + const cricket::AudioContentDescription* audio_desc = + content_info.media_description()->as_audio(); + for (const cricket::AudioCodec& audio : audio_desc->codecs()) { + if (audio_payload_types.count(audio.id)) { + // Two m= sections are using the same payload type, thus demuxing + // by payload type is not possible. 
+ pt_demuxing_enabled_audio = false; + } + audio_payload_types.insert(audio.id); + } + break; + } + case cricket::MediaType::MEDIA_TYPE_VIDEO: { + const cricket::VideoContentDescription* video_desc = + content_info.media_description()->as_video(); + for (const cricket::VideoCodec& video : video_desc->codecs()) { + if (video_payload_types.count(video.id)) { + // Two m= sections are using the same payload type, thus demuxing + // by payload type is not possible. + pt_demuxing_enabled_video = false; + } + video_payload_types.insert(video.id); + } + break; + } + default: + // Ignore data channels. + continue; + } + } + + // Gather all updates ahead of time so that all channels can be updated in a + // single Invoke; necessary due to thread guards. + std::vector> + channels_to_update; + for (const auto& transceiver : transceivers()->List()) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, sdesc); + if (!channel || !content) { + continue; + } + RtpTransceiverDirection local_direction = + content->media_description()->direction(); + if (source == cricket::CS_REMOTE) { + local_direction = RtpTransceiverDirectionReversed(local_direction); + } + channels_to_update.emplace_back(local_direction, + transceiver->internal()->channel()); + } + + if (channels_to_update.empty()) { + return true; + } + return pc_->worker_thread()->Invoke( + RTC_FROM_HERE, [&channels_to_update, bundle_group, + pt_demuxing_enabled_audio, pt_demuxing_enabled_video]() { + for (const auto& it : channels_to_update) { + RtpTransceiverDirection local_direction = it.first; + cricket::ChannelInterface* channel = it.second; + cricket::MediaType media_type = channel->media_type(); + bool in_bundle_group = (bundle_group && bundle_group->HasContentName( + channel->content_name())); + if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { + if (!channel->SetPayloadTypeDemuxingEnabled( + (!in_bundle_group || 
pt_demuxing_enabled_audio) && + RtpTransceiverDirectionHasRecv(local_direction))) { + return false; + } + } else if (media_type == cricket::MediaType::MEDIA_TYPE_VIDEO) { + if (!channel->SetPayloadTypeDemuxingEnabled( + (!in_bundle_group || pt_demuxing_enabled_video) && + RtpTransceiverDirectionHasRecv(local_direction))) { + return false; + } + } + } + return true; + }); +} + +} // namespace webrtc diff --git a/pc/sdp_offer_answer.h b/pc/sdp_offer_answer.h new file mode 100644 index 0000000000..43a3dbb5a8 --- /dev/null +++ b/pc/sdp_offer_answer.h @@ -0,0 +1,676 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_SDP_OFFER_ANSWER_H_ +#define PC_SDP_OFFER_ANSWER_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_options.h" +#include "api/candidate.h" +#include "api/jsep.h" +#include "api/jsep_ice_candidate.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_transceiver_direction.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/transport/data_channel_transport_interface.h" +#include "api/turn_customizer.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "media/base/media_channel.h" +#include "media/base/stream_params.h" +#include "p2p/base/port_allocator.h" +#include "pc/channel.h" +#include 
"pc/channel_interface.h" +#include "pc/channel_manager.h" +#include "pc/data_channel_controller.h" +#include "pc/ice_server_parsing.h" +#include "pc/jsep_transport_controller.h" +#include "pc/media_session.h" +#include "pc/media_stream_observer.h" +#include "pc/peer_connection_factory.h" +#include "pc/peer_connection_internal.h" +#include "pc/rtc_stats_collector.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/sctp_transport.h" +#include "pc/sdp_state_provider.h" +#include "pc/session_description.h" +#include "pc/stats_collector.h" +#include "pc/stream_collection.h" +#include "pc/transceiver_list.h" +#include "pc/webrtc_session_description_factory.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/operations_chain.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/unique_id_generator.h" +#include "rtc_base/weak_ptr.h" + +namespace webrtc { + +// SdpOfferAnswerHandler is a component +// of the PeerConnection object as defined +// by the PeerConnectionInterface API surface. +// The class is responsible for the following: +// - Parsing and interpreting SDP. +// - Generating offers and answers based on the current state. +// This class lives on the signaling thread. +class SdpOfferAnswerHandler : public SdpStateProvider, + public sigslot::has_slots<> { + public: + ~SdpOfferAnswerHandler(); + + // Creates an SdpOfferAnswerHandler. Modifies dependencies. 
+ static std::unique_ptr Create( + PeerConnection* pc, + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies); + + void ResetSessionDescFactory() { + RTC_DCHECK_RUN_ON(signaling_thread()); + webrtc_session_desc_factory_.reset(); + } + const WebRtcSessionDescriptionFactory* webrtc_session_desc_factory() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return webrtc_session_desc_factory_.get(); + } + + // Change signaling state to Closed, and perform appropriate actions. + void Close(); + + // Called as part of destroying the owning PeerConnection. + void PrepareForShutdown(); + + // Implementation of SdpStateProvider + PeerConnectionInterface::SignalingState signaling_state() const override; + + const SessionDescriptionInterface* local_description() const override; + const SessionDescriptionInterface* remote_description() const override; + const SessionDescriptionInterface* current_local_description() const override; + const SessionDescriptionInterface* current_remote_description() + const override; + const SessionDescriptionInterface* pending_local_description() const override; + const SessionDescriptionInterface* pending_remote_description() + const override; + + bool NeedsIceRestart(const std::string& content_name) const override; + bool IceRestartPending(const std::string& content_name) const override; + absl::optional GetDtlsRole( + const std::string& mid) const override; + + void RestartIce(); + + // JSEP01 + void CreateOffer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options); + void CreateAnswer( + CreateSessionDescriptionObserver* observer, + const PeerConnectionInterface::RTCOfferAnswerOptions& options); + + void SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + void SetLocalDescription( + rtc::scoped_refptr observer); + void SetLocalDescription(SetSessionDescriptionObserver* observer, + 
SessionDescriptionInterface* desc); + void SetLocalDescription(SetSessionDescriptionObserver* observer); + + void SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + void SetRemoteDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc); + + PeerConnectionInterface::RTCConfiguration GetConfiguration(); + RTCError SetConfiguration( + const PeerConnectionInterface::RTCConfiguration& configuration); + bool AddIceCandidate(const IceCandidateInterface* candidate); + void AddIceCandidate(std::unique_ptr candidate, + std::function callback); + bool RemoveIceCandidates(const std::vector& candidates); + // Adds a locally generated candidate to the local description. + void AddLocalIceCandidate(const JsepIceCandidate* candidate); + void RemoveLocalIceCandidates( + const std::vector& candidates); + bool ShouldFireNegotiationNeededEvent(uint32_t event_id); + + bool AddStream(MediaStreamInterface* local_stream); + void RemoveStream(MediaStreamInterface* local_stream); + + absl::optional is_caller(); + bool HasNewIceCredentials(); + void UpdateNegotiationNeeded(); + void SetHavePendingRtpDataChannel() { + RTC_DCHECK_RUN_ON(signaling_thread()); + have_pending_rtp_data_channel_ = true; + } + + // Returns the media section in the given session description that is + // associated with the RtpTransceiver. Returns null if none found or this + // RtpTransceiver is not associated. Logic varies depending on the + // SdpSemantics specified in the configuration. + const cricket::ContentInfo* FindMediaSectionForTransceiver( + rtc::scoped_refptr> + transceiver, + const SessionDescriptionInterface* sdesc) const; + + // Destroys all BaseChannels and destroys the SCTP data channel, if present. 
+ void DestroyAllChannels(); + + rtc::scoped_refptr local_streams(); + rtc::scoped_refptr remote_streams(); + + private: + class ImplicitCreateSessionDescriptionObserver; + + friend class ImplicitCreateSessionDescriptionObserver; + class SetSessionDescriptionObserverAdapter; + + friend class SetSessionDescriptionObserverAdapter; + + enum class SessionError { + kNone, // No error. + kContent, // Error in BaseChannel SetLocalContent/SetRemoteContent. + kTransport, // Error from the underlying transport. + }; + + // Represents the [[LocalIceCredentialsToReplace]] internal slot in the spec. + // It makes the next CreateOffer() produce new ICE credentials even if + // RTCOfferAnswerOptions::ice_restart is false. + // https://w3c.github.io/webrtc-pc/#dfn-localufragstoreplace + // TODO(hbos): When JsepTransportController/JsepTransport supports rollback, + // move this type of logic to JsepTransportController/JsepTransport. + class LocalIceCredentialsToReplace; + + // Only called by the Create() function. + explicit SdpOfferAnswerHandler(PeerConnection* pc); + // Called from the `Create()` function. Can only be called + // once. Modifies dependencies. + void Initialize( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies& dependencies); + + rtc::Thread* signaling_thread() const; + // Non-const versions of local_description()/remote_description(), for use + // internally. + SessionDescriptionInterface* mutable_local_description() + RTC_RUN_ON(signaling_thread()) { + return pending_local_description_ ? pending_local_description_.get() + : current_local_description_.get(); + } + SessionDescriptionInterface* mutable_remote_description() + RTC_RUN_ON(signaling_thread()) { + return pending_remote_description_ ? pending_remote_description_.get() + : current_remote_description_.get(); + } + + // Synchronous implementations of SetLocalDescription/SetRemoteDescription + // that return an RTCError instead of invoking a callback. 
+ RTCError ApplyLocalDescription( + std::unique_ptr desc); + RTCError ApplyRemoteDescription( + std::unique_ptr desc); + + // Implementation of the offer/answer exchange operations. These are chained + // onto the |operations_chain_| when the public CreateOffer(), CreateAnswer(), + // SetLocalDescription() and SetRemoteDescription() methods are invoked. + void DoCreateOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer); + void DoCreateAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& options, + rtc::scoped_refptr observer); + void DoSetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + void DoSetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer); + + // Update the state, signaling if necessary. + void ChangeSignalingState( + PeerConnectionInterface::SignalingState signaling_state); + + RTCError UpdateSessionState(SdpType type, + cricket::ContentSource source, + const cricket::SessionDescription* description); + + bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread()); + + // Signals from MediaStreamObserver. + void OnAudioTrackAdded(AudioTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + void OnAudioTrackRemoved(AudioTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + void OnVideoTrackAdded(VideoTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + void OnVideoTrackRemoved(VideoTrackInterface* track, + MediaStreamInterface* stream) + RTC_RUN_ON(signaling_thread()); + + // | desc_type | is the type of the description that caused the rollback. + RTCError Rollback(SdpType desc_type); + void OnOperationsChainEmpty(); + + // Runs the algorithm **set the associated remote streams** specified in + // https://w3c.github.io/webrtc-pc/#set-associated-remote-streams. 
+ void SetAssociatedRemoteStreams( + rtc::scoped_refptr receiver, + const std::vector& stream_ids, + std::vector>* added_streams, + std::vector>* removed_streams); + + bool CheckIfNegotiationIsNeeded(); + void GenerateNegotiationNeededEvent(); + // Helper method which verifies SDP. + RTCError ValidateSessionDescription(const SessionDescriptionInterface* sdesc, + cricket::ContentSource source) + RTC_RUN_ON(signaling_thread()); + + // Updates the local RtpTransceivers according to the JSEP rules. Called as + // part of setting the local/remote description. + RTCError UpdateTransceiversAndDataChannels( + cricket::ContentSource source, + const SessionDescriptionInterface& new_session, + const SessionDescriptionInterface* old_local_description, + const SessionDescriptionInterface* old_remote_description); + + // Associate the given transceiver according to the JSEP rules. + RTCErrorOr< + rtc::scoped_refptr>> + AssociateTransceiver(cricket::ContentSource source, + SdpType type, + size_t mline_index, + const cricket::ContentInfo& content, + const cricket::ContentInfo* old_local_content, + const cricket::ContentInfo* old_remote_content) + RTC_RUN_ON(signaling_thread()); + + // If the BUNDLE policy is max-bundle, then we know for sure that all + // transports will be bundled from the start. This method returns the BUNDLE + // group if that's the case, or null if BUNDLE will be negotiated later. An + // error is returned if max-bundle is specified but the session description + // does not have a BUNDLE group. + RTCErrorOr GetEarlyBundleGroup( + const cricket::SessionDescription& desc) const + RTC_RUN_ON(signaling_thread()); + + // Either creates or destroys the transceiver's BaseChannel according to the + // given media section. 
+ RTCError UpdateTransceiverChannel( + rtc::scoped_refptr> + transceiver, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread()); + + // Either creates or destroys the local data channel according to the given + // media section. + RTCError UpdateDataChannel(cricket::ContentSource source, + const cricket::ContentInfo& content, + const cricket::ContentGroup* bundle_group) + RTC_RUN_ON(signaling_thread()); + // Check if a call to SetLocalDescription is acceptable with a session + // description of the given type. + bool ExpectSetLocalDescription(SdpType type); + // Check if a call to SetRemoteDescription is acceptable with a session + // description of the given type. + bool ExpectSetRemoteDescription(SdpType type); + + // The offer/answer machinery assumes the media section MID is present and + // unique. To support legacy end points that do not supply a=mid lines, this + // method will modify the session description to add MIDs generated according + // to the SDP semantics. + void FillInMissingRemoteMids(cricket::SessionDescription* remote_description); + + // Returns an RtpTransciever, if available, that can be used to receive the + // given media type according to JSEP rules. + rtc::scoped_refptr> + FindAvailableTransceiverToReceive(cricket::MediaType media_type) const; + + // Returns a MediaSessionOptions struct with options decided by |options|, + // the local MediaStreams and DataChannels. 
+ void GetOptionsForOffer(const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options); + void GetOptionsForPlanBOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + void GetOptionsForUnifiedPlanOffer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + + // Returns a MediaSessionOptions struct with options decided by + // |constraints|, the local MediaStreams and DataChannels. + void GetOptionsForAnswer(const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options); + void GetOptionsForPlanBAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + void GetOptionsForUnifiedPlanAnswer( + const PeerConnectionInterface::RTCOfferAnswerOptions& + offer_answer_options, + cricket::MediaSessionOptions* session_options) + RTC_RUN_ON(signaling_thread()); + + const char* SessionErrorToString(SessionError error) const; + std::string GetSessionErrorMsg(); + // Returns the last error in the session. See the enum above for details. 
+ SessionError session_error() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return session_error_; + } + const std::string& session_error_desc() const { return session_error_desc_; } + + RTCError HandleLegacyOfferOptions( + const PeerConnectionInterface::RTCOfferAnswerOptions& options); + void RemoveRecvDirectionFromReceivingTransceiversOfType( + cricket::MediaType media_type) RTC_RUN_ON(signaling_thread()); + void AddUpToOneReceivingTransceiverOfType(cricket::MediaType media_type); + + std::vector< + rtc::scoped_refptr>> + GetReceivingTransceiversOfType(cricket::MediaType media_type) + RTC_RUN_ON(signaling_thread()); + + // Runs the algorithm specified in + // https://w3c.github.io/webrtc-pc/#process-remote-track-removal + // This method will update the following lists: + // |remove_list| is the list of transceivers for which the receiving track is + // being removed. + // |removed_streams| is the list of streams which no longer have a receiving + // track so should be removed. + void ProcessRemovalOfRemoteTrack( + rtc::scoped_refptr> + transceiver, + std::vector>* remove_list, + std::vector>* removed_streams); + + void RemoveRemoteStreamsIfEmpty( + const std::vector>& + remote_streams, + std::vector>* removed_streams); + + // Remove all local and remote senders of type |media_type|. + // Called when a media type is rejected (m-line set to port 0). + void RemoveSenders(cricket::MediaType media_type); + + // Loops through the vector of |streams| and finds added and removed + // StreamParams since last time this method was called. + // For each new or removed StreamParam, OnLocalSenderSeen or + // OnLocalSenderRemoved is invoked. + void UpdateLocalSenders(const std::vector& streams, + cricket::MediaType media_type); + + // Makes sure a MediaStreamTrack is created for each StreamParam in |streams|, + // and existing MediaStreamTracks are removed if there is no corresponding + // StreamParam. 
If |default_track_needed| is true, a default MediaStreamTrack + // is created if it doesn't exist; if false, it's removed if it exists. + // |media_type| is the type of the |streams| and can be either audio or video. + // If a new MediaStream is created it is added to |new_streams|. + void UpdateRemoteSendersList( + const std::vector& streams, + bool default_track_needed, + cricket::MediaType media_type, + StreamCollection* new_streams); + + // Enables media channels to allow sending of media. + // This enables media to flow on all configured audio/video channels and the + // RtpDataChannel. + void EnableSending(); + // Push the media parts of the local or remote session description + // down to all of the channels. + RTCError PushdownMediaDescription(SdpType type, + cricket::ContentSource source); + + RTCError PushdownTransportDescription(cricket::ContentSource source, + SdpType type); + // Helper function to remove stopped transceivers. + void RemoveStoppedTransceivers(); + // Deletes the corresponding channel of contents that don't exist in |desc|. + // |desc| can be null. This means that all channels are deleted. + void RemoveUnusedChannels(const cricket::SessionDescription* desc); + + // Report inferred negotiated SDP semantics from a local/remote answer to the + // UMA observer. + void ReportNegotiatedSdpSemantics(const SessionDescriptionInterface& answer); + + // Finds remote MediaStreams without any tracks and removes them from + // |remote_streams_| and notifies the observer that the MediaStreams no longer + // exist. + void UpdateEndedRemoteMediaStreams(); + + // Uses all remote candidates in |remote_desc| in this session. + bool UseCandidatesInSessionDescription( + const SessionDescriptionInterface* remote_desc); + // Uses |candidate| in this session. + bool UseCandidate(const IceCandidateInterface* candidate); + // Returns true if we are ready to push down the remote candidate. 
+ // |remote_desc| is the new remote description, or NULL if the current remote + // description should be used. Output |valid| is true if the candidate media + // index is valid. + bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate, + const SessionDescriptionInterface* remote_desc, + bool* valid); + void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate) + RTC_RUN_ON(signaling_thread()); + + RTCErrorOr FindContentInfo( + const SessionDescriptionInterface* description, + const IceCandidateInterface* candidate) RTC_RUN_ON(signaling_thread()); + + // Functions for dealing with transports. + // Note that cricket code uses the term "channel" for what other code + // refers to as "transport". + + // Allocates media channels based on the |desc|. If |desc| doesn't have + // the BUNDLE option, this method will disable BUNDLE in PortAllocator. + // This method will also delete any existing media channels before creating. + RTCError CreateChannels(const cricket::SessionDescription& desc); + + // Helper methods to create media channels. + cricket::VoiceChannel* CreateVoiceChannel(const std::string& mid); + cricket::VideoChannel* CreateVideoChannel(const std::string& mid); + bool CreateDataChannel(const std::string& mid); + + // Destroys and clears the BaseChannel associated with the given transceiver, + // if such channel is set. + void DestroyTransceiverChannel( + rtc::scoped_refptr> + transceiver); + + // Destroys the RTP data channel transport and/or the SCTP data channel + // transport and clears it. + void DestroyDataChannelTransport(); + + // Destroys the given ChannelInterface. + // The channel cannot be accessed after this method is called. + void DestroyChannelInterface(cricket::ChannelInterface* channel); + // Generates MediaDescriptionOptions for the |session_opts| based on existing + // local description or remote description. 
+ + void GenerateMediaDescriptionOptions( + const SessionDescriptionInterface* session_desc, + RtpTransceiverDirection audio_direction, + RtpTransceiverDirection video_direction, + absl::optional* audio_index, + absl::optional* video_index, + absl::optional* data_index, + cricket::MediaSessionOptions* session_options); + + // Generates the active MediaDescriptionOptions for the local data channel + // given the specified MID. + cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForActiveData( + const std::string& mid) const; + + // Generates the rejected MediaDescriptionOptions for the local data channel + // given the specified MID. + cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData( + const std::string& mid) const; + + const std::string GetTransportName(const std::string& content_name); + // Based on number of transceivers per media type, enabled or disable + // payload type based demuxing in the affected channels. + bool UpdatePayloadTypeDemuxingState(cricket::ContentSource source); + + // ================================================================== + // Access to pc_ variables + cricket::ChannelManager* channel_manager() const; + TransceiverList* transceivers(); + const TransceiverList* transceivers() const; + DataChannelController* data_channel_controller(); + const DataChannelController* data_channel_controller() const; + cricket::PortAllocator* port_allocator(); + const cricket::PortAllocator* port_allocator() const; + RtpTransmissionManager* rtp_manager(); + const RtpTransmissionManager* rtp_manager() const; + JsepTransportController* transport_controller(); + const JsepTransportController* transport_controller() const; + // =================================================================== + const cricket::AudioOptions& audio_options() { return audio_options_; } + const cricket::VideoOptions& video_options() { return video_options_; } + + PeerConnection* const pc_; + + std::unique_ptr webrtc_session_desc_factory_ + 
RTC_GUARDED_BY(signaling_thread()); + + std::unique_ptr current_local_description_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr pending_local_description_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr current_remote_description_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr pending_remote_description_ + RTC_GUARDED_BY(signaling_thread()); + + PeerConnectionInterface::SignalingState signaling_state_ + RTC_GUARDED_BY(signaling_thread()) = PeerConnectionInterface::kStable; + + // Whether this peer is the caller. Set when the local description is applied. + absl::optional is_caller_ RTC_GUARDED_BY(signaling_thread()); + + // Streams added via AddStream. + const rtc::scoped_refptr local_streams_ + RTC_GUARDED_BY(signaling_thread()); + // Streams created as a result of SetRemoteDescription. + const rtc::scoped_refptr remote_streams_ + RTC_GUARDED_BY(signaling_thread()); + + std::vector> stream_observers_ + RTC_GUARDED_BY(signaling_thread()); + + // The operations chain is used by the offer/answer exchange methods to ensure + // they are executed in the right order. For example, if + // SetRemoteDescription() is invoked while CreateOffer() is still pending, the + // SRD operation will not start until CreateOffer() has completed. See + // https://w3c.github.io/webrtc-pc/#dfn-operations-chain. + rtc::scoped_refptr operations_chain_ + RTC_GUARDED_BY(signaling_thread()); + + // One PeerConnection has only one RTCP CNAME. + // https://tools.ietf.org/html/draft-ietf-rtcweb-rtp-usage-26#section-4.9 + const std::string rtcp_cname_; + + // MIDs will be generated using this generator which will keep track of + // all the MIDs that have been seen over the life of the PeerConnection. + rtc::UniqueStringGenerator mid_generator_ RTC_GUARDED_BY(signaling_thread()); + + // List of content names for which the remote side triggered an ICE restart. 
+ std::set pending_ice_restarts_ + RTC_GUARDED_BY(signaling_thread()); + + std::unique_ptr + local_ice_credentials_to_replace_ RTC_GUARDED_BY(signaling_thread()); + + bool remote_peer_supports_msid_ RTC_GUARDED_BY(signaling_thread()) = false; + bool is_negotiation_needed_ RTC_GUARDED_BY(signaling_thread()) = false; + uint32_t negotiation_needed_event_id_ = 0; + bool update_negotiation_needed_on_empty_chain_ + RTC_GUARDED_BY(signaling_thread()) = false; + + // In Unified Plan, if we encounter remote SDP that does not contain an a=msid + // line we create and use a stream with a random ID for our receivers. This is + // to support legacy endpoints that do not support the a=msid attribute (as + // opposed to streamless tracks with "a=msid:-"). + rtc::scoped_refptr missing_msid_default_stream_ + RTC_GUARDED_BY(signaling_thread()); + + // Used when rolling back RTP data channels. + bool have_pending_rtp_data_channel_ RTC_GUARDED_BY(signaling_thread()) = + false; + + // Updates the error state, signaling if necessary. + void SetSessionError(SessionError error, const std::string& error_desc); + + SessionError session_error_ RTC_GUARDED_BY(signaling_thread()) = + SessionError::kNone; + std::string session_error_desc_ RTC_GUARDED_BY(signaling_thread()); + + // Member variables for caching global options. + cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); + cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); + + // This object should be used to generate any SSRC that is not explicitly + // specified by the user (or by the remote party). + // The generator is not used directly, instead it is passed on to the + // channel manager and the session description factory. + rtc::UniqueRandomIdGenerator ssrc_generator_ + RTC_GUARDED_BY(signaling_thread()); + + // A video bitrate allocator factory. + // This can be injected using the PeerConnectionDependencies, + // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called. 
+ // Note that one can still choose to override this in a MediaEngine + // if one wants too. + std::unique_ptr + video_bitrate_allocator_factory_; + + rtc::WeakPtrFactory weak_ptr_factory_ + RTC_GUARDED_BY(signaling_thread()); +}; + +} // namespace webrtc + +#endif // PC_SDP_OFFER_ANSWER_H_ diff --git a/pc/sdp_state_provider.h b/pc/sdp_state_provider.h new file mode 100644 index 0000000000..23ffc91bd9 --- /dev/null +++ b/pc/sdp_state_provider.h @@ -0,0 +1,54 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_SDP_STATE_PROVIDER_H_ +#define PC_SDP_STATE_PROVIDER_H_ + +#include + +#include "api/jsep.h" +#include "api/peer_connection_interface.h" + +namespace webrtc { + +// This interface provides access to the state of an SDP offer/answer +// negotiation. +// +// All the functions are const, so using this interface serves as +// assurance that the user is not modifying the state. +class SdpStateProvider { + public: + virtual ~SdpStateProvider() {} + + virtual PeerConnectionInterface::SignalingState signaling_state() const = 0; + + virtual const SessionDescriptionInterface* local_description() const = 0; + virtual const SessionDescriptionInterface* remote_description() const = 0; + virtual const SessionDescriptionInterface* current_local_description() + const = 0; + virtual const SessionDescriptionInterface* current_remote_description() + const = 0; + virtual const SessionDescriptionInterface* pending_local_description() + const = 0; + virtual const SessionDescriptionInterface* pending_remote_description() + const = 0; + + // Whether an ICE restart has been asked for. Used in CreateOffer. 
+ virtual bool NeedsIceRestart(const std::string& content_name) const = 0; + // Whether an ICE restart was indicated in the remote offer. + // Used in CreateAnswer. + virtual bool IceRestartPending(const std::string& content_name) const = 0; + virtual absl::optional GetDtlsRole( + const std::string& mid) const = 0; +}; + +} // namespace webrtc + +#endif // PC_SDP_STATE_PROVIDER_H_ diff --git a/pc/session_description.cc b/pc/session_description.cc index 4881f4d293..87d6667270 100644 --- a/pc/session_description.cc +++ b/pc/session_description.cc @@ -262,13 +262,6 @@ const ContentGroup* SessionDescription::GetGroupByName( } ContentInfo::~ContentInfo() { - if (description_ && description_.get() != description) { - // If description_ is null, we assume that a move operator - // has been applied. - RTC_LOG(LS_ERROR) << "ContentInfo::description has been updated by " - "assignment. This usage is deprecated."; - description_.reset(description); // ensure that it is destroyed. - } } // Copy operator. @@ -277,8 +270,7 @@ ContentInfo::ContentInfo(const ContentInfo& o) type(o.type), rejected(o.rejected), bundle_only(o.bundle_only), - description_(o.description_->Clone()), - description(description_.get()) {} + description_(o.description_->Clone()) {} ContentInfo& ContentInfo::operator=(const ContentInfo& o) { name = o.name; @@ -286,29 +278,14 @@ ContentInfo& ContentInfo::operator=(const ContentInfo& o) { rejected = o.rejected; bundle_only = o.bundle_only; description_ = o.description_->Clone(); - description = description_.get(); return *this; } const MediaContentDescription* ContentInfo::media_description() const { - if (description_.get() != description) { - // Someone's updated |description|, or used a move operator - // on the record. - RTC_LOG(LS_ERROR) << "ContentInfo::description has been updated by " - "assignment. 
This usage is deprecated."; - const_cast(this)->description_.reset(description); - } return description_.get(); } MediaContentDescription* ContentInfo::media_description() { - if (description_.get() != description) { - // Someone's updated |description|, or used a move operator - // on the record. - RTC_LOG(LS_ERROR) << "ContentInfo::description has been updated by " - "assignment. This usage is deprecated."; - description_.reset(description); - } return description_.get(); } diff --git a/pc/session_description.h b/pc/session_description.h index 7546d12bcb..3cdd09092a 100644 --- a/pc/session_description.h +++ b/pc/session_description.h @@ -26,6 +26,7 @@ #include "api/rtp_parameters.h" #include "api/rtp_transceiver_interface.h" #include "media/base/media_channel.h" +#include "media/base/media_constants.h" #include "media/base/stream_params.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" @@ -57,6 +58,7 @@ class AudioContentDescription; class VideoContentDescription; class RtpDataContentDescription; class SctpDataContentDescription; +class UnsupportedContentDescription; // Describes a session description media section. There are subclasses for each // media type (audio, video, data) that will have additional information. @@ -85,11 +87,19 @@ class MediaContentDescription { virtual SctpDataContentDescription* as_sctp() { return nullptr; } virtual const SctpDataContentDescription* as_sctp() const { return nullptr; } + virtual UnsupportedContentDescription* as_unsupported() { return nullptr; } + virtual const UnsupportedContentDescription* as_unsupported() const { + return nullptr; + } + virtual bool has_codecs() const = 0; - virtual MediaContentDescription* Copy() const = 0; - virtual std::unique_ptr Clone() const { - return absl::WrapUnique(Copy()); + // Copy operator that returns an unique_ptr. + // Not a virtual function. 
+ // If a type-specific variant of Clone() is desired, override it, or + // simply use std::make_unique(*this) instead of Clone(). + std::unique_ptr Clone() const { + return absl::WrapUnique(CloneInternal()); } // |protocol| is the expected media transport protocol, such as RTP/AVPF, @@ -123,6 +133,10 @@ class MediaContentDescription { virtual int bandwidth() const { return bandwidth_; } virtual void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; } + virtual std::string bandwidth_type() const { return bandwidth_type_; } + virtual void set_bandwidth_type(std::string bandwidth_type) { + bandwidth_type_ = bandwidth_type; + } virtual const std::vector& cryptos() const { return cryptos_; } virtual void AddCrypto(const CryptoParams& params) { @@ -144,13 +158,6 @@ class MediaContentDescription { rtp_header_extensions_.push_back(ext); rtp_header_extensions_set_ = true; } - virtual void AddRtpHeaderExtension(const cricket::RtpHeaderExtension& ext) { - webrtc::RtpExtension webrtc_extension; - webrtc_extension.uri = ext.uri; - webrtc_extension.id = ext.id; - rtp_header_extensions_.push_back(webrtc_extension); - rtp_header_extensions_set_ = true; - } virtual void ClearRtpHeaderExtensions() { rtp_header_extensions_.clear(); rtp_header_extensions_set_ = true; @@ -204,6 +211,9 @@ class MediaContentDescription { virtual void set_conference_mode(bool enable) { conference_mode_ = enable; } virtual bool conference_mode() const { return conference_mode_; } + virtual void set_quality(int quality) { quality_ = quality; } + virtual int quality() const { return quality_; } + // https://tools.ietf.org/html/rfc4566#section-5.7 // May be present at the media or session level of SDP. If present at both // levels, the media-level attribute overwrites the session-level one. 
@@ -250,18 +260,13 @@ class MediaContentDescription { receive_rids_ = rids; } - virtual const absl::optional& alt_protocol() const { - return alt_protocol_; - } - virtual void set_alt_protocol(const absl::optional& protocol) { - alt_protocol_ = protocol; - } - protected: bool rtcp_mux_ = false; bool rtcp_reduced_size_ = false; bool remote_estimate_ = false; int bandwidth_ = kAutoBandwidth; + std::string bandwidth_type_ = kApplicationSpecificBandwidth; + int quality_ = 5; std::string protocol_; std::vector cryptos_; std::vector rtp_header_extensions_; @@ -279,13 +284,13 @@ class MediaContentDescription { SimulcastDescription simulcast_; std::vector receive_rids_; - absl::optional alt_protocol_; + private: + // Copy function that returns a raw pointer. Caller will assert ownership. + // Should only be called by the Clone() function. Must be implemented + // by each final subclass. + virtual MediaContentDescription* CloneInternal() const = 0; }; -// TODO(bugs.webrtc.org/8620): Remove this alias once downstream projects have -// updated. 
-using ContentDescription = MediaContentDescription; - template class MediaContentDescriptionImpl : public MediaContentDescription { public: @@ -337,34 +342,40 @@ class AudioContentDescription : public MediaContentDescriptionImpl { public: AudioContentDescription() {} - virtual AudioContentDescription* Copy() const { - return new AudioContentDescription(*this); - } virtual MediaType type() const { return MEDIA_TYPE_AUDIO; } virtual AudioContentDescription* as_audio() { return this; } virtual const AudioContentDescription* as_audio() const { return this; } + + private: + virtual AudioContentDescription* CloneInternal() const { + return new AudioContentDescription(*this); + } }; class VideoContentDescription : public MediaContentDescriptionImpl { public: - virtual VideoContentDescription* Copy() const { - return new VideoContentDescription(*this); - } virtual MediaType type() const { return MEDIA_TYPE_VIDEO; } virtual VideoContentDescription* as_video() { return this; } virtual const VideoContentDescription* as_video() const { return this; } + + private: + virtual VideoContentDescription* CloneInternal() const { + return new VideoContentDescription(*this); + } }; class RtpDataContentDescription : public MediaContentDescriptionImpl { public: RtpDataContentDescription() {} - RtpDataContentDescription* Copy() const override { - return new RtpDataContentDescription(*this); - } MediaType type() const override { return MEDIA_TYPE_DATA; } RtpDataContentDescription* as_rtp_data() override { return this; } const RtpDataContentDescription* as_rtp_data() const override { return this; } + + private: + RtpDataContentDescription* CloneInternal() const override { + return new RtpDataContentDescription(*this); + } }; class SctpDataContentDescription : public MediaContentDescription { @@ -375,9 +386,6 @@ class SctpDataContentDescription : public MediaContentDescription { use_sctpmap_(o.use_sctpmap_), port_(o.port_), max_message_size_(o.max_message_size_) {} - 
SctpDataContentDescription* Copy() const override { - return new SctpDataContentDescription(*this); - } MediaType type() const override { return MEDIA_TYPE_DATA; } SctpDataContentDescription* as_sctp() override { return this; } const SctpDataContentDescription* as_sctp() const override { return this; } @@ -398,6 +406,9 @@ class SctpDataContentDescription : public MediaContentDescription { } private: + SctpDataContentDescription* CloneInternal() const override { + return new SctpDataContentDescription(*this); + } bool use_sctpmap_ = true; // Note: "true" is no longer conformant. // Defaults should be constants imported from SCTP. Quick hack. int port_ = 5000; @@ -405,19 +416,39 @@ class SctpDataContentDescription : public MediaContentDescription { int max_message_size_ = 64 * 1024; }; +class UnsupportedContentDescription : public MediaContentDescription { + public: + explicit UnsupportedContentDescription(const std::string& media_type) + : media_type_(media_type) {} + MediaType type() const override { return MEDIA_TYPE_UNSUPPORTED; } + + UnsupportedContentDescription* as_unsupported() override { return this; } + const UnsupportedContentDescription* as_unsupported() const override { + return this; + } + + bool has_codecs() const override { return false; } + const std::string& media_type() const { return media_type_; } + + private: + UnsupportedContentDescription* CloneInternal() const override { + return new UnsupportedContentDescription(*this); + } + + std::string media_type_; +}; + // Protocol used for encoding media. This is the "top level" protocol that may // be wrapped by zero or many transport protocols (UDP, ICE, etc.). enum class MediaProtocolType { - kRtp, // Section will use the RTP protocol (e.g., for audio or video). - // https://tools.ietf.org/html/rfc3550 - kSctp // Section will use the SCTP protocol (e.g., for a data channel). - // https://tools.ietf.org/html/rfc4960 + kRtp, // Section will use the RTP protocol (e.g., for audio or video). 
+ // https://tools.ietf.org/html/rfc3550 + kSctp, // Section will use the SCTP protocol (e.g., for a data channel). + // https://tools.ietf.org/html/rfc4960 + kOther // Section will use another top protocol which is not + // explicitly supported. }; -// TODO(bugs.webrtc.org/8620): Remove once downstream projects have updated. -constexpr MediaProtocolType NS_JINGLE_RTP = MediaProtocolType::kRtp; -constexpr MediaProtocolType NS_JINGLE_DRAFT_SCTP = MediaProtocolType::kSctp; - // Represents a session description section. Most information about the section // is stored in the description, which is a subclass of MediaContentDescription. // Owns the description. @@ -441,8 +472,6 @@ class RTC_EXPORT ContentInfo { void set_media_description(std::unique_ptr desc) { description_ = std::move(desc); - // For backwards compatibility only. - description = description_.get(); } // TODO(bugs.webrtc.org/8620): Rename this to mid. @@ -454,10 +483,6 @@ class RTC_EXPORT ContentInfo { private: friend class SessionDescription; std::unique_ptr description_; - - public: - // Kept for backwards compatibility only. 
- MediaContentDescription* description = nullptr; }; typedef std::vector ContentNames; @@ -541,29 +566,6 @@ class SessionDescription { bool bundle_only, std::unique_ptr description); void AddContent(ContentInfo&& content); - RTC_DEPRECATED void AddContent(const std::string& name, - MediaProtocolType type, - MediaContentDescription* description) { - AddContent(name, type, absl::WrapUnique(description)); - } - RTC_DEPRECATED void AddContent(const std::string& name, - MediaProtocolType type, - bool rejected, - MediaContentDescription* description) { - AddContent(name, type, rejected, absl::WrapUnique(description)); - } - RTC_DEPRECATED void AddContent(const std::string& name, - MediaProtocolType type, - bool rejected, - bool bundle_only, - MediaContentDescription* description) { - AddContent(name, type, rejected, bundle_only, - absl::WrapUnique(description)); - } - - RTC_DEPRECATED void AddContent(ContentInfo* content) { - AddContent(std::move(*content)); - } bool RemoveContentByName(const std::string& name); diff --git a/pc/srtp_filter.cc b/pc/srtp_filter.cc index d4ad3bb063..bd48eac83d 100644 --- a/pc/srtp_filter.cc +++ b/pc/srtp_filter.cc @@ -14,6 +14,7 @@ #include +#include "absl/strings/match.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/third_party/base64/base64.h" @@ -257,7 +258,7 @@ bool SrtpFilter::ParseKeyParams(const std::string& key_params, // example key_params: "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2" // Fail if key-method is wrong. 
- if (key_params.find("inline:") != 0) { + if (!absl::StartsWith(key_params, "inline:")) { return false; } diff --git a/pc/srtp_filter.h b/pc/srtp_filter.h index 5b6c99dcb5..fc60a356fe 100644 --- a/pc/srtp_filter.h +++ b/pc/srtp_filter.h @@ -24,7 +24,6 @@ #include "pc/session_description.h" #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/thread_checker.h" diff --git a/pc/srtp_session.cc b/pc/srtp_session.cc index 5ded455ee5..3aa488003f 100644 --- a/pc/srtp_session.cc +++ b/pc/srtp_session.cc @@ -13,7 +13,6 @@ #include "absl/base/attributes.h" #include "media/base/rtp_utils.h" #include "pc/external_hmac.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" #include "system_wrappers/include/metrics.h" @@ -364,16 +363,16 @@ bool SrtpSession::UpdateKey(int type, } ABSL_CONST_INIT int g_libsrtp_usage_count = 0; -ABSL_CONST_INIT rtc::GlobalLock g_libsrtp_lock; +ABSL_CONST_INIT webrtc::GlobalMutex g_libsrtp_lock(absl::kConstInit); void ProhibitLibsrtpInitialization() { - rtc::GlobalLockScope ls(&g_libsrtp_lock); + webrtc::GlobalMutexLock ls(&g_libsrtp_lock); ++g_libsrtp_usage_count; } // static bool SrtpSession::IncrementLibsrtpUsageCountAndMaybeInit() { - rtc::GlobalLockScope ls(&g_libsrtp_lock); + webrtc::GlobalMutexLock ls(&g_libsrtp_lock); RTC_DCHECK_GE(g_libsrtp_usage_count, 0); if (g_libsrtp_usage_count == 0) { @@ -402,7 +401,7 @@ bool SrtpSession::IncrementLibsrtpUsageCountAndMaybeInit() { // static void SrtpSession::DecrementLibsrtpUsageCountAndMaybeDeinit() { - rtc::GlobalLockScope ls(&g_libsrtp_lock); + webrtc::GlobalMutexLock ls(&g_libsrtp_lock); RTC_DCHECK_GE(g_libsrtp_usage_count, 1); if (--g_libsrtp_usage_count == 0) { diff --git a/pc/srtp_session.h b/pc/srtp_session.h index 0a26c02c9f..84445965b2 100644 --- a/pc/srtp_session.h +++ b/pc/srtp_session.h @@ -14,6 +14,8 @@ #include 
#include "api/scoped_refptr.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" // Forward declaration to avoid pulling in libsrtp headers here @@ -123,7 +125,7 @@ class SrtpSession { int rtp_auth_tag_len_ = 0; int rtcp_auth_tag_len_ = 0; bool inited_ = false; - static rtc::GlobalLock lock_; + static webrtc::GlobalMutex lock_; int last_send_seq_num_ = -1; bool external_auth_active_ = false; bool external_auth_enabled_ = false; diff --git a/pc/srtp_transport.cc b/pc/srtp_transport.cc index 6306d5006b..71a58d0850 100644 --- a/pc/srtp_transport.cc +++ b/pc/srtp_transport.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/strings/match.h" #include "media/base/rtp_utils.h" #include "pc/rtp_transport.h" #include "pc/srtp_session.h" @@ -493,7 +494,7 @@ bool SrtpTransport::ParseKeyParams(const std::string& key_params, // example key_params: "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2" // Fail if key-method is wrong. 
- if (key_params.find("inline:") != 0) { + if (!absl::StartsWith(key_params, "inline:")) { return false; } diff --git a/pc/stats_collector.cc b/pc/stats_collector.cc index c5999dacdb..cf3cafa457 100644 --- a/pc/stats_collector.cc +++ b/pc/stats_collector.cc @@ -16,7 +16,6 @@ #include #include "pc/channel.h" -#include "pc/peer_connection.h" #include "rtc_base/checks.h" #include "rtc_base/third_party/base64/base64.h" #include "system_wrappers/include/field_trial.h" @@ -307,6 +306,7 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, {StatsReport::kStatsValueNameRenderDelayMs, info.render_delay_ms}, {StatsReport::kStatsValueNameTargetDelayMs, info.target_delay_ms}, {StatsReport::kStatsValueNameFramesDecoded, info.frames_decoded}, + {StatsReport::kStatsValueNameFramesRendered, info.frames_rendered}, }; for (const auto& i : ints) @@ -483,6 +483,10 @@ const char* AdapterTypeToStatsType(rtc::AdapterType type) { case rtc::ADAPTER_TYPE_WIFI: return STATSREPORT_ADAPTER_TYPE_WIFI; case rtc::ADAPTER_TYPE_CELLULAR: + case rtc::ADAPTER_TYPE_CELLULAR_2G: + case rtc::ADAPTER_TYPE_CELLULAR_3G: + case rtc::ADAPTER_TYPE_CELLULAR_4G: + case rtc::ADAPTER_TYPE_CELLULAR_5G: return STATSREPORT_ADAPTER_TYPE_WWAN; case rtc::ADAPTER_TYPE_VPN: return STATSREPORT_ADAPTER_TYPE_VPN; @@ -987,7 +991,8 @@ class VoiceMediaChannelStatsGatherer final : public MediaChannelStatsGatherer { } bool GetStatsOnWorkerThread() override { - return voice_media_channel_->GetStats(&voice_media_info); + return voice_media_channel_->GetStats(&voice_media_info, + /*get_and_clear_legacy_stats=*/true); } void ExtractStats(StatsCollector* collector) const override { @@ -1024,7 +1029,7 @@ class VideoMediaChannelStatsGatherer final : public MediaChannelStatsGatherer { void ExtractStats(StatsCollector* collector) const override { ExtractSenderReceiverStats(collector, video_media_info.receivers, - video_media_info.senders); + video_media_info.aggregated_senders); } bool HasRemoteAudio() const override { return 
false; } @@ -1142,19 +1147,20 @@ void StatsCollector::ExtractDataInfo() { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - for (const auto& dc : pc_->sctp_data_channels()) { + std::vector data_stats = pc_->GetDataChannelStats(); + for (const auto& stats : data_stats) { StatsReport::Id id(StatsReport::NewTypedIntId( - StatsReport::kStatsReportTypeDataChannel, dc->id())); + StatsReport::kStatsReportTypeDataChannel, stats.id)); StatsReport* report = reports_.ReplaceOrAddNew(id); report->set_timestamp(stats_gathering_started_); - report->AddString(StatsReport::kStatsValueNameLabel, dc->label()); + report->AddString(StatsReport::kStatsValueNameLabel, stats.label); // Filter out the initial id (-1). - if (dc->id() >= 0) { - report->AddInt(StatsReport::kStatsValueNameDataChannelId, dc->id()); + if (stats.id >= 0) { + report->AddInt(StatsReport::kStatsValueNameDataChannelId, stats.id); } - report->AddString(StatsReport::kStatsValueNameProtocol, dc->protocol()); + report->AddString(StatsReport::kStatsValueNameProtocol, stats.protocol); report->AddString(StatsReport::kStatsValueNameState, - DataChannelInterface::DataStateString(dc->state())); + DataChannelInterface::DataStateString(stats.state)); } } diff --git a/pc/stats_collector.h b/pc/stats_collector.h index 041fe2f8fe..befbcabbf0 100644 --- a/pc/stats_collector.h +++ b/pc/stats_collector.h @@ -27,6 +27,7 @@ #include "api/stats_types.h" #include "p2p/base/port.h" #include "pc/peer_connection_internal.h" +#include "pc/stats_collector_interface.h" #include "rtc_base/network_constants.h" #include "rtc_base/ssl_certificate.h" @@ -44,7 +45,7 @@ const char* AdapterTypeToStatsType(rtc::AdapterType type); // A mapping between track ids and their StatsReport. typedef std::map TrackIdMap; -class StatsCollector { +class StatsCollector : public StatsCollectorInterface { public: // The caller is responsible for ensuring that the pc outlives the // StatsCollector instance. 
@@ -57,11 +58,13 @@ class StatsCollector { void AddTrack(MediaStreamTrackInterface* track); // Adds a local audio track that is used for getting some voice statistics. - void AddLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc); + void AddLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) override; // Removes a local audio tracks that is used for getting some voice // statistics. - void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc); + void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) override; // Gather statistics from the session and store them for future use. void UpdateStats(PeerConnectionInterface::StatsOutputLevel level); @@ -74,7 +77,8 @@ class StatsCollector { // of filling in |reports|. As is, there's a requirement that the caller // uses |reports| immediately without allowing any async activity on // the thread (message handling etc) and then discard the results. - void GetStats(MediaStreamTrackInterface* track, StatsReports* reports); + void GetStats(MediaStreamTrackInterface* track, + StatsReports* reports) override; // Prepare a local or remote SSRC report for the given ssrc. Used internally // in the ExtractStatsFromList template. diff --git a/pc/stats_collector_interface.h b/pc/stats_collector_interface.h new file mode 100644 index 0000000000..4d5c98a4ab --- /dev/null +++ b/pc/stats_collector_interface.h @@ -0,0 +1,43 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +// This file contains an interface for the (obsolete) StatsCollector class that +// is used by compilation units that do not wish to depend on the StatsCollector +// implementation. + +#ifndef PC_STATS_COLLECTOR_INTERFACE_H_ +#define PC_STATS_COLLECTOR_INTERFACE_H_ + +#include + +#include "api/media_stream_interface.h" +#include "api/stats_types.h" + +namespace webrtc { + +class StatsCollectorInterface { + public: + virtual ~StatsCollectorInterface() {} + + // Adds a local audio track that is used for getting some voice statistics. + virtual void AddLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) = 0; + + // Removes a local audio tracks that is used for getting some voice + // statistics. + virtual void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, + uint32_t ssrc) = 0; + virtual void GetStats(MediaStreamTrackInterface* track, + StatsReports* reports) = 0; +}; + +} // namespace webrtc + +#endif // PC_STATS_COLLECTOR_INTERFACE_H_ diff --git a/pc/stats_collector_unittest.cc b/pc/stats_collector_unittest.cc index c6b57c278e..3767081b56 100644 --- a/pc/stats_collector_unittest.cc +++ b/pc/stats_collector_unittest.cc @@ -19,13 +19,13 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/candidate.h" #include "api/data_channel_interface.h" +#include "api/media_stream_track.h" #include "api/scoped_refptr.h" #include "call/call.h" #include "media/base/media_channel.h" #include "modules/audio_processing/include/audio_processing_statistics.h" -#include "pc/data_channel.h" #include "pc/media_stream.h" -#include "pc/media_stream_track.h" +#include "pc/sctp_data_channel.h" #include "pc/test/fake_peer_connection_for_stats.h" #include "pc/test/fake_video_track_source.h" #include "pc/test/mock_rtp_receiver_internal.h" @@ -682,8 +682,7 @@ class StatsCollectorTest : public ::testing::Test { // Fake certificate to report. 
rtc::scoped_refptr local_certificate( - rtc::RTCCertificate::Create( - std::unique_ptr(local_identity.GetReference()))); + rtc::RTCCertificate::Create(local_identity.Clone())); pc->SetLocalCertificate(kTransportName, local_certificate); pc->SetRemoteCertChain(kTransportName, remote_identity.cert_chain().Clone()); @@ -913,7 +912,7 @@ TEST_P(StatsCollectorTrackTest, BytesCounterHandles64Bits) { video_sender_info.payload_bytes_sent = kBytesSent; video_sender_info.header_and_padding_bytes_sent = 0; VideoMediaInfo video_info; - video_info.senders.push_back(video_sender_info); + video_info.aggregated_senders.push_back(video_sender_info); auto* video_media_channel = pc->AddVideoChannel("video", "transport"); video_media_channel->SetStats(video_info); @@ -996,7 +995,7 @@ TEST_P(StatsCollectorTrackTest, VideoBandwidthEstimationInfoIsReported) { video_sender_info.header_and_padding_bytes_sent = 12; VideoMediaInfo video_info; - video_info.senders.push_back(video_sender_info); + video_info.aggregated_senders.push_back(video_sender_info); auto* video_media_channel = pc->AddVideoChannel("video", "transport"); video_media_channel->SetStats(video_info); @@ -1094,7 +1093,7 @@ TEST_P(StatsCollectorTrackTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) { video_sender_info.payload_bytes_sent = kBytesSent - 12; video_sender_info.header_and_padding_bytes_sent = 12; VideoMediaInfo video_info; - video_info.senders.push_back(video_sender_info); + video_info.aggregated_senders.push_back(video_sender_info); auto* video_media_channel = pc->AddVideoChannel("video", "transport"); video_media_channel->SetStats(video_info); @@ -1149,7 +1148,7 @@ TEST_P(StatsCollectorTrackTest, TransportObjectLinkedFromSsrcObject) { video_sender_info.payload_bytes_sent = kBytesSent - 12; video_sender_info.header_and_padding_bytes_sent = 12; VideoMediaInfo video_info; - video_info.senders.push_back(video_sender_info); + video_info.aggregated_senders.push_back(video_sender_info); auto* video_media_channel = 
pc->AddVideoChannel("video", "transport"); video_media_channel->SetStats(video_info); @@ -1212,7 +1211,7 @@ TEST_P(StatsCollectorTrackTest, RemoteSsrcInfoIsPresent) { video_sender_info.add_ssrc(kSsrcOfTrack); video_sender_info.remote_stats.push_back(remote_ssrc_stats); VideoMediaInfo video_info; - video_info.senders.push_back(video_sender_info); + video_info.aggregated_senders.push_back(video_sender_info); auto* video_media_channel = pc->AddVideoChannel("video", "transport"); video_media_channel->SetStats(video_info); @@ -1854,7 +1853,7 @@ TEST_P(StatsCollectorTrackTest, VerifyVideoSendSsrcStats) { video_sender_info.frames_encoded = 10; video_sender_info.qp_sum = 11; VideoMediaInfo video_info; - video_info.senders.push_back(video_sender_info); + video_info.aggregated_senders.push_back(video_sender_info); auto* video_media_channel = pc->AddVideoChannel("video", "transport"); video_media_channel->SetStats(video_info); diff --git a/pc/test/fake_audio_capture_module.cc b/pc/test/fake_audio_capture_module.cc index db0886ddad..a395df0409 100644 --- a/pc/test/fake_audio_capture_module.cc +++ b/pc/test/fake_audio_capture_module.cc @@ -47,7 +47,9 @@ FakeAudioCaptureModule::FakeAudioCaptureModule() current_mic_level_(kMaxVolume), started_(false), next_frame_time_(0), - frames_received_(0) {} + frames_received_(0) { + process_thread_checker_.Detach(); +} FakeAudioCaptureModule::~FakeAudioCaptureModule() { if (process_thread_) { @@ -65,7 +67,7 @@ rtc::scoped_refptr FakeAudioCaptureModule::Create() { } int FakeAudioCaptureModule::frames_received() const { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return frames_received_; } @@ -77,7 +79,7 @@ int32_t FakeAudioCaptureModule::ActiveAudioLayer( int32_t FakeAudioCaptureModule::RegisterAudioCallback( webrtc::AudioTransport* audio_callback) { - rtc::CritScope cs(&crit_callback_); + webrtc::MutexLock lock(&mutex_); audio_callback_ = audio_callback; return 0; } @@ -181,7 +183,7 @@ int32_t 
FakeAudioCaptureModule::StartPlayout() { return -1; } { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); playing_ = true; } bool start = true; @@ -192,7 +194,7 @@ int32_t FakeAudioCaptureModule::StartPlayout() { int32_t FakeAudioCaptureModule::StopPlayout() { bool start = false; { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); playing_ = false; start = ShouldStartProcessing(); } @@ -201,7 +203,7 @@ int32_t FakeAudioCaptureModule::StopPlayout() { } bool FakeAudioCaptureModule::Playing() const { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return playing_; } @@ -210,7 +212,7 @@ int32_t FakeAudioCaptureModule::StartRecording() { return -1; } { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); recording_ = true; } bool start = true; @@ -221,7 +223,7 @@ int32_t FakeAudioCaptureModule::StartRecording() { int32_t FakeAudioCaptureModule::StopRecording() { bool start = false; { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); recording_ = false; start = ShouldStartProcessing(); } @@ -230,7 +232,7 @@ int32_t FakeAudioCaptureModule::StopRecording() { } bool FakeAudioCaptureModule::Recording() const { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return recording_; } @@ -288,13 +290,13 @@ int32_t FakeAudioCaptureModule::MicrophoneVolumeIsAvailable( } int32_t FakeAudioCaptureModule::SetMicrophoneVolume(uint32_t volume) { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); current_mic_level_ = volume; return 0; } int32_t FakeAudioCaptureModule::MicrophoneVolume(uint32_t* volume) const { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); *volume = current_mic_level_; return 0; } @@ -448,29 +450,34 @@ void FakeAudioCaptureModule::UpdateProcessing(bool start) { if (process_thread_) { process_thread_->Stop(); process_thread_.reset(nullptr); + process_thread_checker_.Detach(); } + webrtc::MutexLock lock(&mutex_); started_ = false; } } void 
FakeAudioCaptureModule::StartProcessP() { - RTC_CHECK(process_thread_->IsCurrent()); - if (started_) { - // Already started. - return; + RTC_DCHECK_RUN_ON(&process_thread_checker_); + { + webrtc::MutexLock lock(&mutex_); + if (started_) { + // Already started. + return; + } } ProcessFrameP(); } void FakeAudioCaptureModule::ProcessFrameP() { - RTC_CHECK(process_thread_->IsCurrent()); - if (!started_) { - next_frame_time_ = rtc::TimeMillis(); - started_ = true; - } - + RTC_DCHECK_RUN_ON(&process_thread_checker_); { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); + if (!started_) { + next_frame_time_ = rtc::TimeMillis(); + started_ = true; + } + // Receive and send frames every kTimePerFrameMs. if (playing_) { ReceiveFrameP(); @@ -488,24 +495,22 @@ void FakeAudioCaptureModule::ProcessFrameP() { } void FakeAudioCaptureModule::ReceiveFrameP() { - RTC_CHECK(process_thread_->IsCurrent()); - { - rtc::CritScope cs(&crit_callback_); - if (!audio_callback_) { - return; - } - ResetRecBuffer(); - size_t nSamplesOut = 0; - int64_t elapsed_time_ms = 0; - int64_t ntp_time_ms = 0; - if (audio_callback_->NeedMorePlayData( - kNumberSamples, kNumberBytesPerSample, kNumberOfChannels, - kSamplesPerSecond, rec_buffer_, nSamplesOut, &elapsed_time_ms, - &ntp_time_ms) != 0) { - RTC_NOTREACHED(); - } - RTC_CHECK(nSamplesOut == kNumberSamples); + RTC_DCHECK_RUN_ON(&process_thread_checker_); + if (!audio_callback_) { + return; + } + ResetRecBuffer(); + size_t nSamplesOut = 0; + int64_t elapsed_time_ms = 0; + int64_t ntp_time_ms = 0; + if (audio_callback_->NeedMorePlayData(kNumberSamples, kNumberBytesPerSample, + kNumberOfChannels, kSamplesPerSecond, + rec_buffer_, nSamplesOut, + &elapsed_time_ms, &ntp_time_ms) != 0) { + RTC_NOTREACHED(); } + RTC_CHECK(nSamplesOut == kNumberSamples); + // The SetBuffer() function ensures that after decoding, the audio buffer // should contain samples of similar magnitude (there is likely to be some // distortion due to the audio pipeline). 
If one sample is detected to @@ -513,25 +518,22 @@ void FakeAudioCaptureModule::ReceiveFrameP() { // has been received from the remote side (i.e. faked frames are not being // pulled). if (CheckRecBuffer(kHighSampleValue)) { - rtc::CritScope cs(&crit_); ++frames_received_; } } void FakeAudioCaptureModule::SendFrameP() { - RTC_CHECK(process_thread_->IsCurrent()); - rtc::CritScope cs(&crit_callback_); + RTC_DCHECK_RUN_ON(&process_thread_checker_); if (!audio_callback_) { return; } bool key_pressed = false; - uint32_t current_mic_level = 0; - MicrophoneVolume(&current_mic_level); + uint32_t current_mic_level = current_mic_level_; if (audio_callback_->RecordedDataIsAvailable( send_buffer_, kNumberSamples, kNumberBytesPerSample, kNumberOfChannels, kSamplesPerSecond, kTotalDelayMs, kClockDriftMs, current_mic_level, key_pressed, current_mic_level) != 0) { RTC_NOTREACHED(); } - SetMicrophoneVolume(current_mic_level); + current_mic_level_ = current_mic_level; } diff --git a/pc/test/fake_audio_capture_module.h b/pc/test/fake_audio_capture_module.h index 0af3810290..ee85c9a490 100644 --- a/pc/test/fake_audio_capture_module.h +++ b/pc/test/fake_audio_capture_module.h @@ -24,15 +24,16 @@ #include "api/scoped_refptr.h" #include "modules/audio_device/include/audio_device.h" -#include "rtc_base/critical_section.h" #include "rtc_base/message_handler.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" namespace rtc { class Thread; } // namespace rtc class FakeAudioCaptureModule : public webrtc::AudioDeviceModule, - public rtc::MessageHandler { + public rtc::MessageHandlerAutoCleanup { public: typedef uint16_t Sample; @@ -47,13 +48,13 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule, // Returns the number of frames that have been successfully pulled by the // instance. Note that correctly detecting success can only be done if the // pulled frame was generated/pushed from a FakeAudioCaptureModule. 
- int frames_received() const; + int frames_received() const RTC_LOCKS_EXCLUDED(mutex_); int32_t ActiveAudioLayer(AudioLayer* audio_layer) const override; // Note: Calling this method from a callback may result in deadlock. - int32_t RegisterAudioCallback( - webrtc::AudioTransport* audio_callback) override; + int32_t RegisterAudioCallback(webrtc::AudioTransport* audio_callback) override + RTC_LOCKS_EXCLUDED(mutex_); int32_t Init() override; int32_t Terminate() override; @@ -80,12 +81,12 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule, int32_t InitRecording() override; bool RecordingIsInitialized() const override; - int32_t StartPlayout() override; - int32_t StopPlayout() override; - bool Playing() const override; - int32_t StartRecording() override; - int32_t StopRecording() override; - bool Recording() const override; + int32_t StartPlayout() RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t StopPlayout() RTC_LOCKS_EXCLUDED(mutex_) override; + bool Playing() const RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t StartRecording() RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t StopRecording() RTC_LOCKS_EXCLUDED(mutex_) override; + bool Recording() const RTC_LOCKS_EXCLUDED(mutex_) override; int32_t InitSpeaker() override; bool SpeakerIsInitialized() const override; @@ -99,8 +100,10 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule, int32_t MinSpeakerVolume(uint32_t* min_volume) const override; int32_t MicrophoneVolumeIsAvailable(bool* available) override; - int32_t SetMicrophoneVolume(uint32_t volume) override; - int32_t MicrophoneVolume(uint32_t* volume) const override; + int32_t SetMicrophoneVolume(uint32_t volume) + RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t MicrophoneVolume(uint32_t* volume) const + RTC_LOCKS_EXCLUDED(mutex_) override; int32_t MaxMicrophoneVolume(uint32_t* max_volume) const override; int32_t MinMicrophoneVolume(uint32_t* min_volume) const override; @@ -170,26 +173,28 @@ class FakeAudioCaptureModule : public 
webrtc::AudioDeviceModule, // Returns true/false depending on if recording or playback has been // enabled/started. - bool ShouldStartProcessing(); + bool ShouldStartProcessing() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Starts or stops the pushing and pulling of audio frames. - void UpdateProcessing(bool start); + void UpdateProcessing(bool start) RTC_LOCKS_EXCLUDED(mutex_); // Starts the periodic calling of ProcessFrame() in a thread safe way. void StartProcessP(); // Periodcally called function that ensures that frames are pulled and pushed // periodically if enabled/started. - void ProcessFrameP(); + void ProcessFrameP() RTC_LOCKS_EXCLUDED(mutex_); // Pulls frames from the registered webrtc::AudioTransport. - void ReceiveFrameP(); + void ReceiveFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Pushes frames to the registered webrtc::AudioTransport. - void SendFrameP(); + void SendFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Callback for playout and recording. - webrtc::AudioTransport* audio_callback_; + webrtc::AudioTransport* audio_callback_ RTC_GUARDED_BY(mutex_); - bool recording_; // True when audio is being pushed from the instance. - bool playing_; // True when audio is being pulled by the instance. + bool recording_ RTC_GUARDED_BY( + mutex_); // True when audio is being pushed from the instance. + bool playing_ RTC_GUARDED_BY( + mutex_); // True when audio is being pulled by the instance. bool play_is_initialized_; // True when the instance is ready to pull audio. bool rec_is_initialized_; // True when the instance is ready to push audio. @@ -197,13 +202,13 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule, // Input to and output from RecordedDataIsAvailable(..) makes it possible to // modify the current mic level. The implementation does not care about the // mic level so it just feeds back what it receives. 
- uint32_t current_mic_level_; + uint32_t current_mic_level_ RTC_GUARDED_BY(mutex_); // next_frame_time_ is updated in a non-drifting manner to indicate the next // wall clock time the next frame should be generated and received. started_ // ensures that next_frame_time_ can be initialized properly on first call. - bool started_; - int64_t next_frame_time_; + bool started_ RTC_GUARDED_BY(mutex_); + int64_t next_frame_time_ RTC_GUARDED_BY(process_thread_checker_); std::unique_ptr process_thread_; @@ -219,10 +224,8 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule, // Protects variables that are accessed from process_thread_ and // the main thread. - rtc::CriticalSection crit_; - // Protects |audio_callback_| that is accessed from process_thread_ and - // the main thread. - rtc::CriticalSection crit_callback_; + mutable webrtc::Mutex mutex_; + webrtc::SequenceChecker process_thread_checker_; }; #endif // PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_ diff --git a/pc/test/fake_audio_capture_module_unittest.cc b/pc/test/fake_audio_capture_module_unittest.cc index 8dd252a733..63b41cdded 100644 --- a/pc/test/fake_audio_capture_module_unittest.cc +++ b/pc/test/fake_audio_capture_module_unittest.cc @@ -15,8 +15,8 @@ #include #include "api/scoped_refptr.h" -#include "rtc_base/critical_section.h" #include "rtc_base/gunit.h" +#include "rtc_base/synchronization/mutex.h" #include "test/gtest.h" class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { @@ -45,7 +45,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { const uint32_t currentMicLevel, const bool keyPressed, uint32_t& newMicLevel) override { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); rec_buffer_bytes_ = nSamples * nBytesPerSample; if ((rec_buffer_bytes_ == 0) || (rec_buffer_bytes_ > @@ -77,7 +77,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { size_t& nSamplesOut, int64_t* elapsed_time_ms, int64_t* ntp_time_ms) 
override { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); ++pull_iterations_; const size_t audio_buffer_size = nSamples * nBytesPerSample; const size_t bytes_out = @@ -91,11 +91,11 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { } int push_iterations() const { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return push_iterations_; } int pull_iterations() const { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return pull_iterations_; } @@ -115,7 +115,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { return min_buffer_size; } - rtc::CriticalSection crit_; + mutable webrtc::Mutex mutex_; int push_iterations_; int pull_iterations_; diff --git a/pc/test/fake_data_channel_provider.h b/pc/test/fake_data_channel_provider.h index 2ada4a992d..7145225ca6 100644 --- a/pc/test/fake_data_channel_provider.h +++ b/pc/test/fake_data_channel_provider.h @@ -13,10 +13,11 @@ #include -#include "pc/data_channel.h" +#include "pc/sctp_data_channel.h" #include "rtc_base/checks.h" -class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { +class FakeDataChannelProvider + : public webrtc::SctpDataChannelProviderInterface { public: FakeDataChannelProvider() : send_blocked_(false), @@ -44,7 +45,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { return true; } - bool ConnectDataChannel(webrtc::DataChannel* data_channel) override { + bool ConnectDataChannel(webrtc::SctpDataChannel* data_channel) override { RTC_CHECK(connected_channels_.find(data_channel) == connected_channels_.end()); if (!transport_available_) { @@ -55,7 +56,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { return true; } - void DisconnectDataChannel(webrtc::DataChannel* data_channel) override { + void DisconnectDataChannel(webrtc::SctpDataChannel* data_channel) override { RTC_CHECK(connected_channels_.find(data_channel) != 
connected_channels_.end()); RTC_LOG(LS_INFO) << "DataChannel disconnected " << data_channel; @@ -77,7 +78,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { recv_ssrcs_.erase(sid); // Unlike the real SCTP transport, act like the closing procedure finished // instantly, doing the same snapshot thing as below. - for (webrtc::DataChannel* ch : std::set( + for (webrtc::SctpDataChannel* ch : std::set( connected_channels_.begin(), connected_channels_.end())) { if (connected_channels_.count(ch)) { ch->OnClosingProcedureComplete(sid); @@ -93,12 +94,12 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { if (!blocked) { // Take a snapshot of the connected channels and check to see whether // each value is still in connected_channels_ before calling - // OnChannelReady(). This avoids problems where the set gets modified - // in response to OnChannelReady(). - for (webrtc::DataChannel* ch : std::set( + // OnTransportReady(). This avoids problems where the set gets modified + // in response to OnTransportReady(). 
+ for (webrtc::SctpDataChannel* ch : std::set( connected_channels_.begin(), connected_channels_.end())) { if (connected_channels_.count(ch)) { - ch->OnChannelReady(true); + ch->OnTransportReady(true); } } } @@ -116,10 +117,10 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { RTC_CHECK(transport_available_); ready_to_send_ = ready; if (ready) { - std::set::iterator it; + std::set::iterator it; for (it = connected_channels_.begin(); it != connected_channels_.end(); ++it) { - (*it)->OnChannelReady(true); + (*it)->OnTransportReady(true); } } } @@ -130,7 +131,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { return last_send_data_params_; } - bool IsConnected(webrtc::DataChannel* data_channel) const { + bool IsConnected(webrtc::SctpDataChannel* data_channel) const { return connected_channels_.find(data_channel) != connected_channels_.end(); } @@ -148,7 +149,7 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface { bool transport_available_; bool ready_to_send_; bool transport_error_; - std::set connected_channels_; + std::set connected_channels_; std::set send_ssrcs_; std::set recv_ssrcs_; }; diff --git a/pc/test/fake_peer_connection_base.h b/pc/test/fake_peer_connection_base.h index 9f82c0a1b3..9531c6de5b 100644 --- a/pc/test/fake_peer_connection_base.h +++ b/pc/test/fake_peer_connection_base.h @@ -217,6 +217,8 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return IceGatheringState::kIceGatheringNew; } + absl::optional can_trickle_ice_candidates() { return absl::nullopt; } + bool StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) override { return false; @@ -246,20 +248,15 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return {}; } - sigslot::signal1& SignalDataChannelCreated() override { - return SignalDataChannelCreated_; + sigslot::signal1& SignalRtpDataChannelCreated() override { + return SignalRtpDataChannelCreated_; } - 
cricket::RtpDataChannel* rtp_data_channel() const override { return nullptr; } - - std::vector> sctp_data_channels() - const override { - return {}; + sigslot::signal1& SignalSctpDataChannelCreated() override { + return SignalSctpDataChannelCreated_; } - absl::optional sctp_content_name() const override { - return absl::nullopt; - } + cricket::RtpDataChannel* rtp_data_channel() const override { return nullptr; } absl::optional sctp_transport_name() const override { return absl::nullopt; @@ -301,7 +298,8 @@ class FakePeerConnectionBase : public PeerConnectionInternal { } protected: - sigslot::signal1 SignalDataChannelCreated_; + sigslot::signal1 SignalRtpDataChannelCreated_; + sigslot::signal1 SignalSctpDataChannelCreated_; }; } // namespace webrtc diff --git a/pc/test/fake_peer_connection_for_stats.h b/pc/test/fake_peer_connection_for_stats.h index c6391583f5..70f8dd50a1 100644 --- a/pc/test/fake_peer_connection_for_stats.h +++ b/pc/test/fake_peer_connection_for_stats.h @@ -36,7 +36,8 @@ class FakeVoiceMediaChannelForStats : public cricket::FakeVoiceMediaChannel { } // VoiceMediaChannel overrides. - bool GetStats(cricket::VoiceMediaInfo* info) override { + bool GetStats(cricket::VoiceMediaInfo* info, + bool get_and_clear_legacy_stats) override { if (stats_) { *info = *stats_; return true; @@ -174,11 +175,13 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { void AddSctpDataChannel(const std::string& label, const InternalDataChannelInit& init) { - AddSctpDataChannel(DataChannel::Create(&data_channel_provider_, - cricket::DCT_SCTP, label, init)); + // TODO(bugs.webrtc.org/11547): Supply a separate network thread. 
+ AddSctpDataChannel(SctpDataChannel::Create(&data_channel_provider_, label, + init, rtc::Thread::Current(), + rtc::Thread::Current())); } - void AddSctpDataChannel(rtc::scoped_refptr data_channel) { + void AddSctpDataChannel(rtc::scoped_refptr data_channel) { sctp_data_channels_.push_back(data_channel); } @@ -257,9 +260,12 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { return transceivers_; } - std::vector> sctp_data_channels() - const override { - return sctp_data_channels_; + std::vector GetDataChannelStats() const override { + RTC_DCHECK_RUN_ON(signaling_thread()); + std::vector stats; + for (const auto& channel : sctp_data_channels_) + stats.push_back(channel->GetStats()); + return stats; } cricket::CandidateStatsList GetPooledCandidateStats() const override { @@ -359,7 +365,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { std::unique_ptr voice_channel_; std::unique_ptr video_channel_; - std::vector> sctp_data_channels_; + std::vector> sctp_data_channels_; std::map transport_stats_by_name_; diff --git a/pc/test/fake_periodic_video_source.h b/pc/test/fake_periodic_video_source.h index 80dff70914..ac6e5a43e7 100644 --- a/pc/test/fake_periodic_video_source.h +++ b/pc/test/fake_periodic_video_source.h @@ -16,6 +16,7 @@ #include "api/video/video_source_interface.h" #include "media/base/fake_frame_source.h" #include "media/base/video_broadcaster.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" @@ -48,7 +49,7 @@ class FakePeriodicVideoSource final thread_checker_.Detach(); frame_source_.SetRotation(config.rotation); - TimeDelta frame_interval = TimeDelta::ms(config.frame_interval_ms); + TimeDelta frame_interval = TimeDelta::Millis(config.frame_interval_ms); RepeatingTaskHandle::Start(task_queue_->Get(), [this, frame_interval] { if (broadcaster_.wants().rotation_applied) { broadcaster_.OnFrame(frame_source_.GetFrameRotationApplied()); 
@@ -59,6 +60,11 @@ class FakePeriodicVideoSource final }); } + rtc::VideoSinkWants wants() const { + MutexLock lock(&mutex_); + return wants_; + } + void RemoveSink(rtc::VideoSinkInterface* sink) override { RTC_DCHECK(thread_checker_.IsCurrent()); broadcaster_.RemoveSink(sink); @@ -67,6 +73,10 @@ class FakePeriodicVideoSource final void AddOrUpdateSink(rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) override { RTC_DCHECK(thread_checker_.IsCurrent()); + { + MutexLock lock(&mutex_); + wants_ = wants; + } broadcaster_.AddOrUpdateSink(sink, wants); } @@ -80,6 +90,8 @@ class FakePeriodicVideoSource final rtc::VideoBroadcaster broadcaster_; cricket::FakeFrameSource frame_source_; + mutable Mutex mutex_; + rtc::VideoSinkWants wants_ RTC_GUARDED_BY(&mutex_); std::unique_ptr task_queue_; }; diff --git a/pc/test/fake_periodic_video_track_source.h b/pc/test/fake_periodic_video_track_source.h index cc406d6d3f..98a456f232 100644 --- a/pc/test/fake_periodic_video_track_source.h +++ b/pc/test/fake_periodic_video_track_source.h @@ -29,6 +29,10 @@ class FakePeriodicVideoTrackSource : public VideoTrackSource { ~FakePeriodicVideoTrackSource() = default; + const FakePeriodicVideoSource& fake_periodic_source() const { + return source_; + } + protected: rtc::VideoSourceInterface* source() override { return &source_; } diff --git a/pc/test/fake_rtc_certificate_generator.h b/pc/test/fake_rtc_certificate_generator.h index 9c43ba9726..b726a4c0ba 100644 --- a/pc/test/fake_rtc_certificate_generator.h +++ b/pc/test/fake_rtc_certificate_generator.h @@ -118,7 +118,7 @@ static const rtc::RTCCertificatePEM kEcdsaPems[] = { class FakeRTCCertificateGenerator : public rtc::RTCCertificateGeneratorInterface, - public rtc::MessageHandler { + public rtc::MessageHandlerAutoCleanup { public: typedef rtc::TypedMessageData< rtc::scoped_refptr > diff --git a/pc/test/mock_channel_interface.h b/pc/test/mock_channel_interface.h index 255bd2fcee..1be4dcb0ce 100644 --- 
a/pc/test/mock_channel_interface.h +++ b/pc/test/mock_channel_interface.h @@ -24,25 +24,41 @@ namespace cricket { // implementation of BaseChannel. class MockChannelInterface : public cricket::ChannelInterface { public: - MOCK_CONST_METHOD0(media_type, cricket::MediaType()); - MOCK_CONST_METHOD0(media_channel, MediaChannel*()); - MOCK_CONST_METHOD0(transport_name, const std::string&()); - MOCK_CONST_METHOD0(content_name, const std::string&()); - MOCK_CONST_METHOD0(enabled, bool()); - MOCK_METHOD1(Enable, bool(bool)); - MOCK_METHOD0(SignalFirstPacketReceived, - sigslot::signal1&()); - MOCK_METHOD3(SetLocalContent, - bool(const cricket::MediaContentDescription*, - webrtc::SdpType, - std::string*)); - MOCK_METHOD3(SetRemoteContent, - bool(const cricket::MediaContentDescription*, - webrtc::SdpType, - std::string*)); - MOCK_CONST_METHOD0(local_streams, const std::vector&()); - MOCK_CONST_METHOD0(remote_streams, const std::vector&()); - MOCK_METHOD1(SetRtpTransport, bool(webrtc::RtpTransportInternal*)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(MediaChannel*, media_channel, (), (const, override)); + MOCK_METHOD(const std::string&, transport_name, (), (const, override)); + MOCK_METHOD(const std::string&, content_name, (), (const, override)); + MOCK_METHOD(bool, enabled, (), (const, override)); + MOCK_METHOD(bool, Enable, (bool), (override)); + MOCK_METHOD(sigslot::signal1&, + SignalFirstPacketReceived, + (), + (override)); + MOCK_METHOD(bool, + SetLocalContent, + (const cricket::MediaContentDescription*, + webrtc::SdpType, + std::string*), + (override)); + MOCK_METHOD(bool, + SetRemoteContent, + (const cricket::MediaContentDescription*, + webrtc::SdpType, + std::string*), + (override)); + MOCK_METHOD(bool, SetPayloadTypeDemuxingEnabled, (bool), (override)); + MOCK_METHOD(const std::vector&, + local_streams, + (), + (const, override)); + MOCK_METHOD(const std::vector&, + remote_streams, + (), + (const, override)); + 
MOCK_METHOD(bool, + SetRtpTransport, + (webrtc::RtpTransportInternal*), + (override)); }; } // namespace cricket diff --git a/pc/test/mock_data_channel.h b/pc/test/mock_data_channel.h index 3385ec2f75..ab4b0073da 100644 --- a/pc/test/mock_data_channel.h +++ b/pc/test/mock_data_channel.h @@ -13,24 +13,39 @@ #include -#include "pc/data_channel.h" +#include "pc/sctp_data_channel.h" #include "test/gmock.h" namespace webrtc { -class MockDataChannel : public rtc::RefCountedObject { +class MockSctpDataChannel : public rtc::RefCountedObject { public: - MockDataChannel(int id, DataState state) - : MockDataChannel(id, "MockDataChannel", state, "udp", 0, 0, 0, 0) {} - MockDataChannel(int id, - const std::string& label, - DataState state, - const std::string& protocol, - uint32_t messages_sent, - uint64_t bytes_sent, - uint32_t messages_received, - uint64_t bytes_received) - : rtc::RefCountedObject(nullptr, cricket::DCT_NONE, label) { + MockSctpDataChannel(int id, DataState state) + : MockSctpDataChannel(id, + "MockSctpDataChannel", + state, + "udp", + 0, + 0, + 0, + 0) {} + MockSctpDataChannel( + int id, + const std::string& label, + DataState state, + const std::string& protocol, + uint32_t messages_sent, + uint64_t bytes_sent, + uint32_t messages_received, + uint64_t bytes_received, + const InternalDataChannelInit& config = InternalDataChannelInit(), + rtc::Thread* signaling_thread = rtc::Thread::Current(), + rtc::Thread* network_thread = rtc::Thread::Current()) + : rtc::RefCountedObject(config, + nullptr, + label, + signaling_thread, + network_thread) { EXPECT_CALL(*this, id()).WillRepeatedly(::testing::Return(id)); EXPECT_CALL(*this, state()).WillRepeatedly(::testing::Return(state)); EXPECT_CALL(*this, protocol()).WillRepeatedly(::testing::Return(protocol)); @@ -43,13 +58,13 @@ class MockDataChannel : public rtc::RefCountedObject { EXPECT_CALL(*this, bytes_received()) .WillRepeatedly(::testing::Return(bytes_received)); } - MOCK_CONST_METHOD0(id, int()); - 
MOCK_CONST_METHOD0(state, DataState()); - MOCK_CONST_METHOD0(protocol, std::string()); - MOCK_CONST_METHOD0(messages_sent, uint32_t()); - MOCK_CONST_METHOD0(bytes_sent, uint64_t()); - MOCK_CONST_METHOD0(messages_received, uint32_t()); - MOCK_CONST_METHOD0(bytes_received, uint64_t()); + MOCK_METHOD(int, id, (), (const, override)); + MOCK_METHOD(DataState, state, (), (const, override)); + MOCK_METHOD(std::string, protocol, (), (const, override)); + MOCK_METHOD(uint32_t, messages_sent, (), (const, override)); + MOCK_METHOD(uint64_t, bytes_sent, (), (const, override)); + MOCK_METHOD(uint32_t, messages_received, (), (const, override)); + MOCK_METHOD(uint64_t, bytes_received, (), (const, override)); }; } // namespace webrtc diff --git a/pc/test/mock_delayable.h b/pc/test/mock_delayable.h index 548f9f8c0a..bef07c1970 100644 --- a/pc/test/mock_delayable.h +++ b/pc/test/mock_delayable.h @@ -21,9 +21,14 @@ namespace webrtc { class MockDelayable : public cricket::Delayable { public: - MOCK_METHOD2(SetBaseMinimumPlayoutDelayMs, bool(uint32_t ssrc, int delay_ms)); - MOCK_CONST_METHOD1(GetBaseMinimumPlayoutDelayMs, - absl::optional(uint32_t ssrc)); + MOCK_METHOD(bool, + SetBaseMinimumPlayoutDelayMs, + (uint32_t ssrc, int delay_ms), + (override)); + MOCK_METHOD(absl::optional, + GetBaseMinimumPlayoutDelayMs, + (uint32_t ssrc), + (const, override)); }; } // namespace webrtc diff --git a/pc/test/mock_peer_connection_observers.h b/pc/test/mock_peer_connection_observers.h index 2017735dc7..7766297843 100644 --- a/pc/test/mock_peer_connection_observers.h +++ b/pc/test/mock_peer_connection_observers.h @@ -85,6 +85,9 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { remote_streams_->RemoveStream(stream); } void OnRenegotiationNeeded() override { renegotiation_needed_ = true; } + void OnNegotiationNeededEvent(uint32_t event_id) override { + latest_negotiation_needed_event_ = event_id; + } void OnDataChannel( rtc::scoped_refptr data_channel) override { 
last_datachannel_ = data_channel; @@ -214,8 +217,18 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { return candidates; } - bool negotiation_needed() const { return renegotiation_needed_; } - void clear_negotiation_needed() { renegotiation_needed_ = false; } + bool legacy_renegotiation_needed() const { return renegotiation_needed_; } + void clear_legacy_renegotiation_needed() { renegotiation_needed_ = false; } + + bool has_negotiation_needed_event() { + return latest_negotiation_needed_event_.has_value(); + } + uint32_t latest_negotiation_needed_event() { + return latest_negotiation_needed_event_.value_or(0u); + } + void clear_latest_negotiation_needed_event() { + latest_negotiation_needed_event_ = absl::nullopt; + } rtc::scoped_refptr pc_; PeerConnectionInterface::SignalingState state_; @@ -223,6 +236,7 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { rtc::scoped_refptr last_datachannel_; rtc::scoped_refptr remote_streams_; bool renegotiation_needed_ = false; + absl::optional latest_negotiation_needed_event_; bool ice_gathering_complete_ = false; bool ice_connected_ = false; bool callback_triggered_ = false; @@ -297,7 +311,26 @@ class MockSetSessionDescriptionObserver std::string error_; }; -class MockSetRemoteDescriptionObserver +class FakeSetLocalDescriptionObserver + : public rtc::RefCountedObject { + public: + bool called() const { return error_.has_value(); } + RTCError& error() { + RTC_DCHECK(error_.has_value()); + return *error_; + } + + // SetLocalDescriptionObserverInterface implementation. + void OnSetLocalDescriptionComplete(RTCError error) override { + error_ = std::move(error); + } + + private: + // Set on complete, on success this is set to an RTCError::OK() error. 
+ absl::optional error_; +}; + +class FakeSetRemoteDescriptionObserver : public rtc::RefCountedObject { public: bool called() const { return error_.has_value(); } diff --git a/pc/test/mock_rtp_receiver_internal.h b/pc/test/mock_rtp_receiver_internal.h index ffe78b5230..ba244039af 100644 --- a/pc/test/mock_rtp_receiver_internal.h +++ b/pc/test/mock_rtp_receiver_internal.h @@ -24,37 +24,55 @@ namespace webrtc { class MockRtpReceiverInternal : public RtpReceiverInternal { public: // RtpReceiverInterface methods. - MOCK_METHOD1(SetTrack, void(MediaStreamTrackInterface*)); - MOCK_CONST_METHOD0(track, rtc::scoped_refptr()); - MOCK_CONST_METHOD0(dtls_transport, - rtc::scoped_refptr()); - MOCK_CONST_METHOD0(stream_ids, std::vector()); - MOCK_CONST_METHOD0(streams, - std::vector>()); - MOCK_CONST_METHOD0(media_type, cricket::MediaType()); - MOCK_CONST_METHOD0(id, std::string()); - MOCK_CONST_METHOD0(GetParameters, RtpParameters()); - MOCK_METHOD1(SetObserver, void(RtpReceiverObserverInterface*)); - MOCK_METHOD1(SetJitterBufferMinimumDelay, void(absl::optional)); - MOCK_CONST_METHOD0(GetSources, std::vector()); - MOCK_METHOD1(SetFrameDecryptor, - void(rtc::scoped_refptr)); - MOCK_CONST_METHOD0(GetFrameDecryptor, - rtc::scoped_refptr()); + MOCK_METHOD(rtc::scoped_refptr, + track, + (), + (const, override)); + MOCK_METHOD(rtc::scoped_refptr, + dtls_transport, + (), + (const, override)); + MOCK_METHOD(std::vector, stream_ids, (), (const, override)); + MOCK_METHOD(std::vector>, + streams, + (), + (const, override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(std::string, id, (), (const, override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); + MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); + MOCK_METHOD(void, + SetJitterBufferMinimumDelay, + (absl::optional), + (override)); + MOCK_METHOD(std::vector, GetSources, (), (const, override)); + MOCK_METHOD(void, + SetFrameDecryptor, + 
(rtc::scoped_refptr), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetFrameDecryptor, + (), + (const, override)); // RtpReceiverInternal methods. - MOCK_METHOD0(Stop, void()); - MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*)); - MOCK_METHOD1(SetupMediaChannel, void(uint32_t)); - MOCK_METHOD0(SetupUnsignaledMediaChannel, void()); - MOCK_CONST_METHOD0(ssrc, uint32_t()); - MOCK_METHOD0(NotifyFirstPacketReceived, void()); - MOCK_METHOD1(set_stream_ids, void(std::vector)); - MOCK_METHOD1(set_transport, void(rtc::scoped_refptr)); - MOCK_METHOD1( - SetStreams, - void(const std::vector>&)); - MOCK_CONST_METHOD0(AttachmentId, int()); + MOCK_METHOD(void, Stop, (), (override)); + MOCK_METHOD(void, StopAndEndTrack, (), (override)); + MOCK_METHOD(void, SetMediaChannel, (cricket::MediaChannel*), (override)); + MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override)); + MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override)); + MOCK_METHOD(uint32_t, ssrc, (), (const, override)); + MOCK_METHOD(void, NotifyFirstPacketReceived, (), (override)); + MOCK_METHOD(void, set_stream_ids, (std::vector), (override)); + MOCK_METHOD(void, + set_transport, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, + SetStreams, + (const std::vector>&), + (override)); + MOCK_METHOD(int, AttachmentId, (), (const, override)); }; } // namespace webrtc diff --git a/pc/test/mock_rtp_sender_internal.h b/pc/test/mock_rtp_sender_internal.h index 2cf0173bd1..5e7670ebf0 100644 --- a/pc/test/mock_rtp_sender_internal.h +++ b/pc/test/mock_rtp_sender_internal.h @@ -23,25 +23,46 @@ namespace webrtc { class MockRtpSenderInternal : public RtpSenderInternal { public: // RtpSenderInterface methods. 
- MOCK_METHOD1(SetTrack, bool(MediaStreamTrackInterface*)); - MOCK_CONST_METHOD0(track, rtc::scoped_refptr()); - MOCK_CONST_METHOD0(ssrc, uint32_t()); - MOCK_CONST_METHOD0(dtls_transport, - rtc::scoped_refptr()); - MOCK_CONST_METHOD0(media_type, cricket::MediaType()); - MOCK_CONST_METHOD0(id, std::string()); - MOCK_CONST_METHOD0(stream_ids, std::vector()); - MOCK_CONST_METHOD0(init_send_encodings, std::vector()); - MOCK_METHOD1(set_transport, void(rtc::scoped_refptr)); - MOCK_CONST_METHOD0(GetParameters, RtpParameters()); - MOCK_CONST_METHOD0(GetParametersInternal, RtpParameters()); - MOCK_METHOD1(SetParameters, RTCError(const RtpParameters&)); - MOCK_METHOD1(SetParametersInternal, RTCError(const RtpParameters&)); - MOCK_CONST_METHOD0(GetDtmfSender, rtc::scoped_refptr()); - MOCK_METHOD1(SetFrameEncryptor, - void(rtc::scoped_refptr)); - MOCK_CONST_METHOD0(GetFrameEncryptor, - rtc::scoped_refptr()); + MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override)); + MOCK_METHOD(rtc::scoped_refptr, + track, + (), + (const, override)); + MOCK_METHOD(uint32_t, ssrc, (), (const, override)); + MOCK_METHOD(rtc::scoped_refptr, + dtls_transport, + (), + (const, override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(std::string, id, (), (const, override)); + MOCK_METHOD(std::vector, stream_ids, (), (const, override)); + MOCK_METHOD(std::vector, + init_send_encodings, + (), + (const, override)); + MOCK_METHOD(void, + set_transport, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); + MOCK_METHOD(RtpParameters, GetParametersInternal, (), (const, override)); + MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); + MOCK_METHOD(RTCError, + SetParametersInternal, + (const RtpParameters&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetDtmfSender, + (), + (const, override)); + MOCK_METHOD(void, + SetFrameEncryptor, + (rtc::scoped_refptr), + (override)); 
+ MOCK_METHOD(rtc::scoped_refptr, + GetFrameEncryptor, + (), + (const, override)); // RtpSenderInternal methods. MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*)); @@ -54,6 +75,7 @@ class MockRtpSenderInternal : public RtpSenderInternal { MOCK_CONST_METHOD0(AttachmentId, int()); MOCK_METHOD1(DisableEncodingLayers, RTCError(const std::vector&)); + MOCK_METHOD0(SetTransceiverAsStopped, void()); }; } // namespace webrtc diff --git a/pc/test/peer_connection_test_wrapper.cc b/pc/test/peer_connection_test_wrapper.cc index 4f0d72e667..946f459f3b 100644 --- a/pc/test/peer_connection_test_wrapper.cc +++ b/pc/test/peer_connection_test_wrapper.cc @@ -80,7 +80,8 @@ PeerConnectionTestWrapper::PeerConnectionTestWrapper( rtc::Thread* worker_thread) : name_(name), network_thread_(network_thread), - worker_thread_(worker_thread) { + worker_thread_(worker_thread), + pending_negotiation_(false) { pc_thread_checker_.Detach(); } @@ -135,6 +136,17 @@ PeerConnectionTestWrapper::CreateDataChannel( return peer_connection_->CreateDataChannel(label, &init); } +void PeerConnectionTestWrapper::WaitForNegotiation() { + EXPECT_TRUE_WAIT(!pending_negotiation_, kMaxWait); +} + +void PeerConnectionTestWrapper::OnSignalingChange( + webrtc::PeerConnectionInterface::SignalingState new_state) { + if (new_state == webrtc::PeerConnectionInterface::SignalingState::kStable) { + pending_negotiation_ = false; + } +} + void PeerConnectionTestWrapper::OnAddTrack( rtc::scoped_refptr receiver, const std::vector>& streams) { @@ -182,6 +194,7 @@ void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) { void PeerConnectionTestWrapper::CreateOffer( const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) { RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": CreateOffer."; + pending_negotiation_ = true; peer_connection_->CreateOffer(this, options); } @@ -189,6 +202,7 @@ void PeerConnectionTestWrapper::CreateAnswer( const 
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) { RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": CreateAnswer."; + pending_negotiation_ = true; peer_connection_->CreateAnswer(this, options); } diff --git a/pc/test/peer_connection_test_wrapper.h b/pc/test/peer_connection_test_wrapper.h index 2dc88e9309..92599b78ab 100644 --- a/pc/test/peer_connection_test_wrapper.h +++ b/pc/test/peer_connection_test_wrapper.h @@ -49,15 +49,21 @@ class PeerConnectionTestWrapper rtc::scoped_refptr audio_encoder_factory, rtc::scoped_refptr audio_decoder_factory); + rtc::scoped_refptr pc_factory() + const { + return peer_connection_factory_; + } webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); } rtc::scoped_refptr CreateDataChannel( const std::string& label, const webrtc::DataChannelInit& init); + void WaitForNegotiation(); + // Implements PeerConnectionObserver. void OnSignalingChange( - webrtc::PeerConnectionInterface::SignalingState new_state) override {} + webrtc::PeerConnectionInterface::SignalingState new_state) override; void OnAddTrack( rtc::scoped_refptr receiver, const std::vector>& @@ -121,6 +127,7 @@ class PeerConnectionTestWrapper rtc::scoped_refptr fake_audio_capture_module_; std::unique_ptr renderer_; int num_get_user_media_calls_ = 0; + bool pending_negotiation_; }; #endif // PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_ diff --git a/pc/track_media_info_map.cc b/pc/track_media_info_map.cc index 781737a085..b3ec68bb27 100644 --- a/pc/track_media_info_map.cc +++ b/pc/track_media_info_map.cc @@ -10,9 +10,12 @@ #include "pc/track_media_info_map.h" +#include #include #include +#include "rtc_base/thread.h" + namespace webrtc { namespace { @@ -42,20 +45,12 @@ void GetAudioAndVideoTrackBySsrc( RTC_DCHECK(local_video_track_by_ssrc->empty()); RTC_DCHECK(remote_audio_track_by_ssrc->empty()); RTC_DCHECK(remote_video_track_by_ssrc->empty()); - // TODO(hbos): RTP senders/receivers uses a proxy to the signaling thread, and - // our 
sender/receiver implementations invokes on the worker thread. (This - // means one thread jump if on signaling thread and two thread jumps if on any - // other threads). Is there a way to avoid thread jump(s) on a per - // sender/receiver, per method basis? for (const auto& rtp_sender : rtp_senders) { cricket::MediaType media_type = rtp_sender->media_type(); MediaStreamTrackInterface* track = rtp_sender->track(); if (!track) { continue; } - RTC_DCHECK_EQ(track->kind(), media_type == cricket::MEDIA_TYPE_AUDIO - ? MediaStreamTrackInterface::kAudioKind - : MediaStreamTrackInterface::kVideoKind); // TODO(deadbeef): |ssrc| should be removed in favor of |GetParameters|. uint32_t ssrc = rtp_sender->ssrc(); if (ssrc != 0) { @@ -76,9 +71,6 @@ void GetAudioAndVideoTrackBySsrc( cricket::MediaType media_type = rtp_receiver->media_type(); MediaStreamTrackInterface* track = rtp_receiver->track(); RTC_DCHECK(track); - RTC_DCHECK_EQ(track->kind(), media_type == cricket::MEDIA_TYPE_AUDIO - ? MediaStreamTrackInterface::kAudioKind - : MediaStreamTrackInterface::kVideoKind); RtpParameters params = rtp_receiver->GetParameters(); for (const RtpEncodingParameters& encoding : params.encodings) { if (!encoding.ssrc) { @@ -114,6 +106,8 @@ TrackMediaInfoMap::TrackMediaInfoMap( const std::vector>& rtp_receivers) : voice_media_info_(std::move(voice_media_info)), video_media_info_(std::move(video_media_info)) { + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::map local_audio_track_by_ssrc; std::map local_video_track_by_ssrc; std::map remote_audio_track_by_ssrc; @@ -170,19 +164,36 @@ TrackMediaInfoMap::TrackMediaInfoMap( } if (video_media_info_) { for (auto& sender_info : video_media_info_->senders) { - VideoTrackInterface* associated_track = - FindValueOrNull(local_video_track_by_ssrc, sender_info.ssrc()); - if (associated_track) { - // One sender is associated with at most one track. - // One track may be associated with multiple senders. 
- video_track_by_sender_info_[&sender_info] = associated_track; - video_infos_by_local_track_[associated_track].push_back(&sender_info); + std::set ssrcs; + ssrcs.insert(sender_info.ssrc()); + for (auto& ssrc_group : sender_info.ssrc_groups) { + for (auto ssrc : ssrc_group.ssrcs) { + ssrcs.insert(ssrc); + } } + for (auto ssrc : ssrcs) { + VideoTrackInterface* associated_track = + FindValueOrNull(local_video_track_by_ssrc, ssrc); + if (associated_track) { + // One sender is associated with at most one track. + // One track may be associated with multiple senders. + video_track_by_sender_info_[&sender_info] = associated_track; + video_infos_by_local_track_[associated_track].push_back(&sender_info); + break; + } + } + } + for (auto& sender_info : video_media_info_->aggregated_senders) { if (sender_info.ssrc() == 0) continue; // Unconnected SSRC. bugs.webrtc.org/8673 RTC_DCHECK(video_info_by_sender_ssrc_.count(sender_info.ssrc()) == 0) << "Duplicate video sender SSRC: " << sender_info.ssrc(); video_info_by_sender_ssrc_[sender_info.ssrc()] = &sender_info; + VideoTrackInterface* associated_track = + FindValueOrNull(local_video_track_by_ssrc, sender_info.ssrc()); + if (associated_track) { + video_track_by_sender_info_[&sender_info] = associated_track; + } } for (auto& receiver_info : video_media_info_->receivers) { VideoTrackInterface* associated_track = diff --git a/pc/track_media_info_map_unittest.cc b/pc/track_media_info_map_unittest.cc index 4fa8a4ae03..0cb1e0e277 100644 --- a/pc/track_media_info_map_unittest.cc +++ b/pc/track_media_info_map_unittest.cc @@ -83,19 +83,21 @@ rtc::scoped_refptr CreateMockRtpReceiver( class TrackMediaInfoMapTest : public ::testing::Test { public: - TrackMediaInfoMapTest() + TrackMediaInfoMapTest() : TrackMediaInfoMapTest(true) {} + + explicit TrackMediaInfoMapTest(bool use_current_thread) : voice_media_info_(new cricket::VoiceMediaInfo()), video_media_info_(new cricket::VideoMediaInfo()), 
local_audio_track_(AudioTrack::Create("LocalAudioTrack", nullptr)), remote_audio_track_(AudioTrack::Create("RemoteAudioTrack", nullptr)), - local_video_track_( - VideoTrack::Create("LocalVideoTrack", - FakeVideoTrackSource::Create(false), - rtc::Thread::Current())), - remote_video_track_( - VideoTrack::Create("RemoteVideoTrack", - FakeVideoTrackSource::Create(false), - rtc::Thread::Current())) {} + local_video_track_(VideoTrack::Create( + "LocalVideoTrack", + FakeVideoTrackSource::Create(false), + use_current_thread ? rtc::Thread::Current() : nullptr)), + remote_video_track_(VideoTrack::Create( + "RemoteVideoTrack", + FakeVideoTrackSource::Create(false), + use_current_thread ? rtc::Thread::Current() : nullptr)) {} ~TrackMediaInfoMapTest() { // If we have a map the ownership has been passed to the map, only delete if @@ -131,6 +133,7 @@ class TrackMediaInfoMapTest : public ::testing::Test { video_sender_info.local_stats[i++].ssrc = ssrc; } video_media_info_->senders.push_back(video_sender_info); + video_media_info_->aggregated_senders.push_back(video_sender_info); } } @@ -416,7 +419,10 @@ TEST_F(TrackMediaInfoMapTest, GetAttachmentIdByTrack) { // base/test/gtest_util.h. #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest {}; +class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest { + public: + TrackMediaInfoMapDeathTest() : TrackMediaInfoMapTest(false) {} +}; TEST_F(TrackMediaInfoMapDeathTest, MultipleOneSsrcReceiversPerTrack) { AddRtpReceiverWithSsrcs({1}, remote_audio_track_); diff --git a/pc/transceiver_list.cc b/pc/transceiver_list.cc new file mode 100644 index 0000000000..5fe148a222 --- /dev/null +++ b/pc/transceiver_list.cc @@ -0,0 +1,67 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/transceiver_list.h" + +namespace webrtc { + +void TransceiverStableState::set_newly_created() { + RTC_DCHECK(!has_m_section_); + newly_created_ = true; +} + +void TransceiverStableState::SetMSectionIfUnset( + absl::optional mid, + absl::optional mline_index) { + if (!has_m_section_) { + mid_ = mid; + mline_index_ = mline_index; + has_m_section_ = true; + } +} + +void TransceiverStableState::SetRemoteStreamIdsIfUnset( + const std::vector& ids) { + if (!remote_stream_ids_.has_value()) { + remote_stream_ids_ = ids; + } +} + +RtpTransceiverProxyRefPtr TransceiverList::FindBySender( + rtc::scoped_refptr sender) const { + for (auto transceiver : transceivers_) { + if (transceiver->sender() == sender) { + return transceiver; + } + } + return nullptr; +} + +RtpTransceiverProxyRefPtr TransceiverList::FindByMid( + const std::string& mid) const { + for (auto transceiver : transceivers_) { + if (transceiver->mid() == mid) { + return transceiver; + } + } + return nullptr; +} + +RtpTransceiverProxyRefPtr TransceiverList::FindByMLineIndex( + size_t mline_index) const { + for (auto transceiver : transceivers_) { + if (transceiver->internal()->mline_index() == mline_index) { + return transceiver; + } + } + return nullptr; +} + +} // namespace webrtc diff --git a/pc/transceiver_list.h b/pc/transceiver_list.h new file mode 100644 index 0000000000..cd77d67f44 --- /dev/null +++ b/pc/transceiver_list.h @@ -0,0 +1,100 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_TRANSCEIVER_LIST_H_ +#define PC_TRANSCEIVER_LIST_H_ + +#include +#include +#include +#include + +#include "pc/rtp_transceiver.h" + +namespace webrtc { + +typedef rtc::scoped_refptr> + RtpTransceiverProxyRefPtr; + +// Captures partial state to be used for rollback. Applicable only in +// Unified Plan. +class TransceiverStableState { + public: + TransceiverStableState() {} + void set_newly_created(); + void SetMSectionIfUnset(absl::optional mid, + absl::optional mline_index); + void SetRemoteStreamIdsIfUnset(const std::vector& ids); + absl::optional mid() const { return mid_; } + absl::optional mline_index() const { return mline_index_; } + absl::optional> remote_stream_ids() const { + return remote_stream_ids_; + } + bool has_m_section() const { return has_m_section_; } + bool newly_created() const { return newly_created_; } + + private: + absl::optional mid_; + absl::optional mline_index_; + absl::optional> remote_stream_ids_; + // Indicates that mid value from stable state has been captured and + // that rollback has to restore the transceiver. Also protects against + // subsequent overwrites. + bool has_m_section_ = false; + // Indicates that the transceiver was created as part of applying a + // description to track potential need for removing transceiver during + // rollback. 
+ bool newly_created_ = false; +}; + +class TransceiverList { + public: + std::vector List() const { return transceivers_; } + + void Add(RtpTransceiverProxyRefPtr transceiver) { + transceivers_.push_back(transceiver); + } + void Remove(RtpTransceiverProxyRefPtr transceiver) { + transceivers_.erase( + std::remove(transceivers_.begin(), transceivers_.end(), transceiver), + transceivers_.end()); + } + RtpTransceiverProxyRefPtr FindBySender( + rtc::scoped_refptr sender) const; + RtpTransceiverProxyRefPtr FindByMid(const std::string& mid) const; + RtpTransceiverProxyRefPtr FindByMLineIndex(size_t mline_index) const; + + // Find or create the stable state for a transceiver. + TransceiverStableState* StableState(RtpTransceiverProxyRefPtr transceiver) { + return &(transceiver_stable_states_by_transceivers_[transceiver]); + } + + void DiscardStableStates() { + transceiver_stable_states_by_transceivers_.clear(); + } + + std::map& StableStates() { + return transceiver_stable_states_by_transceivers_; + } + + private: + std::vector transceivers_; + // Holds changes made to transceivers during applying descriptors for + // potential rollback. Gets cleared once signaling state goes to stable. + std::map + transceiver_stable_states_by_transceivers_; + // Holds remote stream ids for transceivers from stable state. + std::map> + remote_stream_ids_by_transceivers_; +}; + +} // namespace webrtc + +#endif // PC_TRANSCEIVER_LIST_H_ diff --git a/pc/usage_pattern.cc b/pc/usage_pattern.cc new file mode 100644 index 0000000000..848472148f --- /dev/null +++ b/pc/usage_pattern.cc @@ -0,0 +1,49 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/usage_pattern.h" + +#include "api/peer_connection_interface.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +void UsagePattern::NoteUsageEvent(UsageEvent event) { + usage_event_accumulator_ |= static_cast(event); +} + +void UsagePattern::ReportUsagePattern(PeerConnectionObserver* observer) const { + RTC_DLOG(LS_INFO) << "Usage signature is " << usage_event_accumulator_; + RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.PeerConnection.UsagePattern", + usage_event_accumulator_, + static_cast(UsageEvent::MAX_VALUE)); + const int bad_bits = + static_cast(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED) | + static_cast(UsageEvent::CANDIDATE_COLLECTED); + const int good_bits = + static_cast(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED) | + static_cast(UsageEvent::REMOTE_CANDIDATE_ADDED) | + static_cast(UsageEvent::ICE_STATE_CONNECTED); + if ((usage_event_accumulator_ & bad_bits) == bad_bits && + (usage_event_accumulator_ & good_bits) == 0) { + // If called after close(), we can't report, because observer may have + // been deallocated, and therefore pointer is null. Write to log instead. + if (observer) { + observer->OnInterestingUsage(usage_event_accumulator_); + } else { + RTC_LOG(LS_INFO) << "Interesting usage signature " + << usage_event_accumulator_ + << " observed after observer shutdown"; + } + } +} + +} // namespace webrtc diff --git a/pc/usage_pattern.h b/pc/usage_pattern.h new file mode 100644 index 0000000000..c4a8918ac2 --- /dev/null +++ b/pc/usage_pattern.h @@ -0,0 +1,75 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_USAGE_PATTERN_H_ +#define PC_USAGE_PATTERN_H_ + +namespace webrtc { + +class PeerConnectionObserver; + +// A bit in the usage pattern is registered when its defining event occurs +// at least once. +enum class UsageEvent : int { + TURN_SERVER_ADDED = 0x01, + STUN_SERVER_ADDED = 0x02, + DATA_ADDED = 0x04, + AUDIO_ADDED = 0x08, + VIDEO_ADDED = 0x10, + // |SetLocalDescription| returns successfully. + SET_LOCAL_DESCRIPTION_SUCCEEDED = 0x20, + // |SetRemoteDescription| returns successfully. + SET_REMOTE_DESCRIPTION_SUCCEEDED = 0x40, + // A local candidate (with type host, server-reflexive, or relay) is + // collected. + CANDIDATE_COLLECTED = 0x80, + // A remote candidate is successfully added via |AddIceCandidate|. + ADD_ICE_CANDIDATE_SUCCEEDED = 0x100, + ICE_STATE_CONNECTED = 0x200, + CLOSE_CALLED = 0x400, + // A local candidate with private IP is collected. + PRIVATE_CANDIDATE_COLLECTED = 0x800, + // A remote candidate with private IP is added, either via AddiceCandidate + // or from the remote description. + REMOTE_PRIVATE_CANDIDATE_ADDED = 0x1000, + // A local mDNS candidate is collected. + MDNS_CANDIDATE_COLLECTED = 0x2000, + // A remote mDNS candidate is added, either via AddIceCandidate or from the + // remote description. + REMOTE_MDNS_CANDIDATE_ADDED = 0x4000, + // A local candidate with IPv6 address is collected. + IPV6_CANDIDATE_COLLECTED = 0x8000, + // A remote candidate with IPv6 address is added, either via AddIceCandidate + // or from the remote description. + REMOTE_IPV6_CANDIDATE_ADDED = 0x10000, + // A remote candidate (with type host, server-reflexive, or relay) is + // successfully added, either via AddIceCandidate or from the remote + // description. + REMOTE_CANDIDATE_ADDED = 0x20000, + // An explicit host-host candidate pair is selected, i.e. both the local and + // the remote candidates have the host type. 
This does not include candidate + // pairs formed with equivalent prflx remote candidates, e.g. a host-prflx + // pair where the prflx candidate has the same base as a host candidate of + // the remote peer. + DIRECT_CONNECTION_SELECTED = 0x40000, + MAX_VALUE = 0x80000, +}; + +class UsagePattern { + public: + void NoteUsageEvent(UsageEvent event); + void ReportUsagePattern(PeerConnectionObserver* observer) const; + + private: + int usage_event_accumulator_ = 0; +}; + +} // namespace webrtc +#endif // PC_USAGE_PATTERN_H_ diff --git a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc index c6fb5430cd..dd601259ec 100644 --- a/pc/video_rtp_receiver.cc +++ b/pc/video_rtp_receiver.cc @@ -16,7 +16,6 @@ #include #include "api/media_stream_proxy.h" -#include "api/media_stream_track_proxy.h" #include "api/video_track_source_proxy.h" #include "pc/jitter_buffer_delay.h" #include "pc/jitter_buffer_delay_proxy.h" @@ -43,7 +42,7 @@ VideoRtpReceiver::VideoRtpReceiver( : worker_thread_(worker_thread), id_(receiver_id), source_(new RefCountedObject(this)), - track_(VideoTrackProxy::Create( + track_(VideoTrackProxyWithInternal::Create( rtc::Thread::Current(), worker_thread, VideoTrack::Create( @@ -104,6 +103,18 @@ VideoRtpReceiver::GetFrameDecryptor() const { return frame_decryptor_; } +void VideoRtpReceiver::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + worker_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(worker_thread_); + frame_transformer_ = std::move(frame_transformer); + if (media_channel_ && !stopped_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); + } + }); +} + void VideoRtpReceiver::Stop() { // TODO(deadbeef): Need to do more here to fully stop receiving packets. 
if (stopped_) { @@ -124,6 +135,11 @@ void VideoRtpReceiver::Stop() { stopped_ = true; } +void VideoRtpReceiver::StopAndEndTrack() { + Stop(); + track_->internal()->set_ended(); +} + void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK(media_channel_); if (!stopped_ && ssrc_ == ssrc) { @@ -144,6 +160,11 @@ void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { if (encoded_sink_enabled) { SetEncodedSinkEnabled(true); } + + if (frame_transformer_ && media_channel_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); + } }); // Attach any existing frame decryptor to the media channel. @@ -251,6 +272,10 @@ void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { if (encoded_sink_enabled) { SetEncodedSinkEnabled(true); } + if (frame_transformer_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); + } } }); } diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h index 0b8a73da61..74ae44431e 100644 --- a/pc/video_rtp_receiver.h +++ b/pc/video_rtp_receiver.h @@ -18,7 +18,9 @@ #include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" +#include "api/media_stream_track_proxy.h" #include "api/media_types.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" @@ -30,6 +32,7 @@ #include "pc/jitter_buffer_delay_interface.h" #include "pc/rtp_receiver.h" #include "pc/video_rtp_track_source.h" +#include "pc/video_track.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/thread.h" @@ -83,8 +86,12 @@ class VideoRtpReceiver : public rtc::RefCountedObject, rtc::scoped_refptr GetFrameDecryptor() const override; + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) override; + // RtpReceiverInternal implementation. 
void Stop() override; + void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override { return ssrc_.value_or(0); } @@ -126,7 +133,7 @@ class VideoRtpReceiver : public rtc::RefCountedObject, // |source_| is held here to be able to change the state of the source when // the VideoRtpReceiver is stopped. rtc::scoped_refptr source_; - rtc::scoped_refptr track_; + rtc::scoped_refptr> track_; std::vector> streams_; bool stopped_ = true; RtpReceiverObserverInterface* observer_ = nullptr; @@ -134,6 +141,8 @@ class VideoRtpReceiver : public rtc::RefCountedObject, int attachment_id_ = 0; rtc::scoped_refptr frame_decryptor_; rtc::scoped_refptr dtls_transport_; + rtc::scoped_refptr frame_transformer_ + RTC_GUARDED_BY(worker_thread_); // Allows to thread safely change jitter buffer delay. Handles caching cases // if |SetJitterBufferMinimumDelay| is called before start. rtc::scoped_refptr delay_; diff --git a/pc/video_rtp_receiver_unittest.cc b/pc/video_rtp_receiver_unittest.cc index c4b7b8205d..b3eb6e6e35 100644 --- a/pc/video_rtp_receiver_unittest.cc +++ b/pc/video_rtp_receiver_unittest.cc @@ -32,16 +32,20 @@ class VideoRtpReceiverTest : public testing::Test { MockVideoMediaChannel(cricket::FakeVideoEngine* engine, const cricket::VideoOptions& options) : FakeVideoMediaChannel(engine, options) {} - MOCK_METHOD2(SetRecordableEncodedFrameCallback, - void(uint32_t, - std::function)); - MOCK_METHOD1(ClearRecordableEncodedFrameCallback, void(uint32_t)); - MOCK_METHOD1(GenerateKeyFrame, void(uint32_t)); + MOCK_METHOD(void, + SetRecordableEncodedFrameCallback, + (uint32_t, std::function), + (override)); + MOCK_METHOD(void, + ClearRecordableEncodedFrameCallback, + (uint32_t), + (override)); + MOCK_METHOD(void, GenerateKeyFrame, (uint32_t), (override)); }; class MockVideoSink : public rtc::VideoSinkInterface { public: - MOCK_METHOD1(OnFrame, void(const RecordableEncodedFrame&)); + 
MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override)); }; VideoRtpReceiverTest() diff --git a/pc/video_rtp_track_source.cc b/pc/video_rtp_track_source.cc index 2f15c42b4d..f96db962b1 100644 --- a/pc/video_rtp_track_source.cc +++ b/pc/video_rtp_track_source.cc @@ -31,7 +31,7 @@ rtc::VideoSinkInterface* VideoRtpTrackSource::sink() { void VideoRtpTrackSource::BroadcastRecordableEncodedFrame( const RecordableEncodedFrame& frame) const { - rtc::CritScope cs(&mu_); + MutexLock lock(&mu_); for (rtc::VideoSinkInterface* sink : encoded_sinks_) { sink->OnFrame(frame); } @@ -54,7 +54,7 @@ void VideoRtpTrackSource::AddEncodedSink( RTC_DCHECK(sink); size_t size = 0; { - rtc::CritScope cs(&mu_); + MutexLock lock(&mu_); RTC_DCHECK(std::find(encoded_sinks_.begin(), encoded_sinks_.end(), sink) == encoded_sinks_.end()); encoded_sinks_.push_back(sink); @@ -70,7 +70,7 @@ void VideoRtpTrackSource::RemoveEncodedSink( RTC_DCHECK_RUN_ON(&worker_sequence_checker_); size_t size = 0; { - rtc::CritScope cs(&mu_); + MutexLock lock(&mu_); auto it = std::find(encoded_sinks_.begin(), encoded_sinks_.end(), sink); if (it != encoded_sinks_.end()) { encoded_sinks_.erase(it); diff --git a/pc/video_rtp_track_source.h b/pc/video_rtp_track_source.h index e62cda70c3..b887849312 100644 --- a/pc/video_rtp_track_source.h +++ b/pc/video_rtp_track_source.h @@ -16,7 +16,7 @@ #include "media/base/video_broadcaster.h" #include "pc/video_track_source.h" #include "rtc_base/callback.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -72,7 +72,7 @@ class VideoRtpTrackSource : public VideoTrackSource { // It might be better if the decoder can handle multiple sinks and consider // the VideoSinkWants. 
rtc::VideoBroadcaster broadcaster_; - rtc::CriticalSection mu_; + mutable Mutex mu_; std::vector*> encoded_sinks_ RTC_GUARDED_BY(mu_); Callback* callback_ RTC_GUARDED_BY(worker_sequence_checker_); diff --git a/pc/video_rtp_track_source_unittest.cc b/pc/video_rtp_track_source_unittest.cc index dd527bf59b..ea1b4cacf8 100644 --- a/pc/video_rtp_track_source_unittest.cc +++ b/pc/video_rtp_track_source_unittest.cc @@ -19,13 +19,13 @@ namespace { class MockCallback : public VideoRtpTrackSource::Callback { public: - MOCK_METHOD0(OnGenerateKeyFrame, void()); - MOCK_METHOD1(OnEncodedSinkEnabled, void(bool)); + MOCK_METHOD(void, OnGenerateKeyFrame, (), (override)); + MOCK_METHOD(void, OnEncodedSinkEnabled, (bool), (override)); }; class MockSink : public rtc::VideoSinkInterface { public: - MOCK_METHOD1(OnFrame, void(const RecordableEncodedFrame&)); + MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override)); }; rtc::scoped_refptr MakeSource( @@ -118,7 +118,7 @@ class TestFrame : public RecordableEncodedFrame { EncodedResolution resolution() const override { return EncodedResolution{0, 0}; } - Timestamp render_time() const override { return Timestamp::ms(0); } + Timestamp render_time() const override { return Timestamp::Millis(0); } }; TEST(VideoRtpTrackSourceTest, BroadcastsFrames) { diff --git a/pc/video_track.h b/pc/video_track.h index 90e0758a6c..b7835dee29 100644 --- a/pc/video_track.h +++ b/pc/video_track.h @@ -14,12 +14,12 @@ #include #include "api/media_stream_interface.h" +#include "api/media_stream_track.h" #include "api/scoped_refptr.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/video_source_base.h" -#include "pc/media_stream_track.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" diff --git a/pc/video_track_source.h b/pc/video_track_source.h index ad0fe09619..27331eac4f 100644 --- 
a/pc/video_track_source.h +++ b/pc/video_track_source.h @@ -41,6 +41,13 @@ class RTC_EXPORT VideoTrackSource : public Notifier { const rtc::VideoSinkWants& wants) override; void RemoveSink(rtc::VideoSinkInterface* sink) override; + bool SupportsEncodedOutput() const override { return false; } + void GenerateKeyFrame() override {} + void AddEncodedSink( + rtc::VideoSinkInterface* sink) override {} + void RemoveEncodedSink( + rtc::VideoSinkInterface* sink) override {} + protected: virtual rtc::VideoSourceInterface* source() = 0; diff --git a/pc/webrtc_sdp.cc b/pc/webrtc_sdp.cc index 575f3391a9..b599082304 100644 --- a/pc/webrtc_sdp.cc +++ b/pc/webrtc_sdp.cc @@ -55,9 +55,11 @@ using cricket::ContentInfo; using cricket::CryptoParams; using cricket::ICE_CANDIDATE_COMPONENT_RTCP; using cricket::ICE_CANDIDATE_COMPONENT_RTP; +using cricket::kApplicationSpecificBandwidth; using cricket::kCodecParamMaxPTime; using cricket::kCodecParamMinPTime; using cricket::kCodecParamPTime; +using cricket::kTransportSpecificBandwidth; using cricket::MediaContentDescription; using cricket::MediaProtocolType; using cricket::MediaType; @@ -73,6 +75,7 @@ using cricket::StreamParams; using cricket::StreamParamsVec; using cricket::TransportDescription; using cricket::TransportInfo; +using cricket::UnsupportedContentDescription; using cricket::VideoContentDescription; using rtc::SocketAddress; @@ -170,6 +173,7 @@ static const char kAttributePacketization[] = "packetization"; // Experimental flags static const char kAttributeXGoogleFlag[] = "x-google-flag"; static const char kValueConference[] = "conference"; +static const char kAttributeQuality[] = "quality"; static const char kAttributeRtcpRemoteEstimate[] = "remote-net-estimate"; @@ -217,24 +221,17 @@ static const char kMediaTypeVideo[] = "video"; static const char kMediaTypeAudio[] = "audio"; static const char kMediaTypeData[] = "application"; static const char kMediaPortRejected[] = "0"; -// draft-ietf-mmusic-trickle-ice-01 +static const char 
kMediaAttributeQualityHigh[] = "10"; + // draft-ietf-mmusic-trickle-ice-01 // When no candidates have been gathered, set the connection // address to IP6 ::. // TODO(perkj): FF can not parse IP6 ::. See http://crbug/430333 // Use IPV4 per default. static const char kDummyAddress[] = "0.0.0.0"; static const char kDummyPort[] = "9"; -// RFC 3556 -static const char kApplicationSpecificMaximum[] = "AS"; static const char kDefaultSctpmapProtocol[] = "webrtc-datachannel"; -// This is a non-standardized setting for plugin transports. -static const char kOpaqueTransportParametersLine[] = "x-opaque"; - -// This is a non-standardized setting for plugin transports. -static const char kAltProtocolLine[] = "x-alt-protocol"; - // RTP payload type is in the 0-127 range. Use -1 to indicate "all" payload // types. const int kWildcardPayloadType = -1; @@ -282,9 +279,6 @@ static bool ParseSessionDescription(const std::string& message, rtc::SocketAddress* connection_addr, cricket::SessionDescription* desc, SdpParseError* error); -static bool ParseGroupAttribute(const std::string& line, - cricket::SessionDescription* desc, - SdpParseError* error); static bool ParseMediaDescription( const std::string& message, const TransportDescription& session_td, @@ -308,6 +302,9 @@ static bool ParseContent( TransportDescription* transport, std::vector>* candidates, SdpParseError* error); +static bool ParseGroupAttribute(const std::string& line, + cricket::SessionDescription* desc, + SdpParseError* error); static bool ParseSsrcAttribute(const std::string& line, SsrcInfoVec* ssrc_infos, int* msid_signaling, @@ -523,25 +520,6 @@ static void InitAttrLine(const std::string& attribute, rtc::StringBuilder* os) { InitLine(kLineTypeAttributes, attribute, os); } -// Adds an x-otp SDP attribute line based on opaque transport parameters. 
-static void AddOpaqueTransportLine( - const cricket::OpaqueTransportParameters params, - std::string* message) { - rtc::StringBuilder os; - InitAttrLine(kOpaqueTransportParametersLine, &os); - os << kSdpDelimiterColon << params.protocol << kSdpDelimiterColon - << rtc::Base64::Encode(params.parameters); - AddLine(os.str(), message); -} - -static void AddAltProtocolLine(const std::string& protocol, - std::string* message) { - rtc::StringBuilder os; - InitAttrLine(kAltProtocolLine, &os); - os << kSdpDelimiterColon << protocol; - AddLine(os.str(), message); -} - // Writes a SDP attribute line based on |attribute| and |value| to |message|. static void AddAttributeLine(const std::string& attribute, int value, @@ -686,10 +664,12 @@ void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos, int msid_signaling) { RTC_DCHECK(tracks != NULL); for (const SsrcInfo& ssrc_info : ssrc_infos) { + // According to https://tools.ietf.org/html/rfc5576#section-6.1, the CNAME + // attribute is mandatory, but we relax that restriction. if (ssrc_info.cname.empty()) { - continue; + RTC_LOG(LS_WARNING) << "CNAME attribute missing for SSRC " + << ssrc_info.ssrc_id; } - std::vector stream_ids; std::string track_id; if (msid_signaling & cricket::kMsidSignalingMediaSection) { @@ -1109,11 +1089,14 @@ bool ParseCandidate(const std::string& message, if (!StringToProto(transport.c_str(), &protocol)) { return ParseFailed(first_line, "Unsupported transport type.", error); } + bool tcp_protocol = false; switch (protocol) { + // Supported protocols. case cricket::PROTO_UDP: + break; case cricket::PROTO_TCP: case cricket::PROTO_SSLTCP: - // Supported protocol. 
+ tcp_protocol = true; break; default: return ParseFailed(first_line, "Unsupported transport type.", error); @@ -1170,9 +1153,14 @@ bool ParseCandidate(const std::string& message, return ParseFailed(first_line, "Invalid TCP candidate type.", error); } - if (protocol != cricket::PROTO_TCP) { + if (!tcp_protocol) { return ParseFailed(first_line, "Invalid non-TCP candidate", error); } + } else if (tcp_protocol) { + // We allow the tcptype to be missing, for backwards compatibility, + // treating it as a passive candidate. + // TODO(bugs.webrtc.org/11466): Treat a missing tcptype as an error? + tcptype = cricket::TCPTYPE_PASSIVE_STR; } // Extension @@ -1361,30 +1349,24 @@ void BuildMediaDescription(const ContentInfo* content_info, // RFC 4566 // m= // fmt is a list of payload type numbers that MAY be used in the session. - const char* type = NULL; - if (media_type == cricket::MEDIA_TYPE_AUDIO) - type = kMediaTypeAudio; - else if (media_type == cricket::MEDIA_TYPE_VIDEO) - type = kMediaTypeVideo; - else if (media_type == cricket::MEDIA_TYPE_DATA) - type = kMediaTypeData; - else - RTC_NOTREACHED(); - + std::string type; std::string fmt; if (media_type == cricket::MEDIA_TYPE_VIDEO) { + type = kMediaTypeVideo; const VideoContentDescription* video_desc = media_desc->as_video(); for (const cricket::VideoCodec& codec : video_desc->codecs()) { fmt.append(" "); fmt.append(rtc::ToString(codec.id)); } } else if (media_type == cricket::MEDIA_TYPE_AUDIO) { + type = kMediaTypeAudio; const AudioContentDescription* audio_desc = media_desc->as_audio(); for (const cricket::AudioCodec& codec : audio_desc->codecs()) { fmt.append(" "); fmt.append(rtc::ToString(codec.id)); } } else if (media_type == cricket::MEDIA_TYPE_DATA) { + type = kMediaTypeData; const cricket::SctpDataContentDescription* sctp_data_desc = media_desc->as_sctp(); if (sctp_data_desc) { @@ -1403,6 +1385,12 @@ void BuildMediaDescription(const ContentInfo* content_info, fmt.append(rtc::ToString(codec.id)); } } + } else if 
(media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + const UnsupportedContentDescription* unsupported_desc = + media_desc->as_unsupported(); + type = unsupported_desc->media_type(); + } else { + RTC_NOTREACHED(); } // The fmt must never be empty. If no codecs are found, set the fmt attribute // to 0. @@ -1451,10 +1439,18 @@ void BuildMediaDescription(const ContentInfo* content_info, AddLine(os.str(), message); // RFC 4566 - // b=AS: - if (media_desc->bandwidth() >= 1000) { - InitLine(kLineTypeSessionBandwidth, kApplicationSpecificMaximum, &os); - os << kSdpDelimiterColon << (media_desc->bandwidth() / 1000); + // b=AS: or + // b=TIAS: + int bandwidth = media_desc->bandwidth(); + std::string bandwidth_type = media_desc->bandwidth_type(); + if (bandwidth_type == kApplicationSpecificBandwidth && bandwidth >= 1000) { + InitLine(kLineTypeSessionBandwidth, bandwidth_type, &os); + bandwidth /= 1000; + os << kSdpDelimiterColon << bandwidth; + AddLine(os.str(), message); + } else if (bandwidth_type == kTransportSpecificBandwidth && bandwidth > 0) { + InitLine(kLineTypeSessionBandwidth, bandwidth_type, &os); + os << kSdpDelimiterColon << bandwidth; AddLine(os.str(), message); } @@ -1522,15 +1518,6 @@ void BuildMediaDescription(const ContentInfo* content_info, AddLine(os.str(), message); } } - - if (transport_info->description.opaque_parameters) { - AddOpaqueTransportLine(*transport_info->description.opaque_parameters, - message); - } - } - - if (media_desc->alt_protocol()) { - AddAltProtocolLine(*media_desc->alt_protocol(), message); } // RFC 3388 @@ -1584,6 +1571,8 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, // RFC 3264 // a=sendrecv || a=sendonly || a=sendrecv || a=inactive switch (media_desc->direction()) { + // Special case that for sdp purposes should be treated same as inactive. 
+ case RtpTransceiverDirection::kStopped: case RtpTransceiverDirection::kInactive: InitAttrLine(kAttributeInactive, &os); break; @@ -1594,7 +1583,10 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, InitAttrLine(kAttributeRecvOnly, &os); break; case RtpTransceiverDirection::kSendRecv: + InitAttrLine(kAttributeSendRecv, &os); + break; default: + RTC_NOTREACHED(); InitAttrLine(kAttributeSendRecv, &os); break; } @@ -1781,25 +1773,12 @@ void WriteRtcpFbHeader(int payload_type, rtc::StringBuilder* os) { void WriteFmtpParameter(const std::string& parameter_name, const std::string& parameter_value, rtc::StringBuilder* os) { - // fmtp parameters: |parameter_name|=|parameter_value| - *os << parameter_name << kSdpDelimiterEqual << parameter_value; -} - -void WriteFmtpParameters(const cricket::CodecParameterMap& parameters, - rtc::StringBuilder* os) { - bool first = true; - for (const auto& entry : parameters) { - const std::string& key = entry.first; - const std::string& value = entry.second; - // Parameters are a semicolon-separated list, no spaces. - // The list is separated from the header by a space. - if (first) { - *os << kSdpDelimiterSpace; - first = false; - } else { - *os << kSdpDelimiterSemicolon; - } - WriteFmtpParameter(key, value, os); + if (parameter_name == "") { + // RFC 2198 and RFC 4733 don't use key-value pairs. + *os << parameter_value; + } else { + // fmtp parameters: |parameter_name|=|parameter_value| + *os << parameter_name << kSdpDelimiterEqual << parameter_value; } } @@ -1811,31 +1790,35 @@ bool IsFmtpParam(const std::string& name) { return name != kCodecParamPTime && name != kCodecParamMaxPTime; } -// Retreives fmtp parameters from |params|, which may contain other parameters -// as well, and puts them in |fmtp_parameters|. 
-void GetFmtpParams(const cricket::CodecParameterMap& params, - cricket::CodecParameterMap* fmtp_parameters) { - for (const auto& entry : params) { +bool WriteFmtpParameters(const cricket::CodecParameterMap& parameters, + rtc::StringBuilder* os) { + bool empty = true; + const char* delimiter = ""; // No delimiter before first parameter. + for (const auto& entry : parameters) { const std::string& key = entry.first; const std::string& value = entry.second; + if (IsFmtpParam(key)) { - (*fmtp_parameters)[key] = value; + *os << delimiter; + // A semicolon before each subsequent parameter. + delimiter = kSdpDelimiterSemicolon; + WriteFmtpParameter(key, value, os); + empty = false; } } + + return !empty; } template void AddFmtpLine(const T& codec, std::string* message) { - cricket::CodecParameterMap fmtp_parameters; - GetFmtpParams(codec.params, &fmtp_parameters); - if (fmtp_parameters.empty()) { - // No need to add an fmtp if it will have no (optional) parameters. - return; - } rtc::StringBuilder os; WriteFmtpHeader(codec.id, &os); - WriteFmtpParameters(fmtp_parameters, &os); - AddLine(os.str(), message); + os << kSdpDelimiterSpace; + // Create FMTP line and check that it's nonempty. + if (WriteFmtpParameters(codec.params, &os)) { + AddLine(os.str(), message); + } return; } @@ -2014,7 +1997,11 @@ void BuildCandidate(const std::vector& candidates, << candidate.related_address().PortAsString() << " "; } - if (candidate.protocol() == cricket::TCP_PROTOCOL_NAME) { + // Note that we allow the tcptype to be missing, for backwards + // compatibility; the implementation treats this as a passive candidate. + // TODO(bugs.webrtc.org/11466): Treat a missing tcptype as an error? 
+ if (candidate.protocol() == cricket::TCP_PROTOCOL_NAME && + !candidate.tcptype().empty()) { os << kTcpCandidateType << " " << candidate.tcptype() << " "; } @@ -2100,32 +2087,6 @@ bool ParseConnectionData(const std::string& line, return true; } -bool ParseOpaqueTransportLine(const std::string& line, - std::string* protocol, - std::string* transport_parameters, - SdpParseError* error) { - std::string value; - if (!GetValue(line, kOpaqueTransportParametersLine, &value, error)) { - return false; - } - std::string tmp_parameters; - if (!rtc::tokenize_first(value, kSdpDelimiterColonChar, protocol, - &tmp_parameters)) { - return ParseFailedGetValue(line, kOpaqueTransportParametersLine, error); - } - if (!rtc::Base64::Decode(tmp_parameters, rtc::Base64::DO_STRICT, - transport_parameters, nullptr)) { - return ParseFailedGetValue(line, kOpaqueTransportParametersLine, error); - } - return true; -} - -bool ParseAltProtocolLine(const std::string& line, - std::string* protocol, - SdpParseError* error) { - return GetValue(line, kAltProtocolLine, protocol, error); -} - bool ParseSessionDescription(const std::string& message, size_t* pos, std::string* session_id, @@ -2680,18 +2641,12 @@ bool ParseMediaDescription( if (!rtc::FromString(fields[1], &port) || !IsValidPort(port)) { return ParseFailed(line, "The port number is invalid", error); } - std::string protocol = fields[2]; + const std::string& protocol = fields[2]; // std::vector payload_types; if (cricket::IsRtpProtocol(protocol)) { for (size_t j = 3; j < fields.size(); ++j) { - // TODO(wu): Remove when below bug is fixed. 
- // https://bugzilla.mozilla.org/show_bug.cgi?id=996329 - if (fields[j].empty() && j == fields.size() - 1) { - continue; - } - int pl = 0; if (!GetPayloadTypeFromString(line, fields[j], &pl, error)) { return false; @@ -2711,17 +2666,18 @@ bool ParseMediaDescription( std::string content_name; bool bundle_only = false; int section_msid_signaling = 0; - if (HasAttribute(line, kMediaTypeVideo)) { + const std::string& media_type = fields[0]; + if (media_type == kMediaTypeVideo) { content = ParseContentDescription( message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol, payload_types, pos, &content_name, &bundle_only, §ion_msid_signaling, &transport, candidates, error); - } else if (HasAttribute(line, kMediaTypeAudio)) { + } else if (media_type == kMediaTypeAudio) { content = ParseContentDescription( message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol, payload_types, pos, &content_name, &bundle_only, §ion_msid_signaling, &transport, candidates, error); - } else if (HasAttribute(line, kMediaTypeData)) { + } else if (media_type == kMediaTypeData) { if (cricket::IsDtlsSctp(protocol)) { // The draft-03 format is: // m=application DTLS/SCTP ... @@ -2758,7 +2714,17 @@ bool ParseMediaDescription( } } else { RTC_LOG(LS_WARNING) << "Unsupported media type: " << line; - continue; + auto unsupported_desc = + std::make_unique(media_type); + if (!ParseContent(message, cricket::MEDIA_TYPE_UNSUPPORTED, mline_index, + protocol, payload_types, pos, &content_name, + &bundle_only, §ion_msid_signaling, + unsupported_desc.get(), &transport, candidates, + error)) { + return false; + } + unsupported_desc->set_protocol(protocol); + content = std::move(unsupported_desc); } if (!content.get()) { // ParseContentDescription returns NULL if failed. 
@@ -2786,7 +2752,9 @@ bool ParseMediaDescription( content_rejected = port_rejected; } - if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) { + if (content->as_unsupported()) { + content_rejected = true; + } else if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) { content->set_protocol(protocol); // Set the extmap. if (!session_extmaps.empty() && @@ -3038,46 +3006,61 @@ bool ParseContent(const std::string& message, // b=* (zero or more bandwidth information lines) if (IsLineType(line, kLineTypeSessionBandwidth)) { std::string bandwidth; - if (HasAttribute(line, kApplicationSpecificMaximum)) { - if (!GetValue(line, kApplicationSpecificMaximum, &bandwidth, error)) { - return false; - } else { - int b = 0; - if (!GetValueFromString(line, bandwidth, &b, error)) { - return false; - } - // TODO(deadbeef): Historically, applications may be setting a value - // of -1 to mean "unset any previously set bandwidth limit", even - // though ommitting the "b=AS" entirely will do just that. Once we've - // transitioned applications to doing the right thing, it would be - // better to treat this as a hard error instead of just ignoring it. - if (b == -1) { - RTC_LOG(LS_WARNING) - << "Ignoring \"b=AS:-1\"; will be treated as \"no " - "bandwidth limit\"."; - continue; - } - if (b < 0) { - return ParseFailed(line, "b=AS value can't be negative.", error); - } - // We should never use more than the default bandwidth for RTP-based - // data channels. Don't allow SDP to set the bandwidth, because - // that would give JS the opportunity to "break the Internet". 
- // See: https://code.google.com/p/chromium/issues/detail?id=280726 - if (media_type == cricket::MEDIA_TYPE_DATA && - cricket::IsRtpProtocol(protocol) && - b > cricket::kDataMaxBandwidth / 1000) { - rtc::StringBuilder description; - description << "RTP-based data channels may not send more than " - << cricket::kDataMaxBandwidth / 1000 << "kbps."; - return ParseFailed(line, description.str(), error); - } - // Prevent integer overflow. - b = std::min(b, INT_MAX / 1000); - media_desc->set_bandwidth(b * 1000); - } + std::string bandwidth_type; + if (!rtc::tokenize_first(line.substr(kLinePrefixLength), + kSdpDelimiterColonChar, &bandwidth_type, + &bandwidth)) { + return ParseFailed( + line, + "b= syntax error, does not match b=:.", + error); } - continue; + if (!(bandwidth_type == kApplicationSpecificBandwidth || + bandwidth_type == kTransportSpecificBandwidth)) { + // Ignore unknown bandwidth types. + continue; + } + int b = 0; + if (!GetValueFromString(line, bandwidth, &b, error)) { + return false; + } + // TODO(deadbeef): Historically, applications may be setting a value + // of -1 to mean "unset any previously set bandwidth limit", even + // though ommitting the "b=AS" entirely will do just that. Once we've + // transitioned applications to doing the right thing, it would be + // better to treat this as a hard error instead of just ignoring it. + if (bandwidth_type == kApplicationSpecificBandwidth && b == -1) { + RTC_LOG(LS_WARNING) << "Ignoring \"b=AS:-1\"; will be treated as \"no " + "bandwidth limit\"."; + continue; + } + if (b < 0) { + return ParseFailed( + line, "b=" + bandwidth_type + " value can't be negative.", error); + } + // We should never use more than the default bandwidth for RTP-based + // data channels. Don't allow SDP to set the bandwidth, because + // that would give JS the opportunity to "break the Internet". 
+ // See: https://code.google.com/p/chromium/issues/detail?id=280726 + // Disallow TIAS since it shouldn't be generated for RTP data channels in + // the first place and provides another way to get around the limitation. + if (media_type == cricket::MEDIA_TYPE_DATA && + cricket::IsRtpProtocol(protocol) && + (b > cricket::kRtpDataMaxBandwidth / 1000 || + bandwidth_type == kTransportSpecificBandwidth)) { + rtc::StringBuilder description; + description << "RTP-based data channels may not send more than " + << cricket::kRtpDataMaxBandwidth / 1000 << "kbps."; + return ParseFailed(line, description.str(), error); + } + // Convert values. Prevent integer overflow. + if (bandwidth_type == kApplicationSpecificBandwidth) { + b = std::min(b, INT_MAX / 1000) * 1000; + } else { + b = std::min(b, INT_MAX); + } + media_desc->set_bandwidth(b); + media_desc->set_bandwidth_type(bandwidth_type); } // Parse the media level connection data. @@ -3132,19 +3115,15 @@ bool ParseContent(const std::string& message, if (!ParseIceOptions(line, &transport->transport_options, error)) { return false; } - } else if (HasAttribute(line, kOpaqueTransportParametersLine)) { - transport->opaque_parameters = cricket::OpaqueTransportParameters(); - if (!ParseOpaqueTransportLine( - line, &transport->opaque_parameters->protocol, - &transport->opaque_parameters->parameters, error)) { + } else if (HasAttribute(line, kAttributeQuality)) { + // RFC 4655: a=quality:xx + std::string quality(""); + if (!GetValue(line, kAttributeQuality, &quality, error)) { return false; } - } else if (HasAttribute(line, kAltProtocolLine)) { - std::string alt_protocol; - if (!ParseAltProtocolLine(line, &alt_protocol, error)) { - return false; + if (quality == kMediaAttributeQualityHigh) { + media_desc->set_quality(10); } - media_desc->set_alt_protocol(alt_protocol); } else if (HasAttribute(line, kAttributeFmtp)) { if (!ParseFmtpAttributes(line, media_type, media_desc, error)) { return false; @@ -3676,8 +3655,10 @@ bool 
ParseFmtpParam(const std::string& line, std::string* value, SdpParseError* error) { if (!rtc::tokenize_first(line, kSdpDelimiterEqualChar, parameter, value)) { - ParseFailed(line, "Unable to parse fmtp parameter. \'=\' missing.", error); - return false; + // Support for non-key-value lines like RFC 2198 or RFC 4733. + *parameter = ""; + *value = line; + return true; } // a=fmtp: =; =; ... return true; @@ -3695,7 +3676,7 @@ bool ParseFmtpAttributes(const std::string& line, std::string line_payload; std::string line_params; - // RFC 5576 + // https://tools.ietf.org/html/rfc4566#section-6 // a=fmtp: // At least two fields, whereas the second one is any of the optional // parameters. @@ -3724,17 +3705,15 @@ bool ParseFmtpAttributes(const std::string& line, cricket::CodecParameterMap codec_params; for (auto& iter : fields) { - if (iter.find(kSdpDelimiterEqual) == std::string::npos) { - // Only fmtps with equals are currently supported. Other fmtp types - // should be ignored. Unknown fmtps do not constitute an error. - continue; - } - std::string name; std::string value; if (!ParseFmtpParam(rtc::string_trim(iter), &name, &value, error)) { return false; } + if (codec_params.find(name) != codec_params.end()) { + RTC_LOG(LS_INFO) << "Overwriting duplicate fmtp parameter with key \"" + << name << "\"."; + } codec_params[name] = value; } diff --git a/pc/webrtc_sdp.h b/pc/webrtc_sdp.h index 94008a067f..588e02f139 100644 --- a/pc/webrtc_sdp.h +++ b/pc/webrtc_sdp.h @@ -22,12 +22,17 @@ #include +#include "media/base/codec.h" #include "rtc_base/system/rtc_export.h" namespace cricket { class Candidate; } // namespace cricket +namespace rtc { +class StringBuilder; +} // namespace rtc + namespace webrtc { class IceCandidateInterface; class JsepIceCandidate; @@ -95,6 +100,13 @@ RTC_EXPORT bool ParseCandidate(const std::string& message, SdpParseError* error, bool is_raw); +// Generates an FMTP line based on |parameters|. 
Please note that some +// parameters are not considered to be part of the FMTP line, see the function +// IsFmtpParam(). Returns true if the set of FMTP parameters is nonempty, false +// otherwise. +bool WriteFmtpParameters(const cricket::CodecParameterMap& parameters, + rtc::StringBuilder* os); + } // namespace webrtc #endif // PC_WEBRTC_SDP_H_ diff --git a/pc/webrtc_sdp_unittest.cc b/pc/webrtc_sdp_unittest.cc index 5bb4ffcd5a..cf5384725b 100644 --- a/pc/webrtc_sdp_unittest.cc +++ b/pc/webrtc_sdp_unittest.cc @@ -979,7 +979,11 @@ static void ReplaceDirection(RtpTransceiverDirection direction, new_direction = "a=recvonly"; break; case RtpTransceiverDirection::kSendRecv: + new_direction = "a=sendrecv"; + break; + case RtpTransceiverDirection::kStopped: default: + RTC_NOTREACHED(); new_direction = "a=sendrecv"; break; } @@ -1191,8 +1195,8 @@ class WebRtcSdpTest : public ::testing::Test { // Turns the existing reference description into a plan B description, // with 2 audio tracks and 3 video tracks. 
void MakePlanBDescription() { - audio_desc_ = audio_desc_->Copy(); - video_desc_ = video_desc_->Copy(); + audio_desc_ = new AudioContentDescription(*audio_desc_); + video_desc_ = new VideoContentDescription(*video_desc_); StreamParams audio_track_2; audio_track_2.id = kAudioTrackId2; @@ -1289,8 +1293,7 @@ class WebRtcSdpTest : public ::testing::Test { "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32", "dummy_session_params")); audio->set_protocol(cricket::kMediaProtocolSavpf); - AudioCodec opus(111, "opus", 48000, 0, 2); - audio->AddCodec(opus); + audio->AddCodec(AudioCodec(111, "opus", 48000, 0, 2)); audio->AddCodec(AudioCodec(103, "ISAC", 16000, 0, 1)); audio->AddCodec(AudioCodec(104, "ISAC", 32000, 0, 1)); return audio; @@ -1524,8 +1527,6 @@ class WebRtcSdpTest : public ::testing::Test { CompareSimulcastDescription( c1.media_description()->simulcast_description(), c2.media_description()->simulcast_description()); - EXPECT_EQ(c1.media_description()->alt_protocol(), - c2.media_description()->alt_protocol()); } // group @@ -1580,8 +1581,6 @@ class WebRtcSdpTest : public ::testing::Test { } EXPECT_EQ(transport1.description.transport_options, transport2.description.transport_options); - EXPECT_EQ(transport1.description.opaque_parameters, - transport2.description.opaque_parameters); } // global attributes @@ -1675,23 +1674,6 @@ class WebRtcSdpTest : public ::testing::Test { desc_.AddTransportInfo(transport_info); } - void AddOpaqueTransportParameters(const std::string& content_name, - cricket::OpaqueTransportParameters params) { - ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL); - cricket::TransportInfo info = *(desc_.GetTransportInfoByName(content_name)); - desc_.RemoveTransportInfoByName(content_name); - info.description.opaque_parameters = params; - desc_.AddTransportInfo(info); - } - - void AddAltProtocol(const std::string& content_name, - const std::string& alt_protocol) { - ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != 
NULL); - cricket::MediaContentDescription* description = - desc_.GetContentDescriptionByName(content_name); - description->set_alt_protocol(alt_protocol); - } - void AddFingerprint() { desc_.RemoveTransportInfoByName(kAudioContentName); desc_.RemoveTransportInfoByName(kVideoContentName); @@ -1709,8 +1691,8 @@ class WebRtcSdpTest : public ::testing::Test { } void AddExtmap(bool encrypted) { - audio_desc_ = audio_desc_->Copy(); - video_desc_ = video_desc_->Copy(); + audio_desc_ = new AudioContentDescription(*audio_desc_); + video_desc_ = new VideoContentDescription(*video_desc_); audio_desc_->AddRtpHeaderExtension( RtpExtension(kExtmapUri, kExtmapId, encrypted)); video_desc_->AddRtpHeaderExtension( @@ -1790,8 +1772,8 @@ class WebRtcSdpTest : public ::testing::Test { } bool TestSerializeRejected(bool audio_rejected, bool video_rejected) { - audio_desc_ = audio_desc_->Copy(); - video_desc_ = video_desc_->Copy(); + audio_desc_ = new AudioContentDescription(*audio_desc_); + video_desc_ = new VideoContentDescription(*video_desc_); desc_.RemoveContentByName(kAudioContentName); desc_.RemoveContentByName(kVideoContentName); @@ -1872,8 +1854,8 @@ class WebRtcSdpTest : public ::testing::Test { JsepSessionDescription new_jdesc(SdpType::kOffer); EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc)); - audio_desc_ = audio_desc_->Copy(); - video_desc_ = video_desc_->Copy(); + audio_desc_ = new AudioContentDescription(*audio_desc_); + video_desc_ = new VideoContentDescription(*video_desc_); desc_.RemoveContentByName(kAudioContentName); desc_.RemoveContentByName(kVideoContentName); desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_rejected, @@ -1951,13 +1933,14 @@ class WebRtcSdpTest : public ::testing::Test { // description. "a=msid-semantic: WMS\r\n" // Pl type 111 preferred. - "m=audio 9 RTP/SAVPF 111 104 103\r\n" + "m=audio 9 RTP/SAVPF 111 104 103 105\r\n" // Pltype 111 listed before 103 and 104 in the map. 
"a=rtpmap:111 opus/48000/2\r\n" // Pltype 103 listed before 104. "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" - "a=fmtp:111 0-15,66,70\r\n" + "a=rtpmap:105 telephone-event/8000\r\n" + "a=fmtp:105 0-15,66,70\r\n" "a=fmtp:111 "; std::ostringstream os; os << "minptime=" << params.min_ptime << "; stereo=" << params.stereo @@ -2004,6 +1987,14 @@ class WebRtcSdpTest : public ::testing::Test { VerifyCodecParameter(codec.params, "maxptime", params.max_ptime); } + cricket::AudioCodec dtmf = acd->codecs()[3]; + EXPECT_EQ("telephone-event", dtmf.name); + EXPECT_EQ(105, dtmf.id); + EXPECT_EQ(3u, + dtmf.params.size()); // ptime and max_ptime count as parameters. + EXPECT_EQ(dtmf.params.begin()->first, ""); + EXPECT_EQ(dtmf.params.begin()->second, "0-15,66,70"); + const VideoContentDescription* vcd = GetFirstVideoContentDescription(jdesc_output->description()); ASSERT_TRUE(vcd); @@ -2198,16 +2189,31 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundle) { TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBandwidth) { VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); - vcd->set_bandwidth(100 * 1000); + vcd->set_bandwidth(100 * 1000 + 755); // Integer division will drop the 755. + vcd->set_bandwidth_type("AS"); AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); - acd->set_bandwidth(50 * 1000); + acd->set_bandwidth(555); + acd->set_bandwidth_type("TIAS"); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); std::string message = webrtc::SdpSerialize(jdesc_); std::string sdp_with_bandwidth = kSdpFullString; InjectAfter("c=IN IP4 74.125.224.39\r\n", "b=AS:100\r\n", &sdp_with_bandwidth); - InjectAfter("c=IN IP4 74.125.127.126\r\n", "b=AS:50\r\n", + InjectAfter("c=IN IP4 74.125.127.126\r\n", "b=TIAS:555\r\n", + &sdp_with_bandwidth); + EXPECT_EQ(sdp_with_bandwidth, message); +} + +// Should default to b=AS if bandwidth_type isn't set. 
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithMissingBandwidthType) { + VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + vcd->set_bandwidth(100 * 1000); + ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), + jdesc_.session_version())); + std::string message = webrtc::SdpSerialize(jdesc_); + std::string sdp_with_bandwidth = kSdpFullString; + InjectAfter("c=IN IP4 74.125.224.39\r\n", "b=AS:100\r\n", &sdp_with_bandwidth); EXPECT_EQ(sdp_with_bandwidth, message); } @@ -2232,41 +2238,6 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIceOptions) { EXPECT_EQ(sdp_with_ice_options, message); } -TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithOpaqueTransportParams) { - cricket::OpaqueTransportParameters params; - params.protocol = "foo"; - params.parameters = "test64"; - AddOpaqueTransportParameters(kAudioContentName, params); - AddOpaqueTransportParameters(kVideoContentName, params); - - ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), - jdesc_.session_version())); - std::string message = webrtc::SdpSerialize(jdesc_); - - std::string sdp_with_transport_parameters = kSdpFullString; - InjectAfter(kAttributeIcePwdVoice, "a=x-opaque:foo:dGVzdDY0\r\n", - &sdp_with_transport_parameters); - InjectAfter(kAttributeIcePwdVideo, "a=x-opaque:foo:dGVzdDY0\r\n", - &sdp_with_transport_parameters); - EXPECT_EQ(message, sdp_with_transport_parameters); -} - -TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAltProtocol) { - AddAltProtocol(kAudioContentName, "foo"); - AddAltProtocol(kVideoContentName, "bar"); - - ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), - jdesc_.session_version())); - std::string message = webrtc::SdpSerialize(jdesc_); - - std::string sdp_with_alt_protocol = kSdpFullString; - InjectAfter(kAttributeIcePwdVoice, "a=x-alt-protocol:foo\r\n", - &sdp_with_alt_protocol); - InjectAfter(kAttributeIcePwdVideo, "a=x-alt-protocol:bar\r\n", - &sdp_with_alt_protocol); - 
EXPECT_EQ(message, sdp_with_alt_protocol); -} - TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRecvOnlyContent) { EXPECT_TRUE(TestSerializeDirection(RtpTransceiverDirection::kRecvOnly)); } @@ -2353,6 +2324,7 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithDataChannelAndBandwidth) { JsepSessionDescription jsep_desc(kDummyType); AddRtpDataChannel(); data_desc_->set_bandwidth(100 * 1000); + data_desc_->set_bandwidth_type("AS"); MakeDescriptionWithoutCandidates(&jsep_desc); std::string message = webrtc::SdpSerialize(jsep_desc); @@ -2441,8 +2413,6 @@ TEST_F(WebRtcSdpTest, SerializeHostnameCandidate) { EXPECT_EQ(std::string(kRawHostnameCandidate), message); } -// TODO(mallinath) : Enable this test once WebRTCSdp capable of parsing -// RFC 6544. TEST_F(WebRtcSdpTest, SerializeTcpCandidates) { Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp", rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority, @@ -2456,6 +2426,32 @@ TEST_F(WebRtcSdpTest, SerializeTcpCandidates) { EXPECT_EQ(std::string(kSdpTcpActiveCandidate), message); } +// Test serializing a TCP candidate that came in with a missing tcptype. This +// shouldn't happen according to the spec, but our implementation has been +// accepting this for quite some time, treating it as a passive candidate. +// +// So, we should be able to at least convert such candidates to and from SDP. 
+// See: bugs.webrtc.org/11423 +TEST_F(WebRtcSdpTest, ParseTcpCandidateWithoutTcptype) { + std::string missing_tcptype = + "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9999 typ host"; + JsepIceCandidate jcandidate(kDummyMid, kDummyIndex); + EXPECT_TRUE(SdpDeserializeCandidate(missing_tcptype, &jcandidate)); + + EXPECT_EQ(std::string(cricket::TCPTYPE_PASSIVE_STR), + jcandidate.candidate().tcptype()); +} + +TEST_F(WebRtcSdpTest, ParseSslTcpCandidate) { + std::string ssltcp = + "candidate:a0+B/1 1 ssltcp 2130706432 192.168.1.5 9999 typ host tcptype " + "passive"; + JsepIceCandidate jcandidate(kDummyMid, kDummyIndex); + EXPECT_TRUE(SdpDeserializeCandidate(ssltcp, &jcandidate)); + + EXPECT_EQ(std::string("ssltcp"), jcandidate.candidate().protocol()); +} + TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithH264) { cricket::VideoCodec h264_codec("H264"); h264_codec.SetParam("profile-level-id", "42e01f"); @@ -2632,6 +2628,41 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBandwidth) { EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth)); } +TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithTiasBandwidth) { + JsepSessionDescription jdesc_with_bandwidth(kDummyType); + std::string sdp_with_bandwidth = kSdpFullString; + InjectAfter("a=mid:video_content_name\r\na=sendrecv\r\n", "b=TIAS:100000\r\n", + &sdp_with_bandwidth); + InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n", "b=TIAS:50000\r\n", + &sdp_with_bandwidth); + EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth)); + VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + vcd->set_bandwidth(100 * 1000); + AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + acd->set_bandwidth(50 * 1000); + ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), + jdesc_.session_version())); + EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth)); +} + +TEST_F(WebRtcSdpTest, + 
DeserializeSessionDescriptionWithUnknownBandwidthModifier) { + JsepSessionDescription jdesc_with_bandwidth(kDummyType); + std::string sdp_with_bandwidth = kSdpFullString; + InjectAfter("a=mid:video_content_name\r\na=sendrecv\r\n", + "b=unknown:100000\r\n", &sdp_with_bandwidth); + InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n", + "b=unknown:50000\r\n", &sdp_with_bandwidth); + EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth)); + VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + vcd->set_bandwidth(-1); + AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + acd->set_bandwidth(-1); + ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), + jdesc_.session_version())); + EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth)); +} + TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithIceOptions) { JsepSessionDescription jdesc_with_ice_options(kDummyType); std::string sdp_with_ice_options = kSdpFullString; @@ -2655,48 +2686,6 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithIceOptions) { EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ice_options)); } -TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithOpaqueTransportParams) { - std::string sdp_with_transport_parameters = kSdpFullString; - InjectAfter(kAttributeIcePwdVoice, "a=x-opaque:foo:dGVzdDY0\r\n", - &sdp_with_transport_parameters); - InjectAfter(kAttributeIcePwdVideo, "a=x-opaque:foo:dGVzdDY0\r\n", - &sdp_with_transport_parameters); - - JsepSessionDescription jdesc_with_transport_parameters(kDummyType); - EXPECT_TRUE(SdpDeserialize(sdp_with_transport_parameters, - &jdesc_with_transport_parameters)); - - cricket::OpaqueTransportParameters params; - params.protocol = "foo"; - params.parameters = "test64"; - - AddOpaqueTransportParameters(kAudioContentName, params); - AddOpaqueTransportParameters(kVideoContentName, params); - - ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), - 
jdesc_.session_version())); - EXPECT_TRUE( - CompareSessionDescription(jdesc_, jdesc_with_transport_parameters)); -} - -TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithAltProtocol) { - std::string sdp_with_alt_protocol = kSdpFullString; - InjectAfter(kAttributeIcePwdVoice, "a=x-alt-protocol:foo\r\n", - &sdp_with_alt_protocol); - InjectAfter(kAttributeIcePwdVideo, "a=x-alt-protocol:bar\r\n", - &sdp_with_alt_protocol); - - JsepSessionDescription jdesc_with_alt_protocol(kDummyType); - EXPECT_TRUE(SdpDeserialize(sdp_with_alt_protocol, &jdesc_with_alt_protocol)); - - AddAltProtocol(kAudioContentName, "foo"); - AddAltProtocol(kVideoContentName, "bar"); - - ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), - jdesc_.session_version())); - EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_alt_protocol)); -} - TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithUfragPwd) { // Remove the original ice-ufrag and ice-pwd JsepSessionDescription jdesc_with_ufrag_pwd(kDummyType); @@ -2987,6 +2976,25 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpColonPort) { EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output)); } +TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsButWrongMediaType) { + bool use_sctpmap = true; + AddSctpDataChannel(use_sctpmap); + JsepSessionDescription jdesc(kDummyType); + ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion)); + + std::string sdp = kSdpSessionString; + sdp += kSdpSctpDataChannelString; + + const char needle[] = "m=application "; + sdp.replace(sdp.find(needle), strlen(needle), "m=application:bogus "); + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + + EXPECT_EQ(1u, jdesc_output.description()->contents().size()); + EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected); +} + // Helper function to set the max-message-size parameter in the // SCTP data codec. 
void MutateJsepSctpMaxMessageSize(const SessionDescription& desc, @@ -3335,6 +3343,7 @@ TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) { // Broken media description ExpectParseFailure("m=audio", "c=IN IP4 74.125.224.39"); ExpectParseFailure("m=video", kSdpDestroyer); + ExpectParseFailure("m=", "c=IN IP4 74.125.224.39"); // Invalid lines ExpectParseFailure("a=candidate", kSdpEmptyType); @@ -3379,6 +3388,13 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithInvalidAttributeValue) { // bandwidth ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n", "b=AS:badvalue\r\n", "b=AS:badvalue"); + ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n", "b=AS\r\n", + "b=AS"); + ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n", "b=AS:\r\n", + "b=AS:"); + ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n", + "b=AS:12:34\r\n", "b=AS:12:34"); + // rtcp-fb ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n", "a=rtcp-fb:badvalue nack\r\n", @@ -3643,6 +3659,28 @@ TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithPTimeAndMaxPTime) { EXPECT_EQ(sdp_with_fmtp, message); } +TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithTelephoneEvent) { + AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + + cricket::AudioCodecs codecs = acd->codecs(); + cricket::AudioCodec dtmf(105, "telephone-event", 8000, 0, 1); + dtmf.params[""] = "0-15"; + codecs.push_back(dtmf); + acd->set_codecs(codecs); + + ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), + jdesc_.session_version())); + std::string message = webrtc::SdpSerialize(jdesc_); + std::string sdp_with_fmtp = kSdpFullString; + InjectAfter("m=audio 2345 RTP/SAVPF 111 103 104", " 105", &sdp_with_fmtp); + InjectAfter( + "a=rtpmap:104 ISAC/32000\r\n", + "a=rtpmap:105 telephone-event/8000\r\n" // No comma here. String merging! 
+ "a=fmtp:105 0-15\r\n", + &sdp_with_fmtp); + EXPECT_EQ(sdp_with_fmtp, message); +} + TEST_F(WebRtcSdpTest, SerializeVideoFmtp) { VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); @@ -4672,3 +4710,48 @@ TEST_F(WebRtcSdpTest, DeserializeWithAllSctpProtocols) { EXPECT_TRUE(webrtc::SdpDeserialize(message, &jsep_output, &error)); } } + +// According to https://tools.ietf.org/html/rfc5576#section-6.1, the CNAME +// attribute is mandatory, but we relax that restriction. +TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCname) { + std::string sdp_without_cname = kSdpFullString; + Replace("a=ssrc:1 cname:stream_1_cname\r\n", "", &sdp_without_cname); + JsepSessionDescription new_jdesc(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp_without_cname, &new_jdesc)); + + audio_desc_->mutable_streams()[0].cname = ""; + ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), + jdesc_.session_version())); + EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc)); +} + +TEST_F(WebRtcSdpTest, DeserializeSdpWithUnsupportedMediaType) { + std::string sdp = kSdpSessionString; + sdp += + "m=bogus 9 RTP/SAVPF 0 8\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=mid:bogusmid\r\n"; + sdp += + "m=audio/something 9 RTP/SAVPF 0 8\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=mid:somethingmid\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output)); + + ASSERT_EQ(2u, jdesc_output.description()->contents().size()); + ASSERT_NE(nullptr, jdesc_output.description() + ->contents()[0] + .media_description() + ->as_unsupported()); + ASSERT_NE(nullptr, jdesc_output.description() + ->contents()[1] + .media_description() + ->as_unsupported()); + + EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected); + EXPECT_TRUE(jdesc_output.description()->contents()[1].rejected); + + EXPECT_EQ(jdesc_output.description()->contents()[0].name, "bogusmid"); + EXPECT_EQ(jdesc_output.description()->contents()[1].name, "somethingmid"); +} diff --git 
a/pc/webrtc_session_description_factory.cc b/pc/webrtc_session_description_factory.cc index aaef7fdeb6..2a9dc3fbd8 100644 --- a/pc/webrtc_session_description_factory.cc +++ b/pc/webrtc_session_description_factory.cc @@ -11,9 +11,10 @@ #include "pc/webrtc_session_description_factory.h" #include - +#include #include #include +#include #include #include @@ -22,6 +23,7 @@ #include "api/jsep.h" #include "api/jsep_session_description.h" #include "api/rtc_error.h" +#include "pc/sdp_state_provider.h" #include "pc/session_description.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" @@ -125,11 +127,14 @@ void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( rtc::Thread* signaling_thread, cricket::ChannelManager* channel_manager, - PeerConnectionInternal* pc, + const SdpStateProvider* sdp_info, const std::string& session_id, + bool dtls_enabled, std::unique_ptr cert_generator, const rtc::scoped_refptr& certificate, - UniqueRandomIdGenerator* ssrc_generator) + UniqueRandomIdGenerator* ssrc_generator, + std::function&)> + on_certificate_ready) : signaling_thread_(signaling_thread), session_desc_factory_(channel_manager, &transport_desc_factory_, @@ -139,20 +144,21 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( // to just use a random number as session id and start version from // |kInitSessionVersion|. session_version_(kInitSessionVersion), - cert_generator_(std::move(cert_generator)), - pc_(pc), + cert_generator_(dtls_enabled ? std::move(cert_generator) : nullptr), + sdp_info_(sdp_info), session_id_(session_id), - certificate_request_state_(CERTIFICATE_NOT_NEEDED) { + certificate_request_state_(CERTIFICATE_NOT_NEEDED), + on_certificate_ready_(on_certificate_ready) { RTC_DCHECK(signaling_thread_); - RTC_DCHECK(!(cert_generator_ && certificate)); - bool dtls_enabled = cert_generator_ || certificate; - // SRTP-SDES is disabled if DTLS is on. 
- SetSdesPolicy(dtls_enabled ? cricket::SEC_DISABLED : cricket::SEC_REQUIRED); + if (!dtls_enabled) { + SetSdesPolicy(cricket::SEC_REQUIRED); RTC_LOG(LS_VERBOSE) << "DTLS-SRTP disabled."; return; } + // SRTP-SDES is disabled if DTLS is on. + SetSdesPolicy(cricket::SEC_DISABLED); if (certificate) { // Use |certificate|. certificate_request_state_ = CERTIFICATE_WAITING; @@ -252,13 +258,13 @@ void WebRtcSessionDescriptionFactory::CreateAnswer( PostCreateSessionDescriptionFailed(observer, error); return; } - if (!pc_->remote_description()) { + if (!sdp_info_->remote_description()) { error += " can't be called before SetRemoteDescription."; RTC_LOG(LS_ERROR) << error; PostCreateSessionDescriptionFailed(observer, error); return; } - if (pc_->remote_description()->GetType() != SdpType::kOffer) { + if (sdp_info_->remote_description()->GetType() != SdpType::kOffer) { error += " failed because remote_description is not an offer."; RTC_LOG(LS_ERROR) << error; PostCreateSessionDescriptionFailed(observer, error); @@ -325,12 +331,12 @@ void WebRtcSessionDescriptionFactory::OnMessage(rtc::Message* msg) { void WebRtcSessionDescriptionFactory::InternalCreateOffer( CreateSessionDescriptionRequest request) { - if (pc_->local_description()) { + if (sdp_info_->local_description()) { // If the needs-ice-restart flag is set as described by JSEP, we should // generate an offer with a new ufrag/password to trigger an ICE restart. for (cricket::MediaDescriptionOptions& options : request.options.media_description_options) { - if (pc_->NeedsIceRestart(options.mid)) { + if (sdp_info_->NeedsIceRestart(options.mid)) { options.transport_options.ice_restart = true; } } @@ -338,8 +344,8 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( std::unique_ptr desc = session_desc_factory_.CreateOffer( - request.options, pc_->local_description() - ? pc_->local_description()->description() + request.options, sdp_info_->local_description() + ? 
sdp_info_->local_description()->description() : nullptr); if (!desc) { PostCreateSessionDescriptionFailed(request.observer, @@ -360,11 +366,11 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( auto offer = std::make_unique( SdpType::kOffer, std::move(desc), session_id_, rtc::ToString(session_version_++)); - if (pc_->local_description()) { + if (sdp_info_->local_description()) { for (const cricket::MediaDescriptionOptions& options : request.options.media_description_options) { if (!options.transport_options.ice_restart) { - CopyCandidatesFromSessionDescription(pc_->local_description(), + CopyCandidatesFromSessionDescription(sdp_info_->local_description(), options.mid, offer.get()); } } @@ -374,31 +380,34 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( void WebRtcSessionDescriptionFactory::InternalCreateAnswer( CreateSessionDescriptionRequest request) { - if (pc_->remote_description()) { + if (sdp_info_->remote_description()) { for (cricket::MediaDescriptionOptions& options : request.options.media_description_options) { // According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1 // an answer should also contain new ICE ufrag and password if an offer // has been received with new ufrag and password. options.transport_options.ice_restart = - pc_->IceRestartPending(options.mid); - // We should pass the current SSL role to the transport description + sdp_info_->IceRestartPending(options.mid); + // We should pass the current DTLS role to the transport description // factory, if there is already an existing ongoing session. - rtc::SSLRole ssl_role; - if (pc_->GetSslRole(options.mid, &ssl_role)) { + absl::optional dtls_role = + sdp_info_->GetDtlsRole(options.mid); + if (dtls_role) { options.transport_options.prefer_passive_role = - (rtc::SSL_SERVER == ssl_role); + (rtc::SSL_SERVER == *dtls_role); } } } std::unique_ptr desc = session_desc_factory_.CreateAnswer( - pc_->remote_description() ? 
pc_->remote_description()->description() - : nullptr, + sdp_info_->remote_description() + ? sdp_info_->remote_description()->description() + : nullptr, request.options, - pc_->local_description() ? pc_->local_description()->description() - : nullptr); + sdp_info_->local_description() + ? sdp_info_->local_description()->description() + : nullptr); if (!desc) { PostCreateSessionDescriptionFailed(request.observer, "Failed to initialize the answer."); @@ -416,13 +425,13 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer( auto answer = std::make_unique( SdpType::kAnswer, std::move(desc), session_id_, rtc::ToString(session_version_++)); - if (pc_->local_description()) { + if (sdp_info_->local_description()) { // Include all local ICE candidates in the SessionDescription unless // the remote peer has requested an ICE restart. for (const cricket::MediaDescriptionOptions& options : request.options.media_description_options) { if (!options.transport_options.ice_restart) { - CopyCandidatesFromSessionDescription(pc_->local_description(), + CopyCandidatesFromSessionDescription(sdp_info_->local_description(), options.mid, answer.get()); } } @@ -481,7 +490,8 @@ void WebRtcSessionDescriptionFactory::SetCertificate( RTC_LOG(LS_VERBOSE) << "Setting new certificate."; certificate_request_state_ = CERTIFICATE_SUCCEEDED; - SignalCertificateReady(certificate); + + on_certificate_ready_(certificate); transport_desc_factory_.set_certificate(certificate); transport_desc_factory_.set_secure(cricket::SEC_ENABLED); diff --git a/pc/webrtc_session_description_factory.h b/pc/webrtc_session_description_factory.h index f70b847b4e..9256045d6b 100644 --- a/pc/webrtc_session_description_factory.h +++ b/pc/webrtc_session_description_factory.h @@ -12,7 +12,6 @@ #define PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_ #include - #include #include #include @@ -22,22 +21,23 @@ #include "api/scoped_refptr.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" 
+#include "pc/channel_manager.h" #include "pc/media_session.h" -#include "pc/peer_connection_internal.h" +#include "pc/sdp_state_provider.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/message_handler.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_message.h" #include "rtc_base/unique_id_generator.h" namespace webrtc { // DTLS certificate request callback class. class WebRtcCertificateGeneratorCallback - : public rtc::RTCCertificateGeneratorCallback, - public sigslot::has_slots<> { + : public rtc::RTCCertificateGeneratorCallback { public: // |rtc::RTCCertificateGeneratorCallback| overrides. void OnSuccess( @@ -80,11 +80,14 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, WebRtcSessionDescriptionFactory( rtc::Thread* signaling_thread, cricket::ChannelManager* channel_manager, - PeerConnectionInternal* pc, + const SdpStateProvider* sdp_info, const std::string& session_id, + bool dtls_enabled, std::unique_ptr cert_generator, const rtc::scoped_refptr& certificate, - rtc::UniqueRandomIdGenerator* ssrc_generator); + rtc::UniqueRandomIdGenerator* ssrc_generator, + std::function&)> + on_certificate_ready); virtual ~WebRtcSessionDescriptionFactory(); static void CopyCandidatesFromSessionDescription( @@ -110,9 +113,6 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, session_desc_factory_.set_is_unified_plan(is_unified_plan); } - sigslot::signal1&> - SignalCertificateReady; - // For testing. 
bool waiting_for_certificate_for_testing() const { return certificate_request_state_ == CERTIFICATE_WAITING; @@ -151,12 +151,13 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, cricket::MediaSessionDescriptionFactory session_desc_factory_; uint64_t session_version_; const std::unique_ptr cert_generator_; - // TODO(jiayl): remove the dependency on peer connection once bug 2264 is - // fixed. - PeerConnectionInternal* const pc_; + const SdpStateProvider* sdp_info_; const std::string session_id_; CertificateRequestState certificate_request_state_; + std::function&)> + on_certificate_ready_; + RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory); }; } // namespace webrtc diff --git a/presubmit_test.py b/presubmit_test.py index 287071c1a3..bb93765f28 100755 --- a/presubmit_test.py +++ b/presubmit_test.py @@ -20,146 +20,145 @@ class CheckBugEntryFieldTest(unittest.TestCase): - def testCommitMessageBugEntryWithNoError(self): - mock_input_api = MockInputApi() - mock_output_api = MockOutputApi() - mock_input_api.change = MockChange([], ['webrtc:1234']) - errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, - mock_output_api) - self.assertEqual(0, len(errors)) - - def testCommitMessageBugEntryReturnError(self): - mock_input_api = MockInputApi() - mock_output_api = MockOutputApi() - mock_input_api.change = MockChange([], ['webrtc:1234', 'webrtc=4321']) - errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, - mock_output_api) - self.assertEqual(1, len(errors)) - self.assertEqual(('Bogus Bug entry: webrtc=4321. Please specify' - ' the issue tracker prefix and the issue number,' - ' separated by a colon, e.g. 
webrtc:123 or' - ' chromium:12345.'), str(errors[0])) - - def testCommitMessageBugEntryWithoutPrefix(self): - mock_input_api = MockInputApi() - mock_output_api = MockOutputApi() - mock_input_api.change = MockChange([], ['1234']) - errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, - mock_output_api) - self.assertEqual(1, len(errors)) - self.assertEqual(('Bug entry requires issue tracker prefix, ' - 'e.g. webrtc:1234'), str(errors[0])) - - def testCommitMessageBugEntryIsNone(self): - mock_input_api = MockInputApi() - mock_output_api = MockOutputApi() - mock_input_api.change = MockChange([], ['None']) - errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, - mock_output_api) - self.assertEqual(0, len(errors)) - - def testCommitMessageBugEntrySupportInternalBugReference(self): - mock_input_api = MockInputApi() - mock_output_api = MockOutputApi() - mock_input_api.change.BUG = 'b/12345' - errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, - mock_output_api) - self.assertEqual(0, len(errors)) - mock_input_api.change.BUG = 'b/12345, webrtc:1234' - errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, - mock_output_api) - self.assertEqual(0, len(errors)) + def testCommitMessageBugEntryWithNoError(self): + mock_input_api = MockInputApi() + mock_output_api = MockOutputApi() + mock_input_api.change = MockChange([], ['webrtc:1234']) + errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, + mock_output_api) + self.assertEqual(0, len(errors)) + + def testCommitMessageBugEntryReturnError(self): + mock_input_api = MockInputApi() + mock_output_api = MockOutputApi() + mock_input_api.change = MockChange([], ['webrtc:1234', 'webrtc=4321']) + errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, + mock_output_api) + self.assertEqual(1, len(errors)) + self.assertEqual(('Bogus Bug entry: webrtc=4321. Please specify' + ' the issue tracker prefix and the issue number,' + ' separated by a colon, e.g. 
webrtc:123 or' + ' chromium:12345.'), str(errors[0])) + + def testCommitMessageBugEntryWithoutPrefix(self): + mock_input_api = MockInputApi() + mock_output_api = MockOutputApi() + mock_input_api.change = MockChange([], ['1234']) + errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, + mock_output_api) + self.assertEqual(1, len(errors)) + self.assertEqual(('Bug entry requires issue tracker prefix, ' + 'e.g. webrtc:1234'), str(errors[0])) + + def testCommitMessageBugEntryIsNone(self): + mock_input_api = MockInputApi() + mock_output_api = MockOutputApi() + mock_input_api.change = MockChange([], ['None']) + errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, + mock_output_api) + self.assertEqual(0, len(errors)) + + def testCommitMessageBugEntrySupportInternalBugReference(self): + mock_input_api = MockInputApi() + mock_output_api = MockOutputApi() + mock_input_api.change.BUG = 'b/12345' + errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, + mock_output_api) + self.assertEqual(0, len(errors)) + mock_input_api.change.BUG = 'b/12345, webrtc:1234' + errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, + mock_output_api) + self.assertEqual(0, len(errors)) class CheckNewlineAtTheEndOfProtoFilesTest(unittest.TestCase): - - def setUp(self): - self.tmp_dir = tempfile.mkdtemp() - self.proto_file_path = os.path.join(self.tmp_dir, 'foo.proto') - self.input_api = MockInputApi() - self.output_api = MockOutputApi() - - def tearDown(self): - shutil.rmtree(self.tmp_dir, ignore_errors=True) - - def testErrorIfProtoFileDoesNotEndWithNewline(self): - self._GenerateProtoWithoutNewlineAtTheEnd() - self.input_api.files = [MockFile(self.proto_file_path)] - errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles(self.input_api, - self.output_api, - lambda x: True) - self.assertEqual(1, len(errors)) - self.assertEqual( - 'File %s must end with exactly one newline.' 
% self.proto_file_path, - str(errors[0])) - - def testNoErrorIfProtoFileEndsWithNewline(self): - self._GenerateProtoWithNewlineAtTheEnd() - self.input_api.files = [MockFile(self.proto_file_path)] - errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles(self.input_api, - self.output_api, - lambda x: True) - self.assertEqual(0, len(errors)) - - def _GenerateProtoWithNewlineAtTheEnd(self): - with open(self.proto_file_path, 'w') as f: - f.write(textwrap.dedent(""" + def setUp(self): + self.tmp_dir = tempfile.mkdtemp() + self.proto_file_path = os.path.join(self.tmp_dir, 'foo.proto') + self.input_api = MockInputApi() + self.output_api = MockOutputApi() + + def tearDown(self): + shutil.rmtree(self.tmp_dir, ignore_errors=True) + + def testErrorIfProtoFileDoesNotEndWithNewline(self): + self._GenerateProtoWithoutNewlineAtTheEnd() + self.input_api.files = [MockFile(self.proto_file_path)] + errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles( + self.input_api, self.output_api, lambda x: True) + self.assertEqual(1, len(errors)) + self.assertEqual( + 'File %s must end with exactly one newline.' 
% + self.proto_file_path, str(errors[0])) + + def testNoErrorIfProtoFileEndsWithNewline(self): + self._GenerateProtoWithNewlineAtTheEnd() + self.input_api.files = [MockFile(self.proto_file_path)] + errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles( + self.input_api, self.output_api, lambda x: True) + self.assertEqual(0, len(errors)) + + def _GenerateProtoWithNewlineAtTheEnd(self): + with open(self.proto_file_path, 'w') as f: + f.write( + textwrap.dedent(""" syntax = "proto2"; option optimize_for = LITE_RUNTIME; package webrtc.audioproc; """)) - def _GenerateProtoWithoutNewlineAtTheEnd(self): - with open(self.proto_file_path, 'w') as f: - f.write(textwrap.dedent(""" + def _GenerateProtoWithoutNewlineAtTheEnd(self): + with open(self.proto_file_path, 'w') as f: + f.write( + textwrap.dedent(""" syntax = "proto2"; option optimize_for = LITE_RUNTIME; package webrtc.audioproc;""")) class CheckNoMixingSourcesTest(unittest.TestCase): + def setUp(self): + self.tmp_dir = tempfile.mkdtemp() + self.file_path = os.path.join(self.tmp_dir, 'BUILD.gn') + self.input_api = MockInputApi() + self.output_api = MockOutputApi() - def setUp(self): - self.tmp_dir = tempfile.mkdtemp() - self.file_path = os.path.join(self.tmp_dir, 'BUILD.gn') - self.input_api = MockInputApi() - self.output_api = MockOutputApi() - - def tearDown(self): - shutil.rmtree(self.tmp_dir, ignore_errors=True) + def tearDown(self): + shutil.rmtree(self.tmp_dir, ignore_errors=True) - def testErrorIfCAndCppAreMixed(self): - self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.cc', 'bar.h']) + def testErrorIfCAndCppAreMixed(self): + self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.cc', 'bar.h']) - def testErrorIfCAndObjCAreMixed(self): - self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.m', 'bar.h']) + def testErrorIfCAndObjCAreMixed(self): + self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.m', 'bar.h']) - def testErrorIfCAndObjCppAreMixed(self): - self._AssertNumberOfErrorsWithSources(1, 
['foo.c', 'bar.mm', 'bar.h']) + def testErrorIfCAndObjCppAreMixed(self): + self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.mm', 'bar.h']) - def testErrorIfCppAndObjCAreMixed(self): - self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.m', 'bar.h']) + def testErrorIfCppAndObjCAreMixed(self): + self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.m', 'bar.h']) - def testErrorIfCppAndObjCppAreMixed(self): - self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.mm', 'bar.h']) + def testErrorIfCppAndObjCppAreMixed(self): + self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.mm', 'bar.h']) - def testNoErrorIfOnlyC(self): - self._AssertNumberOfErrorsWithSources(0, ['foo.c', 'bar.c', 'bar.h']) + def testNoErrorIfOnlyC(self): + self._AssertNumberOfErrorsWithSources(0, ['foo.c', 'bar.c', 'bar.h']) - def testNoErrorIfOnlyCpp(self): - self._AssertNumberOfErrorsWithSources(0, ['foo.cc', 'bar.cc', 'bar.h']) + def testNoErrorIfOnlyCpp(self): + self._AssertNumberOfErrorsWithSources(0, ['foo.cc', 'bar.cc', 'bar.h']) - def testNoErrorIfOnlyObjC(self): - self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.m', 'bar.h']) + def testNoErrorIfOnlyObjC(self): + self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.m', 'bar.h']) - def testNoErrorIfOnlyObjCpp(self): - self._AssertNumberOfErrorsWithSources(0, ['foo.mm', 'bar.mm', 'bar.h']) + def testNoErrorIfOnlyObjCpp(self): + self._AssertNumberOfErrorsWithSources(0, ['foo.mm', 'bar.mm', 'bar.h']) - def testNoErrorIfObjCAndObjCppAreMixed(self): - self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.mm', 'bar.h']) + def testNoErrorIfObjCAndObjCppAreMixed(self): + self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.mm', 'bar.h']) - def testNoErrorIfSourcesAreInExclusiveIfBranches(self): - self._GenerateBuildFile(textwrap.dedent(""" + def testNoErrorIfSourcesAreInExclusiveIfBranches(self): + self._GenerateBuildFile( + textwrap.dedent(""" rtc_library("bar_foo") { if (is_win) { sources = [ @@ -185,14 +184,15 
@@ def testNoErrorIfSourcesAreInExclusiveIfBranches(self): } } """)) - self.input_api.files = [MockFile(self.file_path)] - errors = PRESUBMIT.CheckNoMixingSources(self.input_api, - [MockFile(self.file_path)], - self.output_api) - self.assertEqual(0, len(errors)) - - def testErrorIfSourcesAreNotInExclusiveIfBranches(self): - self._GenerateBuildFile(textwrap.dedent(""" + self.input_api.files = [MockFile(self.file_path)] + errors = PRESUBMIT.CheckNoMixingSources(self.input_api, + [MockFile(self.file_path)], + self.output_api) + self.assertEqual(0, len(errors)) + + def testErrorIfSourcesAreNotInExclusiveIfBranches(self): + self._GenerateBuildFile( + textwrap.dedent(""" rtc_library("bar_foo") { if (is_win) { sources = [ @@ -224,21 +224,23 @@ def testErrorIfSourcesAreNotInExclusiveIfBranches(self): } } """)) - self.input_api.files = [MockFile(self.file_path)] - errors = PRESUBMIT.CheckNoMixingSources(self.input_api, - [MockFile(self.file_path)], - self.output_api) - self.assertEqual(1, len(errors)) - self.assertTrue('bar.cc' in str(errors[0])) - self.assertTrue('bar.mm' in str(errors[0])) - self.assertTrue('foo.cc' in str(errors[0])) - self.assertTrue('foo.mm' in str(errors[0])) - self.assertTrue('bar.m' in str(errors[0])) - self.assertTrue('bar.c' in str(errors[0])) - - def _AssertNumberOfErrorsWithSources(self, number_of_errors, sources): - assert len(sources) == 3, 'This function accepts a list of 3 source files' - self._GenerateBuildFile(textwrap.dedent(""" + self.input_api.files = [MockFile(self.file_path)] + errors = PRESUBMIT.CheckNoMixingSources(self.input_api, + [MockFile(self.file_path)], + self.output_api) + self.assertEqual(1, len(errors)) + self.assertTrue('bar.cc' in str(errors[0])) + self.assertTrue('bar.mm' in str(errors[0])) + self.assertTrue('foo.cc' in str(errors[0])) + self.assertTrue('foo.mm' in str(errors[0])) + self.assertTrue('bar.m' in str(errors[0])) + self.assertTrue('bar.c' in str(errors[0])) + + def _AssertNumberOfErrorsWithSources(self, 
number_of_errors, sources): + assert len( + sources) == 3, 'This function accepts a list of 3 source files' + self._GenerateBuildFile( + textwrap.dedent(""" rtc_static_library("bar_foo") { sources = [ "%s", @@ -254,20 +256,20 @@ def _AssertNumberOfErrorsWithSources(self, number_of_errors, sources): ], } """ % (tuple(sources) * 2))) - self.input_api.files = [MockFile(self.file_path)] - errors = PRESUBMIT.CheckNoMixingSources(self.input_api, - [MockFile(self.file_path)], - self.output_api) - self.assertEqual(number_of_errors, len(errors)) - if number_of_errors == 1: - for source in sources: - if not source.endswith('.h'): - self.assertTrue(source in str(errors[0])) + self.input_api.files = [MockFile(self.file_path)] + errors = PRESUBMIT.CheckNoMixingSources(self.input_api, + [MockFile(self.file_path)], + self.output_api) + self.assertEqual(number_of_errors, len(errors)) + if number_of_errors == 1: + for source in sources: + if not source.endswith('.h'): + self.assertTrue(source in str(errors[0])) - def _GenerateBuildFile(self, content): - with open(self.file_path, 'w') as f: - f.write(content) + def _GenerateBuildFile(self, content): + with open(self.file_path, 'w') as f: + f.write(content) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/presubmit_test_mocks.py b/presubmit_test_mocks.py index 510a553158..b15eb74dd8 100644 --- a/presubmit_test_mocks.py +++ b/presubmit_test_mocks.py @@ -14,117 +14,125 @@ class MockInputApi(object): - """Mock class for the InputApi class. + """Mock class for the InputApi class. This class can be used for unittests for presubmit by initializing the files attribute as the list of changed files. 
""" - def __init__(self): - self.change = MockChange([], []) - self.files = [] - self.presubmit_local_path = os.path.dirname(__file__) + def __init__(self): + self.change = MockChange([], []) + self.files = [] + self.presubmit_local_path = os.path.dirname(__file__) - def AffectedSourceFiles(self, file_filter=None): - return self.AffectedFiles(file_filter=file_filter) + def AffectedSourceFiles(self, file_filter=None): + return self.AffectedFiles(file_filter=file_filter) - def AffectedFiles(self, file_filter=None, include_deletes=False): - # pylint: disable=unused-argument - return self.files + def AffectedFiles(self, file_filter=None, include_deletes=False): + # pylint: disable=unused-argument + return self.files - @classmethod - def FilterSourceFile(cls, affected_file, white_list=(), black_list=()): - # pylint: disable=unused-argument - return True + @classmethod + def FilterSourceFile(cls, + affected_file, + files_to_check=(), + files_to_skip=()): + # pylint: disable=unused-argument + return True - def PresubmitLocalPath(self): - return self.presubmit_local_path + def PresubmitLocalPath(self): + return self.presubmit_local_path - def ReadFile(self, affected_file, mode='rU'): - filename = affected_file.AbsoluteLocalPath() - for f in self.files: - if f.LocalPath() == filename: - with open(filename, mode) as f: - return f.read() - # Otherwise, file is not in our mock API. - raise IOError, "No such file or directory: '%s'" % filename + def ReadFile(self, affected_file, mode='rU'): + filename = affected_file.AbsoluteLocalPath() + for f in self.files: + if f.LocalPath() == filename: + with open(filename, mode) as f: + return f.read() + # Otherwise, file is not in our mock API. + raise IOError, "No such file or directory: '%s'" % filename class MockOutputApi(object): - """Mock class for the OutputApi class. + """Mock class for the OutputApi class. An instance of this class can be passed to presubmit unittests for outputing various types of results. 
""" - class PresubmitResult(object): - def __init__(self, message, items=None, long_text=''): - self.message = message - self.items = items - self.long_text = long_text + class PresubmitResult(object): + def __init__(self, message, items=None, long_text=''): + self.message = message + self.items = items + self.long_text = long_text - def __repr__(self): - return self.message + def __repr__(self): + return self.message - class PresubmitError(PresubmitResult): - def __init__(self, message, items=None, long_text=''): - MockOutputApi.PresubmitResult.__init__(self, message, items, long_text) - self.type = 'error' + class PresubmitError(PresubmitResult): + def __init__(self, message, items=None, long_text=''): + MockOutputApi.PresubmitResult.__init__(self, message, items, + long_text) + self.type = 'error' class MockChange(object): - """Mock class for Change class. + """Mock class for Change class. This class can be used in presubmit unittests to mock the query of the current change. """ - def __init__(self, changed_files, bugs_from_description, tags=None): - self._changed_files = changed_files - self._bugs_from_description = bugs_from_description - self.tags = dict() if not tags else tags + def __init__(self, changed_files, bugs_from_description, tags=None): + self._changed_files = changed_files + self._bugs_from_description = bugs_from_description + self.tags = dict() if not tags else tags - def BugsFromDescription(self): - return self._bugs_from_description + def BugsFromDescription(self): + return self._bugs_from_description - def __getattr__(self, attr): - """Return tags directly as attributes on the object.""" - if not re.match(r"^[A-Z_]*$", attr): - raise AttributeError(self, attr) - return self.tags.get(attr) + def __getattr__(self, attr): + """Return tags directly as attributes on the object.""" + if not re.match(r"^[A-Z_]*$", attr): + raise AttributeError(self, attr) + return self.tags.get(attr) class MockFile(object): - """Mock class for the File class. 
+ """Mock class for the File class. This class can be used to form the mock list of changed files in MockInputApi for presubmit unittests. """ - def __init__(self, local_path, new_contents=None, old_contents=None, - action='A'): - if new_contents is None: - new_contents = ["Data"] - self._local_path = local_path - self._new_contents = new_contents - self._changed_contents = [(i + 1, l) for i, l in enumerate(new_contents)] - self._action = action - self._old_contents = old_contents - - def Action(self): - return self._action - - def ChangedContents(self): - return self._changed_contents - - def NewContents(self): - return self._new_contents - - def LocalPath(self): - return self._local_path - - def AbsoluteLocalPath(self): - return self._local_path - - def OldContents(self): - return self._old_contents + def __init__(self, + local_path, + new_contents=None, + old_contents=None, + action='A'): + if new_contents is None: + new_contents = ["Data"] + self._local_path = local_path + self._new_contents = new_contents + self._changed_contents = [(i + 1, l) + for i, l in enumerate(new_contents)] + self._action = action + self._old_contents = old_contents + + def Action(self): + return self._action + + def ChangedContents(self): + return self._changed_contents + + def NewContents(self): + return self._new_contents + + def LocalPath(self): + return self._local_path + + def AbsoluteLocalPath(self): + return self._local_path + + def OldContents(self): + return self._old_contents diff --git a/pylintrc b/pylintrc index 9809ebe998..f26c84adce 100644 --- a/pylintrc +++ b/pylintrc @@ -97,9 +97,6 @@ max-line-length=80 # Maximum number of lines in a module max-module-lines=1000 -# We use two spaces for indents, instead of the usual four spaces or tab. 
-indent-string=' ' - [BASIC] diff --git a/resources/audio_coding/testfile16kHz.pcm.sha1 b/resources/audio_coding/testfile16kHz.pcm.sha1 new file mode 100644 index 0000000000..4d7df3be8f --- /dev/null +++ b/resources/audio_coding/testfile16kHz.pcm.sha1 @@ -0,0 +1 @@ +0d2702e5c350c2a4ad3a641c4d96271e8aa12e6c \ No newline at end of file diff --git a/resources/audio_processing/output_data_fixed.pb.sha1 b/resources/audio_processing/output_data_fixed.pb.sha1 index f27905087e..43e68303ac 100644 --- a/resources/audio_processing/output_data_fixed.pb.sha1 +++ b/resources/audio_processing/output_data_fixed.pb.sha1 @@ -1 +1 @@ -4010b1fe15eda1b42968cdb3f9fed399e1aa7197 \ No newline at end of file +0ff9ab4d46929552e21d16f266f9eba42575ba8d \ No newline at end of file diff --git a/resources/audio_processing/output_data_float.pb.sha1 b/resources/audio_processing/output_data_float.pb.sha1 index b8312fc58f..6c3ab91815 100644 --- a/resources/audio_processing/output_data_float.pb.sha1 +++ b/resources/audio_processing/output_data_float.pb.sha1 @@ -1 +1 @@ -d22d4b0bc8f59aa27da61e158b9d35596f3844f5 \ No newline at end of file +ed1172c80a1a001a8aa7ac0680a99018cbb7d278 \ No newline at end of file diff --git a/resources/audio_processing/output_data_float_avx2.pb.sha1 b/resources/audio_processing/output_data_float_avx2.pb.sha1 new file mode 100644 index 0000000000..2d4ad0c141 --- /dev/null +++ b/resources/audio_processing/output_data_float_avx2.pb.sha1 @@ -0,0 +1 @@ +a1dd718a6882bf8033a934e5beec73086cc91240 \ No newline at end of file diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn index 0805a5c549..6ee7190125 100644 --- a/rtc_base/BUILD.gn +++ b/rtc_base/BUILD.gn @@ -43,6 +43,25 @@ rtc_source_set("ignore_wundef") { sources = [ "ignore_wundef.h" ] } +rtc_source_set("untyped_function") { + sources = [ "untyped_function.h" ] + deps = [ "system:assume" ] +} + +rtc_source_set("callback_list") { + sources = [ + "callback_list.cc", + "callback_list.h", + ] + deps = [ + ":checks", + 
":untyped_function", + "../api:function_view", + "system:assume", + "system:inline", + ] +} + # The subset of rtc_base approved for use outside of libjingle. # TODO(bugs.webrtc.org/9838): Create small and focused build targets and remove # the old concept of rtc_base and rtc_base_approved. @@ -55,12 +74,13 @@ rtc_library("rtc_base_approved") { ":type_traits", "../api:array_view", "../api:scoped_refptr", + "synchronization:mutex", "system:arch", "system:rtc_export", "system:unused", "third_party/base64", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] public_deps = [] # no-presubmit-check TODO(webrtc:8603) sources = [ @@ -105,6 +125,12 @@ rtc_library("rtc_base_approved") { if (is_win) { sources += [ + "win/get_activation_factory.cc", + "win/get_activation_factory.h", + "win/hstring.cc", + "win/hstring.h", + "win/scoped_com_initializer.cc", + "win/scoped_com_initializer.h", "win/windows_version.cc", "win/windows_version.h", ] @@ -112,7 +138,8 @@ rtc_library("rtc_base_approved") { } if (is_nacl) { - public_deps += [ "//native_client_sdk/src/libraries/nacl_io" ] # no-presubmit-check TODO(webrtc:8603) + public_deps += # no-presubmit-check TODO(webrtc:8603) + [ "//native_client_sdk/src/libraries/nacl_io" ] } if (is_android) { @@ -132,17 +159,15 @@ rtc_library("rtc_base_approved") { ":stringutils", ":thread_checker", ":timeutils", + "synchronization:sequence_checker", ] } rtc_source_set("macromagic") { - # TODO(bugs.webrtc.org/9606): This should not be public. 
- visibility = [ "*" ] sources = [ "arraysize.h", "constructor_magic.h", "format_macros.h", - "stringize_macros.h", "thread_annotations.h", ] deps = [ "system:arch" ] @@ -153,6 +178,7 @@ rtc_library("platform_thread_types") { "platform_thread_types.cc", "platform_thread_types.h", ] + deps = [ ":macromagic" ] } rtc_source_set("refcount") { @@ -167,15 +193,15 @@ rtc_source_set("refcount") { rtc_library("criticalsection") { sources = [ - "critical_section.cc", - "critical_section.h", + "deprecated/recursive_critical_section.cc", + "deprecated/recursive_critical_section.h", ] deps = [ ":atomicops", ":checks", ":macromagic", ":platform_thread_types", - "system:rtc_export", + "synchronization:yield", "system:unused", ] } @@ -184,8 +210,9 @@ rtc_library("platform_thread") { visibility = [ ":rtc_base_approved", ":rtc_task_queue_libevent", - ":rtc_task_queue_win", ":rtc_task_queue_stdlib", + ":rtc_task_queue_win", + "synchronization:mutex", "synchronization:sequence_checker", ] sources = [ @@ -200,8 +227,8 @@ rtc_library("platform_thread") { ":rtc_event", ":thread_checker", ":timeutils", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("rtc_event") { @@ -224,8 +251,8 @@ rtc_library("rtc_event") { ":checks", "synchronization:yield_policy", "system:warn_current_thread_is_deadlocked", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } @@ -239,6 +266,9 @@ rtc_library("logging") { ":platform_thread_types", ":stringutils", ":timeutils", + "synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/meta:type_traits", "//third_party/abseil-cpp/absl/strings", @@ -263,7 +293,7 @@ rtc_library("logging") { deps += [ "system:inline" ] if (is_mac) { - libs += [ "Foundation.framework" ] + frameworks = [ "Foundation.framework" ] } # logging.h needs the deprecation header while downstream 
projects are @@ -300,6 +330,8 @@ rtc_library("checks") { ":safe_compare", "system:inline", "system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/meta:type_traits", "//third_party/abseil-cpp/absl/strings", ] @@ -316,13 +348,20 @@ rtc_library("rate_limiter") { deps = [ ":rtc_base_approved", "../system_wrappers", - "//third_party/abseil-cpp/absl/types:optional", + "synchronization:mutex", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("sanitizer") { sources = [ "sanitizer.h" ] - deps = [ "//third_party/abseil-cpp/absl/meta:type_traits" ] + absl_deps = [ "//third_party/abseil-cpp/absl/meta:type_traits" ] +} + +rtc_source_set("bounded_inline_vector") { + public = [ "bounded_inline_vector.h" ] + sources = [ "bounded_inline_vector_impl.h" ] + deps = [ ":checks" ] } rtc_source_set("divide_round") { @@ -383,12 +422,16 @@ rtc_library("stringutils") { "string_utils.h", "strings/string_builder.cc", "strings/string_builder.h", + "strings/string_format.cc", + "strings/string_format.h", ] deps = [ ":checks", ":macromagic", ":safe_minmax", "../api:array_view", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -424,8 +467,8 @@ rtc_library("rtc_task_queue") { "../api/task_queue", "system:rtc_export", "task_utils:to_queued_task", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_source_set("rtc_operations_chain") { @@ -441,6 +484,7 @@ rtc_source_set("rtc_operations_chain") { "../api:scoped_refptr", "synchronization:sequence_checker", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_enable_libevent) { @@ -460,6 +504,9 @@ if (rtc_enable_libevent) { ":safe_conversions", ":timeutils", "../api/task_queue", + "synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings", ] @@ -480,8 +527,10 @@ if (is_mac || 
is_ios) { ":checks", ":logging", "../api/task_queue", - "//third_party/abseil-cpp/absl/strings", + "synchronization:mutex", + "system:gcd_helpers", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } } @@ -502,8 +551,9 @@ if (is_win) { ":safe_conversions", ":timeutils", "../api/task_queue", - "//third_party/abseil-cpp/absl/strings", + "synchronization:mutex", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } } @@ -522,8 +572,9 @@ rtc_library("rtc_task_queue_stdlib") { ":safe_conversions", ":timeutils", "../api/task_queue", - "//third_party/abseil-cpp/absl/strings", + "synchronization:mutex", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("weak_ptr") { @@ -542,8 +593,6 @@ rtc_library("rtc_numerics") { sources = [ "numerics/event_based_exponential_moving_average.cc", "numerics/event_based_exponential_moving_average.h", - "numerics/event_rate_counter.cc", - "numerics/event_rate_counter.h", "numerics/exp_filter.cc", "numerics/exp_filter.h", "numerics/math_utils.h", @@ -552,23 +601,29 @@ rtc_library("rtc_numerics") { "numerics/moving_median_filter.h", "numerics/percentile_filter.h", "numerics/running_statistics.h", - "numerics/sample_stats.cc", - "numerics/sample_stats.h", - "numerics/samples_stats_counter.cc", - "numerics/samples_stats_counter.h", "numerics/sequence_number_util.h", ] deps = [ ":checks", ":rtc_base_approved", - ":safe_compare", - "../api:array_view", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("rtc_stats_counters") { + sources = [ + "numerics/event_rate_counter.cc", + "numerics/event_rate_counter.h", + "numerics/sample_stats.cc", + "numerics/sample_stats.h", + ] + deps = [ + "../api/numerics", "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [] } config("rtc_json_suppressions") { @@ -596,7 +651,8 @@ 
rtc_library("rtc_json") { deps = [ ":stringutils" ] all_dependent_configs = [ "//third_party/jsoncpp:jsoncpp_config" ] if (rtc_build_json) { - public_deps = [ "//third_party/jsoncpp" ] # no-presubmit-check TODO(webrtc:8603) + public_deps = # no-presubmit-check TODO(webrtc:8603) + [ "//third_party/jsoncpp" ] } else { include_dirs = [ "$rtc_jsoncpp_root" ] @@ -675,6 +731,8 @@ rtc_source_set("threading") { # "message_handler.h", # "network_monitor.cc", # "network_monitor.h", + # "network_monitor_factory.cc", + # "network_monitor_factory.h", # "physical_socket_server.cc", # "physical_socket_server.h", # "signal_thread.cc", @@ -731,12 +789,26 @@ if (is_android) { if (is_win) { rtc_source_set("win32") { - # TODO(bugs.webrtc.org/9987): This build target will soon contain - # the following files: - # sources = [ - # "win32.cc", - # "win32.h", - # ] + sources = [ + "win32.cc", + "win32.h", + "win32_window.cc", + "win32_window.h", + ] + + deps = [ + ":checks", + ":macromagic", + ":rtc_base_approved", + ] + + libs = [ + "crypt32.lib", + "iphlpapi.lib", + "secur32.lib", + ] + + defines = [ "_CRT_NONSTDC_NO_DEPRECATE" ] } } @@ -748,17 +820,30 @@ rtc_library("rtc_base") { defines = [] deps = [ ":checks", + ":deprecation", + ":rtc_task_queue", ":stringutils", "../api:array_view", "../api:function_view", "../api:scoped_refptr", + "../api/numerics", "../api/task_queue", + "../system_wrappers:field_trial", "network:sent_packet", + "synchronization:mutex", + "synchronization:sequence_checker", "system:file_wrapper", + "system:inline", "system:rtc_export", + "task_utils:pending_task_safety_flag", + "task_utils:repeating_task", + "task_utils:to_queued_task", "third_party/base64", "third_party/sigslot", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/container:flat_hash_map", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -786,6 +871,8 @@ 
rtc_library("rtc_base") { "crypt_string.h", "data_rate_limiter.cc", "data_rate_limiter.h", + "deprecated/signal_thread.cc", + "deprecated/signal_thread.h", "dscp.h", "file_rotating_stream.cc", "file_rotating_stream.h", @@ -807,9 +894,13 @@ rtc_library("rtc_base") { "net_helpers.h", "network.cc", "network.h", + "network_constants.cc", "network_constants.h", "network_monitor.cc", "network_monitor.h", + "network_monitor_factory.cc", + "network_monitor_factory.h", + "network_route.cc", "network_route.h", "null_socket_server.cc", "null_socket_server.h", @@ -836,7 +927,6 @@ rtc_library("rtc_base") { "rtc_certificate.h", "rtc_certificate_generator.cc", "rtc_certificate_generator.h", - "signal_thread.cc", "signal_thread.h", "sigslot_repeater.h", "socket.cc", @@ -876,7 +966,6 @@ rtc_library("rtc_base") { "callback.h", "log_sinks.cc", "log_sinks.h", - "numerics/math_utils.h", "rolling_accumulator.h", "ssl_roots.h", ] @@ -917,7 +1006,7 @@ rtc_library("rtc_base") { deps += [ "system:cocoa_threading" ] } - if (is_linux) { + if (is_linux || is_chromeos) { libs += [ "dl", "rt", @@ -925,7 +1014,7 @@ rtc_library("rtc_base") { } if (is_ios) { - libs += [ + frameworks = [ "CFNetwork.framework", "Foundation.framework", "Security.framework", @@ -935,20 +1024,7 @@ rtc_library("rtc_base") { } if (is_win) { - sources += [ - "win32.cc", - "win32.h", - "win32_window.cc", - "win32_window.h", - ] - - libs += [ - "crypt32.lib", - "iphlpapi.lib", - "secur32.lib", - ] - - defines += [ "_CRT_NONSTDC_NO_DEPRECATE" ] + deps += [ ":win32" ] } if (is_posix || is_fuchsia) { @@ -959,7 +1035,9 @@ rtc_library("rtc_base") { } if (is_nacl) { - public_deps += [ "//native_client_sdk/src/libraries/nacl_io" ] # no-presubmit-check TODO(webrtc:8603) + public_deps += # no-presubmit-check TODO(webrtc:8603) + [ "//native_client_sdk/src/libraries/nacl_io" ] + defines += [ "timezone=_timezone" ] sources -= [ "ifaddrs_converter.cc" ] } @@ -981,8 +1059,8 @@ rtc_library("gunit_helpers") { ":rtc_base_tests_utils", 
":stringutils", "../test:test_support", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("testclient") { @@ -994,10 +1072,23 @@ rtc_library("testclient") { deps = [ ":criticalsection", ":gunit_helpers", - ":macromagic", ":rtc_base", ":rtc_base_tests_utils", ":timeutils", + "synchronization:mutex", + ] +} + +rtc_library("callback_list_unittests") { + testonly = true + + sources = [ "callback_list_unittest.cc" ] + deps = [ + ":callback_list", + ":gunit_helpers", + ":rtc_base", + "../api:function_view", + "../test:test_support", ] } @@ -1046,7 +1137,10 @@ rtc_library("rtc_base_tests_utils") { "../api/units:time_delta", "../api/units:timestamp", "memory:fifo_buffer", + "synchronization:mutex", "third_party/sigslot", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", ] @@ -1061,15 +1155,14 @@ rtc_library("task_queue_for_test") { ] deps = [ ":checks", - ":macromagic", ":rtc_base_approved", ":rtc_event", ":rtc_task_queue", "../api/task_queue", "../api/task_queue:default_task_queue_factory", "task_utils:to_queued_task", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_include_tests) { @@ -1081,10 +1174,20 @@ if (rtc_include_tests) { ":rtc_base", ":rtc_base_tests_utils", "../test:test_support", + "synchronization:mutex", "third_party/sigslot", ] } + rtc_library("untyped_function_unittest") { + testonly = true + sources = [ "untyped_function_unittest.cc" ] + deps = [ + ":untyped_function", + "../test:test_support", + ] + } + rtc_library("rtc_base_nonparallel_tests") { testonly = true @@ -1109,8 +1212,8 @@ if (rtc_include_tests) { "../test:test_support", "third_party/sigslot", "//testing/gtest", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] if (is_win) { sources += [ "win32_socket_server_unittest.cc" ] } @@ -1123,13 +1226,14 @@ if 
(rtc_include_tests) { "base64_unittest.cc", "bind_unittest.cc", "bit_buffer_unittest.cc", + "bounded_inline_vector_unittest.cc", "buffer_queue_unittest.cc", "buffer_unittest.cc", "byte_buffer_unittest.cc", "byte_order_unittest.cc", "checks_unittest.cc", "copy_on_write_buffer_unittest.cc", - "critical_section_unittest.cc", + "deprecated/recursive_critical_section_unittest.cc", "event_tracer_unittest.cc", "event_unittest.cc", "logging_unittest.cc", @@ -1151,8 +1255,8 @@ if (rtc_include_tests) { "string_encode_unittest.cc", "string_to_number_unittest.cc", "string_utils_unittest.cc", - "stringize_macros_unittest.cc", "strings/string_builder_unittest.cc", + "strings/string_format_unittest.cc", "swap_queue_unittest.cc", "thread_annotations_unittest.cc", "thread_checker_unittest.cc", @@ -1165,6 +1269,7 @@ if (rtc_include_tests) { sources += [ "win/windows_version_unittest.cc" ] } deps = [ + ":bounded_inline_vector", ":checks", ":divide_round", ":gunit_helpers", @@ -1172,6 +1277,7 @@ if (rtc_include_tests) { ":rtc_base", ":rtc_base_approved", ":rtc_base_tests_utils", + ":rtc_numerics", ":rtc_task_queue", ":safe_compare", ":safe_minmax", @@ -1180,15 +1286,19 @@ if (rtc_include_tests) { ":testclient", "../api:array_view", "../api:scoped_refptr", + "../api/numerics", "../api/units:time_delta", "../system_wrappers", "../test:fileutils", "../test:test_main", "../test:test_support", "memory:unittests", + "synchronization:mutex", "task_utils:to_queued_task", "third_party/base64", "third_party/sigslot", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", ] @@ -1206,8 +1316,8 @@ if (rtc_include_tests) { ":task_queue_for_test", "../test:test_main", "../test:test_support", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("rtc_operations_chain_unittests") { @@ -1215,6 +1325,7 @@ if (rtc_include_tests) { sources = [ "operations_chain_unittest.cc" ] deps = [ + 
":gunit_helpers", ":rtc_base", ":rtc_base_approved", ":rtc_event", @@ -1249,7 +1360,6 @@ if (rtc_include_tests) { "numerics/moving_median_filter_unittest.cc", "numerics/percentile_filter_unittest.cc", "numerics/running_statistics_unittest.cc", - "numerics/samples_stats_counter_unittest.cc", "numerics/sequence_number_util_unittest.cc", ] deps = [ @@ -1257,8 +1367,8 @@ if (rtc_include_tests) { ":rtc_numerics", "../test:test_main", "../test:test_support", - "//third_party/abseil-cpp/absl/algorithm:container", ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("rtc_json_unittests") { @@ -1282,39 +1392,24 @@ if (rtc_include_tests) { "callback_unittest.cc", "crc32_unittest.cc", "data_rate_limiter_unittest.cc", + "deprecated/signal_thread_unittest.cc", "fake_clock_unittest.cc", "helpers_unittest.cc", "ip_address_unittest.cc", "memory_usage_unittest.cc", "message_digest_unittest.cc", "nat_unittest.cc", + "network_route_unittest.cc", "network_unittest.cc", "proxy_unittest.cc", "rolling_accumulator_unittest.cc", "rtc_certificate_generator_unittest.cc", "rtc_certificate_unittest.cc", - "signal_thread_unittest.cc", "sigslot_tester_unittest.cc", "test_client_unittest.cc", "thread_unittest.cc", "unique_id_generator_unittest.cc", ] - if (is_win) { - sources += [ - "win32_unittest.cc", - "win32_window_unittest.cc", - ] - } - if (is_posix || is_fuchsia) { - sources += [ - "openssl_adapter_unittest.cc", - "openssl_session_cache_unittest.cc", - "openssl_utility_unittest.cc", - "ssl_adapter_unittest.cc", - "ssl_identity_unittest.cc", - "ssl_stream_adapter_unittest.cc", - ] - } deps = [ ":checks", ":gunit_helpers", @@ -1324,13 +1419,36 @@ if (rtc_include_tests) { "../api:array_view", "../api/task_queue", "../api/task_queue:task_queue_test", + "../test:field_trial", "../test:fileutils", + "../test:rtc_expect_death", "../test:test_main", "../test:test_support", "memory:fifo_buffer", + "synchronization:mutex", "synchronization:synchronization_unittests", 
+ "task_utils:pending_task_safety_flag", "task_utils:to_queued_task", "third_party/sigslot", + ] + if (is_win) { + sources += [ + "win32_unittest.cc", + "win32_window_unittest.cc", + ] + deps += [ ":win32" ] + } + if (is_posix || is_fuchsia) { + sources += [ + "openssl_adapter_unittest.cc", + "openssl_session_cache_unittest.cc", + "openssl_utility_unittest.cc", + "ssl_adapter_unittest.cc", + "ssl_identity_unittest.cc", + "ssl_stream_adapter_unittest.cc", + ] + } + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", @@ -1362,4 +1480,8 @@ if (is_android) { "//third_party/android_deps:com_android_support_support_annotations_java", ] } + java_cpp_enum("network_monitor_enums") { + sources = [ "network_monitor.h" ] + visibility = [ "*" ] + } } diff --git a/rtc_base/DEPS b/rtc_base/DEPS index 679d06dfc8..c9f7dc5898 100644 --- a/rtc_base/DEPS +++ b/rtc_base/DEPS @@ -1,8 +1,8 @@ include_rules = [ "+base/third_party/libevent", "+json", - "+third_party/jsoncpp", "+system_wrappers", + "+third_party/jsoncpp", ] specific_include_rules = { diff --git a/rtc_base/OWNERS b/rtc_base/OWNERS index c396c74d60..107bbcd812 100644 --- a/rtc_base/OWNERS +++ b/rtc_base/OWNERS @@ -4,13 +4,7 @@ kwiberg@webrtc.org mflodman@webrtc.org qingsi@webrtc.org sergeyu@chromium.org -steveanton@webrtc.org tommi@webrtc.org -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* - per-file rate_statistics*=sprang@webrtc.org per-file rate_statistics*=stefan@webrtc.org diff --git a/rtc_base/async_invoker.cc b/rtc_base/async_invoker.cc index 26f8c523ab..8b410a4561 100644 --- a/rtc_base/async_invoker.cc +++ b/rtc_base/async_invoker.cc @@ -101,28 +101,6 @@ void AsyncInvoker::DoInvokeDelayed(const Location& posted_from, new ScopedMessageData(std::move(closure))); } -GuardedAsyncInvoker::GuardedAsyncInvoker() : thread_(Thread::Current()) { - thread_->SignalQueueDestroyed.connect(this, - &GuardedAsyncInvoker::ThreadDestroyed); -} - -GuardedAsyncInvoker::~GuardedAsyncInvoker() {} - -bool GuardedAsyncInvoker::Flush(uint32_t id) { - CritScope cs(&crit_); - if (thread_ == nullptr) - return false; - invoker_.Flush(thread_, id); - return true; -} - -void GuardedAsyncInvoker::ThreadDestroyed() { - CritScope cs(&crit_); - // We should never get more than one notification about the thread dying. - RTC_DCHECK(thread_ != nullptr); - thread_ = nullptr; -} - AsyncClosure::AsyncClosure(AsyncInvoker* invoker) : invoker_(invoker), invocation_complete_(invoker_->invocation_complete_) { invoker_->pending_invocations_.fetch_add(1, std::memory_order_relaxed); diff --git a/rtc_base/async_invoker.h b/rtc_base/async_invoker.h index f15955d811..983e710bcd 100644 --- a/rtc_base/async_invoker.h +++ b/rtc_base/async_invoker.h @@ -87,7 +87,7 @@ namespace rtc { // destruction. This can be done by starting each chain of invocations on the // same thread on which it will be destroyed, or by using some other // synchronization method. -class AsyncInvoker : public MessageHandler { +class AsyncInvoker : public MessageHandlerAutoCleanup { public: AsyncInvoker(); ~AsyncInvoker() override; @@ -169,97 +169,6 @@ class AsyncInvoker : public MessageHandler { RTC_DISALLOW_COPY_AND_ASSIGN(AsyncInvoker); }; -// Similar to AsyncInvoker, but guards against the Thread being destroyed while -// there are outstanding dangling pointers to it. 
It will connect to the current -// thread in the constructor, and will get notified when that thread is -// destroyed. After GuardedAsyncInvoker is constructed, it can be used from -// other threads to post functors to the thread it was constructed on. If that -// thread dies, any further calls to AsyncInvoke() will be safely ignored. -class GuardedAsyncInvoker : public sigslot::has_slots<> { - public: - GuardedAsyncInvoker(); - ~GuardedAsyncInvoker() override; - - // Synchronously execute all outstanding calls we own, and wait for calls to - // complete before returning. Optionally filter by message id. The destructor - // will not wait for outstanding calls, so if that behavior is desired, call - // Flush() first. Returns false if the thread has died. - bool Flush(uint32_t id = MQID_ANY); - - // Call |functor| asynchronously with no callback upon completion. Returns - // immediately. Returns false if the thread has died. - template - bool AsyncInvoke(const Location& posted_from, - FunctorT&& functor, - uint32_t id = 0) { - CritScope cs(&crit_); - if (thread_ == nullptr) - return false; - invoker_.AsyncInvoke( - posted_from, thread_, std::forward(functor), id); - return true; - } - - // Call |functor| asynchronously with |delay_ms|, with no callback upon - // completion. Returns immediately. Returns false if the thread has died. - template - bool AsyncInvokeDelayed(const Location& posted_from, - FunctorT&& functor, - uint32_t delay_ms, - uint32_t id = 0) { - CritScope cs(&crit_); - if (thread_ == nullptr) - return false; - invoker_.AsyncInvokeDelayed( - posted_from, thread_, std::forward(functor), delay_ms, id); - return true; - } - - // Call |functor| asynchronously, calling |callback| when done. Returns false - // if the thread has died. 
- template - bool AsyncInvoke(const Location& posted_from, - const Location& callback_posted_from, - FunctorT&& functor, - void (HostT::*callback)(ReturnT), - HostT* callback_host, - uint32_t id = 0) { - CritScope cs(&crit_); - if (thread_ == nullptr) - return false; - invoker_.AsyncInvoke( - posted_from, callback_posted_from, thread_, - std::forward(functor), callback, callback_host, id); - return true; - } - - // Call |functor| asynchronously calling |callback| when done. Overloaded for - // void return. Returns false if the thread has died. - template - bool AsyncInvoke(const Location& posted_from, - const Location& callback_posted_from, - FunctorT&& functor, - void (HostT::*callback)(), - HostT* callback_host, - uint32_t id = 0) { - CritScope cs(&crit_); - if (thread_ == nullptr) - return false; - invoker_.AsyncInvoke( - posted_from, callback_posted_from, thread_, - std::forward(functor), callback, callback_host, id); - return true; - } - - private: - // Callback when |thread_| is destroyed. 
- void ThreadDestroyed(); - - CriticalSection crit_; - Thread* thread_ RTC_GUARDED_BY(crit_); - AsyncInvoker invoker_ RTC_GUARDED_BY(crit_); -}; - } // namespace rtc #endif // RTC_BASE_ASYNC_INVOKER_H_ diff --git a/rtc_base/async_invoker_inl.h b/rtc_base/async_invoker_inl.h index bd9b0d1aa1..6307afe220 100644 --- a/rtc_base/async_invoker_inl.h +++ b/rtc_base/async_invoker_inl.h @@ -13,7 +13,6 @@ #include "api/scoped_refptr.h" #include "rtc_base/bind.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/message_handler.h" #include "rtc_base/ref_counted_object.h" diff --git a/rtc_base/async_packet_socket.cc b/rtc_base/async_packet_socket.cc index a42725c424..d5435d71d0 100644 --- a/rtc_base/async_packet_socket.cc +++ b/rtc_base/async_packet_socket.cc @@ -10,8 +10,6 @@ #include "rtc_base/async_packet_socket.h" -#include "rtc_base/net_helper.h" - namespace rtc { PacketTimeUpdateParams::PacketTimeUpdateParams() = default; @@ -35,12 +33,7 @@ void CopySocketInformationToPacketInfo(size_t packet_size_bytes, bool is_connectionless, rtc::PacketInfo* info) { info->packet_size_bytes = packet_size_bytes; - // TODO(srte): Make sure that the family of the local socket is always set - // in the VirtualSocket implementation and remove this check. 
- int family = socket_from.GetLocalAddress().family(); - if (family != 0) { - info->ip_overhead_bytes = cricket::GetIpOverhead(family); - } + info->ip_overhead_bytes = socket_from.GetLocalAddress().ipaddr().overhead(); } } // namespace rtc diff --git a/rtc_base/async_tcp_socket.cc b/rtc_base/async_tcp_socket.cc index d03ae32dde..35401d7c0a 100644 --- a/rtc_base/async_tcp_socket.cc +++ b/rtc_base/async_tcp_socket.cc @@ -16,6 +16,7 @@ #include #include +#include "api/array_view.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -147,33 +148,42 @@ int AsyncTCPSocketBase::SendTo(const void* pv, return -1; } -int AsyncTCPSocketBase::SendRaw(const void* pv, size_t cb) { - if (outbuf_.size() + cb > max_outsize_) { - socket_->SetError(EMSGSIZE); - return -1; - } - - RTC_DCHECK(!listen_); - outbuf_.AppendData(static_cast(pv), cb); - - return FlushOutBuffer(); -} - int AsyncTCPSocketBase::FlushOutBuffer() { RTC_DCHECK(!listen_); - int res = socket_->Send(outbuf_.data(), outbuf_.size()); - if (res <= 0) { - return res; - } - if (static_cast(res) > outbuf_.size()) { - RTC_NOTREACHED(); - return -1; + RTC_DCHECK_GT(outbuf_.size(), 0); + rtc::ArrayView view = outbuf_; + int res; + while (view.size() > 0) { + res = socket_->Send(view.data(), view.size()); + if (res <= 0) { + break; + } + if (static_cast(res) > view.size()) { + RTC_NOTREACHED(); + res = -1; + break; + } + view = view.subview(res); } - size_t new_size = outbuf_.size() - res; - if (new_size > 0) { - memmove(outbuf_.data(), outbuf_.data() + res, new_size); + if (res > 0) { + // The output buffer may have been written out over multiple partial Send(), + // so reconstruct the total written length. + RTC_DCHECK_EQ(view.size(), 0); + res = outbuf_.size(); + outbuf_.Clear(); + } else { + // There was an error when calling Send(), so there will still be data left + // to send at a later point. 
+ RTC_DCHECK_GT(view.size(), 0); + // In the special case of EWOULDBLOCK, signal that we had a partial write. + if (socket_->GetError() == EWOULDBLOCK) { + res = outbuf_.size() - view.size(); + } + if (view.size() < outbuf_.size()) { + memmove(outbuf_.data(), view.data(), view.size()); + outbuf_.SetSize(view.size()); + } } - outbuf_.SetSize(new_size); return res; } diff --git a/rtc_base/async_tcp_socket.h b/rtc_base/async_tcp_socket.h index fecaba798c..e05cce1ec9 100644 --- a/rtc_base/async_tcp_socket.h +++ b/rtc_base/async_tcp_socket.h @@ -61,7 +61,6 @@ class AsyncTCPSocketBase : public AsyncPacketSocket { static AsyncSocket* ConnectSocket(AsyncSocket* socket, const SocketAddress& bind_address, const SocketAddress& remote_address); - virtual int SendRaw(const void* pv, size_t cb); int FlushOutBuffer(); // Add data to |outbuf_|. void AppendToOutBuffer(const void* pv, size_t cb); diff --git a/rtc_base/bit_buffer.cc b/rtc_base/bit_buffer.cc index a6dc1c7ab8..540141fe52 100644 --- a/rtc_base/bit_buffer.cc +++ b/rtc_base/bit_buffer.cc @@ -162,6 +162,12 @@ bool BitBuffer::ConsumeBits(size_t bit_count) { bool BitBuffer::ReadNonSymmetric(uint32_t* val, uint32_t num_values) { RTC_DCHECK_GT(num_values, 0); RTC_DCHECK_LE(num_values, uint32_t{1} << 31); + if (num_values == 1) { + // When there is only one possible value, it requires zero bits to store it. + // But ReadBits doesn't support reading zero bits. + *val = 0; + return true; + } size_t count_bits = CountBits(num_values); uint32_t num_min_bits_values = (uint32_t{1} << count_bits) - num_values; @@ -308,6 +314,11 @@ bool BitBufferWriter::WriteBits(uint64_t val, size_t bit_count) { bool BitBufferWriter::WriteNonSymmetric(uint32_t val, uint32_t num_values) { RTC_DCHECK_LT(val, num_values); RTC_DCHECK_LE(num_values, uint32_t{1} << 31); + if (num_values == 1) { + // When there is only one possible value, it requires zero bits to store it. + // But WriteBits doesn't support writing zero bits. 
+ return true; + } size_t count_bits = CountBits(num_values); uint32_t num_min_bits_values = (uint32_t{1} << count_bits) - num_values; diff --git a/rtc_base/bit_buffer_unittest.cc b/rtc_base/bit_buffer_unittest.cc index b3521b4951..656682c2ef 100644 --- a/rtc_base/bit_buffer_unittest.cc +++ b/rtc_base/bit_buffer_unittest.cc @@ -142,7 +142,7 @@ TEST(BitBufferTest, ReadBits) { EXPECT_FALSE(buffer.ReadBits(&val, 1)); } -TEST(BitBufferTest, SetOffsetValues) { +TEST(BitBufferDeathTest, SetOffsetValues) { uint8_t bytes[4] = {0}; BitBufferWriter buffer(bytes, 4); @@ -254,6 +254,28 @@ TEST(BitBufferWriterTest, NonSymmetricReadsMatchesWrites) { EXPECT_THAT(values, ElementsAre(0, 1, 2, 3, 4, 5)); } +TEST(BitBufferTest, ReadNonSymmetricOnlyValueConsumesNoBits) { + const uint8_t bytes[2] = {}; + BitBuffer reader(bytes, 2); + uint32_t value = 0xFFFFFFFF; + ASSERT_EQ(reader.RemainingBitCount(), 16u); + + EXPECT_TRUE(reader.ReadNonSymmetric(&value, /*num_values=*/1)); + + EXPECT_EQ(value, 0u); + EXPECT_EQ(reader.RemainingBitCount(), 16u); +} + +TEST(BitBufferWriterTest, WriteNonSymmetricOnlyValueConsumesNoBits) { + uint8_t bytes[2] = {}; + BitBufferWriter writer(bytes, 2); + ASSERT_EQ(writer.RemainingBitCount(), 16u); + + EXPECT_TRUE(writer.WriteNonSymmetric(0, /*num_values=*/1)); + + EXPECT_EQ(writer.RemainingBitCount(), 16u); +} + uint64_t GolombEncoded(uint32_t val) { val++; uint32_t bit_counter = val; diff --git a/rtc_base/bounded_inline_vector.h b/rtc_base/bounded_inline_vector.h new file mode 100644 index 0000000000..f8b7eb3d86 --- /dev/null +++ b/rtc_base/bounded_inline_vector.h @@ -0,0 +1,155 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_BOUNDED_INLINE_VECTOR_H_ +#define RTC_BASE_BOUNDED_INLINE_VECTOR_H_ + +#include + +#include +#include +#include + +#include "rtc_base/bounded_inline_vector_impl.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +// A small std::vector-like type whose capacity is a compile-time constant. It +// stores all data inline and never heap allocates (beyond what its element type +// requires). Trying to grow it beyond its constant capacity is an error. +// +// TODO(bugs.webrtc.org/11391): Comparison operators. +// TODO(bugs.webrtc.org/11391): Methods for adding and deleting elements. +template +class BoundedInlineVector { + static_assert(!std::is_const::value, "T may not be const"); + static_assert(fixed_capacity > 0, "Capacity must be strictly positive"); + + public: + using size_type = int; + using value_type = T; + using const_iterator = const T*; + + BoundedInlineVector() = default; + BoundedInlineVector(const BoundedInlineVector&) = default; + BoundedInlineVector(BoundedInlineVector&&) = default; + BoundedInlineVector& operator=(const BoundedInlineVector&) = default; + BoundedInlineVector& operator=(BoundedInlineVector&&) = default; + ~BoundedInlineVector() = default; + + // This constructor is implicit, to make it possible to write e.g. + // + // BoundedInlineVector x = {2.72, 3.14}; + // + // and + // + // BoundedInlineVector GetConstants() { + // return {2.72, 3.14}; + // } + template ::value>* = + nullptr> + BoundedInlineVector(Ts&&... elements) // NOLINT(runtime/explicit) + : storage_(std::forward(elements)...) 
{ + static_assert(sizeof...(Ts) <= fixed_capacity, ""); + } + + template < + int other_capacity, + typename std::enable_if_t* = nullptr> + BoundedInlineVector(const BoundedInlineVector& other) { + RTC_DCHECK_LE(other.size(), fixed_capacity); + bounded_inline_vector_impl::CopyElements(other.data(), other.size(), + storage_.data, &storage_.size); + } + + template < + int other_capacity, + typename std::enable_if_t* = nullptr> + BoundedInlineVector(BoundedInlineVector&& other) { + RTC_DCHECK_LE(other.size(), fixed_capacity); + bounded_inline_vector_impl::MoveElements(other.data(), other.size(), + storage_.data, &storage_.size); + } + + template < + int other_capacity, + typename std::enable_if_t* = nullptr> + BoundedInlineVector& operator=( + const BoundedInlineVector& other) { + bounded_inline_vector_impl::DestroyElements(storage_.data, storage_.size); + RTC_DCHECK_LE(other.size(), fixed_capacity); + bounded_inline_vector_impl::CopyElements(other.data(), other.size(), + storage_.data, &storage_.size); + return *this; + } + + template < + int other_capacity, + typename std::enable_if_t* = nullptr> + BoundedInlineVector& operator=( + BoundedInlineVector&& other) { + bounded_inline_vector_impl::DestroyElements(storage_.data, storage_.size); + RTC_DCHECK_LE(other.size(), fixed_capacity); + bounded_inline_vector_impl::MoveElements(other.data(), other.size(), + storage_.data, &storage_.size); + return *this; + } + + bool empty() const { return storage_.size == 0; } + int size() const { return storage_.size; } + constexpr int capacity() const { return fixed_capacity; } + + // Resizes the BoundedInlineVector to the given size, which must not exceed + // its constant capacity. If the size is increased, the added elements are + // default constructed. 
+ void resize(int new_size) { + RTC_DCHECK_GE(new_size, 0); + RTC_DCHECK_LE(new_size, fixed_capacity); + if (new_size > storage_.size) { + bounded_inline_vector_impl::DefaultInitializeElements( + storage_.data + storage_.size, new_size - storage_.size); + } else if (new_size < storage_.size) { + bounded_inline_vector_impl::DestroyElements(storage_.data + new_size, + storage_.size - new_size); + } + storage_.size = new_size; + } + + const T* data() const { return storage_.data; } + T* data() { return storage_.data; } + + const T& operator[](int index) const { + RTC_DCHECK_GE(index, 0); + RTC_DCHECK_LT(index, storage_.size); + return storage_.data[index]; + } + T& operator[](int index) { + RTC_DCHECK_GE(index, 0); + RTC_DCHECK_LT(index, storage_.size); + return storage_.data[index]; + } + + T* begin() { return storage_.data; } + T* end() { return storage_.data + storage_.size; } + const T* begin() const { return storage_.data; } + const T* end() const { return storage_.data + storage_.size; } + const T* cbegin() const { return storage_.data; } + const T* cend() const { return storage_.data + storage_.size; } + + private: + bounded_inline_vector_impl::Storage storage_; +}; + +} // namespace webrtc + +#endif // RTC_BASE_BOUNDED_INLINE_VECTOR_H_ diff --git a/rtc_base/bounded_inline_vector_impl.h b/rtc_base/bounded_inline_vector_impl.h new file mode 100644 index 0000000000..3539ace5bc --- /dev/null +++ b/rtc_base/bounded_inline_vector_impl.h @@ -0,0 +1,225 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_BOUNDED_INLINE_VECTOR_IMPL_H_ +#define RTC_BASE_BOUNDED_INLINE_VECTOR_IMPL_H_ + +#include + +#include +#include +#include +#include + +namespace webrtc { +namespace bounded_inline_vector_impl { + +template +struct BoolPack; + +// Tests if all its parameters (x0, x1, ..., xn) are true. The implementation +// checks whether (x0, x1, ..., xn, true) == (true, x0, x1, ..., xn), which is +// true iff true == x0 && x0 == x1 && x1 == x2 ... && xn-1 == xn && xn == true. +template +using AllTrue = std::is_same, BoolPack>; + +template +using AllConvertible = AllTrue::value...>; + +// Initializes part of an uninitialized array. Unlike normal array +// initialization, does not zero the remaining array elements. Caller is +// responsible for ensuring that there is enough space in `data`. +template +void InitializeElements(T* data) {} +template +void InitializeElements(T* data, U&& element, Us&&... elements) { + // Placement new, because we construct a new object in uninitialized memory. + ::new (data) T(std::forward(element)); + InitializeElements(data + 1, std::forward(elements)...); +} + +// Default initializes uninitialized array elements. +// TODO(kwiberg): Replace with std::uninitialized_default_construct_n() (C++17). +template +void DefaultInitializeElements(T* data, int size) { + for (int i = 0; i < size; ++i) { + // Placement new, because we construct a new object in uninitialized memory. + ::new (&data[i]) T; + } +} + +// Copies from source to uninitialized destination. Caller is responsible for +// ensuring that there is enough space in `dst_data`. +template +void CopyElements(const T* src_data, int src_size, T* dst_data, int* dst_size) { + if /*constexpr*/ (std::is_trivially_copy_constructible::value) { + std::memcpy(dst_data, src_data, src_size * sizeof(T)); + } else { + std::uninitialized_copy_n(src_data, src_size, dst_data); + } + *dst_size = src_size; +} + +// Moves from source to uninitialized destination. 
Caller is responsible for +// ensuring that there is enough space in `dst_data`. +template +void MoveElements(T* src_data, int src_size, T* dst_data, int* dst_size) { + if /*constexpr*/ (std::is_trivially_move_constructible::value) { + std::memcpy(dst_data, src_data, src_size * sizeof(T)); + } else { + // TODO(kwiberg): Use std::uninitialized_move_n() instead (C++17). + for (int i = 0; i < src_size; ++i) { + // Placement new, because we create a new object in uninitialized + // memory. + ::new (&dst_data[i]) T(std::move(src_data[i])); + } + } + *dst_size = src_size; +} + +// Destroys elements, leaving them uninitialized. +template +void DestroyElements(T* data, int size) { + if /*constexpr*/ (!std::is_trivially_destructible::value) { + for (int i = 0; i < size; ++i) { + data[i].~T(); + } + } +} + +// If elements are trivial and the total capacity is at most this many bytes, +// copy everything instead of just the elements that are in use; this is more +// efficient, and makes BoundedInlineVector trivially copyable. +static constexpr int kSmallSize = 64; + +// Storage implementations. +// +// There are diferent Storage structs for diferent kinds of element types. The +// common contract is the following: +// +// * They have public `size` variables and `data` array members. +// +// * Their owner is responsible for enforcing the invariant that the first +// `size` elements in `data` are initialized, and the remaining elements are +// not initialized. +// +// * They implement default construction, construction with one or more +// elements, copy/move construction, copy/move assignment, and destruction; +// the owner must ensure that the invariant holds whenever these operations +// occur. + +// Storage implementation for nontrivial element types. +template ::value, + bool is_small = (sizeof(T) * fixed_capacity <= kSmallSize)> +struct Storage { + static_assert(!std::is_trivial::value, ""); + + template < + typename... 
Ts, + typename std::enable_if_t::value>* = nullptr> + explicit Storage(Ts&&... elements) : size(sizeof...(Ts)) { + InitializeElements(data, std::forward(elements)...); + } + + Storage(const Storage& other) { + CopyElements(other.data, other.size, data, &size); + } + + Storage(Storage&& other) { + MoveElements(other.data, other.size, data, &size); + } + + Storage& operator=(const Storage& other) { + if (this != &other) { + DestroyElements(data, size); + CopyElements(other.data, other.size, data, &size); + } + return *this; + } + + Storage& operator=(Storage&& other) { + DestroyElements(data, size); + size = 0; // Needed in case of self assignment. + MoveElements(other.data, other.size, data, &size); + return *this; + } + + ~Storage() { DestroyElements(data, size); } + + int size; + union { + // Since this array is in a union, we get to construct and destroy it + // manually. + T data[fixed_capacity]; // NOLINT(runtime/arrays) + }; +}; + +// Storage implementation for trivial element types when the capacity is small +// enough that we can cheaply copy everything. +template +struct Storage { + static_assert(std::is_trivial::value, ""); + static_assert(sizeof(T) * fixed_capacity <= kSmallSize, ""); + + template < + typename... Ts, + typename std::enable_if_t::value>* = nullptr> + explicit Storage(Ts&&... elements) : size(sizeof...(Ts)) { + InitializeElements(data, std::forward(elements)...); + } + + Storage(const Storage&) = default; + Storage& operator=(const Storage&) = default; + ~Storage() = default; + + int size; + T data[fixed_capacity]; // NOLINT(runtime/arrays) +}; + +// Storage implementation for trivial element types when the capacity is large +// enough that we want to avoid copying uninitialized elements. +template +struct Storage { + static_assert(std::is_trivial::value, ""); + static_assert(sizeof(T) * fixed_capacity > kSmallSize, ""); + + template < + typename... Ts, + typename std::enable_if_t::value>* = nullptr> + explicit Storage(Ts&&... 
elements) : size(sizeof...(Ts)) { + InitializeElements(data, std::forward(elements)...); + } + + Storage(const Storage& other) : size(other.size) { + std::memcpy(data, other.data, other.size * sizeof(T)); + } + + Storage& operator=(const Storage& other) { + if (this != &other) { + size = other.size; + std::memcpy(data, other.data, other.size * sizeof(T)); + } + return *this; + } + + ~Storage() = default; + + int size; + union { + T data[fixed_capacity]; // NOLINT(runtime/arrays) + }; +}; + +} // namespace bounded_inline_vector_impl +} // namespace webrtc + +#endif // RTC_BASE_BOUNDED_INLINE_VECTOR_IMPL_H_ diff --git a/rtc_base/bounded_inline_vector_unittest.cc b/rtc_base/bounded_inline_vector_unittest.cc new file mode 100644 index 0000000000..50cf2e3153 --- /dev/null +++ b/rtc_base/bounded_inline_vector_unittest.cc @@ -0,0 +1,133 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/bounded_inline_vector.h" + +#include +#include +#include + +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using SmallTrivial = BoundedInlineVector; +using LargeTrivial = BoundedInlineVector; +using NonTrivial = BoundedInlineVector; +static_assert(std::is_trivially_copyable::value, ""); +static_assert(!std::is_trivially_copyable::value, ""); +static_assert(std::is_trivially_destructible::value, ""); +static_assert(!std::is_trivially_copyable::value, ""); +static_assert(!std::is_trivially_destructible::value, ""); + +template +class BoundedInlineVectorTestAllTypes : public ::testing::Test {}; + +using AllTypes = + ::testing::Types, // Trivial nonprimitive type. 
+ std::unique_ptr, // Move-only type. + std::string>; // Nontrivial copyable type. +TYPED_TEST_SUITE(BoundedInlineVectorTestAllTypes, AllTypes); + +template +class BoundedInlineVectorTestCopyableTypes : public ::testing::Test {}; + +using CopyableTypes = ::testing::Types, std::string>; +TYPED_TEST_SUITE(BoundedInlineVectorTestCopyableTypes, CopyableTypes); + +TYPED_TEST(BoundedInlineVectorTestAllTypes, ConstructEmpty) { + BoundedInlineVector x; + EXPECT_EQ(x.size(), 0); + EXPECT_EQ(x.begin(), x.end()); + static_assert(x.capacity() == 3, ""); +} + +TYPED_TEST(BoundedInlineVectorTestAllTypes, ConstructNonempty) { + BoundedInlineVector x = {TypeParam(), TypeParam()}; + EXPECT_EQ(x.size(), 2); + static_assert(x.capacity() == 3, ""); +} + +TYPED_TEST(BoundedInlineVectorTestCopyableTypes, CopyConstruct) { + BoundedInlineVector x = {TypeParam(), TypeParam()}; + BoundedInlineVector y = x; + EXPECT_EQ(y.size(), 2); + static_assert(x.capacity() == 3, ""); + static_assert(y.capacity() == 2, ""); +} + +TYPED_TEST(BoundedInlineVectorTestCopyableTypes, CopyAssign) { + BoundedInlineVector x = {TypeParam(), TypeParam()}; + BoundedInlineVector y; + EXPECT_EQ(y.size(), 0); + y = x; + EXPECT_EQ(y.size(), 2); +} + +TYPED_TEST(BoundedInlineVectorTestAllTypes, MoveConstruct) { + BoundedInlineVector x = {TypeParam(), TypeParam()}; + BoundedInlineVector y = std::move(x); + EXPECT_EQ(y.size(), 2); + static_assert(x.capacity() == 3, ""); + static_assert(y.capacity() == 2, ""); +} + +TYPED_TEST(BoundedInlineVectorTestAllTypes, MoveAssign) { + BoundedInlineVector x = {TypeParam(), TypeParam()}; + BoundedInlineVector y; + EXPECT_EQ(y.size(), 0); + y = std::move(x); + EXPECT_EQ(y.size(), 2); +} + +TEST(BoundedInlineVectorTestOneType, Iteration) { + BoundedInlineVector sv{"one", "two", "three", "four"}; + std::string cat; + for (const auto& s : sv) { + cat += s; + } + EXPECT_EQ(cat, "onetwothreefour"); +} + +TEST(BoundedInlineVectorTestOneType, Indexing) { + BoundedInlineVector x = {3.14}; + 
EXPECT_EQ(x[0], 3.14); +} + +template +BoundedInlineVector Returns(Ts... values) { + return {std::forward(values)...}; +} + +TYPED_TEST(BoundedInlineVectorTestAllTypes, Return) { + EXPECT_EQ((Returns().size()), 0); + EXPECT_EQ((Returns(TypeParam(), TypeParam()).size()), 2); +} + +TYPED_TEST(BoundedInlineVectorTestAllTypes, Resize) { + BoundedInlineVector x; + EXPECT_EQ(x.size(), 0); + x.resize(17); + EXPECT_EQ(x.size(), 17); + // Test one arbitrary element, mostly to give MSan a chance to scream. But if + // the type has a trivial default constructor we can't, because the element + // won't be initialized. + if (!std::is_trivially_default_constructible::value) { + EXPECT_EQ(x[4], TypeParam()); + } + x.resize(2); + EXPECT_EQ(x.size(), 2); +} + +} // namespace +} // namespace webrtc diff --git a/rtc_base/buffer.h b/rtc_base/buffer.h index 34ef8859a8..d1639e2f71 100644 --- a/rtc_base/buffer.h +++ b/rtc_base/buffer.h @@ -64,6 +64,7 @@ class BufferT { public: using value_type = T; + using const_iterator = const T*; // An empty BufferT. 
BufferT() : size_(0), capacity_(0), data_(nullptr) { @@ -369,7 +370,9 @@ class BufferT { : capacity; std::unique_ptr new_data(new T[new_capacity]); - std::memcpy(new_data.get(), data_.get(), size_ * sizeof(T)); + if (data_ != nullptr) { + std::memcpy(new_data.get(), data_.get(), size_ * sizeof(T)); + } MaybeZeroCompleteBuffer(); data_ = std::move(new_data); capacity_ = new_capacity; diff --git a/rtc_base/buffer_queue.cc b/rtc_base/buffer_queue.cc index 445045ceea..7879e933c7 100644 --- a/rtc_base/buffer_queue.cc +++ b/rtc_base/buffer_queue.cc @@ -21,23 +21,20 @@ BufferQueue::BufferQueue(size_t capacity, size_t default_size) : capacity_(capacity), default_size_(default_size) {} BufferQueue::~BufferQueue() { - CritScope cs(&crit_); - - for (Buffer* buffer : queue_) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + for (Buffer* buffer : queue_) delete buffer; - } - for (Buffer* buffer : free_list_) { + for (Buffer* buffer : free_list_) delete buffer; - } } size_t BufferQueue::size() const { - CritScope cs(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return queue_.size(); } void BufferQueue::Clear() { - CritScope cs(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); while (!queue_.empty()) { free_list_.push_back(queue_.front()); queue_.pop_front(); @@ -45,36 +42,30 @@ void BufferQueue::Clear() { } bool BufferQueue::ReadFront(void* buffer, size_t bytes, size_t* bytes_read) { - CritScope cs(&crit_); - if (queue_.empty()) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (queue_.empty()) return false; - } - bool was_writable = queue_.size() < capacity_; Buffer* packet = queue_.front(); queue_.pop_front(); bytes = std::min(bytes, packet->size()); memcpy(buffer, packet->data(), bytes); - if (bytes_read) { + + if (bytes_read) *bytes_read = bytes; - } + free_list_.push_back(packet); - if (!was_writable) { - NotifyWritableForTest(); - } return true; } bool BufferQueue::WriteBack(const void* buffer, size_t bytes, size_t* bytes_written) { - CritScope cs(&crit_); - if (queue_.size() 
== capacity_) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (queue_.size() == capacity_) return false; - } - bool was_readable = !queue_.empty(); Buffer* packet; if (!free_list_.empty()) { packet = free_list_.back(); @@ -84,13 +75,10 @@ bool BufferQueue::WriteBack(const void* buffer, } packet->SetData(static_cast(buffer), bytes); - if (bytes_written) { + if (bytes_written) *bytes_written = bytes; - } + queue_.push_back(packet); - if (!was_readable) { - NotifyReadableForTest(); - } return true; } diff --git a/rtc_base/buffer_queue.h b/rtc_base/buffer_queue.h index 5cb18d0220..24a9b04dc2 100644 --- a/rtc_base/buffer_queue.h +++ b/rtc_base/buffer_queue.h @@ -18,16 +18,16 @@ #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" namespace rtc { -class BufferQueue { +class BufferQueue final { public: // Creates a buffer queue with a given capacity and default buffer size. BufferQueue(size_t capacity, size_t default_size); - virtual ~BufferQueue(); + ~BufferQueue(); // Return number of queued buffers. size_t size() const; @@ -44,17 +44,22 @@ class BufferQueue { // Returns true unless no data could be written. bool WriteBack(const void* data, size_t bytes, size_t* bytes_written); - protected: - // These methods are called when the state of the queue changes. 
- virtual void NotifyReadableForTest() {} - virtual void NotifyWritableForTest() {} + bool is_writable() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return queue_.size() < capacity_; + } + + bool is_readable() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return !queue_.empty(); + } private: - size_t capacity_; - size_t default_size_; - CriticalSection crit_; - std::deque queue_ RTC_GUARDED_BY(crit_); - std::vector free_list_ RTC_GUARDED_BY(crit_); + webrtc::SequenceChecker sequence_checker_; + const size_t capacity_; + const size_t default_size_; + std::deque queue_ RTC_GUARDED_BY(sequence_checker_); + std::vector free_list_ RTC_GUARDED_BY(sequence_checker_); RTC_DISALLOW_COPY_AND_ASSIGN(BufferQueue); }; diff --git a/rtc_base/buffer_unittest.cc b/rtc_base/buffer_unittest.cc index 40d9edc565..8beae43cf9 100644 --- a/rtc_base/buffer_unittest.cc +++ b/rtc_base/buffer_unittest.cc @@ -14,12 +14,16 @@ #include #include "api/array_view.h" +#include "test/gmock.h" #include "test/gtest.h" namespace rtc { namespace { +using ::testing::ElementsAre; +using ::testing::ElementsAreArray; + // clang-format off const uint8_t kTestData[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf}; @@ -104,10 +108,12 @@ TEST(BufferTest, TestSetAndAppendWithUnknownArg) { buf.SetData(TestDataContainer()); EXPECT_EQ(3u, buf.size()); EXPECT_EQ(Buffer(kTestData, 3), buf); + EXPECT_THAT(buf, ElementsAre(0, 1, 2)); buf.AppendData(TestDataContainer()); EXPECT_EQ(6u, buf.size()); EXPECT_EQ(0, memcmp(buf.data(), kTestData, 3)); EXPECT_EQ(0, memcmp(buf.data() + 3, kTestData, 3)); + EXPECT_THAT(buf, ElementsAre(0, 1, 2, 0, 1, 2)); } TEST(BufferTest, TestSetSizeSmaller) { @@ -362,9 +368,7 @@ TEST(BufferTest, TestBracketWrite) { buf[i] = kTestData[i]; } - for (size_t i = 0; i != 7u; ++i) { - EXPECT_EQ(buf[i], kTestData[i]); - } + EXPECT_THAT(buf, ElementsAreArray(kTestData, 7)); } TEST(BufferTest, TestBeginEnd) { @@ -392,9 +396,7 @@ TEST(BufferTest, TestInt16) { 
EXPECT_EQ(buf.capacity(), 5u); EXPECT_NE(buf.data(), nullptr); EXPECT_FALSE(buf.empty()); - for (size_t i = 0; i != buf.size(); ++i) { - EXPECT_EQ(test_data[i], buf[i]); - } + EXPECT_THAT(buf, ElementsAreArray(test_data)); BufferT buf2(test_data); EXPECT_EQ(buf, buf2); buf2[0] = 9; @@ -445,7 +447,7 @@ TEST(BufferTest, TestStruct) { EXPECT_EQ(kObsidian, buf[2].stone); } -TEST(BufferTest, DieOnUseAfterMove) { +TEST(BufferDeathTest, DieOnUseAfterMove) { Buffer buf(17); Buffer buf2 = std::move(buf); EXPECT_EQ(buf2.size(), 17u); diff --git a/rtc_base/callback_list.cc b/rtc_base/callback_list.cc new file mode 100644 index 0000000000..ac947e2258 --- /dev/null +++ b/rtc_base/callback_list.cc @@ -0,0 +1,48 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/callback_list.h" + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace callback_list_impl { + +CallbackListReceivers::CallbackListReceivers() = default; + +CallbackListReceivers::~CallbackListReceivers() { + RTC_CHECK(!send_in_progress_); +} + +void CallbackListReceivers::Foreach( + rtc::FunctionView fv) { + RTC_CHECK(!send_in_progress_); + send_in_progress_ = true; + for (auto& r : receivers_) { + fv(r); + } + send_in_progress_ = false; +} + +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<1>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<2>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<3>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<4>); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::NontrivialUntypedFunctionArgs); +template void CallbackListReceivers::AddReceiver( + UntypedFunction::FunctionPointerUntypedFunctionArgs); + +} // namespace callback_list_impl +} // namespace webrtc diff --git a/rtc_base/callback_list.h b/rtc_base/callback_list.h new file mode 100644 index 0000000000..659b838d02 --- /dev/null +++ b/rtc_base/callback_list.h @@ -0,0 +1,167 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_CALLBACK_LIST_H_ +#define RTC_BASE_CALLBACK_LIST_H_ + +#include +#include + +#include "api/function_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/assume.h" +#include "rtc_base/system/inline.h" +#include "rtc_base/untyped_function.h" + +namespace webrtc { +namespace callback_list_impl { + +class CallbackListReceivers { + public: + CallbackListReceivers(); + CallbackListReceivers(const CallbackListReceivers&) = delete; + CallbackListReceivers& operator=(const CallbackListReceivers&) = delete; + CallbackListReceivers(CallbackListReceivers&&) = delete; + CallbackListReceivers& operator=(CallbackListReceivers&&) = delete; + ~CallbackListReceivers(); + + template + RTC_NO_INLINE void AddReceiver(UntypedFunctionArgsT args) { + RTC_CHECK(!send_in_progress_); + receivers_.push_back(UntypedFunction::Create(args)); + } + + void Foreach(rtc::FunctionView fv); + + private: + std::vector receivers_; + bool send_in_progress_ = false; +}; + +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<1>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<2>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<3>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::TrivialUntypedFunctionArgs<4>); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::NontrivialUntypedFunctionArgs); +extern template void CallbackListReceivers::AddReceiver( + UntypedFunction::FunctionPointerUntypedFunctionArgs); + +} // namespace callback_list_impl + +// A collection of receivers (callable objects) that can be called all at once. +// Optimized for minimal binary size. The template arguments dictate what +// signature the callbacks must have; for example, a CallbackList +// will require callbacks with signature void(int, float). 
+// +// CallbackList is neither copyable nor movable (could easily be made movable if +// necessary). Callbacks must be movable, but need not be copyable. +// +// Usage example: +// +// // Declaration (usually a member variable). +// CallbackList foo_; +// +// // Register callbacks. This can be done zero or more times. The +// // callbacks must accept the arguments types listed in the CallbackList's +// // template argument list, and must return void. +// foo_.AddReceiver([...](int a, float b) {...}); // Lambda. +// foo_.AddReceiver(SomeFunction); // Function pointer. +// +// // Call the zero or more receivers, one after the other. +// foo_.Send(17, 3.14); +// +// Callback lifetime considerations +// -------------------------------- +// +// CallbackList::AddReceiver() takes ownership of the given callback by moving +// it in place. The callback can be any callable object; in particular, it may +// have a nontrivial destructor, which will be run when the CallbackList is +// destroyed. The callback may thus access data via any type of smart pointer, +// expressing e.g. unique, shared, or weak ownership. Of course, if the data is +// guaranteed to outlive the callback, a plain raw pointer can be used. +// +// Take care when trying to have the callback own reference-counted data. The +// CallbackList will keep the callback alive, and the callback will keep its +// data alive, so as usual with reference-counted ownership, keep an eye out for +// cycles! +// +// Thread safety +// ------------- +// +// Like most C++ types, CallbackList is thread compatible: it's not safe to +// access it concurrently from multiple threads, but it can be made safe if it +// is protected by a mutex, for example. +// +// Excercise some care when deciding what mutexes to hold when you call +// CallbackList::Send(). In particular, do not hold mutexes that callbacks may +// need to grab. 
If a larger object has a CallbackList member and a single mutex +// that protects all of its data members, this may e.g. make it necessary to +// protect its CallbackList with a separate mutex; otherwise, there will be a +// deadlock if the callbacks try to access the object. +// +// CallbackList as a class data member +// ----------------------------------- +// +// CallbackList is a normal C++ data type, and should be private when it is a +// data member of a class. For thread safety reasons (see above), it is likely +// best to not have an accessor for the entire CallbackList, and instead only +// allow callers to add callbacks: +// +// template +// void AddFooCallback(F&& callback) { +// // Maybe grab a mutex here? +// foo_callbacks_.AddReceiver(std::forward(callback)); +// } +// +// Removing callbacks +// ------------------ +// +// TODO(kwiberg): The current design doesn’t support removing callbacks, only +// adding them, but removal support can easily be added. +template +class CallbackList { + public: + CallbackList() = default; + CallbackList(const CallbackList&) = delete; + CallbackList& operator=(const CallbackList&) = delete; + CallbackList(CallbackList&&) = delete; + CallbackList& operator=(CallbackList&&) = delete; + + // Adds a new receiver. The receiver (a callable object or a function pointer) + // must be movable, but need not be copyable. Its call signature should be + // `void(ArgT...)`. + template + void AddReceiver(F&& f) { + receivers_.AddReceiver( + UntypedFunction::PrepareArgs(std::forward(f))); + } + + // Calls all receivers with the given arguments. While the Send is in + // progress, no method calls are allowed; specifically, this means that the + // callbacks may not do anything with this CallbackList instance. + template + void Send(ArgU&&... 
args) { + receivers_.Foreach([&](UntypedFunction& f) { + f.Call(std::forward(args)...); + }); + } + + private: + callback_list_impl::CallbackListReceivers receivers_; +}; + +} // namespace webrtc + +#endif // RTC_BASE_CALLBACK_LIST_H_ diff --git a/rtc_base/callback_list_unittest.cc b/rtc_base/callback_list_unittest.cc new file mode 100644 index 0000000000..811f85ead3 --- /dev/null +++ b/rtc_base/callback_list_unittest.cc @@ -0,0 +1,214 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include +#include + +#include "api/function_view.h" +#include "rtc_base/bind.h" +#include "rtc_base/callback_list.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +TEST(CallbackList, NoRecieverSingleMessageTest) { + CallbackList c; + + c.Send("message"); +} + +TEST(CallbackList, MultipleParameterMessageTest) { + CallbackList + c; + std::string str = "messege"; + int i = 10; + + c.Send(str, "message1", "message0", 123, &i, str); +} + +TEST(CallbackList, NoParameterMessageTest) { + CallbackList<> c; + + c.Send(); +} + +TEST(CallbackList, ReferenceTest) { + CallbackList c; + int index = 1; + + c.AddReceiver([](int& index) { index++; }); + c.Send(index); + + EXPECT_EQ(index, 2); +} + +enum State { + kNew, + kChecking, +}; + +TEST(CallbackList, SingleEnumValueTest) { + CallbackList c; + State s1 = kNew; + int index = 0; + + c.AddReceiver([&index](State s) { index++; }); + c.Send(s1); + + EXPECT_EQ(index, 1); +} + +TEST(CallbackList, SingleEnumReferenceTest) { + CallbackList c; + State s = kNew; + + c.AddReceiver([](State& s) { s = kChecking; }); + c.Send(s); + + EXPECT_EQ(s, kChecking); +} + +TEST(CallbackList, 
ConstReferenceTest) { + CallbackList c; + int i = 0; + int index = 1; + + c.AddReceiver([&i](const int& index) { i = index; }); + c.Send(index); + + EXPECT_EQ(i, 1); +} + +TEST(CallbackList, PointerTest) { + CallbackList c; + int index = 1; + + c.AddReceiver([](int* index) { (*index)++; }); + c.Send(&index); + + EXPECT_EQ(index, 2); +} + +TEST(CallbackList, CallByValue) { + CallbackList c; + int x = 17; + + c.AddReceiver([&x](int n) { x += n; }); + int y = 89; + c.Send(y); + + EXPECT_EQ(x, 106); +} + +void PlusOne(int& a) { + a++; +} + +TEST(CallbackList, FunctionPtrTest) { + CallbackList c; + int index = 1; + + c.AddReceiver(PlusOne); + c.Send(index); + + EXPECT_EQ(index, 2); +} + +struct LargeNonTrivial { + int a[17]; + + LargeNonTrivial() = default; + LargeNonTrivial(LargeNonTrivial&& m) {} + ~LargeNonTrivial() = default; + + void operator()(int& a) { a = 1; } +}; + +TEST(CallbackList, LargeNonTrivialTest) { + CallbackList c; + int i = 0; + static_assert(sizeof(LargeNonTrivial) > UntypedFunction::kInlineStorageSize, + ""); + c.AddReceiver(LargeNonTrivial()); + c.Send(i); + + EXPECT_EQ(i, 1); +} + +struct LargeTrivial { + int a[17]; + void operator()(int& x) { x = 1; } +}; + +TEST(CallbackList, LargeTrivial) { + CallbackList c; + LargeTrivial lt; + int i = 0; + + static_assert(sizeof(lt) > UntypedFunction::kInlineStorageSize, ""); + c.AddReceiver(lt); + c.Send(i); + + EXPECT_EQ(i, 1); +} + +struct OnlyNonTriviallyConstructible { + OnlyNonTriviallyConstructible() = default; + OnlyNonTriviallyConstructible(OnlyNonTriviallyConstructible&& m) {} + + void operator()(int& a) { a = 1; } +}; + +TEST(CallbackList, OnlyNonTriviallyMoveConstructible) { + CallbackList c; + int i = 0; + + c.AddReceiver(OnlyNonTriviallyConstructible()); + c.Send(i); + + EXPECT_EQ(i, 1); +} + +TEST(CallbackList, MultipleReceiverSendTest) { + CallbackList c; + std::function plus = PlusOne; + int index = 1; + + c.AddReceiver(plus); + c.AddReceiver([](int& i) { i--; }); + c.AddReceiver(plus); + 
c.AddReceiver(plus); + c.Send(index); + c.Send(index); + + EXPECT_EQ(index, 5); +} + +class A { + public: + void increment(int& i) const { i++; } +}; + +TEST(CallbackList, MemberFunctionTest) { + CallbackList c; + A a; + int index = 1; + + c.AddReceiver([&a](int& i) { a.increment(i); }); + c.Send(index); + + EXPECT_EQ(index, 2); +} +// todo(glahiru): Add a test case to catch some error for Karl's first fix +// todo(glahiru): Add a test for rtc::Bind +// which used the following code in the Send +} // namespace +} // namespace webrtc diff --git a/rtc_base/checks.cc b/rtc_base/checks.cc index e5fc2ed416..239ea9f0da 100644 --- a/rtc_base/checks.cc +++ b/rtc_base/checks.cc @@ -36,6 +36,21 @@ #include "rtc_base/checks.h" namespace { + +RTC_NORETURN void WriteFatalLogAndAbort(const std::string& output) { + const char* output_c = output.c_str(); +#if defined(WEBRTC_ANDROID) + __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", output_c); +#endif + fflush(stdout); + fprintf(stderr, "%s", output_c); + fflush(stderr); +#if defined(WEBRTC_WIN) + DebugBreak(); +#endif + abort(); +} + #if defined(__GNUC__) __attribute__((__format__(__printf__, 2, 3))) #endif @@ -149,19 +164,7 @@ RTC_NORETURN void FatalLog(const char* file, va_end(args); - const char* output = s.c_str(); - -#if defined(WEBRTC_ANDROID) - __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", output); -#endif - - fflush(stdout); - fprintf(stderr, "%s", output); - fflush(stderr); -#if defined(WEBRTC_WIN) - DebugBreak(); -#endif - abort(); + WriteFatalLogAndAbort(s); } #else // RTC_CHECK_MSG_ENABLED RTC_NORETURN void FatalLog(const char* file, int line) { @@ -174,21 +177,39 @@ RTC_NORETURN void FatalLog(const char* file, int line) { "# Check failed.\n" "# ", file, line, LAST_SYSTEM_ERROR); - const char* output = s.c_str(); + WriteFatalLogAndAbort(s); +} +#endif // RTC_CHECK_MSG_ENABLED -#if defined(WEBRTC_ANDROID) - __android_log_print(ANDROID_LOG_ERROR, RTC_LOG_TAG_ANDROID, "%s\n", 
output); -#endif +#if RTC_DCHECK_IS_ON - fflush(stdout); - fprintf(stderr, "%s", output); - fflush(stderr); -#if defined(WEBRTC_WIN) - DebugBreak(); -#endif - abort(); +RTC_NORETURN void UnreachableCodeReached(const char* file, int line) { + std::string s; + AppendFormat(&s, + "\n\n" + "#\n" + "# Unreachable code reached: %s, line %d\n" + "# last system error: %u\n" + "# ", + file, line, LAST_SYSTEM_ERROR); + WriteFatalLogAndAbort(s); } -#endif // RTC_CHECK_MSG_ENABLED + +#else // !RTC_DCHECK_IS_ON + +RTC_NORETURN void UnreachableCodeReached() { + std::string s; + AppendFormat(&s, + "\n\n" + "#\n" + "# Unreachable code reached (file and line unknown)\n" + "# last system error: %u\n" + "# ", + LAST_SYSTEM_ERROR); + WriteFatalLogAndAbort(s); +} + +#endif // !RTC_DCHECK_IS_ON } // namespace webrtc_checks_impl } // namespace rtc diff --git a/rtc_base/checks.h b/rtc_base/checks.h index 0b461c8984..508de2a577 100644 --- a/rtc_base/checks.h +++ b/rtc_base/checks.h @@ -69,7 +69,7 @@ RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg); // the reason that it's better to terminate might simply be that the error // handling code isn't in place yet; in production, the reason might be that // the author of the code truly believes that x will always be true, but that -// she recognizes that if she is wrong, abrupt and unpleasant process +// they recognizes that if they are wrong, abrupt and unpleasant process // termination is still better than carrying on with the assumption violated. // // RTC_CHECK always evaluates its argument, so it's OK for x to have side @@ -338,6 +338,22 @@ class FatalLogCall final { const char* message_; }; +#if RTC_DCHECK_IS_ON + +// Be helpful, and include file and line in the RTC_CHECK_NOTREACHED error +// message. 
+#define RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS __FILE__, __LINE__ +RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(const char* file, int line); + +#else + +// Be mindful of binary size, and don't include file and line in the +// RTC_CHECK_NOTREACHED error message. +#define RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS +RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(); + +#endif + } // namespace webrtc_checks_impl // The actual stream used isn't important. We reference |ignored| in the code @@ -345,17 +361,17 @@ class FatalLogCall final { // in a particularly convoluted way with an extra ?: because that appears to be // the simplest construct that keeps Visual Studio from complaining about // condition being unused). -#define RTC_EAT_STREAM_PARAMETERS(ignored) \ - (true ? true : ((void)(ignored), true)) \ - ? static_cast(0) \ - : rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ - rtc::webrtc_checks_impl::LogStreamer<>() +#define RTC_EAT_STREAM_PARAMETERS(ignored) \ + (true ? true : ((void)(ignored), true)) \ + ? static_cast(0) \ + : ::rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() // Call RTC_EAT_STREAM_PARAMETERS with an argument that fails to compile if // values of the same types as |a| and |b| can't be compared with the given // operation, and that would evaluate |a| and |b| if evaluated. #define RTC_EAT_STREAM_PARAMETERS_OP(op, a, b) \ - RTC_EAT_STREAM_PARAMETERS(((void)rtc::Safe##op(a, b))) + RTC_EAT_STREAM_PARAMETERS(((void)::rtc::Safe##op(a, b))) // RTC_CHECK dies with a fatal error if condition is not true. It is *not* // controlled by NDEBUG or anything else, so the check will be executed @@ -367,36 +383,36 @@ class FatalLogCall final { // RTC_CHECK_OP is a helper macro for binary operators. // Don't use this macro directly in your code, use RTC_CHECK_EQ et al below. #if RTC_CHECK_MSG_ENABLED -#define RTC_CHECK(condition) \ - (condition) ? 
static_cast(0) \ - : rtc::webrtc_checks_impl::FatalLogCall( \ - __FILE__, __LINE__, #condition) & \ - rtc::webrtc_checks_impl::LogStreamer<>() - -#define RTC_CHECK_OP(name, op, val1, val2) \ - rtc::Safe##name((val1), (val2)) \ - ? static_cast(0) \ - : rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ - #val1 " " #op " " #val2) & \ - rtc::webrtc_checks_impl::LogStreamer<>() << (val1) << (val2) +#define RTC_CHECK(condition) \ + (condition) ? static_cast(0) \ + : ::rtc::webrtc_checks_impl::FatalLogCall( \ + __FILE__, __LINE__, #condition) & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() + +#define RTC_CHECK_OP(name, op, val1, val2) \ + ::rtc::Safe##name((val1), (val2)) \ + ? static_cast(0) \ + : ::rtc::webrtc_checks_impl::FatalLogCall( \ + __FILE__, __LINE__, #val1 " " #op " " #val2) & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() << (val1) << (val2) #else -#define RTC_CHECK(condition) \ - (condition) \ - ? static_cast(0) \ - : true ? rtc::webrtc_checks_impl::FatalLogCall(__FILE__, \ - __LINE__, "") & \ - rtc::webrtc_checks_impl::LogStreamer<>() \ - : rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ - rtc::webrtc_checks_impl::LogStreamer<>() - -#define RTC_CHECK_OP(name, op, val1, val2) \ - rtc::Safe##name((val1), (val2)) \ - ? static_cast(0) \ - : true ? rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ - "") & \ - rtc::webrtc_checks_impl::LogStreamer<>() \ - : rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ - rtc::webrtc_checks_impl::LogStreamer<>() +#define RTC_CHECK(condition) \ + (condition) \ + ? static_cast(0) \ + : true ? ::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, \ + __LINE__, "") & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() \ + : ::rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() + +#define RTC_CHECK_OP(name, op, val1, val2) \ + ::rtc::Safe##name((val1), (val2)) \ + ? static_cast(0) \ + : true ? 
::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, \ + __LINE__, "") & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() \ + : ::rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() #endif #define RTC_CHECK_EQ(val1, val2) RTC_CHECK_OP(Eq, ==, val1, val2) @@ -430,11 +446,19 @@ class FatalLogCall final { #define RTC_UNREACHABLE_CODE_HIT false #define RTC_NOTREACHED() RTC_DCHECK(RTC_UNREACHABLE_CODE_HIT) +// Kills the process with an error message. Never returns. Use when you wish to +// assert that a point in the code is never reached. +#define RTC_CHECK_NOTREACHED() \ + do { \ + ::rtc::webrtc_checks_impl::UnreachableCodeReached( \ + RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS); \ + } while (0) + // TODO(bugs.webrtc.org/8454): Add an RTC_ prefix or rename differently. -#define FATAL() \ - rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ - "FATAL()") & \ - rtc::webrtc_checks_impl::LogStreamer<>() +#define FATAL() \ + ::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ + "FATAL()") & \ + ::rtc::webrtc_checks_impl::LogStreamer<>() // Performs the integer division a/b and returns the result. CHECKs that the // remainder is zero. diff --git a/rtc_base/checks_unittest.cc b/rtc_base/checks_unittest.cc index e6e094e597..91e04cf6a1 100644 --- a/rtc_base/checks_unittest.cc +++ b/rtc_base/checks_unittest.cc @@ -19,7 +19,7 @@ TEST(ChecksTest, ExpressionNotEvaluatedWhenCheckPassing) { } #if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(ChecksTest, Checks) { +TEST(ChecksDeathTest, Checks) { #if RTC_CHECK_MSG_ENABLED EXPECT_DEATH(FATAL() << "message", "\n\n#\n" diff --git a/rtc_base/constructor_magic.h b/rtc_base/constructor_magic.h index e63c2ff628..8d12a7b135 100644 --- a/rtc_base/constructor_magic.h +++ b/rtc_base/constructor_magic.h @@ -11,24 +11,10 @@ #ifndef RTC_BASE_CONSTRUCTOR_MAGIC_H_ #define RTC_BASE_CONSTRUCTOR_MAGIC_H_ -// Put this in the declarations for a class to be unassignable. 
-#define RTC_DISALLOW_ASSIGN(TypeName) \ - TypeName& operator=(const TypeName&) = delete - // A macro to disallow the copy constructor and operator= functions. This should // be used in the declarations for a class. #define RTC_DISALLOW_COPY_AND_ASSIGN(TypeName) \ TypeName(const TypeName&) = delete; \ - RTC_DISALLOW_ASSIGN(TypeName) - -// A macro to disallow all the implicit constructors, namely the default -// constructor, copy constructor and operator= functions. -// -// This should be used in the declarations for a class that wants to prevent -// anyone from instantiating it. This is especially useful for classes -// containing only static methods. -#define RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \ - TypeName() = delete; \ - RTC_DISALLOW_COPY_AND_ASSIGN(TypeName) + TypeName& operator=(const TypeName&) = delete #endif // RTC_BASE_CONSTRUCTOR_MAGIC_H_ diff --git a/rtc_base/critical_section.cc b/rtc_base/deprecated/recursive_critical_section.cc similarity index 80% rename from rtc_base/critical_section.cc rename to rtc_base/deprecated/recursive_critical_section.cc index 1969edefa5..068b9aa808 100644 --- a/rtc_base/critical_section.cc +++ b/rtc_base/deprecated/recursive_critical_section.cc @@ -8,17 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/platform_thread_types.h" +#include "rtc_base/synchronization/yield.h" #include "rtc_base/system/unused.h" -// TODO(tommi): Split this file up to per-platform implementation files. 
- #if RTC_DCHECK_IS_ON #define RTC_CS_DEBUG_CODE(x) x #else // !RTC_DCHECK_IS_ON @@ -27,7 +26,7 @@ namespace rtc { -CriticalSection::CriticalSection() { +RecursiveCriticalSection::RecursiveCriticalSection() { #if defined(WEBRTC_WIN) InitializeCriticalSection(&crit_); #elif defined(WEBRTC_POSIX) @@ -42,7 +41,7 @@ CriticalSection::CriticalSection() { pthread_mutexattr_settype(&mutex_attribute, PTHREAD_MUTEX_RECURSIVE); #if defined(WEBRTC_MAC) pthread_mutexattr_setpolicy_np(&mutex_attribute, - _PTHREAD_MUTEX_POLICY_FAIRSHARE); + _PTHREAD_MUTEX_POLICY_FIRSTFIT); #endif pthread_mutex_init(&mutex_, &mutex_attribute); pthread_mutexattr_destroy(&mutex_attribute); @@ -56,7 +55,7 @@ CriticalSection::CriticalSection() { #endif } -CriticalSection::~CriticalSection() { +RecursiveCriticalSection::~RecursiveCriticalSection() { #if defined(WEBRTC_WIN) DeleteCriticalSection(&crit_); #elif defined(WEBRTC_POSIX) @@ -70,7 +69,7 @@ CriticalSection::~CriticalSection() { #endif } -void CriticalSection::Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION() { +void RecursiveCriticalSection::Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION() { #if defined(WEBRTC_WIN) EnterCriticalSection(&crit_); #elif defined(WEBRTC_POSIX) @@ -129,7 +128,8 @@ void CriticalSection::Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION() { #endif } -bool CriticalSection::TryEnter() const RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { +bool RecursiveCriticalSection::TryEnter() const + RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { #if defined(WEBRTC_WIN) return TryEnterCriticalSection(&crit_) != FALSE; #elif defined(WEBRTC_POSIX) @@ -162,7 +162,7 @@ bool CriticalSection::TryEnter() const RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { #endif } -void CriticalSection::Leave() const RTC_UNLOCK_FUNCTION() { +void RecursiveCriticalSection::Leave() const RTC_UNLOCK_FUNCTION() { RTC_DCHECK(CurrentThreadIsOwner()); #if defined(WEBRTC_WIN) LeaveCriticalSection(&crit_); @@ -190,7 +190,7 @@ void CriticalSection::Leave() const RTC_UNLOCK_FUNCTION() { #endif } -bool 
CriticalSection::CurrentThreadIsOwner() const { +bool RecursiveCriticalSection::CurrentThreadIsOwner() const { #if defined(WEBRTC_WIN) // OwningThread has type HANDLE but actually contains the Thread ID: // http://stackoverflow.com/questions/12675301/why-is-the-owningthread-member-of-critical-section-of-type-handle-when-it-is-de @@ -209,41 +209,11 @@ bool CriticalSection::CurrentThreadIsOwner() const { #endif } -CritScope::CritScope(const CriticalSection* cs) : cs_(cs) { +CritScope::CritScope(const RecursiveCriticalSection* cs) : cs_(cs) { cs_->Enter(); } CritScope::~CritScope() { cs_->Leave(); } -void GlobalLock::Lock() { -#if !defined(WEBRTC_WIN) && \ - (!defined(WEBRTC_MAC) || RTC_USE_NATIVE_MUTEX_ON_MAC) - const struct timespec ts_null = {0}; -#endif - - while (AtomicOps::CompareAndSwap(&lock_acquired_, 0, 1)) { -#if defined(WEBRTC_WIN) - ::Sleep(0); -#elif defined(WEBRTC_MAC) && !RTC_USE_NATIVE_MUTEX_ON_MAC - sched_yield(); -#else - nanosleep(&ts_null, nullptr); -#endif - } -} - -void GlobalLock::Unlock() { - int old_value = AtomicOps::CompareAndSwap(&lock_acquired_, 1, 0); - RTC_DCHECK_EQ(1, old_value) << "Unlock called without calling Lock first"; -} - -GlobalLockScope::GlobalLockScope(GlobalLock* lock) : lock_(lock) { - lock_->Lock(); -} - -GlobalLockScope::~GlobalLockScope() { - lock_->Unlock(); -} - } // namespace rtc diff --git a/rtc_base/critical_section.h b/rtc_base/deprecated/recursive_critical_section.h similarity index 71% rename from rtc_base/critical_section.h rename to rtc_base/deprecated/recursive_critical_section.h index cf10463bdf..c044c732b9 100644 --- a/rtc_base/critical_section.h +++ b/rtc_base/deprecated/recursive_critical_section.h @@ -8,13 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef RTC_BASE_CRITICAL_SECTION_H_ -#define RTC_BASE_CRITICAL_SECTION_H_ +#ifndef RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ +#define RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ -#include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/platform_thread_types.h" -#include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" #if defined(WEBRTC_WIN) @@ -43,13 +41,18 @@ namespace rtc { +// NOTE: This class is deprecated. Please use webrtc::Mutex instead! +// Search using https://www.google.com/?q=recursive+lock+considered+harmful +// to find the reasons. +// // Locking methods (Enter, TryEnter, Leave)are const to permit protecting -// members inside a const context without requiring mutable CriticalSections -// everywhere. CriticalSection is reentrant lock. -class RTC_LOCKABLE RTC_EXPORT CriticalSection { +// members inside a const context without requiring mutable +// RecursiveCriticalSections everywhere. RecursiveCriticalSection is +// reentrant lock. +class RTC_LOCKABLE RecursiveCriticalSection { public: - CriticalSection(); - ~CriticalSection(); + RecursiveCriticalSection(); + ~RecursiveCriticalSection(); void Enter() const RTC_EXCLUSIVE_LOCK_FUNCTION(); bool TryEnter() const RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true); @@ -87,37 +90,15 @@ class RTC_LOCKABLE RTC_EXPORT CriticalSection { // CritScope, for serializing execution through a scope. class RTC_SCOPED_LOCKABLE CritScope { public: - explicit CritScope(const CriticalSection* cs) RTC_EXCLUSIVE_LOCK_FUNCTION(cs); + explicit CritScope(const RecursiveCriticalSection* cs) + RTC_EXCLUSIVE_LOCK_FUNCTION(cs); ~CritScope() RTC_UNLOCK_FUNCTION(); private: - const CriticalSection* const cs_; + const RecursiveCriticalSection* const cs_; RTC_DISALLOW_COPY_AND_ASSIGN(CritScope); }; -// A lock used to protect global variables. Do NOT use for other purposes. 
-class RTC_LOCKABLE GlobalLock { - public: - constexpr GlobalLock() : lock_acquired_(0) {} - - void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION(); - void Unlock() RTC_UNLOCK_FUNCTION(); - - private: - volatile int lock_acquired_; -}; - -// GlobalLockScope, for serializing execution through a scope. -class RTC_SCOPED_LOCKABLE GlobalLockScope { - public: - explicit GlobalLockScope(GlobalLock* lock) RTC_EXCLUSIVE_LOCK_FUNCTION(lock); - ~GlobalLockScope() RTC_UNLOCK_FUNCTION(); - - private: - GlobalLock* const lock_; - RTC_DISALLOW_COPY_AND_ASSIGN(GlobalLockScope); -}; - } // namespace rtc -#endif // RTC_BASE_CRITICAL_SECTION_H_ +#endif // RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ diff --git a/rtc_base/critical_section_unittest.cc b/rtc_base/deprecated/recursive_critical_section_unittest.cc similarity index 91% rename from rtc_base/critical_section_unittest.cc rename to rtc_base/deprecated/recursive_critical_section_unittest.cc index 16aefd2740..3fb7c519c1 100644 --- a/rtc_base/critical_section_unittest.cc +++ b/rtc_base/deprecated/recursive_critical_section_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include #include @@ -78,7 +78,7 @@ class CompareAndSwapVerifier { int zero_count_; }; -class RunnerBase : public MessageHandler { +class RunnerBase : public MessageHandlerAutoCleanup { public: explicit RunnerBase(int value) : threads_active_(0), @@ -124,7 +124,7 @@ class RTC_LOCKABLE CriticalSectionLock { void Unlock() RTC_UNLOCK_FUNCTION() { cs_.Leave(); } private: - CriticalSection cs_; + RecursiveCriticalSection cs_; }; template @@ -183,7 +183,7 @@ class AtomicOpRunner : public RunnerBase { } private: - CriticalSection all_values_crit_; + RecursiveCriticalSection all_values_crit_; Verifier verifier_; }; @@ -282,26 +282,7 @@ TEST(AtomicOpsTest, CompareAndSwap) { EXPECT_EQ(1, runner.shared_value()); } -TEST(GlobalLockTest, CanHaveStaticStorageDuration) { - static_assert(std::is_trivially_destructible::value, ""); - ABSL_CONST_INIT static GlobalLock global_lock; - global_lock.Lock(); - global_lock.Unlock(); -} - -TEST(GlobalLockTest, Basic) { - // Create and start lots of threads. - LockRunner runner; - std::vector> threads; - StartThreads(&threads, &runner); - runner.SetExpectedThreadCount(kNumThreads); - - // Release the hounds! - EXPECT_TRUE(runner.Run()); - EXPECT_EQ(0, runner.shared_value()); -} - -TEST(CriticalSectionTest, Basic) { +TEST(RecursiveCriticalSectionTest, Basic) { // Create and start lots of threads. 
LockRunner runner; std::vector> threads; @@ -339,7 +320,7 @@ class PerfTestData { private: uint8_t cache_line_barrier_1_[64]; - CriticalSection lock_; + RecursiveCriticalSection lock_; uint8_t cache_line_barrier_2_[64]; int64_t my_counter_ = 0; const int expected_count_; @@ -391,7 +372,7 @@ class PerfTestThread { // user 1m20.575s // sys 3m48.872s // Unit test output: -// [ OK ] CriticalSectionTest.Performance (294375 ms) +// [ OK ] RecursiveCriticalSectionTest.Performance (294375 ms) // // Native mutex implementation using first fit policy (current macOS default): // Approximate CPU usage: @@ -399,7 +380,7 @@ class PerfTestThread { // user 0m12.738s // sys 0m31.207s // Unit test output: -// [ OK ] CriticalSectionTest.Performance (11444 ms) +// [ OK ] RecursiveCriticalSectionTest.Performance (11444 ms) // // Special partially spin lock based implementation: // Approximate CPU usage: @@ -407,10 +388,10 @@ class PerfTestThread { // user 0m3.014s // sys 0m4.495s // Unit test output: -// [ OK ] CriticalSectionTest.Performance (1885 ms) +// [ OK ] RecursiveCriticalSectionTest.Performance (1885 ms) // // The test is disabled by default to avoid unecessarily loading the bots. -TEST(CriticalSectionTest, DISABLED_Performance) { +TEST(RecursiveCriticalSectionTest, DISABLED_Performance) { PerfTestThread threads[8]; Event event; diff --git a/rtc_base/signal_thread.cc b/rtc_base/deprecated/signal_thread.cc similarity index 72% rename from rtc_base/signal_thread.cc rename to rtc_base/deprecated/signal_thread.cc index e100fbe179..96bdd65155 100644 --- a/rtc_base/signal_thread.cc +++ b/rtc_base/deprecated/signal_thread.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "rtc_base/signal_thread.h" +#include "rtc_base/deprecated/signal_thread.h" #include @@ -23,26 +23,30 @@ namespace rtc { // SignalThread /////////////////////////////////////////////////////////////////////////////// -SignalThread::SignalThread() +DEPRECATED_SignalThread::DEPRECATED_SignalThread() : main_(Thread::Current()), worker_(this), state_(kInit), refcount_(1) { - main_->SignalQueueDestroyed.connect(this, - &SignalThread::OnMainThreadDestroyed); + main_->SignalQueueDestroyed.connect( + this, &DEPRECATED_SignalThread::OnMainThreadDestroyed); worker_.SetName("SignalThread", this); } -SignalThread::~SignalThread() { +DEPRECATED_SignalThread::~DEPRECATED_SignalThread() { + rtc::CritScope lock(&cs_); RTC_DCHECK(refcount_ == 0); } -bool SignalThread::SetName(const std::string& name, const void* obj) { +bool DEPRECATED_SignalThread::SetName(const std::string& name, + const void* obj) { EnterExit ee(this); + RTC_DCHECK(!destroy_called_); RTC_DCHECK(main_->IsCurrent()); RTC_DCHECK(kInit == state_); return worker_.SetName(name, obj); } -void SignalThread::Start() { +void DEPRECATED_SignalThread::Start() { EnterExit ee(this); + RTC_DCHECK(!destroy_called_); RTC_DCHECK(main_->IsCurrent()); if (kInit == state_ || kComplete == state_) { state_ = kRunning; @@ -53,9 +57,13 @@ void SignalThread::Start() { } } -void SignalThread::Destroy(bool wait) { +void DEPRECATED_SignalThread::Destroy(bool wait) { EnterExit ee(this); - RTC_DCHECK(main_->IsCurrent()); + // Sometimes the caller can't guarantee which thread will call Destroy, only + // that it will be the last thing it does. 
+ // RTC_DCHECK(main_->IsCurrent()); + RTC_DCHECK(!destroy_called_); + destroy_called_ = true; if ((kInit == state_) || (kComplete == state_)) { refcount_--; } else if (kRunning == state_ || kReleasing == state_) { @@ -76,8 +84,9 @@ void SignalThread::Destroy(bool wait) { } } -void SignalThread::Release() { +void DEPRECATED_SignalThread::Release() { EnterExit ee(this); + RTC_DCHECK(!destroy_called_); RTC_DCHECK(main_->IsCurrent()); if (kComplete == state_) { refcount_--; @@ -89,13 +98,14 @@ void SignalThread::Release() { } } -bool SignalThread::ContinueWork() { +bool DEPRECATED_SignalThread::ContinueWork() { EnterExit ee(this); + RTC_DCHECK(!destroy_called_); RTC_DCHECK(worker_.IsCurrent()); return worker_.ProcessMessages(0); } -void SignalThread::OnMessage(Message* msg) { +void DEPRECATED_SignalThread::OnMessage(Message* msg) { EnterExit ee(this); if (ST_MSG_WORKER_DONE == msg->message_id) { RTC_DCHECK(main_->IsCurrent()); @@ -126,21 +136,21 @@ void SignalThread::OnMessage(Message* msg) { } } -SignalThread::Worker::Worker(SignalThread* parent) +DEPRECATED_SignalThread::Worker::Worker(DEPRECATED_SignalThread* parent) : Thread(std::make_unique(), /*do_init=*/false), parent_(parent) { DoInit(); } -SignalThread::Worker::~Worker() { +DEPRECATED_SignalThread::Worker::~Worker() { Stop(); } -void SignalThread::Worker::Run() { +void DEPRECATED_SignalThread::Worker::Run() { parent_->Run(); } -void SignalThread::Run() { +void DEPRECATED_SignalThread::Run() { DoWork(); { EnterExit ee(this); @@ -150,12 +160,12 @@ void SignalThread::Run() { } } -void SignalThread::OnMainThreadDestroyed() { +void DEPRECATED_SignalThread::OnMainThreadDestroyed() { EnterExit ee(this); main_ = nullptr; } -bool SignalThread::Worker::IsProcessingMessagesForTesting() { +bool DEPRECATED_SignalThread::Worker::IsProcessingMessagesForTesting() { return false; } diff --git a/rtc_base/deprecated/signal_thread.h b/rtc_base/deprecated/signal_thread.h new file mode 100644 index 0000000000..10805ad456 --- 
/dev/null +++ b/rtc_base/deprecated/signal_thread.h @@ -0,0 +1,172 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_ +#define RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_ + +#include + +#include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/deprecated/recursive_critical_section.h" +#include "rtc_base/deprecation.h" +#include "rtc_base/message_handler.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace rtc { + +/////////////////////////////////////////////////////////////////////////////// +// NOTE: this class has been deprecated. Do not use for new code. New code +// should use factilities exposed by api/task_queue/ instead. +// +// SignalThread - Base class for worker threads. The main thread should call +// Start() to begin work, and then follow one of these models: +// Normal: Wait for SignalWorkDone, and then call Release to destroy. +// Cancellation: Call Release(true), to abort the worker thread. +// Fire-and-forget: Call Release(false), which allows the thread to run to +// completion, and then self-destruct without further notification. +// Periodic tasks: Wait for SignalWorkDone, then eventually call Start() +// again to repeat the task. When the instance isn't needed anymore, +// call Release. DoWork, OnWorkStart and OnWorkStop are called again, +// on a new thread. +// The subclass should override DoWork() to perform the background task. By +// periodically calling ContinueWork(), it can check for cancellation. 
+// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work +// tasks in the context of the main thread. +/////////////////////////////////////////////////////////////////////////////// + +class DEPRECATED_SignalThread : public sigslot::has_slots<>, + protected MessageHandlerAutoCleanup { + public: + DEPRECATED_SignalThread(); + + // Context: Main Thread. Call before Start to change the worker's name. + bool SetName(const std::string& name, const void* obj); + + // Context: Main Thread. Call to begin the worker thread. + void Start(); + + // Context: Main Thread. If the worker thread is not running, deletes the + // object immediately. Otherwise, asks the worker thread to abort processing, + // and schedules the object to be deleted once the worker exits. + // SignalWorkDone will not be signalled. If wait is true, does not return + // until the thread is deleted. + void Destroy(bool wait); + + // Context: Main Thread. If the worker thread is complete, deletes the + // object immediately. Otherwise, schedules the object to be deleted once + // the worker thread completes. SignalWorkDone will be signalled. + void Release(); + + // Context: Main Thread. Signalled when work is complete. + sigslot::signal1 SignalWorkDone; + + enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE }; + + protected: + ~DEPRECATED_SignalThread() override; + + Thread* worker() { return &worker_; } + + // Context: Main Thread. Subclass should override to do pre-work setup. + virtual void OnWorkStart() {} + + // Context: Worker Thread. Subclass should override to do work. + virtual void DoWork() = 0; + + // Context: Worker Thread. Subclass should call periodically to + // dispatch messages and determine if the thread should terminate. + bool ContinueWork(); + + // Context: Worker Thread. Subclass should override when extra work is + // needed to abort the worker thread. + virtual void OnWorkStop() {} + + // Context: Main Thread. Subclass should override to do post-work cleanup. 
+ virtual void OnWorkDone() {} + + // Context: Any Thread. If subclass overrides, be sure to call the base + // implementation. Do not use (message_id < ST_MSG_FIRST_AVAILABLE) + void OnMessage(Message* msg) override; + + private: + enum State { + kInit, // Initialized, but not started + kRunning, // Started and doing work + kReleasing, // Same as running, but to be deleted when work is done + kComplete, // Work is done + kStopping, // Work is being interrupted + }; + + class Worker : public Thread { + public: + explicit Worker(DEPRECATED_SignalThread* parent); + + Worker() = delete; + Worker(const Worker&) = delete; + Worker& operator=(const Worker&) = delete; + + ~Worker() override; + void Run() override; + bool IsProcessingMessagesForTesting() override; + + private: + DEPRECATED_SignalThread* parent_; + }; + + class RTC_SCOPED_LOCKABLE EnterExit { + public: + explicit EnterExit(DEPRECATED_SignalThread* t) + RTC_EXCLUSIVE_LOCK_FUNCTION(t->cs_) + : t_(t) { + t_->cs_.Enter(); + // If refcount_ is zero then the object has already been deleted and we + // will be double-deleting it in ~EnterExit()! 
(shouldn't happen) + RTC_DCHECK_NE(0, t_->refcount_); + ++t_->refcount_; + } + + EnterExit() = delete; + EnterExit(const EnterExit&) = delete; + EnterExit& operator=(const EnterExit&) = delete; + + ~EnterExit() RTC_UNLOCK_FUNCTION() { + bool d = (0 == --t_->refcount_); + t_->cs_.Leave(); + if (d) + delete t_; + } + + private: + DEPRECATED_SignalThread* t_; + }; + + void Run(); + void OnMainThreadDestroyed(); + + Thread* main_; + Worker worker_; + RecursiveCriticalSection cs_; + State state_ RTC_GUARDED_BY(cs_); + int refcount_ RTC_GUARDED_BY(cs_); + bool destroy_called_ RTC_GUARDED_BY(cs_) = false; + + RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_SignalThread); +}; + +typedef RTC_DEPRECATED DEPRECATED_SignalThread SignalThread; + +/////////////////////////////////////////////////////////////////////////////// + +} // namespace rtc + +#endif // RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_ diff --git a/rtc_base/signal_thread_unittest.cc b/rtc_base/deprecated/signal_thread_unittest.cc similarity index 93% rename from rtc_base/signal_thread_unittest.cc rename to rtc_base/deprecated/signal_thread_unittest.cc index 14761865b8..f5a49aad63 100644 --- a/rtc_base/signal_thread_unittest.cc +++ b/rtc_base/deprecated/signal_thread_unittest.cc @@ -13,9 +13,9 @@ #include #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/gunit.h" #include "rtc_base/null_socket_server.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "test/gtest.h" @@ -28,9 +28,9 @@ static const int kTimeout = 10000; class SignalThreadTest : public ::testing::Test, public sigslot::has_slots<> { public: - class SlowSignalThread : public SignalThread { + class SlowSignalThread : public DEPRECATED_SignalThread { public: - SlowSignalThread(SignalThreadTest* harness) : harness_(harness) {} + explicit SlowSignalThread(SignalThreadTest* harness) : harness_(harness) {} ~SlowSignalThread() override { 
EXPECT_EQ(harness_->main_thread_, Thread::Current()); @@ -70,7 +70,7 @@ class SignalThreadTest : public ::testing::Test, public sigslot::has_slots<> { RTC_DISALLOW_COPY_AND_ASSIGN(SlowSignalThread); }; - void OnWorkComplete(rtc::SignalThread* thread) { + void OnWorkComplete(rtc::DEPRECATED_SignalThread* thread) { SlowSignalThread* t = static_cast(thread); EXPECT_EQ(t->harness(), this); EXPECT_EQ(main_thread_, Thread::Current()); @@ -148,23 +148,23 @@ class OwnerThread : public Thread, public sigslot::has_slots<> { // Delete |signal_thread|. signal_thread->Destroy(true); { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); has_run_ = true; } } bool has_run() { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return has_run_; } - void OnWorkDone(SignalThread* /*signal_thread*/) { + void OnWorkDone(DEPRECATED_SignalThread* /*signal_thread*/) { FAIL() << " This shouldn't get called."; } private: - rtc::CriticalSection crit_; + webrtc::Mutex mutex_; SignalThreadTest* harness_; - bool has_run_ RTC_GUARDED_BY(crit_); + bool has_run_ RTC_GUARDED_BY(mutex_); RTC_DISALLOW_COPY_AND_ASSIGN(OwnerThread); }; diff --git a/rtc_base/event_tracer.cc b/rtc_base/event_tracer.cc index d23af21421..3af8183b1f 100644 --- a/rtc_base/event_tracer.cc +++ b/rtc_base/event_tracer.cc @@ -19,11 +19,11 @@ #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/platform_thread_types.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" @@ -120,7 +120,7 @@ class EventLogger final { arg.value.as_string = str_copy; } } - rtc::CritScope lock(&crit_); + webrtc::MutexLock lock(&mutex_); trace_events_.push_back( {name, category_enabled, phase, args, timestamp, 1, thread_id}); } @@ -136,7 +136,7 @@ class EventLogger 
final { bool shutting_down = shutdown_event_.Wait(kLoggingIntervalMs); std::vector events; { - rtc::CritScope lock(&crit_); + webrtc::MutexLock lock(&mutex_); trace_events_.swap(events); } std::string args_str; @@ -196,7 +196,7 @@ class EventLogger final { output_file_ = file; output_file_owned_ = owned; { - rtc::CritScope lock(&crit_); + webrtc::MutexLock lock(&mutex_); // Since the atomic fast-path for adding events to the queue can be // bypassed while the logging thread is shutting down there may be some // stale events in the queue, hence the vector needs to be cleared to not @@ -317,8 +317,8 @@ class EventLogger final { return output; } - rtc::CriticalSection crit_; - std::vector trace_events_ RTC_GUARDED_BY(crit_); + webrtc::Mutex mutex_; + std::vector trace_events_ RTC_GUARDED_BY(mutex_); rtc::PlatformThread logging_thread_; rtc::Event shutdown_event_; rtc::ThreadChecker thread_checker_; diff --git a/rtc_base/event_tracer_unittest.cc b/rtc_base/event_tracer_unittest.cc index 79cc9c0788..f4d41e4e7c 100644 --- a/rtc_base/event_tracer_unittest.cc +++ b/rtc_base/event_tracer_unittest.cc @@ -10,7 +10,7 @@ #include "rtc_base/event_tracer.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/trace_event.h" #include "test/gtest.h" @@ -20,17 +20,17 @@ namespace { class TestStatistics { public: void Reset() { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); events_logged_ = 0; } void Increment() { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); ++events_logged_; } int Count() const { - rtc::CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return events_logged_; } @@ -41,8 +41,8 @@ class TestStatistics { } private: - rtc::CriticalSection crit_; - int events_logged_ RTC_GUARDED_BY(crit_) = 0; + mutable webrtc::Mutex mutex_; + int events_logged_ RTC_GUARDED_BY(mutex_) = 0; }; } // namespace diff --git a/rtc_base/experiments/BUILD.gn 
b/rtc_base/experiments/BUILD.gn index 058e9b7f20..a40c9e0d80 100644 --- a/rtc_base/experiments/BUILD.gn +++ b/rtc_base/experiments/BUILD.gn @@ -17,8 +17,8 @@ rtc_library("alr_experiment") { "../:rtc_base_approved", "../../api/transport:field_trial_based_config", "../../api/transport:webrtc_key_value_config", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("field_trial_parser") { @@ -40,6 +40,8 @@ rtc_library("field_trial_parser") { "../../rtc_base:logging", "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings:strings", "//third_party/abseil-cpp/absl/types:optional", @@ -57,8 +59,8 @@ rtc_library("quality_rampup_experiment") { "../../api/transport:field_trial_based_config", "../../api/transport:webrtc_key_value_config", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("quality_scaler_settings") { @@ -72,8 +74,8 @@ rtc_library("quality_scaler_settings") { "../../api/transport:field_trial_based_config", "../../api/transport:webrtc_key_value_config", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("quality_scaling_experiment") { @@ -85,8 +87,8 @@ rtc_library("quality_scaling_experiment") { "../:rtc_base_approved", "../../api/video_codecs:video_codecs_api", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("normalize_simulcast_size_experiment") { @@ -97,8 +99,8 @@ rtc_library("normalize_simulcast_size_experiment") { deps = [ "../:rtc_base_approved", "../../system_wrappers:field_trial", - 
"//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("balanced_degradation_settings") { @@ -111,8 +113,8 @@ rtc_library("balanced_degradation_settings") { "../:rtc_base_approved", "../../api/video_codecs:video_codecs_api", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("cpu_speed_experiment") { @@ -121,10 +123,11 @@ rtc_library("cpu_speed_experiment") { "cpu_speed_experiment.h", ] deps = [ + ":field_trial_parser", "../:rtc_base_approved", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtt_mult_experiment") { @@ -135,8 +138,8 @@ rtc_library("rtt_mult_experiment") { deps = [ "../:rtc_base_approved", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("jitter_upper_bound_experiment") { @@ -147,8 +150,8 @@ rtc_library("jitter_upper_bound_experiment") { deps = [ "../:rtc_base_approved", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rate_control_settings") { @@ -164,6 +167,9 @@ rtc_library("rate_control_settings") { "../../api/units:data_size", "../../api/video_codecs:video_codecs_api", "../../system_wrappers:field_trial", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -177,21 +183,8 @@ rtc_library("keyframe_interval_settings_experiment") { ":field_trial_parser", "../../api/transport:field_trial_based_config", "../../api/transport:webrtc_key_value_config", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - 
-rtc_library("experimental_screenshare_settings") { - sources = [ - "experimental_screenshare_settings.cc", - "experimental_screenshare_settings.h", - ] - deps = [ - ":field_trial_parser", - "../../api/transport:field_trial_based_config", - "../../api/transport:webrtc_key_value_config", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("stable_target_rate_experiment") { @@ -204,8 +197,8 @@ rtc_library("stable_target_rate_experiment") { ":rate_control_settings", "../../api/transport:field_trial_based_config", "../../api/transport:webrtc_key_value_config", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("min_video_bitrate_experiment") { @@ -220,8 +213,8 @@ rtc_library("min_video_bitrate_experiment") { "../../rtc_base:checks", "../../rtc_base:logging", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests) { @@ -267,7 +260,7 @@ if (rtc_include_tests) { "../../test:field_trial", "../../test:test_main", "../../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/rtc_base/experiments/balanced_degradation_settings.cc b/rtc_base/experiments/balanced_degradation_settings.cc index 7a1e8913cc..d061597f70 100644 --- a/rtc_base/experiments/balanced_degradation_settings.cc +++ b/rtc_base/experiments/balanced_degradation_settings.cc @@ -34,11 +34,11 @@ std::vector DefaultConfigs() { {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}}, - {480 * 270, + {480 * 360, 10, 0, 0, - BalancedDegradationSettings::kNoFpsDiff, + 1, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, @@ -48,7 +48,7 @@ std::vector DefaultConfigs() { 15, 0, 0, - BalancedDegradationSettings::kNoFpsDiff, + 1, {0, 0, 0, 0, 0}, {0, 0, 
0, 0, 0}, {0, 0, 0, 0, 0}, diff --git a/rtc_base/experiments/balanced_degradation_settings_unittest.cc b/rtc_base/experiments/balanced_degradation_settings_unittest.cc index 5721445ee4..92833ee98c 100644 --- a/rtc_base/experiments/balanced_degradation_settings_unittest.cc +++ b/rtc_base/experiments/balanced_degradation_settings_unittest.cc @@ -34,11 +34,11 @@ void VerifyIsDefault( {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}}, BalancedDegradationSettings::Config{ - 480 * 270, + 480 * 360, 10, 0, 0, - BalancedDegradationSettings::kNoFpsDiff, + 1, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, @@ -49,7 +49,7 @@ void VerifyIsDefault( 15, 0, 0, - BalancedDegradationSettings::kNoFpsDiff, + 1, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, {0, 0, 0, 0, 0}, diff --git a/rtc_base/experiments/cpu_speed_experiment.cc b/rtc_base/experiments/cpu_speed_experiment.cc index 6d5650acc8..0f53320093 100644 --- a/rtc_base/experiments/cpu_speed_experiment.cc +++ b/rtc_base/experiments/cpu_speed_experiment.cc @@ -12,8 +12,7 @@ #include -#include - +#include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -22,30 +21,18 @@ namespace { constexpr char kFieldTrial[] = "WebRTC-VP8-CpuSpeed-Arm"; constexpr int kMinSetting = -16; constexpr int kMaxSetting = -1; -} // namespace -absl::optional> -CpuSpeedExperiment::GetConfigs() { - if (!webrtc::field_trial::IsEnabled(kFieldTrial)) - return absl::nullopt; - - const std::string group = webrtc::field_trial::FindFullName(kFieldTrial); - if (group.empty()) - return absl::nullopt; - - std::vector configs(3); - if (sscanf(group.c_str(), "Enabled-%d,%d,%d,%d,%d,%d", &(configs[0].pixels), - &(configs[0].cpu_speed), &(configs[1].pixels), - &(configs[1].cpu_speed), &(configs[2].pixels), - &(configs[2].cpu_speed)) != 6) { - RTC_LOG(LS_WARNING) << "Too few parameters provided."; - return absl::nullopt; +std::vector GetValidOrEmpty( + const std::vector& configs) { + if (configs.empty()) { + 
RTC_LOG(LS_WARNING) << "Unsupported size, value ignored."; + return {}; } for (const auto& config : configs) { if (config.cpu_speed < kMinSetting || config.cpu_speed > kMaxSetting) { RTC_LOG(LS_WARNING) << "Unsupported cpu speed setting, value ignored."; - return absl::nullopt; + return {}; } } @@ -53,20 +40,51 @@ CpuSpeedExperiment::GetConfigs() { if (configs[i].pixels < configs[i - 1].pixels || configs[i].cpu_speed > configs[i - 1].cpu_speed) { RTC_LOG(LS_WARNING) << "Invalid parameter value provided."; - return absl::nullopt; + return {}; } } - return absl::optional>(configs); + return configs; } -int CpuSpeedExperiment::GetValue(int pixels, - const std::vector& configs) { +bool HasLeCores(const std::vector& configs) { for (const auto& config : configs) { + if (config.cpu_speed_le_cores == 0) + return false; + } + return true; +} +} // namespace + +CpuSpeedExperiment::CpuSpeedExperiment() : cores_("cores") { + FieldTrialStructList configs( + {FieldTrialStructMember("pixels", [](Config* c) { return &c->pixels; }), + FieldTrialStructMember("cpu_speed", + [](Config* c) { return &c->cpu_speed; }), + FieldTrialStructMember( + "cpu_speed_le_cores", + [](Config* c) { return &c->cpu_speed_le_cores; })}, + {}); + ParseFieldTrial({&configs, &cores_}, field_trial::FindFullName(kFieldTrial)); + + configs_ = GetValidOrEmpty(configs.Get()); +} + +CpuSpeedExperiment::~CpuSpeedExperiment() {} + +absl::optional CpuSpeedExperiment::GetValue(int pixels, + int num_cores) const { + if (configs_.empty()) + return absl::nullopt; + + bool use_le = HasLeCores(configs_) && cores_ && num_cores <= cores_.Value(); + + for (const auto& config : configs_) { if (pixels <= config.pixels) - return config.cpu_speed; + return use_le ? 
absl::optional(config.cpu_speed_le_cores) + : absl::optional(config.cpu_speed); } - return kMinSetting; + return absl::optional(kMinSetting); } } // namespace webrtc diff --git a/rtc_base/experiments/cpu_speed_experiment.h b/rtc_base/experiments/cpu_speed_experiment.h index e6c8340943..7c7268c559 100644 --- a/rtc_base/experiments/cpu_speed_experiment.h +++ b/rtc_base/experiments/cpu_speed_experiment.h @@ -15,25 +15,49 @@ #include "absl/types/optional.h" +#include "rtc_base/experiments/field_trial_parser.h" + namespace webrtc { class CpuSpeedExperiment { public: - struct Config { - bool operator==(const Config& o) const { - return pixels == o.pixels && cpu_speed == o.cpu_speed; - } + CpuSpeedExperiment(); + ~CpuSpeedExperiment(); + + // Example: + // WebRTC-VP8-CpuSpeed-Arm/pixels:100|200|300,cpu_speed:-1|-2|-3/ + // pixels <= 100 -> cpu speed: -1 + // pixels <= 200 -> cpu speed: -2 + // pixels <= 300 -> cpu speed: -3 - int pixels; // The video frame size. - int cpu_speed; // The |cpu_speed| to be used if the frame size is less - // than or equal to |pixels|. + // WebRTC-VP8-CpuSpeed-Arm/pixels:100|200|300,cpu_speed:-1|-2|-3/, + // cpu_speed_le_cores:-4|-5|-6,cores:3/ + // If |num_cores| > 3 + // pixels <= 100 -> cpu speed: -1 + // pixels <= 200 -> cpu speed: -2 + // pixels <= 300 -> cpu speed: -3 + // else + // pixels <= 100 -> cpu speed: -4 + // pixels <= 200 -> cpu speed: -5 + // pixels <= 300 -> cpu speed: -6 + + struct Config { + int pixels = 0; // The video frame size. + int cpu_speed = 0; // The |cpu_speed| to be used if the frame size is less + // than or equal to |pixels|. + // Optional. + int cpu_speed_le_cores = 0; // Same as |cpu_speed| above but only used if + // |num_cores| <= |cores_|. }; - // Returns the configurations from field trial on success. - static absl::optional> GetConfigs(); + // Gets the cpu speed based on |pixels| and |num_cores|. 
+ absl::optional GetValue(int pixels, int num_cores) const; + + private: + std::vector configs_; - // Gets the cpu speed from the |configs| based on |pixels|. - static int GetValue(int pixels, const std::vector& configs); + // Threshold for when to use |cpu_speed_le_cores|. + FieldTrialOptional cores_; }; } // namespace webrtc diff --git a/rtc_base/experiments/cpu_speed_experiment_unittest.cc b/rtc_base/experiments/cpu_speed_experiment_unittest.cc index edc782c0ad..2105da3818 100644 --- a/rtc_base/experiments/cpu_speed_experiment_unittest.cc +++ b/rtc_base/experiments/cpu_speed_experiment_unittest.cc @@ -16,70 +16,91 @@ namespace webrtc { -TEST(CpuSpeedExperimentTest, GetConfigsFailsIfNotEnabled) { - EXPECT_FALSE(CpuSpeedExperiment::GetConfigs()); +TEST(CpuSpeedExperimentTest, NoValueIfNotEnabled) { + CpuSpeedExperiment cpu_speed_config; + EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); } -TEST(CpuSpeedExperimentTest, GetConfigsFailsForTooFewParameters) { +TEST(CpuSpeedExperimentTest, GetValue) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/Enabled-1000,-1,2000,-10,3000/"); - EXPECT_FALSE(CpuSpeedExperiment::GetConfigs()); + "WebRTC-VP8-CpuSpeed-Arm/pixels:1000,cpu_speed:-12,cores:4/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_EQ(-12, cpu_speed_config.GetValue(1, /*num_cores=*/1)); + EXPECT_EQ(-12, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); + EXPECT_EQ(-16, cpu_speed_config.GetValue(1001, /*num_cores=*/1)); } -TEST(CpuSpeedExperimentTest, GetConfigs) { +TEST(CpuSpeedExperimentTest, GetValueWithList) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/Enabled-1000,-1,2000,-10,3000,-16/"); - - const absl::optional> kConfigs = - CpuSpeedExperiment::GetConfigs(); - ASSERT_TRUE(kConfigs); - EXPECT_THAT(*kConfigs, - ::testing::ElementsAre(CpuSpeedExperiment::Config{1000, -1}, - CpuSpeedExperiment::Config{2000, -10}, - CpuSpeedExperiment::Config{3000, -16})); + 
"WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:-1|-10|-16/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_EQ(-1, cpu_speed_config.GetValue(1, /*num_cores=*/1)); + EXPECT_EQ(-1, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); + EXPECT_EQ(-10, cpu_speed_config.GetValue(1001, /*num_cores=*/1)); + EXPECT_EQ(-10, cpu_speed_config.GetValue(2000, /*num_cores=*/1)); + EXPECT_EQ(-16, cpu_speed_config.GetValue(2001, /*num_cores=*/1)); + EXPECT_EQ(-16, cpu_speed_config.GetValue(3000, /*num_cores=*/1)); + EXPECT_EQ(-16, cpu_speed_config.GetValue(3001, /*num_cores=*/1)); } -TEST(CpuSpeedExperimentTest, GetValue) { +TEST(CpuSpeedExperimentTest, GetValueWithCores) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/Enabled-1000,-5,2000,-10,3000,-12/"); - - const absl::optional> kConfigs = - CpuSpeedExperiment::GetConfigs(); - ASSERT_TRUE(kConfigs); - ASSERT_EQ(3u, (*kConfigs).size()); - EXPECT_EQ(-5, CpuSpeedExperiment::GetValue(1, *kConfigs)); - EXPECT_EQ(-5, CpuSpeedExperiment::GetValue(1000, *kConfigs)); - EXPECT_EQ(-10, CpuSpeedExperiment::GetValue(1000 + 1, *kConfigs)); - EXPECT_EQ(-10, CpuSpeedExperiment::GetValue(2000, *kConfigs)); - EXPECT_EQ(-12, CpuSpeedExperiment::GetValue(2000 + 1, *kConfigs)); - EXPECT_EQ(-12, CpuSpeedExperiment::GetValue(3000, *kConfigs)); - EXPECT_EQ(-16, CpuSpeedExperiment::GetValue(3000 + 1, *kConfigs)); + "WebRTC-VP8-CpuSpeed-Arm/" + "pixels:1000|2000|3000,cpu_speed:-1|-10|-16," + "cpu_speed_le_cores:-5|-11|-16,cores:2/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_EQ(-5, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); + EXPECT_EQ(-11, cpu_speed_config.GetValue(2000, /*num_cores=*/2)); + EXPECT_EQ(-1, cpu_speed_config.GetValue(1000, /*num_cores=*/3)); + EXPECT_EQ(-10, cpu_speed_config.GetValue(2000, /*num_cores=*/4)); } -TEST(CpuSpeedExperimentTest, GetConfigsFailsForTooSmallValue) { +TEST(CpuSpeedExperimentTest, GetValueWithCoresUnconfigured) { + webrtc::test::ScopedFieldTrials field_trials( + 
"WebRTC-VP8-CpuSpeed-Arm/" + "pixels:1000|2000|3000,cpu_speed:-1|-10|-16," + "cpu_speed_le_cores:-5|-11|-16/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_EQ(-1, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); + EXPECT_EQ(-10, cpu_speed_config.GetValue(2000, /*num_cores=*/2)); +} + +TEST(CpuSpeedExperimentTest, GetValueFailsForTooSmallValue) { // Supported range: [-16, -1]. webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/Enabled-1000,-1,2000,-10,3000,-17/"); - EXPECT_FALSE(CpuSpeedExperiment::GetConfigs()); + "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:-1|-10|-17/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); } -TEST(CpuSpeedExperimentTest, GetConfigsFailsForTooLargeValue) { +TEST(CpuSpeedExperimentTest, GetValueFailsForTooLargeValue) { // Supported range: [-16, -1]. webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/Enabled-1000,0,2000,-10,3000,-16/"); - EXPECT_FALSE(CpuSpeedExperiment::GetConfigs()); + "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:0|-10|-16/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); } -TEST(CpuSpeedExperimentTest, GetConfigsFailsIfPixelsDecreasing) { +TEST(CpuSpeedExperimentTest, GetValueFailsIfPixelsDecreases) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/Enabled-1000,-5,999,-10,3000,-16/"); - EXPECT_FALSE(CpuSpeedExperiment::GetConfigs()); + "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|999|3000,cpu_speed:-5|-10|-16/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); } -TEST(CpuSpeedExperimentTest, GetConfigsFailsIfCpuSpeedIncreasing) { +TEST(CpuSpeedExperimentTest, GetValueFailsIfCpuSpeedIncreases) { webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/Enabled-1000,-5,2000,-4,3000,-16/"); - EXPECT_FALSE(CpuSpeedExperiment::GetConfigs()); + 
"WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:-5|-4|-16/"); + + CpuSpeedExperiment cpu_speed_config; + EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); } } // namespace webrtc diff --git a/rtc_base/experiments/experimental_screenshare_settings.cc b/rtc_base/experiments/experimental_screenshare_settings.cc deleted file mode 100644 index ac5840a5ed..0000000000 --- a/rtc_base/experiments/experimental_screenshare_settings.cc +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/experiments/experimental_screenshare_settings.h" - -#include "api/transport/field_trial_based_config.h" - -namespace webrtc { - -namespace { -constexpr char kFieldTrialName[] = "WebRTC-ExperimentalScreenshareSettings"; -} // namespace - -ExperimentalScreenshareSettings::ExperimentalScreenshareSettings( - const WebRtcKeyValueConfig* key_value_config) - : max_qp_("max_qp"), - default_tl_in_base_layer_("default_tl_in_base_layer"), - base_layer_max_bitrate_("base_layer_max_bitrate"), - top_layer_max_bitrate("top_layer_max_bitrate") { - ParseFieldTrial({&max_qp_, &default_tl_in_base_layer_, - &base_layer_max_bitrate_, &top_layer_max_bitrate}, - key_value_config->Lookup(kFieldTrialName)); -} - -ExperimentalScreenshareSettings -ExperimentalScreenshareSettings::ParseFromFieldTrials() { - FieldTrialBasedConfig field_trial_config; - return ExperimentalScreenshareSettings(&field_trial_config); -} - -absl::optional ExperimentalScreenshareSettings::MaxQp() const { - return max_qp_.GetOptional(); -} - -absl::optional ExperimentalScreenshareSettings::DefaultTlInBaseLayer() - const 
{ - return default_tl_in_base_layer_.GetOptional(); -} - -absl::optional ExperimentalScreenshareSettings::BaseLayerMaxBitrate() - const { - return base_layer_max_bitrate_.GetOptional(); -} - -absl::optional ExperimentalScreenshareSettings::TopLayerMaxBitrate() - const { - return top_layer_max_bitrate.GetOptional(); -} - -} // namespace webrtc diff --git a/rtc_base/experiments/experimental_screenshare_settings.h b/rtc_base/experiments/experimental_screenshare_settings.h deleted file mode 100644 index 8887a3e3c3..0000000000 --- a/rtc_base/experiments/experimental_screenshare_settings.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_EXPERIMENTS_EXPERIMENTAL_SCREENSHARE_SETTINGS_H_ -#define RTC_BASE_EXPERIMENTS_EXPERIMENTAL_SCREENSHARE_SETTINGS_H_ - -#include "absl/types/optional.h" -#include "api/transport/webrtc_key_value_config.h" -#include "rtc_base/experiments/field_trial_parser.h" - -namespace webrtc { - -class ExperimentalScreenshareSettings { - public: - static ExperimentalScreenshareSettings ParseFromFieldTrials(); - explicit ExperimentalScreenshareSettings( - const WebRtcKeyValueConfig* key_value_config); - - absl::optional MaxQp() const; - absl::optional DefaultTlInBaseLayer() const; - absl::optional BaseLayerMaxBitrate() const; - absl::optional TopLayerMaxBitrate() const; - - private: - FieldTrialOptional max_qp_; - FieldTrialOptional default_tl_in_base_layer_; - FieldTrialOptional base_layer_max_bitrate_; - FieldTrialOptional top_layer_max_bitrate; -}; - -} // namespace webrtc - -#endif // RTC_BASE_EXPERIMENTS_EXPERIMENTAL_SCREENSHARE_SETTINGS_H_ diff --git a/rtc_base/experiments/field_trial_units.cc b/rtc_base/experiments/field_trial_units.cc index 9c9cf434d9..5aceab76a0 100644 --- a/rtc_base/experiments/field_trial_units.cc +++ b/rtc_base/experiments/field_trial_units.cc @@ -51,9 +51,9 @@ absl::optional ParseTypedParameter(std::string str) { absl::optional result = ParseValueWithUnit(str); if (result) { if (result->unit.empty() || result->unit == "kbps") { - return DataRate::kbps(result->value); + return DataRate::KilobitsPerSec(result->value); } else if (result->unit == "bps") { - return DataRate::bps(result->value); + return DataRate::BitsPerSec(result->value); } } return absl::nullopt; @@ -64,7 +64,7 @@ absl::optional ParseTypedParameter(std::string str) { absl::optional result = ParseValueWithUnit(str); if (result) { if (result->unit.empty() || result->unit == "bytes") - return DataSize::bytes(result->value); + return DataSize::Bytes(result->value); } return absl::nullopt; } @@ -74,11 +74,11 @@ absl::optional ParseTypedParameter(std::string str) { 
absl::optional result = ParseValueWithUnit(str); if (result) { if (result->unit == "s" || result->unit == "seconds") { - return TimeDelta::seconds(result->value); + return TimeDelta::Seconds(result->value); } else if (result->unit == "us") { - return TimeDelta::us(result->value); + return TimeDelta::Micros(result->value); } else if (result->unit.empty() || result->unit == "ms") { - return TimeDelta::ms(result->value); + return TimeDelta::Millis(result->value); } } return absl::nullopt; diff --git a/rtc_base/experiments/field_trial_units_unittest.cc b/rtc_base/experiments/field_trial_units_unittest.cc index 189fd103e9..1f46d6f9ee 100644 --- a/rtc_base/experiments/field_trial_units_unittest.cc +++ b/rtc_base/experiments/field_trial_units_unittest.cc @@ -19,9 +19,9 @@ namespace webrtc { namespace { struct DummyExperiment { FieldTrialParameter target_rate = - FieldTrialParameter("t", DataRate::kbps(100)); + FieldTrialParameter("t", DataRate::KilobitsPerSec(100)); FieldTrialParameter period = - FieldTrialParameter("p", TimeDelta::ms(100)); + FieldTrialParameter("p", TimeDelta::Millis(100)); FieldTrialOptional max_buffer = FieldTrialOptional("b", absl::nullopt); @@ -33,21 +33,21 @@ struct DummyExperiment { TEST(FieldTrialParserUnitsTest, FallsBackToDefaults) { DummyExperiment exp(""); - EXPECT_EQ(exp.target_rate.Get(), DataRate::kbps(100)); + EXPECT_EQ(exp.target_rate.Get(), DataRate::KilobitsPerSec(100)); EXPECT_FALSE(exp.max_buffer.GetOptional().has_value()); - EXPECT_EQ(exp.period.Get(), TimeDelta::ms(100)); + EXPECT_EQ(exp.period.Get(), TimeDelta::Millis(100)); } TEST(FieldTrialParserUnitsTest, ParsesUnitParameters) { DummyExperiment exp("t:300kbps,b:5bytes,p:300ms"); - EXPECT_EQ(exp.target_rate.Get(), DataRate::kbps(300)); - EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::bytes(5)); - EXPECT_EQ(exp.period.Get(), TimeDelta::ms(300)); + EXPECT_EQ(exp.target_rate.Get(), DataRate::KilobitsPerSec(300)); + EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::Bytes(5)); + 
EXPECT_EQ(exp.period.Get(), TimeDelta::Millis(300)); } TEST(FieldTrialParserUnitsTest, ParsesDefaultUnitParameters) { DummyExperiment exp("t:300,b:5,p:300"); - EXPECT_EQ(exp.target_rate.Get(), DataRate::kbps(300)); - EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::bytes(5)); - EXPECT_EQ(exp.period.Get(), TimeDelta::ms(300)); + EXPECT_EQ(exp.target_rate.Get(), DataRate::KilobitsPerSec(300)); + EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::Bytes(5)); + EXPECT_EQ(exp.period.Get(), TimeDelta::Millis(300)); } TEST(FieldTrialParserUnitsTest, ParsesInfinityParameter) { DummyExperiment exp("t:inf,p:inf"); @@ -56,17 +56,19 @@ TEST(FieldTrialParserUnitsTest, ParsesInfinityParameter) { } TEST(FieldTrialParserUnitsTest, ParsesOtherUnitParameters) { DummyExperiment exp("t:300bps,p:0.3 seconds,b:8 bytes"); - EXPECT_EQ(exp.target_rate.Get(), DataRate::bps(300)); - EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::bytes(8)); - EXPECT_EQ(exp.period.Get(), TimeDelta::ms(300)); + EXPECT_EQ(exp.target_rate.Get(), DataRate::BitsPerSec(300)); + EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::Bytes(8)); + EXPECT_EQ(exp.period.Get(), TimeDelta::Millis(300)); } TEST(FieldTrialParserUnitsTest, IgnoresOutOfRange) { - FieldTrialConstrained rate("r", DataRate::kbps(30), - DataRate::kbps(10), DataRate::kbps(100)); - FieldTrialConstrained delta("d", TimeDelta::ms(30), - TimeDelta::ms(10), TimeDelta::ms(100)); + FieldTrialConstrained rate("r", DataRate::KilobitsPerSec(30), + DataRate::KilobitsPerSec(10), + DataRate::KilobitsPerSec(100)); + FieldTrialConstrained delta("d", TimeDelta::Millis(30), + TimeDelta::Millis(10), + TimeDelta::Millis(100)); FieldTrialConstrained size( - "s", DataSize::bytes(30), DataSize::bytes(10), DataSize::bytes(100)); + "s", DataSize::Bytes(30), DataSize::Bytes(10), DataSize::Bytes(100)); ParseFieldTrial({&rate, &delta, &size}, "r:0,d:0,s:0"); EXPECT_EQ(rate->kbps(), 30); EXPECT_EQ(delta->ms(), 30); diff --git 
a/rtc_base/experiments/min_video_bitrate_experiment.cc b/rtc_base/experiments/min_video_bitrate_experiment.cc index ee62d2eaf4..3b13e8e9bf 100644 --- a/rtc_base/experiments/min_video_bitrate_experiment.cc +++ b/rtc_base/experiments/min_video_bitrate_experiment.cc @@ -61,7 +61,7 @@ absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type) { const absl::optional fallback_min_bitrate_bps = GetFallbackMinBpsFromFieldTrial(type); if (fallback_min_bitrate_bps) { - return DataRate::bps(*fallback_min_bitrate_bps); + return DataRate::BitsPerSec(*fallback_min_bitrate_bps); } if (webrtc::field_trial::IsEnabled(kMinVideoBitrateExperiment)) { @@ -100,6 +100,9 @@ absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type) { return min_bitrate_av1.GetOptional(); case kVideoCodecH264: return min_bitrate_h264.GetOptional(); +#ifndef DISABLE_H265 + case kVideoCodecH265: +#endif case kVideoCodecGeneric: case kVideoCodecMultiplex: return absl::nullopt; diff --git a/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc b/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc index ca0550d65c..9792bad61c 100644 --- a/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc +++ b/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc @@ -59,16 +59,16 @@ TEST(GetExperimentalMinVideoBitrateTest, BrForAllCodecsIfDefined) { "WebRTC-Video-MinVideoBitrate/Enabled,br:123kbps/"); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); 
EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); EXPECT_EQ( GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); } TEST(GetExperimentalMinVideoBitrateTest, BrTrumpsSpecificCodecConfigs) { @@ -77,16 +77,16 @@ TEST(GetExperimentalMinVideoBitrateTest, BrTrumpsSpecificCodecConfigs) { "Enabled,br:123kbps,vp8_br:100kbps,vp9_br:200kbps,h264_br:300kbps/"); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); EXPECT_EQ( GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::make_optional(DataRate::kbps(123))); + absl::make_optional(DataRate::KilobitsPerSec(123))); } TEST(GetExperimentalMinVideoBitrateTest, @@ -116,11 +116,11 @@ TEST(GetExperimentalMinVideoBitrateTest, SpecificCodecConfigsUsedIfExpEnabled) { EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), absl::nullopt); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::kbps(100))); + absl::make_optional(DataRate::KilobitsPerSec(100))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - 
absl::make_optional(DataRate::kbps(200))); + absl::make_optional(DataRate::KilobitsPerSec(200))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::kbps(300))); + absl::make_optional(DataRate::KilobitsPerSec(300))); EXPECT_EQ( GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), absl::nullopt); @@ -135,7 +135,7 @@ TEST(GetExperimentalMinVideoBitrateTest, "Enabled-444444,555555,666666/"); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::bps(666666))); + absl::make_optional(DataRate::BitsPerSec(666666))); } TEST(GetExperimentalMinVideoBitrateTest, @@ -149,9 +149,9 @@ TEST(GetExperimentalMinVideoBitrateTest, EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), absl::nullopt); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::make_optional(DataRate::kbps(200))); + absl::make_optional(DataRate::KilobitsPerSec(200))); EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::kbps(300))); + absl::make_optional(DataRate::KilobitsPerSec(300))); EXPECT_EQ( GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), absl::nullopt); diff --git a/rtc_base/experiments/quality_rampup_experiment.cc b/rtc_base/experiments/quality_rampup_experiment.cc index caf7e62368..ee6675c924 100644 --- a/rtc_base/experiments/quality_rampup_experiment.cc +++ b/rtc_base/experiments/quality_rampup_experiment.cc @@ -70,4 +70,8 @@ bool QualityRampupExperiment::BwHigh(int64_t now_ms, return (now_ms - *start_ms_) >= min_duration_ms_.Value(); } +bool QualityRampupExperiment::Enabled() const { + return min_pixels_ || min_duration_ms_ || max_bitrate_kbps_; +} + } // namespace webrtc diff --git a/rtc_base/experiments/quality_rampup_experiment.h b/rtc_base/experiments/quality_rampup_experiment.h index ff9d7d38e5..9d46901104 100644 --- 
a/rtc_base/experiments/quality_rampup_experiment.h +++ b/rtc_base/experiments/quality_rampup_experiment.h @@ -33,6 +33,8 @@ class QualityRampupExperiment final { // (max_bitrate_factor_) above |max_bitrate_kbps_| for |min_duration_ms_|. bool BwHigh(int64_t now_ms, uint32_t available_bw_kbps); + bool Enabled() const; + private: explicit QualityRampupExperiment( const WebRtcKeyValueConfig* const key_value_config); diff --git a/rtc_base/experiments/quality_scaler_settings.cc b/rtc_base/experiments/quality_scaler_settings.cc index c8d83ebe4d..d2443b05ce 100644 --- a/rtc_base/experiments/quality_scaler_settings.cc +++ b/rtc_base/experiments/quality_scaler_settings.cc @@ -21,14 +21,17 @@ const double kMinScaleFactor = 0.01; QualityScalerSettings::QualityScalerSettings( const WebRtcKeyValueConfig* const key_value_config) - : min_frames_("min_frames"), + : sampling_period_ms_("sampling_period_ms"), + average_qp_window_("average_qp_window"), + min_frames_("min_frames"), initial_scale_factor_("initial_scale_factor"), scale_factor_("scale_factor"), initial_bitrate_interval_ms_("initial_bitrate_interval_ms"), initial_bitrate_factor_("initial_bitrate_factor") { ParseFieldTrial( - {&min_frames_, &initial_scale_factor_, &scale_factor_, - &initial_bitrate_interval_ms_, &initial_bitrate_factor_}, + {&sampling_period_ms_, &average_qp_window_, &min_frames_, + &initial_scale_factor_, &scale_factor_, &initial_bitrate_interval_ms_, + &initial_bitrate_factor_}, key_value_config->Lookup("WebRTC-Video-QualityScalerSettings")); } @@ -37,6 +40,22 @@ QualityScalerSettings QualityScalerSettings::ParseFromFieldTrials() { return QualityScalerSettings(&field_trial_config); } +absl::optional QualityScalerSettings::SamplingPeriodMs() const { + if (sampling_period_ms_ && sampling_period_ms_.Value() <= 0) { + RTC_LOG(LS_WARNING) << "Unsupported sampling_period_ms value, ignored."; + return absl::nullopt; + } + return sampling_period_ms_.GetOptional(); +} + +absl::optional 
QualityScalerSettings::AverageQpWindow() const { + if (average_qp_window_ && average_qp_window_.Value() <= 0) { + RTC_LOG(LS_WARNING) << "Unsupported average_qp_window value, ignored."; + return absl::nullopt; + } + return average_qp_window_.GetOptional(); +} + absl::optional QualityScalerSettings::MinFrames() const { if (min_frames_ && min_frames_.Value() < kMinFrames) { RTC_LOG(LS_WARNING) << "Unsupported min_frames value, ignored."; diff --git a/rtc_base/experiments/quality_scaler_settings.h b/rtc_base/experiments/quality_scaler_settings.h index e3b12c54e3..b4b6a427a0 100644 --- a/rtc_base/experiments/quality_scaler_settings.h +++ b/rtc_base/experiments/quality_scaler_settings.h @@ -21,6 +21,8 @@ class QualityScalerSettings final { public: static QualityScalerSettings ParseFromFieldTrials(); + absl::optional SamplingPeriodMs() const; + absl::optional AverageQpWindow() const; absl::optional MinFrames() const; absl::optional InitialScaleFactor() const; absl::optional ScaleFactor() const; @@ -31,6 +33,8 @@ class QualityScalerSettings final { explicit QualityScalerSettings( const WebRtcKeyValueConfig* const key_value_config); + FieldTrialOptional sampling_period_ms_; + FieldTrialOptional average_qp_window_; FieldTrialOptional min_frames_; FieldTrialOptional initial_scale_factor_; FieldTrialOptional scale_factor_; diff --git a/rtc_base/experiments/rate_control_settings.cc b/rtc_base/experiments/rate_control_settings.cc index bf623bda86..6766db62c3 100644 --- a/rtc_base/experiments/rate_control_settings.cc +++ b/rtc_base/experiments/rate_control_settings.cc @@ -15,6 +15,7 @@ #include +#include "absl/strings/match.h" #include "api/transport/field_trial_based_config.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -27,11 +28,6 @@ const int kDefaultAcceptedQueueMs = 250; const int kDefaultMinPushbackTargetBitrateBps = 30000; -const char kVp8TrustedRateControllerFieldTrialName[] = - "WebRTC-LibvpxVp8TrustedRateController"; -const char 
kVp9TrustedRateControllerFieldTrialName[] = - "WebRTC-LibvpxVp9TrustedRateController"; - const char kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName[] = "WebRTC-UseBaseHeavyVP8TL3RateAllocation"; @@ -42,7 +38,7 @@ const char* kScreenshareHysteresisFieldTrialname = bool IsEnabled(const WebRtcKeyValueConfig* const key_value_config, absl::string_view key) { - return key_value_config->Lookup(key).find("Enabled") == 0; + return absl::StartsWith(key_value_config->Lookup(key), "Enabled"); } void ParseHysteresisFactor(const WebRtcKeyValueConfig* const key_value_config, @@ -63,7 +59,8 @@ constexpr char CongestionWindowConfig::kKey[]; std::unique_ptr CongestionWindowConfig::Parser() { return StructParametersParser::Create("QueueSize", &queue_size_ms, // "MinBitrate", &min_bitrate_bps, - "InitWin", &initial_data_window); + "InitWin", &initial_data_window, + "DropFrame", &drop_frame_only); } // static @@ -78,31 +75,25 @@ constexpr char VideoRateControlConfig::kKey[]; std::unique_ptr VideoRateControlConfig::Parser() { // The empty comments ensures that each pair is on a separate line. 
return StructParametersParser::Create( - "pacing_factor", &pacing_factor, // - "alr_probing", &alr_probing, // - "vp8_qp_max", &vp8_qp_max, // - "vp8_min_pixels", &vp8_min_pixels, // - "trust_vp8", &trust_vp8, // - "trust_vp9", &trust_vp9, // - "video_hysteresis", &video_hysteresis, // - "screenshare_hysteresis", &screenshare_hysteresis, // - "probe_max_allocation", &probe_max_allocation, // - "bitrate_adjuster", &bitrate_adjuster, // - "adjuster_use_headroom", &adjuster_use_headroom, // - "vp8_s0_boost", &vp8_s0_boost, // - "vp8_base_heavy_tl3_alloc", &vp8_base_heavy_tl3_alloc, // - "vp8_dynamic_rate", &vp8_dynamic_rate, // - "vp9_dynamic_rate", &vp9_dynamic_rate); + "pacing_factor", &pacing_factor, // + "alr_probing", &alr_probing, // + "vp8_qp_max", &vp8_qp_max, // + "vp8_min_pixels", &vp8_min_pixels, // + "trust_vp8", &trust_vp8, // + "trust_vp9", &trust_vp9, // + "video_hysteresis", &video_hysteresis, // + "screenshare_hysteresis", &screenshare_hysteresis, // + "probe_max_allocation", &probe_max_allocation, // + "bitrate_adjuster", &bitrate_adjuster, // + "adjuster_use_headroom", &adjuster_use_headroom, // + "vp8_s0_boost", &vp8_s0_boost, // + "vp8_base_heavy_tl3_alloc", &vp8_base_heavy_tl3_alloc); } RateControlSettings::RateControlSettings( const WebRtcKeyValueConfig* const key_value_config) : congestion_window_config_(CongestionWindowConfig::Parse( key_value_config->Lookup(CongestionWindowConfig::kKey))) { - video_config_.trust_vp8 = - IsEnabled(key_value_config, kVp8TrustedRateControllerFieldTrialName); - video_config_.trust_vp9 = - IsEnabled(key_value_config, kVp9TrustedRateControllerFieldTrialName); video_config_.vp8_base_heavy_tl3_alloc = IsEnabled( key_value_config, kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName); ParseHysteresisFactor(key_value_config, kVideoHysteresisFieldTrialname, @@ -142,6 +133,10 @@ bool RateControlSettings::UseCongestionWindowPushback() const { congestion_window_config_.min_bitrate_bps; } +bool 
RateControlSettings::UseCongestionWindowDropFrameOnly() const { + return congestion_window_config_.drop_frame_only; +} + uint32_t RateControlSettings::CongestionWindowMinPushbackTargetBitrateBps() const { return congestion_window_config_.min_bitrate_bps.value_or( @@ -185,18 +180,10 @@ bool RateControlSettings::Vp8BoostBaseLayerQuality() const { return video_config_.vp8_s0_boost; } -bool RateControlSettings::Vp8DynamicRateSettings() const { - return video_config_.vp8_dynamic_rate; -} - bool RateControlSettings::LibvpxVp9TrustedRateController() const { return video_config_.trust_vp9; } -bool RateControlSettings::Vp9DynamicRateSettings() const { - return video_config_.vp9_dynamic_rate; -} - double RateControlSettings::GetSimulcastHysteresisFactor( VideoCodecMode mode) const { if (mode == VideoCodecMode::kScreensharing) { diff --git a/rtc_base/experiments/rate_control_settings.h b/rtc_base/experiments/rate_control_settings.h index 3f1d8dee70..db7f1cd136 100644 --- a/rtc_base/experiments/rate_control_settings.h +++ b/rtc_base/experiments/rate_control_settings.h @@ -25,6 +25,7 @@ struct CongestionWindowConfig { absl::optional queue_size_ms; absl::optional min_bitrate_bps; absl::optional initial_data_window; + bool drop_frame_only = false; std::unique_ptr Parser(); static CongestionWindowConfig Parse(absl::string_view config); }; @@ -35,18 +36,16 @@ struct VideoRateControlConfig { bool alr_probing = false; absl::optional vp8_qp_max; absl::optional vp8_min_pixels; - bool trust_vp8 = false; - bool trust_vp9 = false; - double video_hysteresis = 1.0; + bool trust_vp8 = true; + bool trust_vp9 = true; + double video_hysteresis = 1.2; // Default to 35% hysteresis for simulcast screenshare. 
double screenshare_hysteresis = 1.35; bool probe_max_allocation = true; - bool bitrate_adjuster = false; - bool adjuster_use_headroom = false; - bool vp8_s0_boost = true; + bool bitrate_adjuster = true; + bool adjuster_use_headroom = true; + bool vp8_s0_boost = false; bool vp8_base_heavy_tl3_alloc = false; - bool vp8_dynamic_rate = false; - bool vp9_dynamic_rate = false; std::unique_ptr Parser(); }; @@ -66,6 +65,7 @@ class RateControlSettings final { bool UseCongestionWindow() const; int64_t GetCongestionWindowAdditionalTimeMs() const; bool UseCongestionWindowPushback() const; + bool UseCongestionWindowDropFrameOnly() const; uint32_t CongestionWindowMinPushbackTargetBitrateBps() const; absl::optional CongestionWindowInitialDataWindow() const; diff --git a/rtc_base/experiments/rate_control_settings_unittest.cc b/rtc_base/experiments/rate_control_settings_unittest.cc index b769c46a04..8d722722e4 100644 --- a/rtc_base/experiments/rate_control_settings_unittest.cc +++ b/rtc_base/experiments/rate_control_settings_unittest.cc @@ -99,15 +99,15 @@ TEST(RateControlSettingsTest, DoesNotGetTooSmallLibvpxVp8MinPixelValue) { TEST(RateControlSettingsTest, LibvpxTrustedRateController) { const RateControlSettings settings_before = RateControlSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings_before.LibvpxVp8TrustedRateController()); - EXPECT_FALSE(settings_before.LibvpxVp9TrustedRateController()); + EXPECT_TRUE(settings_before.LibvpxVp8TrustedRateController()); + EXPECT_TRUE(settings_before.LibvpxVp9TrustedRateController()); test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/trust_vp8:1,trust_vp9:1/"); + "WebRTC-VideoRateControl/trust_vp8:0,trust_vp9:0/"); const RateControlSettings settings_after = RateControlSettings::ParseFromFieldTrials(); - EXPECT_TRUE(settings_after.LibvpxVp8TrustedRateController()); - EXPECT_TRUE(settings_after.LibvpxVp9TrustedRateController()); + EXPECT_FALSE(settings_after.LibvpxVp8TrustedRateController()); + 
EXPECT_FALSE(settings_after.LibvpxVp9TrustedRateController()); } TEST(RateControlSettingsTest, Vp8BaseHeavyTl3RateAllocationLegacyKey) { @@ -154,10 +154,10 @@ TEST(RateControlSettingsTest, GetSimulcastHysteresisFactor) { RateControlSettings::ParseFromFieldTrials(); EXPECT_DOUBLE_EQ(settings_before.GetSimulcastHysteresisFactor( VideoCodecMode::kRealtimeVideo), - 1.0); + 1.2); EXPECT_DOUBLE_EQ(settings_before.GetSimulcastHysteresisFactor( VideoEncoderConfig::ContentType::kRealtimeVideo), - 1.0); + 1.2); EXPECT_DOUBLE_EQ(settings_before.GetSimulcastHysteresisFactor( VideoCodecMode::kScreensharing), 1.35); @@ -167,16 +167,16 @@ TEST(RateControlSettingsTest, GetSimulcastHysteresisFactor) { test::ScopedFieldTrials field_trials( "WebRTC-VideoRateControl/" - "video_hysteresis:1.2,screenshare_hysteresis:1.4/"); + "video_hysteresis:1.0,screenshare_hysteresis:1.4/"); const RateControlSettings settings_after = RateControlSettings::ParseFromFieldTrials(); EXPECT_DOUBLE_EQ(settings_after.GetSimulcastHysteresisFactor( VideoCodecMode::kRealtimeVideo), - 1.2); + 1.0); EXPECT_DOUBLE_EQ(settings_after.GetSimulcastHysteresisFactor( VideoEncoderConfig::ContentType::kRealtimeVideo), - 1.2); + 1.0); EXPECT_DOUBLE_EQ(settings_after.GetSimulcastHysteresisFactor( VideoCodecMode::kScreensharing), 1.4); @@ -196,16 +196,16 @@ TEST(RateControlSettingsTest, TriggerProbeOnMaxAllocatedBitrateChange) { } TEST(RateControlSettingsTest, UseEncoderBitrateAdjuster) { - // Should be off by default. - EXPECT_FALSE( + // Should be on by default. + EXPECT_TRUE( RateControlSettings::ParseFromFieldTrials().UseEncoderBitrateAdjuster()); { - // Can be turned on via field trial. + // Can be turned off via field trial. 
test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/bitrate_adjuster:true/"); - EXPECT_TRUE(RateControlSettings::ParseFromFieldTrials() - .UseEncoderBitrateAdjuster()); + "WebRTC-VideoRateControl/bitrate_adjuster:false/"); + EXPECT_FALSE(RateControlSettings::ParseFromFieldTrials() + .UseEncoderBitrateAdjuster()); } } diff --git a/rtc_base/experiments/stable_target_rate_experiment_unittest.cc b/rtc_base/experiments/stable_target_rate_experiment_unittest.cc index 71e757d68c..dbd841840d 100644 --- a/rtc_base/experiments/stable_target_rate_experiment_unittest.cc +++ b/rtc_base/experiments/stable_target_rate_experiment_unittest.cc @@ -19,7 +19,7 @@ TEST(StableBweExperimentTest, Default) { StableTargetRateExperiment config = StableTargetRateExperiment::ParseFromFieldTrials(); EXPECT_FALSE(config.IsEnabled()); - EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.0); + EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.2); EXPECT_EQ(config.GetScreenshareHysteresisFactor(), 1.35); } @@ -30,7 +30,7 @@ TEST(StableBweExperimentTest, EnabledNoHysteresis) { StableTargetRateExperiment config = StableTargetRateExperiment::ParseFromFieldTrials(); EXPECT_TRUE(config.IsEnabled()); - EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.0); + EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.2); EXPECT_EQ(config.GetScreenshareHysteresisFactor(), 1.35); } diff --git a/rtc_base/experiments/struct_parameters_parser_unittest.cc b/rtc_base/experiments/struct_parameters_parser_unittest.cc index 71b117f9dd..2f92b9fc6a 100644 --- a/rtc_base/experiments/struct_parameters_parser_unittest.cc +++ b/rtc_base/experiments/struct_parameters_parser_unittest.cc @@ -19,7 +19,7 @@ struct DummyConfig { unsigned size = 3; bool ping = 0; absl::optional duration; - absl::optional latency = TimeDelta::ms(100); + absl::optional latency = TimeDelta::Millis(100); std::unique_ptr Parser(); }; diff --git a/rtc_base/fake_clock.cc b/rtc_base/fake_clock.cc index e242e8e659..652a5afa3a 100644 --- a/rtc_base/fake_clock.cc 
+++ b/rtc_base/fake_clock.cc @@ -16,18 +16,18 @@ namespace rtc { int64_t FakeClock::TimeNanos() const { - CritScope cs(&lock_); + webrtc::MutexLock lock(&lock_); return time_ns_; } void FakeClock::SetTime(webrtc::Timestamp new_time) { - CritScope cs(&lock_); + webrtc::MutexLock lock(&lock_); RTC_DCHECK(new_time.us() * 1000 >= time_ns_); time_ns_ = new_time.us() * 1000; } void FakeClock::AdvanceTime(webrtc::TimeDelta delta) { - CritScope cs(&lock_); + webrtc::MutexLock lock(&lock_); time_ns_ += delta.ns(); } diff --git a/rtc_base/fake_clock.h b/rtc_base/fake_clock.h index 0ab9a937a8..edb507becb 100644 --- a/rtc_base/fake_clock.h +++ b/rtc_base/fake_clock.h @@ -15,7 +15,7 @@ #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" @@ -43,7 +43,7 @@ class FakeClock : public ClockInterface { void AdvanceTime(webrtc::TimeDelta delta); private: - CriticalSection lock_; + mutable webrtc::Mutex lock_; int64_t time_ns_ RTC_GUARDED_BY(lock_) = 0; }; diff --git a/rtc_base/fake_clock_unittest.cc b/rtc_base/fake_clock_unittest.cc index 0cabce8f5f..fc1d5100e6 100644 --- a/rtc_base/fake_clock_unittest.cc +++ b/rtc_base/fake_clock_unittest.cc @@ -21,13 +21,13 @@ TEST(ScopedFakeClockTest, OverridesGlobalClock) { ScopedFakeClock scoped; EXPECT_EQ(rtc::TimeMicros(), 0); - scoped.AdvanceTime(webrtc::TimeDelta::ms(1)); + scoped.AdvanceTime(webrtc::TimeDelta::Millis(1)); EXPECT_EQ(rtc::TimeMicros(), 1000); - scoped.SetTime(webrtc::Timestamp::us(kFixedTimeUs)); + scoped.SetTime(webrtc::Timestamp::Micros(kFixedTimeUs)); EXPECT_EQ(rtc::TimeMicros(), kFixedTimeUs); - scoped.AdvanceTime(webrtc::TimeDelta::ms(1)); + scoped.AdvanceTime(webrtc::TimeDelta::Millis(1)); EXPECT_EQ(rtc::TimeMicros(), kFixedTimeUs + 1000); } diff --git a/rtc_base/fake_network.h b/rtc_base/fake_network.h index 040b24205e..8bd50b69f0 100644 --- 
a/rtc_base/fake_network.h +++ b/rtc_base/fake_network.h @@ -31,7 +31,8 @@ const int kFakeIPv4NetworkPrefixLength = 24; const int kFakeIPv6NetworkPrefixLength = 64; // Fake network manager that allows us to manually specify the IPs to use. -class FakeNetworkManager : public NetworkManagerBase, public MessageHandler { +class FakeNetworkManager : public NetworkManagerBase, + public MessageHandlerAutoCleanup { public: FakeNetworkManager() {} diff --git a/rtc_base/fake_ssl_identity.cc b/rtc_base/fake_ssl_identity.cc index 309708f62d..0648363b2e 100644 --- a/rtc_base/fake_ssl_identity.cc +++ b/rtc_base/fake_ssl_identity.cc @@ -90,8 +90,8 @@ FakeSSLIdentity::FakeSSLIdentity(const FakeSSLIdentity& o) FakeSSLIdentity::~FakeSSLIdentity() = default; -FakeSSLIdentity* FakeSSLIdentity::GetReference() const { - return new FakeSSLIdentity(*this); +std::unique_ptr FakeSSLIdentity::CloneInternal() const { + return std::make_unique(*this); } const SSLCertificate& FakeSSLIdentity::certificate() const { diff --git a/rtc_base/fake_ssl_identity.h b/rtc_base/fake_ssl_identity.h index c3a8d1f171..512baba9fb 100644 --- a/rtc_base/fake_ssl_identity.h +++ b/rtc_base/fake_ssl_identity.h @@ -62,7 +62,6 @@ class FakeSSLIdentity : public SSLIdentity { ~FakeSSLIdentity() override; // SSLIdentity implementation. - FakeSSLIdentity* GetReference() const override; const SSLCertificate& certificate() const override; const SSLCertChain& cert_chain() const override; // Not implemented. 
@@ -73,6 +72,8 @@ class FakeSSLIdentity : public SSLIdentity { virtual bool operator==(const SSLIdentity& other) const; private: + std::unique_ptr CloneInternal() const override; + std::unique_ptr cert_chain_; }; diff --git a/rtc_base/firewall_socket_server.cc b/rtc_base/firewall_socket_server.cc index fc7917613c..8f44753760 100644 --- a/rtc_base/firewall_socket_server.cc +++ b/rtc_base/firewall_socket_server.cc @@ -163,19 +163,19 @@ void FirewallSocketServer::AddRule(bool allow, r.p = p; r.src = src; r.dst = dst; - CritScope scope(&crit_); + webrtc::MutexLock scope(&mutex_); rules_.push_back(r); } void FirewallSocketServer::ClearRules() { - CritScope scope(&crit_); + webrtc::MutexLock scope(&mutex_); rules_.clear(); } bool FirewallSocketServer::Check(FirewallProtocol p, const SocketAddress& src, const SocketAddress& dst) { - CritScope scope(&crit_); + webrtc::MutexLock scope(&mutex_); for (size_t i = 0; i < rules_.size(); ++i) { const Rule& r = rules_[i]; if ((r.p != p) && (r.p != FP_ANY)) @@ -239,12 +239,12 @@ FirewallManager::~FirewallManager() { } void FirewallManager::AddServer(FirewallSocketServer* server) { - CritScope scope(&crit_); + webrtc::MutexLock scope(&mutex_); servers_.push_back(server); } void FirewallManager::RemoveServer(FirewallSocketServer* server) { - CritScope scope(&crit_); + webrtc::MutexLock scope(&mutex_); servers_.erase(std::remove(servers_.begin(), servers_.end(), server), servers_.end()); } @@ -253,7 +253,7 @@ void FirewallManager::AddRule(bool allow, FirewallProtocol p, FirewallDirection d, const SocketAddress& addr) { - CritScope scope(&crit_); + webrtc::MutexLock scope(&mutex_); for (std::vector::const_iterator it = servers_.begin(); it != servers_.end(); ++it) { (*it)->AddRule(allow, p, d, addr); @@ -261,7 +261,7 @@ void FirewallManager::AddRule(bool allow, } void FirewallManager::ClearRules() { - CritScope scope(&crit_); + webrtc::MutexLock scope(&mutex_); for (std::vector::const_iterator it = servers_.begin(); it != 
servers_.end(); ++it) { (*it)->ClearRules(); diff --git a/rtc_base/firewall_socket_server.h b/rtc_base/firewall_socket_server.h index d174033e01..23b91d6ad3 100644 --- a/rtc_base/firewall_socket_server.h +++ b/rtc_base/firewall_socket_server.h @@ -14,11 +14,11 @@ #include #include "rtc_base/async_socket.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ip_address.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_server.h" +#include "rtc_base/synchronization/mutex.h" namespace rtc { @@ -90,7 +90,7 @@ class FirewallSocketServer : public SocketServer { private: SocketServer* server_; FirewallManager* manager_; - CriticalSection crit_; + webrtc::Mutex mutex_; struct Rule { bool allow; FirewallProtocol p; @@ -123,7 +123,7 @@ class FirewallManager { void ClearRules(); private: - CriticalSection crit_; + webrtc::Mutex mutex_; std::vector servers_; }; diff --git a/rtc_base/gunit.h b/rtc_base/gunit.h index d49912524c..dedf3ee067 100644 --- a/rtc_base/gunit.h +++ b/rtc_base/gunit.h @@ -100,7 +100,7 @@ #define SIMULATED_WAIT(ex, timeout, clock) \ for (int64_t start = rtc::TimeMillis(); \ !(ex) && rtc::TimeMillis() < start + (timeout);) { \ - (clock).AdvanceTime(webrtc::TimeDelta::ms(1)); \ + (clock).AdvanceTime(webrtc::TimeDelta::Millis(1)); \ } // This returns the result of the test in res, so that we don't re-evaluate @@ -111,7 +111,7 @@ int64_t start = rtc::TimeMillis(); \ res = (ex); \ while (!res && rtc::TimeMillis() < start + (timeout)) { \ - (clock).AdvanceTime(webrtc::TimeDelta::ms(1)); \ + (clock).AdvanceTime(webrtc::TimeDelta::Millis(1)); \ res = (ex); \ } \ } while (0) diff --git a/rtc_base/ip_address.cc b/rtc_base/ip_address.cc index cf7ffa8b90..9dd534c2b5 100644 --- a/rtc_base/ip_address.cc +++ b/rtc_base/ip_address.cc @@ -53,6 +53,17 @@ uint32_t IPAddress::v4AddressAsHostOrderInteger() const { } } +int IPAddress::overhead() const { + switch (family_) { + case AF_INET: // IPv4 + return 20; + case AF_INET6: 
// IPv6 + return 40; + default: + return 0; + } +} + bool IPAddress::IsNil() const { return IPIsUnspec(*this); } diff --git a/rtc_base/ip_address.h b/rtc_base/ip_address.h index 6d857afe84..ae135a69dc 100644 --- a/rtc_base/ip_address.h +++ b/rtc_base/ip_address.h @@ -111,6 +111,9 @@ class RTC_EXPORT IPAddress { // For socketaddress' benefit. Returns the IP in host byte order. uint32_t v4AddressAsHostOrderInteger() const; + // Get the network layer overhead per packet based on the IP address family. + int overhead() const; + // Whether this is an unspecified IP address. bool IsNil() const; diff --git a/rtc_base/location.h b/rtc_base/location.h index ad8f479135..ff1eea95a9 100644 --- a/rtc_base/location.h +++ b/rtc_base/location.h @@ -13,7 +13,6 @@ #include -#include "rtc_base/stringize_macros.h" #include "rtc_base/system/rtc_export.h" namespace rtc { diff --git a/rtc_base/logging.cc b/rtc_base/logging.cc index ff7369dd5c..13a5f02597 100644 --- a/rtc_base/logging.cc +++ b/rtc_base/logging.cc @@ -33,6 +33,7 @@ static const int kMaxLogLineSize = 1024 - 60; #endif // WEBRTC_MAC && !defined(WEBRTC_IOS) || WEBRTC_ANDROID +#include #include #include @@ -42,11 +43,11 @@ static const int kMaxLogLineSize = 1024 - 60; #include "absl/base/attributes.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/platform_thread_types.h" #include "rtc_base/string_encode.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" @@ -72,7 +73,9 @@ const char* FilenameFromPath(const char* file) { } // Global lock for log subsystem, only needed to serialize access to streams_. -CriticalSection g_log_crit; +// TODO(bugs.webrtc.org/11665): this is not currently constant initialized and +// trivially destructible. 
+webrtc::Mutex g_log_mutex_; } // namespace ///////////////////////////////////////////////////////////////////////////// @@ -85,8 +88,9 @@ bool LogMessage::log_to_stderr_ = true; // Note: we explicitly do not clean this up, because of the uncertain ordering // of destructors at program exit. Let the person who sets the stream trigger // cleanup by setting to null, or let it leak (safe at program exit). -ABSL_CONST_INIT LogSink* LogMessage::streams_ RTC_GUARDED_BY(g_log_crit) = +ABSL_CONST_INIT LogSink* LogMessage::streams_ RTC_GUARDED_BY(g_log_mutex_) = nullptr; +ABSL_CONST_INIT std::atomic LogMessage::streams_empty_ = {true}; // Boolean options default to false (0) bool LogMessage::thread_, LogMessage::timestamp_; @@ -107,9 +111,13 @@ LogMessage::LogMessage(const char* file, // Also ensure WallClockStartTime is initialized, so that it matches // LogStartTime. WallClockStartTime(); - print_stream_ << "[" << rtc::LeftPad('0', 3, rtc::ToString(time / 1000)) - << ":" << rtc::LeftPad('0', 3, rtc::ToString(time % 1000)) - << "] "; + // TODO(kwiberg): Switch to absl::StrFormat, if binary size is ok. + char timestamp[50]; // Maximum string length of an int64_t is 20. 
+ int len = + snprintf(timestamp, sizeof(timestamp), "[%03" PRId64 ":%03" PRId64 "]", + time / 1000, time % 1000); + RTC_DCHECK_LT(len, sizeof(timestamp)); + print_stream_ << timestamp; } if (thread_) { @@ -193,7 +201,7 @@ LogMessage::~LogMessage() { #endif } - CritScope cs(&g_log_crit); + webrtc::MutexLock lock(&g_log_mutex_); for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { if (severity_ >= entry->min_severity_) { #if defined(WEBRTC_ANDROID) @@ -242,7 +250,7 @@ void LogMessage::LogTimestamps(bool on) { void LogMessage::LogToDebug(LoggingSeverity min_sev) { g_dbg_sev = min_sev; - CritScope cs(&g_log_crit); + webrtc::MutexLock lock(&g_log_mutex_); UpdateMinLogSeverity(); } @@ -251,7 +259,7 @@ void LogMessage::SetLogToStderr(bool log_to_stderr) { } int LogMessage::GetLogToStream(LogSink* stream) { - CritScope cs(&g_log_crit); + webrtc::MutexLock lock(&g_log_mutex_); LoggingSeverity sev = LS_NONE; for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { if (stream == nullptr || stream == entry) { @@ -262,15 +270,16 @@ int LogMessage::GetLogToStream(LogSink* stream) { } void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) { - CritScope cs(&g_log_crit); + webrtc::MutexLock lock(&g_log_mutex_); stream->min_severity_ = min_sev; stream->next_ = streams_; streams_ = stream; + streams_empty_.store(false, std::memory_order_relaxed); UpdateMinLogSeverity(); } void LogMessage::RemoveLogToStream(LogSink* stream) { - CritScope cs(&g_log_crit); + webrtc::MutexLock lock(&g_log_mutex_); for (LogSink** entry = &streams_; *entry != nullptr; entry = &(*entry)->next_) { if (*entry == stream) { @@ -278,6 +287,7 @@ void LogMessage::RemoveLogToStream(LogSink* stream) { break; } } + streams_empty_.store(streams_ == nullptr, std::memory_order_relaxed); UpdateMinLogSeverity(); } @@ -331,7 +341,7 @@ void LogMessage::ConfigureLogging(const char* params) { } void LogMessage::UpdateMinLogSeverity() - 
RTC_EXCLUSIVE_LOCKS_REQUIRED(g_log_crit) { + RTC_EXCLUSIVE_LOCKS_REQUIRED(g_log_mutex_) { LoggingSeverity min_sev = g_dbg_sev; for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { min_sev = std::min(min_sev, entry->min_severity_); @@ -435,12 +445,7 @@ void LogMessage::OutputToDebug(const std::string& str, bool LogMessage::IsNoop(LoggingSeverity severity) { if (severity >= g_dbg_sev || severity >= g_min_sev) return false; - - // TODO(tommi): We're grabbing this lock for every LogMessage instance that - // is going to be logged. This introduces unnecessary synchronization for - // a feature that's mostly used for testing. - CritScope cs(&g_log_crit); - return streams_ == nullptr; + return streams_empty_.load(std::memory_order_relaxed); } void LogMessage::FinishPrintStream() { @@ -481,11 +486,6 @@ void Log(const LogArgType* fmt, ...) { } } - if (LogMessage::IsNoop(meta.meta.Severity())) { - va_end(args); - return; - } - LogMessage log_message(meta.meta.File(), meta.meta.Line(), meta.meta.Severity(), meta.err_ctx, meta.err); if (tag) { diff --git a/rtc_base/logging.h b/rtc_base/logging.h index fe12068fa6..d2607c28b7 100644 --- a/rtc_base/logging.h +++ b/rtc_base/logging.h @@ -46,6 +46,7 @@ #include +#include #include // no-presubmit-check TODO(webrtc:8982) #include #include @@ -391,6 +392,18 @@ class LogCall final { } }; +// This class is used to explicitly ignore values in the conditional +// logging macros. This avoids compiler warnings like "value computed +// is not used" and "statement has no effect". 
+class LogMessageVoidify { + public: + LogMessageVoidify() = default; + // This has to be an operator with a precedence lower than << but + // higher than ?: + template + void operator&(LogStreamer&& streamer) {} +}; + } // namespace webrtc_logging_impl // Direct use of this class is deprecated; please use the logging macros @@ -451,9 +464,14 @@ class LogMessage { static void SetLogToStderr(bool log_to_stderr); // Stream: Any non-blocking stream interface. // Installs the |stream| to collect logs with severtiy |min_sev| or higher. - // |stream| must live until deinstalled by RemoveLogToStream + // |stream| must live until deinstalled by RemoveLogToStream. + // If |stream| is the first stream added to the system, we might miss some + // early concurrent log statement happening from another thread happening near + // this instant. static void AddLogToStream(LogSink* stream, LoggingSeverity min_sev); - // Removes the specified stream, without destroying it. + // Removes the specified stream, without destroying it. When the method + // has completed, it's guaranteed that |stream| will receive no more logging + // calls. static void RemoveLogToStream(LogSink* stream); // Returns the severity for the specified stream, of if none is specified, // the minimum stream severity. @@ -469,6 +487,12 @@ class LogMessage { // |streams_| collection is empty, the LogMessage will be considered a noop // LogMessage. static bool IsNoop(LoggingSeverity severity); + // Version of IsNoop that uses fewer instructions at the call site, since the + // caller doesn't have to pass an argument. + template + RTC_NO_INLINE static bool IsNoop() { + return IsNoop(S); + } #else // Next methods do nothing; no one will call these functions. 
LogMessage(const char* file, int line, LoggingSeverity sev) {} @@ -507,7 +531,11 @@ class LogMessage { inline static int GetLogToStream(LogSink* stream = nullptr) { return 0; } inline static int GetMinLogSeverity() { return 0; } inline static void ConfigureLogging(const char* params) {} - inline static bool IsNoop(LoggingSeverity severity) { return true; } + static constexpr bool IsNoop(LoggingSeverity severity) { return true; } + template + static constexpr bool IsNoop() { + return IsNoop(S); + } #endif // RTC_LOG_ENABLED() private: @@ -545,6 +573,12 @@ class LogMessage { // The output streams and their associated severities static LogSink* streams_; + // Holds true with high probability if |streams_| is empty, false with high + // probability otherwise. Operated on with std::memory_order_relaxed because + // it's ok to lose or log some additional statements near the instant streams + // are added/removed. + static std::atomic streams_empty_; + // Flags for formatting options static bool thread_, timestamp_; @@ -574,16 +608,18 @@ class LogMessage { // Logging Helpers ////////////////////////////////////////////////////////////////////// -#define RTC_LOG_FILE_LINE(sev, file, line) \ - RTC_LOG_ENABLED() && \ - rtc::webrtc_logging_impl::LogCall() & \ - rtc::webrtc_logging_impl::LogStreamer<>() \ - << rtc::webrtc_logging_impl::LogMetadata(file, line, sev) +#define RTC_LOG_FILE_LINE(sev, file, line) \ + ::rtc::webrtc_logging_impl::LogCall() & \ + ::rtc::webrtc_logging_impl::LogStreamer<>() \ + << ::rtc::webrtc_logging_impl::LogMetadata(file, line, sev) -#define RTC_LOG(sev) RTC_LOG_FILE_LINE(rtc::sev, __FILE__, __LINE__) +#define RTC_LOG(sev) \ + !rtc::LogMessage::IsNoop<::rtc::sev>() && \ + RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) // The _V version is for when a variable is passed in. 
-#define RTC_LOG_V(sev) RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) +#define RTC_LOG_V(sev) \ + !rtc::LogMessage::IsNoop(sev) && RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) // The _F version prefixes the message with the current function name. #if (defined(__GNUC__) && !defined(NDEBUG)) || defined(WANT_PRETTY_LOG_F) @@ -595,18 +631,19 @@ class LogMessage { #define RTC_LOG_T_F(sev) RTC_LOG(sev) << this << ": " << __FUNCTION__ << ": " #endif -#define RTC_LOG_CHECK_LEVEL(sev) rtc::LogCheckLevel(rtc::sev) -#define RTC_LOG_CHECK_LEVEL_V(sev) rtc::LogCheckLevel(sev) +#define RTC_LOG_CHECK_LEVEL(sev) ::rtc::LogCheckLevel(::rtc::sev) +#define RTC_LOG_CHECK_LEVEL_V(sev) ::rtc::LogCheckLevel(sev) inline bool LogCheckLevel(LoggingSeverity sev) { return (LogMessage::GetMinLogSeverity() <= sev); } -#define RTC_LOG_E(sev, ctx, err) \ - RTC_LOG_ENABLED() && rtc::webrtc_logging_impl::LogCall() & \ - rtc::webrtc_logging_impl::LogStreamer<>() \ - << rtc::webrtc_logging_impl::LogMetadataErr { \ - {__FILE__, __LINE__, rtc::sev}, rtc::ERRCTX_##ctx, (err) \ +#define RTC_LOG_E(sev, ctx, err) \ + !rtc::LogMessage::IsNoop<::rtc::sev>() && \ + ::rtc::webrtc_logging_impl::LogCall() & \ + ::rtc::webrtc_logging_impl::LogStreamer<>() \ + << ::rtc::webrtc_logging_impl::LogMetadataErr { \ + {__FILE__, __LINE__, ::rtc::sev}, ::rtc::ERRCTX_##ctx, (err) \ } #define RTC_LOG_T(sev) RTC_LOG(sev) << this << ": " @@ -639,11 +676,12 @@ inline const char* AdaptString(const std::string& str) { } } // namespace webrtc_logging_impl -#define RTC_LOG_TAG(sev, tag) \ - RTC_LOG_ENABLED() && rtc::webrtc_logging_impl::LogCall() & \ - rtc::webrtc_logging_impl::LogStreamer<>() \ - << rtc::webrtc_logging_impl::LogMetadataTag { \ - sev, rtc::webrtc_logging_impl::AdaptString(tag) \ +#define RTC_LOG_TAG(sev, tag) \ + !rtc::LogMessage::IsNoop(sev) && \ + ::rtc::webrtc_logging_impl::LogCall() & \ + ::rtc::webrtc_logging_impl::LogStreamer<>() \ + << ::rtc::webrtc_logging_impl::LogMetadataTag { \ + sev, 
::rtc::webrtc_logging_impl::AdaptString(tag) \ } #else @@ -660,9 +698,10 @@ inline const char* AdaptString(const std::string& str) { #define RTC_DLOG_V(sev) RTC_LOG_V(sev) #define RTC_DLOG_F(sev) RTC_LOG_F(sev) #else -#define RTC_DLOG_EAT_STREAM_PARAMS() \ - while (false) \ - rtc::webrtc_logging_impl::LogStreamer<>() +#define RTC_DLOG_EAT_STREAM_PARAMS() \ + while (false) \ + ::rtc::webrtc_logging_impl::LogMessageVoidify() & \ + (::rtc::webrtc_logging_impl::LogStreamer<>()) #define RTC_DLOG(sev) RTC_DLOG_EAT_STREAM_PARAMS() #define RTC_DLOG_V(sev) RTC_DLOG_EAT_STREAM_PARAMS() #define RTC_DLOG_F(sev) RTC_DLOG_EAT_STREAM_PARAMS() diff --git a/rtc_base/logging_unittest.cc b/rtc_base/logging_unittest.cc index a66f8b5608..6bb20abcc1 100644 --- a/rtc_base/logging_unittest.cc +++ b/rtc_base/logging_unittest.cc @@ -359,5 +359,19 @@ TEST(LogTest, EnumsAreSupported) { stream.Close(); } +TEST(LogTest, NoopSeverityDoesNotRunStringFormatting) { + if (!LogMessage::IsNoop(LS_VERBOSE)) { + RTC_LOG(LS_WARNING) << "Skipping test since verbose logging is turned on."; + return; + } + bool was_called = false; + auto cb = [&was_called]() { + was_called = true; + return "This could be an expensive callback."; + }; + RTC_LOG(LS_VERBOSE) << "This should not be logged: " << cb(); + EXPECT_FALSE(was_called); +} + } // namespace rtc -#endif +#endif // RTC_LOG_ENABLED() diff --git a/rtc_base/memory/BUILD.gn b/rtc_base/memory/BUILD.gn index 0dcd88f958..838fbc68d4 100644 --- a/rtc_base/memory/BUILD.gn +++ b/rtc_base/memory/BUILD.gn @@ -12,14 +12,6 @@ if (is_android) { import("//build/config/android/rules.gni") } -rtc_source_set("aligned_array") { - sources = [ "aligned_array.h" ] - deps = [ - ":aligned_malloc", - "..:checks", - ] -} - rtc_library("aligned_malloc") { sources = [ "aligned_malloc.cc", @@ -28,29 +20,35 @@ rtc_library("aligned_malloc") { deps = [ "..:checks" ] } +# Test only utility. +# TODO: Tag with `testonly = true` once all depending targets are correctly +# tagged. 
rtc_library("fifo_buffer") { visibility = [ - "../../p2p:rtc_p2p", + ":unittests", "..:rtc_base_tests_utils", "..:rtc_base_unittests", - ":unittests", + "../../p2p:rtc_p2p", # This needs to be fixed. ] sources = [ "fifo_buffer.cc", "fifo_buffer.h", ] - deps = [ "..:rtc_base" ] + deps = [ + "..:rtc_base", + "../synchronization:mutex", + "../task_utils:pending_task_safety_flag", + "../task_utils:to_queued_task", + ] } rtc_library("unittests") { testonly = true sources = [ - "aligned_array_unittest.cc", "aligned_malloc_unittest.cc", "fifo_buffer_unittest.cc", ] deps = [ - ":aligned_array", ":aligned_malloc", ":fifo_buffer", "../../test:test_support", diff --git a/rtc_base/memory/aligned_array.h b/rtc_base/memory/aligned_array.h deleted file mode 100644 index c67d87d404..0000000000 --- a/rtc_base/memory/aligned_array.h +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_MEMORY_ALIGNED_ARRAY_H_ -#define RTC_BASE_MEMORY_ALIGNED_ARRAY_H_ - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/memory/aligned_malloc.h" - -namespace webrtc { - -// Wrapper class for aligned arrays. Every row (and the first dimension) are -// aligned to the given byte alignment. 
-template -class AlignedArray { - public: - AlignedArray(size_t rows, size_t cols, size_t alignment) - : rows_(rows), cols_(cols) { - RTC_CHECK_GT(alignment, 0); - head_row_ = - static_cast(AlignedMalloc(rows_ * sizeof(*head_row_), alignment)); - for (size_t i = 0; i < rows_; ++i) { - head_row_[i] = static_cast( - AlignedMalloc(cols_ * sizeof(**head_row_), alignment)); - } - } - - ~AlignedArray() { - for (size_t i = 0; i < rows_; ++i) { - AlignedFree(head_row_[i]); - } - AlignedFree(head_row_); - } - - T* const* Array() { return head_row_; } - - const T* const* Array() const { return head_row_; } - - T* Row(size_t row) { - RTC_CHECK_LE(row, rows_); - return head_row_[row]; - } - - const T* Row(size_t row) const { - RTC_CHECK_LE(row, rows_); - return head_row_[row]; - } - - T& At(size_t row, size_t col) { - RTC_CHECK_LE(col, cols_); - return Row(row)[col]; - } - - const T& At(size_t row, size_t col) const { - RTC_CHECK_LE(col, cols_); - return Row(row)[col]; - } - - size_t rows() const { return rows_; } - - size_t cols() const { return cols_; } - - private: - size_t rows_; - size_t cols_; - T** head_row_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_MEMORY_ALIGNED_ARRAY_H_ diff --git a/rtc_base/memory/aligned_array_unittest.cc b/rtc_base/memory/aligned_array_unittest.cc deleted file mode 100644 index 81fd468a92..0000000000 --- a/rtc_base/memory/aligned_array_unittest.cc +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/memory/aligned_array.h" - -#include - -#include "test/gtest.h" - -namespace { - -bool IsAligned(const void* ptr, size_t alignment) { - return reinterpret_cast(ptr) % alignment == 0; -} - -} // namespace - -namespace webrtc { - -TEST(AlignedArrayTest, CheckAlignment) { - AlignedArray arr(10, 7, 128); - ASSERT_TRUE(IsAligned(arr.Array(), 128)); - for (size_t i = 0; i < 10; ++i) { - ASSERT_TRUE(IsAligned(arr.Row(i), 128)); - ASSERT_EQ(arr.Row(i), arr.Array()[i]); - } -} - -TEST(AlignedArrayTest, CheckOverlap) { - AlignedArray arr(10, 7, 128); - - for (size_t i = 0; i < 10; ++i) { - for (size_t j = 0; j < 7; ++j) { - arr.At(i, j) = 20 * i + j; - } - } - - for (size_t i = 0; i < 10; ++i) { - for (size_t j = 0; j < 7; ++j) { - ASSERT_EQ(arr.At(i, j), 20 * i + j); - ASSERT_EQ(arr.Row(i)[j], 20 * i + j); - ASSERT_EQ(arr.Array()[i][j], 20 * i + j); - } - } -} - -TEST(AlignedArrayTest, CheckRowsCols) { - AlignedArray arr(10, 7, 128); - ASSERT_EQ(arr.rows(), 10u); - ASSERT_EQ(arr.cols(), 7u); -} - -} // namespace webrtc diff --git a/rtc_base/memory/fifo_buffer.cc b/rtc_base/memory/fifo_buffer.cc index 44fb032e57..3fbea8dc20 100644 --- a/rtc_base/memory/fifo_buffer.cc +++ b/rtc_base/memory/fifo_buffer.cc @@ -39,13 +39,13 @@ FifoBuffer::FifoBuffer(size_t size, Thread* owner) FifoBuffer::~FifoBuffer() {} bool FifoBuffer::GetBuffered(size_t* size) const { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); *size = data_length_; return true; } bool FifoBuffer::SetCapacity(size_t size) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); if (data_length_ > size) { return false; } @@ -67,7 +67,7 @@ StreamResult FifoBuffer::ReadOffset(void* buffer, size_t bytes, size_t offset, size_t* bytes_read) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return ReadOffsetLocked(buffer, bytes, offset, bytes_read); } @@ -75,12 +75,12 @@ StreamResult FifoBuffer::WriteOffset(const void* buffer, size_t bytes, size_t offset, size_t* 
bytes_written) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return WriteOffsetLocked(buffer, bytes, offset, bytes_written); } StreamState FifoBuffer::GetState() const { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return state_; } @@ -88,7 +88,7 @@ StreamResult FifoBuffer::Read(void* buffer, size_t bytes, size_t* bytes_read, int* error) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); const bool was_writable = data_length_ < buffer_length_; size_t copy = 0; StreamResult result = ReadOffsetLocked(buffer, bytes, 0, ©); @@ -104,7 +104,7 @@ StreamResult FifoBuffer::Read(void* buffer, // if we were full before, and now we're not, post an event if (!was_writable && copy > 0) { - PostEvent(owner_, SE_WRITE, 0); + PostEvent(SE_WRITE, 0); } } return result; @@ -114,7 +114,7 @@ StreamResult FifoBuffer::Write(const void* buffer, size_t bytes, size_t* bytes_written, int* error) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); const bool was_readable = (data_length_ > 0); size_t copy = 0; @@ -129,19 +129,19 @@ StreamResult FifoBuffer::Write(const void* buffer, // if we didn't have any data to read before, and now we do, post an event if (!was_readable && copy > 0) { - PostEvent(owner_, SE_READ, 0); + PostEvent(SE_READ, 0); } } return result; } void FifoBuffer::Close() { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); state_ = SS_CLOSED; } const void* FifoBuffer::GetReadData(size_t* size) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); *size = (read_position_ + data_length_ <= buffer_length_) ? 
data_length_ : buffer_length_ - read_position_; @@ -149,18 +149,18 @@ const void* FifoBuffer::GetReadData(size_t* size) { } void FifoBuffer::ConsumeReadData(size_t size) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); RTC_DCHECK(size <= data_length_); const bool was_writable = data_length_ < buffer_length_; read_position_ = (read_position_ + size) % buffer_length_; data_length_ -= size; if (!was_writable && size > 0) { - PostEvent(owner_, SE_WRITE, 0); + PostEvent(SE_WRITE, 0); } } void* FifoBuffer::GetWriteBuffer(size_t* size) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); if (state_ == SS_CLOSED) { return nullptr; } @@ -180,17 +180,17 @@ void* FifoBuffer::GetWriteBuffer(size_t* size) { } void FifoBuffer::ConsumeWriteBuffer(size_t size) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); RTC_DCHECK(size <= buffer_length_ - data_length_); const bool was_readable = (data_length_ > 0); data_length_ += size; if (!was_readable && size > 0) { - PostEvent(owner_, SE_READ, 0); + PostEvent(SE_READ, 0); } } bool FifoBuffer::GetWriteRemaining(size_t* size) const { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); *size = buffer_length_ - data_length_; return true; } diff --git a/rtc_base/memory/fifo_buffer.h b/rtc_base/memory/fifo_buffer.h index f859815c70..bf2edf6e24 100644 --- a/rtc_base/memory/fifo_buffer.h +++ b/rtc_base/memory/fifo_buffer.h @@ -14,6 +14,9 @@ #include #include "rtc_base/stream.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace rtc { @@ -97,13 +100,19 @@ class FifoBuffer final : public StreamInterface { bool GetWriteRemaining(size_t* size) const; private: + void PostEvent(int events, int err) { + owner_->PostTask(webrtc::ToQueuedTask(task_safety_, [this, events, err]() { + SignalEvent(this, events, err); + })); + } + // Helper method that implements ReadOffset. 
Caller must acquire a lock // when calling this method. StreamResult ReadOffsetLocked(void* buffer, size_t bytes, size_t offset, size_t* bytes_read) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Helper method that implements WriteOffset. Caller must acquire a lock // when calling this method. @@ -111,22 +120,24 @@ class FifoBuffer final : public StreamInterface { size_t bytes, size_t offset, size_t* bytes_written) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + webrtc::ScopedTaskSafety task_safety_; // keeps the opened/closed state of the stream - StreamState state_ RTC_GUARDED_BY(crit_); + StreamState state_ RTC_GUARDED_BY(mutex_); // the allocated buffer - std::unique_ptr buffer_ RTC_GUARDED_BY(crit_); + std::unique_ptr buffer_ RTC_GUARDED_BY(mutex_); // size of the allocated buffer - size_t buffer_length_ RTC_GUARDED_BY(crit_); + size_t buffer_length_ RTC_GUARDED_BY(mutex_); // amount of readable data in the buffer - size_t data_length_ RTC_GUARDED_BY(crit_); + size_t data_length_ RTC_GUARDED_BY(mutex_); // offset to the readable data - size_t read_position_ RTC_GUARDED_BY(crit_); + size_t read_position_ RTC_GUARDED_BY(mutex_); // stream callbacks are dispatched on this thread - Thread* owner_; + Thread* const owner_; // object lock - CriticalSection crit_; + mutable webrtc::Mutex mutex_; RTC_DISALLOW_COPY_AND_ASSIGN(FifoBuffer); }; diff --git a/rtc_base/message_handler.cc b/rtc_base/message_handler.cc index 18a06e241d..e6e973dbd9 100644 --- a/rtc_base/message_handler.cc +++ b/rtc_base/message_handler.cc @@ -14,7 +14,16 @@ namespace rtc { -MessageHandler::~MessageHandler() { +MessageHandlerAutoCleanup::MessageHandlerAutoCleanup() {} + +MessageHandlerAutoCleanup::~MessageHandlerAutoCleanup() { + // Note that even though this clears currently pending messages for the + // message handler, it's still racy since it doesn't prevent threads that + // might be in the process of posting new messages 
with would-be dangling + // pointers. + // This is related to the design of Message having a raw pointer. + // We could consider whether it would be safer to require message handlers + // to be reference counted (as some are). ThreadManager::Clear(this); } diff --git a/rtc_base/message_handler.h b/rtc_base/message_handler.h index 85cb785485..62c8344e1f 100644 --- a/rtc_base/message_handler.h +++ b/rtc_base/message_handler.h @@ -21,17 +21,27 @@ namespace rtc { struct Message; -// Messages get dispatched to a MessageHandler +// MessageQueue/Thread Messages get dispatched via the MessageHandler interface. class RTC_EXPORT MessageHandler { public: - virtual ~MessageHandler(); + virtual ~MessageHandler() {} virtual void OnMessage(Message* msg) = 0; +}; + +// Warning: Provided for backwards compatibility. +// +// This class performs expensive cleanup in the dtor that will affect all +// instances of Thread (and their pending message queues) and will block the +// current thread as well as all other threads. +class RTC_EXPORT MessageHandlerAutoCleanup : public MessageHandler { + public: + ~MessageHandlerAutoCleanup() override; protected: - MessageHandler() {} + MessageHandlerAutoCleanup(); private: - RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandler); + RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandlerAutoCleanup); }; } // namespace rtc diff --git a/rtc_base/nat_server.cc b/rtc_base/nat_server.cc index 323a787ee0..725a57be9f 100644 --- a/rtc_base/nat_server.cc +++ b/rtc_base/nat_server.cc @@ -174,7 +174,7 @@ void NATServer::OnInternalUDPPacket(AsyncPacketSocket* socket, RTC_DCHECK(iter != int_map_->end()); // Allow the destination to send packets back to the source. - iter->second->WhitelistInsert(dest_addr); + iter->second->AllowlistInsert(dest_addr); // Send the packet to its intended destination. 
rtc::PacketOptions options; @@ -227,29 +227,29 @@ void NATServer::Translate(const SocketAddressPair& route) { bool NATServer::ShouldFilterOut(TransEntry* entry, const SocketAddress& ext_addr) { - return entry->WhitelistContains(ext_addr); + return entry->AllowlistContains(ext_addr); } NATServer::TransEntry::TransEntry(const SocketAddressPair& r, AsyncUDPSocket* s, NAT* nat) : route(r), socket(s) { - whitelist = new AddressSet(AddrCmp(nat)); + allowlist = new AddressSet(AddrCmp(nat)); } NATServer::TransEntry::~TransEntry() { - delete whitelist; + delete allowlist; delete socket; } -void NATServer::TransEntry::WhitelistInsert(const SocketAddress& addr) { - CritScope cs(&crit_); - whitelist->insert(addr); +void NATServer::TransEntry::AllowlistInsert(const SocketAddress& addr) { + webrtc::MutexLock lock(&mutex_); + allowlist->insert(addr); } -bool NATServer::TransEntry::WhitelistContains(const SocketAddress& ext_addr) { - CritScope cs(&crit_); - return whitelist->find(ext_addr) == whitelist->end(); +bool NATServer::TransEntry::AllowlistContains(const SocketAddress& ext_addr) { + webrtc::MutexLock lock(&mutex_); + return allowlist->find(ext_addr) == allowlist->end(); } } // namespace rtc diff --git a/rtc_base/nat_server.h b/rtc_base/nat_server.h index 46f01e9761..5078fbb2c1 100644 --- a/rtc_base/nat_server.h +++ b/rtc_base/nat_server.h @@ -20,6 +20,7 @@ #include "rtc_base/proxy_server.h" #include "rtc_base/socket_address_pair.h" #include "rtc_base/socket_factory.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" namespace rtc { @@ -96,13 +97,13 @@ class NATServer : public sigslot::has_slots<> { TransEntry(const SocketAddressPair& r, AsyncUDPSocket* s, NAT* nat); ~TransEntry(); - void WhitelistInsert(const SocketAddress& addr); - bool WhitelistContains(const SocketAddress& ext_addr); + void AllowlistInsert(const SocketAddress& addr); + bool AllowlistContains(const SocketAddress& ext_addr); SocketAddressPair route; AsyncUDPSocket* socket; - 
AddressSet* whitelist; - CriticalSection crit_; + AddressSet* allowlist; + webrtc::Mutex mutex_; }; typedef std::map InternalMap; diff --git a/rtc_base/net_helper.cc b/rtc_base/net_helper.cc index 7dcb599933..893b500d56 100644 --- a/rtc_base/net_helper.cc +++ b/rtc_base/net_helper.cc @@ -10,9 +10,6 @@ #include "rtc_base/net_helper.h" -#include "rtc_base/checks.h" -#include "rtc_base/ip_address.h" - namespace cricket { const char UDP_PROTOCOL_NAME[] = "udp"; @@ -20,23 +17,15 @@ const char TCP_PROTOCOL_NAME[] = "tcp"; const char SSLTCP_PROTOCOL_NAME[] = "ssltcp"; const char TLS_PROTOCOL_NAME[] = "tls"; -int GetIpOverhead(int addr_family) { - switch (addr_family) { - case AF_INET: // IPv4 - return 20; - case AF_INET6: // IPv6 - return 40; - default: - RTC_NOTREACHED() << "Invaild address family."; - return 0; - } -} - int GetProtocolOverhead(const std::string& protocol) { if (protocol == TCP_PROTOCOL_NAME || protocol == SSLTCP_PROTOCOL_NAME) { - return 20; + return kTcpHeaderSize; + } else if (protocol == UDP_PROTOCOL_NAME) { + return kUdpHeaderSize; + } else { + // TODO(srte): We should crash on unexpected input and handle TLS correctly. + return 8; } - return 8; } } // namespace cricket diff --git a/rtc_base/net_helper.h b/rtc_base/net_helper.h index e42502bb18..9abbbdefb2 100644 --- a/rtc_base/net_helper.h +++ b/rtc_base/net_helper.h @@ -21,8 +21,8 @@ extern const char TCP_PROTOCOL_NAME[]; extern const char SSLTCP_PROTOCOL_NAME[]; extern const char TLS_PROTOCOL_NAME[]; -// Get the network layer overhead per packet based on the IP address family. -int GetIpOverhead(int addr_family); +constexpr int kTcpHeaderSize = 20; +constexpr int kUdpHeaderSize = 8; // Get the transport layer overhead per packet based on the protocol. 
int GetProtocolOverhead(const std::string& protocol); diff --git a/rtc_base/net_helpers.cc b/rtc_base/net_helpers.cc index 6ff3791738..c6685e2a65 100644 --- a/rtc_base/net_helpers.cc +++ b/rtc_base/net_helpers.cc @@ -10,8 +10,6 @@ #include "rtc_base/net_helpers.h" -#include - #if defined(WEBRTC_WIN) #include #include @@ -26,8 +24,11 @@ #endif #endif // defined(WEBRTC_POSIX) && !defined(__native_client__) +#include "api/task_queue/task_queue_base.h" #include "rtc_base/logging.h" #include "rtc_base/signal_thread.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread... namespace rtc { @@ -83,18 +84,35 @@ int ResolveHostname(const std::string& hostname, #endif // !__native_client__ } -// AsyncResolver -AsyncResolver::AsyncResolver() : SignalThread(), error_(-1) {} +AsyncResolver::AsyncResolver() : error_(-1) {} -AsyncResolver::~AsyncResolver() = default; +AsyncResolver::~AsyncResolver() { + RTC_DCHECK_RUN_ON(&sequence_checker_); +} void AsyncResolver::Start(const SocketAddress& addr) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); addr_ = addr; - // SignalThred Start will kickoff the resolve process. 
- SignalThread::Start(); + webrtc::TaskQueueBase* current_task_queue = webrtc::TaskQueueBase::Current(); + popup_thread_ = Thread::Create(); + popup_thread_->Start(); + popup_thread_->PostTask(webrtc::ToQueuedTask( + [this, flag = safety_.flag(), addr, current_task_queue] { + std::vector addresses; + int error = + ResolveHostname(addr.hostname().c_str(), addr.family(), &addresses); + current_task_queue->PostTask(webrtc::ToQueuedTask( + std::move(flag), [this, error, addresses = std::move(addresses)] { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ResolveDone(std::move(addresses), error); + })); + })); } bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); if (error_ != 0 || addresses_.empty()) return false; @@ -109,20 +127,40 @@ bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const { } int AsyncResolver::GetError() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); return error_; } void AsyncResolver::Destroy(bool wait) { - SignalThread::Destroy(wait); + // Some callers have trouble guaranteeing that Destroy is called on the + // sequence guarded by |sequence_checker_|. 
+ // RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); + destroy_called_ = true; + MaybeSelfDestruct(); } -void AsyncResolver::DoWork() { - error_ = - ResolveHostname(addr_.hostname().c_str(), addr_.family(), &addresses_); +const std::vector& AsyncResolver::addresses() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); + return addresses_; } -void AsyncResolver::OnWorkDone() { +void AsyncResolver::ResolveDone(std::vector addresses, int error) { + addresses_ = addresses; + error_ = error; + recursion_check_ = true; SignalDone(this); + MaybeSelfDestruct(); +} + +void AsyncResolver::MaybeSelfDestruct() { + if (!recursion_check_) { + delete this; + } else { + recursion_check_ = false; + } } const char* inet_ntop(int af, const void* src, char* dst, socklen_t size) { diff --git a/rtc_base/net_helpers.h b/rtc_base/net_helpers.h index 1e06940be7..c6aa4be5b2 100644 --- a/rtc_base/net_helpers.h +++ b/rtc_base/net_helpers.h @@ -21,16 +21,23 @@ #include "rtc_base/async_resolver_interface.h" #include "rtc_base/ip_address.h" -#include "rtc_base/signal_thread.h" #include "rtc_base/socket_address.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace rtc { // AsyncResolver will perform async DNS resolution, signaling the result on // the SignalDone from AsyncResolverInterface when the operation completes. -class RTC_EXPORT AsyncResolver : public SignalThread, - public AsyncResolverInterface { +// +// This class is thread-compatible, and all methods and destruction needs to +// happen from the same rtc::Thread, except for Destroy which is allowed to +// happen on another context provided it's not happening concurrently to another +// public API call, and is the last access to the object. 
+class RTC_EXPORT AsyncResolver : public AsyncResolverInterface { public: AsyncResolver(); ~AsyncResolver() override; @@ -40,17 +47,22 @@ class RTC_EXPORT AsyncResolver : public SignalThread, int GetError() const override; void Destroy(bool wait) override; - const std::vector& addresses() const { return addresses_; } - void set_error(int error) { error_ = error; } - - protected: - void DoWork() override; - void OnWorkDone() override; + const std::vector& addresses() const; private: - SocketAddress addr_; - std::vector addresses_; - int error_; + void ResolveDone(std::vector addresses, int error) + RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_); + void MaybeSelfDestruct(); + + SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_); + std::vector addresses_ RTC_GUARDED_BY(sequence_checker_); + int error_ RTC_GUARDED_BY(sequence_checker_); + webrtc::ScopedTaskSafety safety_ RTC_GUARDED_BY(sequence_checker_); + std::unique_ptr popup_thread_ RTC_GUARDED_BY(sequence_checker_); + bool recursion_check_ = + false; // Protects against SignalDone calling into Destroy. + bool destroy_called_ = false; + webrtc::SequenceChecker sequence_checker_; }; // rtc namespaced wrappers for inet_ntop and inet_pton so we can avoid diff --git a/rtc_base/network.cc b/rtc_base/network.cc index df3487fe88..8aabdcb7e6 100644 --- a/rtc_base/network.cc +++ b/rtc_base/network.cc @@ -11,15 +11,7 @@ #include "rtc_base/network.h" #if defined(WEBRTC_POSIX) -// linux/if.h can't be included at the same time as the posix sys/if.h, and -// it's transitively required by linux/route.h, so include that version on -// linux instead of the standard posix one. 
-#if defined(WEBRTC_LINUX) -#include -#include -#elif !defined(__native_client__) #include -#endif #endif // WEBRTC_POSIX #if defined(WEBRTC_WIN) @@ -30,8 +22,6 @@ #include "rtc_base/ifaddrs_converter.h" #endif -#include - #include #include "absl/algorithm/container.h" @@ -45,6 +35,7 @@ #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/thread.h" +#include "system_wrappers/include/field_trial.h" namespace rtc { namespace { @@ -95,29 +86,10 @@ bool SortNetworks(const Network* a, const Network* b) { return a->key() < b->key(); } -std::string AdapterTypeToString(AdapterType type) { - switch (type) { - case ADAPTER_TYPE_ANY: - return "Wildcard"; - case ADAPTER_TYPE_UNKNOWN: - return "Unknown"; - case ADAPTER_TYPE_ETHERNET: - return "Ethernet"; - case ADAPTER_TYPE_WIFI: - return "Wifi"; - case ADAPTER_TYPE_CELLULAR: - return "Cellular"; - case ADAPTER_TYPE_VPN: - return "VPN"; - case ADAPTER_TYPE_LOOPBACK: - return "Loopback"; - default: - RTC_NOTREACHED() << "Invalid type " << type; - return std::string(); - } -} - -uint16_t ComputeNetworkCostByType(int type) { +uint16_t ComputeNetworkCostByType(int type, + bool use_differentiated_cellular_costs) { + // TODO(jonaso) : Rollout support for cellular network cost using A/B + // experiment to make sure it does not introduce regressions. switch (type) { case rtc::ADAPTER_TYPE_ETHERNET: case rtc::ADAPTER_TYPE_LOOPBACK: @@ -125,7 +97,19 @@ uint16_t ComputeNetworkCostByType(int type) { case rtc::ADAPTER_TYPE_WIFI: return kNetworkCostLow; case rtc::ADAPTER_TYPE_CELLULAR: - return kNetworkCostHigh; + return kNetworkCostCellular; + case rtc::ADAPTER_TYPE_CELLULAR_2G: + return use_differentiated_cellular_costs ? kNetworkCostCellular2G + : kNetworkCostCellular; + case rtc::ADAPTER_TYPE_CELLULAR_3G: + return use_differentiated_cellular_costs ? kNetworkCostCellular3G + : kNetworkCostCellular; + case rtc::ADAPTER_TYPE_CELLULAR_4G: + return use_differentiated_cellular_costs ? 
kNetworkCostCellular4G + : kNetworkCostCellular; + case rtc::ADAPTER_TYPE_CELLULAR_5G: + return use_differentiated_cellular_costs ? kNetworkCostCellular5G + : kNetworkCostCellular; case rtc::ADAPTER_TYPE_ANY: // Candidates gathered from the any-address/wildcard ports, as backups, // are given the maximum cost so that if there are other candidates with @@ -173,6 +157,18 @@ bool IsIgnoredIPv6(const InterfaceAddress& ip) { } #endif // !defined(__native_client__) +// Note: consider changing to const Network* as arguments +// if/when considering other changes that should not trigger +// OnNetworksChanged. +bool ShouldAdapterChangeTriggerNetworkChange(rtc::AdapterType old_type, + rtc::AdapterType new_type) { + // skip triggering OnNetworksChanged if + // changing from one cellular to another. + if (Network::IsCellular(old_type) && Network::IsCellular(new_type)) + return false; + return true; +} + } // namespace // These addresses are used as the targets to find out the default local address @@ -269,7 +265,9 @@ webrtc::MdnsResponderInterface* NetworkManager::GetMdnsResponder() const { } NetworkManagerBase::NetworkManagerBase() - : enumeration_permission_(NetworkManager::ENUMERATION_ALLOWED) {} + : enumeration_permission_(NetworkManager::ENUMERATION_ALLOWED), + signal_network_preference_change_(webrtc::field_trial::IsEnabled( + "WebRTC-SignalNetworkPreferenceChange")) {} NetworkManagerBase::~NetworkManagerBase() { for (const auto& kv : networks_map_) { @@ -376,13 +374,22 @@ void NetworkManagerBase::MergeNetworkList(const NetworkList& new_networks, merged_list.push_back(existing_net); if (net->type() != ADAPTER_TYPE_UNKNOWN && net->type() != existing_net->type()) { + if (ShouldAdapterChangeTriggerNetworkChange(existing_net->type(), + net->type())) { + *changed = true; + } existing_net->set_type(net->type()); - *changed = true; } // If the existing network was not active, networks have changed. 
if (!existing_net->active()) { *changed = true; } + if (net->network_preference() != existing_net->network_preference()) { + existing_net->set_network_preference(net->network_preference()); + if (signal_network_preference_change_) { + *changed = true; + } + } RTC_DCHECK(net->active()); if (existing_net != net) { delete net; @@ -471,15 +478,16 @@ Network* NetworkManagerBase::GetNetworkFromAddress( return nullptr; } -BasicNetworkManager::BasicNetworkManager() - : thread_(nullptr), - sent_first_update_(false), - start_count_(0), - ignore_non_default_routes_(false) {} +BasicNetworkManager::BasicNetworkManager() {} + +BasicNetworkManager::BasicNetworkManager( + NetworkMonitorFactory* network_monitor_factory) + : network_monitor_factory_(network_monitor_factory) {} BasicNetworkManager::~BasicNetworkManager() {} void BasicNetworkManager::OnNetworksChanged() { + RTC_DCHECK_RUN_ON(thread_); RTC_LOG(LS_INFO) << "Network change was observed"; UpdateNetworksOnce(); } @@ -536,6 +544,7 @@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces, AdapterType adapter_type = ADAPTER_TYPE_UNKNOWN; AdapterType vpn_underlying_adapter_type = ADAPTER_TYPE_UNKNOWN; + NetworkPreference network_preference = NetworkPreference::NEUTRAL; if (cursor->ifa_flags & IFF_LOOPBACK) { adapter_type = ADAPTER_TYPE_LOOPBACK; } else { @@ -543,6 +552,8 @@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces, // Otherwise, get the adapter type based on a few name matching rules. 
if (network_monitor_) { adapter_type = network_monitor_->GetAdapterType(cursor->ifa_name); + network_preference = + network_monitor_->GetNetworkPreference(cursor->ifa_name); } if (adapter_type == ADAPTER_TYPE_UNKNOWN) { adapter_type = GetAdapterTypeFromName(cursor->ifa_name); @@ -568,6 +579,7 @@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces, network->AddIP(ip); network->set_ignored(IsIgnoredNetwork(*network)); network->set_underlying_type_for_vpn(vpn_underlying_adapter_type); + network->set_network_preference(network_preference); if (include_ignored || !network->ignored()) { current_networks[key] = network.get(); networks->push_back(network.release()); @@ -580,6 +592,7 @@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces, existing_network->set_underlying_type_for_vpn( vpn_underlying_adapter_type); } + existing_network->set_network_preference(network_preference); } } } @@ -767,33 +780,6 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored, } #endif // WEBRTC_WIN -#if defined(WEBRTC_LINUX) -bool IsDefaultRoute(const std::string& network_name) { - FILE* f = fopen("/proc/net/route", "r"); - if (!f) { - RTC_LOG(LS_WARNING) - << "Couldn't read /proc/net/route, skipping default " - "route check (assuming everything is a default route)."; - return true; - } - bool is_default_route = false; - char line[500]; - while (fgets(line, sizeof(line), f)) { - char iface_name[256]; - unsigned int iface_ip, iface_gw, iface_mask, iface_flags; - if (sscanf(line, "%255s %8X %8X %4X %*d %*u %*d %8X", iface_name, &iface_ip, - &iface_gw, &iface_flags, &iface_mask) == 5 && - network_name == iface_name && iface_mask == 0 && - (iface_flags & (RTF_UP | RTF_HOST)) == RTF_UP) { - is_default_route = true; - break; - } - } - fclose(f); - return is_default_route; -} -#endif - bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const { // Ignore networks on the explicit ignore list. 
for (const std::string& ignored_name : network_ignore_list_) { @@ -810,12 +796,6 @@ bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const { strncmp(network.name().c_str(), "vboxnet", 7) == 0) { return true; } -#if defined(WEBRTC_LINUX) - // Make sure this is a default route, if we're ignoring non-defaults. - if (ignore_non_default_routes_ && !IsDefaultRoute(network.name())) { - return true; - } -#endif #elif defined(WEBRTC_WIN) // Ignore any HOST side vmware adapters with a description like: // VMware Virtual Ethernet Adapter for VMnet1 @@ -826,6 +806,11 @@ bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const { } #endif + if (network_monitor_ && + !network_monitor_->IsAdapterAvailable(network.name())) { + return true; + } + // Ignore any networks with a 0.x.y.z IP if (network.prefix().family() == AF_INET) { return (network.prefix().v4AddressAsHostOrderInteger() < 0x01000000); @@ -836,6 +821,8 @@ bool BasicNetworkManager::IsIgnoredNetwork(const Network& network) const { void BasicNetworkManager::StartUpdating() { thread_ = Thread::Current(); + // Redundant but necessary for thread annotations. 
+ RTC_DCHECK_RUN_ON(thread_); if (start_count_) { // If network interfaces are already discovered and signal is sent, // we should trigger network signal immediately for the new clients @@ -850,7 +837,7 @@ void BasicNetworkManager::StartUpdating() { } void BasicNetworkManager::StopUpdating() { - RTC_DCHECK(Thread::Current() == thread_); + RTC_DCHECK_RUN_ON(thread_); if (!start_count_) return; @@ -863,12 +850,11 @@ void BasicNetworkManager::StopUpdating() { } void BasicNetworkManager::StartNetworkMonitor() { - NetworkMonitorFactory* factory = NetworkMonitorFactory::GetFactory(); - if (factory == nullptr) { + if (network_monitor_factory_ == nullptr) { return; } if (!network_monitor_) { - network_monitor_.reset(factory->CreateNetworkMonitor()); + network_monitor_.reset(network_monitor_factory_->CreateNetworkMonitor()); if (!network_monitor_) { return; } @@ -886,6 +872,7 @@ void BasicNetworkManager::StopNetworkMonitor() { } void BasicNetworkManager::OnMessage(Message* msg) { + RTC_DCHECK_RUN_ON(thread_); switch (msg->message_id) { case kUpdateNetworksMessage: { UpdateNetworksContinually(); @@ -901,7 +888,6 @@ void BasicNetworkManager::OnMessage(Message* msg) { } IPAddress BasicNetworkManager::QueryDefaultLocalAddress(int family) const { - RTC_DCHECK(thread_ == Thread::Current()); RTC_DCHECK(thread_->socketserver() != nullptr); RTC_DCHECK(family == AF_INET || family == AF_INET6); @@ -930,8 +916,6 @@ void BasicNetworkManager::UpdateNetworksOnce() { if (!start_count_) return; - RTC_DCHECK(Thread::Current() == thread_); - NetworkList list; if (!CreateNetworks(false, &list)) { SignalError(); @@ -955,6 +939,7 @@ void BasicNetworkManager::UpdateNetworksContinually() { } void BasicNetworkManager::DumpNetworks() { + RTC_DCHECK_RUN_ON(thread_); NetworkList list; GetNetworks(&list); RTC_LOG(LS_INFO) << "NetworkManager detected " << list.size() << " networks:"; @@ -977,7 +962,9 @@ Network::Network(const std::string& name, scope_id_(0), ignored_(false), type_(ADAPTER_TYPE_UNKNOWN), 
- preference_(0) {} + preference_(0), + use_differentiated_cellular_costs_(webrtc::field_trial::IsEnabled( + "WebRTC-UseDifferentiatedCellularCosts")) {} Network::Network(const std::string& name, const std::string& desc, @@ -992,7 +979,9 @@ Network::Network(const std::string& name, scope_id_(0), ignored_(false), type_(type), - preference_(0) {} + preference_(0), + use_differentiated_cellular_costs_(webrtc::field_trial::IsEnabled( + "WebRTC-UseDifferentiatedCellularCosts")) {} Network::Network(const Network&) = default; @@ -1064,7 +1053,7 @@ webrtc::MdnsResponderInterface* Network::GetMdnsResponder() const { uint16_t Network::GetCost() const { AdapterType type = IsVpn() ? underlying_type_for_vpn_ : type_; - return ComputeNetworkCostByType(type); + return ComputeNetworkCostByType(type, use_differentiated_cellular_costs_); } std::string Network::ToString() const { diff --git a/rtc_base/network.h b/rtc_base/network.h index a8608d936a..7103f0fa2d 100644 --- a/rtc_base/network.h +++ b/rtc_base/network.h @@ -23,8 +23,11 @@ #include "rtc_base/mdns_responder_interface.h" #include "rtc_base/message_handler.h" #include "rtc_base/network_monitor.h" +#include "rtc_base/network_monitor_factory.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" #if defined(WEBRTC_POSIX) struct ifaddrs; @@ -212,15 +215,20 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { // network id 0 because we only compare the network ids in the old and the new // best connections in the transport channel. uint16_t next_available_network_id_ = 1; + + // True if calling network_preference() with a changed value + // should result in firing the SignalNetworkChanged signal. + bool signal_network_preference_change_ = false; }; // Basic implementation of the NetworkManager interface that gets list // of networks using OS APIs. 
class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, - public MessageHandler, + public MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: BasicNetworkManager(); + explicit BasicNetworkManager(NetworkMonitorFactory* network_monitor_factory); ~BasicNetworkManager() override; void StartUpdating() override; @@ -234,60 +242,57 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, // Sets the network ignore list, which is empty by default. Any network on the // ignore list will be filtered from network enumeration results. + // Should be called only before initialization. void set_network_ignore_list(const std::vector& list) { + RTC_DCHECK(thread_ == nullptr); network_ignore_list_ = list; } -#if defined(WEBRTC_LINUX) - // Sets the flag for ignoring non-default routes. - // Defaults to false. - void set_ignore_non_default_routes(bool value) { - ignore_non_default_routes_ = value; - } -#endif - protected: #if defined(WEBRTC_POSIX) // Separated from CreateNetworks for tests. void ConvertIfAddrs(ifaddrs* interfaces, IfAddrsConverter* converter, bool include_ignored, - NetworkList* networks) const; + NetworkList* networks) const RTC_RUN_ON(thread_); #endif // defined(WEBRTC_POSIX) // Creates a network object for each network available on the machine. - bool CreateNetworks(bool include_ignored, NetworkList* networks) const; + bool CreateNetworks(bool include_ignored, NetworkList* networks) const + RTC_RUN_ON(thread_); // Determines if a network should be ignored. This should only be determined // based on the network's property instead of any individual IP. - bool IsIgnoredNetwork(const Network& network) const; + bool IsIgnoredNetwork(const Network& network) const RTC_RUN_ON(thread_); // This function connects a UDP socket to a public address and returns the // local address associated it. Since it binds to the "any" address // internally, it returns the default local address on a multi-homed endpoint. 
- IPAddress QueryDefaultLocalAddress(int family) const; + IPAddress QueryDefaultLocalAddress(int family) const RTC_RUN_ON(thread_); private: friend class NetworkTest; // Creates a network monitor and listens for network updates. - void StartNetworkMonitor(); + void StartNetworkMonitor() RTC_RUN_ON(thread_); // Stops and removes the network monitor. - void StopNetworkMonitor(); + void StopNetworkMonitor() RTC_RUN_ON(thread_); // Called when it receives updates from the network monitor. void OnNetworksChanged(); // Updates the networks and reschedules the next update. - void UpdateNetworksContinually(); + void UpdateNetworksContinually() RTC_RUN_ON(thread_); // Only updates the networks; does not reschedule the next update. - void UpdateNetworksOnce(); + void UpdateNetworksOnce() RTC_RUN_ON(thread_); - Thread* thread_; - bool sent_first_update_; - int start_count_; + Thread* thread_ = nullptr; + bool sent_first_update_ = true; + int start_count_ = 0; std::vector network_ignore_list_; - bool ignore_non_default_routes_; - std::unique_ptr network_monitor_; + NetworkMonitorFactory* network_monitor_factory_ RTC_GUARDED_BY(thread_) = + nullptr; + std::unique_ptr network_monitor_ + RTC_GUARDED_BY(thread_); }; // Represents a Unix-type network interface, with a name and single address. @@ -305,9 +310,13 @@ class RTC_EXPORT Network { AdapterType type); Network(const Network&); ~Network(); + // This signal is fired whenever type() or underlying_type_for_vpn() changes. sigslot::signal1 SignalTypeChanged; + // This signal is fired whenever network preference changes. 
+ sigslot::signal1 SignalNetworkPreferenceChanged; + const DefaultLocalAddressProvider* default_local_address_provider() { return default_local_address_provider_; } @@ -418,6 +427,21 @@ class RTC_EXPORT Network { bool IsVpn() const { return type_ == ADAPTER_TYPE_VPN; } + bool IsCellular() const { return IsCellular(type_); } + + static bool IsCellular(AdapterType type) { + switch (type) { + case ADAPTER_TYPE_CELLULAR: + case ADAPTER_TYPE_CELLULAR_2G: + case ADAPTER_TYPE_CELLULAR_3G: + case ADAPTER_TYPE_CELLULAR_4G: + case ADAPTER_TYPE_CELLULAR_5G: + return true; + default: + return false; + } + } + uint16_t GetCost() const; // A unique id assigned by the network manager, which may be signaled // to the remote side in the candidate. @@ -437,6 +461,17 @@ class RTC_EXPORT Network { } } + // Property set by operating system/firmware that has information + // about connection strength to e.g WIFI router or CELL base towers. + NetworkPreference network_preference() const { return network_preference_; } + void set_network_preference(NetworkPreference val) { + if (network_preference_ == val) { + return; + } + network_preference_ = val; + SignalNetworkPreferenceChanged(this); + } + // Debugging description of this network std::string ToString() const; @@ -456,6 +491,8 @@ class RTC_EXPORT Network { int preference_; bool active_ = true; uint16_t id_ = 0; + bool use_differentiated_cellular_costs_ = false; + NetworkPreference network_preference_ = NetworkPreference::NEUTRAL; friend class NetworkManager; }; diff --git a/rtc_base/network/BUILD.gn b/rtc_base/network/BUILD.gn index 1d06defb3b..35ae3d45f7 100644 --- a/rtc_base/network/BUILD.gn +++ b/rtc_base/network/BUILD.gn @@ -13,8 +13,6 @@ rtc_library("sent_packet") { "sent_packet.cc", "sent_packet.h", ] - deps = [ - "../system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", - ] + deps = [ "../system:rtc_export" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git 
a/rtc_base/network_constants.cc b/rtc_base/network_constants.cc new file mode 100644 index 0000000000..905aa3646c --- /dev/null +++ b/rtc_base/network_constants.cc @@ -0,0 +1,47 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/network_constants.h" + +#include "rtc_base/checks.h" + +namespace rtc { + +std::string AdapterTypeToString(AdapterType type) { + switch (type) { + case ADAPTER_TYPE_ANY: + return "Wildcard"; + case ADAPTER_TYPE_UNKNOWN: + return "Unknown"; + case ADAPTER_TYPE_ETHERNET: + return "Ethernet"; + case ADAPTER_TYPE_WIFI: + return "Wifi"; + case ADAPTER_TYPE_CELLULAR: + return "Cellular"; + case ADAPTER_TYPE_CELLULAR_2G: + return "Cellular2G"; + case ADAPTER_TYPE_CELLULAR_3G: + return "Cellular3G"; + case ADAPTER_TYPE_CELLULAR_4G: + return "Cellular4G"; + case ADAPTER_TYPE_CELLULAR_5G: + return "Cellular5G"; + case ADAPTER_TYPE_VPN: + return "VPN"; + case ADAPTER_TYPE_LOOPBACK: + return "Loopback"; + default: + RTC_NOTREACHED() << "Invalid type " << type; + return std::string(); + } +} + +} // namespace rtc diff --git a/rtc_base/network_constants.h b/rtc_base/network_constants.h index efb2c83455..cdb505f9d7 100644 --- a/rtc_base/network_constants.h +++ b/rtc_base/network_constants.h @@ -13,20 +13,29 @@ #include +#include + namespace rtc { -static const uint16_t kNetworkCostMax = 999; -static const uint16_t kNetworkCostHigh = 900; -static const uint16_t kNetworkCostUnknown = 50; -static const uint16_t kNetworkCostLow = 10; -static const uint16_t kNetworkCostMin = 0; +constexpr uint16_t kNetworkCostMax = 999; +constexpr uint16_t kNetworkCostCellular2G = 980; +constexpr 
uint16_t kNetworkCostCellular3G = 910; +constexpr uint16_t kNetworkCostCellular = 900; +constexpr uint16_t kNetworkCostCellular4G = 500; +constexpr uint16_t kNetworkCostCellular5G = 250; +constexpr uint16_t kNetworkCostUnknown = 50; +constexpr uint16_t kNetworkCostLow = 10; +constexpr uint16_t kNetworkCostMin = 0; + +// alias +constexpr uint16_t kNetworkCostHigh = kNetworkCostCellular; enum AdapterType { // This enum resembles the one in Chromium net::ConnectionType. ADAPTER_TYPE_UNKNOWN = 0, ADAPTER_TYPE_ETHERNET = 1 << 0, ADAPTER_TYPE_WIFI = 1 << 1, - ADAPTER_TYPE_CELLULAR = 1 << 2, + ADAPTER_TYPE_CELLULAR = 1 << 2, // This is CELLULAR of unknown type. ADAPTER_TYPE_VPN = 1 << 3, ADAPTER_TYPE_LOOPBACK = 1 << 4, // ADAPTER_TYPE_ANY is used for a network, which only contains a single "any @@ -35,8 +44,14 @@ enum AdapterType { // when the network uses a specific interface/IP, but its interface type can // not be determined or not fit in this enum. ADAPTER_TYPE_ANY = 1 << 5, + ADAPTER_TYPE_CELLULAR_2G = 1 << 6, + ADAPTER_TYPE_CELLULAR_3G = 1 << 7, + ADAPTER_TYPE_CELLULAR_4G = 1 << 8, + ADAPTER_TYPE_CELLULAR_5G = 1 << 9 }; +std::string AdapterTypeToString(AdapterType type); + } // namespace rtc #endif // RTC_BASE_NETWORK_CONSTANTS_H_ diff --git a/rtc_base/network_monitor.cc b/rtc_base/network_monitor.cc index 4eb52901f3..70c2ad5020 100644 --- a/rtc_base/network_monitor.cc +++ b/rtc_base/network_monitor.cc @@ -10,62 +10,21 @@ #include "rtc_base/network_monitor.h" -#include - #include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" - -namespace { -const uint32_t UPDATE_NETWORKS_MESSAGE = 1; - -// This is set by NetworkMonitorFactory::SetFactory and the caller of -// NetworkMonitorFactory::SetFactory must be responsible for calling -// ReleaseFactory to destroy the factory. 
-rtc::NetworkMonitorFactory* network_monitor_factory = nullptr; -} // namespace namespace rtc { -NetworkMonitorInterface::NetworkMonitorInterface() {} - -NetworkMonitorInterface::~NetworkMonitorInterface() {} - -NetworkMonitorBase::NetworkMonitorBase() : worker_thread_(Thread::Current()) {} -NetworkMonitorBase::~NetworkMonitorBase() {} - -void NetworkMonitorBase::OnNetworksChanged() { - RTC_LOG(LS_VERBOSE) << "Network change is received at the network monitor"; - worker_thread_->Post(RTC_FROM_HERE, this, UPDATE_NETWORKS_MESSAGE); -} - -void NetworkMonitorBase::OnMessage(Message* msg) { - RTC_DCHECK(msg->message_id == UPDATE_NETWORKS_MESSAGE); - SignalNetworksChanged(); -} - -AdapterType NetworkMonitorBase::GetVpnUnderlyingAdapterType( - const std::string& interface_name) { - return ADAPTER_TYPE_UNKNOWN; -} - -NetworkMonitorFactory::NetworkMonitorFactory() {} -NetworkMonitorFactory::~NetworkMonitorFactory() {} - -void NetworkMonitorFactory::SetFactory(NetworkMonitorFactory* factory) { - if (network_monitor_factory != nullptr) { - delete network_monitor_factory; - } - network_monitor_factory = factory; -} -void NetworkMonitorFactory::ReleaseFactory(NetworkMonitorFactory* factory) { - if (factory == network_monitor_factory) { - SetFactory(nullptr); +const char* NetworkPreferenceToString(NetworkPreference preference) { + switch (preference) { + case NetworkPreference::NEUTRAL: + return "NEUTRAL"; + case NetworkPreference::NOT_PREFERRED: + return "NOT_PREFERRED"; } + RTC_CHECK_NOTREACHED(); } -NetworkMonitorFactory* NetworkMonitorFactory::GetFactory() { - return network_monitor_factory; -} +NetworkMonitorInterface::NetworkMonitorInterface() {} +NetworkMonitorInterface::~NetworkMonitorInterface() {} } // namespace rtc diff --git a/rtc_base/network_monitor.h b/rtc_base/network_monitor.h index ed4464db55..4a3002f427 100644 --- a/rtc_base/network_monitor.h +++ b/rtc_base/network_monitor.h @@ -13,7 +13,6 @@ #include "rtc_base/network_constants.h" #include 
"rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" namespace rtc { @@ -27,6 +26,16 @@ enum class NetworkBindingResult { NETWORK_CHANGED = -4 }; +// NetworkPreference property set by operating system/firmware that has +// information about connection strength to e.g WIFI router or CELL base towers. +// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc +enum class NetworkPreference { + NEUTRAL = 0, + NOT_PREFERRED = -1, +}; + +const char* NetworkPreferenceToString(NetworkPreference preference); + class NetworkBinderInterface { public: // Binds a socket to the network that is attached to |address| so that all @@ -53,8 +62,7 @@ class NetworkBinderInterface { * * Memory consideration: * NetworkMonitor is owned by the caller (NetworkManager). The global network - * monitor factory is owned by the factory itself but needs to be released from - * the factory creator. + * monitor factory is owned by the PeerConnectionFactory. */ // Generic network monitor interface. It starts and stops monitoring network // changes, and fires the SignalNetworksChanged event when networks change. @@ -68,54 +76,25 @@ class NetworkMonitorInterface { virtual void Start() = 0; virtual void Stop() = 0; - // Implementations should call this method on the base when networks change, - // and the base will fire SignalNetworksChanged on the right thread. 
- virtual void OnNetworksChanged() = 0; - virtual AdapterType GetAdapterType(const std::string& interface_name) = 0; virtual AdapterType GetVpnUnderlyingAdapterType( const std::string& interface_name) = 0; -}; - -class NetworkMonitorBase : public NetworkMonitorInterface, - public MessageHandler, - public sigslot::has_slots<> { - public: - NetworkMonitorBase(); - ~NetworkMonitorBase() override; - - void OnNetworksChanged() override; - - void OnMessage(Message* msg) override; - - AdapterType GetVpnUnderlyingAdapterType( - const std::string& interface_name) override; - protected: - Thread* worker_thread() { return worker_thread_; } - - private: - Thread* worker_thread_; -}; - -/* - * NetworkMonitorFactory creates NetworkMonitors. - */ -class NetworkMonitorFactory { - public: - // This is not thread-safe; it should be called once (or once per audio/video - // call) during the call initialization. - static void SetFactory(NetworkMonitorFactory* factory); - - static void ReleaseFactory(NetworkMonitorFactory* factory); - static NetworkMonitorFactory* GetFactory(); - - virtual NetworkMonitorInterface* CreateNetworkMonitor() = 0; - - virtual ~NetworkMonitorFactory(); + virtual NetworkPreference GetNetworkPreference( + const std::string& interface_name) = 0; - protected: - NetworkMonitorFactory(); + // Is this interface available to use? WebRTC shouldn't attempt to use it if + // this returns false. + // + // It's possible for this status to change, in which case + // SignalNetworksChanged will be fired. + // + // These specific use case this was added for was a phone with two SIM cards, + // where attempting to use all interfaces returned from getifaddrs caused the + // connection to be dropped. 
+ virtual bool IsAdapterAvailable(const std::string& interface_name) { + return true; + } }; } // namespace rtc diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h b/rtc_base/network_monitor_factory.cc similarity index 58% rename from sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h rename to rtc_base/network_monitor_factory.cc index 64c49977c0..9fac4d95a0 100644 --- a/sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h +++ b/rtc_base/network_monitor_factory.cc @@ -1,5 +1,5 @@ /* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,4 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. */ -#import "components/audio/RTCAudioSession.h" +#include "rtc_base/network_monitor_factory.h" + +namespace rtc { + +NetworkMonitorFactory::NetworkMonitorFactory() {} +NetworkMonitorFactory::~NetworkMonitorFactory() {} + +} // namespace rtc diff --git a/rtc_base/network_monitor_factory.h b/rtc_base/network_monitor_factory.h new file mode 100644 index 0000000000..dadcd4aa8a --- /dev/null +++ b/rtc_base/network_monitor_factory.h @@ -0,0 +1,37 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_NETWORK_MONITOR_FACTORY_H_ +#define RTC_BASE_NETWORK_MONITOR_FACTORY_H_ + +namespace rtc { + +// Forward declaring this so it's not part of the API surface; it's only +// expected to be used by Android/iOS SDK code. 
+class NetworkMonitorInterface; + +/* + * NetworkMonitorFactory creates NetworkMonitors. + * Note that CreateNetworkMonitor is expected to be called on the network + * thread with the returned object only being used on that thread thereafter. + */ +class NetworkMonitorFactory { + public: + virtual NetworkMonitorInterface* CreateNetworkMonitor() = 0; + + virtual ~NetworkMonitorFactory(); + + protected: + NetworkMonitorFactory(); +}; + +} // namespace rtc + +#endif // RTC_BASE_NETWORK_MONITOR_FACTORY_H_ diff --git a/rtc_base/network_route.cc b/rtc_base/network_route.cc new file mode 100644 index 0000000000..80d135a92c --- /dev/null +++ b/rtc_base/network_route.cc @@ -0,0 +1,27 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/network_route.h" + +namespace rtc { + +bool RouteEndpoint::operator==(const RouteEndpoint& other) const { + return adapter_type_ == other.adapter_type_ && + adapter_id_ == other.adapter_id_ && network_id_ == other.network_id_ && + uses_turn_ == other.uses_turn_; +} + +bool NetworkRoute::operator==(const NetworkRoute& other) const { + return connected == other.connected && local == other.local && + remote == other.remote && packet_overhead == other.packet_overhead && + last_sent_packet_id == other.last_sent_packet_id; +} + +} // namespace rtc diff --git a/rtc_base/network_route.h b/rtc_base/network_route.h index 6a8f183513..17b43e5b69 100644 --- a/rtc_base/network_route.h +++ b/rtc_base/network_route.h @@ -13,21 +13,83 @@ #include +#include + +#include "rtc_base/network_constants.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/inline.h" + // TODO(honghaiz): Make a directory that describes the interfaces and structs // the media code can rely on and the network code can implement, and both can // depend on that, but not depend on each other. Then, move this file to that // directory. namespace rtc { +class RouteEndpoint { + public: + RouteEndpoint() {} // Used by tests. + RouteEndpoint(AdapterType adapter_type, + uint16_t adapter_id, + uint16_t network_id, + bool uses_turn) + : adapter_type_(adapter_type), + adapter_id_(adapter_id), + network_id_(network_id), + uses_turn_(uses_turn) {} + + RouteEndpoint(const RouteEndpoint&) = default; + RouteEndpoint& operator=(const RouteEndpoint&) = default; + + // Used by tests. 
+ static RouteEndpoint CreateWithNetworkId(uint16_t network_id) { + return RouteEndpoint(ADAPTER_TYPE_UNKNOWN, + /* adapter_id = */ 0, network_id, + /* uses_turn = */ false); + } + RouteEndpoint CreateWithTurn(bool uses_turn) const { + return RouteEndpoint(adapter_type_, adapter_id_, network_id_, uses_turn); + } + + AdapterType adapter_type() const { return adapter_type_; } + uint16_t adapter_id() const { return adapter_id_; } + uint16_t network_id() const { return network_id_; } + bool uses_turn() const { return uses_turn_; } + + bool operator==(const RouteEndpoint& other) const; + + private: + AdapterType adapter_type_ = ADAPTER_TYPE_UNKNOWN; + uint16_t adapter_id_ = 0; + uint16_t network_id_ = 0; + bool uses_turn_ = false; +}; + struct NetworkRoute { bool connected = false; - uint16_t local_network_id = 0; - uint16_t remote_network_id = 0; + RouteEndpoint local; + RouteEndpoint remote; // Last packet id sent on the PREVIOUS route. int last_sent_packet_id = -1; // The overhead in bytes from IP layer and above. + // This is the maximum of any part of the route. 
int packet_overhead = 0; + + RTC_NO_INLINE inline std::string DebugString() const { + rtc::StringBuilder oss; + oss << "[ connected: " << connected << " local: [ " << local.adapter_id() + << "/" << local.network_id() << " " + << AdapterTypeToString(local.adapter_type()) + << " turn: " << local.uses_turn() << " ] remote: [ " + << remote.adapter_id() << "/" << remote.network_id() << " " + << AdapterTypeToString(remote.adapter_type()) + << " turn: " << remote.uses_turn() + << " ] packet_overhead_bytes: " << packet_overhead << " ]"; + return oss.Release(); + } + + bool operator==(const NetworkRoute& other) const; }; + } // namespace rtc #endif // RTC_BASE_NETWORK_ROUTE_H_ diff --git a/rtc_base/network_route_unittest.cc b/rtc_base/network_route_unittest.cc new file mode 100644 index 0000000000..485683b71f --- /dev/null +++ b/rtc_base/network_route_unittest.cc @@ -0,0 +1,24 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/network_route.h" + +#include "rtc_base/gunit.h" +#include "test/gmock.h" + +namespace rtc { + +TEST(NetworkRoute, Equals) { + NetworkRoute r1; + NetworkRoute r2 = r1; + EXPECT_TRUE(r1 == r2); +} + +} // namespace rtc diff --git a/rtc_base/network_unittest.cc b/rtc_base/network_unittest.cc index db97d0749c..13f888e04e 100644 --- a/rtc_base/network_unittest.cc +++ b/rtc_base/network_unittest.cc @@ -12,12 +12,16 @@ #include +#include #include #include +#include "absl/algorithm/container.h" +#include "absl/strings/match.h" #include "rtc_base/checks.h" #include "rtc_base/net_helpers.h" #include "rtc_base/network_monitor.h" +#include "rtc_base/network_monitor_factory.h" #if defined(WEBRTC_POSIX) #include #include @@ -39,7 +43,7 @@ namespace rtc { namespace { -class FakeNetworkMonitor : public NetworkMonitorBase { +class FakeNetworkMonitor : public NetworkMonitorInterface { public: void Start() override { started_ = true; } void Stop() override { started_ = false; } @@ -47,17 +51,33 @@ class FakeNetworkMonitor : public NetworkMonitorBase { AdapterType GetAdapterType(const std::string& if_name) override { // Note that the name matching rules are different from the // GetAdapterTypeFromName in NetworkManager. - if (if_name.find("wifi") == 0) { + if (absl::StartsWith(if_name, "wifi")) { return ADAPTER_TYPE_WIFI; } - if (if_name.find("cellular") == 0) { + if (absl::StartsWith(if_name, "cellular")) { return ADAPTER_TYPE_CELLULAR; } return ADAPTER_TYPE_UNKNOWN; } + AdapterType GetVpnUnderlyingAdapterType(const std::string& if_name) override { + return ADAPTER_TYPE_UNKNOWN; + } + NetworkPreference GetNetworkPreference(const std::string& if_name) override { + return NetworkPreference::NEUTRAL; + } + + bool IsAdapterAvailable(const std::string& if_name) override { + return absl::c_count(unavailable_adapters_, if_name) == 0; + } + + // Used to test IsAdapterAvailable. 
+ void set_unavailable_adapters(std::vector unavailable_adapters) { + unavailable_adapters_ = unavailable_adapters; + } private: bool started_ = false; + std::vector unavailable_adapters_; }; class FakeNetworkMonitorFactory : public NetworkMonitorFactory { @@ -99,18 +119,27 @@ class NetworkTest : public ::testing::Test, public sigslot::has_slots<> { bool IsIgnoredNetwork(BasicNetworkManager& network_manager, const Network& network) { + RTC_DCHECK_RUN_ON(network_manager.thread_); return network_manager.IsIgnoredNetwork(network); } + IPAddress QueryDefaultLocalAddress(BasicNetworkManager& network_manager, + int family) { + RTC_DCHECK_RUN_ON(network_manager.thread_); + return network_manager.QueryDefaultLocalAddress(family); + } + NetworkManager::NetworkList GetNetworks( const BasicNetworkManager& network_manager, bool include_ignored) { + RTC_DCHECK_RUN_ON(network_manager.thread_); NetworkManager::NetworkList list; network_manager.CreateNetworks(include_ignored, &list); return list; } FakeNetworkMonitor* GetNetworkMonitor(BasicNetworkManager& network_manager) { + RTC_DCHECK_RUN_ON(network_manager.thread_); return static_cast( network_manager.network_monitor_.get()); } @@ -135,6 +164,7 @@ class NetworkTest : public ::testing::Test, public sigslot::has_slots<> { struct ifaddrs* interfaces, bool include_ignored, NetworkManager::NetworkList* networks) { + RTC_DCHECK_RUN_ON(network_manager.thread_); // Use the base IfAddrsConverter for test cases. 
std::unique_ptr ifaddrs_converter(new IfAddrsConverter()); network_manager.ConvertIfAddrs(interfaces, ifaddrs_converter.get(), @@ -246,6 +276,8 @@ class NetworkTest : public ::testing::Test, public sigslot::has_slots<> { class TestBasicNetworkManager : public BasicNetworkManager { public: + TestBasicNetworkManager(NetworkMonitorFactory* network_monitor_factory) + : BasicNetworkManager(network_monitor_factory) {} using BasicNetworkManager::QueryDefaultLocalAddress; using BasicNetworkManager::set_default_local_addresses; }; @@ -267,6 +299,7 @@ TEST_F(NetworkTest, TestIsIgnoredNetworkIgnoresIPsStartingWith0) { Network ipv4_network2("test_eth1", "Test Network Adapter 2", IPAddress(0x010000U), 24, ADAPTER_TYPE_ETHERNET); BasicNetworkManager network_manager; + network_manager.StartUpdating(); EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network1)); EXPECT_TRUE(IsIgnoredNetwork(network_manager, ipv4_network2)); } @@ -277,14 +310,18 @@ TEST_F(NetworkTest, TestIgnoreList) { 24); Network include_me("include_me", "Include me please!", IPAddress(0x12345600U), 24); - BasicNetworkManager network_manager; - EXPECT_FALSE(IsIgnoredNetwork(network_manager, ignore_me)); - EXPECT_FALSE(IsIgnoredNetwork(network_manager, include_me)); + BasicNetworkManager default_network_manager; + default_network_manager.StartUpdating(); + EXPECT_FALSE(IsIgnoredNetwork(default_network_manager, ignore_me)); + EXPECT_FALSE(IsIgnoredNetwork(default_network_manager, include_me)); + + BasicNetworkManager ignoring_network_manager; std::vector ignore_list; ignore_list.push_back("ignore_me"); - network_manager.set_network_ignore_list(ignore_list); - EXPECT_TRUE(IsIgnoredNetwork(network_manager, ignore_me)); - EXPECT_FALSE(IsIgnoredNetwork(network_manager, include_me)); + ignoring_network_manager.set_network_ignore_list(ignore_list); + ignoring_network_manager.StartUpdating(); + EXPECT_TRUE(IsIgnoredNetwork(ignoring_network_manager, ignore_me)); + EXPECT_FALSE(IsIgnoredNetwork(ignoring_network_manager, 
include_me)); } // Test is failing on Windows opt: b/11288214 @@ -648,6 +685,7 @@ TEST_F(NetworkTest, TestMultiplePublicNetworksOnOneInterfaceMerge) { // Test that DumpNetworks does not crash. TEST_F(NetworkTest, TestCreateAndDumpNetworks) { BasicNetworkManager manager; + manager.StartUpdating(); NetworkManager::NetworkList list = GetNetworks(manager, true); bool changed; MergeNetworkList(manager, list, &changed); @@ -656,6 +694,7 @@ TEST_F(NetworkTest, TestCreateAndDumpNetworks) { TEST_F(NetworkTest, TestIPv6Toggle) { BasicNetworkManager manager; + manager.StartUpdating(); bool ipv6_found = false; NetworkManager::NetworkList list; list = GetNetworks(manager, true); @@ -752,6 +791,7 @@ TEST_F(NetworkTest, TestConvertIfAddrsNoAddress) { NetworkManager::NetworkList result; BasicNetworkManager manager; + manager.StartUpdating(); CallConvertIfAddrs(manager, &list, true, &result); EXPECT_TRUE(result.empty()); } @@ -767,6 +807,7 @@ TEST_F(NetworkTest, TestConvertIfAddrsMultiAddressesOnOneInterface) { "FFFF:FFFF:FFFF:FFFF::", 0); NetworkManager::NetworkList result; BasicNetworkManager manager; + manager.StartUpdating(); CallConvertIfAddrs(manager, list, true, &result); EXPECT_EQ(1U, result.size()); bool changed; @@ -786,46 +827,35 @@ TEST_F(NetworkTest, TestConvertIfAddrsNotRunning) { NetworkManager::NetworkList result; BasicNetworkManager manager; + manager.StartUpdating(); CallConvertIfAddrs(manager, &list, true, &result); EXPECT_TRUE(result.empty()); } -// Tests that the network type can be updated after the network monitor is -// started. +// Tests that the network type can be determined from the network monitor when +// it would otherwise be unknown. 
TEST_F(NetworkTest, TestGetAdapterTypeFromNetworkMonitor) { - char if_name1[20] = "wifi0"; - std::string ipv6_address1 = "1000:2000:3000:4000:0:0:0:1"; - std::string ipv6_address2 = "1000:2000:3000:8000:0:0:0:1"; + char if_name[20] = "wifi0"; + std::string ipv6_address = "1000:2000:3000:4000:0:0:0:1"; std::string ipv6_mask = "FFFF:FFFF:FFFF:FFFF::"; - BasicNetworkManager manager; - // A network created before the network monitor is started will get - // UNKNOWN type. - ifaddrs* addr_list = - InstallIpv6Network(if_name1, ipv6_address1, ipv6_mask, manager); - EXPECT_EQ(ADAPTER_TYPE_UNKNOWN, GetAdapterType(manager)); + BasicNetworkManager manager_without_monitor; + manager_without_monitor.StartUpdating(); + // A network created without a network monitor will get UNKNOWN type. + ifaddrs* addr_list = InstallIpv6Network(if_name, ipv6_address, ipv6_mask, + manager_without_monitor); + EXPECT_EQ(ADAPTER_TYPE_UNKNOWN, GetAdapterType(manager_without_monitor)); ReleaseIfAddrs(addr_list); - // Note: Do not call ClearNetworks here in order to test that the type - // of an existing network can be changed after the network monitor starts - // and detects the network type correctly. - - // After the network monitor starts, the type will be updated. - FakeNetworkMonitorFactory* factory = new FakeNetworkMonitorFactory(); - NetworkMonitorFactory::SetFactory(factory); - // This brings up the hook with the network monitor. - manager.StartUpdating(); + + // With the fake network monitor the type should be correctly determined. + FakeNetworkMonitorFactory factory; + BasicNetworkManager manager_with_monitor(&factory); + manager_with_monitor.StartUpdating(); // Add the same ipv6 address as before but it has the right network type // detected by the network monitor now. 
- addr_list = InstallIpv6Network(if_name1, ipv6_address1, ipv6_mask, manager); - EXPECT_EQ(ADAPTER_TYPE_WIFI, GetAdapterType(manager)); - ReleaseIfAddrs(addr_list); - ClearNetworks(manager); - - // Add another network with the type inferred from the network monitor. - char if_name2[20] = "cellular0"; - addr_list = InstallIpv6Network(if_name2, ipv6_address2, ipv6_mask, manager); - EXPECT_EQ(ADAPTER_TYPE_CELLULAR, GetAdapterType(manager)); + addr_list = InstallIpv6Network(if_name, ipv6_address, ipv6_mask, + manager_with_monitor); + EXPECT_EQ(ADAPTER_TYPE_WIFI, GetAdapterType(manager_with_monitor)); ReleaseIfAddrs(addr_list); - ClearNetworks(manager); } // Test that the network type can be determined based on name matching in @@ -838,6 +868,7 @@ TEST_F(NetworkTest, TestGetAdapterTypeFromNameMatching) { std::string ipv6_address2 = "1000:2000:3000:8000:0:0:0:1"; std::string ipv6_mask = "FFFF:FFFF:FFFF:FFFF::"; BasicNetworkManager manager; + manager.StartUpdating(); // IPSec interface; name is in form "ipsec". char if_name[20] = "ipsec11"; @@ -898,52 +929,42 @@ TEST_F(NetworkTest, TestGetAdapterTypeFromNameMatching) { ReleaseIfAddrs(addr_list); #endif } -#endif // defined(WEBRTC_POSIX) -#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) -// If you want to test non-default routes, you can do the following on a linux -// machine: -// 1) Load the dummy network driver: -// sudo modprobe dummy -// sudo ifconfig dummy0 127.0.0.1 -// 2) Run this test and confirm the output says it found a dummy route (and -// passes). 
-// 3) When done: -// sudo rmmmod dummy -TEST_F(NetworkTest, TestIgnoreNonDefaultRoutes) { - BasicNetworkManager manager; - NetworkManager::NetworkList list; - list = GetNetworks(manager, false); - bool found_dummy = false; - RTC_LOG(LS_INFO) << "Looking for dummy network: "; - for (NetworkManager::NetworkList::iterator it = list.begin(); - it != list.end(); ++it) { - RTC_LOG(LS_INFO) << " Network name: " << (*it)->name(); - found_dummy |= (*it)->name().find("dummy0") != std::string::npos; - } - for (NetworkManager::NetworkList::iterator it = list.begin(); - it != list.end(); ++it) { - delete (*it); - } - if (!found_dummy) { - RTC_LOG(LS_INFO) << "No dummy found, quitting."; - return; - } - RTC_LOG(LS_INFO) << "Found dummy, running again while ignoring non-default " - "routes."; - manager.set_ignore_non_default_routes(true); - list = GetNetworks(manager, false); - for (NetworkManager::NetworkList::iterator it = list.begin(); - it != list.end(); ++it) { - RTC_LOG(LS_INFO) << " Network name: " << (*it)->name(); - EXPECT_TRUE((*it)->name().find("dummy0") == std::string::npos); - } - for (NetworkManager::NetworkList::iterator it = list.begin(); - it != list.end(); ++it) { - delete (*it); - } +// Test that an adapter won't be included in the network list if there's a +// network monitor that says it's unavailable. +TEST_F(NetworkTest, TestNetworkMonitorIsAdapterAvailable) { + char if_name1[20] = "pdp_ip0"; + char if_name2[20] = "pdp_ip1"; + ifaddrs* list = nullptr; + list = AddIpv6Address(list, if_name1, "1000:2000:3000:4000:0:0:0:1", + "FFFF:FFFF:FFFF:FFFF::", 0); + list = AddIpv6Address(list, if_name2, "1000:2000:3000:4000:0:0:0:2", + "FFFF:FFFF:FFFF:FFFF::", 0); + NetworkManager::NetworkList result; + + // Sanity check that both interfaces are included by default. 
+ FakeNetworkMonitorFactory factory; + BasicNetworkManager manager(&factory); + manager.StartUpdating(); + CallConvertIfAddrs(manager, list, /*include_ignored=*/false, &result); + EXPECT_EQ(2u, result.size()); + bool changed; + // This ensures we release the objects created in CallConvertIfAddrs. + MergeNetworkList(manager, result, &changed); + result.clear(); + + // Now simulate one interface being unavailable. + FakeNetworkMonitor* network_monitor = GetNetworkMonitor(manager); + network_monitor->set_unavailable_adapters({if_name1}); + CallConvertIfAddrs(manager, list, /*include_ignored=*/false, &result); + EXPECT_EQ(1u, result.size()); + EXPECT_EQ(if_name2, result[0]->name()); + + MergeNetworkList(manager, result, &changed); + ReleaseIfAddrs(list); } -#endif + +#endif // defined(WEBRTC_POSIX) // Test MergeNetworkList successfully combines all IPs for the same // prefix/length into a single Network. @@ -1066,11 +1087,10 @@ TEST_F(NetworkTest, TestIPv6Selection) { } TEST_F(NetworkTest, TestNetworkMonitoring) { - BasicNetworkManager manager; + FakeNetworkMonitorFactory factory; + BasicNetworkManager manager(&factory); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); - FakeNetworkMonitorFactory* factory = new FakeNetworkMonitorFactory(); - NetworkMonitorFactory::SetFactory(factory); manager.StartUpdating(); FakeNetworkMonitor* network_monitor = GetNetworkMonitor(manager); EXPECT_TRUE(network_monitor && network_monitor->started()); @@ -1081,14 +1101,12 @@ TEST_F(NetworkTest, TestNetworkMonitoring) { ClearNetworks(manager); // Network manager is started, so the callback is called when the network // monitor fires the network-change event. - network_monitor->OnNetworksChanged(); + network_monitor->SignalNetworksChanged(); EXPECT_TRUE_WAIT(callback_called_, 1000); // Network manager is stopped. 
manager.StopUpdating(); EXPECT_FALSE(GetNetworkMonitor(manager)->started()); - - NetworkMonitorFactory::ReleaseFactory(factory); } // Fails on Android: https://bugs.chromium.org/p/webrtc/issues/detail?id=4364. @@ -1099,11 +1117,10 @@ TEST_F(NetworkTest, TestNetworkMonitoring) { #endif TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { IPAddress ip; - TestBasicNetworkManager manager; + FakeNetworkMonitorFactory factory; + TestBasicNetworkManager manager(&factory); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); - FakeNetworkMonitorFactory* factory = new FakeNetworkMonitorFactory(); - NetworkMonitorFactory::SetFactory(factory); manager.StartUpdating(); EXPECT_TRUE_WAIT(callback_called_, 1000); @@ -1114,12 +1131,12 @@ TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { EXPECT_TRUE(!networks.empty()); for (const auto* network : networks) { if (network->GetBestIP().family() == AF_INET) { - EXPECT_TRUE(manager.QueryDefaultLocalAddress(AF_INET) != IPAddress()); + EXPECT_TRUE(QueryDefaultLocalAddress(manager, AF_INET) != IPAddress()); } else if (network->GetBestIP().family() == AF_INET6 && !IPIsLoopback(network->GetBestIP())) { // Existence of an IPv6 loopback address doesn't mean it has IPv6 network // enabled. - EXPECT_TRUE(manager.QueryDefaultLocalAddress(AF_INET6) != IPAddress()); + EXPECT_TRUE(QueryDefaultLocalAddress(manager, AF_INET6) != IPAddress()); } } @@ -1162,4 +1179,65 @@ TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { manager.StopUpdating(); } +// Test that MergeNetworkList does not set change = true +// when changing from cellular_X to cellular_Y. 
+TEST_F(NetworkTest, TestWhenNetworkListChangeReturnsChangedFlag) { + BasicNetworkManager manager; + + IPAddress ip1; + EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); + Network* net1 = new Network("em1", "em1", TruncateIP(ip1, 64), 64); + net1->set_type(ADAPTER_TYPE_CELLULAR_3G); + net1->AddIP(ip1); + NetworkManager::NetworkList list; + list.push_back(net1); + + { + bool changed; + MergeNetworkList(manager, list, &changed); + EXPECT_TRUE(changed); + NetworkManager::NetworkList list2; + manager.GetNetworks(&list2); + EXPECT_EQ(list2.size(), 1uL); + EXPECT_EQ(ADAPTER_TYPE_CELLULAR_3G, list2[0]->type()); + } + + // Modify net1 from 3G to 4G + { + Network* net2 = new Network("em1", "em1", TruncateIP(ip1, 64), 64); + net2->set_type(ADAPTER_TYPE_CELLULAR_4G); + net2->AddIP(ip1); + list.clear(); + list.push_back(net2); + bool changed; + MergeNetworkList(manager, list, &changed); + + // Change from 3G to 4G shall not trigger OnNetworksChanged, + // i.e changed = false. + EXPECT_FALSE(changed); + NetworkManager::NetworkList list2; + manager.GetNetworks(&list2); + ASSERT_EQ(list2.size(), 1uL); + EXPECT_EQ(ADAPTER_TYPE_CELLULAR_4G, list2[0]->type()); + } + + // Don't modify. + { + Network* net2 = new Network("em1", "em1", TruncateIP(ip1, 64), 64); + net2->set_type(ADAPTER_TYPE_CELLULAR_4G); + net2->AddIP(ip1); + list.clear(); + list.push_back(net2); + bool changed; + MergeNetworkList(manager, list, &changed); + + // No change. 
+ EXPECT_FALSE(changed); + NetworkManager::NetworkList list2; + manager.GetNetworks(&list2); + ASSERT_EQ(list2.size(), 1uL); + EXPECT_EQ(ADAPTER_TYPE_CELLULAR_4G, list2[0]->type()); + } +} + } // namespace rtc diff --git a/rtc_base/null_socket_server.h b/rtc_base/null_socket_server.h index da22c79160..7afa504190 100644 --- a/rtc_base/null_socket_server.h +++ b/rtc_base/null_socket_server.h @@ -15,10 +15,11 @@ #include "rtc_base/event.h" #include "rtc_base/socket.h" #include "rtc_base/socket_server.h" +#include "rtc_base/system/rtc_export.h" namespace rtc { -class NullSocketServer : public SocketServer { +class RTC_EXPORT NullSocketServer : public SocketServer { public: NullSocketServer(); ~NullSocketServer() override; diff --git a/rtc_base/null_socket_server_unittest.cc b/rtc_base/null_socket_server_unittest.cc index 39c16313b1..a875d6c284 100644 --- a/rtc_base/null_socket_server_unittest.cc +++ b/rtc_base/null_socket_server_unittest.cc @@ -25,7 +25,8 @@ namespace rtc { static const uint32_t kTimeout = 5000U; -class NullSocketServerTest : public ::testing::Test, public MessageHandler { +class NullSocketServerTest : public ::testing::Test, + public MessageHandlerAutoCleanup { protected: void OnMessage(Message* message) override { ss_.WakeUp(); } diff --git a/rtc_base/numerics/math_utils.h b/rtc_base/numerics/math_utils.h index 4bf48e22bb..0f1d51b090 100644 --- a/rtc_base/numerics/math_utils.h +++ b/rtc_base/numerics/math_utils.h @@ -8,14 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_NUMERICS_MATH_UTILS_H_ -#define RTC_BASE_NUMERICS_MATH_UTILS_H_ +#ifndef API_NUMERICS_MATH_UTILS_H_ +#define API_NUMERICS_MATH_UTILS_H_ #include #include #include "rtc_base/checks.h" +namespace webrtc { +namespace webrtc_impl { // Given two numbers |x| and |y| such that x >= y, computes the difference // x - y without causing undefined behavior due to signed overflow. 
template @@ -67,4 +69,7 @@ constexpr T minus_infinity_or_min() { return std::numeric_limits::min(); } -#endif // RTC_BASE_NUMERICS_MATH_UTILS_H_ +} // namespace webrtc_impl +} // namespace webrtc + +#endif // API_NUMERICS_MATH_UTILS_H_ diff --git a/rtc_base/numerics/moving_median_filter.h b/rtc_base/numerics/moving_median_filter.h index 723679ec8e..4abaeff5bc 100644 --- a/rtc_base/numerics/moving_median_filter.h +++ b/rtc_base/numerics/moving_median_filter.h @@ -13,6 +13,7 @@ #include +#include #include #include "rtc_base/checks.h" @@ -38,6 +39,9 @@ class MovingMedianFilter { // Get median over the latest window. T GetFilteredValue() const; + // The number of samples that are currently stored. + size_t GetNumberOfSamplesStored() const; + private: PercentileFilter percentile_filter_; std::list samples_; @@ -77,5 +81,10 @@ void MovingMedianFilter::Reset() { samples_stored_ = 0; } +template +size_t MovingMedianFilter::GetNumberOfSamplesStored() const { + return samples_stored_; +} + } // namespace webrtc #endif // RTC_BASE_NUMERICS_MOVING_MEDIAN_FILTER_H_ diff --git a/rtc_base/numerics/moving_median_filter_unittest.cc b/rtc_base/numerics/moving_median_filter_unittest.cc index 41684b2b5c..12c1114c25 100644 --- a/rtc_base/numerics/moving_median_filter_unittest.cc +++ b/rtc_base/numerics/moving_median_filter_unittest.cc @@ -11,6 +11,7 @@ #include "rtc_base/numerics/moving_median_filter.h" #include +#include #include "test/gtest.h" @@ -19,15 +20,17 @@ namespace webrtc { TEST(MovingMedianFilterTest, ProcessesNoSamples) { MovingMedianFilter filter(2); EXPECT_EQ(0, filter.GetFilteredValue()); + EXPECT_EQ(0u, filter.GetNumberOfSamplesStored()); } TEST(MovingMedianFilterTest, ReturnsMovingMedianWindow5) { MovingMedianFilter filter(5); const int64_t kSamples[5] = {1, 5, 2, 3, 4}; const int64_t kExpectedFilteredValues[5] = {1, 1, 2, 2, 3}; - for (int i = 0; i < 5; ++i) { + for (size_t i = 0; i < 5; ++i) { filter.Insert(kSamples[i]); EXPECT_EQ(kExpectedFilteredValues[i], 
filter.GetFilteredValue()); + EXPECT_EQ(i + 1, filter.GetNumberOfSamplesStored()); } } @@ -38,6 +41,7 @@ TEST(MovingMedianFilterTest, ReturnsMovingMedianWindow3) { for (int i = 0; i < 5; ++i) { filter.Insert(kSamples[i]); EXPECT_EQ(kExpectedFilteredValues[i], filter.GetFilteredValue()); + EXPECT_EQ(std::min(i + 1, 3), filter.GetNumberOfSamplesStored()); } } @@ -48,6 +52,7 @@ TEST(MovingMedianFilterTest, ReturnsMovingMedianWindow1) { for (int i = 0; i < 5; ++i) { filter.Insert(kSamples[i]); EXPECT_EQ(kExpectedFilteredValues[i], filter.GetFilteredValue()); + EXPECT_EQ(1u, filter.GetNumberOfSamplesStored()); } } diff --git a/rtc_base/numerics/running_statistics.h b/rtc_base/numerics/running_statistics.h index 4a3516d3f6..bbcc7e2a73 100644 --- a/rtc_base/numerics/running_statistics.h +++ b/rtc_base/numerics/running_statistics.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_NUMERICS_RUNNING_STATISTICS_H_ -#define RTC_BASE_NUMERICS_RUNNING_STATISTICS_H_ +#ifndef API_NUMERICS_RUNNING_STATISTICS_H_ +#define API_NUMERICS_RUNNING_STATISTICS_H_ #include #include @@ -20,6 +20,7 @@ #include "rtc_base/numerics/math_utils.h" namespace webrtc { +namespace webrtc_impl { // tl;dr: Robust and efficient online computation of statistics, // using Welford's method for variance. [1] @@ -154,6 +155,7 @@ class RunningStatistics { double cumul_ = 0; // Variance * size_, sometimes noted m2. }; +} // namespace webrtc_impl } // namespace webrtc -#endif // RTC_BASE_NUMERICS_RUNNING_STATISTICS_H_ +#endif // API_NUMERICS_RUNNING_STATISTICS_H_ diff --git a/rtc_base/numerics/running_statistics_unittest.cc b/rtc_base/numerics/running_statistics_unittest.cc index 26dc7133e0..d593f3fc5a 100644 --- a/rtc_base/numerics/running_statistics_unittest.cc +++ b/rtc_base/numerics/running_statistics_unittest.cc @@ -21,6 +21,7 @@ // Tests were copied from samples_stats_counter_unittest.cc. 
namespace webrtc { +namespace webrtc_impl { namespace { RunningStatistics CreateStatsFilledWithIntsFrom1ToN(int n) { @@ -55,8 +56,6 @@ class RunningStatisticsTest : public ::testing::TestWithParam {}; constexpr int SIZE_FOR_MERGE = 5; -} // namespace - TEST(RunningStatistics, FullSimpleTest) { auto stats = CreateStatsFilledWithIntsFrom1ToN(100); @@ -192,4 +191,6 @@ INSTANTIATE_TEST_SUITE_P(RunningStatisticsTests, RunningStatisticsTest, ::testing::Range(0, SIZE_FOR_MERGE + 1)); +} // namespace +} // namespace webrtc_impl } // namespace webrtc diff --git a/rtc_base/numerics/safe_conversions.h b/rtc_base/numerics/safe_conversions.h index 5d58672510..e00219cbd7 100644 --- a/rtc_base/numerics/safe_conversions.h +++ b/rtc_base/numerics/safe_conversions.h @@ -63,12 +63,10 @@ inline constexpr Dst saturated_cast(Src value) { // Should fail only on attempting to assign NaN to a saturated integer. case internal::TYPE_INVALID: - FATAL(); - return std::numeric_limits::max(); + RTC_CHECK_NOTREACHED(); } - FATAL(); - return static_cast(value); + RTC_CHECK_NOTREACHED(); } } // namespace rtc diff --git a/rtc_base/numerics/sample_stats.cc b/rtc_base/numerics/sample_stats.cc index 7a6f01e3d4..6000b2b88f 100644 --- a/rtc_base/numerics/sample_stats.cc +++ b/rtc_base/numerics/sample_stats.cc @@ -59,7 +59,7 @@ void SampleStats::AddSample(TimeDelta delta) { } void SampleStats::AddSampleMs(double delta_ms) { - AddSample(TimeDelta::ms(delta_ms)); + AddSample(TimeDelta::Millis(delta_ms)); } void SampleStats::AddSamples(const SampleStats& other) { stats_.AddSamples(other.stats_); @@ -70,11 +70,11 @@ bool SampleStats::IsEmpty() { } TimeDelta SampleStats::Max() { - return TimeDelta::seconds(stats_.Max()); + return TimeDelta::Seconds(stats_.Max()); } TimeDelta SampleStats::Mean() { - return TimeDelta::seconds(stats_.Mean()); + return TimeDelta::Seconds(stats_.Mean()); } TimeDelta SampleStats::Median() { @@ -82,19 +82,19 @@ TimeDelta SampleStats::Median() { } TimeDelta 
SampleStats::Quantile(double quantile) { - return TimeDelta::seconds(stats_.Quantile(quantile)); + return TimeDelta::Seconds(stats_.Quantile(quantile)); } TimeDelta SampleStats::Min() { - return TimeDelta::seconds(stats_.Min()); + return TimeDelta::Seconds(stats_.Min()); } TimeDelta SampleStats::Variance() { - return TimeDelta::seconds(stats_.Variance()); + return TimeDelta::Seconds(stats_.Variance()); } TimeDelta SampleStats::StandardDeviation() { - return TimeDelta::seconds(stats_.StandardDeviation()); + return TimeDelta::Seconds(stats_.StandardDeviation()); } int SampleStats::Count() { @@ -118,11 +118,11 @@ bool SampleStats::IsEmpty() { } DataRate SampleStats::Max() { - return DataRate::bps(stats_.Max()); + return DataRate::BitsPerSec(stats_.Max()); } DataRate SampleStats::Mean() { - return DataRate::bps(stats_.Mean()); + return DataRate::BitsPerSec(stats_.Mean()); } DataRate SampleStats::Median() { @@ -130,19 +130,19 @@ DataRate SampleStats::Median() { } DataRate SampleStats::Quantile(double quantile) { - return DataRate::bps(stats_.Quantile(quantile)); + return DataRate::BitsPerSec(stats_.Quantile(quantile)); } DataRate SampleStats::Min() { - return DataRate::bps(stats_.Min()); + return DataRate::BitsPerSec(stats_.Min()); } DataRate SampleStats::Variance() { - return DataRate::bps(stats_.Variance()); + return DataRate::BitsPerSec(stats_.Variance()); } DataRate SampleStats::StandardDeviation() { - return DataRate::bps(stats_.StandardDeviation()); + return DataRate::BitsPerSec(stats_.StandardDeviation()); } int SampleStats::Count() { diff --git a/rtc_base/numerics/sample_stats.h b/rtc_base/numerics/sample_stats.h index f6347414b0..39af1c6a37 100644 --- a/rtc_base/numerics/sample_stats.h +++ b/rtc_base/numerics/sample_stats.h @@ -10,10 +10,10 @@ #ifndef RTC_BASE_NUMERICS_SAMPLE_STATS_H_ #define RTC_BASE_NUMERICS_SAMPLE_STATS_H_ +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include 
"api/units/timestamp.h" -#include "rtc_base/numerics/samples_stats_counter.h" namespace webrtc { template diff --git a/rtc_base/one_time_event.h b/rtc_base/one_time_event.h index c5ccbf6933..d33ddbd587 100644 --- a/rtc_base/one_time_event.h +++ b/rtc_base/one_time_event.h @@ -11,7 +11,7 @@ #ifndef RTC_BASE_ONE_TIME_EVENT_H_ #define RTC_BASE_ONE_TIME_EVENT_H_ -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { // Provides a simple way to perform an operation (such as logging) one @@ -26,7 +26,7 @@ class OneTimeEvent { public: OneTimeEvent() {} bool operator()() { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); if (happened_) { return false; } @@ -36,7 +36,7 @@ class OneTimeEvent { private: bool happened_ = false; - rtc::CriticalSection critsect_; + Mutex mutex_; }; // A non-thread-safe, ligher-weight version of the OneTimeEvent class. diff --git a/rtc_base/openssl_adapter.cc b/rtc_base/openssl_adapter.cc index 07c2b818cf..8fd882c2b3 100644 --- a/rtc_base/openssl_adapter.cc +++ b/rtc_base/openssl_adapter.cc @@ -20,6 +20,7 @@ #include +#include "absl/memory/memory.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" @@ -176,7 +177,6 @@ OpenSSLAdapter::OpenSSLAdapter(AsyncSocket* socket, role_(SSL_CLIENT), ssl_read_needs_write_(false), ssl_write_needs_read_(false), - restartable_(false), ssl_(nullptr), ssl_ctx_(nullptr), ssl_mode_(SSL_MODE_TLS), @@ -221,9 +221,10 @@ void OpenSSLAdapter::SetCertVerifier( ssl_cert_verifier_ = ssl_cert_verifier; } -void OpenSSLAdapter::SetIdentity(SSLIdentity* identity) { +void OpenSSLAdapter::SetIdentity(std::unique_ptr identity) { RTC_DCHECK(!identity_); - identity_.reset(static_cast(identity)); + identity_ = + absl::WrapUnique(static_cast(identity.release())); } void OpenSSLAdapter::SetRole(SSLRole role) { @@ -238,19 +239,18 @@ AsyncSocket* OpenSSLAdapter::Accept(SocketAddress* paddr) { } SSLAdapter* adapter = 
SSLAdapter::Create(socket); - adapter->SetIdentity(identity_->GetReference()); + adapter->SetIdentity(identity_->Clone()); adapter->SetRole(rtc::SSL_SERVER); adapter->SetIgnoreBadCert(ignore_bad_cert_); - adapter->StartSSL("", false); + adapter->StartSSL(""); return adapter; } -int OpenSSLAdapter::StartSSL(const char* hostname, bool restartable) { +int OpenSSLAdapter::StartSSL(const char* hostname) { if (state_ != SSL_NONE) return -1; ssl_host_name_ = hostname; - restartable_ = restartable; if (socket_->GetState() != Socket::CS_CONNECTED) { state_ = SSL_WAIT; @@ -646,7 +646,7 @@ int OpenSSLAdapter::RecvFrom(void* pv, int OpenSSLAdapter::Close() { Cleanup(); - state_ = restartable_ ? SSL_WAIT : SSL_NONE; + state_ = SSL_NONE; return AsyncSocketAdapter::Close(); } diff --git a/rtc_base/openssl_adapter.h b/rtc_base/openssl_adapter.h index c3cab2fd78..6f1f7dccab 100644 --- a/rtc_base/openssl_adapter.h +++ b/rtc_base/openssl_adapter.h @@ -32,7 +32,8 @@ namespace rtc { -class OpenSSLAdapter final : public SSLAdapter, public MessageHandler { +class OpenSSLAdapter final : public SSLAdapter, + public MessageHandlerAutoCleanup { public: static bool InitializeSSL(); static bool CleanupSSL(); @@ -53,10 +54,10 @@ class OpenSSLAdapter final : public SSLAdapter, public MessageHandler { void SetEllipticCurves(const std::vector& curves) override; void SetMode(SSLMode mode) override; void SetCertVerifier(SSLCertificateVerifier* ssl_cert_verifier) override; - void SetIdentity(SSLIdentity* identity) override; + void SetIdentity(std::unique_ptr identity) override; void SetRole(SSLRole role) override; AsyncSocket* Accept(SocketAddress* paddr) override; - int StartSSL(const char* hostname, bool restartable) override; + int StartSSL(const char* hostname) override; int Send(const void* pv, size_t cb) override; int SendTo(const void* pv, size_t cb, const SocketAddress& addr) override; int Recv(void* pv, size_t cb, int64_t* timestamp) override; @@ -127,9 +128,6 @@ class OpenSSLAdapter final : 
public SSLAdapter, public MessageHandler { SSLRole role_; bool ssl_read_needs_write_; bool ssl_write_needs_read_; - // If true, socket will retain SSL configuration after Close. - // TODO(juberti): Remove this unused flag. - bool restartable_; // This buffer is used if SSL_write fails with SSL_ERROR_WANT_WRITE, which // means we need to keep retrying with *the same exact data* until it // succeeds. Afterwards it will be cleared. diff --git a/rtc_base/openssl_adapter_unittest.cc b/rtc_base/openssl_adapter_unittest.cc index 77603e0a3b..4bd87992d4 100644 --- a/rtc_base/openssl_adapter_unittest.cc +++ b/rtc_base/openssl_adapter_unittest.cc @@ -25,28 +25,34 @@ namespace { class MockAsyncSocket : public AsyncSocket { public: virtual ~MockAsyncSocket() = default; - MOCK_METHOD1(Accept, AsyncSocket*(SocketAddress*)); - MOCK_CONST_METHOD0(GetLocalAddress, SocketAddress()); - MOCK_CONST_METHOD0(GetRemoteAddress, SocketAddress()); - MOCK_METHOD1(Bind, int(const SocketAddress&)); - MOCK_METHOD1(Connect, int(const SocketAddress&)); - MOCK_METHOD2(Send, int(const void*, size_t)); - MOCK_METHOD3(SendTo, int(const void*, size_t, const SocketAddress&)); - MOCK_METHOD3(Recv, int(void*, size_t, int64_t*)); - MOCK_METHOD4(RecvFrom, int(void*, size_t, SocketAddress*, int64_t*)); - MOCK_METHOD1(Listen, int(int)); - MOCK_METHOD0(Close, int()); - MOCK_CONST_METHOD0(GetError, int()); - MOCK_METHOD1(SetError, void(int)); - MOCK_CONST_METHOD0(GetState, ConnState()); - MOCK_METHOD2(GetOption, int(Option, int*)); - MOCK_METHOD2(SetOption, int(Option, int)); + MOCK_METHOD(AsyncSocket*, Accept, (SocketAddress*), (override)); + MOCK_METHOD(SocketAddress, GetLocalAddress, (), (const, override)); + MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override)); + MOCK_METHOD(int, Bind, (const SocketAddress&), (override)); + MOCK_METHOD(int, Connect, (const SocketAddress&), (override)); + MOCK_METHOD(int, Send, (const void*, size_t), (override)); + MOCK_METHOD(int, + SendTo, + (const void*, 
size_t, const SocketAddress&), + (override)); + MOCK_METHOD(int, Recv, (void*, size_t, int64_t*), (override)); + MOCK_METHOD(int, + RecvFrom, + (void*, size_t, SocketAddress*, int64_t*), + (override)); + MOCK_METHOD(int, Listen, (int), (override)); + MOCK_METHOD(int, Close, (), (override)); + MOCK_METHOD(int, GetError, (), (const, override)); + MOCK_METHOD(void, SetError, (int), (override)); + MOCK_METHOD(ConnState, GetState, (), (const, override)); + MOCK_METHOD(int, GetOption, (Option, int*), (override)); + MOCK_METHOD(int, SetOption, (Option, int), (override)); }; class MockCertVerifier : public SSLCertificateVerifier { public: virtual ~MockCertVerifier() = default; - MOCK_METHOD1(Verify, bool(const SSLCertificate&)); + MOCK_METHOD(bool, Verify, (const SSLCertificate&), (override)); }; } // namespace @@ -80,7 +86,7 @@ TEST(OpenSSLAdapterTest, TestTransformAlpnProtocols) { TEST(OpenSSLAdapterTest, TestBeginSSLBeforeConnection) { AsyncSocket* async_socket = new MockAsyncSocket(); OpenSSLAdapter adapter(async_socket); - EXPECT_EQ(adapter.StartSSL("webrtc.org", false), 0); + EXPECT_EQ(adapter.StartSSL("webrtc.org"), 0); } // Verifies that the adapter factory can create new adapters. 
diff --git a/rtc_base/openssl_certificate.cc b/rtc_base/openssl_certificate.cc index 9459f76df6..bd9bb04fd4 100644 --- a/rtc_base/openssl_certificate.cc +++ b/rtc_base/openssl_certificate.cc @@ -244,13 +244,8 @@ std::unique_ptr OpenSSLCertificate::Clone() const { std::string OpenSSLCertificate::ToPEMString() const { BIO* bio = BIO_new(BIO_s_mem()); - if (!bio) { - FATAL() << "Unreachable code."; - } - if (!PEM_write_bio_X509(bio, x509_)) { - BIO_free(bio); - FATAL() << "Unreachable code."; - } + RTC_CHECK(bio); + RTC_CHECK(PEM_write_bio_X509(bio, x509_)); BIO_write(bio, "\0", 1); char* buffer; BIO_get_mem_data(bio, &buffer); @@ -264,13 +259,8 @@ void OpenSSLCertificate::ToDER(Buffer* der_buffer) const { der_buffer->SetSize(0); // Calculates the DER representation of the certificate, from scratch. BIO* bio = BIO_new(BIO_s_mem()); - if (!bio) { - FATAL() << "Unreachable code."; - } - if (!i2d_X509_bio(bio, x509_)) { - BIO_free(bio); - FATAL() << "Unreachable code."; - } + RTC_CHECK(bio); + RTC_CHECK(i2d_X509_bio(bio, x509_)); char* data = nullptr; size_t length = BIO_get_mem_data(bio, &data); der_buffer->SetData(data, length); diff --git a/rtc_base/openssl_identity.cc b/rtc_base/openssl_identity.cc index 8d12c07816..c94df40bfb 100644 --- a/rtc_base/openssl_identity.cc +++ b/rtc_base/openssl_identity.cc @@ -209,21 +209,24 @@ OpenSSLIdentity::OpenSSLIdentity(std::unique_ptr key_pair, OpenSSLIdentity::~OpenSSLIdentity() = default; -OpenSSLIdentity* OpenSSLIdentity::GenerateInternal( +std::unique_ptr OpenSSLIdentity::CreateInternal( const SSLIdentityParams& params) { std::unique_ptr key_pair( OpenSSLKeyPair::Generate(params.key_params)); if (key_pair) { std::unique_ptr certificate( OpenSSLCertificate::Generate(key_pair.get(), params)); - if (certificate != nullptr) - return new OpenSSLIdentity(std::move(key_pair), std::move(certificate)); + if (certificate != nullptr) { + return absl::WrapUnique( + new OpenSSLIdentity(std::move(key_pair), std::move(certificate))); + } } 
RTC_LOG(LS_INFO) << "Identity generation failed"; return nullptr; } -OpenSSLIdentity* OpenSSLIdentity::GenerateWithExpiration( +// static +std::unique_ptr OpenSSLIdentity::CreateWithExpiration( const std::string& common_name, const KeyParams& key_params, time_t certificate_lifetime) { @@ -235,16 +238,17 @@ OpenSSLIdentity* OpenSSLIdentity::GenerateWithExpiration( params.not_after = now + certificate_lifetime; if (params.not_before > params.not_after) return nullptr; - return GenerateInternal(params); + return CreateInternal(params); } -OpenSSLIdentity* OpenSSLIdentity::GenerateForTest( +std::unique_ptr OpenSSLIdentity::CreateForTest( const SSLIdentityParams& params) { - return GenerateInternal(params); + return CreateInternal(params); } -SSLIdentity* OpenSSLIdentity::FromPEMStrings(const std::string& private_key, - const std::string& certificate) { +std::unique_ptr OpenSSLIdentity::CreateFromPEMStrings( + const std::string& private_key, + const std::string& certificate) { std::unique_ptr cert( OpenSSLCertificate::FromPEMString(certificate)); if (!cert) { @@ -259,10 +263,11 @@ SSLIdentity* OpenSSLIdentity::FromPEMStrings(const std::string& private_key, return nullptr; } - return new OpenSSLIdentity(std::move(key_pair), std::move(cert)); + return absl::WrapUnique( + new OpenSSLIdentity(std::move(key_pair), std::move(cert))); } -SSLIdentity* OpenSSLIdentity::FromPEMChainStrings( +std::unique_ptr OpenSSLIdentity::CreateFromPEMChainStrings( const std::string& private_key, const std::string& certificate_chain) { BIO* bio = BIO_new_mem_buf(certificate_chain.data(), @@ -300,8 +305,8 @@ SSLIdentity* OpenSSLIdentity::FromPEMChainStrings( return nullptr; } - return new OpenSSLIdentity(std::move(key_pair), - std::make_unique(std::move(certs))); + return absl::WrapUnique(new OpenSSLIdentity( + std::move(key_pair), std::make_unique(std::move(certs)))); } const OpenSSLCertificate& OpenSSLIdentity::certificate() const { @@ -312,9 +317,11 @@ const SSLCertChain& 
OpenSSLIdentity::cert_chain() const { return *cert_chain_.get(); } -OpenSSLIdentity* OpenSSLIdentity::GetReference() const { - return new OpenSSLIdentity(absl::WrapUnique(key_pair_->GetReference()), - cert_chain_->Clone()); +std::unique_ptr OpenSSLIdentity::CloneInternal() const { + // We cannot use std::make_unique here because the referenced OpenSSLIdentity + // constructor is private. + return absl::WrapUnique(new OpenSSLIdentity( + absl::WrapUnique(key_pair_->GetReference()), cert_chain_->Clone())); } bool OpenSSLIdentity::ConfigureIdentity(SSL_CTX* ctx) { diff --git a/rtc_base/openssl_identity.h b/rtc_base/openssl_identity.h index f0c4fb895d..a2ac87cf45 100644 --- a/rtc_base/openssl_identity.h +++ b/rtc_base/openssl_identity.h @@ -60,19 +60,22 @@ class OpenSSLKeyPair final { // them consistently. class OpenSSLIdentity final : public SSLIdentity { public: - static OpenSSLIdentity* GenerateWithExpiration(const std::string& common_name, - const KeyParams& key_params, - time_t certificate_lifetime); - static OpenSSLIdentity* GenerateForTest(const SSLIdentityParams& params); - static SSLIdentity* FromPEMStrings(const std::string& private_key, - const std::string& certificate); - static SSLIdentity* FromPEMChainStrings(const std::string& private_key, - const std::string& certificate_chain); + static std::unique_ptr CreateWithExpiration( + const std::string& common_name, + const KeyParams& key_params, + time_t certificate_lifetime); + static std::unique_ptr CreateForTest( + const SSLIdentityParams& params); + static std::unique_ptr CreateFromPEMStrings( + const std::string& private_key, + const std::string& certificate); + static std::unique_ptr CreateFromPEMChainStrings( + const std::string& private_key, + const std::string& certificate_chain); ~OpenSSLIdentity() override; const OpenSSLCertificate& certificate() const override; const SSLCertChain& cert_chain() const override; - OpenSSLIdentity* GetReference() const override; // Configure an SSL context object to use 
our key and certificate. bool ConfigureIdentity(SSL_CTX* ctx); @@ -87,8 +90,10 @@ class OpenSSLIdentity final : public SSLIdentity { std::unique_ptr certificate); OpenSSLIdentity(std::unique_ptr key_pair, std::unique_ptr cert_chain); + std::unique_ptr CloneInternal() const override; - static OpenSSLIdentity* GenerateInternal(const SSLIdentityParams& params); + static std::unique_ptr CreateInternal( + const SSLIdentityParams& params); std::unique_ptr key_pair_; std::unique_ptr cert_chain_; diff --git a/rtc_base/openssl_stream_adapter.cc b/rtc_base/openssl_stream_adapter.cc index 28e8106e77..f59b4edf18 100644 --- a/rtc_base/openssl_stream_adapter.cc +++ b/rtc_base/openssl_stream_adapter.cc @@ -21,6 +21,7 @@ #include #endif +#include #include #include #include @@ -34,8 +35,10 @@ #include "rtc_base/openssl_identity.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/stream.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" +#include "system_wrappers/include/field_trial.h" #if (OPENSSL_VERSION_NUMBER < 0x10100000L) #error "webrtc requires at least OpenSSL version 1.1.0, to support DTLS-SRTP" @@ -49,7 +52,6 @@ namespace rtc { namespace { - // SRTP cipher suite table. |internal_name| is used to construct a // colon-separated profile strings which is needed by // SSL_CTX_set_tlsext_use_srtp(). 
@@ -264,8 +266,25 @@ static long stream_ctrl(BIO* b, int cmd, long num, void* ptr) { // OpenSSLStreamAdapter ///////////////////////////////////////////////////////////////////////////// -OpenSSLStreamAdapter::OpenSSLStreamAdapter(StreamInterface* stream) - : SSLStreamAdapter(stream), +static std::atomic g_use_legacy_tls_protocols_override(false); +static std::atomic g_allow_legacy_tls_protocols(false); + +void SetAllowLegacyTLSProtocols(const absl::optional& allow) { + g_use_legacy_tls_protocols_override.store(allow.has_value()); + if (allow.has_value()) + g_allow_legacy_tls_protocols.store(allow.value()); +} + +bool ShouldAllowLegacyTLSProtocols() { + return g_use_legacy_tls_protocols_override.load() + ? g_allow_legacy_tls_protocols.load() + : webrtc::field_trial::IsEnabled("WebRTC-LegacyTlsProtocols"); +} + +OpenSSLStreamAdapter::OpenSSLStreamAdapter( + std::unique_ptr stream) + : SSLStreamAdapter(std::move(stream)), + owner_(rtc::Thread::Current()), state_(SSL_NONE), role_(SSL_CLIENT), ssl_read_needs_write_(false), @@ -273,15 +292,23 @@ OpenSSLStreamAdapter::OpenSSLStreamAdapter(StreamInterface* stream) ssl_(nullptr), ssl_ctx_(nullptr), ssl_mode_(SSL_MODE_TLS), - ssl_max_version_(SSL_PROTOCOL_TLS_12) {} + ssl_max_version_(SSL_PROTOCOL_TLS_12), + // Default is to support legacy TLS protocols. + // This will be changed to default non-support in M82 or M83. 
+ support_legacy_tls_protocols_flag_(ShouldAllowLegacyTLSProtocols()) {} OpenSSLStreamAdapter::~OpenSSLStreamAdapter() { + timeout_task_.Stop(); Cleanup(0); } -void OpenSSLStreamAdapter::SetIdentity(SSLIdentity* identity) { +void OpenSSLStreamAdapter::SetIdentity(std::unique_ptr identity) { RTC_DCHECK(!identity_); - identity_.reset(static_cast(identity)); + identity_.reset(static_cast(identity.release())); +} + +OpenSSLIdentity* OpenSSLStreamAdapter::GetIdentityForTesting() const { + return identity_.get(); } void OpenSSLStreamAdapter::SetServerRole(SSLRole role) { @@ -520,7 +547,7 @@ StreamResult OpenSSLStreamAdapter::Write(const void* data, size_t data_len, size_t* written, int* error) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data_len << ")"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data_len << ")"; switch (state_) { case SSL_NONE: @@ -560,18 +587,18 @@ StreamResult OpenSSLStreamAdapter::Write(const void* data, int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: - RTC_LOG(LS_VERBOSE) << " -- success"; + RTC_DLOG(LS_VERBOSE) << " -- success"; RTC_DCHECK_GT(code, 0); RTC_DCHECK_LE(code, data_len); if (written) *written = code; return SR_SUCCESS; case SSL_ERROR_WANT_READ: - RTC_LOG(LS_VERBOSE) << " -- error want read"; + RTC_DLOG(LS_VERBOSE) << " -- error want read"; ssl_write_needs_read_ = true; return SR_BLOCK; case SSL_ERROR_WANT_WRITE: - RTC_LOG(LS_VERBOSE) << " -- error want write"; + RTC_DLOG(LS_VERBOSE) << " -- error want write"; return SR_BLOCK; case SSL_ERROR_ZERO_RETURN: @@ -589,7 +616,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, size_t data_len, size_t* read, int* error) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data_len << ")"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data_len << ")"; switch (state_) { case SSL_NONE: // pass-through in clear text @@ -627,7 +654,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, switch (ssl_error) 
{ case SSL_ERROR_NONE: - RTC_LOG(LS_VERBOSE) << " -- success"; + RTC_DLOG(LS_VERBOSE) << " -- success"; RTC_DCHECK_GT(code, 0); RTC_DCHECK_LE(code, data_len); if (read) { @@ -639,7 +666,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, unsigned int pending = SSL_pending(ssl_); if (pending) { - RTC_LOG(LS_INFO) << " -- short DTLS read. flushing"; + RTC_DLOG(LS_INFO) << " -- short DTLS read. flushing"; FlushInput(pending); if (error) { *error = SSE_MSG_TRUNC; @@ -649,17 +676,16 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, } return SR_SUCCESS; case SSL_ERROR_WANT_READ: - RTC_LOG(LS_VERBOSE) << " -- error want read"; + RTC_DLOG(LS_VERBOSE) << " -- error want read"; return SR_BLOCK; case SSL_ERROR_WANT_WRITE: - RTC_LOG(LS_VERBOSE) << " -- error want write"; + RTC_DLOG(LS_VERBOSE) << " -- error want write"; ssl_read_needs_write_ = true; return SR_BLOCK; case SSL_ERROR_ZERO_RETURN: - RTC_LOG(LS_VERBOSE) << " -- remote side closed"; + RTC_DLOG(LS_VERBOSE) << " -- remote side closed"; Close(); return SR_EOS; - break; default: Error("SSL_read", (ssl_error ? ssl_error : -1), 0, false); if (error) { @@ -687,7 +713,7 @@ void OpenSSLStreamAdapter::FlushInput(unsigned int left) { return; } - RTC_LOG(LS_VERBOSE) << " -- flushed " << code << " bytes"; + RTC_DLOG(LS_VERBOSE) << " -- flushed " << code << " bytes"; left -= code; } } @@ -725,7 +751,7 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, RTC_DCHECK(stream == this->stream()); if ((events & SE_OPEN)) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent SE_OPEN"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent SE_OPEN"; if (state_ != SSL_WAIT) { RTC_DCHECK(state_ == SSL_NONE); events_to_signal |= SE_OPEN; @@ -739,9 +765,9 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } if ((events & (SE_READ | SE_WRITE))) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent" - << ((events & SE_READ) ? " SE_READ" : "") - << ((events & SE_WRITE) ? 
" SE_WRITE" : ""); + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent" + << ((events & SE_READ) ? " SE_READ" : "") + << ((events & SE_WRITE) ? " SE_WRITE" : ""); if (state_ == SSL_NONE) { events_to_signal |= events & (SE_READ | SE_WRITE); } else if (state_ == SSL_CONNECTING) { @@ -752,20 +778,20 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } else if (state_ == SSL_CONNECTED) { if (((events & SE_READ) && ssl_write_needs_read_) || (events & SE_WRITE)) { - RTC_LOG(LS_VERBOSE) << " -- onStreamWriteable"; + RTC_DLOG(LS_VERBOSE) << " -- onStreamWriteable"; events_to_signal |= SE_WRITE; } if (((events & SE_WRITE) && ssl_read_needs_write_) || (events & SE_READ)) { - RTC_LOG(LS_VERBOSE) << " -- onStreamReadable"; + RTC_DLOG(LS_VERBOSE) << " -- onStreamReadable"; events_to_signal |= SE_READ; } } } if ((events & SE_CLOSE)) { - RTC_LOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent(SE_CLOSE, " << err - << ")"; + RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent(SE_CLOSE, " << err + << ")"; Cleanup(0); events_to_signal |= SE_CLOSE; // SE_CLOSE is the only event that uses the final parameter to OnEvent(). @@ -778,10 +804,37 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } } +void OpenSSLStreamAdapter::PostEvent(int events, int err) { + owner_->PostTask(webrtc::ToQueuedTask( + task_safety_, [this, events, err]() { SignalEvent(this, events, err); })); +} + +void OpenSSLStreamAdapter::SetTimeout(int delay_ms) { + // We need to accept 0 delay here as well as >0 delay, because + // DTLSv1_get_timeout seems to frequently return 0 ms. 
+ RTC_DCHECK_GE(delay_ms, 0); + RTC_DCHECK(!timeout_task_.Running()); + + timeout_task_ = webrtc::RepeatingTaskHandle::DelayedStart( + owner_, webrtc::TimeDelta::Millis(delay_ms), + [flag = task_safety_.flag(), this]() { + if (flag->alive()) { + RTC_DLOG(LS_INFO) << "DTLS timeout expired"; + timeout_task_.Stop(); + DTLSv1_handle_timeout(ssl_); + ContinueSSL(); + } else { + RTC_NOTREACHED(); + } + // This callback will never run again (stopped above). + return webrtc::TimeDelta::PlusInfinity(); + }); +} + int OpenSSLStreamAdapter::BeginSSL() { RTC_DCHECK(state_ == SSL_CONNECTING); // The underlying stream has opened. - RTC_LOG(LS_INFO) << "BeginSSL with peer."; + RTC_DLOG(LS_INFO) << "BeginSSL with peer."; BIO* bio = nullptr; @@ -824,18 +877,18 @@ int OpenSSLStreamAdapter::BeginSSL() { } int OpenSSLStreamAdapter::ContinueSSL() { - RTC_LOG(LS_VERBOSE) << "ContinueSSL"; + RTC_DLOG(LS_VERBOSE) << "ContinueSSL"; RTC_DCHECK(state_ == SSL_CONNECTING); // Clear the DTLS timer - Thread::Current()->Clear(this, MSG_TIMEOUT); + timeout_task_.Stop(); const int code = (role_ == SSL_CLIENT) ? SSL_connect(ssl_) : SSL_accept(ssl_); const int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: - RTC_LOG(LS_VERBOSE) << " -- success"; + RTC_DLOG(LS_VERBOSE) << " -- success"; // By this point, OpenSSL should have given us a certificate, or errored // out if one was missing. 
RTC_DCHECK(peer_cert_chain_ || !GetClientAuthEnabled()); @@ -856,28 +909,27 @@ int OpenSSLStreamAdapter::ContinueSSL() { break; case SSL_ERROR_WANT_READ: { - RTC_LOG(LS_VERBOSE) << " -- error want read"; + RTC_DLOG(LS_VERBOSE) << " -- error want read"; struct timeval timeout; if (DTLSv1_get_timeout(ssl_, &timeout)) { int delay = timeout.tv_sec * 1000 + timeout.tv_usec / 1000; - - Thread::Current()->PostDelayed(RTC_FROM_HERE, delay, this, MSG_TIMEOUT, - 0); + SetTimeout(delay); } } break; case SSL_ERROR_WANT_WRITE: - RTC_LOG(LS_VERBOSE) << " -- error want write"; + RTC_DLOG(LS_VERBOSE) << " -- error want write"; break; case SSL_ERROR_ZERO_RETURN: default: - RTC_LOG(LS_VERBOSE) << " -- error " << code; SSLHandshakeError ssl_handshake_err = SSLHandshakeError::UNKNOWN; int err_code = ERR_peek_last_error(); if (err_code != 0 && ERR_GET_REASON(err_code) == SSL_R_NO_SHARED_CIPHER) { ssl_handshake_err = SSLHandshakeError::INCOMPATIBLE_CIPHERSUITE; } + RTC_DLOG(LS_VERBOSE) << " -- error " << code << ", " << err_code << ", " + << ERR_GET_REASON(err_code); SignalSSLHandshakeError(ssl_handshake_err); return (ssl_error != 0) ? 
ssl_error : -1; } @@ -900,7 +952,7 @@ void OpenSSLStreamAdapter::Error(const char* context, } void OpenSSLStreamAdapter::Cleanup(uint8_t alert) { - RTC_LOG(LS_INFO) << "Cleanup"; + RTC_DLOG(LS_INFO) << "Cleanup"; if (state_ != SSL_ERROR) { state_ = SSL_CLOSED; @@ -938,18 +990,7 @@ void OpenSSLStreamAdapter::Cleanup(uint8_t alert) { peer_cert_chain_.reset(); // Clear the DTLS timer - Thread::Current()->Clear(this, MSG_TIMEOUT); -} - -void OpenSSLStreamAdapter::OnMessage(Message* msg) { - // Process our own messages and then pass others to the superclass - if (MSG_TIMEOUT == msg->message_id) { - RTC_LOG(LS_INFO) << "DTLS timeout expired"; - DTLSv1_handle_timeout(ssl_); - ContinueSSL(); - } else { - StreamInterface::OnMessage(msg); - } + timeout_task_.Stop(); } SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { @@ -959,25 +1000,34 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { return nullptr; } - // TODO(https://bugs.webrtc.org/10261): Evaluate and drop (D)TLS 1.0 and 1.1 - // support by default. - SSL_CTX_set_min_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_VERSION); - switch (ssl_max_version_) { - case SSL_PROTOCOL_TLS_10: - SSL_CTX_set_max_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_VERSION); - break; - case SSL_PROTOCOL_TLS_11: - SSL_CTX_set_max_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_1_VERSION); - break; - case SSL_PROTOCOL_TLS_12: - default: - SSL_CTX_set_max_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_2_VERSION : TLS1_2_VERSION); - break; + if (support_legacy_tls_protocols_flag_) { + // TODO(https://bugs.webrtc.org/10261): Completely remove this branch in + // M84. + SSL_CTX_set_min_proto_version( + ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_VERSION); + switch (ssl_max_version_) { + case SSL_PROTOCOL_TLS_10: + SSL_CTX_set_max_proto_version( + ctx, ssl_mode_ == SSL_MODE_DTLS ? 
DTLS1_VERSION : TLS1_VERSION); + break; + case SSL_PROTOCOL_TLS_11: + SSL_CTX_set_max_proto_version( + ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_1_VERSION); + break; + case SSL_PROTOCOL_TLS_12: + default: + SSL_CTX_set_max_proto_version( + ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_2_VERSION : TLS1_2_VERSION); + break; + } + } else { + // TODO(https://bugs.webrtc.org/10261): Make this the default in M84. + SSL_CTX_set_min_proto_version( + ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_2_VERSION : TLS1_2_VERSION); + SSL_CTX_set_max_proto_version( + ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_2_VERSION : TLS1_2_VERSION); } + #ifdef OPENSSL_IS_BORINGSSL // SSL_CTX_set_current_time_cb is only supported in BoringSSL. if (g_use_time_callback_for_testing) { @@ -1053,7 +1103,7 @@ bool OpenSSLStreamAdapter::VerifyPeerCertificate() { // Ignore any verification error if the digest matches, since there is no // value in checking the validity of a self-signed cert issued by untrusted // sources. - RTC_LOG(LS_INFO) << "Accepted peer certificate."; + RTC_DLOG(LS_INFO) << "Accepted peer certificate."; peer_certificate_verified_ = true; return true; } @@ -1088,7 +1138,7 @@ int OpenSSLStreamAdapter::SSLVerifyCallback(X509_STORE_CTX* store, void* arg) { // If the peer certificate digest isn't known yet, we'll wait to verify // until it's known, and for now just return a success status. 
if (stream->peer_certificate_digest_algorithm_.empty()) { - RTC_LOG(LS_INFO) << "Waiting to verify certificate until digest is known."; + RTC_DLOG(LS_INFO) << "Waiting to verify certificate until digest is known."; return 1; } diff --git a/rtc_base/openssl_stream_adapter.h b/rtc_base/openssl_stream_adapter.h index 67f0ab73bc..fbfccd6844 100644 --- a/rtc_base/openssl_stream_adapter.h +++ b/rtc_base/openssl_stream_adapter.h @@ -19,11 +19,15 @@ #include #include +#include "absl/types/optional.h" #include "rtc_base/buffer.h" #include "rtc_base/openssl_identity.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/repeating_task.h" namespace rtc { @@ -55,12 +59,19 @@ class SSLCertChain; /////////////////////////////////////////////////////////////////////////////// +// If |allow| has a value, its value determines if legacy TLS protocols are +// allowed, overriding the default configuration. +// If |allow| has no value, any previous override is removed and the default +// configuration is restored. 
+RTC_EXPORT void SetAllowLegacyTLSProtocols(const absl::optional& allow); + class OpenSSLStreamAdapter final : public SSLStreamAdapter { public: - explicit OpenSSLStreamAdapter(StreamInterface* stream); + explicit OpenSSLStreamAdapter(std::unique_ptr stream); ~OpenSSLStreamAdapter() override; - void SetIdentity(SSLIdentity* identity) override; + void SetIdentity(std::unique_ptr identity) override; + OpenSSLIdentity* GetIdentityForTesting() const override; // Default argument is for compatibility void SetServerRole(SSLRole role = SSL_SERVER) override; @@ -136,7 +147,8 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { SSL_CLOSED // Clean close }; - enum { MSG_TIMEOUT = MSG_MAX + 1 }; + void PostEvent(int events, int err); + void SetTimeout(int delay_ms); // The following three methods return 0 on success and a negative // error code on failure. The error code may be from OpenSSL or -1 @@ -160,9 +172,6 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { void Error(const char* context, int err, uint8_t alert, bool signal); void Cleanup(uint8_t alert); - // Override MessageHandler - void OnMessage(Message* msg) override; - // Flush the input buffers by reading left bytes (for DTLS) void FlushInput(unsigned int left); @@ -183,6 +192,10 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { !peer_certificate_digest_value_.empty(); } + rtc::Thread* const owner_; + webrtc::ScopedTaskSafety task_safety_; + webrtc::RepeatingTaskHandle timeout_task_; + SSLState state_; SSLRole role_; int ssl_error_code_; // valid when state_ == SSL_ERROR or SSL_CLOSED @@ -216,6 +229,9 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { // A 50-ms initial timeout ensures rapid setup on fast connections, but may // be too aggressive for low bandwidth links. int dtls_handshake_timeout_ms_ = 50; + + // TODO(https://bugs.webrtc.org/10261): Completely remove this option in M84. 
+ const bool support_legacy_tls_protocols_flag_; }; ///////////////////////////////////////////////////////////////////////////// diff --git a/rtc_base/operations_chain.cc b/rtc_base/operations_chain.cc index 68ee20babc..f707d339b6 100644 --- a/rtc_base/operations_chain.cc +++ b/rtc_base/operations_chain.cc @@ -19,12 +19,14 @@ OperationsChain::CallbackHandle::CallbackHandle( : operations_chain_(std::move(operations_chain)) {} OperationsChain::CallbackHandle::~CallbackHandle() { +#if RTC_DCHECK_IS_ON RTC_DCHECK(has_run_); +#endif } void OperationsChain::CallbackHandle::OnOperationComplete() { +#if RTC_DCHECK_IS_ON RTC_DCHECK(!has_run_); -#ifdef RTC_DCHECK_IS_ON has_run_ = true; #endif // RTC_DCHECK_IS_ON operations_chain_->OnOperationComplete(); @@ -49,6 +51,17 @@ OperationsChain::~OperationsChain() { RTC_DCHECK(chained_operations_.empty()); } +void OperationsChain::SetOnChainEmptyCallback( + std::function on_chain_empty_callback) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + on_chain_empty_callback_ = std::move(on_chain_empty_callback); +} + +bool OperationsChain::IsEmpty() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return chained_operations_.empty(); +} + std::function OperationsChain::CreateOperationsChainCallback() { return [handle = rtc::scoped_refptr( new CallbackHandle(this))]() { handle->OnOperationComplete(); }; @@ -59,9 +72,12 @@ void OperationsChain::OnOperationComplete() { // The front element is the operation that just completed, remove it. RTC_DCHECK(!chained_operations_.empty()); chained_operations_.pop(); - // If there are any other operations chained, execute the next one. + // If there are any other operations chained, execute the next one. Otherwise, + // invoke the "on chain empty" callback if it has been set. 
if (!chained_operations_.empty()) { chained_operations_.front()->Run(); + } else if (on_chain_empty_callback_.has_value()) { + on_chain_empty_callback_.value()(); } } diff --git a/rtc_base/operations_chain.h b/rtc_base/operations_chain.h index b6ec46e04a..44a3d9acb8 100644 --- a/rtc_base/operations_chain.h +++ b/rtc_base/operations_chain.h @@ -18,6 +18,7 @@ #include #include +#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" @@ -49,11 +50,15 @@ class OperationWithFunctor final : public Operation { : functor_(std::forward(functor)), callback_(std::move(callback)) {} - ~OperationWithFunctor() override { RTC_DCHECK(has_run_); } + ~OperationWithFunctor() override { +#if RTC_DCHECK_IS_ON + RTC_DCHECK(has_run_); +#endif // RTC_DCHECK_IS_ON + } void Run() override { +#if RTC_DCHECK_IS_ON RTC_DCHECK(!has_run_); -#ifdef RTC_DCHECK_IS_ON has_run_ = true; #endif // RTC_DCHECK_IS_ON // The functor being executed may invoke the callback synchronously, @@ -69,7 +74,7 @@ class OperationWithFunctor final : public Operation { private: typename std::remove_reference::type functor_; std::function callback_; -#ifdef RTC_DCHECK_IS_ON +#if RTC_DCHECK_IS_ON bool has_run_ = false; #endif // RTC_DCHECK_IS_ON }; @@ -112,6 +117,9 @@ class OperationsChain final : public RefCountedObject { static scoped_refptr Create(); ~OperationsChain(); + void SetOnChainEmptyCallback(std::function on_chain_empty_callback); + bool IsEmpty() const; + // Chains an operation. Chained operations are executed in FIFO order. 
The // operation starts when |functor| is executed by the OperationsChain and is // contractually obligated to invoke the callback passed to it when the @@ -163,7 +171,7 @@ class OperationsChain final : public RefCountedObject { private: scoped_refptr operations_chain_; -#ifdef RTC_DCHECK_IS_ON +#if RTC_DCHECK_IS_ON bool has_run_ = false; #endif // RTC_DCHECK_IS_ON @@ -181,6 +189,8 @@ class OperationsChain final : public RefCountedObject { // to it. std::queue> chained_operations_ RTC_GUARDED_BY(sequence_checker_); + absl::optional> on_chain_empty_callback_ + RTC_GUARDED_BY(sequence_checker_); RTC_DISALLOW_COPY_AND_ASSIGN(OperationsChain); }; diff --git a/rtc_base/operations_chain_unittest.cc b/rtc_base/operations_chain_unittest.cc index 968f94c060..5f183e42cb 100644 --- a/rtc_base/operations_chain_unittest.cc +++ b/rtc_base/operations_chain_unittest.cc @@ -10,6 +10,7 @@ #include "rtc_base/operations_chain.h" +#include #include #include #include @@ -17,6 +18,7 @@ #include "rtc_base/bind.h" #include "rtc_base/event.h" +#include "rtc_base/gunit.h" #include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" @@ -25,6 +27,12 @@ namespace rtc { using ::testing::ElementsAre; +namespace { + +constexpr int kDefaultTimeout = 3000; + +} // namespace + class OperationTracker { public: OperationTracker() : background_thread_(Thread::Create()) { @@ -120,6 +128,31 @@ class OperationTrackerProxy { return event; } + void SetOnChainEmptyCallback(std::function on_chain_empty_callback) { + Event event; + operations_chain_thread_->PostTask( + RTC_FROM_HERE, + [this, &event, + on_chain_empty_callback = std::move(on_chain_empty_callback)]() { + operations_chain_->SetOnChainEmptyCallback( + std::move(on_chain_empty_callback)); + event.Set(); + }); + event.Wait(Event::kForever); + } + + bool IsEmpty() { + Event event; + bool is_empty = false; + operations_chain_thread_->PostTask( + RTC_FROM_HERE, [this, &event, &is_empty]() { + is_empty = operations_chain_->IsEmpty(); + 
event.Set(); + }); + event.Wait(Event::kForever); + return is_empty; + } + std::unique_ptr ReleaseOperationChain() { std::unique_ptr event = std::make_unique(); operations_chain_thread_->PostTask(RTC_FROM_HERE, @@ -326,6 +359,87 @@ TEST(OperationsChainTest, OperationsAreExecutedInOrder) { operation6_completed_event.get())); } +TEST(OperationsChainTest, IsEmpty) { + OperationTrackerProxy operation_tracker_proxy; + operation_tracker_proxy.Initialize()->Wait(Event::kForever); + + // The chain is initially empty. + EXPECT_TRUE(operation_tracker_proxy.IsEmpty()); + // Chain a single event. + Event unblock_async_operation_event0; + auto async_operation_completed_event0 = + operation_tracker_proxy.PostAsynchronousOperation( + &unblock_async_operation_event0); + // The chain is not empty while an event is pending. + EXPECT_FALSE(operation_tracker_proxy.IsEmpty()); + // Completing the operation empties the chain. + unblock_async_operation_event0.Set(); + async_operation_completed_event0->Wait(Event::kForever); + EXPECT_TRUE(operation_tracker_proxy.IsEmpty()); + + // Chain multiple events. + Event unblock_async_operation_event1; + auto async_operation_completed_event1 = + operation_tracker_proxy.PostAsynchronousOperation( + &unblock_async_operation_event1); + Event unblock_async_operation_event2; + auto async_operation_completed_event2 = + operation_tracker_proxy.PostAsynchronousOperation( + &unblock_async_operation_event2); + // Again, the chain is not empty while an event is pending. + EXPECT_FALSE(operation_tracker_proxy.IsEmpty()); + // Upon completing the first event, the chain is still not empty. + unblock_async_operation_event1.Set(); + async_operation_completed_event1->Wait(Event::kForever); + EXPECT_FALSE(operation_tracker_proxy.IsEmpty()); + // Completing the last evenet empties the chain. 
+ unblock_async_operation_event2.Set(); + async_operation_completed_event2->Wait(Event::kForever); + EXPECT_TRUE(operation_tracker_proxy.IsEmpty()); +} + +TEST(OperationsChainTest, OnChainEmptyCallback) { + OperationTrackerProxy operation_tracker_proxy; + operation_tracker_proxy.Initialize()->Wait(Event::kForever); + + std::atomic on_empty_callback_counter(0u); + operation_tracker_proxy.SetOnChainEmptyCallback( + [&on_empty_callback_counter] { ++on_empty_callback_counter; }); + + // Chain a single event. + Event unblock_async_operation_event0; + auto async_operation_completed_event0 = + operation_tracker_proxy.PostAsynchronousOperation( + &unblock_async_operation_event0); + // The callback is not invoked until the operation has completed. + EXPECT_EQ(0u, on_empty_callback_counter); + // Completing the operation empties the chain, invoking the callback. + unblock_async_operation_event0.Set(); + async_operation_completed_event0->Wait(Event::kForever); + EXPECT_TRUE_WAIT(1u == on_empty_callback_counter, kDefaultTimeout); + + // Chain multiple events. + Event unblock_async_operation_event1; + auto async_operation_completed_event1 = + operation_tracker_proxy.PostAsynchronousOperation( + &unblock_async_operation_event1); + Event unblock_async_operation_event2; + auto async_operation_completed_event2 = + operation_tracker_proxy.PostAsynchronousOperation( + &unblock_async_operation_event2); + // Again, the callback is not invoked until the operation has completed. + EXPECT_TRUE_WAIT(1u == on_empty_callback_counter, kDefaultTimeout); + // Upon completing the first event, the chain is still not empty, so the + // callback must not be invoked yet. + unblock_async_operation_event1.Set(); + async_operation_completed_event1->Wait(Event::kForever); + EXPECT_TRUE_WAIT(1u == on_empty_callback_counter, kDefaultTimeout); + // Completing the last evenet empties the chain, invoking the callback. 
+ unblock_async_operation_event2.Set(); + async_operation_completed_event2->Wait(Event::kForever); + EXPECT_TRUE_WAIT(2u == on_empty_callback_counter, kDefaultTimeout); +} + TEST(OperationsChainTest, SafeToReleaseReferenceToOperationChainWhileOperationIsPending) { OperationTrackerProxy operation_tracker_proxy; @@ -369,14 +483,15 @@ TEST(OperationsChainTest, FunctorIsNotDestroyedWhileExecuting) { #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(OperationsChainTest, OperationNotInvokingCallbackShouldCrash) { +TEST(OperationsChainDeathTest, OperationNotInvokingCallbackShouldCrash) { scoped_refptr operations_chain = OperationsChain::Create(); EXPECT_DEATH( operations_chain->ChainOperation([](std::function callback) {}), ""); } -TEST(OperationsChainTest, OperationInvokingCallbackMultipleTimesShouldCrash) { +TEST(OperationsChainDeathTest, + OperationInvokingCallbackMultipleTimesShouldCrash) { scoped_refptr operations_chain = OperationsChain::Create(); EXPECT_DEATH( operations_chain->ChainOperation([](std::function callback) { diff --git a/rtc_base/physical_socket_server.cc b/rtc_base/physical_socket_server.cc index bd6a324ca9..cf6e792795 100644 --- a/rtc_base/physical_socket_server.cc +++ b/rtc_base/physical_socket_server.cc @@ -24,7 +24,6 @@ // "poll" will be used to wait for the signal dispatcher. 
#include #endif -#include #include #include #include @@ -104,6 +103,20 @@ typedef char* SockOptArg; #endif #endif +namespace { +class ScopedSetTrue { + public: + ScopedSetTrue(bool* value) : value_(value) { + RTC_DCHECK(!*value_); + *value_ = true; + } + ~ScopedSetTrue() { *value_ = false; } + + private: + bool* value_; +}; +} // namespace + namespace rtc { std::unique_ptr SocketServer::CreateDefault() { @@ -140,6 +153,7 @@ bool PhysicalSocket::Create(int family, int type) { Close(); s_ = ::socket(family, type, 0); udp_ = (SOCK_DGRAM == type); + family_ = family; UpdateLastError(); if (udp_) { SetEnabledEvents(DE_READ | DE_WRITE); @@ -197,12 +211,10 @@ int PhysicalSocket::Bind(const SocketAddress& bind_addr) { if (bind_addr.IsLoopbackIP()) { // If we couldn't bind to a loopback IP (which should only happen in // test scenarios), continue on. This may be expected behavior. - RTC_LOG(LS_VERBOSE) << "Binding socket to loopback address " - << bind_addr.ipaddr().ToString() + RTC_LOG(LS_VERBOSE) << "Binding socket to loopback address" << " failed; result: " << static_cast(result); } else { - RTC_LOG(LS_WARNING) << "Binding socket to network address " - << bind_addr.ipaddr().ToString() + RTC_LOG(LS_WARNING) << "Binding socket to network address" << " failed; result: " << static_cast(result); // If a network binding was attempted and failed, we should stop here // and not try to use the socket. Otherwise, we may end up sending @@ -289,9 +301,17 @@ int PhysicalSocket::GetOption(Option opt, int* value) { return -1; socklen_t optlen = sizeof(*value); int ret = ::getsockopt(s_, slevel, sopt, (SockOptArg)value, &optlen); - if (ret != -1 && opt == OPT_DONTFRAGMENT) { + if (ret == -1) { + return -1; + } + if (opt == OPT_DONTFRAGMENT) { #if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) *value = (*value != IP_PMTUDISC_DONT) ? 
1 : 0; +#endif + } else if (opt == OPT_DSCP) { +#if defined(WEBRTC_POSIX) + // unshift DSCP value to get six most significant bits of IP DiffServ field + *value >>= 2; #endif } return ret; @@ -305,8 +325,19 @@ int PhysicalSocket::SetOption(Option opt, int value) { if (opt == OPT_DONTFRAGMENT) { #if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) value = (value) ? IP_PMTUDISC_DO : IP_PMTUDISC_DONT; +#endif + } else if (opt == OPT_DSCP) { +#if defined(WEBRTC_POSIX) + // shift DSCP value to fit six most significant bits of IP DiffServ field + value <<= 2; #endif } +#if defined(WEBRTC_POSIX) + if (sopt == IPV6_TCLASS) { + // Set the IPv4 option in all cases to support dual-stack sockets. + ::setsockopt(s_, IPPROTO_IP, IP_TOS, (SockOptArg)&value, sizeof(value)); + } +#endif return ::setsockopt(s_, slevel, sopt, (SockOptArg)&value, sizeof(value)); } @@ -554,8 +585,19 @@ int PhysicalSocket::TranslateOption(Option opt, int* slevel, int* sopt) { *sopt = TCP_NODELAY; break; case OPT_DSCP: +#if defined(WEBRTC_POSIX) + if (family_ == AF_INET6) { + *slevel = IPPROTO_IPV6; + *sopt = IPV6_TCLASS; + } else { + *slevel = IPPROTO_IP; + *sopt = IP_TOS; + } + break; +#else RTC_LOG(LS_WARNING) << "Socket::OPT_DSCP not supported."; return -1; +#endif case OPT_RTP_SENDTIME_EXTN_ID: return -1; // No logging is necessary as this not a OS socket option. default: @@ -807,7 +849,7 @@ void SocketDispatcher::OnEvent(uint32_t ff, int err) { #if defined(WEBRTC_USE_EPOLL) -static int GetEpollEvents(uint32_t ff) { +inline static int GetEpollEvents(uint32_t ff) { int events = 0; if (ff & (DE_READ | DE_ACCEPT)) { events |= EPOLLIN; @@ -864,6 +906,14 @@ int SocketDispatcher::Close() { #if defined(WEBRTC_WIN) id_ = 0; signal_close_ = false; +#endif +#if defined(WEBRTC_USE_EPOLL) + // If we're batching events, the socket can be closed and reopened + // during the batch. 
Set saved_enabled_events_ to 0 here so the new + // socket, if any, has the correct old events bitfield + if (saved_enabled_events_ != -1) { + saved_enabled_events_ = 0; + } #endif ss_->Remove(this); return PhysicalSocket::Close(); @@ -919,182 +969,7 @@ class EventDispatcher : public Dispatcher { PhysicalSocketServer* ss_; int afd_[2]; bool fSignaled_; - CriticalSection crit_; -}; - -// These two classes use the self-pipe trick to deliver POSIX signals to our -// select loop. This is the only safe, reliable, cross-platform way to do -// non-trivial things with a POSIX signal in an event-driven program (until -// proper pselect() implementations become ubiquitous). - -class PosixSignalHandler { - public: - // POSIX only specifies 32 signals, but in principle the system might have - // more and the programmer might choose to use them, so we size our array - // for 128. - static const int kNumPosixSignals = 128; - - // There is just a single global instance. (Signal handlers do not get any - // sort of user-defined void * parameter, so they can't access anything that - // isn't global.) - static PosixSignalHandler* Instance() { - static PosixSignalHandler* const instance = new PosixSignalHandler(); - return instance; - } - - // Returns true if the given signal number is set. - bool IsSignalSet(int signum) const { - RTC_DCHECK(signum < static_cast(arraysize(received_signal_))); - if (signum < static_cast(arraysize(received_signal_))) { - return received_signal_[signum]; - } else { - return false; - } - } - - // Clears the given signal number. - void ClearSignal(int signum) { - RTC_DCHECK(signum < static_cast(arraysize(received_signal_))); - if (signum < static_cast(arraysize(received_signal_))) { - received_signal_[signum] = false; - } - } - - // Returns the file descriptor to monitor for signal events. - int GetDescriptor() const { return afd_[0]; } - - // This is called directly from our real signal handler, so it must be - // signal-handler-safe. 
That means it cannot assume anything about the - // user-level state of the process, since the handler could be executed at any - // time on any thread. - void OnPosixSignalReceived(int signum) { - if (signum >= static_cast(arraysize(received_signal_))) { - // We don't have space in our array for this. - return; - } - // Set a flag saying we've seen this signal. - received_signal_[signum] = true; - // Notify application code that we got a signal. - const uint8_t b[1] = {0}; - if (-1 == write(afd_[1], b, sizeof(b))) { - // Nothing we can do here. If there's an error somehow then there's - // nothing we can safely do from a signal handler. - // No, we can't even safely log it. - // But, we still have to check the return value here. Otherwise, - // GCC 4.4.1 complains ignoring return value. Even (void) doesn't help. - return; - } - } - - private: - PosixSignalHandler() { - if (pipe(afd_) < 0) { - RTC_LOG_ERR(LS_ERROR) << "pipe failed"; - return; - } - if (fcntl(afd_[0], F_SETFL, O_NONBLOCK) < 0) { - RTC_LOG_ERR(LS_WARNING) << "fcntl #1 failed"; - } - if (fcntl(afd_[1], F_SETFL, O_NONBLOCK) < 0) { - RTC_LOG_ERR(LS_WARNING) << "fcntl #2 failed"; - } - memset(const_cast(static_cast(received_signal_)), 0, - sizeof(received_signal_)); - } - - ~PosixSignalHandler() { - int fd1 = afd_[0]; - int fd2 = afd_[1]; - // We clobber the stored file descriptor numbers here or else in principle - // a signal that happens to be delivered during application termination - // could erroneously write a zero byte to an unrelated file handle in - // OnPosixSignalReceived() if some other file happens to be opened later - // during shutdown and happens to be given the same file descriptor number - // as our pipe had. Unfortunately even with this precaution there is still a - // race where that could occur if said signal happens to be handled - // concurrently with this code and happens to have already read the value of - // afd_[1] from memory before we clobber it, but that's unlikely. 
- afd_[0] = -1; - afd_[1] = -1; - close(fd1); - close(fd2); - } - - int afd_[2]; - // These are boolean flags that will be set in our signal handler and read - // and cleared from Wait(). There is a race involved in this, but it is - // benign. The signal handler sets the flag before signaling the pipe, so - // we'll never end up blocking in select() while a flag is still true. - // However, if two of the same signal arrive close to each other then it's - // possible that the second time the handler may set the flag while it's still - // true, meaning that signal will be missed. But the first occurrence of it - // will still be handled, so this isn't a problem. - // Volatile is not necessary here for correctness, but this data _is_ volatile - // so I've marked it as such. - volatile uint8_t received_signal_[kNumPosixSignals]; -}; - -class PosixSignalDispatcher : public Dispatcher { - public: - PosixSignalDispatcher(PhysicalSocketServer* owner) : owner_(owner) { - owner_->Add(this); - } - - ~PosixSignalDispatcher() override { owner_->Remove(this); } - - uint32_t GetRequestedEvents() override { return DE_READ; } - - void OnPreEvent(uint32_t ff) override { - // Events might get grouped if signals come very fast, so we read out up to - // 16 bytes to make sure we keep the pipe empty. - uint8_t b[16]; - ssize_t ret = read(GetDescriptor(), b, sizeof(b)); - if (ret < 0) { - RTC_LOG_ERR(LS_WARNING) << "Error in read()"; - } else if (ret == 0) { - RTC_LOG(LS_WARNING) << "Should have read at least one byte"; - } - } - - void OnEvent(uint32_t ff, int err) override { - for (int signum = 0; signum < PosixSignalHandler::kNumPosixSignals; - ++signum) { - if (PosixSignalHandler::Instance()->IsSignalSet(signum)) { - PosixSignalHandler::Instance()->ClearSignal(signum); - HandlerMap::iterator i = handlers_.find(signum); - if (i == handlers_.end()) { - // This can happen if a signal is delivered to our process at around - // the same time as we unset our handler for it. 
It is not an error - // condition, but it's unusual enough to be worth logging. - RTC_LOG(LS_INFO) << "Received signal with no handler: " << signum; - } else { - // Otherwise, execute our handler. - (*i->second)(signum); - } - } - } - } - - int GetDescriptor() override { - return PosixSignalHandler::Instance()->GetDescriptor(); - } - - bool IsDescriptorClosed() override { return false; } - - void SetHandler(int signum, void (*handler)(int)) { - handlers_[signum] = handler; - } - - void ClearHandler(int signum) { handlers_.erase(signum); } - - bool HasHandlers() { return !handlers_.empty(); } - - private: - typedef std::map HandlerMap; - - HandlerMap handlers_; - // Our owner. - PhysicalSocketServer* owner_; + RecursiveCriticalSection crit_; }; #endif // WEBRTC_POSIX @@ -1168,30 +1043,31 @@ class Signaler : public EventDispatcher { bool* pf_; }; -PhysicalSocketServer::PhysicalSocketServer() : fWait_(false) { +PhysicalSocketServer::PhysicalSocketServer() + : +#if defined(WEBRTC_USE_EPOLL) + // Since Linux 2.6.8, the size argument is ignored, but must be greater + // than zero. Before that the size served as hint to the kernel for the + // amount of space to initially allocate in internal data structures. + epoll_fd_(epoll_create(FD_SETSIZE)), +#endif +#if defined(WEBRTC_WIN) + socket_ev_(WSACreateEvent()), +#endif + fWait_(false) { #if defined(WEBRTC_USE_EPOLL) - // Since Linux 2.6.8, the size argument is ignored, but must be greater than - // zero. Before that the size served as hint to the kernel for the amount of - // space to initially allocate in internal data structures. - epoll_fd_ = epoll_create(FD_SETSIZE); if (epoll_fd_ == -1) { // Not an error, will fall back to "select" below. RTC_LOG_E(LS_WARNING, EN, errno) << "epoll_create"; - epoll_fd_ = INVALID_SOCKET; + // Note that -1 == INVALID_SOCKET, the alias used by later checks. 
} #endif signal_wakeup_ = new Signaler(this, &fWait_); -#if defined(WEBRTC_WIN) - socket_ev_ = WSACreateEvent(); -#endif } PhysicalSocketServer::~PhysicalSocketServer() { #if defined(WEBRTC_WIN) WSACloseEvent(socket_ev_); -#endif -#if defined(WEBRTC_POSIX) - signal_dispatcher_.reset(); #endif delete signal_wakeup_; #if defined(WEBRTC_USE_EPOLL) @@ -1199,7 +1075,8 @@ PhysicalSocketServer::~PhysicalSocketServer() { close(epoll_fd_); } #endif - RTC_DCHECK(dispatchers_.empty()); + RTC_DCHECK(dispatcher_by_key_.empty()); + RTC_DCHECK(key_by_dispatcher_.empty()); } void PhysicalSocketServer::WakeUp() { @@ -1238,45 +1115,32 @@ AsyncSocket* PhysicalSocketServer::WrapSocket(SOCKET s) { void PhysicalSocketServer::Add(Dispatcher* pdispatcher) { CritScope cs(&crit_); - if (processing_dispatchers_) { - // A dispatcher is being added while a "Wait" call is processing the - // list of socket events. - // Defer adding to "dispatchers_" set until processing is done to avoid - // invalidating the iterator in "Wait". - pending_remove_dispatchers_.erase(pdispatcher); - pending_add_dispatchers_.insert(pdispatcher); - } else { - dispatchers_.insert(pdispatcher); + if (key_by_dispatcher_.count(pdispatcher)) { + RTC_LOG(LS_WARNING) + << "PhysicalSocketServer asked to add a duplicate dispatcher."; + return; } + uint64_t key = next_dispatcher_key_++; + dispatcher_by_key_.emplace(key, pdispatcher); + key_by_dispatcher_.emplace(pdispatcher, key); #if defined(WEBRTC_USE_EPOLL) if (epoll_fd_ != INVALID_SOCKET) { - AddEpoll(pdispatcher); + AddEpoll(pdispatcher, key); } #endif // WEBRTC_USE_EPOLL } void PhysicalSocketServer::Remove(Dispatcher* pdispatcher) { CritScope cs(&crit_); - if (processing_dispatchers_) { - // A dispatcher is being removed while a "Wait" call is processing the - // list of socket events. - // Defer removal from "dispatchers_" set until processing is done to avoid - // invalidating the iterator in "Wait". 
- if (!pending_add_dispatchers_.erase(pdispatcher) && - dispatchers_.find(pdispatcher) == dispatchers_.end()) { - RTC_LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown " - "dispatcher, potentially from a duplicate call to " - "Add."; - return; - } - - pending_remove_dispatchers_.insert(pdispatcher); - } else if (!dispatchers_.erase(pdispatcher)) { + if (!key_by_dispatcher_.count(pdispatcher)) { RTC_LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown " "dispatcher, potentially from a duplicate call to Add."; return; } + uint64_t key = key_by_dispatcher_.at(pdispatcher); + key_by_dispatcher_.erase(pdispatcher); + dispatcher_by_key_.erase(key); #if defined(WEBRTC_USE_EPOLL) if (epoll_fd_ != INVALID_SOCKET) { RemoveEpoll(pdispatcher); @@ -1290,34 +1154,22 @@ void PhysicalSocketServer::Update(Dispatcher* pdispatcher) { return; } + // Don't update dispatchers that haven't yet been added. CritScope cs(&crit_); - if (dispatchers_.find(pdispatcher) == dispatchers_.end()) { + if (!key_by_dispatcher_.count(pdispatcher)) { return; } - UpdateEpoll(pdispatcher); + UpdateEpoll(pdispatcher, key_by_dispatcher_.at(pdispatcher)); #endif } -void PhysicalSocketServer::AddRemovePendingDispatchers() { - if (!pending_add_dispatchers_.empty()) { - for (Dispatcher* pdispatcher : pending_add_dispatchers_) { - dispatchers_.insert(pdispatcher); - } - pending_add_dispatchers_.clear(); - } - - if (!pending_remove_dispatchers_.empty()) { - for (Dispatcher* pdispatcher : pending_remove_dispatchers_) { - dispatchers_.erase(pdispatcher); - } - pending_remove_dispatchers_.clear(); - } -} - #if defined(WEBRTC_POSIX) bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { + // We don't support reentrant waiting. + RTC_DCHECK(!waiting_); + ScopedSetTrue s(&waiting_); #if defined(WEBRTC_USE_EPOLL) // We don't keep a dedicated "epoll" descriptor containing only the non-IO // (i.e. 
signaling) dispatcher, so "poll" will be used instead of the default @@ -1343,6 +1195,9 @@ static void ProcessEvents(Dispatcher* dispatcher, &len); } + // Most often the socket is writable or readable or both, so make a single + // virtual call to get requested events + const uint32_t requested_events = dispatcher->GetRequestedEvents(); uint32_t ff = 0; // Check readable descriptors. If we're waiting on an accept, signal @@ -1350,7 +1205,7 @@ static void ProcessEvents(Dispatcher* dispatcher, // readable or really closed. // TODO(pthatcher): Only peek at TCP descriptors. if (readable) { - if (dispatcher->GetRequestedEvents() & DE_ACCEPT) { + if (requested_events & DE_ACCEPT) { ff |= DE_ACCEPT; } else if (errcode || dispatcher->IsDescriptorClosed()) { ff |= DE_CLOSE; @@ -1362,7 +1217,7 @@ static void ProcessEvents(Dispatcher* dispatcher, // Check writable descriptors. If we're waiting on a connect, detect // success versus failure by the reaped error code. if (writable) { - if (dispatcher->GetRequestedEvents() & DE_CONNECT) { + if (requested_events & DE_CONNECT) { if (!errcode) { ff |= DE_CONNECT; } else { @@ -1396,13 +1251,9 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { stop_us = rtc::TimeMicros() + cmsWait * 1000; } - // Zero all fd_sets. Don't need to do this inside the loop since - // select() zeros the descriptors not signaled fd_set fdsRead; - FD_ZERO(&fdsRead); fd_set fdsWrite; - FD_ZERO(&fdsWrite); // Explicitly unpoison these FDs on MemorySanitizer which doesn't handle the // inline assembly in FD_ZERO. // http://crbug.com/344505 @@ -1414,16 +1265,22 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { fWait_ = true; while (fWait_) { + // Zero all fd_sets. Although select() zeros the descriptors not signaled, + // we may need to do this for dispatchers that were deleted while + // iterating. 
+ FD_ZERO(&fdsRead); + FD_ZERO(&fdsWrite); int fdmax = -1; { CritScope cr(&crit_); - // TODO(jbauch): Support re-entrant waiting. - RTC_DCHECK(!processing_dispatchers_); - for (Dispatcher* pdispatcher : dispatchers_) { + current_dispatcher_keys_.clear(); + for (auto const& kv : dispatcher_by_key_) { + uint64_t key = kv.first; + Dispatcher* pdispatcher = kv.second; // Query dispatchers for read and write wait state - RTC_DCHECK(pdispatcher); if (!process_io && (pdispatcher != signal_wakeup_)) continue; + current_dispatcher_keys_.push_back(key); int fd = pdispatcher->GetDescriptor(); // "select"ing a file descriptor that is equal to or larger than // FD_SETSIZE will result in undefined behavior. @@ -1461,8 +1318,14 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { } else { // We have signaled descriptors CritScope cr(&crit_); - processing_dispatchers_ = true; - for (Dispatcher* pdispatcher : dispatchers_) { + // Iterate only on the dispatchers whose sockets were passed into + // WSAEventSelect; this avoids the ABA problem (a socket being + // destroyed and a new one created with the same file descriptor). + for (uint64_t key : current_dispatcher_keys_) { + if (!dispatcher_by_key_.count(key)) + continue; + Dispatcher* pdispatcher = dispatcher_by_key_.at(key); + int fd = pdispatcher->GetDescriptor(); bool readable = FD_ISSET(fd, &fdsRead); @@ -1478,11 +1341,6 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { // The error code can be signaled through reads or writes. ProcessEvents(pdispatcher, readable, writable, readable || writable); } - - processing_dispatchers_ = false; - // Process deferred dispatchers that have been added/removed while the - // events were handled above. - AddRemovePendingDispatchers(); } // Recalc the time remaining to wait. 
Doing it here means it doesn't get @@ -1503,13 +1361,7 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { #if defined(WEBRTC_USE_EPOLL) -// Initial number of events to process with one call to "epoll_wait". -static const size_t kInitialEpollEvents = 128; - -// Maximum number of events to process with one call to "epoll_wait". -static const size_t kMaxEpollEvents = 8192; - -void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher) { +void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher, uint64_t key) { RTC_DCHECK(epoll_fd_ != INVALID_SOCKET); int fd = pdispatcher->GetDescriptor(); RTC_DCHECK(fd != INVALID_SOCKET); @@ -1519,7 +1371,7 @@ void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher) { struct epoll_event event = {0}; event.events = GetEpollEvents(pdispatcher->GetRequestedEvents()); - event.data.ptr = pdispatcher; + event.data.u64 = key; int err = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, fd, &event); RTC_DCHECK_EQ(err, 0); if (err == -1) { @@ -1548,7 +1400,7 @@ void PhysicalSocketServer::RemoveEpoll(Dispatcher* pdispatcher) { } } -void PhysicalSocketServer::UpdateEpoll(Dispatcher* pdispatcher) { +void PhysicalSocketServer::UpdateEpoll(Dispatcher* pdispatcher, uint64_t key) { RTC_DCHECK(epoll_fd_ != INVALID_SOCKET); int fd = pdispatcher->GetDescriptor(); RTC_DCHECK(fd != INVALID_SOCKET); @@ -1558,7 +1410,7 @@ void PhysicalSocketServer::UpdateEpoll(Dispatcher* pdispatcher) { struct epoll_event event = {0}; event.events = GetEpollEvents(pdispatcher->GetRequestedEvents()); - event.data.ptr = pdispatcher; + event.data.u64 = key; int err = epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, fd, &event); RTC_DCHECK_EQ(err, 0); if (err == -1) { @@ -1575,20 +1427,13 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) { tvStop = TimeAfter(cmsWait); } - if (epoll_events_.empty()) { - // The initial space to receive events is created only if epoll is used. 
- epoll_events_.resize(kInitialEpollEvents); - } - fWait_ = true; - while (fWait_) { // Wait then call handlers as appropriate // < 0 means error // 0 means timeout // > 0 means count of descriptors ready - int n = epoll_wait(epoll_fd_, &epoll_events_[0], - static_cast(epoll_events_.size()), + int n = epoll_wait(epoll_fd_, epoll_events_.data(), epoll_events_.size(), static_cast(tvWait)); if (n < 0) { if (errno != EINTR) { @@ -1607,11 +1452,12 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) { CritScope cr(&crit_); for (int i = 0; i < n; ++i) { const epoll_event& event = epoll_events_[i]; - Dispatcher* pdispatcher = static_cast(event.data.ptr); - if (dispatchers_.find(pdispatcher) == dispatchers_.end()) { + uint64_t key = event.data.u64; + if (!dispatcher_by_key_.count(key)) { // The dispatcher for this socket no longer exists. continue; } + Dispatcher* pdispatcher = dispatcher_by_key_.at(key); bool readable = (event.events & (EPOLLIN | EPOLLPRI)); bool writable = (event.events & EPOLLOUT); @@ -1621,16 +1467,9 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) { } } - if (static_cast(n) == epoll_events_.size() && - epoll_events_.size() < kMaxEpollEvents) { - // We used the complete space to receive events, increase size for future - // iterations. - epoll_events_.resize(std::max(epoll_events_.size() * 2, kMaxEpollEvents)); - } - if (cmsWait != kForever) { tvWait = TimeDiff(tvStop, TimeMillis()); - if (tvWait < 0) { + if (tvWait <= 0) { // Return success on timeout. return true; } @@ -1709,66 +1548,14 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, Dispatcher* dispatcher) { #endif // WEBRTC_USE_EPOLL -static void GlobalSignalHandler(int signum) { - PosixSignalHandler::Instance()->OnPosixSignalReceived(signum); -} - -bool PhysicalSocketServer::SetPosixSignalHandler(int signum, - void (*handler)(int)) { - // If handler is SIG_IGN or SIG_DFL then clear our user-level handler, - // otherwise set one. 
- if (handler == SIG_IGN || handler == SIG_DFL) { - if (!InstallSignal(signum, handler)) { - return false; - } - if (signal_dispatcher_) { - signal_dispatcher_->ClearHandler(signum); - if (!signal_dispatcher_->HasHandlers()) { - signal_dispatcher_.reset(); - } - } - } else { - if (!signal_dispatcher_) { - signal_dispatcher_.reset(new PosixSignalDispatcher(this)); - } - signal_dispatcher_->SetHandler(signum, handler); - if (!InstallSignal(signum, &GlobalSignalHandler)) { - return false; - } - } - return true; -} - -Dispatcher* PhysicalSocketServer::signal_dispatcher() { - return signal_dispatcher_.get(); -} - -bool PhysicalSocketServer::InstallSignal(int signum, void (*handler)(int)) { - struct sigaction act; - // It doesn't really matter what we set this mask to. - if (sigemptyset(&act.sa_mask) != 0) { - RTC_LOG_ERR(LS_ERROR) << "Couldn't set mask"; - return false; - } - act.sa_handler = handler; -#if !defined(__native_client__) - // Use SA_RESTART so that our syscalls don't get EINTR, since we don't need it - // and it's a nuisance. Though some syscalls still return EINTR and there's no - // real standard for which ones. :( - act.sa_flags = SA_RESTART; -#else - act.sa_flags = 0; -#endif - if (sigaction(signum, &act, nullptr) != 0) { - RTC_LOG_ERR(LS_ERROR) << "Couldn't set sigaction"; - return false; - } - return true; -} #endif // WEBRTC_POSIX #if defined(WEBRTC_WIN) bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { + // We don't support reentrant waiting. + RTC_DCHECK(!waiting_); + ScopedSetTrue s(&waiting_); + int64_t cmsTotal = cmsWait; int64_t cmsElapsed = 0; int64_t msStart = Time(); @@ -1776,37 +1563,40 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { fWait_ = true; while (fWait_) { std::vector events; - std::vector event_owners; + std::vector event_owners; events.push_back(socket_ev_); { CritScope cr(&crit_); - // TODO(jbauch): Support re-entrant waiting. 
- RTC_DCHECK(!processing_dispatchers_); - - // Calling "CheckSignalClose" might remove a closed dispatcher from the - // set. This must be deferred to prevent invalidating the iterator. - processing_dispatchers_ = true; - for (Dispatcher* disp : dispatchers_) { + // Get a snapshot of all current dispatchers; this is used to avoid the + // ABA problem (see later comment) and avoids the dispatcher_by_key_ + // iterator being invalidated by calling CheckSignalClose, which may + // remove the dispatcher from the list. + current_dispatcher_keys_.clear(); + for (auto const& kv : dispatcher_by_key_) { + current_dispatcher_keys_.push_back(kv.first); + } + for (uint64_t key : current_dispatcher_keys_) { + if (!dispatcher_by_key_.count(key)) { + continue; + } + Dispatcher* disp = dispatcher_by_key_.at(key); + if (!disp) + continue; if (!process_io && (disp != signal_wakeup_)) continue; SOCKET s = disp->GetSocket(); if (disp->CheckSignalClose()) { - // We just signalled close, don't poll this socket + // We just signalled close, don't poll this socket. } else if (s != INVALID_SOCKET) { WSAEventSelect(s, events[0], FlagsToEvents(disp->GetRequestedEvents())); } else { events.push_back(disp->GetWSAEvent()); - event_owners.push_back(disp); + event_owners.push_back(key); } } - - processing_dispatchers_ = false; - // Process deferred dispatchers that have been added/removed while the - // events were handled above. - AddRemovePendingDispatchers(); } // Which is shorter, the delay wait or the asked wait? @@ -1838,15 +1628,23 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { int index = dw - WSA_WAIT_EVENT_0; if (index > 0) { --index; // The first event is the socket event - Dispatcher* disp = event_owners[index]; - // The dispatcher could have been removed while waiting for events. 
- if (dispatchers_.find(disp) != dispatchers_.end()) { - disp->OnPreEvent(0); - disp->OnEvent(0, 0); + uint64_t key = event_owners[index]; + if (!dispatcher_by_key_.count(key)) { + // The dispatcher could have been removed while waiting for events. + continue; } + Dispatcher* disp = dispatcher_by_key_.at(key); + disp->OnPreEvent(0); + disp->OnEvent(0, 0); } else if (process_io) { - processing_dispatchers_ = true; - for (Dispatcher* disp : dispatchers_) { + // Iterate only on the dispatchers whose sockets were passed into + // WSAEventSelect; this avoids the ABA problem (a socket being + // destroyed and a new one created with the same SOCKET handle). + for (uint64_t key : current_dispatcher_keys_) { + if (!dispatcher_by_key_.count(key)) { + continue; + } + Dispatcher* disp = dispatcher_by_key_.at(key); SOCKET s = disp->GetSocket(); if (s == INVALID_SOCKET) continue; @@ -1912,11 +1710,6 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { } } } - - processing_dispatchers_ = false; - // Process deferred dispatchers that have been added/removed while the - // events were handled above. 
- AddRemovePendingDispatchers(); } // Reset the network event until new activity occurs diff --git a/rtc_base/physical_socket_server.h b/rtc_base/physical_socket_server.h index e85b2b0f1e..cc21a67b1a 100644 --- a/rtc_base/physical_socket_server.h +++ b/rtc_base/physical_socket_server.h @@ -16,14 +16,16 @@ #define WEBRTC_USE_EPOLL 1 #endif +#include #include -#include +#include #include -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/net_helpers.h" #include "rtc_base/socket_server.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread_annotations.h" #if defined(WEBRTC_POSIX) typedef int SOCKET; @@ -41,9 +43,6 @@ enum DispatcherEvent { }; class Signaler; -#if defined(WEBRTC_POSIX) -class PosixSignalDispatcher; -#endif class Dispatcher { public: @@ -82,54 +81,52 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { void Remove(Dispatcher* dispatcher); void Update(Dispatcher* dispatcher); -#if defined(WEBRTC_POSIX) - // Sets the function to be executed in response to the specified POSIX signal. - // The function is executed from inside Wait() using the "self-pipe trick"-- - // regardless of which thread receives the signal--and hence can safely - // manipulate user-level data structures. - // "handler" may be SIG_IGN, SIG_DFL, or a user-specified function, just like - // with signal(2). - // Only one PhysicalSocketServer should have user-level signal handlers. - // Dispatching signals on multiple PhysicalSocketServers is not reliable. - // The signal mask is not modified. It is the caller's responsibily to - // maintain it as desired. - virtual bool SetPosixSignalHandler(int signum, void (*handler)(int)); - - protected: - Dispatcher* signal_dispatcher(); -#endif - private: - typedef std::set DispatcherSet; - - void AddRemovePendingDispatchers(); + // The number of events to process with one call to "epoll_wait". 
+ static constexpr size_t kNumEpollEvents = 128; #if defined(WEBRTC_POSIX) bool WaitSelect(int cms, bool process_io); - static bool InstallSignal(int signum, void (*handler)(int)); - - std::unique_ptr signal_dispatcher_; #endif // WEBRTC_POSIX #if defined(WEBRTC_USE_EPOLL) - void AddEpoll(Dispatcher* dispatcher); + void AddEpoll(Dispatcher* dispatcher, uint64_t key); void RemoveEpoll(Dispatcher* dispatcher); - void UpdateEpoll(Dispatcher* dispatcher); + void UpdateEpoll(Dispatcher* dispatcher, uint64_t key); bool WaitEpoll(int cms); bool WaitPoll(int cms, Dispatcher* dispatcher); - int epoll_fd_ = INVALID_SOCKET; - std::vector epoll_events_; + // This array is accessed in isolation by a thread calling into Wait(). + // It's useless to use a SequenceChecker to guard it because a socket + // server can outlive the thread it's bound to, forcing the Wait call + // to have to reset the sequence checker on Wait calls. + std::array epoll_events_; + const int epoll_fd_ = INVALID_SOCKET; #endif // WEBRTC_USE_EPOLL - DispatcherSet dispatchers_; - DispatcherSet pending_add_dispatchers_; - DispatcherSet pending_remove_dispatchers_; - bool processing_dispatchers_ = false; - Signaler* signal_wakeup_; - CriticalSection crit_; - bool fWait_; + // uint64_t keys are used to uniquely identify a dispatcher in order to avoid + // the ABA problem during the epoll loop (a dispatcher being destroyed and + // replaced by one with the same address). + uint64_t next_dispatcher_key_ RTC_GUARDED_BY(crit_) = 0; + std::unordered_map dispatcher_by_key_ + RTC_GUARDED_BY(crit_); + // Reverse lookup necessary for removals/updates. + std::unordered_map key_by_dispatcher_ + RTC_GUARDED_BY(crit_); + // A list of dispatcher keys that we're interested in for the current + // select() or WSAWaitForMultipleEvents() loop. Again, used to avoid the ABA + // problem (a socket being destroyed and a new one created with the same + // handle, erroneously receiving the events from the destroyed socket). 
+ // + // Kept as a member variable just for efficiency. + std::vector current_dispatcher_keys_; + Signaler* signal_wakeup_; // Assigned in constructor only + RecursiveCriticalSection crit_; #if defined(WEBRTC_WIN) - WSAEVENT socket_ev_; + const WSAEVENT socket_ev_; #endif + bool fWait_; + // Are we currently in a select()/epoll()/WSAWaitForMultipleEvents loop? + // Used for a DCHECK, because we don't support reentrant waiting. + bool waiting_ = false; }; class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> { @@ -199,12 +196,13 @@ class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> { virtual void EnableEvents(uint8_t events); virtual void DisableEvents(uint8_t events); - static int TranslateOption(Option opt, int* slevel, int* sopt); + int TranslateOption(Option opt, int* slevel, int* sopt); PhysicalSocketServer* ss_; SOCKET s_; bool udp_; - CriticalSection crit_; + int family_ = 0; + RecursiveCriticalSection crit_; int error_ RTC_GUARDED_BY(crit_); ConnState state_; AsyncResolver* resolver_; diff --git a/rtc_base/physical_socket_server_unittest.cc b/rtc_base/physical_socket_server_unittest.cc index 5083ca1791..648f39701a 100644 --- a/rtc_base/physical_socket_server_unittest.cc +++ b/rtc_base/physical_socket_server_unittest.cc @@ -381,6 +381,15 @@ TEST_F(PhysicalSocketTest, TestCloseInClosedCallbackIPv6) { SocketTest::TestCloseInClosedCallbackIPv6(); } +TEST_F(PhysicalSocketTest, TestDeleteInReadCallbackIPv4) { + MAYBE_SKIP_IPV4; + SocketTest::TestDeleteInReadCallbackIPv4(); +} + +TEST_F(PhysicalSocketTest, TestDeleteInReadCallbackIPv6) { + SocketTest::TestDeleteInReadCallbackIPv6(); +} + TEST_F(PhysicalSocketTest, TestSocketServerWaitIPv4) { MAYBE_SKIP_IPV4; SocketTest::TestSocketServerWaitIPv4(); @@ -501,139 +510,6 @@ TEST_F(PhysicalSocketTest, server_->set_network_binder(nullptr); } -class PosixSignalDeliveryTest : public ::testing::Test { - public: - static void RecordSignal(int signum) { - 
signals_received_.push_back(signum); - signaled_thread_ = Thread::Current(); - } - - protected: - void SetUp() override { ss_.reset(new PhysicalSocketServer()); } - - void TearDown() override { - ss_.reset(nullptr); - signals_received_.clear(); - signaled_thread_ = nullptr; - } - - bool ExpectSignal(int signum) { - if (signals_received_.empty()) { - RTC_LOG(LS_ERROR) << "ExpectSignal(): No signal received"; - return false; - } - if (signals_received_[0] != signum) { - RTC_LOG(LS_ERROR) << "ExpectSignal(): Received signal " - << signals_received_[0] << ", expected " << signum; - return false; - } - signals_received_.erase(signals_received_.begin()); - return true; - } - - bool ExpectNone() { - bool ret = signals_received_.empty(); - if (!ret) { - RTC_LOG(LS_ERROR) << "ExpectNone(): Received signal " - << signals_received_[0] << ", expected none"; - } - return ret; - } - - static std::vector signals_received_; - static Thread* signaled_thread_; - - std::unique_ptr ss_; -}; - -std::vector PosixSignalDeliveryTest::signals_received_; -Thread* PosixSignalDeliveryTest::signaled_thread_ = nullptr; - -// Test receiving a synchronous signal while not in Wait() and then entering -// Wait() afterwards. -// TODO(webrtc:7864): Fails on real iOS devices -#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM_FAMILY) -#define MAYBE_RaiseThenWait DISABLED_RaiseThenWait -#else -#define MAYBE_RaiseThenWait RaiseThenWait -#endif -TEST_F(PosixSignalDeliveryTest, MAYBE_RaiseThenWait) { - ASSERT_TRUE(ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal)); - raise(SIGTERM); - EXPECT_TRUE(ss_->Wait(0, true)); - EXPECT_TRUE(ExpectSignal(SIGTERM)); - EXPECT_TRUE(ExpectNone()); -} - -// Test that we can handle getting tons of repeated signals and that we see all -// the different ones. 
-// TODO(webrtc:7864): Fails on real iOS devices -#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM_FAMILY) -#define MAYBE_InsanelyManySignals DISABLED_InsanelyManySignals -#else -#define MAYBE_InsanelyManySignals InsanelyManySignals -#endif -TEST_F(PosixSignalDeliveryTest, MAYBE_InsanelyManySignals) { - ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal); - ss_->SetPosixSignalHandler(SIGINT, &RecordSignal); - for (int i = 0; i < 10000; ++i) { - raise(SIGTERM); - } - raise(SIGINT); - EXPECT_TRUE(ss_->Wait(0, true)); - // Order will be lowest signal numbers first. - EXPECT_TRUE(ExpectSignal(SIGINT)); - EXPECT_TRUE(ExpectSignal(SIGTERM)); - EXPECT_TRUE(ExpectNone()); -} - -// Test that a signal during a Wait() call is detected. -TEST_F(PosixSignalDeliveryTest, SignalDuringWait) { - ss_->SetPosixSignalHandler(SIGALRM, &RecordSignal); - alarm(1); - EXPECT_TRUE(ss_->Wait(1500, true)); - EXPECT_TRUE(ExpectSignal(SIGALRM)); - EXPECT_TRUE(ExpectNone()); -} - -// Test that it works no matter what thread the kernel chooses to give the -// signal to (since it's not guaranteed to be the one that Wait() runs on). -// TODO(webrtc:7864): Fails on real iOS devices -#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM_FAMILY) -#define MAYBE_SignalOnDifferentThread DISABLED_SignalOnDifferentThread -#else -#define MAYBE_SignalOnDifferentThread SignalOnDifferentThread -#endif -TEST_F(PosixSignalDeliveryTest, DISABLED_SignalOnDifferentThread) { - ss_->SetPosixSignalHandler(SIGTERM, &RecordSignal); - // Mask out SIGTERM so that it can't be delivered to this thread. - sigset_t mask; - sigemptyset(&mask); - sigaddset(&mask, SIGTERM); - EXPECT_EQ(0, pthread_sigmask(SIG_SETMASK, &mask, nullptr)); - // Start a new thread that raises it. It will have to be delivered to that - // thread. Our implementation should safely handle it and dispatch - // RecordSignal() on this thread. 
- std::unique_ptr thread(Thread::CreateWithSocketServer()); - thread->Start(); - thread->PostTask(RTC_FROM_HERE, [&thread]() { - thread->socketserver()->Wait(1000, false); - // Allow SIGTERM. This will be the only thread with it not masked so it will - // be delivered to us. - sigset_t mask; - sigemptyset(&mask); - pthread_sigmask(SIG_SETMASK, &mask, nullptr); - - // Raise it. - raise(SIGTERM); - }); - - EXPECT_TRUE(ss_->Wait(1500, true)); - EXPECT_TRUE(ExpectSignal(SIGTERM)); - EXPECT_EQ(Thread::Current(), signaled_thread_); - EXPECT_TRUE(ExpectNone()); -} - #endif } // namespace rtc diff --git a/rtc_base/platform_thread_types.cc b/rtc_base/platform_thread_types.cc index ed4a228262..b0243b41dc 100644 --- a/rtc_base/platform_thread_types.cc +++ b/rtc_base/platform_thread_types.cc @@ -15,6 +15,16 @@ #include #endif +#if defined(WEBRTC_WIN) +#include "rtc_base/arraysize.h" + +// The SetThreadDescription API was brought in version 1607 of Windows 10. +// For compatibility with various versions of winuser and avoid clashing with +// a potentially defined type, we use the RTC_ prefix. +typedef HRESULT(WINAPI* RTC_SetThreadDescription)(HANDLE hThread, + PCWSTR lpThreadDescription); +#endif + namespace rtc { PlatformThreadId CurrentThreadId() { @@ -58,6 +68,24 @@ bool IsThreadRefEqual(const PlatformThreadRef& a, const PlatformThreadRef& b) { void SetCurrentThreadName(const char* name) { #if defined(WEBRTC_WIN) + // The SetThreadDescription API works even if no debugger is attached. + // The names set with this API also show up in ETW traces. Very handy. + static auto set_thread_description_func = + reinterpret_cast(::GetProcAddress( + ::GetModuleHandleA("Kernel32.dll"), "SetThreadDescription")); + if (set_thread_description_func) { + // Convert from ASCII to UTF-16. 
+ wchar_t wide_thread_name[64]; + for (size_t i = 0; i < arraysize(wide_thread_name) - 1; ++i) { + wide_thread_name[i] = name[i]; + if (wide_thread_name[i] == L'\0') + break; + } + // Guarantee null-termination. + wide_thread_name[arraysize(wide_thread_name) - 1] = L'\0'; + set_thread_description_func(::GetCurrentThread(), wide_thread_name); + } + // For details see: // https://docs.microsoft.com/en-us/visualstudio/debugger/how-to-set-a-thread-name-in-native-code #pragma pack(push, 8) diff --git a/rtc_base/platform_thread_unittest.cc b/rtc_base/platform_thread_unittest.cc index 3f0408aa4b..a52e4cd9f5 100644 --- a/rtc_base/platform_thread_unittest.cc +++ b/rtc_base/platform_thread_unittest.cc @@ -10,7 +10,6 @@ #include "rtc_base/platform_thread.h" -#include "system_wrappers/include/sleep.h" #include "test/gtest.h" namespace rtc { diff --git a/rtc_base/random.cc b/rtc_base/random.cc index 5deb621727..0797660a26 100644 --- a/rtc_base/random.cc +++ b/rtc_base/random.cc @@ -49,14 +49,14 @@ int32_t Random::Rand(int32_t low, int32_t high) { template <> float Random::Rand() { double result = NextOutput() - 1; - result = result / 0xFFFFFFFFFFFFFFFEull; + result = result / (double) 0xFFFFFFFFFFFFFFFEull; return static_cast(result); } template <> double Random::Rand() { double result = NextOutput() - 1; - result = result / 0xFFFFFFFFFFFFFFFEull; + result = result / (double) 0xFFFFFFFFFFFFFFFEull; return result; } @@ -72,8 +72,8 @@ double Random::Gaussian(double mean, double standard_deviation) { // in the range [1, 2^64-1]. Normally this behavior is a bit frustrating, // but here it is exactly what we need. 
const double kPi = 3.14159265358979323846; - double u1 = static_cast(NextOutput()) / 0xFFFFFFFFFFFFFFFFull; - double u2 = static_cast(NextOutput()) / 0xFFFFFFFFFFFFFFFFull; + double u1 = static_cast(NextOutput()) / (double) 0xFFFFFFFFFFFFFFFFull; + double u2 = static_cast(NextOutput()) / (double) 0xFFFFFFFFFFFFFFFFull; return mean + standard_deviation * sqrt(-2 * log(u1)) * cos(2 * kPi * u2); } diff --git a/rtc_base/random.h b/rtc_base/random.h index 93241a3e97..0e2d103cb6 100644 --- a/rtc_base/random.h +++ b/rtc_base/random.h @@ -16,7 +16,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,6 +34,10 @@ class Random { // See also discussion here: https://codereview.webrtc.org/1623543002/ explicit Random(uint64_t seed); + Random() = delete; + Random(const Random&) = delete; + Random& operator=(const Random&) = delete; + // Return pseudo-random integer of the specified type. // We need to limit the size to 32 bits to keep the output close to uniform. template @@ -73,8 +76,6 @@ class Random { } uint64_t state_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Random); }; // Return pseudo-random number in the interval [0.0, 1.0). 
diff --git a/rtc_base/random_unittest.cc b/rtc_base/random_unittest.cc index f94b0c13fe..4eb6f754eb 100644 --- a/rtc_base/random_unittest.cc +++ b/rtc_base/random_unittest.cc @@ -120,7 +120,7 @@ void BucketTestSignedInterval(unsigned int bucket_count, ASSERT_GE(high, low); ASSERT_GE(bucket_count, 2u); - uint32_t interval = unsigned_difference(high, low) + 1; + uint32_t interval = webrtc_impl::unsigned_difference(high, low) + 1; uint32_t numbers_per_bucket; if (interval == 0) { // The computation high - low + 1 should be 2^32 but overflowed @@ -136,7 +136,8 @@ void BucketTestSignedInterval(unsigned int bucket_count, int32_t sample = prng->Rand(low, high); EXPECT_LE(low, sample); EXPECT_GE(high, sample); - buckets[unsigned_difference(sample, low) / numbers_per_bucket]++; + buckets[webrtc_impl::unsigned_difference(sample, low) / + numbers_per_bucket]++; } for (unsigned int i = 0; i < bucket_count; i++) { diff --git a/rtc_base/rate_limiter.cc b/rtc_base/rate_limiter.cc index 7394c3eb89..0f3f343aed 100644 --- a/rtc_base/rate_limiter.cc +++ b/rtc_base/rate_limiter.cc @@ -31,7 +31,7 @@ RateLimiter::~RateLimiter() {} // calling SetMaxRate() and a timed maintenance thread periodically updating // the RTT. bool RateLimiter::TryUseRate(size_t packet_size_bytes) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); int64_t now_ms = clock_->TimeInMilliseconds(); absl::optional current_rate = current_rate_.Rate(now_ms); if (current_rate) { @@ -53,14 +53,14 @@ bool RateLimiter::TryUseRate(size_t packet_size_bytes) { } void RateLimiter::SetMaxRate(uint32_t max_rate_bps) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); max_rate_bps_ = max_rate_bps; } // Set the window size over which to measure the current bitrate. // For retransmissions, this is typically the RTT. 
bool RateLimiter::SetWindowSize(int64_t window_size_ms) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); window_size_ms_ = window_size_ms; return current_rate_.SetWindowSize(window_size_ms, clock_->TimeInMilliseconds()); diff --git a/rtc_base/rate_limiter.h b/rtc_base/rate_limiter.h index 1c956d788b..9bbe21f9ca 100644 --- a/rtc_base/rate_limiter.h +++ b/rtc_base/rate_limiter.h @@ -14,9 +14,8 @@ #include #include -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -29,6 +28,11 @@ class Clock; class RateLimiter { public: RateLimiter(Clock* clock, int64_t max_window_ms); + + RateLimiter() = delete; + RateLimiter(const RateLimiter&) = delete; + RateLimiter& operator=(const RateLimiter&) = delete; + ~RateLimiter(); // Try to use rate to send bytes. Returns true on success and if so updates @@ -45,12 +49,10 @@ class RateLimiter { private: Clock* const clock_; - rtc::CriticalSection lock_; + Mutex lock_; RateStatistics current_rate_ RTC_GUARDED_BY(lock_); int64_t window_size_ms_ RTC_GUARDED_BY(lock_); uint32_t max_rate_bps_ RTC_GUARDED_BY(lock_); - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RateLimiter); }; } // namespace webrtc diff --git a/rtc_base/rate_statistics.cc b/rtc_base/rate_statistics.cc index c4c2e78581..85621fa555 100644 --- a/rtc_base/rate_statistics.cc +++ b/rtc_base/rate_statistics.cc @@ -20,29 +20,26 @@ namespace webrtc { +RateStatistics::Bucket::Bucket(int64_t timestamp) + : sum(0), num_samples(0), timestamp(timestamp) {} + RateStatistics::RateStatistics(int64_t window_size_ms, float scale) - : buckets_(new Bucket[window_size_ms]()), - accumulated_count_(0), + : accumulated_count_(0), + first_timestamp_(-1), num_samples_(0), - oldest_time_(-window_size_ms), - oldest_index_(0), scale_(scale), max_window_size_ms_(window_size_ms), current_window_size_ms_(max_window_size_ms_) {} 
RateStatistics::RateStatistics(const RateStatistics& other) - : accumulated_count_(other.accumulated_count_), + : buckets_(other.buckets_), + accumulated_count_(other.accumulated_count_), + first_timestamp_(other.first_timestamp_), overflow_(other.overflow_), num_samples_(other.num_samples_), - oldest_time_(other.oldest_time_), - oldest_index_(other.oldest_index_), scale_(other.scale_), max_window_size_ms_(other.max_window_size_ms_), - current_window_size_ms_(other.current_window_size_ms_) { - buckets_ = std::make_unique(other.max_window_size_ms_); - std::copy(other.buckets_.get(), - other.buckets_.get() + other.max_window_size_ms_, buckets_.get()); -} + current_window_size_ms_(other.current_window_size_ms_) {} RateStatistics::RateStatistics(RateStatistics&& other) = default; @@ -52,33 +49,33 @@ void RateStatistics::Reset() { accumulated_count_ = 0; overflow_ = false; num_samples_ = 0; - oldest_time_ = -max_window_size_ms_; - oldest_index_ = 0; + first_timestamp_ = -1; current_window_size_ms_ = max_window_size_ms_; - for (int64_t i = 0; i < max_window_size_ms_; i++) - buckets_[i] = Bucket(); + buckets_.clear(); } void RateStatistics::Update(int64_t count, int64_t now_ms) { - RTC_DCHECK_LE(0, count); - if (now_ms < oldest_time_) { - // Too old data is ignored. - return; - } + RTC_DCHECK_GE(count, 0); EraseOld(now_ms); + if (first_timestamp_ == -1) { + first_timestamp_ = now_ms; + } + + if (buckets_.empty() || now_ms != buckets_.back().timestamp) { + if (!buckets_.empty() && now_ms < buckets_.back().timestamp) { + RTC_LOG(LS_WARNING) << "Timestamp " << now_ms + << " is before the last added " + "timestamp in the rate window: " + << buckets_.back().timestamp << ", aligning to that."; + now_ms = buckets_.back().timestamp; + } + buckets_.emplace_back(now_ms); + } + Bucket& last_bucket = buckets_.back(); + last_bucket.sum += count; + ++last_bucket.num_samples; - // First ever sample, reset window to start now. 
- if (!IsInitialized())
- oldest_time_ = now_ms;
-
- uint32_t now_offset = rtc::dchecked_cast(now_ms - oldest_time_);
- RTC_DCHECK_LT(now_offset, max_window_size_ms_);
- uint32_t index = oldest_index_ + now_offset;
- if (index >= max_window_size_ms_)
- index -= max_window_size_ms_;
- buckets_[index].sum += count;
- ++buckets_[index].samples;
 if (std::numeric_limits::max() - accumulated_count_ > count) {
 accumulated_count_ += count;
 } else {
@@ -92,10 +89,22 @@ absl::optional RateStatistics::Rate(int64_t now_ms) const {
 // of the members as mutable...
 const_cast(this)->EraseOld(now_ms);
+ int active_window_size = 0;
+ if (first_timestamp_ != -1) {
+ if (first_timestamp_ <= now_ms - current_window_size_ms_) {
+ // Count window as full even if no data points currently in view, if the
+ // data stream started before the window.
+ active_window_size = current_window_size_ms_;
+ } else {
+ // Size of a single bucket is 1ms, so even if now_ms == first_timestamp_
+ // the window size should be 1.
+ active_window_size = now_ms - first_timestamp_ + 1;
+ }
+ }
+
 // If window is a single bucket or there is only one sample in a data set that
 // has not grown to the full window size, or if the accumulator has
 // overflowed, treat this as rate unavailable.
- int active_window_size = now_ms - oldest_time_ + 1;
 if (num_samples_ == 0 || active_window_size <= 1 ||
 (num_samples_ <= 1 &&
 rtc::SafeLt(active_window_size, current_window_size_ms_)) ||
@@ -114,43 +123,35 @@ absl::optional RateStatistics::Rate(int64_t now_ms) const {
 }

 void RateStatistics::EraseOld(int64_t now_ms) {
- if (!IsInitialized())
- return;
- // New oldest time that is included in data set.
- int64_t new_oldest_time = now_ms - current_window_size_ms_ + 1;
-
- // New oldest time is older than the current one, no need to cull data.
- if (new_oldest_time <= oldest_time_)
- return;
+ const int64_t new_oldest_time = now_ms - current_window_size_ms_ + 1;

 // Loop over buckets and remove too old data points.
- while (num_samples_ > 0 && oldest_time_ < new_oldest_time) {
- const Bucket& oldest_bucket = buckets_[oldest_index_];
+ while (!buckets_.empty() && buckets_.front().timestamp < new_oldest_time) {
+ const Bucket& oldest_bucket = buckets_.front();
 RTC_DCHECK_GE(accumulated_count_, oldest_bucket.sum);
- RTC_DCHECK_GE(num_samples_, oldest_bucket.samples);
+ RTC_DCHECK_GE(num_samples_, oldest_bucket.num_samples);
 accumulated_count_ -= oldest_bucket.sum;
- num_samples_ -= oldest_bucket.samples;
- buckets_[oldest_index_] = Bucket();
- if (++oldest_index_ >= max_window_size_ms_)
- oldest_index_ = 0;
- ++oldest_time_;
+ num_samples_ -= oldest_bucket.num_samples;
+ buckets_.pop_front();
 // This does not clear overflow_ even when counter is empty.
 // TODO(https://bugs.webrtc.org/11247): Consider if overflow_ can be reset.
 }
- oldest_time_ = new_oldest_time;
 }

 bool RateStatistics::SetWindowSize(int64_t window_size_ms, int64_t now_ms) {
 if (window_size_ms <= 0 || window_size_ms > max_window_size_ms_)
 return false;
+ if (first_timestamp_ != -1) {
+ // If the window changes (e.g. decreases - removing data point, then
+ // increases again) we need to update the first timestamp mark as
+ // otherwise it indicates the window covers a region of zeros, suddenly
+ // under-estimating the rate.
+ first_timestamp_ = std::max(first_timestamp_, now_ms - window_size_ms + 1);
+ }
 current_window_size_ms_ = window_size_ms;
 EraseOld(now_ms);
 return true;
 }

-bool RateStatistics::IsInitialized() const {
- return oldest_time_ != -max_window_size_ms_;
-}
-
 } // namespace webrtc
diff --git a/rtc_base/rate_statistics.h b/rtc_base/rate_statistics.h
index 11c8cee7af..dc8d7f5272 100644
--- a/rtc_base/rate_statistics.h
+++ b/rtc_base/rate_statistics.h
@@ -14,6 +14,7 @@

 #include
 #include
+#include
 #include

 #include "absl/types/optional.h"
@@ -28,6 +29,10 @@ namespace webrtc {
 // high; for instance, a 20 Mbit/sec video stream can wrap a 32-bit byte
 // counter in 14 minutes.
+// Note that timestamps used in Update(), Rate() and SetWindowSize() must never +// decrease for two consecutive calls. +// TODO(bugs.webrtc.org/11600): Migrate from int64_t to Timestamp. + class RTC_EXPORT RateStatistics { public: static constexpr float kBpsScale = 8000.0f; @@ -65,19 +70,22 @@ class RTC_EXPORT RateStatistics { private: void EraseOld(int64_t now_ms); - bool IsInitialized() const; - // Counters are kept in buckets (circular buffer), with one bucket - // per millisecond. struct Bucket { + explicit Bucket(int64_t timestamp); int64_t sum; // Sum of all samples in this bucket. - int samples; // Number of samples in this bucket. + int num_samples; // Number of samples in this bucket. + const int64_t timestamp; // Timestamp this bucket corresponds to. }; - std::unique_ptr buckets_; + // All buckets within the time window, ordered by time. + std::deque buckets_; - // Total count recorded in buckets. + // Total count recorded in all buckets. int64_t accumulated_count_; + // Timestamp of the first data point seen, or -1 of none seen. + int64_t first_timestamp_; + // True if accumulated_count_ has ever grown too large to be // contained in its integer type. bool overflow_ = false; @@ -85,12 +93,6 @@ class RTC_EXPORT RateStatistics { // The total number of samples in the buckets. int num_samples_; - // Oldest time recorded in buckets. - int64_t oldest_time_; - - // Bucket index of oldest counter recorded in buckets. 
- int64_t oldest_index_; - // To convert counts/ms to desired units const float scale_; diff --git a/rtc_base/rolling_accumulator.h b/rtc_base/rolling_accumulator.h index 015229b04c..241bd72a11 100644 --- a/rtc_base/rolling_accumulator.h +++ b/rtc_base/rolling_accumulator.h @@ -40,7 +40,7 @@ class RollingAccumulator { size_t count() const { return static_cast(stats_.Size()); } void Reset() { - stats_ = webrtc::RunningStatistics(); + stats_ = webrtc::webrtc_impl::RunningStatistics(); next_index_ = 0U; max_ = T(); max_stale_ = false; @@ -129,7 +129,7 @@ class RollingAccumulator { double ComputeVariance() const { return stats_.GetVariance().value_or(0); } private: - webrtc::RunningStatistics stats_; + webrtc::webrtc_impl::RunningStatistics stats_; size_t next_index_; mutable T max_; mutable bool max_stale_; diff --git a/rtc_base/rtc_certificate.cc b/rtc_base/rtc_certificate.cc index 1edc393e6c..04ae99685d 100644 --- a/rtc_base/rtc_certificate.cc +++ b/rtc_base/rtc_certificate.cc @@ -64,7 +64,7 @@ RTCCertificatePEM RTCCertificate::ToPEM() const { scoped_refptr RTCCertificate::FromPEM( const RTCCertificatePEM& pem) { std::unique_ptr identity( - SSLIdentity::FromPEMStrings(pem.private_key(), pem.certificate())); + SSLIdentity::CreateFromPEMStrings(pem.private_key(), pem.certificate())); if (!identity) return nullptr; return new RefCountedObject(identity.release()); diff --git a/rtc_base/rtc_certificate_generator.cc b/rtc_base/rtc_certificate_generator.cc index cd9cccedf7..d95b645396 100644 --- a/rtc_base/rtc_certificate_generator.cc +++ b/rtc_base/rtc_certificate_generator.cc @@ -30,75 +30,6 @@ namespace { const char kIdentityName[] = "WebRTC"; const uint64_t kYearInSeconds = 365 * 24 * 60 * 60; -enum { - MSG_GENERATE, - MSG_GENERATE_DONE, -}; - -// Helper class for generating certificates asynchronously; a single task -// instance is responsible for a single asynchronous certificate generation -// request. 
We are using a separate helper class so that a generation request -// can outlive the |RTCCertificateGenerator| that spawned it. -class RTCCertificateGenerationTask : public RefCountInterface, - public MessageHandler { - public: - RTCCertificateGenerationTask( - Thread* signaling_thread, - Thread* worker_thread, - const KeyParams& key_params, - const absl::optional& expires_ms, - const scoped_refptr& callback) - : signaling_thread_(signaling_thread), - worker_thread_(worker_thread), - key_params_(key_params), - expires_ms_(expires_ms), - callback_(callback) { - RTC_DCHECK(signaling_thread_); - RTC_DCHECK(worker_thread_); - RTC_DCHECK(callback_); - } - ~RTCCertificateGenerationTask() override {} - - // Handles |MSG_GENERATE| and its follow-up |MSG_GENERATE_DONE|. - void OnMessage(Message* msg) override { - switch (msg->message_id) { - case MSG_GENERATE: - RTC_DCHECK(worker_thread_->IsCurrent()); - // Perform the certificate generation work here on the worker thread. - certificate_ = RTCCertificateGenerator::GenerateCertificate( - key_params_, expires_ms_); - // Handle callbacks on signaling thread. Pass on the |msg->pdata| - // (which references |this| with ref counting) to that thread. - signaling_thread_->Post(RTC_FROM_HERE, this, MSG_GENERATE_DONE, - msg->pdata); - break; - case MSG_GENERATE_DONE: - RTC_DCHECK(signaling_thread_->IsCurrent()); - // Perform callback with result here on the signaling thread. - if (certificate_) { - callback_->OnSuccess(certificate_); - } else { - callback_->OnFailure(); - } - // Destroy |msg->pdata| which references |this| with ref counting. This - // may result in |this| being deleted - do not touch member variables - // after this line. 
- delete msg->pdata; - return; - default: - RTC_NOTREACHED(); - } - } - - private: - Thread* const signaling_thread_; - Thread* const worker_thread_; - const KeyParams key_params_; - const absl::optional expires_ms_; - const scoped_refptr callback_; - scoped_refptr certificate_; -}; - } // namespace // static @@ -109,9 +40,9 @@ scoped_refptr RTCCertificateGenerator::GenerateCertificate( return nullptr; } - SSLIdentity* identity = nullptr; + std::unique_ptr identity; if (!expires_ms) { - identity = SSLIdentity::Generate(kIdentityName, key_params); + identity = SSLIdentity::Create(kIdentityName, key_params); } else { uint64_t expires_s = *expires_ms / 1000; // Limit the expiration time to something reasonable (a year). This was @@ -123,14 +54,12 @@ scoped_refptr RTCCertificateGenerator::GenerateCertificate( // |SSLIdentity::Generate| should stop relying on |time_t|. // See bugs.webrtc.org/5720. time_t cert_lifetime_s = static_cast(expires_s); - identity = SSLIdentity::GenerateWithExpiration(kIdentityName, key_params, - cert_lifetime_s); + identity = SSLIdentity::Create(kIdentityName, key_params, cert_lifetime_s); } if (!identity) { return nullptr; } - std::unique_ptr identity_sptr(identity); - return RTCCertificate::Create(std::move(identity_sptr)); + return RTCCertificate::Create(std::move(identity)); } RTCCertificateGenerator::RTCCertificateGenerator(Thread* signaling_thread, @@ -150,13 +79,16 @@ void RTCCertificateGenerator::GenerateCertificateAsync( // Create a new |RTCCertificateGenerationTask| for this generation request. It // is reference counted and referenced by the message data, ensuring it lives // until the task has completed (independent of |RTCCertificateGenerator|). 
- ScopedRefMessageData* msg_data = - new ScopedRefMessageData( - new RefCountedObject( - signaling_thread_, worker_thread_, key_params, expires_ms, - callback)); - worker_thread_->Post(RTC_FROM_HERE, msg_data->data().get(), MSG_GENERATE, - msg_data); + worker_thread_->PostTask(RTC_FROM_HERE, [key_params, expires_ms, + signaling_thread = signaling_thread_, + cb = callback]() { + scoped_refptr certificate = + RTCCertificateGenerator::GenerateCertificate(key_params, expires_ms); + signaling_thread->PostTask( + RTC_FROM_HERE, [cert = std::move(certificate), cb = std::move(cb)]() { + cert ? cb->OnSuccess(cert) : cb->OnFailure(); + }); + }); } } // namespace rtc diff --git a/rtc_base/rtc_certificate_generator_unittest.cc b/rtc_base/rtc_certificate_generator_unittest.cc index 959e65a4c4..2d6986a9ea 100644 --- a/rtc_base/rtc_certificate_generator_unittest.cc +++ b/rtc_base/rtc_certificate_generator_unittest.cc @@ -73,7 +73,7 @@ class RTCCertificateGeneratorTest : public ::testing::Test { : fixture_(new RefCountedObject()) {} protected: - static const int kGenerationTimeoutMs = 10000; + static constexpr int kGenerationTimeoutMs = 10000; scoped_refptr fixture_; }; diff --git a/rtc_base/rtc_certificate_unittest.cc b/rtc_base/rtc_certificate_unittest.cc index 1150eee0ab..96bd67ba85 100644 --- a/rtc_base/rtc_certificate_unittest.cc +++ b/rtc_base/rtc_certificate_unittest.cc @@ -33,7 +33,7 @@ class RTCCertificateTest : public ::testing::Test { protected: scoped_refptr GenerateECDSA() { std::unique_ptr identity( - SSLIdentity::Generate(kTestCertCommonName, KeyParams::ECDSA())); + SSLIdentity::Create(kTestCertCommonName, KeyParams::ECDSA())); RTC_CHECK(identity); return RTCCertificate::Create(std::move(identity)); } @@ -78,7 +78,7 @@ class RTCCertificateTest : public ::testing::Test { // is fast to generate. 
params.key_params = KeyParams::ECDSA(); - std::unique_ptr identity(SSLIdentity::GenerateForTest(params)); + std::unique_ptr identity(SSLIdentity::CreateForTest(params)); return RTCCertificate::Create(std::move(identity)); } }; diff --git a/rtc_base/signal_thread.h b/rtc_base/signal_thread.h index d9e8ade9b0..b444d54994 100644 --- a/rtc_base/signal_thread.h +++ b/rtc_base/signal_thread.h @@ -1,5 +1,5 @@ /* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,147 +11,9 @@ #ifndef RTC_BASE_SIGNAL_THREAD_H_ #define RTC_BASE_SIGNAL_THREAD_H_ -#include - -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" - -namespace rtc { - -/////////////////////////////////////////////////////////////////////////////// -// SignalThread - Base class for worker threads. The main thread should call -// Start() to begin work, and then follow one of these models: -// Normal: Wait for SignalWorkDone, and then call Release to destroy. -// Cancellation: Call Release(true), to abort the worker thread. -// Fire-and-forget: Call Release(false), which allows the thread to run to -// completion, and then self-destruct without further notification. -// Periodic tasks: Wait for SignalWorkDone, then eventually call Start() -// again to repeat the task. When the instance isn't needed anymore, -// call Release. DoWork, OnWorkStart and OnWorkStop are called again, -// on a new thread. -// The subclass should override DoWork() to perform the background task. By -// periodically calling ContinueWork(), it can check for cancellation. 
-// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work -// tasks in the context of the main thread. -/////////////////////////////////////////////////////////////////////////////// - -class SignalThread : public sigslot::has_slots<>, protected MessageHandler { - public: - SignalThread(); - - // Context: Main Thread. Call before Start to change the worker's name. - bool SetName(const std::string& name, const void* obj); - - // Context: Main Thread. Call to begin the worker thread. - void Start(); - - // Context: Main Thread. If the worker thread is not running, deletes the - // object immediately. Otherwise, asks the worker thread to abort processing, - // and schedules the object to be deleted once the worker exits. - // SignalWorkDone will not be signalled. If wait is true, does not return - // until the thread is deleted. - void Destroy(bool wait); - - // Context: Main Thread. If the worker thread is complete, deletes the - // object immediately. Otherwise, schedules the object to be deleted once - // the worker thread completes. SignalWorkDone will be signalled. - void Release(); - - // Context: Main Thread. Signalled when work is complete. - sigslot::signal1 SignalWorkDone; - - enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE }; - - protected: - ~SignalThread() override; - - Thread* worker() { return &worker_; } - - // Context: Main Thread. Subclass should override to do pre-work setup. - virtual void OnWorkStart() {} - - // Context: Worker Thread. Subclass should override to do work. - virtual void DoWork() = 0; - - // Context: Worker Thread. Subclass should call periodically to - // dispatch messages and determine if the thread should terminate. - bool ContinueWork(); - - // Context: Worker Thread. Subclass should override when extra work is - // needed to abort the worker thread. - virtual void OnWorkStop() {} - - // Context: Main Thread. Subclass should override to do post-work cleanup. 
- virtual void OnWorkDone() {} - - // Context: Any Thread. If subclass overrides, be sure to call the base - // implementation. Do not use (message_id < ST_MSG_FIRST_AVAILABLE) - void OnMessage(Message* msg) override; - - private: - enum State { - kInit, // Initialized, but not started - kRunning, // Started and doing work - kReleasing, // Same as running, but to be deleted when work is done - kComplete, // Work is done - kStopping, // Work is being interrupted - }; - - class Worker : public Thread { - public: - explicit Worker(SignalThread* parent); - ~Worker() override; - void Run() override; - bool IsProcessingMessagesForTesting() override; - - private: - SignalThread* parent_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Worker); - }; - - class RTC_SCOPED_LOCKABLE EnterExit { - public: - explicit EnterExit(SignalThread* t) RTC_EXCLUSIVE_LOCK_FUNCTION(t->cs_) - : t_(t) { - t_->cs_.Enter(); - // If refcount_ is zero then the object has already been deleted and we - // will be double-deleting it in ~EnterExit()! (shouldn't happen) - RTC_DCHECK_NE(0, t_->refcount_); - ++t_->refcount_; - } - ~EnterExit() RTC_UNLOCK_FUNCTION() { - bool d = (0 == --t_->refcount_); - t_->cs_.Leave(); - if (d) - delete t_; - } - - private: - SignalThread* t_; - - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(EnterExit); - }; - - void Run(); - void OnMainThreadDestroyed(); - - Thread* main_; - Worker worker_; - CriticalSection cs_; - State state_; - int refcount_; - - RTC_DISALLOW_COPY_AND_ASSIGN(SignalThread); -}; - -/////////////////////////////////////////////////////////////////////////////// - -} // namespace rtc +// The facilities in this file have been deprecated. Please do not use them +// in new code. New code should use factilities exposed by api/task_queue/ +// instead. 
+#include "rtc_base/deprecated/signal_thread.h" #endif // RTC_BASE_SIGNAL_THREAD_H_ diff --git a/rtc_base/socket_unittest.cc b/rtc_base/socket_unittest.cc index 2af3a8ef88..82e2f6d4b2 100644 --- a/rtc_base/socket_unittest.cc +++ b/rtc_base/socket_unittest.cc @@ -149,6 +149,15 @@ void SocketTest::TestCloseInClosedCallbackIPv6() { CloseInClosedCallbackInternal(kIPv6Loopback); } +void SocketTest::TestDeleteInReadCallbackIPv4() { + DeleteInReadCallbackInternal(kIPv4Loopback); +} + +void SocketTest::TestDeleteInReadCallbackIPv6() { + MAYBE_SKIP_IPV6; + DeleteInReadCallbackInternal(kIPv6Loopback); +} + void SocketTest::TestSocketServerWaitIPv4() { SocketServerWaitInternal(kIPv4Loopback); } @@ -651,7 +660,43 @@ void SocketTest::CloseInClosedCallbackInternal(const IPAddress& loopback) { EXPECT_TRUE(Socket::CS_CLOSED == client->GetState()); } -class Sleeper : public MessageHandler { +// Helper class specifically for the test below. +class SocketDeleter : public sigslot::has_slots<> { + public: + explicit SocketDeleter(std::unique_ptr socket) + : socket_(std::move(socket)) {} + + void Delete(AsyncSocket* other) { socket_.reset(); } + + bool deleted() const { return socket_ == nullptr; } + + private: + std::unique_ptr socket_; +}; + +// Tested deleting a socket within another socket's read callback. A previous +// iteration of the select loop failed in this situation, if both sockets +// became readable at the same time. 
+void SocketTest::DeleteInReadCallbackInternal(const IPAddress& loopback) { + std::unique_ptr socket1( + ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM)); + std::unique_ptr socket2( + ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM)); + EXPECT_EQ(0, socket1->Bind(SocketAddress(loopback, 0))); + EXPECT_EQ(0, socket2->Bind(SocketAddress(loopback, 0))); + EXPECT_EQ(3, socket1->SendTo("foo", 3, socket1->GetLocalAddress())); + EXPECT_EQ(3, socket2->SendTo("bar", 3, socket1->GetLocalAddress())); + // Sleep a while to ensure sends are both completed at the same time. + Thread::SleepMs(1000); + + // Configure the helper class to delete socket 2 when socket 1 has a read + // event. + SocketDeleter deleter(std::move(socket2)); + socket1->SignalReadEvent.connect(&deleter, &SocketDeleter::Delete); + EXPECT_TRUE_WAIT(deleter.deleted(), kTimeout); +} + +class Sleeper : public MessageHandlerAutoCleanup { public: void OnMessage(Message* msg) override { Thread::Current()->SleepMs(500); } }; @@ -1027,6 +1072,15 @@ void SocketTest::GetSetOptionsInternal(const IPAddress& loopback) { int current_nd, desired_nd = 1; ASSERT_EQ(-1, socket->GetOption(Socket::OPT_NODELAY, ¤t_nd)); ASSERT_EQ(-1, socket->SetOption(Socket::OPT_NODELAY, desired_nd)); + +#if defined(WEBRTC_POSIX) + // Check DSCP. 
+ int current_dscp, desired_dscp = 1; + ASSERT_NE(-1, socket->GetOption(Socket::OPT_DSCP, ¤t_dscp)); + ASSERT_NE(-1, socket->SetOption(Socket::OPT_DSCP, desired_dscp)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_DSCP, ¤t_dscp)); + ASSERT_EQ(desired_dscp, current_dscp); +#endif } void SocketTest::SocketRecvTimestamp(const IPAddress& loopback) { diff --git a/rtc_base/socket_unittest.h b/rtc_base/socket_unittest.h index 5197ccd82d..91ef39c59e 100644 --- a/rtc_base/socket_unittest.h +++ b/rtc_base/socket_unittest.h @@ -46,6 +46,8 @@ class SocketTest : public ::testing::Test { void TestServerCloseIPv6(); void TestCloseInClosedCallbackIPv4(); void TestCloseInClosedCallbackIPv6(); + void TestDeleteInReadCallbackIPv4(); + void TestDeleteInReadCallbackIPv6(); void TestSocketServerWaitIPv4(); void TestSocketServerWaitIPv6(); void TestTcpIPv4(); @@ -83,6 +85,7 @@ class SocketTest : public ::testing::Test { void ClientCloseDuringConnectInternal(const IPAddress& loopback); void ServerCloseInternal(const IPAddress& loopback); void CloseInClosedCallbackInternal(const IPAddress& loopback); + void DeleteInReadCallbackInternal(const IPAddress& loopback); void SocketServerWaitInternal(const IPAddress& loopback); void SingleFlowControlCallbackInternal(const IPAddress& loopback); void UdpInternal(const IPAddress& loopback); diff --git a/rtc_base/ssl_adapter.h b/rtc_base/ssl_adapter.h index e0ed81eaf3..805fdc7ba0 100644 --- a/rtc_base/ssl_adapter.h +++ b/rtc_base/ssl_adapter.h @@ -69,7 +69,8 @@ class SSLAdapter : public AsyncSocketAdapter { virtual void SetCertVerifier(SSLCertificateVerifier* ssl_cert_verifier) = 0; // Set the certificate this socket will present to incoming clients. - virtual void SetIdentity(SSLIdentity* identity) = 0; + // Takes ownership of |identity|. + virtual void SetIdentity(std::unique_ptr identity) = 0; // Choose whether the socket acts as a server socket or client socket. 
virtual void SetRole(SSLRole role) = 0; @@ -77,8 +78,7 @@ class SSLAdapter : public AsyncSocketAdapter { // StartSSL returns 0 if successful. // If StartSSL is called while the socket is closed or connecting, the SSL // negotiation will begin as soon as the socket connects. - // TODO(juberti): Remove |restartable|. - virtual int StartSSL(const char* hostname, bool restartable = false) = 0; + virtual int StartSSL(const char* hostname) = 0; // When an SSLAdapterFactory is used, an SSLAdapter may be used to resume // a previous SSL session, which results in an abbreviated handshake. diff --git a/rtc_base/ssl_adapter_unittest.cc b/rtc_base/ssl_adapter_unittest.cc index 3fa12217f7..498eba312b 100644 --- a/rtc_base/ssl_adapter_unittest.cc +++ b/rtc_base/ssl_adapter_unittest.cc @@ -12,6 +12,7 @@ #include #include +#include "absl/memory/memory.h" #include "rtc_base/gunit.h" #include "rtc_base/ip_address.h" #include "rtc_base/message_digest.h" @@ -49,7 +50,7 @@ static std::string GetSSLProtocolName(const rtc::SSLMode& ssl_mode) { class MockCertVerifier : public rtc::SSLCertificateVerifier { public: virtual ~MockCertVerifier() = default; - MOCK_METHOD1(Verify, bool(const rtc::SSLCertificate&)); + MOCK_METHOD(bool, Verify, (const rtc::SSLCertificate&), (override)); }; // TODO(benwright) - Move to using INSTANTIATE_TEST_SUITE_P instead of using @@ -110,7 +111,7 @@ class SSLAdapterTestDummyClient : public sigslot::has_slots<> { RTC_LOG(LS_INFO) << "Starting " << GetSSLProtocolName(ssl_mode_) << " handshake with " << hostname; - if (ssl_adapter_->StartSSL(hostname.c_str(), false) != 0) { + if (ssl_adapter_->StartSSL(hostname.c_str()) != 0) { return -1; } } @@ -163,7 +164,7 @@ class SSLAdapterTestDummyServer : public sigslot::has_slots<> { const rtc::KeyParams& key_params) : ssl_mode_(ssl_mode) { // Generate a key pair and a certificate for this host. 
- ssl_identity_.reset(rtc::SSLIdentity::Generate(GetHostname(), key_params)); + ssl_identity_ = rtc::SSLIdentity::Create(GetHostname(), key_params); server_socket_.reset(CreateSocket(ssl_mode_)); @@ -254,9 +255,8 @@ class SSLAdapterTestDummyServer : public sigslot::has_slots<> { private: void DoHandshake(rtc::AsyncSocket* socket) { - rtc::SocketStream* stream = new rtc::SocketStream(socket); - - ssl_stream_adapter_.reset(rtc::SSLStreamAdapter::Create(stream)); + ssl_stream_adapter_ = rtc::SSLStreamAdapter::Create( + std::make_unique(socket)); ssl_stream_adapter_->SetMode(ssl_mode_); ssl_stream_adapter_->SetServerRole(); @@ -268,7 +268,7 @@ class SSLAdapterTestDummyServer : public sigslot::has_slots<> { // Accordingly, we must disable client authentication here. ssl_stream_adapter_->SetClientAuthEnabledForTesting(false); - ssl_stream_adapter_->SetIdentity(ssl_identity_->GetReference()); + ssl_stream_adapter_->SetIdentity(ssl_identity_->Clone()); // Set a bogus peer certificate digest. unsigned char digest[20]; diff --git a/rtc_base/ssl_identity.cc b/rtc_base/ssl_identity.cc index 64c0f67297..09d25d228e 100644 --- a/rtc_base/ssl_identity.cc +++ b/rtc_base/ssl_identity.cc @@ -210,42 +210,48 @@ std::string SSLIdentity::DerToPem(const std::string& pem_type, } // static -SSLIdentity* SSLIdentity::GenerateWithExpiration(const std::string& common_name, - const KeyParams& key_params, +std::unique_ptr SSLIdentity::Create(const std::string& common_name, + const KeyParams& key_param, time_t certificate_lifetime) { - return OpenSSLIdentity::GenerateWithExpiration(common_name, key_params, - certificate_lifetime); + return OpenSSLIdentity::CreateWithExpiration(common_name, key_param, + certificate_lifetime); } // static -SSLIdentity* SSLIdentity::Generate(const std::string& common_name, - const KeyParams& key_params) { - return OpenSSLIdentity::GenerateWithExpiration( - common_name, key_params, kDefaultCertificateLifetimeInSeconds); +std::unique_ptr SSLIdentity::Create(const 
std::string& common_name, + const KeyParams& key_param) { + return OpenSSLIdentity::CreateWithExpiration( + common_name, key_param, kDefaultCertificateLifetimeInSeconds); } // static -SSLIdentity* SSLIdentity::Generate(const std::string& common_name, - KeyType key_type) { - return OpenSSLIdentity::GenerateWithExpiration( +std::unique_ptr SSLIdentity::Create(const std::string& common_name, + KeyType key_type) { + return OpenSSLIdentity::CreateWithExpiration( common_name, KeyParams(key_type), kDefaultCertificateLifetimeInSeconds); } -SSLIdentity* SSLIdentity::GenerateForTest(const SSLIdentityParams& params) { - return OpenSSLIdentity::GenerateForTest(params); +// static +std::unique_ptr SSLIdentity::CreateForTest( + const SSLIdentityParams& params) { + return OpenSSLIdentity::CreateForTest(params); } +// Construct an identity from a private key and a certificate. // static -SSLIdentity* SSLIdentity::FromPEMStrings(const std::string& private_key, - const std::string& certificate) { - return OpenSSLIdentity::FromPEMStrings(private_key, certificate); +std::unique_ptr SSLIdentity::CreateFromPEMStrings( + const std::string& private_key, + const std::string& certificate) { + return OpenSSLIdentity::CreateFromPEMStrings(private_key, certificate); } +// Construct an identity from a private key and a certificate chain. 
// static -SSLIdentity* SSLIdentity::FromPEMChainStrings( +std::unique_ptr SSLIdentity::CreateFromPEMChainStrings( const std::string& private_key, const std::string& certificate_chain) { - return OpenSSLIdentity::FromPEMChainStrings(private_key, certificate_chain); + return OpenSSLIdentity::CreateFromPEMChainStrings(private_key, + certificate_chain); } bool operator==(const SSLIdentity& a, const SSLIdentity& b) { diff --git a/rtc_base/ssl_identity.h b/rtc_base/ssl_identity.h index 30e456b24e..d078b045a7 100644 --- a/rtc_base/ssl_identity.h +++ b/rtc_base/ssl_identity.h @@ -15,8 +15,10 @@ #include #include +#include #include +#include "rtc_base/deprecation.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -107,34 +109,33 @@ class RTC_EXPORT SSLIdentity { // should be a non-negative number. // Returns null on failure. // Caller is responsible for freeing the returned object. - static SSLIdentity* GenerateWithExpiration(const std::string& common_name, + static std::unique_ptr Create(const std::string& common_name, const KeyParams& key_param, time_t certificate_lifetime); - static SSLIdentity* Generate(const std::string& common_name, - const KeyParams& key_param); - static SSLIdentity* Generate(const std::string& common_name, - KeyType key_type); + static std::unique_ptr Create(const std::string& common_name, + const KeyParams& key_param); + static std::unique_ptr Create(const std::string& common_name, + KeyType key_type); - // Generates an identity with the specified validity period. - // TODO(torbjorng): Now that Generate() accepts relevant params, make tests - // use that instead of this function. - static SSLIdentity* GenerateForTest(const SSLIdentityParams& params); + // Allows fine-grained control over expiration time. + static std::unique_ptr CreateForTest( + const SSLIdentityParams& params); // Construct an identity from a private key and a certificate. 
- static SSLIdentity* FromPEMStrings(const std::string& private_key, - const std::string& certificate); + static std::unique_ptr CreateFromPEMStrings( + const std::string& private_key, + const std::string& certificate); // Construct an identity from a private key and a certificate chain. - static SSLIdentity* FromPEMChainStrings(const std::string& private_key, - const std::string& certificate_chain); + static std::unique_ptr CreateFromPEMChainStrings( + const std::string& private_key, + const std::string& certificate_chain); virtual ~SSLIdentity() {} // Returns a new SSLIdentity object instance wrapping the same // identity information. - // Caller is responsible for freeing the returned object. - // TODO(hbos,torbjorng): Rename to a less confusing name. - virtual SSLIdentity* GetReference() const = 0; + std::unique_ptr Clone() const { return CloneInternal(); } // Returns a temporary reference to the end-entity (leaf) certificate. virtual const SSLCertificate& certificate() const = 0; @@ -150,6 +151,9 @@ class RTC_EXPORT SSLIdentity { static std::string DerToPem(const std::string& pem_type, const unsigned char* data, size_t length); + + protected: + virtual std::unique_ptr CloneInternal() const = 0; }; bool operator==(const SSLIdentity& a, const SSLIdentity& b); diff --git a/rtc_base/ssl_identity_unittest.cc b/rtc_base/ssl_identity_unittest.cc index 8e4d02db41..0d9d0fd859 100644 --- a/rtc_base/ssl_identity_unittest.cc +++ b/rtc_base/ssl_identity_unittest.cc @@ -194,10 +194,10 @@ IdentityAndInfo CreateFakeIdentityAndInfoFromDers( class SSLIdentityTest : public ::testing::Test { public: void SetUp() override { - identity_rsa1_.reset(SSLIdentity::Generate("test1", rtc::KT_RSA)); - identity_rsa2_.reset(SSLIdentity::Generate("test2", rtc::KT_RSA)); - identity_ecdsa1_.reset(SSLIdentity::Generate("test3", rtc::KT_ECDSA)); - identity_ecdsa2_.reset(SSLIdentity::Generate("test4", rtc::KT_ECDSA)); + identity_rsa1_ = SSLIdentity::Create("test1", rtc::KT_RSA); + identity_rsa2_ 
= SSLIdentity::Create("test2", rtc::KT_RSA); + identity_ecdsa1_ = SSLIdentity::Create("test3", rtc::KT_ECDSA); + identity_ecdsa2_ = SSLIdentity::Create("test4", rtc::KT_ECDSA); ASSERT_TRUE(identity_rsa1_); ASSERT_TRUE(identity_rsa2_); @@ -303,8 +303,8 @@ class SSLIdentityTest : public ::testing::Test { std::string priv_pem = identity.PrivateKeyToPEMString(); std::string publ_pem = identity.PublicKeyToPEMString(); std::string cert_pem = identity.certificate().ToPEMString(); - std::unique_ptr clone( - SSLIdentity::FromPEMStrings(priv_pem, cert_pem)); + std::unique_ptr clone = + SSLIdentity::CreateFromPEMStrings(priv_pem, cert_pem); EXPECT_TRUE(clone); // Make sure the clone is identical to the original. @@ -390,7 +390,7 @@ TEST_F(SSLIdentityTest, IdentityComparison) { TEST_F(SSLIdentityTest, FromPEMStringsRSA) { std::unique_ptr identity( - SSLIdentity::FromPEMStrings(kRSA_PRIVATE_KEY_PEM, kRSA_CERT_PEM)); + SSLIdentity::CreateFromPEMStrings(kRSA_PRIVATE_KEY_PEM, kRSA_CERT_PEM)); EXPECT_TRUE(identity); EXPECT_EQ(kRSA_PRIVATE_KEY_PEM, identity->PrivateKeyToPEMString()); EXPECT_EQ(kRSA_PUBLIC_KEY_PEM, identity->PublicKeyToPEMString()); @@ -398,8 +398,8 @@ TEST_F(SSLIdentityTest, FromPEMStringsRSA) { } TEST_F(SSLIdentityTest, FromPEMStringsEC) { - std::unique_ptr identity( - SSLIdentity::FromPEMStrings(kECDSA_PRIVATE_KEY_PEM, kECDSA_CERT_PEM)); + std::unique_ptr identity(SSLIdentity::CreateFromPEMStrings( + kECDSA_PRIVATE_KEY_PEM, kECDSA_CERT_PEM)); EXPECT_TRUE(identity); EXPECT_EQ(kECDSA_PRIVATE_KEY_PEM, identity->PrivateKeyToPEMString()); EXPECT_EQ(kECDSA_PUBLIC_KEY_PEM, identity->PublicKeyToPEMString()); @@ -433,7 +433,7 @@ TEST_F(SSLIdentityTest, GetSignatureDigestAlgorithm) { TEST_F(SSLIdentityTest, SSLCertificateGetStatsRSA) { std::unique_ptr identity( - SSLIdentity::FromPEMStrings(kRSA_PRIVATE_KEY_PEM, kRSA_CERT_PEM)); + SSLIdentity::CreateFromPEMStrings(kRSA_PRIVATE_KEY_PEM, kRSA_CERT_PEM)); std::unique_ptr stats = identity->certificate().GetStats(); 
EXPECT_EQ(stats->fingerprint, kRSA_FINGERPRINT); @@ -443,8 +443,8 @@ TEST_F(SSLIdentityTest, SSLCertificateGetStatsRSA) { } TEST_F(SSLIdentityTest, SSLCertificateGetStatsECDSA) { - std::unique_ptr identity( - SSLIdentity::FromPEMStrings(kECDSA_PRIVATE_KEY_PEM, kECDSA_CERT_PEM)); + std::unique_ptr identity(SSLIdentity::CreateFromPEMStrings( + kECDSA_PRIVATE_KEY_PEM, kECDSA_CERT_PEM)); std::unique_ptr stats = identity->certificate().GetStats(); EXPECT_EQ(stats->fingerprint, kECDSA_FINGERPRINT); @@ -580,14 +580,13 @@ class SSLIdentityExpirationTest : public ::testing::Test { time_t lifetime = rtc::CreateRandomId() % (0x80000000 - time_before_generation); rtc::KeyParams key_params = rtc::KeyParams::ECDSA(rtc::EC_NIST_P256); - SSLIdentity* identity = - rtc::SSLIdentity::GenerateWithExpiration("", key_params, lifetime); + auto identity = + rtc::SSLIdentity::Create("", key_params, lifetime); time_t time_after_generation = time(nullptr); EXPECT_LE(time_before_generation + lifetime, identity->certificate().CertificateExpirationTime()); EXPECT_GE(time_after_generation + lifetime, identity->certificate().CertificateExpirationTime()); - delete identity; } } }; diff --git a/rtc_base/ssl_roots.h b/rtc_base/ssl_roots.h index 69f6e396f6..8f869f4a9e 100644 --- a/rtc_base/ssl_roots.h +++ b/rtc_base/ssl_roots.h @@ -15,74 +15,251 @@ // Google. // It was generated with the following command line: -// > python tools/sslroots/generate_sslroots.py -// https://pki.google.com/roots.pem +// > python tools_webrtc/sslroots/generate_sslroots.py +// https://pki.goog/roots.pem // clang-format off // Don't bother formatting generated code, // also it would breaks subject/issuer lines. 
-/* subject:/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */ -/* issuer :/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */ +/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */ +/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */ -const unsigned char GlobalSign_Root_CA_certificate[889]={ -0x30,0x82,0x03,0x75,0x30,0x82,0x02,0x5D,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, -0x00,0x00,0x00,0x00,0x01,0x15,0x4B,0x5A,0xC3,0x94,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x57,0x31,0x0B,0x30,0x09,0x06, -0x03,0x55,0x04,0x06,0x13,0x02,0x42,0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04, -0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76, -0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F, -0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12, -0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20, -0x43,0x41,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x39,0x30,0x31,0x31,0x32,0x30,0x30, -0x30,0x30,0x5A,0x17,0x0D,0x32,0x38,0x30,0x31,0x32,0x38,0x31,0x32,0x30,0x30,0x30, -0x30,0x5A,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x42, -0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62, -0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,0x2D,0x73,0x61,0x31,0x10,0x30,0x0E, -0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x31,0x1B, -0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, -0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82, -0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0x0E,0xE6,0x99, -0x8D,0xCE,0xA3,0xE3,0x4F,0x8A,0x7E,0xFB,0xF1,0x8B,0x83,0x25,0x6B,0xEA,0x48,0x1F, 
-0xF1,0x2A,0xB0,0xB9,0x95,0x11,0x04,0xBD,0xF0,0x63,0xD1,0xE2,0x67,0x66,0xCF,0x1C, -0xDD,0xCF,0x1B,0x48,0x2B,0xEE,0x8D,0x89,0x8E,0x9A,0xAF,0x29,0x80,0x65,0xAB,0xE9, -0xC7,0x2D,0x12,0xCB,0xAB,0x1C,0x4C,0x70,0x07,0xA1,0x3D,0x0A,0x30,0xCD,0x15,0x8D, -0x4F,0xF8,0xDD,0xD4,0x8C,0x50,0x15,0x1C,0xEF,0x50,0xEE,0xC4,0x2E,0xF7,0xFC,0xE9, -0x52,0xF2,0x91,0x7D,0xE0,0x6D,0xD5,0x35,0x30,0x8E,0x5E,0x43,0x73,0xF2,0x41,0xE9, -0xD5,0x6A,0xE3,0xB2,0x89,0x3A,0x56,0x39,0x38,0x6F,0x06,0x3C,0x88,0x69,0x5B,0x2A, -0x4D,0xC5,0xA7,0x54,0xB8,0x6C,0x89,0xCC,0x9B,0xF9,0x3C,0xCA,0xE5,0xFD,0x89,0xF5, -0x12,0x3C,0x92,0x78,0x96,0xD6,0xDC,0x74,0x6E,0x93,0x44,0x61,0xD1,0x8D,0xC7,0x46, -0xB2,0x75,0x0E,0x86,0xE8,0x19,0x8A,0xD5,0x6D,0x6C,0xD5,0x78,0x16,0x95,0xA2,0xE9, -0xC8,0x0A,0x38,0xEB,0xF2,0x24,0x13,0x4F,0x73,0x54,0x93,0x13,0x85,0x3A,0x1B,0xBC, -0x1E,0x34,0xB5,0x8B,0x05,0x8C,0xB9,0x77,0x8B,0xB1,0xDB,0x1F,0x20,0x91,0xAB,0x09, -0x53,0x6E,0x90,0xCE,0x7B,0x37,0x74,0xB9,0x70,0x47,0x91,0x22,0x51,0x63,0x16,0x79, -0xAE,0xB1,0xAE,0x41,0x26,0x08,0xC8,0x19,0x2B,0xD1,0x46,0xAA,0x48,0xD6,0x64,0x2A, -0xD7,0x83,0x34,0xFF,0x2C,0x2A,0xC1,0x6C,0x19,0x43,0x4A,0x07,0x85,0xE7,0xD3,0x7C, -0xF6,0x21,0x68,0xEF,0xEA,0xF2,0x52,0x9F,0x7F,0x93,0x90,0xCF,0x02,0x03,0x01,0x00, -0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04, -0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, -0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, -0x14,0x60,0x7B,0x66,0x1A,0x45,0x0D,0x97,0xCA,0x89,0x50,0x2F,0x7D,0x04,0xCD,0x34, -0xA8,0xFF,0xFC,0xFD,0x4B,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xD6,0x73,0xE7,0x7C,0x4F,0x76,0xD0, -0x8D,0xBF,0xEC,0xBA,0xA2,0xBE,0x34,0xC5,0x28,0x32,0xB5,0x7C,0xFC,0x6C,0x9C,0x2C, -0x2B,0xBD,0x09,0x9E,0x53,0xBF,0x6B,0x5E,0xAA,0x11,0x48,0xB6,0xE5,0x08,0xA3,0xB3, -0xCA,0x3D,0x61,0x4D,0xD3,0x46,0x09,0xB3,0x3E,0xC3,0xA0,0xE3,0x63,0x55,0x1B,0xF2, 
-0xBA,0xEF,0xAD,0x39,0xE1,0x43,0xB9,0x38,0xA3,0xE6,0x2F,0x8A,0x26,0x3B,0xEF,0xA0, -0x50,0x56,0xF9,0xC6,0x0A,0xFD,0x38,0xCD,0xC4,0x0B,0x70,0x51,0x94,0x97,0x98,0x04, -0xDF,0xC3,0x5F,0x94,0xD5,0x15,0xC9,0x14,0x41,0x9C,0xC4,0x5D,0x75,0x64,0x15,0x0D, -0xFF,0x55,0x30,0xEC,0x86,0x8F,0xFF,0x0D,0xEF,0x2C,0xB9,0x63,0x46,0xF6,0xAA,0xFC, -0xDF,0xBC,0x69,0xFD,0x2E,0x12,0x48,0x64,0x9A,0xE0,0x95,0xF0,0xA6,0xEF,0x29,0x8F, -0x01,0xB1,0x15,0xB5,0x0C,0x1D,0xA5,0xFE,0x69,0x2C,0x69,0x24,0x78,0x1E,0xB3,0xA7, -0x1C,0x71,0x62,0xEE,0xCA,0xC8,0x97,0xAC,0x17,0x5D,0x8A,0xC2,0xF8,0x47,0x86,0x6E, -0x2A,0xC4,0x56,0x31,0x95,0xD0,0x67,0x89,0x85,0x2B,0xF9,0x6C,0xA6,0x5D,0x46,0x9D, -0x0C,0xAA,0x82,0xE4,0x99,0x51,0xDD,0x70,0xB7,0xDB,0x56,0x3D,0x61,0xE4,0x6A,0xE1, -0x5C,0xD6,0xF6,0xFE,0x3D,0xDE,0x41,0xCC,0x07,0xAE,0x63,0x52,0xBF,0x53,0x53,0xF4, -0x2B,0xE9,0xC7,0xFD,0xB6,0xF7,0x82,0x5F,0x85,0xD2,0x41,0x18,0xDB,0x81,0xB3,0x04, -0x1C,0xC5,0x1F,0xA4,0x80,0x6F,0x15,0x20,0xC9,0xDE,0x0C,0x88,0x0A,0x1D,0xD6,0x66, -0x55,0xE2,0xFC,0x48,0xC9,0x29,0x26,0x69,0xE0, +const unsigned char Comodo_AAA_Services_root_certificate[1078]={ +0x30,0x82,0x04,0x32,0x30,0x82,0x03,0x1A,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, +0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, +0x7B,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, +0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, +0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, +0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, +0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43, +0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55, +0x04,0x03,0x0C,0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, +0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x1E,0x17,0x0D, +0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32, 
+0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x7B,0x31,0x0B, +0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06, +0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61, +0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04, +0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03, +0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,0x41,0x20,0x4C, +0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x0C, +0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65, +0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06, +0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F, +0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBE,0x40,0x9D,0xF4,0x6E,0xE1, +0xEA,0x76,0x87,0x1C,0x4D,0x45,0x44,0x8E,0xBE,0x46,0xC8,0x83,0x06,0x9D,0xC1,0x2A, +0xFE,0x18,0x1F,0x8E,0xE4,0x02,0xFA,0xF3,0xAB,0x5D,0x50,0x8A,0x16,0x31,0x0B,0x9A, +0x06,0xD0,0xC5,0x70,0x22,0xCD,0x49,0x2D,0x54,0x63,0xCC,0xB6,0x6E,0x68,0x46,0x0B, +0x53,0xEA,0xCB,0x4C,0x24,0xC0,0xBC,0x72,0x4E,0xEA,0xF1,0x15,0xAE,0xF4,0x54,0x9A, +0x12,0x0A,0xC3,0x7A,0xB2,0x33,0x60,0xE2,0xDA,0x89,0x55,0xF3,0x22,0x58,0xF3,0xDE, +0xDC,0xCF,0xEF,0x83,0x86,0xA2,0x8C,0x94,0x4F,0x9F,0x68,0xF2,0x98,0x90,0x46,0x84, +0x27,0xC7,0x76,0xBF,0xE3,0xCC,0x35,0x2C,0x8B,0x5E,0x07,0x64,0x65,0x82,0xC0,0x48, +0xB0,0xA8,0x91,0xF9,0x61,0x9F,0x76,0x20,0x50,0xA8,0x91,0xC7,0x66,0xB5,0xEB,0x78, +0x62,0x03,0x56,0xF0,0x8A,0x1A,0x13,0xEA,0x31,0xA3,0x1E,0xA0,0x99,0xFD,0x38,0xF6, +0xF6,0x27,0x32,0x58,0x6F,0x07,0xF5,0x6B,0xB8,0xFB,0x14,0x2B,0xAF,0xB7,0xAA,0xCC, +0xD6,0x63,0x5F,0x73,0x8C,0xDA,0x05,0x99,0xA8,0x38,0xA8,0xCB,0x17,0x78,0x36,0x51, +0xAC,0xE9,0x9E,0xF4,0x78,0x3A,0x8D,0xCF,0x0F,0xD9,0x42,0xE2,0x98,0x0C,0xAB,0x2F, +0x9F,0x0E,0x01,0xDE,0xEF,0x9F,0x99,0x49,0xF1,0x2D,0xDF,0xAC,0x74,0x4D,0x1B,0x98, 
+0xB5,0x47,0xC5,0xE5,0x29,0xD1,0xF9,0x90,0x18,0xC7,0x62,0x9C,0xBE,0x83,0xC7,0x26, +0x7B,0x3E,0x8A,0x25,0xC7,0xC0,0xDD,0x9D,0xE6,0x35,0x68,0x10,0x20,0x9D,0x8F,0xD8, +0xDE,0xD2,0xC3,0x84,0x9C,0x0D,0x5E,0xE8,0x2F,0xC9,0x02,0x03,0x01,0x00,0x01,0xA3, +0x81,0xC0,0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14, +0xA0,0x11,0x0A,0x23,0x3E,0x96,0xF1,0x07,0xEC,0xE2,0xAF,0x29,0xEF,0x82,0xA5,0x7F, +0xD0,0x30,0xA4,0xB4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04, +0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05, +0x30,0x03,0x01,0x01,0xFF,0x30,0x7B,0x06,0x03,0x55,0x1D,0x1F,0x04,0x74,0x30,0x72, +0x30,0x38,0xA0,0x36,0xA0,0x34,0x86,0x32,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63, +0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F, +0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65, +0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x36,0xA0,0x34,0xA0,0x32, +0x86,0x30,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D, +0x6F,0x64,0x6F,0x2E,0x6E,0x65,0x74,0x2F,0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69, +0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63, +0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05, +0x00,0x03,0x82,0x01,0x01,0x00,0x08,0x56,0xFC,0x02,0xF0,0x9B,0xE8,0xFF,0xA4,0xFA, +0xD6,0x7B,0xC6,0x44,0x80,0xCE,0x4F,0xC4,0xC5,0xF6,0x00,0x58,0xCC,0xA6,0xB6,0xBC, +0x14,0x49,0x68,0x04,0x76,0xE8,0xE6,0xEE,0x5D,0xEC,0x02,0x0F,0x60,0xD6,0x8D,0x50, +0x18,0x4F,0x26,0x4E,0x01,0xE3,0xE6,0xB0,0xA5,0xEE,0xBF,0xBC,0x74,0x54,0x41,0xBF, +0xFD,0xFC,0x12,0xB8,0xC7,0x4F,0x5A,0xF4,0x89,0x60,0x05,0x7F,0x60,0xB7,0x05,0x4A, +0xF3,0xF6,0xF1,0xC2,0xBF,0xC4,0xB9,0x74,0x86,0xB6,0x2D,0x7D,0x6B,0xCC,0xD2,0xF3, +0x46,0xDD,0x2F,0xC6,0xE0,0x6A,0xC3,0xC3,0x34,0x03,0x2C,0x7D,0x96,0xDD,0x5A,0xC2, +0x0E,0xA7,0x0A,0x99,0xC1,0x05,0x8B,0xAB,0x0C,0x2F,0xF3,0x5C,0x3A,0xCF,0x6C,0x37, 
+0x55,0x09,0x87,0xDE,0x53,0x40,0x6C,0x58,0xEF,0xFC,0xB6,0xAB,0x65,0x6E,0x04,0xF6, +0x1B,0xDC,0x3C,0xE0,0x5A,0x15,0xC6,0x9E,0xD9,0xF1,0x59,0x48,0x30,0x21,0x65,0x03, +0x6C,0xEC,0xE9,0x21,0x73,0xEC,0x9B,0x03,0xA1,0xE0,0x37,0xAD,0xA0,0x15,0x18,0x8F, +0xFA,0xBA,0x02,0xCE,0xA7,0x2C,0xA9,0x10,0x13,0x2C,0xD4,0xE5,0x08,0x26,0xAB,0x22, +0x97,0x60,0xF8,0x90,0x5E,0x74,0xD4,0xA2,0x9A,0x53,0xBD,0xF2,0xA9,0x68,0xE0,0xA2, +0x6E,0xC2,0xD7,0x6C,0xB1,0xA3,0x0F,0x9E,0xBF,0xEB,0x68,0xE7,0x56,0xF2,0xAE,0xF2, +0xE3,0x2B,0x38,0x3A,0x09,0x81,0xB5,0x6B,0x85,0xD7,0xBE,0x2D,0xED,0x3F,0x1A,0xB7, +0xB2,0x63,0xE2,0xF5,0x62,0x2C,0x82,0xD4,0x6A,0x00,0x41,0x50,0xF1,0x39,0x83,0x9F, +0x95,0xE9,0x36,0x96,0x98,0x6E, +}; + + +/* subject:/OU=GlobalSign Root CA - R6/O=GlobalSign/CN=GlobalSign */ +/* issuer :/OU=GlobalSign Root CA - R6/O=GlobalSign/CN=GlobalSign */ + + +const unsigned char GlobalSign_Root_CA___R6_certificate[1415]={ +0x30,0x82,0x05,0x83,0x30,0x82,0x03,0x6B,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x45, +0xE6,0xBB,0x03,0x83,0x33,0xC3,0x85,0x65,0x48,0xE6,0xFF,0x45,0x51,0x30,0x0D,0x06, +0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x4C,0x31,0x20, +0x30,0x1E,0x06,0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, +0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x36, +0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61, +0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A, +0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x31,0x34, +0x31,0x32,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x34,0x31, +0x32,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E, +0x06,0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67, +0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x36,0x31,0x13, +0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, 
+0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C, +0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09, +0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00, +0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0x95,0x07,0xE8,0x73,0xCA,0x66,0xF9, +0xEC,0x14,0xCA,0x7B,0x3C,0xF7,0x0D,0x08,0xF1,0xB4,0x45,0x0B,0x2C,0x82,0xB4,0x48, +0xC6,0xEB,0x5B,0x3C,0xAE,0x83,0xB8,0x41,0x92,0x33,0x14,0xA4,0x6F,0x7F,0xE9,0x2A, +0xCC,0xC6,0xB0,0x88,0x6B,0xC5,0xB6,0x89,0xD1,0xC6,0xB2,0xFF,0x14,0xCE,0x51,0x14, +0x21,0xEC,0x4A,0xDD,0x1B,0x5A,0xC6,0xD6,0x87,0xEE,0x4D,0x3A,0x15,0x06,0xED,0x64, +0x66,0x0B,0x92,0x80,0xCA,0x44,0xDE,0x73,0x94,0x4E,0xF3,0xA7,0x89,0x7F,0x4F,0x78, +0x63,0x08,0xC8,0x12,0x50,0x6D,0x42,0x66,0x2F,0x4D,0xB9,0x79,0x28,0x4D,0x52,0x1A, +0x8A,0x1A,0x80,0xB7,0x19,0x81,0x0E,0x7E,0xC4,0x8A,0xBC,0x64,0x4C,0x21,0x1C,0x43, +0x68,0xD7,0x3D,0x3C,0x8A,0xC5,0xB2,0x66,0xD5,0x90,0x9A,0xB7,0x31,0x06,0xC5,0xBE, +0xE2,0x6D,0x32,0x06,0xA6,0x1E,0xF9,0xB9,0xEB,0xAA,0xA3,0xB8,0xBF,0xBE,0x82,0x63, +0x50,0xD0,0xF0,0x18,0x89,0xDF,0xE4,0x0F,0x79,0xF5,0xEA,0xA2,0x1F,0x2A,0xD2,0x70, +0x2E,0x7B,0xE7,0xBC,0x93,0xBB,0x6D,0x53,0xE2,0x48,0x7C,0x8C,0x10,0x07,0x38,0xFF, +0x66,0xB2,0x77,0x61,0x7E,0xE0,0xEA,0x8C,0x3C,0xAA,0xB4,0xA4,0xF6,0xF3,0x95,0x4A, +0x12,0x07,0x6D,0xFD,0x8C,0xB2,0x89,0xCF,0xD0,0xA0,0x61,0x77,0xC8,0x58,0x74,0xB0, +0xD4,0x23,0x3A,0xF7,0x5D,0x3A,0xCA,0xA2,0xDB,0x9D,0x09,0xDE,0x5D,0x44,0x2D,0x90, +0xF1,0x81,0xCD,0x57,0x92,0xFA,0x7E,0xBC,0x50,0x04,0x63,0x34,0xDF,0x6B,0x93,0x18, +0xBE,0x6B,0x36,0xB2,0x39,0xE4,0xAC,0x24,0x36,0xB7,0xF0,0xEF,0xB6,0x1C,0x13,0x57, +0x93,0xB6,0xDE,0xB2,0xF8,0xE2,0x85,0xB7,0x73,0xA2,0xB8,0x35,0xAA,0x45,0xF2,0xE0, +0x9D,0x36,0xA1,0x6F,0x54,0x8A,0xF1,0x72,0x56,0x6E,0x2E,0x88,0xC5,0x51,0x42,0x44, +0x15,0x94,0xEE,0xA3,0xC5,0x38,0x96,0x9B,0x4E,0x4E,0x5A,0x0B,0x47,0xF3,0x06,0x36, +0x49,0x77,0x30,0xBC,0x71,0x37,0xE5,0xA6,0xEC,0x21,0x08,0x75,0xFC,0xE6,0x61,0x16, 
+0x3F,0x77,0xD5,0xD9,0x91,0x97,0x84,0x0A,0x6C,0xD4,0x02,0x4D,0x74,0xC0,0x14,0xED, +0xFD,0x39,0xFB,0x83,0xF2,0x5E,0x14,0xA1,0x04,0xB0,0x0B,0xE9,0xFE,0xEE,0x8F,0xE1, +0x6E,0x0B,0xB2,0x08,0xB3,0x61,0x66,0x09,0x6A,0xB1,0x06,0x3A,0x65,0x96,0x59,0xC0, +0xF0,0x35,0xFD,0xC9,0xDA,0x28,0x8D,0x1A,0x11,0x87,0x70,0x81,0x0A,0xA8,0x9A,0x75, +0x1D,0x9E,0x3A,0x86,0x05,0x00,0x9E,0xDB,0x80,0xD6,0x25,0xF9,0xDC,0x05,0x9E,0x27, +0x59,0x4C,0x76,0x39,0x5B,0xEA,0xF9,0xA5,0xA1,0xD8,0x83,0x0F,0xD1,0xFF,0xDF,0x30, +0x11,0xF9,0x85,0xCF,0x33,0x48,0xF5,0xCA,0x6D,0x64,0x14,0x2C,0x7A,0x58,0x4F,0xD3, +0x4B,0x08,0x49,0xC5,0x95,0x64,0x1A,0x63,0x0E,0x79,0x3D,0xF5,0xB3,0x8C,0xCA,0x58, +0xAD,0x9C,0x42,0x45,0x79,0x6E,0x0E,0x87,0x19,0x5C,0x54,0xB1,0x65,0xB6,0xBF,0x8C, +0x9B,0xDC,0x13,0xE9,0x0D,0x6F,0xB8,0x2E,0xDC,0x67,0x6E,0xC9,0x8B,0x11,0xB5,0x84, +0x14,0x8A,0x00,0x19,0x70,0x83,0x79,0x91,0x97,0x91,0xD4,0x1A,0x27,0xBF,0x37,0x1E, +0x32,0x07,0xD8,0x14,0x63,0x3C,0x28,0x4C,0xAF,0x02,0x03,0x01,0x00,0x01,0xA3,0x63, +0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02, +0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03, +0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xAE,0x6C, +0x05,0xA3,0x93,0x13,0xE2,0xA2,0xE7,0xE2,0xD7,0x1C,0xD6,0xC7,0xF0,0x7F,0xC8,0x67, +0x53,0xA0,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xAE, +0x6C,0x05,0xA3,0x93,0x13,0xE2,0xA2,0xE7,0xE2,0xD7,0x1C,0xD6,0xC7,0xF0,0x7F,0xC8, +0x67,0x53,0xA0,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C, +0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x83,0x25,0xED,0xE8,0xD1,0xFD,0x95,0x52,0xCD, +0x9E,0xC0,0x04,0xA0,0x91,0x69,0xE6,0x5C,0xD0,0x84,0xDE,0xDC,0xAD,0xA2,0x4F,0xE8, +0x47,0x78,0xD6,0x65,0x98,0xA9,0x5B,0xA8,0x3C,0x87,0x7C,0x02,0x8A,0xD1,0x6E,0xB7, +0x16,0x73,0xE6,0x5F,0xC0,0x54,0x98,0xD5,0x74,0xBE,0xC1,0xCD,0xE2,0x11,0x91,0xAD, +0x23,0x18,0x3D,0xDD,0xE1,0x72,0x44,0x96,0xB4,0x95,0x5E,0xC0,0x7B,0x8E,0x99,0x78, 
+0x16,0x43,0x13,0x56,0x57,0xB3,0xA2,0xB3,0x3B,0xB5,0x77,0xDC,0x40,0x72,0xAC,0xA3, +0xEB,0x9B,0x35,0x3E,0xB1,0x08,0x21,0xA1,0xE7,0xC4,0x43,0x37,0x79,0x32,0xBE,0xB5, +0xE7,0x9C,0x2C,0x4C,0xBC,0x43,0x29,0x99,0x8E,0x30,0xD3,0xAC,0x21,0xE0,0xE3,0x1D, +0xFA,0xD8,0x07,0x33,0x76,0x54,0x00,0x22,0x2A,0xB9,0x4D,0x20,0x2E,0x70,0x68,0xDA, +0xE5,0x53,0xFC,0x83,0x5C,0xD3,0x9D,0xF2,0xFF,0x44,0x0C,0x44,0x66,0xF2,0xD2,0xE3, +0xBD,0x46,0x00,0x1A,0x6D,0x02,0xBA,0x25,0x5D,0x8D,0xA1,0x31,0x51,0xDD,0x54,0x46, +0x1C,0x4D,0xDB,0x99,0x96,0xEF,0x1A,0x1C,0x04,0x5C,0xA6,0x15,0xEF,0x78,0xE0,0x79, +0xFE,0x5D,0xDB,0x3E,0xAA,0x4C,0x55,0xFD,0x9A,0x15,0xA9,0x6F,0xE1,0xA6,0xFB,0xDF, +0x70,0x30,0xE9,0xC3,0xEE,0x42,0x46,0xED,0xC2,0x93,0x05,0x89,0xFA,0x7D,0x63,0x7B, +0x3F,0xD0,0x71,0x81,0x7C,0x00,0xE8,0x98,0xAE,0x0E,0x78,0x34,0xC3,0x25,0xFB,0xAF, +0x0A,0x9F,0x20,0x6B,0xDD,0x3B,0x13,0x8F,0x12,0x8C,0xE2,0x41,0x1A,0x48,0x7A,0x73, +0xA0,0x77,0x69,0xC7,0xB6,0x5C,0x7F,0x82,0xC8,0x1E,0xFE,0x58,0x1B,0x28,0x2B,0xA8, +0x6C,0xAD,0x5E,0x6D,0xC0,0x05,0xD2,0x7B,0xB7,0xEB,0x80,0xFE,0x25,0x37,0xFE,0x02, +0x9B,0x68,0xAC,0x42,0x5D,0xC3,0xEE,0xF5,0xCC,0xDC,0xF0,0x50,0x75,0xD2,0x36,0x69, +0x9C,0xE6,0x7B,0x04,0xDF,0x6E,0x06,0x69,0xB6,0xDE,0x0A,0x09,0x48,0x59,0x87,0xEB, +0x7B,0x14,0x60,0x7A,0x64,0xAA,0x69,0x43,0xEF,0x91,0xC7,0x4C,0xEC,0x18,0xDD,0x6C, +0xEF,0x53,0x2D,0x8C,0x99,0xE1,0x5E,0xF2,0x72,0x3E,0xCF,0x54,0xC8,0xBD,0x67,0xEC, +0xA4,0x0F,0x4C,0x45,0xFF,0xD3,0xB9,0x30,0x23,0x07,0x4C,0x8F,0x10,0xBF,0x86,0x96, +0xD9,0x99,0x5A,0xB4,0x99,0x57,0x1C,0xA4,0xCC,0xBB,0x15,0x89,0x53,0xBA,0x2C,0x05, +0x0F,0xE4,0xC4,0x9E,0x19,0xB1,0x18,0x34,0xD5,0x4C,0x9D,0xBA,0xED,0xF7,0x1F,0xAF, +0x24,0x95,0x04,0x78,0xA8,0x03,0xBB,0xEE,0x81,0xE5,0xDA,0x5F,0x7C,0x8B,0x4A,0xA1, +0x90,0x74,0x25,0xA7,0xB3,0x3E,0x4B,0xC8,0x2C,0x56,0xBD,0xC7,0xC8,0xEF,0x38,0xE2, +0x5C,0x92,0xF0,0x79,0xF7,0x9C,0x84,0xBA,0x74,0x2D,0x61,0x01,0x20,0x7E,0x7E,0xD1, +0xF2,0x4F,0x07,0x59,0x5F,0x8B,0x2D,0x43,0x52,0xEB,0x46,0x0C,0x94,0xE1,0xF5,0x66, 
+0x47,0x79,0x77,0xD5,0x54,0x5B,0x1F,0xAD,0x24,0x37,0xCB,0x45,0x5A,0x4E,0xA0,0x44, +0x48,0xC8,0xD8,0xB0,0x99,0xC5,0x15,0x84,0x09,0xF6,0xD6,0x49,0x49,0xC0,0x65,0xB8, +0xE6,0x1A,0x71,0x6E,0xA0,0xA8,0xF1,0x82,0xE8,0x45,0x3E,0x6C,0xD6,0x02,0xD7,0x0A, +0x67,0x83,0x05,0x5A,0xC9,0xA4,0x10, +}; + + +/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */ + + +const unsigned char DigiCert_Global_Root_CA_certificate[947]={ +0x30,0x82,0x03,0xAF,0x30,0x82,0x02,0x97,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x08, +0x3B,0xE0,0x56,0x90,0x42,0x46,0xB1,0xA1,0x75,0x6A,0xC9,0x59,0x91,0xC7,0x4A,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x61, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, +0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, +0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, +0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, +0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65, +0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43, +0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30, +0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30, +0x5A,0x30,0x61,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53, +0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43, +0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B, +0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63, +0x6F,0x6D,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67, +0x69,0x43,0x65,0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F, 
+0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, +0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, +0x02,0x82,0x01,0x01,0x00,0xE2,0x3B,0xE1,0x11,0x72,0xDE,0xA8,0xA4,0xD3,0xA3,0x57, +0xAA,0x50,0xA2,0x8F,0x0B,0x77,0x90,0xC9,0xA2,0xA5,0xEE,0x12,0xCE,0x96,0x5B,0x01, +0x09,0x20,0xCC,0x01,0x93,0xA7,0x4E,0x30,0xB7,0x53,0xF7,0x43,0xC4,0x69,0x00,0x57, +0x9D,0xE2,0x8D,0x22,0xDD,0x87,0x06,0x40,0x00,0x81,0x09,0xCE,0xCE,0x1B,0x83,0xBF, +0xDF,0xCD,0x3B,0x71,0x46,0xE2,0xD6,0x66,0xC7,0x05,0xB3,0x76,0x27,0x16,0x8F,0x7B, +0x9E,0x1E,0x95,0x7D,0xEE,0xB7,0x48,0xA3,0x08,0xDA,0xD6,0xAF,0x7A,0x0C,0x39,0x06, +0x65,0x7F,0x4A,0x5D,0x1F,0xBC,0x17,0xF8,0xAB,0xBE,0xEE,0x28,0xD7,0x74,0x7F,0x7A, +0x78,0x99,0x59,0x85,0x68,0x6E,0x5C,0x23,0x32,0x4B,0xBF,0x4E,0xC0,0xE8,0x5A,0x6D, +0xE3,0x70,0xBF,0x77,0x10,0xBF,0xFC,0x01,0xF6,0x85,0xD9,0xA8,0x44,0x10,0x58,0x32, +0xA9,0x75,0x18,0xD5,0xD1,0xA2,0xBE,0x47,0xE2,0x27,0x6A,0xF4,0x9A,0x33,0xF8,0x49, +0x08,0x60,0x8B,0xD4,0x5F,0xB4,0x3A,0x84,0xBF,0xA1,0xAA,0x4A,0x4C,0x7D,0x3E,0xCF, +0x4F,0x5F,0x6C,0x76,0x5E,0xA0,0x4B,0x37,0x91,0x9E,0xDC,0x22,0xE6,0x6D,0xCE,0x14, +0x1A,0x8E,0x6A,0xCB,0xFE,0xCD,0xB3,0x14,0x64,0x17,0xC7,0x5B,0x29,0x9E,0x32,0xBF, +0xF2,0xEE,0xFA,0xD3,0x0B,0x42,0xD4,0xAB,0xB7,0x41,0x32,0xDA,0x0C,0xD4,0xEF,0xF8, +0x81,0xD5,0xBB,0x8D,0x58,0x3F,0xB5,0x1B,0xE8,0x49,0x28,0xA2,0x70,0xDA,0x31,0x04, +0xDD,0xF7,0xB2,0x16,0xF2,0x4C,0x0A,0x4E,0x07,0xA8,0xED,0x4A,0x3D,0x5E,0xB5,0x7F, +0xA3,0x90,0xC3,0xAF,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E, +0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F, +0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30, +0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x03,0xDE,0x50,0x35,0x56,0xD1, +0x4C,0xBB,0x66,0xF0,0xA3,0xE2,0x1B,0x1B,0xC3,0x97,0xB2,0x3D,0xD1,0x55,0x30,0x1F, +0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x03,0xDE,0x50,0x35,0x56, 
+0xD1,0x4C,0xBB,0x66,0xF0,0xA3,0xE2,0x1B,0x1B,0xC3,0x97,0xB2,0x3D,0xD1,0x55,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82, +0x01,0x01,0x00,0xCB,0x9C,0x37,0xAA,0x48,0x13,0x12,0x0A,0xFA,0xDD,0x44,0x9C,0x4F, +0x52,0xB0,0xF4,0xDF,0xAE,0x04,0xF5,0x79,0x79,0x08,0xA3,0x24,0x18,0xFC,0x4B,0x2B, +0x84,0xC0,0x2D,0xB9,0xD5,0xC7,0xFE,0xF4,0xC1,0x1F,0x58,0xCB,0xB8,0x6D,0x9C,0x7A, +0x74,0xE7,0x98,0x29,0xAB,0x11,0xB5,0xE3,0x70,0xA0,0xA1,0xCD,0x4C,0x88,0x99,0x93, +0x8C,0x91,0x70,0xE2,0xAB,0x0F,0x1C,0xBE,0x93,0xA9,0xFF,0x63,0xD5,0xE4,0x07,0x60, +0xD3,0xA3,0xBF,0x9D,0x5B,0x09,0xF1,0xD5,0x8E,0xE3,0x53,0xF4,0x8E,0x63,0xFA,0x3F, +0xA7,0xDB,0xB4,0x66,0xDF,0x62,0x66,0xD6,0xD1,0x6E,0x41,0x8D,0xF2,0x2D,0xB5,0xEA, +0x77,0x4A,0x9F,0x9D,0x58,0xE2,0x2B,0x59,0xC0,0x40,0x23,0xED,0x2D,0x28,0x82,0x45, +0x3E,0x79,0x54,0x92,0x26,0x98,0xE0,0x80,0x48,0xA8,0x37,0xEF,0xF0,0xD6,0x79,0x60, +0x16,0xDE,0xAC,0xE8,0x0E,0xCD,0x6E,0xAC,0x44,0x17,0x38,0x2F,0x49,0xDA,0xE1,0x45, +0x3E,0x2A,0xB9,0x36,0x53,0xCF,0x3A,0x50,0x06,0xF7,0x2E,0xE8,0xC4,0x57,0x49,0x6C, +0x61,0x21,0x18,0xD5,0x04,0xAD,0x78,0x3C,0x2C,0x3A,0x80,0x6B,0xA7,0xEB,0xAF,0x15, +0x14,0xE9,0xD8,0x89,0xC1,0xB9,0x38,0x6C,0xE2,0x91,0x6C,0x8A,0xFF,0x64,0xB9,0x77, +0x25,0x57,0x30,0xC0,0x1B,0x24,0xA3,0xE1,0xDC,0xE9,0xDF,0x47,0x7C,0xB5,0xB4,0x24, +0x08,0x05,0x30,0xEC,0x2D,0xBD,0x0B,0xBF,0x45,0xBF,0x50,0xB9,0xA9,0xF3,0xEB,0x98, +0x01,0x12,0xAD,0xC8,0x88,0xC6,0x98,0x34,0x5F,0x8D,0x0A,0x3C,0xC6,0xE9,0xD5,0x95, +0x95,0x6D,0xDE, }; @@ -189,271 +366,408 @@ const unsigned char USERTrust_RSA_Certification_Authority_certificate[1506]={ }; -/* subject:/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */ -/* issuer :/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */ +/* subject:/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */ +/* issuer :/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */ -const unsigned char Starfield_Class_2_CA_certificate[1043]={ 
-0x30,0x82,0x04,0x0F,0x30,0x82,0x02,0xF7,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25, -0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65, -0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C, -0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29, -0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20, -0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20, -0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30, -0x36,0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36, -0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06, -0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04, -0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63, -0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31, -0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69, -0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74, -0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72, -0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, -0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,0x00,0x30,0x82,0x01,0x08,0x02, -0x82,0x01,0x01,0x00,0xB7,0x32,0xC8,0xFE,0xE9,0x71,0xA6,0x04,0x85,0xAD,0x0C,0x11, -0x64,0xDF,0xCE,0x4D,0xEF,0xC8,0x03,0x18,0x87,0x3F,0xA1,0xAB,0xFB,0x3C,0xA6,0x9F, -0xF0,0xC3,0xA1,0xDA,0xD4,0xD8,0x6E,0x2B,0x53,0x90,0xFB,0x24,0xA4,0x3E,0x84,0xF0, -0x9E,0xE8,0x5F,0xEC,0xE5,0x27,0x44,0xF5,0x28,0xA6,0x3F,0x7B,0xDE,0xE0,0x2A,0xF0, -0xC8,0xAF,0x53,0x2F,0x9E,0xCA,0x05,0x01,0x93,0x1E,0x8F,0x66,0x1C,0x39,0xA7,0x4D, 
-0xFA,0x5A,0xB6,0x73,0x04,0x25,0x66,0xEB,0x77,0x7F,0xE7,0x59,0xC6,0x4A,0x99,0x25, -0x14,0x54,0xEB,0x26,0xC7,0xF3,0x7F,0x19,0xD5,0x30,0x70,0x8F,0xAF,0xB0,0x46,0x2A, -0xFF,0xAD,0xEB,0x29,0xED,0xD7,0x9F,0xAA,0x04,0x87,0xA3,0xD4,0xF9,0x89,0xA5,0x34, -0x5F,0xDB,0x43,0x91,0x82,0x36,0xD9,0x66,0x3C,0xB1,0xB8,0xB9,0x82,0xFD,0x9C,0x3A, -0x3E,0x10,0xC8,0x3B,0xEF,0x06,0x65,0x66,0x7A,0x9B,0x19,0x18,0x3D,0xFF,0x71,0x51, -0x3C,0x30,0x2E,0x5F,0xBE,0x3D,0x77,0x73,0xB2,0x5D,0x06,0x6C,0xC3,0x23,0x56,0x9A, -0x2B,0x85,0x26,0x92,0x1C,0xA7,0x02,0xB3,0xE4,0x3F,0x0D,0xAF,0x08,0x79,0x82,0xB8, -0x36,0x3D,0xEA,0x9C,0xD3,0x35,0xB3,0xBC,0x69,0xCA,0xF5,0xCC,0x9D,0xE8,0xFD,0x64, -0x8D,0x17,0x80,0x33,0x6E,0x5E,0x4A,0x5D,0x99,0xC9,0x1E,0x87,0xB4,0x9D,0x1A,0xC0, -0xD5,0x6E,0x13,0x35,0x23,0x5E,0xDF,0x9B,0x5F,0x3D,0xEF,0xD6,0xF7,0x76,0xC2,0xEA, -0x3E,0xBB,0x78,0x0D,0x1C,0x42,0x67,0x6B,0x04,0xD8,0xF8,0xD6,0xDA,0x6F,0x8B,0xF2, -0x44,0xA0,0x01,0xAB,0x02,0x01,0x03,0xA3,0x81,0xC5,0x30,0x81,0xC2,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xBF,0x5F,0xB7,0xD1,0xCE,0xDD,0x1F,0x86, -0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,0x30,0x81,0x92,0x06, -0x03,0x55,0x1D,0x23,0x04,0x81,0x8A,0x30,0x81,0x87,0x80,0x14,0xBF,0x5F,0xB7,0xD1, -0xCE,0xDD,0x1F,0x86,0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7, -0xA1,0x6C,0xA4,0x6A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, -0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74, -0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F, -0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03, -0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43, -0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, -0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,0x01, -0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30, 
-0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82, -0x01,0x01,0x00,0x05,0x9D,0x3F,0x88,0x9D,0xD1,0xC9,0x1A,0x55,0xA1,0xAC,0x69,0xF3, -0xF3,0x59,0xDA,0x9B,0x01,0x87,0x1A,0x4F,0x57,0xA9,0xA1,0x79,0x09,0x2A,0xDB,0xF7, -0x2F,0xB2,0x1E,0xCC,0xC7,0x5E,0x6A,0xD8,0x83,0x87,0xA1,0x97,0xEF,0x49,0x35,0x3E, -0x77,0x06,0x41,0x58,0x62,0xBF,0x8E,0x58,0xB8,0x0A,0x67,0x3F,0xEC,0xB3,0xDD,0x21, -0x66,0x1F,0xC9,0x54,0xFA,0x72,0xCC,0x3D,0x4C,0x40,0xD8,0x81,0xAF,0x77,0x9E,0x83, -0x7A,0xBB,0xA2,0xC7,0xF5,0x34,0x17,0x8E,0xD9,0x11,0x40,0xF4,0xFC,0x2C,0x2A,0x4D, -0x15,0x7F,0xA7,0x62,0x5D,0x2E,0x25,0xD3,0x00,0x0B,0x20,0x1A,0x1D,0x68,0xF9,0x17, -0xB8,0xF4,0xBD,0x8B,0xED,0x28,0x59,0xDD,0x4D,0x16,0x8B,0x17,0x83,0xC8,0xB2,0x65, -0xC7,0x2D,0x7A,0xA5,0xAA,0xBC,0x53,0x86,0x6D,0xDD,0x57,0xA4,0xCA,0xF8,0x20,0x41, -0x0B,0x68,0xF0,0xF4,0xFB,0x74,0xBE,0x56,0x5D,0x7A,0x79,0xF5,0xF9,0x1D,0x85,0xE3, -0x2D,0x95,0xBE,0xF5,0x71,0x90,0x43,0xCC,0x8D,0x1F,0x9A,0x00,0x0A,0x87,0x29,0xE9, -0x55,0x22,0x58,0x00,0x23,0xEA,0xE3,0x12,0x43,0x29,0x5B,0x47,0x08,0xDD,0x8C,0x41, -0x6A,0x65,0x06,0xA8,0xE5,0x21,0xAA,0x41,0xB4,0x95,0x21,0x95,0xB9,0x7D,0xD1,0x34, -0xAB,0x13,0xD6,0xAD,0xBC,0xDC,0xE2,0x3D,0x39,0xCD,0xBD,0x3E,0x75,0x70,0xA1,0x18, -0x59,0x03,0xC9,0x22,0xB4,0x8F,0x9C,0xD5,0x5E,0x2A,0xD7,0xA5,0xB6,0xD4,0x0A,0x6D, -0xF8,0xB7,0x40,0x11,0x46,0x9A,0x1F,0x79,0x0E,0x62,0xBF,0x0F,0x97,0xEC,0xE0,0x2F, -0x1F,0x17,0x94, -}; - - -/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */ -/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. 
- For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G3 */ - - -const unsigned char Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate[1054]={ -0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0x9B,0x7E,0x06,0x49,0xA3, -0x3E,0x62,0xB9,0xD5,0xEE,0x90,0x48,0x71,0x29,0xEF,0x57,0x30,0x0D,0x06,0x09,0x2A, -0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30, -0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03, -0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49, -0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65, -0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74, -0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28, -0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E, -0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74, -0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79, -0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53, -0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C, -0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, -0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30, -0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36, -0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03, -0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A, -0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E, -0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53, 
-0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72, -0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20, -0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49, -0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72, -0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30, -0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E, -0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20, -0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, -0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20, -0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, -0x02,0x82,0x01,0x01,0x00,0xCB,0xBA,0x9C,0x52,0xFC,0x78,0x1F,0x1A,0x1E,0x6F,0x1B, -0x37,0x73,0xBD,0xF8,0xC9,0x6B,0x94,0x12,0x30,0x4F,0xF0,0x36,0x47,0xF5,0xD0,0x91, -0x0A,0xF5,0x17,0xC8,0xA5,0x61,0xC1,0x16,0x40,0x4D,0xFB,0x8A,0x61,0x90,0xE5,0x76, -0x20,0xC1,0x11,0x06,0x7D,0xAB,0x2C,0x6E,0xA6,0xF5,0x11,0x41,0x8E,0xFA,0x2D,0xAD, -0x2A,0x61,0x59,0xA4,0x67,0x26,0x4C,0xD0,0xE8,0xBC,0x52,0x5B,0x70,0x20,0x04,0x58, -0xD1,0x7A,0xC9,0xA4,0x69,0xBC,0x83,0x17,0x64,0xAD,0x05,0x8B,0xBC,0xD0,0x58,0xCE, -0x8D,0x8C,0xF5,0xEB,0xF0,0x42,0x49,0x0B,0x9D,0x97,0x27,0x67,0x32,0x6E,0xE1,0xAE, -0x93,0x15,0x1C,0x70,0xBC,0x20,0x4D,0x2F,0x18,0xDE,0x92,0x88,0xE8,0x6C,0x85,0x57, -0x11,0x1A,0xE9,0x7E,0xE3,0x26,0x11,0x54,0xA2,0x45,0x96,0x55,0x83,0xCA,0x30,0x89, -0xE8,0xDC,0xD8,0xA3,0xED,0x2A,0x80,0x3F,0x7F,0x79,0x65,0x57,0x3E,0x15,0x20,0x66, -0x08,0x2F,0x95,0x93,0xBF,0xAA,0x47,0x2F,0xA8,0x46,0x97,0xF0,0x12,0xE2,0xFE,0xC2, -0x0A,0x2B,0x51,0xE6,0x76,0xE6,0xB7,0x46,0xB7,0xE2,0x0D,0xA6,0xCC,0xA8,0xC3,0x4C, -0x59,0x55,0x89,0xE6,0xE8,0x53,0x5C,0x1C,0xEA,0x9D,0xF0,0x62,0x16,0x0B,0xA7,0xC9, 
-0x5F,0x0C,0xF0,0xDE,0xC2,0x76,0xCE,0xAF,0xF7,0x6A,0xF2,0xFA,0x41,0xA6,0xA2,0x33, -0x14,0xC9,0xE5,0x7A,0x63,0xD3,0x9E,0x62,0x37,0xD5,0x85,0x65,0x9E,0x0E,0xE6,0x53, -0x24,0x74,0x1B,0x5E,0x1D,0x12,0x53,0x5B,0xC7,0x2C,0xE7,0x83,0x49,0x3B,0x15,0xAE, -0x8A,0x68,0xB9,0x57,0x97,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x11,0x14, -0x96,0xC1,0xAB,0x92,0x08,0xF7,0x3F,0x2F,0xC9,0xB2,0xFE,0xE4,0x5A,0x9F,0x64,0xDE, -0xDB,0x21,0x4F,0x86,0x99,0x34,0x76,0x36,0x57,0xDD,0xD0,0x15,0x2F,0xC5,0xAD,0x7F, -0x15,0x1F,0x37,0x62,0x73,0x3E,0xD4,0xE7,0x5F,0xCE,0x17,0x03,0xDB,0x35,0xFA,0x2B, -0xDB,0xAE,0x60,0x09,0x5F,0x1E,0x5F,0x8F,0x6E,0xBB,0x0B,0x3D,0xEA,0x5A,0x13,0x1E, -0x0C,0x60,0x6F,0xB5,0xC0,0xB5,0x23,0x22,0x2E,0x07,0x0B,0xCB,0xA9,0x74,0xCB,0x47, -0xBB,0x1D,0xC1,0xD7,0xA5,0x6B,0xCC,0x2F,0xD2,0x42,0xFD,0x49,0xDD,0xA7,0x89,0xCF, -0x53,0xBA,0xDA,0x00,0x5A,0x28,0xBF,0x82,0xDF,0xF8,0xBA,0x13,0x1D,0x50,0x86,0x82, -0xFD,0x8E,0x30,0x8F,0x29,0x46,0xB0,0x1E,0x3D,0x35,0xDA,0x38,0x62,0x16,0x18,0x4A, -0xAD,0xE6,0xB6,0x51,0x6C,0xDE,0xAF,0x62,0xEB,0x01,0xD0,0x1E,0x24,0xFE,0x7A,0x8F, -0x12,0x1A,0x12,0x68,0xB8,0xFB,0x66,0x99,0x14,0x14,0x45,0x5C,0xAE,0xE7,0xAE,0x69, -0x17,0x81,0x2B,0x5A,0x37,0xC9,0x5E,0x2A,0xF4,0xC6,0xE2,0xA1,0x5C,0x54,0x9B,0xA6, -0x54,0x00,0xCF,0xF0,0xF1,0xC1,0xC7,0x98,0x30,0x1A,0x3B,0x36,0x16,0xDB,0xA3,0x6E, -0xEA,0xFD,0xAD,0xB2,0xC2,0xDA,0xEF,0x02,0x47,0x13,0x8A,0xC0,0xF1,0xB3,0x31,0xAD, -0x4F,0x1C,0xE1,0x4F,0x9C,0xAF,0x0F,0x0C,0x9D,0xF7,0x78,0x0D,0xD8,0xF4,0x35,0x56, -0x80,0xDA,0xB7,0x6D,0x17,0x8F,0x9D,0x1E,0x81,0x64,0xE1,0xFE,0xC5,0x45,0xBA,0xAD, -0x6B,0xB9,0x0A,0x7A,0x4E,0x4F,0x4B,0x84,0xEE,0x4B,0xF1,0x7D,0xDD,0x11, -}; - - -/* subject:/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */ -/* issuer :/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */ - - -const unsigned char 
USERTrust_ECC_Certification_Authority_certificate[659]={ -0x30,0x82,0x02,0x8F,0x30,0x82,0x02,0x15,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x5C, -0x8B,0x99,0xC5,0x5A,0x94,0xC5,0xD2,0x71,0x56,0xDE,0xCD,0x89,0x80,0xCC,0x26,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x88,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,0x30,0x11,0x06, -0x03,0x55,0x04,0x08,0x13,0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,0x73,0x65,0x79, -0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,0x72,0x73,0x65, -0x79,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13, -0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E, -0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,0x04,0x03,0x13, -0x25,0x55,0x53,0x45,0x52,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x43,0x43,0x20,0x43, -0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74, -0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x32,0x30,0x31, -0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32, -0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x88,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, -0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x08,0x13, -0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,0x73,0x65,0x79,0x31,0x14,0x30,0x12,0x06, -0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,0x72,0x73,0x65,0x79,0x20,0x43,0x69,0x74, -0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20, -0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72, -0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,0x04,0x03,0x13,0x25,0x55,0x53,0x45,0x52, -0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66, -0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74, -0x79,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05, 
-0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x1A,0xAC,0x54,0x5A,0xA9,0xF9,0x68, -0x23,0xE7,0x7A,0xD5,0x24,0x6F,0x53,0xC6,0x5A,0xD8,0x4B,0xAB,0xC6,0xD5,0xB6,0xD1, -0xE6,0x73,0x71,0xAE,0xDD,0x9C,0xD6,0x0C,0x61,0xFD,0xDB,0xA0,0x89,0x03,0xB8,0x05, -0x14,0xEC,0x57,0xCE,0xEE,0x5D,0x3F,0xE2,0x21,0xB3,0xCE,0xF7,0xD4,0x8A,0x79,0xE0, -0xA3,0x83,0x7E,0x2D,0x97,0xD0,0x61,0xC4,0xF1,0x99,0xDC,0x25,0x91,0x63,0xAB,0x7F, -0x30,0xA3,0xB4,0x70,0xE2,0xC7,0xA1,0x33,0x9C,0xF3,0xBF,0x2E,0x5C,0x53,0xB1,0x5F, -0xB3,0x7D,0x32,0x7F,0x8A,0x34,0xE3,0x79,0x79,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x3A,0xE1,0x09,0x86,0xD4,0xCF,0x19,0xC2, -0x96,0x76,0x74,0x49,0x76,0xDC,0xE0,0x35,0xC6,0x63,0x63,0x9A,0x30,0x0E,0x06,0x03, -0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03, -0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06, -0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x30, -0x36,0x67,0xA1,0x16,0x08,0xDC,0xE4,0x97,0x00,0x41,0x1D,0x4E,0xBE,0xE1,0x63,0x01, -0xCF,0x3B,0xAA,0x42,0x11,0x64,0xA0,0x9D,0x94,0x39,0x02,0x11,0x79,0x5C,0x7B,0x1D, -0xFA,0x64,0xB9,0xEE,0x16,0x42,0xB3,0xBF,0x8A,0xC2,0x09,0xC4,0xEC,0xE4,0xB1,0x4D, -0x02,0x31,0x00,0xE9,0x2A,0x61,0x47,0x8C,0x52,0x4A,0x4B,0x4E,0x18,0x70,0xF6,0xD6, -0x44,0xD6,0x6E,0xF5,0x83,0xBA,0x6D,0x58,0xBD,0x24,0xD9,0x56,0x48,0xEA,0xEF,0xC4, -0xA2,0x46,0x81,0x88,0x6A,0x3A,0x46,0xD1,0xA9,0x9B,0x4D,0xC9,0x61,0xDA,0xD1,0x5D, -0x57,0x6A,0x18, +const unsigned char GlobalSign_Root_CA___R3_certificate[867]={ +0x30,0x82,0x03,0x5F,0x30,0x82,0x02,0x47,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, +0x00,0x00,0x00,0x00,0x01,0x21,0x58,0x53,0x08,0xA2,0x30,0x0D,0x06,0x09,0x2A,0x86, +0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06, +0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, +0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30, 
+0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69, +0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F, +0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x33,0x31, +0x38,0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x31,0x38, +0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55, +0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52, +0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,0x11,0x06, +0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, +0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61, +0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48, +0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01, +0x0A,0x02,0x82,0x01,0x01,0x00,0xCC,0x25,0x76,0x90,0x79,0x06,0x78,0x22,0x16,0xF5, +0xC0,0x83,0xB6,0x84,0xCA,0x28,0x9E,0xFD,0x05,0x76,0x11,0xC5,0xAD,0x88,0x72,0xFC, +0x46,0x02,0x43,0xC7,0xB2,0x8A,0x9D,0x04,0x5F,0x24,0xCB,0x2E,0x4B,0xE1,0x60,0x82, +0x46,0xE1,0x52,0xAB,0x0C,0x81,0x47,0x70,0x6C,0xDD,0x64,0xD1,0xEB,0xF5,0x2C,0xA3, +0x0F,0x82,0x3D,0x0C,0x2B,0xAE,0x97,0xD7,0xB6,0x14,0x86,0x10,0x79,0xBB,0x3B,0x13, +0x80,0x77,0x8C,0x08,0xE1,0x49,0xD2,0x6A,0x62,0x2F,0x1F,0x5E,0xFA,0x96,0x68,0xDF, +0x89,0x27,0x95,0x38,0x9F,0x06,0xD7,0x3E,0xC9,0xCB,0x26,0x59,0x0D,0x73,0xDE,0xB0, +0xC8,0xE9,0x26,0x0E,0x83,0x15,0xC6,0xEF,0x5B,0x8B,0xD2,0x04,0x60,0xCA,0x49,0xA6, +0x28,0xF6,0x69,0x3B,0xF6,0xCB,0xC8,0x28,0x91,0xE5,0x9D,0x8A,0x61,0x57,0x37,0xAC, +0x74,0x14,0xDC,0x74,0xE0,0x3A,0xEE,0x72,0x2F,0x2E,0x9C,0xFB,0xD0,0xBB,0xBF,0xF5, +0x3D,0x00,0xE1,0x06,0x33,0xE8,0x82,0x2B,0xAE,0x53,0xA6,0x3A,0x16,0x73,0x8C,0xDD, +0x41,0x0E,0x20,0x3A,0xC0,0xB4,0xA7,0xA1,0xE9,0xB2,0x4F,0x90,0x2E,0x32,0x60,0xE9, +0x57,0xCB,0xB9,0x04,0x92,0x68,0x68,0xE5,0x38,0x26,0x60,0x75,0xB2,0x9F,0x77,0xFF, 
+0x91,0x14,0xEF,0xAE,0x20,0x49,0xFC,0xAD,0x40,0x15,0x48,0xD1,0x02,0x31,0x61,0x19, +0x5E,0xB8,0x97,0xEF,0xAD,0x77,0xB7,0x64,0x9A,0x7A,0xBF,0x5F,0xC1,0x13,0xEF,0x9B, +0x62,0xFB,0x0D,0x6C,0xE0,0x54,0x69,0x16,0xA9,0x03,0xDA,0x6E,0xE9,0x83,0x93,0x71, +0x76,0xC6,0x69,0x85,0x82,0x17,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30, +0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30, +0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, +0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x8F,0xF0,0x4B,0x7F,0xA8, +0x2E,0x45,0x24,0xAE,0x4D,0x50,0xFA,0x63,0x9A,0x8B,0xDE,0xE2,0xDD,0x1B,0xBC,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82, +0x01,0x01,0x00,0x4B,0x40,0xDB,0xC0,0x50,0xAA,0xFE,0xC8,0x0C,0xEF,0xF7,0x96,0x54, +0x45,0x49,0xBB,0x96,0x00,0x09,0x41,0xAC,0xB3,0x13,0x86,0x86,0x28,0x07,0x33,0xCA, +0x6B,0xE6,0x74,0xB9,0xBA,0x00,0x2D,0xAE,0xA4,0x0A,0xD3,0xF5,0xF1,0xF1,0x0F,0x8A, +0xBF,0x73,0x67,0x4A,0x83,0xC7,0x44,0x7B,0x78,0xE0,0xAF,0x6E,0x6C,0x6F,0x03,0x29, +0x8E,0x33,0x39,0x45,0xC3,0x8E,0xE4,0xB9,0x57,0x6C,0xAA,0xFC,0x12,0x96,0xEC,0x53, +0xC6,0x2D,0xE4,0x24,0x6C,0xB9,0x94,0x63,0xFB,0xDC,0x53,0x68,0x67,0x56,0x3E,0x83, +0xB8,0xCF,0x35,0x21,0xC3,0xC9,0x68,0xFE,0xCE,0xDA,0xC2,0x53,0xAA,0xCC,0x90,0x8A, +0xE9,0xF0,0x5D,0x46,0x8C,0x95,0xDD,0x7A,0x58,0x28,0x1A,0x2F,0x1D,0xDE,0xCD,0x00, +0x37,0x41,0x8F,0xED,0x44,0x6D,0xD7,0x53,0x28,0x97,0x7E,0xF3,0x67,0x04,0x1E,0x15, +0xD7,0x8A,0x96,0xB4,0xD3,0xDE,0x4C,0x27,0xA4,0x4C,0x1B,0x73,0x73,0x76,0xF4,0x17, +0x99,0xC2,0x1F,0x7A,0x0E,0xE3,0x2D,0x08,0xAD,0x0A,0x1C,0x2C,0xFF,0x3C,0xAB,0x55, +0x0E,0x0F,0x91,0x7E,0x36,0xEB,0xC3,0x57,0x49,0xBE,0xE1,0x2E,0x2D,0x7C,0x60,0x8B, +0xC3,0x41,0x51,0x13,0x23,0x9D,0xCE,0xF7,0x32,0x6B,0x94,0x01,0xA8,0x99,0xE7,0x2C, +0x33,0x1F,0x3A,0x3B,0x25,0xD2,0x86,0x40,0xCE,0x3B,0x2C,0x86,0x78,0xC9,0x61,0x2F, +0x14,0xBA,0xEE,0xDB,0x55,0x6F,0xDF,0x84,0xEE,0x05,0x09,0x4D,0xBD,0x28,0xD8,0x72, 
+0xCE,0xD3,0x62,0x50,0x65,0x1E,0xEB,0x92,0x97,0x83,0x31,0xD9,0xB3,0xB5,0xCA,0x47, +0x58,0x3F,0x5F, }; -/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */ -/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */ +/* subject:/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */ +/* issuer :/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */ -const unsigned char GeoTrust_Global_CA_certificate[856]={ -0x30,0x82,0x03,0x54,0x30,0x82,0x02,0x3C,0xA0,0x03,0x02,0x01,0x02,0x02,0x03,0x02, -0x34,0x56,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05, -0x00,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53, -0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72, -0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04, -0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62, -0x61,0x6C,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x31,0x30, -0x34,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x32,0x30,0x35,0x32,0x31,0x30,0x34, -0x30,0x30,0x30,0x30,0x5A,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, -0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47, -0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19, -0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20, -0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F, -0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0xCC,0x18,0x63,0x30,0xFD, -0xF4,0x17,0x23,0x1A,0x56,0x7E,0x5B,0xDF,0x3C,0x6C,0x38,0xE4,0x71,0xB7,0x78,0x91, -0xD4,0xBC,0xA1,0xD8,0x4C,0xF8,0xA8,0x43,0xB6,0x03,0xE9,0x4D,0x21,0x07,0x08,0x88, -0xDA,0x58,0x2F,0x66,0x39,0x29,0xBD,0x05,0x78,0x8B,0x9D,0x38,0xE8,0x05,0xB7,0x6A, -0x7E,0x71,0xA4,0xE6,0xC4,0x60,0xA6,0xB0,0xEF,0x80,0xE4,0x89,0x28,0x0F,0x9E,0x25, 
-0xD6,0xED,0x83,0xF3,0xAD,0xA6,0x91,0xC7,0x98,0xC9,0x42,0x18,0x35,0x14,0x9D,0xAD, -0x98,0x46,0x92,0x2E,0x4F,0xCA,0xF1,0x87,0x43,0xC1,0x16,0x95,0x57,0x2D,0x50,0xEF, -0x89,0x2D,0x80,0x7A,0x57,0xAD,0xF2,0xEE,0x5F,0x6B,0xD2,0x00,0x8D,0xB9,0x14,0xF8, -0x14,0x15,0x35,0xD9,0xC0,0x46,0xA3,0x7B,0x72,0xC8,0x91,0xBF,0xC9,0x55,0x2B,0xCD, -0xD0,0x97,0x3E,0x9C,0x26,0x64,0xCC,0xDF,0xCE,0x83,0x19,0x71,0xCA,0x4E,0xE6,0xD4, -0xD5,0x7B,0xA9,0x19,0xCD,0x55,0xDE,0xC8,0xEC,0xD2,0x5E,0x38,0x53,0xE5,0x5C,0x4F, -0x8C,0x2D,0xFE,0x50,0x23,0x36,0xFC,0x66,0xE6,0xCB,0x8E,0xA4,0x39,0x19,0x00,0xB7, -0x95,0x02,0x39,0x91,0x0B,0x0E,0xFE,0x38,0x2E,0xD1,0x1D,0x05,0x9A,0xF6,0x4D,0x3E, -0x6F,0x0F,0x07,0x1D,0xAF,0x2C,0x1E,0x8F,0x60,0x39,0xE2,0xFA,0x36,0x53,0x13,0x39, -0xD4,0x5E,0x26,0x2B,0xDB,0x3D,0xA8,0x14,0xBD,0x32,0xEB,0x18,0x03,0x28,0x52,0x04, -0x71,0xE5,0xAB,0x33,0x3D,0xE1,0x38,0xBB,0x07,0x36,0x84,0x62,0x9C,0x79,0xEA,0x16, -0x30,0xF4,0x5F,0xC0,0x2B,0xE8,0x71,0x6B,0xE4,0xF9,0x02,0x03,0x01,0x00,0x01,0xA3, -0x53,0x30,0x51,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30, -0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC0, -0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,0xB8, -0xCA,0xCC,0x4E,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14, -0xC0,0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65, -0xB8,0xCA,0xCC,0x4E,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, -0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x35,0xE3,0x29,0x6A,0xE5,0x2F,0x5D,0x54, -0x8E,0x29,0x50,0x94,0x9F,0x99,0x1A,0x14,0xE4,0x8F,0x78,0x2A,0x62,0x94,0xA2,0x27, -0x67,0x9E,0xD0,0xCF,0x1A,0x5E,0x47,0xE9,0xC1,0xB2,0xA4,0xCF,0xDD,0x41,0x1A,0x05, -0x4E,0x9B,0x4B,0xEE,0x4A,0x6F,0x55,0x52,0xB3,0x24,0xA1,0x37,0x0A,0xEB,0x64,0x76, -0x2A,0x2E,0x2C,0xF3,0xFD,0x3B,0x75,0x90,0xBF,0xFA,0x71,0xD8,0xC7,0x3D,0x37,0xD2, -0xB5,0x05,0x95,0x62,0xB9,0xA6,0xDE,0x89,0x3D,0x36,0x7B,0x38,0x77,0x48,0x97,0xAC, 
-0xA6,0x20,0x8F,0x2E,0xA6,0xC9,0x0C,0xC2,0xB2,0x99,0x45,0x00,0xC7,0xCE,0x11,0x51, -0x22,0x22,0xE0,0xA5,0xEA,0xB6,0x15,0x48,0x09,0x64,0xEA,0x5E,0x4F,0x74,0xF7,0x05, -0x3E,0xC7,0x8A,0x52,0x0C,0xDB,0x15,0xB4,0xBD,0x6D,0x9B,0xE5,0xC6,0xB1,0x54,0x68, -0xA9,0xE3,0x69,0x90,0xB6,0x9A,0xA5,0x0F,0xB8,0xB9,0x3F,0x20,0x7D,0xAE,0x4A,0xB5, -0xB8,0x9C,0xE4,0x1D,0xB6,0xAB,0xE6,0x94,0xA5,0xC1,0xC7,0x83,0xAD,0xDB,0xF5,0x27, -0x87,0x0E,0x04,0x6C,0xD5,0xFF,0xDD,0xA0,0x5D,0xED,0x87,0x52,0xB7,0x2B,0x15,0x02, -0xAE,0x39,0xA6,0x6A,0x74,0xE9,0xDA,0xC4,0xE7,0xBC,0x4D,0x34,0x1E,0xA9,0x5C,0x4D, -0x33,0x5F,0x92,0x09,0x2F,0x88,0x66,0x5D,0x77,0x97,0xC7,0x1D,0x76,0x13,0xA9,0xD5, -0xE5,0xF1,0x16,0x09,0x11,0x35,0xD5,0xAC,0xDB,0x24,0x71,0x70,0x2C,0x98,0x56,0x0B, -0xD9,0x17,0xB4,0xD1,0xE3,0x51,0x2B,0x5E,0x75,0xE8,0xD5,0xD0,0xDC,0x4F,0x34,0xED, -0xC2,0x05,0x66,0x80,0xA1,0xCB,0xE6,0x33, +const unsigned char GlobalSign_Root_CA___R2_certificate[958]={ +0x30,0x82,0x03,0xBA,0x30,0x82,0x02,0xA2,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, +0x00,0x00,0x00,0x00,0x01,0x0F,0x86,0x26,0xE6,0x0D,0x30,0x0D,0x06,0x09,0x2A,0x86, +0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06, +0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, +0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30, +0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69, +0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F, +0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31, +0x35,0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35, +0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55, +0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52, +0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,0x11,0x06, +0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, 
+0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61, +0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48, +0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01, +0x0A,0x02,0x82,0x01,0x01,0x00,0xA6,0xCF,0x24,0x0E,0xBE,0x2E,0x6F,0x28,0x99,0x45, +0x42,0xC4,0xAB,0x3E,0x21,0x54,0x9B,0x0B,0xD3,0x7F,0x84,0x70,0xFA,0x12,0xB3,0xCB, +0xBF,0x87,0x5F,0xC6,0x7F,0x86,0xD3,0xB2,0x30,0x5C,0xD6,0xFD,0xAD,0xF1,0x7B,0xDC, +0xE5,0xF8,0x60,0x96,0x09,0x92,0x10,0xF5,0xD0,0x53,0xDE,0xFB,0x7B,0x7E,0x73,0x88, +0xAC,0x52,0x88,0x7B,0x4A,0xA6,0xCA,0x49,0xA6,0x5E,0xA8,0xA7,0x8C,0x5A,0x11,0xBC, +0x7A,0x82,0xEB,0xBE,0x8C,0xE9,0xB3,0xAC,0x96,0x25,0x07,0x97,0x4A,0x99,0x2A,0x07, +0x2F,0xB4,0x1E,0x77,0xBF,0x8A,0x0F,0xB5,0x02,0x7C,0x1B,0x96,0xB8,0xC5,0xB9,0x3A, +0x2C,0xBC,0xD6,0x12,0xB9,0xEB,0x59,0x7D,0xE2,0xD0,0x06,0x86,0x5F,0x5E,0x49,0x6A, +0xB5,0x39,0x5E,0x88,0x34,0xEC,0xBC,0x78,0x0C,0x08,0x98,0x84,0x6C,0xA8,0xCD,0x4B, +0xB4,0xA0,0x7D,0x0C,0x79,0x4D,0xF0,0xB8,0x2D,0xCB,0x21,0xCA,0xD5,0x6C,0x5B,0x7D, +0xE1,0xA0,0x29,0x84,0xA1,0xF9,0xD3,0x94,0x49,0xCB,0x24,0x62,0x91,0x20,0xBC,0xDD, +0x0B,0xD5,0xD9,0xCC,0xF9,0xEA,0x27,0x0A,0x2B,0x73,0x91,0xC6,0x9D,0x1B,0xAC,0xC8, +0xCB,0xE8,0xE0,0xA0,0xF4,0x2F,0x90,0x8B,0x4D,0xFB,0xB0,0x36,0x1B,0xF6,0x19,0x7A, +0x85,0xE0,0x6D,0xF2,0x61,0x13,0x88,0x5C,0x9F,0xE0,0x93,0x0A,0x51,0x97,0x8A,0x5A, +0xCE,0xAF,0xAB,0xD5,0xF7,0xAA,0x09,0xAA,0x60,0xBD,0xDC,0xD9,0x5F,0xDF,0x72,0xA9, +0x60,0x13,0x5E,0x00,0x01,0xC9,0x4A,0xFA,0x3F,0xA4,0xEA,0x07,0x03,0x21,0x02,0x8E, +0x82,0xCA,0x03,0xC2,0x9B,0x8F,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x9C,0x30,0x81, +0x99,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01, +0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01, +0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9B,0xE2,0x07, +0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86, 
+0x2E,0x30,0x36,0x06,0x03,0x55,0x1D,0x1F,0x04,0x2F,0x30,0x2D,0x30,0x2B,0xA0,0x29, +0xA0,0x27,0x86,0x25,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x67, +0x6C,0x6F,0x62,0x61,0x6C,0x73,0x69,0x67,0x6E,0x2E,0x6E,0x65,0x74,0x2F,0x72,0x6F, +0x6F,0x74,0x2D,0x72,0x32,0x2E,0x63,0x72,0x6C,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23, +0x04,0x18,0x30,0x16,0x80,0x14,0x9B,0xE2,0x07,0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06, +0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,0x2E,0x30,0x0D,0x06,0x09,0x2A,0x86, +0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0x81, +0x53,0x87,0x1C,0x68,0x97,0x86,0x91,0xEC,0xE0,0x4A,0xB8,0x44,0x0B,0xAB,0x81,0xAC, +0x27,0x4F,0xD6,0xC1,0xB8,0x1C,0x43,0x78,0xB3,0x0C,0x9A,0xFC,0xEA,0x2C,0x3C,0x6E, +0x61,0x1B,0x4D,0x4B,0x29,0xF5,0x9F,0x05,0x1D,0x26,0xC1,0xB8,0xE9,0x83,0x00,0x62, +0x45,0xB6,0xA9,0x08,0x93,0xB9,0xA9,0x33,0x4B,0x18,0x9A,0xC2,0xF8,0x87,0x88,0x4E, +0xDB,0xDD,0x71,0x34,0x1A,0xC1,0x54,0xDA,0x46,0x3F,0xE0,0xD3,0x2A,0xAB,0x6D,0x54, +0x22,0xF5,0x3A,0x62,0xCD,0x20,0x6F,0xBA,0x29,0x89,0xD7,0xDD,0x91,0xEE,0xD3,0x5C, +0xA2,0x3E,0xA1,0x5B,0x41,0xF5,0xDF,0xE5,0x64,0x43,0x2D,0xE9,0xD5,0x39,0xAB,0xD2, +0xA2,0xDF,0xB7,0x8B,0xD0,0xC0,0x80,0x19,0x1C,0x45,0xC0,0x2D,0x8C,0xE8,0xF8,0x2D, +0xA4,0x74,0x56,0x49,0xC5,0x05,0xB5,0x4F,0x15,0xDE,0x6E,0x44,0x78,0x39,0x87,0xA8, +0x7E,0xBB,0xF3,0x79,0x18,0x91,0xBB,0xF4,0x6F,0x9D,0xC1,0xF0,0x8C,0x35,0x8C,0x5D, +0x01,0xFB,0xC3,0x6D,0xB9,0xEF,0x44,0x6D,0x79,0x46,0x31,0x7E,0x0A,0xFE,0xA9,0x82, +0xC1,0xFF,0xEF,0xAB,0x6E,0x20,0xC4,0x50,0xC9,0x5F,0x9D,0x4D,0x9B,0x17,0x8C,0x0C, +0xE5,0x01,0xC9,0xA0,0x41,0x6A,0x73,0x53,0xFA,0xA5,0x50,0xB4,0x6E,0x25,0x0F,0xFB, +0x4C,0x18,0xF4,0xFD,0x52,0xD9,0x8E,0x69,0xB1,0xE8,0x11,0x0F,0xDE,0x88,0xD8,0xFB, +0x1D,0x49,0xF7,0xAA,0xDE,0x95,0xCF,0x20,0x78,0xC2,0x60,0x12,0xDB,0x25,0x40,0x8C, +0x6A,0xFC,0x7E,0x42,0x38,0x40,0x64,0x12,0xF7,0x9E,0x81,0xE1,0x93,0x2E, }; -/* subject:/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */ 
-/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */ +/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium */ +/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium */ -const unsigned char Starfield_Root_Certificate_Authority___G2_certificate[993]={ +const unsigned char AffirmTrust_Premium_certificate[1354]={ +0x30,0x82,0x05,0x46,0x30,0x82,0x03,0x2E,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x6D, +0x8C,0x14,0x46,0xB1,0xA6,0x0A,0xEE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, +0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, +0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B, +0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,0x30,0x1A,0x06, +0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, +0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30, +0x31,0x32,0x39,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x17,0x0D,0x34,0x30,0x31,0x32, +0x33,0x31,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x30,0x41,0x31,0x0B,0x30,0x09,0x06, +0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04, +0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C, +0x30,0x1A,0x06,0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54, +0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x82,0x02,0x22, +0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03, +0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xC4,0x12,0xDF, +0xA9,0x5F,0xFE,0x41,0xDD,0xDD,0xF5,0x9F,0x8A,0xE3,0xF6,0xAC,0xE1,0x3C,0x78,0x9A, +0xBC,0xD8,0xF0,0x7F,0x7A,0xA0,0x33,0x2A,0xDC,0x8D,0x20,0x5B,0xAE,0x2D,0x6F,0xE7, +0x93,0xD9,0x36,0x70,0x6A,0x68,0xCF,0x8E,0x51,0xA3,0x85,0x5B,0x67,0x04,0xA0,0x10, +0x24,0x6F,0x5D,0x28,0x82,0xC1,0x97,0x57,0xD8,0x48,0x29,0x13,0xB6,0xE1,0xBE,0x91, 
+0x4D,0xDF,0x85,0x0C,0x53,0x18,0x9A,0x1E,0x24,0xA2,0x4F,0x8F,0xF0,0xA2,0x85,0x0B, +0xCB,0xF4,0x29,0x7F,0xD2,0xA4,0x58,0xEE,0x26,0x4D,0xC9,0xAA,0xA8,0x7B,0x9A,0xD9, +0xFA,0x38,0xDE,0x44,0x57,0x15,0xE5,0xF8,0x8C,0xC8,0xD9,0x48,0xE2,0x0D,0x16,0x27, +0x1D,0x1E,0xC8,0x83,0x85,0x25,0xB7,0xBA,0xAA,0x55,0x41,0xCC,0x03,0x22,0x4B,0x2D, +0x91,0x8D,0x8B,0xE6,0x89,0xAF,0x66,0xC7,0xE9,0xFF,0x2B,0xE9,0x3C,0xAC,0xDA,0xD2, +0xB3,0xC3,0xE1,0x68,0x9C,0x89,0xF8,0x7A,0x00,0x56,0xDE,0xF4,0x55,0x95,0x6C,0xFB, +0xBA,0x64,0xDD,0x62,0x8B,0xDF,0x0B,0x77,0x32,0xEB,0x62,0xCC,0x26,0x9A,0x9B,0xBB, +0xAA,0x62,0x83,0x4C,0xB4,0x06,0x7A,0x30,0xC8,0x29,0xBF,0xED,0x06,0x4D,0x97,0xB9, +0x1C,0xC4,0x31,0x2B,0xD5,0x5F,0xBC,0x53,0x12,0x17,0x9C,0x99,0x57,0x29,0x66,0x77, +0x61,0x21,0x31,0x07,0x2E,0x25,0x49,0x9D,0x18,0xF2,0xEE,0xF3,0x2B,0x71,0x8C,0xB5, +0xBA,0x39,0x07,0x49,0x77,0xFC,0xEF,0x2E,0x92,0x90,0x05,0x8D,0x2D,0x2F,0x77,0x7B, +0xEF,0x43,0xBF,0x35,0xBB,0x9A,0xD8,0xF9,0x73,0xA7,0x2C,0xF2,0xD0,0x57,0xEE,0x28, +0x4E,0x26,0x5F,0x8F,0x90,0x68,0x09,0x2F,0xB8,0xF8,0xDC,0x06,0xE9,0x2E,0x9A,0x3E, +0x51,0xA7,0xD1,0x22,0xC4,0x0A,0xA7,0x38,0x48,0x6C,0xB3,0xF9,0xFF,0x7D,0xAB,0x86, +0x57,0xE3,0xBA,0xD6,0x85,0x78,0x77,0xBA,0x43,0xEA,0x48,0x7F,0xF6,0xD8,0xBE,0x23, +0x6D,0x1E,0xBF,0xD1,0x36,0x6C,0x58,0x5C,0xF1,0xEE,0xA4,0x19,0x54,0x1A,0xF5,0x03, +0xD2,0x76,0xE6,0xE1,0x8C,0xBD,0x3C,0xB3,0xD3,0x48,0x4B,0xE2,0xC8,0xF8,0x7F,0x92, +0xA8,0x76,0x46,0x9C,0x42,0x65,0x3E,0xA4,0x1E,0xC1,0x07,0x03,0x5A,0x46,0x2D,0xB8, +0x97,0xF3,0xB7,0xD5,0xB2,0x55,0x21,0xEF,0xBA,0xDC,0x4C,0x00,0x97,0xFB,0x14,0x95, +0x27,0x33,0xBF,0xE8,0x43,0x47,0x46,0xD2,0x08,0x99,0x16,0x60,0x3B,0x9A,0x7E,0xD2, +0xE6,0xED,0x38,0xEA,0xEC,0x01,0x1E,0x3C,0x48,0x56,0x49,0x09,0xC7,0x4C,0x37,0x00, +0x9E,0x88,0x0E,0xC0,0x73,0xE1,0x6F,0x66,0xE9,0x72,0x47,0x30,0x3E,0x10,0xE5,0x0B, +0x03,0xC9,0x9A,0x42,0x00,0x6C,0xC5,0x94,0x7E,0x61,0xC4,0x8A,0xDF,0x7F,0x82,0x1A, +0x0B,0x59,0xC4,0x59,0x32,0x77,0xB3,0xBC,0x60,0x69,0x56,0x39,0xFD,0xB4,0x06,0x7B, 
+0x2C,0xD6,0x64,0x36,0xD9,0xBD,0x48,0xED,0x84,0x1F,0x7E,0xA5,0x22,0x8F,0x2A,0xB8, +0x42,0xF4,0x82,0xB7,0xD4,0x53,0x90,0x78,0x4E,0x2D,0x1A,0xFD,0x81,0x6F,0x44,0xD7, +0x3B,0x01,0x74,0x96,0x42,0xE0,0x00,0xE2,0x2E,0x6B,0xEA,0xC5,0xEE,0x72,0xAC,0xBB, +0xBF,0xFE,0xEA,0xAA,0xA8,0xF8,0xDC,0xF6,0xB2,0x79,0x8A,0xB6,0x67,0x02,0x03,0x01, +0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, +0x14,0x9D,0xC0,0x67,0xA6,0x0C,0x22,0xD9,0x26,0xF5,0x45,0xAB,0xA6,0x65,0x52,0x11, +0x27,0xD8,0x45,0xAC,0x63,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, +0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, +0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, +0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0xB3,0x57,0x4D,0x10,0x62,0x4E, +0x3A,0xE4,0xAC,0xEA,0xB8,0x1C,0xAF,0x32,0x23,0xC8,0xB3,0x49,0x5A,0x51,0x9C,0x76, +0x28,0x8D,0x79,0xAA,0x57,0x46,0x17,0xD5,0xF5,0x52,0xF6,0xB7,0x44,0xE8,0x08,0x44, +0xBF,0x18,0x84,0xD2,0x0B,0x80,0xCD,0xC5,0x12,0xFD,0x00,0x55,0x05,0x61,0x87,0x41, +0xDC,0xB5,0x24,0x9E,0x3C,0xC4,0xD8,0xC8,0xFB,0x70,0x9E,0x2F,0x78,0x96,0x83,0x20, +0x36,0xDE,0x7C,0x0F,0x69,0x13,0x88,0xA5,0x75,0x36,0x98,0x08,0xA6,0xC6,0xDF,0xAC, +0xCE,0xE3,0x58,0xD6,0xB7,0x3E,0xDE,0xBA,0xF3,0xEB,0x34,0x40,0xD8,0xA2,0x81,0xF5, +0x78,0x3F,0x2F,0xD5,0xA5,0xFC,0xD9,0xA2,0xD4,0x5E,0x04,0x0E,0x17,0xAD,0xFE,0x41, +0xF0,0xE5,0xB2,0x72,0xFA,0x44,0x82,0x33,0x42,0xE8,0x2D,0x58,0xF7,0x56,0x8C,0x62, +0x3F,0xBA,0x42,0xB0,0x9C,0x0C,0x5C,0x7E,0x2E,0x65,0x26,0x5C,0x53,0x4F,0x00,0xB2, +0x78,0x7E,0xA1,0x0D,0x99,0x2D,0x8D,0xB8,0x1D,0x8E,0xA2,0xC4,0xB0,0xFD,0x60,0xD0, +0x30,0xA4,0x8E,0xC8,0x04,0x62,0xA9,0xC4,0xED,0x35,0xDE,0x7A,0x97,0xED,0x0E,0x38, +0x5E,0x92,0x2F,0x93,0x70,0xA5,0xA9,0x9C,0x6F,0xA7,0x7D,0x13,0x1D,0x7E,0xC6,0x08, +0x48,0xB1,0x5E,0x67,0xEB,0x51,0x08,0x25,0xE9,0xE6,0x25,0x6B,0x52,0x29,0x91,0x9C, +0xD2,0x39,0x73,0x08,0x57,0xDE,0x99,0x06,0xB4,0x5B,0x9D,0x10,0x06,0xE1,0xC2,0x00, 
+0xA8,0xB8,0x1C,0x4A,0x02,0x0A,0x14,0xD0,0xC1,0x41,0xCA,0xFB,0x8C,0x35,0x21,0x7D, +0x82,0x38,0xF2,0xA9,0x54,0x91,0x19,0x35,0x93,0x94,0x6D,0x6A,0x3A,0xC5,0xB2,0xD0, +0xBB,0x89,0x86,0x93,0xE8,0x9B,0xC9,0x0F,0x3A,0xA7,0x7A,0xB8,0xA1,0xF0,0x78,0x46, +0xFA,0xFC,0x37,0x2F,0xE5,0x8A,0x84,0xF3,0xDF,0xFE,0x04,0xD9,0xA1,0x68,0xA0,0x2F, +0x24,0xE2,0x09,0x95,0x06,0xD5,0x95,0xCA,0xE1,0x24,0x96,0xEB,0x7C,0xF6,0x93,0x05, +0xBB,0xED,0x73,0xE9,0x2D,0xD1,0x75,0x39,0xD7,0xE7,0x24,0xDB,0xD8,0x4E,0x5F,0x43, +0x8F,0x9E,0xD0,0x14,0x39,0xBF,0x55,0x70,0x48,0x99,0x57,0x31,0xB4,0x9C,0xEE,0x4A, +0x98,0x03,0x96,0x30,0x1F,0x60,0x06,0xEE,0x1B,0x23,0xFE,0x81,0x60,0x23,0x1A,0x47, +0x62,0x85,0xA5,0xCC,0x19,0x34,0x80,0x6F,0xB3,0xAC,0x1A,0xE3,0x9F,0xF0,0x7B,0x48, +0xAD,0xD5,0x01,0xD9,0x67,0xB6,0xA9,0x72,0x93,0xEA,0x2D,0x66,0xB5,0xB2,0xB8,0xE4, +0x3D,0x3C,0xB2,0xEF,0x4C,0x8C,0xEA,0xEB,0x07,0xBF,0xAB,0x35,0x9A,0x55,0x86,0xBC, +0x18,0xA6,0xB5,0xA8,0x5E,0xB4,0x83,0x6C,0x6B,0x69,0x40,0xD3,0x9F,0xDC,0xF1,0xC3, +0x69,0x6B,0xB9,0xE1,0x6D,0x09,0xF4,0xF1,0xAA,0x50,0x76,0x0A,0x7A,0x7D,0x7A,0x17, +0xA1,0x55,0x96,0x42,0x99,0x31,0x09,0xDD,0x60,0x11,0x8D,0x05,0x30,0x7E,0xE6,0x8E, +0x46,0xD1,0x9D,0x14,0xDA,0xC7,0x17,0xE4,0x05,0x96,0x8C,0xC4,0x24,0xB5,0x1B,0xCF, +0x14,0x07,0xB2,0x40,0xF8,0xA3,0x9E,0x41,0x86,0xBC,0x04,0xD0,0x6B,0x96,0xC8,0x2A, +0x80,0x34,0xFD,0xBF,0xEF,0x06,0xA3,0xDD,0x58,0xC5,0x85,0x3D,0x3E,0x8F,0xFE,0x9E, +0x29,0xE0,0xB6,0xB8,0x09,0x68,0x19,0x1C,0x18,0x43, +}; + + +/* subject:/C=US/O=Google Trust Services LLC/CN=GTS Root R4 */ +/* issuer :/C=US/O=Google Trust Services LLC/CN=GTS Root R4 */ + + +const unsigned char GTS_Root_R4_certificate[526]={ +0x30,0x82,0x02,0x0A,0x30,0x82,0x01,0x91,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x6E, +0x47,0xA9,0xC8,0x8B,0x94,0xB6,0xE8,0xBB,0x3B,0x2A,0xD8,0xA2,0xB2,0xC1,0x99,0x30, +0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x47,0x31,0x0B,0x30, +0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x22,0x30,0x20,0x06,0x03, 
+0x55,0x04,0x0A,0x13,0x19,0x47,0x6F,0x6F,0x67,0x6C,0x65,0x20,0x54,0x72,0x75,0x73, +0x74,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x4C,0x4C,0x43,0x31,0x14, +0x30,0x12,0x06,0x03,0x55,0x04,0x03,0x13,0x0B,0x47,0x54,0x53,0x20,0x52,0x6F,0x6F, +0x74,0x20,0x52,0x34,0x30,0x1E,0x17,0x0D,0x31,0x36,0x30,0x36,0x32,0x32,0x30,0x30, +0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x36,0x32,0x32,0x30,0x30,0x30, +0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, +0x02,0x55,0x53,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0A,0x13,0x19,0x47,0x6F, +0x6F,0x67,0x6C,0x65,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x53,0x65,0x72,0x76,0x69, +0x63,0x65,0x73,0x20,0x4C,0x4C,0x43,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x03, +0x13,0x0B,0x47,0x54,0x53,0x20,0x52,0x6F,0x6F,0x74,0x20,0x52,0x34,0x30,0x76,0x30, +0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00, +0x22,0x03,0x62,0x00,0x04,0xF3,0x74,0x73,0xA7,0x68,0x8B,0x60,0xAE,0x43,0xB8,0x35, +0xC5,0x81,0x30,0x7B,0x4B,0x49,0x9D,0xFB,0xC1,0x61,0xCE,0xE6,0xDE,0x46,0xBD,0x6B, +0xD5,0x61,0x18,0x35,0xAE,0x40,0xDD,0x73,0xF7,0x89,0x91,0x30,0x5A,0xEB,0x3C,0xEE, +0x85,0x7C,0xA2,0x40,0x76,0x3B,0xA9,0xC6,0xB8,0x47,0xD8,0x2A,0xE7,0x92,0x91,0x6A, +0x73,0xE9,0xB1,0x72,0x39,0x9F,0x29,0x9F,0xA2,0x98,0xD3,0x5F,0x5E,0x58,0x86,0x65, +0x0F,0xA1,0x84,0x65,0x06,0xD1,0xDC,0x8B,0xC9,0xC7,0x73,0xC8,0x8C,0x6A,0x2F,0xE5, +0xC4,0xAB,0xD1,0x1D,0x8A,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F, +0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13, +0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D, +0x0E,0x04,0x16,0x04,0x14,0x80,0x4C,0xD6,0xEB,0x74,0xFF,0x49,0x36,0xA3,0xD5,0xD8, +0xFC,0xB5,0x3E,0xC5,0x6A,0xF0,0x94,0x1D,0x8C,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48, +0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,0x6A,0x50,0x52,0x74, +0x08,0xC4,0x70,0xDC,0x9E,0x50,0x74,0x21,0xE8,0x8D,0x7A,0x21,0xC3,0x4F,0x96,0x6E, 
+0x15,0xD1,0x22,0x35,0x61,0x2D,0xFA,0x08,0x37,0xEE,0x19,0x6D,0xAD,0xDB,0xB2,0xCC, +0x7D,0x07,0x34,0xF5,0x60,0x19,0x2C,0xB5,0x34,0xD9,0x6F,0x20,0x02,0x30,0x03,0x71, +0xB1,0xBA,0xA3,0x60,0x0B,0x86,0xED,0x9A,0x08,0x6A,0x95,0x68,0x9F,0xE2,0xB3,0xE1, +0x93,0x64,0x7C,0x5E,0x93,0xA6,0xDF,0x79,0x2D,0x8D,0x85,0xE3,0x94,0xCF,0x23,0x5D, +0x71,0xCC,0xF2,0xB0,0x4D,0xD6,0xFE,0x99,0xC8,0x94,0xA9,0x75,0xA2,0xE3, +}; + + +/* subject:/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */ +/* issuer :/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */ + + +const unsigned char Baltimore_CyberTrust_Root_certificate[891]={ +0x30,0x82,0x03,0x77,0x30,0x82,0x02,0x5F,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x02, +0x00,0x00,0xB9,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05, +0x05,0x00,0x30,0x5A,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49, +0x45,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74, +0x69,0x6D,0x6F,0x72,0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A, +0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03, +0x55,0x04,0x03,0x13,0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43, +0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E, +0x17,0x0D,0x30,0x30,0x30,0x35,0x31,0x32,0x31,0x38,0x34,0x36,0x30,0x30,0x5A,0x17, +0x0D,0x32,0x35,0x30,0x35,0x31,0x32,0x32,0x33,0x35,0x39,0x30,0x30,0x5A,0x30,0x5A, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x45,0x31,0x12,0x30, +0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72, +0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,0x43,0x79,0x62,0x65, +0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13, +0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,0x79,0x62,0x65,0x72, +0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D, 
+0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01, +0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xA3,0x04,0xBB,0x22,0xAB, +0x98,0x3D,0x57,0xE8,0x26,0x72,0x9A,0xB5,0x79,0xD4,0x29,0xE2,0xE1,0xE8,0x95,0x80, +0xB1,0xB0,0xE3,0x5B,0x8E,0x2B,0x29,0x9A,0x64,0xDF,0xA1,0x5D,0xED,0xB0,0x09,0x05, +0x6D,0xDB,0x28,0x2E,0xCE,0x62,0xA2,0x62,0xFE,0xB4,0x88,0xDA,0x12,0xEB,0x38,0xEB, +0x21,0x9D,0xC0,0x41,0x2B,0x01,0x52,0x7B,0x88,0x77,0xD3,0x1C,0x8F,0xC7,0xBA,0xB9, +0x88,0xB5,0x6A,0x09,0xE7,0x73,0xE8,0x11,0x40,0xA7,0xD1,0xCC,0xCA,0x62,0x8D,0x2D, +0xE5,0x8F,0x0B,0xA6,0x50,0xD2,0xA8,0x50,0xC3,0x28,0xEA,0xF5,0xAB,0x25,0x87,0x8A, +0x9A,0x96,0x1C,0xA9,0x67,0xB8,0x3F,0x0C,0xD5,0xF7,0xF9,0x52,0x13,0x2F,0xC2,0x1B, +0xD5,0x70,0x70,0xF0,0x8F,0xC0,0x12,0xCA,0x06,0xCB,0x9A,0xE1,0xD9,0xCA,0x33,0x7A, +0x77,0xD6,0xF8,0xEC,0xB9,0xF1,0x68,0x44,0x42,0x48,0x13,0xD2,0xC0,0xC2,0xA4,0xAE, +0x5E,0x60,0xFE,0xB6,0xA6,0x05,0xFC,0xB4,0xDD,0x07,0x59,0x02,0xD4,0x59,0x18,0x98, +0x63,0xF5,0xA5,0x63,0xE0,0x90,0x0C,0x7D,0x5D,0xB2,0x06,0x7A,0xF3,0x85,0xEA,0xEB, +0xD4,0x03,0xAE,0x5E,0x84,0x3E,0x5F,0xFF,0x15,0xED,0x69,0xBC,0xF9,0x39,0x36,0x72, +0x75,0xCF,0x77,0x52,0x4D,0xF3,0xC9,0x90,0x2C,0xB9,0x3D,0xE5,0xC9,0x23,0x53,0x3F, +0x1F,0x24,0x98,0x21,0x5C,0x07,0x99,0x29,0xBD,0xC6,0x3A,0xEC,0xE7,0x6E,0x86,0x3A, +0x6B,0x97,0x74,0x63,0x33,0xBD,0x68,0x18,0x31,0xF0,0x78,0x8D,0x76,0xBF,0xFC,0x9E, +0x8E,0x5D,0x2A,0x86,0xA7,0x4D,0x90,0xDC,0x27,0x1A,0x39,0x02,0x03,0x01,0x00,0x01, +0xA3,0x45,0x30,0x43,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xE5, +0x9D,0x59,0x30,0x82,0x47,0x58,0xCC,0xAC,0xFA,0x08,0x54,0x36,0x86,0x7B,0x3A,0xB5, +0x04,0x4D,0xF0,0x30,0x12,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x08,0x30, +0x06,0x01,0x01,0xFF,0x02,0x01,0x03,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01, +0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, +0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x85,0x0C,0x5D,0x8E,0xE4, 
+0x6F,0x51,0x68,0x42,0x05,0xA0,0xDD,0xBB,0x4F,0x27,0x25,0x84,0x03,0xBD,0xF7,0x64, +0xFD,0x2D,0xD7,0x30,0xE3,0xA4,0x10,0x17,0xEB,0xDA,0x29,0x29,0xB6,0x79,0x3F,0x76, +0xF6,0x19,0x13,0x23,0xB8,0x10,0x0A,0xF9,0x58,0xA4,0xD4,0x61,0x70,0xBD,0x04,0x61, +0x6A,0x12,0x8A,0x17,0xD5,0x0A,0xBD,0xC5,0xBC,0x30,0x7C,0xD6,0xE9,0x0C,0x25,0x8D, +0x86,0x40,0x4F,0xEC,0xCC,0xA3,0x7E,0x38,0xC6,0x37,0x11,0x4F,0xED,0xDD,0x68,0x31, +0x8E,0x4C,0xD2,0xB3,0x01,0x74,0xEE,0xBE,0x75,0x5E,0x07,0x48,0x1A,0x7F,0x70,0xFF, +0x16,0x5C,0x84,0xC0,0x79,0x85,0xB8,0x05,0xFD,0x7F,0xBE,0x65,0x11,0xA3,0x0F,0xC0, +0x02,0xB4,0xF8,0x52,0x37,0x39,0x04,0xD5,0xA9,0x31,0x7A,0x18,0xBF,0xA0,0x2A,0xF4, +0x12,0x99,0xF7,0xA3,0x45,0x82,0xE3,0x3C,0x5E,0xF5,0x9D,0x9E,0xB5,0xC8,0x9E,0x7C, +0x2E,0xC8,0xA4,0x9E,0x4E,0x08,0x14,0x4B,0x6D,0xFD,0x70,0x6D,0x6B,0x1A,0x63,0xBD, +0x64,0xE6,0x1F,0xB7,0xCE,0xF0,0xF2,0x9F,0x2E,0xBB,0x1B,0xB7,0xF2,0x50,0x88,0x73, +0x92,0xC2,0xE2,0xE3,0x16,0x8D,0x9A,0x32,0x02,0xAB,0x8E,0x18,0xDD,0xE9,0x10,0x11, +0xEE,0x7E,0x35,0xAB,0x90,0xAF,0x3E,0x30,0x94,0x7A,0xD0,0x33,0x3D,0xA7,0x65,0x0F, +0xF5,0xFC,0x8E,0x9E,0x62,0xCF,0x47,0x44,0x2C,0x01,0x5D,0xBB,0x1D,0xB5,0x32,0xD2, +0x47,0xD2,0x38,0x2E,0xD0,0xFE,0x81,0xDC,0x32,0x6A,0x1E,0xB5,0xEE,0x3C,0xD5,0xFC, +0xE7,0x81,0x1D,0x19,0xC3,0x24,0x42,0xEA,0x63,0x39,0xA9, +}; + + +/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */ + + +const unsigned char DigiCert_Assured_ID_Root_CA_certificate[955]={ +0x30,0x82,0x03,0xB7,0x30,0x82,0x02,0x9F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0C, +0xE7,0xE0,0xE5,0x17,0xD8,0x46,0xFE,0x8F,0xE5,0x60,0xFC,0x1B,0xF0,0x30,0x39,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x65, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, +0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, 
+0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, +0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, +0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65, +0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F, +0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30, +0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30, +0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, +0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44, +0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06, +0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65, +0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13, +0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65, +0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22, +0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03, +0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAD,0x0E,0x15, +0xCE,0xE4,0x43,0x80,0x5C,0xB1,0x87,0xF3,0xB7,0x60,0xF9,0x71,0x12,0xA5,0xAE,0xDC, +0x26,0x94,0x88,0xAA,0xF4,0xCE,0xF5,0x20,0x39,0x28,0x58,0x60,0x0C,0xF8,0x80,0xDA, +0xA9,0x15,0x95,0x32,0x61,0x3C,0xB5,0xB1,0x28,0x84,0x8A,0x8A,0xDC,0x9F,0x0A,0x0C, +0x83,0x17,0x7A,0x8F,0x90,0xAC,0x8A,0xE7,0x79,0x53,0x5C,0x31,0x84,0x2A,0xF6,0x0F, +0x98,0x32,0x36,0x76,0xCC,0xDE,0xDD,0x3C,0xA8,0xA2,0xEF,0x6A,0xFB,0x21,0xF2,0x52, +0x61,0xDF,0x9F,0x20,0xD7,0x1F,0xE2,0xB1,0xD9,0xFE,0x18,0x64,0xD2,0x12,0x5B,0x5F, +0xF9,0x58,0x18,0x35,0xBC,0x47,0xCD,0xA1,0x36,0xF9,0x6B,0x7F,0xD4,0xB0,0x38,0x3E, +0xC1,0x1B,0xC3,0x8C,0x33,0xD9,0xD8,0x2F,0x18,0xFE,0x28,0x0F,0xB3,0xA7,0x83,0xD6, +0xC3,0x6E,0x44,0xC0,0x61,0x35,0x96,0x16,0xFE,0x59,0x9C,0x8B,0x76,0x6D,0xD7,0xF1, 
+0xA2,0x4B,0x0D,0x2B,0xFF,0x0B,0x72,0xDA,0x9E,0x60,0xD0,0x8E,0x90,0x35,0xC6,0x78, +0x55,0x87,0x20,0xA1,0xCF,0xE5,0x6D,0x0A,0xC8,0x49,0x7C,0x31,0x98,0x33,0x6C,0x22, +0xE9,0x87,0xD0,0x32,0x5A,0xA2,0xBA,0x13,0x82,0x11,0xED,0x39,0x17,0x9D,0x99,0x3A, +0x72,0xA1,0xE6,0xFA,0xA4,0xD9,0xD5,0x17,0x31,0x75,0xAE,0x85,0x7D,0x22,0xAE,0x3F, +0x01,0x46,0x86,0xF6,0x28,0x79,0xC8,0xB1,0xDA,0xE4,0x57,0x17,0xC4,0x7E,0x1C,0x0E, +0xB0,0xB4,0x92,0xA6,0x56,0xB3,0xBD,0xB2,0x97,0xED,0xAA,0xA7,0xF0,0xB7,0xC5,0xA8, +0x3F,0x95,0x16,0xD0,0xFF,0xA1,0x96,0xEB,0x08,0x5F,0x18,0x77,0x4F,0x02,0x03,0x01, +0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, +0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF, +0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16, +0x04,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,0xA7, +0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30, +0x16,0x80,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7, +0xA7,0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, +0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xA2,0x0E,0xBC,0xDF,0xE2, +0xED,0xF0,0xE3,0x72,0x73,0x7A,0x64,0x94,0xBF,0xF7,0x72,0x66,0xD8,0x32,0xE4,0x42, +0x75,0x62,0xAE,0x87,0xEB,0xF2,0xD5,0xD9,0xDE,0x56,0xB3,0x9F,0xCC,0xCE,0x14,0x28, +0xB9,0x0D,0x97,0x60,0x5C,0x12,0x4C,0x58,0xE4,0xD3,0x3D,0x83,0x49,0x45,0x58,0x97, +0x35,0x69,0x1A,0xA8,0x47,0xEA,0x56,0xC6,0x79,0xAB,0x12,0xD8,0x67,0x81,0x84,0xDF, +0x7F,0x09,0x3C,0x94,0xE6,0xB8,0x26,0x2C,0x20,0xBD,0x3D,0xB3,0x28,0x89,0xF7,0x5F, +0xFF,0x22,0xE2,0x97,0x84,0x1F,0xE9,0x65,0xEF,0x87,0xE0,0xDF,0xC1,0x67,0x49,0xB3, +0x5D,0xEB,0xB2,0x09,0x2A,0xEB,0x26,0xED,0x78,0xBE,0x7D,0x3F,0x2B,0xF3,0xB7,0x26, +0x35,0x6D,0x5F,0x89,0x01,0xB6,0x49,0x5B,0x9F,0x01,0x05,0x9B,0xAB,0x3D,0x25,0xC1, +0xCC,0xB6,0x7F,0xC2,0xF1,0x6F,0x86,0xC6,0xFA,0x64,0x68,0xEB,0x81,0x2D,0x94,0xEB, 
+0x42,0xB7,0xFA,0x8C,0x1E,0xDD,0x62,0xF1,0xBE,0x50,0x67,0xB7,0x6C,0xBD,0xF3,0xF1, +0x1F,0x6B,0x0C,0x36,0x07,0x16,0x7F,0x37,0x7C,0xA9,0x5B,0x6D,0x7A,0xF1,0x12,0x46, +0x60,0x83,0xD7,0x27,0x04,0xBE,0x4B,0xCE,0x97,0xBE,0xC3,0x67,0x2A,0x68,0x11,0xDF, +0x80,0xE7,0x0C,0x33,0x66,0xBF,0x13,0x0D,0x14,0x6E,0xF3,0x7F,0x1F,0x63,0x10,0x1E, +0xFA,0x8D,0x1B,0x25,0x6D,0x6C,0x8F,0xA5,0xB7,0x61,0x01,0xB1,0xD2,0xA3,0x26,0xA1, +0x10,0x71,0x9D,0xAD,0xE2,0xC3,0xF9,0xC3,0x99,0x51,0xB7,0x2B,0x07,0x08,0xCE,0x2E, +0xE6,0x50,0xB2,0xA7,0xFA,0x0A,0x45,0x2F,0xA2,0xF0,0xF2, +}; + + +/* subject:/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */ +/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=Starfield Technologies, Inc./CN=Starfield Root Certificate Authority - G2 */ + + +const unsigned char Starfield_Root_Certificate_Authority___G2_certificate[993]={ 0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00, 0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30, 0x81,0x8F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31, @@ -520,270 +834,595 @@ const unsigned char Starfield_Root_Certificate_Authority___G2_certificate[993]={ }; -/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G3 */ -/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G3 */ +/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Networking */ +/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Networking */ -const unsigned char DigiCert_Global_Root_G3_certificate[579]={ -0x30,0x82,0x02,0x3F,0x30,0x82,0x01,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x05, -0x55,0x56,0xBC,0xF2,0x5E,0xA4,0x35,0x35,0xC3,0xA4,0x0F,0xD5,0xAB,0x45,0x72,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x61,0x31,0x0B,0x30, -0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03, -0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E, 
-0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E, -0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x20,0x30,0x1E, -0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20, -0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x33,0x30,0x1E, -0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,0x30,0x30,0x5A,0x17, -0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,0x30,0x5A,0x30,0x61, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, -0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, -0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, -0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65, -0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47, -0x33,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05, -0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xDD,0xA7,0xD9,0xBB,0x8A,0xB8,0x0B, -0xFB,0x0B,0x7F,0x21,0xD2,0xF0,0xBE,0xBE,0x73,0xF3,0x33,0x5D,0x1A,0xBC,0x34,0xEA, -0xDE,0xC6,0x9B,0xBC,0xD0,0x95,0xF6,0xF0,0xCC,0xD0,0x0B,0xBA,0x61,0x5B,0x51,0x46, -0x7E,0x9E,0x2D,0x9F,0xEE,0x8E,0x63,0x0C,0x17,0xEC,0x07,0x70,0xF5,0xCF,0x84,0x2E, -0x40,0x83,0x9C,0xE8,0x3F,0x41,0x6D,0x3B,0xAD,0xD3,0xA4,0x14,0x59,0x36,0x78,0x9D, -0x03,0x43,0xEE,0x10,0x13,0x6C,0x72,0xDE,0xAE,0x88,0xA7,0xA1,0x6B,0xB5,0x43,0xCE, -0x67,0xDC,0x23,0xFF,0x03,0x1C,0xA3,0xE2,0x3E,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06, -0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E, -0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D, -0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB3,0xDB,0x48,0xA4,0xF9,0xA1,0xC5, -0xD8,0xAE,0x36,0x41,0xCC,0x11,0x63,0x69,0x62,0x29,0xBC,0x4B,0xC6,0x30,0x0A,0x06, 
-0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31, -0x00,0xAD,0xBC,0xF2,0x6C,0x3F,0x12,0x4A,0xD1,0x2D,0x39,0xC3,0x0A,0x09,0x97,0x73, -0xF4,0x88,0x36,0x8C,0x88,0x27,0xBB,0xE6,0x88,0x8D,0x50,0x85,0xA7,0x63,0xF9,0x9E, -0x32,0xDE,0x66,0x93,0x0F,0xF1,0xCC,0xB1,0x09,0x8F,0xDD,0x6C,0xAB,0xFA,0x6B,0x7F, -0xA0,0x02,0x30,0x39,0x66,0x5B,0xC2,0x64,0x8D,0xB8,0x9E,0x50,0xDC,0xA8,0xD5,0x49, -0xA2,0xED,0xC7,0xDC,0xD1,0x49,0x7F,0x17,0x01,0xB8,0xC8,0x86,0x8F,0x4E,0x8C,0x88, -0x2B,0xA8,0x9A,0xA9,0x8A,0xC5,0xD1,0x00,0xBD,0xF8,0x54,0xE2,0x9A,0xE5,0x5B,0x7C, -0xB3,0x27,0x17, +const unsigned char AffirmTrust_Networking_certificate[848]={ +0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x7C, +0x4F,0x04,0x39,0x1C,0xD4,0x99,0x2D,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, +0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, +0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B, +0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06, +0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, +0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x69,0x6E,0x67,0x30,0x1E,0x17,0x0D, +0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x38,0x32,0x34,0x5A,0x17,0x0D,0x33, +0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x38,0x32,0x34,0x5A,0x30,0x44,0x31,0x0B, +0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06, +0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, +0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69, +0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x69, +0x6E,0x67,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, +0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82, +0x01,0x01,0x00,0xB4,0x84,0xCC,0x33,0x17,0x2E,0x6B,0x94,0x6C,0x6B,0x61,0x52,0xA0, 
+0xEB,0xA3,0xCF,0x79,0x94,0x4C,0xE5,0x94,0x80,0x99,0xCB,0x55,0x64,0x44,0x65,0x8F, +0x67,0x64,0xE2,0x06,0xE3,0x5C,0x37,0x49,0xF6,0x2F,0x9B,0x84,0x84,0x1E,0x2D,0xF2, +0x60,0x9D,0x30,0x4E,0xCC,0x84,0x85,0xE2,0x2C,0xCF,0x1E,0x9E,0xFE,0x36,0xAB,0x33, +0x77,0x35,0x44,0xD8,0x35,0x96,0x1A,0x3D,0x36,0xE8,0x7A,0x0E,0xD8,0xD5,0x47,0xA1, +0x6A,0x69,0x8B,0xD9,0xFC,0xBB,0x3A,0xAE,0x79,0x5A,0xD5,0xF4,0xD6,0x71,0xBB,0x9A, +0x90,0x23,0x6B,0x9A,0xB7,0x88,0x74,0x87,0x0C,0x1E,0x5F,0xB9,0x9E,0x2D,0xFA,0xAB, +0x53,0x2B,0xDC,0xBB,0x76,0x3E,0x93,0x4C,0x08,0x08,0x8C,0x1E,0xA2,0x23,0x1C,0xD4, +0x6A,0xAD,0x22,0xBA,0x99,0x01,0x2E,0x6D,0x65,0xCB,0xBE,0x24,0x66,0x55,0x24,0x4B, +0x40,0x44,0xB1,0x1B,0xD7,0xE1,0xC2,0x85,0xC0,0xDE,0x10,0x3F,0x3D,0xED,0xB8,0xFC, +0xF1,0xF1,0x23,0x53,0xDC,0xBF,0x65,0x97,0x6F,0xD9,0xF9,0x40,0x71,0x8D,0x7D,0xBD, +0x95,0xD4,0xCE,0xBE,0xA0,0x5E,0x27,0x23,0xDE,0xFD,0xA6,0xD0,0x26,0x0E,0x00,0x29, +0xEB,0x3C,0x46,0xF0,0x3D,0x60,0xBF,0x3F,0x50,0xD2,0xDC,0x26,0x41,0x51,0x9E,0x14, +0x37,0x42,0x04,0xA3,0x70,0x57,0xA8,0x1B,0x87,0xED,0x2D,0xFA,0x7B,0xEE,0x8C,0x0A, +0xE3,0xA9,0x66,0x89,0x19,0xCB,0x41,0xF9,0xDD,0x44,0x36,0x61,0xCF,0xE2,0x77,0x46, +0xC8,0x7D,0xF6,0xF4,0x92,0x81,0x36,0xFD,0xDB,0x34,0xF1,0x72,0x7E,0xF3,0x0C,0x16, +0xBD,0xB4,0x15,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03, +0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x07,0x1F,0xD2,0xE7,0x9C,0xDA,0xC2,0x6E,0xA2, +0x40,0xB4,0xB0,0x7A,0x50,0x10,0x50,0x74,0xC4,0xC8,0xBD,0x30,0x0F,0x06,0x03,0x55, +0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03, +0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09, +0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00, +0x89,0x57,0xB2,0x16,0x7A,0xA8,0xC2,0xFD,0xD6,0xD9,0x9B,0x9B,0x34,0xC2,0x9C,0xB4, +0x32,0x14,0x4D,0xA7,0xA4,0xDF,0xEC,0xBE,0xA7,0xBE,0xF8,0x43,0xDB,0x91,0x37,0xCE, +0xB4,0x32,0x2E,0x50,0x55,0x1A,0x35,0x4E,0x76,0x43,0x71,0x20,0xEF,0x93,0x77,0x4E, 
+0x15,0x70,0x2E,0x87,0xC3,0xC1,0x1D,0x6D,0xDC,0xCB,0xB5,0x27,0xD4,0x2C,0x56,0xD1, +0x52,0x53,0x3A,0x44,0xD2,0x73,0xC8,0xC4,0x1B,0x05,0x65,0x5A,0x62,0x92,0x9C,0xEE, +0x41,0x8D,0x31,0xDB,0xE7,0x34,0xEA,0x59,0x21,0xD5,0x01,0x7A,0xD7,0x64,0xB8,0x64, +0x39,0xCD,0xC9,0xED,0xAF,0xED,0x4B,0x03,0x48,0xA7,0xA0,0x99,0x01,0x80,0xDC,0x65, +0xA3,0x36,0xAE,0x65,0x59,0x48,0x4F,0x82,0x4B,0xC8,0x65,0xF1,0x57,0x1D,0xE5,0x59, +0x2E,0x0A,0x3F,0x6C,0xD8,0xD1,0xF5,0xE5,0x09,0xB4,0x6C,0x54,0x00,0x0A,0xE0,0x15, +0x4D,0x87,0x75,0x6D,0xB7,0x58,0x96,0x5A,0xDD,0x6D,0xD2,0x00,0xA0,0xF4,0x9B,0x48, +0xBE,0xC3,0x37,0xA4,0xBA,0x36,0xE0,0x7C,0x87,0x85,0x97,0x1A,0x15,0xA2,0xDE,0x2E, +0xA2,0x5B,0xBD,0xAF,0x18,0xF9,0x90,0x50,0xCD,0x70,0x59,0xF8,0x27,0x67,0x47,0xCB, +0xC7,0xA0,0x07,0x3A,0x7D,0xD1,0x2C,0x5D,0x6C,0x19,0x3A,0x66,0xB5,0x7D,0xFD,0x91, +0x6F,0x82,0xB1,0xBE,0x08,0x93,0xDB,0x14,0x47,0xF1,0xA2,0x37,0xC7,0x45,0x9E,0x3C, +0xC7,0x77,0xAF,0x64,0xA8,0x93,0xDF,0xF6,0x69,0x83,0x82,0x60,0xF2,0x49,0x42,0x34, +0xED,0x5A,0x00,0x54,0x85,0x1C,0x16,0x36,0x92,0x0C,0x5C,0xFA,0xA6,0xAD,0xBF,0xDB, }; -/* subject:/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G2 */ -/* issuer :/C=US/O=thawte, Inc./OU=(c) 2007 thawte, Inc. 
- For authorized use only/CN=thawte Primary Root CA - G2 */ +/* subject:/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */ +/* issuer :/C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA */ -const unsigned char thawte_Primary_Root_CA___G2_certificate[652]={ -0x30,0x82,0x02,0x88,0x30,0x82,0x02,0x0D,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x35, -0xFC,0x26,0x5C,0xD9,0x84,0x4F,0xC9,0x3D,0x26,0x3D,0x57,0x9B,0xAE,0xD7,0x56,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x84,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E, -0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29, -0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E, -0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69, -0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22, -0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72, -0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20, -0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35,0x30,0x30,0x30,0x30, -0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39,0x35, -0x39,0x5A,0x30,0x81,0x84,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02, -0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61, -0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x38,0x30,0x36,0x06,0x03,0x55, -0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x74,0x68,0x61, -0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20, -0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F, -0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x74,0x68, -0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52,0x6F,0x6F, 
-0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A, -0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00, -0x04,0xA2,0xD5,0x9C,0x82,0x7B,0x95,0x9D,0xF1,0x52,0x78,0x87,0xFE,0x8A,0x16,0xBF, -0x05,0xE6,0xDF,0xA3,0x02,0x4F,0x0D,0x07,0xC6,0x00,0x51,0xBA,0x0C,0x02,0x52,0x2D, -0x22,0xA4,0x42,0x39,0xC4,0xFE,0x8F,0xEA,0xC9,0xC1,0xBE,0xD4,0x4D,0xFF,0x9F,0x7A, -0x9E,0xE2,0xB1,0x7C,0x9A,0xAD,0xA7,0x86,0x09,0x73,0x87,0xD1,0xE7,0x9A,0xE3,0x7A, -0xA5,0xAA,0x6E,0xFB,0xBA,0xB3,0x70,0xC0,0x67,0x88,0xA2,0x35,0xD4,0xA3,0x9A,0xB1, -0xFD,0xAD,0xC2,0xEF,0x31,0xFA,0xA8,0xB9,0xF3,0xFB,0x08,0xC6,0x91,0xD1,0xFB,0x29, -0x95,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, -0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, -0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, -0x14,0x9A,0xD8,0x00,0x30,0x00,0xE7,0x6B,0x7F,0x85,0x18,0xEE,0x8B,0xB6,0xCE,0x8A, -0x0C,0xF8,0x11,0xE1,0xBB,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03, -0x03,0x03,0x69,0x00,0x30,0x66,0x02,0x31,0x00,0xDD,0xF8,0xE0,0x57,0x47,0x5B,0xA7, -0xE6,0x0A,0xC3,0xBD,0xF5,0x80,0x8A,0x97,0x35,0x0D,0x1B,0x89,0x3C,0x54,0x86,0x77, -0x28,0xCA,0xA1,0xF4,0x79,0xDE,0xB5,0xE6,0x38,0xB0,0xF0,0x65,0x70,0x8C,0x7F,0x02, -0x54,0xC2,0xBF,0xFF,0xD8,0xA1,0x3E,0xD9,0xCF,0x02,0x31,0x00,0xC4,0x8D,0x94,0xFC, -0xDC,0x53,0xD2,0xDC,0x9D,0x78,0x16,0x1F,0x15,0x33,0x23,0x53,0x52,0xE3,0x5A,0x31, -0x5D,0x9D,0xCA,0xAE,0xBD,0x13,0x29,0x44,0x0D,0x27,0x5B,0xA8,0xE7,0x68,0x9C,0x12, -0xF7,0x58,0x3F,0x2E,0x72,0x02,0x57,0xA3,0x8F,0xA1,0x14,0x2E, +const unsigned char GlobalSign_Root_CA_certificate[889]={ +0x30,0x82,0x03,0x75,0x30,0x82,0x02,0x5D,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, +0x00,0x00,0x00,0x00,0x01,0x15,0x4B,0x5A,0xC3,0x94,0x30,0x0D,0x06,0x09,0x2A,0x86, +0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x57,0x31,0x0B,0x30,0x09,0x06, 
+0x03,0x55,0x04,0x06,0x13,0x02,0x42,0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04, +0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76, +0x2D,0x73,0x61,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F, +0x6F,0x74,0x20,0x43,0x41,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12, +0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20, +0x43,0x41,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x39,0x30,0x31,0x31,0x32,0x30,0x30, +0x30,0x30,0x5A,0x17,0x0D,0x32,0x38,0x30,0x31,0x32,0x38,0x31,0x32,0x30,0x30,0x30, +0x30,0x5A,0x30,0x57,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x42, +0x45,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0A,0x13,0x10,0x47,0x6C,0x6F,0x62, +0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x6E,0x76,0x2D,0x73,0x61,0x31,0x10,0x30,0x0E, +0x06,0x03,0x55,0x04,0x0B,0x13,0x07,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x31,0x1B, +0x30,0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, +0x69,0x67,0x6E,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82, +0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0x0E,0xE6,0x99, +0x8D,0xCE,0xA3,0xE3,0x4F,0x8A,0x7E,0xFB,0xF1,0x8B,0x83,0x25,0x6B,0xEA,0x48,0x1F, +0xF1,0x2A,0xB0,0xB9,0x95,0x11,0x04,0xBD,0xF0,0x63,0xD1,0xE2,0x67,0x66,0xCF,0x1C, +0xDD,0xCF,0x1B,0x48,0x2B,0xEE,0x8D,0x89,0x8E,0x9A,0xAF,0x29,0x80,0x65,0xAB,0xE9, +0xC7,0x2D,0x12,0xCB,0xAB,0x1C,0x4C,0x70,0x07,0xA1,0x3D,0x0A,0x30,0xCD,0x15,0x8D, +0x4F,0xF8,0xDD,0xD4,0x8C,0x50,0x15,0x1C,0xEF,0x50,0xEE,0xC4,0x2E,0xF7,0xFC,0xE9, +0x52,0xF2,0x91,0x7D,0xE0,0x6D,0xD5,0x35,0x30,0x8E,0x5E,0x43,0x73,0xF2,0x41,0xE9, +0xD5,0x6A,0xE3,0xB2,0x89,0x3A,0x56,0x39,0x38,0x6F,0x06,0x3C,0x88,0x69,0x5B,0x2A, +0x4D,0xC5,0xA7,0x54,0xB8,0x6C,0x89,0xCC,0x9B,0xF9,0x3C,0xCA,0xE5,0xFD,0x89,0xF5, +0x12,0x3C,0x92,0x78,0x96,0xD6,0xDC,0x74,0x6E,0x93,0x44,0x61,0xD1,0x8D,0xC7,0x46, 
+0xB2,0x75,0x0E,0x86,0xE8,0x19,0x8A,0xD5,0x6D,0x6C,0xD5,0x78,0x16,0x95,0xA2,0xE9, +0xC8,0x0A,0x38,0xEB,0xF2,0x24,0x13,0x4F,0x73,0x54,0x93,0x13,0x85,0x3A,0x1B,0xBC, +0x1E,0x34,0xB5,0x8B,0x05,0x8C,0xB9,0x77,0x8B,0xB1,0xDB,0x1F,0x20,0x91,0xAB,0x09, +0x53,0x6E,0x90,0xCE,0x7B,0x37,0x74,0xB9,0x70,0x47,0x91,0x22,0x51,0x63,0x16,0x79, +0xAE,0xB1,0xAE,0x41,0x26,0x08,0xC8,0x19,0x2B,0xD1,0x46,0xAA,0x48,0xD6,0x64,0x2A, +0xD7,0x83,0x34,0xFF,0x2C,0x2A,0xC1,0x6C,0x19,0x43,0x4A,0x07,0x85,0xE7,0xD3,0x7C, +0xF6,0x21,0x68,0xEF,0xEA,0xF2,0x52,0x9F,0x7F,0x93,0x90,0xCF,0x02,0x03,0x01,0x00, +0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04, +0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, +0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, +0x14,0x60,0x7B,0x66,0x1A,0x45,0x0D,0x97,0xCA,0x89,0x50,0x2F,0x7D,0x04,0xCD,0x34, +0xA8,0xFF,0xFC,0xFD,0x4B,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, +0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xD6,0x73,0xE7,0x7C,0x4F,0x76,0xD0, +0x8D,0xBF,0xEC,0xBA,0xA2,0xBE,0x34,0xC5,0x28,0x32,0xB5,0x7C,0xFC,0x6C,0x9C,0x2C, +0x2B,0xBD,0x09,0x9E,0x53,0xBF,0x6B,0x5E,0xAA,0x11,0x48,0xB6,0xE5,0x08,0xA3,0xB3, +0xCA,0x3D,0x61,0x4D,0xD3,0x46,0x09,0xB3,0x3E,0xC3,0xA0,0xE3,0x63,0x55,0x1B,0xF2, +0xBA,0xEF,0xAD,0x39,0xE1,0x43,0xB9,0x38,0xA3,0xE6,0x2F,0x8A,0x26,0x3B,0xEF,0xA0, +0x50,0x56,0xF9,0xC6,0x0A,0xFD,0x38,0xCD,0xC4,0x0B,0x70,0x51,0x94,0x97,0x98,0x04, +0xDF,0xC3,0x5F,0x94,0xD5,0x15,0xC9,0x14,0x41,0x9C,0xC4,0x5D,0x75,0x64,0x15,0x0D, +0xFF,0x55,0x30,0xEC,0x86,0x8F,0xFF,0x0D,0xEF,0x2C,0xB9,0x63,0x46,0xF6,0xAA,0xFC, +0xDF,0xBC,0x69,0xFD,0x2E,0x12,0x48,0x64,0x9A,0xE0,0x95,0xF0,0xA6,0xEF,0x29,0x8F, +0x01,0xB1,0x15,0xB5,0x0C,0x1D,0xA5,0xFE,0x69,0x2C,0x69,0x24,0x78,0x1E,0xB3,0xA7, +0x1C,0x71,0x62,0xEE,0xCA,0xC8,0x97,0xAC,0x17,0x5D,0x8A,0xC2,0xF8,0x47,0x86,0x6E, +0x2A,0xC4,0x56,0x31,0x95,0xD0,0x67,0x89,0x85,0x2B,0xF9,0x6C,0xA6,0x5D,0x46,0x9D, 
+0x0C,0xAA,0x82,0xE4,0x99,0x51,0xDD,0x70,0xB7,0xDB,0x56,0x3D,0x61,0xE4,0x6A,0xE1, +0x5C,0xD6,0xF6,0xFE,0x3D,0xDE,0x41,0xCC,0x07,0xAE,0x63,0x52,0xBF,0x53,0x53,0xF4, +0x2B,0xE9,0xC7,0xFD,0xB6,0xF7,0x82,0x5F,0x85,0xD2,0x41,0x18,0xDB,0x81,0xB3,0x04, +0x1C,0xC5,0x1F,0xA4,0x80,0x6F,0x15,0x20,0xC9,0xDE,0x0C,0x88,0x0A,0x1D,0xD6,0x66, +0x55,0xE2,0xFC,0x48,0xC9,0x29,0x26,0x69,0xE0, }; -/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */ -/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2008 VeriSign, Inc. - For authorized use only/CN=VeriSign Universal Root Certification Authority */ - - -const unsigned char VeriSign_Universal_Root_Certification_Authority_certificate[1213]={ -0x30,0x82,0x04,0xB9,0x30,0x82,0x03,0xA1,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x40, -0x1A,0xC4,0x64,0x21,0xB3,0x13,0x21,0x03,0x0E,0xBB,0xE4,0x12,0x1A,0xC5,0x1D,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81, -0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17, -0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67, -0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B, -0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74, -0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04, -0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69, -0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72, -0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20, -0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56, -0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61, -0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, 
-0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E, -0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17, -0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81, -0xBD,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17, -0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67, -0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B, -0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74, -0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04, -0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x56,0x65,0x72,0x69, -0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72, -0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20, -0x6F,0x6E,0x6C,0x79,0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x03,0x13,0x2F,0x56, -0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61, -0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, +/* subject:/C=US/O=Google Trust Services LLC/CN=GTS Root R3 */ +/* issuer :/C=US/O=Google Trust Services LLC/CN=GTS Root R3 */ + + +const unsigned char GTS_Root_R3_certificate[528]={ +0x30,0x82,0x02,0x0C,0x30,0x82,0x01,0x91,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x6E, +0x47,0xA9,0xC7,0x6C,0xA9,0x73,0x24,0x40,0x89,0x0F,0x03,0x55,0xDD,0x8D,0x1D,0x30, +0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x47,0x31,0x0B,0x30, +0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x22,0x30,0x20,0x06,0x03, +0x55,0x04,0x0A,0x13,0x19,0x47,0x6F,0x6F,0x67,0x6C,0x65,0x20,0x54,0x72,0x75,0x73, +0x74,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x4C,0x4C,0x43,0x31,0x14, +0x30,0x12,0x06,0x03,0x55,0x04,0x03,0x13,0x0B,0x47,0x54,0x53,0x20,0x52,0x6F,0x6F, +0x74,0x20,0x52,0x33,0x30,0x1E,0x17,0x0D,0x31,0x36,0x30,0x36,0x32,0x32,0x30,0x30, 
+0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x36,0x32,0x32,0x30,0x30,0x30, +0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, +0x02,0x55,0x53,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0A,0x13,0x19,0x47,0x6F, +0x6F,0x67,0x6C,0x65,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x53,0x65,0x72,0x76,0x69, +0x63,0x65,0x73,0x20,0x4C,0x4C,0x43,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x03, +0x13,0x0B,0x47,0x54,0x53,0x20,0x52,0x6F,0x6F,0x74,0x20,0x52,0x33,0x30,0x76,0x30, +0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00, +0x22,0x03,0x62,0x00,0x04,0x1F,0x4F,0x33,0x87,0x33,0x29,0x8A,0xA1,0x84,0xDE,0xCB, +0xC7,0x21,0x58,0x41,0x89,0xEA,0x56,0x9D,0x2B,0x4B,0x85,0xC6,0x1D,0x4C,0x27,0xBC, +0x7F,0x26,0x51,0x72,0x6F,0xE2,0x9F,0xD6,0xA3,0xCA,0xCC,0x45,0x14,0x46,0x8B,0xAD, +0xEF,0x7E,0x86,0x8C,0xEC,0xB1,0x7E,0x2F,0xFF,0xA9,0x71,0x9D,0x18,0x84,0x45,0x04, +0x41,0x55,0x6E,0x2B,0xEA,0x26,0x7F,0xBB,0x90,0x01,0xE3,0x4B,0x19,0xBA,0xE4,0x54, +0x96,0x45,0x09,0xB1,0xD5,0x6C,0x91,0x44,0xAD,0x84,0x13,0x8E,0x9A,0x8C,0x0D,0x80, +0x0C,0x32,0xF6,0xE0,0x27,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F, +0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13, +0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D, +0x0E,0x04,0x16,0x04,0x14,0xC1,0xF1,0x26,0xBA,0xA0,0x2D,0xAE,0x85,0x81,0xCF,0xD3, +0xF1,0x2A,0x12,0xBD,0xB8,0x0A,0x67,0xFD,0xBC,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48, +0xCE,0x3D,0x04,0x03,0x03,0x03,0x69,0x00,0x30,0x66,0x02,0x31,0x00,0x80,0x5B,0xA4, +0x7C,0x23,0xC0,0x95,0xA5,0x2C,0xDC,0xBE,0x89,0x6F,0x23,0xB9,0xA3,0xDD,0x65,0x00, +0x52,0x5E,0x91,0xAC,0xC8,0x9D,0x72,0x74,0x82,0x53,0x0B,0x7D,0xA9,0x40,0xBD,0x68, +0x60,0xC5,0xE1,0xB8,0x54,0x3B,0xC1,0x36,0x17,0x25,0xD8,0xC1,0xBD,0x02,0x31,0x00, +0x9E,0x35,0x92,0x74,0x85,0x25,0x51,0xF5,0x24,0xEC,0x64,0x52,0x24,0x50,0xA5,0x1F, +0xDB,0xE8,0xCB,0xC9,0x76,0xEC,0xEC,0x82,0x6E,0xF5,0x85,0x18,0x53,0xE8,0xB8,0xE3, 
+0x9A,0x29,0xAA,0x96,0xD3,0x83,0x23,0xC9,0xA4,0x7B,0x61,0xB3,0xCC,0x02,0xE8,0x5D, +}; + + +/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Certification Authority */ +/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Certification Authority */ + + +const unsigned char COMODO_RSA_Certification_Authority_certificate[1500]={ +0x30,0x82,0x05,0xD8,0x30,0x82,0x03,0xC0,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x4C, +0xAA,0xF9,0xCA,0xDB,0x63,0x6F,0xE0,0x1F,0xF7,0x4E,0xD8,0x5B,0x03,0x86,0x9D,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x81, +0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, +0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, +0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, +0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, +0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43, +0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55, +0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x52,0x53,0x41,0x20,0x43, +0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74, +0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x31,0x31,0x39, +0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32, +0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, +0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13, +0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73, +0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61, +0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11, +0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65, 
+0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F, +0x44,0x4F,0x20,0x52,0x53,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, 0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82, -0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05, -0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC7, -0x61,0x37,0x5E,0xB1,0x01,0x34,0xDB,0x62,0xD7,0x15,0x9B,0xFF,0x58,0x5A,0x8C,0x23, -0x23,0xD6,0x60,0x8E,0x91,0xD7,0x90,0x98,0x83,0x7A,0xE6,0x58,0x19,0x38,0x8C,0xC5, -0xF6,0xE5,0x64,0x85,0xB4,0xA2,0x71,0xFB,0xED,0xBD,0xB9,0xDA,0xCD,0x4D,0x00,0xB4, -0xC8,0x2D,0x73,0xA5,0xC7,0x69,0x71,0x95,0x1F,0x39,0x3C,0xB2,0x44,0x07,0x9C,0xE8, -0x0E,0xFA,0x4D,0x4A,0xC4,0x21,0xDF,0x29,0x61,0x8F,0x32,0x22,0x61,0x82,0xC5,0x87, -0x1F,0x6E,0x8C,0x7C,0x5F,0x16,0x20,0x51,0x44,0xD1,0x70,0x4F,0x57,0xEA,0xE3,0x1C, -0xE3,0xCC,0x79,0xEE,0x58,0xD8,0x0E,0xC2,0xB3,0x45,0x93,0xC0,0x2C,0xE7,0x9A,0x17, -0x2B,0x7B,0x00,0x37,0x7A,0x41,0x33,0x78,0xE1,0x33,0xE2,0xF3,0x10,0x1A,0x7F,0x87, -0x2C,0xBE,0xF6,0xF5,0xF7,0x42,0xE2,0xE5,0xBF,0x87,0x62,0x89,0x5F,0x00,0x4B,0xDF, -0xC5,0xDD,0xE4,0x75,0x44,0x32,0x41,0x3A,0x1E,0x71,0x6E,0x69,0xCB,0x0B,0x75,0x46, -0x08,0xD1,0xCA,0xD2,0x2B,0x95,0xD0,0xCF,0xFB,0xB9,0x40,0x6B,0x64,0x8C,0x57,0x4D, -0xFC,0x13,0x11,0x79,0x84,0xED,0x5E,0x54,0xF6,0x34,0x9F,0x08,0x01,0xF3,0x10,0x25, -0x06,0x17,0x4A,0xDA,0xF1,0x1D,0x7A,0x66,0x6B,0x98,0x60,0x66,0xA4,0xD9,0xEF,0xD2, -0x2E,0x82,0xF1,0xF0,0xEF,0x09,0xEA,0x44,0xC9,0x15,0x6A,0xE2,0x03,0x6E,0x33,0xD3, -0xAC,0x9F,0x55,0x00,0xC7,0xF6,0x08,0x6A,0x94,0xB9,0x5F,0xDC,0xE0,0x33,0xF1,0x84, -0x60,0xF9,0x5B,0x27,0x11,0xB4,0xFC,0x16,0xF2,0xBB,0x56,0x6A,0x80,0x25,0x8D,0x02, -0x03,0x01,0x00,0x01,0xA3,0x81,0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D, -0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55, -0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B, 
-0x06,0x01,0x05,0x05,0x07,0x01,0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30, -0x59,0x30,0x57,0x30,0x55,0x16,0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66, -0x30,0x21,0x30,0x1F,0x30,0x07,0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F, -0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C, -0x7B,0x19,0x2E,0x30,0x25,0x16,0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F, -0x67,0x6F,0x2E,0x76,0x65,0x72,0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F, -0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D, -0x0E,0x04,0x16,0x04,0x14,0xB6,0x77,0xFA,0x69,0x48,0x47,0x9F,0x53,0x12,0xD5,0xC2, -0xEA,0x07,0x32,0x76,0x07,0xD1,0x97,0x07,0x19,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48, -0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x4A,0xF8,0xF8, -0xB0,0x03,0xE6,0x2C,0x67,0x7B,0xE4,0x94,0x77,0x63,0xCC,0x6E,0x4C,0xF9,0x7D,0x0E, -0x0D,0xDC,0xC8,0xB9,0x35,0xB9,0x70,0x4F,0x63,0xFA,0x24,0xFA,0x6C,0x83,0x8C,0x47, -0x9D,0x3B,0x63,0xF3,0x9A,0xF9,0x76,0x32,0x95,0x91,0xB1,0x77,0xBC,0xAC,0x9A,0xBE, -0xB1,0xE4,0x31,0x21,0xC6,0x81,0x95,0x56,0x5A,0x0E,0xB1,0xC2,0xD4,0xB1,0xA6,0x59, -0xAC,0xF1,0x63,0xCB,0xB8,0x4C,0x1D,0x59,0x90,0x4A,0xEF,0x90,0x16,0x28,0x1F,0x5A, -0xAE,0x10,0xFB,0x81,0x50,0x38,0x0C,0x6C,0xCC,0xF1,0x3D,0xC3,0xF5,0x63,0xE3,0xB3, -0xE3,0x21,0xC9,0x24,0x39,0xE9,0xFD,0x15,0x66,0x46,0xF4,0x1B,0x11,0xD0,0x4D,0x73, -0xA3,0x7D,0x46,0xF9,0x3D,0xED,0xA8,0x5F,0x62,0xD4,0xF1,0x3F,0xF8,0xE0,0x74,0x57, -0x2B,0x18,0x9D,0x81,0xB4,0xC4,0x28,0xDA,0x94,0x97,0xA5,0x70,0xEB,0xAC,0x1D,0xBE, -0x07,0x11,0xF0,0xD5,0xDB,0xDD,0xE5,0x8C,0xF0,0xD5,0x32,0xB0,0x83,0xE6,0x57,0xE2, -0x8F,0xBF,0xBE,0xA1,0xAA,0xBF,0x3D,0x1D,0xB5,0xD4,0x38,0xEA,0xD7,0xB0,0x5C,0x3A, -0x4F,0x6A,0x3F,0x8F,0xC0,0x66,0x6C,0x63,0xAA,0xE9,0xD9,0xA4,0x16,0xF4,0x81,0xD1, -0x95,0x14,0x0E,0x7D,0xCD,0x95,0x34,0xD9,0xD2,0x8F,0x70,0x73,0x81,0x7B,0x9C,0x7E, -0xBD,0x98,0x61,0xD8,0x45,0x87,0x98,0x90,0xC5,0xEB,0x86,0x30,0xC6,0x35,0xBF,0xF0, 
-0xFF,0xC3,0x55,0x88,0x83,0x4B,0xEF,0x05,0x92,0x06,0x71,0xF2,0xB8,0x98,0x93,0xB7, -0xEC,0xCD,0x82,0x61,0xF1,0x38,0xE6,0x4F,0x97,0x98,0x2A,0x5A,0x8D, +0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05, +0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0x91, +0xE8,0x54,0x92,0xD2,0x0A,0x56,0xB1,0xAC,0x0D,0x24,0xDD,0xC5,0xCF,0x44,0x67,0x74, +0x99,0x2B,0x37,0xA3,0x7D,0x23,0x70,0x00,0x71,0xBC,0x53,0xDF,0xC4,0xFA,0x2A,0x12, +0x8F,0x4B,0x7F,0x10,0x56,0xBD,0x9F,0x70,0x72,0xB7,0x61,0x7F,0xC9,0x4B,0x0F,0x17, +0xA7,0x3D,0xE3,0xB0,0x04,0x61,0xEE,0xFF,0x11,0x97,0xC7,0xF4,0x86,0x3E,0x0A,0xFA, +0x3E,0x5C,0xF9,0x93,0xE6,0x34,0x7A,0xD9,0x14,0x6B,0xE7,0x9C,0xB3,0x85,0xA0,0x82, +0x7A,0x76,0xAF,0x71,0x90,0xD7,0xEC,0xFD,0x0D,0xFA,0x9C,0x6C,0xFA,0xDF,0xB0,0x82, +0xF4,0x14,0x7E,0xF9,0xBE,0xC4,0xA6,0x2F,0x4F,0x7F,0x99,0x7F,0xB5,0xFC,0x67,0x43, +0x72,0xBD,0x0C,0x00,0xD6,0x89,0xEB,0x6B,0x2C,0xD3,0xED,0x8F,0x98,0x1C,0x14,0xAB, +0x7E,0xE5,0xE3,0x6E,0xFC,0xD8,0xA8,0xE4,0x92,0x24,0xDA,0x43,0x6B,0x62,0xB8,0x55, +0xFD,0xEA,0xC1,0xBC,0x6C,0xB6,0x8B,0xF3,0x0E,0x8D,0x9A,0xE4,0x9B,0x6C,0x69,0x99, +0xF8,0x78,0x48,0x30,0x45,0xD5,0xAD,0xE1,0x0D,0x3C,0x45,0x60,0xFC,0x32,0x96,0x51, +0x27,0xBC,0x67,0xC3,0xCA,0x2E,0xB6,0x6B,0xEA,0x46,0xC7,0xC7,0x20,0xA0,0xB1,0x1F, +0x65,0xDE,0x48,0x08,0xBA,0xA4,0x4E,0xA9,0xF2,0x83,0x46,0x37,0x84,0xEB,0xE8,0xCC, +0x81,0x48,0x43,0x67,0x4E,0x72,0x2A,0x9B,0x5C,0xBD,0x4C,0x1B,0x28,0x8A,0x5C,0x22, +0x7B,0xB4,0xAB,0x98,0xD9,0xEE,0xE0,0x51,0x83,0xC3,0x09,0x46,0x4E,0x6D,0x3E,0x99, +0xFA,0x95,0x17,0xDA,0x7C,0x33,0x57,0x41,0x3C,0x8D,0x51,0xED,0x0B,0xB6,0x5C,0xAF, +0x2C,0x63,0x1A,0xDF,0x57,0xC8,0x3F,0xBC,0xE9,0x5D,0xC4,0x9B,0xAF,0x45,0x99,0xE2, +0xA3,0x5A,0x24,0xB4,0xBA,0xA9,0x56,0x3D,0xCF,0x6F,0xAA,0xFF,0x49,0x58,0xBE,0xF0, +0xA8,0xFF,0xF4,0xB8,0xAD,0xE9,0x37,0xFB,0xBA,0xB8,0xF4,0x0B,0x3A,0xF9,0xE8,0x43, +0x42,0x1E,0x89,0xD8,0x84,0xCB,0x13,0xF1,0xD9,0xBB,0xE1,0x89,0x60,0xB8,0x8C,0x28, 
+0x56,0xAC,0x14,0x1D,0x9C,0x0A,0xE7,0x71,0xEB,0xCF,0x0E,0xDD,0x3D,0xA9,0x96,0xA1, +0x48,0xBD,0x3C,0xF7,0xAF,0xB5,0x0D,0x22,0x4C,0xC0,0x11,0x81,0xEC,0x56,0x3B,0xF6, +0xD3,0xA2,0xE2,0x5B,0xB7,0xB2,0x04,0x22,0x52,0x95,0x80,0x93,0x69,0xE8,0x8E,0x4C, +0x65,0xF1,0x91,0x03,0x2D,0x70,0x74,0x02,0xEA,0x8B,0x67,0x15,0x29,0x69,0x52,0x02, +0xBB,0xD7,0xDF,0x50,0x6A,0x55,0x46,0xBF,0xA0,0xA3,0x28,0x61,0x7F,0x70,0xD0,0xC3, +0xA2,0xAA,0x2C,0x21,0xAA,0x47,0xCE,0x28,0x9C,0x06,0x45,0x76,0xBF,0x82,0x18,0x27, +0xB4,0xD5,0xAE,0xB4,0xCB,0x50,0xE6,0x6B,0xF4,0x4C,0x86,0x71,0x30,0xE9,0xA6,0xDF, +0x16,0x86,0xE0,0xD8,0xFF,0x40,0xDD,0xFB,0xD0,0x42,0x88,0x7F,0xA3,0x33,0x3A,0x2E, +0x5C,0x1E,0x41,0x11,0x81,0x63,0xCE,0x18,0x71,0x6B,0x2B,0xEC,0xA6,0x8A,0xB7,0x31, +0x5C,0x3A,0x6A,0x47,0xE0,0xC3,0x79,0x59,0xD6,0x20,0x1A,0xAF,0xF2,0x6A,0x98,0xAA, +0x72,0xBC,0x57,0x4A,0xD2,0x4B,0x9D,0xBB,0x10,0xFC,0xB0,0x4C,0x41,0xE5,0xED,0x1D, +0x3D,0x5E,0x28,0x9D,0x9C,0xCC,0xBF,0xB3,0x51,0xDA,0xA7,0x47,0xE5,0x84,0x53,0x02, +0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04, +0x16,0x04,0x14,0xBB,0xAF,0x7E,0x02,0x3D,0xFA,0xA6,0xF1,0x3C,0x84,0x8E,0xAD,0xEE, +0x38,0x98,0xEC,0xD9,0x32,0x32,0xD4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01, +0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01, +0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, +0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x0A,0xF1,0xD5,0x46, +0x84,0xB7,0xAE,0x51,0xBB,0x6C,0xB2,0x4D,0x41,0x14,0x00,0x93,0x4C,0x9C,0xCB,0xE5, +0xC0,0x54,0xCF,0xA0,0x25,0x8E,0x02,0xF9,0xFD,0xB0,0xA2,0x0D,0xF5,0x20,0x98,0x3C, +0x13,0x2D,0xAC,0x56,0xA2,0xB0,0xD6,0x7E,0x11,0x92,0xE9,0x2E,0xBA,0x9E,0x2E,0x9A, +0x72,0xB1,0xBD,0x19,0x44,0x6C,0x61,0x35,0xA2,0x9A,0xB4,0x16,0x12,0x69,0x5A,0x8C, +0xE1,0xD7,0x3E,0xA4,0x1A,0xE8,0x2F,0x03,0xF4,0xAE,0x61,0x1D,0x10,0x1B,0x2A,0xA4, +0x8B,0x7A,0xC5,0xFE,0x05,0xA6,0xE1,0xC0,0xD6,0xC8,0xFE,0x9E,0xAE,0x8F,0x2B,0xBA, 
+0x3D,0x99,0xF8,0xD8,0x73,0x09,0x58,0x46,0x6E,0xA6,0x9C,0xF4,0xD7,0x27,0xD3,0x95, +0xDA,0x37,0x83,0x72,0x1C,0xD3,0x73,0xE0,0xA2,0x47,0x99,0x03,0x38,0x5D,0xD5,0x49, +0x79,0x00,0x29,0x1C,0xC7,0xEC,0x9B,0x20,0x1C,0x07,0x24,0x69,0x57,0x78,0xB2,0x39, +0xFC,0x3A,0x84,0xA0,0xB5,0x9C,0x7C,0x8D,0xBF,0x2E,0x93,0x62,0x27,0xB7,0x39,0xDA, +0x17,0x18,0xAE,0xBD,0x3C,0x09,0x68,0xFF,0x84,0x9B,0x3C,0xD5,0xD6,0x0B,0x03,0xE3, +0x57,0x9E,0x14,0xF7,0xD1,0xEB,0x4F,0xC8,0xBD,0x87,0x23,0xB7,0xB6,0x49,0x43,0x79, +0x85,0x5C,0xBA,0xEB,0x92,0x0B,0xA1,0xC6,0xE8,0x68,0xA8,0x4C,0x16,0xB1,0x1A,0x99, +0x0A,0xE8,0x53,0x2C,0x92,0xBB,0xA1,0x09,0x18,0x75,0x0C,0x65,0xA8,0x7B,0xCB,0x23, +0xB7,0x1A,0xC2,0x28,0x85,0xC3,0x1B,0xFF,0xD0,0x2B,0x62,0xEF,0xA4,0x7B,0x09,0x91, +0x98,0x67,0x8C,0x14,0x01,0xCD,0x68,0x06,0x6A,0x63,0x21,0x75,0x03,0x80,0x88,0x8A, +0x6E,0x81,0xC6,0x85,0xF2,0xA9,0xA4,0x2D,0xE7,0xF4,0xA5,0x24,0x10,0x47,0x83,0xCA, +0xCD,0xF4,0x8D,0x79,0x58,0xB1,0x06,0x9B,0xE7,0x1A,0x2A,0xD9,0x9D,0x01,0xD7,0x94, +0x7D,0xED,0x03,0x4A,0xCA,0xF0,0xDB,0xE8,0xA9,0x01,0x3E,0xF5,0x56,0x99,0xC9,0x1E, +0x8E,0x49,0x3D,0xBB,0xE5,0x09,0xB9,0xE0,0x4F,0x49,0x92,0x3D,0x16,0x82,0x40,0xCC, +0xCC,0x59,0xC6,0xE6,0x3A,0xED,0x12,0x2E,0x69,0x3C,0x6C,0x95,0xB1,0xFD,0xAA,0x1D, +0x7B,0x7F,0x86,0xBE,0x1E,0x0E,0x32,0x46,0xFB,0xFB,0x13,0x8F,0x75,0x7F,0x4C,0x8B, +0x4B,0x46,0x63,0xFE,0x00,0x34,0x40,0x70,0xC1,0xC3,0xB9,0xA1,0xDD,0xA6,0x70,0xE2, +0x04,0xB3,0x41,0xBC,0xE9,0x80,0x91,0xEA,0x64,0x9C,0x7A,0xE1,0x22,0x03,0xA9,0x9C, +0x6E,0x6F,0x0E,0x65,0x4F,0x6C,0x87,0x87,0x5E,0xF3,0x6E,0xA0,0xF9,0x75,0xA5,0x9B, +0x40,0xE8,0x53,0xB2,0x27,0x9D,0x4A,0xB9,0xC0,0x77,0x21,0x8D,0xFF,0x87,0xF2,0xDE, +0xBC,0x8C,0xEF,0x17,0xDF,0xB7,0x49,0x0B,0xD1,0xF2,0x6E,0x30,0x0B,0x1A,0x0E,0x4E, +0x76,0xED,0x11,0xFC,0xF5,0xE9,0x56,0xB2,0x7D,0xBF,0xC7,0x6D,0x0A,0x93,0x8C,0xA5, +0xD0,0xC0,0xB6,0x1D,0xBE,0x3A,0x4E,0x94,0xA2,0xD7,0x6E,0x6C,0x0B,0xC2,0x8A,0x7C, +0xFA,0x20,0xF3,0xC4,0xE4,0xE5,0xCD,0x0D,0xA8,0xCB,0x91,0x92,0xB1,0x7C,0x85,0xEC, 
+0xB5,0x14,0x69,0x66,0x0E,0x82,0xE7,0xCD,0xCE,0xC8,0x2D,0xA6,0x51,0x7F,0x21,0xC1, +0x35,0x53,0x85,0x06,0x4A,0x5D,0x9F,0xAD,0xBB,0x1B,0x5F,0x74, }; -/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */ -/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2007 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G4 */ - - -const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate[904]={ -0x30,0x82,0x03,0x84,0x30,0x82,0x03,0x0A,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x2F, -0x80,0xFE,0x23,0x8C,0x0E,0x22,0x0F,0x48,0x67,0x12,0x28,0x91,0x87,0xAC,0xB3,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0xCA,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20, -0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56, -0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65, -0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31, -0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67, -0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75, -0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C, -0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69, -0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62, -0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74, -0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72, -0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x34,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31, 
-0x30,0x35,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31, -0x38,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06, -0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04, -0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63, -0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69, -0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F, -0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29, -0x20,0x32,0x30,0x30,0x37,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20, -0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F, -0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45, -0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67, -0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62,0x6C,0x69,0x63, -0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69, -0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79, -0x20,0x2D,0x20,0x47,0x34,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D, -0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xA7,0x56,0x7A, -0x7C,0x52,0xDA,0x64,0x9B,0x0E,0x2D,0x5C,0xD8,0x5E,0xAC,0x92,0x3D,0xFE,0x01,0xE6, -0x19,0x4A,0x3D,0x14,0x03,0x4B,0xFA,0x60,0x27,0x20,0xD9,0x83,0x89,0x69,0xFA,0x54, -0xC6,0x9A,0x18,0x5E,0x55,0x2A,0x64,0xDE,0x06,0xF6,0x8D,0x4A,0x3B,0xAD,0x10,0x3C, -0x65,0x3D,0x90,0x88,0x04,0x89,0xE0,0x30,0x61,0xB3,0xAE,0x5D,0x01,0xA7,0x7B,0xDE, -0x7C,0xB2,0xBE,0xCA,0x65,0x61,0x00,0x86,0xAE,0xDA,0x8F,0x7B,0xD0,0x89,0xAD,0x4D, -0x1D,0x59,0x9A,0x41,0xB1,0xBC,0x47,0x80,0xDC,0x9E,0x62,0xC3,0xF9,0xA3,0x81,0xB2, -0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30, -0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04, 
-0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01,0x0C, -0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16,0x09, -0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07,0x06, -0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D,0x8E, -0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16,0x23, -0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72,0x69, -0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F,0x2E, -0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB3,0x16, -0x91,0xFD,0xEE,0xA6,0x6E,0xE4,0xB5,0x2E,0x49,0x8F,0x87,0x78,0x81,0x80,0xEC,0xE5, -0xB1,0xB5,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68, -0x00,0x30,0x65,0x02,0x30,0x66,0x21,0x0C,0x18,0x26,0x60,0x5A,0x38,0x7B,0x56,0x42, -0xE0,0xA7,0xFC,0x36,0x84,0x51,0x91,0x20,0x2C,0x76,0x4D,0x43,0x3D,0xC4,0x1D,0x84, -0x23,0xD0,0xAC,0xD6,0x7C,0x35,0x06,0xCE,0xCD,0x69,0xBD,0x90,0x0D,0xDB,0x6C,0x48, -0x42,0x1D,0x0E,0xAA,0x42,0x02,0x31,0x00,0x9C,0x3D,0x48,0x39,0x23,0x39,0x58,0x1A, -0x15,0x12,0x59,0x6A,0x9E,0xEF,0xD5,0x59,0xB2,0x1D,0x52,0x2C,0x99,0x71,0xCD,0xC7, -0x29,0xDF,0x1B,0x2A,0x61,0x7B,0x71,0xD1,0xDE,0xF3,0xC0,0xE5,0x0D,0x3A,0x4A,0xAA, -0x2D,0xA7,0xD8,0x86,0x2A,0xDD,0x2E,0x10, +/* subject:/C=US/O=Google Trust Services LLC/CN=GTS Root R2 */ +/* issuer :/C=US/O=Google Trust Services LLC/CN=GTS Root R2 */ + + +const unsigned char GTS_Root_R2_certificate[1374]={ +0x30,0x82,0x05,0x5A,0x30,0x82,0x03,0x42,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x6E, +0x47,0xA9,0xC6,0x5A,0xB3,0xE7,0x20,0xC5,0x30,0x9A,0x3F,0x68,0x52,0xF2,0x6F,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x47, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x22,0x30, +0x20,0x06,0x03,0x55,0x04,0x0A,0x13,0x19,0x47,0x6F,0x6F,0x67,0x6C,0x65,0x20,0x54, 
+0x72,0x75,0x73,0x74,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x4C,0x4C, +0x43,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x03,0x13,0x0B,0x47,0x54,0x53,0x20, +0x52,0x6F,0x6F,0x74,0x20,0x52,0x32,0x30,0x1E,0x17,0x0D,0x31,0x36,0x30,0x36,0x32, +0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x36,0x32,0x32, +0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, +0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0A,0x13, +0x19,0x47,0x6F,0x6F,0x67,0x6C,0x65,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x53,0x65, +0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x4C,0x4C,0x43,0x31,0x14,0x30,0x12,0x06,0x03, +0x55,0x04,0x03,0x13,0x0B,0x47,0x54,0x53,0x20,0x52,0x6F,0x6F,0x74,0x20,0x52,0x32, +0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, +0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01, +0x00,0xCE,0xDE,0xFD,0xA6,0xFB,0xEC,0xEC,0x14,0x34,0x3C,0x07,0x06,0x5A,0x6C,0x59, +0xF7,0x19,0x35,0xDD,0xF7,0xC1,0x9D,0x55,0xAA,0xD3,0xCD,0x3B,0xA4,0x93,0x72,0xEF, +0x0A,0xFA,0x6D,0x9D,0xF6,0xF0,0x85,0x80,0x5B,0xA1,0x48,0x52,0x9F,0x39,0xC5,0xB7, +0xEE,0x28,0xAC,0xEF,0xCB,0x76,0x68,0x14,0xB9,0xDF,0xAD,0x01,0x6C,0x99,0x1F,0xC4, +0x22,0x1D,0x9F,0xFE,0x72,0x77,0xE0,0x2C,0x5B,0xAF,0xE4,0x04,0xBF,0x4F,0x72,0xA0, +0x1A,0x34,0x98,0xE8,0x39,0x68,0xEC,0x95,0x25,0x7B,0x76,0xA1,0xE6,0x69,0xB9,0x85, +0x19,0xBD,0x89,0x8C,0xFE,0xAD,0xED,0x36,0xEA,0x73,0xBC,0xFF,0x83,0xE2,0xCB,0x7D, +0xC1,0xD2,0xCE,0x4A,0xB3,0x8D,0x05,0x9E,0x8B,0x49,0x93,0xDF,0xC1,0x5B,0xD0,0x6E, +0x5E,0xF0,0x2E,0x30,0x2E,0x82,0xFC,0xFA,0xBC,0xB4,0x17,0x0A,0x48,0xE5,0x88,0x9B, +0xC5,0x9B,0x6B,0xDE,0xB0,0xCA,0xB4,0x03,0xF0,0xDA,0xF4,0x90,0xB8,0x65,0x64,0xF7, +0x5C,0x4C,0xAD,0xE8,0x7E,0x66,0x5E,0x99,0xD7,0xB8,0xC2,0x3E,0xC8,0xD0,0x13,0x9D, +0xAD,0xEE,0xE4,0x45,0x7B,0x89,0x55,0xF7,0x8A,0x1F,0x62,0x52,0x84,0x12,0xB3,0xC2, +0x40,0x97,0xE3,0x8A,0x1F,0x47,0x91,0xA6,0x74,0x5A,0xD2,0xF8,0xB1,0x63,0x28,0x10, 
+0xB8,0xB3,0x09,0xB8,0x56,0x77,0x40,0xA2,0x26,0x98,0x79,0xC6,0xFE,0xDF,0x25,0xEE, +0x3E,0xE5,0xA0,0x7F,0xD4,0x61,0x0F,0x51,0x4B,0x3C,0x3F,0x8C,0xDA,0xE1,0x70,0x74, +0xD8,0xC2,0x68,0xA1,0xF9,0xC1,0x0C,0xE9,0xA1,0xE2,0x7F,0xBB,0x55,0x3C,0x76,0x06, +0xEE,0x6A,0x4E,0xCC,0x92,0x88,0x30,0x4D,0x9A,0xBD,0x4F,0x0B,0x48,0x9A,0x84,0xB5, +0x98,0xA3,0xD5,0xFB,0x73,0xC1,0x57,0x61,0xDD,0x28,0x56,0x75,0x13,0xAE,0x87,0x8E, +0xE7,0x0C,0x51,0x09,0x10,0x75,0x88,0x4C,0xBC,0x8D,0xF9,0x7B,0x3C,0xD4,0x22,0x48, +0x1F,0x2A,0xDC,0xEB,0x6B,0xBB,0x44,0xB1,0xCB,0x33,0x71,0x32,0x46,0xAF,0xAD,0x4A, +0xF1,0x8C,0xE8,0x74,0x3A,0xAC,0xE7,0x1A,0x22,0x73,0x80,0xD2,0x30,0xF7,0x25,0x42, +0xC7,0x22,0x3B,0x3B,0x12,0xAD,0x96,0x2E,0xC6,0xC3,0x76,0x07,0xAA,0x20,0xB7,0x35, +0x49,0x57,0xE9,0x92,0x49,0xE8,0x76,0x16,0x72,0x31,0x67,0x2B,0x96,0x7E,0x8A,0xA3, +0xC7,0x94,0x56,0x22,0xBF,0x6A,0x4B,0x7E,0x01,0x21,0xB2,0x23,0x32,0xDF,0xE4,0x9A, +0x44,0x6D,0x59,0x5B,0x5D,0xF5,0x00,0xA0,0x1C,0x9B,0xC6,0x78,0x97,0x8D,0x90,0xFF, +0x9B,0xC8,0xAA,0xB4,0xAF,0x11,0x51,0x39,0x5E,0xD9,0xFB,0x67,0xAD,0xD5,0x5B,0x11, +0x9D,0x32,0x9A,0x1B,0xBD,0xD5,0xBA,0x5B,0xA5,0xC9,0xCB,0x25,0x69,0x53,0x55,0x27, +0x5C,0xE0,0xCA,0x36,0xCB,0x88,0x61,0xFB,0x1E,0xB7,0xD0,0xCB,0xEE,0x16,0xFB,0xD3, +0xA6,0x4C,0xDE,0x92,0xA5,0xD4,0xE2,0xDF,0xF5,0x06,0x54,0xDE,0x2E,0x9D,0x4B,0xB4, +0x93,0x30,0xAA,0x81,0xCE,0xDD,0x1A,0xDC,0x51,0x73,0x0D,0x4F,0x70,0xE9,0xE5,0xB6, +0x16,0x21,0x19,0x79,0xB2,0xE6,0x89,0x0B,0x75,0x64,0xCA,0xD5,0xAB,0xBC,0x09,0xC1, +0x18,0xA1,0xFF,0xD4,0x54,0xA1,0x85,0x3C,0xFD,0x14,0x24,0x03,0xB2,0x87,0xD3,0xA4, +0xB7,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D, +0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D, +0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55, +0x1D,0x0E,0x04,0x16,0x04,0x14,0xBB,0xFF,0xCA,0x8E,0x23,0x9F,0x4F,0x99,0xCA,0xDB, +0xE2,0x68,0xA6,0xA5,0x15,0x27,0x17,0x1E,0xD9,0x0E,0x30,0x0D,0x06,0x09,0x2A,0x86, 
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0xB6,0x69, +0xF0,0xA6,0x77,0xFE,0x9E,0xEE,0x0B,0x81,0xAD,0xE1,0xC0,0xA9,0xC7,0xF9,0x35,0x1D, +0x40,0x82,0xAB,0xE6,0x04,0xB4,0xDF,0xCB,0xF7,0x1D,0x0F,0x83,0xF0,0x7E,0x13,0x4D, +0x8D,0x8C,0xEE,0xE3,0x33,0x22,0xC3,0x39,0xFC,0x40,0xDF,0x6E,0x41,0x4B,0x42,0x53, +0xBE,0x16,0x88,0xF1,0xD2,0x38,0x5E,0xC4,0x68,0x99,0x1C,0x98,0x52,0x93,0x8C,0xE7, +0x68,0xED,0x1B,0x6A,0x73,0x7A,0x05,0x40,0x4D,0x7F,0x65,0x3B,0xD6,0x58,0xF1,0xCE, +0x83,0x47,0x60,0xE3,0xFF,0x97,0xA9,0x9C,0x60,0x77,0x18,0x55,0xB5,0x7E,0x08,0x93, +0xCF,0xD0,0xF6,0x3C,0x67,0x03,0x15,0x61,0x09,0xF9,0x81,0x79,0xF5,0xEC,0x53,0xA4, +0x9F,0xC9,0x8F,0x01,0x8B,0x73,0xC4,0x77,0x76,0xDC,0x83,0xA2,0xF5,0x0C,0x49,0x1A, +0xA8,0x76,0xDE,0x92,0x9B,0x64,0xF8,0xB3,0x2C,0xC5,0x27,0xD3,0x07,0xC0,0x08,0x80, +0xA4,0x98,0x92,0xE3,0x01,0x96,0x02,0xAA,0x02,0xEE,0x8F,0x3B,0xC5,0xD1,0x6D,0x0A, +0x33,0x30,0x73,0x78,0xB9,0x4F,0x54,0x16,0xBF,0x0B,0x07,0xA1,0xA4,0x5C,0xE6,0xCB, +0xC9,0x5C,0x84,0x8F,0x0F,0xE0,0x15,0x77,0x2C,0x7E,0x26,0x7E,0xDA,0xC4,0x4B,0xDB, +0xA7,0x16,0x77,0x07,0xB0,0xCD,0x75,0xE8,0x72,0x42,0xD6,0x95,0x84,0x9D,0x86,0x83, +0xF2,0xE4,0x90,0xCD,0x09,0x47,0xD4,0x8B,0x03,0x70,0xDA,0x5A,0xC6,0x03,0x42,0xF4, +0xED,0x37,0xA2,0xF0,0x1B,0x50,0x54,0x4B,0x0E,0xD8,0x84,0xDE,0x19,0x28,0x99,0x81, +0x47,0xAE,0x09,0x1B,0x3F,0x48,0xD1,0xC3,0x6F,0xE2,0xB0,0x60,0x17,0xF5,0xEE,0x23, +0x02,0xA5,0xDA,0x00,0x5B,0x6D,0x90,0xAB,0xEE,0xA2,0xE9,0x1B,0x3B,0xE9,0xC7,0x44, +0x27,0x45,0x8E,0x6B,0x9F,0xF5,0xA4,0x84,0xBC,0x77,0xF9,0x6B,0x97,0xAC,0x3E,0x51, +0x45,0xA2,0x11,0xA6,0xCC,0x85,0xEE,0x0A,0x68,0xF2,0x3E,0x50,0x38,0x7A,0x24,0x62, +0x1E,0x17,0x20,0x37,0x6D,0x6A,0x4D,0xB7,0x09,0x9B,0xC9,0xFC,0xA4,0x58,0xF5,0xB6, +0xFB,0x9C,0x4E,0x18,0xBB,0x95,0x02,0xE7,0xA1,0xAD,0x9B,0x07,0xEE,0x36,0x6B,0x24, +0xD2,0x39,0x86,0xC1,0x93,0x83,0x50,0xD2,0x81,0x46,0xA8,0x5F,0x62,0x57,0x2C,0xBB, +0x6C,0x64,0x88,0x08,0x6E,0xEF,0x13,0x54,0x5F,0xDD,0x2D,0xC4,0x67,0x63,0xD3,0xCF, 
+0x89,0x37,0xBF,0x9D,0x20,0xF4,0xFB,0x7A,0x83,0x9B,0xA0,0x1E,0x81,0x00,0x50,0xC2, +0xE4,0x0C,0x22,0x59,0x52,0x10,0xED,0x43,0x56,0x87,0x00,0xF8,0x14,0x52,0xA7,0x1D, +0x8B,0x93,0x8C,0xA2,0x4D,0x46,0x7F,0x27,0xC6,0x71,0x9B,0x24,0xDE,0xE4,0xDA,0x86, +0x8B,0x0D,0x7E,0x6B,0x20,0xC1,0xC0,0x9E,0xE1,0x65,0xD8,0x6A,0xA3,0xA6,0xE8,0x85, +0x8B,0x3A,0x07,0x08,0x1C,0xBA,0xF5,0x8F,0x55,0x9A,0x18,0x75,0x7E,0xE5,0xEC,0x81, +0x66,0xD1,0x21,0x73,0xA1,0x35,0x44,0x0B,0x80,0x3D,0x5B,0x9C,0x5E,0x6F,0x2A,0x17, +0x96,0xD1,0x83,0x23,0x88,0x66,0x6D,0xE6,0x86,0xE2,0x70,0x32,0x2F,0x52,0x22,0xE7, +0xC8,0xE7,0x7F,0xC4,0x2C,0x60,0x5D,0x2F,0xC3,0xAF,0x9E,0x45,0x05,0xC3,0x84,0x02, +0xB7,0xFD,0x2C,0x08,0x52,0x4F,0x82,0xDD,0xA3,0xF0,0xD4,0x86,0x09,0x02, }; -/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G2 */ -/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G2 */ +/* subject:/O=Cybertrust, Inc/CN=Cybertrust Global Root */ +/* issuer :/O=Cybertrust, Inc/CN=Cybertrust Global Root */ -const unsigned char DigiCert_Global_Root_G2_certificate[914]={ -0x30,0x82,0x03,0x8E,0x30,0x82,0x02,0x76,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x03, -0x3A,0xF1,0xE6,0xA7,0x11,0xA9,0xA0,0xBB,0x28,0x64,0xB1,0x1D,0x09,0xFA,0xE5,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x61, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, -0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, -0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, -0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65, -0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47, -0x32,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,0x30, -0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,0x30, 
-0x5A,0x30,0x61,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53, -0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43, -0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B, -0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63, -0x6F,0x6D,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67, +const unsigned char Cybertrust_Global_Root_certificate[933]={ +0x30,0x82,0x03,0xA1,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, +0x00,0x00,0x00,0x00,0x01,0x0F,0x85,0xAA,0x2D,0x48,0x30,0x0D,0x06,0x09,0x2A,0x86, +0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x3B,0x31,0x18,0x30,0x16,0x06, +0x03,0x55,0x04,0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74, +0x2C,0x20,0x49,0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16, +0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61, +0x6C,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,0x35, +0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,0x30, +0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,0x03,0x55,0x04, +0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49, +0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x43,0x79,0x62, +0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52, +0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, +0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02, +0x82,0x01,0x01,0x00,0xF8,0xC8,0xBC,0xBD,0x14,0x50,0x66,0x13,0xFF,0xF0,0xD3,0x79, +0xEC,0x23,0xF2,0xB7,0x1A,0xC7,0x8E,0x85,0xF1,0x12,0x73,0xA6,0x19,0xAA,0x10,0xDB, +0x9C,0xA2,0x65,0x74,0x5A,0x77,0x3E,0x51,0x7D,0x56,0xF6,0xDC,0x23,0xB6,0xD4,0xED, +0x5F,0x58,0xB1,0x37,0x4D,0xD5,0x49,0x0E,0x6E,0xF5,0x6A,0x87,0xD6,0xD2,0x8C,0xD2, 
+0x27,0xC6,0xE2,0xFF,0x36,0x9F,0x98,0x65,0xA0,0x13,0x4E,0xC6,0x2A,0x64,0x9B,0xD5, +0x90,0x12,0xCF,0x14,0x06,0xF4,0x3B,0xE3,0xD4,0x28,0xBE,0xE8,0x0E,0xF8,0xAB,0x4E, +0x48,0x94,0x6D,0x8E,0x95,0x31,0x10,0x5C,0xED,0xA2,0x2D,0xBD,0xD5,0x3A,0x6D,0xB2, +0x1C,0xBB,0x60,0xC0,0x46,0x4B,0x01,0xF5,0x49,0xAE,0x7E,0x46,0x8A,0xD0,0x74,0x8D, +0xA1,0x0C,0x02,0xCE,0xEE,0xFC,0xE7,0x8F,0xB8,0x6B,0x66,0xF3,0x7F,0x44,0x00,0xBF, +0x66,0x25,0x14,0x2B,0xDD,0x10,0x30,0x1D,0x07,0x96,0x3F,0x4D,0xF6,0x6B,0xB8,0x8F, +0xB7,0x7B,0x0C,0xA5,0x38,0xEB,0xDE,0x47,0xDB,0xD5,0x5D,0x39,0xFC,0x88,0xA7,0xF3, +0xD7,0x2A,0x74,0xF1,0xE8,0x5A,0xA2,0x3B,0x9F,0x50,0xBA,0xA6,0x8C,0x45,0x35,0xC2, +0x50,0x65,0x95,0xDC,0x63,0x82,0xEF,0xDD,0xBF,0x77,0x4D,0x9C,0x62,0xC9,0x63,0x73, +0x16,0xD0,0x29,0x0F,0x49,0xA9,0x48,0xF0,0xB3,0xAA,0xB7,0x6C,0xC5,0xA7,0x30,0x39, +0x40,0x5D,0xAE,0xC4,0xE2,0x5D,0x26,0x53,0xF0,0xCE,0x1C,0x23,0x08,0x61,0xA8,0x94, +0x19,0xBA,0x04,0x62,0x40,0xEC,0x1F,0x38,0x70,0x77,0x12,0x06,0x71,0xA7,0x30,0x18, +0x5D,0x25,0x27,0xA5,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xA5,0x30,0x81,0xA2,0x30, +0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30, +0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, +0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB6,0x08,0x7B,0x0D,0x7A, +0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,0x57,0x30, +0x3F,0x06,0x03,0x55,0x1D,0x1F,0x04,0x38,0x30,0x36,0x30,0x34,0xA0,0x32,0xA0,0x30, +0x86,0x2E,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x32,0x2E,0x70,0x75, +0x62,0x6C,0x69,0x63,0x2D,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x63, +0x72,0x6C,0x2F,0x63,0x74,0x2F,0x63,0x74,0x72,0x6F,0x6F,0x74,0x2E,0x63,0x72,0x6C, +0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xB6,0x08,0x7B, +0x0D,0x7A,0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70, +0x57,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00, 
+0x03,0x82,0x01,0x01,0x00,0x56,0xEF,0x0A,0x23,0xA0,0x54,0x4E,0x95,0x97,0xC9,0xF8, +0x89,0xDA,0x45,0xC1,0xD4,0xA3,0x00,0x25,0xF4,0x1F,0x13,0xAB,0xB7,0xA3,0x85,0x58, +0x69,0xC2,0x30,0xAD,0xD8,0x15,0x8A,0x2D,0xE3,0xC9,0xCD,0x81,0x5A,0xF8,0x73,0x23, +0x5A,0xA7,0x7C,0x05,0xF3,0xFD,0x22,0x3B,0x0E,0xD1,0x06,0xC4,0xDB,0x36,0x4C,0x73, +0x04,0x8E,0xE5,0xB0,0x22,0xE4,0xC5,0xF3,0x2E,0xA5,0xD9,0x23,0xE3,0xB8,0x4E,0x4A, +0x20,0xA7,0x6E,0x02,0x24,0x9F,0x22,0x60,0x67,0x7B,0x8B,0x1D,0x72,0x09,0xC5,0x31, +0x5C,0xE9,0x79,0x9F,0x80,0x47,0x3D,0xAD,0xA1,0x0B,0x07,0x14,0x3D,0x47,0xFF,0x03, +0x69,0x1A,0x0C,0x0B,0x44,0xE7,0x63,0x25,0xA7,0x7F,0xB2,0xC9,0xB8,0x76,0x84,0xED, +0x23,0xF6,0x7D,0x07,0xAB,0x45,0x7E,0xD3,0xDF,0xB3,0xBF,0xE9,0x8A,0xB6,0xCD,0xA8, +0xA2,0x67,0x2B,0x52,0xD5,0xB7,0x65,0xF0,0x39,0x4C,0x63,0xA0,0x91,0x79,0x93,0x52, +0x0F,0x54,0xDD,0x83,0xBB,0x9F,0xD1,0x8F,0xA7,0x53,0x73,0xC3,0xCB,0xFF,0x30,0xEC, +0x7C,0x04,0xB8,0xD8,0x44,0x1F,0x93,0x5F,0x71,0x09,0x22,0xB7,0x6E,0x3E,0xEA,0x1C, +0x03,0x4E,0x9D,0x1A,0x20,0x61,0xFB,0x81,0x37,0xEC,0x5E,0xFC,0x0A,0x45,0xAB,0xD7, +0xE7,0x17,0x55,0xD0,0xA0,0xEA,0x60,0x9B,0xA6,0xF6,0xE3,0x8C,0x5B,0x29,0xC2,0x06, +0x60,0x14,0x9D,0x2D,0x97,0x4C,0xA9,0x93,0x15,0x9D,0x61,0xC4,0x01,0x5F,0x48,0xD6, +0x58,0xBD,0x56,0x31,0x12,0x4E,0x11,0xC8,0x21,0xE0,0xB3,0x11,0x91,0x65,0xDB,0xB4, +0xA6,0x88,0x38,0xCE,0x55, +}; + + +/* subject:/C=US/O=Google Trust Services LLC/CN=GTS Root R1 */ +/* issuer :/C=US/O=Google Trust Services LLC/CN=GTS Root R1 */ + + +const unsigned char GTS_Root_R1_certificate[1374]={ +0x30,0x82,0x05,0x5A,0x30,0x82,0x03,0x42,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x6E, +0x47,0xA9,0xC5,0x4B,0x47,0x0C,0x0D,0xEC,0x33,0xD0,0x89,0xB9,0x1C,0xF4,0xE1,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x47, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x22,0x30, +0x20,0x06,0x03,0x55,0x04,0x0A,0x13,0x19,0x47,0x6F,0x6F,0x67,0x6C,0x65,0x20,0x54, 
+0x72,0x75,0x73,0x74,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x4C,0x4C, +0x43,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x03,0x13,0x0B,0x47,0x54,0x53,0x20, +0x52,0x6F,0x6F,0x74,0x20,0x52,0x31,0x30,0x1E,0x17,0x0D,0x31,0x36,0x30,0x36,0x32, +0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x36,0x32,0x32, +0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, +0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0A,0x13, +0x19,0x47,0x6F,0x6F,0x67,0x6C,0x65,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x53,0x65, +0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x4C,0x4C,0x43,0x31,0x14,0x30,0x12,0x06,0x03, +0x55,0x04,0x03,0x13,0x0B,0x47,0x54,0x53,0x20,0x52,0x6F,0x6F,0x74,0x20,0x52,0x31, +0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, +0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01, +0x00,0xB6,0x11,0x02,0x8B,0x1E,0xE3,0xA1,0x77,0x9B,0x3B,0xDC,0xBF,0x94,0x3E,0xB7, +0x95,0xA7,0x40,0x3C,0xA1,0xFD,0x82,0xF9,0x7D,0x32,0x06,0x82,0x71,0xF6,0xF6,0x8C, +0x7F,0xFB,0xE8,0xDB,0xBC,0x6A,0x2E,0x97,0x97,0xA3,0x8C,0x4B,0xF9,0x2B,0xF6,0xB1, +0xF9,0xCE,0x84,0x1D,0xB1,0xF9,0xC5,0x97,0xDE,0xEF,0xB9,0xF2,0xA3,0xE9,0xBC,0x12, +0x89,0x5E,0xA7,0xAA,0x52,0xAB,0xF8,0x23,0x27,0xCB,0xA4,0xB1,0x9C,0x63,0xDB,0xD7, +0x99,0x7E,0xF0,0x0A,0x5E,0xEB,0x68,0xA6,0xF4,0xC6,0x5A,0x47,0x0D,0x4D,0x10,0x33, +0xE3,0x4E,0xB1,0x13,0xA3,0xC8,0x18,0x6C,0x4B,0xEC,0xFC,0x09,0x90,0xDF,0x9D,0x64, +0x29,0x25,0x23,0x07,0xA1,0xB4,0xD2,0x3D,0x2E,0x60,0xE0,0xCF,0xD2,0x09,0x87,0xBB, +0xCD,0x48,0xF0,0x4D,0xC2,0xC2,0x7A,0x88,0x8A,0xBB,0xBA,0xCF,0x59,0x19,0xD6,0xAF, +0x8F,0xB0,0x07,0xB0,0x9E,0x31,0xF1,0x82,0xC1,0xC0,0xDF,0x2E,0xA6,0x6D,0x6C,0x19, +0x0E,0xB5,0xD8,0x7E,0x26,0x1A,0x45,0x03,0x3D,0xB0,0x79,0xA4,0x94,0x28,0xAD,0x0F, +0x7F,0x26,0xE5,0xA8,0x08,0xFE,0x96,0xE8,0x3C,0x68,0x94,0x53,0xEE,0x83,0x3A,0x88, +0x2B,0x15,0x96,0x09,0xB2,0xE0,0x7A,0x8C,0x2E,0x75,0xD6,0x9C,0xEB,0xA7,0x56,0x64, 
+0x8F,0x96,0x4F,0x68,0xAE,0x3D,0x97,0xC2,0x84,0x8F,0xC0,0xBC,0x40,0xC0,0x0B,0x5C, +0xBD,0xF6,0x87,0xB3,0x35,0x6C,0xAC,0x18,0x50,0x7F,0x84,0xE0,0x4C,0xCD,0x92,0xD3, +0x20,0xE9,0x33,0xBC,0x52,0x99,0xAF,0x32,0xB5,0x29,0xB3,0x25,0x2A,0xB4,0x48,0xF9, +0x72,0xE1,0xCA,0x64,0xF7,0xE6,0x82,0x10,0x8D,0xE8,0x9D,0xC2,0x8A,0x88,0xFA,0x38, +0x66,0x8A,0xFC,0x63,0xF9,0x01,0xF9,0x78,0xFD,0x7B,0x5C,0x77,0xFA,0x76,0x87,0xFA, +0xEC,0xDF,0xB1,0x0E,0x79,0x95,0x57,0xB4,0xBD,0x26,0xEF,0xD6,0x01,0xD1,0xEB,0x16, +0x0A,0xBB,0x8E,0x0B,0xB5,0xC5,0xC5,0x8A,0x55,0xAB,0xD3,0xAC,0xEA,0x91,0x4B,0x29, +0xCC,0x19,0xA4,0x32,0x25,0x4E,0x2A,0xF1,0x65,0x44,0xD0,0x02,0xCE,0xAA,0xCE,0x49, +0xB4,0xEA,0x9F,0x7C,0x83,0xB0,0x40,0x7B,0xE7,0x43,0xAB,0xA7,0x6C,0xA3,0x8F,0x7D, +0x89,0x81,0xFA,0x4C,0xA5,0xFF,0xD5,0x8E,0xC3,0xCE,0x4B,0xE0,0xB5,0xD8,0xB3,0x8E, +0x45,0xCF,0x76,0xC0,0xED,0x40,0x2B,0xFD,0x53,0x0F,0xB0,0xA7,0xD5,0x3B,0x0D,0xB1, +0x8A,0xA2,0x03,0xDE,0x31,0xAD,0xCC,0x77,0xEA,0x6F,0x7B,0x3E,0xD6,0xDF,0x91,0x22, +0x12,0xE6,0xBE,0xFA,0xD8,0x32,0xFC,0x10,0x63,0x14,0x51,0x72,0xDE,0x5D,0xD6,0x16, +0x93,0xBD,0x29,0x68,0x33,0xEF,0x3A,0x66,0xEC,0x07,0x8A,0x26,0xDF,0x13,0xD7,0x57, +0x65,0x78,0x27,0xDE,0x5E,0x49,0x14,0x00,0xA2,0x00,0x7F,0x9A,0xA8,0x21,0xB6,0xA9, +0xB1,0x95,0xB0,0xA5,0xB9,0x0D,0x16,0x11,0xDA,0xC7,0x6C,0x48,0x3C,0x40,0xE0,0x7E, +0x0D,0x5A,0xCD,0x56,0x3C,0xD1,0x97,0x05,0xB9,0xCB,0x4B,0xED,0x39,0x4B,0x9C,0xC4, +0x3F,0xD2,0x55,0x13,0x6E,0x24,0xB0,0xD6,0x71,0xFA,0xF4,0xC1,0xBA,0xCC,0xED,0x1B, +0xF5,0xFE,0x81,0x41,0xD8,0x00,0x98,0x3D,0x3A,0xC8,0xAE,0x7A,0x98,0x37,0x18,0x05, +0x95,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D, +0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D, +0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55, +0x1D,0x0E,0x04,0x16,0x04,0x14,0xE4,0xAF,0x2B,0x26,0x71,0x1A,0x2B,0x48,0x27,0x85, +0x2F,0x52,0x66,0x2C,0xEF,0xF0,0x89,0x13,0x71,0x3E,0x30,0x0D,0x06,0x09,0x2A,0x86, 
+0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x38,0x96, +0x0A,0xEE,0x3D,0xB4,0x96,0x1E,0x5F,0xEF,0x9D,0x9C,0x0B,0x33,0x9F,0x2B,0xE0,0xCA, +0xFD,0xD2,0x8E,0x0A,0x1F,0x41,0x74,0xA5,0x7C,0xAA,0x84,0xD4,0xE5,0xF2,0x1E,0xE6, +0x37,0x52,0x32,0x9C,0x0B,0xD1,0x61,0x1D,0xBF,0x28,0xC1,0xB6,0x44,0x29,0x35,0x75, +0x77,0x98,0xB2,0x7C,0xD9,0xBD,0x74,0xAC,0x8A,0x68,0xE3,0xA9,0x31,0x09,0x29,0x01, +0x60,0x73,0xE3,0x47,0x7C,0x53,0xA8,0x90,0x4A,0x27,0xEF,0x4B,0xD7,0x9F,0x93,0xE7, +0x82,0x36,0xCE,0x9A,0x68,0x0C,0x82,0xE7,0xCF,0xD4,0x10,0x16,0x6F,0x5F,0x0E,0x99, +0x5C,0xF6,0x1F,0x71,0x7D,0xEF,0xEF,0x7B,0x2F,0x7E,0xEA,0x36,0xD6,0x97,0x70,0x0B, +0x15,0xEE,0xD7,0x5C,0x56,0x6A,0x33,0xA5,0xE3,0x49,0x38,0x0C,0xB8,0x7D,0xFB,0x8D, +0x85,0xA4,0xB1,0x59,0x5E,0xF4,0x6A,0xE1,0xDD,0xA1,0xF6,0x64,0x44,0xAE,0xE6,0x51, +0x83,0x21,0x66,0xC6,0x11,0x3E,0xF3,0xCE,0x47,0xEE,0x9C,0x28,0x1F,0x25,0xDA,0xFF, +0xAC,0x66,0x95,0xDD,0x35,0x0F,0x5C,0xEF,0x20,0x2C,0x62,0xFD,0x91,0xBA,0xA9,0xCC, +0xFC,0x5A,0x9C,0x93,0x81,0x83,0x29,0x97,0x4A,0x7C,0x5A,0x72,0xB4,0x39,0xD0,0xB7, +0x77,0xCB,0x79,0xFD,0x69,0x3A,0x92,0x37,0xED,0x6E,0x38,0x65,0x46,0x7E,0xE9,0x60, +0xBD,0x79,0x88,0x97,0x5F,0x38,0x12,0xF4,0xEE,0xAF,0x5B,0x82,0xC8,0x86,0xD5,0xE1, +0x99,0x6D,0x8C,0x04,0xF2,0x76,0xBA,0x49,0xF6,0x6E,0xE9,0x6D,0x1E,0x5F,0xA0,0xEF, +0x27,0x82,0x76,0x40,0xF8,0xA6,0xD3,0x58,0x5C,0x0F,0x2C,0x42,0xDA,0x42,0xC6,0x7B, +0x88,0x34,0xC7,0xC1,0xD8,0x45,0x9B,0xC1,0x3E,0xC5,0x61,0x1D,0xD9,0x63,0x50,0x49, +0xF6,0x34,0x85,0x6A,0xE0,0x18,0xC5,0x6E,0x47,0xAB,0x41,0x42,0x29,0x9B,0xF6,0x60, +0x0D,0xD2,0x31,0xD3,0x63,0x98,0x23,0x93,0x5A,0x00,0x81,0x48,0xB4,0xEF,0xCD,0x8A, +0xCD,0xC9,0xCF,0x99,0xEE,0xD9,0x9E,0xAA,0x36,0xE1,0x68,0x4B,0x71,0x49,0x14,0x36, +0x28,0x3A,0x3D,0x1D,0xCE,0x9A,0x8F,0x25,0xE6,0x80,0x71,0x61,0x2B,0xB5,0x7B,0xCC, +0xF9,0x25,0x16,0x81,0xE1,0x31,0x5F,0xA1,0xA3,0x7E,0x16,0xA4,0x9C,0x16,0x6A,0x97, +0x18,0xBD,0x76,0x72,0xA5,0x0B,0x9E,0x1D,0x36,0xE6,0x2F,0xA1,0x2F,0xBE,0x70,0x91, 
+0x0F,0xA8,0xE6,0xDA,0xF8,0xC4,0x92,0x40,0x6C,0x25,0x7E,0x7B,0xB3,0x09,0xDC,0xB2, +0x17,0xAD,0x80,0x44,0xF0,0x68,0xA5,0x8F,0x94,0x75,0xFF,0x74,0x5A,0xE8,0xA8,0x02, +0x7C,0x0C,0x09,0xE2,0xA9,0x4B,0x0B,0xA0,0x85,0x0B,0x62,0xB9,0xEF,0xA1,0x31,0x92, +0xFB,0xEF,0xF6,0x51,0x04,0x89,0x6C,0xE8,0xA9,0x74,0xA1,0xBB,0x17,0xB3,0xB5,0xFD, +0x49,0x0F,0x7C,0x3C,0xEC,0x83,0x18,0x20,0x43,0x4E,0xD5,0x93,0xBA,0xB4,0x34,0xB1, +0x1F,0x16,0x36,0x1F,0x0C,0xE6,0x64,0x39,0x16,0x4C,0xDC,0xE0,0xFE,0x1D,0xC8,0xA9, +0x62,0x3D,0x40,0xEA,0xCA,0xC5,0x34,0x02,0xB4,0xAE,0x89,0x88,0x33,0x35,0xDC,0x2C, +0x13,0x73,0xD8,0x27,0xF1,0xD0,0x72,0xEE,0x75,0x3B,0x22,0xDE,0x98,0x68,0x66,0x5B, +0xF1,0xC6,0x63,0x47,0x55,0x1C,0xBA,0xA5,0x08,0x51,0x75,0xA6,0x48,0x25, +}; + + +/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G3 */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G3 */ + + +const unsigned char DigiCert_Global_Root_G3_certificate[579]={ +0x30,0x82,0x02,0x3F,0x30,0x82,0x01,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x05, +0x55,0x56,0xBC,0xF2,0x5E,0xA4,0x35,0x35,0xC3,0xA4,0x0F,0xD5,0xAB,0x45,0x72,0x30, +0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x61,0x31,0x0B,0x30, +0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03, +0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E, +0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E, +0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x20,0x30,0x1E, +0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20, +0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x33,0x30,0x1E, +0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,0x30,0x30,0x5A,0x17, +0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,0x30,0x5A,0x30,0x61, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, 
+0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, +0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, +0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, +0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65, +0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47, +0x33,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05, +0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0xDD,0xA7,0xD9,0xBB,0x8A,0xB8,0x0B, +0xFB,0x0B,0x7F,0x21,0xD2,0xF0,0xBE,0xBE,0x73,0xF3,0x33,0x5D,0x1A,0xBC,0x34,0xEA, +0xDE,0xC6,0x9B,0xBC,0xD0,0x95,0xF6,0xF0,0xCC,0xD0,0x0B,0xBA,0x61,0x5B,0x51,0x46, +0x7E,0x9E,0x2D,0x9F,0xEE,0x8E,0x63,0x0C,0x17,0xEC,0x07,0x70,0xF5,0xCF,0x84,0x2E, +0x40,0x83,0x9C,0xE8,0x3F,0x41,0x6D,0x3B,0xAD,0xD3,0xA4,0x14,0x59,0x36,0x78,0x9D, +0x03,0x43,0xEE,0x10,0x13,0x6C,0x72,0xDE,0xAE,0x88,0xA7,0xA1,0x6B,0xB5,0x43,0xCE, +0x67,0xDC,0x23,0xFF,0x03,0x1C,0xA3,0xE2,0x3E,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06, +0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E, +0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D, +0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB3,0xDB,0x48,0xA4,0xF9,0xA1,0xC5, +0xD8,0xAE,0x36,0x41,0xCC,0x11,0x63,0x69,0x62,0x29,0xBC,0x4B,0xC6,0x30,0x0A,0x06, +0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31, +0x00,0xAD,0xBC,0xF2,0x6C,0x3F,0x12,0x4A,0xD1,0x2D,0x39,0xC3,0x0A,0x09,0x97,0x73, +0xF4,0x88,0x36,0x8C,0x88,0x27,0xBB,0xE6,0x88,0x8D,0x50,0x85,0xA7,0x63,0xF9,0x9E, +0x32,0xDE,0x66,0x93,0x0F,0xF1,0xCC,0xB1,0x09,0x8F,0xDD,0x6C,0xAB,0xFA,0x6B,0x7F, +0xA0,0x02,0x30,0x39,0x66,0x5B,0xC2,0x64,0x8D,0xB8,0x9E,0x50,0xDC,0xA8,0xD5,0x49, +0xA2,0xED,0xC7,0xDC,0xD1,0x49,0x7F,0x17,0x01,0xB8,0xC8,0x86,0x8F,0x4E,0x8C,0x88, +0x2B,0xA8,0x9A,0xA9,0x8A,0xC5,0xD1,0x00,0xBD,0xF8,0x54,0xE2,0x9A,0xE5,0x5B,0x7C, +0xB3,0x27,0x17, +}; + + +/* 
subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G2 */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root G2 */ + + +const unsigned char DigiCert_Global_Root_G2_certificate[914]={ +0x30,0x82,0x03,0x8E,0x30,0x82,0x02,0x76,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x03, +0x3A,0xF1,0xE6,0xA7,0x11,0xA9,0xA0,0xBB,0x28,0x64,0xB1,0x1D,0x09,0xFA,0xE5,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x61, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, +0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, +0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, +0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, +0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65, +0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47, +0x32,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30,0x30, +0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30,0x30, +0x5A,0x30,0x61,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53, +0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43, +0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B, +0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63, +0x6F,0x6D,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67, 0x69,0x43,0x65,0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F, 0x74,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, 0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, @@ -829,2747 +1468,680 @@ const unsigned char DigiCert_Global_Root_G2_certificate[914]={ }; -/* subject:/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Class 1 CA Root */ -/* issuer :/C=SE/O=AddTrust 
AB/OU=AddTrust TTP Network/CN=AddTrust Class 1 CA Root */ +/* subject:/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */ +/* issuer :/C=US/O=Starfield Technologies, Inc./OU=Starfield Class 2 Certification Authority */ -const unsigned char AddTrust_Low_Value_Services_Root_certificate[1052]={ -0x30,0x82,0x04,0x18,0x30,0x82,0x03,0x00,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, +const unsigned char Starfield_Class_2_CA_certificate[1043]={ +0x30,0x82,0x04,0x0F,0x30,0x82,0x02,0xF7,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00, 0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14, -0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73, -0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41, -0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77, -0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x41,0x64, -0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x31,0x20,0x43, -0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30, -0x31,0x30,0x33,0x38,0x33,0x31,0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31, -0x30,0x33,0x38,0x33,0x31,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, -0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B, -0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06, -0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54, -0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03, -0x55,0x04,0x03,0x13,0x18,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6C, -0x61,0x73,0x73,0x20,0x31,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01, -0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00, 
-0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0x96,0x96, -0xD4,0x21,0x49,0x60,0xE2,0x6B,0xE8,0x41,0x07,0x0C,0xDE,0xC4,0xE0,0xDC,0x13,0x23, -0xCD,0xC1,0x35,0xC7,0xFB,0xD6,0x4E,0x11,0x0A,0x67,0x5E,0xF5,0x06,0x5B,0x6B,0xA5, -0x08,0x3B,0x5B,0x29,0x16,0x3A,0xE7,0x87,0xB2,0x34,0x06,0xC5,0xBC,0x05,0xA5,0x03, -0x7C,0x82,0xCB,0x29,0x10,0xAE,0xE1,0x88,0x81,0xBD,0xD6,0x9E,0xD3,0xFE,0x2D,0x56, -0xC1,0x15,0xCE,0xE3,0x26,0x9D,0x15,0x2E,0x10,0xFB,0x06,0x8F,0x30,0x04,0xDE,0xA7, -0xB4,0x63,0xB4,0xFF,0xB1,0x9C,0xAE,0x3C,0xAF,0x77,0xB6,0x56,0xC5,0xB5,0xAB,0xA2, -0xE9,0x69,0x3A,0x3D,0x0E,0x33,0x79,0x32,0x3F,0x70,0x82,0x92,0x99,0x61,0x6D,0x8D, -0x30,0x08,0x8F,0x71,0x3F,0xA6,0x48,0x57,0x19,0xF8,0x25,0xDC,0x4B,0x66,0x5C,0xA5, -0x74,0x8F,0x98,0xAE,0xC8,0xF9,0xC0,0x06,0x22,0xE7,0xAC,0x73,0xDF,0xA5,0x2E,0xFB, -0x52,0xDC,0xB1,0x15,0x65,0x20,0xFA,0x35,0x66,0x69,0xDE,0xDF,0x2C,0xF1,0x6E,0xBC, -0x30,0xDB,0x2C,0x24,0x12,0xDB,0xEB,0x35,0x35,0x68,0x90,0xCB,0x00,0xB0,0x97,0x21, -0x3D,0x74,0x21,0x23,0x65,0x34,0x2B,0xBB,0x78,0x59,0xA3,0xD6,0xE1,0x76,0x39,0x9A, -0xA4,0x49,0x8E,0x8C,0x74,0xAF,0x6E,0xA4,0x9A,0xA3,0xD9,0x9B,0xD2,0x38,0x5C,0x9B, -0xA2,0x18,0xCC,0x75,0x23,0x84,0xBE,0xEB,0xE2,0x4D,0x33,0x71,0x8E,0x1A,0xF0,0xC2, -0xF8,0xC7,0x1D,0xA2,0xAD,0x03,0x97,0x2C,0xF8,0xCF,0x25,0xC6,0xF6,0xB8,0x24,0x31, -0xB1,0x63,0x5D,0x92,0x7F,0x63,0xF0,0x25,0xC9,0x53,0x2E,0x1F,0xBF,0x4D,0x02,0x03, -0x01,0x00,0x01,0xA3,0x81,0xD2,0x30,0x81,0xCF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E, -0x04,0x16,0x04,0x14,0x95,0xB1,0xB4,0xF0,0x94,0xB6,0xBD,0xC7,0xDA,0xD1,0x11,0x09, -0x21,0xBE,0xC1,0xAF,0x49,0xFD,0x10,0x7B,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04, -0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, -0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x8F,0x06,0x03,0x55,0x1D,0x23,0x04,0x81, -0x87,0x30,0x81,0x84,0x80,0x14,0x95,0xB1,0xB4,0xF0,0x94,0xB6,0xBD,0xC7,0xDA,0xD1, -0x11,0x09,0x21,0xBE,0xC1,0xAF,0x49,0xFD,0x10,0x7B,0xA1,0x69,0xA4,0x67,0x30,0x65, 
-0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30, -0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74, -0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64, -0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F, -0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x41,0x64,0x64, -0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x31,0x20,0x43,0x41, -0x20,0x52,0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x2C,0x6D,0x64,0x1B, -0x1F,0xCD,0x0D,0xDD,0xB9,0x01,0xFA,0x96,0x63,0x34,0x32,0x48,0x47,0x99,0xAE,0x97, -0xED,0xFD,0x72,0x16,0xA6,0x73,0x47,0x5A,0xF4,0xEB,0xDD,0xE9,0xF5,0xD6,0xFB,0x45, -0xCC,0x29,0x89,0x44,0x5D,0xBF,0x46,0x39,0x3D,0xE8,0xEE,0xBC,0x4D,0x54,0x86,0x1E, -0x1D,0x6C,0xE3,0x17,0x27,0x43,0xE1,0x89,0x56,0x2B,0xA9,0x6F,0x72,0x4E,0x49,0x33, -0xE3,0x72,0x7C,0x2A,0x23,0x9A,0xBC,0x3E,0xFF,0x28,0x2A,0xED,0xA3,0xFF,0x1C,0x23, -0xBA,0x43,0x57,0x09,0x67,0x4D,0x4B,0x62,0x06,0x2D,0xF8,0xFF,0x6C,0x9D,0x60,0x1E, -0xD8,0x1C,0x4B,0x7D,0xB5,0x31,0x2F,0xD9,0xD0,0x7C,0x5D,0xF8,0xDE,0x6B,0x83,0x18, -0x78,0x37,0x57,0x2F,0xE8,0x33,0x07,0x67,0xDF,0x1E,0xC7,0x6B,0x2A,0x95,0x76,0xAE, -0x8F,0x57,0xA3,0xF0,0xF4,0x52,0xB4,0xA9,0x53,0x08,0xCF,0xE0,0x4F,0xD3,0x7A,0x53, -0x8B,0xFD,0xBB,0x1C,0x56,0x36,0xF2,0xFE,0xB2,0xB6,0xE5,0x76,0xBB,0xD5,0x22,0x65, -0xA7,0x3F,0xFE,0xD1,0x66,0xAD,0x0B,0xBC,0x6B,0x99,0x86,0xEF,0x3F,0x7D,0xF3,0x18, -0x32,0xCA,0x7B,0xC6,0xE3,0xAB,0x64,0x46,0x95,0xF8,0x26,0x69,0xD9,0x55,0x83,0x7B, -0x2C,0x96,0x07,0xFF,0x59,0x2C,0x44,0xA3,0xC6,0xE5,0xE9,0xA9,0xDC,0xA1,0x63,0x80, -0x5A,0x21,0x5E,0x21,0xCF,0x53,0x54,0xF0,0xBA,0x6F,0x89,0xDB,0xA8,0xAA,0x95,0xCF, -0x8B,0xE3,0x71,0xCC,0x1E,0x1B,0x20,0x44,0x08,0xC0,0x7A,0xB6,0x40,0xFD,0xC4,0xE4, -0x35,0xE1,0x1D,0x16,0x1C,0xD0,0xBC,0x2B,0x8E,0xD6,0x71,0xD9, -}; - - -/* 
subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */ -/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */ - - -const unsigned char AffirmTrust_Premium_ECC_certificate[514]={ -0x30,0x82,0x01,0xFE,0x30,0x82,0x01,0x85,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x74, -0x97,0x25,0x8A,0xC7,0x3F,0x7A,0x54,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D, -0x04,0x03,0x03,0x30,0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02, -0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66, -0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04, -0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x50, -0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,0x43,0x30,0x1E,0x17,0x0D,0x31,0x30, -0x30,0x31,0x32,0x39,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x17,0x0D,0x34,0x30,0x31, -0x32,0x33,0x31,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x30,0x45,0x31,0x0B,0x30,0x09, -0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55, -0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31, -0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D, -0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43, -0x43,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05, -0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x0D,0x30,0x5E,0x1B,0x15,0x9D,0x03, -0xD0,0xA1,0x79,0x35,0xB7,0x3A,0x3C,0x92,0x7A,0xCA,0x15,0x1C,0xCD,0x62,0xF3,0x9C, -0x26,0x5C,0x07,0x3D,0xE5,0x54,0xFA,0xA3,0xD6,0xCC,0x12,0xEA,0xF4,0x14,0x5F,0xE8, -0x8E,0x19,0xAB,0x2F,0x2E,0x48,0xE6,0xAC,0x18,0x43,0x78,0xAC,0xD0,0x37,0xC3,0xBD, -0xB2,0xCD,0x2C,0xE6,0x47,0xE2,0x1A,0xE6,0x63,0xB8,0x3D,0x2E,0x2F,0x78,0xC4,0x4F, -0xDB,0xF4,0x0F,0xA4,0x68,0x4C,0x55,0x72,0x6B,0x95,0x1D,0x4E,0x18,0x42,0x95,0x78, -0xCC,0x37,0x3C,0x91,0xE2,0x9B,0x65,0x2B,0x29,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9A,0xAF,0x29,0x7A,0xC0,0x11,0x35,0x35, 
-0x26,0x51,0x30,0x00,0xC3,0x6A,0xFE,0x40,0xD5,0xAE,0xD6,0x3C,0x30,0x0F,0x06,0x03, -0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06, -0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0A,0x06, -0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30, -0x17,0x09,0xF3,0x87,0x88,0x50,0x5A,0xAF,0xC8,0xC0,0x42,0xBF,0x47,0x5F,0xF5,0x6C, -0x6A,0x86,0xE0,0xC4,0x27,0x74,0xE4,0x38,0x53,0xD7,0x05,0x7F,0x1B,0x34,0xE3,0xC6, -0x2F,0xB3,0xCA,0x09,0x3C,0x37,0x9D,0xD7,0xE7,0xB8,0x46,0xF1,0xFD,0xA1,0xE2,0x71, -0x02,0x30,0x42,0x59,0x87,0x43,0xD4,0x51,0xDF,0xBA,0xD3,0x09,0x32,0x5A,0xCE,0x88, -0x7E,0x57,0x3D,0x9C,0x5F,0x42,0x6B,0xF5,0x07,0x2D,0xB5,0xF0,0x82,0x93,0xF9,0x59, -0x6F,0xAE,0x64,0xFA,0x58,0xE5,0x8B,0x1E,0xE3,0x63,0xBE,0xB5,0x81,0xCD,0x6F,0x02, -0x8C,0x79, -}; - - -/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */ -/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 1999 VeriSign, Inc. 
- For authorized use only/CN=VeriSign Class 4 Public Primary Certification Authority - G3 */ - - -const unsigned char Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate[1054]={ -0x30,0x82,0x04,0x1A,0x30,0x82,0x03,0x02,0x02,0x11,0x00,0xEC,0xA0,0xA7,0x8B,0x6E, -0x75,0x6A,0x01,0xCF,0xC4,0x7C,0xCC,0x2F,0x94,0x5E,0xD7,0x30,0x0D,0x06,0x09,0x2A, -0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81,0xCA,0x31,0x0B,0x30, -0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03, -0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49, -0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65, -0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74, -0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28, -0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E, -0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74, -0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79, -0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53, -0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C, -0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, -0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31,0x30,0x30, -0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31,0x36, -0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B,0x30,0x09,0x06,0x03, -0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x0A, -0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E, -0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56,0x65,0x72,0x69,0x53, 
-0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72, -0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31,0x28,0x63,0x29,0x20, -0x31,0x39,0x39,0x39,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20,0x49, -0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72, -0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30, -0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E, -0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x34,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20, -0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, -0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20, -0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, -0x02,0x82,0x01,0x01,0x00,0xAD,0xCB,0xA5,0x11,0x69,0xC6,0x59,0xAB,0xF1,0x8F,0xB5, -0x19,0x0F,0x56,0xCE,0xCC,0xB5,0x1F,0x20,0xE4,0x9E,0x26,0x25,0x4B,0xE0,0x73,0x65, -0x89,0x59,0xDE,0xD0,0x83,0xE4,0xF5,0x0F,0xB5,0xBB,0xAD,0xF1,0x7C,0xE8,0x21,0xFC, -0xE4,0xE8,0x0C,0xEE,0x7C,0x45,0x22,0x19,0x76,0x92,0xB4,0x13,0xB7,0x20,0x5B,0x09, -0xFA,0x61,0xAE,0xA8,0xF2,0xA5,0x8D,0x85,0xC2,0x2A,0xD6,0xDE,0x66,0x36,0xD2,0x9B, -0x02,0xF4,0xA8,0x92,0x60,0x7C,0x9C,0x69,0xB4,0x8F,0x24,0x1E,0xD0,0x86,0x52,0xF6, -0x32,0x9C,0x41,0x58,0x1E,0x22,0xBD,0xCD,0x45,0x62,0x95,0x08,0x6E,0xD0,0x66,0xDD, -0x53,0xA2,0xCC,0xF0,0x10,0xDC,0x54,0x73,0x8B,0x04,0xA1,0x46,0x33,0x33,0x5C,0x17, -0x40,0xB9,0x9E,0x4D,0xD3,0xF3,0xBE,0x55,0x83,0xE8,0xB1,0x89,0x8E,0x5A,0x7C,0x9A, -0x96,0x22,0x90,0x3B,0x88,0x25,0xF2,0xD2,0x53,0x88,0x02,0x0C,0x0B,0x78,0xF2,0xE6, -0x37,0x17,0x4B,0x30,0x46,0x07,0xE4,0x80,0x6D,0xA6,0xD8,0x96,0x2E,0xE8,0x2C,0xF8, -0x11,0xB3,0x38,0x0D,0x66,0xA6,0x9B,0xEA,0xC9,0x23,0x5B,0xDB,0x8E,0xE2,0xF3,0x13, -0x8E,0x1A,0x59,0x2D,0xAA,0x02,0xF0,0xEC,0xA4,0x87,0x66,0xDC,0xC1,0x3F,0xF5,0xD8, 
-0xB9,0xF4,0xEC,0x82,0xC6,0xD2,0x3D,0x95,0x1D,0xE5,0xC0,0x4F,0x84,0xC9,0xD9,0xA3, -0x44,0x28,0x06,0x6A,0xD7,0x45,0xAC,0xF0,0x6B,0x6A,0xEF,0x4E,0x5F,0xF8,0x11,0x82, -0x1E,0x38,0x63,0x34,0x66,0x50,0xD4,0x3E,0x93,0x73,0xFA,0x30,0xC3,0x66,0xAD,0xFF, -0x93,0x2D,0x97,0xEF,0x03,0x02,0x03,0x01,0x00,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x8F,0xFA, -0x25,0x6B,0x4F,0x5B,0xE4,0xA4,0x4E,0x27,0x55,0xAB,0x22,0x15,0x59,0x3C,0xCA,0xB5, -0x0A,0xD4,0x4A,0xDB,0xAB,0xDD,0xA1,0x5F,0x53,0xC5,0xA0,0x57,0x39,0xC2,0xCE,0x47, -0x2B,0xBE,0x3A,0xC8,0x56,0xBF,0xC2,0xD9,0x27,0x10,0x3A,0xB1,0x05,0x3C,0xC0,0x77, -0x31,0xBB,0x3A,0xD3,0x05,0x7B,0x6D,0x9A,0x1C,0x30,0x8C,0x80,0xCB,0x93,0x93,0x2A, -0x83,0xAB,0x05,0x51,0x82,0x02,0x00,0x11,0x67,0x6B,0xF3,0x88,0x61,0x47,0x5F,0x03, -0x93,0xD5,0x5B,0x0D,0xE0,0xF1,0xD4,0xA1,0x32,0x35,0x85,0xB2,0x3A,0xDB,0xB0,0x82, -0xAB,0xD1,0xCB,0x0A,0xBC,0x4F,0x8C,0x5B,0xC5,0x4B,0x00,0x3B,0x1F,0x2A,0x82,0xA6, -0x7E,0x36,0x85,0xDC,0x7E,0x3C,0x67,0x00,0xB5,0xE4,0x3B,0x52,0xE0,0xA8,0xEB,0x5D, -0x15,0xF9,0xC6,0x6D,0xF0,0xAD,0x1D,0x0E,0x85,0xB7,0xA9,0x9A,0x73,0x14,0x5A,0x5B, -0x8F,0x41,0x28,0xC0,0xD5,0xE8,0x2D,0x4D,0xA4,0x5E,0xCD,0xAA,0xD9,0xED,0xCE,0xDC, -0xD8,0xD5,0x3C,0x42,0x1D,0x17,0xC1,0x12,0x5D,0x45,0x38,0xC3,0x38,0xF3,0xFC,0x85, -0x2E,0x83,0x46,0x48,0xB2,0xD7,0x20,0x5F,0x92,0x36,0x8F,0xE7,0x79,0x0F,0x98,0x5E, -0x99,0xE8,0xF0,0xD0,0xA4,0xBB,0xF5,0x53,0xBD,0x2A,0xCE,0x59,0xB0,0xAF,0x6E,0x7F, -0x6C,0xBB,0xD2,0x1E,0x00,0xB0,0x21,0xED,0xF8,0x41,0x62,0x82,0xB9,0xD8,0xB2,0xC4, -0xBB,0x46,0x50,0xF3,0x31,0xC5,0x8F,0x01,0xA8,0x74,0xEB,0xF5,0x78,0x27,0xDA,0xE7, -0xF7,0x66,0x43,0xF3,0x9E,0x83,0x3E,0x20,0xAA,0xC3,0x35,0x60,0x91,0xCE, +0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25, +0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65, +0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C, 
+0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29, +0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20, +0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20, +0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30, +0x36,0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36, +0x32,0x39,0x31,0x37,0x33,0x39,0x31,0x36,0x5A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06, +0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04, +0x0A,0x13,0x1C,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63, +0x68,0x6E,0x6F,0x6C,0x6F,0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31, +0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69, +0x65,0x6C,0x64,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74, +0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72, +0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, +0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D,0x00,0x30,0x82,0x01,0x08,0x02, +0x82,0x01,0x01,0x00,0xB7,0x32,0xC8,0xFE,0xE9,0x71,0xA6,0x04,0x85,0xAD,0x0C,0x11, +0x64,0xDF,0xCE,0x4D,0xEF,0xC8,0x03,0x18,0x87,0x3F,0xA1,0xAB,0xFB,0x3C,0xA6,0x9F, +0xF0,0xC3,0xA1,0xDA,0xD4,0xD8,0x6E,0x2B,0x53,0x90,0xFB,0x24,0xA4,0x3E,0x84,0xF0, +0x9E,0xE8,0x5F,0xEC,0xE5,0x27,0x44,0xF5,0x28,0xA6,0x3F,0x7B,0xDE,0xE0,0x2A,0xF0, +0xC8,0xAF,0x53,0x2F,0x9E,0xCA,0x05,0x01,0x93,0x1E,0x8F,0x66,0x1C,0x39,0xA7,0x4D, +0xFA,0x5A,0xB6,0x73,0x04,0x25,0x66,0xEB,0x77,0x7F,0xE7,0x59,0xC6,0x4A,0x99,0x25, +0x14,0x54,0xEB,0x26,0xC7,0xF3,0x7F,0x19,0xD5,0x30,0x70,0x8F,0xAF,0xB0,0x46,0x2A, +0xFF,0xAD,0xEB,0x29,0xED,0xD7,0x9F,0xAA,0x04,0x87,0xA3,0xD4,0xF9,0x89,0xA5,0x34, +0x5F,0xDB,0x43,0x91,0x82,0x36,0xD9,0x66,0x3C,0xB1,0xB8,0xB9,0x82,0xFD,0x9C,0x3A, +0x3E,0x10,0xC8,0x3B,0xEF,0x06,0x65,0x66,0x7A,0x9B,0x19,0x18,0x3D,0xFF,0x71,0x51, 
+0x3C,0x30,0x2E,0x5F,0xBE,0x3D,0x77,0x73,0xB2,0x5D,0x06,0x6C,0xC3,0x23,0x56,0x9A, +0x2B,0x85,0x26,0x92,0x1C,0xA7,0x02,0xB3,0xE4,0x3F,0x0D,0xAF,0x08,0x79,0x82,0xB8, +0x36,0x3D,0xEA,0x9C,0xD3,0x35,0xB3,0xBC,0x69,0xCA,0xF5,0xCC,0x9D,0xE8,0xFD,0x64, +0x8D,0x17,0x80,0x33,0x6E,0x5E,0x4A,0x5D,0x99,0xC9,0x1E,0x87,0xB4,0x9D,0x1A,0xC0, +0xD5,0x6E,0x13,0x35,0x23,0x5E,0xDF,0x9B,0x5F,0x3D,0xEF,0xD6,0xF7,0x76,0xC2,0xEA, +0x3E,0xBB,0x78,0x0D,0x1C,0x42,0x67,0x6B,0x04,0xD8,0xF8,0xD6,0xDA,0x6F,0x8B,0xF2, +0x44,0xA0,0x01,0xAB,0x02,0x01,0x03,0xA3,0x81,0xC5,0x30,0x81,0xC2,0x30,0x1D,0x06, +0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xBF,0x5F,0xB7,0xD1,0xCE,0xDD,0x1F,0x86, +0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7,0x30,0x81,0x92,0x06, +0x03,0x55,0x1D,0x23,0x04,0x81,0x8A,0x30,0x81,0x87,0x80,0x14,0xBF,0x5F,0xB7,0xD1, +0xCE,0xDD,0x1F,0x86,0xF4,0x5B,0x55,0xAC,0xDC,0xD7,0x10,0xC2,0x0E,0xA9,0x88,0xE7, +0xA1,0x6C,0xA4,0x6A,0x30,0x68,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, +0x02,0x55,0x53,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0A,0x13,0x1C,0x53,0x74, +0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x54,0x65,0x63,0x68,0x6E,0x6F,0x6C,0x6F, +0x67,0x69,0x65,0x73,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x32,0x30,0x30,0x06,0x03, +0x55,0x04,0x0B,0x13,0x29,0x53,0x74,0x61,0x72,0x66,0x69,0x65,0x6C,0x64,0x20,0x43, +0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, +0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82,0x01, +0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82, +0x01,0x01,0x00,0x05,0x9D,0x3F,0x88,0x9D,0xD1,0xC9,0x1A,0x55,0xA1,0xAC,0x69,0xF3, +0xF3,0x59,0xDA,0x9B,0x01,0x87,0x1A,0x4F,0x57,0xA9,0xA1,0x79,0x09,0x2A,0xDB,0xF7, +0x2F,0xB2,0x1E,0xCC,0xC7,0x5E,0x6A,0xD8,0x83,0x87,0xA1,0x97,0xEF,0x49,0x35,0x3E, +0x77,0x06,0x41,0x58,0x62,0xBF,0x8E,0x58,0xB8,0x0A,0x67,0x3F,0xEC,0xB3,0xDD,0x21, 
+0x66,0x1F,0xC9,0x54,0xFA,0x72,0xCC,0x3D,0x4C,0x40,0xD8,0x81,0xAF,0x77,0x9E,0x83, +0x7A,0xBB,0xA2,0xC7,0xF5,0x34,0x17,0x8E,0xD9,0x11,0x40,0xF4,0xFC,0x2C,0x2A,0x4D, +0x15,0x7F,0xA7,0x62,0x5D,0x2E,0x25,0xD3,0x00,0x0B,0x20,0x1A,0x1D,0x68,0xF9,0x17, +0xB8,0xF4,0xBD,0x8B,0xED,0x28,0x59,0xDD,0x4D,0x16,0x8B,0x17,0x83,0xC8,0xB2,0x65, +0xC7,0x2D,0x7A,0xA5,0xAA,0xBC,0x53,0x86,0x6D,0xDD,0x57,0xA4,0xCA,0xF8,0x20,0x41, +0x0B,0x68,0xF0,0xF4,0xFB,0x74,0xBE,0x56,0x5D,0x7A,0x79,0xF5,0xF9,0x1D,0x85,0xE3, +0x2D,0x95,0xBE,0xF5,0x71,0x90,0x43,0xCC,0x8D,0x1F,0x9A,0x00,0x0A,0x87,0x29,0xE9, +0x55,0x22,0x58,0x00,0x23,0xEA,0xE3,0x12,0x43,0x29,0x5B,0x47,0x08,0xDD,0x8C,0x41, +0x6A,0x65,0x06,0xA8,0xE5,0x21,0xAA,0x41,0xB4,0x95,0x21,0x95,0xB9,0x7D,0xD1,0x34, +0xAB,0x13,0xD6,0xAD,0xBC,0xDC,0xE2,0x3D,0x39,0xCD,0xBD,0x3E,0x75,0x70,0xA1,0x18, +0x59,0x03,0xC9,0x22,0xB4,0x8F,0x9C,0xD5,0x5E,0x2A,0xD7,0xA5,0xB6,0xD4,0x0A,0x6D, +0xF8,0xB7,0x40,0x11,0x46,0x9A,0x1F,0x79,0x0E,0x62,0xBF,0x0F,0x97,0xEC,0xE0,0x2F, +0x1F,0x17,0x94, }; -/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA */ -/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2006 thawte, Inc. 
- For authorized use only/CN=thawte Primary Root CA */ +/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO Certification Authority */ +/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO Certification Authority */ -const unsigned char thawte_Primary_Root_CA_certificate[1060]={ -0x30,0x82,0x04,0x20,0x30,0x82,0x03,0x08,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x34, -0x4E,0xD5,0x57,0x20,0xD5,0xED,0xEC,0x49,0xF4,0x2F,0xCE,0x37,0xDB,0x2B,0x6D,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81, -0xA9,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15, -0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C, -0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F, -0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65, -0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31, -0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30, -0x30,0x36,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20, -0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64, -0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55, -0x04,0x03,0x13,0x16,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61, -0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36, -0x31,0x31,0x31,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30, -0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xA9,0x31,0x0B,0x30, -0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03, -0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63, -0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F,0x43,0x65,0x72,0x74, -0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65,0x72,0x76,0x69,0x63, 
-0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31,0x38,0x30,0x36,0x06, -0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x74, -0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F, -0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65, -0x20,0x6F,0x6E,0x6C,0x79,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16, -0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x52, -0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86, +const unsigned char COMODO_Certification_Authority_certificate[1057]={ +0x30,0x82,0x04,0x1D,0x30,0x82,0x03,0x05,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x4E, +0x81,0x2D,0x8A,0x82,0x65,0xE0,0x0B,0x02,0xEE,0x3E,0x35,0x02,0x46,0xE5,0x3D,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81, +0x81,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, +0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, +0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, +0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, +0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43, +0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x27,0x30,0x25,0x06,0x03,0x55, +0x04,0x03,0x13,0x1E,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x65,0x72,0x74,0x69, +0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, +0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x30,0x31,0x30,0x30,0x30,0x30, +0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35, +0x39,0x5A,0x30,0x81,0x81,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02, +0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65, +0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31, 
+0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72, +0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F, +0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x27,0x30, +0x25,0x06,0x03,0x55,0x04,0x03,0x13,0x1E,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43, +0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74, +0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86, 0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82, -0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAC,0xA0,0xF0,0xFB,0x80,0x59,0xD4,0x9C,0xC7, -0xA4,0xCF,0x9D,0xA1,0x59,0x73,0x09,0x10,0x45,0x0C,0x0D,0x2C,0x6E,0x68,0xF1,0x6C, -0x5B,0x48,0x68,0x49,0x59,0x37,0xFC,0x0B,0x33,0x19,0xC2,0x77,0x7F,0xCC,0x10,0x2D, -0x95,0x34,0x1C,0xE6,0xEB,0x4D,0x09,0xA7,0x1C,0xD2,0xB8,0xC9,0x97,0x36,0x02,0xB7, -0x89,0xD4,0x24,0x5F,0x06,0xC0,0xCC,0x44,0x94,0x94,0x8D,0x02,0x62,0x6F,0xEB,0x5A, -0xDD,0x11,0x8D,0x28,0x9A,0x5C,0x84,0x90,0x10,0x7A,0x0D,0xBD,0x74,0x66,0x2F,0x6A, -0x38,0xA0,0xE2,0xD5,0x54,0x44,0xEB,0x1D,0x07,0x9F,0x07,0xBA,0x6F,0xEE,0xE9,0xFD, -0x4E,0x0B,0x29,0xF5,0x3E,0x84,0xA0,0x01,0xF1,0x9C,0xAB,0xF8,0x1C,0x7E,0x89,0xA4, -0xE8,0xA1,0xD8,0x71,0x65,0x0D,0xA3,0x51,0x7B,0xEE,0xBC,0xD2,0x22,0x60,0x0D,0xB9, -0x5B,0x9D,0xDF,0xBA,0xFC,0x51,0x5B,0x0B,0xAF,0x98,0xB2,0xE9,0x2E,0xE9,0x04,0xE8, -0x62,0x87,0xDE,0x2B,0xC8,0xD7,0x4E,0xC1,0x4C,0x64,0x1E,0xDD,0xCF,0x87,0x58,0xBA, -0x4A,0x4F,0xCA,0x68,0x07,0x1D,0x1C,0x9D,0x4A,0xC6,0xD5,0x2F,0x91,0xCC,0x7C,0x71, -0x72,0x1C,0xC5,0xC0,0x67,0xEB,0x32,0xFD,0xC9,0x92,0x5C,0x94,0xDA,0x85,0xC0,0x9B, -0xBF,0x53,0x7D,0x2B,0x09,0xF4,0x8C,0x9D,0x91,0x1F,0x97,0x6A,0x52,0xCB,0xDE,0x09, -0x36,0xA4,0x77,0xD8,0x7B,0x87,0x50,0x44,0xD5,0x3E,0x6E,0x29,0x69,0xFB,0x39,0x49, -0x26,0x1E,0x09,0xA5,0x80,0x7B,0x40,0x2D,0xEB,0xE8,0x27,0x85,0xC9,0xFE,0x61,0xFD, -0x7E,0xE6,0x7C,0x97,0x1D,0xD5,0x9D,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40, 
-0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01, +0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xD0,0x40,0x8B,0x8B,0x72,0xE3,0x91,0x1B,0xF7, +0x51,0xC1,0x1B,0x54,0x04,0x98,0xD3,0xA9,0xBF,0xC1,0xE6,0x8A,0x5D,0x3B,0x87,0xFB, +0xBB,0x88,0xCE,0x0D,0xE3,0x2F,0x3F,0x06,0x96,0xF0,0xA2,0x29,0x50,0x99,0xAE,0xDB, +0x3B,0xA1,0x57,0xB0,0x74,0x51,0x71,0xCD,0xED,0x42,0x91,0x4D,0x41,0xFE,0xA9,0xC8, +0xD8,0x6A,0x86,0x77,0x44,0xBB,0x59,0x66,0x97,0x50,0x5E,0xB4,0xD4,0x2C,0x70,0x44, +0xCF,0xDA,0x37,0x95,0x42,0x69,0x3C,0x30,0xC4,0x71,0xB3,0x52,0xF0,0x21,0x4D,0xA1, +0xD8,0xBA,0x39,0x7C,0x1C,0x9E,0xA3,0x24,0x9D,0xF2,0x83,0x16,0x98,0xAA,0x16,0x7C, +0x43,0x9B,0x15,0x5B,0xB7,0xAE,0x34,0x91,0xFE,0xD4,0x62,0x26,0x18,0x46,0x9A,0x3F, +0xEB,0xC1,0xF9,0xF1,0x90,0x57,0xEB,0xAC,0x7A,0x0D,0x8B,0xDB,0x72,0x30,0x6A,0x66, +0xD5,0xE0,0x46,0xA3,0x70,0xDC,0x68,0xD9,0xFF,0x04,0x48,0x89,0x77,0xDE,0xB5,0xE9, +0xFB,0x67,0x6D,0x41,0xE9,0xBC,0x39,0xBD,0x32,0xD9,0x62,0x02,0xF1,0xB1,0xA8,0x3D, +0x6E,0x37,0x9C,0xE2,0x2F,0xE2,0xD3,0xA2,0x26,0x8B,0xC6,0xB8,0x55,0x43,0x88,0xE1, +0x23,0x3E,0xA5,0xD2,0x24,0x39,0x6A,0x47,0xAB,0x00,0xD4,0xA1,0xB3,0xA9,0x25,0xFE, +0x0D,0x3F,0xA7,0x1D,0xBA,0xD3,0x51,0xC1,0x0B,0xA4,0xDA,0xAC,0x38,0xEF,0x55,0x50, +0x24,0x05,0x65,0x46,0x93,0x34,0x4F,0x2D,0x8D,0xAD,0xC6,0xD4,0x21,0x19,0xD2,0x8E, +0xCA,0x05,0x61,0x71,0x07,0x73,0x47,0xE5,0x8A,0x19,0x12,0xBD,0x04,0x4D,0xCE,0x4E, +0x9C,0xA5,0x48,0xAC,0xBB,0x26,0xF7,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x8E,0x30, +0x81,0x8B,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x0B,0x58,0xE5, +0x8B,0xC6,0x4C,0x15,0x37,0xA4,0x40,0xA9,0x30,0xA9,0x21,0xBE,0x47,0x36,0x5A,0x56, 0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01, -0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7B,0x5B,0x45,0xCF, -0xAF,0xCE,0xCB,0x7A,0xFD,0x31,0x92,0x1A,0x6A,0xB6,0xF3,0x46,0xEB,0x57,0x48,0x50, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03, 
-0x82,0x01,0x01,0x00,0x79,0x11,0xC0,0x4B,0xB3,0x91,0xB6,0xFC,0xF0,0xE9,0x67,0xD4, -0x0D,0x6E,0x45,0xBE,0x55,0xE8,0x93,0xD2,0xCE,0x03,0x3F,0xED,0xDA,0x25,0xB0,0x1D, -0x57,0xCB,0x1E,0x3A,0x76,0xA0,0x4C,0xEC,0x50,0x76,0xE8,0x64,0x72,0x0C,0xA4,0xA9, -0xF1,0xB8,0x8B,0xD6,0xD6,0x87,0x84,0xBB,0x32,0xE5,0x41,0x11,0xC0,0x77,0xD9,0xB3, -0x60,0x9D,0xEB,0x1B,0xD5,0xD1,0x6E,0x44,0x44,0xA9,0xA6,0x01,0xEC,0x55,0x62,0x1D, -0x77,0xB8,0x5C,0x8E,0x48,0x49,0x7C,0x9C,0x3B,0x57,0x11,0xAC,0xAD,0x73,0x37,0x8E, -0x2F,0x78,0x5C,0x90,0x68,0x47,0xD9,0x60,0x60,0xE6,0xFC,0x07,0x3D,0x22,0x20,0x17, -0xC4,0xF7,0x16,0xE9,0xC4,0xD8,0x72,0xF9,0xC8,0x73,0x7C,0xDF,0x16,0x2F,0x15,0xA9, -0x3E,0xFD,0x6A,0x27,0xB6,0xA1,0xEB,0x5A,0xBA,0x98,0x1F,0xD5,0xE3,0x4D,0x64,0x0A, -0x9D,0x13,0xC8,0x61,0xBA,0xF5,0x39,0x1C,0x87,0xBA,0xB8,0xBD,0x7B,0x22,0x7F,0xF6, -0xFE,0xAC,0x40,0x79,0xE5,0xAC,0x10,0x6F,0x3D,0x8F,0x1B,0x79,0x76,0x8B,0xC4,0x37, -0xB3,0x21,0x18,0x84,0xE5,0x36,0x00,0xEB,0x63,0x20,0x99,0xB9,0xE9,0xFE,0x33,0x04, -0xBB,0x41,0xC8,0xC1,0x02,0xF9,0x44,0x63,0x20,0x9E,0x81,0xCE,0x42,0xD3,0xD6,0x3F, -0x2C,0x76,0xD3,0x63,0x9C,0x59,0xDD,0x8F,0xA6,0xE1,0x0E,0xA0,0x2E,0x41,0xF7,0x2E, -0x95,0x47,0xCF,0xBC,0xFD,0x33,0xF3,0xF6,0x0B,0x61,0x7E,0x7E,0x91,0x2B,0x81,0x47, -0xC2,0x27,0x30,0xEE,0xA7,0x10,0x5D,0x37,0x8F,0x5C,0x39,0x2B,0xE4,0x04,0xF0,0x7B, -0x8D,0x56,0x8C,0x68, -}; - - -/* subject:/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Public CA Root */ -/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Public CA Root */ - - -const unsigned char AddTrust_Public_Services_Root_certificate[1049]={ -0x30,0x82,0x04,0x15,0x30,0x82,0x02,0xFD,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x64,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14, -0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73, -0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41, 
-0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77, -0x6F,0x72,0x6B,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x41,0x64, -0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x43,0x41, -0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,0x31, -0x30,0x34,0x31,0x35,0x30,0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,0x30, -0x34,0x31,0x35,0x30,0x5A,0x30,0x64,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, -0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41, -0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03, -0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54, -0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x20,0x30,0x1E,0x06,0x03,0x55, -0x04,0x03,0x13,0x17,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x75,0x62, -0x6C,0x69,0x63,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82, -0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xE9,0x1A,0x30,0x8F, -0x83,0x88,0x14,0xC1,0x20,0xD8,0x3C,0x9B,0x8F,0x1B,0x7E,0x03,0x74,0xBB,0xDA,0x69, -0xD3,0x46,0xA5,0xF8,0x8E,0xC2,0x0C,0x11,0x90,0x51,0xA5,0x2F,0x66,0x54,0x40,0x55, -0xEA,0xDB,0x1F,0x4A,0x56,0xEE,0x9F,0x23,0x6E,0xF4,0x39,0xCB,0xA1,0xB9,0x6F,0xF2, -0x7E,0xF9,0x5D,0x87,0x26,0x61,0x9E,0x1C,0xF8,0xE2,0xEC,0xA6,0x81,0xF8,0x21,0xC5, -0x24,0xCC,0x11,0x0C,0x3F,0xDB,0x26,0x72,0x7A,0xC7,0x01,0x97,0x07,0x17,0xF9,0xD7, -0x18,0x2C,0x30,0x7D,0x0E,0x7A,0x1E,0x62,0x1E,0xC6,0x4B,0xC0,0xFD,0x7D,0x62,0x77, -0xD3,0x44,0x1E,0x27,0xF6,0x3F,0x4B,0x44,0xB3,0xB7,0x38,0xD9,0x39,0x1F,0x60,0xD5, -0x51,0x92,0x73,0x03,0xB4,0x00,0x69,0xE3,0xF3,0x14,0x4E,0xEE,0xD1,0xDC,0x09,0xCF, -0x77,0x34,0x46,0x50,0xB0,0xF8,0x11,0xF2,0xFE,0x38,0x79,0xF7,0x07,0x39,0xFE,0x51, -0x92,0x97,0x0B,0x5B,0x08,0x5F,0x34,0x86,0x01,0xAD,0x88,0x97,0xEB,0x66,0xCD,0x5E, 
-0xD1,0xFF,0xDC,0x7D,0xF2,0x84,0xDA,0xBA,0x77,0xAD,0xDC,0x80,0x08,0xC7,0xA7,0x87, -0xD6,0x55,0x9F,0x97,0x6A,0xE8,0xC8,0x11,0x64,0xBA,0xE7,0x19,0x29,0x3F,0x11,0xB3, -0x78,0x90,0x84,0x20,0x52,0x5B,0x11,0xEF,0x78,0xD0,0x83,0xF6,0xD5,0x48,0x90,0xD0, -0x30,0x1C,0xCF,0x80,0xF9,0x60,0xFE,0x79,0xE4,0x88,0xF2,0xDD,0x00,0xEB,0x94,0x45, -0xEB,0x65,0x94,0x69,0x40,0xBA,0xC0,0xD5,0xB4,0xB8,0xBA,0x7D,0x04,0x11,0xA8,0xEB, -0x31,0x05,0x96,0x94,0x4E,0x58,0x21,0x8E,0x9F,0xD0,0x60,0xFD,0x02,0x03,0x01,0x00, -0x01,0xA3,0x81,0xD1,0x30,0x81,0xCE,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16, -0x04,0x14,0x81,0x3E,0x37,0xD8,0x92,0xB0,0x1F,0x77,0x9F,0x5C,0xB4,0xAB,0x73,0xAA, -0xE7,0xF6,0x34,0x60,0x2F,0xFA,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03, -0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30, -0x03,0x01,0x01,0xFF,0x30,0x81,0x8E,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,0x86,0x30, -0x81,0x83,0x80,0x14,0x81,0x3E,0x37,0xD8,0x92,0xB0,0x1F,0x77,0x9F,0x5C,0xB4,0xAB, -0x73,0xAA,0xE7,0xF6,0x34,0x60,0x2F,0xFA,0xA1,0x68,0xA4,0x66,0x30,0x64,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41, -0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54, -0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B, -0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x41,0x64,0x64,0x54,0x72, -0x75,0x73,0x74,0x20,0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x43,0x41,0x20,0x52,0x6F, -0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x03,0xF7,0x15,0x4A,0xF8,0x24,0xDA, -0x23,0x56,0x16,0x93,0x76,0xDD,0x36,0x28,0xB9,0xAE,0x1B,0xB8,0xC3,0xF1,0x64,0xBA, -0x20,0x18,0x78,0x95,0x29,0x27,0x57,0x05,0xBC,0x7C,0x2A,0xF4,0xB9,0x51,0x55,0xDA, -0x87,0x02,0xDE,0x0F,0x16,0x17,0x31,0xF8,0xAA,0x79,0x2E,0x09,0x13,0xBB,0xAF,0xB2, 
-0x20,0x19,0x12,0xE5,0x93,0xF9,0x4B,0xF9,0x83,0xE8,0x44,0xD5,0xB2,0x41,0x25,0xBF, -0x88,0x75,0x6F,0xFF,0x10,0xFC,0x4A,0x54,0xD0,0x5F,0xF0,0xFA,0xEF,0x36,0x73,0x7D, -0x1B,0x36,0x45,0xC6,0x21,0x6D,0xB4,0x15,0xB8,0x4E,0xCF,0x9C,0x5C,0xA5,0x3D,0x5A, -0x00,0x8E,0x06,0xE3,0x3C,0x6B,0x32,0x7B,0xF2,0x9F,0xF0,0xB6,0xFD,0xDF,0xF0,0x28, -0x18,0x48,0xF0,0xC6,0xBC,0xD0,0xBF,0x34,0x80,0x96,0xC2,0x4A,0xB1,0x6D,0x8E,0xC7, -0x90,0x45,0xDE,0x2F,0x67,0xAC,0x45,0x04,0xA3,0x7A,0xDC,0x55,0x92,0xC9,0x47,0x66, -0xD8,0x1A,0x8C,0xC7,0xED,0x9C,0x4E,0x9A,0xE0,0x12,0xBB,0xB5,0x6A,0x4C,0x84,0xE1, -0xE1,0x22,0x0D,0x87,0x00,0x64,0xFE,0x8C,0x7D,0x62,0x39,0x65,0xA6,0xEF,0x42,0xB6, -0x80,0x25,0x12,0x61,0x01,0xA8,0x24,0x13,0x70,0x00,0x11,0x26,0x5F,0xFA,0x35,0x50, -0xC5,0x48,0xCC,0x06,0x47,0xE8,0x27,0xD8,0x70,0x8D,0x5F,0x64,0xE6,0xA1,0x44,0x26, -0x5E,0x22,0xEC,0x92,0xCD,0xFF,0x42,0x9A,0x44,0x21,0x6D,0x5C,0xC5,0xE3,0x22,0x1D, -0x5F,0x47,0x12,0xE7,0xCE,0x5F,0x5D,0xFA,0xD8,0xAA,0xB1,0x33,0x2D,0xD9,0x76,0xF2, -0x4E,0x3A,0x33,0x0C,0x2B,0xB3,0x2D,0x90,0x06, -}; - - -/* subject:/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Qualified CA Root */ -/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust TTP Network/CN=AddTrust Qualified CA Root */ - - -const unsigned char AddTrust_Qualified_Certificates_Root_certificate[1058]={ -0x30,0x82,0x04,0x1E,0x30,0x82,0x03,0x06,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x67,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14, -0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73, -0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41, -0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77, -0x6F,0x72,0x6B,0x31,0x23,0x30,0x21,0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x41,0x64, -0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x51,0x75,0x61,0x6C,0x69,0x66,0x69,0x65,0x64, 
-0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35, -0x33,0x30,0x31,0x30,0x34,0x34,0x35,0x30,0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33, -0x30,0x31,0x30,0x34,0x34,0x35,0x30,0x5A,0x30,0x67,0x31,0x0B,0x30,0x09,0x06,0x03, -0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A, -0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30, -0x1B,0x06,0x03,0x55,0x04,0x0B,0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74, -0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x23,0x30,0x21, -0x06,0x03,0x55,0x04,0x03,0x13,0x1A,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20, -0x51,0x75,0x61,0x6C,0x69,0x66,0x69,0x65,0x64,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F, -0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01, -0x01,0x00,0xE4,0x1E,0x9A,0xFE,0xDC,0x09,0x5A,0x87,0xA4,0x9F,0x47,0xBE,0x11,0x5F, -0xAF,0x84,0x34,0xDB,0x62,0x3C,0x79,0x78,0xB7,0xE9,0x30,0xB5,0xEC,0x0C,0x1C,0x2A, -0xC4,0x16,0xFF,0xE0,0xEC,0x71,0xEB,0x8A,0xF5,0x11,0x6E,0xED,0x4F,0x0D,0x91,0xD2, -0x12,0x18,0x2D,0x49,0x15,0x01,0xC2,0xA4,0x22,0x13,0xC7,0x11,0x64,0xFF,0x22,0x12, -0x9A,0xB9,0x8E,0x5C,0x2F,0x08,0xCF,0x71,0x6A,0xB3,0x67,0x01,0x59,0xF1,0x5D,0x46, -0xF3,0xB0,0x78,0xA5,0xF6,0x0E,0x42,0x7A,0xE3,0x7F,0x1B,0xCC,0xD0,0xF0,0xB7,0x28, -0xFD,0x2A,0xEA,0x9E,0xB3,0xB0,0xB9,0x04,0xAA,0xFD,0xF6,0xC7,0xB4,0xB1,0xB8,0x2A, -0xA0,0xFB,0x58,0xF1,0x19,0xA0,0x6F,0x70,0x25,0x7E,0x3E,0x69,0x4A,0x7F,0x0F,0x22, -0xD8,0xEF,0xAD,0x08,0x11,0x9A,0x29,0x99,0xE1,0xAA,0x44,0x45,0x9A,0x12,0x5E,0x3E, -0x9D,0x6D,0x52,0xFC,0xE7,0xA0,0x3D,0x68,0x2F,0xF0,0x4B,0x70,0x7C,0x13,0x38,0xAD, -0xBC,0x15,0x25,0xF1,0xD6,0xCE,0xAB,0xA2,0xC0,0x31,0xD6,0x2F,0x9F,0xE0,0xFF,0x14, -0x59,0xFC,0x84,0x93,0xD9,0x87,0x7C,0x4C,0x54,0x13,0xEB,0x9F,0xD1,0x2D,0x11,0xF8, -0x18,0x3A,0x3A,0xDE,0x25,0xD9,0xF7,0xD3,0x40,0xED,0xA4,0x06,0x12,0xC4,0x3B,0xE1, 
-0x91,0xC1,0x56,0x35,0xF0,0x14,0xDC,0x65,0x36,0x09,0x6E,0xAB,0xA4,0x07,0xC7,0x35, -0xD1,0xC2,0x03,0x33,0x36,0x5B,0x75,0x26,0x6D,0x42,0xF1,0x12,0x6B,0x43,0x6F,0x4B, -0x71,0x94,0xFA,0x34,0x1D,0xED,0x13,0x6E,0xCA,0x80,0x7F,0x98,0x2F,0x6C,0xB9,0x65, -0xD8,0xE9,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xD4,0x30,0x81,0xD1,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x39,0x95,0x8B,0x62,0x8B,0x5C,0xC9,0xD4, -0x80,0xBA,0x58,0x0F,0x97,0x3F,0x15,0x08,0x43,0xCC,0x98,0xA7,0x30,0x0B,0x06,0x03, -0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13, -0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x91,0x06,0x03,0x55, -0x1D,0x23,0x04,0x81,0x89,0x30,0x81,0x86,0x80,0x14,0x39,0x95,0x8B,0x62,0x8B,0x5C, -0xC9,0xD4,0x80,0xBA,0x58,0x0F,0x97,0x3F,0x15,0x08,0x43,0xCC,0x98,0xA7,0xA1,0x6B, -0xA4,0x69,0x30,0x67,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53, -0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54, -0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x0B, -0x13,0x14,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x54,0x54,0x50,0x20,0x4E, -0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x23,0x30,0x21,0x06,0x03,0x55,0x04,0x03,0x13, -0x1A,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x51,0x75,0x61,0x6C,0x69,0x66, -0x69,0x65,0x64,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D, -0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01, -0x01,0x00,0x19,0xAB,0x75,0xEA,0xF8,0x8B,0x65,0x61,0x95,0x13,0xBA,0x69,0x04,0xEF, -0x86,0xCA,0x13,0xA0,0xC7,0xAA,0x4F,0x64,0x1B,0x3F,0x18,0xF6,0xA8,0x2D,0x2C,0x55, -0x8F,0x05,0xB7,0x30,0xEA,0x42,0x6A,0x1D,0xC0,0x25,0x51,0x2D,0xA7,0xBF,0x0C,0xB3, -0xED,0xEF,0x08,0x7F,0x6C,0x3C,0x46,0x1A,0xEA,0x18,0x43,0xDF,0x76,0xCC,0xF9,0x66, -0x86,0x9C,0x2C,0x68,0xF5,0xE9,0x17,0xF8,0x31,0xB3,0x18,0xC4,0xD6,0x48,0x7D,0x23, -0x4C,0x68,0xC1,0x7E,0xBB,0x01,0x14,0x6F,0xC5,0xD9,0x6E,0xDE,0xBB,0x04,0x42,0x6A, 
-0xF8,0xF6,0x5C,0x7D,0xE5,0xDA,0xFA,0x87,0xEB,0x0D,0x35,0x52,0x67,0xD0,0x9E,0x97, -0x76,0x05,0x93,0x3F,0x95,0xC7,0x01,0xE6,0x69,0x55,0x38,0x7F,0x10,0x61,0x99,0xC9, -0xE3,0x5F,0xA6,0xCA,0x3E,0x82,0x63,0x48,0xAA,0xE2,0x08,0x48,0x3E,0xAA,0xF2,0xB2, -0x85,0x62,0xA6,0xB4,0xA7,0xD9,0xBD,0x37,0x9C,0x68,0xB5,0x2D,0x56,0x7D,0xB0,0xB7, -0x3F,0xA0,0xB1,0x07,0xD6,0xE9,0x4F,0xDC,0xDE,0x45,0x71,0x30,0x32,0x7F,0x1B,0x2E, -0x09,0xF9,0xBF,0x52,0xA1,0xEE,0xC2,0x80,0x3E,0x06,0x5C,0x2E,0x55,0x40,0xC1,0x1B, -0xF5,0x70,0x45,0xB0,0xDC,0x5D,0xFA,0xF6,0x72,0x5A,0x77,0xD2,0x63,0xCD,0xCF,0x58, -0x89,0x00,0x42,0x63,0x3F,0x79,0x39,0xD0,0x44,0xB0,0x82,0x6E,0x41,0x19,0xE8,0xDD, -0xE0,0xC1,0x88,0x5A,0xD1,0x1E,0x71,0x93,0x1F,0x24,0x30,0x74,0xE5,0x1E,0xA8,0xDE, -0x3C,0x27,0x37,0x7F,0x83,0xAE,0x9E,0x77,0xCF,0xF0,0x30,0xB1,0xFF,0x4B,0x99,0xE8, -0xC6,0xA1, +0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01, +0x01,0xFF,0x30,0x49,0x06,0x03,0x55,0x1D,0x1F,0x04,0x42,0x30,0x40,0x30,0x3E,0xA0, +0x3C,0xA0,0x3A,0x86,0x38,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E, +0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x43,0x4F,0x4D, +0x4F,0x44,0x4F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E, +0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x2E,0x63,0x72,0x6C,0x30,0x0D,0x06, +0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01, +0x00,0x3E,0x98,0x9E,0x9B,0xF6,0x1B,0xE9,0xD7,0x39,0xB7,0x78,0xAE,0x1D,0x72,0x18, +0x49,0xD3,0x87,0xE4,0x43,0x82,0xEB,0x3F,0xC9,0xAA,0xF5,0xA8,0xB5,0xEF,0x55,0x7C, +0x21,0x52,0x65,0xF9,0xD5,0x0D,0xE1,0x6C,0xF4,0x3E,0x8C,0x93,0x73,0x91,0x2E,0x02, +0xC4,0x4E,0x07,0x71,0x6F,0xC0,0x8F,0x38,0x61,0x08,0xA8,0x1E,0x81,0x0A,0xC0,0x2F, +0x20,0x2F,0x41,0x8B,0x91,0xDC,0x48,0x45,0xBC,0xF1,0xC6,0xDE,0xBA,0x76,0x6B,0x33, +0xC8,0x00,0x2D,0x31,0x46,0x4C,0xED,0xE7,0x9D,0xCF,0x88,0x94,0xFF,0x33,0xC0,0x56, +0xE8,0x24,0x86,0x26,0xB8,0xD8,0x38,0x38,0xDF,0x2A,0x6B,0xDD,0x12,0xCC,0xC7,0x3F, 
+0x47,0x17,0x4C,0xA2,0xC2,0x06,0x96,0x09,0xD6,0xDB,0xFE,0x3F,0x3C,0x46,0x41,0xDF, +0x58,0xE2,0x56,0x0F,0x3C,0x3B,0xC1,0x1C,0x93,0x35,0xD9,0x38,0x52,0xAC,0xEE,0xC8, +0xEC,0x2E,0x30,0x4E,0x94,0x35,0xB4,0x24,0x1F,0x4B,0x78,0x69,0xDA,0xF2,0x02,0x38, +0xCC,0x95,0x52,0x93,0xF0,0x70,0x25,0x59,0x9C,0x20,0x67,0xC4,0xEE,0xF9,0x8B,0x57, +0x61,0xF4,0x92,0x76,0x7D,0x3F,0x84,0x8D,0x55,0xB7,0xE8,0xE5,0xAC,0xD5,0xF1,0xF5, +0x19,0x56,0xA6,0x5A,0xFB,0x90,0x1C,0xAF,0x93,0xEB,0xE5,0x1C,0xD4,0x67,0x97,0x5D, +0x04,0x0E,0xBE,0x0B,0x83,0xA6,0x17,0x83,0xB9,0x30,0x12,0xA0,0xC5,0x33,0x15,0x05, +0xB9,0x0D,0xFB,0xC7,0x05,0x76,0xE3,0xD8,0x4A,0x8D,0xFC,0x34,0x17,0xA3,0xC6,0x21, +0x28,0xBE,0x30,0x45,0x31,0x1E,0xC7,0x78,0xBE,0x58,0x61,0x38,0xAC,0x3B,0xE2,0x01, +0x65, }; -/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */ -/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2008 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G3 */ +/* subject:/OU=GlobalSign ECC Root CA - R4/O=GlobalSign/CN=GlobalSign */ +/* issuer :/OU=GlobalSign ECC Root CA - R4/O=GlobalSign/CN=GlobalSign */ -const unsigned char GeoTrust_Primary_Certification_Authority___G3_certificate[1026]={ -0x30,0x82,0x03,0xFE,0x30,0x82,0x02,0xE6,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x15, -0xAC,0x6E,0x94,0x19,0xB2,0x79,0x4B,0x41,0xF6,0x27,0xA9,0xC3,0x18,0x0F,0x1F,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81, -0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16, -0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73, -0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13, -0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75, -0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75, -0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C, 
-0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54, -0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72, -0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F, -0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x33,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30, -0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32, -0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09, -0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55, -0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63, -0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20, -0x32,0x30,0x30,0x38,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E, -0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69, -0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34, -0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20, -0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, -0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20, -0x2D,0x20,0x47,0x33,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, -0x02,0x82,0x01,0x01,0x00,0xDC,0xE2,0x5E,0x62,0x58,0x1D,0x33,0x57,0x39,0x32,0x33, -0xFA,0xEB,0xCB,0x87,0x8C,0xA7,0xD4,0x4A,0xDD,0x06,0x88,0xEA,0x64,0x8E,0x31,0x98, -0xA5,0x38,0x90,0x1E,0x98,0xCF,0x2E,0x63,0x2B,0xF0,0x46,0xBC,0x44,0xB2,0x89,0xA1, -0xC0,0x28,0x0C,0x49,0x70,0x21,0x95,0x9F,0x64,0xC0,0xA6,0x93,0x12,0x02,0x65,0x26, -0x86,0xC6,0xA5,0x89,0xF0,0xFA,0xD7,0x84,0xA0,0x70,0xAF,0x4F,0x1A,0x97,0x3F,0x06, -0x44,0xD5,0xC9,0xEB,0x72,0x10,0x7D,0xE4,0x31,0x28,0xFB,0x1C,0x61,0xE6,0x28,0x07, -0x44,0x73,0x92,0x22,0x69,0xA7,0x03,0x88,0x6C,0x9D,0x63,0xC8,0x52,0xDA,0x98,0x27, 
-0xE7,0x08,0x4C,0x70,0x3E,0xB4,0xC9,0x12,0xC1,0xC5,0x67,0x83,0x5D,0x33,0xF3,0x03, -0x11,0xEC,0x6A,0xD0,0x53,0xE2,0xD1,0xBA,0x36,0x60,0x94,0x80,0xBB,0x61,0x63,0x6C, -0x5B,0x17,0x7E,0xDF,0x40,0x94,0x1E,0xAB,0x0D,0xC2,0x21,0x28,0x70,0x88,0xFF,0xD6, -0x26,0x6C,0x6C,0x60,0x04,0x25,0x4E,0x55,0x7E,0x7D,0xEF,0xBF,0x94,0x48,0xDE,0xB7, -0x1D,0xDD,0x70,0x8D,0x05,0x5F,0x88,0xA5,0x9B,0xF2,0xC2,0xEE,0xEA,0xD1,0x40,0x41, -0x6D,0x62,0x38,0x1D,0x56,0x06,0xC5,0x03,0x47,0x51,0x20,0x19,0xFC,0x7B,0x10,0x0B, -0x0E,0x62,0xAE,0x76,0x55,0xBF,0x5F,0x77,0xBE,0x3E,0x49,0x01,0x53,0x3D,0x98,0x25, -0x03,0x76,0x24,0x5A,0x1D,0xB4,0xDB,0x89,0xEA,0x79,0xE5,0xB6,0xB3,0x3B,0x3F,0xBA, -0x4C,0x28,0x41,0x7F,0x06,0xAC,0x6A,0x8E,0xC1,0xD0,0xF6,0x05,0x1D,0x7D,0xE6,0x42, -0x86,0xE3,0xA5,0xD5,0x47,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F, -0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30, +const unsigned char GlobalSign_ECC_Root_CA___R4_certificate[485]={ +0x30,0x82,0x01,0xE1,0x30,0x82,0x01,0x87,0xA0,0x03,0x02,0x01,0x02,0x02,0x11,0x2A, +0x38,0xA4,0x1C,0x96,0x0A,0x04,0xDE,0x42,0xB2,0x28,0xA5,0x0B,0xE8,0x34,0x98,0x02, +0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x02,0x30,0x50,0x31,0x24, +0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, +0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20, +0x2D,0x20,0x52,0x34,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47, +0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55, +0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E, +0x17,0x0D,0x31,0x32,0x31,0x31,0x31,0x33,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17, +0x0D,0x33,0x38,0x30,0x31,0x31,0x39,0x30,0x33,0x31,0x34,0x30,0x37,0x5A,0x30,0x50, +0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61, +0x6C,0x53,0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43, 
+0x41,0x20,0x2D,0x20,0x52,0x34,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13, +0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06, +0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, +0x30,0x59,0x30,0x13,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x08,0x2A, +0x86,0x48,0xCE,0x3D,0x03,0x01,0x07,0x03,0x42,0x00,0x04,0xB8,0xC6,0x79,0xD3,0x8F, +0x6C,0x25,0x0E,0x9F,0x2E,0x39,0x19,0x1C,0x03,0xA4,0xAE,0x9A,0xE5,0x39,0x07,0x09, +0x16,0xCA,0x63,0xB1,0xB9,0x86,0xF8,0x8A,0x57,0xC1,0x57,0xCE,0x42,0xFA,0x73,0xA1, +0xF7,0x65,0x42,0xFF,0x1E,0xC1,0x00,0xB2,0x6E,0x73,0x0E,0xFF,0xC7,0x21,0xE5,0x18, +0xA4,0xAA,0xD9,0x71,0x3F,0xA8,0xD4,0xB9,0xCE,0x8C,0x1D,0xA3,0x42,0x30,0x40,0x30, 0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30, -0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC4,0x79,0xCA,0x8E,0xA1,0x4E, -0x03,0x1D,0x1C,0xDC,0x6B,0xDB,0x31,0x5B,0x94,0x3E,0x3F,0x30,0x7F,0x2D,0x30,0x0D, -0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01, -0x01,0x00,0x2D,0xC5,0x13,0xCF,0x56,0x80,0x7B,0x7A,0x78,0xBD,0x9F,0xAE,0x2C,0x99, -0xE7,0xEF,0xDA,0xDF,0x94,0x5E,0x09,0x69,0xA7,0xE7,0x6E,0x68,0x8C,0xBD,0x72,0xBE, -0x47,0xA9,0x0E,0x97,0x12,0xB8,0x4A,0xF1,0x64,0xD3,0x39,0xDF,0x25,0x34,0xD4,0xC1, -0xCD,0x4E,0x81,0xF0,0x0F,0x04,0xC4,0x24,0xB3,0x34,0x96,0xC6,0xA6,0xAA,0x30,0xDF, -0x68,0x61,0x73,0xD7,0xF9,0x8E,0x85,0x89,0xEF,0x0E,0x5E,0x95,0x28,0x4A,0x2A,0x27, -0x8F,0x10,0x8E,0x2E,0x7C,0x86,0xC4,0x02,0x9E,0xDA,0x0C,0x77,0x65,0x0E,0x44,0x0D, -0x92,0xFD,0xFD,0xB3,0x16,0x36,0xFA,0x11,0x0D,0x1D,0x8C,0x0E,0x07,0x89,0x6A,0x29, -0x56,0xF7,0x72,0xF4,0xDD,0x15,0x9C,0x77,0x35,0x66,0x57,0xAB,0x13,0x53,0xD8,0x8E, -0xC1,0x40,0xC5,0xD7,0x13,0x16,0x5A,0x72,0xC7,0xB7,0x69,0x01,0xC4,0x7A,0xB1,0x83, -0x01,0x68,0x7D,0x8D,0x41,0xA1,0x94,0x18,0xC1,0x25,0x5C,0xFC,0xF0,0xFE,0x83,0x02, -0x87,0x7C,0x0D,0x0D,0xCF,0x2E,0x08,0x5C,0x4A,0x40,0x0D,0x3E,0xEC,0x81,0x61,0xE6, 
-0x24,0xDB,0xCA,0xE0,0x0E,0x2D,0x07,0xB2,0x3E,0x56,0xDC,0x8D,0xF5,0x41,0x85,0x07, -0x48,0x9B,0x0C,0x0B,0xCB,0x49,0x3F,0x7D,0xEC,0xB7,0xFD,0xCB,0x8D,0x67,0x89,0x1A, -0xAB,0xED,0xBB,0x1E,0xA3,0x00,0x08,0x08,0x17,0x2A,0x82,0x5C,0x31,0x5D,0x46,0x8A, -0x2D,0x0F,0x86,0x9B,0x74,0xD9,0x45,0xFB,0xD4,0x40,0xB1,0x7A,0xAA,0x68,0x2D,0x86, -0xB2,0x99,0x22,0xE1,0xC1,0x2B,0xC7,0x9C,0xF8,0xF3,0x5F,0xA8,0x82,0x12,0xEB,0x19, -0x11,0x2D, -}; - - -/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */ -/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA 2 */ - - -const unsigned char GeoTrust_Universal_CA_2_certificate[1392]={ -0x30,0x82,0x05,0x6C,0x30,0x82,0x03,0x54,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x47,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16, -0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73, -0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13, -0x17,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72, -0x73,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33, -0x30,0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x30, -0x34,0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x30,0x47,0x31,0x0B,0x30,0x09,0x06,0x03, -0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A, -0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31, -0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x47,0x65,0x6F,0x54,0x72,0x75, -0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x20, -0x32,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02, -0x01,0x00,0xB3,0x54,0x52,0xC1,0xC9,0x3E,0xF2,0xD9,0xDC,0xB1,0x53,0x1A,0x59,0x29, 
-0xE7,0xB1,0xC3,0x45,0x28,0xE5,0xD7,0xD1,0xED,0xC5,0xC5,0x4B,0xA1,0xAA,0x74,0x7B, -0x57,0xAF,0x4A,0x26,0xFC,0xD8,0xF5,0x5E,0xA7,0x6E,0x19,0xDB,0x74,0x0C,0x4F,0x35, -0x5B,0x32,0x0B,0x01,0xE3,0xDB,0xEB,0x7A,0x77,0x35,0xEA,0xAA,0x5A,0xE0,0xD6,0xE8, -0xA1,0x57,0x94,0xF0,0x90,0xA3,0x74,0x56,0x94,0x44,0x30,0x03,0x1E,0x5C,0x4E,0x2B, -0x85,0x26,0x74,0x82,0x7A,0x0C,0x76,0xA0,0x6F,0x4D,0xCE,0x41,0x2D,0xA0,0x15,0x06, -0x14,0x5F,0xB7,0x42,0xCD,0x7B,0x8F,0x58,0x61,0x34,0xDC,0x2A,0x08,0xF9,0x2E,0xC3, -0x01,0xA6,0x22,0x44,0x1C,0x4C,0x07,0x82,0xE6,0x5B,0xCE,0xD0,0x4A,0x7C,0x04,0xD3, -0x19,0x73,0x27,0xF0,0xAA,0x98,0x7F,0x2E,0xAF,0x4E,0xEB,0x87,0x1E,0x24,0x77,0x6A, -0x5D,0xB6,0xE8,0x5B,0x45,0xBA,0xDC,0xC3,0xA1,0x05,0x6F,0x56,0x8E,0x8F,0x10,0x26, -0xA5,0x49,0xC3,0x2E,0xD7,0x41,0x87,0x22,0xE0,0x4F,0x86,0xCA,0x60,0xB5,0xEA,0xA1, -0x63,0xC0,0x01,0x97,0x10,0x79,0xBD,0x00,0x3C,0x12,0x6D,0x2B,0x15,0xB1,0xAC,0x4B, -0xB1,0xEE,0x18,0xB9,0x4E,0x96,0xDC,0xDC,0x76,0xFF,0x3B,0xBE,0xCF,0x5F,0x03,0xC0, -0xFC,0x3B,0xE8,0xBE,0x46,0x1B,0xFF,0xDA,0x40,0xC2,0x52,0xF7,0xFE,0xE3,0x3A,0xF7, -0x6A,0x77,0x35,0xD0,0xDA,0x8D,0xEB,0x5E,0x18,0x6A,0x31,0xC7,0x1E,0xBA,0x3C,0x1B, -0x28,0xD6,0x6B,0x54,0xC6,0xAA,0x5B,0xD7,0xA2,0x2C,0x1B,0x19,0xCC,0xA2,0x02,0xF6, -0x9B,0x59,0xBD,0x37,0x6B,0x86,0xB5,0x6D,0x82,0xBA,0xD8,0xEA,0xC9,0x56,0xBC,0xA9, -0x36,0x58,0xFD,0x3E,0x19,0xF3,0xED,0x0C,0x26,0xA9,0x93,0x38,0xF8,0x4F,0xC1,0x5D, -0x22,0x06,0xD0,0x97,0xEA,0xE1,0xAD,0xC6,0x55,0xE0,0x81,0x2B,0x28,0x83,0x3A,0xFA, -0xF4,0x7B,0x21,0x51,0x00,0xBE,0x52,0x38,0xCE,0xCD,0x66,0x79,0xA8,0xF4,0x81,0x56, -0xE2,0xD0,0x83,0x09,0x47,0x51,0x5B,0x50,0x6A,0xCF,0xDB,0x48,0x1A,0x5D,0x3E,0xF7, -0xCB,0xF6,0x65,0xF7,0x6C,0xF1,0x95,0xF8,0x02,0x3B,0x32,0x56,0x82,0x39,0x7A,0x5B, -0xBD,0x2F,0x89,0x1B,0xBF,0xA1,0xB4,0xE8,0xFF,0x7F,0x8D,0x8C,0xDF,0x03,0xF1,0x60, -0x4E,0x58,0x11,0x4C,0xEB,0xA3,0x3F,0x10,0x2B,0x83,0x9A,0x01,0x73,0xD9,0x94,0x6D, -0x84,0x00,0x27,0x66,0xAC,0xF0,0x70,0x40,0x09,0x42,0x92,0xAD,0x4F,0x93,0x0D,0x61, 
-0x09,0x51,0x24,0xD8,0x92,0xD5,0x0B,0x94,0x61,0xB2,0x87,0xB2,0xED,0xFF,0x9A,0x35, -0xFF,0x85,0x54,0xCA,0xED,0x44,0x43,0xAC,0x1B,0x3C,0x16,0x6B,0x48,0x4A,0x0A,0x1C, -0x40,0x88,0x1F,0x92,0xC2,0x0B,0x00,0x05,0xFF,0xF2,0xC8,0x02,0x4A,0xA4,0xAA,0xA9, -0xCC,0x99,0x96,0x9C,0x2F,0x58,0xE0,0x7D,0xE1,0xBE,0xBB,0x07,0xDC,0x5F,0x04,0x72, -0x5C,0x31,0x34,0xC3,0xEC,0x5F,0x2D,0xE0,0x3D,0x64,0x90,0x22,0xE6,0xD1,0xEC,0xB8, -0x2E,0xDD,0x59,0xAE,0xD9,0xA1,0x37,0xBF,0x54,0x35,0xDC,0x73,0x32,0x4F,0x8C,0x04, -0x1E,0x33,0xB2,0xC9,0x46,0xF1,0xD8,0x5C,0xC8,0x55,0x50,0xC9,0x68,0xBD,0xA8,0xBA, -0x36,0x09,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55, -0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03, -0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB,0xF0, -0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x1F,0x06,0x03,0x55, -0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x76,0xF3,0x55,0xE1,0xFA,0xA4,0x36,0xFB, -0xF0,0x9F,0x5C,0x62,0x71,0xED,0x3C,0xF4,0x47,0x38,0x10,0x2B,0x30,0x0E,0x06,0x03, -0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09, -0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00, -0x66,0xC1,0xC6,0x23,0xF3,0xD9,0xE0,0x2E,0x6E,0x5F,0xE8,0xCF,0xAE,0xB0,0xB0,0x25, -0x4D,0x2B,0xF8,0x3B,0x58,0x9B,0x40,0x24,0x37,0x5A,0xCB,0xAB,0x16,0x49,0xFF,0xB3, -0x75,0x79,0x33,0xA1,0x2F,0x6D,0x70,0x17,0x34,0x91,0xFE,0x67,0x7E,0x8F,0xEC,0x9B, -0xE5,0x5E,0x82,0xA9,0x55,0x1F,0x2F,0xDC,0xD4,0x51,0x07,0x12,0xFE,0xAC,0x16,0x3E, -0x2C,0x35,0xC6,0x63,0xFC,0xDC,0x10,0xEB,0x0D,0xA3,0xAA,0xD0,0x7C,0xCC,0xD1,0xD0, -0x2F,0x51,0x2E,0xC4,0x14,0x5A,0xDE,0xE8,0x19,0xE1,0x3E,0xC6,0xCC,0xA4,0x29,0xE7, -0x2E,0x84,0xAA,0x06,0x30,0x78,0x76,0x54,0x73,0x28,0x98,0x59,0x38,0xE0,0x00,0x0D, -0x62,0xD3,0x42,0x7D,0x21,0x9F,0xAE,0x3D,0x3A,0x8C,0xD5,0xFA,0x77,0x0D,0x18,0x2B, -0x16,0x0E,0x5F,0x36,0xE1,0xFC,0x2A,0xB5,0x30,0x24,0xCF,0xE0,0x63,0x0C,0x7B,0x58, 
-0x1A,0xFE,0x99,0xBA,0x42,0x12,0xB1,0x91,0xF4,0x7C,0x68,0xE2,0xC8,0xE8,0xAF,0x2C, -0xEA,0xC9,0x7E,0xAE,0xBB,0x2A,0x3D,0x0D,0x15,0xDC,0x34,0x95,0xB6,0x18,0x74,0xA8, -0x6A,0x0F,0xC7,0xB4,0xF4,0x13,0xC4,0xE4,0x5B,0xED,0x0A,0xD2,0xA4,0x97,0x4C,0x2A, -0xED,0x2F,0x6C,0x12,0x89,0x3D,0xF1,0x27,0x70,0xAA,0x6A,0x03,0x52,0x21,0x9F,0x40, -0xA8,0x67,0x50,0xF2,0xF3,0x5A,0x1F,0xDF,0xDF,0x23,0xF6,0xDC,0x78,0x4E,0xE6,0x98, -0x4F,0x55,0x3A,0x53,0xE3,0xEF,0xF2,0xF4,0x9F,0xC7,0x7C,0xD8,0x58,0xAF,0x29,0x22, -0x97,0xB8,0xE0,0xBD,0x91,0x2E,0xB0,0x76,0xEC,0x57,0x11,0xCF,0xEF,0x29,0x44,0xF3, -0xE9,0x85,0x7A,0x60,0x63,0xE4,0x5D,0x33,0x89,0x17,0xD9,0x31,0xAA,0xDA,0xD6,0xF3, -0x18,0x35,0x72,0xCF,0x87,0x2B,0x2F,0x63,0x23,0x84,0x5D,0x84,0x8C,0x3F,0x57,0xA0, -0x88,0xFC,0x99,0x91,0x28,0x26,0x69,0x99,0xD4,0x8F,0x97,0x44,0xBE,0x8E,0xD5,0x48, -0xB1,0xA4,0x28,0x29,0xF1,0x15,0xB4,0xE1,0xE5,0x9E,0xDD,0xF8,0x8F,0xA6,0x6F,0x26, -0xD7,0x09,0x3C,0x3A,0x1C,0x11,0x0E,0xA6,0x6C,0x37,0xF7,0xAD,0x44,0x87,0x2C,0x28, -0xC7,0xD8,0x74,0x82,0xB3,0xD0,0x6F,0x4A,0x57,0xBB,0x35,0x29,0x27,0xA0,0x8B,0xE8, -0x21,0xA7,0x87,0x64,0x36,0x5D,0xCC,0xD8,0x16,0xAC,0xC7,0xB2,0x27,0x40,0x92,0x55, -0x38,0x28,0x8D,0x51,0x6E,0xDD,0x14,0x67,0x53,0x6C,0x71,0x5C,0x26,0x84,0x4D,0x75, -0x5A,0xB6,0x7E,0x60,0x56,0xA9,0x4D,0xAD,0xFB,0x9B,0x1E,0x97,0xF3,0x0D,0xD9,0xD2, -0x97,0x54,0x77,0xDA,0x3D,0x12,0xB7,0xE0,0x1E,0xEF,0x08,0x06,0xAC,0xF9,0x85,0x87, -0xE9,0xA2,0xDC,0xAF,0x7E,0x18,0x12,0x83,0xFD,0x56,0x17,0x41,0x2E,0xD5,0x29,0x82, -0x7D,0x99,0xF4,0x31,0xF6,0x71,0xA9,0xCF,0x2C,0x01,0x27,0xA5,0x05,0xB9,0xAA,0xB2, -0x48,0x4E,0x2A,0xEF,0x9F,0x93,0x52,0x51,0x95,0x3C,0x52,0x73,0x8E,0x56,0x4C,0x17, -0x40,0xC0,0x09,0x28,0xE4,0x8B,0x6A,0x48,0x53,0xDB,0xEC,0xCD,0x55,0x55,0xF1,0xC6, -0xF8,0xE9,0xA2,0x2C,0x4C,0xA6,0xD1,0x26,0x5F,0x7E,0xAF,0x5A,0x4C,0xDA,0x1F,0xA6, -0xF2,0x1C,0x2C,0x7E,0xAE,0x02,0x16,0xD2,0x56,0xD0,0x2F,0x57,0x53,0x47,0xE8,0x92, -}; - - -/* subject:/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */ -/* issuer 
:/C=IE/O=Baltimore/OU=CyberTrust/CN=Baltimore CyberTrust Root */ - - -const unsigned char Baltimore_CyberTrust_Root_certificate[891]={ -0x30,0x82,0x03,0x77,0x30,0x82,0x02,0x5F,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x02, -0x00,0x00,0xB9,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05, -0x05,0x00,0x30,0x5A,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49, -0x45,0x31,0x12,0x30,0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74, -0x69,0x6D,0x6F,0x72,0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A, -0x43,0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03, -0x55,0x04,0x03,0x13,0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43, -0x79,0x62,0x65,0x72,0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E, -0x17,0x0D,0x30,0x30,0x30,0x35,0x31,0x32,0x31,0x38,0x34,0x36,0x30,0x30,0x5A,0x17, -0x0D,0x32,0x35,0x30,0x35,0x31,0x32,0x32,0x33,0x35,0x39,0x30,0x30,0x5A,0x30,0x5A, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x49,0x45,0x31,0x12,0x30, -0x10,0x06,0x03,0x55,0x04,0x0A,0x13,0x09,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72, -0x65,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0B,0x13,0x0A,0x43,0x79,0x62,0x65, -0x72,0x54,0x72,0x75,0x73,0x74,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13, -0x19,0x42,0x61,0x6C,0x74,0x69,0x6D,0x6F,0x72,0x65,0x20,0x43,0x79,0x62,0x65,0x72, -0x54,0x72,0x75,0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D, -0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01, -0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xA3,0x04,0xBB,0x22,0xAB, -0x98,0x3D,0x57,0xE8,0x26,0x72,0x9A,0xB5,0x79,0xD4,0x29,0xE2,0xE1,0xE8,0x95,0x80, -0xB1,0xB0,0xE3,0x5B,0x8E,0x2B,0x29,0x9A,0x64,0xDF,0xA1,0x5D,0xED,0xB0,0x09,0x05, -0x6D,0xDB,0x28,0x2E,0xCE,0x62,0xA2,0x62,0xFE,0xB4,0x88,0xDA,0x12,0xEB,0x38,0xEB, -0x21,0x9D,0xC0,0x41,0x2B,0x01,0x52,0x7B,0x88,0x77,0xD3,0x1C,0x8F,0xC7,0xBA,0xB9, 
-0x88,0xB5,0x6A,0x09,0xE7,0x73,0xE8,0x11,0x40,0xA7,0xD1,0xCC,0xCA,0x62,0x8D,0x2D, -0xE5,0x8F,0x0B,0xA6,0x50,0xD2,0xA8,0x50,0xC3,0x28,0xEA,0xF5,0xAB,0x25,0x87,0x8A, -0x9A,0x96,0x1C,0xA9,0x67,0xB8,0x3F,0x0C,0xD5,0xF7,0xF9,0x52,0x13,0x2F,0xC2,0x1B, -0xD5,0x70,0x70,0xF0,0x8F,0xC0,0x12,0xCA,0x06,0xCB,0x9A,0xE1,0xD9,0xCA,0x33,0x7A, -0x77,0xD6,0xF8,0xEC,0xB9,0xF1,0x68,0x44,0x42,0x48,0x13,0xD2,0xC0,0xC2,0xA4,0xAE, -0x5E,0x60,0xFE,0xB6,0xA6,0x05,0xFC,0xB4,0xDD,0x07,0x59,0x02,0xD4,0x59,0x18,0x98, -0x63,0xF5,0xA5,0x63,0xE0,0x90,0x0C,0x7D,0x5D,0xB2,0x06,0x7A,0xF3,0x85,0xEA,0xEB, -0xD4,0x03,0xAE,0x5E,0x84,0x3E,0x5F,0xFF,0x15,0xED,0x69,0xBC,0xF9,0x39,0x36,0x72, -0x75,0xCF,0x77,0x52,0x4D,0xF3,0xC9,0x90,0x2C,0xB9,0x3D,0xE5,0xC9,0x23,0x53,0x3F, -0x1F,0x24,0x98,0x21,0x5C,0x07,0x99,0x29,0xBD,0xC6,0x3A,0xEC,0xE7,0x6E,0x86,0x3A, -0x6B,0x97,0x74,0x63,0x33,0xBD,0x68,0x18,0x31,0xF0,0x78,0x8D,0x76,0xBF,0xFC,0x9E, -0x8E,0x5D,0x2A,0x86,0xA7,0x4D,0x90,0xDC,0x27,0x1A,0x39,0x02,0x03,0x01,0x00,0x01, -0xA3,0x45,0x30,0x43,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xE5, -0x9D,0x59,0x30,0x82,0x47,0x58,0xCC,0xAC,0xFA,0x08,0x54,0x36,0x86,0x7B,0x3A,0xB5, -0x04,0x4D,0xF0,0x30,0x12,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x08,0x30, -0x06,0x01,0x01,0xFF,0x02,0x01,0x03,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01, -0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, -0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x85,0x0C,0x5D,0x8E,0xE4, -0x6F,0x51,0x68,0x42,0x05,0xA0,0xDD,0xBB,0x4F,0x27,0x25,0x84,0x03,0xBD,0xF7,0x64, -0xFD,0x2D,0xD7,0x30,0xE3,0xA4,0x10,0x17,0xEB,0xDA,0x29,0x29,0xB6,0x79,0x3F,0x76, -0xF6,0x19,0x13,0x23,0xB8,0x10,0x0A,0xF9,0x58,0xA4,0xD4,0x61,0x70,0xBD,0x04,0x61, -0x6A,0x12,0x8A,0x17,0xD5,0x0A,0xBD,0xC5,0xBC,0x30,0x7C,0xD6,0xE9,0x0C,0x25,0x8D, -0x86,0x40,0x4F,0xEC,0xCC,0xA3,0x7E,0x38,0xC6,0x37,0x11,0x4F,0xED,0xDD,0x68,0x31, -0x8E,0x4C,0xD2,0xB3,0x01,0x74,0xEE,0xBE,0x75,0x5E,0x07,0x48,0x1A,0x7F,0x70,0xFF, 
-0x16,0x5C,0x84,0xC0,0x79,0x85,0xB8,0x05,0xFD,0x7F,0xBE,0x65,0x11,0xA3,0x0F,0xC0, -0x02,0xB4,0xF8,0x52,0x37,0x39,0x04,0xD5,0xA9,0x31,0x7A,0x18,0xBF,0xA0,0x2A,0xF4, -0x12,0x99,0xF7,0xA3,0x45,0x82,0xE3,0x3C,0x5E,0xF5,0x9D,0x9E,0xB5,0xC8,0x9E,0x7C, -0x2E,0xC8,0xA4,0x9E,0x4E,0x08,0x14,0x4B,0x6D,0xFD,0x70,0x6D,0x6B,0x1A,0x63,0xBD, -0x64,0xE6,0x1F,0xB7,0xCE,0xF0,0xF2,0x9F,0x2E,0xBB,0x1B,0xB7,0xF2,0x50,0x88,0x73, -0x92,0xC2,0xE2,0xE3,0x16,0x8D,0x9A,0x32,0x02,0xAB,0x8E,0x18,0xDD,0xE9,0x10,0x11, -0xEE,0x7E,0x35,0xAB,0x90,0xAF,0x3E,0x30,0x94,0x7A,0xD0,0x33,0x3D,0xA7,0x65,0x0F, -0xF5,0xFC,0x8E,0x9E,0x62,0xCF,0x47,0x44,0x2C,0x01,0x5D,0xBB,0x1D,0xB5,0x32,0xD2, -0x47,0xD2,0x38,0x2E,0xD0,0xFE,0x81,0xDC,0x32,0x6A,0x1E,0xB5,0xEE,0x3C,0xD5,0xFC, -0xE7,0x81,0x1D,0x19,0xC3,0x24,0x42,0xEA,0x63,0x39,0xA9, +0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, +0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x54,0xB0,0x7B,0xAD,0x45, +0xB8,0xE2,0x40,0x7F,0xFB,0x0A,0x6E,0xFB,0xBE,0x33,0xC9,0x3C,0xA3,0x84,0xD5,0x30, +0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x02,0x03,0x48,0x00,0x30,0x45, +0x02,0x21,0x00,0xDC,0x92,0xA1,0xA0,0x13,0xA6,0xCF,0x03,0xB0,0xE6,0xC4,0x21,0x97, +0x90,0xFA,0x14,0x57,0x2D,0x03,0xEC,0xEE,0x3C,0xD3,0x6E,0xCA,0xA8,0x6C,0x76,0xBC, +0xA2,0xDE,0xBB,0x02,0x20,0x27,0xA8,0x85,0x27,0x35,0x9B,0x56,0xC6,0xA3,0xF2,0x47, +0xD2,0xB7,0x6E,0x1B,0x02,0x00,0x17,0xAA,0x67,0xA6,0x15,0x91,0xDE,0xFA,0x94,0xEC, +0x7B,0x0B,0xF8,0x9F,0x84, }; - - -/* subject:/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */ -/* issuer :/OU=GlobalSign Root CA - R2/O=GlobalSign/CN=GlobalSign */ - - -const unsigned char GlobalSign_Root_CA___R2_certificate[958]={ -0x30,0x82,0x03,0xBA,0x30,0x82,0x02,0xA2,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, -0x00,0x00,0x00,0x00,0x01,0x0F,0x86,0x26,0xE6,0x0D,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06, 
-0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, -0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30, -0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69, -0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F, -0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31, -0x35,0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35, -0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55, -0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52, -0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x32,0x31,0x13,0x30,0x11,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, -0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61, -0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48, -0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01, -0x0A,0x02,0x82,0x01,0x01,0x00,0xA6,0xCF,0x24,0x0E,0xBE,0x2E,0x6F,0x28,0x99,0x45, -0x42,0xC4,0xAB,0x3E,0x21,0x54,0x9B,0x0B,0xD3,0x7F,0x84,0x70,0xFA,0x12,0xB3,0xCB, -0xBF,0x87,0x5F,0xC6,0x7F,0x86,0xD3,0xB2,0x30,0x5C,0xD6,0xFD,0xAD,0xF1,0x7B,0xDC, -0xE5,0xF8,0x60,0x96,0x09,0x92,0x10,0xF5,0xD0,0x53,0xDE,0xFB,0x7B,0x7E,0x73,0x88, -0xAC,0x52,0x88,0x7B,0x4A,0xA6,0xCA,0x49,0xA6,0x5E,0xA8,0xA7,0x8C,0x5A,0x11,0xBC, -0x7A,0x82,0xEB,0xBE,0x8C,0xE9,0xB3,0xAC,0x96,0x25,0x07,0x97,0x4A,0x99,0x2A,0x07, -0x2F,0xB4,0x1E,0x77,0xBF,0x8A,0x0F,0xB5,0x02,0x7C,0x1B,0x96,0xB8,0xC5,0xB9,0x3A, -0x2C,0xBC,0xD6,0x12,0xB9,0xEB,0x59,0x7D,0xE2,0xD0,0x06,0x86,0x5F,0x5E,0x49,0x6A, -0xB5,0x39,0x5E,0x88,0x34,0xEC,0xBC,0x78,0x0C,0x08,0x98,0x84,0x6C,0xA8,0xCD,0x4B, -0xB4,0xA0,0x7D,0x0C,0x79,0x4D,0xF0,0xB8,0x2D,0xCB,0x21,0xCA,0xD5,0x6C,0x5B,0x7D, -0xE1,0xA0,0x29,0x84,0xA1,0xF9,0xD3,0x94,0x49,0xCB,0x24,0x62,0x91,0x20,0xBC,0xDD, 
-0x0B,0xD5,0xD9,0xCC,0xF9,0xEA,0x27,0x0A,0x2B,0x73,0x91,0xC6,0x9D,0x1B,0xAC,0xC8, -0xCB,0xE8,0xE0,0xA0,0xF4,0x2F,0x90,0x8B,0x4D,0xFB,0xB0,0x36,0x1B,0xF6,0x19,0x7A, -0x85,0xE0,0x6D,0xF2,0x61,0x13,0x88,0x5C,0x9F,0xE0,0x93,0x0A,0x51,0x97,0x8A,0x5A, -0xCE,0xAF,0xAB,0xD5,0xF7,0xAA,0x09,0xAA,0x60,0xBD,0xDC,0xD9,0x5F,0xDF,0x72,0xA9, -0x60,0x13,0x5E,0x00,0x01,0xC9,0x4A,0xFA,0x3F,0xA4,0xEA,0x07,0x03,0x21,0x02,0x8E, -0x82,0xCA,0x03,0xC2,0x9B,0x8F,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x9C,0x30,0x81, -0x99,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01, -0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01, -0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9B,0xE2,0x07, -0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06,0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86, -0x2E,0x30,0x36,0x06,0x03,0x55,0x1D,0x1F,0x04,0x2F,0x30,0x2D,0x30,0x2B,0xA0,0x29, -0xA0,0x27,0x86,0x25,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x67, -0x6C,0x6F,0x62,0x61,0x6C,0x73,0x69,0x67,0x6E,0x2E,0x6E,0x65,0x74,0x2F,0x72,0x6F, -0x6F,0x74,0x2D,0x72,0x32,0x2E,0x63,0x72,0x6C,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23, -0x04,0x18,0x30,0x16,0x80,0x14,0x9B,0xE2,0x07,0x57,0x67,0x1C,0x1E,0xC0,0x6A,0x06, -0xDE,0x59,0xB4,0x9A,0x2D,0xDF,0xDC,0x19,0x86,0x2E,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0x81, -0x53,0x87,0x1C,0x68,0x97,0x86,0x91,0xEC,0xE0,0x4A,0xB8,0x44,0x0B,0xAB,0x81,0xAC, -0x27,0x4F,0xD6,0xC1,0xB8,0x1C,0x43,0x78,0xB3,0x0C,0x9A,0xFC,0xEA,0x2C,0x3C,0x6E, -0x61,0x1B,0x4D,0x4B,0x29,0xF5,0x9F,0x05,0x1D,0x26,0xC1,0xB8,0xE9,0x83,0x00,0x62, -0x45,0xB6,0xA9,0x08,0x93,0xB9,0xA9,0x33,0x4B,0x18,0x9A,0xC2,0xF8,0x87,0x88,0x4E, -0xDB,0xDD,0x71,0x34,0x1A,0xC1,0x54,0xDA,0x46,0x3F,0xE0,0xD3,0x2A,0xAB,0x6D,0x54, -0x22,0xF5,0x3A,0x62,0xCD,0x20,0x6F,0xBA,0x29,0x89,0xD7,0xDD,0x91,0xEE,0xD3,0x5C, -0xA2,0x3E,0xA1,0x5B,0x41,0xF5,0xDF,0xE5,0x64,0x43,0x2D,0xE9,0xD5,0x39,0xAB,0xD2, 
-0xA2,0xDF,0xB7,0x8B,0xD0,0xC0,0x80,0x19,0x1C,0x45,0xC0,0x2D,0x8C,0xE8,0xF8,0x2D, -0xA4,0x74,0x56,0x49,0xC5,0x05,0xB5,0x4F,0x15,0xDE,0x6E,0x44,0x78,0x39,0x87,0xA8, -0x7E,0xBB,0xF3,0x79,0x18,0x91,0xBB,0xF4,0x6F,0x9D,0xC1,0xF0,0x8C,0x35,0x8C,0x5D, -0x01,0xFB,0xC3,0x6D,0xB9,0xEF,0x44,0x6D,0x79,0x46,0x31,0x7E,0x0A,0xFE,0xA9,0x82, -0xC1,0xFF,0xEF,0xAB,0x6E,0x20,0xC4,0x50,0xC9,0x5F,0x9D,0x4D,0x9B,0x17,0x8C,0x0C, -0xE5,0x01,0xC9,0xA0,0x41,0x6A,0x73,0x53,0xFA,0xA5,0x50,0xB4,0x6E,0x25,0x0F,0xFB, -0x4C,0x18,0xF4,0xFD,0x52,0xD9,0x8E,0x69,0xB1,0xE8,0x11,0x0F,0xDE,0x88,0xD8,0xFB, -0x1D,0x49,0xF7,0xAA,0xDE,0x95,0xCF,0x20,0x78,0xC2,0x60,0x12,0xDB,0x25,0x40,0x8C, -0x6A,0xFC,0x7E,0x42,0x38,0x40,0x64,0x12,0xF7,0x9E,0x81,0xE1,0x93,0x2E, -}; - - -/* subject:/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */ -/* issuer :/OU=GlobalSign Root CA - R3/O=GlobalSign/CN=GlobalSign */ - - -const unsigned char GlobalSign_Root_CA___R3_certificate[867]={ -0x30,0x82,0x03,0x5F,0x30,0x82,0x02,0x47,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, -0x00,0x00,0x00,0x00,0x01,0x21,0x58,0x53,0x08,0xA2,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06, -0x03,0x55,0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, -0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30, -0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69, -0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F, -0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x33,0x31, -0x38,0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x31,0x38, -0x31,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x4C,0x31,0x20,0x30,0x1E,0x06,0x03,0x55, -0x04,0x0B,0x13,0x17,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x20,0x52, -0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x52,0x33,0x31,0x13,0x30,0x11,0x06, 
-0x03,0x55,0x04,0x0A,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, -0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61, -0x6C,0x53,0x69,0x67,0x6E,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48, -0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01, -0x0A,0x02,0x82,0x01,0x01,0x00,0xCC,0x25,0x76,0x90,0x79,0x06,0x78,0x22,0x16,0xF5, -0xC0,0x83,0xB6,0x84,0xCA,0x28,0x9E,0xFD,0x05,0x76,0x11,0xC5,0xAD,0x88,0x72,0xFC, -0x46,0x02,0x43,0xC7,0xB2,0x8A,0x9D,0x04,0x5F,0x24,0xCB,0x2E,0x4B,0xE1,0x60,0x82, -0x46,0xE1,0x52,0xAB,0x0C,0x81,0x47,0x70,0x6C,0xDD,0x64,0xD1,0xEB,0xF5,0x2C,0xA3, -0x0F,0x82,0x3D,0x0C,0x2B,0xAE,0x97,0xD7,0xB6,0x14,0x86,0x10,0x79,0xBB,0x3B,0x13, -0x80,0x77,0x8C,0x08,0xE1,0x49,0xD2,0x6A,0x62,0x2F,0x1F,0x5E,0xFA,0x96,0x68,0xDF, -0x89,0x27,0x95,0x38,0x9F,0x06,0xD7,0x3E,0xC9,0xCB,0x26,0x59,0x0D,0x73,0xDE,0xB0, -0xC8,0xE9,0x26,0x0E,0x83,0x15,0xC6,0xEF,0x5B,0x8B,0xD2,0x04,0x60,0xCA,0x49,0xA6, -0x28,0xF6,0x69,0x3B,0xF6,0xCB,0xC8,0x28,0x91,0xE5,0x9D,0x8A,0x61,0x57,0x37,0xAC, -0x74,0x14,0xDC,0x74,0xE0,0x3A,0xEE,0x72,0x2F,0x2E,0x9C,0xFB,0xD0,0xBB,0xBF,0xF5, -0x3D,0x00,0xE1,0x06,0x33,0xE8,0x82,0x2B,0xAE,0x53,0xA6,0x3A,0x16,0x73,0x8C,0xDD, -0x41,0x0E,0x20,0x3A,0xC0,0xB4,0xA7,0xA1,0xE9,0xB2,0x4F,0x90,0x2E,0x32,0x60,0xE9, -0x57,0xCB,0xB9,0x04,0x92,0x68,0x68,0xE5,0x38,0x26,0x60,0x75,0xB2,0x9F,0x77,0xFF, -0x91,0x14,0xEF,0xAE,0x20,0x49,0xFC,0xAD,0x40,0x15,0x48,0xD1,0x02,0x31,0x61,0x19, -0x5E,0xB8,0x97,0xEF,0xAD,0x77,0xB7,0x64,0x9A,0x7A,0xBF,0x5F,0xC1,0x13,0xEF,0x9B, -0x62,0xFB,0x0D,0x6C,0xE0,0x54,0x69,0x16,0xA9,0x03,0xDA,0x6E,0xE9,0x83,0x93,0x71, -0x76,0xC6,0x69,0x85,0x82,0x17,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30, -0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30, -0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, -0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x8F,0xF0,0x4B,0x7F,0xA8, 
-0x2E,0x45,0x24,0xAE,0x4D,0x50,0xFA,0x63,0x9A,0x8B,0xDE,0xE2,0xDD,0x1B,0xBC,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82, -0x01,0x01,0x00,0x4B,0x40,0xDB,0xC0,0x50,0xAA,0xFE,0xC8,0x0C,0xEF,0xF7,0x96,0x54, -0x45,0x49,0xBB,0x96,0x00,0x09,0x41,0xAC,0xB3,0x13,0x86,0x86,0x28,0x07,0x33,0xCA, -0x6B,0xE6,0x74,0xB9,0xBA,0x00,0x2D,0xAE,0xA4,0x0A,0xD3,0xF5,0xF1,0xF1,0x0F,0x8A, -0xBF,0x73,0x67,0x4A,0x83,0xC7,0x44,0x7B,0x78,0xE0,0xAF,0x6E,0x6C,0x6F,0x03,0x29, -0x8E,0x33,0x39,0x45,0xC3,0x8E,0xE4,0xB9,0x57,0x6C,0xAA,0xFC,0x12,0x96,0xEC,0x53, -0xC6,0x2D,0xE4,0x24,0x6C,0xB9,0x94,0x63,0xFB,0xDC,0x53,0x68,0x67,0x56,0x3E,0x83, -0xB8,0xCF,0x35,0x21,0xC3,0xC9,0x68,0xFE,0xCE,0xDA,0xC2,0x53,0xAA,0xCC,0x90,0x8A, -0xE9,0xF0,0x5D,0x46,0x8C,0x95,0xDD,0x7A,0x58,0x28,0x1A,0x2F,0x1D,0xDE,0xCD,0x00, -0x37,0x41,0x8F,0xED,0x44,0x6D,0xD7,0x53,0x28,0x97,0x7E,0xF3,0x67,0x04,0x1E,0x15, -0xD7,0x8A,0x96,0xB4,0xD3,0xDE,0x4C,0x27,0xA4,0x4C,0x1B,0x73,0x73,0x76,0xF4,0x17, -0x99,0xC2,0x1F,0x7A,0x0E,0xE3,0x2D,0x08,0xAD,0x0A,0x1C,0x2C,0xFF,0x3C,0xAB,0x55, -0x0E,0x0F,0x91,0x7E,0x36,0xEB,0xC3,0x57,0x49,0xBE,0xE1,0x2E,0x2D,0x7C,0x60,0x8B, -0xC3,0x41,0x51,0x13,0x23,0x9D,0xCE,0xF7,0x32,0x6B,0x94,0x01,0xA8,0x99,0xE7,0x2C, -0x33,0x1F,0x3A,0x3B,0x25,0xD2,0x86,0x40,0xCE,0x3B,0x2C,0x86,0x78,0xC9,0x61,0x2F, -0x14,0xBA,0xEE,0xDB,0x55,0x6F,0xDF,0x84,0xEE,0x05,0x09,0x4D,0xBD,0x28,0xD8,0x72, -0xCE,0xD3,0x62,0x50,0x65,0x1E,0xEB,0x92,0x97,0x83,0x31,0xD9,0xB3,0xB5,0xCA,0x47, -0x58,0x3F,0x5F, -}; - - -/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Networking */ -/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Networking */ - - -const unsigned char AffirmTrust_Networking_certificate[848]={ -0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x7C, -0x4F,0x04,0x39,0x1C,0xD4,0x99,0x2D,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, -0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, 
-0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B, -0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06, -0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, -0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x69,0x6E,0x67,0x30,0x1E,0x17,0x0D, -0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x38,0x32,0x34,0x5A,0x17,0x0D,0x33, -0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x38,0x32,0x34,0x5A,0x30,0x44,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06, -0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, -0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69, -0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x69, -0x6E,0x67,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82, -0x01,0x01,0x00,0xB4,0x84,0xCC,0x33,0x17,0x2E,0x6B,0x94,0x6C,0x6B,0x61,0x52,0xA0, -0xEB,0xA3,0xCF,0x79,0x94,0x4C,0xE5,0x94,0x80,0x99,0xCB,0x55,0x64,0x44,0x65,0x8F, -0x67,0x64,0xE2,0x06,0xE3,0x5C,0x37,0x49,0xF6,0x2F,0x9B,0x84,0x84,0x1E,0x2D,0xF2, -0x60,0x9D,0x30,0x4E,0xCC,0x84,0x85,0xE2,0x2C,0xCF,0x1E,0x9E,0xFE,0x36,0xAB,0x33, -0x77,0x35,0x44,0xD8,0x35,0x96,0x1A,0x3D,0x36,0xE8,0x7A,0x0E,0xD8,0xD5,0x47,0xA1, -0x6A,0x69,0x8B,0xD9,0xFC,0xBB,0x3A,0xAE,0x79,0x5A,0xD5,0xF4,0xD6,0x71,0xBB,0x9A, -0x90,0x23,0x6B,0x9A,0xB7,0x88,0x74,0x87,0x0C,0x1E,0x5F,0xB9,0x9E,0x2D,0xFA,0xAB, -0x53,0x2B,0xDC,0xBB,0x76,0x3E,0x93,0x4C,0x08,0x08,0x8C,0x1E,0xA2,0x23,0x1C,0xD4, -0x6A,0xAD,0x22,0xBA,0x99,0x01,0x2E,0x6D,0x65,0xCB,0xBE,0x24,0x66,0x55,0x24,0x4B, -0x40,0x44,0xB1,0x1B,0xD7,0xE1,0xC2,0x85,0xC0,0xDE,0x10,0x3F,0x3D,0xED,0xB8,0xFC, -0xF1,0xF1,0x23,0x53,0xDC,0xBF,0x65,0x97,0x6F,0xD9,0xF9,0x40,0x71,0x8D,0x7D,0xBD, -0x95,0xD4,0xCE,0xBE,0xA0,0x5E,0x27,0x23,0xDE,0xFD,0xA6,0xD0,0x26,0x0E,0x00,0x29, 
-0xEB,0x3C,0x46,0xF0,0x3D,0x60,0xBF,0x3F,0x50,0xD2,0xDC,0x26,0x41,0x51,0x9E,0x14, -0x37,0x42,0x04,0xA3,0x70,0x57,0xA8,0x1B,0x87,0xED,0x2D,0xFA,0x7B,0xEE,0x8C,0x0A, -0xE3,0xA9,0x66,0x89,0x19,0xCB,0x41,0xF9,0xDD,0x44,0x36,0x61,0xCF,0xE2,0x77,0x46, -0xC8,0x7D,0xF6,0xF4,0x92,0x81,0x36,0xFD,0xDB,0x34,0xF1,0x72,0x7E,0xF3,0x0C,0x16, -0xBD,0xB4,0x15,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03, -0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x07,0x1F,0xD2,0xE7,0x9C,0xDA,0xC2,0x6E,0xA2, -0x40,0xB4,0xB0,0x7A,0x50,0x10,0x50,0x74,0xC4,0xC8,0xBD,0x30,0x0F,0x06,0x03,0x55, -0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03, -0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09, -0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00, -0x89,0x57,0xB2,0x16,0x7A,0xA8,0xC2,0xFD,0xD6,0xD9,0x9B,0x9B,0x34,0xC2,0x9C,0xB4, -0x32,0x14,0x4D,0xA7,0xA4,0xDF,0xEC,0xBE,0xA7,0xBE,0xF8,0x43,0xDB,0x91,0x37,0xCE, -0xB4,0x32,0x2E,0x50,0x55,0x1A,0x35,0x4E,0x76,0x43,0x71,0x20,0xEF,0x93,0x77,0x4E, -0x15,0x70,0x2E,0x87,0xC3,0xC1,0x1D,0x6D,0xDC,0xCB,0xB5,0x27,0xD4,0x2C,0x56,0xD1, -0x52,0x53,0x3A,0x44,0xD2,0x73,0xC8,0xC4,0x1B,0x05,0x65,0x5A,0x62,0x92,0x9C,0xEE, -0x41,0x8D,0x31,0xDB,0xE7,0x34,0xEA,0x59,0x21,0xD5,0x01,0x7A,0xD7,0x64,0xB8,0x64, -0x39,0xCD,0xC9,0xED,0xAF,0xED,0x4B,0x03,0x48,0xA7,0xA0,0x99,0x01,0x80,0xDC,0x65, -0xA3,0x36,0xAE,0x65,0x59,0x48,0x4F,0x82,0x4B,0xC8,0x65,0xF1,0x57,0x1D,0xE5,0x59, -0x2E,0x0A,0x3F,0x6C,0xD8,0xD1,0xF5,0xE5,0x09,0xB4,0x6C,0x54,0x00,0x0A,0xE0,0x15, -0x4D,0x87,0x75,0x6D,0xB7,0x58,0x96,0x5A,0xDD,0x6D,0xD2,0x00,0xA0,0xF4,0x9B,0x48, -0xBE,0xC3,0x37,0xA4,0xBA,0x36,0xE0,0x7C,0x87,0x85,0x97,0x1A,0x15,0xA2,0xDE,0x2E, -0xA2,0x5B,0xBD,0xAF,0x18,0xF9,0x90,0x50,0xCD,0x70,0x59,0xF8,0x27,0x67,0x47,0xCB, -0xC7,0xA0,0x07,0x3A,0x7D,0xD1,0x2C,0x5D,0x6C,0x19,0x3A,0x66,0xB5,0x7D,0xFD,0x91, -0x6F,0x82,0xB1,0xBE,0x08,0x93,0xDB,0x14,0x47,0xF1,0xA2,0x37,0xC7,0x45,0x9E,0x3C, 
-0xC7,0x77,0xAF,0x64,0xA8,0x93,0xDF,0xF6,0x69,0x83,0x82,0x60,0xF2,0x49,0x42,0x34, -0xED,0x5A,0x00,0x54,0x85,0x1C,0x16,0x36,0x92,0x0C,0x5C,0xFA,0xA6,0xAD,0xBF,0xDB, -}; - - -/* subject:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */ -/* issuer :/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root */ - - -const unsigned char AddTrust_External_Root_certificate[1082]={ -0x30,0x82,0x04,0x36,0x30,0x82,0x03,0x1E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31,0x14, -0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75,0x73, -0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D,0x41, -0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C, -0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30,0x20, -0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20, -0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F,0x74, -0x30,0x1E,0x17,0x0D,0x30,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38, -0x5A,0x17,0x0D,0x32,0x30,0x30,0x35,0x33,0x30,0x31,0x30,0x34,0x38,0x33,0x38,0x5A, -0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53,0x45,0x31, -0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54,0x72,0x75, -0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B,0x13,0x1D, -0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61, -0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x22,0x30, -0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74, -0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52,0x6F,0x6F, -0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, 
-0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01, -0x01,0x00,0xB7,0xF7,0x1A,0x33,0xE6,0xF2,0x00,0x04,0x2D,0x39,0xE0,0x4E,0x5B,0xED, -0x1F,0xBC,0x6C,0x0F,0xCD,0xB5,0xFA,0x23,0xB6,0xCE,0xDE,0x9B,0x11,0x33,0x97,0xA4, -0x29,0x4C,0x7D,0x93,0x9F,0xBD,0x4A,0xBC,0x93,0xED,0x03,0x1A,0xE3,0x8F,0xCF,0xE5, -0x6D,0x50,0x5A,0xD6,0x97,0x29,0x94,0x5A,0x80,0xB0,0x49,0x7A,0xDB,0x2E,0x95,0xFD, -0xB8,0xCA,0xBF,0x37,0x38,0x2D,0x1E,0x3E,0x91,0x41,0xAD,0x70,0x56,0xC7,0xF0,0x4F, -0x3F,0xE8,0x32,0x9E,0x74,0xCA,0xC8,0x90,0x54,0xE9,0xC6,0x5F,0x0F,0x78,0x9D,0x9A, -0x40,0x3C,0x0E,0xAC,0x61,0xAA,0x5E,0x14,0x8F,0x9E,0x87,0xA1,0x6A,0x50,0xDC,0xD7, -0x9A,0x4E,0xAF,0x05,0xB3,0xA6,0x71,0x94,0x9C,0x71,0xB3,0x50,0x60,0x0A,0xC7,0x13, -0x9D,0x38,0x07,0x86,0x02,0xA8,0xE9,0xA8,0x69,0x26,0x18,0x90,0xAB,0x4C,0xB0,0x4F, -0x23,0xAB,0x3A,0x4F,0x84,0xD8,0xDF,0xCE,0x9F,0xE1,0x69,0x6F,0xBB,0xD7,0x42,0xD7, -0x6B,0x44,0xE4,0xC7,0xAD,0xEE,0x6D,0x41,0x5F,0x72,0x5A,0x71,0x08,0x37,0xB3,0x79, -0x65,0xA4,0x59,0xA0,0x94,0x37,0xF7,0x00,0x2F,0x0D,0xC2,0x92,0x72,0xDA,0xD0,0x38, -0x72,0xDB,0x14,0xA8,0x45,0xC4,0x5D,0x2A,0x7D,0xB7,0xB4,0xD6,0xC4,0xEE,0xAC,0xCD, -0x13,0x44,0xB7,0xC9,0x2B,0xDD,0x43,0x00,0x25,0xFA,0x61,0xB9,0x69,0x6A,0x58,0x23, -0x11,0xB7,0xA7,0x33,0x8F,0x56,0x75,0x59,0xF5,0xCD,0x29,0xD7,0x46,0xB7,0x0A,0x2B, -0x65,0xB6,0xD3,0x42,0x6F,0x15,0xB2,0xB8,0x7B,0xFB,0xEF,0xE9,0x5D,0x53,0xD5,0x34, -0x5A,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xDC,0x30,0x81,0xD9,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4,0x26,0xF7, -0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0x30,0x0B,0x06,0x03, -0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13, -0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x99,0x06,0x03,0x55, -0x1D,0x23,0x04,0x81,0x91,0x30,0x81,0x8E,0x80,0x14,0xAD,0xBD,0x98,0x7A,0x34,0xB4, -0x26,0xF7,0xFA,0xC4,0x26,0x54,0xEF,0x03,0xBD,0xE0,0x24,0xCB,0x54,0x1A,0xA1,0x73, 
-0xA4,0x71,0x30,0x6F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x53, -0x45,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x41,0x64,0x64,0x54, -0x72,0x75,0x73,0x74,0x20,0x41,0x42,0x31,0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x0B, -0x13,0x1D,0x41,0x64,0x64,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72, -0x6E,0x61,0x6C,0x20,0x54,0x54,0x50,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31, -0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x03,0x13,0x19,0x41,0x64,0x64,0x54,0x72,0x75, -0x73,0x74,0x20,0x45,0x78,0x74,0x65,0x72,0x6E,0x61,0x6C,0x20,0x43,0x41,0x20,0x52, -0x6F,0x6F,0x74,0x82,0x01,0x01,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xB0,0x9B,0xE0,0x85,0x25,0xC2, -0xD6,0x23,0xE2,0x0F,0x96,0x06,0x92,0x9D,0x41,0x98,0x9C,0xD9,0x84,0x79,0x81,0xD9, -0x1E,0x5B,0x14,0x07,0x23,0x36,0x65,0x8F,0xB0,0xD8,0x77,0xBB,0xAC,0x41,0x6C,0x47, -0x60,0x83,0x51,0xB0,0xF9,0x32,0x3D,0xE7,0xFC,0xF6,0x26,0x13,0xC7,0x80,0x16,0xA5, -0xBF,0x5A,0xFC,0x87,0xCF,0x78,0x79,0x89,0x21,0x9A,0xE2,0x4C,0x07,0x0A,0x86,0x35, -0xBC,0xF2,0xDE,0x51,0xC4,0xD2,0x96,0xB7,0xDC,0x7E,0x4E,0xEE,0x70,0xFD,0x1C,0x39, -0xEB,0x0C,0x02,0x51,0x14,0x2D,0x8E,0xBD,0x16,0xE0,0xC1,0xDF,0x46,0x75,0xE7,0x24, -0xAD,0xEC,0xF4,0x42,0xB4,0x85,0x93,0x70,0x10,0x67,0xBA,0x9D,0x06,0x35,0x4A,0x18, -0xD3,0x2B,0x7A,0xCC,0x51,0x42,0xA1,0x7A,0x63,0xD1,0xE6,0xBB,0xA1,0xC5,0x2B,0xC2, -0x36,0xBE,0x13,0x0D,0xE6,0xBD,0x63,0x7E,0x79,0x7B,0xA7,0x09,0x0D,0x40,0xAB,0x6A, -0xDD,0x8F,0x8A,0xC3,0xF6,0xF6,0x8C,0x1A,0x42,0x05,0x51,0xD4,0x45,0xF5,0x9F,0xA7, -0x62,0x21,0x68,0x15,0x20,0x43,0x3C,0x99,0xE7,0x7C,0xBD,0x24,0xD8,0xA9,0x91,0x17, -0x73,0x88,0x3F,0x56,0x1B,0x31,0x38,0x18,0xB4,0x71,0x0F,0x9A,0xCD,0xC8,0x0E,0x9E, -0x8E,0x2E,0x1B,0xE1,0x8C,0x98,0x83,0xCB,0x1F,0x31,0xF1,0x44,0x4C,0xC6,0x04,0x73, -0x49,0x76,0x60,0x0F,0xC7,0xF8,0xBD,0x17,0x80,0x6B,0x2E,0xE9,0xCC,0x4C,0x0E,0x5A, -0x9A,0x79,0x0F,0x20,0x0A,0x2E,0xD5,0x9E,0x63,0x26,0x1E,0x55,0x92,0x94,0xD8,0x82, 
-0x17,0x5A,0x7B,0xD0,0xBC,0xC7,0x8F,0x4E,0x86,0x04, -}; - - -/* subject:/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */ -/* issuer :/C=US/O=thawte, Inc./OU=Certification Services Division/OU=(c) 2008 thawte, Inc. - For authorized use only/CN=thawte Primary Root CA - G3 */ - - -const unsigned char thawte_Primary_Root_CA___G3_certificate[1070]={ -0x30,0x82,0x04,0x2A,0x30,0x82,0x03,0x12,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x60, -0x01,0x97,0xB7,0x46,0xA7,0xEA,0xB4,0xB4,0x9A,0xD6,0x4B,0x2F,0xF7,0x90,0xFB,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x81, -0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15, -0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65,0x2C, -0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13,0x1F, -0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53,0x65, -0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E,0x31, -0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32,0x30, -0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20, -0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64, -0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03,0x55, -0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D,0x61, -0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33,0x30, -0x1E,0x17,0x0D,0x30,0x38,0x30,0x34,0x30,0x32,0x30,0x30,0x30,0x30,0x30,0x30,0x5A, -0x17,0x0D,0x33,0x37,0x31,0x32,0x30,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30, -0x81,0xAE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31, -0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x74,0x68,0x61,0x77,0x74,0x65, -0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03,0x55,0x04,0x0B,0x13, 
-0x1F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x53, -0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x20,0x44,0x69,0x76,0x69,0x73,0x69,0x6F,0x6E, -0x31,0x38,0x30,0x36,0x06,0x03,0x55,0x04,0x0B,0x13,0x2F,0x28,0x63,0x29,0x20,0x32, -0x30,0x30,0x38,0x20,0x74,0x68,0x61,0x77,0x74,0x65,0x2C,0x20,0x49,0x6E,0x63,0x2E, -0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65, -0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x24,0x30,0x22,0x06,0x03, -0x55,0x04,0x03,0x13,0x1B,0x74,0x68,0x61,0x77,0x74,0x65,0x20,0x50,0x72,0x69,0x6D, -0x61,0x72,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20,0x2D,0x20,0x47,0x33, -0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, -0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01, -0x00,0xB2,0xBF,0x27,0x2C,0xFB,0xDB,0xD8,0x5B,0xDD,0x78,0x7B,0x1B,0x9E,0x77,0x66, -0x81,0xCB,0x3E,0xBC,0x7C,0xAE,0xF3,0xA6,0x27,0x9A,0x34,0xA3,0x68,0x31,0x71,0x38, -0x33,0x62,0xE4,0xF3,0x71,0x66,0x79,0xB1,0xA9,0x65,0xA3,0xA5,0x8B,0xD5,0x8F,0x60, -0x2D,0x3F,0x42,0xCC,0xAA,0x6B,0x32,0xC0,0x23,0xCB,0x2C,0x41,0xDD,0xE4,0xDF,0xFC, -0x61,0x9C,0xE2,0x73,0xB2,0x22,0x95,0x11,0x43,0x18,0x5F,0xC4,0xB6,0x1F,0x57,0x6C, -0x0A,0x05,0x58,0x22,0xC8,0x36,0x4C,0x3A,0x7C,0xA5,0xD1,0xCF,0x86,0xAF,0x88,0xA7, -0x44,0x02,0x13,0x74,0x71,0x73,0x0A,0x42,0x59,0x02,0xF8,0x1B,0x14,0x6B,0x42,0xDF, -0x6F,0x5F,0xBA,0x6B,0x82,0xA2,0x9D,0x5B,0xE7,0x4A,0xBD,0x1E,0x01,0x72,0xDB,0x4B, -0x74,0xE8,0x3B,0x7F,0x7F,0x7D,0x1F,0x04,0xB4,0x26,0x9B,0xE0,0xB4,0x5A,0xAC,0x47, -0x3D,0x55,0xB8,0xD7,0xB0,0x26,0x52,0x28,0x01,0x31,0x40,0x66,0xD8,0xD9,0x24,0xBD, -0xF6,0x2A,0xD8,0xEC,0x21,0x49,0x5C,0x9B,0xF6,0x7A,0xE9,0x7F,0x55,0x35,0x7E,0x96, -0x6B,0x8D,0x93,0x93,0x27,0xCB,0x92,0xBB,0xEA,0xAC,0x40,0xC0,0x9F,0xC2,0xF8,0x80, -0xCF,0x5D,0xF4,0x5A,0xDC,0xCE,0x74,0x86,0xA6,0x3E,0x6C,0x0B,0x53,0xCA,0xBD,0x92, -0xCE,0x19,0x06,0x72,0xE6,0x0C,0x5C,0x38,0x69,0xC7,0x04,0xD6,0xBC,0x6C,0xCE,0x5B, 
-0xF6,0xF7,0x68,0x9C,0xDC,0x25,0x15,0x48,0x88,0xA1,0xE9,0xA9,0xF8,0x98,0x9C,0xE0, -0xF3,0xD5,0x31,0x28,0x61,0x11,0x6C,0x67,0x96,0x8D,0x39,0x99,0xCB,0xC2,0x45,0x24, -0x39,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D, -0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55, -0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55, -0x1D,0x0E,0x04,0x16,0x04,0x14,0xAD,0x6C,0xAA,0x94,0x60,0x9C,0xED,0xE4,0xFF,0xFA, -0x3E,0x0A,0x74,0x2B,0x63,0x03,0xF7,0xB6,0x59,0xBF,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x1A,0x40, -0xD8,0x95,0x65,0xAC,0x09,0x92,0x89,0xC6,0x39,0xF4,0x10,0xE5,0xA9,0x0E,0x66,0x53, -0x5D,0x78,0xDE,0xFA,0x24,0x91,0xBB,0xE7,0x44,0x51,0xDF,0xC6,0x16,0x34,0x0A,0xEF, -0x6A,0x44,0x51,0xEA,0x2B,0x07,0x8A,0x03,0x7A,0xC3,0xEB,0x3F,0x0A,0x2C,0x52,0x16, -0xA0,0x2B,0x43,0xB9,0x25,0x90,0x3F,0x70,0xA9,0x33,0x25,0x6D,0x45,0x1A,0x28,0x3B, -0x27,0xCF,0xAA,0xC3,0x29,0x42,0x1B,0xDF,0x3B,0x4C,0xC0,0x33,0x34,0x5B,0x41,0x88, -0xBF,0x6B,0x2B,0x65,0xAF,0x28,0xEF,0xB2,0xF5,0xC3,0xAA,0x66,0xCE,0x7B,0x56,0xEE, -0xB7,0xC8,0xCB,0x67,0xC1,0xC9,0x9C,0x1A,0x18,0xB8,0xC4,0xC3,0x49,0x03,0xF1,0x60, -0x0E,0x50,0xCD,0x46,0xC5,0xF3,0x77,0x79,0xF7,0xB6,0x15,0xE0,0x38,0xDB,0xC7,0x2F, -0x28,0xA0,0x0C,0x3F,0x77,0x26,0x74,0xD9,0x25,0x12,0xDA,0x31,0xDA,0x1A,0x1E,0xDC, -0x29,0x41,0x91,0x22,0x3C,0x69,0xA7,0xBB,0x02,0xF2,0xB6,0x5C,0x27,0x03,0x89,0xF4, -0x06,0xEA,0x9B,0xE4,0x72,0x82,0xE3,0xA1,0x09,0xC1,0xE9,0x00,0x19,0xD3,0x3E,0xD4, -0x70,0x6B,0xBA,0x71,0xA6,0xAA,0x58,0xAE,0xF4,0xBB,0xE9,0x6C,0xB6,0xEF,0x87,0xCC, -0x9B,0xBB,0xFF,0x39,0xE6,0x56,0x61,0xD3,0x0A,0xA7,0xC4,0x5C,0x4C,0x60,0x7B,0x05, -0x77,0x26,0x7A,0xBF,0xD8,0x07,0x52,0x2C,0x62,0xF7,0x70,0x63,0xD9,0x39,0xBC,0x6F, -0x1C,0xC2,0x79,0xDC,0x76,0x29,0xAF,0xCE,0xC5,0x2C,0x64,0x04,0x5E,0x88,0x36,0x6E, -0x31,0xD4,0x40,0x1A,0x62,0x34,0x36,0x3F,0x35,0x01,0xAE,0xAC,0x63,0xA0, -}; - - -/* subject:/C=US/O=DigiCert 
Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */ -/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root CA */ - - -const unsigned char DigiCert_Assured_ID_Root_CA_certificate[955]={ -0x30,0x82,0x03,0xB7,0x30,0x82,0x02,0x9F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0C, -0xE7,0xE0,0xE5,0x17,0xD8,0x46,0xFE,0x8F,0xE5,0x60,0xFC,0x1B,0xF0,0x30,0x39,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x65, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, -0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, -0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, -0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65, -0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F, -0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30, -0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30, -0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, -0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44, -0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06, -0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65, -0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13, -0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65, -0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03, -0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAD,0x0E,0x15, -0xCE,0xE4,0x43,0x80,0x5C,0xB1,0x87,0xF3,0xB7,0x60,0xF9,0x71,0x12,0xA5,0xAE,0xDC, 
-0x26,0x94,0x88,0xAA,0xF4,0xCE,0xF5,0x20,0x39,0x28,0x58,0x60,0x0C,0xF8,0x80,0xDA, -0xA9,0x15,0x95,0x32,0x61,0x3C,0xB5,0xB1,0x28,0x84,0x8A,0x8A,0xDC,0x9F,0x0A,0x0C, -0x83,0x17,0x7A,0x8F,0x90,0xAC,0x8A,0xE7,0x79,0x53,0x5C,0x31,0x84,0x2A,0xF6,0x0F, -0x98,0x32,0x36,0x76,0xCC,0xDE,0xDD,0x3C,0xA8,0xA2,0xEF,0x6A,0xFB,0x21,0xF2,0x52, -0x61,0xDF,0x9F,0x20,0xD7,0x1F,0xE2,0xB1,0xD9,0xFE,0x18,0x64,0xD2,0x12,0x5B,0x5F, -0xF9,0x58,0x18,0x35,0xBC,0x47,0xCD,0xA1,0x36,0xF9,0x6B,0x7F,0xD4,0xB0,0x38,0x3E, -0xC1,0x1B,0xC3,0x8C,0x33,0xD9,0xD8,0x2F,0x18,0xFE,0x28,0x0F,0xB3,0xA7,0x83,0xD6, -0xC3,0x6E,0x44,0xC0,0x61,0x35,0x96,0x16,0xFE,0x59,0x9C,0x8B,0x76,0x6D,0xD7,0xF1, -0xA2,0x4B,0x0D,0x2B,0xFF,0x0B,0x72,0xDA,0x9E,0x60,0xD0,0x8E,0x90,0x35,0xC6,0x78, -0x55,0x87,0x20,0xA1,0xCF,0xE5,0x6D,0x0A,0xC8,0x49,0x7C,0x31,0x98,0x33,0x6C,0x22, -0xE9,0x87,0xD0,0x32,0x5A,0xA2,0xBA,0x13,0x82,0x11,0xED,0x39,0x17,0x9D,0x99,0x3A, -0x72,0xA1,0xE6,0xFA,0xA4,0xD9,0xD5,0x17,0x31,0x75,0xAE,0x85,0x7D,0x22,0xAE,0x3F, -0x01,0x46,0x86,0xF6,0x28,0x79,0xC8,0xB1,0xDA,0xE4,0x57,0x17,0xC4,0x7E,0x1C,0x0E, -0xB0,0xB4,0x92,0xA6,0x56,0xB3,0xBD,0xB2,0x97,0xED,0xAA,0xA7,0xF0,0xB7,0xC5,0xA8, -0x3F,0x95,0x16,0xD0,0xFF,0xA1,0x96,0xEB,0x08,0x5F,0x18,0x77,0x4F,0x02,0x03,0x01, -0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, -0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF, -0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16, -0x04,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7,0xA7, -0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30, -0x16,0x80,0x14,0x45,0xEB,0xA2,0xAF,0xF4,0x92,0xCB,0x82,0x31,0x2D,0x51,0x8B,0xA7, -0xA7,0x21,0x9D,0xF3,0x6D,0xC8,0x0F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, -0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xA2,0x0E,0xBC,0xDF,0xE2, -0xED,0xF0,0xE3,0x72,0x73,0x7A,0x64,0x94,0xBF,0xF7,0x72,0x66,0xD8,0x32,0xE4,0x42, 
-0x75,0x62,0xAE,0x87,0xEB,0xF2,0xD5,0xD9,0xDE,0x56,0xB3,0x9F,0xCC,0xCE,0x14,0x28, -0xB9,0x0D,0x97,0x60,0x5C,0x12,0x4C,0x58,0xE4,0xD3,0x3D,0x83,0x49,0x45,0x58,0x97, -0x35,0x69,0x1A,0xA8,0x47,0xEA,0x56,0xC6,0x79,0xAB,0x12,0xD8,0x67,0x81,0x84,0xDF, -0x7F,0x09,0x3C,0x94,0xE6,0xB8,0x26,0x2C,0x20,0xBD,0x3D,0xB3,0x28,0x89,0xF7,0x5F, -0xFF,0x22,0xE2,0x97,0x84,0x1F,0xE9,0x65,0xEF,0x87,0xE0,0xDF,0xC1,0x67,0x49,0xB3, -0x5D,0xEB,0xB2,0x09,0x2A,0xEB,0x26,0xED,0x78,0xBE,0x7D,0x3F,0x2B,0xF3,0xB7,0x26, -0x35,0x6D,0x5F,0x89,0x01,0xB6,0x49,0x5B,0x9F,0x01,0x05,0x9B,0xAB,0x3D,0x25,0xC1, -0xCC,0xB6,0x7F,0xC2,0xF1,0x6F,0x86,0xC6,0xFA,0x64,0x68,0xEB,0x81,0x2D,0x94,0xEB, -0x42,0xB7,0xFA,0x8C,0x1E,0xDD,0x62,0xF1,0xBE,0x50,0x67,0xB7,0x6C,0xBD,0xF3,0xF1, -0x1F,0x6B,0x0C,0x36,0x07,0x16,0x7F,0x37,0x7C,0xA9,0x5B,0x6D,0x7A,0xF1,0x12,0x46, -0x60,0x83,0xD7,0x27,0x04,0xBE,0x4B,0xCE,0x97,0xBE,0xC3,0x67,0x2A,0x68,0x11,0xDF, -0x80,0xE7,0x0C,0x33,0x66,0xBF,0x13,0x0D,0x14,0x6E,0xF3,0x7F,0x1F,0x63,0x10,0x1E, -0xFA,0x8D,0x1B,0x25,0x6D,0x6C,0x8F,0xA5,0xB7,0x61,0x01,0xB1,0xD2,0xA3,0x26,0xA1, -0x10,0x71,0x9D,0xAD,0xE2,0xC3,0xF9,0xC3,0x99,0x51,0xB7,0x2B,0x07,0x08,0xCE,0x2E, -0xE6,0x50,0xB2,0xA7,0xFA,0x0A,0x45,0x2F,0xA2,0xF0,0xF2, -}; - - -/* subject:/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */ -/* issuer :/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */ - - -const unsigned char Go_Daddy_Class_2_CA_certificate[1028]={ -0x30,0x82,0x04,0x00,0x30,0x82,0x02,0xE8,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21, -0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20, -0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63, -0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44, 
-0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72, -0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F, -0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x36,0x32,0x39,0x31,0x37, -0x30,0x36,0x32,0x30,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,0x32,0x39,0x31,0x37,0x30, -0x36,0x32,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, -0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68, -0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70, -0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13, -0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20, -0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20, -0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D, -0x00,0x30,0x82,0x01,0x08,0x02,0x82,0x01,0x01,0x00,0xDE,0x9D,0xD7,0xEA,0x57,0x18, -0x49,0xA1,0x5B,0xEB,0xD7,0x5F,0x48,0x86,0xEA,0xBE,0xDD,0xFF,0xE4,0xEF,0x67,0x1C, -0xF4,0x65,0x68,0xB3,0x57,0x71,0xA0,0x5E,0x77,0xBB,0xED,0x9B,0x49,0xE9,0x70,0x80, -0x3D,0x56,0x18,0x63,0x08,0x6F,0xDA,0xF2,0xCC,0xD0,0x3F,0x7F,0x02,0x54,0x22,0x54, -0x10,0xD8,0xB2,0x81,0xD4,0xC0,0x75,0x3D,0x4B,0x7F,0xC7,0x77,0xC3,0x3E,0x78,0xAB, -0x1A,0x03,0xB5,0x20,0x6B,0x2F,0x6A,0x2B,0xB1,0xC5,0x88,0x7E,0xC4,0xBB,0x1E,0xB0, -0xC1,0xD8,0x45,0x27,0x6F,0xAA,0x37,0x58,0xF7,0x87,0x26,0xD7,0xD8,0x2D,0xF6,0xA9, -0x17,0xB7,0x1F,0x72,0x36,0x4E,0xA6,0x17,0x3F,0x65,0x98,0x92,0xDB,0x2A,0x6E,0x5D, -0xA2,0xFE,0x88,0xE0,0x0B,0xDE,0x7F,0xE5,0x8D,0x15,0xE1,0xEB,0xCB,0x3A,0xD5,0xE2, -0x12,0xA2,0x13,0x2D,0xD8,0x8E,0xAF,0x5F,0x12,0x3D,0xA0,0x08,0x05,0x08,0xB6,0x5C, -0xA5,0x65,0x38,0x04,0x45,0x99,0x1E,0xA3,0x60,0x60,0x74,0xC5,0x41,0xA5,0x72,0x62, -0x1B,0x62,0xC5,0x1F,0x6F,0x5F,0x1A,0x42,0xBE,0x02,0x51,0x65,0xA8,0xAE,0x23,0x18, 
-0x6A,0xFC,0x78,0x03,0xA9,0x4D,0x7F,0x80,0xC3,0xFA,0xAB,0x5A,0xFC,0xA1,0x40,0xA4, -0xCA,0x19,0x16,0xFE,0xB2,0xC8,0xEF,0x5E,0x73,0x0D,0xEE,0x77,0xBD,0x9A,0xF6,0x79, -0x98,0xBC,0xB1,0x07,0x67,0xA2,0x15,0x0D,0xDD,0xA0,0x58,0xC6,0x44,0x7B,0x0A,0x3E, -0x62,0x28,0x5F,0xBA,0x41,0x07,0x53,0x58,0xCF,0x11,0x7E,0x38,0x74,0xC5,0xF8,0xFF, -0xB5,0x69,0x90,0x8F,0x84,0x74,0xEA,0x97,0x1B,0xAF,0x02,0x01,0x03,0xA3,0x81,0xC0, -0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xD2,0xC4, -0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,0xFE,0xDD,0xA8,0x6A, -0xD4,0xE3,0x30,0x81,0x8D,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,0x85,0x30,0x81,0x82, -0x80,0x14,0xD2,0xC4,0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1, -0xFE,0xDD,0xA8,0x6A,0xD4,0xE3,0xA1,0x67,0xA4,0x65,0x30,0x63,0x31,0x0B,0x30,0x09, -0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55, -0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79, -0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F, -0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20, -0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, -0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82, -0x01,0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03, -0x82,0x01,0x01,0x00,0x32,0x4B,0xF3,0xB2,0xCA,0x3E,0x91,0xFC,0x12,0xC6,0xA1,0x07, -0x8C,0x8E,0x77,0xA0,0x33,0x06,0x14,0x5C,0x90,0x1E,0x18,0xF7,0x08,0xA6,0x3D,0x0A, -0x19,0xF9,0x87,0x80,0x11,0x6E,0x69,0xE4,0x96,0x17,0x30,0xFF,0x34,0x91,0x63,0x72, -0x38,0xEE,0xCC,0x1C,0x01,0xA3,0x1D,0x94,0x28,0xA4,0x31,0xF6,0x7A,0xC4,0x54,0xD7, -0xF6,0xE5,0x31,0x58,0x03,0xA2,0xCC,0xCE,0x62,0xDB,0x94,0x45,0x73,0xB5,0xBF,0x45, -0xC9,0x24,0xB5,0xD5,0x82,0x02,0xAD,0x23,0x79,0x69,0x8D,0xB8,0xB6,0x4D,0xCE,0xCF, 
-0x4C,0xCA,0x33,0x23,0xE8,0x1C,0x88,0xAA,0x9D,0x8B,0x41,0x6E,0x16,0xC9,0x20,0xE5, -0x89,0x9E,0xCD,0x3B,0xDA,0x70,0xF7,0x7E,0x99,0x26,0x20,0x14,0x54,0x25,0xAB,0x6E, -0x73,0x85,0xE6,0x9B,0x21,0x9D,0x0A,0x6C,0x82,0x0E,0xA8,0xF8,0xC2,0x0C,0xFA,0x10, -0x1E,0x6C,0x96,0xEF,0x87,0x0D,0xC4,0x0F,0x61,0x8B,0xAD,0xEE,0x83,0x2B,0x95,0xF8, -0x8E,0x92,0x84,0x72,0x39,0xEB,0x20,0xEA,0x83,0xED,0x83,0xCD,0x97,0x6E,0x08,0xBC, -0xEB,0x4E,0x26,0xB6,0x73,0x2B,0xE4,0xD3,0xF6,0x4C,0xFE,0x26,0x71,0xE2,0x61,0x11, -0x74,0x4A,0xFF,0x57,0x1A,0x87,0x0F,0x75,0x48,0x2E,0xCF,0x51,0x69,0x17,0xA0,0x02, -0x12,0x61,0x95,0xD5,0xD1,0x40,0xB2,0x10,0x4C,0xEE,0xC4,0xAC,0x10,0x43,0xA6,0xA5, -0x9E,0x0A,0xD5,0x95,0x62,0x9A,0x0D,0xCF,0x88,0x82,0xC5,0x32,0x0C,0xE4,0x2B,0x9F, -0x45,0xE6,0x0D,0x9F,0x28,0x9C,0xB1,0xB9,0x2A,0x5A,0x57,0xAD,0x37,0x0F,0xAF,0x1D, -0x7F,0xDB,0xBD,0x9F, -}; - - -/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */ -/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Primary Certification Authority */ - - -const unsigned char GeoTrust_Primary_Certification_Authority_certificate[896]={ -0x30,0x82,0x03,0x7C,0x30,0x82,0x02,0x64,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18, -0xAC,0xB5,0x6A,0xFD,0x69,0xB6,0x15,0x3A,0x63,0x6C,0xAF,0xDA,0xFA,0xC4,0xA1,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x58, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30, -0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74, -0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28, -0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79, -0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41, -0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31, -0x32,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36,0x30,0x37,0x31, -0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03, 
-0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A, -0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31, -0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,0x47,0x65,0x6F,0x54,0x72,0x75, -0x73,0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, -0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82, -0x01,0x01,0x00,0xBE,0xB8,0x15,0x7B,0xFF,0xD4,0x7C,0x7D,0x67,0xAD,0x83,0x64,0x7B, -0xC8,0x42,0x53,0x2D,0xDF,0xF6,0x84,0x08,0x20,0x61,0xD6,0x01,0x59,0x6A,0x9C,0x44, -0x11,0xAF,0xEF,0x76,0xFD,0x95,0x7E,0xCE,0x61,0x30,0xBB,0x7A,0x83,0x5F,0x02,0xBD, -0x01,0x66,0xCA,0xEE,0x15,0x8D,0x6F,0xA1,0x30,0x9C,0xBD,0xA1,0x85,0x9E,0x94,0x3A, -0xF3,0x56,0x88,0x00,0x31,0xCF,0xD8,0xEE,0x6A,0x96,0x02,0xD9,0xED,0x03,0x8C,0xFB, -0x75,0x6D,0xE7,0xEA,0xB8,0x55,0x16,0x05,0x16,0x9A,0xF4,0xE0,0x5E,0xB1,0x88,0xC0, -0x64,0x85,0x5C,0x15,0x4D,0x88,0xC7,0xB7,0xBA,0xE0,0x75,0xE9,0xAD,0x05,0x3D,0x9D, -0xC7,0x89,0x48,0xE0,0xBB,0x28,0xC8,0x03,0xE1,0x30,0x93,0x64,0x5E,0x52,0xC0,0x59, -0x70,0x22,0x35,0x57,0x88,0x8A,0xF1,0x95,0x0A,0x83,0xD7,0xBC,0x31,0x73,0x01,0x34, -0xED,0xEF,0x46,0x71,0xE0,0x6B,0x02,0xA8,0x35,0x72,0x6B,0x97,0x9B,0x66,0xE0,0xCB, -0x1C,0x79,0x5F,0xD8,0x1A,0x04,0x68,0x1E,0x47,0x02,0xE6,0x9D,0x60,0xE2,0x36,0x97, -0x01,0xDF,0xCE,0x35,0x92,0xDF,0xBE,0x67,0xC7,0x6D,0x77,0x59,0x3B,0x8F,0x9D,0xD6, -0x90,0x15,0x94,0xBC,0x42,0x34,0x10,0xC1,0x39,0xF9,0xB1,0x27,0x3E,0x7E,0xD6,0x8A, -0x75,0xC5,0xB2,0xAF,0x96,0xD3,0xA2,0xDE,0x9B,0xE4,0x98,0xBE,0x7D,0xE1,0xE9,0x81, -0xAD,0xB6,0x6F,0xFC,0xD7,0x0E,0xDA,0xE0,0x34,0xB0,0x0D,0x1A,0x77,0xE7,0xE3,0x08, -0x98,0xEF,0x58,0xFA,0x9C,0x84,0xB7,0x36,0xAF,0xC2,0xDF,0xAC,0xD2,0xF4,0x10,0x06, -0x70,0x71,0x35,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03, 
-0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06, -0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x2C,0xD5,0x50,0x41,0x97,0x15,0x8B,0xF0, -0x8F,0x36,0x61,0x5B,0x4A,0xFB,0x6B,0xD9,0x99,0xC9,0x33,0x92,0x30,0x0D,0x06,0x09, -0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00, -0x5A,0x70,0x7F,0x2C,0xDD,0xB7,0x34,0x4F,0xF5,0x86,0x51,0xA9,0x26,0xBE,0x4B,0xB8, -0xAA,0xF1,0x71,0x0D,0xDC,0x61,0xC7,0xA0,0xEA,0x34,0x1E,0x7A,0x77,0x0F,0x04,0x35, -0xE8,0x27,0x8F,0x6C,0x90,0xBF,0x91,0x16,0x24,0x46,0x3E,0x4A,0x4E,0xCE,0x2B,0x16, -0xD5,0x0B,0x52,0x1D,0xFC,0x1F,0x67,0xA2,0x02,0x45,0x31,0x4F,0xCE,0xF3,0xFA,0x03, -0xA7,0x79,0x9D,0x53,0x6A,0xD9,0xDA,0x63,0x3A,0xF8,0x80,0xD7,0xD3,0x99,0xE1,0xA5, -0xE1,0xBE,0xD4,0x55,0x71,0x98,0x35,0x3A,0xBE,0x93,0xEA,0xAE,0xAD,0x42,0xB2,0x90, -0x6F,0xE0,0xFC,0x21,0x4D,0x35,0x63,0x33,0x89,0x49,0xD6,0x9B,0x4E,0xCA,0xC7,0xE7, -0x4E,0x09,0x00,0xF7,0xDA,0xC7,0xEF,0x99,0x62,0x99,0x77,0xB6,0x95,0x22,0x5E,0x8A, -0xA0,0xAB,0xF4,0xB8,0x78,0x98,0xCA,0x38,0x19,0x99,0xC9,0x72,0x9E,0x78,0xCD,0x4B, -0xAC,0xAF,0x19,0xA0,0x73,0x12,0x2D,0xFC,0xC2,0x41,0xBA,0x81,0x91,0xDA,0x16,0x5A, -0x31,0xB7,0xF9,0xB4,0x71,0x80,0x12,0x48,0x99,0x72,0x73,0x5A,0x59,0x53,0xC1,0x63, -0x52,0x33,0xED,0xA7,0xC9,0xD2,0x39,0x02,0x70,0xFA,0xE0,0xB1,0x42,0x66,0x29,0xAA, -0x9B,0x51,0xED,0x30,0x54,0x22,0x14,0x5F,0xD9,0xAB,0x1D,0xC1,0xE4,0x94,0xF0,0xF8, -0xF5,0x2B,0xF7,0xEA,0xCA,0x78,0x46,0xD6,0xB8,0x91,0xFD,0xA6,0x0D,0x2B,0x1A,0x14, -0x01,0x3E,0x80,0xF0,0x42,0xA0,0x95,0x07,0x5E,0x6D,0xCD,0xCC,0x4B,0xA4,0x45,0x8D, -0xAB,0x12,0xE8,0xB3,0xDE,0x5A,0xE5,0xA0,0x7C,0xE8,0x0F,0x22,0x1D,0x5A,0xE9,0x59, -}; - - -/* subject:/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */ -/* issuer :/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. 
- For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 */ - - -const unsigned char VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate[1239]={ -0x30,0x82,0x04,0xD3,0x30,0x82,0x03,0xBB,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x18, -0xDA,0xD1,0x9E,0x26,0x7D,0xE8,0xBB,0x4A,0x21,0x58,0xCD,0xCC,0x6B,0x3B,0x4A,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81, -0xCA,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17, -0x30,0x15,0x06,0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67, -0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B, -0x13,0x16,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74, -0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04, -0x0B,0x13,0x31,0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69, -0x53,0x69,0x67,0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72, -0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20, -0x6F,0x6E,0x6C,0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56, -0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20, -0x50,0x75,0x62,0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43, -0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74, -0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x1E,0x17,0x0D,0x30, -0x36,0x31,0x31,0x30,0x38,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x36, -0x30,0x37,0x31,0x36,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0xCA,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x17,0x30,0x15,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0E,0x56,0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x2C,0x20, -0x49,0x6E,0x63,0x2E,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x0B,0x13,0x16,0x56, 
-0x65,0x72,0x69,0x53,0x69,0x67,0x6E,0x20,0x54,0x72,0x75,0x73,0x74,0x20,0x4E,0x65, -0x74,0x77,0x6F,0x72,0x6B,0x31,0x3A,0x30,0x38,0x06,0x03,0x55,0x04,0x0B,0x13,0x31, -0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x36,0x20,0x56,0x65,0x72,0x69,0x53,0x69,0x67, -0x6E,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75, -0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C, -0x79,0x31,0x45,0x30,0x43,0x06,0x03,0x55,0x04,0x03,0x13,0x3C,0x56,0x65,0x72,0x69, -0x53,0x69,0x67,0x6E,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x33,0x20,0x50,0x75,0x62, -0x6C,0x69,0x63,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74, -0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72, -0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x35,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09, -0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00, -0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAF,0x24,0x08,0x08,0x29,0x7A,0x35, -0x9E,0x60,0x0C,0xAA,0xE7,0x4B,0x3B,0x4E,0xDC,0x7C,0xBC,0x3C,0x45,0x1C,0xBB,0x2B, -0xE0,0xFE,0x29,0x02,0xF9,0x57,0x08,0xA3,0x64,0x85,0x15,0x27,0xF5,0xF1,0xAD,0xC8, -0x31,0x89,0x5D,0x22,0xE8,0x2A,0xAA,0xA6,0x42,0xB3,0x8F,0xF8,0xB9,0x55,0xB7,0xB1, -0xB7,0x4B,0xB3,0xFE,0x8F,0x7E,0x07,0x57,0xEC,0xEF,0x43,0xDB,0x66,0x62,0x15,0x61, -0xCF,0x60,0x0D,0xA4,0xD8,0xDE,0xF8,0xE0,0xC3,0x62,0x08,0x3D,0x54,0x13,0xEB,0x49, -0xCA,0x59,0x54,0x85,0x26,0xE5,0x2B,0x8F,0x1B,0x9F,0xEB,0xF5,0xA1,0x91,0xC2,0x33, -0x49,0xD8,0x43,0x63,0x6A,0x52,0x4B,0xD2,0x8F,0xE8,0x70,0x51,0x4D,0xD1,0x89,0x69, -0x7B,0xC7,0x70,0xF6,0xB3,0xDC,0x12,0x74,0xDB,0x7B,0x5D,0x4B,0x56,0xD3,0x96,0xBF, -0x15,0x77,0xA1,0xB0,0xF4,0xA2,0x25,0xF2,0xAF,0x1C,0x92,0x67,0x18,0xE5,0xF4,0x06, -0x04,0xEF,0x90,0xB9,0xE4,0x00,0xE4,0xDD,0x3A,0xB5,0x19,0xFF,0x02,0xBA,0xF4,0x3C, -0xEE,0xE0,0x8B,0xEB,0x37,0x8B,0xEC,0xF4,0xD7,0xAC,0xF2,0xF6,0xF0,0x3D,0xAF,0xDD, -0x75,0x91,0x33,0x19,0x1D,0x1C,0x40,0xCB,0x74,0x24,0x19,0x21,0x93,0xD9,0x14,0xFE, 
-0xAC,0x2A,0x52,0xC7,0x8F,0xD5,0x04,0x49,0xE4,0x8D,0x63,0x47,0x88,0x3C,0x69,0x83, -0xCB,0xFE,0x47,0xBD,0x2B,0x7E,0x4F,0xC5,0x95,0xAE,0x0E,0x9D,0xD4,0xD1,0x43,0xC0, -0x67,0x73,0xE3,0x14,0x08,0x7E,0xE5,0x3F,0x9F,0x73,0xB8,0x33,0x0A,0xCF,0x5D,0x3F, -0x34,0x87,0x96,0x8A,0xEE,0x53,0xE8,0x25,0x15,0x02,0x03,0x01,0x00,0x01,0xA3,0x81, -0xB2,0x30,0x81,0xAF,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05, -0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04, -0x04,0x03,0x02,0x01,0x06,0x30,0x6D,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x01, -0x0C,0x04,0x61,0x30,0x5F,0xA1,0x5D,0xA0,0x5B,0x30,0x59,0x30,0x57,0x30,0x55,0x16, -0x09,0x69,0x6D,0x61,0x67,0x65,0x2F,0x67,0x69,0x66,0x30,0x21,0x30,0x1F,0x30,0x07, -0x06,0x05,0x2B,0x0E,0x03,0x02,0x1A,0x04,0x14,0x8F,0xE5,0xD3,0x1A,0x86,0xAC,0x8D, -0x8E,0x6B,0xC3,0xCF,0x80,0x6A,0xD4,0x48,0x18,0x2C,0x7B,0x19,0x2E,0x30,0x25,0x16, -0x23,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x6C,0x6F,0x67,0x6F,0x2E,0x76,0x65,0x72, -0x69,0x73,0x69,0x67,0x6E,0x2E,0x63,0x6F,0x6D,0x2F,0x76,0x73,0x6C,0x6F,0x67,0x6F, -0x2E,0x67,0x69,0x66,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x7F, -0xD3,0x65,0xA7,0xC2,0xDD,0xEC,0xBB,0xF0,0x30,0x09,0xF3,0x43,0x39,0xFA,0x02,0xAF, -0x33,0x31,0x33,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05, -0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x93,0x24,0x4A,0x30,0x5F,0x62,0xCF,0xD8,0x1A, -0x98,0x2F,0x3D,0xEA,0xDC,0x99,0x2D,0xBD,0x77,0xF6,0xA5,0x79,0x22,0x38,0xEC,0xC4, -0xA7,0xA0,0x78,0x12,0xAD,0x62,0x0E,0x45,0x70,0x64,0xC5,0xE7,0x97,0x66,0x2D,0x98, -0x09,0x7E,0x5F,0xAF,0xD6,0xCC,0x28,0x65,0xF2,0x01,0xAA,0x08,0x1A,0x47,0xDE,0xF9, -0xF9,0x7C,0x92,0x5A,0x08,0x69,0x20,0x0D,0xD9,0x3E,0x6D,0x6E,0x3C,0x0D,0x6E,0xD8, -0xE6,0x06,0x91,0x40,0x18,0xB9,0xF8,0xC1,0xED,0xDF,0xDB,0x41,0xAA,0xE0,0x96,0x20, -0xC9,0xCD,0x64,0x15,0x38,0x81,0xC9,0x94,0xEE,0xA2,0x84,0x29,0x0B,0x13,0x6F,0x8E, -0xDB,0x0C,0xDD,0x25,0x02,0xDB,0xA4,0x8B,0x19,0x44,0xD2,0x41,0x7A,0x05,0x69,0x4A, 
-0x58,0x4F,0x60,0xCA,0x7E,0x82,0x6A,0x0B,0x02,0xAA,0x25,0x17,0x39,0xB5,0xDB,0x7F, -0xE7,0x84,0x65,0x2A,0x95,0x8A,0xBD,0x86,0xDE,0x5E,0x81,0x16,0x83,0x2D,0x10,0xCC, -0xDE,0xFD,0xA8,0x82,0x2A,0x6D,0x28,0x1F,0x0D,0x0B,0xC4,0xE5,0xE7,0x1A,0x26,0x19, -0xE1,0xF4,0x11,0x6F,0x10,0xB5,0x95,0xFC,0xE7,0x42,0x05,0x32,0xDB,0xCE,0x9D,0x51, -0x5E,0x28,0xB6,0x9E,0x85,0xD3,0x5B,0xEF,0xA5,0x7D,0x45,0x40,0x72,0x8E,0xB7,0x0E, -0x6B,0x0E,0x06,0xFB,0x33,0x35,0x48,0x71,0xB8,0x9D,0x27,0x8B,0xC4,0x65,0x5F,0x0D, -0x86,0x76,0x9C,0x44,0x7A,0xF6,0x95,0x5C,0xF6,0x5D,0x32,0x08,0x33,0xA4,0x54,0xB6, -0x18,0x3F,0x68,0x5C,0xF2,0x42,0x4A,0x85,0x38,0x54,0x83,0x5F,0xD1,0xE8,0x2C,0xF2, -0xAC,0x11,0xD6,0xA8,0xED,0x63,0x6A, -}; - - -/* subject:/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */ -/* issuer :/C=US/O=Equifax/OU=Equifax Secure Certificate Authority */ - - -const unsigned char Equifax_Secure_CA_certificate[804]={ -0x30,0x82,0x03,0x20,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x35, -0xDE,0xF4,0xCF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05, -0x05,0x00,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55, -0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69, -0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71, -0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72, -0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, -0x74,0x79,0x30,0x1E,0x17,0x0D,0x39,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31, -0x35,0x31,0x5A,0x17,0x0D,0x31,0x38,0x30,0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35, -0x31,0x5A,0x30,0x4E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55, -0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69, -0x66,0x61,0x78,0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71, -0x75,0x69,0x66,0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72, 
-0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, -0x74,0x79,0x30,0x81,0x9F,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x01,0x05,0x00,0x03,0x81,0x8D,0x00,0x30,0x81,0x89,0x02,0x81,0x81,0x00,0xC1, -0x5D,0xB1,0x58,0x67,0x08,0x62,0xEE,0xA0,0x9A,0x2D,0x1F,0x08,0x6D,0x91,0x14,0x68, -0x98,0x0A,0x1E,0xFE,0xDA,0x04,0x6F,0x13,0x84,0x62,0x21,0xC3,0xD1,0x7C,0xCE,0x9F, -0x05,0xE0,0xB8,0x01,0xF0,0x4E,0x34,0xEC,0xE2,0x8A,0x95,0x04,0x64,0xAC,0xF1,0x6B, -0x53,0x5F,0x05,0xB3,0xCB,0x67,0x80,0xBF,0x42,0x02,0x8E,0xFE,0xDD,0x01,0x09,0xEC, -0xE1,0x00,0x14,0x4F,0xFC,0xFB,0xF0,0x0C,0xDD,0x43,0xBA,0x5B,0x2B,0xE1,0x1F,0x80, -0x70,0x99,0x15,0x57,0x93,0x16,0xF1,0x0F,0x97,0x6A,0xB7,0xC2,0x68,0x23,0x1C,0xCC, -0x4D,0x59,0x30,0xAC,0x51,0x1E,0x3B,0xAF,0x2B,0xD6,0xEE,0x63,0x45,0x7B,0xC5,0xD9, -0x5F,0x50,0xD2,0xE3,0x50,0x0F,0x3A,0x88,0xE7,0xBF,0x14,0xFD,0xE0,0xC7,0xB9,0x02, -0x03,0x01,0x00,0x01,0xA3,0x82,0x01,0x09,0x30,0x82,0x01,0x05,0x30,0x70,0x06,0x03, -0x55,0x1D,0x1F,0x04,0x69,0x30,0x67,0x30,0x65,0xA0,0x63,0xA0,0x61,0xA4,0x5F,0x30, -0x5D,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x10, -0x30,0x0E,0x06,0x03,0x55,0x04,0x0A,0x13,0x07,0x45,0x71,0x75,0x69,0x66,0x61,0x78, -0x31,0x2D,0x30,0x2B,0x06,0x03,0x55,0x04,0x0B,0x13,0x24,0x45,0x71,0x75,0x69,0x66, -0x61,0x78,0x20,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69,0x66, -0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x31, -0x0D,0x30,0x0B,0x06,0x03,0x55,0x04,0x03,0x13,0x04,0x43,0x52,0x4C,0x31,0x30,0x1A, -0x06,0x03,0x55,0x1D,0x10,0x04,0x13,0x30,0x11,0x81,0x0F,0x32,0x30,0x31,0x38,0x30, -0x38,0x32,0x32,0x31,0x36,0x34,0x31,0x35,0x31,0x5A,0x30,0x0B,0x06,0x03,0x55,0x1D, -0x0F,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18, -0x30,0x16,0x80,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23, -0x20,0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04, 
-0x16,0x04,0x14,0x48,0xE6,0x68,0xF9,0x2B,0xD2,0xB2,0x95,0xD7,0x47,0xD8,0x23,0x20, -0x10,0x4F,0x33,0x98,0x90,0x9F,0xD4,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05, -0x30,0x03,0x01,0x01,0xFF,0x30,0x1A,0x06,0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07, -0x41,0x00,0x04,0x0D,0x30,0x0B,0x1B,0x05,0x56,0x33,0x2E,0x30,0x63,0x03,0x02,0x06, -0xC0,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00, -0x03,0x81,0x81,0x00,0x58,0xCE,0x29,0xEA,0xFC,0xF7,0xDE,0xB5,0xCE,0x02,0xB9,0x17, -0xB5,0x85,0xD1,0xB9,0xE3,0xE0,0x95,0xCC,0x25,0x31,0x0D,0x00,0xA6,0x92,0x6E,0x7F, -0xB6,0x92,0x63,0x9E,0x50,0x95,0xD1,0x9A,0x6F,0xE4,0x11,0xDE,0x63,0x85,0x6E,0x98, -0xEE,0xA8,0xFF,0x5A,0xC8,0xD3,0x55,0xB2,0x66,0x71,0x57,0xDE,0xC0,0x21,0xEB,0x3D, -0x2A,0xA7,0x23,0x49,0x01,0x04,0x86,0x42,0x7B,0xFC,0xEE,0x7F,0xA2,0x16,0x52,0xB5, -0x67,0x67,0xD3,0x40,0xDB,0x3B,0x26,0x58,0xB2,0x28,0x77,0x3D,0xAE,0x14,0x77,0x61, -0xD6,0xFA,0x2A,0x66,0x27,0xA0,0x0D,0xFA,0xA7,0x73,0x5C,0xEA,0x70,0xF1,0x94,0x21, -0x65,0x44,0x5F,0xFA,0xFC,0xEF,0x29,0x68,0xA9,0xA2,0x87,0x79,0xEF,0x79,0xEF,0x4F, -0xAC,0x07,0x77,0x38, -}; - - -/* subject:/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */ -/* issuer :/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */ - - -const unsigned char Entrust_net_Premium_2048_Secure_Server_CA_certificate[1120]={ -0x30,0x82,0x04,0x5C,0x30,0x82,0x03,0x44,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x38, -0x63,0xB9,0x66,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05, -0x05,0x00,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B, -0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x40,0x30,0x3E,0x06, -0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73, -0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x5F,0x32,0x30,0x34,0x38,0x20,0x69, -0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28, -0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30, -0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39, -0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D, -0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,0x03,0x55,0x04,0x03,0x13,0x2A,0x45, -0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, -0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31, -0x32,0x32,0x34,0x31,0x37,0x35,0x30,0x35,0x31,0x5A,0x17,0x0D,0x31,0x39,0x31,0x32, -0x32,0x34,0x31,0x38,0x32,0x30,0x35,0x31,0x5A,0x30,0x81,0xB4,0x31,0x14,0x30,0x12, -0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E, -0x65,0x74,0x31,0x40,0x30,0x3E,0x06,0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77, -0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53, -0x5F,0x32,0x30,0x34,0x38,0x20,0x69,0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79, -0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69, -0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28, 
-0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E, -0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06, -0x03,0x55,0x04,0x03,0x13,0x2A,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65, -0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20, -0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29, -0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, -0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01, -0x00,0xAD,0x4D,0x4B,0xA9,0x12,0x86,0xB2,0xEA,0xA3,0x20,0x07,0x15,0x16,0x64,0x2A, -0x2B,0x4B,0xD1,0xBF,0x0B,0x4A,0x4D,0x8E,0xED,0x80,0x76,0xA5,0x67,0xB7,0x78,0x40, -0xC0,0x73,0x42,0xC8,0x68,0xC0,0xDB,0x53,0x2B,0xDD,0x5E,0xB8,0x76,0x98,0x35,0x93, -0x8B,0x1A,0x9D,0x7C,0x13,0x3A,0x0E,0x1F,0x5B,0xB7,0x1E,0xCF,0xE5,0x24,0x14,0x1E, -0xB1,0x81,0xA9,0x8D,0x7D,0xB8,0xCC,0x6B,0x4B,0x03,0xF1,0x02,0x0C,0xDC,0xAB,0xA5, -0x40,0x24,0x00,0x7F,0x74,0x94,0xA1,0x9D,0x08,0x29,0xB3,0x88,0x0B,0xF5,0x87,0x77, -0x9D,0x55,0xCD,0xE4,0xC3,0x7E,0xD7,0x6A,0x64,0xAB,0x85,0x14,0x86,0x95,0x5B,0x97, -0x32,0x50,0x6F,0x3D,0xC8,0xBA,0x66,0x0C,0xE3,0xFC,0xBD,0xB8,0x49,0xC1,0x76,0x89, -0x49,0x19,0xFD,0xC0,0xA8,0xBD,0x89,0xA3,0x67,0x2F,0xC6,0x9F,0xBC,0x71,0x19,0x60, -0xB8,0x2D,0xE9,0x2C,0xC9,0x90,0x76,0x66,0x7B,0x94,0xE2,0xAF,0x78,0xD6,0x65,0x53, -0x5D,0x3C,0xD6,0x9C,0xB2,0xCF,0x29,0x03,0xF9,0x2F,0xA4,0x50,0xB2,0xD4,0x48,0xCE, -0x05,0x32,0x55,0x8A,0xFD,0xB2,0x64,0x4C,0x0E,0xE4,0x98,0x07,0x75,0xDB,0x7F,0xDF, -0xB9,0x08,0x55,0x60,0x85,0x30,0x29,0xF9,0x7B,0x48,0xA4,0x69,0x86,0xE3,0x35,0x3F, -0x1E,0x86,0x5D,0x7A,0x7A,0x15,0xBD,0xEF,0x00,0x8E,0x15,0x22,0x54,0x17,0x00,0x90, -0x26,0x93,0xBC,0x0E,0x49,0x68,0x91,0xBF,0xF8,0x47,0xD3,0x9D,0x95,0x42,0xC1,0x0E, -0x4D,0xDF,0x6F,0x26,0xCF,0xC3,0x18,0x21,0x62,0x66,0x43,0x70,0xD6,0xD5,0xC0,0x07, -0xE1,0x02,0x03,0x01,0x00,0x01,0xA3,0x74,0x30,0x72,0x30,0x11,0x06,0x09,0x60,0x86, 
-0x48,0x01,0x86,0xF8,0x42,0x01,0x01,0x04,0x04,0x03,0x02,0x00,0x07,0x30,0x1F,0x06, -0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80, -0xBE,0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D, -0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,0xBE, -0xD8,0x89,0xB9,0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x1D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF6,0x7D,0x07,0x41,0x00,0x04,0x10,0x30,0x0E,0x1B,0x08, -0x56,0x35,0x2E,0x30,0x3A,0x34,0x2E,0x30,0x03,0x02,0x04,0x90,0x30,0x0D,0x06,0x09, -0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00, -0x59,0x47,0xAC,0x21,0x84,0x8A,0x17,0xC9,0x9C,0x89,0x53,0x1E,0xBA,0x80,0x85,0x1A, -0xC6,0x3C,0x4E,0x3E,0xB1,0x9C,0xB6,0x7C,0xC6,0x92,0x5D,0x18,0x64,0x02,0xE3,0xD3, -0x06,0x08,0x11,0x61,0x7C,0x63,0xE3,0x2B,0x9D,0x31,0x03,0x70,0x76,0xD2,0xA3,0x28, -0xA0,0xF4,0xBB,0x9A,0x63,0x73,0xED,0x6D,0xE5,0x2A,0xDB,0xED,0x14,0xA9,0x2B,0xC6, -0x36,0x11,0xD0,0x2B,0xEB,0x07,0x8B,0xA5,0xDA,0x9E,0x5C,0x19,0x9D,0x56,0x12,0xF5, -0x54,0x29,0xC8,0x05,0xED,0xB2,0x12,0x2A,0x8D,0xF4,0x03,0x1B,0xFF,0xE7,0x92,0x10, -0x87,0xB0,0x3A,0xB5,0xC3,0x9D,0x05,0x37,0x12,0xA3,0xC7,0xF4,0x15,0xB9,0xD5,0xA4, -0x39,0x16,0x9B,0x53,0x3A,0x23,0x91,0xF1,0xA8,0x82,0xA2,0x6A,0x88,0x68,0xC1,0x79, -0x02,0x22,0xBC,0xAA,0xA6,0xD6,0xAE,0xDF,0xB0,0x14,0x5F,0xB8,0x87,0xD0,0xDD,0x7C, -0x7F,0x7B,0xFF,0xAF,0x1C,0xCF,0xE6,0xDB,0x07,0xAD,0x5E,0xDB,0x85,0x9D,0xD0,0x2B, -0x0D,0x33,0xDB,0x04,0xD1,0xE6,0x49,0x40,0x13,0x2B,0x76,0xFB,0x3E,0xE9,0x9C,0x89, -0x0F,0x15,0xCE,0x18,0xB0,0x85,0x78,0x21,0x4F,0x6B,0x4F,0x0E,0xFA,0x36,0x67,0xCD, -0x07,0xF2,0xFF,0x08,0xD0,0xE2,0xDE,0xD9,0xBF,0x2A,0xAF,0xB8,0x87,0x86,0x21,0x3C, -0x04,0xCA,0xB7,0x94,0x68,0x7F,0xCF,0x3C,0xE9,0x98,0xD7,0x38,0xFF,0xEC,0xC0,0xD9, -0x50,0xF0,0x2E,0x4B,0x58,0xAE,0x46,0x6F,0xD0,0x2E,0xC3,0x60,0xDA,0x72,0x55,0x72, -0xBD,0x4C,0x45,0x9E,0x61,0xBA,0xBF,0x84,0x81,0x92,0x03,0xD1,0xD2,0x69,0x7C,0xC5, -}; - - -/* 
subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G3 */ -/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G3 */ - - -const unsigned char DigiCert_Assured_ID_Root_G3_certificate[586]={ -0x30,0x82,0x02,0x46,0x30,0x82,0x01,0xCD,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0B, -0xA1,0x5A,0xFA,0x1D,0xDF,0xA0,0xB5,0x49,0x44,0xAF,0xCD,0x24,0xA0,0x6C,0xEC,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x65,0x31,0x0B,0x30, -0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03, -0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E, -0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E, -0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22, -0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20, -0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20, -0x47,0x33,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30, -0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30, -0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55, -0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69, -0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04, -0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E, -0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69, -0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49, -0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x33,0x30,0x76,0x30,0x10,0x06,0x07,0x2A, -0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00, -0x04,0x19,0xE7,0xBC,0xAC,0x44,0x65,0xED,0xCD,0xB8,0x3F,0x58,0xFB,0x8D,0xB1,0x57, -0xA9,0x44,0x2D,0x05,0x15,0xF2,0xEF,0x0B,0xFF,0x10,0x74,0x9F,0xB5,0x62,0x52,0x5F, 
-0x66,0x7E,0x1F,0xE5,0xDC,0x1B,0x45,0x79,0x0B,0xCC,0xC6,0x53,0x0A,0x9D,0x8D,0x5D, -0x02,0xD9,0xA9,0x59,0xDE,0x02,0x5A,0xF6,0x95,0x2A,0x0E,0x8D,0x38,0x4A,0x8A,0x49, -0xC6,0xBC,0xC6,0x03,0x38,0x07,0x5F,0x55,0xDA,0x7E,0x09,0x6E,0xE2,0x7F,0x5E,0xD0, -0x45,0x20,0x0F,0x59,0x76,0x10,0xD6,0xA0,0x24,0xF0,0x2D,0xDE,0x36,0xF2,0x6C,0x29, -0x39,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, -0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, -0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, -0x14,0xCB,0xD0,0xBD,0xA9,0xE1,0x98,0x05,0x51,0xA1,0x4D,0x37,0xA2,0x83,0x79,0xCE, -0x8D,0x1D,0x2A,0xE4,0x84,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03, -0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,0x25,0xA4,0x81,0x45,0x02,0x6B,0x12,0x4B, -0x75,0x74,0x4F,0xC8,0x23,0xE3,0x70,0xF2,0x75,0x72,0xDE,0x7C,0x89,0xF0,0xCF,0x91, -0x72,0x61,0x9E,0x5E,0x10,0x92,0x59,0x56,0xB9,0x83,0xC7,0x10,0xE7,0x38,0xE9,0x58, -0x26,0x36,0x7D,0xD5,0xE4,0x34,0x86,0x39,0x02,0x30,0x7C,0x36,0x53,0xF0,0x30,0xE5, -0x62,0x63,0x3A,0x99,0xE2,0xB6,0xA3,0x3B,0x9B,0x34,0xFA,0x1E,0xDA,0x10,0x92,0x71, -0x5E,0x91,0x13,0xA7,0xDD,0xA4,0x6E,0x92,0xCC,0x32,0xD6,0xF5,0x21,0x66,0xC7,0x2F, -0xEA,0x96,0x63,0x6A,0x65,0x45,0x92,0x95,0x01,0xB4, -}; - - -/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO Certification Authority */ -/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO Certification Authority */ - - -const unsigned char COMODO_Certification_Authority_certificate[1057]={ -0x30,0x82,0x04,0x1D,0x30,0x82,0x03,0x05,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x4E, -0x81,0x2D,0x8A,0x82,0x65,0xE0,0x0B,0x02,0xEE,0x3E,0x35,0x02,0x46,0xE5,0x3D,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81, -0x81,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, -0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, 
-0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, -0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, -0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43, -0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x27,0x30,0x25,0x06,0x03,0x55, -0x04,0x03,0x13,0x1E,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, -0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x30,0x31,0x30,0x30,0x30,0x30, -0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35, -0x39,0x5A,0x30,0x81,0x81,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02, -0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65, -0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31, -0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72, -0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F, -0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x27,0x30, -0x25,0x06,0x03,0x55,0x04,0x03,0x13,0x1E,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43, -0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74, -0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82, -0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xD0,0x40,0x8B,0x8B,0x72,0xE3,0x91,0x1B,0xF7, -0x51,0xC1,0x1B,0x54,0x04,0x98,0xD3,0xA9,0xBF,0xC1,0xE6,0x8A,0x5D,0x3B,0x87,0xFB, -0xBB,0x88,0xCE,0x0D,0xE3,0x2F,0x3F,0x06,0x96,0xF0,0xA2,0x29,0x50,0x99,0xAE,0xDB, -0x3B,0xA1,0x57,0xB0,0x74,0x51,0x71,0xCD,0xED,0x42,0x91,0x4D,0x41,0xFE,0xA9,0xC8, -0xD8,0x6A,0x86,0x77,0x44,0xBB,0x59,0x66,0x97,0x50,0x5E,0xB4,0xD4,0x2C,0x70,0x44, -0xCF,0xDA,0x37,0x95,0x42,0x69,0x3C,0x30,0xC4,0x71,0xB3,0x52,0xF0,0x21,0x4D,0xA1, 
-0xD8,0xBA,0x39,0x7C,0x1C,0x9E,0xA3,0x24,0x9D,0xF2,0x83,0x16,0x98,0xAA,0x16,0x7C, -0x43,0x9B,0x15,0x5B,0xB7,0xAE,0x34,0x91,0xFE,0xD4,0x62,0x26,0x18,0x46,0x9A,0x3F, -0xEB,0xC1,0xF9,0xF1,0x90,0x57,0xEB,0xAC,0x7A,0x0D,0x8B,0xDB,0x72,0x30,0x6A,0x66, -0xD5,0xE0,0x46,0xA3,0x70,0xDC,0x68,0xD9,0xFF,0x04,0x48,0x89,0x77,0xDE,0xB5,0xE9, -0xFB,0x67,0x6D,0x41,0xE9,0xBC,0x39,0xBD,0x32,0xD9,0x62,0x02,0xF1,0xB1,0xA8,0x3D, -0x6E,0x37,0x9C,0xE2,0x2F,0xE2,0xD3,0xA2,0x26,0x8B,0xC6,0xB8,0x55,0x43,0x88,0xE1, -0x23,0x3E,0xA5,0xD2,0x24,0x39,0x6A,0x47,0xAB,0x00,0xD4,0xA1,0xB3,0xA9,0x25,0xFE, -0x0D,0x3F,0xA7,0x1D,0xBA,0xD3,0x51,0xC1,0x0B,0xA4,0xDA,0xAC,0x38,0xEF,0x55,0x50, -0x24,0x05,0x65,0x46,0x93,0x34,0x4F,0x2D,0x8D,0xAD,0xC6,0xD4,0x21,0x19,0xD2,0x8E, -0xCA,0x05,0x61,0x71,0x07,0x73,0x47,0xE5,0x8A,0x19,0x12,0xBD,0x04,0x4D,0xCE,0x4E, -0x9C,0xA5,0x48,0xAC,0xBB,0x26,0xF7,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0x8E,0x30, -0x81,0x8B,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x0B,0x58,0xE5, -0x8B,0xC6,0x4C,0x15,0x37,0xA4,0x40,0xA9,0x30,0xA9,0x21,0xBE,0x47,0x36,0x5A,0x56, -0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01, -0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01, -0x01,0xFF,0x30,0x49,0x06,0x03,0x55,0x1D,0x1F,0x04,0x42,0x30,0x40,0x30,0x3E,0xA0, -0x3C,0xA0,0x3A,0x86,0x38,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E, -0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x43,0x4F,0x4D, -0x4F,0x44,0x4F,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E, -0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x2E,0x63,0x72,0x6C,0x30,0x0D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01, -0x00,0x3E,0x98,0x9E,0x9B,0xF6,0x1B,0xE9,0xD7,0x39,0xB7,0x78,0xAE,0x1D,0x72,0x18, -0x49,0xD3,0x87,0xE4,0x43,0x82,0xEB,0x3F,0xC9,0xAA,0xF5,0xA8,0xB5,0xEF,0x55,0x7C, -0x21,0x52,0x65,0xF9,0xD5,0x0D,0xE1,0x6C,0xF4,0x3E,0x8C,0x93,0x73,0x91,0x2E,0x02, 
-0xC4,0x4E,0x07,0x71,0x6F,0xC0,0x8F,0x38,0x61,0x08,0xA8,0x1E,0x81,0x0A,0xC0,0x2F, -0x20,0x2F,0x41,0x8B,0x91,0xDC,0x48,0x45,0xBC,0xF1,0xC6,0xDE,0xBA,0x76,0x6B,0x33, -0xC8,0x00,0x2D,0x31,0x46,0x4C,0xED,0xE7,0x9D,0xCF,0x88,0x94,0xFF,0x33,0xC0,0x56, -0xE8,0x24,0x86,0x26,0xB8,0xD8,0x38,0x38,0xDF,0x2A,0x6B,0xDD,0x12,0xCC,0xC7,0x3F, -0x47,0x17,0x4C,0xA2,0xC2,0x06,0x96,0x09,0xD6,0xDB,0xFE,0x3F,0x3C,0x46,0x41,0xDF, -0x58,0xE2,0x56,0x0F,0x3C,0x3B,0xC1,0x1C,0x93,0x35,0xD9,0x38,0x52,0xAC,0xEE,0xC8, -0xEC,0x2E,0x30,0x4E,0x94,0x35,0xB4,0x24,0x1F,0x4B,0x78,0x69,0xDA,0xF2,0x02,0x38, -0xCC,0x95,0x52,0x93,0xF0,0x70,0x25,0x59,0x9C,0x20,0x67,0xC4,0xEE,0xF9,0x8B,0x57, -0x61,0xF4,0x92,0x76,0x7D,0x3F,0x84,0x8D,0x55,0xB7,0xE8,0xE5,0xAC,0xD5,0xF1,0xF5, -0x19,0x56,0xA6,0x5A,0xFB,0x90,0x1C,0xAF,0x93,0xEB,0xE5,0x1C,0xD4,0x67,0x97,0x5D, -0x04,0x0E,0xBE,0x0B,0x83,0xA6,0x17,0x83,0xB9,0x30,0x12,0xA0,0xC5,0x33,0x15,0x05, -0xB9,0x0D,0xFB,0xC7,0x05,0x76,0xE3,0xD8,0x4A,0x8D,0xFC,0x34,0x17,0xA3,0xC6,0x21, -0x28,0xBE,0x30,0x45,0x31,0x1E,0xC7,0x78,0xBE,0x58,0x61,0x38,0xAC,0x3B,0xE2,0x01, -0x65, -}; - - -/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */ -/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global Root CA */ - - -const unsigned char DigiCert_Global_Root_CA_certificate[947]={ -0x30,0x82,0x03,0xAF,0x30,0x82,0x02,0x97,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x08, -0x3B,0xE0,0x56,0x90,0x42,0x46,0xB1,0xA1,0x75,0x6A,0xC9,0x59,0x91,0xC7,0x4A,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x61, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, -0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, -0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, -0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67,0x69,0x43,0x65, 
-0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43, -0x41,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30, -0x30,0x5A,0x17,0x0D,0x33,0x31,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30, -0x5A,0x30,0x61,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53, -0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43, -0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B, -0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63, -0x6F,0x6D,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x13,0x17,0x44,0x69,0x67, -0x69,0x43,0x65,0x72,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52,0x6F,0x6F, -0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, -0x02,0x82,0x01,0x01,0x00,0xE2,0x3B,0xE1,0x11,0x72,0xDE,0xA8,0xA4,0xD3,0xA3,0x57, -0xAA,0x50,0xA2,0x8F,0x0B,0x77,0x90,0xC9,0xA2,0xA5,0xEE,0x12,0xCE,0x96,0x5B,0x01, -0x09,0x20,0xCC,0x01,0x93,0xA7,0x4E,0x30,0xB7,0x53,0xF7,0x43,0xC4,0x69,0x00,0x57, -0x9D,0xE2,0x8D,0x22,0xDD,0x87,0x06,0x40,0x00,0x81,0x09,0xCE,0xCE,0x1B,0x83,0xBF, -0xDF,0xCD,0x3B,0x71,0x46,0xE2,0xD6,0x66,0xC7,0x05,0xB3,0x76,0x27,0x16,0x8F,0x7B, -0x9E,0x1E,0x95,0x7D,0xEE,0xB7,0x48,0xA3,0x08,0xDA,0xD6,0xAF,0x7A,0x0C,0x39,0x06, -0x65,0x7F,0x4A,0x5D,0x1F,0xBC,0x17,0xF8,0xAB,0xBE,0xEE,0x28,0xD7,0x74,0x7F,0x7A, -0x78,0x99,0x59,0x85,0x68,0x6E,0x5C,0x23,0x32,0x4B,0xBF,0x4E,0xC0,0xE8,0x5A,0x6D, -0xE3,0x70,0xBF,0x77,0x10,0xBF,0xFC,0x01,0xF6,0x85,0xD9,0xA8,0x44,0x10,0x58,0x32, -0xA9,0x75,0x18,0xD5,0xD1,0xA2,0xBE,0x47,0xE2,0x27,0x6A,0xF4,0x9A,0x33,0xF8,0x49, -0x08,0x60,0x8B,0xD4,0x5F,0xB4,0x3A,0x84,0xBF,0xA1,0xAA,0x4A,0x4C,0x7D,0x3E,0xCF, -0x4F,0x5F,0x6C,0x76,0x5E,0xA0,0x4B,0x37,0x91,0x9E,0xDC,0x22,0xE6,0x6D,0xCE,0x14, -0x1A,0x8E,0x6A,0xCB,0xFE,0xCD,0xB3,0x14,0x64,0x17,0xC7,0x5B,0x29,0x9E,0x32,0xBF, 
-0xF2,0xEE,0xFA,0xD3,0x0B,0x42,0xD4,0xAB,0xB7,0x41,0x32,0xDA,0x0C,0xD4,0xEF,0xF8, -0x81,0xD5,0xBB,0x8D,0x58,0x3F,0xB5,0x1B,0xE8,0x49,0x28,0xA2,0x70,0xDA,0x31,0x04, -0xDD,0xF7,0xB2,0x16,0xF2,0x4C,0x0A,0x4E,0x07,0xA8,0xED,0x4A,0x3D,0x5E,0xB5,0x7F, -0xA3,0x90,0xC3,0xAF,0x27,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0E, -0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0F, -0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30, -0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x03,0xDE,0x50,0x35,0x56,0xD1, -0x4C,0xBB,0x66,0xF0,0xA3,0xE2,0x1B,0x1B,0xC3,0x97,0xB2,0x3D,0xD1,0x55,0x30,0x1F, -0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x03,0xDE,0x50,0x35,0x56, -0xD1,0x4C,0xBB,0x66,0xF0,0xA3,0xE2,0x1B,0x1B,0xC3,0x97,0xB2,0x3D,0xD1,0x55,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82, -0x01,0x01,0x00,0xCB,0x9C,0x37,0xAA,0x48,0x13,0x12,0x0A,0xFA,0xDD,0x44,0x9C,0x4F, -0x52,0xB0,0xF4,0xDF,0xAE,0x04,0xF5,0x79,0x79,0x08,0xA3,0x24,0x18,0xFC,0x4B,0x2B, -0x84,0xC0,0x2D,0xB9,0xD5,0xC7,0xFE,0xF4,0xC1,0x1F,0x58,0xCB,0xB8,0x6D,0x9C,0x7A, -0x74,0xE7,0x98,0x29,0xAB,0x11,0xB5,0xE3,0x70,0xA0,0xA1,0xCD,0x4C,0x88,0x99,0x93, -0x8C,0x91,0x70,0xE2,0xAB,0x0F,0x1C,0xBE,0x93,0xA9,0xFF,0x63,0xD5,0xE4,0x07,0x60, -0xD3,0xA3,0xBF,0x9D,0x5B,0x09,0xF1,0xD5,0x8E,0xE3,0x53,0xF4,0x8E,0x63,0xFA,0x3F, -0xA7,0xDB,0xB4,0x66,0xDF,0x62,0x66,0xD6,0xD1,0x6E,0x41,0x8D,0xF2,0x2D,0xB5,0xEA, -0x77,0x4A,0x9F,0x9D,0x58,0xE2,0x2B,0x59,0xC0,0x40,0x23,0xED,0x2D,0x28,0x82,0x45, -0x3E,0x79,0x54,0x92,0x26,0x98,0xE0,0x80,0x48,0xA8,0x37,0xEF,0xF0,0xD6,0x79,0x60, -0x16,0xDE,0xAC,0xE8,0x0E,0xCD,0x6E,0xAC,0x44,0x17,0x38,0x2F,0x49,0xDA,0xE1,0x45, -0x3E,0x2A,0xB9,0x36,0x53,0xCF,0x3A,0x50,0x06,0xF7,0x2E,0xE8,0xC4,0x57,0x49,0x6C, -0x61,0x21,0x18,0xD5,0x04,0xAD,0x78,0x3C,0x2C,0x3A,0x80,0x6B,0xA7,0xEB,0xAF,0x15, -0x14,0xE9,0xD8,0x89,0xC1,0xB9,0x38,0x6C,0xE2,0x91,0x6C,0x8A,0xFF,0x64,0xB9,0x77, 
-0x25,0x57,0x30,0xC0,0x1B,0x24,0xA3,0xE1,0xDC,0xE9,0xDF,0x47,0x7C,0xB5,0xB4,0x24, -0x08,0x05,0x30,0xEC,0x2D,0xBD,0x0B,0xBF,0x45,0xBF,0x50,0xB9,0xA9,0xF3,0xEB,0x98, -0x01,0x12,0xAD,0xC8,0x88,0xC6,0x98,0x34,0x5F,0x8D,0x0A,0x3C,0xC6,0xE9,0xD5,0x95, -0x95,0x6D,0xDE, -}; - - -/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */ -/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=AAA Certificate Services */ - - -const unsigned char Comodo_AAA_Services_root_certificate[1078]={ -0x30,0x82,0x04,0x32,0x30,0x82,0x03,0x1A,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x7B,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, -0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, -0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, -0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, -0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43, -0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55, -0x04,0x03,0x0C,0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, -0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x1E,0x17,0x0D, -0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32, -0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x7B,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06, -0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61, -0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04, -0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03, -0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43,0x41,0x20,0x4C, 
-0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x0C, -0x18,0x41,0x41,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65, -0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F, -0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBE,0x40,0x9D,0xF4,0x6E,0xE1, -0xEA,0x76,0x87,0x1C,0x4D,0x45,0x44,0x8E,0xBE,0x46,0xC8,0x83,0x06,0x9D,0xC1,0x2A, -0xFE,0x18,0x1F,0x8E,0xE4,0x02,0xFA,0xF3,0xAB,0x5D,0x50,0x8A,0x16,0x31,0x0B,0x9A, -0x06,0xD0,0xC5,0x70,0x22,0xCD,0x49,0x2D,0x54,0x63,0xCC,0xB6,0x6E,0x68,0x46,0x0B, -0x53,0xEA,0xCB,0x4C,0x24,0xC0,0xBC,0x72,0x4E,0xEA,0xF1,0x15,0xAE,0xF4,0x54,0x9A, -0x12,0x0A,0xC3,0x7A,0xB2,0x33,0x60,0xE2,0xDA,0x89,0x55,0xF3,0x22,0x58,0xF3,0xDE, -0xDC,0xCF,0xEF,0x83,0x86,0xA2,0x8C,0x94,0x4F,0x9F,0x68,0xF2,0x98,0x90,0x46,0x84, -0x27,0xC7,0x76,0xBF,0xE3,0xCC,0x35,0x2C,0x8B,0x5E,0x07,0x64,0x65,0x82,0xC0,0x48, -0xB0,0xA8,0x91,0xF9,0x61,0x9F,0x76,0x20,0x50,0xA8,0x91,0xC7,0x66,0xB5,0xEB,0x78, -0x62,0x03,0x56,0xF0,0x8A,0x1A,0x13,0xEA,0x31,0xA3,0x1E,0xA0,0x99,0xFD,0x38,0xF6, -0xF6,0x27,0x32,0x58,0x6F,0x07,0xF5,0x6B,0xB8,0xFB,0x14,0x2B,0xAF,0xB7,0xAA,0xCC, -0xD6,0x63,0x5F,0x73,0x8C,0xDA,0x05,0x99,0xA8,0x38,0xA8,0xCB,0x17,0x78,0x36,0x51, -0xAC,0xE9,0x9E,0xF4,0x78,0x3A,0x8D,0xCF,0x0F,0xD9,0x42,0xE2,0x98,0x0C,0xAB,0x2F, -0x9F,0x0E,0x01,0xDE,0xEF,0x9F,0x99,0x49,0xF1,0x2D,0xDF,0xAC,0x74,0x4D,0x1B,0x98, -0xB5,0x47,0xC5,0xE5,0x29,0xD1,0xF9,0x90,0x18,0xC7,0x62,0x9C,0xBE,0x83,0xC7,0x26, -0x7B,0x3E,0x8A,0x25,0xC7,0xC0,0xDD,0x9D,0xE6,0x35,0x68,0x10,0x20,0x9D,0x8F,0xD8, -0xDE,0xD2,0xC3,0x84,0x9C,0x0D,0x5E,0xE8,0x2F,0xC9,0x02,0x03,0x01,0x00,0x01,0xA3, -0x81,0xC0,0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14, -0xA0,0x11,0x0A,0x23,0x3E,0x96,0xF1,0x07,0xEC,0xE2,0xAF,0x29,0xEF,0x82,0xA5,0x7F, -0xD0,0x30,0xA4,0xB4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04, 
-0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05, -0x30,0x03,0x01,0x01,0xFF,0x30,0x7B,0x06,0x03,0x55,0x1D,0x1F,0x04,0x74,0x30,0x72, -0x30,0x38,0xA0,0x36,0xA0,0x34,0x86,0x32,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63, -0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F, -0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65, -0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x36,0xA0,0x34,0xA0,0x32, -0x86,0x30,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D, -0x6F,0x64,0x6F,0x2E,0x6E,0x65,0x74,0x2F,0x41,0x41,0x41,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63, -0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05, -0x00,0x03,0x82,0x01,0x01,0x00,0x08,0x56,0xFC,0x02,0xF0,0x9B,0xE8,0xFF,0xA4,0xFA, -0xD6,0x7B,0xC6,0x44,0x80,0xCE,0x4F,0xC4,0xC5,0xF6,0x00,0x58,0xCC,0xA6,0xB6,0xBC, -0x14,0x49,0x68,0x04,0x76,0xE8,0xE6,0xEE,0x5D,0xEC,0x02,0x0F,0x60,0xD6,0x8D,0x50, -0x18,0x4F,0x26,0x4E,0x01,0xE3,0xE6,0xB0,0xA5,0xEE,0xBF,0xBC,0x74,0x54,0x41,0xBF, -0xFD,0xFC,0x12,0xB8,0xC7,0x4F,0x5A,0xF4,0x89,0x60,0x05,0x7F,0x60,0xB7,0x05,0x4A, -0xF3,0xF6,0xF1,0xC2,0xBF,0xC4,0xB9,0x74,0x86,0xB6,0x2D,0x7D,0x6B,0xCC,0xD2,0xF3, -0x46,0xDD,0x2F,0xC6,0xE0,0x6A,0xC3,0xC3,0x34,0x03,0x2C,0x7D,0x96,0xDD,0x5A,0xC2, -0x0E,0xA7,0x0A,0x99,0xC1,0x05,0x8B,0xAB,0x0C,0x2F,0xF3,0x5C,0x3A,0xCF,0x6C,0x37, -0x55,0x09,0x87,0xDE,0x53,0x40,0x6C,0x58,0xEF,0xFC,0xB6,0xAB,0x65,0x6E,0x04,0xF6, -0x1B,0xDC,0x3C,0xE0,0x5A,0x15,0xC6,0x9E,0xD9,0xF1,0x59,0x48,0x30,0x21,0x65,0x03, -0x6C,0xEC,0xE9,0x21,0x73,0xEC,0x9B,0x03,0xA1,0xE0,0x37,0xAD,0xA0,0x15,0x18,0x8F, -0xFA,0xBA,0x02,0xCE,0xA7,0x2C,0xA9,0x10,0x13,0x2C,0xD4,0xE5,0x08,0x26,0xAB,0x22, -0x97,0x60,0xF8,0x90,0x5E,0x74,0xD4,0xA2,0x9A,0x53,0xBD,0xF2,0xA9,0x68,0xE0,0xA2, -0x6E,0xC2,0xD7,0x6C,0xB1,0xA3,0x0F,0x9E,0xBF,0xEB,0x68,0xE7,0x56,0xF2,0xAE,0xF2, 
-0xE3,0x2B,0x38,0x3A,0x09,0x81,0xB5,0x6B,0x85,0xD7,0xBE,0x2D,0xED,0x3F,0x1A,0xB7, -0xB2,0x63,0xE2,0xF5,0x62,0x2C,0x82,0xD4,0x6A,0x00,0x41,0x50,0xF1,0x39,0x83,0x9F, -0x95,0xE9,0x36,0x96,0x98,0x6E, -}; - - -/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */ -/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */ - - -const unsigned char DigiCert_High_Assurance_EV_Root_CA_certificate[969]={ -0x30,0x82,0x03,0xC5,0x30,0x82,0x02,0xAD,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x02, -0xAC,0x5C,0x26,0x6A,0x0B,0x40,0x9B,0x8F,0x0B,0x79,0xF2,0xAE,0x46,0x25,0x77,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x6C, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, -0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, -0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, -0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x44,0x69,0x67,0x69,0x43,0x65, -0x72,0x74,0x20,0x48,0x69,0x67,0x68,0x20,0x41,0x73,0x73,0x75,0x72,0x61,0x6E,0x63, -0x65,0x20,0x45,0x56,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D, -0x30,0x36,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33, -0x31,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x6C,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49, -0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77, -0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x2B,0x30, -0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x48,0x69,0x67,0x68,0x20,0x41,0x73,0x73,0x75,0x72,0x61,0x6E,0x63,0x65,0x20, 
-0x45,0x56,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D, -0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01, -0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC6,0xCC,0xE5,0x73,0xE6, -0xFB,0xD4,0xBB,0xE5,0x2D,0x2D,0x32,0xA6,0xDF,0xE5,0x81,0x3F,0xC9,0xCD,0x25,0x49, -0xB6,0x71,0x2A,0xC3,0xD5,0x94,0x34,0x67,0xA2,0x0A,0x1C,0xB0,0x5F,0x69,0xA6,0x40, -0xB1,0xC4,0xB7,0xB2,0x8F,0xD0,0x98,0xA4,0xA9,0x41,0x59,0x3A,0xD3,0xDC,0x94,0xD6, -0x3C,0xDB,0x74,0x38,0xA4,0x4A,0xCC,0x4D,0x25,0x82,0xF7,0x4A,0xA5,0x53,0x12,0x38, -0xEE,0xF3,0x49,0x6D,0x71,0x91,0x7E,0x63,0xB6,0xAB,0xA6,0x5F,0xC3,0xA4,0x84,0xF8, -0x4F,0x62,0x51,0xBE,0xF8,0xC5,0xEC,0xDB,0x38,0x92,0xE3,0x06,0xE5,0x08,0x91,0x0C, -0xC4,0x28,0x41,0x55,0xFB,0xCB,0x5A,0x89,0x15,0x7E,0x71,0xE8,0x35,0xBF,0x4D,0x72, -0x09,0x3D,0xBE,0x3A,0x38,0x50,0x5B,0x77,0x31,0x1B,0x8D,0xB3,0xC7,0x24,0x45,0x9A, -0xA7,0xAC,0x6D,0x00,0x14,0x5A,0x04,0xB7,0xBA,0x13,0xEB,0x51,0x0A,0x98,0x41,0x41, -0x22,0x4E,0x65,0x61,0x87,0x81,0x41,0x50,0xA6,0x79,0x5C,0x89,0xDE,0x19,0x4A,0x57, -0xD5,0x2E,0xE6,0x5D,0x1C,0x53,0x2C,0x7E,0x98,0xCD,0x1A,0x06,0x16,0xA4,0x68,0x73, -0xD0,0x34,0x04,0x13,0x5C,0xA1,0x71,0xD3,0x5A,0x7C,0x55,0xDB,0x5E,0x64,0xE1,0x37, -0x87,0x30,0x56,0x04,0xE5,0x11,0xB4,0x29,0x80,0x12,0xF1,0x79,0x39,0x88,0xA2,0x02, -0x11,0x7C,0x27,0x66,0xB7,0x88,0xB7,0x78,0xF2,0xCA,0x0A,0xA8,0x38,0xAB,0x0A,0x64, -0xC2,0xBF,0x66,0x5D,0x95,0x84,0xC1,0xA1,0x25,0x1E,0x87,0x5D,0x1A,0x50,0x0B,0x20, -0x12,0xCC,0x41,0xBB,0x6E,0x0B,0x51,0x38,0xB8,0x4B,0xCB,0x02,0x03,0x01,0x00,0x01, -0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04, -0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05, -0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14, -0xB1,0x3E,0xC3,0x69,0x03,0xF8,0xBF,0x47,0x01,0xD4,0x98,0x26,0x1A,0x08,0x02,0xEF, -0x63,0x64,0x2B,0xC3,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80, 
-0x14,0xB1,0x3E,0xC3,0x69,0x03,0xF8,0xBF,0x47,0x01,0xD4,0x98,0x26,0x1A,0x08,0x02, -0xEF,0x63,0x64,0x2B,0xC3,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x1C,0x1A,0x06,0x97,0xDC,0xD7,0x9C, -0x9F,0x3C,0x88,0x66,0x06,0x08,0x57,0x21,0xDB,0x21,0x47,0xF8,0x2A,0x67,0xAA,0xBF, -0x18,0x32,0x76,0x40,0x10,0x57,0xC1,0x8A,0xF3,0x7A,0xD9,0x11,0x65,0x8E,0x35,0xFA, -0x9E,0xFC,0x45,0xB5,0x9E,0xD9,0x4C,0x31,0x4B,0xB8,0x91,0xE8,0x43,0x2C,0x8E,0xB3, -0x78,0xCE,0xDB,0xE3,0x53,0x79,0x71,0xD6,0xE5,0x21,0x94,0x01,0xDA,0x55,0x87,0x9A, -0x24,0x64,0xF6,0x8A,0x66,0xCC,0xDE,0x9C,0x37,0xCD,0xA8,0x34,0xB1,0x69,0x9B,0x23, -0xC8,0x9E,0x78,0x22,0x2B,0x70,0x43,0xE3,0x55,0x47,0x31,0x61,0x19,0xEF,0x58,0xC5, -0x85,0x2F,0x4E,0x30,0xF6,0xA0,0x31,0x16,0x23,0xC8,0xE7,0xE2,0x65,0x16,0x33,0xCB, -0xBF,0x1A,0x1B,0xA0,0x3D,0xF8,0xCA,0x5E,0x8B,0x31,0x8B,0x60,0x08,0x89,0x2D,0x0C, -0x06,0x5C,0x52,0xB7,0xC4,0xF9,0x0A,0x98,0xD1,0x15,0x5F,0x9F,0x12,0xBE,0x7C,0x36, -0x63,0x38,0xBD,0x44,0xA4,0x7F,0xE4,0x26,0x2B,0x0A,0xC4,0x97,0x69,0x0D,0xE9,0x8C, -0xE2,0xC0,0x10,0x57,0xB8,0xC8,0x76,0x12,0x91,0x55,0xF2,0x48,0x69,0xD8,0xBC,0x2A, -0x02,0x5B,0x0F,0x44,0xD4,0x20,0x31,0xDB,0xF4,0xBA,0x70,0x26,0x5D,0x90,0x60,0x9E, -0xBC,0x4B,0x17,0x09,0x2F,0xB4,0xCB,0x1E,0x43,0x68,0xC9,0x07,0x27,0xC1,0xD2,0x5C, -0xF7,0xEA,0x21,0xB9,0x68,0x12,0x9C,0x3C,0x9C,0xBF,0x9E,0xFC,0x80,0x5C,0x9B,0x63, -0xCD,0xEC,0x47,0xAA,0x25,0x27,0x67,0xA0,0x37,0xF3,0x00,0x82,0x7D,0x54,0xD7,0xA9, -0xF8,0xE9,0x2E,0x13,0xA3,0x77,0xE8,0x1F,0x4A, -}; - - -/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA */ -/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Universal CA */ - - -const unsigned char GeoTrust_Universal_CA_certificate[1388]={ -0x30,0x82,0x05,0x68,0x30,0x82,0x03,0x50,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16, 
-0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73, -0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x03,0x13, -0x15,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x55,0x6E,0x69,0x76,0x65,0x72, -0x73,0x61,0x6C,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,0x30,0x34, -0x30,0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x39,0x30,0x33,0x30,0x34,0x30, -0x35,0x30,0x30,0x30,0x30,0x5A,0x30,0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, -0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D, -0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1E,0x30, -0x1C,0x06,0x03,0x55,0x04,0x03,0x13,0x15,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74, -0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x30,0x82,0x02, -0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00, -0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xA6,0x15, -0x55,0xA0,0xA3,0xC6,0xE0,0x1F,0x8C,0x9D,0x21,0x50,0xD7,0xC1,0xBE,0x2B,0x5B,0xB5, -0xA4,0x9E,0xA1,0xD9,0x72,0x58,0xBD,0x00,0x1B,0x4C,0xBF,0x61,0xC9,0x14,0x1D,0x45, -0x82,0xAB,0xC6,0x1D,0x80,0xD6,0x3D,0xEB,0x10,0x9C,0x3A,0xAF,0x6D,0x24,0xF8,0xBC, -0x71,0x01,0x9E,0x06,0xF5,0x7C,0x5F,0x1E,0xC1,0x0E,0x55,0xCA,0x83,0x9A,0x59,0x30, -0xAE,0x19,0xCB,0x30,0x48,0x95,0xED,0x22,0x37,0x8D,0xF4,0x4A,0x9A,0x72,0x66,0x3E, -0xAD,0x95,0xC0,0xE0,0x16,0x00,0xE0,0x10,0x1F,0x2B,0x31,0x0E,0xD7,0x94,0x54,0xD3, -0x42,0x33,0xA0,0x34,0x1D,0x1E,0x45,0x76,0xDD,0x4F,0xCA,0x18,0x37,0xEC,0x85,0x15, -0x7A,0x19,0x08,0xFC,0xD5,0xC7,0x9C,0xF0,0xF2,0xA9,0x2E,0x10,0xA9,0x92,0xE6,0x3D, -0x58,0x3D,0xA9,0x16,0x68,0x3C,0x2F,0x75,0x21,0x18,0x7F,0x28,0x77,0xA5,0xE1,0x61, -0x17,0xB7,0xA6,0xE9,0xF8,0x1E,0x99,0xDB,0x73,0x6E,0xF4,0x0A,0xA2,0x21,0x6C,0xEE, -0xDA,0xAA,0x85,0x92,0x66,0xAF,0xF6,0x7A,0x6B,0x82,0xDA,0xBA,0x22,0x08,0x35,0x0F, -0xCF,0x42,0xF1,0x35,0xFA,0x6A,0xEE,0x7E,0x2B,0x25,0xCC,0x3A,0x11,0xE4,0x6D,0xAF, 
-0x73,0xB2,0x76,0x1D,0xAD,0xD0,0xB2,0x78,0x67,0x1A,0xA4,0x39,0x1C,0x51,0x0B,0x67, -0x56,0x83,0xFD,0x38,0x5D,0x0D,0xCE,0xDD,0xF0,0xBB,0x2B,0x96,0x1F,0xDE,0x7B,0x32, -0x52,0xFD,0x1D,0xBB,0xB5,0x06,0xA1,0xB2,0x21,0x5E,0xA5,0xD6,0x95,0x68,0x7F,0xF0, -0x99,0x9E,0xDC,0x45,0x08,0x3E,0xE7,0xD2,0x09,0x0D,0x35,0x94,0xDD,0x80,0x4E,0x53, -0x97,0xD7,0xB5,0x09,0x44,0x20,0x64,0x16,0x17,0x03,0x02,0x4C,0x53,0x0D,0x68,0xDE, -0xD5,0xAA,0x72,0x4D,0x93,0x6D,0x82,0x0E,0xDB,0x9C,0xBD,0xCF,0xB4,0xF3,0x5C,0x5D, -0x54,0x7A,0x69,0x09,0x96,0xD6,0xDB,0x11,0xC1,0x8D,0x75,0xA8,0xB4,0xCF,0x39,0xC8, -0xCE,0x3C,0xBC,0x24,0x7C,0xE6,0x62,0xCA,0xE1,0xBD,0x7D,0xA7,0xBD,0x57,0x65,0x0B, -0xE4,0xFE,0x25,0xED,0xB6,0x69,0x10,0xDC,0x28,0x1A,0x46,0xBD,0x01,0x1D,0xD0,0x97, -0xB5,0xE1,0x98,0x3B,0xC0,0x37,0x64,0xD6,0x3D,0x94,0xEE,0x0B,0xE1,0xF5,0x28,0xAE, -0x0B,0x56,0xBF,0x71,0x8B,0x23,0x29,0x41,0x8E,0x86,0xC5,0x4B,0x52,0x7B,0xD8,0x71, -0xAB,0x1F,0x8A,0x15,0xA6,0x3B,0x83,0x5A,0xD7,0x58,0x01,0x51,0xC6,0x4C,0x41,0xD9, -0x7F,0xD8,0x41,0x67,0x72,0xA2,0x28,0xDF,0x60,0x83,0xA9,0x9E,0xC8,0x7B,0xFC,0x53, -0x73,0x72,0x59,0xF5,0x93,0x7A,0x17,0x76,0x0E,0xCE,0xF7,0xE5,0x5C,0xD9,0x0B,0x55, -0x34,0xA2,0xAA,0x5B,0xB5,0x6A,0x54,0xE7,0x13,0xCA,0x57,0xEC,0x97,0x6D,0xF4,0x5E, -0x06,0x2F,0x45,0x8B,0x58,0xD4,0x23,0x16,0x92,0xE4,0x16,0x6E,0x28,0x63,0x59,0x30, -0xDF,0x50,0x01,0x9C,0x63,0x89,0x1A,0x9F,0xDB,0x17,0x94,0x82,0x70,0x37,0xC3,0x24, -0x9E,0x9A,0x47,0xD6,0x5A,0xCA,0x4E,0xA8,0x69,0x89,0x72,0x1F,0x91,0x6C,0xDB,0x7E, -0x9E,0x1B,0xAD,0xC7,0x1F,0x73,0xDD,0x2C,0x4F,0x19,0x65,0xFD,0x7F,0x93,0x40,0x10, -0x2E,0xD2,0xF0,0xED,0x3C,0x9E,0x2E,0x28,0x3E,0x69,0x26,0x33,0xC5,0x7B,0x02,0x03, -0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01, -0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04, -0x16,0x04,0x14,0xDA,0xBB,0x2E,0xAA,0xB0,0x0C,0xB8,0x88,0x26,0x51,0x74,0x5C,0x6D, -0x03,0xD3,0xC0,0xD8,0x8F,0x7A,0xD6,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18, 
-0x30,0x16,0x80,0x14,0xDA,0xBB,0x2E,0xAA,0xB0,0x0C,0xB8,0x88,0x26,0x51,0x74,0x5C, -0x6D,0x03,0xD3,0xC0,0xD8,0x8F,0x7A,0xD6,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01, -0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x31,0x78,0xE6,0xC7, -0xB5,0xDF,0xB8,0x94,0x40,0xC9,0x71,0xC4,0xA8,0x35,0xEC,0x46,0x1D,0xC2,0x85,0xF3, -0x28,0x58,0x86,0xB0,0x0B,0xFC,0x8E,0xB2,0x39,0x8F,0x44,0x55,0xAB,0x64,0x84,0x5C, -0x69,0xA9,0xD0,0x9A,0x38,0x3C,0xFA,0xE5,0x1F,0x35,0xE5,0x44,0xE3,0x80,0x79,0x94, -0x68,0xA4,0xBB,0xC4,0x9F,0x3D,0xE1,0x34,0xCD,0x30,0x46,0x8B,0x54,0x2B,0x95,0xA5, -0xEF,0xF7,0x3F,0x99,0x84,0xFD,0x35,0xE6,0xCF,0x31,0xC6,0xDC,0x6A,0xBF,0xA7,0xD7, -0x23,0x08,0xE1,0x98,0x5E,0xC3,0x5A,0x08,0x76,0xA9,0xA6,0xAF,0x77,0x2F,0xB7,0x60, -0xBD,0x44,0x46,0x6A,0xEF,0x97,0xFF,0x73,0x95,0xC1,0x8E,0xE8,0x93,0xFB,0xFD,0x31, -0xB7,0xEC,0x57,0x11,0x11,0x45,0x9B,0x30,0xF1,0x1A,0x88,0x39,0xC1,0x4F,0x3C,0xA7, -0x00,0xD5,0xC7,0xFC,0xAB,0x6D,0x80,0x22,0x70,0xA5,0x0C,0xE0,0x5D,0x04,0x29,0x02, -0xFB,0xCB,0xA0,0x91,0xD1,0x7C,0xD6,0xC3,0x7E,0x50,0xD5,0x9D,0x58,0xBE,0x41,0x38, -0xEB,0xB9,0x75,0x3C,0x15,0xD9,0x9B,0xC9,0x4A,0x83,0x59,0xC0,0xDA,0x53,0xFD,0x33, -0xBB,0x36,0x18,0x9B,0x85,0x0F,0x15,0xDD,0xEE,0x2D,0xAC,0x76,0x93,0xB9,0xD9,0x01, -0x8D,0x48,0x10,0xA8,0xFB,0xF5,0x38,0x86,0xF1,0xDB,0x0A,0xC6,0xBD,0x84,0xA3,0x23, -0x41,0xDE,0xD6,0x77,0x6F,0x85,0xD4,0x85,0x1C,0x50,0xE0,0xAE,0x51,0x8A,0xBA,0x8D, -0x3E,0x76,0xE2,0xB9,0xCA,0x27,0xF2,0x5F,0x9F,0xEF,0x6E,0x59,0x0D,0x06,0xD8,0x2B, -0x17,0xA4,0xD2,0x7C,0x6B,0xBB,0x5F,0x14,0x1A,0x48,0x8F,0x1A,0x4C,0xE7,0xB3,0x47, -0x1C,0x8E,0x4C,0x45,0x2B,0x20,0xEE,0x48,0xDF,0xE7,0xDD,0x09,0x8E,0x18,0xA8,0xDA, -0x40,0x8D,0x92,0x26,0x11,0x53,0x61,0x73,0x5D,0xEB,0xBD,0xE7,0xC4,0x4D,0x29,0x37, -0x61,0xEB,0xAC,0x39,0x2D,0x67,0x2E,0x16,0xD6,0xF5,0x00,0x83,0x85,0xA1,0xCC,0x7F, -0x76,0xC4,0x7D,0xE4,0xB7,0x4B,0x66,0xEF,0x03,0x45,0x60,0x69,0xB6,0x0C,0x52,0x96, 
-0x92,0x84,0x5E,0xA6,0xA3,0xB5,0xA4,0x3E,0x2B,0xD9,0xCC,0xD8,0x1B,0x47,0xAA,0xF2, -0x44,0xDA,0x4F,0xF9,0x03,0xE8,0xF0,0x14,0xCB,0x3F,0xF3,0x83,0xDE,0xD0,0xC1,0x54, -0xE3,0xB7,0xE8,0x0A,0x37,0x4D,0x8B,0x20,0x59,0x03,0x30,0x19,0xA1,0x2C,0xC8,0xBD, -0x11,0x1F,0xDF,0xAE,0xC9,0x4A,0xC5,0xF3,0x27,0x66,0x66,0x86,0xAC,0x68,0x91,0xFF, -0xD9,0xE6,0x53,0x1C,0x0F,0x8B,0x5C,0x69,0x65,0x0A,0x26,0xC8,0x1E,0x34,0xC3,0x5D, -0x51,0x7B,0xD7,0xA9,0x9C,0x06,0xA1,0x36,0xDD,0xD5,0x89,0x94,0xBC,0xD9,0xE4,0x2D, -0x0C,0x5E,0x09,0x6C,0x08,0x97,0x7C,0xA3,0x3D,0x7C,0x93,0xFF,0x3F,0xA1,0x14,0xA7, -0xCF,0xB5,0x5D,0xEB,0xDB,0xDB,0x1C,0xC4,0x76,0xDF,0x88,0xB9,0xBD,0x45,0x05,0x95, -0x1B,0xAE,0xFC,0x46,0x6A,0x4C,0xAF,0x48,0xE3,0xCE,0xAE,0x0F,0xD2,0x7E,0xEB,0xE6, -0x6C,0x9C,0x4F,0x81,0x6A,0x7A,0x64,0xAC,0xBB,0x3E,0xD5,0xE7,0xCB,0x76,0x2E,0xC5, -0xA7,0x48,0xC1,0x5C,0x90,0x0F,0xCB,0xC8,0x3F,0xFA,0xE6,0x32,0xE1,0x8D,0x1B,0x6F, -0xA4,0xE6,0x8E,0xD8,0xF9,0x29,0x48,0x8A,0xCE,0x73,0xFE,0x2C, -}; - - -/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */ -/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */ - - -const unsigned char COMODO_ECC_Certification_Authority_certificate[653]={ -0x30,0x82,0x02,0x89,0x30,0x82,0x02,0x0F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x1F, -0x47,0xAF,0xAA,0x62,0x00,0x70,0x50,0x54,0x4C,0x01,0x9E,0x9B,0x63,0x99,0x2A,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x85,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06, -0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61, -0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04, -0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03, -0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C, 
-0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13, -0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74, -0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72, -0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,0x33,0x30,0x36,0x30,0x30,0x30, -0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39, -0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, -0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72, -0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72, -0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F, -0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D, -0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B, -0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20, -0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F, -0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x76,0x30,0x10,0x06, -0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03, -0x62,0x00,0x04,0x03,0x47,0x7B,0x2F,0x75,0xC9,0x82,0x15,0x85,0xFB,0x75,0xE4,0x91, -0x16,0xD4,0xAB,0x62,0x99,0xF5,0x3E,0x52,0x0B,0x06,0xCE,0x41,0x00,0x7F,0x97,0xE1, -0x0A,0x24,0x3C,0x1D,0x01,0x04,0xEE,0x3D,0xD2,0x8D,0x09,0x97,0x0C,0xE0,0x75,0xE4, -0xFA,0xFB,0x77,0x8A,0x2A,0xF5,0x03,0x60,0x4B,0x36,0x8B,0x16,0x23,0x16,0xAD,0x09, -0x71,0xF4,0x4A,0xF4,0x28,0x50,0xB4,0xFE,0x88,0x1C,0x6E,0x3F,0x6C,0x2F,0x2F,0x09, -0x59,0x5B,0xA5,0x5B,0x0B,0x33,0x99,0xE2,0xC3,0x3D,0x89,0xF9,0x6A,0x2C,0xEF,0xB2, -0xD3,0x06,0xE9,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16, -0x04,0x14,0x75,0x71,0xA7,0x19,0x48,0x19,0xBC,0x9D,0x9D,0xEA,0x41,0x47,0xDF,0x94, -0xC4,0x48,0x77,0x99,0xD3,0x79,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, 
-0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF, -0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D, -0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00,0xEF,0x03,0x5B,0x7A,0xAC, -0xB7,0x78,0x0A,0x72,0xB7,0x88,0xDF,0xFF,0xB5,0x46,0x14,0x09,0x0A,0xFA,0xA0,0xE6, -0x7D,0x08,0xC6,0x1A,0x87,0xBD,0x18,0xA8,0x73,0xBD,0x26,0xCA,0x60,0x0C,0x9D,0xCE, -0x99,0x9F,0xCF,0x5C,0x0F,0x30,0xE1,0xBE,0x14,0x31,0xEA,0x02,0x30,0x14,0xF4,0x93, -0x3C,0x49,0xA7,0x33,0x7A,0x90,0x46,0x47,0xB3,0x63,0x7D,0x13,0x9B,0x4E,0xB7,0x6F, -0x18,0x37,0x80,0x53,0xFE,0xDD,0x20,0xE0,0x35,0x9A,0x36,0xD1,0xC7,0x01,0xB9,0xE6, -0xDC,0xDD,0xF3,0xFF,0x1D,0x2C,0x3A,0x16,0x57,0xD9,0x92,0x39,0xD6, -}; - - -/* subject:/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2009 Entrust, Inc. - for authorized use only/CN=Entrust Root Certification Authority - G2 */ -/* issuer :/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2009 Entrust, Inc. - for authorized use only/CN=Entrust Root Certification Authority - G2 */ - - -const unsigned char Entrust_Root_Certification_Authority___G2_certificate[1090]={ -0x30,0x82,0x04,0x3E,0x30,0x82,0x03,0x26,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x4A, -0x53,0x8C,0x28,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B, -0x05,0x00,0x30,0x81,0xBE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02, -0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,0x74, -0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03, -0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,0x20,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74, -0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x6C,0x65,0x67,0x61,0x6C,0x2D,0x74, -0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28, -0x63,0x29,0x20,0x32,0x30,0x30,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C, -0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x66,0x6F,0x72,0x20,0x61,0x75,0x74,0x68, 
-0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31, -0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x45,0x6E,0x74,0x72,0x75,0x73, -0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, -0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D, -0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x37,0x30,0x37,0x31,0x37,0x32, -0x35,0x35,0x34,0x5A,0x17,0x0D,0x33,0x30,0x31,0x32,0x30,0x37,0x31,0x37,0x35,0x35, -0x35,0x34,0x5A,0x30,0x81,0xBE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, -0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E, -0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06, -0x03,0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,0x20,0x77,0x77,0x77,0x2E,0x65,0x6E, -0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x6C,0x65,0x67,0x61,0x6C,0x2D, -0x74,0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30, -0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74, -0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x66,0x6F,0x72,0x20,0x61,0x75,0x74, -0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79, -0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x45,0x6E,0x74,0x72,0x75, -0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, -0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20, -0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, -0x02,0x82,0x01,0x01,0x00,0xBA,0x84,0xB6,0x72,0xDB,0x9E,0x0C,0x6B,0xE2,0x99,0xE9, -0x30,0x01,0xA7,0x76,0xEA,0x32,0xB8,0x95,0x41,0x1A,0xC9,0xDA,0x61,0x4E,0x58,0x72, -0xCF,0xFE,0xF6,0x82,0x79,0xBF,0x73,0x61,0x06,0x0A,0xA5,0x27,0xD8,0xB3,0x5F,0xD3, -0x45,0x4E,0x1C,0x72,0xD6,0x4E,0x32,0xF2,0x72,0x8A,0x0F,0xF7,0x83,0x19,0xD0,0x6A, 
-0x80,0x80,0x00,0x45,0x1E,0xB0,0xC7,0xE7,0x9A,0xBF,0x12,0x57,0x27,0x1C,0xA3,0x68, -0x2F,0x0A,0x87,0xBD,0x6A,0x6B,0x0E,0x5E,0x65,0xF3,0x1C,0x77,0xD5,0xD4,0x85,0x8D, -0x70,0x21,0xB4,0xB3,0x32,0xE7,0x8B,0xA2,0xD5,0x86,0x39,0x02,0xB1,0xB8,0xD2,0x47, -0xCE,0xE4,0xC9,0x49,0xC4,0x3B,0xA7,0xDE,0xFB,0x54,0x7D,0x57,0xBE,0xF0,0xE8,0x6E, -0xC2,0x79,0xB2,0x3A,0x0B,0x55,0xE2,0x50,0x98,0x16,0x32,0x13,0x5C,0x2F,0x78,0x56, -0xC1,0xC2,0x94,0xB3,0xF2,0x5A,0xE4,0x27,0x9A,0x9F,0x24,0xD7,0xC6,0xEC,0xD0,0x9B, -0x25,0x82,0xE3,0xCC,0xC2,0xC4,0x45,0xC5,0x8C,0x97,0x7A,0x06,0x6B,0x2A,0x11,0x9F, -0xA9,0x0A,0x6E,0x48,0x3B,0x6F,0xDB,0xD4,0x11,0x19,0x42,0xF7,0x8F,0x07,0xBF,0xF5, -0x53,0x5F,0x9C,0x3E,0xF4,0x17,0x2C,0xE6,0x69,0xAC,0x4E,0x32,0x4C,0x62,0x77,0xEA, -0xB7,0xE8,0xE5,0xBB,0x34,0xBC,0x19,0x8B,0xAE,0x9C,0x51,0xE7,0xB7,0x7E,0xB5,0x53, -0xB1,0x33,0x22,0xE5,0x6D,0xCF,0x70,0x3C,0x1A,0xFA,0xE2,0x9B,0x67,0xB6,0x83,0xF4, -0x8D,0xA5,0xAF,0x62,0x4C,0x4D,0xE0,0x58,0xAC,0x64,0x34,0x12,0x03,0xF8,0xB6,0x8D, -0x94,0x63,0x24,0xA4,0x71,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0E, -0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F, -0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30, -0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x6A,0x72,0x26,0x7A,0xD0,0x1E, -0xEF,0x7D,0xE7,0x3B,0x69,0x51,0xD4,0x6C,0x8D,0x9F,0x90,0x12,0x66,0xAB,0x30,0x0D, -0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01, -0x01,0x00,0x79,0x9F,0x1D,0x96,0xC6,0xB6,0x79,0x3F,0x22,0x8D,0x87,0xD3,0x87,0x03, -0x04,0x60,0x6A,0x6B,0x9A,0x2E,0x59,0x89,0x73,0x11,0xAC,0x43,0xD1,0xF5,0x13,0xFF, -0x8D,0x39,0x2B,0xC0,0xF2,0xBD,0x4F,0x70,0x8C,0xA9,0x2F,0xEA,0x17,0xC4,0x0B,0x54, -0x9E,0xD4,0x1B,0x96,0x98,0x33,0x3C,0xA8,0xAD,0x62,0xA2,0x00,0x76,0xAB,0x59,0x69, -0x6E,0x06,0x1D,0x7E,0xC4,0xB9,0x44,0x8D,0x98,0xAF,0x12,0xD4,0x61,0xDB,0x0A,0x19, -0x46,0x47,0xF3,0xEB,0xF7,0x63,0xC1,0x40,0x05,0x40,0xA5,0xD2,0xB7,0xF4,0xB5,0x9A, 
-0x36,0xBF,0xA9,0x88,0x76,0x88,0x04,0x55,0x04,0x2B,0x9C,0x87,0x7F,0x1A,0x37,0x3C, -0x7E,0x2D,0xA5,0x1A,0xD8,0xD4,0x89,0x5E,0xCA,0xBD,0xAC,0x3D,0x6C,0xD8,0x6D,0xAF, -0xD5,0xF3,0x76,0x0F,0xCD,0x3B,0x88,0x38,0x22,0x9D,0x6C,0x93,0x9A,0xC4,0x3D,0xBF, -0x82,0x1B,0x65,0x3F,0xA6,0x0F,0x5D,0xAA,0xFC,0xE5,0xB2,0x15,0xCA,0xB5,0xAD,0xC6, -0xBC,0x3D,0xD0,0x84,0xE8,0xEA,0x06,0x72,0xB0,0x4D,0x39,0x32,0x78,0xBF,0x3E,0x11, -0x9C,0x0B,0xA4,0x9D,0x9A,0x21,0xF3,0xF0,0x9B,0x0B,0x30,0x78,0xDB,0xC1,0xDC,0x87, -0x43,0xFE,0xBC,0x63,0x9A,0xCA,0xC5,0xC2,0x1C,0xC9,0xC7,0x8D,0xFF,0x3B,0x12,0x58, -0x08,0xE6,0xB6,0x3D,0xEC,0x7A,0x2C,0x4E,0xFB,0x83,0x96,0xCE,0x0C,0x3C,0x69,0x87, -0x54,0x73,0xA4,0x73,0xC2,0x93,0xFF,0x51,0x10,0xAC,0x15,0x54,0x01,0xD8,0xFC,0x05, -0xB1,0x89,0xA1,0x7F,0x74,0x83,0x9A,0x49,0xD7,0xDC,0x4E,0x7B,0x8A,0x48,0x6F,0x8B, -0x45,0xF6, -}; - - -/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G2 */ -/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G2 */ - - -const unsigned char DigiCert_Assured_ID_Root_G2_certificate[922]={ -0x30,0x82,0x03,0x96,0x30,0x82,0x02,0x7E,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0B, -0x93,0x1C,0x3A,0xD6,0x39,0x67,0xEA,0x67,0x23,0xBF,0xC3,0xAF,0x9A,0xF4,0x4B,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x65, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, -0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, -0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, -0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65, -0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F, -0x6F,0x74,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31, -0x32,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32, 
-0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, -0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44, -0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06, -0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65, -0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13, -0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65, -0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x32,0x30,0x82,0x01,0x22, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03, -0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xD9,0xE7,0x28, -0x2F,0x52,0x3F,0x36,0x72,0x49,0x88,0x93,0x34,0xF3,0xF8,0x6A,0x1E,0x31,0x54,0x80, -0x9F,0xAD,0x54,0x41,0xB5,0x47,0xDF,0x96,0xA8,0xD4,0xAF,0x80,0x2D,0xB9,0x0A,0xCF, -0x75,0xFD,0x89,0xA5,0x7D,0x24,0xFA,0xE3,0x22,0x0C,0x2B,0xBC,0x95,0x17,0x0B,0x33, -0xBF,0x19,0x4D,0x41,0x06,0x90,0x00,0xBD,0x0C,0x4D,0x10,0xFE,0x07,0xB5,0xE7,0x1C, -0x6E,0x22,0x55,0x31,0x65,0x97,0xBD,0xD3,0x17,0xD2,0x1E,0x62,0xF3,0xDB,0xEA,0x6C, -0x50,0x8C,0x3F,0x84,0x0C,0x96,0xCF,0xB7,0xCB,0x03,0xE0,0xCA,0x6D,0xA1,0x14,0x4C, -0x1B,0x89,0xDD,0xED,0x00,0xB0,0x52,0x7C,0xAF,0x91,0x6C,0xB1,0x38,0x13,0xD1,0xE9, -0x12,0x08,0xC0,0x00,0xB0,0x1C,0x2B,0x11,0xDA,0x77,0x70,0x36,0x9B,0xAE,0xCE,0x79, -0x87,0xDC,0x82,0x70,0xE6,0x09,0x74,0x70,0x55,0x69,0xAF,0xA3,0x68,0x9F,0xBF,0xDD, -0xB6,0x79,0xB3,0xF2,0x9D,0x70,0x29,0x55,0xF4,0xAB,0xFF,0x95,0x61,0xF3,0xC9,0x40, -0x6F,0x1D,0xD1,0xBE,0x93,0xBB,0xD3,0x88,0x2A,0xBB,0x9D,0xBF,0x72,0x5A,0x56,0x71, -0x3B,0x3F,0xD4,0xF3,0xD1,0x0A,0xFE,0x28,0xEF,0xA3,0xEE,0xD9,0x99,0xAF,0x03,0xD3, -0x8F,0x60,0xB7,0xF2,0x92,0xA1,0xB1,0xBD,0x89,0x89,0x1F,0x30,0xCD,0xC3,0xA6,0x2E, -0x62,0x33,0xAE,0x16,0x02,0x77,0x44,0x5A,0xE7,0x81,0x0A,0x3C,0xA7,0x44,0x2E,0x79, -0xB8,0x3F,0x04,0xBC,0x5C,0xA0,0x87,0xE1,0x1B,0xAF,0x51,0x8E,0xCD,0xEC,0x2C,0xFA, 
-0xF8,0xFE,0x6D,0xF0,0x3A,0x7C,0xAA,0x8B,0xE4,0x67,0x95,0x31,0x8D,0x02,0x03,0x01, -0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF, -0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01, -0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16, -0x04,0x14,0xCE,0xC3,0x4A,0xB9,0x99,0x55,0xF2,0xB8,0xDB,0x60,0xBF,0xA9,0x7E,0xBD, -0x56,0xB5,0x97,0x36,0xA7,0xD6,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xCA,0xA5,0x55,0x8C,0xE3,0xC8, -0x41,0x6E,0x69,0x27,0xA7,0x75,0x11,0xEF,0x3C,0x86,0x36,0x6F,0xD2,0x9D,0xC6,0x78, -0x38,0x1D,0x69,0x96,0xA2,0x92,0x69,0x2E,0x38,0x6C,0x9B,0x7D,0x04,0xD4,0x89,0xA5, -0xB1,0x31,0x37,0x8A,0xC9,0x21,0xCC,0xAB,0x6C,0xCD,0x8B,0x1C,0x9A,0xD6,0xBF,0x48, -0xD2,0x32,0x66,0xC1,0x8A,0xC0,0xF3,0x2F,0x3A,0xEF,0xC0,0xE3,0xD4,0x91,0x86,0xD1, -0x50,0xE3,0x03,0xDB,0x73,0x77,0x6F,0x4A,0x39,0x53,0xED,0xDE,0x26,0xC7,0xB5,0x7D, -0xAF,0x2B,0x42,0xD1,0x75,0x62,0xE3,0x4A,0x2B,0x02,0xC7,0x50,0x4B,0xE0,0x69,0xE2, -0x96,0x6C,0x0E,0x44,0x66,0x10,0x44,0x8F,0xAD,0x05,0xEB,0xF8,0x79,0xAC,0xA6,0x1B, -0xE8,0x37,0x34,0x9D,0x53,0xC9,0x61,0xAA,0xA2,0x52,0xAF,0x4A,0x70,0x16,0x86,0xC2, -0x3A,0xC8,0xB1,0x13,0x70,0x36,0xD8,0xCF,0xEE,0xF4,0x0A,0x34,0xD5,0x5B,0x4C,0xFD, -0x07,0x9C,0xA2,0xBA,0xD9,0x01,0x72,0x5C,0xF3,0x4D,0xC1,0xDD,0x0E,0xB1,0x1C,0x0D, -0xC4,0x63,0xBE,0xAD,0xF4,0x14,0xFB,0x89,0xEC,0xA2,0x41,0x0E,0x4C,0xCC,0xC8,0x57, -0x40,0xD0,0x6E,0x03,0xAA,0xCD,0x0C,0x8E,0x89,0x99,0x99,0x6C,0xF0,0x3C,0x30,0xAF, -0x38,0xDF,0x6F,0xBC,0xA3,0xBE,0x29,0x20,0x27,0xAB,0x74,0xFF,0x13,0x22,0x78,0xDE, -0x97,0x52,0x55,0x1E,0x83,0xB5,0x54,0x20,0x03,0xEE,0xAE,0xC0,0x4F,0x56,0xDE,0x37, -0xCC,0xC3,0x7F,0xAA,0x04,0x27,0xBB,0xD3,0x77,0xB8,0x62,0xDB,0x17,0x7C,0x9C,0x28, -0x22,0x13,0x73,0x6C,0xCF,0x26,0xF5,0x8A,0x29,0xE7, -}; - - -/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */ -/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */ - - -const 
unsigned char AffirmTrust_Commercial_certificate[848]={ -0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x77, -0x77,0x06,0x27,0x26,0xA9,0xB1,0x7C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, -0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, -0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B, -0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06, -0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, -0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,0x61,0x6C,0x30,0x1E,0x17,0x0D, -0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x17,0x0D,0x33, -0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x30,0x44,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06, -0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, -0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69, -0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69, -0x61,0x6C,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82, -0x01,0x01,0x00,0xF6,0x1B,0x4F,0x67,0x07,0x2B,0xA1,0x15,0xF5,0x06,0x22,0xCB,0x1F, -0x01,0xB2,0xE3,0x73,0x45,0x06,0x44,0x49,0x2C,0xBB,0x49,0x25,0x14,0xD6,0xCE,0xC3, -0xB7,0xAB,0x2C,0x4F,0xC6,0x41,0x32,0x94,0x57,0xFA,0x12,0xA7,0x5B,0x0E,0xE2,0x8F, -0x1F,0x1E,0x86,0x19,0xA7,0xAA,0xB5,0x2D,0xB9,0x5F,0x0D,0x8A,0xC2,0xAF,0x85,0x35, -0x79,0x32,0x2D,0xBB,0x1C,0x62,0x37,0xF2,0xB1,0x5B,0x4A,0x3D,0xCA,0xCD,0x71,0x5F, -0xE9,0x42,0xBE,0x94,0xE8,0xC8,0xDE,0xF9,0x22,0x48,0x64,0xC6,0xE5,0xAB,0xC6,0x2B, -0x6D,0xAD,0x05,0xF0,0xFA,0xD5,0x0B,0xCF,0x9A,0xE5,0xF0,0x50,0xA4,0x8B,0x3B,0x47, -0xA5,0x23,0x5B,0x7A,0x7A,0xF8,0x33,0x3F,0xB8,0xEF,0x99,0x97,0xE3,0x20,0xC1,0xD6, 
-0x28,0x89,0xCF,0x94,0xFB,0xB9,0x45,0xED,0xE3,0x40,0x17,0x11,0xD4,0x74,0xF0,0x0B, -0x31,0xE2,0x2B,0x26,0x6A,0x9B,0x4C,0x57,0xAE,0xAC,0x20,0x3E,0xBA,0x45,0x7A,0x05, -0xF3,0xBD,0x9B,0x69,0x15,0xAE,0x7D,0x4E,0x20,0x63,0xC4,0x35,0x76,0x3A,0x07,0x02, -0xC9,0x37,0xFD,0xC7,0x47,0xEE,0xE8,0xF1,0x76,0x1D,0x73,0x15,0xF2,0x97,0xA4,0xB5, -0xC8,0x7A,0x79,0xD9,0x42,0xAA,0x2B,0x7F,0x5C,0xFE,0xCE,0x26,0x4F,0xA3,0x66,0x81, -0x35,0xAF,0x44,0xBA,0x54,0x1E,0x1C,0x30,0x32,0x65,0x9D,0xE6,0x3C,0x93,0x5E,0x50, -0x4E,0x7A,0xE3,0x3A,0xD4,0x6E,0xCC,0x1A,0xFB,0xF9,0xD2,0x37,0xAE,0x24,0x2A,0xAB, -0x57,0x03,0x22,0x28,0x0D,0x49,0x75,0x7F,0xB7,0x28,0xDA,0x75,0xBF,0x8E,0xE3,0xDC, -0x0E,0x79,0x31,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03, -0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9D,0x93,0xC6,0x53,0x8B,0x5E,0xCA,0xAF,0x3F, -0x9F,0x1E,0x0F,0xE5,0x99,0x95,0xBC,0x24,0xF6,0x94,0x8F,0x30,0x0F,0x06,0x03,0x55, -0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03, -0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09, -0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00, -0x58,0xAC,0xF4,0x04,0x0E,0xCD,0xC0,0x0D,0xFF,0x0A,0xFD,0xD4,0xBA,0x16,0x5F,0x29, -0xBD,0x7B,0x68,0x99,0x58,0x49,0xD2,0xB4,0x1D,0x37,0x4D,0x7F,0x27,0x7D,0x46,0x06, -0x5D,0x43,0xC6,0x86,0x2E,0x3E,0x73,0xB2,0x26,0x7D,0x4F,0x93,0xA9,0xB6,0xC4,0x2A, -0x9A,0xAB,0x21,0x97,0x14,0xB1,0xDE,0x8C,0xD3,0xAB,0x89,0x15,0xD8,0x6B,0x24,0xD4, -0xF1,0x16,0xAE,0xD8,0xA4,0x5C,0xD4,0x7F,0x51,0x8E,0xED,0x18,0x01,0xB1,0x93,0x63, -0xBD,0xBC,0xF8,0x61,0x80,0x9A,0x9E,0xB1,0xCE,0x42,0x70,0xE2,0xA9,0x7D,0x06,0x25, -0x7D,0x27,0xA1,0xFE,0x6F,0xEC,0xB3,0x1E,0x24,0xDA,0xE3,0x4B,0x55,0x1A,0x00,0x3B, -0x35,0xB4,0x3B,0xD9,0xD7,0x5D,0x30,0xFD,0x81,0x13,0x89,0xF2,0xC2,0x06,0x2B,0xED, -0x67,0xC4,0x8E,0xC9,0x43,0xB2,0x5C,0x6B,0x15,0x89,0x02,0xBC,0x62,0xFC,0x4E,0xF2, -0xB5,0x33,0xAA,0xB2,0x6F,0xD3,0x0A,0xA2,0x50,0xE3,0xF6,0x3B,0xE8,0x2E,0x44,0xC2, 
-0xDB,0x66,0x38,0xA9,0x33,0x56,0x48,0xF1,0x6D,0x1B,0x33,0x8D,0x0D,0x8C,0x3F,0x60, -0x37,0x9D,0xD3,0xCA,0x6D,0x7E,0x34,0x7E,0x0D,0x9F,0x72,0x76,0x8B,0x1B,0x9F,0x72, -0xFD,0x52,0x35,0x41,0x45,0x02,0x96,0x2F,0x1C,0xB2,0x9A,0x73,0x49,0x21,0xB1,0x49, -0x47,0x45,0x47,0xB4,0xEF,0x6A,0x34,0x11,0xC9,0x4D,0x9A,0xCC,0x59,0xB7,0xD6,0x02, -0x9E,0x5A,0x4E,0x65,0xB5,0x94,0xAE,0x1B,0xDF,0x29,0xB0,0x16,0xF1,0xBF,0x00,0x9E, -0x07,0x3A,0x17,0x64,0xB5,0x04,0xB5,0x23,0x21,0x99,0x0A,0x95,0x3B,0x97,0x7C,0xEF, -}; - - -/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium */ -/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium */ - - -const unsigned char AffirmTrust_Premium_certificate[1354]={ -0x30,0x82,0x05,0x46,0x30,0x82,0x03,0x2E,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x6D, -0x8C,0x14,0x46,0xB1,0xA6,0x0A,0xEE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, -0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x41,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, -0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B, -0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C,0x30,0x1A,0x06, -0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, -0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30, -0x31,0x32,0x39,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x17,0x0D,0x34,0x30,0x31,0x32, -0x33,0x31,0x31,0x34,0x31,0x30,0x33,0x36,0x5A,0x30,0x41,0x31,0x0B,0x30,0x09,0x06, -0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04, -0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1C, -0x30,0x1A,0x06,0x03,0x55,0x04,0x03,0x0C,0x13,0x41,0x66,0x66,0x69,0x72,0x6D,0x54, -0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x30,0x82,0x02,0x22, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03, -0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xC4,0x12,0xDF, -0xA9,0x5F,0xFE,0x41,0xDD,0xDD,0xF5,0x9F,0x8A,0xE3,0xF6,0xAC,0xE1,0x3C,0x78,0x9A, 
-0xBC,0xD8,0xF0,0x7F,0x7A,0xA0,0x33,0x2A,0xDC,0x8D,0x20,0x5B,0xAE,0x2D,0x6F,0xE7, -0x93,0xD9,0x36,0x70,0x6A,0x68,0xCF,0x8E,0x51,0xA3,0x85,0x5B,0x67,0x04,0xA0,0x10, -0x24,0x6F,0x5D,0x28,0x82,0xC1,0x97,0x57,0xD8,0x48,0x29,0x13,0xB6,0xE1,0xBE,0x91, -0x4D,0xDF,0x85,0x0C,0x53,0x18,0x9A,0x1E,0x24,0xA2,0x4F,0x8F,0xF0,0xA2,0x85,0x0B, -0xCB,0xF4,0x29,0x7F,0xD2,0xA4,0x58,0xEE,0x26,0x4D,0xC9,0xAA,0xA8,0x7B,0x9A,0xD9, -0xFA,0x38,0xDE,0x44,0x57,0x15,0xE5,0xF8,0x8C,0xC8,0xD9,0x48,0xE2,0x0D,0x16,0x27, -0x1D,0x1E,0xC8,0x83,0x85,0x25,0xB7,0xBA,0xAA,0x55,0x41,0xCC,0x03,0x22,0x4B,0x2D, -0x91,0x8D,0x8B,0xE6,0x89,0xAF,0x66,0xC7,0xE9,0xFF,0x2B,0xE9,0x3C,0xAC,0xDA,0xD2, -0xB3,0xC3,0xE1,0x68,0x9C,0x89,0xF8,0x7A,0x00,0x56,0xDE,0xF4,0x55,0x95,0x6C,0xFB, -0xBA,0x64,0xDD,0x62,0x8B,0xDF,0x0B,0x77,0x32,0xEB,0x62,0xCC,0x26,0x9A,0x9B,0xBB, -0xAA,0x62,0x83,0x4C,0xB4,0x06,0x7A,0x30,0xC8,0x29,0xBF,0xED,0x06,0x4D,0x97,0xB9, -0x1C,0xC4,0x31,0x2B,0xD5,0x5F,0xBC,0x53,0x12,0x17,0x9C,0x99,0x57,0x29,0x66,0x77, -0x61,0x21,0x31,0x07,0x2E,0x25,0x49,0x9D,0x18,0xF2,0xEE,0xF3,0x2B,0x71,0x8C,0xB5, -0xBA,0x39,0x07,0x49,0x77,0xFC,0xEF,0x2E,0x92,0x90,0x05,0x8D,0x2D,0x2F,0x77,0x7B, -0xEF,0x43,0xBF,0x35,0xBB,0x9A,0xD8,0xF9,0x73,0xA7,0x2C,0xF2,0xD0,0x57,0xEE,0x28, -0x4E,0x26,0x5F,0x8F,0x90,0x68,0x09,0x2F,0xB8,0xF8,0xDC,0x06,0xE9,0x2E,0x9A,0x3E, -0x51,0xA7,0xD1,0x22,0xC4,0x0A,0xA7,0x38,0x48,0x6C,0xB3,0xF9,0xFF,0x7D,0xAB,0x86, -0x57,0xE3,0xBA,0xD6,0x85,0x78,0x77,0xBA,0x43,0xEA,0x48,0x7F,0xF6,0xD8,0xBE,0x23, -0x6D,0x1E,0xBF,0xD1,0x36,0x6C,0x58,0x5C,0xF1,0xEE,0xA4,0x19,0x54,0x1A,0xF5,0x03, -0xD2,0x76,0xE6,0xE1,0x8C,0xBD,0x3C,0xB3,0xD3,0x48,0x4B,0xE2,0xC8,0xF8,0x7F,0x92, -0xA8,0x76,0x46,0x9C,0x42,0x65,0x3E,0xA4,0x1E,0xC1,0x07,0x03,0x5A,0x46,0x2D,0xB8, -0x97,0xF3,0xB7,0xD5,0xB2,0x55,0x21,0xEF,0xBA,0xDC,0x4C,0x00,0x97,0xFB,0x14,0x95, -0x27,0x33,0xBF,0xE8,0x43,0x47,0x46,0xD2,0x08,0x99,0x16,0x60,0x3B,0x9A,0x7E,0xD2, -0xE6,0xED,0x38,0xEA,0xEC,0x01,0x1E,0x3C,0x48,0x56,0x49,0x09,0xC7,0x4C,0x37,0x00, 
-0x9E,0x88,0x0E,0xC0,0x73,0xE1,0x6F,0x66,0xE9,0x72,0x47,0x30,0x3E,0x10,0xE5,0x0B, -0x03,0xC9,0x9A,0x42,0x00,0x6C,0xC5,0x94,0x7E,0x61,0xC4,0x8A,0xDF,0x7F,0x82,0x1A, -0x0B,0x59,0xC4,0x59,0x32,0x77,0xB3,0xBC,0x60,0x69,0x56,0x39,0xFD,0xB4,0x06,0x7B, -0x2C,0xD6,0x64,0x36,0xD9,0xBD,0x48,0xED,0x84,0x1F,0x7E,0xA5,0x22,0x8F,0x2A,0xB8, -0x42,0xF4,0x82,0xB7,0xD4,0x53,0x90,0x78,0x4E,0x2D,0x1A,0xFD,0x81,0x6F,0x44,0xD7, -0x3B,0x01,0x74,0x96,0x42,0xE0,0x00,0xE2,0x2E,0x6B,0xEA,0xC5,0xEE,0x72,0xAC,0xBB, -0xBF,0xFE,0xEA,0xAA,0xA8,0xF8,0xDC,0xF6,0xB2,0x79,0x8A,0xB6,0x67,0x02,0x03,0x01, -0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, -0x14,0x9D,0xC0,0x67,0xA6,0x0C,0x22,0xD9,0x26,0xF5,0x45,0xAB,0xA6,0x65,0x52,0x11, -0x27,0xD8,0x45,0xAC,0x63,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, -0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, -0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0xB3,0x57,0x4D,0x10,0x62,0x4E, -0x3A,0xE4,0xAC,0xEA,0xB8,0x1C,0xAF,0x32,0x23,0xC8,0xB3,0x49,0x5A,0x51,0x9C,0x76, -0x28,0x8D,0x79,0xAA,0x57,0x46,0x17,0xD5,0xF5,0x52,0xF6,0xB7,0x44,0xE8,0x08,0x44, -0xBF,0x18,0x84,0xD2,0x0B,0x80,0xCD,0xC5,0x12,0xFD,0x00,0x55,0x05,0x61,0x87,0x41, -0xDC,0xB5,0x24,0x9E,0x3C,0xC4,0xD8,0xC8,0xFB,0x70,0x9E,0x2F,0x78,0x96,0x83,0x20, -0x36,0xDE,0x7C,0x0F,0x69,0x13,0x88,0xA5,0x75,0x36,0x98,0x08,0xA6,0xC6,0xDF,0xAC, -0xCE,0xE3,0x58,0xD6,0xB7,0x3E,0xDE,0xBA,0xF3,0xEB,0x34,0x40,0xD8,0xA2,0x81,0xF5, -0x78,0x3F,0x2F,0xD5,0xA5,0xFC,0xD9,0xA2,0xD4,0x5E,0x04,0x0E,0x17,0xAD,0xFE,0x41, -0xF0,0xE5,0xB2,0x72,0xFA,0x44,0x82,0x33,0x42,0xE8,0x2D,0x58,0xF7,0x56,0x8C,0x62, -0x3F,0xBA,0x42,0xB0,0x9C,0x0C,0x5C,0x7E,0x2E,0x65,0x26,0x5C,0x53,0x4F,0x00,0xB2, -0x78,0x7E,0xA1,0x0D,0x99,0x2D,0x8D,0xB8,0x1D,0x8E,0xA2,0xC4,0xB0,0xFD,0x60,0xD0, -0x30,0xA4,0x8E,0xC8,0x04,0x62,0xA9,0xC4,0xED,0x35,0xDE,0x7A,0x97,0xED,0x0E,0x38, 
-0x5E,0x92,0x2F,0x93,0x70,0xA5,0xA9,0x9C,0x6F,0xA7,0x7D,0x13,0x1D,0x7E,0xC6,0x08, -0x48,0xB1,0x5E,0x67,0xEB,0x51,0x08,0x25,0xE9,0xE6,0x25,0x6B,0x52,0x29,0x91,0x9C, -0xD2,0x39,0x73,0x08,0x57,0xDE,0x99,0x06,0xB4,0x5B,0x9D,0x10,0x06,0xE1,0xC2,0x00, -0xA8,0xB8,0x1C,0x4A,0x02,0x0A,0x14,0xD0,0xC1,0x41,0xCA,0xFB,0x8C,0x35,0x21,0x7D, -0x82,0x38,0xF2,0xA9,0x54,0x91,0x19,0x35,0x93,0x94,0x6D,0x6A,0x3A,0xC5,0xB2,0xD0, -0xBB,0x89,0x86,0x93,0xE8,0x9B,0xC9,0x0F,0x3A,0xA7,0x7A,0xB8,0xA1,0xF0,0x78,0x46, -0xFA,0xFC,0x37,0x2F,0xE5,0x8A,0x84,0xF3,0xDF,0xFE,0x04,0xD9,0xA1,0x68,0xA0,0x2F, -0x24,0xE2,0x09,0x95,0x06,0xD5,0x95,0xCA,0xE1,0x24,0x96,0xEB,0x7C,0xF6,0x93,0x05, -0xBB,0xED,0x73,0xE9,0x2D,0xD1,0x75,0x39,0xD7,0xE7,0x24,0xDB,0xD8,0x4E,0x5F,0x43, -0x8F,0x9E,0xD0,0x14,0x39,0xBF,0x55,0x70,0x48,0x99,0x57,0x31,0xB4,0x9C,0xEE,0x4A, -0x98,0x03,0x96,0x30,0x1F,0x60,0x06,0xEE,0x1B,0x23,0xFE,0x81,0x60,0x23,0x1A,0x47, -0x62,0x85,0xA5,0xCC,0x19,0x34,0x80,0x6F,0xB3,0xAC,0x1A,0xE3,0x9F,0xF0,0x7B,0x48, -0xAD,0xD5,0x01,0xD9,0x67,0xB6,0xA9,0x72,0x93,0xEA,0x2D,0x66,0xB5,0xB2,0xB8,0xE4, -0x3D,0x3C,0xB2,0xEF,0x4C,0x8C,0xEA,0xEB,0x07,0xBF,0xAB,0x35,0x9A,0x55,0x86,0xBC, -0x18,0xA6,0xB5,0xA8,0x5E,0xB4,0x83,0x6C,0x6B,0x69,0x40,0xD3,0x9F,0xDC,0xF1,0xC3, -0x69,0x6B,0xB9,0xE1,0x6D,0x09,0xF4,0xF1,0xAA,0x50,0x76,0x0A,0x7A,0x7D,0x7A,0x17, -0xA1,0x55,0x96,0x42,0x99,0x31,0x09,0xDD,0x60,0x11,0x8D,0x05,0x30,0x7E,0xE6,0x8E, -0x46,0xD1,0x9D,0x14,0xDA,0xC7,0x17,0xE4,0x05,0x96,0x8C,0xC4,0x24,0xB5,0x1B,0xCF, -0x14,0x07,0xB2,0x40,0xF8,0xA3,0x9E,0x41,0x86,0xBC,0x04,0xD0,0x6B,0x96,0xC8,0x2A, -0x80,0x34,0xFD,0xBF,0xEF,0x06,0xA3,0xDD,0x58,0xC5,0x85,0x3D,0x3E,0x8F,0xFE,0x9E, -0x29,0xE0,0xB6,0xB8,0x09,0x68,0x19,0x1C,0x18,0x43, -}; - - -/* subject:/C=US/ST=Arizona/L=Scottsdale/O=GoDaddy.com, Inc./CN=Go Daddy Root Certificate Authority - G2 */ -/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=GoDaddy.com, Inc./CN=Go Daddy Root Certificate Authority - G2 */ - - -const unsigned char 
Go_Daddy_Root_Certificate_Authority___G2_certificate[969]={ -0x30,0x82,0x03,0xC5,0x30,0x82,0x02,0xAD,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30, -0x81,0x83,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31, -0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E, -0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74, -0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13, -0x11,0x47,0x6F,0x44,0x61,0x64,0x64,0x79,0x2E,0x63,0x6F,0x6D,0x2C,0x20,0x49,0x6E, -0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,0x47,0x6F,0x20, -0x44,0x61,0x64,0x64,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79, -0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x39,0x30,0x31,0x30, -0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,0x33,0x31,0x32,0x33, -0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x83,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, -0x06,0x13,0x02,0x55,0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07, -0x41,0x72,0x69,0x7A,0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07, -0x13,0x0A,0x53,0x63,0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x1A,0x30,0x18, -0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x47,0x6F,0x44,0x61,0x64,0x64,0x79,0x2E,0x63, -0x6F,0x6D,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04, -0x03,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x52,0x6F,0x6F,0x74, -0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74, -0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82, -0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBF,0x71,0x62,0x08, 
-0xF1,0xFA,0x59,0x34,0xF7,0x1B,0xC9,0x18,0xA3,0xF7,0x80,0x49,0x58,0xE9,0x22,0x83, -0x13,0xA6,0xC5,0x20,0x43,0x01,0x3B,0x84,0xF1,0xE6,0x85,0x49,0x9F,0x27,0xEA,0xF6, -0x84,0x1B,0x4E,0xA0,0xB4,0xDB,0x70,0x98,0xC7,0x32,0x01,0xB1,0x05,0x3E,0x07,0x4E, -0xEE,0xF4,0xFA,0x4F,0x2F,0x59,0x30,0x22,0xE7,0xAB,0x19,0x56,0x6B,0xE2,0x80,0x07, -0xFC,0xF3,0x16,0x75,0x80,0x39,0x51,0x7B,0xE5,0xF9,0x35,0xB6,0x74,0x4E,0xA9,0x8D, -0x82,0x13,0xE4,0xB6,0x3F,0xA9,0x03,0x83,0xFA,0xA2,0xBE,0x8A,0x15,0x6A,0x7F,0xDE, -0x0B,0xC3,0xB6,0x19,0x14,0x05,0xCA,0xEA,0xC3,0xA8,0x04,0x94,0x3B,0x46,0x7C,0x32, -0x0D,0xF3,0x00,0x66,0x22,0xC8,0x8D,0x69,0x6D,0x36,0x8C,0x11,0x18,0xB7,0xD3,0xB2, -0x1C,0x60,0xB4,0x38,0xFA,0x02,0x8C,0xCE,0xD3,0xDD,0x46,0x07,0xDE,0x0A,0x3E,0xEB, -0x5D,0x7C,0xC8,0x7C,0xFB,0xB0,0x2B,0x53,0xA4,0x92,0x62,0x69,0x51,0x25,0x05,0x61, -0x1A,0x44,0x81,0x8C,0x2C,0xA9,0x43,0x96,0x23,0xDF,0xAC,0x3A,0x81,0x9A,0x0E,0x29, -0xC5,0x1C,0xA9,0xE9,0x5D,0x1E,0xB6,0x9E,0x9E,0x30,0x0A,0x39,0xCE,0xF1,0x88,0x80, -0xFB,0x4B,0x5D,0xCC,0x32,0xEC,0x85,0x62,0x43,0x25,0x34,0x02,0x56,0x27,0x01,0x91, -0xB4,0x3B,0x70,0x2A,0x3F,0x6E,0xB1,0xE8,0x9C,0x88,0x01,0x7D,0x9F,0xD4,0xF9,0xDB, -0x53,0x6D,0x60,0x9D,0xBF,0x2C,0xE7,0x58,0xAB,0xB8,0x5F,0x46,0xFC,0xCE,0xC4,0x1B, -0x03,0x3C,0x09,0xEB,0x49,0x31,0x5C,0x69,0x46,0xB3,0xE0,0x47,0x02,0x03,0x01,0x00, -0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, -0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, -0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, -0x14,0x3A,0x9A,0x85,0x07,0x10,0x67,0x28,0xB6,0xEF,0xF6,0xBD,0x05,0x41,0x6E,0x20, -0xC1,0x94,0xDA,0x0F,0xDE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0xDB,0x5D,0x79,0xD5,0xF9,0x97, -0x59,0x67,0x03,0x61,0xF1,0x7E,0x3B,0x06,0x31,0x75,0x2D,0xA1,0x20,0x8E,0x4F,0x65, -0x87,0xB4,0xF7,0xA6,0x9C,0xBC,0xD8,0xE9,0x2F,0xD0,0xDB,0x5A,0xEE,0xCF,0x74,0x8C, 
-0x73,0xB4,0x38,0x42,0xDA,0x05,0x7B,0xF8,0x02,0x75,0xB8,0xFD,0xA5,0xB1,0xD7,0xAE, -0xF6,0xD7,0xDE,0x13,0xCB,0x53,0x10,0x7E,0x8A,0x46,0xD1,0x97,0xFA,0xB7,0x2E,0x2B, -0x11,0xAB,0x90,0xB0,0x27,0x80,0xF9,0xE8,0x9F,0x5A,0xE9,0x37,0x9F,0xAB,0xE4,0xDF, -0x6C,0xB3,0x85,0x17,0x9D,0x3D,0xD9,0x24,0x4F,0x79,0x91,0x35,0xD6,0x5F,0x04,0xEB, -0x80,0x83,0xAB,0x9A,0x02,0x2D,0xB5,0x10,0xF4,0xD8,0x90,0xC7,0x04,0x73,0x40,0xED, -0x72,0x25,0xA0,0xA9,0x9F,0xEC,0x9E,0xAB,0x68,0x12,0x99,0x57,0xC6,0x8F,0x12,0x3A, -0x09,0xA4,0xBD,0x44,0xFD,0x06,0x15,0x37,0xC1,0x9B,0xE4,0x32,0xA3,0xED,0x38,0xE8, -0xD8,0x64,0xF3,0x2C,0x7E,0x14,0xFC,0x02,0xEA,0x9F,0xCD,0xFF,0x07,0x68,0x17,0xDB, -0x22,0x90,0x38,0x2D,0x7A,0x8D,0xD1,0x54,0xF1,0x69,0xE3,0x5F,0x33,0xCA,0x7A,0x3D, -0x7B,0x0A,0xE3,0xCA,0x7F,0x5F,0x39,0xE5,0xE2,0x75,0xBA,0xC5,0x76,0x18,0x33,0xCE, -0x2C,0xF0,0x2F,0x4C,0xAD,0xF7,0xB1,0xE7,0xCE,0x4F,0xA8,0xC4,0x9B,0x4A,0x54,0x06, -0xC5,0x7F,0x7D,0xD5,0x08,0x0F,0xE2,0x1C,0xFE,0x7E,0x17,0xB8,0xAC,0x5E,0xF6,0xD4, -0x16,0xB2,0x43,0x09,0x0C,0x4D,0xF6,0xA7,0x6B,0xB4,0x99,0x84,0x65,0xCA,0x7A,0x88, -0xE2,0xE2,0x44,0xBE,0x5C,0xF7,0xEA,0x1C,0xF5, + + +/* subject:/OU=GlobalSign ECC Root CA - R5/O=GlobalSign/CN=GlobalSign */ +/* issuer :/OU=GlobalSign ECC Root CA - R5/O=GlobalSign/CN=GlobalSign */ + + +const unsigned char GlobalSign_ECC_Root_CA___R5_certificate[546]={ +0x30,0x82,0x02,0x1E,0x30,0x82,0x01,0xA4,0xA0,0x03,0x02,0x01,0x02,0x02,0x11,0x60, +0x59,0x49,0xE0,0x26,0x2E,0xBB,0x55,0xF9,0x0A,0x77,0x8A,0x71,0xF9,0x4A,0xD8,0x6C, +0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x50,0x31,0x24, +0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, +0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20, +0x2D,0x20,0x52,0x35,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47, +0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55, +0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E, 
+0x17,0x0D,0x31,0x32,0x31,0x31,0x31,0x33,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17, +0x0D,0x33,0x38,0x30,0x31,0x31,0x39,0x30,0x33,0x31,0x34,0x30,0x37,0x5A,0x30,0x50, +0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61, +0x6C,0x53,0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43, +0x41,0x20,0x2D,0x20,0x52,0x35,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13, +0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06, +0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, +0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B, +0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x47,0x45,0x0E,0x96,0xFB,0x7D,0x5D,0xBF, +0xE9,0x39,0xD1,0x21,0xF8,0x9F,0x0B,0xB6,0xD5,0x7B,0x1E,0x92,0x3A,0x48,0x59,0x1C, +0xF0,0x62,0x31,0x2D,0xC0,0x7A,0x28,0xFE,0x1A,0xA7,0x5C,0xB3,0xB6,0xCC,0x97,0xE7, +0x45,0xD4,0x58,0xFA,0xD1,0x77,0x6D,0x43,0xA2,0xC0,0x87,0x65,0x34,0x0A,0x1F,0x7A, +0xDD,0xEB,0x3C,0x33,0xA1,0xC5,0x9D,0x4D,0xA4,0x6F,0x41,0x95,0x38,0x7F,0xC9,0x1E, +0x84,0xEB,0xD1,0x9E,0x49,0x92,0x87,0x94,0x87,0x0C,0x3A,0x85,0x4A,0x66,0x9F,0x9D, +0x59,0x93,0x4D,0x97,0x61,0x06,0x86,0x4A,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03, +0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03, +0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06, +0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x3D,0xE6,0x29,0x48,0x9B,0xEA,0x07,0xCA, +0x21,0x44,0x4A,0x26,0xDE,0x6E,0xDE,0xD2,0x83,0xD0,0x9F,0x59,0x30,0x0A,0x06,0x08, +0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00, +0xE5,0x69,0x12,0xC9,0x6E,0xDB,0xC6,0x31,0xBA,0x09,0x41,0xE1,0x97,0xF8,0xFB,0xFD, +0x9A,0xE2,0x7D,0x12,0xC9,0xED,0x7C,0x64,0xD3,0xCB,0x05,0x25,0x8B,0x56,0xD9,0xA0, +0xE7,0x5E,0x5D,0x4E,0x0B,0x83,0x9C,0x5B,0x76,0x29,0xA0,0x09,0x26,0x21,0x6A,0x62, +0x02,0x30,0x71,0xD2,0xB5,0x8F,0x5C,0xEA,0x3B,0xE1,0x78,0x09,0x85,0xA8,0x75,0x92, 
+0x3B,0xC8,0x5C,0xFD,0x48,0xEF,0x0D,0x74,0x22,0xA8,0x08,0xE2,0x6E,0xC5,0x49,0xCE, +0xC7,0x0C,0xBC,0xA7,0x61,0x69,0xF1,0xF7,0x3B,0xE1,0x2A,0xCB,0xF9,0x2B,0xF3,0x66, +0x90,0x37, }; -/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */ -/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Secure Certificate Services */ +/* subject:/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */ +/* issuer :/C=US/ST=New Jersey/L=Jersey City/O=The USERTRUST Network/CN=USERTrust ECC Certification Authority */ -const unsigned char Comodo_Secure_Services_root_certificate[1091]={ -0x30,0x82,0x04,0x3F,0x30,0x82,0x03,0x27,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, -0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, -0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, -0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, -0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43, -0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55, -0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30, -0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x5A, -0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A,0x30, -0x7E,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, -0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, -0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, -0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, 
-0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43, -0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x24,0x30,0x22,0x06,0x03,0x55, -0x04,0x03,0x0C,0x1B,0x53,0x65,0x63,0x75,0x72,0x65,0x20,0x43,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x30, -0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01, -0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00, -0xC0,0x71,0x33,0x82,0x8A,0xD0,0x70,0xEB,0x73,0x87,0x82,0x40,0xD5,0x1D,0xE4,0xCB, -0xC9,0x0E,0x42,0x90,0xF9,0xDE,0x34,0xB9,0xA1,0xBA,0x11,0xF4,0x25,0x85,0xF3,0xCC, -0x72,0x6D,0xF2,0x7B,0x97,0x6B,0xB3,0x07,0xF1,0x77,0x24,0x91,0x5F,0x25,0x8F,0xF6, -0x74,0x3D,0xE4,0x80,0xC2,0xF8,0x3C,0x0D,0xF3,0xBF,0x40,0xEA,0xF7,0xC8,0x52,0xD1, -0x72,0x6F,0xEF,0xC8,0xAB,0x41,0xB8,0x6E,0x2E,0x17,0x2A,0x95,0x69,0x0C,0xCD,0xD2, -0x1E,0x94,0x7B,0x2D,0x94,0x1D,0xAA,0x75,0xD7,0xB3,0x98,0xCB,0xAC,0xBC,0x64,0x53, -0x40,0xBC,0x8F,0xAC,0xAC,0x36,0xCB,0x5C,0xAD,0xBB,0xDD,0xE0,0x94,0x17,0xEC,0xD1, -0x5C,0xD0,0xBF,0xEF,0xA5,0x95,0xC9,0x90,0xC5,0xB0,0xAC,0xFB,0x1B,0x43,0xDF,0x7A, -0x08,0x5D,0xB7,0xB8,0xF2,0x40,0x1B,0x2B,0x27,0x9E,0x50,0xCE,0x5E,0x65,0x82,0x88, -0x8C,0x5E,0xD3,0x4E,0x0C,0x7A,0xEA,0x08,0x91,0xB6,0x36,0xAA,0x2B,0x42,0xFB,0xEA, -0xC2,0xA3,0x39,0xE5,0xDB,0x26,0x38,0xAD,0x8B,0x0A,0xEE,0x19,0x63,0xC7,0x1C,0x24, -0xDF,0x03,0x78,0xDA,0xE6,0xEA,0xC1,0x47,0x1A,0x0B,0x0B,0x46,0x09,0xDD,0x02,0xFC, -0xDE,0xCB,0x87,0x5F,0xD7,0x30,0x63,0x68,0xA1,0xAE,0xDC,0x32,0xA1,0xBA,0xBE,0xFE, -0x44,0xAB,0x68,0xB6,0xA5,0x17,0x15,0xFD,0xBD,0xD5,0xA7,0xA7,0x9A,0xE4,0x44,0x33, -0xE9,0x88,0x8E,0xFC,0xED,0x51,0xEB,0x93,0x71,0x4E,0xAD,0x01,0xE7,0x44,0x8E,0xAB, -0x2D,0xCB,0xA8,0xFE,0x01,0x49,0x48,0xF0,0xC0,0xDD,0xC7,0x68,0xD8,0x92,0xFE,0x3D, -0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC7,0x30,0x81,0xC4,0x30,0x1D,0x06,0x03,0x55, -0x1D,0x0E,0x04,0x16,0x04,0x14,0x3C,0xD8,0x93,0x88,0xC2,0xC0,0x82,0x09,0xCC,0x01, 
-0x99,0x06,0x93,0x20,0xE9,0x9E,0x70,0x09,0x63,0x4F,0x30,0x0E,0x06,0x03,0x55,0x1D, +const unsigned char USERTrust_ECC_Certification_Authority_certificate[659]={ +0x30,0x82,0x02,0x8F,0x30,0x82,0x02,0x15,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x5C, +0x8B,0x99,0xC5,0x5A,0x94,0xC5,0xD2,0x71,0x56,0xDE,0xCD,0x89,0x80,0xCC,0x26,0x30, +0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x88,0x31,0x0B, +0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,0x30,0x11,0x06, +0x03,0x55,0x04,0x08,0x13,0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,0x73,0x65,0x79, +0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,0x72,0x73,0x65, +0x79,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13, +0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E, +0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,0x04,0x03,0x13, +0x25,0x55,0x53,0x45,0x52,0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x43,0x43,0x20,0x43, +0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74, +0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x32,0x30,0x31, +0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32, +0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x88,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, +0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x08,0x13, +0x0A,0x4E,0x65,0x77,0x20,0x4A,0x65,0x72,0x73,0x65,0x79,0x31,0x14,0x30,0x12,0x06, +0x03,0x55,0x04,0x07,0x13,0x0B,0x4A,0x65,0x72,0x73,0x65,0x79,0x20,0x43,0x69,0x74, +0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20, +0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72, +0x6B,0x31,0x2E,0x30,0x2C,0x06,0x03,0x55,0x04,0x03,0x13,0x25,0x55,0x53,0x45,0x52, +0x54,0x72,0x75,0x73,0x74,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66, +0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74, 
+0x79,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05, +0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x1A,0xAC,0x54,0x5A,0xA9,0xF9,0x68, +0x23,0xE7,0x7A,0xD5,0x24,0x6F,0x53,0xC6,0x5A,0xD8,0x4B,0xAB,0xC6,0xD5,0xB6,0xD1, +0xE6,0x73,0x71,0xAE,0xDD,0x9C,0xD6,0x0C,0x61,0xFD,0xDB,0xA0,0x89,0x03,0xB8,0x05, +0x14,0xEC,0x57,0xCE,0xEE,0x5D,0x3F,0xE2,0x21,0xB3,0xCE,0xF7,0xD4,0x8A,0x79,0xE0, +0xA3,0x83,0x7E,0x2D,0x97,0xD0,0x61,0xC4,0xF1,0x99,0xDC,0x25,0x91,0x63,0xAB,0x7F, +0x30,0xA3,0xB4,0x70,0xE2,0xC7,0xA1,0x33,0x9C,0xF3,0xBF,0x2E,0x5C,0x53,0xB1,0x5F, +0xB3,0x7D,0x32,0x7F,0x8A,0x34,0xE3,0x79,0x79,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06, +0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x3A,0xE1,0x09,0x86,0xD4,0xCF,0x19,0xC2, +0x96,0x76,0x74,0x49,0x76,0xDC,0xE0,0x35,0xC6,0x63,0x63,0x9A,0x30,0x0E,0x06,0x03, +0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03, +0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06, +0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x30, +0x36,0x67,0xA1,0x16,0x08,0xDC,0xE4,0x97,0x00,0x41,0x1D,0x4E,0xBE,0xE1,0x63,0x01, +0xCF,0x3B,0xAA,0x42,0x11,0x64,0xA0,0x9D,0x94,0x39,0x02,0x11,0x79,0x5C,0x7B,0x1D, +0xFA,0x64,0xB9,0xEE,0x16,0x42,0xB3,0xBF,0x8A,0xC2,0x09,0xC4,0xEC,0xE4,0xB1,0x4D, +0x02,0x31,0x00,0xE9,0x2A,0x61,0x47,0x8C,0x52,0x4A,0x4B,0x4E,0x18,0x70,0xF6,0xD6, +0x44,0xD6,0x6E,0xF5,0x83,0xBA,0x6D,0x58,0xBD,0x24,0xD9,0x56,0x48,0xEA,0xEF,0xC4, +0xA2,0x46,0x81,0x88,0x6A,0x3A,0x46,0xD1,0xA9,0x9B,0x4D,0xC9,0x61,0xDA,0xD1,0x5D, +0x57,0x6A,0x18, +}; + + +/* subject:/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */ +/* issuer :/O=Entrust.net/OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/OU=(c) 1999 Entrust.net Limited/CN=Entrust.net Certification Authority (2048) */ + + +const unsigned char Entrust_net_Premium_2048_Secure_Server_CA_certificate[1070]={ +0x30,0x82,0x04,0x2A,0x30,0x82,0x03,0x12,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x38, +0x63,0xDE,0xF8,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05, +0x05,0x00,0x30,0x81,0xB4,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x13,0x0B, +0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x31,0x40,0x30,0x3E,0x06, +0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74,0x72,0x75,0x73, +0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53,0x5F,0x32,0x30,0x34,0x38,0x20,0x69, +0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79,0x20,0x72,0x65,0x66,0x2E,0x20,0x28, +0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69,0x61,0x62,0x2E,0x29,0x31,0x25,0x30, +0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28,0x63,0x29,0x20,0x31,0x39,0x39,0x39, +0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D, +0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06,0x03,0x55,0x04,0x03,0x13,0x2A,0x45, +0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x20,0x43,0x65,0x72,0x74,0x69, +0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69, +0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29,0x30,0x1E,0x17,0x0D,0x39,0x39,0x31, +0x32,0x32,0x34,0x31,0x37,0x35,0x30,0x35,0x31,0x5A,0x17,0x0D,0x32,0x39,0x30,0x37, +0x32,0x34,0x31,0x34,0x31,0x35,0x31,0x32,0x5A,0x30,0x81,0xB4,0x31,0x14,0x30,0x12, +0x06,0x03,0x55,0x04,0x0A,0x13,0x0B,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E, +0x65,0x74,0x31,0x40,0x30,0x3E,0x06,0x03,0x55,0x04,0x0B,0x14,0x37,0x77,0x77,0x77, +0x2E,0x65,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x43,0x50,0x53, +0x5F,0x32,0x30,0x34,0x38,0x20,0x69,0x6E,0x63,0x6F,0x72,0x70,0x2E,0x20,0x62,0x79, +0x20,0x72,0x65,0x66,0x2E,0x20,0x28,0x6C,0x69,0x6D,0x69,0x74,0x73,0x20,0x6C,0x69, +0x61,0x62,0x2E,0x29,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x0B,0x13,0x1C,0x28, 
+0x63,0x29,0x20,0x31,0x39,0x39,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E, +0x6E,0x65,0x74,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x33,0x30,0x31,0x06, +0x03,0x55,0x04,0x03,0x13,0x2A,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65, +0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20, +0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x28,0x32,0x30,0x34,0x38,0x29, +0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, +0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01, +0x00,0xAD,0x4D,0x4B,0xA9,0x12,0x86,0xB2,0xEA,0xA3,0x20,0x07,0x15,0x16,0x64,0x2A, +0x2B,0x4B,0xD1,0xBF,0x0B,0x4A,0x4D,0x8E,0xED,0x80,0x76,0xA5,0x67,0xB7,0x78,0x40, +0xC0,0x73,0x42,0xC8,0x68,0xC0,0xDB,0x53,0x2B,0xDD,0x5E,0xB8,0x76,0x98,0x35,0x93, +0x8B,0x1A,0x9D,0x7C,0x13,0x3A,0x0E,0x1F,0x5B,0xB7,0x1E,0xCF,0xE5,0x24,0x14,0x1E, +0xB1,0x81,0xA9,0x8D,0x7D,0xB8,0xCC,0x6B,0x4B,0x03,0xF1,0x02,0x0C,0xDC,0xAB,0xA5, +0x40,0x24,0x00,0x7F,0x74,0x94,0xA1,0x9D,0x08,0x29,0xB3,0x88,0x0B,0xF5,0x87,0x77, +0x9D,0x55,0xCD,0xE4,0xC3,0x7E,0xD7,0x6A,0x64,0xAB,0x85,0x14,0x86,0x95,0x5B,0x97, +0x32,0x50,0x6F,0x3D,0xC8,0xBA,0x66,0x0C,0xE3,0xFC,0xBD,0xB8,0x49,0xC1,0x76,0x89, +0x49,0x19,0xFD,0xC0,0xA8,0xBD,0x89,0xA3,0x67,0x2F,0xC6,0x9F,0xBC,0x71,0x19,0x60, +0xB8,0x2D,0xE9,0x2C,0xC9,0x90,0x76,0x66,0x7B,0x94,0xE2,0xAF,0x78,0xD6,0x65,0x53, +0x5D,0x3C,0xD6,0x9C,0xB2,0xCF,0x29,0x03,0xF9,0x2F,0xA4,0x50,0xB2,0xD4,0x48,0xCE, +0x05,0x32,0x55,0x8A,0xFD,0xB2,0x64,0x4C,0x0E,0xE4,0x98,0x07,0x75,0xDB,0x7F,0xDF, +0xB9,0x08,0x55,0x60,0x85,0x30,0x29,0xF9,0x7B,0x48,0xA4,0x69,0x86,0xE3,0x35,0x3F, +0x1E,0x86,0x5D,0x7A,0x7A,0x15,0xBD,0xEF,0x00,0x8E,0x15,0x22,0x54,0x17,0x00,0x90, +0x26,0x93,0xBC,0x0E,0x49,0x68,0x91,0xBF,0xF8,0x47,0xD3,0x9D,0x95,0x42,0xC1,0x0E, +0x4D,0xDF,0x6F,0x26,0xCF,0xC3,0x18,0x21,0x62,0x66,0x43,0x70,0xD6,0xD5,0xC0,0x07, +0xE1,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03,0x55,0x1D, 
0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D, -0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x81,0x06,0x03, -0x55,0x1D,0x1F,0x04,0x7A,0x30,0x78,0x30,0x3B,0xA0,0x39,0xA0,0x37,0x86,0x35,0x68, -0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F, -0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72, -0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73, -0x2E,0x63,0x72,0x6C,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74,0x74,0x70, -0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F,0x2E,0x6E,0x65, -0x74,0x2F,0x53,0x65,0x63,0x75,0x72,0x65,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, -0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82, -0x01,0x01,0x00,0x87,0x01,0x6D,0x23,0x1D,0x7E,0x5B,0x17,0x7D,0xC1,0x61,0x32,0xCF, -0x8F,0xE7,0xF3,0x8A,0x94,0x59,0x66,0xE0,0x9E,0x28,0xA8,0x5E,0xD3,0xB7,0xF4,0x34, -0xE6,0xAA,0x39,0xB2,0x97,0x16,0xC5,0x82,0x6F,0x32,0xA4,0xE9,0x8C,0xE7,0xAF,0xFD, -0xEF,0xC2,0xE8,0xB9,0x4B,0xAA,0xA3,0xF4,0xE6,0xDA,0x8D,0x65,0x21,0xFB,0xBA,0x80, -0xEB,0x26,0x28,0x85,0x1A,0xFE,0x39,0x8C,0xDE,0x5B,0x04,0x04,0xB4,0x54,0xF9,0xA3, -0x67,0x9E,0x41,0xFA,0x09,0x52,0xCC,0x05,0x48,0xA8,0xC9,0x3F,0x21,0x04,0x1E,0xCE, -0x48,0x6B,0xFC,0x85,0xE8,0xC2,0x7B,0xAF,0x7F,0xB7,0xCC,0xF8,0x5F,0x3A,0xFD,0x35, -0xC6,0x0D,0xEF,0x97,0xDC,0x4C,0xAB,0x11,0xE1,0x6B,0xCB,0x31,0xD1,0x6C,0xFB,0x48, -0x80,0xAB,0xDC,0x9C,0x37,0xB8,0x21,0x14,0x4B,0x0D,0x71,0x3D,0xEC,0x83,0x33,0x6E, -0xD1,0x6E,0x32,0x16,0xEC,0x98,0xC7,0x16,0x8B,0x59,0xA6,0x34,0xAB,0x05,0x57,0x2D, -0x93,0xF7,0xAA,0x13,0xCB,0xD2,0x13,0xE2,0xB7,0x2E,0x3B,0xCD,0x6B,0x50,0x17,0x09, -0x68,0x3E,0xB5,0x26,0x57,0xEE,0xB6,0xE0,0xB6,0xDD,0xB9,0x29,0x80,0x79,0x7D,0x8F, -0xA3,0xF0,0xA4,0x28,0xA4,0x15,0xC4,0x85,0xF4,0x27,0xD4,0x6B,0xBF,0xE5,0x5C,0xE4, 
-0x65,0x02,0x76,0x54,0xB4,0xE3,0x37,0x66,0x24,0xD3,0x19,0x61,0xC8,0x52,0x10,0xE5, -0x8B,0x37,0x9A,0xB9,0xA9,0xF9,0x1D,0xBF,0xEA,0x99,0x92,0x61,0x96,0xFF,0x01,0xCD, -0xA1,0x5F,0x0D,0xBC,0x71,0xBC,0x0E,0xAC,0x0B,0x1D,0x47,0x45,0x1D,0xC1,0xEC,0x7C, -0xEC,0xFD,0x29, +0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55, +0x1D,0x0E,0x04,0x16,0x04,0x14,0x55,0xE4,0x81,0xD1,0x11,0x80,0xBE,0xD8,0x89,0xB9, +0x08,0xA3,0x31,0xF9,0xA1,0x24,0x09,0x16,0xB9,0x70,0x30,0x0D,0x06,0x09,0x2A,0x86, +0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x3B,0x9B, +0x8F,0x56,0x9B,0x30,0xE7,0x53,0x99,0x7C,0x7A,0x79,0xA7,0x4D,0x97,0xD7,0x19,0x95, +0x90,0xFB,0x06,0x1F,0xCA,0x33,0x7C,0x46,0x63,0x8F,0x96,0x66,0x24,0xFA,0x40,0x1B, +0x21,0x27,0xCA,0xE6,0x72,0x73,0xF2,0x4F,0xFE,0x31,0x99,0xFD,0xC8,0x0C,0x4C,0x68, +0x53,0xC6,0x80,0x82,0x13,0x98,0xFA,0xB6,0xAD,0xDA,0x5D,0x3D,0xF1,0xCE,0x6E,0xF6, +0x15,0x11,0x94,0x82,0x0C,0xEE,0x3F,0x95,0xAF,0x11,0xAB,0x0F,0xD7,0x2F,0xDE,0x1F, +0x03,0x8F,0x57,0x2C,0x1E,0xC9,0xBB,0x9A,0x1A,0x44,0x95,0xEB,0x18,0x4F,0xA6,0x1F, +0xCD,0x7D,0x57,0x10,0x2F,0x9B,0x04,0x09,0x5A,0x84,0xB5,0x6E,0xD8,0x1D,0x3A,0xE1, +0xD6,0x9E,0xD1,0x6C,0x79,0x5E,0x79,0x1C,0x14,0xC5,0xE3,0xD0,0x4C,0x93,0x3B,0x65, +0x3C,0xED,0xDF,0x3D,0xBE,0xA6,0xE5,0x95,0x1A,0xC3,0xB5,0x19,0xC3,0xBD,0x5E,0x5B, +0xBB,0xFF,0x23,0xEF,0x68,0x19,0xCB,0x12,0x93,0x27,0x5C,0x03,0x2D,0x6F,0x30,0xD0, +0x1E,0xB6,0x1A,0xAC,0xDE,0x5A,0xF7,0xD1,0xAA,0xA8,0x27,0xA6,0xFE,0x79,0x81,0xC4, +0x79,0x99,0x33,0x57,0xBA,0x12,0xB0,0xA9,0xE0,0x42,0x6C,0x93,0xCA,0x56,0xDE,0xFE, +0x6D,0x84,0x0B,0x08,0x8B,0x7E,0x8D,0xEA,0xD7,0x98,0x21,0xC6,0xF3,0xE7,0x3C,0x79, +0x2F,0x5E,0x9C,0xD1,0x4C,0x15,0x8D,0xE1,0xEC,0x22,0x37,0xCC,0x9A,0x43,0x0B,0x97, +0xDC,0x80,0x90,0x8D,0xB3,0x67,0x9B,0x6F,0x48,0x08,0x15,0x56,0xCF,0xBF,0xF1,0x2B, +0x7C,0x5E,0x9A,0x76,0xE9,0x59,0x90,0xC5,0x7C,0x83,0x35,0x11,0x65,0x51, }; -/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Trusted Root G4 */ -/* issuer 
:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Trusted Root G4 */ +/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */ +/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Premium ECC */ -const unsigned char DigiCert_Trusted_Root_G4_certificate[1428]={ -0x30,0x82,0x05,0x90,0x30,0x82,0x03,0x78,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x05, -0x9B,0x1B,0x57,0x9E,0x8E,0x21,0x32,0xE2,0x39,0x07,0xBD,0xA7,0x77,0x75,0x5C,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x62, -0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, -0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, -0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, -0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, -0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x44,0x69,0x67,0x69,0x43,0x65, -0x72,0x74,0x20,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x52,0x6F,0x6F,0x74,0x20, -0x47,0x34,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30, -0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30, -0x30,0x5A,0x30,0x62,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55, -0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69, -0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04, -0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E, -0x63,0x6F,0x6D,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x44,0x69, -0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x52, -0x6F,0x6F,0x74,0x20,0x47,0x34,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82, -0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xBF,0xE6,0x90,0x73,0x68,0xDE,0xBB,0xE4,0x5D, -0x4A,0x3C,0x30,0x22,0x30,0x69,0x33,0xEC,0xC2,0xA7,0x25,0x2E,0xC9,0x21,0x3D,0xF2, 
-0x8A,0xD8,0x59,0xC2,0xE1,0x29,0xA7,0x3D,0x58,0xAB,0x76,0x9A,0xCD,0xAE,0x7B,0x1B, -0x84,0x0D,0xC4,0x30,0x1F,0xF3,0x1B,0xA4,0x38,0x16,0xEB,0x56,0xC6,0x97,0x6D,0x1D, -0xAB,0xB2,0x79,0xF2,0xCA,0x11,0xD2,0xE4,0x5F,0xD6,0x05,0x3C,0x52,0x0F,0x52,0x1F, -0xC6,0x9E,0x15,0xA5,0x7E,0xBE,0x9F,0xA9,0x57,0x16,0x59,0x55,0x72,0xAF,0x68,0x93, -0x70,0xC2,0xB2,0xBA,0x75,0x99,0x6A,0x73,0x32,0x94,0xD1,0x10,0x44,0x10,0x2E,0xDF, -0x82,0xF3,0x07,0x84,0xE6,0x74,0x3B,0x6D,0x71,0xE2,0x2D,0x0C,0x1B,0xEE,0x20,0xD5, -0xC9,0x20,0x1D,0x63,0x29,0x2D,0xCE,0xEC,0x5E,0x4E,0xC8,0x93,0xF8,0x21,0x61,0x9B, -0x34,0xEB,0x05,0xC6,0x5E,0xEC,0x5B,0x1A,0xBC,0xEB,0xC9,0xCF,0xCD,0xAC,0x34,0x40, -0x5F,0xB1,0x7A,0x66,0xEE,0x77,0xC8,0x48,0xA8,0x66,0x57,0x57,0x9F,0x54,0x58,0x8E, -0x0C,0x2B,0xB7,0x4F,0xA7,0x30,0xD9,0x56,0xEE,0xCA,0x7B,0x5D,0xE3,0xAD,0xC9,0x4F, -0x5E,0xE5,0x35,0xE7,0x31,0xCB,0xDA,0x93,0x5E,0xDC,0x8E,0x8F,0x80,0xDA,0xB6,0x91, -0x98,0x40,0x90,0x79,0xC3,0x78,0xC7,0xB6,0xB1,0xC4,0xB5,0x6A,0x18,0x38,0x03,0x10, -0x8D,0xD8,0xD4,0x37,0xA4,0x2E,0x05,0x7D,0x88,0xF5,0x82,0x3E,0x10,0x91,0x70,0xAB, -0x55,0x82,0x41,0x32,0xD7,0xDB,0x04,0x73,0x2A,0x6E,0x91,0x01,0x7C,0x21,0x4C,0xD4, -0xBC,0xAE,0x1B,0x03,0x75,0x5D,0x78,0x66,0xD9,0x3A,0x31,0x44,0x9A,0x33,0x40,0xBF, -0x08,0xD7,0x5A,0x49,0xA4,0xC2,0xE6,0xA9,0xA0,0x67,0xDD,0xA4,0x27,0xBC,0xA1,0x4F, -0x39,0xB5,0x11,0x58,0x17,0xF7,0x24,0x5C,0x46,0x8F,0x64,0xF7,0xC1,0x69,0x88,0x76, -0x98,0x76,0x3D,0x59,0x5D,0x42,0x76,0x87,0x89,0x97,0x69,0x7A,0x48,0xF0,0xE0,0xA2, -0x12,0x1B,0x66,0x9A,0x74,0xCA,0xDE,0x4B,0x1E,0xE7,0x0E,0x63,0xAE,0xE6,0xD4,0xEF, -0x92,0x92,0x3A,0x9E,0x3D,0xDC,0x00,0xE4,0x45,0x25,0x89,0xB6,0x9A,0x44,0x19,0x2B, -0x7E,0xC0,0x94,0xB4,0xD2,0x61,0x6D,0xEB,0x33,0xD9,0xC5,0xDF,0x4B,0x04,0x00,0xCC, -0x7D,0x1C,0x95,0xC3,0x8F,0xF7,0x21,0xB2,0xB2,0x11,0xB7,0xBB,0x7F,0xF2,0xD5,0x8C, -0x70,0x2C,0x41,0x60,0xAA,0xB1,0x63,0x18,0x44,0x95,0x1A,0x76,0x62,0x7E,0xF6,0x80, -0xB0,0xFB,0xE8,0x64,0xA6,0x33,0xD1,0x89,0x07,0xE1,0xBD,0xB7,0xE6,0x43,0xA4,0x18, 
-0xB8,0xA6,0x77,0x01,0xE1,0x0F,0x94,0x0C,0x21,0x1D,0xB2,0x54,0x29,0x25,0x89,0x6C, -0xE5,0x0E,0x52,0x51,0x47,0x74,0xBE,0x26,0xAC,0xB6,0x41,0x75,0xDE,0x7A,0xAC,0x5F, -0x8D,0x3F,0xC9,0xBC,0xD3,0x41,0x11,0x12,0x5B,0xE5,0x10,0x50,0xEB,0x31,0xC5,0xCA, -0x72,0x16,0x22,0x09,0xDF,0x7C,0x4C,0x75,0x3F,0x63,0xEC,0x21,0x5F,0xC4,0x20,0x51, -0x6B,0x6F,0xB1,0xAB,0x86,0x8B,0x4F,0xC2,0xD6,0x45,0x5F,0x9D,0x20,0xFC,0xA1,0x1E, -0xC5,0xC0,0x8F,0xA2,0xB1,0x7E,0x0A,0x26,0x99,0xF5,0xE4,0x69,0x2F,0x98,0x1D,0x2D, -0xF5,0xD9,0xA9,0xB2,0x1D,0xE5,0x1B,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40, -0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01, -0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01, -0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xEC,0xD7,0xE3,0x82, -0xD2,0x71,0x5D,0x64,0x4C,0xDF,0x2E,0x67,0x3F,0xE7,0xBA,0x98,0xAE,0x1C,0x0F,0x4F, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03, -0x82,0x02,0x01,0x00,0xBB,0x61,0xD9,0x7D,0xA9,0x6C,0xBE,0x17,0xC4,0x91,0x1B,0xC3, -0xA1,0xA2,0x00,0x8D,0xE3,0x64,0x68,0x0F,0x56,0xCF,0x77,0xAE,0x70,0xF9,0xFD,0x9A, -0x4A,0x99,0xB9,0xC9,0x78,0x5C,0x0C,0x0C,0x5F,0xE4,0xE6,0x14,0x29,0x56,0x0B,0x36, -0x49,0x5D,0x44,0x63,0xE0,0xAD,0x9C,0x96,0x18,0x66,0x1B,0x23,0x0D,0x3D,0x79,0xE9, -0x6D,0x6B,0xD6,0x54,0xF8,0xD2,0x3C,0xC1,0x43,0x40,0xAE,0x1D,0x50,0xF5,0x52,0xFC, -0x90,0x3B,0xBB,0x98,0x99,0x69,0x6B,0xC7,0xC1,0xA7,0xA8,0x68,0xA4,0x27,0xDC,0x9D, -0xF9,0x27,0xAE,0x30,0x85,0xB9,0xF6,0x67,0x4D,0x3A,0x3E,0x8F,0x59,0x39,0x22,0x53, -0x44,0xEB,0xC8,0x5D,0x03,0xCA,0xED,0x50,0x7A,0x7D,0x62,0x21,0x0A,0x80,0xC8,0x73, -0x66,0xD1,0xA0,0x05,0x60,0x5F,0xE8,0xA5,0xB4,0xA7,0xAF,0xA8,0xF7,0x6D,0x35,0x9C, -0x7C,0x5A,0x8A,0xD6,0xA2,0x38,0x99,0xF3,0x78,0x8B,0xF4,0x4D,0xD2,0x20,0x0B,0xDE, -0x04,0xEE,0x8C,0x9B,0x47,0x81,0x72,0x0D,0xC0,0x14,0x32,0xEF,0x30,0x59,0x2E,0xAE, -0xE0,0x71,0xF2,0x56,0xE4,0x6A,0x97,0x6F,0x92,0x50,0x6D,0x96,0x8D,0x68,0x7A,0x9A, 
-0xB2,0x36,0x14,0x7A,0x06,0xF2,0x24,0xB9,0x09,0x11,0x50,0xD7,0x08,0xB1,0xB8,0x89, -0x7A,0x84,0x23,0x61,0x42,0x29,0xE5,0xA3,0xCD,0xA2,0x20,0x41,0xD7,0xD1,0x9C,0x64, -0xD9,0xEA,0x26,0xA1,0x8B,0x14,0xD7,0x4C,0x19,0xB2,0x50,0x41,0x71,0x3D,0x3F,0x4D, -0x70,0x23,0x86,0x0C,0x4A,0xDC,0x81,0xD2,0xCC,0x32,0x94,0x84,0x0D,0x08,0x09,0x97, -0x1C,0x4F,0xC0,0xEE,0x6B,0x20,0x74,0x30,0xD2,0xE0,0x39,0x34,0x10,0x85,0x21,0x15, -0x01,0x08,0xE8,0x55,0x32,0xDE,0x71,0x49,0xD9,0x28,0x17,0x50,0x4D,0xE6,0xBE,0x4D, -0xD1,0x75,0xAC,0xD0,0xCA,0xFB,0x41,0xB8,0x43,0xA5,0xAA,0xD3,0xC3,0x05,0x44,0x4F, -0x2C,0x36,0x9B,0xE2,0xFA,0xE2,0x45,0xB8,0x23,0x53,0x6C,0x06,0x6F,0x67,0x55,0x7F, -0x46,0xB5,0x4C,0x3F,0x6E,0x28,0x5A,0x79,0x26,0xD2,0xA4,0xA8,0x62,0x97,0xD2,0x1E, -0xE2,0xED,0x4A,0x8B,0xBC,0x1B,0xFD,0x47,0x4A,0x0D,0xDF,0x67,0x66,0x7E,0xB2,0x5B, -0x41,0xD0,0x3B,0xE4,0xF4,0x3B,0xF4,0x04,0x63,0xE9,0xEF,0xC2,0x54,0x00,0x51,0xA0, -0x8A,0x2A,0xC9,0xCE,0x78,0xCC,0xD5,0xEA,0x87,0x04,0x18,0xB3,0xCE,0xAF,0x49,0x88, -0xAF,0xF3,0x92,0x99,0xB6,0xB3,0xE6,0x61,0x0F,0xD2,0x85,0x00,0xE7,0x50,0x1A,0xE4, -0x1B,0x95,0x9D,0x19,0xA1,0xB9,0x9C,0xB1,0x9B,0xB1,0x00,0x1E,0xEF,0xD0,0x0F,0x4F, -0x42,0x6C,0xC9,0x0A,0xBC,0xEE,0x43,0xFA,0x3A,0x71,0xA5,0xC8,0x4D,0x26,0xA5,0x35, -0xFD,0x89,0x5D,0xBC,0x85,0x62,0x1D,0x32,0xD2,0xA0,0x2B,0x54,0xED,0x9A,0x57,0xC1, -0xDB,0xFA,0x10,0xCF,0x19,0xB7,0x8B,0x4A,0x1B,0x8F,0x01,0xB6,0x27,0x95,0x53,0xE8, -0xB6,0x89,0x6D,0x5B,0xBC,0x68,0xD4,0x23,0xE8,0x8B,0x51,0xA2,0x56,0xF9,0xF0,0xA6, -0x80,0xA0,0xD6,0x1E,0xB3,0xBC,0x0F,0x0F,0x53,0x75,0x29,0xAA,0xEA,0x13,0x77,0xE4, -0xDE,0x8C,0x81,0x21,0xAD,0x07,0x10,0x47,0x11,0xAD,0x87,0x3D,0x07,0xD1,0x75,0xBC, -0xCF,0xF3,0x66,0x7E, +const unsigned char AffirmTrust_Premium_ECC_certificate[514]={ +0x30,0x82,0x01,0xFE,0x30,0x82,0x01,0x85,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x74, +0x97,0x25,0x8A,0xC7,0x3F,0x7A,0x54,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D, +0x04,0x03,0x03,0x30,0x45,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02, 
+0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66, +0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x20,0x30,0x1E,0x06,0x03,0x55,0x04, +0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x50, +0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43,0x43,0x30,0x1E,0x17,0x0D,0x31,0x30, +0x30,0x31,0x32,0x39,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x17,0x0D,0x34,0x30,0x31, +0x32,0x33,0x31,0x31,0x34,0x32,0x30,0x32,0x34,0x5A,0x30,0x45,0x31,0x0B,0x30,0x09, +0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55, +0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31, +0x20,0x30,0x1E,0x06,0x03,0x55,0x04,0x03,0x0C,0x17,0x41,0x66,0x66,0x69,0x72,0x6D, +0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x65,0x6D,0x69,0x75,0x6D,0x20,0x45,0x43, +0x43,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05, +0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x0D,0x30,0x5E,0x1B,0x15,0x9D,0x03, +0xD0,0xA1,0x79,0x35,0xB7,0x3A,0x3C,0x92,0x7A,0xCA,0x15,0x1C,0xCD,0x62,0xF3,0x9C, +0x26,0x5C,0x07,0x3D,0xE5,0x54,0xFA,0xA3,0xD6,0xCC,0x12,0xEA,0xF4,0x14,0x5F,0xE8, +0x8E,0x19,0xAB,0x2F,0x2E,0x48,0xE6,0xAC,0x18,0x43,0x78,0xAC,0xD0,0x37,0xC3,0xBD, +0xB2,0xCD,0x2C,0xE6,0x47,0xE2,0x1A,0xE6,0x63,0xB8,0x3D,0x2E,0x2F,0x78,0xC4,0x4F, +0xDB,0xF4,0x0F,0xA4,0x68,0x4C,0x55,0x72,0x6B,0x95,0x1D,0x4E,0x18,0x42,0x95,0x78, +0xCC,0x37,0x3C,0x91,0xE2,0x9B,0x65,0x2B,0x29,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06, +0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9A,0xAF,0x29,0x7A,0xC0,0x11,0x35,0x35, +0x26,0x51,0x30,0x00,0xC3,0x6A,0xFE,0x40,0xD5,0xAE,0xD6,0x3C,0x30,0x0F,0x06,0x03, +0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06, +0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0A,0x06, +0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30, +0x17,0x09,0xF3,0x87,0x88,0x50,0x5A,0xAF,0xC8,0xC0,0x42,0xBF,0x47,0x5F,0xF5,0x6C, 
+0x6A,0x86,0xE0,0xC4,0x27,0x74,0xE4,0x38,0x53,0xD7,0x05,0x7F,0x1B,0x34,0xE3,0xC6, +0x2F,0xB3,0xCA,0x09,0x3C,0x37,0x9D,0xD7,0xE7,0xB8,0x46,0xF1,0xFD,0xA1,0xE2,0x71, +0x02,0x30,0x42,0x59,0x87,0x43,0xD4,0x51,0xDF,0xBA,0xD3,0x09,0x32,0x5A,0xCE,0x88, +0x7E,0x57,0x3D,0x9C,0x5F,0x42,0x6B,0xF5,0x07,0x2D,0xB5,0xF0,0x82,0x93,0xF9,0x59, +0x6F,0xAE,0x64,0xFA,0x58,0xE5,0x8B,0x1E,0xE3,0x63,0xBE,0xB5,0x81,0xCD,0x6F,0x02, +0x8C,0x79, }; -/* subject:/OU=GlobalSign ECC Root CA - R5/O=GlobalSign/CN=GlobalSign */ -/* issuer :/OU=GlobalSign ECC Root CA - R5/O=GlobalSign/CN=GlobalSign */ +/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA */ -const unsigned char GlobalSign_ECC_Root_CA___R5_certificate[546]={ -0x30,0x82,0x02,0x1E,0x30,0x82,0x01,0xA4,0xA0,0x03,0x02,0x01,0x02,0x02,0x11,0x60, -0x59,0x49,0xE0,0x26,0x2E,0xBB,0x55,0xF9,0x0A,0x77,0x8A,0x71,0xF9,0x4A,0xD8,0x6C, -0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x50,0x31,0x24, -0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, -0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20, -0x2D,0x20,0x52,0x35,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47, -0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55, -0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E, -0x17,0x0D,0x31,0x32,0x31,0x31,0x31,0x33,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17, -0x0D,0x33,0x38,0x30,0x31,0x31,0x39,0x30,0x33,0x31,0x34,0x30,0x37,0x5A,0x30,0x50, -0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61, -0x6C,0x53,0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43, -0x41,0x20,0x2D,0x20,0x52,0x35,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13, -0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06, 
-0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, -0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B, -0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x47,0x45,0x0E,0x96,0xFB,0x7D,0x5D,0xBF, -0xE9,0x39,0xD1,0x21,0xF8,0x9F,0x0B,0xB6,0xD5,0x7B,0x1E,0x92,0x3A,0x48,0x59,0x1C, -0xF0,0x62,0x31,0x2D,0xC0,0x7A,0x28,0xFE,0x1A,0xA7,0x5C,0xB3,0xB6,0xCC,0x97,0xE7, -0x45,0xD4,0x58,0xFA,0xD1,0x77,0x6D,0x43,0xA2,0xC0,0x87,0x65,0x34,0x0A,0x1F,0x7A, -0xDD,0xEB,0x3C,0x33,0xA1,0xC5,0x9D,0x4D,0xA4,0x6F,0x41,0x95,0x38,0x7F,0xC9,0x1E, -0x84,0xEB,0xD1,0x9E,0x49,0x92,0x87,0x94,0x87,0x0C,0x3A,0x85,0x4A,0x66,0x9F,0x9D, -0x59,0x93,0x4D,0x97,0x61,0x06,0x86,0x4A,0xA3,0x42,0x30,0x40,0x30,0x0E,0x06,0x03, -0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03, -0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x3D,0xE6,0x29,0x48,0x9B,0xEA,0x07,0xCA, -0x21,0x44,0x4A,0x26,0xDE,0x6E,0xDE,0xD2,0x83,0xD0,0x9F,0x59,0x30,0x0A,0x06,0x08, -0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00, -0xE5,0x69,0x12,0xC9,0x6E,0xDB,0xC6,0x31,0xBA,0x09,0x41,0xE1,0x97,0xF8,0xFB,0xFD, -0x9A,0xE2,0x7D,0x12,0xC9,0xED,0x7C,0x64,0xD3,0xCB,0x05,0x25,0x8B,0x56,0xD9,0xA0, -0xE7,0x5E,0x5D,0x4E,0x0B,0x83,0x9C,0x5B,0x76,0x29,0xA0,0x09,0x26,0x21,0x6A,0x62, -0x02,0x30,0x71,0xD2,0xB5,0x8F,0x5C,0xEA,0x3B,0xE1,0x78,0x09,0x85,0xA8,0x75,0x92, -0x3B,0xC8,0x5C,0xFD,0x48,0xEF,0x0D,0x74,0x22,0xA8,0x08,0xE2,0x6E,0xC5,0x49,0xCE, -0xC7,0x0C,0xBC,0xA7,0x61,0x69,0xF1,0xF7,0x3B,0xE1,0x2A,0xCB,0xF9,0x2B,0xF3,0x66, -0x90,0x37, +const unsigned char DigiCert_High_Assurance_EV_Root_CA_certificate[969]={ +0x30,0x82,0x03,0xC5,0x30,0x82,0x02,0xAD,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x02, +0xAC,0x5C,0x26,0x6A,0x0B,0x40,0x9B,0x8F,0x0B,0x79,0xF2,0xAE,0x46,0x25,0x77,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x6C, 
+0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, +0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, +0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, +0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, +0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x44,0x69,0x67,0x69,0x43,0x65, +0x72,0x74,0x20,0x48,0x69,0x67,0x68,0x20,0x41,0x73,0x73,0x75,0x72,0x61,0x6E,0x63, +0x65,0x20,0x45,0x56,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D, +0x30,0x36,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33, +0x31,0x31,0x31,0x31,0x30,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x6C,0x31,0x0B, +0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06, +0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49, +0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77, +0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x2B,0x30, +0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, +0x20,0x48,0x69,0x67,0x68,0x20,0x41,0x73,0x73,0x75,0x72,0x61,0x6E,0x63,0x65,0x20, +0x45,0x56,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D, +0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01, +0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xC6,0xCC,0xE5,0x73,0xE6, +0xFB,0xD4,0xBB,0xE5,0x2D,0x2D,0x32,0xA6,0xDF,0xE5,0x81,0x3F,0xC9,0xCD,0x25,0x49, +0xB6,0x71,0x2A,0xC3,0xD5,0x94,0x34,0x67,0xA2,0x0A,0x1C,0xB0,0x5F,0x69,0xA6,0x40, +0xB1,0xC4,0xB7,0xB2,0x8F,0xD0,0x98,0xA4,0xA9,0x41,0x59,0x3A,0xD3,0xDC,0x94,0xD6, +0x3C,0xDB,0x74,0x38,0xA4,0x4A,0xCC,0x4D,0x25,0x82,0xF7,0x4A,0xA5,0x53,0x12,0x38, +0xEE,0xF3,0x49,0x6D,0x71,0x91,0x7E,0x63,0xB6,0xAB,0xA6,0x5F,0xC3,0xA4,0x84,0xF8, +0x4F,0x62,0x51,0xBE,0xF8,0xC5,0xEC,0xDB,0x38,0x92,0xE3,0x06,0xE5,0x08,0x91,0x0C, 
+0xC4,0x28,0x41,0x55,0xFB,0xCB,0x5A,0x89,0x15,0x7E,0x71,0xE8,0x35,0xBF,0x4D,0x72, +0x09,0x3D,0xBE,0x3A,0x38,0x50,0x5B,0x77,0x31,0x1B,0x8D,0xB3,0xC7,0x24,0x45,0x9A, +0xA7,0xAC,0x6D,0x00,0x14,0x5A,0x04,0xB7,0xBA,0x13,0xEB,0x51,0x0A,0x98,0x41,0x41, +0x22,0x4E,0x65,0x61,0x87,0x81,0x41,0x50,0xA6,0x79,0x5C,0x89,0xDE,0x19,0x4A,0x57, +0xD5,0x2E,0xE6,0x5D,0x1C,0x53,0x2C,0x7E,0x98,0xCD,0x1A,0x06,0x16,0xA4,0x68,0x73, +0xD0,0x34,0x04,0x13,0x5C,0xA1,0x71,0xD3,0x5A,0x7C,0x55,0xDB,0x5E,0x64,0xE1,0x37, +0x87,0x30,0x56,0x04,0xE5,0x11,0xB4,0x29,0x80,0x12,0xF1,0x79,0x39,0x88,0xA2,0x02, +0x11,0x7C,0x27,0x66,0xB7,0x88,0xB7,0x78,0xF2,0xCA,0x0A,0xA8,0x38,0xAB,0x0A,0x64, +0xC2,0xBF,0x66,0x5D,0x95,0x84,0xC1,0xA1,0x25,0x1E,0x87,0x5D,0x1A,0x50,0x0B,0x20, +0x12,0xCC,0x41,0xBB,0x6E,0x0B,0x51,0x38,0xB8,0x4B,0xCB,0x02,0x03,0x01,0x00,0x01, +0xA3,0x63,0x30,0x61,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04, +0x03,0x02,0x01,0x86,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05, +0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14, +0xB1,0x3E,0xC3,0x69,0x03,0xF8,0xBF,0x47,0x01,0xD4,0x98,0x26,0x1A,0x08,0x02,0xEF, +0x63,0x64,0x2B,0xC3,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80, +0x14,0xB1,0x3E,0xC3,0x69,0x03,0xF8,0xBF,0x47,0x01,0xD4,0x98,0x26,0x1A,0x08,0x02, +0xEF,0x63,0x64,0x2B,0xC3,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, +0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x1C,0x1A,0x06,0x97,0xDC,0xD7,0x9C, +0x9F,0x3C,0x88,0x66,0x06,0x08,0x57,0x21,0xDB,0x21,0x47,0xF8,0x2A,0x67,0xAA,0xBF, +0x18,0x32,0x76,0x40,0x10,0x57,0xC1,0x8A,0xF3,0x7A,0xD9,0x11,0x65,0x8E,0x35,0xFA, +0x9E,0xFC,0x45,0xB5,0x9E,0xD9,0x4C,0x31,0x4B,0xB8,0x91,0xE8,0x43,0x2C,0x8E,0xB3, +0x78,0xCE,0xDB,0xE3,0x53,0x79,0x71,0xD6,0xE5,0x21,0x94,0x01,0xDA,0x55,0x87,0x9A, +0x24,0x64,0xF6,0x8A,0x66,0xCC,0xDE,0x9C,0x37,0xCD,0xA8,0x34,0xB1,0x69,0x9B,0x23, +0xC8,0x9E,0x78,0x22,0x2B,0x70,0x43,0xE3,0x55,0x47,0x31,0x61,0x19,0xEF,0x58,0xC5, 
+0x85,0x2F,0x4E,0x30,0xF6,0xA0,0x31,0x16,0x23,0xC8,0xE7,0xE2,0x65,0x16,0x33,0xCB, +0xBF,0x1A,0x1B,0xA0,0x3D,0xF8,0xCA,0x5E,0x8B,0x31,0x8B,0x60,0x08,0x89,0x2D,0x0C, +0x06,0x5C,0x52,0xB7,0xC4,0xF9,0x0A,0x98,0xD1,0x15,0x5F,0x9F,0x12,0xBE,0x7C,0x36, +0x63,0x38,0xBD,0x44,0xA4,0x7F,0xE4,0x26,0x2B,0x0A,0xC4,0x97,0x69,0x0D,0xE9,0x8C, +0xE2,0xC0,0x10,0x57,0xB8,0xC8,0x76,0x12,0x91,0x55,0xF2,0x48,0x69,0xD8,0xBC,0x2A, +0x02,0x5B,0x0F,0x44,0xD4,0x20,0x31,0xDB,0xF4,0xBA,0x70,0x26,0x5D,0x90,0x60,0x9E, +0xBC,0x4B,0x17,0x09,0x2F,0xB4,0xCB,0x1E,0x43,0x68,0xC9,0x07,0x27,0xC1,0xD2,0x5C, +0xF7,0xEA,0x21,0xB9,0x68,0x12,0x9C,0x3C,0x9C,0xBF,0x9E,0xFC,0x80,0x5C,0x9B,0x63, +0xCD,0xEC,0x47,0xAA,0x25,0x27,0x67,0xA0,0x37,0xF3,0x00,0x82,0x7D,0x54,0xD7,0xA9, +0xF8,0xE9,0x2E,0x13,0xA3,0x77,0xE8,0x1F,0x4A, }; -/* subject:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */ -/* issuer :/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN-USERFirst-Hardware */ +/* subject:/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2009 Entrust, Inc. - for authorized use only/CN=Entrust Root Certification Authority - G2 */ +/* issuer :/C=US/O=Entrust, Inc./OU=See www.entrust.net/legal-terms/OU=(c) 2009 Entrust, Inc. 
- for authorized use only/CN=Entrust Root Certification Authority - G2 */ -const unsigned char UTN_USERFirst_Hardware_Root_CA_certificate[1144]={ -0x30,0x82,0x04,0x74,0x30,0x82,0x03,0x5C,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x44, -0xBE,0x0C,0x8B,0x50,0x00,0x24,0xB4,0x11,0xD3,0x36,0x2A,0xFE,0x65,0x0A,0xFD,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81, -0x97,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06, -0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20, -0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54, -0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74, -0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68, -0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72, -0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03, -0x13,0x16,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D, -0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x37, -0x30,0x39,0x31,0x38,0x31,0x30,0x34,0x32,0x5A,0x17,0x0D,0x31,0x39,0x30,0x37,0x30, -0x39,0x31,0x38,0x31,0x39,0x32,0x32,0x5A,0x30,0x81,0x97,0x31,0x0B,0x30,0x09,0x06, -0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, -0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x07,0x13,0x0E, -0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20,0x43,0x69,0x74,0x79,0x31,0x1E, -0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45, -0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21, -0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F, -0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F, 
-0x6D,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x55,0x54,0x4E,0x2D, -0x55,0x53,0x45,0x52,0x46,0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61, -0x72,0x65,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82, -0x01,0x01,0x00,0xB1,0xF7,0xC3,0x38,0x3F,0xB4,0xA8,0x7F,0xCF,0x39,0x82,0x51,0x67, -0xD0,0x6D,0x9F,0xD2,0xFF,0x58,0xF3,0xE7,0x9F,0x2B,0xEC,0x0D,0x89,0x54,0x99,0xB9, -0x38,0x99,0x16,0xF7,0xE0,0x21,0x79,0x48,0xC2,0xBB,0x61,0x74,0x12,0x96,0x1D,0x3C, -0x6A,0x72,0xD5,0x3C,0x10,0x67,0x3A,0x39,0xED,0x2B,0x13,0xCD,0x66,0xEB,0x95,0x09, -0x33,0xA4,0x6C,0x97,0xB1,0xE8,0xC6,0xEC,0xC1,0x75,0x79,0x9C,0x46,0x5E,0x8D,0xAB, -0xD0,0x6A,0xFD,0xB9,0x2A,0x55,0x17,0x10,0x54,0xB3,0x19,0xF0,0x9A,0xF6,0xF1,0xB1, -0x5D,0xB6,0xA7,0x6D,0xFB,0xE0,0x71,0x17,0x6B,0xA2,0x88,0xFB,0x00,0xDF,0xFE,0x1A, -0x31,0x77,0x0C,0x9A,0x01,0x7A,0xB1,0x32,0xE3,0x2B,0x01,0x07,0x38,0x6E,0xC3,0xA5, -0x5E,0x23,0xBC,0x45,0x9B,0x7B,0x50,0xC1,0xC9,0x30,0x8F,0xDB,0xE5,0x2B,0x7A,0xD3, -0x5B,0xFB,0x33,0x40,0x1E,0xA0,0xD5,0x98,0x17,0xBC,0x8B,0x87,0xC3,0x89,0xD3,0x5D, -0xA0,0x8E,0xB2,0xAA,0xAA,0xF6,0x8E,0x69,0x88,0x06,0xC5,0xFA,0x89,0x21,0xF3,0x08, -0x9D,0x69,0x2E,0x09,0x33,0x9B,0x29,0x0D,0x46,0x0F,0x8C,0xCC,0x49,0x34,0xB0,0x69, -0x51,0xBD,0xF9,0x06,0xCD,0x68,0xAD,0x66,0x4C,0xBC,0x3E,0xAC,0x61,0xBD,0x0A,0x88, -0x0E,0xC8,0xDF,0x3D,0xEE,0x7C,0x04,0x4C,0x9D,0x0A,0x5E,0x6B,0x91,0xD6,0xEE,0xC7, -0xED,0x28,0x8D,0xAB,0x4D,0x87,0x89,0x73,0xD0,0x6E,0xA4,0xD0,0x1E,0x16,0x8B,0x14, -0xE1,0x76,0x44,0x03,0x7F,0x63,0xAC,0xE4,0xCD,0x49,0x9C,0xC5,0x92,0xF4,0xAB,0x32, -0xA1,0x48,0x5B,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xB9,0x30,0x81,0xB6,0x30,0x0B, -0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03,0x02,0x01,0xC6,0x30,0x0F,0x06,0x03,0x55, -0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03, -0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xA1,0x72,0x5F,0x26,0x1B,0x28,0x98,0x43,0x95, 
-0x5D,0x07,0x37,0xD5,0x85,0x96,0x9D,0x4B,0xD2,0xC3,0x45,0x30,0x44,0x06,0x03,0x55, -0x1D,0x1F,0x04,0x3D,0x30,0x3B,0x30,0x39,0xA0,0x37,0xA0,0x35,0x86,0x33,0x68,0x74, -0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75, -0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x55,0x54,0x4E,0x2D,0x55,0x53,0x45,0x52,0x46, -0x69,0x72,0x73,0x74,0x2D,0x48,0x61,0x72,0x64,0x77,0x61,0x72,0x65,0x2E,0x63,0x72, -0x6C,0x30,0x31,0x06,0x03,0x55,0x1D,0x25,0x04,0x2A,0x30,0x28,0x06,0x08,0x2B,0x06, -0x01,0x05,0x05,0x07,0x03,0x01,0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x05, -0x06,0x08,0x2B,0x06,0x01,0x05,0x05,0x07,0x03,0x06,0x06,0x08,0x2B,0x06,0x01,0x05, -0x05,0x07,0x03,0x07,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, -0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x47,0x19,0x0F,0xDE,0x74,0xC6,0x99,0x97, -0xAF,0xFC,0xAD,0x28,0x5E,0x75,0x8E,0xEB,0x2D,0x67,0xEE,0x4E,0x7B,0x2B,0xD7,0x0C, -0xFF,0xF6,0xDE,0xCB,0x55,0xA2,0x0A,0xE1,0x4C,0x54,0x65,0x93,0x60,0x6B,0x9F,0x12, -0x9C,0xAD,0x5E,0x83,0x2C,0xEB,0x5A,0xAE,0xC0,0xE4,0x2D,0xF4,0x00,0x63,0x1D,0xB8, -0xC0,0x6C,0xF2,0xCF,0x49,0xBB,0x4D,0x93,0x6F,0x06,0xA6,0x0A,0x22,0xB2,0x49,0x62, -0x08,0x4E,0xFF,0xC8,0xC8,0x14,0xB2,0x88,0x16,0x5D,0xE7,0x01,0xE4,0x12,0x95,0xE5, -0x45,0x34,0xB3,0x8B,0x69,0xBD,0xCF,0xB4,0x85,0x8F,0x75,0x51,0x9E,0x7D,0x3A,0x38, -0x3A,0x14,0x48,0x12,0xC6,0xFB,0xA7,0x3B,0x1A,0x8D,0x0D,0x82,0x40,0x07,0xE8,0x04, -0x08,0x90,0xA1,0x89,0xCB,0x19,0x50,0xDF,0xCA,0x1C,0x01,0xBC,0x1D,0x04,0x19,0x7B, -0x10,0x76,0x97,0x3B,0xEE,0x90,0x90,0xCA,0xC4,0x0E,0x1F,0x16,0x6E,0x75,0xEF,0x33, -0xF8,0xD3,0x6F,0x5B,0x1E,0x96,0xE3,0xE0,0x74,0x77,0x74,0x7B,0x8A,0xA2,0x6E,0x2D, -0xDD,0x76,0xD6,0x39,0x30,0x82,0xF0,0xAB,0x9C,0x52,0xF2,0x2A,0xC7,0xAF,0x49,0x5E, -0x7E,0xC7,0x68,0xE5,0x82,0x81,0xC8,0x6A,0x27,0xF9,0x27,0x88,0x2A,0xD5,0x58,0x50, -0x95,0x1F,0xF0,0x3B,0x1C,0x57,0xBB,0x7D,0x14,0x39,0x62,0x2B,0x9A,0xC9,0x94,0x92, -0x2A,0xA3,0x22,0x0C,0xFF,0x89,0x26,0x7D,0x5F,0x23,0x2B,0x47,0xD7,0x15,0x1D,0xA9, 
-0x6A,0x9E,0x51,0x0D,0x2A,0x51,0x9E,0x81,0xF9,0xD4,0x3B,0x5E,0x70,0x12,0x7F,0x10, -0x32,0x9C,0x1E,0xBB,0x9D,0xF8,0x66,0xA8, +const unsigned char Entrust_Root_Certification_Authority___G2_certificate[1090]={ +0x30,0x82,0x04,0x3E,0x30,0x82,0x03,0x26,0xA0,0x03,0x02,0x01,0x02,0x02,0x04,0x4A, +0x53,0x8C,0x28,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B, +0x05,0x00,0x30,0x81,0xBE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02, +0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E,0x74, +0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x03, +0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,0x20,0x77,0x77,0x77,0x2E,0x65,0x6E,0x74, +0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x6C,0x65,0x67,0x61,0x6C,0x2D,0x74, +0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28, +0x63,0x29,0x20,0x32,0x30,0x30,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74,0x2C, +0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x66,0x6F,0x72,0x20,0x61,0x75,0x74,0x68, +0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31, +0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x45,0x6E,0x74,0x72,0x75,0x73, +0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, +0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D, +0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x37,0x30,0x37,0x31,0x37,0x32, +0x35,0x35,0x34,0x5A,0x17,0x0D,0x33,0x30,0x31,0x32,0x30,0x37,0x31,0x37,0x35,0x35, +0x35,0x34,0x5A,0x30,0x81,0xBE,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, +0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x45,0x6E, +0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06, +0x03,0x55,0x04,0x0B,0x13,0x1F,0x53,0x65,0x65,0x20,0x77,0x77,0x77,0x2E,0x65,0x6E, +0x74,0x72,0x75,0x73,0x74,0x2E,0x6E,0x65,0x74,0x2F,0x6C,0x65,0x67,0x61,0x6C,0x2D, 
+0x74,0x65,0x72,0x6D,0x73,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30, +0x28,0x63,0x29,0x20,0x32,0x30,0x30,0x39,0x20,0x45,0x6E,0x74,0x72,0x75,0x73,0x74, +0x2C,0x20,0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x66,0x6F,0x72,0x20,0x61,0x75,0x74, +0x68,0x6F,0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79, +0x31,0x32,0x30,0x30,0x06,0x03,0x55,0x04,0x03,0x13,0x29,0x45,0x6E,0x74,0x72,0x75, +0x73,0x74,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, +0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20, +0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, +0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A, +0x02,0x82,0x01,0x01,0x00,0xBA,0x84,0xB6,0x72,0xDB,0x9E,0x0C,0x6B,0xE2,0x99,0xE9, +0x30,0x01,0xA7,0x76,0xEA,0x32,0xB8,0x95,0x41,0x1A,0xC9,0xDA,0x61,0x4E,0x58,0x72, +0xCF,0xFE,0xF6,0x82,0x79,0xBF,0x73,0x61,0x06,0x0A,0xA5,0x27,0xD8,0xB3,0x5F,0xD3, +0x45,0x4E,0x1C,0x72,0xD6,0x4E,0x32,0xF2,0x72,0x8A,0x0F,0xF7,0x83,0x19,0xD0,0x6A, +0x80,0x80,0x00,0x45,0x1E,0xB0,0xC7,0xE7,0x9A,0xBF,0x12,0x57,0x27,0x1C,0xA3,0x68, +0x2F,0x0A,0x87,0xBD,0x6A,0x6B,0x0E,0x5E,0x65,0xF3,0x1C,0x77,0xD5,0xD4,0x85,0x8D, +0x70,0x21,0xB4,0xB3,0x32,0xE7,0x8B,0xA2,0xD5,0x86,0x39,0x02,0xB1,0xB8,0xD2,0x47, +0xCE,0xE4,0xC9,0x49,0xC4,0x3B,0xA7,0xDE,0xFB,0x54,0x7D,0x57,0xBE,0xF0,0xE8,0x6E, +0xC2,0x79,0xB2,0x3A,0x0B,0x55,0xE2,0x50,0x98,0x16,0x32,0x13,0x5C,0x2F,0x78,0x56, +0xC1,0xC2,0x94,0xB3,0xF2,0x5A,0xE4,0x27,0x9A,0x9F,0x24,0xD7,0xC6,0xEC,0xD0,0x9B, +0x25,0x82,0xE3,0xCC,0xC2,0xC4,0x45,0xC5,0x8C,0x97,0x7A,0x06,0x6B,0x2A,0x11,0x9F, +0xA9,0x0A,0x6E,0x48,0x3B,0x6F,0xDB,0xD4,0x11,0x19,0x42,0xF7,0x8F,0x07,0xBF,0xF5, +0x53,0x5F,0x9C,0x3E,0xF4,0x17,0x2C,0xE6,0x69,0xAC,0x4E,0x32,0x4C,0x62,0x77,0xEA, +0xB7,0xE8,0xE5,0xBB,0x34,0xBC,0x19,0x8B,0xAE,0x9C,0x51,0xE7,0xB7,0x7E,0xB5,0x53, +0xB1,0x33,0x22,0xE5,0x6D,0xCF,0x70,0x3C,0x1A,0xFA,0xE2,0x9B,0x67,0xB6,0x83,0xF4, 
+0x8D,0xA5,0xAF,0x62,0x4C,0x4D,0xE0,0x58,0xAC,0x64,0x34,0x12,0x03,0xF8,0xB6,0x8D, +0x94,0x63,0x24,0xA4,0x71,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0E, +0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F, +0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30, +0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x6A,0x72,0x26,0x7A,0xD0,0x1E, +0xEF,0x7D,0xE7,0x3B,0x69,0x51,0xD4,0x6C,0x8D,0x9F,0x90,0x12,0x66,0xAB,0x30,0x0D, +0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01, +0x01,0x00,0x79,0x9F,0x1D,0x96,0xC6,0xB6,0x79,0x3F,0x22,0x8D,0x87,0xD3,0x87,0x03, +0x04,0x60,0x6A,0x6B,0x9A,0x2E,0x59,0x89,0x73,0x11,0xAC,0x43,0xD1,0xF5,0x13,0xFF, +0x8D,0x39,0x2B,0xC0,0xF2,0xBD,0x4F,0x70,0x8C,0xA9,0x2F,0xEA,0x17,0xC4,0x0B,0x54, +0x9E,0xD4,0x1B,0x96,0x98,0x33,0x3C,0xA8,0xAD,0x62,0xA2,0x00,0x76,0xAB,0x59,0x69, +0x6E,0x06,0x1D,0x7E,0xC4,0xB9,0x44,0x8D,0x98,0xAF,0x12,0xD4,0x61,0xDB,0x0A,0x19, +0x46,0x47,0xF3,0xEB,0xF7,0x63,0xC1,0x40,0x05,0x40,0xA5,0xD2,0xB7,0xF4,0xB5,0x9A, +0x36,0xBF,0xA9,0x88,0x76,0x88,0x04,0x55,0x04,0x2B,0x9C,0x87,0x7F,0x1A,0x37,0x3C, +0x7E,0x2D,0xA5,0x1A,0xD8,0xD4,0x89,0x5E,0xCA,0xBD,0xAC,0x3D,0x6C,0xD8,0x6D,0xAF, +0xD5,0xF3,0x76,0x0F,0xCD,0x3B,0x88,0x38,0x22,0x9D,0x6C,0x93,0x9A,0xC4,0x3D,0xBF, +0x82,0x1B,0x65,0x3F,0xA6,0x0F,0x5D,0xAA,0xFC,0xE5,0xB2,0x15,0xCA,0xB5,0xAD,0xC6, +0xBC,0x3D,0xD0,0x84,0xE8,0xEA,0x06,0x72,0xB0,0x4D,0x39,0x32,0x78,0xBF,0x3E,0x11, +0x9C,0x0B,0xA4,0x9D,0x9A,0x21,0xF3,0xF0,0x9B,0x0B,0x30,0x78,0xDB,0xC1,0xDC,0x87, +0x43,0xFE,0xBC,0x63,0x9A,0xCA,0xC5,0xC2,0x1C,0xC9,0xC7,0x8D,0xFF,0x3B,0x12,0x58, +0x08,0xE6,0xB6,0x3D,0xEC,0x7A,0x2C,0x4E,0xFB,0x83,0x96,0xCE,0x0C,0x3C,0x69,0x87, +0x54,0x73,0xA4,0x73,0xC2,0x93,0xFF,0x51,0x10,0xAC,0x15,0x54,0x01,0xD8,0xFC,0x05, +0xB1,0x89,0xA1,0x7F,0x74,0x83,0x9A,0x49,0xD7,0xDC,0x4E,0x7B,0x8A,0x48,0x6F,0x8B, +0x45,0xF6, +}; + + +/* subject:/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */ 
+/* issuer :/C=US/O=The Go Daddy Group, Inc./OU=Go Daddy Class 2 Certification Authority */ + + +const unsigned char Go_Daddy_Class_2_CA_certificate[1028]={ +0x30,0x82,0x04,0x00,0x30,0x82,0x02,0xE8,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00, +0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, +0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21, +0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20, +0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63, +0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44, +0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72, +0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F, +0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x36,0x32,0x39,0x31,0x37, +0x30,0x36,0x32,0x30,0x5A,0x17,0x0D,0x33,0x34,0x30,0x36,0x32,0x39,0x31,0x37,0x30, +0x36,0x32,0x30,0x5A,0x30,0x63,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, +0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0A,0x13,0x18,0x54,0x68, +0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x47,0x72,0x6F,0x75,0x70, +0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x0B,0x13, +0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x43,0x6C,0x61,0x73,0x73,0x20, +0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20, +0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82,0x01,0x20,0x30,0x0D,0x06, +0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0D, +0x00,0x30,0x82,0x01,0x08,0x02,0x82,0x01,0x01,0x00,0xDE,0x9D,0xD7,0xEA,0x57,0x18, +0x49,0xA1,0x5B,0xEB,0xD7,0x5F,0x48,0x86,0xEA,0xBE,0xDD,0xFF,0xE4,0xEF,0x67,0x1C, +0xF4,0x65,0x68,0xB3,0x57,0x71,0xA0,0x5E,0x77,0xBB,0xED,0x9B,0x49,0xE9,0x70,0x80, +0x3D,0x56,0x18,0x63,0x08,0x6F,0xDA,0xF2,0xCC,0xD0,0x3F,0x7F,0x02,0x54,0x22,0x54, 
+0x10,0xD8,0xB2,0x81,0xD4,0xC0,0x75,0x3D,0x4B,0x7F,0xC7,0x77,0xC3,0x3E,0x78,0xAB, +0x1A,0x03,0xB5,0x20,0x6B,0x2F,0x6A,0x2B,0xB1,0xC5,0x88,0x7E,0xC4,0xBB,0x1E,0xB0, +0xC1,0xD8,0x45,0x27,0x6F,0xAA,0x37,0x58,0xF7,0x87,0x26,0xD7,0xD8,0x2D,0xF6,0xA9, +0x17,0xB7,0x1F,0x72,0x36,0x4E,0xA6,0x17,0x3F,0x65,0x98,0x92,0xDB,0x2A,0x6E,0x5D, +0xA2,0xFE,0x88,0xE0,0x0B,0xDE,0x7F,0xE5,0x8D,0x15,0xE1,0xEB,0xCB,0x3A,0xD5,0xE2, +0x12,0xA2,0x13,0x2D,0xD8,0x8E,0xAF,0x5F,0x12,0x3D,0xA0,0x08,0x05,0x08,0xB6,0x5C, +0xA5,0x65,0x38,0x04,0x45,0x99,0x1E,0xA3,0x60,0x60,0x74,0xC5,0x41,0xA5,0x72,0x62, +0x1B,0x62,0xC5,0x1F,0x6F,0x5F,0x1A,0x42,0xBE,0x02,0x51,0x65,0xA8,0xAE,0x23,0x18, +0x6A,0xFC,0x78,0x03,0xA9,0x4D,0x7F,0x80,0xC3,0xFA,0xAB,0x5A,0xFC,0xA1,0x40,0xA4, +0xCA,0x19,0x16,0xFE,0xB2,0xC8,0xEF,0x5E,0x73,0x0D,0xEE,0x77,0xBD,0x9A,0xF6,0x79, +0x98,0xBC,0xB1,0x07,0x67,0xA2,0x15,0x0D,0xDD,0xA0,0x58,0xC6,0x44,0x7B,0x0A,0x3E, +0x62,0x28,0x5F,0xBA,0x41,0x07,0x53,0x58,0xCF,0x11,0x7E,0x38,0x74,0xC5,0xF8,0xFF, +0xB5,0x69,0x90,0x8F,0x84,0x74,0xEA,0x97,0x1B,0xAF,0x02,0x01,0x03,0xA3,0x81,0xC0, +0x30,0x81,0xBD,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xD2,0xC4, +0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1,0xFE,0xDD,0xA8,0x6A, +0xD4,0xE3,0x30,0x81,0x8D,0x06,0x03,0x55,0x1D,0x23,0x04,0x81,0x85,0x30,0x81,0x82, +0x80,0x14,0xD2,0xC4,0xB0,0xD2,0x91,0xD4,0x4C,0x11,0x71,0xB3,0x61,0xCB,0x3D,0xA1, +0xFE,0xDD,0xA8,0x6A,0xD4,0xE3,0xA1,0x67,0xA4,0x65,0x30,0x63,0x31,0x0B,0x30,0x09, +0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x21,0x30,0x1F,0x06,0x03,0x55, +0x04,0x0A,0x13,0x18,0x54,0x68,0x65,0x20,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79, +0x20,0x47,0x72,0x6F,0x75,0x70,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F, +0x06,0x03,0x55,0x04,0x0B,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20, +0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63, +0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x82, 
+0x01,0x00,0x30,0x0C,0x06,0x03,0x55,0x1D,0x13,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, +0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03, +0x82,0x01,0x01,0x00,0x32,0x4B,0xF3,0xB2,0xCA,0x3E,0x91,0xFC,0x12,0xC6,0xA1,0x07, +0x8C,0x8E,0x77,0xA0,0x33,0x06,0x14,0x5C,0x90,0x1E,0x18,0xF7,0x08,0xA6,0x3D,0x0A, +0x19,0xF9,0x87,0x80,0x11,0x6E,0x69,0xE4,0x96,0x17,0x30,0xFF,0x34,0x91,0x63,0x72, +0x38,0xEE,0xCC,0x1C,0x01,0xA3,0x1D,0x94,0x28,0xA4,0x31,0xF6,0x7A,0xC4,0x54,0xD7, +0xF6,0xE5,0x31,0x58,0x03,0xA2,0xCC,0xCE,0x62,0xDB,0x94,0x45,0x73,0xB5,0xBF,0x45, +0xC9,0x24,0xB5,0xD5,0x82,0x02,0xAD,0x23,0x79,0x69,0x8D,0xB8,0xB6,0x4D,0xCE,0xCF, +0x4C,0xCA,0x33,0x23,0xE8,0x1C,0x88,0xAA,0x9D,0x8B,0x41,0x6E,0x16,0xC9,0x20,0xE5, +0x89,0x9E,0xCD,0x3B,0xDA,0x70,0xF7,0x7E,0x99,0x26,0x20,0x14,0x54,0x25,0xAB,0x6E, +0x73,0x85,0xE6,0x9B,0x21,0x9D,0x0A,0x6C,0x82,0x0E,0xA8,0xF8,0xC2,0x0C,0xFA,0x10, +0x1E,0x6C,0x96,0xEF,0x87,0x0D,0xC4,0x0F,0x61,0x8B,0xAD,0xEE,0x83,0x2B,0x95,0xF8, +0x8E,0x92,0x84,0x72,0x39,0xEB,0x20,0xEA,0x83,0xED,0x83,0xCD,0x97,0x6E,0x08,0xBC, +0xEB,0x4E,0x26,0xB6,0x73,0x2B,0xE4,0xD3,0xF6,0x4C,0xFE,0x26,0x71,0xE2,0x61,0x11, +0x74,0x4A,0xFF,0x57,0x1A,0x87,0x0F,0x75,0x48,0x2E,0xCF,0x51,0x69,0x17,0xA0,0x02, +0x12,0x61,0x95,0xD5,0xD1,0x40,0xB2,0x10,0x4C,0xEE,0xC4,0xAC,0x10,0x43,0xA6,0xA5, +0x9E,0x0A,0xD5,0x95,0x62,0x9A,0x0D,0xCF,0x88,0x82,0xC5,0x32,0x0C,0xE4,0x2B,0x9F, +0x45,0xE6,0x0D,0x9F,0x28,0x9C,0xB1,0xB9,0x2A,0x5A,0x57,0xAD,0x37,0x0F,0xAF,0x1D, +0x7F,0xDB,0xBD,0x9F, }; -/* subject:/OU=GlobalSign ECC Root CA - R4/O=GlobalSign/CN=GlobalSign */ -/* issuer :/OU=GlobalSign ECC Root CA - R4/O=GlobalSign/CN=GlobalSign */ - - -const unsigned char GlobalSign_ECC_Root_CA___R4_certificate[485]={ -0x30,0x82,0x01,0xE1,0x30,0x82,0x01,0x87,0xA0,0x03,0x02,0x01,0x02,0x02,0x11,0x2A, -0x38,0xA4,0x1C,0x96,0x0A,0x04,0xDE,0x42,0xB2,0x28,0xA5,0x0B,0xE8,0x34,0x98,0x02, -0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x02,0x30,0x50,0x31,0x24, 
-0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53, -0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x41,0x20, -0x2D,0x20,0x52,0x34,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13,0x0A,0x47, -0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06,0x03,0x55, -0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x30,0x1E, -0x17,0x0D,0x31,0x32,0x31,0x31,0x31,0x33,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17, -0x0D,0x33,0x38,0x30,0x31,0x31,0x39,0x30,0x33,0x31,0x34,0x30,0x37,0x5A,0x30,0x50, -0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x47,0x6C,0x6F,0x62,0x61, -0x6C,0x53,0x69,0x67,0x6E,0x20,0x45,0x43,0x43,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43, -0x41,0x20,0x2D,0x20,0x52,0x34,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x0A,0x13, -0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E,0x31,0x13,0x30,0x11,0x06, -0x03,0x55,0x04,0x03,0x13,0x0A,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x53,0x69,0x67,0x6E, -0x30,0x59,0x30,0x13,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x08,0x2A, -0x86,0x48,0xCE,0x3D,0x03,0x01,0x07,0x03,0x42,0x00,0x04,0xB8,0xC6,0x79,0xD3,0x8F, -0x6C,0x25,0x0E,0x9F,0x2E,0x39,0x19,0x1C,0x03,0xA4,0xAE,0x9A,0xE5,0x39,0x07,0x09, -0x16,0xCA,0x63,0xB1,0xB9,0x86,0xF8,0x8A,0x57,0xC1,0x57,0xCE,0x42,0xFA,0x73,0xA1, -0xF7,0x65,0x42,0xFF,0x1E,0xC1,0x00,0xB2,0x6E,0x73,0x0E,0xFF,0xC7,0x21,0xE5,0x18, -0xA4,0xAA,0xD9,0x71,0x3F,0xA8,0xD4,0xB9,0xCE,0x8C,0x1D,0xA3,0x42,0x30,0x40,0x30, -0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30, -0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, -0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x54,0xB0,0x7B,0xAD,0x45, -0xB8,0xE2,0x40,0x7F,0xFB,0x0A,0x6E,0xFB,0xBE,0x33,0xC9,0x3C,0xA3,0x84,0xD5,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x02,0x03,0x48,0x00,0x30,0x45, -0x02,0x21,0x00,0xDC,0x92,0xA1,0xA0,0x13,0xA6,0xCF,0x03,0xB0,0xE6,0xC4,0x21,0x97, 
-0x90,0xFA,0x14,0x57,0x2D,0x03,0xEC,0xEE,0x3C,0xD3,0x6E,0xCA,0xA8,0x6C,0x76,0xBC, -0xA2,0xDE,0xBB,0x02,0x20,0x27,0xA8,0x85,0x27,0x35,0x9B,0x56,0xC6,0xA3,0xF2,0x47, -0xD2,0xB7,0x6E,0x1B,0x02,0x00,0x17,0xAA,0x67,0xA6,0x15,0x91,0xDE,0xFA,0x94,0xEC, -0x7B,0x0B,0xF8,0x9F,0x84, -}; +/* subject:/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */ +/* issuer :/C=US/O=AffirmTrust/CN=AffirmTrust Commercial */ -/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */ -/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Universal CA/CN=TC TrustCenter Universal CA I */ - - -const unsigned char TC_TrustCenter_Universal_CA_I_certificate[993]={ -0x30,0x82,0x03,0xDD,0x30,0x82,0x02,0xC5,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x1D, -0xA2,0x00,0x01,0x00,0x02,0xEC,0xB7,0x60,0x80,0x78,0x8D,0xB6,0x06,0x30,0x0D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x79,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06, -0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65, -0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55, -0x04,0x0B,0x13,0x1B,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74, -0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31, -0x26,0x30,0x24,0x06,0x03,0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75, -0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73, -0x61,0x6C,0x20,0x43,0x41,0x20,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x33,0x32, -0x32,0x31,0x35,0x35,0x34,0x32,0x38,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31, -0x32,0x32,0x35,0x39,0x35,0x39,0x5A,0x30,0x79,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, -0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13, -0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20, -0x47,0x6D,0x62,0x48,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x0B,0x13,0x1B,0x54, 
-0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x55,0x6E, -0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41,0x31,0x26,0x30,0x24,0x06,0x03, -0x55,0x04,0x03,0x13,0x1D,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E, -0x74,0x65,0x72,0x20,0x55,0x6E,0x69,0x76,0x65,0x72,0x73,0x61,0x6C,0x20,0x43,0x41, -0x20,0x49,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, +const unsigned char AffirmTrust_Commercial_certificate[848]={ +0x30,0x82,0x03,0x4C,0x30,0x82,0x02,0x34,0xA0,0x03,0x02,0x01,0x02,0x02,0x08,0x77, +0x77,0x06,0x27,0x26,0xA9,0xB1,0x7C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, +0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, +0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06,0x03,0x55,0x04,0x0A,0x0C,0x0B, +0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x31,0x1F,0x30,0x1D,0x06, +0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, +0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69,0x61,0x6C,0x30,0x1E,0x17,0x0D, +0x31,0x30,0x30,0x31,0x32,0x39,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x17,0x0D,0x33, +0x30,0x31,0x32,0x33,0x31,0x31,0x34,0x30,0x36,0x30,0x36,0x5A,0x30,0x44,0x31,0x0B, +0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x14,0x30,0x12,0x06, +0x03,0x55,0x04,0x0A,0x0C,0x0B,0x41,0x66,0x66,0x69,0x72,0x6D,0x54,0x72,0x75,0x73, +0x74,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x0C,0x16,0x41,0x66,0x66,0x69, +0x72,0x6D,0x54,0x72,0x75,0x73,0x74,0x20,0x43,0x6F,0x6D,0x6D,0x65,0x72,0x63,0x69, +0x61,0x6C,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, 0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82, -0x01,0x01,0x00,0xA4,0x77,0x23,0x96,0x44,0xAF,0x90,0xF4,0x31,0xA7,0x10,0xF4,0x26, -0x87,0x9C,0xF3,0x38,0xD9,0x0F,0x5E,0xDE,0xCF,0x41,0xE8,0x31,0xAD,0xC6,0x74,0x91, -0x24,0x96,0x78,0x1E,0x09,0xA0,0x9B,0x9A,0x95,0x4A,0x4A,0xF5,0x62,0x7C,0x02,0xA8, 
-0xCA,0xAC,0xFB,0x5A,0x04,0x76,0x39,0xDE,0x5F,0xF1,0xF9,0xB3,0xBF,0xF3,0x03,0x58, -0x55,0xD2,0xAA,0xB7,0xE3,0x04,0x22,0xD1,0xF8,0x94,0xDA,0x22,0x08,0x00,0x8D,0xD3, -0x7C,0x26,0x5D,0xCC,0x77,0x79,0xE7,0x2C,0x78,0x39,0xA8,0x26,0x73,0x0E,0xA2,0x5D, -0x25,0x69,0x85,0x4F,0x55,0x0E,0x9A,0xEF,0xC6,0xB9,0x44,0xE1,0x57,0x3D,0xDF,0x1F, -0x54,0x22,0xE5,0x6F,0x65,0xAA,0x33,0x84,0x3A,0xF3,0xCE,0x7A,0xBE,0x55,0x97,0xAE, -0x8D,0x12,0x0F,0x14,0x33,0xE2,0x50,0x70,0xC3,0x49,0x87,0x13,0xBC,0x51,0xDE,0xD7, -0x98,0x12,0x5A,0xEF,0x3A,0x83,0x33,0x92,0x06,0x75,0x8B,0x92,0x7C,0x12,0x68,0x7B, -0x70,0x6A,0x0F,0xB5,0x9B,0xB6,0x77,0x5B,0x48,0x59,0x9D,0xE4,0xEF,0x5A,0xAD,0xF3, -0xC1,0x9E,0xD4,0xD7,0x45,0x4E,0xCA,0x56,0x34,0x21,0xBC,0x3E,0x17,0x5B,0x6F,0x77, -0x0C,0x48,0x01,0x43,0x29,0xB0,0xDD,0x3F,0x96,0x6E,0xE6,0x95,0xAA,0x0C,0xC0,0x20, -0xB6,0xFD,0x3E,0x36,0x27,0x9C,0xE3,0x5C,0xCF,0x4E,0x81,0xDC,0x19,0xBB,0x91,0x90, -0x7D,0xEC,0xE6,0x97,0x04,0x1E,0x93,0xCC,0x22,0x49,0xD7,0x97,0x86,0xB6,0x13,0x0A, -0x3C,0x43,0x23,0x77,0x7E,0xF0,0xDC,0xE6,0xCD,0x24,0x1F,0x3B,0x83,0x9B,0x34,0x3A, -0x83,0x34,0xE3,0x02,0x03,0x01,0x00,0x01,0xA3,0x63,0x30,0x61,0x30,0x1F,0x06,0x03, -0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE, -0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0F,0x06, -0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E, -0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D, -0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x92,0xA4,0x75,0x2C,0xA4,0x9E,0xBE, -0x81,0x44,0xEB,0x79,0xFC,0x8A,0xC5,0x95,0xA5,0xEB,0x10,0x75,0x73,0x30,0x0D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01, -0x00,0x28,0xD2,0xE0,0x86,0xD5,0xE6,0xF8,0x7B,0xF0,0x97,0xDC,0x22,0x6B,0x3B,0x95, -0x14,0x56,0x0F,0x11,0x30,0xA5,0x9A,0x4F,0x3A,0xB0,0x3A,0xE0,0x06,0xCB,0x65,0xF5, -0xED,0xC6,0x97,0x27,0xFE,0x25,0xF2,0x57,0xE6,0x5E,0x95,0x8C,0x3E,0x64,0x60,0x15, 
-0x5A,0x7F,0x2F,0x0D,0x01,0xC5,0xB1,0x60,0xFD,0x45,0x35,0xCF,0xF0,0xB2,0xBF,0x06, -0xD9,0xEF,0x5A,0xBE,0xB3,0x62,0x21,0xB4,0xD7,0xAB,0x35,0x7C,0x53,0x3E,0xA6,0x27, -0xF1,0xA1,0x2D,0xDA,0x1A,0x23,0x9D,0xCC,0xDD,0xEC,0x3C,0x2D,0x9E,0x27,0x34,0x5D, -0x0F,0xC2,0x36,0x79,0xBC,0xC9,0x4A,0x62,0x2D,0xED,0x6B,0xD9,0x7D,0x41,0x43,0x7C, -0xB6,0xAA,0xCA,0xED,0x61,0xB1,0x37,0x82,0x15,0x09,0x1A,0x8A,0x16,0x30,0xD8,0xEC, -0xC9,0xD6,0x47,0x72,0x78,0x4B,0x10,0x46,0x14,0x8E,0x5F,0x0E,0xAF,0xEC,0xC7,0x2F, -0xAB,0x10,0xD7,0xB6,0xF1,0x6E,0xEC,0x86,0xB2,0xC2,0xE8,0x0D,0x92,0x73,0xDC,0xA2, -0xF4,0x0F,0x3A,0xBF,0x61,0x23,0x10,0x89,0x9C,0x48,0x40,0x6E,0x70,0x00,0xB3,0xD3, -0xBA,0x37,0x44,0x58,0x11,0x7A,0x02,0x6A,0x88,0xF0,0x37,0x34,0xF0,0x19,0xE9,0xAC, -0xD4,0x65,0x73,0xF6,0x69,0x8C,0x64,0x94,0x3A,0x79,0x85,0x29,0xB0,0x16,0x2B,0x0C, -0x82,0x3F,0x06,0x9C,0xC7,0xFD,0x10,0x2B,0x9E,0x0F,0x2C,0xB6,0x9E,0xE3,0x15,0xBF, -0xD9,0x36,0x1C,0xBA,0x25,0x1A,0x52,0x3D,0x1A,0xEC,0x22,0x0C,0x1C,0xE0,0xA4,0xA2, -0x3D,0xF0,0xE8,0x39,0xCF,0x81,0xC0,0x7B,0xED,0x5D,0x1F,0x6F,0xC5,0xD0,0x0B,0xD7, -0x98, -}; - - -/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */ -/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=Comodo CA Limited/CN=Trusted Certificate Services */ - - -const unsigned char Comodo_Trusted_Services_root_certificate[1095]={ -0x30,0x82,0x04,0x43,0x30,0x82,0x03,0x2B,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, -0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, -0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, -0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, -0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20,0x43, 
-0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03,0x55, -0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72,0x74, -0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73, -0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x31,0x30,0x31,0x30,0x30,0x30,0x30,0x30,0x30, -0x5A,0x17,0x0D,0x32,0x38,0x31,0x32,0x33,0x31,0x32,0x33,0x35,0x39,0x35,0x39,0x5A, -0x30,0x7F,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31, -0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x0C,0x12,0x47,0x72,0x65,0x61,0x74,0x65, -0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E, -0x06,0x03,0x55,0x04,0x07,0x0C,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A, -0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x0C,0x11,0x43,0x6F,0x6D,0x6F,0x64,0x6F,0x20, -0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x25,0x30,0x23,0x06,0x03, -0x55,0x04,0x03,0x0C,0x1C,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x43,0x65,0x72, -0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x53,0x65,0x72,0x76,0x69,0x63,0x65, -0x73,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, -0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01, -0x01,0x00,0xDF,0x71,0x6F,0x36,0x58,0x53,0x5A,0xF2,0x36,0x54,0x57,0x80,0xC4,0x74, -0x08,0x20,0xED,0x18,0x7F,0x2A,0x1D,0xE6,0x35,0x9A,0x1E,0x25,0xAC,0x9C,0xE5,0x96, -0x7E,0x72,0x52,0xA0,0x15,0x42,0xDB,0x59,0xDD,0x64,0x7A,0x1A,0xD0,0xB8,0x7B,0xDD, -0x39,0x15,0xBC,0x55,0x48,0xC4,0xED,0x3A,0x00,0xEA,0x31,0x11,0xBA,0xF2,0x71,0x74, -0x1A,0x67,0xB8,0xCF,0x33,0xCC,0xA8,0x31,0xAF,0xA3,0xE3,0xD7,0x7F,0xBF,0x33,0x2D, -0x4C,0x6A,0x3C,0xEC,0x8B,0xC3,0x92,0xD2,0x53,0x77,0x24,0x74,0x9C,0x07,0x6E,0x70, -0xFC,0xBD,0x0B,0x5B,0x76,0xBA,0x5F,0xF2,0xFF,0xD7,0x37,0x4B,0x4A,0x60,0x78,0xF7, -0xF0,0xFA,0xCA,0x70,0xB4,0xEA,0x59,0xAA,0xA3,0xCE,0x48,0x2F,0xA9,0xC3,0xB2,0x0B, -0x7E,0x17,0x72,0x16,0x0C,0xA6,0x07,0x0C,0x1B,0x38,0xCF,0xC9,0x62,0xB7,0x3F,0xA0, 
-0x93,0xA5,0x87,0x41,0xF2,0xB7,0x70,0x40,0x77,0xD8,0xBE,0x14,0x7C,0xE3,0xA8,0xC0, -0x7A,0x8E,0xE9,0x63,0x6A,0xD1,0x0F,0x9A,0xC6,0xD2,0xF4,0x8B,0x3A,0x14,0x04,0x56, -0xD4,0xED,0xB8,0xCC,0x6E,0xF5,0xFB,0xE2,0x2C,0x58,0xBD,0x7F,0x4F,0x6B,0x2B,0xF7, -0x60,0x24,0x58,0x24,0xCE,0x26,0xEF,0x34,0x91,0x3A,0xD5,0xE3,0x81,0xD0,0xB2,0xF0, -0x04,0x02,0xD7,0x5B,0xB7,0x3E,0x92,0xAC,0x6B,0x12,0x8A,0xF9,0xE4,0x05,0xB0,0x3B, -0x91,0x49,0x5C,0xB2,0xEB,0x53,0xEA,0xF8,0x9F,0x47,0x86,0xEE,0xBF,0x95,0xC0,0xC0, -0x06,0x9F,0xD2,0x5B,0x5E,0x11,0x1B,0xF4,0xC7,0x04,0x35,0x29,0xD2,0x55,0x5C,0xE4, -0xED,0xEB,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xC9,0x30,0x81,0xC6,0x30,0x1D,0x06, -0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC5,0x7B,0x58,0xBD,0xED,0xDA,0x25,0x69, -0xD2,0xF7,0x59,0x16,0xA8,0xB3,0x32,0xC0,0x7B,0x27,0x5B,0xF4,0x30,0x0E,0x06,0x03, -0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03, -0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x81,0x83, -0x06,0x03,0x55,0x1D,0x1F,0x04,0x7C,0x30,0x7A,0x30,0x3C,0xA0,0x3A,0xA0,0x38,0x86, -0x36,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F, -0x64,0x6F,0x63,0x61,0x2E,0x63,0x6F,0x6D,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64, -0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69, -0x63,0x65,0x73,0x2E,0x63,0x72,0x6C,0x30,0x3A,0xA0,0x38,0xA0,0x36,0x86,0x34,0x68, -0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C,0x2E,0x63,0x6F,0x6D,0x6F,0x64,0x6F, -0x2E,0x6E,0x65,0x74,0x2F,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x43,0x65,0x72,0x74, -0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x53,0x65,0x72,0x76,0x69,0x63,0x65,0x73,0x2E, -0x63,0x72,0x6C,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05, -0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xC8,0x93,0x81,0x3B,0x89,0xB4,0xAF,0xB8,0x84, -0x12,0x4C,0x8D,0xD2,0xF0,0xDB,0x70,0xBA,0x57,0x86,0x15,0x34,0x10,0xB9,0x2F,0x7F, -0x1E,0xB0,0xA8,0x89,0x60,0xA1,0x8A,0xC2,0x77,0x0C,0x50,0x4A,0x9B,0x00,0x8B,0xD8, 
-0x8B,0xF4,0x41,0xE2,0xD0,0x83,0x8A,0x4A,0x1C,0x14,0x06,0xB0,0xA3,0x68,0x05,0x70, -0x31,0x30,0xA7,0x53,0x9B,0x0E,0xE9,0x4A,0xA0,0x58,0x69,0x67,0x0E,0xAE,0x9D,0xF6, -0xA5,0x2C,0x41,0xBF,0x3C,0x06,0x6B,0xE4,0x59,0xCC,0x6D,0x10,0xF1,0x96,0x6F,0x1F, -0xDF,0xF4,0x04,0x02,0xA4,0x9F,0x45,0x3E,0xC8,0xD8,0xFA,0x36,0x46,0x44,0x50,0x3F, -0x82,0x97,0x91,0x1F,0x28,0xDB,0x18,0x11,0x8C,0x2A,0xE4,0x65,0x83,0x57,0x12,0x12, -0x8C,0x17,0x3F,0x94,0x36,0xFE,0x5D,0xB0,0xC0,0x04,0x77,0x13,0xB8,0xF4,0x15,0xD5, -0x3F,0x38,0xCC,0x94,0x3A,0x55,0xD0,0xAC,0x98,0xF5,0xBA,0x00,0x5F,0xE0,0x86,0x19, -0x81,0x78,0x2F,0x28,0xC0,0x7E,0xD3,0xCC,0x42,0x0A,0xF5,0xAE,0x50,0xA0,0xD1,0x3E, -0xC6,0xA1,0x71,0xEC,0x3F,0xA0,0x20,0x8C,0x66,0x3A,0x89,0xB4,0x8E,0xD4,0xD8,0xB1, -0x4D,0x25,0x47,0xEE,0x2F,0x88,0xC8,0xB5,0xE1,0x05,0x45,0xC0,0xBE,0x14,0x71,0xDE, -0x7A,0xFD,0x8E,0x7B,0x7D,0x4D,0x08,0x96,0xA5,0x12,0x73,0xF0,0x2D,0xCA,0x37,0x27, -0x74,0x12,0x27,0x4C,0xCB,0xB6,0x97,0xE9,0xD9,0xAE,0x08,0x6D,0x5A,0x39,0x40,0xDD, -0x05,0x47,0x75,0x6A,0x5A,0x21,0xB3,0xA3,0x18,0xCF,0x4E,0xF7,0x2E,0x57,0xB7,0x98, -0x70,0x5E,0xC8,0xC4,0x78,0xB0,0x62, +0x01,0x01,0x00,0xF6,0x1B,0x4F,0x67,0x07,0x2B,0xA1,0x15,0xF5,0x06,0x22,0xCB,0x1F, +0x01,0xB2,0xE3,0x73,0x45,0x06,0x44,0x49,0x2C,0xBB,0x49,0x25,0x14,0xD6,0xCE,0xC3, +0xB7,0xAB,0x2C,0x4F,0xC6,0x41,0x32,0x94,0x57,0xFA,0x12,0xA7,0x5B,0x0E,0xE2,0x8F, +0x1F,0x1E,0x86,0x19,0xA7,0xAA,0xB5,0x2D,0xB9,0x5F,0x0D,0x8A,0xC2,0xAF,0x85,0x35, +0x79,0x32,0x2D,0xBB,0x1C,0x62,0x37,0xF2,0xB1,0x5B,0x4A,0x3D,0xCA,0xCD,0x71,0x5F, +0xE9,0x42,0xBE,0x94,0xE8,0xC8,0xDE,0xF9,0x22,0x48,0x64,0xC6,0xE5,0xAB,0xC6,0x2B, +0x6D,0xAD,0x05,0xF0,0xFA,0xD5,0x0B,0xCF,0x9A,0xE5,0xF0,0x50,0xA4,0x8B,0x3B,0x47, +0xA5,0x23,0x5B,0x7A,0x7A,0xF8,0x33,0x3F,0xB8,0xEF,0x99,0x97,0xE3,0x20,0xC1,0xD6, +0x28,0x89,0xCF,0x94,0xFB,0xB9,0x45,0xED,0xE3,0x40,0x17,0x11,0xD4,0x74,0xF0,0x0B, +0x31,0xE2,0x2B,0x26,0x6A,0x9B,0x4C,0x57,0xAE,0xAC,0x20,0x3E,0xBA,0x45,0x7A,0x05, 
+0xF3,0xBD,0x9B,0x69,0x15,0xAE,0x7D,0x4E,0x20,0x63,0xC4,0x35,0x76,0x3A,0x07,0x02, +0xC9,0x37,0xFD,0xC7,0x47,0xEE,0xE8,0xF1,0x76,0x1D,0x73,0x15,0xF2,0x97,0xA4,0xB5, +0xC8,0x7A,0x79,0xD9,0x42,0xAA,0x2B,0x7F,0x5C,0xFE,0xCE,0x26,0x4F,0xA3,0x66,0x81, +0x35,0xAF,0x44,0xBA,0x54,0x1E,0x1C,0x30,0x32,0x65,0x9D,0xE6,0x3C,0x93,0x5E,0x50, +0x4E,0x7A,0xE3,0x3A,0xD4,0x6E,0xCC,0x1A,0xFB,0xF9,0xD2,0x37,0xAE,0x24,0x2A,0xAB, +0x57,0x03,0x22,0x28,0x0D,0x49,0x75,0x7F,0xB7,0x28,0xDA,0x75,0xBF,0x8E,0xE3,0xDC, +0x0E,0x79,0x31,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03, +0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x9D,0x93,0xC6,0x53,0x8B,0x5E,0xCA,0xAF,0x3F, +0x9F,0x1E,0x0F,0xE5,0x99,0x95,0xBC,0x24,0xF6,0x94,0x8F,0x30,0x0F,0x06,0x03,0x55, +0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03, +0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0D,0x06,0x09, +0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00, +0x58,0xAC,0xF4,0x04,0x0E,0xCD,0xC0,0x0D,0xFF,0x0A,0xFD,0xD4,0xBA,0x16,0x5F,0x29, +0xBD,0x7B,0x68,0x99,0x58,0x49,0xD2,0xB4,0x1D,0x37,0x4D,0x7F,0x27,0x7D,0x46,0x06, +0x5D,0x43,0xC6,0x86,0x2E,0x3E,0x73,0xB2,0x26,0x7D,0x4F,0x93,0xA9,0xB6,0xC4,0x2A, +0x9A,0xAB,0x21,0x97,0x14,0xB1,0xDE,0x8C,0xD3,0xAB,0x89,0x15,0xD8,0x6B,0x24,0xD4, +0xF1,0x16,0xAE,0xD8,0xA4,0x5C,0xD4,0x7F,0x51,0x8E,0xED,0x18,0x01,0xB1,0x93,0x63, +0xBD,0xBC,0xF8,0x61,0x80,0x9A,0x9E,0xB1,0xCE,0x42,0x70,0xE2,0xA9,0x7D,0x06,0x25, +0x7D,0x27,0xA1,0xFE,0x6F,0xEC,0xB3,0x1E,0x24,0xDA,0xE3,0x4B,0x55,0x1A,0x00,0x3B, +0x35,0xB4,0x3B,0xD9,0xD7,0x5D,0x30,0xFD,0x81,0x13,0x89,0xF2,0xC2,0x06,0x2B,0xED, +0x67,0xC4,0x8E,0xC9,0x43,0xB2,0x5C,0x6B,0x15,0x89,0x02,0xBC,0x62,0xFC,0x4E,0xF2, +0xB5,0x33,0xAA,0xB2,0x6F,0xD3,0x0A,0xA2,0x50,0xE3,0xF6,0x3B,0xE8,0x2E,0x44,0xC2, +0xDB,0x66,0x38,0xA9,0x33,0x56,0x48,0xF1,0x6D,0x1B,0x33,0x8D,0x0D,0x8C,0x3F,0x60, +0x37,0x9D,0xD3,0xCA,0x6D,0x7E,0x34,0x7E,0x0D,0x9F,0x72,0x76,0x8B,0x1B,0x9F,0x72, 
+0xFD,0x52,0x35,0x41,0x45,0x02,0x96,0x2F,0x1C,0xB2,0x9A,0x73,0x49,0x21,0xB1,0x49, +0x47,0x45,0x47,0xB4,0xEF,0x6A,0x34,0x11,0xC9,0x4D,0x9A,0xCC,0x59,0xB7,0xD6,0x02, +0x9E,0x5A,0x4E,0x65,0xB5,0x94,0xAE,0x1B,0xDF,0x29,0xB0,0x16,0xF1,0xBF,0x00,0x9E, +0x07,0x3A,0x17,0x64,0xB5,0x04,0xB5,0x23,0x21,0x99,0x0A,0x95,0x3B,0x97,0x7C,0xEF, }; @@ -3655,153 +2227,216 @@ const unsigned char Entrust_Root_Certification_Authority_certificate[1173]={ }; -/* subject:/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 2 CA/CN=TC TrustCenter Class 2 CA II */ -/* issuer :/C=DE/O=TC TrustCenter GmbH/OU=TC TrustCenter Class 2 CA/CN=TC TrustCenter Class 2 CA II */ - - -const unsigned char TC_TrustCenter_Class_2_CA_II_certificate[1198]={ -0x30,0x82,0x04,0xAA,0x30,0x82,0x03,0x92,0xA0,0x03,0x02,0x01,0x02,0x02,0x0E,0x2E, -0x6A,0x00,0x01,0x00,0x02,0x1F,0xD7,0x52,0x21,0x2C,0x11,0x5C,0x3B,0x30,0x0D,0x06, -0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x76,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06, -0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65, -0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62,0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55, -0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74, -0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x41,0x31,0x25,0x30, -0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C,0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74, -0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43, -0x41,0x20,0x49,0x49,0x30,0x1E,0x17,0x0D,0x30,0x36,0x30,0x31,0x31,0x32,0x31,0x34, -0x33,0x38,0x34,0x33,0x5A,0x17,0x0D,0x32,0x35,0x31,0x32,0x33,0x31,0x32,0x32,0x35, -0x39,0x35,0x39,0x5A,0x30,0x76,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, -0x02,0x44,0x45,0x31,0x1C,0x30,0x1A,0x06,0x03,0x55,0x04,0x0A,0x13,0x13,0x54,0x43, -0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x47,0x6D,0x62, 
-0x48,0x31,0x22,0x30,0x20,0x06,0x03,0x55,0x04,0x0B,0x13,0x19,0x54,0x43,0x20,0x54, -0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43,0x6C,0x61,0x73,0x73, -0x20,0x32,0x20,0x43,0x41,0x31,0x25,0x30,0x23,0x06,0x03,0x55,0x04,0x03,0x13,0x1C, -0x54,0x43,0x20,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x20,0x43, -0x6C,0x61,0x73,0x73,0x20,0x32,0x20,0x43,0x41,0x20,0x49,0x49,0x30,0x82,0x01,0x22, +/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G2 */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G2 */ + + +const unsigned char DigiCert_Assured_ID_Root_G2_certificate[922]={ +0x30,0x82,0x03,0x96,0x30,0x82,0x02,0x7E,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0B, +0x93,0x1C,0x3A,0xD6,0x39,0x67,0xEA,0x67,0x23,0xBF,0xC3,0xAF,0x9A,0xF4,0x4B,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30,0x65, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, +0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, +0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, +0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, +0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65, +0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F, +0x6F,0x74,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31, +0x32,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32, +0x30,0x30,0x30,0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, +0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44, +0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06, +0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65, +0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13, 
+0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65, +0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x32,0x30,0x82,0x01,0x22, 0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03, -0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xAB,0x80,0x87, -0x9B,0x8E,0xF0,0xC3,0x7C,0x87,0xD7,0xE8,0x24,0x82,0x11,0xB3,0x3C,0xDD,0x43,0x62, -0xEE,0xF8,0xC3,0x45,0xDA,0xE8,0xE1,0xA0,0x5F,0xD1,0x2A,0xB2,0xEA,0x93,0x68,0xDF, -0xB4,0xC8,0xD6,0x43,0xE9,0xC4,0x75,0x59,0x7F,0xFC,0xE1,0x1D,0xF8,0x31,0x70,0x23, -0x1B,0x88,0x9E,0x27,0xB9,0x7B,0xFD,0x3A,0xD2,0xC9,0xA9,0xE9,0x14,0x2F,0x90,0xBE, -0x03,0x52,0xC1,0x49,0xCD,0xF6,0xFD,0xE4,0x08,0x66,0x0B,0x57,0x8A,0xA2,0x42,0xA0, -0xB8,0xD5,0x7F,0x69,0x5C,0x90,0x32,0xB2,0x97,0x0D,0xCA,0x4A,0xDC,0x46,0x3E,0x02, -0x55,0x89,0x53,0xE3,0x1A,0x5A,0xCB,0x36,0xC6,0x07,0x56,0xF7,0x8C,0xCF,0x11,0xF4, -0x4C,0xBB,0x30,0x70,0x04,0x95,0xA5,0xF6,0x39,0x8C,0xFD,0x73,0x81,0x08,0x7D,0x89, -0x5E,0x32,0x1E,0x22,0xA9,0x22,0x45,0x4B,0xB0,0x66,0x2E,0x30,0xCC,0x9F,0x65,0xFD, -0xFC,0xCB,0x81,0xA9,0xF1,0xE0,0x3B,0xAF,0xA3,0x86,0xD1,0x89,0xEA,0xC4,0x45,0x79, -0x50,0x5D,0xAE,0xE9,0x21,0x74,0x92,0x4D,0x8B,0x59,0x82,0x8F,0x94,0xE3,0xE9,0x4A, -0xF1,0xE7,0x49,0xB0,0x14,0xE3,0xF5,0x62,0xCB,0xD5,0x72,0xBD,0x1F,0xB9,0xD2,0x9F, -0xA0,0xCD,0xA8,0xFA,0x01,0xC8,0xD9,0x0D,0xDF,0xDA,0xFC,0x47,0x9D,0xB3,0xC8,0x54, -0xDF,0x49,0x4A,0xF1,0x21,0xA9,0xFE,0x18,0x4E,0xEE,0x48,0xD4,0x19,0xBB,0xEF,0x7D, -0xE4,0xE2,0x9D,0xCB,0x5B,0xB6,0x6E,0xFF,0xE3,0xCD,0x5A,0xE7,0x74,0x82,0x05,0xBA, -0x80,0x25,0x38,0xCB,0xE4,0x69,0x9E,0xAF,0x41,0xAA,0x1A,0x84,0xF5,0x02,0x03,0x01, -0x00,0x01,0xA3,0x82,0x01,0x34,0x30,0x82,0x01,0x30,0x30,0x0F,0x06,0x03,0x55,0x1D, -0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55, -0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55, -0x1D,0x0E,0x04,0x16,0x04,0x14,0xE3,0xAB,0x54,0x4C,0x80,0xA1,0xDB,0x56,0x43,0xB7, 
-0x91,0x4A,0xCB,0xF3,0x82,0x7A,0x13,0x5C,0x08,0xAB,0x30,0x81,0xED,0x06,0x03,0x55, -0x1D,0x1F,0x04,0x81,0xE5,0x30,0x81,0xE2,0x30,0x81,0xDF,0xA0,0x81,0xDC,0xA0,0x81, -0xD9,0x86,0x35,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72, -0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65,0x72,0x2E,0x64,0x65,0x2F,0x63,0x72,0x6C, -0x2F,0x76,0x32,0x2F,0x74,0x63,0x5F,0x63,0x6C,0x61,0x73,0x73,0x5F,0x32,0x5F,0x63, -0x61,0x5F,0x49,0x49,0x2E,0x63,0x72,0x6C,0x86,0x81,0x9F,0x6C,0x64,0x61,0x70,0x3A, -0x2F,0x2F,0x77,0x77,0x77,0x2E,0x74,0x72,0x75,0x73,0x74,0x63,0x65,0x6E,0x74,0x65, -0x72,0x2E,0x64,0x65,0x2F,0x43,0x4E,0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75, -0x73,0x74,0x43,0x65,0x6E,0x74,0x65,0x72,0x25,0x32,0x30,0x43,0x6C,0x61,0x73,0x73, -0x25,0x32,0x30,0x32,0x25,0x32,0x30,0x43,0x41,0x25,0x32,0x30,0x49,0x49,0x2C,0x4F, -0x3D,0x54,0x43,0x25,0x32,0x30,0x54,0x72,0x75,0x73,0x74,0x43,0x65,0x6E,0x74,0x65, -0x72,0x25,0x32,0x30,0x47,0x6D,0x62,0x48,0x2C,0x4F,0x55,0x3D,0x72,0x6F,0x6F,0x74, -0x63,0x65,0x72,0x74,0x73,0x2C,0x44,0x43,0x3D,0x74,0x72,0x75,0x73,0x74,0x63,0x65, -0x6E,0x74,0x65,0x72,0x2C,0x44,0x43,0x3D,0x64,0x65,0x3F,0x63,0x65,0x72,0x74,0x69, -0x66,0x69,0x63,0x61,0x74,0x65,0x52,0x65,0x76,0x6F,0x63,0x61,0x74,0x69,0x6F,0x6E, -0x4C,0x69,0x73,0x74,0x3F,0x62,0x61,0x73,0x65,0x3F,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x8C,0xD7, -0xDF,0x7E,0xEE,0x1B,0x80,0x10,0xB3,0x83,0xF5,0xDB,0x11,0xEA,0x6B,0x4B,0xA8,0x92, -0x18,0xD9,0xF7,0x07,0x39,0xF5,0x2C,0xBE,0x06,0x75,0x7A,0x68,0x53,0x15,0x1C,0xEA, -0x4A,0xED,0x5E,0xFC,0x23,0xB2,0x13,0xA0,0xD3,0x09,0xFF,0xF6,0xF6,0x2E,0x6B,0x41, -0x71,0x79,0xCD,0xE2,0x6D,0xFD,0xAE,0x59,0x6B,0x85,0x1D,0xB8,0x4E,0x22,0x9A,0xED, -0x66,0x39,0x6E,0x4B,0x94,0xE6,0x55,0xFC,0x0B,0x1B,0x8B,0x77,0xC1,0x53,0x13,0x66, -0x89,0xD9,0x28,0xD6,0x8B,0xF3,0x45,0x4A,0x63,0xB7,0xFD,0x7B,0x0B,0x61,0x5D,0xB8, -0x6D,0xBE,0xC3,0xDC,0x5B,0x79,0xD2,0xED,0x86,0xE5,0xA2,0x4D,0xBE,0x5E,0x74,0x7C, 
-0x6A,0xED,0x16,0x38,0x1F,0x7F,0x58,0x81,0x5A,0x1A,0xEB,0x32,0x88,0x2D,0xB2,0xF3, -0x39,0x77,0x80,0xAF,0x5E,0xB6,0x61,0x75,0x29,0xDB,0x23,0x4D,0x88,0xCA,0x50,0x28, -0xCB,0x85,0xD2,0xD3,0x10,0xA2,0x59,0x6E,0xD3,0x93,0x54,0x00,0x7A,0xA2,0x46,0x95, -0x86,0x05,0x9C,0xA9,0x19,0x98,0xE5,0x31,0x72,0x0C,0x00,0xE2,0x67,0xD9,0x40,0xE0, -0x24,0x33,0x7B,0x6F,0x2C,0xB9,0x5C,0xAB,0x65,0x9D,0x2C,0xAC,0x76,0xEA,0x35,0x99, -0xF5,0x97,0xB9,0x0F,0x24,0xEC,0xC7,0x76,0x21,0x28,0x65,0xAE,0x57,0xE8,0x07,0x88, -0x75,0x4A,0x56,0xA0,0xD2,0x05,0x3A,0xA4,0xE6,0x8D,0x92,0x88,0x2C,0xF3,0xF2,0xE1, -0xC1,0xC6,0x61,0xDB,0x41,0xC5,0xC7,0x9B,0xF7,0x0E,0x1A,0x51,0x45,0xC2,0x61,0x6B, -0xDC,0x64,0x27,0x17,0x8C,0x5A,0xB7,0xDA,0x74,0x28,0xCD,0x97,0xE4,0xBD, +0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xD9,0xE7,0x28, +0x2F,0x52,0x3F,0x36,0x72,0x49,0x88,0x93,0x34,0xF3,0xF8,0x6A,0x1E,0x31,0x54,0x80, +0x9F,0xAD,0x54,0x41,0xB5,0x47,0xDF,0x96,0xA8,0xD4,0xAF,0x80,0x2D,0xB9,0x0A,0xCF, +0x75,0xFD,0x89,0xA5,0x7D,0x24,0xFA,0xE3,0x22,0x0C,0x2B,0xBC,0x95,0x17,0x0B,0x33, +0xBF,0x19,0x4D,0x41,0x06,0x90,0x00,0xBD,0x0C,0x4D,0x10,0xFE,0x07,0xB5,0xE7,0x1C, +0x6E,0x22,0x55,0x31,0x65,0x97,0xBD,0xD3,0x17,0xD2,0x1E,0x62,0xF3,0xDB,0xEA,0x6C, +0x50,0x8C,0x3F,0x84,0x0C,0x96,0xCF,0xB7,0xCB,0x03,0xE0,0xCA,0x6D,0xA1,0x14,0x4C, +0x1B,0x89,0xDD,0xED,0x00,0xB0,0x52,0x7C,0xAF,0x91,0x6C,0xB1,0x38,0x13,0xD1,0xE9, +0x12,0x08,0xC0,0x00,0xB0,0x1C,0x2B,0x11,0xDA,0x77,0x70,0x36,0x9B,0xAE,0xCE,0x79, +0x87,0xDC,0x82,0x70,0xE6,0x09,0x74,0x70,0x55,0x69,0xAF,0xA3,0x68,0x9F,0xBF,0xDD, +0xB6,0x79,0xB3,0xF2,0x9D,0x70,0x29,0x55,0xF4,0xAB,0xFF,0x95,0x61,0xF3,0xC9,0x40, +0x6F,0x1D,0xD1,0xBE,0x93,0xBB,0xD3,0x88,0x2A,0xBB,0x9D,0xBF,0x72,0x5A,0x56,0x71, +0x3B,0x3F,0xD4,0xF3,0xD1,0x0A,0xFE,0x28,0xEF,0xA3,0xEE,0xD9,0x99,0xAF,0x03,0xD3, +0x8F,0x60,0xB7,0xF2,0x92,0xA1,0xB1,0xBD,0x89,0x89,0x1F,0x30,0xCD,0xC3,0xA6,0x2E, +0x62,0x33,0xAE,0x16,0x02,0x77,0x44,0x5A,0xE7,0x81,0x0A,0x3C,0xA7,0x44,0x2E,0x79, 
+0xB8,0x3F,0x04,0xBC,0x5C,0xA0,0x87,0xE1,0x1B,0xAF,0x51,0x8E,0xCD,0xEC,0x2C,0xFA, +0xF8,0xFE,0x6D,0xF0,0x3A,0x7C,0xAA,0x8B,0xE4,0x67,0x95,0x31,0x8D,0x02,0x03,0x01, +0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF, +0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01, +0xFF,0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16, +0x04,0x14,0xCE,0xC3,0x4A,0xB9,0x99,0x55,0xF2,0xB8,0xDB,0x60,0xBF,0xA9,0x7E,0xBD, +0x56,0xB5,0x97,0x36,0xA7,0xD6,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, +0x01,0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0xCA,0xA5,0x55,0x8C,0xE3,0xC8, +0x41,0x6E,0x69,0x27,0xA7,0x75,0x11,0xEF,0x3C,0x86,0x36,0x6F,0xD2,0x9D,0xC6,0x78, +0x38,0x1D,0x69,0x96,0xA2,0x92,0x69,0x2E,0x38,0x6C,0x9B,0x7D,0x04,0xD4,0x89,0xA5, +0xB1,0x31,0x37,0x8A,0xC9,0x21,0xCC,0xAB,0x6C,0xCD,0x8B,0x1C,0x9A,0xD6,0xBF,0x48, +0xD2,0x32,0x66,0xC1,0x8A,0xC0,0xF3,0x2F,0x3A,0xEF,0xC0,0xE3,0xD4,0x91,0x86,0xD1, +0x50,0xE3,0x03,0xDB,0x73,0x77,0x6F,0x4A,0x39,0x53,0xED,0xDE,0x26,0xC7,0xB5,0x7D, +0xAF,0x2B,0x42,0xD1,0x75,0x62,0xE3,0x4A,0x2B,0x02,0xC7,0x50,0x4B,0xE0,0x69,0xE2, +0x96,0x6C,0x0E,0x44,0x66,0x10,0x44,0x8F,0xAD,0x05,0xEB,0xF8,0x79,0xAC,0xA6,0x1B, +0xE8,0x37,0x34,0x9D,0x53,0xC9,0x61,0xAA,0xA2,0x52,0xAF,0x4A,0x70,0x16,0x86,0xC2, +0x3A,0xC8,0xB1,0x13,0x70,0x36,0xD8,0xCF,0xEE,0xF4,0x0A,0x34,0xD5,0x5B,0x4C,0xFD, +0x07,0x9C,0xA2,0xBA,0xD9,0x01,0x72,0x5C,0xF3,0x4D,0xC1,0xDD,0x0E,0xB1,0x1C,0x0D, +0xC4,0x63,0xBE,0xAD,0xF4,0x14,0xFB,0x89,0xEC,0xA2,0x41,0x0E,0x4C,0xCC,0xC8,0x57, +0x40,0xD0,0x6E,0x03,0xAA,0xCD,0x0C,0x8E,0x89,0x99,0x99,0x6C,0xF0,0x3C,0x30,0xAF, +0x38,0xDF,0x6F,0xBC,0xA3,0xBE,0x29,0x20,0x27,0xAB,0x74,0xFF,0x13,0x22,0x78,0xDE, +0x97,0x52,0x55,0x1E,0x83,0xB5,0x54,0x20,0x03,0xEE,0xAE,0xC0,0x4F,0x56,0xDE,0x37, +0xCC,0xC3,0x7F,0xAA,0x04,0x27,0xBB,0xD3,0x77,0xB8,0x62,0xDB,0x17,0x7C,0x9C,0x28, +0x22,0x13,0x73,0x6C,0xCF,0x26,0xF5,0x8A,0x29,0xE7, +}; + + +/* subject:/C=US/O=DigiCert 
Inc/OU=www.digicert.com/CN=DigiCert Trusted Root G4 */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Trusted Root G4 */ + + +const unsigned char DigiCert_Trusted_Root_G4_certificate[1428]={ +0x30,0x82,0x05,0x90,0x30,0x82,0x03,0x78,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x05, +0x9B,0x1B,0x57,0x9E,0x8E,0x21,0x32,0xE2,0x39,0x07,0xBD,0xA7,0x77,0x75,0x5C,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x62, +0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30, +0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74, +0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77, +0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31, +0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x44,0x69,0x67,0x69,0x43,0x65, +0x72,0x74,0x20,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x52,0x6F,0x6F,0x74,0x20, +0x47,0x34,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30, +0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30, +0x30,0x5A,0x30,0x62,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55, +0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69, +0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04, +0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E, +0x63,0x6F,0x6D,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x03,0x13,0x18,0x44,0x69, +0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x54,0x72,0x75,0x73,0x74,0x65,0x64,0x20,0x52, +0x6F,0x6F,0x74,0x20,0x47,0x34,0x30,0x82,0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86, +0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82, +0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0xBF,0xE6,0x90,0x73,0x68,0xDE,0xBB,0xE4,0x5D, +0x4A,0x3C,0x30,0x22,0x30,0x69,0x33,0xEC,0xC2,0xA7,0x25,0x2E,0xC9,0x21,0x3D,0xF2, 
+0x8A,0xD8,0x59,0xC2,0xE1,0x29,0xA7,0x3D,0x58,0xAB,0x76,0x9A,0xCD,0xAE,0x7B,0x1B, +0x84,0x0D,0xC4,0x30,0x1F,0xF3,0x1B,0xA4,0x38,0x16,0xEB,0x56,0xC6,0x97,0x6D,0x1D, +0xAB,0xB2,0x79,0xF2,0xCA,0x11,0xD2,0xE4,0x5F,0xD6,0x05,0x3C,0x52,0x0F,0x52,0x1F, +0xC6,0x9E,0x15,0xA5,0x7E,0xBE,0x9F,0xA9,0x57,0x16,0x59,0x55,0x72,0xAF,0x68,0x93, +0x70,0xC2,0xB2,0xBA,0x75,0x99,0x6A,0x73,0x32,0x94,0xD1,0x10,0x44,0x10,0x2E,0xDF, +0x82,0xF3,0x07,0x84,0xE6,0x74,0x3B,0x6D,0x71,0xE2,0x2D,0x0C,0x1B,0xEE,0x20,0xD5, +0xC9,0x20,0x1D,0x63,0x29,0x2D,0xCE,0xEC,0x5E,0x4E,0xC8,0x93,0xF8,0x21,0x61,0x9B, +0x34,0xEB,0x05,0xC6,0x5E,0xEC,0x5B,0x1A,0xBC,0xEB,0xC9,0xCF,0xCD,0xAC,0x34,0x40, +0x5F,0xB1,0x7A,0x66,0xEE,0x77,0xC8,0x48,0xA8,0x66,0x57,0x57,0x9F,0x54,0x58,0x8E, +0x0C,0x2B,0xB7,0x4F,0xA7,0x30,0xD9,0x56,0xEE,0xCA,0x7B,0x5D,0xE3,0xAD,0xC9,0x4F, +0x5E,0xE5,0x35,0xE7,0x31,0xCB,0xDA,0x93,0x5E,0xDC,0x8E,0x8F,0x80,0xDA,0xB6,0x91, +0x98,0x40,0x90,0x79,0xC3,0x78,0xC7,0xB6,0xB1,0xC4,0xB5,0x6A,0x18,0x38,0x03,0x10, +0x8D,0xD8,0xD4,0x37,0xA4,0x2E,0x05,0x7D,0x88,0xF5,0x82,0x3E,0x10,0x91,0x70,0xAB, +0x55,0x82,0x41,0x32,0xD7,0xDB,0x04,0x73,0x2A,0x6E,0x91,0x01,0x7C,0x21,0x4C,0xD4, +0xBC,0xAE,0x1B,0x03,0x75,0x5D,0x78,0x66,0xD9,0x3A,0x31,0x44,0x9A,0x33,0x40,0xBF, +0x08,0xD7,0x5A,0x49,0xA4,0xC2,0xE6,0xA9,0xA0,0x67,0xDD,0xA4,0x27,0xBC,0xA1,0x4F, +0x39,0xB5,0x11,0x58,0x17,0xF7,0x24,0x5C,0x46,0x8F,0x64,0xF7,0xC1,0x69,0x88,0x76, +0x98,0x76,0x3D,0x59,0x5D,0x42,0x76,0x87,0x89,0x97,0x69,0x7A,0x48,0xF0,0xE0,0xA2, +0x12,0x1B,0x66,0x9A,0x74,0xCA,0xDE,0x4B,0x1E,0xE7,0x0E,0x63,0xAE,0xE6,0xD4,0xEF, +0x92,0x92,0x3A,0x9E,0x3D,0xDC,0x00,0xE4,0x45,0x25,0x89,0xB6,0x9A,0x44,0x19,0x2B, +0x7E,0xC0,0x94,0xB4,0xD2,0x61,0x6D,0xEB,0x33,0xD9,0xC5,0xDF,0x4B,0x04,0x00,0xCC, +0x7D,0x1C,0x95,0xC3,0x8F,0xF7,0x21,0xB2,0xB2,0x11,0xB7,0xBB,0x7F,0xF2,0xD5,0x8C, +0x70,0x2C,0x41,0x60,0xAA,0xB1,0x63,0x18,0x44,0x95,0x1A,0x76,0x62,0x7E,0xF6,0x80, +0xB0,0xFB,0xE8,0x64,0xA6,0x33,0xD1,0x89,0x07,0xE1,0xBD,0xB7,0xE6,0x43,0xA4,0x18, 
+0xB8,0xA6,0x77,0x01,0xE1,0x0F,0x94,0x0C,0x21,0x1D,0xB2,0x54,0x29,0x25,0x89,0x6C, +0xE5,0x0E,0x52,0x51,0x47,0x74,0xBE,0x26,0xAC,0xB6,0x41,0x75,0xDE,0x7A,0xAC,0x5F, +0x8D,0x3F,0xC9,0xBC,0xD3,0x41,0x11,0x12,0x5B,0xE5,0x10,0x50,0xEB,0x31,0xC5,0xCA, +0x72,0x16,0x22,0x09,0xDF,0x7C,0x4C,0x75,0x3F,0x63,0xEC,0x21,0x5F,0xC4,0x20,0x51, +0x6B,0x6F,0xB1,0xAB,0x86,0x8B,0x4F,0xC2,0xD6,0x45,0x5F,0x9D,0x20,0xFC,0xA1,0x1E, +0xC5,0xC0,0x8F,0xA2,0xB1,0x7E,0x0A,0x26,0x99,0xF5,0xE4,0x69,0x2F,0x98,0x1D,0x2D, +0xF5,0xD9,0xA9,0xB2,0x1D,0xE5,0x1B,0x02,0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40, +0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01, +0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01, +0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xEC,0xD7,0xE3,0x82, +0xD2,0x71,0x5D,0x64,0x4C,0xDF,0x2E,0x67,0x3F,0xE7,0xBA,0x98,0xAE,0x1C,0x0F,0x4F, +0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03, +0x82,0x02,0x01,0x00,0xBB,0x61,0xD9,0x7D,0xA9,0x6C,0xBE,0x17,0xC4,0x91,0x1B,0xC3, +0xA1,0xA2,0x00,0x8D,0xE3,0x64,0x68,0x0F,0x56,0xCF,0x77,0xAE,0x70,0xF9,0xFD,0x9A, +0x4A,0x99,0xB9,0xC9,0x78,0x5C,0x0C,0x0C,0x5F,0xE4,0xE6,0x14,0x29,0x56,0x0B,0x36, +0x49,0x5D,0x44,0x63,0xE0,0xAD,0x9C,0x96,0x18,0x66,0x1B,0x23,0x0D,0x3D,0x79,0xE9, +0x6D,0x6B,0xD6,0x54,0xF8,0xD2,0x3C,0xC1,0x43,0x40,0xAE,0x1D,0x50,0xF5,0x52,0xFC, +0x90,0x3B,0xBB,0x98,0x99,0x69,0x6B,0xC7,0xC1,0xA7,0xA8,0x68,0xA4,0x27,0xDC,0x9D, +0xF9,0x27,0xAE,0x30,0x85,0xB9,0xF6,0x67,0x4D,0x3A,0x3E,0x8F,0x59,0x39,0x22,0x53, +0x44,0xEB,0xC8,0x5D,0x03,0xCA,0xED,0x50,0x7A,0x7D,0x62,0x21,0x0A,0x80,0xC8,0x73, +0x66,0xD1,0xA0,0x05,0x60,0x5F,0xE8,0xA5,0xB4,0xA7,0xAF,0xA8,0xF7,0x6D,0x35,0x9C, +0x7C,0x5A,0x8A,0xD6,0xA2,0x38,0x99,0xF3,0x78,0x8B,0xF4,0x4D,0xD2,0x20,0x0B,0xDE, +0x04,0xEE,0x8C,0x9B,0x47,0x81,0x72,0x0D,0xC0,0x14,0x32,0xEF,0x30,0x59,0x2E,0xAE, +0xE0,0x71,0xF2,0x56,0xE4,0x6A,0x97,0x6F,0x92,0x50,0x6D,0x96,0x8D,0x68,0x7A,0x9A, 
+0xB2,0x36,0x14,0x7A,0x06,0xF2,0x24,0xB9,0x09,0x11,0x50,0xD7,0x08,0xB1,0xB8,0x89, +0x7A,0x84,0x23,0x61,0x42,0x29,0xE5,0xA3,0xCD,0xA2,0x20,0x41,0xD7,0xD1,0x9C,0x64, +0xD9,0xEA,0x26,0xA1,0x8B,0x14,0xD7,0x4C,0x19,0xB2,0x50,0x41,0x71,0x3D,0x3F,0x4D, +0x70,0x23,0x86,0x0C,0x4A,0xDC,0x81,0xD2,0xCC,0x32,0x94,0x84,0x0D,0x08,0x09,0x97, +0x1C,0x4F,0xC0,0xEE,0x6B,0x20,0x74,0x30,0xD2,0xE0,0x39,0x34,0x10,0x85,0x21,0x15, +0x01,0x08,0xE8,0x55,0x32,0xDE,0x71,0x49,0xD9,0x28,0x17,0x50,0x4D,0xE6,0xBE,0x4D, +0xD1,0x75,0xAC,0xD0,0xCA,0xFB,0x41,0xB8,0x43,0xA5,0xAA,0xD3,0xC3,0x05,0x44,0x4F, +0x2C,0x36,0x9B,0xE2,0xFA,0xE2,0x45,0xB8,0x23,0x53,0x6C,0x06,0x6F,0x67,0x55,0x7F, +0x46,0xB5,0x4C,0x3F,0x6E,0x28,0x5A,0x79,0x26,0xD2,0xA4,0xA8,0x62,0x97,0xD2,0x1E, +0xE2,0xED,0x4A,0x8B,0xBC,0x1B,0xFD,0x47,0x4A,0x0D,0xDF,0x67,0x66,0x7E,0xB2,0x5B, +0x41,0xD0,0x3B,0xE4,0xF4,0x3B,0xF4,0x04,0x63,0xE9,0xEF,0xC2,0x54,0x00,0x51,0xA0, +0x8A,0x2A,0xC9,0xCE,0x78,0xCC,0xD5,0xEA,0x87,0x04,0x18,0xB3,0xCE,0xAF,0x49,0x88, +0xAF,0xF3,0x92,0x99,0xB6,0xB3,0xE6,0x61,0x0F,0xD2,0x85,0x00,0xE7,0x50,0x1A,0xE4, +0x1B,0x95,0x9D,0x19,0xA1,0xB9,0x9C,0xB1,0x9B,0xB1,0x00,0x1E,0xEF,0xD0,0x0F,0x4F, +0x42,0x6C,0xC9,0x0A,0xBC,0xEE,0x43,0xFA,0x3A,0x71,0xA5,0xC8,0x4D,0x26,0xA5,0x35, +0xFD,0x89,0x5D,0xBC,0x85,0x62,0x1D,0x32,0xD2,0xA0,0x2B,0x54,0xED,0x9A,0x57,0xC1, +0xDB,0xFA,0x10,0xCF,0x19,0xB7,0x8B,0x4A,0x1B,0x8F,0x01,0xB6,0x27,0x95,0x53,0xE8, +0xB6,0x89,0x6D,0x5B,0xBC,0x68,0xD4,0x23,0xE8,0x8B,0x51,0xA2,0x56,0xF9,0xF0,0xA6, +0x80,0xA0,0xD6,0x1E,0xB3,0xBC,0x0F,0x0F,0x53,0x75,0x29,0xAA,0xEA,0x13,0x77,0xE4, +0xDE,0x8C,0x81,0x21,0xAD,0x07,0x10,0x47,0x11,0xAD,0x87,0x3D,0x07,0xD1,0x75,0xBC, +0xCF,0xF3,0x66,0x7E, }; -/* subject:/O=Cybertrust, Inc/CN=Cybertrust Global Root */ -/* issuer :/O=Cybertrust, Inc/CN=Cybertrust Global Root */ +/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification Authority */ +/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO ECC Certification 
Authority */ -const unsigned char Cybertrust_Global_Root_certificate[933]={ -0x30,0x82,0x03,0xA1,0x30,0x82,0x02,0x89,0xA0,0x03,0x02,0x01,0x02,0x02,0x0B,0x04, -0x00,0x00,0x00,0x00,0x01,0x0F,0x85,0xAA,0x2D,0x48,0x30,0x0D,0x06,0x09,0x2A,0x86, -0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x3B,0x31,0x18,0x30,0x16,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74, -0x2C,0x20,0x49,0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16, -0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61, -0x6C,0x20,0x52,0x6F,0x6F,0x74,0x30,0x1E,0x17,0x0D,0x30,0x36,0x31,0x32,0x31,0x35, -0x30,0x38,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x31,0x31,0x32,0x31,0x35,0x30, -0x38,0x30,0x30,0x30,0x30,0x5A,0x30,0x3B,0x31,0x18,0x30,0x16,0x06,0x03,0x55,0x04, -0x0A,0x13,0x0F,0x43,0x79,0x62,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2C,0x20,0x49, -0x6E,0x63,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x43,0x79,0x62, -0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x52, -0x6F,0x6F,0x74,0x30,0x82,0x01,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7, -0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02, -0x82,0x01,0x01,0x00,0xF8,0xC8,0xBC,0xBD,0x14,0x50,0x66,0x13,0xFF,0xF0,0xD3,0x79, -0xEC,0x23,0xF2,0xB7,0x1A,0xC7,0x8E,0x85,0xF1,0x12,0x73,0xA6,0x19,0xAA,0x10,0xDB, -0x9C,0xA2,0x65,0x74,0x5A,0x77,0x3E,0x51,0x7D,0x56,0xF6,0xDC,0x23,0xB6,0xD4,0xED, -0x5F,0x58,0xB1,0x37,0x4D,0xD5,0x49,0x0E,0x6E,0xF5,0x6A,0x87,0xD6,0xD2,0x8C,0xD2, -0x27,0xC6,0xE2,0xFF,0x36,0x9F,0x98,0x65,0xA0,0x13,0x4E,0xC6,0x2A,0x64,0x9B,0xD5, -0x90,0x12,0xCF,0x14,0x06,0xF4,0x3B,0xE3,0xD4,0x28,0xBE,0xE8,0x0E,0xF8,0xAB,0x4E, -0x48,0x94,0x6D,0x8E,0x95,0x31,0x10,0x5C,0xED,0xA2,0x2D,0xBD,0xD5,0x3A,0x6D,0xB2, -0x1C,0xBB,0x60,0xC0,0x46,0x4B,0x01,0xF5,0x49,0xAE,0x7E,0x46,0x8A,0xD0,0x74,0x8D, -0xA1,0x0C,0x02,0xCE,0xEE,0xFC,0xE7,0x8F,0xB8,0x6B,0x66,0xF3,0x7F,0x44,0x00,0xBF, 
-0x66,0x25,0x14,0x2B,0xDD,0x10,0x30,0x1D,0x07,0x96,0x3F,0x4D,0xF6,0x6B,0xB8,0x8F, -0xB7,0x7B,0x0C,0xA5,0x38,0xEB,0xDE,0x47,0xDB,0xD5,0x5D,0x39,0xFC,0x88,0xA7,0xF3, -0xD7,0x2A,0x74,0xF1,0xE8,0x5A,0xA2,0x3B,0x9F,0x50,0xBA,0xA6,0x8C,0x45,0x35,0xC2, -0x50,0x65,0x95,0xDC,0x63,0x82,0xEF,0xDD,0xBF,0x77,0x4D,0x9C,0x62,0xC9,0x63,0x73, -0x16,0xD0,0x29,0x0F,0x49,0xA9,0x48,0xF0,0xB3,0xAA,0xB7,0x6C,0xC5,0xA7,0x30,0x39, -0x40,0x5D,0xAE,0xC4,0xE2,0x5D,0x26,0x53,0xF0,0xCE,0x1C,0x23,0x08,0x61,0xA8,0x94, -0x19,0xBA,0x04,0x62,0x40,0xEC,0x1F,0x38,0x70,0x77,0x12,0x06,0x71,0xA7,0x30,0x18, -0x5D,0x25,0x27,0xA5,0x02,0x03,0x01,0x00,0x01,0xA3,0x81,0xA5,0x30,0x81,0xA2,0x30, -0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30, -0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF, -0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xB6,0x08,0x7B,0x0D,0x7A, -0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70,0x57,0x30, -0x3F,0x06,0x03,0x55,0x1D,0x1F,0x04,0x38,0x30,0x36,0x30,0x34,0xA0,0x32,0xA0,0x30, -0x86,0x2E,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x32,0x2E,0x70,0x75, -0x62,0x6C,0x69,0x63,0x2D,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x63, -0x72,0x6C,0x2F,0x63,0x74,0x2F,0x63,0x74,0x72,0x6F,0x6F,0x74,0x2E,0x63,0x72,0x6C, -0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14,0xB6,0x08,0x7B, -0x0D,0x7A,0xCC,0xAC,0x20,0x4C,0x86,0x56,0x32,0x5E,0xCF,0xAB,0x6E,0x85,0x2D,0x70, -0x57,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00, -0x03,0x82,0x01,0x01,0x00,0x56,0xEF,0x0A,0x23,0xA0,0x54,0x4E,0x95,0x97,0xC9,0xF8, -0x89,0xDA,0x45,0xC1,0xD4,0xA3,0x00,0x25,0xF4,0x1F,0x13,0xAB,0xB7,0xA3,0x85,0x58, -0x69,0xC2,0x30,0xAD,0xD8,0x15,0x8A,0x2D,0xE3,0xC9,0xCD,0x81,0x5A,0xF8,0x73,0x23, -0x5A,0xA7,0x7C,0x05,0xF3,0xFD,0x22,0x3B,0x0E,0xD1,0x06,0xC4,0xDB,0x36,0x4C,0x73, -0x04,0x8E,0xE5,0xB0,0x22,0xE4,0xC5,0xF3,0x2E,0xA5,0xD9,0x23,0xE3,0xB8,0x4E,0x4A, 
-0x20,0xA7,0x6E,0x02,0x24,0x9F,0x22,0x60,0x67,0x7B,0x8B,0x1D,0x72,0x09,0xC5,0x31, -0x5C,0xE9,0x79,0x9F,0x80,0x47,0x3D,0xAD,0xA1,0x0B,0x07,0x14,0x3D,0x47,0xFF,0x03, -0x69,0x1A,0x0C,0x0B,0x44,0xE7,0x63,0x25,0xA7,0x7F,0xB2,0xC9,0xB8,0x76,0x84,0xED, -0x23,0xF6,0x7D,0x07,0xAB,0x45,0x7E,0xD3,0xDF,0xB3,0xBF,0xE9,0x8A,0xB6,0xCD,0xA8, -0xA2,0x67,0x2B,0x52,0xD5,0xB7,0x65,0xF0,0x39,0x4C,0x63,0xA0,0x91,0x79,0x93,0x52, -0x0F,0x54,0xDD,0x83,0xBB,0x9F,0xD1,0x8F,0xA7,0x53,0x73,0xC3,0xCB,0xFF,0x30,0xEC, -0x7C,0x04,0xB8,0xD8,0x44,0x1F,0x93,0x5F,0x71,0x09,0x22,0xB7,0x6E,0x3E,0xEA,0x1C, -0x03,0x4E,0x9D,0x1A,0x20,0x61,0xFB,0x81,0x37,0xEC,0x5E,0xFC,0x0A,0x45,0xAB,0xD7, -0xE7,0x17,0x55,0xD0,0xA0,0xEA,0x60,0x9B,0xA6,0xF6,0xE3,0x8C,0x5B,0x29,0xC2,0x06, -0x60,0x14,0x9D,0x2D,0x97,0x4C,0xA9,0x93,0x15,0x9D,0x61,0xC4,0x01,0x5F,0x48,0xD6, -0x58,0xBD,0x56,0x31,0x12,0x4E,0x11,0xC8,0x21,0xE0,0xB3,0x11,0x91,0x65,0xDB,0xB4, -0xA6,0x88,0x38,0xCE,0x55, +const unsigned char COMODO_ECC_Certification_Authority_certificate[653]={ +0x30,0x82,0x02,0x89,0x30,0x82,0x02,0x0F,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x1F, +0x47,0xAF,0xAA,0x62,0x00,0x70,0x50,0x54,0x4C,0x01,0x9E,0x9B,0x63,0x99,0x2A,0x30, +0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x85,0x31,0x0B, +0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06, +0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61, +0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04, +0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03, +0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C, +0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13, +0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74, +0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72, +0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x30,0x38,0x30,0x33,0x30,0x36,0x30,0x30,0x30, 
+0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32,0x33,0x35,0x39, +0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13, +0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72, +0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72, +0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F, +0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D, +0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B, +0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20, +0x45,0x43,0x43,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F, +0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x76,0x30,0x10,0x06, +0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03, +0x62,0x00,0x04,0x03,0x47,0x7B,0x2F,0x75,0xC9,0x82,0x15,0x85,0xFB,0x75,0xE4,0x91, +0x16,0xD4,0xAB,0x62,0x99,0xF5,0x3E,0x52,0x0B,0x06,0xCE,0x41,0x00,0x7F,0x97,0xE1, +0x0A,0x24,0x3C,0x1D,0x01,0x04,0xEE,0x3D,0xD2,0x8D,0x09,0x97,0x0C,0xE0,0x75,0xE4, +0xFA,0xFB,0x77,0x8A,0x2A,0xF5,0x03,0x60,0x4B,0x36,0x8B,0x16,0x23,0x16,0xAD,0x09, +0x71,0xF4,0x4A,0xF4,0x28,0x50,0xB4,0xFE,0x88,0x1C,0x6E,0x3F,0x6C,0x2F,0x2F,0x09, +0x59,0x5B,0xA5,0x5B,0x0B,0x33,0x99,0xE2,0xC3,0x3D,0x89,0xF9,0x6A,0x2C,0xEF,0xB2, +0xD3,0x06,0xE9,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16, +0x04,0x14,0x75,0x71,0xA7,0x19,0x48,0x19,0xBC,0x9D,0x9D,0xEA,0x41,0x47,0xDF,0x94, +0xC4,0x48,0x77,0x99,0xD3,0x79,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, +0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF, +0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D, +0x04,0x03,0x03,0x03,0x68,0x00,0x30,0x65,0x02,0x31,0x00,0xEF,0x03,0x5B,0x7A,0xAC, +0xB7,0x78,0x0A,0x72,0xB7,0x88,0xDF,0xFF,0xB5,0x46,0x14,0x09,0x0A,0xFA,0xA0,0xE6, 
+0x7D,0x08,0xC6,0x1A,0x87,0xBD,0x18,0xA8,0x73,0xBD,0x26,0xCA,0x60,0x0C,0x9D,0xCE, +0x99,0x9F,0xCF,0x5C,0x0F,0x30,0xE1,0xBE,0x14,0x31,0xEA,0x02,0x30,0x14,0xF4,0x93, +0x3C,0x49,0xA7,0x33,0x7A,0x90,0x46,0x47,0xB3,0x63,0x7D,0x13,0x9B,0x4E,0xB7,0x6F, +0x18,0x37,0x80,0x53,0xFE,0xDD,0x20,0xE0,0x35,0x9A,0x36,0xD1,0xC7,0x01,0xB9,0xE6, +0xDC,0xDD,0xF3,0xFF,0x1D,0x2C,0x3A,0x16,0x57,0xD9,0x92,0x39,0xD6, }; @@ -3861,424 +2496,264 @@ const unsigned char Entrust_Root_Certification_Authority___EC1_certificate[765]= }; -/* subject:/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */ -/* issuer :/C=US/O=GeoTrust Inc./OU=(c) 2007 GeoTrust Inc. - For authorized use only/CN=GeoTrust Primary Certification Authority - G2 */ - - -const unsigned char GeoTrust_Primary_Certification_Authority___G2_certificate[690]={ -0x30,0x82,0x02,0xAE,0x30,0x82,0x02,0x35,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x3C, -0xB2,0xF4,0x48,0x0A,0x00,0xE2,0xFE,0xEB,0x24,0x3B,0x5E,0x60,0x3E,0xC3,0x6B,0x30, -0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x81,0x98,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06, -0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49, -0x6E,0x63,0x2E,0x31,0x39,0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63, -0x29,0x20,0x32,0x30,0x30,0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20, -0x49,0x6E,0x63,0x2E,0x20,0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F, -0x72,0x69,0x7A,0x65,0x64,0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36, -0x30,0x34,0x06,0x03,0x55,0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73, -0x74,0x20,0x50,0x72,0x69,0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66, -0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74, -0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x37,0x31,0x31,0x30,0x35, 
-0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32, -0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x98,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, -0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13, -0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x39, -0x30,0x37,0x06,0x03,0x55,0x04,0x0B,0x13,0x30,0x28,0x63,0x29,0x20,0x32,0x30,0x30, -0x37,0x20,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x20, -0x2D,0x20,0x46,0x6F,0x72,0x20,0x61,0x75,0x74,0x68,0x6F,0x72,0x69,0x7A,0x65,0x64, -0x20,0x75,0x73,0x65,0x20,0x6F,0x6E,0x6C,0x79,0x31,0x36,0x30,0x34,0x06,0x03,0x55, -0x04,0x03,0x13,0x2D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x50,0x72,0x69, -0x6D,0x61,0x72,0x79,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69, -0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47, -0x32,0x30,0x76,0x30,0x10,0x06,0x07,0x2A,0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05, -0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00,0x04,0x15,0xB1,0xE8,0xFD,0x03,0x15,0x43, -0xE5,0xAC,0xEB,0x87,0x37,0x11,0x62,0xEF,0xD2,0x83,0x36,0x52,0x7D,0x45,0x57,0x0B, -0x4A,0x8D,0x7B,0x54,0x3B,0x3A,0x6E,0x5F,0x15,0x02,0xC0,0x50,0xA6,0xCF,0x25,0x2F, -0x7D,0xCA,0x48,0xB8,0xC7,0x50,0x63,0x1C,0x2A,0x21,0x08,0x7C,0x9A,0x36,0xD8,0x0B, -0xFE,0xD1,0x26,0xC5,0x58,0x31,0x30,0x28,0x25,0xF3,0x5D,0x5D,0xA3,0xB8,0xB6,0xA5, -0xB4,0x92,0xED,0x6C,0x2C,0x9F,0xEB,0xDD,0x43,0x89,0xA2,0x3C,0x4B,0x48,0x91,0x1D, -0x50,0xEC,0x26,0xDF,0xD6,0x60,0x2E,0xBD,0x21,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06, -0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E, -0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D, -0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x15,0x5F,0x35,0x57,0x51,0x55,0xFB, -0x25,0xB2,0xAD,0x03,0x69,0xFC,0x01,0xA3,0xFA,0xBE,0x11,0x55,0xD5,0x30,0x0A,0x06, -0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30, 
-0x64,0x96,0x59,0xA6,0xE8,0x09,0xDE,0x8B,0xBA,0xFA,0x5A,0x88,0x88,0xF0,0x1F,0x91, -0xD3,0x46,0xA8,0xF2,0x4A,0x4C,0x02,0x63,0xFB,0x6C,0x5F,0x38,0xDB,0x2E,0x41,0x93, -0xA9,0x0E,0xE6,0x9D,0xDC,0x31,0x1C,0xB2,0xA0,0xA7,0x18,0x1C,0x79,0xE1,0xC7,0x36, -0x02,0x30,0x3A,0x56,0xAF,0x9A,0x74,0x6C,0xF6,0xFB,0x83,0xE0,0x33,0xD3,0x08,0x5F, -0xA1,0x9C,0xC2,0x5B,0x9F,0x46,0xD6,0xB6,0xCB,0x91,0x06,0x63,0xA2,0x06,0xE7,0x33, -0xAC,0x3E,0xA8,0x81,0x12,0xD0,0xCB,0xBA,0xD0,0x92,0x0B,0xB6,0x9E,0x96,0xAA,0x04, -0x0F,0x8A, -}; - - -/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */ -/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA 2 */ - - -const unsigned char GeoTrust_Global_CA_2_certificate[874]={ -0x30,0x82,0x03,0x66,0x30,0x82,0x02,0x4E,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x01, -0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30, -0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x16, -0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72,0x75,0x73, -0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B,0x06,0x03,0x55,0x04,0x03,0x13, -0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62,0x61,0x6C, -0x20,0x43,0x41,0x20,0x32,0x30,0x1E,0x17,0x0D,0x30,0x34,0x30,0x33,0x30,0x34,0x30, -0x35,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x31,0x39,0x30,0x33,0x30,0x34,0x30,0x35, -0x30,0x30,0x30,0x30,0x5A,0x30,0x44,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, -0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47, -0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1D,0x30,0x1B, -0x06,0x03,0x55,0x04,0x03,0x13,0x14,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20, -0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x20,0x32,0x30,0x82,0x01,0x22,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82, -0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xEF,0x3C,0x4D,0x40, 
-0x3D,0x10,0xDF,0x3B,0x53,0x00,0xE1,0x67,0xFE,0x94,0x60,0x15,0x3E,0x85,0x88,0xF1, -0x89,0x0D,0x90,0xC8,0x28,0x23,0x99,0x05,0xE8,0x2B,0x20,0x9D,0xC6,0xF3,0x60,0x46, -0xD8,0xC1,0xB2,0xD5,0x8C,0x31,0xD9,0xDC,0x20,0x79,0x24,0x81,0xBF,0x35,0x32,0xFC, -0x63,0x69,0xDB,0xB1,0x2A,0x6B,0xEE,0x21,0x58,0xF2,0x08,0xE9,0x78,0xCB,0x6F,0xCB, -0xFC,0x16,0x52,0xC8,0x91,0xC4,0xFF,0x3D,0x73,0xDE,0xB1,0x3E,0xA7,0xC2,0x7D,0x66, -0xC1,0xF5,0x7E,0x52,0x24,0x1A,0xE2,0xD5,0x67,0x91,0xD0,0x82,0x10,0xD7,0x78,0x4B, -0x4F,0x2B,0x42,0x39,0xBD,0x64,0x2D,0x40,0xA0,0xB0,0x10,0xD3,0x38,0x48,0x46,0x88, -0xA1,0x0C,0xBB,0x3A,0x33,0x2A,0x62,0x98,0xFB,0x00,0x9D,0x13,0x59,0x7F,0x6F,0x3B, -0x72,0xAA,0xEE,0xA6,0x0F,0x86,0xF9,0x05,0x61,0xEA,0x67,0x7F,0x0C,0x37,0x96,0x8B, -0xE6,0x69,0x16,0x47,0x11,0xC2,0x27,0x59,0x03,0xB3,0xA6,0x60,0xC2,0x21,0x40,0x56, -0xFA,0xA0,0xC7,0x7D,0x3A,0x13,0xE3,0xEC,0x57,0xC7,0xB3,0xD6,0xAE,0x9D,0x89,0x80, -0xF7,0x01,0xE7,0x2C,0xF6,0x96,0x2B,0x13,0x0D,0x79,0x2C,0xD9,0xC0,0xE4,0x86,0x7B, -0x4B,0x8C,0x0C,0x72,0x82,0x8A,0xFB,0x17,0xCD,0x00,0x6C,0x3A,0x13,0x3C,0xB0,0x84, -0x87,0x4B,0x16,0x7A,0x29,0xB2,0x4F,0xDB,0x1D,0xD4,0x0B,0xF3,0x66,0x37,0xBD,0xD8, -0xF6,0x57,0xBB,0x5E,0x24,0x7A,0xB8,0x3C,0x8B,0xB9,0xFA,0x92,0x1A,0x1A,0x84,0x9E, -0xD8,0x74,0x8F,0xAA,0x1B,0x7F,0x5E,0xF4,0xFE,0x45,0x22,0x21,0x02,0x03,0x01,0x00, -0x01,0xA3,0x63,0x30,0x61,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, -0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, -0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9,0x10, -0x15,0x58,0x20,0x05,0x09,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16, -0x80,0x14,0x71,0x38,0x36,0xF2,0x02,0x31,0x53,0x47,0x2B,0x6E,0xBA,0x65,0x46,0xA9, -0x10,0x15,0x58,0x20,0x05,0x09,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, -0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D, -0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x03,0xF7,0xB5,0x2B,0xAB,0x5D, 
-0x10,0xFC,0x7B,0xB2,0xB2,0x5E,0xAC,0x9B,0x0E,0x7E,0x53,0x78,0x59,0x3E,0x42,0x04, -0xFE,0x75,0xA3,0xAD,0xAC,0x81,0x4E,0xD7,0x02,0x8B,0x5E,0xC4,0x2D,0xC8,0x52,0x76, -0xC7,0x2C,0x1F,0xFC,0x81,0x32,0x98,0xD1,0x4B,0xC6,0x92,0x93,0x33,0x35,0x31,0x2F, -0xFC,0xD8,0x1D,0x44,0xDD,0xE0,0x81,0x7F,0x9D,0xE9,0x8B,0xE1,0x64,0x91,0x62,0x0B, -0x39,0x08,0x8C,0xAC,0x74,0x9D,0x59,0xD9,0x7A,0x59,0x52,0x97,0x11,0xB9,0x16,0x7B, -0x6F,0x45,0xD3,0x96,0xD9,0x31,0x7D,0x02,0x36,0x0F,0x9C,0x3B,0x6E,0xCF,0x2C,0x0D, -0x03,0x46,0x45,0xEB,0xA0,0xF4,0x7F,0x48,0x44,0xC6,0x08,0x40,0xCC,0xDE,0x1B,0x70, -0xB5,0x29,0xAD,0xBA,0x8B,0x3B,0x34,0x65,0x75,0x1B,0x71,0x21,0x1D,0x2C,0x14,0x0A, -0xB0,0x96,0x95,0xB8,0xD6,0xEA,0xF2,0x65,0xFB,0x29,0xBA,0x4F,0xEA,0x91,0x93,0x74, -0x69,0xB6,0xF2,0xFF,0xE1,0x1A,0xD0,0x0C,0xD1,0x76,0x85,0xCB,0x8A,0x25,0xBD,0x97, -0x5E,0x2C,0x6F,0x15,0x99,0x26,0xE7,0xB6,0x29,0xFF,0x22,0xEC,0xC9,0x02,0xC7,0x56, -0x00,0xCD,0x49,0xB9,0xB3,0x6C,0x7B,0x53,0x04,0x1A,0xE2,0xA8,0xC9,0xAA,0x12,0x05, -0x23,0xC2,0xCE,0xE7,0xBB,0x04,0x02,0xCC,0xC0,0x47,0xA2,0xE4,0xC4,0x29,0x2F,0x5B, -0x45,0x57,0x89,0x51,0xEE,0x3C,0xEB,0x52,0x08,0xFF,0x07,0x35,0x1E,0x9F,0x35,0x6A, -0x47,0x4A,0x56,0x98,0xD1,0x5A,0x85,0x1F,0x8C,0xF5,0x22,0xBF,0xAB,0xCE,0x83,0xF3, -0xE2,0x22,0x29,0xAE,0x7D,0x83,0x40,0xA8,0xBA,0x6C, -}; - - -/* subject:/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Certification Authority */ -/* issuer :/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Certification Authority */ +/* subject:/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */ +/* issuer :/C=US/O=GeoTrust Inc./CN=GeoTrust Global CA */ -const unsigned char COMODO_RSA_Certification_Authority_certificate[1500]={ -0x30,0x82,0x05,0xD8,0x30,0x82,0x03,0xC0,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x4C, -0xAA,0xF9,0xCA,0xDB,0x63,0x6F,0xE0,0x1F,0xF7,0x4E,0xD8,0x5B,0x03,0x86,0x9D,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x30,0x81, 
-0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B, -0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13,0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72, -0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73,0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06, -0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61,0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30, -0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43, -0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65,0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55, -0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x52,0x53,0x41,0x20,0x43, -0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74, -0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x1E,0x17,0x0D,0x31,0x30,0x30,0x31,0x31,0x39, -0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x38,0x32, -0x33,0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x85,0x31,0x0B,0x30,0x09,0x06,0x03,0x55, -0x04,0x06,0x13,0x02,0x47,0x42,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x08,0x13, -0x12,0x47,0x72,0x65,0x61,0x74,0x65,0x72,0x20,0x4D,0x61,0x6E,0x63,0x68,0x65,0x73, -0x74,0x65,0x72,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x07,0x13,0x07,0x53,0x61, -0x6C,0x66,0x6F,0x72,0x64,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13,0x11, -0x43,0x4F,0x4D,0x4F,0x44,0x4F,0x20,0x43,0x41,0x20,0x4C,0x69,0x6D,0x69,0x74,0x65, -0x64,0x31,0x2B,0x30,0x29,0x06,0x03,0x55,0x04,0x03,0x13,0x22,0x43,0x4F,0x4D,0x4F, -0x44,0x4F,0x20,0x52,0x53,0x41,0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61, -0x74,0x69,0x6F,0x6E,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79,0x30,0x82, -0x02,0x22,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05, -0x00,0x03,0x82,0x02,0x0F,0x00,0x30,0x82,0x02,0x0A,0x02,0x82,0x02,0x01,0x00,0x91, -0xE8,0x54,0x92,0xD2,0x0A,0x56,0xB1,0xAC,0x0D,0x24,0xDD,0xC5,0xCF,0x44,0x67,0x74, -0x99,0x2B,0x37,0xA3,0x7D,0x23,0x70,0x00,0x71,0xBC,0x53,0xDF,0xC4,0xFA,0x2A,0x12, -0x8F,0x4B,0x7F,0x10,0x56,0xBD,0x9F,0x70,0x72,0xB7,0x61,0x7F,0xC9,0x4B,0x0F,0x17, 
-0xA7,0x3D,0xE3,0xB0,0x04,0x61,0xEE,0xFF,0x11,0x97,0xC7,0xF4,0x86,0x3E,0x0A,0xFA, -0x3E,0x5C,0xF9,0x93,0xE6,0x34,0x7A,0xD9,0x14,0x6B,0xE7,0x9C,0xB3,0x85,0xA0,0x82, -0x7A,0x76,0xAF,0x71,0x90,0xD7,0xEC,0xFD,0x0D,0xFA,0x9C,0x6C,0xFA,0xDF,0xB0,0x82, -0xF4,0x14,0x7E,0xF9,0xBE,0xC4,0xA6,0x2F,0x4F,0x7F,0x99,0x7F,0xB5,0xFC,0x67,0x43, -0x72,0xBD,0x0C,0x00,0xD6,0x89,0xEB,0x6B,0x2C,0xD3,0xED,0x8F,0x98,0x1C,0x14,0xAB, -0x7E,0xE5,0xE3,0x6E,0xFC,0xD8,0xA8,0xE4,0x92,0x24,0xDA,0x43,0x6B,0x62,0xB8,0x55, -0xFD,0xEA,0xC1,0xBC,0x6C,0xB6,0x8B,0xF3,0x0E,0x8D,0x9A,0xE4,0x9B,0x6C,0x69,0x99, -0xF8,0x78,0x48,0x30,0x45,0xD5,0xAD,0xE1,0x0D,0x3C,0x45,0x60,0xFC,0x32,0x96,0x51, -0x27,0xBC,0x67,0xC3,0xCA,0x2E,0xB6,0x6B,0xEA,0x46,0xC7,0xC7,0x20,0xA0,0xB1,0x1F, -0x65,0xDE,0x48,0x08,0xBA,0xA4,0x4E,0xA9,0xF2,0x83,0x46,0x37,0x84,0xEB,0xE8,0xCC, -0x81,0x48,0x43,0x67,0x4E,0x72,0x2A,0x9B,0x5C,0xBD,0x4C,0x1B,0x28,0x8A,0x5C,0x22, -0x7B,0xB4,0xAB,0x98,0xD9,0xEE,0xE0,0x51,0x83,0xC3,0x09,0x46,0x4E,0x6D,0x3E,0x99, -0xFA,0x95,0x17,0xDA,0x7C,0x33,0x57,0x41,0x3C,0x8D,0x51,0xED,0x0B,0xB6,0x5C,0xAF, -0x2C,0x63,0x1A,0xDF,0x57,0xC8,0x3F,0xBC,0xE9,0x5D,0xC4,0x9B,0xAF,0x45,0x99,0xE2, -0xA3,0x5A,0x24,0xB4,0xBA,0xA9,0x56,0x3D,0xCF,0x6F,0xAA,0xFF,0x49,0x58,0xBE,0xF0, -0xA8,0xFF,0xF4,0xB8,0xAD,0xE9,0x37,0xFB,0xBA,0xB8,0xF4,0x0B,0x3A,0xF9,0xE8,0x43, -0x42,0x1E,0x89,0xD8,0x84,0xCB,0x13,0xF1,0xD9,0xBB,0xE1,0x89,0x60,0xB8,0x8C,0x28, -0x56,0xAC,0x14,0x1D,0x9C,0x0A,0xE7,0x71,0xEB,0xCF,0x0E,0xDD,0x3D,0xA9,0x96,0xA1, -0x48,0xBD,0x3C,0xF7,0xAF,0xB5,0x0D,0x22,0x4C,0xC0,0x11,0x81,0xEC,0x56,0x3B,0xF6, -0xD3,0xA2,0xE2,0x5B,0xB7,0xB2,0x04,0x22,0x52,0x95,0x80,0x93,0x69,0xE8,0x8E,0x4C, -0x65,0xF1,0x91,0x03,0x2D,0x70,0x74,0x02,0xEA,0x8B,0x67,0x15,0x29,0x69,0x52,0x02, -0xBB,0xD7,0xDF,0x50,0x6A,0x55,0x46,0xBF,0xA0,0xA3,0x28,0x61,0x7F,0x70,0xD0,0xC3, -0xA2,0xAA,0x2C,0x21,0xAA,0x47,0xCE,0x28,0x9C,0x06,0x45,0x76,0xBF,0x82,0x18,0x27, -0xB4,0xD5,0xAE,0xB4,0xCB,0x50,0xE6,0x6B,0xF4,0x4C,0x86,0x71,0x30,0xE9,0xA6,0xDF, 
-0x16,0x86,0xE0,0xD8,0xFF,0x40,0xDD,0xFB,0xD0,0x42,0x88,0x7F,0xA3,0x33,0x3A,0x2E, -0x5C,0x1E,0x41,0x11,0x81,0x63,0xCE,0x18,0x71,0x6B,0x2B,0xEC,0xA6,0x8A,0xB7,0x31, -0x5C,0x3A,0x6A,0x47,0xE0,0xC3,0x79,0x59,0xD6,0x20,0x1A,0xAF,0xF2,0x6A,0x98,0xAA, -0x72,0xBC,0x57,0x4A,0xD2,0x4B,0x9D,0xBB,0x10,0xFC,0xB0,0x4C,0x41,0xE5,0xED,0x1D, -0x3D,0x5E,0x28,0x9D,0x9C,0xCC,0xBF,0xB3,0x51,0xDA,0xA7,0x47,0xE5,0x84,0x53,0x02, -0x03,0x01,0x00,0x01,0xA3,0x42,0x30,0x40,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04, -0x16,0x04,0x14,0xBB,0xAF,0x7E,0x02,0x3D,0xFA,0xA6,0xF1,0x3C,0x84,0x8E,0xAD,0xEE, -0x38,0x98,0xEC,0xD9,0x32,0x32,0xD4,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01, -0xFF,0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01, -0xFF,0x04,0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86, -0xF7,0x0D,0x01,0x01,0x0C,0x05,0x00,0x03,0x82,0x02,0x01,0x00,0x0A,0xF1,0xD5,0x46, -0x84,0xB7,0xAE,0x51,0xBB,0x6C,0xB2,0x4D,0x41,0x14,0x00,0x93,0x4C,0x9C,0xCB,0xE5, -0xC0,0x54,0xCF,0xA0,0x25,0x8E,0x02,0xF9,0xFD,0xB0,0xA2,0x0D,0xF5,0x20,0x98,0x3C, -0x13,0x2D,0xAC,0x56,0xA2,0xB0,0xD6,0x7E,0x11,0x92,0xE9,0x2E,0xBA,0x9E,0x2E,0x9A, -0x72,0xB1,0xBD,0x19,0x44,0x6C,0x61,0x35,0xA2,0x9A,0xB4,0x16,0x12,0x69,0x5A,0x8C, -0xE1,0xD7,0x3E,0xA4,0x1A,0xE8,0x2F,0x03,0xF4,0xAE,0x61,0x1D,0x10,0x1B,0x2A,0xA4, -0x8B,0x7A,0xC5,0xFE,0x05,0xA6,0xE1,0xC0,0xD6,0xC8,0xFE,0x9E,0xAE,0x8F,0x2B,0xBA, -0x3D,0x99,0xF8,0xD8,0x73,0x09,0x58,0x46,0x6E,0xA6,0x9C,0xF4,0xD7,0x27,0xD3,0x95, -0xDA,0x37,0x83,0x72,0x1C,0xD3,0x73,0xE0,0xA2,0x47,0x99,0x03,0x38,0x5D,0xD5,0x49, -0x79,0x00,0x29,0x1C,0xC7,0xEC,0x9B,0x20,0x1C,0x07,0x24,0x69,0x57,0x78,0xB2,0x39, -0xFC,0x3A,0x84,0xA0,0xB5,0x9C,0x7C,0x8D,0xBF,0x2E,0x93,0x62,0x27,0xB7,0x39,0xDA, -0x17,0x18,0xAE,0xBD,0x3C,0x09,0x68,0xFF,0x84,0x9B,0x3C,0xD5,0xD6,0x0B,0x03,0xE3, -0x57,0x9E,0x14,0xF7,0xD1,0xEB,0x4F,0xC8,0xBD,0x87,0x23,0xB7,0xB6,0x49,0x43,0x79, -0x85,0x5C,0xBA,0xEB,0x92,0x0B,0xA1,0xC6,0xE8,0x68,0xA8,0x4C,0x16,0xB1,0x1A,0x99, 
-0x0A,0xE8,0x53,0x2C,0x92,0xBB,0xA1,0x09,0x18,0x75,0x0C,0x65,0xA8,0x7B,0xCB,0x23, -0xB7,0x1A,0xC2,0x28,0x85,0xC3,0x1B,0xFF,0xD0,0x2B,0x62,0xEF,0xA4,0x7B,0x09,0x91, -0x98,0x67,0x8C,0x14,0x01,0xCD,0x68,0x06,0x6A,0x63,0x21,0x75,0x03,0x80,0x88,0x8A, -0x6E,0x81,0xC6,0x85,0xF2,0xA9,0xA4,0x2D,0xE7,0xF4,0xA5,0x24,0x10,0x47,0x83,0xCA, -0xCD,0xF4,0x8D,0x79,0x58,0xB1,0x06,0x9B,0xE7,0x1A,0x2A,0xD9,0x9D,0x01,0xD7,0x94, -0x7D,0xED,0x03,0x4A,0xCA,0xF0,0xDB,0xE8,0xA9,0x01,0x3E,0xF5,0x56,0x99,0xC9,0x1E, -0x8E,0x49,0x3D,0xBB,0xE5,0x09,0xB9,0xE0,0x4F,0x49,0x92,0x3D,0x16,0x82,0x40,0xCC, -0xCC,0x59,0xC6,0xE6,0x3A,0xED,0x12,0x2E,0x69,0x3C,0x6C,0x95,0xB1,0xFD,0xAA,0x1D, -0x7B,0x7F,0x86,0xBE,0x1E,0x0E,0x32,0x46,0xFB,0xFB,0x13,0x8F,0x75,0x7F,0x4C,0x8B, -0x4B,0x46,0x63,0xFE,0x00,0x34,0x40,0x70,0xC1,0xC3,0xB9,0xA1,0xDD,0xA6,0x70,0xE2, -0x04,0xB3,0x41,0xBC,0xE9,0x80,0x91,0xEA,0x64,0x9C,0x7A,0xE1,0x22,0x03,0xA9,0x9C, -0x6E,0x6F,0x0E,0x65,0x4F,0x6C,0x87,0x87,0x5E,0xF3,0x6E,0xA0,0xF9,0x75,0xA5,0x9B, -0x40,0xE8,0x53,0xB2,0x27,0x9D,0x4A,0xB9,0xC0,0x77,0x21,0x8D,0xFF,0x87,0xF2,0xDE, -0xBC,0x8C,0xEF,0x17,0xDF,0xB7,0x49,0x0B,0xD1,0xF2,0x6E,0x30,0x0B,0x1A,0x0E,0x4E, -0x76,0xED,0x11,0xFC,0xF5,0xE9,0x56,0xB2,0x7D,0xBF,0xC7,0x6D,0x0A,0x93,0x8C,0xA5, -0xD0,0xC0,0xB6,0x1D,0xBE,0x3A,0x4E,0x94,0xA2,0xD7,0x6E,0x6C,0x0B,0xC2,0x8A,0x7C, -0xFA,0x20,0xF3,0xC4,0xE4,0xE5,0xCD,0x0D,0xA8,0xCB,0x91,0x92,0xB1,0x7C,0x85,0xEC, -0xB5,0x14,0x69,0x66,0x0E,0x82,0xE7,0xCD,0xCE,0xC8,0x2D,0xA6,0x51,0x7F,0x21,0xC1, -0x35,0x53,0x85,0x06,0x4A,0x5D,0x9F,0xAD,0xBB,0x1B,0x5F,0x74, +const unsigned char GeoTrust_Global_CA_certificate[856]={ +0x30,0x82,0x03,0x54,0x30,0x82,0x02,0x3C,0xA0,0x03,0x02,0x01,0x02,0x02,0x03,0x02, +0x34,0x56,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05, +0x00,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53, +0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47,0x65,0x6F,0x54,0x72, 
+0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04, +0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x47,0x6C,0x6F,0x62, +0x61,0x6C,0x20,0x43,0x41,0x30,0x1E,0x17,0x0D,0x30,0x32,0x30,0x35,0x32,0x31,0x30, +0x34,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x32,0x32,0x30,0x35,0x32,0x31,0x30,0x34, +0x30,0x30,0x30,0x30,0x5A,0x30,0x42,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, +0x13,0x02,0x55,0x53,0x31,0x16,0x30,0x14,0x06,0x03,0x55,0x04,0x0A,0x13,0x0D,0x47, +0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20,0x49,0x6E,0x63,0x2E,0x31,0x1B,0x30,0x19, +0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x47,0x65,0x6F,0x54,0x72,0x75,0x73,0x74,0x20, +0x47,0x6C,0x6F,0x62,0x61,0x6C,0x20,0x43,0x41,0x30,0x82,0x01,0x22,0x30,0x0D,0x06, +0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01,0x0F, +0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDA,0xCC,0x18,0x63,0x30,0xFD, +0xF4,0x17,0x23,0x1A,0x56,0x7E,0x5B,0xDF,0x3C,0x6C,0x38,0xE4,0x71,0xB7,0x78,0x91, +0xD4,0xBC,0xA1,0xD8,0x4C,0xF8,0xA8,0x43,0xB6,0x03,0xE9,0x4D,0x21,0x07,0x08,0x88, +0xDA,0x58,0x2F,0x66,0x39,0x29,0xBD,0x05,0x78,0x8B,0x9D,0x38,0xE8,0x05,0xB7,0x6A, +0x7E,0x71,0xA4,0xE6,0xC4,0x60,0xA6,0xB0,0xEF,0x80,0xE4,0x89,0x28,0x0F,0x9E,0x25, +0xD6,0xED,0x83,0xF3,0xAD,0xA6,0x91,0xC7,0x98,0xC9,0x42,0x18,0x35,0x14,0x9D,0xAD, +0x98,0x46,0x92,0x2E,0x4F,0xCA,0xF1,0x87,0x43,0xC1,0x16,0x95,0x57,0x2D,0x50,0xEF, +0x89,0x2D,0x80,0x7A,0x57,0xAD,0xF2,0xEE,0x5F,0x6B,0xD2,0x00,0x8D,0xB9,0x14,0xF8, +0x14,0x15,0x35,0xD9,0xC0,0x46,0xA3,0x7B,0x72,0xC8,0x91,0xBF,0xC9,0x55,0x2B,0xCD, +0xD0,0x97,0x3E,0x9C,0x26,0x64,0xCC,0xDF,0xCE,0x83,0x19,0x71,0xCA,0x4E,0xE6,0xD4, +0xD5,0x7B,0xA9,0x19,0xCD,0x55,0xDE,0xC8,0xEC,0xD2,0x5E,0x38,0x53,0xE5,0x5C,0x4F, +0x8C,0x2D,0xFE,0x50,0x23,0x36,0xFC,0x66,0xE6,0xCB,0x8E,0xA4,0x39,0x19,0x00,0xB7, +0x95,0x02,0x39,0x91,0x0B,0x0E,0xFE,0x38,0x2E,0xD1,0x1D,0x05,0x9A,0xF6,0x4D,0x3E, +0x6F,0x0F,0x07,0x1D,0xAF,0x2C,0x1E,0x8F,0x60,0x39,0xE2,0xFA,0x36,0x53,0x13,0x39, 
+0xD4,0x5E,0x26,0x2B,0xDB,0x3D,0xA8,0x14,0xBD,0x32,0xEB,0x18,0x03,0x28,0x52,0x04, +0x71,0xE5,0xAB,0x33,0x3D,0xE1,0x38,0xBB,0x07,0x36,0x84,0x62,0x9C,0x79,0xEA,0x16, +0x30,0xF4,0x5F,0xC0,0x2B,0xE8,0x71,0x6B,0xE4,0xF9,0x02,0x03,0x01,0x00,0x01,0xA3, +0x53,0x30,0x51,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30, +0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0xC0, +0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65,0xB8, +0xCA,0xCC,0x4E,0x30,0x1F,0x06,0x03,0x55,0x1D,0x23,0x04,0x18,0x30,0x16,0x80,0x14, +0xC0,0x7A,0x98,0x68,0x8D,0x89,0xFB,0xAB,0x05,0x64,0x0C,0x11,0x7D,0xAA,0x7D,0x65, +0xB8,0xCA,0xCC,0x4E,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01, +0x05,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x35,0xE3,0x29,0x6A,0xE5,0x2F,0x5D,0x54, +0x8E,0x29,0x50,0x94,0x9F,0x99,0x1A,0x14,0xE4,0x8F,0x78,0x2A,0x62,0x94,0xA2,0x27, +0x67,0x9E,0xD0,0xCF,0x1A,0x5E,0x47,0xE9,0xC1,0xB2,0xA4,0xCF,0xDD,0x41,0x1A,0x05, +0x4E,0x9B,0x4B,0xEE,0x4A,0x6F,0x55,0x52,0xB3,0x24,0xA1,0x37,0x0A,0xEB,0x64,0x76, +0x2A,0x2E,0x2C,0xF3,0xFD,0x3B,0x75,0x90,0xBF,0xFA,0x71,0xD8,0xC7,0x3D,0x37,0xD2, +0xB5,0x05,0x95,0x62,0xB9,0xA6,0xDE,0x89,0x3D,0x36,0x7B,0x38,0x77,0x48,0x97,0xAC, +0xA6,0x20,0x8F,0x2E,0xA6,0xC9,0x0C,0xC2,0xB2,0x99,0x45,0x00,0xC7,0xCE,0x11,0x51, +0x22,0x22,0xE0,0xA5,0xEA,0xB6,0x15,0x48,0x09,0x64,0xEA,0x5E,0x4F,0x74,0xF7,0x05, +0x3E,0xC7,0x8A,0x52,0x0C,0xDB,0x15,0xB4,0xBD,0x6D,0x9B,0xE5,0xC6,0xB1,0x54,0x68, +0xA9,0xE3,0x69,0x90,0xB6,0x9A,0xA5,0x0F,0xB8,0xB9,0x3F,0x20,0x7D,0xAE,0x4A,0xB5, +0xB8,0x9C,0xE4,0x1D,0xB6,0xAB,0xE6,0x94,0xA5,0xC1,0xC7,0x83,0xAD,0xDB,0xF5,0x27, +0x87,0x0E,0x04,0x6C,0xD5,0xFF,0xDD,0xA0,0x5D,0xED,0x87,0x52,0xB7,0x2B,0x15,0x02, +0xAE,0x39,0xA6,0x6A,0x74,0xE9,0xDA,0xC4,0xE7,0xBC,0x4D,0x34,0x1E,0xA9,0x5C,0x4D, +0x33,0x5F,0x92,0x09,0x2F,0x88,0x66,0x5D,0x77,0x97,0xC7,0x1D,0x76,0x13,0xA9,0xD5, +0xE5,0xF1,0x16,0x09,0x11,0x35,0xD5,0xAC,0xDB,0x24,0x71,0x70,0x2C,0x98,0x56,0x0B, 
+0xD9,0x17,0xB4,0xD1,0xE3,0x51,0x2B,0x5E,0x75,0xE8,0xD5,0xD0,0xDC,0x4F,0x34,0xED, +0xC2,0x05,0x66,0x80,0xA1,0xCB,0xE6,0x33, }; -/* subject:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC */ -/* issuer :/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC */ +/* subject:/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G3 */ +/* issuer :/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Assured ID Root G3 */ -const unsigned char UTN_DATACorp_SGC_Root_CA_certificate[1122]={ -0x30,0x82,0x04,0x5E,0x30,0x82,0x03,0x46,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x44, -0xBE,0x0C,0x8B,0x50,0x00,0x21,0xB4,0x11,0xD3,0x2A,0x68,0x06,0xA9,0xAD,0x69,0x30, -0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x30,0x81, -0x93,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x0B, -0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55,0x54,0x31,0x17,0x30,0x15,0x06, -0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74,0x20,0x4C,0x61,0x6B,0x65,0x20, -0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03,0x55,0x04,0x0A,0x13,0x15,0x54, -0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55,0x53,0x54,0x20,0x4E,0x65,0x74, -0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03,0x55,0x04,0x0B,0x13,0x18,0x68, -0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E,0x75,0x73,0x65,0x72,0x74,0x72, -0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1B,0x30,0x19,0x06,0x03,0x55,0x04,0x03, -0x13,0x12,0x55,0x54,0x4E,0x20,0x2D,0x20,0x44,0x41,0x54,0x41,0x43,0x6F,0x72,0x70, -0x20,0x53,0x47,0x43,0x30,0x1E,0x17,0x0D,0x39,0x39,0x30,0x36,0x32,0x34,0x31,0x38, -0x35,0x37,0x32,0x31,0x5A,0x17,0x0D,0x31,0x39,0x30,0x36,0x32,0x34,0x31,0x39,0x30, -0x36,0x33,0x30,0x5A,0x30,0x81,0x93,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06, -0x13,0x02,0x55,0x53,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x08,0x13,0x02,0x55, -0x54,0x31,0x17,0x30,0x15,0x06,0x03,0x55,0x04,0x07,0x13,0x0E,0x53,0x61,0x6C,0x74, 
-0x20,0x4C,0x61,0x6B,0x65,0x20,0x43,0x69,0x74,0x79,0x31,0x1E,0x30,0x1C,0x06,0x03, -0x55,0x04,0x0A,0x13,0x15,0x54,0x68,0x65,0x20,0x55,0x53,0x45,0x52,0x54,0x52,0x55, -0x53,0x54,0x20,0x4E,0x65,0x74,0x77,0x6F,0x72,0x6B,0x31,0x21,0x30,0x1F,0x06,0x03, -0x55,0x04,0x0B,0x13,0x18,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x77,0x77,0x77,0x2E, -0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x1B,0x30, -0x19,0x06,0x03,0x55,0x04,0x03,0x13,0x12,0x55,0x54,0x4E,0x20,0x2D,0x20,0x44,0x41, -0x54,0x41,0x43,0x6F,0x72,0x70,0x20,0x53,0x47,0x43,0x30,0x82,0x01,0x22,0x30,0x0D, -0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82,0x01, -0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xDF,0xEE,0x58,0x10,0xA2, -0x2B,0x6E,0x55,0xC4,0x8E,0xBF,0x2E,0x46,0x09,0xE7,0xE0,0x08,0x0F,0x2E,0x2B,0x7A, -0x13,0x94,0x1B,0xBD,0xF6,0xB6,0x80,0x8E,0x65,0x05,0x93,0x00,0x1E,0xBC,0xAF,0xE2, -0x0F,0x8E,0x19,0x0D,0x12,0x47,0xEC,0xAC,0xAD,0xA3,0xFA,0x2E,0x70,0xF8,0xDE,0x6E, -0xFB,0x56,0x42,0x15,0x9E,0x2E,0x5C,0xEF,0x23,0xDE,0x21,0xB9,0x05,0x76,0x27,0x19, -0x0F,0x4F,0xD6,0xC3,0x9C,0xB4,0xBE,0x94,0x19,0x63,0xF2,0xA6,0x11,0x0A,0xEB,0x53, -0x48,0x9C,0xBE,0xF2,0x29,0x3B,0x16,0xE8,0x1A,0xA0,0x4C,0xA6,0xC9,0xF4,0x18,0x59, -0x68,0xC0,0x70,0xF2,0x53,0x00,0xC0,0x5E,0x50,0x82,0xA5,0x56,0x6F,0x36,0xF9,0x4A, -0xE0,0x44,0x86,0xA0,0x4D,0x4E,0xD6,0x47,0x6E,0x49,0x4A,0xCB,0x67,0xD7,0xA6,0xC4, -0x05,0xB9,0x8E,0x1E,0xF4,0xFC,0xFF,0xCD,0xE7,0x36,0xE0,0x9C,0x05,0x6C,0xB2,0x33, -0x22,0x15,0xD0,0xB4,0xE0,0xCC,0x17,0xC0,0xB2,0xC0,0xF4,0xFE,0x32,0x3F,0x29,0x2A, -0x95,0x7B,0xD8,0xF2,0xA7,0x4E,0x0F,0x54,0x7C,0xA1,0x0D,0x80,0xB3,0x09,0x03,0xC1, -0xFF,0x5C,0xDD,0x5E,0x9A,0x3E,0xBC,0xAE,0xBC,0x47,0x8A,0x6A,0xAE,0x71,0xCA,0x1F, -0xB1,0x2A,0xB8,0x5F,0x42,0x05,0x0B,0xEC,0x46,0x30,0xD1,0x72,0x0B,0xCA,0xE9,0x56, -0x6D,0xF5,0xEF,0xDF,0x78,0xBE,0x61,0xBA,0xB2,0xA5,0xAE,0x04,0x4C,0xBC,0xA8,0xAC, -0x69,0x15,0x97,0xBD,0xEF,0xEB,0xB4,0x8C,0xBF,0x35,0xF8,0xD4,0xC3,0xD1,0x28,0x0E, 
-0x5C,0x3A,0x9F,0x70,0x18,0x33,0x20,0x77,0xC4,0xA2,0xAF,0x02,0x03,0x01,0x00,0x01, -0xA3,0x81,0xAB,0x30,0x81,0xA8,0x30,0x0B,0x06,0x03,0x55,0x1D,0x0F,0x04,0x04,0x03, -0x02,0x01,0xC6,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04,0x05,0x30, -0x03,0x01,0x01,0xFF,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04,0x14,0x53, -0x32,0xD1,0xB3,0xCF,0x7F,0xFA,0xE0,0xF1,0xA0,0x5D,0x85,0x4E,0x92,0xD2,0x9E,0x45, -0x1D,0xB4,0x4F,0x30,0x3D,0x06,0x03,0x55,0x1D,0x1F,0x04,0x36,0x30,0x34,0x30,0x32, -0xA0,0x30,0xA0,0x2E,0x86,0x2C,0x68,0x74,0x74,0x70,0x3A,0x2F,0x2F,0x63,0x72,0x6C, -0x2E,0x75,0x73,0x65,0x72,0x74,0x72,0x75,0x73,0x74,0x2E,0x63,0x6F,0x6D,0x2F,0x55, -0x54,0x4E,0x2D,0x44,0x41,0x54,0x41,0x43,0x6F,0x72,0x70,0x53,0x47,0x43,0x2E,0x63, -0x72,0x6C,0x30,0x2A,0x06,0x03,0x55,0x1D,0x25,0x04,0x23,0x30,0x21,0x06,0x08,0x2B, -0x06,0x01,0x05,0x05,0x07,0x03,0x01,0x06,0x0A,0x2B,0x06,0x01,0x04,0x01,0x82,0x37, -0x0A,0x03,0x03,0x06,0x09,0x60,0x86,0x48,0x01,0x86,0xF8,0x42,0x04,0x01,0x30,0x0D, -0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x05,0x05,0x00,0x03,0x82,0x01, -0x01,0x00,0x27,0x35,0x97,0x00,0x8A,0x8B,0x28,0xBD,0xC6,0x33,0x30,0x1E,0x29,0xFC, -0xE2,0xF7,0xD5,0x98,0xD4,0x40,0xBB,0x60,0xCA,0xBF,0xAB,0x17,0x2C,0x09,0x36,0x7F, -0x50,0xFA,0x41,0xDC,0xAE,0x96,0x3A,0x0A,0x23,0x3E,0x89,0x59,0xC9,0xA3,0x07,0xED, -0x1B,0x37,0xAD,0xFC,0x7C,0xBE,0x51,0x49,0x5A,0xDE,0x3A,0x0A,0x54,0x08,0x16,0x45, -0xC2,0x99,0xB1,0x87,0xCD,0x8C,0x68,0xE0,0x69,0x03,0xE9,0xC4,0x4E,0x98,0xB2,0x3B, -0x8C,0x16,0xB3,0x0E,0xA0,0x0C,0x98,0x50,0x9B,0x93,0xA9,0x70,0x09,0xC8,0x2C,0xA3, -0x8F,0xDF,0x02,0xE4,0xE0,0x71,0x3A,0xF1,0xB4,0x23,0x72,0xA0,0xAA,0x01,0xDF,0xDF, -0x98,0x3E,0x14,0x50,0xA0,0x31,0x26,0xBD,0x28,0xE9,0x5A,0x30,0x26,0x75,0xF9,0x7B, -0x60,0x1C,0x8D,0xF3,0xCD,0x50,0x26,0x6D,0x04,0x27,0x9A,0xDF,0xD5,0x0D,0x45,0x47, -0x29,0x6B,0x2C,0xE6,0x76,0xD9,0xA9,0x29,0x7D,0x32,0xDD,0xC9,0x36,0x3C,0xBD,0xAE, -0x35,0xF1,0x11,0x9E,0x1D,0xBB,0x90,0x3F,0x12,0x47,0x4E,0x8E,0xD7,0x7E,0x0F,0x62, 
-0x73,0x1D,0x52,0x26,0x38,0x1C,0x18,0x49,0xFD,0x30,0x74,0x9A,0xC4,0xE5,0x22,0x2F, -0xD8,0xC0,0x8D,0xED,0x91,0x7A,0x4C,0x00,0x8F,0x72,0x7F,0x5D,0xDA,0xDD,0x1B,0x8B, -0x45,0x6B,0xE7,0xDD,0x69,0x97,0xA8,0xC5,0x56,0x4C,0x0F,0x0C,0xF6,0x9F,0x7A,0x91, -0x37,0xF6,0x97,0x82,0xE0,0xDD,0x71,0x69,0xFF,0x76,0x3F,0x60,0x4D,0x3C,0xCF,0xF7, -0x99,0xF9,0xC6,0x57,0xF4,0xC9,0x55,0x39,0x78,0xBA,0x2C,0x79,0xC9,0xA6,0x88,0x2B, -0xF4,0x08, +const unsigned char DigiCert_Assured_ID_Root_G3_certificate[586]={ +0x30,0x82,0x02,0x46,0x30,0x82,0x01,0xCD,0xA0,0x03,0x02,0x01,0x02,0x02,0x10,0x0B, +0xA1,0x5A,0xFA,0x1D,0xDF,0xA0,0xB5,0x49,0x44,0xAF,0xCD,0x24,0xA0,0x6C,0xEC,0x30, +0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03,0x03,0x30,0x65,0x31,0x0B,0x30, +0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31,0x15,0x30,0x13,0x06,0x03, +0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x49,0x6E, +0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04,0x0B,0x13,0x10,0x77,0x77,0x77,0x2E, +0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E,0x63,0x6F,0x6D,0x31,0x24,0x30,0x22, +0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69,0x67,0x69,0x43,0x65,0x72,0x74,0x20, +0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49,0x44,0x20,0x52,0x6F,0x6F,0x74,0x20, +0x47,0x33,0x30,0x1E,0x17,0x0D,0x31,0x33,0x30,0x38,0x30,0x31,0x31,0x32,0x30,0x30, +0x30,0x30,0x5A,0x17,0x0D,0x33,0x38,0x30,0x31,0x31,0x35,0x31,0x32,0x30,0x30,0x30, +0x30,0x5A,0x30,0x65,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55, +0x53,0x31,0x15,0x30,0x13,0x06,0x03,0x55,0x04,0x0A,0x13,0x0C,0x44,0x69,0x67,0x69, +0x43,0x65,0x72,0x74,0x20,0x49,0x6E,0x63,0x31,0x19,0x30,0x17,0x06,0x03,0x55,0x04, +0x0B,0x13,0x10,0x77,0x77,0x77,0x2E,0x64,0x69,0x67,0x69,0x63,0x65,0x72,0x74,0x2E, +0x63,0x6F,0x6D,0x31,0x24,0x30,0x22,0x06,0x03,0x55,0x04,0x03,0x13,0x1B,0x44,0x69, +0x67,0x69,0x43,0x65,0x72,0x74,0x20,0x41,0x73,0x73,0x75,0x72,0x65,0x64,0x20,0x49, +0x44,0x20,0x52,0x6F,0x6F,0x74,0x20,0x47,0x33,0x30,0x76,0x30,0x10,0x06,0x07,0x2A, 
+0x86,0x48,0xCE,0x3D,0x02,0x01,0x06,0x05,0x2B,0x81,0x04,0x00,0x22,0x03,0x62,0x00, +0x04,0x19,0xE7,0xBC,0xAC,0x44,0x65,0xED,0xCD,0xB8,0x3F,0x58,0xFB,0x8D,0xB1,0x57, +0xA9,0x44,0x2D,0x05,0x15,0xF2,0xEF,0x0B,0xFF,0x10,0x74,0x9F,0xB5,0x62,0x52,0x5F, +0x66,0x7E,0x1F,0xE5,0xDC,0x1B,0x45,0x79,0x0B,0xCC,0xC6,0x53,0x0A,0x9D,0x8D,0x5D, +0x02,0xD9,0xA9,0x59,0xDE,0x02,0x5A,0xF6,0x95,0x2A,0x0E,0x8D,0x38,0x4A,0x8A,0x49, +0xC6,0xBC,0xC6,0x03,0x38,0x07,0x5F,0x55,0xDA,0x7E,0x09,0x6E,0xE2,0x7F,0x5E,0xD0, +0x45,0x20,0x0F,0x59,0x76,0x10,0xD6,0xA0,0x24,0xF0,0x2D,0xDE,0x36,0xF2,0x6C,0x29, +0x39,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, +0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, +0x04,0x04,0x03,0x02,0x01,0x86,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, +0x14,0xCB,0xD0,0xBD,0xA9,0xE1,0x98,0x05,0x51,0xA1,0x4D,0x37,0xA2,0x83,0x79,0xCE, +0x8D,0x1D,0x2A,0xE4,0x84,0x30,0x0A,0x06,0x08,0x2A,0x86,0x48,0xCE,0x3D,0x04,0x03, +0x03,0x03,0x67,0x00,0x30,0x64,0x02,0x30,0x25,0xA4,0x81,0x45,0x02,0x6B,0x12,0x4B, +0x75,0x74,0x4F,0xC8,0x23,0xE3,0x70,0xF2,0x75,0x72,0xDE,0x7C,0x89,0xF0,0xCF,0x91, +0x72,0x61,0x9E,0x5E,0x10,0x92,0x59,0x56,0xB9,0x83,0xC7,0x10,0xE7,0x38,0xE9,0x58, +0x26,0x36,0x7D,0xD5,0xE4,0x34,0x86,0x39,0x02,0x30,0x7C,0x36,0x53,0xF0,0x30,0xE5, +0x62,0x63,0x3A,0x99,0xE2,0xB6,0xA3,0x3B,0x9B,0x34,0xFA,0x1E,0xDA,0x10,0x92,0x71, +0x5E,0x91,0x13,0xA7,0xDD,0xA4,0x6E,0x92,0xCC,0x32,0xD6,0xF5,0x21,0x66,0xC7,0x2F, +0xEA,0x96,0x63,0x6A,0x65,0x45,0x92,0x95,0x01,0xB4, +}; + + +/* subject:/C=US/ST=Arizona/L=Scottsdale/O=GoDaddy.com, Inc./CN=Go Daddy Root Certificate Authority - G2 */ +/* issuer :/C=US/ST=Arizona/L=Scottsdale/O=GoDaddy.com, Inc./CN=Go Daddy Root Certificate Authority - G2 */ + + +const unsigned char Go_Daddy_Root_Certificate_Authority___G2_certificate[969]={ +0x30,0x82,0x03,0xC5,0x30,0x82,0x02,0xAD,0xA0,0x03,0x02,0x01,0x02,0x02,0x01,0x00, 
+0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x0B,0x05,0x00,0x30, +0x81,0x83,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x55,0x53,0x31, +0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07,0x41,0x72,0x69,0x7A,0x6F,0x6E, +0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07,0x13,0x0A,0x53,0x63,0x6F,0x74, +0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x1A,0x30,0x18,0x06,0x03,0x55,0x04,0x0A,0x13, +0x11,0x47,0x6F,0x44,0x61,0x64,0x64,0x79,0x2E,0x63,0x6F,0x6D,0x2C,0x20,0x49,0x6E, +0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04,0x03,0x13,0x28,0x47,0x6F,0x20, +0x44,0x61,0x64,0x64,0x79,0x20,0x52,0x6F,0x6F,0x74,0x20,0x43,0x65,0x72,0x74,0x69, +0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74,0x68,0x6F,0x72,0x69,0x74,0x79, +0x20,0x2D,0x20,0x47,0x32,0x30,0x1E,0x17,0x0D,0x30,0x39,0x30,0x39,0x30,0x31,0x30, +0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x33,0x37,0x31,0x32,0x33,0x31,0x32,0x33, +0x35,0x39,0x35,0x39,0x5A,0x30,0x81,0x83,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04, +0x06,0x13,0x02,0x55,0x53,0x31,0x10,0x30,0x0E,0x06,0x03,0x55,0x04,0x08,0x13,0x07, +0x41,0x72,0x69,0x7A,0x6F,0x6E,0x61,0x31,0x13,0x30,0x11,0x06,0x03,0x55,0x04,0x07, +0x13,0x0A,0x53,0x63,0x6F,0x74,0x74,0x73,0x64,0x61,0x6C,0x65,0x31,0x1A,0x30,0x18, +0x06,0x03,0x55,0x04,0x0A,0x13,0x11,0x47,0x6F,0x44,0x61,0x64,0x64,0x79,0x2E,0x63, +0x6F,0x6D,0x2C,0x20,0x49,0x6E,0x63,0x2E,0x31,0x31,0x30,0x2F,0x06,0x03,0x55,0x04, +0x03,0x13,0x28,0x47,0x6F,0x20,0x44,0x61,0x64,0x64,0x79,0x20,0x52,0x6F,0x6F,0x74, +0x20,0x43,0x65,0x72,0x74,0x69,0x66,0x69,0x63,0x61,0x74,0x65,0x20,0x41,0x75,0x74, +0x68,0x6F,0x72,0x69,0x74,0x79,0x20,0x2D,0x20,0x47,0x32,0x30,0x82,0x01,0x22,0x30, +0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x82, +0x01,0x0F,0x00,0x30,0x82,0x01,0x0A,0x02,0x82,0x01,0x01,0x00,0xBF,0x71,0x62,0x08, +0xF1,0xFA,0x59,0x34,0xF7,0x1B,0xC9,0x18,0xA3,0xF7,0x80,0x49,0x58,0xE9,0x22,0x83, +0x13,0xA6,0xC5,0x20,0x43,0x01,0x3B,0x84,0xF1,0xE6,0x85,0x49,0x9F,0x27,0xEA,0xF6, 
+0x84,0x1B,0x4E,0xA0,0xB4,0xDB,0x70,0x98,0xC7,0x32,0x01,0xB1,0x05,0x3E,0x07,0x4E, +0xEE,0xF4,0xFA,0x4F,0x2F,0x59,0x30,0x22,0xE7,0xAB,0x19,0x56,0x6B,0xE2,0x80,0x07, +0xFC,0xF3,0x16,0x75,0x80,0x39,0x51,0x7B,0xE5,0xF9,0x35,0xB6,0x74,0x4E,0xA9,0x8D, +0x82,0x13,0xE4,0xB6,0x3F,0xA9,0x03,0x83,0xFA,0xA2,0xBE,0x8A,0x15,0x6A,0x7F,0xDE, +0x0B,0xC3,0xB6,0x19,0x14,0x05,0xCA,0xEA,0xC3,0xA8,0x04,0x94,0x3B,0x46,0x7C,0x32, +0x0D,0xF3,0x00,0x66,0x22,0xC8,0x8D,0x69,0x6D,0x36,0x8C,0x11,0x18,0xB7,0xD3,0xB2, +0x1C,0x60,0xB4,0x38,0xFA,0x02,0x8C,0xCE,0xD3,0xDD,0x46,0x07,0xDE,0x0A,0x3E,0xEB, +0x5D,0x7C,0xC8,0x7C,0xFB,0xB0,0x2B,0x53,0xA4,0x92,0x62,0x69,0x51,0x25,0x05,0x61, +0x1A,0x44,0x81,0x8C,0x2C,0xA9,0x43,0x96,0x23,0xDF,0xAC,0x3A,0x81,0x9A,0x0E,0x29, +0xC5,0x1C,0xA9,0xE9,0x5D,0x1E,0xB6,0x9E,0x9E,0x30,0x0A,0x39,0xCE,0xF1,0x88,0x80, +0xFB,0x4B,0x5D,0xCC,0x32,0xEC,0x85,0x62,0x43,0x25,0x34,0x02,0x56,0x27,0x01,0x91, +0xB4,0x3B,0x70,0x2A,0x3F,0x6E,0xB1,0xE8,0x9C,0x88,0x01,0x7D,0x9F,0xD4,0xF9,0xDB, +0x53,0x6D,0x60,0x9D,0xBF,0x2C,0xE7,0x58,0xAB,0xB8,0x5F,0x46,0xFC,0xCE,0xC4,0x1B, +0x03,0x3C,0x09,0xEB,0x49,0x31,0x5C,0x69,0x46,0xB3,0xE0,0x47,0x02,0x03,0x01,0x00, +0x01,0xA3,0x42,0x30,0x40,0x30,0x0F,0x06,0x03,0x55,0x1D,0x13,0x01,0x01,0xFF,0x04, +0x05,0x30,0x03,0x01,0x01,0xFF,0x30,0x0E,0x06,0x03,0x55,0x1D,0x0F,0x01,0x01,0xFF, +0x04,0x04,0x03,0x02,0x01,0x06,0x30,0x1D,0x06,0x03,0x55,0x1D,0x0E,0x04,0x16,0x04, +0x14,0x3A,0x9A,0x85,0x07,0x10,0x67,0x28,0xB6,0xEF,0xF6,0xBD,0x05,0x41,0x6E,0x20, +0xC1,0x94,0xDA,0x0F,0xDE,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01, +0x01,0x0B,0x05,0x00,0x03,0x82,0x01,0x01,0x00,0x99,0xDB,0x5D,0x79,0xD5,0xF9,0x97, +0x59,0x67,0x03,0x61,0xF1,0x7E,0x3B,0x06,0x31,0x75,0x2D,0xA1,0x20,0x8E,0x4F,0x65, +0x87,0xB4,0xF7,0xA6,0x9C,0xBC,0xD8,0xE9,0x2F,0xD0,0xDB,0x5A,0xEE,0xCF,0x74,0x8C, +0x73,0xB4,0x38,0x42,0xDA,0x05,0x7B,0xF8,0x02,0x75,0xB8,0xFD,0xA5,0xB1,0xD7,0xAE, +0xF6,0xD7,0xDE,0x13,0xCB,0x53,0x10,0x7E,0x8A,0x46,0xD1,0x97,0xFA,0xB7,0x2E,0x2B, 
+0x11,0xAB,0x90,0xB0,0x27,0x80,0xF9,0xE8,0x9F,0x5A,0xE9,0x37,0x9F,0xAB,0xE4,0xDF, +0x6C,0xB3,0x85,0x17,0x9D,0x3D,0xD9,0x24,0x4F,0x79,0x91,0x35,0xD6,0x5F,0x04,0xEB, +0x80,0x83,0xAB,0x9A,0x02,0x2D,0xB5,0x10,0xF4,0xD8,0x90,0xC7,0x04,0x73,0x40,0xED, +0x72,0x25,0xA0,0xA9,0x9F,0xEC,0x9E,0xAB,0x68,0x12,0x99,0x57,0xC6,0x8F,0x12,0x3A, +0x09,0xA4,0xBD,0x44,0xFD,0x06,0x15,0x37,0xC1,0x9B,0xE4,0x32,0xA3,0xED,0x38,0xE8, +0xD8,0x64,0xF3,0x2C,0x7E,0x14,0xFC,0x02,0xEA,0x9F,0xCD,0xFF,0x07,0x68,0x17,0xDB, +0x22,0x90,0x38,0x2D,0x7A,0x8D,0xD1,0x54,0xF1,0x69,0xE3,0x5F,0x33,0xCA,0x7A,0x3D, +0x7B,0x0A,0xE3,0xCA,0x7F,0x5F,0x39,0xE5,0xE2,0x75,0xBA,0xC5,0x76,0x18,0x33,0xCE, +0x2C,0xF0,0x2F,0x4C,0xAD,0xF7,0xB1,0xE7,0xCE,0x4F,0xA8,0xC4,0x9B,0x4A,0x54,0x06, +0xC5,0x7F,0x7D,0xD5,0x08,0x0F,0xE2,0x1C,0xFE,0x7E,0x17,0xB8,0xAC,0x5E,0xF6,0xD4, +0x16,0xB2,0x43,0x09,0x0C,0x4D,0xF6,0xA7,0x6B,0xB4,0x99,0x84,0x65,0xCA,0x7A,0x88, +0xE2,0xE2,0x44,0xBE,0x5C,0xF7,0xEA,0x1C,0xF5, }; const unsigned char* const kSSLCertCertificateList[] = { - GlobalSign_Root_CA_certificate, + Comodo_AAA_Services_root_certificate, + GlobalSign_Root_CA___R6_certificate, + DigiCert_Global_Root_CA_certificate, USERTrust_RSA_Certification_Authority_certificate, - Starfield_Class_2_CA_certificate, - Verisign_Class_3_Public_Primary_Certification_Authority___G3_certificate, - USERTrust_ECC_Certification_Authority_certificate, - GeoTrust_Global_CA_certificate, + GlobalSign_Root_CA___R3_certificate, + GlobalSign_Root_CA___R2_certificate, + AffirmTrust_Premium_certificate, + GTS_Root_R4_certificate, + Baltimore_CyberTrust_Root_certificate, + DigiCert_Assured_ID_Root_CA_certificate, Starfield_Root_Certificate_Authority___G2_certificate, + AffirmTrust_Networking_certificate, + GlobalSign_Root_CA_certificate, + GTS_Root_R3_certificate, + COMODO_RSA_Certification_Authority_certificate, + GTS_Root_R2_certificate, + Cybertrust_Global_Root_certificate, + GTS_Root_R1_certificate, DigiCert_Global_Root_G3_certificate, - 
thawte_Primary_Root_CA___G2_certificate, - VeriSign_Universal_Root_Certification_Authority_certificate, - VeriSign_Class_3_Public_Primary_Certification_Authority___G4_certificate, DigiCert_Global_Root_G2_certificate, - AddTrust_Low_Value_Services_Root_certificate, - AffirmTrust_Premium_ECC_certificate, - Verisign_Class_4_Public_Primary_Certification_Authority___G3_certificate, - thawte_Primary_Root_CA_certificate, - AddTrust_Public_Services_Root_certificate, - AddTrust_Qualified_Certificates_Root_certificate, - GeoTrust_Primary_Certification_Authority___G3_certificate, - GeoTrust_Universal_CA_2_certificate, - Baltimore_CyberTrust_Root_certificate, - GlobalSign_Root_CA___R2_certificate, - GlobalSign_Root_CA___R3_certificate, - AffirmTrust_Networking_certificate, - AddTrust_External_Root_certificate, - thawte_Primary_Root_CA___G3_certificate, - DigiCert_Assured_ID_Root_CA_certificate, - Go_Daddy_Class_2_CA_certificate, - GeoTrust_Primary_Certification_Authority_certificate, - VeriSign_Class_3_Public_Primary_Certification_Authority___G5_certificate, - Equifax_Secure_CA_certificate, - Entrust_net_Premium_2048_Secure_Server_CA_certificate, - DigiCert_Assured_ID_Root_G3_certificate, + Starfield_Class_2_CA_certificate, COMODO_Certification_Authority_certificate, - DigiCert_Global_Root_CA_certificate, - Comodo_AAA_Services_root_certificate, + GlobalSign_ECC_Root_CA___R4_certificate, + GlobalSign_ECC_Root_CA___R5_certificate, + USERTrust_ECC_Certification_Authority_certificate, + Entrust_net_Premium_2048_Secure_Server_CA_certificate, + AffirmTrust_Premium_ECC_certificate, DigiCert_High_Assurance_EV_Root_CA_certificate, - GeoTrust_Universal_CA_certificate, - COMODO_ECC_Certification_Authority_certificate, Entrust_Root_Certification_Authority___G2_certificate, - DigiCert_Assured_ID_Root_G2_certificate, + Go_Daddy_Class_2_CA_certificate, AffirmTrust_Commercial_certificate, - AffirmTrust_Premium_certificate, - Go_Daddy_Root_Certificate_Authority___G2_certificate, - 
Comodo_Secure_Services_root_certificate, - DigiCert_Trusted_Root_G4_certificate, - GlobalSign_ECC_Root_CA___R5_certificate, - UTN_USERFirst_Hardware_Root_CA_certificate, - GlobalSign_ECC_Root_CA___R4_certificate, - TC_TrustCenter_Universal_CA_I_certificate, - Comodo_Trusted_Services_root_certificate, Entrust_Root_Certification_Authority_certificate, - TC_TrustCenter_Class_2_CA_II_certificate, - Cybertrust_Global_Root_certificate, + DigiCert_Assured_ID_Root_G2_certificate, + DigiCert_Trusted_Root_G4_certificate, + COMODO_ECC_Certification_Authority_certificate, Entrust_Root_Certification_Authority___EC1_certificate, - GeoTrust_Primary_Certification_Authority___G2_certificate, - GeoTrust_Global_CA_2_certificate, - COMODO_RSA_Certification_Authority_certificate, - UTN_DATACorp_SGC_Root_CA_certificate, + GeoTrust_Global_CA_certificate, + DigiCert_Assured_ID_Root_G3_certificate, + Go_Daddy_Root_Certificate_Authority___G2_certificate, }; const size_t kSSLCertCertificateSizeList[] = { - 889, + 1078, + 1415, + 947, 1506, - 1043, - 1054, - 659, - 856, + 867, + 958, + 1354, + 526, + 891, + 955, 993, + 848, + 889, + 528, + 1500, + 1374, + 933, + 1374, 579, - 652, - 1213, - 904, 914, - 1052, - 514, - 1054, - 1060, - 1049, - 1058, - 1026, - 1392, - 891, - 958, - 867, - 848, - 1082, - 1070, - 955, - 1028, - 896, - 1239, - 804, - 1120, - 586, + 1043, 1057, - 947, - 1078, + 485, + 546, + 659, + 1070, + 514, 969, - 1388, - 653, 1090, - 922, + 1028, 848, - 1354, - 969, - 1091, - 1428, - 546, - 1144, - 485, - 993, - 1095, 1173, - 1198, - 933, + 922, + 1428, + 653, 765, - 690, - 874, - 1500, - 1122, + 856, + 586, + 969, }; // clang-format on diff --git a/rtc_base/ssl_stream_adapter.cc b/rtc_base/ssl_stream_adapter.cc index 372c37ff0d..354622e6f0 100644 --- a/rtc_base/ssl_stream_adapter.cc +++ b/rtc_base/ssl_stream_adapter.cc @@ -10,6 +10,7 @@ #include "rtc_base/ssl_stream_adapter.h" +#include "absl/memory/memory.h" #include "rtc_base/openssl_stream_adapter.h" 
/////////////////////////////////////////////////////////////////////////////// @@ -89,12 +90,13 @@ bool IsGcmCryptoSuiteName(const std::string& crypto_suite) { crypto_suite == CS_AEAD_AES_128_GCM); } -SSLStreamAdapter* SSLStreamAdapter::Create(StreamInterface* stream) { - return new OpenSSLStreamAdapter(stream); +std::unique_ptr SSLStreamAdapter::Create( + std::unique_ptr stream) { + return std::make_unique(std::move(stream)); } -SSLStreamAdapter::SSLStreamAdapter(StreamInterface* stream) - : StreamAdapterInterface(stream) {} +SSLStreamAdapter::SSLStreamAdapter(std::unique_ptr stream) + : StreamAdapterInterface(stream.release()) {} SSLStreamAdapter::~SSLStreamAdapter() {} diff --git a/rtc_base/ssl_stream_adapter.h b/rtc_base/ssl_stream_adapter.h index 484657ebaf..7bff726510 100644 --- a/rtc_base/ssl_stream_adapter.h +++ b/rtc_base/ssl_stream_adapter.h @@ -17,6 +17,8 @@ #include #include +#include "absl/memory/memory.h" +#include "rtc_base/deprecation.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/stream.h" @@ -90,6 +92,12 @@ bool IsGcmCryptoSuiteName(const std::string& crypto_suite); enum SSLRole { SSL_CLIENT, SSL_SERVER }; enum SSLMode { SSL_MODE_TLS, SSL_MODE_DTLS }; + +// Note: TLS_10, TLS_11, and DTLS_10 will all be ignored, and only DTLS1_2 will +// be accepted unless the trial flag WebRTC-LegacyTlsProtocols/Enabled/ is +// passed in or an explicit override is used. Support for the legacy protocol +// versions will be completely removed in the future. +// See https://bugs.webrtc.org/10261. enum SSLProtocolVersion { SSL_PROTOCOL_NOT_GIVEN = -1, SSL_PROTOCOL_TLS_10 = 0, @@ -116,15 +124,17 @@ class SSLStreamAdapter : public StreamAdapterInterface { // Instantiate an SSLStreamAdapter wrapping the given stream, // (using the selected implementation for the platform). // Caller is responsible for freeing the returned object. 
- static SSLStreamAdapter* Create(StreamInterface* stream); + static std::unique_ptr Create( + std::unique_ptr stream); - explicit SSLStreamAdapter(StreamInterface* stream); + explicit SSLStreamAdapter(std::unique_ptr stream); ~SSLStreamAdapter() override; // Specify our SSL identity: key and certificate. SSLStream takes ownership // of the SSLIdentity object and will free it when appropriate. Should be // called no more than once on a given SSLStream instance. - virtual void SetIdentity(SSLIdentity* identity) = 0; + virtual void SetIdentity(std::unique_ptr identity) = 0; + virtual SSLIdentity* GetIdentityForTesting() const = 0; // Call this to indicate that we are to play the server role (or client role, // if the default argument is replaced by SSL_CLIENT). diff --git a/rtc_base/ssl_stream_adapter_unittest.cc b/rtc_base/ssl_stream_adapter_unittest.cc index d9cfe1b9bf..379acace6e 100644 --- a/rtc_base/ssl_stream_adapter_unittest.cc +++ b/rtc_base/ssl_stream_adapter_unittest.cc @@ -13,6 +13,7 @@ #include #include +#include "absl/memory/memory.h" #include "rtc_base/buffer_queue.h" #include "rtc_base/checks.h" #include "rtc_base/gunit.h" @@ -20,23 +21,30 @@ #include "rtc_base/memory/fifo_buffer.h" #include "rtc_base/memory_stream.h" #include "rtc_base/message_digest.h" +#include "rtc_base/openssl_stream_adapter.h" #include "rtc_base/ssl_adapter.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "test/field_trial.h" -using ::testing::WithParamInterface; -using ::testing::Values; using ::testing::Combine; using ::testing::tuple; +using ::testing::Values; +using ::testing::WithParamInterface; static const int kBlockSize = 4096; static const char kExporterLabel[] = "label"; static const unsigned char kExporterContext[] = "context"; static int kExporterContextLen = sizeof(kExporterContext); -static 
const char kRSA_PRIVATE_KEY_PEM[] = - "-----BEGIN RSA PRIVATE KEY-----\n" +// A private key used for testing, broken into pieces in order to avoid +// issues with Git's checks for private keys in repos. +#define RSA_PRIVATE_KEY_HEADER "-----BEGIN RSA PRIVATE KEY-----\n" + +static const char kRSA_PRIVATE_KEY_PEM[] = RSA_PRIVATE_KEY_HEADER "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n" "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n" "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n" @@ -53,6 +61,8 @@ static const char kRSA_PRIVATE_KEY_PEM[] = "UCXiYxSsu20QNVw=\n" "-----END RSA PRIVATE KEY-----\n"; +#undef RSA_PRIVATE_KEY_HEADER + static const char kCERT_PEM[] = "-----BEGIN CERTIFICATE-----\n" "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n" @@ -206,7 +216,15 @@ class SSLDummyStreamBase : public rtc::StreamInterface, out_->Close(); } - protected: + private: + void PostEvent(int events, int err) { + thread_->PostTask(webrtc::ToQueuedTask(task_safety_, [this, events, err]() { + SignalEvent(this, events, err); + })); + } + + webrtc::ScopedTaskSafety task_safety_; + rtc::Thread* const thread_ = rtc::Thread::Current(); SSLStreamAdapterTestBase* test_base_; const std::string side_; rtc::StreamInterface* in_; @@ -223,10 +241,10 @@ class SSLDummyStreamTLS : public SSLDummyStreamBase { : SSLDummyStreamBase(test, side, in, out) {} }; -class BufferQueueStream : public rtc::BufferQueue, public rtc::StreamInterface { +class BufferQueueStream : public rtc::StreamInterface { public: BufferQueueStream(size_t capacity, size_t default_size) - : rtc::BufferQueue(capacity, default_size) {} + : buffer_(capacity, default_size) {} // Implementation of abstract StreamInterface methods. 
@@ -238,9 +256,13 @@ class BufferQueueStream : public rtc::BufferQueue, public rtc::StreamInterface { size_t buffer_len, size_t* read, int* error) override { - if (!ReadFront(buffer, buffer_len, read)) { + const bool was_writable = buffer_.is_writable(); + if (!buffer_.ReadFront(buffer, buffer_len, read)) return rtc::SR_BLOCK; - } + + if (!was_writable) + NotifyWritableForTest(); + return rtc::SR_SUCCESS; } @@ -249,9 +271,13 @@ class BufferQueueStream : public rtc::BufferQueue, public rtc::StreamInterface { size_t data_len, size_t* written, int* error) override { - if (!WriteBack(data, data_len, written)) { + const bool was_readable = buffer_.is_readable(); + if (!buffer_.WriteBack(data, data_len, written)) return rtc::SR_BLOCK; - } + + if (!was_readable) + NotifyReadableForTest(); + return rtc::SR_SUCCESS; } @@ -259,9 +285,19 @@ class BufferQueueStream : public rtc::BufferQueue, public rtc::StreamInterface { void Close() override {} protected: - void NotifyReadableForTest() override { PostEvent(rtc::SE_READ, 0); } + void NotifyReadableForTest() { PostEvent(rtc::SE_READ, 0); } + void NotifyWritableForTest() { PostEvent(rtc::SE_WRITE, 0); } - void NotifyWritableForTest() override { PostEvent(rtc::SE_WRITE, 0); } + private: + void PostEvent(int events, int err) { + thread_->PostTask(webrtc::ToQueuedTask(task_safety_, [this, events, err]() { + SignalEvent(this, events, err); + })); + } + + rtc::Thread* const thread_ = rtc::Thread::Current(); + webrtc::ScopedTaskSafety task_safety_; + rtc::BufferQueue buffer_; }; class SSLDummyStreamDTLS : public SSLDummyStreamBase { @@ -292,8 +328,6 @@ class SSLStreamAdapterTestBase : public ::testing::Test, server_key_type_(server_key_type), client_stream_(nullptr), server_stream_(nullptr), - client_identity_(nullptr), - server_identity_(nullptr), delay_(0), mtu_(1460), loss_(0), @@ -314,23 +348,26 @@ class SSLStreamAdapterTestBase : public ::testing::Test, void SetUp() override { CreateStreams(); - 
client_ssl_.reset(rtc::SSLStreamAdapter::Create(client_stream_)); - server_ssl_.reset(rtc::SSLStreamAdapter::Create(server_stream_)); + client_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); + server_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); // Set up the slots client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); + std::unique_ptr client_identity; if (!client_cert_pem_.empty() && !client_private_key_pem_.empty()) { - client_identity_ = rtc::SSLIdentity::FromPEMStrings( + client_identity = rtc::SSLIdentity::CreateFromPEMStrings( client_private_key_pem_, client_cert_pem_); } else { - client_identity_ = rtc::SSLIdentity::Generate("client", client_key_type_); + client_identity = rtc::SSLIdentity::Create("client", client_key_type_); } - server_identity_ = rtc::SSLIdentity::Generate("server", server_key_type_); + auto server_identity = rtc::SSLIdentity::Create("server", server_key_type_); - client_ssl_->SetIdentity(client_identity_); - server_ssl_->SetIdentity(server_identity_); + client_ssl_->SetIdentity(std::move(client_identity)); + server_ssl_->SetIdentity(std::move(server_identity)); } void TearDown() override { @@ -346,8 +383,10 @@ class SSLStreamAdapterTestBase : public ::testing::Test, void ResetIdentitiesWithValidity(int not_before, int not_after) { CreateStreams(); - client_ssl_.reset(rtc::SSLStreamAdapter::Create(client_stream_)); - server_ssl_.reset(rtc::SSLStreamAdapter::Create(server_stream_)); + client_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); + server_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); @@ -359,17 +398,17 @@ class SSLStreamAdapterTestBase : public ::testing::Test, 
client_params.common_name = "client"; client_params.not_before = now + not_before; client_params.not_after = now + not_after; - client_identity_ = rtc::SSLIdentity::GenerateForTest(client_params); + auto client_identity = rtc::SSLIdentity::CreateForTest(client_params); rtc::SSLIdentityParams server_params; server_params.key_params = rtc::KeyParams(rtc::KT_DEFAULT); server_params.common_name = "server"; server_params.not_before = now + not_before; server_params.not_after = now + not_after; - server_identity_ = rtc::SSLIdentity::GenerateForTest(server_params); + auto server_identity = rtc::SSLIdentity::CreateForTest(server_params); - client_ssl_->SetIdentity(client_identity_); - server_ssl_->SetIdentity(server_identity_); + client_ssl_->SetIdentity(std::move(client_identity)); + server_ssl_->SetIdentity(std::move(server_identity)); } virtual void OnEvent(rtc::StreamInterface* stream, int sig, int err) { @@ -398,10 +437,10 @@ class SSLStreamAdapterTestBase : public ::testing::Test, RTC_LOG(LS_INFO) << "Setting peer identities by digest"; - rv = server_identity_->certificate().ComputeDigest( + rv = server_identity()->certificate().ComputeDigest( rtc::DIGEST_SHA_1, server_digest, 20, &server_digest_len); ASSERT_TRUE(rv); - rv = client_identity_->certificate().ComputeDigest( + rv = client_identity()->certificate().ComputeDigest( rtc::DIGEST_SHA_1, client_digest, 20, &client_digest_len); ASSERT_TRUE(rv); @@ -628,6 +667,19 @@ class SSLStreamAdapterTestBase : public ::testing::Test, virtual void TestTransfer(int size) = 0; protected: + rtc::SSLIdentity* client_identity() const { + if (!client_ssl_) { + return nullptr; + } + return client_ssl_->GetIdentityForTesting(); + } + rtc::SSLIdentity* server_identity() const { + if (!server_ssl_) { + return nullptr; + } + return server_ssl_->GetIdentityForTesting(); + } + std::string client_cert_pem_; std::string client_private_key_pem_; rtc::KeyParams client_key_type_; @@ -636,8 +688,6 @@ class SSLStreamAdapterTestBase : public 
::testing::Test, SSLDummyStreamBase* server_stream_; // freed by server_ssl_ destructor std::unique_ptr client_ssl_; std::unique_ptr server_ssl_; - rtc::SSLIdentity* client_identity_; // freed by client_ssl_ destructor - rtc::SSLIdentity* server_identity_; // freed by server_ssl_ destructor int delay_; size_t mtu_; int loss_; @@ -767,24 +817,18 @@ class SSLStreamAdapterTestTLS rtc::MemoryStream recv_stream_; }; -class SSLStreamAdapterTestDTLS - : public SSLStreamAdapterTestBase, - public WithParamInterface> { +class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { public: - SSLStreamAdapterTestDTLS() - : SSLStreamAdapterTestBase("", - "", - true, - ::testing::get<0>(GetParam()), - ::testing::get<1>(GetParam())), + SSLStreamAdapterTestDTLSBase(rtc::KeyParams param1, rtc::KeyParams param2) + : SSLStreamAdapterTestBase("", "", true, param1, param2), client_buffer_(kBufferCapacity, kDefaultBufferSize), server_buffer_(kBufferCapacity, kDefaultBufferSize), packet_size_(1000), count_(0), sent_(0) {} - SSLStreamAdapterTestDTLS(const std::string& cert_pem, - const std::string& private_key_pem) + SSLStreamAdapterTestDTLSBase(const std::string& cert_pem, + const std::string& private_key_pem) : SSLStreamAdapterTestBase(cert_pem, private_key_pem, true), client_buffer_(kBufferCapacity, kDefaultBufferSize), server_buffer_(kBufferCapacity, kDefaultBufferSize), @@ -883,15 +927,30 @@ class SSLStreamAdapterTestDTLS } } - private: + protected: BufferQueueStream client_buffer_; BufferQueueStream server_buffer_; + + private: size_t packet_size_; int count_; int sent_; std::set received_; }; +class SSLStreamAdapterTestDTLS + : public SSLStreamAdapterTestDTLSBase, + public WithParamInterface> { + public: + SSLStreamAdapterTestDTLS() + : SSLStreamAdapterTestDTLSBase(::testing::get<0>(GetParam()), + ::testing::get<1>(GetParam())) {} + + SSLStreamAdapterTestDTLS(const std::string& cert_pem, + const std::string& private_key_pem) + : SSLStreamAdapterTestDTLSBase(cert_pem, 
private_key_pem) {} +}; + rtc::StreamResult SSLDummyStreamBase::Write(const void* data, size_t data_len, size_t* written, @@ -924,8 +983,10 @@ class SSLStreamAdapterTestDTLSCertChain : public SSLStreamAdapterTestDTLS { void SetUp() override { CreateStreams(); - client_ssl_.reset(rtc::SSLStreamAdapter::Create(client_stream_)); - server_ssl_.reset(rtc::SSLStreamAdapter::Create(server_stream_)); + client_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); + server_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); // Set up the slots client_ssl_->SignalEvent.connect( @@ -935,14 +996,15 @@ class SSLStreamAdapterTestDTLSCertChain : public SSLStreamAdapterTestDTLS { reinterpret_cast(this), &SSLStreamAdapterTestBase::OnEvent); + std::unique_ptr client_identity; if (!client_cert_pem_.empty() && !client_private_key_pem_.empty()) { - client_identity_ = rtc::SSLIdentity::FromPEMStrings( + client_identity = rtc::SSLIdentity::CreateFromPEMStrings( client_private_key_pem_, client_cert_pem_); } else { - client_identity_ = rtc::SSLIdentity::Generate("client", client_key_type_); + client_identity = rtc::SSLIdentity::Create("client", client_key_type_); } - client_ssl_->SetIdentity(client_identity_); + client_ssl_->SetIdentity(std::move(client_identity)); } }; @@ -960,13 +1022,13 @@ TEST_P(SSLStreamAdapterTestTLS, GetPeerCertChainWithOneCertificate) { ASSERT_NE(nullptr, cert_chain); EXPECT_EQ(1u, cert_chain->GetSize()); EXPECT_EQ(cert_chain->Get(0).ToPEMString(), - server_identity_->certificate().ToPEMString()); + server_identity()->certificate().ToPEMString()); } TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshake) { - server_identity_ = rtc::SSLIdentity::FromPEMChainStrings( + auto server_identity = rtc::SSLIdentity::CreateFromPEMChainStrings( kRSA_PRIVATE_KEY_PEM, std::string(kCERT_PEM) + kCACert); - server_ssl_->SetIdentity(server_identity_); + server_ssl_->SetIdentity(std::move(server_identity)); TestHandshake(); 
std::unique_ptr peer_cert_chain = client_ssl_->GetPeerSSLCertChain(); @@ -977,11 +1039,8 @@ TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshake) { } TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshakeWithCopy) { - std::unique_ptr identity( - rtc::SSLIdentity::FromPEMChainStrings(kRSA_PRIVATE_KEY_PEM, - std::string(kCERT_PEM) + kCACert)); - server_identity_ = identity->GetReference(); - server_ssl_->SetIdentity(server_identity_); + server_ssl_->SetIdentity(rtc::SSLIdentity::CreateFromPEMChainStrings( + kRSA_PRIVATE_KEY_PEM, std::string(kCERT_PEM) + kCACert)); TestHandshake(); std::unique_ptr peer_cert_chain = client_ssl_->GetPeerSSLCertChain(); @@ -992,9 +1051,8 @@ TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshakeWithCopy) { } TEST_F(SSLStreamAdapterTestDTLSCertChain, ThreeCertHandshake) { - server_identity_ = rtc::SSLIdentity::FromPEMChainStrings( - kRSA_PRIVATE_KEY_PEM, std::string(kCERT_PEM) + kIntCert1 + kCACert); - server_ssl_->SetIdentity(server_identity_); + server_ssl_->SetIdentity(rtc::SSLIdentity::CreateFromPEMChainStrings( + kRSA_PRIVATE_KEY_PEM, std::string(kCERT_PEM) + kIntCert1 + kCACert)); TestHandshake(); std::unique_ptr peer_cert_chain = client_ssl_->GetPeerSSLCertChain(); @@ -1060,7 +1118,7 @@ TEST_P(SSLStreamAdapterTestTLS, bool rv; rtc::SSLPeerCertificateDigestError err; - rv = server_identity_->certificate().ComputeDigest( + rv = server_identity()->certificate().ComputeDigest( rtc::DIGEST_SHA_1, server_digest, 20, &server_digest_len); ASSERT_TRUE(rv); @@ -1078,7 +1136,7 @@ TEST_P(SSLStreamAdapterTestTLS, TestSetPeerCertificateDigestWithInvalidLength) { bool rv; rtc::SSLPeerCertificateDigestError err; - rv = server_identity_->certificate().ComputeDigest( + rv = server_identity()->certificate().ComputeDigest( rtc::DIGEST_SHA_1, server_digest, 20, &server_digest_len); ASSERT_TRUE(rv); @@ -1380,9 +1438,114 @@ TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) { ASSERT_EQ(kCERT_PEM, 
server_peer_cert->ToPEMString()); } +// Test getting the used DTLS 1.2 ciphers. +// DTLS 1.2 enabled for client and server -> DTLS 1.2 will be used. +TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Both) { + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); + TestHandshake(); + + int client_cipher; + ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); + int server_cipher; + ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); + + ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(true)); + ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(false)); + + ASSERT_EQ(client_cipher, server_cipher); + ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( + server_cipher, ::testing::get<1>(GetParam()).type())); +} + // Test getting the used DTLS ciphers. -// DTLS 1.2 enabled for neither client nor server -> DTLS 1.0 will be used. +// DTLS 1.2 is max version for client and server. TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuite) { + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); + TestHandshake(); + + int client_cipher; + ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); + int server_cipher; + ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); + + ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(true)); + ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(false)); + + ASSERT_EQ(client_cipher, server_cipher); + ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( + server_cipher, ::testing::get<1>(GetParam()).type())); +} + +// The RSA keysizes here might look strange, why not include the RFC's size +// 2048?. The reason is test case slowness; testing two sizes to exercise +// parametrization is sufficient. 
+INSTANTIATE_TEST_SUITE_P( + SSLStreamAdapterTestsTLS, + SSLStreamAdapterTestTLS, + Combine(Values(rtc::KeyParams::RSA(1024, 65537), + rtc::KeyParams::RSA(1152, 65537), + rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)), + Values(rtc::KeyParams::RSA(1024, 65537), + rtc::KeyParams::RSA(1152, 65537), + rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)))); +INSTANTIATE_TEST_SUITE_P( + SSLStreamAdapterTestsDTLS, + SSLStreamAdapterTestDTLS, + Combine(Values(rtc::KeyParams::RSA(1024, 65537), + rtc::KeyParams::RSA(1152, 65537), + rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)), + Values(rtc::KeyParams::RSA(1024, 65537), + rtc::KeyParams::RSA(1152, 65537), + rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)))); + +// Tests for enabling / disabling legacy TLS protocols in DTLS. +class SSLStreamAdapterTestDTLSLegacyProtocols + : public SSLStreamAdapterTestDTLSBase { + public: + SSLStreamAdapterTestDTLSLegacyProtocols() + : SSLStreamAdapterTestDTLSBase(rtc::KeyParams::ECDSA(rtc::EC_NIST_P256), + rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)) { + } + + // Do not use the SetUp version from the parent class. + void SetUp() override {} + + // The legacy TLS protocols flag is read when the OpenSSLStreamAdapter is + // initialized, so we set the experiment while creationg client_ssl_ + // and server_ssl_. 
+ + void ConfigureClient(std::string experiment) { + webrtc::test::ScopedFieldTrials trial(experiment); + client_stream_ = + new SSLDummyStreamDTLS(this, "c2s", &client_buffer_, &server_buffer_); + client_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); + client_ssl_->SignalEvent.connect( + static_cast(this), + &SSLStreamAdapterTestBase::OnEvent); + auto client_identity = rtc::SSLIdentity::Create("client", client_key_type_); + client_ssl_->SetIdentity(std::move(client_identity)); + } + + void ConfigureServer(std::string experiment) { + webrtc::test::ScopedFieldTrials trial(experiment); + server_stream_ = + new SSLDummyStreamDTLS(this, "s2c", &server_buffer_, &client_buffer_); + server_ssl_ = + rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); + server_ssl_->SignalEvent.connect( + static_cast(this), + &SSLStreamAdapterTestBase::OnEvent); + server_ssl_->SetIdentity( + rtc::SSLIdentity::Create("server", server_key_type_)); + } +}; + +// Test getting the used DTLS ciphers. +// DTLS 1.2 enabled for neither client nor server -> DTLS 1.0 will be used. +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, TestGetSslCipherSuite) { + ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); + ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); TestHandshake(); @@ -1395,13 +1558,14 @@ TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuite) { ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(false)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( - server_cipher, ::testing::get<1>(GetParam()).type())); } // Test getting the used DTLS 1.2 ciphers. // DTLS 1.2 enabled for client and server -> DTLS 1.2 will be used. 
-TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Both) { +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslCipherSuiteDtls12Both) { + ConfigureClient(""); + ConfigureServer(""); SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); TestHandshake(); @@ -1414,12 +1578,13 @@ TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Both) { ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(false)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( - server_cipher, ::testing::get<1>(GetParam()).type())); } // DTLS 1.2 enabled for client only -> DTLS 1.0 will be used. -TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Client) { +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslCipherSuiteDtls12Client) { + ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); + ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_12); TestHandshake(); @@ -1432,12 +1597,13 @@ TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Client) { ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(false)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( - server_cipher, ::testing::get<1>(GetParam()).type())); } // DTLS 1.2 enabled for server only -> DTLS 1.0 will be used. 
-TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Server) { +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslCipherSuiteDtls12Server) { + ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); + ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_10); TestHandshake(); @@ -1450,28 +1616,86 @@ TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Server) { ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(false)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( - server_cipher, ::testing::get<1>(GetParam()).type())); } -// The RSA keysizes here might look strange, why not include the RFC's size -// 2048?. The reason is test case slowness; testing two sizes to exercise -// parametrization is sufficient. -INSTANTIATE_TEST_SUITE_P( - SSLStreamAdapterTestsTLS, - SSLStreamAdapterTestTLS, - Combine(Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)), - Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)))); -INSTANTIATE_TEST_SUITE_P( - SSLStreamAdapterTestsDTLS, - SSLStreamAdapterTestDTLS, - Combine(Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)), - Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)))); +// Client has legacy TLS versions disabled, server has DTLS 1.0 only. +// This is meant to cause a failure. +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslVersionLegacyDisabledServer10) { + ConfigureClient(""); + ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_12); + // Handshake should fail. 
+ TestHandshake(false); +} + +// Both client and server have legacy TLS versions disabled and support +// DTLS 1.2. This should work. +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslVersionLegacyDisabledServer12) { + ConfigureClient(""); + ConfigureServer(""); + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); + TestHandshake(); +} + +// Both client and server have legacy TLS versions enabled and support DTLS 1.0. +// This should work. +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslVersionLegacyEnabledClient10Server10) { + ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); + ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); + TestHandshake(); +} + +// Legacy protocols are disabled in the client, max TLS version is 1.0 +// This should be a configuration error, and handshake should fail. +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslVersionLegacyDisabledClient10Server10) { + ConfigureClient(""); + ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); + TestHandshake(false); +} + +// Both client and server have legacy TLS versions enabled and support DTLS 1.0. +// This should work. +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslVersionLegacyOverrideEnabledClient10Server10) { + rtc::SetAllowLegacyTLSProtocols(true); + ConfigureClient(""); + ConfigureServer(""); + // Remove override. + rtc::SetAllowLegacyTLSProtocols(absl::nullopt); + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); + TestHandshake(); +} + +// Client has legacy TLS disabled and server has legacy TLS enabled via +// override. Handshake for DTLS 1.0 should fail. 
+TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslVersionLegacyOverrideDisabledClient10EnabledServer10) { + rtc::SetAllowLegacyTLSProtocols(false); + ConfigureClient(""); + rtc::SetAllowLegacyTLSProtocols(true); + ConfigureServer(""); + // Remove override. + rtc::SetAllowLegacyTLSProtocols(absl::nullopt); + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); + TestHandshake(false); +} + +// Client has legacy TLS enabled and server has legacy TLS disabled via +// override. Handshake for DTLS 1.0 should fail. +TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, + TestGetSslVersionLegacyOverrideEnabledClient10DisabledServer10) { + rtc::SetAllowLegacyTLSProtocols(true); + ConfigureClient(""); + rtc::SetAllowLegacyTLSProtocols(false); + ConfigureServer(""); + // Remove override. + rtc::SetAllowLegacyTLSProtocols(absl::nullopt); + SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); + TestHandshake(false); +} diff --git a/rtc_base/stream.cc b/rtc_base/stream.cc index 1b0a4d759b..ee72f8d2b8 100644 --- a/rtc_base/stream.cc +++ b/rtc_base/stream.cc @@ -24,7 +24,6 @@ namespace rtc { /////////////////////////////////////////////////////////////////////////////// // StreamInterface /////////////////////////////////////////////////////////////////////////////// -StreamInterface::~StreamInterface() {} StreamResult StreamInterface::WriteAll(const void* data, size_t data_len, @@ -44,29 +43,12 @@ StreamResult StreamInterface::WriteAll(const void* data, return result; } -void StreamInterface::PostEvent(Thread* t, int events, int err) { - t->Post(RTC_FROM_HERE, this, MSG_POST_EVENT, - new StreamEventData(events, err)); -} - -void StreamInterface::PostEvent(int events, int err) { - PostEvent(Thread::Current(), events, err); -} - bool StreamInterface::Flush() { return false; } StreamInterface::StreamInterface() {} -void StreamInterface::OnMessage(Message* msg) { - if (MSG_POST_EVENT == msg->message_id) { - StreamEventData* pe = 
static_cast(msg->pdata); - SignalEvent(this, pe->events, pe->error); - delete msg->pdata; - } -} - /////////////////////////////////////////////////////////////////////////////// // StreamAdapterInterface /////////////////////////////////////////////////////////////////////////////// diff --git a/rtc_base/stream.h b/rtc_base/stream.h index bfb9dc2c41..9bf11a2405 100644 --- a/rtc_base/stream.h +++ b/rtc_base/stream.h @@ -15,7 +15,6 @@ #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/message_handler.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -49,16 +48,9 @@ enum StreamResult { SR_ERROR, SR_SUCCESS, SR_BLOCK, SR_EOS }; // SE_WRITE: Data can be written, so Write is likely to not return SR_BLOCK enum StreamEvent { SE_OPEN = 1, SE_READ = 2, SE_WRITE = 4, SE_CLOSE = 8 }; -struct StreamEventData : public MessageData { - int events, error; - StreamEventData(int ev, int er) : events(ev), error(er) {} -}; - -class RTC_EXPORT StreamInterface : public MessageHandler { +class RTC_EXPORT StreamInterface { public: - enum { MSG_POST_EVENT = 0xF1F1, MSG_MAX = MSG_POST_EVENT }; - - ~StreamInterface() override; + virtual ~StreamInterface() {} virtual StreamState GetState() const = 0; @@ -97,13 +89,6 @@ class RTC_EXPORT StreamInterface : public MessageHandler { // certain events will be raised in the future. sigslot::signal3 SignalEvent; - // Like calling SignalEvent, but posts a message to the specified thread, - // which will call SignalEvent. This helps unroll the stack and prevent - // re-entrancy. - void PostEvent(Thread* t, int events, int err); - // Like the aforementioned method, but posts to the current thread. - void PostEvent(int events, int err); - // Return true if flush is successful. 
virtual bool Flush(); @@ -126,9 +111,6 @@ class RTC_EXPORT StreamInterface : public MessageHandler { protected: StreamInterface(); - // MessageHandler Interface - void OnMessage(Message* msg) override; - private: RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterface); }; diff --git a/rtc_base/string_utils.cc b/rtc_base/string_utils.cc index dfbb548050..1720c62d5e 100644 --- a/rtc_base/string_utils.cc +++ b/rtc_base/string_utils.cc @@ -50,10 +50,4 @@ std::string ToHex(const int i) { return std::string(buffer); } -std::string LeftPad(char padding, unsigned length, std::string s) { - if (s.length() >= length) - return s; - return std::string(length - s.length(), padding) + s; -} - } // namespace rtc diff --git a/rtc_base/string_utils.h b/rtc_base/string_utils.h index 3518702ec0..23c55cb893 100644 --- a/rtc_base/string_utils.h +++ b/rtc_base/string_utils.h @@ -88,8 +88,6 @@ std::string string_trim(const std::string& s); // TODO(jonasolsson): replace with absl::Hex when that becomes available. std::string ToHex(const int i); -std::string LeftPad(char padding, unsigned length, std::string s); - } // namespace rtc #endif // RTC_BASE_STRING_UTILS_H_ diff --git a/rtc_base/stringize_macros.h b/rtc_base/stringize_macros.h deleted file mode 100644 index aee8d14551..0000000000 --- a/rtc_base/stringize_macros.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -// Modified from the Chromium original: -// src/base/strings/stringize_macros.h - -// This file defines preprocessor macros for stringizing preprocessor -// symbols (or their output) and manipulating preprocessor symbols -// that define strings. - -#ifndef RTC_BASE_STRINGIZE_MACROS_H_ -#define RTC_BASE_STRINGIZE_MACROS_H_ - -// This is not very useful as it does not expand defined symbols if -// called directly. Use its counterpart without the _NO_EXPANSION -// suffix, below. -#define STRINGIZE_NO_EXPANSION(x) #x - -// Use this to quote the provided parameter, first expanding it if it -// is a preprocessor symbol. -// -// For example, if: -// #define A FOO -// #define B(x) myobj->FunctionCall(x) -// -// Then: -// STRINGIZE(A) produces "FOO" -// STRINGIZE(B(y)) produces "myobj->FunctionCall(y)" -#define STRINGIZE(x) STRINGIZE_NO_EXPANSION(x) - -#endif // RTC_BASE_STRINGIZE_MACROS_H_ diff --git a/rtc_base/stringize_macros_unittest.cc b/rtc_base/stringize_macros_unittest.cc deleted file mode 100644 index 78e6b55b2d..0000000000 --- a/rtc_base/stringize_macros_unittest.cc +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/stringize_macros.h" - -#include "test/gtest.h" - -// Macros as per documentation in header file. 
-#define PREPROCESSOR_UTIL_UNITTEST_A FOO -#define PREPROCESSOR_UTIL_UNITTEST_B(x) myobj->FunctionCall(x) -#define PREPROCESSOR_UTIL_UNITTEST_C "foo" - -TEST(StringizeTest, Ansi) { - EXPECT_STREQ("PREPROCESSOR_UTIL_UNITTEST_A", - STRINGIZE_NO_EXPANSION(PREPROCESSOR_UTIL_UNITTEST_A)); - EXPECT_STREQ("PREPROCESSOR_UTIL_UNITTEST_B(y)", - STRINGIZE_NO_EXPANSION(PREPROCESSOR_UTIL_UNITTEST_B(y))); - EXPECT_STREQ("PREPROCESSOR_UTIL_UNITTEST_C", - STRINGIZE_NO_EXPANSION(PREPROCESSOR_UTIL_UNITTEST_C)); - - EXPECT_STREQ("FOO", STRINGIZE(PREPROCESSOR_UTIL_UNITTEST_A)); - EXPECT_STREQ("myobj->FunctionCall(y)", - STRINGIZE(PREPROCESSOR_UTIL_UNITTEST_B(y))); - EXPECT_STREQ("\"foo\"", STRINGIZE(PREPROCESSOR_UTIL_UNITTEST_C)); -} diff --git a/rtc_base/strings/string_builder_unittest.cc b/rtc_base/strings/string_builder_unittest.cc index 84717ad1d1..99dfd86292 100644 --- a/rtc_base/strings/string_builder_unittest.cc +++ b/rtc_base/strings/string_builder_unittest.cc @@ -59,7 +59,7 @@ TEST(SimpleStringBuilder, StdString) { // off. 
#if (GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)) || !RTC_DCHECK_IS_ON -TEST(SimpleStringBuilder, BufferOverrunConstCharP) { +TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharP) { char sb_buf[4]; SimpleStringBuilder sb(sb_buf); const char* const msg = "This is just too much"; @@ -71,7 +71,7 @@ TEST(SimpleStringBuilder, BufferOverrunConstCharP) { #endif } -TEST(SimpleStringBuilder, BufferOverrunStdString) { +TEST(SimpleStringBuilderDeathTest, BufferOverrunStdString) { char sb_buf[4]; SimpleStringBuilder sb(sb_buf); sb << 12; @@ -84,7 +84,7 @@ TEST(SimpleStringBuilder, BufferOverrunStdString) { #endif } -TEST(SimpleStringBuilder, BufferOverrunInt) { +TEST(SimpleStringBuilderDeathTest, BufferOverrunInt) { char sb_buf[4]; SimpleStringBuilder sb(sb_buf); constexpr int num = -12345; @@ -100,7 +100,7 @@ TEST(SimpleStringBuilder, BufferOverrunInt) { #endif } -TEST(SimpleStringBuilder, BufferOverrunDouble) { +TEST(SimpleStringBuilderDeathTest, BufferOverrunDouble) { char sb_buf[5]; SimpleStringBuilder sb(sb_buf); constexpr double num = 123.456; @@ -113,7 +113,7 @@ TEST(SimpleStringBuilder, BufferOverrunDouble) { #endif } -TEST(SimpleStringBuilder, BufferOverrunConstCharPAlreadyFull) { +TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharPAlreadyFull) { char sb_buf[4]; SimpleStringBuilder sb(sb_buf); sb << 123; @@ -126,7 +126,7 @@ TEST(SimpleStringBuilder, BufferOverrunConstCharPAlreadyFull) { #endif } -TEST(SimpleStringBuilder, BufferOverrunIntAlreadyFull) { +TEST(SimpleStringBuilderDeathTest, BufferOverrunIntAlreadyFull) { char sb_buf[4]; SimpleStringBuilder sb(sb_buf); sb << "xyz"; diff --git a/rtc_base/strings/string_format.cc b/rtc_base/strings/string_format.cc new file mode 100644 index 0000000000..f92be339ce --- /dev/null +++ b/rtc_base/strings/string_format.cc @@ -0,0 +1,41 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include "rtc_base/strings/string_format.h" + +#include "rtc_base/checks.h" + +namespace rtc { + +namespace { + +// This is an arbitrary limitation that can be changed if necessary, or removed +// if someone has the time and inclination to replicate the fancy logic from +// Chromium's base::StringPrinf(). +constexpr int kMaxSize = 512; + +} // namespace + +std::string StringFormat(const char* fmt, ...) { + char buffer[kMaxSize]; + va_list args; + va_start(args, fmt); + int result = vsnprintf(buffer, kMaxSize, fmt, args); + va_end(args); + RTC_DCHECK_GE(result, 0) << "ERROR: vsnprintf() failed with error " << result; + RTC_DCHECK_LT(result, kMaxSize) + << "WARNING: string was truncated from " << result << " to " + << (kMaxSize - 1) << " characters"; + return std::string(buffer); +} + +} // namespace rtc diff --git a/rtc_base/strings/string_format.h b/rtc_base/strings/string_format.h new file mode 100644 index 0000000000..13124d2925 --- /dev/null +++ b/rtc_base/strings/string_format.h @@ -0,0 +1,31 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_STRINGS_STRING_FORMAT_H_ +#define RTC_BASE_STRINGS_STRING_FORMAT_H_ + +#include + +namespace rtc { + +#if defined(__GNUC__) +#define RTC_PRINTF_FORMAT(format_param, dots_param) \ + __attribute__((format(printf, format_param, dots_param))) +#else +#define RTC_PRINTF_FORMAT(format_param, dots_param) +#endif + +// Return a C++ string given printf-like input. +// Based on base::StringPrintf() in Chrome but without its fancy dynamic memory +// allocation for any size of the input buffer. +std::string StringFormat(const char* fmt, ...) RTC_PRINTF_FORMAT(1, 2); +} // namespace rtc + +#endif // RTC_BASE_STRINGS_STRING_FORMAT_H_ diff --git a/rtc_base/strings/string_format_unittest.cc b/rtc_base/strings/string_format_unittest.cc new file mode 100644 index 0000000000..d0e8eb2d71 --- /dev/null +++ b/rtc_base/strings/string_format_unittest.cc @@ -0,0 +1,35 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/strings/string_format.h" + +#include "rtc_base/checks.h" +#include "test/gtest.h" + +namespace rtc { + +TEST(StringFormatTest, Empty) { + EXPECT_EQ("", StringFormat("%s", "")); +} + +TEST(StringFormatTest, Misc) { + EXPECT_EQ("123hello w", StringFormat("%3d%2s %1c", 123, "hello", 'w')); + EXPECT_EQ("3 = three", StringFormat("%d = %s", 1 + 2, "three")); +} + +TEST(StringFormatTest, MaxSizeShouldWork) { + const int kSrcLen = 512; + char str[kSrcLen]; + std::fill_n(str, kSrcLen, 'A'); + str[kSrcLen - 1] = 0; + EXPECT_EQ(str, StringFormat("%s", str)); +} + +} // namespace rtc diff --git a/rtc_base/swap_queue.h b/rtc_base/swap_queue.h index eb0b1fff0c..9eac49a933 100644 --- a/rtc_base/swap_queue.h +++ b/rtc_base/swap_queue.h @@ -141,7 +141,8 @@ class SwapQueue { return false; } - std::swap(*input, queue_[next_write_index_]); + using std::swap; + swap(*input, queue_[next_write_index_]); // Increment the value of num_elements_ to account for the inserted element. // Release memory ordering prevents the reads and writes to @@ -181,7 +182,8 @@ class SwapQueue { return false; } - std::swap(*output, queue_[next_read_index_]); + using std::swap; + swap(*output, queue_[next_read_index_]); // Decrement the value of num_elements_ to account for the removed element. // Release memory ordering prevents the reads and writes to diff --git a/rtc_base/swap_queue_unittest.cc b/rtc_base/swap_queue_unittest.cc index 199ac6b185..3862d850fa 100644 --- a/rtc_base/swap_queue_unittest.cc +++ b/rtc_base/swap_queue_unittest.cc @@ -135,7 +135,7 @@ TEST(SwapQueueTest, SuccessfulItemVerifyFunctor) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(SwapQueueTest, UnsuccessfulItemVerifyFunctor) { +TEST(SwapQueueDeathTest, UnsuccessfulItemVerifyFunctor) { // Queue item verifier for the test. 
auto minus_2_verifier = [](const int& i) { return i > -2; }; SwapQueue queue(2, minus_2_verifier); @@ -148,7 +148,7 @@ TEST(SwapQueueTest, UnsuccessfulItemVerifyFunctor) { EXPECT_DEATH(result = queue.Insert(&invalid_value), ""); } -TEST(SwapQueueTest, UnSuccessfulItemVerifyInsert) { +TEST(SwapQueueDeathTest, UnSuccessfulItemVerifyInsert) { std::vector template_element(kChunkSize); SwapQueue, SwapQueueItemVerifier, &LengthVerifierFunction>> @@ -158,7 +158,7 @@ TEST(SwapQueueTest, UnSuccessfulItemVerifyInsert) { EXPECT_DEATH(result = queue.Insert(&invalid_chunk), ""); } -TEST(SwapQueueTest, UnSuccessfulItemVerifyRemove) { +TEST(SwapQueueDeathTest, UnSuccessfulItemVerifyRemove) { std::vector template_element(kChunkSize); SwapQueue, SwapQueueItemVerifier, &LengthVerifierFunction>> diff --git a/rtc_base/synchronization/BUILD.gn b/rtc_base/synchronization/BUILD.gn index 3e7b22d4f9..618e224a5d 100644 --- a/rtc_base/synchronization/BUILD.gn +++ b/rtc_base/synchronization/BUILD.gn @@ -12,21 +12,35 @@ if (is_android) { import("//build/config/android/rules.gni") } -rtc_library("rw_lock_wrapper") { - public = [ "rw_lock_wrapper.h" ] - sources = [ "rw_lock_wrapper.cc" ] - deps = [ "..:macromagic" ] - if (is_win) { - sources += [ - "rw_lock_win.cc", - "rw_lock_win.h", - ] - deps += [ "..:logging" ] - } else { - sources += [ - "rw_lock_posix.cc", - "rw_lock_posix.h", - ] +rtc_library("yield") { + sources = [ + "yield.cc", + "yield.h", + ] + deps = [] +} + +rtc_library("mutex") { + sources = [ + "mutex.cc", + "mutex.h", + "mutex_critical_section.h", + "mutex_pthread.h", + ] + if (rtc_use_absl_mutex) { + sources += [ "mutex_abseil.h" ] + } + + deps = [ + ":yield", + "..:checks", + "..:macromagic", + "..:platform_thread_types", + "../system:unused", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] + if (rtc_use_absl_mutex) { + absl_deps += [ "//third_party/abseil-cpp/absl/synchronization" ] } } @@ -36,10 +50,12 @@ rtc_library("sequence_checker") { 
"sequence_checker.h", ] deps = [ + ":mutex", "..:checks", "..:criticalsection", "..:macromagic", "..:platform_thread_types", + "..:stringutils", "../../api/task_queue", "../system:rtc_export", ] @@ -50,8 +66,8 @@ rtc_library("yield_policy") { "yield_policy.cc", "yield_policy.h", ] - deps = [ - "..:checks", + deps = [ "..:checks" ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:config", "//third_party/abseil-cpp/absl/base:core_headers", ] @@ -60,11 +76,30 @@ rtc_library("yield_policy") { if (rtc_include_tests) { rtc_library("synchronization_unittests") { testonly = true - sources = [ "yield_policy_unittest.cc" ] + sources = [ + "mutex_unittest.cc", + "yield_policy_unittest.cc", + ] deps = [ + ":mutex", + ":yield", ":yield_policy", + "..:checks", + "..:macromagic", + "..:rtc_base", "..:rtc_event", "../../test:test_support", + "//third_party/google_benchmark", + ] + } + + rtc_library("mutex_benchmark") { + testonly = true + sources = [ "mutex_benchmark.cc" ] + deps = [ + ":mutex", + "../system:unused", + "//third_party/google_benchmark", ] } diff --git a/rtc_base/synchronization/DEPS b/rtc_base/synchronization/DEPS new file mode 100644 index 0000000000..4ed1f2444b --- /dev/null +++ b/rtc_base/synchronization/DEPS @@ -0,0 +1,11 @@ +specific_include_rules = { + "mutex_abseil\.h": [ + "+absl/synchronization" + ], + ".*_benchmark\.cc": [ + "+benchmark", + ], + ".*_unittest\.cc": [ + "+benchmark", + ] +} diff --git a/rtc_base/synchronization/mutex.cc b/rtc_base/synchronization/mutex.cc new file mode 100644 index 0000000000..6c2d6ff7f0 --- /dev/null +++ b/rtc_base/synchronization/mutex.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/synchronization/mutex.h" + +#include "rtc_base/checks.h" +#include "rtc_base/synchronization/yield.h" + +namespace webrtc { + +#if !defined(WEBRTC_ABSL_MUTEX) +void GlobalMutex::Lock() { + while (mutex_locked_.exchange(1)) { + YieldCurrentThread(); + } +} + +void GlobalMutex::Unlock() { + int old = mutex_locked_.exchange(0); + RTC_DCHECK_EQ(old, 1) << "Unlock called without calling Lock first"; +} + +GlobalMutexLock::GlobalMutexLock(GlobalMutex* mutex) : mutex_(mutex) { + mutex_->Lock(); +} + +GlobalMutexLock::~GlobalMutexLock() { + mutex_->Unlock(); +} +#endif // #if !defined(WEBRTC_ABSL_MUTEX) + +} // namespace webrtc diff --git a/rtc_base/synchronization/mutex.h b/rtc_base/synchronization/mutex.h new file mode 100644 index 0000000000..620fe74e4a --- /dev/null +++ b/rtc_base/synchronization/mutex.h @@ -0,0 +1,108 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_H_ +#define RTC_BASE_SYNCHRONIZATION_MUTEX_H_ + +#include + +#include "absl/base/const_init.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/unused.h" +#include "rtc_base/thread_annotations.h" + +#if defined(WEBRTC_ABSL_MUTEX) +#include "rtc_base/synchronization/mutex_abseil.h" // nogncheck +#elif defined(WEBRTC_WIN) +#include "rtc_base/synchronization/mutex_critical_section.h" +#elif defined(WEBRTC_POSIX) +#include "rtc_base/synchronization/mutex_pthread.h" +#else +#error Unsupported platform. 
+#endif + +namespace webrtc { + +// The Mutex guarantees exclusive access and aims to follow Abseil semantics +// (i.e. non-reentrant etc). +class RTC_LOCKABLE Mutex final { + public: + Mutex() = default; + Mutex(const Mutex&) = delete; + Mutex& operator=(const Mutex&) = delete; + + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { + impl_.Lock(); + } + RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + return impl_.TryLock(); + } + void Unlock() RTC_UNLOCK_FUNCTION() { + impl_.Unlock(); + } + + private: + MutexImpl impl_; +}; + +// MutexLock, for serializing execution through a scope. +class RTC_SCOPED_LOCKABLE MutexLock final { + public: + MutexLock(const MutexLock&) = delete; + MutexLock& operator=(const MutexLock&) = delete; + + explicit MutexLock(Mutex* mutex) RTC_EXCLUSIVE_LOCK_FUNCTION(mutex) + : mutex_(mutex) { + mutex->Lock(); + } + ~MutexLock() RTC_UNLOCK_FUNCTION() { mutex_->Unlock(); } + + private: + Mutex* mutex_; +}; + +// A mutex used to protect global variables. Do NOT use for other purposes. +#if defined(WEBRTC_ABSL_MUTEX) +using GlobalMutex = absl::Mutex; +using GlobalMutexLock = absl::MutexLock; +#else +class RTC_LOCKABLE GlobalMutex final { + public: + GlobalMutex(const GlobalMutex&) = delete; + GlobalMutex& operator=(const GlobalMutex&) = delete; + + constexpr explicit GlobalMutex(absl::ConstInitType /*unused*/) + : mutex_locked_(0) {} + + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION(); + void Unlock() RTC_UNLOCK_FUNCTION(); + + private: + std::atomic mutex_locked_; // 0 means lock not taken, 1 means taken. +}; + +// GlobalMutexLock, for serializing execution through a scope. 
+class RTC_SCOPED_LOCKABLE GlobalMutexLock final { + public: + GlobalMutexLock(const GlobalMutexLock&) = delete; + GlobalMutexLock& operator=(const GlobalMutexLock&) = delete; + + explicit GlobalMutexLock(GlobalMutex* mutex) + RTC_EXCLUSIVE_LOCK_FUNCTION(mutex_); + ~GlobalMutexLock() RTC_UNLOCK_FUNCTION(); + + private: + GlobalMutex* mutex_; +}; +#endif // if defined(WEBRTC_ABSL_MUTEX) + +} // namespace webrtc + +#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_H_ diff --git a/rtc_base/synchronization/mutex_abseil.h b/rtc_base/synchronization/mutex_abseil.h new file mode 100644 index 0000000000..4ad1d07eef --- /dev/null +++ b/rtc_base/synchronization/mutex_abseil.h @@ -0,0 +1,37 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_ +#define RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_ + +#include "absl/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +class RTC_LOCKABLE MutexImpl final { + public: + MutexImpl() = default; + MutexImpl(const MutexImpl&) = delete; + MutexImpl& operator=(const MutexImpl&) = delete; + + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { mutex_.Lock(); } + RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + return mutex_.TryLock(); + } + void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); } + + private: + absl::Mutex mutex_; +}; + +} // namespace webrtc + +#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_ diff --git a/rtc_base/synchronization/mutex_benchmark.cc b/rtc_base/synchronization/mutex_benchmark.cc new file mode 100644 index 0000000000..40adca65d8 --- /dev/null +++ b/rtc_base/synchronization/mutex_benchmark.cc @@ -0,0 +1,95 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "benchmark/benchmark.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/unused.h" + +namespace webrtc { + +class PerfTestData { + public: + PerfTestData() : cache_line_barrier_1_(), cache_line_barrier_2_() { + cache_line_barrier_1_[0]++; // Avoid 'is not used'. + cache_line_barrier_2_[0]++; // Avoid 'is not used'. 
+ } + + int AddToCounter(int add) { + MutexLock mu(&mu_); + my_counter_ += add; + return 0; + } + + private: + uint8_t cache_line_barrier_1_[64]; + Mutex mu_; + uint8_t cache_line_barrier_2_[64]; + int64_t my_counter_ = 0; +}; + +void BM_LockWithMutex(benchmark::State& state) { + static PerfTestData test_data; + for (auto s : state) { + RTC_UNUSED(s); + benchmark::DoNotOptimize(test_data.AddToCounter(2)); + } +} + +BENCHMARK(BM_LockWithMutex)->Threads(1); +BENCHMARK(BM_LockWithMutex)->Threads(2); +BENCHMARK(BM_LockWithMutex)->Threads(4); +BENCHMARK(BM_LockWithMutex)->ThreadPerCpu(); + +} // namespace webrtc + +/* + +Results: + +NB when reproducing: Remember to turn of power management features such as CPU +scaling before running! + +pthreads (Linux): +---------------------------------------------------------------------- +Run on (12 X 4500 MHz CPU s) +CPU Caches: + L1 Data 32 KiB (x6) + L1 Instruction 32 KiB (x6) + L2 Unified 1024 KiB (x6) + L3 Unified 8448 KiB (x1) +Load Average: 0.26, 0.28, 0.44 +---------------------------------------------------------------------- +Benchmark Time CPU Iterations +---------------------------------------------------------------------- +BM_LockWithMutex/threads:1 13.4 ns 13.4 ns 52192906 +BM_LockWithMutex/threads:2 44.2 ns 88.4 ns 8189944 +BM_LockWithMutex/threads:4 52.0 ns 198 ns 3743244 +BM_LockWithMutex/threads:12 84.9 ns 944 ns 733524 + +std::mutex performs like the pthread implementation (Linux). 
+ +Abseil (Linux): +---------------------------------------------------------------------- +Run on (12 X 4500 MHz CPU s) +CPU Caches: + L1 Data 32 KiB (x6) + L1 Instruction 32 KiB (x6) + L2 Unified 1024 KiB (x6) + L3 Unified 8448 KiB (x1) +Load Average: 0.27, 0.24, 0.37 +---------------------------------------------------------------------- +Benchmark Time CPU Iterations +---------------------------------------------------------------------- +BM_LockWithMutex/threads:1 15.0 ns 15.0 ns 46550231 +BM_LockWithMutex/threads:2 91.1 ns 182 ns 4059212 +BM_LockWithMutex/threads:4 40.8 ns 131 ns 5496560 +BM_LockWithMutex/threads:12 37.0 ns 130 ns 5377668 + +*/ diff --git a/rtc_base/synchronization/mutex_critical_section.h b/rtc_base/synchronization/mutex_critical_section.h new file mode 100644 index 0000000000..d206794988 --- /dev/null +++ b/rtc_base/synchronization/mutex_critical_section.h @@ -0,0 +1,54 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_ +#define RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_ + +#if defined(WEBRTC_WIN) +// clang-format off +// clang formating would change include order. + +// Include winsock2.h before including to maintain consistency with +// win32.h. To include win32.h directly, it must be broken out into its own +// build target. +#include +#include +#include // must come after windows headers. 
+// clang-format on + +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +class RTC_LOCKABLE MutexImpl final { + public: + MutexImpl() { InitializeCriticalSection(&critical_section_); } + MutexImpl(const MutexImpl&) = delete; + MutexImpl& operator=(const MutexImpl&) = delete; + ~MutexImpl() { DeleteCriticalSection(&critical_section_); } + + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { + EnterCriticalSection(&critical_section_); + } + RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + return TryEnterCriticalSection(&critical_section_) != FALSE; + } + void Unlock() RTC_UNLOCK_FUNCTION() { + LeaveCriticalSection(&critical_section_); + } + + private: + CRITICAL_SECTION critical_section_; +}; + +} // namespace webrtc + +#endif // #if defined(WEBRTC_WIN) +#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_ diff --git a/rtc_base/synchronization/mutex_pthread.h b/rtc_base/synchronization/mutex_pthread.h new file mode 100644 index 0000000000..c9496e72c9 --- /dev/null +++ b/rtc_base/synchronization/mutex_pthread.h @@ -0,0 +1,53 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_ +#define RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_ + +#if defined(WEBRTC_POSIX) + +#include +#if defined(WEBRTC_MAC) +#include +#endif + +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +class RTC_LOCKABLE MutexImpl final { + public: + MutexImpl() { + pthread_mutexattr_t mutex_attribute; + pthread_mutexattr_init(&mutex_attribute); +#if defined(WEBRTC_MAC) + pthread_mutexattr_setpolicy_np(&mutex_attribute, + _PTHREAD_MUTEX_POLICY_FIRSTFIT); +#endif + pthread_mutex_init(&mutex_, &mutex_attribute); + pthread_mutexattr_destroy(&mutex_attribute); + } + MutexImpl(const MutexImpl&) = delete; + MutexImpl& operator=(const MutexImpl&) = delete; + ~MutexImpl() { pthread_mutex_destroy(&mutex_); } + + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { pthread_mutex_lock(&mutex_); } + RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + return pthread_mutex_trylock(&mutex_) == 0; + } + void Unlock() RTC_UNLOCK_FUNCTION() { pthread_mutex_unlock(&mutex_); } + + private: + pthread_mutex_t mutex_; +}; + +} // namespace webrtc +#endif // #if defined(WEBRTC_POSIX) +#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_ diff --git a/rtc_base/synchronization/mutex_unittest.cc b/rtc_base/synchronization/mutex_unittest.cc new file mode 100644 index 0000000000..b8c45d0a8c --- /dev/null +++ b/rtc_base/synchronization/mutex_unittest.cc @@ -0,0 +1,206 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/synchronization/mutex.h" + +#include +#include + +#include +#include +#include +#include +#include + +#include "benchmark/benchmark.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/location.h" +#include "rtc_base/message_handler.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/yield.h" +#include "rtc_base/thread.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::rtc::Event; +using ::rtc::Message; +using ::rtc::MessageHandler; +using ::rtc::Thread; + +constexpr int kNumThreads = 16; + +template +class RTC_LOCKABLE RawMutexLocker { + public: + explicit RawMutexLocker(MutexType& mutex) : mutex_(mutex) {} + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { mutex_.Lock(); } + void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); } + + private: + MutexType& mutex_; +}; + +class RTC_LOCKABLE RawMutexTryLocker { + public: + explicit RawMutexTryLocker(Mutex& mutex) : mutex_(mutex) {} + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { + while (!mutex_.TryLock()) { + YieldCurrentThread(); + } + } + void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); } + + private: + Mutex& mutex_; +}; + +template +class MutexLockLocker { + public: + explicit MutexLockLocker(MutexType& mutex) : mutex_(mutex) {} + void Lock() { lock_ = std::make_unique(&mutex_); } + void Unlock() { lock_ = nullptr; } + + private: + MutexType& mutex_; + std::unique_ptr lock_; +}; + +template +class LockRunner : public rtc::MessageHandlerAutoCleanup { + public: + template + explicit LockRunner(Args... args) + : threads_active_(0), + start_event_(true, false), + done_event_(true, false), + shared_value_(0), + mutex_(args...), + locker_(mutex_) {} + + bool Run() { + // Signal all threads to start. + start_event_.Set(); + + // Wait for all threads to finish. 
+ return done_event_.Wait(kLongTime); + } + + void SetExpectedThreadCount(int count) { threads_active_ = count; } + + int shared_value() { + int shared_value; + locker_.Lock(); + shared_value = shared_value_; + locker_.Unlock(); + return shared_value_; + } + + void OnMessage(Message* msg) override { + ASSERT_TRUE(start_event_.Wait(kLongTime)); + locker_.Lock(); + + EXPECT_EQ(0, shared_value_); + int old = shared_value_; + + // Use a loop to increase the chance of race. If the |locker_| + // implementation is faulty, it would be improbable that the error slips + // through. + for (int i = 0; i < kOperationsToRun; ++i) { + benchmark::DoNotOptimize(++shared_value_); + } + EXPECT_EQ(old + kOperationsToRun, shared_value_); + shared_value_ = 0; + + locker_.Unlock(); + if (threads_active_.fetch_sub(1) == 1) { + done_event_.Set(); + } + } + + private: + static constexpr int kLongTime = 10000; // 10 seconds + static constexpr int kOperationsToRun = 1000; + + std::atomic threads_active_; + Event start_event_; + Event done_event_; + int shared_value_; + MutexType mutex_; + MutexLocker locker_; +}; + +void StartThreads(std::vector>& threads, + MessageHandler* handler) { + for (int i = 0; i < kNumThreads; ++i) { + std::unique_ptr thread(Thread::Create()); + thread->Start(); + thread->Post(RTC_FROM_HERE, handler); + threads.push_back(std::move(thread)); + } +} + +TEST(MutexTest, ProtectsSharedResourceWithMutexAndRawMutexLocker) { + std::vector> threads; + LockRunner> runner; + StartThreads(threads, &runner); + runner.SetExpectedThreadCount(kNumThreads); + EXPECT_TRUE(runner.Run()); + EXPECT_EQ(0, runner.shared_value()); +} + +TEST(MutexTest, ProtectsSharedResourceWithMutexAndRawMutexTryLocker) { + std::vector> threads; + LockRunner runner; + StartThreads(threads, &runner); + runner.SetExpectedThreadCount(kNumThreads); + EXPECT_TRUE(runner.Run()); + EXPECT_EQ(0, runner.shared_value()); +} + +TEST(MutexTest, ProtectsSharedResourceWithMutexAndMutexLocker) { + std::vector> threads; 
+ LockRunner> runner; + StartThreads(threads, &runner); + runner.SetExpectedThreadCount(kNumThreads); + EXPECT_TRUE(runner.Run()); + EXPECT_EQ(0, runner.shared_value()); +} + +TEST(MutexTest, ProtectsSharedResourceWithGlobalMutexAndRawMutexLocker) { + std::vector> threads; + LockRunner> runner(absl::kConstInit); + StartThreads(threads, &runner); + runner.SetExpectedThreadCount(kNumThreads); + EXPECT_TRUE(runner.Run()); + EXPECT_EQ(0, runner.shared_value()); +} + +TEST(MutexTest, ProtectsSharedResourceWithGlobalMutexAndMutexLocker) { + std::vector> threads; + LockRunner> runner( + absl::kConstInit); + StartThreads(threads, &runner); + runner.SetExpectedThreadCount(kNumThreads); + EXPECT_TRUE(runner.Run()); + EXPECT_EQ(0, runner.shared_value()); +} + +TEST(MutexTest, GlobalMutexCanHaveStaticStorageDuration) { + ABSL_CONST_INIT static GlobalMutex global_lock(absl::kConstInit); + global_lock.Lock(); + global_lock.Unlock(); +} + +} // namespace +} // namespace webrtc diff --git a/rtc_base/synchronization/rw_lock_posix.cc b/rtc_base/synchronization/rw_lock_posix.cc deleted file mode 100644 index 15ef3d706e..0000000000 --- a/rtc_base/synchronization/rw_lock_posix.cc +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/synchronization/rw_lock_posix.h" - -#include - -namespace webrtc { - -RWLockPosix::RWLockPosix() : lock_() {} - -RWLockPosix::~RWLockPosix() { - pthread_rwlock_destroy(&lock_); -} - -RWLockPosix* RWLockPosix::Create() { - RWLockPosix* ret_val = new RWLockPosix(); - if (!ret_val->Init()) { - delete ret_val; - return NULL; - } - return ret_val; -} - -bool RWLockPosix::Init() { - return pthread_rwlock_init(&lock_, 0) == 0; -} - -void RWLockPosix::AcquireLockExclusive() { - pthread_rwlock_wrlock(&lock_); -} - -void RWLockPosix::ReleaseLockExclusive() { - pthread_rwlock_unlock(&lock_); -} - -void RWLockPosix::AcquireLockShared() { - pthread_rwlock_rdlock(&lock_); -} - -void RWLockPosix::ReleaseLockShared() { - pthread_rwlock_unlock(&lock_); -} - -} // namespace webrtc diff --git a/rtc_base/synchronization/rw_lock_posix.h b/rtc_base/synchronization/rw_lock_posix.h deleted file mode 100644 index a103fe7714..0000000000 --- a/rtc_base/synchronization/rw_lock_posix.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_SYNCHRONIZATION_RW_LOCK_POSIX_H_ -#define RTC_BASE_SYNCHRONIZATION_RW_LOCK_POSIX_H_ - -#include - -#include "rtc_base/synchronization/rw_lock_wrapper.h" - -namespace webrtc { - -class RWLockPosix : public RWLockWrapper { - public: - static RWLockPosix* Create(); - ~RWLockPosix() override; - - void AcquireLockExclusive() override; - void ReleaseLockExclusive() override; - - void AcquireLockShared() override; - void ReleaseLockShared() override; - - private: - RWLockPosix(); - bool Init(); - - pthread_rwlock_t lock_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_SYNCHRONIZATION_RW_LOCK_POSIX_H_ diff --git a/rtc_base/synchronization/rw_lock_win.cc b/rtc_base/synchronization/rw_lock_win.cc deleted file mode 100644 index 3274c78a94..0000000000 --- a/rtc_base/synchronization/rw_lock_win.cc +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/synchronization/rw_lock_win.h" - -#include "rtc_base/logging.h" - -namespace webrtc { - -RWLockWin::RWLockWin() { - InitializeSRWLock(&lock_); -} - -RWLockWin* RWLockWin::Create() { - return new RWLockWin(); -} - -void RWLockWin::AcquireLockExclusive() { - AcquireSRWLockExclusive(&lock_); -} - -void RWLockWin::ReleaseLockExclusive() { - ReleaseSRWLockExclusive(&lock_); -} - -void RWLockWin::AcquireLockShared() { - AcquireSRWLockShared(&lock_); -} - -void RWLockWin::ReleaseLockShared() { - ReleaseSRWLockShared(&lock_); -} - -} // namespace webrtc diff --git a/rtc_base/synchronization/rw_lock_win.h b/rtc_base/synchronization/rw_lock_win.h deleted file mode 100644 index 43bde1da9b..0000000000 --- a/rtc_base/synchronization/rw_lock_win.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_SYNCHRONIZATION_RW_LOCK_WIN_H_ -#define RTC_BASE_SYNCHRONIZATION_RW_LOCK_WIN_H_ - -#include - -#include "rtc_base/synchronization/rw_lock_wrapper.h" - -namespace webrtc { - -class RWLockWin : public RWLockWrapper { - public: - static RWLockWin* Create(); - - void AcquireLockExclusive() override; - void ReleaseLockExclusive() override; - - void AcquireLockShared() override; - void ReleaseLockShared() override; - - private: - RWLockWin(); - - SRWLOCK lock_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_SYNCHRONIZATION_RW_LOCK_WIN_H_ diff --git a/rtc_base/synchronization/rw_lock_wrapper.cc b/rtc_base/synchronization/rw_lock_wrapper.cc deleted file mode 100644 index fb464192a3..0000000000 --- a/rtc_base/synchronization/rw_lock_wrapper.cc +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/synchronization/rw_lock_wrapper.h" - -#if defined(_WIN32) -#include "rtc_base/synchronization/rw_lock_win.h" -#else -#include "rtc_base/synchronization/rw_lock_posix.h" -#endif - -namespace webrtc { - -RWLockWrapper* RWLockWrapper::CreateRWLock() { -#ifdef _WIN32 - return RWLockWin::Create(); -#else - return RWLockPosix::Create(); -#endif -} - -} // namespace webrtc diff --git a/rtc_base/synchronization/rw_lock_wrapper.h b/rtc_base/synchronization/rw_lock_wrapper.h deleted file mode 100644 index 39f52fca35..0000000000 --- a/rtc_base/synchronization/rw_lock_wrapper.h +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_SYNCHRONIZATION_RW_LOCK_WRAPPER_H_ -#define RTC_BASE_SYNCHRONIZATION_RW_LOCK_WRAPPER_H_ - -#include "rtc_base/thread_annotations.h" - -// Note, Windows pre-Vista version of RW locks are not supported natively. For -// these OSs regular critical sections have been used to approximate RW lock -// functionality and will therefore have worse performance. - -namespace webrtc { - -class RTC_LOCKABLE RWLockWrapper { - public: - static RWLockWrapper* CreateRWLock(); - virtual ~RWLockWrapper() {} - - virtual void AcquireLockExclusive() RTC_EXCLUSIVE_LOCK_FUNCTION() = 0; - virtual void ReleaseLockExclusive() RTC_UNLOCK_FUNCTION() = 0; - - virtual void AcquireLockShared() RTC_SHARED_LOCK_FUNCTION() = 0; - virtual void ReleaseLockShared() RTC_UNLOCK_FUNCTION() = 0; -}; - -// RAII extensions of the RW lock. Prevents Acquire/Release missmatches and -// provides more compact locking syntax. 
-class RTC_SCOPED_LOCKABLE ReadLockScoped { - public: - explicit ReadLockScoped(RWLockWrapper& rw_lock) - RTC_SHARED_LOCK_FUNCTION(rw_lock) - : rw_lock_(rw_lock) { - rw_lock_.AcquireLockShared(); - } - - ~ReadLockScoped() RTC_UNLOCK_FUNCTION() { rw_lock_.ReleaseLockShared(); } - - private: - RWLockWrapper& rw_lock_; -}; - -class RTC_SCOPED_LOCKABLE WriteLockScoped { - public: - explicit WriteLockScoped(RWLockWrapper& rw_lock) - RTC_EXCLUSIVE_LOCK_FUNCTION(rw_lock) - : rw_lock_(rw_lock) { - rw_lock_.AcquireLockExclusive(); - } - - ~WriteLockScoped() RTC_UNLOCK_FUNCTION() { rw_lock_.ReleaseLockExclusive(); } - - private: - RWLockWrapper& rw_lock_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_SYNCHRONIZATION_RW_LOCK_WRAPPER_H_ diff --git a/rtc_base/synchronization/sequence_checker.cc b/rtc_base/synchronization/sequence_checker.cc index d64f32a616..1de26cf0fe 100644 --- a/rtc_base/synchronization/sequence_checker.cc +++ b/rtc_base/synchronization/sequence_checker.cc @@ -13,6 +13,8 @@ #include #endif +#include "rtc_base/strings/string_builder.h" + namespace webrtc { namespace { // On Mac, returns the label of the current dispatch queue; elsewhere, return @@ -24,8 +26,16 @@ const void* GetSystemQueueRef() { return nullptr; #endif } + } // namespace +std::string ExpectationToString(const webrtc::SequenceChecker* checker) { +#if RTC_DCHECK_IS_ON + return checker->ExpectationToString(); +#endif + return std::string(); +} + SequenceCheckerImpl::SequenceCheckerImpl() : attached_(true), valid_thread_(rtc::CurrentThreadRef()), @@ -38,7 +48,7 @@ bool SequenceCheckerImpl::IsCurrent() const { const TaskQueueBase* const current_queue = TaskQueueBase::Current(); const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef(); const void* const current_system_queue = GetSystemQueueRef(); - rtc::CritScope scoped_lock(&lock_); + MutexLock scoped_lock(&lock_); if (!attached_) { // Previously detached. 
attached_ = true; valid_thread_ = current_thread; @@ -56,10 +66,47 @@ bool SequenceCheckerImpl::IsCurrent() const { } void SequenceCheckerImpl::Detach() { - rtc::CritScope scoped_lock(&lock_); + MutexLock scoped_lock(&lock_); attached_ = false; // We don't need to touch the other members here, they will be // reset on the next call to IsCurrent(). } +#if RTC_DCHECK_IS_ON +std::string SequenceCheckerImpl::ExpectationToString() const { + const TaskQueueBase* const current_queue = TaskQueueBase::Current(); + const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef(); + const void* const current_system_queue = GetSystemQueueRef(); + MutexLock scoped_lock(&lock_); + if (!attached_) + return "Checker currently not attached."; + + // The format of the string is meant to compliment the one we have inside of + // FatalLog() (checks.cc). Example: + // + // # Expected: TQ: 0x0 SysQ: 0x7fff69541330 Thread: 0x11dcf6dc0 + // # Actual: TQ: 0x7fa8f0604190 SysQ: 0x7fa8f0604a30 Thread: 0x700006f1a000 + // TaskQueue doesn't match + + rtc::StringBuilder message; + message.AppendFormat( + "# Expected: TQ: %p SysQ: %p Thread: %p\n" + "# Actual: TQ: %p SysQ: %p Thread: %p\n", + valid_queue_, valid_system_queue_, + reinterpret_cast(valid_thread_), current_queue, + current_system_queue, reinterpret_cast(current_thread)); + + if ((valid_queue_ || current_queue) && valid_queue_ != current_queue) { + message << "TaskQueue doesn't match\n"; + } else if (valid_system_queue_ && + valid_system_queue_ != current_system_queue) { + message << "System queue doesn't match\n"; + } else if (!rtc::IsThreadRefEqual(valid_thread_, current_thread)) { + message << "Threads don't match\n"; + } + + return message.Release(); +} +#endif // RTC_DCHECK_IS_ON + } // namespace webrtc diff --git a/rtc_base/synchronization/sequence_checker.h b/rtc_base/synchronization/sequence_checker.h index fe644fa14e..ecf8490cec 100644 --- a/rtc_base/synchronization/sequence_checker.h +++ 
b/rtc_base/synchronization/sequence_checker.h @@ -10,9 +10,11 @@ #ifndef RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_ #define RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_ +#include + #include "api/task_queue/task_queue_base.h" -#include "rtc_base/critical_section.h" #include "rtc_base/platform_thread_types.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" @@ -34,8 +36,13 @@ class RTC_EXPORT SequenceCheckerImpl { // used exclusively on another thread. void Detach(); + // Returns a string that is formatted to match with the error string printed + // by RTC_CHECK() when a condition is not met. + // This is used in conjunction with the RTC_DCHECK_RUN_ON() macro. + std::string ExpectationToString() const; + private: - rtc::CriticalSection lock_; + mutable Mutex lock_; // These are mutable so that IsCurrent can set them. mutable bool attached_ RTC_GUARDED_BY(lock_); mutable rtc::PlatformThreadRef valid_thread_ RTC_GUARDED_BY(lock_); @@ -162,8 +169,19 @@ class RTC_SCOPED_LOCKABLE SequenceCheckerScope { #define RTC_RUN_ON(x) \ RTC_THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(x)) +namespace webrtc { +std::string ExpectationToString(const webrtc::SequenceChecker* checker); + +// Catch-all implementation for types other than explicitly supported above. 
+template +std::string ExpectationToString(const ThreadLikeObject*) { + return std::string(); +} + +} // namespace webrtc + #define RTC_DCHECK_RUN_ON(x) \ webrtc::webrtc_seq_check_impl::SequenceCheckerScope seq_check_scope(x); \ - RTC_DCHECK((x)->IsCurrent()) + RTC_DCHECK((x)->IsCurrent()) << webrtc::ExpectationToString(x) #endif // RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_ diff --git a/rtc_base/synchronization/sequence_checker_unittest.cc b/rtc_base/synchronization/sequence_checker_unittest.cc index 1e62e9759b..6fcb522c54 100644 --- a/rtc_base/synchronization/sequence_checker_unittest.cc +++ b/rtc_base/synchronization/sequence_checker_unittest.cc @@ -31,7 +31,7 @@ class CompileTimeTestForGuardedBy { int CalledOnSequence() RTC_RUN_ON(sequence_checker_) { return guarded_; } void CallMeFromSequence() { - RTC_DCHECK_RUN_ON(&sequence_checker_) << "Should be called on sequence"; + RTC_DCHECK_RUN_ON(&sequence_checker_); guarded_ = 41; } @@ -158,7 +158,12 @@ void TestAnnotationsOnWrongQueue() { } #if RTC_DCHECK_IS_ON -TEST(SequenceCheckerTest, TestAnnotationsOnWrongQueueDebug) { +// Note: Ending the test suite name with 'DeathTest' is important as it causes +// gtest to order this test before any other non-death-tests, to avoid potential +// global process state pollution such as shared worker threads being started +// (e.g. a side effect of calling InitCocoaMultiThreading() on Mac causes one or +// two additional threads to be created). +TEST(SequenceCheckerDeathTest, TestAnnotationsOnWrongQueueDebug) { ASSERT_DEATH({ TestAnnotationsOnWrongQueue(); }, ""); } #else diff --git a/rtc_base/synchronization/yield.cc b/rtc_base/synchronization/yield.cc new file mode 100644 index 0000000000..cbb58d12ab --- /dev/null +++ b/rtc_base/synchronization/yield.cc @@ -0,0 +1,36 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/synchronization/yield.h" + +#if defined(WEBRTC_WIN) +#include +#else +#include +#include +#endif + +namespace webrtc { + +void YieldCurrentThread() { + // TODO(bugs.webrtc.org/11634): use dedicated OS functionality instead of + // sleep for yielding. +#if defined(WEBRTC_WIN) + ::Sleep(0); +#elif defined(WEBRTC_MAC) && defined(RTC_USE_NATIVE_MUTEX_ON_MAC) && \ + !RTC_USE_NATIVE_MUTEX_ON_MAC + sched_yield(); +#else + static const struct timespec ts_null = {0}; + nanosleep(&ts_null, nullptr); +#endif +} + +} // namespace webrtc diff --git a/rtc_base/synchronization/yield.h b/rtc_base/synchronization/yield.h new file mode 100644 index 0000000000..d4f5f99f37 --- /dev/null +++ b/rtc_base/synchronization/yield.h @@ -0,0 +1,20 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef RTC_BASE_SYNCHRONIZATION_YIELD_H_ +#define RTC_BASE_SYNCHRONIZATION_YIELD_H_ + +namespace webrtc { + +// Request rescheduling of threads. 
+void YieldCurrentThread(); + +} // namespace webrtc + +#endif // RTC_BASE_SYNCHRONIZATION_YIELD_H_ diff --git a/rtc_base/synchronization/yield_policy_unittest.cc b/rtc_base/synchronization/yield_policy_unittest.cc index e0c622510a..0bf38f4537 100644 --- a/rtc_base/synchronization/yield_policy_unittest.cc +++ b/rtc_base/synchronization/yield_policy_unittest.cc @@ -20,7 +20,7 @@ namespace rtc { namespace { class MockYieldHandler : public YieldInterface { public: - MOCK_METHOD0(YieldExecution, void()); + MOCK_METHOD(void, YieldExecution, (), (override)); }; } // namespace TEST(YieldPolicyTest, HandlerReceivesYieldSignalWhenSet) { diff --git a/rtc_base/system/BUILD.gn b/rtc_base/system/BUILD.gn index 937fec11e2..bf8cf94e3a 100644 --- a/rtc_base/system/BUILD.gn +++ b/rtc_base/system/BUILD.gn @@ -44,6 +44,10 @@ rtc_source_set("unused") { sources = [ "unused.h" ] } +rtc_source_set("assume") { + sources = [ "assume.h" ] +} + rtc_source_set("rtc_export") { sources = [ "rtc_export.h", @@ -58,19 +62,28 @@ if (is_mac || is_ios) { "cocoa_threading.mm", ] deps = [ "..:checks" ] - libs = [ "Foundation.framework" ] + frameworks = [ "Foundation.framework" ] + } + + rtc_library("gcd_helpers") { + sources = [ + "gcd_helpers.h", + "gcd_helpers.m", + ] + include_dirs = [ "../.." 
] } } rtc_source_set("thread_registry") { sources = [ "thread_registry.h" ] - deps = [ "..:rtc_base_approved" ] + deps = [ + "..:rtc_base_approved", + "../synchronization:mutex", + ] if (is_android && !build_with_chromium) { sources += [ "thread_registry.cc" ] - deps += [ - "../../sdk/android:native_api_stacktrace", - "//third_party/abseil-cpp/absl/base:core_headers", - ] + deps += [ "../../sdk/android:native_api_stacktrace" ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } } diff --git a/rtc_base/system/assume.h b/rtc_base/system/assume.h new file mode 100644 index 0000000000..231c9e18ad --- /dev/null +++ b/rtc_base/system/assume.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYSTEM_ASSUME_H_ +#define RTC_BASE_SYSTEM_ASSUME_H_ + +// Possibly evaluate `p`, promising the compiler that the result is true; the +// compiler is allowed (but not required) to use this information when +// optimizing the code. USE WITH CAUTION! If you promise the compiler things +// that aren't true, it will build a broken binary for you. +// +// As a simple example, the compiler is allowed to transform this +// +// RTC_ASSUME(x == 4); +// return x; +// +// into this +// +// return 4; +// +// It is even allowed to propagate the assumption "backwards in time", if it can +// prove that it must have held at some earlier time. 
For example, the compiler +// is allowed to transform this +// +// int Add(int x, int y) { +// if (x == 17) +// y += 1; +// RTC_ASSUME(x != 17); +// return x + y; +// } +// +// into this +// +// int Add(int x, int y) { +// return x + y; +// } +// +// since if `x` isn't 17 on the third line of the function body, the test of `x +// == 17` on the first line must fail since nothing can modify the local +// variable `x` in between. +// +// The intended use is to allow the compiler to optimize better. For example, +// here we allow the compiler to omit an instruction that ensures correct +// rounding of negative arguments: +// +// int DivBy2(int x) { +// RTC_ASSUME(x >= 0); +// return x / 2; +// } +// +// and here we allow the compiler to possibly omit a null check: +// +// void Delete(int* p) { +// RTC_ASSUME(p != nullptr); +// delete p; +// } +// +// clang-format off +#if defined(__GNUC__) +#define RTC_ASSUME(p) do { if (!(p)) __builtin_unreachable(); } while (0) +#else +#define RTC_ASSUME(p) do {} while (0) +#endif +// clang-format on + +#endif // RTC_BASE_SYSTEM_ASSUME_H_ diff --git a/rtc_base/system/file_wrapper.cc b/rtc_base/system/file_wrapper.cc index 5409d74ef6..2828790e09 100644 --- a/rtc_base/system/file_wrapper.cc +++ b/rtc_base/system/file_wrapper.cc @@ -118,4 +118,10 @@ bool FileWrapper::Close() { return success; } +FILE* FileWrapper::Release() { + FILE* file = file_; + file_ = nullptr; + return file; +} + } // namespace webrtc diff --git a/rtc_base/system/file_wrapper.h b/rtc_base/system/file_wrapper.h index 63d1c17c11..42c463cb15 100644 --- a/rtc_base/system/file_wrapper.h +++ b/rtc_base/system/file_wrapper.h @@ -14,7 +14,7 @@ #include #include -#include "rtc_base/critical_section.h" +#include // Implementation that can read (exclusive) or write from/to a file. @@ -66,6 +66,12 @@ class FileWrapper final { // Calling Close on an already closed file does nothing and returns success. 
bool Close(); + // Releases and returns the wrapped file without closing it. This call passes + // the ownership of the file to the caller, and the wrapper is no longer + // responsible for closing it. Similarly the previously wrapped file is no + // longer available for the wrapper to use in any aspect. + FILE* Release(); + // Write any buffered data to the underlying file. Returns true on success, // false on write error. Note: Flushing when closing, is not required. bool Flush(); diff --git a/rtc_base/system/gcd_helpers.h b/rtc_base/system/gcd_helpers.h new file mode 100644 index 0000000000..a8df0a9d83 --- /dev/null +++ b/rtc_base/system/gcd_helpers.h @@ -0,0 +1,29 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYSTEM_GCD_HELPERS_H_ +#define RTC_BASE_SYSTEM_GCD_HELPERS_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +DISPATCH_RETURNS_RETAINED DISPATCH_WARN_RESULT DISPATCH_NOTHROW dispatch_queue_t +RTCDispatchQueueCreateWithTarget(const char* label, + dispatch_queue_attr_t attr, + dispatch_queue_t target); + +#ifdef __cplusplus +} +#endif + +#endif // RTC_BASE_SYSTEM_GCD_HELPERS_H_ diff --git a/rtc_base/system/gcd_helpers.m b/rtc_base/system/gcd_helpers.m new file mode 100644 index 0000000000..fd9a361fa1 --- /dev/null +++ b/rtc_base/system/gcd_helpers.m @@ -0,0 +1,22 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/system/gcd_helpers.h" + +dispatch_queue_t RTCDispatchQueueCreateWithTarget(const char *label, + dispatch_queue_attr_t attr, + dispatch_queue_t target) { + if (@available(iOS 10, macOS 10.12, tvOS 10, watchOS 3, *)) { + return dispatch_queue_create_with_target(label, attr, target); + } + dispatch_queue_t queue = dispatch_queue_create(label, attr); + dispatch_set_target_queue(queue, target); + return queue; +} diff --git a/rtc_base/system/thread_registry.cc b/rtc_base/system/thread_registry.cc index 86605446c7..b0e83ca1e9 100644 --- a/rtc_base/system/thread_registry.cc +++ b/rtc_base/system/thread_registry.cc @@ -14,9 +14,9 @@ #include #include "absl/base/attributes.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread_types.h" +#include "rtc_base/synchronization/mutex.h" #include "sdk/android/native_api/stacktrace/stacktrace.h" namespace webrtc { @@ -30,7 +30,7 @@ struct ThreadData { // The map of registered threads, and the lock that protects it. We create the // map on first use, and never destroy it. 
-ABSL_CONST_INIT rtc::GlobalLock g_thread_registry_lock; +ABSL_CONST_INIT GlobalMutex g_thread_registry_lock(absl::kConstInit); ABSL_CONST_INIT std::map* g_registered_threads = nullptr; @@ -38,7 +38,7 @@ ABSL_CONST_INIT std::map* ScopedRegisterThreadForDebugging::ScopedRegisterThreadForDebugging( rtc::Location location) { - rtc::GlobalLockScope gls(&g_thread_registry_lock); + GlobalMutexLock gls(&g_thread_registry_lock); if (g_registered_threads == nullptr) { g_registered_threads = new std::map(); @@ -49,14 +49,14 @@ ScopedRegisterThreadForDebugging::ScopedRegisterThreadForDebugging( } ScopedRegisterThreadForDebugging::~ScopedRegisterThreadForDebugging() { - rtc::GlobalLockScope gls(&g_thread_registry_lock); + GlobalMutexLock gls(&g_thread_registry_lock); RTC_DCHECK(g_registered_threads != nullptr); const int num_erased = g_registered_threads->erase(this); RTC_DCHECK_EQ(num_erased, 1); } void PrintStackTracesOfRegisteredThreads() { - rtc::GlobalLockScope gls(&g_thread_registry_lock); + GlobalMutexLock gls(&g_thread_registry_lock); if (g_registered_threads == nullptr) { return; } diff --git a/rtc_base/task_queue_for_test.h b/rtc_base/task_queue_for_test.h index 7844dc4ad9..dd5679bc99 100644 --- a/rtc_base/task_queue_for_test.h +++ b/rtc_base/task_queue_for_test.h @@ -66,6 +66,14 @@ class RTC_LOCKABLE TaskQueueForTest : public rtc::TaskQueue { void SendTask(Closure&& task, rtc::Location loc) { ::webrtc::SendTask(loc, Get(), std::forward(task)); } + + // Wait for the completion of all tasks posted prior to the + // WaitForPreviouslyPostedTasks() call. + void WaitForPreviouslyPostedTasks() { + // Post an empty task on the queue and wait for it to finish, to ensure + // that all already posted tasks on the queue get executed. 
+ SendTask([]() {}, RTC_FROM_HERE); + } }; } // namespace webrtc diff --git a/rtc_base/task_queue_gcd.cc b/rtc_base/task_queue_gcd.cc index cb516cc4cc..2276f635c5 100644 --- a/rtc_base/task_queue_gcd.cc +++ b/rtc_base/task_queue_gcd.cc @@ -24,6 +24,7 @@ #include "api/task_queue/task_queue_base.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/system/gcd_helpers.h" namespace webrtc { namespace { @@ -67,16 +68,16 @@ class TaskQueueGcd : public TaskQueueBase { }; TaskQueueGcd::TaskQueueGcd(absl::string_view queue_name, int gcd_priority) - : queue_(dispatch_queue_create(std::string(queue_name).c_str(), - DISPATCH_QUEUE_SERIAL)), + : queue_(RTCDispatchQueueCreateWithTarget( + std::string(queue_name).c_str(), + DISPATCH_QUEUE_SERIAL, + dispatch_get_global_queue(gcd_priority, 0))), is_active_(true) { RTC_CHECK(queue_); dispatch_set_context(queue_, this); // Assign a finalizer that will delete the queue when the last reference // is released. This may run after the TaskQueue::Delete. 
dispatch_set_finalizer_f(queue_, &DeleteQueue); - - dispatch_set_target_queue(queue_, dispatch_get_global_queue(gcd_priority, 0)); } TaskQueueGcd::~TaskQueueGcd() = default; diff --git a/rtc_base/task_queue_libevent.cc b/rtc_base/task_queue_libevent.cc index 349a5f21fc..38660cd5a2 100644 --- a/rtc_base/task_queue_libevent.cc +++ b/rtc_base/task_queue_libevent.cc @@ -29,11 +29,11 @@ #include "api/task_queue/task_queue_base.h" #include "base/third_party/libevent/event.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/platform_thread.h" #include "rtc_base/platform_thread_types.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" @@ -130,7 +130,7 @@ class TaskQueueLibevent final : public TaskQueueBase { event_base* event_base_; event wakeup_event_; rtc::PlatformThread thread_; - rtc::CriticalSection pending_lock_; + Mutex pending_lock_; absl::InlinedVector, 4> pending_ RTC_GUARDED_BY(pending_lock_); // Holds a list of events pending timers for cleanup when the loop exits. 
@@ -216,7 +216,7 @@ void TaskQueueLibevent::Delete() { void TaskQueueLibevent::PostTask(std::unique_ptr task) { { - rtc::CritScope lock(&pending_lock_); + MutexLock lock(&pending_lock_); bool had_pending_tasks = !pending_.empty(); pending_.push_back(std::move(task)); @@ -282,7 +282,7 @@ void TaskQueueLibevent::OnWakeup(int socket, case kRunTasks: { absl::InlinedVector, 4> tasks; { - rtc::CritScope lock(&me->pending_lock_); + MutexLock lock(&me->pending_lock_); tasks.swap(me->pending_); } RTC_DCHECK(!tasks.empty()); diff --git a/rtc_base/task_queue_stdlib.cc b/rtc_base/task_queue_stdlib.cc index 7052f7c6db..5de634512e 100644 --- a/rtc_base/task_queue_stdlib.cc +++ b/rtc_base/task_queue_stdlib.cc @@ -22,10 +22,10 @@ #include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" @@ -97,7 +97,7 @@ class TaskQueueStdlib final : public TaskQueueBase { // tasks (including delayed tasks). rtc::PlatformThread thread_; - rtc::CriticalSection pending_lock_; + Mutex pending_lock_; // Indicates if the worker thread needs to shutdown now. 
bool thread_should_quit_ RTC_GUARDED_BY(pending_lock_){false}; @@ -135,7 +135,7 @@ void TaskQueueStdlib::Delete() { RTC_DCHECK(!IsCurrent()); { - rtc::CritScope lock(&pending_lock_); + MutexLock lock(&pending_lock_); thread_should_quit_ = true; } @@ -148,7 +148,7 @@ void TaskQueueStdlib::Delete() { void TaskQueueStdlib::PostTask(std::unique_ptr task) { { - rtc::CritScope lock(&pending_lock_); + MutexLock lock(&pending_lock_); OrderId order = thread_posting_order_++; pending_queue_.push(std::pair>( @@ -166,7 +166,7 @@ void TaskQueueStdlib::PostDelayedTask(std::unique_ptr task, delay.next_fire_at_ms_ = fire_at; { - rtc::CritScope lock(&pending_lock_); + MutexLock lock(&pending_lock_); delay.order_ = ++thread_posting_order_; delayed_queue_[delay] = std::move(task); } @@ -179,7 +179,7 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { auto tick = rtc::TimeMillis(); - rtc::CritScope lock(&pending_lock_); + MutexLock lock(&pending_lock_); if (thread_should_quit_) { result.final_task_ = true; diff --git a/rtc_base/task_queue_win.cc b/rtc_base/task_queue_win.cc index 8c11b8764a..5eb3776cea 100644 --- a/rtc_base/task_queue_win.cc +++ b/rtc_base/task_queue_win.cc @@ -33,12 +33,12 @@ #include "api/task_queue/task_queue_base.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace { @@ -205,7 +205,7 @@ class TaskQueueWin : public TaskQueueBase { timer_tasks_; UINT_PTR timer_id_ = 0; WorkerThread thread_; - rtc::CriticalSection pending_lock_; + Mutex pending_lock_; std::queue> pending_ RTC_GUARDED_BY(pending_lock_); HANDLE in_queue_; @@ -235,7 +235,7 @@ void TaskQueueWin::Delete() { } void TaskQueueWin::PostTask(std::unique_ptr task) { - rtc::CritScope lock(&pending_lock_); 
+ MutexLock lock(&pending_lock_); pending_.push(std::move(task)); ::SetEvent(in_queue_); } @@ -262,7 +262,7 @@ void TaskQueueWin::RunPendingTasks() { while (true) { std::unique_ptr task; { - rtc::CritScope lock(&pending_lock_); + MutexLock lock(&pending_lock_); if (pending_.empty()) break; task = std::move(pending_.front()); diff --git a/rtc_base/task_utils/BUILD.gn b/rtc_base/task_utils/BUILD.gn index 2e7d53ceb2..54f9a048f0 100644 --- a/rtc_base/task_utils/BUILD.gn +++ b/rtc_base/task_utils/BUILD.gn @@ -21,17 +21,48 @@ rtc_library("repeating_task") { "../../api/task_queue", "../../api/units:time_delta", "../../api/units:timestamp", + "../../system_wrappers:system_wrappers", + "../synchronization:sequence_checker", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +} + +rtc_library("pending_task_safety_flag") { + sources = [ + "pending_task_safety_flag.cc", + "pending_task_safety_flag.h", + ] + deps = [ + "..:checks", + "..:refcount", + "..:thread_checker", + "../../api:scoped_refptr", "../synchronization:sequence_checker", - "//third_party/abseil-cpp/absl/memory", ] } rtc_source_set("to_queued_task") { sources = [ "to_queued_task.h" ] - deps = [ "../../api/task_queue" ] + deps = [ + ":pending_task_safety_flag", + "../../api/task_queue", + ] } if (rtc_include_tests) { + rtc_library("pending_task_safety_flag_unittests") { + testonly = true + sources = [ "pending_task_safety_flag_unittest.cc" ] + deps = [ + ":pending_task_safety_flag", + ":to_queued_task", + "..:rtc_base_approved", + "..:rtc_task_queue", + "..:task_queue_for_test", + "../../test:test_support", + ] + } + rtc_library("repeating_task_unittests") { testonly = true sources = [ "repeating_task_unittest.cc" ] @@ -51,7 +82,7 @@ if (rtc_include_tests) { ":to_queued_task", "../../api/task_queue", "../../test:test_support", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } } diff --git a/rtc_base/task_utils/pending_task_safety_flag.cc 
b/rtc_base/task_utils/pending_task_safety_flag.cc new file mode 100644 index 0000000000..4be2131f3f --- /dev/null +++ b/rtc_base/task_utils/pending_task_safety_flag.cc @@ -0,0 +1,32 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/task_utils/pending_task_safety_flag.h" + +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +// static +rtc::scoped_refptr PendingTaskSafetyFlag::Create() { + return new rtc::RefCountedObject(); +} + +void PendingTaskSafetyFlag::SetNotAlive() { + RTC_DCHECK_RUN_ON(&main_sequence_); + alive_ = false; +} + +bool PendingTaskSafetyFlag::alive() const { + RTC_DCHECK_RUN_ON(&main_sequence_); + return alive_; +} + +} // namespace webrtc diff --git a/rtc_base/task_utils/pending_task_safety_flag.h b/rtc_base/task_utils/pending_task_safety_flag.h new file mode 100644 index 0000000000..580fb3f912 --- /dev/null +++ b/rtc_base/task_utils/pending_task_safety_flag.h @@ -0,0 +1,85 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ +#define RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ + +#include "api/scoped_refptr.h" +#include "rtc_base/checks.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +// Use this flag to drop pending tasks that have been posted to the "main" +// thread/TQ and end up running after the owning instance has been +// deleted. The owning instance signals deletion by calling SetNotAlive() from +// its destructor. +// +// When posting a task, post a copy (capture by-value in a lambda) of the flag +// instance and before performing the work, check the |alive()| state. Abort if +// alive() returns |false|: +// +// // Running outside of the main thread. +// my_task_queue_->PostTask(ToQueuedTask( +// [safety = pending_task_safety_flag_, this]() { +// // Now running on the main thread. +// if (!safety->alive()) +// return; +// MyMethod(); +// })); +// +// Or implicitly by letting ToQueuedTask do the checking: +// +// // Running outside of the main thread. +// my_task_queue_->PostTask(ToQueuedTask(pending_task_safety_flag_, +// [this]() { MyMethod(); })); +// +// Note that checking the state only works on the construction/destruction +// thread of the ReceiveStatisticsProxy instance. +class PendingTaskSafetyFlag : public rtc::RefCountInterface { + public: + static rtc::scoped_refptr Create(); + + ~PendingTaskSafetyFlag() = default; + + void SetNotAlive(); + bool alive() const; + + protected: + PendingTaskSafetyFlag() = default; + + private: + bool alive_ = true; + SequenceChecker main_sequence_; +}; + +// Makes using PendingTaskSafetyFlag very simple. Automatic PTSF creation +// and signalling of destruction when the ScopedTaskSafety instance goes out +// of scope. +// Should be used by the class that wants tasks dropped after destruction. 
+// Requirements are that the instance be constructed and destructed on +// the same thread as the potentially dropped tasks would be running on. +class ScopedTaskSafety { + public: + ScopedTaskSafety() = default; + ~ScopedTaskSafety() { flag_->SetNotAlive(); } + + // Returns a new reference to the safety flag. + rtc::scoped_refptr flag() const { return flag_; } + + private: + rtc::scoped_refptr flag_ = + PendingTaskSafetyFlag::Create(); +}; + +} // namespace webrtc + +#endif // RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ diff --git a/rtc_base/task_utils/pending_task_safety_flag_unittest.cc b/rtc_base/task_utils/pending_task_safety_flag_unittest.cc new file mode 100644 index 0000000000..6df2fe2ffb --- /dev/null +++ b/rtc_base/task_utils/pending_task_safety_flag_unittest.cc @@ -0,0 +1,163 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/task_utils/pending_task_safety_flag.h" + +#include + +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "rtc_base/task_queue_for_test.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { +using ::testing::AtLeast; +using ::testing::Invoke; +using ::testing::MockFunction; +using ::testing::NiceMock; +using ::testing::Return; +} // namespace + +TEST(PendingTaskSafetyFlagTest, Basic) { + rtc::scoped_refptr safety_flag; + { + // Scope for the |owner| instance. 
+ class Owner { + public: + Owner() = default; + ~Owner() { flag_->SetNotAlive(); } + + rtc::scoped_refptr flag_ = + PendingTaskSafetyFlag::Create(); + } owner; + EXPECT_TRUE(owner.flag_->alive()); + safety_flag = owner.flag_; + EXPECT_TRUE(safety_flag->alive()); + } + // |owner| now out of scope. + EXPECT_FALSE(safety_flag->alive()); +} + +TEST(PendingTaskSafetyFlagTest, BasicScoped) { + rtc::scoped_refptr safety_flag; + { + struct Owner { + ScopedTaskSafety safety; + } owner; + safety_flag = owner.safety.flag(); + EXPECT_TRUE(safety_flag->alive()); + } + // |owner| now out of scope. + EXPECT_FALSE(safety_flag->alive()); +} + +TEST(PendingTaskSafetyFlagTest, PendingTaskSuccess) { + TaskQueueForTest tq1("OwnerHere"); + TaskQueueForTest tq2("OwnerNotHere"); + + class Owner { + public: + Owner() : tq_main_(TaskQueueBase::Current()) { RTC_DCHECK(tq_main_); } + ~Owner() { + RTC_DCHECK(tq_main_->IsCurrent()); + flag_->SetNotAlive(); + } + + void DoStuff() { + RTC_DCHECK(!tq_main_->IsCurrent()); + tq_main_->PostTask(ToQueuedTask([safe = flag_, this]() { + if (!safe->alive()) + return; + stuff_done_ = true; + })); + } + + bool stuff_done() const { return stuff_done_; } + + private: + TaskQueueBase* const tq_main_; + bool stuff_done_ = false; + rtc::scoped_refptr flag_{ + PendingTaskSafetyFlag::Create()}; + }; + + std::unique_ptr owner; + tq1.SendTask( + [&owner]() { + owner.reset(new Owner()); + EXPECT_FALSE(owner->stuff_done()); + }, + RTC_FROM_HERE); + ASSERT_TRUE(owner); + tq2.SendTask([&owner]() { owner->DoStuff(); }, RTC_FROM_HERE); + tq1.SendTask( + [&owner]() { + EXPECT_TRUE(owner->stuff_done()); + owner.reset(); + }, + RTC_FROM_HERE); + ASSERT_FALSE(owner); +} + +TEST(PendingTaskSafetyFlagTest, PendingTaskDropped) { + TaskQueueForTest tq1("OwnerHere"); + TaskQueueForTest tq2("OwnerNotHere"); + + class Owner { + public: + explicit Owner(bool* stuff_done) + : tq_main_(TaskQueueBase::Current()), stuff_done_(stuff_done) { + RTC_DCHECK(tq_main_); + *stuff_done_ = 
false; + } + ~Owner() { + RTC_DCHECK(tq_main_->IsCurrent()); + } + + void DoStuff() { + RTC_DCHECK(!tq_main_->IsCurrent()); + tq_main_->PostTask( + ToQueuedTask(safety_, [this]() { *stuff_done_ = true; })); + } + + private: + TaskQueueBase* const tq_main_; + bool* const stuff_done_; + ScopedTaskSafety safety_; + }; + + std::unique_ptr owner; + bool stuff_done = false; + tq1.SendTask([&owner, &stuff_done]() { owner.reset(new Owner(&stuff_done)); }, + RTC_FROM_HERE); + ASSERT_TRUE(owner); + // Queue up a task on tq1 that will execute before the 'DoStuff' task + // can, and delete the |owner| before the 'stuff' task can execute. + rtc::Event blocker; + tq1.PostTask([&blocker, &owner]() { + blocker.Wait(rtc::Event::kForever); + owner.reset(); + }); + + // Queue up a DoStuff... + tq2.SendTask([&owner]() { owner->DoStuff(); }, RTC_FROM_HERE); + + ASSERT_TRUE(owner); + blocker.Set(); + + // Run an empty task on tq1 to flush all the queued tasks. + tq1.SendTask([]() {}, RTC_FROM_HERE); + ASSERT_FALSE(owner); + EXPECT_FALSE(stuff_done); +} +} // namespace webrtc diff --git a/rtc_base/task_utils/repeating_task.cc b/rtc_base/task_utils/repeating_task.cc index aeeb7c071a..574e6331f1 100644 --- a/rtc_base/task_utils/repeating_task.cc +++ b/rtc_base/task_utils/repeating_task.cc @@ -17,10 +17,13 @@ namespace webrtc { namespace webrtc_repeating_task_impl { + RepeatingTaskBase::RepeatingTaskBase(TaskQueueBase* task_queue, - TimeDelta first_delay) + TimeDelta first_delay, + Clock* clock) : task_queue_(task_queue), - next_run_time_(Timestamp::us(rtc::TimeMicros()) + first_delay) {} + clock_(clock), + next_run_time_(clock_->CurrentTime() + first_delay) {} RepeatingTaskBase::~RepeatingTaskBase() = default; @@ -38,7 +41,7 @@ bool RepeatingTaskBase::Run() { return true; RTC_DCHECK(delay.IsFinite()); - TimeDelta lost_time = Timestamp::us(rtc::TimeMicros()) - next_run_time_; + TimeDelta lost_time = clock_->CurrentTime() - next_run_time_; next_run_time_ += delay; delay -= lost_time; delay = 
std::max(delay, TimeDelta::Zero()); @@ -51,6 +54,7 @@ bool RepeatingTaskBase::Run() { } void RepeatingTaskBase::Stop() { + RTC_DCHECK_RUN_ON(task_queue_); RTC_DCHECK(next_run_time_.IsFinite()); next_run_time_ = Timestamp::PlusInfinity(); } @@ -75,7 +79,6 @@ RepeatingTaskHandle::RepeatingTaskHandle( void RepeatingTaskHandle::Stop() { if (repeating_task_) { - RTC_DCHECK_RUN_ON(repeating_task_->task_queue_); repeating_task_->Stop(); repeating_task_ = nullptr; } diff --git a/rtc_base/task_utils/repeating_task.h b/rtc_base/task_utils/repeating_task.h index 1545d6f757..487b7d19d4 100644 --- a/rtc_base/task_utils/repeating_task.h +++ b/rtc_base/task_utils/repeating_task.h @@ -19,8 +19,7 @@ #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/synchronization/sequence_checker.h" -#include "rtc_base/thread_checker.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -29,17 +28,20 @@ class RepeatingTaskHandle; namespace webrtc_repeating_task_impl { class RepeatingTaskBase : public QueuedTask { public: - RepeatingTaskBase(TaskQueueBase* task_queue, TimeDelta first_delay); + RepeatingTaskBase(TaskQueueBase* task_queue, + TimeDelta first_delay, + Clock* clock); ~RepeatingTaskBase() override; - virtual TimeDelta RunClosure() = 0; + + void Stop(); private: - friend class ::webrtc::RepeatingTaskHandle; + virtual TimeDelta RunClosure() = 0; bool Run() final; - void Stop() RTC_RUN_ON(task_queue_); TaskQueueBase* const task_queue_; + Clock* const clock_; // This is always finite, except for the special case where it's PlusInfinity // to signal that the task should stop. 
Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_); @@ -51,8 +53,9 @@ class RepeatingTaskImpl final : public RepeatingTaskBase { public: RepeatingTaskImpl(TaskQueueBase* task_queue, TimeDelta first_delay, - Closure&& closure) - : RepeatingTaskBase(task_queue, first_delay), + Closure&& closure, + Clock* clock) + : RepeatingTaskBase(task_queue, first_delay, clock), closure_(std::forward(closure)) { static_assert( std::is_same::type>::type closure_; }; @@ -92,10 +95,11 @@ class RepeatingTaskHandle { // repeated task is owned by the TaskQueue. template static RepeatingTaskHandle Start(TaskQueueBase* task_queue, - Closure&& closure) { + Closure&& closure, + Clock* clock = Clock::GetRealTimeClock()) { auto repeating_task = std::make_unique< webrtc_repeating_task_impl::RepeatingTaskImpl>( - task_queue, TimeDelta::Zero(), std::forward(closure)); + task_queue, TimeDelta::Zero(), std::forward(closure), clock); auto* repeating_task_ptr = repeating_task.get(); task_queue->PostTask(std::move(repeating_task)); return RepeatingTaskHandle(repeating_task_ptr); @@ -104,12 +108,14 @@ class RepeatingTaskHandle { // DelayedStart is equivalent to Start except that the first invocation of the // closure will be delayed by the given amount. 
template - static RepeatingTaskHandle DelayedStart(TaskQueueBase* task_queue, - TimeDelta first_delay, - Closure&& closure) { + static RepeatingTaskHandle DelayedStart( + TaskQueueBase* task_queue, + TimeDelta first_delay, + Closure&& closure, + Clock* clock = Clock::GetRealTimeClock()) { auto repeating_task = std::make_unique< webrtc_repeating_task_impl::RepeatingTaskImpl>( - task_queue, first_delay, std::forward(closure)); + task_queue, first_delay, std::forward(closure), clock); auto* repeating_task_ptr = repeating_task.get(); task_queue->PostDelayedTask(std::move(repeating_task), first_delay.ms()); return RepeatingTaskHandle(repeating_task_ptr); diff --git a/rtc_base/task_utils/repeating_task_unittest.cc b/rtc_base/task_utils/repeating_task_unittest.cc index 469ee316f3..2fb15d1e5a 100644 --- a/rtc_base/task_utils/repeating_task_unittest.cc +++ b/rtc_base/task_utils/repeating_task_unittest.cc @@ -30,7 +30,7 @@ using ::testing::MockFunction; using ::testing::NiceMock; using ::testing::Return; -constexpr TimeDelta kTimeout = TimeDelta::Millis<1000>(); +constexpr TimeDelta kTimeout = TimeDelta::Millis(1000); void Sleep(TimeDelta time_delta) { // Note that Chromium style guide prohibits use of and in @@ -40,8 +40,23 @@ void Sleep(TimeDelta time_delta) { class MockClosure { public: - MOCK_METHOD0(Call, TimeDelta()); - MOCK_METHOD0(Delete, void()); + MOCK_METHOD(TimeDelta, Call, ()); + MOCK_METHOD(void, Delete, ()); +}; + +class MockTaskQueue : public TaskQueueBase { + public: + MockTaskQueue() : task_queue_setter_(this) {} + + MOCK_METHOD(void, Delete, (), (override)); + MOCK_METHOD(void, PostTask, (std::unique_ptr task), (override)); + MOCK_METHOD(void, + PostDelayedTask, + (std::unique_ptr task, uint32_t milliseconds), + (override)); + + private: + CurrentTaskQueueSetter task_queue_setter_; }; class MoveOnlyClosure { @@ -63,8 +78,8 @@ class MoveOnlyClosure { } // namespace TEST(RepeatingTaskTest, TaskIsStoppedOnStop) { - const TimeDelta kShortInterval = 
TimeDelta::ms(50); - const TimeDelta kLongInterval = TimeDelta::ms(200); + const TimeDelta kShortInterval = TimeDelta::Millis(50); + const TimeDelta kLongInterval = TimeDelta::Millis(200); const int kShortIntervalCount = 4; const int kMargin = 1; @@ -90,10 +105,10 @@ TEST(RepeatingTaskTest, TaskIsStoppedOnStop) { TEST(RepeatingTaskTest, CompensatesForLongRunTime) { const int kTargetCount = 20; const int kTargetCountMargin = 2; - const TimeDelta kRepeatInterval = TimeDelta::ms(2); + const TimeDelta kRepeatInterval = TimeDelta::Millis(2); // Sleeping inside the task for longer than the repeat interval once, should // be compensated for by repeating the task faster to catch up. - const TimeDelta kSleepDuration = TimeDelta::ms(20); + const TimeDelta kSleepDuration = TimeDelta::Millis(20); const int kSleepAtCount = 3; std::atomic_int counter(0); @@ -115,10 +130,10 @@ TEST(RepeatingTaskTest, CompensatesForShortRunTime) { RepeatingTaskHandle::Start(task_queue.Get(), [&] { ++counter; // Sleeping for the 100 ms should be compensated. - Sleep(TimeDelta::ms(100)); - return TimeDelta::ms(300); + Sleep(TimeDelta::Millis(100)); + return TimeDelta::Millis(300); }); - Sleep(TimeDelta::ms(400)); + Sleep(TimeDelta::Millis(400)); // We expect that the task have been called twice, once directly at Start and // once after 300 ms has passed. 
@@ -132,7 +147,7 @@ TEST(RepeatingTaskTest, CancelDelayedTaskBeforeItRuns) { EXPECT_CALL(mock, Delete).WillOnce(Invoke([&done] { done.Set(); })); TaskQueueForTest task_queue("queue"); auto handle = RepeatingTaskHandle::DelayedStart( - task_queue.Get(), TimeDelta::ms(100), MoveOnlyClosure(&mock)); + task_queue.Get(), TimeDelta::Millis(100), MoveOnlyClosure(&mock)); task_queue.PostTask( [handle = std::move(handle)]() mutable { handle.Stop(); }); EXPECT_TRUE(done.Wait(kTimeout.ms())); @@ -141,7 +156,7 @@ TEST(RepeatingTaskTest, CancelDelayedTaskBeforeItRuns) { TEST(RepeatingTaskTest, CancelTaskAfterItRuns) { rtc::Event done; MockClosure mock; - EXPECT_CALL(mock, Call).WillOnce(Return(TimeDelta::ms(100))); + EXPECT_CALL(mock, Call).WillOnce(Return(TimeDelta::Millis(100))); EXPECT_CALL(mock, Delete).WillOnce(Invoke([&done] { done.Set(); })); TaskQueueForTest task_queue("queue"); auto handle = @@ -159,10 +174,10 @@ TEST(RepeatingTaskTest, TaskCanStopItself) { handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] { ++counter; handle.Stop(); - return TimeDelta::ms(2); + return TimeDelta::Millis(2); }); }); - Sleep(TimeDelta::ms(10)); + Sleep(TimeDelta::Millis(10)); EXPECT_EQ(counter.load(), 1); } @@ -184,8 +199,8 @@ TEST(RepeatingTaskTest, StartPeriodicTask) { MockFunction closure; rtc::Event done; EXPECT_CALL(closure, Call()) - .WillOnce(Return(TimeDelta::ms(20))) - .WillOnce(Return(TimeDelta::ms(20))) + .WillOnce(Return(TimeDelta::Millis(20))) + .WillOnce(Return(TimeDelta::Millis(20))) .WillOnce(Invoke([&done] { done.Set(); return kTimeout; @@ -199,7 +214,7 @@ TEST(RepeatingTaskTest, Example) { class ObjectOnTaskQueue { public: void DoPeriodicTask() {} - TimeDelta TimeUntilNextRun() { return TimeDelta::ms(100); } + TimeDelta TimeUntilNextRun() { return TimeDelta::Millis(100); } void StartPeriodicTask(RepeatingTaskHandle* handle, TaskQueueBase* task_queue) { *handle = RepeatingTaskHandle::Start(task_queue, [this] { @@ -228,4 +243,37 @@ TEST(RepeatingTaskTest, Example) 
{ // task queue destruction and running the desctructor closure. } +TEST(RepeatingTaskTest, ClockIntegration) { + std::unique_ptr delayed_task; + uint32_t expected_ms = 0; + SimulatedClock clock(Timestamp::Millis(0)); + + NiceMock task_queue; + ON_CALL(task_queue, PostDelayedTask) + .WillByDefault( + Invoke([&delayed_task, &expected_ms](std::unique_ptr task, + uint32_t milliseconds) { + EXPECT_EQ(milliseconds, expected_ms); + delayed_task = std::move(task); + })); + + expected_ms = 100; + RepeatingTaskHandle handle = RepeatingTaskHandle::DelayedStart( + &task_queue, TimeDelta::Millis(100), + [&clock]() { + EXPECT_EQ(Timestamp::Millis(100), clock.CurrentTime()); + // Simulate work happening for 10ms. + clock.AdvanceTimeMilliseconds(10); + return TimeDelta::Millis(100); + }, + &clock); + + clock.AdvanceTimeMilliseconds(100); + QueuedTask* task_to_run = delayed_task.release(); + expected_ms = 90; + EXPECT_FALSE(task_to_run->Run()); + EXPECT_NE(nullptr, delayed_task.get()); + handle.Stop(); +} + } // namespace webrtc diff --git a/rtc_base/task_utils/to_queued_task.h b/rtc_base/task_utils/to_queued_task.h index ab5e2c9235..07ab0ebe26 100644 --- a/rtc_base/task_utils/to_queued_task.h +++ b/rtc_base/task_utils/to_queued_task.h @@ -16,6 +16,7 @@ #include #include "api/task_queue/queued_task.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace webrtc { namespace webrtc_new_closure_impl { @@ -35,6 +36,25 @@ class ClosureTask : public QueuedTask { typename std::decay::type closure_; }; +template +class SafetyClosureTask : public QueuedTask { + public: + explicit SafetyClosureTask(rtc::scoped_refptr safety, + Closure&& closure) + : closure_(std::forward(closure)), + safety_flag_(std::move(safety)) {} + + private: + bool Run() override { + if (safety_flag_->alive()) + closure_(); + return true; + } + + typename std::decay::type closure_; + rtc::scoped_refptr safety_flag_; +}; + // Extends ClosureTask to also allow specifying cleanup code. 
// This is useful when using lambdas if guaranteeing cleanup, even if a task // was dropped (queue is too full), is required. @@ -60,7 +80,26 @@ std::unique_ptr ToQueuedTask(Closure&& closure) { std::forward(closure)); } -template +template +std::unique_ptr ToQueuedTask( + rtc::scoped_refptr safety, + Closure&& closure) { + return std::make_unique>( + std::move(safety), std::forward(closure)); +} + +template +std::unique_ptr ToQueuedTask(const ScopedTaskSafety& safety, + Closure&& closure) { + return ToQueuedTask(safety.flag(), std::forward(closure)); +} + +template ::type>::type, + ScopedTaskSafety>::value>::type* = nullptr> std::unique_ptr ToQueuedTask(Closure&& closure, Cleanup&& cleanup) { return std::make_unique< webrtc_new_closure_impl::ClosureTaskWithCleanup>( diff --git a/rtc_base/task_utils/to_queued_task_unittest.cc b/rtc_base/task_utils/to_queued_task_unittest.cc index 45dec773fc..261b9e891b 100644 --- a/rtc_base/task_utils/to_queued_task_unittest.cc +++ b/rtc_base/task_utils/to_queued_task_unittest.cc @@ -126,5 +126,23 @@ TEST(ToQueuedTaskTest, AcceptsMoveOnlyCleanup) { RunTask(std::move(task)); } +TEST(ToQueuedTaskTest, PendingTaskSafetyFlag) { + rtc::scoped_refptr flag = + PendingTaskSafetyFlag::Create(); + + int count = 0; + // Create two identical tasks that increment the |count|. + auto task1 = ToQueuedTask(flag, [&count]() { ++count; }); + auto task2 = ToQueuedTask(flag, [&count]() { ++count; }); + + EXPECT_EQ(0, count); + RunTask(std::move(task1)); + EXPECT_EQ(1, count); + flag->SetNotAlive(); + // Now task2 should actually not run. 
+ RunTask(std::move(task2)); + EXPECT_EQ(1, count); +} + } // namespace } // namespace webrtc diff --git a/rtc_base/test_client.cc b/rtc_base/test_client.cc index e5aa9d7987..f23ac2aec0 100644 --- a/rtc_base/test_client.cc +++ b/rtc_base/test_client.cc @@ -75,7 +75,7 @@ std::unique_ptr TestClient::NextPacket(int timeout_ms) { int64_t end = TimeAfter(timeout_ms); while (TimeUntil(end) > 0) { { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); if (packets_.size() != 0) { break; } @@ -85,7 +85,7 @@ std::unique_ptr TestClient::NextPacket(int timeout_ms) { // Return the first packet placed in the queue. std::unique_ptr packet; - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); if (packets_.size() > 0) { packet = std::move(packets_.front()); packets_.erase(packets_.begin()); @@ -149,7 +149,7 @@ void TestClient::OnPacket(AsyncPacketSocket* socket, size_t size, const SocketAddress& remote_addr, const int64_t& packet_time_us) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); packets_.push_back( std::make_unique(remote_addr, buf, size, packet_time_us)); } diff --git a/rtc_base/test_client.h b/rtc_base/test_client.h index b45cf005bb..6989fe1d57 100644 --- a/rtc_base/test_client.h +++ b/rtc_base/test_client.h @@ -16,8 +16,8 @@ #include "rtc_base/async_udp_socket.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/fake_clock.h" +#include "rtc_base/synchronization/mutex.h" namespace rtc { @@ -105,7 +105,7 @@ class TestClient : public sigslot::has_slots<> { void AdvanceTime(int ms); ThreadProcessingFakeClock* fake_clock_ = nullptr; - CriticalSection crit_; + webrtc::Mutex mutex_; std::unique_ptr socket_; std::vector> packets_; int ready_to_send_count_ = 0; diff --git a/rtc_base/thread.cc b/rtc_base/thread.cc index 00a582cc06..32449020c5 100644 --- a/rtc_base/thread.cc +++ b/rtc_base/thread.cc @@ -31,9 +31,12 @@ #include "absl/algorithm/container.h" #include "rtc_base/atomic_ops.h" #include 
"rtc_base/checks.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" +#include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/null_socket_server.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -71,7 +74,7 @@ const int kSlowDispatchLoggingThreshold = 50; // 50 ms class MessageHandlerWithTask final : public MessageHandler { public: - MessageHandlerWithTask() = default; + MessageHandlerWithTask() {} void OnMessage(Message* msg) override { static_cast(msg->pdata)->Run(); @@ -86,8 +89,8 @@ class MessageHandlerWithTask final : public MessageHandler { class RTC_SCOPED_LOCKABLE MarkProcessingCritScope { public: - MarkProcessingCritScope(const CriticalSection* cs, size_t* processing) - RTC_EXCLUSIVE_LOCK_FUNCTION(cs) + MarkProcessingCritScope(const RecursiveCriticalSection* cs, + size_t* processing) RTC_EXCLUSIVE_LOCK_FUNCTION(cs) : cs_(cs), processing_(processing) { cs_->Enter(); *processing_ += 1; @@ -99,7 +102,7 @@ class RTC_SCOPED_LOCKABLE MarkProcessingCritScope { } private: - const CriticalSection* const cs_; + const RecursiveCriticalSection* const cs_; size_t* processing_; RTC_DISALLOW_COPY_AND_ASSIGN(MarkProcessingCritScope); @@ -142,9 +145,47 @@ void ThreadManager::RemoveInternal(Thread* message_queue) { if (iter != message_queues_.end()) { message_queues_.erase(iter); } +#if RTC_DCHECK_IS_ON + RemoveFromSendGraph(message_queue); +#endif + } +} + +#if RTC_DCHECK_IS_ON +void ThreadManager::RemoveFromSendGraph(Thread* thread) { + for (auto it = send_graph_.begin(); it != send_graph_.end();) { + if (it->first == thread) { + it = send_graph_.erase(it); + } else { + it->second.erase(thread); + ++it; + } } } +void ThreadManager::RegisterSendAndCheckForCycles(Thread* source, + Thread* target) { + RTC_DCHECK(source); + RTC_DCHECK(target); + + CritScope cs(&crit_); + 
std::deque all_targets({target}); + // We check the pre-existing who-sends-to-who graph for any path from target + // to source. This loop is guaranteed to terminate because per the send graph + // invariant, there are no cycles in the graph. + for (size_t i = 0; i < all_targets.size(); i++) { + const auto& targets = send_graph_[all_targets[i]]; + all_targets.insert(all_targets.end(), targets.begin(), targets.end()); + } + RTC_CHECK_EQ(absl::c_count(all_targets, source), 0) + << " send loop between " << source->name() << " and " << target->name(); + + // We may now insert source -> target without creating a cycle, since there + // was no path from target to source per the prior CHECK. + send_graph_[source].insert(target); +} +#endif + // static void ThreadManager::Clear(MessageHandler* handler) { return Instance()->ClearInternal(handler); @@ -260,6 +301,21 @@ void ThreadManager::SetCurrentThread(Thread* thread) { RTC_DLOG(LS_ERROR) << "SetCurrentThread: Overwriting an existing value?"; } #endif // RTC_DLOG_IS_ON + + if (thread) { + thread->EnsureIsCurrentTaskQueue(); + } else { + Thread* current = CurrentThread(); + if (current) { + // The current thread is being cleared, e.g. as a result of + // UnwrapCurrent() being called or when a thread is being stopped + // (see PreRun()). This signals that the Thread instance is being detached + // from the thread, which also means that TaskQueue::Current() must not + // return a pointer to the Thread instance. 
+ current->ClearCurrentTaskQueue(); + } + } + SetCurrentThreadInternal(thread); } @@ -404,9 +460,6 @@ bool Thread::Get(Message* pmsg, int cmsWait, bool process_io) { int64_t msStart = TimeMillis(); int64_t msCurrent = msStart; while (true) { - // Check for sent messages - ReceiveSendsFromThread(nullptr); - // Check for posted events int64_t cmsDelayNext = kForever; bool first_pass = true; @@ -791,7 +844,6 @@ void* Thread::PreRun(void* pv) { Thread* thread = static_cast(pv); ThreadManager::Instance()->SetCurrentThread(thread); rtc::SetCurrentThreadName(thread->name_.c_str()); - CurrentTaskQueueSetter set_current_task_queue(thread); #if defined(WEBRTC_MAC) ScopedAutoReleasePool pool; #endif @@ -836,88 +888,69 @@ void Thread::Send(const Location& posted_from, msg.message_id = id; msg.pdata = pdata; if (IsCurrent()) { - phandler->OnMessage(&msg); + msg.phandler->OnMessage(&msg); return; } AssertBlockingIsAllowedOnCurrentThread(); - AutoThread thread; Thread* current_thread = Thread::Current(); - RTC_DCHECK(current_thread != nullptr); // AutoThread ensures this - - bool ready = false; - { - CritScope cs(&crit_); - _SendMessage smsg; - smsg.thread = current_thread; - smsg.msg = msg; - smsg.ready = &ready; - sendlist_.push_back(smsg); - } - - // Wait for a reply - WakeUpSocketServer(); - bool waited = false; - crit_.Enter(); - while (!ready) { - crit_.Leave(); - // We need to limit "ReceiveSends" to |this| thread to avoid an arbitrary - // thread invoking calls on the current thread. - current_thread->ReceiveSendsFromThread(this); - current_thread->socketserver()->Wait(kForever, false); - waited = true; - crit_.Enter(); +#if RTC_DCHECK_IS_ON + if (current_thread) { + RTC_DCHECK(current_thread->IsInvokeToThreadAllowed(this)); + ThreadManager::Instance()->RegisterSendAndCheckForCycles(current_thread, + this); } - crit_.Leave(); - - // Our Wait loop above may have consumed some WakeUp events for this - // Thread, that weren't relevant to this Send. 
Losing these WakeUps can - // cause problems for some SocketServers. - // - // Concrete example: - // Win32SocketServer on thread A calls Send on thread B. While processing the - // message, thread B Posts a message to A. We consume the wakeup for that - // Post while waiting for the Send to complete, which means that when we exit - // this loop, we need to issue another WakeUp, or else the Posted message - // won't be processed in a timely manner. - - if (waited) { - current_thread->socketserver()->WakeUp(); - } -} - -void Thread::ReceiveSendsFromThread(const Thread* source) { - // Receive a sent message. Cleanup scenarios: - // - thread sending exits: We don't allow this, since thread can exit - // only via Join, so Send must complete. - // - thread receiving exits: Wakeup/set ready in Thread::Clear() - // - object target cleared: Wakeup/set ready in Thread::Clear() - _SendMessage smsg; +#endif - crit_.Enter(); - while (PopSendMessageFromThread(source, &smsg)) { - crit_.Leave(); + // Perhaps down the line we can get rid of this workaround and always require + // current_thread to be valid when Send() is called. 
+ std::unique_ptr done_event; + if (!current_thread) + done_event.reset(new rtc::Event()); - Dispatch(&smsg.msg); + bool ready = false; + PostTask(webrtc::ToQueuedTask( + [&msg]() mutable { msg.phandler->OnMessage(&msg); }, + [this, &ready, current_thread, done = done_event.get()] { + if (current_thread) { + CritScope cs(&crit_); + ready = true; + current_thread->socketserver()->WakeUp(); + } else { + done->Set(); + } + })); + if (current_thread) { + bool waited = false; crit_.Enter(); - *smsg.ready = true; - smsg.thread->socketserver()->WakeUp(); - } - crit_.Leave(); -} + while (!ready) { + crit_.Leave(); + current_thread->socketserver()->Wait(kForever, false); + waited = true; + crit_.Enter(); + } + crit_.Leave(); -bool Thread::PopSendMessageFromThread(const Thread* source, _SendMessage* msg) { - for (auto it = sendlist_.begin(); it != sendlist_.end(); ++it) { - if (it->thread == source || source == nullptr) { - *msg = *it; - sendlist_.erase(it); - return true; + // Our Wait loop above may have consumed some WakeUp events for this + // Thread, that weren't relevant to this Send. Losing these WakeUps can + // cause problems for some SocketServers. + // + // Concrete example: + // Win32SocketServer on thread A calls Send on thread B. While processing + // the message, thread B Posts a message to A. We consume the wakeup for + // that Post while waiting for the Send to complete, which means that when + // we exit this loop, we need to issue another WakeUp, or else the Posted + // message won't be processed in a timely manner. + + if (waited) { + current_thread->socketserver()->WakeUp(); } + } else { + done_event->Wait(rtc::Event::kForever); } - return false; } void Thread::InvokeInternal(const Location& posted_from, @@ -938,6 +971,17 @@ void Thread::InvokeInternal(const Location& posted_from, Send(posted_from, &handler); } +// Called by the ThreadManager when being set as the current thread. 
+void Thread::EnsureIsCurrentTaskQueue() { + task_queue_registration_ = + std::make_unique(this); +} + +// Called by the ThreadManager when being set as the current thread. +void Thread::ClearCurrentTaskQueue() { + task_queue_registration_.reset(); +} + void Thread::QueuedTaskHandler::OnMessage(Message* msg) { RTC_DCHECK(msg); auto* data = static_cast*>(msg->pdata); @@ -952,6 +996,50 @@ void Thread::QueuedTaskHandler::OnMessage(Message* msg) { task.release(); } +void Thread::AllowInvokesToThread(Thread* thread) { +#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) + if (!IsCurrent()) { + PostTask(webrtc::ToQueuedTask( + [thread, this]() { AllowInvokesToThread(thread); })); + return; + } + RTC_DCHECK_RUN_ON(this); + allowed_threads_.push_back(thread); + invoke_policy_enabled_ = true; +#endif +} + +void Thread::DisallowAllInvokes() { +#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) + if (!IsCurrent()) { + PostTask(webrtc::ToQueuedTask([this]() { DisallowAllInvokes(); })); + return; + } + RTC_DCHECK_RUN_ON(this); + allowed_threads_.clear(); + invoke_policy_enabled_ = true; +#endif +} + +// Returns true if no policies added or if there is at least one policy +// that permits invocation to |target| thread. +bool Thread::IsInvokeToThreadAllowed(rtc::Thread* target) { +#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) + RTC_DCHECK_RUN_ON(this); + if (!invoke_policy_enabled_) { + return true; + } + for (const auto* thread : allowed_threads_) { + if (thread == target) { + return true; + } + } + return false; +#else + return true; +#endif +} + void Thread::PostTask(std::unique_ptr task) { // Though Post takes MessageData by raw pointer (last parameter), it still // takes it with ownership. @@ -981,26 +1069,6 @@ void Thread::Clear(MessageHandler* phandler, uint32_t id, MessageList* removed) { CritScope cs(&crit_); - - // Remove messages on sendlist_ with phandler - // Object target cleared: remove from send list, wakeup/set ready - // if sender not null. 
- for (auto iter = sendlist_.begin(); iter != sendlist_.end();) { - _SendMessage smsg = *iter; - if (smsg.msg.Match(phandler, id)) { - if (removed) { - removed->push_back(smsg.msg); - } else { - delete smsg.msg.pdata; - } - iter = sendlist_.erase(iter); - *smsg.ready = true; - smsg.thread->socketserver()->WakeUp(); - continue; - } - ++iter; - } - ClearInternal(phandler, id, removed); } diff --git a/rtc_base/thread.h b/rtc_base/thread.h index d08c3bd09c..ed19e98927 100644 --- a/rtc_base/thread.h +++ b/rtc_base/thread.h @@ -14,8 +14,10 @@ #include #include +#include #include #include +#include #include #include #include @@ -27,7 +29,7 @@ #include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/location.h" #include "rtc_base/message_handler.h" #include "rtc_base/platform_thread_types.h" @@ -80,9 +82,6 @@ class RTC_EXPORT ThreadManager { static void Remove(Thread* message_queue); static void Clear(MessageHandler* handler); - // TODO(nisse): Delete alias, as soon as downstream code is updated. - static void ProcessAllMessageQueues() { ProcessAllMessageQueuesForTesting(); } - // For testing purposes, for use with a simulated clock. // Ensures that all message queues have processed delayed messages // up until the current point in time. @@ -112,6 +111,13 @@ class RTC_EXPORT ThreadManager { bool IsMainThread(); +#if RTC_DCHECK_IS_ON + // Registers that a Send operation is to be performed between |source| and + // |target|, while checking that this does not cause a send cycle that could + // potentially cause a deadlock. 
+ void RegisterSendAndCheckForCycles(Thread* source, Thread* target); +#endif + private: ThreadManager(); ~ThreadManager(); @@ -121,6 +127,9 @@ class RTC_EXPORT ThreadManager { void RemoveInternal(Thread* message_queue); void ClearInternal(MessageHandler* handler); void ProcessAllMessageQueuesInternal(); +#if RTC_DCHECK_IS_ON + void RemoveFromSendGraph(Thread* thread) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); +#endif // This list contains all live Threads. std::vector message_queues_ RTC_GUARDED_BY(crit_); @@ -128,8 +137,14 @@ class RTC_EXPORT ThreadManager { // Methods that don't modify the list of message queues may be called in a // re-entrant fashion. "processing_" keeps track of the depth of re-entrant // calls. - CriticalSection crit_; + RecursiveCriticalSection crit_; size_t processing_ RTC_GUARDED_BY(crit_) = 0; +#if RTC_DCHECK_IS_ON + // Represents all thread seand actions by storing all send targets per thread. + // This is used by RegisterSendAndCheckForCycles. This graph has no cycles + // since we will trigger a CHECK failure if a cycle is introduced. + std::map> send_graph_ RTC_GUARDED_BY(crit_); +#endif #if defined(WEBRTC_POSIX) pthread_key_t key_; @@ -145,13 +160,6 @@ class RTC_EXPORT ThreadManager { RTC_DISALLOW_COPY_AND_ASSIGN(ThreadManager); }; -struct _SendMessage { - _SendMessage() {} - Thread* thread; - Message msg; - bool* ready; -}; - // WARNING! SUBCLASSES MUST CALL Stop() IN THEIR DESTRUCTORS! See ~Thread(). class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { @@ -327,6 +335,19 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { InvokeInternal(posted_from, functor); } + // Allows invoke to specified |thread|. Thread never will be dereferenced and + // will be used only for reference-based comparison, so instance can be safely + // deleted. If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing. 
+ void AllowInvokesToThread(Thread* thread); + + // If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing. + void DisallowAllInvokes(); + // Returns true if |target| was allowed by AllowInvokesToThread() or if no + // calls were made to AllowInvokesToThread and DisallowAllInvokes. Otherwise + // returns false. + // If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined always returns true. + bool IsInvokeToThreadAllowed(rtc::Thread* target); + // Posts a task to invoke the functor on |this| thread asynchronously, i.e. // without blocking the thread that invoked PostTask(). Ownership of |functor| // is passed and (usually, see below) destroyed on |this| thread after it is @@ -417,13 +438,6 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // irrevocable. Must be called on this thread. void DisallowBlockingCalls() { SetAllowBlockingCalls(false); } -#ifdef WEBRTC_ANDROID - // Sets the per-thread allow-blocking-calls flag to true, sidestepping the - // invariants upheld by DisallowBlockingCalls() and - // ScopedDisallowBlockingCalls. Must be called on this thread. - void DEPRECATED_AllowBlockingCalls() { SetAllowBlockingCalls(true); } -#endif - protected: class CurrentThreadSetter : CurrentTaskQueueSetter { public: @@ -508,11 +522,12 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { friend class ScopedDisallowBlockingCalls; - CriticalSection* CritForTest() { return &crit_; } + RecursiveCriticalSection* CritForTest() { return &crit_; } private: class QueuedTaskHandler final : public MessageHandler { public: + QueuedTaskHandler() {} void OnMessage(Message* msg) override; }; @@ -537,19 +552,15 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // Return true if the thread is currently running. bool IsRunning(); - // Processes received "Send" requests. If |source| is not null, only requests - // from |source| are processed, otherwise, all requests are processed. 
- void ReceiveSendsFromThread(const Thread* source); - - // If |source| is not null, pops the first "Send" message from |source| in - // |sendlist_|, otherwise, pops the first "Send" message of |sendlist_|. - // The caller must lock |crit_| before calling. - // Returns true if there is such a message. - bool PopSendMessageFromThread(const Thread* source, _SendMessage* msg); - void InvokeInternal(const Location& posted_from, rtc::FunctionView functor); + // Called by the ThreadManager when being set as the current thread. + void EnsureIsCurrentTaskQueue(); + + // Called by the ThreadManager when being unset as the current thread. + void ClearCurrentTaskQueue(); + // Returns a static-lifetime MessageHandler which runs message with // MessageLikeTask payload data. static MessageHandler* GetPostTaskMessageHandler(); @@ -559,7 +570,11 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { MessageList messages_ RTC_GUARDED_BY(crit_); PriorityQueue delayed_messages_ RTC_GUARDED_BY(crit_); uint32_t delayed_next_num_ RTC_GUARDED_BY(crit_); - CriticalSection crit_; +#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) + std::vector allowed_threads_ RTC_GUARDED_BY(this); + bool invoke_policy_enabled_ RTC_GUARDED_BY(this) = false; +#endif + RecursiveCriticalSection crit_; bool fInitialized_; bool fDestroyed_; @@ -570,7 +585,6 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // Used if SocketServer ownership lies with |this|. std::unique_ptr own_ss_; - std::list<_SendMessage> sendlist_; std::string name_; // TODO(tommi): Add thread checks for proper use of control methods. @@ -595,6 +609,8 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // Runs webrtc::QueuedTask posted to the Thread. 
QueuedTaskHandler queued_task_handler_; + std::unique_ptr + task_queue_registration_; friend class ThreadManager; @@ -604,7 +620,9 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // AutoThread automatically installs itself at construction // uninstalls at destruction, if a Thread object is // _not already_ associated with the current OS thread. - +// +// NOTE: *** This class should only be used by tests *** +// class AutoThread : public Thread { public: AutoThread(); diff --git a/rtc_base/thread_unittest.cc b/rtc_base/thread_unittest.cc index 2cd21de0e8..51321985ed 100644 --- a/rtc_base/thread_unittest.cc +++ b/rtc_base/thread_unittest.cc @@ -22,11 +22,14 @@ #include "rtc_base/null_socket_server.h" #include "rtc_base/physical_socket_server.h" #include "rtc_base/socket_address.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "test/testsupport/rtc_expect_death.h" #if defined(WEBRTC_WIN) #include // NOLINT + #endif namespace rtc { @@ -93,7 +96,7 @@ class SocketClient : public TestGenerator, public sigslot::has_slots<> { }; // Receives messages and sends on a socket. 
-class MessageClient : public MessageHandler, public TestGenerator { +class MessageClient : public MessageHandlerAutoCleanup, public TestGenerator { public: MessageClient(Thread* pth, Socket* socket) : socket_(socket) {} @@ -160,17 +163,17 @@ class AtomicBool { public: explicit AtomicBool(bool value = false) : flag_(value) {} AtomicBool& operator=(bool value) { - CritScope scoped_lock(&cs_); + webrtc::MutexLock scoped_lock(&mutex_); flag_ = value; return *this; } bool get() const { - CritScope scoped_lock(&cs_); + webrtc::MutexLock scoped_lock(&mutex_); return flag_; } private: - CriticalSection cs_; + mutable webrtc::Mutex mutex_; bool flag_; }; @@ -287,6 +290,63 @@ TEST(ThreadTest, Wrap) { ThreadManager::Instance()->SetCurrentThread(current_thread); } +#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) +TEST(ThreadTest, InvokeToThreadAllowedReturnsTrueWithoutPolicies) { + // Create and start the thread. + auto thread1 = Thread::CreateWithSocketServer(); + auto thread2 = Thread::CreateWithSocketServer(); + + thread1->PostTask(ToQueuedTask( + [&]() { EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread2.get())); })); + Thread* th_main = Thread::Current(); + th_main->ProcessMessages(100); +} + +TEST(ThreadTest, InvokeAllowedWhenThreadsAdded) { + // Create and start the thread. 
+ auto thread1 = Thread::CreateWithSocketServer(); + auto thread2 = Thread::CreateWithSocketServer(); + auto thread3 = Thread::CreateWithSocketServer(); + auto thread4 = Thread::CreateWithSocketServer(); + + thread1->AllowInvokesToThread(thread2.get()); + thread1->AllowInvokesToThread(thread3.get()); + + thread1->PostTask(ToQueuedTask([&]() { + EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread2.get())); + EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread3.get())); + EXPECT_FALSE(thread1->IsInvokeToThreadAllowed(thread4.get())); + })); + Thread* th_main = Thread::Current(); + th_main->ProcessMessages(100); +} + +TEST(ThreadTest, InvokesDisallowedWhenDisallowAllInvokes) { + // Create and start the thread. + auto thread1 = Thread::CreateWithSocketServer(); + auto thread2 = Thread::CreateWithSocketServer(); + + thread1->DisallowAllInvokes(); + + thread1->PostTask(ToQueuedTask([&]() { + EXPECT_FALSE(thread1->IsInvokeToThreadAllowed(thread2.get())); + })); + Thread* th_main = Thread::Current(); + th_main->ProcessMessages(100); +} +#endif // (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) + +TEST(ThreadTest, InvokesAllowedByDefault) { + // Create and start the thread. + auto thread1 = Thread::CreateWithSocketServer(); + auto thread2 = Thread::CreateWithSocketServer(); + + thread1->PostTask(ToQueuedTask( + [&]() { EXPECT_TRUE(thread1->IsInvokeToThreadAllowed(thread2.get())); })); + Thread* th_main = Thread::Current(); + th_main->ProcessMessages(100); +} + TEST(ThreadTest, Invoke) { // Create and start the thread. auto thread = Thread::CreateWithSocketServer(); @@ -307,29 +367,38 @@ TEST(ThreadTest, Invoke) { } // Verifies that two threads calling Invoke on each other at the same time does -// not deadlock. -TEST(ThreadTest, TwoThreadsInvokeNoDeadlock) { +// not deadlock but crash. 
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +TEST(ThreadTest, TwoThreadsInvokeDeathTest) { + ::testing::GTEST_FLAG(death_test_style) = "threadsafe"; AutoThread thread; - Thread* current_thread = Thread::Current(); - ASSERT_TRUE(current_thread != nullptr); - + Thread* main_thread = Thread::Current(); auto other_thread = Thread::CreateWithSocketServer(); other_thread->Start(); + other_thread->Invoke(RTC_FROM_HERE, [main_thread] { + RTC_EXPECT_DEATH(main_thread->Invoke(RTC_FROM_HERE, [] {}), "loop"); + }); +} - struct LocalFuncs { - static void Set(bool* out) { *out = true; } - static void InvokeSet(Thread* thread, bool* out) { - thread->Invoke(RTC_FROM_HERE, Bind(&Set, out)); - } - }; - - bool called = false; - other_thread->Invoke( - RTC_FROM_HERE, Bind(&LocalFuncs::InvokeSet, current_thread, &called)); - - EXPECT_TRUE(called); +TEST(ThreadTest, ThreeThreadsInvokeDeathTest) { + ::testing::GTEST_FLAG(death_test_style) = "threadsafe"; + AutoThread thread; + Thread* first = Thread::Current(); + + auto second = Thread::Create(); + second->Start(); + auto third = Thread::Create(); + third->Start(); + + second->Invoke(RTC_FROM_HERE, [&] { + third->Invoke(RTC_FROM_HERE, [&] { + RTC_EXPECT_DEATH(first->Invoke(RTC_FROM_HERE, [] {}), "loop"); + }); + }); } +#endif + // Verifies that if thread A invokes a call on thread B and thread C is trying // to invoke A at the same time, thread A does not handle C's invoke while // invoking B. 
@@ -346,18 +415,18 @@ TEST(ThreadTest, ThreeThreadsInvoke) { explicit LockedBool(bool value) : value_(value) {} void Set(bool value) { - CritScope lock(&crit_); + webrtc::MutexLock lock(&mutex_); value_ = value; } bool Get() { - CritScope lock(&crit_); + webrtc::MutexLock lock(&mutex_); return value_; } private: - CriticalSection crit_; - bool value_ RTC_GUARDED_BY(crit_); + webrtc::Mutex mutex_; + bool value_ RTC_GUARDED_BY(mutex_); }; struct LocalFuncs { @@ -380,7 +449,6 @@ TEST(ThreadTest, ThreeThreadsInvoke) { Thread* thread1, Thread* thread2, LockedBool* out) { - CriticalSection crit; LockedBool async_invoked(false); invoker->AsyncInvoke( @@ -506,7 +574,7 @@ TEST_F(ThreadQueueTest, DisposeNotLocked) { EXPECT_FALSE(was_locked); } -class DeletedMessageHandler : public MessageHandler { +class DeletedMessageHandler : public MessageHandlerAutoCleanup { public: explicit DeletedMessageHandler(bool* deleted) : deleted_(deleted) {} ~DeletedMessageHandler() override { *deleted_ = true; } @@ -596,12 +664,13 @@ TEST(ThreadManager, ProcessAllMessageQueuesWithClearedQueue) { ThreadManager::ProcessAllMessageQueuesForTesting(); } -class RefCountedHandler : public MessageHandler, public rtc::RefCountInterface { +class RefCountedHandler : public MessageHandlerAutoCleanup, + public rtc::RefCountInterface { public: void OnMessage(Message* msg) override {} }; -class EmptyHandler : public MessageHandler { +class EmptyHandler : public MessageHandlerAutoCleanup { public: void OnMessage(Message* msg) override {} }; @@ -770,105 +839,6 @@ TEST_F(AsyncInvokeTest, FlushWithIds) { EXPECT_TRUE(flag2.get()); } -class GuardedAsyncInvokeTest : public ::testing::Test { - public: - void IntCallback(int value) { - EXPECT_EQ(expected_thread_, Thread::Current()); - int_value_ = value; - } - void SetExpectedThreadForIntCallback(Thread* thread) { - expected_thread_ = thread; - } - - protected: - const static int kWaitTimeout = 1000; - GuardedAsyncInvokeTest() : int_value_(0), 
expected_thread_(nullptr) {} - - int int_value_; - Thread* expected_thread_; -}; - -// Functor for creating an invoker. -struct CreateInvoker { - CreateInvoker(std::unique_ptr* invoker) - : invoker_(invoker) {} - void operator()() { invoker_->reset(new GuardedAsyncInvoker()); } - std::unique_ptr* invoker_; -}; - -// Test that we can call AsyncInvoke() after the thread died. -TEST_F(GuardedAsyncInvokeTest, KillThreadFireAndForget) { - // Create and start the thread. - std::unique_ptr thread(Thread::Create()); - thread->Start(); - std::unique_ptr invoker; - // Create the invoker on |thread|. - thread->Invoke(RTC_FROM_HERE, CreateInvoker(&invoker)); - // Kill |thread|. - thread = nullptr; - // Try calling functor. - AtomicBool called; - EXPECT_FALSE(invoker->AsyncInvoke(RTC_FROM_HERE, FunctorB(&called))); - // With thread gone, nothing should happen. - WAIT(called.get(), kWaitTimeout); - EXPECT_FALSE(called.get()); -} - -// The remaining tests check that GuardedAsyncInvoker behaves as AsyncInvoker -// when Thread is still alive. -TEST_F(GuardedAsyncInvokeTest, FireAndForget) { - GuardedAsyncInvoker invoker; - // Try calling functor. - AtomicBool called; - EXPECT_TRUE(invoker.AsyncInvoke(RTC_FROM_HERE, FunctorB(&called))); - EXPECT_TRUE_WAIT(called.get(), kWaitTimeout); -} - -TEST_F(GuardedAsyncInvokeTest, NonCopyableFunctor) { - GuardedAsyncInvoker invoker; - // Try calling functor. - AtomicBool called; - EXPECT_TRUE(invoker.AsyncInvoke(RTC_FROM_HERE, FunctorD(&called))); - EXPECT_TRUE_WAIT(called.get(), kWaitTimeout); -} - -TEST_F(GuardedAsyncInvokeTest, Flush) { - GuardedAsyncInvoker invoker; - AtomicBool flag1; - AtomicBool flag2; - // Queue two async calls to the current thread. - EXPECT_TRUE(invoker.AsyncInvoke(RTC_FROM_HERE, FunctorB(&flag1))); - EXPECT_TRUE(invoker.AsyncInvoke(RTC_FROM_HERE, FunctorB(&flag2))); - // Because we haven't pumped messages, these should not have run yet. 
- EXPECT_FALSE(flag1.get()); - EXPECT_FALSE(flag2.get()); - // Force them to run now. - EXPECT_TRUE(invoker.Flush()); - EXPECT_TRUE(flag1.get()); - EXPECT_TRUE(flag2.get()); -} - -TEST_F(GuardedAsyncInvokeTest, FlushWithIds) { - GuardedAsyncInvoker invoker; - AtomicBool flag1; - AtomicBool flag2; - // Queue two async calls to the current thread, one with a message id. - EXPECT_TRUE(invoker.AsyncInvoke(RTC_FROM_HERE, FunctorB(&flag1), 5)); - EXPECT_TRUE(invoker.AsyncInvoke(RTC_FROM_HERE, FunctorB(&flag2))); - // Because we haven't pumped messages, these should not have run yet. - EXPECT_FALSE(flag1.get()); - EXPECT_FALSE(flag2.get()); - // Execute pending calls with id == 5. - EXPECT_TRUE(invoker.Flush(5)); - EXPECT_TRUE(flag1.get()); - EXPECT_FALSE(flag2.get()); - flag1 = false; - // Execute all pending calls. The id == 5 call should not execute again. - EXPECT_TRUE(invoker.Flush()); - EXPECT_FALSE(flag1.get()); - EXPECT_TRUE(flag2.get()); -} - void ThreadIsCurrent(Thread* thread, bool* result, Event* event) { *result = thread->IsCurrent(); event->Set(); @@ -1134,10 +1104,22 @@ TEST(ThreadPostDelayedTaskTest, InvokesInDelayOrder) { // All tasks have been posted before the first one is unblocked. first.Set(); // Only if the chain is invoked in delay order will the last event be set. 
- clock.AdvanceTime(webrtc::TimeDelta::ms(11)); + clock.AdvanceTime(webrtc::TimeDelta::Millis(11)); EXPECT_TRUE(fourth.Wait(0)); } +TEST(ThreadPostDelayedTaskTest, IsCurrentTaskQueue) { + auto current_tq = webrtc::TaskQueueBase::Current(); + { + std::unique_ptr thread(rtc::Thread::Create()); + thread->WrapCurrent(); + EXPECT_EQ(webrtc::TaskQueueBase::Current(), + static_cast(thread.get())); + thread->UnwrapCurrent(); + } + EXPECT_EQ(webrtc::TaskQueueBase::Current(), current_tq); +} + class ThreadFactory : public webrtc::TaskQueueFactory { public: std::unique_ptr diff --git a/rtc_base/time/BUILD.gn b/rtc_base/time/BUILD.gn index e13ccd35ee..9a1d99b610 100644 --- a/rtc_base/time/BUILD.gn +++ b/rtc_base/time/BUILD.gn @@ -17,5 +17,4 @@ rtc_library("timestamp_extrapolator") { "timestamp_extrapolator.cc", "timestamp_extrapolator.h", ] - deps = [ "../synchronization:rw_lock_wrapper" ] } diff --git a/rtc_base/time/timestamp_extrapolator.cc b/rtc_base/time/timestamp_extrapolator.cc index bf9f726c42..99445284dc 100644 --- a/rtc_base/time/timestamp_extrapolator.cc +++ b/rtc_base/time/timestamp_extrapolator.cc @@ -15,8 +15,7 @@ namespace webrtc { TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms) - : _rwLock(RWLockWrapper::CreateRWLock()), - _startMs(0), + : _startMs(0), _firstTimestamp(0), _wrapArounds(0), _prevUnwrappedTimestamp(-1), @@ -34,12 +33,7 @@ TimestampExtrapolator::TimestampExtrapolator(int64_t start_ms) Reset(start_ms); } -TimestampExtrapolator::~TimestampExtrapolator() { - delete _rwLock; -} - void TimestampExtrapolator::Reset(int64_t start_ms) { - WriteLockScoped wl(*_rwLock); _startMs = start_ms; _prevMs = _startMs; _firstTimestamp = 0; @@ -58,13 +52,10 @@ void TimestampExtrapolator::Reset(int64_t start_ms) { } void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) { - _rwLock->AcquireLockExclusive(); if (tMs - _prevMs > 10e3) { // Ten seconds without a complete frame. 
// Reset the extrapolator - _rwLock->ReleaseLockExclusive(); Reset(tMs); - _rwLock->AcquireLockExclusive(); } else { _prevMs = tMs; } @@ -100,7 +91,6 @@ void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) { if (_prevUnwrappedTimestamp >= 0 && unwrapped_ts90khz < _prevUnwrappedTimestamp) { // Drop reordered frames. - _rwLock->ReleaseLockExclusive(); return; } @@ -131,11 +121,9 @@ void TimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz) { if (_packetCount < _startUpFilterDelayInPackets) { _packetCount++; } - _rwLock->ReleaseLockExclusive(); } int64_t TimestampExtrapolator::ExtrapolateLocalTime(uint32_t timestamp90khz) { - ReadLockScoped rl(*_rwLock); int64_t localTimeMs = 0; CheckForWrapArounds(timestamp90khz); double unwrapped_ts90khz = diff --git a/rtc_base/time/timestamp_extrapolator.h b/rtc_base/time/timestamp_extrapolator.h index 63af57b227..b325d2cbaa 100644 --- a/rtc_base/time/timestamp_extrapolator.h +++ b/rtc_base/time/timestamp_extrapolator.h @@ -13,14 +13,12 @@ #include -#include "rtc_base/synchronization/rw_lock_wrapper.h" - namespace webrtc { +// Not thread safe. class TimestampExtrapolator { public: explicit TimestampExtrapolator(int64_t start_ms); - ~TimestampExtrapolator(); void Update(int64_t tMs, uint32_t ts90khz); int64_t ExtrapolateLocalTime(uint32_t timestamp90khz); void Reset(int64_t start_ms); @@ -28,7 +26,6 @@ class TimestampExtrapolator { private: void CheckForWrapArounds(uint32_t ts90khz); bool DelayChangeDetection(double error); - RWLockWrapper* _rwLock; double _w[2]; double _pP[2][2]; int64_t _startMs; diff --git a/rtc_base/time_utils.cc b/rtc_base/time_utils.cc index 8d919262d3..11c9d5a47f 100644 --- a/rtc_base/time_utils.cc +++ b/rtc_base/time_utils.cc @@ -247,7 +247,7 @@ int64_t TimestampWrapAroundHandler::Unwrap(uint32_t ts) { ++num_wrap_; } else if ((ts - last_ts_) > 0xf0000000) { // Backwards wrap. Unwrap with last wrap count and don't update last_ts_. 
- return ts + ((num_wrap_ - 1) << 32); + return ts + (num_wrap_ - 1) * (int64_t{1} << 32); } last_ts_ = ts; diff --git a/rtc_base/time_utils_unittest.cc b/rtc_base/time_utils_unittest.cc index 824c2c056b..2663714b7a 100644 --- a/rtc_base/time_utils_unittest.cc +++ b/rtc_base/time_utils_unittest.cc @@ -218,7 +218,7 @@ TEST(FakeClock, TimeFunctionsUseFakeClock) { FakeClock clock; SetClockForTesting(&clock); - clock.SetTime(webrtc::Timestamp::us(987654)); + clock.SetTime(webrtc::Timestamp::Micros(987654)); EXPECT_EQ(987u, Time32()); EXPECT_EQ(987, TimeMillis()); EXPECT_EQ(987654, TimeMicros()); @@ -237,21 +237,21 @@ TEST(FakeClock, InitialTime) { TEST(FakeClock, SetTime) { FakeClock clock; - clock.SetTime(webrtc::Timestamp::us(123)); + clock.SetTime(webrtc::Timestamp::Micros(123)); EXPECT_EQ(123000, clock.TimeNanos()); - clock.SetTime(webrtc::Timestamp::us(456)); + clock.SetTime(webrtc::Timestamp::Micros(456)); EXPECT_EQ(456000, clock.TimeNanos()); } TEST(FakeClock, AdvanceTime) { FakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::us(1u)); + clock.AdvanceTime(webrtc::TimeDelta::Micros(1u)); EXPECT_EQ(1000, clock.TimeNanos()); - clock.AdvanceTime(webrtc::TimeDelta::us(2222u)); + clock.AdvanceTime(webrtc::TimeDelta::Micros(2222u)); EXPECT_EQ(2223000, clock.TimeNanos()); - clock.AdvanceTime(webrtc::TimeDelta::ms(3333u)); + clock.AdvanceTime(webrtc::TimeDelta::Millis(3333u)); EXPECT_EQ(3335223000, clock.TimeNanos()); - clock.AdvanceTime(webrtc::TimeDelta::seconds(4444u)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(4444u)); EXPECT_EQ(4447335223000, clock.TimeNanos()); } @@ -282,7 +282,7 @@ TEST(FakeClock, SettingTimeWakesThreads) { // Advance the fake clock, expecting the worker thread to wake up // and dispatch the message instantly. 
- clock.AdvanceTime(webrtc::TimeDelta::seconds(60u)); + clock.AdvanceTime(webrtc::TimeDelta::Seconds(60u)); EXPECT_TRUE(message_handler_dispatched.Wait(0)); worker->Stop(); diff --git a/rtc_base/timestamp_aligner.cc b/rtc_base/timestamp_aligner.cc index b797420f01..c9f8f9de33 100644 --- a/rtc_base/timestamp_aligner.cc +++ b/rtc_base/timestamp_aligner.cc @@ -23,29 +23,36 @@ TimestampAligner::TimestampAligner() : frames_seen_(0), offset_us_(0), clip_bias_us_(0), - prev_translated_time_us_(std::numeric_limits::min()) {} + prev_translated_time_us_(std::numeric_limits::min()), + prev_time_offset_us_(0) {} TimestampAligner::~TimestampAligner() {} -int64_t TimestampAligner::TranslateTimestamp(int64_t camera_time_us, +int64_t TimestampAligner::TranslateTimestamp(int64_t capturer_time_us, int64_t system_time_us) { - return ClipTimestamp( - camera_time_us + UpdateOffset(camera_time_us, system_time_us), + const int64_t translated_timestamp = ClipTimestamp( + capturer_time_us + UpdateOffset(capturer_time_us, system_time_us), system_time_us); + prev_time_offset_us_ = translated_timestamp - capturer_time_us; + return translated_timestamp; } -int64_t TimestampAligner::UpdateOffset(int64_t camera_time_us, +int64_t TimestampAligner::TranslateTimestamp(int64_t capturer_time_us) const { + return capturer_time_us + prev_time_offset_us_; +} + +int64_t TimestampAligner::UpdateOffset(int64_t capturer_time_us, int64_t system_time_us) { - // Estimate the offset between system monotonic time and the capture - // time from the camera. The camera is assumed to provide more + // Estimate the offset between system monotonic time and the capturer's + // time. The capturer is assumed to provide more // accurate timestamps than we get from the system time. But the - // camera may use its own free-running clock with a large offset and + // capturer may use its own free-running clock with a large offset and // a small drift compared to the system clock. 
So the model is // basically // // y_k = c_0 + c_1 * x_k + v_k // - // where x_k is the camera timestamp, believed to be accurate in its + // where x_k is the capturer's timestamp, believed to be accurate in its // own scale. y_k is our reading of the system clock. v_k is the // measurement noise, i.e., the delay from frame capture until the // system clock was read. @@ -73,18 +80,18 @@ int64_t TimestampAligner::UpdateOffset(int64_t camera_time_us, // exponential averaging. // The input for averaging, y_k - x_k in the above notation. - int64_t diff_us = system_time_us - camera_time_us; + int64_t diff_us = system_time_us - capturer_time_us; // The deviation from the current average. int64_t error_us = diff_us - offset_us_; // If the current difference is far from the currently estimated // offset, the filter is reset. This could happen, e.g., if the - // camera clock is reset, or cameras are plugged in and out, or if + // capturer's clock is reset, cameras are plugged in and out, or // the application process is temporarily suspended. Expected to // happen for the very first timestamp (|frames_seen_| = 0). The // threshold of 300 ms should make this unlikely in normal // operation, and at the same time, converging gradually rather than - // resetting the filter should be tolerable for jumps in camera time + // resetting the filter should be tolerable for jumps in capturer's time // below this threshold. static const int64_t kResetThresholdUs = 300000; if (std::abs(error_us) > kResetThresholdUs) { diff --git a/rtc_base/timestamp_aligner.h b/rtc_base/timestamp_aligner.h index 151bcdccb1..da45aa6d1f 100644 --- a/rtc_base/timestamp_aligner.h +++ b/rtc_base/timestamp_aligner.h @@ -18,14 +18,15 @@ namespace rtc { -// The TimestampAligner class helps translating camera timestamps into -// the same timescale as is used by rtc::TimeMicros(). 
Some cameras -// have built in timestamping which is more accurate than reading the -// system clock, but using a different epoch and unknown clock drift. -// Frame timestamps in webrtc should use rtc::TimeMicros (system monotonic -// time), and this class provides a filter which lets us use the -// rtc::TimeMicros timescale, and at the same time take advantage of -// higher accuracy of the camera clock. +// The TimestampAligner class helps translating timestamps of a capture system +// into the same timescale as is used by rtc::TimeMicros(). Some capture systems +// provide timestamps, which comes from the capturing hardware (camera or sound +// card) or stamped close to the capturing hardware. Such timestamps are more +// accurate (less jittery) than reading the system clock, but may have a +// different epoch and unknown clock drift. Frame timestamps in webrtc should +// use rtc::TimeMicros (system monotonic time), and this class provides a filter +// which lets us use the rtc::TimeMicros timescale, and at the same time take +// advantage of higher accuracy of the capturer's clock. // This class is not thread safe, so all calls to it must be synchronized // externally. @@ -35,18 +36,24 @@ class RTC_EXPORT TimestampAligner { ~TimestampAligner(); public: - // Translates camera timestamps to the same timescale as is used by - // rtc::TimeMicros(). |camera_time_us| is assumed to be accurate, but + // Translates timestamps of a capture system to the same timescale as is used + // by rtc::TimeMicros(). |capturer_time_us| is assumed to be accurate, but // with an unknown epoch and clock drift. |system_time_us| is // time according to rtc::TimeMicros(), preferably read as soon as // possible when the frame is captured. It may have poor accuracy // due to poor resolution or scheduling delays. Returns the // translated timestamp. 
- int64_t TranslateTimestamp(int64_t camera_time_us, int64_t system_time_us); + int64_t TranslateTimestamp(int64_t capturer_time_us, int64_t system_time_us); + + // Returns the translated timestamp without updating the states. This is to + // allow TimestampAligner to translate capturer time into system clock based + // on earlier observations. It won't guarantee monotonicity. + int64_t TranslateTimestamp(int64_t capturer_time_us) const; protected: - // Update the estimated offset between camera time and system monotonic time. - int64_t UpdateOffset(int64_t camera_time_us, int64_t system_time_us); + // Update the estimated offset between capturer's time and system monotonic + // time. + int64_t UpdateOffset(int64_t capturer_time_us, int64_t system_time_us); // Clip timestamp, return value is always // <= |system_time_us|, and @@ -57,16 +64,19 @@ class RTC_EXPORT TimestampAligner { private: // State for the timestamp translation. int frames_seen_; - // Estimated offset between camera time and system monotonic time. + // Estimated offset between capturer's time and system monotonic time. int64_t offset_us_; // State for the ClipTimestamp method, applied after the filter. - // A large negative camera clock drift tends to push translated + // A large negative clock drift of the capturer tends to push translated // timestamps into the future. |clip_bias_us_| is subtracted from the // translated timestamps, to get them back from the future. int64_t clip_bias_us_; // Used to ensure that translated timestamps are monotonous. int64_t prev_translated_time_us_; + // Offset between |prev_translated_time_us_| and the corresponding capturer + // time. 
+ int64_t prev_time_offset_us_; RTC_DISALLOW_COPY_AND_ASSIGN(TimestampAligner); }; diff --git a/rtc_base/timestamp_aligner_unittest.cc b/rtc_base/timestamp_aligner_unittest.cc index 17d9e06ce5..df6207a22c 100644 --- a/rtc_base/timestamp_aligner_unittest.cc +++ b/rtc_base/timestamp_aligner_unittest.cc @@ -152,27 +152,27 @@ TEST(TimestampAlignerTest, ClipToMonotonous) { // {0, c1, c1 + c2}, we exhibit non-monotonous behaviour if and only // if c1 > s1 + 2 s2 + 4 c2. const int kNumSamples = 3; - const int64_t camera_time_us[kNumSamples] = {0, 80000, 90001}; - const int64_t system_time_us[kNumSamples] = {0, 10000, 20000}; + const int64_t kCaptureTimeUs[kNumSamples] = {0, 80000, 90001}; + const int64_t kSystemTimeUs[kNumSamples] = {0, 10000, 20000}; const int64_t expected_offset_us[kNumSamples] = {0, -35000, -46667}; // Non-monotonic translated timestamps can happen when only for // translated timestamps in the future. Which is tolerated if // |timestamp_aligner.clip_bias_us| is large enough. Instead of // changing that private member for this test, just add the bias to - // |system_time_us| when calling ClipTimestamp. + // |kSystemTimeUs| when calling ClipTimestamp. 
const int64_t kClipBiasUs = 100000; bool did_clip = false; int64_t prev_timestamp_us = std::numeric_limits::min(); for (int i = 0; i < kNumSamples; i++) { int64_t offset_us = - timestamp_aligner.UpdateOffset(camera_time_us[i], system_time_us[i]); + timestamp_aligner.UpdateOffset(kCaptureTimeUs[i], kSystemTimeUs[i]); EXPECT_EQ(offset_us, expected_offset_us[i]); - int64_t translated_timestamp_us = camera_time_us[i] + offset_us; + int64_t translated_timestamp_us = kCaptureTimeUs[i] + offset_us; int64_t clip_timestamp_us = timestamp_aligner.ClipTimestamp( - translated_timestamp_us, system_time_us[i] + kClipBiasUs); + translated_timestamp_us, kSystemTimeUs[i] + kClipBiasUs); if (translated_timestamp_us <= prev_timestamp_us) { did_clip = true; EXPECT_EQ(clip_timestamp_us, @@ -186,4 +186,22 @@ TEST(TimestampAlignerTest, ClipToMonotonous) { EXPECT_TRUE(did_clip); } +TEST(TimestampAlignerTest, TranslateTimestampWithoutStateUpdate) { + TimestampAligner timestamp_aligner; + + constexpr int kNumSamples = 4; + constexpr int64_t kCaptureTimeUs[kNumSamples] = {0, 80000, 90001, 100000}; + constexpr int64_t kSystemTimeUs[kNumSamples] = {0, 10000, 20000, 30000}; + constexpr int64_t kQueryCaptureTimeOffsetUs[kNumSamples] = {0, 123, -321, + 345}; + + for (int i = 0; i < kNumSamples; i++) { + int64_t reference_timestamp = timestamp_aligner.TranslateTimestamp( + kCaptureTimeUs[i], kSystemTimeUs[i]); + EXPECT_EQ(reference_timestamp - kQueryCaptureTimeOffsetUs[i], + timestamp_aligner.TranslateTimestamp( + kCaptureTimeUs[i] - kQueryCaptureTimeOffsetUs[i])); + } +} + } // namespace rtc diff --git a/rtc_base/units/unit_base_unittest.cc b/rtc_base/units/unit_base_unittest.cc index 02ead7ce0b..bbdbd8cd10 100644 --- a/rtc_base/units/unit_base_unittest.cc +++ b/rtc_base/units/unit_base_unittest.cc @@ -39,7 +39,7 @@ class TestUnit final : public rtc_units_impl::RelativeUnit { } private: - friend class UnitBase; + friend class rtc_units_impl::UnitBase; static constexpr bool one_sided = false; 
using RelativeUnit::RelativeUnit; }; diff --git a/rtc_base/untyped_function.h b/rtc_base/untyped_function.h new file mode 100644 index 0000000000..c1f59458b9 --- /dev/null +++ b/rtc_base/untyped_function.h @@ -0,0 +1,324 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_UNTYPED_FUNCTION_H_ +#define RTC_BASE_UNTYPED_FUNCTION_H_ + +#include +#include +#include +#include +#include + +#include "rtc_base/system/assume.h" + +namespace webrtc { +namespace webrtc_function_impl { + +using FunVoid = void(); + +// Inline storage size is this many machine words. +enum : size_t { kInlineStorageWords = 4 }; + +union VoidUnion { + void* void_ptr; + FunVoid* fun_ptr; + typename std::aligned_storage::type + inline_storage; +}; + +// Returns the number of elements of the `inline_storage` array required to +// store an object of type T. +template +constexpr size_t InlineStorageSize() { + // sizeof(T) / sizeof(uintptr_t), but rounded up. + return (sizeof(T) + sizeof(uintptr_t) - 1) / sizeof(uintptr_t); +} + +template +struct CallHelpers; +template +struct CallHelpers { + // Return type of the three helpers below. + using return_type = RetT; + // Complete function type of the three helpers below. + using function_type = RetT(VoidUnion*, ArgT...); + // Helper for calling the `void_ptr` case of VoidUnion. + template + static RetT CallVoidPtr(VoidUnion* vu, ArgT... args) { + return (*static_cast(vu->void_ptr))(std::forward(args)...); + } + // Helper for calling the `fun_ptr` case of VoidUnion. + static RetT CallFunPtr(VoidUnion* vu, ArgT... 
args) { + return (reinterpret_cast(vu->fun_ptr))( + std::forward(args)...); + } + // Helper for calling the `inline_storage` case of VoidUnion. + template + static RetT CallInlineStorage(VoidUnion* vu, ArgT... args) { + return (*reinterpret_cast(&vu->inline_storage))( + std::forward(args)...); + } +}; + +} // namespace webrtc_function_impl + +// A class that holds (and owns) any callable. The same function call signature +// must be provided when constructing and calling the object. +// +// The point of not having the call signature as a class template parameter is +// to have one single concrete type for all signatures; this reduces binary +// size. +class UntypedFunction final { + public: + // Callables of at most this size can be stored inline, if they are trivial. + // (Useful in tests and benchmarks; avoid using this in production code.) + enum : size_t { + kInlineStorageSize = sizeof(webrtc_function_impl::VoidUnion::inline_storage) + }; + static_assert(kInlineStorageSize == + webrtc_function_impl::kInlineStorageWords * + sizeof(uintptr_t), + ""); + + // The *UntypedFunctionArgs structs are used to transfer arguments from + // PrepareArgs() to Create(). They are trivial, but may own heap allocations, + // so make sure to pass them to Create() exactly once! + // + // The point of doing Create(PrepareArgs(foo)) instead of just Create(foo) is + // to separate the code that has to be inlined (PrepareArgs) from the code + // that can be noninlined (Create); the *UntypedFunctionArgs types are + // designed to efficiently carry the required information from one to the + // other. + template + struct TrivialUntypedFunctionArgs { + static_assert(N >= 1, ""); + static_assert(N <= webrtc_function_impl::kInlineStorageWords, ""); + // We use an uintptr_t array here instead of std::aligned_storage, because + // the former can be efficiently passed in registers when using + // TrivialUntypedFunctionArgs as a function argument. 
(We can't do the same + // in VoidUnion, because std::aligned_storage but not uintptr_t can be + // legally reinterpret_casted to arbitrary types. + // TrivialUntypedFunctionArgs, on the other hand, only needs to handle + // placement new and memcpy.) + alignas(std::max_align_t) uintptr_t inline_storage[N]; + webrtc_function_impl::FunVoid* call; + }; + struct NontrivialUntypedFunctionArgs { + void* void_ptr; + webrtc_function_impl::FunVoid* call; + void (*del)(webrtc_function_impl::VoidUnion*); + }; + struct FunctionPointerUntypedFunctionArgs { + webrtc_function_impl::FunVoid* fun_ptr; + webrtc_function_impl::FunVoid* call; + }; + + // Create function for lambdas and other callables that are trivial and small; + // it accepts every type of argument except those noted in its enable_if call. + template < + typename Signature, + typename F, + typename F_deref = typename std::remove_reference::type, + typename std::enable_if< + // Not for function pointers; we have another overload for that below. + !std::is_function< + typename std::remove_pointer::type>::value && + + // Not for nullptr; we have a constructor for that below. + !std::is_same::type>::value && + + // Not for UntypedFunction objects; use move construction or + // assignment. + !std::is_same::type>::value && + + // Only for trivial callables that will fit in inline storage. + std::is_trivially_move_constructible::value && + std::is_trivially_destructible::value && + sizeof(F_deref) <= kInlineStorageSize>::type* = nullptr, + size_t InlineSize = webrtc_function_impl::InlineStorageSize()> + static TrivialUntypedFunctionArgs PrepareArgs(F&& f) { + // The callable is trivial and small enough, so we just store its bytes + // in the inline storage. 
+ TrivialUntypedFunctionArgs args; + new (&args.inline_storage) F_deref(std::forward(f)); + args.call = reinterpret_cast( + webrtc_function_impl::CallHelpers< + Signature>::template CallInlineStorage); + return args; + } + template + static UntypedFunction Create(TrivialUntypedFunctionArgs args) { + webrtc_function_impl::VoidUnion vu; + std::memcpy(&vu.inline_storage, args.inline_storage, + sizeof(args.inline_storage)); + return UntypedFunction(vu, args.call, nullptr); + } + + // Create function for lambdas and other callables that are nontrivial or + // large; it accepts every type of argument except those noted in its + // enable_if call. + template ::type, + typename std::enable_if< + // Not for function pointers; we have another overload for that + // below. + !std::is_function< + typename std::remove_pointer::type>::value && + + // Not for nullptr; we have a constructor for that below. + !std::is_same::type>::value && + + // Not for UntypedFunction objects; use move construction or + // assignment. + !std::is_same::type>::value && + + // Only for nontrivial callables, or callables that won't fit in + // inline storage. + !(std::is_trivially_move_constructible::value && + std::is_trivially_destructible::value && + sizeof(F_deref) <= kInlineStorageSize)>::type* = nullptr> + static NontrivialUntypedFunctionArgs PrepareArgs(F&& f) { + // The callable is either nontrivial or too large, so we can't keep it + // in the inline storage; use the heap instead. + NontrivialUntypedFunctionArgs args; + args.void_ptr = new F_deref(std::forward(f)); + args.call = reinterpret_cast( + webrtc_function_impl::CallHelpers::template CallVoidPtr< + F_deref>); + args.del = static_cast( + [](webrtc_function_impl::VoidUnion* vu) { + // Assuming that this pointer isn't null allows the + // compiler to eliminate a null check in the (inlined) + // delete operation. 
+ RTC_ASSUME(vu->void_ptr != nullptr); + delete reinterpret_cast(vu->void_ptr); + }); + return args; + } + static UntypedFunction Create(NontrivialUntypedFunctionArgs args) { + webrtc_function_impl::VoidUnion vu; + vu.void_ptr = args.void_ptr; + return UntypedFunction(vu, args.call, args.del); + } + + // Create function that accepts function pointers. If the argument is null, + // the result is an empty UntypedFunction. + template + static FunctionPointerUntypedFunctionArgs PrepareArgs(Signature* f) { + FunctionPointerUntypedFunctionArgs args; + args.fun_ptr = reinterpret_cast(f); + args.call = reinterpret_cast( + webrtc_function_impl::CallHelpers::CallFunPtr); + return args; + } + static UntypedFunction Create(FunctionPointerUntypedFunctionArgs args) { + webrtc_function_impl::VoidUnion vu; + vu.fun_ptr = args.fun_ptr; + return UntypedFunction(vu, args.fun_ptr == nullptr ? nullptr : args.call, + nullptr); + } + + // Prepares arguments and creates an UntypedFunction in one go. + template + static UntypedFunction Create(F&& f) { + return Create(PrepareArgs(std::forward(f))); + } + + // Default constructor. Creates an empty UntypedFunction. + UntypedFunction() : call_(nullptr), delete_(nullptr) {} + + // Nullptr constructor and assignment. Creates an empty UntypedFunction. + UntypedFunction(std::nullptr_t) // NOLINT(runtime/explicit) + : call_(nullptr), delete_(nullptr) {} + UntypedFunction& operator=(std::nullptr_t) { + call_ = nullptr; + if (delete_) { + delete_(&f_); + delete_ = nullptr; + } + return *this; + } + + // Not copyable. + UntypedFunction(const UntypedFunction&) = delete; + UntypedFunction& operator=(const UntypedFunction&) = delete; + + // Move construction and assignment. 
+ UntypedFunction(UntypedFunction&& other) + : f_(other.f_), call_(other.call_), delete_(other.delete_) { + other.delete_ = nullptr; + } + UntypedFunction& operator=(UntypedFunction&& other) { + if (delete_) { + delete_(&f_); + } + f_ = other.f_; + call_ = other.call_; + delete_ = other.delete_; + other.delete_ = nullptr; + return *this; + } + + ~UntypedFunction() { + if (delete_) { + delete_(&f_); + } + } + + friend void swap(UntypedFunction& a, UntypedFunction& b) { + using std::swap; + swap(a.f_, b.f_); + swap(a.call_, b.call_); + swap(a.delete_, b.delete_); + } + + // Returns true if we have a function, false if we don't (i.e., we're null). + explicit operator bool() const { return call_ != nullptr; } + + template + typename webrtc_function_impl::CallHelpers::return_type Call( + ArgT&&... args) { + return reinterpret_cast< + typename webrtc_function_impl::CallHelpers::function_type*>( + call_)(&f_, std::forward(args)...); + } + + // Returns true iff we don't need to call a destructor. This is guaranteed + // to hold for a moved-from object. + bool IsTriviallyDestructible() { return delete_ == nullptr; } + + private: + UntypedFunction(webrtc_function_impl::VoidUnion f, + webrtc_function_impl::FunVoid* call, + void (*del)(webrtc_function_impl::VoidUnion*)) + : f_(f), call_(call), delete_(del) {} + + // The callable thing, or a pointer to it. + webrtc_function_impl::VoidUnion f_; + + // Pointer to a dispatch function that knows the type of the callable thing + // that's stored in f_, and how to call it. An UntypedFunction object is empty + // (null) iff call_ is null. + webrtc_function_impl::FunVoid* call_; + + // Pointer to a function that knows how to delete the callable thing that's + // stored in f_. Null if `f_` is trivially deletable. 
+ void (*delete_)(webrtc_function_impl::VoidUnion*); +}; + +} // namespace webrtc + +#endif // RTC_BASE_UNTYPED_FUNCTION_H_ diff --git a/rtc_base/untyped_function_unittest.cc b/rtc_base/untyped_function_unittest.cc new file mode 100644 index 0000000000..8ea26e7a43 --- /dev/null +++ b/rtc_base/untyped_function_unittest.cc @@ -0,0 +1,309 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/untyped_function.h" + +#include +#include + +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::Pointee; + +TEST(UntypedFunction, Empty1) { + UntypedFunction uf; + EXPECT_FALSE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); +} + +TEST(UntypedFunction, Empty2) { + UntypedFunction uf = nullptr; + EXPECT_FALSE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); +} + +TEST(UntypedFunction, Empty3) { + UntypedFunction uf = UntypedFunction::Create(nullptr); + EXPECT_FALSE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); +} + +TEST(UntypedFunction, CallTrivialWithInt) { + auto uf = UntypedFunction::Create([](int x) { return x + 5; }); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(17), 22); +} + +TEST(UntypedFunction, CallTrivialWithPointer) { + auto uf = UntypedFunction::Create([](int* x) { return *x; }); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + int x = 12; + EXPECT_EQ(uf.Call(&x), 12); +} + +TEST(UntypedFunction, CallTrivialWithReference) { + auto uf = UntypedFunction::Create([](int& x) { x = 3; }); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + int x = 12; + uf.Call(x); + 
EXPECT_EQ(x, 3); +} + +TEST(UntypedFunction, CallTrivialWithRvalueReference) { + auto uf = UntypedFunction::Create([](int&& x) { return x - 2; }); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(34), 32); +} + +TEST(UntypedFunction, CallNontrivialWithInt) { + std::vector list; + auto uf = UntypedFunction::Create([list](int x) mutable { + list.push_back(x); + return list.size(); + }); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(17), 1); + EXPECT_EQ(uf.Call(17), 2); +} + +TEST(UntypedFunction, CallNontrivialWithPointer) { + std::vector list; + auto uf = UntypedFunction::Create([list](int* x) mutable { + list.push_back(*x); + return list.data(); + }); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + int x = 12; + EXPECT_THAT(uf.Call(&x), Pointee(12)); +} + +TEST(UntypedFunction, CallNontrivialWithReference) { + std::vector list = {34, 35, 36}; + auto uf = + UntypedFunction::Create([list](int& x) { x = list[1]; }); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + int x = 12; + uf.Call(x); + EXPECT_EQ(x, 35); +} + +TEST(UntypedFunction, CallNontrivialWithRvalueReference) { + std::vector list; + auto uf = UntypedFunction::Create([list](int&& x) mutable { + list.push_back(x); + return list.size(); + }); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(34), 1); + EXPECT_EQ(uf.Call(34), 2); +} + +int AddFive(int x) { + return x + 5; +} +int DereferencePointer(int* x) { + return *x; +} +void AssignThree(int& x) { + x = 3; +} +int SubtractTwo(int&& x) { + return x - 2; +} + +TEST(UntypedFunction, CallFunctionPointerWithInt) { + auto uf = UntypedFunction::Create(AddFive); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(17), 22); +} + +TEST(UntypedFunction, CallFunctionPointerWithPointer) { + auto uf = UntypedFunction::Create(DereferencePointer); + EXPECT_TRUE(uf); + 
EXPECT_TRUE(uf.IsTriviallyDestructible()); + int x = 12; + EXPECT_EQ(uf.Call(&x), 12); +} + +TEST(UntypedFunction, CallFunctionPointerWithReference) { + auto uf = UntypedFunction::Create(AssignThree); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + int x = 12; + uf.Call(x); + EXPECT_EQ(x, 3); +} + +TEST(UntypedFunction, CallFunctionPointerWithRvalueReference) { + auto uf = UntypedFunction::Create(SubtractTwo); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(34), 32); +} + +TEST(UntypedFunction, CallTrivialWithNoArgs) { + int arr[] = {1, 2, 3}; + static_assert(sizeof(arr) <= UntypedFunction::kInlineStorageSize, ""); + auto uf = UntypedFunction::Create([arr] { return arr[1]; }); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(), 2); +} + +TEST(UntypedFunction, CallLargeTrivialWithNoArgs) { + int arr[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0}; + static_assert(sizeof(arr) > UntypedFunction::kInlineStorageSize, ""); + auto uf = UntypedFunction::Create([arr] { return arr[4]; }); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(), 5); +} + +TEST(UntypedFunction, MoveonlyReturnValue) { + auto uf = UntypedFunction::Create()>( + [] { return std::make_unique(567); }); + EXPECT_THAT(uf.Call()>(), Pointee(567)); +} + +TEST(UntypedFunction, MoveonlyArgument) { + auto uf = UntypedFunction::Create)>( + [](std::unique_ptr x) { return *x + 19; }); + EXPECT_EQ(uf.Call)>(std::make_unique(40)), 59); +} + +TEST(UntypedFunction, MoveOnlyCallable) { + auto uf = UntypedFunction::Create( + [x = std::make_unique(17)] { return ++*x; }); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(), 18); + EXPECT_EQ(uf.Call(), 19); + UntypedFunction uf2 = std::move(uf); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_FALSE(uf2.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(), 20); + 
EXPECT_EQ(uf.Call(), 21); +} + +class Destroyer { + public: + explicit Destroyer(int& destroy_count) : destroy_count_(&destroy_count) {} + ~Destroyer() { ++*destroy_count_; } + int operator()() { return 72; } + int* destroy_count_; +}; + +TEST(UntypedFunction, CallableIsDestroyed) { + int destroy_count = 0; + { + auto uf = UntypedFunction::Create(Destroyer(destroy_count)); + // Destruction count is 1 here, because the temporary we created above was + // destroyed. + EXPECT_EQ(destroy_count, 1); + { + auto uf2 = std::move(uf); + EXPECT_EQ(destroy_count, 1); + } + // `uf2` was destroyed. + EXPECT_EQ(destroy_count, 2); + } + // `uf` was destroyed, but it didn't contain a Destroyer since we moved it to + // `uf2` above. + EXPECT_EQ(destroy_count, 2); +} + +TEST(UntypedFunction, MoveAssign) { + int destroy_count = 0; + auto uf = UntypedFunction::Create(Destroyer(destroy_count)); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + // Destruction count is 1 here, because the temporary we created above was + // destroyed. + EXPECT_EQ(destroy_count, 1); + UntypedFunction uf2 = nullptr; + EXPECT_FALSE(uf2); + EXPECT_TRUE(uf2.IsTriviallyDestructible()); + + uf2 = std::move(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_TRUE(uf2); + EXPECT_FALSE(uf2.IsTriviallyDestructible()); + EXPECT_EQ(destroy_count, 1); // The callable was not destroyed. + EXPECT_EQ(uf2.Call(), 72); + + UntypedFunction uf3 = nullptr; + uf2 = std::move(uf3); + EXPECT_FALSE(uf2); + EXPECT_TRUE(uf2.IsTriviallyDestructible()); + EXPECT_EQ(destroy_count, 2); // The callable was destroyed by the assignment. +} + +TEST(UntypedFunction, NullptrAssign) { + int destroy_count = 0; + auto uf = UntypedFunction::Create(Destroyer(destroy_count)); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + // Destruction count is 1 here, because the temporary we created above was + // destroyed. 
+ EXPECT_EQ(destroy_count, 1); + + uf = nullptr; + EXPECT_FALSE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_EQ(destroy_count, 2); // The callable was destroyed by the assignment. +} + +TEST(UntypedFunction, Swap) { + int x = 13; + auto uf = UntypedFunction::Create([x]() mutable { return ++x; }); + EXPECT_TRUE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + auto y = std::make_unique(113); + auto uf2 = + UntypedFunction::Create([y = std::move(y)] { return ++*y; }); + EXPECT_TRUE(uf2); + EXPECT_FALSE(uf2.IsTriviallyDestructible()); + UntypedFunction uf3 = nullptr; + EXPECT_FALSE(uf3); + EXPECT_TRUE(uf3.IsTriviallyDestructible()); + + EXPECT_EQ(uf.Call(), 14); + swap(uf, uf2); + EXPECT_TRUE(uf); + EXPECT_FALSE(uf.IsTriviallyDestructible()); + EXPECT_TRUE(uf2); + EXPECT_TRUE(uf2.IsTriviallyDestructible()); + EXPECT_EQ(uf.Call(), 114); + EXPECT_EQ(uf2.Call(), 15); + + swap(uf, uf3); + EXPECT_FALSE(uf); + EXPECT_TRUE(uf.IsTriviallyDestructible()); + EXPECT_TRUE(uf3); + EXPECT_FALSE(uf3.IsTriviallyDestructible()); + EXPECT_EQ(uf3.Call(), 115); +} + +} // namespace +} // namespace webrtc diff --git a/rtc_base/virtual_socket_server.cc b/rtc_base/virtual_socket_server.cc index 318f34a2a2..3d412d66cc 100644 --- a/rtc_base/virtual_socket_server.cc +++ b/rtc_base/virtual_socket_server.cc @@ -19,6 +19,7 @@ #include "absl/algorithm/container.h" #include "rtc_base/checks.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/fake_clock.h" #include "rtc_base/logging.h" #include "rtc_base/physical_socket_server.h" @@ -637,7 +638,7 @@ bool VirtualSocketServer::ProcessMessagesUntilIdle() { if (fake_clock_) { // If using a fake clock, advance it in millisecond increments until the // queue is empty. - fake_clock_->AdvanceTime(webrtc::TimeDelta::ms(1)); + fake_clock_->AdvanceTime(webrtc::TimeDelta::Millis(1)); } else { // Otherwise, run a normal message loop. 
Message msg; diff --git a/rtc_base/virtual_socket_server.h b/rtc_base/virtual_socket_server.h index f45fabf0af..f33ebccd36 100644 --- a/rtc_base/virtual_socket_server.h +++ b/rtc_base/virtual_socket_server.h @@ -17,6 +17,7 @@ #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/event.h" #include "rtc_base/fake_clock.h" #include "rtc_base/message_handler.h" @@ -294,7 +295,7 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { std::map alternative_address_mapping_; std::unique_ptr delay_dist_; - CriticalSection delay_crit_; + RecursiveCriticalSection delay_crit_; double drop_prob_; bool sending_blocked_ = false; @@ -304,7 +305,7 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { // Implements the socket interface using the virtual network. Packets are // passed as messages using the message queue of the socket server. class VirtualSocket : public AsyncSocket, - public MessageHandler, + public MessageHandlerAutoCleanup, public sigslot::has_slots<> { public: VirtualSocket(VirtualSocketServer* server, int family, int type, bool async); @@ -379,7 +380,7 @@ class VirtualSocket : public AsyncSocket, bool ready_to_send_ = true; // Critical section to protect the recv_buffer and queue_ - CriticalSection crit_; + RecursiveCriticalSection crit_; // Network model that enforces bandwidth and capacity constraints NetworkQueue network_; diff --git a/rtc_base/virtual_socket_unittest.cc b/rtc_base/virtual_socket_unittest.cc index b274b40857..78003f5cb2 100644 --- a/rtc_base/virtual_socket_unittest.cc +++ b/rtc_base/virtual_socket_unittest.cc @@ -53,7 +53,7 @@ using webrtc::testing::SSE_WRITE; using webrtc::testing::StreamSink; // Sends at a constant rate but with random packet sizes. 
-struct Sender : public MessageHandler { +struct Sender : public MessageHandlerAutoCleanup { Sender(Thread* th, AsyncSocket* s, uint32_t rt) : thread(th), socket(std::make_unique(s)), @@ -99,7 +99,8 @@ struct Sender : public MessageHandler { char dummy[4096]; }; -struct Receiver : public MessageHandler, public sigslot::has_slots<> { +struct Receiver : public MessageHandlerAutoCleanup, + public sigslot::has_slots<> { Receiver(Thread* th, AsyncSocket* s, uint32_t bw) : thread(th), socket(std::make_unique(s)), diff --git a/rtc_base/weak_ptr.h b/rtc_base/weak_ptr.h index 3e63a7587d..8b2ba099cb 100644 --- a/rtc_base/weak_ptr.h +++ b/rtc_base/weak_ptr.h @@ -241,6 +241,10 @@ class WeakPtrFactory { public: explicit WeakPtrFactory(T* ptr) : ptr_(ptr) {} + WeakPtrFactory() = delete; + WeakPtrFactory(const WeakPtrFactory&) = delete; + WeakPtrFactory& operator=(const WeakPtrFactory&) = delete; + ~WeakPtrFactory() { ptr_ = nullptr; } WeakPtr GetWeakPtr() { @@ -263,7 +267,6 @@ class WeakPtrFactory { private: internal::WeakReferenceOwner weak_reference_owner_; T* ptr_; - RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WeakPtrFactory); }; } // namespace rtc diff --git a/rtc_base/win/get_activation_factory.cc b/rtc_base/win/get_activation_factory.cc new file mode 100644 index 0000000000..b3be9abfa7 --- /dev/null +++ b/rtc_base/win/get_activation_factory.cc @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/win/get_activation_factory.h" + +#include +#include + +namespace { + +FARPROC LoadComBaseFunction(const char* function_name) { + static HMODULE const handle = + ::LoadLibraryExW(L"combase.dll", nullptr, LOAD_LIBRARY_SEARCH_SYSTEM32); + return handle ? ::GetProcAddress(handle, function_name) : nullptr; +} + +decltype(&::RoGetActivationFactory) GetRoGetActivationFactoryFunction() { + static decltype(&::RoGetActivationFactory) const function = + reinterpret_cast( + LoadComBaseFunction("RoGetActivationFactory")); + return function; +} + +} // namespace + +namespace webrtc { + +bool ResolveCoreWinRTDelayload() { + return GetRoGetActivationFactoryFunction() && + ResolveCoreWinRTStringDelayload(); +} + +HRESULT RoGetActivationFactoryProxy(HSTRING class_id, + const IID& iid, + void** out_factory) { + auto get_factory_func = GetRoGetActivationFactoryFunction(); + if (!get_factory_func) + return E_FAIL; + return get_factory_func(class_id, iid, out_factory); +} + +} // namespace webrtc diff --git a/rtc_base/win/get_activation_factory.h b/rtc_base/win/get_activation_factory.h new file mode 100644 index 0000000000..801f39d313 --- /dev/null +++ b/rtc_base/win/get_activation_factory.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_WIN_GET_ACTIVATION_FACTORY_H_ +#define RTC_BASE_WIN_GET_ACTIVATION_FACTORY_H_ + +#include + +#include "rtc_base/win/hstring.h" + +namespace webrtc { + +// Provides access to Core WinRT functions which may not be available on +// Windows 7. Loads functions dynamically at runtime to prevent library +// dependencies. 
+ +// Callers must check the return value of ResolveCoreWinRTDelayLoad() before +// using these functions. + +bool ResolveCoreWinRTDelayload(); + +HRESULT RoGetActivationFactoryProxy(HSTRING class_id, + const IID& iid, + void** out_factory); + +// Retrieves an activation factory for the type specified. +template +HRESULT GetActivationFactory(InterfaceType** factory) { + HSTRING class_id_hstring; + HRESULT hr = CreateHstring(runtime_class_id, wcslen(runtime_class_id), + &class_id_hstring); + if (FAILED(hr)) + return hr; + + hr = RoGetActivationFactoryProxy(class_id_hstring, IID_PPV_ARGS(factory)); + if (FAILED(hr)) + return hr; + + return DeleteHstring(class_id_hstring); +} + +} // namespace webrtc + +#endif // RTC_BASE_WIN_GET_ACTIVATION_FACTORY_H_ diff --git a/rtc_base/win/hstring.cc b/rtc_base/win/hstring.cc new file mode 100644 index 0000000000..5a362a97c9 --- /dev/null +++ b/rtc_base/win/hstring.cc @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/win/hstring.h" + +#include +#include + +namespace { + +FARPROC LoadComBaseFunction(const char* function_name) { + static HMODULE const handle = + ::LoadLibraryExW(L"combase.dll", nullptr, LOAD_LIBRARY_SEARCH_SYSTEM32); + return handle ? 
::GetProcAddress(handle, function_name) : nullptr; +} + +decltype(&::WindowsCreateString) GetWindowsCreateString() { + static decltype(&::WindowsCreateString) const function = + reinterpret_cast( + LoadComBaseFunction("WindowsCreateString")); + return function; +} + +decltype(&::WindowsDeleteString) GetWindowsDeleteString() { + static decltype(&::WindowsDeleteString) const function = + reinterpret_cast( + LoadComBaseFunction("WindowsDeleteString")); + return function; +} + +} // namespace + +namespace webrtc { + +bool ResolveCoreWinRTStringDelayload() { + return GetWindowsDeleteString() && GetWindowsCreateString(); +} + +HRESULT CreateHstring(const wchar_t* src, uint32_t len, HSTRING* out_hstr) { + decltype(&::WindowsCreateString) create_string_func = + GetWindowsCreateString(); + if (!create_string_func) + return E_FAIL; + return create_string_func(src, len, out_hstr); +} + +HRESULT DeleteHstring(HSTRING hstr) { + decltype(&::WindowsDeleteString) delete_string_func = + GetWindowsDeleteString(); + if (!delete_string_func) + return E_FAIL; + return delete_string_func(hstr); +} + +} // namespace webrtc diff --git a/rtc_base/win/hstring.h b/rtc_base/win/hstring.h new file mode 100644 index 0000000000..8fb119a9e6 --- /dev/null +++ b/rtc_base/win/hstring.h @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_WIN_HSTRING_H_ +#define RTC_BASE_WIN_HSTRING_H_ + +#include +#include +#include + +namespace webrtc { + +// Callers must check the return value of ResolveCoreWinRTStringDelayLoad() +// before using these functions. 
+bool ResolveCoreWinRTStringDelayload(); + +HRESULT CreateHstring(const wchar_t* src, uint32_t len, HSTRING* out_hstr); + +HRESULT DeleteHstring(HSTRING hstr); + +} // namespace webrtc + +#endif // RTC_BASE_WIN_HSTRING_H_ diff --git a/rtc_base/win/scoped_com_initializer.cc b/rtc_base/win/scoped_com_initializer.cc new file mode 100644 index 0000000000..b83ad32a67 --- /dev/null +++ b/rtc_base/win/scoped_com_initializer.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/win/scoped_com_initializer.h" + +namespace webrtc { + +ScopedCOMInitializer::ScopedCOMInitializer() { + RTC_DLOG(INFO) << "Single-Threaded Apartment (STA) COM thread"; + Initialize(COINIT_APARTMENTTHREADED); +} + +// Constructor for MTA initialization. +ScopedCOMInitializer::ScopedCOMInitializer(SelectMTA mta) { + RTC_DLOG(INFO) << "Multi-Threaded Apartment (MTA) COM thread"; + Initialize(COINIT_MULTITHREADED); +} + +ScopedCOMInitializer::~ScopedCOMInitializer() { + if (Succeeded()) { + CoUninitialize(); + } +} + +void ScopedCOMInitializer::Initialize(COINIT init) { + // Initializes the COM library for use by the calling thread, sets the + // thread's concurrency model, and creates a new apartment for the thread + // if one is required. CoInitializeEx must be called at least once, and is + // usually called only once, for each thread that uses the COM library. 
+ hr_ = CoInitializeEx(NULL, init); + RTC_CHECK_NE(RPC_E_CHANGED_MODE, hr_) + << "Invalid COM thread model change (MTA->STA)"; + // Multiple calls to CoInitializeEx by the same thread are allowed as long + // as they pass the same concurrency flag, but subsequent valid calls + // return S_FALSE. To close the COM library gracefully on a thread, each + // successful call to CoInitializeEx, including any call that returns + // S_FALSE, must be balanced by a corresponding call to CoUninitialize. + if (hr_ == S_OK) { + RTC_DLOG(INFO) + << "The COM library was initialized successfully on this thread"; + } else if (hr_ == S_FALSE) { + RTC_DLOG(WARNING) + << "The COM library is already initialized on this thread"; + } +} + +} // namespace webrtc diff --git a/rtc_base/win/scoped_com_initializer.h b/rtc_base/win/scoped_com_initializer.h new file mode 100644 index 0000000000..918812fc72 --- /dev/null +++ b/rtc_base/win/scoped_com_initializer.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_WIN_SCOPED_COM_INITIALIZER_H_ +#define RTC_BASE_WIN_SCOPED_COM_INITIALIZER_H_ + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +// Initializes COM in the constructor (STA or MTA), and uninitializes COM in the +// destructor. Taken from base::win::ScopedCOMInitializer. +// +// WARNING: This should only be used once per thread, ideally scoped to a +// similar lifetime as the thread itself. You should not be using this in +// random utility functions that make COM calls; instead ensure that these +// functions are running on a COM-supporting thread! 
+// See https://msdn.microsoft.com/en-us/library/ms809971.aspx for details. +class ScopedCOMInitializer { + public: + // Enum value provided to initialize the thread as an MTA instead of STA. + // There are two types of apartments, Single Threaded Apartments (STAs) + // and Multi Threaded Apartments (MTAs). Within a given process there can + // be multiple STA’s but there is only one MTA. STA is typically used by + // "GUI applications" and MTA by "worker threads" with no UI message loop. + enum SelectMTA { kMTA }; + + // Constructor for STA initialization. + ScopedCOMInitializer(); + + // Constructor for MTA initialization. + explicit ScopedCOMInitializer(SelectMTA mta); + + ~ScopedCOMInitializer(); + + ScopedCOMInitializer(const ScopedCOMInitializer&) = delete; + ScopedCOMInitializer& operator=(const ScopedCOMInitializer&) = delete; + + bool Succeeded() { return SUCCEEDED(hr_); } + + private: + void Initialize(COINIT init); + + HRESULT hr_; +}; + +} // namespace webrtc + +#endif // RTC_BASE_WIN_SCOPED_COM_INITIALIZER_H_ diff --git a/rtc_base/win/windows_version.cc b/rtc_base/win/windows_version.cc index 2e6c1577ce..1df21d85c0 100644 --- a/rtc_base/win/windows_version.cc +++ b/rtc_base/win/windows_version.cc @@ -203,8 +203,18 @@ Version MajorMinorBuildToVersion(int major, int minor, int build) { return VERSION_WIN10_RS2; } else if (build < 17134) { return VERSION_WIN10_RS3; - } else { + } else if (build < 17763) { return VERSION_WIN10_RS4; + } else if (build < 18362) { + return VERSION_WIN10_RS5; + } else if (build < 18363) { + return VERSION_WIN10_19H1; + } else if (build < 19041) { + return VERSION_WIN10_19H2; + } else if (build < 19042) { + return VERSION_WIN10_20H1; + } else { + return VERSION_WIN10_20H2; } } else if (major > 6) { RTC_NOTREACHED(); diff --git a/rtc_base/win/windows_version.h b/rtc_base/win/windows_version.h index 1ad319e4cc..4dbca1011f 100644 --- a/rtc_base/win/windows_version.h +++ b/rtc_base/win/windows_version.h @@ -43,6 +43,11 @@ enum 
Version { VERSION_WIN10_RS2 = 10, // Redstone 2: Version 1703, Build 15063. VERSION_WIN10_RS3 = 11, // Redstone 3: Version 1709, Build 16299. VERSION_WIN10_RS4 = 12, // Redstone 4: Version 1803, Build 17134. + VERSION_WIN10_RS5 = 13, // Redstone 5: Version 1809, Build 17763. + VERSION_WIN10_19H1 = 14, // 19H1: Version 1903, Build 18362. + VERSION_WIN10_19H2 = 15, // 19H2: Version 1909, Build 18363. + VERSION_WIN10_20H1 = 16, // 20H1: Version 2004, Build 19041. + VERSION_WIN10_20H2 = 17, // 20H2: Version 20H2, Build 19042. // On edit, update tools\metrics\histograms\enums.xml "WindowsVersion" and // "GpuBlacklistFeatureTestResultsWindows2". VERSION_WIN_LAST, // Indicates error condition. diff --git a/rtc_base/win32_socket_server.cc b/rtc_base/win32_socket_server.cc index 8a5b93a608..cfe21a3630 100644 --- a/rtc_base/win32_socket_server.cc +++ b/rtc_base/win32_socket_server.cc @@ -733,7 +733,7 @@ bool Win32SocketServer::Wait(int cms, bool process_io) { MSG msg; b = GetMessage(&msg, nullptr, s_wm_wakeup_id, s_wm_wakeup_id); { - CritScope scope(&cs_); + webrtc::MutexLock lock(&mutex_); posted_ = false; } } else { @@ -747,7 +747,7 @@ void Win32SocketServer::WakeUp() { if (wnd_.handle()) { // Set the "message pending" flag, if not already set. { - CritScope scope(&cs_); + webrtc::MutexLock lock(&mutex_); if (posted_) return; posted_ = true; @@ -760,7 +760,7 @@ void Win32SocketServer::WakeUp() { void Win32SocketServer::Pump() { // Clear the "message pending" flag. 
{ - CritScope scope(&cs_); + webrtc::MutexLock lock(&mutex_); posted_ = false; } diff --git a/rtc_base/win32_socket_server.h b/rtc_base/win32_socket_server.h index 92fd68cd83..317acce0d2 100644 --- a/rtc_base/win32_socket_server.h +++ b/rtc_base/win32_socket_server.h @@ -13,10 +13,10 @@ #if defined(WEBRTC_WIN) #include "rtc_base/async_socket.h" -#include "rtc_base/critical_section.h" #include "rtc_base/socket.h" #include "rtc_base/socket_factory.h" #include "rtc_base/socket_server.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" #include "rtc_base/win32_window.h" @@ -123,7 +123,7 @@ class Win32SocketServer : public SocketServer { static const wchar_t kWindowName[]; Thread* message_queue_; MessageWindow wnd_; - CriticalSection cs_; + webrtc::Mutex mutex_; bool posted_; HWND hdlg_; }; diff --git a/rtc_tools/BUILD.gn b/rtc_tools/BUILD.gn index bffa033655..8a10d1440a 100644 --- a/rtc_tools/BUILD.gn +++ b/rtc_tools/BUILD.gn @@ -17,12 +17,12 @@ group("rtc_tools") { deps = [ ":frame_analyzer", ":video_file_reader", - ":video_quality_analysis", ] if (!build_with_chromium) { deps += [ ":psnr_ssim_analyzer", ":rgba_to_i420_converter", + ":video_quality_analysis", ] if (rtc_enable_protobuf) { deps += [ ":chart_proto" ] @@ -30,7 +30,10 @@ group("rtc_tools") { } if (rtc_include_tests) { - deps += [ ":tools_unittests" ] + deps += [ + ":tools_unittests", + ":yuv_to_ivf_converter", + ] if (rtc_enable_protobuf) { if (!build_with_chromium) { deps += [ ":event_log_visualizer" ] @@ -53,10 +56,11 @@ rtc_library("video_file_reader") { deps = [ "../api:scoped_refptr", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../rtc_base:checks", "../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -71,9 +75,10 @@ rtc_library("video_file_writer") { ":video_file_reader", "../api:scoped_refptr", "../api/video:video_frame", - 
"../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -98,15 +103,14 @@ rtc_library("video_quality_analysis") { "../api:array_view", "../api:scoped_refptr", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../common_video", "../rtc_base:checks", "../rtc_base:rtc_base_approved", "../test:perf_test", - "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_executable("frame_analyzer") { @@ -127,7 +131,9 @@ rtc_executable("frame_analyzer") { ] } -if (!build_with_chromium && !build_with_mozilla) { +# TODO(bugs.webrtc.org/11474): Enable this on win if needed. For now it +# is only required for Linux and Android. +if (!build_with_chromium && !build_with_mozilla && !is_win && !is_ios) { action("frame_analyzer_host") { script = "//tools_webrtc/executable_host_build.py" outputs = [ "${root_out_dir}/frame_analyzer_host" ] @@ -138,9 +144,10 @@ if (!build_with_chromium && !build_with_mozilla) { } } -# Only expose the targets needed by Chromium (e.g. frame_analyzer) to avoid -# building a lot of redundant code as part of Chromium builds. -if (!build_with_chromium) { +if (!is_component_build) { + # This target can be built from Chromium but it doesn't support + # is_component_build=true because it depends on WebRTC testonly code + # which is not part of //third_party/webrtc_overrides:webrtc_component. rtc_executable("rtp_generator") { visibility = [ "*" ] testonly = true @@ -183,8 +190,61 @@ if (!build_with_chromium) { "//third_party/abseil-cpp/absl/flags:usage", "//third_party/abseil-cpp/absl/strings", ] + if (build_with_chromium) { + # When building from Chromium, WebRTC's metrics and field trial + # implementations need to be replaced by the Chromium ones. 
+ deps += [ "//third_party/webrtc_overrides:webrtc_component" ] + } } + # This target can be built from Chromium but it doesn't support + # is_component_build=true because it depends on WebRTC testonly code + # which is not part of //third_party/webrtc_overrides:webrtc_component. + rtc_executable("video_replay") { + visibility = [ "*" ] + testonly = true + sources = [ "video_replay.cc" ] + deps = [ + "../api/rtc_event_log", + "../api/task_queue:default_task_queue_factory", + "../api/test/video:function_video_factory", + "../api/transport:field_trial_based_config", + "../api/video_codecs:video_codecs_api", + "../call", + "../call:call_interfaces", + "../common_video", + "../media:rtc_internal_video_codecs", + "../rtc_base:checks", + "../rtc_base:rtc_json", + "../rtc_base:stringutils", + "../rtc_base:timeutils", + "../system_wrappers", + "../test:call_config_utils", + "../test:encoder_settings", + "../test:fake_video_codecs", + "../test:null_transport", + "../test:rtp_test_utils", + "../test:run_test", + "../test:run_test_interface", + "../test:test_common", + "../test:test_renderer", + "../test:test_support", + "../test:video_test_common", + "../test:video_test_support", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/flags:parse", + ] + if (build_with_chromium) { + # When building from Chromium, WebRTC's metrics and field trial + # implementations need to be replaced by the Chromium ones. + deps += [ "//third_party/webrtc_overrides:webrtc_component" ] + } + } +} + +# Only expose the targets needed by Chromium (e.g. frame_analyzer) to avoid +# building a lot of redundant code as part of Chromium builds. 
+if (!build_with_chromium) { rtc_executable("psnr_ssim_analyzer") { testonly = true sources = [ "psnr_ssim_analyzer/psnr_ssim_analyzer.cc" ] @@ -260,8 +320,14 @@ if (!build_with_chromium) { rtc_library("event_log_visualizer_utils") { visibility = [ "*" ] sources = [ + "rtc_event_log_visualizer/alerts.cc", + "rtc_event_log_visualizer/alerts.h", + "rtc_event_log_visualizer/analyze_audio.cc", + "rtc_event_log_visualizer/analyze_audio.h", "rtc_event_log_visualizer/analyzer.cc", "rtc_event_log_visualizer/analyzer.h", + "rtc_event_log_visualizer/analyzer_common.cc", + "rtc_event_log_visualizer/analyzer_common.h", "rtc_event_log_visualizer/log_simulation.cc", "rtc_event_log_visualizer/log_simulation.h", "rtc_event_log_visualizer/plot_base.cc", @@ -270,11 +336,11 @@ if (!build_with_chromium) { "rtc_event_log_visualizer/plot_protobuf.h", "rtc_event_log_visualizer/plot_python.cc", "rtc_event_log_visualizer/plot_python.h", - "rtc_event_log_visualizer/triage_notifications.h", ] deps = [ ":chart_proto", "../api:function_view", + "../rtc_base:deprecation", "../rtc_base:ignore_wundef", # TODO(kwiberg): Remove this dependency. 
@@ -301,14 +367,53 @@ if (!build_with_chromium) { "../rtc_base:rtc_base_approved", "../rtc_base:rtc_numerics", "../rtc_base:stringutils", + "../test:explicit_key_value_config", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", ] } } } if (rtc_include_tests) { + rtc_executable("yuv_to_ivf_converter") { + visibility = [ "*" ] + testonly = true + sources = [ "converter/yuv_to_ivf_converter.cc" ] + deps = [ + "../api:create_frame_generator", + "../api:frame_generator_api", + "../api/task_queue:default_task_queue_factory", + "../api/video:encoded_image", + "../api/video:video_frame", + "../api/video_codecs:video_codecs_api", + "../media:rtc_media_base", + "../modules/rtp_rtcp:rtp_rtcp_format", + "../modules/video_coding:video_codec_interface", + "../modules/video_coding:video_coding_utility", + "../modules/video_coding:webrtc_h264", + "../modules/video_coding:webrtc_vp8", + "../modules/video_coding:webrtc_vp9", + "../rtc_base:checks", + "../rtc_base:criticalsection", + "../rtc_base:logging", + "../rtc_base:rtc_event", + "../rtc_base:rtc_task_queue", + "../rtc_base/synchronization:mutex", + "../rtc_base/system:file_wrapper", + "../test:video_test_common", + "../test:video_test_support", + "//third_party/abseil-cpp/absl/debugging:failure_signal_handler", + "//third_party/abseil-cpp/absl/debugging:symbolize", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/strings", + ] + } + if (rtc_enable_protobuf && !build_with_chromium) { rtc_executable("event_log_visualizer") { testonly = true @@ -379,7 +484,6 @@ if (rtc_include_tests) { ":video_quality_analysis", "../api:scoped_refptr", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../common_video", "../rtc_base", diff --git a/rtc_tools/DEPS b/rtc_tools/DEPS index 0cddb4acbb..5ccd86b63b 100644 
--- a/rtc_tools/DEPS +++ b/rtc_tools/DEPS @@ -19,3 +19,14 @@ include_rules = [ "+third_party/libyuv", ] +specific_include_rules = { + ".*ivf_converter\.cc": [ + "+absl/debugging/failure_signal_handler.h", + "+absl/debugging/symbolize.h", + "+modules/video_coding/codecs/vp8/include/vp8.h", + "+modules/video_coding/codecs/vp9/include/vp9.h", + "+modules/video_coding/include/video_error_codes.h", + "+modules/video_coding/utility/ivf_file_writer.h", + "+modules/video_coding/codecs/h264/include/h264.h", + ], +} diff --git a/rtc_tools/OWNERS b/rtc_tools/OWNERS index 77385fcd31..c2f49200e7 100644 --- a/rtc_tools/OWNERS +++ b/rtc_tools/OWNERS @@ -1,7 +1,3 @@ -per-file BUILD.gn=* - -phoglund@webrtc.org -oprypin@webrtc.org mbonadei@webrtc.org # For video analysis tools diff --git a/rtc_tools/compare_videos.py b/rtc_tools/compare_videos.py index ee8cf455b2..a54eb42979 100755 --- a/rtc_tools/compare_videos.py +++ b/rtc_tools/compare_videos.py @@ -18,7 +18,6 @@ import sys import tempfile - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) # Chrome browsertests will throw away stderr; avoid that output gets lost. @@ -26,131 +25,154 @@ def _ParseArgs(): - """Registers the command-line options.""" - usage = 'usage: %prog [options]' - parser = optparse.OptionParser(usage=usage) - - parser.add_option('--label', type='string', default='MY_TEST', - help=('Label of the test, used to identify different ' - 'tests. 
Default: %default')) - parser.add_option('--ref_video', type='string', - help='Reference video to compare with (YUV).') - parser.add_option('--test_video', type='string', - help=('Test video to be compared with the reference ' - 'video (YUV).')) - parser.add_option('--frame_analyzer', type='string', - help='Path to the frame analyzer executable.') - parser.add_option('--aligned_output_file', type='string', - help='Path for output aligned YUV or Y4M file.') - parser.add_option('--vmaf', type='string', - help='Path to VMAF executable.') - parser.add_option('--vmaf_model', type='string', - help='Path to VMAF model.') - parser.add_option('--vmaf_phone_model', action='store_true', - help='Whether to use phone model in VMAF.') - parser.add_option('--yuv_frame_width', type='int', default=640, - help='Width of the YUV file\'s frames. Default: %default') - parser.add_option('--yuv_frame_height', type='int', default=480, - help='Height of the YUV file\'s frames. Default: %default') - parser.add_option('--chartjson_result_file', type='str', default=None, - help='Where to store perf results in chartjson format.') - options, _ = parser.parse_args() - - if not options.ref_video: - parser.error('You must provide a path to the reference video!') - if not os.path.exists(options.ref_video): - parser.error('Cannot find the reference video at %s' % options.ref_video) - - if not options.test_video: - parser.error('You must provide a path to the test video!') - if not os.path.exists(options.test_video): - parser.error('Cannot find the test video at %s' % options.test_video) - - if not options.frame_analyzer: - parser.error('You must provide the path to the frame analyzer executable!') - if not os.path.exists(options.frame_analyzer): - parser.error('Cannot find frame analyzer executable at %s!' 
% - options.frame_analyzer) - - if options.vmaf and not options.vmaf_model: - parser.error('You must provide a path to a VMAF model to use VMAF.') - - return options + """Registers the command-line options.""" + usage = 'usage: %prog [options]' + parser = optparse.OptionParser(usage=usage) + + parser.add_option('--label', + type='string', + default='MY_TEST', + help=('Label of the test, used to identify different ' + 'tests. Default: %default')) + parser.add_option('--ref_video', + type='string', + help='Reference video to compare with (YUV).') + parser.add_option('--test_video', + type='string', + help=('Test video to be compared with the reference ' + 'video (YUV).')) + parser.add_option('--frame_analyzer', + type='string', + help='Path to the frame analyzer executable.') + parser.add_option('--aligned_output_file', + type='string', + help='Path for output aligned YUV or Y4M file.') + parser.add_option('--vmaf', type='string', help='Path to VMAF executable.') + parser.add_option('--vmaf_model', + type='string', + help='Path to VMAF model.') + parser.add_option('--vmaf_phone_model', + action='store_true', + help='Whether to use phone model in VMAF.') + parser.add_option( + '--yuv_frame_width', + type='int', + default=640, + help='Width of the YUV file\'s frames. Default: %default') + parser.add_option( + '--yuv_frame_height', + type='int', + default=480, + help='Height of the YUV file\'s frames. 
Default: %default') + parser.add_option('--chartjson_result_file', + type='str', + default=None, + help='Where to store perf results in chartjson format.') + options, _ = parser.parse_args() + + if not options.ref_video: + parser.error('You must provide a path to the reference video!') + if not os.path.exists(options.ref_video): + parser.error('Cannot find the reference video at %s' % + options.ref_video) + + if not options.test_video: + parser.error('You must provide a path to the test video!') + if not os.path.exists(options.test_video): + parser.error('Cannot find the test video at %s' % options.test_video) + + if not options.frame_analyzer: + parser.error( + 'You must provide the path to the frame analyzer executable!') + if not os.path.exists(options.frame_analyzer): + parser.error('Cannot find frame analyzer executable at %s!' % + options.frame_analyzer) + + if options.vmaf and not options.vmaf_model: + parser.error('You must provide a path to a VMAF model to use VMAF.') + + return options + def _DevNull(): - """On Windows, sometimes the inherited stdin handle from the parent process + """On Windows, sometimes the inherited stdin handle from the parent process fails. Workaround this by passing null to stdin to the subprocesses commands. This function can be used to create the null file handler. 
""" - return open(os.devnull, 'r') + return open(os.devnull, 'r') def _RunFrameAnalyzer(options, yuv_directory=None): - """Run frame analyzer to compare the videos and print output.""" - cmd = [ - options.frame_analyzer, - '--label=%s' % options.label, - '--reference_file=%s' % options.ref_video, - '--test_file=%s' % options.test_video, - '--width=%d' % options.yuv_frame_width, - '--height=%d' % options.yuv_frame_height, - ] - if options.chartjson_result_file: - cmd.append('--chartjson_result_file=%s' % options.chartjson_result_file) - if options.aligned_output_file: - cmd.append('--aligned_output_file=%s' % options.aligned_output_file) - if yuv_directory: - cmd.append('--yuv_directory=%s' % yuv_directory) - frame_analyzer = subprocess.Popen(cmd, stdin=_DevNull(), - stdout=sys.stdout, stderr=sys.stderr) - frame_analyzer.wait() - if frame_analyzer.returncode != 0: - print('Failed to run frame analyzer.') - return frame_analyzer.returncode + """Run frame analyzer to compare the videos and print output.""" + cmd = [ + options.frame_analyzer, + '--label=%s' % options.label, + '--reference_file=%s' % options.ref_video, + '--test_file=%s' % options.test_video, + '--width=%d' % options.yuv_frame_width, + '--height=%d' % options.yuv_frame_height, + ] + if options.chartjson_result_file: + cmd.append('--chartjson_result_file=%s' % + options.chartjson_result_file) + if options.aligned_output_file: + cmd.append('--aligned_output_file=%s' % options.aligned_output_file) + if yuv_directory: + cmd.append('--yuv_directory=%s' % yuv_directory) + frame_analyzer = subprocess.Popen(cmd, + stdin=_DevNull(), + stdout=sys.stdout, + stderr=sys.stderr) + frame_analyzer.wait() + if frame_analyzer.returncode != 0: + print('Failed to run frame analyzer.') + return frame_analyzer.returncode def _RunVmaf(options, yuv_directory, logfile): - """ Run VMAF to compare videos and print output. + """ Run VMAF to compare videos and print output. 
The yuv_directory is assumed to have been populated with a reference and test video in .yuv format, with names according to the label. """ - cmd = [ - options.vmaf, - 'yuv420p', - str(options.yuv_frame_width), - str(options.yuv_frame_height), - os.path.join(yuv_directory, "ref.yuv"), - os.path.join(yuv_directory, "test.yuv"), - options.vmaf_model, - '--log', - logfile, - '--log-fmt', - 'json', - ] - if options.vmaf_phone_model: - cmd.append('--phone-model') - - vmaf = subprocess.Popen(cmd, stdin=_DevNull(), - stdout=sys.stdout, stderr=sys.stderr) - vmaf.wait() - if vmaf.returncode != 0: - print('Failed to run VMAF.') - return 1 - - # Read per-frame scores from VMAF output and print. - with open(logfile) as f: - vmaf_data = json.load(f) - vmaf_scores = [] - for frame in vmaf_data['frames']: - vmaf_scores.append(frame['metrics']['vmaf']) - print('RESULT VMAF: %s=' % options.label, vmaf_scores) - - return 0 + cmd = [ + options.vmaf, + 'yuv420p', + str(options.yuv_frame_width), + str(options.yuv_frame_height), + os.path.join(yuv_directory, "ref.yuv"), + os.path.join(yuv_directory, "test.yuv"), + options.vmaf_model, + '--log', + logfile, + '--log-fmt', + 'json', + ] + if options.vmaf_phone_model: + cmd.append('--phone-model') + + vmaf = subprocess.Popen(cmd, + stdin=_DevNull(), + stdout=sys.stdout, + stderr=sys.stderr) + vmaf.wait() + if vmaf.returncode != 0: + print('Failed to run VMAF.') + return 1 + + # Read per-frame scores from VMAF output and print. + with open(logfile) as f: + vmaf_data = json.load(f) + vmaf_scores = [] + for frame in vmaf_data['frames']: + vmaf_scores.append(frame['metrics']['vmaf']) + print('RESULT VMAF: %s=' % options.label, vmaf_scores) + + return 0 def main(): - """The main function. + """The main function. 
A simple invocation is: ./webrtc/rtc_tools/compare_videos.py @@ -161,27 +183,28 @@ def main(): Running vmaf requires the following arguments: --vmaf, --vmaf_model, --yuv_frame_width, --yuv_frame_height """ - options = _ParseArgs() + options = _ParseArgs() - if options.vmaf: - try: - # Directory to save temporary YUV files for VMAF in frame_analyzer. - yuv_directory = tempfile.mkdtemp() - _, vmaf_logfile = tempfile.mkstemp() + if options.vmaf: + try: + # Directory to save temporary YUV files for VMAF in frame_analyzer. + yuv_directory = tempfile.mkdtemp() + _, vmaf_logfile = tempfile.mkstemp() - # Run frame analyzer to compare the videos and print output. - if _RunFrameAnalyzer(options, yuv_directory=yuv_directory) != 0: - return 1 + # Run frame analyzer to compare the videos and print output. + if _RunFrameAnalyzer(options, yuv_directory=yuv_directory) != 0: + return 1 + + # Run VMAF for further video comparison and print output. + return _RunVmaf(options, yuv_directory, vmaf_logfile) + finally: + shutil.rmtree(yuv_directory) + os.remove(vmaf_logfile) + else: + return _RunFrameAnalyzer(options) - # Run VMAF for further video comparison and print output. - return _RunVmaf(options, yuv_directory, vmaf_logfile) - finally: - shutil.rmtree(yuv_directory) - os.remove(vmaf_logfile) - else: - return _RunFrameAnalyzer(options) + return 0 - return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/rtc_tools/converter/yuv_to_ivf_converter.cc b/rtc_tools/converter/yuv_to_ivf_converter.cc new file mode 100644 index 0000000000..e4a1e125f8 --- /dev/null +++ b/rtc_tools/converter/yuv_to_ivf_converter.cc @@ -0,0 +1,287 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "absl/debugging/failure_signal_handler.h" +#include "absl/debugging/symbolize.h" +#include "absl/flags/flag.h" +#include "absl/flags/parse.h" +#include "absl/strings/match.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/test/create_frame_generator.h" +#include "api/test/frame_generator_interface.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "media/base/media_constants.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/utility/ivf_file_writer.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/file_wrapper.h" +#include "rtc_base/task_queue.h" +#include "test/testsupport/frame_reader.h" +#include "test/video_codec_settings.h" + +#if defined(WEBRTC_USE_H264) +#include "modules/video_coding/codecs/h264/include/h264.h" +#endif + +ABSL_FLAG(std::string, input, "", "Input YUV file to convert to IVF"); +ABSL_FLAG(int, width, 0, "Input frame width"); +ABSL_FLAG(int, height, 0, "Input frame height"); +ABSL_FLAG(std::string, codec, cricket::kVp8CodecName, "Codec to use"); +ABSL_FLAG(std::string, output, "", "Output IVF file"); + +namespace webrtc { +namespace test { +namespace { + +constexpr int kMaxFramerate = 30; +// We use very big value here to ensure that codec won't hit any limits. 
+constexpr uint32_t kBitrateBps = 100000000; +constexpr int kKeyFrameIntervalMs = 30000; +constexpr int kMaxFrameEncodeWaitTimeoutMs = 2000; +constexpr int kFrameLogInterval = 100; +static const VideoEncoder::Capabilities kCapabilities(false); + +class IvfFileWriterEncodedCallback : public EncodedImageCallback { + public: + IvfFileWriterEncodedCallback(const std::string& file_name, + VideoCodecType video_codec_type, + int expected_frames_count) + : file_writer_( + IvfFileWriter::Wrap(FileWrapper::OpenWriteOnly(file_name), 0)), + video_codec_type_(video_codec_type), + expected_frames_count_(expected_frames_count) { + RTC_CHECK(file_writer_.get()); + } + ~IvfFileWriterEncodedCallback() { RTC_CHECK(file_writer_->Close()); } + + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) override { + RTC_CHECK(file_writer_->WriteFrame(encoded_image, video_codec_type_)); + + MutexLock lock(&lock_); + received_frames_count_++; + RTC_CHECK_LE(received_frames_count_, expected_frames_count_); + if (received_frames_count_ % kFrameLogInterval == 0) { + RTC_LOG(INFO) << received_frames_count_ << " out of " + << expected_frames_count_ << " frames written"; + } + next_frame_written_.Set(); + return Result(Result::Error::OK); + } + + void WaitNextFrameWritten(int timeout_ms) { + RTC_CHECK(next_frame_written_.Wait(timeout_ms)); + next_frame_written_.Reset(); + } + + private: + std::unique_ptr file_writer_; + const VideoCodecType video_codec_type_; + const int expected_frames_count_; + + Mutex lock_; + int received_frames_count_ RTC_GUARDED_BY(lock_) = 0; + rtc::Event next_frame_written_; +}; + +class Encoder { + public: + Encoder(int width, + int height, + int frames_count, + const std::string& output_file_name, + VideoCodecType video_codec_type, + std::unique_ptr video_encoder) + : video_encoder_(std::move(video_encoder)), + task_queue_(CreateDefaultTaskQueueFactory()->CreateTaskQueue( + "Encoder", + TaskQueueFactory::Priority::HIGH)) 
{ + ivf_writer_callback_ = std::make_unique( + output_file_name, video_codec_type, frames_count); + + task_queue_.PostTask([width, height, video_codec_type, this]() { + VideoCodec codec_settings; + CodecSettings(video_codec_type, &codec_settings); + codec_settings.width = width; + codec_settings.height = height; + codec_settings.maxFramerate = kMaxFramerate; + codec_settings.startBitrate = kBitrateBps; + codec_settings.minBitrate = kBitrateBps; + codec_settings.maxBitrate = kBitrateBps; + switch (video_codec_type) { + case VideoCodecType::kVideoCodecVP8: { + VideoCodecVP8* vp8_settings = codec_settings.VP8(); + vp8_settings->frameDroppingOn = false; + vp8_settings->keyFrameInterval = kKeyFrameIntervalMs; + vp8_settings->denoisingOn = false; + } break; + case VideoCodecType::kVideoCodecVP9: { + VideoCodecVP9* vp9_settings = codec_settings.VP9(); + vp9_settings->denoisingOn = false; + vp9_settings->frameDroppingOn = false; + vp9_settings->keyFrameInterval = kKeyFrameIntervalMs; + vp9_settings->automaticResizeOn = false; + } break; + case VideoCodecType::kVideoCodecH264: { + VideoCodecH264* h264_settings = codec_settings.H264(); + h264_settings->frameDroppingOn = false; + h264_settings->keyFrameInterval = kKeyFrameIntervalMs; + } break; + default: + RTC_CHECK(false) << "Unsupported codec type"; + } + VideoBitrateAllocation bitrate_allocation; + bitrate_allocation.SetBitrate(0, 0, kBitrateBps); + + video_encoder_->RegisterEncodeCompleteCallback( + ivf_writer_callback_.get()); + RTC_CHECK_EQ( + WEBRTC_VIDEO_CODEC_OK, + video_encoder_->InitEncode( + &codec_settings, + VideoEncoder::Settings(kCapabilities, /*number_of_cores=*/4, + /*max_payload_size=*/0))); + video_encoder_->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, + static_cast(codec_settings.maxFramerate))); + }); + } + + void Encode(const VideoFrame& frame) { + task_queue_.PostTask([frame, this]() { + RTC_CHECK_EQ(WEBRTC_VIDEO_CODEC_OK, + video_encoder_->Encode(frame, nullptr)); + }); + } + + 
void WaitNextFrameWritten(int timeout_ms) { + ivf_writer_callback_->WaitNextFrameWritten(timeout_ms); + } + + private: + std::unique_ptr video_encoder_; + std::unique_ptr ivf_writer_callback_; + + rtc::TaskQueue task_queue_; +}; + +int GetFrameCount(std::string yuv_file_name, int width, int height) { + std::unique_ptr yuv_reader = + std::make_unique(std::move(yuv_file_name), width, + height); + RTC_CHECK(yuv_reader->Init()); + int frames_count = yuv_reader->NumberOfFrames(); + yuv_reader->Close(); + return frames_count; +} + +VideoFrame BuildFrame(FrameGeneratorInterface::VideoFrameData frame_data, + uint32_t rtp_timestamp) { + return VideoFrame::Builder() + .set_video_frame_buffer(frame_data.buffer) + .set_update_rect(frame_data.update_rect) + .set_timestamp_rtp(rtp_timestamp) + .build(); +} + +void WriteVideoFile(std::string input_file_name, + int width, + int height, + std::string output_file_name, + VideoCodecType video_codec_type, + std::unique_ptr video_encoder) { + int frames_count = GetFrameCount(input_file_name, width, height); + + std::unique_ptr frame_generator = + CreateFromYuvFileFrameGenerator({input_file_name}, width, height, + /*frame_repeat_count=*/1); + + Encoder encoder(width, height, frames_count, output_file_name, + video_codec_type, std::move(video_encoder)); + + uint32_t last_frame_timestamp = 0; + + for (int i = 0; i < frames_count; ++i) { + const uint32_t timestamp = + last_frame_timestamp + kVideoPayloadTypeFrequency / kMaxFramerate; + VideoFrame frame = BuildFrame(frame_generator->NextFrame(), timestamp); + + last_frame_timestamp = timestamp; + + encoder.Encode(frame); + encoder.WaitNextFrameWritten(kMaxFrameEncodeWaitTimeoutMs); + + if ((i + 1) % kFrameLogInterval == 0) { + RTC_LOG(INFO) << i + 1 << " out of " << frames_count + << " frames are sent for encoding"; + } + } + RTC_LOG(INFO) << "All " << frames_count << " frame are sent for encoding"; +} + +} // namespace +} // namespace test +} // namespace webrtc + +int main(int argc, char* 
argv[]) { + // Initialize the symbolizer to get a human-readable stack trace. + absl::InitializeSymbolizer(argv[0]); + + absl::FailureSignalHandlerOptions options; + absl::InstallFailureSignalHandler(options); + + absl::ParseCommandLine(argc, argv); + + std::string codec_name = absl::GetFlag(FLAGS_codec); + std::string input_file_name = absl::GetFlag(FLAGS_input); + std::string output_file_name = absl::GetFlag(FLAGS_output); + int width = absl::GetFlag(FLAGS_width); + int height = absl::GetFlag(FLAGS_height); + RTC_CHECK_NE(input_file_name, "") << "--input is required"; + RTC_CHECK_NE(output_file_name, "") << "--output is required"; + RTC_CHECK_GT(width, 0) << "width must be greater then 0"; + RTC_CHECK_GT(height, 0) << "height must be greater then 0"; + if (absl::EqualsIgnoreCase(codec_name, cricket::kVp8CodecName)) { + webrtc::test::WriteVideoFile( + input_file_name, width, height, output_file_name, + webrtc::VideoCodecType::kVideoCodecVP8, webrtc::VP8Encoder::Create()); + return 0; + } + if (absl::EqualsIgnoreCase(codec_name, cricket::kVp9CodecName)) { + webrtc::test::WriteVideoFile( + input_file_name, width, height, output_file_name, + webrtc::VideoCodecType::kVideoCodecVP9, webrtc::VP9Encoder::Create()); + return 0; + } +#if defined(WEBRTC_USE_H264) + if (absl::EqualsIgnoreCase(codec_name, cricket::kH264CodecName)) { + webrtc::test::WriteVideoFile( + input_file_name, width, height, output_file_name, + webrtc::VideoCodecType::kVideoCodecH264, + webrtc::H264Encoder::Create( + cricket::VideoCodec(cricket::kH264CodecName))); + return 0; + } +#endif + RTC_CHECK(false) << "Unsupported codec: " << codec_name; + return 1; +} diff --git a/rtc_tools/frame_analyzer/frame_analyzer.cc b/rtc_tools/frame_analyzer/frame_analyzer.cc index 1a22bd6214..70af305e61 100644 --- a/rtc_tools/frame_analyzer/frame_analyzer.cc +++ b/rtc_tools/frame_analyzer/frame_analyzer.cc @@ -164,7 +164,9 @@ int main(int argc, char* argv[]) { std::string chartjson_result_file = 
absl::GetFlag(FLAGS_chartjson_result_file); if (!chartjson_result_file.empty()) { - webrtc::test::WritePerfResults(chartjson_result_file); + if (!webrtc::test::WritePerfResults(chartjson_result_file)) { + return 1; + } } std::string aligned_output_file = absl::GetFlag(FLAGS_aligned_output_file); if (!aligned_output_file.empty()) { diff --git a/rtc_tools/loopback_test/OWNERS b/rtc_tools/loopback_test/OWNERS deleted file mode 100644 index 296f71fffc..0000000000 --- a/rtc_tools/loopback_test/OWNERS +++ /dev/null @@ -1 +0,0 @@ -andresp@webrtc.org diff --git a/rtc_tools/metrics_plotter.py b/rtc_tools/metrics_plotter.py index 54ccee9c01..3b746ad8ee 100644 --- a/rtc_tools/metrics_plotter.py +++ b/rtc_tools/metrics_plotter.py @@ -24,6 +24,7 @@ } """ +import argparse import fileinput import json import matplotlib.pyplot as plt @@ -38,36 +39,59 @@ def main(): - metrics = [] - for line in fileinput.input(): - line = line.strip() - if line.startswith(LINE_PREFIX): - line = line.replace(LINE_PREFIX, '') - metrics.append(json.loads(line)) - else: - print line - - for metric in metrics: - figure = plt.figure() - figure.canvas.set_window_title(metric[TRACE_NAME]) - - x_values = [] - y_values = [] - start_x = None - for sample in metric['samples']: - if start_x is None: - start_x = sample['time'] - # Time is us, we want to show it in seconds. - x_values.append((sample['time'] - start_x) / MICROSECONDS_IN_SECOND) - y_values.append(sample['value']) - - plt.ylabel('%s (%s)' % (metric[GRAPH_NAME], metric[UNITS])) - plt.xlabel('time (s)') - plt.title(metric[GRAPH_NAME]) - plt.plot(x_values, y_values) - - plt.show() + parser = argparse.ArgumentParser( + description='Plots metrics exported from WebRTC perf tests') + parser.add_argument( + '-m', + '--metrics', + type=str, + nargs='*', + help= + 'Metrics to plot. 
If nothing specified then will plot all available') + args = parser.parse_args() + + metrics_to_plot = set() + if args.metrics: + for metric in args.metrics: + metrics_to_plot.add(metric) + + metrics = [] + for line in fileinput.input('-'): + line = line.strip() + if line.startswith(LINE_PREFIX): + line = line.replace(LINE_PREFIX, '') + metrics.append(json.loads(line)) + else: + print line + + for metric in metrics: + if len(metrics_to_plot + ) > 0 and metric[GRAPH_NAME] not in metrics_to_plot: + continue + + figure = plt.figure() + figure.canvas.set_window_title(metric[TRACE_NAME]) + + x_values = [] + y_values = [] + start_x = None + samples = metric['samples'] + samples.sort(key=lambda x: x['time']) + for sample in samples: + if start_x is None: + start_x = sample['time'] + # Time is us, we want to show it in seconds. + x_values.append( + (sample['time'] - start_x) / MICROSECONDS_IN_SECOND) + y_values.append(sample['value']) + + plt.ylabel('%s (%s)' % (metric[GRAPH_NAME], metric[UNITS])) + plt.xlabel('time (s)') + plt.title(metric[GRAPH_NAME]) + plt.plot(x_values, y_values) + + plt.show() if __name__ == '__main__': - main() + main() diff --git a/rtc_tools/network_tester/BUILD.gn b/rtc_tools/network_tester/BUILD.gn index 8c3a0186d7..ff823806e2 100644 --- a/rtc_tools/network_tester/BUILD.gn +++ b/rtc_tools/network_tester/BUILD.gn @@ -48,10 +48,11 @@ if (rtc_enable_protobuf) { "../../rtc_base:protobuf_utils", "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_task_queue", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/third_party/sigslot", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } network_tester_unittests_resources = [ @@ -101,7 +102,7 @@ if (is_android) { testonly = true apk_name = "NetworkTesterMobile" android_manifest = "androidapp/AndroidManifest.xml" - min_sdk_version = 17 + min_sdk_version = 21 target_sdk_version = 24 
deps = [ @@ -115,13 +116,14 @@ if (is_android) { rtc_android_library("NetworkTesterMobile_javalib") { testonly = true - android_manifest_for_lint = "androidapp/AndroidManifest.xml" + android_manifest = "androidapp/AndroidManifest.xml" sources = [ "androidapp/src/com/google/media/networktester/MainActivity.java", "androidapp/src/com/google/media/networktester/NetworkTester.java", ] + resources_package = "com.google.media.networktester" deps = [ ":NetworkTesterMobile_resources", "../../rtc_base:base_java", @@ -130,8 +132,24 @@ if (is_android) { android_resources("NetworkTesterMobile_resources") { testonly = true - resource_dirs = [ "androidapp/res" ] custom_package = "com.google.media.networktester" + sources = [ + "androidapp/res/layout/activity_main.xml", + "androidapp/res/mipmap-hdpi/ic_launcher.png", + "androidapp/res/mipmap-mdpi/ic_launcher.png", + "androidapp/res/mipmap-xhdpi/ic_launcher.png", + "androidapp/res/mipmap-xxhdpi/ic_launcher.png", + "androidapp/res/mipmap-xxxhdpi/ic_launcher.png", + "androidapp/res/values-v17/styles.xml", + "androidapp/res/values-w820dp/dimens.xml", + "androidapp/res/values/colors.xml", + "androidapp/res/values/dimens.xml", + "androidapp/res/values/strings.xml", + ] + + # Needed for Bazel converter. + resource_dirs = [ "androidapp/res" ] + assert(resource_dirs != []) # Mark as used. } rtc_shared_library("network_tester_so") { diff --git a/rtc_tools/network_tester/androidapp/AndroidManifest.xml b/rtc_tools/network_tester/androidapp/AndroidManifest.xml index f391063f6d..1ff519396b 100755 --- a/rtc_tools/network_tester/androidapp/AndroidManifest.xml +++ b/rtc_tools/network_tester/androidapp/AndroidManifest.xml @@ -4,7 +4,7 @@ - -
\ No newline at end of file + diff --git a/rtc_tools/network_tester/androidapp/src/com/google/media/networktester/MainActivity.java b/rtc_tools/network_tester/androidapp/src/com/google/media/networktester/MainActivity.java index 2a78d6b9f9..33f442fce2 100644 --- a/rtc_tools/network_tester/androidapp/src/com/google/media/networktester/MainActivity.java +++ b/rtc_tools/network_tester/androidapp/src/com/google/media/networktester/MainActivity.java @@ -11,12 +11,8 @@ package com.google.media.networktester; import android.app.Activity; -import android.content.Context; -import android.net.ConnectivityManager; -import android.net.NetworkInfo; import android.os.Bundle; import android.os.Handler; -import android.os.ParcelFileDescriptor; import android.view.View; import android.view.View.OnClickListener; import android.view.WindowManager; @@ -25,7 +21,7 @@ public class MainActivity extends Activity { Button startButton; Button stopButton; - NetworkTester networkTester = null; + NetworkTester networkTester; Handler mainThreadHandler; @Override diff --git a/rtc_tools/network_tester/create_network_tester_config.py b/rtc_tools/network_tester/create_network_tester_config.py index 57c1eb894f..de8d058f9d 100644 --- a/rtc_tools/network_tester/create_network_tester_config.py +++ b/rtc_tools/network_tester/create_network_tester_config.py @@ -10,21 +10,21 @@ import network_tester_config_pb2 -def AddConfig(all_configs, - packet_send_interval_ms, - packet_size, +def AddConfig(all_configs, packet_send_interval_ms, packet_size, execution_time_ms): - config = all_configs.configs.add() - config.packet_send_interval_ms = packet_send_interval_ms - config.packet_size = packet_size - config.execution_time_ms = execution_time_ms + config = all_configs.configs.add() + config.packet_send_interval_ms = packet_send_interval_ms + config.packet_size = packet_size + config.execution_time_ms = execution_time_ms + def main(): - all_configs = network_tester_config_pb2.NetworkTesterAllConfigs() - 
AddConfig(all_configs, 10, 50, 200) - AddConfig(all_configs, 10, 100, 200) - with open("network_tester_config.dat", 'wb') as f: - f.write(all_configs.SerializeToString()) + all_configs = network_tester_config_pb2.NetworkTesterAllConfigs() + AddConfig(all_configs, 10, 50, 200) + AddConfig(all_configs, 10, 100, 200) + with open("network_tester_config.dat", 'wb') as f: + f.write(all_configs.SerializeToString()) + if __name__ == "__main__": - main() + main() diff --git a/rtc_tools/network_tester/parse_packet_log.py b/rtc_tools/network_tester/parse_packet_log.py index 98fd0f6964..be86e0c88d 100755 --- a/rtc_tools/network_tester/parse_packet_log.py +++ b/rtc_tools/network_tester/parse_packet_log.py @@ -20,128 +20,131 @@ import network_tester_packet_pb2 + def GetSize(file_to_parse): - data = file_to_parse.read(1) - if data == '': - return 0 - return struct.unpack(' - self.window_time): - self.bytes = self.bytes - packet.packet_size - self.packet_window.remove(packet) - - def AddPacket(self, packet): - """This functions returns bits / second""" - self.send_interval = packet.arrival_timestamp - self.latest_packet_time - self.latest_packet_time = packet.arrival_timestamp - self.RemoveOldPackets() - self.packet_window.append(packet) - self.bytes = self.bytes + packet.packet_size - return self.bytes * 8 + def __init__(self): + self.packet_window = [] + self.window_time = 1000000 + self.bytes = 0 + self.latest_packet_time = 0 + self.send_interval = 0 + + def RemoveOldPackets(self): + for packet in self.packet_window: + if (self.latest_packet_time - packet.arrival_timestamp > + self.window_time): + self.bytes = self.bytes - packet.packet_size + self.packet_window.remove(packet) + + def AddPacket(self, packet): + """This functions returns bits / second""" + self.send_interval = packet.arrival_timestamp - self.latest_packet_time + self.latest_packet_time = packet.arrival_timestamp + self.RemoveOldPackets() + self.packet_window.append(packet) + self.bytes = self.bytes + 
packet.packet_size + return self.bytes * 8 def CreateReceiveBiratePlot(packets, plot): - bitrate = MovingAverageBitrate() - y = [bitrate.AddPacket(packet) for packet in packets] - plot.grid(True) - plot.set_title("Receive birate [bps]") - plot.plot(GetTimeAxis(packets), y) + bitrate = MovingAverageBitrate() + y = [bitrate.AddPacket(packet) for packet in packets] + plot.grid(True) + plot.set_title("Receive birate [bps]") + plot.plot(GetTimeAxis(packets), y) def CreatePacketlossPlot(packets, plot): - packets_look_up = {} - first_sequence_number = packets[0].sequence_number - last_sequence_number = packets[-1].sequence_number - for packet in packets: - packets_look_up[packet.sequence_number] = packet - y = [] - x = [] - first_arrival_time = 0 - last_arrival_time = 0 - last_arrival_time_diff = 0 - for sequence_number in range(first_sequence_number, last_sequence_number + 1): - if sequence_number in packets_look_up: - y.append(0) - if first_arrival_time == 0: - first_arrival_time = packets_look_up[sequence_number].arrival_timestamp - x_time = (packets_look_up[sequence_number].arrival_timestamp - - first_arrival_time) - if last_arrival_time != 0: - last_arrival_time_diff = x_time - last_arrival_time - last_arrival_time = x_time - x.append(x_time / 1000000.0) - else: - if last_arrival_time != 0 and last_arrival_time_diff != 0: - x.append((last_arrival_time + last_arrival_time_diff) / 1000000.0) - y.append(1) - plot.grid(True) - plot.set_title("Lost packets [0/1]") - plot.plot(x, y) + packets_look_up = {} + first_sequence_number = packets[0].sequence_number + last_sequence_number = packets[-1].sequence_number + for packet in packets: + packets_look_up[packet.sequence_number] = packet + y = [] + x = [] + first_arrival_time = 0 + last_arrival_time = 0 + last_arrival_time_diff = 0 + for sequence_number in range(first_sequence_number, + last_sequence_number + 1): + if sequence_number in packets_look_up: + y.append(0) + if first_arrival_time == 0: + first_arrival_time = 
packets_look_up[ + sequence_number].arrival_timestamp + x_time = (packets_look_up[sequence_number].arrival_timestamp - + first_arrival_time) + if last_arrival_time != 0: + last_arrival_time_diff = x_time - last_arrival_time + last_arrival_time = x_time + x.append(x_time / 1000000.0) + else: + if last_arrival_time != 0 and last_arrival_time_diff != 0: + x.append( + (last_arrival_time + last_arrival_time_diff) / 1000000.0) + y.append(1) + plot.grid(True) + plot.set_title("Lost packets [0/1]") + plot.plot(x, y) def main(): - parser = OptionParser() - parser.add_option("-f", - "--packet_log_file", - dest="packet_log_file", - help="packet_log file to parse") + parser = OptionParser() + parser.add_option("-f", + "--packet_log_file", + dest="packet_log_file", + help="packet_log file to parse") - options = parser.parse_args()[0] + options = parser.parse_args()[0] - packets = ParsePacketLog(options.packet_log_file) - f, plots = plt.subplots(3, sharex=True) - plt.xlabel('time [sec]') - CreateSendTimeDiffPlot(packets, plots[0]) - CreateReceiveBiratePlot(packets, plots[1]) - CreatePacketlossPlot(packets, plots[2]) - f.subplots_adjust(hspace=0.3) - plt.show() + packets = ParsePacketLog(options.packet_log_file) + f, plots = plt.subplots(3, sharex=True) + plt.xlabel('time [sec]') + CreateSendTimeDiffPlot(packets, plots[0]) + CreateReceiveBiratePlot(packets, plots[1]) + CreatePacketlossPlot(packets, plots[2]) + f.subplots_adjust(hspace=0.3) + plt.show() if __name__ == "__main__": - main() + main() diff --git a/rtc_tools/network_tester/test_controller.cc b/rtc_tools/network_tester/test_controller.cc index 49b470ce5f..85a5a57bc0 100644 --- a/rtc_tools/network_tester/test_controller.cc +++ b/rtc_tools/network_tester/test_controller.cc @@ -43,7 +43,7 @@ void TestController::SendConnectTo(const std::string& hostname, int port) { NetworkTesterPacket packet; packet.set_type(NetworkTesterPacket::HAND_SHAKING); SendData(packet, absl::nullopt); - rtc::CritScope 
scoped_lock(&local_test_done_lock_); + MutexLock scoped_lock(&local_test_done_lock_); local_test_done_ = false; remote_test_done_ = false; } @@ -71,13 +71,13 @@ void TestController::OnTestDone() { NetworkTesterPacket packet; packet.set_type(NetworkTesterPacket::TEST_DONE); SendData(packet, absl::nullopt); - rtc::CritScope scoped_lock(&local_test_done_lock_); + MutexLock scoped_lock(&local_test_done_lock_); local_test_done_ = true; } bool TestController::IsTestDone() { RTC_DCHECK_RUN_ON(&test_controller_thread_checker_); - rtc::CritScope scoped_lock(&local_test_done_lock_); + MutexLock scoped_lock(&local_test_done_lock_); return local_test_done_ && remote_test_done_; } @@ -100,7 +100,7 @@ void TestController::OnReadPacket(rtc::AsyncPacketSocket* socket, SendData(packet, absl::nullopt); packet_sender_.reset(new PacketSender(this, config_file_path_)); packet_sender_->StartSending(); - rtc::CritScope scoped_lock(&local_test_done_lock_); + MutexLock scoped_lock(&local_test_done_lock_); local_test_done_ = false; remote_test_done_ = false; break; @@ -108,7 +108,7 @@ void TestController::OnReadPacket(rtc::AsyncPacketSocket* socket, case NetworkTesterPacket::TEST_START: { packet_sender_.reset(new PacketSender(this, config_file_path_)); packet_sender_->StartSending(); - rtc::CritScope scoped_lock(&local_test_done_lock_); + MutexLock scoped_lock(&local_test_done_lock_); local_test_done_ = false; remote_test_done_ = false; break; diff --git a/rtc_tools/network_tester/test_controller.h b/rtc_tools/network_tester/test_controller.h index d04158d934..b73ac94329 100644 --- a/rtc_tools/network_tester/test_controller.h +++ b/rtc_tools/network_tester/test_controller.h @@ -22,9 +22,9 @@ #include "p2p/base/basic_packet_socket_factory.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ignore_wundef.h" #include "rtc_base/socket_address.h" +#include "rtc_base/synchronization/mutex.h" #include 
"rtc_base/synchronization/sequence_checker.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread_annotations.h" @@ -35,7 +35,6 @@ #ifdef WEBRTC_NETWORK_TESTER_PROTO RTC_PUSH_IGNORING_WUNDEF() #include "rtc_tools/network_tester/network_tester_packet.pb.h" - RTC_POP_IGNORING_WUNDEF() using webrtc::network_tester::packet::NetworkTesterPacket; #else @@ -75,7 +74,7 @@ class TestController : public sigslot::has_slots<> { rtc::BasicPacketSocketFactory socket_factory_; const std::string config_file_path_; PacketLogger packet_logger_; - rtc::CriticalSection local_test_done_lock_; + Mutex local_test_done_lock_; bool local_test_done_ RTC_GUARDED_BY(local_test_done_lock_); bool remote_test_done_; std::array send_data_; diff --git a/rtc_tools/py_event_log_analyzer/misc.py b/rtc_tools/py_event_log_analyzer/misc.py index 629497c018..c21f0c466b 100644 --- a/rtc_tools/py_event_log_analyzer/misc.py +++ b/rtc_tools/py_event_log_analyzer/misc.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Utility functions for calculating statistics. """ @@ -15,18 +14,17 @@ def CountReordered(sequence_numbers): - """Returns number of reordered indices. + """Returns number of reordered indices. A reordered index is an index `i` for which sequence_numbers[i] >= sequence_numbers[i + 1] """ - return sum(1 for (s1, s2) in zip(sequence_numbers, - sequence_numbers[1:]) if - s1 >= s2) + return sum(1 for (s1, s2) in zip(sequence_numbers, sequence_numbers[1:]) + if s1 >= s2) def SsrcNormalizedSizeTable(data_points): - """Counts proportion of data for every SSRC. + """Counts proportion of data for every SSRC. Args: data_points: list of pb_parse.DataPoint @@ -37,14 +35,14 @@ def SsrcNormalizedSizeTable(data_points): SSRC `s` to the total size of all packets. 
""" - mapping = collections.defaultdict(int) - for point in data_points: - mapping[point.ssrc] += point.size - return NormalizeCounter(mapping) + mapping = collections.defaultdict(int) + for point in data_points: + mapping[point.ssrc] += point.size + return NormalizeCounter(mapping) def NormalizeCounter(counter): - """Returns a normalized version of the dictionary `counter`. + """Returns a normalized version of the dictionary `counter`. Does not modify `counter`. @@ -52,12 +50,12 @@ def NormalizeCounter(counter): A new dictionary, in which every value in `counter` has been divided by the total to sum up to 1. """ - total = sum(counter.values()) - return {key: counter[key] / total for key in counter} + total = sum(counter.values()) + return {key: counter[key] / total for key in counter} def Unwrap(data, mod): - """Returns `data` unwrapped modulo `mod`. Does not modify data. + """Returns `data` unwrapped modulo `mod`. Does not modify data. Adds integer multiples of mod to all elements of data except the first, such that all pairs of consecutive elements (a, b) satisfy @@ -66,22 +64,22 @@ def Unwrap(data, mod): E.g. 
Unwrap([0, 1, 2, 0, 1, 2, 7, 8], 3) -> [0, 1, 2, 3, 4, 5, 4, 5] """ - lst = data[:] - for i in range(1, len(data)): - lst[i] = lst[i - 1] + (lst[i] - lst[i - 1] + - mod // 2) % mod - (mod // 2) - return lst + lst = data[:] + for i in range(1, len(data)): + lst[i] = lst[i - 1] + (lst[i] - lst[i - 1] + mod // 2) % mod - (mod // + 2) + return lst def SsrcDirections(data_points): - ssrc_is_incoming = {} - for point in data_points: - ssrc_is_incoming[point.ssrc] = point.incoming - return ssrc_is_incoming + ssrc_is_incoming = {} + for point in data_points: + ssrc_is_incoming[point.ssrc] = point.incoming + return ssrc_is_incoming # Python 2/3-compatible input function if sys.version_info[0] <= 2: - get_input = raw_input # pylint: disable=invalid-name + get_input = raw_input # pylint: disable=invalid-name else: - get_input = input # pylint: disable=invalid-name + get_input = input # pylint: disable=invalid-name diff --git a/rtc_tools/py_event_log_analyzer/misc_test.py b/rtc_tools/py_event_log_analyzer/misc_test.py index 33449a7076..e855dc7d11 100755 --- a/rtc_tools/py_event_log_analyzer/misc_test.py +++ b/rtc_tools/py_event_log_analyzer/misc_test.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- """Run the tests with python misc_test.py @@ -22,51 +21,52 @@ class TestMisc(unittest.TestCase): - def testUnwrapMod3(self): - data = [0, 1, 2, 0, -1, -2, -3, -4] - unwrapped_3 = misc.Unwrap(data, 3) - self.assertEqual([0, 1, 2, 3, 2, 1, 0, -1], unwrapped_3) + def testUnwrapMod3(self): + data = [0, 1, 2, 0, -1, -2, -3, -4] + unwrapped_3 = misc.Unwrap(data, 3) + self.assertEqual([0, 1, 2, 3, 2, 1, 0, -1], unwrapped_3) + + def testUnwrapMod4(self): + data = [0, 1, 2, 0, -1, -2, -3, -4] + unwrapped_4 = misc.Unwrap(data, 4) + self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], unwrapped_4) - def testUnwrapMod4(self): - data = [0, 1, 2, 0, -1, -2, -3, -4] - unwrapped_4 = misc.Unwrap(data, 4) - self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], unwrapped_4) + def testDataShouldNotChangeAfterUnwrap(self): + data = [0, 1, 2, 0, -1, -2, -3, -4] + _ = misc.Unwrap(data, 4) - def testDataShouldNotChangeAfterUnwrap(self): - data = [0, 1, 2, 0, -1, -2, -3, -4] - _ = misc.Unwrap(data, 4) + self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], data) - self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], data) + def testRandomlyMultiplesOfModAdded(self): + # `unwrap` definition says only multiples of mod are added. + random_data = [random.randint(0, 9) for _ in range(100)] - def testRandomlyMultiplesOfModAdded(self): - # `unwrap` definition says only multiples of mod are added. - random_data = [random.randint(0, 9) for _ in range(100)] + for mod in range(1, 100): + random_data_unwrapped_mod = misc.Unwrap(random_data, mod) - for mod in range(1, 100): - random_data_unwrapped_mod = misc.Unwrap(random_data, mod) + for (old_a, a) in zip(random_data, random_data_unwrapped_mod): + self.assertEqual((old_a - a) % mod, 0) - for (old_a, a) in zip(random_data, random_data_unwrapped_mod): - self.assertEqual((old_a - a) % mod, 0) + def testRandomlyAgainstInequalityDefinition(self): + # Data has to satisfy -mod/2 <= difference < mod/2 for every + # difference between consecutive values after unwrap. 
+ random_data = [random.randint(0, 9) for _ in range(100)] - def testRandomlyAgainstInequalityDefinition(self): - # Data has to satisfy -mod/2 <= difference < mod/2 for every - # difference between consecutive values after unwrap. - random_data = [random.randint(0, 9) for _ in range(100)] + for mod in range(1, 100): + random_data_unwrapped_mod = misc.Unwrap(random_data, mod) - for mod in range(1, 100): - random_data_unwrapped_mod = misc.Unwrap(random_data, mod) + for (a, b) in zip(random_data_unwrapped_mod, + random_data_unwrapped_mod[1:]): + self.assertTrue(-mod / 2 <= b - a < mod / 2) - for (a, b) in zip(random_data_unwrapped_mod, - random_data_unwrapped_mod[1:]): - self.assertTrue(-mod / 2 <= b - a < mod / 2) + def testRandomlyDataShouldNotChangeAfterUnwrap(self): + random_data = [random.randint(0, 9) for _ in range(100)] + random_data_copy = random_data[:] + for mod in range(1, 100): + _ = misc.Unwrap(random_data, mod) - def testRandomlyDataShouldNotChangeAfterUnwrap(self): - random_data = [random.randint(0, 9) for _ in range(100)] - random_data_copy = random_data[:] - for mod in range(1, 100): - _ = misc.Unwrap(random_data, mod) + self.assertEqual(random_data, random_data_copy) - self.assertEqual(random_data, random_data_copy) if __name__ == "__main__": - unittest.main() + unittest.main() diff --git a/rtc_tools/py_event_log_analyzer/pb_parse.py b/rtc_tools/py_event_log_analyzer/pb_parse.py index bc835ae023..23e6ae4487 100644 --- a/rtc_tools/py_event_log_analyzer/pb_parse.py +++ b/rtc_tools/py_event_log_analyzer/pb_parse.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- """Parses protobuf RTC dumps.""" from __future__ import division @@ -14,26 +13,26 @@ class DataPoint(object): - """Simple container class for RTP events.""" + """Simple container class for RTP events.""" - def __init__(self, rtp_header_str, packet_size, - arrival_timestamp_us, incoming): - """Builds a data point by parsing an RTP header, size and arrival time. + def __init__(self, rtp_header_str, packet_size, arrival_timestamp_us, + incoming): + """Builds a data point by parsing an RTP header, size and arrival time. RTP header structure is defined in RFC 3550 section 5.1. """ - self.size = packet_size - self.arrival_timestamp_ms = arrival_timestamp_us / 1000 - self.incoming = incoming - header = struct.unpack_from("!HHII", rtp_header_str, 0) - (first2header_bytes, self.sequence_number, self.timestamp, - self.ssrc) = header - self.payload_type = first2header_bytes & 0b01111111 - self.marker_bit = (first2header_bytes & 0b10000000) >> 7 + self.size = packet_size + self.arrival_timestamp_ms = arrival_timestamp_us / 1000 + self.incoming = incoming + header = struct.unpack_from("!HHII", rtp_header_str, 0) + (first2header_bytes, self.sequence_number, self.timestamp, + self.ssrc) = header + self.payload_type = first2header_bytes & 0b01111111 + self.marker_bit = (first2header_bytes & 0b10000000) >> 7 def ParseProtobuf(file_path): - """Parses RTC event log from protobuf file. + """Parses RTC event log from protobuf file. 
Args: file_path: path to protobuf file of RTC event stream @@ -41,12 +40,12 @@ def ParseProtobuf(file_path): Returns: all RTP packet events from the event stream as a list of DataPoints """ - event_stream = rtc_pb.EventStream() - with open(file_path, "rb") as f: - event_stream.ParseFromString(f.read()) - - return [DataPoint(event.rtp_packet.header, - event.rtp_packet.packet_length, - event.timestamp_us, event.rtp_packet.incoming) - for event in event_stream.stream - if event.HasField("rtp_packet")] + event_stream = rtc_pb.EventStream() + with open(file_path, "rb") as f: + event_stream.ParseFromString(f.read()) + + return [ + DataPoint(event.rtp_packet.header, event.rtp_packet.packet_length, + event.timestamp_us, event.rtp_packet.incoming) + for event in event_stream.stream if event.HasField("rtp_packet") + ] diff --git a/rtc_tools/py_event_log_analyzer/rtp_analyzer.py b/rtc_tools/py_event_log_analyzer/rtp_analyzer.py index ebf4d7fb2a..53f413552a 100644 --- a/rtc_tools/py_event_log_analyzer/rtp_analyzer.py +++ b/rtc_tools/py_event_log_analyzer/rtp_analyzer.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Displays statistics and plots graphs from RTC protobuf dump.""" from __future__ import division @@ -24,13 +23,13 @@ class RTPStatistics(object): - """Has methods for calculating and plotting RTP stream statistics.""" + """Has methods for calculating and plotting RTP stream statistics.""" - BANDWIDTH_SMOOTHING_WINDOW_SIZE = 10 - PLOT_RESOLUTION_MS = 50 + BANDWIDTH_SMOOTHING_WINDOW_SIZE = 10 + PLOT_RESOLUTION_MS = 50 - def __init__(self, data_points): - """Initializes object with data_points and computes simple statistics. + def __init__(self, data_points): + """Initializes object with data_points and computes simple statistics. Computes percentages of number of packets and packet sizes by SSRC. 
@@ -41,238 +40,245 @@ def __init__(self, data_points): """ - self.data_points = data_points - self.ssrc_frequencies = misc.NormalizeCounter( - collections.Counter([pt.ssrc for pt in self.data_points])) - self.ssrc_size_table = misc.SsrcNormalizedSizeTable(self.data_points) - self.bandwidth_kbps = None - self.smooth_bw_kbps = None - - def PrintHeaderStatistics(self): - print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format( - "SeqNo", "TimeStamp", "SendTime", "Size", "PT", "M", "SSRC")) - for point in self.data_points: - print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format( - point.sequence_number, point.timestamp, - int(point.arrival_timestamp_ms), point.size, point.payload_type, - point.marker_bit, "0x{:x}".format(point.ssrc))) - - def PrintSsrcInfo(self, ssrc_id, ssrc): - """Prints packet and size statistics for a given SSRC. + self.data_points = data_points + self.ssrc_frequencies = misc.NormalizeCounter( + collections.Counter([pt.ssrc for pt in self.data_points])) + self.ssrc_size_table = misc.SsrcNormalizedSizeTable(self.data_points) + self.bandwidth_kbps = None + self.smooth_bw_kbps = None + + def PrintHeaderStatistics(self): + print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format( + "SeqNo", "TimeStamp", "SendTime", "Size", "PT", "M", "SSRC")) + for point in self.data_points: + print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format( + point.sequence_number, point.timestamp, + int(point.arrival_timestamp_ms), point.size, + point.payload_type, point.marker_bit, + "0x{:x}".format(point.ssrc))) + + def PrintSsrcInfo(self, ssrc_id, ssrc): + """Prints packet and size statistics for a given SSRC. Args: ssrc_id: textual identifier of SSRC printed beside statistics for it. 
ssrc: SSRC by which to filter data and display statistics """ - filtered_ssrc = [point for point in self.data_points if point.ssrc - == ssrc] - payloads = misc.NormalizeCounter( - collections.Counter([point.payload_type for point in - filtered_ssrc])) - - payload_info = "payload type(s): {}".format( - ", ".join(str(payload) for payload in payloads)) - print("{} 0x{:x} {}, {:.2f}% packets, {:.2f}% data".format( - ssrc_id, ssrc, payload_info, self.ssrc_frequencies[ssrc] * 100, - self.ssrc_size_table[ssrc] * 100)) - print(" packet sizes:") - (bin_counts, bin_bounds) = numpy.histogram([point.size for point in - filtered_ssrc], bins=5, - density=False) - bin_proportions = bin_counts / sum(bin_counts) - print("\n".join([ - " {:.1f} - {:.1f}: {:.2f}%".format(bin_bounds[i], bin_bounds[i + 1], - bin_proportions[i] * 100) - for i in range(len(bin_proportions)) - ])) - - def ChooseSsrc(self): - """Queries user for SSRC.""" - - if len(self.ssrc_frequencies) == 1: - chosen_ssrc = self.ssrc_frequencies.keys()[0] - self.PrintSsrcInfo("", chosen_ssrc) - return chosen_ssrc - - ssrc_is_incoming = misc.SsrcDirections(self.data_points) - incoming = [ssrc for ssrc in ssrc_is_incoming if ssrc_is_incoming[ssrc]] - outgoing = [ssrc for ssrc in ssrc_is_incoming if not ssrc_is_incoming[ssrc]] - - print("\nIncoming:\n") - for (i, ssrc) in enumerate(incoming): - self.PrintSsrcInfo(i, ssrc) - - print("\nOutgoing:\n") - for (i, ssrc) in enumerate(outgoing): - self.PrintSsrcInfo(i + len(incoming), ssrc) - - while True: - chosen_index = int(misc.get_input("choose one> ")) - if 0 <= chosen_index < len(self.ssrc_frequencies): - return (incoming + outgoing)[chosen_index] - else: - print("Invalid index!") - - def FilterSsrc(self, chosen_ssrc): - """Filters and wraps data points. 
+ filtered_ssrc = [ + point for point in self.data_points if point.ssrc == ssrc + ] + payloads = misc.NormalizeCounter( + collections.Counter( + [point.payload_type for point in filtered_ssrc])) + + payload_info = "payload type(s): {}".format(", ".join( + str(payload) for payload in payloads)) + print("{} 0x{:x} {}, {:.2f}% packets, {:.2f}% data".format( + ssrc_id, ssrc, payload_info, self.ssrc_frequencies[ssrc] * 100, + self.ssrc_size_table[ssrc] * 100)) + print(" packet sizes:") + (bin_counts, + bin_bounds) = numpy.histogram([point.size for point in filtered_ssrc], + bins=5, + density=False) + bin_proportions = bin_counts / sum(bin_counts) + print("\n".join([ + " {:.1f} - {:.1f}: {:.2f}%".format(bin_bounds[i], + bin_bounds[i + 1], + bin_proportions[i] * 100) + for i in range(len(bin_proportions)) + ])) + + def ChooseSsrc(self): + """Queries user for SSRC.""" + + if len(self.ssrc_frequencies) == 1: + chosen_ssrc = self.ssrc_frequencies.keys()[0] + self.PrintSsrcInfo("", chosen_ssrc) + return chosen_ssrc + + ssrc_is_incoming = misc.SsrcDirections(self.data_points) + incoming = [ + ssrc for ssrc in ssrc_is_incoming if ssrc_is_incoming[ssrc] + ] + outgoing = [ + ssrc for ssrc in ssrc_is_incoming if not ssrc_is_incoming[ssrc] + ] + + print("\nIncoming:\n") + for (i, ssrc) in enumerate(incoming): + self.PrintSsrcInfo(i, ssrc) + + print("\nOutgoing:\n") + for (i, ssrc) in enumerate(outgoing): + self.PrintSsrcInfo(i + len(incoming), ssrc) + + while True: + chosen_index = int(misc.get_input("choose one> ")) + if 0 <= chosen_index < len(self.ssrc_frequencies): + return (incoming + outgoing)[chosen_index] + else: + print("Invalid index!") + + def FilterSsrc(self, chosen_ssrc): + """Filters and wraps data points. Removes data points with `ssrc != chosen_ssrc`. Unwraps sequence numbers and timestamps for the chosen selection. 
""" - self.data_points = [point for point in self.data_points if - point.ssrc == chosen_ssrc] - unwrapped_sequence_numbers = misc.Unwrap( - [point.sequence_number for point in self.data_points], 2**16 - 1) - for (data_point, sequence_number) in zip(self.data_points, - unwrapped_sequence_numbers): - data_point.sequence_number = sequence_number - - unwrapped_timestamps = misc.Unwrap([point.timestamp for point in - self.data_points], 2**32 - 1) - - for (data_point, timestamp) in zip(self.data_points, - unwrapped_timestamps): - data_point.timestamp = timestamp - - def PrintSequenceNumberStatistics(self): - seq_no_set = set(point.sequence_number for point in - self.data_points) - missing_sequence_numbers = max(seq_no_set) - min(seq_no_set) + ( - 1 - len(seq_no_set)) - print("Missing sequence numbers: {} out of {} ({:.2f}%)".format( - missing_sequence_numbers, - len(seq_no_set), - 100 * missing_sequence_numbers / len(seq_no_set) - )) - print("Duplicated packets: {}".format(len(self.data_points) - - len(seq_no_set))) - print("Reordered packets: {}".format( - misc.CountReordered([point.sequence_number for point in - self.data_points]))) - - def EstimateFrequency(self, always_query_sample_rate): - """Estimates frequency and updates data. 
+ self.data_points = [ + point for point in self.data_points if point.ssrc == chosen_ssrc + ] + unwrapped_sequence_numbers = misc.Unwrap( + [point.sequence_number for point in self.data_points], 2**16 - 1) + for (data_point, sequence_number) in zip(self.data_points, + unwrapped_sequence_numbers): + data_point.sequence_number = sequence_number + + unwrapped_timestamps = misc.Unwrap( + [point.timestamp for point in self.data_points], 2**32 - 1) + + for (data_point, timestamp) in zip(self.data_points, + unwrapped_timestamps): + data_point.timestamp = timestamp + + def PrintSequenceNumberStatistics(self): + seq_no_set = set(point.sequence_number for point in self.data_points) + missing_sequence_numbers = max(seq_no_set) - min(seq_no_set) + ( + 1 - len(seq_no_set)) + print("Missing sequence numbers: {} out of {} ({:.2f}%)".format( + missing_sequence_numbers, len(seq_no_set), + 100 * missing_sequence_numbers / len(seq_no_set))) + print("Duplicated packets: {}".format( + len(self.data_points) - len(seq_no_set))) + print("Reordered packets: {}".format( + misc.CountReordered( + [point.sequence_number for point in self.data_points]))) + + def EstimateFrequency(self, always_query_sample_rate): + """Estimates frequency and updates data. Guesses the most probable frequency by looking at changes in timestamps (RFC 3550 section 5.1), calculates clock drifts and sending time of packets. Updates `self.data_points` with changes in delay and send time. 
""" - delta_timestamp = (self.data_points[-1].timestamp - - self.data_points[0].timestamp) - delta_arr_timestamp = float((self.data_points[-1].arrival_timestamp_ms - - self.data_points[0].arrival_timestamp_ms)) - freq_est = delta_timestamp / delta_arr_timestamp - - freq_vec = [8, 16, 32, 48, 90] - freq = None - for f in freq_vec: - if abs((freq_est - f) / f) < 0.05: - freq = f - - print("Estimated frequency: {:.3f}kHz".format(freq_est)) - if freq is None or always_query_sample_rate: - if not always_query_sample_rate: - print ("Frequency could not be guessed.", end=" ") - freq = int(misc.get_input("Input frequency (in kHz)> ")) - else: - print("Guessed frequency: {}kHz".format(freq)) - - for point in self.data_points: - point.real_send_time_ms = (point.timestamp - - self.data_points[0].timestamp) / freq - point.delay = point.arrival_timestamp_ms - point.real_send_time_ms - - def PrintDurationStatistics(self): - """Prints delay, clock drift and bitrate statistics.""" - - min_delay = min(point.delay for point in self.data_points) - - for point in self.data_points: - point.absdelay = point.delay - min_delay - - stream_duration_sender = self.data_points[-1].real_send_time_ms / 1000 - print("Stream duration at sender: {:.1f} seconds".format( - stream_duration_sender - )) - - arrival_timestamps_ms = [point.arrival_timestamp_ms for point in - self.data_points] - stream_duration_receiver = (max(arrival_timestamps_ms) - - min(arrival_timestamps_ms)) / 1000 - print("Stream duration at receiver: {:.1f} seconds".format( - stream_duration_receiver - )) - - print("Clock drift: {:.2f}%".format( - 100 * (stream_duration_receiver / stream_duration_sender - 1) - )) - - total_size = sum(point.size for point in self.data_points) * 8 / 1000 - print("Send average bitrate: {:.2f} kbps".format( - total_size / stream_duration_sender)) - - print("Receive average bitrate: {:.2f} kbps".format( - total_size / stream_duration_receiver)) - - def RemoveReordered(self): - last = self.data_points[0] 
- data_points_ordered = [last] - for point in self.data_points[1:]: - if point.sequence_number > last.sequence_number and ( - point.real_send_time_ms > last.real_send_time_ms): - data_points_ordered.append(point) - last = point - self.data_points = data_points_ordered - - def ComputeBandwidth(self): - """Computes bandwidth averaged over several consecutive packets. + delta_timestamp = (self.data_points[-1].timestamp - + self.data_points[0].timestamp) + delta_arr_timestamp = float( + (self.data_points[-1].arrival_timestamp_ms - + self.data_points[0].arrival_timestamp_ms)) + freq_est = delta_timestamp / delta_arr_timestamp + + freq_vec = [8, 16, 32, 48, 90] + freq = None + for f in freq_vec: + if abs((freq_est - f) / f) < 0.05: + freq = f + + print("Estimated frequency: {:.3f}kHz".format(freq_est)) + if freq is None or always_query_sample_rate: + if not always_query_sample_rate: + print("Frequency could not be guessed.", end=" ") + freq = int(misc.get_input("Input frequency (in kHz)> ")) + else: + print("Guessed frequency: {}kHz".format(freq)) + + for point in self.data_points: + point.real_send_time_ms = (point.timestamp - + self.data_points[0].timestamp) / freq + point.delay = point.arrival_timestamp_ms - point.real_send_time_ms + + def PrintDurationStatistics(self): + """Prints delay, clock drift and bitrate statistics.""" + + min_delay = min(point.delay for point in self.data_points) + + for point in self.data_points: + point.absdelay = point.delay - min_delay + + stream_duration_sender = self.data_points[-1].real_send_time_ms / 1000 + print("Stream duration at sender: {:.1f} seconds".format( + stream_duration_sender)) + + arrival_timestamps_ms = [ + point.arrival_timestamp_ms for point in self.data_points + ] + stream_duration_receiver = (max(arrival_timestamps_ms) - + min(arrival_timestamps_ms)) / 1000 + print("Stream duration at receiver: {:.1f} seconds".format( + stream_duration_receiver)) + + print("Clock drift: {:.2f}%".format( + 100 * 
(stream_duration_receiver / stream_duration_sender - 1))) + + total_size = sum(point.size for point in self.data_points) * 8 / 1000 + print("Send average bitrate: {:.2f} kbps".format( + total_size / stream_duration_sender)) + + print("Receive average bitrate: {:.2f} kbps".format( + total_size / stream_duration_receiver)) + + def RemoveReordered(self): + last = self.data_points[0] + data_points_ordered = [last] + for point in self.data_points[1:]: + if point.sequence_number > last.sequence_number and ( + point.real_send_time_ms > last.real_send_time_ms): + data_points_ordered.append(point) + last = point + self.data_points = data_points_ordered + + def ComputeBandwidth(self): + """Computes bandwidth averaged over several consecutive packets. The number of consecutive packets used in the average is BANDWIDTH_SMOOTHING_WINDOW_SIZE. Averaging is done with numpy.correlate. """ - start_ms = self.data_points[0].real_send_time_ms - stop_ms = self.data_points[-1].real_send_time_ms - (self.bandwidth_kbps, _) = numpy.histogram( - [point.real_send_time_ms for point in self.data_points], - bins=numpy.arange(start_ms, stop_ms, - RTPStatistics.PLOT_RESOLUTION_MS), - weights=[point.size * 8 / RTPStatistics.PLOT_RESOLUTION_MS - for point in self.data_points] - ) - correlate_filter = (numpy.ones( - RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) / - RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) - self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps, correlate_filter) - - def PlotStatistics(self): - """Plots changes in delay and average bandwidth.""" - - start_ms = self.data_points[0].real_send_time_ms - stop_ms = self.data_points[-1].real_send_time_ms - time_axis = numpy.arange(start_ms / 1000, stop_ms / 1000, - RTPStatistics.PLOT_RESOLUTION_MS / 1000) - - delay = CalculateDelay(start_ms, stop_ms, - RTPStatistics.PLOT_RESOLUTION_MS, - self.data_points) - - plt.figure(1) - plt.plot(time_axis, delay[:len(time_axis)]) - plt.xlabel("Send time [s]") - plt.ylabel("Relative transport 
delay [ms]") - - plt.figure(2) - plt.plot(time_axis[:len(self.smooth_bw_kbps)], self.smooth_bw_kbps) - plt.xlabel("Send time [s]") - plt.ylabel("Bandwidth [kbps]") - - plt.show() + start_ms = self.data_points[0].real_send_time_ms + stop_ms = self.data_points[-1].real_send_time_ms + (self.bandwidth_kbps, _) = numpy.histogram( + [point.real_send_time_ms for point in self.data_points], + bins=numpy.arange(start_ms, stop_ms, + RTPStatistics.PLOT_RESOLUTION_MS), + weights=[ + point.size * 8 / RTPStatistics.PLOT_RESOLUTION_MS + for point in self.data_points + ]) + correlate_filter = ( + numpy.ones(RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) / + RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) + self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps, + correlate_filter) + + def PlotStatistics(self): + """Plots changes in delay and average bandwidth.""" + + start_ms = self.data_points[0].real_send_time_ms + stop_ms = self.data_points[-1].real_send_time_ms + time_axis = numpy.arange(start_ms / 1000, stop_ms / 1000, + RTPStatistics.PLOT_RESOLUTION_MS / 1000) + + delay = CalculateDelay(start_ms, stop_ms, + RTPStatistics.PLOT_RESOLUTION_MS, + self.data_points) + + plt.figure(1) + plt.plot(time_axis, delay[:len(time_axis)]) + plt.xlabel("Send time [s]") + plt.ylabel("Relative transport delay [ms]") + + plt.figure(2) + plt.plot(time_axis[:len(self.smooth_bw_kbps)], self.smooth_bw_kbps) + plt.xlabel("Send time [s]") + plt.ylabel("Bandwidth [kbps]") + + plt.show() def CalculateDelay(start, stop, step, points): - """Quantizes the time coordinates for the delay. + """Quantizes the time coordinates for the delay. Quantizes points by rounding the timestamps downwards to the nearest point in the time sequence start, start+step, start+2*step... Takes @@ -280,61 +286,67 @@ def CalculateDelay(start, stop, step, points): masked array, in which time points with no value are masked. 
""" - grouped_delays = [[] for _ in numpy.arange(start, stop + step, step)] - rounded_value_index = lambda x: int((x - start) / step) - for point in points: - grouped_delays[rounded_value_index(point.real_send_time_ms) - ].append(point.absdelay) - regularized_delays = [numpy.average(arr) if arr else -1 for arr in - grouped_delays] - return numpy.ma.masked_values(regularized_delays, -1) + grouped_delays = [[] for _ in numpy.arange(start, stop + step, step)] + rounded_value_index = lambda x: int((x - start) / step) + for point in points: + grouped_delays[rounded_value_index(point.real_send_time_ms)].append( + point.absdelay) + regularized_delays = [ + numpy.average(arr) if arr else -1 for arr in grouped_delays + ] + return numpy.ma.masked_values(regularized_delays, -1) def main(): - usage = "Usage: %prog [options] " - parser = optparse.OptionParser(usage=usage) - parser.add_option("--dump_header_to_stdout", - default=False, action="store_true", - help="print header info to stdout; similar to rtp_analyze") - parser.add_option("--query_sample_rate", - default=False, action="store_true", - help="always query user for real sample rate") + usage = "Usage: %prog [options] " + parser = optparse.OptionParser(usage=usage) + parser.add_option( + "--dump_header_to_stdout", + default=False, + action="store_true", + help="print header info to stdout; similar to rtp_analyze") + parser.add_option("--query_sample_rate", + default=False, + action="store_true", + help="always query user for real sample rate") + + parser.add_option("--working_directory", + default=None, + action="store", + help="directory in which to search for relative paths") - parser.add_option("--working_directory", - default=None, action="store", - help="directory in which to search for relative paths") + (options, args) = parser.parse_args() - (options, args) = parser.parse_args() + if len(args) < 1: + parser.print_help() + sys.exit(0) - if len(args) < 1: - parser.print_help() - sys.exit(0) + input_file = args[0] 
- input_file = args[0] + if options.working_directory and not os.path.isabs(input_file): + input_file = os.path.join(options.working_directory, input_file) - if options.working_directory and not os.path.isabs(input_file): - input_file = os.path.join(options.working_directory, input_file) + data_points = pb_parse.ParseProtobuf(input_file) + rtp_stats = RTPStatistics(data_points) - data_points = pb_parse.ParseProtobuf(input_file) - rtp_stats = RTPStatistics(data_points) + if options.dump_header_to_stdout: + print("Printing header info to stdout.", file=sys.stderr) + rtp_stats.PrintHeaderStatistics() + sys.exit(0) - if options.dump_header_to_stdout: - print("Printing header info to stdout.", file=sys.stderr) - rtp_stats.PrintHeaderStatistics() - sys.exit(0) + chosen_ssrc = rtp_stats.ChooseSsrc() + print("Chosen SSRC: 0X{:X}".format(chosen_ssrc)) - chosen_ssrc = rtp_stats.ChooseSsrc() - print("Chosen SSRC: 0X{:X}".format(chosen_ssrc)) + rtp_stats.FilterSsrc(chosen_ssrc) - rtp_stats.FilterSsrc(chosen_ssrc) + print("Statistics:") + rtp_stats.PrintSequenceNumberStatistics() + rtp_stats.EstimateFrequency(options.query_sample_rate) + rtp_stats.PrintDurationStatistics() + rtp_stats.RemoveReordered() + rtp_stats.ComputeBandwidth() + rtp_stats.PlotStatistics() - print("Statistics:") - rtp_stats.PrintSequenceNumberStatistics() - rtp_stats.EstimateFrequency(options.query_sample_rate) - rtp_stats.PrintDurationStatistics() - rtp_stats.RemoveReordered() - rtp_stats.ComputeBandwidth() - rtp_stats.PlotStatistics() if __name__ == "__main__": - main() + main() diff --git a/rtc_tools/py_event_log_analyzer/rtp_analyzer_test.py b/rtc_tools/py_event_log_analyzer/rtp_analyzer_test.py index dc6cb22509..bc93b6912d 100755 --- a/rtc_tools/py_event_log_analyzer/rtp_analyzer_test.py +++ b/rtc_tools/py_event_log_analyzer/rtp_analyzer_test.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. 
All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Run the tests with python rtp_analyzer_test.py @@ -19,43 +18,43 @@ MISSING_NUMPY = False # pylint: disable=invalid-name try: - import numpy - import rtp_analyzer + import numpy + import rtp_analyzer except ImportError: - MISSING_NUMPY = True + MISSING_NUMPY = True FakePoint = collections.namedtuple("FakePoint", ["real_send_time_ms", "absdelay"]) class TestDelay(unittest.TestCase): - def AssertMaskEqual(self, masked_array, data, mask): - self.assertEqual(list(masked_array.data), data) + def AssertMaskEqual(self, masked_array, data, mask): + self.assertEqual(list(masked_array.data), data) - if isinstance(masked_array.mask, numpy.bool_): - array_mask = masked_array.mask - else: - array_mask = list(masked_array.mask) - self.assertEqual(array_mask, mask) + if isinstance(masked_array.mask, numpy.bool_): + array_mask = masked_array.mask + else: + array_mask = list(masked_array.mask) + self.assertEqual(array_mask, mask) - def testCalculateDelaySimple(self): - points = [FakePoint(0, 0), FakePoint(1, 0)] - mask = rtp_analyzer.CalculateDelay(0, 1, 1, points) - self.AssertMaskEqual(mask, [0, 0], False) + def testCalculateDelaySimple(self): + points = [FakePoint(0, 0), FakePoint(1, 0)] + mask = rtp_analyzer.CalculateDelay(0, 1, 1, points) + self.AssertMaskEqual(mask, [0, 0], False) - def testCalculateDelayMissing(self): - points = [FakePoint(0, 0), FakePoint(2, 0)] - mask = rtp_analyzer.CalculateDelay(0, 2, 1, points) - self.AssertMaskEqual(mask, [0, -1, 0], [False, True, False]) + def testCalculateDelayMissing(self): + points = [FakePoint(0, 0), FakePoint(2, 0)] + mask = rtp_analyzer.CalculateDelay(0, 2, 1, points) + self.AssertMaskEqual(mask, [0, -1, 0], [False, True, False]) - def testCalculateDelayBorders(self): - points = [FakePoint(0, 0), FakePoint(2, 0)] - mask = rtp_analyzer.CalculateDelay(0, 3, 2, points) - self.AssertMaskEqual(mask, [0, 0, -1], [False, False, 
True]) + def testCalculateDelayBorders(self): + points = [FakePoint(0, 0), FakePoint(2, 0)] + mask = rtp_analyzer.CalculateDelay(0, 3, 2, points) + self.AssertMaskEqual(mask, [0, 0, -1], [False, False, True]) if __name__ == "__main__": - if MISSING_NUMPY: - print "Missing numpy, skipping test." - else: - unittest.main() + if MISSING_NUMPY: + print "Missing numpy, skipping test." + else: + unittest.main() diff --git a/rtc_tools/rtc_event_log_visualizer/alerts.cc b/rtc_tools/rtc_event_log_visualizer/alerts.cc new file mode 100644 index 0000000000..2d1868fa28 --- /dev/null +++ b/rtc_tools/rtc_event_log_visualizer/alerts.cc @@ -0,0 +1,235 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_tools/rtc_event_log_visualizer/alerts.h" + +#include + +#include +#include +#include +#include + +#include "logging/rtc_event_log/rtc_event_processor.h" +#include "rtc_base/checks.h" +#include "rtc_base/format_macros.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +void TriageHelper::AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction) { + // With 100 packets/s (~800kbps), false positives would require 10 s without + // data. + constexpr int64_t kMaxSeqNumJump = 1000; + // With a 90 kHz clock, false positives would require 10 s without data. + constexpr int64_t kTicksPerMillisec = 90; + constexpr int64_t kCaptureTimeGraceMs = 10000; + + std::string seq_num_explanation = + direction == kIncomingPacket + ? "Incoming RTP sequence number jumps more than 1000. 
Counter may " + "have been reset or rewritten incorrectly in a group call." + : "Outgoing RTP sequence number jumps more than 1000. Counter may " + "have been reset."; + std::string capture_time_explanation = + direction == kIncomingPacket ? "Incoming capture time jumps more than " + "10s. Clock might have been reset." + : "Outgoing capture time jumps more than " + "10s. Clock might have been reset."; + TriageAlertType seq_num_alert = direction == kIncomingPacket + ? TriageAlertType::kIncomingSeqNumJump + : TriageAlertType::kOutgoingSeqNumJump; + TriageAlertType capture_time_alert = + direction == kIncomingPacket ? TriageAlertType::kIncomingCaptureTimeJump + : TriageAlertType::kOutgoingCaptureTimeJump; + + const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us(); + + // Check for gaps in sequence numbers and capture timestamps. + for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) { + if (IsRtxSsrc(parsed_log, direction, stream.ssrc)) { + continue; + } + auto packets = stream.packet_view; + if (packets.empty()) { + continue; + } + SeqNumUnwrapper seq_num_unwrapper; + int64_t last_seq_num = + seq_num_unwrapper.Unwrap(packets[0].header.sequenceNumber); + SeqNumUnwrapper capture_time_unwrapper; + int64_t last_capture_time = + capture_time_unwrapper.Unwrap(packets[0].header.timestamp); + int64_t last_log_time_ms = packets[0].log_time_ms(); + for (const auto& packet : packets) { + if (packet.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. 
+ break; + } + + int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber); + if (std::abs(seq_num - last_seq_num) > kMaxSeqNumJump) { + Alert(seq_num_alert, config_.GetCallTimeSec(packet.log_time_us()), + seq_num_explanation); + } + last_seq_num = seq_num; + + int64_t capture_time = + capture_time_unwrapper.Unwrap(packet.header.timestamp); + if (std::abs(capture_time - last_capture_time) > + kTicksPerMillisec * + (kCaptureTimeGraceMs + packet.log_time_ms() - last_log_time_ms)) { + Alert(capture_time_alert, config_.GetCallTimeSec(packet.log_time_us()), + capture_time_explanation); + } + last_capture_time = capture_time; + } + } +} + +void TriageHelper::AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction) { + constexpr int64_t kMaxRtpTransmissionGap = 500000; + constexpr int64_t kMaxRtcpTransmissionGap = 3000000; + std::string rtp_explanation = + direction == kIncomingPacket + ? "No RTP packets received for more than 500ms. This indicates a " + "network problem. Temporary video freezes and choppy or robotic " + "audio is unavoidable. Unnecessary BWE drops is a known issue." + : "No RTP packets sent for more than 500 ms. This might be an issue " + "with the pacer."; + std::string rtcp_explanation = + direction == kIncomingPacket + ? "No RTCP packets received for more than 3 s. Could be a longer " + "connection outage" + : "No RTCP packets sent for more than 3 s. This is most likely a " + "bug."; + TriageAlertType rtp_alert = direction == kIncomingPacket + ? TriageAlertType::kIncomingRtpGap + : TriageAlertType::kOutgoingRtpGap; + TriageAlertType rtcp_alert = direction == kIncomingPacket + ? TriageAlertType::kIncomingRtcpGap + : TriageAlertType::kOutgoingRtcpGap; + + const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us(); + + // TODO(terelius): The parser could provide a list of all packets, ordered + // by time, for each direction. 
+ std::multimap rtp_in_direction; + for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) { + for (const LoggedRtpPacket& rtp_packet : stream.packet_view) + rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet); + } + absl::optional last_rtp_time; + for (const auto& kv : rtp_in_direction) { + int64_t timestamp = kv.first; + if (timestamp > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + int64_t duration = timestamp - last_rtp_time.value_or(0); + if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) { + // No packet sent/received for more than 500 ms. + Alert(rtp_alert, config_.GetCallTimeSec(timestamp), rtp_explanation); + } + last_rtp_time.emplace(timestamp); + } + + absl::optional last_rtcp_time; + if (direction == kIncomingPacket) { + for (const auto& rtcp : parsed_log.incoming_rtcp_packets()) { + if (rtcp.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); + if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { + // No feedback sent/received for more than 2000 ms. + Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()), + rtcp_explanation); + } + last_rtcp_time.emplace(rtcp.log_time_us()); + } + } else { + for (const auto& rtcp : parsed_log.outgoing_rtcp_packets()) { + if (rtcp.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); + if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { + // No feedback sent/received for more than 2000 ms. + Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()), + rtcp_explanation); + } + last_rtcp_time.emplace(rtcp.log_time_us()); + } + } +} + +// TODO(terelius): Notifications could possibly be generated by the same code +// that produces the graphs. 
There is some code duplication that could be +// avoided, but that might be solved anyway when we move functionality from the +// analyzer to the parser. +void TriageHelper::AnalyzeLog(const ParsedRtcEventLog& parsed_log) { + AnalyzeStreamGaps(parsed_log, kIncomingPacket); + AnalyzeStreamGaps(parsed_log, kOutgoingPacket); + AnalyzeTransmissionGaps(parsed_log, kIncomingPacket); + AnalyzeTransmissionGaps(parsed_log, kOutgoingPacket); + + const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us(); + + int64_t first_occurrence = parsed_log.last_timestamp(); + constexpr double kMaxLossFraction = 0.05; + // Loss feedback + int64_t total_lost_packets = 0; + int64_t total_expected_packets = 0; + for (auto& bwe_update : parsed_log.bwe_loss_updates()) { + if (bwe_update.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + int64_t lost_packets = static_cast(bwe_update.fraction_lost) / 255 * + bwe_update.expected_packets; + total_lost_packets += lost_packets; + total_expected_packets += bwe_update.expected_packets; + if (bwe_update.fraction_lost >= 255 * kMaxLossFraction) { + first_occurrence = std::min(first_occurrence, bwe_update.log_time_us()); + } + } + double avg_outgoing_loss = + static_cast(total_lost_packets) / total_expected_packets; + if (avg_outgoing_loss > kMaxLossFraction) { + Alert(TriageAlertType::kOutgoingHighLoss, first_occurrence, + "More than 5% of outgoing packets lost."); + } +} + +void TriageHelper::Print(FILE* file) { + fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n"); + for (const auto& alert : triage_alerts_) { + fprintf(file, "%d %s. 
First occurrence at %3.3lf\n", alert.second.count, + alert.second.explanation.c_str(), alert.second.first_occurrence); + } + fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n"); +} + +void TriageHelper::ProcessAlerts( + std::function f) { + for (const auto& alert : triage_alerts_) { + f(alert.second.count, alert.second.first_occurrence, + alert.second.explanation); + } +} + +} // namespace webrtc diff --git a/rtc_tools/rtc_event_log_visualizer/alerts.h b/rtc_tools/rtc_event_log_visualizer/alerts.h new file mode 100644 index 0000000000..d3e41666aa --- /dev/null +++ b/rtc_tools/rtc_event_log_visualizer/alerts.h @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_ +#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_ + +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" + +namespace webrtc { + +enum class TriageAlertType { + kUnknown = 0, + kIncomingRtpGap, + kOutgoingRtpGap, + kIncomingRtcpGap, + kOutgoingRtcpGap, + kIncomingSeqNumJump, + kOutgoingSeqNumJump, + kIncomingCaptureTimeJump, + kOutgoingCaptureTimeJump, + kOutgoingHighLoss, + kLast, +}; + +struct TriageAlert { + TriageAlertType type = TriageAlertType::kUnknown; + int count = 0; + float first_occurrence = -1; + std::string explanation; +}; + +class TriageHelper { + public: + explicit TriageHelper(const AnalyzerConfig& config) : config_(config) {} + + void AnalyzeLog(const ParsedRtcEventLog& parsed_log); + + void AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction); + void AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction); + void Print(FILE* file); + + void ProcessAlerts(std::function f); + + private: + AnalyzerConfig config_; + std::map triage_alerts_; + + void Alert(TriageAlertType type, + float time_seconds, + absl::string_view explanation) { + std::map::iterator it = + triage_alerts_.find(type); + + if (it == triage_alerts_.end()) { + TriageAlert alert; + alert.type = type; + alert.first_occurrence = time_seconds; + alert.count = 1; + alert.explanation = std::string(explanation); + triage_alerts_.insert(std::make_pair(type, alert)); + } else { + it->second.count += 1; + } + } + RTC_DISALLOW_COPY_AND_ASSIGN(TriageHelper); +}; + +} // namespace webrtc + +#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_ diff --git a/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc new file mode 
100644 index 0000000000..becc0044ab --- /dev/null +++ b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc @@ -0,0 +1,503 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h" + +#include +#include +#include +#include + +#include "modules/audio_coding/neteq/tools/audio_sink.h" +#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h" +#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h" +#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h" +#include "modules/audio_coding/neteq/tools/neteq_test.h" +#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event) + -> absl::optional { + if (ana_event.config.bitrate_bps) + return absl::optional( + static_cast(*ana_event.config.bitrate_bps)); + return absl::nullopt; + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints( + ToCallTime, GetAnaBitrateBps, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + 
plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder target bitrate"); +} + +void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder frame length", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaFrameLengthMs = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.frame_length_ms) + return absl::optional( + static_cast(*ana_event.config.frame_length_ms)); + return absl::optional(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints( + ToCallTime, GetAnaFrameLengthMs, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder frame length"); +} + +void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder uplink packet loss fraction", + LineStyle::kLine, PointStyle::kHighlight); + auto GetAnaPacketLoss = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.uplink_packet_loss_fraction) + return absl::optional(static_cast( + *ana_event.config.uplink_packet_loss_fraction)); + return absl::optional(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints( + ToCallTime, GetAnaPacketLoss, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + 
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin, + kTopMargin); + plot->SetTitle("Reported audio encoder lost packets"); +} + +void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder FEC", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaFecEnabled = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.enable_fec) + return absl::optional( + static_cast(*ana_event.config.enable_fec)); + return absl::optional(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints( + ToCallTime, GetAnaFecEnabled, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder FEC"); +} + +void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder DTX", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaDtxEnabled = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.enable_dtx) + return absl::optional( + static_cast(*ana_event.config.enable_dtx)); + return absl::optional(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints( + ToCallTime, GetAnaDtxEnabled, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + 
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder DTX"); +} + +void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaNumChannels = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.num_channels) + return absl::optional( + static_cast(*ana_event.config.num_channels)); + return absl::optional(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints( + ToCallTime, GetAnaNumChannels, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))", + kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder number of channels"); +} + +class NetEqStreamInput : public test::NetEqInput { + public: + // Does not take any ownership, and all pointers must refer to valid objects + // that outlive the one constructed. 
+ NetEqStreamInput(const std::vector* packet_stream, + const std::vector* output_events, + absl::optional end_time_ms) + : packet_stream_(*packet_stream), + packet_stream_it_(packet_stream_.begin()), + output_events_it_(output_events->begin()), + output_events_end_(output_events->end()), + end_time_ms_(end_time_ms) { + RTC_DCHECK(packet_stream); + RTC_DCHECK(output_events); + } + + absl::optional NextPacketTime() const override { + if (packet_stream_it_ == packet_stream_.end()) { + return absl::nullopt; + } + if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) { + return absl::nullopt; + } + return packet_stream_it_->rtp.log_time_ms(); + } + + absl::optional NextOutputEventTime() const override { + if (output_events_it_ == output_events_end_) { + return absl::nullopt; + } + if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) { + return absl::nullopt; + } + return output_events_it_->log_time_ms(); + } + + std::unique_ptr PopPacket() override { + if (packet_stream_it_ == packet_stream_.end()) { + return std::unique_ptr(); + } + std::unique_ptr packet_data(new PacketData()); + packet_data->header = packet_stream_it_->rtp.header; + packet_data->time_ms = packet_stream_it_->rtp.log_time_ms(); + + // This is a header-only "dummy" packet. Set the payload to all zeros, with + // length according to the virtual length. 
+ packet_data->payload.SetSize(packet_stream_it_->rtp.total_length - + packet_stream_it_->rtp.header_length); + std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0); + + ++packet_stream_it_; + return packet_data; + } + + void AdvanceOutputEvent() override { + if (output_events_it_ != output_events_end_) { + ++output_events_it_; + } + } + + bool ended() const override { return !NextEventTime(); } + + absl::optional NextHeader() const override { + if (packet_stream_it_ == packet_stream_.end()) { + return absl::nullopt; + } + return packet_stream_it_->rtp.header; + } + + private: + const std::vector& packet_stream_; + std::vector::const_iterator packet_stream_it_; + std::vector::const_iterator output_events_it_; + const std::vector::const_iterator output_events_end_; + const absl::optional end_time_ms_; +}; + +namespace { + +// Factory to create a "replacement decoder" that produces the decoded audio +// by reading from a file rather than from the encoded payloads. +class ReplacementAudioDecoderFactory : public AudioDecoderFactory { + public: + ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name, + int file_sample_rate_hz) + : replacement_file_name_(replacement_file_name), + file_sample_rate_hz_(file_sample_rate_hz) {} + + std::vector GetSupportedDecoders() override { + RTC_NOTREACHED(); + return {}; + } + + bool IsSupportedDecoder(const SdpAudioFormat& format) override { + return true; + } + + std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + absl::optional codec_pair_id) override { + auto replacement_file = std::make_unique( + replacement_file_name_, file_sample_rate_hz_); + replacement_file->set_output_rate_hz(48000); + return std::make_unique( + std::move(replacement_file), 48000, false); + } + + private: + const std::string replacement_file_name_; + const int file_sample_rate_hz_; +}; + +// Creates a NetEq test object and all necessary input and output helpers. 
Runs +// the test and returns the NetEqDelayAnalyzer object that was used to +// instrument the test. +std::unique_ptr CreateNetEqTestAndRun( + const std::vector* packet_stream, + const std::vector* output_events, + absl::optional end_time_ms, + const std::string& replacement_file_name, + int file_sample_rate_hz) { + std::unique_ptr input( + new NetEqStreamInput(packet_stream, output_events, end_time_ms)); + + constexpr int kReplacementPt = 127; + std::set cn_types; + std::set forbidden_types; + input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt, + cn_types, forbidden_types)); + + NetEq::Config config; + config.max_packets_in_buffer = 200; + config.enable_fast_accelerate = true; + + std::unique_ptr output(new test::VoidAudioSink()); + + rtc::scoped_refptr decoder_factory = + new rtc::RefCountedObject( + replacement_file_name, file_sample_rate_hz); + + test::NetEqTest::DecoderMap codecs = { + {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}}; + + std::unique_ptr delay_cb( + new test::NetEqDelayAnalyzer); + std::unique_ptr neteq_stats_getter( + new test::NetEqStatsGetter(std::move(delay_cb))); + test::DefaultNetEqTestErrorCallback error_cb; + test::NetEqTest::Callbacks callbacks; + callbacks.error_callback = &error_cb; + callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer(); + callbacks.get_audio_callback = neteq_stats_getter.get(); + + test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr, + /*factory=*/nullptr, std::move(input), std::move(output), + callbacks); + test.Run(); + return neteq_stats_getter; +} +} // namespace + +NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const std::string& replacement_file_name, + int file_sample_rate_hz) { + NetEqStatsGetterMap neteq_stats; + + for (const auto& stream : parsed_log.incoming_rtp_packets_by_ssrc()) { + const uint32_t ssrc = stream.ssrc; + if (!IsAudioSsrc(parsed_log, kIncomingPacket, ssrc)) + 
continue; + const std::vector* audio_packets = + &stream.incoming_packets; + if (audio_packets == nullptr) { + // No incoming audio stream found. + continue; + } + + RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end()); + + std::map>::const_iterator + output_events_it = parsed_log.audio_playout_events().find(ssrc); + if (output_events_it == parsed_log.audio_playout_events().end()) { + // Could not find output events with SSRC matching the input audio stream. + // Using the first available stream of output events. + output_events_it = parsed_log.audio_playout_events().cbegin(); + } + + int64_t end_time_ms = parsed_log.first_log_segment().stop_time_ms(); + + neteq_stats[ssrc] = CreateNetEqTestAndRun( + audio_packets, &output_events_it->second, end_time_ms, + replacement_file_name, file_sample_rate_hz); + } + + return neteq_stats; +} + +// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created +// for, this method generates a plot for the jitter buffer delay profile. 
+void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + uint32_t ssrc, + const test::NetEqStatsGetter* stats_getter, + Plot* plot) { + test::NetEqDelayAnalyzer::Delays arrival_delay_ms; + test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms; + test::NetEqDelayAnalyzer::Delays playout_delay_ms; + test::NetEqDelayAnalyzer::Delays target_delay_ms; + + stats_getter->delay_analyzer()->CreateGraphs( + &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms, + &target_delay_ms); + + TimeSeries time_series_packet_arrival("packet arrival delay", + LineStyle::kLine); + TimeSeries time_series_relative_packet_arrival( + "Relative packet arrival delay", LineStyle::kLine); + TimeSeries time_series_play_time("Playout delay", LineStyle::kLine); + TimeSeries time_series_target_time("Target delay", LineStyle::kLine, + PointStyle::kHighlight); + + for (const auto& data : arrival_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. + const float y = data.second; + time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y)); + } + for (const auto& data : corrected_arrival_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. + const float y = data.second; + time_series_relative_packet_arrival.points.emplace_back( + TimeSeriesPoint(x, y)); + } + for (const auto& data : playout_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. + const float y = data.second; + time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y)); + } + for (const auto& data : target_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. 
+ const float y = data.second; + time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y)); + } + + plot->AppendTimeSeries(std::move(time_series_packet_arrival)); + plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival)); + plot->AppendTimeSeries(std::move(time_series_play_time)); + plot->AppendTimeSeries(std::move(time_series_target_time)); + + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin, + kTopMargin); + plot->SetTitle("NetEq timing for " + + GetStreamName(parsed_log, kIncomingPacket, ssrc)); +} + +template +void CreateNetEqStatsGraphInternal( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats, + rtc::FunctionView>*( + const test::NetEqStatsGetter*)> data_extractor, + rtc::FunctionView stats_extractor, + const std::string& plot_name, + Plot* plot) { + std::map time_series; + + for (const auto& st : neteq_stats) { + const uint32_t ssrc = st.first; + const std::vector>* data_vector = + data_extractor(st.second.get()); + for (const auto& data : *data_vector) { + const float time = config.GetCallTimeSec(data.first * 1000); // ms to us. 
+ const float value = stats_extractor(data.second); + time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value)); + } + } + + for (auto& series : time_series) { + series.second.label = + GetStreamName(parsed_log, kIncomingPacket, series.first); + series.second.line_style = LineStyle::kLine; + plot->AppendTimeSeries(std::move(series.second)); + } + + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin); + plot->SetTitle(plot_name); +} + +void CreateNetEqNetworkStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats, + rtc::FunctionView stats_extractor, + const std::string& plot_name, + Plot* plot) { + CreateNetEqStatsGraphInternal( + parsed_log, config, neteq_stats, + [](const test::NetEqStatsGetter* stats_getter) { + return stats_getter->stats(); + }, + stats_extractor, plot_name, plot); +} + +void CreateNetEqLifetimeStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats, + rtc::FunctionView stats_extractor, + const std::string& plot_name, + Plot* plot) { + CreateNetEqStatsGraphInternal( + parsed_log, config, neteq_stats, + [](const test::NetEqStatsGetter* stats_getter) { + return stats_getter->lifetime_stats(); + }, + stats_extractor, plot_name, plot); +} + +} // namespace webrtc diff --git a/rtc_tools/rtc_event_log_visualizer/analyze_audio.h b/rtc_tools/rtc_event_log_visualizer/analyze_audio.h new file mode 100644 index 0000000000..726e84492d --- /dev/null +++ b/rtc_tools/rtc_event_log_visualizer/analyze_audio.h @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_ +#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_ + +#include +#include +#include +#include + +#include "api/function_view.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h" +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" +#include "rtc_tools/rtc_event_log_visualizer/plot_base.h" + +namespace webrtc { + +void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); + +using NetEqStatsGetterMap = + std::map>; +NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const std::string& replacement_file_name, + int file_sample_rate_hz); + +void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + uint32_t ssrc, + const test::NetEqStatsGetter* stats_getter, + Plot* plot); +void CreateNetEqNetworkStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats_getters, + rtc::FunctionView 
stats_extractor, + const std::string& plot_name, + Plot* plot); +void CreateNetEqLifetimeStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats_getters, + rtc::FunctionView stats_extractor, + const std::string& plot_name, + Plot* plot); + +} // namespace webrtc + +#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_ diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer.cc b/rtc_tools/rtc_event_log_visualizer/analyzer.cc index 59b96e423e..6d84b1b5ca 100644 --- a/rtc_tools/rtc_event_log_visualizer/analyzer.cc +++ b/rtc_tools/rtc_event_log_visualizer/analyzer.cc @@ -28,14 +28,9 @@ #include "call/call.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" +#include "logging/rtc_event_log/rtc_event_processor.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" -#include "modules/audio_coding/neteq/tools/audio_sink.h" -#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h" -#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h" -#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h" -#include "modules/audio_coding/neteq/tools/neteq_test.h" -#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" #include "modules/congestion_controller/goog_cc/bitrate_estimator.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" @@ -44,7 +39,6 @@ #include "modules/pacing/paced_sender.h" #include "modules/pacing/packet_router.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" @@ -53,6 +47,7 @@ #include 
"modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_utility.h" #include "rtc_base/checks.h" #include "rtc_base/format_macros.h" @@ -61,17 +56,12 @@ #include "rtc_base/rate_statistics.h" #include "rtc_base/strings/string_builder.h" #include "rtc_tools/rtc_event_log_visualizer/log_simulation.h" - -#ifndef BWE_TEST_LOGGING_COMPILE_TIME_ENABLE -#define BWE_TEST_LOGGING_COMPILE_TIME_ENABLE 0 -#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +#include "test/explicit_key_value_config.h" namespace webrtc { namespace { -const int kNumMicrosecsPerSec = 1000000; - std::string SsrcToString(uint32_t ssrc) { rtc::StringBuilder ss; ss << "SSRC " << ssrc; @@ -167,11 +157,6 @@ absl::optional EstimateRtpClockFrequency( return absl::nullopt; } -constexpr float kLeftMargin = 0.01f; -constexpr float kRightMargin = 0.02f; -constexpr float kBottomMargin = 0.02f; -constexpr float kTopMargin = 0.05f; - absl::optional NetworkDelayDiff_AbsSendTime( const LoggedRtpPacketIncoming& old_packet, const LoggedRtpPacketIncoming& new_packet) { @@ -221,99 +206,6 @@ absl::optional NetworkDelayDiff_CaptureTime( return delay_change; } -// For each element in data_view, use |f()| to extract a y-coordinate and -// store the result in a TimeSeries. -template -void ProcessPoints(rtc::FunctionView fx, - rtc::FunctionView(const DataType&)> fy, - const IterableType& data_view, - TimeSeries* result) { - for (size_t i = 0; i < data_view.size(); i++) { - const DataType& elem = data_view[i]; - float x = fx(elem); - absl::optional y = fy(elem); - if (y) - result->points.emplace_back(x, *y); - } -} - -// For each pair of adjacent elements in |data|, use |f()| to extract a -// y-coordinate and store the result in a TimeSeries. 
Note that the x-coordinate -// will be the time of the second element in the pair. -template -void ProcessPairs( - rtc::FunctionView fx, - rtc::FunctionView(const DataType&, - const DataType&)> fy, - const IterableType& data, - TimeSeries* result) { - for (size_t i = 1; i < data.size(); i++) { - float x = fx(data[i]); - absl::optional y = fy(data[i - 1], data[i]); - if (y) - result->points.emplace_back(x, static_cast(*y)); - } -} - -// For each pair of adjacent elements in |data|, use |f()| to extract a -// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate -// will be the time of the second element in the pair. -template -void AccumulatePairs( - rtc::FunctionView fx, - rtc::FunctionView(const DataType&, - const DataType&)> fy, - const IterableType& data, - TimeSeries* result) { - ResultType sum = 0; - for (size_t i = 1; i < data.size(); i++) { - float x = fx(data[i]); - absl::optional y = fy(data[i - 1], data[i]); - if (y) { - sum += *y; - result->points.emplace_back(x, static_cast(sum)); - } - } -} - -// Calculates a moving average of |data| and stores the result in a TimeSeries. -// A data point is generated every |step| microseconds from |begin_time| -// to |end_time|. The value of each data point is the average of the data -// during the preceding |window_duration_us| microseconds. 
-template -void MovingAverage( - rtc::FunctionView(const DataType&)> fy, - const IterableType& data_view, - AnalyzerConfig config, - TimeSeries* result) { - size_t window_index_begin = 0; - size_t window_index_end = 0; - ResultType sum_in_window = 0; - - for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_; - t += config.step_) { - while (window_index_end < data_view.size() && - data_view[window_index_end].log_time_us() < t) { - absl::optional value = fy(data_view[window_index_end]); - if (value) - sum_in_window += *value; - ++window_index_end; - } - while (window_index_begin < data_view.size() && - data_view[window_index_begin].log_time_us() < - t - config.window_duration_) { - absl::optional value = fy(data_view[window_index_begin]); - if (value) - sum_in_window -= *value; - ++window_index_begin; - } - float window_duration_s = - static_cast(config.window_duration_) / kNumMicrosecsPerSec; - float x = config.GetCallTimeSec(t); - float y = sum_in_window / window_duration_s; - result->points.emplace_back(x, y); - } -} template TimeSeries CreateRtcpTypeTimeSeries(const std::vector& rtcp_list, @@ -464,32 +356,21 @@ EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log, config_.begin_time_ = config_.end_time_ = 0; } - const auto& log_start_events = parsed_log_.start_log_events(); - const auto& log_end_events = parsed_log_.stop_log_events(); - auto start_iter = log_start_events.begin(); - auto end_iter = log_end_events.begin(); - while (start_iter != log_start_events.end()) { - int64_t start = start_iter->log_time_us(); - ++start_iter; - absl::optional next_start; - if (start_iter != log_start_events.end()) - next_start.emplace(start_iter->log_time_us()); - if (end_iter != log_end_events.end() && - end_iter->log_time_us() <= - next_start.value_or(std::numeric_limits::max())) { - int64_t end = end_iter->log_time_us(); - RTC_DCHECK_LE(start, end); - log_segments_.push_back(std::make_pair(start, end)); - ++end_iter; - } else { - // we're 
missing an end event. Assume that it occurred just before the - // next start. - log_segments_.push_back( - std::make_pair(start, next_start.value_or(config_.end_time_))); - } - } - RTC_LOG(LS_INFO) << "Found " << log_segments_.size() - << " (LOG_START, LOG_END) segments in log."; + RTC_LOG(LS_INFO) << "Log is " + << (parsed_log_.last_timestamp() - + parsed_log_.first_timestamp()) / + 1000000 + << " seconds long."; +} + +EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log, + const AnalyzerConfig& config) + : parsed_log_(log), config_(config) { + RTC_LOG(LS_INFO) << "Log is " + << (parsed_log_.last_timestamp() - + parsed_log_.first_timestamp()) / + 1000000 + << " seconds long."; } class BitrateObserver : public RemoteBitrateObserver { @@ -526,7 +407,7 @@ void EventLogAnalyzer::CreatePacketGraph(PacketDirection direction, continue; } - TimeSeries time_series(GetStreamName(direction, stream.ssrc), + TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc), LineStyle::kBar); auto GetPacketSize = [](const LoggedRtpPacket& packet) { return absl::optional(packet.total_length); @@ -596,8 +477,8 @@ void EventLogAnalyzer::CreateAccumulatedPacketsGraph(PacketDirection direction, for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { if (!MatchingSsrc(stream.ssrc, desired_ssrc_)) continue; - std::string label = - std::string("RTP ") + GetStreamName(direction, stream.ssrc); + std::string label = std::string("RTP ") + + GetStreamName(parsed_log_, direction, stream.ssrc); CreateAccumulatedPacketsTimeSeries(plot, stream.packet_view, label); } std::string label = @@ -617,6 +498,95 @@ void EventLogAnalyzer::CreateAccumulatedPacketsGraph(PacketDirection direction, " RTP/RTCP packets"); } +void EventLogAnalyzer::CreatePacketRateGraph(PacketDirection direction, + Plot* plot) { + auto CountPackets = [](auto packet) { return 1.0; }; + for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { + // Filter on SSRC. 
+ if (!MatchingSsrc(stream.ssrc, desired_ssrc_)) { + continue; + } + TimeSeries time_series( + std::string("RTP ") + + GetStreamName(parsed_log_, direction, stream.ssrc), + LineStyle::kLine); + MovingAverage(CountPackets, stream.packet_view, + config_, &time_series); + plot->AppendTimeSeries(std::move(time_series)); + } + TimeSeries time_series( + std::string("RTCP ") + "(" + GetDirectionAsShortString(direction) + ")", + LineStyle::kLine); + if (direction == kIncomingPacket) { + MovingAverage( + CountPackets, parsed_log_.incoming_rtcp_packets(), config_, + &time_series); + } else { + MovingAverage( + CountPackets, parsed_log_.outgoing_rtcp_packets(), config_, + &time_series); + } + plot->AppendTimeSeries(std::move(time_series)); + + plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), + "Time (s)", kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Packet Rate (packets/s)", kBottomMargin, + kTopMargin); + plot->SetTitle("Rate of " + GetDirectionAsString(direction) + + " RTP/RTCP packets"); +} + +void EventLogAnalyzer::CreateTotalPacketRateGraph(PacketDirection direction, + Plot* plot) { + // Contains a log timestamp to enable counting logged events of different + // types using MovingAverage(). 
+ class LogTime { + public: + explicit LogTime(int64_t log_time_us) : log_time_us_(log_time_us) {} + + int64_t log_time_us() const { return log_time_us_; } + + private: + int64_t log_time_us_; + }; + + std::vector packet_times; + auto handle_rtp = [&](const LoggedRtpPacket& packet) { + packet_times.emplace_back(packet.log_time_us()); + }; + RtcEventProcessor process; + for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { + process.AddEvents(stream.packet_view, handle_rtp); + } + if (direction == kIncomingPacket) { + auto handle_incoming_rtcp = [&](const LoggedRtcpPacketIncoming& packet) { + packet_times.emplace_back(packet.log_time_us()); + }; + process.AddEvents(parsed_log_.incoming_rtcp_packets(), + handle_incoming_rtcp); + } else { + auto handle_outgoing_rtcp = [&](const LoggedRtcpPacketOutgoing& packet) { + packet_times.emplace_back(packet.log_time_us()); + }; + process.AddEvents(parsed_log_.outgoing_rtcp_packets(), + handle_outgoing_rtcp); + } + process.ProcessEventsInOrder(); + TimeSeries time_series(std::string("Total ") + "(" + + GetDirectionAsShortString(direction) + ") packets", + LineStyle::kLine); + MovingAverage([](auto packet) { return 1; }, packet_times, + config_, &time_series); + plot->AppendTimeSeries(std::move(time_series)); + + plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), + "Time (s)", kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Packet Rate (packets/s)", kBottomMargin, + kTopMargin); + plot->SetTitle("Rate of all " + GetDirectionAsString(direction) + + " RTP/RTCP packets"); +} + // For each SSRC, plot the time between the consecutive playouts. 
void EventLogAnalyzer::CreatePlayoutGraph(Plot* plot) { for (const auto& playout_stream : parsed_log_.audio_playout_events()) { @@ -647,9 +617,9 @@ void EventLogAnalyzer::CreatePlayoutGraph(Plot* plot) { void EventLogAnalyzer::CreateAudioLevelGraph(PacketDirection direction, Plot* plot) { for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { - if (!IsAudioSsrc(direction, stream.ssrc)) + if (!IsAudioSsrc(parsed_log_, direction, stream.ssrc)) continue; - TimeSeries time_series(GetStreamName(direction, stream.ssrc), + TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc), LineStyle::kLine); for (auto& packet : stream.packet_view) { if (packet.header.extension.hasAudioLevel) { @@ -678,8 +648,9 @@ void EventLogAnalyzer::CreateSequenceNumberGraph(Plot* plot) { continue; } - TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc), - LineStyle::kBar); + TimeSeries time_series( + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc), + LineStyle::kBar); auto GetSequenceNumberDiff = [](const LoggedRtpPacketIncoming& old_packet, const LoggedRtpPacketIncoming& new_packet) { int64_t diff = @@ -712,8 +683,9 @@ void EventLogAnalyzer::CreateIncomingPacketLossGraph(Plot* plot) { continue; } - TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc), - LineStyle::kLine, PointStyle::kHighlight); + TimeSeries time_series( + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc), + LineStyle::kLine, PointStyle::kHighlight); // TODO(terelius): Should the window and step size be read from the class // instead? const int64_t kWindowUs = 1000000; @@ -766,7 +738,7 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) { for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) { // Filter on SSRC. 
if (!MatchingSsrc(stream.ssrc, desired_ssrc_) || - IsRtxSsrc(kIncomingPacket, stream.ssrc)) { + IsRtxSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) { continue; } @@ -777,15 +749,14 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) { << packets.size() << " packets in the stream."; continue; } - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits::max() - : log_segments_.front().second; + int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us(); absl::optional estimated_frequency = - EstimateRtpClockFrequency(packets, end_time_us); + EstimateRtpClockFrequency(packets, segment_end_us); if (!estimated_frequency) continue; const double frequency_hz = *estimated_frequency; - if (IsVideoSsrc(kIncomingPacket, stream.ssrc) && frequency_hz != 90000) { + if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc) && + frequency_hz != 90000) { RTC_LOG(LS_WARNING) << "Video stream should use a 90 kHz clock but appears to use " << frequency_hz / 1000 << ". 
Discarding."; @@ -802,14 +773,16 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) { }; TimeSeries capture_time_data( - GetStreamName(kIncomingPacket, stream.ssrc) + " capture-time", + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) + + " capture-time", LineStyle::kLine); AccumulatePairs( ToCallTime, ToNetworkDelay, packets, &capture_time_data); plot->AppendTimeSeries(std::move(capture_time_data)); TimeSeries send_time_data( - GetStreamName(kIncomingPacket, stream.ssrc) + " abs-send-time", + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) + + " abs-send-time", LineStyle::kLine); AccumulatePairs( ToCallTime, NetworkDelayDiff_AbsSendTime, packets, &send_time_data); @@ -1102,7 +1075,7 @@ void EventLogAnalyzer::CreateStreamBitrateGraph(PacketDirection direction, continue; } - TimeSeries time_series(GetStreamName(direction, stream.ssrc), + TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc), LineStyle::kLine); auto GetPacketSizeKilobits = [](const LoggedRtpPacket& packet) { return packet.total_length * 8.0 / 1000.0; @@ -1228,17 +1201,21 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { // TODO(holmer): Log the call config and use that here instead. 
static const uint32_t kDefaultStartBitrateBps = 300000; NetworkControllerConfig cc_config; - cc_config.constraints.at_time = Timestamp::us(clock.TimeInMicroseconds()); - cc_config.constraints.starting_rate = DataRate::bps(kDefaultStartBitrateBps); + cc_config.constraints.at_time = Timestamp::Micros(clock.TimeInMicroseconds()); + cc_config.constraints.starting_rate = + DataRate::BitsPerSec(kDefaultStartBitrateBps); cc_config.event_log = &null_event_log; auto goog_cc = factory.Create(cc_config); TimeSeries time_series("Delay-based estimate", LineStyle::kStep, PointStyle::kHighlight); - TimeSeries acked_time_series("Acked bitrate", LineStyle::kLine, + TimeSeries acked_time_series("Raw acked bitrate", LineStyle::kLine, PointStyle::kHighlight); - TimeSeries acked_estimate_time_series( - "Acked bitrate estimate", LineStyle::kLine, PointStyle::kHighlight); + TimeSeries robust_time_series("Robust throughput estimate", LineStyle::kLine, + PointStyle::kHighlight); + TimeSeries acked_estimate_time_series("Acknowledged bitrate estimate", + LineStyle::kLine, + PointStyle::kHighlight); auto rtp_iterator = outgoing_rtp.begin(); auto rtcp_iterator = incoming_rtcp.begin(); @@ -1264,20 +1241,18 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { return std::numeric_limits::max(); }; - RateStatistics acked_bitrate(250, 8000); -#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) - FieldTrialBasedConfig field_trial_config_; - // The event_log_visualizer should normally not be compiled with - // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE since the normal plots won't work. - // However, compiling with BWE_TEST_LOGGING, running with --plot=sendside_bwe - // and piping the output to plot_dynamics.py can be used as a hack to get the - // internal state of various BWE components. In this case, it is important - // we don't instantiate the AcknowledgedBitrateEstimator both here and in 
+ RateStatistics acked_bitrate(750, 8000); + test::ExplicitKeyValueConfig throughput_config( + "WebRTC-Bwe-RobustThroughputEstimatorSettings/" + "enabled:true,reduce_bias:true,assume_shared_link:false,initial_packets:" + "10,min_packets:25,window_duration:750ms,unacked_weight:0.5/"); + std::unique_ptr + robust_throughput_estimator( + AcknowledgedBitrateEstimatorInterface::Create(&throughput_config)); + FieldTrialBasedConfig field_trial_config; std::unique_ptr acknowledged_bitrate_estimator( - AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config_)); -#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) + AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config)); int64_t time_us = std::min({NextRtpTime(), NextRtcpTime(), NextProcessTime()}); int64_t last_update_us = 0; @@ -1287,25 +1262,40 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtpTime()); const RtpPacketType& rtp_packet = *rtp_iterator->second; if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) { - RTC_DCHECK(rtp_packet.rtp.header.extension.hasTransportSequenceNumber); RtpPacketSendInfo packet_info; packet_info.ssrc = rtp_packet.rtp.header.ssrc; packet_info.transport_sequence_number = rtp_packet.rtp.header.extension.transportSequenceNumber; packet_info.rtp_sequence_number = rtp_packet.rtp.header.sequenceNumber; - packet_info.has_rtp_sequence_number = true; packet_info.length = rtp_packet.rtp.total_length; + if (IsRtxSsrc(parsed_log_, PacketDirection::kOutgoingPacket, + rtp_packet.rtp.header.ssrc)) { + // Don't set the optional media type as we don't know if it is + // a retransmission, FEC or padding. 
+ } else if (IsVideoSsrc(parsed_log_, PacketDirection::kOutgoingPacket, + rtp_packet.rtp.header.ssrc)) { + packet_info.packet_type = RtpPacketMediaType::kVideo; + } else if (IsAudioSsrc(parsed_log_, PacketDirection::kOutgoingPacket, + rtp_packet.rtp.header.ssrc)) { + packet_info.packet_type = RtpPacketMediaType::kAudio; + } transport_feedback.AddPacket( packet_info, 0u, // Per packet overhead bytes. - Timestamp::us(rtp_packet.rtp.log_time_us())); - rtc::SentPacket sent_packet( - rtp_packet.rtp.header.extension.transportSequenceNumber, - rtp_packet.rtp.log_time_us() / 1000); - auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet); - if (sent_msg) - observer.Update(goog_cc->OnSentPacket(*sent_msg)); + Timestamp::Micros(rtp_packet.rtp.log_time_us())); } + rtc::SentPacket sent_packet; + sent_packet.send_time_ms = rtp_packet.rtp.log_time_ms(); + sent_packet.info.included_in_allocation = true; + sent_packet.info.packet_size_bytes = rtp_packet.rtp.total_length; + if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) { + sent_packet.packet_id = + rtp_packet.rtp.header.extension.transportSequenceNumber; + sent_packet.info.included_in_feedback = true; + } + auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet); + if (sent_msg) + observer.Update(goog_cc->OnSentPacket(*sent_msg)); ++rtp_iterator; } if (clock.TimeInMicroseconds() >= NextRtcpTime()) { @@ -1313,20 +1303,20 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { auto feedback_msg = transport_feedback.ProcessTransportFeedback( rtcp_iterator->transport_feedback, - Timestamp::ms(clock.TimeInMilliseconds())); + Timestamp::Millis(clock.TimeInMilliseconds())); absl::optional bitrate_bps; if (feedback_msg) { observer.Update(goog_cc->OnTransportPacketsFeedback(*feedback_msg)); std::vector feedback = feedback_msg->SortedByReceiveTime(); if (!feedback.empty()) { -#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) acknowledged_bitrate_estimator->IncomingPacketFeedbackVector( 
feedback); -#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) - for (const PacketResult& packet : feedback) + robust_throughput_estimator->IncomingPacketFeedbackVector(feedback); + for (const PacketResult& packet : feedback) { acked_bitrate.Update(packet.sent_packet.size.bytes(), packet.receive_time.ms()); + } bitrate_bps = acked_bitrate.Rate(feedback.back().receive_time.ms()); } } @@ -1334,18 +1324,20 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { float x = config_.GetCallTimeSec(clock.TimeInMicroseconds()); float y = bitrate_bps.value_or(0) / 1000; acked_time_series.points.emplace_back(x, y); -#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) + y = robust_throughput_estimator->bitrate() + .value_or(DataRate::Zero()) + .kbps(); + robust_time_series.points.emplace_back(x, y); y = acknowledged_bitrate_estimator->bitrate() .value_or(DataRate::Zero()) .kbps(); acked_estimate_time_series.points.emplace_back(x, y); -#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) ++rtcp_iterator; } if (clock.TimeInMicroseconds() >= NextProcessTime()) { RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextProcessTime()); ProcessInterval msg; - msg.at_time = Timestamp::us(clock.TimeInMicroseconds()); + msg.at_time = Timestamp::Micros(clock.TimeInMicroseconds()); observer.Update(goog_cc->OnProcessInterval(msg)); next_process_time_us_ += process_interval.us(); } @@ -1360,6 +1352,7 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { } // Add the data set to the plot. plot->AppendTimeSeries(std::move(time_series)); + plot->AppendTimeSeries(std::move(robust_time_series)); plot->AppendTimeSeries(std::move(acked_time_series)); plot->AppendTimeSeriesIfNotEmpty(std::move(acked_estimate_time_series)); @@ -1387,14 +1380,16 @@ void EventLogAnalyzer::CreateReceiveSideBweSimulationGraph(Plot* plot) { } private: - uint32_t last_bitrate_bps_; - bool bitrate_updated_; + // We don't know the start bitrate, but assume that it is the default 300 + // kbps. 
+ uint32_t last_bitrate_bps_ = 300000; + bool bitrate_updated_ = false; }; std::multimap incoming_rtp; for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) { - if (IsVideoSsrc(kIncomingPacket, stream.ssrc)) { + if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) { for (const auto& rtp_packet : stream.incoming_packets) incoming_rtp.insert( std::make_pair(rtp_packet.rtp.log_time_us(), &rtp_packet)); @@ -1497,7 +1492,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) { const std::vector& packets = stream.outgoing_packets; - if (IsRtxSsrc(kOutgoingPacket, stream.ssrc)) { + if (IsRtxSsrc(parsed_log_, kOutgoingPacket, stream.ssrc)) { continue; } @@ -1507,14 +1502,12 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) { "pacer delay with less than 2 packets in the stream"; continue; } - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits::max() - : log_segments_.front().second; + int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us(); absl::optional estimated_frequency = - EstimateRtpClockFrequency(packets, end_time_us); + EstimateRtpClockFrequency(packets, segment_end_us); if (!estimated_frequency) continue; - if (IsVideoSsrc(kOutgoingPacket, stream.ssrc) && + if (IsVideoSsrc(parsed_log_, kOutgoingPacket, stream.ssrc) && *estimated_frequency != 90000) { RTC_LOG(LS_WARNING) << "Video stream should use a 90 kHz clock but appears to use " @@ -1523,7 +1516,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) { } TimeSeries pacer_delay_series( - GetStreamName(kOutgoingPacket, stream.ssrc) + "(" + + GetStreamName(parsed_log_, kOutgoingPacket, stream.ssrc) + "(" + std::to_string(*estimated_frequency / 1000) + " kHz)", LineStyle::kLine, PointStyle::kHighlight); SeqNumUnwrapper timestamp_unwrapper; @@ -1556,7 +1549,7 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction, Plot* plot) { for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { TimeSeries 
rtp_timestamps( - GetStreamName(direction, stream.ssrc) + " capture-time", + GetStreamName(parsed_log_, direction, stream.ssrc) + " capture-time", LineStyle::kLine, PointStyle::kHighlight); for (const auto& packet : stream.packet_view) { float x = config_.GetCallTimeSec(packet.log_time_us()); @@ -1566,7 +1559,8 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction, plot->AppendTimeSeries(std::move(rtp_timestamps)); TimeSeries rtcp_timestamps( - GetStreamName(direction, stream.ssrc) + " rtcp capture-time", + GetStreamName(parsed_log_, direction, stream.ssrc) + + " rtcp capture-time", LineStyle::kLine, PointStyle::kHighlight); // TODO(terelius): Why only sender reports? const auto& sender_reports = parsed_log_.sender_reports(direction); @@ -1603,7 +1597,8 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot( bool inserted; if (sr_report_it == sr_reports_by_ssrc.end()) { std::tie(sr_report_it, inserted) = sr_reports_by_ssrc.emplace( - ssrc, TimeSeries(GetStreamName(direction, ssrc) + " Sender Reports", + ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) + + " Sender Reports", LineStyle::kLine, PointStyle::kHighlight)); } sr_report_it->second.points.emplace_back(x, y); @@ -1624,9 +1619,9 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot( bool inserted; if (rr_report_it == rr_reports_by_ssrc.end()) { std::tie(rr_report_it, inserted) = rr_reports_by_ssrc.emplace( - ssrc, - TimeSeries(GetStreamName(direction, ssrc) + " Receiver Reports", - LineStyle::kLine, PointStyle::kHighlight)); + ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) + + " Receiver Reports", + LineStyle::kLine, PointStyle::kHighlight)); } rr_report_it->second.points.emplace_back(x, y); } @@ -1641,463 +1636,6 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot( plot->SetTitle(title); } -void EventLogAnalyzer::CreateAudioEncoderTargetBitrateGraph(Plot* plot) { - TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine, - 
PointStyle::kHighlight); - auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event) - -> absl::optional { - if (ana_event.config.bitrate_bps) - return absl::optional( - static_cast(*ana_event.config.bitrate_bps)); - return absl::nullopt; - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints( - ToCallTime, GetAnaBitrateBps, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder target bitrate"); -} - -void EventLogAnalyzer::CreateAudioEncoderFrameLengthGraph(Plot* plot) { - TimeSeries time_series("Audio encoder frame length", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaFrameLengthMs = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.frame_length_ms) - return absl::optional( - static_cast(*ana_event.config.frame_length_ms)); - return absl::optional(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints( - ToCallTime, GetAnaFrameLengthMs, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder frame length"); -} - -void EventLogAnalyzer::CreateAudioEncoderPacketLossGraph(Plot* plot) { - TimeSeries time_series("Audio encoder uplink packet loss fraction", - LineStyle::kLine, PointStyle::kHighlight); - auto 
GetAnaPacketLoss = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.uplink_packet_loss_fraction) - return absl::optional(static_cast( - *ana_event.config.uplink_packet_loss_fraction)); - return absl::optional(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints( - ToCallTime, GetAnaPacketLoss, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin, - kTopMargin); - plot->SetTitle("Reported audio encoder lost packets"); -} - -void EventLogAnalyzer::CreateAudioEncoderEnableFecGraph(Plot* plot) { - TimeSeries time_series("Audio encoder FEC", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaFecEnabled = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.enable_fec) - return absl::optional( - static_cast(*ana_event.config.enable_fec)); - return absl::optional(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints( - ToCallTime, GetAnaFecEnabled, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder FEC"); -} - -void EventLogAnalyzer::CreateAudioEncoderEnableDtxGraph(Plot* plot) { - TimeSeries time_series("Audio encoder DTX", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaDtxEnabled = - [](const LoggedAudioNetworkAdaptationEvent& 
ana_event) { - if (ana_event.config.enable_dtx) - return absl::optional( - static_cast(*ana_event.config.enable_dtx)); - return absl::optional(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints( - ToCallTime, GetAnaDtxEnabled, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder DTX"); -} - -void EventLogAnalyzer::CreateAudioEncoderNumChannelsGraph(Plot* plot) { - TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaNumChannels = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.num_channels) - return absl::optional( - static_cast(*ana_event.config.num_channels)); - return absl::optional(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints( - ToCallTime, GetAnaNumChannels, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))", - kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder number of channels"); -} - -class NetEqStreamInput : public test::NetEqInput { - public: - // Does not take any ownership, and all pointers must refer to valid objects - // that outlive the one constructed. 
- NetEqStreamInput(const std::vector* packet_stream, - const std::vector* output_events, - absl::optional end_time_ms) - : packet_stream_(*packet_stream), - packet_stream_it_(packet_stream_.begin()), - output_events_it_(output_events->begin()), - output_events_end_(output_events->end()), - end_time_ms_(end_time_ms) { - RTC_DCHECK(packet_stream); - RTC_DCHECK(output_events); - } - - absl::optional NextPacketTime() const override { - if (packet_stream_it_ == packet_stream_.end()) { - return absl::nullopt; - } - if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) { - return absl::nullopt; - } - return packet_stream_it_->rtp.log_time_ms(); - } - - absl::optional NextOutputEventTime() const override { - if (output_events_it_ == output_events_end_) { - return absl::nullopt; - } - if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) { - return absl::nullopt; - } - return output_events_it_->log_time_ms(); - } - - std::unique_ptr PopPacket() override { - if (packet_stream_it_ == packet_stream_.end()) { - return std::unique_ptr(); - } - std::unique_ptr packet_data(new PacketData()); - packet_data->header = packet_stream_it_->rtp.header; - packet_data->time_ms = packet_stream_it_->rtp.log_time_ms(); - - // This is a header-only "dummy" packet. Set the payload to all zeros, with - // length according to the virtual length. 
- packet_data->payload.SetSize(packet_stream_it_->rtp.total_length - - packet_stream_it_->rtp.header_length); - std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0); - - ++packet_stream_it_; - return packet_data; - } - - void AdvanceOutputEvent() override { - if (output_events_it_ != output_events_end_) { - ++output_events_it_; - } - } - - bool ended() const override { return !NextEventTime(); } - - absl::optional NextHeader() const override { - if (packet_stream_it_ == packet_stream_.end()) { - return absl::nullopt; - } - return packet_stream_it_->rtp.header; - } - - private: - const std::vector& packet_stream_; - std::vector::const_iterator packet_stream_it_; - std::vector::const_iterator output_events_it_; - const std::vector::const_iterator output_events_end_; - const absl::optional end_time_ms_; -}; - -namespace { - -// Factory to create a "replacement decoder" that produces the decoded audio -// by reading from a file rather than from the encoded payloads. -class ReplacementAudioDecoderFactory : public AudioDecoderFactory { - public: - ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name, - int file_sample_rate_hz) - : replacement_file_name_(replacement_file_name), - file_sample_rate_hz_(file_sample_rate_hz) {} - - std::vector GetSupportedDecoders() override { - RTC_NOTREACHED(); - return {}; - } - - bool IsSupportedDecoder(const SdpAudioFormat& format) override { - return true; - } - - std::unique_ptr MakeAudioDecoder( - const SdpAudioFormat& format, - absl::optional codec_pair_id) override { - auto replacement_file = std::make_unique( - replacement_file_name_, file_sample_rate_hz_); - replacement_file->set_output_rate_hz(48000); - return std::make_unique( - std::move(replacement_file), 48000, false); - } - - private: - const std::string replacement_file_name_; - const int file_sample_rate_hz_; -}; - -// Creates a NetEq test object and all necessary input and output helpers. 
Runs -// the test and returns the NetEqDelayAnalyzer object that was used to -// instrument the test. -std::unique_ptr CreateNetEqTestAndRun( - const std::vector* packet_stream, - const std::vector* output_events, - absl::optional end_time_ms, - const std::string& replacement_file_name, - int file_sample_rate_hz) { - std::unique_ptr input( - new NetEqStreamInput(packet_stream, output_events, end_time_ms)); - - constexpr int kReplacementPt = 127; - std::set cn_types; - std::set forbidden_types; - input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt, - cn_types, forbidden_types)); - - NetEq::Config config; - config.max_packets_in_buffer = 200; - config.enable_fast_accelerate = true; - - std::unique_ptr output(new test::VoidAudioSink()); - - rtc::scoped_refptr decoder_factory = - new rtc::RefCountedObject( - replacement_file_name, file_sample_rate_hz); - - test::NetEqTest::DecoderMap codecs = { - {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}}; - - std::unique_ptr delay_cb( - new test::NetEqDelayAnalyzer); - std::unique_ptr neteq_stats_getter( - new test::NetEqStatsGetter(std::move(delay_cb))); - test::DefaultNetEqTestErrorCallback error_cb; - test::NetEqTest::Callbacks callbacks; - callbacks.error_callback = &error_cb; - callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer(); - callbacks.get_audio_callback = neteq_stats_getter.get(); - - test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr, - /*factory=*/nullptr, std::move(input), std::move(output), - callbacks); - test.Run(); - return neteq_stats_getter; -} -} // namespace - -EventLogAnalyzer::NetEqStatsGetterMap EventLogAnalyzer::SimulateNetEq( - const std::string& replacement_file_name, - int file_sample_rate_hz) const { - NetEqStatsGetterMap neteq_stats; - - for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) { - const uint32_t ssrc = stream.ssrc; - if (!IsAudioSsrc(kIncomingPacket, ssrc)) - continue; - const std::vector* 
audio_packets = - &stream.incoming_packets; - if (audio_packets == nullptr) { - // No incoming audio stream found. - continue; - } - - RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end()); - - std::map>::const_iterator - output_events_it = parsed_log_.audio_playout_events().find(ssrc); - if (output_events_it == parsed_log_.audio_playout_events().end()) { - // Could not find output events with SSRC matching the input audio stream. - // Using the first available stream of output events. - output_events_it = parsed_log_.audio_playout_events().cbegin(); - } - - absl::optional end_time_ms = - log_segments_.empty() - ? absl::nullopt - : absl::optional(log_segments_.front().second / 1000); - - neteq_stats[ssrc] = CreateNetEqTestAndRun( - audio_packets, &output_events_it->second, end_time_ms, - replacement_file_name, file_sample_rate_hz); - } - - return neteq_stats; -} - -// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created -// for, this method generates a plot for the jitter buffer delay profile. 
-void EventLogAnalyzer::CreateAudioJitterBufferGraph( - uint32_t ssrc, - const test::NetEqStatsGetter* stats_getter, - Plot* plot) const { - test::NetEqDelayAnalyzer::Delays arrival_delay_ms; - test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms; - test::NetEqDelayAnalyzer::Delays playout_delay_ms; - test::NetEqDelayAnalyzer::Delays target_delay_ms; - - stats_getter->delay_analyzer()->CreateGraphs( - &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms, - &target_delay_ms); - - TimeSeries time_series_packet_arrival("packet arrival delay", - LineStyle::kLine); - TimeSeries time_series_relative_packet_arrival( - "Relative packet arrival delay", LineStyle::kLine); - TimeSeries time_series_play_time("Playout delay", LineStyle::kLine); - TimeSeries time_series_target_time("Target delay", LineStyle::kLine, - PointStyle::kHighlight); - - for (const auto& data : arrival_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. - const float y = data.second; - time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y)); - } - for (const auto& data : corrected_arrival_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. - const float y = data.second; - time_series_relative_packet_arrival.points.emplace_back( - TimeSeriesPoint(x, y)); - } - for (const auto& data : playout_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. - const float y = data.second; - time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y)); - } - for (const auto& data : target_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. 
- const float y = data.second; - time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y)); - } - - plot->AppendTimeSeries(std::move(time_series_packet_arrival)); - plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival)); - plot->AppendTimeSeries(std::move(time_series_play_time)); - plot->AppendTimeSeries(std::move(time_series_target_time)); - - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin, - kTopMargin); - plot->SetTitle("NetEq timing for " + GetStreamName(kIncomingPacket, ssrc)); -} - -template -void EventLogAnalyzer::CreateNetEqStatsGraphInternal( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView>*( - const test::NetEqStatsGetter*)> data_extractor, - rtc::FunctionView stats_extractor, - const std::string& plot_name, - Plot* plot) const { - std::map time_series; - - for (const auto& st : neteq_stats) { - const uint32_t ssrc = st.first; - const std::vector>* data_vector = - data_extractor(st.second.get()); - for (const auto& data : *data_vector) { - const float time = - config_.GetCallTimeSec(data.first * 1000); // ms to us. 
- const float value = stats_extractor(data.second); - time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value)); - } - } - - for (auto& series : time_series) { - series.second.label = GetStreamName(kIncomingPacket, series.first); - series.second.line_style = LineStyle::kLine; - plot->AppendTimeSeries(std::move(series.second)); - } - - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin); - plot->SetTitle(plot_name); -} - -void EventLogAnalyzer::CreateNetEqNetworkStatsGraph( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView stats_extractor, - const std::string& plot_name, - Plot* plot) const { - CreateNetEqStatsGraphInternal( - neteq_stats, - [](const test::NetEqStatsGetter* stats_getter) { - return stats_getter->stats(); - }, - stats_extractor, plot_name, plot); -} - -void EventLogAnalyzer::CreateNetEqLifetimeStatsGraph( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView stats_extractor, - const std::string& plot_name, - Plot* plot) const { - CreateNetEqStatsGraphInternal( - neteq_stats, - [](const test::NetEqStatsGetter* stats_getter) { - return stats_getter->lifetime_stats(); - }, - stats_extractor, plot_name, plot); -} - void EventLogAnalyzer::CreateIceCandidatePairConfigGraph(Plot* plot) { std::map configs_by_cp_id; for (const auto& config : parsed_log_.ice_candidate_pair_configs()) { @@ -2237,181 +1775,4 @@ void EventLogAnalyzer::CreateDtlsWritableStateGraph(Plot* plot) { plot->SetTitle("DTLS Writable State"); } -void EventLogAnalyzer::PrintNotifications(FILE* file) { - fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n"); - for (const auto& alert : incoming_rtp_recv_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : incoming_rtcp_recv_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); 
- } - for (const auto& alert : outgoing_rtp_send_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_rtcp_send_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : incoming_seq_num_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : incoming_capture_time_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_seq_num_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_capture_time_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_high_loss_alerts_) { - fprintf(file, " : %s\n", alert.ToString().c_str()); - } - fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n"); -} - -void EventLogAnalyzer::CreateStreamGapAlerts(PacketDirection direction) { - // With 100 packets/s (~800kbps), false positives would require 10 s without - // data. - constexpr int64_t kMaxSeqNumJump = 1000; - // With a 90 kHz clock, false positives would require 10 s without data. - constexpr int64_t kMaxCaptureTimeJump = 900000; - - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits::max() - : log_segments_.front().second; - - SeqNumUnwrapper seq_num_unwrapper; - absl::optional last_seq_num; - SeqNumUnwrapper capture_time_unwrapper; - absl::optional last_capture_time; - // Check for gaps in sequence numbers and capture timestamps. - for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { - for (const auto& packet : stream.packet_view) { - if (packet.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. 
- break; - } - - int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber); - if (last_seq_num.has_value() && - std::abs(seq_num - last_seq_num.value()) > kMaxSeqNumJump) { - Alert_SeqNumJump(direction, - config_.GetCallTimeSec(packet.log_time_us()), - packet.header.ssrc); - } - last_seq_num.emplace(seq_num); - - int64_t capture_time = - capture_time_unwrapper.Unwrap(packet.header.timestamp); - if (last_capture_time.has_value() && - std::abs(capture_time - last_capture_time.value()) > - kMaxCaptureTimeJump) { - Alert_CaptureTimeJump(direction, - config_.GetCallTimeSec(packet.log_time_us()), - packet.header.ssrc); - } - last_capture_time.emplace(capture_time); - } - } -} - -void EventLogAnalyzer::CreateTransmissionGapAlerts(PacketDirection direction) { - constexpr int64_t kMaxRtpTransmissionGap = 500000; - constexpr int64_t kMaxRtcpTransmissionGap = 2000000; - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits::max() - : log_segments_.front().second; - - // TODO(terelius): The parser could provide a list of all packets, ordered - // by time, for each direction. - std::multimap rtp_in_direction; - for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { - for (const LoggedRtpPacket& rtp_packet : stream.packet_view) - rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet); - } - absl::optional last_rtp_time; - for (const auto& kv : rtp_in_direction) { - int64_t timestamp = kv.first; - if (timestamp > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. - break; - } - int64_t duration = timestamp - last_rtp_time.value_or(0); - if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) { - // No packet sent/received for more than 500 ms. 
- Alert_RtpLogTimeGap(direction, config_.GetCallTimeSec(timestamp), - duration / 1000); - } - last_rtp_time.emplace(timestamp); - } - - absl::optional last_rtcp_time; - if (direction == kIncomingPacket) { - for (const auto& rtcp : parsed_log_.incoming_rtcp_packets()) { - if (rtcp.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. - break; - } - int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); - if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { - // No feedback sent/received for more than 2000 ms. - Alert_RtcpLogTimeGap(direction, - config_.GetCallTimeSec(rtcp.log_time_us()), - duration / 1000); - } - last_rtcp_time.emplace(rtcp.log_time_us()); - } - } else { - for (const auto& rtcp : parsed_log_.outgoing_rtcp_packets()) { - if (rtcp.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. - break; - } - int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); - if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { - // No feedback sent/received for more than 2000 ms. - Alert_RtcpLogTimeGap(direction, - config_.GetCallTimeSec(rtcp.log_time_us()), - duration / 1000); - } - last_rtcp_time.emplace(rtcp.log_time_us()); - } - } -} - -// TODO(terelius): Notifications could possibly be generated by the same code -// that produces the graphs. There is some code duplication that could be -// avoided, but that might be solved anyway when we move functionality from the -// analyzer to the parser. -void EventLogAnalyzer::CreateTriageNotifications() { - CreateStreamGapAlerts(kIncomingPacket); - CreateStreamGapAlerts(kOutgoingPacket); - CreateTransmissionGapAlerts(kIncomingPacket); - CreateTransmissionGapAlerts(kOutgoingPacket); - - int64_t end_time_us = log_segments_.empty() - ? 
std::numeric_limits::max() - : log_segments_.front().second; - - constexpr double kMaxLossFraction = 0.05; - // Loss feedback - int64_t total_lost_packets = 0; - int64_t total_expected_packets = 0; - for (auto& bwe_update : parsed_log_.bwe_loss_updates()) { - if (bwe_update.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. - break; - } - int64_t lost_packets = static_cast(bwe_update.fraction_lost) / 255 * - bwe_update.expected_packets; - total_lost_packets += lost_packets; - total_expected_packets += bwe_update.expected_packets; - } - double avg_outgoing_loss = - static_cast(total_lost_packets) / total_expected_packets; - if (avg_outgoing_loss > kMaxLossFraction) { - Alert_OutgoingHighLoss(avg_outgoing_loss); - } -} - } // namespace webrtc diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer.h b/rtc_tools/rtc_event_log_visualizer/analyzer.h index c4f722069b..4918cf48e1 100644 --- a/rtc_tools/rtc_event_log_visualizer/analyzer.h +++ b/rtc_tools/rtc_event_log_visualizer/analyzer.h @@ -21,41 +21,18 @@ #include "logging/rtc_event_log/rtc_event_log_parser.h" #include "modules/audio_coding/neteq/tools/neteq_stats_getter.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" #include "rtc_tools/rtc_event_log_visualizer/plot_base.h" -#include "rtc_tools/rtc_event_log_visualizer/triage_notifications.h" namespace webrtc { -class AnalyzerConfig { - public: - float GetCallTimeSec(int64_t timestamp_us) const { - int64_t offset = normalize_time_ ? begin_time_ : 0; - return static_cast(timestamp_us - offset) / 1000000; - } - - float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); } - - float CallEndTimeSec() const { return GetCallTimeSec(end_time_); } - - // Window and step size used for calculating moving averages, e.g. bitrate. - // The generated data points will be |step_| microseconds apart. 
- // Only events occurring at most |window_duration_| microseconds before the - // current data point will be part of the average. - int64_t window_duration_; - int64_t step_; - - // First and last events of the log. - int64_t begin_time_; - int64_t end_time_; - bool normalize_time_; -}; - class EventLogAnalyzer { public: // The EventLogAnalyzer keeps a reference to the ParsedRtcEventLogNew for the // duration of its lifetime. The ParsedRtcEventLogNew must not be destroyed or // modified while the EventLogAnalyzer is being used. EventLogAnalyzer(const ParsedRtcEventLog& log, bool normalize_time); + EventLogAnalyzer(const ParsedRtcEventLog& log, const AnalyzerConfig& config); void CreatePacketGraph(PacketDirection direction, Plot* plot); @@ -63,6 +40,10 @@ class EventLogAnalyzer { void CreateAccumulatedPacketsGraph(PacketDirection direction, Plot* plot); + void CreatePacketRateGraph(PacketDirection direction, Plot* plot); + + void CreateTotalPacketRateGraph(PacketDirection direction, Plot* plot); + void CreatePlayoutGraph(Plot* plot); void CreateAudioLevelGraph(PacketDirection direction, Plot* plot); @@ -98,32 +79,6 @@ class EventLogAnalyzer { std::string yaxis_label, Plot* plot); - void CreateAudioEncoderTargetBitrateGraph(Plot* plot); - void CreateAudioEncoderFrameLengthGraph(Plot* plot); - void CreateAudioEncoderPacketLossGraph(Plot* plot); - void CreateAudioEncoderEnableFecGraph(Plot* plot); - void CreateAudioEncoderEnableDtxGraph(Plot* plot); - void CreateAudioEncoderNumChannelsGraph(Plot* plot); - - using NetEqStatsGetterMap = - std::map>; - NetEqStatsGetterMap SimulateNetEq(const std::string& replacement_file_name, - int file_sample_rate_hz) const; - - void CreateAudioJitterBufferGraph(uint32_t ssrc, - const test::NetEqStatsGetter* stats_getter, - Plot* plot) const; - void CreateNetEqNetworkStatsGraph( - const NetEqStatsGetterMap& neteq_stats_getters, - rtc::FunctionView stats_extractor, - const std::string& plot_name, - Plot* plot) const; - void 
CreateNetEqLifetimeStatsGraph( - const NetEqStatsGetterMap& neteq_stats_getters, - rtc::FunctionView stats_extractor, - const std::string& plot_name, - Plot* plot) const; - void CreateIceCandidatePairConfigGraph(Plot* plot); void CreateIceConnectivityCheckGraph(Plot* plot); @@ -134,145 +89,11 @@ class EventLogAnalyzer { void PrintNotifications(FILE* file); private: - struct LayerDescription { - LayerDescription(uint32_t ssrc, - uint8_t spatial_layer, - uint8_t temporal_layer) - : ssrc(ssrc), - spatial_layer(spatial_layer), - temporal_layer(temporal_layer) {} - bool operator<(const LayerDescription& other) const { - if (ssrc != other.ssrc) - return ssrc < other.ssrc; - if (spatial_layer != other.spatial_layer) - return spatial_layer < other.spatial_layer; - return temporal_layer < other.temporal_layer; - } - uint32_t ssrc; - uint8_t spatial_layer; - uint8_t temporal_layer; - }; - - bool IsRtxSsrc(PacketDirection direction, uint32_t ssrc) const { - if (direction == kIncomingPacket) { - return parsed_log_.incoming_rtx_ssrcs().find(ssrc) != - parsed_log_.incoming_rtx_ssrcs().end(); - } else { - return parsed_log_.outgoing_rtx_ssrcs().find(ssrc) != - parsed_log_.outgoing_rtx_ssrcs().end(); - } - } - - bool IsVideoSsrc(PacketDirection direction, uint32_t ssrc) const { - if (direction == kIncomingPacket) { - return parsed_log_.incoming_video_ssrcs().find(ssrc) != - parsed_log_.incoming_video_ssrcs().end(); - } else { - return parsed_log_.outgoing_video_ssrcs().find(ssrc) != - parsed_log_.outgoing_video_ssrcs().end(); - } - } - - bool IsAudioSsrc(PacketDirection direction, uint32_t ssrc) const { - if (direction == kIncomingPacket) { - return parsed_log_.incoming_audio_ssrcs().find(ssrc) != - parsed_log_.incoming_audio_ssrcs().end(); - } else { - return parsed_log_.outgoing_audio_ssrcs().find(ssrc) != - parsed_log_.outgoing_audio_ssrcs().end(); - } - } - - template - void CreateNetEqStatsGraphInternal( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView>*( - const 
test::NetEqStatsGetter*)> data_extractor, - rtc::FunctionView stats_extractor, - const std::string& plot_name, - Plot* plot) const; - template void CreateAccumulatedPacketsTimeSeries(Plot* plot, const IterableType& packets, const std::string& label); - void CreateStreamGapAlerts(PacketDirection direction); - void CreateTransmissionGapAlerts(PacketDirection direction); - - std::string GetStreamName(PacketDirection direction, uint32_t ssrc) const { - char buffer[200]; - rtc::SimpleStringBuilder name(buffer); - if (IsAudioSsrc(direction, ssrc)) { - name << "Audio "; - } else if (IsVideoSsrc(direction, ssrc)) { - name << "Video "; - } else { - name << "Unknown "; - } - if (IsRtxSsrc(direction, ssrc)) { - name << "RTX "; - } - if (direction == kIncomingPacket) - name << "(In) "; - else - name << "(Out) "; - name << "SSRC " << ssrc; - return name.str(); - } - - std::string GetLayerName(LayerDescription layer) const { - char buffer[100]; - rtc::SimpleStringBuilder name(buffer); - name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl " - << layer.temporal_layer; - return name.str(); - } - - void Alert_RtpLogTimeGap(PacketDirection direction, - float time_seconds, - int64_t duration) { - if (direction == kIncomingPacket) { - incoming_rtp_recv_time_gaps_.emplace_back(time_seconds, duration); - } else { - outgoing_rtp_send_time_gaps_.emplace_back(time_seconds, duration); - } - } - - void Alert_RtcpLogTimeGap(PacketDirection direction, - float time_seconds, - int64_t duration) { - if (direction == kIncomingPacket) { - incoming_rtcp_recv_time_gaps_.emplace_back(time_seconds, duration); - } else { - outgoing_rtcp_send_time_gaps_.emplace_back(time_seconds, duration); - } - } - - void Alert_SeqNumJump(PacketDirection direction, - float time_seconds, - uint32_t ssrc) { - if (direction == kIncomingPacket) { - incoming_seq_num_jumps_.emplace_back(time_seconds, ssrc); - } else { - outgoing_seq_num_jumps_.emplace_back(time_seconds, ssrc); - } - } - - void 
Alert_CaptureTimeJump(PacketDirection direction, - float time_seconds, - uint32_t ssrc) { - if (direction == kIncomingPacket) { - incoming_capture_time_jumps_.emplace_back(time_seconds, ssrc); - } else { - outgoing_capture_time_jumps_.emplace_back(time_seconds, ssrc); - } - } - - void Alert_OutgoingHighLoss(double avg_loss_fraction) { - outgoing_high_loss_alerts_.emplace_back(avg_loss_fraction); - } - std::string GetCandidatePairLogDescriptionFromId(uint32_t candidate_pair_id); const ParsedRtcEventLog& parsed_log_; @@ -281,20 +102,6 @@ class EventLogAnalyzer { // If left empty, all SSRCs will be considered relevant. std::vector desired_ssrc_; - // Stores the timestamps for all log segments, in the form of associated start - // and end events. - std::vector> log_segments_; - - std::vector incoming_rtp_recv_time_gaps_; - std::vector incoming_rtcp_recv_time_gaps_; - std::vector outgoing_rtp_send_time_gaps_; - std::vector outgoing_rtcp_send_time_gaps_; - std::vector incoming_seq_num_jumps_; - std::vector incoming_capture_time_jumps_; - std::vector outgoing_seq_num_jumps_; - std::vector outgoing_capture_time_jumps_; - std::vector outgoing_high_loss_alerts_; - std::map candidate_pair_desc_by_id_; AnalyzerConfig config_; diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc b/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc new file mode 100644 index 0000000000..3d3ce5a4ac --- /dev/null +++ b/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc @@ -0,0 +1,83 @@ + +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" + +namespace webrtc { + +bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + if (direction == kIncomingPacket) { + return parsed_log.incoming_rtx_ssrcs().find(ssrc) != + parsed_log.incoming_rtx_ssrcs().end(); + } else { + return parsed_log.outgoing_rtx_ssrcs().find(ssrc) != + parsed_log.outgoing_rtx_ssrcs().end(); + } +} + +bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + if (direction == kIncomingPacket) { + return parsed_log.incoming_video_ssrcs().find(ssrc) != + parsed_log.incoming_video_ssrcs().end(); + } else { + return parsed_log.outgoing_video_ssrcs().find(ssrc) != + parsed_log.outgoing_video_ssrcs().end(); + } +} + +bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + if (direction == kIncomingPacket) { + return parsed_log.incoming_audio_ssrcs().find(ssrc) != + parsed_log.incoming_audio_ssrcs().end(); + } else { + return parsed_log.outgoing_audio_ssrcs().find(ssrc) != + parsed_log.outgoing_audio_ssrcs().end(); + } +} + +std::string GetStreamName(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + char buffer[200]; + rtc::SimpleStringBuilder name(buffer); + if (IsAudioSsrc(parsed_log, direction, ssrc)) { + name << "Audio "; + } else if (IsVideoSsrc(parsed_log, direction, ssrc)) { + name << "Video "; + } else { + name << "Unknown "; + } + if (IsRtxSsrc(parsed_log, direction, ssrc)) { + name << "RTX "; + } + if (direction == kIncomingPacket) + name << "(In) "; + else + name << "(Out) "; + name << "SSRC " << ssrc; + return name.str(); +} + +std::string GetLayerName(LayerDescription layer) { + char buffer[100]; + rtc::SimpleStringBuilder name(buffer); + name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl " + << layer.temporal_layer; + return name.str(); +} + +} // namespace webrtc 
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer_common.h b/rtc_tools/rtc_event_log_visualizer/analyzer_common.h new file mode 100644 index 0000000000..d5776acf62 --- /dev/null +++ b/rtc_tools/rtc_event_log_visualizer/analyzer_common.h @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_ +#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/function_view.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "rtc_tools/rtc_event_log_visualizer/plot_base.h" + +namespace webrtc { + +constexpr int kNumMicrosecsPerSec = 1000000; +constexpr float kLeftMargin = 0.01f; +constexpr float kRightMargin = 0.02f; +constexpr float kBottomMargin = 0.02f; +constexpr float kTopMargin = 0.05f; + +class AnalyzerConfig { + public: + float GetCallTimeSec(int64_t timestamp_us) const { + int64_t offset = normalize_time_ ? begin_time_ : 0; + return static_cast(timestamp_us - offset) / 1000000; + } + + float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); } + + float CallEndTimeSec() const { return GetCallTimeSec(end_time_); } + + // Window and step size used for calculating moving averages, e.g. bitrate. + // The generated data points will be |step_| microseconds apart. + // Only events occurring at most |window_duration_| microseconds before the + // current data point will be part of the average. + int64_t window_duration_; + int64_t step_; + + // First and last events of the log. 
+ int64_t begin_time_; + int64_t end_time_; + bool normalize_time_; +}; + +struct LayerDescription { + LayerDescription(uint32_t ssrc, uint8_t spatial_layer, uint8_t temporal_layer) + : ssrc(ssrc), + spatial_layer(spatial_layer), + temporal_layer(temporal_layer) {} + bool operator<(const LayerDescription& other) const { + if (ssrc != other.ssrc) + return ssrc < other.ssrc; + if (spatial_layer != other.spatial_layer) + return spatial_layer < other.spatial_layer; + return temporal_layer < other.temporal_layer; + } + uint32_t ssrc; + uint8_t spatial_layer; + uint8_t temporal_layer; +}; + +bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); +bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); +bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); + +std::string GetStreamName(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); +std::string GetLayerName(LayerDescription layer); + +// For each element in data_view, use |f()| to extract a y-coordinate and +// store the result in a TimeSeries. +template +void ProcessPoints(rtc::FunctionView fx, + rtc::FunctionView(const DataType&)> fy, + const IterableType& data_view, + TimeSeries* result) { + for (size_t i = 0; i < data_view.size(); i++) { + const DataType& elem = data_view[i]; + float x = fx(elem); + absl::optional y = fy(elem); + if (y) + result->points.emplace_back(x, *y); + } +} + +// For each pair of adjacent elements in |data|, use |f()| to extract a +// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate +// will be the time of the second element in the pair. 
+template +void ProcessPairs( + rtc::FunctionView fx, + rtc::FunctionView(const DataType&, + const DataType&)> fy, + const IterableType& data, + TimeSeries* result) { + for (size_t i = 1; i < data.size(); i++) { + float x = fx(data[i]); + absl::optional y = fy(data[i - 1], data[i]); + if (y) + result->points.emplace_back(x, static_cast(*y)); + } +} + +// For each pair of adjacent elements in |data|, use |f()| to extract a +// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate +// will be the time of the second element in the pair. +template +void AccumulatePairs( + rtc::FunctionView fx, + rtc::FunctionView(const DataType&, + const DataType&)> fy, + const IterableType& data, + TimeSeries* result) { + ResultType sum = 0; + for (size_t i = 1; i < data.size(); i++) { + float x = fx(data[i]); + absl::optional y = fy(data[i - 1], data[i]); + if (y) { + sum += *y; + result->points.emplace_back(x, static_cast(sum)); + } + } +} + +// Calculates a moving average of |data| and stores the result in a TimeSeries. +// A data point is generated every |step| microseconds from |begin_time| +// to |end_time|. The value of each data point is the average of the data +// during the preceding |window_duration_us| microseconds. 
+template +void MovingAverage( + rtc::FunctionView(const DataType&)> fy, + const IterableType& data_view, + AnalyzerConfig config, + TimeSeries* result) { + size_t window_index_begin = 0; + size_t window_index_end = 0; + ResultType sum_in_window = 0; + + for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_; + t += config.step_) { + while (window_index_end < data_view.size() && + data_view[window_index_end].log_time_us() < t) { + absl::optional value = fy(data_view[window_index_end]); + if (value) + sum_in_window += *value; + ++window_index_end; + } + while (window_index_begin < data_view.size() && + data_view[window_index_begin].log_time_us() < + t - config.window_duration_) { + absl::optional value = fy(data_view[window_index_begin]); + if (value) + sum_in_window -= *value; + ++window_index_begin; + } + float window_duration_s = + static_cast(config.window_duration_) / kNumMicrosecsPerSec; + float x = config.GetCallTimeSec(t); + float y = sum_in_window / window_duration_s; + result->points.emplace_back(x, y); + } +} + +} // namespace webrtc + +#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_ diff --git a/rtc_tools/rtc_event_log_visualizer/log_simulation.cc b/rtc_tools/rtc_event_log_visualizer/log_simulation.cc index e3399a8c9c..0e5b5d04a9 100644 --- a/rtc_tools/rtc_event_log_visualizer/log_simulation.cc +++ b/rtc_tools/rtc_event_log_visualizer/log_simulation.cc @@ -33,13 +33,13 @@ void LogBasedNetworkControllerSimulation::ProcessUntil(Timestamp to_time) { if (last_process_.IsInfinite()) { NetworkControllerConfig config; config.constraints.at_time = to_time; - config.constraints.min_data_rate = DataRate::kbps(30); - config.constraints.starting_rate = DataRate::kbps(300); + config.constraints.min_data_rate = DataRate::KilobitsPerSec(30); + config.constraints.starting_rate = DataRate::KilobitsPerSec(300); config.event_log = &null_event_log_; controller_ = factory_->Create(config); } if (last_process_.IsInfinite() || - to_time - 
last_process_ > TimeDelta::seconds(1)) { + to_time - last_process_ > TimeDelta::Seconds(1)) { last_process_ = to_time; current_time_ = to_time; ProcessInterval msg; @@ -86,7 +86,6 @@ void LogBasedNetworkControllerSimulation::OnPacketSent( packet_info.ssrc = packet.ssrc; packet_info.transport_sequence_number = packet.transport_seq_no; packet_info.rtp_sequence_number = packet.stream_seq_no; - packet_info.has_rtp_sequence_number = true; packet_info.length = packet.size; packet_info.pacing_info = probe_info; transport_feedback_.AddPacket(packet_info, packet.overhead, @@ -107,7 +106,7 @@ void LogBasedNetworkControllerSimulation::OnPacketSent( void LogBasedNetworkControllerSimulation::OnFeedback( const LoggedRtcpPacketTransportFeedback& feedback) { - auto feedback_time = Timestamp::ms(feedback.log_time_ms()); + auto feedback_time = Timestamp::Millis(feedback.log_time_ms()); ProcessUntil(feedback_time); auto msg = transport_feedback_.ProcessTransportFeedback( feedback.transport_feedback, feedback_time); @@ -119,7 +118,7 @@ void LogBasedNetworkControllerSimulation::OnReceiverReport( const LoggedRtcpPacketReceiverReport& report) { if (report.rr.report_blocks().empty()) return; - auto report_time = Timestamp::ms(report.log_time_ms()); + auto report_time = Timestamp::Millis(report.log_time_ms()); ProcessUntil(report_time); int packets_delta = 0; int lost_delta = 0; @@ -150,7 +149,7 @@ void LogBasedNetworkControllerSimulation::OnReceiverReport( CompactNtp(TimeMicrosToNtp(report.log_time_us())); uint32_t rtt_ntp = receive_time_ntp - rb.delay_since_last_sr() - rb.last_sr(); - rtt = std::min(rtt, TimeDelta::ms(CompactNtpRttToMs(rtt_ntp))); + rtt = std::min(rtt, TimeDelta::Millis(CompactNtpRttToMs(rtt_ntp))); } } if (rtt.IsFinite()) { @@ -164,12 +163,12 @@ void LogBasedNetworkControllerSimulation::OnReceiverReport( void LogBasedNetworkControllerSimulation::OnIceConfig( const LoggedIceCandidatePairConfig& candidate) { if (candidate.type == IceCandidatePairConfigType::kSelected) { - 
auto log_time = Timestamp::us(candidate.log_time_us()); + auto log_time = Timestamp::Micros(candidate.log_time_us()); ProcessUntil(log_time); NetworkRouteChange msg; msg.at_time = log_time; - msg.constraints.min_data_rate = DataRate::kbps(30); - msg.constraints.starting_rate = DataRate::kbps(300); + msg.constraints.min_data_rate = DataRate::KilobitsPerSec(30); + msg.constraints.starting_rate = DataRate::KilobitsPerSec(300); msg.constraints.at_time = log_time; HandleStateUpdate(controller_->OnNetworkRouteChange(msg)); } diff --git a/rtc_tools/rtc_event_log_visualizer/main.cc b/rtc_tools/rtc_event_log_visualizer/main.cc index cac0cb3fa0..ab4b7ebac1 100644 --- a/rtc_tools/rtc_event_log_visualizer/main.cc +++ b/rtc_tools/rtc_event_log_visualizer/main.cc @@ -30,10 +30,10 @@ #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_tools/rtc_event_log_visualizer/alerts.h" +#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h" #include "rtc_tools/rtc_event_log_visualizer/analyzer.h" #include "rtc_tools/rtc_event_log_visualizer/plot_base.h" -#include "rtc_tools/rtc_event_log_visualizer/plot_protobuf.h" -#include "rtc_tools/rtc_event_log_visualizer/plot_python.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" #include "test/testsupport/file_utils.h" @@ -77,7 +77,7 @@ ABSL_FLAG(bool, ABSL_FLAG(bool, print_triage_alerts, - false, + true, "Print triage alerts, i.e. 
a list of potential problems."); ABSL_FLAG(bool, @@ -194,9 +194,9 @@ int main(int argc, char* argv[]) { "A tool for visualizing WebRTC event logs.\n" "Example usage:\n" "./event_log_visualizer | python\n"); - absl::FlagsUsageConfig config; - config.contains_help_flags = &ContainsHelppackageFlags; - absl::SetFlagsUsageConfig(config); + absl::FlagsUsageConfig flag_config; + flag_config.contains_help_flags = &ContainsHelppackageFlags; + absl::SetFlagsUsageConfig(flag_config); std::vector args = absl::ParseCommandLine(argc, argv); // Print RTC_LOG warnings and errors even in release builds. @@ -229,8 +229,7 @@ int main(int argc, char* argv[]) { {"simulated_neteq_stats", {"simulated_neteq_jitter_buffer_delay", "simulated_neteq_preferred_buffer_size", - "simulated_neteq_concealment_events", - "simulated_neteq_packet_loss_rate", "simulated_neteq_preemptive_rate", + "simulated_neteq_concealment_events", "simulated_neteq_preemptive_rate", "simulated_neteq_accelerate_rate", "simulated_neteq_speech_expand_rate", "simulated_neteq_expand_rate"}}}; @@ -261,16 +260,22 @@ int main(int argc, char* argv[]) { } } - webrtc::EventLogAnalyzer analyzer(parsed_log, - absl::GetFlag(FLAGS_normalize_time)); - std::unique_ptr collection; - if (absl::GetFlag(FLAGS_protobuf_output)) { - collection.reset(new webrtc::ProtobufPlotCollection()); - } else { - collection.reset( - new webrtc::PythonPlotCollection(absl::GetFlag(FLAGS_shared_xaxis))); + webrtc::AnalyzerConfig config; + config.window_duration_ = 250000; + config.step_ = 10000; + config.normalize_time_ = absl::GetFlag(FLAGS_normalize_time); + config.begin_time_ = parsed_log.first_timestamp(); + config.end_time_ = parsed_log.last_timestamp(); + if (config.end_time_ < config.begin_time_) { + RTC_LOG(LS_WARNING) << "Log end time " << config.end_time_ + << " not after begin time " << config.begin_time_ + << ". Nothing to analyze. 
Is the log broken?"; + return -1; } + webrtc::EventLogAnalyzer analyzer(parsed_log, config); + webrtc::PlotCollection collection; + PlotMap plots; plots.RegisterPlot("incoming_packet_sizes", [&](Plot* plot) { analyzer.CreatePacketGraph(webrtc::kIncomingPacket, plot); @@ -291,6 +296,18 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("outgoing_packet_count", [&](Plot* plot) { analyzer.CreateAccumulatedPacketsGraph(webrtc::kOutgoingPacket, plot); }); + plots.RegisterPlot("incoming_packet_rate", [&](Plot* plot) { + analyzer.CreatePacketRateGraph(webrtc::kIncomingPacket, plot); + }); + plots.RegisterPlot("outgoing_packet_rate", [&](Plot* plot) { + analyzer.CreatePacketRateGraph(webrtc::kOutgoingPacket, plot); + }); + plots.RegisterPlot("total_incoming_packet_rate", [&](Plot* plot) { + analyzer.CreateTotalPacketRateGraph(webrtc::kIncomingPacket, plot); + }); + plots.RegisterPlot("total_outgoing_packet_rate", [&](Plot* plot) { + analyzer.CreateTotalPacketRateGraph(webrtc::kOutgoingPacket, plot); + }); plots.RegisterPlot("audio_playout", [&](Plot* plot) { analyzer.CreatePlayoutGraph(plot); }); plots.RegisterPlot("incoming_audio_level", [&](Plot* plot) { @@ -411,22 +428,22 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("pacer_delay", [&](Plot* plot) { analyzer.CreatePacerDelayGraph(plot); }); plots.RegisterPlot("audio_encoder_bitrate", [&](Plot* plot) { - analyzer.CreateAudioEncoderTargetBitrateGraph(plot); + CreateAudioEncoderTargetBitrateGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_frame_length", [&](Plot* plot) { - analyzer.CreateAudioEncoderFrameLengthGraph(plot); + CreateAudioEncoderFrameLengthGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_packet_loss", [&](Plot* plot) { - analyzer.CreateAudioEncoderPacketLossGraph(plot); + CreateAudioEncoderPacketLossGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_fec", [&](Plot* plot) { - analyzer.CreateAudioEncoderEnableFecGraph(plot); + 
CreateAudioEncoderEnableFecGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_dtx", [&](Plot* plot) { - analyzer.CreateAudioEncoderEnableDtxGraph(plot); + CreateAudioEncoderEnableDtxGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_num_channels", [&](Plot* plot) { - analyzer.CreateAudioEncoderNumChannelsGraph(plot); + CreateAudioEncoderNumChannelsGraph(parsed_log, config, plot); }); plots.RegisterPlot("ice_candidate_pair_config", [&](Plot* plot) { @@ -449,14 +466,14 @@ int main(int argc, char* argv[]) { wav_path = webrtc::test::ResourcePath( "audio_processing/conversational_speech/EN_script2_F_sp2_B1", "wav"); } - absl::optional neteq_stats; + absl::optional neteq_stats; plots.RegisterPlot("simulated_neteq_expand_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.expand_rate / 16384.f; }, @@ -465,10 +482,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_speech_expand_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.speech_expand_rate / 16384.f; }, @@ -477,10 +494,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_accelerate_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - 
analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.accelerate_rate / 16384.f; }, @@ -489,34 +506,22 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_preemptive_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.preemptive_rate / 16384.f; }, "Preemptive rate", plot); }); - plots.RegisterPlot("simulated_neteq_packet_loss_rate", [&](Plot* plot) { - if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); - } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, - [](const webrtc::NetEqNetworkStatistics& stats) { - return stats.packet_loss_rate / 16384.f; - }, - "Packet loss rate", plot); - }); - plots.RegisterPlot("simulated_neteq_concealment_events", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqLifetimeStatsGraph( - *neteq_stats, + webrtc::CreateNetEqLifetimeStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqLifetimeStatistics& stats) { return static_cast(stats.concealment_events); }, @@ -525,10 +530,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_preferred_buffer_size", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, 
*neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.preferred_buffer_size_ms; }, @@ -575,7 +580,7 @@ int main(int argc, char* argv[]) { for (const auto& plot : plots) { if (plot.enabled) { - Plot* output = collection->AppendNewPlot(); + Plot* output = collection.AppendNewPlot(); plot.plot_func(output); output->SetId(plot.label); } @@ -589,21 +594,28 @@ int main(int argc, char* argv[]) { if (absl::c_find(plot_flags, "simulated_neteq_jitter_buffer_delay") != plot_flags.end()) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - for (webrtc::EventLogAnalyzer::NetEqStatsGetterMap::const_iterator it = - neteq_stats->cbegin(); + for (webrtc::NetEqStatsGetterMap::const_iterator it = neteq_stats->cbegin(); it != neteq_stats->cend(); ++it) { - analyzer.CreateAudioJitterBufferGraph(it->first, it->second.get(), - collection->AppendNewPlot()); + webrtc::CreateAudioJitterBufferGraph(parsed_log, config, it->first, + it->second.get(), + collection.AppendNewPlot()); } } - collection->Draw(); + if (absl::GetFlag(FLAGS_protobuf_output)) { + webrtc::analytics::ChartCollection proto_charts; + collection.ExportProtobuf(&proto_charts); + std::cout << proto_charts.SerializeAsString(); + } else { + collection.PrintPythonCode(absl::GetFlag(FLAGS_shared_xaxis)); + } if (absl::GetFlag(FLAGS_print_triage_alerts)) { - analyzer.CreateTriageNotifications(); - analyzer.PrintNotifications(stderr); + webrtc::TriageHelper triage_alerts(config); + triage_alerts.AnalyzeLog(parsed_log); + triage_alerts.Print(stderr); } return 0; diff --git a/rtc_tools/rtc_event_log_visualizer/plot_base.cc b/rtc_tools/rtc_event_log_visualizer/plot_base.cc index dfcd26fed5..dce601a832 100644 --- a/rtc_tools/rtc_event_log_visualizer/plot_base.cc +++ b/rtc_tools/rtc_event_log_visualizer/plot_base.cc @@ -11,6 +11,7 @@ #include "rtc_tools/rtc_event_log_visualizer/plot_base.h" #include +#include 
#include "rtc_base/checks.h" @@ -93,4 +94,232 @@ void Plot::AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series) { } } +void Plot::PrintPythonCode() const { + // Write python commands to stdout. Intended program usage is + // ./event_log_visualizer event_log160330.dump | python + + if (!series_list_.empty()) { + printf("color_count = %zu\n", series_list_.size()); + printf( + "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i " + "in range(color_count)]\n"); + printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n"); + + for (size_t i = 0; i < series_list_.size(); i++) { + printf("\n# === Series: %s ===\n", series_list_[i].label.c_str()); + // List x coordinates + printf("x%zu = [", i); + if (!series_list_[i].points.empty()) + printf("%.3f", series_list_[i].points[0].x); + for (size_t j = 1; j < series_list_[i].points.size(); j++) + printf(", %.3f", series_list_[i].points[j].x); + printf("]\n"); + + // List y coordinates + printf("y%zu = [", i); + if (!series_list_[i].points.empty()) + printf("%G", series_list_[i].points[0].y); + for (size_t j = 1; j < series_list_[i].points.size(); j++) + printf(", %G", series_list_[i].points[j].y); + printf("]\n"); + + if (series_list_[i].line_style == LineStyle::kBar) { + // There is a plt.bar function that draws bar plots, + // but it is *way* too slow to be useful. 
+ printf( + "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: " + "max(t,0), y%zu), color=colors[%zu], " + "label=\'%s\')\n", + i, i, i, i, series_list_[i].label.c_str()); + if (series_list_[i].point_style == PointStyle::kHighlight) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], " + "marker='.', ls=' ')\n", + i, i, i); + } + } else if (series_list_[i].line_style == LineStyle::kLine) { + if (series_list_[i].point_style == PointStyle::kHighlight) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " + "marker='.')\n", + i, i, i, series_list_[i].label.c_str()); + } else { + printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i, + i, i, series_list_[i].label.c_str()); + } + } else if (series_list_[i].line_style == LineStyle::kStep) { + // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on + // to illustrate the "steps". This can be expressed by duplicating all + // elements except the first in x and the last in y. + printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i); + printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i); + printf( + "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], " + "label=\'%s\')\n", + i, i, i, series_list_[i].label.c_str()); + if (series_list_[i].point_style == PointStyle::kHighlight) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], " + "marker='.', ls=' ')\n", + i, i, i); + } + } else if (series_list_[i].line_style == LineStyle::kNone) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " + "marker='o', ls=' ')\n", + i, i, i, series_list_[i].label.c_str()); + } else { + printf("raise Exception(\"Unknown graph type\")\n"); + } + } + + // IntervalSeries + printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n"); + RTC_CHECK_LE(interval_list_.size(), 4); + // To get the intervals to show up in the legend we have to create patches + // for them. 
+ printf("legend_patches = []\n"); + for (size_t i = 0; i < interval_list_.size(); i++) { + // List intervals + printf("\n# === IntervalSeries: %s ===\n", + interval_list_[i].label.c_str()); + printf("ival%zu = [", i); + if (!interval_list_[i].intervals.empty()) { + printf("(%G, %G)", interval_list_[i].intervals[0].begin, + interval_list_[i].intervals[0].end); + } + for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) { + printf(", (%G, %G)", interval_list_[i].intervals[j].begin, + interval_list_[i].intervals[j].end); + } + printf("]\n"); + + printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size()); + if (interval_list_[i].orientation == IntervalSeries::kVertical) { + printf( + " plt.axhspan(ival%zu[i][0], ival%zu[i][1], " + "facecolor=interval_colors[%zu], " + "alpha=0.3)\n", + i, i, i); + } else { + printf( + " plt.axvspan(ival%zu[i][0], ival%zu[i][1], " + "facecolor=interval_colors[%zu], " + "alpha=0.3)\n", + i, i, i); + } + printf( + "legend_patches.append(mpatches.Patch(ec=\'black\', " + "fc=interval_colors[%zu], label='%s'))\n", + i, interval_list_[i].label.c_str()); + } + } + + printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_); + printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_); + printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str()); + printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str()); + printf("plt.title(\'%s\')\n", title_.c_str()); + printf("fig = plt.gcf()\n"); + printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str()); + if (!yaxis_tick_labels_.empty()) { + printf("yaxis_tick_labels = ["); + for (const auto& kv : yaxis_tick_labels_) { + printf("(%f,\"%s\"),", kv.first, kv.second.c_str()); + } + printf("]\n"); + printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n"); + printf("plt.yticks(*yaxis_tick_labels)\n"); + } + if (!series_list_.empty() || !interval_list_.empty()) { + printf("handles, labels = plt.gca().get_legend_handles_labels()\n"); + printf("for lp in legend_patches:\n"); + printf(" 
handles.append(lp)\n"); + printf(" labels.append(lp.get_label())\n"); + printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n"); + } +} + +void Plot::ExportProtobuf(webrtc::analytics::Chart* chart) const { + for (size_t i = 0; i < series_list_.size(); i++) { + webrtc::analytics::DataSet* data_set = chart->add_data_sets(); + for (const auto& point : series_list_[i].points) { + data_set->add_x_values(point.x); + } + for (const auto& point : series_list_[i].points) { + data_set->add_y_values(point.y); + } + + if (series_list_[i].line_style == LineStyle::kBar) { + data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART); + } else if (series_list_[i].line_style == LineStyle::kLine) { + data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART); + } else if (series_list_[i].line_style == LineStyle::kStep) { + data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART); + } else if (series_list_[i].line_style == LineStyle::kNone) { + data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART); + } else { + data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED); + } + + if (series_list_[i].point_style == PointStyle::kHighlight) + data_set->set_highlight_points(true); + + data_set->set_label(series_list_[i].label); + } + + chart->set_xaxis_min(xaxis_min_); + chart->set_xaxis_max(xaxis_max_); + chart->set_yaxis_min(yaxis_min_); + chart->set_yaxis_max(yaxis_max_); + chart->set_xaxis_label(xaxis_label_); + chart->set_yaxis_label(yaxis_label_); + chart->set_title(title_); + chart->set_id(id_); + + for (const auto& kv : yaxis_tick_labels_) { + webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels(); + tick->set_value(kv.first); + tick->set_label(kv.second); + } +} + +void PlotCollection::PrintPythonCode(bool shared_xaxis) const { + printf("import matplotlib.pyplot as plt\n"); + printf("plt.rcParams.update({'figure.max_open_warning': 0})\n"); + printf("import matplotlib.patches as mpatches\n"); + printf("import 
matplotlib.patheffects as pe\n"); + printf("import colorsys\n"); + for (size_t i = 0; i < plots_.size(); i++) { + printf("plt.figure(%zu)\n", i); + if (shared_xaxis) { + // Link x-axes across all figures for synchronized zooming. + if (i == 0) { + printf("axis0 = plt.subplot(111)\n"); + } else { + printf("plt.subplot(111, sharex=axis0)\n"); + } + } + plots_[i]->PrintPythonCode(); + } + printf("plt.show()\n"); +} + +void PlotCollection::ExportProtobuf( + webrtc::analytics::ChartCollection* collection) const { + for (const auto& plot : plots_) { + // TODO(terelius): Ensure that there is no way to insert plots other than + // ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast + // here. + webrtc::analytics::Chart* protobuf_representation = + collection->add_charts(); + plot->ExportProtobuf(protobuf_representation); + } +} + +Plot* PlotCollection::AppendNewPlot() { + plots_.push_back(std::make_unique()); + return plots_.back().get(); +} + } // namespace webrtc diff --git a/rtc_tools/rtc_event_log_visualizer/plot_base.h b/rtc_tools/rtc_event_log_visualizer/plot_base.h index 5e4ebfa522..06a206f031 100644 --- a/rtc_tools/rtc_event_log_visualizer/plot_base.h +++ b/rtc_tools/rtc_event_log_visualizer/plot_base.h @@ -15,6 +15,13 @@ #include #include +#include "rtc_base/deprecation.h" +#include "rtc_base/ignore_wundef.h" + +RTC_PUSH_IGNORING_WUNDEF() +#include "rtc_tools/rtc_event_log_visualizer/proto/chart.pb.h" +RTC_POP_IGNORING_WUNDEF() + namespace webrtc { enum class LineStyle { @@ -94,8 +101,8 @@ class Plot { public: virtual ~Plot() {} - // Overloaded to draw the plot. - virtual void Draw() = 0; + // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead. + RTC_DEPRECATED virtual void Draw() {} // Sets the lower x-axis limit to min_value (if left_margin == 0). // Sets the upper x-axis limit to max_value (if right_margin == 0). @@ -158,6 +165,12 @@ class Plot { // Otherwise, the call has no effect and the timeseries is destroyed. 
void AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series); + // Replaces PythonPlot::Draw() + void PrintPythonCode() const; + + // Replaces ProtobufPlot::Draw() + void ExportProtobuf(webrtc::analytics::Chart* chart) const; + protected: float xaxis_min_; float xaxis_max_; @@ -175,8 +188,17 @@ class Plot { class PlotCollection { public: virtual ~PlotCollection() {} - virtual void Draw() = 0; - virtual Plot* AppendNewPlot() = 0; + + // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead. + RTC_DEPRECATED virtual void Draw() {} + + virtual Plot* AppendNewPlot(); + + // Replaces PythonPlotCollection::Draw() + void PrintPythonCode(bool shared_xaxis) const; + + // Replaces ProtobufPlotCollections::Draw() + void ExportProtobuf(webrtc::analytics::ChartCollection* collection) const; protected: std::vector> plots_; diff --git a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc index 9e82c01ba6..0f43191e8b 100644 --- a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc +++ b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc @@ -24,49 +24,7 @@ ProtobufPlot::~ProtobufPlot() {} void ProtobufPlot::Draw() {} -void ProtobufPlot::ExportProtobuf(webrtc::analytics::Chart* chart) { - for (size_t i = 0; i < series_list_.size(); i++) { - webrtc::analytics::DataSet* data_set = chart->add_data_sets(); - for (const auto& point : series_list_[i].points) { - data_set->add_x_values(point.x); - } - for (const auto& point : series_list_[i].points) { - data_set->add_y_values(point.y); - } - if (series_list_[i].line_style == LineStyle::kBar) { - data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART); - } else if (series_list_[i].line_style == LineStyle::kLine) { - data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART); - } else if (series_list_[i].line_style == LineStyle::kStep) { - data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART); - } else if (series_list_[i].line_style == LineStyle::kNone) { - 
data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART); - } else { - data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED); - } - - if (series_list_[i].point_style == PointStyle::kHighlight) - data_set->set_highlight_points(true); - - data_set->set_label(series_list_[i].label); - } - - chart->set_xaxis_min(xaxis_min_); - chart->set_xaxis_max(xaxis_max_); - chart->set_yaxis_min(yaxis_min_); - chart->set_yaxis_max(yaxis_max_); - chart->set_xaxis_label(xaxis_label_); - chart->set_yaxis_label(yaxis_label_); - chart->set_title(title_); - chart->set_id(id_); - - for (const auto& kv : yaxis_tick_labels_) { - webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels(); - tick->set_value(kv.first); - tick->set_label(kv.second); - } -} ProtobufPlotCollection::ProtobufPlotCollection() {} @@ -78,19 +36,6 @@ void ProtobufPlotCollection::Draw() { std::cout << collection.SerializeAsString(); } -void ProtobufPlotCollection::ExportProtobuf( - webrtc::analytics::ChartCollection* collection) { - for (const auto& plot : plots_) { - // TODO(terelius): Ensure that there is no way to insert plots other than - // ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast - // here. 
- webrtc::analytics::Chart* protobuf_representation = - collection->add_charts(); - static_cast(plot.get()) - ->ExportProtobuf(protobuf_representation); - } -} - Plot* ProtobufPlotCollection::AppendNewPlot() { Plot* plot = new ProtobufPlot(); plots_.push_back(std::unique_ptr(plot)); diff --git a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h index 738247a309..0773b58d20 100644 --- a/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h +++ b/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h @@ -23,16 +23,15 @@ class ProtobufPlot final : public Plot { ProtobufPlot(); ~ProtobufPlot() override; void Draw() override; - void ExportProtobuf(webrtc::analytics::Chart* chart); }; class ProtobufPlotCollection final : public PlotCollection { public: - ProtobufPlotCollection(); + // This class is deprecated. Use PlotCollection and ExportProtobuf() instead. + RTC_DEPRECATED ProtobufPlotCollection(); ~ProtobufPlotCollection() override; void Draw() override; Plot* AppendNewPlot() override; - void ExportProtobuf(webrtc::analytics::ChartCollection* collection); }; } // namespace webrtc diff --git a/rtc_tools/rtc_event_log_visualizer/plot_python.cc b/rtc_tools/rtc_event_log_visualizer/plot_python.cc index e7cde45f30..b3708110df 100644 --- a/rtc_tools/rtc_event_log_visualizer/plot_python.cc +++ b/rtc_tools/rtc_event_log_visualizer/plot_python.cc @@ -25,149 +25,7 @@ PythonPlot::PythonPlot() {} PythonPlot::~PythonPlot() {} void PythonPlot::Draw() { - // Write python commands to stdout. 
Intended program usage is - // ./event_log_visualizer event_log160330.dump | python - - if (!series_list_.empty()) { - printf("color_count = %zu\n", series_list_.size()); - printf( - "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i " - "in range(color_count)]\n"); - printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n"); - - for (size_t i = 0; i < series_list_.size(); i++) { - printf("\n# === Series: %s ===\n", series_list_[i].label.c_str()); - // List x coordinates - printf("x%zu = [", i); - if (!series_list_[i].points.empty()) - printf("%.3f", series_list_[i].points[0].x); - for (size_t j = 1; j < series_list_[i].points.size(); j++) - printf(", %.3f", series_list_[i].points[j].x); - printf("]\n"); - - // List y coordinates - printf("y%zu = [", i); - if (!series_list_[i].points.empty()) - printf("%G", series_list_[i].points[0].y); - for (size_t j = 1; j < series_list_[i].points.size(); j++) - printf(", %G", series_list_[i].points[j].y); - printf("]\n"); - - if (series_list_[i].line_style == LineStyle::kBar) { - // There is a plt.bar function that draws bar plots, - // but it is *way* too slow to be useful. 
- printf( - "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: " - "max(t,0), y%zu), color=colors[%zu], " - "label=\'%s\')\n", - i, i, i, i, series_list_[i].label.c_str()); - if (series_list_[i].point_style == PointStyle::kHighlight) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], " - "marker='.', ls=' ')\n", - i, i, i); - } - } else if (series_list_[i].line_style == LineStyle::kLine) { - if (series_list_[i].point_style == PointStyle::kHighlight) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " - "marker='.')\n", - i, i, i, series_list_[i].label.c_str()); - } else { - printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i, - i, i, series_list_[i].label.c_str()); - } - } else if (series_list_[i].line_style == LineStyle::kStep) { - // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on - // to illustrate the "steps". This can be expressed by duplicating all - // elements except the first in x and the last in y. - printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i); - printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i); - printf( - "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], " - "label=\'%s\')\n", - i, i, i, series_list_[i].label.c_str()); - if (series_list_[i].point_style == PointStyle::kHighlight) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], " - "marker='.', ls=' ')\n", - i, i, i); - } - } else if (series_list_[i].line_style == LineStyle::kNone) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " - "marker='o', ls=' ')\n", - i, i, i, series_list_[i].label.c_str()); - } else { - printf("raise Exception(\"Unknown graph type\")\n"); - } - } - - // IntervalSeries - printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n"); - RTC_CHECK_LE(interval_list_.size(), 4); - // To get the intervals to show up in the legend we have to create patches - // for them. 
- printf("legend_patches = []\n"); - for (size_t i = 0; i < interval_list_.size(); i++) { - // List intervals - printf("\n# === IntervalSeries: %s ===\n", - interval_list_[i].label.c_str()); - printf("ival%zu = [", i); - if (!interval_list_[i].intervals.empty()) { - printf("(%G, %G)", interval_list_[i].intervals[0].begin, - interval_list_[i].intervals[0].end); - } - for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) { - printf(", (%G, %G)", interval_list_[i].intervals[j].begin, - interval_list_[i].intervals[j].end); - } - printf("]\n"); - - printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size()); - if (interval_list_[i].orientation == IntervalSeries::kVertical) { - printf( - " plt.axhspan(ival%zu[i][0], ival%zu[i][1], " - "facecolor=interval_colors[%zu], " - "alpha=0.3)\n", - i, i, i); - } else { - printf( - " plt.axvspan(ival%zu[i][0], ival%zu[i][1], " - "facecolor=interval_colors[%zu], " - "alpha=0.3)\n", - i, i, i); - } - printf( - "legend_patches.append(mpatches.Patch(ec=\'black\', " - "fc=interval_colors[%zu], label='%s'))\n", - i, interval_list_[i].label.c_str()); - } - } - - printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_); - printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_); - printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str()); - printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str()); - printf("plt.title(\'%s\')\n", title_.c_str()); - printf("fig = plt.gcf()\n"); - printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str()); - if (!yaxis_tick_labels_.empty()) { - printf("yaxis_tick_labels = ["); - for (const auto& kv : yaxis_tick_labels_) { - printf("(%f,\"%s\"),", kv.first, kv.second.c_str()); - } - printf("]\n"); - printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n"); - printf("plt.yticks(*yaxis_tick_labels)\n"); - } - if (!series_list_.empty() || !interval_list_.empty()) { - printf("handles, labels = plt.gca().get_legend_handles_labels()\n"); - printf("for lp in legend_patches:\n"); - printf(" 
handles.append(lp)\n"); - printf(" labels.append(lp.get_label())\n"); - printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n"); - } + PrintPythonCode(); } PythonPlotCollection::PythonPlotCollection(bool shared_xaxis) @@ -176,24 +34,7 @@ PythonPlotCollection::PythonPlotCollection(bool shared_xaxis) PythonPlotCollection::~PythonPlotCollection() {} void PythonPlotCollection::Draw() { - printf("import matplotlib.pyplot as plt\n"); - printf("plt.rcParams.update({'figure.max_open_warning': 0})\n"); - printf("import matplotlib.patches as mpatches\n"); - printf("import matplotlib.patheffects as pe\n"); - printf("import colorsys\n"); - for (size_t i = 0; i < plots_.size(); i++) { - printf("plt.figure(%zu)\n", i); - if (shared_xaxis_) { - // Link x-axes across all figures for synchronized zooming. - if (i == 0) { - printf("axis0 = plt.subplot(111)\n"); - } else { - printf("plt.subplot(111, sharex=axis0)\n"); - } - } - plots_[i]->Draw(); - } - printf("plt.show()\n"); + PrintPythonCode(shared_xaxis_); } Plot* PythonPlotCollection::AppendNewPlot() { diff --git a/rtc_tools/rtc_event_log_visualizer/plot_python.h b/rtc_tools/rtc_event_log_visualizer/plot_python.h index dcdcf23fcf..998ed7b221 100644 --- a/rtc_tools/rtc_event_log_visualizer/plot_python.h +++ b/rtc_tools/rtc_event_log_visualizer/plot_python.h @@ -23,7 +23,8 @@ class PythonPlot final : public Plot { class PythonPlotCollection final : public PlotCollection { public: - explicit PythonPlotCollection(bool shared_xaxis = false); + // This class is deprecated. Use PlotCollection and PrintPythonCode() instead. 
+ RTC_DEPRECATED explicit PythonPlotCollection(bool shared_xaxis = false); ~PythonPlotCollection() override; void Draw() override; Plot* AppendNewPlot() override; diff --git a/rtc_tools/rtc_event_log_visualizer/triage_notifications.h b/rtc_tools/rtc_event_log_visualizer/triage_notifications.h deleted file mode 100644 index 23b31ece42..0000000000 --- a/rtc_tools/rtc_event_log_visualizer/triage_notifications.h +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_ -#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_ - -#include - -namespace webrtc { - -class IncomingRtpReceiveTimeGap { - public: - IncomingRtpReceiveTimeGap(float time_seconds, int64_t duration) - : time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTP packets received for ") + - std::to_string(duration_) + std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class IncomingRtcpReceiveTimeGap { - public: - IncomingRtcpReceiveTimeGap(float time_seconds, int64_t duration) - : time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTCP packets received for ") + - std::to_string(duration_) + std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class OutgoingRtpSendTimeGap { - public: - OutgoingRtpSendTimeGap(float time_seconds, int64_t duration) - : 
time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTP packets sent for ") + std::to_string(duration_) + - std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class OutgoingRtcpSendTimeGap { - public: - OutgoingRtcpSendTimeGap(float time_seconds, int64_t duration) - : time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTCP packets sent for ") + - std::to_string(duration_) + std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class IncomingSeqNumJump { - public: - IncomingSeqNumJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Sequence number jumps on incoming SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class IncomingCaptureTimeJump { - public: - IncomingCaptureTimeJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Capture timestamp jumps on incoming SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class OutgoingSeqNoJump { - public: - OutgoingSeqNoJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Sequence number jumps on outgoing SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class OutgoingCaptureTimeJump { - public: - OutgoingCaptureTimeJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), 
ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Capture timestamp jumps on outgoing SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class OutgoingHighLoss { - public: - explicit OutgoingHighLoss(double avg_loss_fraction) - : avg_loss_fraction_(avg_loss_fraction) {} - std::string ToString() const { - return std::string("High average loss (") + - std::to_string(avg_loss_fraction_ * 100) + - std::string("%) across the call."); - } - - private: - double avg_loss_fraction_; -}; - -} // namespace webrtc - -#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_ diff --git a/rtc_tools/rtp_generator/rtp_generator.h b/rtc_tools/rtp_generator/rtp_generator.h index 6248c6a636..a317bf7278 100644 --- a/rtc_tools/rtp_generator/rtp_generator.h +++ b/rtc_tools/rtp_generator/rtp_generator.h @@ -27,7 +27,6 @@ #include "call/rtp_config.h" #include "call/video_send_stream.h" #include "media/engine/webrtc_video_engine.h" -#include "rtc_base/constructor_magic.h" #include "test/frame_generator_capturer.h" #include "test/rtp_file_reader.h" #include "test/rtp_file_writer.h" @@ -79,6 +78,11 @@ class RtpGenerator final : public webrtc::Transport { public: // Construct a new RtpGenerator using the specified options. explicit RtpGenerator(const RtpGeneratorOptions& options); + + RtpGenerator() = delete; + RtpGenerator(const RtpGenerator&) = delete; + RtpGenerator& operator=(const RtpGenerator&) = delete; + // Cleans up the VideoSendStream. ~RtpGenerator() override; // Generates an rtp_dump that is written out to @@ -113,9 +117,6 @@ class RtpGenerator final : public webrtc::Transport { std::vector durations_ms_; uint32_t start_ms_ = 0; std::unique_ptr task_queue_; - - // This object cannot be copied. 
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtpGenerator); }; } // namespace webrtc diff --git a/rtc_tools/sanitizers_unittest.cc b/rtc_tools/sanitizers_unittest.cc index b997bf0c23..9606f42216 100644 --- a/rtc_tools/sanitizers_unittest.cc +++ b/rtc_tools/sanitizers_unittest.cc @@ -110,7 +110,7 @@ void DataRace() { thread2.Join(); // TSan seems to mess with gtest's death detection. // Fail intentionally, and rely on detecting the error message. - RTC_CHECK(false); + RTC_CHECK_NOTREACHED(); } TEST(SanitizersDeathTest, ThreadSanitizer) { diff --git a/rtc_tools/testing/build_apprtc.py b/rtc_tools/testing/build_apprtc.py index 367a2602d5..e93b7e06c7 100755 --- a/rtc_tools/testing/build_apprtc.py +++ b/rtc_tools/testing/build_apprtc.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Builds the AppRTC collider using the golang toolchain. The golang toolchain is downloaded by download_apprtc.py. We use that here @@ -24,44 +23,44 @@ import utils - USAGE_STR = "Usage: {} " def _ConfigureApprtcServerToDeveloperMode(app_yaml_path): - for line in fileinput.input(app_yaml_path, inplace=True): - # We can't click past these in browser-based tests, so disable them. - line = line.replace('BYPASS_JOIN_CONFIRMATION: false', - 'BYPASS_JOIN_CONFIRMATION: true') - sys.stdout.write(line) + for line in fileinput.input(app_yaml_path, inplace=True): + # We can't click past these in browser-based tests, so disable them. 
+ line = line.replace('BYPASS_JOIN_CONFIRMATION: false', + 'BYPASS_JOIN_CONFIRMATION: true') + sys.stdout.write(line) def main(argv): - if len(argv) != 4: - return USAGE_STR.format(argv[0]) + if len(argv) != 4: + return USAGE_STR.format(argv[0]) - apprtc_dir = os.path.abspath(argv[1]) - go_root_dir = os.path.abspath(argv[2]) - golang_workspace = os.path.abspath(argv[3]) + apprtc_dir = os.path.abspath(argv[1]) + go_root_dir = os.path.abspath(argv[2]) + golang_workspace = os.path.abspath(argv[3]) - app_yaml_path = os.path.join(apprtc_dir, 'out', 'app_engine', 'app.yaml') - _ConfigureApprtcServerToDeveloperMode(app_yaml_path) + app_yaml_path = os.path.join(apprtc_dir, 'out', 'app_engine', 'app.yaml') + _ConfigureApprtcServerToDeveloperMode(app_yaml_path) - utils.RemoveDirectory(golang_workspace) + utils.RemoveDirectory(golang_workspace) - collider_dir = os.path.join(apprtc_dir, 'src', 'collider') - shutil.copytree(collider_dir, os.path.join(golang_workspace, 'src')) + collider_dir = os.path.join(apprtc_dir, 'src', 'collider') + shutil.copytree(collider_dir, os.path.join(golang_workspace, 'src')) - golang_path = os.path.join(go_root_dir, 'bin', - 'go' + utils.GetExecutableExtension()) - golang_env = os.environ.copy() - golang_env['GOROOT'] = go_root_dir - golang_env['GOPATH'] = golang_workspace - collider_out = os.path.join(golang_workspace, - 'collidermain' + utils.GetExecutableExtension()) - subprocess.check_call([golang_path, 'build', '-o', collider_out, - 'collidermain'], env=golang_env) + golang_path = os.path.join(go_root_dir, 'bin', + 'go' + utils.GetExecutableExtension()) + golang_env = os.environ.copy() + golang_env['GOROOT'] = go_root_dir + golang_env['GOPATH'] = golang_workspace + collider_out = os.path.join( + golang_workspace, 'collidermain' + utils.GetExecutableExtension()) + subprocess.check_call( + [golang_path, 'build', '-o', collider_out, 'collidermain'], + env=golang_env) if __name__ == '__main__': - sys.exit(main(sys.argv)) + 
sys.exit(main(sys.argv)) diff --git a/rtc_tools/testing/download_apprtc.py b/rtc_tools/testing/download_apprtc.py index f6db785275..a77955a3f6 100755 --- a/rtc_tools/testing/download_apprtc.py +++ b/rtc_tools/testing/download_apprtc.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Downloads prebuilt AppRTC and Go from WebRTC storage and unpacks it. Requires that depot_tools is installed and in the PATH. @@ -21,38 +20,37 @@ import utils - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) def _GetGoArchivePathForPlatform(): - archive_extension = 'zip' if utils.GetPlatform() == 'win' else 'tar.gz' - return os.path.join(utils.GetPlatform(), 'go.%s' % archive_extension) + archive_extension = 'zip' if utils.GetPlatform() == 'win' else 'tar.gz' + return os.path.join(utils.GetPlatform(), 'go.%s' % archive_extension) def main(argv): - if len(argv) > 2: - return 'Usage: %s [output_dir]' % argv[0] + if len(argv) > 2: + return 'Usage: %s [output_dir]' % argv[0] - output_dir = os.path.abspath(argv[1]) if len(argv) > 1 else None + output_dir = os.path.abspath(argv[1]) if len(argv) > 1 else None - apprtc_zip_path = os.path.join(SCRIPT_DIR, 'prebuilt_apprtc.zip') - if os.path.isfile(apprtc_zip_path + '.sha1'): - utils.DownloadFilesFromGoogleStorage(SCRIPT_DIR, auto_platform=False) + apprtc_zip_path = os.path.join(SCRIPT_DIR, 'prebuilt_apprtc.zip') + if os.path.isfile(apprtc_zip_path + '.sha1'): + utils.DownloadFilesFromGoogleStorage(SCRIPT_DIR, auto_platform=False) - if output_dir is not None: - utils.RemoveDirectory(os.path.join(output_dir, 'apprtc')) - utils.UnpackArchiveTo(apprtc_zip_path, output_dir) + if output_dir is not None: + utils.RemoveDirectory(os.path.join(output_dir, 'apprtc')) + utils.UnpackArchiveTo(apprtc_zip_path, output_dir) - golang_path = os.path.join(SCRIPT_DIR, 'golang') - 
golang_zip_path = os.path.join(golang_path, _GetGoArchivePathForPlatform()) - if os.path.isfile(golang_zip_path + '.sha1'): - utils.DownloadFilesFromGoogleStorage(golang_path) + golang_path = os.path.join(SCRIPT_DIR, 'golang') + golang_zip_path = os.path.join(golang_path, _GetGoArchivePathForPlatform()) + if os.path.isfile(golang_zip_path + '.sha1'): + utils.DownloadFilesFromGoogleStorage(golang_path) - if output_dir is not None: - utils.RemoveDirectory(os.path.join(output_dir, 'go')) - utils.UnpackArchiveTo(golang_zip_path, output_dir) + if output_dir is not None: + utils.RemoveDirectory(os.path.join(output_dir, 'go')) + utils.UnpackArchiveTo(golang_zip_path, output_dir) if __name__ == '__main__': - sys.exit(main(sys.argv)) + sys.exit(main(sys.argv)) diff --git a/rtc_tools/testing/setup_apprtc.py b/rtc_tools/testing/setup_apprtc.py index 2b463e004d..387ba694a3 100755 --- a/rtc_tools/testing/setup_apprtc.py +++ b/rtc_tools/testing/setup_apprtc.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """This script sets up AppRTC and its dependencies. Requires that depot_tools is installed and in the PATH. 
@@ -19,27 +18,26 @@ import utils - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) def main(argv): - if len(argv) == 1: - return 'Usage %s ' % argv[0] + if len(argv) == 1: + return 'Usage %s ' % argv[0] - output_dir = os.path.abspath(argv[1]) + output_dir = os.path.abspath(argv[1]) - download_apprtc_path = os.path.join(SCRIPT_DIR, 'download_apprtc.py') - utils.RunSubprocessWithRetry([sys.executable, download_apprtc_path, - output_dir]) + download_apprtc_path = os.path.join(SCRIPT_DIR, 'download_apprtc.py') + utils.RunSubprocessWithRetry( + [sys.executable, download_apprtc_path, output_dir]) - build_apprtc_path = os.path.join(SCRIPT_DIR, 'build_apprtc.py') - apprtc_dir = os.path.join(output_dir, 'apprtc') - go_dir = os.path.join(output_dir, 'go') - collider_dir = os.path.join(output_dir, 'collider') - utils.RunSubprocessWithRetry([sys.executable, build_apprtc_path, - apprtc_dir, go_dir, collider_dir]) + build_apprtc_path = os.path.join(SCRIPT_DIR, 'build_apprtc.py') + apprtc_dir = os.path.join(output_dir, 'apprtc') + go_dir = os.path.join(output_dir, 'go') + collider_dir = os.path.join(output_dir, 'collider') + utils.RunSubprocessWithRetry( + [sys.executable, build_apprtc_path, apprtc_dir, go_dir, collider_dir]) if __name__ == '__main__': - sys.exit(main(sys.argv)) + sys.exit(main(sys.argv)) diff --git a/rtc_tools/testing/utils.py b/rtc_tools/testing/utils.py index 7968dad62b..8a5de50cf8 100755 --- a/rtc_tools/testing/utils.py +++ b/rtc_tools/testing/utils.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- """Utilities for all our deps-management stuff.""" from __future__ import absolute_import @@ -23,36 +22,37 @@ def RunSubprocessWithRetry(cmd): - """Invokes the subprocess and backs off exponentially on fail.""" - for i in range(5): - try: - subprocess.check_call(cmd) - return - except subprocess.CalledProcessError as exception: - backoff = pow(2, i) - print('Got %s, retrying in %d seconds...' % (exception, backoff)) - time.sleep(backoff) + """Invokes the subprocess and backs off exponentially on fail.""" + for i in range(5): + try: + subprocess.check_call(cmd) + return + except subprocess.CalledProcessError as exception: + backoff = pow(2, i) + print('Got %s, retrying in %d seconds...' % (exception, backoff)) + time.sleep(backoff) - print('Giving up.') - raise exception + print('Giving up.') + raise exception def DownloadFilesFromGoogleStorage(path, auto_platform=True): - print('Downloading files in %s...' % path) + print('Downloading files in %s...' % path) - extension = 'bat' if 'win32' in sys.platform else 'py' - cmd = ['download_from_google_storage.%s' % extension, - '--bucket=chromium-webrtc-resources', - '--directory', path] - if auto_platform: - cmd += ['--auto_platform', '--recursive'] - subprocess.check_call(cmd) + extension = 'bat' if 'win32' in sys.platform else 'py' + cmd = [ + 'download_from_google_storage.%s' % extension, + '--bucket=chromium-webrtc-resources', '--directory', path + ] + if auto_platform: + cmd += ['--auto_platform', '--recursive'] + subprocess.check_call(cmd) # Code partially copied from # https://cs.chromium.org#chromium/build/scripts/common/chromium_utils.py def RemoveDirectory(*path): - """Recursively removes a directory, even if it's marked read-only. + """Recursively removes a directory, even if it's marked read-only. Remove the directory located at *path, if it exists. @@ -67,62 +67,63 @@ def RemoveDirectory(*path): bit and try again, so we do that too. It's hand-waving, but sometimes it works. 
:/ """ - file_path = os.path.join(*path) - print('Deleting `{}`.'.format(file_path)) - if not os.path.exists(file_path): - print('`{}` does not exist.'.format(file_path)) - return - - if sys.platform == 'win32': - # Give up and use cmd.exe's rd command. - file_path = os.path.normcase(file_path) - for _ in range(3): - print('RemoveDirectory running %s' % (' '.join( - ['cmd.exe', '/c', 'rd', '/q', '/s', file_path]))) - if not subprocess.call(['cmd.exe', '/c', 'rd', '/q', '/s', file_path]): - break - print(' Failed') - time.sleep(3) - return - else: - shutil.rmtree(file_path, ignore_errors=True) + file_path = os.path.join(*path) + print('Deleting `{}`.'.format(file_path)) + if not os.path.exists(file_path): + print('`{}` does not exist.'.format(file_path)) + return + + if sys.platform == 'win32': + # Give up and use cmd.exe's rd command. + file_path = os.path.normcase(file_path) + for _ in range(3): + print('RemoveDirectory running %s' % + (' '.join(['cmd.exe', '/c', 'rd', '/q', '/s', file_path]))) + if not subprocess.call( + ['cmd.exe', '/c', 'rd', '/q', '/s', file_path]): + break + print(' Failed') + time.sleep(3) + return + else: + shutil.rmtree(file_path, ignore_errors=True) def UnpackArchiveTo(archive_path, output_dir): - extension = os.path.splitext(archive_path)[1] - if extension == '.zip': - _UnzipArchiveTo(archive_path, output_dir) - else: - _UntarArchiveTo(archive_path, output_dir) + extension = os.path.splitext(archive_path)[1] + if extension == '.zip': + _UnzipArchiveTo(archive_path, output_dir) + else: + _UntarArchiveTo(archive_path, output_dir) def _UnzipArchiveTo(archive_path, output_dir): - print('Unzipping {} in {}.'.format(archive_path, output_dir)) - zip_file = zipfile.ZipFile(archive_path) - try: - zip_file.extractall(output_dir) - finally: - zip_file.close() + print('Unzipping {} in {}.'.format(archive_path, output_dir)) + zip_file = zipfile.ZipFile(archive_path) + try: + zip_file.extractall(output_dir) + finally: + zip_file.close() def 
_UntarArchiveTo(archive_path, output_dir): - print('Untarring {} in {}.'.format(archive_path, output_dir)) - tar_file = tarfile.open(archive_path, 'r:gz') - try: - tar_file.extractall(output_dir) - finally: - tar_file.close() + print('Untarring {} in {}.'.format(archive_path, output_dir)) + tar_file = tarfile.open(archive_path, 'r:gz') + try: + tar_file.extractall(output_dir) + finally: + tar_file.close() def GetPlatform(): - if sys.platform.startswith('win'): - return 'win' - if sys.platform.startswith('linux'): - return 'linux' - if sys.platform.startswith('darwin'): - return 'mac' - raise Exception("Can't run on platform %s." % sys.platform) + if sys.platform.startswith('win'): + return 'win' + if sys.platform.startswith('linux'): + return 'linux' + if sys.platform.startswith('darwin'): + return 'mac' + raise Exception("Can't run on platform %s." % sys.platform) def GetExecutableExtension(): - return '.exe' if GetPlatform() == 'win' else '' + return '.exe' if GetPlatform() == 'win' else '' diff --git a/video/video_replay.cc b/rtc_tools/video_replay.cc similarity index 98% rename from video/video_replay.cc rename to rtc_tools/video_replay.cc index 90989db7d6..ddcae6f87a 100644 --- a/video/video_replay.cc +++ b/rtc_tools/video_replay.cc @@ -335,7 +335,6 @@ class RtpReplayer final { for (auto& decoder : receive_config.decoders) { decoder = test::CreateMatchingDecoder(decoder.payload_type, decoder.video_format.name); - decoder.decoder_factory = stream_state->decoder_factory.get(); } // Create a window for this config. std::stringstream window_title; @@ -344,6 +343,7 @@ class RtpReplayer final { test::VideoRenderer::Create(window_title.str().c_str(), 640, 480)); // Create a receive stream for this config. 
receive_config.renderer = stream_state->sinks.back().get(); + receive_config.decoder_factory = stream_state->decoder_factory.get(); stream_state->receive_streams.emplace_back( call->CreateVideoReceiveStream(std::move(receive_config))); } @@ -402,7 +402,7 @@ class RtpReplayer final { DecoderBitstreamFilename().c_str()); }); } - decoder.decoder_factory = stream_state->decoder_factory.get(); + receive_config.decoder_factory = stream_state->decoder_factory.get(); receive_config.decoders.push_back(decoder); stream_state->receive_streams.emplace_back( @@ -521,6 +521,7 @@ int main(int argc, char* argv[]) { absl::GetFlag(FLAGS_transmission_offset_id))); RTC_CHECK(ValidateInputFilenameNotEmpty(absl::GetFlag(FLAGS_input_file))); + rtc::ThreadManager::Instance()->WrapCurrentThread(); webrtc::test::RunTest(webrtc::RtpReplay); return 0; } diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 13793fb06b..97cdb3cc3f 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -35,8 +35,8 @@ rtc_library("media_constraints") { deps = [ "../api:audio_options_api", "../api:libjingle_peerconnection_api", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("sdk_tests") { @@ -56,9 +56,7 @@ if (is_ios || is_mac) { # This is needed so that framework headers can include base headers # without pathname (so it works from within the framework module). "objc/base", - - # This is here for backward compatiblity reasons. - "objc/Framework/Headers", # TODO(bugs.webrtc.org/9627): Remove this. 
+ "objc/avconf", ] cflags = [ "-Wimplicit-retain-self", @@ -154,7 +152,7 @@ if (is_ios || is_mac) { "../rtc_base:checks", ] - libs = [ + frameworks = [ "AVFoundation.framework", "CoreMedia.framework", ] @@ -272,10 +270,10 @@ if (is_ios || is_mac) { "../rtc_base:checks", "../system_wrappers:field_trial", "../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/base:core_headers", ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] - libs = [ "AudioToolbox.framework" ] + frameworks = [ "AudioToolbox.framework" ] } # This target exists to expose :audio_session_objc and @@ -327,7 +325,7 @@ if (is_ios || is_mac) { public_configs = [ ":common_config_objc" ] - libs = [ "AVFoundation.framework" ] + frameworks = [ "AVFoundation.framework" ] deps = [ ":base_objc", @@ -337,6 +335,35 @@ if (is_ios || is_mac) { "../rtc_base:rtc_base_approved", ] } + + rtc_source_set("network_monitor_observer") { + visibility = [ ":*" ] + + sources = [ "objc/native/src/network_monitor_observer.h" ] + + deps = [ "../rtc_base" ] + } + + rtc_library("network_monitor_objc") { + visibility = [ "*" ] + + sources = [ + "objc/components/network/RTCNetworkMonitor+Private.h", + "objc/components/network/RTCNetworkMonitor.h", + "objc/components/network/RTCNetworkMonitor.mm", + ] + + configs += [ ":used_from_extension" ] + + frameworks = [ "Network.framework" ] + + deps = [ + ":base_objc", + ":helpers_objc", + ":network_monitor_observer", + "../rtc_base/system:gcd_helpers", + ] + } } rtc_library("videosource_objc") { @@ -354,7 +381,6 @@ if (is_ios || is_mac) { "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../common_video", "../media:rtc_media_base", @@ -383,7 +409,6 @@ if (is_ios || is_mac) { deps = [ ":base_objc", "//api/video:video_frame", - "//api/video:video_frame_i420", "//api/video:video_rtp_headers", "//common_video", "//rtc_base:checks", @@ -394,7 +419,7 
@@ if (is_ios || is_mac) { "..:common_objc", ":used_from_extension", ] - libs = [ + frameworks = [ "VideoToolbox.framework", "CoreGraphics.framework", "CoreVideo.framework", @@ -412,19 +437,19 @@ if (is_ios || is_mac) { "objc/components/renderer/opengl/RTCShader.mm", "objc/components/renderer/opengl/RTCVideoViewShading.h", ] - libs = [ "CoreVideo.framework" ] + frameworks = [ "CoreVideo.framework" ] if (is_ios) { sources += [ "objc/components/renderer/opengl/RTCNV12TextureCache.h", "objc/components/renderer/opengl/RTCNV12TextureCache.m", ] - libs += [ + frameworks += [ "GLKit.framework", "OpenGLES.framework", "QuartzCore.framework", ] } else if (is_mac) { - libs += [ + frameworks += [ "CoreMedia.framework", "OpenGL.framework", ] @@ -444,8 +469,8 @@ if (is_ios || is_mac) { "../media:rtc_media_base", "../rtc_base", "../rtc_base:checks", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] configs += [ "..:common_objc", @@ -504,7 +529,7 @@ if (is_ios || is_mac) { "objc/components/renderer/metal/RTCMTLVideoView.m", ] } - libs = [ + frameworks = [ "CoreVideo.framework", "Metal.framework", "MetalKit.framework", @@ -514,7 +539,7 @@ if (is_ios || is_mac) { "objc/components/renderer/metal/RTCMTLNSVideoView.h", "objc/components/renderer/metal/RTCMTLNSVideoView.m", ] - libs += [ "AppKit.framework" ] + frameworks += [ "AppKit.framework" ] } deps = [ ":base_objc", @@ -552,16 +577,13 @@ if (is_ios || is_mac) { sources = [ "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCCameraVideoCapturer.m", + "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCFileVideoCapturer.m", ] - if (is_ios) { - sources += [ - "objc/components/capturer/RTCFileVideoCapturer.h", - "objc/components/capturer/RTCFileVideoCapturer.m", - ] - } - libs = [ + frameworks = [ "AVFoundation.framework", "CoreVideo.framework", + "QuartzCore.framework", ] configs += [ "..:common_objc" ] @@ 
-573,6 +595,7 @@ if (is_ios || is_mac) { ":helpers_objc", ":video_objc", ":videoframebuffer_objc", + "../rtc_base/system:gcd_helpers", ] } @@ -585,6 +608,10 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCCodecSpecificInfoH264.mm", "objc/components/video_codec/RTCH264ProfileLevelId.h", "objc/components/video_codec/RTCH264ProfileLevelId.mm", + "objc/components/video_codec/RTCCodecSpecificInfoH265.h", + "objc/components/video_codec/RTCCodecSpecificInfoH265.mm", + "objc/components/video_codec/RTCH265ProfileLevelId.h", + "objc/components/video_codec/RTCH265ProfileLevelId.mm", ] if (is_ios) { sources += [ @@ -775,8 +802,6 @@ if (is_ios || is_mac) { sources = [ "objc/api/peerconnection/RTCEncodedImage+Private.h", "objc/api/peerconnection/RTCEncodedImage+Private.mm", - "objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h", - "objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm", "objc/api/peerconnection/RTCVideoCodecInfo+Private.h", "objc/api/peerconnection/RTCVideoCodecInfo+Private.mm", "objc/api/peerconnection/RTCVideoEncoderSettings+Private.h", @@ -922,7 +947,6 @@ if (is_ios || is_mac) { ":base_objc", ":file_logger_objc", ":helpers_objc", - ":legacy_header_paths", ":mediaconstraints_objc", ":mediasource_objc", ":native_api", @@ -945,7 +969,7 @@ if (is_ios || is_mac) { "../api/crypto:frame_encryptor_interface", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue:default_task_queue_factory", - "../api/transport/media:media_transport_interface", + "../api/transport:field_trial_based_config", "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", @@ -968,91 +992,6 @@ if (is_ios || is_mac) { } } - # TODO(bugs.webrtc.org/9627): Remove this target. 
- rtc_source_set("legacy_header_paths") { - sources = [ - "objc/Framework/Classes/Common/NSString+StdString.h", - "objc/Framework/Classes/Common/scoped_cftyperef.h", - "objc/Framework/Classes/PeerConnection/RTCConfiguration+Native.h", - "objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory+Native.h", - "objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h", - "objc/Framework/Classes/Video/RTCDefaultShader.h", - "objc/Framework/Classes/Video/RTCNV12TextureCache.h", - "objc/Framework/Classes/VideoToolbox/nalu_rewriter.h", - "objc/Framework/Headers/WebRTC/RTCAudioSession.h", - "objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h", - "objc/Framework/Headers/WebRTC/RTCAudioSource.h", - "objc/Framework/Headers/WebRTC/RTCAudioTrack.h", - "objc/Framework/Headers/WebRTC/RTCCVPixelBuffer.h", - "objc/Framework/Headers/WebRTC/RTCCallbackLogger.h", - "objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h", - "objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h", - "objc/Framework/Headers/WebRTC/RTCCertificate.h", - "objc/Framework/Headers/WebRTC/RTCConfiguration.h", - "objc/Framework/Headers/WebRTC/RTCDataChannel.h", - "objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h", - "objc/Framework/Headers/WebRTC/RTCDefaultVideoDecoderFactory.h", - "objc/Framework/Headers/WebRTC/RTCDefaultVideoEncoderFactory.h", - "objc/Framework/Headers/WebRTC/RTCDispatcher.h", - "objc/Framework/Headers/WebRTC/RTCDtmfSender.h", - "objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h", - "objc/Framework/Headers/WebRTC/RTCFieldTrials.h", - "objc/Framework/Headers/WebRTC/RTCFileLogger.h", - "objc/Framework/Headers/WebRTC/RTCFileVideoCapturer.h", - "objc/Framework/Headers/WebRTC/RTCH264ProfileLevelId.h", - "objc/Framework/Headers/WebRTC/RTCIceCandidate.h", - "objc/Framework/Headers/WebRTC/RTCIceServer.h", - "objc/Framework/Headers/WebRTC/RTCLegacyStatsReport.h", - "objc/Framework/Headers/WebRTC/RTCLogging.h", - "objc/Framework/Headers/WebRTC/RTCMTLNSVideoView.h", - 
"objc/Framework/Headers/WebRTC/RTCMTLVideoView.h", - "objc/Framework/Headers/WebRTC/RTCMacros.h", - "objc/Framework/Headers/WebRTC/RTCMediaConstraints.h", - "objc/Framework/Headers/WebRTC/RTCMediaSource.h", - "objc/Framework/Headers/WebRTC/RTCMediaStream.h", - "objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h", - "objc/Framework/Headers/WebRTC/RTCMetrics.h", - "objc/Framework/Headers/WebRTC/RTCMetricsSampleInfo.h", - "objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h", - "objc/Framework/Headers/WebRTC/RTCPeerConnection.h", - "objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h", - "objc/Framework/Headers/WebRTC/RTCPeerConnectionFactoryOptions.h", - "objc/Framework/Headers/WebRTC/RTCRtcpParameters.h", - "objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h", - "objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h", - "objc/Framework/Headers/WebRTC/RTCRtpHeaderExtension.h", - "objc/Framework/Headers/WebRTC/RTCRtpParameters.h", - "objc/Framework/Headers/WebRTC/RTCRtpReceiver.h", - "objc/Framework/Headers/WebRTC/RTCRtpSender.h", - "objc/Framework/Headers/WebRTC/RTCRtpTransceiver.h", - "objc/Framework/Headers/WebRTC/RTCSSLAdapter.h", - "objc/Framework/Headers/WebRTC/RTCSessionDescription.h", - "objc/Framework/Headers/WebRTC/RTCTracing.h", - "objc/Framework/Headers/WebRTC/RTCVideoCapturer.h", - "objc/Framework/Headers/WebRTC/RTCVideoCodec.h", - "objc/Framework/Headers/WebRTC/RTCVideoCodecFactory.h", - "objc/Framework/Headers/WebRTC/RTCVideoCodecH264.h", - "objc/Framework/Headers/WebRTC/RTCVideoCodecInfo.h", - "objc/Framework/Headers/WebRTC/RTCVideoDecoderVP8.h", - "objc/Framework/Headers/WebRTC/RTCVideoDecoderVP9.h", - "objc/Framework/Headers/WebRTC/RTCVideoEncoderVP8.h", - "objc/Framework/Headers/WebRTC/RTCVideoEncoderVP9.h", - "objc/Framework/Headers/WebRTC/RTCVideoFrame.h", - "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h", - "objc/Framework/Headers/WebRTC/RTCVideoRenderer.h", - "objc/Framework/Headers/WebRTC/RTCVideoSource.h", - 
"objc/Framework/Headers/WebRTC/RTCVideoTrack.h", - "objc/Framework/Headers/WebRTC/RTCVideoViewShading.h", - "objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h", - "objc/Framework/Native/api/audio_device_module.h", - "objc/Framework/Native/api/video_decoder_factory.h", - "objc/Framework/Native/api/video_encoder_factory.h", - "objc/Framework/Native/api/video_frame_buffer.h", - "objc/Framework/Native/src/objc_video_decoder_factory.h", - "objc/Framework/Native/src/objc_video_encoder_factory.h", - ] - } - if (rtc_include_tests) { if (is_ios) { rtc_library("sdk_unittests_sources") { @@ -1063,6 +1002,7 @@ if (is_ios || is_mac) { "objc/unittests/ObjCVideoTrackSource_xctest.mm", "objc/unittests/RTCCVPixelBuffer_xctest.mm", "objc/unittests/RTCCallbackLogger_xctest.m", + "objc/unittests/RTCEncodedImage_xctest.mm", "objc/unittests/RTCFileVideoCapturer_xctest.mm", "objc/unittests/RTCH264ProfileLevelId_xctest.m", "objc/unittests/RTCNV12TextureCache_xctest.m", @@ -1074,7 +1014,7 @@ if (is_ios || is_mac) { # TODO(peterhanspers): Reenable these tests on simulator. # See bugs.webrtc.org/7812 - if (!use_ios_simulator) { + if (target_environment != "simulator") { sources += [ "objc/unittests/RTCAudioDeviceModule_xctest.mm", "objc/unittests/RTCAudioDevice_xctest.mm", @@ -1084,6 +1024,7 @@ if (is_ios || is_mac) { deps = [ ":audio_device", ":audio_session_objc", + ":base_native_additions_objc", ":base_objc", ":callback_logger_objc", ":framework_objc", @@ -1101,7 +1042,7 @@ if (is_ios || is_mac) { ":videotoolbox_objc", "../api:scoped_refptr", "../api/task_queue:default_task_queue_factory", - "../api/video:video_frame_i420", + "../api/video:video_frame", "../common_video", "../media:rtc_media_base", "../media:rtc_media_tests_utils", @@ -1136,7 +1077,8 @@ if (is_ios || is_mac) { } # These tests use static linking. 
- rtc_ios_xctest_test("sdk_unittests") { + rtc_test("sdk_unittests") { + is_xctest = true info_plist = "//test/ios/Info.plist" sources = [ "objc/unittests/main.mm" ] @@ -1152,7 +1094,8 @@ if (is_ios || is_mac) { } # These tests link to the framework. - rtc_ios_xctest_test("sdk_framework_unittests") { + rtc_test("sdk_framework_unittests") { + is_xctest = true info_plist = "//test/ios/Info.plist" sources = [ "objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m", @@ -1214,7 +1157,6 @@ if (is_ios || is_mac) { "../api/audio_codecs:audio_codecs_api", "../api/audio_codecs:builtin_audio_decoder_factory", "../api/audio_codecs:builtin_audio_encoder_factory", - "../api/transport/media:media_transport_interface", "../api/video_codecs:video_codecs_api", "../media:rtc_media_base", "../modules:module_api", @@ -1265,6 +1207,7 @@ if (is_ios || is_mac) { "objc/components/audio/RTCAudioSessionConfiguration.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/network/RTCNetworkMonitor.h", "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/opengl/RTCEAGLVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", @@ -1276,6 +1219,11 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoDecoderH264.h", "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", "objc/components/video_codec/RTCVideoEncoderH264.h", + "objc/components/video_codec/RTCH265ProfileLevelId.h", + "objc/components/video_codec/RTCVideoDecoderFactoryH265.h", + "objc/components/video_codec/RTCVideoDecoderH265.h", + "objc/components/video_codec/RTCVideoEncoderFactoryH265.h", + "objc/components/video_codec/RTCVideoEncoderH265.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", @@ -1322,6 +1270,9 @@ if (is_ios || is_mac) { "objc/api/video_codec/RTCVideoEncoderVP9.h", 
"objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + "objc/avconf/CFHijackCapturerDelegate.h", + "objc/avconf/CFRPCapturer.h", + "objc/avconf/CFVideoProcessor.h", ] if (!build_with_chromium) { @@ -1331,6 +1282,12 @@ if (is_ios || is_mac) { ] } + if (rtc_use_bt_mixer) { + common_objc_headers += [ + "objc/avconf/CFAudioMixer.h", + ] + } + sources = common_objc_headers public_headers = common_objc_headers @@ -1342,6 +1299,7 @@ if (is_ios || is_mac) { deps = [ ":audio_objc", + ":avconf_objc", ":base_objc", ":default_codec_factory_objc", ":native_api", @@ -1351,6 +1309,7 @@ if (is_ios || is_mac) { ":videocapture_objc", ":videocodec_objc", ":videotoolbox_objc", + "../api:create_peerconnection_factory", "../rtc_base:rtc_base_approved", ] if (rtc_use_metal_rendering) { @@ -1363,13 +1322,19 @@ if (is_ios || is_mac) { ] } - libs = [ + frameworks = [ "AVFoundation.framework", "CoreGraphics.framework", "CoreMedia.framework", "GLKit.framework", ] + if (rtc_use_recorder || rtc_use_bt_mixer) { + deps += [ + "//third_party/ffmpeg", + ] + } + configs += [ "..:common_objc", ":used_from_extension", @@ -1464,6 +1429,11 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoDecoderH264.h", "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", "objc/components/video_codec/RTCVideoEncoderH264.h", + "objc/components/video_codec/RTCH265ProfileLevelId.h", + "objc/components/video_codec/RTCVideoDecoderFactoryH265.h", + "objc/components/video_codec/RTCVideoDecoderH265.h", + "objc/components/video_codec/RTCVideoEncoderFactoryH265.h", + "objc/components/video_codec/RTCVideoEncoderH265.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", ] @@ -1496,7 +1466,7 @@ if (is_ios || is_mac) { ] } - libs = [ + frameworks = [ "AVFoundation.framework", "CoreGraphics.framework", "CoreMedia.framework", @@ -1539,6 +1509,8 @@ if (is_ios || is_mac) { visibility = [ "*" ] allow_poison = [ 
"audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove. sources = [ + "objc/native/api/network_monitor_factory.h", + "objc/native/api/network_monitor_factory.mm", "objc/native/api/video_capturer.h", "objc/native/api/video_capturer.mm", "objc/native/api/video_decoder_factory.h", @@ -1569,8 +1541,29 @@ if (is_ios || is_mac) { "../api/video_codecs:video_codecs_api", "../common_video", "../rtc_base", - "//third_party/abseil-cpp/absl/memory", ] + if (is_ios) { + deps += [ ":native_network_monitor" ] + } + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] + } + + if (is_ios) { + rtc_library("native_network_monitor") { + visibility = [ "*" ] + + sources = [ + "objc/native/src/objc_network_monitor.h", + "objc/native/src/objc_network_monitor.mm", + ] + + deps = [ + ":network_monitor_objc", + ":network_monitor_observer", + "../rtc_base", + "../rtc_base/synchronization:sequence_checker", + ] + } } rtc_library("native_video") { @@ -1602,7 +1595,6 @@ if (is_ios || is_mac) { ":vpx_codec_constants", ":wrapped_native_codec_objc", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", "../common_video", @@ -1617,8 +1609,8 @@ if (is_ios || is_mac) { rtc_library("video_toolbox_cc") { visibility = [ - ":videotoolbox_objc", ":sdk_unittests_sources", + ":videotoolbox_objc", ] sources = [ "objc/components/video_codec/helpers.cc", @@ -1647,6 +1639,14 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderFactoryH264.m", "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_codec/RTCVideoEncoderH264.mm", + "objc/components/video_codec/RTCVideoDecoderFactoryH265.h", + "objc/components/video_codec/RTCVideoDecoderFactoryH265.m", + "objc/components/video_codec/RTCVideoDecoderH265.h", + "objc/components/video_codec/RTCVideoDecoderH265.mm", + "objc/components/video_codec/RTCVideoEncoderFactoryH265.h", + "objc/components/video_codec/RTCVideoEncoderFactoryH265.m", + 
"objc/components/video_codec/RTCVideoEncoderH265.h", + "objc/components/video_codec/RTCVideoEncoderH265.mm", ] configs += [ @@ -1673,12 +1673,46 @@ if (is_ios || is_mac) { "//third_party/libyuv", ] - libs = [ + frameworks = [ "CoreFoundation.framework", "CoreMedia.framework", "CoreVideo.framework", "VideoToolbox.framework", ] } + + rtc_library("avconf_objc") { + sources = [ + "objc/avconf/CFHijackCapturerDelegate.h", + "objc/avconf/CFHijackCapturerDelegate.mm", + "objc/avconf/CFRPCapturer.h", + "objc/avconf/CFRPCapturer.m", + "objc/avconf/CFVideoProcessor.h", + ] + if (rtc_use_bt_mixer) { + sources += [ + "objc/avconf/CFAudioMixer.h", + "objc/avconf/CFAudioMixer.mm", + ] + } + + deps = [ + ":base_objc", + ] + if (rtc_use_bt_mixer) { + deps += [ + "//modules/backing_track:backing_track", + ] + } + + frameworks = [ + "ReplayKit.framework", + ] + + configs += [ + "..:common_objc", + ] + public_configs = [ ":common_config_objc" ] + } } } diff --git a/sdk/OWNERS b/sdk/OWNERS index cfbb17afec..4d31ffb663 100644 --- a/sdk/OWNERS +++ b/sdk/OWNERS @@ -1,6 +1 @@ magjed@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/sdk/android/AndroidManifest.xml b/sdk/android/AndroidManifest.xml index 5afbd7bfe3..417f45fc5e 100644 --- a/sdk/android/AndroidManifest.xml +++ b/sdk/android/AndroidManifest.xml @@ -10,5 +10,5 @@ --> - + diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 2d511e2eac..fe7c03f077 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -66,6 +66,7 @@ if (is_android) { ":native_api_base", ":native_api_codecs", ":native_api_jni", + ":native_api_network_monitor", ":native_api_peerconnection", ":native_api_stacktrace", ":native_api_video", @@ -116,7 +117,6 @@ if (is_android) { ":builtin_audio_codecs_jni", ":default_video_codec_factory_jni", ":java_audio_device_module_jni", - ":legacy_hwcodecs_jni", ":peerconnection_jni", ":video_jni", "../../api:create_peerconnection_factory", @@ -148,6 +148,12 @@ if (is_android) { "../../pc:libjingle_peerconnection", "../../rtc_base", ] + + if (rtc_use_recorder) { + deps += [ + "//third_party/ffmpeg", + ] + } output_extension = "so" } @@ -172,7 +178,6 @@ if (is_android) { ] deps = [ - "//rtc_base:base_java", "//third_party/android_deps:com_android_support_support_annotations_java", ] } @@ -270,19 +275,20 @@ if (is_android) { "api/org/webrtc/FrameDecryptor.java", "api/org/webrtc/FrameEncryptor.java", "api/org/webrtc/IceCandidate.java", - "api/org/webrtc/MediaCodecVideoDecoder.java", - "api/org/webrtc/MediaCodecVideoEncoder.java", "api/org/webrtc/MediaConstraints.java", "api/org/webrtc/MediaSource.java", "api/org/webrtc/MediaStream.java", "api/org/webrtc/MediaStreamTrack.java", - "api/org/webrtc/MediaTransportFactoryFactory.java", "api/org/webrtc/NativeLibraryLoader.java", "api/org/webrtc/NativePeerConnectionFactory.java", "api/org/webrtc/NetEqFactoryFactory.java", + "api/org/webrtc/NetworkChangeDetector.java", + "api/org/webrtc/NetworkChangeDetectorFactory.java", "api/org/webrtc/NetworkControllerFactoryFactory.java", - "api/org/webrtc/NetworkMonitor.java", # TODO(sakal): 
Break dependencies and move to base_java. - "api/org/webrtc/NetworkMonitorAutoDetect.java", # TODO(sakal): Break dependencies and move to base_java. + + # TODO(sakal): Break dependencies and move to base_java. + "api/org/webrtc/NetworkMonitor.java", + "api/org/webrtc/NetworkMonitorAutoDetect.java", "api/org/webrtc/NetworkStatePredictorFactoryFactory.java", "api/org/webrtc/PeerConnection.java", "api/org/webrtc/PeerConnectionDependencies.java", @@ -323,8 +329,14 @@ if (is_android) { ":video_java", "//modules/audio_device:audio_device_java", "//rtc_base:base_java", + "//third_party/android_deps:androidx_annotation_annotation_java", "//third_party/android_deps:com_android_support_support_annotations_java", ] + srcjar_deps = [ + "//api:priority_enums", + "//api/video:video_frame_enums", + "//rtc_base:network_monitor_enums", + ] } # Modules, in alphabetical order. @@ -388,6 +400,7 @@ if (is_android) { rtc_android_library("hwcodecs_java") { visibility = [ "*" ] sources = [ + "api/org/webrtc/VideoCapabilityParser.java", "api/org/webrtc/HardwareVideoDecoderFactory.java", "api/org/webrtc/HardwareVideoEncoderFactory.java", "api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java", @@ -403,7 +416,7 @@ if (is_android) { "src/java/org/webrtc/MediaCodecWrapperFactory.java", "src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java", "src/java/org/webrtc/NV12Buffer.java", - "src/java/org/webrtc/VideoCodecType.java", + "src/java/org/webrtc/VideoCodecMimeType.java", ] deps = [ @@ -465,6 +478,7 @@ if (is_android) { ] deps = [ + ":base_java", ":video_api_java", ":video_java", "//rtc_base:base_java", @@ -552,10 +566,11 @@ if (current_os == "linux" || is_android) { "../../rtc_base", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "../../rtc_base/synchronization:sequence_checker", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } 
rtc_library("audio_jni") { @@ -565,9 +580,6 @@ if (current_os == "linux" || is_android) { deps = [ ":base_jni", - ":builtin_audio_codecs_jni", - "../../api/audio_codecs:builtin_audio_decoder_factory", - "../../api/audio_codecs:builtin_audio_encoder_factory", "../../modules/audio_processing", "../../modules/audio_processing:api", "../../rtc_base:rtc_base_approved", @@ -592,41 +604,6 @@ if (current_os == "linux" || is_android) { ] } - # Corresponds to MediaCodecVideoEncoder/Decoder in Java. - rtc_library("legacy_hwcodecs_jni") { - visibility = [ "*" ] - allow_poison = [ "software_video_codecs" ] - sources = [ - "src/jni/android_media_codec_common.h", - "src/jni/android_media_decoder.cc", - "src/jni/android_media_encoder.cc", - ] - deps = [ - ":base_jni", - ":default_video_codec_factory_jni", - ":generated_video_jni", - ":native_api_jni", - ":video_jni", - ":videoframe_jni", - "../../api:scoped_refptr", - "../../api/task_queue", - "../../api/video_codecs:video_codecs_api", - "../../common_video", - "../../media:rtc_internal_video_codecs", - "../../media:rtc_media_base", - "../../modules/video_coding:video_codec_interface", - "../../modules/video_coding:video_coding_utility", - "../../rtc_base", - "../../rtc_base:checks", - "../../rtc_base:rtc_task_queue", - "../../rtc_base:weak_ptr", - "../../rtc_base/synchronization:sequence_checker", - "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/memory", - "//third_party/libyuv", - ] - } - rtc_library("video_jni") { visibility = [ "*" ] sources = [ @@ -685,10 +662,11 @@ if (current_os == "linux" || is_android) { "../../rtc_base", "../../rtc_base:checks", "../../rtc_base:rtc_task_queue", + "../../rtc_base/synchronization:mutex", "../../rtc_base/task_utils:to_queued_task", - "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("peerconnection_jni") { @@ -776,6 +754,8 @@ if (current_os == "linux" || is_android) 
{ "../../rtc_base:rtc_task_queue", "../../rtc_base/system:thread_registry", "../../system_wrappers:field_trial", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", ] @@ -852,6 +832,11 @@ if (current_os == "linux" || is_android) { "src/jni/jni_generator_helper.cc", "src/jni/jni_generator_helper.h", ] + if (rtc_use_bt_mixer) { + sources += [ + "src/jni/audio_mixer_jni.cc", + ] + } public = [ "native_api/jni/class_loader.h", @@ -868,8 +853,14 @@ if (current_os == "linux" || is_android) { "//api:array_view", "//rtc_base:checks", "//rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + + if (rtc_use_bt_mixer) { + deps += [ + "//modules/backing_track:backing_track", + ] + } } rtc_library("native_api_base") { @@ -877,8 +868,6 @@ if (current_os == "linux" || is_android) { sources = [ "native_api/base/init.cc", "native_api/base/init.h", - "native_api/base/network_monitor.cc", - "native_api/base/network_monitor.h", ] deps = [ @@ -935,6 +924,19 @@ if (current_os == "linux" || is_android) { ] } + rtc_library("native_api_network_monitor") { + visibility = [ "*" ] + sources = [ + "native_api/network_monitor/network_monitor.cc", + "native_api/network_monitor/network_monitor.h", + ] + + deps = [ + ":base_jni", + "//rtc_base", + ] + } + # API for creating Java PeerConnectionFactory from C++ equivalents. 
rtc_library("native_api_peerconnection") { visibility = [ "*" ] @@ -963,8 +965,9 @@ if (current_os == "linux" || is_android) { "../../rtc_base:criticalsection", "../../rtc_base:logging", "../../rtc_base:stringutils", - "//third_party/abseil-cpp/absl/base:core_headers", + "../../rtc_base/synchronization:mutex", ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } # API for creating C++ wrapper implementations of api/mediastreaminterface.h @@ -1073,8 +1076,8 @@ if (current_os == "linux" || is_android) { "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("java_audio_device_module") { @@ -1096,8 +1099,8 @@ if (current_os == "linux" || is_android) { "../../rtc_base:rtc_base_approved", "../../system_wrappers:field_trial", "../../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_enable_android_aaudio) { @@ -1123,8 +1126,8 @@ if (current_os == "linux" || is_android) { "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../system_wrappers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } @@ -1149,8 +1152,8 @@ if (current_os == "linux" || is_android) { "../../modules/audio_device:audio_device_buffer", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } ######################### @@ -1184,8 +1187,8 @@ if (current_os == "linux" || is_android) { generate_jni("generated_base_jni") { sources = [ + "api/org/webrtc/NetworkChangeDetector.java", "api/org/webrtc/NetworkMonitor.java", - "api/org/webrtc/NetworkMonitorAutoDetect.java", "api/org/webrtc/RefCounted.java", 
"src/java/org/webrtc/Histogram.java", "src/java/org/webrtc/JniCommon.java", @@ -1198,8 +1201,6 @@ if (current_os == "linux" || is_android) { sources = [ "api/org/webrtc/EncodedImage.java", "api/org/webrtc/JavaI420Buffer.java", - "api/org/webrtc/MediaCodecVideoDecoder.java", - "api/org/webrtc/MediaCodecVideoEncoder.java", "api/org/webrtc/TimestampAligner.java", "api/org/webrtc/VideoCodecInfo.java", "api/org/webrtc/VideoCodecStatus.java", @@ -1338,7 +1339,7 @@ if (is_android) { rtc_instrumentation_test_apk("android_instrumentation_test_apk") { apk_name = "android_instrumentation_test_apk" android_manifest = "instrumentationtests/AndroidManifest.xml" - min_sdk_version = 16 + min_sdk_version = 21 target_sdk_version = 21 sources = [ @@ -1354,7 +1355,6 @@ if (is_android) { "instrumentationtests/src/org/webrtc/GlRectDrawerTest.java", "instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java", "instrumentationtests/src/org/webrtc/LoggableTest.java", - "instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java", "instrumentationtests/src/org/webrtc/NetworkMonitorTest.java", "instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java", "instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java", @@ -1377,17 +1377,29 @@ if (is_android) { data = [ "../../sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m" ] deps = [ + ":audio_api_java", ":base_java", + ":builtin_audio_codecs_java", + ":camera_java", ":default_video_codec_factory_java", + ":filevideo_java", + ":hwcodecs_java", ":libjingle_peerconnection_java", ":libjingle_peerconnection_metrics_default_java", + ":peerconnection_java", + ":surfaceviewrenderer_java", + ":swcodecs_java", ":video_api_java", ":video_java", "//base:base_java_test_support", "//rtc_base:base_java", + "//third_party/android_deps:com_android_support_support_annotations_java", "//third_party/android_support_test_runner:rules_java", "//third_party/android_support_test_runner:runner_java", 
"//third_party/google-truth:google_truth_java", + "//third_party/guava:guava_android_java", + "//third_party/hamcrest:hamcrest_java", + "//third_party/hamcrest:hamcrest_library_java", "//third_party/junit", "//third_party/mockito:mockito_java", ] @@ -1477,6 +1489,7 @@ if (is_android) { "../../pc:libjingle_peerconnection", "../../rtc_base:checks", "../../rtc_base:rtc_base", + "../../rtc_base/synchronization:mutex", "../../rtc_base/system:inline", "../../system_wrappers", "../../system_wrappers:field_trial", @@ -1484,8 +1497,8 @@ if (is_android) { "../../test:fileutils", "../../test:test_support", "../../testing/gtest", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_android_library("native_unittests_java") { @@ -1537,13 +1550,22 @@ if (is_android) { "tests/src/org/webrtc/GlGenericDrawerTest.java", "tests/src/org/webrtc/HardwareVideoEncoderTest.java", "tests/src/org/webrtc/IceCandidateTest.java", + "tests/src/org/webrtc/RefCountDelegateTest.java", "tests/src/org/webrtc/ScalingSettingsTest.java", ] deps = [ + ":base_java", + ":camera_java", + ":hwcodecs_java", ":libjingle_peerconnection_java", + ":peerconnection_java", + ":video_api_java", + ":video_java", "//base:base_java_test_support", + "//third_party/android_deps:com_android_support_support_annotations_java", "//third_party/google-truth:google_truth_java", + "//third_party/guava:guava_android_java", ] additional_jar_files = [ [ diff --git a/sdk/android/OWNERS b/sdk/android/OWNERS index 4af75f35bc..a9d3a82348 100644 --- a/sdk/android/OWNERS +++ b/sdk/android/OWNERS @@ -1,4 +1,3 @@ -glaznev@webrtc.org magjed@webrtc.org sakal@webrtc.org per-file *Audio*.java=henrika@webrtc.org diff --git a/sdk/android/PRESUBMIT.py b/sdk/android/PRESUBMIT.py index bc0cbcb8c8..876a860510 100644 --- a/sdk/android/PRESUBMIT.py +++ b/sdk/android/PRESUBMIT.py @@ -6,23 +6,26 @@ # in the file PATENTS. 
All contributing project authors may # be found in the AUTHORS file in the root of the source tree. + def CheckChangeOnUpload(input_api, output_api): - results = [] - results.extend(CheckPatchFormatted(input_api, output_api)) - return results + results = [] + results.extend(CheckPatchFormatted(input_api, output_api)) + return results + def CheckPatchFormatted(input_api, output_api): - import git_cl - cmd = ['cl', 'format', '--dry-run', input_api.PresubmitLocalPath()] - code, _ = git_cl.RunGitWithCode(cmd, suppress_stderr=True) - if code == 2: - short_path = input_api.basename(input_api.PresubmitLocalPath()) - full_path = input_api.os_path.relpath(input_api.PresubmitLocalPath(), - input_api.change.RepositoryRoot()) - return [output_api.PresubmitPromptWarning( - 'The %s directory requires source formatting. ' - 'Please run git cl format %s' % - (short_path, full_path))] - # As this is just a warning, ignore all other errors if the user - # happens to have a broken clang-format, doesn't use git, etc etc. - return [] + import git_cl + cmd = ['cl', 'format', '--dry-run', input_api.PresubmitLocalPath()] + code, _ = git_cl.RunGitWithCode(cmd, suppress_stderr=True) + if code == 2: + short_path = input_api.basename(input_api.PresubmitLocalPath()) + full_path = input_api.os_path.relpath( + input_api.PresubmitLocalPath(), input_api.change.RepositoryRoot()) + return [ + output_api.PresubmitPromptWarning( + 'The %s directory requires source formatting. ' + 'Please run git cl format %s' % (short_path, full_path)) + ] + # As this is just a warning, ignore all other errors if the user + # happens to have a broken clang-format, doesn't use git, etc etc. 
+ return [] diff --git a/sdk/android/api/org/webrtc/CandidatePairChangeEvent.java b/sdk/android/api/org/webrtc/CandidatePairChangeEvent.java index 395b629c54..b8e6685a87 100644 --- a/sdk/android/api/org/webrtc/CandidatePairChangeEvent.java +++ b/sdk/android/api/org/webrtc/CandidatePairChangeEvent.java @@ -20,12 +20,20 @@ public final class CandidatePairChangeEvent { public final int lastDataReceivedMs; public final String reason; + /** + * An estimate from the ICE stack on how long it was disconnected before + * changing to the new candidate pair in this event. + * The first time an candidate pair is signaled the value will be 0. + */ + public final int estimatedDisconnectedTimeMs; + @CalledByNative - CandidatePairChangeEvent( - IceCandidate local, IceCandidate remote, int lastDataReceivedMs, String reason) { + CandidatePairChangeEvent(IceCandidate local, IceCandidate remote, int lastDataReceivedMs, + String reason, int estimatedDisconnectedTimeMs) { this.local = local; this.remote = remote; this.lastDataReceivedMs = lastDataReceivedMs; this.reason = reason; + this.estimatedDisconnectedTimeMs = estimatedDisconnectedTimeMs; } } diff --git a/sdk/android/api/org/webrtc/EglRenderer.java b/sdk/android/api/org/webrtc/EglRenderer.java index 8c6be56a8a..47bd0cf99e 100644 --- a/sdk/android/api/org/webrtc/EglRenderer.java +++ b/sdk/android/api/org/webrtc/EglRenderer.java @@ -128,8 +128,8 @@ public void dispatchMessage(Message msg) { // paused. private long minRenderPeriodNs; - // EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed - // from the render thread. + // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only + // accessed from the render thread. @Nullable private EglBase eglBase; private final VideoFrameDrawer frameDrawer; @Nullable private RendererCommon.GlDrawer drawer; @@ -290,7 +290,9 @@ public void release() { // Release EGL and GL resources on render thread. 
renderThreadHandler.postAtFrontOfQueue(() -> { // Detach current shader program. - GLES20.glUseProgram(/* program= */ 0); + synchronized (EglBase.lock) { + GLES20.glUseProgram(/* program= */ 0); + } if (drawer != null) { drawer.release(); drawer = null; diff --git a/sdk/android/api/org/webrtc/EncodedImage.java b/sdk/android/api/org/webrtc/EncodedImage.java index b50136c373..84c420c425 100644 --- a/sdk/android/api/org/webrtc/EncodedImage.java +++ b/sdk/android/api/org/webrtc/EncodedImage.java @@ -54,7 +54,6 @@ static FrameType fromNativeIndex(int nativeIndex) { public final long captureTimeNs; public final FrameType frameType; public final int rotation; - public final boolean completeFrame; public final @Nullable Integer qp; // TODO(bugs.webrtc.org/9378): Use retain and release from jni code. @@ -71,7 +70,7 @@ public void release() { @CalledByNative private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int encodedWidth, int encodedHeight, long captureTimeNs, FrameType frameType, int rotation, - boolean completeFrame, @Nullable Integer qp) { + @Nullable Integer qp) { this.buffer = buffer; this.encodedWidth = encodedWidth; this.encodedHeight = encodedHeight; @@ -79,7 +78,6 @@ private EncodedImage(ByteBuffer buffer, @Nullable Runnable releaseCallback, int this.captureTimeNs = captureTimeNs; this.frameType = frameType; this.rotation = rotation; - this.completeFrame = completeFrame; this.qp = qp; this.refCountDelegate = new RefCountDelegate(releaseCallback); } @@ -114,11 +112,6 @@ private int getRotation() { return rotation; } - @CalledByNative - private boolean getCompleteFrame() { - return completeFrame; - } - @CalledByNative private @Nullable Integer getQp() { return qp; @@ -136,7 +129,6 @@ public static class Builder { private long captureTimeNs; private EncodedImage.FrameType frameType; private int rotation; - private boolean completeFrame; private @Nullable Integer qp; private Builder() {} @@ -178,11 +170,6 @@ public Builder setRotation(int 
rotation) { return this; } - public Builder setCompleteFrame(boolean completeFrame) { - this.completeFrame = completeFrame; - return this; - } - public Builder setQp(@Nullable Integer qp) { this.qp = qp; return this; @@ -190,7 +177,7 @@ public Builder setQp(@Nullable Integer qp) { public EncodedImage createEncodedImage() { return new EncodedImage(buffer, releaseCallback, encodedWidth, encodedHeight, captureTimeNs, - frameType, rotation, completeFrame, qp); + frameType, rotation, qp); } } } diff --git a/sdk/android/api/org/webrtc/GlShader.java b/sdk/android/api/org/webrtc/GlShader.java index db5c630663..8f4cda3ba6 100644 --- a/sdk/android/api/org/webrtc/GlShader.java +++ b/sdk/android/api/org/webrtc/GlShader.java @@ -114,7 +114,9 @@ public void useProgram() { if (program == -1) { throw new RuntimeException("The program has been released"); } - GLES20.glUseProgram(program); + synchronized (EglBase.lock) { + GLES20.glUseProgram(program); + } GlUtil.checkNoGLES2Error("glUseProgram"); } diff --git a/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java index ac3b1b97c1..2876df028e 100644 --- a/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java +++ b/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java @@ -18,18 +18,9 @@ public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory { private final static Predicate defaultAllowedPredicate = new Predicate() { - private String[] prefixBlacklist = - Arrays.copyOf(MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES, - MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES.length); @Override public boolean test(MediaCodecInfo arg) { - final String name = arg.getName(); - for (String prefix : prefixBlacklist) { - if (name.startsWith(prefix)) { - return false; - } - } - return true; + return MediaCodecUtils.isHardwareAccelerated(arg); } }; diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java 
b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java index b6c397aafe..e4f93ed8bd 100644 --- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java +++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java @@ -13,6 +13,8 @@ import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX; import static org.webrtc.MediaCodecUtils.INTEL_PREFIX; import static org.webrtc.MediaCodecUtils.QCOM_PREFIX; +import static org.webrtc.MediaCodecUtils.HISI_PREFIX; +import static org.webrtc.MediaCodecUtils.IMG_PREFIX; import android.media.MediaCodecInfo; import android.media.MediaCodecList; @@ -41,6 +43,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { @Nullable private final EglBase14.Context sharedContext; private final boolean enableIntelVp8Encoder; private final boolean enableH264HighProfile; + private final String extraMediaCodecFile = "sdcard/mediaCodec.xml"; + private final VideoCapabilityParser vcp = new VideoCapabilityParser(); + @Nullable private final Predicate codecAllowedPredicate; /** @@ -94,7 +99,7 @@ public VideoEncoder createEncoder(VideoCodecInfo input) { return null; } - VideoCodecType type = VideoCodecType.valueOf(input.name); + VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.name); MediaCodecInfo info = findCodecForType(type); if (info == null) { @@ -108,7 +113,7 @@ public VideoEncoder createEncoder(VideoCodecInfo input) { Integer yuvColorFormat = MediaCodecUtils.selectColorFormat( MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime)); - if (type == VideoCodecType.H264) { + if (type == VideoCodecMimeType.H264) { boolean isHighProfile = H264Utils.isSameH264Profile( input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)); boolean isBaselineProfile = H264Utils.isSameH264Profile( @@ -137,15 +142,16 @@ public VideoCodecInfo[] getSupportedCodecs() { List supportedCodecInfos = new ArrayList(); // Generate a list of supported codecs in order of preference: - // VP8, VP9, 
H264 (high profile), and H264 (baseline profile). - for (VideoCodecType type : - new VideoCodecType[] {VideoCodecType.VP8, VideoCodecType.VP9, VideoCodecType.H264}) { + // VP8, VP9, H.265(optional), H264 (high profile), and H264 (baseline profile). + for (VideoCodecMimeType type : new VideoCodecMimeType[] { + VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, VideoCodecMimeType.H264, + VideoCodecMimeType.H265}) { MediaCodecInfo codec = findCodecForType(type); if (codec != null) { String name = type.name(); // TODO(sakal): Always add H264 HP once WebRTC correctly removes codecs that are not // supported by the decoder. - if (type == VideoCodecType.H264 && isH264HighProfileSupported(codec)) { + if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); } @@ -158,7 +164,7 @@ public VideoCodecInfo[] getSupportedCodecs() { return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); } - private @Nullable MediaCodecInfo findCodecForType(VideoCodecType type) { + private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) { for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { MediaCodecInfo info = null; try { @@ -179,7 +185,7 @@ public VideoCodecInfo[] getSupportedCodecs() { } // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type. - private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) { + private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { if (!MediaCodecUtils.codecSupportsType(info, type)) { return false; } @@ -194,7 +200,7 @@ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) { // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the // current SDK. 
- private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecType type) { + private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) { switch (type) { case VP8: return isHardwareSupportedInCurrentSdkVp8(info); @@ -202,6 +208,8 @@ private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecT return isHardwareSupportedInCurrentSdkVp9(info); case H264: return isHardwareSupportedInCurrentSdkH264(info); + case H265: + return isHardwareSupportedInCurrentSdkH265(info); } return false; } @@ -210,16 +218,22 @@ private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) { String name = info.getName(); // QCOM Vp8 encoder is supported in KITKAT or later. return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) + // Hisi VP8 encoder seems to be supported. Needs more testing. + || (name.startsWith(HISI_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) + || (name.startsWith(IMG_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) // Exynos VP8 encoder is supported in M or later. || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) // Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled. 
|| (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP - && enableIntelVp8Encoder); + && enableIntelVp8Encoder) + || vcp.isExtraHardwareSupported(name, "video/x-vnd.on2.vp8", vcp.parseWithTag(vcp.loadWithDom(extraMediaCodecFile), "Decoders")); } private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) { String name = info.getName(); - return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX)) + return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX) || name.startsWith(HISI_PREFIX) + || name.startsWith(IMG_PREFIX) + || vcp.isExtraHardwareSupported(name, "video/x-vnd.on2.vp9", vcp.parseWithTag(vcp.loadWithDom(extraMediaCodecFile), "Decoders"))) // Both QCOM and Exynos VP9 encoders are supported in N or later. && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N; } @@ -234,7 +248,23 @@ private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) { return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) // Exynos H264 encoder is supported in LOLLIPOP or later. || (name.startsWith(EXYNOS_PREFIX) - && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP); + && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) + || (name.startsWith(HISI_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) + || (name.startsWith(IMG_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) + || vcp.isExtraHardwareSupported(name, "video/avc", vcp.parseWithTag(vcp.loadWithDom(extraMediaCodecFile), "Decoders")); + } + + private boolean isHardwareSupportedInCurrentSdkH265(MediaCodecInfo info) { + String name = info.getName(); + // QCOM H265 encoder is supported in KITKAT or later. + return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) + // Exynos H265 encoder is supported in LOLLIPOP or later. 
+ || (name.startsWith(EXYNOS_PREFIX) + && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) + // Hisi VP8 encoder seems to be supported. Needs more testing. + || (name.startsWith(HISI_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) + || (name.startsWith(IMG_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) + || vcp.isExtraHardwareSupported(name, "video/hevc", vcp.parseWithTag(vcp.loadWithDom(extraMediaCodecFile), "Decoders")); } private boolean isMediaCodecAllowed(MediaCodecInfo info) { @@ -244,19 +274,20 @@ private boolean isMediaCodecAllowed(MediaCodecInfo info) { return codecAllowedPredicate.test(info); } - private int getKeyFrameIntervalSec(VideoCodecType type) { + private int getKeyFrameIntervalSec(VideoCodecMimeType type) { switch (type) { case VP8: // Fallthrough intended. case VP9: return 100; case H264: + case H265: return 20; } - throw new IllegalArgumentException("Unsupported VideoCodecType " + type); + throw new IllegalArgumentException("Unsupported VideoCodecMimeType " + type); } - private int getForcedKeyFrameIntervalMs(VideoCodecType type, String codecName) { - if (type == VideoCodecType.VP8 && codecName.startsWith(QCOM_PREFIX)) { + private int getForcedKeyFrameIntervalMs(VideoCodecMimeType type, String codecName) { + if (type == VideoCodecMimeType.VP8 && codecName.startsWith(QCOM_PREFIX)) { if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP || Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) { return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS; @@ -270,9 +301,9 @@ private int getForcedKeyFrameIntervalMs(VideoCodecType type, String codecName) { return 0; } - private BitrateAdjuster createBitrateAdjuster(VideoCodecType type, String codecName) { + private BitrateAdjuster createBitrateAdjuster(VideoCodecMimeType type, String codecName) { if (codecName.startsWith(EXYNOS_PREFIX)) { - if (type == VideoCodecType.VP8) { + if (type == VideoCodecMimeType.VP8) { // Exynos VP8 encoders need dynamic bitrate 
adjustment. return new DynamicBitrateAdjuster(); } else { diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java deleted file mode 100644 index 8c91de049e..0000000000 --- a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java +++ /dev/null @@ -1,1023 +0,0 @@ -/* - * Copyright 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc; - -import android.media.MediaCodec; -import android.media.MediaCodecInfo; -import android.media.MediaCodecInfo.CodecCapabilities; -import android.media.MediaCodecList; -import android.media.MediaFormat; -import android.os.Build; -import android.os.SystemClock; -import android.support.annotation.Nullable; -import android.view.Surface; -import java.nio.ByteBuffer; -import java.util.ArrayDeque; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Queue; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import org.webrtc.EglBase; -import org.webrtc.VideoFrame; - -// Java-side of peerconnection.cc:MediaCodecVideoDecoder. -// This class is an implementation detail of the Java PeerConnection API. -@SuppressWarnings("deprecation") -@Deprecated -public class MediaCodecVideoDecoder { - // This class is constructed, operated, and destroyed by its C++ incarnation, - // so the class and its methods have non-public visibility. 
The API this - // class exposes aims to mimic the webrtc::VideoDecoder API as closely as - // possibly to minimize the amount of translation work necessary. - - private static final String TAG = "MediaCodecVideoDecoder"; - - /** - * Create a VideoDecoderFactory that can be injected in the PeerConnectionFactory and replicate - * the old behavior. - */ - public static VideoDecoderFactory createFactory() { - return new DefaultVideoDecoderFactory(new HwDecoderFactory()); - } - - // Factory for creating HW MediaCodecVideoDecoder instances. - static class HwDecoderFactory implements VideoDecoderFactory { - private static boolean isSameCodec(VideoCodecInfo codecA, VideoCodecInfo codecB) { - if (!codecA.name.equalsIgnoreCase(codecB.name)) { - return false; - } - return codecA.name.equalsIgnoreCase("H264") - ? H264Utils.isSameH264Profile(codecA.params, codecB.params) - : true; - } - - private static boolean isCodecSupported( - VideoCodecInfo[] supportedCodecs, VideoCodecInfo codec) { - for (VideoCodecInfo supportedCodec : supportedCodecs) { - if (isSameCodec(supportedCodec, codec)) { - return true; - } - } - return false; - } - - private static VideoCodecInfo[] getSupportedHardwareCodecs() { - final List codecs = new ArrayList(); - - if (isVp8HwSupported()) { - Logging.d(TAG, "VP8 HW Decoder supported."); - codecs.add(new VideoCodecInfo("VP8", new HashMap<>())); - } - - if (isVp9HwSupported()) { - Logging.d(TAG, "VP9 HW Decoder supported."); - codecs.add(new VideoCodecInfo("VP9", new HashMap<>())); - } - - if (isH264HighProfileHwSupported()) { - Logging.d(TAG, "H.264 High Profile HW Decoder supported."); - codecs.add(H264Utils.DEFAULT_H264_HIGH_PROFILE_CODEC); - } - - if (isH264HwSupported()) { - Logging.d(TAG, "H.264 HW Decoder supported."); - codecs.add(H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC); - } - - return codecs.toArray(new VideoCodecInfo[codecs.size()]); - } - - private final VideoCodecInfo[] supportedHardwareCodecs = getSupportedHardwareCodecs(); - - @Override 
- public VideoCodecInfo[] getSupportedCodecs() { - return supportedHardwareCodecs; - } - - @Nullable - @Override - public VideoDecoder createDecoder(VideoCodecInfo codec) { - if (!isCodecSupported(supportedHardwareCodecs, codec)) { - Logging.d(TAG, "No HW video decoder for codec " + codec.name); - return null; - } - Logging.d(TAG, "Create HW video decoder for " + codec.name); - return new WrappedNativeVideoDecoder() { - @Override - public long createNativeVideoDecoder() { - return nativeCreateDecoder(codec.name, useSurface()); - } - }; - } - } - - private static final long MAX_DECODE_TIME_MS = 200; - - // TODO(magjed): Use MediaFormat constants when part of the public API. - private static final String FORMAT_KEY_STRIDE = "stride"; - private static final String FORMAT_KEY_SLICE_HEIGHT = "slice-height"; - private static final String FORMAT_KEY_CROP_LEFT = "crop-left"; - private static final String FORMAT_KEY_CROP_RIGHT = "crop-right"; - private static final String FORMAT_KEY_CROP_TOP = "crop-top"; - private static final String FORMAT_KEY_CROP_BOTTOM = "crop-bottom"; - - // Tracks webrtc::VideoCodecType. - public enum VideoCodecType { - VIDEO_CODEC_UNKNOWN, - VIDEO_CODEC_VP8, - VIDEO_CODEC_VP9, - VIDEO_CODEC_AV1, - VIDEO_CODEC_H264; - - @CalledByNative("VideoCodecType") - static VideoCodecType fromNativeIndex(int nativeIndex) { - return values()[nativeIndex]; - } - } - - // Timeout for input buffer dequeue. - private static final int DEQUEUE_INPUT_TIMEOUT = 500000; - // Timeout for codec releasing. - private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; - // Max number of output buffers queued before starting to drop decoded frames. - private static final int MAX_QUEUED_OUTPUTBUFFERS = 3; - // Active running decoder instance. Set in initDecode() (called from native code) - // and reset to null in release() call. 
- @Nullable private static MediaCodecVideoDecoder runningInstance; - @Nullable private static MediaCodecVideoDecoderErrorCallback errorCallback; - private static int codecErrors; - // List of disabled codec types - can be set from application. - private static Set hwDecoderDisabledTypes = new HashSet(); - @Nullable private static EglBase eglBase; - - @Nullable private Thread mediaCodecThread; - @Nullable private MediaCodec mediaCodec; - private ByteBuffer[] inputBuffers; - private ByteBuffer[] outputBuffers; - private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8"; - private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9"; - private static final String H264_MIME_TYPE = "video/avc"; - // List of supported HW VP8 decoders. - private static final String[] supportedVp8HwCodecPrefixes() { - ArrayList supportedPrefixes = new ArrayList(); - supportedPrefixes.add("OMX.qcom."); - supportedPrefixes.add("OMX.Nvidia."); - supportedPrefixes.add("OMX.Exynos."); - supportedPrefixes.add("OMX.Intel."); - if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTC-MediaTekVP8").equals("Enabled") - && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { - supportedPrefixes.add("OMX.MTK."); - } - return supportedPrefixes.toArray(new String[supportedPrefixes.size()]); - } - // List of supported HW VP9 decoders. - private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."}; - // List of supported HW H.264 decoders. 
- private static final String[] supportedH264HwCodecPrefixes() { - ArrayList supportedPrefixes = new ArrayList(); - supportedPrefixes.add("OMX.qcom."); - supportedPrefixes.add("OMX.Intel."); - supportedPrefixes.add("OMX.Exynos."); - if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTC-MediaTekH264").equals("Enabled") - && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) { - supportedPrefixes.add("OMX.MTK."); - } - return supportedPrefixes.toArray(new String[supportedPrefixes.size()]); - } - - // List of supported HW H.264 high profile decoders. - private static final String supportedQcomH264HighProfileHwCodecPrefix = "OMX.qcom."; - private static final String supportedExynosH264HighProfileHwCodecPrefix = "OMX.Exynos."; - private static final String supportedMediaTekH264HighProfileHwCodecPrefix = "OMX.MTK."; - - // NV12 color format supported by QCOM codec, but not declared in MediaCodec - - // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h - private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01; - private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02; - private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03; - private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; - // Allowable color formats supported by codec - in order of preference. 
- private static final List supportedColorList = Arrays.asList( - CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar, - CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, - COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka, - COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, - COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m); - - private int colorFormat; - private int width; - private int height; - private int stride; - private int sliceHeight; - private boolean hasDecodedFirstFrame; - private final Queue decodeStartTimeMs = new ArrayDeque(); - - // The below variables are only used when decoding to a Surface. - @Nullable private TextureListener textureListener; - private int droppedFrames; - @Nullable private Surface surface; - private final Queue dequeuedSurfaceOutputBuffers = - new ArrayDeque(); - - // MediaCodec error handler - invoked when critical error happens which may prevent - // further use of media codec API. Now it means that one of media codec instances - // is hanging and can no longer be used in the next call. - public static interface MediaCodecVideoDecoderErrorCallback { - void onMediaCodecVideoDecoderCriticalError(int codecErrors); - } - - /** Set EGL context used by HW decoding. The EGL context must be shared with the remote render. */ - public static void setEglContext(EglBase.Context eglContext) { - if (eglBase != null) { - Logging.w(TAG, "Egl context already set."); - eglBase.release(); - } - eglBase = EglBase.create(eglContext); - } - - /** Dispose the EGL context used by HW decoding. 
*/ - public static void disposeEglContext() { - if (eglBase != null) { - eglBase.release(); - eglBase = null; - } - } - - static boolean useSurface() { - return eglBase != null; - } - - public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) { - Logging.d(TAG, "Set error callback"); - MediaCodecVideoDecoder.errorCallback = errorCallback; - } - - // Functions to disable HW decoding - can be called from applications for platforms - // which have known HW decoding problems. - public static void disableVp8HwCodec() { - Logging.w(TAG, "VP8 decoding is disabled by application."); - hwDecoderDisabledTypes.add(VP8_MIME_TYPE); - } - - public static void disableVp9HwCodec() { - Logging.w(TAG, "VP9 decoding is disabled by application."); - hwDecoderDisabledTypes.add(VP9_MIME_TYPE); - } - - public static void disableH264HwCodec() { - Logging.w(TAG, "H.264 decoding is disabled by application."); - hwDecoderDisabledTypes.add(H264_MIME_TYPE); - } - - // Functions to query if HW decoding is supported. - public static boolean isVp8HwSupported() { - return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) - && (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes()) != null); - } - - public static boolean isVp9HwSupported() { - return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) - && (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null); - } - - public static boolean isH264HwSupported() { - return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) - && (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes()) != null); - } - - public static boolean isH264HighProfileHwSupported() { - if (hwDecoderDisabledTypes.contains(H264_MIME_TYPE)) { - return false; - } - // Support H.264 HP decoding on QCOM chips for Android L and above. 
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP - && findDecoder(H264_MIME_TYPE, new String[] {supportedQcomH264HighProfileHwCodecPrefix}) - != null) { - return true; - } - // Support H.264 HP decoding on Exynos chips for Android M and above. - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M - && findDecoder(H264_MIME_TYPE, new String[] {supportedExynosH264HighProfileHwCodecPrefix}) - != null) { - return true; - } - // Support H.264 HP decoding on MediaTek chips for Android O_MR1 and above - if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTC-MediaTekH264").equals("Enabled") - && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1 - && findDecoder(H264_MIME_TYPE, new String[] {supportedMediaTekH264HighProfileHwCodecPrefix}) - != null) { - return true; - } - return false; - } - - public static void printStackTrace() { - if (runningInstance != null && runningInstance.mediaCodecThread != null) { - StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace(); - if (mediaCodecStackTraces.length > 0) { - Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:"); - for (StackTraceElement stackTrace : mediaCodecStackTraces) { - Logging.d(TAG, stackTrace.toString()); - } - } - } - } - - // Helper struct for findDecoder() below. - private static class DecoderProperties { - public DecoderProperties(String codecName, int colorFormat) { - this.codecName = codecName; - this.colorFormat = colorFormat; - } - public final String codecName; // OpenMax component name for VP8 codec. - public final int colorFormat; // Color format supported by codec. - } - - private static @Nullable DecoderProperties findDecoder( - String mime, String[] supportedCodecPrefixes) { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { - return null; // MediaCodec.setParameters is missing. 
- } - Logging.d(TAG, "Trying to find HW decoder for mime " + mime); - for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { - MediaCodecInfo info = null; - try { - info = MediaCodecList.getCodecInfoAt(i); - } catch (IllegalArgumentException e) { - Logging.e(TAG, "Cannot retrieve decoder codec info", e); - } - if (info == null || info.isEncoder()) { - continue; - } - String name = null; - for (String mimeType : info.getSupportedTypes()) { - if (mimeType.equals(mime)) { - name = info.getName(); - break; - } - } - if (name == null) { - continue; // No HW support in this codec; try the next one. - } - Logging.d(TAG, "Found candidate decoder " + name); - - // Check if this is supported decoder. - boolean supportedCodec = false; - for (String codecPrefix : supportedCodecPrefixes) { - if (name.startsWith(codecPrefix)) { - supportedCodec = true; - break; - } - } - if (!supportedCodec) { - continue; - } - - // Check if codec supports either yuv420 or nv12. - CodecCapabilities capabilities; - try { - capabilities = info.getCapabilitiesForType(mime); - } catch (IllegalArgumentException e) { - Logging.e(TAG, "Cannot retrieve decoder capabilities", e); - continue; - } - for (int colorFormat : capabilities.colorFormats) { - Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat)); - } - for (int supportedColorFormat : supportedColorList) { - for (int codecColorFormat : capabilities.colorFormats) { - if (codecColorFormat == supportedColorFormat) { - // Found supported HW decoder. - Logging.d(TAG, "Found target decoder " + name + ". Color: 0x" - + Integer.toHexString(codecColorFormat)); - return new DecoderProperties(name, codecColorFormat); - } - } - } - } - Logging.d(TAG, "No HW decoder found for mime " + mime); - return null; // No HW decoder. 
- } - - @CalledByNative - MediaCodecVideoDecoder() {} - - private void checkOnMediaCodecThread() throws IllegalStateException { - if (mediaCodecThread.getId() != Thread.currentThread().getId()) { - throw new IllegalStateException("MediaCodecVideoDecoder previously operated on " - + mediaCodecThread + " but is now called on " + Thread.currentThread()); - } - } - - @CalledByNativeUnchecked - private boolean initDecode(VideoCodecType type, int width, int height) { - if (mediaCodecThread != null) { - throw new RuntimeException("initDecode: Forgot to release()?"); - } - - String mime = null; - String[] supportedCodecPrefixes = null; - if (type == VideoCodecType.VIDEO_CODEC_VP8) { - mime = VP8_MIME_TYPE; - supportedCodecPrefixes = supportedVp8HwCodecPrefixes(); - } else if (type == VideoCodecType.VIDEO_CODEC_VP9) { - mime = VP9_MIME_TYPE; - supportedCodecPrefixes = supportedVp9HwCodecPrefixes; - } else if (type == VideoCodecType.VIDEO_CODEC_H264) { - mime = H264_MIME_TYPE; - supportedCodecPrefixes = supportedH264HwCodecPrefixes(); - } else { - throw new RuntimeException("initDecode: Non-supported codec " + type); - } - DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes); - if (properties == null) { - throw new RuntimeException("Cannot find HW decoder for " + type); - } - - Logging.d(TAG, - "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x" - + Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface()); - - runningInstance = this; // Decoder is now running and can be queried for stack traces. 
- mediaCodecThread = Thread.currentThread(); - try { - this.width = width; - this.height = height; - stride = width; - sliceHeight = height; - - if (useSurface()) { - @Nullable - final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create( - "Decoder SurfaceTextureHelper", eglBase.getEglBaseContext()); - if (surfaceTextureHelper != null) { - textureListener = new TextureListener(surfaceTextureHelper); - textureListener.setSize(width, height); - surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); - } - } - - MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); - if (!useSurface()) { - format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); - } - Logging.d(TAG, " Format: " + format); - mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName); - if (mediaCodec == null) { - Logging.e(TAG, "Can not create media decoder"); - return false; - } - mediaCodec.configure(format, surface, null, 0); - mediaCodec.start(); - - colorFormat = properties.colorFormat; - outputBuffers = mediaCodec.getOutputBuffers(); - inputBuffers = mediaCodec.getInputBuffers(); - decodeStartTimeMs.clear(); - hasDecodedFirstFrame = false; - dequeuedSurfaceOutputBuffers.clear(); - droppedFrames = 0; - Logging.d(TAG, - "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length); - return true; - } catch (IllegalStateException e) { - Logging.e(TAG, "initDecode failed", e); - return false; - } - } - - // Resets the decoder so it can start decoding frames with new resolution. - // Flushes MediaCodec and clears decoder output buffers. 
- @CalledByNativeUnchecked - private void reset(int width, int height) { - if (mediaCodecThread == null || mediaCodec == null) { - throw new RuntimeException("Incorrect reset call for non-initialized decoder."); - } - Logging.d(TAG, "Java reset: " + width + " x " + height); - - mediaCodec.flush(); - - this.width = width; - this.height = height; - if (textureListener != null) { - textureListener.setSize(width, height); - } - decodeStartTimeMs.clear(); - dequeuedSurfaceOutputBuffers.clear(); - hasDecodedFirstFrame = false; - droppedFrames = 0; - } - - @CalledByNativeUnchecked - private void release() { - Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames); - checkOnMediaCodecThread(); - - // Run Mediacodec stop() and release() on separate thread since sometime - // Mediacodec.stop() may hang. - final CountDownLatch releaseDone = new CountDownLatch(1); - - Runnable runMediaCodecRelease = new Runnable() { - @Override - public void run() { - try { - Logging.d(TAG, "Java releaseDecoder on release thread"); - mediaCodec.stop(); - mediaCodec.release(); - Logging.d(TAG, "Java releaseDecoder on release thread done"); - } catch (Exception e) { - Logging.e(TAG, "Media decoder release failed", e); - } - releaseDone.countDown(); - } - }; - new Thread(runMediaCodecRelease).start(); - - if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { - Logging.e(TAG, "Media decoder release timeout"); - codecErrors++; - if (errorCallback != null) { - Logging.e(TAG, "Invoke codec error callback. 
Errors: " + codecErrors); - errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors); - } - } - - mediaCodec = null; - mediaCodecThread = null; - runningInstance = null; - if (useSurface()) { - surface.release(); - surface = null; - textureListener.release(); - } - Logging.d(TAG, "Java releaseDecoder done"); - } - - // Dequeue an input buffer and return its index, -1 if no input buffer is - // available, or -2 if the codec is no longer operative. - @CalledByNativeUnchecked - private int dequeueInputBuffer() { - checkOnMediaCodecThread(); - try { - return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT); - } catch (IllegalStateException e) { - Logging.e(TAG, "dequeueIntputBuffer failed", e); - return -2; - } - } - - @CalledByNativeUnchecked - private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStamUs, - long timeStampMs, long ntpTimeStamp) { - checkOnMediaCodecThread(); - try { - inputBuffers[inputBufferIndex].position(0); - inputBuffers[inputBufferIndex].limit(size); - decodeStartTimeMs.add( - new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp)); - mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0); - return true; - } catch (IllegalStateException e) { - Logging.e(TAG, "decode failed", e); - return false; - } - } - - private static class TimeStamps { - public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) { - this.decodeStartTimeMs = decodeStartTimeMs; - this.timeStampMs = timeStampMs; - this.ntpTimeStampMs = ntpTimeStampMs; - } - // Time when this frame was queued for decoding. - private final long decodeStartTimeMs; - // Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame. - private final long timeStampMs; - // Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame. - private final long ntpTimeStampMs; - } - - // Helper struct for dequeueOutputBuffer() below. 
- private static class DecodedOutputBuffer { - public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs, - long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) { - this.index = index; - this.offset = offset; - this.size = size; - this.presentationTimeStampMs = presentationTimeStampMs; - this.timeStampMs = timeStampMs; - this.ntpTimeStampMs = ntpTimeStampMs; - this.decodeTimeMs = decodeTime; - this.endDecodeTimeMs = endDecodeTime; - } - - private final int index; - private final int offset; - private final int size; - // Presentation timestamp returned in dequeueOutputBuffer call. - private final long presentationTimeStampMs; - // C++ inputImage._timeStamp value for output frame. - private final long timeStampMs; - // C++ inputImage.ntp_time_ms_ value for output frame. - private final long ntpTimeStampMs; - // Number of ms it took to decode this frame. - private final long decodeTimeMs; - // System time when this frame decoding finished. - private final long endDecodeTimeMs; - - @CalledByNative("DecodedOutputBuffer") - int getIndex() { - return index; - } - - @CalledByNative("DecodedOutputBuffer") - int getOffset() { - return offset; - } - - @CalledByNative("DecodedOutputBuffer") - int getSize() { - return size; - } - - @CalledByNative("DecodedOutputBuffer") - long getPresentationTimestampMs() { - return presentationTimeStampMs; - } - - @CalledByNative("DecodedOutputBuffer") - long getTimestampMs() { - return timeStampMs; - } - - @CalledByNative("DecodedOutputBuffer") - long getNtpTimestampMs() { - return ntpTimeStampMs; - } - - @CalledByNative("DecodedOutputBuffer") - long getDecodeTimeMs() { - return decodeTimeMs; - } - } - - // Helper struct for dequeueTextureBuffer() below. - private static class DecodedTextureBuffer { - private final VideoFrame.Buffer videoFrameBuffer; - // Presentation timestamp returned in dequeueOutputBuffer call. 
- private final long presentationTimeStampMs; - // C++ inputImage._timeStamp value for output frame. - private final long timeStampMs; - // C++ inputImage.ntp_time_ms_ value for output frame. - private final long ntpTimeStampMs; - // Number of ms it took to decode this frame. - private final long decodeTimeMs; - // Interval from when the frame finished decoding until this buffer has been created. - // Since there is only one texture, this interval depend on the time from when - // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec - // so that the texture can be updated with the next decoded frame. - private final long frameDelayMs; - - // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame - // that was dropped. - public DecodedTextureBuffer(VideoFrame.Buffer videoFrameBuffer, long presentationTimeStampMs, - long timeStampMs, long ntpTimeStampMs, long decodeTimeMs, long frameDelay) { - this.videoFrameBuffer = videoFrameBuffer; - this.presentationTimeStampMs = presentationTimeStampMs; - this.timeStampMs = timeStampMs; - this.ntpTimeStampMs = ntpTimeStampMs; - this.decodeTimeMs = decodeTimeMs; - this.frameDelayMs = frameDelay; - } - - @CalledByNative("DecodedTextureBuffer") - VideoFrame.Buffer getVideoFrameBuffer() { - return videoFrameBuffer; - } - - @CalledByNative("DecodedTextureBuffer") - long getPresentationTimestampMs() { - return presentationTimeStampMs; - } - - @CalledByNative("DecodedTextureBuffer") - long getTimeStampMs() { - return timeStampMs; - } - - @CalledByNative("DecodedTextureBuffer") - long getNtpTimestampMs() { - return ntpTimeStampMs; - } - - @CalledByNative("DecodedTextureBuffer") - long getDecodeTimeMs() { - return decodeTimeMs; - } - - @CalledByNative("DecodedTextureBuffer") - long getFrameDelayMs() { - return frameDelayMs; - } - } - - // Poll based texture listener. 
- private class TextureListener implements VideoSink { - private final SurfaceTextureHelper surfaceTextureHelper; - // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll(). - private final Object newFrameLock = new Object(); - // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to - // onFrame(). - @Nullable private DecodedOutputBuffer bufferToRender; - @Nullable private DecodedTextureBuffer renderedBuffer; - - public TextureListener(SurfaceTextureHelper surfaceTextureHelper) { - this.surfaceTextureHelper = surfaceTextureHelper; - surfaceTextureHelper.startListening(this); - } - - public void addBufferToRender(DecodedOutputBuffer buffer) { - if (bufferToRender != null) { - Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture."); - throw new IllegalStateException("Waiting for a texture."); - } - bufferToRender = buffer; - } - - public boolean isWaitingForTexture() { - synchronized (newFrameLock) { - return bufferToRender != null; - } - } - - public void setSize(int width, int height) { - surfaceTextureHelper.setTextureSize(width, height); - } - - // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread. - @Override - public void onFrame(VideoFrame frame) { - synchronized (newFrameLock) { - if (renderedBuffer != null) { - Logging.e(TAG, "Unexpected onFrame() called while already holding a texture."); - throw new IllegalStateException("Already holding a texture."); - } - // |timestampNs| is always zero on some Android versions. 
- final VideoFrame.Buffer buffer = frame.getBuffer(); - buffer.retain(); - renderedBuffer = new DecodedTextureBuffer(buffer, bufferToRender.presentationTimeStampMs, - bufferToRender.timeStampMs, bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs, - SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs); - bufferToRender = null; - newFrameLock.notifyAll(); - } - } - - // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise. - @Nullable - @SuppressWarnings("WaitNotInLoop") - public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) { - synchronized (newFrameLock) { - if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) { - try { - newFrameLock.wait(timeoutMs); - } catch (InterruptedException e) { - // Restore the interrupted status by reinterrupting the thread. - Thread.currentThread().interrupt(); - } - } - DecodedTextureBuffer returnedBuffer = renderedBuffer; - renderedBuffer = null; - return returnedBuffer; - } - } - - public void release() { - // SurfaceTextureHelper.stopListening() will block until any onFrame() in progress is done. - // Therefore, the call must be outside any synchronized statement that is also used in the - // onFrame() above to avoid deadlocks. - surfaceTextureHelper.stopListening(); - synchronized (newFrameLock) { - if (renderedBuffer != null) { - renderedBuffer.getVideoFrameBuffer().release(); - renderedBuffer = null; - } - } - surfaceTextureHelper.dispose(); - } - } - - // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer. - // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an - // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException - // upon codec error. 
- @CalledByNativeUnchecked - private @Nullable DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) { - checkOnMediaCodecThread(); - if (decodeStartTimeMs.isEmpty()) { - return null; - } - // Drain the decoder until receiving a decoded buffer or hitting - // MediaCodec.INFO_TRY_AGAIN_LATER. - final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); - while (true) { - final int result = - mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs)); - switch (result) { - case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: - outputBuffers = mediaCodec.getOutputBuffers(); - Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); - if (hasDecodedFirstFrame) { - throw new RuntimeException("Unexpected output buffer change event."); - } - break; - case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: - MediaFormat format = mediaCodec.getOutputFormat(); - Logging.d(TAG, "Decoder format changed: " + format.toString()); - final int newWidth; - final int newHeight; - if (format.containsKey(FORMAT_KEY_CROP_LEFT) && format.containsKey(FORMAT_KEY_CROP_RIGHT) - && format.containsKey(FORMAT_KEY_CROP_BOTTOM) - && format.containsKey(FORMAT_KEY_CROP_TOP)) { - newWidth = 1 + format.getInteger(FORMAT_KEY_CROP_RIGHT) - - format.getInteger(FORMAT_KEY_CROP_LEFT); - newHeight = 1 + format.getInteger(FORMAT_KEY_CROP_BOTTOM) - - format.getInteger(FORMAT_KEY_CROP_TOP); - } else { - newWidth = format.getInteger(MediaFormat.KEY_WIDTH); - newHeight = format.getInteger(MediaFormat.KEY_HEIGHT); - } - if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) { - throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height - + ". 
New " + newWidth + "*" + newHeight); - } - width = newWidth; - height = newHeight; - if (textureListener != null) { - textureListener.setSize(width, height); - } - - if (!useSurface() && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { - colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); - Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); - if (!supportedColorList.contains(colorFormat)) { - throw new IllegalStateException("Non supported color format: " + colorFormat); - } - } - if (format.containsKey(FORMAT_KEY_STRIDE)) { - stride = format.getInteger(FORMAT_KEY_STRIDE); - } - if (format.containsKey(FORMAT_KEY_SLICE_HEIGHT)) { - sliceHeight = format.getInteger(FORMAT_KEY_SLICE_HEIGHT); - } - Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); - stride = Math.max(width, stride); - sliceHeight = Math.max(height, sliceHeight); - break; - case MediaCodec.INFO_TRY_AGAIN_LATER: - return null; - default: - hasDecodedFirstFrame = true; - TimeStamps timeStamps = decodeStartTimeMs.remove(); - long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs; - if (decodeTimeMs > MAX_DECODE_TIME_MS) { - Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms" - + ". Q size: " + decodeStartTimeMs.size() - + ". Might be caused by resuming H264 decoding after a pause."); - decodeTimeMs = MAX_DECODE_TIME_MS; - } - return new DecodedOutputBuffer(result, info.offset, info.size, - TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs, - timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime()); - } - } - } - - // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer. - // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an - // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException - // upon codec error. 
If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if - // a frame can't be returned. - @CalledByNativeUnchecked - private @Nullable DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) { - checkOnMediaCodecThread(); - if (!useSurface()) { - throw new IllegalStateException("dequeueTexture() called for byte buffer decoding."); - } - DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs); - if (outputBuffer != null) { - dequeuedSurfaceOutputBuffers.add(outputBuffer); - } - - MaybeRenderDecodedTextureBuffer(); - // Check if there is texture ready now by waiting max |dequeueTimeoutMs|. - DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs); - if (renderedBuffer != null) { - MaybeRenderDecodedTextureBuffer(); - return renderedBuffer; - } - - if ((dequeuedSurfaceOutputBuffers.size() - >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length) - || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) { - ++droppedFrames; - // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. - // The oldest frame is owned by |textureListener| and can't be dropped since - // mediaCodec.releaseOutputBuffer has already been called. - final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove(); - if (dequeueTimeoutMs > 0) { - // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to - // return the one and only texture even if it does not render. - Logging.w(TAG, "Draining decoder. Dropping frame with TS: " - + droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: " - + droppedFrames); - } else { - Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() - + ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs - + ". 
Total number of dropped frames: " + droppedFrames); - } - - mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */); - return new DecodedTextureBuffer(null /* videoFrameBuffer */, - droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs, - droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs, - SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs); - } - return null; - } - - private void MaybeRenderDecodedTextureBuffer() { - if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) { - return; - } - // Get the first frame in the queue and render to the decoder output surface. - final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove(); - textureListener.addBufferToRender(buffer); - mediaCodec.releaseOutputBuffer(buffer.index, true /* render */); - } - - // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for - // non-surface decoding. - // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured - // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws - // MediaCodec.CodecException upon codec error. 
- @CalledByNativeUnchecked - private void returnDecodedOutputBuffer(int index) - throws IllegalStateException, MediaCodec.CodecException { - checkOnMediaCodecThread(); - if (useSurface()) { - throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding."); - } - mediaCodec.releaseOutputBuffer(index, false /* render */); - } - - @CalledByNative - ByteBuffer[] getInputBuffers() { - return inputBuffers; - } - - @CalledByNative - ByteBuffer[] getOutputBuffers() { - return outputBuffers; - } - - @CalledByNative - int getColorFormat() { - return colorFormat; - } - - @CalledByNative - int getWidth() { - return width; - } - - @CalledByNative - int getHeight() { - return height; - } - - @CalledByNative - int getStride() { - return stride; - } - - @CalledByNative - int getSliceHeight() { - return sliceHeight; - } - - private static native long nativeCreateDecoder(String codec, boolean useSurface); -} diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java deleted file mode 100644 index 5f5d44b581..0000000000 --- a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java +++ /dev/null @@ -1,1108 +0,0 @@ -/* - * Copyright 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -package org.webrtc; - -import android.annotation.TargetApi; -import android.graphics.Matrix; -import android.media.MediaCodec; -import android.media.MediaCodecInfo; -import android.media.MediaCodecInfo.CodecCapabilities; -import android.media.MediaCodecList; -import android.media.MediaFormat; -import android.opengl.GLES20; -import android.os.Build; -import android.os.Bundle; -import android.support.annotation.Nullable; -import android.view.Surface; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import org.webrtc.EglBase; -import org.webrtc.EglBase14; -import org.webrtc.VideoFrame; - -// Java-side of peerconnection.cc:MediaCodecVideoEncoder. -// This class is an implementation detail of the Java PeerConnection API. -@TargetApi(19) -@SuppressWarnings("deprecation") -@Deprecated -public class MediaCodecVideoEncoder { - // This class is constructed, operated, and destroyed by its C++ incarnation, - // so the class and its methods have non-public visibility. The API this - // class exposes aims to mimic the webrtc::VideoEncoder API as closely as - // possibly to minimize the amount of translation work necessary. - - private static final String TAG = "MediaCodecVideoEncoder"; - - /** - * Create a VideoEncoderFactory that can be injected in the PeerConnectionFactory and replicate - * the old behavior. - */ - public static VideoEncoderFactory createFactory() { - return new DefaultVideoEncoderFactory(new HwEncoderFactory()); - } - - // Factory for creating HW MediaCodecVideoEncoder instances. 
- static class HwEncoderFactory implements VideoEncoderFactory { - private static boolean isSameCodec(VideoCodecInfo codecA, VideoCodecInfo codecB) { - if (!codecA.name.equalsIgnoreCase(codecB.name)) { - return false; - } - return codecA.name.equalsIgnoreCase("H264") - ? H264Utils.isSameH264Profile(codecA.params, codecB.params) - : true; - } - - private static boolean isCodecSupported( - VideoCodecInfo[] supportedCodecs, VideoCodecInfo codec) { - for (VideoCodecInfo supportedCodec : supportedCodecs) { - if (isSameCodec(supportedCodec, codec)) { - return true; - } - } - return false; - } - - private static VideoCodecInfo[] getSupportedHardwareCodecs() { - final List codecs = new ArrayList(); - - if (isVp8HwSupported()) { - Logging.d(TAG, "VP8 HW Encoder supported."); - codecs.add(new VideoCodecInfo("VP8", new HashMap<>())); - } - - if (isVp9HwSupported()) { - Logging.d(TAG, "VP9 HW Encoder supported."); - codecs.add(new VideoCodecInfo("VP9", new HashMap<>())); - } - - // Check if high profile is supported by decoder. If yes, encoder can always - // fall back to baseline profile as a subset as high profile. 
- if (MediaCodecVideoDecoder.isH264HighProfileHwSupported()) { - Logging.d(TAG, "H.264 High Profile HW Encoder supported."); - codecs.add(H264Utils.DEFAULT_H264_HIGH_PROFILE_CODEC); - } - - if (isH264HwSupported()) { - Logging.d(TAG, "H.264 HW Encoder supported."); - codecs.add(H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC); - } - - return codecs.toArray(new VideoCodecInfo[codecs.size()]); - } - - private final VideoCodecInfo[] supportedHardwareCodecs = getSupportedHardwareCodecs(); - - @Override - public VideoCodecInfo[] getSupportedCodecs() { - return supportedHardwareCodecs; - } - - @Nullable - @Override - public VideoEncoder createEncoder(VideoCodecInfo info) { - if (!isCodecSupported(supportedHardwareCodecs, info)) { - Logging.d(TAG, "No HW video encoder for codec " + info.name); - return null; - } - Logging.d(TAG, "Create HW video encoder for " + info.name); - return new WrappedNativeVideoEncoder() { - @Override - public long createNativeVideoEncoder() { - return nativeCreateEncoder( - info, /* hasEgl14Context= */ staticEglBase instanceof EglBase14); - } - - @Override - public boolean isHardwareEncoder() { - return true; - } - }; - } - } - - // Tracks webrtc::VideoCodecType. - public enum VideoCodecType { - VIDEO_CODEC_UNKNOWN, - VIDEO_CODEC_VP8, - VIDEO_CODEC_VP9, - VIDEO_CODEC_AV1, - VIDEO_CODEC_H264; - - @CalledByNative("VideoCodecType") - static VideoCodecType fromNativeIndex(int nativeIndex) { - return values()[nativeIndex]; - } - } - - private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing. - private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait. - private static final int BITRATE_ADJUSTMENT_FPS = 30; - private static final int MAXIMUM_INITIAL_FPS = 30; - private static final double BITRATE_CORRECTION_SEC = 3.0; - // Maximum bitrate correction scale - no more than 4 times. 
- private static final double BITRATE_CORRECTION_MAX_SCALE = 4; - // Amount of correction steps to reach correction maximum scale. - private static final int BITRATE_CORRECTION_STEPS = 20; - // Forced key frame interval - used to reduce color distortions on Qualcomm platform. - private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000; - private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000; - private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000; - - // Active running encoder instance. Set in initEncode() (called from native code) - // and reset to null in release() call. - @Nullable private static MediaCodecVideoEncoder runningInstance; - @Nullable private static MediaCodecVideoEncoderErrorCallback errorCallback; - private static int codecErrors; - // List of disabled codec types - can be set from application. - private static Set hwEncoderDisabledTypes = new HashSet(); - @Nullable private static EglBase staticEglBase; - - @Nullable private Thread mediaCodecThread; - @Nullable private MediaCodec mediaCodec; - private ByteBuffer[] outputBuffers; - @Nullable private EglBase14 eglBase; - private int profile; - private int width; - private int height; - @Nullable private Surface inputSurface; - @Nullable private GlRectDrawer drawer; - - private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8"; - private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9"; - private static final String H264_MIME_TYPE = "video/avc"; - - private static final int VIDEO_AVCProfileHigh = 8; - private static final int VIDEO_AVCLevel3 = 0x100; - - // Type of bitrate adjustment for video encoder. - public enum BitrateAdjustmentType { - // No adjustment - video encoder has no known bitrate problem. 
- NO_ADJUSTMENT, - // Framerate based bitrate adjustment is required - HW encoder does not use frame - // timestamps to calculate frame bitrate budget and instead is relying on initial - // fps configuration assuming that all frames are coming at fixed initial frame rate. - FRAMERATE_ADJUSTMENT, - // Dynamic bitrate adjustment is required - HW encoder used frame timestamps, but actual - // bitrate deviates too much from the target value. - DYNAMIC_ADJUSTMENT - } - - // Should be in sync with webrtc::H264::Profile. - public static enum H264Profile { - CONSTRAINED_BASELINE(0), - BASELINE(1), - MAIN(2), - CONSTRAINED_HIGH(3), - HIGH(4); - - private final int value; - - H264Profile(int value) { - this.value = value; - } - - public int getValue() { - return value; - } - } - - // Class describing supported media codec properties. - private static class MediaCodecProperties { - public final String codecPrefix; - // Minimum Android SDK required for this codec to be used. - public final int minSdk; - // Flag if encoder implementation does not use frame timestamps to calculate frame bitrate - // budget and instead is relying on initial fps configuration assuming that all frames are - // coming at fixed initial frame rate. Bitrate adjustment is required for this case. - public final BitrateAdjustmentType bitrateAdjustmentType; - - MediaCodecProperties( - String codecPrefix, int minSdk, BitrateAdjustmentType bitrateAdjustmentType) { - this.codecPrefix = codecPrefix; - this.minSdk = minSdk; - this.bitrateAdjustmentType = bitrateAdjustmentType; - } - } - - /** - * Set EGL context used by HW encoding. The EGL context must be shared with the video capturer - * and any local render. - */ - public static void setEglContext(EglBase.Context eglContext) { - if (staticEglBase != null) { - Logging.w(TAG, "Egl context already set."); - staticEglBase.release(); - } - staticEglBase = EglBase.create(eglContext); - } - - /** Dispose the EGL context used by HW encoding. 
*/ - public static void disposeEglContext() { - if (staticEglBase != null) { - staticEglBase.release(); - staticEglBase = null; - } - } - - @Nullable - static EglBase.Context getEglContext() { - return staticEglBase == null ? null : staticEglBase.getEglBaseContext(); - } - - // List of supported HW VP8 encoders. - private static final MediaCodecProperties qcomVp8HwProperties = new MediaCodecProperties( - "OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT); - private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties( - "OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT); - private static final MediaCodecProperties intelVp8HwProperties = new MediaCodecProperties( - "OMX.Intel.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.NO_ADJUSTMENT); - private static MediaCodecProperties[] vp8HwList() { - final ArrayList supported_codecs = new ArrayList(); - supported_codecs.add(qcomVp8HwProperties); - supported_codecs.add(exynosVp8HwProperties); - if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTC-IntelVP8").equals("Enabled")) { - supported_codecs.add(intelVp8HwProperties); - } - return supported_codecs.toArray(new MediaCodecProperties[supported_codecs.size()]); - } - - // List of supported HW VP9 encoders. - private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties( - "OMX.qcom.", Build.VERSION_CODES.N, BitrateAdjustmentType.NO_ADJUSTMENT); - private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties( - "OMX.Exynos.", Build.VERSION_CODES.N, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT); - private static final MediaCodecProperties[] vp9HwList = - new MediaCodecProperties[] {qcomVp9HwProperties, exynosVp9HwProperties}; - - // List of supported HW H.264 encoders. 
- private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties( - "OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT); - private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties( - "OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT); - private static final MediaCodecProperties mediatekH264HwProperties = new MediaCodecProperties( - "OMX.MTK.", Build.VERSION_CODES.O_MR1, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT); - private static final MediaCodecProperties[] h264HwList() { - final ArrayList supported_codecs = new ArrayList(); - supported_codecs.add(qcomH264HwProperties); - supported_codecs.add(exynosH264HwProperties); - if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTC-MediaTekH264").equals("Enabled")) { - supported_codecs.add(mediatekH264HwProperties); - } - return supported_codecs.toArray(new MediaCodecProperties[supported_codecs.size()]); - } - - // List of supported HW H.264 high profile encoders. - private static final MediaCodecProperties exynosH264HighProfileHwProperties = - new MediaCodecProperties( - "OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT); - private static final MediaCodecProperties[] h264HighProfileHwList = - new MediaCodecProperties[] {exynosH264HighProfileHwProperties}; - - // List of devices with poor H.264 encoder quality. - // HW H.264 encoder on below devices has poor bitrate control - actual - // bitrates deviates a lot from the target value. 
- private static final String[] H264_HW_EXCEPTION_MODELS = - new String[] {"SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"}; - - // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined - // in OMX_Video.h - private static final int VIDEO_ControlRateConstant = 2; - // NV12 color format supported by QCOM codec, but not declared in MediaCodec - - // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h - private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; - // Allowable color formats supported by codec - in order of preference. - private static final int[] supportedColorList = {CodecCapabilities.COLOR_FormatYUV420Planar, - CodecCapabilities.COLOR_FormatYUV420SemiPlanar, - CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, - COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; - private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface}; - private VideoCodecType type; - private int colorFormat; - - // Variables used for dynamic bitrate adjustment. - private BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT; - private double bitrateAccumulator; - private double bitrateAccumulatorMax; - private double bitrateObservationTimeMs; - private int bitrateAdjustmentScaleExp; - private int targetBitrateBps; - private int targetFps; - - // Interval in ms to force key frame generation. Used to reduce the time of color distortions - // happened sometime when using Qualcomm video encoder. - private long forcedKeyFrameMs; - private long lastKeyFrameMs; - - // SPS and PPS NALs (Config frame) for H.264. - @Nullable private ByteBuffer configData; - - // MediaCodec error handler - invoked when critical error happens which may prevent - // further use of media codec API. Now it means that one of media codec instances - // is hanging and can no longer be used in the next call. 
- public static interface MediaCodecVideoEncoderErrorCallback { - void onMediaCodecVideoEncoderCriticalError(int codecErrors); - } - - public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) { - Logging.d(TAG, "Set error callback"); - MediaCodecVideoEncoder.errorCallback = errorCallback; - } - - // Functions to disable HW encoding - can be called from applications for platforms - // which have known HW decoding problems. - public static void disableVp8HwCodec() { - Logging.w(TAG, "VP8 encoding is disabled by application."); - hwEncoderDisabledTypes.add(VP8_MIME_TYPE); - } - - public static void disableVp9HwCodec() { - Logging.w(TAG, "VP9 encoding is disabled by application."); - hwEncoderDisabledTypes.add(VP9_MIME_TYPE); - } - - public static void disableH264HwCodec() { - Logging.w(TAG, "H.264 encoding is disabled by application."); - hwEncoderDisabledTypes.add(H264_MIME_TYPE); - } - - // Functions to query if HW encoding is supported. - public static boolean isVp8HwSupported() { - return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) - && (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList) != null); - } - - public static @Nullable EncoderProperties vp8HwEncoderProperties() { - if (hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)) { - return null; - } else { - return findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList); - } - } - - public static boolean isVp9HwSupported() { - return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) - && (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null); - } - - public static boolean isH264HwSupported() { - return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) - && (findHwEncoder(H264_MIME_TYPE, h264HwList(), supportedColorList) != null); - } - - public static boolean isH264HighProfileHwSupported() { - return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) - && (findHwEncoder(H264_MIME_TYPE, h264HighProfileHwList, supportedColorList) != null); - } - - public 
static boolean isVp8HwSupportedUsingTextures() { - return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) - && (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedSurfaceColorList) != null); - } - - public static boolean isVp9HwSupportedUsingTextures() { - return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) - && (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null); - } - - public static boolean isH264HwSupportedUsingTextures() { - return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) - && (findHwEncoder(H264_MIME_TYPE, h264HwList(), supportedSurfaceColorList) != null); - } - - // Helper struct for findHwEncoder() below. - public static class EncoderProperties { - public EncoderProperties( - String codecName, int colorFormat, BitrateAdjustmentType bitrateAdjustmentType) { - this.codecName = codecName; - this.colorFormat = colorFormat; - this.bitrateAdjustmentType = bitrateAdjustmentType; - } - public final String codecName; // OpenMax component name for HW codec. - public final int colorFormat; // Color format supported by codec. - public final BitrateAdjustmentType bitrateAdjustmentType; // Bitrate adjustment type - } - - private static @Nullable EncoderProperties findHwEncoder( - String mime, MediaCodecProperties[] supportedHwCodecProperties, int[] colorList) { - // MediaCodec.setParameters is missing for JB and below, so bitrate - // can not be adjusted dynamically. - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { - return null; - } - - // Check if device is in H.264 exception list. 
- if (mime.equals(H264_MIME_TYPE)) { - List exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS); - if (exceptionModels.contains(Build.MODEL)) { - Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder."); - return null; - } - } - - for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { - MediaCodecInfo info = null; - try { - info = MediaCodecList.getCodecInfoAt(i); - } catch (IllegalArgumentException e) { - Logging.e(TAG, "Cannot retrieve encoder codec info", e); - } - if (info == null || !info.isEncoder()) { - continue; - } - String name = null; - for (String mimeType : info.getSupportedTypes()) { - if (mimeType.equals(mime)) { - name = info.getName(); - break; - } - } - if (name == null) { - continue; // No HW support in this codec; try the next one. - } - Logging.v(TAG, "Found candidate encoder " + name); - - // Check if this is supported HW encoder. - boolean supportedCodec = false; - BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT; - for (MediaCodecProperties codecProperties : supportedHwCodecProperties) { - if (name.startsWith(codecProperties.codecPrefix)) { - if (Build.VERSION.SDK_INT < codecProperties.minSdk) { - Logging.w( - TAG, "Codec " + name + " is disabled due to SDK version " + Build.VERSION.SDK_INT); - continue; - } - if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) { - bitrateAdjustmentType = codecProperties.bitrateAdjustmentType; - Logging.w( - TAG, "Codec " + name + " requires bitrate adjustment: " + bitrateAdjustmentType); - } - supportedCodec = true; - break; - } - } - if (!supportedCodec) { - continue; - } - - // Check if HW codec supports known color format. 
- CodecCapabilities capabilities; - try { - capabilities = info.getCapabilitiesForType(mime); - } catch (IllegalArgumentException e) { - Logging.e(TAG, "Cannot retrieve encoder capabilities", e); - continue; - } - for (int colorFormat : capabilities.colorFormats) { - Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat)); - } - - for (int supportedColorFormat : colorList) { - for (int codecColorFormat : capabilities.colorFormats) { - if (codecColorFormat == supportedColorFormat) { - // Found supported HW encoder. - Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + ". Color: 0x" - + Integer.toHexString(codecColorFormat) + ". Bitrate adjustment: " - + bitrateAdjustmentType); - return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType); - } - } - } - } - return null; // No HW encoder. - } - - @CalledByNative - MediaCodecVideoEncoder() {} - - private void checkOnMediaCodecThread() { - if (mediaCodecThread.getId() != Thread.currentThread().getId()) { - throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread - + " but is now called on " + Thread.currentThread()); - } - } - - public static void printStackTrace() { - if (runningInstance != null && runningInstance.mediaCodecThread != null) { - StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace(); - if (mediaCodecStackTraces.length > 0) { - Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:"); - for (StackTraceElement stackTrace : mediaCodecStackTraces) { - Logging.d(TAG, stackTrace.toString()); - } - } - } - } - - static @Nullable MediaCodec createByCodecName(String codecName) { - try { - // In the L-SDK this call can throw IOException so in order to work in - // both cases catch an exception. 
- return MediaCodec.createByCodecName(codecName); - } catch (Exception e) { - return null; - } - } - - @CalledByNativeUnchecked - boolean initEncode(VideoCodecType type, int profile, int width, int height, int kbps, int fps, - boolean useSurface) { - Logging.d(TAG, - "Java initEncode: " + type + ". Profile: " + profile + " : " + width + " x " + height - + ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface); - - this.profile = profile; - this.width = width; - this.height = height; - if (mediaCodecThread != null) { - throw new RuntimeException("Forgot to release()?"); - } - EncoderProperties properties = null; - String mime = null; - int keyFrameIntervalSec = 0; - boolean configureH264HighProfile = false; - if (type == VideoCodecType.VIDEO_CODEC_VP8) { - mime = VP8_MIME_TYPE; - properties = findHwEncoder( - VP8_MIME_TYPE, vp8HwList(), useSurface ? supportedSurfaceColorList : supportedColorList); - keyFrameIntervalSec = 100; - } else if (type == VideoCodecType.VIDEO_CODEC_VP9) { - mime = VP9_MIME_TYPE; - properties = findHwEncoder( - VP9_MIME_TYPE, vp9HwList, useSurface ? supportedSurfaceColorList : supportedColorList); - keyFrameIntervalSec = 100; - } else if (type == VideoCodecType.VIDEO_CODEC_H264) { - mime = H264_MIME_TYPE; - properties = findHwEncoder(H264_MIME_TYPE, h264HwList(), - useSurface ? supportedSurfaceColorList : supportedColorList); - if (profile == H264Profile.CONSTRAINED_HIGH.getValue()) { - EncoderProperties h264HighProfileProperties = findHwEncoder(H264_MIME_TYPE, - h264HighProfileHwList, useSurface ? supportedSurfaceColorList : supportedColorList); - if (h264HighProfileProperties != null) { - Logging.d(TAG, "High profile H.264 encoder supported."); - configureH264HighProfile = true; - } else { - Logging.d(TAG, "High profile H.264 encoder requested, but not supported. 
Use baseline."); - } - } - keyFrameIntervalSec = 20; - } else { - throw new RuntimeException("initEncode: Non-supported codec " + type); - } - if (properties == null) { - throw new RuntimeException("Can not find HW encoder for " + type); - } - runningInstance = this; // Encoder is now running and can be queried for stack traces. - colorFormat = properties.colorFormat; - bitrateAdjustmentType = properties.bitrateAdjustmentType; - if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT) { - fps = BITRATE_ADJUSTMENT_FPS; - } else { - fps = Math.min(fps, MAXIMUM_INITIAL_FPS); - } - - forcedKeyFrameMs = 0; - lastKeyFrameMs = -1; - if (type == VideoCodecType.VIDEO_CODEC_VP8 - && properties.codecName.startsWith(qcomVp8HwProperties.codecPrefix)) { - if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP - || Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) { - forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS; - } else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) { - forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS; - } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) { - forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS; - } - } - - Logging.d(TAG, "Color format: " + colorFormat + ". Bitrate adjustment: " + bitrateAdjustmentType - + ". Key frame interval: " + forcedKeyFrameMs + " . 
Initial fps: " + fps); - targetBitrateBps = 1000 * kbps; - targetFps = fps; - bitrateAccumulatorMax = targetBitrateBps / 8.0; - bitrateAccumulator = 0; - bitrateObservationTimeMs = 0; - bitrateAdjustmentScaleExp = 0; - - mediaCodecThread = Thread.currentThread(); - try { - MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); - format.setInteger(MediaFormat.KEY_BIT_RATE, targetBitrateBps); - format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); - format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); - format.setInteger(MediaFormat.KEY_FRAME_RATE, targetFps); - format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); - if (configureH264HighProfile) { - format.setInteger("profile", VIDEO_AVCProfileHigh); - format.setInteger("level", VIDEO_AVCLevel3); - } - Logging.d(TAG, " Format: " + format); - mediaCodec = createByCodecName(properties.codecName); - this.type = type; - if (mediaCodec == null) { - Logging.e(TAG, "Can not create media encoder"); - release(); - return false; - } - mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); - - if (useSurface) { - eglBase = - EglBase.createEgl14((EglBase14.Context) getEglContext(), EglBase.CONFIG_RECORDABLE); - // Create an input surface and keep a reference since we must release the surface when done. 
- inputSurface = mediaCodec.createInputSurface(); - eglBase.createSurface(inputSurface); - drawer = new GlRectDrawer(); - } - mediaCodec.start(); - outputBuffers = mediaCodec.getOutputBuffers(); - Logging.d(TAG, "Output buffers: " + outputBuffers.length); - - } catch (IllegalStateException e) { - Logging.e(TAG, "initEncode failed", e); - release(); - return false; - } - return true; - } - - @CalledByNativeUnchecked - ByteBuffer[] getInputBuffers() { - ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers(); - Logging.d(TAG, "Input buffers: " + inputBuffers.length); - return inputBuffers; - } - - void checkKeyFrameRequired(boolean requestedKeyFrame, long presentationTimestampUs) { - long presentationTimestampMs = (presentationTimestampUs + 500) / 1000; - if (lastKeyFrameMs < 0) { - lastKeyFrameMs = presentationTimestampMs; - } - boolean forcedKeyFrame = false; - if (!requestedKeyFrame && forcedKeyFrameMs > 0 - && presentationTimestampMs > lastKeyFrameMs + forcedKeyFrameMs) { - forcedKeyFrame = true; - } - if (requestedKeyFrame || forcedKeyFrame) { - // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could - // indicate this in queueInputBuffer() below and guarantee _this_ frame - // be encoded as a key frame, but sadly that flag is ignored. Instead, - // we request a key frame "soon". 
- if (requestedKeyFrame) { - Logging.d(TAG, "Sync frame request"); - } else { - Logging.d(TAG, "Sync frame forced"); - } - Bundle b = new Bundle(); - b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); - mediaCodec.setParameters(b); - lastKeyFrameMs = presentationTimestampMs; - } - } - - @CalledByNativeUnchecked - boolean encodeBuffer( - boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) { - checkOnMediaCodecThread(); - try { - checkKeyFrameRequired(isKeyframe, presentationTimestampUs); - mediaCodec.queueInputBuffer(inputBuffer, 0, size, presentationTimestampUs, 0); - return true; - } catch (IllegalStateException e) { - Logging.e(TAG, "encodeBuffer failed", e); - return false; - } - } - - /** - * Encodes a new style VideoFrame. |bufferIndex| is -1 if we are not encoding in surface mode. - */ - @CalledByNativeUnchecked - boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex, - long presentationTimestampUs) { - checkOnMediaCodecThread(); - try { - checkKeyFrameRequired(isKeyframe, presentationTimestampUs); - - VideoFrame.Buffer buffer = frame.getBuffer(); - if (buffer instanceof VideoFrame.TextureBuffer) { - VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer; - eglBase.makeCurrent(); - // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, - // but it's a workaround for bug webrtc:5147. 
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - VideoFrameDrawer.drawTexture(drawer, textureBuffer, new Matrix() /* renderMatrix */, width, - height, 0 /* viewportX */, 0 /* viewportY */, width, height); - eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); - } else { - VideoFrame.I420Buffer i420Buffer = buffer.toI420(); - final int chromaHeight = (height + 1) / 2; - final ByteBuffer dataY = i420Buffer.getDataY(); - final ByteBuffer dataU = i420Buffer.getDataU(); - final ByteBuffer dataV = i420Buffer.getDataV(); - final int strideY = i420Buffer.getStrideY(); - final int strideU = i420Buffer.getStrideU(); - final int strideV = i420Buffer.getStrideV(); - if (dataY.capacity() < strideY * height) { - throw new RuntimeException("Y-plane buffer size too small."); - } - if (dataU.capacity() < strideU * chromaHeight) { - throw new RuntimeException("U-plane buffer size too small."); - } - if (dataV.capacity() < strideV * chromaHeight) { - throw new RuntimeException("V-plane buffer size too small."); - } - nativeFillInputBuffer( - nativeEncoder, bufferIndex, dataY, strideY, dataU, strideU, dataV, strideV); - i420Buffer.release(); - // I420 consists of one full-resolution and two half-resolution planes. - // 1 + 1 / 4 + 1 / 4 = 3 / 2 - int yuvSize = width * height * 3 / 2; - mediaCodec.queueInputBuffer(bufferIndex, 0, yuvSize, presentationTimestampUs, 0); - } - return true; - } catch (RuntimeException e) { - Logging.e(TAG, "encodeFrame failed", e); - return false; - } - } - - @CalledByNativeUnchecked - void release() { - Logging.d(TAG, "Java releaseEncoder"); - checkOnMediaCodecThread(); - - class CaughtException { - Exception e; - } - final CaughtException caughtException = new CaughtException(); - boolean stopHung = false; - - if (mediaCodec != null) { - // Run Mediacodec stop() and release() on separate thread since sometime - // Mediacodec.stop() may hang. 
- final CountDownLatch releaseDone = new CountDownLatch(1); - - Runnable runMediaCodecRelease = new Runnable() { - @Override - public void run() { - Logging.d(TAG, "Java releaseEncoder on release thread"); - try { - mediaCodec.stop(); - } catch (Exception e) { - Logging.e(TAG, "Media encoder stop failed", e); - } - try { - mediaCodec.release(); - } catch (Exception e) { - Logging.e(TAG, "Media encoder release failed", e); - caughtException.e = e; - } - Logging.d(TAG, "Java releaseEncoder on release thread done"); - - releaseDone.countDown(); - } - }; - new Thread(runMediaCodecRelease).start(); - - if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { - Logging.e(TAG, "Media encoder release timeout"); - stopHung = true; - } - - mediaCodec = null; - } - - mediaCodecThread = null; - if (drawer != null) { - drawer.release(); - drawer = null; - } - if (eglBase != null) { - eglBase.release(); - eglBase = null; - } - if (inputSurface != null) { - inputSurface.release(); - inputSurface = null; - } - runningInstance = null; - - if (stopHung) { - codecErrors++; - if (errorCallback != null) { - Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors); - errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors); - } - throw new RuntimeException("Media encoder release timeout."); - } - - // Re-throw any runtime exception caught inside the other thread. Since this is an invoke, add - // stack trace for the waiting thread as well. 
- if (caughtException.e != null) { - final RuntimeException runtimeException = new RuntimeException(caughtException.e); - runtimeException.setStackTrace(ThreadUtils.concatStackTraces( - caughtException.e.getStackTrace(), runtimeException.getStackTrace())); - throw runtimeException; - } - - Logging.d(TAG, "Java releaseEncoder done"); - } - - @CalledByNativeUnchecked - private boolean setRates(int kbps, int frameRate) { - checkOnMediaCodecThread(); - - int codecBitrateBps = 1000 * kbps; - if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) { - bitrateAccumulatorMax = codecBitrateBps / 8.0; - if (targetBitrateBps > 0 && codecBitrateBps < targetBitrateBps) { - // Rescale the accumulator level if the accumulator max decreases - bitrateAccumulator = bitrateAccumulator * codecBitrateBps / targetBitrateBps; - } - } - targetBitrateBps = codecBitrateBps; - targetFps = frameRate; - - // Adjust actual encoder bitrate based on bitrate adjustment type. - if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) { - codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps; - Logging.v(TAG, - "setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps); - } else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) { - Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: " - + bitrateAdjustmentScaleExp); - if (bitrateAdjustmentScaleExp != 0) { - codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp)); - } - } else { - Logging.v(TAG, "setRates: " + kbps + " kbps. 
Fps: " + targetFps); - } - - try { - Bundle params = new Bundle(); - params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, codecBitrateBps); - mediaCodec.setParameters(params); - return true; - } catch (IllegalStateException e) { - Logging.e(TAG, "setRates failed", e); - return false; - } - } - - // Dequeue an input buffer and return its index, -1 if no input buffer is - // available, or -2 if the codec is no longer operative. - @CalledByNativeUnchecked - int dequeueInputBuffer() { - checkOnMediaCodecThread(); - try { - return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT); - } catch (IllegalStateException e) { - Logging.e(TAG, "dequeueIntputBuffer failed", e); - return -2; - } - } - - // Helper struct for dequeueOutputBuffer() below. - static class OutputBufferInfo { - public OutputBufferInfo( - int index, ByteBuffer buffer, boolean isKeyFrame, long presentationTimestampUs) { - this.index = index; - this.buffer = buffer; - this.isKeyFrame = isKeyFrame; - this.presentationTimestampUs = presentationTimestampUs; - } - - public final int index; - public final ByteBuffer buffer; - public final boolean isKeyFrame; - public final long presentationTimestampUs; - - @CalledByNative("OutputBufferInfo") - int getIndex() { - return index; - } - - @CalledByNative("OutputBufferInfo") - ByteBuffer getBuffer() { - return buffer; - } - - @CalledByNative("OutputBufferInfo") - boolean isKeyFrame() { - return isKeyFrame; - } - - @CalledByNative("OutputBufferInfo") - long getPresentationTimestampUs() { - return presentationTimestampUs; - } - } - - // Dequeue and return an output buffer, or null if no output is ready. Return - // a fake OutputBufferInfo with index -1 if the codec is no longer operable. 
- @Nullable - @CalledByNativeUnchecked - OutputBufferInfo dequeueOutputBuffer() { - checkOnMediaCodecThread(); - try { - MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); - int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT); - // Check if this is config frame and save configuration data. - if (result >= 0) { - boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0; - if (isConfigFrame) { - Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size); - configData = ByteBuffer.allocateDirect(info.size); - outputBuffers[result].position(info.offset); - outputBuffers[result].limit(info.offset + info.size); - configData.put(outputBuffers[result]); - // Log few SPS header bytes to check profile and level. - String spsData = ""; - for (int i = 0; i < (info.size < 8 ? info.size : 8); i++) { - spsData += Integer.toHexString(configData.get(i) & 0xff) + " "; - } - Logging.d(TAG, spsData); - // Release buffer back. - mediaCodec.releaseOutputBuffer(result, false); - // Query next output. - result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT); - } - } - if (result >= 0) { - // MediaCodec doesn't care about Buffer position/remaining/etc so we can - // mess with them to get a slice and avoid having to pass extra - // (BufferInfo-related) parameters back to C++. - ByteBuffer outputBuffer = outputBuffers[result].duplicate(); - outputBuffer.position(info.offset); - outputBuffer.limit(info.offset + info.size); - reportEncodedFrame(info.size); - - // Check key frame flag. 
- boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0; - if (isKeyFrame) { - Logging.d(TAG, "Sync frame generated"); - } - if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) { - Logging.d(TAG, "Appending config frame of size " + configData.capacity() - + " to output buffer with offset " + info.offset + ", size " + info.size); - // For H.264 key frame append SPS and PPS NALs at the start - ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size); - configData.rewind(); - keyFrameBuffer.put(configData); - keyFrameBuffer.put(outputBuffer); - keyFrameBuffer.position(0); - return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs); - } else { - return new OutputBufferInfo( - result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs); - } - } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { - outputBuffers = mediaCodec.getOutputBuffers(); - return dequeueOutputBuffer(); - } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { - return dequeueOutputBuffer(); - } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) { - return null; - } - throw new RuntimeException("dequeueOutputBuffer: " + result); - } catch (IllegalStateException e) { - Logging.e(TAG, "dequeueOutputBuffer failed", e); - return new OutputBufferInfo(-1, null, false, -1); - } - } - - private double getBitrateScale(int bitrateAdjustmentScaleExp) { - return Math.pow(BITRATE_CORRECTION_MAX_SCALE, - (double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS); - } - - private void reportEncodedFrame(int size) { - if (targetFps == 0 || bitrateAdjustmentType != BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) { - return; - } - - // Accumulate the difference between actial and expected frame sizes. 
- double expectedBytesPerFrame = targetBitrateBps / (8.0 * targetFps); - bitrateAccumulator += (size - expectedBytesPerFrame); - bitrateObservationTimeMs += 1000.0 / targetFps; - - // Put a cap on the accumulator, i.e., don't let it grow beyond some level to avoid - // using too old data for bitrate adjustment. - double bitrateAccumulatorCap = BITRATE_CORRECTION_SEC * bitrateAccumulatorMax; - bitrateAccumulator = Math.min(bitrateAccumulator, bitrateAccumulatorCap); - bitrateAccumulator = Math.max(bitrateAccumulator, -bitrateAccumulatorCap); - - // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much - // form the target value. - if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) { - Logging.d(TAG, "Acc: " + (int) bitrateAccumulator + ". Max: " + (int) bitrateAccumulatorMax - + ". ExpScale: " + bitrateAdjustmentScaleExp); - boolean bitrateAdjustmentScaleChanged = false; - if (bitrateAccumulator > bitrateAccumulatorMax) { - // Encoder generates too high bitrate - need to reduce the scale. - int bitrateAdjustmentInc = (int) (bitrateAccumulator / bitrateAccumulatorMax + 0.5); - bitrateAdjustmentScaleExp -= bitrateAdjustmentInc; - bitrateAccumulator = bitrateAccumulatorMax; - bitrateAdjustmentScaleChanged = true; - } else if (bitrateAccumulator < -bitrateAccumulatorMax) { - // Encoder generates too low bitrate - need to increase the scale. - int bitrateAdjustmentInc = (int) (-bitrateAccumulator / bitrateAccumulatorMax + 0.5); - bitrateAdjustmentScaleExp += bitrateAdjustmentInc; - bitrateAccumulator = -bitrateAccumulatorMax; - bitrateAdjustmentScaleChanged = true; - } - if (bitrateAdjustmentScaleChanged) { - bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS); - bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS); - Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp + ". 
Value: " - + getBitrateScale(bitrateAdjustmentScaleExp)); - setRates(targetBitrateBps / 1000, targetFps); - } - bitrateObservationTimeMs = 0; - } - } - - // Release a dequeued output buffer back to the codec for re-use. Return - // false if the codec is no longer operable. - @CalledByNativeUnchecked - boolean releaseOutputBuffer(int index) { - checkOnMediaCodecThread(); - try { - mediaCodec.releaseOutputBuffer(index, false); - return true; - } catch (IllegalStateException e) { - Logging.e(TAG, "releaseOutputBuffer failed", e); - return false; - } - } - - @CalledByNative - int getColorFormat() { - return colorFormat; - } - - @CalledByNative - static boolean isTextureBuffer(VideoFrame.Buffer buffer) { - return buffer instanceof VideoFrame.TextureBuffer; - } - - /** Fills an inputBuffer with the given index with data from the byte buffers. */ - private static native void nativeFillInputBuffer(long encoder, int inputBuffer, ByteBuffer dataY, - int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV); - private static native long nativeCreateEncoder(VideoCodecInfo info, boolean hasEgl14Context); -} diff --git a/sdk/android/api/org/webrtc/MediaSource.java b/sdk/android/api/org/webrtc/MediaSource.java index 0b19e1a775..9245e3e2eb 100644 --- a/sdk/android/api/org/webrtc/MediaSource.java +++ b/sdk/android/api/org/webrtc/MediaSource.java @@ -25,9 +25,11 @@ static State fromNativeIndex(int nativeIndex) { } } + private final RefCountDelegate refCountDelegate; private long nativeSource; public MediaSource(long nativeSource) { + refCountDelegate = new RefCountDelegate(() -> JniCommon.nativeReleaseRef(nativeSource)); this.nativeSource = nativeSource; } @@ -38,7 +40,7 @@ public State state() { public void dispose() { checkMediaSourceExists(); - JniCommon.nativeReleaseRef(nativeSource); + refCountDelegate.release(); nativeSource = 0; } @@ -48,6 +50,20 @@ protected long getNativeMediaSource() { return nativeSource; } + /** + * Runs code in {@code runnable} holding 
a reference to the media source. If the object has + * already been released, does nothing. + */ + void runWithReference(Runnable runnable) { + if (refCountDelegate.safeRetain()) { + try { + runnable.run(); + } finally { + refCountDelegate.release(); + } + } + } + private void checkMediaSourceExists() { if (nativeSource == 0) { throw new IllegalStateException("MediaSource has been disposed."); diff --git a/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java b/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java deleted file mode 100644 index c16a37a6d7..0000000000 --- a/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc; - -/** - * Factory for creating webrtc::MediaTransportFactory instances. - */ -public interface MediaTransportFactoryFactory { - /** - * Dynamically allocates a webrtc::MediaTransportFactory instance and returns a pointer to it. - * The caller takes ownership of the object. - */ - public long createNativeMediaTransportFactory(); -} diff --git a/sdk/android/api/org/webrtc/NetworkChangeDetector.java b/sdk/android/api/org/webrtc/NetworkChangeDetector.java new file mode 100644 index 0000000000..d9e7b736f1 --- /dev/null +++ b/sdk/android/api/org/webrtc/NetworkChangeDetector.java @@ -0,0 +1,116 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import android.support.annotation.Nullable; +import java.util.List; + +/** Interface for detecting network changes */ +public interface NetworkChangeDetector { + // java equivalent of c++ android_network_monitor.h / NetworkType. + public static enum ConnectionType { + CONNECTION_UNKNOWN, + CONNECTION_ETHERNET, + CONNECTION_WIFI, + CONNECTION_5G, + CONNECTION_4G, + CONNECTION_3G, + CONNECTION_2G, + CONNECTION_UNKNOWN_CELLULAR, + CONNECTION_BLUETOOTH, + CONNECTION_VPN, + CONNECTION_NONE + } + + public static class IPAddress { + public final byte[] address; + + public IPAddress(byte[] address) { + this.address = address; + } + + @CalledByNative("IPAddress") + private byte[] getAddress() { + return address; + } + } + + /** Java version of NetworkMonitor.NetworkInformation */ + public static class NetworkInformation { + public final String name; + public final ConnectionType type; + // Used to specify the underlying network type if the type is CONNECTION_VPN. 
+ public final ConnectionType underlyingTypeForVpn; + public final long handle; + public final IPAddress[] ipAddresses; + + public NetworkInformation(String name, ConnectionType type, ConnectionType underlyingTypeForVpn, + long handle, IPAddress[] addresses) { + this.name = name; + this.type = type; + this.underlyingTypeForVpn = underlyingTypeForVpn; + this.handle = handle; + this.ipAddresses = addresses; + } + + @CalledByNative("NetworkInformation") + private IPAddress[] getIpAddresses() { + return ipAddresses; + } + + @CalledByNative("NetworkInformation") + private ConnectionType getConnectionType() { + return type; + } + + @CalledByNative("NetworkInformation") + private ConnectionType getUnderlyingConnectionTypeForVpn() { + return underlyingTypeForVpn; + } + + @CalledByNative("NetworkInformation") + private long getHandle() { + return handle; + } + + @CalledByNative("NetworkInformation") + private String getName() { + return name; + } + }; + + /** Observer interface by which observer is notified of network changes. */ + public static interface Observer { + /** Called when default network changes. */ + public void onConnectionTypeChanged(ConnectionType newConnectionType); + + public void onNetworkConnect(NetworkInformation networkInfo); + + public void onNetworkDisconnect(long networkHandle); + + /** + * Called when network preference change for a (list of) connection type(s). (e.g WIFI) is + * |NOT_PREFERRED| or |NEUTRAL|. + * + *

note: |types| is a list of ConnectionTypes, so that all cellular types can be modified in + * one call. + */ + public void onNetworkPreference(List types, /*@NetworkPreference*/ int preference); + } + + public ConnectionType getCurrentConnectionType(); + + public boolean supportNetworkCallback(); + + @Nullable public List getActiveNetworkList(); + + public void destroy(); +} diff --git a/sdk/android/api/org/webrtc/NetworkChangeDetectorFactory.java b/sdk/android/api/org/webrtc/NetworkChangeDetectorFactory.java new file mode 100644 index 0000000000..14e98b2387 --- /dev/null +++ b/sdk/android/api/org/webrtc/NetworkChangeDetectorFactory.java @@ -0,0 +1,17 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import android.content.Context; + +public interface NetworkChangeDetectorFactory { + public NetworkChangeDetector create(NetworkChangeDetector.Observer observer, Context context); +} diff --git a/sdk/android/api/org/webrtc/NetworkMonitor.java b/sdk/android/api/org/webrtc/NetworkMonitor.java index 364bb4d0b5..566302b0b4 100644 --- a/sdk/android/api/org/webrtc/NetworkMonitor.java +++ b/sdk/android/api/org/webrtc/NetworkMonitor.java @@ -10,14 +10,12 @@ package org.webrtc; -import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID; - import android.content.Context; import android.os.Build; import android.support.annotation.Nullable; import java.util.ArrayList; import java.util.List; -import org.webrtc.NetworkMonitorAutoDetect; +import org.webrtc.NetworkChangeDetector; /** * Borrowed from Chromium's @@ -32,7 +30,7 @@ public class NetworkMonitor { * Alerted when the connection type of the network changes. The alert is fired on the UI thread. */ public interface NetworkObserver { - public void onConnectionTypeChanged(NetworkMonitorAutoDetect.ConnectionType connectionType); + public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType connectionType); } private static final String TAG = "NetworkMonitor"; @@ -43,24 +41,43 @@ private static class InstanceHolder { static final NetworkMonitor instance = new NetworkMonitor(); } + // Factory for creating NetworkChangeDetector. + private NetworkChangeDetectorFactory networkChangeDetectorFactory = + new NetworkChangeDetectorFactory() { + @Override + public NetworkChangeDetector create( + NetworkChangeDetector.Observer observer, Context context) { + return new NetworkMonitorAutoDetect(observer, context); + } + }; + // Native observers of the connection type changes. private final ArrayList nativeNetworkObservers; // Java observers of the connection type changes. 
private final ArrayList networkObservers; - private final Object autoDetectLock = new Object(); + private final Object networkChangeDetectorLock = new Object(); // Object that detects the connection type changes and brings up mobile networks. - @Nullable private NetworkMonitorAutoDetect autoDetect; + @Nullable private NetworkChangeDetector networkChangeDetector; // Also guarded by autoDetectLock. private int numObservers; - private volatile NetworkMonitorAutoDetect.ConnectionType currentConnectionType; + private volatile NetworkChangeDetector.ConnectionType currentConnectionType; private NetworkMonitor() { nativeNetworkObservers = new ArrayList(); networkObservers = new ArrayList(); numObservers = 0; - currentConnectionType = NetworkMonitorAutoDetect.ConnectionType.CONNECTION_UNKNOWN; + currentConnectionType = NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN; + } + + /** + * Set the factory that will be used to create the network change detector. + * Needs to be called before the monitoring is starts. + */ + public void setNetworkChangeDetectorFactory(NetworkChangeDetectorFactory factory) { + assertIsTrue(numObservers == 0); + this.networkChangeDetectorFactory = factory; } // TODO(sakal): Remove once downstream dependencies have been updated. @@ -85,13 +102,12 @@ private static void assertIsTrue(boolean condition) { * CHANGE_NETWORK_STATE permission. 
*/ public void startMonitoring(Context applicationContext) { - synchronized (autoDetectLock) { + synchronized (networkChangeDetectorLock) { ++numObservers; - if (autoDetect == null) { - autoDetect = createAutoDetect(applicationContext); + if (networkChangeDetector == null) { + networkChangeDetector = createNetworkChangeDetector(applicationContext); } - currentConnectionType = - NetworkMonitorAutoDetect.getConnectionType(autoDetect.getCurrentNetworkState()); + currentConnectionType = networkChangeDetector.getCurrentConnectionType(); } } @@ -122,12 +138,15 @@ private void startMonitoring(@Nullable Context applicationContext, long nativeOb notifyObserversOfConnectionTypeChange(currentConnectionType); } - /** Stop network monitoring. If no one is monitoring networks, destroy and reset autoDetect. */ + /** + * Stop network monitoring. If no one is monitoring networks, destroy and reset + * networkChangeDetector. + */ public void stopMonitoring() { - synchronized (autoDetectLock) { + synchronized (networkChangeDetectorLock) { if (--numObservers == 0) { - autoDetect.destroy(); - autoDetect = null; + networkChangeDetector.destroy(); + networkChangeDetector = null; } } } @@ -144,8 +163,8 @@ private void stopMonitoring(long nativeObserver) { // Returns true if network binding is supported on this platform. 
@CalledByNative private boolean networkBindingSupported() { - synchronized (autoDetectLock) { - return autoDetect != null && autoDetect.supportNetworkCallback(); + synchronized (networkChangeDetectorLock) { + return networkChangeDetector != null && networkChangeDetector.supportNetworkCallback(); } } @@ -154,27 +173,19 @@ private static int androidSdkInt() { return Build.VERSION.SDK_INT; } - private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() { + private NetworkChangeDetector.ConnectionType getCurrentConnectionType() { return currentConnectionType; } - private long getCurrentDefaultNetId() { - synchronized (autoDetectLock) { - return autoDetect == null ? INVALID_NET_ID : autoDetect.getDefaultNetId(); - } - } - - private NetworkMonitorAutoDetect createAutoDetect(Context appContext) { - return new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() { - + private NetworkChangeDetector createNetworkChangeDetector(Context appContext) { + return networkChangeDetectorFactory.create(new NetworkChangeDetector.Observer() { @Override - public void onConnectionTypeChanged( - NetworkMonitorAutoDetect.ConnectionType newConnectionType) { + public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType newConnectionType) { updateCurrentConnectionType(newConnectionType); } @Override - public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation networkInfo) { + public void onNetworkConnect(NetworkChangeDetector.NetworkInformation networkInfo) { notifyObserversOfNetworkConnect(networkInfo); } @@ -182,18 +193,23 @@ public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation network public void onNetworkDisconnect(long networkHandle) { notifyObserversOfNetworkDisconnect(networkHandle); } + + @Override + public void onNetworkPreference( + List types, int preference) { + notifyObserversOfNetworkPreference(types, preference); + } }, appContext); } - private void updateCurrentConnectionType( - 
NetworkMonitorAutoDetect.ConnectionType newConnectionType) { + private void updateCurrentConnectionType(NetworkChangeDetector.ConnectionType newConnectionType) { currentConnectionType = newConnectionType; notifyObserversOfConnectionTypeChange(newConnectionType); } /** Alerts all observers of a connection change. */ private void notifyObserversOfConnectionTypeChange( - NetworkMonitorAutoDetect.ConnectionType newConnectionType) { + NetworkChangeDetector.ConnectionType newConnectionType) { List nativeObservers = getNativeNetworkObserversSync(); for (Long nativeObserver : nativeObservers) { nativeNotifyConnectionTypeChanged(nativeObserver); @@ -209,7 +225,7 @@ private void notifyObserversOfConnectionTypeChange( } private void notifyObserversOfNetworkConnect( - NetworkMonitorAutoDetect.NetworkInformation networkInfo) { + NetworkChangeDetector.NetworkInformation networkInfo) { List nativeObservers = getNativeNetworkObserversSync(); for (Long nativeObserver : nativeObservers) { nativeNotifyOfNetworkConnect(nativeObserver, networkInfo); @@ -223,17 +239,28 @@ private void notifyObserversOfNetworkDisconnect(long networkHandle) { } } + private void notifyObserversOfNetworkPreference( + List types, int preference) { + List nativeObservers = getNativeNetworkObserversSync(); + for (NetworkChangeDetector.ConnectionType type : types) { + for (Long nativeObserver : nativeObservers) { + nativeNotifyOfNetworkPreference(nativeObserver, type, preference); + } + } + } + private void updateObserverActiveNetworkList(long nativeObserver) { - List networkInfoList; - synchronized (autoDetectLock) { - networkInfoList = (autoDetect == null) ? null : autoDetect.getActiveNetworkList(); + List networkInfoList; + synchronized (networkChangeDetectorLock) { + networkInfoList = + (networkChangeDetector == null) ? 
null : networkChangeDetector.getActiveNetworkList(); } if (networkInfoList == null || networkInfoList.size() == 0) { return; } - NetworkMonitorAutoDetect.NetworkInformation[] networkInfos = - new NetworkMonitorAutoDetect.NetworkInformation[networkInfoList.size()]; + NetworkChangeDetector.NetworkInformation[] networkInfos = + new NetworkChangeDetector.NetworkInformation[networkInfoList.size()]; networkInfos = networkInfoList.toArray(networkInfos); nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos); } @@ -278,30 +305,35 @@ public void removeObserver(NetworkObserver observer) { /** Checks if there currently is connectivity. */ public static boolean isOnline() { - NetworkMonitorAutoDetect.ConnectionType connectionType = - getInstance().getCurrentConnectionType(); - return connectionType != NetworkMonitorAutoDetect.ConnectionType.CONNECTION_NONE; + NetworkChangeDetector.ConnectionType connectionType = getInstance().getCurrentConnectionType(); + return connectionType != NetworkChangeDetector.ConnectionType.CONNECTION_NONE; } private native void nativeNotifyConnectionTypeChanged(long nativeAndroidNetworkMonitor); + private native void nativeNotifyOfNetworkConnect( - long nativeAndroidNetworkMonitor, NetworkMonitorAutoDetect.NetworkInformation networkInfo); + long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation networkInfo); + private native void nativeNotifyOfNetworkDisconnect( long nativeAndroidNetworkMonitor, long networkHandle); + private native void nativeNotifyOfActiveNetworkList( - long nativeAndroidNetworkMonitor, NetworkMonitorAutoDetect.NetworkInformation[] networkInfos); + long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation[] networkInfos); + + private native void nativeNotifyOfNetworkPreference( + long nativeAndroidNetworkMonitor, NetworkChangeDetector.ConnectionType type, int preference); // For testing only. 
@Nullable - NetworkMonitorAutoDetect getNetworkMonitorAutoDetect() { - synchronized (autoDetectLock) { - return autoDetect; + NetworkChangeDetector getNetworkChangeDetector() { + synchronized (networkChangeDetectorLock) { + return networkChangeDetector; } } // For testing only. int getNumObservers() { - synchronized (autoDetectLock) { + synchronized (networkChangeDetectorLock) { return numObservers; } } @@ -309,7 +341,9 @@ int getNumObservers() { // For testing only. static NetworkMonitorAutoDetect createAndSetAutoDetectForTest(Context context) { NetworkMonitor networkMonitor = getInstance(); - NetworkMonitorAutoDetect autoDetect = networkMonitor.createAutoDetect(context); - return networkMonitor.autoDetect = autoDetect; + NetworkChangeDetector networkChangeDetector = + networkMonitor.createNetworkChangeDetector(context); + networkMonitor.networkChangeDetector = networkChangeDetector; + return (NetworkMonitorAutoDetect) networkChangeDetector; } } diff --git a/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java b/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java index 0ac469c1de..3d233b3423 100644 --- a/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java +++ b/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java @@ -41,79 +41,10 @@ * Borrowed from Chromium's * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java * - * Used by the NetworkMonitor to listen to platform changes in connectivity. - * Note that use of this class requires that the app have the platform - * ACCESS_NETWORK_STATE permission. + *

Used by the NetworkMonitor to listen to platform changes in connectivity. Note that use of + * this class requires that the app have the platform ACCESS_NETWORK_STATE permission. */ -public class NetworkMonitorAutoDetect extends BroadcastReceiver { - public static enum ConnectionType { - CONNECTION_UNKNOWN, - CONNECTION_ETHERNET, - CONNECTION_WIFI, - CONNECTION_4G, - CONNECTION_3G, - CONNECTION_2G, - CONNECTION_UNKNOWN_CELLULAR, - CONNECTION_BLUETOOTH, - CONNECTION_VPN, - CONNECTION_NONE - } - - public static class IPAddress { - public final byte[] address; - public IPAddress(byte[] address) { - this.address = address; - } - - @CalledByNative("IPAddress") - private byte[] getAddress() { - return address; - } - } - - /** Java version of NetworkMonitor.NetworkInformation */ - public static class NetworkInformation { - public final String name; - public final ConnectionType type; - // Used to specify the underlying network type if the type is CONNECTION_VPN. - public final ConnectionType underlyingTypeForVpn; - public final long handle; - public final IPAddress[] ipAddresses; - public NetworkInformation(String name, ConnectionType type, ConnectionType underlyingTypeForVpn, - long handle, IPAddress[] addresses) { - this.name = name; - this.type = type; - this.underlyingTypeForVpn = underlyingTypeForVpn; - this.handle = handle; - this.ipAddresses = addresses; - } - - @CalledByNative("NetworkInformation") - private IPAddress[] getIpAddresses() { - return ipAddresses; - } - - @CalledByNative("NetworkInformation") - private ConnectionType getConnectionType() { - return type; - } - - @CalledByNative("NetworkInformation") - private ConnectionType getUnderlyingConnectionTypeForVpn() { - return underlyingTypeForVpn; - } - - @CalledByNative("NetworkInformation") - private long getHandle() { - return handle; - } - - @CalledByNative("NetworkInformation") - private String getName() { - return name; - } - }; - +public class NetworkMonitorAutoDetect extends BroadcastReceiver 
implements NetworkChangeDetector { static class NetworkState { private final boolean connected; // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is @@ -181,7 +112,10 @@ public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCa public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) { // A link property change may indicate the IP address changes. // so forward the new NetworkInformation to the observer. - Logging.d(TAG, "link properties changed: " + linkProperties.toString()); + // + // linkProperties.toString() has PII that cannot be redacted + // very reliably, so do not include in log. + Logging.d(TAG, "link properties changed"); onNetworkChanged(network); } @@ -409,8 +343,8 @@ long getDefaultNetId() { } NetworkState networkState = getNetworkState(network); - ConnectionType connectionType = getConnectionType(networkState); - if (connectionType == ConnectionType.CONNECTION_NONE) { + NetworkChangeDetector.ConnectionType connectionType = getConnectionType(networkState); + if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_NONE) { // This may not be an error. The OS may signal a network event with connection type // NONE when the network disconnects. Logging.d(TAG, "Network " + network.toString() + " is disconnected"); @@ -419,13 +353,14 @@ long getDefaultNetId() { // Some android device may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type, // which appears to be usable. Just log them here. 
- if (connectionType == ConnectionType.CONNECTION_UNKNOWN - || connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) { + if (connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN + || connectionType == NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR) { Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType + " because it has type " + networkState.getNetworkType() + " and subtype " + networkState.getNetworkSubType()); } - // ConnectionType.CONNECTION_UNKNOWN if the network is not a VPN or the underlying network is + // NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN if the network is not a VPN or the + // underlying network is // unknown. ConnectionType underlyingConnectionTypeForVpn = getUnderlyingConnectionTypeForVpn(networkState); @@ -528,12 +463,12 @@ static class WifiDirectManagerDelegate extends BroadcastReceiver { // (NETWORK_UNSPECIFIED) for these addresses. private static final int WIFI_P2P_NETWORK_HANDLE = 0; private final Context context; - private final Observer observer; + private final NetworkChangeDetector.Observer observer; // Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is // connected. 
@Nullable private NetworkInformation wifiP2pNetworkInfo; - WifiDirectManagerDelegate(Observer observer, Context context) { + WifiDirectManagerDelegate(NetworkChangeDetector.Observer observer, Context context) { this.context = context; this.observer = observer; IntentFilter intentFilter = new IntentFilter(); @@ -598,9 +533,10 @@ private void onWifiP2pGroupChange(@Nullable WifiP2pGroup wifiP2pGroup) { ipAddresses[i] = new IPAddress(interfaceAddresses.get(i).getAddress()); } - wifiP2pNetworkInfo = - new NetworkInformation(wifiP2pGroup.getInterface(), ConnectionType.CONNECTION_WIFI, - ConnectionType.CONNECTION_NONE, WIFI_P2P_NETWORK_HANDLE, ipAddresses); + wifiP2pNetworkInfo = new NetworkInformation(wifiP2pGroup.getInterface(), + NetworkChangeDetector.ConnectionType.CONNECTION_WIFI, + NetworkChangeDetector.ConnectionType.CONNECTION_NONE, WIFI_P2P_NETWORK_HANDLE, + ipAddresses); observer.onNetworkConnect(wifiP2pNetworkInfo); } @@ -613,11 +549,11 @@ private void onWifiP2pStateChange(int state) { } } - static final long INVALID_NET_ID = -1; + private static final long INVALID_NET_ID = -1; private static final String TAG = "NetworkMonitorAutoDetect"; // Observer for the connection type change. - private final Observer observer; + private final NetworkChangeDetector.Observer observer; private final IntentFilter intentFilter; private final Context context; // Used to request mobile network. It does not do anything except for keeping @@ -631,26 +567,12 @@ private void onWifiP2pStateChange(int state) { private WifiDirectManagerDelegate wifiDirectManagerDelegate; private boolean isRegistered; - private ConnectionType connectionType; + private NetworkChangeDetector.ConnectionType connectionType; private String wifiSSID; - /** - * Observer interface by which observer is notified of network changes. - */ - public static interface Observer { - /** - * Called when default network changes. 
- */ - public void onConnectionTypeChanged(ConnectionType newConnectionType); - public void onNetworkConnect(NetworkInformation networkInfo); - public void onNetworkDisconnect(long networkHandle); - } - - /** - * Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread. - */ + /** Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread. */ @SuppressLint("NewApi") - public NetworkMonitorAutoDetect(Observer observer, Context context) { + public NetworkMonitorAutoDetect(NetworkChangeDetector.Observer observer, Context context) { this.observer = observer; this.context = context; connectivityManagerDelegate = new ConnectivityManagerDelegate(context); @@ -685,6 +607,7 @@ public NetworkMonitorAutoDetect(Observer observer, Context context) { } } + @Override public boolean supportNetworkCallback() { return connectivityManagerDelegate.supportNetworkCallback(); } @@ -711,8 +634,9 @@ boolean isReceiverRegisteredForTesting() { return isRegistered; } + @Override @Nullable - List getActiveNetworkList() { + public List getActiveNetworkList() { List connectivityManagerList = connectivityManagerDelegate.getActiveNetworkList(); if (connectivityManagerList == null) { @@ -726,6 +650,7 @@ List getActiveNetworkList() { return result; } + @Override public void destroy() { if (allNetworkCallback != null) { connectivityManagerDelegate.releaseCallback(allNetworkCallback); @@ -775,21 +700,21 @@ public long getDefaultNetId() { return connectivityManagerDelegate.getDefaultNetId(); } - private static ConnectionType getConnectionType( + private static NetworkChangeDetector.ConnectionType getConnectionType( boolean isConnected, int networkType, int networkSubtype) { if (!isConnected) { - return ConnectionType.CONNECTION_NONE; + return NetworkChangeDetector.ConnectionType.CONNECTION_NONE; } switch (networkType) { case ConnectivityManager.TYPE_ETHERNET: - return ConnectionType.CONNECTION_ETHERNET; + return NetworkChangeDetector.ConnectionType.CONNECTION_ETHERNET; 
case ConnectivityManager.TYPE_WIFI: - return ConnectionType.CONNECTION_WIFI; + return NetworkChangeDetector.ConnectionType.CONNECTION_WIFI; case ConnectivityManager.TYPE_WIMAX: - return ConnectionType.CONNECTION_4G; + return NetworkChangeDetector.ConnectionType.CONNECTION_4G; case ConnectivityManager.TYPE_BLUETOOTH: - return ConnectionType.CONNECTION_BLUETOOTH; + return NetworkChangeDetector.ConnectionType.CONNECTION_BLUETOOTH; case ConnectivityManager.TYPE_MOBILE: // Use information from TelephonyManager to classify the connection. switch (networkSubtype) { @@ -798,7 +723,8 @@ private static ConnectionType getConnectionType( case TelephonyManager.NETWORK_TYPE_CDMA: case TelephonyManager.NETWORK_TYPE_1xRTT: case TelephonyManager.NETWORK_TYPE_IDEN: - return ConnectionType.CONNECTION_2G; + case TelephonyManager.NETWORK_TYPE_GSM: + return NetworkChangeDetector.ConnectionType.CONNECTION_2G; case TelephonyManager.NETWORK_TYPE_UMTS: case TelephonyManager.NETWORK_TYPE_EVDO_0: case TelephonyManager.NETWORK_TYPE_EVDO_A: @@ -808,27 +734,37 @@ private static ConnectionType getConnectionType( case TelephonyManager.NETWORK_TYPE_EVDO_B: case TelephonyManager.NETWORK_TYPE_EHRPD: case TelephonyManager.NETWORK_TYPE_HSPAP: - return ConnectionType.CONNECTION_3G; + case TelephonyManager.NETWORK_TYPE_TD_SCDMA: + return NetworkChangeDetector.ConnectionType.CONNECTION_3G; case TelephonyManager.NETWORK_TYPE_LTE: - return ConnectionType.CONNECTION_4G; + case TelephonyManager.NETWORK_TYPE_IWLAN: + return NetworkChangeDetector.ConnectionType.CONNECTION_4G; + case TelephonyManager.NETWORK_TYPE_NR: + return NetworkChangeDetector.ConnectionType.CONNECTION_5G; default: - return ConnectionType.CONNECTION_UNKNOWN_CELLULAR; + return NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN_CELLULAR; } case ConnectivityManager.TYPE_VPN: - return ConnectionType.CONNECTION_VPN; + return NetworkChangeDetector.ConnectionType.CONNECTION_VPN; default: - return ConnectionType.CONNECTION_UNKNOWN; + return 
NetworkChangeDetector.ConnectionType.CONNECTION_UNKNOWN; } } - public static ConnectionType getConnectionType(NetworkState networkState) { + public static NetworkChangeDetector.ConnectionType getConnectionType(NetworkState networkState) { return getConnectionType(networkState.isConnected(), networkState.getNetworkType(), networkState.getNetworkSubType()); } - private static ConnectionType getUnderlyingConnectionTypeForVpn(NetworkState networkState) { + @Override + public NetworkChangeDetector.ConnectionType getCurrentConnectionType() { + return getConnectionType(getCurrentNetworkState()); + } + + private static NetworkChangeDetector.ConnectionType getUnderlyingConnectionTypeForVpn( + NetworkState networkState) { if (networkState.getNetworkType() != ConnectivityManager.TYPE_VPN) { - return ConnectionType.CONNECTION_NONE; + return NetworkChangeDetector.ConnectionType.CONNECTION_NONE; } return getConnectionType(networkState.isConnected(), networkState.getUnderlyingNetworkTypeForVpn(), @@ -836,7 +772,7 @@ private static ConnectionType getUnderlyingConnectionTypeForVpn(NetworkState net } private String getWifiSSID(NetworkState networkState) { - if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) + if (getConnectionType(networkState) != NetworkChangeDetector.ConnectionType.CONNECTION_WIFI) return ""; return wifiManagerDelegate.getWifiSSID(); } @@ -851,7 +787,7 @@ public void onReceive(Context context, Intent intent) { } private void connectionTypeChanged(NetworkState networkState) { - ConnectionType newConnectionType = getConnectionType(networkState); + NetworkChangeDetector.ConnectionType newConnectionType = getConnectionType(networkState); String newWifiSSID = getWifiSSID(networkState); if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return; diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java index 7891b7f6b4..4df2193139 100644 --- 
a/sdk/android/api/org/webrtc/PeerConnection.java +++ b/sdk/android/api/org/webrtc/PeerConnection.java @@ -382,7 +382,11 @@ public enum AdapterType { CELLULAR(1 << 2), VPN(1 << 3), LOOPBACK(1 << 4), - ADAPTER_TYPE_ANY(1 << 5); + ADAPTER_TYPE_ANY(1 << 5), + CELLULAR_2G(1 << 6), + CELLULAR_3G(1 << 7), + CELLULAR_4G(1 << 8), + CELLULAR_5G(1 << 9); public final Integer bitMask; private AdapterType(Integer bitMask) { @@ -532,18 +536,6 @@ public static class RTCConfiguration { // Null indicates no change to currently configured value. @Nullable public Boolean allowCodecSwitching; - /* - * Experimental flag that enables a use of media transport. If this is true, the media transport - * factory MUST be provided to the PeerConnectionFactory. - */ - public boolean useMediaTransport; - - /* - * Experimental flag that enables a use of media transport for data channels. If this is true, - * the media transport factory MUST be provided to the PeerConnectionFactory. - */ - public boolean useMediaTransportForDataChannels; - /** * Defines advanced optional cryptographic settings related to SRTP and * frame encryption for native WebRTC. 
Setting this will overwrite any @@ -598,8 +590,6 @@ public RTCConfiguration(List iceServers) { networkPreference = AdapterType.UNKNOWN; sdpSemantics = SdpSemantics.PLAN_B; activeResetSrtpParams = false; - useMediaTransport = false; - useMediaTransportForDataChannels = false; cryptoOptions = null; turnLoggingId = null; allowCodecSwitching = null; @@ -812,16 +802,6 @@ Boolean getAllowCodecSwitching() { return allowCodecSwitching; } - @CalledByNative("RTCConfiguration") - boolean getUseMediaTransport() { - return useMediaTransport; - } - - @CalledByNative("RTCConfiguration") - boolean getUseMediaTransportForDataChannels() { - return useMediaTransportForDataChannels; - } - @Nullable @CalledByNative("RTCConfiguration") CryptoOptions getCryptoOptions() { @@ -1179,6 +1159,14 @@ public void stopRtcEventLog() { nativeStopRtcEventLog(); } + public int startRecorder(int dir, String path) { + return nativeStartRecorder(dir, path); + } + + public int stopRecorder(int dir) { + return nativeStopRecorder(dir); + } + // TODO(fischman): add support for DTMF-related methods once that API // stabilizes. 
public SignalingState signalingState() { @@ -1292,4 +1280,6 @@ private native RtpTransceiver nativeAddTransceiverOfType( MediaStreamTrack.MediaType mediaType, RtpTransceiver.RtpTransceiverInit init); private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes); private native void nativeStopRtcEventLog(); + private native int nativeStartRecorder(int dir, String path); + private native int nativeStopRecorder(int dir); } diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java index 683ac88364..c87e639f23 100644 --- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java +++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java @@ -175,7 +175,6 @@ public static class Builder { @Nullable private FecControllerFactoryFactoryInterface fecControllerFactoryFactory; @Nullable private NetworkControllerFactoryFactory networkControllerFactoryFactory; @Nullable private NetworkStatePredictorFactoryFactory networkStatePredictorFactoryFactory; - @Nullable private MediaTransportFactoryFactory mediaTransportFactoryFactory; @Nullable private NetEqFactoryFactory neteqFactoryFactory; private Builder() {} @@ -247,13 +246,6 @@ public Builder setNetworkStatePredictorFactoryFactory( return this; } - /** Sets a MediaTransportFactoryFactory for a PeerConnectionFactory. */ - public Builder setMediaTransportFactoryFactory( - MediaTransportFactoryFactory mediaTransportFactoryFactory) { - this.mediaTransportFactoryFactory = mediaTransportFactoryFactory; - return this; - } - /** * Sets a NetEqFactoryFactory for the PeerConnectionFactory. When using a * custom NetEqFactoryFactory, the AudioDecoderFactoryFactory will be set @@ -284,9 +276,6 @@ public PeerConnectionFactory createPeerConnectionFactory() { networkStatePredictorFactoryFactory == null ? 0 : networkStatePredictorFactoryFactory.createNativeNetworkStatePredictorFactory(), - mediaTransportFactoryFactory == null - ? 
0 - : mediaTransportFactoryFactory.createNativeMediaTransportFactory(), neteqFactoryFactory == null ? 0 : neteqFactoryFactory.createNativeNetEqFactory()); } } @@ -501,8 +490,6 @@ public void dispose() { networkThread = null; workerThread = null; signalingThread = null; - MediaCodecVideoEncoder.disposeEglContext(); - MediaCodecVideoDecoder.disposeEglContext(); nativeFactory = 0; } @@ -609,7 +596,7 @@ private static native PeerConnectionFactory nativeCreatePeerConnectionFactory(Co long audioDecoderFactory, VideoEncoderFactory encoderFactory, VideoDecoderFactory decoderFactory, long nativeAudioProcessor, long nativeFecControllerFactory, long nativeNetworkControllerFactory, - long nativeNetworkStatePredictorFactory, long mediaTransportFactory, long neteqFactory); + long nativeNetworkStatePredictorFactory, long neteqFactory); private static native long nativeCreatePeerConnection(long factory, PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver, diff --git a/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java index 82417fd980..d334dfab4e 100644 --- a/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java +++ b/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java @@ -21,19 +21,9 @@ public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderF */ private static final Predicate defaultAllowedPredicate = new Predicate() { - private String[] prefixWhitelist = - Arrays.copyOf(MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES, - MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES.length); - @Override public boolean test(MediaCodecInfo arg) { - final String name = arg.getName(); - for (String prefix : prefixWhitelist) { - if (name.startsWith(prefix)) { - return true; - } - } - return false; + return MediaCodecUtils.isSoftwareOnly(arg); } }; diff --git a/sdk/android/api/org/webrtc/RtpParameters.java 
b/sdk/android/api/org/webrtc/RtpParameters.java index 4293ce77d2..6ea011a887 100644 --- a/sdk/android/api/org/webrtc/RtpParameters.java +++ b/sdk/android/api/org/webrtc/RtpParameters.java @@ -50,6 +50,20 @@ public static class Encoding { // Set to true to cause this encoding to be sent, and false for it not to // be sent. public boolean active = true; + // The relative bitrate priority of this encoding. Currently this is + // implemented for the entire RTP sender by using the value of the first + // encoding parameter. + // See: https://w3c.github.io/webrtc-priority/#enumdef-rtcprioritytype + // "very-low" = 0.5 + // "low" = 1.0 + // "medium" = 2.0 + // "high" = 4.0 + public double bitratePriority = 1.0; + // The relative DiffServ Code Point priority for this encoding, allowing + // packets to be marked relatively higher or lower without affecting + // bandwidth allocations. + //@Priority public int networkPriority = Priority.LOW; + public int networkPriority = 1; // If non-null, this represents the Transport Independent Application // Specific maximum bandwidth defined in RFC3890. If null, there is no // maximum bitrate. 
@@ -75,10 +89,13 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { } @CalledByNative("Encoding") - Encoding(String rid, boolean active, Integer maxBitrateBps, Integer minBitrateBps, - Integer maxFramerate, Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc) { + Encoding(String rid, boolean active, double bitratePriority, /*@Priority*/ int networkPriority, + Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate, + Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc) { this.rid = rid; this.active = active; + this.bitratePriority = bitratePriority; + this.networkPriority = networkPriority; this.maxBitrateBps = maxBitrateBps; this.minBitrateBps = minBitrateBps; this.maxFramerate = maxFramerate; @@ -98,6 +115,17 @@ boolean getActive() { return active; } + @CalledByNative("Encoding") + double getBitratePriority() { + return bitratePriority; + } + + @CalledByNative("Encoding") + //@Priority + int getNetworkPriority() { + return networkPriority; + } + @Nullable @CalledByNative("Encoding") Integer getMaxBitrateBps() { diff --git a/sdk/android/api/org/webrtc/RtpTransceiver.java b/sdk/android/api/org/webrtc/RtpTransceiver.java index 64d8eb41d1..aff1bfbde1 100644 --- a/sdk/android/api/org/webrtc/RtpTransceiver.java +++ b/sdk/android/api/org/webrtc/RtpTransceiver.java @@ -200,19 +200,40 @@ public RtpTransceiverDirection getCurrentDirection() { * sendrecv, sendonly, recvonly, or inactive. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction */ - public void setDirection(RtpTransceiverDirection rtpTransceiverDirection) { + public boolean setDirection(RtpTransceiverDirection rtpTransceiverDirection) { checkRtpTransceiverExists(); - nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection); + return nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection); } /** - * The Stop method irreversibly stops the RtpTransceiver. 
The sender of this - * transceiver will no longer send, the receiver will no longer receive. - * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop + * The Stop method will for the time being call the StopInternal method. + * After a migration procedure, stop() will be equivalent to StopStandard. */ public void stop() { checkRtpTransceiverExists(); - nativeStop(nativeRtpTransceiver); + nativeStopInternal(nativeRtpTransceiver); + } + + /** + * The StopInternal method stops the RtpTransceiver, like Stop, but goes + * immediately to Stopped state. + */ + public void stopInternal() { + checkRtpTransceiverExists(); + nativeStopInternal(nativeRtpTransceiver); + } + + /** + * The StopStandard method irreversibly stops the RtpTransceiver. The sender + * of this transceiver will no longer send, the receiver will no longer + * receive. + * + *

The transceiver will enter Stopping state and signal NegotiationNeeded. + * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop + */ + public void stopStandard() { + checkRtpTransceiverExists(); + nativeStopStandard(nativeRtpTransceiver); } @CalledByNative @@ -237,7 +258,8 @@ private void checkRtpTransceiverExists() { private static native boolean nativeStopped(long rtpTransceiver); private static native RtpTransceiverDirection nativeDirection(long rtpTransceiver); private static native RtpTransceiverDirection nativeCurrentDirection(long rtpTransceiver); - private static native void nativeStop(long rtpTransceiver); - private static native void nativeSetDirection( + private static native void nativeStopInternal(long rtpTransceiver); + private static native void nativeStopStandard(long rtpTransceiver); + private static native boolean nativeSetDirection( long rtpTransceiver, RtpTransceiverDirection rtpTransceiverDirection); } diff --git a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java index bff5ad74a2..e37b34d9b2 100644 --- a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java +++ b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java @@ -75,6 +75,11 @@ private void checkNotDisposed() { } } + @Nullable + public MediaProjection getMediaProjection() { + return mediaProjection; + } + @Override // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. 
@SuppressWarnings("NoSynchronizedMethodCheck") diff --git a/sdk/android/api/org/webrtc/SessionDescription.java b/sdk/android/api/org/webrtc/SessionDescription.java index 62601f0bf2..be89599a5f 100644 --- a/sdk/android/api/org/webrtc/SessionDescription.java +++ b/sdk/android/api/org/webrtc/SessionDescription.java @@ -22,7 +22,8 @@ public class SessionDescription { public static enum Type { OFFER, PRANSWER, - ANSWER; + ANSWER, + ROLLBACK; public String canonicalForm() { return name().toLowerCase(Locale.US); diff --git a/sdk/android/api/org/webrtc/SurfaceEglRenderer.java b/sdk/android/api/org/webrtc/SurfaceEglRenderer.java index dfda8cb770..7a6db15887 100644 --- a/sdk/android/api/org/webrtc/SurfaceEglRenderer.java +++ b/sdk/android/api/org/webrtc/SurfaceEglRenderer.java @@ -24,7 +24,7 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Callback { private static final String TAG = "SurfaceEglRenderer"; - // Callback for reporting renderer events. Read-only after initilization so no lock required. + // Callback for reporting renderer events. Read-only after initialization so no lock required. 
private RendererCommon.RendererEvents rendererEvents; private final Object layoutLock = new Object(); diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java index 3522a87487..0dd45cfc93 100644 --- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java +++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java @@ -199,6 +199,10 @@ private SurfaceTextureHelper(Context sharedContext, Handler handler, boolean ali oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); surfaceTexture = new SurfaceTexture(oesTextureId); setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> { + if (hasPendingTexture) { + Logging.d(TAG, "A frame is already pending, dropping frame."); + } + hasPendingTexture = true; tryDeliverTextureFrame(); }, handler); @@ -263,6 +267,17 @@ public void setTextureSize(int textureWidth, int textureHeight) { }); } + /** + * Forces a frame to be produced. If no new frame is available, the last frame is sent to the + * listener again. + */ + public void forceFrame() { + handler.post(() -> { + hasPendingTexture = true; + tryDeliverTextureFrame(); + }); + } + /** Set the rotation of the delivered frames. */ public void setFrameRotation(int rotation) { handler.post(() -> this.frameRotation = rotation); diff --git a/sdk/android/api/org/webrtc/SurfaceViewRenderer.java b/sdk/android/api/org/webrtc/SurfaceViewRenderer.java index c39416c3e1..f62d27432d 100644 --- a/sdk/android/api/org/webrtc/SurfaceViewRenderer.java +++ b/sdk/android/api/org/webrtc/SurfaceViewRenderer.java @@ -31,7 +31,7 @@ public class SurfaceViewRenderer extends SurfaceView new RendererCommon.VideoLayoutMeasure(); private final SurfaceEglRenderer eglRenderer; - // Callback for reporting renderer events. Read-only after initilization so no lock required. + // Callback for reporting renderer events. Read-only after initialization so no lock required. 
private RendererCommon.RendererEvents rendererEvents; // Accessed only on the main thread. diff --git a/sdk/android/api/org/webrtc/VideoCapabilityParser.java b/sdk/android/api/org/webrtc/VideoCapabilityParser.java new file mode 100644 index 0000000000..b749e3c73a --- /dev/null +++ b/sdk/android/api/org/webrtc/VideoCapabilityParser.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.webrtc.Logging; +import org.xml.sax.SAXException; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.HashMap; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; + +public class VideoCapabilityParser { + + public Document loadWithDom(String xmlFilePath) { + Document document = null; + File file = new File(xmlFilePath); + if (file.exists()) { + try { + InputStream inputStream = new FileInputStream(file); + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); + document = documentBuilder.parse(inputStream); + } catch (FileNotFoundException e) { + } catch (ParserConfigurationException e) { + } catch (IOException e) { + } catch (SAXException e) { + } + } + return document; + } + + public ArrayList> 
parseWithTag(Document document, String tag) { + if (document == null) { + return null; + } + ArrayList> extraMediaCodecList = new ArrayList<>(); + NodeList sList = document.getElementsByTagName(tag); + for (int i = 0; i < sList.getLength(); i++) { + Element encoded = (Element) sList.item(i); + NodeList nodeList = encoded.getElementsByTagName("MediaCodec"); + for (i = 0; i < nodeList.getLength(); i++) { + HashMap map = new HashMap<>(); + Node node = nodeList.item(i); + map.put("name", node.getAttributes().getNamedItem("name").getNodeValue()); + map.put("type", node.getAttributes().getNamedItem("type").getNodeValue()); + extraMediaCodecList.add(map); + } + } + return extraMediaCodecList; + } + + public boolean isExtraHardwareSupported(String name , String type, ArrayList> extraMediaCodecMap){ + boolean result = false; + if (extraMediaCodecMap != null) { + for (HashMap item : extraMediaCodecMap){ + if (name.startsWith(item.get("name")) && type.startsWith(item.get("type"))){ + result=true; + break; + } + } + } + return result; + } +} diff --git a/sdk/android/api/org/webrtc/VideoEncoderFactory.java b/sdk/android/api/org/webrtc/VideoEncoderFactory.java index b318e8ba85..050aa710ac 100644 --- a/sdk/android/api/org/webrtc/VideoEncoderFactory.java +++ b/sdk/android/api/org/webrtc/VideoEncoderFactory.java @@ -14,6 +14,24 @@ /** Factory for creating VideoEncoders. */ public interface VideoEncoderFactory { + public interface VideoEncoderSelector { + /** Called with the VideoCodecInfo of the currently used encoder. */ + @CalledByNative("VideoEncoderSelector") void onCurrentEncoder(VideoCodecInfo info); + + /** + * Called with the current available bitrate. Returns null if the encoder selector prefers to + * keep the current encoder or a VideoCodecInfo if a new encoder is preferred. + */ + @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onAvailableBitrate(int kbps); + + /** + * Called when the currently used encoder signal itself as broken. 
Returns null if the encoder + * selector prefers to keep the current encoder or a VideoCodecInfo if a new encoder is + * preferred. + */ + @Nullable @CalledByNative("VideoEncoderSelector") VideoCodecInfo onEncoderBroken(); + } + /** Creates an encoder for the given video codec. */ @Nullable @CalledByNative VideoEncoder createEncoder(VideoCodecInfo info); @@ -32,4 +50,13 @@ public interface VideoEncoderFactory { default VideoCodecInfo[] getImplementations() { return getSupportedCodecs(); } + + /** + * Returns a VideoEncoderSelector if implemented by the VideoEncoderFactory, + * null otherwise. + */ + @CalledByNative + default VideoEncoderSelector getEncoderSelector() { + return null; + } } diff --git a/sdk/android/api/org/webrtc/VideoProcessor.java b/sdk/android/api/org/webrtc/VideoProcessor.java index 3a89090e2d..19a2b382c9 100644 --- a/sdk/android/api/org/webrtc/VideoProcessor.java +++ b/sdk/android/api/org/webrtc/VideoProcessor.java @@ -54,7 +54,7 @@ default void onFrameCaptured(VideoFrame frame, FrameAdaptationParameters paramet /** * Set the sink that receives the output from this processor. Null can be passed in to unregister - * a sink. After this call returns, no frames should be delivered to an unregistered sink. + * a sink. 
*/ void setSink(@Nullable VideoSink sink); diff --git a/sdk/android/api/org/webrtc/VideoSource.java b/sdk/android/api/org/webrtc/VideoSource.java index 6c528fd05b..b0bffd6ff1 100644 --- a/sdk/android/api/org/webrtc/VideoSource.java +++ b/sdk/android/api/org/webrtc/VideoSource.java @@ -135,7 +135,9 @@ public void setVideoProcessor(@Nullable VideoProcessor newVideoProcessor) { } videoProcessor = newVideoProcessor; if (newVideoProcessor != null) { - newVideoProcessor.setSink(nativeAndroidVideoTrackSource::onFrameCaptured); + newVideoProcessor.setSink( + (frame) + -> runWithReference(() -> nativeAndroidVideoTrackSource.onFrameCaptured(frame))); if (isCapturerRunning) { newVideoProcessor.onCapturerStarted(/* success= */ true); } diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index ece6f35d4c..43fce4f89d 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -10,8 +10,13 @@ package org.webrtc.audio; -import android.media.AudioManager; import android.content.Context; +import android.media.AudioAttributes; +import android.media.AudioDeviceInfo; +import android.media.AudioManager; +import android.os.Build; +import android.support.annotation.RequiresApi; +import java.util.concurrent.ScheduledExecutorService; import org.webrtc.JniCommon; import org.webrtc.Logging; @@ -28,6 +33,7 @@ public static Builder builder(Context context) { public static class Builder { private final Context context; + private ScheduledExecutorService scheduler; private final AudioManager audioManager; private int inputSampleRate; private int outputSampleRate; @@ -42,6 +48,7 @@ public static class Builder { private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported(); private boolean useStereoInput; private boolean useStereoOutput; + private AudioAttributes audioAttributes; private Builder(Context context) { 
this.context = context; @@ -50,6 +57,11 @@ private Builder(Context context) { this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); } + public Builder setScheduler(ScheduledExecutorService scheduler) { + this.scheduler = scheduler; + return this; + } + /** * Call this method if the default handling of querying the native sample rate shall be * overridden. Can be useful on some devices where the available Android APIs are known to @@ -183,11 +195,19 @@ public Builder setUseStereoOutput(boolean useStereoOutput) { return this; } + /** + * Set custom {@link AudioAttributes} to use. + */ + public Builder setAudioAttributes(AudioAttributes audioAttributes) { + this.audioAttributes = audioAttributes; + return this; + } + /** * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership * and is responsible for calling release(). */ - public AudioDeviceModule createAudioDeviceModule() { + public JavaAudioDeviceModule createAudioDeviceModule() { Logging.d(TAG, "createAudioDeviceModule"); if (useHardwareNoiseSuppressor) { Logging.d(TAG, "HW NS will be used."); @@ -205,11 +225,15 @@ public AudioDeviceModule createAudioDeviceModule() { } Logging.d(TAG, "HW AEC will not be used."); } - final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource, - audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback, - useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); + ScheduledExecutorService executor = this.scheduler; + if (executor == null) { + executor = WebRtcAudioRecord.newDefaultScheduler(); + } + final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, + audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, + samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack( - context, audioManager, audioTrackErrorCallback, 
audioTrackStateCallback); + context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput); } @@ -369,6 +393,18 @@ public void setMicrophoneMute(boolean mute) { audioInput.setMicrophoneMute(mute); } + /** + * Start to prefer a specific {@link AudioDeviceInfo} device for recording. Typically this should + * only be used if a client gives an explicit option for choosing a physical device to record + * from. Otherwise the best-matching device for other parameters will be used. Calling after + * recording is started may cause a temporary interruption if the audio routing changes. + */ + @RequiresApi(Build.VERSION_CODES.M) + public void setPreferredInputDevice(AudioDeviceInfo preferredInputDevice) { + Logging.d(TAG, "setPreferredInputDevice: " + preferredInputDevice); + audioInput.setPreferredDevice(preferredInputDevice); + } + private static native long nativeCreateAudioDeviceModule(Context context, AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput); diff --git a/sdk/android/instrumentationtests/AndroidManifest.xml b/sdk/android/instrumentationtests/AndroidManifest.xml index 75df968f13..55028da703 100644 --- a/sdk/android/instrumentationtests/AndroidManifest.xml +++ b/sdk/android/instrumentationtests/AndroidManifest.xml @@ -16,7 +16,7 @@ - + diff --git a/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java b/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java index 1591cae0e6..4eb033b210 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java @@ -101,7 +101,6 @@ public void onEncodedFrame(EncodedImage 
frame, VideoEncoder.CodecSpecificInfo in .setCaptureTimeNs(frame.captureTimeNs) .setFrameType(frame.frameType) .setRotation(frame.rotation) - .setCompleteFrame(frame.completeFrame) .setQp(frame.qp) .createEncodedImage()); } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java deleted file mode 100644 index 7693b592b2..0000000000 --- a/sdk/android/instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import android.annotation.TargetApi; -import android.os.Build; -import android.support.test.filters.SmallTest; -import android.util.Log; -import java.nio.ByteBuffer; -import org.chromium.base.test.BaseJUnit4ClassRunner; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo; - -@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) -@RunWith(BaseJUnit4ClassRunner.class) -public class MediaCodecVideoEncoderTest { - final static String TAG = "MCVideoEncoderTest"; - final static int profile = MediaCodecVideoEncoder.H264Profile.CONSTRAINED_BASELINE.getValue(); - - @Test - @SmallTest - public void testInitializeUsingByteBuffer() { - if (!MediaCodecVideoEncoder.isVp8HwSupported()) { - Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer"); - 
return; - } - MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); - assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, - 640, 480, 300, 30, /* useSurface= */ false)); - encoder.release(); - } - - @Test - @SmallTest - public void testInitilizeUsingTextures() { - if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { - Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures"); - return; - } - EglBase14 eglBase = EglBase.createEgl14(EglBase.CONFIG_PLAIN); - MediaCodecVideoEncoder.setEglContext(eglBase.getEglBaseContext()); - MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); - assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, - 640, 480, 300, 30, /* useSurface= */ true)); - encoder.release(); - MediaCodecVideoEncoder.disposeEglContext(); - eglBase.release(); - } - - @Test - @SmallTest - public void testInitializeUsingByteBufferReInitilizeUsingTextures() { - if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) { - Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures"); - return; - } - MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); - assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, - 640, 480, 300, 30, /* useSurface= */ false)); - encoder.release(); - EglBase14 eglBase = EglBase.createEgl14(EglBase.CONFIG_PLAIN); - MediaCodecVideoEncoder.setEglContext(eglBase.getEglBaseContext()); - assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, - 640, 480, 300, 30, /* useSurface= */ true)); - encoder.release(); - MediaCodecVideoEncoder.disposeEglContext(); - eglBase.release(); - } - - @Test - @SmallTest - public void testEncoderUsingByteBuffer() throws InterruptedException { - if (!MediaCodecVideoEncoder.isVp8HwSupported()) { - Log.i(TAG, "Hardware does not support VP8 encoding, skipping 
testEncoderUsingByteBuffer"); - return; - } - - final int width = 640; - final int height = 480; - final int min_size = width * height * 3 / 2; - final long presentationTimestampUs = 2; - - MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder(); - - assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, - width, height, 300, 30, /* useSurface= */ false)); - ByteBuffer[] inputBuffers = encoder.getInputBuffers(); - assertNotNull(inputBuffers); - assertTrue(min_size <= inputBuffers[0].capacity()); - - int bufferIndex; - do { - Thread.sleep(10); - bufferIndex = encoder.dequeueInputBuffer(); - } while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet. - - assertTrue(bufferIndex >= 0); - assertTrue(bufferIndex < inputBuffers.length); - assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs)); - - OutputBufferInfo info; - do { - info = encoder.dequeueOutputBuffer(); - Thread.sleep(10); - } while (info == null); - assertTrue(info.index >= 0); - assertEquals(presentationTimestampUs, info.presentationTimestampUs); - assertTrue(info.buffer.capacity() > 0); - encoder.releaseOutputBuffer(info.index); - - encoder.release(); - } -} diff --git a/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java b/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java index 36136ca933..5f7e07df55 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java @@ -15,7 +15,6 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID; import android.annotation.SuppressLint; import android.content.Context; @@ -31,14 +30,15 @@ import android.support.test.filters.MediumTest; import android.support.test.filters.SmallTest; 
import android.support.test.rule.UiThreadTestRule; +import java.util.List; import org.chromium.base.test.BaseJUnit4ClassRunner; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; -import org.webrtc.NetworkMonitorAutoDetect.ConnectionType; +import org.webrtc.NetworkChangeDetector.ConnectionType; +import org.webrtc.NetworkChangeDetector.NetworkInformation; import org.webrtc.NetworkMonitorAutoDetect.ConnectivityManagerDelegate; -import org.webrtc.NetworkMonitorAutoDetect.NetworkInformation; import org.webrtc.NetworkMonitorAutoDetect.NetworkState; /** @@ -53,6 +53,9 @@ public class NetworkMonitorTest { @Rule public UiThreadTestRule uiThreadTestRule = new UiThreadTestRule(); + private static final long INVALID_NET_ID = -1; + private NetworkChangeDetector detector; + /** * Listens for alerts fired by the NetworkMonitor when network status changes. */ @@ -155,6 +158,10 @@ public void onNetworkConnect(NetworkInformation networkInfo) {} @Override public void onNetworkDisconnect(long networkHandle) {} + + @Override + public void onNetworkPreference(List types, @NetworkPreference int preference) { + } } private static final Object lock = new Object(); @@ -179,6 +186,17 @@ private static Handler getUiThreadHandler() { */ private void createTestMonitor() { Context context = InstrumentationRegistry.getTargetContext(); + + NetworkMonitor.getInstance().setNetworkChangeDetectorFactory( + new NetworkChangeDetectorFactory() { + @Override + public NetworkChangeDetector create( + NetworkChangeDetector.Observer observer, Context context) { + detector = new NetworkMonitorAutoDetect(observer, context); + return detector; + } + }); + receiver = NetworkMonitor.createAndSetAutoDetectForTest(context); assertNotNull(receiver); @@ -311,9 +329,9 @@ public void testStartStopMonitoring() { Context context = ContextUtils.getApplicationContext(); networkMonitor.startMonitoring(context); assertEquals(1, networkMonitor.getNumObservers()); - 
assertNotNull(networkMonitor.getNetworkMonitorAutoDetect()); + assertEquals(detector, networkMonitor.getNetworkChangeDetector()); networkMonitor.stopMonitoring(); assertEquals(0, networkMonitor.getNumObservers()); - assertNull(networkMonitor.getNetworkMonitorAutoDetect()); + assertNull(networkMonitor.getNetworkChangeDetector()); } } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java b/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java index 88be833504..c380310b83 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java @@ -1488,6 +1488,38 @@ public void testRemoteStreamUpdatedWhenTracksAddedOrRemoved() throws Exception { factory.dispose(); } + @Test + @SmallTest + public void testRollback() throws Exception { + PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory(); + PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList()); + config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; + + ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer"); + PeerConnection pc = factory.createPeerConnection(config, offeringExpectations); + + SdpObserverLatch sdpLatch = new SdpObserverLatch(); + pc.createOffer(sdpLatch, new MediaConstraints()); + assertTrue(sdpLatch.await()); + SessionDescription offer = sdpLatch.getSdp(); + + sdpLatch = new SdpObserverLatch(); + offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER); + pc.setLocalDescription(sdpLatch, offer); + assertTrue(sdpLatch.await()); + + SessionDescription rollback = new SessionDescription(SessionDescription.Type.ROLLBACK, ""); + sdpLatch = new SdpObserverLatch(); + offeringExpectations.expectSignalingChange(SignalingState.STABLE); + // TODO(bugs.webrtc.org/11970): determine if triggering ONN (twice 
even) is correct. + offeringExpectations.expectRenegotiationNeeded(); + offeringExpectations.expectRenegotiationNeeded(); + pc.setLocalDescription(sdpLatch, rollback); + assertTrue(sdpLatch.await()); + + assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS)); + } + private static void negotiate(PeerConnection offeringPC, ObserverExpectations offeringExpectations, PeerConnection answeringPC, ObserverExpectations answeringExpectations) { diff --git a/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java b/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java index 14d76d0c4c..af9c62bda9 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java @@ -12,6 +12,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; @@ -54,7 +55,7 @@ public void testSetDegradationPreference() throws Exception { RtpParameters parameters = sender.getParameters(); assertNotNull(parameters); - assertEquals(DegradationPreference.BALANCED, parameters.degradationPreference); + assertNull(parameters.degradationPreference); parameters.degradationPreference = DegradationPreference.MAINTAIN_FRAMERATE; assertTrue(sender.setParameters(parameters)); diff --git a/sdk/android/native_api/codecs/wrapper.cc b/sdk/android/native_api/codecs/wrapper.cc index 08cafd83b8..c3f2095335 100644 --- a/sdk/android/native_api/codecs/wrapper.cc +++ b/sdk/android/native_api/codecs/wrapper.cc @@ -16,6 +16,7 @@ #include "sdk/android/src/jni/video_codec_info.h" #include "sdk/android/src/jni/video_decoder_factory_wrapper.h" #include "sdk/android/src/jni/video_encoder_factory_wrapper.h" +#include "sdk/android/src/jni/video_encoder_wrapper.h" namespace webrtc { @@ -38,4 +39,11 @@ std::unique_ptr 
JavaToNativeVideoEncoderFactory( jni, JavaParamRef(encoder_factory)); } +std::vector +JavaToNativeResolutionBitrateLimits(JNIEnv* jni, + const jobjectArray j_bitrate_limits_array) { + return jni::JavaToNativeResolutionBitrateLimits( + jni, JavaParamRef(j_bitrate_limits_array)); +} + } // namespace webrtc diff --git a/sdk/android/native_api/codecs/wrapper.h b/sdk/android/native_api/codecs/wrapper.h index b1ff24c679..2246fd76d2 100644 --- a/sdk/android/native_api/codecs/wrapper.h +++ b/sdk/android/native_api/codecs/wrapper.h @@ -13,9 +13,11 @@ #include #include +#include #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" namespace webrtc { @@ -35,6 +37,12 @@ std::unique_ptr JavaToNativeVideoEncoderFactory( JNIEnv* jni, jobject encoder_factory); +// Creates an array of VideoEncoder::ResolutionBitrateLimits from Java array +// of ResolutionBitrateLimits. +std::vector +JavaToNativeResolutionBitrateLimits(JNIEnv* jni, + const jobjectArray j_bitrate_limits_array); + } // namespace webrtc #endif // SDK_ANDROID_NATIVE_API_CODECS_WRAPPER_H_ diff --git a/sdk/android/native_api/jni/java_types.cc b/sdk/android/native_api/jni/java_types.cc index a97c81f1f2..af02c10f4c 100644 --- a/sdk/android/native_api/jni/java_types.cc +++ b/sdk/android/native_api/jni/java_types.cc @@ -10,6 +10,7 @@ #include "sdk/android/native_api/jni/java_types.h" +#include #include #include @@ -51,14 +52,15 @@ Iterable::Iterator::Iterator(JNIEnv* jni, const JavaRef& iterable) Iterable::Iterator::Iterator(Iterator&& other) : jni_(std::move(other.jni_)), iterator_(std::move(other.iterator_)), - value_(std::move(other.value_)), - thread_checker_(std::move(other.thread_checker_)) {} + value_(std::move(other.value_)) { + RTC_DCHECK_RUN_ON(&thread_checker_); +} Iterable::Iterator::~Iterator() = default; // Advances the iterator one step. 
Iterable::Iterator& Iterable::Iterator::operator++() { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); if (AtEnd()) { // Can't move past the end. return *this; @@ -93,7 +95,7 @@ ScopedJavaLocalRef& Iterable::Iterator::operator*() { } bool Iterable::Iterator::AtEnd() const { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); return jni_ == nullptr || IsNull(jni_, iterator_); } diff --git a/sdk/android/native_api/jni/java_types.h b/sdk/android/native_api/jni/java_types.h index 955911c186..26fdd5a0b8 100644 --- a/sdk/android/native_api/jni/java_types.h +++ b/sdk/android/native_api/jni/java_types.h @@ -18,7 +18,9 @@ #define SDK_ANDROID_NATIVE_API_JNI_JAVA_TYPES_H_ #include + #include +#include #include #include diff --git a/sdk/android/native_api/jni/scoped_java_ref.h b/sdk/android/native_api/jni/scoped_java_ref.h index e37a992445..ac2c4f4c88 100644 --- a/sdk/android/native_api/jni/scoped_java_ref.h +++ b/sdk/android/native_api/jni/scoped_java_ref.h @@ -172,6 +172,7 @@ class ScopedJavaGlobalRef : public JavaRef { public: using JavaRef::obj_; + ScopedJavaGlobalRef() = default; explicit constexpr ScopedJavaGlobalRef(std::nullptr_t) {} ScopedJavaGlobalRef(JNIEnv* env, const JavaRef& other) : JavaRef(static_cast(env->NewGlobalRef(other.obj()))) {} @@ -185,6 +186,21 @@ class ScopedJavaGlobalRef : public JavaRef { AttachCurrentThreadIfNeeded()->DeleteGlobalRef(obj_); } + void operator=(const JavaRef& other) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + if (obj_ != nullptr) { + env->DeleteGlobalRef(obj_); + } + obj_ = other.is_null() ? nullptr : env->NewGlobalRef(other.obj()); + } + + void operator=(std::nullptr_t) { + if (obj_ != nullptr) { + AttachCurrentThreadIfNeeded()->DeleteGlobalRef(obj_); + } + obj_ = nullptr; + } + // Releases the reference to the caller. The caller *must* delete the // reference when it is done with it. 
Note that calling a Java method // is *not* a transfer of ownership and Release() should not be used. diff --git a/sdk/android/native_api/base/network_monitor.cc b/sdk/android/native_api/network_monitor/network_monitor.cc similarity index 93% rename from sdk/android/native_api/base/network_monitor.cc rename to sdk/android/native_api/network_monitor/network_monitor.cc index 515e9f21fb..38be7fdef7 100644 --- a/sdk/android/native_api/base/network_monitor.cc +++ b/sdk/android/native_api/network_monitor/network_monitor.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "sdk/android/native_api/base/network_monitor.h" +#include "sdk/android/native_api/network_monitor/network_monitor.h" #include diff --git a/sdk/android/native_api/base/network_monitor.h b/sdk/android/native_api/network_monitor/network_monitor.h similarity index 80% rename from sdk/android/native_api/base/network_monitor.h rename to sdk/android/native_api/network_monitor/network_monitor.h index 135ebb1e86..45ecd75543 100644 --- a/sdk/android/native_api/base/network_monitor.h +++ b/sdk/android/native_api/network_monitor/network_monitor.h @@ -8,14 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef SDK_ANDROID_NATIVE_API_BASE_NETWORK_MONITOR_H_ -#define SDK_ANDROID_NATIVE_API_BASE_NETWORK_MONITOR_H_ +#ifndef SDK_ANDROID_NATIVE_API_NETWORK_MONITOR_NETWORK_MONITOR_H_ +#define SDK_ANDROID_NATIVE_API_NETWORK_MONITOR_NETWORK_MONITOR_H_ #include #include -#include "rtc_base/network_monitor.h" +#include "rtc_base/network_monitor_factory.h" namespace webrtc { @@ -33,4 +33,4 @@ CreateAndroidNetworkMonitorFactory(); } // namespace webrtc -#endif // SDK_ANDROID_NATIVE_API_BASE_NETWORK_MONITOR_H_ +#endif // SDK_ANDROID_NATIVE_API_NETWORK_MONITOR_NETWORK_MONITOR_H_ diff --git a/sdk/android/native_api/peerconnection/peer_connection_factory.cc b/sdk/android/native_api/peerconnection/peer_connection_factory.cc index e6839754ac..4e742d1b7a 100644 --- a/sdk/android/native_api/peerconnection/peer_connection_factory.cc +++ b/sdk/android/native_api/peerconnection/peer_connection_factory.cc @@ -23,11 +23,10 @@ jobject NativeToJavaPeerConnectionFactory( rtc::scoped_refptr pcf, std::unique_ptr network_thread, std::unique_ptr worker_thread, - std::unique_ptr signaling_thread, - rtc::NetworkMonitorFactory* network_monitor_factory) { + std::unique_ptr signaling_thread) { return webrtc::jni::NativeToJavaPeerConnectionFactory( jni, pcf, std::move(network_thread), std::move(worker_thread), - std::move(signaling_thread), network_monitor_factory); + std::move(signaling_thread)); } } // namespace webrtc diff --git a/sdk/android/native_api/peerconnection/peer_connection_factory.h b/sdk/android/native_api/peerconnection/peer_connection_factory.h index 889d6092e7..00550a9b12 100644 --- a/sdk/android/native_api/peerconnection/peer_connection_factory.h +++ b/sdk/android/native_api/peerconnection/peer_connection_factory.h @@ -26,8 +26,7 @@ jobject NativeToJavaPeerConnectionFactory( rtc::scoped_refptr pcf, std::unique_ptr network_thread, std::unique_ptr worker_thread, - std::unique_ptr signaling_thread, - rtc::NetworkMonitorFactory* network_monitor_factory = nullptr); + std::unique_ptr 
signaling_thread); } // namespace webrtc diff --git a/sdk/android/native_api/stacktrace/stacktrace.cc b/sdk/android/native_api/stacktrace/stacktrace.cc index df1ee6435a..cea3490091 100644 --- a/sdk/android/native_api/stacktrace/stacktrace.cc +++ b/sdk/android/native_api/stacktrace/stacktrace.cc @@ -27,9 +27,9 @@ #endif #include "absl/base/attributes.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -92,7 +92,7 @@ struct SignalHandlerOutputState { }; // Global lock to ensure only one thread gets interrupted at a time. -ABSL_CONST_INIT rtc::GlobalLock g_signal_handler_lock; +ABSL_CONST_INIT GlobalMutex g_signal_handler_lock(absl::kConstInit); // Argument passed to the ThreadSignalHandler() from the sampling thread to the // sampled (stopped) thread. This value is set just before sending signal to the // thread and reset when handler is done. @@ -105,6 +105,10 @@ _Unwind_Reason_Code UnwindBacktrace(struct _Unwind_Context* unwind_context, SignalHandlerOutputState* const output_state = static_cast(unwind_output_state); + // Abort if output state is corrupt. + if (output_state == nullptr) + return _URC_END_OF_STACK; + // Avoid overflowing the stack trace array. if (output_state->stack_size_counter >= kMaxStackSize) return _URC_END_OF_STACK; @@ -121,8 +125,13 @@ _Unwind_Reason_Code UnwindBacktrace(struct _Unwind_Context* unwind_context, // This signal handler is exectued on the interrupted thread. void SignalHandler(int signum, siginfo_t* info, void* ptr) { - _Unwind_Backtrace(&UnwindBacktrace, g_signal_handler_output_state); - g_signal_handler_output_state->signal_handler_finish_event.Signal(); + // This should have been set by the thread requesting the stack trace. 
+ SignalHandlerOutputState* signal_handler_output_state = + g_signal_handler_output_state; + if (signal_handler_output_state != nullptr) { + _Unwind_Backtrace(&UnwindBacktrace, signal_handler_output_state); + signal_handler_output_state->signal_handler_finish_event.Signal(); + } } // Temporarily change the signal handler to a function that records a raw stack @@ -144,7 +153,7 @@ const char* CaptureRawStacktrace(int pid, act.sa_flags = SA_RESTART | SA_SIGINFO; sigemptyset(&act.sa_mask); - rtc::GlobalLockScope ls(&g_signal_handler_lock); + GlobalMutexLock ls(&g_signal_handler_lock); g_signal_handler_output_state = params; if (sigaction(kSignal, &act, &old_act) != 0) diff --git a/sdk/android/native_api/video/video_source.cc b/sdk/android/native_api/video/video_source.cc index a813f40313..1f4bc4dead 100644 --- a/sdk/android/native_api/video/video_source.cc +++ b/sdk/android/native_api/video/video_source.cc @@ -89,6 +89,14 @@ class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface { } private: + // Encoded sinks not implemented for JavaVideoTrackSourceImpl. 
+ bool SupportsEncodedOutput() const override { return false; } + void GenerateKeyFrame() override {} + void AddEncodedSink( + rtc::VideoSinkInterface* sink) override {} + void RemoveEncodedSink( + rtc::VideoSinkInterface* sink) override {} + rtc::scoped_refptr android_video_track_source_; ScopedJavaGlobalRef native_capturer_observer_; }; diff --git a/sdk/android/native_unittests/audio_device/audio_device_unittest.cc b/sdk/android/native_unittests/audio_device/audio_device_unittest.cc index 88dfad4a88..31da60cbc3 100644 --- a/sdk/android/native_unittests/audio_device/audio_device_unittest.cc +++ b/sdk/android/native_unittests/audio_device/audio_device_unittest.cc @@ -16,9 +16,9 @@ #include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/mock_audio_transport.h" #include "rtc_base/arraysize.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/format_macros.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #include "sdk/android/generated_native_unittests_jni/BuildInfo_jni.h" #include "sdk/android/native_api/audio_device_module/audio_device_android.h" @@ -179,7 +179,7 @@ class FifoAudioStream : public AudioStreamInterface { } int16_t* memory = new int16_t[frames_per_buffer_]; memcpy(static_cast(&memory[0]), source, bytes_per_buffer_); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); fifo_->push_back(memory); const size_t size = fifo_->size(); if (size > largest_size_) { @@ -195,7 +195,7 @@ class FifoAudioStream : public AudioStreamInterface { void Read(void* destination, size_t num_frames) override { ASSERT_EQ(num_frames, frames_per_buffer_); PRINTD("-"); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); if (fifo_->empty()) { memset(destination, 0, bytes_per_buffer_); } else { @@ -226,7 +226,7 @@ class FifoAudioStream : public AudioStreamInterface { } using AudioBufferList = std::list; - rtc::CriticalSection lock_; + Mutex lock_; const size_t 
frames_per_buffer_; const size_t bytes_per_buffer_; std::unique_ptr fifo_; diff --git a/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc b/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc index 54613f9f57..75535d052b 100644 --- a/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc +++ b/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc @@ -100,7 +100,7 @@ TEST(PeerConnectionFactoryTest, NativeToJavaPeerConnectionFactory) { jobject java_factory = NativeToJavaPeerConnectionFactory( jni, factory, std::move(network_thread), std::move(worker_thread), - std::move(signaling_thread), nullptr /* network_monitor_factory */); + std::move(signaling_thread)); RTC_LOG(INFO) << java_factory; diff --git a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc index e3b5e78cdd..fcd9c9b8f1 100644 --- a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc +++ b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc @@ -16,12 +16,12 @@ #include #include -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/inline.h" #include "system_wrappers/include/sleep.h" #include "test/gtest.h" @@ -118,15 +118,15 @@ class RtcEventDeadlock : public DeadlockInterface { class RtcCriticalSectionDeadlock : public DeadlockInterface { public: RtcCriticalSectionDeadlock() - : critscope_(std::make_unique(&crit_)) {} + : mutex_lock_(std::make_unique(&mutex_)) {} private: - void Deadlock() override { rtc::CritScope lock(&crit_); } + void Deadlock() override { MutexLock lock(&mutex_); } - void Release() override { critscope_.reset(); } + void Release() override { 
mutex_lock_.reset(); } - rtc::CriticalSection crit_; - std::unique_ptr critscope_; + Mutex mutex_; + std::unique_ptr mutex_lock_; }; class SpinDeadlock : public DeadlockInterface { diff --git a/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java b/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java index 5b6e030b91..dcb045fa6b 100644 --- a/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java +++ b/sdk/android/src/java/org/webrtc/AndroidVideoDecoder.java @@ -56,7 +56,7 @@ class AndroidVideoDecoder implements VideoDecoder, VideoSink { private final MediaCodecWrapperFactory mediaCodecWrapperFactory; private final String codecName; - private final VideoCodecType codecType; + private final VideoCodecMimeType codecType; private static class FrameInfo { final long decodeStartTimeMs; @@ -129,7 +129,7 @@ private static class DecodedTextureMetadata { @Nullable private MediaCodecWrapper codec; AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, - VideoCodecType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) { + VideoCodecMimeType codecType, int colorFormat, @Nullable EglBase.Context sharedContext) { if (!isSupportedColorFormat(colorFormat)) { throw new IllegalArgumentException("Unsupported color format: " + colorFormat); } @@ -180,7 +180,7 @@ private VideoCodecStatus initDecodeInternal(int width, int height) { try { codec = mediaCodecWrapperFactory.createByCodecName(codecName); - } catch (IOException | IllegalArgumentException e) { + } catch (IOException | IllegalArgumentException | IllegalStateException e) { Logging.e(TAG, "Cannot create media decoder " + codecName); return VideoCodecStatus.FALLBACK_SOFTWARE; } @@ -191,7 +191,7 @@ private VideoCodecStatus initDecodeInternal(int width, int height) { } codec.configure(format, surface, null, 0); codec.start(); - } catch (IllegalStateException e) { + } catch (IllegalStateException | IllegalArgumentException e) { Logging.e(TAG, "initDecode failed", e); 
release(); return VideoCodecStatus.FALLBACK_SOFTWARE; @@ -246,10 +246,6 @@ public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) { Logging.e(TAG, "decode() - key frame required first"); return VideoCodecStatus.NO_OUTPUT; } - if (!frame.completeFrame) { - Logging.e(TAG, "decode() - complete frame required first"); - return VideoCodecStatus.NO_OUTPUT; - } } int index; diff --git a/sdk/android/src/java/org/webrtc/EglBase14Impl.java b/sdk/android/src/java/org/webrtc/EglBase14Impl.java index 1c519ffde5..202b0daaaf 100644 --- a/sdk/android/src/java/org/webrtc/EglBase14Impl.java +++ b/sdk/android/src/java/org/webrtc/EglBase14Impl.java @@ -170,7 +170,9 @@ public void release() { checkIsNotReleased(); releaseSurface(); detachCurrent(); - EGL14.eglDestroyContext(eglDisplay, eglContext); + synchronized (EglBase.lock) { + EGL14.eglDestroyContext(eglDisplay, eglContext); + } EGL14.eglReleaseThread(); EGL14.eglTerminate(eglDisplay); eglContext = EGL14.EGL_NO_CONTEXT; diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java index 5cfd06597c..f2d236cd37 100644 --- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java +++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java @@ -103,7 +103,7 @@ public void waitForZero() { // --- Initialized on construction. 
private final MediaCodecWrapperFactory mediaCodecWrapperFactory; private final String codecName; - private final VideoCodecType codecType; + private final VideoCodecMimeType codecType; private final Integer surfaceColorFormat; private final Integer yuvColorFormat; private final YuvFormat yuvFormat; @@ -180,7 +180,7 @@ public void waitForZero() { * @throws IllegalArgumentException if colorFormat is unsupported */ public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, - VideoCodecType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, + VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, Map params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) { this.mediaCodecWrapperFactory = mediaCodecWrapperFactory; @@ -240,7 +240,7 @@ private VideoCodecStatus initEncodeInternal() { format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getCodecConfigFramerate()); format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); - if (codecType == VideoCodecType.H264) { + if (codecType == VideoCodecMimeType.H264) { String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID); if (profileLevelId == null) { profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1; @@ -370,7 +370,6 @@ public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2; EncodedImage.Builder builder = EncodedImage.builder() .setCaptureTimeNs(videoFrame.getTimestampNs()) - .setCompleteFrame(true) .setEncodedWidth(videoFrame.getBuffer().getWidth()) .setEncodedHeight(videoFrame.getBuffer().getHeight()) .setRotation(videoFrame.getRotation()); @@ -465,11 +464,11 @@ public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, i public 
ScalingSettings getScalingSettings() { encodeThreadChecker.checkIsOnValidThread(); if (automaticResizeOn) { - if (codecType == VideoCodecType.VP8) { + if (codecType == VideoCodecMimeType.VP8) { final int kLowVp8QpThreshold = 29; final int kHighVp8QpThreshold = 95; return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold); - } else if (codecType == VideoCodecType.H264) { + } else if (codecType == VideoCodecMimeType.H264) { final int kLowH264QpThreshold = 24; final int kHighH264QpThreshold = 37; return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold); @@ -563,7 +562,7 @@ protected void deliverEncodedImage() { } final ByteBuffer frameBuffer; - if (isKeyFrame && codecType == VideoCodecType.H264) { + if (isKeyFrame && (codecType == VideoCodecMimeType.H264 || codecType == VideoCodecMimeType.H265)) { Logging.d(TAG, "Prepending config frame of size " + configBuffer.capacity() + " to output buffer with offset " + info.offset + ", size " + info.size); diff --git a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java index 8223317a59..77db3cda83 100644 --- a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java +++ b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java @@ -10,6 +10,7 @@ package org.webrtc; +import android.annotation.TargetApi; import android.media.MediaCodecInfo; import android.media.MediaCodecInfo.CodecCapabilities; import android.os.Build; @@ -28,7 +29,10 @@ class MediaCodecUtils { static final String INTEL_PREFIX = "OMX.Intel."; static final String NVIDIA_PREFIX = "OMX.Nvidia."; static final String QCOM_PREFIX = "OMX.qcom."; - static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = {"OMX.google.", "OMX.SEC."}; + static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = { + "OMX.google.", "OMX.SEC.", "c2.android"}; + static final String HISI_PREFIX = "OMX.hisi."; + static final String IMG_PREFIX = "OMX.IMG."; // NV12 color format supported by QCOM codec, but not declared in 
MediaCodec - // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h @@ -76,7 +80,7 @@ private static int[] getTextureColorFormats() { return null; } - static boolean codecSupportsType(MediaCodecInfo info, VideoCodecType type) { + static boolean codecSupportsType(MediaCodecInfo info, VideoCodecMimeType type) { for (String mimeType : info.getSupportedTypes()) { if (type.mimeType().equals(mimeType)) { return true; @@ -85,10 +89,11 @@ static boolean codecSupportsType(MediaCodecInfo info, VideoCodecType type) { return false; } - static Map getCodecProperties(VideoCodecType type, boolean highProfile) { + static Map getCodecProperties(VideoCodecMimeType type, boolean highProfile) { switch (type) { case VP8: case VP9: + case H265: return new HashMap(); case H264: return H264Utils.getDefaultH264Params(highProfile); @@ -97,6 +102,36 @@ static Map getCodecProperties(VideoCodecType type, boolean highP } } + static boolean isHardwareAccelerated(MediaCodecInfo info) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + return isHardwareAcceleratedQOrHigher(info); + } + return !isSoftwareOnly(info); + } + + @TargetApi(29) + private static boolean isHardwareAcceleratedQOrHigher(android.media.MediaCodecInfo codecInfo) { + return codecInfo.isHardwareAccelerated(); + } + + static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + return isSoftwareOnlyQOrHigher(codecInfo); + } + String name = codecInfo.getName(); + for (String prefix : SOFTWARE_IMPLEMENTATION_PREFIXES) { + if (name.startsWith(prefix)) { + return true; + } + } + return false; + } + + @TargetApi(29) + private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo codecInfo) { + return codecInfo.isSoftwareOnly(); + } + private MediaCodecUtils() { // This class should not be instantiated. 
} diff --git a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java index 4d18ca4fb4..a4682003db 100644 --- a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java +++ b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -46,7 +46,7 @@ public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext, @Nullable @Override public VideoDecoder createDecoder(VideoCodecInfo codecType) { - VideoCodecType type = VideoCodecType.valueOf(codecType.getName()); + VideoCodecMimeType type = VideoCodecMimeType.valueOf(codecType.getName()); MediaCodecInfo info = findCodecForType(type); if (info == null) { @@ -63,13 +63,14 @@ public VideoDecoder createDecoder(VideoCodecInfo codecType) { public VideoCodecInfo[] getSupportedCodecs() { List supportedCodecInfos = new ArrayList(); // Generate a list of supported codecs in order of preference: - // VP8, VP9, H264 (high profile), and H264 (baseline profile). - for (VideoCodecType type : - new VideoCodecType[] {VideoCodecType.VP8, VideoCodecType.VP9, VideoCodecType.H264}) { + // VP8, VP9, H.265(optional), H264 (high profile), and H264 (baseline profile). 
+ for (VideoCodecMimeType type : new VideoCodecMimeType[] { + VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, VideoCodecMimeType.H264, + VideoCodecMimeType.H265}) { MediaCodecInfo codec = findCodecForType(type); if (codec != null) { String name = type.name(); - if (type == VideoCodecType.H264 && isH264HighProfileSupported(codec)) { + if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); } @@ -82,7 +83,7 @@ public VideoCodecInfo[] getSupportedCodecs() { return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); } - private @Nullable MediaCodecInfo findCodecForType(VideoCodecType type) { + private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) { // HW decoding is not supported on builds before KITKAT. if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { return null; @@ -109,7 +110,7 @@ public VideoCodecInfo[] getSupportedCodecs() { } // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type. - private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) { + private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { String name = info.getName(); if (!MediaCodecUtils.codecSupportsType(info, type)) { return false; diff --git a/sdk/android/src/java/org/webrtc/RefCountDelegate.java b/sdk/android/src/java/org/webrtc/RefCountDelegate.java index 58be7aa0fb..acbc0c3ed9 100644 --- a/sdk/android/src/java/org/webrtc/RefCountDelegate.java +++ b/sdk/android/src/java/org/webrtc/RefCountDelegate.java @@ -45,4 +45,19 @@ public void release() { releaseCallback.run(); } } + + /** + * Tries to retain the object. Can be used in scenarios where it is unknown if the object has + * already been released. Returns true if successful or false if the object was already released. 
+ */ + boolean safeRetain() { + int currentRefCount = refCount.get(); + while (currentRefCount != 0) { + if (refCount.weakCompareAndSet(currentRefCount, currentRefCount + 1)) { + return true; + } + currentRefCount = refCount.get(); + } + return false; + } } diff --git a/sdk/android/src/java/org/webrtc/VideoCodecType.java b/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java similarity index 84% rename from sdk/android/src/java/org/webrtc/VideoCodecType.java rename to sdk/android/src/java/org/webrtc/VideoCodecMimeType.java index 2d4ef9ad64..47f4f955c8 100644 --- a/sdk/android/src/java/org/webrtc/VideoCodecType.java +++ b/sdk/android/src/java/org/webrtc/VideoCodecMimeType.java @@ -11,14 +11,15 @@ package org.webrtc; /** Enumeration of supported video codec types. */ -enum VideoCodecType { +enum VideoCodecMimeType { VP8("video/x-vnd.on2.vp8"), VP9("video/x-vnd.on2.vp9"), - H264("video/avc"); + H264("video/avc"), + H265("video/hevc"); private final String mimeType; - private VideoCodecType(String mimeType) { + private VideoCodecMimeType(String mimeType) { this.mimeType = mimeType; } diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java index 6f575be2ce..6b69b264ea 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java @@ -219,14 +219,14 @@ private static void assertTrue(boolean condition) { // Returns true if an effect of the specified type is available. Functionally // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but // faster as it avoids the expensive OS call to enumerate effects. 
- private static boolean isEffectTypeAvailable(UUID effectType, UUID blackListedUuid) { + private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUuid) { Descriptor[] effects = getAvailableEffects(); if (effects == null) { return false; } for (Descriptor d : effects) { if (d.type.equals(effectType)) { - return !d.uuid.equals(blackListedUuid); + return !d.uuid.equals(blockListedUuid); } } return false; diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java index b7b78f731f..734695937a 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -21,6 +21,7 @@ import android.os.Build; import android.os.Process; import android.support.annotation.Nullable; +import android.support.annotation.RequiresApi; import java.lang.System; import java.nio.ByteBuffer; import java.util.Arrays; @@ -30,7 +31,10 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; @@ -87,13 +91,14 @@ class WebRtcAudioRecord { private @Nullable AudioRecord audioRecord; private @Nullable AudioRecordThread audioThread; + private @Nullable AudioDeviceInfo preferredDevice; - private @Nullable ScheduledExecutorService executor; + private final ScheduledExecutorService executor; private @Nullable ScheduledFuture future; private volatile boolean microphoneMute; - private boolean audioSourceMatchesRecordingSession; - private boolean isAudioConfigVerified; + private final AtomicReference audioSourceMatchesRecordingSessionRef = + new AtomicReference<>(); private byte[] 
emptyBytes; private final @Nullable AudioRecordErrorCallback errorCallback; @@ -177,14 +182,15 @@ public void stopThread() { @CalledByNative WebRtcAudioRecord(Context context, AudioManager audioManager) { - this(context, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT, - null /* errorCallback */, null /* stateCallback */, null /* audioSamplesReadyCallback */, - WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, + DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, + null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), WebRtcAudioEffects.isNoiseSuppressorSupported()); } - public WebRtcAudioRecord(Context context, AudioManager audioManager, int audioSource, - int audioFormat, @Nullable AudioRecordErrorCallback errorCallback, + public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, + AudioManager audioManager, int audioSource, int audioFormat, + @Nullable AudioRecordErrorCallback errorCallback, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { @@ -195,6 +201,7 @@ public WebRtcAudioRecord(Context context, AudioManager audioManager, int audioSo throw new IllegalArgumentException("HW NS not supported"); } this.context = context; + this.executor = scheduler; this.audioManager = audioManager; this.audioSource = audioSource; this.audioFormat = audioFormat; @@ -225,7 +232,7 @@ boolean isNoiseSuppressorSupported() { // checked before using the returned value of isAudioSourceMatchingRecordingSession(). @CalledByNative boolean isAudioConfigVerified() { - return isAudioConfigVerified; + return audioSourceMatchesRecordingSessionRef.get() != null; } // Returns true if verifyAudioConfig() succeeds. 
This value is set after a specific delay when @@ -234,7 +241,8 @@ boolean isAudioConfigVerified() { // enabled in WebRtcAudioRecord to ensure that the returned value is valid. @CalledByNative boolean isAudioSourceMatchingRecordingSession() { - if (!isAudioConfigVerified) { + Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get(); + if (audioSourceMatchesRecordingSession == null) { Logging.w(TAG, "Audio configuration has not yet been verified"); return false; } @@ -296,11 +304,16 @@ private int initRecording(int sampleRate, int channels) { // Throws IllegalArgumentException. audioRecord = createAudioRecordOnMOrHigher( audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + if (preferredDevice != null) { + setPreferredDevice(preferredDevice); + } } else { // Use the old AudioRecord constructor for API levels below 23. // Throws UnsupportedOperationException. audioRecord = createAudioRecordOnLowerThanM( audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); } } catch (IllegalArgumentException | UnsupportedOperationException e) { // Report of exception message is sufficient. Example: "Cannot create AudioRecord". @@ -319,7 +332,7 @@ private int initRecording(int sampleRate, int channels) { // Check number of active recording sessions. Should be zero but we have seen conflict cases // and adding a log for it can help us figure out details about conflicting sessions. final int numActiveRecordingSessions = - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (numActiveRecordingSessions != 0) { // Log the conflict as a warning since initialization did in fact succeed. Most likely, the // upcoming call to startRecording() will fail under these conditions. 
@@ -329,6 +342,23 @@ private int initRecording(int sampleRate, int channels) { return framesPerBuffer; } + /** + * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts + * is valid but may cause a temporary interruption if the audio routing changes. + */ + @RequiresApi(Build.VERSION_CODES.M) + @TargetApi(Build.VERSION_CODES.M) + void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) { + Logging.d( + TAG, "setPreferredDevice " + (preferredDevice != null ? preferredDevice.getId() : null)); + this.preferredDevice = preferredDevice; + if (audioRecord != null) { + if (!audioRecord.setPreferredDevice(preferredDevice)) { + Logging.e(TAG, "setPreferredDevice failed"); + } + } + } + @CalledByNative private boolean startRecording() { Logging.d(TAG, "startRecording"); @@ -349,7 +379,7 @@ private boolean startRecording() { } audioThread = new AudioRecordThread("AudioRecordJavaThread"); audioThread.start(); - scheduleLogRecordingConfigurationsTask(); + scheduleLogRecordingConfigurationsTask(audioRecord); return true; } @@ -364,10 +394,6 @@ private boolean stopRecording() { } future = null; } - if (executor != null) { - executor.shutdownNow(); - executor = null; - } audioThread.stopThread(); if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); @@ -420,8 +446,8 @@ private void logMainParametersExtended() { @TargetApi(Build.VERSION_CODES.N) // Checks the number of active recording sessions and logs the states of all active sessions. - // Returns number of active sessions. - private int logRecordingConfigurations(boolean verifyAudioConfig) { + // Returns number of active sessions. Note that this could occur on arbituary thread. 
+ private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher"); return 0; @@ -429,6 +455,7 @@ private int logRecordingConfigurations(boolean verifyAudioConfig) { if (audioRecord == null) { return 0; } + // Get a list of the currently active audio recording configurations of the device (can be more // than one). An empty list indicates there is no recording active when queried. List configs = audioManager.getActiveRecordingConfigurations(); @@ -441,10 +468,9 @@ private int logRecordingConfigurations(boolean verifyAudioConfig) { // to the AudioRecord instance) is matching what the audio recording configuration lists // as its client parameters. If these do not match, recording might work but under invalid // conditions. - audioSourceMatchesRecordingSession = + audioSourceMatchesRecordingSessionRef.set( verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(), - audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs); - isAudioConfigVerified = true; + audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs)); } } return numActiveRecordingSessions; @@ -479,12 +505,13 @@ private void releaseAudioResources() { audioRecord.release(); audioRecord = null; } + audioSourceMatchesRecordingSessionRef.set(null); } private void reportWebRtcAudioRecordInitError(String errorMessage) { Logging.e(TAG, "Init recording error: " + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (errorCallback != null) { errorCallback.onWebRtcAudioRecordInitError(errorMessage); } @@ -494,7 +521,7 @@ private void reportWebRtcAudioRecordStartError( AudioRecordStartErrorCode errorCode, String errorMessage) { Logging.e(TAG, "Start 
recording error: " + errorCode + ". " + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (errorCallback != null) { errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage); } @@ -542,18 +569,18 @@ private static int getBytesPerSample(int audioFormat) { // Use an ExecutorService to schedule a task after a given delay where the task consists of // checking (by logging) the current status of active recording sessions. - private void scheduleLogRecordingConfigurationsTask() { + private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) { Logging.d(TAG, "scheduleLogRecordingConfigurationsTask"); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { return; } - if (executor != null) { - executor.shutdownNow(); - } - executor = Executors.newSingleThreadScheduledExecutor(); Callable callable = () -> { - logRecordingConfigurations(true /* verifyAudioConfig */); + if (this.audioRecord == audioRecord) { + logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */); + } else { + Logging.d(TAG, "audio record has changed"); + } return "Scheduled task is done"; }; @@ -682,4 +709,22 @@ private static String audioStateToString(int state) { return "INVALID"; } } + + private static final AtomicInteger nextSchedulerId = new AtomicInteger(0); + + static ScheduledExecutorService newDefaultScheduler() { + AtomicInteger nextThreadId = new AtomicInteger(0); + return Executors.newScheduledThreadPool(0, new ThreadFactory() { + /** + * Constructs a new {@code Thread} + */ + @Override + public Thread newThread(Runnable r) { + Thread thread = Executors.defaultThreadFactory().newThread(r); + thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s", + nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement())); + return thread; + } + }); + } } diff --git 
a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index edc9dd179d..3e01b958de 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -71,6 +71,7 @@ private static int getDefaultUsageAttribute() { private ByteBuffer byteBuffer; + private @Nullable final AudioAttributes audioAttributes; private @Nullable AudioTrack audioTrack; private @Nullable AudioTrackThread audioThread; private final VolumeLogger volumeLogger; @@ -162,15 +163,17 @@ public void stopThread() { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { - this(context, audioManager, null /* errorCallback */, null /* stateCallback */); + this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, + null /* stateCallback */); } WebRtcAudioTrack(Context context, AudioManager audioManager, - @Nullable AudioTrackErrorCallback errorCallback, + @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, @Nullable AudioTrackStateCallback stateCallback) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; + this.audioAttributes = audioAttributes; this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.volumeLogger = new VolumeLogger(audioManager); @@ -183,7 +186,7 @@ public void setNativeAudioTrack(long nativeAudioTrack) { } @CalledByNative - private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) { + private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) { threadChecker.checkIsOnValidThread(); Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels @@ -212,14 +215,14 @@ private boolean initPlayout(int sampleRate, int channels, double bufferSizeFacto // can happen that |minBufferSizeInBytes| contains an invalid value. 
if (minBufferSizeInBytes < byteBuffer.capacity()) { reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value."); - return false; + return -1; } // Ensure that prevision audio session was stopped correctly before trying // to create a new AudioTrack. if (audioTrack != null) { reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack."); - return false; + return -1; } try { // Create an AudioTrack object and initialize its associated audio buffer. @@ -231,8 +234,8 @@ private boolean initPlayout(int sampleRate, int channels, double bufferSizeFacto // supersede the notion of stream types for defining the behavior of audio playback, // and to allow certain platforms or routing policies to use this information for more // refined volume or routing decisions. - audioTrack = - createAudioTrackOnLollipopOrHigher(sampleRate, channelConfig, minBufferSizeInBytes); + audioTrack = createAudioTrackOnLollipopOrHigher( + sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes); } else { // Use default constructor for API levels below 21. 
audioTrack = @@ -241,7 +244,7 @@ private boolean initPlayout(int sampleRate, int channels, double bufferSizeFacto } catch (IllegalArgumentException e) { reportWebRtcAudioTrackInitError(e.getMessage()); releaseAudioResources(); - return false; + return -1; } // It can happen that an AudioTrack is created but it was not successfully @@ -250,11 +253,11 @@ private boolean initPlayout(int sampleRate, int channels, double bufferSizeFacto if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) { reportWebRtcAudioTrackInitError("Initialization of audio track failed."); releaseAudioResources(); - return false; + return -1; } logMainParameters(); logMainParametersExtended(); - return true; + return minBufferSizeInBytes; } @CalledByNative @@ -383,8 +386,8 @@ private void logMainParameters() { // It allows certain platforms or routing policies to use this information for more // refined volume or routing decisions. @TargetApi(Build.VERSION_CODES.LOLLIPOP) - private static AudioTrack createAudioTrackOnLollipopOrHigher( - int sampleRateInHz, int channelConfig, int bufferSizeInBytes) { + private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz, + int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { Logging.d(TAG, "createAudioTrackOnLollipopOrHigher"); // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control // performance when Android O is supported. Add some logging in the mean time. 
@@ -394,11 +397,26 @@ private static AudioTrack createAudioTrackOnLollipopOrHigher( if (sampleRateInHz != nativeOutputSampleRate) { Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native"); } + + AudioAttributes.Builder attributesBuilder = + new AudioAttributes.Builder() + .setUsage(DEFAULT_USAGE) + .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH); + + if (overrideAttributes != null) { + if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) { + attributesBuilder.setUsage(overrideAttributes.getUsage()); + } + if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) { + attributesBuilder.setContentType(overrideAttributes.getContentType()); + } + + attributesBuilder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy()) + .setFlags(overrideAttributes.getFlags()); + } + // Create an audio track where the audio usage is for VoIP and the content type is speech. - return new AudioTrack(new AudioAttributes.Builder() - .setUsage(DEFAULT_USAGE) - .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH) - .build(), + return new AudioTrack(attributesBuilder.build(), new AudioFormat.Builder() .setEncoding(AudioFormat.ENCODING_PCM_16BIT) .setSampleRate(sampleRateInHz) @@ -423,6 +441,14 @@ private void logBufferSizeInFrames() { } } + @CalledByNative + private int getBufferSizeInFrames() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return audioTrack.getBufferSizeInFrames(); + } + return -1; + } + private void logBufferCapacityInFrames() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { Logging.d(TAG, diff --git a/sdk/android/src/jni/android_media_codec_common.h b/sdk/android/src/jni/android_media_codec_common.h deleted file mode 100644 index be2eb19ba6..0000000000 --- a/sdk/android/src/jni/android_media_codec_common.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef SDK_ANDROID_SRC_JNI_ANDROID_MEDIA_CODEC_COMMON_H_ -#define SDK_ANDROID_SRC_JNI_ANDROID_MEDIA_CODEC_COMMON_H_ - -#include - -#include "rtc_base/logging.h" -#include "rtc_base/thread.h" -#include "sdk/android/src/jni/jni_helpers.h" - -namespace webrtc { -namespace jni { - -// Uncomment this define to enable verbose logging for every encoded/decoded -// video frame. -//#define TRACK_BUFFER_TIMING - -#define TAG_COMMON "MediaCodecVideo" - -// Color formats supported by encoder or decoder - should include all -// colors from supportedColorList in MediaCodecVideoEncoder.java and -// MediaCodecVideoDecoder.java. Supported color format set in encoder -// and decoder could be different. -enum COLOR_FORMATTYPE { - COLOR_FormatYUV420Planar = 0x13, - COLOR_FormatYUV420SemiPlanar = 0x15, - COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00, - // NV12 color format supported by QCOM codec, but not declared in MediaCodec - - // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h - // This format is presumably similar to COLOR_FormatYUV420SemiPlanar, - // but requires some (16, 32?) byte alignment. - COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01, - COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02, - COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03, - COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04 -}; - -// Arbitrary interval to poll the codec for new outputs. -enum { kMediaCodecPollMs = 10 }; -// Arbitrary interval to poll at when there should be no more frames. -enum { kMediaCodecPollNoFramesMs = 100 }; -// Media codec maximum output buffer ready timeout. 
-enum { kMediaCodecTimeoutMs = 1000 }; -// Interval to print codec statistics (bitrate, fps, encoding/decoding time). -enum { kMediaCodecStatisticsIntervalMs = 3000 }; -// Maximum amount of pending frames for VP8 decoder. -enum { kMaxPendingFramesVp8 = 1 }; -// Maximum amount of pending frames for VP9 decoder. -enum { kMaxPendingFramesVp9 = 1 }; -// Maximum amount of pending frames for H.264 decoder. -enum { kMaxPendingFramesH264 = 4 }; -// Maximum amount of decoded frames for which per-frame logging is enabled. -enum { kMaxDecodedLogFrames = 10 }; -// Maximum amount of encoded frames for which per-frame logging is enabled. -enum { kMaxEncodedLogFrames = 10 }; - -static inline void AllowBlockingCalls() { - rtc::Thread* current_thread = rtc::Thread::Current(); - if (current_thread != NULL) - current_thread->DEPRECATED_AllowBlockingCalls(); -} - -// Checks for any Java exception, prints stack backtrace and clears -// currently thrown exception. -static inline bool CheckException(JNIEnv* jni) { - if (jni->ExceptionCheck()) { - RTC_LOG_TAG(rtc::LS_ERROR, TAG_COMMON) << "Java JNI exception."; - jni->ExceptionDescribe(); - jni->ExceptionClear(); - return true; - } - return false; -} - -} // namespace jni -} // namespace webrtc - -#endif // SDK_ANDROID_SRC_JNI_ANDROID_MEDIA_CODEC_COMMON_H_ diff --git a/sdk/android/src/jni/android_media_decoder.cc b/sdk/android/src/jni/android_media_decoder.cc deleted file mode 100644 index 94ce42d2e0..0000000000 --- a/sdk/android/src/jni/android_media_decoder.cc +++ /dev/null @@ -1,788 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include -#include -#include - -#include "api/scoped_refptr.h" -#include "api/video_codecs/sdp_video_format.h" -#include "common_video/h264/h264_bitstream_parser.h" -#include "common_video/include/i420_buffer_pool.h" -#include "media/base/media_constants.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/utility/vp8_header_parser.h" -#include "rtc_base/bind.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" -#include "sdk/android/generated_video_jni/MediaCodecVideoDecoder_jni.h" -#include "sdk/android/native_api/jni/java_types.h" -#include "sdk/android/src/jni/android_media_codec_common.h" -#include "sdk/android/src/jni/video_frame.h" -#include "third_party/libyuv/include/libyuv/convert.h" -#include "third_party/libyuv/include/libyuv/planar_functions.h" -#include "third_party/libyuv/include/libyuv/video_common.h" - -using rtc::Bind; -using rtc::ThreadManager; -namespace webrtc { -namespace jni { - -// Logging macros. -#define TAG_DECODER "MediaCodecVideoDecoder" -#ifdef TRACK_BUFFER_TIMING -#define ALOGV(...) \ - __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) -#else -#define ALOGV(...) 
-#endif -#define ALOGD RTC_LOG_TAG(rtc::LS_INFO, TAG_DECODER) -#define ALOGW RTC_LOG_TAG(rtc::LS_WARNING, TAG_DECODER) -#define ALOGE RTC_LOG_TAG(rtc::LS_ERROR, TAG_DECODER) - -enum { kMaxWarningLogFrames = 2 }; - -class MediaCodecVideoDecoder : public VideoDecoder, public rtc::MessageHandler { - public: - explicit MediaCodecVideoDecoder(JNIEnv* jni, - VideoCodecType codecType, - bool use_surface); - ~MediaCodecVideoDecoder() override; - - int32_t InitDecode(const VideoCodec* codecSettings, - int32_t numberOfCores) override; - - int32_t Decode(const EncodedImage& inputImage, - bool missingFrames, - int64_t renderTimeMs = -1) override; - - int32_t RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) override; - - int32_t Release() override; - - bool PrefersLateDecoding() const override { return true; } - - // rtc::MessageHandler implementation. - void OnMessage(rtc::Message* msg) override; - - const char* ImplementationName() const override; - - private: - // CHECK-fail if not running on |codec_thread_|. - void CheckOnCodecThread(); - - int32_t InitDecodeOnCodecThread(); - int32_t ResetDecodeOnCodecThread(); - int32_t ReleaseOnCodecThread(); - int32_t DecodeOnCodecThread(const EncodedImage& inputImage); - // Deliver any outputs pending in the MediaCodec to our |callback_| and return - // true on success. - bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); - int32_t ProcessHWErrorOnCodecThread(); - void EnableFrameLogOnWarning(); - void ResetVariables(); - - // Type of video codec. - VideoCodecType codecType_; - - bool key_frame_required_; - bool inited_; - bool sw_fallback_required_; - const bool use_surface_; - VideoCodec codec_; - I420BufferPool decoded_frame_pool_; - DecodedImageCallback* callback_; - int frames_received_; // Number of frames received by decoder. - int frames_decoded_; // Number of frames decoded by decoder. - // Number of decoded frames for which log information is displayed. 
- int frames_decoded_logged_; - int64_t start_time_ms_; // Start time for statistics. - int current_frames_; // Number of frames in the current statistics interval. - int current_bytes_; // Encoded bytes in the current statistics interval. - int current_decoding_time_ms_; // Overall decoding time in the current second - int current_delay_time_ms_; // Overall delay time in the current second. - int32_t max_pending_frames_; // Maximum number of pending input frames. - H264BitstreamParser h264_bitstream_parser_; - std::deque> pending_frame_qps_; - - // State that is constant for the lifetime of this object once the ctor - // returns. - std::unique_ptr - codec_thread_; // Thread on which to operate MediaCodec. - ScopedJavaGlobalRef j_media_codec_video_decoder_; - - // Global references; must be deleted in Release(). - std::vector> input_buffers_; -}; - -MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni, - VideoCodecType codecType, - bool use_surface) - : codecType_(codecType), - key_frame_required_(true), - inited_(false), - sw_fallback_required_(false), - use_surface_(use_surface), - codec_thread_(rtc::Thread::Create()), - j_media_codec_video_decoder_( - jni, - Java_MediaCodecVideoDecoder_Constructor(jni)) { - codec_thread_->SetName("MediaCodecVideoDecoder", NULL); - RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; - - ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; - memset(&codec_, 0, sizeof(codec_)); - AllowBlockingCalls(); -} - -MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { - // Call Release() to ensure no more callbacks to us after we are deleted. - Release(); -} - -int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, - int32_t numberOfCores) { - ALOGD << "InitDecode."; - if (inst == NULL) { - ALOGE << "NULL VideoCodec instance"; - return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; - } - // Factory should guard against other codecs being used with us. 
- RTC_CHECK(inst->codecType == codecType_) - << "Unsupported codec " << inst->codecType << " for " << codecType_; - - if (sw_fallback_required_) { - ALOGE << "InitDecode() - fallback to SW decoder"; - return WEBRTC_VIDEO_CODEC_OK; - } - // Save VideoCodec instance for later. - if (&codec_ != inst) { - codec_ = *inst; - } - // If maxFramerate is not set then assume 30 fps. - codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; - - // Call Java init. - return codec_thread_->Invoke( - RTC_FROM_HERE, - Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); -} - -void MediaCodecVideoDecoder::ResetVariables() { - CheckOnCodecThread(); - - key_frame_required_ = true; - frames_received_ = 0; - frames_decoded_ = 0; - frames_decoded_logged_ = kMaxDecodedLogFrames; - start_time_ms_ = rtc::TimeMillis(); - current_frames_ = 0; - current_bytes_ = 0; - current_decoding_time_ms_ = 0; - current_delay_time_ms_ = 0; - pending_frame_qps_.clear(); -} - -int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { - CheckOnCodecThread(); - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - ALOGD << "InitDecodeOnCodecThread Type: " << static_cast(codecType_) - << ". " << codec_.width << " x " << codec_.height - << ". Fps: " << static_cast(codec_.maxFramerate); - - // Release previous codec first if it was allocated before. 
- int ret_val = ReleaseOnCodecThread(); - if (ret_val < 0) { - ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; - sw_fallback_required_ = true; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - ResetVariables(); - - ScopedJavaLocalRef j_video_codec_enum = - Java_VideoCodecType_fromNativeIndex(jni, codecType_); - bool success = Java_MediaCodecVideoDecoder_initDecode( - jni, j_media_codec_video_decoder_, j_video_codec_enum, codec_.width, - codec_.height); - - if (CheckException(jni) || !success) { - ALOGE << "Codec initialization error - fallback to SW codec."; - sw_fallback_required_ = true; - return WEBRTC_VIDEO_CODEC_ERROR; - } - inited_ = true; - - switch (codecType_) { - case kVideoCodecVP8: - max_pending_frames_ = kMaxPendingFramesVp8; - break; - case kVideoCodecVP9: - max_pending_frames_ = kMaxPendingFramesVp9; - break; - case kVideoCodecH264: - max_pending_frames_ = kMaxPendingFramesH264; - break; - default: - max_pending_frames_ = 0; - } - ALOGD << "Maximum amount of pending frames: " << max_pending_frames_; - - ScopedJavaLocalRef input_buffers = - Java_MediaCodecVideoDecoder_getInputBuffers(jni, - j_media_codec_video_decoder_); - input_buffers_ = JavaToNativeVector>( - jni, input_buffers, [](JNIEnv* env, const JavaRef& o) { - return ScopedJavaGlobalRef(env, o); - }); - - codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); - - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() { - CheckOnCodecThread(); - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - ALOGD << "ResetDecodeOnCodecThread Type: " << static_cast(codecType_) - << ". " << codec_.width << " x " << codec_.height; - ALOGD << " Frames received: " << frames_received_ - << ". 
Frames decoded: " << frames_decoded_; - - inited_ = false; - rtc::ThreadManager::Clear(this); - ResetVariables(); - - Java_MediaCodecVideoDecoder_reset(jni, j_media_codec_video_decoder_, - codec_.width, codec_.height); - - if (CheckException(jni)) { - ALOGE << "Soft reset error - fallback to SW codec."; - sw_fallback_required_ = true; - return WEBRTC_VIDEO_CODEC_ERROR; - } - inited_ = true; - - codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); - - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t MediaCodecVideoDecoder::Release() { - ALOGD << "DecoderRelease request"; - return codec_thread_->Invoke( - RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); -} - -int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { - if (!inited_) { - return WEBRTC_VIDEO_CODEC_OK; - } - CheckOnCodecThread(); - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_ - << ". Frames decoded: " << frames_decoded_; - ScopedLocalRefFrame local_ref_frame(jni); - input_buffers_.clear(); - Java_MediaCodecVideoDecoder_release(jni, j_media_codec_video_decoder_); - inited_ = false; - rtc::ThreadManager::Clear(this); - if (CheckException(jni)) { - ALOGE << "Decoder release exception"; - return WEBRTC_VIDEO_CODEC_ERROR; - } - ALOGD << "DecoderReleaseOnCodecThread done"; - return WEBRTC_VIDEO_CODEC_OK; -} - -void MediaCodecVideoDecoder::CheckOnCodecThread() { - RTC_CHECK(codec_thread_.get() == ThreadManager::Instance()->CurrentThread()) - << "Running on wrong thread!"; -} - -void MediaCodecVideoDecoder::EnableFrameLogOnWarning() { - // Log next 2 output frames. 
- frames_decoded_logged_ = - std::max(frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames); -} - -int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() { - CheckOnCodecThread(); - int ret_val = ReleaseOnCodecThread(); - if (ret_val < 0) { - ALOGE << "ProcessHWError: Release failure"; - } - if (codecType_ == kVideoCodecH264) { - // For now there is no SW H.264 which can be used as fallback codec. - // So try to restart hw codec for now. - ret_val = InitDecodeOnCodecThread(); - ALOGE << "Reset H.264 codec done. Status: " << ret_val; - if (ret_val == WEBRTC_VIDEO_CODEC_OK) { - // H.264 codec was succesfully reset - return regular error code. - return WEBRTC_VIDEO_CODEC_ERROR; - } else { - // Fail to restart H.264 codec - return error code which should stop the - // call. - return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; - } - } else { - sw_fallback_required_ = true; - ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"; - return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; - } -} - -int32_t MediaCodecVideoDecoder::Decode( - const EncodedImage& inputImage, - bool missingFrames, - int64_t renderTimeMs) { - if (sw_fallback_required_) { - ALOGE << "Decode() - fallback to SW codec"; - return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; - } - if (callback_ == NULL) { - ALOGE << "Decode() - callback_ is NULL"; - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - if (inputImage.data() == NULL && inputImage.size() > 0) { - ALOGE << "Decode() - inputImage is incorrect"; - return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; - } - if (!inited_) { - ALOGE << "Decode() - decoder is not initialized"; - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - // Check if encoded frame dimension has changed. 
- if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && - (inputImage._encodedWidth != codec_.width || - inputImage._encodedHeight != codec_.height)) { - ALOGW << "Input resolution changed from " << codec_.width << " x " - << codec_.height << " to " << inputImage._encodedWidth << " x " - << inputImage._encodedHeight; - codec_.width = inputImage._encodedWidth; - codec_.height = inputImage._encodedHeight; - int32_t ret; - if (use_surface_ && - (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264)) { - // Soft codec reset - only for surface decoding. - ret = codec_thread_->Invoke( - RTC_FROM_HERE, - Bind(&MediaCodecVideoDecoder::ResetDecodeOnCodecThread, this)); - } else { - // Hard codec reset. - ret = InitDecode(&codec_, 1); - } - if (ret < 0) { - ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec"; - sw_fallback_required_ = true; - return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; - } - } - - // Always start with a complete key frame. - if (key_frame_required_) { - if (inputImage._frameType != VideoFrameType::kVideoFrameKey) { - ALOGE << "Decode() - key frame is required"; - return WEBRTC_VIDEO_CODEC_ERROR; - } - if (!inputImage._completeFrame) { - ALOGE << "Decode() - complete frame is required"; - return WEBRTC_VIDEO_CODEC_ERROR; - } - key_frame_required_ = false; - } - if (inputImage.size() == 0) { - return WEBRTC_VIDEO_CODEC_ERROR; - } - - return codec_thread_->Invoke( - RTC_FROM_HERE, - Bind(&MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage)); -} - -int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( - const EncodedImage& inputImage) { - CheckOnCodecThread(); - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - - // Try to drain the decoder and wait until output is not too - // much behind the input. - if (codecType_ == kVideoCodecH264 && - frames_received_ > frames_decoded_ + max_pending_frames_) { - // Print warning for H.264 only - for VP8/VP9 one frame delay is ok. 
- ALOGW << "Decoder is too far behind. Try to drain. Received: " - << frames_received_ << ". Decoded: " << frames_decoded_; - EnableFrameLogOnWarning(); - } - const int64_t drain_start = rtc::TimeMillis(); - while ((frames_received_ > frames_decoded_ + max_pending_frames_) && - (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) { - if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { - ALOGE << "DeliverPendingOutputs error. Frames received: " - << frames_received_ << ". Frames decoded: " << frames_decoded_; - return ProcessHWErrorOnCodecThread(); - } - } - if (frames_received_ > frames_decoded_ + max_pending_frames_) { - ALOGE << "Output buffer dequeue timeout. Frames received: " - << frames_received_ << ". Frames decoded: " << frames_decoded_; - return ProcessHWErrorOnCodecThread(); - } - - // Get input buffer. - int j_input_buffer_index = Java_MediaCodecVideoDecoder_dequeueInputBuffer( - jni, j_media_codec_video_decoder_); - if (CheckException(jni) || j_input_buffer_index < 0) { - ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index - << ". Retry DeliverPendingOutputs."; - EnableFrameLogOnWarning(); - // Try to drain the decoder. - if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { - ALOGE << "DeliverPendingOutputs error. Frames received: " - << frames_received_ << ". Frames decoded: " << frames_decoded_; - return ProcessHWErrorOnCodecThread(); - } - // Try dequeue input buffer one last time. - j_input_buffer_index = Java_MediaCodecVideoDecoder_dequeueInputBuffer( - jni, j_media_codec_video_decoder_); - if (CheckException(jni) || j_input_buffer_index < 0) { - ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; - return ProcessHWErrorOnCodecThread(); - } - } - - // Copy encoded data to Java ByteBuffer. 
- jobject j_input_buffer = input_buffers_[j_input_buffer_index].obj(); - uint8_t* buffer = - reinterpret_cast(jni->GetDirectBufferAddress(j_input_buffer)); - RTC_CHECK(buffer) << "Indirect buffer??"; - size_t buffer_capacity = - rtc::dchecked_cast(jni->GetDirectBufferCapacity(j_input_buffer)); - if (CheckException(jni) || buffer_capacity < inputImage.size()) { - ALOGE << "Input frame size " << inputImage.size() - << " is bigger than buffer size " << buffer_capacity; - return ProcessHWErrorOnCodecThread(); - } - jlong presentation_timestamp_us = static_cast( - static_cast(frames_received_) * 1000000 / codec_.maxFramerate); - memcpy(buffer, inputImage.data(), inputImage.size()); - - if (frames_decoded_ < frames_decoded_logged_) { - ALOGD << "Decoder frame in # " << frames_received_ - << ". Type: " << static_cast(inputImage._frameType) - << ". Buffer # " << j_input_buffer_index - << ". TS: " << presentation_timestamp_us / 1000 - << ". Size: " << inputImage.size(); - } - - // Save input image timestamps for later output. - frames_received_++; - current_bytes_ += inputImage.size(); - absl::optional qp; - if (codecType_ == kVideoCodecVP8) { - int qp_int; - if (vp8::GetQp(inputImage.data(), inputImage.size(), &qp_int)) { - qp = qp_int; - } - } else if (codecType_ == kVideoCodecH264) { - h264_bitstream_parser_.ParseBitstream(inputImage.data(), inputImage.size()); - int qp_int; - if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) { - qp = qp_int; - } - } - pending_frame_qps_.push_back(qp); - - // Feed input to decoder. 
- bool success = Java_MediaCodecVideoDecoder_queueInputBuffer( - jni, j_media_codec_video_decoder_, j_input_buffer_index, - static_cast(inputImage.size()), presentation_timestamp_us, - static_cast(inputImage.Timestamp()), inputImage.ntp_time_ms_); - if (CheckException(jni) || !success) { - ALOGE << "queueInputBuffer error"; - return ProcessHWErrorOnCodecThread(); - } - - // Try to drain the decoder - if (!DeliverPendingOutputs(jni, 0)) { - ALOGE << "DeliverPendingOutputs error"; - return ProcessHWErrorOnCodecThread(); - } - - return WEBRTC_VIDEO_CODEC_OK; -} - -bool MediaCodecVideoDecoder::DeliverPendingOutputs(JNIEnv* jni, - int dequeue_timeout_ms) { - CheckOnCodecThread(); - if (frames_received_ <= frames_decoded_) { - // No need to query for output buffers - decoder is drained. - return true; - } - // Get decoder output. - ScopedJavaLocalRef j_decoder_output_buffer = - (use_surface_ ? &Java_MediaCodecVideoDecoder_dequeueTextureBuffer - : &Java_MediaCodecVideoDecoder_dequeueOutputBuffer)( - jni, j_media_codec_video_decoder_, dequeue_timeout_ms); - if (CheckException(jni)) { - ALOGE << "dequeueOutputBuffer() error"; - return false; - } - if (IsNull(jni, j_decoder_output_buffer)) { - // No decoded frame ready. - return true; - } - - // Get decoded video frame properties. - int color_format = Java_MediaCodecVideoDecoder_getColorFormat( - jni, j_media_codec_video_decoder_); - int width = - Java_MediaCodecVideoDecoder_getWidth(jni, j_media_codec_video_decoder_); - int height = - Java_MediaCodecVideoDecoder_getHeight(jni, j_media_codec_video_decoder_); - - rtc::scoped_refptr frame_buffer; - int64_t presentation_timestamps_ms = 0; - int64_t output_timestamps_ms = 0; - int64_t output_ntp_timestamps_ms = 0; - int decode_time_ms = 0; - int64_t frame_delayed_ms = 0; - if (use_surface_) { - // Extract data from Java DecodedTextureBuffer. 
- presentation_timestamps_ms = - Java_DecodedTextureBuffer_getPresentationTimestampMs( - jni, j_decoder_output_buffer); - output_timestamps_ms = - Java_DecodedTextureBuffer_getTimeStampMs(jni, j_decoder_output_buffer); - output_ntp_timestamps_ms = Java_DecodedTextureBuffer_getNtpTimestampMs( - jni, j_decoder_output_buffer); - decode_time_ms = - Java_DecodedTextureBuffer_getDecodeTimeMs(jni, j_decoder_output_buffer); - - ScopedJavaLocalRef j_video_frame_buffer = - Java_DecodedTextureBuffer_getVideoFrameBuffer(jni, - j_decoder_output_buffer); - // |video_frame_buffer| == null represents a dropped frame. - if (!j_video_frame_buffer.is_null()) { - frame_delayed_ms = Java_DecodedTextureBuffer_getFrameDelayMs( - jni, j_decoder_output_buffer); - frame_buffer = AndroidVideoBuffer::Adopt(jni, j_video_frame_buffer); - } else { - EnableFrameLogOnWarning(); - } - } else { - // Extract data from Java ByteBuffer and create output yuv420 frame - - // for non surface decoding only. - int stride = Java_MediaCodecVideoDecoder_getStride( - jni, j_media_codec_video_decoder_); - const int slice_height = Java_MediaCodecVideoDecoder_getSliceHeight( - jni, j_media_codec_video_decoder_); - const int output_buffer_index = - Java_DecodedOutputBuffer_getIndex(jni, j_decoder_output_buffer); - const int output_buffer_offset = - Java_DecodedOutputBuffer_getOffset(jni, j_decoder_output_buffer); - const int output_buffer_size = - Java_DecodedOutputBuffer_getSize(jni, j_decoder_output_buffer); - presentation_timestamps_ms = - Java_DecodedOutputBuffer_getPresentationTimestampMs( - jni, j_decoder_output_buffer); - output_timestamps_ms = - Java_DecodedOutputBuffer_getTimestampMs(jni, j_decoder_output_buffer); - output_ntp_timestamps_ms = Java_DecodedOutputBuffer_getNtpTimestampMs( - jni, j_decoder_output_buffer); - - decode_time_ms = - Java_DecodedOutputBuffer_getDecodeTimeMs(jni, j_decoder_output_buffer); - RTC_CHECK_GE(slice_height, height); - - if (output_buffer_size < width * height * 3 / 2) { - 
ALOGE << "Insufficient output buffer size: " << output_buffer_size; - return false; - } - if (output_buffer_size < stride * height * 3 / 2 && - slice_height == height && stride > width) { - // Some codecs (Exynos) incorrectly report stride information for - // output byte buffer, so actual stride value need to be corrected. - stride = output_buffer_size * 2 / (height * 3); - } - ScopedJavaLocalRef output_buffers = - Java_MediaCodecVideoDecoder_getOutputBuffers( - jni, j_media_codec_video_decoder_); - jobject output_buffer = - jni->GetObjectArrayElement(output_buffers.obj(), output_buffer_index); - uint8_t* payload = - reinterpret_cast(jni->GetDirectBufferAddress(output_buffer)); - if (CheckException(jni)) { - return false; - } - payload += output_buffer_offset; - - // Create yuv420 frame. - rtc::scoped_refptr i420_buffer = - decoded_frame_pool_.CreateBuffer(width, height); - if (color_format == COLOR_FormatYUV420Planar) { - RTC_CHECK_EQ(0, stride % 2); - const int uv_stride = stride / 2; - const uint8_t* y_ptr = payload; - const uint8_t* u_ptr = y_ptr + stride * slice_height; - - // Note that the case with odd |slice_height| is handled in a special way. - // The chroma height contained in the payload is rounded down instead of - // up, making it one row less than what we expect in WebRTC. Therefore, we - // have to duplicate the last chroma rows for this case. Also, the offset - // between the Y plane and the U plane is unintuitive for this case. See - // http://bugs.webrtc.org/6651 for more info. - const int chroma_width = (width + 1) / 2; - const int chroma_height = - (slice_height % 2 == 0) ? 
(height + 1) / 2 : height / 2; - const int u_offset = uv_stride * slice_height / 2; - const uint8_t* v_ptr = u_ptr + u_offset; - libyuv::CopyPlane(y_ptr, stride, i420_buffer->MutableDataY(), - i420_buffer->StrideY(), width, height); - libyuv::CopyPlane(u_ptr, uv_stride, i420_buffer->MutableDataU(), - i420_buffer->StrideU(), chroma_width, chroma_height); - libyuv::CopyPlane(v_ptr, uv_stride, i420_buffer->MutableDataV(), - i420_buffer->StrideV(), chroma_width, chroma_height); - if (slice_height % 2 == 1) { - RTC_CHECK_EQ(height, slice_height); - // Duplicate the last chroma rows. - uint8_t* u_last_row_ptr = i420_buffer->MutableDataU() + - chroma_height * i420_buffer->StrideU(); - memcpy(u_last_row_ptr, u_last_row_ptr - i420_buffer->StrideU(), - i420_buffer->StrideU()); - uint8_t* v_last_row_ptr = i420_buffer->MutableDataV() + - chroma_height * i420_buffer->StrideV(); - memcpy(v_last_row_ptr, v_last_row_ptr - i420_buffer->StrideV(), - i420_buffer->StrideV()); - } - } else { - // All other supported formats are nv12. - const uint8_t* y_ptr = payload; - const uint8_t* uv_ptr = y_ptr + stride * slice_height; - libyuv::NV12ToI420(y_ptr, stride, uv_ptr, stride, - i420_buffer->MutableDataY(), i420_buffer->StrideY(), - i420_buffer->MutableDataU(), i420_buffer->StrideU(), - i420_buffer->MutableDataV(), i420_buffer->StrideV(), - width, height); - } - frame_buffer = i420_buffer; - - // Return output byte buffer back to codec. - Java_MediaCodecVideoDecoder_returnDecodedOutputBuffer( - jni, j_media_codec_video_decoder_, output_buffer_index); - if (CheckException(jni)) { - ALOGE << "returnDecodedOutputBuffer error"; - return false; - } - } - if (frames_decoded_ < frames_decoded_logged_) { - ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << " x " - << height << ". Color: " << color_format - << ". TS: " << presentation_timestamps_ms - << ". DecTime: " << static_cast(decode_time_ms) - << ". 
DelayTime: " << static_cast(frame_delayed_ms); - } - - // Calculate and print decoding statistics - every 3 seconds. - frames_decoded_++; - current_frames_++; - current_decoding_time_ms_ += decode_time_ms; - current_delay_time_ms_ += frame_delayed_ms; - int statistic_time_ms = rtc::TimeMillis() - start_time_ms_; - if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && - current_frames_ > 0) { - int current_bitrate = current_bytes_ * 8 / statistic_time_ms; - int current_fps = - (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms; - ALOGD << "Frames decoded: " << frames_decoded_ - << ". Received: " << frames_received_ - << ". Bitrate: " << current_bitrate - << " kbps" - ". Fps: " - << current_fps - << ". DecTime: " << (current_decoding_time_ms_ / current_frames_) - << ". DelayTime: " << (current_delay_time_ms_ / current_frames_) - << " for last " << statistic_time_ms << " ms."; - start_time_ms_ = rtc::TimeMillis(); - current_frames_ = 0; - current_bytes_ = 0; - current_decoding_time_ms_ = 0; - current_delay_time_ms_ = 0; - } - - // If the frame was dropped, frame_buffer is left as nullptr. 
- if (frame_buffer) { - VideoFrame decoded_frame = VideoFrame::Builder() - .set_video_frame_buffer(frame_buffer) - .set_timestamp_rtp(0) - .set_timestamp_ms(0) - .set_rotation(kVideoRotation_0) - .build(); - decoded_frame.set_timestamp(output_timestamps_ms); - decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); - - absl::optional qp = pending_frame_qps_.front(); - pending_frame_qps_.pop_front(); - callback_->Decoded(decoded_frame, decode_time_ms, qp); - } - return true; -} - -int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) { - callback_ = callback; - return WEBRTC_VIDEO_CODEC_OK; -} - -void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) { - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - if (!inited_) { - return; - } - // We only ever send one message to |this| directly (not through a Bind()'d - // functor), so expect no ID/data. - RTC_CHECK(!msg->message_id) << "Unexpected message!"; - RTC_CHECK(!msg->pdata) << "Unexpected message!"; - CheckOnCodecThread(); - - if (!DeliverPendingOutputs(jni, 0)) { - ALOGE << "OnMessage: DeliverPendingOutputs error"; - ProcessHWErrorOnCodecThread(); - return; - } - codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); -} - -const char* MediaCodecVideoDecoder::ImplementationName() const { - return "MediaCodec"; -} - -static jlong JNI_MediaCodecVideoDecoder_CreateDecoder( - JNIEnv* env, - const JavaParamRef& codec, - jboolean use_surface) { - ScopedLocalRefFrame local_ref_frame(env); - return jlongFromPointer(new MediaCodecVideoDecoder( - env, PayloadStringToCodecType(JavaToNativeString(env, codec)), - use_surface)); -} - -} // namespace jni -} // namespace webrtc diff --git a/sdk/android/src/jni/android_media_encoder.cc b/sdk/android/src/jni/android_media_encoder.cc deleted file mode 100644 index 4b4ad10dc5..0000000000 --- a/sdk/android/src/jni/android_media_encoder.cc +++ /dev/null @@ -1,1250 +0,0 @@ -/* - * 
Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include -#include -#include -#include - -#include "absl/memory/memory.h" -#include "api/task_queue/queued_task.h" -#include "api/task_queue/task_queue_base.h" -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_encoder.h" -#include "common_video/h264/h264_bitstream_parser.h" -#include "common_video/h264/h264_common.h" -#include "common_video/h264/profile_level_id.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" -#include "media/engine/internal_encoder_factory.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/utility/quality_scaler.h" -#include "modules/video_coding/utility/vp8_header_parser.h" -#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" -#include "rtc_base/bind.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/synchronization/sequence_checker.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/weak_ptr.h" -#include "sdk/android/generated_video_jni/MediaCodecVideoEncoder_jni.h" -#include "sdk/android/native_api/jni/java_types.h" -#include "sdk/android/src/jni/android_media_codec_common.h" -#include "sdk/android/src/jni/jni_helpers.h" -#include "sdk/android/src/jni/video_codec_info.h" -#include "sdk/android/src/jni/video_frame.h" -#include "system_wrappers/include/field_trial.h" -#include "third_party/libyuv/include/libyuv/convert.h" -#include "third_party/libyuv/include/libyuv/convert_from.h" -#include "third_party/libyuv/include/libyuv/video_common.h" - 
-using rtc::Bind; -using rtc::ThreadManager; - -namespace webrtc { -namespace jni { - -// Maximum supported HW video encoder fps. -#define MAX_VIDEO_FPS 30 -// Maximum allowed fps value in SetRates() call. -#define MAX_ALLOWED_VIDEO_FPS 60 -// Maximum allowed frames in encoder input queue. -#define MAX_ENCODER_Q_SIZE 2 -// Maximum amount of dropped frames caused by full encoder queue - exceeding -// this threshold means that encoder probably got stuck and need to be reset. -#define ENCODER_STALL_FRAMEDROP_THRESHOLD 60 - -// Logging macros. -#define TAG_ENCODER "MediaCodecVideoEncoder" -#ifdef TRACK_BUFFER_TIMING -#define ALOGV(...) -__android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__) -#else -#define ALOGV(...) -#endif -#define ALOGD RTC_LOG_TAG(rtc::LS_INFO, TAG_ENCODER) -#define ALOGW RTC_LOG_TAG(rtc::LS_WARNING, TAG_ENCODER) -#define ALOGE RTC_LOG_TAG(rtc::LS_ERROR, TAG_ENCODER) - - namespace { - // Maximum time limit between incoming frames before requesting a key frame. - const int64_t kFrameDiffThresholdMs = 350; - const int kMinKeyFrameInterval = 6; - const char kCustomQPThresholdsFieldTrial[] = "WebRTC-CustomQPThresholds"; -} // namespace - -// MediaCodecVideoEncoder is a VideoEncoder implementation that uses -// Android's MediaCodec SDK API behind the scenes to implement (hopefully) -// HW-backed video encode. This C++ class is implemented as a very thin shim, -// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. -// MediaCodecVideoEncoder must be operated on a single task queue, currently -// this is the encoder queue from ViE encoder. -class MediaCodecVideoEncoder : public VideoEncoder { - public: - ~MediaCodecVideoEncoder() override; - MediaCodecVideoEncoder(JNIEnv* jni, - const SdpVideoFormat& format, - bool has_egl_context); - - // VideoEncoder implementation. 
- int32_t InitEncode(const VideoCodec* codec_settings, - const Settings& settings) override; - int32_t Encode(const VideoFrame& input_image, - const std::vector* frame_types) override; - int32_t RegisterEncodeCompleteCallback( - EncodedImageCallback* callback) override; - int32_t Release() override; - void SetRates(const RateControlParameters& parameters) override; - EncoderInfo GetEncoderInfo() const override; - - // Fills the input buffer with data from the buffers passed as parameters. - bool FillInputBuffer(JNIEnv* jni, - int input_buffer_index, - uint8_t const* buffer_y, - int stride_y, - uint8_t const* buffer_u, - int stride_u, - uint8_t const* buffer_v, - int stride_v); - - private: - class EncodeTask : public QueuedTask { - public: - explicit EncodeTask(rtc::WeakPtr encoder); - bool Run() override; - - private: - rtc::WeakPtr encoder_; - }; - - // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to - // restore the codec to an operable state. Necessary after all manner of - // OMX-layer errors. Returns true if the codec was reset successfully. - bool ResetCodec(); - - // Fallback to a software encoder if one is supported else try to reset the - // encoder. Called with |reset_if_fallback_unavailable| equal to false from - // init/release encoder so that we don't go into infinite recursion. - // Returns true if the codec was reset successfully. - bool ProcessHWError(bool reset_if_fallback_unavailable); - - // Calls ProcessHWError(true). Returns WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if - // sw_fallback_required_ was set or WEBRTC_VIDEO_CODEC_ERROR otherwise. - int32_t ProcessHWErrorOnEncode(); - - // If width==0 then this is assumed to be a re-initialization and the - // previously-current values are reused instead of the passed parameters - // (makes it easier to reason about thread-safety). - int32_t InitEncodeInternal(int width, - int height, - int kbps, - int fps, - bool use_surface); - // Reconfigure to match |frame| in width, height. 
Also reconfigures the - // encoder if |frame| is a texture/byte buffer and the encoder is initialized - // for byte buffer/texture. Returns false if reconfiguring fails. - bool MaybeReconfigureEncoder(JNIEnv* jni, const VideoFrame& frame); - - // Returns true if the frame is a texture frame and we should use surface - // based encoding. - bool IsTextureFrame(JNIEnv* jni, const VideoFrame& frame); - - bool EncodeByteBuffer(JNIEnv* jni, - bool key_frame, - const VideoFrame& frame, - int input_buffer_index); - // Encodes a new style org.webrtc.VideoFrame. Might be a I420 or a texture - // frame. - bool EncodeJavaFrame(JNIEnv* jni, - bool key_frame, - const JavaRef& frame, - int input_buffer_index); - - // Deliver any outputs pending in the MediaCodec to our |callback_| and return - // true on success. - bool DeliverPendingOutputs(JNIEnv* jni); - - VideoEncoder::ScalingSettings GetScalingSettingsInternal() const; - - // Displays encoder statistics. - void LogStatistics(bool force_log); - - VideoCodecType GetCodecType() const; - -#if RTC_DCHECK_IS_ON - // Mutex for protecting inited_. It is only used for correctness checking on - // debug build. It is used for checking that encoder has been released in the - // destructor. Because this might happen on a different thread, we need a - // mutex. - rtc::CriticalSection inited_crit_; -#endif - - // Type of video codec. - const SdpVideoFormat format_; - - EncodedImageCallback* callback_; - - // State that is constant for the lifetime of this object once the ctor - // returns. - SequenceChecker encoder_queue_checker_; - ScopedJavaGlobalRef j_media_codec_video_encoder_; - - // State that is valid only between InitEncode() and the next Release(). - int width_; // Frame width in pixels. - int height_; // Frame height in pixels. - bool inited_; - bool use_surface_; - enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. - uint32_t last_set_bitrate_kbps_; // Last-requested bitrate in kbps. 
- uint32_t last_set_fps_; // Last-requested frame rate. - int64_t current_timestamp_us_; // Current frame timestamps in us. - int frames_received_; // Number of frames received by encoder. - int frames_encoded_; // Number of frames encoded by encoder. - int frames_dropped_media_encoder_; // Number of frames dropped by encoder. - // Number of dropped frames caused by full queue. - int consecutive_full_queue_frame_drops_; - int64_t stat_start_time_ms_; // Start time for statistics. - int current_frames_; // Number of frames in the current statistics interval. - int current_bytes_; // Encoded bytes in the current statistics interval. - int current_acc_qp_; // Accumulated QP in the current statistics interval. - int current_encoding_time_ms_; // Overall encoding time in the current second - int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. - int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. - // Holds the task while the polling loop is paused. - std::unique_ptr encode_task_; - - struct InputFrameInfo { - InputFrameInfo(int64_t encode_start_time, - int32_t frame_timestamp, - int64_t frame_render_time_ms, - VideoRotation rotation) - : encode_start_time(encode_start_time), - frame_timestamp(frame_timestamp), - frame_render_time_ms(frame_render_time_ms), - rotation(rotation) {} - // Time when video frame is sent to encoder input. - const int64_t encode_start_time; - - // Input frame information. - const int32_t frame_timestamp; - const int64_t frame_render_time_ms; - const VideoRotation rotation; - }; - std::list input_frame_infos_; - int32_t output_timestamp_; // Last output frame timestamp from - // |input_frame_infos_|. - int64_t output_render_time_ms_; // Last output frame render time from - // |input_frame_infos_|. - VideoRotation output_rotation_; // Last output frame rotation from - // |input_frame_infos_|. - - // Frame size in bytes fed to MediaCodec. 
- int yuv_size_; - // True only when between a callback_->OnEncodedImage() call return a positive - // value and the next Encode() call being ignored. - bool drop_next_input_frame_; - bool scale_; - H264::Profile profile_; - // Global references; must be deleted in Release(). - std::vector> input_buffers_; - H264BitstreamParser h264_bitstream_parser_; - - // VP9 variables to populate codec specific structure. - GofInfoVP9 gof_; // Contains each frame's temporal information for - // non-flexible VP9 mode. - size_t gof_idx_; - - const bool has_egl_context_; - EncoderInfo encoder_info_; - - // Temporary fix for VP8. - // Sends a key frame if frames are largely spaced apart (possibly - // corresponding to a large image change). - int64_t last_frame_received_ms_; - int frames_received_since_last_key_; - VideoCodecMode codec_mode_; - - bool sw_fallback_required_; - - // All other member variables should be before WeakPtrFactory. Valid only from - // InitEncode to Release. - std::unique_ptr> weak_factory_; -}; - -MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { -#if RTC_DCHECK_IS_ON - rtc::CritScope lock(&inited_crit_); - RTC_DCHECK(!inited_); -#endif -} - -MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, - const SdpVideoFormat& format, - bool has_egl_context) - : format_(format), - callback_(NULL), - j_media_codec_video_encoder_( - jni, - Java_MediaCodecVideoEncoder_Constructor(jni)), - inited_(false), - use_surface_(false), - has_egl_context_(has_egl_context), - sw_fallback_required_(false) { - encoder_queue_checker_.Detach(); -} - -int32_t MediaCodecVideoEncoder::InitEncode(const VideoCodec* codec_settings, - const Settings& settings) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - if (codec_settings == NULL) { - ALOGE << "NULL VideoCodec instance"; - return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; - } - // Factory should guard against other codecs being used with us. 
- const VideoCodecType codec_type = GetCodecType(); - RTC_CHECK(codec_settings->codecType == codec_type) - << "Unsupported codec " << codec_settings->codecType << " for " - << codec_type; - if (sw_fallback_required_) { - return WEBRTC_VIDEO_CODEC_OK; - } - codec_mode_ = codec_settings->mode; - int init_width = codec_settings->width; - int init_height = codec_settings->height; - // Scaling is optionally enabled for VP8 and VP9. - // TODO(pbos): Extract automaticResizeOn out of VP8 settings. - scale_ = false; - if (codec_type == kVideoCodecVP8) { - scale_ = codec_settings->VP8().automaticResizeOn; - } else if (codec_type == kVideoCodecVP9) { - scale_ = codec_settings->VP9().automaticResizeOn; - } else { - scale_ = true; - } - - ALOGD << "InitEncode request: " << init_width << " x " << init_height; - ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); - - if (codec_settings->numberOfSimulcastStreams > 1) { - ALOGD << "Number of simulcast layers requested: " - << codec_settings->numberOfSimulcastStreams - << ". 
Requesting software fallback."; - return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; - } - - // Check allowed H.264 profile - profile_ = H264::Profile::kProfileBaseline; - if (codec_type == kVideoCodecH264) { - const absl::optional profile_level_id = - H264::ParseSdpProfileLevelId(format_.parameters); - RTC_DCHECK(profile_level_id); - profile_ = profile_level_id->profile; - ALOGD << "H.264 profile: " << profile_; - } - - encoder_info_.supports_native_handle = has_egl_context_; - encoder_info_.implementation_name = "MediaCodec"; - encoder_info_.scaling_settings = GetScalingSettingsInternal(); - encoder_info_.is_hardware_accelerated = true; - encoder_info_.has_internal_source = false; - - return InitEncodeInternal( - init_width, init_height, codec_settings->startBitrate, - codec_settings->maxFramerate, - codec_settings->expect_encode_from_texture && has_egl_context_); -} - -bool MediaCodecVideoEncoder::ResetCodec() { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - ALOGE << "Reset"; - if (Release() != WEBRTC_VIDEO_CODEC_OK) { - ALOGE << "Releasing codec failed during reset."; - return false; - } - if (InitEncodeInternal(width_, height_, 0, 0, false) != - WEBRTC_VIDEO_CODEC_OK) { - ALOGE << "Initializing encoder failed during reset."; - return false; - } - return true; -} - -MediaCodecVideoEncoder::EncodeTask::EncodeTask( - rtc::WeakPtr encoder) - : encoder_(encoder) {} - -bool MediaCodecVideoEncoder::EncodeTask::Run() { - if (!encoder_) { - // Encoder was destroyed. - return true; - } - - RTC_DCHECK_RUN_ON(&encoder_->encoder_queue_checker_); - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - - if (!encoder_->inited_) { - encoder_->encode_task_ = absl::WrapUnique(this); - return false; - } - - // It would be nice to recover from a failure here if one happened, but it's - // unclear how to signal such a failure to the app, so instead we stay silent - // about it and let the next app-called API method reveal the borkedness. 
- encoder_->DeliverPendingOutputs(jni); - - if (!encoder_) { - // Encoder can be destroyed in DeliverPendingOutputs. - return true; - } - - // Call log statistics here so it's called even if no frames are being - // delivered. - encoder_->LogStatistics(false); - - // If there aren't more frames to deliver, we can start polling at lower rate. - if (encoder_->input_frame_infos_.empty()) { - TaskQueueBase::Current()->PostDelayedTask(absl::WrapUnique(this), - kMediaCodecPollNoFramesMs); - } else { - TaskQueueBase::Current()->PostDelayedTask(absl::WrapUnique(this), - kMediaCodecPollMs); - } - - return false; -} - -bool IsFormatSupported(const std::vector& supported_formats, - const SdpVideoFormat& format) { - for (const SdpVideoFormat& supported_format : supported_formats) { - if (cricket::IsSameCodec(format.name, format.parameters, - supported_format.name, - supported_format.parameters)) { - return true; - } - } - return false; -} - -bool MediaCodecVideoEncoder::ProcessHWError( - bool reset_if_fallback_unavailable) { - ALOGE << "ProcessHWError"; - if (IsFormatSupported(InternalEncoderFactory().GetSupportedFormats(), - format_)) { - ALOGE << "Fallback to SW encoder."; - sw_fallback_required_ = true; - return false; - } else if (reset_if_fallback_unavailable) { - ALOGE << "Reset encoder."; - return ResetCodec(); - } - return false; -} - -int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() { - ProcessHWError(true /* reset_if_fallback_unavailable */); - return sw_fallback_required_ ? 
WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE - : WEBRTC_VIDEO_CODEC_ERROR; -} - -VideoCodecType MediaCodecVideoEncoder::GetCodecType() const { - return PayloadStringToCodecType(format_.name); -} - -int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, - int height, - int kbps, - int fps, - bool use_surface) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - if (sw_fallback_required_) { - return WEBRTC_VIDEO_CODEC_OK; - } - RTC_CHECK(!use_surface || has_egl_context_) << "EGL context not set."; - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - - const VideoCodecType codec_type = GetCodecType(); - ALOGD << "InitEncodeInternal Type: " << static_cast(codec_type) << ", " - << width << " x " << height << ". Bitrate: " << kbps - << " kbps. Fps: " << fps << ". Profile: " << profile_ << "."; - if (kbps == 0) { - kbps = last_set_bitrate_kbps_; - } - if (fps == 0) { - fps = MAX_VIDEO_FPS; - } - - width_ = width; - height_ = height; - last_set_bitrate_kbps_ = kbps; - last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; - yuv_size_ = width_ * height_ * 3 / 2; - frames_received_ = 0; - frames_encoded_ = 0; - frames_dropped_media_encoder_ = 0; - consecutive_full_queue_frame_drops_ = 0; - current_timestamp_us_ = 0; - stat_start_time_ms_ = rtc::TimeMillis(); - current_frames_ = 0; - current_bytes_ = 0; - current_acc_qp_ = 0; - current_encoding_time_ms_ = 0; - last_input_timestamp_ms_ = -1; - last_output_timestamp_ms_ = -1; - output_timestamp_ = 0; - output_render_time_ms_ = 0; - input_frame_infos_.clear(); - drop_next_input_frame_ = false; - use_surface_ = use_surface; - gof_.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1); - gof_idx_ = 0; - last_frame_received_ms_ = -1; - frames_received_since_last_key_ = kMinKeyFrameInterval; - - // We enforce no extra stride/padding in the format creation step. 
- ScopedJavaLocalRef j_video_codec_enum = - Java_VideoCodecType_fromNativeIndex(jni, codec_type); - const bool encode_status = Java_MediaCodecVideoEncoder_initEncode( - jni, j_media_codec_video_encoder_, j_video_codec_enum, profile_, width, - height, kbps, fps, use_surface); - - if (!encode_status) { - ALOGE << "Failed to configure encoder."; - ProcessHWError(false /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - if (CheckException(jni)) { - ALOGE << "Exception in init encode."; - ProcessHWError(false /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - - if (!use_surface) { - ScopedJavaLocalRef input_buffers = - Java_MediaCodecVideoEncoder_getInputBuffers( - jni, j_media_codec_video_encoder_); - if (CheckException(jni)) { - ALOGE << "Exception in get input buffers."; - ProcessHWError(false /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - - if (IsNull(jni, input_buffers)) { - ProcessHWError(false /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - - switch (Java_MediaCodecVideoEncoder_getColorFormat( - jni, j_media_codec_video_encoder_)) { - case COLOR_FormatYUV420Planar: - encoder_fourcc_ = libyuv::FOURCC_YU12; - break; - case COLOR_FormatYUV420SemiPlanar: - case COLOR_QCOM_FormatYUV420SemiPlanar: - case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: - encoder_fourcc_ = libyuv::FOURCC_NV12; - break; - default: - RTC_LOG(LS_ERROR) << "Wrong color format."; - ProcessHWError(false /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - - RTC_CHECK(input_buffers_.empty()) - << "Unexpected double InitEncode without Release"; - input_buffers_ = JavaToNativeVector>( - jni, input_buffers, [](JNIEnv* env, const JavaRef& o) { - return ScopedJavaGlobalRef(env, o); - }); - for (const ScopedJavaGlobalRef& buffer : input_buffers_) { - int64_t yuv_buffer_capacity = jni->GetDirectBufferCapacity(buffer.obj()); - if (CheckException(jni)) { - ALOGE << 
"Exception in get direct buffer capacity."; - ProcessHWError(false /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; - } - } - - { -#if RTC_DCHECK_IS_ON - rtc::CritScope lock(&inited_crit_); -#endif - inited_ = true; - } - weak_factory_.reset(new rtc::WeakPtrFactory(this)); - encode_task_.reset(new EncodeTask(weak_factory_->GetWeakPtr())); - - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t MediaCodecVideoEncoder::Encode( - const VideoFrame& frame, - const std::vector* frame_types) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - if (sw_fallback_required_) - return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - const int64_t frame_input_time_ms = rtc::TimeMillis(); - - if (!inited_) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - bool send_key_frame = false; - if (codec_mode_ == VideoCodecMode::kRealtimeVideo) { - ++frames_received_since_last_key_; - int64_t now_ms = rtc::TimeMillis(); - if (last_frame_received_ms_ != -1 && - (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { - // Add limit to prevent triggering a key for every frame for very low - // framerates (e.g. if frame diff > kFrameDiffThresholdMs). - if (frames_received_since_last_key_ > kMinKeyFrameInterval) { - ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); - send_key_frame = true; - } - frames_received_since_last_key_ = 0; - } - last_frame_received_ms_ = now_ms; - } - - frames_received_++; - if (!DeliverPendingOutputs(jni)) { - if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { - return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE - : WEBRTC_VIDEO_CODEC_ERROR; - } - } - if (frames_encoded_ < kMaxEncodedLogFrames) { - ALOGD << "Encoder frame in # " << (frames_received_ - 1) - << ". TS: " << static_cast(current_timestamp_us_ / 1000) - << ". 
Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ - << ". Kbps: " << last_set_bitrate_kbps_; - } - - if (drop_next_input_frame_) { - ALOGW << "Encoder drop frame - failed callback."; - drop_next_input_frame_ = false; - current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; - frames_dropped_media_encoder_++; - return WEBRTC_VIDEO_CODEC_OK; - } - - RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; - - // Check if we accumulated too many frames in encoder input buffers and drop - // frame if so. - if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { - ALOGD << "Already " << input_frame_infos_.size() - << " frames in the queue, dropping" - ". TS: " - << static_cast(current_timestamp_us_ / 1000) - << ". Fps: " << last_set_fps_ - << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; - current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; - consecutive_full_queue_frame_drops_++; - if (consecutive_full_queue_frame_drops_ >= - ENCODER_STALL_FRAMEDROP_THRESHOLD) { - ALOGE << "Encoder got stuck."; - return ProcessHWErrorOnEncode(); - } - frames_dropped_media_encoder_++; - return WEBRTC_VIDEO_CODEC_OK; - } - consecutive_full_queue_frame_drops_ = 0; - - rtc::scoped_refptr input_buffer(frame.video_frame_buffer()); - - VideoFrame input_frame = VideoFrame::Builder() - .set_video_frame_buffer(input_buffer) - .set_timestamp_rtp(frame.timestamp()) - .set_timestamp_ms(frame.render_time_ms()) - .set_rotation(frame.rotation()) - .set_id(frame.id()) - .build(); - - if (!MaybeReconfigureEncoder(jni, input_frame)) { - ALOGE << "Failed to reconfigure encoder."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - - const bool key_frame = - frame_types->front() != VideoFrameType::kVideoFrameDelta || - send_key_frame; - bool encode_status = true; - - int j_input_buffer_index = -1; - if (!use_surface_) { - j_input_buffer_index = Java_MediaCodecVideoEncoder_dequeueInputBuffer( - jni, j_media_codec_video_encoder_); - if 
(CheckException(jni)) { - ALOGE << "Exception in dequeu input buffer."; - return ProcessHWErrorOnEncode(); - } - if (j_input_buffer_index == -1) { - // Video codec falls behind - no input buffer available. - ALOGW << "Encoder drop frame - no input buffers available"; - if (frames_received_ > 1) { - current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; - frames_dropped_media_encoder_++; - } else { - // Input buffers are not ready after codec initialization, HW is still - // allocating thme - this is expected and should not result in drop - // frame report. - frames_received_ = 0; - } - return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. - } else if (j_input_buffer_index == -2) { - return ProcessHWErrorOnEncode(); - } - } - - if (input_frame.video_frame_buffer()->type() != - VideoFrameBuffer::Type::kNative) { - encode_status = - EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); - } else { - ScopedJavaLocalRef j_frame = NativeToJavaVideoFrame(jni, frame); - encode_status = - EncodeJavaFrame(jni, key_frame, j_frame, j_input_buffer_index); - ReleaseJavaVideoFrame(jni, j_frame); - } - - if (!encode_status) { - ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); - return ProcessHWErrorOnEncode(); - } - - // Save input image timestamps for later output. - input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), - input_frame.render_time_ms(), - input_frame.rotation()); - - last_input_timestamp_ms_ = - current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; - - current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; - - // Start the polling loop if it is not started. 
- if (encode_task_) { - TaskQueueBase::Current()->PostDelayedTask(std::move(encode_task_), - kMediaCodecPollMs); - } - - if (!DeliverPendingOutputs(jni)) { - return ProcessHWErrorOnEncode(); - } - return WEBRTC_VIDEO_CODEC_OK; -} - -bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(JNIEnv* jni, - const VideoFrame& frame) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - - bool is_texture = IsTextureFrame(jni, frame); - const bool reconfigure_due_to_format = is_texture != use_surface_; - const bool reconfigure_due_to_size = - frame.width() != width_ || frame.height() != height_; - - if (reconfigure_due_to_format) { - ALOGD << "Reconfigure encoder due to format change. " - << (use_surface_ ? "Reconfiguring to encode from byte buffer." - : "Reconfiguring to encode from texture."); - LogStatistics(true); - } - if (reconfigure_due_to_size) { - ALOGW << "Reconfigure encoder due to frame resolution change from " - << width_ << " x " << height_ << " to " << frame.width() << " x " - << frame.height(); - LogStatistics(true); - width_ = frame.width(); - height_ = frame.height(); - } - - if (!reconfigure_due_to_format && !reconfigure_due_to_size) - return true; - - Release(); - - return InitEncodeInternal(width_, height_, 0, 0, is_texture) == - WEBRTC_VIDEO_CODEC_OK; -} - -bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni, - const VideoFrame& frame) { - if (frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) { - return false; - } - return Java_MediaCodecVideoEncoder_isTextureBuffer( - jni, static_cast(frame.video_frame_buffer().get()) - ->video_frame_buffer()); -} - -bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, - bool key_frame, - const VideoFrame& frame, - int input_buffer_index) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - RTC_CHECK(!use_surface_); - - rtc::scoped_refptr i420_buffer = - frame.video_frame_buffer()->ToI420(); - if (!FillInputBuffer(jni, input_buffer_index, i420_buffer->DataY(), - i420_buffer->StrideY(), 
i420_buffer->DataU(), - i420_buffer->StrideU(), i420_buffer->DataV(), - i420_buffer->StrideV())) { - return false; - } - bool encode_status = Java_MediaCodecVideoEncoder_encodeBuffer( - jni, j_media_codec_video_encoder_, key_frame, input_buffer_index, - yuv_size_, current_timestamp_us_); - if (CheckException(jni)) { - ALOGE << "Exception in encode buffer."; - ProcessHWError(true /* reset_if_fallback_unavailable */); - return false; - } - return encode_status; -} - -bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni, - int input_buffer_index, - uint8_t const* buffer_y, - int stride_y, - uint8_t const* buffer_u, - int stride_u, - uint8_t const* buffer_v, - int stride_v) { - uint8_t* yuv_buffer = reinterpret_cast( - jni->GetDirectBufferAddress(input_buffers_[input_buffer_index].obj())); - if (CheckException(jni)) { - ALOGE << "Exception in get direct buffer address."; - ProcessHWError(true /* reset_if_fallback_unavailable */); - return false; - } - RTC_CHECK(yuv_buffer) << "Indirect buffer??"; - - RTC_CHECK(!libyuv::ConvertFromI420(buffer_y, stride_y, buffer_u, stride_u, - buffer_v, stride_v, yuv_buffer, width_, - width_, height_, encoder_fourcc_)) - << "ConvertFromI420 failed"; - return true; -} - -bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni, - bool key_frame, - const JavaRef& frame, - int input_buffer_index) { - bool encode_status = Java_MediaCodecVideoEncoder_encodeFrame( - jni, j_media_codec_video_encoder_, jlongFromPointer(this), key_frame, - frame, input_buffer_index, current_timestamp_us_); - if (CheckException(jni)) { - ALOGE << "Exception in encode frame."; - ProcessHWError(true /* reset_if_fallback_unavailable */); - return false; - } - return encode_status; -} - -int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( - EncodedImageCallback* callback) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - callback_ = callback; - return 
WEBRTC_VIDEO_CODEC_OK; -} - -int32_t MediaCodecVideoEncoder::Release() { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - if (!inited_) { - return WEBRTC_VIDEO_CODEC_OK; - } - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ALOGD << "EncoderRelease: Frames received: " << frames_received_ - << ". Encoded: " << frames_encoded_ - << ". Dropped: " << frames_dropped_media_encoder_; - encode_task_.reset(nullptr); - weak_factory_.reset(nullptr); - ScopedLocalRefFrame local_ref_frame(jni); - input_buffers_.clear(); - Java_MediaCodecVideoEncoder_release(jni, j_media_codec_video_encoder_); - if (CheckException(jni)) { - ALOGE << "Exception in release."; - ProcessHWError(false /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - { -#if RTC_DCHECK_IS_ON - rtc::CritScope lock(&inited_crit_); -#endif - inited_ = false; - } - use_surface_ = false; - ALOGD << "EncoderRelease done."; - // It's legal to move the encoder to another queue now. - encoder_queue_checker_.Detach(); - return WEBRTC_VIDEO_CODEC_OK; -} - -void MediaCodecVideoEncoder::SetRates(const RateControlParameters& parameters) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - const uint32_t new_bit_rate = parameters.bitrate.get_sum_kbps(); - if (sw_fallback_required_) - return; - uint32_t frame_rate = static_cast(parameters.framerate_fps + 0.5); - frame_rate = - (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? 
frame_rate : MAX_ALLOWED_VIDEO_FPS; - if (last_set_bitrate_kbps_ == new_bit_rate && last_set_fps_ == frame_rate) { - return; - } - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedLocalRefFrame local_ref_frame(jni); - if (new_bit_rate > 0) { - last_set_bitrate_kbps_ = new_bit_rate; - } - if (frame_rate > 0) { - last_set_fps_ = frame_rate; - } - bool ret = Java_MediaCodecVideoEncoder_setRates( - jni, j_media_codec_video_encoder_, - rtc::dchecked_cast(last_set_bitrate_kbps_), - rtc::dchecked_cast(last_set_fps_)); - if (CheckException(jni) || !ret) { - ProcessHWError(true /* reset_if_fallback_unavailable */); - } -} - -VideoEncoder::EncoderInfo MediaCodecVideoEncoder::GetEncoderInfo() const { - return encoder_info_; -} - -bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { - RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - - while (true) { - ScopedJavaLocalRef j_output_buffer_info = - Java_MediaCodecVideoEncoder_dequeueOutputBuffer( - jni, j_media_codec_video_encoder_); - if (CheckException(jni)) { - ALOGE << "Exception in set dequeue output buffer."; - ProcessHWError(true /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - if (IsNull(jni, j_output_buffer_info)) { - break; - } - - int output_buffer_index = - Java_OutputBufferInfo_getIndex(jni, j_output_buffer_info); - if (output_buffer_index == -1) { - ProcessHWError(true /* reset_if_fallback_unavailable */); - return false; - } - - // Get key and config frame flags. - ScopedJavaLocalRef j_output_buffer = - Java_OutputBufferInfo_getBuffer(jni, j_output_buffer_info); - bool key_frame = - Java_OutputBufferInfo_isKeyFrame(jni, j_output_buffer_info); - - // Get frame timestamps from a queue - for non config frames only. 
- int64_t encoding_start_time_ms = 0; - int64_t frame_encoding_time_ms = 0; - last_output_timestamp_ms_ = - Java_OutputBufferInfo_getPresentationTimestampUs(jni, - j_output_buffer_info) / - rtc::kNumMicrosecsPerMillisec; - if (!input_frame_infos_.empty()) { - const InputFrameInfo& frame_info = input_frame_infos_.front(); - output_timestamp_ = frame_info.frame_timestamp; - output_render_time_ms_ = frame_info.frame_render_time_ms; - output_rotation_ = frame_info.rotation; - encoding_start_time_ms = frame_info.encode_start_time; - input_frame_infos_.pop_front(); - } - - // Extract payload. - size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer.obj()); - uint8_t* payload = reinterpret_cast( - jni->GetDirectBufferAddress(j_output_buffer.obj())); - if (CheckException(jni)) { - ALOGE << "Exception in get direct buffer address."; - ProcessHWError(true /* reset_if_fallback_unavailable */); - return WEBRTC_VIDEO_CODEC_ERROR; - } - - // Callback - return encoded frame. - const VideoCodecType codec_type = GetCodecType(); - EncodedImageCallback::Result callback_result( - EncodedImageCallback::Result::OK); - if (callback_) { - auto image = std::make_unique(); - // The corresponding (and deprecated) java classes are not prepared for - // late calls to releaseOutputBuffer, so to keep things simple, make a - // copy here, and call releaseOutputBuffer before returning. - image->SetEncodedData(EncodedImageBuffer::Create(payload, payload_size)); - image->_encodedWidth = width_; - image->_encodedHeight = height_; - image->SetTimestamp(output_timestamp_); - image->capture_time_ms_ = output_render_time_ms_; - image->rotation_ = output_rotation_; - image->content_type_ = (codec_mode_ == VideoCodecMode::kScreensharing) - ? VideoContentType::SCREENSHARE - : VideoContentType::UNSPECIFIED; - image->timing_.flags = VideoSendTiming::kInvalid; - image->_frameType = (key_frame ? 
VideoFrameType::kVideoFrameKey - : VideoFrameType::kVideoFrameDelta); - image->_completeFrame = true; - CodecSpecificInfo info; - memset(&info, 0, sizeof(info)); - info.codecType = codec_type; - if (codec_type == kVideoCodecVP8) { - info.codecSpecific.VP8.nonReference = false; - info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx; - info.codecSpecific.VP8.layerSync = false; - info.codecSpecific.VP8.keyIdx = kNoKeyIdx; - } else if (codec_type == kVideoCodecVP9) { - if (key_frame) { - gof_idx_ = 0; - } - info.codecSpecific.VP9.inter_pic_predicted = key_frame ? false : true; - info.codecSpecific.VP9.flexible_mode = false; - info.codecSpecific.VP9.ss_data_available = key_frame ? true : false; - info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx; - info.codecSpecific.VP9.temporal_up_switch = true; - info.codecSpecific.VP9.inter_layer_predicted = false; - info.codecSpecific.VP9.gof_idx = - static_cast(gof_idx_++ % gof_.num_frames_in_gof); - info.codecSpecific.VP9.num_spatial_layers = 1; - info.codecSpecific.VP9.first_frame_in_picture = true; - info.codecSpecific.VP9.end_of_picture = true; - info.codecSpecific.VP9.spatial_layer_resolution_present = false; - if (info.codecSpecific.VP9.ss_data_available) { - info.codecSpecific.VP9.spatial_layer_resolution_present = true; - info.codecSpecific.VP9.width[0] = width_; - info.codecSpecific.VP9.height[0] = height_; - info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_); - } - } - - // Generate a header describing a single fragment. 
- RTPFragmentationHeader header; - memset(&header, 0, sizeof(header)); - if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecVP9) { - header.VerifyAndAllocateFragmentationHeader(1); - header.fragmentationOffset[0] = 0; - header.fragmentationLength[0] = image->size(); - if (codec_type == kVideoCodecVP8) { - int qp; - if (vp8::GetQp(payload, payload_size, &qp)) { - current_acc_qp_ += qp; - image->qp_ = qp; - } - } else if (codec_type == kVideoCodecVP9) { - int qp; - if (vp9::GetQp(payload, payload_size, &qp)) { - current_acc_qp_ += qp; - image->qp_ = qp; - } - } - } else if (codec_type == kVideoCodecH264) { - h264_bitstream_parser_.ParseBitstream(payload, payload_size); - int qp; - if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { - current_acc_qp_ += qp; - image->qp_ = qp; - } - // For H.264 search for start codes. - const std::vector nalu_idxs = - H264::FindNaluIndices(payload, payload_size); - if (nalu_idxs.empty()) { - ALOGE << "Start code is not found!"; - ALOGE << "Data:" << image->data()[0] << " " << image->data()[1] << " " - << image->data()[2] << " " << image->data()[3] << " " - << image->data()[4] << " " << image->data()[5]; - ProcessHWError(true /* reset_if_fallback_unavailable */); - return false; - } - header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); - for (size_t i = 0; i < nalu_idxs.size(); i++) { - header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset; - header.fragmentationLength[i] = nalu_idxs[i].payload_size; - } - } - - callback_result = callback_->OnEncodedImage(*image, &info, &header); - } - - // Return output buffer back to the encoder. - bool success = Java_MediaCodecVideoEncoder_releaseOutputBuffer( - jni, j_media_codec_video_encoder_, output_buffer_index); - if (CheckException(jni) || !success) { - ProcessHWError(true /* reset_if_fallback_unavailable */); - return false; - } - - // Print per frame statistics. 
- if (encoding_start_time_ms > 0) { - frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; - } - if (frames_encoded_ < kMaxEncodedLogFrames) { - int current_latency = static_cast(last_input_timestamp_ms_ - - last_output_timestamp_ms_); - ALOGD << "Encoder frame out # " << frames_encoded_ - << ". Key: " << key_frame << ". Size: " << payload_size - << ". TS: " << static_cast(last_output_timestamp_ms_) - << ". Latency: " << current_latency - << ". EncTime: " << frame_encoding_time_ms; - } - - // Calculate and print encoding statistics - every 3 seconds. - frames_encoded_++; - current_frames_++; - current_bytes_ += payload_size; - current_encoding_time_ms_ += frame_encoding_time_ms; - LogStatistics(false); - - // Errors in callback_result are currently ignored. - if (callback_result.drop_next_frame) - drop_next_input_frame_ = true; - } - return true; -} - -void MediaCodecVideoEncoder::LogStatistics(bool force_log) { - int statistic_time_ms = rtc::TimeMillis() - stat_start_time_ms_; - if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) && - statistic_time_ms > 0) { - // Prevent division by zero. - int current_frames_divider = current_frames_ != 0 ? current_frames_ : 1; - - int current_bitrate = current_bytes_ * 8 / statistic_time_ms; - int current_fps = - (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms; - ALOGD << "Encoded frames: " << frames_encoded_ - << ". Bitrate: " << current_bitrate - << ", target: " << last_set_bitrate_kbps_ - << " kbps" - ", fps: " - << current_fps << ", encTime: " - << (current_encoding_time_ms_ / current_frames_divider) - << ". 
QP: " << (current_acc_qp_ / current_frames_divider) - << " for last " << statistic_time_ms << " ms."; - stat_start_time_ms_ = rtc::TimeMillis(); - current_frames_ = 0; - current_bytes_ = 0; - current_acc_qp_ = 0; - current_encoding_time_ms_ = 0; - } -} - -VideoEncoder::ScalingSettings -MediaCodecVideoEncoder::GetScalingSettingsInternal() const { - if (!scale_) - return VideoEncoder::ScalingSettings::kOff; - - const VideoCodecType codec_type = GetCodecType(); - if (field_trial::IsEnabled(kCustomQPThresholdsFieldTrial)) { - std::string experiment_string = - field_trial::FindFullName(kCustomQPThresholdsFieldTrial); - ALOGD << "QP custom thresholds: " << experiment_string << " for codec " - << codec_type; - int low_vp8_qp_threshold; - int high_vp8_qp_threshold; - int low_h264_qp_threshold; - int high_h264_qp_threshold; - int parsed_values = sscanf(experiment_string.c_str(), "Enabled-%u,%u,%u,%u", - &low_vp8_qp_threshold, &high_vp8_qp_threshold, - &low_h264_qp_threshold, &high_h264_qp_threshold); - if (parsed_values == 4) { - RTC_CHECK_GT(high_vp8_qp_threshold, low_vp8_qp_threshold); - RTC_CHECK_GT(low_vp8_qp_threshold, 0); - RTC_CHECK_GT(high_h264_qp_threshold, low_h264_qp_threshold); - RTC_CHECK_GT(low_h264_qp_threshold, 0); - if (codec_type == kVideoCodecVP8) { - return VideoEncoder::ScalingSettings(low_vp8_qp_threshold, - high_vp8_qp_threshold); - } else if (codec_type == kVideoCodecH264) { - return VideoEncoder::ScalingSettings(low_h264_qp_threshold, - high_h264_qp_threshold); - } - } - } - if (codec_type == kVideoCodecVP8) { - // Same as in vp8_impl.cc. - static const int kLowVp8QpThreshold = 29; - static const int kHighVp8QpThreshold = 95; - - return VideoEncoder::ScalingSettings(kLowVp8QpThreshold, - kHighVp8QpThreshold); - } else if (codec_type == kVideoCodecVP9) { - // QP is obtained from VP9-bitstream, so the QP corresponds to the bitstream - // range of [0, 255] and not the user-level range of [0,63]. 
- static const int kLowVp9QpThreshold = 96; - static const int kHighVp9QpThreshold = 185; - - return VideoEncoder::ScalingSettings(kLowVp9QpThreshold, - kHighVp9QpThreshold); - } else if (codec_type == kVideoCodecH264) { - // Same as in h264_encoder_impl.cc. - static const int kLowH264QpThreshold = 24; - static const int kHighH264QpThreshold = 37; - - return VideoEncoder::ScalingSettings(kLowH264QpThreshold, - kHighH264QpThreshold); - } - return VideoEncoder::ScalingSettings::kOff; -} - -static void JNI_MediaCodecVideoEncoder_FillInputBuffer( - JNIEnv* jni, - jlong native_encoder, - jint input_buffer, - const JavaParamRef& j_buffer_y, - jint stride_y, - const JavaParamRef& j_buffer_u, - jint stride_u, - const JavaParamRef& j_buffer_v, - jint stride_v) { - uint8_t* buffer_y = - static_cast(jni->GetDirectBufferAddress(j_buffer_y.obj())); - uint8_t* buffer_u = - static_cast(jni->GetDirectBufferAddress(j_buffer_u.obj())); - uint8_t* buffer_v = - static_cast(jni->GetDirectBufferAddress(j_buffer_v.obj())); - - RTC_DCHECK(buffer_y) << "GetDirectBufferAddress returned null. Ensure that " - "getDataY returns a direct ByteBuffer."; - RTC_DCHECK(buffer_u) << "GetDirectBufferAddress returned null. Ensure that " - "getDataU returns a direct ByteBuffer."; - RTC_DCHECK(buffer_v) << "GetDirectBufferAddress returned null. 
Ensure that " - "getDataV returns a direct ByteBuffer."; - - reinterpret_cast(native_encoder) - ->FillInputBuffer(jni, input_buffer, buffer_y, stride_y, buffer_u, - stride_u, buffer_v, stride_v); -} - -static jlong JNI_MediaCodecVideoEncoder_CreateEncoder( - JNIEnv* env, - const JavaParamRef& format, - jboolean has_egl_context) { - ScopedLocalRefFrame local_ref_frame(env); - return jlongFromPointer(new MediaCodecVideoEncoder( - env, VideoCodecInfoToSdpVideoFormat(env, format), has_egl_context)); -} - -} // namespace jni -} // namespace webrtc diff --git a/sdk/android/src/jni/android_network_monitor.cc b/sdk/android/src/jni/android_network_monitor.cc index d29be44b9c..434e6d3af9 100644 --- a/sdk/android/src/jni/android_network_monitor.cc +++ b/sdk/android/src/jni/android_network_monitor.cc @@ -21,7 +21,8 @@ #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "sdk/android/generated_base_jni/NetworkMonitorAutoDetect_jni.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "sdk/android/generated_base_jni/NetworkChangeDetector_jni.h" #include "sdk/android/generated_base_jni/NetworkMonitor_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -30,6 +31,37 @@ namespace webrtc { namespace jni { +namespace { + +const char* NetworkTypeToString(NetworkType type) { + switch (type) { + case NETWORK_UNKNOWN: + return "UNKNOWN"; + case NETWORK_ETHERNET: + return "ETHERNET"; + case NETWORK_WIFI: + return "WIFI"; + case NETWORK_5G: + return "5G"; + case NETWORK_4G: + return "4G"; + case NETWORK_3G: + return "3G"; + case NETWORK_2G: + return "2G"; + case NETWORK_UNKNOWN_CELLULAR: + return "UNKNOWN_CELLULAR"; + case NETWORK_BLUETOOTH: + return "BLUETOOTH"; + case NETWORK_VPN: + return "VPN"; + case NETWORK_NONE: + return "NONE"; + } +} + +} // namespace + enum AndroidSdkVersion { SDK_VERSION_LOLLIPOP = 21, SDK_VERSION_MARSHMALLOW = 23 @@ -48,6 +80,9 @@ 
static NetworkType GetNetworkTypeFromJava( if (enum_name == "CONNECTION_WIFI") { return NetworkType::NETWORK_WIFI; } + if (enum_name == "CONNECTION_5G") { + return NetworkType::NETWORK_5G; + } if (enum_name == "CONNECTION_4G") { return NetworkType::NETWORK_4G; } @@ -73,7 +108,9 @@ static NetworkType GetNetworkTypeFromJava( return NetworkType::NETWORK_UNKNOWN; } -static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) { +static rtc::AdapterType AdapterTypeFromNetworkType( + NetworkType network_type, + bool surface_cellular_types) { switch (network_type) { case NETWORK_UNKNOWN: return rtc::ADAPTER_TYPE_UNKNOWN; @@ -81,9 +118,18 @@ static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) { return rtc::ADAPTER_TYPE_ETHERNET; case NETWORK_WIFI: return rtc::ADAPTER_TYPE_WIFI; + case NETWORK_5G: + return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_5G + : rtc::ADAPTER_TYPE_CELLULAR; case NETWORK_4G: + return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_4G + : rtc::ADAPTER_TYPE_CELLULAR; case NETWORK_3G: + return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_3G + : rtc::ADAPTER_TYPE_CELLULAR; case NETWORK_2G: + return surface_cellular_types ? 
rtc::ADAPTER_TYPE_CELLULAR_2G + : rtc::ADAPTER_TYPE_CELLULAR; case NETWORK_UNKNOWN_CELLULAR: return rtc::ADAPTER_TYPE_CELLULAR; case NETWORK_VPN: @@ -173,10 +219,6 @@ std::string NetworkInformation::ToString() const { if (type == NETWORK_VPN) { ss << "; underlying_type_for_vpn " << underlying_type_for_vpn; } - ss << "; address"; - for (const rtc::IPAddress address : ip_addresses) { - ss << " " << address.ToString(); - } ss << "]"; return ss.Release(); } @@ -186,16 +228,19 @@ AndroidNetworkMonitor::AndroidNetworkMonitor( const JavaRef& j_application_context) : android_sdk_int_(Java_NetworkMonitor_androidSdkInt(env)), j_application_context_(env, j_application_context), - j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)) {} + j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)), + network_thread_(rtc::Thread::Current()) {} AndroidNetworkMonitor::~AndroidNetworkMonitor() = default; void AndroidNetworkMonitor::Start() { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); if (started_) { return; } started_ = true; + surface_cellular_types_ = + webrtc::field_trial::IsEnabled("WebRTC-SurfaceCellularTypes"); find_network_handle_without_ipv6_temporary_part_ = webrtc::field_trial::IsEnabled( "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart"); @@ -203,7 +248,7 @@ void AndroidNetworkMonitor::Start() { // This is kind of magic behavior, but doing this allows the SocketServer to // use this as a NetworkBinder to bind sockets on a particular network when // it creates sockets. 
- worker_thread()->socketserver()->set_network_binder(this); + network_thread_->socketserver()->set_network_binder(this); JNIEnv* env = AttachCurrentThreadIfNeeded(); Java_NetworkMonitor_startMonitoring( @@ -211,7 +256,7 @@ void AndroidNetworkMonitor::Start() { } void AndroidNetworkMonitor::Stop() { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); if (!started_) { return; } @@ -220,8 +265,8 @@ void AndroidNetworkMonitor::Stop() { // Once the network monitor stops, it will clear all network information and // it won't find the network handle to bind anyway. - if (worker_thread()->socketserver()->network_binder() == this) { - worker_thread()->socketserver()->set_network_binder(nullptr); + if (network_thread_->socketserver()->network_binder() == this) { + network_thread_->socketserver()->set_network_binder(nullptr); } JNIEnv* env = AttachCurrentThreadIfNeeded(); @@ -237,7 +282,7 @@ void AndroidNetworkMonitor::Stop() { rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( int socket_fd, const rtc::IPAddress& address) { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); // Android prior to Lollipop didn't have support for binding sockets to // networks. This may also occur if there is no connectivity manager @@ -334,35 +379,29 @@ rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( return rtc::NetworkBindingResult::FAILURE; } -void AndroidNetworkMonitor::OnNetworkConnected( - const NetworkInformation& network_info) { - worker_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkConnected_w, - this, network_info)); - // Fire SignalNetworksChanged to update the list of networks. 
- OnNetworksChanged(); -} - -void AndroidNetworkMonitor::OnNetworkConnected_w( +void AndroidNetworkMonitor::OnNetworkConnected_n( const NetworkInformation& network_info) { + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString(); adapter_type_by_name_[network_info.interface_name] = - AdapterTypeFromNetworkType(network_info.type); + AdapterTypeFromNetworkType(network_info.type, surface_cellular_types_); if (network_info.type == NETWORK_VPN) { vpn_underlying_adapter_type_by_name_[network_info.interface_name] = - AdapterTypeFromNetworkType(network_info.underlying_type_for_vpn); + AdapterTypeFromNetworkType(network_info.underlying_type_for_vpn, + surface_cellular_types_); } network_info_by_handle_[network_info.handle] = network_info; for (const rtc::IPAddress& address : network_info.ip_addresses) { network_handle_by_address_[address] = network_info.handle; } + SignalNetworksChanged(); } absl::optional AndroidNetworkMonitor::FindNetworkHandleFromAddress( const rtc::IPAddress& ip_address) const { - RTC_LOG(LS_INFO) << "Find network handle for address: " - << ip_address.ToString(); + RTC_DCHECK_RUN_ON(network_thread_); + RTC_LOG(LS_INFO) << "Find network handle."; if (find_network_handle_without_ipv6_temporary_part_) { for (auto const& iter : network_info_by_handle_) { const std::vector& addresses = iter.second.ip_addresses; @@ -384,14 +423,9 @@ AndroidNetworkMonitor::FindNetworkHandleFromAddress( } } -void AndroidNetworkMonitor::OnNetworkDisconnected(NetworkHandle handle) { +void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) { + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Network disconnected for handle " << handle; - worker_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_w, this, handle)); -} - -void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) { auto iter = network_info_by_handle_.find(handle); if (iter != 
network_info_by_handle_.end()) { for (const rtc::IPAddress& address : iter->second.ip_addresses) { @@ -401,20 +435,33 @@ void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) { } } +void AndroidNetworkMonitor::OnNetworkPreference_n( + NetworkType type, + rtc::NetworkPreference preference) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_LOG(LS_INFO) << "Android network monitor preference for " + << NetworkTypeToString(type) << " changed to " + << rtc::NetworkPreferenceToString(preference); + auto adapter_type = AdapterTypeFromNetworkType(type, surface_cellular_types_); + network_preference_by_adapter_type_[adapter_type] = preference; + SignalNetworksChanged(); +} + void AndroidNetworkMonitor::SetNetworkInfos( const std::vector& network_infos) { - RTC_CHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); network_handle_by_address_.clear(); network_info_by_handle_.clear(); RTC_LOG(LS_INFO) << "Android network monitor found " << network_infos.size() << " networks"; - for (NetworkInformation network : network_infos) { - OnNetworkConnected_w(network); + for (const NetworkInformation& network : network_infos) { + OnNetworkConnected_n(network); } } rtc::AdapterType AndroidNetworkMonitor::GetAdapterType( const std::string& if_name) { + RTC_DCHECK_RUN_ON(network_thread_); auto iter = adapter_type_by_name_.find(if_name); rtc::AdapterType type = (iter == adapter_type_by_name_.end()) ? rtc::ADAPTER_TYPE_UNKNOWN @@ -427,6 +474,7 @@ rtc::AdapterType AndroidNetworkMonitor::GetAdapterType( rtc::AdapterType AndroidNetworkMonitor::GetVpnUnderlyingAdapterType( const std::string& if_name) { + RTC_DCHECK_RUN_ON(network_thread_); auto iter = vpn_underlying_adapter_type_by_name_.find(if_name); rtc::AdapterType type = (iter == vpn_underlying_adapter_type_by_name_.end()) ? 
rtc::ADAPTER_TYPE_UNKNOWN @@ -434,6 +482,30 @@ rtc::AdapterType AndroidNetworkMonitor::GetVpnUnderlyingAdapterType( return type; } +rtc::NetworkPreference AndroidNetworkMonitor::GetNetworkPreference( + const std::string& if_name) { + RTC_DCHECK_RUN_ON(network_thread_); + auto iter = adapter_type_by_name_.find(if_name); + if (iter == adapter_type_by_name_.end()) { + return rtc::NetworkPreference::NEUTRAL; + } + + rtc::AdapterType adapter_type = iter->second; + if (adapter_type == rtc::ADAPTER_TYPE_VPN) { + auto iter2 = vpn_underlying_adapter_type_by_name_.find(if_name); + if (iter2 != vpn_underlying_adapter_type_by_name_.end()) { + adapter_type = iter2->second; + } + } + + auto preference_iter = network_preference_by_adapter_type_.find(adapter_type); + if (preference_iter == network_preference_by_adapter_type_.end()) { + return rtc::NetworkPreference::NEUTRAL; + } + + return preference_iter->second; +} + AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory() : j_application_context_(nullptr) {} @@ -453,7 +525,11 @@ AndroidNetworkMonitorFactory::CreateNetworkMonitor() { void AndroidNetworkMonitor::NotifyConnectionTypeChanged( JNIEnv* env, const JavaRef& j_caller) { - OnNetworksChanged(); + invoker_.AsyncInvoke(RTC_FROM_HERE, network_thread_, [this] { + RTC_LOG(LS_INFO) + << "Android network monitor detected connection type change."; + SignalNetworksChanged(); + }); } void AndroidNetworkMonitor::NotifyOfActiveNetworkList( @@ -472,14 +548,33 @@ void AndroidNetworkMonitor::NotifyOfNetworkConnect( const JavaRef& j_network_info) { NetworkInformation network_info = GetNetworkInformationFromJava(env, j_network_info); - OnNetworkConnected(network_info); + network_thread_->Invoke( + RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkConnected_n, + this, network_info)); } void AndroidNetworkMonitor::NotifyOfNetworkDisconnect( JNIEnv* env, const JavaRef& j_caller, jlong network_handle) { - OnNetworkDisconnected(static_cast(network_handle)); + 
network_thread_->Invoke( + RTC_FROM_HERE, + rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_n, this, + static_cast(network_handle))); +} + +void AndroidNetworkMonitor::NotifyOfNetworkPreference( + JNIEnv* env, + const JavaRef& j_caller, + const JavaRef& j_connection_type, + jint jpreference) { + NetworkType type = GetNetworkTypeFromJava(env, j_connection_type); + rtc::NetworkPreference preference = + static_cast(jpreference); + + network_thread_->Invoke( + RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkPreference_n, + this, type, preference)); } } // namespace jni diff --git a/sdk/android/src/jni/android_network_monitor.h b/sdk/android/src/jni/android_network_monitor.h index 81647bd307..eff2122549 100644 --- a/sdk/android/src/jni/android_network_monitor.h +++ b/sdk/android/src/jni/android_network_monitor.h @@ -17,8 +17,11 @@ #include #include "absl/types/optional.h" +#include "rtc_base/async_invoker.h" #include "rtc_base/network_monitor.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/network_monitor_factory.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "sdk/android/src/jni/jni_helpers.h" namespace webrtc { @@ -26,11 +29,12 @@ namespace jni { typedef int64_t NetworkHandle; -// c++ equivalent of java NetworkMonitorAutoDetect.ConnectionType. +// c++ equivalent of java NetworkChangeDetector.ConnectionType. 
enum NetworkType { NETWORK_UNKNOWN, NETWORK_ETHERNET, NETWORK_WIFI, + NETWORK_5G, NETWORK_4G, NETWORK_3G, NETWORK_2G, @@ -59,7 +63,7 @@ struct NetworkInformation { std::string ToString() const; }; -class AndroidNetworkMonitor : public rtc::NetworkMonitorBase, +class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface, public rtc::NetworkBinderInterface { public: AndroidNetworkMonitor(JNIEnv* env, @@ -78,8 +82,9 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorBase, rtc::AdapterType GetAdapterType(const std::string& if_name) override; rtc::AdapterType GetVpnUnderlyingAdapterType( const std::string& if_name) override; - void OnNetworkConnected(const NetworkInformation& network_info); - void OnNetworkDisconnected(NetworkHandle network_handle); + rtc::NetworkPreference GetNetworkPreference( + const std::string& if_name) override; + // Always expected to be called on the network thread. void SetNetworkInfos(const std::vector& network_infos); @@ -94,25 +99,40 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorBase, void NotifyOfActiveNetworkList(JNIEnv* env, const JavaRef& j_caller, const JavaRef& j_network_infos); + void NotifyOfNetworkPreference(JNIEnv* env, + const JavaRef& j_caller, + const JavaRef& j_connection_type, + jint preference); // Visible for testing. 
absl::optional FindNetworkHandleFromAddress( const rtc::IPAddress& address) const; private: - void OnNetworkConnected_w(const NetworkInformation& network_info); - void OnNetworkDisconnected_w(NetworkHandle network_handle); + void OnNetworkConnected_n(const NetworkInformation& network_info); + void OnNetworkDisconnected_n(NetworkHandle network_handle); + void OnNetworkPreference_n(NetworkType type, + rtc::NetworkPreference preference); const int android_sdk_int_; ScopedJavaGlobalRef j_application_context_; ScopedJavaGlobalRef j_network_monitor_; - rtc::ThreadChecker thread_checker_; - bool started_ = false; - std::map adapter_type_by_name_; - std::map vpn_underlying_adapter_type_by_name_; - std::map network_handle_by_address_; - std::map network_info_by_handle_; - bool find_network_handle_without_ipv6_temporary_part_; + rtc::Thread* network_thread_; + bool started_ RTC_GUARDED_BY(network_thread_) = false; + std::map adapter_type_by_name_ + RTC_GUARDED_BY(network_thread_); + std::map vpn_underlying_adapter_type_by_name_ + RTC_GUARDED_BY(network_thread_); + std::map network_handle_by_address_ + RTC_GUARDED_BY(network_thread_); + std::map network_info_by_handle_ + RTC_GUARDED_BY(network_thread_); + std::map + network_preference_by_adapter_type_ RTC_GUARDED_BY(network_thread_); + bool find_network_handle_without_ipv6_temporary_part_ + RTC_GUARDED_BY(network_thread_) = false; + bool surface_cellular_types_ RTC_GUARDED_BY(network_thread_) = false; + rtc::AsyncInvoker invoker_; }; class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory { diff --git a/sdk/android/src/jni/android_video_track_source.h b/sdk/android/src/jni/android_video_track_source.h index 378d380a11..eeac48f1e8 100644 --- a/sdk/android/src/jni/android_video_track_source.h +++ b/sdk/android/src/jni/android_video_track_source.h @@ -13,10 +13,8 @@ #include -#include "common_video/include/i420_buffer_pool.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include 
"media/base/adapted_video_track_source.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/checks.h" #include "rtc_base/thread.h" #include "rtc_base/timestamp_aligner.h" diff --git a/sdk/android/src/jni/audio_device/aaudio_recorder.cc b/sdk/android/src/jni/audio_device/aaudio_recorder.cc index 65bef4b1ba..34b14f4509 100644 --- a/sdk/android/src/jni/audio_device/aaudio_recorder.cc +++ b/sdk/android/src/jni/audio_device/aaudio_recorder.cc @@ -18,8 +18,6 @@ #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/sleep.h" - namespace webrtc { namespace jni { diff --git a/sdk/android/src/jni/audio_device/audio_device_module.cc b/sdk/android/src/jni/audio_device/audio_device_module.cc index b4cb184177..eb5d93fa29 100644 --- a/sdk/android/src/jni/audio_device/audio_device_module.cc +++ b/sdk/android/src/jni/audio_device/audio_device_module.cc @@ -151,15 +151,13 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t PlayoutDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t RecordingDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetPlayoutDevice(uint16_t index) override { @@ -171,8 +169,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetPlayoutDevice( AudioDeviceModule::WindowsDeviceType device) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetRecordingDevice(uint16_t index) override { @@ -184,8 +181,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetRecordingDevice( AudioDeviceModule::WindowsDeviceType device) override { - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t PlayoutIsAvailable(bool* 
available) override { @@ -396,62 +392,52 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetMicrophoneVolume(uint32_t volume) override { RTC_LOG(INFO) << __FUNCTION__ << "(" << volume << ")"; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MicrophoneVolume(uint32_t* volume) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MinMicrophoneVolume(uint32_t* minVolume) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SpeakerMuteIsAvailable(bool* available) override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetSpeakerMute(bool enable) override { RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SpeakerMute(bool* enabled) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Should never be called"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MicrophoneMuteIsAvailable(bool* available) override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t SetMicrophoneMute(bool enable) override { RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t MicrophoneMute(bool* enabled) const override { RTC_LOG(INFO) << __FUNCTION__; - FATAL() << "Not implemented"; - return -1; + RTC_CHECK_NOTREACHED(); } int32_t StereoPlayoutIsAvailable(bool* available) const override { @@ -516,7 +502,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } // Returns 
true if the device both supports built in AEC and the device - // is not blacklisted. + // is not blocklisted. // Currently, if OpenSL ES is used in both directions, this method will still // report the correct value and it has the correct effect. As an example: // a device supports built in AEC and this method returns true. Libjingle @@ -544,7 +530,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } // Returns true if the device both supports built in NS and the device - // is not blacklisted. + // is not blocklisted. // TODO(henrika): add implementation for OpenSL ES based audio as well. // In addition, see comments for BuiltInAECIsAvailable(). bool BuiltInNSIsAvailable() const override { @@ -569,8 +555,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t EnableBuiltInAGC(bool enable) override { RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; - FATAL() << "HW AGC is not available"; - return -1; + RTC_CHECK_NOTREACHED(); } // TODO(henrika): add implementation for OpenSL ES based audio as well. 
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc index 8f0a041711..d5b880b1b0 100644 --- a/sdk/android/src/jni/audio_device/audio_track_jni.cc +++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc @@ -20,6 +20,7 @@ #include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h" #include "sdk/android/src/jni/jni_helpers.h" #include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" namespace webrtc { @@ -89,12 +90,33 @@ int32_t AudioTrackJni::InitPlayout() { nullptr); if (buffer_size_factor == 0) buffer_size_factor = 1.0; - if (!Java_WebRtcAudioTrack_initPlayout( - env_, j_audio_track_, audio_parameters_.sample_rate(), - static_cast(audio_parameters_.channels()), buffer_size_factor)) { + int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout( + env_, j_audio_track_, audio_parameters_.sample_rate(), + static_cast(audio_parameters_.channels()), buffer_size_factor); + if (requested_buffer_size_bytes < 0) { RTC_LOG(LS_ERROR) << "InitPlayout failed"; return -1; } + // Update UMA histograms for both the requested and actual buffer size. + // To avoid division by zero, we assume the sample rate is 48k if an invalid + // value is found. + const int sample_rate = audio_parameters_.sample_rate() <= 0 + ? 48000 + : audio_parameters_.sample_rate(); + // This calculation assumes that audio is mono. 
+ const int requested_buffer_size_ms = + (requested_buffer_size_bytes * 1000) / (2 * sample_rate); + RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs", + requested_buffer_size_ms, 0, 1000, 100); + int actual_buffer_size_frames = + Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_); + if (actual_buffer_size_frames >= 0) { + const int actual_buffer_size_ms = + actual_buffer_size_frames * 1000 / sample_rate; + RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs", + actual_buffer_size_ms, 0, 1000, 100); + } + initialized_ = true; return 0; } diff --git a/sdk/android/src/jni/audio_mixer_jni.cc b/sdk/android/src/jni/audio_mixer_jni.cc new file mode 100644 index 0000000000..422b35df70 --- /dev/null +++ b/sdk/android/src/jni/audio_mixer_jni.cc @@ -0,0 +1,181 @@ +// +// Created by Piasy on 06/06/2019. +// + +#include +#include + +#include "audio/audio_transport_impl.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/backing_track/avx_helper.h" +#include "modules/backing_track/bt_audio_mixer.h" + +extern "C" { + +struct MixerHolder { + webrtc::BtAudioMixer* mixer; + int16_t* buffer; + jobject callback; + jmethodID onSsrcFinished; + jmethodID onSsrcError; +}; + +static JavaVM* g_vm = nullptr; + +static int enterJava(JNIEnv** env) { + int getEnvStat = g_vm->GetEnv((void**) env, JNI_VERSION_1_6); + if (getEnvStat == JNI_EDETACHED) { + g_vm->AttachCurrentThread(env, NULL); + } else if (getEnvStat != JNI_OK) { + return getEnvStat; + } + + return getEnvStat; +} + +static void leaveJava(int getEnvStat) { + if (getEnvStat == JNI_EDETACHED) { + g_vm->DetachCurrentThread(); + } +} + +static void preDeliverRecordedData(void* opaque, void* audioSamples, + const size_t nSamples, const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec) { + MixerHolder* holder = reinterpret_cast(opaque); + size_t size = nSamples * nBytesPerSample * nChannels; + 
holder->mixer->AddRecordedDataAndMix(audioSamples, size, holder->buffer); + memcpy(audioSamples, holder->buffer, size); +} + +static void onSourceFinish(void* opaque, int32_t ssrc) { + MixerHolder* holder = reinterpret_cast(opaque); + JNIEnv* env = nullptr; + int stat = enterJava(&env); + + if (env && holder && holder->callback && holder->onSsrcFinished) { + env->CallVoidMethod(holder->callback, holder->onSsrcFinished, ssrc); + } + + leaveJava(stat); +} + +static void onSourceError(void* opaque, int32_t ssrc, int32_t code) { + MixerHolder* holder = reinterpret_cast(opaque); + JNIEnv* env = nullptr; + int stat = enterJava(&env); + + if (env && holder && holder->callback && holder->onSsrcError) { + env->CallVoidMethod(holder->callback, holder->onSsrcError, ssrc, code); + } + + leaveJava(stat); +} + +JNIEXPORT jlong JNICALL Java_com_piasy_avconf_AudioMixer_nativeCreate( + JNIEnv* env, jclass, jint musicSsrc, jstring backingTrack_, jint recSsrc, + jint captureSampleRate, jint captureChannelNum, jint frameDurationUs, + jboolean enableMusicSyncFix, jint waiting_mix_delay_frames, + jobject callback) { + env->GetJavaVM(&g_vm); + + const char* backingTrack = env->GetStringUTFChars(backingTrack_, 0); + + MixerHolder* holder = new MixerHolder(); + holder->callback = env->NewGlobalRef(callback); + jclass clazz = env->FindClass("com/piasy/avconf/AudioMixer$MixerCallback"); + holder->onSsrcFinished = + env->GetMethodID(clazz, "onMixerSsrcFinished", "(I)V"); + holder->onSsrcError = env->GetMethodID(clazz, "onMixerSsrcError", "(II)V"); + + webrtc::MixerConfig config(std::vector(), + captureSampleRate, captureChannelNum, + frameDurationUs, enableMusicSyncFix, + waiting_mix_delay_frames); + config.sources.emplace_back(webrtc::MixerSource::TYPE_RECORD, recSsrc, 1, + 1, true, true, false, false, "", + captureSampleRate, captureChannelNum); + config.sources.emplace_back( + webrtc::MixerSource::TYPE_FILE, musicSsrc, 1, 1, false, false, false, + false, std::string(backingTrack), 
captureSampleRate, captureChannelNum); + webrtc::BtAudioMixer* mixer = new webrtc::BtAudioMixer( + config, onSourceFinish, onSourceError, holder); + + holder->mixer = mixer; + holder->buffer = new int16_t[frameDurationUs * captureSampleRate / + 1000 * captureChannelNum]; + + webrtc::AudioDeviceBuffer* adb = webrtc::AudioDeviceBuffer::Instance(); + if (adb) { + webrtc::AudioTransportImpl* audio_transport = + reinterpret_cast( + adb->audio_transport()); + audio_transport->SetPreDeliverRecordedDataCallback( + preDeliverRecordedData, holder); + } + + env->ReleaseStringUTFChars(backingTrack_, backingTrack); + + return reinterpret_cast(holder); +} + +JNIEXPORT void JNICALL Java_com_piasy_avconf_AudioMixer_nativeToggleEnable( + JNIEnv*, jclass, jlong nativeMixer, jint ssrc, jboolean enable) { + reinterpret_cast(nativeMixer)->mixer + ->ToggleEnable(ssrc, enable); +} + +JNIEXPORT void JNICALL Java_com_piasy_avconf_AudioMixer_nativeToggleStreaming( + JNIEnv*, jclass, jlong nativeMixer, jint ssrc, jboolean streaming) { + reinterpret_cast(nativeMixer)->mixer + ->ToggleStreaming(ssrc, streaming); +} + +JNIEXPORT void JNICALL Java_com_piasy_avconf_AudioMixer_nativeTogglePlayback( + JNIEnv*, jclass, jlong nativeMixer, jint ssrc, jboolean playback) { + reinterpret_cast(nativeMixer)->mixer + ->TogglePlayback(ssrc, playback); +} + +JNIEXPORT void JNICALL Java_com_piasy_avconf_AudioMixer_nativeUpdateVolume( + JNIEnv*, jclass, jlong nativeMixer, jint ssrc, jfloat volume) { + reinterpret_cast(nativeMixer)->mixer + ->UpdateVolume(ssrc, volume, volume); +} + +JNIEXPORT jlong JNICALL Java_com_piasy_avconf_AudioMixer_nativeGetLengthMs( + JNIEnv*, jclass, jlong nativeMixer, jint ssrc) { + return reinterpret_cast(nativeMixer)->mixer + ->GetLengthMs(ssrc); +} + +JNIEXPORT jlong JNICALL Java_com_piasy_avconf_AudioMixer_nativeGetProgressMs( + JNIEnv*, jclass, jlong nativeMixer, jint ssrc) { + return reinterpret_cast(nativeMixer)->mixer + ->GetProgressMs(ssrc); +} + +JNIEXPORT void JNICALL 
Java_com_piasy_avconf_AudioMixer_nativeSeek( + JNIEnv*, jclass, jlong nativeMixer, jint ssrc, jlong progressMs) { + reinterpret_cast(nativeMixer)->mixer + ->Seek(ssrc, progressMs); +} + +JNIEXPORT void JNICALL Java_com_piasy_avconf_AudioMixer_nativeDestroy( + JNIEnv*, jclass, jlong nativeMixer) { + webrtc::AudioDeviceBuffer* adb = webrtc::AudioDeviceBuffer::Instance(); + if (adb) { + webrtc::AudioTransportImpl* audio_transport = + reinterpret_cast( + adb->audio_transport()); + audio_transport->SetPreDeliverRecordedDataCallback(nullptr, nullptr); + } + + MixerHolder* holder = reinterpret_cast(nativeMixer); + delete holder->mixer; + delete[] holder->buffer; + delete holder; +} + +} diff --git a/sdk/android/src/jni/encoded_image.cc b/sdk/android/src/jni/encoded_image.cc index e13653ca34..839f6a8f6a 100644 --- a/sdk/android/src/jni/encoded_image.cc +++ b/sdk/android/src/jni/encoded_image.cc @@ -70,7 +70,7 @@ ScopedJavaLocalRef NativeToJavaEncodedImage( static_cast(image._encodedWidth), static_cast(image._encodedHeight), image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec, frame_type, - static_cast(image.rotation_), image._completeFrame, qp); + static_cast(image.rotation_), qp); } ScopedJavaLocalRef NativeToJavaFrameTypeArray( @@ -98,8 +98,6 @@ EncodedImage JavaToNativeEncodedImage(JNIEnv* env, Java_EncodedImage_getEncodedHeight(env, j_encoded_image); frame.rotation_ = (VideoRotation)Java_EncodedImage_getRotation(env, j_encoded_image); - frame._completeFrame = - Java_EncodedImage_getCompleteFrame(env, j_encoded_image); frame.qp_ = JavaToNativeOptionalInt( env, Java_EncodedImage_getQp(env, j_encoded_image)) diff --git a/sdk/android/src/jni/pc/audio.cc b/sdk/android/src/jni/pc/audio.cc index 376c42178a..74c8b5547a 100644 --- a/sdk/android/src/jni/pc/audio.cc +++ b/sdk/android/src/jni/pc/audio.cc @@ -10,21 +10,11 @@ #include "sdk/android/src/jni/pc/audio.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include 
"api/audio_codecs/builtin_audio_encoder_factory.h" #include "modules/audio_processing/include/audio_processing.h" namespace webrtc { namespace jni { -rtc::scoped_refptr CreateAudioDecoderFactory() { - return CreateBuiltinAudioDecoderFactory(); -} - -rtc::scoped_refptr CreateAudioEncoderFactory() { - return CreateBuiltinAudioEncoderFactory(); -} - rtc::scoped_refptr CreateAudioProcessing() { return AudioProcessingBuilder().Create(); } diff --git a/sdk/android/src/jni/pc/audio.h b/sdk/android/src/jni/pc/audio.h index 1e8b3accc2..7a79bed986 100644 --- a/sdk/android/src/jni/pc/audio.h +++ b/sdk/android/src/jni/pc/audio.h @@ -11,20 +11,14 @@ #ifndef SDK_ANDROID_SRC_JNI_PC_AUDIO_H_ #define SDK_ANDROID_SRC_JNI_PC_AUDIO_H_ +#include "api/scoped_refptr.h" // Adding 'nogncheck' to disable the gn include headers check. // We don't want this target depend on audio related targets -#include "api/audio_codecs/audio_decoder_factory.h" // nogncheck -#include "api/audio_codecs/audio_encoder_factory.h" // nogncheck -#include "api/scoped_refptr.h" #include "modules/audio_processing/include/audio_processing.h" // nogncheck namespace webrtc { namespace jni { -rtc::scoped_refptr CreateAudioDecoderFactory(); - -rtc::scoped_refptr CreateAudioEncoderFactory(); - rtc::scoped_refptr CreateAudioProcessing(); } // namespace jni diff --git a/sdk/android/src/jni/pc/owned_factory_and_threads.cc b/sdk/android/src/jni/pc/owned_factory_and_threads.cc index e42b117e57..5e00ece8ce 100644 --- a/sdk/android/src/jni/pc/owned_factory_and_threads.cc +++ b/sdk/android/src/jni/pc/owned_factory_and_threads.cc @@ -19,19 +19,11 @@ OwnedFactoryAndThreads::OwnedFactoryAndThreads( std::unique_ptr network_thread, std::unique_ptr worker_thread, std::unique_ptr signaling_thread, - rtc::NetworkMonitorFactory* network_monitor_factory, const rtc::scoped_refptr& factory) : network_thread_(std::move(network_thread)), worker_thread_(std::move(worker_thread)), signaling_thread_(std::move(signaling_thread)), - 
network_monitor_factory_(network_monitor_factory), factory_(factory) {} -OwnedFactoryAndThreads::~OwnedFactoryAndThreads() { - if (network_monitor_factory_ != nullptr) { - rtc::NetworkMonitorFactory::ReleaseFactory(network_monitor_factory_); - } -} - } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/pc/owned_factory_and_threads.h b/sdk/android/src/jni/pc/owned_factory_and_threads.h index 845d4dbd70..e87879c13f 100644 --- a/sdk/android/src/jni/pc/owned_factory_and_threads.h +++ b/sdk/android/src/jni/pc/owned_factory_and_threads.h @@ -33,25 +33,19 @@ class OwnedFactoryAndThreads { std::unique_ptr network_thread, std::unique_ptr worker_thread, std::unique_ptr signaling_thread, - rtc::NetworkMonitorFactory* network_monitor_factory, const rtc::scoped_refptr& factory); - ~OwnedFactoryAndThreads(); + ~OwnedFactoryAndThreads() = default; PeerConnectionFactoryInterface* factory() { return factory_.get(); } rtc::Thread* network_thread() { return network_thread_.get(); } rtc::Thread* signaling_thread() { return signaling_thread_.get(); } rtc::Thread* worker_thread() { return worker_thread_.get(); } - rtc::NetworkMonitorFactory* network_monitor_factory() { - return network_monitor_factory_; - } - void clear_network_monitor_factory() { network_monitor_factory_ = nullptr; } private: const std::unique_ptr network_thread_; const std::unique_ptr worker_thread_; const std::unique_ptr signaling_thread_; - rtc::NetworkMonitorFactory* network_monitor_factory_; const rtc::scoped_refptr factory_; }; diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc index 0ae39fbf66..c9c35b9a63 100644 --- a/sdk/android/src/jni/pc/peer_connection.cc +++ b/sdk/android/src/jni/pc/peer_connection.cc @@ -128,7 +128,8 @@ ScopedJavaLocalRef NativeToJavaCandidatePairChange( env, NativeToJavaCandidate(env, selected_pair.local_candidate()), NativeToJavaCandidate(env, selected_pair.remote_candidate()), static_cast(event.last_data_received_ms), 
- NativeToJavaString(env, event.reason)); + NativeToJavaString(env, event.reason), + static_cast(event.estimated_disconnected_time_ms)); } } // namespace @@ -264,11 +265,6 @@ void JavaToNativeRTCConfiguration( rtc_config->sdp_semantics = JavaToNativeSdpSemantics(jni, j_sdp_semantics); rtc_config->active_reset_srtp_params = Java_RTCConfiguration_getActiveResetSrtpParams(jni, j_rtc_config); - rtc_config->use_media_transport = - Java_RTCConfiguration_getUseMediaTransport(jni, j_rtc_config); - rtc_config->use_media_transport_for_data_channels = - Java_RTCConfiguration_getUseMediaTransportForDataChannels(jni, - j_rtc_config); rtc_config->crypto_options = JavaToNativeOptionalCryptoOptions(jni, j_crypto_options); @@ -483,17 +479,39 @@ static jlong JNI_PeerConnection_GetNativePeerConnection( static ScopedJavaLocalRef JNI_PeerConnection_GetLocalDescription( JNIEnv* jni, const JavaParamRef& j_pc) { - const SessionDescriptionInterface* sdp = - ExtractNativePC(jni, j_pc)->local_description(); - return sdp ? NativeToJavaSessionDescription(jni, sdp) : nullptr; + PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc); + // It's only safe to operate on SessionDescriptionInterface on the + // signaling thread, but |jni| may only be used on the current thread, so we + // must do this odd dance. + std::string sdp; + std::string type; + pc->signaling_thread()->Invoke(RTC_FROM_HERE, [pc, &sdp, &type] { + const SessionDescriptionInterface* desc = pc->local_description(); + if (desc) { + RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; + type = desc->type(); + } + }); + return sdp.empty() ? nullptr : NativeToJavaSessionDescription(jni, sdp, type); } static ScopedJavaLocalRef JNI_PeerConnection_GetRemoteDescription( JNIEnv* jni, const JavaParamRef& j_pc) { - const SessionDescriptionInterface* sdp = - ExtractNativePC(jni, j_pc)->remote_description(); - return sdp ? 
NativeToJavaSessionDescription(jni, sdp) : nullptr; + PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc); + // It's only safe to operate on SessionDescriptionInterface on the + // signaling thread, but |jni| may only be used on the current thread, so we + // must do this odd dance. + std::string sdp; + std::string type; + pc->signaling_thread()->Invoke(RTC_FROM_HERE, [pc, &sdp, &type] { + const SessionDescriptionInterface* desc = pc->remote_description(); + if (desc) { + RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; + type = desc->type(); + } + }); + return sdp.empty() ? nullptr : NativeToJavaSessionDescription(jni, sdp, type); } static ScopedJavaLocalRef JNI_PeerConnection_GetCertificate( @@ -763,9 +781,9 @@ static jboolean JNI_PeerConnection_SetBitrate( const JavaParamRef& j_min, const JavaParamRef& j_current, const JavaParamRef& j_max) { - PeerConnectionInterface::BitrateParameters params; + BitrateSettings params; params.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min); - params.current_bitrate_bps = JavaToNativeOptionalInt(jni, j_current); + params.start_bitrate_bps = JavaToNativeOptionalInt(jni, j_current); params.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max); return ExtractNativePC(jni, j_pc)->SetBitrate(params).ok(); } @@ -794,6 +812,22 @@ static void JNI_PeerConnection_StopRtcEventLog( ExtractNativePC(jni, j_pc)->StopRtcEventLog(); } +static int JNI_PeerConnection_StartRecorder( + JNIEnv* jni, + const JavaParamRef& j_pc, + int dir, + const JavaParamRef& j_path) { + std::string path = JavaToNativeString(jni, j_path); + return ExtractNativePC(jni, j_pc)->StartRecorder(dir, path); +} + +static int JNI_PeerConnection_StopRecorder( + JNIEnv* jni, + const JavaParamRef& j_pc, + int dir) { + return ExtractNativePC(jni, j_pc)->StopRecorder(dir); +} + static ScopedJavaLocalRef JNI_PeerConnection_SignalingState( JNIEnv* env, const JavaParamRef& j_pc) { diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc 
b/sdk/android/src/jni/pc/peer_connection_factory.cc index 48dd6e41d8..2392db2403 100644 --- a/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -138,11 +138,10 @@ ScopedJavaLocalRef NativeToScopedJavaPeerConnectionFactory( rtc::scoped_refptr pcf, std::unique_ptr network_thread, std::unique_ptr worker_thread, - std::unique_ptr signaling_thread, - rtc::NetworkMonitorFactory* network_monitor_factory) { + std::unique_ptr signaling_thread) { OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads( std::move(network_thread), std::move(worker_thread), - std::move(signaling_thread), network_monitor_factory, pcf); + std::move(signaling_thread), pcf); ScopedJavaLocalRef j_pcf = Java_PeerConnectionFactory_Constructor( env, NativeToJavaPointer(owned_factory)); @@ -172,17 +171,15 @@ PeerConnectionFactoryInterface* PeerConnectionFactoryFromJava(jlong j_p) { // Set in PeerConnectionFactory_initializeAndroidGlobals(). static bool factory_static_initialized = false; - jobject NativeToJavaPeerConnectionFactory( JNIEnv* jni, rtc::scoped_refptr pcf, std::unique_ptr network_thread, std::unique_ptr worker_thread, - std::unique_ptr signaling_thread, - rtc::NetworkMonitorFactory* network_monitor_factory) { + std::unique_ptr signaling_thread) { return NativeToScopedJavaPeerConnectionFactory( jni, pcf, std::move(network_thread), std::move(worker_thread), - std::move(signaling_thread), network_monitor_factory) + std::move(signaling_thread)) .Release(); } @@ -246,7 +243,7 @@ static void JNI_PeerConnectionFactory_ShutdownInternalTracer(JNIEnv* jni) { // Following parameters are optional: // |audio_device_module|, |jencoder_factory|, |jdecoder_factory|, -// |audio_processor|, |media_transport_factory|, |fec_controller_factory|, +// |audio_processor|, |fec_controller_factory|, // |network_state_predictor_factory|, |neteq_factory|. 
ScopedJavaLocalRef CreatePeerConnectionFactoryForJava( JNIEnv* jni, @@ -263,7 +260,6 @@ ScopedJavaLocalRef CreatePeerConnectionFactoryForJava( network_controller_factory, std::unique_ptr network_state_predictor_factory, - std::unique_ptr media_transport_factory, std::unique_ptr neteq_factory) { // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but // ThreadManager only WrapCurrentThread()s the thread where it is first @@ -285,18 +281,9 @@ ScopedJavaLocalRef CreatePeerConnectionFactoryForJava( signaling_thread->SetName("signaling_thread", NULL); RTC_CHECK(signaling_thread->Start()) << "Failed to start thread"; - rtc::NetworkMonitorFactory* network_monitor_factory = nullptr; - const absl::optional options = JavaToNativePeerConnectionFactoryOptions(jni, joptions); - // Do not create network_monitor_factory only if the options are - // provided and disable_network_monitor therein is set to true. - if (!(options && options->disable_network_monitor)) { - network_monitor_factory = new AndroidNetworkMonitorFactory(); - rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory); - } - PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = network_thread.get(); dependencies.worker_thread = worker_thread.get(); @@ -310,8 +297,11 @@ ScopedJavaLocalRef CreatePeerConnectionFactoryForJava( std::move(network_controller_factory); dependencies.network_state_predictor_factory = std::move(network_state_predictor_factory); - dependencies.media_transport_factory = std::move(media_transport_factory); dependencies.neteq_factory = std::move(neteq_factory); + if (!(options && options->disable_network_monitor)) { + dependencies.network_monitor_factory = + std::make_unique(); + } cricket::MediaEngineDependencies media_dependencies; media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); @@ -338,7 +328,7 @@ ScopedJavaLocalRef CreatePeerConnectionFactoryForJava( return NativeToScopedJavaPeerConnectionFactory( jni, 
factory, std::move(network_thread), std::move(worker_thread), - std::move(signaling_thread), network_monitor_factory); + std::move(signaling_thread)); } static ScopedJavaLocalRef @@ -355,7 +345,6 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory( jlong native_fec_controller_factory, jlong native_network_controller_factory, jlong native_network_state_predictor_factory, - jlong native_media_transport_factory, jlong native_neteq_factory) { rtc::scoped_refptr audio_processor = reinterpret_cast(native_audio_processor); @@ -372,8 +361,6 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory( native_network_controller_factory), TakeOwnershipOfUniquePtr( native_network_state_predictor_factory), - TakeOwnershipOfUniquePtr( - native_media_transport_factory), TakeOwnershipOfUniquePtr(native_neteq_factory)); } diff --git a/sdk/android/src/jni/pc/peer_connection_factory.h b/sdk/android/src/jni/pc/peer_connection_factory.h index 904352f425..5bfdb7a808 100644 --- a/sdk/android/src/jni/pc/peer_connection_factory.h +++ b/sdk/android/src/jni/pc/peer_connection_factory.h @@ -24,8 +24,7 @@ jobject NativeToJavaPeerConnectionFactory( rtc::scoped_refptr pcf, std::unique_ptr network_thread, std::unique_ptr worker_thread, - std::unique_ptr signaling_thread, - rtc::NetworkMonitorFactory* network_monitor_factory = nullptr); + std::unique_ptr signaling_thread); } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/pc/rtp_parameters.cc b/sdk/android/src/jni/pc/rtp_parameters.cc index 4bed3f8127..a65fa6eaa9 100644 --- a/sdk/android/src/jni/pc/rtp_parameters.cc +++ b/sdk/android/src/jni/pc/rtp_parameters.cc @@ -47,6 +47,7 @@ ScopedJavaLocalRef NativeToJavaRtpEncodingParameter( const RtpEncodingParameters& encoding) { return Java_Encoding_Constructor( env, NativeToJavaString(env, encoding.rid), encoding.active, + encoding.bitrate_priority, static_cast(encoding.network_priority), NativeToJavaInteger(env, encoding.max_bitrate_bps), NativeToJavaInteger(env, 
encoding.min_bitrate_bps), NativeToJavaInteger(env, encoding.max_framerate), @@ -95,6 +96,10 @@ RtpEncodingParameters JavaToNativeRtpEncodingParameters( encoding.active = Java_Encoding_getActive(jni, j_encoding_parameters); ScopedJavaLocalRef j_max_bitrate = Java_Encoding_getMaxBitrateBps(jni, j_encoding_parameters); + encoding.bitrate_priority = + Java_Encoding_getBitratePriority(jni, j_encoding_parameters); + encoding.network_priority = static_cast( + Java_Encoding_getNetworkPriority(jni, j_encoding_parameters)); encoding.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max_bitrate); ScopedJavaLocalRef j_min_bitrate = Java_Encoding_getMinBitrateBps(jni, j_encoding_parameters); @@ -187,8 +192,10 @@ ScopedJavaLocalRef NativeToJavaRtpParameters( const RtpParameters& parameters) { return Java_RtpParameters_Constructor( env, NativeToJavaString(env, parameters.transaction_id), - Java_DegradationPreference_fromNativeIndex( - env, static_cast(parameters.degradation_preference)), + parameters.degradation_preference.has_value() + ? 
Java_DegradationPreference_fromNativeIndex( + env, static_cast(*parameters.degradation_preference)) + : nullptr, NativeToJavaRtpRtcpParameters(env, parameters.rtcp), NativeToJavaList(env, parameters.header_extensions, &NativeToJavaRtpHeaderExtensionParameter), diff --git a/sdk/android/src/jni/pc/rtp_transceiver.cc b/sdk/android/src/jni/pc/rtp_transceiver.cc index 7d8cfdef49..1d468461f1 100644 --- a/sdk/android/src/jni/pc/rtp_transceiver.cc +++ b/sdk/android/src/jni/pc/rtp_transceiver.cc @@ -139,23 +139,37 @@ ScopedJavaLocalRef JNI_RtpTransceiver_CurrentDirection( : nullptr; } -void JNI_RtpTransceiver_Stop(JNIEnv* jni, - jlong j_rtp_transceiver_pointer) { - reinterpret_cast(j_rtp_transceiver_pointer)->Stop(); +void JNI_RtpTransceiver_StopInternal(JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + reinterpret_cast(j_rtp_transceiver_pointer) + ->StopInternal(); +} + +void JNI_RtpTransceiver_StopStandard(JNIEnv* jni, + jlong j_rtp_transceiver_pointer) { + reinterpret_cast(j_rtp_transceiver_pointer) + ->StopStandard(); } -void JNI_RtpTransceiver_SetDirection( +jboolean JNI_RtpTransceiver_SetDirection( JNIEnv* jni, jlong j_rtp_transceiver_pointer, const base::android::JavaParamRef& j_rtp_transceiver_direction) { if (IsNull(jni, j_rtp_transceiver_direction)) { - return; + return false; } RtpTransceiverDirection direction = static_cast( Java_RtpTransceiverDirection_getNativeIndex(jni, j_rtp_transceiver_direction)); - reinterpret_cast(j_rtp_transceiver_pointer) - ->SetDirection(direction); + webrtc::RTCError error = + reinterpret_cast(j_rtp_transceiver_pointer) + ->SetDirectionWithError(direction); + if (!error.ok()) { + RTC_LOG(LS_WARNING) << "SetDirection failed, code " + << ToString(error.type()) << ", message " + << error.message(); + } + return error.ok(); } } // namespace jni diff --git a/sdk/android/src/jni/pc/sdp_observer.cc b/sdk/android/src/jni/pc/sdp_observer.cc index fc59d1749a..d1842a3db0 100644 --- a/sdk/android/src/jni/pc/sdp_observer.cc +++ 
b/sdk/android/src/jni/pc/sdp_observer.cc @@ -31,8 +31,11 @@ CreateSdpObserverJni::~CreateSdpObserverJni() = default; void CreateSdpObserverJni::OnSuccess(SessionDescriptionInterface* desc) { JNIEnv* env = AttachCurrentThreadIfNeeded(); - Java_SdpObserver_onCreateSuccess(env, j_observer_global_, - NativeToJavaSessionDescription(env, desc)); + std::string sdp; + RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; + Java_SdpObserver_onCreateSuccess( + env, j_observer_global_, + NativeToJavaSessionDescription(env, sdp, desc->type())); // OnSuccess transfers ownership of the description (there's a TODO to make // it use unique_ptr...). delete desc; diff --git a/sdk/android/src/jni/pc/session_description.cc b/sdk/android/src/jni/pc/session_description.cc index 1b335215dc..bbac721e51 100644 --- a/sdk/android/src/jni/pc/session_description.cc +++ b/sdk/android/src/jni/pc/session_description.cc @@ -37,12 +37,10 @@ std::unique_ptr JavaToNativeSessionDescription( ScopedJavaLocalRef NativeToJavaSessionDescription( JNIEnv* jni, - const SessionDescriptionInterface* desc) { - std::string sdp; - RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp; + const std::string& sdp, + const std::string& type) { return Java_SessionDescription_Constructor( - jni, - Java_Type_fromCanonicalForm(jni, NativeToJavaString(jni, desc->type())), + jni, Java_Type_fromCanonicalForm(jni, NativeToJavaString(jni, type)), NativeToJavaString(jni, sdp)); } diff --git a/sdk/android/src/jni/pc/session_description.h b/sdk/android/src/jni/pc/session_description.h index fe308474a7..f0f49cb2ee 100644 --- a/sdk/android/src/jni/pc/session_description.h +++ b/sdk/android/src/jni/pc/session_description.h @@ -13,6 +13,7 @@ #include #include +#include #include "api/jsep.h" #include "sdk/android/native_api/jni/scoped_java_ref.h" @@ -26,7 +27,8 @@ std::unique_ptr JavaToNativeSessionDescription( ScopedJavaLocalRef NativeToJavaSessionDescription( JNIEnv* jni, - const SessionDescriptionInterface* desc); + const 
std::string& sdp, + const std::string& type); } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/video_decoder_wrapper.cc b/sdk/android/src/jni/video_decoder_wrapper.cc index 54c6e1574c..d45c6f1d87 100644 --- a/sdk/android/src/jni/video_decoder_wrapper.cc +++ b/sdk/android/src/jni/video_decoder_wrapper.cc @@ -109,7 +109,7 @@ int32_t VideoDecoderWrapper::Decode( frame_extra_info.qp = qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt; { - rtc::CritScope cs(&frame_extra_infos_lock_); + MutexLock lock(&frame_extra_infos_lock_); frame_extra_infos_.push_back(frame_extra_info); } @@ -135,7 +135,7 @@ int32_t VideoDecoderWrapper::Release() { jni, Java_VideoDecoder_release(jni, decoder_)); RTC_LOG(LS_INFO) << "release: " << status; { - rtc::CritScope cs(&frame_extra_infos_lock_); + MutexLock lock(&frame_extra_infos_lock_); frame_extra_infos_.clear(); } initialized_ = false; @@ -163,7 +163,7 @@ void VideoDecoderWrapper::OnDecodedFrame( FrameExtraInfo frame_extra_info; { - rtc::CritScope cs(&frame_extra_infos_lock_); + MutexLock lock(&frame_extra_infos_lock_); do { if (frame_extra_infos_.empty()) { @@ -257,6 +257,17 @@ absl::optional VideoDecoderWrapper::ParseQP( } break; } +#ifndef DISABLE_H265 + case kVideoCodecH265: { + h265_bitstream_parser_.ParseBitstream(input_image.data(), + input_image.size()); + int qp_int; + if (h265_bitstream_parser_.GetLastSliceQp(&qp_int)) { + qp = qp_int; + } + break; + } +#endif default: break; // Default is to not provide QP. 
} diff --git a/sdk/android/src/jni/video_decoder_wrapper.h b/sdk/android/src/jni/video_decoder_wrapper.h index a7f686872c..42b821f2f3 100644 --- a/sdk/android/src/jni/video_decoder_wrapper.h +++ b/sdk/android/src/jni/video_decoder_wrapper.h @@ -12,12 +12,17 @@ #define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_ #include + #include #include #include "api/video_codecs/video_decoder.h" #include "common_video/h264/h264_bitstream_parser.h" +#ifndef DISABLE_H265 +#include "common_video/h265/h265_bitstream_parser.h" +#endif #include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -98,12 +103,16 @@ class VideoDecoderWrapper : public VideoDecoder { bool initialized_ RTC_GUARDED_BY(decoder_thread_checker_); H264BitstreamParser h264_bitstream_parser_ RTC_GUARDED_BY(decoder_thread_checker_); +#ifndef DISABLE_H265 + H265BitstreamParser h265_bitstream_parser_ + RTC_GUARDED_BY(decoder_thread_checker_); +#endif DecodedImageCallback* callback_ RTC_GUARDED_BY(callback_race_checker_); // Accessed both on the decoder thread and the callback thread. 
std::atomic qp_parsing_enabled_; - rtc::CriticalSection frame_extra_infos_lock_; + Mutex frame_extra_infos_lock_; std::deque frame_extra_infos_ RTC_GUARDED_BY(frame_extra_infos_lock_); }; diff --git a/sdk/android/src/jni/video_encoder_factory_wrapper.cc b/sdk/android/src/jni/video_encoder_factory_wrapper.cc index 538cc5bade..8ab4191db2 100644 --- a/sdk/android/src/jni/video_encoder_factory_wrapper.cc +++ b/sdk/android/src/jni/video_encoder_factory_wrapper.cc @@ -20,6 +20,49 @@ namespace webrtc { namespace jni { +namespace { +class VideoEncoderSelectorWrapper + : public VideoEncoderFactory::EncoderSelectorInterface { + public: + VideoEncoderSelectorWrapper(JNIEnv* jni, + const JavaRef& encoder_selector) + : encoder_selector_(jni, encoder_selector) {} + + void OnCurrentEncoder(const SdpVideoFormat& format) override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef j_codec_info = + SdpVideoFormatToVideoCodecInfo(jni, format); + Java_VideoEncoderSelector_onCurrentEncoder(jni, encoder_selector_, + j_codec_info); + } + + absl::optional OnAvailableBitrate( + const DataRate& rate) override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef codec_info = + Java_VideoEncoderSelector_onAvailableBitrate(jni, encoder_selector_, + rate.kbps()); + if (codec_info.is_null()) { + return absl::nullopt; + } + return VideoCodecInfoToSdpVideoFormat(jni, codec_info); + } + + absl::optional OnEncoderBroken() override { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef codec_info = + Java_VideoEncoderSelector_onEncoderBroken(jni, encoder_selector_); + if (codec_info.is_null()) { + return absl::nullopt; + } + return VideoCodecInfoToSdpVideoFormat(jni, codec_info); + } + + private: + const ScopedJavaGlobalRef encoder_selector_; +}; + +} // namespace VideoEncoderFactoryWrapper::VideoEncoderFactoryWrapper( JNIEnv* jni, @@ -58,19 +101,16 @@ std::vector VideoEncoderFactoryWrapper::GetImplementations() return implementations_; } 
-VideoEncoderFactory::CodecInfo VideoEncoderFactoryWrapper::QueryVideoEncoder( - const SdpVideoFormat& format) const { +std::unique_ptr +VideoEncoderFactoryWrapper::GetEncoderSelector() const { JNIEnv* jni = AttachCurrentThreadIfNeeded(); - ScopedJavaLocalRef j_codec_info = - SdpVideoFormatToVideoCodecInfo(jni, format); - ScopedJavaLocalRef encoder = Java_VideoEncoderFactory_createEncoder( - jni, encoder_factory_, j_codec_info); + ScopedJavaLocalRef selector = + Java_VideoEncoderFactory_getEncoderSelector(jni, encoder_factory_); + if (selector.is_null()) { + return nullptr; + } - CodecInfo codec_info; - // Check if this is a wrapped native software encoder implementation. - codec_info.is_hardware_accelerated = IsHardwareVideoEncoder(jni, encoder); - codec_info.has_internal_source = false; - return codec_info; + return std::make_unique(jni, selector); } } // namespace jni diff --git a/sdk/android/src/jni/video_encoder_factory_wrapper.h b/sdk/android/src/jni/video_encoder_factory_wrapper.h index 7f033aea80..2be6b1b33f 100644 --- a/sdk/android/src/jni/video_encoder_factory_wrapper.h +++ b/sdk/android/src/jni/video_encoder_factory_wrapper.h @@ -37,7 +37,7 @@ class VideoEncoderFactoryWrapper : public VideoEncoderFactory { std::vector GetImplementations() const override; - CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override; + std::unique_ptr GetEncoderSelector() const override; private: const ScopedJavaGlobalRef encoder_factory_; diff --git a/sdk/android/src/jni/video_encoder_wrapper.cc b/sdk/android/src/jni/video_encoder_wrapper.cc index 446b12ce53..eaa270b118 100644 --- a/sdk/android/src/jni/video_encoder_wrapper.cc +++ b/sdk/android/src/jni/video_encoder_wrapper.cc @@ -13,6 +13,9 @@ #include #include "common_video/h264/h264_common.h" +#ifndef DISABLE_H265 +#include "common_video/h265/h265_common.h" +#endif #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" #include 
"modules/video_coding/utility/vp8_header_parser.h" @@ -38,7 +41,8 @@ VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, // Get bitrate limits in the constructor. This is a static property of the // encoder and is expected to be available before it is initialized. - encoder_info_.resolution_bitrate_limits = GetResolutionBitrateLimits(jni); + encoder_info_.resolution_bitrate_limits = JavaToNativeResolutionBitrateLimits( + jni, Java_VideoEncoder_getResolutionBitrateLimits(jni, encoder_)); } VideoEncoderWrapper::~VideoEncoderWrapper() = default; @@ -112,7 +116,10 @@ int32_t VideoEncoderWrapper::Release() { int32_t status = JavaToNativeVideoCodecStatus( jni, Java_VideoEncoder_release(jni, encoder_)); RTC_LOG(LS_INFO) << "release: " << status; - frame_extra_infos_.clear(); + { + MutexLock lock(&frame_extra_infos_lock_); + frame_extra_infos_.clear(); + } initialized_ = false; return status; @@ -137,7 +144,10 @@ int32_t VideoEncoderWrapper::Encode( FrameExtraInfo info; info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec; info.timestamp_rtp = frame.timestamp(); - frame_extra_infos_.push_back(info); + { + MutexLock lock(&frame_extra_infos_lock_); + frame_extra_infos_.push_back(info); + } ScopedJavaLocalRef j_frame = NativeToJavaVideoFrame(jni, frame); ScopedJavaLocalRef ret = @@ -210,37 +220,6 @@ VideoEncoderWrapper::GetScalingSettingsInternal(JNIEnv* jni) const { } } -std::vector -VideoEncoderWrapper::GetResolutionBitrateLimits(JNIEnv* jni) const { - std::vector resolution_bitrate_limits; - - ScopedJavaLocalRef j_bitrate_limits_array = - Java_VideoEncoder_getResolutionBitrateLimits(jni, encoder_); - - const jsize num_thresholds = - jni->GetArrayLength(j_bitrate_limits_array.obj()); - for (int i = 0; i < num_thresholds; ++i) { - ScopedJavaLocalRef j_bitrate_limits = ScopedJavaLocalRef( - jni, jni->GetObjectArrayElement(j_bitrate_limits_array.obj(), i)); - - jint frame_size_pixels = - Java_ResolutionBitrateLimits_getFrameSizePixels(jni, 
j_bitrate_limits); - jint min_start_bitrate_bps = - Java_ResolutionBitrateLimits_getMinStartBitrateBps(jni, - j_bitrate_limits); - jint min_bitrate_bps = - Java_ResolutionBitrateLimits_getMinBitrateBps(jni, j_bitrate_limits); - jint max_bitrate_bps = - Java_ResolutionBitrateLimits_getMaxBitrateBps(jni, j_bitrate_limits); - - resolution_bitrate_limits.push_back(VideoEncoder::ResolutionBitrateLimits( - frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps, - max_bitrate_bps)); - } - - return resolution_bitrate_limits; -} - void VideoEncoderWrapper::OnEncodedFrame( JNIEnv* jni, const JavaRef& j_encoded_image) { @@ -259,19 +238,23 @@ void VideoEncoderWrapper::OnEncodedFrame( // entries that don't belong to us, and we need to be careful not to // remove them. Removing only those entries older than the current frame // provides this guarantee. - while (!frame_extra_infos_.empty() && - frame_extra_infos_.front().capture_time_ns < capture_time_ns) { + FrameExtraInfo frame_extra_info; + { + MutexLock lock(&frame_extra_infos_lock_); + while (!frame_extra_infos_.empty() && + frame_extra_infos_.front().capture_time_ns < capture_time_ns) { + frame_extra_infos_.pop_front(); + } + if (frame_extra_infos_.empty() || + frame_extra_infos_.front().capture_time_ns != capture_time_ns) { + RTC_LOG(LS_WARNING) + << "Java encoder produced an unexpected frame with timestamp: " + << capture_time_ns; + return; + } + frame_extra_info = frame_extra_infos_.front(); frame_extra_infos_.pop_front(); } - if (frame_extra_infos_.empty() || - frame_extra_infos_.front().capture_time_ns != capture_time_ns) { - RTC_LOG(LS_WARNING) - << "Java encoder produced an unexpected frame with timestamp: " - << capture_time_ns; - return; - } - FrameExtraInfo frame_extra_info = std::move(frame_extra_infos_.front()); - frame_extra_infos_.pop_front(); // This is a bit subtle. The |frame| variable from the lambda capture is // const. 
Which implies that (i) we need to make a copy to be able to @@ -284,13 +267,12 @@ void VideoEncoderWrapper::OnEncodedFrame( frame_copy.SetTimestamp(frame_extra_info.timestamp_rtp); frame_copy.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec; - RTPFragmentationHeader header = ParseFragmentationHeader(frame); if (frame_copy.qp_ < 0) frame_copy.qp_ = ParseQp(frame); CodecSpecificInfo info(ParseCodecSpecificInfo(frame)); - callback_->OnEncodedImage(frame_copy, &info, &header); + callback_->OnEncodedImage(frame_copy, &info); } int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni, @@ -319,35 +301,6 @@ int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni, return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; } -RTPFragmentationHeader VideoEncoderWrapper::ParseFragmentationHeader( - rtc::ArrayView buffer) { - RTPFragmentationHeader header; - if (codec_settings_.codecType == kVideoCodecH264) { - h264_bitstream_parser_.ParseBitstream(buffer.data(), buffer.size()); - - // For H.264 search for start codes. - const std::vector nalu_idxs = - H264::FindNaluIndices(buffer.data(), buffer.size()); - if (nalu_idxs.empty()) { - RTC_LOG(LS_ERROR) << "Start code is not found!"; - RTC_LOG(LS_ERROR) << "Data:" << buffer[0] << " " << buffer[1] << " " - << buffer[2] << " " << buffer[3] << " " << buffer[4] - << " " << buffer[5]; - } - header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); - for (size_t i = 0; i < nalu_idxs.size(); i++) { - header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset; - header.fragmentationLength[i] = nalu_idxs[i].payload_size; - } - } else { - // Generate a header describing a single fragment. 
- header.VerifyAndAllocateFragmentationHeader(1); - header.fragmentationOffset[0] = 0; - header.fragmentationLength[0] = buffer.size(); - } - return header; -} - int VideoEncoderWrapper::ParseQp(rtc::ArrayView buffer) { int qp; bool success; @@ -359,8 +312,14 @@ int VideoEncoderWrapper::ParseQp(rtc::ArrayView buffer) { success = vp9::GetQp(buffer.data(), buffer.size(), &qp); break; case kVideoCodecH264: + h264_bitstream_parser_.ParseBitstream(buffer.data(), buffer.size()); success = h264_bitstream_parser_.GetLastSliceQp(&qp); break; +#ifndef DISABLE_H265 + case kVideoCodecH265: + success = h265_bitstream_parser_.GetLastSliceQp(&qp); + break; +#endif default: // Default is to not provide QP. success = false; break; @@ -396,7 +355,6 @@ CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo( static_cast(gof_idx_++ % gof_.num_frames_in_gof); info.codecSpecific.VP9.num_spatial_layers = 1; info.codecSpecific.VP9.first_frame_in_picture = true; - info.codecSpecific.VP9.end_of_picture = true; info.codecSpecific.VP9.spatial_layer_resolution_present = false; if (info.codecSpecific.VP9.ss_data_available) { info.codecSpecific.VP9.spatial_layer_resolution_present = true; @@ -455,5 +413,34 @@ bool IsHardwareVideoEncoder(JNIEnv* jni, const JavaRef& j_encoder) { return Java_VideoEncoder_isHardwareEncoder(jni, j_encoder); } +std::vector +JavaToNativeResolutionBitrateLimits( + JNIEnv* jni, + const JavaRef& j_bitrate_limits_array) { + std::vector resolution_bitrate_limits; + + const jsize array_length = jni->GetArrayLength(j_bitrate_limits_array.obj()); + for (int i = 0; i < array_length; ++i) { + ScopedJavaLocalRef j_bitrate_limits = ScopedJavaLocalRef( + jni, jni->GetObjectArrayElement(j_bitrate_limits_array.obj(), i)); + + jint frame_size_pixels = + Java_ResolutionBitrateLimits_getFrameSizePixels(jni, j_bitrate_limits); + jint min_start_bitrate_bps = + Java_ResolutionBitrateLimits_getMinStartBitrateBps(jni, + j_bitrate_limits); + jint min_bitrate_bps = + 
Java_ResolutionBitrateLimits_getMinBitrateBps(jni, j_bitrate_limits); + jint max_bitrate_bps = + Java_ResolutionBitrateLimits_getMaxBitrateBps(jni, j_bitrate_limits); + + resolution_bitrate_limits.push_back(VideoEncoder::ResolutionBitrateLimits( + frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps, + max_bitrate_bps)); + } + + return resolution_bitrate_limits; +} + } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/video_encoder_wrapper.h b/sdk/android/src/jni/video_encoder_wrapper.h index 52323bc8a6..16eb1c2b8b 100644 --- a/sdk/android/src/jni/video_encoder_wrapper.h +++ b/sdk/android/src/jni/video_encoder_wrapper.h @@ -13,13 +13,18 @@ #include #include +#include #include #include #include "absl/types/optional.h" #include "api/video_codecs/video_encoder.h" #include "common_video/h264/h264_bitstream_parser.h" +#ifndef DISABLE_H265 +#include "common_video/h265/h265_bitstream_parser.h" +#endif #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/synchronization/mutex.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/video_frame.h" @@ -66,8 +71,6 @@ class VideoEncoderWrapper : public VideoEncoder { const JavaRef& j_value, const char* method_name); - RTPFragmentationHeader ParseFragmentationHeader( - rtc::ArrayView buffer); int ParseQp(rtc::ArrayView buffer); CodecSpecificInfo ParseCodecSpecificInfo(const EncodedImage& frame); ScopedJavaLocalRef ToJavaBitrateAllocation( @@ -83,7 +86,10 @@ class VideoEncoderWrapper : public VideoEncoder { const ScopedJavaGlobalRef encoder_; const ScopedJavaGlobalRef int_array_class_; - std::deque frame_extra_infos_; + // Modified both on the encoder thread and the callback thread. 
+ Mutex frame_extra_infos_lock_; + std::deque frame_extra_infos_ + RTC_GUARDED_BY(frame_extra_infos_lock_); EncodedImageCallback* callback_; bool initialized_; int num_resets_; @@ -92,6 +98,9 @@ class VideoEncoderWrapper : public VideoEncoder { VideoCodec codec_settings_; EncoderInfo encoder_info_; H264BitstreamParser h264_bitstream_parser_; +#ifndef DISABLE_H265 + H265BitstreamParser h265_bitstream_parser_; +#endif // VP9 variables to populate codec specific structure. GofInfoVP9 gof_; // Contains each frame's temporal information for @@ -108,6 +117,11 @@ std::unique_ptr JavaToNativeVideoEncoder( bool IsHardwareVideoEncoder(JNIEnv* jni, const JavaRef& j_encoder); +std::vector +JavaToNativeResolutionBitrateLimits( + JNIEnv* jni, + const JavaRef& j_bitrate_limits_array); + } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/video_frame.cc b/sdk/android/src/jni/video_frame.cc index d57fe8f9b7..860eebe5e2 100644 --- a/sdk/android/src/jni/video_frame.cc +++ b/sdk/android/src/jni/video_frame.cc @@ -152,14 +152,14 @@ const ScopedJavaGlobalRef& AndroidVideoBuffer::video_frame_buffer() return j_video_frame_buffer_; } -rtc::scoped_refptr AndroidVideoBuffer::CropAndScale( - JNIEnv* jni, +rtc::scoped_refptr AndroidVideoBuffer::CropAndScale( int crop_x, int crop_y, int crop_width, int crop_height, int scale_width, int scale_height) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); return Adopt(jni, Java_Buffer_cropAndScale(jni, j_video_frame_buffer_, crop_x, crop_y, crop_width, crop_height, scale_width, scale_height)); diff --git a/sdk/android/src/jni/video_frame.h b/sdk/android/src/jni/video_frame.h index f6b569a3e5..5e39b8a770 100644 --- a/sdk/android/src/jni/video_frame.h +++ b/sdk/android/src/jni/video_frame.h @@ -42,13 +42,12 @@ class AndroidVideoBuffer : public VideoFrameBuffer { // Crops a region defined by |crop_x|, |crop_y|, |crop_width| and // |crop_height|. Scales it to size |scale_width| x |scale_height|. 
- rtc::scoped_refptr CropAndScale(JNIEnv* jni, - int crop_x, - int crop_y, - int crop_width, - int crop_height, - int scale_width, - int scale_height); + rtc::scoped_refptr CropAndScale(int crop_x, + int crop_y, + int crop_width, + int crop_height, + int scale_width, + int scale_height) override; protected: // Should not be called directly. Adopts the Java VideoFrame.Buffer. Use diff --git a/sdk/android/src/jni/vp8_codec.cc b/sdk/android/src/jni/vp8_codec.cc index 32a5a62a8d..8b34495dc2 100644 --- a/sdk/android/src/jni/vp8_codec.cc +++ b/sdk/android/src/jni/vp8_codec.cc @@ -19,7 +19,7 @@ namespace webrtc { namespace jni { static jlong JNI_LibvpxVp8Encoder_CreateEncoder(JNIEnv* jni) { - return jlongFromPointer(VP8Encoder::Create(nullptr).release()); + return jlongFromPointer(VP8Encoder::Create().release()); } static jlong JNI_LibvpxVp8Decoder_CreateDecoder(JNIEnv* jni) { diff --git a/sdk/android/tests/resources/robolectric.properties b/sdk/android/tests/resources/robolectric.properties index 3acb7e5d59..a9bc625b18 100644 --- a/sdk/android/tests/resources/robolectric.properties +++ b/sdk/android/tests/resources/robolectric.properties @@ -1 +1 @@ -sdk=19,21,25,26 +sdk=21,25,26 diff --git a/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java b/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java index 73f37c110c..644b24b1b3 100644 --- a/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java +++ b/sdk/android/tests/src/org/webrtc/AndroidVideoDecoderTest.java @@ -14,7 +14,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyLong; -import static org.mockito.Matchers.eq; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; @@ -62,7 +62,7 @@ private class TestDecoder extends AndroidVideoDecoder { private boolean 
deliverDecodedFrameDone = true; public TestDecoder(MediaCodecWrapperFactory mediaCodecFactory, String codecName, - VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) { + VideoCodecMimeType codecType, int colorFormat, EglBase.Context sharedContext) { super(mediaCodecFactory, codecName, codecType, colorFormat, sharedContext); } @@ -143,10 +143,10 @@ protected void copyPlane( } private class TestDecoderBuilder { - private VideoCodecType codecType = VideoCodecType.VP8; + private VideoCodecMimeType codecType = VideoCodecMimeType.VP8; private boolean useSurface = true; - public TestDecoderBuilder setCodecType(VideoCodecType codecType) { + public TestDecoderBuilder setCodecType(VideoCodecMimeType codecType) { this.codecType = codecType; return this; } @@ -187,7 +187,6 @@ private EncodedImage createTestEncodedImage() { return EncodedImage.builder() .setBuffer(ByteBuffer.wrap(ENCODED_TEST_DATA), null) .setFrameType(FrameType.VideoFrameKey) - .setCompleteFrame(true) .createEncodedImage(); } @@ -216,7 +215,8 @@ public void cleanUp() { @Test public void testInit() { // Set-up. - AndroidVideoDecoder decoder = new TestDecoderBuilder().setCodecType(VideoCodecType.VP8).build(); + AndroidVideoDecoder decoder = + new TestDecoderBuilder().setCodecType(VideoCodecMimeType.VP8).build(); // Test. 
assertThat(decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback)) @@ -232,7 +232,7 @@ public void testInit() { assertThat(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)) .isEqualTo(TEST_DECODER_SETTINGS.height); assertThat(mediaFormat.getString(MediaFormat.KEY_MIME)) - .isEqualTo(VideoCodecType.VP8.mimeType()); + .isEqualTo(VideoCodecMimeType.VP8.mimeType()); } @Test diff --git a/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java b/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java index 7de3a42136..728e401495 100644 --- a/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java +++ b/sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java @@ -63,7 +63,7 @@ private static class TestEncoder extends HardwareVideoEncoder { private boolean deliverEncodedImageDone = true; TestEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, - VideoCodecType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, + VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, Map params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) { super(mediaCodecWrapperFactory, codecName, codecType, surfaceColorFormat, yuvColorFormat, @@ -113,9 +113,9 @@ protected void fillInputBuffer(ByteBuffer buffer, Buffer videoFrameBuffer) { } private class TestEncoderBuilder { - private VideoCodecType codecType = VideoCodecType.VP8; + private VideoCodecMimeType codecType = VideoCodecMimeType.VP8; - public TestEncoderBuilder setCodecType(VideoCodecType codecType) { + public TestEncoderBuilder setCodecType(VideoCodecMimeType codecType) { this.codecType = codecType; return this; } @@ -149,7 +149,7 @@ public void setUp() { public void testInit() { // Set-up. HardwareVideoEncoder encoder = - new TestEncoderBuilder().setCodecType(VideoCodecType.VP8).build(); + new TestEncoderBuilder().setCodecType(VideoCodecMimeType.VP8).build(); // Test. 
assertThat(encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback)) @@ -165,7 +165,7 @@ public void testInit() { assertThat(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)) .isEqualTo(TEST_ENCODER_SETTINGS.height); assertThat(mediaFormat.getString(MediaFormat.KEY_MIME)) - .isEqualTo(VideoCodecType.VP8.mimeType()); + .isEqualTo(VideoCodecMimeType.VP8.mimeType()); assertThat(fakeMediaCodecWrapper.getConfiguredFlags()) .isEqualTo(MediaCodec.CONFIGURE_FLAG_ENCODE); @@ -233,7 +233,6 @@ public void testDeliversOutputData() throws InterruptedException { assertThat(videoFrame.encodedHeight).isEqualTo(TEST_ENCODER_SETTINGS.height); assertThat(videoFrame.rotation).isEqualTo(0); assertThat(videoFrame.captureTimeNs).isEqualTo(42); - assertThat(videoFrame.completeFrame).isTrue(); assertThat(videoFrame.frameType).isEqualTo(FrameType.VideoFrameKey); CodecTestHelper.assertEqualContents( outputData, videoFrame.buffer, /* offset= */ 0, videoFrame.buffer.capacity()); diff --git a/sdk/android/tests/src/org/webrtc/RefCountDelegateTest.java b/sdk/android/tests/src/org/webrtc/RefCountDelegateTest.java new file mode 100644 index 0000000000..1f449c8bb7 --- /dev/null +++ b/sdk/android/tests/src/org/webrtc/RefCountDelegateTest.java @@ -0,0 +1,83 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import static com.google.common.truth.Truth.assertThat; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +import org.chromium.testing.local.LocalRobolectricTestRunner; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.robolectric.annotation.Config; + +@RunWith(LocalRobolectricTestRunner.class) +@Config(manifest = Config.NONE) +public class RefCountDelegateTest { + @Mock Runnable mockReleaseCallback; + private RefCountDelegate refCountDelegate; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + + refCountDelegate = new RefCountDelegate(mockReleaseCallback); + } + + @Test + public void testReleaseRunsReleaseCallback() { + refCountDelegate.release(); + verify(mockReleaseCallback).run(); + } + + @Test + public void testRetainIncreasesRefCount() { + refCountDelegate.retain(); + + refCountDelegate.release(); + verify(mockReleaseCallback, never()).run(); + + refCountDelegate.release(); + verify(mockReleaseCallback).run(); + } + + @Test(expected = IllegalStateException.class) + public void testReleaseAfterFreeThrowsIllegalStateException() { + refCountDelegate.release(); + refCountDelegate.release(); + } + + @Test(expected = IllegalStateException.class) + public void testRetainAfterFreeThrowsIllegalStateException() { + refCountDelegate.release(); + refCountDelegate.retain(); + } + + @Test + public void testSafeRetainBeforeFreeReturnsTrueAndIncreasesRefCount() { + assertThat(refCountDelegate.safeRetain()).isTrue(); + + refCountDelegate.release(); + verify(mockReleaseCallback, never()).run(); + + refCountDelegate.release(); + verify(mockReleaseCallback).run(); + } + + @Test + public void testSafeRetainAfterFreeReturnsFalse() { + refCountDelegate.release(); + assertThat(refCountDelegate.safeRetain()).isFalse(); + } +} diff --git a/sdk/android_gradle/.gitignore 
b/sdk/android_gradle/.gitignore new file mode 100644 index 0000000000..4f79da1c9f --- /dev/null +++ b/sdk/android_gradle/.gitignore @@ -0,0 +1,9 @@ +**.iml +.gradle +/local.properties +/.idea/ +.DS_Store +/captures +**/build +**/.externalNativeBuild +**/.cxx diff --git a/sdk/android_gradle/AppRTCMobile/build.gradle b/sdk/android_gradle/AppRTCMobile/build.gradle new file mode 100644 index 0000000000..6f8613f570 --- /dev/null +++ b/sdk/android_gradle/AppRTCMobile/build.gradle @@ -0,0 +1,45 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion rootProject.ext.androidCompileSdkVersion + buildToolsVersion rootProject.ext.androidBuildToolsVersion + + defaultConfig { + minSdkVersion rootProject.ext.minSdkVersion + targetSdkVersion rootProject.ext.targetSdkVersion + versionCode rootProject.ext.releaseVersionCode + versionName rootProject.ext.releaseVersionName + applicationId "org.appspot.apprtc" + multiDexEnabled true + + ndk.abiFilters = ['arm64-v8a'] + + sourceSets.main { + manifest.srcFile "$rootProject.rootDir/../../examples/androidapp/AndroidManifest.xml" + res.srcDirs "$rootProject.rootDir/../../examples/androidapp/res" + java.srcDirs "$rootProject.rootDir/../../examples/androidapp/src" + } + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + packagingOptions { + exclude 'META-INF/MANIFEST.MF' + } +} + +dependencies { + implementation "com.android.support:appcompat-v7:${rootProject.ext.androidSupportSdkVersion}" + + implementation (name:'autobanh', ext:'jar') + + implementation project(':webrtc') + //implementation 'org.webrtc:google-webrtc:1.0.23995' +} diff --git a/sdk/android_gradle/README.md b/sdk/android_gradle/README.md new file mode 100644 index 0000000000..3deb66fbc2 --- /dev/null +++ b/sdk/android_gradle/README.md @@ -0,0 +1,27 
@@ +# WebRTC Android Studio project + +A reference gradle project that lets you explore WebRTC Android in Android Studio. + +## Debug native code in Android Studio + +_break since #30771._ + +Edit `gradle.properties`, set `compile_native_code=true` and other variables according to your WebRTC checkout location, then enjoy :) + +Note: + ++ You need to download and sync the WebRTC repo yourself, this project won't do that for you; ++ Checkout the same WebRTC commit as this project does, which is [#30987](https://webrtc.googlesource.com/src/+/04c1b445019e10e54b96f70403d25cc54215faf3); ++ Use the same version of Android SDK and NDK as WebRTC does; ++ (re)Create `protoc` after updating WebRTC repo, to create the `protoc` program, you need to build WebRTC Android via ninja once, let's assume the output dir is `out/android_ninja`, then the `protoc` will be `out/android_ninja/clang_x64/protoc`; ++ Delete `webrtc_build_dir` after updating WebRTC repo; + +## WebRTC src extractor + +`python3 webrtc_src_extractor.py ` + +If you only want to use a small part of WebRTC code, this script could help you find all related sources and headers, and copy them into `dst dir`. Note that it's just a best effort script, you may still need to copy some files manually. + +## Caveat + ++ Delete `webrtc_build_dir` and `.externalNativeBuild`, run `./gradlew genWebrtcSrc`, and "Refresh Linked C++ Projects" (note that "Sync Project with Gradle Files" won't work) before your build and debug, otherwise the generated sources may not be compiled, undefined reference error will happen, e.g. `webrtc::rtclog::Event` related references; diff --git a/sdk/android_gradle/build.gradle b/sdk/android_gradle/build.gradle new file mode 100644 index 0000000000..66aeb556c9 --- /dev/null +++ b/sdk/android_gradle/build.gradle @@ -0,0 +1,39 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules.
+ +buildscript { + repositories { + google() + mavenCentral() + } + dependencies { + classpath 'com.android.tools.build:gradle:4.2.0' + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + flatDir { + dirs "$rootProject.rootDir/../../examples/androidapp/third_party/autobanh/lib" + } + google() + mavenCentral() + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} + +ext { + minSdkVersion = 16 + androidCompileSdkVersion = 30 + androidBuildToolsVersion = '30.0.3' + targetSdkVersion = 22 + androidSupportSdkVersion = '28.0.0' + + releaseVersionCode = 1 + releaseVersionName = '1.0.32599' +} diff --git a/sdk/android_gradle/extract_src_from_gn.py b/sdk/android_gradle/extract_src_from_gn.py new file mode 100644 index 0000000000..ef895be5c1 --- /dev/null +++ b/sdk/android_gradle/extract_src_from_gn.py @@ -0,0 +1,42 @@ +import sys + +def output(lines, start, end, prefix): + index = start + while index <= end: + line = lines[index] + splitter = None + if '\'' in line: + splitter = '\'' + elif '\"' in line: + splitter = '\"' + if splitter is not None: + path = line.split(splitter)[1] + if not path.endswith('.h'): + if path.startswith('//'): + print(prefix + path[2:]) + else: + print(prefix + path) + index = index + 1 + +def extract(gn_path, gn_src_set, output_prefix): + with open(gn_path, 'r') as f: + content = f.read().split('\n') + start_line = -1 + end_line = -1 + index = 0 + all_lines = len(content) + while index < all_lines: + if content[index].endswith('%s = [' % gn_src_set): + start_line = index + if start_line != -1 and content[index].endswith(']'): + end_line = index + break + index = index + 1 + if start_line != -1 and end_line != -1: + output(content, start_line + 1, end_line - 1, output_prefix) + +if __name__ == '__main__': + gn_path = sys.argv[1] + gn_src_set = sys.argv[2] + output_prefix = sys.argv[3] + extract(gn_path, gn_src_set, 
output_prefix) diff --git a/sdk/android_gradle/gradle.properties b/sdk/android_gradle/gradle.properties new file mode 100644 index 0000000000..03a5255eea --- /dev/null +++ b/sdk/android_gradle/gradle.properties @@ -0,0 +1,33 @@ +# Project-wide Gradle settings. + +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. + +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html + +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx1536m + +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects + +# org.gradle.parallel=true + +compile_native_code=false + +# the absolute path of WebRTC Android checkout, please use exactly the same commit as this repo. +webrtc_repo=/Users/piasy/src/media/webrtc_repo/webrtc_android/src +# the relative path of where generated source file is put, relative to `webrtc_repo`. +webrtc_build_dir=out/android_studio + +# the relative path of Android sdk jar, relative to `webrtc_repo`. +android_jar=third_party/android_sdk/public/platforms/android-29/android.jar + +# the absolute path of Python 2.x executable +py2=/Users/piasy/anaconda3/envs/py2/bin/python +# the absolute path of protoc executable, see README about how to create it. 
+protoc=/Users/piasy/src/media/webrtc_repo/webrtc_android/src/out/protoc diff --git a/sdk/android_gradle/gradle/wrapper/gradle-wrapper.jar b/sdk/android_gradle/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000..13372aef5e Binary files /dev/null and b/sdk/android_gradle/gradle/wrapper/gradle-wrapper.jar differ diff --git a/sdk/android_gradle/gradle/wrapper/gradle-wrapper.properties b/sdk/android_gradle/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000000..c0d4616f4c --- /dev/null +++ b/sdk/android_gradle/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Tue Jun 06 09:26:20 CST 2017 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-all.zip diff --git a/sdk/android_gradle/gradlew b/sdk/android_gradle/gradlew new file mode 100755 index 0000000000..9d82f78915 --- /dev/null +++ b/sdk/android_gradle/gradlew @@ -0,0 +1,160 @@ +#!/usr/bin/env bash + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn ( ) { + echo "$*" +} + +die ( ) { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; +esac + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. 
+while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 
+ esac +fi + +# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules +function splitJvmOpts() { + JVM_OPTS=("$@") +} +eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS +JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" + +exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/sdk/android_gradle/gradlew.bat b/sdk/android_gradle/gradlew.bat new file mode 100644 index 0000000000..aec99730b4 --- /dev/null +++ b/sdk/android_gradle/gradlew.bat @@ -0,0 +1,90 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. 
+ +goto fail + +:init +@rem Get command-line arguments, handling Windowz variants + +if not "%OS%" == "Windows_NT" goto win9xME_args +if "%@eval[2+2]" == "4" goto 4NT_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* +goto execute + +:4NT_args +@rem Get arguments from the 4NT Shell from JP Software +set CMD_LINE_ARGS=%$ + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/sdk/android_gradle/settings.gradle b/sdk/android_gradle/settings.gradle new file mode 100644 index 0000000000..c9deeadd6c --- /dev/null +++ b/sdk/android_gradle/settings.gradle @@ -0,0 +1 @@ +include ':AppRTCMobile', ':webrtc' diff --git a/sdk/android_gradle/webrtc/CMakeLists.txt b/sdk/android_gradle/webrtc/CMakeLists.txt new file mode 100644 index 0000000000..7f74a1d557 --- /dev/null +++ b/sdk/android_gradle/webrtc/CMakeLists.txt @@ -0,0 +1,219 @@ +cmake_minimum_required(VERSION 3.7) + +set(CWD ${CMAKE_CURRENT_LIST_DIR}) + +set(TEST_CC_FILTER ".*gunit.cc|.*/mock/.*|.*mock_.*|.*/mocks/.*|.*fake.*|.*/test/.*|.*/tests/.*|.*_test_.*|.*unittest.*|.*/end_to_end_tests/.*|.*_test.cc|.*_tests.cc|.*_integrationtest.cc|.*_perftest.cc|.*test_utils.cc|.*testutils.cc|.*testclient.cc|.*test.c") +set(OTHER_PLATFORM_CC_FILTER 
".*_chromeos.cc|.*_freebsd.cc|.*_fuchsia.cc|.*/fuchsia/.*|.*_ios.cc|.*_ios.mm|.*/ios/.*|.*_mac.cc|.*_mac.mm|.*/mac/.*|.*_openbsd.cc|.*_win.cc|.*/win/.*|.*win32.*|.*/windows/.*|.*sse.cc|.*sse2.cc|.*_mips.cc|.*_mips.c") + +# add at first, avoid header search path, definition chaos +add_subdirectory(third_party) + +add_definitions(-DWEBRTC_POSIX=1, -DWEBRTC_LINUX=1, -DWEBRTC_ANDROID=1) +add_definitions(-DWEBRTC_ENABLE_PROTOBUF=1) +add_definitions(-DWEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE) +add_definitions(-DHAVE_PTHREAD -DHAVE_SCTP -DHAVE_WEBRTC_VIDEO -DHAVE_WEBRTC_VOICE) +add_definitions(-DUSE_BUILTIN_SW_CODECS) +add_definitions(-DENABLE_RTC_EVENT_LOG) +add_definitions(-DWEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=0) + +if (${ANDROID_ABI} STREQUAL "armeabi-v7a") + add_definitions(-DWEBRTC_USE_BUILTIN_ISAC_FIX=1 -DWEBRTC_USE_BUILTIN_ISAC_FLOAT=0) +else() + add_definitions(-DWEBRTC_USE_BUILTIN_ISAC_FIX=0 -DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1) +endif() + +add_definitions(-DWEBRTC_OPUS_VARIABLE_COMPLEXITY=0) + +add_definitions(-DNO_TCMALLOC=1) + +if (${ANDROID_ABI} STREQUAL "arm64-v8a") + add_definitions(-DWEBRTC_ARCH_ARM64 -DWEBRTC_HAS_NEON) +elseif (${ANDROID_ABI} STREQUAL "armeabi-v7a") + add_definitions(-DWEBRTC_ARCH_ARM -DWEBRTC_ARCH_ARM_V7) +endif() + +add_definitions(-DWEBRTC_CODEC_ILBC -DWEBRTC_CODEC_OPUS -DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1 -DWEBRTC_CODEC_ISAC -DWEBRTC_CODEC_RED) + +add_definitions(-DWEBRTC_INTELLIGIBILITY_ENHANCER=0 -DWEBRTC_NS_FIXED) + +add_definitions(-DWEBRTC_APM_DEBUG_DUMP=0) + +add_definitions(-DHAVE_NETINET_IN_H) + +include_directories( + ${WEBRTC_REPO} + + ${WEBRTC_REPO}/${WEBRTC_BUILD_DIR}/gen + + ${WEBRTC_REPO}/third_party/abseil-cpp + ${WEBRTC_REPO}/third_party/boringssl/src/include + ${WEBRTC_REPO}/third_party/icu/source/common + ${WEBRTC_REPO}/third_party/ffmpeg + ${WEBRTC_REPO}/third_party/jsoncpp/source/include + ${WEBRTC_REPO}/third_party/libsrtp/config + ${WEBRTC_REPO}/third_party/libsrtp/crypto/include + 
${WEBRTC_REPO}/third_party/libsrtp/include + ${WEBRTC_REPO}/third_party/libyuv/include + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx + ${WEBRTC_REPO}/third_party/opus/src/include + ${WEBRTC_REPO}/third_party/protobuf/src + ${WEBRTC_REPO}/third_party/usrsctp/usrsctplib +) + +file(GLOB_RECURSE src_api + ${WEBRTC_REPO}/api/*.cc +) +list(FILTER src_api EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_api EXCLUDE REGEX + ".*echo_canceller3_config_json.cc|.*default_task_queue_factory_gcd.cc|.*default_task_queue_factory_stdlib.cc|.*default_task_queue_factory_win.cc|.*video_stream_decoder_create.cc" +) + +file(GLOB_RECURSE src_audio + ${WEBRTC_REPO}/audio/*.cc +) +list(FILTER src_audio EXCLUDE REGEX ${TEST_CC_FILTER}) + +file(GLOB_RECURSE src_call + ${WEBRTC_REPO}/call/*.cc +) +list(FILTER src_call EXCLUDE REGEX ${TEST_CC_FILTER}) +list(APPEND src_call ${WEBRTC_REPO}/call/fake_network_pipe.cc) + +file(GLOB_RECURSE src_common_audio + ${WEBRTC_REPO}/common_audio/*.c + ${WEBRTC_REPO}/common_audio/*.cc +) +list(FILTER src_common_audio EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_common_audio EXCLUDE REGEX ${OTHER_PLATFORM_CC_FILTER}) + +file(GLOB_RECURSE src_common_video + ${WEBRTC_REPO}/common_video/*.cc +) +list(FILTER src_common_video EXCLUDE REGEX ${TEST_CC_FILTER}) + +file(GLOB_RECURSE src_logging + ${WEBRTC_REPO}/logging/rtc_event_log/encoder/*.cc + ${WEBRTC_REPO}/logging/rtc_event_log/events/*.cc + ${WEBRTC_REPO}/logging/rtc_event_log/output/*.cc + ${WEBRTC_REPO}/logging/rtc_event_log/ice_logger.cc + ${WEBRTC_REPO}/logging/rtc_event_log/rtc_event_log.cc + ${WEBRTC_REPO}/logging/rtc_event_log/rtc_event_log_factory.cc + ${WEBRTC_REPO}/logging/rtc_event_log/rtc_event_log_impl.cc + ${WEBRTC_REPO}/logging/rtc_event_log/rtc_stream_config.cc + ${WEBRTC_REPO}/${WEBRTC_BUILD_DIR}/gen/logging/*.cc +) +list(FILTER src_logging EXCLUDE REGEX ${TEST_CC_FILTER}) + +file(GLOB_RECURSE src_media + ${WEBRTC_REPO}/media/*.cc +) +list(FILTER src_media EXCLUDE REGEX 
${TEST_CC_FILTER}) +list(FILTER src_media EXCLUDE REGEX ".*fakertp.cc") + +file(GLOB_RECURSE src_modules + ${WEBRTC_REPO}/modules/*.c + ${WEBRTC_REPO}/modules/*.cc + ${WEBRTC_REPO}/${WEBRTC_BUILD_DIR}/gen/modules/*.cc +) +list(FILTER src_modules EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_modules EXCLUDE REGEX ${OTHER_PLATFORM_CC_FILTER}) +list(FILTER src_modules EXCLUDE REGEX + ".*/desktop_capture/.|.*/linux/.*|.*include/audio_device_factory.cc|.*bwe_simulations.cc|.*/audio_coding/neteq/tools/.*|.*/remote_bitrate_estimator/tools/.*|.*rnn_vad_tool.cc|.*null_aec_dump_factory.cc|.*h264_color_space.cc|.*vp9_noop.cc|.*h264_decoder_impl.cc|.*h264_encoder_impl.cc|.*/android/aaudio_.*|.*ensure_initialized.cc|.*fixed_gain_controller.cc|.*click_annotate.cc" +) + +file(GLOB_RECURSE src_ortc + ${WEBRTC_REPO}/ortc/*.cc +) +list(FILTER src_ortc EXCLUDE REGEX ${TEST_CC_FILTER}) + +file(GLOB_RECURSE src_p2p + ${WEBRTC_REPO}/p2p/*.cc +) +list(FILTER src_p2p EXCLUDE REGEX ${TEST_CC_FILTER}) + +file(GLOB_RECURSE src_pc + ${WEBRTC_REPO}/pc/*.cc +) +list(FILTER src_pc EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_pc EXCLUDE REGEX + ".*peer_connection_wrapper.cc" +) + +file(GLOB_RECURSE src_rtc_base + ${WEBRTC_REPO}/rtc_base/*.cc +) +list(FILTER src_rtc_base EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_rtc_base EXCLUDE REGEX ${OTHER_PLATFORM_CC_FILTER}) +list(FILTER src_rtc_base EXCLUDE REGEX + ".*mac_utils.cc|.*mac_ifaddrs_converter.cc|.*test_echo_server.cc|.*task_queue_gcd.cc|.*task_queue_stdlib.cc|.*task_queue_for_test.cc|.*virtual_socket_server.cc|.*nat_socket_factory.cc|.*nat_server.cc|.*strings/json.cc" +) + +file(GLOB_RECURSE src_sdk + ${WEBRTC_REPO}/sdk/android/*.cc + ${WEBRTC_REPO}/sdk/media_constraints.cc +) +list(FILTER src_sdk EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_sdk EXCLUDE REGEX + ".*null_audio.cc|.*null_media.cc|.*null_video.cc|.*/audio_device/aaudio_.*" +) + +file(GLOB_RECURSE src_stats + ${WEBRTC_REPO}/stats/*.cc +) +list(FILTER src_stats 
EXCLUDE REGEX ${TEST_CC_FILTER}) + +file(GLOB_RECURSE src_system_wrappers + ${WEBRTC_REPO}/system_wrappers/*.cc +) +list(FILTER src_system_wrappers EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_system_wrappers EXCLUDE REGEX ${OTHER_PLATFORM_CC_FILTER}) + +file(GLOB_RECURSE src_video + ${WEBRTC_REPO}/video/*.cc +) +list(FILTER src_video EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_video EXCLUDE REGEX + ".*_loopback.cc|.*replay.cc|.*video_analyzer.cc|.*video_loopback_main.cc|.*video_stream_decoder_impl.cc" +) + +add_library(jingle_peerconnection_so SHARED + ${src_api} + ${src_audio} + ${src_call} + ${src_common_audio} + ${src_common_video} + ${src_logging} + ${src_media} + ${src_modules} + ${src_ortc} + ${src_p2p} + ${src_pc} + ${src_rtc_base} + ${src_sdk} + ${src_stats} + ${src_system_wrappers} + ${src_video} +) + +target_link_libraries(jingle_peerconnection_so + absl::optional + absl::variant + absl::strings + crypto + event + #json + opus + pffft + protobuf_lite + rnnoise + srtp + ssl + usrsctp + vpx + yuv + + android + log + GLESv2 + OpenSLES +) diff --git a/sdk/android_gradle/webrtc/build.gradle b/sdk/android_gradle/webrtc/build.gradle new file mode 100644 index 0000000000..713e0f172e --- /dev/null +++ b/sdk/android_gradle/webrtc/build.gradle @@ -0,0 +1,47 @@ +apply plugin: 'com.android.library' + +def compileNativeCode = "true" == rootProject.findProperty("compile_native_code") + +android { + compileSdkVersion rootProject.ext.androidCompileSdkVersion + buildToolsVersion rootProject.ext.androidBuildToolsVersion + + defaultConfig { + minSdkVersion rootProject.ext.minSdkVersion + targetSdkVersion rootProject.ext.targetSdkVersion + versionCode rootProject.ext.releaseVersionCode + versionName rootProject.ext.releaseVersionName + + sourceSets.main.java.srcDirs = [ + "$rootProject.rootDir/../android/api", + "$rootProject.rootDir/../android/src/java", + "$rootProject.rootDir/../../rtc_base/java/src", + 
"$rootProject.rootDir/../../modules/audio_device/android/java/src", + "$rootProject.rootDir/webrtc/src/main/java", + ] + + if (!compileNativeCode) { + sourceSets.main.jniLibs.srcDirs = ['prebuilt_libs'] + ndk.abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64' + } + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } +} + +if (compileNativeCode) { + apply from: 'webrtc.gradle' +} + +dependencies { + api 'com.google.code.findbugs:jsr305:3.0.2' + api "com.android.support:support-annotations:${rootProject.ext.androidSupportSdkVersion}" +} diff --git a/sdk/android_gradle/webrtc/prebuilt_libs/arm64-v8a/libjingle_peerconnection_so.so b/sdk/android_gradle/webrtc/prebuilt_libs/arm64-v8a/libjingle_peerconnection_so.so new file mode 100755 index 0000000000..af10abce48 Binary files /dev/null and b/sdk/android_gradle/webrtc/prebuilt_libs/arm64-v8a/libjingle_peerconnection_so.so differ diff --git a/sdk/android_gradle/webrtc/prebuilt_libs/armeabi-v7a/libjingle_peerconnection_so.so b/sdk/android_gradle/webrtc/prebuilt_libs/armeabi-v7a/libjingle_peerconnection_so.so new file mode 100755 index 0000000000..7df265811c Binary files /dev/null and b/sdk/android_gradle/webrtc/prebuilt_libs/armeabi-v7a/libjingle_peerconnection_so.so differ diff --git a/sdk/android_gradle/webrtc/prebuilt_libs/x86/libjingle_peerconnection_so.so b/sdk/android_gradle/webrtc/prebuilt_libs/x86/libjingle_peerconnection_so.so new file mode 100755 index 0000000000..37ea188c12 Binary files /dev/null and b/sdk/android_gradle/webrtc/prebuilt_libs/x86/libjingle_peerconnection_so.so differ diff --git a/sdk/android_gradle/webrtc/prebuilt_libs/x86_64/libjingle_peerconnection_so.so b/sdk/android_gradle/webrtc/prebuilt_libs/x86_64/libjingle_peerconnection_so.so new file mode 100755 index 0000000000..0e9c5086e1 Binary 
files /dev/null and b/sdk/android_gradle/webrtc/prebuilt_libs/x86_64/libjingle_peerconnection_so.so differ diff --git a/sdk/android_gradle/webrtc/src/main/AndroidManifest.xml b/sdk/android_gradle/webrtc/src/main/AndroidManifest.xml new file mode 100644 index 0000000000..ea1952bc74 --- /dev/null +++ b/sdk/android_gradle/webrtc/src/main/AndroidManifest.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + diff --git a/sdk/android_gradle/webrtc/src/main/java/com/piasy/avconf/AudioMixer.java b/sdk/android_gradle/webrtc/src/main/java/com/piasy/avconf/AudioMixer.java new file mode 100644 index 0000000000..8c264aaf75 --- /dev/null +++ b/sdk/android_gradle/webrtc/src/main/java/com/piasy/avconf/AudioMixer.java @@ -0,0 +1,139 @@ +package com.piasy.avconf; + +/** + * Created by Piasy{github.com/Piasy} on 2019-06-06. + */ +public class AudioMixer { + private static final int REC_SSRC = 9999999; + private static final int MUSIC_SSRC = 9999001; + + private long mNativeMixer; + private boolean mMusicEnabled; + private boolean mMusicStreaming; + private boolean mMicEcho; + private float mMusicVolume; + private float mMicVolume; + + public AudioMixer(String backingTrack, int captureSampleRate, int captureChannelNum, + int frameDurationUs, boolean enableMusicSyncFix, int waitingMixDelayFrame, + MixerCallback callback) { + mNativeMixer = nativeCreate(MUSIC_SSRC, backingTrack, REC_SSRC, captureSampleRate, + captureChannelNum, frameDurationUs, enableMusicSyncFix, waitingMixDelayFrame, + callback); + + mMusicEnabled = false; + mMusicStreaming = false; + mMicEcho = false; + mMusicVolume = 1.0F; + mMicVolume = 1.0F; + } + + private static native long nativeCreate(int musicSsrc, String backingTrack, int recSsrc, + int captureSampleRate, int captureChannelNum, int frameDurationUs, + boolean enableMusicSyncFix, int waitingMixDelayFrame, MixerCallback callback); + + private static native void nativeToggleEnable(long nativeMixer, int ssrc, boolean enable); + + private static native void 
nativeToggleStreaming(long nativeMixer, int ssrc, boolean streaming); + + private static native void nativeTogglePlayback(long nativeMixer, int ssrc, boolean playback); + + private static native void nativeUpdateVolume(long nativeMixer, int ssrc, float volume); + + private static native long nativeGetLengthMs(long nativeMixer, int ssrc); + + private static native long nativeGetProgressMs(long nativeMixer, int ssrc); + + private static native long nativeSeek(long nativeMixer, int ssrc, long progressMs); + + private static native void nativeDestroy(long nativeMixer); + + public void startMixer() { + mMusicEnabled = true; + applyMixerSettings(); + } + + public void pauseMixer() { + mMusicEnabled = false; + applyMixerSettings(); + } + + public void resumeMixer() { + startMixer(); + } + + public void toggleMusicStreaming(boolean streaming) { + mMusicStreaming = streaming; + applyMixerSettings(); + } + + public void toggleMicEcho(boolean micEcho) { + mMicEcho = micEcho; + applyMixerSettings(); + } + + public void setMicVolume(float volume) { + mMicVolume = volume; + applyMixerSettings(); + } + + public void setMusicVolume(float volume) { + mMusicVolume = volume; + applyMixerSettings(); + } + + public synchronized long getMusicLengthMs() { + if (mNativeMixer == 0) { + return -1; + } + + return nativeGetLengthMs(mNativeMixer, MUSIC_SSRC); + } + + public synchronized long getMusicProgressMs() { + if (mNativeMixer == 0) { + return -1; + } + + return nativeGetProgressMs(mNativeMixer, MUSIC_SSRC); + } + + public synchronized void seekMusic(long progressMs) { + if (mNativeMixer == 0) { + return; + } + + nativeSeek(mNativeMixer, MUSIC_SSRC, progressMs); + } + + public synchronized void stopMixer() { + if (mNativeMixer == 0) { + return; + } + + nativeDestroy(mNativeMixer); + mNativeMixer = 0; + } + + private synchronized void applyMixerSettings() { + if (mNativeMixer == 0) { + return; + } + + nativeToggleEnable(mNativeMixer, MUSIC_SSRC, mMusicEnabled); + 
nativeTogglePlayback(mNativeMixer, MUSIC_SSRC, true); + nativeToggleStreaming(mNativeMixer, MUSIC_SSRC, mMusicStreaming); + nativeUpdateVolume(mNativeMixer, MUSIC_SSRC, mMusicVolume); + + nativeToggleEnable(mNativeMixer, REC_SSRC, true); + nativeTogglePlayback(mNativeMixer, REC_SSRC, mMicEcho); + nativeToggleStreaming(mNativeMixer, REC_SSRC, true); + nativeUpdateVolume(mNativeMixer, REC_SSRC, mMicVolume); + } + + public interface MixerCallback { + void onMixerSsrcFinished(int ssrc); + + void onMixerSsrcError(int ssrc, int code); + } +} diff --git a/sdk/android_gradle/webrtc/src/main/java/com/piasy/avconf/HijackCapturerObserver.java b/sdk/android_gradle/webrtc/src/main/java/com/piasy/avconf/HijackCapturerObserver.java new file mode 100644 index 0000000000..2df4a58ee5 --- /dev/null +++ b/sdk/android_gradle/webrtc/src/main/java/com/piasy/avconf/HijackCapturerObserver.java @@ -0,0 +1,83 @@ +package com.piasy.avconf; + +import android.graphics.Matrix; +import android.os.Handler; +import android.os.Looper; +import org.webrtc.CapturerObserver; +import org.webrtc.Logging; +import org.webrtc.TextureBufferImpl; +import org.webrtc.TimestampAligner; +import org.webrtc.VideoFrame; +import org.webrtc.YuvConverter; + +/** + * Created by Piasy{github.com/Piasy} on 2019-12-21. 
+ */ +public class HijackCapturerObserver implements CapturerObserver { + private static final String TAG = "HijackCapturerObserver"; + private static final long BLACK_FRAME_INTERVAL_MS = 100; + + private final Handler mainHandler = new Handler(Looper.getMainLooper()); + private final CapturerObserver realObserver; + private final VideoFrame.Buffer blackBuffer; + private volatile TimestampAligner timestampAligner = new TimestampAligner(); + private boolean muted = false; + private final Runnable blackFrameProducer = this::produceBlackFrame; + + public HijackCapturerObserver(CapturerObserver realObserver) { + this.realObserver = realObserver; + + // actually this buffer won't be used, VideoBroadcaster::OnFrame will replace it with + // a black frame buffer and pass modified frame to track. + blackBuffer = new TextureBufferImpl(480, 640, VideoFrame.TextureBuffer.Type.RGB, 0, + new Matrix(), mainHandler, new YuvConverter(), null); + } + + public void toggleMute(boolean muted) { + Logging.d(TAG, "toggleMute " + muted); + this.muted = muted; + if (muted) { + // frame interval of 25 fps + mainHandler.postDelayed(blackFrameProducer, 40); + } else { + mainHandler.removeCallbacks(blackFrameProducer); + } + } + + public void dispose() { + Logging.d(TAG, "dispose"); + muted = false; + timestampAligner.dispose(); + timestampAligner = null; + mainHandler.removeCallbacks(blackFrameProducer); + } + + @Override + public void onCapturerStarted(boolean success) { + realObserver.onCapturerStarted(success); + } + + @Override + public void onCapturerStopped() { + realObserver.onCapturerStopped(); + } + + @Override + public void onFrameCaptured(VideoFrame frame) { + if (!muted && timestampAligner != null) { + realObserver.onFrameCaptured(new VideoFrame(frame.getBuffer(), frame.getRotation(), + timestampAligner.translateTimestamp(frame.getTimestampNs()))); + } + } + + private void produceBlackFrame() { + if (!muted) { + return; + } + + realObserver.onFrameCaptured( + new 
VideoFrame(blackBuffer, 0, TimestampAligner.getRtcTimeNanos())); + + mainHandler.postDelayed(blackFrameProducer, BLACK_FRAME_INTERVAL_MS); + } +} diff --git a/sdk/android_gradle/webrtc/third_party/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/CMakeLists.txt new file mode 100644 index 0000000000..0c54a50a6a --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/CMakeLists.txt @@ -0,0 +1,16 @@ +cmake_minimum_required(VERSION 3.4.1) + +set(ENABLE_STATIC TRUE) + +add_subdirectory(abseil-cpp) +add_subdirectory(boringssl) +#add_subdirectory(jsoncpp) +add_subdirectory(libevent) +add_subdirectory(libsrtp) +add_subdirectory(libvpx) +add_subdirectory(libyuv) +add_subdirectory(opus) +add_subdirectory(pffft) +add_subdirectory(protobuf) +add_subdirectory(rnnoise) +add_subdirectory(usrsctp) diff --git a/sdk/android_gradle/webrtc/third_party/abseil-cpp/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/abseil-cpp/CMakeLists.txt new file mode 100644 index 0000000000..85141ab079 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/abseil-cpp/CMakeLists.txt @@ -0,0 +1,4 @@ +cmake_minimum_required(VERSION 3.4.1) + +set(CMAKE_INSTALL_PREFIX /tmp) +add_subdirectory(${WEBRTC_REPO}/third_party/abseil-cpp abseil-cpp) diff --git a/sdk/android_gradle/webrtc/third_party/boringssl/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/boringssl/CMakeLists.txt new file mode 100644 index 0000000000..01e1cd1d19 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/boringssl/CMakeLists.txt @@ -0,0 +1,4 @@ +cmake_minimum_required(VERSION 3.4.1) + +set(ANDROID true) +add_subdirectory(${WEBRTC_REPO}/third_party/boringssl/src boringssl) diff --git a/sdk/android_gradle/webrtc/third_party/jsoncpp/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/jsoncpp/CMakeLists.txt new file mode 100644 index 0000000000..a3f612f435 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/jsoncpp/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.4.1) + 
+include_directories( + ${WEBRTC_REPO}/third_party/jsoncpp/source/include +) + +file(GLOB_RECURSE src_json + ${WEBRTC_REPO}/third_party/jsoncpp/source/src/lib_json/*.cpp +) +list(FILTER src_json EXCLUDE REGEX ${TEST_CC_FILTER}) + +add_library(json STATIC + ${src_json} +) diff --git a/sdk/android_gradle/webrtc/third_party/libevent/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/libevent/CMakeLists.txt new file mode 100644 index 0000000000..15e14623e3 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/libevent/CMakeLists.txt @@ -0,0 +1,33 @@ +cmake_minimum_required(VERSION 3.4.1) + +add_definitions(-DHAVE_CONFIG_H=1) + +include_directories( + ${WEBRTC_REPO} + + ${WEBRTC_REPO}/base/third_party/libevent + ${WEBRTC_REPO}/base/third_party/libevent/android +) + +# infered from base/third_party/libevent/BUILD.gn, the base source set, plus Android source (epoll.c) +set(src_event + ${WEBRTC_REPO}/base/third_party/libevent/buffer.c + ${WEBRTC_REPO}/base/third_party/libevent/evbuffer.c + ${WEBRTC_REPO}/base/third_party/libevent/evdns.c + ${WEBRTC_REPO}/base/third_party/libevent/event.c + ${WEBRTC_REPO}/base/third_party/libevent/event_tagging.c + ${WEBRTC_REPO}/base/third_party/libevent/evrpc.c + ${WEBRTC_REPO}/base/third_party/libevent/evutil.c + ${WEBRTC_REPO}/base/third_party/libevent/http.c + ${WEBRTC_REPO}/base/third_party/libevent/log.c + ${WEBRTC_REPO}/base/third_party/libevent/poll.c + ${WEBRTC_REPO}/base/third_party/libevent/select.c + ${WEBRTC_REPO}/base/third_party/libevent/signal.c + ${WEBRTC_REPO}/base/third_party/libevent/strlcpy.c + + ${WEBRTC_REPO}/base/third_party/libevent/epoll.c +) + +add_library(event STATIC + ${src_event} +) diff --git a/sdk/android_gradle/webrtc/third_party/libjpeg_turbo/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/libjpeg_turbo/CMakeLists.txt new file mode 100644 index 0000000000..bd45e32b68 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/libjpeg_turbo/CMakeLists.txt @@ -0,0 +1,13 @@ 
+cmake_minimum_required(VERSION 3.4.1) + +file(GLOB_RECURSE src_jpeg_turbo + ${WEBRTC_REPO}/third_party/libjpeg_turbo/*.S + ${WEBRTC_REPO}/third_party/libjpeg_turbo/*.c + ) +list(FILTER src_jpeg_turbo EXCLUDE REGEX ${TEST_CC_FILTER}) +list(FILTER src_jpeg_turbo EXCLUDE REGEX + ".*ext.c|.*arith.c|.*565.c|.*bmp.c|.*djpeg.c|.*jstdhuff.c|.*altivec.c|.*bench.c|.*turbojpeg.c|.*turbojpeg-jni.c") + +add_library(jpeg_turbo STATIC + ${src_jpeg_turbo} + ) diff --git a/sdk/android_gradle/webrtc/third_party/libsrtp/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/libsrtp/CMakeLists.txt new file mode 100644 index 0000000000..b6a19269e3 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/libsrtp/CMakeLists.txt @@ -0,0 +1,40 @@ +cmake_minimum_required(VERSION 3.4.1) + +include_directories( + ${WEBRTC_REPO}/third_party/boringssl/src/include + ${WEBRTC_REPO}/third_party/libsrtp/config + ${WEBRTC_REPO}/third_party/libsrtp/crypto/include + ${WEBRTC_REPO}/third_party/libsrtp/include + ${WEBRTC_REPO}/third_party/protobuf/src +) + +add_definitions( + -DHAVE_CONFIG_H + -DOPENSSL + -DHAVE_STDLIB_H + -DHAVE_STRING_H + -DHAVE_STDINT_H + -DHAVE_INTTYPES_H + -DHAVE_INT16_T + -DHAVE_INT32_T + -DHAVE_INT8_T + -DHAVE_UINT16_T + -DHAVE_UINT32_T + -DHAVE_UINT64_T + -DHAVE_UINT8_T + -DHAVE_ARPA_INET_H + -DHAVE_NETINET_IN_H + -DHAVE_SYS_TYPES_H + -DHAVE_UNISTD_H + -DPACKAGE_STRING="libsrtp2 2.1.0-pre" + -DPACKAGE_VERSION="2.1.0-pre" +) + +file(GLOB_RECURSE src_srtp + ${WEBRTC_REPO}/third_party/libsrtp/*.c +) +list(FILTER src_srtp EXCLUDE REGEX ${TEST_CC_FILTER}) + +add_library(srtp STATIC + ${src_srtp} +) diff --git a/sdk/android_gradle/webrtc/third_party/libvpx/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/libvpx/CMakeLists.txt new file mode 100644 index 0000000000..1204a92db9 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/libvpx/CMakeLists.txt @@ -0,0 +1,228 @@ +cmake_minimum_required(VERSION 3.4.1) + +include_directories( + 
${WEBRTC_REPO}/third_party/libvpx/source/config + ${WEBRTC_REPO}/third_party/libvpx/source/config/linux/arm64 + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx +) + +# extracted from `libvpx_srcs_arm64` of third_party/libvpx/libvpx_srcs.gni +# python extract_src_from_gn.py third_party/libvpx/libvpx_srcs.gni libvpx_srcs_arm64 ' ${WEBRTC_REPO}/' +set(src_vpx_arm64 + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/alloccommon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/loopfilter_arm.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/bilinearpredict_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/copymem_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/dc_only_idct_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/dequant_idct_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/dequantizeb_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/idct_blk_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/iwalsh_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/loopfiltersimplehorizontaledge_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/loopfiltersimpleverticaledge_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/mbloopfilter_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/shortidct4x4llm_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/sixtappredict_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/arm/neon/vp8_loopfilter_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/blockd.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/dequantize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/entropy.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/entropymode.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/entropymv.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/extend.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/filter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/findnearmv.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/generic/systemdependent.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/idct_blk.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/idctllm.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/loopfilter_filters.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/mbpitch.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/mfqe.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/modecont.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/postproc.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/quant_common.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/reconinter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/reconintra.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/reconintra4x4.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/rtcd.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/setupintrarecon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/swapyv12buffer.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/treecoder.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/vp8_loopfilter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/common/vp8_skin_detection.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/decoder/dboolhuff.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/decoder/decodeframe.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/decoder/decodemv.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/decoder/detokenize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/decoder/onyxd_if.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/decoder/threading.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/arm/neon/denoising_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/arm/neon/fastquantizeb_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/arm/neon/shortfdct_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/arm/neon/vp8_shortwalsh4x4_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/bitstream.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/boolhuff.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/copy_c.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/dct.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/denoising.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/encodeframe.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/encodeintra.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/encodemb.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/encodemv.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/ethreading.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/lookahead.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/mcomp.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/modecosts.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/mr_dissim.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/onyx_if.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/pickinter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/picklpf.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/ratectrl.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/rdopt.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/segmentation.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/tokenize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/treewriter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/encoder/vp8_quantize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/vp8_cx_iface.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp8/vp8_dx_iface.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/arm/neon/vp9_iht16x16_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/arm/neon/vp9_iht4x4_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/arm/neon/vp9_iht8x8_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_alloccommon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_blockd.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_common_data.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_entropy.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_entropymode.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_entropymv.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_filter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_frame_buffers.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_idct.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_loopfilter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_mfqe.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_mvref_common.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_postproc.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_pred_common.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_quant_common.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_reconinter.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_reconintra.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_rtcd.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_scale.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_scan.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_seg_common.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_thread_common.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/common/vp9_tile_common.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/decoder/vp9_decodeframe.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/decoder/vp9_decodemv.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/decoder/vp9_decoder.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/decoder/vp9_detokenize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/decoder/vp9_dsubexp.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/decoder/vp9_job_queue.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/arm/neon/vp9_denoiser_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/arm/neon/vp9_error_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/arm/neon/vp9_frame_scale_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/arm/neon/vp9_quantize_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_aq_cyclicrefresh.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_bitstream.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_context_tree.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_cost.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_dct.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_denoiser.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encodeframe.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encodemb.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encodemv.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_encoder.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ethread.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_extend.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_frame_scale.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_lookahead.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_mcomp.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_multi_thread.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_noise_estimate.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_picklpf.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_pickmode.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_quantize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_ratectrl.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_rd.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_rdopt.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_resize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_segmentation.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_skin_detection.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_speed_features.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_subexp.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_svc_layercontext.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_tokenize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/encoder/vp9_treewriter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/vp9_cx_iface.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vp9/vp9_dx_iface.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx/src/vpx_codec.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx/src/vpx_decoder.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx/src/vpx_encoder.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx/src/vpx_image.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/add_noise.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/avg_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/avg_pred_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/deblock_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct16x16_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct32x32_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct_partial_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/fwd_txfm_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/hadamard_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct16x16_1_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct16x16_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct32x32_135_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct32x32_1_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct32x32_34_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct32x32_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct4x4_1_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct4x4_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct8x8_1_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/idct8x8_add_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/intrapred_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/loopfilter_neon.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/quantize_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/sad4d_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/sad_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/subpel_variance_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/subtract_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/sum_squares_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/variance_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/vpx_convolve8_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/vpx_convolve_avg_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/vpx_convolve_copy_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/vpx_convolve_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/arm/vpx_scaled_convolve8_neon.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/avg.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/bitreader.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/bitreader_buffer.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/bitwriter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/bitwriter_buffer.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/deblock.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/fwd_txfm.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/intrapred.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/inv_txfm.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/loopfilter.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/prob.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/psnr.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/quantize.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/sad.c + 
${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/skin_detection.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/subtract.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/sum_squares.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/variance.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/vpx_convolve.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_dsp/vpx_dsp_rtcd.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_mem/vpx_mem.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_ports/arm_cpudetect.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_scale/generic/gen_scalers.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_scale/generic/vpx_scale.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_scale/generic/yv12config.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_scale/generic/yv12extend.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_scale/vpx_scale_rtcd.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_util/vpx_thread.c + ${WEBRTC_REPO}/third_party/libvpx/source/libvpx/vpx_util/vpx_write_yuv_frame.c +) + +add_library(vpx STATIC + ${src_vpx_arm64} +) diff --git a/sdk/android_gradle/webrtc/third_party/libyuv/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/libyuv/CMakeLists.txt new file mode 100644 index 0000000000..8b45ea1660 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/libyuv/CMakeLists.txt @@ -0,0 +1,3 @@ +cmake_minimum_required(VERSION 3.4.1) + +add_subdirectory(${WEBRTC_REPO}/third_party/libyuv libyuv) diff --git a/sdk/android_gradle/webrtc/third_party/opus/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/opus/CMakeLists.txt new file mode 100644 index 0000000000..db05bb1ed4 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/opus/CMakeLists.txt @@ -0,0 +1,23 @@ +cmake_minimum_required(VERSION 3.4.1) + +add_definitions(-DOPUS_BUILD -DOPUS_EXPORT= -DHAVE_LRINT -DHAVE_LRINTF -DVAR_ARRAYS -DOPUS_WILL_BE_SLOW -DFIXED_POINT) + 
+include_directories( + ${WEBRTC_REPO}/third_party/opus/src/celt + ${WEBRTC_REPO}/third_party/opus/src/include + ${WEBRTC_REPO}/third_party/opus/src/silk + ${WEBRTC_REPO}/third_party/opus/src/silk/fixed +) + +set(OPUS_SRC_FILTER ".*opus_custom_demo.c|.*mlp_train.c|.*opus_compare.c|.*opus_demo.c|.*repacketizer_demo.c") +file(GLOB src_opus_arm64 + ${WEBRTC_REPO}/third_party/opus/src/celt/*.c + ${WEBRTC_REPO}/third_party/opus/src/silk/*.c + ${WEBRTC_REPO}/third_party/opus/src/silk/fixed/*.c + ${WEBRTC_REPO}/third_party/opus/src/src/*.c +) +list(FILTER src_opus_arm64 EXCLUDE REGEX ${OPUS_SRC_FILTER}) + +add_library(opus STATIC + ${src_opus_arm64} +) diff --git a/sdk/android_gradle/webrtc/third_party/pffft/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/pffft/CMakeLists.txt new file mode 100644 index 0000000000..56d056e031 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/pffft/CMakeLists.txt @@ -0,0 +1,13 @@ +cmake_minimum_required(VERSION 3.4.1) + +include_directories( + ${WEBRTC_REPO}/third_party/pffft/src +) + +file(GLOB src_pffft + ${WEBRTC_REPO}/third_party/pffft/src/pffft.c +) + +add_library(pffft STATIC + ${src_pffft} +) diff --git a/sdk/android_gradle/webrtc/third_party/protobuf/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/protobuf/CMakeLists.txt new file mode 100644 index 0000000000..b2572d49ee --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/protobuf/CMakeLists.txt @@ -0,0 +1,43 @@ +cmake_minimum_required(VERSION 3.4.1) + +add_definitions(-DHAVE_PTHREAD) + +include_directories( + ${WEBRTC_REPO}/third_party/protobuf/src +) + +# extracted from `protobuf_lite_sources` of third_party/protobuf/BUILD.gn +# python extract_src_from_gn.py third_party/protobuf/BUILD.gn protobuf_lite_sources ' ${WEBRTC_REPO}/third_party/protobuf/' +set(src_protobuf_lite + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/any_lite.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/arena.cc + 
${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/arenastring.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/extension_set.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/generated_enum_util.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/generated_message_table_driven_lite.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/generated_message_util.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/implicit_weak_message.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/io/coded_stream.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/io/io_win32.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/io/strtod.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/io/zero_copy_stream.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/io/zero_copy_stream_impl.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/message_lite.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/repeated_field.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/bytestream.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/common.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/int128.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/status.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/statusor.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/stringpiece.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/stringprintf.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/structurally_valid.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/strutil.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/stubs/time.cc + ${WEBRTC_REPO}/third_party/protobuf/src/google/protobuf/wire_format_lite.cc +) + +add_library(protobuf_lite 
STATIC + ${src_protobuf_lite} +) diff --git a/sdk/android_gradle/webrtc/third_party/rnnoise/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/rnnoise/CMakeLists.txt new file mode 100644 index 0000000000..10fc623ee7 --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/rnnoise/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.4.1) + +include_directories( + ${WEBRTC_REPO} +) + +file(GLOB_RECURSE src_rnnoise + ${WEBRTC_REPO}/third_party/rnnoise/src/*.cc +) +list(FILTER src_rnnoise EXCLUDE REGEX ${TEST_CC_FILTER}) + +add_library(rnnoise STATIC + ${src_rnnoise} +) diff --git a/sdk/android_gradle/webrtc/third_party/usrsctp/CMakeLists.txt b/sdk/android_gradle/webrtc/third_party/usrsctp/CMakeLists.txt new file mode 100644 index 0000000000..87a2c4069c --- /dev/null +++ b/sdk/android_gradle/webrtc/third_party/usrsctp/CMakeLists.txt @@ -0,0 +1,17 @@ +cmake_minimum_required(VERSION 3.4.1) + +add_definitions(-DSCTP_PROCESS_LEVEL_LOCKS -DSCTP_SIMPLE_ALLOCATOR -DSCTP_USE_OPENSSL_SHA1 -D__Userspace__ -D__Userspace_os_Linux -D_GNU_SOURCE -DNON_WINDOWS_DEFINE) + +include_directories( + ${WEBRTC_REPO}/third_party/boringssl/src/include + ${WEBRTC_REPO}/third_party/usrsctp/usrsctplib/usrsctplib +) + +file(GLOB_RECURSE src_usrsctp + ${WEBRTC_REPO}/third_party/usrsctp/usrsctplib/usrsctplib/*.c +) +list(FILTER src_usrsctp EXCLUDE REGEX ${TEST_CC_FILTER}) + +add_library(usrsctp STATIC + ${src_usrsctp} +) diff --git a/sdk/android_gradle/webrtc/webrtc.gradle b/sdk/android_gradle/webrtc/webrtc.gradle new file mode 100644 index 0000000000..b6a1288237 --- /dev/null +++ b/sdk/android_gradle/webrtc/webrtc.gradle @@ -0,0 +1,320 @@ +def webrtc_repo = rootProject.property("webrtc_repo") +def webrtc_build_dir = rootProject.property("webrtc_build_dir") + +def android_jar = rootProject.property("android_jar") + +def py2 = rootProject.property("py2") +def protoc = rootProject.property("protoc") + +android { + ndkVersion '21.3.6528147' + + defaultConfig { + versionCode 
rootProject.ext.releaseVersionCode + versionName rootProject.ext.releaseVersionName + + externalNativeBuild { + cmake { + arguments '-DANDROID_TOOLCHAIN=clang', '-DANDROID_STL=c++_shared', + "-DWEBRTC_REPO=${rootProject.property("webrtc_repo")}", + "-DWEBRTC_BUILD_DIR=${rootProject.property("webrtc_build_dir")}" + cppFlags '-std=c++14' + abiFilters 'arm64-v8a' //, 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64' + //targets "protobuf_lite", "absl_base", "absl_optional", "absl_variant", "yuv", "ssl", "crypto", "event", "json", "srtp", "vpx", "opus", "rnnoise", "usrsctp" + } + } + } + + externalNativeBuild { + cmake { + version "3.18.1" + path 'CMakeLists.txt' + } + } +} + +task genWebrtcSrc(type: Exec) { + if (!new File(webrtc_repo).exists()) { + throw new IllegalArgumentException("webrtc_repo doesn't exist: ${webrtc_repo}") + } + if (!new File("${webrtc_repo}/${android_jar}").exists()) { + throw new IllegalArgumentException("android_jar doesn't exist: ${webrtc_repo}") + } + if (!new File(py2).exists()) { + throw new IllegalArgumentException("py2 doesn't exist: ${webrtc_repo}") + } + if (!new File(protoc).exists()) { + throw new IllegalArgumentException("protoc doesn't exist: ${webrtc_repo}") + } + + def buildDirFile = new File("${webrtc_repo}/${webrtc_build_dir}") + + workingDir webrtc_repo + + def createBuildDir = "mkdir -p ${webrtc_repo}/${webrtc_build_dir}" + + def genBuildFlags = + "mkdir -p ${webrtc_build_dir}/gen/base/debug/ ; " + + "rspfile=${webrtc_build_dir}/gen/" + + "__base_debugging_buildflags___build_toolchain_mac_clang_x64__rule.rsp && " + + "echo \"--flags ENABLE_LOCATION_SOURCE=true ENABLE_PROFILING=false " + + "CAN_UNWIND_WITH_FRAME_POINTERS=true UNSAFE_DEVELOPER_BUILD=true " + + "CAN_UNWIND_WITH_CFI_TABLE=false\" > \${rspfile} && " + + "${py2} build/write_buildflag_header.py --output base/debug/debugging_buildflags.h " + + "--rulename //base\$:debugging_buildflags --gen-dir ${webrtc_build_dir}/gen " + + "--definitions \${rspfile} && " + + "mkdir -p 
${webrtc_build_dir}/gen/base/allocator/ ; " + + "rspfile=${webrtc_build_dir}/gen/" + + "__base_allocator_buildflags___build_toolchain_mac_clang_x64__rule.rsp && " + + "echo \"--flags USE_ALLOCATOR_SHIM=true USE_NEW_TCMALLOC=false\" > \${rspfile} && " + + "${py2} build/write_buildflag_header.py --output base/allocator/buildflags.h " + + "--rulename //base/allocator\$:buildflags " + + "--gen-dir ${webrtc_build_dir}/gen --definitions \${rspfile}" + + def genProtoSrc = + "mkdir -p ${webrtc_build_dir}/gen/logging/rtc_event_log ; " + + "${py2} tools/protoc_wrapper/protoc_wrapper.py rtc_event_log.proto " + + "--protoc ${protoc} --proto-in-dir logging/rtc_event_log " + + "--cc-out-dir ${webrtc_build_dir}/gen/logging/rtc_event_log && " + + "${py2} tools/protoc_wrapper/protoc_wrapper.py rtc_event_log2.proto " + + "--protoc ${protoc} --proto-in-dir logging/rtc_event_log " + + "--cc-out-dir ${webrtc_build_dir}/gen/logging/rtc_event_log && " + + "mkdir -p ${webrtc_build_dir}/gen/modules/audio_coding/audio_network_adaptor ; " + + "${py2} tools/protoc_wrapper/protoc_wrapper.py config.proto " + + "--protoc ${protoc} --proto-in-dir modules/audio_coding/audio_network_adaptor " + + "--cc-out-dir ${webrtc_build_dir}/gen/modules/audio_coding/audio_network_adaptor && " + + "${py2} tools/protoc_wrapper/protoc_wrapper.py debug_dump.proto " + + "--protoc ${protoc} --proto-in-dir modules/audio_coding/audio_network_adaptor " + + "--cc-out-dir ${webrtc_build_dir}/gen/modules/audio_coding/audio_network_adaptor && " + + "mkdir -p ${webrtc_build_dir}/gen/modules/audio_processing ; " + + "${py2} tools/protoc_wrapper/protoc_wrapper.py debug.proto " + + "--protoc ${protoc} --proto-in-dir modules/audio_processing " + + "--cc-out-dir ${webrtc_build_dir}/gen/modules/audio_processing" + + def genJni = + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_audio_device_module_base_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes 
../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_audio_device_module_base_jni/WebRtcAudioManager_jni.h " + + "--input_file sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java" + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_base_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_base_jni/NetworkMonitor_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_base_jni/NetworkMonitorAutoDetect_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_base_jni/RefCounted_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_base_jni/Histogram_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_base_jni/JniCommon_jni.h " + + "--input_file sdk/android/api/org/webrtc/NetworkMonitor.java " + + "--input_file sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java " + + "--input_file sdk/android/api/org/webrtc/RefCounted.java " + + "--input_file sdk/android/src/java/org/webrtc/Histogram.java " + + "--input_file sdk/android/src/java/org/webrtc/JniCommon.java" + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_builtin_audio_codecs_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioDecoderFactoryFactory_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioEncoderFactoryFactory_jni.h " + + "--input_file 
sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java " + + "--input_file sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --jar_file ${android_jar} " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Integer_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Double_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Long_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Iterable_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Iterator_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Boolean_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/BigInteger_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Map_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/LinkedHashMap_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/ArrayList_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_external_classes_jni/Enum_jni.h " + + "--input_file java/lang/Integer.class " + + "--input_file java/lang/Double.class " + + "--input_file java/lang/Long.class " + + "--input_file java/lang/Iterable.class " + + "--input_file java/util/Iterator.class " + + "--input_file java/lang/Boolean.class " + + "--input_file java/math/BigInteger.class " + + "--input_file java/util/Map.class " + + "--input_file java/util/LinkedHashMap.class " + + "--input_file java/util/ArrayList.class " + + 
"--input_file java/lang/Enum.class " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_java_audio_device_module_native_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h " + + "--input_file sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java " + + "--input_file sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_java_audio_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_java_audio_jni/JavaAudioDeviceModule_jni.h " + + "--input_file sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_libvpx_vp8_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Decoder_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Encoder_jni.h " + + "--input_file sdk/android/api/org/webrtc/LibvpxVp8Decoder.java " + + "--input_file sdk/android/api/org/webrtc/LibvpxVp8Encoder.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_libvpx_vp9_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long 
--includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h " + + "--input_file sdk/android/api/org/webrtc/LibvpxVp9Decoder.java " + + "--input_file sdk/android/api/org/webrtc/LibvpxVp9Encoder.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_logging_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_logging_jni/JNILogging_jni.h " + + "--input_file sdk/android/src/java/org/webrtc/JNILogging.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_metrics_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_metrics_jni/Metrics_jni.h " + + "--input_file sdk/android/api/org/webrtc/Metrics.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_native_api_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_native_api_jni/JniHelper_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_native_api_jni/WebRtcClassLoader_jni.h " + + "--input_file sdk/android/src/java/org/webrtc/JniHelper.java " + + "--input_file sdk/android/src/java/org/webrtc/WebRtcClassLoader.java " + + " && " + + + "mkdir -p 
${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/AudioTrack_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/CallSessionFileRotatingLogSink_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/CryptoOptions_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/DataChannel_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/DtmfSender_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/IceCandidate_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/MediaConstraints_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/MediaSource_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/MediaStream_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/MediaStreamTrack_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/PeerConnection_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RTCStats_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RTCStatsCollectorCallback_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RTCStatsReport_jni.h " + + 
"--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RtcCertificatePem_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RtpParameters_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RtpReceiver_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RtpSender_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/SSLCertificateVerifier_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/SdpObserver_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/SessionDescription_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/StatsObserver_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/StatsReport_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_peerconnection_jni/TurnCustomizer_jni.h " + + "--input_file sdk/android/api/org/webrtc/AudioTrack.java " + + "--input_file sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java " + + "--input_file sdk/android/api/org/webrtc/CandidatePairChangeEvent.java " + + "--input_file sdk/android/api/org/webrtc/CryptoOptions.java " + + "--input_file sdk/android/api/org/webrtc/DataChannel.java " + + "--input_file sdk/android/api/org/webrtc/DtmfSender.java " + + "--input_file sdk/android/api/org/webrtc/IceCandidate.java " + + "--input_file sdk/android/api/org/webrtc/MediaConstraints.java " + + "--input_file sdk/android/api/org/webrtc/MediaSource.java " + + "--input_file sdk/android/api/org/webrtc/MediaStream.java " + + "--input_file sdk/android/api/org/webrtc/MediaStreamTrack.java " + + "--input_file sdk/android/api/org/webrtc/PeerConnection.java " + + 
"--input_file sdk/android/api/org/webrtc/PeerConnectionFactory.java " + + "--input_file sdk/android/api/org/webrtc/RTCStats.java " + + "--input_file sdk/android/api/org/webrtc/RTCStatsCollectorCallback.java " + + "--input_file sdk/android/api/org/webrtc/RTCStatsReport.java " + + "--input_file sdk/android/api/org/webrtc/RtcCertificatePem.java " + + "--input_file sdk/android/api/org/webrtc/RtpParameters.java " + + "--input_file sdk/android/api/org/webrtc/RtpReceiver.java " + + "--input_file sdk/android/api/org/webrtc/RtpSender.java " + + "--input_file sdk/android/api/org/webrtc/RtpTransceiver.java " + + "--input_file sdk/android/api/org/webrtc/SSLCertificateVerifier.java " + + "--input_file sdk/android/api/org/webrtc/SdpObserver.java " + + "--input_file sdk/android/api/org/webrtc/SessionDescription.java " + + "--input_file sdk/android/api/org/webrtc/StatsObserver.java " + + "--input_file sdk/android/api/org/webrtc/StatsReport.java " + + "--input_file sdk/android/api/org/webrtc/TurnCustomizer.java " + + " && " + + + "mkdir -p ${webrtc_build_dir}/gen/sdk/android/generated_video_jni ; " + + "${py2} base/android/jni_generator/jni_generator.py --ptr_type=long --includes ../../../../../../sdk/android/src/jni/jni_generator_helper.h --use_proxy_hash -n \"webrtc::jni\" " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/EncodedImage_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/JavaI420Buffer_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/MediaCodecVideoDecoder_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/MediaCodecVideoEncoder_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/TimestampAligner_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoCodecInfo_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoCodecStatus_jni.h " + + 
"--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoDecoder_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoDecoderFactory_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoDecoderFallback_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoEncoder_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoEncoderFactory_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoEncoderFallback_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoFrame_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoSink_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoTrack_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/YuvHelper_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/H264Utils_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/NV12Buffer_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/NV21Buffer_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/NativeAndroidVideoTrackSource_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/NativeCapturerObserver_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoDecoderWrapper_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/VideoEncoderWrapper_jni.h " + + "--output_file ${webrtc_build_dir}/gen/sdk/android/generated_video_jni/WrappedNativeI420Buffer_jni.h " + + "--input_file sdk/android/api/org/webrtc/EncodedImage.java " + + "--input_file sdk/android/api/org/webrtc/JavaI420Buffer.java " + + "--input_file sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java " + + 
"--input_file sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java " + + "--input_file sdk/android/api/org/webrtc/TimestampAligner.java " + + "--input_file sdk/android/api/org/webrtc/VideoCodecInfo.java " + + "--input_file sdk/android/api/org/webrtc/VideoCodecStatus.java " + + "--input_file sdk/android/api/org/webrtc/VideoDecoder.java " + + "--input_file sdk/android/api/org/webrtc/VideoDecoderFactory.java " + + "--input_file sdk/android/api/org/webrtc/VideoDecoderFallback.java " + + "--input_file sdk/android/api/org/webrtc/VideoEncoder.java " + + "--input_file sdk/android/api/org/webrtc/VideoEncoderFactory.java " + + "--input_file sdk/android/api/org/webrtc/VideoEncoderFallback.java " + + "--input_file sdk/android/api/org/webrtc/VideoFrame.java " + + "--input_file sdk/android/api/org/webrtc/VideoSink.java " + + "--input_file sdk/android/api/org/webrtc/VideoTrack.java " + + "--input_file sdk/android/api/org/webrtc/YuvHelper.java " + + "--input_file sdk/android/src/java/org/webrtc/H264Utils.java " + + "--input_file sdk/android/src/java/org/webrtc/NV12Buffer.java " + + "--input_file sdk/android/src/java/org/webrtc/NV21Buffer.java " + + "--input_file sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java " + + "--input_file sdk/android/src/java/org/webrtc/NativeCapturerObserver.java " + + "--input_file sdk/android/src/java/org/webrtc/VideoDecoderWrapper.java " + + "--input_file sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java " + + "--input_file sdk/android/src/java/org/webrtc/WrappedNativeI420Buffer.java " + + def cmd = ['echo', 'already generated'] + if (!buildDirFile.exists()) { + cmd = ['sh', '-c', "${createBuildDir} && ${genBuildFlags} && ${genProtoSrc} && ${genJni}"] + } + commandLine cmd +} + +afterEvaluate { + externalNativeBuildDebug.dependsOn(genWebrtcSrc) + externalNativeBuildRelease.dependsOn(genWebrtcSrc) +} diff --git a/sdk/android_gradle/webrtc_src_extractor.py b/sdk/android_gradle/webrtc_src_extractor.py new file mode 100644 index 
0000000000..b6981702f8 --- /dev/null +++ b/sdk/android_gradle/webrtc_src_extractor.py @@ -0,0 +1,138 @@ +import sys +import glob +import subprocess + +src_black_list_keywords = [ + 'googletest', 'gtest', 'mock', '_unittest', '_test', '_integrationtest', '_benchmark' +] +src_white_list = [ + 'rtc_base/gtest_prod_util.h' +] +special_related_src = { + 'common_audio/include/audio_util.h': [ + 'common_audio/audio_util.cc', + ], + 'system_wrappers/include/metrics.h': [ + # 'system_wrappers/include/metrics_default.h', + # header won't be included in `find_sources`, + # but will be included in `extract_includes` + 'system_wrappers/source/metrics_default.cc', + ], + 'third_party/abseil-cpp/absl/strings/string_view.cc': [ + 'third_party/abseil-cpp/absl/strings/internal/memutil.cc', + ], +} + +def file_not_blacklisted(file): + for keyword in src_black_list_keywords: + if keyword in file: + discard = True + for white_list_file in src_white_list: + if white_list_file in file: + discard = False + if discard: + return False + return True + +def find_full_path(src_set, file): + for src in src_set: + if src.endswith(file): + return src + return None + +def find_sources(all_src, headers): + sources = [] + for header in headers: + name = '.'.join(header.split('.')[:-1]) + for src in all_src: + if src.startswith(name) and file_not_blacklisted(src): + sources.append(src) + break + for special_header, related_srcs in special_related_src.items(): + if header.endswith(special_header): + for related_src in related_srcs: + full_path = find_full_path(all_src, related_src) + if full_path is not None: + sources.append(full_path) + return sources + +def get_full_path(headers, sources, file): + header = find_full_path(headers, file) + if header is not None: + return header + source = find_full_path(sources, file) + if source is not None: + return source + return None + +def extract_includes(headers, file): + with open(file, 'r') as f: + includes = [line for line in f.read().split('\n') if 
line.startswith('#include') and '\"' in line] + includes = [line.split('\"')[1] for line in includes] + + results = [] + for include in includes: + full_path = get_full_path(headers, [], include) + if full_path is not None and file_not_blacklisted(full_path): + results.append(full_path) + return results + +def find_all_related_files(headers, sources, wanted): + # For each file in candidates, read its directly related headers and sources (related) and add file to searched; + # items of related that are not yet in searched are added to new_candidates. + # When the pass over candidates ends, assign new_candidates to candidates and keep iterating + # until candidates is empty. + # candidates is initialized to wanted. + # searched is the result. + + searched = set() + candidates = [] + candidates.extend(wanted) + while len(candidates) != 0: + new_candidates = [] + for file in candidates: + full_path = get_full_path(headers, sources, file) + if full_path is None: + continue + + searched.add(full_path) + related_headers = extract_includes(headers, full_path) + related_sources = find_sources(sources, related_headers) + + new_candidates.extend([f for f in related_headers if f not in searched]) + new_candidates.extend([f for f in related_sources if f not in searched]) + + candidates = new_candidates + + print('searched', len(searched)) + print('candidates', len(candidates)) + return sorted(searched) + +def get_parents(repo, file): + parts = file[len(repo):].split('/') + return parts[:-1] + +def copy_to(repo, file, dst): + parents = get_parents(repo, file) + dir = '%s/%s' % (dst, '/'.join(parents)) + subprocess.run('mkdir -p %s' % dir, shell=True, check=True) + subprocess.run('cp %s %s' % (file, dir), shell=True, check=True) + +if __name__ == '__main__': + repo = sys.argv[1] + dst = sys.argv[2] + wanted = sys.argv[3:] + + all_files = glob.glob('%s/**/*' % repo, recursive=True) + headers = [f for f in all_files if f.endswith('.h') or f.endswith('.hpp')] + sources = [f for f in all_files if f.endswith('.c') or f.endswith('.cc') or f.endswith('.cpp')] + all_files = None + + print('wanted', wanted) + 
print('header', len(headers)) + print('source', len(sources)) + + needed = find_all_related_files(headers, sources, wanted) + + for file in needed: + copy_to(repo, file, dst) diff --git a/sdk/build_android_libs.sh b/sdk/build_android_libs.sh new file mode 100755 index 0000000000..0f24526f55 --- /dev/null +++ b/sdk/build_android_libs.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +set -e + +export PATH=/root/src/media/webrtc_repo/webrtc_android/src/third_party/llvm-build/Release+Asserts/bin:$PATH + +pushd third_party/ffmpeg + +git reset --hard +git apply ../../sdk/ffmpeg-others-build.diff + +python chromium/scripts/build_ffmpeg.py android arm-neon --branding Chrome -- \ + --disable-asm \ + --disable-encoders --disable-hwaccels --disable-bsfs --disable-devices --disable-filters \ + --disable-protocols --enable-protocol=file \ + --disable-parsers --enable-parser=mpegaudio --enable-parser=h264 --enable-parser=hevc \ + --disable-demuxers --enable-demuxer=mov --enable-demuxer=mp3 --enable-demuxer=mpegts \ + --disable-decoders --enable-decoder=mp3 --enable-decoder=aac \ + --disable-muxers --enable-muxer=matroska \ + --enable-swresample + +python chromium/scripts/build_ffmpeg.py android ia32 --branding Chrome -- \ + --disable-asm \ + --disable-encoders --disable-hwaccels --disable-bsfs --disable-devices --disable-filters \ + --disable-protocols --enable-protocol=file \ + --disable-parsers --enable-parser=mpegaudio --enable-parser=h264 --enable-parser=hevc \ + --disable-demuxers --enable-demuxer=mov --enable-demuxer=mp3 --enable-demuxer=mpegts \ + --disable-decoders --enable-decoder=mp3 --enable-decoder=aac \ + --disable-muxers --enable-muxer=matroska \ + --enable-swresample + +python chromium/scripts/build_ffmpeg.py android arm64 --branding Chrome -- \ + --disable-asm \ + --disable-encoders --disable-hwaccels --disable-bsfs --disable-devices --disable-filters \ + --disable-protocols --enable-protocol=file \ + --disable-parsers --enable-parser=mpegaudio --enable-parser=h264 
--enable-parser=hevc \ + --disable-demuxers --enable-demuxer=mov --enable-demuxer=mp3 --enable-demuxer=mpegts \ + --disable-decoders --enable-decoder=mp3 --enable-decoder=aac \ + --disable-muxers --enable-muxer=matroska \ + --enable-swresample + +python chromium/scripts/build_ffmpeg.py android x64 --branding Chrome -- \ + --disable-asm \ + --disable-encoders --disable-hwaccels --disable-bsfs --disable-devices --disable-filters \ + --disable-protocols --enable-protocol=file \ + --disable-parsers --enable-parser=mpegaudio --enable-parser=h264 --enable-parser=hevc \ + --disable-demuxers --enable-demuxer=mov --enable-demuxer=mp3 --enable-demuxer=mpegts \ + --disable-decoders --enable-decoder=mp3 --enable-decoder=aac \ + --disable-muxers --enable-muxer=matroska \ + --enable-swresample + +./chromium/scripts/copy_config.sh +./chromium/scripts/generate_gn.py + +popd + +gn gen out/android_release_arm --args='target_os="android" target_cpu="arm" is_debug=false ffmpeg_branding="Chrome"' +ninja -C out/android_release_arm libjingle_peerconnection_so +cp out/android_release_arm/libjingle_peerconnection_so.so ../../webrtc_ios/src/sdk/android_gradle/webrtc/prebuilt_libs/armeabi-v7a/ + +gn gen out/android_release_x86 --args='target_os="android" target_cpu="x86" is_debug=false ffmpeg_branding="Chrome"' +ninja -C out/android_release_x86 libjingle_peerconnection_so +cp out/android_release_x86/libjingle_peerconnection_so.so ../../webrtc_ios/src/sdk/android_gradle/webrtc/prebuilt_libs/x86/ + +gn gen out/android_release_arm64 --args='target_os="android" target_cpu="arm64" is_debug=false ffmpeg_branding="Chrome"' +ninja -C out/android_release_arm64 libjingle_peerconnection_so +cp out/android_release_arm64/libjingle_peerconnection_so.so ../../webrtc_ios/src/sdk/android_gradle/webrtc/prebuilt_libs/arm64-v8a/ + +gn gen out/android_release_x64 --args='target_os="android" target_cpu="x64" is_debug=false ffmpeg_branding="Chrome"' +ninja -C out/android_release_x64 libjingle_peerconnection_so +cp 
out/android_release_x64/libjingle_peerconnection_so.so ../../webrtc_ios/src/sdk/android_gradle/webrtc/prebuilt_libs/x86_64/ diff --git a/sdk/build_ios_framework.sh b/sdk/build_ios_framework.sh new file mode 100755 index 0000000000..dfdaca7645 --- /dev/null +++ b/sdk/build_ios_framework.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +set -e + +pushd third_party/ffmpeg + +git reset --hard +git apply ../../sdk/ffmpeg-ios-build.diff + +python chromium/scripts/build_ffmpeg.py ios arm64 --branding Chrome -- \ + --disable-asm \ + --disable-encoders --disable-hwaccels --disable-bsfs --disable-devices --disable-filters \ + --disable-protocols --enable-protocol=file \ + --disable-parsers --enable-parser=mpegaudio --enable-parser=h264 --enable-parser=hevc \ + --disable-demuxers --enable-demuxer=mov --enable-demuxer=mp3 --enable-demuxer=mpegts \ + --disable-decoders --enable-decoder=mp3 --enable-decoder=aac \ + --disable-muxers --enable-muxer=matroska \ + --enable-swresample + +python chromium/scripts/build_ffmpeg.py ios x64 --branding Chrome -- \ + --disable-asm \ + --disable-encoders --disable-hwaccels --disable-bsfs --disable-devices --disable-filters \ + --disable-protocols --enable-protocol=file \ + --disable-parsers --enable-parser=mpegaudio --enable-parser=h264 --enable-parser=hevc \ + --disable-demuxers --enable-demuxer=mov --enable-demuxer=mp3 --enable-demuxer=mpegts \ + --disable-decoders --enable-decoder=mp3 --enable-decoder=aac \ + --disable-muxers --enable-muxer=matroska \ + --enable-swresample + +./chromium/scripts/copy_config.sh +./chromium/scripts/generate_gn.py + +popd + +gn gen out/ios_release_arm64 --args='target_os = "ios" target_cpu = "arm64" ios_enable_code_signing = false use_xcode_clang = true is_component_build = false is_debug = false ios_deployment_target = "10.0" rtc_libvpx_build_vp9 = false enable_ios_bitcode = true use_goma = false enable_dsyms = true enable_stripping = true ffmpeg_branding="Chrome"' +ninja -C out/ios_release_arm64 framework_objc + +gn gen 
out/ios_release_x64 --args='target_os = "ios" target_cpu = "x64" ios_enable_code_signing = false use_xcode_clang = true is_component_build = false is_debug = false ios_deployment_target = "10.0" rtc_libvpx_build_vp9 = false enable_ios_bitcode = true use_goma = false enable_dsyms = true enable_stripping = true ffmpeg_branding="Chrome"' +ninja -C out/ios_release_x64 framework_objc diff --git a/sdk/ffmpeg-ios-build.diff b/sdk/ffmpeg-ios-build.diff new file mode 100644 index 0000000000..4bbb28664b --- /dev/null +++ b/sdk/ffmpeg-ios-build.diff @@ -0,0 +1,249 @@ +From e70771600ba51662f96174a0966a7e1ce3a76aac Mon Sep 17 00:00:00 2001 +From: Piasy +Date: Sun, 20 Jun 2021 22:39:58 +0800 +Subject: [PATCH] Support build on iOS + +--- + BUILD.gn | 19 ++++++++++ + chromium/scripts/build_ffmpeg.py | 63 ++++++++++++++++++++++++++------ + chromium/scripts/copy_config.sh | 2 +- + chromium/scripts/generate_gn.py | 4 +- + configure | 4 +- + 5 files changed, 76 insertions(+), 16 deletions(-) + +diff --git a/BUILD.gn b/BUILD.gn +index f7f34ea414..2e9bb892bd 100755 +--- a/BUILD.gn ++++ b/BUILD.gn +@@ -210,6 +210,12 @@ target(link_target_type, "ffmpeg_internal") { + "PIC", + "FFMPEG_CONFIGURATION=NULL", + ] ++ if (is_ios) { ++ defines -= [ ++ "_POSIX_C_SOURCE=200112", ++ "_XOPEN_SOURCE=600", ++ ] ++ } + + if (is_component_ffmpeg) { + configs += [ "//build/config/sanitizers:cfi_icall_disable" ] +@@ -286,6 +292,14 @@ target(link_target_type, "ffmpeg_internal") { + # ffmpeg uses its own deprecated functions. + "-Wno-deprecated-declarations", + ] ++ if (is_ios) { ++ cflags += [ ++ "-Wno-error=misleading-indentation", ++ "-Wno-pointer-bool-conversion", ++ "-Wno-error=implicit-const-int-float-conversion", ++ "-Wno-error=macro-redefined", ++ ] ++ } + + if (!(is_android && use_call_graph)) { + # Remove default stack frames config so we can force -fomit-frame-pointer. 
+@@ -348,6 +362,11 @@ target(link_target_type, "ffmpeg_internal") { + "z", + "rt", + ] ++ if (is_ios) { ++ libs -= [ ++ "rt", ++ ] ++ } + } + if (is_component_ffmpeg) { + # Export all symbols when building as component. +diff --git a/chromium/scripts/build_ffmpeg.py b/chromium/scripts/build_ffmpeg.py +index 75333a2c9f..42a80e6840 100755 +--- a/chromium/scripts/build_ffmpeg.py ++++ b/chromium/scripts/build_ffmpeg.py +@@ -40,6 +40,7 @@ ARCH_MAP = { + 'arm64' + ], + 'mac': ['x64', 'arm64'], ++ 'ios': ['x64', 'arm64'], + 'win': ['ia32', 'x64', 'arm64'], + } + +@@ -148,7 +149,7 @@ def DetermineHostOsAndArch(): + def GetDsoName(target_os, dso_name, dso_version): + if target_os in ('linux', 'linux-noasm', 'android'): + return 'lib%s.so.%s' % (dso_name, dso_version) +- elif target_os == 'mac': ++ elif target_os in ('mac', 'ios'): + return 'lib%s.%s.dylib' % (dso_name, dso_version) + elif target_os == 'win': + return '%s-%s.dll' % (dso_name, dso_version) +@@ -472,11 +473,12 @@ def BuildFFmpeg(target_os, target_arch, host_os, host_arch, parallel_jobs, + r'LD=' + os.path.join(SCRIPTS_DIR, 'fake_linker.py'))]) + + if target_os in (host_os, host_os + '-noasm', 'android', +- 'win', 'mac') and not config_only: ++ 'win', 'mac', 'ios') and not config_only: + libraries = [ + os.path.join('libavcodec', GetDsoName(target_os, 'avcodec', 58)), + os.path.join('libavformat', GetDsoName(target_os, 'avformat', 58)), + os.path.join('libavutil', GetDsoName(target_os, 'avutil', 56)), ++ os.path.join('libswresample', GetDsoName(target_os, 'swresample', 3)), + ] + PrintAndCheckCall( + ['make', '-j%d' % parallel_jobs] + libraries, cwd=config_dir) +@@ -537,7 +539,7 @@ def main(argv): + configure_args = args[2:] + + +- if target_os not in ('android', 'linux', 'linux-noasm', 'mac', 'win', 'all'): ++ if target_os not in ('android', 'linux', 'linux-noasm', 'mac', 'ios', 'win', 'all'): + parser.print_help() + return 1 + +@@ -550,7 +552,7 @@ def main(argv): + parallel_jobs = 8 + + if 
target_os.split('-', 1)[0] != host_os and (host_os != 'linux' or +- host_arch != 'x64'): ++ host_arch != 'x64') and target_os != 'ios': + print('Cross compilation can only be done from a linux x64 host.') + return 1 + +@@ -834,18 +836,23 @@ def ConfigureAndBuild(target_arch, target_os, host_os, host_arch, parallel_jobs, + ]) + + if 'win' not in target_os: +- configure_flags['Common'].extend([ +- '--enable-pic', +- '--cc=clang', +- '--cxx=clang++', +- '--ld=clang', +- ]) ++ if target_os == 'ios': ++ configure_flags['Common'].extend([ ++ '--enable-pic', ++ ]) ++ else: ++ configure_flags['Common'].extend([ ++ '--enable-pic', ++ '--cc=clang', ++ '--cxx=clang++', ++ '--ld=clang', ++ ]) + + # Clang Linux will use the first 'ld' it finds on the path, which will + # typically be the system one, so explicitly configure use of Clang's + # ld.lld, to ensure that things like cross-compilation and LTO work. + # This does not work for ia32 and is always used on mac. +- if target_arch != 'ia32' and target_os != 'mac': ++ if target_arch != 'ia32' and target_os != 'mac' and target_os != 'ios': + configure_flags['Common'].append('--extra-ldflags=-fuse-ld=lld') + + # Should be run on Mac, unless we're cross-compiling on Linux. 
+@@ -886,6 +893,40 @@ def ConfigureAndBuild(target_arch, target_os, host_os, host_arch, parallel_jobs, + target_os), + file=sys.stderr) + ++ if target_os == 'ios': ++ if host_os != 'mac': ++ print( ++ 'Script should be run on a Mac host.\n', ++ file=sys.stderr) ++ return 1 ++ ++ configure_flags['Common'].extend([ ++ '--enable-cross-compile', ++ '--target-os=darwin', ++ ]) ++ ++ if target_arch == 'x64': ++ configure_flags['Common'].extend([ ++ '--arch=x86_64', ++ '--cc=xcrun -sdk iphonesimulator clang', ++ '--as=gas-preprocessor.pl -- xcrun -sdk iphonesimulator clang', ++ '--extra-cflags=-arch x86_64 -mios-simulator-version-min=10.0', ++ '--extra-ldflags=-arch x86_64 -mios-simulator-version-min=10.0', ++ ]) ++ elif target_arch == 'arm64': ++ configure_flags['Common'].extend([ ++ '--arch=arm64', ++ '--cc=xcrun -sdk iphoneos clang', ++ '--as=gas-preprocessor.pl -arch aarch64 -- xcrun -sdk iphoneos clang', ++ '--extra-cflags=-arch arm64 -mios-version-min=10.0 -fembed-bitcode', ++ '--extra-ldflags=-arch arm64 -mios-version-min=10.0 -fembed-bitcode', ++ ]) ++ else: ++ print( ++ 'Error: Unknown target arch %r for target OS %r!' % (target_arch, ++ target_os), ++ file=sys.stderr) ++ + # Should be run on Windows. + if target_os == 'win': + configure_flags['Common'].extend([ +diff --git a/chromium/scripts/copy_config.sh b/chromium/scripts/copy_config.sh +index 0e5159d6f4..a7609b0cdf 100755 +--- a/chromium/scripts/copy_config.sh ++++ b/chromium/scripts/copy_config.sh +@@ -5,7 +5,7 @@ + # found in the LICENSE file. + + # Use this to copy all config files into the tree. 
+-for os in android linux linux-noasm mac win; do ++for os in android linux linux-noasm mac ios win; do + for target in Chromium Chrome ChromeOS; do + # Copy config files for various architectures: + # - ia32/x64 have config.asm, config.h +diff --git a/chromium/scripts/generate_gn.py b/chromium/scripts/generate_gn.py +index 0f606dfaef..0750c2e742 100755 +--- a/chromium/scripts/generate_gn.py ++++ b/chromium/scripts/generate_gn.py +@@ -81,7 +81,7 @@ SUPPORT_MATRIX = { + Attr.TARGET: + set(['Chromium', 'Chrome', 'ChromeOS']), + Attr.PLATFORM: +- set(['android', 'linux', 'win', 'mac']) ++ set(['android', 'linux', 'win', 'mac', 'ios']) + } + + +@@ -121,7 +121,6 @@ def CleanObjectFiles(object_files): + 'libavcodec/x86/dnxhd_mmx.o', + 'libavformat/sdp.o', + 'libavutil/adler32.o', +- 'libavutil/audio_fifo.o', + 'libavutil/blowfish.o', + 'libavutil/cast5.o', + 'libavutil/des.o', +@@ -660,6 +659,7 @@ IGNORED_INCLUDE_FILES = [ + LICENSE_WHITELIST = [ + 'BSD (3 clause) LGPL (v2.1 or later)', + 'BSL (v1) LGPL (v2.1 or later)', ++ 'BSL LGPL (v2.1 or later) GENERATED FILE', + 'ISC GENERATED FILE', + 'LGPL (v2.1 or later)', + 'LGPL (v2.1 or later) GENERATED FILE', +diff --git a/configure b/configure +index 985a5a8ffc..3dbf73d009 100755 +--- a/configure ++++ b/configure +@@ -2635,7 +2635,7 @@ faandct_select="fdctdsp" + faanidct_deps="faan" + faanidct_select="idctdsp" + h264dsp_select="startcode" +-hevcparse_select="golomb" ++hevcparse_select="atsc_a53 golomb" + frame_thread_encoder_deps="encoders threads" + intrax8_select="blockdsp idctdsp" + mdct_select="fft" +@@ -3145,7 +3145,7 @@ av1_qsv_decoder_select="qsvdec" + # parsers + aac_parser_select="adts_header" + av1_parser_select="cbs_av1" +-h264_parser_select="golomb h264dsp h264parse" ++h264_parser_select="atsc_a53 golomb h264dsp h264parse" + hevc_parser_select="hevcparse" + mpegaudio_parser_select="mpegaudioheader" + mpegvideo_parser_select="mpegvideo" +-- +2.31.0 + diff --git a/sdk/ffmpeg-others-build.diff 
b/sdk/ffmpeg-others-build.diff new file mode 100644 index 0000000000..6ee273566b --- /dev/null +++ b/sdk/ffmpeg-others-build.diff @@ -0,0 +1,68 @@ +From 3d62dd0c664ce0bdcd87b9e019d3a69b4a1752f9 Mon Sep 17 00:00:00 2001 +From: Piasy +Date: Sun, 20 Jun 2021 22:47:41 +0800 +Subject: [PATCH] Fix avc/hevc parser + swresample build + +--- + chromium/scripts/build_ffmpeg.py | 1 + + chromium/scripts/generate_gn.py | 2 +- + configure | 4 ++-- + 3 files changed, 4 insertions(+), 3 deletions(-) + +diff --git a/chromium/scripts/build_ffmpeg.py b/chromium/scripts/build_ffmpeg.py +index 75333a2c9f..dc7a3520cf 100755 +--- a/chromium/scripts/build_ffmpeg.py ++++ b/chromium/scripts/build_ffmpeg.py +@@ -477,6 +477,7 @@ def BuildFFmpeg(target_os, target_arch, host_os, host_arch, parallel_jobs, + os.path.join('libavcodec', GetDsoName(target_os, 'avcodec', 58)), + os.path.join('libavformat', GetDsoName(target_os, 'avformat', 58)), + os.path.join('libavutil', GetDsoName(target_os, 'avutil', 56)), ++ os.path.join('libswresample', GetDsoName(target_os, 'swresample', 3)), + ] + PrintAndCheckCall( + ['make', '-j%d' % parallel_jobs] + libraries, cwd=config_dir) +diff --git a/chromium/scripts/generate_gn.py b/chromium/scripts/generate_gn.py +index 0f606dfaef..300ccd5e6f 100755 +--- a/chromium/scripts/generate_gn.py ++++ b/chromium/scripts/generate_gn.py +@@ -121,7 +121,6 @@ def CleanObjectFiles(object_files): + 'libavcodec/x86/dnxhd_mmx.o', + 'libavformat/sdp.o', + 'libavutil/adler32.o', +- 'libavutil/audio_fifo.o', + 'libavutil/blowfish.o', + 'libavutil/cast5.o', + 'libavutil/des.o', +@@ -660,6 +659,7 @@ IGNORED_INCLUDE_FILES = [ + LICENSE_WHITELIST = [ + 'BSD (3 clause) LGPL (v2.1 or later)', + 'BSL (v1) LGPL (v2.1 or later)', ++ 'BSL LGPL (v2.1 or later) GENERATED FILE', + 'ISC GENERATED FILE', + 'LGPL (v2.1 or later)', + 'LGPL (v2.1 or later) GENERATED FILE', +diff --git a/configure b/configure +index 985a5a8ffc..3dbf73d009 100755 +--- a/configure ++++ b/configure +@@ -2635,7 +2635,7 @@ 
faandct_select="fdctdsp" + faanidct_deps="faan" + faanidct_select="idctdsp" + h264dsp_select="startcode" +-hevcparse_select="golomb" ++hevcparse_select="atsc_a53 golomb" + frame_thread_encoder_deps="encoders threads" + intrax8_select="blockdsp idctdsp" + mdct_select="fft" +@@ -3145,7 +3145,7 @@ av1_qsv_decoder_select="qsvdec" + # parsers + aac_parser_select="adts_header" + av1_parser_select="cbs_av1" +-h264_parser_select="golomb h264dsp h264parse" ++h264_parser_select="atsc_a53 golomb h264dsp h264parse" + hevc_parser_select="hevcparse" + mpegaudio_parser_select="mpegaudioheader" + mpegvideo_parser_select="mpegvideo" +-- +2.31.0 + diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h b/sdk/objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h deleted file mode 100644 index 6ee1bc5435..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/audio/RTCAudioSessionConfiguration.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCAudioSource.h b/sdk/objc/Framework/Headers/WebRTC/RTCAudioSource.h deleted file mode 100644 index a7dc191319..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCAudioSource.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCAudioSource.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCCVPixelBuffer.h b/sdk/objc/Framework/Headers/WebRTC/RTCCVPixelBuffer.h deleted file mode 100644 index 7a4f847be5..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCCVPixelBuffer.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/video_frame_buffer/RTCCVPixelBuffer.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCCallbackLogger.h b/sdk/objc/Framework/Headers/WebRTC/RTCCallbackLogger.h deleted file mode 100644 index c4585228ed..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCCallbackLogger.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/logging/RTCCallbackLogger.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h b/sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h deleted file mode 100644 index 388e72fce0..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "helpers/RTCCameraPreviewView.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h b/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h deleted file mode 100644 index aac6773b6c..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/capturer/RTCCameraVideoCapturer.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCCertificate.h b/sdk/objc/Framework/Headers/WebRTC/RTCCertificate.h deleted file mode 100644 index 9e3b7dcf1a..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCCertificate.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/peerconnection/RTCCertificate.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCConfiguration.h b/sdk/objc/Framework/Headers/WebRTC/RTCConfiguration.h deleted file mode 100644 index fe2f0cdfdb..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCConfiguration.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCConfiguration.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCDataChannel.h b/sdk/objc/Framework/Headers/WebRTC/RTCDataChannel.h deleted file mode 100644 index 4f35079471..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCDataChannel.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCDataChannel.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h b/sdk/objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h deleted file mode 100644 index 20cb4e1c51..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCDataChannelConfiguration.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCDispatcher.h b/sdk/objc/Framework/Headers/WebRTC/RTCDispatcher.h deleted file mode 100644 index 2ae9cd89c8..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCDispatcher.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "helpers/RTCDispatcher.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h b/sdk/objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h deleted file mode 100644 index ec5b1d7087..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "components/renderer/opengl/RTCEAGLVideoView.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCFieldTrials.h b/sdk/objc/Framework/Headers/WebRTC/RTCFieldTrials.h deleted file mode 100644 index 386989abf0..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCFieldTrials.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCFieldTrials.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCFileLogger.h b/sdk/objc/Framework/Headers/WebRTC/RTCFileLogger.h deleted file mode 100644 index ae7d9ef95f..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCFileLogger.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCFileLogger.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCFileVideoCapturer.h b/sdk/objc/Framework/Headers/WebRTC/RTCFileVideoCapturer.h deleted file mode 100644 index 344eca9b00..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCFileVideoCapturer.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/capturer/RTCFileVideoCapturer.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCH264ProfileLevelId.h b/sdk/objc/Framework/Headers/WebRTC/RTCH264ProfileLevelId.h deleted file mode 100644 index 57798148a0..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCH264ProfileLevelId.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/video_codec/RTCH264ProfileLevelId.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCIceCandidate.h b/sdk/objc/Framework/Headers/WebRTC/RTCIceCandidate.h deleted file mode 100644 index dbeea22000..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCIceCandidate.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/peerconnection/RTCIceCandidate.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCIceServer.h b/sdk/objc/Framework/Headers/WebRTC/RTCIceServer.h deleted file mode 100644 index 967b19c066..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCIceServer.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCIceServer.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCLegacyStatsReport.h b/sdk/objc/Framework/Headers/WebRTC/RTCLegacyStatsReport.h deleted file mode 100644 index a727a495fc..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCLegacyStatsReport.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCLegacyStatsReport.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCLogging.h b/sdk/objc/Framework/Headers/WebRTC/RTCLogging.h deleted file mode 100644 index bb5e25dbde..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCLogging.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "base/RTCLogging.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMTLNSVideoView.h b/sdk/objc/Framework/Headers/WebRTC/RTCMTLNSVideoView.h deleted file mode 100644 index 4368a8a95b..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMTLNSVideoView.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/renderer/metal/RTCMTLNSVideoView.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h b/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h deleted file mode 100644 index 9f43dc5613..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMTLVideoView.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/renderer/metal/RTCMTLVideoView.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMacros.h b/sdk/objc/Framework/Headers/WebRTC/RTCMacros.h deleted file mode 100644 index 8582a33600..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMacros.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. 
All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "base/RTCMacros.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMediaConstraints.h b/sdk/objc/Framework/Headers/WebRTC/RTCMediaConstraints.h deleted file mode 100644 index 1059725c05..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMediaConstraints.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCMediaConstraints.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMediaSource.h b/sdk/objc/Framework/Headers/WebRTC/RTCMediaSource.h deleted file mode 100644 index f642524d2a..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMediaSource.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/peerconnection/RTCMediaSource.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMediaStream.h b/sdk/objc/Framework/Headers/WebRTC/RTCMediaStream.h deleted file mode 100644 index 9b4d03213c..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMediaStream.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCMediaStream.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h b/sdk/objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h deleted file mode 100644 index 9de0edbfbd..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCMediaStreamTrack.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMetrics.h b/sdk/objc/Framework/Headers/WebRTC/RTCMetrics.h deleted file mode 100644 index b44289ea34..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMetrics.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCMetrics.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCMetricsSampleInfo.h b/sdk/objc/Framework/Headers/WebRTC/RTCMetricsSampleInfo.h deleted file mode 100644 index 5ee84c1457..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCMetricsSampleInfo.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCMetricsSampleInfo.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h b/sdk/objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h deleted file mode 100644 index 3fa89aa30e..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "components/renderer/opengl/RTCNSGLVideoView.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h b/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h deleted file mode 100644 index df9bc85cdd..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCPeerConnection.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h b/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h deleted file mode 100644 index ff544a00c8..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCPeerConnectionFactory.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactoryOptions.h b/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactoryOptions.h deleted file mode 100644 index c4699c963e..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactoryOptions.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCPeerConnectionFactoryOptions.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtcpParameters.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtcpParameters.h deleted file mode 100644 index 28f4f5fcfc..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtcpParameters.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCRtcpParameters.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h deleted file mode 100644 index d4b76012f9..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/peerconnection/RTCRtpCodecParameters.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h deleted file mode 100644 index a1510ba98f..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCRtpEncodingParameters.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtpHeaderExtension.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtpHeaderExtension.h deleted file mode 100644 index 3bc6b2ba54..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtpHeaderExtension.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCRtpHeaderExtension.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h deleted file mode 100644 index 0e5b7e2178..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCRtpParameters.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h deleted file mode 100644 index ff61f824a0..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCRtpReceiver.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h deleted file mode 100644 index d5a4e65d09..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/peerconnection/RTCRtpSender.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCRtpTransceiver.h b/sdk/objc/Framework/Headers/WebRTC/RTCRtpTransceiver.h deleted file mode 100644 index ff07e29fb7..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCRtpTransceiver.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCRtpTransceiver.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCSSLAdapter.h b/sdk/objc/Framework/Headers/WebRTC/RTCSSLAdapter.h deleted file mode 100644 index c721875e70..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCSSLAdapter.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCSSLAdapter.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCSessionDescription.h b/sdk/objc/Framework/Headers/WebRTC/RTCSessionDescription.h deleted file mode 100644 index b5d55f7729..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCSessionDescription.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCSessionDescription.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCTracing.h b/sdk/objc/Framework/Headers/WebRTC/RTCTracing.h deleted file mode 100644 index fd4a6ffecb..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCTracing.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCTracing.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCapturer.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoCapturer.h deleted file mode 100644 index 93586fc0d5..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCapturer.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "base/RTCVideoCapturer.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h deleted file mode 100644 index b4511a9985..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/video_codec/RTCVideoCodecConstants.h" -#import "base/RTCCodecSpecificInfo.h" -#import "base/RTCEncodedImage.h" -#import "base/RTCRtpFragmentationHeader.h" -#import "base/RTCVideoCodecInfo.h" -#import "base/RTCVideoDecoder.h" -#import "base/RTCVideoEncoder.h" -#import "base/RTCVideoEncoderQpThresholds.h" -#import "base/RTCVideoEncoderSettings.h" -#import "components/video_codec/RTCH264ProfileLevelId.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecH264.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecH264.h deleted file mode 100644 index d1ff92e27a..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecH264.h +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "components/video_codec/RTCCodecSpecificInfoH264.h" -#import "components/video_codec/RTCH264ProfileLevelId.h" -#import "components/video_codec/RTCVideoDecoderFactoryH264.h" -#import "components/video_codec/RTCVideoDecoderH264.h" -#import "components/video_codec/RTCVideoEncoderFactoryH264.h" -#import "components/video_codec/RTCVideoEncoderH264.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecInfo.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecInfo.h deleted file mode 100644 index 10c5fea465..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecInfo.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "base/RTCVideoCodecInfo.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP8.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP8.h deleted file mode 100644 index 9c9e840e6d..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP8.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/video_codec/RTCVideoDecoderVP8.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP9.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP9.h deleted file mode 100644 index 1a7c38267b..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoDecoderVP9.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/video_codec/RTCVideoDecoderVP9.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP8.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP8.h deleted file mode 100644 index 65a7850884..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP8.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/video_codec/RTCVideoEncoderVP8.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP9.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP9.h deleted file mode 100644 index 5d01835805..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoEncoderVP9.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/video_codec/RTCVideoEncoderVP9.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h deleted file mode 100644 index ec1543d8e3..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "base/RTCVideoFrame.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h deleted file mode 100644 index 8d4be50868..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/video_frame_buffer/RTCNativeI420Buffer.h" -#import "api/video_frame_buffer/RTCNativeMutableI420Buffer.h" -#import "base/RTCI420Buffer.h" -#import "base/RTCMutableI420Buffer.h" -#import "base/RTCMutableYUVPlanarBuffer.h" -#import "base/RTCVideoFrameBuffer.h" -#import "base/RTCYUVPlanarBuffer.h" -#import "components/video_frame_buffer/RTCCVPixelBuffer.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoRenderer.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoRenderer.h deleted file mode 100644 index 0e32c02c9a..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoRenderer.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "base/RTCVideoRenderer.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoSource.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoSource.h deleted file mode 100644 index 91e635de6c..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoSource.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "api/peerconnection/RTCVideoSource.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoTrack.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoTrack.h deleted file mode 100644 index b5dd1ddf59..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoTrack.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "api/peerconnection/RTCVideoTrack.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoViewShading.h b/sdk/objc/Framework/Headers/WebRTC/RTCVideoViewShading.h deleted file mode 100644 index 7c6cd7a2e5..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoViewShading.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "components/renderer/opengl/RTCVideoViewShading.h" diff --git a/sdk/objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h b/sdk/objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h deleted file mode 100644 index 724dc8dca4..0000000000 --- a/sdk/objc/Framework/Headers/WebRTC/UIDevice+RTCDevice.h +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "helpers/UIDevice+RTCDevice.h" diff --git a/sdk/objc/OWNERS b/sdk/objc/OWNERS index 4695e82a8d..6af9062b2d 100644 --- a/sdk/objc/OWNERS +++ b/sdk/objc/OWNERS @@ -7,8 +7,3 @@ denicija@webrtc.org # Rubberstamps of e.g. reverts and critical bug fixes. magjed@webrtc.org tkchin@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h index d983ae6c33..9b123d2d05 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h +++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h @@ -23,7 +23,7 @@ NS_ASSUME_NONNULL_BEGIN * Calls made to the webrtc::VideoRenderInterface will be adapted and passed to * this video renderer. */ -@property(nonatomic, readonly) id videoRenderer; +@property(nonatomic, readonly) id videoRenderer; /** * The native VideoSinkInterface surface exposed by this adapter. Calls made @@ -33,7 +33,7 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, readonly) rtc::VideoSinkInterface *nativeVideoRenderer; /** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. 
*/ -- (instancetype)initWithNativeRenderer:(id)videoRenderer +- (instancetype)initWithNativeRenderer:(id)videoRenderer NS_DESIGNATED_INITIALIZER; @end diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm index 27dd6c2c52..ef02f72f60 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.mm +++ b/sdk/objc/api/RTCVideoRendererAdapter.mm @@ -26,7 +26,7 @@ } void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { - RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame); + RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame); CGSize current_size = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) @@ -51,7 +51,7 @@ @implementation RTCVideoRendererAdapter { @synthesize videoRenderer = _videoRenderer; -- (instancetype)initWithNativeRenderer:(id)videoRenderer { +- (instancetype)initWithNativeRenderer:(id)videoRenderer { NSParameterAssert(videoRenderer); if (self = [super init]) { _videoRenderer = videoRenderer; diff --git a/sdk/objc/api/logging/RTCCallbackLogger.h b/sdk/objc/api/logging/RTCCallbackLogger.h index 2bce03fe0f..c1aeb825cb 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.h +++ b/sdk/objc/api/logging/RTCCallbackLogger.h @@ -22,7 +22,7 @@ typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)(NSString *message, // This class intercepts WebRTC logs and forwards them to a registered block. // This class is not threadsafe. RTC_OBJC_EXPORT -@interface RTCCallbackLogger : NSObject +@interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject // The severity level to capture. The default is kRTCLoggingSeverityInfo. 
@property(nonatomic, assign) RTCLoggingSeverity severity; diff --git a/sdk/objc/api/logging/RTCCallbackLogger.mm b/sdk/objc/api/logging/RTCCallbackLogger.mm index e58b03b549..443fee1a65 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.mm +++ b/sdk/objc/api/logging/RTCCallbackLogger.mm @@ -64,7 +64,7 @@ static RTCLoggingSeverity NativeSeverityToObjcSeverity(rtc::LoggingSeverity seve RTCCallbackLoggerMessageAndSeverityHandler callback_handler_; }; -@implementation RTCCallbackLogger { +@implementation RTC_OBJC_TYPE (RTCCallbackLogger) { BOOL _hasStarted; std::unique_ptr _logSink; } diff --git a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h index bf1ea62044..2c333f9d73 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h @@ -12,20 +12,22 @@ #import "RTCMediaSource+Private.h" -@interface RTCAudioSource () +@interface RTC_OBJC_TYPE (RTCAudioSource) +() -/** - * The AudioSourceInterface object passed to this RTCAudioSource during - * construction. - */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioSource; + /** + * The AudioSourceInterface object passed to this RTCAudioSource during + * construction. + */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeAudioSource; /** Initialize an RTCAudioSource from a native AudioSourceInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeAudioSource:(rtc::scoped_refptr)nativeAudioSource NS_DESIGNATED_INITIALIZER; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.h b/sdk/objc/api/peerconnection/RTCAudioSource.h index d1030e3fef..9f78dcd992 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource.h +++ b/sdk/objc/api/peerconnection/RTCAudioSource.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCAudioSource : RTCMediaSource +@interface RTC_OBJC_TYPE (RTCAudioSource) : RTC_OBJC_TYPE(RTCMediaSource) - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.mm b/sdk/objc/api/peerconnection/RTCAudioSource.mm index a6822f6702..b56c6e9648 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource.mm +++ b/sdk/objc/api/peerconnection/RTCAudioSource.mm @@ -12,13 +12,13 @@ #include "rtc_base/checks.h" -@implementation RTCAudioSource { +@implementation RTC_OBJC_TYPE (RTCAudioSource) { } @synthesize volume = _volume; @synthesize nativeAudioSource = _nativeAudioSource; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeAudioSource: (rtc::scoped_refptr)nativeAudioSource { RTC_DCHECK(factory); @@ -32,7 +32,7 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type { 
RTC_NOTREACHED(); @@ -41,7 +41,7 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - (NSString *)description { NSString *stateString = [[self class] stringForState:self.state]; - return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString]; + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCAudioSource)( %p ): %@", self, stateString]; } - (void)setVolume:(double)volume { diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h index 88dd971b67..6495500484 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h @@ -14,15 +14,16 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; -@interface RTCAudioTrack () +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@interface RTC_OBJC_TYPE (RTCAudioTrack) +() -/** AudioTrackInterface created or passed in at construction. */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; + /** AudioTrackInterface created or passed in at construction. */ + @property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; /** Initialize an RTCAudioTrack with an id. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCAudioSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId; @end diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h index 501ef92ec4..95eb5d3d48 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h @@ -13,15 +13,15 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCAudioSource; +@class RTC_OBJC_TYPE(RTCAudioSource); RTC_OBJC_EXPORT -@interface RTCAudioTrack : RTCMediaStreamTrack +@interface RTC_OBJC_TYPE (RTCAudioTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack) - (instancetype)init NS_UNAVAILABLE; /** The audio source for this audio track. */ -@property(nonatomic, readonly) RTCAudioSource *source; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source; @end diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 3389b7612c..6a97f46eaa 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -17,12 +17,12 @@ #include "rtc_base/checks.h" -@implementation RTCAudioTrack +@implementation RTC_OBJC_TYPE (RTCAudioTrack) @synthesize source = _source; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCAudioSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId { RTC_DCHECK(factory); RTC_DCHECK(source); @@ -37,7 +37,7 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack 
type:(RTCMediaStreamTrackType)type { NSParameterAssert(factory); @@ -46,14 +46,13 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return [super initWithFactory:factory nativeTrack:nativeTrack type:type]; } - -- (RTCAudioSource *)source { +- (RTC_OBJC_TYPE(RTCAudioSource) *)source { if (!_source) { rtc::scoped_refptr source = self.nativeAudioTrack->GetSource(); if (source) { - _source = - [[RTCAudioSource alloc] initWithFactory:self.factory nativeAudioSource:source.get()]; + _source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory + nativeAudioSource:source.get()]; } } return _source; diff --git a/sdk/objc/api/peerconnection/RTCCertificate.h b/sdk/objc/api/peerconnection/RTCCertificate.h index 50c1ca55a3..5ac8984d4a 100644 --- a/sdk/objc/api/peerconnection/RTCCertificate.h +++ b/sdk/objc/api/peerconnection/RTCCertificate.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCCertificate : NSObject +@interface RTC_OBJC_TYPE (RTCCertificate) : NSObject /** Private key in PEM. */ @property(nonatomic, readonly, copy) NSString *private_key; @@ -37,7 +37,7 @@ RTC_OBJC_EXPORT * provided. 
* - name: "ECDSA" or "RSASSA-PKCS1-v1_5" */ -+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params; ++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params; @end diff --git a/sdk/objc/api/peerconnection/RTCCertificate.mm b/sdk/objc/api/peerconnection/RTCCertificate.mm index 250cfc4920..e5c33e407c 100644 --- a/sdk/objc/api/peerconnection/RTCCertificate.mm +++ b/sdk/objc/api/peerconnection/RTCCertificate.mm @@ -16,7 +16,7 @@ #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_identity.h" -@implementation RTCCertificate +@implementation RTC_OBJC_TYPE (RTCCertificate) @synthesize private_key = _private_key; @synthesize certificate = _certificate; @@ -35,7 +35,7 @@ - (instancetype)initWithPrivateKey:(NSString *)private_key certificate:(NSString return self; } -+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params { ++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params { rtc::KeyType keyType = rtc::KT_ECDSA; NSString *keyTypeString = [params valueForKey:@"name"]; if (keyTypeString && [keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) { @@ -63,8 +63,9 @@ + (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)param RTC_LOG(LS_INFO) << "CERT PEM "; RTC_LOG(LS_INFO) << pem_certificate; - RTCCertificate *cert = [[RTCCertificate alloc] initWithPrivateKey:@(pem_private_key.c_str()) - certificate:@(pem_certificate.c_str())]; + RTC_OBJC_TYPE(RTCCertificate) *cert = + [[RTC_OBJC_TYPE(RTCCertificate) alloc] initWithPrivateKey:@(pem_private_key.c_str()) + certificate:@(pem_certificate.c_str())]; return cert; } diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Native.h b/sdk/objc/api/peerconnection/RTCConfiguration+Native.h index 54783f049a..07c0da6041 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration+Native.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration+Native.h @@ -14,14 +14,15 
@@ NS_ASSUME_NONNULL_BEGIN -@interface RTCConfiguration () +@interface RTC_OBJC_TYPE (RTCConfiguration) +() -/** Optional TurnCustomizer. - * With this class one can modify outgoing TURN messages. - * The object passed in must remain valid until PeerConnection::Close() is - * called. - */ -@property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer; + /** Optional TurnCustomizer. + * With this class one can modify outgoing TURN messages. + * The object passed in must remain valid until PeerConnection::Close() is + * called. + */ + @property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer; @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h index 845f779272..70a6532dbc 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCConfiguration () +@interface RTC_OBJC_TYPE (RTCConfiguration) +() -+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy: - (RTCIceTransportPolicy)policy; + + (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy + : (RTCIceTransportPolicy)policy; + (RTCIceTransportPolicy)transportPolicyForTransportsType: (webrtc::PeerConnectionInterface::IceTransportsType)nativeType; @@ -65,8 +66,8 @@ NS_ASSUME_NONNULL_BEGIN + (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics; /** - * RTCConfiguration struct representation of this RTCConfiguration. This is - * needed to pass to the underlying C++ APIs. + * RTCConfiguration struct representation of this RTCConfiguration. + * This is needed to pass to the underlying C++ APIs. 
*/ - (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration; diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index 7400296451..86eaa6cee5 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -14,7 +14,7 @@ #import "RTCCryptoOptions.h" #import "RTCMacros.h" -@class RTCIceServer; +@class RTC_OBJC_TYPE(RTCIceServer); /** * Represents the ice transport policy. This exposes the same states in C++, @@ -70,13 +70,18 @@ typedef NS_ENUM(NSInteger, RTCSdpSemantics) { NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCConfiguration : NSObject +@interface RTC_OBJC_TYPE (RTCConfiguration) : NSObject + +/** If true, allows DSCP codes to be set on outgoing packets, configured using + * networkPriority field of RTCRtpEncodingParameters. Defaults to false. + */ +@property(nonatomic, assign) BOOL enableDscp; /** An array of Ice Servers available to be used by ICE. */ -@property(nonatomic, copy) NSArray *iceServers; +@property(nonatomic, copy) NSArray *iceServers; /** An RTCCertificate for 're' use. */ -@property(nonatomic, nullable) RTCCertificate *certificate; +@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCertificate) * certificate; /** Which candidates the ICE agent is allowed to use. The W3C calls it * |iceTransportPolicy|, while in C++ it is called |type|. */ @@ -168,9 +173,9 @@ RTC_OBJC_EXPORT * * UnifiedPlan will cause RTCPeerConnection to create offers and answers with * multiple m= sections where each m= section maps to one RTCRtpSender and one - * RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both video. This - * will also cause RTCPeerConnection to ignore all but the first a=ssrc lines - * that form a Plan B stream. + * RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both + * video. 
This will also cause RTCPeerConnection) to ignore all but the first a=ssrc + * lines that form a Plan B stream. * * For users who wish to send multiple audio/video streams and need to stay * interoperable with legacy WebRTC implementations or use legacy APIs, @@ -192,24 +197,12 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) BOOL allowCodecSwitching; -/** - * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection - * that it should use the MediaTransportInterface. - */ -@property(nonatomic, assign) BOOL useMediaTransport; - -/** - * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection - * that it should use the MediaTransportInterface for data channels. - */ -@property(nonatomic, assign) BOOL useMediaTransportForDataChannels; - /** * Defines advanced optional cryptographic settings related to SRTP and * frame encryption for native WebRTC. Setting this will overwrite any * options set through the PeerConnectionFactory (which is deprecated). */ -@property(nonatomic, nullable) RTCCryptoOptions *cryptoOptions; +@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCryptoOptions) * cryptoOptions; /** * Time interval between audio RTCP reports. 
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index 7f9f591b74..55abbcdb18 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -20,8 +20,9 @@ #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_identity.h" -@implementation RTCConfiguration +@implementation RTC_OBJC_TYPE (RTCConfiguration) +@synthesize enableDscp = _enableDscp; @synthesize iceServers = _iceServers; @synthesize certificate = _certificate; @synthesize iceTransportPolicy = _iceTransportPolicy; @@ -51,8 +52,6 @@ @implementation RTCConfiguration @synthesize turnCustomizer = _turnCustomizer; @synthesize activeResetSrtpParams = _activeResetSrtpParams; @synthesize allowCodecSwitching = _allowCodecSwitching; -@synthesize useMediaTransport = _useMediaTransport; -@synthesize useMediaTransportForDataChannels = _useMediaTransportForDataChannels; @synthesize cryptoOptions = _cryptoOptions; @synthesize rtcpAudioReportIntervalMs = _rtcpAudioReportIntervalMs; @synthesize rtcpVideoReportIntervalMs = _rtcpVideoReportIntervalMs; @@ -66,9 +65,11 @@ - (instancetype)init { - (instancetype)initWithNativeConfiguration: (const webrtc::PeerConnectionInterface::RTCConfiguration &)config { if (self = [super init]) { + _enableDscp = config.dscp(); NSMutableArray *iceServers = [NSMutableArray array]; for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) { - RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server]; + RTC_OBJC_TYPE(RTCIceServer) *iceServer = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:server]; [iceServers addObject:iceServer]; } _iceServers = iceServers; @@ -76,9 +77,9 @@ - (instancetype)initWithNativeConfiguration: rtc::scoped_refptr native_cert; native_cert = config.certificates[0]; rtc::RTCCertificatePEM native_pem = native_cert->ToPEM(); - _certificate = - [[RTCCertificate alloc] 
initWithPrivateKey:@(native_pem.private_key().c_str()) - certificate:@(native_pem.certificate().c_str())]; + _certificate = [[RTC_OBJC_TYPE(RTCCertificate) alloc] + initWithPrivateKey:@(native_pem.private_key().c_str()) + certificate:@(native_pem.certificate().c_str())]; } _iceTransportPolicy = [[self class] transportPolicyForTransportsType:config.type]; @@ -103,8 +104,6 @@ - (instancetype)initWithNativeConfiguration: _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout; _iceBackupCandidatePairPingInterval = config.ice_backup_candidate_pair_ping_interval; - _useMediaTransport = config.use_media_transport; - _useMediaTransportForDataChannels = config.use_media_transport_for_data_channels; _keyType = RTCEncryptionKeyTypeECDSA; _iceCandidatePoolSize = config.ice_candidate_pool_size; _shouldPruneTurnPorts = config.prune_turn_ports; @@ -120,7 +119,7 @@ - (instancetype)initWithNativeConfiguration: _turnCustomizer = config.turn_customizer; _activeResetSrtpParams = config.active_reset_srtp_params; if (config.crypto_options) { - _cryptoOptions = [[RTCCryptoOptions alloc] + _cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:config.crypto_options->srtp .enable_gcm_crypto_suites srtpEnableAes128Sha1_32CryptoCipher:config.crypto_options->srtp @@ -138,7 +137,7 @@ - (instancetype)initWithNativeConfiguration: } - (NSString *)description { - static NSString *formatString = @"RTCConfiguration: " + static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): " @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n" @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n}\n"; @@ -166,7 +165,7 @@ - (NSString *)description { _disableIPV6OnWiFi, _maxIPv6Networks, _activeResetSrtpParams, - _useMediaTransport]; + _enableDscp]; } #pragma mark - Private @@ -177,7 +176,8 @@ - (NSString *)description { nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration( webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive)); - 
for (RTCIceServer *iceServer in _iceServers) { + nativeConfig->set_dscp(_enableDscp); + for (RTC_OBJC_TYPE(RTCIceServer) * iceServer in _iceServers) { nativeConfig->servers.push_back(iceServer.nativeServer); } nativeConfig->type = @@ -203,8 +203,6 @@ - (NSString *)description { _iceConnectionReceivingTimeout; nativeConfig->ice_backup_candidate_pair_ping_interval = _iceBackupCandidatePairPingInterval; - nativeConfig->use_media_transport = _useMediaTransport; - nativeConfig->use_media_transport_for_data_channels = _useMediaTransportForDataChannels; rtc::KeyType keyType = [[self class] nativeEncryptionKeyTypeForKeyType:_keyType]; if (_certificate != nullptr) { diff --git a/sdk/objc/api/peerconnection/RTCCryptoOptions.h b/sdk/objc/api/peerconnection/RTCCryptoOptions.h index b465bb5a73..759a45e037 100644 --- a/sdk/objc/api/peerconnection/RTCCryptoOptions.h +++ b/sdk/objc/api/peerconnection/RTCCryptoOptions.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN * as Objective-C doesn't support nested structures. */ RTC_OBJC_EXPORT -@interface RTCCryptoOptions : NSObject +@interface RTC_OBJC_TYPE (RTCCryptoOptions) : NSObject /** * Enable GCM crypto suites from RFC 7714 for SRTP. 
GCM will only be used diff --git a/sdk/objc/api/peerconnection/RTCCryptoOptions.mm b/sdk/objc/api/peerconnection/RTCCryptoOptions.mm index a059f75599..fbaa1de58d 100644 --- a/sdk/objc/api/peerconnection/RTCCryptoOptions.mm +++ b/sdk/objc/api/peerconnection/RTCCryptoOptions.mm @@ -10,7 +10,7 @@ #import "RTCCryptoOptions.h" -@implementation RTCCryptoOptions +@implementation RTC_OBJC_TYPE (RTCCryptoOptions) @synthesize srtpEnableGcmCryptoSuites = _srtpEnableGcmCryptoSuites; @synthesize srtpEnableAes128Sha1_32CryptoCipher = _srtpEnableAes128Sha1_32CryptoCipher; diff --git a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h index e327fb4f3e..2cdbdabec6 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h @@ -15,27 +15,29 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTCDataBuffer () +@interface RTC_OBJC_TYPE (RTCDataBuffer) +() -/** - * The native DataBuffer representation of this RTCDatabuffer object. This is - * needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer; + /** + * The native DataBuffer representation of this RTCDatabuffer object. This is + * needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer; /** Initialize an RTCDataBuffer from a native DataBuffer. */ - (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer; @end -@interface RTCDataChannel () +@interface RTC_OBJC_TYPE (RTCDataChannel) +() -/** Initialize an RTCDataChannel from a native DataChannelInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - nativeDataChannel:(rtc::scoped_refptr)nativeDataChannel - NS_DESIGNATED_INITIALIZER; + /** Initialize an RTCDataChannel from a native DataChannelInterface. 
*/ + - (instancetype)initWithFactory + : (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel + : (rtc::scoped_refptr)nativeDataChannel NS_DESIGNATED_INITIALIZER; + (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState: (RTCDataChannelState)state; diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.h b/sdk/objc/api/peerconnection/RTCDataChannel.h index 0cc2de87f2..2d0661f136 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCDataBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCDataBuffer) : NSObject /** NSData representation of the underlying buffer. */ @property(nonatomic, readonly) NSData *data; @@ -34,20 +34,22 @@ RTC_OBJC_EXPORT @end -@class RTCDataChannel; +@class RTC_OBJC_TYPE(RTCDataChannel); RTC_OBJC_EXPORT -@protocol RTCDataChannelDelegate +@protocol RTC_OBJC_TYPE +(RTCDataChannelDelegate) -/** The data channel state changed. */ -- (void)dataChannelDidChangeState:(RTCDataChannel *)dataChannel; + /** The data channel state changed. */ + - (void)dataChannelDidChangeState : (RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel; /** The data channel successfully received a data buffer. */ -- (void)dataChannel:(RTCDataChannel *)dataChannel - didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer; +- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel + didReceiveMessageWithBuffer:(RTC_OBJC_TYPE(RTCDataBuffer) *)buffer; @optional /** The data channel's |bufferedAmount| changed. 
*/ -- (void)dataChannel:(RTCDataChannel *)dataChannel didChangeBufferedAmount:(uint64_t)amount; +- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel + didChangeBufferedAmount:(uint64_t)amount; @end @@ -60,7 +62,7 @@ typedef NS_ENUM(NSInteger, RTCDataChannelState) { }; RTC_OBJC_EXPORT -@interface RTCDataChannel : NSObject +@interface RTC_OBJC_TYPE (RTCDataChannel) : NSObject /** * A label that can be used to distinguish this data channel from other data @@ -115,7 +117,7 @@ RTC_OBJC_EXPORT @property(nonatomic, readonly) uint64_t bufferedAmount; /** The delegate for this data channel. */ -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; - (instancetype)init NS_UNAVAILABLE; @@ -123,7 +125,7 @@ RTC_OBJC_EXPORT - (void)close; /** Attempt to send |data| on this data channel's underlying data transport. */ -- (BOOL)sendData:(RTCDataBuffer *)data; +- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data; @end diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.mm b/sdk/objc/api/peerconnection/RTCDataChannel.mm index 35c009eb96..4a79cefdb4 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.mm +++ b/sdk/objc/api/peerconnection/RTCDataChannel.mm @@ -18,21 +18,21 @@ class DataChannelDelegateAdapter : public DataChannelObserver { public: - DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; } + DataChannelDelegateAdapter(RTC_OBJC_TYPE(RTCDataChannel) * channel) { channel_ = channel; } void OnStateChange() override { [channel_.delegate dataChannelDidChangeState:channel_]; } void OnMessage(const DataBuffer& buffer) override { - RTCDataBuffer *data_buffer = - [[RTCDataBuffer alloc] initWithNativeBuffer:buffer]; + RTC_OBJC_TYPE(RTCDataBuffer) *data_buffer = + [[RTC_OBJC_TYPE(RTCDataBuffer) alloc] initWithNativeBuffer:buffer]; [channel_.delegate dataChannel:channel_ didReceiveMessageWithBuffer:data_buffer]; } void OnBufferedAmountChange(uint64_t previousAmount) override { - id delegate = channel_.delegate; + id 
delegate = channel_.delegate; SEL sel = @selector(dataChannel:didChangeBufferedAmount:); if ([delegate respondsToSelector:sel]) { [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount]; @@ -40,12 +40,11 @@ void OnBufferedAmountChange(uint64_t previousAmount) override { } private: - __weak RTCDataChannel *channel_; + __weak RTC_OBJC_TYPE(RTCDataChannel) * channel_; }; } - -@implementation RTCDataBuffer { +@implementation RTC_OBJC_TYPE (RTCDataBuffer) { std::unique_ptr _dataBuffer; } @@ -83,9 +82,8 @@ - (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer { @end - -@implementation RTCDataChannel { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCDataChannel) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeDataChannel; std::unique_ptr _observer; BOOL _isObserverRegistered; @@ -152,21 +150,20 @@ - (void)close { _nativeDataChannel->Close(); } -- (BOOL)sendData:(RTCDataBuffer *)data { +- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data { return _nativeDataChannel->Send(*data.nativeDataBuffer); } - (NSString *)description { - return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDataChannel):\n%ld\n%@\n%@", (long)self.channelId, self.label, - [[self class] - stringForState:self.readyState]]; + [[self class] stringForState:self.readyState]]; } #pragma mark - Private -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel: (rtc::scoped_refptr)nativeDataChannel { NSParameterAssert(nativeDataChannel); diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h index 244f742ccc..5aef10fcef 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h +++ 
b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h @@ -14,9 +14,10 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCDataChannelConfiguration () +@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) +() -@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit; + @property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit; @end diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h index 96d33f4d72..9459ae0a13 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCDataChannelConfiguration : NSObject +@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) : NSObject /** Set to YES if ordered delivery is required. */ @property(nonatomic, assign) BOOL isOrdered; diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm index 198bfbbaed..bf775b1afd 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCDataChannelConfiguration +@implementation RTC_OBJC_TYPE (RTCDataChannelConfiguration) @synthesize nativeDataChannelInit = _nativeDataChannelInit; diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h b/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h index ec054818ef..49a62164cd 100644 --- a/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h +++ b/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h @@ -14,7 +14,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCDtmfSender : NSObject +@interface RTC_OBJC_TYPE (RTCDtmfSender) : NSObject @property(nonatomic, readonly) rtc::scoped_refptr nativeDtmfSender; diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender.h 
b/sdk/objc/api/peerconnection/RTCDtmfSender.h index 5d86d01892..0f1b6ba4da 100644 --- a/sdk/objc/api/peerconnection/RTCDtmfSender.h +++ b/sdk/objc/api/peerconnection/RTCDtmfSender.h @@ -15,14 +15,15 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@protocol RTCDtmfSender +@protocol RTC_OBJC_TYPE +(RTCDtmfSender) -/** - * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise - * returns false. To be able to send DTMF, the associated RTCRtpSender must be - * able to send packets, and a "telephone-event" codec must be negotiated. - */ -@property(nonatomic, readonly) BOOL canInsertDtmf; + /** + * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise + * returns false. To be able to send DTMF, the associated RTCRtpSender must be + * able to send packets, and a "telephone-event" codec must be negotiated. + */ + @property(nonatomic, readonly) BOOL canInsertDtmf; /** * Queues a task that sends the DTMF tones. The tones parameter is treated diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender.mm b/sdk/objc/api/peerconnection/RTCDtmfSender.mm index 77d0678275..ee3b79cd37 100644 --- a/sdk/objc/api/peerconnection/RTCDtmfSender.mm +++ b/sdk/objc/api/peerconnection/RTCDtmfSender.mm @@ -15,7 +15,7 @@ #include "rtc_base/time_utils.h" -@implementation RTCDtmfSender { +@implementation RTC_OBJC_TYPE (RTCDtmfSender) { rtc::scoped_refptr _nativeDtmfSender; } @@ -48,12 +48,11 @@ - (NSTimeInterval)interToneGap { } - (NSString *)description { - return [NSString - stringWithFormat: - @"RTCDtmfSender {\n remainingTones: %@\n duration: %f sec\n interToneGap: %f sec\n}", - [self remainingTones], - [self duration], - [self interToneGap]]; + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDtmfSender) {\n remainingTones: %@\n " + @"duration: %f sec\n interToneGap: %f sec\n}", + [self remainingTones], + [self duration], + [self interToneGap]]; } #pragma mark - Private @@ -67,7 +66,8 @@ - (instancetype)initWithNativeDtmfSender: 
NSParameterAssert(nativeDtmfSender); if (self = [super init]) { _nativeDtmfSender = nativeDtmfSender; - RTCLogInfo(@"RTCDtmfSender(%p): created DTMF sender: %@", self, self.description); + RTCLogInfo( + @"RTC_OBJC_TYPE(RTCDtmfSender)(%p): created DTMF sender: %@", self, self.description); } return self; } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h index e96ce7bc8e..a078b0aded 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. */ -@interface RTCEncodedImage (Private) +@interface RTC_OBJC_TYPE (RTCEncodedImage) +(Private) -- (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage; + - (instancetype)initWithNativeEncodedImage : (const webrtc::EncodedImage &)encodedImage; - (webrtc::EncodedImage)nativeEncodedImage; @end diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index f1df13e554..b3e0a7bb67 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -13,6 +13,29 @@ #import #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/ref_counted_object.h" + +namespace { +// An implementation of EncodedImageBufferInterface that doesn't perform any copies. +class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface { + public: + static rtc::scoped_refptr Create(NSData *data) { + return new rtc::RefCountedObject(data); + } + const uint8_t *data() const override { return static_cast(data_.bytes); } + // TODO(bugs.webrtc.org/9378): delete this non-const data method. 
+ uint8_t *data() override { + return const_cast(static_cast(data_.bytes)); + } + size_t size() const override { return data_.length; } + + protected: + explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {} + ~ObjCEncodedImageBuffer() {} + + NSData *data_; +}; +} // A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated // objects. @@ -33,9 +56,10 @@ - (instancetype)initWithEncodedImageBuffer: } @end -@implementation RTCEncodedImage (Private) +@implementation RTC_OBJC_TYPE (RTCEncodedImage) +(Private) -- (rtc::scoped_refptr)encodedData { + - (rtc::scoped_refptr)encodedData { RTCWrappedEncodedImageBuffer *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData)); return wrappedBuffer.buffer; @@ -51,8 +75,11 @@ - (void)setEncodedData:(rtc::scoped_refptr) - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage { if (self = [super init]) { + // A reference to the encodedData must be stored so that it's kept alive as long + // self.buffer references its underlying data. + self.encodedData = encodedImage.GetEncodedData(); // Wrap the buffer in NSData without copying, do not take ownership. - self.buffer = [NSData dataWithBytesNoCopy:encodedImage.mutable_data() + self.buffer = [NSData dataWithBytesNoCopy:self.encodedData->data() length:encodedImage.size() freeWhenDone:NO]; self.encodedWidth = rtc::dchecked_cast(encodedImage._encodedWidth); @@ -65,7 +92,6 @@ - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encoded self.encodeFinishMs = encodedImage.timing_.encode_finish_ms; self.frameType = static_cast(encodedImage._frameType); self.rotation = static_cast(encodedImage.rotation_); - self.completeFrame = encodedImage._completeFrame; self.qp = @(encodedImage.qp_); self.contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ? 
RTCVideoContentTypeScreenshare : @@ -77,8 +103,13 @@ - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encoded - (webrtc::EncodedImage)nativeEncodedImage { // Return the pointer without copying. - webrtc::EncodedImage encodedImage( - (uint8_t *)self.buffer.bytes, (size_t)self.buffer.length, (size_t)self.buffer.length); + webrtc::EncodedImage encodedImage; + if (self.encodedData) { + encodedImage.SetEncodedData(self.encodedData); + } else if (self.buffer) { + encodedImage.SetEncodedData(ObjCEncodedImageBuffer::Create(self.buffer)); + } + encodedImage.set_size(self.buffer.length); encodedImage._encodedWidth = rtc::dchecked_cast(self.encodedWidth); encodedImage._encodedHeight = rtc::dchecked_cast(self.encodedHeight); encodedImage.SetTimestamp(self.timeStamp); @@ -89,7 +120,6 @@ - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encoded encodedImage.timing_.encode_finish_ms = self.encodeFinishMs; encodedImage._frameType = webrtc::VideoFrameType(self.frameType); encodedImage.rotation_ = webrtc::VideoRotation(self.rotation); - encodedImage._completeFrame = self.completeFrame; encodedImage.qp_ = self.qp ? self.qp.intValue : -1; encodedImage.content_type_ = (self.contentType == RTCVideoContentTypeScreenshare) ? webrtc::VideoContentType::SCREENSHARE : diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.h b/sdk/objc/api/peerconnection/RTCFieldTrials.h index 61443e8bb2..7477ad020f 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.h +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.h @@ -21,6 +21,7 @@ RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03AdvertisedKey; RTC_EXTERN NSString * const kRTCFieldTrialFlexFec03Key; RTC_EXTERN NSString * const kRTCFieldTrialH264HighProfileKey; RTC_EXTERN NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey; +RTC_EXTERN NSString *const kRTCFieldTrialUseNWPathMonitor; /** The valid value for field trials above. 
*/ RTC_EXTERN NSString * const kRTCFieldTrialEnabledValue; diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 4a30db2f70..c52dfe4e45 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -25,6 +25,7 @@ NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey = @"WebRTC-Audio-MinimizeResamplingOnMobile"; +NSString *const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor"; NSString * const kRTCFieldTrialEnabledValue = @"Enabled"; static std::unique_ptr gFieldTrialInitString; diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.h b/sdk/objc/api/peerconnection/RTCFileLogger.h index cd5c1c466b..853e673a05 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.h +++ b/sdk/objc/api/peerconnection/RTCFileLogger.h @@ -34,7 +34,7 @@ NS_ASSUME_NONNULL_BEGIN // For kRTCFileLoggerTypeApp, the oldest log is overwritten. // This class is not threadsafe. RTC_OBJC_EXPORT -@interface RTCFileLogger : NSObject +@interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject // The severity level to capture. The default is kRTCFileLoggerSeverityInfo. @property(nonatomic, assign) RTCFileLoggerSeverity severity; diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.mm b/sdk/objc/api/peerconnection/RTCFileLogger.mm index 2532fcf36f..9562245611 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.mm +++ b/sdk/objc/api/peerconnection/RTCFileLogger.mm @@ -21,7 +21,7 @@ NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB. 
const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log"; -@implementation RTCFileLogger { +@implementation RTC_OBJC_TYPE (RTCFileLogger) { BOOL _hasStarted; NSString *_dirPath; NSUInteger _maxFileSize; diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h b/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h index 8c9156c402..409e16b608 100644 --- a/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h +++ b/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h @@ -16,13 +16,14 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCIceCandidate () - -/** - * The native IceCandidateInterface representation of this RTCIceCandidate - * object. This is needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) std::unique_ptr nativeCandidate; +@interface RTC_OBJC_TYPE (RTCIceCandidate) +() + + /** + * The native IceCandidateInterface representation of this RTCIceCandidate + * object. This is needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, readonly) std::unique_ptr nativeCandidate; /** * Initialize an RTCIceCandidate from a native IceCandidateInterface. 
No diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate.h b/sdk/objc/api/peerconnection/RTCIceCandidate.h index 3e305cc418..f84843af6c 100644 --- a/sdk/objc/api/peerconnection/RTCIceCandidate.h +++ b/sdk/objc/api/peerconnection/RTCIceCandidate.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCIceCandidate : NSObject +@interface RTC_OBJC_TYPE (RTCIceCandidate) : NSObject /** * If present, the identifier of the "media stream identification" for the media diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate.mm b/sdk/objc/api/peerconnection/RTCIceCandidate.mm index cbae3f3ddf..48385ef5b4 100644 --- a/sdk/objc/api/peerconnection/RTCIceCandidate.mm +++ b/sdk/objc/api/peerconnection/RTCIceCandidate.mm @@ -15,7 +15,7 @@ #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" -@implementation RTCIceCandidate +@implementation RTC_OBJC_TYPE (RTCIceCandidate) @synthesize sdpMid = _sdpMid; @synthesize sdpMLineIndex = _sdpMLineIndex; @@ -35,7 +35,7 @@ - (instancetype)initWithSdp:(NSString *)sdp } - (NSString *)description { - return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceCandidate):\n%@\n%d\n%@\n%@", _sdpMid, _sdpMLineIndex, _sdp, @@ -50,7 +50,7 @@ - (instancetype)initWithNativeCandidate: std::string sdp; candidate->ToString(&sdp); - RTCIceCandidate *rtcCandidate = + RTC_OBJC_TYPE(RTCIceCandidate) *rtcCandidate = [self initWithSdp:[NSString stringForStdString:sdp] sdpMLineIndex:candidate->sdp_mline_index() sdpMid:[NSString stringForStdString:candidate->sdp_mid()]]; diff --git a/sdk/objc/api/peerconnection/RTCIceServer+Private.h b/sdk/objc/api/peerconnection/RTCIceServer+Private.h index 53fbb45dc2..3eee819965 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer+Private.h +++ b/sdk/objc/api/peerconnection/RTCIceServer+Private.h @@ -14,13 +14,14 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCIceServer () - -/** - * IceServer struct representation of this 
RTCIceServer object's data. - * This is needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer; +@interface RTC_OBJC_TYPE (RTCIceServer) +() + + /** + * IceServer struct representation of this RTCIceServer object's data. + * This is needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer; /** Initialize an RTCIceServer from a native IceServer. */ - (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer; diff --git a/sdk/objc/api/peerconnection/RTCIceServer.h b/sdk/objc/api/peerconnection/RTCIceServer.h index ab5fc4a9ed..dd66c61a0b 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer.h +++ b/sdk/objc/api/peerconnection/RTCIceServer.h @@ -20,7 +20,7 @@ typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) { NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCIceServer : NSObject +@interface RTC_OBJC_TYPE (RTCIceServer) : NSObject /** URI(s) for this server represented as NSStrings. 
*/ @property(nonatomic, readonly) NSArray *urlStrings; diff --git a/sdk/objc/api/peerconnection/RTCIceServer.mm b/sdk/objc/api/peerconnection/RTCIceServer.mm index 2138e4c16a..19a0a7e9e8 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer.mm +++ b/sdk/objc/api/peerconnection/RTCIceServer.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCIceServer +@implementation RTC_OBJC_TYPE (RTCIceServer) @synthesize urlStrings = _urlStrings; @synthesize username = _username; @@ -97,7 +97,7 @@ - (instancetype)initWithURLStrings:(NSArray *)urlStrings } - (NSString *)description { - return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@\n%@\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceServer):\n%@\n%@\n%@\n%@\n%@\n%@\n%@", _urlStrings, _username, _credential, diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h index d87659d4d6..faa7962821 100644 --- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h +++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCLegacyStatsReport () +@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) +() -/** Initialize an RTCLegacyStatsReport object from a native StatsReport. */ -- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport; + /** Initialize an RTCLegacyStatsReport object from a native StatsReport. */ + - (instancetype)initWithNativeReport : (const webrtc::StatsReport &)nativeReport; @end diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h index 85f2b8fb3d..b3bd12c5d7 100644 --- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h +++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** This does not currently conform to the spec. 
*/ RTC_OBJC_EXPORT -@interface RTCLegacyStatsReport : NSObject +@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) : NSObject /** Time since 1970-01-01T00:00:00Z in milliseconds. */ @property(nonatomic, readonly) CFTimeInterval timestamp; diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm index 89e1b85a69..bd7a1ad9c9 100644 --- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm +++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm @@ -15,7 +15,7 @@ #include "rtc_base/checks.h" -@implementation RTCLegacyStatsReport +@implementation RTC_OBJC_TYPE (RTCLegacyStatsReport) @synthesize timestamp = _timestamp; @synthesize type = _type; @@ -23,7 +23,7 @@ @implementation RTCLegacyStatsReport @synthesize values = _values; - (NSString *)description { - return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCLegacyStatsReport):\n%@\n%@\n%f\n%@", _reportId, _type, _timestamp, diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h b/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h index b3e1b10a88..97eee8307d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h @@ -16,13 +16,14 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMediaConstraints () - -/** - * A MediaConstraints representation of this RTCMediaConstraints object. This is - * needed to pass to the underlying C++ APIs. - */ -- (std::unique_ptr)nativeConstraints; +@interface RTC_OBJC_TYPE (RTCMediaConstraints) +() + + /** + * A MediaConstraints representation of this RTCMediaConstraints object. This is + * needed to pass to the underlying C++ APIs. 
+ */ + - (std::unique_ptr)nativeConstraints; /** Return a native Constraints object representing these constraints */ + (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints: diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.h b/sdk/objc/api/peerconnection/RTCMediaConstraints.h index 5c1a12e33a..c5baf20c1d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints.h +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.h @@ -31,7 +31,7 @@ RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue; RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse; RTC_OBJC_EXPORT -@interface RTCMediaConstraints : NSObject +@interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm index bfdbdde6c5..0f46e4b8fe 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm @@ -28,7 +28,7 @@ NSString *const kRTCMediaConstraintsValueTrue = @(webrtc::MediaConstraints::kValueTrue); NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kValueFalse); -@implementation RTCMediaConstraints { +@implementation RTC_OBJC_TYPE (RTCMediaConstraints) { NSDictionary *_mandatory; NSDictionary *_optional; } @@ -47,9 +47,8 @@ - (instancetype)initWithMandatoryConstraints: } - (NSString *)description { - return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@", - _mandatory, - _optional]; + return [NSString + stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaConstraints):\n%@\n%@", _mandatory, _optional]; } #pragma mark - Private diff --git a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h index 7d69aaae7c..edda892e50 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h @@ -14,18 +14,20 @@ 
NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); typedef NS_ENUM(NSInteger, RTCMediaSourceType) { RTCMediaSourceTypeAudio, RTCMediaSourceTypeVideo, }; -@interface RTCMediaSource () +@interface RTC_OBJC_TYPE (RTCMediaSource) +() -@property(nonatomic, readonly) rtc::scoped_refptr nativeMediaSource; + @property(nonatomic, + readonly) rtc::scoped_refptr nativeMediaSource; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.h b/sdk/objc/api/peerconnection/RTCMediaSource.h index 838c783208..ba19c2a352 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource.h +++ b/sdk/objc/api/peerconnection/RTCMediaSource.h @@ -22,7 +22,7 @@ typedef NS_ENUM(NSInteger, RTCSourceState) { NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCMediaSource : NSObject +@interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject /** The current state of the RTCMediaSource. 
*/ @property(nonatomic, readonly) RTCSourceState state; diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.mm b/sdk/objc/api/peerconnection/RTCMediaSource.mm index 6ec41c3b50..61472a782a 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource.mm +++ b/sdk/objc/api/peerconnection/RTCMediaSource.mm @@ -12,14 +12,14 @@ #include "rtc_base/checks.h" -@implementation RTCMediaSource { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCMediaSource) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; RTCMediaSourceType _type; } @synthesize nativeMediaSource = _nativeMediaSource; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type { RTC_DCHECK(factory); diff --git a/sdk/objc/api/peerconnection/RTCMediaStream+Private.h b/sdk/objc/api/peerconnection/RTCMediaStream+Private.h index 23149ce56e..6c8a602766 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaStream+Private.h @@ -14,19 +14,22 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMediaStream () +@interface RTC_OBJC_TYPE (RTCMediaStream) +() -/** - * MediaStreamInterface representation of this RTCMediaStream object. This is - * needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeMediaStream; + /** + * MediaStreamInterface representation of this RTCMediaStream object. This is + * needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeMediaStream; /** Initialize an RTCMediaStream with an id. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory streamId:(NSString *)streamId; +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + streamId:(NSString *)streamId; /** Initialize an RTCMediaStream from a native MediaStreamInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaStream:(rtc::scoped_refptr)nativeMediaStream; @end diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.h b/sdk/objc/api/peerconnection/RTCMediaStream.h index bb9bec690a..2d56f15c7d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream.h +++ b/sdk/objc/api/peerconnection/RTCMediaStream.h @@ -14,18 +14,18 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCAudioTrack; -@class RTCPeerConnectionFactory; -@class RTCVideoTrack; +@class RTC_OBJC_TYPE(RTCAudioTrack); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@class RTC_OBJC_TYPE(RTCVideoTrack); RTC_OBJC_EXPORT -@interface RTCMediaStream : NSObject +@interface RTC_OBJC_TYPE (RTCMediaStream) : NSObject /** The audio tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *audioTracks; +@property(nonatomic, strong, readonly) NSArray *audioTracks; /** The video tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *videoTracks; +@property(nonatomic, strong, readonly) NSArray *videoTracks; /** An identifier for this media stream. */ @property(nonatomic, readonly) NSString *streamId; @@ -33,16 +33,16 @@ RTC_OBJC_EXPORT - (instancetype)init NS_UNAVAILABLE; /** Adds the given audio track to this media stream. */ -- (void)addAudioTrack:(RTCAudioTrack *)audioTrack; +- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack; /** Adds the given video track to this media stream. 
*/ -- (void)addVideoTrack:(RTCVideoTrack *)videoTrack; +- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack; /** Removes the given audio track to this media stream. */ -- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack; +- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack; /** Removes the given video track to this media stream. */ -- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack; +- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack; @end diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.mm b/sdk/objc/api/peerconnection/RTCMediaStream.mm index c1a402a648..a6292b547c 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStream.mm @@ -18,14 +18,14 @@ #import "RTCVideoTrack+Private.h" #import "helpers/NSString+StdString.h" -@implementation RTCMediaStream { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCMediaStream) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; NSMutableArray *_audioTracks; NSMutableArray *_videoTracks; rtc::scoped_refptr _nativeMediaStream; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory streamId:(NSString *)streamId { NSParameterAssert(factory); NSParameterAssert(streamId.length); @@ -35,11 +35,11 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return [self initWithFactory:factory nativeMediaStream:stream]; } -- (NSArray *)audioTracks { +- (NSArray *)audioTracks { return [_audioTracks copy]; } -- (NSArray *)videoTracks { +- (NSArray *)videoTracks { return [_videoTracks copy]; } @@ -47,32 +47,32 @@ - (NSString *)streamId { return [NSString stringForStdString:_nativeMediaStream->id()]; } -- (void)addAudioTrack:(RTCAudioTrack *)audioTrack { +- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack { if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) { [_audioTracks 
addObject:audioTrack]; } } -- (void)addVideoTrack:(RTCVideoTrack *)videoTrack { +- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack { if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) { [_videoTracks addObject:videoTrack]; } } -- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack { +- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack { NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack]; NSAssert(index != NSNotFound, - @"|removeAudioTrack| called on unexpected RTCAudioTrack"); + @"|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)"); if (index != NSNotFound && _nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) { [_audioTracks removeObjectAtIndex:index]; } } -- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack { +- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack { NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack]; NSAssert(index != NSNotFound, - @"|removeVideoTrack| called on unexpected RTCVideoTrack"); + @"|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)"); if (index != NSNotFound && _nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) { [_videoTracks removeObjectAtIndex:index]; @@ -80,7 +80,7 @@ - (void)removeVideoTrack:(RTCVideoTrack *)videoTrack { } - (NSString *)description { - return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStream):\n%@\nA=%lu\nV=%lu", self.streamId, (unsigned long)self.audioTracks.count, (unsigned long)self.videoTracks.count]; @@ -92,7 +92,7 @@ - (NSString *)description { return _nativeMediaStream; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaStream: (rtc::scoped_refptr)nativeMediaStream { NSParameterAssert(nativeMediaStream); @@ -108,15 +108,19 @@ - 
(instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory for (auto &track : audioTracks) { RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio; - RTCAudioTrack *audioTrack = - [[RTCAudioTrack alloc] initWithFactory:_factory nativeTrack:track type:type]; + RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack = + [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory + nativeTrack:track + type:type]; [_audioTracks addObject:audioTrack]; } for (auto &track : videoTracks) { RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo; - RTCVideoTrack *videoTrack = - [[RTCVideoTrack alloc] initWithFactory:_factory nativeTrack:track type:type]; + RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack = + [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory + nativeTrack:track + type:type]; [_videoTracks addObject:videoTrack]; } } diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h index 176bb73d85..ee51e27b2d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h @@ -19,11 +19,13 @@ typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) { NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTCMediaStreamTrack () +@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) +() -@property(nonatomic, readonly) RTCPeerConnectionFactory *factory; + @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * + factory; /** * The native MediaStreamTrackInterface passed in or created during @@ -34,14 +36,14 @@ NS_ASSUME_NONNULL_BEGIN /** * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack; -- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track; +- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track; + (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState: (RTCMediaStreamTrackState)state; @@ -51,9 +53,9 @@ NS_ASSUME_NONNULL_BEGIN + (NSString *)stringForState:(RTCMediaStreamTrackState)state; -+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack: - (rtc::scoped_refptr)nativeTrack - factory:(RTCPeerConnectionFactory *)factory; ++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *) + mediaTrackForNativeTrack:(rtc::scoped_refptr)nativeTrack + factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory; @end diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h index d1ea0f28f3..2200122ccd 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h @@ -26,7 +26,7 @@ RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio; RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo; RTC_OBJC_EXPORT -@interface RTCMediaStreamTrack : NSObject +@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject /** * The kind of track. 
For example, "audio" if this track represents an audio diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm index 07992a0202..f1e128ca60 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm @@ -19,8 +19,8 @@ NSString * const kRTCMediaStreamTrackKindVideo = @(webrtc::MediaStreamTrackInterface::kVideoKind); -@implementation RTCMediaStreamTrack { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeTrack; RTCMediaStreamTrackType _type; } @@ -47,7 +47,7 @@ - (RTCMediaStreamTrackState)readyState { - (NSString *)description { NSString *readyState = [[self class] stringForState:self.readyState]; - return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStreamTrack):\n%@\n%@\n%@\n%@", self.kind, self.trackId, self.isEnabled ? 
@"enabled" : @"disabled", @@ -61,7 +61,7 @@ - (BOOL)isEqual:(id)object { if (![object isMemberOfClass:[self class]]) { return NO; } - return [self isEqualToTrack:(RTCMediaStreamTrack *)object]; + return [self isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)object]; } - (NSUInteger)hash { @@ -76,7 +76,7 @@ - (NSUInteger)hash { @synthesize factory = _factory; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack type:(RTCMediaStreamTrackType)type { NSParameterAssert(nativeTrack); @@ -89,7 +89,7 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack { NSParameterAssert(nativeTrack); if (nativeTrack->kind() == @@ -103,7 +103,7 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return nil; } -- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track { +- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { if (!track) { return NO; } @@ -139,21 +139,22 @@ + (NSString *)stringForState:(RTCMediaStreamTrackState)state { } } -+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack: - (rtc::scoped_refptr)nativeTrack - factory:(RTCPeerConnectionFactory *)factory { ++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *) + mediaTrackForNativeTrack:(rtc::scoped_refptr)nativeTrack + factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory { NSParameterAssert(nativeTrack); NSParameterAssert(factory); if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) { - return [[RTCAudioTrack alloc] initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeAudio]; + return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack + 
type:RTCMediaStreamTrackTypeAudio]; } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) { - return [[RTCVideoTrack alloc] initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeVideo]; + return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack + type:RTCMediaStreamTrackTypeVideo]; } else { - return [[RTCMediaStreamTrack alloc] initWithFactory:factory nativeTrack:nativeTrack]; + return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack]; } } diff --git a/sdk/objc/api/peerconnection/RTCMetrics.h b/sdk/objc/api/peerconnection/RTCMetrics.h index 6629fdacec..fddbb27c90 100644 --- a/sdk/objc/api/peerconnection/RTCMetrics.h +++ b/sdk/objc/api/peerconnection/RTCMetrics.h @@ -20,4 +20,4 @@ RTC_EXTERN void RTCEnableMetrics(void); /** Gets and clears native histograms. */ -RTC_EXTERN NSArray* RTCGetAndResetMetrics(void); +RTC_EXTERN NSArray* RTCGetAndResetMetrics(void); diff --git a/sdk/objc/api/peerconnection/RTCMetrics.mm b/sdk/objc/api/peerconnection/RTCMetrics.mm index 8ca9d965bd..b3ad352084 100644 --- a/sdk/objc/api/peerconnection/RTCMetrics.mm +++ b/sdk/objc/api/peerconnection/RTCMetrics.mm @@ -16,7 +16,7 @@ void RTCEnableMetrics(void) { webrtc::metrics::Enable(); } -NSArray *RTCGetAndResetMetrics(void) { +NSArray *RTCGetAndResetMetrics(void) { std::map> histograms; webrtc::metrics::GetAndReset(&histograms); @@ -24,8 +24,8 @@ void RTCEnableMetrics(void) { NSMutableArray *metrics = [NSMutableArray arrayWithCapacity:histograms.size()]; for (auto const &histogram : histograms) { - RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc] - initWithNativeSampleInfo:*histogram.second]; + RTC_OBJC_TYPE(RTCMetricsSampleInfo) *metric = + [[RTC_OBJC_TYPE(RTCMetricsSampleInfo) alloc] initWithNativeSampleInfo:*histogram.second]; [metrics addObject:metric]; } return metrics; diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h 
b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h index c465b1c756..e4aa41f6c7 100644 --- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h +++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMetricsSampleInfo () +@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) +() -/** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */ -- (instancetype)initWithNativeSampleInfo:(const webrtc::metrics::SampleInfo &)info; + /** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */ + - (instancetype)initWithNativeSampleInfo : (const webrtc::metrics::SampleInfo &)info; @end diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h index cd38ab9a91..47a877b6fb 100644 --- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h +++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCMetricsSampleInfo : NSObject +@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) : NSObject /** * Example of RTCMetricsSampleInfo: diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm index a4937fbeac..e4be94e90a 100644 --- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm +++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCMetricsSampleInfo +@implementation RTC_OBJC_TYPE (RTCMetricsSampleInfo) @synthesize name = _name; @synthesize min = _min; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm index 6c84fa3f61..1ded45d670 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm @@ -14,10 +14,12 @@ #import "RTCDataChannelConfiguration+Private.h" 
#import "helpers/NSString+StdString.h" -@implementation RTCPeerConnection (DataChannel) +@implementation RTC_OBJC_TYPE (RTCPeerConnection) +(DataChannel) -- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label - configuration:(RTCDataChannelConfiguration *)configuration { + - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel + : (NSString *)label configuration + : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration { std::string labelString = [NSString stdStringForString:label]; const webrtc::DataChannelInit nativeInit = configuration.nativeDataChannelInit; @@ -27,7 +29,8 @@ - (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label if (!dataChannel) { return nil; } - return [[RTCDataChannel alloc] initWithFactory:self.factory nativeDataChannel:dataChannel]; + return [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:self.factory + nativeDataChannel:dataChannel]; } @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h index ce08cd5f6a..735881025a 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h @@ -22,7 +22,7 @@ namespace webrtc { */ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { public: - PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection); + PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection); ~PeerConnectionDelegateAdapter() override; void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override; @@ -58,15 +58,17 @@ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { void OnRemoveTrack(rtc::scoped_refptr receiver) override; private: - __weak RTCPeerConnection *peer_connection_; + __weak RTC_OBJC_TYPE(RTCPeerConnection) * peer_connection_; }; } // namespace webrtc -@interface RTCPeerConnection () +@interface RTC_OBJC_TYPE (RTCPeerConnection) +() -/** The 
factory used to create this RTCPeerConnection */ -@property(nonatomic, readonly) RTCPeerConnectionFactory *factory; + /** The factory used to create this RTCPeerConnection */ + @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * + factory; /** The native PeerConnectionInterface created during construction. */ @property(nonatomic, readonly) rtc::scoped_refptr @@ -75,10 +77,20 @@ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { /** Initialize an RTCPeerConnection with a configuration, constraints, and * delegate. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - configuration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints - delegate:(nullable id)delegate +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate:(nullable id)delegate; + +/** Initialize an RTCPeerConnection with a configuration, constraints, + * delegate and PeerConnectionDependencies. 
+ */ +- (instancetype)initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + dependencies: + (std::unique_ptr)dependencies + delegate:(nullable id)delegate NS_DESIGNATED_INITIALIZER; + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState: diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm index e2965ebab7..8ded55200e 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm @@ -28,7 +28,8 @@ void OnStatsDelivered(const rtc::scoped_refptr &report) override { RTC_DCHECK(completion_handler_); - RTCStatisticsReport *statisticsReport = [[RTCStatisticsReport alloc] initWithReport:*report]; + RTC_OBJC_TYPE(RTCStatisticsReport) *statisticsReport = + [[RTC_OBJC_TYPE(RTCStatisticsReport) alloc] initWithReport:*report]; completion_handler_(statisticsReport); completion_handler_ = nil; } @@ -39,8 +40,8 @@ void OnStatsDelivered(const rtc::scoped_refptr &report) ov class StatsObserverAdapter : public StatsObserver { public: - StatsObserverAdapter(void (^completionHandler) - (NSArray *stats)) { + StatsObserverAdapter( + void (^completionHandler)(NSArray *stats)) { completion_handler_ = completionHandler; } @@ -50,8 +51,8 @@ void OnComplete(const StatsReports& reports) override { RTC_DCHECK(completion_handler_); NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()]; for (const auto* report : reports) { - RTCLegacyStatsReport *statsReport = - [[RTCLegacyStatsReport alloc] initWithNativeReport:*report]; + RTC_OBJC_TYPE(RTCLegacyStatsReport) *statsReport = + [[RTC_OBJC_TYPE(RTCLegacyStatsReport) alloc] initWithNativeReport:*report]; [stats addObject:statsReport]; } completion_handler_(stats); @@ -59,20 +60,21 @@ void OnComplete(const StatsReports& 
reports) override { } private: - void (^completion_handler_)(NSArray *stats); + void (^completion_handler_)(NSArray *stats); }; } // namespace webrtc -@implementation RTCPeerConnection (Stats) +@implementation RTC_OBJC_TYPE (RTCPeerConnection) +(Stats) -- (void)statisticsForSender:(RTCRtpSender *)sender - completionHandler:(RTCStatisticsCompletionHandler)completionHandler { + - (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler + : (RTCStatisticsCompletionHandler)completionHandler { rtc::scoped_refptr collector( new rtc::RefCountedObject(completionHandler)); self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector); } -- (void)statisticsForReceiver:(RTCRtpReceiver *)receiver +- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver completionHandler:(RTCStatisticsCompletionHandler)completionHandler { rtc::scoped_refptr collector( new rtc::RefCountedObject(completionHandler)); @@ -85,10 +87,10 @@ - (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completi self.nativePeerConnection->GetStats(collector); } -- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack +- (void)statsForTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel completionHandler: - (void (^)(NSArray *stats))completionHandler { + (void (^)(NSArray *stats))completionHandler { rtc::scoped_refptr observer( new rtc::RefCountedObject (completionHandler)); diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.h b/sdk/objc/api/peerconnection/RTCPeerConnection.h index 012295c241..db252acbc5 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.h @@ -12,21 +12,21 @@ #import "RTCMacros.h" -@class RTCConfiguration; -@class RTCDataChannel; -@class RTCDataChannelConfiguration; -@class RTCIceCandidate; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCMediaStreamTrack; -@class 
RTCPeerConnectionFactory; -@class RTCRtpReceiver; -@class RTCRtpSender; -@class RTCRtpTransceiver; -@class RTCRtpTransceiverInit; -@class RTCSessionDescription; -@class RTCStatisticsReport; -@class RTCLegacyStatsReport; +@class RTC_OBJC_TYPE(RTCConfiguration); +@class RTC_OBJC_TYPE(RTCDataChannel); +@class RTC_OBJC_TYPE(RTCDataChannelConfiguration); +@class RTC_OBJC_TYPE(RTCIceCandidate); +@class RTC_OBJC_TYPE(RTCMediaConstraints); +@class RTC_OBJC_TYPE(RTCMediaStream); +@class RTC_OBJC_TYPE(RTCMediaStreamTrack); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@class RTC_OBJC_TYPE(RTCRtpReceiver); +@class RTC_OBJC_TYPE(RTCRtpSender); +@class RTC_OBJC_TYPE(RTCRtpTransceiver); +@class RTC_OBJC_TYPE(RTCRtpTransceiverInit); +@class RTC_OBJC_TYPE(RTCSessionDescription); +@class RTC_OBJC_TYPE(RTCStatisticsReport); +@class RTC_OBJC_TYPE(RTCLegacyStatsReport); typedef NS_ENUM(NSInteger, RTCRtpMediaType); @@ -81,45 +81,49 @@ typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) { RTCStatsOutputLevelDebug, }; -@class RTCPeerConnection; +@class RTC_OBJC_TYPE(RTCPeerConnection); RTC_OBJC_EXPORT -@protocol RTCPeerConnectionDelegate +@protocol RTC_OBJC_TYPE +(RTCPeerConnectionDelegate) -/** Called when the SignalingState changed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeSignalingState:(RTCSignalingState)stateChanged; + /** Called when the SignalingState changed. */ + - (void)peerConnection + : (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState + : (RTCSignalingState)stateChanged; /** Called when media is received on a new stream from remote peer. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Called when a remote peer closes a stream. * This is not called when RTCSdpSemanticsUnifiedPlan is specified. 
*/ -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Called when negotiation is needed, for example ICE has restarted. */ -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection; +- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection; /** Called any time the IceConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState; /** Called any time the IceGatheringState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState; /** New ice candidate has been found. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didGenerateIceCandidate:(RTCIceCandidate *)candidate; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate; /** Called when a group of local Ice candidates have been removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveIceCandidates:(NSArray *)candidates; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveIceCandidates:(NSArray *)candidates; /** New data channel has been opened. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didOpenDataChannel:(RTCDataChannel *)dataChannel; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel; /** Called when signaling indicates a transceiver will be receiving media from * the remote endpoint. 
@@ -128,72 +132,72 @@ RTC_OBJC_EXPORT @optional /** Called any time the IceConnectionState changes following standardized * transition. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeStandardizedIceConnectionState:(RTCIceConnectionState)newState; /** Called any time the PeerConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeConnectionState:(RTCPeerConnectionState)newState; -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver; /** Called when a receiver and its track are created. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didAddReceiver:(RTCRtpReceiver *)rtpReceiver - streams:(NSArray *)mediaStreams; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didAddReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver + streams:(NSArray *)mediaStreams; /** Called when the receiver and its track are removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver; /** Called when the selected ICE candidate pair is changed. 
*/ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeLocalCandidate:(RTCIceCandidate *)local - remoteCandidate:(RTCIceCandidate *)remote +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local + remoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote lastReceivedMs:(int)lastDataReceivedMs changeReason:(NSString *)reason; @end RTC_OBJC_EXPORT -@interface RTCPeerConnection : NSObject +@interface RTC_OBJC_TYPE (RTCPeerConnection) : NSObject /** The object that will be notifed about events such as state changes and * streams being added or removed. */ -@property(nonatomic, weak, nullable) id delegate; +@property(nonatomic, weak, nullable) id delegate; /** This property is not available with RTCSdpSemanticsUnifiedPlan. Please use * |senders| instead. */ -@property(nonatomic, readonly) NSArray *localStreams; -@property(nonatomic, readonly, nullable) RTCSessionDescription *localDescription; -@property(nonatomic, readonly, nullable) RTCSessionDescription *remoteDescription; +@property(nonatomic, readonly) NSArray *localStreams; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * localDescription; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * remoteDescription; @property(nonatomic, readonly) RTCSignalingState signalingState; @property(nonatomic, readonly) RTCIceConnectionState iceConnectionState; @property(nonatomic, readonly) RTCPeerConnectionState connectionState; @property(nonatomic, readonly) RTCIceGatheringState iceGatheringState; -@property(nonatomic, readonly, copy) RTCConfiguration *configuration; +@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * configuration; /** Gets all RTCRtpSenders associated with this peer connection. * Note: reading this property returns different instances of RTCRtpSender. * Use isEqual: instead of == to compare RTCRtpSender instances. 
*/ -@property(nonatomic, readonly) NSArray *senders; +@property(nonatomic, readonly) NSArray *senders; /** Gets all RTCRtpReceivers associated with this peer connection. * Note: reading this property returns different instances of RTCRtpReceiver. * Use isEqual: instead of == to compare RTCRtpReceiver instances. */ -@property(nonatomic, readonly) NSArray *receivers; +@property(nonatomic, readonly) NSArray *receivers; /** Gets all RTCRtpTransceivers associated with this peer connection. * Note: reading this property returns different instances of - * RTCRtpTransceiver. Use isEqual: instead of == to compare RTCRtpTransceiver - * instances. - * This is only available with RTCSdpSemanticsUnifiedPlan specified. + * RTCRtpTransceiver. Use isEqual: instead of == to compare + * RTCRtpTransceiver instances. This is only available with + * RTCSdpSemanticsUnifiedPlan specified. */ -@property(nonatomic, readonly) NSArray *transceivers; +@property(nonatomic, readonly) NSArray *transceivers; - (instancetype)init NS_UNAVAILABLE; @@ -203,38 +207,39 @@ RTC_OBJC_EXPORT * new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies * cannot be changed with this method. */ -- (BOOL)setConfiguration:(RTCConfiguration *)configuration; +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration; /** Terminate all media and close the transport. */ - (void)close; /** Provide a remote candidate to the ICE Agent. */ -- (void)addIceCandidate:(RTCIceCandidate *)candidate; +- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate; /** Remove a group of remote candidates from the ICE Agent. */ -- (void)removeIceCandidates:(NSArray *)candidates; +- (void)removeIceCandidates:(NSArray *)candidates; /** Add a new media stream to be sent on this peer connection. * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use * addTrack instead. 
*/ -- (void)addStream:(RTCMediaStream *)stream; +- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Remove the given media stream from this peer connection. * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use * removeTrack instead. */ -- (void)removeStream:(RTCMediaStream *)stream; +- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Add a new media stream track to be sent on this peer connection, and return - * the newly created RTCRtpSender. The RTCRtpSender will be associated with - * the streams specified in the |streamIds| list. + * the newly created RTCRtpSender. The RTCRtpSender will be + * associated with the streams specified in the |streamIds| list. * * Errors: If an error occurs, returns nil. An error can occur if: * - A sender already exists for the track. * - The peer connection is closed. */ -- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray *)streamIds; +- (RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + streamIds:(NSArray *)streamIds; /** With PlanB semantics, removes an RTCRtpSender from this peer connection. * @@ -243,7 +248,7 @@ RTC_OBJC_EXPORT * * Returns YES on success. */ -- (BOOL)removeTrack:(RTCRtpSender *)sender; +- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender; /** addTransceiver creates a new RTCRtpTransceiver and adds it to the set of * transceivers. Adding a transceiver will cause future calls to CreateOffer @@ -266,33 +271,37 @@ RTC_OBJC_EXPORT * of the transceiver (and sender/receiver) will be derived from the kind of * the track. 
*/ -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track; -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track - init:(RTCRtpTransceiverInit *)init; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack: + (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *) + addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init; /** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio * or RTCRtpMediaTypeVideo. */ -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType; -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType - init:(RTCRtpTransceiverInit *)init; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType + init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *) + init; /** Generate an SDP offer. */ -- (void)offerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp, +- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler:(nullable void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * _Nullable sdp, NSError *_Nullable error))completionHandler; /** Generate an SDP answer. */ -- (void)answerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp, - NSError *_Nullable error))completionHandler; +- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler: + (nullable void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * _Nullable sdp, + NSError *_Nullable error))completionHandler; /** Apply the supplied RTCSessionDescription as the local description. 
*/ -- (void)setLocalDescription:(RTCSessionDescription *)sdp +- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler; /** Apply the supplied RTCSessionDescription as the remote description. */ -- (void)setRemoteDescription:(RTCSessionDescription *)sdp +- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler; /** Limits the bandwidth allocated for all RTP streams sent by this @@ -308,37 +317,45 @@ RTC_OBJC_EXPORT - (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes; - (void)stopRtcEventLog; +- (int32_t)startRecorder:(int32_t)dir path:(NSString*)path; +- (int32_t)stopRecorder:(int32_t)dir; + @end -@interface RTCPeerConnection (Media) +@interface RTC_OBJC_TYPE (RTCPeerConnection) +(Media) -/** Create an RTCRtpSender with the specified kind and media stream ID. - * See RTCMediaStreamTrack.h for available kinds. - * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use - * addTransceiver instead. - */ -- (RTCRtpSender *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId; + /** Create an RTCRtpSender with the specified kind and media stream ID. + * See RTCMediaStreamTrack.h for available kinds. + * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use + * addTransceiver instead. + */ + - (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind : (NSString *)kind streamId + : (NSString *)streamId; @end -@interface RTCPeerConnection (DataChannel) +@interface RTC_OBJC_TYPE (RTCPeerConnection) +(DataChannel) -/** Create a new data channel with the given label and configuration. */ -- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label - configuration:(RTCDataChannelConfiguration *)configuration; + /** Create a new data channel with the given label and configuration. 
*/ + - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel + : (NSString *)label configuration : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration; @end -typedef void (^RTCStatisticsCompletionHandler)(RTCStatisticsReport *); +typedef void (^RTCStatisticsCompletionHandler)(RTC_OBJC_TYPE(RTCStatisticsReport) *); -@interface RTCPeerConnection (Stats) +@interface RTC_OBJC_TYPE (RTCPeerConnection) +(Stats) -/** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil - * statistics are gathered for all tracks. - */ -- (void)statsForTrack:(nullable RTCMediaStreamTrack *)mediaStreamTrack - statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel - completionHandler:(nullable void (^)(NSArray *stats))completionHandler; + /** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil + * statistics are gathered for all tracks. + */ + - (void)statsForTrack + : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel + : (RTCStatsOutputLevel)statsOutputLevel completionHandler + : (nullable void (^)(NSArray *stats))completionHandler; /** Gather statistic through the v2 statistics API. */ - (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler; @@ -346,13 +363,13 @@ typedef void (^RTCStatisticsCompletionHandler)(RTCStatisticsReport *); /** Spec-compliant getStats() performing the stats selection algorithm with the * sender. */ -- (void)statisticsForSender:(RTCRtpSender *)sender +- (void)statisticsForSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler:(RTCStatisticsCompletionHandler)completionHandler; /** Spec-compliant getStats() performing the stats selection algorithm with the * receiver. 
*/ -- (void)statisticsForReceiver:(RTCRtpReceiver *)receiver +- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver completionHandler:(RTCStatisticsCompletionHandler)completionHandler; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm index 32a8a4baea..f6252f6cd8 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm @@ -29,12 +29,10 @@ #include "api/jsep_ice_candidate.h" #include "api/rtc_event_log_output_file.h" -#include "api/transport/media/media_transport_interface.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" -NSString * const kRTCPeerConnectionErrorDomain = - @"org.webrtc.RTCPeerConnection"; +NSString *const kRTCPeerConnectionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)"; int const kRTCPeerConnnectionSessionDescriptionError = -1; namespace webrtc { @@ -42,9 +40,8 @@ class CreateSessionDescriptionObserverAdapter : public CreateSessionDescriptionObserver { public: - CreateSessionDescriptionObserverAdapter( - void (^completionHandler)(RTCSessionDescription *sessionDescription, - NSError *error)) { + CreateSessionDescriptionObserverAdapter(void (^completionHandler)( + RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, NSError *error)) { completion_handler_ = completionHandler; } @@ -54,9 +51,8 @@ void OnSuccess(SessionDescriptionInterface *desc) override { RTC_DCHECK(completion_handler_); std::unique_ptr description = std::unique_ptr(desc); - RTCSessionDescription* session = - [[RTCSessionDescription alloc] initWithNativeDescription: - description.get()]; + RTC_OBJC_TYPE(RTCSessionDescription) *session = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description.get()]; completion_handler_(session, nil); completion_handler_ = nil; } @@ -74,8 +70,8 @@ void OnFailure(RTCError error) override { } private: - void (^completion_handler_) - 
(RTCSessionDescription *sessionDescription, NSError *error); + void (^completion_handler_)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, + NSError *error); }; class SetSessionDescriptionObserverAdapter : @@ -110,8 +106,8 @@ void OnFailure(RTCError error) override { void (^completion_handler_)(NSError *error); }; -PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter( - RTCPeerConnection *peerConnection) { +PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) * + peerConnection) { peer_connection_ = peerConnection; } @@ -122,26 +118,28 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnSignalingChange( PeerConnectionInterface::SignalingState new_state) { RTCSignalingState state = - [[RTCPeerConnection class] signalingStateForNativeState:new_state]; - RTCPeerConnection *peer_connection = peer_connection_; + [[RTC_OBJC_TYPE(RTCPeerConnection) class] signalingStateForNativeState:new_state]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection didChangeSignalingState:state]; } void PeerConnectionDelegateAdapter::OnAddStream( rtc::scoped_refptr stream) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCMediaStream *mediaStream = - [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCMediaStream) *mediaStream = + [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory + nativeMediaStream:stream]; [peer_connection.delegate peerConnection:peer_connection didAddStream:mediaStream]; } void PeerConnectionDelegateAdapter::OnRemoveStream( rtc::scoped_refptr stream) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCMediaStream *mediaStream = - [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream]; + 
RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCMediaStream) *mediaStream = + [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory + nativeMediaStream:stream]; [peer_connection.delegate peerConnection:peer_connection didRemoveStream:mediaStream]; @@ -149,10 +147,10 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnTrack( rtc::scoped_refptr nativeTransceiver) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCRtpTransceiver *transceiver = - [[RTCRtpTransceiver alloc] initWithFactory:peer_connection.factory - nativeRtpTransceiver:nativeTransceiver]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = + [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:peer_connection.factory + nativeRtpTransceiver:nativeTransceiver]; if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didStartReceivingOnTransceiver:)]) { [peer_connection.delegate peerConnection:peer_connection @@ -162,21 +160,23 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnDataChannel( rtc::scoped_refptr data_channel) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCDataChannel *dataChannel = [[RTCDataChannel alloc] initWithFactory:peer_connection.factory - nativeDataChannel:data_channel]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCDataChannel) *dataChannel = + [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:peer_connection.factory + nativeDataChannel:data_channel]; [peer_connection.delegate peerConnection:peer_connection didOpenDataChannel:dataChannel]; } void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() { - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate 
peerConnectionShouldNegotiate:peer_connection]; } void PeerConnectionDelegateAdapter::OnIceConnectionChange( PeerConnectionInterface::IceConnectionState new_state) { - RTCIceConnectionState state = [RTCPeerConnection iceConnectionStateForNativeState:new_state]; + RTCIceConnectionState state = + [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeIceConnectionState:state]; } @@ -184,7 +184,8 @@ void OnFailure(RTCError error) override { PeerConnectionInterface::IceConnectionState new_state) { if ([peer_connection_.delegate respondsToSelector:@selector(peerConnection:didChangeStandardizedIceConnectionState:)]) { - RTCIceConnectionState state = [RTCPeerConnection iceConnectionStateForNativeState:new_state]; + RTCIceConnectionState state = + [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeStandardizedIceConnectionState:state]; } @@ -194,7 +195,8 @@ void OnFailure(RTCError error) override { PeerConnectionInterface::PeerConnectionState new_state) { if ([peer_connection_.delegate respondsToSelector:@selector(peerConnection:didChangeConnectionState:)]) { - RTCPeerConnectionState state = [RTCPeerConnection connectionStateForNativeState:new_state]; + RTCPeerConnectionState state = + [RTC_OBJC_TYPE(RTCPeerConnection) connectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeConnectionState:state]; } } @@ -202,17 +204,17 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnIceGatheringChange( PeerConnectionInterface::IceGatheringState new_state) { RTCIceGatheringState state = - [[RTCPeerConnection class] iceGatheringStateForNativeState:new_state]; - RTCPeerConnection *peer_connection = peer_connection_; + [[RTC_OBJC_TYPE(RTCPeerConnection) class] iceGatheringStateForNativeState:new_state]; + 
RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection didChangeIceGatheringState:state]; } void PeerConnectionDelegateAdapter::OnIceCandidate( const IceCandidateInterface *candidate) { - RTCIceCandidate *iceCandidate = - [[RTCIceCandidate alloc] initWithNativeCandidate:candidate]; - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection didGenerateIceCandidate:iceCandidate]; } @@ -224,11 +226,11 @@ void OnFailure(RTCError error) override { for (const auto& candidate : candidates) { std::unique_ptr candidate_wrapper( new JsepIceCandidate(candidate.transport_name(), -1, candidate)); - RTCIceCandidate* ice_candidate = [[RTCIceCandidate alloc] - initWithNativeCandidate:candidate_wrapper.get()]; + RTC_OBJC_TYPE(RTCIceCandidate) *ice_candidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate_wrapper.get()]; [ice_candidates addObject:ice_candidate]; } - RTCPeerConnection* peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection didRemoveIceCandidates:ice_candidates]; } @@ -238,13 +240,13 @@ void OnFailure(RTCError error) override { const auto &selected_pair = event.selected_candidate_pair; auto local_candidate_wrapper = std::make_unique( selected_pair.local_candidate().transport_name(), -1, selected_pair.local_candidate()); - RTCIceCandidate *local_candidate = - [[RTCIceCandidate alloc] initWithNativeCandidate:local_candidate_wrapper.release()]; + RTC_OBJC_TYPE(RTCIceCandidate) *local_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] + initWithNativeCandidate:local_candidate_wrapper.release()]; auto 
remote_candidate_wrapper = std::make_unique( selected_pair.remote_candidate().transport_name(), -1, selected_pair.remote_candidate()); - RTCIceCandidate *remote_candidate = - [[RTCIceCandidate alloc] initWithNativeCandidate:remote_candidate_wrapper.release()]; - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCIceCandidate) *remote_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] + initWithNativeCandidate:remote_candidate_wrapper.release()]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; NSString *nsstr_reason = [NSString stringForStdString:event.reason]; if ([peer_connection.delegate respondsToSelector:@selector @@ -260,17 +262,19 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnAddTrack( rtc::scoped_refptr receiver, const std::vector> &streams) { - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; if ([peer_connection.delegate respondsToSelector:@selector(peerConnection: didAddReceiver:streams:)]) { NSMutableArray *mediaStreams = [NSMutableArray arrayWithCapacity:streams.size()]; for (const auto &nativeStream : streams) { - RTCMediaStream *mediaStream = [[RTCMediaStream alloc] initWithFactory:peer_connection.factory - nativeMediaStream:nativeStream]; + RTC_OBJC_TYPE(RTCMediaStream) *mediaStream = + [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory + nativeMediaStream:nativeStream]; [mediaStreams addObject:mediaStream]; } - RTCRtpReceiver *rtpReceiver = [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory - nativeRtpReceiver:receiver]; + RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory + nativeRtpReceiver:receiver]; [peer_connection.delegate peerConnection:peer_connection didAddReceiver:rtpReceiver @@ -280,19 +284,20 @@ void OnFailure(RTCError error) override { void 
PeerConnectionDelegateAdapter::OnRemoveTrack( rtc::scoped_refptr receiver) { - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didRemoveReceiver:)]) { - RTCRtpReceiver *rtpReceiver = [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory - nativeRtpReceiver:receiver]; + RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory + nativeRtpReceiver:receiver]; [peer_connection.delegate peerConnection:peer_connection didRemoveReceiver:rtpReceiver]; } } } // namespace webrtc -@implementation RTCPeerConnection { - RTCPeerConnectionFactory *_factory; - NSMutableArray *_localStreams; +@implementation RTC_OBJC_TYPE (RTCPeerConnection) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; + NSMutableArray *_localStreams; std::unique_ptr _observer; rtc::scoped_refptr _peerConnection; std::unique_ptr _nativeConstraints; @@ -302,11 +307,28 @@ @implementation RTCPeerConnection { @synthesize delegate = _delegate; @synthesize factory = _factory; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - configuration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints - delegate:(id)delegate { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate:(id)delegate { NSParameterAssert(factory); + std::unique_ptr dependencies = + std::make_unique(nullptr); + return [self initWithDependencies:factory + configuration:configuration + constraints:constraints + dependencies:std::move(dependencies) + delegate:delegate]; +} + +- (instancetype)initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + 
configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + dependencies: + (std::unique_ptr)dependencies + delegate:(id)delegate { + NSParameterAssert(factory); + NSParameterAssert(dependencies.get()); std::unique_ptr config( [configuration createNativeConfiguration]); if (!config) { @@ -315,13 +337,12 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory if (self = [super init]) { _observer.reset(new webrtc::PeerConnectionDelegateAdapter(self)); _nativeConstraints = constraints.nativeConstraints; - CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(), - config.get()); - _peerConnection = - factory.nativeFactory->CreatePeerConnection(*config, - nullptr, - nullptr, - _observer.get()); + CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(), config.get()); + + webrtc::PeerConnectionDependencies deps = std::move(*dependencies.release()); + deps.observer = _observer.get(); + _peerConnection = factory.nativeFactory->CreatePeerConnection(*config, std::move(deps)); + if (!_peerConnection) { return nil; } @@ -332,24 +353,32 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return self; } -- (NSArray *)localStreams { +- (NSArray *)localStreams { return [_localStreams copy]; } -- (RTCSessionDescription *)localDescription { - const webrtc::SessionDescriptionInterface *description = - _peerConnection->local_description(); - return description ? - [[RTCSessionDescription alloc] initWithNativeDescription:description] - : nil; +- (RTC_OBJC_TYPE(RTCSessionDescription) *)localDescription { + // It's only safe to operate on SessionDescriptionInterface on the signaling thread. + return _peerConnection->signaling_thread()->Invoke( + RTC_FROM_HERE, [self] { + const webrtc::SessionDescriptionInterface *description = + _peerConnection->local_description(); + return description ? 
+ [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] : + nil; + }); } -- (RTCSessionDescription *)remoteDescription { - const webrtc::SessionDescriptionInterface *description = - _peerConnection->remote_description(); - return description ? - [[RTCSessionDescription alloc] initWithNativeDescription:description] - : nil; +- (RTC_OBJC_TYPE(RTCSessionDescription) *)remoteDescription { + // It's only safe to operate on SessionDescriptionInterface on the signaling thread. + return _peerConnection->signaling_thread()->Invoke( + RTC_FROM_HERE, [self] { + const webrtc::SessionDescriptionInterface *description = + _peerConnection->remote_description(); + return description ? + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] : + nil; + }); } - (RTCSignalingState)signalingState { @@ -371,7 +400,7 @@ - (RTCIceGatheringState)iceGatheringState { _peerConnection->ice_gathering_state()]; } -- (BOOL)setConfiguration:(RTCConfiguration *)configuration { +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration { std::unique_ptr config( [configuration createNativeConfiguration]); if (!config) { @@ -382,25 +411,25 @@ - (BOOL)setConfiguration:(RTCConfiguration *)configuration { return _peerConnection->SetConfiguration(*config).ok(); } -- (RTCConfiguration *)configuration { +- (RTC_OBJC_TYPE(RTCConfiguration) *)configuration { webrtc::PeerConnectionInterface::RTCConfiguration config = _peerConnection->GetConfiguration(); - return [[RTCConfiguration alloc] initWithNativeConfiguration:config]; + return [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:config]; } - (void)close { _peerConnection->Close(); } -- (void)addIceCandidate:(RTCIceCandidate *)candidate { +- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate { std::unique_ptr iceCandidate( candidate.nativeCandidate); _peerConnection->AddIceCandidate(iceCandidate.get()); } -- (void)removeIceCandidates:(NSArray 
*)iceCandidates { +- (void)removeIceCandidates:(NSArray *)iceCandidates { std::vector candidates; - for (RTCIceCandidate *iceCandidate in iceCandidates) { + for (RTC_OBJC_TYPE(RTCIceCandidate) * iceCandidate in iceCandidates) { std::unique_ptr candidate( iceCandidate.nativeCandidate); if (candidate) { @@ -414,7 +443,7 @@ - (void)removeIceCandidates:(NSArray *)iceCandidates { } } -- (void)addStream:(RTCMediaStream *)stream { +- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream { if (!_peerConnection->AddStream(stream.nativeMediaStream)) { RTCLogError(@"Failed to add stream: %@", stream); return; @@ -422,12 +451,13 @@ - (void)addStream:(RTCMediaStream *)stream { [_localStreams addObject:stream]; } -- (void)removeStream:(RTCMediaStream *)stream { +- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream { _peerConnection->RemoveStream(stream.nativeMediaStream); [_localStreams removeObject:stream]; } -- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray *)streamIds { +- (RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + streamIds:(NSArray *)streamIds { std::vector nativeStreamIds; for (NSString *streamId in streamIds) { nativeStreamIds.push_back([streamId UTF8String]); @@ -438,11 +468,11 @@ - (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArrayRemoveTrack(sender.nativeRtpSender); if (!result) { RTCLogError(@"Failed to remote track %@", sender); @@ -450,12 +480,15 @@ - (BOOL)removeTrack:(RTCRtpSender *)sender { return result; } -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track { - return [self addTransceiverWithTrack:track init:[[RTCRtpTransceiverInit alloc] init]]; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack: + (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { + return [self addTransceiverWithTrack:track + init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; } -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track 
- init:(RTCRtpTransceiverInit *)init { +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *) + addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init { webrtc::RTCErrorOr> nativeTransceiverOrError = _peerConnection->AddTransceiver(track.nativeTrack, init.nativeInit); if (!nativeTransceiverOrError.ok()) { @@ -463,33 +496,36 @@ - (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track @"Failed to add transceiver %@: %s", track, nativeTransceiverOrError.error().message()); return nil; } - return [[RTCRtpTransceiver alloc] initWithFactory:self.factory - nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; + return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] + initWithFactory:self.factory + nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; } -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType { - return [self addTransceiverOfType:mediaType init:[[RTCRtpTransceiverInit alloc] init]]; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType { + return [self addTransceiverOfType:mediaType + init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; } -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType - init:(RTCRtpTransceiverInit *)init { +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType + init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *) + init { webrtc::RTCErrorOr> nativeTransceiverOrError = - _peerConnection->AddTransceiver([RTCRtpReceiver nativeMediaTypeForMediaType:mediaType], - init.nativeInit); + _peerConnection->AddTransceiver( + [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:mediaType], init.nativeInit); if (!nativeTransceiverOrError.ok()) { RTCLogError(@"Failed to add transceiver %@: %s", - [RTCRtpReceiver stringForMediaType:mediaType], + [RTC_OBJC_TYPE(RTCRtpReceiver) stringForMediaType:mediaType], nativeTransceiverOrError.error().message()); return nil; } - return 
[[RTCRtpTransceiver alloc] initWithFactory:self.factory - nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; + return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] + initWithFactory:self.factory + nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; } -- (void)offerForConstraints:(RTCMediaConstraints *)constraints - completionHandler: - (void (^)(RTCSessionDescription *sessionDescription, - NSError *error))completionHandler { +- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler:(void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, + NSError *error))completionHandler { rtc::scoped_refptr observer(new rtc::RefCountedObject (completionHandler)); @@ -499,10 +535,9 @@ - (void)offerForConstraints:(RTCMediaConstraints *)constraints _peerConnection->CreateOffer(observer, options); } -- (void)answerForConstraints:(RTCMediaConstraints *)constraints - completionHandler: - (void (^)(RTCSessionDescription *sessionDescription, - NSError *error))completionHandler { +- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler:(void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, + NSError *error))completionHandler { rtc::scoped_refptr observer(new rtc::RefCountedObject (completionHandler)); @@ -512,7 +547,7 @@ - (void)answerForConstraints:(RTCMediaConstraints *)constraints _peerConnection->CreateAnswer(observer, options); } -- (void)setLocalDescription:(RTCSessionDescription *)sdp +- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp completionHandler:(void (^)(NSError *error))completionHandler { rtc::scoped_refptr observer( new rtc::RefCountedObject( @@ -520,7 +555,7 @@ - (void)setLocalDescription:(RTCSessionDescription *)sdp _peerConnection->SetLocalDescription(observer, sdp.nativeDescription); } -- (void)setRemoteDescription:(RTCSessionDescription *)sdp +- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp 
completionHandler:(void (^)(NSError *error))completionHandler { rtc::scoped_refptr observer( new rtc::RefCountedObject( @@ -531,12 +566,12 @@ - (void)setRemoteDescription:(RTCSessionDescription *)sdp - (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps currentBitrateBps:(nullable NSNumber *)currentBitrateBps maxBitrateBps:(nullable NSNumber *)maxBitrateBps { - webrtc::PeerConnectionInterface::BitrateParameters params; + webrtc::BitrateSettings params; if (minBitrateBps != nil) { params.min_bitrate_bps = absl::optional(minBitrateBps.intValue); } if (currentBitrateBps != nil) { - params.current_bitrate_bps = absl::optional(currentBitrateBps.intValue); + params.start_bitrate_bps = absl::optional(currentBitrateBps.intValue); } if (maxBitrateBps != nil) { params.max_bitrate_bps = absl::optional(maxBitrateBps.intValue); @@ -572,48 +607,58 @@ - (void)stopRtcEventLog { _hasStartedRtcEventLog = NO; } -- (RTCRtpSender *)senderWithKind:(NSString *)kind - streamId:(NSString *)streamId { +- (int32_t)startRecorder:(int32_t)dir path:(NSString*)path { + return _peerConnection->StartRecorder(dir, [path UTF8String]); +} + +- (int32_t)stopRecorder:(int32_t)dir { + return _peerConnection->StopRecorder(dir); +} + +- (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId { std::string nativeKind = [NSString stdStringForString:kind]; std::string nativeStreamId = [NSString stdStringForString:streamId]; rtc::scoped_refptr nativeSender( _peerConnection->CreateSender(nativeKind, nativeStreamId)); - return nativeSender ? - [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender] : - nil; + return nativeSender ? 
[[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory + nativeRtpSender:nativeSender] : + nil; } -- (NSArray *)senders { +- (NSArray *)senders { std::vector> nativeSenders( _peerConnection->GetSenders()); NSMutableArray *senders = [[NSMutableArray alloc] init]; for (const auto &nativeSender : nativeSenders) { - RTCRtpSender *sender = - [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender]; + RTC_OBJC_TYPE(RTCRtpSender) *sender = + [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory + nativeRtpSender:nativeSender]; [senders addObject:sender]; } return senders; } -- (NSArray *)receivers { +- (NSArray *)receivers { std::vector> nativeReceivers( _peerConnection->GetReceivers()); NSMutableArray *receivers = [[NSMutableArray alloc] init]; for (const auto &nativeReceiver : nativeReceivers) { - RTCRtpReceiver *receiver = - [[RTCRtpReceiver alloc] initWithFactory:self.factory nativeRtpReceiver:nativeReceiver]; + RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:self.factory + nativeRtpReceiver:nativeReceiver]; [receivers addObject:receiver]; } return receivers; } -- (NSArray *)transceivers { +- (NSArray *)transceivers { std::vector> nativeTransceivers( _peerConnection->GetTransceivers()); NSMutableArray *transceivers = [[NSMutableArray alloc] init]; for (const auto &nativeTransceiver : nativeTransceivers) { - RTCRtpTransceiver *transceiver = [[RTCRtpTransceiver alloc] initWithFactory:self.factory - nativeRtpTransceiver:nativeTransceiver]; + RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = + [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:self.factory + nativeRtpTransceiver:nativeTransceiver]; [transceivers addObject:transceiver]; } return transceivers; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h index 7a57645c0e..c7a79f5615 100644 --- 
a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h @@ -11,17 +11,18 @@ #import "RTCPeerConnectionFactory.h" #include "api/scoped_refptr.h" +#include "api/peer_connection_interface.h" namespace webrtc { class AudioDeviceModule; class AudioEncoderFactory; class AudioDecoderFactory; -class MediaTransportFactory; class NetworkControllerFactoryInterface; class VideoEncoderFactory; class VideoDecoderFactory; class AudioProcessing; +struct PeerConnectionDependencies; } // namespace webrtc @@ -30,9 +31,10 @@ NS_ASSUME_NONNULL_BEGIN /** * This class extension exposes methods that work directly with injectable C++ components. */ -@interface RTCPeerConnectionFactory () +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) +() -- (instancetype)initNative NS_DESIGNATED_INITIALIZER; + - (instancetype)initNative NS_DESIGNATED_INITIALIZER; /* Initializer used when WebRTC is compiled with no media support */ - (instancetype)initWithNoMedia; @@ -63,30 +65,28 @@ NS_ASSUME_NONNULL_BEGIN audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule audioProcessingModule: (rtc::scoped_refptr)audioProcessingModule - mediaTransportFactory: - (std::unique_ptr)mediaTransportFactory; + networkControllerFactory:(std::unique_ptr) + networkControllerFactory; - (instancetype) - initWithNativeAudioEncoderFactory: - (rtc::scoped_refptr)audioEncoderFactory - nativeAudioDecoderFactory: - (rtc::scoped_refptr)audioDecoderFactory - nativeVideoEncoderFactory: - (std::unique_ptr)videoEncoderFactory - nativeVideoDecoderFactory: - (std::unique_ptr)videoDecoderFactory - audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule - audioProcessingModule: - (rtc::scoped_refptr)audioProcessingModule - networkControllerFactory:(std::unique_ptr) - networkControllerFactory - mediaTransportFactory: - (std::unique_ptr)mediaTransportFactory; - -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - 
decoderFactory:(nullable id)decoderFactory - mediaTransportFactory: - (std::unique_ptr)mediaTransportFactory; + initWithEncoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory; + + +/** Initialize an RTCPeerConnection with a configuration, constraints, and + * dependencies. + */ +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + dependencies:(std::unique_ptr)dependencies + delegate:(nullable id)delegate; + + +- (instancetype)initWithNativePeerConnectionFactory:( + rtc::scoped_refptr)factory; + @end + NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h index db7829c977..ef61c2ed01 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h @@ -15,16 +15,16 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactory () +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) +() -/** - * PeerConnectionFactoryInterface created and held by this - * RTCPeerConnectionFactory object. This is needed to pass to the underlying - * C++ APIs. - */ -@property(nonatomic, readonly) - rtc::scoped_refptr - nativeFactory; + /** + * PeerConnectionFactoryInterface created and held by this + * RTCPeerConnectionFactory object. This is needed to pass to the underlying + * C++ APIs. 
+ */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeFactory; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index c808218b54..3dcd3b6495 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -14,61 +14,69 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCAudioSource; -@class RTCAudioTrack; -@class RTCConfiguration; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCPeerConnection; -@class RTCVideoSource; -@class RTCVideoTrack; -@class RTCPeerConnectionFactoryOptions; -@protocol RTCPeerConnectionDelegate; -@protocol RTCVideoDecoderFactory; -@protocol RTCVideoEncoderFactory; +@class RTC_OBJC_TYPE(RTCAudioSource); +@class RTC_OBJC_TYPE(RTCAudioTrack); +@class RTC_OBJC_TYPE(RTCConfiguration); +@class RTC_OBJC_TYPE(RTCMediaConstraints); +@class RTC_OBJC_TYPE(RTCMediaStream); +@class RTC_OBJC_TYPE(RTCPeerConnection); +@class RTC_OBJC_TYPE(RTCVideoSource); +@class RTC_OBJC_TYPE(RTCVideoTrack); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); +@protocol RTC_OBJC_TYPE +(RTCPeerConnectionDelegate); +@protocol RTC_OBJC_TYPE +(RTCVideoDecoderFactory); +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderFactory); RTC_OBJC_EXPORT -@interface RTCPeerConnectionFactory : NSObject +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject /* Initialize object with default H264 video encoder/decoder factories */ - (instancetype)init; /* Initialize object with injectable video encoder/decoder factories */ -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory; +- (instancetype) + initWithEncoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory; /** Initialize an RTCAudioSource with constraints. 
*/ -- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints; +- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: + (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints; -/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source with no - * constraints. +/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source + * with no constraints. */ -- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId; /** Initialize an RTCAudioTrack with a source and an id. */ -- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source trackId:(NSString *)trackId; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source + trackId:(NSString *)trackId; -/** Initialize a generic RTCVideoSource. The RTCVideoSource should be passed to a RTCVideoCapturer - * implementation, e.g. RTCCameraVideoCapturer, in order to produce frames. +/** Initialize a generic RTCVideoSource. The RTCVideoSource should be + * passed to a RTCVideoCapturer implementation, e.g. + * RTCCameraVideoCapturer, in order to produce frames. */ -- (RTCVideoSource *)videoSource; +- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource; /** Initialize an RTCVideoTrack with a source and an id. */ -- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source trackId:(NSString *)trackId; +- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source + trackId:(NSString *)trackId; /** Initialize an RTCMediaStream with an id. */ -- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId; +- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId; /** Initialize an RTCPeerConnection with a configuration, constraints, and * delegate. 
*/ -- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints - delegate: - (nullable id)delegate; +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate:(nullable id)delegate; /** Set the options to be used for subsequently created RTCPeerConnections */ -- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options; +- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options; /** Start an AecDump recording. This API call will likely change in the future. */ - (BOOL)startAecDumpWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 4ac33d2436..1b5414ec16 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -25,6 +25,9 @@ #import "base/RTCVideoDecoderFactory.h" #import "base/RTCVideoEncoderFactory.h" #import "helpers/NSString+StdString.h" +#include "sdk/objc/native/api/network_monitor_factory.h" +#include "system_wrappers/include/field_trial.h" + #ifndef HAVE_NO_MEDIA #import "components/video_codec/RTCVideoDecoderFactoryH264.h" #import "components/video_codec/RTCVideoEncoderFactoryH264.h" @@ -34,6 +37,7 @@ #include "api/audio_codecs/builtin_audio_encoder_factory.h" // nogncheck #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" #include "modules/audio_device/include/audio_device.h" // nogncheck #include "modules/audio_processing/include/audio_processing.h" // nogncheck @@ -52,10 +56,9 @@ // C++ target. 
// TODO(zhihuang): Remove nogncheck once MediaEngineInterface is moved to C++ // API layer. -#include "api/transport/media/media_transport_interface.h" #include "media/engine/webrtc_media_engine.h" // nogncheck -@implementation RTCPeerConnectionFactory { +@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _networkThread; std::unique_ptr _workerThread; std::unique_ptr _signalingThread; @@ -76,22 +79,21 @@ - (instancetype)init { #ifdef HAVE_NO_MEDIA return [self initWithNoMedia]; #else - return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() - nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() - nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory( - [[RTCVideoEncoderFactoryH264 alloc] init]) - nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory( - [[RTCVideoDecoderFactoryH264 alloc] init]) - audioDeviceModule:[self audioDeviceModule] - audioProcessingModule:nullptr - mediaTransportFactory:nullptr]; + return [self + initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE( + RTCVideoEncoderFactoryH264) alloc] init]) + nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE( + RTCVideoDecoderFactoryH264) alloc] init]) + audioDeviceModule:[self audioDeviceModule] + audioProcessingModule:nullptr]; #endif } -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory - mediaTransportFactory: - (std::unique_ptr)mediaTransportFactory { +- (instancetype) + initWithEncoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory { #ifdef HAVE_NO_MEDIA return [self initWithNoMedia]; #else @@ -108,17 +110,9 @@ - (instancetype)initWithEncoderFactory:(nullable id)enco 
nativeVideoEncoderFactory:std::move(native_encoder_factory) nativeVideoDecoderFactory:std::move(native_decoder_factory) audioDeviceModule:[self audioDeviceModule] - audioProcessingModule:nullptr - mediaTransportFactory:std::move(mediaTransportFactory)]; + audioProcessingModule:nullptr]; #endif } -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory { - return [self initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory - mediaTransportFactory:nullptr]; -} - - (instancetype)initNative { if (self = [super init]) { _networkThread = rtc::Thread::CreateWithSocketServer(); @@ -145,6 +139,9 @@ - (instancetype)initWithNoMedia { dependencies.network_thread = _networkThread.get(); dependencies.worker_thread = _workerThread.get(); dependencies.signaling_thread = _signalingThread.get(); + if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) { + dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory(); + } _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!"); } @@ -168,30 +165,7 @@ - (instancetype)initWithNativeAudioEncoderFactory: nativeVideoDecoderFactory:std::move(videoDecoderFactory) audioDeviceModule:audioDeviceModule audioProcessingModule:audioProcessingModule - mediaTransportFactory:nullptr]; -} - -- (instancetype)initWithNativeAudioEncoderFactory: - (rtc::scoped_refptr)audioEncoderFactory - nativeAudioDecoderFactory: - (rtc::scoped_refptr)audioDecoderFactory - nativeVideoEncoderFactory: - (std::unique_ptr)videoEncoderFactory - nativeVideoDecoderFactory: - (std::unique_ptr)videoDecoderFactory - audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule - audioProcessingModule: - (rtc::scoped_refptr)audioProcessingModule - mediaTransportFactory:(std::unique_ptr) - mediaTransportFactory { - return [self 
initWithNativeAudioEncoderFactory:audioEncoderFactory - nativeAudioDecoderFactory:audioDecoderFactory - nativeVideoEncoderFactory:std::move(videoEncoderFactory) - nativeVideoDecoderFactory:std::move(videoDecoderFactory) - audioDeviceModule:audioDeviceModule - audioProcessingModule:audioProcessingModule - networkControllerFactory:nullptr - mediaTransportFactory:std::move(mediaTransportFactory)]; + networkControllerFactory:nullptr]; } - (instancetype)initWithNativeAudioEncoderFactory: (rtc::scoped_refptr)audioEncoderFactory @@ -206,16 +180,18 @@ - (instancetype)initWithNativeAudioEncoderFactory: (rtc::scoped_refptr)audioProcessingModule networkControllerFactory: (std::unique_ptr) - networkControllerFactory - mediaTransportFactory:(std::unique_ptr) - mediaTransportFactory { + networkControllerFactory { if (self = [self initNative]) { webrtc::PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = _networkThread.get(); dependencies.worker_thread = _workerThread.get(); dependencies.signaling_thread = _signalingThread.get(); + if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) { + dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory(); + } #ifndef HAVE_NO_MEDIA dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); + dependencies.trials = std::make_unique(); cricket::MediaEngineDependencies media_deps; media_deps.adm = std::move(audioDeviceModule); media_deps.task_queue_factory = dependencies.task_queue_factory.get(); @@ -228,20 +204,28 @@ - (instancetype)initWithNativeAudioEncoderFactory: } else { media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create(); } + media_deps.trials = dependencies.trials.get(); dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); dependencies.call_factory = webrtc::CreateCallFactory(); dependencies.event_log_factory = std::make_unique(dependencies.task_queue_factory.get()); dependencies.network_controller_factory = 
std::move(networkControllerFactory); - dependencies.media_transport_factory = std::move(mediaTransportFactory); #endif _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!"); } return self; } +- (instancetype)initWithNativePeerConnectionFactory: + (rtc::scoped_refptr)factory { + if (self = [self initNative]) { + _nativeFactory = factory; + } + return self; +} -- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints { +- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: + (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints { std::unique_ptr nativeConstraints; if (constraints) { nativeConstraints = constraints.nativeConstraints; @@ -251,52 +235,58 @@ - (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)c rtc::scoped_refptr source = _nativeFactory->CreateAudioSource(options); - return [[RTCAudioSource alloc] initWithFactory:self nativeAudioSource:source]; + return [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self nativeAudioSource:source]; } -- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId { - RTCAudioSource *audioSource = [self audioSourceWithConstraints:nil]; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId { + RTC_OBJC_TYPE(RTCAudioSource) *audioSource = [self audioSourceWithConstraints:nil]; return [self audioTrackWithSource:audioSource trackId:trackId]; } -- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source - trackId:(NSString *)trackId { - return [[RTCAudioTrack alloc] initWithFactory:self - source:source - trackId:trackId]; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source + trackId:(NSString *)trackId { + return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:self source:source trackId:trackId]; +} + +- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource { + return 
[[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self + signalingThread:_signalingThread.get() + workerThread:_workerThread.get()]; } -- (RTCVideoSource *)videoSource { - return [[RTCVideoSource alloc] initWithFactory:self - signalingThread:_signalingThread.get() - workerThread:_workerThread.get()]; +- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source + trackId:(NSString *)trackId { + return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:self source:source trackId:trackId]; } -- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source - trackId:(NSString *)trackId { - return [[RTCVideoTrack alloc] initWithFactory:self - source:source - trackId:trackId]; +- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId { + return [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:self streamId:streamId]; } -- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId { - return [[RTCMediaStream alloc] initWithFactory:self - streamId:streamId]; +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate: + (nullable id)delegate { + return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self + configuration:configuration + constraints:constraints + delegate:delegate]; } -- (RTCPeerConnection *)peerConnectionWithConfiguration: - (RTCConfiguration *)configuration - constraints: - (RTCMediaConstraints *)constraints - delegate: - (nullable id)delegate { - return [[RTCPeerConnection alloc] initWithFactory:self - configuration:configuration - constraints:constraints - delegate:delegate]; +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + dependencies:(std::unique_ptr)dependencies + delegate:(id)delegate { + 
return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithDependencies:self + configuration:configuration + constraints:constraints + dependencies:std::move(dependencies) + delegate:delegate]; } -- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options { +- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options { RTC_DCHECK(options != nil); _nativeFactory->SetOptions(options.nativeOptions); } diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm index 3bb75eec68..522e520e12 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm @@ -32,12 +32,12 @@ + (RTCPeerConnectionFactoryBuilder *)defaultBuilder { auto audioDecoderFactory = webrtc::CreateBuiltinAudioDecoderFactory(); [builder setAudioDecoderFactory:audioDecoderFactory]; - auto videoEncoderFactory = - webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]); + auto videoEncoderFactory = webrtc::ObjCToNativeVideoEncoderFactory( + [[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]); [builder setVideoEncoderFactory:std::move(videoEncoderFactory)]; - auto videoDecoderFactory = - webrtc::ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]); + auto videoDecoderFactory = webrtc::ObjCToNativeVideoDecoderFactory( + [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]); [builder setVideoDecoderFactory:std::move(videoDecoderFactory)]; #if defined(WEBRTC_IOS) diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h index 189eb736b2..f0b0de156a 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h @@ -29,7 +29,7 @@ 
NS_ASSUME_NONNULL_BEGIN + (RTCPeerConnectionFactoryBuilder *)builder; -- (RTCPeerConnectionFactory *)createPeerConnectionFactory; +- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory; - (void)setVideoEncoderFactory:(std::unique_ptr)videoEncoderFactory; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index af3d259e68..991ec5a41c 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -13,7 +13,6 @@ #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" -#include "api/transport/media/media_transport_interface.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "modules/audio_device/include/audio_device.h" @@ -26,22 +25,21 @@ @implementation RTCPeerConnectionFactoryBuilder { rtc::scoped_refptr _audioDecoderFactory; rtc::scoped_refptr _audioDeviceModule; rtc::scoped_refptr _audioProcessingModule; - std::unique_ptr _mediaTransportFactory; } + (RTCPeerConnectionFactoryBuilder *)builder { return [[RTCPeerConnectionFactoryBuilder alloc] init]; } -- (RTCPeerConnectionFactory *)createPeerConnectionFactory { - RTCPeerConnectionFactory *factory = [RTCPeerConnectionFactory alloc]; +- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]; return [factory initWithNativeAudioEncoderFactory:_audioEncoderFactory nativeAudioDecoderFactory:_audioDecoderFactory nativeVideoEncoderFactory:std::move(_videoEncoderFactory) nativeVideoDecoderFactory:std::move(_videoDecoderFactory) audioDeviceModule:_audioDeviceModule - audioProcessingModule:_audioProcessingModule - mediaTransportFactory:std::move(_mediaTransportFactory)]; + 
audioProcessingModule:_audioProcessingModule]; } - (void)setVideoEncoderFactory:(std::unique_ptr)videoEncoderFactory { diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h index 986b0e698d..8832b23695 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h @@ -14,12 +14,12 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryOptions () +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) +() -/** Returns the equivalent native PeerConnectionFactoryInterface::Options - * structure. */ -@property(nonatomic, readonly) - webrtc::PeerConnectionFactoryInterface::Options nativeOptions; + /** Returns the equivalent native PeerConnectionFactoryInterface::Options + * structure. */ + @property(nonatomic, readonly) webrtc::PeerConnectionFactoryInterface::Options nativeOptions; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h index 4bec8695bd..bfc54a5d7b 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCPeerConnectionFactoryOptions : NSObject +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) : NSObject @property(nonatomic, assign) BOOL disableEncryption; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm index f0cc6a6c81..5467bd5fc9 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm @@ -25,7 +25,7 @@ void setNetworkBit(webrtc::PeerConnectionFactoryInterface::Options* options, } } // namespace -@implementation 
RTCPeerConnectionFactoryOptions +@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) @synthesize disableEncryption = _disableEncryption; @synthesize disableNetworkMonitor = _disableNetworkMonitor; diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h index 5471bf4d62..94c1f92956 100644 --- a/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtcpParameters () +@interface RTC_OBJC_TYPE (RTCRtcpParameters) +() -/** Returns the equivalent native RtcpParameters structure. */ -@property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters; + /** Returns the equivalent native RtcpParameters structure. */ + @property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters; /** Initialize the object with a native RtcpParameters structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters.h b/sdk/objc/api/peerconnection/RTCRtcpParameters.h index 5c265806b1..1bbaedcf7e 100644 --- a/sdk/objc/api/peerconnection/RTCRtcpParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtcpParameters.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCRtcpParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtcpParameters) : NSObject /** The Canonical Name used by RTCP. 
*/ @property(nonatomic, readonly, copy) NSString *cname; diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters.mm b/sdk/objc/api/peerconnection/RTCRtcpParameters.mm index 0c33dda961..4d6084b90d 100644 --- a/sdk/objc/api/peerconnection/RTCRtcpParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtcpParameters.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCRtcpParameters +@implementation RTC_OBJC_TYPE (RTCRtcpParameters) @synthesize cname = _cname; @synthesize isReducedSize = _isReducedSize; diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h index 1b297edeba..7833068837 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtpCodecParameters () +@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) +() -/** Returns the equivalent native RtpCodecParameters structure. */ -@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters; + /** Returns the equivalent native RtpCodecParameters structure. */ + @property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters; /** Initialize the object with a native RtpCodecParameters structure. 
*/ - (instancetype)initWithNativeParameters:(const webrtc::RtpCodecParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h index 5d3cac5c96..a68d9eb873 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h @@ -31,9 +31,9 @@ RTC_EXTERN const NSString *const kRTCVp8CodecName; RTC_EXTERN const NSString *const kRTCVp9CodecName; RTC_EXTERN const NSString *const kRTCH264CodecName; -/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTCRtpCodecParameters */ +/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTCRtpCodecParameters */ RTC_OBJC_EXPORT -@interface RTCRtpCodecParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) : NSObject /** The RTP payload type. */ @property(nonatomic, assign) int payloadType; diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index f25679e329..a7169ae29e 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -34,7 +34,7 @@ const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName); const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName); -@implementation RTCRtpCodecParameters +@implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) @synthesize payloadType = _payloadType; @synthesize name = _name; @@ -59,9 +59,15 @@ - (instancetype)initWithNativeParameters: case cricket::MEDIA_TYPE_VIDEO: _kind = kRTCMediaStreamTrackKindVideo; break; + case cricket::MEDIA_TYPE_SCREEN: + _kind = kRTCMediaStreamTrackKindVideo; + break; case cricket::MEDIA_TYPE_DATA: RTC_NOTREACHED(); break; + case cricket::MEDIA_TYPE_UNSUPPORTED: + RTC_NOTREACHED(); + break; } if (nativeParameters.clock_rate) { _clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate]; diff --git 
a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h index e3684d3ca5..074c9b175b 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtpEncodingParameters () +@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters) +() -/** Returns the equivalent native RtpEncodingParameters structure. */ -@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters; + /** Returns the equivalent native RtpEncodingParameters structure. */ + @property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters; /** Initialize the object with a native RtpEncodingParameters structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtpEncodingParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index 16eabf9b2f..facd7e5129 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -14,8 +14,16 @@ NS_ASSUME_NONNULL_BEGIN +/** Corresponds to webrtc::Priority. */ +typedef NS_ENUM(NSInteger, RTCPriority) { + RTCPriorityVeryLow, + RTCPriorityLow, + RTCPriorityMedium, + RTCPriorityHigh +}; + RTC_OBJC_EXPORT -@interface RTCRtpEncodingParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters) : NSObject /** The idenfifier for the encoding layer. This is used in simulcast. */ @property(nonatomic, copy, nullable) NSString *rid; @@ -51,8 +59,11 @@ RTC_OBJC_EXPORT /** The SSRC being used by this encoding. */ @property(nonatomic, readonly, nullable) NSNumber *ssrc; +/** The relative bitrate priority. */ +@property(nonatomic, assign) double bitratePriority; + /** The relative DiffServ Code Point priority. 
*/ -@property(nonatomic, assign) double networkPriority; +@property(nonatomic, assign) RTCPriority networkPriority; - (instancetype)init NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index 7378473f50..eec6ce4015 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCRtpEncodingParameters +@implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters) @synthesize rid = _rid; @synthesize isActive = _isActive; @@ -22,6 +22,7 @@ @implementation RTCRtpEncodingParameters @synthesize numTemporalLayers = _numTemporalLayers; @synthesize scaleResolutionDownBy = _scaleResolutionDownBy; @synthesize ssrc = _ssrc; +@synthesize bitratePriority = _bitratePriority; @synthesize networkPriority = _networkPriority; - (instancetype)init { @@ -56,7 +57,9 @@ - (instancetype)initWithNativeParameters: if (nativeParameters.ssrc) { _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc]; } - _networkPriority = nativeParameters.network_priority; + _bitratePriority = nativeParameters.bitrate_priority; + _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) + priorityFromNativePriority:nativeParameters.network_priority]; } return self; } @@ -86,8 +89,36 @@ - (instancetype)initWithNativeParameters: if (_ssrc != nil) { parameters.ssrc = absl::optional(_ssrc.unsignedLongValue); } - parameters.network_priority = _networkPriority; + parameters.bitrate_priority = _bitratePriority; + parameters.network_priority = + [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority]; return parameters; } ++ (webrtc::Priority)nativePriorityFromPriority:(RTCPriority)networkPriority { + switch (networkPriority) { + case RTCPriorityVeryLow: + return webrtc::Priority::kVeryLow; + case RTCPriorityLow: + return 
webrtc::Priority::kLow; + case RTCPriorityMedium: + return webrtc::Priority::kMedium; + case RTCPriorityHigh: + return webrtc::Priority::kHigh; + } +} + ++ (RTCPriority)priorityFromNativePriority:(webrtc::Priority)nativePriority { + switch (nativePriority) { + case webrtc::Priority::kVeryLow: + return RTCPriorityVeryLow; + case webrtc::Priority::kLow: + return RTCPriorityLow; + case webrtc::Priority::kMedium: + return RTCPriorityMedium; + case webrtc::Priority::kHigh: + return RTCPriorityHigh; + } +} + @end diff --git a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm deleted file mode 100644 index 3a4415a342..0000000000 --- a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "RTCRtpFragmentationHeader+Private.h" - -#include "modules/include/module_common_types.h" - -@implementation RTCRtpFragmentationHeader (Private) - -- (instancetype)initWithNativeFragmentationHeader: - (const webrtc::RTPFragmentationHeader *)fragmentationHeader { - if (self = [super init]) { - if (fragmentationHeader) { - int count = fragmentationHeader->fragmentationVectorSize; - NSMutableArray *offsets = [NSMutableArray array]; - NSMutableArray *lengths = [NSMutableArray array]; - NSMutableArray *timeDiffs = [NSMutableArray array]; - NSMutableArray *plTypes = [NSMutableArray array]; - for (int i = 0; i < count; ++i) { - [offsets addObject:@(fragmentationHeader->fragmentationOffset[i])]; - [lengths addObject:@(fragmentationHeader->fragmentationLength[i])]; - [timeDiffs addObject:@(0)]; - [plTypes addObject:@(0)]; - } - self.fragmentationOffset = [offsets copy]; - self.fragmentationLength = [lengths copy]; - self.fragmentationTimeDiff = [timeDiffs copy]; - self.fragmentationPlType = [plTypes copy]; - } - } - - return self; -} - -- (std::unique_ptr)createNativeFragmentationHeader { - auto fragmentationHeader = - std::unique_ptr(new webrtc::RTPFragmentationHeader); - fragmentationHeader->VerifyAndAllocateFragmentationHeader(self.fragmentationOffset.count); - for (NSUInteger i = 0; i < self.fragmentationOffset.count; ++i) { - fragmentationHeader->fragmentationOffset[i] = (size_t)self.fragmentationOffset[i].unsignedIntValue; - fragmentationHeader->fragmentationLength[i] = (size_t)self.fragmentationLength[i].unsignedIntValue; - } - - return fragmentationHeader; -} - -@end diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h index 8a2a2311e4..6255847fb9 100644 --- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtpHeaderExtension () 
+@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension) +() -/** Returns the equivalent native RtpExtension structure. */ -@property(nonatomic, readonly) webrtc::RtpExtension nativeParameters; + /** Returns the equivalent native RtpExtension structure. */ + @property(nonatomic, readonly) webrtc::RtpExtension nativeParameters; /** Initialize the object with a native RtpExtension structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h index 32114499ce..15be5af56c 100644 --- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h +++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCRtpHeaderExtension : NSObject +@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension) : NSObject /** The URI of the RTP header extension, as defined in RFC5285. */ @property(nonatomic, readonly, copy) NSString *uri; diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm index afc47868fe..a19228e629 100644 --- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm +++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCRtpHeaderExtension +@implementation RTC_OBJC_TYPE (RTCRtpHeaderExtension) @synthesize uri = _uri; @synthesize id = _id; diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h index a88ccfa75e..369475a81d 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtpParameters () +@interface RTC_OBJC_TYPE (RTCRtpParameters) +() -/** Returns the equivalent native RtpParameters structure. 
*/ -@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters; + /** Returns the equivalent native RtpParameters structure. */ + @property(nonatomic, readonly) webrtc::RtpParameters nativeParameters; /** Initialize the object with a native RtpParameters structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtpParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.h b/sdk/objc/api/peerconnection/RTCRtpParameters.h index 8ee8d712e0..fff6a85886 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.h @@ -27,22 +27,23 @@ typedef NS_ENUM(NSInteger, RTCDegradationPreference) { }; RTC_OBJC_EXPORT -@interface RTCRtpParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtpParameters) : NSObject /** A unique identifier for the last set of parameters applied. */ @property(nonatomic, copy) NSString *transactionId; /** Parameters used for RTCP. */ -@property(nonatomic, readonly, copy) RTCRtcpParameters *rtcp; +@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCRtcpParameters) * rtcp; /** An array containing parameters for RTP header extensions. */ -@property(nonatomic, readonly, copy) NSArray *headerExtensions; +@property(nonatomic, readonly, copy) + NSArray *headerExtensions; /** The currently active encodings in the order of preference. */ -@property(nonatomic, copy) NSArray *encodings; +@property(nonatomic, copy) NSArray *encodings; /** The negotiated set of send codecs in order of preference. */ -@property(nonatomic, copy) NSArray *codecs; +@property(nonatomic, copy) NSArray *codecs; /** * Degradation preference in case of CPU adaptation or constrained bandwidth. 
diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.mm b/sdk/objc/api/peerconnection/RTCRtpParameters.mm index d70f7da323..2236b9aa36 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.mm @@ -16,7 +16,7 @@ #import "RTCRtpHeaderExtension+Private.h" #import "helpers/NSString+StdString.h" -@implementation RTCRtpParameters +@implementation RTC_OBJC_TYPE (RTCRtpParameters) @synthesize transactionId = _transactionId; @synthesize rtcp = _rtcp; @@ -33,32 +33,33 @@ - (instancetype)initWithNativeParameters: (const webrtc::RtpParameters &)nativeParameters { if (self = [self init]) { _transactionId = [NSString stringForStdString:nativeParameters.transaction_id]; - _rtcp = [[RTCRtcpParameters alloc] initWithNativeParameters:nativeParameters.rtcp]; + _rtcp = + [[RTC_OBJC_TYPE(RTCRtcpParameters) alloc] initWithNativeParameters:nativeParameters.rtcp]; NSMutableArray *headerExtensions = [[NSMutableArray alloc] init]; for (const auto &headerExtension : nativeParameters.header_extensions) { - [headerExtensions - addObject:[[RTCRtpHeaderExtension alloc] initWithNativeParameters:headerExtension]]; + [headerExtensions addObject:[[RTC_OBJC_TYPE(RTCRtpHeaderExtension) alloc] + initWithNativeParameters:headerExtension]]; } _headerExtensions = headerExtensions; NSMutableArray *encodings = [[NSMutableArray alloc] init]; for (const auto &encoding : nativeParameters.encodings) { - [encodings addObject:[[RTCRtpEncodingParameters alloc] + [encodings addObject:[[RTC_OBJC_TYPE(RTCRtpEncodingParameters) alloc] initWithNativeParameters:encoding]]; } _encodings = encodings; NSMutableArray *codecs = [[NSMutableArray alloc] init]; for (const auto &codec : nativeParameters.codecs) { - [codecs addObject:[[RTCRtpCodecParameters alloc] - initWithNativeParameters:codec]]; + [codecs + addObject:[[RTC_OBJC_TYPE(RTCRtpCodecParameters) alloc] initWithNativeParameters:codec]]; } _codecs = codecs; - _degradationPreference = @([RTCRtpParameters + 
_degradationPreference = [RTC_OBJC_TYPE(RTCRtpParameters) degradationPreferenceFromNativeDegradationPreference:nativeParameters - .degradation_preference]); + .degradation_preference]; } return self; } @@ -67,17 +68,17 @@ - (instancetype)initWithNativeParameters: webrtc::RtpParameters parameters; parameters.transaction_id = [NSString stdStringForString:_transactionId]; parameters.rtcp = [_rtcp nativeParameters]; - for (RTCRtpHeaderExtension *headerExtension in _headerExtensions) { + for (RTC_OBJC_TYPE(RTCRtpHeaderExtension) * headerExtension in _headerExtensions) { parameters.header_extensions.push_back(headerExtension.nativeParameters); } - for (RTCRtpEncodingParameters *encoding in _encodings) { + for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in _encodings) { parameters.encodings.push_back(encoding.nativeParameters); } - for (RTCRtpCodecParameters *codec in _codecs) { + for (RTC_OBJC_TYPE(RTCRtpCodecParameters) * codec in _codecs) { parameters.codecs.push_back(codec.nativeParameters); } if (_degradationPreference) { - parameters.degradation_preference = [RTCRtpParameters + parameters.degradation_preference = [RTC_OBJC_TYPE(RTCRtpParameters) nativeDegradationPreferenceFromDegradationPreference:(RTCDegradationPreference) _degradationPreference.intValue]; } @@ -98,17 +99,21 @@ - (instancetype)initWithNativeParameters: } } -+ (RTCDegradationPreference)degradationPreferenceFromNativeDegradationPreference: - (webrtc::DegradationPreference)nativeDegradationPreference { - switch (nativeDegradationPreference) { ++ (NSNumber *)degradationPreferenceFromNativeDegradationPreference: + (absl::optional)nativeDegradationPreference { + if (!nativeDegradationPreference.has_value()) { + return nil; + } + + switch (*nativeDegradationPreference) { case webrtc::DegradationPreference::DISABLED: - return RTCDegradationPreferenceDisabled; + return @(RTCDegradationPreferenceDisabled); case webrtc::DegradationPreference::MAINTAIN_FRAMERATE: - return 
RTCDegradationPreferenceMaintainFramerate; + return @(RTCDegradationPreferenceMaintainFramerate); case webrtc::DegradationPreference::MAINTAIN_RESOLUTION: - return RTCDegradationPreferenceMaintainResolution; + return @(RTCDegradationPreferenceMaintainResolution); case webrtc::DegradationPreference::BALANCED: - return RTCDegradationPreferenceBalanced; + return @(RTCDegradationPreferenceBalanced); } } diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h index e085529527..c15ce70079 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h @@ -18,13 +18,14 @@ NS_ASSUME_NONNULL_BEGIN /** * This class extension exposes methods that work directly with injectable C++ components. */ -@interface RTCRtpReceiver () +@interface RTC_OBJC_TYPE (RTCRtpReceiver) +() -/** Sets a user defined frame decryptor that will decrypt the entire frame. - * This will decrypt the entire frame using the user provided decryption - * mechanism regardless of whether SRTP is enabled or not. - */ -- (void)setFrameDecryptor:(rtc::scoped_refptr)frameDecryptor; + /** Sets a user defined frame decryptor that will decrypt the entire frame. + * This will decrypt the entire frame using the user provided decryption + * mechanism regardless of whether SRTP is enabled or not. 
+ */ + - (void)setFrameDecryptor : (rtc::scoped_refptr)frameDecryptor; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h index 6f56739f0b..6aed0b4bc5 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h @@ -14,28 +14,30 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); namespace webrtc { class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface { public: - RtpReceiverDelegateAdapter(RTCRtpReceiver* receiver); + RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver); void OnFirstPacketReceived(cricket::MediaType media_type) override; private: - __weak RTCRtpReceiver* receiver_; + __weak RTC_OBJC_TYPE(RTCRtpReceiver) * receiver_; }; } // namespace webrtc -@interface RTCRtpReceiver () +@interface RTC_OBJC_TYPE (RTCRtpReceiver) +() -@property(nonatomic, readonly) rtc::scoped_refptr nativeRtpReceiver; + @property(nonatomic, + readonly) rtc::scoped_refptr nativeRtpReceiver; /** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpReceiver:(rtc::scoped_refptr)nativeRtpReceiver NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.h b/sdk/objc/api/peerconnection/RTCRtpReceiver.h index 7a7dacea2b..1e407fd71b 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.h @@ -21,35 +21,39 @@ typedef NS_ENUM(NSInteger, RTCRtpMediaType) { RTCRtpMediaTypeAudio, RTCRtpMediaTypeVideo, RTCRtpMediaTypeData, + RTCRtpMediaTypeUnsupported, }; -@class RTCRtpReceiver; +@class RTC_OBJC_TYPE(RTCRtpReceiver); RTC_OBJC_EXPORT -@protocol RTCRtpReceiverDelegate - -/** Called when the first RTP packet is received. 
- * - * Note: Currently if there are multiple RtpReceivers of the same media type, - * they will all call OnFirstPacketReceived at once. - * - * For example, if we create three audio receivers, A/B/C, they will listen to - * the same signal from the underneath network layer. Whenever the first audio packet - * is received, the underneath signal will be fired. All the receivers A/B/C will be - * notified and the callback of the receiver's delegate will be called. - * - * The process is the same for video receivers. - */ -- (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver - didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType; +@protocol RTC_OBJC_TYPE +(RTCRtpReceiverDelegate) + + /** Called when the first RTP packet is received. + * + * Note: Currently if there are multiple RtpReceivers of the same media type, + * they will all call OnFirstPacketReceived at once. + * + * For example, if we create three audio receivers, A/B/C, they will listen to + * the same signal from the underneath network layer. Whenever the first audio packet + * is received, the underneath signal will be fired. All the receivers A/B/C will be + * notified and the callback of the receiver's delegate will be called. + * + * The process is the same for video receivers. + */ + - (void)rtpReceiver + : (RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver didReceiveFirstPacketForMediaType + : (RTCRtpMediaType)mediaType; @end RTC_OBJC_EXPORT -@protocol RTCRtpReceiver +@protocol RTC_OBJC_TYPE +(RTCRtpReceiver) -/** A unique identifier for this receiver. */ -@property(nonatomic, readonly) NSString *receiverId; + /** A unique identifier for this receiver. */ + @property(nonatomic, readonly) NSString *receiverId; /** The currently active RTCRtpParameters, as defined in * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. @@ -58,22 +62,22 @@ RTC_OBJC_EXPORT * but this API also applies them to receivers, similar to ORTC: * http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 
*/ -@property(nonatomic, readonly) RTCRtpParameters *parameters; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpParameters) * parameters; /** The RTCMediaStreamTrack associated with the receiver. * Note: reading this property returns a new instance of * RTCMediaStreamTrack. Use isEqual: instead of == to compare * RTCMediaStreamTrack instances. */ -@property(nonatomic, readonly, nullable) RTCMediaStreamTrack *track; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track; /** The delegate for this RtpReceiver. */ -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; @end RTC_OBJC_EXPORT -@interface RTCRtpReceiver : NSObject +@interface RTC_OBJC_TYPE (RTCRtpReceiver) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm index deeb4cb21b..dd58f929d1 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm @@ -20,8 +20,7 @@ namespace webrtc { -RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter( - RTCRtpReceiver *receiver) { +RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver) { RTC_CHECK(receiver); receiver_ = receiver; } @@ -29,15 +28,15 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived( cricket::MediaType media_type) { RTCRtpMediaType packet_media_type = - [RTCRtpReceiver mediaTypeForNativeMediaType:media_type]; - RTCRtpReceiver *receiver = receiver_; + [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:media_type]; + RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = receiver_; [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type]; } } // namespace webrtc -@implementation RTCRtpReceiver { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCRtpReceiver) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr 
_nativeRtpReceiver; std::unique_ptr _observer; } @@ -48,23 +47,24 @@ - (NSString *)receiverId { return [NSString stringForStdString:_nativeRtpReceiver->id()]; } -- (RTCRtpParameters *)parameters { - return [[RTCRtpParameters alloc] +- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters { + return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc] initWithNativeParameters:_nativeRtpReceiver->GetParameters()]; } -- (nullable RTCMediaStreamTrack *)track { +- (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { rtc::scoped_refptr nativeTrack( _nativeRtpReceiver->track()); if (nativeTrack) { - return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory]; + return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack + factory:_factory]; } return nil; } - (NSString *)description { - return [NSString stringWithFormat:@"RTCRtpReceiver {\n receiverId: %@\n}", - self.receiverId]; + return [NSString + stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpReceiver) {\n receiverId: %@\n}", self.receiverId]; } - (void)dealloc { @@ -83,7 +83,7 @@ - (BOOL)isEqual:(id)object { if (![object isMemberOfClass:[self class]]) { return NO; } - RTCRtpReceiver *receiver = (RTCRtpReceiver *)object; + RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = (RTC_OBJC_TYPE(RTCRtpReceiver) *)object; return _nativeRtpReceiver == receiver.nativeRtpReceiver; } @@ -103,14 +103,13 @@ - (void)setFrameDecryptor:(rtc::scoped_refptr)f return _nativeRtpReceiver; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpReceiver: (rtc::scoped_refptr)nativeRtpReceiver { if (self = [super init]) { _factory = factory; _nativeRtpReceiver = nativeRtpReceiver; - RTCLogInfo( - @"RTCRtpReceiver(%p): created receiver: %@", self, self.description); + RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpReceiver)(%p): created receiver: %@", self, self.description); _observer.reset(new webrtc::RtpReceiverDelegateAdapter(self)); 
_nativeRtpReceiver->SetObserver(_observer.get()); } @@ -126,6 +125,10 @@ + (RTCRtpMediaType)mediaTypeForNativeMediaType: return RTCRtpMediaTypeVideo; case cricket::MEDIA_TYPE_DATA: return RTCRtpMediaTypeData; + case cricket::MEDIA_TYPE_SCREEN: + return RTCRtpMediaTypeVideo; + case cricket::MEDIA_TYPE_UNSUPPORTED: + return RTCRtpMediaTypeUnsupported; } } @@ -137,6 +140,8 @@ + (RTCRtpMediaType)mediaTypeForNativeMediaType: return cricket::MEDIA_TYPE_VIDEO; case RTCRtpMediaTypeData: return cricket::MEDIA_TYPE_DATA; + case RTCRtpMediaTypeUnsupported: + return cricket::MEDIA_TYPE_UNSUPPORTED; } } @@ -148,6 +153,8 @@ + (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType { return @"VIDEO"; case RTCRtpMediaTypeData: return @"DATA"; + case RTCRtpMediaTypeUnsupported: + return @"UNSUPPORTED"; } } diff --git a/sdk/objc/api/peerconnection/RTCRtpSender+Native.h b/sdk/objc/api/peerconnection/RTCRtpSender+Native.h index 89a691cd54..249d5c5e09 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender+Native.h +++ b/sdk/objc/api/peerconnection/RTCRtpSender+Native.h @@ -18,14 +18,15 @@ NS_ASSUME_NONNULL_BEGIN /** * This class extension exposes methods that work directly with injectable C++ components. */ -@interface RTCRtpSender () +@interface RTC_OBJC_TYPE (RTCRtpSender) +() -/** Sets a defined frame encryptor that will encrypt the entire frame - * before it is sent across the network. This will encrypt the entire frame - * using the user provided encryption mechanism regardless of whether SRTP is - * enabled or not. - */ -- (void)setFrameEncryptor:(rtc::scoped_refptr)frameEncryptor; + /** Sets a defined frame encryptor that will encrypt the entire frame + * before it is sent across the network. This will encrypt the entire frame + * using the user provided encryption mechanism regardless of whether SRTP is + * enabled or not. 
+ */ + - (void)setFrameEncryptor : (rtc::scoped_refptr)frameEncryptor; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpSender+Private.h b/sdk/objc/api/peerconnection/RTCRtpSender+Private.h index 389b833ffa..6fdb42bb22 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpSender+Private.h @@ -14,14 +14,15 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTCRtpSender () +@interface RTC_OBJC_TYPE (RTCRtpSender) +() -@property(nonatomic, readonly) rtc::scoped_refptr nativeRtpSender; + @property(nonatomic, readonly) rtc::scoped_refptr nativeRtpSender; /** Initialize an RTCRtpSender with a native RtpSenderInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpSender:(rtc::scoped_refptr)nativeRtpSender NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCRtpSender.h b/sdk/objc/api/peerconnection/RTCRtpSender.h index c03b4cc88c..fcdf199869 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender.h +++ b/sdk/objc/api/peerconnection/RTCRtpSender.h @@ -18,7 +18,8 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@protocol RTCRtpSender +@protocol RTC_OBJC_TYPE +(RTCRtpSender) /** A unique identifier for this sender. */ @property(nonatomic, readonly) NSString *senderId; @@ -26,25 +27,25 @@ RTC_OBJC_EXPORT /** The currently active RTCRtpParameters, as defined in * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. */ -@property(nonatomic, copy) RTCRtpParameters *parameters; +@property(nonatomic, copy) RTC_OBJC_TYPE(RTCRtpParameters) * parameters; /** The RTCMediaStreamTrack associated with the sender. * Note: reading this property returns a new instance of * RTCMediaStreamTrack. Use isEqual: instead of == to compare * RTCMediaStreamTrack instances. 
*/ -@property(nonatomic, copy, nullable) RTCMediaStreamTrack *track; +@property(nonatomic, copy, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track; /** IDs of streams associated with the RTP sender */ @property(nonatomic, copy) NSArray *streamIds; /** The RTCDtmfSender accociated with the RTP sender. */ -@property(nonatomic, readonly, nullable) id dtmfSender; +@property(nonatomic, readonly, nullable) id dtmfSender; @end RTC_OBJC_EXPORT -@interface RTCRtpSender : NSObject +@interface RTC_OBJC_TYPE (RTCRtpSender) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCRtpSender.mm b/sdk/objc/api/peerconnection/RTCRtpSender.mm index d29265102b..1ca9360ab8 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender.mm +++ b/sdk/objc/api/peerconnection/RTCRtpSender.mm @@ -19,8 +19,8 @@ #include "api/media_stream_interface.h" -@implementation RTCRtpSender { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCRtpSender) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeRtpSender; } @@ -30,30 +30,30 @@ - (NSString *)senderId { return [NSString stringForStdString:_nativeRtpSender->id()]; } -- (RTCRtpParameters *)parameters { - return [[RTCRtpParameters alloc] +- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters { + return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc] initWithNativeParameters:_nativeRtpSender->GetParameters()]; } -- (void)setParameters:(RTCRtpParameters *)parameters { +- (void)setParameters:(RTC_OBJC_TYPE(RTCRtpParameters) *)parameters { if (!_nativeRtpSender->SetParameters(parameters.nativeParameters).ok()) { - RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self, - parameters); + RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set parameters: %@", self, parameters); } } -- (RTCMediaStreamTrack *)track { +- (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { rtc::scoped_refptr nativeTrack( _nativeRtpSender->track()); if (nativeTrack) { - return 
[RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory]; + return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack + factory:_factory]; } return nil; } -- (void)setTrack:(RTCMediaStreamTrack *)track { +- (void)setTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { if (!_nativeRtpSender->SetTrack(track.nativeTrack)) { - RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track); + RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set track %@", self, track); } } @@ -75,8 +75,8 @@ - (void)setStreamIds:(NSArray *)streamIds { } - (NSString *)description { - return [NSString stringWithFormat:@"RTCRtpSender {\n senderId: %@\n}", - self.senderId]; + return [NSString + stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpSender) {\n senderId: %@\n}", self.senderId]; } - (BOOL)isEqual:(id)object { @@ -89,7 +89,7 @@ - (BOOL)isEqual:(id)object { if (![object isMemberOfClass:[self class]]) { return NO; } - RTCRtpSender *sender = (RTCRtpSender *)object; + RTC_OBJC_TYPE(RTCRtpSender) *sender = (RTC_OBJC_TYPE(RTCRtpSender) *)object; return _nativeRtpSender == sender.nativeRtpSender; } @@ -109,7 +109,7 @@ - (void)setFrameEncryptor:(rtc::scoped_refptr)f return _nativeRtpSender; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpSender:(rtc::scoped_refptr)nativeRtpSender { NSParameterAssert(factory); NSParameterAssert(nativeRtpSender); @@ -119,9 +119,10 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory rtc::scoped_refptr nativeDtmfSender( _nativeRtpSender->GetDtmfSender()); if (nativeDtmfSender) { - _dtmfSender = [[RTCDtmfSender alloc] initWithNativeDtmfSender:nativeDtmfSender]; + _dtmfSender = + [[RTC_OBJC_TYPE(RTCDtmfSender) alloc] initWithNativeDtmfSender:nativeDtmfSender]; } - RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description); + RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): 
created sender: %@", self, self.description); } return self; } diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h index d7f6b585e4..65d45fb88e 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h @@ -14,21 +14,23 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTCRtpTransceiverInit () +@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) +() -@property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit; + @property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit; @end -@interface RTCRtpTransceiver () +@interface RTC_OBJC_TYPE (RTCRtpTransceiver) +() -@property(nonatomic, readonly) rtc::scoped_refptr - nativeRtpTransceiver; + @property(nonatomic, + readonly) rtc::scoped_refptr nativeRtpTransceiver; /** Initialize an RTCRtpTransceiver with a native RtpTransceiverInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpTransceiver: (rtc::scoped_refptr)nativeRtpTransceiver NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index 8ef3fc1d42..fd59013639 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -16,12 +16,15 @@ NS_ASSUME_NONNULL_BEGIN +extern NSString *const kRTCRtpTransceiverErrorDomain; + /** https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection */ typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) { RTCRtpTransceiverDirectionSendRecv, RTCRtpTransceiverDirectionSendOnly, RTCRtpTransceiverDirectionRecvOnly, RTCRtpTransceiverDirectionInactive, + RTCRtpTransceiverDirectionStopped }; /** Structure for initializing an RTCRtpTransceiver in a call to @@ -29,7 +32,7 @@ typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) { * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit */ RTC_OBJC_EXPORT -@interface RTCRtpTransceiverInit : NSObject +@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) : NSObject /** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */ @property(nonatomic) RTCRtpTransceiverDirection direction; @@ -38,14 +41,14 @@ RTC_OBJC_EXPORT @property(nonatomic) NSArray *streamIds; /** TODO(bugs.webrtc.org/7600): Not implemented. */ -@property(nonatomic) NSArray *sendEncodings; +@property(nonatomic) NSArray *sendEncodings; @end -@class RTCRtpTransceiver; +@class RTC_OBJC_TYPE(RTCRtpTransceiver); -/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the WebRTC - * specification. A transceiver represents a combination of an RTCRtpSender +/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the + * WebRTC specification. 
A transceiver represents a combination of an RTCRtpSender * and an RTCRtpReceiver that share a common mid. As defined in JSEP, an * RTCRtpTransceiver is said to be associated with a media description if its * mid property is non-nil; otherwise, it is said to be disassociated. @@ -58,12 +61,13 @@ RTC_OBJC_EXPORT * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver */ RTC_OBJC_EXPORT -@protocol RTCRtpTransceiver +@protocol RTC_OBJC_TYPE +(RTCRtpTransceiver) -/** Media type of the transceiver. The sender and receiver will also have this - * type. - */ -@property(nonatomic, readonly) RTCRtpMediaType mediaType; + /** Media type of the transceiver. The sender and receiver will also have this + * type. + */ + @property(nonatomic, readonly) RTCRtpMediaType mediaType; /** The mid attribute is the mid negotiated and present in the local and * remote descriptions. Before negotiation is complete, the mid value may be @@ -77,14 +81,14 @@ RTC_OBJC_EXPORT * present, regardless of the direction of media. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender */ -@property(nonatomic, readonly) RTCRtpSender *sender; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpSender) * sender; /** The receiver attribute exposes the RTCRtpReceiver corresponding to the RTP * media that may be received with the transceiver's mid. The receiver is * always present, regardless of the direction of media. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver */ -@property(nonatomic, readonly) RTCRtpReceiver *receiver; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpReceiver) * receiver; /** The isStopped attribute indicates that the sender of this transceiver will * no longer send, and that the receiver will no longer receive. It is true if @@ -96,12 +100,9 @@ RTC_OBJC_EXPORT /** The direction attribute indicates the preferred direction of this * transceiver, which will be used in calls to createOffer and createAnswer. 
- * An update of directionality does not take effect immediately. Instead, - * future calls to createOffer and createAnswer mark the corresponding media - * descriptions as sendrecv, sendonly, recvonly, or inactive. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction */ -@property(nonatomic) RTCRtpTransceiverDirection direction; +@property(nonatomic, readonly) RTCRtpTransceiverDirection direction; /** The currentDirection attribute indicates the current direction negotiated * for this transceiver. If this transceiver has never been represented in an @@ -115,12 +116,19 @@ RTC_OBJC_EXPORT * this transceiver will no longer send, the receiver will no longer receive. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop */ -- (void)stop; +- (void)stopInternal; + +/** An update of directionality does not take effect immediately. Instead, + * future calls to createOffer and createAnswer mark the corresponding media + * descriptions as sendrecv, sendonly, recvonly, or inactive. 
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction + */ +- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error; @end RTC_OBJC_EXPORT -@interface RTCRtpTransceiver : NSObject +@interface RTC_OBJC_TYPE (RTCRtpTransceiver) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index fe1ebb5c5d..ae1cf79864 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -17,7 +17,9 @@ #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" -@implementation RTCRtpTransceiverInit +NSString *const kRTCRtpTransceiverErrorDomain = @"org.webrtc.RTCRtpTranceiver"; + +@implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit) @synthesize direction = _direction; @synthesize streamIds = _streamIds; @@ -32,11 +34,12 @@ - (instancetype)init { - (webrtc::RtpTransceiverInit)nativeInit { webrtc::RtpTransceiverInit init; - init.direction = [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:_direction]; + init.direction = + [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:_direction]; for (NSString *streamId in _streamIds) { init.stream_ids.push_back([streamId UTF8String]); } - for (RTCRtpEncodingParameters *sendEncoding in _sendEncodings) { + for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * sendEncoding in _sendEncodings) { init.send_encodings.push_back(sendEncoding.nativeParameters); } return init; @@ -44,13 +47,14 @@ - (instancetype)init { @end -@implementation RTCRtpTransceiver { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCRtpTransceiver) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeRtpTransceiver; } - (RTCRtpMediaType)mediaType { - return [RTCRtpReceiver mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()]; + return [RTC_OBJC_TYPE(RTCRtpReceiver) + 
mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()]; } - (NSString *)mid { @@ -69,18 +73,27 @@ - (BOOL)isStopped { } - (RTCRtpTransceiverDirection)direction { - return [RTCRtpTransceiver + return [RTC_OBJC_TYPE(RTCRtpTransceiver) rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver->direction()]; } -- (void)setDirection:(RTCRtpTransceiverDirection)direction { - _nativeRtpTransceiver->SetDirection( - [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:direction]); +- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error { + webrtc::RTCError nativeError = _nativeRtpTransceiver->SetDirectionWithError( + [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:direction]); + + if (!nativeError.ok() && error) { + *error = [NSError errorWithDomain:kRTCRtpTransceiverErrorDomain + code:static_cast(nativeError.type()) + userInfo:@{ + @"message" : [NSString stringWithCString:nativeError.message() + encoding:NSUTF8StringEncoding] + }]; + } } - (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut { if (_nativeRtpTransceiver->current_direction()) { - *currentDirectionOut = [RTCRtpTransceiver + *currentDirectionOut = [RTC_OBJC_TYPE(RTCRtpTransceiver) rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver->current_direction()]; return YES; } else { @@ -88,13 +101,15 @@ - (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut { } } -- (void)stop { - _nativeRtpTransceiver->Stop(); +- (void)stopInternal { + _nativeRtpTransceiver->StopInternal(); } - (NSString *)description { return [NSString - stringWithFormat:@"RTCRtpTransceiver {\n sender: %@\n receiver: %@\n}", _sender, _receiver]; + stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpTransceiver) {\n sender: %@\n receiver: %@\n}", + _sender, + _receiver]; } - (BOOL)isEqual:(id)object { @@ -107,7 +122,7 @@ - (BOOL)isEqual:(id)object { if (![object isMemberOfClass:[self class]]) { return NO; } - RTCRtpTransceiver 
*transceiver = (RTCRtpTransceiver *)object; + RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = (RTC_OBJC_TYPE(RTCRtpTransceiver) *)object; return _nativeRtpTransceiver == transceiver.nativeRtpTransceiver; } @@ -121,7 +136,7 @@ - (NSUInteger)hash { return _nativeRtpTransceiver; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpTransceiver: (rtc::scoped_refptr)nativeRtpTransceiver { NSParameterAssert(factory); @@ -129,11 +144,13 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory if (self = [super init]) { _factory = factory; _nativeRtpTransceiver = nativeRtpTransceiver; - _sender = [[RTCRtpSender alloc] initWithFactory:_factory - nativeRtpSender:nativeRtpTransceiver->sender()]; - _receiver = [[RTCRtpReceiver alloc] initWithFactory:_factory - nativeRtpReceiver:nativeRtpTransceiver->receiver()]; - RTCLogInfo(@"RTCRtpTransceiver(%p): created transceiver: %@", self, self.description); + _sender = [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:_factory + nativeRtpSender:nativeRtpTransceiver->sender()]; + _receiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:_factory + nativeRtpReceiver:nativeRtpTransceiver->receiver()]; + RTCLogInfo( + @"RTC_OBJC_TYPE(RTCRtpTransceiver)(%p): created transceiver: %@", self, self.description); } return self; } @@ -149,6 +166,8 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return webrtc::RtpTransceiverDirection::kRecvOnly; case RTCRtpTransceiverDirectionInactive: return webrtc::RtpTransceiverDirection::kInactive; + case RTCRtpTransceiverDirectionStopped: + return webrtc::RtpTransceiverDirection::kStopped; } } @@ -163,6 +182,8 @@ + (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection: return RTCRtpTransceiverDirectionRecvOnly; case webrtc::RtpTransceiverDirection::kInactive: return RTCRtpTransceiverDirectionInactive; + case 
webrtc::RtpTransceiverDirection::kStopped: + return RTCRtpTransceiverDirectionStopped; } } diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h index cc255cd582..0f0a06a887 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h @@ -14,14 +14,15 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCSessionDescription () - -/** - * The native SessionDescriptionInterface representation of this - * RTCSessionDescription object. This is needed to pass to the underlying C++ - * APIs. - */ -@property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription; +@interface RTC_OBJC_TYPE (RTCSessionDescription) +() + + /** + * The native SessionDescriptionInterface representation of this + * RTCSessionDescription object. This is needed to pass to the underlying C++ + * APIs. + */ + @property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription; /** * Initialize an RTCSessionDescription from a native diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.h b/sdk/objc/api/peerconnection/RTCSessionDescription.h index b9bcab1a46..8a9479d5cf 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.h @@ -20,12 +20,13 @@ typedef NS_ENUM(NSInteger, RTCSdpType) { RTCSdpTypeOffer, RTCSdpTypePrAnswer, RTCSdpTypeAnswer, + RTCSdpTypeRollback, }; NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCSessionDescription : NSObject +@interface RTC_OBJC_TYPE (RTCSessionDescription) : NSObject /** The type of session description. 
*/ @property(nonatomic, readonly) RTCSdpType type; diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/sdk/objc/api/peerconnection/RTCSessionDescription.mm index 21e5e42f66..9fd97fee23 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.mm +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.mm @@ -15,7 +15,7 @@ #include "rtc_base/checks.h" -@implementation RTCSessionDescription +@implementation RTC_OBJC_TYPE (RTCSessionDescription) @synthesize type = _type; @synthesize sdp = _sdp; @@ -31,7 +31,6 @@ + (RTCSdpType)typeForString:(NSString *)string { } - (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp { - NSParameterAssert(sdp.length); if (self = [super init]) { _type = type; _sdp = [sdp copy]; @@ -40,7 +39,7 @@ - (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp { } - (NSString *)description { - return [NSString stringWithFormat:@"RTCSessionDescription:\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCSessionDescription):\n%@\n%@", [[self class] stringForType:_type], _sdp]; } @@ -83,6 +82,8 @@ - (instancetype)initWithNativeDescription: return webrtc::SessionDescriptionInterface::kPrAnswer; case RTCSdpTypeAnswer: return webrtc::SessionDescriptionInterface::kAnswer; + case RTCSdpTypeRollback: + return webrtc::SessionDescriptionInterface::kRollback; } } @@ -93,6 +94,8 @@ + (RTCSdpType)typeForStdString:(const std::string &)string { return RTCSdpTypePrAnswer; } else if (string == webrtc::SessionDescriptionInterface::kAnswer) { return RTCSdpTypeAnswer; + } else if (string == webrtc::SessionDescriptionInterface::kRollback) { + return RTCSdpTypeRollback; } else { RTC_NOTREACHED(); return RTCSdpTypeOffer; diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h b/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h index 0220d186b7..47c5241d51 100644 --- a/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h +++ b/sdk/objc/api/peerconnection/RTCStatisticsReport+Private.h @@ 
-12,8 +12,8 @@ #include "api/stats/rtc_stats_report.h" -@interface RTCStatisticsReport (Private) +@interface RTC_OBJC_TYPE (RTCStatisticsReport) (Private) -- (instancetype)initWithReport:(const webrtc::RTCStatsReport &)report; +- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report; @end diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport.h b/sdk/objc/api/peerconnection/RTCStatisticsReport.h index 6fbd59b112..38d93e8771 100644 --- a/sdk/objc/api/peerconnection/RTCStatisticsReport.h +++ b/sdk/objc/api/peerconnection/RTCStatisticsReport.h @@ -10,25 +10,29 @@ #import -@class RTCStatistics; +#import "RTCMacros.h" + +@class RTC_OBJC_TYPE(RTCStatistics); NS_ASSUME_NONNULL_BEGIN /** A statistics report. Encapsulates a number of RTCStatistics objects. */ -@interface RTCStatisticsReport : NSObject +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCStatisticsReport) : NSObject /** The timestamp of the report in microseconds since 1970-01-01T00:00:00Z. */ @property(nonatomic, readonly) CFTimeInterval timestamp_us; /** RTCStatistics objects by id. */ -@property(nonatomic, readonly) NSDictionary *statistics; +@property(nonatomic, readonly) NSDictionary *statistics; - (instancetype)init NS_UNAVAILABLE; @end /** A part of a report (a subreport) covering a certain area. */ -@interface RTCStatistics : NSObject +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCStatistics) : NSObject /** The id of this subreport, e.g. "RTCMediaStreamTrack_receiver_2". 
*/ @property(nonatomic, readonly) NSString *id; diff --git a/sdk/objc/api/peerconnection/RTCStatisticsReport.mm b/sdk/objc/api/peerconnection/RTCStatisticsReport.mm index 526976707d..1dd72772ed 100644 --- a/sdk/objc/api/peerconnection/RTCStatisticsReport.mm +++ b/sdk/objc/api/peerconnection/RTCStatisticsReport.mm @@ -37,7 +37,7 @@ case RTCStatsMemberInterface::kSequenceBool: { std::vector sequence = *member->cast_to>>(); NSMutableArray *array = [NSMutableArray arrayWithCapacity:sequence.size()]; - for (const auto &item : sequence) { + for (auto item : sequence) { [array addObject:[NSNumber numberWithBool:item]]; } return [array copy]; @@ -100,7 +100,7 @@ } } // namespace webrtc -@implementation RTCStatistics +@implementation RTC_OBJC_TYPE (RTCStatistics) @synthesize id = _id; @synthesize timestamp_us = _timestamp_us; @@ -139,7 +139,7 @@ - (NSString *)description { @end -@implementation RTCStatisticsReport +@implementation RTC_OBJC_TYPE (RTCStatisticsReport) @synthesize timestamp_us = _timestamp_us; @synthesize statistics = _statistics; @@ -151,16 +151,17 @@ - (NSString *)description { @end -@implementation RTCStatisticsReport (Private) +@implementation RTC_OBJC_TYPE (RTCStatisticsReport) (Private) -- (instancetype)initWithReport:(const webrtc::RTCStatsReport &)report { +- (instancetype)initWithReport : (const webrtc::RTCStatsReport &)report { if (self = [super init]) { _timestamp_us = report.timestamp_us(); NSMutableDictionary *statisticsById = [NSMutableDictionary dictionaryWithCapacity:report.size()]; for (const auto &stat : report) { - RTCStatistics *statistics = [[RTCStatistics alloc] initWithStatistics:stat]; + RTC_OBJC_TYPE(RTCStatistics) *statistics = + [[RTC_OBJC_TYPE(RTCStatistics) alloc] initWithStatistics:stat]; statisticsById[statistics.id] = statistics; } _statistics = [statisticsById copy]; diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h index 9c2178fb6b..5eff996c4f 100644 
--- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interface for converting to/from internal C++ formats. */ -@interface RTCVideoCodecInfo (Private) +@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) +(Private) -- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format; + - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format; - (webrtc::SdpVideoFormat)nativeSdpVideoFormat; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm index 21aacf6281..2eb8d366d2 100644 --- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm @@ -12,9 +12,10 @@ #import "helpers/NSString+StdString.h" -@implementation RTCVideoCodecInfo (Private) +@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) +(Private) -- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format { + - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format { NSMutableDictionary *params = [NSMutableDictionary dictionary]; for (auto it = format.parameters.begin(); it != format.parameters.end(); ++it) { [params setObject:[NSString stringForStdString:it->second] diff --git a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h index 5b062455bc..8323b18dc1 100644 --- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. 
*/ -@interface RTCVideoEncoderSettings (Private) +@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings) +(Private) -- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *__nullable)videoCodec; + - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *__nullable)videoCodec; - (webrtc::VideoCodec)nativeVideoCodec; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm index fe7e690517..dec3a61090 100644 --- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm @@ -12,9 +12,10 @@ #import "helpers/NSString+StdString.h" -@implementation RTCVideoEncoderSettings (Private) +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings) +(Private) -- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *)videoCodec { + - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *)videoCodec { if (self = [super init]) { if (videoCodec) { const char *codecName = CodecTypeToPayloadString(videoCodec->codecType); diff --git a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h index 1827e6b924..0504a73d65 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h @@ -17,26 +17,27 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCVideoSource () +@interface RTC_OBJC_TYPE (RTCVideoSource) +() -/** - * The VideoTrackSourceInterface object passed to this RTCVideoSource during - * construction. - */ -@property(nonatomic, readonly) rtc::scoped_refptr - nativeVideoSource; + /** + * The VideoTrackSourceInterface object passed to this RTCVideoSource during + * construction. + */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeVideoSource; /** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeVideoSource: (rtc::scoped_refptr)nativeVideoSource NS_DESIGNATED_INITIALIZER; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type NS_UNAVAILABLE; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory signalingThread:(rtc::Thread *)signalingThread workerThread:(rtc::Thread *)workerThread; diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.h b/sdk/objc/api/peerconnection/RTCVideoSource.h index ec8a45c1c2..f3a9420e64 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.h +++ b/sdk/objc/api/peerconnection/RTCVideoSource.h @@ -13,12 +13,13 @@ #import "RTCMacros.h" #import "RTCMediaSource.h" #import "RTCVideoCapturer.h" +#import "CFVideoProcessor.h" NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCVideoSource : RTCMediaSource +@interface RTC_OBJC_TYPE (RTCVideoSource) : RTC_OBJC_TYPE(RTCMediaSource) - (instancetype)init NS_UNAVAILABLE; @@ -32,6 +33,8 @@ RTC_OBJC_EXPORT */ - (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps; +- (void)setVideoProcessor:(id)processor; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm index 789c8436e5..b641877627 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.mm +++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm @@ -24,11 +24,12 @@ // TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once // RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more // info. 
-@implementation RTCVideoSource { +@implementation RTC_OBJC_TYPE (RTCVideoSource) { rtc::scoped_refptr _nativeVideoSource; + id _videoProcessor; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeVideoSource: (rtc::scoped_refptr)nativeVideoSource { RTC_DCHECK(factory); @@ -37,18 +38,19 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory nativeMediaSource:nativeVideoSource type:RTCMediaSourceTypeVideo]) { _nativeVideoSource = nativeVideoSource; + _videoProcessor = nil; } return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type { RTC_NOTREACHED(); return nil; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory signalingThread:(rtc::Thread *)signalingThread workerThread:(rtc::Thread *)workerThread { rtc::scoped_refptr objCVideoTrackSource( @@ -61,17 +63,43 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - (NSString *)description { NSString *stateString = [[self class] stringForState:self.state]; - return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString]; + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCVideoSource)( %p ): %@", self, stateString]; } -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { - getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame); +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + @synchronized (self) { + if (_videoProcessor) { + [_videoProcessor onVideoFrame:frame]; + } else { + getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame); + } 
+ } } - (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps { getObjCVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps); } +- (void)setVideoProcessor:(id)processor { + @synchronized (self) { + if (_videoProcessor) { + [_videoProcessor setVideoProcessorDelegate:nil]; + } + _videoProcessor = processor; + if (_videoProcessor) { + [_videoProcessor setVideoProcessorDelegate:self]; + } + } +} + +- (void)onProcessedVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + webrtc::ObjCVideoTrackSource *objcSource = getObjCVideoSource(_nativeVideoSource); + if (objcSource) { + objcSource->OnCapturedFrame(frame); + } +} + #pragma mark - Private - (rtc::scoped_refptr)nativeVideoSource { diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h b/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h index dd3d172820..f1a8d7e4ed 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h @@ -14,14 +14,15 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCVideoTrack () +@interface RTC_OBJC_TYPE (RTCVideoTrack) +() -/** VideoTrackInterface created or passed in at construction. */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeVideoTrack; + /** VideoTrackInterface created or passed in at construction. */ + @property(nonatomic, readonly) rtc::scoped_refptr nativeVideoTrack; /** Initialize an RTCVideoTrack with its source and an id. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCVideoSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCVideoSource) *)source trackId:(NSString *)trackId; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.h b/sdk/objc/api/peerconnection/RTCVideoTrack.h index b946889eb4..5382b7169f 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.h @@ -14,23 +14,24 @@ NS_ASSUME_NONNULL_BEGIN -@protocol RTCVideoRenderer; -@class RTCPeerConnectionFactory; -@class RTCVideoSource; +@protocol RTC_OBJC_TYPE +(RTCVideoRenderer); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@class RTC_OBJC_TYPE(RTCVideoSource); RTC_OBJC_EXPORT -@interface RTCVideoTrack : RTCMediaStreamTrack +@interface RTC_OBJC_TYPE (RTCVideoTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack) /** The video source for this video track. */ -@property(nonatomic, readonly) RTCVideoSource *source; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source; - (instancetype)init NS_UNAVAILABLE; /** Register a renderer that will render all frames received on this track. */ -- (void)addRenderer:(id)renderer; +- (void)addRenderer:(id)renderer; /** Deregister a renderer. 
*/ -- (void)removeRenderer:(id)renderer; +- (void)removeRenderer:(id)renderer; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index 77936a640b..3f38dd51a9 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -16,14 +16,14 @@ #import "api/RTCVideoRendererAdapter+Private.h" #import "helpers/NSString+StdString.h" -@implementation RTCVideoTrack { +@implementation RTC_OBJC_TYPE (RTCVideoTrack) { NSMutableArray *_adapters; } @synthesize source = _source; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCVideoSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCVideoSource) *)source trackId:(NSString *)trackId { NSParameterAssert(factory); NSParameterAssert(source); @@ -38,7 +38,7 @@ - (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack: (rtc::scoped_refptr)nativeMediaTrack type:(RTCMediaStreamTrackType)type { @@ -57,19 +57,19 @@ - (void)dealloc { } } -- (RTCVideoSource *)source { +- (RTC_OBJC_TYPE(RTCVideoSource) *)source { if (!_source) { rtc::scoped_refptr source = self.nativeVideoTrack->GetSource(); if (source) { - _source = - [[RTCVideoSource alloc] initWithFactory:self.factory nativeVideoSource:source.get()]; + _source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory + nativeVideoSource:source.get()]; } } return _source; } -- (void)addRenderer:(id)renderer { +- (void)addRenderer:(id)renderer { // Make sure we don't have this renderer yet. 
for (RTCVideoRendererAdapter *adapter in _adapters) { if (adapter.videoRenderer == renderer) { @@ -85,7 +85,7 @@ - (void)addRenderer:(id)renderer { rtc::VideoSinkWants()); } -- (void)removeRenderer:(id)renderer { +- (void)removeRenderer:(id)renderer { __block NSUInteger indexToRemove = NSNotFound; [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter, NSUInteger idx, diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h index 00786dc514..a118b25ed7 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h @@ -14,12 +14,12 @@ #import "RTCVideoDecoder.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderVP8 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderVP8) : NSObject /* This returns a VP8 decoder that can be returned from a RTCVideoDecoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp8Decoder; ++ (id)vp8Decoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm index 9750bd8bab..c150cf6d3a 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm @@ -11,15 +11,16 @@ #import +#import "RTCMacros.h" #import "RTCVideoDecoderVP8.h" #import "RTCWrappedNativeVideoDecoder.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" -@implementation RTCVideoDecoderVP8 +@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP8) -+ (id)vp8Decoder { - return [[RTCWrappedNativeVideoDecoder alloc] ++ (id)vp8Decoder { + return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc] initWithNativeDecoder:std::unique_ptr(webrtc::VP8Decoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h index b74c1ef999..b3a1743057 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h @@ -14,12 +14,12 @@ #import "RTCVideoDecoder.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderVP9 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderVP9) : NSObject /* This returns a VP9 decoder that can be returned from a RTCVideoDecoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp9Decoder; ++ (id)vp9Decoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm index 48582fedf9..a44a870850 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm @@ -11,15 +11,16 @@ #import +#import "RTCMacros.h" #import "RTCVideoDecoderVP9.h" #import "RTCWrappedNativeVideoDecoder.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" -@implementation RTCVideoDecoderVP9 +@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP9) -+ (id)vp9Decoder { - return [[RTCWrappedNativeVideoDecoder alloc] ++ (id)vp9Decoder { + return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) alloc] initWithNativeDecoder:std::unique_ptr(webrtc::VP9Decoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h index 8d87a89893..e136a5bda8 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h @@ -14,12 +14,12 @@ #import "RTCVideoEncoder.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderVP8 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderVP8) : NSObject /* This returns a VP8 encoder that can be returned from a RTCVideoEncoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp8Encoder; ++ (id)vp8Encoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm index 677f6ddf5f..d72f705813 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm @@ -11,15 +11,16 @@ #import +#import "RTCMacros.h" #import "RTCVideoEncoderVP8.h" #import "RTCWrappedNativeVideoEncoder.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" -@implementation RTCVideoEncoderVP8 +@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP8) -+ (id)vp8Encoder { - return [[RTCWrappedNativeVideoEncoder alloc] ++ (id)vp8Encoder { + return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc] initWithNativeEncoder:std::unique_ptr(webrtc::VP8Encoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h index 9efea4be2a..8f961ef337 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h @@ -14,12 +14,12 @@ #import "RTCVideoEncoder.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderVP9 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderVP9) : NSObject /* This returns a VP9 encoder that can be returned from a RTCVideoEncoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp9Encoder; ++ (id)vp9Encoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm index a5d84084d0..736051cff9 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm @@ -11,15 +11,16 @@ #import +#import "RTCMacros.h" #import "RTCVideoEncoderVP9.h" #import "RTCWrappedNativeVideoEncoder.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" -@implementation RTCVideoEncoderVP9 +@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP9) -+ (id)vp9Encoder { - return [[RTCWrappedNativeVideoEncoder alloc] ++ (id)vp9Encoder { + return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc] initWithNativeEncoder:std::unique_ptr(webrtc::VP9Encoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h index b5694c7d94..3a9b39e959 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h @@ -10,12 +10,13 @@ #import +#import "base/RTCMacros.h" #import "base/RTCVideoDecoder.h" #include "api/video_codecs/video_decoder.h" #include "media/base/codec.h" -@interface RTCWrappedNativeVideoDecoder : NSObject +@interface RTC_OBJC_TYPE (RTCWrappedNativeVideoDecoder) : NSObject - (instancetype)initWithNativeDecoder:(std::unique_ptr)decoder; diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm index dce479c890..48d09cf396 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm @@ -11,9 +11,10 @@ #import #import "RTCWrappedNativeVideoDecoder.h" +#import "base/RTCMacros.h" #import "helpers/NSString+StdString.h" -@implementation RTCWrappedNativeVideoDecoder { +@implementation RTC_OBJC_TYPE (RTCWrappedNativeVideoDecoder) { std::unique_ptr _wrappedDecoder; } @@ 
-29,7 +30,7 @@ - (instancetype)initWithNativeDecoder:(std::unique_ptr)dec return std::move(_wrappedDecoder); } -#pragma mark - RTCVideoDecoder +#pragma mark - RTC_OBJC_TYPE(RTCVideoDecoder) - (void)setCallback:(RTCVideoDecoderCallback)callback { RTC_NOTREACHED(); @@ -45,9 +46,9 @@ - (NSInteger)releaseDecoder { return 0; } -- (NSInteger)decode:(RTCEncodedImage *)encodedImage +- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage missingFrames:(BOOL)missingFrames - codecSpecificInfo:(nullable id)info + codecSpecificInfo:(nullable id)info renderTimeMs:(int64_t)renderTimeMs { RTC_NOTREACHED(); return 0; diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h index b4ef88264b..8df9ceec35 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h @@ -10,13 +10,14 @@ #import +#import "base/RTCMacros.h" #import "base/RTCVideoEncoder.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" #include "media/base/codec.h" -@interface RTCWrappedNativeVideoEncoder : NSObject +@interface RTC_OBJC_TYPE (RTCWrappedNativeVideoEncoder) : NSObject - (instancetype)initWithNativeEncoder:(std::unique_ptr)encoder; diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm index 9afd54f55f..843b6ad001 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm @@ -11,9 +11,10 @@ #import #import "RTCWrappedNativeVideoEncoder.h" +#import "base/RTCMacros.h" #import "helpers/NSString+StdString.h" -@implementation RTCWrappedNativeVideoEncoder { +@implementation RTC_OBJC_TYPE (RTCWrappedNativeVideoEncoder) { std::unique_ptr _wrappedEncoder; } @@ -29,13 +30,13 @@ - (instancetype)initWithNativeEncoder:(std::unique_ptr)enc return std::move(_wrappedEncoder); } -#pragma 
mark - RTCVideoEncoder +#pragma mark - RTC_OBJC_TYPE(RTCVideoEncoder) - (void)setCallback:(RTCVideoEncoderCallback)callback { RTC_NOTREACHED(); } -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings +- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores { RTC_NOTREACHED(); return 0; @@ -46,8 +47,8 @@ - (NSInteger)releaseEncoder { return 0; } -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(nullable id)info +- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame + codecSpecificInfo:(nullable id)info frameTypes:(NSArray *)frameTypes { RTC_NOTREACHED(); return 0; @@ -63,7 +64,7 @@ - (NSString *)implementationName { return nil; } -- (nullable RTCVideoEncoderQpThresholds *)scalingSettings { +- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { RTC_NOTREACHED(); return nil; } diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h index fad08c2453..20dc807991 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h +++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h @@ -14,7 +14,8 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCI420Buffer () { +@interface RTC_OBJC_TYPE (RTCI420Buffer) +() { @protected rtc::scoped_refptr _i420Buffer; } diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h index 9a904f5396..3afe2090a2 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h +++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN /** RTCI420Buffer implements the RTCI420Buffer protocol */ RTC_OBJC_EXPORT -@interface RTCI420Buffer : NSObject +@interface RTC_OBJC_TYPE (RTCI420Buffer) : NSObject @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm 
b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm index d9d5d15716..f82f206e91 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm +++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm @@ -17,7 +17,7 @@ #include "third_party/libyuv/include/libyuv.h" #endif -@implementation RTCI420Buffer +@implementation RTC_OBJC_TYPE (RTCI420Buffer) - (instancetype)initWithWidth:(int)width height:(int)height { if (self = [super init]) { @@ -99,7 +99,7 @@ - (const uint8_t *)dataV { return _i420Buffer->DataV(); } -- (id)toI420 { +- (id)toI420 { return self; } diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h index 6cd5110460..053a10a304 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h +++ b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h @@ -18,7 +18,7 @@ NS_ASSUME_NONNULL_BEGIN /** Mutable version of RTCI420Buffer */ RTC_OBJC_EXPORT -@interface RTCMutableI420Buffer : RTCI420Buffer +@interface RTC_OBJC_TYPE (RTCMutableI420Buffer) : RTC_OBJC_TYPE(RTCI420Buffer) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm index 5c6c1ffb64..1e669bcb9c 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm +++ b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm @@ -14,7 +14,7 @@ #include "api/video/i420_buffer.h" -@implementation RTCMutableI420Buffer +@implementation RTC_OBJC_TYPE (RTCMutableI420Buffer) - (uint8_t *)mutableDataY { return static_cast(_i420Buffer.get())->MutableDataY(); diff --git a/sdk/objc/avconf/CFAudioMixer.h b/sdk/objc/avconf/CFAudioMixer.h new file mode 100644 index 0000000000..b8f0eeb10b --- /dev/null +++ b/sdk/objc/avconf/CFAudioMixer.h @@ -0,0 +1,78 @@ +// +/* + * The MIT License (MIT) + * + * Copyright (c) 2019 Piasy + * + * Permission is hereby granted, free of charge, to any 
person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ +// + + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol CFAudioMixerDelegate +- (void)onSsrcFinished:(int32_t)ssrc; +- (void)onSsrcError:(int32_t)ssrc code:(int32_t)code; +@end + +RTC_OBJC_EXPORT +@interface CFAudioMixer : NSObject + +- (instancetype)initWithBackingTrack:(NSString*)backingTrack + captureSampleRate:(int32_t)captureSampleRate + captureChannelNum:(int32_t)captureChannelNum + frameDurationUs:(int32_t)frameDurationUs + enableMusicSyncFix:(bool)enableMusicSyncFix + waitingMixDelayFrame:(int32_t)waitingMixDelayFrame + delegate:(id)delegate; + +- (void)startMixer; + +- (void)pauseMixer; + +- (void)resumeMixer; + +- (void)toggleMusicStreaming:(bool)streaming; + +- (void)toggleMicEcho:(bool)micEcho; + +- (void)setMicVolume:(float)volume; + +- (void)setMusicVolume:(float)volume; + +- (int64_t)getMusicLengthMs; + +- (int64_t)getMusicProgressMs; + +- (void)seekMusic:(int64_t)progressMs; + +- (void)stopMixer; + + +- (void)onSsrcFinished:(int32_t)ssrc; +- (void)onSsrcError:(int32_t)ssrc code:(int32_t)code; +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/avconf/CFAudioMixer.mm b/sdk/objc/avconf/CFAudioMixer.mm new file mode 100644 index 0000000000..48f0e6a7a0 --- /dev/null +++ b/sdk/objc/avconf/CFAudioMixer.mm @@ -0,0 +1,242 @@ + +// +/* + * The MIT License (MIT) + * + * Copyright (c) 2019 Piasy + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ +// + + +#import "CFAudioMixer.h" + +#import "base/RTCLogging.h" + +#include +#include "audio/audio_transport_impl.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/backing_track/avx_helper.h" +#include "modules/backing_track/bt_audio_mixer.h" + +#define TAG "CFAudioMixer" + +static constexpr int32_t kRecSsrc = 9999999; +static constexpr int32_t kMusicSsrc = 9999001; + +struct MixerHolder { + webrtc::BtAudioMixer* mixer; + int16_t* buffer; +}; + +static void preDeliverRecordedData(void* opaque, void* audioSamples, + const size_t nSamples, const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec) { + MixerHolder* holder = reinterpret_cast(opaque); + size_t size = nSamples * nBytesPerSample * nChannels; + holder->mixer->AddRecordedDataAndMix(audioSamples, size, holder->buffer); + memcpy(audioSamples, holder->buffer, size); +} + +static void onSourceFinish(void* opaque, int32_t ssrc) { + CFAudioMixer* mixer = CFBridgingRelease(opaque); + [mixer onSsrcFinished:ssrc]; +} + +static void onSourceError(void* opaque, int32_t ssrc, int32_t code) { + CFAudioMixer* mixer = CFBridgingRelease(opaque); + [mixer onSsrcError:ssrc code:code]; +} + +@implementation CFAudioMixer { + webrtc::BtAudioMixer* _mixer; + MixerHolder* _holder; + id _delegate; + + bool _musicEnabled; + bool _musicStreaming; + bool _micEcho; + + float _micVolume; + float _musicVolume; +} + +- (instancetype)initWithBackingTrack:(NSString*)backingTrack + 
captureSampleRate:(int32_t)captureSampleRate + captureChannelNum:(int32_t)captureChannelNum + frameDurationUs:(int32_t)frameDurationUs + enableMusicSyncFix:(bool)enableMusicSyncFix + waitingMixDelayFrame:(int32_t)waitingMixDelayFrame + delegate:(id)delegate { + self = [super init]; + if (self) { + _delegate = delegate; + + webrtc::MixerConfig config(std::vector(), + captureSampleRate, captureChannelNum, + frameDurationUs, enableMusicSyncFix, + waitingMixDelayFrame); + config.sources.emplace_back(webrtc::MixerSource::TYPE_RECORD, kRecSsrc, + 1, 1, true, true, false, false, "", + captureSampleRate, captureChannelNum); + config.sources.emplace_back(webrtc::MixerSource::TYPE_FILE, kMusicSsrc, + 1, 1, false, false, false, false, + std::string([backingTrack UTF8String]), + captureSampleRate, captureChannelNum); + _mixer = new webrtc::BtAudioMixer(config, onSourceFinish, onSourceError, + (void*)CFBridgingRetain(self)); + + _holder = new MixerHolder(); + _holder->mixer = _mixer; + _holder->buffer = new int16_t[frameDurationUs * captureSampleRate / + 1000 * captureChannelNum]; + + webrtc::AudioDeviceBuffer* adb = webrtc::AudioDeviceBuffer::Instance(); + if (adb) { + webrtc::AudioTransportImpl* audio_transport = + reinterpret_cast( + adb->audio_transport()); + audio_transport->SetPreDeliverRecordedDataCallback( + preDeliverRecordedData, _holder); + } + + _musicEnabled = false; + _musicStreaming = false; + _micEcho = false; + _micVolume = 1.0F; + _musicVolume = 1.0F; + } + return self; +} + +- (void)startMixer { + _musicEnabled = true; + [self applyMixerSettings]; +} + +- (void)pauseMixer { + _musicEnabled = false; + [self applyMixerSettings]; +} + +- (void)resumeMixer { + [self startMixer]; +} + +- (void)toggleMusicStreaming:(bool)streaming { + _musicStreaming = streaming; + [self applyMixerSettings]; +} + +- (void)toggleMicEcho:(bool)micEcho { + _micEcho = micEcho; + [self applyMixerSettings]; +} + +- (void)setMicVolume:(float)volume { + _micVolume = volume; + [self 
applyMixerSettings]; +} + +- (void)setMusicVolume:(float)volume { + _musicVolume = volume; + [self applyMixerSettings]; +} + +- (int64_t)getMusicLengthMs { + @synchronized(self) { + if (!_mixer) { + return -1; + } + return _mixer->GetLengthMs(kMusicSsrc); + } +} + +- (int64_t)getMusicProgressMs { + @synchronized(self) { + if (!_mixer) { + return -1; + } + return _mixer->GetProgressMs(kMusicSsrc); + } +} + +- (void)seekMusic:(int64_t)progressMs { + @synchronized(self) { + if (!_mixer) { + return; + } + _mixer->Seek(kMusicSsrc, progressMs); + } +} + +- (void)stopMixer { + webrtc::AudioDeviceBuffer* adb = webrtc::AudioDeviceBuffer::Instance(); + if (adb) { + webrtc::AudioTransportImpl* audio_transport = + reinterpret_cast( + adb->audio_transport()); + audio_transport->SetPreDeliverRecordedDataCallback(nullptr, nullptr); + } + + @synchronized(self) { + if (_holder) { + delete _holder->mixer; + delete[] _holder->buffer; + delete _holder; + _holder = nullptr; + _mixer = nullptr; + } + } +} + +- (void)onSsrcFinished:(int32_t)ssrc { + [_delegate onSsrcFinished:ssrc]; +} + +- (void)onSsrcError:(int32_t)ssrc code:(int32_t)code { + [_delegate onSsrcError:ssrc code:code]; +} + +- (void)dealloc { + [self stopMixer]; +} + +#pragma mark - private + +- (void)applyMixerSettings { + @synchronized(self) { + if (!_mixer) { + return; + } + _mixer->ToggleEnable(kMusicSsrc, _musicEnabled); + _mixer->TogglePlayback(kMusicSsrc, true); + _mixer->ToggleStreaming(kMusicSsrc, _musicStreaming); + _mixer->UpdateVolume(kMusicSsrc, _musicVolume, _musicVolume); + + _mixer->ToggleEnable(kRecSsrc, true); + _mixer->TogglePlayback(kRecSsrc, _micEcho); + _mixer->ToggleStreaming(kRecSsrc, true); + _mixer->UpdateVolume(kRecSsrc, _micVolume, _micVolume); + } +} + +@end diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h b/sdk/objc/avconf/CFHijackCapturerDelegate.h similarity index 50% rename from sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h rename to 
sdk/objc/avconf/CFHijackCapturerDelegate.h index 3233e4e9f2..d1d881a012 100644 --- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec+Private.h +++ b/sdk/objc/avconf/CFHijackCapturerDelegate.h @@ -8,8 +8,23 @@ * be found in the AUTHORS file in the root of the source tree. */ -#import "api/peerconnection/RTCEncodedImage+Private.h" -#import "api/peerconnection/RTCRtpFragmentationHeader+Private.h" -#import "api/peerconnection/RTCVideoCodecInfo+Private.h" -#import "api/peerconnection/RTCVideoEncoderSettings+Private.h" -#import "components/video_codec/RTCCodecSpecificInfoH264+Private.h" +#import +#import + +#import "RTCMacros.h" +#import "RTCVideoCapturer.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface CFHijackCapturerDelegate : NSObject + +- (instancetype)initWithRealDelegate:(id)delegate; + +- (void)toggleMute:(bool)muted; + +- (void)dispose; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/avconf/CFHijackCapturerDelegate.mm b/sdk/objc/avconf/CFHijackCapturerDelegate.mm new file mode 100644 index 0000000000..75b730d9ba --- /dev/null +++ b/sdk/objc/avconf/CFHijackCapturerDelegate.mm @@ -0,0 +1,137 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "CFHijackCapturerDelegate.h" + +#import "base/RTCVideoFrameBuffer.h" + +#include "rtc_base/logging.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/timestamp_aligner.h" + +const int64_t kBlackFrameIntervalMs = 100; + +@interface EmptyVideoFrameBuffer : NSObject + +- (instancetype)initWithWidth:(int)width height:(int)height; + +@end + +@implementation CFHijackCapturerDelegate { + id _realDelegate; + bool _muted; + rtc::TimestampAligner* _timestampAligner; + EmptyVideoFrameBuffer* _blackBuffer; + RTCVideoCapturer* _dummyCapturer; +} + +- (instancetype)initWithRealDelegate:(id)delegate { + self = [super init]; + if (self) { + _realDelegate = delegate; + _muted = false; + _timestampAligner = new rtc::TimestampAligner(); + _blackBuffer = + [[EmptyVideoFrameBuffer alloc] initWithWidth:480 height:640]; + _dummyCapturer = [[RTCVideoCapturer alloc] initWithDelegate:self]; + } + return self; +} + +- (void)toggleMute:(bool)muted { + RTC_LOG(LS_INFO) << "HijackCapturerObserver toggleMute " << muted; + _muted = muted; + if (_muted) { + __weak CFHijackCapturerDelegate* weakSelf = self; + // frame interval of 25 fps + [self dispatchAfter:40 + block:^{ + [weakSelf produceBlackFrame]; + }]; + } +} + +- (void)dispose { + RTC_LOG(LS_INFO) << "HijackCapturerObserver dispose"; + _muted = false; + delete _timestampAligner; + _timestampAligner = nullptr; +} + +- (void)capturer:(RTCVideoCapturer*)capturer + didCaptureVideoFrame:(RTCVideoFrame*)frame { + if (!_muted && _timestampAligner) { + int64_t translatedTimestampNs = + _timestampAligner->TranslateTimestamp( + frame.timeStampNs / rtc::kNumNanosecsPerMicrosec, + rtc::TimeMicros()) * + rtc::kNumNanosecsPerMicrosec; + RTCVideoFrame* videoFrame = + [[RTCVideoFrame alloc] initWithBuffer:frame.buffer + rotation:frame.rotation + timeStampNs:translatedTimestampNs]; + [_realDelegate capturer:capturer didCaptureVideoFrame:videoFrame]; + } +} + +- (void)dispatchAfter:(int64_t)afterMs block:(dispatch_block_t)block { + 
dispatch_after( + dispatch_time(DISPATCH_TIME_NOW, (int64_t)(afterMs * NSEC_PER_MSEC)), + dispatch_get_main_queue(), block); +} + +- (void)produceBlackFrame { + if (!_muted) { + return; + } + + RTCVideoFrame* videoFrame = + [[RTCVideoFrame alloc] initWithBuffer:_blackBuffer + rotation:RTCVideoRotation_0 + timeStampNs:rtc::TimeNanos()]; + // capturer param won't be used + [_realDelegate capturer:_dummyCapturer didCaptureVideoFrame:videoFrame]; + + __weak CFHijackCapturerDelegate* weakSelf = self; + [self dispatchAfter:kBlackFrameIntervalMs + block:^{ + [weakSelf produceBlackFrame]; + }]; +} + +@end + +@implementation EmptyVideoFrameBuffer { + int _width; + int _height; +} + +- (instancetype)initWithWidth:(int)width height:(int)height { + self = [super init]; + if (self) { + _width = width; + _height = height; + } + return self; +} + +- (int)width { + return _width; +} + +- (int)height { + return _height; +} + +- (id)toI420 { + return nil; +} + +@end diff --git a/sdk/objc/avconf/CFRPCapturer.h b/sdk/objc/avconf/CFRPCapturer.h new file mode 100644 index 0000000000..37004456d8 --- /dev/null +++ b/sdk/objc/avconf/CFRPCapturer.h @@ -0,0 +1,47 @@ +// +/* + * The MIT License (MIT) + * + * Copyright (c) 2018 Piasy + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ +// + + +#import + +#import "RTCMacros.h" +#import "RTCVideoCapturer.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface CFRPCapturer : RTCVideoCapturer + +- (instancetype)initWithDelegate:(id)delegate + andErrorHandler:(void (^)(NSString*))handler; + +- (void)startCapture; + +- (void)stopCapture; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/avconf/CFRPCapturer.m b/sdk/objc/avconf/CFRPCapturer.m new file mode 100644 index 0000000000..29e63462e2 --- /dev/null +++ b/sdk/objc/avconf/CFRPCapturer.m @@ -0,0 +1,148 @@ +// +/* + * The MIT License (MIT) + * + * Copyright (c) 2018 Piasy + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ +// + + +#import "CFRPCapturer.h" + +#import + +#import "base/RTCLogging.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +#define TAG "CFRPCapturer" + +@implementation CFRPCapturer { + void (^_errorHandler)(NSString*); +} + +- (instancetype)initWithDelegate:(id)delegate + andErrorHandler:(void (^)(NSString*))handler { + self = [super initWithDelegate:delegate]; + if (self) { + _errorHandler = handler; + } + return self; +} + +- (void)startCapture { + RPScreenRecorder* recorder = [RPScreenRecorder sharedRecorder]; + if (recorder.recording) { + RTCLogError(TAG " startCapture already started"); + return; + } + RTCLogInfo(TAG " startCapture"); + __weak CFRPCapturer* weakSelf = self; + if (@available(iOS 11.0, *)) { + // recorder.microphoneEnabled = YES; + [recorder startCaptureWithHandler:^(CMSampleBufferRef sampleBuffer, + RPSampleBufferType sampleBufferType, + NSError* error) { + switch (sampleBufferType) { + case RPSampleBufferTypeVideo: { + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || + !CMSampleBufferIsValid(sampleBuffer) || + !CMSampleBufferDataIsReady(sampleBuffer)) { + return; + } + + CVPixelBufferRef pixelBuffer = + CMSampleBufferGetImageBuffer(sampleBuffer); + if (pixelBuffer == nil) { + return; + } + + CFRPCapturer* strongSelf = weakSelf; + if (strongSelf) { + RTCCVPixelBuffer* rtcPixelBuffer = + [[RTCCVPixelBuffer alloc] + initWithPixelBuffer:pixelBuffer]; + int64_t timeStampNs = + CMTimeGetSeconds( + CMSampleBufferGetPresentationTimeStamp( + sampleBuffer)) * + NSEC_PER_SEC; + RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] + initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + [strongSelf notifyFrame:videoFrame]; + } 
+ break; + } + case RPSampleBufferTypeAudioApp: + break; + case RPSampleBufferTypeAudioMic: + break; + default: + break; + } + } + completionHandler:^(NSError* error) { + CFRPCapturer* strongSelf = weakSelf; + if (error) { + RTCLogError(TAG " startCapture fail: %@", + error.localizedDescription); + if (!strongSelf) { + RTCLogError(TAG " startCapture fail, but no strong self"); + return; + } + strongSelf->_errorHandler(@""); + } else { + RTCLogInfo(TAG " startCapture success"); + if (!strongSelf) { + RTCLogError(TAG + " startCapture success, but no strong self"); + return; + } + } + }]; + } else { + RTCLogError(TAG " startCapture fail: OS not support"); + } +} + +- (void)stopCapture { + RTCLogInfo(TAG " stopCapture"); + if (@available(iOS 11.0, *)) { + [[RPScreenRecorder sharedRecorder] + stopCaptureWithHandler:^(NSError* _Nullable error) { + if (error) { + RTCLogError(TAG " stopCapture fail: %@", + error.localizedDescription); + } else { + RTCLogInfo(TAG " stopCapture success"); + } + }]; + } else { + RTCLogError(TAG " stopCapture fail: OS not support"); + } +} + +- (void)notifyFrame:(RTCVideoFrame*)videoFrame { + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end diff --git a/sdk/objc/avconf/CFVideoProcessor.h b/sdk/objc/avconf/CFVideoProcessor.h new file mode 100644 index 0000000000..07df6b044a --- /dev/null +++ b/sdk/objc/avconf/CFVideoProcessor.h @@ -0,0 +1,24 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoCapturer.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol CFVideoProcessorDelegate + +- (void)onProcessedVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +@end + +RTC_OBJC_EXPORT +@protocol CFVideoProcessor + +- (void)setVideoProcessorDelegate:(nullable id)delegate; + +- (void)onVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCCodecSpecificInfo.h b/sdk/objc/base/RTCCodecSpecificInfo.h index e2ae4cafa1..5e7800e524 100644 --- 
a/sdk/objc/base/RTCCodecSpecificInfo.h +++ b/sdk/objc/base/RTCCodecSpecificInfo.h @@ -18,7 +18,7 @@ NS_ASSUME_NONNULL_BEGIN * Corresponds to webrtc::CodecSpecificInfo. */ RTC_OBJC_EXPORT -@protocol RTCCodecSpecificInfo -@end +@protocol RTC_OBJC_TYPE +(RTCCodecSpecificInfo) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCEncodedImage.h b/sdk/objc/base/RTCEncodedImage.h index 670c7276ff..28529e5906 100644 --- a/sdk/objc/base/RTCEncodedImage.h +++ b/sdk/objc/base/RTCEncodedImage.h @@ -31,7 +31,7 @@ typedef NS_ENUM(NSUInteger, RTCVideoContentType) { /** Represents an encoded frame. Corresponds to webrtc::EncodedImage. */ RTC_OBJC_EXPORT -@interface RTCEncodedImage : NSObject +@interface RTC_OBJC_TYPE (RTCEncodedImage) : NSObject @property(nonatomic, strong) NSData *buffer; @property(nonatomic, assign) int32_t encodedWidth; @@ -44,7 +44,6 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) int64_t encodeFinishMs; @property(nonatomic, assign) RTCFrameType frameType; @property(nonatomic, assign) RTCVideoRotation rotation; -@property(nonatomic, assign) BOOL completeFrame; @property(nonatomic, strong) NSNumber *qp; @property(nonatomic, assign) RTCVideoContentType contentType; diff --git a/sdk/objc/base/RTCEncodedImage.m b/sdk/objc/base/RTCEncodedImage.m index 024a57c541..ad8441aabd 100644 --- a/sdk/objc/base/RTCEncodedImage.m +++ b/sdk/objc/base/RTCEncodedImage.m @@ -10,7 +10,7 @@ #import "RTCEncodedImage.h" -@implementation RTCEncodedImage +@implementation RTC_OBJC_TYPE (RTCEncodedImage) @synthesize buffer = _buffer; @synthesize encodedWidth = _encodedWidth; @@ -23,7 +23,6 @@ @implementation RTCEncodedImage @synthesize encodeFinishMs = _encodeFinishMs; @synthesize frameType = _frameType; @synthesize rotation = _rotation; -@synthesize completeFrame = _completeFrame; @synthesize qp = _qp; @synthesize contentType = _contentType; diff --git a/sdk/objc/base/RTCI420Buffer.h b/sdk/objc/base/RTCI420Buffer.h index a6c7e41bcb..b97f05a5ba 100644 --- 
a/sdk/objc/base/RTCI420Buffer.h +++ b/sdk/objc/base/RTCI420Buffer.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** Protocol for RTCYUVPlanarBuffers containing I420 data */ RTC_OBJC_EXPORT -@protocol RTCI420Buffer -@end +@protocol RTC_OBJC_TYPE +(RTCI420Buffer) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCMacros.h b/sdk/objc/base/RTCMacros.h index 7f7e64cb76..e527ff6bc4 100644 --- a/sdk/objc/base/RTCMacros.h +++ b/sdk/objc/base/RTCMacros.h @@ -11,6 +11,30 @@ #ifndef SDK_OBJC_BASE_RTCMACROS_H_ #define SDK_OBJC_BASE_RTCMACROS_H_ +// Internal macros used to correctly concatenate symbols. +#define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b +#define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b) + +// RTC_OBJC_TYPE_PREFIX +// +// Macro used to prepend a prefix to the API types that are exported with +// RTC_OBJC_EXPORT. +// +// Clients can patch the definition of this macro locally and build +// WebRTC.framework with their own prefix in case symbol clashing is a +// problem. +// +// This macro must only be defined here and not on via compiler flag to +// ensure it has a unique value. +#define RTC_OBJC_TYPE_PREFIX + +// RCT_OBJC_TYPE +// +// Macro used internally to declare API types. Declaring an API type without +// using this macro will not include the declared type in the set of types +// that will be affected by the configurable RTC_OBJC_TYPE_PREFIX. 
+#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name) + #define RTC_OBJC_EXPORT __attribute__((visibility("default"))) #if defined(__cplusplus) diff --git a/sdk/objc/base/RTCMutableI420Buffer.h b/sdk/objc/base/RTCMutableI420Buffer.h index 098fb9a66f..cde721980b 100644 --- a/sdk/objc/base/RTCMutableI420Buffer.h +++ b/sdk/objc/base/RTCMutableI420Buffer.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN /** Extension of the I420 buffer with mutable data access */ RTC_OBJC_EXPORT -@protocol RTCMutableI420Buffer -@end +@protocol RTC_OBJC_TYPE +(RTCMutableI420Buffer) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCMutableYUVPlanarBuffer.h b/sdk/objc/base/RTCMutableYUVPlanarBuffer.h index 00dfcd94ca..bd14e3bca3 100644 --- a/sdk/objc/base/RTCMutableYUVPlanarBuffer.h +++ b/sdk/objc/base/RTCMutableYUVPlanarBuffer.h @@ -16,9 +16,10 @@ NS_ASSUME_NONNULL_BEGIN /** Extension of the YUV planar data buffer with mutable data access */ RTC_OBJC_EXPORT -@protocol RTCMutableYUVPlanarBuffer +@protocol RTC_OBJC_TYPE +(RTCMutableYUVPlanarBuffer) -@property(nonatomic, readonly) uint8_t *mutableDataY; + @property(nonatomic, readonly) uint8_t *mutableDataY; @property(nonatomic, readonly) uint8_t *mutableDataU; @property(nonatomic, readonly) uint8_t *mutableDataV; diff --git a/sdk/objc/base/RTCRtpFragmentationHeader.h b/sdk/objc/base/RTCRtpFragmentationHeader.h index 2e26b08b8a..001b4e9deb 100644 --- a/sdk/objc/base/RTCRtpFragmentationHeader.h +++ b/sdk/objc/base/RTCRtpFragmentationHeader.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** Information for header. Corresponds to webrtc::RTPFragmentationHeader. 
*/ RTC_OBJC_EXPORT -@interface RTCRtpFragmentationHeader : NSObject +@interface RTC_OBJC_TYPE (RTCRtpFragmentationHeader) : NSObject @property(nonatomic, strong) NSArray *fragmentationOffset; @property(nonatomic, strong) NSArray *fragmentationLength; diff --git a/sdk/objc/base/RTCRtpFragmentationHeader.m b/sdk/objc/base/RTCRtpFragmentationHeader.m index 8049abc411..60e2f5d1e6 100644 --- a/sdk/objc/base/RTCRtpFragmentationHeader.m +++ b/sdk/objc/base/RTCRtpFragmentationHeader.m @@ -10,11 +10,11 @@ #import "RTCRtpFragmentationHeader.h" -@implementation RTCRtpFragmentationHeader +@implementation RTC_OBJC_TYPE (RTCRtpFragmentationHeader) @synthesize fragmentationOffset = _fragmentationOffset; @synthesize fragmentationLength = _fragmentationLength; @synthesize fragmentationTimeDiff = _fragmentationTimeDiff; @synthesize fragmentationPlType = _fragmentationPlType; -@end \ No newline at end of file +@end diff --git a/sdk/objc/base/RTCVideoCapturer.h b/sdk/objc/base/RTCVideoCapturer.h index 5212627692..a1ffdcf38e 100644 --- a/sdk/objc/base/RTCVideoCapturer.h +++ b/sdk/objc/base/RTCVideoCapturer.h @@ -14,19 +14,21 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCVideoCapturer; +@class RTC_OBJC_TYPE(RTCVideoCapturer); RTC_OBJC_EXPORT -@protocol RTCVideoCapturerDelegate -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame; +@protocol RTC_OBJC_TYPE +(RTCVideoCapturerDelegate) - + (void)capturer : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame + : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end RTC_OBJC_EXPORT -@interface RTCVideoCapturer : NSObject +@interface RTC_OBJC_TYPE (RTCVideoCapturer) : NSObject -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; -- (instancetype)initWithDelegate:(id)delegate; +- (instancetype)initWithDelegate:(id)delegate; @end diff --git a/sdk/objc/base/RTCVideoCapturer.m b/sdk/objc/base/RTCVideoCapturer.m index 39cc377746..ca31a731f0 100644 --- a/sdk/objc/base/RTCVideoCapturer.m 
+++ b/sdk/objc/base/RTCVideoCapturer.m @@ -10,11 +10,11 @@ #import "RTCVideoCapturer.h" -@implementation RTCVideoCapturer +@implementation RTC_OBJC_TYPE (RTCVideoCapturer) @synthesize delegate = _delegate; -- (instancetype)initWithDelegate:(id)delegate { +- (instancetype)initWithDelegate:(id)delegate { if (self = [super init]) { _delegate = delegate; } diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index 2162caaa21..fa28958f25 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. */ RTC_OBJC_EXPORT -@interface RTCVideoCodecInfo : NSObject +@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject - (instancetype)init NS_UNAVAILABLE; @@ -26,7 +26,7 @@ RTC_OBJC_EXPORT parameters:(nullable NSDictionary *)parameters NS_DESIGNATED_INITIALIZER; -- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info; +- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; @property(nonatomic, readonly) NSString *name; @property(nonatomic, readonly) NSDictionary *parameters; diff --git a/sdk/objc/base/RTCVideoCodecInfo.m b/sdk/objc/base/RTCVideoCodecInfo.m index 7fb17ca7d2..ce26ae1de3 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.m +++ b/sdk/objc/base/RTCVideoCodecInfo.m @@ -10,7 +10,7 @@ #import "RTCVideoCodecInfo.h" -@implementation RTCVideoCodecInfo +@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) @synthesize name = _name; @synthesize parameters = _parameters; @@ -29,7 +29,7 @@ - (instancetype)initWithName:(NSString *)name return self; } -- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info { +- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if (!info || ![self.name isEqualToString:info.name] || ![self.parameters isEqualToDictionary:info.parameters]) { diff --git a/sdk/objc/base/RTCVideoDecoder.h b/sdk/objc/base/RTCVideoDecoder.h index 8077c698e4..ca3680e40b 100644 
--- a/sdk/objc/base/RTCVideoDecoder.h +++ b/sdk/objc/base/RTCVideoDecoder.h @@ -19,18 +19,19 @@ NS_ASSUME_NONNULL_BEGIN /** Callback block for decoder. */ -typedef void (^RTCVideoDecoderCallback)(RTCVideoFrame *frame); +typedef void (^RTCVideoDecoderCallback)(RTC_OBJC_TYPE(RTCVideoFrame) * frame, int32_t qp); /** Protocol for decoder implementations. */ RTC_OBJC_EXPORT -@protocol RTCVideoDecoder +@protocol RTC_OBJC_TYPE +(RTCVideoDecoder) -- (void)setCallback:(RTCVideoDecoderCallback)callback; + - (void)setCallback : (RTCVideoDecoderCallback)callback; - (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores; - (NSInteger)releaseDecoder; -- (NSInteger)decode:(RTCEncodedImage *)encodedImage +- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage missingFrames:(BOOL)missingFrames - codecSpecificInfo:(nullable id)info + codecSpecificInfo:(nullable id)info renderTimeMs:(int64_t)renderTimeMs; - (NSString *)implementationName; diff --git a/sdk/objc/base/RTCVideoDecoderFactory.h b/sdk/objc/base/RTCVideoDecoderFactory.h index 3e24153b82..8d90138521 100644 --- a/sdk/objc/base/RTCVideoDecoderFactory.h +++ b/sdk/objc/base/RTCVideoDecoderFactory.h @@ -16,12 +16,16 @@ NS_ASSUME_NONNULL_BEGIN -/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. */ +/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. + */ RTC_OBJC_EXPORT -@protocol RTCVideoDecoderFactory +@protocol RTC_OBJC_TYPE +(RTCVideoDecoderFactory) -- (nullable id)createDecoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? + - (nullable id)createDecoder + : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; +- (NSArray *) + supportedCodecs; // TODO(andersc): "supportedFormats" instead? 
@end diff --git a/sdk/objc/base/RTCVideoEncoder.h b/sdk/objc/base/RTCVideoEncoder.h index c5257674d8..7d1a7afd7f 100644 --- a/sdk/objc/base/RTCVideoEncoder.h +++ b/sdk/objc/base/RTCVideoEncoder.h @@ -21,20 +21,21 @@ NS_ASSUME_NONNULL_BEGIN /** Callback block for encoder. */ -typedef BOOL (^RTCVideoEncoderCallback)(RTCEncodedImage *frame, - id info, - RTCRtpFragmentationHeader *header); +typedef BOOL (^RTCVideoEncoderCallback)(RTC_OBJC_TYPE(RTCEncodedImage) * frame, + id info, + RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * header); /** Protocol for encoder implementations. */ RTC_OBJC_EXPORT -@protocol RTCVideoEncoder +@protocol RTC_OBJC_TYPE +(RTCVideoEncoder) -- (void)setCallback:(RTCVideoEncoderCallback)callback; -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings + - (void)setCallback : (RTCVideoEncoderCallback)callback; +- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores; - (NSInteger)releaseEncoder; -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(nullable id)info +- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame + codecSpecificInfo:(nullable id)info frameTypes:(NSArray *)frameTypes; - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate; - (NSString *)implementationName; @@ -42,7 +43,7 @@ RTC_OBJC_EXPORT /** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to * keep the QP from the encoded images within the given range. Returning nil from this function * disables quality scaling. 
*/ -- (nullable RTCVideoEncoderQpThresholds *)scalingSettings; +- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings; @end diff --git a/sdk/objc/base/RTCVideoEncoderFactory.h b/sdk/objc/base/RTCVideoEncoderFactory.h index 3291797d5a..b115b2ad39 100644 --- a/sdk/objc/base/RTCVideoEncoderFactory.h +++ b/sdk/objc/base/RTCVideoEncoderFactory.h @@ -16,15 +16,33 @@ NS_ASSUME_NONNULL_BEGIN -/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. */ +/** RTCVideoEncoderFactory is an Objective-C version of + webrtc::VideoEncoderFactory::VideoEncoderSelector. + */ +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderSelector) + + - (void)registerCurrentEncoderInfo : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; +- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:(NSInteger)bitrate; +- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBrokenEncoder; + +@end + +/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. + */ RTC_OBJC_EXPORT -@protocol RTCVideoEncoderFactory +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderFactory) -- (nullable id)createEncoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? + - (nullable id)createEncoder + : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; +- (NSArray *) + supportedCodecs; // TODO(andersc): "supportedFormats" instead? @optional -- (NSArray *)implementations; +- (NSArray *)implementations; +- (nullable id)encoderSelector; @end diff --git a/sdk/objc/base/RTCVideoEncoderQpThresholds.h b/sdk/objc/base/RTCVideoEncoderQpThresholds.h index 2b48f45ce0..1a6e9e88ab 100644 --- a/sdk/objc/base/RTCVideoEncoderQpThresholds.h +++ b/sdk/objc/base/RTCVideoEncoderQpThresholds.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. 
*/ RTC_OBJC_EXPORT -@interface RTCVideoEncoderQpThresholds : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) : NSObject - (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high; diff --git a/sdk/objc/base/RTCVideoEncoderQpThresholds.m b/sdk/objc/base/RTCVideoEncoderQpThresholds.m index 5bd06ffd8d..fb7012f44f 100644 --- a/sdk/objc/base/RTCVideoEncoderQpThresholds.m +++ b/sdk/objc/base/RTCVideoEncoderQpThresholds.m @@ -10,7 +10,7 @@ #import "RTCVideoEncoderQpThresholds.h" -@implementation RTCVideoEncoderQpThresholds +@implementation RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) @synthesize low = _low; @synthesize high = _high; diff --git a/sdk/objc/base/RTCVideoEncoderSettings.h b/sdk/objc/base/RTCVideoEncoderSettings.h index a9403f8dec..ae792eab71 100644 --- a/sdk/objc/base/RTCVideoEncoderSettings.h +++ b/sdk/objc/base/RTCVideoEncoderSettings.h @@ -21,7 +21,7 @@ typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) { /** Settings for encoder. Corresponds to webrtc::VideoCodec. 
*/ RTC_OBJC_EXPORT -@interface RTCVideoEncoderSettings : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings) : NSObject @property(nonatomic, strong) NSString *name; diff --git a/sdk/objc/base/RTCVideoEncoderSettings.m b/sdk/objc/base/RTCVideoEncoderSettings.m index f68bc8cb56..f66cd2cf77 100644 --- a/sdk/objc/base/RTCVideoEncoderSettings.m +++ b/sdk/objc/base/RTCVideoEncoderSettings.m @@ -10,7 +10,7 @@ #import "RTCVideoEncoderSettings.h" -@implementation RTCVideoEncoderSettings +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings) @synthesize name = _name; @synthesize width = _width; diff --git a/sdk/objc/base/RTCVideoFrame.h b/sdk/objc/base/RTCVideoFrame.h index 9aca7433f3..f5638d27cf 100644 --- a/sdk/objc/base/RTCVideoFrame.h +++ b/sdk/objc/base/RTCVideoFrame.h @@ -22,11 +22,12 @@ typedef NS_ENUM(NSInteger, RTCVideoRotation) { RTCVideoRotation_270 = 270, }; -@protocol RTCVideoFrameBuffer; +@protocol RTC_OBJC_TYPE +(RTCVideoFrameBuffer); // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame. RTC_OBJC_EXPORT -@interface RTCVideoFrame : NSObject +@interface RTC_OBJC_TYPE (RTCVideoFrame) : NSObject /** Width without rotation applied. */ @property(nonatomic, readonly) int width; @@ -41,7 +42,7 @@ RTC_OBJC_EXPORT /** Timestamp 90 kHz. */ @property(nonatomic, assign) int32_t timeStamp; -@property(nonatomic, readonly) id buffer; +@property(nonatomic, readonly) id buffer; - (instancetype)init NS_UNAVAILABLE; - (instancetype) new NS_UNAVAILABLE; @@ -71,14 +72,14 @@ RTC_OBJC_EXPORT /** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp. */ -- (instancetype)initWithBuffer:(id)frameBuffer +- (instancetype)initWithBuffer:(id)frameBuffer rotation:(RTCVideoRotation)rotation timeStampNs:(int64_t)timeStampNs; /** Return a frame that is guaranteed to be I420, i.e. it is possible to access * the YUV data on it. 
*/ -- (RTCVideoFrame *)newI420VideoFrame; +- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame; @end diff --git a/sdk/objc/base/RTCVideoFrame.mm b/sdk/objc/base/RTCVideoFrame.mm index 0a44b04e6b..e162238d73 100644 --- a/sdk/objc/base/RTCVideoFrame.mm +++ b/sdk/objc/base/RTCVideoFrame.mm @@ -13,7 +13,7 @@ #import "RTCI420Buffer.h" #import "RTCVideoFrameBuffer.h" -@implementation RTCVideoFrame { +@implementation RTC_OBJC_TYPE (RTCVideoFrame) { RTCVideoRotation _rotation; int64_t _timeStampNs; } @@ -37,10 +37,10 @@ - (int64_t)timeStampNs { return _timeStampNs; } -- (RTCVideoFrame *)newI420VideoFrame { - return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420] - rotation:_rotation - timeStampNs:_timeStampNs]; +- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame { + return [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:[_buffer toI420] + rotation:_rotation + timeStampNs:_timeStampNs]; } - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer @@ -63,7 +63,7 @@ - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer return nil; } -- (instancetype)initWithBuffer:(id)buffer +- (instancetype)initWithBuffer:(id)buffer rotation:(RTCVideoRotation)rotation timeStampNs:(int64_t)timeStampNs { if (self = [super init]) { diff --git a/sdk/objc/base/RTCVideoFrameBuffer.h b/sdk/objc/base/RTCVideoFrameBuffer.h index bb9e6fba63..82d057eea0 100644 --- a/sdk/objc/base/RTCVideoFrameBuffer.h +++ b/sdk/objc/base/RTCVideoFrameBuffer.h @@ -14,16 +14,18 @@ NS_ASSUME_NONNULL_BEGIN -@protocol RTCI420Buffer; +@protocol RTC_OBJC_TYPE +(RTCI420Buffer); // RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer. 
RTC_OBJC_EXPORT -@protocol RTCVideoFrameBuffer +@protocol RTC_OBJC_TYPE +(RTCVideoFrameBuffer) -@property(nonatomic, readonly) int width; + @property(nonatomic, readonly) int width; @property(nonatomic, readonly) int height; -- (id)toI420; +- (id)toI420; @end diff --git a/sdk/objc/base/RTCVideoRenderer.h b/sdk/objc/base/RTCVideoRenderer.h index 7b359a35c2..0f763295ad 100644 --- a/sdk/objc/base/RTCVideoRenderer.h +++ b/sdk/objc/base/RTCVideoRenderer.h @@ -17,23 +17,26 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCVideoFrame; +@class RTC_OBJC_TYPE(RTCVideoFrame); RTC_OBJC_EXPORT -@protocol RTCVideoRenderer +@protocol RTC_OBJC_TYPE +(RTCVideoRenderer) -/** The size of the frame. */ -- (void)setSize:(CGSize)size; + /** The size of the frame. */ + - (void)setSize : (CGSize)size; /** The frame to be displayed. */ -- (void)renderFrame:(nullable RTCVideoFrame *)frame; +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end RTC_OBJC_EXPORT -@protocol RTCVideoViewDelegate +@protocol RTC_OBJC_TYPE +(RTCVideoViewDelegate) -- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size; + - (void)videoView : (id)videoView didChangeVideoSize + : (CGSize)size; @end diff --git a/sdk/objc/base/RTCYUVPlanarBuffer.h b/sdk/objc/base/RTCYUVPlanarBuffer.h index 8ceb66c99d..be01b915f5 100644 --- a/sdk/objc/base/RTCYUVPlanarBuffer.h +++ b/sdk/objc/base/RTCYUVPlanarBuffer.h @@ -17,9 +17,10 @@ NS_ASSUME_NONNULL_BEGIN /** Protocol for RTCVideoFrameBuffers containing YUV planar data. 
*/ RTC_OBJC_EXPORT -@protocol RTCYUVPlanarBuffer +@protocol RTC_OBJC_TYPE +(RTCYUVPlanarBuffer) -@property(nonatomic, readonly) int chromaWidth; + @property(nonatomic, readonly) int chromaWidth; @property(nonatomic, readonly) int chromaHeight; @property(nonatomic, readonly) const uint8_t *dataY; @property(nonatomic, readonly) const uint8_t *dataU; diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm index c81ce1b916..b2753f282e 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm +++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm @@ -13,17 +13,18 @@ #import "base/RTCLogging.h" -@implementation RTCAudioSession (Configuration) +@implementation RTC_OBJC_TYPE (RTCAudioSession) +(Configuration) -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration - error:(NSError **)outError { + - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error + : (NSError **)outError { return [self setConfiguration:configuration active:NO shouldSetActive:NO error:outError]; } -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration active:(BOOL)active error:(NSError **)outError { return [self setConfiguration:configuration @@ -34,7 +35,7 @@ - (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration #pragma mark - Private -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration active:(BOOL)active shouldSetActive:(BOOL)shouldSetActive error:(NSError **)outError { diff --git a/sdk/objc/components/audio/RTCAudioSession+Private.h b/sdk/objc/components/audio/RTCAudioSession+Private.h index 8cf9339377..4c1eb1c44a 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Private.h +++ 
b/sdk/objc/components/audio/RTCAudioSession+Private.h @@ -12,14 +12,15 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCAudioSessionConfiguration; +@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration); -@interface RTCAudioSession () +@interface RTC_OBJC_TYPE (RTCAudioSession) +() -/** Number of times setActive:YES has succeeded without a balanced call to - * setActive:NO. - */ -@property(nonatomic, readonly) int activationCount; + /** Number of times setActive:YES has succeeded without a balanced call to + * setActive:NO. + */ + @property(nonatomic, readonly) int activationCount; /** The number of times |beginWebRTCSession| was called without a balanced call * to |endWebRTCSession|. @@ -40,7 +41,7 @@ NS_ASSUME_NONNULL_BEGIN * the list. This delegate will be notified before other delegates of * audio events. */ -- (void)pushDelegate:(id)delegate; +- (void)pushDelegate:(id)delegate; /** Signals RTCAudioSession that a WebRTC session is about to begin and * audio configuration is needed. Will configure the audio session for WebRTC diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h index b5bba2f21e..f917e327a4 100644 --- a/sdk/objc/components/audio/RTCAudioSession.h +++ b/sdk/objc/components/audio/RTCAudioSession.h @@ -21,78 +21,81 @@ extern NSInteger const kRTCAudioSessionErrorLockRequired; /** Unknown configuration error occurred. */ extern NSInteger const kRTCAudioSessionErrorConfiguration; -@class RTCAudioSession; -@class RTCAudioSessionConfiguration; +@class RTC_OBJC_TYPE(RTCAudioSession); +@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration); // Surfaces AVAudioSession events. WebRTC will listen directly for notifications // from AVAudioSession and handle them before calling these delegate methods, // at which point applications can perform additional processing if required. 
RTC_OBJC_EXPORT -@protocol RTCAudioSessionDelegate +@protocol RTC_OBJC_TYPE +(RTCAudioSessionDelegate) -@optional + @optional /** Called on a system notification thread when AVAudioSession starts an * interruption event. */ -- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session; +- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called on a system notification thread when AVAudioSession ends an * interruption event. */ -- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session +- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session shouldResumeSession:(BOOL)shouldResumeSession; /** Called on a system notification thread when AVAudioSession changes the * route. */ -- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session +- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session reason:(AVAudioSessionRouteChangeReason)reason previousRoute:(AVAudioSessionRouteDescription *)previousRoute; /** Called on a system notification thread when AVAudioSession media server * terminates. */ -- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session; +- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called on a system notification thread when AVAudioSession media server * restarts. */ -- (void)audioSessionMediaServerReset:(RTCAudioSession *)session; +- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session; // TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification. -- (void)audioSession:(RTCAudioSession *)session didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session + didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord; /** Called on a WebRTC thread when the audio device is notified to begin * playback or recording. 
*/ -- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session; +- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called on a WebRTC thread when the audio device is notified to stop * playback or recording. */ -- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session; +- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called when the AVAudioSession output volume value changes. */ -- (void)audioSession:(RTCAudioSession *)audioSession didChangeOutputVolume:(float)outputVolume; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession + didChangeOutputVolume:(float)outputVolume; /** Called when the audio device detects a playout glitch. The argument is the * number of glitches detected so far in the current audio playout session. */ -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches; /** Called when the audio session is about to change the active state. */ -- (void)audioSession:(RTCAudioSession *)audioSession willSetActive:(BOOL)active; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession willSetActive:(BOOL)active; /** Called after the audio session sucessfully changed the active state. */ -- (void)audioSession:(RTCAudioSession *)audioSession didSetActive:(BOOL)active; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didSetActive:(BOOL)active; /** Called after the audio session failed to change the active state. 
*/ -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession failedToSetActive:(BOOL)active error:(NSError *)error; @@ -103,10 +106,11 @@ RTC_OBJC_EXPORT * case of this is when CallKit activates the audio session for the application */ RTC_OBJC_EXPORT -@protocol RTCAudioSessionActivationDelegate +@protocol RTC_OBJC_TYPE +(RTCAudioSessionActivationDelegate) -/** Called when the audio session is activated outside of the app by iOS. */ -- (void)audioSessionDidActivate:(AVAudioSession *)session; + /** Called when the audio session is activated outside of the app by iOS. */ + - (void)audioSessionDidActivate : (AVAudioSession *)session; /** Called when the audio session is deactivated outside of the app by iOS. */ - (void)audioSessionDidDeactivate:(AVAudioSession *)session; @@ -121,7 +125,7 @@ RTC_OBJC_EXPORT * activated only once. See |setActive:error:|. */ RTC_OBJC_EXPORT -@interface RTCAudioSession : NSObject +@interface RTC_OBJC_TYPE (RTCAudioSession) : NSObject /** Convenience property to access the AVAudioSession singleton. Callers should * not call setters on AVAudioSession directly, but other method invocations @@ -196,9 +200,9 @@ RTC_OBJC_EXPORT - (instancetype)init NS_UNAVAILABLE; /** Adds a delegate, which is held weakly. */ -- (void)addDelegate:(id)delegate; +- (void)addDelegate:(id)delegate; /** Removes an added delegate. */ -- (void)removeDelegate:(id)delegate; +- (void)removeDelegate:(id)delegate; /** Request exclusive access to the audio session for configuration. This call * will block if the lock is held by another object. @@ -237,19 +241,21 @@ RTC_OBJC_EXPORT error:(NSError **)outError; @end -@interface RTCAudioSession (Configuration) +@interface RTC_OBJC_TYPE (RTCAudioSession) +(Configuration) -/** Applies the configuration to the current session. Attempts to set all - * properties even if previous ones fail. Only the last error will be - * returned. 
- * |lockForConfiguration| must be called first. - */ -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration error:(NSError **)outError; + /** Applies the configuration to the current session. Attempts to set all + * properties even if previous ones fail. Only the last error will be + * returned. + * |lockForConfiguration| must be called first. + */ + - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error + : (NSError **)outError; /** Convenience method that calls both setConfiguration and setActive. * |lockForConfiguration| must be called first. */ -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration active:(BOOL)active error:(NSError **)outError; diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 260529db7e..520b2d1d37 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -16,26 +16,26 @@ #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" #import "RTCAudioSessionConfiguration.h" #import "base/RTCLogging.h" - -NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession"; +NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)"; NSInteger const kRTCAudioSessionErrorLockRequired = -1; NSInteger const kRTCAudioSessionErrorConfiguration = -2; NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; -@interface RTCAudioSession () -@property(nonatomic, readonly) std::vector<__weak id > delegates; +@interface RTC_OBJC_TYPE (RTCAudioSession) +() @property(nonatomic, + readonly) std::vector<__weak id > delegates; @end // This class needs to be thread-safe because it is accessed from many threads. 
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of // lock contention so coarse locks should be fine for now. -@implementation RTCAudioSession { - rtc::CriticalSection _crit; +@implementation RTC_OBJC_TYPE (RTCAudioSession) { + rtc::RecursiveCriticalSection _crit; AVAudioSession *_session; volatile int _activationCount; volatile int _lockRecursionCount; @@ -54,7 +54,7 @@ @implementation RTCAudioSession { + (instancetype)sharedInstance { static dispatch_once_t onceToken; - static RTCAudioSession *sharedInstance = nil; + static RTC_OBJC_TYPE(RTCAudioSession) *sharedInstance = nil; dispatch_once(&onceToken, ^{ sharedInstance = [[self alloc] init]; }); @@ -102,9 +102,9 @@ - (instancetype)initWithAudioSession:(id)audioSession { [_session addObserver:self forKeyPath:kRTCAudioSessionOutputVolumeSelector options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld - context:(__bridge void*)RTCAudioSession.class]; + context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; - RTCLog(@"RTCAudioSession (%p): init.", self); + RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self); } return self; } @@ -113,25 +113,24 @@ - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; [_session removeObserver:self forKeyPath:kRTCAudioSessionOutputVolumeSelector - context:(__bridge void*)RTCAudioSession.class]; - RTCLog(@"RTCAudioSession (%p): dealloc.", self); + context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; + RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self); } - (NSString *)description { - NSString *format = - @"RTCAudioSession: {\n" - " category: %@\n" - " categoryOptions: %ld\n" - " mode: %@\n" - " isActive: %d\n" - " sampleRate: %.2f\n" - " IOBufferDuration: %f\n" - " outputNumberOfChannels: %ld\n" - " inputNumberOfChannels: %ld\n" - " outputLatency: %f\n" - " inputLatency: %f\n" - " outputVolume: %f\n" - "}"; + NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n" + " category: %@\n" 
+ " categoryOptions: %ld\n" + " mode: %@\n" + " isActive: %d\n" + " sampleRate: %.2f\n" + " IOBufferDuration: %f\n" + " outputNumberOfChannels: %ld\n" + " inputNumberOfChannels: %ld\n" + " outputLatency: %f\n" + " inputLatency: %f\n" + " outputVolume: %f\n" + "}"; NSString *description = [NSString stringWithFormat:format, self.category, (long)self.categoryOptions, self.mode, self.isActive, self.sampleRate, self.IOBufferDuration, @@ -206,7 +205,7 @@ - (BOOL)ignoresPreferredAttributeConfigurationErrors { } // TODO(tkchin): Check for duplicates. -- (void)addDelegate:(id)delegate { +- (void)addDelegate:(id)delegate { RTCLog(@"Adding delegate: (%p)", delegate); if (!delegate) { return; @@ -217,7 +216,7 @@ - (void)addDelegate:(id)delegate { } } -- (void)removeDelegate:(id)delegate { +- (void)removeDelegate:(id)delegate { RTCLog(@"Removing delegate: (%p)", delegate); if (!delegate) { return; @@ -621,7 +620,7 @@ + (NSError *)lockError { return error; } -- (std::vector<__weak id >)delegates { +- (std::vector<__weak id >)delegates { @synchronized(self) { // Note: this returns a copy. return _delegates; @@ -629,7 +628,7 @@ + (NSError *)lockError { } // TODO(tkchin): check for duplicates. -- (void)pushDelegate:(id)delegate { +- (void)pushDelegate:(id)delegate { @synchronized(self) { _delegates.insert(_delegates.begin(), delegate); } @@ -687,7 +686,7 @@ - (BOOL)checkLock:(NSError **)outError { // acquire lock if it hasn't already been called. if (!self.isLocked) { if (outError) { - *outError = [RTCAudioSession lockError]; + *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError]; } return NO; } @@ -730,8 +729,8 @@ - (BOOL)configureWebRTCSession:(NSError **)outError { // Configure the AVAudioSession and activate it. // Provide an error even if there isn't one so we can log it. 
NSError *error = nil; - RTCAudioSessionConfiguration *webRTCConfig = - [RTCAudioSessionConfiguration webRTCConfiguration]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig = + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration]; if (![self setConfiguration:webRTCConfig active:YES error:&error]) { RTCLogError(@"Failed to set WebRTC audio configuration: %@", error.localizedDescription); @@ -866,7 +865,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { - if (context == (__bridge void*)RTCAudioSession.class) { + if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) { if (object == _session) { NSNumber *newVolume = change[NSKeyValueChangeNewKey]; RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue); diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h index 9f3765da22..4582b80557 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h @@ -23,7 +23,7 @@ RTC_EXTERN const double kRTCAudioSessionLowComplexityIOBufferDuration; // Struct to hold configuration values. RTC_OBJC_EXPORT -@interface RTCAudioSessionConfiguration : NSObject +@interface RTC_OBJC_TYPE (RTCAudioSessionConfiguration) : NSObject @property(nonatomic, strong) NSString *category; @property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions; @@ -41,7 +41,7 @@ RTC_OBJC_EXPORT /** Returns the configuration that WebRTC needs. */ + (instancetype)webRTCConfiguration; /** Provide a way to override the default configuration. 
*/ -+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration; ++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration; @end diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 2247e65ab5..39e9ac13ec 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -51,9 +51,9 @@ // TODO(henrika): monitor this size and determine if it should be modified. const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06; -static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil; +static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil; -@implementation RTCAudioSessionConfiguration +@implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) @synthesize category = _category; @synthesize categoryOptions = _categoryOptions; @@ -105,9 +105,9 @@ + (void)initialize { } + (instancetype)currentConfiguration { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; - RTCAudioSessionConfiguration *config = - [[RTCAudioSessionConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config = + [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init]; config.category = session.category; config.categoryOptions = session.categoryOptions; config.mode = session.mode; @@ -120,11 +120,11 @@ + (instancetype)currentConfiguration { + (instancetype)webRTCConfiguration { @synchronized(self) { - return (RTCAudioSessionConfiguration *)gWebRTCConfiguration; + return (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)gWebRTCConfiguration; } } -+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration { ++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration { @synchronized(self) { 
gWebRTCConfiguration = configuration; } diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h index 7ca2d757eb..e28f26f9ae 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h @@ -19,7 +19,7 @@ class AudioSessionObserver; /** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate * methods on the AudioSessionObserver. */ -@interface RTCNativeAudioSessionDelegateAdapter : NSObject +@interface RTCNativeAudioSessionDelegateAdapter : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index aef97b9fe5..daddf314a4 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -26,20 +26,20 @@ - (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer { return self; } -#pragma mark - RTCAudioSessionDelegate +#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate) -- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session { +- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session { _observer->OnInterruptionBegin(); } -- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session +- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session shouldResumeSession:(BOOL)shouldResumeSession { _observer->OnInterruptionEnd(); } -- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session - reason:(AVAudioSessionRouteChangeReason)reason - previousRoute:(AVAudioSessionRouteDescription *)previousRoute { +- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session + reason:(AVAudioSessionRouteChangeReason)reason + previousRoute:(AVAudioSessionRouteDescription 
*)previousRoute { switch (reason) { case AVAudioSessionRouteChangeReasonUnknown: case AVAudioSessionRouteChangeReasonNewDeviceAvailable: @@ -64,24 +64,24 @@ - (void)audioSessionDidChangeRoute:(RTCAudioSession *)session } } -- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session { +- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionMediaServerReset:(RTCAudioSession *)session { +- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSession:(RTCAudioSession *)session +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord { _observer->OnCanPlayOrRecordChange(canPlayOrRecord); } -- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session { +- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session { +- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didChangeOutputVolume:(float)outputVolume { _observer->OnChangedOutputVolume(); } diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index 2b5e56f4ec..fed5a37827 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -17,10 +17,10 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -// Camera capture that implements RTCVideoCapturer. Delivers frames to a RTCVideoCapturerDelegate -// (usually RTCVideoSource). +// Camera capture that implements RTCVideoCapturer. Delivers frames to a +// RTCVideoCapturerDelegate (usually RTCVideoSource). 
NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") -@interface RTCCameraVideoCapturer : RTCVideoCapturer +@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) // Capture session that is used for capturing. Valid from initialization to dealloc. @property(readonly, nonatomic) AVCaptureSession *captureSession; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index f83c03e4c0..6edcec88e9 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -21,11 +21,13 @@ #import "helpers/AVCaptureSession+DevicePosition.h" #import "helpers/RTCDispatcher+Private.h" +#include "rtc_base/system/gcd_helpers.h" const int64_t kNanosecondsPerSecond = 1000000000; -@interface RTCCameraVideoCapturer () -@property(nonatomic, readonly) dispatch_queue_t frameQueue; +@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) +() @property(nonatomic, + readonly) dispatch_queue_t frameQueue; @property(nonatomic, strong) AVCaptureDevice *currentDevice; @property(nonatomic, assign) BOOL hasRetriedOnFatalError; @property(nonatomic, assign) BOOL isRunning; @@ -33,7 +35,7 @@ @interface RTCCameraVideoCapturer ())delegate { +- (instancetype)initWithDelegate:(__weak id)delegate { return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]]; } // This initializer is used for testing. -- (instancetype)initWithDelegate:(__weak id)delegate +- (instancetype)initWithDelegate:(__weak id)delegate captureSession:(AVCaptureSession *)captureSession { if (self = [super initWithDelegate:delegate]) { // Create the capture session and all relevant inputs and outputs. We need @@ -109,9 +111,9 @@ - (instancetype)initWithDelegate:(__weak id)delegate } - (void)dealloc { - NSAssert( - !_willBeRunning, - @"Session was still running in RTCCameraVideoCapturer dealloc. 
Forgot to call stopCapture?"); + NSAssert(!_willBeRunning, + @"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to " + @"call stopCapture?"); [[NSNotificationCenter defaultCenter] removeObserver:self]; } @@ -153,7 +155,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device fps:(NSInteger)fps completionHandler:(nullable void (^)(NSError *))completionHandler { _willBeRunning = YES; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps); @@ -195,7 +197,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { _willBeRunning = NO; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ RTCLogInfo("Stop"); @@ -224,10 +226,10 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand #if TARGET_OS_IPHONE - (void)deviceOrientationDidChange:(NSNotification *)notification { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - [self updateOrientation]; - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + [self updateOrientation]; + }]; } #endif @@ -286,12 +288,14 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput _rotation = RTCVideoRotation_0; #endif - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kNanosecondsPerSecond; - RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer - rotation:_rotation - timeStampNs:timeStampNs]; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = 
+ [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:_rotation + timeStampNs:timeStampNs]; [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; } @@ -342,29 +346,29 @@ - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; RTCLogError(@"Capture session runtime error: %@", error); - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ #if TARGET_OS_IPHONE - if (error.code == AVErrorMediaServicesWereReset) { - [self handleNonFatalError]; - } else { - [self handleFatalError]; - } + if (error.code == AVErrorMediaServicesWereReset) { + [self handleNonFatalError]; + } else { + [self handleFatalError]; + } #else - [self handleFatalError]; + [self handleFatalError]; #endif - }]; + }]; } - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { RTCLog(@"Capture session started."); - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - // If we successfully restarted after an unknown error, - // allow future retries on fatal errors. - self.hasRetriedOnFatalError = NO; - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + // If we successfully restarted after an unknown + // error, allow future retries on fatal errors. 
+ self.hasRetriedOnFatalError = NO; + }]; } - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { @@ -372,7 +376,7 @@ - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { } - (void)handleFatalError { - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ if (!self.hasRetriedOnFatalError) { @@ -386,13 +390,13 @@ - (void)handleFatalError { } - (void)handleNonFatalError { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - RTCLog(@"Restarting capture session after error."); - if (self.isRunning) { - [self.captureSession startRunning]; - } - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + RTCLog(@"Restarting capture session after error."); + if (self.isRunning) { + [self.captureSession startRunning]; + } + }]; } #if TARGET_OS_IPHONE @@ -400,13 +404,14 @@ - (void)handleNonFatalError { #pragma mark - UIApplication notifications - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - if (self.isRunning && !self.captureSession.isRunning) { - RTCLog(@"Restarting capture session on active."); - [self.captureSession startRunning]; - } - }]; + [RTC_OBJC_TYPE(RTCDispatcher) + dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + if (self.isRunning && !self.captureSession.isRunning) { + RTCLog(@"Restarting capture session on active."); + [self.captureSession startRunning]; + } + }]; } #endif // TARGET_OS_IPHONE @@ -415,10 +420,10 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { - (dispatch_queue_t)frameQueue { if (!_frameQueue) { - _frameQueue = - dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL); - dispatch_set_target_queue(_frameQueue, - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); + _frameQueue = 
RTCDispatchQueueCreateWithTarget( + "org.webrtc.cameravideocapturer.video", + DISPATCH_QUEUE_SERIAL, + dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); } return _frameQueue; } @@ -447,7 +452,8 @@ - (void)setupVideoDataOutput { // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the // device with the most efficient output format first. Find the first format that we support. - NSSet *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats]; + NSSet *supportedPixelFormats = + [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats]; NSMutableOrderedSet *availablePixelFormats = [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes]; [availablePixelFormats intersectSet:supportedPixelFormats]; @@ -464,7 +470,7 @@ - (void)setupVideoDataOutput { - (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format { FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) { + if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) { mediaSubType = _preferredOutputPixelFormat; } @@ -478,7 +484,7 @@ - (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format { #pragma mark - Private, called inside capture queue - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"updateDeviceCaptureFormat must be called on the capture queue."); @try { _currentDevice.activeFormat = format; @@ -490,7 +496,7 @@ - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger } - (void)reconfigureCaptureSessionInput { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], + 
NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"reconfigureCaptureSessionInput must be called on the capture queue."); NSError *error = nil; AVCaptureDeviceInput *input = @@ -512,7 +518,7 @@ - (void)reconfigureCaptureSessionInput { } - (void)updateOrientation { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"updateOrientation must be called on the capture queue."); #if TARGET_OS_IPHONE _orientation = [UIDevice currentDevice].orientation; diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.h b/sdk/objc/components/capturer/RTCFileVideoCapturer.h index 0782588d9c..19262c64cf 100644 --- a/sdk/objc/components/capturer/RTCFileVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.h @@ -27,7 +27,7 @@ typedef void (^RTCFileVideoCapturerErrorBlock)(NSError *error); RTC_OBJC_EXPORT NS_CLASS_AVAILABLE_IOS(10) -@interface RTCFileVideoCapturer : RTCVideoCapturer +@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) /** * Starts asynchronous capture of frames from video file. 
diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/sdk/objc/components/capturer/RTCFileVideoCapturer.m index 207a21d8c0..4c39ccda3a 100644 --- a/sdk/objc/components/capturer/RTCFileVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.m @@ -13,8 +13,10 @@ #import "base/RTCLogging.h" #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" +#include "rtc_base/system/gcd_helpers.h" -NSString *const kRTCFileVideoCapturerErrorDomain = @"org.webrtc.RTCFileVideoCapturer"; +NSString *const kRTCFileVideoCapturerErrorDomain = + @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)"; typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) { RTCFileVideoCapturerErrorCode_CapturerRunning = 2000, @@ -27,12 +29,12 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) { RTCFileVideoCapturerStatusStopped }; -@interface RTCFileVideoCapturer () -@property(nonatomic, assign) CMTime lastPresentationTime; +@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) +() @property(nonatomic, assign) CMTime lastPresentationTime; @property(nonatomic, strong) NSURL *fileURL; @end -@implementation RTCFileVideoCapturer { +@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) { AVAssetReader *_reader; AVAssetReaderTrackOutput *_outTrack; RTCFileVideoCapturerStatus _status; @@ -118,9 +120,10 @@ - (nullable NSString *)pathForFileName:(NSString *)fileName { - (dispatch_queue_t)frameQueue { if (!_frameQueue) { - _frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL); - dispatch_set_target_queue(_frameQueue, - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0)); + _frameQueue = RTCDispatchQueueCreateWithTarget( + "org.webrtc.filecapturer.video", + DISPATCH_QUEUE_SERIAL, + dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0)); } return _frameQueue; } @@ -180,11 +183,14 @@ - (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer { return; } - RTCCVPixelBuffer *rtcPixelBuffer 
= [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; NSTimeInterval timeStampSeconds = CACurrentMediaTime(); int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC); - RTCVideoFrame *videoFrame = - [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs]; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:0 + timeStampNs:timeStampNs]; CFRelease(sampleBuffer); dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ diff --git a/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/sdk/objc/components/network/RTCNetworkMonitor+Private.h new file mode 100644 index 0000000000..efb37bb63b --- /dev/null +++ b/sdk/objc/components/network/RTCNetworkMonitor+Private.h @@ -0,0 +1,23 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCNetworkMonitor.h" + +#include "sdk/objc/native/src/network_monitor_observer.h" + +@interface RTCNetworkMonitor () + +/** |observer| is a raw pointer and should be kept alive + * for this object's lifetime. 
+ */ +- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer + NS_DESIGNATED_INITIALIZER; + +@end diff --git a/common_video/include/video_frame.h b/sdk/objc/components/network/RTCNetworkMonitor.h similarity index 52% rename from common_video/include/video_frame.h rename to sdk/objc/components/network/RTCNetworkMonitor.h index ba280f2a8c..21d22f5463 100644 --- a/common_video/include/video_frame.h +++ b/sdk/objc/components/network/RTCNetworkMonitor.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * Copyright 2020 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,10 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef COMMON_VIDEO_INCLUDE_VIDEO_FRAME_H_ -#define COMMON_VIDEO_INCLUDE_VIDEO_FRAME_H_ +#import -// TODO(nisse): Delete this file, after downstream code is updated. -#include "api/video/encoded_image.h" +NS_ASSUME_NONNULL_BEGIN -#endif // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_H_ +/** Listens for NWPathMonitor updates and forwards the results to a C++ + * observer. + */ +@interface RTCNetworkMonitor : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm new file mode 100644 index 0000000000..8ac7d3a0d2 --- /dev/null +++ b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -0,0 +1,109 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCNetworkMonitor+Private.h" + +#import + +#import "base/RTCLogging.h" +#import "helpers/RTCDispatcher+Private.h" + +namespace { + +rtc::AdapterType AdapterTypeFromInterfaceType(nw_interface_type_t interfaceType) { + rtc::AdapterType adapterType = rtc::ADAPTER_TYPE_UNKNOWN; + switch (interfaceType) { + case nw_interface_type_other: + adapterType = rtc::ADAPTER_TYPE_UNKNOWN; + break; + case nw_interface_type_wifi: + adapterType = rtc::ADAPTER_TYPE_WIFI; + break; + case nw_interface_type_cellular: + adapterType = rtc::ADAPTER_TYPE_CELLULAR; + break; + case nw_interface_type_wired: + adapterType = rtc::ADAPTER_TYPE_ETHERNET; + break; + case nw_interface_type_loopback: + adapterType = rtc::ADAPTER_TYPE_LOOPBACK; + break; + default: + adapterType = rtc::ADAPTER_TYPE_UNKNOWN; + break; + } + return adapterType; +} + +} // namespace + +@implementation RTCNetworkMonitor { + webrtc::NetworkMonitorObserver *_observer; + nw_path_monitor_t _pathMonitor; + dispatch_queue_t _monitorQueue; +} + +- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { + RTC_DCHECK(observer); + if (self = [super init]) { + _observer = observer; + if (@available(iOS 12, *)) { + _pathMonitor = nw_path_monitor_create(); + if (_pathMonitor == nil) { + RTCLog(@"nw_path_monitor_create failed."); + return nil; + } + RTCLog(@"NW path monitor created."); + __weak RTCNetworkMonitor *weakSelf = self; + nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) { + if (weakSelf == nil) { + return; + } + RTCNetworkMonitor *strongSelf = weakSelf; + RTCLog(@"NW path monitor: updated."); + nw_path_status_t status = nw_path_get_status(path); + if (status == nw_path_status_invalid) { + RTCLog(@"NW path monitor status: invalid."); + } else if (status == nw_path_status_unsatisfied) { + RTCLog(@"NW path monitor status: unsatisfied."); + } else if (status == nw_path_status_satisfied) { + RTCLog(@"NW path monitor status: satisfied."); + } else if (status == 
nw_path_status_satisfiable) { + RTCLog(@"NW path monitor status: satisfiable."); + } + std::map *map = + new std::map(); + nw_path_enumerate_interfaces( + path, (nw_path_enumerate_interfaces_block_t) ^ (nw_interface_t interface) { + const char *name = nw_interface_get_name(interface); + nw_interface_type_t interfaceType = nw_interface_get_type(interface); + RTCLog(@"NW path monitor available interface: %s", name); + rtc::AdapterType adapterType = AdapterTypeFromInterfaceType(interfaceType); + map->insert(std::pair(name, adapterType)); + }); + strongSelf->_observer->OnPathUpdate(std::move(*map)); + delete map; + }); + nw_path_monitor_set_queue( + _pathMonitor, + [RTC_OBJC_TYPE(RTCDispatcher) dispatchQueueForType:RTCDispatcherTypeNetworkMonitor]); + nw_path_monitor_start(_pathMonitor); + } + } + return self; +} + +- (void)dealloc { + if (@available(iOS 12, *)) { + nw_path_monitor_cancel(_pathMonitor); + } +} + +@end diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm index 6cd7ff3055..f4c76fa313 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm @@ -97,7 +97,7 @@ - (void)getWidth:(nonnull int *)width cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { *width = frame.width; *height = frame.height; *cropWidth = frame.width; @@ -106,7 +106,7 @@ - (void)getWidth:(nonnull int *)width *cropY = 0; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { if (![super setupTexturesForFrame:frame]) { return NO; } @@ -116,7 +116,7 @@ - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { return NO; } - id buffer = [frame.buffer toI420]; + id buffer = [frame.buffer toI420]; // Luma (y) 
texture. if (!_descriptor || _width != frame.width || _height != frame.height) { diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h index 7b615396d0..f70e2ad5ee 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -15,9 +15,9 @@ NS_AVAILABLE_MAC(10.11) RTC_OBJC_EXPORT -@interface RTCMTLNSVideoView : NSView +@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; + (BOOL)isMetalAvailable; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m index ac5294e4c0..625fb1caa7 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m @@ -17,13 +17,13 @@ #import "RTCMTLI420Renderer.h" -@interface RTCMTLNSVideoView () -@property(nonatomic) id renderer; +@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) +() @property(nonatomic) id renderer; @property(nonatomic, strong) MTKView *metalView; -@property(atomic, strong) RTCVideoFrame *videoFrame; +@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @end -@implementation RTCMTLNSVideoView { +@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { id _renderer; } @@ -102,7 +102,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - (void)setSize:(CGSize)size { _metalView.drawableSize = size; @@ -112,7 +112,7 @@ - (void)setSize:(CGSize)size { [_metalView draw]; } -- (void)renderFrame:(nullable RTCVideoFrame *)frame { +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { if (frame == nil) { return; } diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm 
b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm index 98835cb518..7b037c6dbc 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -95,8 +95,8 @@ - (void)getWidth:(nonnull int *)width cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { - RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer); *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer); *cropWidth = pixelBuffer.cropWidth; @@ -105,12 +105,12 @@ - (void)getWidth:(nonnull int *)width *cropY = pixelBuffer.cropY; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { - RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]); +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); if (![super setupTexturesForFrame:frame]) { return NO; } - CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer; + CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer; id lumaTexture = nil; id chromaTexture = nil; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm index eb4c2ba106..c6adcd0fb5 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -93,8 +93,8 @@ - (void)getWidth:(nonnull int *)width cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { - RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + 
ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer); *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer); *cropWidth = pixelBuffer.cropWidth; @@ -103,12 +103,12 @@ - (void)getWidth:(nonnull int *)width *cropY = pixelBuffer.cropY; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { - RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]); +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); if (![super setupTexturesForFrame:frame]) { return NO; } - CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer; + CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer; id gpuTexture = nil; CVMetalTextureRef textureOut = nullptr; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h index f442886b79..916d4d4430 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN @interface RTCMTLRenderer (Private) - (nullable id)currentMetalDevice; - (NSString *)shaderSource; -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame; +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; - (void)uploadTexturesToRenderEncoder:(id)renderEncoder; - (void)getWidth:(nonnull int *)width height:(nonnull int *)height @@ -27,7 +27,7 @@ NS_ASSUME_NONNULL_BEGIN cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame; + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end NS_ASSUME_NONNULL_END diff --git 
a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index 9c1f3719b3..aa31545973 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -28,7 +28,7 @@ NS_ASSUME_NONNULL_BEGIN * * @param frame The frame to be rendered. */ -- (void)drawFrame:(RTCVideoFrame *)frame; +- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; /** * Sets the provided view as rendering destination if possible. diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index 63cf225bac..e8d161330f 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -167,11 +167,11 @@ - (void)getWidth:(int *)width cropHeight:(int *)cropHeight cropX:(int *)cropX cropY:(int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { RTC_NOTREACHED() << "Virtual method not implemented in subclass."; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { // Apply rotation override if set. RTCVideoRotation rotation; NSValue *rotationOverride = self.rotationOverride; @@ -311,7 +311,7 @@ - (void)render { #pragma mark - RTCMTLRenderer -- (void)drawFrame:(RTCVideoFrame *)frame { +- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { @autoreleasepool { // Wait until the inflight (curently sent to GPU) command buffer // has completed the GPU work. 
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h index 36cb144a13..5678112ade 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -27,9 +27,9 @@ NS_ASSUME_NONNULL_BEGIN NS_CLASS_AVAILABLE_IOS(9) RTC_OBJC_EXPORT -@interface RTCMTLVideoView : UIView +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; @property(nonatomic) UIViewContentMode videoContentMode; diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index c9a622e484..f5be7c061c 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -29,17 +29,17 @@ #define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") #define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") -@interface RTCMTLVideoView () -@property(nonatomic) RTCMTLI420Renderer *rendererI420; +@interface RTC_OBJC_TYPE (RTCMTLVideoView) +() @property(nonatomic) RTCMTLI420Renderer *rendererI420; @property(nonatomic) RTCMTLNV12Renderer *rendererNV12; @property(nonatomic) RTCMTLRGBRenderer *rendererRGB; @property(nonatomic) MTKView *metalView; -@property(atomic) RTCVideoFrame *videoFrame; +@property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic) CGSize videoFrameSize; @property(nonatomic) int64_t lastFrameTimeNs; @end -@implementation RTCMTLVideoView +@implementation RTC_OBJC_TYPE (RTCMTLVideoView) @synthesize delegate = _delegate; @synthesize rendererI420 = _rendererI420; @@ -110,9 +110,10 @@ + (RTCMTLRGBRenderer *)createRGBRenderer { } - (void)configure { - NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not availiable on this device"); + NSAssert([RTC_OBJC_TYPE(RTCMTLVideoView) isMetalAvailable], + @"Metal not availiable on this 
device"); - self.metalView = [RTCMTLVideoView createMetalView:self.bounds]; + self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; self.metalView.delegate = self; self.metalView.contentMode = UIViewContentModeScaleAspectFill; [self addSubview:self.metalView]; @@ -140,7 +141,7 @@ - (void)layoutSubviews { - (void)drawInMTKView:(nonnull MTKView *)view { NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance."); - RTCVideoFrame *videoFrame = self.videoFrame; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame; // Skip rendering if we've already rendered this frame. if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) { return; @@ -151,12 +152,12 @@ - (void)drawInMTKView:(nonnull MTKView *)view { } RTCMTLRenderer *renderer; - if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { - RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer; + if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer; const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) { if (!self.rendererRGB) { - self.rendererRGB = [RTCMTLVideoView createRGBRenderer]; + self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer]; if (![self.rendererRGB addRenderingDestination:self.metalView]) { self.rendererRGB = nil; RTCLogError(@"Failed to create RGB renderer"); @@ -166,7 +167,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { renderer = self.rendererRGB; } else { if (!self.rendererNV12) { - self.rendererNV12 = [RTCMTLVideoView createNV12Renderer]; + self.rendererNV12 = [RTC_OBJC_TYPE(RTCMTLVideoView) createNV12Renderer]; if (![self.rendererNV12 addRenderingDestination:self.metalView]) { self.rendererNV12 = nil; RTCLogError(@"Failed to create NV12 renderer"); @@ 
-177,7 +178,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { } } else { if (!self.rendererI420) { - self.rendererI420 = [RTCMTLVideoView createI420Renderer]; + self.rendererI420 = [RTC_OBJC_TYPE(RTCMTLVideoView) createI420Renderer]; if (![self.rendererI420 addRenderingDestination:self.metalView]) { self.rendererI420 = nil; RTCLogError(@"Failed to create I420 renderer"); @@ -236,12 +237,12 @@ - (CGSize)drawableSize { } } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - (void)setSize:(CGSize)size { - __weak RTCMTLVideoView *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCMTLVideoView) *weakSelf = self; dispatch_async(dispatch_get_main_queue(), ^{ - RTCMTLVideoView *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCMTLVideoView) *strongSelf = weakSelf; strongSelf.videoFrameSize = size; CGSize drawableSize = [strongSelf drawableSize]; @@ -252,7 +253,7 @@ - (void)setSize:(CGSize)size { }); } -- (void)renderFrame:(nullable RTCVideoFrame *)frame { +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { if (!self.isEnabled) { return; } diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h index 034a22bdd0..71a073ab21 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -12,11 +12,11 @@ NS_ASSUME_NONNULL_BEGIN -/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and - * RTCEAGLVideoView if no external shader is specified. This shader will render +/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView + * and RTCEAGLVideoView if no external shader is specified. This shader will render * the video in a rectangle without any color or geometric transformations. 
*/ -@interface RTCDefaultShader : NSObject +@interface RTCDefaultShader : NSObject @end diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h index 73cd3a1a26..24b26cd602 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h @@ -17,23 +17,25 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCEAGLVideoView; +@class RTC_OBJC_TYPE(RTCEAGLVideoView); /** - * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its - * bounds using OpenGLES 2.0 or OpenGLES 3.0. + * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames + * in its bounds using OpenGLES 2.0 or OpenGLES 3.0. */ RTC_OBJC_EXPORT NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.") -@interface RTCEAGLVideoView : UIView +@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; - (instancetype)initWithFrame:(CGRect)frame - shader:(id)shader NS_DESIGNATED_INITIALIZER; + shader:(id)shader + NS_DESIGNATED_INITIALIZER; - (instancetype)initWithCoder:(NSCoder *)aDecoder - shader:(id)shader NS_DESIGNATED_INITIALIZER; + shader:(id)shader + NS_DESIGNATED_INITIALIZER; /** @abstract Wrapped RTCVideoRotation, or nil. */ diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m index 6a01d48f32..a3435a7815 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -21,7 +21,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -// RTCEAGLVideoView wraps a GLKView which is setup with +// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is setup with // enableSetNeedsDisplay = NO for the purpose of gaining control of // exactly when to call -[GLKView display]. 
This need for extra // control is required to avoid triggering method calls on GLKView @@ -30,23 +30,24 @@ // error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is // the method that will trigger the binding of the render // buffer. Because the standard behaviour of -[UIView setNeedsDisplay] -// is disabled for the reasons above, the RTCEAGLVideoView maintains +// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains // its own |isDirty| flag. -@interface RTCEAGLVideoView () -// |videoFrame| is set when we receive a frame from a worker thread and is read -// from the display link callback so atomicity is required. -@property(atomic, strong) RTCVideoFrame *videoFrame; +@interface RTC_OBJC_TYPE (RTCEAGLVideoView) +() + // |videoFrame| is set when we receive a frame from a worker thread and is read + // from the display link callback so atomicity is required. + @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic, readonly) GLKView *glkView; @end -@implementation RTCEAGLVideoView { +@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { RTCDisplayLinkTimer *_timer; EAGLContext *_glContext; // This flag should only be set and read on the main thread (e.g. 
by // setNeedsDisplay) BOOL _isDirty; - id _shader; + id _shader; RTCNV12TextureCache *_nv12TextureCache; RTCI420TextureCache *_i420TextureCache; // As timestamps should be unique between frames, will store last @@ -67,7 +68,7 @@ - (instancetype)initWithCoder:(NSCoder *)aDecoder { return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; } -- (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { +- (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { if (self = [super initWithFrame:frame]) { _shader = shader; if (![self configure]) { @@ -77,7 +78,8 @@ - (instancetype)initWithFrame:(CGRect)frame shader:(id)shad return self; } -- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id)shader { +- (instancetype)initWithCoder:(NSCoder *)aDecoder + shader:(id)shader { if (self = [super initWithCoder:aDecoder]) { _shader = shader; if (![self configure]) { @@ -127,11 +129,11 @@ - (BOOL)configure { // Frames are received on a separate thread, so we poll for current frame // using a refresh rate proportional to screen refresh frequency. This // occurs on the main thread. - __weak RTCEAGLVideoView *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ - RTCEAGLVideoView *strongSelf = weakSelf; - [strongSelf displayLinkTimerDidFire]; - }]; + RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; + [strongSelf displayLinkTimerDidFire]; + }]; if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) { [self setupGL]; } @@ -182,7 +184,7 @@ - (void)layoutSubviews { - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { // The renderer will draw the frame to the framebuffer corresponding to the // one used by |view|. 
- RTCVideoFrame *frame = self.videoFrame; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) { return; } @@ -192,7 +194,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { } [self ensureGLContext]; glClear(GL_COLOR_BUFFER_BIT); - if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { if (!_nv12TextureCache) { _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext]; } @@ -223,18 +225,18 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { } } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) // These methods may be called on non-main thread. - (void)setSize:(CGSize)size { - __weak RTCEAGLVideoView *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; dispatch_async(dispatch_get_main_queue(), ^{ - RTCEAGLVideoView *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size]; }); } -- (void)renderFrame:(RTCVideoFrame *)frame { +- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { self.videoFrame = frame; } diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h index 07172e713a..9fdcc5a695 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -20,6 +20,6 @@ - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER; -- (void)uploadFrameToTextures:(RTCVideoFrame *)frame; +- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index 865f3a258a..5dccd4bf6a 100644 --- 
a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -123,10 +123,10 @@ - (void)uploadPlane:(const uint8_t *)plane uploadPlane); } -- (void)uploadFrameToTextures:(RTCVideoFrame *)frame { +- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; - id buffer = [frame.buffer toI420]; + id buffer = [frame.buffer toI420]; const int chromaWidth = buffer.chromaWidth; const int chromaHeight = buffer.chromaHeight; diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h index 2540f38154..c9ee986f88 100644 --- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h @@ -19,20 +19,21 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCNSGLVideoView; +@class RTC_OBJC_TYPE(RTCNSGLVideoView); RTC_OBJC_EXPORT -@protocol RTCNSGLVideoViewDelegate -@end +@protocol RTC_OBJC_TYPE +(RTCNSGLVideoViewDelegate) @end RTC_OBJC_EXPORT -@interface RTCNSGLVideoView : NSOpenGLView +@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; - (instancetype)initWithFrame:(NSRect)frameRect pixelFormat:(NSOpenGLPixelFormat *)format - shader:(id)shader NS_DESIGNATED_INITIALIZER; + shader:(id)shader + NS_DESIGNATED_INITIALIZER; @end diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m index 714cae79c6..de54e36711 100644 --- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m @@ -23,10 +23,12 @@ #import "base/RTCLogging.h" #import "base/RTCVideoFrame.h" -@interface RTCNSGLVideoView () -// |videoFrame| is set when we receive a frame from a worker thread and is read -// from the display link callback so atomicity is 
required. -@property(atomic, strong) RTCVideoFrame *videoFrame; +@interface RTC_OBJC_TYPE (RTCNSGLVideoView) +() + // |videoFrame| is set when we receive a frame from a worker thread and is read + // from the display link callback so atomicity is required. + @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * + videoFrame; @property(atomic, strong) RTCI420TextureCache *i420TextureCache; - (void)drawFrame; @@ -38,15 +40,16 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext) { - RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext; + RTC_OBJC_TYPE(RTCNSGLVideoView) *view = + (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext; [view drawFrame]; return kCVReturnSuccess; } -@implementation RTCNSGLVideoView { +@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { CVDisplayLinkRef _displayLink; - RTCVideoFrame *_lastDrawnFrame; - id _shader; + RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame; + id _shader; } @synthesize delegate = _delegate; @@ -59,7 +62,7 @@ - (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)f - (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format - shader:(id)shader { + shader:(id)shader { if (self = [super initWithFrame:frame pixelFormat:format]) { _shader = shader; } @@ -105,7 +108,7 @@ - (void)clearGLContext { [super clearGLContext]; } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) // These methods may be called on non-main thread. 
- (void)setSize:(CGSize)size { @@ -114,14 +117,14 @@ - (void)setSize:(CGSize)size { }); } -- (void)renderFrame:(RTCVideoFrame *)frame { +- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { self.videoFrame = frame; } #pragma mark - Private - (void)drawFrame { - RTCVideoFrame *frame = self.videoFrame; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; if (!frame || frame == _lastDrawnFrame) { return; } diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h index 9cba823271..f202b836b5 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -10,7 +10,9 @@ #import -@class RTCVideoFrame; +#import "base/RTCMacros.h" + +@class RTC_OBJC_TYPE(RTCVideoFrame); NS_ASSUME_NONNULL_BEGIN @@ -22,7 +24,7 @@ NS_ASSUME_NONNULL_BEGIN - (instancetype)init NS_UNAVAILABLE; - (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER; -- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame; +- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; - (void)releaseTextures; diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m index aab62d4363..a520ac45b4 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -76,10 +76,10 @@ - (BOOL)loadTexture:(CVOpenGLESTextureRef *)textureOut return YES; } -- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame { - NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]], +- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]], @"frame must be CVPixelBuffer backed"); - RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = 
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer; return [self loadTexture:&_yTextureRef pixelBuffer:pixelBuffer diff --git a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h index 6876cc3ab5..9df30a8fa0 100644 --- a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h +++ b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h @@ -15,19 +15,17 @@ NS_ASSUME_NONNULL_BEGIN /** - * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in - * rendering for the RTCEAGLVideoView/RTCNSGLVideoView. + * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES shaders + * used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView. */ RTC_OBJC_EXPORT -@protocol RTCVideoViewShading +@protocol RTC_OBJC_TYPE +(RTCVideoViewShading) -/** Callback for I420 frames. Each plane is given as a texture. */ -- (void)applyShadingForFrameWithWidth:(int)width - height:(int)height - rotation:(RTCVideoRotation)rotation - yPlane:(GLuint)yPlane - uPlane:(GLuint)uPlane - vPlane:(GLuint)vPlane; + /** Callback for I420 frames. Each plane is given as a texture. */ + - (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation + : (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane + : (GLuint)vPlane; /** Callback for NV12 frames. Each plane is given as a texture. */ - (void)applyShadingForFrameWithWidth:(int)width diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h index f67fa94ca8..a0cd8515d1 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. 
*/ -@interface RTCCodecSpecificInfoH264 () +@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) +() -- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo; + - (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo; @end diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h index ece9570a13..ae3003a115 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h @@ -20,7 +20,7 @@ typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) { }; RTC_OBJC_EXPORT -@interface RTCCodecSpecificInfoH264 : NSObject +@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject @property(nonatomic, assign) RTCH264PacketizationMode packetizationMode; diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm index 57f2411e3b..e38ed307b3 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm @@ -13,7 +13,7 @@ #import "RTCH264ProfileLevelId.h" // H264 specific settings. -@implementation RTCCodecSpecificInfoH264 +@implementation RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) @synthesize packetizationMode = _packetizationMode; diff --git a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265+Private.h similarity index 59% rename from sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h rename to sdk/objc/components/video_codec/RTCCodecSpecificInfoH265+Private.h index cfb7fb1145..a22d3594f5 100644 --- a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265+Private.h @@ -7,19 +7,18 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ +/* This file is borrowed from sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h */ -#import "base/RTCRtpFragmentationHeader.h" +#import "RTCCodecSpecificInfoH265.h" -#include "modules/include/module_common_types.h" +#include "modules/video_coding/include/video_codec_interface.h" NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. */ -@interface RTCRtpFragmentationHeader (Private) +@interface RTCCodecSpecificInfoH265 () -- (instancetype)initWithNativeFragmentationHeader: - (const webrtc::RTPFragmentationHeader *__nullable)fragmentationHeader; -- (std::unique_ptr)createNativeFragmentationHeader; +- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo; @end diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265.h new file mode 100644 index 0000000000..82865a9cdb --- /dev/null +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265.h @@ -0,0 +1,28 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +/* This file is borrowed from sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h. */ + +#import + +#import "RTCCodecSpecificInfo.h" +#import "RTCMacros.h" + +/** Class for H265 specific config. 
*/ +typedef NS_ENUM(NSUInteger, RTCH265PacketizationMode) { + RTCH265PacketizationModeNonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed + RTCH265PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed +}; + +RTC_OBJC_EXPORT +@interface RTCCodecSpecificInfoH265 : NSObject + +@property(nonatomic, assign) RTCH265PacketizationMode packetizationMode; + +@end diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265.mm b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265.mm new file mode 100644 index 0000000000..94a46985f7 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH265.mm @@ -0,0 +1,28 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + /* This file is borrowed from sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm */ + +#import "RTCCodecSpecificInfoH265+Private.h" + +// H265 specific settings. 
+@implementation RTCCodecSpecificInfoH265 + +@synthesize packetizationMode = _packetizationMode; + +- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo { + webrtc::CodecSpecificInfo codecSpecificInfo; + codecSpecificInfo.codecType = webrtc::kVideoCodecH265; + codecSpecificInfo.codecSpecific.H265.packetization_mode = + (webrtc::H265PacketizationMode)_packetizationMode; + + return codecSpecificInfo; +} + +@end diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h index 7ca9463a59..de5a9c4684 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h +++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h @@ -16,10 +16,11 @@ NS_ASSUME_NONNULL_BEGIN /** This decoder factory include support for all codecs bundled with WebRTC. If using custom - * codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory. + * codecs, create custom implementations of RTCVideoEncoderFactory and + * RTCVideoDecoderFactory. 
*/ RTC_OBJC_EXPORT -@interface RTCDefaultVideoDecoderFactory : NSObject +@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m index bdb18517ca..22b271febb 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m @@ -18,31 +18,84 @@ #if defined(RTC_ENABLE_VP9) #import "api/video_codec/RTCVideoDecoderVP9.h" #endif +#if !defined(DISABLE_H265) +#import "RTCH265ProfileLevelId.h" +#import "RTCVideoDecoderH265.h" +#endif -@implementation RTCDefaultVideoDecoderFactory +@implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) + +- (NSArray *)supportedCodecs { + NSDictionary *constrainedHighParams = @{ + @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"level-asymmetry-allowed" : @"1", + @"packetization-mode" : @"1", + }; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedHighParams]; + + NSDictionary *constrainedBaselineParams = @{ + @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"level-asymmetry-allowed" : @"1", + @"packetization-mode" : @"1", + }; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedBaselineParams]; + + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name]; -- (id)createDecoder:(RTCVideoCodecInfo *)info { - if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { - return [[RTCVideoDecoderH264 alloc] init]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { - return [RTCVideoDecoderVP8 vp8Decoder]; #if 
defined(RTC_ENABLE_VP9) - } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) { - return [RTCVideoDecoderVP9 vp9Decoder]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]; #endif - } - return nil; -} +#if !defined(DISABLE_H265) + if (@available(iOS 11, *)) { + if ([RTCVideoDecoderH265 supported]) { + return @[ + constrainedHighInfo, + constrainedBaselineInfo, + vp8Info, +#if defined(RTC_ENABLE_VP9) + vp9Info, +#endif + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH265Name], + ]; + } + } +#endif -- (NSArray *)supportedCodecs { return @[ - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name], - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name], + constrainedHighInfo, + constrainedBaselineInfo, + vp8Info, #if defined(RTC_ENABLE_VP9) - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name], + vp9Info, #endif ]; } +- (id)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { + return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] initWithCodecInfo:info]; + } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { + return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder]; +#if defined(RTC_ENABLE_VP9) + } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) { + return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder]; +#endif +#if !defined(DISABLE_H265) + } else if (@available(iOS 11, *)) { + if ([info.name isEqualToString:kRTCVideoCodecH265Name]) { + return [[RTC_OBJC_TYPE(RTCVideoDecoderH265) alloc] initWithCodecInfo:info]; + } +#endif + } + + return nil; +} + @end diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h index c45e54362b..92ab40c95b 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h +++ 
b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h @@ -16,14 +16,15 @@ NS_ASSUME_NONNULL_BEGIN /** This encoder factory include support for all codecs bundled with WebRTC. If using custom - * codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory. + * codecs, create custom implementations of RTCVideoEncoderFactory and + * RTCVideoDecoderFactory. */ RTC_OBJC_EXPORT -@interface RTCDefaultVideoEncoderFactory : NSObject +@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject -@property(nonatomic, retain) RTCVideoCodecInfo *preferredCodec; +@property(nonatomic, retain) RTC_OBJC_TYPE(RTCVideoCodecInfo) *preferredCodec; -+ (NSArray *)supportedCodecs; ++ (NSArray *)supportedCodecs; @end diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index b72296b64f..04ee571de6 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -18,34 +18,56 @@ #if defined(RTC_ENABLE_VP9) #import "api/video_codec/RTCVideoEncoderVP9.h" #endif +#if !defined(DISABLE_H265) +#import "RTCH265ProfileLevelId.h" +#import "RTCVideoEncoderH265.h" +#endif -@implementation RTCDefaultVideoEncoderFactory +@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) @synthesize preferredCodec; -+ (NSArray *)supportedCodecs { ++ (NSArray *)supportedCodecs { NSDictionary *constrainedHighParams = @{ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedHighInfo = - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name - parameters:constrainedHighParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedHighParams]; NSDictionary 
*constrainedBaselineParams = @{ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedBaselineInfo = - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name - parameters:constrainedBaselineParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedBaselineParams]; - RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name]; #if defined(RTC_ENABLE_VP9) - RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]; +#endif + +#if !defined(DISABLE_H265) + if (@available(iOS 11, *)) { + if ([RTCVideoEncoderH265 supported]) { + return @[ + constrainedHighInfo, + constrainedBaselineInfo, + vp8Info, +#if defined(RTC_ENABLE_VP9) + vp9Info, +#endif + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH265Name], + ]; + } + } #endif return @[ @@ -58,24 +80,31 @@ @implementation RTCDefaultVideoEncoderFactory ]; } -- (id)createEncoder:(RTCVideoCodecInfo *)info { +- (id)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { - return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info]; + return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info]; } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { - return [RTCVideoEncoderVP8 vp8Encoder]; + return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder]; #if defined(RTC_ENABLE_VP9) } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) { - return [RTCVideoEncoderVP9 
vp9Encoder]; + return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder]; +#endif +#if !defined(DISABLE_H265) + } else if (@available(iOS 11, *)) { + if ([info.name isEqualToString:kRTCVideoCodecH265Name]) { + return [[RTC_OBJC_TYPE(RTCVideoEncoderH265) alloc] initWithCodecInfo:info]; + } #endif } return nil; } -- (NSArray *)supportedCodecs { - NSMutableArray *codecs = [[[self class] supportedCodecs] mutableCopy]; +- (NSArray *)supportedCodecs { + NSMutableArray *codecs = + [[[self class] supportedCodecs] mutableCopy]; - NSMutableArray *orderedCodecs = [NSMutableArray array]; + NSMutableArray *orderedCodecs = [NSMutableArray array]; NSUInteger index = [codecs indexOfObject:self.preferredCodec]; if (index != NSNotFound) { [orderedCodecs addObject:[codecs objectAtIndex:index]]; diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h index 56b353215a..dac7bb5610 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h @@ -48,7 +48,7 @@ typedef NS_ENUM(NSUInteger, RTCH264Level) { }; RTC_OBJC_EXPORT -@interface RTCH264ProfileLevelId : NSObject +@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject @property(nonatomic, readonly) RTCH264Profile profile; @property(nonatomic, readonly) RTCH264Level level; diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm index afd9fcb44c..b985d9df02 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm @@ -75,15 +75,16 @@ } // namespace -@interface RTCH264ProfileLevelId () +@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) +() -@property(nonatomic, assign) RTCH264Profile profile; + @property(nonatomic, assign) RTCH264Profile profile; @property(nonatomic, assign) RTCH264Level level; @property(nonatomic, strong) NSString *hexString; @end 
-@implementation RTCH264ProfileLevelId +@implementation RTC_OBJC_TYPE (RTCH264ProfileLevelId) @synthesize profile = _profile; @synthesize level = _level; diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCDefaultVideoDecoderFactory.h b/sdk/objc/components/video_codec/RTCH265ProfileLevelId.h similarity index 70% rename from sdk/objc/Framework/Headers/WebRTC/RTCDefaultVideoDecoderFactory.h rename to sdk/objc/components/video_codec/RTCH265ProfileLevelId.h index dc46f3f67b..8e3486d06d 100644 --- a/sdk/objc/Framework/Headers/WebRTC/RTCDefaultVideoDecoderFactory.h +++ b/sdk/objc/components/video_codec/RTCH265ProfileLevelId.h @@ -8,4 +8,9 @@ * be found in the AUTHORS file in the root of the source tree. */ -#import "components/video_codec/RTCDefaultVideoDecoderFactory.h" +#import + +#import "RTCMacros.h" + +RTC_OBJC_EXPORT extern NSString *const kRTCVideoCodecH265Name; +RTC_OBJC_EXPORT extern NSString *const kRTCLevel31Main; diff --git a/api/test/mock_frame_decryptor.cc b/sdk/objc/components/video_codec/RTCH265ProfileLevelId.mm similarity index 63% rename from api/test/mock_frame_decryptor.cc rename to sdk/objc/components/video_codec/RTCH265ProfileLevelId.mm index f4b54f966c..fe93aceb79 100644 --- a/api/test/mock_frame_decryptor.cc +++ b/sdk/objc/components/video_codec/RTCH265ProfileLevelId.mm @@ -6,13 +6,13 @@ * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. + * */ -#include "api/test/mock_frame_decryptor.h" - -namespace webrtc { +#import "RTCH265ProfileLevelId.h" -MockFrameDecryptor::MockFrameDecryptor() = default; -MockFrameDecryptor::~MockFrameDecryptor() = default; +#include "media/base/media_constants.h" -} // namespace webrtc +NSString *const kRTCVideoCodecH265Name = @(cricket::kH265CodecName); +// TODO(jianjunz): This is value is not correct. 
+NSString *const kRTCLevel31Main = @"4d001f"; diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h index 4fcff1dff7..88bacbbdfe 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h @@ -14,5 +14,5 @@ #import "RTCVideoDecoderFactory.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderFactoryH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) : NSObject @end diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m index b9b9aa72c6..bdae19d687 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m @@ -13,15 +13,37 @@ #import "RTCH264ProfileLevelId.h" #import "RTCVideoDecoderH264.h" -@implementation RTCVideoDecoderFactoryH264 +@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) -- (id)createDecoder:(RTCVideoCodecInfo *)info { - return [[RTCVideoDecoderH264 alloc] init]; +- (NSArray *)supportedCodecs { + NSMutableArray *codecs = [NSMutableArray array]; + NSString *codecName = kRTCVideoCodecH264Name; + + NSDictionary *constrainedHighParams = @{ + @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"level-asymmetry-allowed" : @"1", + @"packetization-mode" : @"1", + }; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + parameters:constrainedHighParams]; + [codecs addObject:constrainedHighInfo]; + + NSDictionary *constrainedBaselineParams = @{ + @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"level-asymmetry-allowed" : @"1", + @"packetization-mode" : @"1", + }; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + 
parameters:constrainedBaselineParams]; + [codecs addObject:constrainedBaselineInfo]; + + return [codecs copy]; } -- (NSArray *)supportedCodecs { - NSString *codecName = kRTCVideoCodecH264Name; - return @[ [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:nil] ]; +- (id)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init]; } @end diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCDefaultVideoEncoderFactory.h b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH265.h similarity index 65% rename from sdk/objc/Framework/Headers/WebRTC/RTCDefaultVideoEncoderFactory.h rename to sdk/objc/components/video_codec/RTCVideoDecoderFactoryH265.h index 7588ffb84a..97f484d507 100644 --- a/sdk/objc/Framework/Headers/WebRTC/RTCDefaultVideoEncoderFactory.h +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH265.h @@ -8,4 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ -#import "components/video_codec/RTCDefaultVideoEncoderFactory.h" +#import + +#import "RTCMacros.h" +#import "RTCVideoDecoderFactory.h" + +RTC_OBJC_EXPORT +API_AVAILABLE(ios(11.0)) +@interface RTCVideoDecoderFactoryH265 : NSObject +@end diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH265.m b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH265.m new file mode 100644 index 0000000000..2a94cbd470 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH265.m @@ -0,0 +1,27 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCVideoDecoderFactoryH265.h" + +#import "RTCH265ProfileLevelId.h" +#import "RTCVideoDecoderH265.h" + +@implementation RTCVideoDecoderFactoryH265 + +- (id)createDecoder:(RTCVideoCodecInfo*)info { + return [[RTCVideoDecoderH265 alloc] init]; +} + +- (NSArray*)supportedCodecs { + NSString* codecName = kRTCVideoCodecH265Name; + return @[ [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:nil] ]; +} + +@end diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.h b/sdk/objc/components/video_codec/RTCVideoDecoderH264.h index b860276206..ba6168b98c 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.h +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.h @@ -12,7 +12,11 @@ #import "RTCMacros.h" #import "RTCVideoDecoder.h" +#import "RTCVideoCodecInfo.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) : NSObject + +- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo; + @end diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm index 04bdabf643..227a0499b8 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm @@ -19,10 +19,16 @@ #import "helpers.h" #import "helpers/scoped_cftyperef.h" +#import "RTCH264ProfileLevelId.h" + #if defined(WEBRTC_IOS) #import "helpers/UIDevice+RTCDevice.h" #endif +#include "common_video/h264/h264_bitstream_parser.h" +#if !defined(DISABLE_H265) +#include "common_video/h265/h265_bitstream_parser.h" +#endif #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -32,13 +38,14 @@ // Struct that we pass to the decoder per frame to decode. We receive it again // in the decoder callback. 
struct RTCFrameDecodeParams { - RTCFrameDecodeParams(RTCVideoDecoderCallback cb, int64_t ts) : callback(cb), timestamp(ts) {} + RTCFrameDecodeParams(RTCVideoDecoderCallback cb, int64_t ts, int32_t qp) : callback(cb), timestamp(ts), qp(qp) {} RTCVideoDecoderCallback callback; int64_t timestamp; + int32_t qp; }; -@interface RTCVideoDecoderH264 () -- (void)setError:(OSStatus)error; +@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) +() - (void)setError : (OSStatus)error; @end // This is the callback function that VideoToolbox calls when decode is @@ -53,34 +60,43 @@ void decompressionOutputCallback(void *decoderRef, std::unique_ptr decodeParams( reinterpret_cast(params)); if (status != noErr) { - RTCVideoDecoderH264 *decoder = (__bridge RTCVideoDecoderH264 *)decoderRef; + RTC_OBJC_TYPE(RTCVideoDecoderH264) *decoder = + (__bridge RTC_OBJC_TYPE(RTCVideoDecoderH264) *)decoderRef; [decoder setError:status]; RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status; return; } // TODO(tkchin): Handle CVO properly. - RTCCVPixelBuffer *frameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:imageBuffer]; - RTCVideoFrame *decodedFrame = - [[RTCVideoFrame alloc] initWithBuffer:frameBuffer - rotation:RTCVideoRotation_0 - timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] + initWithBuffer:frameBuffer + rotation:RTCVideoRotation_0 + timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec]; decodedFrame.timeStamp = decodeParams->timestamp; - decodeParams->callback(decodedFrame); + decodeParams->callback(decodedFrame, decodeParams->qp); } // Decoder. 
-@implementation RTCVideoDecoderH264 { +@implementation RTC_OBJC_TYPE (RTCVideoDecoderH264) { CMVideoFormatDescriptionRef _videoFormat; CMMemoryPoolRef _memoryPool; VTDecompressionSessionRef _decompressionSession; RTCVideoDecoderCallback _callback; OSStatus _error; + webrtc::H264BitstreamParser _h264BitstreamParser; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *_codecInfo; + +#if !defined(DISABLE_H265) + webrtc::H265BitstreamParser _h265BitstreamParser; +#endif } -- (instancetype)init { +- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { self = [super init]; if (self) { _memoryPool = CMMemoryPoolCreate(nil); + _codecInfo = codecInfo; } return self; } @@ -96,9 +112,9 @@ - (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores { return WEBRTC_VIDEO_CODEC_OK; } -- (NSInteger)decode:(RTCEncodedImage *)inputImage +- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)inputImage missingFrames:(BOOL)missingFrames - codecSpecificInfo:(nullable id)info + codecSpecificInfo:(nullable id)info renderTimeMs:(int64_t)renderTimeMs { RTC_DCHECK(inputImage.buffer); @@ -108,9 +124,20 @@ - (NSInteger)decode:(RTCEncodedImage *)inputImage return WEBRTC_VIDEO_CODEC_ERROR; } +#if !defined(DISABLE_H265) + rtc::ScopedCFTypeRef inputFormat(nullptr); + if ([_codecInfo.name isEqualToString:kRTCVideoCodecH264Name]) { + inputFormat = rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes, + inputImage.buffer.length)); + } else if (@available(iOS 11, *)) { + inputFormat = rtc::ScopedCF(webrtc::CreateH265VideoFormatDescription((uint8_t*)inputImage.buffer.bytes, + inputImage.buffer.length)); + } +#else rtc::ScopedCFTypeRef inputFormat = rtc::ScopedCF(webrtc::CreateVideoFormatDescription((uint8_t *)inputImage.buffer.bytes, inputImage.buffer.length)); +#endif if (inputFormat) { // Check if the video format has changed, and reinitialize decoder if // needed. 
@@ -132,6 +159,26 @@ - (NSInteger)decode:(RTCEncodedImage *)inputImage return WEBRTC_VIDEO_CODEC_ERROR; } CMSampleBufferRef sampleBuffer = nullptr; +#if !defined(DISABLE_H265) + if ([_codecInfo.name isEqualToString:kRTCVideoCodecH264Name]) { + if (!webrtc::H264AnnexBBufferToCMSampleBuffer((uint8_t *)inputImage.buffer.bytes, + inputImage.buffer.length, + _videoFormat, + &sampleBuffer, + _memoryPool)) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + } else if (@available(iOS 11, *)) { + if (!webrtc::H265AnnexBBufferToCMSampleBuffer((uint8_t*)inputImage.buffer.bytes, + inputImage.buffer.length, + _videoFormat, + &sampleBuffer)) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + } else { + return WEBRTC_VIDEO_CODEC_ERROR; + } +#else if (!webrtc::H264AnnexBBufferToCMSampleBuffer((uint8_t *)inputImage.buffer.bytes, inputImage.buffer.length, _videoFormat, @@ -139,10 +186,27 @@ - (NSInteger)decode:(RTCEncodedImage *)inputImage _memoryPool)) { return WEBRTC_VIDEO_CODEC_ERROR; } +#endif RTC_DCHECK(sampleBuffer); VTDecodeFrameFlags decodeFlags = kVTDecodeFrame_EnableAsynchronousDecompression; std::unique_ptr frameDecodeParams; - frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp)); + + int qp = -1; +#if !defined(DISABLE_H265) + if ([_codecInfo.name isEqualToString:kRTCVideoCodecH264Name]) { + _h264BitstreamParser.ParseBitstream((uint8_t *)inputImage.buffer.bytes, inputImage.buffer.length); + _h264BitstreamParser.GetLastSliceQp(&qp); + } else { + _h265BitstreamParser.ParseBitstream((uint8_t *)inputImage.buffer.bytes, inputImage.buffer.length); + _h265BitstreamParser.GetLastSliceQp(&qp); + } +#else + _h264BitstreamParser.ParseBitstream((uint8_t *)inputImage.buffer.bytes, inputImage.buffer.length); + _h264BitstreamParser.GetLastSliceQp(&qp); +#endif + + frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp, qp)); + OSStatus status = VTDecompressionSessionDecodeFrame( _decompressionSession, sampleBuffer, decodeFlags, 
frameDecodeParams.release(), nullptr); #if defined(WEBRTC_IOS) @@ -152,7 +216,7 @@ - (NSInteger)decode:(RTCEncodedImage *)inputImage [self resetDecompressionSession] == WEBRTC_VIDEO_CODEC_OK) { RTC_LOG(LS_INFO) << "Failed to decode frame with code: " << status << " retrying decode after decompression session reset"; - frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp)); + frameDecodeParams.reset(new RTCFrameDecodeParams(_callback, inputImage.timeStamp, qp)); status = VTDecompressionSessionDecodeFrame( _decompressionSession, sampleBuffer, decodeFlags, frameDecodeParams.release(), nullptr); } @@ -200,20 +264,31 @@ - (int)resetDecompressionSession { // CVPixelBuffers directly to the renderer. // TODO(tkchin): Maybe only set OpenGL/IOSurface keys if we know that that // we can pass CVPixelBuffers as native handles in decoder output. +#if TARGET_OS_SIMULATOR + static size_t const attributesSize = 2; +#else static size_t const attributesSize = 3; +#endif + CFTypeRef keys[attributesSize] = { #if defined(WEBRTC_IOS) - kCVPixelBufferOpenGLESCompatibilityKey, + kCVPixelBufferOpenGLESCompatibilityKey, #elif defined(WEBRTC_MAC) - kCVPixelBufferOpenGLCompatibilityKey, + kCVPixelBufferOpenGLCompatibilityKey, #endif - kCVPixelBufferIOSurfacePropertiesKey, - kCVPixelBufferPixelFormatTypeKey - }; +#if !(TARGET_OS_SIMULATOR) + kCVPixelBufferIOSurfacePropertiesKey, +#endif + kCVPixelBufferPixelFormatTypeKey}; CFDictionaryRef ioSurfaceValue = CreateCFTypeDictionary(nullptr, nullptr, 0); int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; CFNumberRef pixelFormat = CFNumberCreate(nullptr, kCFNumberLongType, &nv12type); +#if TARGET_OS_SIMULATOR + CFTypeRef values[attributesSize] = {kCFBooleanTrue, pixelFormat}; +#else CFTypeRef values[attributesSize] = {kCFBooleanTrue, ioSurfaceValue, pixelFormat}; +#endif + CFDictionaryRef attributes = CreateCFTypeDictionary(keys, values, attributesSize); if (ioSurfaceValue) { CFRelease(ioSurfaceValue); diff 
--git a/sdk/objc/Framework/Headers/WebRTC/RTCDtmfSender.h b/sdk/objc/components/video_codec/RTCVideoDecoderH265.h similarity index 65% rename from sdk/objc/Framework/Headers/WebRTC/RTCDtmfSender.h rename to sdk/objc/components/video_codec/RTCVideoDecoderH265.h index 20407102c6..999e130edd 100644 --- a/sdk/objc/Framework/Headers/WebRTC/RTCDtmfSender.h +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH265.h @@ -8,4 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ -#import "api/peerconnection/RTCDtmfSender.h" +#import + +#import "RTCVideoDecoderH264.h" + +RTC_OBJC_EXPORT +API_AVAILABLE(ios(11.0)) +@interface RTC_OBJC_TYPE (RTCVideoDecoderH265) : RTC_OBJC_TYPE(RTCVideoDecoderH264) + ++ (bool)supported; + +@end diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.cc b/sdk/objc/components/video_codec/RTCVideoDecoderH265.mm similarity index 71% rename from modules/rtp_rtcp/mocks/mock_rtp_rtcp.cc rename to sdk/objc/components/video_codec/RTCVideoDecoderH265.mm index 061f82765c..7066547483 100644 --- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.cc +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH265.mm @@ -6,13 +6,16 @@ * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
+ * */ -#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" +#import "RTCVideoDecoderH265.h" -namespace webrtc { +@implementation RTC_OBJC_TYPE (RTCVideoDecoderH265) -MockRtpRtcp::MockRtpRtcp() = default; -MockRtpRtcp::~MockRtpRtcp() = default; ++ (bool)supported { + // TODO(piasy): impl + return true; +} -} // namespace webrtc +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h index c64405e4da..45fc4be2ea 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h @@ -14,5 +14,5 @@ #import "RTCVideoEncoderFactory.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderFactoryH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) : NSObject @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m index bbc15e9d5d..9843849307 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m @@ -13,10 +13,10 @@ #import "RTCH264ProfileLevelId.h" #import "RTCVideoEncoderH264.h" -@implementation RTCVideoEncoderFactoryH264 +@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) -- (NSArray *)supportedCodecs { - NSMutableArray *codecs = [NSMutableArray array]; +- (NSArray *)supportedCodecs { + NSMutableArray *codecs = [NSMutableArray array]; NSString *codecName = kRTCVideoCodecH264Name; NSDictionary *constrainedHighParams = @{ @@ -24,8 +24,9 @@ @implementation RTCVideoEncoderFactoryH264 @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedHighInfo = - [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + 
parameters:constrainedHighParams]; [codecs addObject:constrainedHighInfo]; NSDictionary *constrainedBaselineParams = @{ @@ -33,15 +34,16 @@ @implementation RTCVideoEncoderFactoryH264 @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedBaselineInfo = - [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + parameters:constrainedBaselineParams]; [codecs addObject:constrainedBaselineInfo]; return [codecs copy]; } -- (id)createEncoder:(RTCVideoCodecInfo *)info { - return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info]; +- (id)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info]; } @end diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecFactory.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH265.h similarity index 66% rename from sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecFactory.h rename to sdk/objc/components/video_codec/RTCVideoEncoderFactoryH265.h index 5e8d353aa1..aebea740c6 100644 --- a/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodecFactory.h +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH265.h @@ -8,7 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#import "base/RTCVideoDecoderFactory.h" -#import "base/RTCVideoEncoderFactory.h" -#import "components/video_codec/RTCDefaultVideoDecoderFactory.h" -#import "components/video_codec/RTCDefaultVideoEncoderFactory.h" +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderFactory.h" + +RTC_OBJC_EXPORT +API_AVAILABLE(ios(11.0)) +@interface RTCVideoEncoderFactoryH265 : NSObject +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH265.m b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH265.m new file mode 100644 index 0000000000..1abdfaecdf --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH265.m @@ -0,0 +1,38 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCVideoEncoderFactoryH265.h" + +#import "RTCH265ProfileLevelId.h" +#import "RTCVideoEncoderH265.h" + +@implementation RTCVideoEncoderFactoryH265 + +- (NSArray*)supportedCodecs { + NSMutableArray* codecs = [NSMutableArray array]; + NSString* codecName = kRTCVideoCodecH265Name; + + NSDictionary* mainParams = @{ + @"profile-level-id" : kRTCLevel31Main, + @"level-asymmetry-allowed" : @"1", + @"packetization-mode" : @"1", + }; + RTCVideoCodecInfo* constrainedBaselineInfo = + [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:mainParams]; + [codecs addObject:constrainedBaselineInfo]; + + return [codecs copy]; +} + +- (id)createEncoder:(RTCVideoCodecInfo*)info { + return [[RTCVideoEncoderH265 alloc] initWithCodecInfo:info]; +} + +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.h b/sdk/objc/components/video_codec/RTCVideoEncoderH264.h index a9c05580a4..9f4f4c7c8d 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.h +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.h @@ -15,8 +15,8 @@ #import "RTCVideoEncoder.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) : NSObject -- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo; +- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo; @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 6584c88c61..3ada730e7f 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -19,7 +19,10 @@ #endif #import "RTCCodecSpecificInfoH264.h" #import "RTCH264ProfileLevelId.h" -#import "api/peerconnection/RTCRtpFragmentationHeader+Private.h" +#if !defined(DISABLE_H265) +#import "RTCCodecSpecificInfoH265.h" +#import "RTCH265ProfileLevelId.h" +#endif #import "api/peerconnection/RTCVideoCodecInfo+Private.h" #import 
"base/RTCCodecSpecificInfo.h" #import "base/RTCI420Buffer.h" @@ -31,8 +34,10 @@ #include "common_video/h264/h264_bitstream_parser.h" #include "common_video/h264/profile_level_id.h" +#if !defined(DISABLE_H265) +#include "common_video/h265/h265_bitstream_parser.h" +#endif #include "common_video/include/bitrate_adjuster.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/buffer.h" #include "rtc_base/logging.h" @@ -40,17 +45,14 @@ #include "sdk/objc/components/video_codec/nalu_rewriter.h" #include "third_party/libyuv/include/libyuv/convert_from.h" -@interface RTCVideoEncoderH264 () +@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) +() -- (void)frameWasEncoded:(OSStatus)status - flags:(VTEncodeInfoFlags)infoFlags - sampleBuffer:(CMSampleBufferRef)sampleBuffer - codecSpecificInfo:(id)codecSpecificInfo - width:(int32_t)width - height:(int32_t)height - renderTimeMs:(int64_t)renderTimeMs - timestamp:(uint32_t)timestamp - rotation:(RTCVideoRotation)rotation; + - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer + : (CMSampleBufferRef)sampleBuffer codecSpecificInfo + : (id)codecSpecificInfo width : (int32_t)width height + : (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation + : (RTCVideoRotation)rotation; @end @@ -70,23 +72,35 @@ - (void)frameWasEncoded:(OSStatus)status // Struct that we pass to the encoder per frame to encode. We receive it again // in the encoder callback. 
struct RTCFrameEncodeParams { - RTCFrameEncodeParams(RTCVideoEncoderH264 *e, - RTCCodecSpecificInfoH264 *csi, + RTCFrameEncodeParams(RTC_OBJC_TYPE(RTCVideoEncoderH264) * e, +#if !defined(DISABLE_H265) + id csi, +#else + RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * csi, +#endif int32_t w, int32_t h, int64_t rtms, uint32_t ts, RTCVideoRotation r) : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) { +#if !defined(DISABLE_H265) + codecSpecificInfo = csi; +#else if (csi) { codecSpecificInfo = csi; } else { - codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init]; + codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init]; } +#endif } - RTCVideoEncoderH264 *encoder; - RTCCodecSpecificInfoH264 *codecSpecificInfo; + RTC_OBJC_TYPE(RTCVideoEncoderH264) * encoder; +#if !defined(DISABLE_H265) + id codecSpecificInfo; +#else + RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * codecSpecificInfo; +#endif int32_t width; int32_t height; int64_t render_time_ms; @@ -97,7 +111,8 @@ - (void)frameWasEncoded:(OSStatus)status // We receive I420Frames as input, but we need to feed CVPixelBuffers into the // encoder. This performs the copy and format conversion. // TODO(tkchin): See if encoder will accept i420 frames and compare performance. 
-bool CopyVideoFrameToNV12PixelBuffer(id frameBuffer, CVPixelBufferRef pixelBuffer) { +bool CopyVideoFrameToNV12PixelBuffer(id frameBuffer, + CVPixelBufferRef pixelBuffer) { RTC_DCHECK(pixelBuffer); RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat); RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height); @@ -313,14 +328,17 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id } } // namespace -@implementation RTCVideoEncoderH264 { - RTCVideoCodecInfo *_codecInfo; +@implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { + RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo; std::unique_ptr _bitrateAdjuster; uint32_t _targetBitrateBps; uint32_t _encoderBitrateBps; uint32_t _encoderFrameRate; uint32_t _maxAllowedFrameRate; RTCH264PacketizationMode _packetizationMode; +#if !defined(DISABLE_H265) + RTCH265PacketizationMode _packetizationModeH265; +#endif absl::optional _profile_level_id; RTCVideoEncoderCallback _callback; int32_t _width; @@ -330,6 +348,9 @@ @implementation RTCVideoEncoderH264 { RTCVideoCodecMode _mode; webrtc::H264BitstreamParser _h264BitstreamParser; +#if !defined(DISABLE_H265) + webrtc::H265BitstreamParser _h265BitstreamParser; +#endif std::vector _frameScaleBuffer; } @@ -340,16 +361,23 @@ @implementation RTCVideoEncoderH264 { // drastically reduced bitrate, so we want to avoid that. In steady state // conditions, 0.95 seems to give us better overall bitrate over long periods // of time. 
-- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo { +- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { if (self = [super init]) { _codecInfo = codecInfo; _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95)); _packetizationMode = RTCH264PacketizationModeNonInterleaved; +#if !defined(DISABLE_H265) + _packetizationModeH265 = RTCH265PacketizationModeNonInterleaved; +#endif _profile_level_id = webrtc::H264::ParseSdpProfileLevelId([codecInfo nativeSdpVideoFormat].parameters); RTC_DCHECK(_profile_level_id); RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile(*_profile_level_id)); +#if !defined(DISABLE_H265) + RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name] || [codecInfo.name isEqualToString:kRTCVideoCodecH265Name]); +#else RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]); +#endif } return self; } @@ -358,10 +386,14 @@ - (void)dealloc { [self destroyCompressionSession]; } -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings +- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores { RTC_DCHECK(settings); +#if !defined(DISABLE_H265) + RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name] || [settings.name isEqualToString:kRTCVideoCodecH265Name]); +#else RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]); +#endif _width = settings.width; _height = settings.height; @@ -388,8 +420,8 @@ - (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat]; } -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(nullable id)codecSpecificInfo +- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame + codecSpecificInfo:(nullable id)codecSpecificInfo frameTypes:(NSArray *)frameTypes { RTC_DCHECK_EQ(frame.width, _width); RTC_DCHECK_EQ(frame.height, _height); @@ -404,9 +436,10 
@@ - (NSInteger)encode:(RTCVideoFrame *)frame } CVPixelBufferRef pixelBuffer = nullptr; - if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { // Native frame buffer - RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; if (![rtcPixelBuffer requiresCropping]) { // This pixel buffer might have a higher resolution than what the // compression session is configured to. The compression session can @@ -469,15 +502,36 @@ - (NSInteger)encode:(RTCVideoFrame *)frame frameProperties = CreateCFTypeDictionary(keys, values, 1); } +#if !defined(DISABLE_H265) + id csi = codecSpecificInfo; + if (csi == nil) { + if ([_codecInfo.name isEqualToString:kRTCVideoCodecH264Name]) { + RTCCodecSpecificInfoH264 *csiH264 = [[RTCCodecSpecificInfoH264 alloc] init]; + csiH264.packetizationMode = _packetizationMode; + csi = csiH264; + } else { + RTCCodecSpecificInfoH265 *csiH265 = [[RTCCodecSpecificInfoH265 alloc] init]; + csiH265.packetizationMode = _packetizationModeH265; + csi = csiH265; + } + } +#endif + std::unique_ptr encodeParams; encodeParams.reset(new RTCFrameEncodeParams(self, +#if !defined(DISABLE_H265) + csi, +#else codecSpecificInfo, +#endif _width, _height, frame.timeStampNs / rtc::kNumNanosecsPerMillisec, frame.timeStamp, frame.rotation)); +#if defined(DISABLE_H265) encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode; +#endif // Update the bitrate if needed. [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate]; @@ -543,17 +597,18 @@ - (NSInteger)releaseEncoder { return WEBRTC_VIDEO_CODEC_OK; } -- (OSType)pixelFormatOfFrame:(RTCVideoFrame *)frame { +- (OSType)pixelFormatOfFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { // Use NV12 for non-native frames. 
- if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { - RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer); } return kNV12PixelFormat; } -- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTCVideoFrame *)frame { +- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { BOOL resetCompressionSession = NO; // If we're capturing native frames in another pixel format than the compression session is @@ -631,7 +686,11 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { VTCompressionSessionCreate(nullptr, // use default allocator _width, _height, + #if !defined(DISABLE_H265) + [_codecInfo.name isEqualToString:kRTCVideoCodecH264Name] ? kCMVideoCodecType_H264 : kCMVideoCodecType_HEVC, + #else kCMVideoCodecType_H264, + #endif encoder_specs, // use hardware accelerated encoder if available sourceAttributes, nullptr, // use default compressed data allocator @@ -674,9 +733,17 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { - (void)configureCompressionSession { RTC_DCHECK(_compressionSession); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true); +#if !defined(DISABLE_H265) + if ([_codecInfo.name isEqualToString:kRTCVideoCodecH264Name]) { + SetVTSessionProperty(_compressionSession, + kVTCompressionPropertyKey_ProfileLevel, + ExtractProfile(*_profile_level_id)); + } +#else SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, ExtractProfile(*_profile_level_id)); +#endif SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false); [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate]; // TODO(tkchin): Look at 
entropy mode and colorspace matrices. @@ -755,7 +822,7 @@ - (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate - (void)frameWasEncoded:(OSStatus)status flags:(VTEncodeInfoFlags)infoFlags sampleBuffer:(CMSampleBufferRef)sampleBuffer - codecSpecificInfo:(id)codecSpecificInfo + codecSpecificInfo:(id)codecSpecificInfo width:(int32_t)width height:(int32_t)height renderTimeMs:(int64_t)renderTimeMs @@ -782,25 +849,32 @@ - (void)frameWasEncoded:(OSStatus)status RTC_LOG(LS_INFO) << "Generated keyframe"; } - // Convert the sample buffer into a buffer suitable for RTP packetization. - // TODO(tkchin): Allocate buffers through a pool. - std::unique_ptr buffer(new rtc::Buffer()); - RTCRtpFragmentationHeader *header; - { - std::unique_ptr header_cpp; - bool result = - H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp); - header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()]; - if (!result) { + __block std::unique_ptr buffer = std::make_unique(); +#if !defined(DISABLE_H265) + if ([_codecInfo.name isEqualToString:kRTCVideoCodecH264Name]) { + if (!webrtc::H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) { return; } + } else if (@available(iOS 11, *)) { + if (!webrtc::H265CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) { + return; + } + } +#else + if (!webrtc::H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) { + return; } +#endif - RTCEncodedImage *frame = [[RTCEncodedImage alloc] init]; - frame.buffer = [NSData dataWithBytesNoCopy:buffer->data() length:buffer->size() freeWhenDone:NO]; + RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init]; + // This assumes ownership of `buffer` and is responsible for freeing it when done. 
+ frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data() + length:buffer->size() + deallocator:^(void *bytes, NSUInteger size) { + buffer.reset(); + }]; frame.encodedWidth = width; frame.encodedHeight = height; - frame.completeFrame = YES; frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta; frame.captureTimeMs = renderTimeMs; frame.timeStamp = timestamp; @@ -810,10 +884,22 @@ - (void)frameWasEncoded:(OSStatus)status frame.flags = webrtc::VideoSendTiming::kInvalid; int qp; +#if !defined(DISABLE_H265) + if ([_codecInfo.name isEqualToString:kRTCVideoCodecH264Name]) { + _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size()); + _h264BitstreamParser.GetLastSliceQp(&qp); + } else { + _h265BitstreamParser.ParseBitstream(buffer->data(), buffer->size()); + _h265BitstreamParser.GetLastSliceQp(&qp); + } +#else _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size()); _h264BitstreamParser.GetLastSliceQp(&qp); +#endif frame.qp = @(qp); + RTC_OBJC_TYPE(RTCRtpFragmentationHeader) *header = + [[RTC_OBJC_TYPE(RTCRtpFragmentationHeader) alloc] init]; BOOL res = _callback(frame, codecSpecificInfo, header); if (!res) { RTC_LOG(LS_ERROR) << "Encode callback failed"; @@ -822,9 +908,10 @@ - (void)frameWasEncoded:(OSStatus)status _bitrateAdjuster->Update(frame.buffer.length); } -- (nullable RTCVideoEncoderQpThresholds *)scalingSettings { - return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold - high:kHighH264QpThreshold]; +- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { + return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc] + initWithThresholdsLow:kLowH264QpThreshold + high:kHighH264QpThreshold]; } @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH265.h b/sdk/objc/components/video_codec/RTCVideoEncoderH265.h new file mode 100644 index 0000000000..41c964fcce --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH265.h @@ -0,0 
+1,21 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#import "RTCVideoEncoderH264.h" + +RTC_OBJC_EXPORT +API_AVAILABLE(ios(11.0)) +@interface RTC_OBJC_TYPE (RTCVideoEncoderH265) : RTC_OBJC_TYPE(RTCVideoEncoderH264) + ++ (bool)supported; + +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH265.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH265.mm new file mode 100644 index 0000000000..07c8ab8546 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH265.mm @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ * + */ + +#import "RTCVideoEncoderH265.h" +#import "RTCH265ProfileLevelId.h" +#include "modules/video_coding/include/video_error_codes.h" + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderH265) + ++ (bool)supported { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH265Name]; + RTC_OBJC_TYPE(RTCVideoEncoderH265) *encoder = [[RTC_OBJC_TYPE(RTCVideoEncoderH265) alloc] initWithCodecInfo:info]; + RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings = [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] init]; + settings.name = kRTCVideoCodecH265Name; + settings.width = 1280; + settings.height = 720; + settings.startBitrate = 800; + settings.mode = RTCVideoCodecModeRealtimeVideo; + bool supported = [encoder startEncodeWithSettings:settings numberOfCores:1] == WEBRTC_VIDEO_CODEC_OK; + [encoder releaseEncoder]; + encoder = nil; + return supported; +} + +@end diff --git a/sdk/objc/components/video_codec/nalu_rewriter.cc b/sdk/objc/components/video_codec/nalu_rewriter.cc index dc258d6064..b65f6534a8 100644 --- a/sdk/objc/components/video_codec/nalu_rewriter.cc +++ b/sdk/objc/components/video_codec/nalu_rewriter.cc @@ -29,14 +29,10 @@ using H264::ParseNaluType; const char kAnnexBHeaderBytes[4] = {0, 0, 0, 1}; const size_t kAvccHeaderByteSize = sizeof(uint32_t); -bool H264CMSampleBufferToAnnexBBuffer( - CMSampleBufferRef avcc_sample_buffer, - bool is_keyframe, - rtc::Buffer* annexb_buffer, - std::unique_ptr* out_header) { +bool H264CMSampleBufferToAnnexBBuffer(CMSampleBufferRef avcc_sample_buffer, + bool is_keyframe, + rtc::Buffer* annexb_buffer) { RTC_DCHECK(avcc_sample_buffer); - RTC_DCHECK(out_header); - out_header->reset(nullptr); // Get format description from the sample buffer. CMVideoFormatDescriptionRef description = @@ -61,10 +57,6 @@ bool H264CMSampleBufferToAnnexBBuffer( // Truncate any previous data in the buffer without changing its capacity. 
annexb_buffer->SetSize(0); - size_t nalu_offset = 0; - std::vector frag_offsets; - std::vector frag_lengths; - // Place all parameter sets at the front of buffer. if (is_keyframe) { size_t param_set_size = 0; @@ -80,10 +72,6 @@ bool H264CMSampleBufferToAnnexBBuffer( annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes)); annexb_buffer->AppendData(reinterpret_cast(param_set), param_set_size); - // Update fragmentation. - frag_offsets.push_back(nalu_offset + sizeof(kAnnexBHeaderBytes)); - frag_lengths.push_back(param_set_size); - nalu_offset += sizeof(kAnnexBHeaderBytes) + param_set_size; } } @@ -132,10 +120,6 @@ bool H264CMSampleBufferToAnnexBBuffer( // Update buffer. annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes)); annexb_buffer->AppendData(data_ptr + nalu_header_size, packet_size); - // Update fragmentation. - frag_offsets.push_back(nalu_offset + sizeof(kAnnexBHeaderBytes)); - frag_lengths.push_back(packet_size); - nalu_offset += sizeof(kAnnexBHeaderBytes) + packet_size; size_t bytes_written = packet_size + sizeof(kAnnexBHeaderBytes); bytes_remaining -= bytes_written; @@ -143,14 +127,6 @@ bool H264CMSampleBufferToAnnexBBuffer( } RTC_DCHECK_EQ(bytes_remaining, (size_t)0); - std::unique_ptr header(new RTPFragmentationHeader()); - header->VerifyAndAllocateFragmentationHeader(frag_offsets.size()); - RTC_DCHECK_EQ(frag_lengths.size(), frag_offsets.size()); - for (size_t i = 0; i < frag_offsets.size(); ++i) { - header->fragmentationOffset[i] = frag_offsets[i]; - header->fragmentationLength[i] = frag_lengths[i]; - } - *out_header = std::move(header); CFRelease(contiguous_buffer); return true; } @@ -248,6 +224,218 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer, return true; } +#ifndef DISABLE_H265 +bool H265CMSampleBufferToAnnexBBuffer( + CMSampleBufferRef hvcc_sample_buffer, + bool is_keyframe, + rtc::Buffer* annexb_buffer) { + RTC_DCHECK(hvcc_sample_buffer); + + // Get format description from the sample 
buffer. + CMVideoFormatDescriptionRef description = + CMSampleBufferGetFormatDescription(hvcc_sample_buffer); + if (description == nullptr) { + RTC_LOG(LS_ERROR) << "Failed to get sample buffer's description."; + return false; + } + + // Get parameter set information. + int nalu_header_size = 0; + size_t param_set_count = 0; + OSStatus status = CMVideoFormatDescriptionGetHEVCParameterSetAtIndex( + description, 0, nullptr, nullptr, ¶m_set_count, &nalu_header_size); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to get parameter set."; + return false; + } + RTC_CHECK_EQ(nalu_header_size, kAvccHeaderByteSize); + RTC_DCHECK_EQ(param_set_count, 3); + + // Truncate any previous data in the buffer without changing its capacity. + annexb_buffer->SetSize(0); + + size_t nalu_offset = 0; + std::vector frag_offsets; + std::vector frag_lengths; + + // Place all parameter sets at the front of buffer. + if (is_keyframe) { + size_t param_set_size = 0; + const uint8_t* param_set = nullptr; + for (size_t i = 0; i < param_set_count; ++i) { + status = CMVideoFormatDescriptionGetHEVCParameterSetAtIndex( + description, i, ¶m_set, ¶m_set_size, nullptr, nullptr); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to get parameter set."; + return false; + } + // Update buffer. + annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes)); + annexb_buffer->AppendData(reinterpret_cast(param_set), + param_set_size); + // Update fragmentation. + frag_offsets.push_back(nalu_offset + sizeof(kAnnexBHeaderBytes)); + frag_lengths.push_back(param_set_size); + nalu_offset += sizeof(kAnnexBHeaderBytes) + param_set_size; + } + } + + // Get block buffer from the sample buffer. + CMBlockBufferRef block_buffer = + CMSampleBufferGetDataBuffer(hvcc_sample_buffer); + if (block_buffer == nullptr) { + RTC_LOG(LS_ERROR) << "Failed to get sample buffer's block buffer."; + return false; + } + CMBlockBufferRef contiguous_buffer = nullptr; + // Make sure block buffer is contiguous. 
+ if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) { + status = CMBlockBufferCreateContiguous( + nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: " + << status; + return false; + } + } else { + contiguous_buffer = block_buffer; + // Retain to make cleanup easier. + CFRetain(contiguous_buffer); + block_buffer = nullptr; + } + + // Now copy the actual data. + char* data_ptr = nullptr; + size_t block_buffer_size = CMBlockBufferGetDataLength(contiguous_buffer); + status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr, nullptr, + &data_ptr); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to get block buffer data."; + CFRelease(contiguous_buffer); + return false; + } + size_t bytes_remaining = block_buffer_size; + while (bytes_remaining > 0) { + // The size type here must match |nalu_header_size|, we expect 4 bytes. + // Read the length of the next packet of data. Must convert from big endian + // to host endian. + RTC_DCHECK_GE(bytes_remaining, (size_t)nalu_header_size); + uint32_t* uint32_data_ptr = reinterpret_cast(data_ptr); + uint32_t packet_size = CFSwapInt32BigToHost(*uint32_data_ptr); + // Update buffer. + annexb_buffer->AppendData(kAnnexBHeaderBytes, sizeof(kAnnexBHeaderBytes)); + annexb_buffer->AppendData(data_ptr + nalu_header_size, packet_size); + // Update fragmentation. 
+ frag_offsets.push_back(nalu_offset + sizeof(kAnnexBHeaderBytes)); + frag_lengths.push_back(packet_size); + nalu_offset += sizeof(kAnnexBHeaderBytes) + packet_size; + + size_t bytes_written = packet_size + sizeof(kAnnexBHeaderBytes); + bytes_remaining -= bytes_written; + data_ptr += bytes_written; + } + RTC_DCHECK_EQ(bytes_remaining, (size_t)0); + + CFRelease(contiguous_buffer); + return true; +} + +bool H265AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer, + size_t annexb_buffer_size, + CMVideoFormatDescriptionRef video_format, + CMSampleBufferRef* out_sample_buffer) { + RTC_DCHECK(annexb_buffer); + RTC_DCHECK(out_sample_buffer); + RTC_DCHECK(video_format); + *out_sample_buffer = nullptr; + + AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size); + if (reader.SeekToNextNaluOfType(H265::kVps)) { + // Buffer contains an SPS NALU - skip it and the following PPS + const uint8_t* data; + size_t data_len; + if (!reader.ReadNalu(&data, &data_len)) { + RTC_LOG(LS_ERROR) << "Failed to read VPS"; + return false; + } + if (!reader.ReadNalu(&data, &data_len)) { + RTC_LOG(LS_ERROR) << "Failed to read SPS"; + return false; + } + if (!reader.ReadNalu(&data, &data_len)) { + RTC_LOG(LS_ERROR) << "Failed to read PPS"; + return false; + } + } else { + // No SPS NALU - start reading from the first NALU in the buffer + reader.SeekToStart(); + } + + // Allocate memory as a block buffer. + // TODO(tkchin): figure out how to use a pool. + CMBlockBufferRef block_buffer = nullptr; + OSStatus status = CMBlockBufferCreateWithMemoryBlock( + nullptr, nullptr, reader.BytesRemaining(), nullptr, nullptr, 0, + reader.BytesRemaining(), kCMBlockBufferAssureMemoryNowFlag, + &block_buffer); + if (status != kCMBlockBufferNoErr) { + RTC_LOG(LS_ERROR) << "Failed to create block buffer."; + return false; + } + + // Make sure block buffer is contiguous. 
+ CMBlockBufferRef contiguous_buffer = nullptr; + if (!CMBlockBufferIsRangeContiguous(block_buffer, 0, 0)) { + status = CMBlockBufferCreateContiguous( + nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: " + << status; + CFRelease(block_buffer); + return false; + } + } else { + contiguous_buffer = block_buffer; + block_buffer = nullptr; + } + + // Get a raw pointer into allocated memory. + size_t block_buffer_size = 0; + char* data_ptr = nullptr; + status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr, + &block_buffer_size, &data_ptr); + if (status != kCMBlockBufferNoErr) { + RTC_LOG(LS_ERROR) << "Failed to get block buffer data pointer."; + CFRelease(contiguous_buffer); + return false; + } + RTC_DCHECK(block_buffer_size == reader.BytesRemaining()); + + // Write Avcc NALUs into block buffer memory. + AvccBufferWriter writer(reinterpret_cast(data_ptr), + block_buffer_size); + while (reader.BytesRemaining() > 0) { + const uint8_t* nalu_data_ptr = nullptr; + size_t nalu_data_size = 0; + if (reader.ReadNalu(&nalu_data_ptr, &nalu_data_size)) { + writer.WriteNalu(nalu_data_ptr, nalu_data_size); + } + } + + // Create sample buffer. 
+ status = CMSampleBufferCreate(nullptr, contiguous_buffer, true, nullptr, + nullptr, video_format, 1, 0, nullptr, 0, + nullptr, out_sample_buffer); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to create sample buffer."; + CFRelease(contiguous_buffer); + return false; + } + CFRelease(contiguous_buffer); + return true; +} +#endif + CMVideoFormatDescriptionRef CreateVideoFormatDescription( const uint8_t* annexb_buffer, size_t annexb_buffer_size) { @@ -278,6 +466,43 @@ CMVideoFormatDescriptionRef CreateVideoFormatDescription( return description; } +#ifndef DISABLE_H265 +CMVideoFormatDescriptionRef CreateH265VideoFormatDescription( + const uint8_t* annexb_buffer, + size_t annexb_buffer_size) { + const uint8_t* param_set_ptrs[3] = {}; + size_t param_set_sizes[3] = {}; + AnnexBBufferReader reader(annexb_buffer, annexb_buffer_size); + // Skip everyting before the VPS, then read the VPS, SPS and PPS + if (!reader.SeekToNextNaluOfType(H265::kVps)) { + return nullptr; + } + if (!reader.ReadNalu(¶m_set_ptrs[0], ¶m_set_sizes[0])) { + RTC_LOG(LS_ERROR) << "Failed to read VPS"; + return nullptr; + } + if (!reader.ReadNalu(¶m_set_ptrs[1], ¶m_set_sizes[1])) { + RTC_LOG(LS_ERROR) << "Failed to read SPS"; + return nullptr; + } + if (!reader.ReadNalu(¶m_set_ptrs[2], ¶m_set_sizes[2])) { + RTC_LOG(LS_ERROR) << "Failed to read PPS"; + return nullptr; + } + + // Parse the SPS and PPS into a CMVideoFormatDescription. 
+ CMVideoFormatDescriptionRef description = nullptr; + OSStatus status = CMVideoFormatDescriptionCreateFromHEVCParameterSets( + kCFAllocatorDefault, 3, param_set_ptrs, param_set_sizes, 4, nullptr, + &description); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to create video format description."; + return nullptr; + } + return description; +} +#endif + AnnexBBufferReader::AnnexBBufferReader(const uint8_t* annexb_buffer, size_t length) : start_(annexb_buffer), length_(length) { @@ -324,6 +549,19 @@ bool AnnexBBufferReader::SeekToNextNaluOfType(NaluType type) { } return false; } + +#ifndef DISABLE_H265 +bool AnnexBBufferReader::SeekToNextNaluOfType(H265::NaluType type) { + for (; offset_ != offsets_.end(); ++offset_) { + if (offset_->payload_size < 1) + continue; + if (H265::ParseNaluType(*(start_ + offset_->payload_start_offset)) == type) + return true; + } + return false; +} +#endif + AvccBufferWriter::AvccBufferWriter(uint8_t* const avcc_buffer, size_t length) : start_(avcc_buffer), offset_(0), length_(length) { RTC_DCHECK(avcc_buffer); diff --git a/sdk/objc/components/video_codec/nalu_rewriter.h b/sdk/objc/components/video_codec/nalu_rewriter.h index a0c1aa90af..490f8a22ab 100644 --- a/sdk/objc/components/video_codec/nalu_rewriter.h +++ b/sdk/objc/components/video_codec/nalu_rewriter.h @@ -18,6 +18,9 @@ #include #include "common_video/h264/h264_common.h" +#ifndef DISABLE_H265 +#include "common_video/h265/h265_common.h" +#endif #include "modules/include/module_common_types.h" #include "rtc_base/buffer.h" @@ -27,13 +30,10 @@ namespace webrtc { // Converts a sample buffer emitted from the VideoToolbox encoder into a buffer // suitable for RTP. The sample buffer is in avcc format whereas the rtp buffer -// needs to be in Annex B format. Data is written directly to |annexb_buffer| -// and a new RTPFragmentationHeader is returned in |out_header|. 
-bool H264CMSampleBufferToAnnexBBuffer( - CMSampleBufferRef avcc_sample_buffer, - bool is_keyframe, - rtc::Buffer* annexb_buffer, - std::unique_ptr* out_header); +// needs to be in Annex B format. Data is written directly to |annexb_buffer|. +bool H264CMSampleBufferToAnnexBBuffer(CMSampleBufferRef avcc_sample_buffer, + bool is_keyframe, + rtc::Buffer* annexb_buffer); // Converts a buffer received from RTP into a sample buffer suitable for the // VideoToolbox decoder. The RTP buffer is in annex b format whereas the sample @@ -47,6 +47,30 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer, CMSampleBufferRef* out_sample_buffer, CMMemoryPoolRef memory_pool); +#ifndef DISABLE_H265 +// Converts a sample buffer emitted from the VideoToolbox encoder into a buffer +// suitable for RTP. The sample buffer is in hvcc format whereas the rtp buffer +// needs to be in Annex B format. Data is written directly to |annexb_buffer| +// and a new RTPFragmentationHeader is returned in |out_header|. +bool H265CMSampleBufferToAnnexBBuffer( + CMSampleBufferRef hvcc_sample_buffer, + bool is_keyframe, + rtc::Buffer* annexb_buffer) + __OSX_AVAILABLE_STARTING(__MAC_10_12, __IPHONE_11_0); + + // Converts a buffer received from RTP into a sample buffer suitable for the +// VideoToolbox decoder. The RTP buffer is in annex b format whereas the sample +// buffer is in hvcc format. +// If |is_keyframe| is true then |video_format| is ignored since the format will +// be read from the buffer. Otherwise |video_format| must be provided. +// Caller is responsible for releasing the created sample buffer. +bool H265AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer, + size_t annexb_buffer_size, + CMVideoFormatDescriptionRef video_format, + CMSampleBufferRef* out_sample_buffer) + __OSX_AVAILABLE_STARTING(__MAC_10_12, __IPHONE_11_0); +#endif + // Returns a video format description created from the sps/pps information in // the Annex B buffer. 
If there is no such information, nullptr is returned. // The caller is responsible for releasing the description. @@ -54,6 +78,13 @@ CMVideoFormatDescriptionRef CreateVideoFormatDescription( const uint8_t* annexb_buffer, size_t annexb_buffer_size); +#ifndef DISABLE_H265 +CMVideoFormatDescriptionRef CreateH265VideoFormatDescription( + const uint8_t* annexb_buffer, + size_t annexb_buffer_size) + __OSX_AVAILABLE_STARTING(__MAC_10_12, __IPHONE_11_0); +#endif + // Helper class for reading NALUs from an RTP Annex B buffer. class AnnexBBufferReader final { public: @@ -78,6 +109,9 @@ class AnnexBBufferReader final { // Return true if a NALU of the desired type is found, false if we // reached the end instead bool SeekToNextNaluOfType(H264::NaluType type); +#ifndef DISABLE_H265 + bool SeekToNextNaluOfType(H265::NaluType type); +#endif private: // Returns the the next offset that contains NALU data. diff --git a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h index 432a382574..17eebd0aff 100644 --- a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h +++ b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN /** RTCVideoFrameBuffer containing a CVPixelBufferRef */ RTC_OBJC_EXPORT -@interface RTCCVPixelBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCCVPixelBuffer) : NSObject @property(nonatomic, readonly) CVPixelBufferRef pixelBuffer; @property(nonatomic, readonly) int cropX; diff --git a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm index 01b6405dc4..df8077b35f 100644 --- a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm +++ b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm @@ -22,7 +22,7 @@ #import #endif -@implementation RTCCVPixelBuffer { +@implementation RTC_OBJC_TYPE (RTCCVPixelBuffer) { int _width; int _height; int _bufferWidth; @@ -152,13 +152,13 @@ - 
(BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer return YES; } -- (id)toI420 { +- (id)toI420 { const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer); CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly); - RTCMutableI420Buffer* i420Buffer = - [[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]]; + RTC_OBJC_TYPE(RTCMutableI420Buffer)* i420Buffer = + [[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width] height:[self height]]; switch (pixelFormat) { case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: diff --git a/sdk/objc/helpers/RTCCameraPreviewView.h b/sdk/objc/helpers/RTCCameraPreviewView.h index 17f8d33d77..db9b15a45c 100644 --- a/sdk/objc/helpers/RTCCameraPreviewView.h +++ b/sdk/objc/helpers/RTCCameraPreviewView.h @@ -19,7 +19,7 @@ * AVCaptureSession. */ RTC_OBJC_EXPORT -@interface RTCCameraPreviewView : UIView +@interface RTC_OBJC_TYPE (RTCCameraPreviewView) : UIView /** The capture session being rendered in the view. 
Capture session * is assigned to AVCaptureVideoPreviewLayer async in the same diff --git a/sdk/objc/helpers/RTCCameraPreviewView.m b/sdk/objc/helpers/RTCCameraPreviewView.m index 57dadea5c7..12e87d8d64 100644 --- a/sdk/objc/helpers/RTCCameraPreviewView.m +++ b/sdk/objc/helpers/RTCCameraPreviewView.m @@ -15,7 +15,7 @@ #import "RTCDispatcher+Private.h" -@implementation RTCCameraPreviewView +@implementation RTC_OBJC_TYPE (RTCCameraPreviewView) @synthesize captureSession = _captureSession; @@ -48,15 +48,15 @@ - (void)setCaptureSession:(AVCaptureSession *)captureSession { return; } _captureSession = captureSession; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain block:^{ AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer]; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ previewLayer.session = captureSession; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain block:^{ [self setCorrectVideoOrientation]; diff --git a/sdk/objc/helpers/RTCDispatcher+Private.h b/sdk/objc/helpers/RTCDispatcher+Private.h index aa741f4db9..195c651790 100644 --- a/sdk/objc/helpers/RTCDispatcher+Private.h +++ b/sdk/objc/helpers/RTCDispatcher+Private.h @@ -10,8 +10,9 @@ #import "RTCDispatcher.h" -@interface RTCDispatcher () +@interface RTC_OBJC_TYPE (RTCDispatcher) +() -+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType; + + (dispatch_queue_t)dispatchQueueForType : (RTCDispatcherQueueType)dispatchType; @end diff --git a/sdk/objc/helpers/RTCDispatcher.h b/sdk/objc/helpers/RTCDispatcher.h index 4f8359b32c..e148af6dea 100644 --- a/sdk/objc/helpers/RTCDispatcher.h +++ b/sdk/objc/helpers/RTCDispatcher.h @@ -20,13 +20,15 @@ typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) { RTCDispatcherTypeCaptureSession, // Used for operations on AVAudioSession. RTCDispatcherTypeAudioSession, + // Used for operations on NWPathMonitor. 
+ RTCDispatcherTypeNetworkMonitor, }; /** Dispatcher that asynchronously dispatches blocks to a specific * shared dispatch queue. */ RTC_OBJC_EXPORT -@interface RTCDispatcher : NSObject +@interface RTC_OBJC_TYPE (RTCDispatcher) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/helpers/RTCDispatcher.m b/sdk/objc/helpers/RTCDispatcher.m index b9d64a4c54..4df19bc297 100644 --- a/sdk/objc/helpers/RTCDispatcher.m +++ b/sdk/objc/helpers/RTCDispatcher.m @@ -12,8 +12,9 @@ static dispatch_queue_t kAudioSessionQueue = nil; static dispatch_queue_t kCaptureSessionQueue = nil; +static dispatch_queue_t kNetworkMonitorQueue = nil; -@implementation RTCDispatcher +@implementation RTC_OBJC_TYPE (RTCDispatcher) + (void)initialize { static dispatch_once_t onceToken; @@ -24,6 +25,8 @@ + (void)initialize { kCaptureSessionQueue = dispatch_queue_create( "org.webrtc.RTCDispatcherCaptureSession", DISPATCH_QUEUE_SERIAL); + kNetworkMonitorQueue = + dispatch_queue_create("org.webrtc.RTCDispatcherNetworkMonitor", DISPATCH_QUEUE_SERIAL); }); } @@ -54,6 +57,8 @@ + (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType { return kCaptureSessionQueue; case RTCDispatcherTypeAudioSession: return kAudioSessionQueue; + case RTCDispatcherTypeNetworkMonitor: + return kNetworkMonitorQueue; } } diff --git a/sdk/objc/native/api/network_monitor_factory.h b/sdk/objc/native/api/network_monitor_factory.h new file mode 100644 index 0000000000..903c66893d --- /dev/null +++ b/sdk/objc/native/api/network_monitor_factory.h @@ -0,0 +1,24 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_ +#define SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_ + +#include + +#include "rtc_base/network_monitor_factory.h" + +namespace webrtc { + +std::unique_ptr CreateNetworkMonitorFactory(); + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_API_NETWORK_MONITOR_FACTORY_H_ diff --git a/sdk/objc/native/api/network_monitor_factory.mm b/sdk/objc/native/api/network_monitor_factory.mm new file mode 100644 index 0000000000..de762a9a0f --- /dev/null +++ b/sdk/objc/native/api/network_monitor_factory.mm @@ -0,0 +1,30 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "network_monitor_factory.h" + +#if defined(WEBRTC_IOS) +#include "sdk/objc/native/src/objc_network_monitor.h" +#endif + +#include "rtc_base/logging.h" + +namespace webrtc { + +std::unique_ptr CreateNetworkMonitorFactory() { + RTC_LOG(LS_INFO) << __FUNCTION__; +#if defined(WEBRTC_IOS) + return std::make_unique(); +#else + return nullptr; +#endif +} + +} diff --git a/sdk/objc/native/api/video_capturer.h b/sdk/objc/native/api/video_capturer.h index 464d148bd1..9847d8148b 100644 --- a/sdk/objc/native/api/video_capturer.h +++ b/sdk/objc/native/api/video_capturer.h @@ -20,7 +20,7 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoCapturer( - RTCVideoCapturer* objc_video_capturer, + RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread* signaling_thread, rtc::Thread* worker_thread); diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm index 26185509a7..6dd0edbcd9 100644 --- a/sdk/objc/native/api/video_capturer.mm +++ 
b/sdk/objc/native/api/video_capturer.mm @@ -17,7 +17,7 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoCapturer( - RTCVideoCapturer *objc_video_capturer, + RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread *signaling_thread, rtc::Thread *worker_thread) { RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init]; diff --git a/sdk/objc/native/api/video_decoder_factory.h b/sdk/objc/native/api/video_decoder_factory.h index 710bb6eba5..03d8af3cfe 100644 --- a/sdk/objc/native/api/video_decoder_factory.h +++ b/sdk/objc/native/api/video_decoder_factory.h @@ -20,7 +20,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoDecoderFactory( - id objc_video_decoder_factory); + id objc_video_decoder_factory); } // namespace webrtc diff --git a/sdk/objc/native/api/video_decoder_factory.mm b/sdk/objc/native/api/video_decoder_factory.mm index 8d0e4ab4cb..d418f2fe6f 100644 --- a/sdk/objc/native/api/video_decoder_factory.mm +++ b/sdk/objc/native/api/video_decoder_factory.mm @@ -17,7 +17,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoDecoderFactory( - id objc_video_decoder_factory) { + id objc_video_decoder_factory) { return std::make_unique(objc_video_decoder_factory); } diff --git a/sdk/objc/native/api/video_encoder_factory.h b/sdk/objc/native/api/video_encoder_factory.h index 8dab48c48d..6e551b288d 100644 --- a/sdk/objc/native/api/video_encoder_factory.h +++ b/sdk/objc/native/api/video_encoder_factory.h @@ -20,7 +20,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoEncoderFactory( - id objc_video_encoder_factory); + id objc_video_encoder_factory); } // namespace webrtc diff --git a/sdk/objc/native/api/video_encoder_factory.mm b/sdk/objc/native/api/video_encoder_factory.mm index b582deb108..6fa5563f75 100644 --- a/sdk/objc/native/api/video_encoder_factory.mm +++ b/sdk/objc/native/api/video_encoder_factory.mm @@ -17,7 +17,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoEncoderFactory( - id 
objc_video_encoder_factory) { + id objc_video_encoder_factory) { return std::make_unique(objc_video_encoder_factory); } diff --git a/sdk/objc/native/api/video_frame.h b/sdk/objc/native/api/video_frame.h index f8dd568b87..b4416ffabe 100644 --- a/sdk/objc/native/api/video_frame.h +++ b/sdk/objc/native/api/video_frame.h @@ -17,7 +17,7 @@ namespace webrtc { -RTCVideoFrame* NativeToObjCVideoFrame(const VideoFrame& frame); +RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame); } // namespace webrtc diff --git a/sdk/objc/native/api/video_frame.mm b/sdk/objc/native/api/video_frame.mm index 02dd830dce..b82994fd5f 100644 --- a/sdk/objc/native/api/video_frame.mm +++ b/sdk/objc/native/api/video_frame.mm @@ -14,7 +14,7 @@ namespace webrtc { -RTCVideoFrame* NativeToObjCVideoFrame(const VideoFrame& frame) { +RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame) { return ToObjCVideoFrame(frame); } diff --git a/sdk/objc/native/api/video_frame_buffer.h b/sdk/objc/native/api/video_frame_buffer.h index 54a73750f7..204d65d850 100644 --- a/sdk/objc/native/api/video_frame_buffer.h +++ b/sdk/objc/native/api/video_frame_buffer.h @@ -19,9 +19,9 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoFrameBuffer( - id objc_video_frame_buffer); + id objc_video_frame_buffer); -id NativeToObjCVideoFrameBuffer( +id NativeToObjCVideoFrameBuffer( const rtc::scoped_refptr& buffer); } // namespace webrtc diff --git a/sdk/objc/native/api/video_frame_buffer.mm b/sdk/objc/native/api/video_frame_buffer.mm index 2abda42871..6dc99756a6 100644 --- a/sdk/objc/native/api/video_frame_buffer.mm +++ b/sdk/objc/native/api/video_frame_buffer.mm @@ -15,11 +15,11 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoFrameBuffer( - id objc_video_frame_buffer) { + id objc_video_frame_buffer) { return new rtc::RefCountedObject(objc_video_frame_buffer); } -id NativeToObjCVideoFrameBuffer( +id NativeToObjCVideoFrameBuffer( const rtc::scoped_refptr &buffer) { 
return ToObjCVideoFrameBuffer(buffer); } diff --git a/sdk/objc/native/api/video_renderer.h b/sdk/objc/native/api/video_renderer.h index afa65430cb..04796b8049 100644 --- a/sdk/objc/native/api/video_renderer.h +++ b/sdk/objc/native/api/video_renderer.h @@ -21,7 +21,7 @@ namespace webrtc { std::unique_ptr> ObjCToNativeVideoRenderer( - id objc_video_renderer); + id objc_video_renderer); } // namespace webrtc diff --git a/sdk/objc/native/api/video_renderer.mm b/sdk/objc/native/api/video_renderer.mm index 66316856f4..e92d47d1e3 100644 --- a/sdk/objc/native/api/video_renderer.mm +++ b/sdk/objc/native/api/video_renderer.mm @@ -17,7 +17,7 @@ namespace webrtc { std::unique_ptr> ObjCToNativeVideoRenderer( - id objc_video_renderer) { + id objc_video_renderer) { return std::make_unique(objc_video_renderer); } diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 6bf1d9b8ea..3d953c0331 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -21,7 +21,6 @@ #include "rtc_base/atomic_ops.h" #include "rtc_base/bind.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -125,6 +124,7 @@ static void LogDeviceInfo() { AudioDeviceIOS::~AudioDeviceIOS() { RTC_DCHECK(thread_checker_.IsCurrent()); LOGI() << "~dtor" << ios::GetCurrentThreadDescription(); + thread_->Clear(this); Terminate(); audio_session_observer_ = nil; } @@ -152,7 +152,8 @@ static void LogDeviceInfo() { // here. They have not been set and confirmed yet since configureForWebRTC // is not called until audio is about to start. However, it makes sense to // store the parameters now and then verify at a later stage. 
- RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* config = + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration]; playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels); record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels); // Ensure that the audio device buffer (ADB) knows about the internal audio @@ -532,12 +533,12 @@ static void LogDeviceInfo() { // Allocate new buffers given the potentially new stream format. SetupAudioBuffersForActiveAudioSession(); } - UpdateAudioUnit([RTCAudioSession sharedInstance].canPlayOrRecord); + UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord); } void AudioDeviceIOS::HandleValidRouteChange() { RTC_DCHECK_RUN_ON(&thread_checker_); - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; RTCLog(@"%@", session); HandleSampleRateChange(session.sampleRate); } @@ -565,7 +566,7 @@ static void LogDeviceInfo() { // The audio unit is already initialized or started. // Check to see if the sample rate or buffer size has changed. 
- RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; const double session_sample_rate = session.sampleRate; const NSTimeInterval session_buffer_duration = session.IOBufferDuration; const size_t session_frames_per_buffer = @@ -646,7 +647,7 @@ static void LogDeviceInfo() { int64_t glitch_count = num_detected_playout_glitches_; dispatch_async(dispatch_get_main_queue(), ^{ - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session notifyDidDetectPlayoutGlitch:glitch_count]; }); } @@ -678,7 +679,7 @@ static void LogDeviceInfo() { void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { LOGI() << "SetupAudioBuffersForActiveAudioSession"; // Verify the current values once the audio session has been activated. - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; double sample_rate = session.sampleRate; NSTimeInterval io_buffer_duration = session.IOBufferDuration; RTCLog(@"%@", session); @@ -687,7 +688,8 @@ static void LogDeviceInfo() { // hardware sample rate but continue and use the non-ideal sample rate after // reinitializing the audio parameters. Most BT headsets only support 8kHz or // 16kHz. - RTCAudioSessionConfiguration* webRTCConfig = [RTCAudioSessionConfiguration webRTCConfiguration]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* webRTCConfig = + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration]; if (sample_rate != webRTCConfig.sampleRate) { RTC_LOG(LS_WARNING) << "Unable to set the preferred sample rate"; } @@ -797,7 +799,7 @@ static void LogDeviceInfo() { if (should_start_audio_unit) { RTCLog(@"Starting audio unit for UpdateAudioUnit"); // Log session settings before trying to start audio streaming. 
- RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; RTCLog(@"%@", session); if (!audio_unit_->Start()) { RTCLogError(@"Failed to start audio unit."); @@ -827,7 +829,7 @@ static void LogDeviceInfo() { RTCLogWarning(@"Audio session already configured."); return false; } - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session lockForConfiguration]; bool success = [session configureWebRTCSession:nil]; [session unlockForConfiguration]; @@ -847,7 +849,7 @@ static void LogDeviceInfo() { RTCLogWarning(@"Audio session already unconfigured."); return; } - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session lockForConfiguration]; [session unconfigureWebRTCSession:nil]; [session endWebRTCSession:nil]; @@ -865,7 +867,7 @@ static void LogDeviceInfo() { return false; } - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; // Subscribe to audio session events. [session pushDelegate:audio_session_observer_]; is_interrupted_ = session.isInterrupted ? true : false; @@ -915,7 +917,7 @@ static void LogDeviceInfo() { io_thread_checker_.Detach(); // Remove audio session notification observers. 
- RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session removeDelegate:audio_session_observer_]; // All I/O should be stopped or paused prior to deactivating the audio diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.h b/sdk/objc/native/src/audio/audio_device_module_ios.h index 625eec284e..fcd3bd7bc1 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.h +++ b/sdk/objc/native/src/audio/audio_device_module_ios.h @@ -19,7 +19,6 @@ #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" namespace webrtc { diff --git a/sdk/objc/native/src/network_monitor_observer.h b/sdk/objc/native/src/network_monitor_observer.h new file mode 100644 index 0000000000..85fd3b992f --- /dev/null +++ b/sdk/objc/native/src/network_monitor_observer.h @@ -0,0 +1,39 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_ +#define SDK_OBJC_NATIVE_SRC_NETWORK_MONITOR_OBSERVER_H_ + +#include +#include + +#include "rtc_base/network_constants.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +// Observer interface for listening to NWPathMonitor updates. +class NetworkMonitorObserver { + public: + // Called when a path update occurs, on network monitor dispatch queue. + // + // |adapter_type_by_name| is a map from interface name (i.e. "pdp_ip0") to + // adapter type, for all available interfaces on the current path. 
If an + // interface name isn't present it can be assumed to be unavailable. + virtual void OnPathUpdate( + std::map adapter_type_by_name) = 0; + + protected: + virtual ~NetworkMonitorObserver() {} +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_AUDIO_AUDIO_SESSION_OBSERVER_H_ diff --git a/sdk/objc/native/src/objc_frame_buffer.h b/sdk/objc/native/src/objc_frame_buffer.h index f941dad6e4..9c1ff17876 100644 --- a/sdk/objc/native/src/objc_frame_buffer.h +++ b/sdk/objc/native/src/objc_frame_buffer.h @@ -13,15 +13,18 @@ #import +#import "base/RTCMacros.h" + #include "common_video/include/video_frame_buffer.h" -@protocol RTCVideoFrameBuffer; +@protocol RTC_OBJC_TYPE +(RTCVideoFrameBuffer); namespace webrtc { class ObjCFrameBuffer : public VideoFrameBuffer { public: - explicit ObjCFrameBuffer(id); + explicit ObjCFrameBuffer(id); ~ObjCFrameBuffer() override; Type type() const override; @@ -31,15 +34,15 @@ class ObjCFrameBuffer : public VideoFrameBuffer { rtc::scoped_refptr ToI420() override; - id wrapped_frame_buffer() const; + id wrapped_frame_buffer() const; private: - id frame_buffer_; + id frame_buffer_; int width_; int height_; }; -id ToObjCVideoFrameBuffer( +id ToObjCVideoFrameBuffer( const rtc::scoped_refptr& buffer); } // namespace webrtc diff --git a/sdk/objc/native/src/objc_frame_buffer.mm b/sdk/objc/native/src/objc_frame_buffer.mm index 52e434152c..deb38a7a74 100644 --- a/sdk/objc/native/src/objc_frame_buffer.mm +++ b/sdk/objc/native/src/objc_frame_buffer.mm @@ -17,10 +17,10 @@ namespace { -/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTCI420Buffer */ +/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */ class ObjCI420FrameBuffer : public I420BufferInterface { public: - explicit ObjCI420FrameBuffer(id frame_buffer) + explicit ObjCI420FrameBuffer(id frame_buffer) : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {} ~ObjCI420FrameBuffer() 
override {} @@ -41,14 +41,14 @@ explicit ObjCI420FrameBuffer(id frame_buffer) int StrideV() const override { return frame_buffer_.strideV; } private: - id frame_buffer_; + id frame_buffer_; int width_; int height_; }; } // namespace -ObjCFrameBuffer::ObjCFrameBuffer(id frame_buffer) +ObjCFrameBuffer::ObjCFrameBuffer(id frame_buffer) : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {} ObjCFrameBuffer::~ObjCFrameBuffer() {} @@ -72,15 +72,16 @@ explicit ObjCI420FrameBuffer(id frame_buffer) return buffer; } -id ObjCFrameBuffer::wrapped_frame_buffer() const { +id ObjCFrameBuffer::wrapped_frame_buffer() const { return frame_buffer_; } -id ToObjCVideoFrameBuffer(const rtc::scoped_refptr& buffer) { +id ToObjCVideoFrameBuffer( + const rtc::scoped_refptr& buffer) { if (buffer->type() == VideoFrameBuffer::Type::kNative) { return static_cast(buffer.get())->wrapped_frame_buffer(); } else { - return [[RTCI420Buffer alloc] initWithFrameBuffer:buffer->ToI420()]; + return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()]; } } diff --git a/sdk/objc/native/src/objc_network_monitor.h b/sdk/objc/native/src/objc_network_monitor.h new file mode 100644 index 0000000000..7fcb1c7fd0 --- /dev/null +++ b/sdk/objc/native/src/objc_network_monitor.h @@ -0,0 +1,68 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_ + +#include + +#include "sdk/objc/components/network/RTCNetworkMonitor+Private.h" +#include "sdk/objc/native/src/network_monitor_observer.h" + +#include "rtc_base/async_invoker.h" +#include "rtc_base/network_monitor.h" +#include "rtc_base/network_monitor_factory.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +class ObjCNetworkMonitorFactory : public rtc::NetworkMonitorFactory { + public: + ObjCNetworkMonitorFactory() = default; + ~ObjCNetworkMonitorFactory() override = default; + + rtc::NetworkMonitorInterface* CreateNetworkMonitor() override; +}; + +class ObjCNetworkMonitor : public rtc::NetworkMonitorInterface, + public NetworkMonitorObserver { + public: + ObjCNetworkMonitor() = default; + ~ObjCNetworkMonitor() override; + + void Start() override; + void Stop() override; + + rtc::AdapterType GetAdapterType(const std::string& interface_name) override; + rtc::AdapterType GetVpnUnderlyingAdapterType( + const std::string& interface_name) override; + rtc::NetworkPreference GetNetworkPreference( + const std::string& interface_name) override; + bool IsAdapterAvailable(const std::string& interface_name) override; + + // NetworkMonitorObserver override. + // Fans out updates to observers on the correct thread. 
+ void OnPathUpdate( + std::map adapter_type_by_name) override; + + private: + rtc::Thread* thread_ = nullptr; + bool started_ = false; + std::map adapter_type_by_name_ + RTC_GUARDED_BY(thread_); + rtc::AsyncInvoker invoker_; + RTCNetworkMonitor* network_monitor_ = nil; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_NETWORK_MONITOR_H_ diff --git a/sdk/objc/native/src/objc_network_monitor.mm b/sdk/objc/native/src/objc_network_monitor.mm new file mode 100644 index 0000000000..e0e50ed1c8 --- /dev/null +++ b/sdk/objc/native/src/objc_network_monitor.mm @@ -0,0 +1,86 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/objc/native/src/objc_network_monitor.h" + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +rtc::NetworkMonitorInterface* ObjCNetworkMonitorFactory::CreateNetworkMonitor() { + return new ObjCNetworkMonitor(); +} + +ObjCNetworkMonitor::~ObjCNetworkMonitor() { + network_monitor_ = nil; +} + +void ObjCNetworkMonitor::Start() { + if (started_) { + return; + } + thread_ = rtc::Thread::Current(); + RTC_DCHECK_RUN_ON(thread_); + network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this]; + if (network_monitor_ == nil) { + RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?"; + } + started_ = true; +} + +void ObjCNetworkMonitor::Stop() { + RTC_DCHECK_RUN_ON(thread_); + if (!started_) { + return; + } + network_monitor_ = nil; + started_ = false; +} + +rtc::AdapterType ObjCNetworkMonitor::GetAdapterType(const std::string& interface_name) { + RTC_DCHECK_RUN_ON(thread_); + if 
(adapter_type_by_name_.find(interface_name) == adapter_type_by_name_.end()) { + return rtc::ADAPTER_TYPE_UNKNOWN; + } + return adapter_type_by_name_.at(interface_name); +} + +rtc::AdapterType ObjCNetworkMonitor::GetVpnUnderlyingAdapterType( + const std::string& interface_name) { + return rtc::ADAPTER_TYPE_UNKNOWN; +} + +rtc::NetworkPreference ObjCNetworkMonitor::GetNetworkPreference(const std::string& interface_name) { + return rtc::NetworkPreference::NEUTRAL; +} + +bool ObjCNetworkMonitor::IsAdapterAvailable(const std::string& interface_name) { + RTC_DCHECK_RUN_ON(thread_); + if (adapter_type_by_name_.empty()) { + // If we have no path update, assume everything's available, because it's + // preferable for WebRTC to try all interfaces rather than none at all. + return true; + } + return adapter_type_by_name_.find(interface_name) != adapter_type_by_name_.end(); +} + +void ObjCNetworkMonitor::OnPathUpdate( + std::map adapter_type_by_name) { + RTC_DCHECK(network_monitor_ != nil); + invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, [this, adapter_type_by_name] { + RTC_DCHECK_RUN_ON(thread_); + adapter_type_by_name_ = adapter_type_by_name; + SignalNetworksChanged(); + }); +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_decoder_factory.h b/sdk/objc/native/src/objc_video_decoder_factory.h index 9911bbfe01..30ad8c2a4b 100644 --- a/sdk/objc/native/src/objc_video_decoder_factory.h +++ b/sdk/objc/native/src/objc_video_decoder_factory.h @@ -11,26 +11,29 @@ #ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_ #define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_ +#import "base/RTCMacros.h" + #include "api/video_codecs/video_decoder_factory.h" #include "media/base/codec.h" -@protocol RTCVideoDecoderFactory; +@protocol RTC_OBJC_TYPE +(RTCVideoDecoderFactory); namespace webrtc { class ObjCVideoDecoderFactory : public VideoDecoderFactory { public: - explicit ObjCVideoDecoderFactory(id); + explicit ObjCVideoDecoderFactory(id); ~ObjCVideoDecoderFactory() 
override; - id wrapped_decoder_factory() const; + id wrapped_decoder_factory() const; std::vector GetSupportedFormats() const override; std::unique_ptr CreateVideoDecoder( const SdpVideoFormat& format) override; private: - id decoder_factory_; + id decoder_factory_; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm index 5aca02d5ab..974651afdf 100644 --- a/sdk/objc/native/src/objc_video_decoder_factory.mm +++ b/sdk/objc/native/src/objc_video_decoder_factory.mm @@ -10,6 +10,7 @@ #include "sdk/objc/native/src/objc_video_decoder_factory.h" +#import "base/RTCMacros.h" #import "base/RTCVideoDecoder.h" #import "base/RTCVideoDecoderFactory.h" #import "base/RTCVideoFrame.h" @@ -33,7 +34,7 @@ namespace { class ObjCVideoDecoder : public VideoDecoder { public: - ObjCVideoDecoder(id decoder) + ObjCVideoDecoder(id decoder) : decoder_(decoder), implementation_name_([decoder implementationName].stdString) {} int32_t InitDecode(const VideoCodec *codec_settings, int32_t number_of_cores) override { @@ -43,8 +44,8 @@ int32_t InitDecode(const VideoCodec *codec_settings, int32_t number_of_cores) ov int32_t Decode(const EncodedImage &input_image, bool missing_frames, int64_t render_time_ms = -1) override { - RTCEncodedImage *encodedImage = - [[RTCEncodedImage alloc] initWithNativeEncodedImage:input_image]; + RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image]; return [decoder_ decode:encodedImage missingFrames:missing_frames @@ -53,7 +54,7 @@ int32_t Decode(const EncodedImage &input_image, } int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override { - [decoder_ setCallback:^(RTCVideoFrame *frame) { + [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame, int32_t qp) { const rtc::scoped_refptr buffer = new rtc::RefCountedObject(frame.buffer); VideoFrame videoFrame = @@ -65,7 +66,8 @@ int32_t 
RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override .build(); videoFrame.set_timestamp(frame.timeStamp); - callback->Decoded(videoFrame); + callback->Decoded(videoFrame, absl::nullopt, + qp >= 0 ? absl::optional((uint8_t) qp) : absl::nullopt); }]; return WEBRTC_VIDEO_CODEC_OK; @@ -76,29 +78,30 @@ int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override const char *ImplementationName() const override { return implementation_name_.c_str(); } private: - id decoder_; + id decoder_; const std::string implementation_name_; }; } // namespace -ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(id decoder_factory) +ObjCVideoDecoderFactory::ObjCVideoDecoderFactory( + id decoder_factory) : decoder_factory_(decoder_factory) {} ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {} -id ObjCVideoDecoderFactory::wrapped_decoder_factory() const { +id ObjCVideoDecoderFactory::wrapped_decoder_factory() const { return decoder_factory_; } std::unique_ptr ObjCVideoDecoderFactory::CreateVideoDecoder( const SdpVideoFormat &format) { NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()]; - for (RTCVideoCodecInfo *codecInfo in decoder_factory_.supportedCodecs) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) { if ([codecName isEqualToString:codecInfo.name]) { - id decoder = [decoder_factory_ createDecoder:codecInfo]; + id decoder = [decoder_factory_ createDecoder:codecInfo]; - if ([decoder isKindOfClass:[RTCWrappedNativeVideoDecoder class]]) { - return [(RTCWrappedNativeVideoDecoder *)decoder releaseWrappedDecoder]; + if ([decoder isKindOfClass:[RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) class]]) { + return [(RTC_OBJC_TYPE(RTCWrappedNativeVideoDecoder) *)decoder releaseWrappedDecoder]; } else { return std::unique_ptr(new ObjCVideoDecoder(decoder)); } @@ -110,7 +113,7 @@ int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override std::vector 
ObjCVideoDecoderFactory::GetSupportedFormats() const { std::vector supported_formats; - for (RTCVideoCodecInfo *supportedCodec in decoder_factory_.supportedCodecs) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } diff --git a/sdk/objc/native/src/objc_video_encoder_factory.h b/sdk/objc/native/src/objc_video_encoder_factory.h index 6974377380..38db5e6ae7 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.h +++ b/sdk/objc/native/src/objc_video_encoder_factory.h @@ -13,27 +13,30 @@ #import +#import "base/RTCMacros.h" + #include "api/video_codecs/video_encoder_factory.h" -@protocol RTCVideoEncoderFactory; +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderFactory); namespace webrtc { class ObjCVideoEncoderFactory : public VideoEncoderFactory { public: - explicit ObjCVideoEncoderFactory(id); + explicit ObjCVideoEncoderFactory(id); ~ObjCVideoEncoderFactory() override; - id wrapped_encoder_factory() const; + id wrapped_encoder_factory() const; std::vector GetSupportedFormats() const override; std::vector GetImplementations() const override; std::unique_ptr CreateVideoEncoder( const SdpVideoFormat& format) override; - CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override; + std::unique_ptr GetEncoderSelector() const override; private: - id encoder_factory_; + id encoder_factory_; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm index b0c7c2c5f1..7095fe892b 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.mm +++ b/sdk/objc/native/src/objc_video_encoder_factory.mm @@ -12,11 +12,14 @@ #include +#import "base/RTCMacros.h" #import "base/RTCVideoEncoder.h" #import "base/RTCVideoEncoderFactory.h" #import "components/video_codec/RTCCodecSpecificInfoH264+Private.h" +#ifndef DISABLE_H265 +#import 
"components/video_codec/RTCCodecSpecificInfoH265+Private.h" +#endif #import "sdk/objc/api/peerconnection/RTCEncodedImage+Private.h" -#import "sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h" #import "sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h" #import "sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h" #import "sdk/objc/api/video_codec/RTCVideoCodecConstants.h" @@ -38,32 +41,34 @@ class ObjCVideoEncoder : public VideoEncoder { public: - ObjCVideoEncoder(id encoder) + ObjCVideoEncoder(id encoder) : encoder_(encoder), implementation_name_([encoder implementationName].stdString) {} int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override { - RTCVideoEncoderSettings *settings = - [[RTCVideoEncoderSettings alloc] initWithNativeVideoCodec:codec_settings]; + RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings = + [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings]; return [encoder_ startEncodeWithSettings:settings numberOfCores:encoder_settings.number_of_cores]; } int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override { - [encoder_ setCallback:^BOOL(RTCEncodedImage *_Nonnull frame, - id _Nonnull info, - RTCRtpFragmentationHeader *_Nonnull header) { + [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame, + id _Nonnull info, + RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * _Nonnull header) { EncodedImage encodedImage = [frame nativeEncodedImage]; // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases. 
CodecSpecificInfo codecSpecificInfo; - if ([info isKindOfClass:[RTCCodecSpecificInfoH264 class]]) { - codecSpecificInfo = [(RTCCodecSpecificInfoH264 *)info nativeCodecSpecificInfo]; + if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) { + codecSpecificInfo = + [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo]; +#ifndef DISABLE_H265 + } else if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH265) class]]) { + codecSpecificInfo = [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH265) *)info nativeCodecSpecificInfo]; +#endif } - std::unique_ptr fragmentationHeader = - [header createNativeFragmentationHeader]; - EncodedImageCallback::Result res = - callback->OnEncodedImage(encodedImage, &codecSpecificInfo, fragmentationHeader.get()); + EncodedImageCallback::Result res = callback->OnEncodedImage(encodedImage, &codecSpecificInfo); return res.error == EncodedImageCallback::Result::OK; }]; @@ -95,7 +100,7 @@ void SetRates(const RateControlParameters ¶meters) override { info.supports_native_handle = true; info.implementation_name = implementation_name_; - RTCVideoEncoderQpThresholds *qp_thresholds = [encoder_ scalingSettings]; + RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings]; info.scaling_settings = qp_thresholds ?
ScalingSettings(qp_thresholds.low, qp_thresholds.high) : ScalingSettings::kOff; @@ -105,23 +110,54 @@ void SetRates(const RateControlParameters ¶meters) override { } private: - id encoder_; + id encoder_; const std::string implementation_name_; }; + +class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface { + public: + ObjcVideoEncoderSelector(id selector) { + selector_ = selector; + } + void OnCurrentEncoder(const SdpVideoFormat &format) override { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format]; + [selector_ registerCurrentEncoderInfo:info]; + } + absl::optional OnEncoderBroken() override { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder]; + if (info) { + return [info nativeSdpVideoFormat]; + } + return absl::nullopt; + } + absl::optional OnAvailableBitrate(const DataRate &rate) override { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps()]; + if (info) { + return [info nativeSdpVideoFormat]; + } + return absl::nullopt; + } + + private: + id selector_; +}; + } // namespace -ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(id encoder_factory) +ObjCVideoEncoderFactory::ObjCVideoEncoderFactory( + id encoder_factory) : encoder_factory_(encoder_factory) {} ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {} -id ObjCVideoEncoderFactory::wrapped_encoder_factory() const { +id ObjCVideoEncoderFactory::wrapped_encoder_factory() const { return encoder_factory_; } std::vector ObjCVideoEncoderFactory::GetSupportedFormats() const { std::vector supported_formats; - for (RTCVideoCodecInfo *supportedCodec in [encoder_factory_ supportedCodecs]) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } @@ -130,9 +166,9 @@ void SetRates(const RateControlParameters ¶meters) 
override { } std::vector ObjCVideoEncoderFactory::GetImplementations() const { - if ([encoder_factory_ respondsToSelector:SEL("implementations")]) { + if ([encoder_factory_ respondsToSelector:@selector(implementations)]) { std::vector supported_formats; - for (RTCVideoCodecInfo *supportedCodec in [encoder_factory_ implementations]) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } @@ -141,28 +177,27 @@ void SetRates(const RateControlParameters ¶meters) override { return GetSupportedFormats(); } -VideoEncoderFactory::CodecInfo ObjCVideoEncoderFactory::QueryVideoEncoder( - const SdpVideoFormat &format) const { - // TODO(andersc): This is a hack until we figure out how this should be done properly. - NSString *formatName = [NSString stringForStdString:format.name]; - NSSet *wrappedSoftwareFormats = - [NSSet setWithObjects:kRTCVideoCodecVp8Name, kRTCVideoCodecVp9Name, nil]; - - CodecInfo codec_info; - codec_info.is_hardware_accelerated = ![wrappedSoftwareFormats containsObject:formatName]; - codec_info.has_internal_source = false; - return codec_info; -} - std::unique_ptr ObjCVideoEncoderFactory::CreateVideoEncoder( const SdpVideoFormat &format) { - RTCVideoCodecInfo *info = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat:format]; - id encoder = [encoder_factory_ createEncoder:info]; - if ([encoder isKindOfClass:[RTCWrappedNativeVideoEncoder class]]) { - return [(RTCWrappedNativeVideoEncoder *)encoder releaseWrappedEncoder]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format]; + id encoder = [encoder_factory_ createEncoder:info]; + if ([encoder isKindOfClass:[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) class]]) { + return [(RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) *)encoder releaseWrappedEncoder]; } else { return std::unique_ptr(new 
ObjCVideoEncoder(encoder)); } } +std::unique_ptr + ObjCVideoEncoderFactory::GetEncoderSelector() const { + if ([encoder_factory_ respondsToSelector:@selector(encoderSelector)]) { + id selector = [encoder_factory_ encoderSelector]; + if (selector) { + return absl::make_unique(selector); + } + } + return nullptr; +} + } // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_frame.h b/sdk/objc/native/src/objc_video_frame.h index fd74aca8a1..c2931cb2f8 100644 --- a/sdk/objc/native/src/objc_video_frame.h +++ b/sdk/objc/native/src/objc_video_frame.h @@ -17,7 +17,7 @@ namespace webrtc { -RTCVideoFrame* ToObjCVideoFrame(const VideoFrame& frame); +RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame& frame); } // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_frame.mm b/sdk/objc/native/src/objc_video_frame.mm index 76f7add6fc..2e8ce6153e 100644 --- a/sdk/objc/native/src/objc_video_frame.mm +++ b/sdk/objc/native/src/objc_video_frame.mm @@ -15,11 +15,11 @@ namespace webrtc { -RTCVideoFrame *ToObjCVideoFrame(const VideoFrame &frame) { - RTCVideoFrame *videoFrame = - [[RTCVideoFrame alloc] initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) - rotation:RTCVideoRotation(frame.rotation()) - timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; +RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) { + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] + initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) + rotation:RTCVideoRotation(frame.rotation()) + timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; videoFrame.timeStamp = frame.timestamp(); return videoFrame; diff --git a/sdk/objc/native/src/objc_video_renderer.h b/sdk/objc/native/src/objc_video_renderer.h index 9396ab6025..f9c35eae96 100644 --- a/sdk/objc/native/src/objc_video_renderer.h +++ b/sdk/objc/native/src/objc_video_renderer.h @@ -14,20 +14,23 @@ #import #import +#import 
"base/RTCMacros.h" + #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" -@protocol RTCVideoRenderer; +@protocol RTC_OBJC_TYPE +(RTCVideoRenderer); namespace webrtc { class ObjCVideoRenderer : public rtc::VideoSinkInterface { public: - ObjCVideoRenderer(id renderer); + ObjCVideoRenderer(id renderer); void OnFrame(const VideoFrame& nativeVideoFrame) override; private: - id renderer_; + id renderer_; CGSize size_; }; diff --git a/sdk/objc/native/src/objc_video_renderer.mm b/sdk/objc/native/src/objc_video_renderer.mm index 486b7e3b00..4a9b647ec3 100644 --- a/sdk/objc/native/src/objc_video_renderer.mm +++ b/sdk/objc/native/src/objc_video_renderer.mm @@ -10,6 +10,7 @@ #include "sdk/objc/native/src/objc_video_renderer.h" +#import "base/RTCMacros.h" #import "base/RTCVideoFrame.h" #import "base/RTCVideoRenderer.h" @@ -17,11 +18,11 @@ namespace webrtc { -ObjCVideoRenderer::ObjCVideoRenderer(id renderer) +ObjCVideoRenderer::ObjCVideoRenderer(id renderer) : renderer_(renderer), size_(CGSizeZero) {} void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) { - RTCVideoFrame* videoFrame = ToObjCVideoFrame(nativeVideoFrame); + RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = ToObjCVideoFrame(nativeVideoFrame); CGSize current_size = (videoFrame.rotation % 180 == 0) ? 
CGSizeMake(videoFrame.width, videoFrame.height) : diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 93e7d15e2f..dad6544315 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -17,9 +17,9 @@ #include "media/base/adapted_video_track_source.h" #include "rtc_base/timestamp_aligner.h" -RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame); +RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter : NSObject +@interface RTCObjCVideoSourceAdapter : NSObject @end namespace webrtc { @@ -42,7 +42,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { bool remote() const override; - void OnCapturedFrame(RTCVideoFrame* frame); + void OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame); // Called by RTCVideoSource. void OnOutputFormatRequest(int width, int height, int fps); diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 580180a1a2..85ad087e8b 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -25,7 +25,8 @@ @implementation RTCObjCVideoSourceAdapter @synthesize objCVideoTrackSource = _objCVideoTrackSource; -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { _objCVideoTrackSource->OnCapturedFrame(frame); } @@ -61,7 +62,7 @@ - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFram video_adapter()->OnOutputFormatRequest(format); } -void ObjCVideoTrackSource::OnCapturedFrame(RTCVideoFrame *frame) { +void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) { const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec; const int64_t translated_timestamp_us = 
timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros()); @@ -88,10 +89,11 @@ - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFram if (adapted_width == frame.width && adapted_height == frame.height) { // No adaption - optimized path. buffer = new rtc::RefCountedObject(frame.buffer); - } else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { // Adapted CVPixelBuffer frame. - RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; - buffer = new rtc::RefCountedObject([[RTCCVPixelBuffer alloc] + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + buffer = new rtc::RefCountedObject([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:rtcPixelBuffer.pixelBuffer adaptedWidth:adapted_width adaptedHeight:adapted_height diff --git a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm index a937957c19..ca3d67293f 100644 --- a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm +++ b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm @@ -25,7 +25,7 @@ #include "rtc_base/ref_counted_object.h" #include "sdk/objc/native/api/video_frame.h" -typedef void (^VideoSinkCallback)(RTCVideoFrame *); +typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *); namespace { @@ -63,10 +63,13 @@ - (void)testOnCapturedFrameAdaptsFrame { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + 
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; @@ -92,10 +95,13 @@ - (void)testOnCapturedFrameAdaptsFrameWithAlignment { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; @@ -119,11 +125,13 @@ - (void)testOnCapturedFrameAdaptationResultsInCommonResolutions { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = [[RTCVideoFrame alloc] initWithBuffer:buffer - rotation:RTCVideoRotation_0 - timeStampNs:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; @@ -159,16 +167,19 @@ - (void)testOnCapturedFrameWithoutAdaptation { CVPixelBufferCreate( NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, 
&pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(frame.width, outputFrame.width); XCTAssertEqual(frame.height, outputFrame.height); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(buffer.cropX, outputBuffer.cropX); XCTAssertEqual(buffer.cropY, outputBuffer.cropY); XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); @@ -192,16 +203,19 @@ - (void)testOnCapturedFrameCVPixelBufferNeedsAdaptation { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame 
*outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 0); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); @@ -225,16 +239,19 @@ - (void)testOnCapturedFrameCVPixelBufferNeedsCropping { CVPixelBufferCreate( NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 10); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); @@ -259,22 +276,25 @@ - (void)testOnCapturedFramePreAdaptedCVPixelBufferNeedsAdaptation { NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); // Create a frame that's already adapted down. 
- RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:640 - adaptedHeight:360 - cropWidth:720 - cropHeight:1280 - cropX:0 - cropY:0]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:640 + adaptedHeight:360 + cropWidth:720 + cropHeight:1280 + cropX:0 + cropY:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 480); XCTAssertEqual(outputFrame.height, 270); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 0); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(outputBuffer.cropWidth, 640); @@ -300,22 +320,25 @@ - (void)testOnCapturedFramePreCroppedCVPixelBufferNeedsCropping { CVPixelBufferCreate( NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:370 - adaptedHeight:640 - cropWidth:370 - cropHeight:640 - cropX:10 - cropY:0]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:370 + adaptedHeight:640 + cropWidth:370 + cropHeight:640 + cropX:10 + cropY:0]; + 
RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 14); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(outputBuffer.cropWidth, 360); @@ -341,22 +364,25 @@ - (void)testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping { CVPixelBufferCreate( NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:300 - adaptedHeight:640 - cropWidth:300 - cropHeight:640 - cropX:40 - cropY:0]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:300 + adaptedHeight:640 + cropWidth:300 + cropHeight:640 + cropX:40 + cropY:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 300); XCTAssertEqual(outputFrame.height, 534); - RTCCVPixelBuffer 
*outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 40); XCTAssertEqual(outputBuffer.cropY, 52); XCTAssertEqual(outputBuffer.cropWidth, 300); @@ -379,16 +405,19 @@ ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { - (void)testOnCapturedFrameI420BufferNeedsAdaptation { rtc::scoped_refptr i420Buffer = CreateI420Gradient(720, 1280); - RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCI420Buffer) *buffer = + [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer; + RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); @@ -408,16 +437,19 @@ ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { - (void)testOnCapturedFrameI420BufferNeedsCropping { rtc::scoped_refptr i420Buffer = CreateI420Gradient(380, 640); - RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + 
RTC_OBJC_TYPE(RTCI420Buffer) *buffer = + [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer; + RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); XCTAssertGreaterThanOrEqual(psnr, 40); diff --git a/sdk/objc/unittests/RTCAudioDevice_xctest.mm b/sdk/objc/unittests/RTCAudioDevice_xctest.mm index a3db613dfe..c936399f34 100644 --- a/sdk/objc/unittests/RTCAudioDevice_xctest.mm +++ b/sdk/objc/unittests/RTCAudioDevice_xctest.mm @@ -21,7 +21,7 @@ @interface RTCAudioDeviceTests : XCTestCase { std::unique_ptr _audio_device; } -@property(nonatomic) RTCAudioSession *audioSession; +@property(nonatomic) RTC_OBJC_TYPE(RTCAudioSession) * audioSession; @end @@ -34,7 +34,7 @@ - (void)setUp { _audioDeviceModule = webrtc::CreateAudioDeviceModule(); _audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS()); - self.audioSession = [RTCAudioSession sharedInstance]; + self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSError *error = nil; [self.audioSession lockForConfiguration]; @@ -61,21 +61,21 @@ - (void)tearDown { // Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly // after an iOS AVAudioSessionInterruptionTypeEnded notification event. 
-// AudioDeviceIOS listens to RTCAudioSession interrupted notifications by: +// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted notifications by: // - In AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_ -// callback with RTCAudioSession's delegate list. -// - When RTCAudioSession receives an iOS audio interrupted notification, it +// callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list. +// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted notification, it // passes the notification to callbacks in its delegate list which sets // AudioDeviceIOS's is_interrupted_ flag to true. // - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its // audio_session_observer_ callback is removed from RTCAudioSessions's // delegate list. -// So if RTCAudioSession receives an iOS end audio interruption notification, -// AudioDeviceIOS is not notified as its callback is not in RTCAudioSession's +// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption notification, +// AudioDeviceIOS is not notified as its callback is not in RTC_OBJC_TYPE(RTCAudioSession)'s // delegate list. This causes AudioDeviceIOS's is_interrupted_ flag to be in // the wrong (true) state and the audio session will ignore audio changes. -// As RTCAudioSession keeps its own interrupted state, the fix is to initialize -// AudioDeviceIOS's is_interrupted_ flag to RTCAudioSession's isInterrupted +// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to initialize +// AudioDeviceIOS's is_interrupted_ flag to RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted // flag in AudioDeviceIOS.InitPlayOrRecord. 
- (void)testInterruptedAudioSession { XCTAssertTrue(self.audioSession.isActive); diff --git a/sdk/objc/unittests/RTCAudioSessionTest.mm b/sdk/objc/unittests/RTCAudioSessionTest.mm index c2140c3ba6..4e309ca2fa 100644 --- a/sdk/objc/unittests/RTCAudioSessionTest.mm +++ b/sdk/objc/unittests/RTCAudioSessionTest.mm @@ -20,9 +20,11 @@ #import "components/audio/RTCAudioSession.h" #import "components/audio/RTCAudioSessionConfiguration.h" -@interface RTCAudioSession (UnitTesting) +@interface RTC_OBJC_TYPE (RTCAudioSession) +(UnitTesting) -@property(nonatomic, readonly) std::vector<__weak id > delegates; + @property(nonatomic, + readonly) std::vector<__weak id > delegates; - (instancetype)initWithAudioSession:(id)audioSession; @@ -38,7 +40,7 @@ @implementation MockAVAudioSession @synthesize outputVolume = _outputVolume; @end -@interface RTCAudioSessionTestDelegate : NSObject +@interface RTCAudioSessionTestDelegate : NSObject @property (nonatomic, readonly) float outputVolume; @@ -55,31 +57,31 @@ - (instancetype)init { return self; } -- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session { +- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session +- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session shouldResumeSession:(BOOL)shouldResumeSession { } -- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session - reason:(AVAudioSessionRouteChangeReason)reason - previousRoute:(AVAudioSessionRouteDescription *)previousRoute { +- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session + reason:(AVAudioSessionRouteChangeReason)reason + previousRoute:(AVAudioSessionRouteDescription *)previousRoute { } -- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session { +- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionMediaServerReset:(RTCAudioSession *)session { 
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionShouldConfigure:(RTCAudioSession *)session { +- (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session { +- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didChangeOutputVolume:(float)outputVolume { _outputVolume = outputVolume; } @@ -95,14 +97,14 @@ @implementation RTCTestRemoveOnDeallocDelegate - (instancetype)init { if (self = [super init]) { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session addDelegate:self]; } return self; } - (void)dealloc { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session removeDelegate:self]; } @@ -118,7 +120,7 @@ - (void)testLockForConfiguration; @implementation RTCAudioSessionTest - (void)testLockForConfiguration { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; for (size_t i = 0; i < 2; i++) { [session lockForConfiguration]; @@ -132,7 +134,7 @@ - (void)testLockForConfiguration { } - (void)testAddAndRemoveDelegates { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSMutableArray *delegates = [NSMutableArray array]; const size_t count = 5; for (size_t i = 0; i < count; ++i) { @@ -151,7 +153,7 @@ - (void)testAddAndRemoveDelegates { } - (void)testPushDelegate { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) 
*session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSMutableArray *delegates = [NSMutableArray array]; const size_t count = 2; for (size_t i = 0; i < count; ++i) { @@ -184,7 +186,7 @@ - (void)testPushDelegate { // Tests that delegates added to the audio session properly zero out. This is // checking an implementation detail (that vectors of __weak work as expected). - (void)testZeroingWeakDelegate { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; @autoreleasepool { // Add a delegate to the session. There should be one delegate at this // point. @@ -212,12 +214,12 @@ - (void)testRemoveDelegateOnDealloc { [[RTCTestRemoveOnDeallocDelegate alloc] init]; EXPECT_TRUE(delegate); } - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; EXPECT_EQ(0u, session.delegates.size()); } - (void)testAudioSessionActivation { - RTCAudioSession *audioSession = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; EXPECT_EQ(0, audioSession.activationCount); [audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]]; EXPECT_EQ(1, audioSession.activationCount); @@ -255,10 +257,10 @@ - (void)testConfigureWebRTCSession { setActive:YES withOptions:0 error:((NSError __autoreleasing **)[OCMArg anyPointer])]). 
andDo(setActiveBlock); - id mockAudioSession = OCMPartialMock([RTCAudioSession sharedInstance]); + id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]); OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession); - RTCAudioSession *audioSession = mockAudioSession; + RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession; EXPECT_EQ(0, audioSession.activationCount); [audioSession lockForConfiguration]; EXPECT_TRUE([audioSession checkLock:nil]); @@ -286,7 +288,8 @@ - (void)testConfigureWebRTCSession { - (void)testAudioVolumeDidNotify { MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init]; - RTCAudioSession *session = [[RTCAudioSession alloc] initWithAudioSession:mockAVAudioSession]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession]; RTCAudioSessionTestDelegate *delegate = [[RTCAudioSessionTestDelegate alloc] init]; [session addDelegate:delegate]; @@ -304,8 +307,8 @@ - (void)testAudioVolumeDidNotify { class AudioSessionTest : public ::testing::Test { protected: void TearDown() override { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; - for (id delegate : session.delegates) { + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + for (id delegate : session.delegates) { [session removeDelegate:delegate]; } } diff --git a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm index ee970643ab..3a1ab24773 100644 --- a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm +++ b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm @@ -31,7 +31,8 @@ - (void)testRequiresCroppingNoCrop { CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + 
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertFalse([buffer requiresCropping]); @@ -42,13 +43,14 @@ - (void)testRequiresCroppingWithCrop { CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *croppedBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:720 - adaptedHeight:1280 - cropWidth:360 - cropHeight:640 - cropX:100 - cropY:100]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:720 + adaptedHeight:1280 + cropWidth:360 + cropHeight:640 + cropX:100 + cropY:100]; XCTAssertTrue([croppedBuffer requiresCropping]); @@ -60,7 +62,8 @@ - (void)testRequiresScalingNoScale { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]); CVBufferRelease(pixelBufferRef); @@ -71,7 +74,8 @@ - (void)testRequiresScalingWithScale { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]); CVBufferRelease(pixelBufferRef); @@ -82,13 +86,14 @@ - (void)testRequiresScalingWithScaleAndMatchingCrop { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - 
RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:720 - adaptedHeight:1280 - cropWidth:360 - cropHeight:640 - cropX:100 - cropY:100]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:720 + adaptedHeight:1280 + cropWidth:360 + cropHeight:640 + cropX:100 + cropY:100]; XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]); CVBufferRelease(pixelBufferRef); @@ -99,7 +104,8 @@ - (void)testBufferSize_NV12 { CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000); CVBufferRelease(pixelBufferRef); @@ -109,7 +115,8 @@ - (void)testBufferSize_RGB { CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0); CVBufferRelease(pixelBufferRef); @@ -198,7 +205,8 @@ - (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat rtc::scoped_refptr i420Buffer = CreateI420Gradient(720, 1280); CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual(buffer.width, 720); XCTAssertEqual(buffer.height, 
1280); @@ -218,14 +226,14 @@ - (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()]; - RTCCVPixelBuffer *scaledBuffer = - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef]; XCTAssertEqual(scaledBuffer.width, outputSize.width); XCTAssertEqual(scaledBuffer.height, outputSize.height); if (outputSize.width > 0 && outputSize.height > 0) { - RTCI420Buffer *originalBufferI420 = [buffer toI420]; - RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420]; double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); @@ -244,14 +252,14 @@ - (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat cropX:(int)cropX DrawGradientInRGBPixelBuffer(pixelBufferRef); - RTCCVPixelBuffer *buffer = - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef) - adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX - cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY - cropX:cropX - cropY:cropY]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] + initWithPixelBuffer:pixelBufferRef + adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef) + adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef) + cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX + cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY + cropX:cropX + cropY:cropY]; XCTAssertEqual(buffer.width, 720); XCTAssertEqual(buffer.height, 1280); @@ -260,13 +268,13 @@ - 
(void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat cropX:(int)cropX CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef); [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL]; - RTCCVPixelBuffer *scaledBuffer = - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef]; XCTAssertEqual(scaledBuffer.width, 360); XCTAssertEqual(scaledBuffer.height, 640); - RTCI420Buffer *originalBufferI420 = [buffer toI420]; - RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420]; double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); @@ -282,8 +290,9 @@ - (void)toI420WithPixelFormat:(OSType)pixelFormat { CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCI420Buffer *fromCVPixelBuffer = [buffer toI420]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420]; double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]); double target = webrtc::kPerfectPSNR; diff --git a/sdk/objc/unittests/RTCCallbackLogger_xctest.m b/sdk/objc/unittests/RTCCallbackLogger_xctest.m index ceaa762f1f..1b6fb1c07b 100644 --- a/sdk/objc/unittests/RTCCallbackLogger_xctest.m +++ b/sdk/objc/unittests/RTCCallbackLogger_xctest.m @@ -14,7 +14,7 @@ @interface RTCCallbackLoggerTests : XCTestCase -@property(nonatomic, strong) RTCCallbackLogger *logger; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCallbackLogger) * logger; @end @@ -23,7 +23,7 
@@ @implementation RTCCallbackLoggerTests @synthesize logger; - (void)setUp { - self.logger = [[RTCCallbackLogger alloc] init]; + self.logger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init]; } - (void)tearDown { diff --git a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm index bd74fc7d6a..34551e5ac8 100644 --- a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm +++ b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm @@ -59,9 +59,11 @@ CMSampleBufferRef createTestSampleBufferRef() { } #endif -@interface RTCCameraVideoCapturer (Tests) -- (instancetype)initWithDelegate:(__weak id)delegate - captureSession:(AVCaptureSession *)captureSession; +@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) +(Tests) - + (instancetype)initWithDelegate + : (__weak id)delegate captureSession + : (AVCaptureSession *)captureSession; @end @interface RTCCameraVideoCapturerTests : NSObject @@ -69,7 +71,7 @@ @interface RTCCameraVideoCapturerTests : NSObject @property(nonatomic, strong) id deviceMock; @property(nonatomic, strong) id captureConnectionMock; @property(nonatomic, strong) id captureSessionMock; -@property(nonatomic, strong) RTCCameraVideoCapturer *capturer; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer; @end @implementation RTCCameraVideoCapturerTests @@ -80,9 +82,10 @@ @implementation RTCCameraVideoCapturerTests @synthesize capturer = _capturer; - (void)setup { - self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate)); + self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]); - self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock]; + self.capturer = + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock]; self.deviceMock = [self createDeviceMock]; } @@ -94,10 +97,11 @@ - (void)setupWithMockedCaptureSession { 
OCMStub([self.captureSessionMock addOutput:[OCMArg any]]); OCMStub([self.captureSessionMock beginConfiguration]); OCMStub([self.captureSessionMock commitConfiguration]); - self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate)); + self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]); - self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock - captureSession:self.captureSessionMock]; + self.capturer = + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock + captureSession:self.captureSessionMock]; self.deviceMock = [self createDeviceMock]; } @@ -160,7 +164,8 @@ - (void)testSupportedFormatsForDevice { OCMStub([self.deviceMock formats]).andReturn(formats); // when - NSArray *supportedFormats = [RTCCameraVideoCapturer supportedFormatsForDevice:self.deviceMock]; + NSArray *supportedFormats = + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock]; // then EXPECT_EQ(supportedFormats.count, 3u); @@ -199,7 +204,8 @@ - (void)testDelegateCallbackWithValidBufferAndOrientationUpdate { // then [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) { + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270); return YES; }]]; @@ -240,22 +246,23 @@ - (void)testRotationCamera:(AVCaptureDevicePosition)camera CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) { - if (camera == AVCaptureDevicePositionFront) { - if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); - } else if (deviceOrientation == 
UIDeviceOrientationLandscapeRight) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); - } - } else if (camera == AVCaptureDevicePositionBack) { - if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); - } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); - } - } - return YES; - }]]; + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { + if (camera == AVCaptureDevicePositionFront) { + if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); + } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); + } + } else if (camera == AVCaptureDevicePositionBack) { + if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); + } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); + } + } + return YES; + }]]; NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil]; @@ -298,12 +305,13 @@ - (void)testRotationFrame { CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) { - // Front camera and landscape left should return 180. But the frame says its from the back - // camera, so rotation should be 0. - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); - return YES; - }]]; + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { + // Front camera and landscape left should return 180. 
But the frame says its + // from the back camera, so rotation should be 0. + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); + return YES; + }]]; NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil]; diff --git a/sdk/objc/unittests/RTCCertificateTest.mm b/sdk/objc/unittests/RTCCertificateTest.mm index 5bf1eb3fe4..38c935cef2 100644 --- a/sdk/objc/unittests/RTCCertificateTest.mm +++ b/sdk/objc/unittests/RTCCertificateTest.mm @@ -29,38 +29,39 @@ - (void)testCertificateIsUsedInConfig; @implementation RTCCertificateTest - (void)testCertificateIsUsedInConfig { - RTCConfiguration *originalConfig = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; originalConfig.iceServers = @[ server ]; // Generate a new certificate. - RTCCertificate *originalCertificate = [RTCCertificate generateCertificateWithParams:@{ - @"expires" : @100000, - @"name" : @"RSASSA-PKCS1-v1_5" - }]; + RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate) + generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}]; // Store certificate in configuration. 
originalConfig.certificate = originalCertificate; - RTCMediaConstraints *contraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; - RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; // Create PeerConnection with this certificate. - RTCPeerConnection *peerConnection = + RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = [factory peerConnectionWithConfiguration:originalConfig constraints:contraints delegate:nil]; // Retrieve certificate from the configuration. - RTCConfiguration *retrievedConfig = peerConnection.configuration; + RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration; // Extract PEM strings from original certificate. std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String]; std::string originalCertificateField = [[originalCertificate certificate] UTF8String]; // Extract PEM strings from certificate retrieved from configuration. 
- RTCCertificate *retrievedCertificate = retrievedConfig.certificate; + RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate; std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String]; std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String]; diff --git a/sdk/objc/unittests/RTCConfigurationTest.mm b/sdk/objc/unittests/RTCConfigurationTest.mm index 3fb4d428e4..51e4a70893 100644 --- a/sdk/objc/unittests/RTCConfigurationTest.mm +++ b/sdk/objc/unittests/RTCConfigurationTest.mm @@ -28,9 +28,10 @@ @implementation RTCConfigurationTest - (void)testConversionToNativeConfiguration { NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -47,10 +48,11 @@ - (void)testConversionToNativeConfiguration { config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; - config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:YES - srtpEnableEncryptedRtpHeaderExtensions:YES - sframeRequireFrameEncryption:YES]; + config.cryptoOptions = + [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:YES + srtpEnableEncryptedRtpHeaderExtensions:YES + sframeRequireFrameEncryption:YES]; config.rtcpAudioReportIntervalMs = 2500; config.rtcpVideoReportIntervalMs = 3750; @@ -89,9 +91,10 @@ - (void)testConversionToNativeConfiguration { - 
(void)testNativeConversionToConfiguration { NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -108,20 +111,21 @@ - (void)testNativeConversionToConfiguration { config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; - config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:NO - srtpEnableEncryptedRtpHeaderExtensions:NO - sframeRequireFrameEncryption:NO]; + config.cryptoOptions = + [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:NO + srtpEnableEncryptedRtpHeaderExtensions:NO + sframeRequireFrameEncryption:NO]; config.rtcpAudioReportIntervalMs = 1500; config.rtcpVideoReportIntervalMs = 2150; webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig = [config createNativeConfiguration]; - RTCConfiguration *newConfig = [[RTCConfiguration alloc] - initWithNativeConfiguration:*nativeConfig]; + RTC_OBJC_TYPE(RTCConfiguration) *newConfig = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig]; EXPECT_EQ([config.iceServers count], newConfig.iceServers.count); - RTCIceServer *newServer = newConfig.iceServers[0]; - RTCIceServer *origServer = config.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0]; EXPECT_EQ(origServer.urlStrings.count, server.urlStrings.count); 
std::string origUrl = origServer.urlStrings.firstObject.UTF8String; std::string url = newServer.urlStrings.firstObject.UTF8String; @@ -152,7 +156,7 @@ - (void)testNativeConversionToConfiguration { } - (void)testDefaultValues { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; EXPECT_EQ(config.cryptoOptions, nil); } diff --git a/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm b/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm index a96ae51707..b3461cc854 100644 --- a/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm +++ b/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm @@ -30,8 +30,8 @@ - (void)testConversionToNativeDataChannelInit { int channelId = 4; NSString *protocol = @"protocol"; - RTCDataChannelConfiguration *dataChannelConfig = - [[RTCDataChannelConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig = + [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init]; dataChannelConfig.isOrdered = isOrdered; dataChannelConfig.maxPacketLifeTime = maxPacketLifeTime; dataChannelConfig.maxRetransmits = maxRetransmits; @@ -50,7 +50,7 @@ - (void)testConversionToNativeDataChannelInit { @end -TEST(RTCDataChannelConfiguration, NativeDataChannelInitConversionTest) { +TEST(RTC_OBJC_TYPE(RTCDataChannelConfiguration), NativeDataChannelInitConversionTest) { @autoreleasepool { RTCDataChannelConfigurationTest *test = [[RTCDataChannelConfigurationTest alloc] init]; diff --git a/sdk/objc/unittests/RTCEncodedImage_xctest.mm b/sdk/objc/unittests/RTCEncodedImage_xctest.mm new file mode 100644 index 0000000000..84804fee87 --- /dev/null +++ b/sdk/objc/unittests/RTCEncodedImage_xctest.mm @@ -0,0 +1,55 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "api/peerconnection/RTCEncodedImage+Private.h" + +#import + +@interface RTCEncodedImageTests : XCTestCase +@end + +@implementation RTCEncodedImageTests + +- (void)testInitializedWithNativeEncodedImage { + const auto encoded_data = webrtc::EncodedImageBuffer::Create(); + webrtc::EncodedImage encoded_image; + encoded_image.SetEncodedData(encoded_data); + + RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image]; + + XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data); +} + +- (void)testInitWithNSData { + NSData *bufferData = [NSData data]; + RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init]; + encodedImage.buffer = bufferData; + + webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage]; + XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr); + XCTAssertEqual(result_encoded_image.GetEncodedData()->data(), bufferData.bytes); +} + +- (void)testRetainsNativeEncodedImage { + RTC_OBJC_TYPE(RTCEncodedImage) * encodedImage; + { + const auto encoded_data = webrtc::EncodedImageBuffer::Create(); + webrtc::EncodedImage encoded_image; + encoded_image.SetEncodedData(encoded_data); + encodedImage = + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image]; + } + webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage]; + XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr); + XCTAssertTrue(result_encoded_image.GetEncodedData()->data() != nullptr); +} + +@end diff --git a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm index 01deb68a32..2407c88c1a 100644 --- 
a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm +++ b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm @@ -17,7 +17,7 @@ NSString *const kTestFileName = @"foreman.mp4"; static const int kTestTimeoutMs = 5 * 1000; // 5secs. -@interface MockCapturerDelegate : NSObject +@interface MockCapturerDelegate : NSObject @property(nonatomic, assign) NSInteger capturedFramesCount; @@ -26,7 +26,8 @@ @interface MockCapturerDelegate : NSObject @implementation MockCapturerDelegate @synthesize capturedFramesCount = _capturedFramesCount; -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { self.capturedFramesCount++; } @@ -35,7 +36,7 @@ - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFram NS_CLASS_AVAILABLE_IOS(10) @interface RTCFileVideoCapturerTests : XCTestCase -@property(nonatomic, strong) RTCFileVideoCapturer *capturer; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * capturer; @property(nonatomic, strong) MockCapturerDelegate *mockDelegate; @end @@ -46,7 +47,7 @@ @implementation RTCFileVideoCapturerTests - (void)setUp { self.mockDelegate = [[MockCapturerDelegate alloc] init]; - self.capturer = [[RTCFileVideoCapturer alloc] initWithDelegate:self.mockDelegate]; + self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate]; } - (void)tearDown { diff --git a/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m b/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m index 066958692f..ec9dc41796 100644 --- a/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m +++ b/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m @@ -22,24 +22,26 @@ @interface RTCH264ProfileLevelIdTests : XCTestCase @implementation RTCH264ProfileLevelIdTests - (void)testInitWithString { - RTCH264ProfileLevelId *profileLevelId = - [[RTCH264ProfileLevelId alloc] 
initWithHexString:level31ConstrainedHigh]; + RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId = + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh]; XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh); XCTAssertEqual(profileLevelId.level, RTCH264Level3_1); - profileLevelId = [[RTCH264ProfileLevelId alloc] initWithHexString:level31ConstrainedBaseline]; + profileLevelId = + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline]; XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline); XCTAssertEqual(profileLevelId.level, RTCH264Level3_1); } - (void)testInitWithProfileAndLevel { - RTCH264ProfileLevelId *profileLevelId = - [[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedHigh - level:RTCH264Level3_1]; + RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId = + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh + level:RTCH264Level3_1]; XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh); - profileLevelId = [[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedBaseline - level:RTCH264Level3_1]; + profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] + initWithProfile:RTCH264ProfileConstrainedBaseline + level:RTCH264Level3_1]; XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedBaseline); } diff --git a/sdk/objc/unittests/RTCIceCandidateTest.mm b/sdk/objc/unittests/RTCIceCandidateTest.mm index 18dcdad8b4..b0b6cb62a0 100644 --- a/sdk/objc/unittests/RTCIceCandidateTest.mm +++ b/sdk/objc/unittests/RTCIceCandidateTest.mm @@ -30,9 +30,8 @@ - (void)testCandidate { "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa " "59052 typ host generation 0"; - RTCIceCandidate *candidate = [[RTCIceCandidate alloc] initWithSdp:sdp - sdpMLineIndex:0 - sdpMid:@"audio"]; + RTC_OBJC_TYPE(RTCIceCandidate) *candidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] 
initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"]; std::unique_ptr nativeCandidate = candidate.nativeCandidate; @@ -51,8 +50,8 @@ - (void)testInitFromNativeCandidate { webrtc::IceCandidateInterface *nativeCandidate = webrtc::CreateIceCandidate("audio", 0, sdp, nullptr); - RTCIceCandidate *iceCandidate = - [[RTCIceCandidate alloc] initWithNativeCandidate:nativeCandidate]; + RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate]; EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]); EXPECT_EQ(0, iceCandidate.sdpMLineIndex); diff --git a/sdk/objc/unittests/RTCIceServerTest.mm b/sdk/objc/unittests/RTCIceServerTest.mm index 8ef5195b95..5dbb92f16d 100644 --- a/sdk/objc/unittests/RTCIceServerTest.mm +++ b/sdk/objc/unittests/RTCIceServerTest.mm @@ -28,8 +28,8 @@ - (void)testInitFromNativeServer; @implementation RTCIceServerTest - (void)testOneURLServer { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ - @"stun:stun1.example.net" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); @@ -39,8 +39,8 @@ - (void)testOneURLServer { } - (void)testTwoURLServer { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ - @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] + initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(2u, iceStruct.urls.size()); @@ -51,10 +51,10 @@ - (void)testTwoURLServer { } - (void)testPasswordCredential { - RTCIceServer *server = [[RTCIceServer alloc] - initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential"]; + 
RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential"]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -63,11 +63,12 @@ - (void)testPasswordCredential { } - (void)testHostname { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname"]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname"]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -77,12 +78,13 @@ - (void)testHostname { } - (void)testTlsAlpnProtocols { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname" - tlsAlpnProtocols:@[ @"proto1", @"proto2" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname" + tlsAlpnProtocols:@[ @"proto1", @"proto2" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -93,13 +95,14 @@ - (void)testTlsAlpnProtocols { } - (void)testTlsEllipticCurves { - RTCIceServer *server = [[RTCIceServer alloc] 
initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname" - tlsAlpnProtocols:@[ @"proto1", @"proto2" ] - tlsEllipticCurves:@[ @"curve1", @"curve2" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname" + tlsAlpnProtocols:@[ @"proto1", @"proto2" ] + tlsEllipticCurves:@[ @"curve1", @"curve2" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -121,8 +124,8 @@ - (void)testInitFromNativeServer { nativeServer.tls_elliptic_curves.push_back("curve1"); nativeServer.tls_elliptic_curves.push_back("curve2"); - RTCIceServer *iceServer = - [[RTCIceServer alloc] initWithNativeServer:nativeServer]; + RTC_OBJC_TYPE(RTCIceServer) *iceServer = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer]; EXPECT_EQ(1u, iceServer.urlStrings.count); EXPECT_EQ("stun:stun.example.net", [NSString stdStringForString:iceServer.urlStrings.firstObject]); diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m index d792d0445d..eb519bb13f 100644 --- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m +++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m @@ -21,8 +21,11 @@ #import "components/renderer/metal/RTCMTLNV12Renderer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -// Extension of RTCMTLVideoView for testing purposes. -@interface RTCMTLVideoView (Testing) +// Extension of RTC_OBJC_TYPE(RTCMTLVideoView) for testing purposes. 
+@interface RTC_OBJC_TYPE (RTCMTLVideoView) +(Testing) + + @property(nonatomic, readonly) MTKView *metalView; + (BOOL)isMetalAvailable; + (UIView *)createMetalView:(CGRect)frame; @@ -46,7 +49,7 @@ @implementation RTCMTLVideoViewTests @synthesize frameMock = _frameMock; - (void)setUp { - self.classMock = OCMClassMock([RTCMTLVideoView class]); + self.classMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLVideoView) class]); [self startMockingNilView]; } @@ -62,15 +65,16 @@ - (void)tearDown { } - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer { - id frameMock = OCMClassMock([RTCVideoFrame class]); + id frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]); if (hasCVPixelBuffer) { CVPixelBufferRef pixelBufferRef; CVPixelBufferCreate( kCFAllocatorDefault, 200, 200, kCVPixelFormatType_420YpCbCr8Planar, nil, &pixelBufferRef); OCMStub([frameMock buffer]) - .andReturn([[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]); + .andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]); } else { - OCMStub([frameMock buffer]).andReturn([[RTCI420Buffer alloc] initWithWidth:200 height:200]); + OCMStub([frameMock buffer]) + .andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:200 height:200]); } OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX)); return frameMock; @@ -96,7 +100,8 @@ - (void)testInitAssertsIfMetalUnavailabe { // when BOOL asserts = NO; @try { - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; (void)realView; } @catch (NSException *ex) { asserts = YES; @@ -109,8 +114,9 @@ - (void)testRTCVideoRenderNilFrameCallback { // given OCMStub([self.classMock isMetalAvailable]).andReturn(YES); - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; - self.frameMock = OCMClassMock([RTCVideoFrame class]); + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + 
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]); [[self.frameMock reject] buffer]; [[self.classMock reject] createNV12Renderer]; @@ -118,7 +124,7 @@ - (void)testRTCVideoRenderNilFrameCallback { // when [realView renderFrame:nil]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; // then [self.frameMock verify]; @@ -135,11 +141,12 @@ - (void)testRTCVideoRenderFrameCallbackI420 { OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock); [[self.classMock reject] createNV12Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; // when [realView renderFrame:self.frameMock]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; // then [self.rendererI420Mock verify]; @@ -156,11 +163,12 @@ - (void)testRTCVideoRenderFrameCallbackNV12 { OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; // when [realView renderFrame:self.frameMock]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; // then [self.rendererNV12Mock verify]; @@ -176,21 +184,22 @@ - (void)testRTCVideoRenderWorksAfterReconstruction { OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; [realView renderFrame:self.frameMock]; - [realView 
drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; [self.rendererNV12Mock verify]; [self.classMock verify]; // Recreate view. - realView = [[RTCMTLVideoView alloc] init]; + realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]); // View hould reinit renderer. OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [realView renderFrame:self.frameMock]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; [self.rendererNV12Mock verify]; [self.classMock verify]; } @@ -204,9 +213,10 @@ - (void)testDontRedrawOldFrame { OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; [realView renderFrame:self.frameMock]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; [self.rendererNV12Mock verify]; [self.classMock verify]; @@ -214,7 +224,7 @@ - (void)testDontRedrawOldFrame { [[self.rendererNV12Mock reject] drawFrame:[OCMArg any]]; [realView renderFrame:self.frameMock]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; [self.rendererNV12Mock verify]; } @@ -228,9 +238,10 @@ - (void)testDoDrawNewFrame { OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; [realView renderFrame:self.frameMock]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; [self.rendererNV12Mock verify]; [self.classMock verify]; @@ -240,7 
+251,7 @@ - (void)testDoDrawNewFrame { OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]); [realView renderFrame:self.frameMock]; - [realView drawInMTKView:nil]; + [realView drawInMTKView:realView.metalView]; [self.rendererNV12Mock verify]; } @@ -248,11 +259,12 @@ - (void)testDoDrawNewFrame { - (void)testReportsSizeChangesToDelegate { OCMStub([self.classMock isMetalAvailable]).andReturn(YES); - id delegateMock = OCMProtocolMock(@protocol(RTCVideoViewDelegate)); + id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate))); CGSize size = CGSizeMake(640, 480); OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]); - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; realView.delegate = delegateMock; [realView setSize:size]; @@ -267,7 +279,7 @@ - (void)testSetContentMode { createMetalView:CGRectZero]; OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]); - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init]; [realView setVideoContentMode:UIViewContentModeScaleAspectFill]; OCMVerify(metalKitView); diff --git a/sdk/objc/unittests/RTCMediaConstraintsTest.mm b/sdk/objc/unittests/RTCMediaConstraintsTest.mm index 4d5e450fff..7664a7ef11 100644 --- a/sdk/objc/unittests/RTCMediaConstraintsTest.mm +++ b/sdk/objc/unittests/RTCMediaConstraintsTest.mm @@ -28,9 +28,9 @@ - (void)testMediaConstraints { NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"}; NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"}; - RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatory - optionalConstraints:optional]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] 
initWithMandatoryConstraints:mandatory + optionalConstraints:optional]; std::unique_ptr nativeConstraints = [constraints nativeConstraints]; diff --git a/sdk/objc/unittests/RTCNV12TextureCache_xctest.m b/sdk/objc/unittests/RTCNV12TextureCache_xctest.m index d5fa65b173..7bdc538f67 100644 --- a/sdk/objc/unittests/RTCNV12TextureCache_xctest.m +++ b/sdk/objc/unittests/RTCNV12TextureCache_xctest.m @@ -43,10 +43,12 @@ - (void)tearDown { - (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame { CVPixelBufferRef nullPixelBuffer = NULL; - RTCCVPixelBuffer *badFrameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:nullPixelBuffer]; - RTCVideoFrame *badFrame = [[RTCVideoFrame alloc] initWithBuffer:badFrameBuffer - rotation:RTCVideoRotation_0 - timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *badFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; [_nv12TextureCache uploadFrameToTextures:badFrame]; } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm index 40b3aa0399..14131dc38d 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm @@ -22,7 +22,6 @@ #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/transport/media/media_transport_interface.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "modules/audio_device/include/audio_device.h" @@ -39,7 +38,7 @@ - (void)testDefaultComponentsBuilder; @implementation RTCPeerConnectionFactoryBuilderTest - (void)testBuilder { - id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]); + id factoryMock = 
OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); OCMExpect([factoryMock alloc]).andReturn(factoryMock); #ifdef HAVE_NO_MEDIA RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]); @@ -50,17 +49,17 @@ - (void)testBuilder { nativeVideoEncoderFactory:nullptr nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr - audioProcessingModule:nullptr - mediaTransportFactory:nullptr]); + audioProcessingModule:nullptr]); #endif RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; - RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = + [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); OCMVerifyAll(factoryMock); } - (void)testDefaultComponentsBuilder { - id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]); + id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); OCMExpect([factoryMock alloc]).andReturn(factoryMock); #ifdef HAVE_NO_MEDIA RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]); @@ -71,11 +70,11 @@ - (void)testDefaultComponentsBuilder { nativeVideoEncoderFactory:nullptr nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr - audioProcessingModule:nullptr - mediaTransportFactory:nullptr]); + audioProcessingModule:nullptr]); #endif RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder]; - RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = + [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); OCMVerifyAll(factoryMock); } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m index 477b541276..629095b81d 100644 --- 
a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m +++ b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m @@ -19,6 +19,7 @@ #import "api/peerconnection/RTCRtpReceiver.h" #import "api/peerconnection/RTCRtpSender.h" #import "api/peerconnection/RTCRtpTransceiver.h" +#import "api/peerconnection/RTCSessionDescription.h" #import "api/peerconnection/RTCVideoSource.h" #import @@ -30,16 +31,17 @@ @implementation RTCPeerConnectionFactoryTests - (void)testPeerConnectionLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *peerConnection; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; [peerConnection close]; @@ -53,11 +55,11 @@ - (void)testPeerConnectionLifetime { - (void)testMediaStreamLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCMediaStream *mediaStream; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCMediaStream) * mediaStream; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; mediaStream = [factory mediaStreamWithStreamId:@"mediaStream"]; factory = nil; } @@ -69,17 +71,19 @@ - (void)testMediaStreamLifetime { - (void)testDataChannelLifetime { @autoreleasepool { - 
RTCConfiguration *config = [[RTCConfiguration alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; - RTCDataChannelConfiguration *dataChannelConfig = [[RTCDataChannelConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig = + [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *peerConnection; - RTCDataChannel *dataChannel; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; + RTC_OBJC_TYPE(RTCDataChannel) * dataChannel; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; dataChannel = @@ -97,18 +101,20 @@ - (void)testDataChannelLifetime { - (void)testRTCRtpTransceiverLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - RTCMediaConstraints *contraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; - RTCRtpTransceiverInit *init = [[RTCRtpTransceiverInit alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init = + [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]; - RTCPeerConnectionFactory *factory; - 
RTCPeerConnection *peerConnection; - RTCRtpTransceiver *tranceiver; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; + RTC_OBJC_TYPE(RTCRtpTransceiver) * tranceiver; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil]; tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init]; @@ -125,16 +131,17 @@ - (void)testRTCRtpTransceiverLifetime { - (void)testRTCRtpSenderLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *peerConnection; - RTCRtpSender *sender; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; + RTC_OBJC_TYPE(RTCRtpSender) * sender; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"]; @@ -151,19 +158,20 @@ - (void)testRTCRtpSenderLifetime { - (void)testRTCRtpReceiverLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; + 
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *pc1; - RTCPeerConnection *pc2; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * pc1; + RTC_OBJC_TYPE(RTCPeerConnection) * pc2; - NSArray *receivers1; - NSArray *receivers2; + NSArray *receivers1; + NSArray *receivers2; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"]; @@ -197,11 +205,11 @@ - (void)testRTCRtpReceiverLifetime { - (void)testAudioSourceLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCAudioSource *audioSource; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCAudioSource) * audioSource; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; audioSource = [factory audioSourceWithConstraints:nil]; XCTAssertNotNil(audioSource); factory = nil; @@ -214,11 +222,11 @@ - (void)testAudioSourceLifetime { - (void)testVideoSourceLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCVideoSource *videoSource; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCVideoSource) * videoSource; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; videoSource = [factory videoSource]; XCTAssertNotNil(videoSource); factory = nil; @@ -231,11 +239,11 @@ - (void)testVideoSourceLifetime { - (void)testAudioTrackLifetime { @autoreleasepool { 
- RTCPeerConnectionFactory *factory; - RTCAudioTrack *audioTrack; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCAudioTrack) * audioTrack; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; audioTrack = [factory audioTrackWithTrackId:@"audioTrack"]; XCTAssertNotNil(audioTrack); factory = nil; @@ -248,11 +256,11 @@ - (void)testAudioTrackLifetime { - (void)testVideoTrackLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCVideoTrack *videoTrack; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCVideoTrack) * videoTrack; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"]; XCTAssertNotNil(videoTrack); factory = nil; @@ -263,20 +271,70 @@ - (void)testVideoTrackLifetime { XCTAssertTrue(true, "Expect test does not crash"); } -- (bool)negotiatePeerConnection:(RTCPeerConnection *)pc1 - withPeerConnection:(RTCPeerConnection *)pc2 +- (void)testRollback { + @autoreleasepool { + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{ + kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue + } + optionalConstraints:nil]; + + __block RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + __block RTC_OBJC_TYPE(RTCPeerConnection) * pc1; + RTCSessionDescription *rollback = [[RTCSessionDescription alloc] initWithType:RTCSdpTypeRollback + sdp:@""]; + + @autoreleasepool { + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; + pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; 
+ dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0); + [pc1 offerForConstraints:constraints + completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) { + XCTAssertNil(error); + XCTAssertNotNil(offer); + + __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1; + [pc1 setLocalDescription:offer + completionHandler:^(NSError *error) { + XCTAssertNil(error); + [weakPC1 setLocalDescription:rollback + completionHandler:^(NSError *error) { + XCTAssertNil(error); + }]; + }]; + NSTimeInterval negotiationTimeout = 15; + dispatch_semaphore_wait( + negotiatedSem, + dispatch_time(DISPATCH_TIME_NOW, (int64_t)(negotiationTimeout * NSEC_PER_SEC))); + + XCTAssertEqual(pc1.signalingState, RTCSignalingStateStable); + + [pc1 close]; + pc1 = nil; + factory = nil; + }]; + } + + XCTAssertTrue(true, "Expect test does not crash"); + } +} + +- (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1 + withPeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc2 negotiationTimeout:(NSTimeInterval)timeout { - __weak RTCPeerConnection *weakPC1 = pc1; - __weak RTCPeerConnection *weakPC2 = pc2; - RTCMediaConstraints *sdpConstraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{ + __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1; + __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2; + RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{ kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue } - optionalConstraints:nil]; + optionalConstraints:nil]; dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0); [weakPC1 offerForConstraints:sdpConstraints - completionHandler:^(RTCSessionDescription *offer, NSError *error) { + completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) { XCTAssertNil(error); XCTAssertNotNil(offer); [weakPC1 @@ -289,8 +347,9 @@ - 
(bool)negotiatePeerConnection:(RTCPeerConnection *)pc1 XCTAssertNil(error); [weakPC2 answerForConstraints:sdpConstraints - completionHandler:^(RTCSessionDescription *answer, - NSError *error) { + completionHandler:^( + RTC_OBJC_TYPE(RTCSessionDescription) * answer, + NSError * error) { XCTAssertNil(error); XCTAssertNotNil(answer); [weakPC2 diff --git a/sdk/objc/unittests/RTCPeerConnectionTest.mm b/sdk/objc/unittests/RTCPeerConnectionTest.mm index 3532258799..e45ca93a6c 100644 --- a/sdk/objc/unittests/RTCPeerConnectionTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionTest.mm @@ -10,6 +10,7 @@ #import +#include #include #include "rtc_base/gunit.h" @@ -20,20 +21,23 @@ #import "api/peerconnection/RTCIceServer.h" #import "api/peerconnection/RTCMediaConstraints.h" #import "api/peerconnection/RTCPeerConnection.h" +#import "api/peerconnection/RTCPeerConnectionFactory+Native.h" #import "api/peerconnection/RTCPeerConnectionFactory.h" #import "helpers/NSString+StdString.h" @interface RTCPeerConnectionTest : NSObject - (void)testConfigurationGetter; +- (void)testWithDependencies; @end @implementation RTCPeerConnectionTest - (void)testConfigurationGetter { NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -51,18 +55,21 @@ - (void)testConfigurationGetter { RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; config.activeResetSrtpParams = YES; - config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:YES 
- srtpEnableEncryptedRtpHeaderExtensions:NO - sframeRequireFrameEncryption:NO]; + config.cryptoOptions = + [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:YES + srtpEnableEncryptedRtpHeaderExtensions:NO + sframeRequireFrameEncryption:NO]; - RTCMediaConstraints *contraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; - RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; - RTCConfiguration *newConfig; + RTC_OBJC_TYPE(RTCConfiguration) * newConfig; @autoreleasepool { - RTCPeerConnection *peerConnection = + RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil]; newConfig = peerConnection.configuration; @@ -75,8 +82,8 @@ - (void)testConfigurationGetter { } EXPECT_EQ([config.iceServers count], [newConfig.iceServers count]); - RTCIceServer *newServer = newConfig.iceServers[0]; - RTCIceServer *origServer = config.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0]; std::string origUrl = origServer.urlStrings.firstObject.UTF8String; std::string url = newServer.urlStrings.firstObject.UTF8String; EXPECT_EQ(origUrl, url); @@ -104,6 +111,32 @@ - (void)testConfigurationGetter { newConfig.cryptoOptions.sframeRequireFrameEncryption); } +- (void)testWithDependencies { + NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; + + RTC_OBJC_TYPE(RTCConfiguration) *config = 
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + config.iceServers = @[ server ]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; + + RTC_OBJC_TYPE(RTCConfiguration) * newConfig; + std::unique_ptr pc_dependencies = + std::make_unique(nullptr); + @autoreleasepool { + RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = + [factory peerConnectionWithDependencies:config + constraints:contraints + dependencies:std::move(pc_dependencies) + delegate:nil]; + newConfig = peerConnection.configuration; + } +} + @end TEST(RTCPeerConnectionTest, ConfigurationGetterTest) { @@ -112,3 +145,10 @@ - (void)testConfigurationGetter { [test testConfigurationGetter]; } } + +TEST(RTCPeerConnectionTest, TestWithDependencies) { + @autoreleasepool { + RTCPeerConnectionTest *test = [[RTCPeerConnectionTest alloc] init]; + [test testWithDependencies]; + } +} diff --git a/sdk/objc/unittests/RTCSessionDescriptionTest.mm b/sdk/objc/unittests/RTCSessionDescriptionTest.mm index 0807eedf3a..ee65649cbc 100644 --- a/sdk/objc/unittests/RTCSessionDescriptionTest.mm +++ b/sdk/objc/unittests/RTCSessionDescriptionTest.mm @@ -24,19 +24,18 @@ - (void)testInitFromNativeSessionDescription; @implementation RTCSessionDescriptionTest /** - * Test conversion of an Objective-C RTCSessionDescription to a native + * Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native * SessionDescriptionInterface (based on the types and SDP strings being equal). 
*/ - (void)testSessionDescriptionConversion { - RTCSessionDescription *description = - [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer - sdp:[self sdp]]; + RTC_OBJC_TYPE(RTCSessionDescription) *description = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]]; webrtc::SessionDescriptionInterface *nativeDescription = description.nativeDescription; EXPECT_EQ(RTCSdpTypeAnswer, - [RTCSessionDescription typeForStdString:nativeDescription->type()]); + [RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]); std::string sdp; nativeDescription->ToString(&sdp); @@ -51,11 +50,10 @@ - (void)testInitFromNativeSessionDescription { [self sdp].stdString, nullptr); - RTCSessionDescription *description = - [[RTCSessionDescription alloc] initWithNativeDescription: - nativeDescription]; + RTC_OBJC_TYPE(RTCSessionDescription) *description = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:nativeDescription]; EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer, - [RTCSessionDescription stdStringForType:description.type]); + [RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]); EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]); } diff --git a/sdk/objc/unittests/nalu_rewriter_xctest.mm b/sdk/objc/unittests/nalu_rewriter_xctest.mm index 490d228573..995cc80da4 100644 --- a/sdk/objc/unittests/nalu_rewriter_xctest.mm +++ b/sdk/objc/unittests/nalu_rewriter_xctest.mm @@ -276,14 +276,12 @@ - (void)testH264CMSampleBufferToAnnexBBuffer { // clang-format on rtc::Buffer annexb_buffer(arraysize(cmsample_data)); - std::unique_ptr out_header_ptr; CMSampleBufferRef sample_buffer = [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)]; Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer, /* is_keyframe */ false, - &annexb_buffer, - &out_header_ptr); + &annexb_buffer); XCTAssertTrue(result); @@ -293,16 +291,6 @@ 
- (void)testH264CMSampleBufferToAnnexBBuffer { memcmp(expected_annex_b_data, annexb_buffer.data(), arraysize(expected_annex_b_data)); XCTAssertEqual(0, data_comparison_result); - - webrtc::RTPFragmentationHeader* out_header = out_header_ptr.get(); - - XCTAssertEqual(2, (int)out_header->Size()); - - XCTAssertEqual(4, (int)out_header->Offset(0)); - XCTAssertEqual(4, (int)out_header->Length(0)); - - XCTAssertEqual(12, (int)out_header->Offset(1)); - XCTAssertEqual(2, (int)out_header->Length(1)); } - (void)testH264CMSampleBufferToAnnexBBufferWithKeyframe { @@ -321,14 +309,12 @@ - (void)testH264CMSampleBufferToAnnexBBufferWithKeyframe { // clang-format on rtc::Buffer annexb_buffer(arraysize(cmsample_data)); - std::unique_ptr out_header_ptr; CMSampleBufferRef sample_buffer = [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)]; Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer, /* is_keyframe */ true, - &annexb_buffer, - &out_header_ptr); + &annexb_buffer); XCTAssertTrue(result); @@ -341,22 +327,6 @@ - (void)testH264CMSampleBufferToAnnexBBufferWithKeyframe { memcmp(expected_annex_b_data, annexb_buffer.data() + arraysize(SPS_PPS_BUFFER), arraysize(expected_annex_b_data))); - - webrtc::RTPFragmentationHeader* out_header = out_header_ptr.get(); - - XCTAssertEqual(4, (int)out_header->Size()); - - XCTAssertEqual(4, (int)out_header->Offset(0)); - XCTAssertEqual(14, (int)out_header->Length(0)); - - XCTAssertEqual(22, (int)out_header->Offset(1)); - XCTAssertEqual(4, (int)out_header->Length(1)); - - XCTAssertEqual(30, (int)out_header->Offset(2)); - XCTAssertEqual(4, (int)out_header->Length(2)); - - XCTAssertEqual(38, (int)out_header->Offset(3)); - XCTAssertEqual(2, (int)out_header->Length(3)); } - (CMVideoFormatDescriptionRef)createDescription { diff --git a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm index 2246eaaf5c..cc31f67b3c 100644 --- 
a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm +++ b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm @@ -13,6 +13,7 @@ #include "sdk/objc/native/src/objc_video_decoder_factory.h" +#import "base/RTCMacros.h" #import "base/RTCVideoDecoder.h" #import "base/RTCVideoDecoderFactory.h" #include "media/base/codec.h" @@ -20,8 +21,8 @@ #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/gunit.h" -id CreateDecoderFactoryReturning(int return_code) { - id decoderMock = OCMProtocolMock(@protocol(RTCVideoDecoder)); +id CreateDecoderFactoryReturning(int return_code) { + id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder))); OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code); OCMStub([decoderMock decode:[OCMArg any] missingFrames:NO @@ -30,22 +31,24 @@ .andReturn(return_code); OCMStub([decoderMock releaseDecoder]).andReturn(return_code); - id decoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoDecoderFactory)); - RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil]; + id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory))); + RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil]; OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]); OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock); return decoderFactoryMock; } -id CreateOKDecoderFactory() { +id CreateOKDecoderFactory() { return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK); } -id CreateErrorDecoderFactory() { +id CreateErrorDecoderFactory() { return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR); } -std::unique_ptr GetObjCDecoder(id factory) { +std::unique_ptr GetObjCDecoder( + id factory) { webrtc::ObjCVideoDecoderFactory decoder_factory(factory); return 
decoder_factory.CreateVideoDecoder(webrtc::SdpVideoFormat(cricket::kH264CodecName)); } @@ -70,6 +73,7 @@ std::unique_ptr decoder = GetObjCDecoder(CreateOKDecoderFactory()); webrtc::EncodedImage encoded_image; + encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create()); EXPECT_EQ(decoder->Decode(encoded_image, false, 0), WEBRTC_VIDEO_CODEC_OK); } @@ -78,6 +82,7 @@ std::unique_ptr decoder = GetObjCDecoder(CreateErrorDecoderFactory()); webrtc::EncodedImage encoded_image; + encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create()); EXPECT_EQ(decoder->Decode(encoded_image, false, 0), WEBRTC_VIDEO_CODEC_ERROR); } diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm index cd7d739c4e..728dc018e2 100644 --- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm +++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm @@ -25,8 +25,8 @@ #include "rtc_base/gunit.h" #include "sdk/objc/native/src/objc_frame_buffer.h" -id CreateEncoderFactoryReturning(int return_code) { - id encoderMock = OCMProtocolMock(@protocol(RTCVideoEncoder)); +id CreateEncoderFactoryReturning(int return_code) { + id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder))); OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1]) .andReturn(return_code); OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]]) @@ -34,22 +34,25 @@ OCMStub([encoderMock releaseEncoder]).andReturn(return_code); OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code); - id encoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoEncoderFactory)); - RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil]; + id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory))); + RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" 
parameters:nil]; OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]); + OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]); OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock); return encoderFactoryMock; } -id CreateOKEncoderFactory() { +id CreateOKEncoderFactory() { return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK); } -id CreateErrorEncoderFactory() { +id CreateErrorEncoderFactory() { return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR); } -std::unique_ptr GetObjCEncoder(id factory) { +std::unique_ptr GetObjCEncoder( + id factory) { webrtc::ObjCVideoEncoderFactory encoder_factory(factory); webrtc::SdpVideoFormat format("H264"); return encoder_factory.CreateVideoEncoder(format); @@ -82,7 +85,7 @@ CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer); rtc::scoped_refptr buffer = new rtc::RefCountedObject( - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]); + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]); webrtc::VideoFrame frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_rotation(webrtc::kVideoRotation_0) @@ -100,7 +103,7 @@ CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer); rtc::scoped_refptr buffer = new rtc::RefCountedObject( - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]); + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]); webrtc::VideoFrame frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_rotation(webrtc::kVideoRotation_0) diff --git a/stats/OWNERS b/stats/OWNERS index 633d6b9a76..7e98070d5d 100644 --- a/stats/OWNERS +++ b/stats/OWNERS @@ -1,7 +1,2 @@ hbos@webrtc.org hta@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/stats/rtc_stats.cc b/stats/rtc_stats.cc index b8e9633f46..59de664c0e 100644 --- a/stats/rtc_stats.cc +++ b/stats/rtc_stats.cc @@ -35,6 +35,20 @@ std::string VectorToString(const std::vector& vector) { return sb.Release(); } +// This overload is required because std::vector range loops don't +// return references but objects, causing -Wrange-loop-analysis diagnostics. +std::string VectorToString(const std::vector& vector) { + rtc::StringBuilder sb; + sb << "["; + const char* separator = ""; + for (bool element : vector) { + sb << separator << rtc::ToString(element); + separator = ","; + } + sb << "]"; + return sb.Release(); +} + // Produces "[\"a\",\"b\",\"c\"]". Works for vectors of both const char* and // std::string element types. template diff --git a/stats/rtc_stats_report.cc b/stats/rtc_stats_report.cc index a56d30d3c3..d29d819fc3 100644 --- a/stats/rtc_stats_report.cc +++ b/stats/rtc_stats_report.cc @@ -98,13 +98,12 @@ std::unique_ptr RTCStatsReport::Take(const std::string& id) { return stats; } -void RTCStatsReport::TakeMembersFrom( - rtc::scoped_refptr victim) { - for (StatsMap::iterator it = victim->stats_.begin(); - it != victim->stats_.end(); ++it) { +void RTCStatsReport::TakeMembersFrom(rtc::scoped_refptr other) { + for (StatsMap::iterator it = other->stats_.begin(); it != other->stats_.end(); + ++it) { AddStats(std::unique_ptr(it->second.release())); } - victim->stats_.clear(); + other->stats_.clear(); } RTCStatsReport::ConstIterator RTCStatsReport::begin() const { diff --git a/stats/rtcstats_objects.cc b/stats/rtcstats_objects.cc index 77cbc09954..30b9dd75b7 100644 --- a/stats/rtcstats_objects.cc +++ b/stats/rtcstats_objects.cc @@ -124,7 +124,7 @@ RTCCodecStats::~RTCCodecStats() {} WEBRTC_RTCSTATS_IMPL(RTCDataChannelStats, RTCStats, "data-channel", &label, &protocol, - &datachannelid, + &data_channel_identifier, &state, &messages_sent, &bytes_sent, @@ -140,7 +140,7 @@ 
RTCDataChannelStats::RTCDataChannelStats(std::string&& id, int64_t timestamp_us) : RTCStats(std::move(id), timestamp_us), label("label"), protocol("protocol"), - datachannelid("datachannelid"), + data_channel_identifier("dataChannelIdentifier"), state("state"), messages_sent("messagesSent"), bytes_sent("bytesSent"), @@ -151,7 +151,7 @@ RTCDataChannelStats::RTCDataChannelStats(const RTCDataChannelStats& other) : RTCStats(other.id(), other.timestamp_us()), label(other.label), protocol(other.protocol), - datachannelid(other.datachannelid), + data_channel_identifier(other.data_channel_identifier), state(other.state), messages_sent(other.messages_sent), bytes_sent(other.bytes_sent), @@ -395,6 +395,7 @@ WEBRTC_RTCSTATS_IMPL(RTCMediaStreamTrackStats, RTCStats, "track", &jitter_buffer_flushes, &delayed_packet_outage_samples, &relative_packet_arrival_delay, + &jitter_buffer_target_delay, &interruption_count, &total_interruption_duration, &freeze_count, @@ -454,6 +455,7 @@ RTCMediaStreamTrackStats::RTCMediaStreamTrackStats(std::string&& id, relative_packet_arrival_delay( "relativePacketArrivalDelay", {NonStandardGroupId::kRtcStatsRelativePacketArrivalDelay}), + jitter_buffer_target_delay("jitterBufferTargetDelay"), interruption_count("interruptionCount"), total_interruption_duration("totalInterruptionDuration"), freeze_count("freezeCount"), @@ -503,6 +505,7 @@ RTCMediaStreamTrackStats::RTCMediaStreamTrackStats( jitter_buffer_flushes(other.jitter_buffer_flushes), delayed_packet_outage_samples(other.delayed_packet_outage_samples), relative_packet_arrival_delay(other.relative_packet_arrival_delay), + jitter_buffer_target_delay(other.jitter_buffer_target_delay), interruption_count(other.interruption_count), total_interruption_duration(other.total_interruption_duration), freeze_count(other.freeze_count), @@ -601,6 +604,18 @@ WEBRTC_RTCSTATS_IMPL( &packets_lost, &last_packet_received_timestamp, &jitter, + &jitter_buffer_delay, + &jitter_buffer_emitted_count, + 
&total_samples_received, + &concealed_samples, + &silent_concealed_samples, + &concealment_events, + &inserted_samples_for_deceleration, + &removed_samples_for_acceleration, + &audio_level, + &total_audio_energy, + &total_samples_duration, + &frames_received, &round_trip_time, &packets_discarded, &packets_repaired, @@ -612,8 +627,14 @@ WEBRTC_RTCSTATS_IMPL( &burst_discard_rate, &gap_loss_rate, &gap_discard_rate, + &frame_width, + &frame_height, + &frame_bit_depth, + &frames_per_second, &frames_decoded, + &frames_rendered, &key_frames_decoded, + &frames_dropped, &total_decode_time, &total_inter_frame_delay, &total_squared_inter_frame_delay, @@ -637,6 +658,18 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id, packets_lost("packetsLost"), last_packet_received_timestamp("lastPacketReceivedTimestamp"), jitter("jitter"), + jitter_buffer_delay("jitterBufferDelay"), + jitter_buffer_emitted_count("jitterBufferEmittedCount"), + total_samples_received("totalSamplesReceived"), + concealed_samples("concealedSamples"), + silent_concealed_samples("silentConcealedSamples"), + concealment_events("concealmentEvents"), + inserted_samples_for_deceleration("insertedSamplesForDeceleration"), + removed_samples_for_acceleration("removedSamplesForAcceleration"), + audio_level("audioLevel"), + total_audio_energy("totalAudioEnergy"), + total_samples_duration("totalSamplesDuration"), + frames_received("framesReceived"), round_trip_time("roundTripTime"), packets_discarded("packetsDiscarded"), packets_repaired("packetsRepaired"), @@ -648,8 +681,14 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id, burst_discard_rate("burstDiscardRate"), gap_loss_rate("gapLossRate"), gap_discard_rate("gapDiscardRate"), + frame_width("frameWidth"), + frame_height("frameHeight"), + frame_bit_depth("frameBitDepth"), + frames_per_second("framesPerSecond"), frames_decoded("framesDecoded"), + frames_rendered("framesRendered"), key_frames_decoded("keyFramesDecoded"), + 
frames_dropped("framesDropped"), total_decode_time("totalDecodeTime"), total_inter_frame_delay("totalInterFrameDelay"), total_squared_inter_frame_delay("totalSquaredInterFrameDelay"), @@ -668,6 +707,19 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats( packets_lost(other.packets_lost), last_packet_received_timestamp(other.last_packet_received_timestamp), jitter(other.jitter), + jitter_buffer_delay(other.jitter_buffer_delay), + jitter_buffer_emitted_count(other.jitter_buffer_emitted_count), + total_samples_received(other.total_samples_received), + concealed_samples(other.concealed_samples), + silent_concealed_samples(other.silent_concealed_samples), + concealment_events(other.concealment_events), + inserted_samples_for_deceleration( + other.inserted_samples_for_deceleration), + removed_samples_for_acceleration(other.removed_samples_for_acceleration), + audio_level(other.audio_level), + total_audio_energy(other.total_audio_energy), + total_samples_duration(other.total_samples_duration), + frames_received(other.frames_received), round_trip_time(other.round_trip_time), packets_discarded(other.packets_discarded), packets_repaired(other.packets_repaired), @@ -679,8 +731,14 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats( burst_discard_rate(other.burst_discard_rate), gap_loss_rate(other.gap_loss_rate), gap_discard_rate(other.gap_discard_rate), + frame_width(other.frame_width), + frame_height(other.frame_height), + frame_bit_depth(other.frame_bit_depth), + frames_per_second(other.frames_per_second), frames_decoded(other.frames_decoded), + frames_rendered(other.frames_rendered), key_frames_decoded(other.key_frames_decoded), + frames_dropped(other.frames_dropped), total_decode_time(other.total_decode_time), total_inter_frame_delay(other.total_inter_frame_delay), total_squared_inter_frame_delay(other.total_squared_inter_frame_delay), @@ -695,6 +753,7 @@ WEBRTC_RTCSTATS_IMPL( RTCOutboundRTPStreamStats, RTCRTPStreamStats, "outbound-rtp", &media_source_id, &remote_id, 
+ &rid, &packets_sent, &retransmitted_packets_sent, &bytes_sent, @@ -705,6 +764,11 @@ WEBRTC_RTCSTATS_IMPL( &key_frames_encoded, &total_encode_time, &total_encoded_bytes_target, + &frame_width, + &frame_height, + &frames_per_second, + &frames_sent, + &huge_frames_sent, &total_packet_send_delay, &quality_limitation_reason, &quality_limitation_resolution_changes, @@ -721,6 +785,7 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id, : RTCRTPStreamStats(std::move(id), timestamp_us), media_source_id("mediaSourceId"), remote_id("remoteId"), + rid("rid"), packets_sent("packetsSent"), retransmitted_packets_sent("retransmittedPacketsSent"), bytes_sent("bytesSent"), @@ -731,6 +796,11 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id, key_frames_encoded("keyFramesEncoded"), total_encode_time("totalEncodeTime"), total_encoded_bytes_target("totalEncodedBytesTarget"), + frame_width("frameWidth"), + frame_height("frameHeight"), + frames_per_second("framesPerSecond"), + frames_sent("framesSent"), + huge_frames_sent("hugeFramesSent"), total_packet_send_delay("totalPacketSendDelay"), quality_limitation_reason("qualityLimitationReason"), quality_limitation_resolution_changes( @@ -743,6 +813,7 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats( : RTCRTPStreamStats(other), media_source_id(other.media_source_id), remote_id(other.remote_id), + rid(other.rid), packets_sent(other.packets_sent), retransmitted_packets_sent(other.retransmitted_packets_sent), bytes_sent(other.bytes_sent), @@ -753,6 +824,11 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats( key_frames_encoded(other.key_frames_encoded), total_encode_time(other.total_encode_time), total_encoded_bytes_target(other.total_encoded_bytes_target), + frame_width(other.frame_width), + frame_height(other.frame_height), + frames_per_second(other.frames_per_second), + frames_sent(other.frames_sent), + huge_frames_sent(other.huge_frames_sent), 
total_packet_send_delay(other.total_packet_send_delay), quality_limitation_reason(other.quality_limitation_reason), quality_limitation_resolution_changes( @@ -885,7 +961,9 @@ RTCVideoSourceStats::~RTCVideoSourceStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL(RTCTransportStats, RTCStats, "transport", &bytes_sent, + &packets_sent, &bytes_received, + &packets_received, &rtcp_transport_stats_id, &dtls_state, &selected_candidate_pair_id, @@ -904,7 +982,9 @@ RTCTransportStats::RTCTransportStats(const std::string& id, RTCTransportStats::RTCTransportStats(std::string&& id, int64_t timestamp_us) : RTCStats(std::move(id), timestamp_us), bytes_sent("bytesSent"), + packets_sent("packetsSent"), bytes_received("bytesReceived"), + packets_received("packetsReceived"), rtcp_transport_stats_id("rtcpTransportStatsId"), dtls_state("dtlsState"), selected_candidate_pair_id("selectedCandidatePairId"), @@ -918,7 +998,9 @@ RTCTransportStats::RTCTransportStats(std::string&& id, int64_t timestamp_us) RTCTransportStats::RTCTransportStats(const RTCTransportStats& other) : RTCStats(other.id(), other.timestamp_us()), bytes_sent(other.bytes_sent), + packets_sent(other.packets_sent), bytes_received(other.bytes_received), + packets_received(other.packets_received), rtcp_transport_stats_id(other.rtcp_transport_stats_id), dtls_state(other.dtls_state), selected_candidate_pair_id(other.selected_candidate_pair_id), diff --git a/style-guide.md b/style-guide.md index 901217a86d..80c3302156 100644 --- a/style-guide.md +++ b/style-guide.md @@ -31,6 +31,10 @@ WebRTC is written in C++14, but with some restrictions: [chromium-cpp]: https://chromium-cpp.appspot.com/ +Unlike the Chromium and Google C++ style guides, we do not allow C++20-style +designated initializers, because we want to stay compatible with compilers that +do not yet support them. 
+ ### Abseil You may use a subset of the utilities provided by the [Abseil][abseil] diff --git a/system_wrappers/BUILD.gn b/system_wrappers/BUILD.gn index 24ee39b1a4..f44ff5b8bf 100644 --- a/system_wrappers/BUILD.gn +++ b/system_wrappers/BUILD.gn @@ -16,6 +16,7 @@ rtc_library("system_wrappers") { visibility = [ "*" ] sources = [ "include/clock.h", + "include/cpu_features_wrapper.h", "include/cpu_info.h", "include/ntp_time.h", "include/rtp_to_ntp_estimator.h", @@ -30,15 +31,16 @@ rtc_library("system_wrappers") { defines = [] libs = [] deps = [ - ":cpu_features_api", + ":field_trial", "../api:array_view", "../api/units:timestamp", "../modules:module_api_public", "../rtc_base:checks", - "../rtc_base/synchronization:rw_lock_wrapper", + "../rtc_base/synchronization:mutex", "../rtc_base/system:arch", - "//third_party/abseil-cpp/absl/types:optional", + "../rtc_base/system:rtc_export", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] if (is_android) { if (build_with_mozilla) { @@ -48,15 +50,16 @@ rtc_library("system_wrappers") { "/nsprpub/pr/include", ] } else { - deps += [ ":cpu_features_android" ] + sources += [ "source/cpu_features_android.cc" ] + deps += [ "//third_party/android_sdk:cpu_features" ] } libs += [ "log" ] } - if (is_linux) { + if (is_linux || is_chromeos) { if (!build_with_chromium) { - deps += [ ":cpu_features_linux" ] + sources += [ "source/cpu_features_linux.cc" ] } libs += [ "rt" ] @@ -67,7 +70,7 @@ rtc_library("system_wrappers") { # Windows needs ../rtc_base due to include of # webrtc/rtc_base/win32.h in source/clock.cc. 
- deps += [ "../rtc_base" ] + deps += [ "../rtc_base:win32" ] } deps += [ @@ -76,10 +79,6 @@ rtc_library("system_wrappers") { ] } -rtc_source_set("cpu_features_api") { - sources = [ "include/cpu_features_wrapper.h" ] -} - rtc_library("field_trial") { visibility = [ "*" ] public = [ "include/field_trial.h" ] @@ -91,20 +90,8 @@ rtc_library("field_trial") { "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:stringutils", - "//third_party/abseil-cpp/absl/strings", - ] -} - -rtc_library("event_wrapper") { - visibility = [ - "../modules/video_coding:video_coding_legacy", - "../modules/video_coding:video_coding_unittests", - ] - sources = [ - "include/event_wrapper.h", - "source/event.cc", ] - deps = [ "../rtc_base:rtc_event" ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("metrics") { @@ -117,27 +104,10 @@ rtc_library("metrics") { deps = [ "../rtc_base:checks", "../rtc_base:rtc_base_approved", + "../rtc_base/synchronization:mutex", ] } -if (is_android && !build_with_mozilla) { - rtc_library("cpu_features_android") { - sources = [ "source/cpu_features_android.c" ] - - deps = [ "//third_party/android_sdk:cpu_features" ] - } -} - -if (is_linux) { - rtc_library("cpu_features_linux") { - sources = [ "source/cpu_features_linux.c" ] - deps = [ - ":cpu_features_api", - "../rtc_base/system:arch", - ] - } -} - if (rtc_include_tests) { rtc_test("system_wrappers_unittests") { testonly = true diff --git a/system_wrappers/OWNERS b/system_wrappers/OWNERS index a08f6114b9..0a2fb1566d 100644 --- a/system_wrappers/OWNERS +++ b/system_wrappers/OWNERS @@ -1,9 +1,3 @@ henrika@webrtc.org mflodman@webrtc.org -niklas.enbom@webrtc.org nisse@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. 
-per-file *.gn=* -per-file *.gni=* diff --git a/system_wrappers/include/clock.h b/system_wrappers/include/clock.h index c026a4e7d9..3c60f63da8 100644 --- a/system_wrappers/include/clock.h +++ b/system_wrappers/include/clock.h @@ -13,10 +13,11 @@ #include +#include #include #include "api/units/timestamp.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" +#include "rtc_base/system/rtc_export.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -28,12 +29,12 @@ const uint32_t kNtpJan1970 = 2208988800UL; const double kMagicNtpFractionalUnit = 4.294967296E+9; // A clock interface that allows reading of absolute and relative timestamps. -class Clock { +class RTC_EXPORT Clock { public: virtual ~Clock() {} // Return a timestamp relative to an unspecified epoch. virtual Timestamp CurrentTime() { - return Timestamp::us(TimeInMicroseconds()); + return Timestamp::Micros(TimeInMicroseconds()); } virtual int64_t TimeInMilliseconds() { return CurrentTime().ms(); } virtual int64_t TimeInMicroseconds() { return CurrentTime().us(); } @@ -77,8 +78,12 @@ class SimulatedClock : public Clock { void AdvanceTime(TimeDelta delta); private: - Timestamp time_; - std::unique_ptr lock_; + // The time is read and incremented with relaxed order. Each thread will see + // monotonically increasing time, and when threads post tasks or messages to + // one another, the synchronization done as part of the message passing should + // ensure that any causual chain of events on multiple threads also + // corresponds to monotonically increasing time. 
+ std::atomic time_us_; }; } // namespace webrtc diff --git a/system_wrappers/include/cpu_features_wrapper.h b/system_wrappers/include/cpu_features_wrapper.h index 739161afca..612b4a5d6b 100644 --- a/system_wrappers/include/cpu_features_wrapper.h +++ b/system_wrappers/include/cpu_features_wrapper.h @@ -13,12 +13,10 @@ #include -#if defined(__cplusplus) || defined(c_plusplus) -extern "C" { -#endif +namespace webrtc { // List of features in x86. -typedef enum { kSSE2, kSSE3 } CPUFeature; +typedef enum { kSSE2, kSSE3, kAVX2 } CPUFeature; // List of features in ARM. enum { @@ -28,21 +26,17 @@ enum { kCPUFeatureLDREXSTREX = (1 << 3) }; -typedef int (*WebRtc_CPUInfo)(CPUFeature feature); - // Returns true if the CPU supports the feature. -extern WebRtc_CPUInfo WebRtc_GetCPUInfo; +int GetCPUInfo(CPUFeature feature); // No CPU feature is available => straight C path. -extern WebRtc_CPUInfo WebRtc_GetCPUInfoNoASM; +int GetCPUInfoNoASM(CPUFeature feature); // Return the features in an ARM device. // It detects the features in the hardware platform, and returns supported // values in the above enum definition as a bitmask. 
-extern uint64_t WebRtc_GetCPUFeaturesARM(void); +uint64_t GetCPUFeaturesARM(void); -#if defined(__cplusplus) || defined(c_plusplus) -} // extern "C" -#endif +} // namespace webrtc #endif // SYSTEM_WRAPPERS_INCLUDE_CPU_FEATURES_WRAPPER_H_ diff --git a/system_wrappers/include/ntp_time.h b/system_wrappers/include/ntp_time.h index 1f57558b71..332f8f4624 100644 --- a/system_wrappers/include/ntp_time.h +++ b/system_wrappers/include/ntp_time.h @@ -70,11 +70,15 @@ inline int64_t Int64MsToQ32x32(int64_t milliseconds) { double result = std::round(milliseconds * (NtpTime::kFractionsPerSecond / 1000.0)); - if (result <= std::numeric_limits::min()) { + // Explicitly cast values to double to avoid implicit conversion warnings + // The conversion of the std::numeric_limits::max() triggers + // -Wimplicit-int-float-conversion warning in clang 10.0.0 without explicit + // cast + if (result <= static_cast(std::numeric_limits::min())) { return std::numeric_limits::min(); } - if (result >= std::numeric_limits::max()) { + if (result >= static_cast(std::numeric_limits::max())) { return std::numeric_limits::max(); } @@ -89,11 +93,15 @@ inline uint64_t Int64MsToUQ32x32(int64_t milliseconds) { double result = std::round(milliseconds * (NtpTime::kFractionsPerSecond / 1000.0)); - if (result <= std::numeric_limits::min()) { + // Explicitly cast values to double to avoid implicit conversion warnings + // The conversion of the std::numeric_limits::max() triggers + // -Wimplicit-int-float-conversion warning in clang 10.0.0 without explicit + // cast + if (result <= static_cast(std::numeric_limits::min())) { return std::numeric_limits::min(); } - if (result >= std::numeric_limits::max()) { + if (result >= static_cast(std::numeric_limits::max())) { return std::numeric_limits::max(); } diff --git a/system_wrappers/source/clock.cc b/system_wrappers/source/clock.cc index 3eea155711..0ae624d849 100644 --- a/system_wrappers/source/clock.cc +++ b/system_wrappers/source/clock.cc @@ -17,7 +17,6 @@ 
#include -#include "rtc_base/critical_section.h" #elif defined(WEBRTC_POSIX) @@ -26,13 +25,15 @@ #endif // defined(WEBRTC_POSIX) -#include "rtc_base/synchronization/rw_lock_wrapper.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" namespace webrtc { class RealTimeClock : public Clock { - Timestamp CurrentTime() override { return Timestamp::us(rtc::TimeMicros()); } + Timestamp CurrentTime() override { + return Timestamp::Micros(rtc::TimeMicros()); + } // Return a timestamp in milliseconds relative to some arbitrary source; the // source is fixed for this clock. int64_t TimeInMilliseconds() override { return rtc::TimeMillis(); } @@ -148,7 +149,7 @@ class WindowsRealTimeClock : public RealTimeClock { DWORD t; LARGE_INTEGER elapsed_ms; { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // time MUST be fetched inside the critical section to avoid non-monotonic // last_time_ms_ values that'll register as incorrect wraparounds due to // concurrent calls to GetTime. 
@@ -198,7 +199,7 @@ class WindowsRealTimeClock : public RealTimeClock { return ref; } - rtc::CriticalSection crit_; + Mutex mutex_; DWORD last_time_ms_; LONG num_timer_wraps_; const ReferencePoint ref_point_; @@ -237,16 +238,15 @@ Clock* Clock::GetRealTimeClock() { } SimulatedClock::SimulatedClock(int64_t initial_time_us) - : SimulatedClock(Timestamp::us(initial_time_us)) {} + : time_us_(initial_time_us) {} SimulatedClock::SimulatedClock(Timestamp initial_time) - : time_(initial_time), lock_(RWLockWrapper::CreateRWLock()) {} + : SimulatedClock(initial_time.us()) {} SimulatedClock::~SimulatedClock() {} Timestamp SimulatedClock::CurrentTime() { - ReadLockScoped synchronize(*lock_); - return time_; + return Timestamp::Micros(time_us_.load(std::memory_order_relaxed)); } NtpTime SimulatedClock::CurrentNtpTime() { @@ -262,16 +262,20 @@ int64_t SimulatedClock::CurrentNtpInMilliseconds() { } void SimulatedClock::AdvanceTimeMilliseconds(int64_t milliseconds) { - AdvanceTime(TimeDelta::ms(milliseconds)); + AdvanceTime(TimeDelta::Millis(milliseconds)); } void SimulatedClock::AdvanceTimeMicroseconds(int64_t microseconds) { - AdvanceTime(TimeDelta::us(microseconds)); + AdvanceTime(TimeDelta::Micros(microseconds)); } +// TODO(bugs.webrtc.org(12102): It's desirable to let a single thread own +// advancement of the clock. We could then replace this read-modify-write +// operation with just a thread checker. But currently, that breaks a couple of +// tests, in particular, RepeatingTaskTest.ClockIntegration and +// CallStatsTest.LastProcessedRtt. 
void SimulatedClock::AdvanceTime(TimeDelta delta) { - WriteLockScoped synchronize(*lock_); - time_ += delta; + time_us_.fetch_add(delta.us(), std::memory_order_relaxed); } } // namespace webrtc diff --git a/system_wrappers/source/cpu_features.cc b/system_wrappers/source/cpu_features.cc index ebcb48c15f..0f81212894 100644 --- a/system_wrappers/source/cpu_features.cc +++ b/system_wrappers/source/cpu_features.cc @@ -12,11 +12,14 @@ #include "rtc_base/system/arch.h" #include "system_wrappers/include/cpu_features_wrapper.h" +#include "system_wrappers/include/field_trial.h" #if defined(WEBRTC_ARCH_X86_FAMILY) && defined(_MSC_VER) #include #endif +namespace webrtc { + // No CPU feature is available => straight C path. int GetCPUInfoNoASM(CPUFeature feature) { (void)feature; @@ -24,6 +27,22 @@ int GetCPUInfoNoASM(CPUFeature feature) { } #if defined(WEBRTC_ARCH_X86_FAMILY) + +#if defined(WEBRTC_ENABLE_AVX2) +// xgetbv returns the value of an Intel Extended Control Register (XCR). +// Currently only XCR0 is defined by Intel so |xcr| should always be zero. +static uint64_t xgetbv(uint32_t xcr) { +#if defined(_MSC_VER) + return _xgetbv(xcr); +#else + uint32_t eax, edx; + + __asm__ volatile("xgetbv" : "=a"(eax), "=d"(edx) : "c"(xcr)); + return (static_cast(edx) << 32) | eax; +#endif // _MSC_VER +} +#endif // WEBRTC_ENABLE_AVX2 + #ifndef _MSC_VER // Intrinsic for "cpuid". #if defined(__pic__) && defined(__i386__) @@ -41,7 +60,7 @@ static inline void __cpuid(int cpu_info[4], int info_type) { __asm__ volatile("cpuid\n" : "=a"(cpu_info[0]), "=b"(cpu_info[1]), "=c"(cpu_info[2]), "=d"(cpu_info[3]) - : "a"(info_type)); + : "a"(info_type), "c"(0)); } #endif #endif // _MSC_VER @@ -49,7 +68,7 @@ static inline void __cpuid(int cpu_info[4], int info_type) { #if defined(WEBRTC_ARCH_X86_FAMILY) // Actual feature detection for x86. 
-static int GetCPUInfo(CPUFeature feature) { +int GetCPUInfo(CPUFeature feature) { int cpu_info[4]; __cpuid(cpu_info, 1); if (feature == kSSE2) { @@ -58,15 +77,39 @@ static int GetCPUInfo(CPUFeature feature) { if (feature == kSSE3) { return 0 != (cpu_info[2] & 0x00000001); } +#if defined(WEBRTC_ENABLE_AVX2) + if (feature == kAVX2 && + !webrtc::field_trial::IsEnabled("WebRTC-Avx2SupportKillSwitch")) { + int cpu_info7[4]; + __cpuid(cpu_info7, 0); + int num_ids = cpu_info7[0]; + if (num_ids < 7) { + return 0; + } + // Interpret CPU feature information. + __cpuid(cpu_info7, 7); + + // AVX instructions can be used when + // a) AVX are supported by the CPU, + // b) XSAVE is supported by the CPU, + // c) XSAVE is enabled by the kernel. + // See http://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled + // AVX2 support needs (avx_support && (cpu_info7[1] & 0x00000020) != 0;). + return (cpu_info[2] & 0x10000000) != 0 && + (cpu_info[2] & 0x04000000) != 0 /* XSAVE */ && + (cpu_info[2] & 0x08000000) != 0 /* OSXSAVE */ && + (xgetbv(0) & 0x00000006) == 6 /* XSAVE enabled by kernel */ && + (cpu_info7[1] & 0x00000020) != 0; + } +#endif // WEBRTC_ENABLE_AVX2 return 0; } #else // Default to straight C for other platforms. 
-static int GetCPUInfo(CPUFeature feature) { +int GetCPUInfo(CPUFeature feature) { (void)feature; return 0; } #endif -WebRtc_CPUInfo WebRtc_GetCPUInfo = GetCPUInfo; -WebRtc_CPUInfo WebRtc_GetCPUInfoNoASM = GetCPUInfoNoASM; +} // namespace webrtc diff --git a/system_wrappers/source/cpu_features_android.c b/system_wrappers/source/cpu_features_android.cc similarity index 85% rename from system_wrappers/source/cpu_features_android.c rename to system_wrappers/source/cpu_features_android.cc index 0cb3a6c5ee..95cc609b09 100644 --- a/system_wrappers/source/cpu_features_android.c +++ b/system_wrappers/source/cpu_features_android.cc @@ -10,6 +10,10 @@ #include -uint64_t WebRtc_GetCPUFeaturesARM(void) { +namespace webrtc { + +uint64_t GetCPUFeaturesARM(void) { return android_getCpuFeatures(); } + +} // namespace webrtc diff --git a/system_wrappers/source/cpu_features_linux.c b/system_wrappers/source/cpu_features_linux.cc similarity index 87% rename from system_wrappers/source/cpu_features_linux.c rename to system_wrappers/source/cpu_features_linux.cc index 004de5a6a9..335bed4da3 100644 --- a/system_wrappers/source/cpu_features_linux.c +++ b/system_wrappers/source/cpu_features_linux.cc @@ -8,32 +8,39 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include #include #include -#include -#ifndef __GLIBC_PREREQ -#define __GLIBC_PREREQ(a, b) 0 + +#ifdef __GLIBC_PREREQ +#define WEBRTC_GLIBC_PREREQ(a, b) __GLIBC_PREREQ(a, b) +#else +#define WEBRTC_GLIBC_PREREQ(a, b) 0 #endif -#if __GLIBC_PREREQ(2, 16) + +#if WEBRTC_GLIBC_PREREQ(2, 16) #include #else -#include -#include #include +#include #include +#include #endif + #include "rtc_base/system/arch.h" #include "system_wrappers/include/cpu_features_wrapper.h" #if defined(WEBRTC_ARCH_ARM_FAMILY) #include -uint64_t WebRtc_GetCPUFeaturesARM(void) { +namespace webrtc { + +uint64_t GetCPUFeaturesARM(void) { uint64_t result = 0; int architecture = 0; - unsigned long hwcap = 0; + uint64_t hwcap = 0; const char* platform = NULL; -#if __GLIBC_PREREQ(2, 16) +#if WEBRTC_GLIBC_PREREQ(2, 16) hwcap = getauxval(AT_HWCAP); platform = (const char*)getauxval(AT_PLATFORM); #else @@ -57,7 +64,7 @@ uint64_t WebRtc_GetCPUFeaturesARM(void) { } close(fd); } -#endif // __GLIBC_PREREQ(2,16) +#endif // WEBRTC_GLIBC_PREREQ(2, 16) #if defined(__aarch64__) architecture = 8; if ((hwcap & HWCAP_FP) != 0) @@ -84,4 +91,6 @@ uint64_t WebRtc_GetCPUFeaturesARM(void) { result |= kCPUFeatureLDREXSTREX; return result; } + +} // namespace webrtc #endif // WEBRTC_ARCH_ARM_FAMILY diff --git a/system_wrappers/source/field_trial_unittest.cc b/system_wrappers/source/field_trial_unittest.cc index fdabe1b7e6..ada6313e67 100644 --- a/system_wrappers/source/field_trial_unittest.cc +++ b/system_wrappers/source/field_trial_unittest.cc @@ -32,7 +32,7 @@ TEST(FieldTrialValidationTest, AcceptsValidInputs) { EXPECT_TRUE(FieldTrialsStringIsValid("Audio/Enabled/B/C/Audio/Enabled/")); } -TEST(FieldTrialValidationTest, RejectsBadInputs) { +TEST(FieldTrialValidationDeathTest, RejectsBadInputs) { // Bad delimiters RTC_EXPECT_DEATH(InitFieldTrialsFromString("Audio/EnabledVideo/Disabled/"), "Invalid field trials string:"); @@ -90,7 +90,7 @@ TEST(FieldTrialMergingTest, MergesValidInput) { "Audio/Enabled/Video/Enabled/"); } 
-TEST(FieldTrialMergingTest, DchecksBadInput) { +TEST(FieldTrialMergingDeathTest, DchecksBadInput) { RTC_EXPECT_DEATH(MergeFieldTrialsStrings("Audio/Enabled/", "garbage"), "Invalid field trials string:"); } diff --git a/system_wrappers/source/metrics.cc b/system_wrappers/source/metrics.cc index 2383272887..d42833643d 100644 --- a/system_wrappers/source/metrics.cc +++ b/system_wrappers/source/metrics.cc @@ -11,7 +11,8 @@ #include -#include "rtc_base/critical_section.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" // Default implementation of histogram methods for WebRTC clients that do not @@ -38,7 +39,7 @@ class RtcHistogram { sample = std::min(sample, max_); sample = std::max(sample, min_ - 1); // Underflow bucket. - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); if (info_.samples.size() == kMaxSampleMapSize && info_.samples.find(sample) == info_.samples.end()) { return; @@ -48,7 +49,7 @@ class RtcHistogram { // Returns a copy (or nullptr if there are no samples) and clears samples. std::unique_ptr GetAndReset() { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); if (info_.samples.empty()) return nullptr; @@ -64,19 +65,19 @@ class RtcHistogram { // Functions only for testing. void Reset() { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); info_.samples.clear(); } int NumEvents(int sample) const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto it = info_.samples.find(sample); return (it == info_.samples.end()) ? 0 : it->second; } int NumSamples() const { int num_samples = 0; - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); for (const auto& sample : info_.samples) { num_samples += sample.second; } @@ -84,20 +85,20 @@ class RtcHistogram { } int MinSample() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return (info_.samples.empty()) ? 
-1 : info_.samples.begin()->first; } std::map Samples() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return info_.samples; } private: - rtc::CriticalSection crit_; + mutable Mutex mutex_; const int min_; const int max_; - SampleInfo info_ RTC_GUARDED_BY(crit_); + SampleInfo info_ RTC_GUARDED_BY(mutex_); RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogram); }; @@ -111,7 +112,7 @@ class RtcHistogramMap { int min, int max, int bucket_count) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto& it = map_.find(name); if (it != map_.end()) return reinterpret_cast(it->second.get()); @@ -122,7 +123,7 @@ class RtcHistogramMap { } Histogram* GetEnumerationHistogram(const std::string& name, int boundary) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto& it = map_.find(name); if (it != map_.end()) return reinterpret_cast(it->second.get()); @@ -134,7 +135,7 @@ class RtcHistogramMap { void GetAndReset( std::map>* histograms) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); for (const auto& kv : map_) { std::unique_ptr info = kv.second->GetAndReset(); if (info) @@ -144,39 +145,39 @@ class RtcHistogramMap { // Functions only for testing. void Reset() { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); for (const auto& kv : map_) kv.second->Reset(); } int NumEvents(const std::string& name, int sample) const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? 0 : it->second->NumEvents(sample); } int NumSamples(const std::string& name) const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? 0 : it->second->NumSamples(); } int MinSample(const std::string& name) const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? 
-1 : it->second->MinSample(); } std::map Samples(const std::string& name) const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); const auto& it = map_.find(name); return (it == map_.end()) ? std::map() : it->second->Samples(); } private: - rtc::CriticalSection crit_; + mutable Mutex mutex_; std::map> map_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogramMap); }; diff --git a/system_wrappers/source/metrics_unittest.cc b/system_wrappers/source/metrics_unittest.cc index 9e5bc86ba9..7532b2ad83 100644 --- a/system_wrappers/source/metrics_unittest.cc +++ b/system_wrappers/source/metrics_unittest.cc @@ -114,7 +114,8 @@ TEST_F(MetricsTest, RtcHistogramsCounts_AddSample) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(MetricsTest, RtcHistogramsCounts_InvalidIndex) { +using MetricsDeathTest = MetricsTest; +TEST_F(MetricsDeathTest, RtcHistogramsCounts_InvalidIndex) { EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(-1, "Name", kSample), ""); EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(3, "Name", kSample), ""); EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(3u, "Name", kSample), ""); diff --git a/test/BUILD.gn b/test/BUILD.gn index 464f44f60a..9fa0e006dc 100644 --- a/test/BUILD.gn +++ b/test/BUILD.gn @@ -22,22 +22,21 @@ group("test") { ":test_renderer", ":test_support", ":video_test_common", - "pc/e2e", ] if (rtc_include_tests) { deps += [ ":test_main", ":test_support_unittests", - "scenario/scenario_tests", + "pc/e2e", ] } } rtc_library("frame_generator_impl") { visibility = [ - "../api:create_frame_generator", ":*", + "../api:create_frame_generator", ] testonly = true sources = [ @@ -53,7 +52,7 @@ rtc_library("frame_generator_impl") { "../api/video:encoded_image", "../api/video:video_frame", "../api/video:video_frame_i010", - "../api/video:video_frame_i420", + "../api/video:video_frame_nv12", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", "../common_video", @@ -68,11 +67,12 @@ 
rtc_library("frame_generator_impl") { "../rtc_base:criticalsection", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_event", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", "../rtc_base/system:file_wrapper", "../system_wrappers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("frame_utils") { @@ -85,7 +85,6 @@ rtc_library("frame_utils") { deps = [ "../api:scoped_refptr", "../api/video:video_frame", - "../api/video:video_frame_i420", ] } @@ -112,8 +111,6 @@ rtc_library("video_test_common") { "../api:scoped_refptr", "../api/task_queue", "../api/video:video_frame", - "../api/video:video_frame_i010", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", "../common_video", @@ -126,9 +123,11 @@ rtc_library("video_test_common") { "../rtc_base:rtc_base_approved", "../rtc_base:rtc_task_queue", "../rtc_base:timeutils", + "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", "../system_wrappers", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (!build_with_chromium) { @@ -160,10 +159,8 @@ if (!build_with_chromium) { "platform_video_capturer.cc", "platform_video_capturer.h", ] - deps = [ - ":video_test_common", - "//third_party/abseil-cpp/absl/memory", - ] + deps = [ ":video_test_common" ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] if (is_mac || is_ios) { deps += [ ":video_test_mac" ] } else { @@ -203,6 +200,7 @@ rtc_library("rtp_test_utils") { "../rtc_base:criticalsection", "../rtc_base:logging", "../rtc_base:macromagic", + "../rtc_base/synchronization:mutex", "../rtc_base/system:arch", ] } @@ -218,25 +216,51 @@ rtc_library("field_trial") { deps = [ "../system_wrappers:field_trial" ] } +rtc_library("explicit_key_value_config") { + sources = [ + "explicit_key_value_config.cc", + "explicit_key_value_config.h", + ] + + deps = [ + 
"../api/transport:webrtc_key_value_config", + "../rtc_base:checks", + "../system_wrappers:field_trial", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ] +} + rtc_library("perf_test") { visibility = [ "*" ] testonly = true sources = [ + "testsupport/perf_result_reporter.cc", + "testsupport/perf_result_reporter.h", "testsupport/perf_test.cc", "testsupport/perf_test.h", - "testsupport/perf_test_graphjson_writer.cc", - "testsupport/perf_test_graphjson_writer.h", - "testsupport/perf_test_histogram_writer.cc", "testsupport/perf_test_histogram_writer.h", "testsupport/perf_test_result_writer.h", ] deps = [ "../api:array_view", + "../api/numerics", "../rtc_base:checks", "../rtc_base:criticalsection", + "../rtc_base:logging", "../rtc_base:rtc_numerics", - "//third_party/abseil-cpp/absl/flags:flag", + "../rtc_base/synchronization:mutex", + "../test:fileutils", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + if (rtc_enable_protobuf) { + sources += [ "testsupport/perf_test_histogram_writer.cc" ] + deps += [ + "//third_party/catapult/tracing/tracing:histogram", + "//third_party/catapult/tracing/tracing:reserved_infos", + ] + } else { + sources += [ "testsupport/perf_test_histogram_writer_no_protobuf.cc" ] + } } if (is_ios) { @@ -253,6 +277,7 @@ if (is_ios) { ":perf_test", "../sdk:helpers_objc", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] configs += [ ":test_support_objc_config" ] } @@ -309,7 +334,70 @@ rtc_source_set("test_support") { ] } +rtc_library("video_test_support") { + testonly = true + + sources = [ + "testsupport/frame_reader.h", + "testsupport/frame_writer.h", + "testsupport/mock/mock_frame_reader.h", + "testsupport/video_frame_writer.cc", + "testsupport/video_frame_writer.h", + "testsupport/y4m_frame_reader.cc", + "testsupport/y4m_frame_writer.cc", + "testsupport/yuv_frame_reader.cc", + "testsupport/yuv_frame_writer.cc", + ] + + deps = [ + ":fileutils", + ":frame_utils", + ":test_support", + 
":video_test_common", + "../api:scoped_refptr", + "../api/video:encoded_image", + "../api/video:video_frame", + "../api/video_codecs:video_codecs_api", + "../common_video", + "../media:rtc_media_base", + "../modules/video_coding:video_codec_interface", + "../modules/video_coding:video_coding_utility", + "../modules/video_coding:webrtc_h264", + "../modules/video_coding:webrtc_vp8", + "../modules/video_coding:webrtc_vp9", + "../rtc_base:checks", + "../rtc_base:criticalsection", + "../rtc_base:logging", + "../rtc_base:rtc_base_approved", + "../rtc_base:rtc_event", + "../rtc_base/synchronization:sequence_checker", + "../rtc_base/system:file_wrapper", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + + if (!is_ios) { + deps += [ "//third_party:jpeg" ] + sources += [ "testsupport/jpeg_frame_writer.cc" ] + } else { + sources += [ "testsupport/jpeg_frame_writer_ios.cc" ] + } + + if (is_android) { + deps += [ "//base" ] + } +} + if (rtc_include_tests) { + rtc_library("resources_dir_flag") { + testonly = true + visibility = [ "*" ] + sources = [ + "testsupport/resources_dir_flag.cc", + "testsupport/resources_dir_flag.h", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ] + } + rtc_library("test_main_lib") { visibility = [ "*" ] testonly = true @@ -329,8 +417,12 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_approved", "../system_wrappers:field_trial", "../system_wrappers:metrics", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -341,63 +433,21 @@ if (rtc_include_tests) { deps = [ ":test_main_lib", + ":test_support", + ] + + absl_deps = [ "//third_party/abseil-cpp/absl/debugging:failure_signal_handler", "//third_party/abseil-cpp/absl/debugging:symbolize", + "//third_party/abseil-cpp/absl/flags:parse", ] } - 
rtc_library("video_test_support") { + rtc_library("benchmark_main") { testonly = true + sources = [ "benchmark_main.cc" ] - sources = [ - "testsupport/frame_reader.h", - "testsupport/frame_writer.h", - "testsupport/mock/mock_frame_reader.h", - "testsupport/video_frame_writer.cc", - "testsupport/video_frame_writer.h", - "testsupport/y4m_frame_reader.cc", - "testsupport/y4m_frame_writer.cc", - "testsupport/yuv_frame_reader.cc", - "testsupport/yuv_frame_writer.cc", - ] - - deps = [ - ":fileutils", - ":frame_utils", - ":test_support", - ":video_test_common", - "../api:scoped_refptr", - "../api/video:encoded_image", - "../api/video:video_frame", - "../api/video:video_frame_i420", - "../api/video_codecs:video_codecs_api", - "../common_video", - "../media:rtc_media_base", - "../modules/video_coding:video_codec_interface", - "../modules/video_coding:video_coding_utility", - "../modules/video_coding:webrtc_h264", - "../modules/video_coding:webrtc_vp8", - "../modules/video_coding:webrtc_vp9", - "../rtc_base:checks", - "../rtc_base:criticalsection", - "../rtc_base:logging", - "../rtc_base:rtc_base_approved", - "../rtc_base:rtc_event", - "../rtc_base/synchronization:sequence_checker", - "../rtc_base/system:file_wrapper", - "//third_party/abseil-cpp/absl/types:optional", - ] - - if (!is_ios) { - deps += [ "//third_party:jpeg" ] - sources += [ "testsupport/jpeg_frame_writer.cc" ] - } else { - sources += [ "testsupport/jpeg_frame_writer_ios.cc" ] - } - - if (is_android) { - deps += [ "//base" ] - } + deps = [ "//third_party/google_benchmark" ] } rtc_library("test_support_test_artifacts") { @@ -410,6 +460,8 @@ if (rtc_include_tests) { ":fileutils", "../rtc_base:logging", "../rtc_base/system:file_wrapper", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", ] @@ -445,6 +497,7 @@ if (rtc_include_tests) { ":perf_test", ":rtc_expect_death", ":rtp_test_utils", + ":test_common", ":test_main", ":test_support", 
":test_support_test_artifacts", @@ -459,7 +512,6 @@ if (rtc_include_tests) { "../api/test/video:function_video_factory", "../api/video:encoded_image", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video_codecs:video_codecs_api", "../call:video_stream_api", "../common_video", @@ -474,7 +526,10 @@ if (rtc_include_tests) { "../modules/video_coding:webrtc_vp9", "../rtc_base:criticalsection", "../rtc_base:rtc_event", + "../rtc_base:rtc_task_queue", + "../rtc_base/synchronization:mutex", "../rtc_base/system:file_wrapper", + "../rtc_base/task_utils:to_queued_task", "pc/e2e:e2e_unittests", "peer_scenario/tests", "scenario:scenario_unittests", @@ -492,6 +547,7 @@ if (rtc_include_tests) { "frame_generator_unittest.cc", "rtp_file_reader_unittest.cc", "rtp_file_writer_unittest.cc", + "run_loop_unittest.cc", "testsupport/ivf_video_frame_generator_unittest.cc", "testsupport/perf_test_unittest.cc", "testsupport/test_artifacts_unittest.cc", @@ -502,6 +558,11 @@ if (rtc_include_tests) { "testsupport/yuv_frame_writer_unittest.cc", ] + if (rtc_enable_protobuf) { + sources += [ "testsupport/perf_test_histogram_writer_unittest.cc" ] + deps += [ "//third_party/catapult/tracing/tracing:histogram" ] + } + data = test_support_unittests_resources if (is_android) { deps += [ "//testing/android/native_test:native_test_support" ] @@ -564,8 +625,8 @@ rtc_library("fileutils") { ":fileutils_override_impl", "../rtc_base:checks", "../rtc_base:stringutils", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] if (is_ios) { deps += [ ":fileutils_ios_objc" ] } @@ -573,20 +634,10 @@ rtc_library("fileutils") { deps += [ ":fileutils_mac_objc" ] } if (is_win) { - deps += [ "../rtc_base" ] + deps += [ "../rtc_base:win32" ] } } -rtc_library("resources_dir_flag") { - testonly = true - visibility = [ "*" ] - sources = [ - "testsupport/resources_dir_flag.cc", - "testsupport/resources_dir_flag.h", - ] - deps = [ 
"//third_party/abseil-cpp/absl/flags:flag" ] -} - # We separate header into own target to make it possible for downstream # projects to override implementation. rtc_source_set("fileutils_override_api") { @@ -603,8 +654,8 @@ rtc_library("fileutils_override_impl") { "../rtc_base:checks", "../rtc_base:macromagic", "../rtc_base:stringutils", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] if (is_ios) { deps += [ ":fileutils_ios_objc" ] } @@ -612,7 +663,7 @@ rtc_library("fileutils_override_impl") { deps += [ ":fileutils_mac_objc" ] } if (is_win) { - deps += [ "../rtc_base" ] + deps += [ "../rtc_base:win32" ] } } @@ -654,8 +705,8 @@ rtc_library("fileutils_unittests") { ":fileutils", ":test_support", "../rtc_base:checks", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("direct_transport") { @@ -675,10 +726,11 @@ rtc_library("direct_transport") { "../call:simulated_packet_receiver", "../rtc_base:macromagic", "../rtc_base:timeutils", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", "../rtc_base/task_utils:repeating_task", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] public_deps = # no-presubmit-check TODO(webrtc:8603) [ "../call:fake_network" ] } @@ -705,11 +757,9 @@ rtc_library("fake_video_codecs") { "../api/video:encoded_image", "../api/video:video_bitrate_allocation", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", "../api/video_codecs:vp8_temporal_layers_factory", - "../modules:module_api", "../modules/video_coding:codec_globals_headers", "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding_utility", @@ -718,10 +768,11 @@ rtc_library("fake_video_codecs") { "../rtc_base:macromagic", 
"../rtc_base:rtc_task_queue", "../rtc_base:timeutils", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", "../system_wrappers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("null_transport") { @@ -765,31 +816,19 @@ rtc_library("test_common") { "fake_videorenderer.h", "layer_filtering_transport.cc", "layer_filtering_transport.h", - "mock_transport.cc", - "mock_transport.h", "rtp_rtcp_observer.h", + "run_loop.cc", + "run_loop.h", "video_decoder_proxy_factory.h", "video_encoder_proxy_factory.h", ] - if (current_os != "winuwp") { - # The filtering of *_win.cc is not done for WinUWP (intentionally) as - # most _win.cc files are compatible with WinUWP. However, the - # peek/dispatch Win32 runloops are entirely WinUWP incompatible thus - # WinUWP uses the generic runloop as defined for non-Windows targets. - sources += [ "win/run_loop_win.cc" ] - } - if (!is_win || current_os == "winuwp") { - sources += [ - "run_loop.cc", - "run_loop.h", - ] - } deps = [ ":direct_transport", ":encoder_settings", ":fake_video_codecs", ":fileutils", + ":mock_transport", ":rtp_test_utils", ":test_support", ":video_test_common", @@ -823,17 +862,31 @@ rtc_library("test_common") { "../modules/video_coding:codec_globals_headers", "../rtc_base:checks", "../rtc_base:criticalsection", + "../rtc_base:rtc_base", "../rtc_base:rtc_event", "../rtc_base:task_queue_for_test", + "../rtc_base/task_utils:to_queued_task", "../system_wrappers", "../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] if (!is_android && !build_with_chromium) { deps += [ "../modules/video_capture:video_capture_internal_impl" ] } } +rtc_library("mock_transport") { + testonly = true + sources = [ + "mock_transport.cc", + "mock_transport.h", + ] + deps = [ + ":test_support", + "../api:transport_api", + ] +} + 
rtc_source_set("test_renderer") { public_deps = # no-presubmit-check TODO(webrtc:8603) [ ":test_renderer_generic" ] @@ -844,6 +897,28 @@ rtc_source_set("test_renderer") { } } +rtc_library("mock_frame_transformer") { + visibility = [ "*" ] + testonly = true + sources = [ "mock_frame_transformer.h" ] + deps = [ + "../api:frame_transformer_interface", + "../test:test_support", + ] +} + +rtc_library("mock_transformable_frame") { + visibility = [ "*" ] + + testonly = true + sources = [ "mock_transformable_frame.h" ] + + deps = [ + "../api:frame_transformer_interface", + "../test:test_support", + ] +} + if (is_mac) { rtc_library("test_renderer_objc") { testonly = true @@ -856,7 +931,7 @@ if (is_mac) { ":test_renderer_generic", "../rtc_base:rtc_base_approved", ] - libs = [ + frameworks = [ "Cocoa.framework", "OpenGL.framework", "CoreVideo.framework", @@ -887,17 +962,17 @@ rtc_library("test_renderer_generic") { ] deps += [ "../api:scoped_refptr" ] } - if (!(is_linux && rtc_use_x11) && !is_mac && !is_win) { + if (!((is_linux || is_chromeos) && rtc_use_x11) && !is_mac && !is_win) { sources += [ "null_platform_renderer.cc" ] } - if ((is_linux && rtc_use_x11) || is_mac) { + if (((is_linux || is_chromeos) && rtc_use_x11) || is_mac) { sources += [ "gl/gl_renderer.cc", "gl/gl_renderer.h", ] } - if (is_linux && rtc_use_x11) { + if ((is_linux || is_chromeos) && rtc_use_x11) { sources += [ "linux/glx_renderer.cc", "linux/glx_renderer.h", @@ -940,8 +1015,8 @@ rtc_library("audio_codec_mocks") { "../api/audio_codecs:builtin_audio_decoder_factory", "../rtc_base:checks", "../rtc_base:refcount", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("copy_to_file_audio_capturer") { @@ -955,8 +1030,8 @@ rtc_library("copy_to_file_audio_capturer") { "../common_audio", "../modules/audio_device:audio_device_impl", "../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ 
"//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("copy_to_file_audio_capturer_unittest") { diff --git a/test/DEPS b/test/DEPS index 0f4fd2fcce..2cbb1d2dc3 100644 --- a/test/DEPS +++ b/test/DEPS @@ -6,6 +6,7 @@ include_rules = [ "+common_video", "+logging/rtc_event_log", "+media/base", + "+media/sctp", "+media/engine", "+modules/audio_coding", "+modules/congestion_controller", @@ -54,6 +55,10 @@ specific_include_rules = { "+pc", "+p2p", ], + ".*test_peer_factory\.(h|cc)": [ + "+pc", + "+p2p", + ], ".*network_emulation_pc_unittest\.cc": [ "+pc/peer_connection_wrapper.h", "+pc/test/mock_peer_connection_observers.h", @@ -65,5 +70,11 @@ specific_include_rules = { ".*sdp_changer\.(h|cc)": [ "+pc", "+p2p", + ], + ".*test_video_capturer_video_track_source.h": [ + "+pc", + ], + "benchmark_main\.cc": [ + "+benchmark", ] } diff --git a/test/OWNERS b/test/OWNERS index 33189a6b2b..6f8099845b 100644 --- a/test/OWNERS +++ b/test/OWNERS @@ -1,5 +1,4 @@ nisse@webrtc.org -phoglund@webrtc.org sprang@webrtc.org srte@webrtc.org stefan@webrtc.org diff --git a/test/android/AndroidManifest.xml b/test/android/AndroidManifest.xml index c1ddfd4a02..ee2fec8716 100644 --- a/test/android/AndroidManifest.xml +++ b/test/android/AndroidManifest.xml @@ -14,7 +14,7 @@ be found in the AUTHORS file in the root of the source tree. android:versionCode="1" android:versionName="1.0"> - + diff --git a/test/audio_decoder_proxy_factory.h b/test/audio_decoder_proxy_factory.h index 821e6c6fb8..04f397f684 100644 --- a/test/audio_decoder_proxy_factory.h +++ b/test/audio_decoder_proxy_factory.h @@ -21,7 +21,7 @@ namespace webrtc { namespace test { -// An decoder factory with a single underlying AudioDecoder object, intended for +// A decoder factory with a single underlying AudioDecoder object, intended for // test purposes. Each call to MakeAudioDecoder returns a proxy for the same // decoder, typically a mock or fake decoder. 
class AudioDecoderProxyFactory : public AudioDecoderFactory { diff --git a/test/benchmark_main.cc b/test/benchmark_main.cc new file mode 100644 index 0000000000..1a79c24913 --- /dev/null +++ b/test/benchmark_main.cc @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "benchmark/benchmark.h" + +int main(int argc, char* argv[]) { + benchmark::Initialize(&argc, argv); + benchmark::RunSpecifiedBenchmarks(); + return 0; +} diff --git a/test/call_test.cc b/test/call_test.cc index a230e025e0..dd7c576ef9 100644 --- a/test/call_test.cc +++ b/test/call_test.cc @@ -43,7 +43,7 @@ CallTest::CallTest() fake_encoder_factory_([this]() { std::unique_ptr fake_encoder; if (video_encoder_configs_[0].codec_type == kVideoCodecVP8) { - fake_encoder = std::make_unique(clock_); + fake_encoder = std::make_unique(clock_); } else { fake_encoder = std::make_unique(clock_); } @@ -257,7 +257,7 @@ void CallTest::CreateVideoSendConfig(VideoSendStream::Config* video_config, &video_config->rtp.extensions); AddRtpExtensionByUri(RtpExtension::kGenericFrameDescriptorUri00, &video_config->rtp.extensions); - AddRtpExtensionByUri(RtpExtension::kGenericFrameDescriptorUri01, + AddRtpExtensionByUri(RtpExtension::kDependencyDescriptorUri, &video_config->rtp.extensions); if (video_encoder_configs_.empty()) { video_encoder_configs_.emplace_back(); @@ -388,9 +388,9 @@ void CallTest::AddMatchingVideoReceiveConfigs( decoder.video_format = SdpVideoFormat(video_send_config.rtp.payload_name); // Force fake decoders on non-selected simulcast streams. 
if (!decode_sub_stream || i == *decode_sub_stream) { - decoder.decoder_factory = decoder_factory; + video_recv_config.decoder_factory = decoder_factory; } else { - decoder.decoder_factory = &fake_decoder_factory_; + video_recv_config.decoder_factory = &fake_decoder_factory_; } video_recv_config.decoders.push_back(decoder); receive_configs->emplace_back(std::move(video_recv_config)); diff --git a/test/call_test.h b/test/call_test.h index 3f4aa072e7..4b26097b6c 100644 --- a/test/call_test.h +++ b/test/call_test.h @@ -31,6 +31,7 @@ #include "test/fake_vp8_encoder.h" #include "test/frame_generator_capturer.h" #include "test/rtp_rtcp_observer.h" +#include "test/run_loop.h" namespace webrtc { namespace test { @@ -176,6 +177,8 @@ class CallTest : public ::testing::Test { FlexfecReceiveStream::Config* GetFlexFecConfig(); TaskQueueBase* task_queue() { return task_queue_.get(); } + test::RunLoop loop_; + Clock* const clock_; const FieldTrialBasedConfig field_trials_; diff --git a/test/configurable_frame_size_encoder.cc b/test/configurable_frame_size_encoder.cc index dd259456fd..e3965ef770 100644 --- a/test/configurable_frame_size_encoder.cc +++ b/test/configurable_frame_size_encoder.cc @@ -17,7 +17,6 @@ #include #include "api/video/encoded_image.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" @@ -28,12 +27,8 @@ namespace test { ConfigurableFrameSizeEncoder::ConfigurableFrameSizeEncoder( size_t max_frame_size) : callback_(NULL), - max_frame_size_(max_frame_size), current_frame_size_(max_frame_size), - buffer_(new uint8_t[max_frame_size]), - codec_type_(kVideoCodecGeneric) { - memset(buffer_.get(), 0, max_frame_size); -} + codec_type_(kVideoCodecGeneric) {} ConfigurableFrameSizeEncoder::~ConfigurableFrameSizeEncoder() {} @@ -51,18 +46,18 @@ int32_t ConfigurableFrameSizeEncoder::InitEncode( int32_t 
ConfigurableFrameSizeEncoder::Encode( const VideoFrame& inputImage, const std::vector* frame_types) { - EncodedImage encodedImage(buffer_.get(), current_frame_size_, - max_frame_size_); - encodedImage._completeFrame = true; + EncodedImage encodedImage; + auto buffer = EncodedImageBuffer::Create(current_frame_size_); + memset(buffer->data(), 0, current_frame_size_); + encodedImage.SetEncodedData(buffer); encodedImage._encodedHeight = inputImage.height(); encodedImage._encodedWidth = inputImage.width(); encodedImage._frameType = VideoFrameType::kVideoFrameKey; encodedImage.SetTimestamp(inputImage.timestamp()); encodedImage.capture_time_ms_ = inputImage.render_time_ms(); - RTPFragmentationHeader* fragmentation = NULL; CodecSpecificInfo specific{}; specific.codecType = codec_type_; - callback_->OnEncodedImage(encodedImage, &specific, fragmentation); + callback_->OnEncodedImage(encodedImage, &specific); if (post_encode_callback_) { (*post_encode_callback_)(); } @@ -83,7 +78,6 @@ void ConfigurableFrameSizeEncoder::SetRates( const RateControlParameters& parameters) {} int32_t ConfigurableFrameSizeEncoder::SetFrameSize(size_t size) { - RTC_DCHECK_LE(size, max_frame_size_); current_frame_size_ = size; return WEBRTC_VIDEO_CODEC_OK; } diff --git a/test/configurable_frame_size_encoder.h b/test/configurable_frame_size_encoder.h index 0ffe3b22cd..8dd5157b5b 100644 --- a/test/configurable_frame_size_encoder.h +++ b/test/configurable_frame_size_encoder.h @@ -60,9 +60,7 @@ class ConfigurableFrameSizeEncoder : public VideoEncoder { EncodedImageCallback* callback_; absl::optional> post_encode_callback_; - const size_t max_frame_size_; size_t current_frame_size_; - std::unique_ptr buffer_; VideoCodecType codec_type_; }; diff --git a/test/direct_transport.cc b/test/direct_transport.cc index 84273b0830..9c7a8f88d0 100644 --- a/test/direct_transport.cc +++ b/test/direct_transport.cc @@ -55,7 +55,6 @@ DirectTransport::~DirectTransport() { } void 
DirectTransport::SetReceiver(PacketReceiver* receiver) { - rtc::CritScope cs(&process_lock_); fake_network_->SetReceiver(receiver); } @@ -84,7 +83,7 @@ void DirectTransport::SendPacket(const uint8_t* data, size_t length) { int64_t send_time_us = rtc::TimeMicros(); fake_network_->DeliverPacket(media_type, rtc::CopyOnWriteBuffer(data, length), send_time_us); - rtc::CritScope cs(&process_lock_); + MutexLock lock(&process_lock_); if (!next_process_task_.Running()) ProcessPackets(); } @@ -108,12 +107,12 @@ void DirectTransport::ProcessPackets() { return; next_process_task_ = RepeatingTaskHandle::DelayedStart( - task_queue_, TimeDelta::ms(*initial_delay_ms), [this] { + task_queue_, TimeDelta::Millis(*initial_delay_ms), [this] { fake_network_->Process(); if (auto delay_ms = fake_network_->TimeUntilNextProcess()) - return TimeDelta::ms(*delay_ms); + return TimeDelta::Millis(*delay_ms); // Otherwise stop the task. - rtc::CritScope cs(&process_lock_); + MutexLock lock(&process_lock_); next_process_task_.Stop(); // Since this task is stopped, return value doesn't matter. 
return TimeDelta::Zero(); diff --git a/test/direct_transport.h b/test/direct_transport.h index e0b2251eea..2fc3b7f76b 100644 --- a/test/direct_transport.h +++ b/test/direct_transport.h @@ -17,6 +17,7 @@ #include "api/test/simulated_network.h" #include "call/call.h" #include "call/simulated_packet_receiver.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -66,7 +67,7 @@ class DirectTransport : public Transport { TaskQueueBase* const task_queue_; - rtc::CriticalSection process_lock_; + Mutex process_lock_; RepeatingTaskHandle next_process_task_ RTC_GUARDED_BY(&process_lock_); const Demuxer demuxer_; diff --git a/test/encoder_settings.cc b/test/encoder_settings.cc index ff3b096ef6..f90931a83c 100644 --- a/test/encoder_settings.cc +++ b/test/encoder_settings.cc @@ -10,7 +10,6 @@ #include "test/encoder_settings.h" #include -#include #include "api/scoped_refptr.h" #include "api/video_codecs/sdp_video_format.h" @@ -54,62 +53,46 @@ std::vector CreateVideoStreams( stream_settings[i].height = (i + 1) * height / encoder_config.number_of_streams; stream_settings[i].max_framerate = 30; + stream_settings[i].max_qp = 56; stream_settings[i].min_bitrate_bps = DefaultVideoStreamFactory::kDefaultMinBitratePerStream[i]; - int target_bitrate_bps = -1; - int max_bitrate_bps = -1; - // Use configured values instead of default values if values has been - // configured. - if (i < encoder_config.simulcast_layers.size()) { - const VideoStream& stream = encoder_config.simulcast_layers[i]; - - max_bitrate_bps = - stream.max_bitrate_bps > 0 - ? stream.max_bitrate_bps - : DefaultVideoStreamFactory::kMaxBitratePerStream[i]; - max_bitrate_bps = std::min(bitrate_left_bps, max_bitrate_bps); - - target_bitrate_bps = - stream.target_bitrate_bps > 0 - ? 
stream.target_bitrate_bps - : DefaultVideoStreamFactory::kMaxBitratePerStream[i]; - target_bitrate_bps = std::min(max_bitrate_bps, target_bitrate_bps); - - if (stream.min_bitrate_bps > 0) { - RTC_DCHECK_LE(stream.min_bitrate_bps, target_bitrate_bps); - stream_settings[i].min_bitrate_bps = stream.min_bitrate_bps; - } - if (stream.max_framerate > 0) { - stream_settings[i].max_framerate = stream.max_framerate; - } - if (stream.num_temporal_layers) { - RTC_DCHECK_GE(*stream.num_temporal_layers, 1); - stream_settings[i].num_temporal_layers = stream.num_temporal_layers; - } - if (stream.scale_resolution_down_by >= 1.0) { - stream_settings[i].width = width / stream.scale_resolution_down_by; - stream_settings[i].height = height / stream.scale_resolution_down_by; - } - } else { - max_bitrate_bps = std::min( - bitrate_left_bps, DefaultVideoStreamFactory::kMaxBitratePerStream[i]); - target_bitrate_bps = max_bitrate_bps; + // Use configured values instead of default values if set. + const VideoStream stream = (i < encoder_config.simulcast_layers.size()) + ? encoder_config.simulcast_layers[i] + : VideoStream(); + + int max_bitrate_bps = + stream.max_bitrate_bps > 0 + ? stream.max_bitrate_bps + : DefaultVideoStreamFactory::kMaxBitratePerStream[i]; + max_bitrate_bps = std::min(bitrate_left_bps, max_bitrate_bps); + + int target_bitrate_bps = + stream.target_bitrate_bps > 0 + ? 
stream.target_bitrate_bps + : DefaultVideoStreamFactory::kMaxBitratePerStream[i]; + target_bitrate_bps = std::min(max_bitrate_bps, target_bitrate_bps); + + if (stream.min_bitrate_bps > 0) { + RTC_DCHECK_LE(stream.min_bitrate_bps, target_bitrate_bps); + stream_settings[i].min_bitrate_bps = stream.min_bitrate_bps; + } + if (stream.max_framerate > 0) { + stream_settings[i].max_framerate = stream.max_framerate; + } + if (stream.num_temporal_layers) { + RTC_DCHECK_GE(*stream.num_temporal_layers, 1); + stream_settings[i].num_temporal_layers = stream.num_temporal_layers; + } + if (stream.scale_resolution_down_by >= 1.0) { + stream_settings[i].width = width / stream.scale_resolution_down_by; + stream_settings[i].height = height / stream.scale_resolution_down_by; } - - RTC_DCHECK_NE(target_bitrate_bps, -1); - RTC_DCHECK_NE(max_bitrate_bps, -1); stream_settings[i].target_bitrate_bps = target_bitrate_bps; stream_settings[i].max_bitrate_bps = max_bitrate_bps; - stream_settings[i].max_qp = 56; + stream_settings[i].active = stream.active; - if (i < encoder_config.simulcast_layers.size()) { - // Higher level controls are setting the active configuration for the - // VideoStream. - stream_settings[i].active = encoder_config.simulcast_layers[i].active; - } else { - stream_settings[i].active = true; - } bitrate_left_bps -= stream_settings[i].target_bitrate_bps; } diff --git a/test/explicit_key_value_config.cc b/test/explicit_key_value_config.cc new file mode 100644 index 0000000000..69f725a9e2 --- /dev/null +++ b/test/explicit_key_value_config.cc @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "test/explicit_key_value_config.h" + +#include "api/transport/webrtc_key_value_config.h" +#include "rtc_base/checks.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +namespace test { + +ExplicitKeyValueConfig::ExplicitKeyValueConfig(const std::string& s) { + std::string::size_type field_start = 0; + while (field_start < s.size()) { + std::string::size_type separator_pos = s.find('/', field_start); + RTC_CHECK_NE(separator_pos, std::string::npos) + << "Missing separator '/' after field trial key."; + RTC_CHECK_GT(separator_pos, field_start) + << "Field trial key cannot be empty."; + std::string key = s.substr(field_start, separator_pos - field_start); + field_start = separator_pos + 1; + + RTC_CHECK_LT(field_start, s.size()) + << "Missing value after field trial key. String ended."; + separator_pos = s.find('/', field_start); + RTC_CHECK_NE(separator_pos, std::string::npos) + << "Missing terminating '/' in field trial string."; + RTC_CHECK_GT(separator_pos, field_start) + << "Field trial value cannot be empty."; + std::string value = s.substr(field_start, separator_pos - field_start); + field_start = separator_pos + 1; + + key_value_map_[key] = value; + } + // This check is technically redundant due to earlier checks. + // We nevertheless keep the check to make it clear that the entire + // string has been processed, and without indexing past the end. + RTC_CHECK_EQ(field_start, s.size()); +} + +std::string ExplicitKeyValueConfig::Lookup(absl::string_view key) const { + auto it = key_value_map_.find(std::string(key)); + if (it != key_value_map_.end()) + return it->second; + return ""; +} + +} // namespace test +} // namespace webrtc diff --git a/test/explicit_key_value_config.h b/test/explicit_key_value_config.h new file mode 100644 index 0000000000..9a3bc84f60 --- /dev/null +++ b/test/explicit_key_value_config.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_EXPLICIT_KEY_VALUE_CONFIG_H_ +#define TEST_EXPLICIT_KEY_VALUE_CONFIG_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/transport/webrtc_key_value_config.h" + +namespace webrtc { +namespace test { + +class ExplicitKeyValueConfig : public WebRtcKeyValueConfig { + public: + explicit ExplicitKeyValueConfig(const std::string& s); + std::string Lookup(absl::string_view key) const override; + + private: + std::map key_value_map_; +}; + +} // namespace test +} // namespace webrtc + +#endif // TEST_EXPLICIT_KEY_VALUE_CONFIG_H_ diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc index 64b4a4e9ff..814be280ec 100644 --- a/test/fake_encoder.cc +++ b/test/fake_encoder.cc @@ -67,19 +67,19 @@ void FakeEncoder::SetFecControllerOverride( void FakeEncoder::SetMaxBitrate(int max_kbps) { RTC_DCHECK_GE(max_kbps, -1); // max_kbps == -1 disables it. 
- rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); max_target_bitrate_kbps_ = max_kbps; - SetRates(current_rate_settings_); + SetRatesLocked(current_rate_settings_); } void FakeEncoder::SetQp(int qp) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); qp_ = qp; } int32_t FakeEncoder::InitEncode(const VideoCodec* config, const Settings& settings) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); config_ = *config; current_rate_settings_.bitrate.SetBitrate(0, 0, config_.startBitrate * 1000); current_rate_settings_.framerate_fps = config_.maxFramerate; @@ -92,7 +92,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, const std::vector* frame_types) { unsigned char max_framerate; unsigned char num_simulcast_streams; - SimulcastStream simulcast_streams[kMaxSimulcastStreams]; + SpatialLayer simulcast_streams[kMaxSimulcastStreams]; EncodedImageCallback* callback; RateControlParameters rates; VideoCodecMode mode; @@ -100,7 +100,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, uint32_t counter; absl::optional qp; { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); max_framerate = config_.maxFramerate; num_simulcast_streams = config_.numberOfSimulcastStreams; for (int i = 0; i < num_simulcast_streams; ++i) { @@ -128,14 +128,15 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, continue; } - EncodedImage encoded; - encoded.SetEncodedData( - EncodedImageBuffer::Create(frame_info.layers[i].size)); - + auto buffer = EncodedImageBuffer::Create(frame_info.layers[i].size); // Fill the buffer with arbitrary data. Write someting to make Asan happy. - memset(encoded.data(), 9, frame_info.layers[i].size); + memset(buffer->data(), 9, frame_info.layers[i].size); // Write a counter to the image to make each frame unique. 
- WriteCounter(encoded.data() + frame_info.layers[i].size - 4, counter); + WriteCounter(buffer->data() + frame_info.layers[i].size - 4, counter); + + EncodedImage encoded; + encoded.SetEncodedData(buffer); + encoded.SetTimestamp(input_image.timestamp()); encoded._frameType = frame_info.keyframe ? VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; @@ -144,23 +145,22 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, if (qp) encoded.qp_ = *qp; encoded.SetSpatialIndex(i); - CodecSpecificInfo codec_specific; - std::unique_ptr fragmentation = - EncodeHook(&encoded, &codec_specific); + CodecSpecificInfo codec_specific = EncodeHook(encoded, buffer); - if (callback->OnEncodedImage(encoded, &codec_specific, fragmentation.get()) - .error != EncodedImageCallback::Result::OK) { + if (callback->OnEncodedImage(encoded, &codec_specific).error != + EncodedImageCallback::Result::OK) { return -1; } } return 0; } -std::unique_ptr FakeEncoder::EncodeHook( - EncodedImage* encoded_image, - CodecSpecificInfo* codec_specific) { - codec_specific->codecType = kVideoCodecGeneric; - return nullptr; +CodecSpecificInfo FakeEncoder::EncodeHook( + EncodedImage& encoded_image, + rtc::scoped_refptr buffer) { + CodecSpecificInfo codec_specific; + codec_specific.codecType = kVideoCodecGeneric; + return codec_specific; } FakeEncoder::FrameInfo FakeEncoder::NextFrame( @@ -168,7 +168,7 @@ FakeEncoder::FrameInfo FakeEncoder::NextFrame( bool keyframe, uint8_t num_simulcast_streams, const VideoBitrateAllocation& target_bitrate, - SimulcastStream simulcast_streams[kMaxSimulcastStreams], + SpatialLayer simulcast_streams[kMaxSimulcastStreams], int framerate) { FrameInfo frame_info; frame_info.keyframe = keyframe; @@ -182,7 +182,7 @@ FakeEncoder::FrameInfo FakeEncoder::NextFrame( } } - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); for (uint8_t i = 0; i < num_simulcast_streams; ++i) { if (target_bitrate.GetBitrate(i, 0) > 0) { int temporal_id = 
last_frame_info_.layers.size() > i @@ -232,7 +232,7 @@ FakeEncoder::FrameInfo FakeEncoder::NextFrame( int32_t FakeEncoder::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); callback_ = callback; return 0; } @@ -242,7 +242,11 @@ int32_t FakeEncoder::Release() { } void FakeEncoder::SetRates(const RateControlParameters& parameters) { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); + SetRatesLocked(parameters); +} + +void FakeEncoder::SetRatesLocked(const RateControlParameters& parameters) { current_rate_settings_ = parameters; int allocated_bitrate_kbps = parameters.bitrate.get_sum_kbps(); @@ -272,74 +276,75 @@ const char* FakeEncoder::kImplementationName = "fake_encoder"; VideoEncoder::EncoderInfo FakeEncoder::GetEncoderInfo() const { EncoderInfo info; info.implementation_name = kImplementationName; + MutexLock lock(&mutex_); + for (int sid = 0; sid < config_.numberOfSimulcastStreams; ++sid) { + int number_of_temporal_layers = + config_.simulcastStream[sid].numberOfTemporalLayers; + info.fps_allocation[sid].clear(); + for (int tid = 0; tid < number_of_temporal_layers; ++tid) { + // {1/4, 1/2, 1} allocation for num layers = 3. 
+ info.fps_allocation[sid].push_back(255 / + (number_of_temporal_layers - tid)); + } + } return info; } int FakeEncoder::GetConfiguredInputFramerate() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return static_cast(current_rate_settings_.framerate_fps + 0.5); } FakeH264Encoder::FakeH264Encoder(Clock* clock) : FakeEncoder(clock), idr_counter_(0) {} -std::unique_ptr FakeH264Encoder::EncodeHook( - EncodedImage* encoded_image, - CodecSpecificInfo* codec_specific) { +CodecSpecificInfo FakeH264Encoder::EncodeHook( + EncodedImage& encoded_image, + rtc::scoped_refptr buffer) { + static constexpr std::array kStartCode = {0, 0, 1}; const size_t kSpsSize = 8; const size_t kPpsSize = 11; const int kIdrFrequency = 10; int current_idr_counter; { - rtc::CritScope cs(&local_crit_sect_); + MutexLock lock(&local_mutex_); current_idr_counter = idr_counter_; ++idr_counter_; } - auto fragmentation = std::make_unique(); + for (size_t i = 0; i < encoded_image.size(); ++i) { + buffer->data()[i] = static_cast(i); + } if (current_idr_counter % kIdrFrequency == 0 && - encoded_image->size() > kSpsSize + kPpsSize + 1) { - const size_t kNumSlices = 3; - fragmentation->VerifyAndAllocateFragmentationHeader(kNumSlices); - fragmentation->fragmentationOffset[0] = 0; - fragmentation->fragmentationLength[0] = kSpsSize; - fragmentation->fragmentationOffset[1] = kSpsSize; - fragmentation->fragmentationLength[1] = kPpsSize; - fragmentation->fragmentationOffset[2] = kSpsSize + kPpsSize; - fragmentation->fragmentationLength[2] = - encoded_image->size() - (kSpsSize + kPpsSize); + encoded_image.size() > kSpsSize + kPpsSize + 1 + 3 * kStartCode.size()) { const size_t kSpsNalHeader = 0x67; const size_t kPpsNalHeader = 0x68; const size_t kIdrNalHeader = 0x65; - encoded_image->data()[fragmentation->fragmentationOffset[0]] = - kSpsNalHeader; - encoded_image->data()[fragmentation->fragmentationOffset[1]] = - kPpsNalHeader; - encoded_image->data()[fragmentation->fragmentationOffset[2]] = - 
kIdrNalHeader; + uint8_t* data = buffer->data(); + memcpy(data, kStartCode.data(), kStartCode.size()); + data += kStartCode.size(); + data[0] = kSpsNalHeader; + data += kSpsSize; + + memcpy(data, kStartCode.data(), kStartCode.size()); + data += kStartCode.size(); + data[0] = kPpsNalHeader; + data += kPpsSize; + + memcpy(data, kStartCode.data(), kStartCode.size()); + data += kStartCode.size(); + data[0] = kIdrNalHeader; } else { - const size_t kNumSlices = 1; - fragmentation->VerifyAndAllocateFragmentationHeader(kNumSlices); - fragmentation->fragmentationOffset[0] = 0; - fragmentation->fragmentationLength[0] = encoded_image->size(); + memcpy(buffer->data(), kStartCode.data(), kStartCode.size()); const size_t kNalHeader = 0x41; - encoded_image->data()[fragmentation->fragmentationOffset[0]] = kNalHeader; + buffer->data()[kStartCode.size()] = kNalHeader; } - uint8_t value = 0; - int fragment_counter = 0; - for (size_t i = 0; i < encoded_image->size(); ++i) { - if (fragment_counter == fragmentation->fragmentationVectorSize || - i != fragmentation->fragmentationOffset[fragment_counter]) { - encoded_image->data()[i] = value++; - } else { - ++fragment_counter; - } - } - codec_specific->codecType = kVideoCodecH264; - codec_specific->codecSpecific.H264.packetization_mode = - H264PacketizationMode::NonInterleaved; - return fragmentation; + CodecSpecificInfo codec_specific; + codec_specific.codecType = kVideoCodecH264; + codec_specific.codecSpecific.H264.packetization_mode = + H264PacketizationMode::NonInterleaved; + return codec_specific; } DelayedEncoder::DelayedEncoder(Clock* clock, int delay_ms) diff --git a/test/fake_encoder.h b/test/fake_encoder.h index 39838d16f1..abd3134154 100644 --- a/test/fake_encoder.h +++ b/test/fake_encoder.h @@ -24,9 +24,8 @@ #include "api/video/video_frame.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "modules/include/module_common_types.h" #include 
"modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -40,21 +39,23 @@ class FakeEncoder : public VideoEncoder { virtual ~FakeEncoder() = default; // Sets max bitrate. Not thread-safe, call before registering the encoder. - void SetMaxBitrate(int max_kbps); - void SetQp(int qp); + void SetMaxBitrate(int max_kbps) RTC_LOCKS_EXCLUDED(mutex_); + void SetQp(int qp) RTC_LOCKS_EXCLUDED(mutex_); void SetFecControllerOverride( FecControllerOverride* fec_controller_override) override; - int32_t InitEncode(const VideoCodec* config, - const Settings& settings) override; + int32_t InitEncode(const VideoCodec* config, const Settings& settings) + RTC_LOCKS_EXCLUDED(mutex_) override; int32_t Encode(const VideoFrame& input_image, - const std::vector* frame_types) override; - int32_t RegisterEncodeCompleteCallback( - EncodedImageCallback* callback) override; + const std::vector* frame_types) + RTC_LOCKS_EXCLUDED(mutex_) override; + int32_t RegisterEncodeCompleteCallback(EncodedImageCallback* callback) + RTC_LOCKS_EXCLUDED(mutex_) override; int32_t Release() override; - void SetRates(const RateControlParameters& parameters) override; - int GetConfiguredInputFramerate() const; + void SetRates(const RateControlParameters& parameters) + RTC_LOCKS_EXCLUDED(mutex_) override; + int GetConfiguredInputFramerate() const RTC_LOCKS_EXCLUDED(mutex_); EncoderInfo GetEncoderInfo() const override; static const char* kImplementationName; @@ -78,28 +79,31 @@ class FakeEncoder : public VideoEncoder { bool keyframe, uint8_t num_simulcast_streams, const VideoBitrateAllocation& target_bitrate, - SimulcastStream simulcast_streams[kMaxSimulcastStreams], - int framerate); + SpatialLayer simulcast_streams[kMaxSimulcastStreams], + int framerate) RTC_LOCKS_EXCLUDED(mutex_); // Called 
before the frame is passed to callback_->OnEncodedImage, to let - // subclasses fill out codec_specific, possibly modify encodedImage. - // Returns an RTPFragmentationHeader, if needed by the codec. - virtual std::unique_ptr EncodeHook( - EncodedImage* encoded_image, - CodecSpecificInfo* codec_specific); + // subclasses fill out CodecSpecificInfo, possibly modify |encoded_image| or + // |buffer|. + virtual CodecSpecificInfo EncodeHook( + EncodedImage& encoded_image, + rtc::scoped_refptr buffer); + + void SetRatesLocked(const RateControlParameters& parameters) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - FrameInfo last_frame_info_ RTC_GUARDED_BY(crit_sect_); + FrameInfo last_frame_info_ RTC_GUARDED_BY(mutex_); Clock* const clock_; - VideoCodec config_ RTC_GUARDED_BY(crit_sect_); - EncodedImageCallback* callback_ RTC_GUARDED_BY(crit_sect_); - RateControlParameters current_rate_settings_ RTC_GUARDED_BY(crit_sect_); - int max_target_bitrate_kbps_ RTC_GUARDED_BY(crit_sect_); - bool pending_keyframe_ RTC_GUARDED_BY(crit_sect_); - uint32_t counter_ RTC_GUARDED_BY(crit_sect_); - rtc::CriticalSection crit_sect_; + VideoCodec config_ RTC_GUARDED_BY(mutex_); + EncodedImageCallback* callback_ RTC_GUARDED_BY(mutex_); + RateControlParameters current_rate_settings_ RTC_GUARDED_BY(mutex_); + int max_target_bitrate_kbps_ RTC_GUARDED_BY(mutex_); + bool pending_keyframe_ RTC_GUARDED_BY(mutex_); + uint32_t counter_ RTC_GUARDED_BY(mutex_); + mutable Mutex mutex_; bool used_layers_[kMaxSimulcastStreams]; - absl::optional qp_ RTC_GUARDED_BY(crit_sect_); + absl::optional qp_ RTC_GUARDED_BY(mutex_); // Current byte debt to be payed over a number of frames. // The debt is acquired by keyframes overshooting the bitrate target. 
@@ -112,12 +116,12 @@ class FakeH264Encoder : public FakeEncoder { virtual ~FakeH264Encoder() = default; private: - std::unique_ptr EncodeHook( - EncodedImage* encoded_image, - CodecSpecificInfo* codec_specific) override; + CodecSpecificInfo EncodeHook( + EncodedImage& encoded_image, + rtc::scoped_refptr buffer) override; - int idr_counter_ RTC_GUARDED_BY(local_crit_sect_); - rtc::CriticalSection local_crit_sect_; + int idr_counter_ RTC_GUARDED_BY(local_mutex_); + Mutex local_mutex_; }; class DelayedEncoder : public test::FakeEncoder { diff --git a/test/fake_vp8_encoder.cc b/test/fake_vp8_encoder.cc index 60bc36c570..a24fab81bb 100644 --- a/test/fake_vp8_encoder.cc +++ b/test/fake_vp8_encoder.cc @@ -45,11 +45,11 @@ namespace webrtc { namespace test { -FakeVP8Encoder::FakeVP8Encoder(Clock* clock) : FakeEncoder(clock) { +FakeVp8Encoder::FakeVp8Encoder(Clock* clock) : FakeEncoder(clock) { sequence_checker_.Detach(); } -int32_t FakeVP8Encoder::InitEncode(const VideoCodec* config, +int32_t FakeVp8Encoder::InitEncode(const VideoCodec* config, const Settings& settings) { RTC_DCHECK_RUN_ON(&sequence_checker_); auto result = FakeEncoder::InitEncode(config, settings); @@ -64,52 +64,65 @@ int32_t FakeVP8Encoder::InitEncode(const VideoCodec* config, return WEBRTC_VIDEO_CODEC_OK; } -int32_t FakeVP8Encoder::Release() { +int32_t FakeVp8Encoder::Release() { auto result = FakeEncoder::Release(); sequence_checker_.Detach(); return result; } -void FakeVP8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, - size_t size_bytes, - VideoFrameType frame_type, - int stream_idx, - uint32_t timestamp) { +CodecSpecificInfo FakeVp8Encoder::PopulateCodecSpecific( + size_t size_bytes, + VideoFrameType frame_type, + int stream_idx, + uint32_t timestamp) { RTC_DCHECK_RUN_ON(&sequence_checker_); - codec_specific->codecType = kVideoCodecVP8; - codec_specific->codecSpecific.VP8.keyIdx = kNoKeyIdx; - codec_specific->codecSpecific.VP8.nonReference = false; + CodecSpecificInfo 
codec_specific; + codec_specific.codecType = kVideoCodecVP8; + codec_specific.codecSpecific.VP8.keyIdx = kNoKeyIdx; + codec_specific.codecSpecific.VP8.nonReference = false; if (size_bytes > 0) { frame_buffer_controller_->OnEncodeDone( stream_idx, timestamp, size_bytes, - frame_type == VideoFrameType::kVideoFrameKey, -1, codec_specific); + frame_type == VideoFrameType::kVideoFrameKey, -1, &codec_specific); } else { frame_buffer_controller_->OnFrameDropped(stream_idx, timestamp); } + return codec_specific; } -std::unique_ptr FakeVP8Encoder::EncodeHook( - EncodedImage* encoded_image, - CodecSpecificInfo* codec_specific) { +CodecSpecificInfo FakeVp8Encoder::EncodeHook( + EncodedImage& encoded_image, + rtc::scoped_refptr buffer) { RTC_DCHECK_RUN_ON(&sequence_checker_); - uint8_t stream_idx = encoded_image->SpatialIndex().value_or(0); + uint8_t stream_idx = encoded_image.SpatialIndex().value_or(0); frame_buffer_controller_->NextFrameConfig(stream_idx, - encoded_image->Timestamp()); - PopulateCodecSpecific(codec_specific, encoded_image->size(), - encoded_image->_frameType, stream_idx, - encoded_image->Timestamp()); + encoded_image.Timestamp()); + CodecSpecificInfo codec_specific = + PopulateCodecSpecific(encoded_image.size(), encoded_image._frameType, + stream_idx, encoded_image.Timestamp()); // Write width and height to the payload the same way as the real encoder // does. 
- WriteFakeVp8(encoded_image->data(), encoded_image->_encodedWidth, - encoded_image->_encodedHeight, - encoded_image->_frameType == VideoFrameType::kVideoFrameKey); - return nullptr; + WriteFakeVp8(buffer->data(), encoded_image._encodedWidth, + encoded_image._encodedHeight, + encoded_image._frameType == VideoFrameType::kVideoFrameKey); + return codec_specific; } -VideoEncoder::EncoderInfo FakeVP8Encoder::GetEncoderInfo() const { +VideoEncoder::EncoderInfo FakeVp8Encoder::GetEncoderInfo() const { EncoderInfo info; info.implementation_name = "FakeVp8Encoder"; + MutexLock lock(&mutex_); + for (int sid = 0; sid < config_.numberOfSimulcastStreams; ++sid) { + int number_of_temporal_layers = + config_.simulcastStream[sid].numberOfTemporalLayers; + info.fps_allocation[sid].clear(); + for (int tid = 0; tid < number_of_temporal_layers; ++tid) { + // {1/4, 1/2, 1} allocation for num layers = 3. + info.fps_allocation[sid].push_back(255 / + (number_of_temporal_layers - tid)); + } + } return info; } diff --git a/test/fake_vp8_encoder.h b/test/fake_vp8_encoder.h index a0d8e167c7..178a46070d 100644 --- a/test/fake_vp8_encoder.h +++ b/test/fake_vp8_encoder.h @@ -22,7 +22,6 @@ #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_temporal_layers.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" @@ -32,10 +31,10 @@ namespace webrtc { namespace test { -class FakeVP8Encoder : public FakeEncoder { +class FakeVp8Encoder : public FakeEncoder { public: - explicit FakeVP8Encoder(Clock* clock); - virtual ~FakeVP8Encoder() = default; + explicit FakeVp8Encoder(Clock* clock); + virtual ~FakeVp8Encoder() = default; int32_t InitEncode(const VideoCodec* config, const Settings& settings) override; @@ -45,15 +44,14 @@ class FakeVP8Encoder : public FakeEncoder { 
EncoderInfo GetEncoderInfo() const override; private: - void PopulateCodecSpecific(CodecSpecificInfo* codec_specific, - size_t size_bytes, - VideoFrameType frame_type, - int stream_idx, - uint32_t timestamp); - - std::unique_ptr EncodeHook( - EncodedImage* encoded_image, - CodecSpecificInfo* codec_specific) override; + CodecSpecificInfo PopulateCodecSpecific(size_t size_bytes, + VideoFrameType frame_type, + int stream_idx, + uint32_t timestamp); + + CodecSpecificInfo EncodeHook( + EncodedImage& encoded_image, + rtc::scoped_refptr buffer) override; SequenceChecker sequence_checker_; diff --git a/test/fake_vp8_encoder_unittest.cc b/test/fake_vp8_encoder_unittest.cc index 454f9461c1..e79e8e421b 100644 --- a/test/fake_vp8_encoder_unittest.cc +++ b/test/fake_vp8_encoder_unittest.cc @@ -28,7 +28,7 @@ namespace { std::unique_ptr CreateSpecificSimulcastTestFixture() { std::unique_ptr encoder_factory = std::make_unique([]() { - return std::make_unique(Clock::GetRealTimeClock()); + return std::make_unique(Clock::GetRealTimeClock()); }); std::unique_ptr decoder_factory = std::make_unique( @@ -104,5 +104,11 @@ TEST(TestFakeVp8Codec, TestDecodeWidthHeightSet) { fixture->TestDecodeWidthHeightSet(); } +TEST(TestFakeVp8Codec, + TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation) { + auto fixture = CreateSpecificSimulcastTestFixture(); + fixture->TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation(); +} + } // namespace test } // namespace webrtc diff --git a/test/frame_forwarder.cc b/test/frame_forwarder.cc index d1a2ddb1c2..e89f753bd3 100644 --- a/test/frame_forwarder.cc +++ b/test/frame_forwarder.cc @@ -18,32 +18,42 @@ FrameForwarder::FrameForwarder() : sink_(nullptr) {} FrameForwarder::~FrameForwarder() {} void FrameForwarder::IncomingCapturedFrame(const VideoFrame& video_frame) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (sink_) sink_->OnFrame(video_frame); } void FrameForwarder::AddOrUpdateSink(rtc::VideoSinkInterface* sink, const 
rtc::VideoSinkWants& wants) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); + AddOrUpdateSinkLocked(sink, wants); +} + +void FrameForwarder::AddOrUpdateSinkLocked( + rtc::VideoSinkInterface* sink, + const rtc::VideoSinkWants& wants) { RTC_DCHECK(!sink_ || sink_ == sink); sink_ = sink; sink_wants_ = wants; } void FrameForwarder::RemoveSink(rtc::VideoSinkInterface* sink) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RTC_DCHECK_EQ(sink, sink_); sink_ = nullptr; } rtc::VideoSinkWants FrameForwarder::sink_wants() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); + return sink_wants_; +} + +rtc::VideoSinkWants FrameForwarder::sink_wants_locked() const { return sink_wants_; } bool FrameForwarder::has_sinks() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return sink_ != nullptr; } diff --git a/test/frame_forwarder.h b/test/frame_forwarder.h index cf29f5f074..bbf11cc939 100644 --- a/test/frame_forwarder.h +++ b/test/frame_forwarder.h @@ -12,7 +12,7 @@ #include "api/video/video_frame.h" #include "api/video/video_source_interface.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace test { @@ -26,18 +26,26 @@ class FrameForwarder : public rtc::VideoSourceInterface { FrameForwarder(); ~FrameForwarder() override; // Forwards |video_frame| to the registered |sink_|. 
- virtual void IncomingCapturedFrame(const VideoFrame& video_frame); - rtc::VideoSinkWants sink_wants() const; - bool has_sinks() const; + virtual void IncomingCapturedFrame(const VideoFrame& video_frame) + RTC_LOCKS_EXCLUDED(mutex_); + rtc::VideoSinkWants sink_wants() const RTC_LOCKS_EXCLUDED(mutex_); + bool has_sinks() const RTC_LOCKS_EXCLUDED(mutex_); protected: + rtc::VideoSinkWants sink_wants_locked() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; + const rtc::VideoSinkWants& wants) + RTC_LOCKS_EXCLUDED(mutex_) override; + void AddOrUpdateSinkLocked(rtc::VideoSinkInterface* sink, + const rtc::VideoSinkWants& wants) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void RemoveSink(rtc::VideoSinkInterface* sink) + RTC_LOCKS_EXCLUDED(mutex_) override; - rtc::CriticalSection crit_; - rtc::VideoSinkInterface* sink_ RTC_GUARDED_BY(crit_); - rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(crit_); + mutable Mutex mutex_; + rtc::VideoSinkInterface* sink_ RTC_GUARDED_BY(mutex_); + rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(mutex_); }; } // namespace test diff --git a/test/frame_generator.cc b/test/frame_generator.cc index e3b4a06596..6c551d410c 100644 --- a/test/frame_generator.cc +++ b/test/frame_generator.cc @@ -16,6 +16,7 @@ #include #include "api/video/i010_buffer.h" +#include "api/video/nv12_buffer.h" #include "api/video/video_rotation.h" #include "common_video/include/video_frame_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" @@ -46,7 +47,7 @@ SquareGenerator::SquareGenerator(int width, } void SquareGenerator::ChangeResolution(size_t width, size_t height) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); width_ = static_cast(width); height_ = static_cast(height); RTC_CHECK(width_ > 0); @@ -65,12 +66,13 @@ rtc::scoped_refptr SquareGenerator::CreateI420Buffer(int width, } 
FrameGeneratorInterface::VideoFrameData SquareGenerator::NextFrame() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); rtc::scoped_refptr buffer = nullptr; switch (type_) { case OutputType::kI420: - case OutputType::kI010: { + case OutputType::kI010: + case OutputType::kNV12: { buffer = CreateI420Buffer(width_, height_); break; } @@ -96,6 +98,8 @@ FrameGeneratorInterface::VideoFrameData SquareGenerator::NextFrame() { if (type_ == OutputType::kI010) { buffer = I010Buffer::Copy(*buffer->ToI420()); + } else if (type_ == OutputType::kNV12) { + buffer = NV12Buffer::Copy(*buffer->ToI420()); } return VideoFrameData(buffer, absl::nullopt); @@ -116,21 +120,23 @@ void SquareGenerator::Square::Draw( RTC_DCHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 || frame_buffer->type() == VideoFrameBuffer::Type::kI420A); rtc::scoped_refptr buffer = frame_buffer->ToI420(); - x_ = (x_ + random_generator_.Rand(0, 4)) % (buffer->width() - length_); - y_ = (y_ + random_generator_.Rand(0, 4)) % (buffer->height() - length_); - for (int y = y_; y < y_ + length_; ++y) { + int length_cap = std::min(buffer->height(), buffer->width()) / 4; + int length = std::min(length_, length_cap); + x_ = (x_ + random_generator_.Rand(0, 4)) % (buffer->width() - length); + y_ = (y_ + random_generator_.Rand(0, 4)) % (buffer->height() - length); + for (int y = y_; y < y_ + length; ++y) { uint8_t* pos_y = (const_cast(buffer->DataY()) + x_ + y * buffer->StrideY()); - memset(pos_y, yuv_y_, length_); + memset(pos_y, yuv_y_, length); } - for (int y = y_; y < y_ + length_; y = y + 2) { + for (int y = y_; y < y_ + length; y = y + 2) { uint8_t* pos_u = (const_cast(buffer->DataU()) + x_ / 2 + y / 2 * buffer->StrideU()); - memset(pos_u, yuv_u_, length_ / 2); + memset(pos_u, yuv_u_, length / 2); uint8_t* pos_v = (const_cast(buffer->DataV()) + x_ / 2 + y / 2 * buffer->StrideV()); - memset(pos_v, yuv_v_, length_ / 2); + memset(pos_v, yuv_v_, length / 2); } if (frame_buffer->type() == 
VideoFrameBuffer::Type::kI420) @@ -138,10 +144,10 @@ void SquareGenerator::Square::Draw( // Optionally draw on alpha plane if given. const webrtc::I420ABufferInterface* yuva_buffer = frame_buffer->GetI420A(); - for (int y = y_; y < y_ + length_; ++y) { + for (int y = y_; y < y_ + length; ++y) { uint8_t* pos_y = (const_cast(yuva_buffer->DataA()) + x_ + y * yuva_buffer->StrideA()); - memset(pos_y, yuv_a_, length_); + memset(pos_y, yuv_a_, length); } } diff --git a/test/frame_generator.h b/test/frame_generator.h index 6f59c1ed0b..94e15cb0de 100644 --- a/test/frame_generator.h +++ b/test/frame_generator.h @@ -20,8 +20,8 @@ #include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_source_interface.h" -#include "rtc_base/critical_section.h" #include "rtc_base/random.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -57,11 +57,11 @@ class SquareGenerator : public FrameGeneratorInterface { const uint8_t yuv_a_; }; - rtc::CriticalSection crit_; + Mutex mutex_; const OutputType type_; - int width_ RTC_GUARDED_BY(&crit_); - int height_ RTC_GUARDED_BY(&crit_); - std::vector> squares_ RTC_GUARDED_BY(&crit_); + int width_ RTC_GUARDED_BY(&mutex_); + int height_ RTC_GUARDED_BY(&mutex_); + std::vector> squares_ RTC_GUARDED_BY(&mutex_); }; class YuvFileGenerator : public FrameGeneratorInterface { diff --git a/test/frame_generator_capturer.cc b/test/frame_generator_capturer.cc index 36249d672d..266cff8734 100644 --- a/test/frame_generator_capturer.cc +++ b/test/frame_generator_capturer.cc @@ -17,9 +17,9 @@ #include #include +#include "absl/strings/match.h" #include "api/test/create_frame_generator.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" #include "rtc_base/task_queue.h" #include "rtc_base/time_utils.h" @@ -34,7 +34,7 @@ std::string TransformFilePath(std::string path) { int ext_pos = path.rfind("."); if (ext_pos < 0) { 
return test::ResourcePath(path, "yuv"); - } else if (path.find(resource_prefix) == 0) { + } else if (absl::StartsWith(path, resource_prefix)) { std::string name = path.substr(resource_prefix.length(), ext_pos); std::string ext = path.substr(ext_pos, path.size()); return test::ResourcePath(name, ext); @@ -149,13 +149,13 @@ std::unique_ptr FrameGeneratorCapturer::Create( } void FrameGeneratorCapturer::SetFakeRotation(VideoRotation rotation) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); fake_rotation_ = rotation; } void FrameGeneratorCapturer::SetFakeColorSpace( absl::optional color_space) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); fake_color_space_ = color_space; } @@ -167,15 +167,15 @@ bool FrameGeneratorCapturer::Init() { frame_task_ = RepeatingTaskHandle::DelayedStart( task_queue_.Get(), - TimeDelta::seconds(1) / GetCurrentConfiguredFramerate(), [this] { + TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate(), [this] { InsertFrame(); - return TimeDelta::seconds(1) / GetCurrentConfiguredFramerate(); + return TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate(); }); return true; } void FrameGeneratorCapturer::InsertFrame() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (sending_) { FrameGeneratorInterface::VideoFrameData frame_data = frame_generator_->NextFrame(); @@ -204,29 +204,29 @@ void FrameGeneratorCapturer::InsertFrame() { void FrameGeneratorCapturer::Start() { { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); sending_ = true; } if (!frame_task_.Running()) { frame_task_ = RepeatingTaskHandle::Start(task_queue_.Get(), [this] { InsertFrame(); - return TimeDelta::seconds(1) / GetCurrentConfiguredFramerate(); + return TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate(); }); } } void FrameGeneratorCapturer::Stop() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); sending_ = false; } void FrameGeneratorCapturer::ChangeResolution(size_t width, size_t height) { - rtc::CritScope cs(&lock_); + MutexLock 
lock(&lock_); frame_generator_->ChangeResolution(width, height); } void FrameGeneratorCapturer::ChangeFramerate(int target_framerate) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); RTC_CHECK(target_capture_fps_ > 0); if (target_framerate > source_fps_) RTC_LOG(LS_WARNING) << "Target framerate clamped from " << target_framerate @@ -244,7 +244,7 @@ void FrameGeneratorCapturer::ChangeFramerate(int target_framerate) { } void FrameGeneratorCapturer::SetSinkWantsObserver(SinkWantsObserver* observer) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); RTC_DCHECK(!sink_wants_observer_); sink_wants_observer_ = observer; } @@ -253,7 +253,7 @@ void FrameGeneratorCapturer::AddOrUpdateSink( rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) { TestVideoCapturer::AddOrUpdateSink(sink, wants); - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (sink_wants_observer_) { // Tests need to observe unmodified sink wants. sink_wants_observer_->OnSinkWantsChanged(sink, wants); @@ -265,7 +265,7 @@ void FrameGeneratorCapturer::RemoveSink( rtc::VideoSinkInterface* sink) { TestVideoCapturer::RemoveSink(sink); - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); UpdateFps(GetSinkWants().max_framerate_fps); } @@ -283,7 +283,7 @@ void FrameGeneratorCapturer::ForceFrame() { } int FrameGeneratorCapturer::GetCurrentConfiguredFramerate() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (wanted_fps_ && *wanted_fps_ < target_capture_fps_) return *wanted_fps_; return target_capture_fps_; diff --git a/test/frame_generator_capturer.h b/test/frame_generator_capturer.h index aaed205423..1e915fca21 100644 --- a/test/frame_generator_capturer.h +++ b/test/frame_generator_capturer.h @@ -16,7 +16,7 @@ #include "api/task_queue/task_queue_factory.h" #include "api/test/frame_generator_interface.h" #include "api/video/video_frame.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include 
"rtc_base/task_utils/repeating_task.h" #include "system_wrappers/include/clock.h" @@ -29,6 +29,7 @@ namespace frame_gen_cap_impl { template class AutoOpt : public absl::optional { public: + using absl::optional::optional; T* operator->() { if (!absl::optional::has_value()) this->emplace(T()); @@ -48,7 +49,7 @@ struct FrameGeneratorCapturerConfig { struct SquareSlides { int framerate = 30; - TimeDelta change_interval = TimeDelta::seconds(10); + TimeDelta change_interval = TimeDelta::Seconds(10); int width = 1600; int height = 1200; }; @@ -63,9 +64,9 @@ struct FrameGeneratorCapturerConfig { struct ImageSlides { int framerate = 30; - TimeDelta change_interval = TimeDelta::seconds(10); + TimeDelta change_interval = TimeDelta::Seconds(10); struct Crop { - TimeDelta scroll_duration = TimeDelta::seconds(0); + TimeDelta scroll_duration = TimeDelta::Seconds(0); absl::optional width; absl::optional height; } crop; @@ -156,7 +157,7 @@ class FrameGeneratorCapturer : public TestVideoCapturer { bool sending_; SinkWantsObserver* sink_wants_observer_ RTC_GUARDED_BY(&lock_); - rtc::CriticalSection lock_; + Mutex lock_; std::unique_ptr frame_generator_; int source_fps_ RTC_GUARDED_BY(&lock_); diff --git a/test/frame_generator_capturer_unittest.cc b/test/frame_generator_capturer_unittest.cc index 1434474c3f..a76cb95d44 100644 --- a/test/frame_generator_capturer_unittest.cc +++ b/test/frame_generator_capturer_unittest.cc @@ -22,12 +22,12 @@ using ::testing::Property; class MockVideoSinkInterfaceVideoFrame : public rtc::VideoSinkInterface { public: - MOCK_METHOD1(OnFrame, void(const VideoFrame& frame)); - MOCK_METHOD0(OnDiscardedFrame, void()); + MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override)); + MOCK_METHOD(void, OnDiscardedFrame, (), (override)); }; } // namespace TEST(FrameGeneratorCapturerTest, CreateFromConfig) { - GlobalSimulatedTimeController time(Timestamp::seconds(1000)); + GlobalSimulatedTimeController time(Timestamp::Seconds(1000)); 
FrameGeneratorCapturerConfig config; config.squares_video->width = 300; config.squares_video->height = 200; @@ -39,7 +39,7 @@ TEST(FrameGeneratorCapturerTest, CreateFromConfig) { capturer->Start(); EXPECT_CALL(mock_sink, OnFrame(Property(&VideoFrame::width, Eq(300)))) .Times(21); - time.AdvanceTime(TimeDelta::seconds(1)); + time.AdvanceTime(TimeDelta::Seconds(1)); } } // namespace test } // namespace webrtc diff --git a/test/fuzzers/BUILD.gn b/test/fuzzers/BUILD.gn index a15e5f0a03..4975f42a98 100644 --- a/test/fuzzers/BUILD.gn +++ b/test/fuzzers/BUILD.gn @@ -28,6 +28,7 @@ rtc_library("webrtc_fuzzer_main") { } rtc_library("fuzz_data_helper") { + testonly = true sources = [ "fuzz_data_helper.cc", "fuzz_data_helper.h", @@ -39,6 +40,10 @@ rtc_library("fuzz_data_helper") { visibility = [ ":*" ] # Only targets in this file can depend on this. } +set_defaults("webrtc_fuzzer_test") { + absl_deps = [] +} + template("webrtc_fuzzer_test") { fuzzer_test(target_name) { forward_variables_from(invoker, "*") @@ -46,6 +51,21 @@ template("webrtc_fuzzer_test") { ":fuzz_data_helper", ":webrtc_fuzzer_main", ] + + # If absl_deps is [], no action is needed. If not [], then it needs to be + # converted to //third_party/abseil-cpp:absl when build_with_chromium=true + # otherwise it just needs to be added to deps. 
+ if (absl_deps != []) { + if (!defined(deps)) { + deps = [] + } + if (build_with_chromium) { + deps += [ "//third_party/abseil-cpp:absl" ] + } else { + deps += absl_deps + } + } + if (!build_with_chromium && is_clang) { suppressed_configs = [ "//build/config/clang:find_bad_constructs" ] } @@ -148,6 +168,7 @@ webrtc_fuzzer_test("ulpfec_generator_fuzzer") { "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "../../system_wrappers", ] } @@ -192,10 +213,8 @@ webrtc_fuzzer_test("rtcp_receiver_fuzzer") { webrtc_fuzzer_test("rtp_packet_fuzzer") { sources = [ "rtp_packet_fuzzer.cc" ] - deps = [ - "../../modules/rtp_rtcp:rtp_rtcp_format", - "//third_party/abseil-cpp/absl/types:optional", - ] + deps = [ "../../modules/rtp_rtcp:rtp_rtcp_format" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] seed_corpus = "corpora/rtp-corpus" } @@ -228,6 +247,7 @@ webrtc_fuzzer_test("congestion_controller_feedback_fuzzer") { } rtc_library("audio_decoder_fuzzer") { + testonly = true sources = [ "audio_decoder_fuzzer.cc", "audio_decoder_fuzzer.h", @@ -237,8 +257,8 @@ rtc_library("audio_decoder_fuzzer") { "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") { @@ -290,13 +310,45 @@ webrtc_fuzzer_test("audio_decoder_multiopus_fuzzer") { ] } +rtc_library("audio_encoder_fuzzer") { + testonly = true + sources = [ + "audio_encoder_fuzzer.cc", + "audio_encoder_fuzzer.h", + ] + deps = [ + ":fuzz_data_helper", + "../../api:array_view", + "../../api/audio_codecs:audio_codecs_api", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + ] +} + webrtc_fuzzer_test("audio_encoder_opus_fuzzer") { sources = [ "audio_encoder_opus_fuzzer.cc" ] deps = [ - "../../api:array_view", + ":audio_encoder_fuzzer", 
"../../api/audio_codecs/opus:audio_encoder_opus", "../../rtc_base:checks", - "../../rtc_base:rtc_base_approved", + ] +} + +webrtc_fuzzer_test("audio_encoder_isac_fixed_fuzzer") { + sources = [ "audio_encoder_isac_fixed_fuzzer.cc" ] + deps = [ + ":audio_encoder_fuzzer", + "../../api/audio_codecs/isac:audio_encoder_isac_fix", + "../../rtc_base:checks", + ] +} + +webrtc_fuzzer_test("audio_encoder_isac_float_fuzzer") { + sources = [ "audio_encoder_isac_float_fuzzer.cc" ] + deps = [ + ":audio_encoder_fuzzer", + "../../api/audio_codecs/isac:audio_encoder_isac_float", + "../../rtc_base:checks", ] } @@ -338,7 +390,7 @@ webrtc_fuzzer_test("neteq_signal_fuzzer") { webrtc_fuzzer_test("residual_echo_detector_fuzzer") { sources = [ "residual_echo_detector_fuzzer.cc" ] deps = [ - "../../modules/audio_processing", + "../../api/audio:echo_detector_creator", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", ] @@ -391,6 +443,7 @@ webrtc_fuzzer_test("pseudotcp_parser_fuzzer") { } rtc_library("audio_processing_fuzzer_helper") { + testonly = true sources = [ "audio_processing_fuzzer_helper.cc", "audio_processing_fuzzer_helper.h", @@ -400,10 +453,11 @@ rtc_library("audio_processing_fuzzer_helper") { "../../api/audio:audio_frame_api", "../../modules/audio_processing", "../../modules/audio_processing:api", + "../../modules/audio_processing:audio_frame_proxies", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } webrtc_fuzzer_test("audio_processing_fuzzer") { @@ -416,6 +470,7 @@ webrtc_fuzzer_test("audio_processing_fuzzer") { "../../modules/audio_processing", "../../modules/audio_processing:api", "../../modules/audio_processing:audio_buffer", + "../../modules/audio_processing:audioproc_test_utils", "../../modules/audio_processing/aec3", "../../modules/audio_processing/aec_dump", "../../modules/audio_processing/aec_dump:aec_dump_impl", @@ -423,8 +478,8 
@@ webrtc_fuzzer_test("audio_processing_fuzzer") { "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_minmax", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] seed_corpus = "corpora/audio_processing-corpus" } @@ -529,15 +584,6 @@ webrtc_fuzzer_test("string_to_number_fuzzer") { seed_corpus = "corpora/string_to_number-corpus" } -webrtc_fuzzer_test("rtp_rtcp_demuxer_helper_fuzzer") { - sources = [ "rtp_rtcp_demuxer_helper_fuzzer.cc" ] - deps = [ - "../../api:array_view", - "../../call:rtp_receiver", - ] - seed_corpus = "corpora/rtcp-corpus" -} - webrtc_fuzzer_test("sctp_utils_fuzzer") { sources = [ "sctp_utils_fuzzer.cc" ] deps = [ diff --git a/test/fuzzers/OWNERS b/test/fuzzers/OWNERS index 020cfce07a..1ba807eff3 100644 --- a/test/fuzzers/OWNERS +++ b/test/fuzzers/OWNERS @@ -1,4 +1,3 @@ henrik.lundin@webrtc.org aleloi@webrtc.org saza@webrtc.org -benwright@webrtc.org diff --git a/test/fuzzers/audio_encoder_fuzzer.cc b/test/fuzzers/audio_encoder_fuzzer.cc new file mode 100644 index 0000000000..54def44480 --- /dev/null +++ b/test/fuzzers/audio_encoder_fuzzer.cc @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/fuzzers/audio_encoder_fuzzer.h" + +#include + +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "test/fuzzers/fuzz_data_helper.h" + +namespace webrtc { + +// This function reads bytes from |data_view|, interprets them as RTP timestamp +// and input samples, and sends them for encoding. The process continues until +// no more data is available. 
+void FuzzAudioEncoder(rtc::ArrayView data_view, + std::unique_ptr encoder) { + test::FuzzDataHelper data(data_view); + const size_t block_size_samples = + encoder->SampleRateHz() / 100 * encoder->NumChannels(); + const size_t block_size_bytes = block_size_samples * sizeof(int16_t); + if (data_view.size() / block_size_bytes > 1000) { + // If the size of the fuzzer data is more than 1000 input blocks (i.e., more + // than 10 seconds), then don't fuzz at all for the fear of timing out. + return; + } + + rtc::BufferT input_aligned(block_size_samples); + rtc::Buffer encoded; + + // Each round in the loop below will need one block of samples + a 32-bit + // timestamp from the fuzzer input. + const size_t bytes_to_read = block_size_bytes + sizeof(uint32_t); + while (data.CanReadBytes(bytes_to_read)) { + const uint32_t timestamp = data.Read(); + auto byte_array = data.ReadByteArray(block_size_bytes); + // Align the data by copying to another array. + RTC_DCHECK_EQ(input_aligned.size() * sizeof(int16_t), + byte_array.size() * sizeof(uint8_t)); + memcpy(input_aligned.data(), byte_array.data(), byte_array.size()); + auto info = encoder->Encode(timestamp, input_aligned, &encoded); + } +} + +} // namespace webrtc diff --git a/test/fuzzers/audio_encoder_fuzzer.h b/test/fuzzers/audio_encoder_fuzzer.h new file mode 100644 index 0000000000..0c879df4d3 --- /dev/null +++ b/test/fuzzers/audio_encoder_fuzzer.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef TEST_FUZZERS_AUDIO_ENCODER_FUZZER_H_ +#define TEST_FUZZERS_AUDIO_ENCODER_FUZZER_H_ + +#include + +#include "api/array_view.h" +#include "api/audio_codecs/audio_encoder.h" + +namespace webrtc { + +void FuzzAudioEncoder(rtc::ArrayView data_view, + std::unique_ptr encoder); + +} // namespace webrtc + +#endif // TEST_FUZZERS_AUDIO_ENCODER_FUZZER_H_ diff --git a/api/video/video_frame_marking.h b/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc similarity index 51% rename from api/video/video_frame_marking.h rename to test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc index 2a34852f1d..5357dc1b3e 100644 --- a/api/video/video_frame_marking.h +++ b/test/fuzzers/audio_encoder_isac_fixed_fuzzer.cc @@ -8,22 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef API_VIDEO_VIDEO_FRAME_MARKING_H_ -#define API_VIDEO_VIDEO_FRAME_MARKING_H_ +#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" +#include "rtc_base/checks.h" +#include "test/fuzzers/audio_encoder_fuzzer.h" namespace webrtc { -struct FrameMarking { - bool start_of_frame; - bool end_of_frame; - bool independent_frame; - bool discardable_frame; - bool base_layer_sync; - uint8_t temporal_id; - uint8_t layer_id; - uint8_t tl0_pic_idx; -}; +void FuzzOneInput(const uint8_t* data, size_t size) { + AudioEncoderIsacFix::Config config; + RTC_CHECK(config.IsOk()); + constexpr int kPayloadType = 100; + FuzzAudioEncoder( + /*data_view=*/{data, size}, + /*encoder=*/AudioEncoderIsacFix::MakeAudioEncoder(config, kPayloadType)); +} } // namespace webrtc - -#endif // API_VIDEO_VIDEO_FRAME_MARKING_H_ diff --git a/test/fuzzers/audio_encoder_isac_float_fuzzer.cc b/test/fuzzers/audio_encoder_isac_float_fuzzer.cc new file mode 100644 index 0000000000..f9e2e0206d --- /dev/null +++ b/test/fuzzers/audio_encoder_isac_float_fuzzer.cc @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/isac/audio_encoder_isac_float.h" +#include "rtc_base/checks.h" +#include "test/fuzzers/audio_encoder_fuzzer.h" + +namespace webrtc { + +void FuzzOneInput(const uint8_t* data, size_t size) { + AudioEncoderIsacFloat::Config config; + config.sample_rate_hz = 16000; + RTC_CHECK(config.IsOk()); + constexpr int kPayloadType = 100; + FuzzAudioEncoder(/*data_view=*/{data, size}, + /*encoder=*/AudioEncoderIsacFloat::MakeAudioEncoder( + config, kPayloadType)); +} + +} // namespace webrtc diff --git a/test/fuzzers/audio_encoder_opus_fuzzer.cc b/test/fuzzers/audio_encoder_opus_fuzzer.cc index 50c285616b..d67e6d6067 100644 --- a/test/fuzzers/audio_encoder_opus_fuzzer.cc +++ b/test/fuzzers/audio_encoder_opus_fuzzer.cc @@ -8,57 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/array_view.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" -#include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "test/fuzzers/fuzz_data_helper.h" +#include "test/fuzzers/audio_encoder_fuzzer.h" namespace webrtc { -namespace { - -// This function reads bytes from |data_view|, interprets them -// as RTP timestamp and input samples, and sends them for encoding. The process -// continues until no more data is available. 
-void FuzzAudioEncoder(rtc::ArrayView data_view, - AudioEncoder* encoder) { - test::FuzzDataHelper data(data_view); - const size_t block_size_samples = - encoder->SampleRateHz() / 100 * encoder->NumChannels(); - const size_t block_size_bytes = block_size_samples * sizeof(int16_t); - if (data_view.size() / block_size_bytes > 1000) { - // If the size of the fuzzer data is more than 1000 input blocks (i.e., more - // than 10 seconds), then don't fuzz at all for the fear of timing out. - return; - } - - rtc::BufferT input_aligned(block_size_samples); - rtc::Buffer encoded; - - // Each round in the loop below will need one block of samples + a 32-bit - // timestamp from the fuzzer input. - const size_t bytes_to_read = block_size_bytes + sizeof(uint32_t); - while (data.CanReadBytes(bytes_to_read)) { - const uint32_t timestamp = data.Read(); - auto byte_array = data.ReadByteArray(block_size_bytes); - // Align the data by copying to another array. - RTC_DCHECK_EQ(input_aligned.size() * sizeof(int16_t), - byte_array.size() * sizeof(uint8_t)); - memcpy(input_aligned.data(), byte_array.data(), byte_array.size()); - auto info = encoder->Encode(timestamp, input_aligned, &encoded); - } -} - -} // namespace void FuzzOneInput(const uint8_t* data, size_t size) { AudioEncoderOpus::Config config; config.frame_size_ms = 20; RTC_CHECK(config.IsOk()); constexpr int kPayloadType = 100; - std::unique_ptr enc = - AudioEncoderOpus::MakeAudioEncoder(config, kPayloadType); - FuzzAudioEncoder(rtc::ArrayView(data, size), enc.get()); + FuzzAudioEncoder( + /*data_view=*/{data, size}, + /*encoder=*/AudioEncoderOpus::MakeAudioEncoder(config, kPayloadType)); } } // namespace webrtc diff --git a/test/fuzzers/audio_processing_configs_fuzzer.cc b/test/fuzzers/audio_processing_configs_fuzzer.cc index 8fe9ad1c55..d511b7bc0e 100644 --- a/test/fuzzers/audio_processing_configs_fuzzer.cc +++ b/test/fuzzers/audio_processing_configs_fuzzer.cc @@ -16,6 +16,7 @@ #include 
"api/task_queue/default_task_queue_factory.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/include/audio_processing.h" +#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "rtc_base/arraysize.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/task_queue.h" @@ -108,7 +109,7 @@ std::unique_ptr CreateApm(test::FuzzDataHelper* fuzz_data, config.Set(new ExperimentalNs(exp_ns)); std::unique_ptr apm( - AudioProcessingBuilder() + AudioProcessingBuilderForTesting() .SetEchoControlFactory(std::move(echo_control_factory)) .Create(config)); diff --git a/test/fuzzers/audio_processing_fuzzer_helper.cc b/test/fuzzers/audio_processing_fuzzer_helper.cc index 87b68f42e3..cb53a04cbb 100644 --- a/test/fuzzers/audio_processing_fuzzer_helper.cc +++ b/test/fuzzers/audio_processing_fuzzer_helper.cc @@ -16,6 +16,7 @@ #include #include "api/audio/audio_frame.h" +#include "modules/audio_processing/include/audio_frame_proxies.h" #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/checks.h" @@ -130,9 +131,9 @@ void FuzzAudioProcessing(test::FuzzDataHelper* fuzz_data, GenerateFixedFrame(fuzz_data, input_rate, num_channels, &fixed_frame); if (is_capture) { - apm_return_code = apm->ProcessStream(&fixed_frame); + apm_return_code = ProcessAudioFrame(apm.get(), &fixed_frame); } else { - apm_return_code = apm->ProcessReverseStream(&fixed_frame); + apm_return_code = ProcessReverseAudioFrame(apm.get(), &fixed_frame); } } diff --git a/test/fuzzers/flexfec_sender_fuzzer.cc b/test/fuzzers/flexfec_sender_fuzzer.cc index 4882f7df51..8ddd1c0fe0 100644 --- a/test/fuzzers/flexfec_sender_fuzzer.cc +++ b/test/fuzzers/flexfec_sender_fuzzer.cc @@ -41,7 +41,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { FecProtectionParams params = { data[i++], static_cast(data[i++] % 100), data[i++] <= 127 ? 
kFecMaskRandom : kFecMaskBursty}; - sender.SetFecParameters(params); + sender.SetProtectionParameters(params, params); uint16_t seq_num = data[i++]; while (i + 1 < size) { @@ -59,11 +59,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) { RtpPacketToSend rtp_packet(nullptr); if (!rtp_packet.Parse(packet.get(), kRtpHeaderSize + payload_size)) break; - sender.AddRtpPacketAndGenerateFec(rtp_packet); - if (sender.FecAvailable()) { - std::vector> fec_packets = - sender.GetFecPackets(); - } + sender.AddPacketAndGenerateFec(rtp_packet); + sender.GetFecPackets(); } } diff --git a/test/fuzzers/frame_buffer2_fuzzer.cc b/test/fuzzers/frame_buffer2_fuzzer.cc index 3ee40fda3a..7ec7da5eca 100644 --- a/test/fuzzers/frame_buffer2_fuzzer.cc +++ b/test/fuzzers/frame_buffer2_fuzzer.cc @@ -64,7 +64,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { return; } DataReader reader(data, size); - GlobalSimulatedTimeController time_controller(Timestamp::seconds(0)); + GlobalSimulatedTimeController time_controller(Timestamp::Seconds(0)); rtc::TaskQueue task_queue( time_controller.GetTaskQueueFactory()->CreateTaskQueue( "time_tq", TaskQueueFactory::Priority::NORMAL)); @@ -106,7 +106,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { } } - time_controller.AdvanceTime(TimeDelta::ms(reader.GetNum())); + time_controller.AdvanceTime(TimeDelta::Millis(reader.GetNum())); } } diff --git a/test/fuzzers/neteq_rtp_fuzzer.cc b/test/fuzzers/neteq_rtp_fuzzer.cc index d978199448..348c84f040 100644 --- a/test/fuzzers/neteq_rtp_fuzzer.cc +++ b/test/fuzzers/neteq_rtp_fuzzer.cc @@ -8,7 +8,9 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include #include +#include #include #include @@ -64,6 +66,7 @@ class FuzzRtpInput : public NetEqInput { std::numeric_limits::max())); packet_ = input_->PopPacket(); FuzzHeader(); + MaybeFuzzPayload(); } absl::optional NextPacketTime() const override { @@ -79,6 +82,7 @@ class FuzzRtpInput : public NetEqInput { std::unique_ptr packet_to_return = std::move(packet_); packet_ = input_->PopPacket(); FuzzHeader(); + MaybeFuzzPayload(); return packet_to_return; } @@ -116,6 +120,30 @@ class FuzzRtpInput : public NetEqInput { RTC_CHECK_EQ(data_ix_ - start_ix, kNumBytesToFuzz); } + void MaybeFuzzPayload() { + // Read one byte of fuzz data to determine how many payload bytes to fuzz. + if (data_ix_ + 1 > data_.size()) { + ended_ = true; + return; + } + size_t bytes_to_fuzz = data_[data_ix_++]; + + // Restrict number of bytes to fuzz to 16; a reasonably low number enough to + // cover a few RED headers. Also don't write outside the payload length. + bytes_to_fuzz = std::min(bytes_to_fuzz % 16, packet_->payload.size()); + + if (bytes_to_fuzz == 0) + return; + + if (data_ix_ + bytes_to_fuzz > data_.size()) { + ended_ = true; + return; + } + + std::memcpy(packet_->payload.data(), &data_[data_ix_], bytes_to_fuzz); + data_ix_ += bytes_to_fuzz; + } + bool ended_ = false; rtc::ArrayView data_; size_t data_ix_ = 0; diff --git a/test/fuzzers/residual_echo_detector_fuzzer.cc b/test/fuzzers/residual_echo_detector_fuzzer.cc index 99ea06a08e..da4b6ededf 100644 --- a/test/fuzzers/residual_echo_detector_fuzzer.cc +++ b/test/fuzzers/residual_echo_detector_fuzzer.cc @@ -15,7 +15,7 @@ #include #include -#include "modules/audio_processing/residual_echo_detector.h" +#include "api/audio/echo_detector_creator.h" #include "rtc_base/checks.h" #include "rtc_base/ref_counted_object.h" @@ -43,8 +43,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { read_idx += 2; std::bitset<16> call_order(call_order_int); - rtc::scoped_refptr echo_detector = - new rtc::RefCountedObject(); + 
rtc::scoped_refptr echo_detector = CreateEchoDetector(); std::vector input(1); // Call AnalyzeCaptureAudio once to prevent the flushing of the buffer. echo_detector->AnalyzeCaptureAudio(input); diff --git a/test/fuzzers/rtcp_receiver_fuzzer.cc b/test/fuzzers/rtcp_receiver_fuzzer.cc index 38213c3a6e..8bad9e456a 100644 --- a/test/fuzzers/rtcp_receiver_fuzzer.cc +++ b/test/fuzzers/rtcp_receiver_fuzzer.cc @@ -7,9 +7,9 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtcp_receiver.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/checks.h" #include "system_wrappers/include/clock.h" @@ -40,7 +40,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { NullModuleRtpRtcp rtp_rtcp_module; SimulatedClock clock(1234); - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = &clock; config.rtcp_report_interval_ms = kRtcpIntervalMs; config.local_media_ssrc = 1; diff --git a/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc b/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc index 6c6b1d393f..168e7b606b 100644 --- a/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc +++ b/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc @@ -34,6 +34,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) { rtp_payloads.push_back(fuzz_input.ReadByteArray(next_size)); } // Run code under test. 
- VideoRtpDepacketizerAv1::AssembleFrame(rtp_payloads); + VideoRtpDepacketizerAv1().AssembleFrame(rtp_payloads); } } // namespace webrtc diff --git a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc index 0aa8e75a16..8b19a088de 100644 --- a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc +++ b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc @@ -63,25 +63,30 @@ class NullCallback : public video_coding::OnCompleteFrameCallback { std::unique_ptr frame) override {} }; -RtpGenericFrameDescriptor GenerateRtpGenericFrameDescriptor( - DataReader* reader) { - RtpGenericFrameDescriptor res; - res.SetFirstPacketInSubFrame(true); - res.SetFrameId(reader->GetNum()); - - int spatial_layer = - reader->GetNum() % RtpGenericFrameDescriptor::kMaxSpatialLayers; - res.SetSpatialLayersBitmask(1 << spatial_layer); - res.SetTemporalLayer(reader->GetNum() % - RtpGenericFrameDescriptor::kMaxTemporalLayers); - - int num_diffs = (reader->GetNum() % - RtpGenericFrameDescriptor::kMaxNumFrameDependencies); +absl::optional +GenerateGenericFrameDependencies(DataReader* reader) { + absl::optional result; + uint8_t flags = reader->GetNum(); + if (flags & 0b1000'0000) { + // i.e. with 50% chance there are no generic dependencies. + // in such case codec-specfic code path of the RtpFrameReferenceFinder will + // be validated. + return result; + } + + result.emplace(); + result->frame_id = reader->GetNum(); + result->spatial_index = (flags & 0b0111'0000) >> 4; + result->temporal_index = (flags & 0b0000'1110) >> 1; + + // Larger than supported by the RtpFrameReferenceFinder. 
+ int num_diffs = (reader->GetNum() % 16); for (int i = 0; i < num_diffs; ++i) { - res.AddFrameDependencyDiff(reader->GetNum() % (1 << 14)); + result->dependencies.push_back(result->frame_id - + (reader->GetNum() % (1 << 14))); } - return res; + return result; } } // namespace @@ -90,7 +95,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { NullCallback cb; video_coding::RtpFrameReferenceFinder reference_finder(&cb); - auto codec = static_cast(reader.GetNum() % 4); + auto codec = static_cast(reader.GetNum() % 5); while (reader.MoreToRead()) { uint16_t first_seq_num = reader.GetNum(); @@ -127,7 +132,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { break; } - reader.CopyTo(&video_header.frame_marking); + video_header.generic = GenerateGenericFrameDependencies(&reader); // clang-format off auto frame = std::make_unique( @@ -146,7 +151,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) { VideoContentType::UNSPECIFIED, video_header, /*color_space=*/absl::nullopt, - GenerateRtpGenericFrameDescriptor(&reader), RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on diff --git a/test/fuzzers/rtp_packet_fuzzer.cc b/test/fuzzers/rtp_packet_fuzzer.cc index 25fec2c094..3f03114a33 100644 --- a/test/fuzzers/rtp_packet_fuzzer.cc +++ b/test/fuzzers/rtp_packet_fuzzer.cc @@ -15,6 +15,7 @@ #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" namespace webrtc { // We decide which header extensions to register by reading four bytes @@ -99,10 +100,11 @@ void FuzzOneInput(const uint8_t* data, size_t size) { &feedback_request); break; } - case kRtpExtensionPlayoutDelay: - PlayoutDelay playout; + case kRtpExtensionPlayoutDelay: { + VideoPlayoutDelay playout; packet.GetExtension(&playout); break; + } case kRtpExtensionVideoContentType: 
VideoContentType content_type; packet.GetExtension(&content_type); @@ -111,10 +113,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) { VideoSendTiming timing; packet.GetExtension(&timing); break; - case kRtpExtensionFrameMarking: - FrameMarking frame_marking; - packet.GetExtension(&frame_marking); - break; case kRtpExtensionRtpStreamId: { std::string rsid; packet.GetExtension(&rsid); @@ -135,11 +133,6 @@ void FuzzOneInput(const uint8_t* data, size_t size) { packet.GetExtension(&descriptor); break; } - case kRtpExtensionGenericFrameDescriptor01: { - RtpGenericFrameDescriptor descriptor; - packet.GetExtension(&descriptor); - break; - } case kRtpExtensionColorSpace: { ColorSpace color_space; packet.GetExtension(&color_space); @@ -150,6 +143,11 @@ void FuzzOneInput(const uint8_t* data, size_t size) { packet.GetExtension(&noise_level); break; } + case kRtpExtensionVideoLayersAllocation: { + VideoLayersAllocation allocation; + packet.GetExtension(&allocation); + break; + } case kRtpExtensionGenericFrameDescriptor02: // This extension requires state to read and so complicated that // deserves own fuzzer. 
diff --git a/test/fuzzers/ulpfec_generator_fuzzer.cc b/test/fuzzers/ulpfec_generator_fuzzer.cc index 306f7a0da9..9426ef0ad3 100644 --- a/test/fuzzers/ulpfec_generator_fuzzer.cc +++ b/test/fuzzers/ulpfec_generator_fuzzer.cc @@ -16,6 +16,7 @@ #include "modules/rtp_rtcp/source/ulpfec_generator.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -25,13 +26,14 @@ constexpr uint8_t kRedPayloadType = 97; } // namespace void FuzzOneInput(const uint8_t* data, size_t size) { - UlpfecGenerator generator; + SimulatedClock clock(1); + UlpfecGenerator generator(kRedPayloadType, kFecPayloadType, &clock); size_t i = 0; if (size < 4) return; FecProtectionParams params = { data[i++] % 128, static_cast(data[i++] % 10), kFecMaskBursty}; - generator.SetFecParameters(params); + generator.SetProtectionParameters(params, params); uint16_t seq_num = data[i++]; uint16_t prev_seq_num = 0; while (i + 3 < size) { @@ -41,6 +43,9 @@ void FuzzOneInput(const uint8_t* data, size_t size) { break; rtc::CopyOnWriteBuffer packet(&data[i], payload_size + rtp_header_length); packet.EnsureCapacity(IP_PACKET_SIZE); + // Write a valid parsable header (version = 2, no padding, no extensions, + // no CSRCs). + ByteWriter::WriteBigEndian(&packet[0], 2 << 6); // Make sure sequence numbers are increasing. ByteWriter::WriteBigEndian(&packet[2], seq_num++); i += payload_size + rtp_header_length; @@ -51,16 +56,15 @@ void FuzzOneInput(const uint8_t* data, size_t size) { // number became out of order. if (protect && IsNewerSequenceNumber(seq_num, prev_seq_num) && seq_num < prev_seq_num + kUlpfecMaxMediaPackets) { - generator.AddRtpPacketAndGenerateFec(packet, rtp_header_length); + RtpPacketToSend rtp_packet(nullptr); + // Check that we actually have a parsable packet, we want to fuzz FEC + // logic, not RTP header parsing. 
+ RTC_CHECK(rtp_packet.Parse(packet)); + generator.AddPacketAndGenerateFec(rtp_packet); prev_seq_num = seq_num; } - const size_t num_fec_packets = generator.NumAvailableFecPackets(); - if (num_fec_packets > 0) { - std::vector> fec_packets = - generator.GetUlpfecPacketsAsRed(kRedPayloadType, kFecPayloadType, - 100); - RTC_CHECK_EQ(num_fec_packets, fec_packets.size()); - } + + generator.GetFecPackets(); } } } // namespace webrtc diff --git a/test/fuzzers/ulpfec_receiver_fuzzer.cc b/test/fuzzers/ulpfec_receiver_fuzzer.cc index 9c76976290..042aa5d112 100644 --- a/test/fuzzers/ulpfec_receiver_fuzzer.cc +++ b/test/fuzzers/ulpfec_receiver_fuzzer.cc @@ -44,7 +44,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { size_t packet_length = kRtpHeaderSize + fuzz_data.Read(); auto raw_packet = fuzz_data.ReadByteArray(packet_length); - RtpPacket parsed_packet; + RtpPacketReceived parsed_packet; if (!parsed_packet.Parse(raw_packet)) continue; diff --git a/test/fuzzers/utils/BUILD.gn b/test/fuzzers/utils/BUILD.gn index 165ac93d8c..6249156058 100644 --- a/test/fuzzers/utils/BUILD.gn +++ b/test/fuzzers/utils/BUILD.gn @@ -18,6 +18,7 @@ rtc_library("rtp_replayer") { "../../../api/rtc_event_log", "../../../api/task_queue:default_task_queue_factory", "../../../api/test/video:function_video_factory", + "../../../api/transport:field_trial_based_config", "../../../api/video_codecs:video_codecs_api", "../../../call", "../../../call:call_interfaces", @@ -35,6 +36,7 @@ rtc_library("rtp_replayer") { "../../../test:rtp_test_utils", "../../../test:run_test", "../../../test:run_test_interface", + "../../../test:test_common", "../../../test:test_renderer", "../../../test:test_support", "../../../test:video_test_common", diff --git a/test/fuzzers/utils/rtp_replayer.cc b/test/fuzzers/utils/rtp_replayer.cc index af03be2f2c..a664adb31d 100644 --- a/test/fuzzers/utils/rtp_replayer.cc +++ b/test/fuzzers/utils/rtp_replayer.cc @@ -16,6 +16,7 @@ #include #include 
"api/task_queue/default_task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" #include "rtc_base/strings/json.h" #include "system_wrappers/include/clock.h" #include "test/call_config_utils.h" @@ -23,6 +24,7 @@ #include "test/fake_decoder.h" #include "test/rtp_file_reader.h" #include "test/rtp_header_parser.h" +#include "test/run_loop.h" namespace webrtc { namespace test { @@ -42,12 +44,13 @@ void RtpReplayer::Replay( std::vector receive_stream_configs, const uint8_t* rtp_dump_data, size_t rtp_dump_size) { + RunLoop loop; rtc::ScopedBaseFakeClock fake_clock; // Work around: webrtc calls webrtc::Random(clock.TimeInMicroseconds()) // everywhere and Random expects non-zero seed. Let's set the clock non-zero // to make them happy. - fake_clock.SetTime(webrtc::Timestamp::ms(1)); + fake_clock.SetTime(webrtc::Timestamp::Millis(1)); // Attempt to create an RtpReader from the input file. auto rtp_reader = CreateRtpReader(rtp_dump_data, rtp_dump_size); @@ -62,6 +65,8 @@ void RtpReplayer::Replay( CreateDefaultTaskQueueFactory(); Call::Config call_config(&event_log); call_config.task_queue_factory = task_queue_factory.get(); + FieldTrialBasedConfig field_trials; + call_config.trials = &field_trials; std::unique_ptr call(Call::Create(call_config)); SetupVideoStreams(&receive_stream_configs, stream_state.get(), call.get()); @@ -108,7 +113,6 @@ void RtpReplayer::SetupVideoStreams( for (auto& decoder : receive_config.decoders) { decoder = test::CreateMatchingDecoder(decoder.payload_type, decoder.video_format.name); - decoder.decoder_factory = stream_state->decoder_factory.get(); } // Create the window to display the rendered video. @@ -116,6 +120,7 @@ void RtpReplayer::SetupVideoStreams( test::VideoRenderer::Create("Fuzzing WebRTC Video Config", 640, 480)); // Create a receive stream for this config. 
receive_config.renderer = stream_state->sinks.back().get(); + receive_config.decoder_factory = stream_state->decoder_factory.get(); stream_state->receive_streams.emplace_back( call->CreateVideoReceiveStream(std::move(receive_config))); } @@ -155,7 +160,7 @@ void RtpReplayer::ReplayPackets(rtc::FakeClock* clock, if (deliver_in_ms > 0) { // StatsCounter::ReportMetricToAggregatedCounter is O(elapsed time). // Set an upper limit to prevent waste time. - clock->AdvanceTime(webrtc::TimeDelta::ms( + clock->AdvanceTime(webrtc::TimeDelta::Millis( std::min(deliver_in_ms, static_cast(100)))); } diff --git a/test/ios/Info.plist b/test/ios/Info.plist index e02ce91165..8d24728091 100644 --- a/test/ios/Info.plist +++ b/test/ios/Info.plist @@ -16,6 +16,8 @@ ${PRODUCT_NAME} CFBundlePackageType APPL + CFBundleShortVersionString + 1.0 CFBundleSignature ???? CFBundleVersion diff --git a/test/ios/test_support.h b/test/ios/test_support.h index 2c5b600ce8..10958572cf 100644 --- a/test/ios/test_support.h +++ b/test/ios/test_support.h @@ -11,6 +11,11 @@ #ifndef TEST_IOS_TEST_SUPPORT_H_ #define TEST_IOS_TEST_SUPPORT_H_ +#include +#include + +#include "absl/types/optional.h" + namespace rtc { namespace test { // Launches an iOS app that serves as a host for a test suite. 
@@ -20,7 +25,8 @@ void RunTestsFromIOSApp(); void InitTestSuite(int (*test_suite)(void), int argc, char* argv[], - bool save_chartjson_result); + bool save_chartjson_result, + absl::optional> metrics_to_plot); } // namespace test } // namespace rtc diff --git a/test/ios/test_support.mm b/test/ios/test_support.mm index 86005974fb..86d2e6ce17 100644 --- a/test/ios/test_support.mm +++ b/test/ios/test_support.mm @@ -33,7 +33,8 @@ static int (*g_test_suite)(void) = NULL; static int g_argc; static char **g_argv; -static bool g_save_chartjson_result; +static bool g_write_perf_output; +static absl::optional> g_metrics_to_plot; @interface UIApplication (Testing) - (void)_terminateWithStatus:(int)status; @@ -75,19 +76,23 @@ - (void)runTests { int exitStatus = g_test_suite(); - if (g_save_chartjson_result) { - // Stores data into a json file under the app's document directory. - NSString* fileName = @"perf_result.json"; + if (g_write_perf_output) { + // Stores data into a proto file under the app's document directory. + NSString *fileName = @"perftest-output.pb"; NSArray* outputDirectories = NSSearchPathForDirectoriesInDomains( NSDocumentDirectory, NSUserDomainMask, YES); if ([outputDirectories count] != 0) { NSString* outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; - webrtc::test::WritePerfResults( - [NSString stdStringForString:outputPath]); + if (!webrtc::test::WritePerfResults([NSString stdStringForString:outputPath])) { + exit(1); + } } } + if (g_metrics_to_plot) { + webrtc::test::PrintPlottableResults(*g_metrics_to_plot); + } // If a test app is too fast, it will exit before Instruments has has a // a chance to initialize and no test results will be seen. @@ -109,12 +114,16 @@ - (void)runTests { // Note: This is not thread safe, and must be called from the same thread as // runTests above. 
-void InitTestSuite(int (*test_suite)(void), int argc, char *argv[], - bool save_chartjson_result) { +void InitTestSuite(int (*test_suite)(void), + int argc, + char *argv[], + bool write_perf_output, + absl::optional> metrics_to_plot) { g_test_suite = test_suite; g_argc = argc; g_argv = argv; - g_save_chartjson_result = save_chartjson_result; + g_write_perf_output = write_perf_output; + g_metrics_to_plot = std::move(metrics_to_plot); } void RunTestsFromIOSApp() { diff --git a/test/layer_filtering_transport.cc b/test/layer_filtering_transport.cc index ad6e117131..d906e07046 100644 --- a/test/layer_filtering_transport.cc +++ b/test/layer_filtering_transport.cc @@ -121,8 +121,6 @@ bool LayerFilteringTransport::SendRtp(const uint8_t* packet, if (vp9_header.ss_data_available) { RTC_DCHECK(vp9_header.temporal_idx == kNoTemporalIdx || vp9_header.temporal_idx == 0); - RTC_DCHECK(vp9_header.spatial_idx == kNoSpatialIdx || - vp9_header.spatial_idx == 0); num_active_spatial_layers_ = vp9_header.num_spatial_layers; } } diff --git a/test/logging/BUILD.gn b/test/logging/BUILD.gn index db2a5447ac..1af2ecfdac 100644 --- a/test/logging/BUILD.gn +++ b/test/logging/BUILD.gn @@ -27,6 +27,6 @@ rtc_library("log_writer") { "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:stringutils", "../../test:fileutils", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git a/test/mac_capturer.mm b/test/mac_capturer.mm index 8d50a804f9..1f84c1bb96 100644 --- a/test/mac_capturer.mm +++ b/test/mac_capturer.mm @@ -15,14 +15,15 @@ #import "sdk/objc/native/api/video_capturer.h" #import "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCTestVideoSourceAdapter : NSObject +@interface RTCTestVideoSourceAdapter : NSObject @property(nonatomic) webrtc::test::MacCapturer *capturer; @end @implementation RTCTestVideoSourceAdapter @synthesize capturer = _capturer; -- (void)capturer:(RTCVideoCapturer *)capturer 
didCaptureVideoFrame:(RTCVideoFrame *)frame { +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec; rtc::scoped_refptr buffer = new rtc::RefCountedObject(frame.buffer); @@ -39,7 +40,7 @@ - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFram AVCaptureDeviceFormat *SelectClosestFormat(AVCaptureDevice *device, size_t width, size_t height) { NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]; AVCaptureDeviceFormat *selectedFormat = nil; int currentDiff = INT_MAX; for (AVCaptureDeviceFormat *format in formats) { @@ -67,11 +68,12 @@ - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFram adapter_ = (__bridge_retained void *)adapter; adapter.capturer = this; - RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:adapter]; + RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer = + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:adapter]; capturer_ = (__bridge_retained void *)capturer; AVCaptureDevice *device = - [[RTCCameraVideoCapturer captureDevices] objectAtIndex:capture_device_index]; + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices] objectAtIndex:capture_device_index]; AVCaptureDeviceFormat *format = SelectClosestFormat(device, width, height); [capturer startCaptureWithDevice:device format:format fps:target_fps]; } @@ -87,7 +89,8 @@ - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFram #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wunused-variable" RTCTestVideoSourceAdapter *adapter = (__bridge_transfer RTCTestVideoSourceAdapter *)adapter_; - RTCCameraVideoCapturer *capturer = (__bridge_transfer RTCCameraVideoCapturer *)capturer_; + RTC_OBJC_TYPE(RTCCameraVideoCapturer) 
*capturer = + (__bridge_transfer RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer_; [capturer stopCapture]; #pragma clang diagnostic pop } diff --git a/test/mock_audio_decoder.h b/test/mock_audio_decoder.h index 7c6db5acc2..8f44bf891d 100644 --- a/test/mock_audio_decoder.h +++ b/test/mock_audio_decoder.h @@ -20,16 +20,18 @@ class MockAudioDecoder : public AudioDecoder { public: MockAudioDecoder(); ~MockAudioDecoder(); - MOCK_METHOD0(Die, void()); - MOCK_METHOD5(DecodeInternal, - int(const uint8_t*, size_t, int, int16_t*, SpeechType*)); - MOCK_CONST_METHOD0(HasDecodePlc, bool()); - MOCK_METHOD2(DecodePlc, size_t(size_t, int16_t*)); - MOCK_METHOD0(Reset, void()); - MOCK_METHOD0(ErrorCode, int()); - MOCK_CONST_METHOD2(PacketDuration, int(const uint8_t*, size_t)); - MOCK_CONST_METHOD0(Channels, size_t()); - MOCK_CONST_METHOD0(SampleRateHz, int()); + MOCK_METHOD(void, Die, ()); + MOCK_METHOD(int, + DecodeInternal, + (const uint8_t*, size_t, int, int16_t*, SpeechType*), + (override)); + MOCK_METHOD(bool, HasDecodePlc, (), (const, override)); + MOCK_METHOD(size_t, DecodePlc, (size_t, int16_t*), (override)); + MOCK_METHOD(void, Reset, (), (override)); + MOCK_METHOD(int, ErrorCode, (), (override)); + MOCK_METHOD(int, PacketDuration, (const uint8_t*, size_t), (const, override)); + MOCK_METHOD(size_t, Channels, (), (const, override)); + MOCK_METHOD(int, SampleRateHz, (), (const, override)); }; } // namespace webrtc diff --git a/test/mock_audio_decoder_factory.h b/test/mock_audio_decoder_factory.h index cdf2919543..cdb03d3f38 100644 --- a/test/mock_audio_decoder_factory.h +++ b/test/mock_audio_decoder_factory.h @@ -24,19 +24,23 @@ namespace webrtc { class MockAudioDecoderFactory : public AudioDecoderFactory { public: - MOCK_METHOD0(GetSupportedDecoders, std::vector()); - MOCK_METHOD1(IsSupportedDecoder, bool(const SdpAudioFormat&)); + MOCK_METHOD(std::vector, + GetSupportedDecoders, + (), + (override)); + MOCK_METHOD(bool, IsSupportedDecoder, (const SdpAudioFormat&), 
(override)); std::unique_ptr MakeAudioDecoder( const SdpAudioFormat& format, - absl::optional codec_pair_id) { + absl::optional codec_pair_id) override { std::unique_ptr return_value; MakeAudioDecoderMock(format, codec_pair_id, &return_value); return return_value; } - MOCK_METHOD3(MakeAudioDecoderMock, - void(const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* return_value)); + MOCK_METHOD(void, + MakeAudioDecoderMock, + (const SdpAudioFormat& format, + absl::optional codec_pair_id, + std::unique_ptr*)); // Creates a MockAudioDecoderFactory with no formats and that may not be // invoked to create a codec - useful for initializing a voice engine, for diff --git a/test/mock_audio_encoder.h b/test/mock_audio_encoder.h index 2dfd15ca98..87b8cc8c8e 100644 --- a/test/mock_audio_encoder.h +++ b/test/mock_audio_encoder.h @@ -21,43 +21,50 @@ namespace webrtc { class MockAudioEncoder : public AudioEncoder { public: - // TODO(nisse): Valid overrides commented out, because the gmock - // methods don't use any override declarations, and we want to avoid - // warnings from -Winconsistent-missing-override. See - // http://crbug.com/428099. 
MockAudioEncoder(); ~MockAudioEncoder(); - MOCK_METHOD1(Mark, void(std::string desc)); - MOCK_CONST_METHOD0(SampleRateHz, int()); - MOCK_CONST_METHOD0(NumChannels, size_t()); - MOCK_CONST_METHOD0(RtpTimestampRateHz, int()); - MOCK_CONST_METHOD0(Num10MsFramesInNextPacket, size_t()); - MOCK_CONST_METHOD0(Max10MsFramesInAPacket, size_t()); - MOCK_CONST_METHOD0(GetTargetBitrate, int()); - MOCK_CONST_METHOD0(GetFrameLengthRange, - absl::optional>()); - - MOCK_METHOD0(Reset, void()); - MOCK_METHOD1(SetFec, bool(bool enable)); - MOCK_METHOD1(SetDtx, bool(bool enable)); - MOCK_METHOD1(SetApplication, bool(Application application)); - MOCK_METHOD1(SetMaxPlaybackRate, void(int frequency_hz)); - MOCK_METHOD1(SetMaxBitrate, void(int max_bps)); - MOCK_METHOD1(SetMaxPayloadSize, void(int max_payload_size_bytes)); - MOCK_METHOD2(OnReceivedUplinkBandwidth, - void(int target_audio_bitrate_bps, - absl::optional probing_interval_ms)); - MOCK_METHOD1(OnReceivedUplinkPacketLossFraction, - void(float uplink_packet_loss_fraction)); - - MOCK_METHOD2(EnableAudioNetworkAdaptor, - bool(const std::string& config_string, RtcEventLog* event_log)); + MOCK_METHOD(int, SampleRateHz, (), (const, override)); + MOCK_METHOD(size_t, NumChannels, (), (const, override)); + MOCK_METHOD(int, RtpTimestampRateHz, (), (const, override)); + MOCK_METHOD(size_t, Num10MsFramesInNextPacket, (), (const, override)); + MOCK_METHOD(size_t, Max10MsFramesInAPacket, (), (const, override)); + MOCK_METHOD(int, GetTargetBitrate, (), (const, override)); + MOCK_METHOD((absl::optional>), + GetFrameLengthRange, + (), + (const, override)); + + MOCK_METHOD(void, Reset, (), (override)); + MOCK_METHOD(bool, SetFec, (bool enable), (override)); + MOCK_METHOD(bool, SetDtx, (bool enable), (override)); + MOCK_METHOD(bool, SetApplication, (Application application), (override)); + MOCK_METHOD(void, SetMaxPlaybackRate, (int frequency_hz), (override)); + MOCK_METHOD(void, + OnReceivedUplinkBandwidth, + (int target_audio_bitrate_bps, + 
absl::optional probing_interval_ms), + (override)); + MOCK_METHOD(void, + OnReceivedUplinkPacketLossFraction, + (float uplink_packet_loss_fraction), + (override)); + MOCK_METHOD(void, + OnReceivedOverhead, + (size_t overhead_bytes_per_packet), + (override)); + + MOCK_METHOD(bool, + EnableAudioNetworkAdaptor, + (const std::string& config_string, RtcEventLog*), + (override)); // Note, we explicitly chose not to create a mock for the Encode method. - MOCK_METHOD3(EncodeImpl, - EncodedInfo(uint32_t timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded)); + MOCK_METHOD(EncodedInfo, + EncodeImpl, + (uint32_t timestamp, + rtc::ArrayView audio, + rtc::Buffer*), + (override)); class FakeEncoding { public: diff --git a/test/mock_audio_encoder_factory.h b/test/mock_audio_encoder_factory.h index 3e774a39e9..392a4c11e2 100644 --- a/test/mock_audio_encoder_factory.h +++ b/test/mock_audio_encoder_factory.h @@ -24,23 +24,29 @@ namespace webrtc { class MockAudioEncoderFactory : public ::testing::NiceMock { public: - MOCK_METHOD0(GetSupportedEncoders, std::vector()); - MOCK_METHOD1(QueryAudioEncoder, - absl::optional(const SdpAudioFormat& format)); + MOCK_METHOD(std::vector, + GetSupportedEncoders, + (), + (override)); + MOCK_METHOD(absl::optional, + QueryAudioEncoder, + (const SdpAudioFormat& format), + (override)); std::unique_ptr MakeAudioEncoder( int payload_type, const SdpAudioFormat& format, - absl::optional codec_pair_id) { + absl::optional codec_pair_id) override { std::unique_ptr return_value; MakeAudioEncoderMock(payload_type, format, codec_pair_id, &return_value); return return_value; } - MOCK_METHOD4(MakeAudioEncoderMock, - void(int payload_type, - const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* return_value)); + MOCK_METHOD(void, + MakeAudioEncoderMock, + (int payload_type, + const SdpAudioFormat& format, + absl::optional codec_pair_id, + std::unique_ptr*)); // Creates a MockAudioEncoderFactory with no formats and that may not be // 
invoked to create a codec - useful for initializing a voice engine, for diff --git a/test/mock_frame_transformer.h b/test/mock_frame_transformer.h new file mode 100644 index 0000000000..617cda8a43 --- /dev/null +++ b/test/mock_frame_transformer.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_MOCK_FRAME_TRANSFORMER_H_ +#define TEST_MOCK_FRAME_TRANSFORMER_H_ + +#include +#include + +#include "api/frame_transformer_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockFrameTransformer : public FrameTransformerInterface { + public: + MOCK_METHOD(void, + Transform, + (std::unique_ptr), + (override)); + MOCK_METHOD(void, + RegisterTransformedFrameCallback, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, + RegisterTransformedFrameSinkCallback, + (rtc::scoped_refptr, uint32_t), + (override)); + MOCK_METHOD(void, UnregisterTransformedFrameCallback, (), (override)); + MOCK_METHOD(void, + UnregisterTransformedFrameSinkCallback, + (uint32_t), + (override)); +}; + +} // namespace webrtc + +#endif // TEST_MOCK_FRAME_TRANSFORMER_H_ diff --git a/test/mock_transformable_frame.h b/test/mock_transformable_frame.h new file mode 100644 index 0000000000..13764f56e8 --- /dev/null +++ b/test/mock_transformable_frame.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_MOCK_TRANSFORMABLE_FRAME_H_ +#define TEST_MOCK_TRANSFORMABLE_FRAME_H_ + +#include "api/frame_transformer_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockTransformableFrame : public TransformableFrameInterface { + public: + MOCK_METHOD(rtc::ArrayView, GetData, (), (const, override)); + MOCK_METHOD(void, SetData, (rtc::ArrayView), (override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); + MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); +}; + +} // namespace webrtc + +#endif // TEST_MOCK_TRANSFORMABLE_FRAME_H_ diff --git a/test/mock_transport.h b/test/mock_transport.h index 7eaf3c94c3..9c4dc4bf8d 100644 --- a/test/mock_transport.h +++ b/test/mock_transport.h @@ -21,11 +21,13 @@ class MockTransport : public Transport { MockTransport(); ~MockTransport(); - MOCK_METHOD3(SendRtp, - bool(const uint8_t* data, - size_t len, - const PacketOptions& options)); - MOCK_METHOD2(SendRtcp, bool(const uint8_t* data, size_t len)); + MOCK_METHOD(bool, + SendRtp, + (const uint8_t*, size_t, const PacketOptions&), + (override)); + MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t len), (override)); }; + } // namespace webrtc + #endif // TEST_MOCK_TRANSPORT_H_ diff --git a/test/network/BUILD.gn b/test/network/BUILD.gn index 4b01479c9b..081064f34c 100644 --- a/test/network/BUILD.gn +++ b/test/network/BUILD.gn @@ -10,13 +10,13 @@ import("../../webrtc.gni") rtc_library("emulated_network") { visibility = [ - "../../api:create_network_emulation_manager", ":*", + "../../api:create_network_emulation_manager", ] if (rtc_include_tests) { visibility += [ - "../scenario:*", "../peer_scenario:*", + "../scenario:*", ] } testonly = true @@ -35,9 +35,11 @@ rtc_library("emulated_network") { "traffic_route.h", ] deps = [ + "../../api:array_view", "../../api:network_emulation_manager_api", "../../api:simulated_network_api", 
"../../api:time_controller", + "../../api/numerics", "../../api/units:data_rate", "../../api/units:data_size", "../../api/units:time_delta", @@ -48,12 +50,15 @@ rtc_library("emulated_network") { "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_minmax", "../../rtc_base:task_queue_for_test", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/task_utils:repeating_task", "../../rtc_base/third_party/sigslot", "../../system_wrappers", "../scenario:column_printer", "../time_controller", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", @@ -72,7 +77,7 @@ rtc_library("network_emulation_unittest") { "../../rtc_base:gunit_helpers", "../../rtc_base:logging", "../../rtc_base:rtc_event", - "../../system_wrappers:system_wrappers", + "../../rtc_base/synchronization:mutex", ] } @@ -88,6 +93,7 @@ rtc_library("network_emulation_pc_unittest") { "../../api:simulated_network_api", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue:default_task_queue_factory", + "../../api/transport:field_trial_based_config", "../../call:simulated_network", "../../media:rtc_audio_video", "../../media:rtc_media_engine_defaults", @@ -108,14 +114,15 @@ rtc_library("cross_traffic_unittest") { deps = [ ":emulated_network", "../:test_support", + "../../api:network_emulation_manager_api", "../../api:simulated_network_api", "../../call:simulated_network", "../../rtc_base", "../../rtc_base:logging", "../../rtc_base:rtc_event", "//test/time_controller:time_controller", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("feedback_generator") { @@ -130,8 +137,8 @@ rtc_library("feedback_generator") { "../../call:simulated_network", "../../rtc_base:checks", "../time_controller", - "//third_party/abseil-cpp/absl/memory", ] + absl_deps = [ 
"//third_party/abseil-cpp/absl/memory" ] } rtc_library("feedback_generator_unittest") { diff --git a/test/network/cross_traffic.cc b/test/network/cross_traffic.cc index 972e89f6fc..56e7635142 100644 --- a/test/network/cross_traffic.cc +++ b/test/network/cross_traffic.cc @@ -144,13 +144,16 @@ void TcpMessageRouteImpl::SendMessage(size_t size, cwnd_ = 10; ssthresh_ = INFINITY; } - size_t data_left = size; - size_t kMaxPacketSize = 1200; + int64_t data_left = static_cast(size); + int64_t kMaxPacketSize = 1200; + int64_t kMinPacketSize = 4; Message message{std::move(handler)}; while (data_left > 0) { - size_t packet_size = std::min(data_left, kMaxPacketSize); + int64_t packet_size = std::min(data_left, kMaxPacketSize); int fragment_id = next_fragment_id_++; - pending_.push_back(MessageFragment{fragment_id, packet_size}); + pending_.push_back(MessageFragment{ + fragment_id, + static_cast(std::max(kMinPacketSize, packet_size))}); message.pending_fragment_ids.insert(fragment_id); data_left -= packet_size; } @@ -207,7 +210,7 @@ void TcpMessageRouteImpl::HandleLoss(Timestamp at_time) { } void TcpMessageRouteImpl::SendPackets(Timestamp at_time) { - const TimeDelta kPacketTimeout = TimeDelta::seconds(1); + const TimeDelta kPacketTimeout = TimeDelta::Seconds(1); int cwnd = std::ceil(cwnd_); int packets_to_send = std::max(cwnd - static_cast(in_flight_.size()), 0); while (packets_to_send-- > 0 && !pending_.empty()) { diff --git a/test/network/cross_traffic.h b/test/network/cross_traffic.h index 663881fc64..942b863bbf 100644 --- a/test/network/cross_traffic.h +++ b/test/network/cross_traffic.h @@ -29,10 +29,10 @@ namespace test { struct RandomWalkConfig { int random_seed = 1; - DataRate peak_rate = DataRate::kbps(100); - DataSize min_packet_size = DataSize::bytes(200); - TimeDelta min_packet_interval = TimeDelta::ms(1); - TimeDelta update_interval = TimeDelta::ms(200); + DataRate peak_rate = DataRate::KilobitsPerSec(100); + DataSize min_packet_size = DataSize::Bytes(200); + 
TimeDelta min_packet_interval = TimeDelta::Millis(1); + TimeDelta update_interval = TimeDelta::Millis(200); double variance = 0.6; double bias = -0.1; }; @@ -63,11 +63,11 @@ class RandomWalkCrossTraffic { }; struct PulsedPeaksConfig { - DataRate peak_rate = DataRate::kbps(100); - DataSize min_packet_size = DataSize::bytes(200); - TimeDelta min_packet_interval = TimeDelta::ms(1); - TimeDelta send_duration = TimeDelta::ms(100); - TimeDelta hold_duration = TimeDelta::ms(2000); + DataRate peak_rate = DataRate::KilobitsPerSec(100); + DataSize min_packet_size = DataSize::Bytes(200); + TimeDelta min_packet_interval = TimeDelta::Millis(1); + TimeDelta send_duration = TimeDelta::Millis(100); + TimeDelta hold_duration = TimeDelta::Millis(2000); }; class PulsedPeaksCrossTraffic { @@ -150,10 +150,10 @@ class TcpMessageRouteImpl final : public TcpMessageRoute { }; struct FakeTcpConfig { - DataSize packet_size = DataSize::bytes(1200); + DataSize packet_size = DataSize::Bytes(1200); DataSize send_limit = DataSize::PlusInfinity(); - TimeDelta process_interval = TimeDelta::ms(200); - TimeDelta packet_timeout = TimeDelta::seconds(1); + TimeDelta process_interval = TimeDelta::Millis(200); + TimeDelta packet_timeout = TimeDelta::Seconds(1); }; class FakeTcpCrossTraffic diff --git a/test/network/cross_traffic_unittest.cc b/test/network/cross_traffic_unittest.cc index 43967e693c..c8d848f154 100644 --- a/test/network/cross_traffic_unittest.cc +++ b/test/network/cross_traffic_unittest.cc @@ -16,6 +16,7 @@ #include #include "absl/memory/memory.h" +#include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" #include "call/simulated_network.h" #include "rtc_base/event.h" @@ -46,12 +47,14 @@ struct TrafficCounterFixture { SimulatedClock clock{0}; CountingReceiver counter; TaskQueueForTest task_queue_; - EmulatedEndpointImpl endpoint{/*id=*/1, - rtc::IPAddress(kTestIpAddress), - /*is_enabled=*/true, - /*type=*/rtc::AdapterType::ADAPTER_TYPE_UNKNOWN, - &task_queue_, 
- &clock}; + EmulatedEndpointImpl endpoint{ + /*id=*/1, + rtc::IPAddress(kTestIpAddress), + EmulatedEndpointConfig::StatsGatheringMode::kDefault, + /*is_enabled=*/true, + /*type=*/rtc::AdapterType::ADAPTER_TYPE_UNKNOWN, + &task_queue_, + &clock}; }; } // namespace @@ -70,15 +73,15 @@ TEST(CrossTrafficTest, PulsedPeaksCrossTraffic) { TrafficRoute traffic(&fixture.clock, &fixture.counter, &fixture.endpoint); PulsedPeaksConfig config; - config.peak_rate = DataRate::kbps(1000); - config.min_packet_size = DataSize::bytes(1); - config.min_packet_interval = TimeDelta::ms(25); - config.send_duration = TimeDelta::ms(500); - config.hold_duration = TimeDelta::ms(250); + config.peak_rate = DataRate::KilobitsPerSec(1000); + config.min_packet_size = DataSize::Bytes(1); + config.min_packet_interval = TimeDelta::Millis(25); + config.send_duration = TimeDelta::Millis(500); + config.hold_duration = TimeDelta::Millis(250); PulsedPeaksCrossTraffic pulsed_peaks(config, &traffic); - const auto kRunTime = TimeDelta::seconds(1); + const auto kRunTime = TimeDelta::Seconds(1); while (fixture.clock.TimeInMilliseconds() < kRunTime.ms()) { - pulsed_peaks.Process(Timestamp::ms(fixture.clock.TimeInMilliseconds())); + pulsed_peaks.Process(Timestamp::Millis(fixture.clock.TimeInMilliseconds())); fixture.clock.AdvanceTimeMilliseconds(1); } @@ -95,17 +98,17 @@ TEST(CrossTrafficTest, RandomWalkCrossTraffic) { TrafficRoute traffic(&fixture.clock, &fixture.counter, &fixture.endpoint); RandomWalkConfig config; - config.peak_rate = DataRate::kbps(1000); - config.min_packet_size = DataSize::bytes(1); - config.min_packet_interval = TimeDelta::ms(25); - config.update_interval = TimeDelta::ms(500); + config.peak_rate = DataRate::KilobitsPerSec(1000); + config.min_packet_size = DataSize::Bytes(1); + config.min_packet_interval = TimeDelta::Millis(25); + config.update_interval = TimeDelta::Millis(500); config.variance = 0.0; config.bias = 1.0; RandomWalkCrossTraffic random_walk(config, &traffic); - const auto 
kRunTime = TimeDelta::seconds(1); + const auto kRunTime = TimeDelta::Seconds(1); while (fixture.clock.TimeInMilliseconds() < kRunTime.ms()) { - random_walk.Process(Timestamp::ms(fixture.clock.TimeInMilliseconds())); + random_walk.Process(Timestamp::Millis(fixture.clock.TimeInMilliseconds())); fixture.clock.AdvanceTimeMilliseconds(1); } @@ -144,10 +147,10 @@ TEST(TcpMessageRouteTest, DeliveredOnLossyNetwork) { // If there was no loss, we would have delivered the message in ca 1 second, // with 50% it should take much longer. - net.time_controller()->AdvanceTime(TimeDelta::seconds(5)); + net.time_controller()->AdvanceTime(TimeDelta::Seconds(5)); ASSERT_EQ(deliver_count, 0); // But given enough time the messsage will be delivered, but only once. - net.time_controller()->AdvanceTime(TimeDelta::seconds(60)); + net.time_controller()->AdvanceTime(TimeDelta::Seconds(60)); EXPECT_EQ(deliver_count, 1); } diff --git a/test/network/emulated_network_manager.cc b/test/network/emulated_network_manager.cc index 2dc2fad5b0..ec8b2b3554 100644 --- a/test/network/emulated_network_manager.cc +++ b/test/network/emulated_network_manager.cc @@ -80,7 +80,8 @@ void EmulatedNetworkManager::StopUpdating() { } void EmulatedNetworkManager::GetStats( - std::function stats_callback) const { + std::function)> stats_callback) + const { task_queue_->PostTask([stats_callback, this]() { stats_callback(endpoints_container_->GetStats()); }); diff --git a/test/network/emulated_network_manager.h b/test/network/emulated_network_manager.h index 92555eee23..2321af0e04 100644 --- a/test/network/emulated_network_manager.h +++ b/test/network/emulated_network_manager.h @@ -11,12 +11,12 @@ #ifndef TEST_NETWORK_EMULATED_NETWORK_MANAGER_H_ #define TEST_NETWORK_EMULATED_NETWORK_MANAGER_H_ +#include #include #include #include "api/test/network_emulation_manager.h" #include "api/test/time_controller.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ip_address.h" #include "rtc_base/network.h" #include 
"rtc_base/socket_server.h" @@ -50,15 +50,18 @@ class EmulatedNetworkManager : public rtc::NetworkManagerBase, // EmulatedNetworkManagerInterface API rtc::Thread* network_thread() override { return network_thread_.get(); } rtc::NetworkManager* network_manager() override { return this; } - void GetStats( - std::function stats_callback) const override; + std::vector endpoints() const override { + return endpoints_container_->GetEndpoints(); + } + void GetStats(std::function)> + stats_callback) const override; private: void UpdateNetworksOnce(); void MaybeSignalNetworksChanged(); TaskQueueForTest* const task_queue_; - EndpointsContainer* const endpoints_container_; + const EndpointsContainer* const endpoints_container_; std::unique_ptr network_thread_; bool sent_first_update_ RTC_GUARDED_BY(network_thread_); diff --git a/test/network/fake_network_socket_server.cc b/test/network/fake_network_socket_server.cc index 60dfbe33d5..bee2846be7 100644 --- a/test/network/fake_network_socket_server.cc +++ b/test/network/fake_network_socket_server.cc @@ -280,7 +280,7 @@ EmulatedEndpointImpl* FakeNetworkSocketServer::GetEndpointNode( } void FakeNetworkSocketServer::Unregister(FakeNetworkSocket* socket) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); sockets_.erase(absl::c_find(sockets_, socket)); } @@ -297,7 +297,7 @@ rtc::AsyncSocket* FakeNetworkSocketServer::CreateAsyncSocket(int family, RTC_DCHECK(thread_) << "must be attached to thread before creating sockets"; FakeNetworkSocket* out = new FakeNetworkSocket(this, thread_); { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); sockets_.push_back(out); } return out; diff --git a/test/network/fake_network_socket_server.h b/test/network/fake_network_socket_server.h index 3a007588e3..2cf4d7c86d 100644 --- a/test/network/fake_network_socket_server.h +++ b/test/network/fake_network_socket_server.h @@ -16,9 +16,9 @@ #include "api/units/timestamp.h" #include "rtc_base/async_socket.h" -#include 
"rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/socket_server.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "system_wrappers/include/clock.h" #include "test/network/network_emulation.h" @@ -58,7 +58,7 @@ class FakeNetworkSocketServer : public rtc::SocketServer, rtc::Event wakeup_; rtc::Thread* thread_ = nullptr; - rtc::CriticalSection lock_; + Mutex lock_; std::vector sockets_ RTC_GUARDED_BY(lock_); }; diff --git a/test/network/feedback_generator.cc b/test/network/feedback_generator.cc index f2e345f6b2..f0903dd3e7 100644 --- a/test/network/feedback_generator.cc +++ b/test/network/feedback_generator.cc @@ -37,7 +37,7 @@ void FeedbackGeneratorImpl::Sleep(TimeDelta duration) { void FeedbackGeneratorImpl::SendPacket(size_t size) { SentPacket sent; sent.send_time = Now(); - sent.size = DataSize::bytes(size); + sent.size = DataSize::Bytes(size); sent.sequence_number = sequence_number_++; route_.SendRequest(size, sent); } diff --git a/test/network/feedback_generator_unittest.cc b/test/network/feedback_generator_unittest.cc index 54029d0d22..ca04bd3c9f 100644 --- a/test/network/feedback_generator_unittest.cc +++ b/test/network/feedback_generator_unittest.cc @@ -17,7 +17,7 @@ TEST(FeedbackGeneratorTest, ReportsFeedbackForSentPackets) { auto gen = CreateFeedbackGenerator(FeedbackGenerator::Config()); for (int i = 0; i < 10; ++i) { gen->SendPacket(kPacketSize); - gen->Sleep(TimeDelta::ms(50)); + gen->Sleep(TimeDelta::Millis(50)); } auto feedback_list = gen->PopFeedback(); EXPECT_GT(feedback_list.size(), 0u); diff --git a/test/network/network_emulation.cc b/test/network/network_emulation.cc index 57dcf51242..bf6c0683d4 100644 --- a/test/network/network_emulation.cc +++ b/test/network/network_emulation.cc @@ -14,12 +14,261 @@ #include #include +#include "api/numerics/samples_stats_counter.h" #include "api/units/data_size.h" #include "rtc_base/bind.h" #include "rtc_base/logging.h" namespace 
webrtc { +DataRate EmulatedNetworkOutgoingStatsImpl::AverageSendRate() const { + RTC_DCHECK_GE(packets_sent_, 2); + RTC_DCHECK(first_packet_sent_time_.IsFinite()); + RTC_DCHECK(last_packet_sent_time_.IsFinite()); + return (bytes_sent_ - first_sent_packet_size_) / + (last_packet_sent_time_ - first_packet_sent_time_); +} + +DataRate EmulatedNetworkIncomingStatsImpl::AverageReceiveRate() const { + RTC_DCHECK_GE(packets_received_, 2); + RTC_DCHECK(first_packet_received_time_.IsFinite()); + RTC_DCHECK(last_packet_received_time_.IsFinite()); + return (bytes_received_ - first_received_packet_size_) / + (last_packet_received_time_ - first_packet_received_time_); +} + +std::map> +EmulatedNetworkStatsImpl::OutgoingStatsPerDestination() const { + std::map> out; + for (const auto& entry : outgoing_stats_per_destination_) { + out.emplace(entry.first, std::make_unique( + *entry.second)); + } + return out; +} + +std::map> +EmulatedNetworkStatsImpl::IncomingStatsPerSource() const { + std::map> out; + for (const auto& entry : incoming_stats_per_source_) { + out.emplace(entry.first, std::make_unique( + *entry.second)); + } + return out; +} + +std::unique_ptr +EmulatedNetworkStatsImpl::GetOverallOutgoingStats() const { + EmulatedNetworkOutgoingStatsBuilder builder; + for (const auto& entry : outgoing_stats_per_destination_) { + builder.AddOutgoingStats(*entry.second); + } + return builder.Build(); +} + +std::unique_ptr +EmulatedNetworkStatsImpl::GetOverallIncomingStats() const { + EmulatedNetworkIncomingStatsBuilder builder; + for (const auto& entry : incoming_stats_per_source_) { + builder.AddIncomingStats(*entry.second); + } + return builder.Build(); +} + +EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder() { + sequence_checker_.Detach(); +} + +void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent( + Timestamp sent_time, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + 
RTC_CHECK_GE(packet_size, DataSize::Zero()); + if (first_packet_sent_time_.IsInfinite()) { + first_packet_sent_time_ = sent_time; + first_sent_packet_size_ = packet_size; + } + last_packet_sent_time_ = sent_time; + packets_sent_++; + bytes_sent_ += packet_size; + if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + sent_packets_size_counter_.AddSample(packet_size.bytes()); + } +} + +void EmulatedNetworkOutgoingStatsBuilder::AddOutgoingStats( + const EmulatedNetworkOutgoingStats& stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + packets_sent_ += stats.PacketsSent(); + bytes_sent_ += stats.BytesSent(); + sent_packets_size_counter_.AddSamples(stats.SentPacketsSizeCounter()); + if (first_packet_sent_time_ > stats.FirstPacketSentTime()) { + first_packet_sent_time_ = stats.FirstPacketSentTime(); + first_sent_packet_size_ = stats.FirstSentPacketSize(); + } + if (last_packet_sent_time_ < stats.LastPacketSentTime()) { + last_packet_sent_time_ = stats.LastPacketSentTime(); + } +} + +std::unique_ptr +EmulatedNetworkOutgoingStatsBuilder::Build() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return std::make_unique( + packets_sent_, bytes_sent_, sent_packets_size_counter_, + first_sent_packet_size_, first_packet_sent_time_, last_packet_sent_time_); +} + +EmulatedNetworkIncomingStatsBuilder::EmulatedNetworkIncomingStatsBuilder() { + sequence_checker_.Detach(); +} + +void EmulatedNetworkIncomingStatsBuilder::OnPacketDropped( + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + packets_dropped_++; + bytes_dropped_ += packet_size; + if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + dropped_packets_size_counter_.AddSample(packet_size.bytes()); + } +} + +void EmulatedNetworkIncomingStatsBuilder::OnPacketReceived( + Timestamp received_time, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + 
RTC_CHECK_GE(packet_size, DataSize::Zero()); + if (first_packet_received_time_.IsInfinite()) { + first_packet_received_time_ = received_time; + first_received_packet_size_ = packet_size; + } + last_packet_received_time_ = received_time; + packets_received_++; + bytes_received_ += packet_size; + if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + received_packets_size_counter_.AddSample(packet_size.bytes()); + } +} + +void EmulatedNetworkIncomingStatsBuilder::AddIncomingStats( + const EmulatedNetworkIncomingStats& stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + packets_received_ += stats.PacketsReceived(); + bytes_received_ += stats.BytesReceived(); + received_packets_size_counter_.AddSamples(stats.ReceivedPacketsSizeCounter()); + packets_dropped_ += stats.PacketsDropped(); + bytes_dropped_ += stats.BytesDropped(); + dropped_packets_size_counter_.AddSamples(stats.DroppedPacketsSizeCounter()); + if (first_packet_received_time_ > stats.FirstPacketReceivedTime()) { + first_packet_received_time_ = stats.FirstPacketReceivedTime(); + first_received_packet_size_ = stats.FirstReceivedPacketSize(); + } + if (last_packet_received_time_ < stats.LastPacketReceivedTime()) { + last_packet_received_time_ = stats.LastPacketReceivedTime(); + } +} + +std::unique_ptr +EmulatedNetworkIncomingStatsBuilder::Build() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return std::make_unique( + packets_received_, bytes_received_, received_packets_size_counter_, + packets_dropped_, bytes_dropped_, dropped_packets_size_counter_, + first_received_packet_size_, first_packet_received_time_, + last_packet_received_time_); +} + +EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder() { + sequence_checker_.Detach(); +} + +EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder( + rtc::IPAddress local_ip) { + local_addresses_.push_back(local_ip); + sequence_checker_.Detach(); +} + +void EmulatedNetworkStatsBuilder::OnPacketSent( + Timestamp queued_time, + Timestamp sent_time, 
+ rtc::IPAddress destination_ip, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (mode == EmulatedEndpointConfig::StatsGatheringMode::kDebug) { + sent_packets_queue_wait_time_us_.AddSample((sent_time - queued_time).us()); + } + outgoing_stats_per_destination_[destination_ip].OnPacketSent( + sent_time, packet_size, mode); +} + +void EmulatedNetworkStatsBuilder::OnPacketDropped( + rtc::IPAddress source_ip, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + incoming_stats_per_source_[source_ip].OnPacketDropped(packet_size, mode); +} + +void EmulatedNetworkStatsBuilder::OnPacketReceived( + Timestamp received_time, + rtc::IPAddress source_ip, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + incoming_stats_per_source_[source_ip].OnPacketReceived(received_time, + packet_size, mode); +} + +void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats( + const EmulatedNetworkStats& stats) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + // Append IPs from other endpoints stats to the builder. + for (const rtc::IPAddress& addr : stats.LocalAddresses()) { + local_addresses_.push_back(addr); + } + + sent_packets_queue_wait_time_us_.AddSamples( + stats.SentPacketsQueueWaitTimeUs()); + + // Add outgoing stats from other endpoints to the builder. + const std::map> + outgoing_stats_per_destination = stats.OutgoingStatsPerDestination(); + for (const auto& entry : outgoing_stats_per_destination) { + outgoing_stats_per_destination_[entry.first].AddOutgoingStats( + *entry.second); + } + + // Add incoming stats from other endpoints to the builder. 
+ const std::map> + incoming_stats_per_source = stats.IncomingStatsPerSource(); + for (const auto& entry : incoming_stats_per_source) { + incoming_stats_per_source_[entry.first].AddIncomingStats(*entry.second); + } +} + +std::unique_ptr EmulatedNetworkStatsBuilder::Build() + const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + std::map> + outgoing_stats; + for (const auto& entry : outgoing_stats_per_destination_) { + outgoing_stats.emplace(entry.first, entry.second.Build()); + } + std::map> + incoming_stats; + for (const auto& entry : incoming_stats_per_source_) { + incoming_stats.emplace(entry.first, entry.second.Build()); + } + return std::make_unique( + local_addresses_, sent_packets_queue_wait_time_us_, + std::move(outgoing_stats), std::move(incoming_stats)); +} + void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { task_queue_->PostTask([this, packet = std::move(packet)]() mutable { RTC_DCHECK_RUN_ON(task_queue_); @@ -40,7 +289,7 @@ void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { process_task_ = RepeatingTaskHandle::DelayedStart( task_queue_->Get(), std::max(TimeDelta::Zero(), - Timestamp::us(*next_time_us) - current_time), + Timestamp::Micros(*next_time_us) - current_time), [this]() { RTC_DCHECK_RUN_ON(task_queue_); Timestamp current_time = clock_->CurrentTime(); @@ -52,7 +301,7 @@ void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { return TimeDelta::Zero(); // This is ignored. 
} RTC_DCHECK_GE(*next_time_us, current_time.us()); - return Timestamp::us(*next_time_us) - current_time; + return Timestamp::Micros(*next_time_us) - current_time; }); }); } @@ -74,7 +323,7 @@ void LinkEmulation::Process(Timestamp at_time) { if (delivery_info.receive_time_us != PacketDeliveryInfo::kNotReceived) { packet->packet.arrival_time = - Timestamp::us(delivery_info.receive_time_us); + Timestamp::Micros(delivery_info.receive_time_us); receiver_->OnPacketReceived(std::move(packet->packet)); } while (!packets_.empty() && packets_.front().removed) { @@ -166,20 +415,24 @@ void EmulatedNetworkNode::ClearRoute(const rtc::IPAddress& receiver_ip, EmulatedNetworkNode::~EmulatedNetworkNode() = default; -EmulatedEndpointImpl::EmulatedEndpointImpl(uint64_t id, - const rtc::IPAddress& ip, - bool is_enabled, - rtc::AdapterType type, - rtc::TaskQueue* task_queue, - Clock* clock) +EmulatedEndpointImpl::EmulatedEndpointImpl( + uint64_t id, + const rtc::IPAddress& ip, + EmulatedEndpointConfig::StatsGatheringMode stats_gathering_mode, + bool is_enabled, + rtc::AdapterType type, + rtc::TaskQueue* task_queue, + Clock* clock) : id_(id), peer_local_addr_(ip), + stats_gathering_mode_(stats_gathering_mode), is_enabled_(is_enabled), type_(type), clock_(clock), task_queue_(task_queue), router_(task_queue_), - next_port_(kFirstEphemeralPort) { + next_port_(kFirstEphemeralPort), + stats_builder_(peer_local_addr_) { constexpr int kIPv4NetworkPrefixLength = 24; constexpr int kIPv6NetworkPrefixLength = 64; @@ -212,16 +465,15 @@ void EmulatedEndpointImpl::SendPacket(const rtc::SocketAddress& from, clock_->CurrentTime(), application_overhead); task_queue_->PostTask([this, packet = std::move(packet)]() mutable { RTC_DCHECK_RUN_ON(task_queue_); - Timestamp current_time = clock_->CurrentTime(); - if (stats_.first_packet_sent_time.IsInfinite()) { - stats_.first_packet_sent_time = current_time; - stats_.first_sent_packet_size = DataSize::bytes(packet.ip_packet_size()); + 
stats_builder_.OnPacketSent( + packet.arrival_time, clock_->CurrentTime(), packet.to.ipaddr(), + DataSize::Bytes(packet.ip_packet_size()), stats_gathering_mode_); + + if (packet.to.ipaddr() == peer_local_addr_) { + OnPacketReceived(std::move(packet)); + } else { + router_.OnPacketReceived(std::move(packet)); } - stats_.last_packet_sent_time = current_time; - stats_.packets_sent++; - stats_.bytes_sent += DataSize::bytes(packet.ip_packet_size()); - - router_.OnPacketReceived(std::move(packet)); }); } @@ -282,7 +534,9 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { << packet.to.ipaddr().ToString() << "; Receiver peer_local_addr_=" << peer_local_addr_.ToString(); rtc::CritScope crit(&receiver_lock_); - UpdateReceiveStats(packet); + stats_builder_.OnPacketReceived(clock_->CurrentTime(), packet.from.ipaddr(), + DataSize::Bytes(packet.ip_packet_size()), + stats_gathering_mode_); auto it = port_to_receiver_.find(packet.to.port()); if (it == port_to_receiver_.end()) { // It can happen, that remote peer closed connection, but there still some @@ -290,8 +544,9 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { // process: one peer closed connection, second still sending data. 
RTC_LOG(INFO) << "Drop packet: no receiver registered in " << id_ << " on port " << packet.to.port(); - stats_.packets_dropped++; - stats_.bytes_dropped += DataSize::bytes(packet.ip_packet_size()); + stats_builder_.OnPacketDropped(packet.from.ipaddr(), + DataSize::Bytes(packet.ip_packet_size()), + stats_gathering_mode_); return; } // Endpoint assumes frequent calls to bind and unbind methods, so it holds @@ -317,22 +572,9 @@ bool EmulatedEndpointImpl::Enabled() const { return is_enabled_; } -EmulatedNetworkStats EmulatedEndpointImpl::stats() { +std::unique_ptr EmulatedEndpointImpl::stats() const { RTC_DCHECK_RUN_ON(task_queue_); - return stats_; -} - -void EmulatedEndpointImpl::UpdateReceiveStats(const EmulatedIpPacket& packet) { - RTC_DCHECK_RUN_ON(task_queue_); - Timestamp current_time = clock_->CurrentTime(); - if (stats_.first_packet_received_time.IsInfinite()) { - stats_.first_packet_received_time = current_time; - stats_.first_received_packet_size = - DataSize::bytes(packet.ip_packet_size()); - } - stats_.last_packet_received_time = current_time; - stats_.packets_received++; - stats_.bytes_received += DataSize::bytes(packet.ip_packet_size()); + return stats_builder_.Build(); } EndpointsContainer::EndpointsContainer( @@ -371,39 +613,16 @@ EndpointsContainer::GetEnabledNetworks() const { return networks; } -EmulatedNetworkStats EndpointsContainer::GetStats() const { - EmulatedNetworkStats stats; +std::vector EndpointsContainer::GetEndpoints() const { + return std::vector(endpoints_.begin(), endpoints_.end()); +} + +std::unique_ptr EndpointsContainer::GetStats() const { + EmulatedNetworkStatsBuilder stats_builder; for (auto* endpoint : endpoints_) { - EmulatedNetworkStats endpoint_stats = endpoint->stats(); - stats.packets_sent += endpoint_stats.packets_sent; - stats.bytes_sent += endpoint_stats.bytes_sent; - stats.packets_received += endpoint_stats.packets_received; - stats.bytes_received += endpoint_stats.bytes_received; - stats.packets_dropped += 
endpoint_stats.packets_dropped; - stats.bytes_dropped += endpoint_stats.bytes_dropped; - if (stats.first_packet_received_time > - endpoint_stats.first_packet_received_time) { - stats.first_packet_received_time = - endpoint_stats.first_packet_received_time; - stats.first_received_packet_size = - endpoint_stats.first_received_packet_size; - } - if (stats.first_packet_sent_time > endpoint_stats.first_packet_sent_time) { - stats.first_packet_sent_time = endpoint_stats.first_packet_sent_time; - stats.first_sent_packet_size = endpoint_stats.first_sent_packet_size; - } - if (stats.last_packet_received_time.IsInfinite() || - stats.last_packet_received_time < - endpoint_stats.last_packet_received_time) { - stats.last_packet_received_time = - endpoint_stats.last_packet_received_time; - } - if (stats.last_packet_sent_time.IsInfinite() || - stats.last_packet_sent_time < endpoint_stats.last_packet_sent_time) { - stats.last_packet_sent_time = endpoint_stats.last_packet_sent_time; - } + stats_builder.AddEmulatedNetworkStats(*endpoint->stats()); } - return stats; + return stats_builder.Build(); } } // namespace webrtc diff --git a/test/network/network_emulation.h b/test/network/network_emulation.h index 75e9c2c78a..13d4386d0d 100644 --- a/test/network/network_emulation.h +++ b/test/network/network_emulation.h @@ -20,6 +20,8 @@ #include #include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/numerics/samples_stats_counter.h" #include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" #include "api/units/timestamp.h" @@ -27,13 +29,365 @@ #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/socket_address.h" +#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { +// This 
class is immutable and so thread safe. +class EmulatedNetworkOutgoingStatsImpl final + : public EmulatedNetworkOutgoingStats { + public: + EmulatedNetworkOutgoingStatsImpl( + int64_t packets_sent, + DataSize bytes_sent, + SamplesStatsCounter sent_packets_size_counter, + DataSize first_sent_packet_size, + Timestamp first_packet_sent_time, + Timestamp last_packet_sent_time) + : packets_sent_(packets_sent), + bytes_sent_(bytes_sent), + sent_packets_size_counter_(std::move(sent_packets_size_counter)), + first_sent_packet_size_(first_sent_packet_size), + first_packet_sent_time_(first_packet_sent_time), + last_packet_sent_time_(last_packet_sent_time) {} + explicit EmulatedNetworkOutgoingStatsImpl( + const EmulatedNetworkOutgoingStats& stats) + : packets_sent_(stats.PacketsSent()), + bytes_sent_(stats.BytesSent()), + sent_packets_size_counter_(stats.SentPacketsSizeCounter()), + first_sent_packet_size_(stats.FirstSentPacketSize()), + first_packet_sent_time_(stats.FirstPacketSentTime()), + last_packet_sent_time_(stats.LastPacketSentTime()) {} + ~EmulatedNetworkOutgoingStatsImpl() override = default; + + int64_t PacketsSent() const override { return packets_sent_; } + + DataSize BytesSent() const override { return bytes_sent_; } + + const SamplesStatsCounter& SentPacketsSizeCounter() const override { + return sent_packets_size_counter_; + } + + DataSize FirstSentPacketSize() const override { + return first_sent_packet_size_; + } + + Timestamp FirstPacketSentTime() const override { + return first_packet_sent_time_; + } + + Timestamp LastPacketSentTime() const override { + return last_packet_sent_time_; + } + + DataRate AverageSendRate() const override; + + private: + const int64_t packets_sent_; + const DataSize bytes_sent_; + const SamplesStatsCounter sent_packets_size_counter_; + const DataSize first_sent_packet_size_; + const Timestamp first_packet_sent_time_; + const Timestamp last_packet_sent_time_; +}; + +// This class is immutable and so thread safe. 
+class EmulatedNetworkIncomingStatsImpl final + : public EmulatedNetworkIncomingStats { + public: + EmulatedNetworkIncomingStatsImpl( + int64_t packets_received, + DataSize bytes_received, + SamplesStatsCounter received_packets_size_counter, + int64_t packets_dropped, + DataSize bytes_dropped, + SamplesStatsCounter dropped_packets_size_counter, + DataSize first_received_packet_size, + Timestamp first_packet_received_time, + Timestamp last_packet_received_time) + : packets_received_(packets_received), + bytes_received_(bytes_received), + received_packets_size_counter_(received_packets_size_counter), + packets_dropped_(packets_dropped), + bytes_dropped_(bytes_dropped), + dropped_packets_size_counter_(dropped_packets_size_counter), + first_received_packet_size_(first_received_packet_size), + first_packet_received_time_(first_packet_received_time), + last_packet_received_time_(last_packet_received_time) {} + explicit EmulatedNetworkIncomingStatsImpl( + const EmulatedNetworkIncomingStats& stats) + : packets_received_(stats.PacketsReceived()), + bytes_received_(stats.BytesReceived()), + received_packets_size_counter_(stats.ReceivedPacketsSizeCounter()), + packets_dropped_(stats.PacketsDropped()), + bytes_dropped_(stats.BytesDropped()), + dropped_packets_size_counter_(stats.DroppedPacketsSizeCounter()), + first_received_packet_size_(stats.FirstReceivedPacketSize()), + first_packet_received_time_(stats.FirstPacketReceivedTime()), + last_packet_received_time_(stats.LastPacketReceivedTime()) {} + ~EmulatedNetworkIncomingStatsImpl() override = default; + + int64_t PacketsReceived() const override { return packets_received_; } + + DataSize BytesReceived() const override { return bytes_received_; } + + const SamplesStatsCounter& ReceivedPacketsSizeCounter() const override { + return received_packets_size_counter_; + } + + int64_t PacketsDropped() const override { return packets_dropped_; } + + DataSize BytesDropped() const override { return bytes_dropped_; } + + const 
SamplesStatsCounter& DroppedPacketsSizeCounter() const override { + return dropped_packets_size_counter_; + } + + DataSize FirstReceivedPacketSize() const override { + return first_received_packet_size_; + } + + Timestamp FirstPacketReceivedTime() const override { + return first_packet_received_time_; + } + + Timestamp LastPacketReceivedTime() const override { + return last_packet_received_time_; + } + + DataRate AverageReceiveRate() const override; + + private: + const int64_t packets_received_; + const DataSize bytes_received_; + const SamplesStatsCounter received_packets_size_counter_; + const int64_t packets_dropped_; + const DataSize bytes_dropped_; + const SamplesStatsCounter dropped_packets_size_counter_; + const DataSize first_received_packet_size_; + const Timestamp first_packet_received_time_; + const Timestamp last_packet_received_time_; +}; + +// This class is immutable and so is thread safe. +class EmulatedNetworkStatsImpl final : public EmulatedNetworkStats { + public: + EmulatedNetworkStatsImpl( + std::vector local_addresses, + SamplesStatsCounter sent_packets_queue_wait_time_us, + std::map> + outgoing_stats_per_destination, + std::map> + incoming_stats_per_source) + : local_addresses_(std::move(local_addresses)), + sent_packets_queue_wait_time_us_(sent_packets_queue_wait_time_us), + outgoing_stats_per_destination_( + std::move(outgoing_stats_per_destination)), + incoming_stats_per_source_(std::move(incoming_stats_per_source)), + overall_outgoing_stats_(GetOverallOutgoingStats()), + overall_incoming_stats_(GetOverallIncomingStats()) {} + ~EmulatedNetworkStatsImpl() override = default; + + std::vector LocalAddresses() const override { + return local_addresses_; + } + + int64_t PacketsSent() const override { + return overall_outgoing_stats_->PacketsSent(); + } + + DataSize BytesSent() const override { + return overall_outgoing_stats_->BytesSent(); + } + + const SamplesStatsCounter& SentPacketsSizeCounter() const override { + return 
overall_outgoing_stats_->SentPacketsSizeCounter(); + } + + const SamplesStatsCounter& SentPacketsQueueWaitTimeUs() const override { + return sent_packets_queue_wait_time_us_; + } + + DataSize FirstSentPacketSize() const override { + return overall_outgoing_stats_->FirstSentPacketSize(); + } + + Timestamp FirstPacketSentTime() const override { + return overall_outgoing_stats_->FirstPacketSentTime(); + } + + Timestamp LastPacketSentTime() const override { + return overall_outgoing_stats_->LastPacketSentTime(); + } + + DataRate AverageSendRate() const override { + return overall_outgoing_stats_->AverageSendRate(); + } + + int64_t PacketsReceived() const override { + return overall_incoming_stats_->PacketsReceived(); + } + + DataSize BytesReceived() const override { + return overall_incoming_stats_->BytesReceived(); + } + + const SamplesStatsCounter& ReceivedPacketsSizeCounter() const override { + return overall_incoming_stats_->ReceivedPacketsSizeCounter(); + } + + int64_t PacketsDropped() const override { + return overall_incoming_stats_->PacketsDropped(); + } + + DataSize BytesDropped() const override { + return overall_incoming_stats_->BytesDropped(); + } + + const SamplesStatsCounter& DroppedPacketsSizeCounter() const override { + return overall_incoming_stats_->DroppedPacketsSizeCounter(); + } + + DataSize FirstReceivedPacketSize() const override { + return overall_incoming_stats_->FirstReceivedPacketSize(); + } + + Timestamp FirstPacketReceivedTime() const override { + return overall_incoming_stats_->FirstPacketReceivedTime(); + } + + Timestamp LastPacketReceivedTime() const override { + return overall_incoming_stats_->LastPacketReceivedTime(); + } + + DataRate AverageReceiveRate() const override { + return overall_incoming_stats_->AverageReceiveRate(); + } + + std::map> + OutgoingStatsPerDestination() const override; + + std::map> + IncomingStatsPerSource() const override; + + private: + std::unique_ptr GetOverallOutgoingStats() const; + std::unique_ptr 
GetOverallIncomingStats() const; + + const std::vector local_addresses_; + const SamplesStatsCounter sent_packets_queue_wait_time_us_; + const std::map> + outgoing_stats_per_destination_; + const std::map> + incoming_stats_per_source_; + const std::unique_ptr overall_outgoing_stats_; + const std::unique_ptr overall_incoming_stats_; +}; + +class EmulatedNetworkOutgoingStatsBuilder { + public: + EmulatedNetworkOutgoingStatsBuilder(); + + void OnPacketSent(Timestamp sent_time, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode); + + void AddOutgoingStats(const EmulatedNetworkOutgoingStats& stats); + + std::unique_ptr Build() const; + + private: + SequenceChecker sequence_checker_; + + int64_t packets_sent_ RTC_GUARDED_BY(sequence_checker_) = 0; + DataSize bytes_sent_ RTC_GUARDED_BY(sequence_checker_) = DataSize::Zero(); + SamplesStatsCounter sent_packets_size_counter_ + RTC_GUARDED_BY(sequence_checker_); + DataSize first_sent_packet_size_ RTC_GUARDED_BY(sequence_checker_) = + DataSize::Zero(); + Timestamp first_packet_sent_time_ RTC_GUARDED_BY(sequence_checker_) = + Timestamp::PlusInfinity(); + Timestamp last_packet_sent_time_ RTC_GUARDED_BY(sequence_checker_) = + Timestamp::MinusInfinity(); +}; + +class EmulatedNetworkIncomingStatsBuilder { + public: + EmulatedNetworkIncomingStatsBuilder(); + + void OnPacketDropped(DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode); + + void OnPacketReceived(Timestamp received_time, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode); + + // Adds stats collected from another endpoints to the builder. 
+ void AddIncomingStats(const EmulatedNetworkIncomingStats& stats); + + std::unique_ptr Build() const; + + private: + SequenceChecker sequence_checker_; + + int64_t packets_received_ RTC_GUARDED_BY(sequence_checker_) = 0; + DataSize bytes_received_ RTC_GUARDED_BY(sequence_checker_) = DataSize::Zero(); + SamplesStatsCounter received_packets_size_counter_ + RTC_GUARDED_BY(sequence_checker_); + int64_t packets_dropped_ RTC_GUARDED_BY(sequence_checker_) = 0; + DataSize bytes_dropped_ RTC_GUARDED_BY(sequence_checker_) = DataSize::Zero(); + SamplesStatsCounter dropped_packets_size_counter_ + RTC_GUARDED_BY(sequence_checker_); + DataSize first_received_packet_size_ RTC_GUARDED_BY(sequence_checker_) = + DataSize::Zero(); + Timestamp first_packet_received_time_ RTC_GUARDED_BY(sequence_checker_) = + Timestamp::PlusInfinity(); + Timestamp last_packet_received_time_ RTC_GUARDED_BY(sequence_checker_) = + Timestamp::MinusInfinity(); +}; + +// All methods of EmulatedNetworkStatsBuilder have to be used on a single +// thread. It may be created on another thread. 
+class EmulatedNetworkStatsBuilder { + public: + EmulatedNetworkStatsBuilder(); + explicit EmulatedNetworkStatsBuilder(rtc::IPAddress local_ip); + + void OnPacketSent(Timestamp queued_time, + Timestamp sent_time, + rtc::IPAddress destination_ip, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode); + + void OnPacketDropped(rtc::IPAddress source_ip, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode); + + void OnPacketReceived(Timestamp received_time, + rtc::IPAddress source_ip, + DataSize packet_size, + EmulatedEndpointConfig::StatsGatheringMode mode); + + void AddEmulatedNetworkStats(const EmulatedNetworkStats& stats); + + std::unique_ptr Build() const; + + private: + SequenceChecker sequence_checker_; + + std::vector local_addresses_ + RTC_GUARDED_BY(sequence_checker_); + SamplesStatsCounter sent_packets_queue_wait_time_us_; + std::map + outgoing_stats_per_destination_ RTC_GUARDED_BY(sequence_checker_); + std::map + incoming_stats_per_source_ RTC_GUARDED_BY(sequence_checker_); +}; class LinkEmulation : public EmulatedNetworkReceiverInterface { public: @@ -128,12 +482,14 @@ class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface { // from other EmulatedNetworkNodes. 
class EmulatedEndpointImpl : public EmulatedEndpoint { public: - EmulatedEndpointImpl(uint64_t id, - const rtc::IPAddress& ip, - bool is_enabled, - rtc::AdapterType type, - rtc::TaskQueue* task_queue, - Clock* clock); + EmulatedEndpointImpl( + uint64_t id, + const rtc::IPAddress& ip, + EmulatedEndpointConfig::StatsGatheringMode stats_gathering_mode, + bool is_enabled, + rtc::AdapterType type, + rtc::TaskQueue* task_queue, + Clock* clock); ~EmulatedEndpointImpl() override; uint64_t GetId() const; @@ -161,19 +517,19 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { const rtc::Network& network() const { return *network_.get(); } - EmulatedNetworkStats stats() override; + std::unique_ptr stats() const; private: static constexpr uint16_t kFirstEphemeralPort = 49152; uint16_t NextPort() RTC_EXCLUSIVE_LOCKS_REQUIRED(receiver_lock_); - void UpdateReceiveStats(const EmulatedIpPacket& packet); - rtc::CriticalSection receiver_lock_; + rtc::RecursiveCriticalSection receiver_lock_; rtc::ThreadChecker enabled_state_checker_; - uint64_t id_; + const uint64_t id_; // Peer's local IP address for this endpoint network interface. const rtc::IPAddress peer_local_addr_; + const EmulatedEndpointConfig::StatsGatheringMode stats_gathering_mode_; bool is_enabled_ RTC_GUARDED_BY(enabled_state_checker_); const rtc::AdapterType type_; Clock* const clock_; @@ -185,7 +541,7 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { std::map port_to_receiver_ RTC_GUARDED_BY(receiver_lock_); - EmulatedNetworkStats stats_ RTC_GUARDED_BY(task_queue_); + EmulatedNetworkStatsBuilder stats_builder_ RTC_GUARDED_BY(task_queue_); }; class EmulatedRoute { @@ -200,6 +556,8 @@ class EmulatedRoute { EmulatedEndpointImpl* to; bool active; }; + +// This object is immutable and so thread safe. class EndpointsContainer { public: explicit EndpointsContainer( @@ -211,7 +569,8 @@ class EndpointsContainer { // Returns list of networks for enabled endpoints. 
Caller takes ownership of // returned rtc::Network objects. std::vector> GetEnabledNetworks() const; - EmulatedNetworkStats GetStats() const; + std::vector GetEndpoints() const; + std::unique_ptr GetStats() const; private: const std::vector endpoints_; diff --git a/test/network/network_emulation_manager.cc b/test/network/network_emulation_manager.cc index 3be1185710..4a2e31e0f9 100644 --- a/test/network/network_emulation_manager.cc +++ b/test/network/network_emulation_manager.cc @@ -36,7 +36,7 @@ std::unique_ptr CreateTimeController(TimeMode mode) { case TimeMode::kSimulated: // Using an offset of 100000 to get nice fixed width and readable // timestamps in typical test scenarios. - const Timestamp kSimulatedStartTime = Timestamp::seconds(100000); + const Timestamp kSimulatedStartTime = Timestamp::Seconds(100000); return std::make_unique( kSimulatedStartTime); } @@ -98,8 +98,8 @@ EmulatedEndpoint* NetworkEmulationManagerImpl::CreateEndpoint( bool res = used_ip_addresses_.insert(*ip).second; RTC_CHECK(res) << "IP=" << ip->ToString() << " already in use"; auto node = std::make_unique( - next_node_id_++, *ip, config.start_as_enabled, config.type, &task_queue_, - clock_); + next_node_id_++, *ip, config.stats_gathering_mode, + config.start_as_enabled, config.type, &task_queue_, clock_); EmulatedEndpoint* out = node.get(); endpoints_.push_back(std::move(node)); return out; @@ -295,6 +295,22 @@ NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( return out; } +void NetworkEmulationManagerImpl::GetStats( + rtc::ArrayView endpoints, + std::function)> stats_callback) { + task_queue_.PostTask([endpoints, stats_callback]() { + EmulatedNetworkStatsBuilder stats_builder; + for (auto* endpoint : endpoints) { + // It's safe to cast here because EmulatedEndpointImpl can be the only + // implementation of EmulatedEndpoint, because only it has access to + // EmulatedEndpoint constructor. 
+ auto endpoint_impl = static_cast(endpoint); + stats_builder.AddEmulatedNetworkStats(*endpoint_impl->stats()); + } + stats_callback(stats_builder.Build()); + }); +} + absl::optional NetworkEmulationManagerImpl::GetNextIPv4Address() { uint32_t addresses_count = kMaxIPv4Address - kMinIPv4Address; diff --git a/test/network/network_emulation_manager.h b/test/network/network_emulation_manager.h index 2b33fa1575..7532b0a88f 100644 --- a/test/network/network_emulation_manager.h +++ b/test/network/network_emulation_manager.h @@ -17,6 +17,7 @@ #include #include +#include "api/array_view.h" #include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" @@ -83,6 +84,10 @@ class NetworkEmulationManagerImpl : public NetworkEmulationManager { EmulatedNetworkManagerInterface* CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) override; + void GetStats(rtc::ArrayView endpoints, + std::function)> + stats_callback) override; + TimeController* time_controller() override { return time_controller_.get(); } Timestamp Now() const; diff --git a/test/network/network_emulation_pc_unittest.cc b/test/network/network_emulation_pc_unittest.cc index e04da34076..3d0140f5a2 100644 --- a/test/network/network_emulation_pc_unittest.cc +++ b/test/network/network_emulation_pc_unittest.cc @@ -16,6 +16,7 @@ #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" #include "call/simulated_network.h" #include "media/engine/webrtc_media_engine.h" #include "media/engine/webrtc_media_engine_defaults.h" @@ -59,6 +60,7 @@ rtc::scoped_refptr CreatePeerConnectionFactory( std::make_unique(pcf_deps.task_queue_factory.get()); pcf_deps.network_thread = network_thread; pcf_deps.signaling_thread = signaling_thread; + pcf_deps.trials = std::make_unique(); cricket::MediaEngineDependencies media_deps; 
media_deps.task_queue_factory = pcf_deps.task_queue_factory.get(); media_deps.adm = TestAudioDeviceModule::Create( @@ -67,6 +69,7 @@ rtc::scoped_refptr CreatePeerConnectionFactory( kSamplingFrequency), TestAudioDeviceModule::CreateDiscardRenderer(kSamplingFrequency), /*speed=*/1.f); + media_deps.trials = pcf_deps.trials.get(); SetMediaEngineDefaults(&media_deps); pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); return CreateModularPeerConnectionFactory(std::move(pcf_deps)); diff --git a/test/network/network_emulation_unittest.cc b/test/network/network_emulation_unittest.cc index 70dfa77930..476906fc87 100644 --- a/test/network/network_emulation_unittest.cc +++ b/test/network/network_emulation_unittest.cc @@ -19,7 +19,7 @@ #include "call/simulated_network.h" #include "rtc_base/event.h" #include "rtc_base/gunit.h" -#include "system_wrappers/include/sleep.h" +#include "rtc_base/synchronization/mutex.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/network/network_emulation_manager.h" @@ -28,8 +28,10 @@ namespace webrtc { namespace test { namespace { -constexpr TimeDelta kNetworkPacketWaitTimeout = TimeDelta::Millis<100>(); -constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds<1>(); +using ::testing::ElementsAreArray; + +constexpr TimeDelta kNetworkPacketWaitTimeout = TimeDelta::Millis(100); +constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1); constexpr int kOverheadIpv4Udp = 20 + 8; class SocketReader : public sigslot::has_slots<> { @@ -48,12 +50,12 @@ class SocketReader : public sigslot::has_slots<> { int64_t timestamp; len_ = socket_->Recv(buf_, size_, ×tamp); - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); received_count_++; } int ReceivedCount() { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return received_count_; } @@ -64,13 +66,13 @@ class SocketReader : public sigslot::has_slots<> { size_t size_; int len_; - rtc::CriticalSection lock_; + Mutex lock_; int received_count_ 
RTC_GUARDED_BY(lock_) = 0; }; class MockReceiver : public EmulatedNetworkReceiverInterface { public: - MOCK_METHOD1(OnPacketReceived, void(EmulatedIpPacket packet)); + MOCK_METHOD(void, OnPacketReceived, (EmulatedIpPacket packet), (override)); }; class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test { @@ -233,7 +235,7 @@ TEST(NetworkEmulationManagerTest, Run) { [&]() { s2->Send(data.data(), data.size()); }); } - network_manager.time_controller()->AdvanceTime(TimeDelta::seconds(1)); + network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); EXPECT_EQ(r1.ReceivedCount(), 1000); EXPECT_EQ(r2.ReceivedCount(), 1000); @@ -244,22 +246,83 @@ TEST(NetworkEmulationManagerTest, Run) { const int64_t single_packet_size = data.size() + kOverheadIpv4Udp; std::atomic received_stats_count{0}; - nt1->GetStats([&](EmulatedNetworkStats st) { - EXPECT_EQ(st.packets_sent, 2000l); - EXPECT_EQ(st.bytes_sent.bytes(), single_packet_size * 2000l); - EXPECT_EQ(st.packets_received, 2000l); - EXPECT_EQ(st.bytes_received.bytes(), single_packet_size * 2000l); - EXPECT_EQ(st.packets_dropped, 0l); - EXPECT_EQ(st.bytes_dropped.bytes(), 0l); + nt1->GetStats([&](std::unique_ptr st) { + EXPECT_EQ(st->PacketsSent(), 2000l); + EXPECT_EQ(st->BytesSent().bytes(), single_packet_size * 2000l); + EXPECT_THAT(st->LocalAddresses(), + ElementsAreArray({alice_endpoint->GetPeerLocalAddress()})); + EXPECT_EQ(st->PacketsReceived(), 2000l); + EXPECT_EQ(st->BytesReceived().bytes(), single_packet_size * 2000l); + EXPECT_EQ(st->PacketsDropped(), 0l); + EXPECT_EQ(st->BytesDropped().bytes(), 0l); + + rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); + std::map> + source_st = st->IncomingStatsPerSource(); + ASSERT_EQ(source_st.size(), 1lu); + EXPECT_EQ(source_st.at(bob_ip)->PacketsReceived(), 2000l); + EXPECT_EQ(source_st.at(bob_ip)->BytesReceived().bytes(), + single_packet_size * 2000l); + EXPECT_EQ(source_st.at(bob_ip)->PacketsDropped(), 0l); + 
EXPECT_EQ(source_st.at(bob_ip)->BytesDropped().bytes(), 0l); + + std::map> + dest_st = st->OutgoingStatsPerDestination(); + ASSERT_EQ(dest_st.size(), 1lu); + EXPECT_EQ(dest_st.at(bob_ip)->PacketsSent(), 2000l); + EXPECT_EQ(dest_st.at(bob_ip)->BytesSent().bytes(), + single_packet_size * 2000l); + + // No debug stats are collected by default. + EXPECT_TRUE(st->SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st->SentPacketsQueueWaitTimeUs().IsEmpty()); + EXPECT_TRUE(st->ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st->DroppedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(dest_st.at(bob_ip)->SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(source_st.at(bob_ip)->ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(source_st.at(bob_ip)->DroppedPacketsSizeCounter().IsEmpty()); + received_stats_count++; }); - nt2->GetStats([&](EmulatedNetworkStats st) { - EXPECT_EQ(st.packets_sent, 2000l); - EXPECT_EQ(st.bytes_sent.bytes(), single_packet_size * 2000l); - EXPECT_EQ(st.packets_received, 2000l); - EXPECT_EQ(st.bytes_received.bytes(), single_packet_size * 2000l); - EXPECT_EQ(st.packets_dropped, 0l); - EXPECT_EQ(st.bytes_dropped.bytes(), 0l); + nt2->GetStats([&](std::unique_ptr st) { + EXPECT_EQ(st->PacketsSent(), 2000l); + EXPECT_EQ(st->BytesSent().bytes(), single_packet_size * 2000l); + EXPECT_THAT(st->LocalAddresses(), + ElementsAreArray({bob_endpoint->GetPeerLocalAddress()})); + EXPECT_EQ(st->PacketsReceived(), 2000l); + EXPECT_EQ(st->BytesReceived().bytes(), single_packet_size * 2000l); + EXPECT_EQ(st->PacketsDropped(), 0l); + EXPECT_EQ(st->BytesDropped().bytes(), 0l); + EXPECT_GT(st->FirstReceivedPacketSize(), DataSize::Zero()); + EXPECT_TRUE(st->FirstPacketReceivedTime().IsFinite()); + EXPECT_TRUE(st->LastPacketReceivedTime().IsFinite()); + + rtc::IPAddress alice_ip = alice_endpoint->GetPeerLocalAddress(); + std::map> + source_st = st->IncomingStatsPerSource(); + ASSERT_EQ(source_st.size(), 1lu); + EXPECT_EQ(source_st.at(alice_ip)->PacketsReceived(), 
2000l); + EXPECT_EQ(source_st.at(alice_ip)->BytesReceived().bytes(), + single_packet_size * 2000l); + EXPECT_EQ(source_st.at(alice_ip)->PacketsDropped(), 0l); + EXPECT_EQ(source_st.at(alice_ip)->BytesDropped().bytes(), 0l); + + std::map> + dest_st = st->OutgoingStatsPerDestination(); + ASSERT_EQ(dest_st.size(), 1lu); + EXPECT_EQ(dest_st.at(alice_ip)->PacketsSent(), 2000l); + EXPECT_EQ(dest_st.at(alice_ip)->BytesSent().bytes(), + single_packet_size * 2000l); + + // No debug stats are collected by default. + EXPECT_TRUE(st->SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st->SentPacketsQueueWaitTimeUs().IsEmpty()); + EXPECT_TRUE(st->ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(st->DroppedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(dest_st.at(alice_ip)->SentPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(source_st.at(alice_ip)->ReceivedPacketsSizeCounter().IsEmpty()); + EXPECT_TRUE(source_st.at(alice_ip)->DroppedPacketsSizeCounter().IsEmpty()); + received_stats_count++; }); ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 2, @@ -267,6 +330,105 @@ TEST(NetworkEmulationManagerTest, Run) { *network_manager.time_controller()); } +TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { + NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + + EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( + std::make_unique(BuiltInNetworkBehaviorConfig())); + EmulatedNetworkNode* bob_node = network_manager.CreateEmulatedNode( + std::make_unique(BuiltInNetworkBehaviorConfig())); + EmulatedEndpointConfig debug_config; + debug_config.stats_gathering_mode = + EmulatedEndpointConfig::StatsGatheringMode::kDebug; + EmulatedEndpoint* alice_endpoint = + network_manager.CreateEndpoint(debug_config); + EmulatedEndpoint* bob_endpoint = + network_manager.CreateEndpoint(EmulatedEndpointConfig()); + network_manager.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint); + network_manager.CreateRoute(bob_endpoint, {bob_node}, 
alice_endpoint); + + EmulatedNetworkManagerInterface* nt1 = + network_manager.CreateEmulatedNetworkManagerInterface({alice_endpoint}); + EmulatedNetworkManagerInterface* nt2 = + network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint}); + + rtc::Thread* t1 = nt1->network_thread(); + rtc::Thread* t2 = nt2->network_thread(); + + rtc::CopyOnWriteBuffer data("Hello"); + for (uint64_t j = 0; j < 2; j++) { + auto* s1 = t1->socketserver()->CreateAsyncSocket(AF_INET, SOCK_DGRAM); + auto* s2 = t2->socketserver()->CreateAsyncSocket(AF_INET, SOCK_DGRAM); + + SocketReader r1(s1, t1); + SocketReader r2(s2, t2); + + rtc::SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); + rtc::SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); + + t1->Invoke(RTC_FROM_HERE, [&] { + s1->Bind(a1); + a1 = s1->GetLocalAddress(); + }); + t2->Invoke(RTC_FROM_HERE, [&] { + s2->Bind(a2); + a2 = s2->GetLocalAddress(); + }); + + t1->Invoke(RTC_FROM_HERE, [&] { s1->Connect(a2); }); + t2->Invoke(RTC_FROM_HERE, [&] { s2->Connect(a1); }); + + for (uint64_t i = 0; i < 1000; i++) { + t1->PostTask(RTC_FROM_HERE, + [&]() { s1->Send(data.data(), data.size()); }); + t2->PostTask(RTC_FROM_HERE, + [&]() { s2->Send(data.data(), data.size()); }); + } + + network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); + + EXPECT_EQ(r1.ReceivedCount(), 1000); + EXPECT_EQ(r2.ReceivedCount(), 1000); + + t1->Invoke(RTC_FROM_HERE, [&] { delete s1; }); + t2->Invoke(RTC_FROM_HERE, [&] { delete s2; }); + } + + const int64_t single_packet_size = data.size() + kOverheadIpv4Udp; + std::atomic received_stats_count{0}; + nt1->GetStats([&](std::unique_ptr st) { + rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); + std::map> + source_st = st->IncomingStatsPerSource(); + ASSERT_EQ(source_st.size(), 1lu); + + std::map> + dest_st = st->OutgoingStatsPerDestination(); + ASSERT_EQ(dest_st.size(), 1lu); + + // No debug stats are collected by default. 
+ EXPECT_EQ(st->SentPacketsSizeCounter().NumSamples(), 2000l); + EXPECT_EQ(st->ReceivedPacketsSizeCounter().GetAverage(), + single_packet_size); + EXPECT_EQ(st->SentPacketsQueueWaitTimeUs().NumSamples(), 2000l); + EXPECT_LT(st->SentPacketsQueueWaitTimeUs().GetMax(), 1); + EXPECT_TRUE(st->DroppedPacketsSizeCounter().IsEmpty()); + EXPECT_EQ(dest_st.at(bob_ip)->SentPacketsSizeCounter().NumSamples(), 2000l); + EXPECT_EQ(dest_st.at(bob_ip)->SentPacketsSizeCounter().GetAverage(), + single_packet_size); + EXPECT_EQ(source_st.at(bob_ip)->ReceivedPacketsSizeCounter().NumSamples(), + 2000l); + EXPECT_EQ(source_st.at(bob_ip)->ReceivedPacketsSizeCounter().GetAverage(), + single_packet_size); + EXPECT_TRUE(source_st.at(bob_ip)->DroppedPacketsSizeCounter().IsEmpty()); + + received_stats_count++; + }); + ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 1, + kStatsWaitTimeout.ms(), + *network_manager.time_controller()); +} + TEST(NetworkEmulationManagerTest, ThroughputStats) { NetworkEmulationManagerImpl network_manager(TimeMode::kRealTime); @@ -315,7 +477,7 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { // Send 11 packets, totalizing 1 second between the first and the last. 
const int kNumPacketsSent = 11; - const TimeDelta kDelay = TimeDelta::ms(100); + const TimeDelta kDelay = TimeDelta::Millis(100); for (int i = 0; i < kNumPacketsSent; i++) { t1->PostTask(RTC_FROM_HERE, [&]() { s1->Send(data.data(), data.size()); }); t2->PostTask(RTC_FROM_HERE, [&]() { s2->Send(data.data(), data.size()); }); @@ -323,14 +485,14 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { } std::atomic received_stats_count{0}; - nt1->GetStats([&](EmulatedNetworkStats st) { - EXPECT_EQ(st.packets_sent, kNumPacketsSent); - EXPECT_EQ(st.bytes_sent.bytes(), kSinglePacketSize * kNumPacketsSent); + nt1->GetStats([&](std::unique_ptr st) { + EXPECT_EQ(st->PacketsSent(), kNumPacketsSent); + EXPECT_EQ(st->BytesSent().bytes(), kSinglePacketSize * kNumPacketsSent); const double tolerance = 0.95; // Accept 5% tolerance for timing. - EXPECT_GE(st.last_packet_sent_time - st.first_packet_sent_time, + EXPECT_GE(st->LastPacketSentTime() - st->FirstPacketSentTime(), (kNumPacketsSent - 1) * kDelay * tolerance); - EXPECT_GT(st.AverageSendRate().bps(), 0); + EXPECT_GT(st->AverageSendRate().bps(), 0); received_stats_count++; }); @@ -392,5 +554,19 @@ TEST_F(NetworkEmulationManagerThreeNodesRoutingTest, SendPacketsAndValidateDelivery(); } +TEST(NetworkEmulationManagerTest, EndpointLoopback) { + NetworkEmulationManagerImpl network_manager(TimeMode::kSimulated); + auto endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); + + MockReceiver receiver; + EXPECT_CALL(receiver, OnPacketReceived(::testing::_)).Times(1); + ASSERT_EQ(endpoint->BindReceiver(80, &receiver), 80); + + endpoint->SendPacket(rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80), + rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80), + "Hello"); + network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); +} + } // namespace test } // namespace webrtc diff --git a/test/pc/e2e/BUILD.gn b/test/pc/e2e/BUILD.gn index 9aef78a35d..3901297063 100644 --- a/test/pc/e2e/BUILD.gn +++ 
b/test/pc/e2e/BUILD.gn @@ -8,508 +8,760 @@ import("../../../webrtc.gni") -group("e2e") { - testonly = true - - deps = [ - ":default_encoded_image_data_injector", - ":encoded_image_data_injector_api", - ":example_video_quality_analyzer", - ":id_generator", - ":quality_analyzing_video_decoder", - ":quality_analyzing_video_encoder", - ":single_process_encoded_image_data_injector", - ] - if (rtc_include_tests) { - deps += [ - ":peerconnection_quality_test", - ":test_peer", - ":video_quality_analyzer_injection_helper", +if (!build_with_chromium) { + group("e2e") { + testonly = true + + deps = [ + ":default_encoded_image_data_injector", + ":encoded_image_data_injector_api", + ":example_video_quality_analyzer", + ":id_generator", + ":quality_analyzing_video_decoder", + ":quality_analyzing_video_encoder", + ":single_process_encoded_image_data_injector", ] + if (rtc_include_tests) { + deps += [ + ":peerconnection_quality_test", + ":test_peer", + ":video_quality_analyzer_injection_helper", + ] + } } -} -if (rtc_include_tests) { - group("e2e_unittests") { + if (rtc_include_tests) { + group("e2e_unittests") { + testonly = true + + deps = [ + ":default_encoded_image_data_injector_unittest", + ":default_video_quality_analyzer_test", + ":multi_head_queue_test", + ":peer_connection_e2e_smoke_test", + ":single_process_encoded_image_data_injector_unittest", + ] + } + } + + rtc_library("peer_connection_quality_test_params") { + visibility = [ "*" ] testonly = true + sources = [ "peer_connection_quality_test_params.h" ] deps = [ - ":default_encoded_image_data_injector_unittest", - ":peer_connection_e2e_smoke_test", - ":single_process_encoded_image_data_injector_unittest", + "../../../api:callfactory_api", + "../../../api:fec_controller_api", + "../../../api:libjingle_peerconnection_api", + "../../../api:packet_socket_factory", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/rtc_event_log", + "../../../api/task_queue", + 
"../../../api/transport:network_control", + "../../../api/transport:webrtc_key_value_config", + "../../../api/video_codecs:video_codecs_api", + "../../../rtc_base", ] } -} - -rtc_library("peer_connection_quality_test_params") { - visibility = [ "*" ] - testonly = true - sources = [ "peer_connection_quality_test_params.h" ] - - deps = [ - "../../../api:callfactory_api", - "../../../api:fec_controller_api", - "../../../api:libjingle_peerconnection_api", - "../../../api:packet_socket_factory", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api/rtc_event_log", - "../../../api/task_queue", - "../../../api/transport:network_control", - "../../../api/transport/media:media_transport_interface", - "../../../api/video_codecs:video_codecs_api", - "../../../rtc_base", - ] -} -rtc_library("encoded_image_data_injector_api") { - visibility = [ "*" ] - testonly = true - sources = [ "analyzer/video/encoded_image_data_injector.h" ] + rtc_library("encoded_image_data_injector_api") { + visibility = [ "*" ] + testonly = true + sources = [ "analyzer/video/encoded_image_data_injector.h" ] - deps = [ "../../../api/video:encoded_image" ] -} + deps = [ "../../../api/video:encoded_image" ] + } -rtc_library("default_encoded_image_data_injector") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/default_encoded_image_data_injector.cc", - "analyzer/video/default_encoded_image_data_injector.h", - ] - - deps = [ - ":encoded_image_data_injector_api", - "../../../api/video:encoded_image", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "//third_party/abseil-cpp/absl/memory", - ] -} + rtc_library("default_encoded_image_data_injector") { + visibility = [ "*" ] + testonly = true + sources = [ + "analyzer/video/default_encoded_image_data_injector.cc", + "analyzer/video/default_encoded_image_data_injector.h", + ] -rtc_library("single_process_encoded_image_data_injector") { - visibility = [ "*" ] - testonly = true - sources = [ - 
"analyzer/video/single_process_encoded_image_data_injector.cc", - "analyzer/video/single_process_encoded_image_data_injector.h", - ] - - deps = [ - ":encoded_image_data_injector_api", - "../../../api/video:encoded_image", - "../../../rtc_base:checks", - "../../../rtc_base:criticalsection", - "//third_party/abseil-cpp/absl/memory", - ] -} + deps = [ + ":encoded_image_data_injector_api", + "../../../api/video:encoded_image", + "../../../rtc_base:checks", + "../../../rtc_base:criticalsection", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] + } -rtc_library("id_generator") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/id_generator.cc", - "analyzer/video/id_generator.h", - ] - deps = [] -} + rtc_library("single_process_encoded_image_data_injector") { + visibility = [ "*" ] + testonly = true + sources = [ + "analyzer/video/single_process_encoded_image_data_injector.cc", + "analyzer/video/single_process_encoded_image_data_injector.h", + ] -rtc_library("simulcast_dummy_buffer_helper") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/simulcast_dummy_buffer_helper.cc", - "analyzer/video/simulcast_dummy_buffer_helper.h", - ] - deps = [ - "../../../api/video:video_frame", - "../../../api/video:video_frame_i420", - ] -} + deps = [ + ":encoded_image_data_injector_api", + "../../../api/video:encoded_image", + "../../../rtc_base:checks", + "../../../rtc_base:criticalsection", + "../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] + } -rtc_library("quality_analyzing_video_decoder") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/quality_analyzing_video_decoder.cc", - "analyzer/video/quality_analyzing_video_decoder.h", - ] - deps = [ - ":encoded_image_data_injector_api", - ":id_generator", - ":simulcast_dummy_buffer_helper", - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - 
"../../../api/video:video_frame", - "../../../api/video:video_frame_i420", - "../../../api/video:video_rtp_headers", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/video_coding:video_codec_interface", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "//third_party/abseil-cpp/absl/types:optional", - ] -} + rtc_library("id_generator") { + visibility = [ "*" ] + testonly = true + sources = [ + "analyzer/video/id_generator.cc", + "analyzer/video/id_generator.h", + ] + deps = [] + } -rtc_library("quality_analyzing_video_encoder") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/quality_analyzing_video_encoder.cc", - "analyzer/video/quality_analyzing_video_encoder.h", - ] - deps = [ - ":encoded_image_data_injector_api", - ":id_generator", - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/video_coding:video_codec_interface", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - ] -} + rtc_library("simulcast_dummy_buffer_helper") { + visibility = [ "*" ] + testonly = true + sources = [ + "analyzer/video/simulcast_dummy_buffer_helper.cc", + "analyzer/video/simulcast_dummy_buffer_helper.h", + ] + deps = [ "../../../api/video:video_frame" ] + } -if (rtc_include_tests) { - rtc_library("video_quality_analyzer_injection_helper") { + rtc_library("quality_analyzing_video_decoder") { visibility = [ "*" ] testonly = true sources = [ - "analyzer/video/video_quality_analyzer_injection_helper.cc", - "analyzer/video/video_quality_analyzer_injection_helper.h", + "analyzer/video/quality_analyzing_video_decoder.cc", + "analyzer/video/quality_analyzing_video_decoder.h", ] deps = [ ":encoded_image_data_injector_api", ":id_generator", - ":quality_analyzing_video_decoder", - ":quality_analyzing_video_encoder", 
":simulcast_dummy_buffer_helper", - "../..:test_renderer", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:stats_observer_interface", "../../../api:video_quality_analyzer_api", + "../../../api/video:encoded_image", "../../../api/video:video_frame", "../../../api/video:video_rtp_headers", "../../../api/video_codecs:video_codecs_api", - "../../../test:video_test_common", - "../../../test:video_test_support", - "//third_party/abseil-cpp/absl/memory", + "../../../modules/video_coding:video_codec_interface", + "../../../rtc_base:criticalsection", + "../../../rtc_base:logging", + "../../../rtc_base/synchronization:mutex", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", ] } - rtc_library("echo_emulation") { + rtc_library("quality_analyzing_video_encoder") { visibility = [ "*" ] testonly = true sources = [ - "echo/echo_emulation.cc", - "echo/echo_emulation.h", + "analyzer/video/quality_analyzing_video_encoder.cc", + "analyzer/video/quality_analyzing_video_encoder.h", ] deps = [ - "../../../api:peer_connection_quality_test_fixture_api", - "../../../modules/audio_device:audio_device_impl", - "../../../rtc_base:rtc_base_approved", + ":encoded_image_data_injector_api", + ":id_generator", + "../../../api:video_quality_analyzer_api", + "../../../api/video:encoded_image", + "../../../api/video:video_frame", + "../../../api/video:video_rtp_headers", + "../../../api/video_codecs:video_codecs_api", + "../../../modules/video_coding:video_codec_interface", + "../../../rtc_base:criticalsection", + "../../../rtc_base:logging", + "../../../rtc_base/synchronization:mutex", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("test_peer") { + if (rtc_include_tests) { + rtc_library("video_quality_analyzer_injection_helper") { + visibility = [ "*" ] + testonly = true + sources = [ + "analyzer/video/video_quality_analyzer_injection_helper.cc", + 
"analyzer/video/video_quality_analyzer_injection_helper.h", + ] + deps = [ + ":encoded_image_data_injector_api", + ":id_generator", + ":quality_analyzing_video_decoder", + ":quality_analyzing_video_encoder", + ":simulcast_dummy_buffer_helper", + "../..:test_renderer", + "../../../api:array_view", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:stats_observer_interface", + "../../../api:video_quality_analyzer_api", + "../../../api/video:video_frame", + "../../../api/video:video_rtp_headers", + "../../../api/video_codecs:video_codecs_api", + "../../../rtc_base:criticalsection", + "../../../rtc_base/synchronization:mutex", + "../../../test:video_test_common", + "../../../test:video_test_support", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + ] + } + + rtc_library("echo_emulation") { + visibility = [ "*" ] + testonly = true + sources = [ + "echo/echo_emulation.cc", + "echo/echo_emulation.h", + ] + deps = [ + "../../../api:peer_connection_quality_test_fixture_api", + "../../../modules/audio_device:audio_device_impl", + "../../../rtc_base:rtc_base_approved", + ] + } + + rtc_library("test_peer") { + visibility = [ "*" ] + testonly = true + sources = [ + "test_peer.cc", + "test_peer.h", + ] + deps = [ + ":peer_configurer", + ":peer_connection_quality_test_params", + "../../../api:frame_generator_api", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:scoped_refptr", + "../../../modules/audio_processing:api", + "../../../pc:peerconnection_wrapper", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/types:variant", + ] + } + + rtc_library("test_peer_factory") { + visibility = [ "*" ] + testonly = true + sources = [ + "test_peer_factory.cc", + "test_peer_factory.h", + ] + deps = [ + ":echo_emulation", + ":peer_configurer", + ":peer_connection_quality_test_params", + ":quality_analyzing_video_encoder", + ":test_peer", + 
":video_quality_analyzer_injection_helper", + "../..:copy_to_file_audio_capturer", + "../../../api:create_time_controller", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:time_controller", + "../../../api/rtc_event_log:rtc_event_log_factory", + "../../../api/task_queue:default_task_queue_factory", + "../../../api/transport:field_trial_based_config", + "../../../api/video_codecs:builtin_video_decoder_factory", + "../../../api/video_codecs:builtin_video_encoder_factory", + "../../../media:rtc_audio_video", + "../../../media:rtc_media_engine_defaults", + "../../../modules/audio_device:audio_device_impl", + "../../../modules/audio_processing/aec_dump", + "../../../p2p:rtc_p2p", + "../../../rtc_base:rtc_task_queue", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + ] + } + + rtc_library("media_helper") { + visibility = [ "*" ] + testonly = true + sources = [ + "media/media_helper.cc", + "media/media_helper.h", + "media/test_video_capturer_video_track_source.h", + ] + deps = [ + ":peer_configurer", + ":test_peer", + ":video_quality_analyzer_injection_helper", + "../..:fileutils", + "../..:platform_video_capturer", + "../..:video_test_common", + "../../../api:create_frame_generator", + "../../../api:frame_generator_api", + "../../../api:media_stream_interface", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/video:video_frame", + "../../../pc:peerconnection", + "../../../pc:video_track_source", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ] + } + + rtc_library("peer_configurer") { + visibility = [ "*" ] + testonly = true + sources = [ + "peer_configurer.cc", + "peer_configurer.h", + ] + deps = [ + ":peer_connection_quality_test_params", + "../..:fileutils", + "../../../api:callfactory_api", + "../../../api:create_peer_connection_quality_test_frame_generator", + "../../../api:fec_controller_api", + "../../../api:packet_socket_factory", + 
"../../../api:peer_connection_quality_test_fixture_api", + "../../../api/rtc_event_log", + "../../../api/task_queue", + "../../../api/transport:network_control", + "../../../api/video_codecs:video_codecs_api", + "../../../rtc_base", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } + + rtc_library("test_activities_executor") { + visibility = [ "*" ] + testonly = true + sources = [ + "test_activities_executor.cc", + "test_activities_executor.h", + ] + deps = [ + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../rtc_base:checks", + "../../../rtc_base:criticalsection", + "../../../rtc_base:logging", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base:task_queue_for_test", + "../../../rtc_base/synchronization:mutex", + "../../../rtc_base/task_utils:repeating_task", + "../../../system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + + rtc_library("peerconnection_quality_test") { + visibility = [ "*" ] + testonly = true + + sources = [ + "peer_connection_quality_test.cc", + "peer_connection_quality_test.h", + ] + deps = [ + ":analyzer_helper", + ":cross_media_metrics_reporter", + ":default_audio_quality_analyzer", + ":default_video_quality_analyzer", + ":media_helper", + ":peer_configurer", + ":peer_connection_quality_test_params", + ":sdp_changer", + ":single_process_encoded_image_data_injector", + ":stats_poller", + ":test_activities_executor", + ":test_peer", + ":test_peer_factory", + ":video_quality_analyzer_injection_helper", + ":video_quality_metrics_reporter", + "../..:field_trial", + "../..:fileutils", + "../..:perf_test", + "../../../api:audio_quality_analyzer_api", + "../../../api:libjingle_peerconnection_api", + "../../../api:media_stream_interface", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:rtc_event_log_output_file", + "../../../api:scoped_refptr", + "../../../api:time_controller", + 
"../../../api:video_quality_analyzer_api", + "../../../api/rtc_event_log", + "../../../api/task_queue", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../pc:pc_test_utils", + "../../../pc:peerconnection", + "../../../rtc_base", + "../../../rtc_base:gunit_helpers", + "../../../rtc_base:macromagic", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base:safe_conversions", + "../../../rtc_base:task_queue_for_test", + "../../../rtc_base/synchronization:mutex", + "../../../system_wrappers", + "../../../system_wrappers:field_trial", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } + + rtc_library("single_process_encoded_image_data_injector_unittest") { + testonly = true + sources = [ + "analyzer/video/single_process_encoded_image_data_injector_unittest.cc", + ] + deps = [ + ":single_process_encoded_image_data_injector", + "../../../api/video:encoded_image", + "../../../rtc_base:rtc_base_approved", + "../../../test:test_support", + ] + } + + rtc_library("default_encoded_image_data_injector_unittest") { + testonly = true + sources = + [ "analyzer/video/default_encoded_image_data_injector_unittest.cc" ] + deps = [ + ":default_encoded_image_data_injector", + "../../../api/video:encoded_image", + "../../../rtc_base:rtc_base_approved", + "../../../test:test_support", + ] + } + + peer_connection_e2e_smoke_test_resources = [ + "../../../resources/pc_quality_smoke_test_alice_source.wav", + "../../../resources/pc_quality_smoke_test_bob_source.wav", + ] + if (is_ios) { + bundle_data("peer_connection_e2e_smoke_test_resources_bundle_data") { + testonly = true + sources = peer_connection_e2e_smoke_test_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + } + + rtc_library("peer_connection_e2e_smoke_test") { + testonly = true + + sources = [ "peer_connection_e2e_smoke_test.cc" ] + deps = [ + ":default_audio_quality_analyzer", + ":default_video_quality_analyzer", + ":network_quality_metrics_reporter", + 
":stats_based_network_quality_metrics_reporter", + "../../../api:callfactory_api", + "../../../api:create_network_emulation_manager", + "../../../api:create_peer_connection_quality_test_frame_generator", + "../../../api:create_peerconnection_quality_test_fixture", + "../../../api:libjingle_peerconnection_api", + "../../../api:media_stream_interface", + "../../../api:network_emulation_manager_api", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:scoped_refptr", + "../../../api:simulated_network_api", + "../../../api/audio_codecs:builtin_audio_decoder_factory", + "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/video_codecs:builtin_video_decoder_factory", + "../../../api/video_codecs:builtin_video_encoder_factory", + "../../../call:simulated_network", + "../../../media:rtc_audio_video", + "../../../modules/audio_device:audio_device_impl", + "../../../p2p:rtc_p2p", + "../../../pc:pc_test_utils", + "../../../pc:peerconnection_wrapper", + "../../../rtc_base", + "../../../rtc_base:gunit_helpers", + "../../../rtc_base:logging", + "../../../rtc_base:rtc_event", + "../../../system_wrappers:field_trial", + "../../../test:field_trial", + "../../../test:fileutils", + "../../../test:test_support", + ] + data = peer_connection_e2e_smoke_test_resources + if (is_ios) { + deps += [ ":peer_connection_e2e_smoke_test_resources_bundle_data" ] + } + } + + rtc_library("stats_poller") { + visibility = [ "*" ] + testonly = true + sources = [ + "stats_poller.cc", + "stats_poller.h", + ] + deps = [ + ":test_peer", + "../../../api:libjingle_peerconnection_api", + "../../../api:rtc_stats_api", + "../../../api:stats_observer_interface", + "../../../rtc_base:logging", + ] + } + + rtc_library("default_video_quality_analyzer_test") { + testonly = true + sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ] + deps = [ + ":default_video_quality_analyzer", + "../..:test_support", + "../../../api:create_frame_generator", + 
"../../../api:rtp_packet_info", + "../../../api/video:encoded_image", + "../../../api/video:video_frame", + "../../../common_video", + "../../../modules/rtp_rtcp:rtp_rtcp_format", + "../../../rtc_base:stringutils", + "../../../rtc_tools:video_quality_analysis", + "../../../system_wrappers", + ] + } + + rtc_library("multi_head_queue_test") { + testonly = true + sources = [ "analyzer/video/multi_head_queue_test.cc" ] + deps = [ + ":multi_head_queue", + "../../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + } + + rtc_library("analyzer_helper") { visibility = [ "*" ] - testonly = true sources = [ - "test_peer.cc", - "test_peer.h", + "analyzer_helper.cc", + "analyzer_helper.h", ] deps = [ - ":echo_emulation", - ":peer_connection_quality_test_params", - ":video_quality_analyzer_injection_helper", - "../../../api:frame_generator_api", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:scoped_refptr", - "../../../api/rtc_event_log:rtc_event_log_factory", - "../../../api/task_queue", - "../../../api/task_queue:default_task_queue_factory", - "../../../api/video_codecs:builtin_video_decoder_factory", - "../../../api/video_codecs:builtin_video_encoder_factory", - "../../../media:rtc_audio_video", - "../../../media:rtc_media_base", - "../../../media:rtc_media_engine_defaults", - "../../../modules/audio_device:audio_device_api", - "../../../modules/audio_device:audio_device_impl", - "../../../modules/audio_processing:api", - "../../../modules/audio_processing/aec_dump:aec_dump", - "../../../p2p:rtc_p2p", - "../../../pc:pc_test_utils", - "../../../pc:peerconnection_wrapper", - "../../../rtc_base", - "../../../rtc_base:rtc_base_approved", - "../../../rtc_base:rtc_task_queue", - "../../../test:copy_to_file_audio_capturer", - "../../../test:video_test_common", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", + "../../../api:track_id_stream_info_map", + 
"../../../rtc_base:macromagic", + "../../../rtc_base/synchronization:sequence_checker", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("peerconnection_quality_test") { + rtc_library("default_audio_quality_analyzer") { visibility = [ "*" ] testonly = true - sources = [ - "peer_connection_quality_test.cc", - "peer_connection_quality_test.h", + "analyzer/audio/default_audio_quality_analyzer.cc", + "analyzer/audio/default_audio_quality_analyzer.h", ] + deps = [ - ":analyzer_helper", - ":default_audio_quality_analyzer", - ":default_video_quality_analyzer", - ":peer_connection_quality_test_params", - ":sdp_changer", - ":single_process_encoded_image_data_injector", - ":stats_poller", - ":test_peer", - ":video_quality_analyzer_injection_helper", - "../..:field_trial", - "../..:platform_video_capturer", - "../..:video_test_common", + "../..:perf_test", "../../../api:audio_quality_analyzer_api", - "../../../api:create_frame_generator", - "../../../api:frame_generator_api", - "../../../api:libjingle_peerconnection_api", - "../../../api:media_stream_interface", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:rtc_event_log_output_file", - "../../../api:scoped_refptr", - "../../../api:video_quality_analyzer_api", - "../../../api/rtc_event_log", - "../../../api/task_queue", - "../../../api/task_queue:default_task_queue_factory", + "../../../api:rtc_stats_api", + "../../../api:stats_observer_interface", + "../../../api:track_id_stream_info_map", + "../../../api/numerics", "../../../api/units:time_delta", "../../../api/units:timestamp", - "../../../api/video:video_frame", - "../../../pc:pc_test_utils", - "../../../pc:peerconnection", - "../../../rtc_base", - "../../../rtc_base:gunit_helpers", - "../../../rtc_base:rtc_base_approved", - "../../../rtc_base:rtc_task_queue", - "../../../rtc_base:safe_conversions", - "../../../rtc_base:task_queue_for_test", - "../../../rtc_base/task_utils:repeating_task", - 
"../../../system_wrappers", - "../../../system_wrappers:field_trial", - "../../../test:fileutils", - "../../../test:video_test_support", + "../../../rtc_base:criticalsection", + "../../../rtc_base:logging", + "../../../rtc_base:rtc_numerics", + "../../../rtc_base/synchronization:mutex", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("single_process_encoded_image_data_injector_unittest") { + rtc_library("example_video_quality_analyzer") { + visibility = [ "*" ] testonly = true sources = [ - "analyzer/video/single_process_encoded_image_data_injector_unittest.cc", + "analyzer/video/example_video_quality_analyzer.cc", + "analyzer/video/example_video_quality_analyzer.h", ] + deps = [ - ":single_process_encoded_image_data_injector", + "../../../api:array_view", + "../../../api:video_quality_analyzer_api", "../../../api/video:encoded_image", - "../../../rtc_base:rtc_base_approved", - "../../../test:test_support", + "../../../api/video:video_frame", + "../../../api/video:video_rtp_headers", + "../../../rtc_base:criticalsection", + "../../../rtc_base:logging", + "../../../rtc_base/synchronization:mutex", ] } - rtc_library("default_encoded_image_data_injector_unittest") { + rtc_library("video_quality_metrics_reporter") { + visibility = [ "*" ] + testonly = true - sources = - [ "analyzer/video/default_encoded_image_data_injector_unittest.cc" ] + sources = [ + "analyzer/video/video_quality_metrics_reporter.cc", + "analyzer/video/video_quality_metrics_reporter.h", + ] deps = [ - ":default_encoded_image_data_injector", + "../..:perf_test", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:rtc_stats_api", + "../../../api:track_id_stream_info_map", + "../../../api/numerics", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../rtc_base:criticalsection", + "../../../rtc_base:rtc_numerics", + "../../../rtc_base/synchronization:mutex", 
+ ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } + + rtc_library("default_video_quality_analyzer") { + visibility = [ "*" ] + + testonly = true + sources = [ + "analyzer/video/default_video_quality_analyzer.cc", + "analyzer/video/default_video_quality_analyzer.h", + ] + + deps = [ + ":multi_head_queue", + "../..:perf_test", + "../../../api:array_view", + "../../../api:video_quality_analyzer_api", + "../../../api/numerics", + "../../../api/units:time_delta", + "../../../api/units:timestamp", "../../../api/video:encoded_image", + "../../../api/video:video_frame", + "../../../api/video:video_rtp_headers", + "../../../common_video", + "../../../rtc_base:criticalsection", + "../../../rtc_base:logging", "../../../rtc_base:rtc_base_approved", - "../../../test:test_support", + "../../../rtc_base:rtc_base_tests_utils", + "../../../rtc_base:rtc_event", + "../../../rtc_base:rtc_numerics", + "../../../rtc_base:timeutils", + "../../../rtc_base/synchronization:mutex", + "../../../rtc_tools:video_quality_analysis", + "../../../system_wrappers", ] } - peer_connection_e2e_smoke_test_resources = [ - "../../../resources/pc_quality_smoke_test_alice_source.wav", - "../../../resources/pc_quality_smoke_test_bob_source.wav", - ] - if (is_ios) { - bundle_data("peer_connection_e2e_smoke_test_resources_bundle_data") { - testonly = true - sources = peer_connection_e2e_smoke_test_resources - outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] - } + rtc_library("network_quality_metrics_reporter") { + visibility = [ "*" ] + testonly = true + sources = [ + "network_quality_metrics_reporter.cc", + "network_quality_metrics_reporter.h", + ] + deps = [ + "../..:perf_test", + "../../../api:network_emulation_manager_api", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:rtc_stats_api", + "../../../api:track_id_stream_info_map", + "../../../api/units:data_size", + "../../../rtc_base:criticalsection", + "../../../rtc_base:rtc_event", + 
"../../../rtc_base/synchronization:mutex", + "../../../system_wrappers:field_trial", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("peer_connection_e2e_smoke_test") { + rtc_library("stats_based_network_quality_metrics_reporter") { + visibility = [ "*" ] testonly = true - - sources = [ "peer_connection_e2e_smoke_test.cc" ] + sources = [ + "stats_based_network_quality_metrics_reporter.cc", + "stats_based_network_quality_metrics_reporter.h", + ] deps = [ - ":default_audio_quality_analyzer", - ":default_video_quality_analyzer", - ":network_quality_metrics_reporter", - "../../../api:callfactory_api", - "../../../api:create_network_emulation_manager", - "../../../api:create_peerconnection_quality_test_fixture", - "../../../api:libjingle_peerconnection_api", + "../..:perf_test", + "../../../api:array_view", "../../../api:network_emulation_manager_api", "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:rtc_stats_api", "../../../api:scoped_refptr", - "../../../api:simulated_network_api", - "../../../api/audio_codecs:builtin_audio_decoder_factory", - "../../../api/audio_codecs:builtin_audio_encoder_factory", - "../../../api/video_codecs:builtin_video_decoder_factory", - "../../../api/video_codecs:builtin_video_encoder_factory", - "../../../call:simulated_network", - "../../../media:rtc_audio_video", - "../../../modules/audio_device:audio_device_impl", - "../../../p2p:rtc_p2p", - "../../../pc:pc_test_utils", - "../../../pc:peerconnection_wrapper", + "../../../api/numerics", + "../../../api/test/network_emulation", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:timestamp", "../../../rtc_base", - "../../../rtc_base:gunit_helpers", - "../../../rtc_base:logging", "../../../rtc_base:rtc_event", + "../../../rtc_base:stringutils", + "../../../rtc_base/synchronization:mutex", "../../../system_wrappers:field_trial", - "../../../test:field_trial", - "../../../test:fileutils", - 
"../../../test:test_support", ] - data = peer_connection_e2e_smoke_test_resources - if (is_ios) { - deps += [ ":peer_connection_e2e_smoke_test_resources_bundle_data" ] - } + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } - rtc_library("stats_poller") { + rtc_library("cross_media_metrics_reporter") { + visibility = [ "*" ] testonly = true sources = [ - "stats_poller.cc", - "stats_poller.h", + "cross_media_metrics_reporter.cc", + "cross_media_metrics_reporter.h", ] deps = [ - ":test_peer", - "../../../api:libjingle_peerconnection_api", - "../../../api:stats_observer_interface", - "../../../rtc_base:logging", + "../..:perf_test", + "../../../api:network_emulation_manager_api", + "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:rtc_stats_api", + "../../../api:track_id_stream_info_map", + "../../../api/numerics", + "../../../api/units:timestamp", + "../../../rtc_base:criticalsection", + "../../../rtc_base:rtc_event", + "../../../rtc_base:rtc_numerics", + "../../../rtc_base/synchronization:mutex", + "../../../system_wrappers:field_trial", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", ] } -} - -rtc_library("analyzer_helper") { - visibility = [ "*" ] - sources = [ - "analyzer_helper.cc", - "analyzer_helper.h", - ] - deps = [ - "../../../api:track_id_stream_label_map", - "../../../rtc_base:macromagic", - "../../../rtc_base/synchronization:sequence_checker", - ] -} - -rtc_library("default_audio_quality_analyzer") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/audio/default_audio_quality_analyzer.cc", - "analyzer/audio/default_audio_quality_analyzer.h", - ] - - deps = [ - "../..:perf_test", - "../../../api:audio_quality_analyzer_api", - "../../../api:libjingle_peerconnection_api", - "../../../api:stats_observer_interface", - "../../../api:track_id_stream_label_map", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - 
"../../../rtc_base:rtc_numerics", - ] -} - -rtc_library("example_video_quality_analyzer") { - visibility = [ "*" ] - testonly = true - sources = [ - "analyzer/video/example_video_quality_analyzer.cc", - "analyzer/video/example_video_quality_analyzer.h", - ] - - deps = [ - "../../../api:video_quality_analyzer_api", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - ] -} - -rtc_library("default_video_quality_analyzer") { - visibility = [ "*" ] - - testonly = true - sources = [ - "analyzer/video/default_video_quality_analyzer.cc", - "analyzer/video/default_video_quality_analyzer.h", - ] - - deps = [ - "../..:perf_test", - "../../../api:video_quality_analyzer_api", - "../../../api/units:time_delta", - "../../../api/units:timestamp", - "../../../api/video:encoded_image", - "../../../api/video:video_frame", - "../../../api/video:video_rtp_headers", - "../../../common_video", - "../../../rtc_base:criticalsection", - "../../../rtc_base:logging", - "../../../rtc_base:rtc_base_approved", - "../../../rtc_base:rtc_event", - "../../../rtc_base:rtc_numerics", - "../../../system_wrappers", - ] -} -rtc_library("network_quality_metrics_reporter") { - visibility = [ "*" ] - testonly = true - sources = [ - "network_quality_metrics_reporter.cc", - "network_quality_metrics_reporter.h", - ] - deps = [ - "../..:perf_test", - "../../../api:libjingle_peerconnection_api", - "../../../api:network_emulation_manager_api", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../rtc_base:criticalsection", - "../../../rtc_base:rtc_event", - "../../../system_wrappers:field_trial", - ] -} + rtc_library("sdp_changer") { + visibility = [ "*" ] + testonly = true + sources = [ + "sdp/sdp_changer.cc", + "sdp/sdp_changer.h", + ] + deps = [ + "../../../api:array_view", + "../../../api:libjingle_peerconnection_api", + 
"../../../api:peer_connection_quality_test_fixture_api", + "../../../api:rtp_parameters", + "../../../media:rtc_media_base", + "../../../p2p:rtc_p2p", + "../../../pc:peerconnection", + "../../../pc:rtc_pc_base", + "../../../rtc_base:stringutils", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } -rtc_library("sdp_changer") { - testonly = true - sources = [ - "sdp/sdp_changer.cc", - "sdp/sdp_changer.h", - ] - deps = [ - "../../../api:array_view", - "../../../api:libjingle_peerconnection_api", - "../../../api:peer_connection_quality_test_fixture_api", - "../../../api:rtp_parameters", - "../../../media:rtc_media_base", - "../../../p2p:rtc_p2p", - "../../../pc:peerconnection", - "../../../pc:rtc_pc_base", - "../../../rtc_base:stringutils", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] + rtc_library("multi_head_queue") { + visibility = [ "*" ] + testonly = true + sources = [ "analyzer/video/multi_head_queue.h" ] + deps = [ "../../../rtc_base:checks" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } } diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc index b8f1740e46..8830436b09 100644 --- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc @@ -10,87 +10,103 @@ #include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h" -#include "api/stats_types.h" +#include "api/stats/rtc_stats.h" +#include "api/stats/rtcstats_objects.h" #include "rtc_base/logging.h" namespace webrtc { namespace webrtc_pc_e2e { -namespace { -static const char kStatsAudioMediaType[] = "audio"; - -} // namespace - -void DefaultAudioQualityAnalyzer::Start( - std::string test_case_name, - 
TrackIdStreamLabelMap* analyzer_helper) { +void DefaultAudioQualityAnalyzer::Start(std::string test_case_name, + TrackIdStreamInfoMap* analyzer_helper) { test_case_name_ = std::move(test_case_name); analyzer_helper_ = analyzer_helper; } void DefaultAudioQualityAnalyzer::OnStatsReports( - const std::string& pc_label, - const StatsReports& stats_reports) { - for (const StatsReport* stats_report : stats_reports) { - // NetEq stats are only present in kStatsReportTypeSsrc reports, so all - // other reports are just ignored. - if (stats_report->type() != StatsReport::StatsType::kStatsReportTypeSsrc) { - continue; - } - // Ignoring stats reports of "video" SSRC. - const webrtc::StatsReport::Value* media_type = stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameMediaType); - RTC_CHECK(media_type); - if (strcmp(media_type->static_string_val(), kStatsAudioMediaType) != 0) { + absl::string_view pc_label, + const rtc::scoped_refptr& report) { + // TODO(https://crbug.com/webrtc/11683): use "inbound-rtp" instead of "track" + // stats when required audio metrics moved there + auto stats = report->GetStatsOfType(); + + for (auto& stat : stats) { + if (!stat->kind.is_defined() || + !(*stat->kind == RTCMediaStreamTrackKind::kAudio) || + !*stat->remote_source) { continue; } - if (stats_report->FindValue( - webrtc::StatsReport::kStatsValueNameBytesSent)) { - // If kStatsValueNameBytesSent is present, it means it's a send stream, - // but we need audio metrics for receive stream, so skip it. 
- continue; + + StatsSample sample; + sample.total_samples_received = + stat->total_samples_received.ValueOrDefault(0ul); + sample.concealed_samples = stat->concealed_samples.ValueOrDefault(0ul); + sample.removed_samples_for_acceleration = + stat->removed_samples_for_acceleration.ValueOrDefault(0ul); + sample.inserted_samples_for_deceleration = + stat->inserted_samples_for_deceleration.ValueOrDefault(0ul); + sample.silent_concealed_samples = + stat->silent_concealed_samples.ValueOrDefault(0ul); + sample.jitter_buffer_delay = + TimeDelta::Seconds(stat->jitter_buffer_delay.ValueOrDefault(0.)); + sample.jitter_buffer_target_delay = + TimeDelta::Seconds(stat->jitter_buffer_target_delay.ValueOrDefault(0.)); + sample.jitter_buffer_emitted_count = + stat->jitter_buffer_emitted_count.ValueOrDefault(0ul); + + const std::string stream_label = std::string( + analyzer_helper_->GetStreamLabelFromTrackId(*stat->track_identifier)); + + MutexLock lock(&lock_); + StatsSample prev_sample = last_stats_sample_[stream_label]; + RTC_CHECK_GE(sample.total_samples_received, + prev_sample.total_samples_received); + double total_samples_diff = static_cast( + sample.total_samples_received - prev_sample.total_samples_received); + if (total_samples_diff == 0) { + return; } - const webrtc::StatsReport::Value* expand_rate = stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameExpandRate); - const webrtc::StatsReport::Value* accelerate_rate = stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameAccelerateRate); - const webrtc::StatsReport::Value* preemptive_rate = stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNamePreemptiveExpandRate); - const webrtc::StatsReport::Value* speech_expand_rate = - stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameSpeechExpandRate); - const webrtc::StatsReport::Value* preferred_buffer_size_ms = - stats_report->FindValue(StatsReport::StatsValueName:: - kStatsValueNamePreferredJitterBufferMs); 
- RTC_CHECK(expand_rate); - RTC_CHECK(accelerate_rate); - RTC_CHECK(preemptive_rate); - RTC_CHECK(speech_expand_rate); - RTC_CHECK(preferred_buffer_size_ms); - - const std::string& stream_label = - GetStreamLabelFromStatsReport(stats_report); - - rtc::CritScope crit(&lock_); AudioStreamStats& audio_stream_stats = streams_stats_[stream_label]; - audio_stream_stats.expand_rate.AddSample(expand_rate->float_val()); - audio_stream_stats.accelerate_rate.AddSample(accelerate_rate->float_val()); - audio_stream_stats.preemptive_rate.AddSample(preemptive_rate->float_val()); + audio_stream_stats.expand_rate.AddSample( + (sample.concealed_samples - prev_sample.concealed_samples) / + total_samples_diff); + audio_stream_stats.accelerate_rate.AddSample( + (sample.removed_samples_for_acceleration - + prev_sample.removed_samples_for_acceleration) / + total_samples_diff); + audio_stream_stats.preemptive_rate.AddSample( + (sample.inserted_samples_for_deceleration - + prev_sample.inserted_samples_for_deceleration) / + total_samples_diff); + + int64_t speech_concealed_samples = + sample.concealed_samples - sample.silent_concealed_samples; + int64_t prev_speech_concealed_samples = + prev_sample.concealed_samples - prev_sample.silent_concealed_samples; audio_stream_stats.speech_expand_rate.AddSample( - speech_expand_rate->float_val()); - audio_stream_stats.preferred_buffer_size_ms.AddSample( - preferred_buffer_size_ms->int_val()); - } -} + (speech_concealed_samples - prev_speech_concealed_samples) / + total_samples_diff); + + int64_t jitter_buffer_emitted_count_diff = + sample.jitter_buffer_emitted_count - + prev_sample.jitter_buffer_emitted_count; + if (jitter_buffer_emitted_count_diff > 0) { + TimeDelta jitter_buffer_delay_diff = + sample.jitter_buffer_delay - prev_sample.jitter_buffer_delay; + TimeDelta jitter_buffer_target_delay_diff = + sample.jitter_buffer_target_delay - + prev_sample.jitter_buffer_target_delay; + audio_stream_stats.average_jitter_buffer_delay_ms.AddSample( + 
jitter_buffer_delay_diff.ms() / + jitter_buffer_emitted_count_diff); + audio_stream_stats.preferred_buffer_size_ms.AddSample( + jitter_buffer_target_delay_diff.ms() / + jitter_buffer_emitted_count_diff); + } -const std::string& DefaultAudioQualityAnalyzer::GetStreamLabelFromStatsReport( - const StatsReport* stats_report) const { - const webrtc::StatsReport::Value* report_track_id = stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameTrackId); - RTC_CHECK(report_track_id); - return analyzer_helper_->GetStreamLabelFromTrackId( - report_track_id->string_val()); + last_stats_sample_[stream_label] = sample; + } } std::string DefaultAudioQualityAnalyzer::GetTestCaseName( @@ -100,7 +116,7 @@ std::string DefaultAudioQualityAnalyzer::GetTestCaseName( void DefaultAudioQualityAnalyzer::Stop() { using ::webrtc::test::ImproveDirection; - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); for (auto& item : streams_stats_) { ReportResult("expand_rate", item.first, item.second.expand_rate, "unitless", ImproveDirection::kSmallerIsBetter); @@ -111,6 +127,9 @@ void DefaultAudioQualityAnalyzer::Stop() { ReportResult("speech_expand_rate", item.first, item.second.speech_expand_rate, "unitless", ImproveDirection::kSmallerIsBetter); + ReportResult("average_jitter_buffer_delay_ms", item.first, + item.second.average_jitter_buffer_delay_ms, "ms", + ImproveDirection::kNone); ReportResult("preferred_buffer_size_ms", item.first, item.second.preferred_buffer_size_ms, "ms", ImproveDirection::kNone); @@ -119,7 +138,7 @@ void DefaultAudioQualityAnalyzer::Stop() { std::map DefaultAudioQualityAnalyzer::GetAudioStreamsStats() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return streams_stats_; } diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h index 33aaefd4c3..4ad0dd3da2 100644 --- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h +++ 
b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h @@ -14,11 +14,12 @@ #include #include -#include "api/stats_types.h" +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" #include "api/test/audio_quality_analyzer_interface.h" -#include "api/test/track_id_stream_label_map.h" -#include "rtc_base/critical_section.h" -#include "rtc_base/numerics/samples_stats_counter.h" +#include "api/test/track_id_stream_info_map.h" +#include "api/units/time_delta.h" +#include "rtc_base/synchronization/mutex.h" #include "test/testsupport/perf_test.h" namespace webrtc { @@ -29,25 +30,34 @@ struct AudioStreamStats { SamplesStatsCounter accelerate_rate; SamplesStatsCounter preemptive_rate; SamplesStatsCounter speech_expand_rate; + SamplesStatsCounter average_jitter_buffer_delay_ms; SamplesStatsCounter preferred_buffer_size_ms; }; -// TODO(bugs.webrtc.org/10430): Migrate to the new GetStats as soon as -// bugs.webrtc.org/10428 is fixed. class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface { public: void Start(std::string test_case_name, - TrackIdStreamLabelMap* analyzer_helper) override; - void OnStatsReports(const std::string& pc_label, - const StatsReports& stats_reports) override; + TrackIdStreamInfoMap* analyzer_helper) override; + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override; void Stop() override; // Returns audio quality stats per stream label. 
std::map GetAudioStreamsStats() const; private: - const std::string& GetStreamLabelFromStatsReport( - const StatsReport* stats_report) const; + struct StatsSample { + uint64_t total_samples_received = 0; + uint64_t concealed_samples = 0; + uint64_t removed_samples_for_acceleration = 0; + uint64_t inserted_samples_for_deceleration = 0; + uint64_t silent_concealed_samples = 0; + TimeDelta jitter_buffer_delay = TimeDelta::Zero(); + TimeDelta jitter_buffer_target_delay = TimeDelta::Zero(); + uint64_t jitter_buffer_emitted_count = 0; + }; + std::string GetTestCaseName(const std::string& stream_label) const; void ReportResult(const std::string& metric_name, const std::string& stream_label, @@ -56,10 +66,11 @@ class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface { webrtc::test::ImproveDirection improve_direction) const; std::string test_case_name_; - TrackIdStreamLabelMap* analyzer_helper_; + TrackIdStreamInfoMap* analyzer_helper_; - rtc::CriticalSection lock_; + mutable Mutex lock_; std::map streams_stats_ RTC_GUARDED_BY(lock_); + std::map last_stats_sample_ RTC_GUARDED_BY(lock_); }; } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc index 2634e6eea4..c5eab0a1b0 100644 --- a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc +++ b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc @@ -41,30 +41,34 @@ EncodedImage DefaultEncodedImageDataInjector::InjectData( bool discard, const EncodedImage& source, int /*coding_entity_id*/) { - EncodedImage out = source; - out.SetEncodedData( - EncodedImageBuffer::Create(source.size() + kEncodedImageBufferExpansion)); - memcpy(out.data(), source.data(), source.size()); + auto buffer = + EncodedImageBuffer::Create(source.size() + kEncodedImageBufferExpansion); + memcpy(buffer->data(), source.data(), source.size()); + size_t insertion_pos = source.size(); - 
out.data()[insertion_pos] = id & 0x00ff; - out.data()[insertion_pos + 1] = (id & 0xff00) >> 8; - out.data()[insertion_pos + 2] = source.size() & 0x000000ff; - out.data()[insertion_pos + 3] = (source.size() & 0x0000ff00) >> 8; - out.data()[insertion_pos + 4] = (source.size() & 0x00ff0000) >> 16; - out.data()[insertion_pos + 5] = (source.size() & 0xff000000) >> 24; + buffer->data()[insertion_pos] = id & 0x00ff; + buffer->data()[insertion_pos + 1] = (id & 0xff00) >> 8; + buffer->data()[insertion_pos + 2] = source.size() & 0x000000ff; + buffer->data()[insertion_pos + 3] = (source.size() & 0x0000ff00) >> 8; + buffer->data()[insertion_pos + 4] = (source.size() & 0x00ff0000) >> 16; + buffer->data()[insertion_pos + 5] = (source.size() & 0xff000000) >> 24; // We will store discard flag in the high bit of high byte of the size. RTC_CHECK_LT(source.size(), 1U << 31) << "High bit is already in use"; - out.data()[insertion_pos + 5] = - out.data()[insertion_pos + 5] | ((discard ? 1 : 0) << 7); + buffer->data()[insertion_pos + 5] = + buffer->data()[insertion_pos + 5] | ((discard ? 1 : 0) << 7); + + EncodedImage out = source; + out.SetEncodedData(buffer); return out; } EncodedImageExtractionResult DefaultEncodedImageDataInjector::ExtractData( const EncodedImage& source, int /*coding_entity_id*/) { + auto buffer = EncodedImageBuffer::Create(source.size()); EncodedImage out = source; - out.SetEncodedData(EncodedImageBuffer::Create(source.size())); + out.SetEncodedData(buffer); size_t source_pos = source.size() - 1; absl::optional id = absl::nullopt; @@ -115,7 +119,7 @@ EncodedImageExtractionResult DefaultEncodedImageDataInjector::ExtractData( if (!info.discard) { // Copy next encoded image payload from concatenated buffer only if it is // not discarded. 
- memcpy(&out.data()[out_pos], &source.data()[source_pos], info.length); + memcpy(&buffer->data()[out_pos], &source.data()[source_pos], info.length); out_pos += info.length; } source_pos += info.length + kEncodedImageBufferExpansion; diff --git a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h index f4bd81ce90..b60c214703 100644 --- a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h +++ b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.h @@ -19,7 +19,6 @@ #include #include "api/video/encoded_image.h" -#include "rtc_base/critical_section.h" #include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h" namespace webrtc { @@ -64,6 +63,8 @@ class DefaultEncodedImageDataInjector : public EncodedImageDataInjector, bool discard, const EncodedImage& source, int /*coding_entity_id*/) override; + + void Start(int expected_receivers_count) override {} EncodedImageExtractionResult ExtractData(const EncodedImage& source, int coding_entity_id) override; }; diff --git a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector_unittest.cc b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector_unittest.cc index 3ad978f66a..2ba2298fb5 100644 --- a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector_unittest.cc +++ b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector_unittest.cc @@ -20,22 +20,28 @@ namespace webrtc { namespace webrtc_pc_e2e { namespace { -rtc::Buffer CreateBufferOfSizeNFilledWithValuesFromX(size_t n, uint8_t x) { - rtc::Buffer buffer(n); +rtc::scoped_refptr +CreateEncodedImageBufferOfSizeNFilledWithValuesFromX(size_t n, uint8_t x) { + auto buffer = EncodedImageBuffer::Create(n); for (size_t i = 0; i < n; ++i) { - buffer[i] = static_cast(x + i); + buffer->data()[i] = static_cast(x + i); } return buffer; } -} // namespace +EncodedImage CreateEncodedImageOfSizeNFilledWithValuesFromX(size_t n, + uint8_t x) { + 
EncodedImage image; + image.SetEncodedData( + CreateEncodedImageBufferOfSizeNFilledWithValuesFromX(n, x)); + return image; +} TEST(DefaultEncodedImageDataInjector, InjectExtractDiscardFalse) { DefaultEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - - EncodedImage source(buffer.data(), 10, 10); + EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source.SetTimestamp(123456789); EncodedImageExtractionResult out = @@ -50,10 +56,9 @@ TEST(DefaultEncodedImageDataInjector, InjectExtractDiscardFalse) { TEST(DefaultEncodedImageDataInjector, InjectExtractDiscardTrue) { DefaultEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - - EncodedImage source(buffer.data(), 10, 10); + EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source.SetTimestamp(123456789); EncodedImageExtractionResult out = @@ -65,19 +70,16 @@ TEST(DefaultEncodedImageDataInjector, InjectExtractDiscardTrue) { TEST(DefaultEncodedImageDataInjector, Inject3Extract3) { DefaultEncodedImageDataInjector injector; - - rtc::Buffer buffer1 = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - rtc::Buffer buffer2 = CreateBufferOfSizeNFilledWithValuesFromX(10, 11); - rtc::Buffer buffer3 = CreateBufferOfSizeNFilledWithValuesFromX(10, 21); + injector.Start(1); // 1st frame - EncodedImage source1(buffer1.data(), 10, 10); + EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source1.SetTimestamp(123456710); // 2nd frame 1st spatial layer - EncodedImage source2(buffer2.data(), 10, 10); + EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 11); source2.SetTimestamp(123456720); // 2nd frame 2nd spatial layer - EncodedImage source3(buffer3.data(), 10, 10); + EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 21); source3.SetTimestamp(123456720); 
EncodedImage intermediate1 = injector.InjectData(510, false, source1, 1); @@ -108,16 +110,13 @@ TEST(DefaultEncodedImageDataInjector, Inject3Extract3) { TEST(DefaultEncodedImageDataInjector, InjectExtractFromConcatenated) { DefaultEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer1 = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - rtc::Buffer buffer2 = CreateBufferOfSizeNFilledWithValuesFromX(10, 11); - rtc::Buffer buffer3 = CreateBufferOfSizeNFilledWithValuesFromX(10, 21); - - EncodedImage source1(buffer1.data(), 10, 10); + EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source1.SetTimestamp(123456710); - EncodedImage source2(buffer2.data(), 10, 10); + EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 11); source2.SetTimestamp(123456710); - EncodedImage source3(buffer3.data(), 10, 10); + EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 21); source3.SetTimestamp(123456710); // Inject id into 3 images with same frame id. 
@@ -133,8 +132,9 @@ TEST(DefaultEncodedImageDataInjector, InjectExtractFromConcatenated) { concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size()); concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size()); concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size()); - EncodedImage concatenated(concatenated_buffer.data(), concatenated_length, - concatenated_length); + EncodedImage concatenated; + concatenated.SetEncodedData(EncodedImageBuffer::Create( + concatenated_buffer.data(), concatenated_length)); // Extract frame id from concatenated image EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2); @@ -151,16 +151,13 @@ TEST(DefaultEncodedImageDataInjector, InjectExtractFromConcatenated) { TEST(DefaultEncodedImageDataInjector, InjectExtractFromConcatenatedAllDiscarded) { DefaultEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer1 = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - rtc::Buffer buffer2 = CreateBufferOfSizeNFilledWithValuesFromX(10, 11); - rtc::Buffer buffer3 = CreateBufferOfSizeNFilledWithValuesFromX(10, 21); - - EncodedImage source1(buffer1.data(), 10, 10); + EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source1.SetTimestamp(123456710); - EncodedImage source2(buffer2.data(), 10, 10); + EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 11); source2.SetTimestamp(123456710); - EncodedImage source3(buffer3.data(), 10, 10); + EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 21); source3.SetTimestamp(123456710); // Inject id into 3 images with same frame id. 
@@ -176,8 +173,9 @@ TEST(DefaultEncodedImageDataInjector, concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size()); concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size()); concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size()); - EncodedImage concatenated(concatenated_buffer.data(), concatenated_length, - concatenated_length); + EncodedImage concatenated; + concatenated.SetEncodedData(EncodedImageBuffer::Create( + concatenated_buffer.data(), concatenated_length)); // Extract frame id from concatenated image EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2); @@ -187,5 +185,6 @@ TEST(DefaultEncodedImageDataInjector, EXPECT_EQ(out.image.size(), 0ul); } +} // namespace } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc index fcef1fea3e..3765f3dec8 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc @@ -14,9 +14,16 @@ #include #include +#include "api/array_view.h" +#include "api/numerics/samples_stats_counter.h" #include "api/units/time_delta.h" +#include "api/video/i420_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" +#include "rtc_base/cpu_time.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/time_utils.h" +#include "rtc_tools/frame_analyzer/video_geometry_aligner.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -32,6 +39,7 @@ void LogFrameCounters(const std::string& name, const FrameCounters& counters) { RTC_LOG(INFO) << "[" << name << "] Pre encoded : " << counters.pre_encoded; RTC_LOG(INFO) << "[" << name << "] Encoded : " << counters.encoded; RTC_LOG(INFO) << "[" << name << "] Received : " << counters.received; + RTC_LOG(INFO) << "[" << name << "] Decoded : " << counters.decoded; RTC_LOG(INFO) 
<< "[" << name << "] Rendered : " << counters.rendered; RTC_LOG(INFO) << "[" << name << "] Dropped : " << counters.dropped; } @@ -43,6 +51,20 @@ void LogStreamInternalStats(const std::string& name, const StreamStats& stats) { << stats.dropped_before_encoder; } +template +absl::optional MaybeGetValue(const std::map& map, size_t key) { + auto it = map.find(key); + if (it == map.end()) { + return absl::nullopt; + } + return it->second; +} + +SamplesStatsCounter::StatsSample StatsSample(double value, + Timestamp sampling_time) { + return SamplesStatsCounter::StatsSample{value, sampling_time}; +} + } // namespace void RateCounter::AddEvent(Timestamp event_time) { @@ -62,17 +84,63 @@ double RateCounter::GetEventsPerSecond() const { (event_last_time_ - event_first_time_).us() * kMicrosPerSecond; } +std::string StatsKey::ToString() const { + rtc::StringBuilder out; + out << stream_label << "_" << sender << "_" << receiver; + return out.str(); +} + +bool operator<(const StatsKey& a, const StatsKey& b) { + if (a.stream_label != b.stream_label) { + return a.stream_label < b.stream_label; + } + if (a.sender != b.sender) { + return a.sender < b.sender; + } + return a.receiver < b.receiver; +} + +bool operator==(const StatsKey& a, const StatsKey& b) { + return a.stream_label == b.stream_label && a.sender == b.sender && + a.receiver == b.receiver; +} + +std::string InternalStatsKey::ToString() const { + rtc::StringBuilder out; + out << "stream=" << stream << "_sender=" << sender + << "_receiver=" << receiver; + return out.str(); +} + +bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) { + if (a.stream != b.stream) { + return a.stream < b.stream; + } + if (a.sender != b.sender) { + return a.sender < b.sender; + } + return a.receiver < b.receiver; +} + +bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) { + return a.stream == b.stream && a.sender == b.sender && + a.receiver == b.receiver; +} + 
DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer( - bool heavy_metrics_computation_enabled) - : heavy_metrics_computation_enabled_(heavy_metrics_computation_enabled), - clock_(Clock::GetRealTimeClock()) {} + webrtc::Clock* clock, + DefaultVideoQualityAnalyzerOptions options) + : options_(options), clock_(clock) {} DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() { Stop(); } -void DefaultVideoQualityAnalyzer::Start(std::string test_case_name, - int max_threads_count) { +void DefaultVideoQualityAnalyzer::Start( + std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) { test_label_ = std::move(test_case_name); + peers_ = std::make_unique(peer_names); for (int i = 0; i < max_threads_count; i++) { auto thread = std::make_unique( &DefaultVideoQualityAnalyzer::ProcessComparisonsThread, this, @@ -82,123 +150,203 @@ void DefaultVideoQualityAnalyzer::Start(std::string test_case_name, thread_pool_.push_back(std::move(thread)); } { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); RTC_CHECK(start_time_.IsMinusInfinity()); state_ = State::kActive; start_time_ = Now(); } + StartMeasuringCpuProcessTime(); } uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured( + absl::string_view peer_name, const std::string& stream_label, const webrtc::VideoFrame& frame) { // |next_frame_id| is atomic, so we needn't lock here. uint16_t frame_id = next_frame_id_++; Timestamp start_time = Timestamp::MinusInfinity(); + size_t peer_index = peers_->index(peer_name); + size_t stream_index; { - rtc::CritScope crit(&lock_); - // Create a local copy of start_time_ to access it under |comparison_lock_| - // without holding a |lock_| + MutexLock lock(&lock_); + // Create a local copy of start_time_ to access it under + // |comparison_lock_| without holding a |lock_| start_time = start_time_; + stream_index = streams_.AddIfAbsent(stream_label); } { // Ensure stats for this stream exists. 
- rtc::CritScope crit(&comparison_lock_); - if (stream_stats_.find(stream_label) == stream_stats_.end()) { - stream_stats_.insert({stream_label, StreamStats()}); - // Assume that the first freeze was before first stream frame captured. - // This way time before the first freeze would be counted as time between - // freezes. - stream_last_freeze_end_time_.insert({stream_label, start_time}); + MutexLock lock(&comparison_lock_); + for (size_t i = 0; i < peers_->size(); ++i) { + if (i == peer_index) { + continue; + } + InternalStatsKey stats_key(stream_index, peer_index, i); + if (stream_stats_.find(stats_key) == stream_stats_.end()) { + stream_stats_.insert({stats_key, StreamStats()}); + // Assume that the first freeze was before first stream frame captured. + // This way time before the first freeze would be counted as time + // between freezes. + stream_last_freeze_end_time_.insert({stats_key, start_time}); + } else { + // When we see some |stream_label| for the first time we need to create + // stream stats object for it and set up some states, but we need to do + // it only once and for all receivers, so on the next frame on the same + // |stream_label| we can be sure, that it's already done and we needn't + // to scan though all peers again. 
+ break; + } } } { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); + stream_to_sender_[stream_index] = peer_index; frame_counters_.captured++; - stream_frame_counters_[stream_label].captured++; + for (size_t i = 0; i < peers_->size(); ++i) { + if (i != peer_index) { + InternalStatsKey key(stream_index, peer_index, i); + stream_frame_counters_[key].captured++; + } + } - StreamState* state = &stream_states_[stream_label]; - state->frame_ids.push_back(frame_id); + auto state_it = stream_states_.find(stream_index); + if (state_it == stream_states_.end()) { + stream_states_.emplace(stream_index, + StreamState(peer_index, peers_->size())); + } + StreamState* state = &stream_states_.at(stream_index); + state->PushBack(frame_id); // Update frames in flight info. auto it = captured_frames_in_flight_.find(frame_id); if (it != captured_frames_in_flight_.end()) { - // We overflow uint16_t and hit previous frame id and this frame is still - // in flight. It means that this stream wasn't rendered for long time and - // we need to process existing frame as dropped. - auto stats_it = frame_stats_.find(frame_id); - RTC_DCHECK(stats_it != frame_stats_.end()); - - RTC_DCHECK(frame_id == state->frame_ids.front()); - state->frame_ids.pop_front(); - frame_counters_.dropped++; - stream_frame_counters_[stream_label].dropped++; - AddComparison(it->second, absl::nullopt, true, stats_it->second); + // If we overflow uint16_t and hit previous frame id and this frame is + // still in flight, it means that this stream wasn't rendered for long + // time and we need to process existing frame as dropped. 
+ for (size_t i = 0; i < peers_->size(); ++i) { + if (i == peer_index) { + continue; + } + + uint16_t oldest_frame_id = state->PopFront(i); + RTC_DCHECK_EQ(frame_id, oldest_frame_id); + frame_counters_.dropped++; + InternalStatsKey key(stream_index, peer_index, i); + stream_frame_counters_.at(key).dropped++; + + MutexLock lock1(&comparison_lock_); + analyzer_stats_.frames_in_flight_left_count.AddSample( + StatsSample(captured_frames_in_flight_.size(), Now())); + AddComparison(InternalStatsKey(stream_index, peer_index, i), + it->second.frame(), absl::nullopt, true, + it->second.GetStatsForPeer(i)); + } captured_frames_in_flight_.erase(it); - frame_stats_.erase(stats_it); } - captured_frames_in_flight_.insert( - std::pair(frame_id, frame)); + captured_frames_in_flight_.emplace( + frame_id, + FrameInFlight(stream_index, frame, + /*captured_time=*/Now(), peer_index, peers_->size())); // Set frame id on local copy of the frame - captured_frames_in_flight_.at(frame_id).set_id(frame_id); - frame_stats_.insert(std::pair( - frame_id, FrameStats(stream_label, /*captured_time=*/Now()))); + captured_frames_in_flight_.at(frame_id).SetFrameId(frame_id); // Update history stream<->frame mapping for (auto it = stream_to_frame_id_history_.begin(); it != stream_to_frame_id_history_.end(); ++it) { it->second.erase(frame_id); } - stream_to_frame_id_history_[stream_label].insert(frame_id); + stream_to_frame_id_history_[stream_index].insert(frame_id); + + // If state has too many frames that are in flight => remove the oldest + // queued frame in order to avoid to use too much memory. 
+ if (state->GetAliveFramesCount() > + options_.max_frames_in_flight_per_stream_count) { + uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead(); + auto it = captured_frames_in_flight_.find(frame_id_to_remove); + RTC_CHECK(it != captured_frames_in_flight_.end()) + << "Frame with ID " << frame_id_to_remove + << " is expected to be in flight, but hasn't been found in " + << "|captured_frames_in_flight_|"; + bool is_removed = it->second.RemoveFrame(); + RTC_DCHECK(is_removed) + << "Invalid stream state: alive frame is removed already"; + } } return frame_id; } void DefaultVideoQualityAnalyzer::OnFramePreEncode( + absl::string_view peer_name, const webrtc::VideoFrame& frame) { - rtc::CritScope crit(&lock_); - auto it = frame_stats_.find(frame.id()); - RTC_DCHECK(it != frame_stats_.end()) + MutexLock lock(&lock_); + auto it = captured_frames_in_flight_.find(frame.id()); + RTC_DCHECK(it != captured_frames_in_flight_.end()) << "Frame id=" << frame.id() << " not found"; frame_counters_.pre_encoded++; - stream_frame_counters_[it->second.stream_label].pre_encoded++; - it->second.pre_encode_time = Now(); + size_t peer_index = peers_->index(peer_name); + for (size_t i = 0; i < peers_->size(); ++i) { + if (i != peer_index) { + InternalStatsKey key(it->second.stream(), peer_index, i); + stream_frame_counters_.at(key).pre_encoded++; + } + } + it->second.SetPreEncodeTime(Now()); } void DefaultVideoQualityAnalyzer::OnFrameEncoded( + absl::string_view peer_name, uint16_t frame_id, - const webrtc::EncodedImage& encoded_image) { - rtc::CritScope crit(&lock_); - auto it = frame_stats_.find(frame_id); - RTC_DCHECK(it != frame_stats_.end()); + const webrtc::EncodedImage& encoded_image, + const EncoderStats& stats) { + MutexLock lock(&lock_); + auto it = captured_frames_in_flight_.find(frame_id); + RTC_DCHECK(it != captured_frames_in_flight_.end()); // For SVC we can receive multiple encoded images for one frame, so to cover // all cases we have to pick the last encode time. 
- if (it->second.encoded_time.IsInfinite()) { + if (!it->second.HasEncodedTime()) { // Increase counters only when we meet this frame first time. frame_counters_.encoded++; - stream_frame_counters_[it->second.stream_label].encoded++; + size_t peer_index = peers_->index(peer_name); + for (size_t i = 0; i < peers_->size(); ++i) { + if (i != peer_index) { + InternalStatsKey key(it->second.stream(), peer_index, i); + stream_frame_counters_.at(key).encoded++; + } + } } - it->second.encoded_time = Now(); + it->second.OnFrameEncoded(Now(), encoded_image.size(), + stats.target_encode_bitrate); } void DefaultVideoQualityAnalyzer::OnFrameDropped( + absl::string_view peer_name, webrtc::EncodedImageCallback::DropReason reason) { // Here we do nothing, because we will see this drop on renderer side. } void DefaultVideoQualityAnalyzer::OnFramePreDecode( + absl::string_view peer_name, uint16_t frame_id, const webrtc::EncodedImage& input_image) { - rtc::CritScope crit(&lock_); - auto it = frame_stats_.find(frame_id); - RTC_DCHECK(it != frame_stats_.end()); - RTC_DCHECK(it->second.received_time.IsInfinite()) - << "Received multiple spatial layers for stream_label=" - << it->second.stream_label; + MutexLock lock(&lock_); + size_t peer_index = peers_->index(peer_name); + + auto it = captured_frames_in_flight_.find(frame_id); + if (it == captured_frames_in_flight_.end() || + it->second.HasReceivedTime(peer_index)) { + // It means this frame was predecoded before, so we can skip it. It may + // happen when we have multiple simulcast streams in one track and received + // the same picture from two different streams because SFU can't reliably + // correlate two simulcast streams and started relaying the second stream + // from the same frame it has relayed right before for the first stream. 
+ return; + } + frame_counters_.received++; - stream_frame_counters_[it->second.stream_label].received++; - it->second.decode_start_time = Now(); + InternalStatsKey key(it->second.stream(), + stream_to_sender_.at(it->second.stream()), peer_index); + stream_frame_counters_.at(key).received++; // Determine the time of the last received packet of this video frame. RTC_DCHECK(!input_image.PacketInfos().empty()); int64_t last_receive_time = @@ -208,107 +356,149 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode( return a.receive_time_ms() < b.receive_time_ms(); }) ->receive_time_ms(); - it->second.received_time = Timestamp::ms(last_receive_time); + it->second.OnFramePreDecode( + peer_index, + /*received_time=*/Timestamp::Millis(last_receive_time), + /*decode_start_time=*/Now()); } void DefaultVideoQualityAnalyzer::OnFrameDecoded( + absl::string_view peer_name, const webrtc::VideoFrame& frame, - absl::optional decode_time_ms, - absl::optional qp) { - rtc::CritScope crit(&lock_); - auto it = frame_stats_.find(frame.id()); - RTC_DCHECK(it != frame_stats_.end()); + const DecoderStats& stats) { + MutexLock lock(&lock_); + size_t peer_index = peers_->index(peer_name); + + auto it = captured_frames_in_flight_.find(frame.id()); + if (it == captured_frames_in_flight_.end() || + it->second.HasDecodeEndTime(peer_index)) { + // It means this frame was decoded before, so we can skip it. It may happen + // when we have multiple simulcast streams in one track and received + // the same picture from two different streams because SFU can't reliably + // correlate two simulcast streams and started relaying the second stream + // from the same frame it has relayed right before for the first stream. 
+ return; + } frame_counters_.decoded++; - stream_frame_counters_[it->second.stream_label].decoded++; - it->second.decode_end_time = Now(); + InternalStatsKey key(it->second.stream(), + stream_to_sender_.at(it->second.stream()), peer_index); + stream_frame_counters_.at(key).decoded++; + it->second.SetDecodeEndTime(peer_index, Now()); } void DefaultVideoQualityAnalyzer::OnFrameRendered( + absl::string_view peer_name, const webrtc::VideoFrame& frame) { - rtc::CritScope crit(&lock_); - auto stats_it = frame_stats_.find(frame.id()); - RTC_DCHECK(stats_it != frame_stats_.end()); - FrameStats* frame_stats = &stats_it->second; + MutexLock lock(&lock_); + size_t peer_index = peers_->index(peer_name); + + auto frame_it = captured_frames_in_flight_.find(frame.id()); + if (frame_it == captured_frames_in_flight_.end() || + frame_it->second.HasRenderedTime(peer_index)) { + // It means this frame was rendered before, so we can skip it. It may happen + // when we have multiple simulcast streams in one track and received + // the same picture from two different streams because SFU can't reliably + // correlate two simulcast streams and started relaying the second stream + // from the same frame it has relayed right before for the first stream. + return; + } + + // Find corresponding captured frame. + FrameInFlight* frame_in_flight = &frame_it->second; + absl::optional captured_frame = frame_in_flight->frame(); + + const size_t stream_index = frame_in_flight->stream(); + StreamState* state = &stream_states_.at(stream_index); + const InternalStatsKey stats_key(stream_index, state->owner(), peer_index); + // Update frames counters. frame_counters_.rendered++; - stream_frame_counters_[frame_stats->stream_label].rendered++; + stream_frame_counters_.at(stats_key).rendered++; // Update current frame stats. 
- frame_stats->rendered_time = Now(); - frame_stats->rendered_frame_width = frame.width(); - frame_stats->rendered_frame_height = frame.height(); - - // Find corresponding captured frame. - auto frame_it = captured_frames_in_flight_.find(frame.id()); - RTC_DCHECK(frame_it != captured_frames_in_flight_.end()); - const VideoFrame& captured_frame = frame_it->second; + frame_in_flight->OnFrameRendered(peer_index, Now(), frame.width(), + frame.height()); // After we received frame here we need to check if there are any dropped // frames between this one and last one, that was rendered for this video // stream. - - const std::string& stream_label = frame_stats->stream_label; - StreamState* state = &stream_states_[stream_label]; int dropped_count = 0; - while (!state->frame_ids.empty() && state->frame_ids.front() != frame.id()) { + while (!state->IsEmpty(peer_index) && + state->Front(peer_index) != frame.id()) { dropped_count++; - uint16_t dropped_frame_id = state->frame_ids.front(); - state->frame_ids.pop_front(); + uint16_t dropped_frame_id = state->PopFront(peer_index); // Frame with id |dropped_frame_id| was dropped. We need: // 1. Update global and stream frame counters // 2. Extract corresponding frame from |captured_frames_in_flight_| - // 3. Extract corresponding frame stats from |frame_stats_| - // 4. Send extracted frame to comparison with dropped=true - // 5. Cleanup dropped frame + // 3. Send extracted frame to comparison with dropped=true + // 4. 
Cleanup dropped frame frame_counters_.dropped++; - stream_frame_counters_[stream_label].dropped++; + stream_frame_counters_.at(stats_key).dropped++; - auto dropped_frame_stats_it = frame_stats_.find(dropped_frame_id); - RTC_DCHECK(dropped_frame_stats_it != frame_stats_.end()); auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id); - RTC_CHECK(dropped_frame_it != captured_frames_in_flight_.end()); + RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end()); + absl::optional dropped_frame = dropped_frame_it->second.frame(); + dropped_frame_it->second.MarkDropped(peer_index); - AddComparison(dropped_frame_it->second, absl::nullopt, true, - dropped_frame_stats_it->second); + { + MutexLock lock1(&comparison_lock_); + analyzer_stats_.frames_in_flight_left_count.AddSample( + StatsSample(captured_frames_in_flight_.size(), Now())); + AddComparison(stats_key, dropped_frame, absl::nullopt, true, + dropped_frame_it->second.GetStatsForPeer(peer_index)); + } - frame_stats_.erase(dropped_frame_stats_it); - captured_frames_in_flight_.erase(dropped_frame_it); + if (dropped_frame_it->second.HaveAllPeersReceived()) { + captured_frames_in_flight_.erase(dropped_frame_it); + } } - RTC_DCHECK(!state->frame_ids.empty()); - state->frame_ids.pop_front(); + RTC_DCHECK(!state->IsEmpty(peer_index)); + state->PopFront(peer_index); - if (state->last_rendered_frame_time) { - frame_stats->prev_frame_rendered_time = - state->last_rendered_frame_time.value(); + if (state->last_rendered_frame_time(peer_index)) { + frame_in_flight->SetPrevFrameRenderedTime( + peer_index, state->last_rendered_frame_time(peer_index).value()); } - state->last_rendered_frame_time = frame_stats->rendered_time; + state->SetLastRenderedFrameTime(peer_index, + frame_in_flight->rendered_time(peer_index)); { - rtc::CritScope cr(&comparison_lock_); - stream_stats_[stream_label].skipped_between_rendered.AddSample( - dropped_count); + MutexLock cr(&comparison_lock_); + 
stream_stats_[stats_key].skipped_between_rendered.AddSample( + StatsSample(dropped_count, Now())); } - AddComparison(captured_frame, frame, false, *frame_stats); - captured_frames_in_flight_.erase(frame_it); - frame_stats_.erase(stats_it); + { + MutexLock lock(&comparison_lock_); + analyzer_stats_.frames_in_flight_left_count.AddSample( + StatsSample(captured_frames_in_flight_.size(), Now())); + AddComparison(stats_key, captured_frame, frame, false, + frame_in_flight->GetStatsForPeer(peer_index)); + } + + if (frame_it->second.HaveAllPeersReceived()) { + captured_frames_in_flight_.erase(frame_it); + } } void DefaultVideoQualityAnalyzer::OnEncoderError( + absl::string_view peer_name, const webrtc::VideoFrame& frame, int32_t error_code) { RTC_LOG(LS_ERROR) << "Encoder error for frame.id=" << frame.id() << ", code=" << error_code; } -void DefaultVideoQualityAnalyzer::OnDecoderError(uint16_t frame_id, +void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, int32_t error_code) { RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id << ", code=" << error_code; } void DefaultVideoQualityAnalyzer::Stop() { + StopMeasuringCpuProcessTime(); { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); if (state_ == State::kStopped) { return; } @@ -327,143 +517,122 @@ void DefaultVideoQualityAnalyzer::Stop() { // Time between freezes. // Count time since the last freeze to the end of the call as time // between freezes. - rtc::CritScope crit1(&lock_); - rtc::CritScope crit2(&comparison_lock_); - for (auto& item : stream_stats_) { - const StreamState& state = stream_states_[item.first]; - // If there are no freezes in the call we have to report - // time_between_freezes_ms as call duration and in such case - // |stream_last_freeze_end_time_| for this stream will be |start_time_|. - // If there is freeze, then we need add time from last rendered frame - // to last freeze end as time between freezes. 
- if (state.last_rendered_frame_time) { - item.second.time_between_freezes_ms.AddSample( - (state.last_rendered_frame_time.value() - - stream_last_freeze_end_time_.at(item.first)) - .ms()); + MutexLock lock1(&lock_); + MutexLock lock2(&comparison_lock_); + for (auto& state_entry : stream_states_) { + const size_t stream_index = state_entry.first; + const StreamState& stream_state = state_entry.second; + for (size_t i = 0; i < peers_->size(); ++i) { + if (i == static_cast(stream_state.owner())) { + continue; + } + + InternalStatsKey stats_key(stream_index, stream_state.owner(), i); + + // If there are no freezes in the call we have to report + // time_between_freezes_ms as call duration and in such case + // |stream_last_freeze_end_time_| for this stream will be |start_time_|. + // If there is freeze, then we need add time from last rendered frame + // to last freeze end as time between freezes. + if (stream_state.last_rendered_frame_time(i)) { + stream_stats_[stats_key].time_between_freezes_ms.AddSample( + StatsSample( + stream_state.last_rendered_frame_time(i).value().ms() - + stream_last_freeze_end_time_.at(stats_key).ms(), + Now())); + } } } + analyzer_stats_.frames_in_flight_left_count.AddSample( + StatsSample(captured_frames_in_flight_.size(), Now())); } ReportResults(); } std::string DefaultVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) { - rtc::CritScope crit1(&lock_); - auto it = frame_stats_.find(frame_id); - if (it != frame_stats_.end()) { - return it->second.stream_label; + MutexLock lock1(&lock_); + auto it = captured_frames_in_flight_.find(frame_id); + if (it != captured_frames_in_flight_.end()) { + return streams_.name(it->second.stream()); } for (auto hist_it = stream_to_frame_id_history_.begin(); hist_it != stream_to_frame_id_history_.end(); ++hist_it) { auto hist_set_it = hist_it->second.find(frame_id); if (hist_set_it != hist_it->second.end()) { - return hist_it->first; + return streams_.name(hist_it->first); } } RTC_CHECK(false) << 
"Unknown frame_id=" << frame_id; } -std::set DefaultVideoQualityAnalyzer::GetKnownVideoStreams() - const { - rtc::CritScope crit2(&comparison_lock_); - std::set out; +std::set DefaultVideoQualityAnalyzer::GetKnownVideoStreams() const { + MutexLock lock1(&lock_); + MutexLock lock2(&comparison_lock_); + std::set out; for (auto& item : stream_stats_) { - out.insert(item.first); + RTC_LOG(INFO) << item.first.ToString() << " ==> " + << ToStatsKey(item.first).ToString(); + out.insert(ToStatsKey(item.first)); } return out; } -const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() { - rtc::CritScope crit(&lock_); +const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() const { + MutexLock lock(&lock_); return frame_counters_; } -const std::map& +std::map DefaultVideoQualityAnalyzer::GetPerStreamCounters() const { - rtc::CritScope crit(&lock_); - return stream_frame_counters_; + MutexLock lock(&lock_); + std::map out; + for (auto& item : stream_frame_counters_) { + out.emplace(ToStatsKey(item.first), item.second); + } + return out; } -std::map DefaultVideoQualityAnalyzer::GetStats() - const { - rtc::CritScope cri(&comparison_lock_); - return stream_stats_; +std::map DefaultVideoQualityAnalyzer::GetStats() const { + MutexLock lock1(&lock_); + MutexLock lock2(&comparison_lock_); + std::map out; + for (auto& item : stream_stats_) { + out.emplace(ToStatsKey(item.first), item.second); + } + return out; } AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const { - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); return analyzer_stats_; } -// TODO(bugs.webrtc.org/10430): Migrate to the new GetStats as soon as -// bugs.webrtc.org/10428 is fixed. 
-void DefaultVideoQualityAnalyzer::OnStatsReports( - const std::string& pc_label, - const StatsReports& stats_reports) { - for (const StatsReport* stats_report : stats_reports) { - // The only stats collected by this analyzer are present in - // kStatsReportTypeBwe reports, so all other reports are just ignored. - if (stats_report->type() != StatsReport::StatsType::kStatsReportTypeBwe) { - continue; - } - const webrtc::StatsReport::Value* available_send_bandwidth = - stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameAvailableSendBandwidth); - const webrtc::StatsReport::Value* retransmission_bitrate = - stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameRetransmitBitrate); - const webrtc::StatsReport::Value* transmission_bitrate = - stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameTransmitBitrate); - const webrtc::StatsReport::Value* actual_encode_bitrate = - stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameActualEncBitrate); - const webrtc::StatsReport::Value* target_encode_bitrate = - stats_report->FindValue( - StatsReport::StatsValueName::kStatsValueNameTargetEncBitrate); - RTC_CHECK(available_send_bandwidth); - RTC_CHECK(retransmission_bitrate); - RTC_CHECK(transmission_bitrate); - RTC_CHECK(actual_encode_bitrate); - RTC_CHECK(target_encode_bitrate); - - rtc::CritScope crit(&video_bwe_stats_lock_); - VideoBweStats& video_bwe_stats = video_bwe_stats_[pc_label]; - video_bwe_stats.available_send_bandwidth.AddSample( - available_send_bandwidth->int_val()); - video_bwe_stats.transmission_bitrate.AddSample( - transmission_bitrate->int_val()); - video_bwe_stats.retransmission_bitrate.AddSample( - retransmission_bitrate->int_val()); - video_bwe_stats.actual_encode_bitrate.AddSample( - actual_encode_bitrate->int_val()); - video_bwe_stats.target_encode_bitrate.AddSample( - target_encode_bitrate->int_val()); - } -} - -std::map -DefaultVideoQualityAnalyzer::GetVideoBweStats() const { - 
rtc::CritScope crit(&video_bwe_stats_lock_); - return video_bwe_stats_; -} - void DefaultVideoQualityAnalyzer::AddComparison( + InternalStatsKey stats_key, absl::optional captured, absl::optional rendered, bool dropped, FrameStats frame_stats) { - rtc::CritScope crit(&comparison_lock_); - analyzer_stats_.comparisons_queue_size.AddSample(comparisons_.size()); + StartExcludingCpuThreadTime(); + analyzer_stats_.comparisons_queue_size.AddSample( + StatsSample(comparisons_.size(), Now())); // If there too many computations waiting in the queue, we won't provide // frames itself to make future computations lighter. if (comparisons_.size() >= kMaxActiveComparisons) { - comparisons_.emplace_back(dropped, frame_stats); + comparisons_.emplace_back(std::move(stats_key), absl::nullopt, + absl::nullopt, dropped, std::move(frame_stats), + OverloadReason::kCpu); } else { - comparisons_.emplace_back(std::move(captured), std::move(rendered), dropped, - frame_stats); + OverloadReason overload_reason = OverloadReason::kNone; + if (!captured && !dropped) { + overload_reason = OverloadReason::kMemory; + } + comparisons_.emplace_back(std::move(stats_key), std::move(captured), + std::move(rendered), dropped, + std::move(frame_stats), overload_reason); } comparison_available_event_.Set(); + StopExcludingCpuThreadTime(); } void DefaultVideoQualityAnalyzer::ProcessComparisonsThread(void* obj) { @@ -475,7 +644,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparisons() { // Try to pick next comparison to perform from the queue. absl::optional comparison = absl::nullopt; { - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); if (!comparisons_.empty()) { comparison = comparisons_.front(); comparisons_.pop_front(); @@ -489,7 +658,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparisons() { { // If there are no comparisons and state is stopped => // no more frames expected. 
- rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); more_frames_expected = state_ != State::kStopped; } if (!more_frames_expected) { @@ -500,7 +669,9 @@ void DefaultVideoQualityAnalyzer::ProcessComparisons() { continue; } + StartExcludingCpuThreadTime(); ProcessComparison(comparison.value()); + StopExcludingCpuThreadTime(); } } @@ -509,32 +680,48 @@ void DefaultVideoQualityAnalyzer::ProcessComparison( // Perform expensive psnr and ssim calculations while not holding lock. double psnr = -1.0; double ssim = -1.0; - if (heavy_metrics_computation_enabled_ && comparison.captured && + if (options_.heavy_metrics_computation_enabled && comparison.captured && !comparison.dropped) { - psnr = I420PSNR(&*comparison.captured, &*comparison.rendered); - ssim = I420SSIM(&*comparison.captured, &*comparison.rendered); + rtc::scoped_refptr reference_buffer = + comparison.captured->video_frame_buffer()->ToI420(); + rtc::scoped_refptr test_buffer = + comparison.rendered->video_frame_buffer()->ToI420(); + if (options_.adjust_cropping_before_comparing_frames) { + test_buffer = + ScaleVideoFrameBuffer(*test_buffer.get(), reference_buffer->width(), + reference_buffer->height()); + reference_buffer = test::AdjustCropping(reference_buffer, test_buffer); + } + psnr = I420PSNR(*reference_buffer.get(), *test_buffer.get()); + ssim = I420SSIM(*reference_buffer.get(), *test_buffer.get()); } const FrameStats& frame_stats = comparison.frame_stats; - rtc::CritScope crit(&comparison_lock_); - auto stats_it = stream_stats_.find(frame_stats.stream_label); - RTC_CHECK(stats_it != stream_stats_.end()); + MutexLock lock(&comparison_lock_); + auto stats_it = stream_stats_.find(comparison.stats_key); + RTC_CHECK(stats_it != stream_stats_.end()) << comparison.stats_key.ToString(); StreamStats* stats = &stats_it->second; analyzer_stats_.comparisons_done++; - if (!comparison.captured) { - analyzer_stats_.overloaded_comparisons_done++; + if (comparison.overload_reason == OverloadReason::kCpu) { + 
analyzer_stats_.cpu_overloaded_comparisons_done++; + } else if (comparison.overload_reason == OverloadReason::kMemory) { + analyzer_stats_.memory_overloaded_comparisons_done++; } if (psnr > 0) { - stats->psnr.AddSample(psnr); + stats->psnr.AddSample(StatsSample(psnr, frame_stats.rendered_time)); } if (ssim > 0) { - stats->ssim.AddSample(ssim); + stats->ssim.AddSample(StatsSample(ssim, frame_stats.received_time)); } if (frame_stats.encoded_time.IsFinite()) { - stats->encode_time_ms.AddSample( - (frame_stats.encoded_time - frame_stats.pre_encode_time).ms()); + stats->encode_time_ms.AddSample(StatsSample( + (frame_stats.encoded_time - frame_stats.pre_encode_time).ms(), + frame_stats.encoded_time)); stats->encode_frame_rate.AddEvent(frame_stats.encoded_time); + stats->total_encoded_images_payload += frame_stats.encoded_image_size; + stats->target_encode_bitrate.AddSample(StatsSample( + frame_stats.target_encode_bitrate, frame_stats.encoded_time)); } else { if (frame_stats.pre_encode_time.IsFinite()) { stats->dropped_by_encoder++; @@ -545,34 +732,40 @@ void DefaultVideoQualityAnalyzer::ProcessComparison( // Next stats can be calculated only if frame was received on remote side. 
if (!comparison.dropped) { stats->resolution_of_rendered_frame.AddSample( - *comparison.frame_stats.rendered_frame_width * - *comparison.frame_stats.rendered_frame_height); - stats->transport_time_ms.AddSample( - (frame_stats.decode_start_time - frame_stats.encoded_time).ms()); - stats->total_delay_incl_transport_ms.AddSample( - (frame_stats.rendered_time - frame_stats.captured_time).ms()); - stats->decode_time_ms.AddSample( - (frame_stats.decode_end_time - frame_stats.decode_start_time).ms()); - stats->receive_to_render_time_ms.AddSample( - (frame_stats.rendered_time - frame_stats.received_time).ms()); + StatsSample(*comparison.frame_stats.rendered_frame_width * + *comparison.frame_stats.rendered_frame_height, + frame_stats.rendered_time)); + stats->transport_time_ms.AddSample(StatsSample( + (frame_stats.decode_start_time - frame_stats.encoded_time).ms(), + frame_stats.received_time)); + stats->total_delay_incl_transport_ms.AddSample(StatsSample( + (frame_stats.rendered_time - frame_stats.captured_time).ms(), + frame_stats.received_time)); + stats->decode_time_ms.AddSample(StatsSample( + (frame_stats.decode_end_time - frame_stats.decode_start_time).ms(), + frame_stats.decode_end_time)); + stats->receive_to_render_time_ms.AddSample(StatsSample( + (frame_stats.rendered_time - frame_stats.received_time).ms(), + frame_stats.rendered_time)); if (frame_stats.prev_frame_rendered_time.IsFinite()) { TimeDelta time_between_rendered_frames = frame_stats.rendered_time - frame_stats.prev_frame_rendered_time; - stats->time_between_rendered_frames_ms.AddSample( - time_between_rendered_frames.ms()); + stats->time_between_rendered_frames_ms.AddSample(StatsSample( + time_between_rendered_frames.ms(), frame_stats.rendered_time)); double average_time_between_rendered_frames_ms = stats->time_between_rendered_frames_ms.GetAverage(); if (time_between_rendered_frames.ms() > std::max(kFreezeThresholdMs + average_time_between_rendered_frames_ms, 3 * 
average_time_between_rendered_frames_ms)) { - stats->freeze_time_ms.AddSample(time_between_rendered_frames.ms()); + stats->freeze_time_ms.AddSample(StatsSample( + time_between_rendered_frames.ms(), frame_stats.rendered_time)); auto freeze_end_it = - stream_last_freeze_end_time_.find(frame_stats.stream_label); + stream_last_freeze_end_time_.find(comparison.stats_key); RTC_DCHECK(freeze_end_it != stream_last_freeze_end_time_.end()); - stats->time_between_freezes_ms.AddSample( - (frame_stats.prev_frame_rendered_time - freeze_end_it->second) - .ms()); + stats->time_between_freezes_ms.AddSample(StatsSample( + (frame_stats.prev_frame_rendered_time - freeze_end_it->second).ms(), + frame_stats.rendered_time)); freeze_end_it->second = frame_stats.rendered_time; } } @@ -580,22 +773,21 @@ void DefaultVideoQualityAnalyzer::ProcessComparison( } void DefaultVideoQualityAnalyzer::ReportResults() { - rtc::CritScope crit1(&lock_); - rtc::CritScope crit2(&comparison_lock_); + using ::webrtc::test::ImproveDirection; + + MutexLock lock1(&lock_); + MutexLock lock2(&comparison_lock_); for (auto& item : stream_stats_) { - ReportResults(GetTestCaseName(item.first), item.second, - stream_frame_counters_.at(item.first)); - } - { - rtc::CritScope video_bwe_crit(&video_bwe_stats_lock_); - for (const auto& item : video_bwe_stats_) { - ReportVideoBweResults(GetTestCaseName(item.first), item.second); - } + ReportResults(GetTestCaseName(StatsKeyToMetricName(ToStatsKey(item.first))), + item.second, stream_frame_counters_.at(item.first)); } + test::PrintResult("cpu_usage", "", test_label_.c_str(), GetCpuUsagePercent(), + "%", false, ImproveDirection::kSmallerIsBetter); LogFrameCounters("Global", frame_counters_); for (auto& item : stream_stats_) { - LogFrameCounters(item.first, stream_frame_counters_.at(item.first)); - LogStreamInternalStats(item.first, item.second); + LogFrameCounters(ToStatsKey(item.first).ToString(), + stream_frame_counters_.at(item.first)); + 
LogStreamInternalStats(ToStatsKey(item.first).ToString(), item.second); } if (!analyzer_stats_.comparisons_queue_size.IsEmpty()) { RTC_LOG(INFO) << "comparisons_queue_size min=" @@ -605,28 +797,10 @@ void DefaultVideoQualityAnalyzer::ReportResults() { << analyzer_stats_.comparisons_queue_size.GetPercentile(0.99); } RTC_LOG(INFO) << "comparisons_done=" << analyzer_stats_.comparisons_done; - RTC_LOG(INFO) << "overloaded_comparisons_done=" - << analyzer_stats_.overloaded_comparisons_done; -} - -void DefaultVideoQualityAnalyzer::ReportVideoBweResults( - const std::string& test_case_name, - const VideoBweStats& video_bwe_stats) { - ReportResult("available_send_bandwidth", test_case_name, - video_bwe_stats.available_send_bandwidth / kBitsInByte, - "bytesPerSecond"); - ReportResult("transmission_bitrate", test_case_name, - video_bwe_stats.transmission_bitrate / kBitsInByte, - "bytesPerSecond"); - ReportResult("retransmission_bitrate", test_case_name, - video_bwe_stats.retransmission_bitrate / kBitsInByte, - "bytesPerSecond"); - ReportResult("actual_encode_bitrate", test_case_name, - video_bwe_stats.actual_encode_bitrate / kBitsInByte, - "bytesPerSecond"); - ReportResult("target_encode_bitrate", test_case_name, - video_bwe_stats.target_encode_bitrate / kBitsInByte, - "bytesPerSecond"); + RTC_LOG(INFO) << "cpu_overloaded_comparisons_done=" + << analyzer_stats_.cpu_overloaded_comparisons_done; + RTC_LOG(INFO) << "memory_overloaded_comparisons_done=" + << analyzer_stats_.memory_overloaded_comparisons_done; } void DefaultVideoQualityAnalyzer::ReportResults( @@ -634,6 +808,34 @@ void DefaultVideoQualityAnalyzer::ReportResults( const StreamStats& stats, const FrameCounters& frame_counters) { using ::webrtc::test::ImproveDirection; + TimeDelta test_duration = Now() - start_time_; + + double sum_squared_interframe_delays_secs = 0; + Timestamp video_start_time = Timestamp::PlusInfinity(); + Timestamp video_end_time = Timestamp::MinusInfinity(); + for (const 
SamplesStatsCounter::StatsSample& sample : + stats.time_between_rendered_frames_ms.GetTimedSamples()) { + double interframe_delay_ms = sample.value; + const double interframe_delays_secs = interframe_delay_ms / 1000.0; + // Sum of squared inter frame intervals is used to calculate the harmonic + // frame rate metric. The metric aims to reflect overall experience related + // to smoothness of video playback and includes both freezes and pauses. + sum_squared_interframe_delays_secs += + interframe_delays_secs * interframe_delays_secs; + if (sample.time < video_start_time) { + video_start_time = sample.time; + } + if (sample.time > video_end_time) { + video_end_time = sample.time; + } + } + double harmonic_framerate_fps = 0; + TimeDelta video_duration = video_end_time - video_start_time; + if (sum_squared_interframe_delays_secs > 0.0 && video_duration.IsFinite()) { + harmonic_framerate_fps = static_cast(video_duration.us()) / + static_cast(kMicrosPerSecond) / + sum_squared_interframe_delays_secs; + } ReportResult("psnr", test_case_name, stats.psnr, "dB", ImproveDirection::kBiggerIsBetter); @@ -647,11 +849,14 @@ void DefaultVideoQualityAnalyzer::ReportResults( ReportResult("time_between_rendered_frames", test_case_name, stats.time_between_rendered_frames_ms, "ms", ImproveDirection::kSmallerIsBetter); + test::PrintResult("harmonic_framerate", "", test_case_name, + harmonic_framerate_fps, "Hz", /*important=*/false, + ImproveDirection::kBiggerIsBetter); test::PrintResult("encode_frame_rate", "", test_case_name, stats.encode_frame_rate.IsEmpty() ? 
0 : stats.encode_frame_rate.GetEventsPerSecond(), - "fps", /*important=*/false, + "Hz", /*important=*/false, ImproveDirection::kBiggerIsBetter); ReportResult("encode_time", test_case_name, stats.encode_time_ms, "ms", ImproveDirection::kSmallerIsBetter); @@ -681,6 +886,14 @@ void DefaultVideoQualityAnalyzer::ReportResults( /*important=*/false, ImproveDirection::kSmallerIsBetter); ReportResult("max_skipped", test_case_name, stats.skipped_between_rendered, "count", ImproveDirection::kSmallerIsBetter); + ReportResult("target_encode_bitrate", test_case_name, + stats.target_encode_bitrate / kBitsInByte, "bytesPerSecond", + ImproveDirection::kNone); + test::PrintResult( + "actual_encode_bitrate", "", test_case_name, + static_cast(stats.total_encoded_images_payload) / + static_cast(test_duration.us()) * kMicrosPerSecond, + "bytesPerSecond", /*important=*/false, ImproveDirection::kNone); } void DefaultVideoQualityAnalyzer::ReportResult( @@ -702,27 +915,250 @@ Timestamp DefaultVideoQualityAnalyzer::Now() { return clock_->CurrentTime(); } -DefaultVideoQualityAnalyzer::FrameStats::FrameStats(std::string stream_label, - Timestamp captured_time) - : stream_label(std::move(stream_label)), captured_time(captured_time) {} +StatsKey DefaultVideoQualityAnalyzer::ToStatsKey( + const InternalStatsKey& key) const { + return StatsKey(streams_.name(key.stream), peers_->name(key.sender), + peers_->name(key.receiver)); +} + +std::string DefaultVideoQualityAnalyzer::StatsKeyToMetricName( + const StatsKey& key) { + if (peers_->size() <= 2) { + return key.stream_label; + } + return key.ToString(); +} + +void DefaultVideoQualityAnalyzer::StartMeasuringCpuProcessTime() { + MutexLock lock(&cpu_measurement_lock_); + cpu_time_ -= rtc::GetProcessCpuTimeNanos(); + wallclock_time_ -= rtc::SystemTimeNanos(); +} + +void DefaultVideoQualityAnalyzer::StopMeasuringCpuProcessTime() { + MutexLock lock(&cpu_measurement_lock_); + cpu_time_ += rtc::GetProcessCpuTimeNanos(); + wallclock_time_ += 
rtc::SystemTimeNanos(); +} + +void DefaultVideoQualityAnalyzer::StartExcludingCpuThreadTime() { + MutexLock lock(&cpu_measurement_lock_); + cpu_time_ += rtc::GetThreadCpuTimeNanos(); +} + +void DefaultVideoQualityAnalyzer::StopExcludingCpuThreadTime() { + MutexLock lock(&cpu_measurement_lock_); + cpu_time_ -= rtc::GetThreadCpuTimeNanos(); +} + +double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() { + MutexLock lock(&cpu_measurement_lock_); + return static_cast(cpu_time_) / wallclock_time_ * 100.0; +} DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison( + InternalStatsKey stats_key, absl::optional captured, absl::optional rendered, bool dropped, - FrameStats frame_stats) - : captured(std::move(captured)), + FrameStats frame_stats, + OverloadReason overload_reason) + : stats_key(std::move(stats_key)), + captured(std::move(captured)), rendered(std::move(rendered)), dropped(dropped), - frame_stats(std::move(frame_stats)) {} + frame_stats(std::move(frame_stats)), + overload_reason(overload_reason) {} + +uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront(size_t peer) { + absl::optional frame_id = frame_ids_.PopFront(peer); + RTC_DCHECK(frame_id.has_value()); + + // If alive's frame queue is longer than all others, than also pop frame from + // it, because that frame is received by all receivers. 
+ size_t owner_size = frame_ids_.size(owner_); + size_t other_size = 0; + for (size_t i = 0; i < frame_ids_.readers_count(); ++i) { + size_t cur_size = frame_ids_.size(i); + if (i != owner_ && cur_size > other_size) { + other_size = cur_size; + } + } + if (owner_size > other_size) { + absl::optional alive_frame_id = frame_ids_.PopFront(owner_); + RTC_DCHECK(alive_frame_id.has_value()); + RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value()); + } -DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison( - bool dropped, - FrameStats frame_stats) - : captured(absl::nullopt), - rendered(absl::nullopt), - dropped(dropped), - frame_stats(std::move(frame_stats)) {} + return frame_id.value(); +} + +uint16_t DefaultVideoQualityAnalyzer::StreamState::MarkNextAliveFrameAsDead() { + absl::optional frame_id = frame_ids_.PopFront(owner_); + RTC_DCHECK(frame_id.has_value()); + return frame_id.value(); +} + +void DefaultVideoQualityAnalyzer::StreamState::SetLastRenderedFrameTime( + size_t peer, + Timestamp time) { + auto it = last_rendered_frame_time_.find(peer); + if (it == last_rendered_frame_time_.end()) { + last_rendered_frame_time_.insert({peer, time}); + } else { + it->second = time; + } +} + +absl::optional +DefaultVideoQualityAnalyzer::StreamState::last_rendered_frame_time( + size_t peer) const { + return MaybeGetValue(last_rendered_frame_time_, peer); +} + +bool DefaultVideoQualityAnalyzer::FrameInFlight::RemoveFrame() { + if (!frame_) { + return false; + } + frame_ = absl::nullopt; + return true; +} + +void DefaultVideoQualityAnalyzer::FrameInFlight::SetFrameId(uint16_t id) { + if (frame_) { + frame_->set_id(id); + } +} + +std::vector +DefaultVideoQualityAnalyzer::FrameInFlight::GetPeersWhichDidntReceive() const { + std::vector out; + for (size_t i = 0; i < peers_count_; ++i) { + auto it = receiver_stats_.find(i); + if (i != owner_ && it != receiver_stats_.end() && + it->second.rendered_time.IsInfinite()) { + out.push_back(i); + } + } + return out; +} + +bool 
DefaultVideoQualityAnalyzer::FrameInFlight::HaveAllPeersReceived() const { + for (size_t i = 0; i < peers_count_; ++i) { + if (i == owner_) { + continue; + } + + auto it = receiver_stats_.find(i); + if (it == receiver_stats_.end()) { + return false; + } + + if (!it->second.dropped && it->second.rendered_time.IsInfinite()) { + return false; + } + } + return true; +} + +void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameEncoded( + webrtc::Timestamp time, + int64_t encoded_image_size, + uint32_t target_encode_bitrate) { + encoded_time_ = time; + encoded_image_size_ = encoded_image_size; + target_encode_bitrate_ += target_encode_bitrate; +} + +void DefaultVideoQualityAnalyzer::FrameInFlight::OnFramePreDecode( + size_t peer, + webrtc::Timestamp received_time, + webrtc::Timestamp decode_start_time) { + receiver_stats_[peer].received_time = received_time; + receiver_stats_[peer].decode_start_time = decode_start_time; +} + +bool DefaultVideoQualityAnalyzer::FrameInFlight::HasReceivedTime( + size_t peer) const { + auto it = receiver_stats_.find(peer); + if (it == receiver_stats_.end()) { + return false; + } + return it->second.received_time.IsFinite(); +} + +bool DefaultVideoQualityAnalyzer::FrameInFlight::HasDecodeEndTime( + size_t peer) const { + auto it = receiver_stats_.find(peer); + if (it == receiver_stats_.end()) { + return false; + } + return it->second.decode_end_time.IsFinite(); +} + +void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameRendered( + size_t peer, + webrtc::Timestamp time, + int width, + int height) { + receiver_stats_[peer].rendered_time = time; + receiver_stats_[peer].rendered_frame_width = width; + receiver_stats_[peer].rendered_frame_height = height; +} + +bool DefaultVideoQualityAnalyzer::FrameInFlight::HasRenderedTime( + size_t peer) const { + auto it = receiver_stats_.find(peer); + if (it == receiver_stats_.end()) { + return false; + } + return it->second.rendered_time.IsFinite(); +} + +DefaultVideoQualityAnalyzer::FrameStats 
+DefaultVideoQualityAnalyzer::FrameInFlight::GetStatsForPeer(size_t peer) const { + FrameStats stats(captured_time_); + stats.pre_encode_time = pre_encode_time_; + stats.encoded_time = encoded_time_; + stats.target_encode_bitrate = target_encode_bitrate_; + stats.encoded_image_size = encoded_image_size_; + + absl::optional receiver_stats = + MaybeGetValue(receiver_stats_, peer); + if (receiver_stats.has_value()) { + stats.received_time = receiver_stats->received_time; + stats.decode_start_time = receiver_stats->decode_start_time; + stats.decode_end_time = receiver_stats->decode_end_time; + stats.rendered_time = receiver_stats->rendered_time; + stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time; + stats.rendered_frame_width = receiver_stats->rendered_frame_width; + stats.rendered_frame_height = receiver_stats->rendered_frame_height; + } + return stats; +} + +size_t DefaultVideoQualityAnalyzer::NamesCollection::AddIfAbsent( + absl::string_view name) { + auto it = index_.find(name); + if (it != index_.end()) { + return it->second; + } + size_t out = names_.size(); + size_t old_capacity = names_.capacity(); + names_.emplace_back(name); + size_t new_capacity = names_.capacity(); + + if (old_capacity == new_capacity) { + index_.emplace(names_[out], out); + } else { + // Reallocation happened in the vector, so we need to rebuild |index_| + index_.clear(); + for (size_t i = 0; i < names_.size(); ++i) { + index_.emplace(names_[i], i); + } + } + return out; +} } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h index d99e6ac303..08fc466bed 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h @@ -19,20 +19,27 @@ #include #include +#include "api/array_view.h" +#include "api/numerics/samples_stats_counter.h" #include 
"api/test/video_quality_analyzer_interface.h" #include "api/units/timestamp.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" -#include "rtc_base/numerics/samples_stats_counter.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" +#include "test/pc/e2e/analyzer/video/multi_head_queue.h" #include "test/testsupport/perf_test.h" namespace webrtc { namespace webrtc_pc_e2e { +// WebRTC will request a key frame after 3 seconds if no frames were received. +// We assume max frame rate ~60 fps, so 270 frames will cover max freeze without +// key frame request. +constexpr size_t kDefaultMaxFramesInFlightPerStream = 270; + class RateCounter { public: void AddEvent(Timestamp event_time); @@ -96,7 +103,9 @@ struct StreamStats { // Mean time between one freeze end and next freeze start. SamplesStatsCounter time_between_freezes_ms; SamplesStatsCounter resolution_of_rendered_frame; + SamplesStatsCounter target_encode_bitrate; + int64_t total_encoded_images_payload = 0; int64_t dropped_by_encoder = 0; int64_t dropped_before_encoder = 0; }; @@ -105,71 +114,133 @@ struct AnalyzerStats { // Size of analyzer internal comparisons queue, measured when new element // id added to the queue. SamplesStatsCounter comparisons_queue_size; - // Amount of performed comparisons of 2 video frames from captured and + // Number of performed comparisons of 2 video frames from captured and // rendered streams. int64_t comparisons_done = 0; - // Amount of overloaded comparisons. Comparison is overloaded if it is queued - // when there are too many not processed comparisons in the queue. Overloaded - // comparison doesn't include metrics, that require heavy computations like - // SSIM and PSNR. - int64_t overloaded_comparisons_done = 0; + // Number of cpu overloaded comparisons. 
Comparison is cpu overloaded if it is + // queued when there are too many not processed comparisons in the queue. + // Overloaded comparison doesn't include metrics like SSIM and PSNR that + // require heavy computations. + int64_t cpu_overloaded_comparisons_done = 0; + // Number of memory overloaded comparisons. Comparison is memory overloaded if + // it is queued when its captured frame was already removed due to high memory + // usage for that video stream. + int64_t memory_overloaded_comparisons_done = 0; + // Count of frames in flight in analyzer measured when new comparison is added + // and after analyzer was stopped. + SamplesStatsCounter frames_in_flight_left_count; }; -struct VideoBweStats { - SamplesStatsCounter available_send_bandwidth; - SamplesStatsCounter transmission_bitrate; - SamplesStatsCounter retransmission_bitrate; - SamplesStatsCounter actual_encode_bitrate; - SamplesStatsCounter target_encode_bitrate; +struct StatsKey { + StatsKey(std::string stream_label, std::string sender, std::string receiver) + : stream_label(std::move(stream_label)), + sender(std::move(sender)), + receiver(std::move(receiver)) {} + + std::string ToString() const; + + // Label of video stream to which stats belongs to. + std::string stream_label; + // Name of the peer which send this stream. + std::string sender; + // Name of the peer on which stream was received. + std::string receiver; +}; + +// Required to use StatsKey as std::map key. +bool operator<(const StatsKey& a, const StatsKey& b); +bool operator==(const StatsKey& a, const StatsKey& b); + +struct InternalStatsKey { + InternalStatsKey(size_t stream, size_t sender, size_t receiver) + : stream(stream), sender(sender), receiver(receiver) {} + + std::string ToString() const; + + size_t stream; + size_t sender; + size_t receiver; +}; + +// Required to use InternalStatsKey as std::map key. 
+bool operator<(const InternalStatsKey& a, const InternalStatsKey& b); +bool operator==(const InternalStatsKey& a, const InternalStatsKey& b); + +struct DefaultVideoQualityAnalyzerOptions { + // Tells DefaultVideoQualityAnalyzer if heavy metrics like PSNR and SSIM have + // to be computed or not. + bool heavy_metrics_computation_enabled = true; + // If true DefaultVideoQualityAnalyzer will try to adjust frames before + // computing PSNR and SSIM for them. In some cases picture may be shifted by + // a few pixels after the encode/decode step. Those difference is invisible + // for a human eye, but it affects the metrics. So the adjustment is used to + // get metrics that are closer to how human persepts the video. This feature + // significantly slows down the comparison, so turn it on only when it is + // needed. + bool adjust_cropping_before_comparing_frames = false; + // Amount of frames that are queued in the DefaultVideoQualityAnalyzer from + // the point they were captured to the point they were rendered on all + // receivers per stream. 
+ size_t max_frames_in_flight_per_stream_count = + kDefaultMaxFramesInFlightPerStream; }; class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { public: explicit DefaultVideoQualityAnalyzer( - bool heavy_metrics_computation_enabled = true); + webrtc::Clock* clock, + DefaultVideoQualityAnalyzerOptions options = + DefaultVideoQualityAnalyzerOptions()); ~DefaultVideoQualityAnalyzer() override; - void Start(std::string test_case_name, int max_threads_count) override; - uint16_t OnFrameCaptured(const std::string& stream_label, + void Start(std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) override; + uint16_t OnFrameCaptured(absl::string_view peer_name, + const std::string& stream_label, const VideoFrame& frame) override; - void OnFramePreEncode(const VideoFrame& frame) override; - void OnFrameEncoded(uint16_t frame_id, - const EncodedImage& encoded_image) override; - void OnFrameDropped(EncodedImageCallback::DropReason reason) override; - void OnFramePreDecode(uint16_t frame_id, + void OnFramePreEncode(absl::string_view peer_name, + const VideoFrame& frame) override; + void OnFrameEncoded(absl::string_view peer_name, + uint16_t frame_id, + const EncodedImage& encoded_image, + const EncoderStats& stats) override; + void OnFrameDropped(absl::string_view peer_name, + EncodedImageCallback::DropReason reason) override; + void OnFramePreDecode(absl::string_view peer_name, + uint16_t frame_id, const EncodedImage& input_image) override; - void OnFrameDecoded(const VideoFrame& frame, - absl::optional decode_time_ms, - absl::optional qp) override; - void OnFrameRendered(const VideoFrame& frame) override; - void OnEncoderError(const VideoFrame& frame, int32_t error_code) override; - void OnDecoderError(uint16_t frame_id, int32_t error_code) override; + void OnFrameDecoded(absl::string_view peer_name, + const VideoFrame& frame, + const DecoderStats& stats) override; + void OnFrameRendered(absl::string_view peer_name, + const 
VideoFrame& frame) override; + void OnEncoderError(absl::string_view peer_name, + const VideoFrame& frame, + int32_t error_code) override; + void OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, + int32_t error_code) override; void Stop() override; std::string GetStreamLabel(uint16_t frame_id) override; + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override {} // Returns set of stream labels, that were met during test call. - std::set GetKnownVideoStreams() const; - const FrameCounters& GetGlobalCounters(); + std::set GetKnownVideoStreams() const; + const FrameCounters& GetGlobalCounters() const; // Returns frame counter per stream label. Valid stream labels can be obtained // by calling GetKnownVideoStreams() - const std::map& GetPerStreamCounters() const; + std::map GetPerStreamCounters() const; // Returns video quality stats per stream label. Valid stream labels can be // obtained by calling GetKnownVideoStreams() - std::map GetStats() const; + std::map GetStats() const; AnalyzerStats GetAnalyzerStats() const; - // Will be called everytime new stats reports are available for the - // Peer Connection identified by |pc_label|. - void OnStatsReports(const std::string& pc_label, - const StatsReports& stats_reports) override; - - std::map GetVideoBweStats() const; - private: struct FrameStats { - FrameStats(std::string stream_label, Timestamp captured_time); - - std::string stream_label; + FrameStats(Timestamp captured_time) : captured_time(captured_time) {} // Frame events timestamp. 
Timestamp captured_time; @@ -182,10 +253,23 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { Timestamp rendered_time = Timestamp::MinusInfinity(); Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity(); + int64_t encoded_image_size = 0; + uint32_t target_encode_bitrate = 0; + absl::optional rendered_frame_width = absl::nullopt; absl::optional rendered_frame_height = absl::nullopt; }; + // Describes why comparison was done in overloaded mode (without calculating + // PSNR and SSIM). + enum class OverloadReason { + kNone, + // Not enough CPU to process all incoming comparisons. + kCpu, + // Not enough memory to store captured frames for all comparisons. + kMemory + }; + // Represents comparison between two VideoFrames. Contains video frames itself // and stats. Can be one of two types: // 1. Normal - in this case |captured| is presented and either |rendered| is @@ -195,12 +279,14 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { // because there were too many comparisons in the queue. |dropped| can be // true or false showing was frame dropped or not. struct FrameComparison { - FrameComparison(absl::optional captured, + FrameComparison(InternalStatsKey stats_key, + absl::optional captured, absl::optional rendered, bool dropped, - FrameStats frame_stats); - FrameComparison(bool dropped, FrameStats frameStats); + FrameStats frame_stats, + OverloadReason overload_reason); + InternalStatsKey stats_key; // Frames can be omitted if there too many computations waiting in the // queue. absl::optional captured; @@ -210,41 +296,190 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { // will be |absl::nullopt|. bool dropped; FrameStats frame_stats; + OverloadReason overload_reason; }; // Represents a current state of video stream. 
- struct StreamState { + class StreamState { + public: + StreamState(size_t owner, size_t peers_count) + : owner_(owner), frame_ids_(peers_count) {} + + size_t owner() const { return owner_; } + + void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); } + // Crash if state is empty. + uint16_t PopFront(size_t peer); + bool IsEmpty(size_t peer) const { return frame_ids_.IsEmpty(peer); } + // Crash if state is empty. + uint16_t Front(size_t peer) const { return frame_ids_.Front(peer).value(); } + + size_t GetAliveFramesCount() { return frame_ids_.size(owner_); } + uint16_t MarkNextAliveFrameAsDead(); + + void SetLastRenderedFrameTime(size_t peer, Timestamp time); + absl::optional last_rendered_frame_time(size_t peer) const; + + private: + // Index of the owner. Owner's queue in |frame_ids_| will keep alive frames. + const size_t owner_; // To correctly determine dropped frames we have to know sequence of frames // in each stream so we will keep a list of frame ids inside the stream. - // When the frame is rendered, we will pop ids from the list for until id - // will match with rendered one. All ids before matched one can be - // considered as dropped: + // This list is represented by multi head queue of frame ids with separate + // head for each receiver. When the frame is rendered, we will pop ids from + // the corresponding head until id will match with rendered one. All ids + // before matched one can be considered as dropped: // // | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 | // // If we received frame with id frame_id3, then we will pop frame_id1 and // frame_id2 and consider that frames as dropped and then compare received // frame with the one from |captured_frames_in_flight_| with id frame_id3. 
- std::deque frame_ids; - absl::optional last_rendered_frame_time = absl::nullopt; + // + // To track alive frames (frames that contains frame's payload in + // |captured_frames_in_flight_|) the head which corresponds to |owner_| will + // be used. So that head will point to the first alive frame in frames list. + MultiHeadQueue frame_ids_; + std::map last_rendered_frame_time_; }; enum State { kNew, kActive, kStopped }; - void AddComparison(absl::optional captured, + struct ReceiverFrameStats { + // Time when last packet of a frame was received. + Timestamp received_time = Timestamp::MinusInfinity(); + Timestamp decode_start_time = Timestamp::MinusInfinity(); + Timestamp decode_end_time = Timestamp::MinusInfinity(); + Timestamp rendered_time = Timestamp::MinusInfinity(); + Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity(); + + absl::optional rendered_frame_width = absl::nullopt; + absl::optional rendered_frame_height = absl::nullopt; + + bool dropped = false; + }; + + class FrameInFlight { + public: + FrameInFlight(size_t stream, + VideoFrame frame, + Timestamp captured_time, + size_t owner, + size_t peers_count) + : stream_(stream), + owner_(owner), + peers_count_(peers_count), + frame_(std::move(frame)), + captured_time_(captured_time) {} + + size_t stream() const { return stream_; } + const absl::optional& frame() const { return frame_; } + // Returns was frame removed or not. 
+ bool RemoveFrame(); + void SetFrameId(uint16_t id); + + std::vector GetPeersWhichDidntReceive() const; + bool HaveAllPeersReceived() const; + + void SetPreEncodeTime(webrtc::Timestamp time) { pre_encode_time_ = time; } + + void OnFrameEncoded(webrtc::Timestamp time, + int64_t encoded_image_size, + uint32_t target_encode_bitrate); + + bool HasEncodedTime() const { return encoded_time_.IsFinite(); } + + void OnFramePreDecode(size_t peer, + webrtc::Timestamp received_time, + webrtc::Timestamp decode_start_time); + + bool HasReceivedTime(size_t peer) const; + + void SetDecodeEndTime(size_t peer, webrtc::Timestamp time) { + receiver_stats_[peer].decode_end_time = time; + } + + bool HasDecodeEndTime(size_t peer) const; + + void OnFrameRendered(size_t peer, + webrtc::Timestamp time, + int width, + int height); + + bool HasRenderedTime(size_t peer) const; + + // Crash if rendered time is not set for specified |peer|. + webrtc::Timestamp rendered_time(size_t peer) const { + return receiver_stats_.at(peer).rendered_time; + } + + void MarkDropped(size_t peer) { receiver_stats_[peer].dropped = true; } + + void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) { + receiver_stats_[peer].prev_frame_rendered_time = time; + } + + FrameStats GetStatsForPeer(size_t peer) const; + + private: + const size_t stream_; + const size_t owner_; + const size_t peers_count_; + absl::optional frame_; + + // Frame events timestamp. 
+ Timestamp captured_time_; + Timestamp pre_encode_time_ = Timestamp::MinusInfinity(); + Timestamp encoded_time_ = Timestamp::MinusInfinity(); + int64_t encoded_image_size_ = 0; + uint32_t target_encode_bitrate_ = 0; + std::map receiver_stats_; + }; + + class NamesCollection { + public: + NamesCollection() = default; + explicit NamesCollection(rtc::ArrayView names) { + names_ = std::vector(names.begin(), names.end()); + for (size_t i = 0; i < names_.size(); ++i) { + index_.emplace(names_[i], i); + } + } + + size_t size() const { return names_.size(); } + + size_t index(absl::string_view name) const { return index_.at(name); } + + const std::string& name(size_t index) const { return names_[index]; } + + bool HasName(absl::string_view name) const { + return index_.find(name) != index_.end(); + } + + // Add specified |name| to the collection if it isn't presented. + // Returns index which corresponds to specified |name|. + size_t AddIfAbsent(absl::string_view name); + + private: + std::vector names_; + std::map index_; + }; + + void AddComparison(InternalStatsKey stats_key, + absl::optional captured, absl::optional rendered, bool dropped, - FrameStats frame_stats); + FrameStats frame_stats) + RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_); static void ProcessComparisonsThread(void* obj); void ProcessComparisons(); void ProcessComparison(const FrameComparison& comparison); // Report results for all metrics for all streams. void ReportResults(); - static void ReportVideoBweResults(const std::string& test_case_name, - const VideoBweStats& video_bwe_stats); - static void ReportResults(const std::string& test_case_name, - const StreamStats& stats, - const FrameCounters& frame_counters); + void ReportResults(const std::string& test_case_name, + const StreamStats& stats, + const FrameCounters& frame_counters) + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); // Report result for single metric for specified stream. 
static void ReportResult(const std::string& metric_name, const std::string& test_case_name, @@ -255,51 +490,72 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { // Returns name of current test case for reporting. std::string GetTestCaseName(const std::string& stream_label) const; Timestamp Now(); - - const bool heavy_metrics_computation_enabled_; + StatsKey ToStatsKey(const InternalStatsKey& key) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + // Returns string representation of stats key for metrics naming. Used for + // backward compatibility by metrics naming for 2 peers cases. + std::string StatsKeyToMetricName(const StatsKey& key); + + void StartMeasuringCpuProcessTime(); + void StopMeasuringCpuProcessTime(); + void StartExcludingCpuThreadTime(); + void StopExcludingCpuThreadTime(); + double GetCpuUsagePercent(); + + // TODO(titovartem) restore const when old constructor will be removed. + DefaultVideoQualityAnalyzerOptions options_; webrtc::Clock* const clock_; std::atomic next_frame_id_{0}; std::string test_label_; + std::unique_ptr peers_; - rtc::CriticalSection lock_; + mutable Mutex lock_; State state_ RTC_GUARDED_BY(lock_) = State::kNew; Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity(); + // Mapping from stream label to unique size_t value to use in stats and avoid + // extra string copying. + NamesCollection streams_ RTC_GUARDED_BY(lock_); // Frames that were captured by all streams and still aren't rendered by any - // stream or deemed dropped. - std::map captured_frames_in_flight_ + // stream or deemed dropped. Frame with id X can be removed from this map if: + // 1. The frame with id X was received in OnFrameRendered + // 2. The frame with id Y > X was received in OnFrameRendered + // 3. Next available frame id for newly captured frame is X + // 4. There too many frames in flight for current video stream and X is the + // oldest frame id in this stream. 
+ std::map captured_frames_in_flight_ RTC_GUARDED_BY(lock_); // Global frames count for all video streams. FrameCounters frame_counters_ RTC_GUARDED_BY(lock_); - // Frame counters per each stream. - std::map stream_frame_counters_ + // Frame counters per each stream per each receiver. + std::map stream_frame_counters_ RTC_GUARDED_BY(lock_); - std::map frame_stats_ RTC_GUARDED_BY(lock_); - std::map stream_states_ RTC_GUARDED_BY(lock_); - - // Stores history mapping between stream labels and frame ids. Updated when - // frame id overlap. It required to properly return stream label after 1st - // frame from simulcast streams was already rendered and last is still - // encoding. - std::map> stream_to_frame_id_history_ + // Map from stream index in |streams_| to its StreamState. + std::map stream_states_ RTC_GUARDED_BY(lock_); + // Map from stream index in |streams_| to sender peer index in |peers_|. + std::map stream_to_sender_ RTC_GUARDED_BY(lock_); + + // Stores history mapping between stream index in |streams_| and frame ids. + // Updated when frame id overlap. It required to properly return stream label + // after 1st frame from simulcast streams was already rendered and last is + // still encoding. + std::map> stream_to_frame_id_history_ RTC_GUARDED_BY(lock_); - rtc::CriticalSection comparison_lock_; - std::map stream_stats_ + mutable Mutex comparison_lock_; + std::map stream_stats_ RTC_GUARDED_BY(comparison_lock_); - std::map stream_last_freeze_end_time_ + std::map stream_last_freeze_end_time_ RTC_GUARDED_BY(comparison_lock_); std::deque comparisons_ RTC_GUARDED_BY(comparison_lock_); AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(comparison_lock_); - rtc::CriticalSection video_bwe_stats_lock_; - // Map between a peer connection label (provided by the framework) and - // its video BWE stats. 
- std::map video_bwe_stats_ - RTC_GUARDED_BY(video_bwe_stats_lock_); - std::vector> thread_pool_; rtc::Event comparison_available_event_; + + Mutex cpu_measurement_lock_; + int64_t cpu_time_ RTC_GUARDED_BY(cpu_measurement_lock_) = 0; + int64_t wallclock_time_ RTC_GUARDED_BY(cpu_measurement_lock_) = 0; }; } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc new file mode 100644 index 0000000000..20155bb099 --- /dev/null +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc @@ -0,0 +1,692 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include +#include + +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" +#include "api/test/create_frame_generator.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_tools/frame_analyzer/video_geometry_aligner.h" +#include "system_wrappers/include/sleep.h" +#include "test/gtest.h" +#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using StatsSample = ::webrtc::SamplesStatsCounter::StatsSample; + +constexpr int kAnalyzerMaxThreadsCount = 1; +constexpr int kMaxFramesInFlightPerStream = 10; +constexpr int kFrameWidth = 320; +constexpr int kFrameHeight = 240; +constexpr double kMaxSsim = 1; +constexpr char kStreamLabel[] = "video-stream"; +constexpr char kSenderPeerName[] = "alice"; +constexpr char kReceiverPeerName[] = "bob"; + +DefaultVideoQualityAnalyzerOptions AnalyzerOptionsForTest() { + DefaultVideoQualityAnalyzerOptions options; + options.heavy_metrics_computation_enabled = false; + options.adjust_cropping_before_comparing_frames = false; + options.max_frames_in_flight_per_stream_count = kMaxFramesInFlightPerStream; + return options; +} + +VideoFrame NextFrame(test::FrameGeneratorInterface* frame_generator, + int64_t timestamp_us) { + test::FrameGeneratorInterface::VideoFrameData frame_data = + frame_generator->NextFrame(); + return VideoFrame::Builder() + .set_video_frame_buffer(frame_data.buffer) + .set_update_rect(frame_data.update_rect) + .set_timestamp_us(timestamp_us) + .build(); +} + +EncodedImage FakeEncode(const VideoFrame& frame) { + EncodedImage image; + std::vector packet_infos; + packet_infos.push_back( + RtpPacketInfo(/*ssrc=*/1, + /*csrcs=*/{}, + /*rtp_timestamp=*/frame.timestamp(), + /*audio_level=*/absl::nullopt, + 
/*absolute_capture_time=*/absl::nullopt, + /*receive_time_ms=*/frame.timestamp_us() + 10)); + image.SetPacketInfos(RtpPacketInfos(packet_infos)); + return image; +} + +VideoFrame DeepCopy(const VideoFrame& frame) { + VideoFrame copy = frame; + copy.set_video_frame_buffer( + I420Buffer::Copy(*frame.video_frame_buffer()->ToI420())); + return copy; +} + +std::vector GetSortedSamples(const SamplesStatsCounter& counter) { + rtc::ArrayView view = counter.GetTimedSamples(); + std::vector out(view.begin(), view.end()); + std::sort(out.begin(), out.end(), + [](const StatsSample& a, const StatsSample& b) { + return a.time < b.time; + }); + return out; +} + +std::string ToString(const std::vector& values) { + rtc::StringBuilder out; + for (const auto& v : values) { + out << "{ time_ms=" << v.time.ms() << "; value=" << v.value << "}, "; + } + return out.str(); +} + +TEST(DefaultVideoQualityAnalyzerTest, + MemoryOverloadedAndThenAllFramesReceived) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + AnalyzerOptionsForTest()); + analyzer.Start("test_case", + std::vector{kSenderPeerName, kReceiverPeerName}, + kAnalyzerMaxThreadsCount); + + std::map captured_frames; + std::vector frames_order; + for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)); + frames_order.push_back(frame.id()); + captured_frames.insert({frame.id(), frame}); + analyzer.OnFramePreEncode(kSenderPeerName, frame); + analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + } + + for (const uint16_t& frame_id : frames_order) { + VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id)); + 
analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + } + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, + kMaxFramesInFlightPerStream); + EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 2); + FrameCounters frame_counters = analyzer.GetGlobalCounters(); + EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 2); + EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 2); + EXPECT_EQ(frame_counters.dropped, 0); +} + +TEST(DefaultVideoQualityAnalyzerTest, + FillMaxMemoryReceiveAllMemoryOverloadedAndThenAllFramesReceived) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + AnalyzerOptionsForTest()); + analyzer.Start("test_case", + std::vector{kSenderPeerName, kReceiverPeerName}, + kAnalyzerMaxThreadsCount); + + std::map captured_frames; + std::vector frames_order; + // Feel analyzer's memory up to limit + for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)); + frames_order.push_back(frame.id()); + captured_frames.insert({frame.id(), frame}); + analyzer.OnFramePreEncode(kSenderPeerName, frame); + analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + } + 
+ // Receive all frames. + for (const uint16_t& frame_id : frames_order) { + VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id)); + analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + } + frames_order.clear(); + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + + // Overload analyzer's memory up to limit + for (int i = 0; i < 2 * kMaxFramesInFlightPerStream; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)); + frames_order.push_back(frame.id()); + captured_frames.insert({frame.id(), frame}); + analyzer.OnFramePreEncode(kSenderPeerName, frame); + analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + } + + // Receive all frames. + for (const uint16_t& frame_id : frames_order) { + VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id)); + analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + } + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! 
+ SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, + kMaxFramesInFlightPerStream); + EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 3); + FrameCounters frame_counters = analyzer.GetGlobalCounters(); + EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 3); + EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 3); + EXPECT_EQ(frame_counters.dropped, 0); +} + +TEST(DefaultVideoQualityAnalyzerTest, + MemoryOverloadedHalfDroppedAndThenHalfFramesReceived) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + AnalyzerOptionsForTest()); + analyzer.Start("test_case", + std::vector{kSenderPeerName, kReceiverPeerName}, + kAnalyzerMaxThreadsCount); + + std::map captured_frames; + std::vector frames_order; + for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)); + frames_order.push_back(frame.id()); + captured_frames.insert({frame.id(), frame}); + analyzer.OnFramePreEncode(kSenderPeerName, frame); + analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + } + + for (size_t i = kMaxFramesInFlightPerStream; i < frames_order.size(); ++i) { + uint16_t frame_id = frames_order.at(i); + VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id)); + analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + } + + // Give analyzer some time to process 
frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0); + EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 2); + FrameCounters frame_counters = analyzer.GetGlobalCounters(); + EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 2); + EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream); + EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream); +} + +TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + AnalyzerOptionsForTest()); + analyzer.Start("test_case", + std::vector{kSenderPeerName, kReceiverPeerName}, + kAnalyzerMaxThreadsCount); + + std::map captured_frames; + std::vector frames_order; + for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)); + frames_order.push_back(frame.id()); + captured_frames.insert({frame.id(), frame}); + analyzer.OnFramePreEncode(kSenderPeerName, frame); + analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + } + + for (size_t i = 1; i < frames_order.size(); i += 2) { + uint16_t frame_id = frames_order.at(i); + VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id)); + analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + 
analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + } + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0); + EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream); + + std::vector frames_in_flight_sizes = + GetSortedSamples(stats.frames_in_flight_left_count); + EXPECT_EQ(frames_in_flight_sizes.back().value, 0) + << ToString(frames_in_flight_sizes); + + FrameCounters frame_counters = analyzer.GetGlobalCounters(); + EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream); + EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream / 2); + EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream / 2); + EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream / 2); + EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream / 2); +} + +TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + AnalyzerOptionsForTest()); + analyzer.Start("test_case", + std::vector{kSenderPeerName, kReceiverPeerName}, + kAnalyzerMaxThreadsCount); + + VideoFrame captured_frame = NextFrame(frame_generator.get(), 0); + captured_frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, captured_frame)); + analyzer.OnFramePreEncode(kSenderPeerName, captured_frame); + analyzer.OnFrameEncoded(kSenderPeerName, captured_frame.id(), + FakeEncode(captured_frame), + VideoQualityAnalyzerInterface::EncoderStats()); + + VideoFrame received_frame = DeepCopy(captured_frame); + analyzer.OnFramePreDecode(kReceiverPeerName, 
received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + + received_frame = DeepCopy(captured_frame); + analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! + SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0); + EXPECT_EQ(stats.comparisons_done, 1); + + FrameCounters frame_counters = analyzer.GetGlobalCounters(); + EXPECT_EQ(frame_counters.captured, 1); + EXPECT_EQ(frame_counters.received, 1); + EXPECT_EQ(frame_counters.decoded, 1); + EXPECT_EQ(frame_counters.rendered, 1); + EXPECT_EQ(frame_counters.dropped, 0); +} + +TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + constexpr char kAlice[] = "alice"; + constexpr char kBob[] = "bob"; + constexpr char kCharlie[] = "charlie"; + + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + AnalyzerOptionsForTest()); + analyzer.Start("test_case", std::vector{kAlice, kBob, kCharlie}, + kAnalyzerMaxThreadsCount); + + std::map captured_frames; + std::vector frames_order; + for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame)); + 
frames_order.push_back(frame.id()); + captured_frames.insert({frame.id(), frame}); + analyzer.OnFramePreEncode(kAlice, frame); + SleepMs(20); + analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + } + + SleepMs(50); + + for (size_t i = 1; i < frames_order.size(); i += 2) { + uint16_t frame_id = frames_order.at(i); + VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id)); + analyzer.OnFramePreDecode(kBob, received_frame.id(), + FakeEncode(received_frame)); + SleepMs(30); + analyzer.OnFrameDecoded(kBob, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + SleepMs(10); + analyzer.OnFrameRendered(kBob, received_frame); + } + + for (size_t i = 1; i < frames_order.size(); i += 2) { + uint16_t frame_id = frames_order.at(i); + VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id)); + analyzer.OnFramePreDecode(kCharlie, received_frame.id(), + FakeEncode(received_frame)); + SleepMs(40); + analyzer.OnFrameDecoded(kCharlie, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + SleepMs(5); + analyzer.OnFrameRendered(kCharlie, received_frame); + } + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! 
+ SleepMs(100); + analyzer.Stop(); + + AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(analyzer_stats.memory_overloaded_comparisons_done, 0); + EXPECT_EQ(analyzer_stats.comparisons_done, kMaxFramesInFlightPerStream * 2); + + FrameCounters frame_counters = analyzer.GetGlobalCounters(); + EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream); + EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream); + EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream); + EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream); + EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream); + EXPECT_EQ(analyzer.GetKnownVideoStreams().size(), 2lu); + for (auto stream_key : analyzer.GetKnownVideoStreams()) { + FrameCounters stream_conters = + analyzer.GetPerStreamCounters().at(stream_key); + // On some devices the pipeline can be too slow, so we actually can't + // force real constraints here. Lets just check, that at least 1 + // frame passed whole pipeline. 
+ EXPECT_GE(stream_conters.captured, 10); + EXPECT_GE(stream_conters.pre_encoded, 10); + EXPECT_GE(stream_conters.encoded, 10); + EXPECT_GE(stream_conters.received, 5); + EXPECT_GE(stream_conters.decoded, 5); + EXPECT_GE(stream_conters.rendered, 5); + EXPECT_GE(stream_conters.dropped, 5); + } + + std::map stats = analyzer.GetStats(); + const StatsKey kAliceBobStats(kStreamLabel, kAlice, kBob); + const StatsKey kAliceCharlieStats(kStreamLabel, kAlice, kCharlie); + EXPECT_EQ(stats.size(), 2lu); + { + auto it = stats.find(kAliceBobStats); + EXPECT_FALSE(it == stats.end()); + ASSERT_FALSE(it->second.encode_time_ms.IsEmpty()); + EXPECT_GE(it->second.encode_time_ms.GetMin(), 20); + ASSERT_FALSE(it->second.decode_time_ms.IsEmpty()); + EXPECT_GE(it->second.decode_time_ms.GetMin(), 30); + ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty()); + EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(), + kFrameWidth * kFrameHeight - 1); + EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(), + kFrameWidth * kFrameHeight + 1); + } + { + auto it = stats.find(kAliceCharlieStats); + EXPECT_FALSE(it == stats.end()); + ASSERT_FALSE(it->second.encode_time_ms.IsEmpty()); + EXPECT_GE(it->second.encode_time_ms.GetMin(), 20); + ASSERT_FALSE(it->second.decode_time_ms.IsEmpty()); + EXPECT_GE(it->second.decode_time_ms.GetMin(), 30); + ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty()); + EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(), + kFrameWidth * kFrameHeight - 1); + EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(), + kFrameWidth * kFrameHeight + 1); + } +} + +TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + constexpr char kAlice[] = "alice"; + constexpr char kBob[] = "bob"; + constexpr char kCharlie[] = "charlie"; + + DefaultVideoQualityAnalyzer 
analyzer(Clock::GetRealTimeClock(), + AnalyzerOptionsForTest()); + analyzer.Start("test_case", std::vector{kAlice, kBob, kCharlie}, + kAnalyzerMaxThreadsCount); + + VideoFrame captured_frame = NextFrame(frame_generator.get(), 0); + captured_frame.set_id( + analyzer.OnFrameCaptured(kAlice, kStreamLabel, captured_frame)); + analyzer.OnFramePreEncode(kAlice, captured_frame); + analyzer.OnFrameEncoded(kAlice, captured_frame.id(), + FakeEncode(captured_frame), + VideoQualityAnalyzerInterface::EncoderStats()); + + VideoFrame received_frame = DeepCopy(captured_frame); + analyzer.OnFramePreDecode(kBob, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kBob, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kBob, received_frame); + + received_frame = DeepCopy(captured_frame); + analyzer.OnFramePreDecode(kBob, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kBob, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kBob, received_frame); + + // Give analyzer some time to process frames on async thread. The computations + // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it + // means we have an issue! 
+ SleepMs(100); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0); + EXPECT_EQ(stats.comparisons_done, 1); + + FrameCounters frame_counters = analyzer.GetGlobalCounters(); + EXPECT_EQ(frame_counters.captured, 1); + EXPECT_EQ(frame_counters.received, 1); + EXPECT_EQ(frame_counters.decoded, 1); + EXPECT_EQ(frame_counters.rendered, 1); + EXPECT_EQ(frame_counters.dropped, 0); +} + +TEST(DefaultVideoQualityAnalyzerTest, HeavyQualityMetricsFromEqualFrames) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzerOptions analyzer_options; + analyzer_options.heavy_metrics_computation_enabled = true; + analyzer_options.adjust_cropping_before_comparing_frames = false; + analyzer_options.max_frames_in_flight_per_stream_count = + kMaxFramesInFlightPerStream; + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + analyzer_options); + analyzer.Start("test_case", + std::vector{kSenderPeerName, kReceiverPeerName}, + kAnalyzerMaxThreadsCount); + + for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)); + analyzer.OnFramePreEncode(kSenderPeerName, frame); + analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + + VideoFrame received_frame = DeepCopy(frame); + analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + } + + // Give analyzer some time to process frames on async thread. 
Heavy metrics + // computation is turned on, so giving some extra time to be sure that + // computatio have ended. + SleepMs(500); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0); + EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream); + + std::vector frames_in_flight_sizes = + GetSortedSamples(stats.frames_in_flight_left_count); + EXPECT_EQ(frames_in_flight_sizes.back().value, 0) + << ToString(frames_in_flight_sizes); + + std::map stream_stats = analyzer.GetStats(); + const StatsKey kAliceBobStats(kStreamLabel, kSenderPeerName, + kReceiverPeerName); + EXPECT_EQ(stream_stats.size(), 1lu); + + auto it = stream_stats.find(kAliceBobStats); + EXPECT_GE(it->second.psnr.GetMin(), kPerfectPSNR); + EXPECT_GE(it->second.ssim.GetMin(), kMaxSsim); +} + +TEST(DefaultVideoQualityAnalyzerTest, + HeavyQualityMetricsFromShiftedFramesWithAdjustment) { + std::unique_ptr frame_generator = + test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, + /*type=*/absl::nullopt, + /*num_squares=*/absl::nullopt); + + DefaultVideoQualityAnalyzerOptions analyzer_options; + analyzer_options.heavy_metrics_computation_enabled = true; + analyzer_options.adjust_cropping_before_comparing_frames = true; + analyzer_options.max_frames_in_flight_per_stream_count = + kMaxFramesInFlightPerStream; + DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), + analyzer_options); + analyzer.Start("test_case", + std::vector{kSenderPeerName, kReceiverPeerName}, + kAnalyzerMaxThreadsCount); + + for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) { + VideoFrame frame = NextFrame(frame_generator.get(), i); + frame.set_id( + analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame)); + analyzer.OnFramePreEncode(kSenderPeerName, frame); + analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame), + VideoQualityAnalyzerInterface::EncoderStats()); + + VideoFrame received_frame = frame; + // Shift 
frame by a few pixels. + test::CropRegion crop_region{0, 1, 3, 0}; + rtc::scoped_refptr cropped_buffer = + CropAndZoom(crop_region, received_frame.video_frame_buffer()->ToI420()); + received_frame.set_video_frame_buffer(cropped_buffer); + + analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(), + FakeEncode(received_frame)); + analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, + VideoQualityAnalyzerInterface::DecoderStats()); + analyzer.OnFrameRendered(kReceiverPeerName, received_frame); + } + + // Give analyzer some time to process frames on async thread. Heavy metrics + // computation is turned on, so giving some extra time to be sure that + // computatio have ended. + SleepMs(500); + analyzer.Stop(); + + AnalyzerStats stats = analyzer.GetAnalyzerStats(); + EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0); + EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream); + + std::vector frames_in_flight_sizes = + GetSortedSamples(stats.frames_in_flight_left_count); + EXPECT_EQ(frames_in_flight_sizes.back().value, 0) + << ToString(frames_in_flight_sizes); + + std::map stream_stats = analyzer.GetStats(); + const StatsKey kAliceBobStats(kStreamLabel, kSenderPeerName, + kReceiverPeerName); + EXPECT_EQ(stream_stats.size(), 1lu); + + auto it = stream_stats.find(kAliceBobStats); + EXPECT_GE(it->second.psnr.GetMin(), kPerfectPSNR); + EXPECT_GE(it->second.ssim.GetMin(), kMaxSsim); +} + +} // namespace +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/encoded_image_data_injector.h b/test/pc/e2e/analyzer/video/encoded_image_data_injector.h index 8e218629fc..ddd6959b91 100644 --- a/test/pc/e2e/analyzer/video/encoded_image_data_injector.h +++ b/test/pc/e2e/analyzer/video/encoded_image_data_injector.h @@ -47,6 +47,11 @@ class EncodedImageDataExtractor { public: virtual ~EncodedImageDataExtractor() = default; + // Invoked by framework before any image will come to the extractor. 
+ // |expected_receivers_count| is the expected amount of receivers for each + // encoded image. + virtual void Start(int expected_receivers_count) = 0; + // Returns encoded image id, extracted from payload and also encoded image // with its original payload. For concatenated spatial layers it should be the // same id. |coding_entity_id| is unique id of decoder or encoder. diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc index caa639a229..198a6cb42f 100644 --- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc @@ -10,6 +10,7 @@ #include "test/pc/e2e/analyzer/video/example_video_quality_analyzer.h" +#include "api/array_view.h" #include "rtc_base/logging.h" namespace webrtc { @@ -18,13 +19,16 @@ namespace webrtc_pc_e2e { ExampleVideoQualityAnalyzer::ExampleVideoQualityAnalyzer() = default; ExampleVideoQualityAnalyzer::~ExampleVideoQualityAnalyzer() = default; -void ExampleVideoQualityAnalyzer::Start(std::string test_case_name, - int max_threads_count) {} +void ExampleVideoQualityAnalyzer::Start( + std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) {} uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured( + absl::string_view peer_name, const std::string& stream_label, const webrtc::VideoFrame& frame) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); uint16_t frame_id = next_frame_id_++; auto it = frames_in_flight_.find(frame_id); if (it == frames_in_flight_.end()) { @@ -45,69 +49,77 @@ uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured( } void ExampleVideoQualityAnalyzer::OnFramePreEncode( + absl::string_view peer_name, const webrtc::VideoFrame& frame) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); ++frames_pre_encoded_; } void ExampleVideoQualityAnalyzer::OnFrameEncoded( + absl::string_view peer_name, uint16_t frame_id, - const webrtc::EncodedImage& 
encoded_image) { - rtc::CritScope crit(&lock_); + const webrtc::EncodedImage& encoded_image, + const EncoderStats& stats) { + MutexLock lock(&lock_); ++frames_encoded_; } void ExampleVideoQualityAnalyzer::OnFrameDropped( + absl::string_view peer_name, webrtc::EncodedImageCallback::DropReason reason) { RTC_LOG(INFO) << "Frame dropped by encoder"; - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); ++frames_dropped_; } void ExampleVideoQualityAnalyzer::OnFramePreDecode( + absl::string_view peer_name, uint16_t frame_id, const webrtc::EncodedImage& encoded_image) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); ++frames_received_; } void ExampleVideoQualityAnalyzer::OnFrameDecoded( + absl::string_view peer_name, const webrtc::VideoFrame& frame, - absl::optional decode_time_ms, - absl::optional qp) { - rtc::CritScope crit(&lock_); + const DecoderStats& stats) { + MutexLock lock(&lock_); ++frames_decoded_; } void ExampleVideoQualityAnalyzer::OnFrameRendered( + absl::string_view peer_name, const webrtc::VideoFrame& frame) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); frames_in_flight_.erase(frame.id()); ++frames_rendered_; } void ExampleVideoQualityAnalyzer::OnEncoderError( + absl::string_view peer_name, const webrtc::VideoFrame& frame, int32_t error_code) { RTC_LOG(LS_ERROR) << "Failed to encode frame " << frame.id() << ". Code: " << error_code; } -void ExampleVideoQualityAnalyzer::OnDecoderError(uint16_t frame_id, +void ExampleVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, int32_t error_code) { RTC_LOG(LS_ERROR) << "Failed to decode frame " << frame_id << ". 
Code: " << error_code; } void ExampleVideoQualityAnalyzer::Stop() { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); RTC_LOG(INFO) << "There are " << frames_in_flight_.size() << " frames in flight, assuming all of them are dropped"; frames_dropped_ += frames_in_flight_.size(); } std::string ExampleVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); auto it = frames_to_stream_label_.find(frame_id); RTC_DCHECK(it != frames_to_stream_label_.end()) << "Unknown frame_id=" << frame_id; @@ -115,37 +127,37 @@ std::string ExampleVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) { } uint64_t ExampleVideoQualityAnalyzer::frames_captured() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return frames_captured_; } uint64_t ExampleVideoQualityAnalyzer::frames_pre_encoded() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return frames_pre_encoded_; } uint64_t ExampleVideoQualityAnalyzer::frames_encoded() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return frames_encoded_; } uint64_t ExampleVideoQualityAnalyzer::frames_received() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return frames_received_; } uint64_t ExampleVideoQualityAnalyzer::frames_decoded() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return frames_decoded_; } uint64_t ExampleVideoQualityAnalyzer::frames_rendered() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return frames_rendered_; } uint64_t ExampleVideoQualityAnalyzer::frames_dropped() const { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); return frames_dropped_; } diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h index 8b29e1223b..9f004396ae 100644 --- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h +++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h @@ 
-16,10 +16,11 @@ #include #include +#include "api/array_view.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -33,21 +34,34 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { ExampleVideoQualityAnalyzer(); ~ExampleVideoQualityAnalyzer() override; - void Start(std::string test_case_name, int max_threads_count) override; - uint16_t OnFrameCaptured(const std::string& stream_label, + void Start(std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) override; + uint16_t OnFrameCaptured(absl::string_view peer_name, + const std::string& stream_label, const VideoFrame& frame) override; - void OnFramePreEncode(const VideoFrame& frame) override; - void OnFrameEncoded(uint16_t frame_id, - const EncodedImage& encoded_image) override; - void OnFrameDropped(EncodedImageCallback::DropReason reason) override; - void OnFramePreDecode(uint16_t frame_id, + void OnFramePreEncode(absl::string_view peer_name, + const VideoFrame& frame) override; + void OnFrameEncoded(absl::string_view peer_name, + uint16_t frame_id, + const EncodedImage& encoded_image, + const EncoderStats& stats) override; + void OnFrameDropped(absl::string_view peer_name, + EncodedImageCallback::DropReason reason) override; + void OnFramePreDecode(absl::string_view peer_name, + uint16_t frame_id, const EncodedImage& encoded_image) override; - void OnFrameDecoded(const VideoFrame& frame, - absl::optional decode_time_ms, - absl::optional qp) override; - void OnFrameRendered(const VideoFrame& frame) override; - void OnEncoderError(const VideoFrame& frame, int32_t error_code) override; - void OnDecoderError(uint16_t frame_id, int32_t error_code) override; + void OnFrameDecoded(absl::string_view peer_name, + const VideoFrame& frame, + const DecoderStats& 
stats) override; + void OnFrameRendered(absl::string_view peer_name, + const VideoFrame& frame) override; + void OnEncoderError(absl::string_view peer_name, + const VideoFrame& frame, + int32_t error_code) override; + void OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, + int32_t error_code) override; void Stop() override; std::string GetStreamLabel(uint16_t frame_id) override; @@ -65,7 +79,7 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { // when it will be received in peer B, so we need to guard it with lock. // Also because analyzer will serve for all video streams it can be called // from different threads inside one peer. - rtc::CriticalSection lock_; + mutable Mutex lock_; // Stores frame ids, that are currently going from one peer to another. We // need to keep them to correctly determine dropped frames and also correctly // process frame id overlap. diff --git a/test/pc/e2e/analyzer/video/multi_head_queue.h b/test/pc/e2e/analyzer/video/multi_head_queue.h new file mode 100644 index 0000000000..52314a60d5 --- /dev/null +++ b/test/pc/e2e/analyzer/video/multi_head_queue.h @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_ +#define TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// A queue that allows more than one reader. 
Readers are independent, and all +// readers will see all elements; an inserted element stays in the queue until +// all readers have extracted it. Elements are copied and copying is assumed to +// be cheap. +template +class MultiHeadQueue { + public: + // Creates queue with exactly |readers_count| readers. + explicit MultiHeadQueue(size_t readers_count) { + for (size_t i = 0; i < readers_count; ++i) { + queues_.push_back(std::deque()); + } + } + + // Add value to the end of the queue. Complexity O(readers_count). + void PushBack(T value) { + for (auto& queue : queues_) { + queue.push_back(value); + } + } + + // Extract element from specified head. Complexity O(1). + absl::optional PopFront(size_t index) { + RTC_CHECK_LT(index, queues_.size()); + if (queues_[index].empty()) { + return absl::nullopt; + } + T out = queues_[index].front(); + queues_[index].pop_front(); + return out; + } + + // Returns element at specified head. Complexity O(1). + absl::optional Front(size_t index) const { + RTC_CHECK_LT(index, queues_.size()); + if (queues_[index].empty()) { + return absl::nullopt; + } + return queues_[index].front(); + } + + // Returns true if for specified head there are no more elements in the queue + // or false otherwise. Complexity O(1). + bool IsEmpty(size_t index) const { + RTC_CHECK_LT(index, queues_.size()); + return queues_[index].empty(); + } + + // Returns size of the longest queue between all readers. + // Complexity O(readers_count). + size_t size() const { + size_t size = 0; + for (auto& queue : queues_) { + if (queue.size() > size) { + size = queue.size(); + } + } + return size; + } + + // Returns size of the specified queue. Complexity O(1). 
+ size_t size(size_t index) const { + RTC_CHECK_LT(index, queues_.size()); + return queues_[index].size(); + } + + size_t readers_count() const { return queues_.size(); } + + private: + std::vector> queues_; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_ diff --git a/test/pc/e2e/analyzer/video/multi_head_queue_test.cc b/test/pc/e2e/analyzer/video/multi_head_queue_test.cc new file mode 100644 index 0000000000..3a4ab6cdbb --- /dev/null +++ b/test/pc/e2e/analyzer/video/multi_head_queue_test.cc @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/pc/e2e/analyzer/video/multi_head_queue.h" +#include "absl/types/optional.h" +#include "test/gtest.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +TEST(MultiHeadQueueTest, GetOnEmpty) { + MultiHeadQueue queue = MultiHeadQueue(10); + EXPECT_TRUE(queue.IsEmpty(0)); + for (int i = 0; i < 10; ++i) { + EXPECT_FALSE(queue.PopFront(i).has_value()); + EXPECT_FALSE(queue.Front(i).has_value()); + } +} + +TEST(MultiHeadQueueTest, SingleHeadOneAddOneRemove) { + MultiHeadQueue queue = MultiHeadQueue(1); + queue.PushBack(1); + EXPECT_EQ(queue.size(), 1lu); + EXPECT_TRUE(queue.Front(0).has_value()); + EXPECT_EQ(queue.Front(0).value(), 1); + absl::optional value = queue.PopFront(0); + EXPECT_TRUE(value.has_value()); + EXPECT_EQ(value.value(), 1); + EXPECT_EQ(queue.size(), 0lu); + EXPECT_TRUE(queue.IsEmpty(0)); +} + +TEST(MultiHeadQueueTest, SingleHead) { + MultiHeadQueue queue = MultiHeadQueue(1); + for (size_t i = 0; i < 10; ++i) { + queue.PushBack(i); + 
EXPECT_EQ(queue.size(), i + 1); + } + for (size_t i = 0; i < 10; ++i) { + absl::optional value = queue.PopFront(0); + EXPECT_EQ(queue.size(), 10 - i - 1); + ASSERT_TRUE(value.has_value()); + EXPECT_EQ(value.value(), i); + } +} + +TEST(MultiHeadQueueTest, ThreeHeadsAddAllRemoveAllPerHead) { + MultiHeadQueue queue = MultiHeadQueue(3); + for (size_t i = 0; i < 10; ++i) { + queue.PushBack(i); + EXPECT_EQ(queue.size(), i + 1); + } + for (size_t i = 0; i < 10; ++i) { + absl::optional value = queue.PopFront(0); + EXPECT_EQ(queue.size(), 10lu); + ASSERT_TRUE(value.has_value()); + EXPECT_EQ(value.value(), i); + } + for (size_t i = 0; i < 10; ++i) { + absl::optional value = queue.PopFront(1); + EXPECT_EQ(queue.size(), 10lu); + ASSERT_TRUE(value.has_value()); + EXPECT_EQ(value.value(), i); + } + for (size_t i = 0; i < 10; ++i) { + absl::optional value = queue.PopFront(2); + EXPECT_EQ(queue.size(), 10 - i - 1); + ASSERT_TRUE(value.has_value()); + EXPECT_EQ(value.value(), i); + } +} + +TEST(MultiHeadQueueTest, ThreeHeadsAddAllRemoveAll) { + MultiHeadQueue queue = MultiHeadQueue(3); + for (size_t i = 0; i < 10; ++i) { + queue.PushBack(i); + EXPECT_EQ(queue.size(), i + 1); + } + for (size_t i = 0; i < 10; ++i) { + absl::optional value1 = queue.PopFront(0); + absl::optional value2 = queue.PopFront(1); + absl::optional value3 = queue.PopFront(2); + EXPECT_EQ(queue.size(), 10 - i - 1); + ASSERT_TRUE(value1.has_value()); + ASSERT_TRUE(value2.has_value()); + ASSERT_TRUE(value3.has_value()); + EXPECT_EQ(value1.value(), i); + EXPECT_EQ(value2.value(), i); + EXPECT_EQ(value3.value(), i); + } +} + +} // namespace +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc index f490cc4094..196f900bd3 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc @@ -15,6 +15,7 @@ 
#include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/video/i420_buffer.h" #include "modules/video_coding/include/video_error_codes.h" @@ -26,10 +27,12 @@ namespace webrtc_pc_e2e { QualityAnalyzingVideoDecoder::QualityAnalyzingVideoDecoder( int id, + absl::string_view peer_name, std::unique_ptr delegate, EncodedImageDataExtractor* extractor, VideoQualityAnalyzerInterface* analyzer) : id_(id), + peer_name_(peer_name), implementation_name_("AnalyzingDecoder-" + std::string(delegate->ImplementationName())), delegate_(std::move(delegate)), @@ -76,7 +79,7 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image, EncodedImage* origin_image; { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); // Store id to be able to retrieve it in analyzing callback. timestamp_to_frame_id_.insert({input_image.Timestamp(), out.id}); // Store encoded image to prevent its destruction while it is used in @@ -87,17 +90,17 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image, // We can safely dereference |origin_image|, because it can be removed from // the map only after |delegate_| Decode method will be invoked. Image will be // removed inside DecodedImageCallback, which can be done on separate thread. - analyzer_->OnFramePreDecode(out.id, *origin_image); + analyzer_->OnFramePreDecode(peer_name_, out.id, *origin_image); int32_t result = delegate_->Decode(*origin_image, missing_frames, render_time_ms); if (result != WEBRTC_VIDEO_CODEC_OK) { // If delegate decoder failed, then cleanup data for this image. 
{ - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); timestamp_to_frame_id_.erase(input_image.Timestamp()); decoding_images_.erase(out.id); } - analyzer_->OnDecoderError(out.id, result); + analyzer_->OnDecoderError(peer_name_, out.id, result); } return result; } @@ -109,11 +112,15 @@ int32_t QualityAnalyzingVideoDecoder::RegisterDecodeCompleteCallback( } int32_t QualityAnalyzingVideoDecoder::Release() { - rtc::CritScope crit(&lock_); + // Release decoder first. During release process it can still decode some + // frames, so we don't take a lock to prevent deadlock. + int32_t result = delegate_->Release(); + + MutexLock lock(&lock_); analyzing_callback_->SetDelegateCallback(nullptr); timestamp_to_frame_id_.clear(); decoding_images_.clear(); - return delegate_->Release(); + return result; } bool QualityAnalyzingVideoDecoder::PrefersLateDecoding() const { @@ -131,7 +138,7 @@ QualityAnalyzingVideoDecoder::DecoderCallback::~DecoderCallback() = default; void QualityAnalyzingVideoDecoder::DecoderCallback::SetDelegateCallback( DecodedImageCallback* delegate) { - rtc::CritScope crit(&callback_lock_); + MutexLock lock(&callback_lock_); delegate_callback_ = delegate; } @@ -143,7 +150,7 @@ int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( decoder_->OnFrameDecoded(&decodedImage, /*decode_time_ms=*/absl::nullopt, /*qp=*/absl::nullopt); - rtc::CritScope crit(&callback_lock_); + MutexLock lock(&callback_lock_); RTC_DCHECK(delegate_callback_); return delegate_callback_->Decoded(decodedImage); } @@ -153,7 +160,7 @@ int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( int64_t decode_time_ms) { decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, /*qp=*/absl::nullopt); - rtc::CritScope crit(&callback_lock_); + MutexLock lock(&callback_lock_); RTC_DCHECK(delegate_callback_); return delegate_callback_->Decoded(decodedImage, decode_time_ms); } @@ -164,7 +171,7 @@ void QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( absl::optional qp) { 
decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, qp); - rtc::CritScope crit(&callback_lock_); + MutexLock lock(&callback_lock_); RTC_DCHECK(delegate_callback_); delegate_callback_->Decoded(decodedImage, decode_time_ms, qp); } @@ -179,7 +186,7 @@ QualityAnalyzingVideoDecoder::DecoderCallback::IrrelevantSimulcastStreamDecoded( .set_timestamp_rtp(timestamp_ms) .set_id(frame_id) .build(); - rtc::CritScope crit(&callback_lock_); + MutexLock lock(&callback_lock_); RTC_DCHECK(delegate_callback_); delegate_callback_->Decoded(dummy_frame, absl::nullopt, absl::nullopt); return WEBRTC_VIDEO_CODEC_OK; @@ -200,7 +207,7 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded( absl::optional qp) { uint16_t frame_id; { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); auto it = timestamp_to_frame_id_.find(frame->timestamp()); if (it == timestamp_to_frame_id_.end()) { // Ensure, that we have info about this frame. It can happen that for some @@ -218,15 +225,19 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded( // Set frame id to the value, that was extracted from corresponding encoded // image. 
frame->set_id(frame_id); - analyzer_->OnFrameDecoded(*frame, decode_time_ms, qp); + VideoQualityAnalyzerInterface::DecoderStats stats; + stats.decode_time_ms = decode_time_ms; + analyzer_->OnFrameDecoded(peer_name_, *frame, stats); } QualityAnalyzingVideoDecoderFactory::QualityAnalyzingVideoDecoderFactory( + absl::string_view peer_name, std::unique_ptr delegate, IdGenerator* id_generator, EncodedImageDataExtractor* extractor, VideoQualityAnalyzerInterface* analyzer) - : delegate_(std::move(delegate)), + : peer_name_(peer_name), + delegate_(std::move(delegate)), id_generator_(id_generator), extractor_(extractor), analyzer_(analyzer) {} @@ -243,7 +254,8 @@ QualityAnalyzingVideoDecoderFactory::CreateVideoDecoder( const SdpVideoFormat& format) { std::unique_ptr decoder = delegate_->CreateVideoDecoder(format); return std::make_unique( - id_generator_->GetNextId(), std::move(decoder), extractor_, analyzer_); + id_generator_->GetNextId(), peer_name_, std::move(decoder), extractor_, + analyzer_); } std::unique_ptr @@ -253,7 +265,8 @@ QualityAnalyzingVideoDecoderFactory::LegacyCreateVideoDecoder( std::unique_ptr decoder = delegate_->LegacyCreateVideoDecoder(format, receive_stream_id); return std::make_unique( - id_generator_->GetNextId(), std::move(decoder), extractor_, analyzer_); + id_generator_->GetNextId(), peer_name_, std::move(decoder), extractor_, + analyzer_); } } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h index 5cbc882226..2381f593b9 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h @@ -16,13 +16,14 @@ #include #include +#include "absl/strings/string_view.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" #include "api/video_codecs/sdp_video_format.h" #include 
"api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h" #include "test/pc/e2e/analyzer/video/id_generator.h" @@ -45,14 +46,15 @@ namespace webrtc_pc_e2e { // callback, where video analyzer will be called again and then decoded frame // will be passed to origin callback, provided by user. // -// Quality decoder registers its own callback in origin decoder at the same -// time, when user registers his callback in quality decoder. +// Quality decoder registers its own callback in origin decoder, at the same +// time the user registers their callback in quality decoder. class QualityAnalyzingVideoDecoder : public VideoDecoder { public: // Creates analyzing decoder. |id| is unique coding entity id, that will // be used to distinguish all encoders and decoders inside // EncodedImageDataInjector and EncodedImageIdExtracor. QualityAnalyzingVideoDecoder(int id, + absl::string_view peer_name, std::unique_ptr delegate, EncodedImageDataExtractor* extractor, VideoQualityAnalyzerInterface* analyzer); @@ -95,7 +97,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder { rtc::scoped_refptr dummy_frame_buffer_; - rtc::CriticalSection callback_lock_; + Mutex callback_lock_; DecodedImageCallback* delegate_callback_ RTC_GUARDED_BY(callback_lock_); }; @@ -104,6 +106,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder { absl::optional qp); const int id_; + const std::string peer_name_; const std::string implementation_name_; std::unique_ptr delegate_; EncodedImageDataExtractor* const extractor_; @@ -113,7 +116,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder { // VideoDecoder interface assumes async delivery of decoded video frames. // This lock is used to protect shared state, that have to be propagated // from received EncodedImage to resulted VideoFrame. 
- rtc::CriticalSection lock_; + Mutex lock_; std::map timestamp_to_frame_id_ RTC_GUARDED_BY(lock_); // Stores currently being decoded images by frame id. Because @@ -129,6 +132,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder { class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory { public: QualityAnalyzingVideoDecoderFactory( + absl::string_view peer_name, std::unique_ptr delegate, IdGenerator* id_generator, EncodedImageDataExtractor* extractor, @@ -144,6 +148,7 @@ class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory { const std::string& receive_stream_id) override; private: + const std::string peer_name_; std::unique_ptr delegate_; IdGenerator* const id_generator_; EncodedImageDataExtractor* const extractor_; diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc index af95790028..04ec892e12 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc @@ -14,10 +14,10 @@ #include #include +#include "absl/strings/string_view.h" #include "api/video/video_codec_type.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_error_codes.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" namespace webrtc { @@ -54,12 +54,14 @@ std::pair GetMinMaxBitratesBps(const VideoCodec& codec, QualityAnalyzingVideoEncoder::QualityAnalyzingVideoEncoder( int id, + absl::string_view peer_name, std::unique_ptr delegate, double bitrate_multiplier, std::map> stream_required_spatial_index, EncodedImageDataInjector* injector, VideoQualityAnalyzerInterface* analyzer) : id_(id), + peer_name_(peer_name), delegate_(std::move(delegate)), bitrate_multiplier_(bitrate_multiplier), stream_required_spatial_index_(std::move(stream_required_spatial_index)), @@ -77,7 +79,7 @@ void QualityAnalyzingVideoEncoder::SetFecControllerOverride( 
int32_t QualityAnalyzingVideoEncoder::InitEncode( const VideoCodec* codec_settings, const Settings& settings) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); codec_settings_ = *codec_settings; mode_ = SimulcastMode::kNormal; if (codec_settings->codecType == kVideoCodecVP9) { @@ -108,34 +110,38 @@ int32_t QualityAnalyzingVideoEncoder::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) { // We need to get a lock here because delegate_callback can be hypothetically // accessed from different thread (encoder one) concurrently. - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); delegate_callback_ = callback; return delegate_->RegisterEncodeCompleteCallback(this); } int32_t QualityAnalyzingVideoEncoder::Release() { - rtc::CritScope crit(&lock_); + // Release encoder first. During release process it can still encode some + // frames, so we don't take a lock to prevent deadlock. + int32_t result = delegate_->Release(); + + MutexLock lock(&lock_); delegate_callback_ = nullptr; - return delegate_->Release(); + return result; } int32_t QualityAnalyzingVideoEncoder::Encode( const VideoFrame& frame, const std::vector* frame_types) { { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); // Store id to be able to retrieve it in analyzing callback. timestamp_to_frame_id_list_.push_back({frame.timestamp(), frame.id()}); // If this list is growing, it means that we are not receiving new encoded // images from encoder. So it should be a bug in setup on in the encoder. RTC_DCHECK_LT(timestamp_to_frame_id_list_.size(), kMaxFrameInPipelineCount); } - analyzer_->OnFramePreEncode(frame); + analyzer_->OnFramePreEncode(peer_name_, frame); int32_t result = delegate_->Encode(frame, frame_types); if (result != WEBRTC_VIDEO_CODEC_OK) { // If origin encoder failed, then cleanup data for this frame. { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); // The timestamp-frame_id pair can be not the last one, so we need to // find it first and then remove. 
We will search from the end, because // usually it will be the last or close to the last one. @@ -148,7 +154,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode( } } } - analyzer_->OnEncoderError(frame, result); + analyzer_->OnEncoderError(peer_name_, frame, result); } return result; } @@ -157,6 +163,10 @@ void QualityAnalyzingVideoEncoder::SetRates( const VideoEncoder::RateControlParameters& parameters) { RTC_DCHECK_GT(bitrate_multiplier_, 0.0); if (fabs(bitrate_multiplier_ - kNoMultiplier) < kEps) { + { + MutexLock lock(&lock_); + bitrate_allocation_ = parameters.bitrate; + } return delegate_->SetRates(parameters); } @@ -196,6 +206,10 @@ void QualityAnalyzingVideoEncoder::SetRates( RateControlParameters adjusted_params = parameters; adjusted_params.bitrate = multiplied_allocation; + { + MutexLock lock(&lock_); + bitrate_allocation_ = adjusted_params.bitrate; + } return delegate_->SetRates(adjusted_params); } @@ -218,12 +232,12 @@ VideoEncoder::EncoderInfo QualityAnalyzingVideoEncoder::GetEncoderInfo() const { // pair - remove the front pair and got to the step 1. 
EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codec_specific_info) { uint16_t frame_id; bool discard = false; + uint32_t target_encode_bitrate = 0; { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); std::pair timestamp_frame_id; while (!timestamp_to_frame_id_list_.empty()) { timestamp_frame_id = timestamp_to_frame_id_list_.front(); @@ -253,11 +267,18 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage( frame_id = timestamp_frame_id.second; discard = ShouldDiscard(frame_id, encoded_image); + if (!discard) { + target_encode_bitrate = bitrate_allocation_.GetSpatialLayerSum( + encoded_image.SpatialIndex().value_or(0)); + } } if (!discard) { - // Analyzer should see only encoded images, that weren't discarded. - analyzer_->OnFrameEncoded(frame_id, encoded_image); + // Analyzer should see only encoded images, that weren't discarded. But all + // not discarded layers have to be passed. 
+ VideoQualityAnalyzerInterface::EncoderStats stats; + stats.target_encode_bitrate = target_encode_bitrate; + analyzer_->OnFrameEncoded(peer_name_, frame_id, encoded_image, stats); } // Image data injector injects frame id and discard flag into provided @@ -268,17 +289,16 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage( const EncodedImage& image = injector_->InjectData(frame_id, discard, encoded_image, id_); { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); RTC_DCHECK(delegate_callback_); - return delegate_callback_->OnEncodedImage(image, codec_specific_info, - fragmentation); + return delegate_callback_->OnEncodedImage(image, codec_specific_info); } } void QualityAnalyzingVideoEncoder::OnDroppedFrame( EncodedImageCallback::DropReason reason) { - rtc::CritScope crit(&lock_); - analyzer_->OnFrameDropped(reason); + MutexLock lock(&lock_); + analyzer_->OnFrameDropped(peer_name_, reason); RTC_DCHECK(delegate_callback_); delegate_callback_->OnDroppedFrame(reason); } @@ -290,6 +310,9 @@ bool QualityAnalyzingVideoEncoder::ShouldDiscard( absl::optional required_spatial_index = stream_required_spatial_index_[stream_label]; if (required_spatial_index) { + if (*required_spatial_index == kAnalyzeAnySpatialStream) { + return false; + } absl::optional cur_spatial_index = encoded_image.SpatialIndex(); if (!cur_spatial_index) { cur_spatial_index = 0; @@ -325,13 +348,15 @@ bool QualityAnalyzingVideoEncoder::ShouldDiscard( } QualityAnalyzingVideoEncoderFactory::QualityAnalyzingVideoEncoderFactory( + absl::string_view peer_name, std::unique_ptr delegate, double bitrate_multiplier, std::map> stream_required_spatial_index, IdGenerator* id_generator, EncodedImageDataInjector* injector, VideoQualityAnalyzerInterface* analyzer) - : delegate_(std::move(delegate)), + : peer_name_(peer_name), + delegate_(std::move(delegate)), bitrate_multiplier_(bitrate_multiplier), stream_required_spatial_index_(std::move(stream_required_spatial_index)), 
id_generator_(id_generator), @@ -355,9 +380,9 @@ std::unique_ptr QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder( const SdpVideoFormat& format) { return std::make_unique( - id_generator_->GetNextId(), delegate_->CreateVideoEncoder(format), - bitrate_multiplier_, stream_required_spatial_index_, injector_, - analyzer_); + id_generator_->GetNextId(), peer_name_, + delegate_->CreateVideoEncoder(format), bitrate_multiplier_, + stream_required_spatial_index_, injector_, analyzer_); } } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h index 247be73212..96d9d77e34 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h @@ -16,19 +16,25 @@ #include #include +#include "absl/strings/string_view.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/video/video_frame.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h" #include "test/pc/e2e/analyzer/video/id_generator.h" namespace webrtc { namespace webrtc_pc_e2e { +// Tells QualityAnalyzingVideoEncoder that it shouldn't mark any spatial stream +// as to be discarded. In such case the top stream will be passed to +// VideoQualityAnalyzerInterface as a reference. +constexpr int kAnalyzeAnySpatialStream = -1; + // QualityAnalyzingVideoEncoder is used to wrap origin video encoder and inject // VideoQualityAnalyzerInterface before and after encoder. // @@ -44,8 +50,8 @@ namespace webrtc_pc_e2e { // injected into EncodedImage with passed EncodedImageDataInjector. 
Then new // EncodedImage will be passed to origin callback, provided by user. // -// Quality encoder registers its own callback in origin encoder at the same -// time, when user registers his callback in quality encoder. +// Quality encoder registers its own callback in origin encoder, at the same +// time the user registers their callback in quality encoder. class QualityAnalyzingVideoEncoder : public VideoEncoder, public EncodedImageCallback { public: @@ -54,6 +60,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder, // EncodedImageDataInjector and EncodedImageIdExtracor. QualityAnalyzingVideoEncoder( int id, + absl::string_view peer_name, std::unique_ptr delegate, double bitrate_multiplier, std::map> stream_required_spatial_index, @@ -77,8 +84,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder, // Methods of EncodedImageCallback interface. EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; + const CodecSpecificInfo* codec_specific_info) override; void OnDroppedFrame(DropReason reason) override; private: @@ -134,8 +140,15 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder, RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); const int id_; + const std::string peer_name_; std::unique_ptr delegate_; const double bitrate_multiplier_; + // Contains mapping from stream label to optional spatial index. + // If we have stream label "Foo" and mapping contains + // 1. |absl::nullopt| means "Foo" isn't simulcast/SVC stream + // 2. |kAnalyzeAnySpatialStream| means all simulcast/SVC streams are required + // 3. Concrete value means that particular simulcast/SVC stream have to be + // analyzed. 
std::map> stream_required_spatial_index_; EncodedImageDataInjector* const injector_; VideoQualityAnalyzerInterface* const analyzer_; @@ -143,13 +156,14 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder, // VideoEncoder interface assumes async delivery of encoded images. // This lock is used to protect shared state, that have to be propagated // from received VideoFrame to resulted EncodedImage. - rtc::CriticalSection lock_; + Mutex lock_; VideoCodec codec_settings_; SimulcastMode mode_ RTC_GUARDED_BY(lock_); EncodedImageCallback* delegate_callback_ RTC_GUARDED_BY(lock_); std::list> timestamp_to_frame_id_list_ RTC_GUARDED_BY(lock_); + VideoBitrateAllocation bitrate_allocation_ RTC_GUARDED_BY(lock_); }; // Produces QualityAnalyzingVideoEncoder, which hold decoders, produced by @@ -158,6 +172,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder, class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory { public: QualityAnalyzingVideoEncoderFactory( + absl::string_view peer_name, std::unique_ptr delegate, double bitrate_multiplier, std::map> stream_required_spatial_index, @@ -174,6 +189,7 @@ class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory { const SdpVideoFormat& format) override; private: + const std::string peer_name_; std::unique_ptr delegate_; const double bitrate_multiplier_; std::map> stream_required_spatial_index_; diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc index ec0d26b780..304cb67d37 100644 --- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc +++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc @@ -19,13 +19,6 @@ namespace webrtc { namespace webrtc_pc_e2e { -namespace { - -// Number of bytes from the beginning of the EncodedImage buffer that will be -// used to store frame id and sub id. 
-constexpr size_t kUsedBufferSize = 3; - -} // namespace SingleProcessEncodedImageDataInjector::SingleProcessEncodedImageDataInjector() = default; @@ -37,58 +30,72 @@ EncodedImage SingleProcessEncodedImageDataInjector::InjectData( bool discard, const EncodedImage& source, int coding_entity_id) { - RTC_CHECK(source.size() >= kUsedBufferSize); + RTC_CHECK(source.size() >= ExtractionInfo::kUsedBufferSize); ExtractionInfo info; - info.length = source.size(); info.discard = discard; - size_t insertion_pos = source.size() - kUsedBufferSize; - memcpy(info.origin_data, &source.data()[insertion_pos], kUsedBufferSize); + size_t insertion_pos = source.size() - ExtractionInfo::kUsedBufferSize; + memcpy(info.origin_data, &source.data()[insertion_pos], + ExtractionInfo::kUsedBufferSize); { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); // Will create new one if missed. ExtractionInfoVector& ev = extraction_cache_[id]; info.sub_id = ev.next_sub_id++; ev.infos[info.sub_id] = info; } + auto buffer = EncodedImageBuffer::Create(source.data(), source.size()); + buffer->data()[insertion_pos] = id & 0x00ff; + buffer->data()[insertion_pos + 1] = (id & 0xff00) >> 8; + buffer->data()[insertion_pos + 2] = info.sub_id; + EncodedImage out = source; - out.data()[insertion_pos] = id & 0x00ff; - out.data()[insertion_pos + 1] = (id & 0xff00) >> 8; - out.data()[insertion_pos + 2] = info.sub_id; + out.SetEncodedData(buffer); return out; } EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData( const EncodedImage& source, int coding_entity_id) { + size_t size = source.size(); + auto buffer = EncodedImageBuffer::Create(source.data(), source.size()); EncodedImage out = source; + out.SetEncodedData(buffer); + + std::vector frame_sizes; + std::vector frame_sl_index; + size_t max_spatial_index = out.SpatialIndex().value_or(0); + for (size_t i = 0; i <= max_spatial_index; ++i) { + auto frame_size = source.SpatialLayerFrameSize(i); + if (frame_size.value_or(0)) { + 
frame_sl_index.push_back(i); + frame_sizes.push_back(frame_size.value()); + } + } + if (frame_sizes.empty()) { + frame_sizes.push_back(size); + } - // Both |source| and |out| image will share the same buffer for payload or - // out will have a copy for it, so we can operate on the |out| buffer only. - uint8_t* buffer = out.data(); - size_t size = out.size(); - - // |pos| is pointing to end of current encoded image. - size_t pos = size - 1; + size_t prev_frames_size = 0; absl::optional id = absl::nullopt; bool discard = true; std::vector extraction_infos; - // Go through whole buffer and find all related extraction infos in - // order from 1st encoded image to the last. - while (true) { - size_t insertion_pos = pos - kUsedBufferSize + 1; + for (size_t frame_size : frame_sizes) { + size_t insertion_pos = + prev_frames_size + frame_size - ExtractionInfo::kUsedBufferSize; // Extract frame id from first 2 bytes starting from insertion pos. - uint16_t next_id = buffer[insertion_pos] + (buffer[insertion_pos + 1] << 8); + uint16_t next_id = buffer->data()[insertion_pos] + + (buffer->data()[insertion_pos + 1] << 8); // Extract frame sub id from second 3 byte starting from insertion pos. 
- uint8_t sub_id = buffer[insertion_pos + 2]; + uint8_t sub_id = buffer->data()[insertion_pos + 2]; RTC_CHECK(!id || *id == next_id) << "Different frames encoded into single encoded image: " << *id << " vs " << next_id; id = next_id; ExtractionInfo info; { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); auto ext_vector_it = extraction_cache_.find(next_id); RTC_CHECK(ext_vector_it != extraction_cache_.end()) << "Unknown frame_id=" << next_id; @@ -96,44 +103,53 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData( auto info_it = ext_vector_it->second.infos.find(sub_id); RTC_CHECK(info_it != ext_vector_it->second.infos.end()) << "Unknown sub_id=" << sub_id << " for frame_id=" << next_id; + info_it->second.received_count++; info = info_it->second; - ext_vector_it->second.infos.erase(info_it); + if (info.received_count == expected_receivers_count_) { + ext_vector_it->second.infos.erase(info_it); + } } - extraction_infos.push_back(info); // We need to discard encoded image only if all concatenated encoded images // have to be discarded. discard = discard && info.discard; - if (pos < info.length) { - break; - } - pos -= info.length; + + extraction_infos.push_back(info); + prev_frames_size += frame_size; } RTC_CHECK(id); - std::reverse(extraction_infos.begin(), extraction_infos.end()); + if (discard) { out.set_size(0); + for (size_t i = 0; i <= max_spatial_index; ++i) { + out.SetSpatialLayerFrameSize(i, 0); + } return EncodedImageExtractionResult{*id, out, true}; } // Make a pass from begin to end to restore origin payload and erase discarded // encoded images. 
- pos = 0; - auto extraction_infos_it = extraction_infos.begin(); - while (pos < size) { - RTC_DCHECK(extraction_infos_it != extraction_infos.end()); - const ExtractionInfo& info = *extraction_infos_it; + size_t pos = 0; + for (size_t frame_index = 0; frame_index < frame_sizes.size(); + ++frame_index) { + RTC_CHECK(pos < size); + const size_t frame_size = frame_sizes[frame_index]; + const ExtractionInfo& info = extraction_infos[frame_index]; if (info.discard) { // If this encoded image is marked to be discarded - erase it's payload // from the buffer. - memmove(&buffer[pos], &buffer[pos + info.length], - size - pos - info.length); - size -= info.length; + memmove(&buffer->data()[pos], &buffer->data()[pos + frame_size], + size - pos - frame_size); + RTC_CHECK_LT(frame_index, frame_sl_index.size()) + << "codec doesn't support discard option or the image, that was " + "supposed to be discarded, is lost"; + out.SetSpatialLayerFrameSize(frame_sl_index[frame_index], 0); + size -= frame_size; } else { - memcpy(&buffer[pos + info.length - kUsedBufferSize], info.origin_data, - kUsedBufferSize); - pos += info.length; + memcpy( + &buffer->data()[pos + frame_size - ExtractionInfo::kUsedBufferSize], + info.origin_data, ExtractionInfo::kUsedBufferSize); + pos += frame_size; } - ++extraction_infos_it; } out.set_size(pos); diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h index 3787cc51aa..8cf1bc4828 100644 --- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h +++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h @@ -18,7 +18,7 @@ #include #include "api/video/encoded_image.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h" namespace webrtc { @@ -50,6 +50,11 @@ class SingleProcessEncodedImageDataInjector : public 
EncodedImageDataInjector, bool discard, const EncodedImage& source, int coding_entity_id) override; + + void Start(int expected_receivers_count) override { + MutexLock crit(&lock_); + expected_receivers_count_ = expected_receivers_count; + } EncodedImageExtractionResult ExtractData(const EncodedImage& source, int coding_entity_id) override; @@ -57,15 +62,18 @@ class SingleProcessEncodedImageDataInjector : public EncodedImageDataInjector, // Contains data required to extract frame id from EncodedImage and restore // original buffer. struct ExtractionInfo { + // Number of bytes from the beginning of the EncodedImage buffer that will + // be used to store frame id and sub id. + const static size_t kUsedBufferSize = 3; // Frame sub id to distinguish encoded images for different spatial layers. uint8_t sub_id; - // Length of the origin buffer encoded image. - size_t length; // Flag to show is this encoded images should be discarded by analyzing // decoder because of not required spatial layer/simulcast stream. bool discard; // Data from first 3 bytes of origin encoded image's payload. - uint8_t origin_data[3]; + uint8_t origin_data[ExtractionInfo::kUsedBufferSize]; + // Count of how many times this frame was received. + int received_count = 0; }; struct ExtractionInfoVector { @@ -77,7 +85,8 @@ class SingleProcessEncodedImageDataInjector : public EncodedImageDataInjector, std::map infos; }; - rtc::CriticalSection lock_; + Mutex lock_; + int expected_receivers_count_ RTC_GUARDED_BY(lock_); // Stores a mapping from frame id to extraction info for spatial layers // for this frame id. 
There can be a lot of them, because if frame was // dropped we can't clean it up, because we won't receive a signal on diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc index 67cafa75a6..da2391467d 100644 --- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc +++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc @@ -20,22 +20,28 @@ namespace webrtc { namespace webrtc_pc_e2e { namespace { -rtc::Buffer CreateBufferOfSizeNFilledWithValuesFromX(size_t n, uint8_t x) { - rtc::Buffer buffer(n); +rtc::scoped_refptr +CreateEncodedImageBufferOfSizeNFilledWithValuesFromX(size_t n, uint8_t x) { + auto buffer = EncodedImageBuffer::Create(n); for (size_t i = 0; i < n; ++i) { - buffer[i] = static_cast(x + i); + buffer->data()[i] = static_cast(x + i); } return buffer; } -} // namespace +EncodedImage CreateEncodedImageOfSizeNFilledWithValuesFromX(size_t n, + uint8_t x) { + EncodedImage image; + image.SetEncodedData( + CreateEncodedImageBufferOfSizeNFilledWithValuesFromX(n, x)); + return image; +} TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardFalse) { SingleProcessEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - - EncodedImage source(buffer.data(), 10, 10); + EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source.SetTimestamp(123456789); EncodedImageExtractionResult out = @@ -43,7 +49,7 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardFalse) { EXPECT_EQ(out.id, 512); EXPECT_FALSE(out.discard); EXPECT_EQ(out.image.size(), 10ul); - EXPECT_EQ(out.image.capacity(), 10ul); + EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul); for (int i = 0; i < 10; ++i) { EXPECT_EQ(out.image.data()[i], i + 1); } @@ -51,10 +57,9 @@ 
TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardFalse) { TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardTrue) { SingleProcessEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - - EncodedImage source(buffer.data(), 10, 10); + EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source.SetTimestamp(123456789); EncodedImageExtractionResult out = @@ -62,24 +67,70 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardTrue) { EXPECT_EQ(out.id, 512); EXPECT_TRUE(out.discard); EXPECT_EQ(out.image.size(), 0ul); - EXPECT_EQ(out.image.capacity(), 10ul); + EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul); } -TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) { +TEST(SingleProcessEncodedImageDataInjector, InjectWithUnsetSpatialLayerSizes) { SingleProcessEncodedImageDataInjector injector; + injector.Start(1); + + EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); + source.SetTimestamp(123456789); + + EncodedImage intermediate = injector.InjectData(512, false, source, 1); + intermediate.SetSpatialIndex(2); + + EncodedImageExtractionResult out = injector.ExtractData(intermediate, 2); + EXPECT_EQ(out.id, 512); + EXPECT_FALSE(out.discard); + EXPECT_EQ(out.image.size(), 10ul); + for (int i = 0; i < 10; ++i) { + EXPECT_EQ(out.image.data()[i], i + 1); + } + EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2); + for (int i = 0; i < 3; ++i) { + EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul); + } +} - rtc::Buffer buffer1 = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - rtc::Buffer buffer2 = CreateBufferOfSizeNFilledWithValuesFromX(10, 11); - rtc::Buffer buffer3 = CreateBufferOfSizeNFilledWithValuesFromX(10, 21); +TEST(SingleProcessEncodedImageDataInjector, InjectWithZeroSpatialLayerSizes) { + SingleProcessEncodedImageDataInjector injector; + injector.Start(1); + + 
EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); + source.SetTimestamp(123456789); + + EncodedImage intermediate = injector.InjectData(512, false, source, 1); + intermediate.SetSpatialIndex(2); + intermediate.SetSpatialLayerFrameSize(0, 0); + intermediate.SetSpatialLayerFrameSize(1, 0); + intermediate.SetSpatialLayerFrameSize(2, 0); + + EncodedImageExtractionResult out = injector.ExtractData(intermediate, 2); + EXPECT_EQ(out.id, 512); + EXPECT_FALSE(out.discard); + EXPECT_EQ(out.image.size(), 10ul); + for (int i = 0; i < 10; ++i) { + EXPECT_EQ(out.image.data()[i], i + 1); + } + EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2); + for (int i = 0; i < 3; ++i) { + EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul); + } +} + +TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) { + SingleProcessEncodedImageDataInjector injector; + injector.Start(1); // 1st frame - EncodedImage source1(buffer1.data(), 10, 10); + EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source1.SetTimestamp(123456710); // 2nd frame 1st spatial layer - EncodedImage source2(buffer2.data(), 10, 10); + EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 11); source2.SetTimestamp(123456720); // 2nd frame 2nd spatial layer - EncodedImage source3(buffer3.data(), 10, 10); + EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 21); source3.SetTimestamp(123456720); EncodedImage intermediate1 = injector.InjectData(510, false, source1, 1); @@ -94,18 +145,18 @@ TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) { EXPECT_EQ(out1.id, 510); EXPECT_FALSE(out1.discard); EXPECT_EQ(out1.image.size(), 10ul); - EXPECT_EQ(out1.image.capacity(), 10ul); + EXPECT_EQ(out1.image.SpatialLayerFrameSize(0).value_or(0), 0ul); for (int i = 0; i < 10; ++i) { EXPECT_EQ(out1.image.data()[i], i + 1); } EXPECT_EQ(out2.id, 520); EXPECT_TRUE(out2.discard); EXPECT_EQ(out2.image.size(), 0ul); - 
EXPECT_EQ(out2.image.capacity(), 10ul); + EXPECT_EQ(out2.image.SpatialLayerFrameSize(0).value_or(0), 0ul); EXPECT_EQ(out3.id, 520); EXPECT_FALSE(out3.discard); EXPECT_EQ(out3.image.size(), 10ul); - EXPECT_EQ(out3.image.capacity(), 10ul); + EXPECT_EQ(out3.image.SpatialLayerFrameSize(0).value_or(0), 0ul); for (int i = 0; i < 10; ++i) { EXPECT_EQ(out3.image.data()[i], i + 21); } @@ -113,16 +164,13 @@ TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) { TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenated) { SingleProcessEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer1 = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - rtc::Buffer buffer2 = CreateBufferOfSizeNFilledWithValuesFromX(10, 11); - rtc::Buffer buffer3 = CreateBufferOfSizeNFilledWithValuesFromX(10, 21); - - EncodedImage source1(buffer1.data(), 10, 10); + EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source1.SetTimestamp(123456710); - EncodedImage source2(buffer2.data(), 10, 10); + EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 11); source2.SetTimestamp(123456710); - EncodedImage source3(buffer3.data(), 10, 10); + EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 21); source3.SetTimestamp(123456710); // Inject id into 3 images with same frame id. 
@@ -138,8 +186,13 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenated) { concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size()); concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size()); concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size()); - EncodedImage concatenated(concatenated_buffer.data(), concatenated_length, - concatenated_length); + EncodedImage concatenated; + concatenated.SetEncodedData(EncodedImageBuffer::Create( + concatenated_buffer.data(), concatenated_length)); + concatenated.SetSpatialIndex(2); + concatenated.SetSpatialLayerFrameSize(0, intermediate1.size()); + concatenated.SetSpatialLayerFrameSize(1, intermediate2.size()); + concatenated.SetSpatialLayerFrameSize(2, intermediate3.size()); // Extract frame id from concatenated image EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2); @@ -147,26 +200,26 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenated) { EXPECT_EQ(out.id, 512); EXPECT_FALSE(out.discard); EXPECT_EQ(out.image.size(), 2 * 10ul); - EXPECT_EQ(out.image.capacity(), 3 * 10ul); for (int i = 0; i < 10; ++i) { EXPECT_EQ(out.image.data()[i], i + 1); EXPECT_EQ(out.image.data()[i + 10], i + 21); } + EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2); + EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 10ul); + EXPECT_EQ(out.image.SpatialLayerFrameSize(1).value_or(0), 0ul); + EXPECT_EQ(out.image.SpatialLayerFrameSize(2).value_or(0), 10ul); } TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenatedAllDiscarded) { SingleProcessEncodedImageDataInjector injector; + injector.Start(1); - rtc::Buffer buffer1 = CreateBufferOfSizeNFilledWithValuesFromX(10, 1); - rtc::Buffer buffer2 = CreateBufferOfSizeNFilledWithValuesFromX(10, 11); - rtc::Buffer buffer3 = CreateBufferOfSizeNFilledWithValuesFromX(10, 21); - - EncodedImage source1(buffer1.data(), 10, 10); + EncodedImage source1 = 
CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); source1.SetTimestamp(123456710); - EncodedImage source2(buffer2.data(), 10, 10); + EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 11); source2.SetTimestamp(123456710); - EncodedImage source3(buffer3.data(), 10, 10); + EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 21); source3.SetTimestamp(123456710); // Inject id into 3 images with same frame id. @@ -182,8 +235,13 @@ TEST(SingleProcessEncodedImageDataInjector, concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size()); concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size()); concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size()); - EncodedImage concatenated(concatenated_buffer.data(), concatenated_length, - concatenated_length); + EncodedImage concatenated; + concatenated.SetEncodedData(EncodedImageBuffer::Create( + concatenated_buffer.data(), concatenated_length)); + concatenated.SetSpatialIndex(2); + concatenated.SetSpatialLayerFrameSize(0, intermediate1.size()); + concatenated.SetSpatialLayerFrameSize(1, intermediate2.size()); + concatenated.SetSpatialLayerFrameSize(2, intermediate3.size()); // Extract frame id from concatenated image EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2); @@ -191,8 +249,71 @@ TEST(SingleProcessEncodedImageDataInjector, EXPECT_EQ(out.id, 512); EXPECT_TRUE(out.discard); EXPECT_EQ(out.image.size(), 0ul); - EXPECT_EQ(out.image.capacity(), 3 * 10ul); + EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2); + for (int i = 0; i < 3; ++i) { + EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul); + } +} + +TEST(SingleProcessEncodedImageDataInjector, InjectOnceExtractTwice) { + SingleProcessEncodedImageDataInjector injector; + injector.Start(2); + + EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); + source.SetTimestamp(123456789); + + EncodedImageExtractionResult out = + 
injector.ExtractData(injector.InjectData(/*id=*/512, /*discard=*/false, + source, /*coding_entity_id=*/1), + /*coding_entity_id=*/2); + EXPECT_EQ(out.id, 512); + EXPECT_FALSE(out.discard); + EXPECT_EQ(out.image.size(), 10ul); + EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul); + for (int i = 0; i < 10; ++i) { + EXPECT_EQ(out.image.data()[i], i + 1); + } + out = + injector.ExtractData(injector.InjectData(/*id=*/512, /*discard=*/false, + source, /*coding_entity_id=*/1), + 2); + EXPECT_EQ(out.id, 512); + EXPECT_FALSE(out.discard); + EXPECT_EQ(out.image.size(), 10ul); + EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul); + for (int i = 0; i < 10; ++i) { + EXPECT_EQ(out.image.data()[i], i + 1); + } +} + +// Death tests. +// Disabled on Android because death tests misbehave on Android, see +// base/test/gtest_util.h. +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +EncodedImage DeepCopyEncodedImage(const EncodedImage& source) { + EncodedImage copy = source; + copy.SetEncodedData(EncodedImageBuffer::Create(source.data(), source.size())); + return copy; } +TEST(SingleProcessEncodedImageDataInjector, InjectOnceExtractMoreThenExpected) { + SingleProcessEncodedImageDataInjector injector; + injector.Start(2); + + EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(10, 1); + source.SetTimestamp(123456789); + + EncodedImage modified = injector.InjectData(/*id=*/512, /*discard=*/false, + source, /*coding_entity_id=*/1); + + injector.ExtractData(DeepCopyEncodedImage(modified), /*coding_entity_id=*/2); + injector.ExtractData(DeepCopyEncodedImage(modified), /*coding_entity_id=*/2); + EXPECT_DEATH(injector.ExtractData(DeepCopyEncodedImage(modified), + /*coding_entity_id=*/2), + "Unknown sub_id=0 for frame_id=512"); +} +#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +} // namespace } // namespace webrtc_pc_e2e } // namespace webrtc diff --git 
a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc index 70dbcd265e..ebfb41697d 100644 --- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc +++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc @@ -14,6 +14,8 @@ #include #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h" #include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h" #include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h" @@ -43,10 +45,12 @@ class AnalyzingFramePreprocessor : public test::TestVideoCapturer::FramePreprocessor { public: AnalyzingFramePreprocessor( - std::string stream_label, + absl::string_view peer_name, + absl::string_view stream_label, VideoQualityAnalyzerInterface* analyzer, std::vector>> sinks) - : stream_label_(std::move(stream_label)), + : peer_name_(peer_name), + stream_label_(stream_label), analyzer_(analyzer), sinks_(std::move(sinks)) {} ~AnalyzingFramePreprocessor() override = default; @@ -54,7 +58,8 @@ class AnalyzingFramePreprocessor VideoFrame Preprocess(const VideoFrame& source_frame) override { // Copy VideoFrame to be able to set id on it. VideoFrame frame = source_frame; - uint16_t frame_id = analyzer_->OnFrameCaptured(stream_label_, frame); + uint16_t frame_id = + analyzer_->OnFrameCaptured(peer_name_, stream_label_, frame); frame.set_id(frame_id); for (auto& sink : sinks_) { @@ -64,41 +69,13 @@ class AnalyzingFramePreprocessor } private: + const std::string peer_name_; const std::string stream_label_; VideoQualityAnalyzerInterface* const analyzer_; const std::vector>> sinks_; }; -// Implements the video sink, that forwards rendered frames to the video quality -// analyzer and provided sinks. 
-class AnalyzingVideoSink final : public rtc::VideoSinkInterface { - public: - AnalyzingVideoSink( - VideoQualityAnalyzerInterface* analyzer, - std::vector>> sinks) - : analyzer_(analyzer), sinks_(std::move(sinks)) { - RTC_DCHECK(analyzer_); - } - ~AnalyzingVideoSink() override = default; - - void OnFrame(const VideoFrame& frame) override { - if (IsDummyFrameBuffer(frame.video_frame_buffer()->ToI420())) { - // This is dummy frame, so we don't need to process it further. - return; - } - analyzer_->OnFrameRendered(frame); - for (auto& sink : sinks_) { - sink->OnFrame(frame); - } - } - - private: - VideoQualityAnalyzerInterface* const analyzer_; - const std::vector>> - sinks_; -}; - } // namespace VideoQualityAnalyzerInjectionHelper::VideoQualityAnalyzerInjectionHelper( @@ -117,29 +94,33 @@ VideoQualityAnalyzerInjectionHelper::~VideoQualityAnalyzerInjectionHelper() = std::unique_ptr VideoQualityAnalyzerInjectionHelper::WrapVideoEncoderFactory( + absl::string_view peer_name, std::unique_ptr delegate, double bitrate_multiplier, std::map> stream_required_spatial_index) const { return std::make_unique( - std::move(delegate), bitrate_multiplier, + peer_name, std::move(delegate), bitrate_multiplier, std::move(stream_required_spatial_index), encoding_entities_id_generator_.get(), injector_, analyzer_.get()); } std::unique_ptr VideoQualityAnalyzerInjectionHelper::WrapVideoDecoderFactory( + absl::string_view peer_name, std::unique_ptr delegate) const { return std::make_unique( - std::move(delegate), encoding_entities_id_generator_.get(), extractor_, - analyzer_.get()); + peer_name, std::move(delegate), encoding_entities_id_generator_.get(), + extractor_, analyzer_.get()); } std::unique_ptr VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor( - const VideoConfig& config, - test::VideoFrameWriter* writer) const { + absl::string_view peer_name, + const VideoConfig& config) { std::vector>> sinks; + test::VideoFrameWriter* writer = + 
MaybeCreateVideoWriter(config.input_dump_file_name, config); if (writer) { sinks.push_back(std::make_unique(writer)); } @@ -148,40 +129,111 @@ VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor( test::VideoRenderer::Create((*config.stream_label + "-capture").c_str(), config.width, config.height))); } + { + MutexLock lock(&lock_); + known_video_configs_.insert({*config.stream_label, config}); + } return std::make_unique( - std::move(*config.stream_label), analyzer_.get(), std::move(sinks)); + peer_name, std::move(*config.stream_label), analyzer_.get(), + std::move(sinks)); } std::unique_ptr> VideoQualityAnalyzerInjectionHelper::CreateVideoSink( - const VideoConfig& config, - test::VideoFrameWriter* writer) const { - std::vector>> sinks; - if (writer) { - sinks.push_back(std::make_unique(writer)); - } - if (config.show_on_screen) { - sinks.push_back(absl::WrapUnique( - test::VideoRenderer::Create((*config.stream_label + "-render").c_str(), - config.width, config.height))); - } - return std::make_unique(analyzer_.get(), - std::move(sinks)); + absl::string_view peer_name) { + return std::make_unique(peer_name, this); } -void VideoQualityAnalyzerInjectionHelper::Start(std::string test_case_name, - int max_threads_count) { - analyzer_->Start(std::move(test_case_name), max_threads_count); +void VideoQualityAnalyzerInjectionHelper::Start( + std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) { + analyzer_->Start(std::move(test_case_name), peer_names, max_threads_count); + extractor_->Start(peer_names.size()); } void VideoQualityAnalyzerInjectionHelper::OnStatsReports( - const std::string& pc_label, - const StatsReports& stats_reports) { - analyzer_->OnStatsReports(pc_label, stats_reports); + absl::string_view pc_label, + const rtc::scoped_refptr& report) { + analyzer_->OnStatsReports(pc_label, report); } void VideoQualityAnalyzerInjectionHelper::Stop() { analyzer_->Stop(); + for (const auto& video_writer : video_writers_) { + 
video_writer->Close(); + } + video_writers_.clear(); +} + +test::VideoFrameWriter* +VideoQualityAnalyzerInjectionHelper::MaybeCreateVideoWriter( + absl::optional file_name, + const PeerConnectionE2EQualityTestFixture::VideoConfig& config) { + if (!file_name.has_value()) { + return nullptr; + } + // TODO(titovartem) create only one file writer for simulcast video track. + // For now this code will be invoked for each simulcast stream separately, but + // only one file will be used. + auto video_writer = std::make_unique( + file_name.value(), config.width, config.height, config.fps); + test::VideoFrameWriter* out = video_writer.get(); + video_writers_.push_back(std::move(video_writer)); + return out; +} + +void VideoQualityAnalyzerInjectionHelper::OnFrame(absl::string_view peer_name, + const VideoFrame& frame) { + rtc::scoped_refptr i420_buffer = + frame.video_frame_buffer()->ToI420(); + if (IsDummyFrameBuffer(i420_buffer)) { + // This is dummy frame, so we don't need to process it further. + return; + } + // Copy entire video frame including video buffer to ensure that analyzer + // won't hold any WebRTC internal buffers. 
+ VideoFrame frame_copy = frame; + frame_copy.set_video_frame_buffer(I420Buffer::Copy(*i420_buffer)); + analyzer_->OnFrameRendered(peer_name, frame_copy); + + std::string stream_label = analyzer_->GetStreamLabel(frame.id()); + std::vector>>* sinks = + PopulateSinks(stream_label); + if (sinks == nullptr) { + return; + } + for (auto& sink : *sinks) { + sink->OnFrame(frame); + } +} + +std::vector>>* +VideoQualityAnalyzerInjectionHelper::PopulateSinks( + const std::string& stream_label) { + MutexLock lock(&lock_); + auto sinks_it = sinks_.find(stream_label); + if (sinks_it != sinks_.end()) { + return &sinks_it->second; + } + auto it = known_video_configs_.find(stream_label); + RTC_DCHECK(it != known_video_configs_.end()) + << "No video config for stream " << stream_label; + const VideoConfig& config = it->second; + + std::vector>> sinks; + test::VideoFrameWriter* writer = + MaybeCreateVideoWriter(config.output_dump_file_name, config); + if (writer) { + sinks.push_back(std::make_unique(writer)); + } + if (config.show_on_screen) { + sinks.push_back(absl::WrapUnique( + test::VideoRenderer::Create((*config.stream_label + "-render").c_str(), + config.width, config.height))); + } + sinks_.insert({stream_label, std::move(sinks)}); + return &(sinks_.find(stream_label)->second); } } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h index ccda57baaf..d741288345 100644 --- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h +++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h @@ -14,7 +14,10 @@ #include #include #include +#include +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/stats_observer_interface.h" #include "api/test/video_quality_analyzer_interface.h" @@ -22,6 +25,7 @@ #include 
"api/video/video_sink_interface.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" +#include "rtc_base/synchronization/mutex.h" #include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h" #include "test/pc/e2e/analyzer/video/id_generator.h" #include "test/test_video_capturer.h" @@ -45,6 +49,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { // Wraps video encoder factory to give video quality analyzer access to frames // before encoding and encoded images after. std::unique_ptr WrapVideoEncoderFactory( + absl::string_view peer_name, std::unique_ptr delegate, double bitrate_multiplier, std::map> stream_required_spatial_index) @@ -52,36 +57,74 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { // Wraps video decoder factory to give video quality analyzer access to // received encoded images and frames, that were decoded from them. std::unique_ptr WrapVideoDecoderFactory( + absl::string_view peer_name, std::unique_ptr delegate) const; // Creates VideoFrame preprocessor, that will allow video quality analyzer to - // get access to the captured frames. If |writer| in not nullptr, will dump - // captured frames with provided writer. + // get access to the captured frames. If provided config also specifies + // |input_dump_file_name|, video will be written into that file. std::unique_ptr - CreateFramePreprocessor(const VideoConfig& config, - test::VideoFrameWriter* writer) const; + CreateFramePreprocessor(absl::string_view peer_name, + const VideoConfig& config); // Creates sink, that will allow video quality analyzer to get access to - // the rendered frames. If |writer| in not nullptr, will dump rendered - // frames with provided writer. + // the rendered frames. If corresponding video track has + // |output_dump_file_name| in its VideoConfig, then video also will be written + // into that file. 
std::unique_ptr> CreateVideoSink( - const VideoConfig& config, - test::VideoFrameWriter* writer) const; + absl::string_view peer_name); - void Start(std::string test_case_name, int max_threads_count); + void Start(std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count); // Forwards |stats_reports| for Peer Connection |pc_label| to // |analyzer_|. - void OnStatsReports(const std::string& pc_label, - const StatsReports& stats_reports) override; + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override; // Stops VideoQualityAnalyzerInterface to populate final data and metrics. + // Should be invoked after analyzed video tracks are disposed. void Stop(); private: + class AnalyzingVideoSink final : public rtc::VideoSinkInterface { + public: + explicit AnalyzingVideoSink(absl::string_view peer_name, + VideoQualityAnalyzerInjectionHelper* helper) + : peer_name_(peer_name), helper_(helper) {} + ~AnalyzingVideoSink() override = default; + + void OnFrame(const VideoFrame& frame) override { + helper_->OnFrame(peer_name_, frame); + } + + private: + const std::string peer_name_; + VideoQualityAnalyzerInjectionHelper* const helper_; + }; + + test::VideoFrameWriter* MaybeCreateVideoWriter( + absl::optional file_name, + const PeerConnectionE2EQualityTestFixture::VideoConfig& config); + // Creates a deep copy of the frame and passes it to the video analyzer, while + // passing real frame to the sinks + void OnFrame(absl::string_view peer_name, const VideoFrame& frame); + std::vector>>* + PopulateSinks(const std::string& stream_label); + std::unique_ptr analyzer_; EncodedImageDataInjector* injector_; EncodedImageDataExtractor* extractor_; + std::vector> video_writers_; + + Mutex lock_; + std::map known_video_configs_ RTC_GUARDED_BY(lock_); + std::map>>> + sinks_ RTC_GUARDED_BY(lock_); + std::unique_ptr> encoding_entities_id_generator_; }; diff --git a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc 
b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc new file mode 100644 index 0000000000..cc675cc2df --- /dev/null +++ b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h" + +#include "api/stats/rtc_stats.h" +#include "api/stats/rtcstats_objects.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +void VideoQualityMetricsReporter::Start( + absl::string_view test_case_name, + const TrackIdStreamInfoMap* /*reporter_helper*/) { + test_case_name_ = std::string(test_case_name); + start_time_ = Now(); +} + +void VideoQualityMetricsReporter::OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) { + RTC_CHECK(start_time_) + << "Please invoke Start(...) method before calling OnStatsReports(...)"; + + auto transport_stats = report->GetStatsOfType(); + if (transport_stats.size() == 0u || + !transport_stats[0]->selected_candidate_pair_id.is_defined()) { + return; + } + RTC_DCHECK_EQ(transport_stats.size(), 1); + std::string selected_ice_id = + transport_stats[0]->selected_candidate_pair_id.ValueToString(); + // Use the selected ICE candidate pair ID to get the appropriate ICE stats. 
+ const RTCIceCandidatePairStats ice_candidate_pair_stats = + report->Get(selected_ice_id)->cast_to(); + + auto outbound_rtp_stats = report->GetStatsOfType(); + StatsSample sample; + for (auto& s : outbound_rtp_stats) { + if (!s->media_type.is_defined()) { + continue; + } + if (!(*s->media_type == RTCMediaStreamTrackKind::kVideo)) { + continue; + } + if (s->timestamp_us() > sample.sample_time.us()) { + sample.sample_time = Timestamp::Micros(s->timestamp_us()); + } + sample.retransmitted_bytes_sent += + DataSize::Bytes(s->retransmitted_bytes_sent.ValueOrDefault(0ul)); + sample.bytes_sent += DataSize::Bytes(s->bytes_sent.ValueOrDefault(0ul)); + sample.header_bytes_sent += + DataSize::Bytes(s->header_bytes_sent.ValueOrDefault(0ul)); + } + + MutexLock lock(&video_bwe_stats_lock_); + VideoBweStats& video_bwe_stats = video_bwe_stats_[std::string(pc_label)]; + if (ice_candidate_pair_stats.available_outgoing_bitrate.is_defined()) { + video_bwe_stats.available_send_bandwidth.AddSample( + DataRate::BitsPerSec( + *ice_candidate_pair_stats.available_outgoing_bitrate) + .bytes_per_sec()); + } + + StatsSample prev_sample = last_stats_sample_[std::string(pc_label)]; + if (prev_sample.sample_time.IsZero()) { + prev_sample.sample_time = start_time_.value(); + } + last_stats_sample_[std::string(pc_label)] = sample; + + TimeDelta time_between_samples = sample.sample_time - prev_sample.sample_time; + if (time_between_samples.IsZero()) { + return; + } + + DataRate retransmission_bitrate = + (sample.retransmitted_bytes_sent - prev_sample.retransmitted_bytes_sent) / + time_between_samples; + video_bwe_stats.retransmission_bitrate.AddSample( + retransmission_bitrate.bytes_per_sec()); + DataRate transmission_bitrate = + (sample.bytes_sent + sample.header_bytes_sent - prev_sample.bytes_sent - + prev_sample.header_bytes_sent) / + time_between_samples; + video_bwe_stats.transmission_bitrate.AddSample( + transmission_bitrate.bytes_per_sec()); +} + +void 
VideoQualityMetricsReporter::StopAndReportResults() { + MutexLock video_bwemutex_(&video_bwe_stats_lock_); + for (const auto& item : video_bwe_stats_) { + ReportVideoBweResults(GetTestCaseName(item.first), item.second); + } +} + +std::string VideoQualityMetricsReporter::GetTestCaseName( + const std::string& stream_label) const { + return test_case_name_ + "/" + stream_label; +} + +void VideoQualityMetricsReporter::ReportVideoBweResults( + const std::string& test_case_name, + const VideoBweStats& video_bwe_stats) { + ReportResult("available_send_bandwidth", test_case_name, + video_bwe_stats.available_send_bandwidth, "bytesPerSecond"); + ReportResult("transmission_bitrate", test_case_name, + video_bwe_stats.transmission_bitrate, "bytesPerSecond"); + ReportResult("retransmission_bitrate", test_case_name, + video_bwe_stats.retransmission_bitrate, "bytesPerSecond"); +} + +void VideoQualityMetricsReporter::ReportResult( + const std::string& metric_name, + const std::string& test_case_name, + const SamplesStatsCounter& counter, + const std::string& unit, + webrtc::test::ImproveDirection improve_direction) { + test::PrintResult(metric_name, /*modifier=*/"", test_case_name, counter, unit, + /*important=*/false, improve_direction); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h new file mode 100644 index 0000000000..ff195a450e --- /dev/null +++ b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_METRICS_REPORTER_H_ +#define TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_METRICS_REPORTER_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/track_id_stream_info_map.h" +#include "api/units/data_size.h" +#include "api/units/timestamp.h" +#include "rtc_base/synchronization/mutex.h" +#include "test/testsupport/perf_test.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +struct VideoBweStats { + SamplesStatsCounter available_send_bandwidth; + SamplesStatsCounter transmission_bitrate; + SamplesStatsCounter retransmission_bitrate; +}; + +class VideoQualityMetricsReporter + : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter { + public: + VideoQualityMetricsReporter(Clock* const clock) : clock_(clock) {} + ~VideoQualityMetricsReporter() override = default; + + void Start(absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) override; + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override; + void StopAndReportResults() override; + + private: + struct StatsSample { + DataSize bytes_sent = DataSize::Zero(); + DataSize header_bytes_sent = DataSize::Zero(); + DataSize retransmitted_bytes_sent = DataSize::Zero(); + + Timestamp sample_time = Timestamp::Zero(); + }; + + std::string GetTestCaseName(const std::string& stream_label) const; + static void ReportVideoBweResults(const std::string& test_case_name, + const VideoBweStats& video_bwe_stats); + // Report result for single metric for specified stream. 
+ static void ReportResult(const std::string& metric_name, + const std::string& test_case_name, + const SamplesStatsCounter& counter, + const std::string& unit, + webrtc::test::ImproveDirection improve_direction = + webrtc::test::ImproveDirection::kNone); + Timestamp Now() const { return clock_->CurrentTime(); } + + Clock* const clock_; + + std::string test_case_name_; + absl::optional start_time_; + + Mutex video_bwe_stats_lock_; + // Map between a peer connection label (provided by the framework) and + // its video BWE stats. + std::map video_bwe_stats_ + RTC_GUARDED_BY(video_bwe_stats_lock_); + std::map last_stats_sample_ + RTC_GUARDED_BY(video_bwe_stats_lock_); +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_METRICS_REPORTER_H_ diff --git a/test/pc/e2e/analyzer_helper.cc b/test/pc/e2e/analyzer_helper.cc index f11b3bb803..852f0a3435 100644 --- a/test/pc/e2e/analyzer_helper.cc +++ b/test/pc/e2e/analyzer_helper.cc @@ -22,16 +22,36 @@ AnalyzerHelper::AnalyzerHelper() { void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id, std::string stream_label) { RTC_DCHECK_RUN_ON(&signaling_sequence_checker_); - track_to_stream_map_.insert({std::move(track_id), std::move(stream_label)}); + track_to_stream_map_.insert( + {std::move(track_id), StreamInfo{stream_label, stream_label}}); } -const std::string& AnalyzerHelper::GetStreamLabelFromTrackId( - const std::string& track_id) const { +void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id, + std::string stream_label, + std::string sync_group) { + RTC_DCHECK_RUN_ON(&signaling_sequence_checker_); + track_to_stream_map_.insert( + {std::move(track_id), + StreamInfo{std::move(stream_label), std::move(sync_group)}}); +} + +const AnalyzerHelper::StreamInfo& AnalyzerHelper::GetStreamInfoFromTrackId( + absl::string_view track_id) const { RTC_DCHECK_RUN_ON(&signaling_sequence_checker_); - auto track_to_stream_pair = 
track_to_stream_map_.find(track_id); + auto track_to_stream_pair = track_to_stream_map_.find(std::string(track_id)); RTC_CHECK(track_to_stream_pair != track_to_stream_map_.end()); return track_to_stream_pair->second; } +absl::string_view AnalyzerHelper::GetStreamLabelFromTrackId( + absl::string_view track_id) const { + return GetStreamInfoFromTrackId(track_id).stream_label; +} + +absl::string_view AnalyzerHelper::GetSyncGroupLabelFromTrackId( + absl::string_view track_id) const { + return GetStreamInfoFromTrackId(track_id).sync_group; +} + } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/test/pc/e2e/analyzer_helper.h b/test/pc/e2e/analyzer_helper.h index 51cfe5587d..4b0e0c3ac4 100644 --- a/test/pc/e2e/analyzer_helper.h +++ b/test/pc/e2e/analyzer_helper.h @@ -14,7 +14,8 @@ #include #include -#include "api/test/track_id_stream_label_map.h" +#include "absl/strings/string_view.h" +#include "api/test/track_id_stream_info_map.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" @@ -22,25 +23,40 @@ namespace webrtc { namespace webrtc_pc_e2e { // This class is a utility that provides bookkeeping capabilities that -// are useful to associate stats reports track_ids to the remote stream_id. +// are useful to associate stats reports track_ids to the remote stream info. // The framework will populate an instance of this class and it will pass // it to the Start method of Media Quality Analyzers. // An instance of AnalyzerHelper must only be accessed from a single // thread and since stats collection happens on the signaling thread, -// both AddTrackToStreamMapping and GetStreamLabelFromTrackId must be -// invoked from the signaling thread. -class AnalyzerHelper : public TrackIdStreamLabelMap { +// AddTrackToStreamMapping, GetStreamLabelFromTrackId and +// GetSyncGroupLabelFromTrackId must be invoked from the signaling thread. Get +// methods should be invoked only after all data is added. 
Mixing Get methods +// with adding new data may lead to undefined behaviour. +class AnalyzerHelper : public TrackIdStreamInfoMap { public: AnalyzerHelper(); void AddTrackToStreamMapping(std::string track_id, std::string stream_label); + void AddTrackToStreamMapping(std::string track_id, + std::string stream_label, + std::string sync_group); - const std::string& GetStreamLabelFromTrackId( - const std::string& track_id) const override; + absl::string_view GetStreamLabelFromTrackId( + absl::string_view track_id) const override; + + absl::string_view GetSyncGroupLabelFromTrackId( + absl::string_view track_id) const override; private: + struct StreamInfo { + std::string stream_label; + std::string sync_group; + }; + + const StreamInfo& GetStreamInfoFromTrackId(absl::string_view track_id) const; + SequenceChecker signaling_sequence_checker_; - std::map track_to_stream_map_ + std::map track_to_stream_map_ RTC_GUARDED_BY(signaling_sequence_checker_); }; diff --git a/test/pc/e2e/cross_media_metrics_reporter.cc b/test/pc/e2e/cross_media_metrics_reporter.cc new file mode 100644 index 0000000000..96f661fd4f --- /dev/null +++ b/test/pc/e2e/cross_media_metrics_reporter.cc @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "test/pc/e2e/cross_media_metrics_reporter.h" + +#include +#include + +#include "api/stats/rtc_stats.h" +#include "api/stats/rtcstats_objects.h" +#include "api/units/timestamp.h" +#include "rtc_base/event.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +void CrossMediaMetricsReporter::Start( + absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) { + test_case_name_ = std::string(test_case_name); + reporter_helper_ = reporter_helper; +} + +void CrossMediaMetricsReporter::OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) { + auto inbound_stats = report->GetStatsOfType(); + std::map> + sync_group_stats; + for (const auto& stat : inbound_stats) { + auto media_source_stat = + report->GetAs(*stat->track_id); + if (stat->estimated_playout_timestamp.ValueOrDefault(0.) > 0 && + media_source_stat->track_identifier.is_defined()) { + sync_group_stats[reporter_helper_->GetSyncGroupLabelFromTrackId( + *media_source_stat->track_identifier)] + .push_back(stat); + } + } + + MutexLock lock(&mutex_); + for (const auto& pair : sync_group_stats) { + // If there is less than two streams, it is not a sync group. + if (pair.second.size() < 2) { + continue; + } + auto sync_group = std::string(pair.first); + const RTCInboundRTPStreamStats* audio_stat = pair.second[0]; + const RTCInboundRTPStreamStats* video_stat = pair.second[1]; + + RTC_CHECK(pair.second.size() == 2 && audio_stat->kind.is_defined() && + video_stat->kind.is_defined() && + *audio_stat->kind != *video_stat->kind) + << "Sync group should consist of one audio and one video stream."; + + if (*audio_stat->kind == RTCMediaStreamTrackKind::kVideo) { + std::swap(audio_stat, video_stat); + } + // Stream labels of a sync group are same for all polls, so we need it add + // it only once. 
+ if (stats_info_.find(sync_group) == stats_info_.end()) { + auto audio_source_stat = + report->GetAs(*audio_stat->track_id); + auto video_source_stat = + report->GetAs(*video_stat->track_id); + // *_source_stat->track_identifier is always defined here because we + // checked it while grouping stats. + stats_info_[sync_group].audio_stream_label = + std::string(reporter_helper_->GetStreamLabelFromTrackId( + *audio_source_stat->track_identifier)); + stats_info_[sync_group].video_stream_label = + std::string(reporter_helper_->GetStreamLabelFromTrackId( + *video_source_stat->track_identifier)); + } + + double audio_video_playout_diff = *audio_stat->estimated_playout_timestamp - + *video_stat->estimated_playout_timestamp; + if (audio_video_playout_diff > 0) { + stats_info_[sync_group].audio_ahead_ms.AddSample( + audio_video_playout_diff); + stats_info_[sync_group].video_ahead_ms.AddSample(0); + } else { + stats_info_[sync_group].audio_ahead_ms.AddSample(0); + stats_info_[sync_group].video_ahead_ms.AddSample( + std::abs(audio_video_playout_diff)); + } + } +} + +void CrossMediaMetricsReporter::StopAndReportResults() { + MutexLock lock(&mutex_); + for (const auto& pair : stats_info_) { + const std::string& sync_group = pair.first; + ReportResult("audio_ahead_ms", + GetTestCaseName(pair.second.audio_stream_label, sync_group), + pair.second.audio_ahead_ms, "ms", + webrtc::test::ImproveDirection::kSmallerIsBetter); + ReportResult("video_ahead_ms", + GetTestCaseName(pair.second.video_stream_label, sync_group), + pair.second.video_ahead_ms, "ms", + webrtc::test::ImproveDirection::kSmallerIsBetter); + } +} + +void CrossMediaMetricsReporter::ReportResult( + const std::string& metric_name, + const std::string& test_case_name, + const SamplesStatsCounter& counter, + const std::string& unit, + webrtc::test::ImproveDirection improve_direction) { + test::PrintResult(metric_name, /*modifier=*/"", test_case_name, counter, unit, + /*important=*/false, improve_direction); +} + 
+std::string CrossMediaMetricsReporter::GetTestCaseName( + const std::string& stream_label, + const std::string& sync_group) const { + return test_case_name_ + "/" + sync_group + "_" + stream_label; +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/cross_media_metrics_reporter.h b/test/pc/e2e/cross_media_metrics_reporter.h new file mode 100644 index 0000000000..6ddc994d1f --- /dev/null +++ b/test/pc/e2e/cross_media_metrics_reporter.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_ +#define TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/track_id_stream_info_map.h" +#include "api/units/timestamp.h" +#include "rtc_base/synchronization/mutex.h" +#include "test/testsupport/perf_test.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +class CrossMediaMetricsReporter + : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter { + public: + CrossMediaMetricsReporter() = default; + ~CrossMediaMetricsReporter() override = default; + + void Start(absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) override; + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override; + void StopAndReportResults() override; + + private: + struct StatsInfo { + SamplesStatsCounter audio_ahead_ms; + SamplesStatsCounter video_ahead_ms; + 
+ std::string audio_stream_label; + std::string video_stream_label; + }; + + static void ReportResult(const std::string& metric_name, + const std::string& test_case_name, + const SamplesStatsCounter& counter, + const std::string& unit, + webrtc::test::ImproveDirection improve_direction = + webrtc::test::ImproveDirection::kNone); + std::string GetTestCaseName(const std::string& stream_label, + const std::string& sync_group) const; + + std::string test_case_name_; + const TrackIdStreamInfoMap* reporter_helper_; + + Mutex mutex_; + std::map stats_info_ RTC_GUARDED_BY(mutex_); +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_ diff --git a/test/pc/e2e/echo/echo_emulation.cc b/test/pc/e2e/echo/echo_emulation.cc index 2beaa34cbd..230e8e3eca 100644 --- a/test/pc/e2e/echo/echo_emulation.cc +++ b/test/pc/e2e/echo/echo_emulation.cc @@ -57,17 +57,7 @@ void EchoEmulatingCapturer::OnAudioRendered( } queue_input_.assign(data.begin(), data.end()); if (!renderer_queue_.Insert(&queue_input_)) { - // Test audio device works too slow with sanitizers and on some platforms - // and can't properly process audio, so when capturer will be stopped - // renderer will quickly overfill the queue. - // TODO(crbug.com/webrtc/10850) remove it when test ADM will be fast enough. -#if defined(THREAD_SANITIZER) || defined(MEMORY_SANITIZER) || \ - defined(ADDRESS_SANITIZER) || defined(WEBRTC_ANDROID) || \ - (defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)) RTC_LOG(WARNING) << "Echo queue is full"; -#else - RTC_CHECK(false) << "Echo queue is full"; -#endif } } diff --git a/test/pc/e2e/media/media_helper.cc b/test/pc/e2e/media/media_helper.cc new file mode 100644 index 0000000000..d1c27838a6 --- /dev/null +++ b/test/pc/e2e/media/media_helper.cc @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "test/pc/e2e/media/media_helper.h" + +#include +#include + +#include "absl/types/variant.h" +#include "api/media_stream_interface.h" +#include "api/test/create_frame_generator.h" +#include "test/frame_generator_capturer.h" +#include "test/platform_video_capturer.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using VideoConfig = + ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; +using AudioConfig = + ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; +using CapturingDeviceIndex = ::webrtc::webrtc_pc_e2e:: + PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex; + +} // namespace + +void MediaHelper::MaybeAddAudio(TestPeer* peer) { + if (!peer->params()->audio_config) { + return; + } + const AudioConfig& audio_config = peer->params()->audio_config.value(); + rtc::scoped_refptr source = + peer->pc_factory()->CreateAudioSource(audio_config.audio_options); + rtc::scoped_refptr track = + peer->pc_factory()->CreateAudioTrack(*audio_config.stream_label, source); + std::string sync_group = audio_config.sync_group + ? audio_config.sync_group.value() + : audio_config.stream_label.value(); + peer->AddTrack(track, {sync_group, *audio_config.stream_label}); +} + +std::vector> +MediaHelper::MaybeAddVideo(TestPeer* peer) { + // Params here valid because of pre-run validation. + Params* params = peer->params(); + std::vector> out; + for (size_t i = 0; i < params->video_configs.size(); ++i) { + auto video_config = params->video_configs[i]; + // Setup input video source into peer connection. 
+ std::unique_ptr capturer = CreateVideoCapturer( + video_config, peer->ReleaseVideoSource(i), + video_quality_analyzer_injection_helper_->CreateFramePreprocessor( + params->name.value(), video_config)); + bool is_screencast = + video_config.content_hint == VideoTrackInterface::ContentHint::kText || + video_config.content_hint == + VideoTrackInterface::ContentHint::kDetailed; + rtc::scoped_refptr source = + new rtc::RefCountedObject( + std::move(capturer), is_screencast); + out.push_back(source); + RTC_LOG(INFO) << "Adding video with video_config.stream_label=" + << video_config.stream_label.value(); + rtc::scoped_refptr track = + peer->pc_factory()->CreateVideoTrack(video_config.stream_label.value(), + source); + if (video_config.content_hint.has_value()) { + track->set_content_hint(video_config.content_hint.value()); + } + std::string sync_group = video_config.sync_group + ? video_config.sync_group.value() + : video_config.stream_label.value(); + RTCErrorOr> sender = + peer->AddTrack(track, {sync_group, *video_config.stream_label}); + RTC_CHECK(sender.ok()); + if (video_config.temporal_layers_count) { + RtpParameters rtp_parameters = sender.value()->GetParameters(); + for (auto& encoding_parameters : rtp_parameters.encodings) { + encoding_parameters.num_temporal_layers = + video_config.temporal_layers_count; + } + RTCError res = sender.value()->SetParameters(rtp_parameters); + RTC_CHECK(res.ok()) << "Failed to set RTP parameters"; + } + } + return out; +} + +std::unique_ptr MediaHelper::CreateVideoCapturer( + const VideoConfig& video_config, + PeerConfigurerImpl::VideoSource source, + std::unique_ptr + frame_preprocessor) { + CapturingDeviceIndex* capturing_device_index = + absl::get_if(&source); + if (capturing_device_index != nullptr) { + std::unique_ptr capturer = + test::CreateVideoCapturer(video_config.width, video_config.height, + video_config.fps, + static_cast(*capturing_device_index)); + RTC_CHECK(capturer) + << "Failed to obtain input stream from 
capturing device #" + << *capturing_device_index; + capturer->SetFramePreprocessor(std::move(frame_preprocessor)); + return capturer; + } + + auto capturer = std::make_unique( + clock_, + absl::get>( + std::move(source)), + video_config.fps, *task_queue_factory_); + capturer->SetFramePreprocessor(std::move(frame_preprocessor)); + capturer->Init(); + return capturer; +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/media/media_helper.h b/test/pc/e2e/media/media_helper.h new file mode 100644 index 0000000000..4e977e3002 --- /dev/null +++ b/test/pc/e2e/media/media_helper.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef TEST_PC_E2E_MEDIA_MEDIA_HELPER_H_ +#define TEST_PC_E2E_MEDIA_MEDIA_HELPER_H_ + +#include +#include + +#include "api/test/frame_generator_interface.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" +#include "test/pc/e2e/media/test_video_capturer_video_track_source.h" +#include "test/pc/e2e/peer_configurer.h" +#include "test/pc/e2e/test_peer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +class MediaHelper { + public: + MediaHelper(VideoQualityAnalyzerInjectionHelper* + video_quality_analyzer_injection_helper, + TaskQueueFactory* task_queue_factory, + Clock* clock) + : clock_(clock), + task_queue_factory_(task_queue_factory), + video_quality_analyzer_injection_helper_( + video_quality_analyzer_injection_helper) {} + + void MaybeAddAudio(TestPeer* peer); + + std::vector> + MaybeAddVideo(TestPeer* peer); + + private: + std::unique_ptr CreateVideoCapturer( + const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + PeerConfigurerImpl::VideoSource source, + std::unique_ptr + frame_preprocessor); + + Clock* const clock_; + TaskQueueFactory* const task_queue_factory_; + VideoQualityAnalyzerInjectionHelper* video_quality_analyzer_injection_helper_; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_MEDIA_MEDIA_HELPER_H_ diff --git a/test/pc/e2e/media/test_video_capturer_video_track_source.h b/test/pc/e2e/media/test_video_capturer_video_track_source.h new file mode 100644 index 0000000000..c883a2e8e9 --- /dev/null +++ b/test/pc/e2e/media/test_video_capturer_video_track_source.h @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_PC_E2E_MEDIA_TEST_VIDEO_CAPTURER_VIDEO_TRACK_SOURCE_H_ +#define TEST_PC_E2E_MEDIA_TEST_VIDEO_CAPTURER_VIDEO_TRACK_SOURCE_H_ + +#include +#include + +#include "api/video/video_frame.h" +#include "api/video/video_source_interface.h" +#include "pc/video_track_source.h" +#include "test/test_video_capturer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +class TestVideoCapturerVideoTrackSource : public VideoTrackSource { + public: + TestVideoCapturerVideoTrackSource( + std::unique_ptr video_capturer, + bool is_screencast) + : VideoTrackSource(/*remote=*/false), + video_capturer_(std::move(video_capturer)), + is_screencast_(is_screencast) {} + + ~TestVideoCapturerVideoTrackSource() = default; + + void Start() { SetState(kLive); } + + void Stop() { SetState(kMuted); } + + bool is_screencast() const override { return is_screencast_; } + + protected: + rtc::VideoSourceInterface* source() override { + return video_capturer_.get(); + } + + private: + std::unique_ptr video_capturer_; + const bool is_screencast_; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_MEDIA_TEST_VIDEO_CAPTURER_VIDEO_TRACK_SOURCE_H_ diff --git a/test/pc/e2e/network_quality_metrics_reporter.cc b/test/pc/e2e/network_quality_metrics_reporter.cc index 56f0337037..2df45291d8 100644 --- a/test/pc/e2e/network_quality_metrics_reporter.cc +++ b/test/pc/e2e/network_quality_metrics_reporter.cc @@ -11,7 +11,8 @@ #include -#include "api/stats_types.h" +#include "api/stats/rtc_stats.h" +#include "api/stats/rtcstats_objects.h" #include "rtc_base/event.h" #include "system_wrappers/include/field_trial.h" #include "test/testsupport/perf_test.h" @@ -28,67 +29,75 @@ constexpr int kStatsWaitTimeoutMs = 1000; constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats"; } -void NetworkQualityMetricsReporter::Start(absl::string_view test_case_name) { 
+void NetworkQualityMetricsReporter::Start( + absl::string_view test_case_name, + const TrackIdStreamInfoMap* /*reporter_helper*/) { test_case_name_ = std::string(test_case_name); // Check that network stats are clean before test execution. - EmulatedNetworkStats alice_stats = PopulateStats(alice_network_); - RTC_CHECK_EQ(alice_stats.packets_sent, 0); - RTC_CHECK_EQ(alice_stats.packets_received, 0); - EmulatedNetworkStats bob_stats = PopulateStats(bob_network_); - RTC_CHECK_EQ(bob_stats.packets_sent, 0); - RTC_CHECK_EQ(bob_stats.packets_received, 0); + std::unique_ptr alice_stats = + PopulateStats(alice_network_); + RTC_CHECK_EQ(alice_stats->PacketsSent(), 0); + RTC_CHECK_EQ(alice_stats->PacketsReceived(), 0); + std::unique_ptr bob_stats = PopulateStats(bob_network_); + RTC_CHECK_EQ(bob_stats->PacketsSent(), 0); + RTC_CHECK_EQ(bob_stats->PacketsReceived(), 0); } void NetworkQualityMetricsReporter::OnStatsReports( - const std::string& pc_label, - const StatsReports& reports) { - rtc::CritScope cs(&lock_); - int64_t payload_bytes_received = 0; - int64_t payload_bytes_sent = 0; - for (const StatsReport* report : reports) { - if (report->type() == StatsReport::kStatsReportTypeSsrc) { - const auto* received = - report->FindValue(StatsReport::kStatsValueNameBytesReceived); - if (received) { - payload_bytes_received += received->int64_val(); - } - const auto* sent = - report->FindValue(StatsReport::kStatsValueNameBytesSent); - if (sent) { - payload_bytes_sent += sent->int64_val(); - } - } + absl::string_view pc_label, + const rtc::scoped_refptr& report) { + DataSize payload_received = DataSize::Zero(); + DataSize payload_sent = DataSize::Zero(); + + auto inbound_stats = report->GetStatsOfType(); + for (const auto& stat : inbound_stats) { + payload_received += + DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul) + + stat->header_bytes_received.ValueOrDefault(0ul)); } - PCStats& stats = pc_stats_[pc_label]; - stats.payload_bytes_received = payload_bytes_received; - 
stats.payload_bytes_sent = payload_bytes_sent; + + auto outbound_stats = report->GetStatsOfType(); + for (const auto& stat : outbound_stats) { + payload_sent += + DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul) + + stat->header_bytes_sent.ValueOrDefault(0ul)); + } + + MutexLock lock(&lock_); + PCStats& stats = pc_stats_[std::string(pc_label)]; + stats.payload_received = payload_received; + stats.payload_sent = payload_sent; } void NetworkQualityMetricsReporter::StopAndReportResults() { - EmulatedNetworkStats alice_stats = PopulateStats(alice_network_); - EmulatedNetworkStats bob_stats = PopulateStats(bob_network_); - ReportStats("alice", alice_stats, - alice_stats.packets_sent - bob_stats.packets_received); - ReportStats("bob", bob_stats, - bob_stats.packets_sent - alice_stats.packets_received); + std::unique_ptr alice_stats = + PopulateStats(alice_network_); + std::unique_ptr bob_stats = PopulateStats(bob_network_); + int64_t alice_packets_loss = + alice_stats->PacketsSent() - bob_stats->PacketsReceived(); + int64_t bob_packets_loss = + bob_stats->PacketsSent() - alice_stats->PacketsReceived(); + ReportStats("alice", std::move(alice_stats), alice_packets_loss); + ReportStats("bob", std::move(bob_stats), bob_packets_loss); if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) { RTC_LOG(LS_ERROR) << "Non-standard GetStats; \"payload\" counts include RTP headers"; } - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); for (const auto& pair : pc_stats_) { ReportPCStats(pair.first, pair.second); } } -EmulatedNetworkStats NetworkQualityMetricsReporter::PopulateStats( +std::unique_ptr +NetworkQualityMetricsReporter::PopulateStats( EmulatedNetworkManagerInterface* network) { rtc::Event wait; - EmulatedNetworkStats stats; - network->GetStats([&](const EmulatedNetworkStats& s) { - stats = s; + std::unique_ptr stats; + network->GetStats([&](std::unique_ptr s) { + stats = std::move(s); wait.Set(); }); bool stats_received = wait.Wait(kStatsWaitTimeoutMs); @@ 
-98,26 +107,26 @@ EmulatedNetworkStats NetworkQualityMetricsReporter::PopulateStats( void NetworkQualityMetricsReporter::ReportStats( const std::string& network_label, - const EmulatedNetworkStats& stats, + std::unique_ptr stats, int64_t packet_loss) { - ReportResult("bytes_sent", network_label, stats.bytes_sent.bytes(), + ReportResult("bytes_sent", network_label, stats->BytesSent().bytes(), "sizeInBytes"); - ReportResult("packets_sent", network_label, stats.packets_sent, "unitless"); + ReportResult("packets_sent", network_label, stats->PacketsSent(), "unitless"); ReportResult( "average_send_rate", network_label, - stats.packets_sent >= 2 ? stats.AverageSendRate().bytes_per_sec() : 0, + stats->PacketsSent() >= 2 ? stats->AverageSendRate().bytes_per_sec() : 0, "bytesPerSecond"); - ReportResult("bytes_dropped", network_label, stats.bytes_dropped.bytes(), + ReportResult("bytes_dropped", network_label, stats->BytesDropped().bytes(), "sizeInBytes"); - ReportResult("packets_dropped", network_label, stats.packets_dropped, + ReportResult("packets_dropped", network_label, stats->PacketsDropped(), "unitless"); - ReportResult("bytes_received", network_label, stats.bytes_received.bytes(), + ReportResult("bytes_received", network_label, stats->BytesReceived().bytes(), "sizeInBytes"); - ReportResult("packets_received", network_label, stats.packets_received, + ReportResult("packets_received", network_label, stats->PacketsReceived(), "unitless"); ReportResult("average_receive_rate", network_label, - stats.packets_received >= 2 - ? stats.AverageReceiveRate().bytes_per_sec() + stats->PacketsReceived() >= 2 + ? 
stats->AverageReceiveRate().bytes_per_sec() : 0, "bytesPerSecond"); ReportResult("sent_packets_loss", network_label, packet_loss, "unitless"); @@ -125,9 +134,9 @@ void NetworkQualityMetricsReporter::ReportStats( void NetworkQualityMetricsReporter::ReportPCStats(const std::string& pc_label, const PCStats& stats) { - ReportResult("payload_bytes_received", pc_label, stats.payload_bytes_received, - "sizeInBytes"); - ReportResult("payload_bytes_sent", pc_label, stats.payload_bytes_sent, + ReportResult("payload_bytes_received", pc_label, + stats.payload_received.bytes(), "sizeInBytes"); + ReportResult("payload_bytes_sent", pc_label, stats.payload_sent.bytes(), "sizeInBytes"); } diff --git a/test/pc/e2e/network_quality_metrics_reporter.h b/test/pc/e2e/network_quality_metrics_reporter.h index 6454f17526..50c36234a5 100644 --- a/test/pc/e2e/network_quality_metrics_reporter.h +++ b/test/pc/e2e/network_quality_metrics_reporter.h @@ -11,11 +11,15 @@ #ifndef TEST_PC_E2E_NETWORK_QUALITY_METRICS_REPORTER_H_ #define TEST_PC_E2E_NETWORK_QUALITY_METRICS_REPORTER_H_ +#include #include +#include "absl/strings/string_view.h" #include "api/test/network_emulation_manager.h" #include "api/test/peerconnection_quality_test_fixture.h" -#include "rtc_base/critical_section.h" +#include "api/test/track_id_stream_info_map.h" +#include "api/units/data_size.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -29,23 +33,25 @@ class NetworkQualityMetricsReporter ~NetworkQualityMetricsReporter() override = default; // Network stats must be empty when this method will be invoked. 
- void Start(absl::string_view test_case_name) override; - void OnStatsReports(const std::string& pc_label, - const StatsReports& reports) override; + void Start(absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) override; + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override; void StopAndReportResults() override; private: struct PCStats { // TODO(nisse): Separate audio and video counters. Depends on standard stat // counters, enabled by field trial "WebRTC-UseStandardBytesStats". - int64_t payload_bytes_received = 0; - int64_t payload_bytes_sent = 0; + DataSize payload_received = DataSize::Zero(); + DataSize payload_sent = DataSize::Zero(); }; - static EmulatedNetworkStats PopulateStats( + static std::unique_ptr PopulateStats( EmulatedNetworkManagerInterface* network); void ReportStats(const std::string& network_label, - const EmulatedNetworkStats& stats, + std::unique_ptr stats, int64_t packet_loss); void ReportPCStats(const std::string& pc_label, const PCStats& stats); void ReportResult(const std::string& metric_name, @@ -58,7 +64,7 @@ class NetworkQualityMetricsReporter EmulatedNetworkManagerInterface* alice_network_; EmulatedNetworkManagerInterface* bob_network_; - rtc::CriticalSection lock_; + Mutex lock_; std::map pc_stats_ RTC_GUARDED_BY(lock_); }; diff --git a/test/pc/e2e/peer_configurer.cc b/test/pc/e2e/peer_configurer.cc new file mode 100644 index 0000000000..b5616b5d68 --- /dev/null +++ b/test/pc/e2e/peer_configurer.cc @@ -0,0 +1,204 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "test/pc/e2e/peer_configurer.h" + +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/arraysize.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using AudioConfig = PeerConnectionE2EQualityTestFixture::AudioConfig; +using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; +using RunParams = PeerConnectionE2EQualityTestFixture::RunParams; +using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig; + +// List of default names of generic participants according to +// https://en.wikipedia.org/wiki/Alice_and_Bob +constexpr absl::string_view kDefaultNames[] = {"alice", "bob", "charlie", + "david", "erin", "frank"}; + +class DefaultNamesProvider { + public: + // Caller have to ensure that default names array will outlive names provider + // instance. + explicit DefaultNamesProvider( + absl::string_view prefix, + rtc::ArrayView default_names = {}) + : prefix_(prefix), default_names_(default_names) {} + + void MaybeSetName(absl::optional* name) { + if (name->has_value()) { + known_names_.insert(name->value()); + } else { + *name = GenerateName(); + } + } + + private: + std::string GenerateName() { + std::string name; + do { + name = GenerateNameInternal(); + } while (!known_names_.insert(name).second); + return name; + } + + std::string GenerateNameInternal() { + if (counter_ < default_names_.size()) { + return std::string(default_names_[counter_++]); + } + return prefix_ + std::to_string(counter_++); + } + + const std::string prefix_; + const rtc::ArrayView default_names_; + + std::set known_names_; + size_t counter_ = 0; +}; + +} // namespace + +void SetDefaultValuesForMissingParams( + RunParams* run_params, + std::vector>* peers) { + DefaultNamesProvider peer_names_provider("peer_", kDefaultNames); + for (size_t i = 0; i < peers->size(); ++i) { + auto* peer = peers->at(i).get(); + auto* p = peer->params(); + 
peer_names_provider.MaybeSetName(&p->name); + DefaultNamesProvider video_stream_names_provider( + *p->name + "_auto_video_stream_label_"); + for (VideoConfig& video_config : p->video_configs) { + video_stream_names_provider.MaybeSetName(&video_config.stream_label); + } + if (p->audio_config) { + DefaultNamesProvider audio_stream_names_provider( + *p->name + "_auto_audio_stream_label_"); + audio_stream_names_provider.MaybeSetName(&p->audio_config->stream_label); + } + } + + if (run_params->video_codecs.empty()) { + run_params->video_codecs.push_back( + VideoCodecConfig(cricket::kVp8CodecName)); + } +} + +void ValidateParams( + const RunParams& run_params, + const std::vector>& peers) { + RTC_CHECK_GT(run_params.video_encoder_bitrate_multiplier, 0.0); + RTC_CHECK_GE(run_params.video_codecs.size(), 1); + + std::set peer_names; + std::set video_labels; + std::set audio_labels; + std::set video_sync_groups; + std::set audio_sync_groups; + int media_streams_count = 0; + + for (size_t i = 0; i < peers.size(); ++i) { + Params* p = peers[i]->params(); + + { + RTC_CHECK(p->name); + bool inserted = peer_names.insert(p->name.value()).second; + RTC_CHECK(inserted) << "Duplicate name=" << p->name.value(); + } + + if (p->audio_config) { + media_streams_count++; + } + media_streams_count += p->video_configs.size(); + + // Validate that all video stream labels are unique and sync groups are + // valid. + for (const VideoConfig& video_config : p->video_configs) { + RTC_CHECK(video_config.stream_label); + bool inserted = + video_labels.insert(video_config.stream_label.value()).second; + RTC_CHECK(inserted) << "Duplicate video_config.stream_label=" + << video_config.stream_label.value(); + + // TODO(bugs.webrtc.org/4762): remove this check after synchronization of + // more than two streams is supported. 
+ if (video_config.sync_group.has_value()) { + bool sync_group_inserted = + video_sync_groups.insert(video_config.sync_group.value()).second; + RTC_CHECK(sync_group_inserted) + << "Sync group shouldn't consist of more than two streams (one " + "video and one audio). Duplicate video_config.sync_group=" + << video_config.sync_group.value(); + } + + if (video_config.simulcast_config) { + if (video_config.simulcast_config->target_spatial_index) { + RTC_CHECK_GE(*video_config.simulcast_config->target_spatial_index, 0); + RTC_CHECK_LT(*video_config.simulcast_config->target_spatial_index, + video_config.simulcast_config->simulcast_streams_count); + } + RTC_CHECK_EQ(run_params.video_codecs.size(), 1) + << "Only 1 video codec is supported when simulcast is enabled in " + << "at least 1 video config"; + RTC_CHECK(!video_config.max_encode_bitrate_bps) + << "Setting max encode bitrate is not implemented for simulcast."; + RTC_CHECK(!video_config.min_encode_bitrate_bps) + << "Setting min encode bitrate is not implemented for simulcast."; + if (run_params.video_codecs[0].name == cricket::kVp8CodecName && + !video_config.simulcast_config->encoding_params.empty()) { + RTC_CHECK_EQ(video_config.simulcast_config->simulcast_streams_count, + video_config.simulcast_config->encoding_params.size()) + << "|encoding_params| have to be specified for each simulcast " + << "stream in |simulcast_config|."; + } + } + } + if (p->audio_config) { + bool inserted = + audio_labels.insert(p->audio_config->stream_label.value()).second; + RTC_CHECK(inserted) << "Duplicate audio_config.stream_label=" + << p->audio_config->stream_label.value(); + // TODO(bugs.webrtc.org/4762): remove this check after synchronization of + // more than two streams is supported. 
+ if (p->audio_config->sync_group.has_value()) { + bool sync_group_inserted = + audio_sync_groups.insert(p->audio_config->sync_group.value()) + .second; + RTC_CHECK(sync_group_inserted) + << "Sync group shouldn't consist of more than two streams (one " + "video and one audio). Duplicate audio_config.sync_group=" + << p->audio_config->sync_group.value(); + } + // Check that if mode input file name specified only if mode is kFile. + if (p->audio_config.value().mode == AudioConfig::Mode::kGenerated) { + RTC_CHECK(!p->audio_config.value().input_file_name); + } + if (p->audio_config.value().mode == AudioConfig::Mode::kFile) { + RTC_CHECK(p->audio_config.value().input_file_name); + RTC_CHECK( + test::FileExists(p->audio_config.value().input_file_name.value())) + << p->audio_config.value().input_file_name.value() + << " doesn't exist"; + } + } + } + + RTC_CHECK_GT(media_streams_count, 0) << "No media in the call."; +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/peer_configurer.h b/test/pc/e2e/peer_configurer.h new file mode 100644 index 0000000000..422d3d7341 --- /dev/null +++ b/test/pc/e2e/peer_configurer.h @@ -0,0 +1,230 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef TEST_PC_E2E_PEER_CONFIGURER_H_ +#define TEST_PC_E2E_PEER_CONFIGURER_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/async_resolver_factory.h" +#include "api/call/call_factory_interface.h" +#include "api/fec_controller.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/create_peer_connection_quality_test_frame_generator.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/transport/network_control.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "rtc_base/network.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/thread.h" +#include "test/pc/e2e/peer_connection_quality_test_params.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +class PeerConfigurerImpl final + : public PeerConnectionE2EQualityTestFixture::PeerConfigurer { + public: + using VideoSource = + absl::variant, + PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex>; + + PeerConfigurerImpl(rtc::Thread* network_thread, + rtc::NetworkManager* network_manager) + : components_(std::make_unique(network_thread, + network_manager)), + params_(std::make_unique()) {} + + PeerConfigurer* SetName(absl::string_view name) override { + params_->name = std::string(name); + return this; + } + + // Implementation of PeerConnectionE2EQualityTestFixture::PeerConfigurer. 
+ PeerConfigurer* SetTaskQueueFactory( + std::unique_ptr task_queue_factory) override { + components_->pcf_dependencies->task_queue_factory = + std::move(task_queue_factory); + return this; + } + PeerConfigurer* SetCallFactory( + std::unique_ptr call_factory) override { + components_->pcf_dependencies->call_factory = std::move(call_factory); + return this; + } + PeerConfigurer* SetEventLogFactory( + std::unique_ptr event_log_factory) override { + components_->pcf_dependencies->event_log_factory = + std::move(event_log_factory); + return this; + } + PeerConfigurer* SetFecControllerFactory( + std::unique_ptr fec_controller_factory) + override { + components_->pcf_dependencies->fec_controller_factory = + std::move(fec_controller_factory); + return this; + } + PeerConfigurer* SetNetworkControllerFactory( + std::unique_ptr + network_controller_factory) override { + components_->pcf_dependencies->network_controller_factory = + std::move(network_controller_factory); + return this; + } + PeerConfigurer* SetVideoEncoderFactory( + std::unique_ptr video_encoder_factory) override { + components_->pcf_dependencies->video_encoder_factory = + std::move(video_encoder_factory); + return this; + } + PeerConfigurer* SetVideoDecoderFactory( + std::unique_ptr video_decoder_factory) override { + components_->pcf_dependencies->video_decoder_factory = + std::move(video_decoder_factory); + return this; + } + + PeerConfigurer* SetAsyncResolverFactory( + std::unique_ptr async_resolver_factory) + override { + components_->pc_dependencies->async_resolver_factory = + std::move(async_resolver_factory); + return this; + } + PeerConfigurer* SetRTCCertificateGenerator( + std::unique_ptr cert_generator) + override { + components_->pc_dependencies->cert_generator = std::move(cert_generator); + return this; + } + PeerConfigurer* SetSSLCertificateVerifier( + std::unique_ptr tls_cert_verifier) override { + components_->pc_dependencies->tls_cert_verifier = + std::move(tls_cert_verifier); + return this; + 
} + + PeerConfigurer* AddVideoConfig( + PeerConnectionE2EQualityTestFixture::VideoConfig config) override { + video_sources_.push_back( + CreateSquareFrameGenerator(config, /*type=*/absl::nullopt)); + params_->video_configs.push_back(std::move(config)); + return this; + } + PeerConfigurer* AddVideoConfig( + PeerConnectionE2EQualityTestFixture::VideoConfig config, + std::unique_ptr generator) override { + params_->video_configs.push_back(std::move(config)); + video_sources_.push_back(std::move(generator)); + return this; + } + PeerConfigurer* AddVideoConfig( + PeerConnectionE2EQualityTestFixture::VideoConfig config, + PeerConnectionE2EQualityTestFixture::CapturingDeviceIndex index) + override { + params_->video_configs.push_back(std::move(config)); + video_sources_.push_back(index); + return this; + } + PeerConfigurer* SetAudioConfig( + PeerConnectionE2EQualityTestFixture::AudioConfig config) override { + params_->audio_config = std::move(config); + return this; + } + PeerConfigurer* SetNetEqFactory( + std::unique_ptr neteq_factory) override { + components_->pcf_dependencies->neteq_factory = std::move(neteq_factory); + return this; + } + PeerConfigurer* SetRtcEventLogPath(std::string path) override { + params_->rtc_event_log_path = std::move(path); + return this; + } + PeerConfigurer* SetAecDumpPath(std::string path) override { + params_->aec_dump_path = std::move(path); + return this; + } + PeerConfigurer* SetRTCConfiguration( + PeerConnectionInterface::RTCConfiguration configuration) override { + params_->rtc_configuration = std::move(configuration); + return this; + } + PeerConfigurer* SetBitrateSettings( + BitrateSettings bitrate_settings) override { + params_->bitrate_settings = bitrate_settings; + return this; + } + + PeerConfigurer* SetIceTransportFactory( + std::unique_ptr factory) override { + components_->pc_dependencies->ice_transport_factory = std::move(factory); + return this; + } + // Implementation of 
PeerConnectionE2EQualityTestFixture::PeerConfigurer end. + + InjectableComponents* components() { return components_.get(); } + Params* params() { return params_.get(); } + std::vector* video_sources() { return &video_sources_; } + + // Returns InjectableComponents and transfer ownership to the caller. + // Can be called once. + std::unique_ptr ReleaseComponents() { + RTC_CHECK(components_); + auto components = std::move(components_); + components_ = nullptr; + return components; + } + // Returns Params and transfer ownership to the caller. + // Can be called once. + std::unique_ptr ReleaseParams() { + RTC_CHECK(params_); + auto params = std::move(params_); + params_ = nullptr; + return params; + } + // Returns video sources and transfer frame generators ownership to the + // caller. Can be called once. + std::vector ReleaseVideoSources() { + auto video_sources = std::move(video_sources_); + video_sources_.clear(); + return video_sources; + } + + private: + std::unique_ptr components_; + std::unique_ptr params_; + std::vector video_sources_; +}; + +// Set missing params to default values if it is required: +// * Generate video stream labels if some of them are missing +// * Generate audio stream labels if some of them are missing +// * Set video source generation mode if it is not specified +// * Video codecs under test +void SetDefaultValuesForMissingParams( + PeerConnectionE2EQualityTestFixture::RunParams* run_params, + std::vector>* peers); +// Validate peer's parameters, also ensure uniqueness of all video stream +// labels. 
+void ValidateParams( + const PeerConnectionE2EQualityTestFixture::RunParams& run_params, + const std::vector>& peers); + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_PEER_CONFIGURER_H_ diff --git a/test/pc/e2e/peer_connection_e2e_smoke_test.cc b/test/pc/e2e/peer_connection_e2e_smoke_test.cc index 6c78bbf6a0..10f62835a9 100644 --- a/test/pc/e2e/peer_connection_e2e_smoke_test.cc +++ b/test/pc/e2e/peer_connection_e2e_smoke_test.cc @@ -11,7 +11,9 @@ #include #include +#include "api/media_stream_interface.h" #include "api/test/create_network_emulation_manager.h" +#include "api/test/create_peer_connection_quality_test_frame_generator.h" #include "api/test/create_peerconnection_quality_test_fixture.h" #include "api/test/network_emulation_manager.h" #include "api/test/peerconnection_quality_test_fixture.h" @@ -21,7 +23,7 @@ #include "test/gtest.h" #include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" -#include "test/pc/e2e/network_quality_metrics_reporter.h" +#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h" #include "test/testsupport/file_utils.h" namespace webrtc { @@ -44,88 +46,83 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test { using EchoEmulationConfig = PeerConnectionE2EQualityTestFixture::EchoEmulationConfig; - void RunTest(const std::string& test_case_name, - const RunParams& run_params, - rtc::FunctionView alice_configurer, - rtc::FunctionView bob_configurer) { - // Setup emulated network - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - - auto alice_network_behavior = - std::make_unique(BuiltInNetworkBehaviorConfig()); - SimulatedNetwork* alice_network_behavior_ptr = alice_network_behavior.get(); - EmulatedNetworkNode* alice_node = - network_emulation_manager->CreateEmulatedNode( - std::move(alice_network_behavior)); - EmulatedNetworkNode* bob_node = - 
network_emulation_manager->CreateEmulatedNode( - std::make_unique(BuiltInNetworkBehaviorConfig())); - auto* alice_endpoint = - network_emulation_manager->CreateEndpoint(EmulatedEndpointConfig()); + void SetUp() override { + network_emulation_ = CreateNetworkEmulationManager(); + auto video_quality_analyzer = std::make_unique( + network_emulation_->time_controller()->GetClock()); + video_quality_analyzer_ = video_quality_analyzer.get(); + fixture_ = CreatePeerConnectionE2EQualityTestFixture( + testing::UnitTest::GetInstance()->current_test_info()->name(), + *network_emulation_->time_controller(), + /*audio_quality_analyzer=*/nullptr, std::move(video_quality_analyzer)); + test::ScopedFieldTrials field_trials( + std::string(field_trial::GetFieldTrialString()) + + "WebRTC-UseStandardBytesStats/Enabled/"); + } + + std::pair + CreateNetwork() { + EmulatedNetworkNode* alice_node = network_emulation_->CreateEmulatedNode( + std::make_unique(BuiltInNetworkBehaviorConfig())); + EmulatedNetworkNode* bob_node = network_emulation_->CreateEmulatedNode( + std::make_unique(BuiltInNetworkBehaviorConfig())); + + EmulatedEndpoint* alice_endpoint = + network_emulation_->CreateEndpoint(EmulatedEndpointConfig()); EmulatedEndpoint* bob_endpoint = - network_emulation_manager->CreateEndpoint(EmulatedEndpointConfig()); - network_emulation_manager->CreateRoute(alice_endpoint, {alice_node}, - bob_endpoint); - network_emulation_manager->CreateRoute(bob_endpoint, {bob_node}, - alice_endpoint); - - // Create analyzers. - std::unique_ptr video_quality_analyzer = - std::make_unique(); - // This is only done for the sake of smoke testing. In general there should - // be no need to explicitly pull data from analyzers after the run. 
- auto* video_analyzer_ptr = - static_cast(video_quality_analyzer.get()); - - auto fixture = CreatePeerConnectionE2EQualityTestFixture( - test_case_name, /*audio_quality_analyzer=*/nullptr, - std::move(video_quality_analyzer)); - fixture->ExecuteAt(TimeDelta::seconds(2), - [alice_network_behavior_ptr](TimeDelta) { - BuiltInNetworkBehaviorConfig config; - config.loss_percent = 5; - alice_network_behavior_ptr->SetConfig(config); - }); - - // Setup components. We need to provide rtc::NetworkManager compatible with - // emulated network layer. + network_emulation_->CreateEndpoint(EmulatedEndpointConfig()); + + network_emulation_->CreateRoute(alice_endpoint, {alice_node}, bob_endpoint); + network_emulation_->CreateRoute(bob_endpoint, {bob_node}, alice_endpoint); + EmulatedNetworkManagerInterface* alice_network = - network_emulation_manager->CreateEmulatedNetworkManagerInterface( + network_emulation_->CreateEmulatedNetworkManagerInterface( {alice_endpoint}); EmulatedNetworkManagerInterface* bob_network = - network_emulation_manager->CreateEmulatedNetworkManagerInterface( + network_emulation_->CreateEmulatedNetworkManagerInterface( {bob_endpoint}); - fixture->AddPeer(alice_network->network_thread(), - alice_network->network_manager(), alice_configurer); - fixture->AddPeer(bob_network->network_thread(), - bob_network->network_manager(), bob_configurer); - fixture->AddQualityMetricsReporter( - std::make_unique(alice_network, - bob_network)); + return std::make_pair(alice_network, bob_network); + } + + void AddPeer(EmulatedNetworkManagerInterface* network, + rtc::FunctionView configurer) { + fixture_->AddPeer(network->network_thread(), network->network_manager(), + configurer); + } - fixture->Run(run_params); + void RunAndCheckEachVideoStreamReceivedFrames(const RunParams& run_params) { + fixture_->Run(run_params); - EXPECT_GE(fixture->GetRealTestDuration(), run_params.run_duration); - for (auto stream_label : video_analyzer_ptr->GetKnownVideoStreams()) { + 
EXPECT_GE(fixture_->GetRealTestDuration(), run_params.run_duration); + for (auto stream_key : video_quality_analyzer_->GetKnownVideoStreams()) { FrameCounters stream_conters = - video_analyzer_ptr->GetPerStreamCounters().at(stream_label); - // 150 = 30fps * 5s. On some devices pipeline can be too slow, so it can - // happen, that frames will stuck in the middle, so we actually can't - // force real constraints here, so lets just check, that at least 1 frame - // passed whole pipeline. - EXPECT_GE(stream_conters.captured, 150); - EXPECT_GE(stream_conters.pre_encoded, 1); - EXPECT_GE(stream_conters.encoded, 1); - EXPECT_GE(stream_conters.received, 1); - EXPECT_GE(stream_conters.decoded, 1); - EXPECT_GE(stream_conters.rendered, 1); + video_quality_analyzer_->GetPerStreamCounters().at(stream_key); + // On some devices the pipeline can be too slow, so we actually can't + // force real constraints here. Lets just check, that at least 1 + // frame passed whole pipeline. + int64_t expected_min_fps = run_params.run_duration.seconds() * 15; + EXPECT_GE(stream_conters.captured, expected_min_fps) + << stream_key.ToString(); + EXPECT_GE(stream_conters.pre_encoded, 1) << stream_key.ToString(); + EXPECT_GE(stream_conters.encoded, 1) << stream_key.ToString(); + EXPECT_GE(stream_conters.received, 1) << stream_key.ToString(); + EXPECT_GE(stream_conters.decoded, 1) << stream_key.ToString(); + EXPECT_GE(stream_conters.rendered, 1) << stream_key.ToString(); } } -}; -} // namespace + NetworkEmulationManager* network_emulation() { + return network_emulation_.get(); + } + + PeerConnectionE2EQualityTestFixture* fixture() { return fixture_.get(); } + + private: + std::unique_ptr network_emulation_; + DefaultVideoQualityAnalyzer* video_quality_analyzer_; + std::unique_ptr fixture_; +}; // IOS debug builds can be quite slow, disabling to avoid issues with timeouts. 
#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG) @@ -134,51 +131,141 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test { #define MAYBE_Smoke Smoke #endif TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) { - RunParams run_params(TimeDelta::seconds(7)); + std::pair + network_links = CreateNetwork(); + AddPeer(network_links.first, [](PeerConfigurer* alice) { + VideoConfig video(160, 120, 15); + video.stream_label = "alice-video"; + video.sync_group = "alice-media"; + alice->AddVideoConfig(std::move(video)); + + AudioConfig audio; + audio.stream_label = "alice-audio"; + audio.mode = AudioConfig::Mode::kFile; + audio.input_file_name = + test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); + audio.sampling_frequency_in_hz = 48000; + audio.sync_group = "alice-media"; + alice->SetAudioConfig(std::move(audio)); + }); + AddPeer(network_links.second, [](PeerConfigurer* charlie) { + charlie->SetName("charlie"); + VideoConfig video(160, 120, 15); + video.stream_label = "charlie-video"; + video.temporal_layers_count = 2; + charlie->AddVideoConfig(std::move(video)); + + AudioConfig audio; + audio.stream_label = "charlie-audio"; + audio.mode = AudioConfig::Mode::kFile; + audio.input_file_name = + test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); + charlie->SetAudioConfig(std::move(audio)); + }); + fixture()->AddQualityMetricsReporter( + std::make_unique( + std::map>( + {{"alice", network_links.first->endpoints()}, + {"charlie", network_links.second->endpoints()}}), + network_emulation())); + RunParams run_params(TimeDelta::Seconds(2)); + run_params.video_codecs = { + VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}; + run_params.use_flex_fec = true; + run_params.use_ulp_fec = true; + run_params.video_encoder_bitrate_multiplier = 1.1; + RunAndCheckEachVideoStreamReceivedFrames(run_params); +} + +// IOS debug builds can be quite slow, disabling to avoid issues with timeouts. 
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG) +#define MAYBE_ChangeNetworkConditions DISABLED_ChangeNetworkConditions +#else +#define MAYBE_ChangeNetworkConditions ChangeNetworkConditions +#endif +TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_ChangeNetworkConditions) { + NetworkEmulationManager::SimulatedNetworkNode alice_node = + network_emulation() + ->NodeBuilder() + .config(BuiltInNetworkBehaviorConfig()) + .Build(); + NetworkEmulationManager::SimulatedNetworkNode bob_node = + network_emulation() + ->NodeBuilder() + .config(BuiltInNetworkBehaviorConfig()) + .Build(); + + EmulatedEndpoint* alice_endpoint = + network_emulation()->CreateEndpoint(EmulatedEndpointConfig()); + EmulatedEndpoint* bob_endpoint = + network_emulation()->CreateEndpoint(EmulatedEndpointConfig()); + + network_emulation()->CreateRoute(alice_endpoint, {alice_node.node}, + bob_endpoint); + network_emulation()->CreateRoute(bob_endpoint, {bob_node.node}, + alice_endpoint); + + EmulatedNetworkManagerInterface* alice_network = + network_emulation()->CreateEmulatedNetworkManagerInterface( + {alice_endpoint}); + EmulatedNetworkManagerInterface* bob_network = + network_emulation()->CreateEmulatedNetworkManagerInterface( + {bob_endpoint}); + + AddPeer(alice_network, [](PeerConfigurer* alice) { + VideoConfig video(160, 120, 15); + video.stream_label = "alice-video"; + video.sync_group = "alice-media"; + alice->AddVideoConfig(std::move(video)); + }); + AddPeer(bob_network, [](PeerConfigurer* bob) {}); + fixture()->AddQualityMetricsReporter( + std::make_unique( + std::map>( + {{"alice", alice_network->endpoints()}, + {"bob", bob_network->endpoints()}}), + network_emulation())); + + fixture()->ExecuteAt(TimeDelta::Seconds(1), [alice_node](TimeDelta) { + BuiltInNetworkBehaviorConfig config; + config.loss_percent = 5; + alice_node.simulation->SetConfig(config); + }); + + RunParams run_params(TimeDelta::Seconds(2)); run_params.video_codecs = { 
VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}; run_params.use_flex_fec = true; run_params.use_ulp_fec = true; run_params.video_encoder_bitrate_multiplier = 1.1; - test::ScopedFieldTrials field_trials( - std::string(field_trial::GetFieldTrialString()) + - "WebRTC-UseStandardBytesStats/Enabled/"); - RunTest( - "smoke", run_params, - [](PeerConfigurer* alice) { - VideoConfig video(640, 360, 30); - video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); - - AudioConfig audio; - audio.stream_label = "alice-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); - audio.sampling_frequency_in_hz = 48000; - alice->SetAudioConfig(std::move(audio)); - }, - [](PeerConfigurer* bob) { - VideoConfig video(640, 360, 30); - video.stream_label = "bob-video"; - video.temporal_layers_count = 2; - bob->AddVideoConfig(std::move(video)); - - VideoConfig screenshare(640, 360, 30); - screenshare.stream_label = "bob-screenshare"; - screenshare.screen_share_config = - ScreenShareConfig(TimeDelta::seconds(2)); - screenshare.screen_share_config->scrolling_params = ScrollingParams( - TimeDelta::ms(1800), kDefaultSlidesWidth, kDefaultSlidesHeight); - bob->AddVideoConfig(screenshare); - - AudioConfig audio; - audio.stream_label = "bob-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); - bob->SetAudioConfig(std::move(audio)); + RunAndCheckEachVideoStreamReceivedFrames(run_params); +} + +// IOS debug builds can be quite slow, disabling to avoid issues with timeouts. 
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG) +#define MAYBE_Screenshare DISABLED_Screenshare +#else +#define MAYBE_Screenshare Screenshare +#endif +TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Screenshare) { + std::pair + network_links = CreateNetwork(); + AddPeer( + network_links.first, [](PeerConfigurer* alice) { + VideoConfig screenshare(320, 180, 30); + screenshare.stream_label = "alice-screenshare"; + screenshare.content_hint = VideoTrackInterface::ContentHint::kText; + ScreenShareConfig screen_share_config = + ScreenShareConfig(TimeDelta::Seconds(2)); + screen_share_config.scrolling_params = ScrollingParams( + TimeDelta::Millis(1800), kDefaultSlidesWidth, kDefaultSlidesHeight); + auto screen_share_frame_generator = + CreateScreenShareFrameGenerator(screenshare, screen_share_config); + alice->AddVideoConfig(std::move(screenshare), + std::move(screen_share_frame_generator)); }); + AddPeer(network_links.second, [](PeerConfigurer* bob) {}); + RunAndCheckEachVideoStreamReceivedFrames(RunParams(TimeDelta::Seconds(2))); } // IOS debug builds can be quite slow, disabling to avoid issues with timeouts. 
@@ -188,27 +275,28 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) { #define MAYBE_Echo Echo #endif TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Echo) { - RunParams run_params(TimeDelta::seconds(7)); + std::pair + network_links = CreateNetwork(); + AddPeer(network_links.first, [](PeerConfigurer* alice) { + AudioConfig audio; + audio.stream_label = "alice-audio"; + audio.mode = AudioConfig::Mode::kFile; + audio.input_file_name = + test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); + audio.sampling_frequency_in_hz = 48000; + alice->SetAudioConfig(std::move(audio)); + }); + AddPeer(network_links.second, [](PeerConfigurer* bob) { + AudioConfig audio; + audio.stream_label = "bob-audio"; + audio.mode = AudioConfig::Mode::kFile; + audio.input_file_name = + test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); + bob->SetAudioConfig(std::move(audio)); + }); + RunParams run_params(TimeDelta::Seconds(2)); run_params.echo_emulation_config = EchoEmulationConfig(); - RunTest( - "smoke", run_params, - [](PeerConfigurer* alice) { - AudioConfig audio; - audio.stream_label = "alice-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); - audio.sampling_frequency_in_hz = 48000; - alice->SetAudioConfig(std::move(audio)); - }, - [](PeerConfigurer* bob) { - AudioConfig audio; - audio.stream_label = "bob-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); - bob->SetAudioConfig(std::move(audio)); - }); + RunAndCheckEachVideoStreamReceivedFrames(run_params); } // IOS debug builds can be quite slow, disabling to avoid issues with timeouts. 
@@ -218,35 +306,25 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Echo) { #define MAYBE_Simulcast Simulcast #endif TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) { - RunParams run_params(TimeDelta::seconds(7)); + std::pair + network_links = CreateNetwork(); + AddPeer(network_links.first, [](PeerConfigurer* alice) { + VideoConfig simulcast(1280, 720, 15); + simulcast.stream_label = "alice-simulcast"; + simulcast.simulcast_config = VideoSimulcastConfig(2, 0); + alice->AddVideoConfig(std::move(simulcast)); + + AudioConfig audio; + audio.stream_label = "alice-audio"; + audio.mode = AudioConfig::Mode::kFile; + audio.input_file_name = + test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); + alice->SetAudioConfig(std::move(audio)); + }); + AddPeer(network_links.second, [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(2)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; - RunTest( - "simulcast", run_params, - [](PeerConfigurer* alice) { - VideoConfig simulcast(1280, 720, 30); - simulcast.stream_label = "alice-simulcast"; - simulcast.simulcast_config = VideoSimulcastConfig(3, 0); - alice->AddVideoConfig(std::move(simulcast)); - - AudioConfig audio; - audio.stream_label = "alice-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); - alice->SetAudioConfig(std::move(audio)); - }, - [](PeerConfigurer* bob) { - VideoConfig video(640, 360, 30); - video.stream_label = "bob-video"; - bob->AddVideoConfig(std::move(video)); - - AudioConfig audio; - audio.stream_label = "bob-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); - bob->SetAudioConfig(std::move(audio)); - }); + RunAndCheckEachVideoStreamReceivedFrames(run_params); } // IOS debug builds can be quite slow, disabling to avoid issues with timeouts. 
@@ -256,37 +334,27 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) { #define MAYBE_Svc Svc #endif TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) { - RunParams run_params(TimeDelta::seconds(7)); + std::pair + network_links = CreateNetwork(); + AddPeer(network_links.first, [](PeerConfigurer* alice) { + VideoConfig simulcast(1280, 720, 15); + simulcast.stream_label = "alice-svc"; + // Because we have network with packets loss we can analyze only the + // highest spatial layer in SVC mode. + simulcast.simulcast_config = VideoSimulcastConfig(2, 1); + alice->AddVideoConfig(std::move(simulcast)); + + AudioConfig audio; + audio.stream_label = "alice-audio"; + audio.mode = AudioConfig::Mode::kFile; + audio.input_file_name = + test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); + alice->SetAudioConfig(std::move(audio)); + }); + AddPeer(network_links.second, [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(2)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp9CodecName)}; - RunTest( - "simulcast", run_params, - [](PeerConfigurer* alice) { - VideoConfig simulcast(1280, 720, 30); - simulcast.stream_label = "alice-simulcast"; - // Because we have network with packets loss we can analyze only the - // highest spatial layer in SVC mode. 
- simulcast.simulcast_config = VideoSimulcastConfig(3, 2); - alice->AddVideoConfig(std::move(simulcast)); - - AudioConfig audio; - audio.stream_label = "alice-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); - alice->SetAudioConfig(std::move(audio)); - }, - [](PeerConfigurer* bob) { - VideoConfig video(640, 360, 30); - video.stream_label = "bob-video"; - bob->AddVideoConfig(std::move(video)); - - AudioConfig audio; - audio.stream_label = "bob-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); - bob->SetAudioConfig(std::move(audio)); - }); + RunAndCheckEachVideoStreamReceivedFrames(run_params); } // IOS debug builds can be quite slow, disabling to avoid issues with timeouts. @@ -296,50 +364,34 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) { #define MAYBE_HighBitrate HighBitrate #endif TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_HighBitrate) { - RunParams run_params(TimeDelta::seconds(7)); + std::pair + network_links = CreateNetwork(); + AddPeer(network_links.first, [](PeerConfigurer* alice) { + BitrateSettings bitrate_settings; + bitrate_settings.start_bitrate_bps = 3'000'000; + bitrate_settings.max_bitrate_bps = 3'000'000; + alice->SetBitrateSettings(bitrate_settings); + VideoConfig video(800, 600, 15); + video.stream_label = "alice-video"; + video.min_encode_bitrate_bps = 500'000; + video.max_encode_bitrate_bps = 3'000'000; + alice->AddVideoConfig(std::move(video)); + + AudioConfig audio; + audio.stream_label = "alice-audio"; + audio.mode = AudioConfig::Mode::kFile; + audio.input_file_name = + test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); + audio.sampling_frequency_in_hz = 48000; + alice->SetAudioConfig(std::move(audio)); + }); + AddPeer(network_links.second, [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(2)); 
run_params.video_codecs = { VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}; - - RunTest( - "smoke", run_params, - [](PeerConfigurer* alice) { - PeerConnectionInterface::BitrateParameters bitrate_params; - bitrate_params.current_bitrate_bps = 3'000'000; - bitrate_params.max_bitrate_bps = 3'000'000; - alice->SetBitrateParameters(bitrate_params); - VideoConfig video(800, 600, 30); - video.stream_label = "alice-video"; - video.min_encode_bitrate_bps = 500'000; - video.max_encode_bitrate_bps = 3'000'000; - alice->AddVideoConfig(std::move(video)); - - AudioConfig audio; - audio.stream_label = "alice-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); - audio.sampling_frequency_in_hz = 48000; - alice->SetAudioConfig(std::move(audio)); - }, - [](PeerConfigurer* bob) { - PeerConnectionInterface::BitrateParameters bitrate_params; - bitrate_params.current_bitrate_bps = 3'000'000; - bitrate_params.max_bitrate_bps = 3'000'000; - bob->SetBitrateParameters(bitrate_params); - VideoConfig video(800, 600, 30); - video.stream_label = "bob-video"; - video.min_encode_bitrate_bps = 500'000; - video.max_encode_bitrate_bps = 3'000'000; - bob->AddVideoConfig(std::move(video)); - - AudioConfig audio; - audio.stream_label = "bob-audio"; - audio.mode = AudioConfig::Mode::kFile; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); - bob->SetAudioConfig(std::move(audio)); - }); + RunAndCheckEachVideoStreamReceivedFrames(run_params); } +} // namespace } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/test/pc/e2e/peer_connection_quality_test.cc b/test/pc/e2e/peer_connection_quality_test.cc index e5667e6ff1..26039f8b5e 100644 --- a/test/pc/e2e/peer_connection_quality_test.cc +++ b/test/pc/e2e/peer_connection_quality_test.cc @@ -14,17 +14,15 @@ #include #include +#include "absl/strings/string_view.h" #include "api/jsep.h" #include 
"api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log_output_file.h" #include "api/scoped_refptr.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/test/create_frame_generator.h" +#include "api/test/time_controller.h" #include "api/test/video_quality_analyzer_interface.h" -#include "api/units/time_delta.h" -#include "api/video/video_source_interface.h" #include "pc/sdp_utils.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/bind.h" @@ -32,12 +30,14 @@ #include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/cpu_info.h" #include "system_wrappers/include/field_trial.h" -#include "test/frame_generator_capturer.h" #include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" +#include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h" +#include "test/pc/e2e/cross_media_metrics_reporter.h" #include "test/pc/e2e/stats_poller.h" -#include "test/platform_video_capturer.h" +#include "test/pc/e2e/test_peer_factory.h" #include "test/testsupport/file_utils.h" +#include "test/testsupport/perf_test.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -46,7 +46,7 @@ namespace { using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig; -constexpr int kDefaultTimeoutMs = 10000; +constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(10); constexpr char kSignalThreadName[] = "signaling_thread"; // 1 signaling, 2 network, 2 worker and 2 extra for codecs etc. 
constexpr int kPeerConnectionUsedThreads = 7; @@ -55,33 +55,16 @@ constexpr int kPeerConnectionUsedThreads = 7; constexpr int kFrameworkUsedThreads = 2; constexpr int kMaxVideoAnalyzerThreads = 8; -constexpr TimeDelta kStatsUpdateInterval = TimeDelta::Seconds<1>(); +constexpr TimeDelta kStatsUpdateInterval = TimeDelta::Seconds(1); -constexpr TimeDelta kAliveMessageLogInterval = TimeDelta::Seconds<30>(); +constexpr TimeDelta kAliveMessageLogInterval = TimeDelta::Seconds(30); -constexpr int kQuickTestModeRunDurationMs = 100; +constexpr TimeDelta kQuickTestModeRunDuration = TimeDelta::Millis(100); // Field trials to enable Flex FEC advertising and receiving. constexpr char kFlexFecEnabledFieldTrials[] = "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/"; -std::string VideoConfigSourcePresenceToString( - const VideoConfig& video_config, - bool has_user_provided_generator) { - char buf[1024]; - rtc::SimpleStringBuilder builder(buf); - builder << "video_config.generator=" << video_config.generator.has_value() - << "; video_config.input_file_name=" - << video_config.input_file_name.has_value() - << "; video_config.screen_share_config=" - << video_config.screen_share_config.has_value() - << "; video_config.capturing_device_index=" - << video_config.capturing_device_index.has_value() - << "; has_user_provided_generator=" << has_user_provided_generator - << ";"; - return builder.str(); -} - class FixturePeerConnectionObserver : public MockPeerConnectionObserver { public: // |on_track_callback| will be called when any new track will be added to peer @@ -121,16 +104,20 @@ class FixturePeerConnectionObserver : public MockPeerConnectionObserver { PeerConnectionE2EQualityTest::PeerConnectionE2EQualityTest( std::string test_case_name, + TimeController& time_controller, std::unique_ptr audio_quality_analyzer, std::unique_ptr video_quality_analyzer) - : clock_(Clock::GetRealTimeClock()), - task_queue_factory_(CreateDefaultTaskQueueFactory()), - 
test_case_name_(std::move(test_case_name)) { + : time_controller_(time_controller), + task_queue_factory_(time_controller_.CreateTaskQueueFactory()), + test_case_name_(std::move(test_case_name)), + executor_(std::make_unique( + time_controller_.GetClock())) { // Create default video quality analyzer. We will always create an analyzer, // even if there are no video streams, because it will be installed into video // encoder/decoder factories. if (video_quality_analyzer == nullptr) { - video_quality_analyzer = std::make_unique(); + video_quality_analyzer = std::make_unique( + time_controller_.GetClock()); } encoded_image_id_controller_ = std::make_unique(); @@ -148,74 +135,14 @@ PeerConnectionE2EQualityTest::PeerConnectionE2EQualityTest( void PeerConnectionE2EQualityTest::ExecuteAt( TimeDelta target_time_since_start, std::function func) { - ExecuteTask(target_time_since_start, absl::nullopt, func); + executor_->ScheduleActivity(target_time_since_start, absl::nullopt, func); } void PeerConnectionE2EQualityTest::ExecuteEvery( TimeDelta initial_delay_since_start, TimeDelta interval, std::function func) { - ExecuteTask(initial_delay_since_start, interval, func); -} - -void PeerConnectionE2EQualityTest::ExecuteTask( - TimeDelta initial_delay_since_start, - absl::optional interval, - std::function func) { - RTC_CHECK(initial_delay_since_start.IsFinite() && - initial_delay_since_start >= TimeDelta::Zero()); - RTC_CHECK(!interval || - (interval->IsFinite() && *interval > TimeDelta::Zero())); - rtc::CritScope crit(&lock_); - ScheduledActivity activity(initial_delay_since_start, interval, func); - if (start_time_.IsInfinite()) { - scheduled_activities_.push(std::move(activity)); - } else { - PostTask(std::move(activity)); - } -} - -void PeerConnectionE2EQualityTest::PostTask(ScheduledActivity activity) { - // Because start_time_ will never change at this point copy it to local - // variable to capture in in lambda without requirement to hold a lock. 
- Timestamp start_time = start_time_; - - TimeDelta remaining_delay = - activity.initial_delay_since_start == TimeDelta::Zero() - ? TimeDelta::Zero() - : activity.initial_delay_since_start - (Now() - start_time_); - if (remaining_delay < TimeDelta::Zero()) { - RTC_LOG(WARNING) << "Executing late task immediately, late by=" - << ToString(remaining_delay.Abs()); - remaining_delay = TimeDelta::Zero(); - } - - if (activity.interval) { - if (remaining_delay == TimeDelta::Zero()) { - repeating_task_handles_.push_back(RepeatingTaskHandle::Start( - task_queue_->Get(), [activity, start_time, this]() { - activity.func(Now() - start_time); - return *activity.interval; - })); - return; - } - repeating_task_handles_.push_back(RepeatingTaskHandle::DelayedStart( - task_queue_->Get(), remaining_delay, [activity, start_time, this]() { - activity.func(Now() - start_time); - return *activity.interval; - })); - return; - } - - if (remaining_delay == TimeDelta::Zero()) { - task_queue_->PostTask( - [activity, start_time, this]() { activity.func(Now() - start_time); }); - return; - } - - task_queue_->PostDelayedTask( - [activity, start_time, this]() { activity.func(Now() - start_time); }, - remaining_delay.ms()); + executor_->ScheduleActivity(initial_delay_since_start, interval, func); } void PeerConnectionE2EQualityTest::AddQualityMetricsReporter( @@ -233,82 +160,89 @@ void PeerConnectionE2EQualityTest::AddPeer( } void PeerConnectionE2EQualityTest::Run(RunParams run_params) { + SetDefaultValuesForMissingParams(&run_params, &peer_configurations_); + ValidateParams(run_params, peer_configurations_); RTC_CHECK_EQ(peer_configurations_.size(), 2) << "Only peer to peer calls are allowed, please add 2 peers"; - std::unique_ptr alice_params = - peer_configurations_[0]->ReleaseParams(); - std::unique_ptr alice_components = - peer_configurations_[0]->ReleaseComponents(); - std::vector> - alice_video_generators = - peer_configurations_[0]->ReleaseVideoGenerators(); - std::unique_ptr bob_params = 
peer_configurations_[1]->ReleaseParams(); - std::unique_ptr bob_components = - peer_configurations_[1]->ReleaseComponents(); - std::vector> - bob_video_generators = peer_configurations_[1]->ReleaseVideoGenerators(); + std::unique_ptr alice_configurer = + std::move(peer_configurations_[0]); + std::unique_ptr bob_configurer = + std::move(peer_configurations_[1]); peer_configurations_.clear(); - SetDefaultValuesForMissingParams( - &run_params, {alice_params.get(), bob_params.get()}, - {&alice_video_generators, &bob_video_generators}); - ValidateParams(run_params, {alice_params.get(), bob_params.get()}, - {&alice_video_generators, &bob_video_generators}); + for (size_t i = 0; i < bob_configurer->params()->video_configs.size(); ++i) { + // We support simulcast only from caller. + RTC_CHECK(!bob_configurer->params()->video_configs[i].simulcast_config) + << "Only simulcast stream from first peer is supported"; + } + SetupRequiredFieldTrials(run_params); // Print test summary - RTC_LOG(INFO) - << "Media quality test: Alice will make a call to Bob with media video=" - << !alice_params->video_configs.empty() - << "; audio=" << alice_params->audio_config.has_value() - << ". Bob will respond with media video=" - << !bob_params->video_configs.empty() - << "; audio=" << bob_params->audio_config.has_value(); - - const std::unique_ptr signaling_thread = rtc::Thread::Create(); - signaling_thread->SetName(kSignalThreadName, nullptr); - signaling_thread->Start(); + RTC_LOG(INFO) << "Media quality test: " << *alice_configurer->params()->name + << " will make a call to " << *bob_configurer->params()->name + << " with media video=" + << !alice_configurer->params()->video_configs.empty() + << "; audio=" + << alice_configurer->params()->audio_config.has_value() << ". 
" + << *bob_configurer->params()->name + << " will respond with media video=" + << !bob_configurer->params()->video_configs.empty() + << "; audio=" + << bob_configurer->params()->audio_config.has_value(); + + const std::unique_ptr signaling_thread = + time_controller_.CreateThread(kSignalThreadName); + media_helper_ = std::make_unique( + video_quality_analyzer_injection_helper_.get(), task_queue_factory_.get(), + time_controller_.GetClock()); // Create a |task_queue_|. - task_queue_ = std::make_unique("pc_e2e_quality_test"); + task_queue_ = std::make_unique( + time_controller_.GetTaskQueueFactory()->CreateTaskQueue( + "pc_e2e_quality_test", webrtc::TaskQueueFactory::Priority::NORMAL)); // Create call participants: Alice and Bob. // Audio streams are intercepted in AudioDeviceModule, so if it is required to // catch output of Alice's stream, Alice's output_dump_file_name should be // passed to Bob's TestPeer setup as audio output file name. - absl::optional alice_remote_audio_config = - TestPeer::CreateRemoteAudioConfig(bob_params->audio_config); - absl::optional bob_remote_audio_config = - TestPeer::CreateRemoteAudioConfig(alice_params->audio_config); - // Copy Alice and Bob video configs to correctly pass them into lambdas. - std::vector alice_video_configs = alice_params->video_configs; - std::vector bob_video_configs = bob_params->video_configs; - - alice_ = TestPeer::CreateTestPeer( - std::move(alice_components), std::move(alice_params), - std::move(alice_video_generators), + absl::optional alice_remote_audio_config = + RemotePeerAudioConfig::Create(bob_configurer->params()->audio_config); + absl::optional bob_remote_audio_config = + RemotePeerAudioConfig::Create(alice_configurer->params()->audio_config); + // Copy Alice and Bob video configs and names to correctly pass them into + // lambdas. 
+ std::vector alice_video_configs = + alice_configurer->params()->video_configs; + std::string alice_name = alice_configurer->params()->name.value(); + std::vector bob_video_configs = + bob_configurer->params()->video_configs; + std::string bob_name = bob_configurer->params()->name.value(); + + TestPeerFactory test_peer_factory( + signaling_thread.get(), time_controller_, + video_quality_analyzer_injection_helper_.get(), task_queue_.get()); + alice_ = test_peer_factory.CreateTestPeer( + std::move(alice_configurer), std::make_unique( - [this, bob_video_configs]( + [this, bob_video_configs, alice_name]( rtc::scoped_refptr transceiver) { - OnTrackCallback(transceiver, bob_video_configs); + OnTrackCallback(alice_name, transceiver, bob_video_configs); }, [this]() { StartVideo(alice_video_sources_); }), - video_quality_analyzer_injection_helper_.get(), signaling_thread.get(), alice_remote_audio_config, run_params.video_encoder_bitrate_multiplier, - run_params.echo_emulation_config, task_queue_.get()); - bob_ = TestPeer::CreateTestPeer( - std::move(bob_components), std::move(bob_params), - std::move(bob_video_generators), + run_params.echo_emulation_config); + bob_ = test_peer_factory.CreateTestPeer( + std::move(bob_configurer), std::make_unique( - [this, alice_video_configs]( - rtc::scoped_refptr transceiver) { - OnTrackCallback(transceiver, alice_video_configs); + [this, alice_video_configs, + bob_name](rtc::scoped_refptr transceiver) { + OnTrackCallback(bob_name, transceiver, alice_video_configs); }, [this]() { StartVideo(bob_video_sources_); }), - video_quality_analyzer_injection_helper_.get(), signaling_thread.get(), bob_remote_audio_config, run_params.video_encoder_bitrate_multiplier, - run_params.echo_emulation_config, task_queue_.get()); + run_params.echo_emulation_config); int num_cores = CpuInfo::DetectNumberOfCores(); RTC_DCHECK_GE(num_cores, 1); @@ -321,12 +255,20 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { video_analyzer_threads = 
std::min(video_analyzer_threads, kMaxVideoAnalyzerThreads); RTC_LOG(INFO) << "video_analyzer_threads=" << video_analyzer_threads; - - video_quality_analyzer_injection_helper_->Start(test_case_name_, - video_analyzer_threads); + quality_metrics_reporters_.push_back( + std::make_unique( + time_controller_.GetClock())); + quality_metrics_reporters_.push_back( + std::make_unique()); + + video_quality_analyzer_injection_helper_->Start( + test_case_name_, + std::vector{alice_->params()->name.value(), + bob_->params()->name.value()}, + video_analyzer_threads); audio_quality_analyzer_->Start(test_case_name_, &analyzer_helper_); for (auto& reporter : quality_metrics_reporters_) { - reporter->Start(test_case_name_); + reporter->Start(test_case_name_, &analyzer_helper_); } // Start RTCEventLog recording if requested. @@ -351,75 +293,71 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { return kAliveMessageLogInterval; }); - RTC_LOG(INFO) << "Configuration is done. Now Alice is calling to Bob..."; - - // Setup call. - signaling_thread->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnectionE2EQualityTest::SetupCallOnSignalingThread, this, - run_params)); - { - rtc::CritScope crit(&lock_); - start_time_ = Now(); - while (!scheduled_activities_.empty()) { - PostTask(std::move(scheduled_activities_.front())); - scheduled_activities_.pop(); - } - } + RTC_LOG(INFO) << "Configuration is done. Now " << *alice_->params()->name + << " is calling to " << *bob_->params()->name << "..."; + // Setup stats poller. 
std::vector observers = { audio_quality_analyzer_.get(), video_quality_analyzer_injection_helper_.get()}; for (auto& reporter : quality_metrics_reporters_) { observers.push_back(reporter.get()); } - StatsPoller stats_poller(observers, - {{"alice", alice_.get()}, {"bob", bob_.get()}}); - - task_queue_->PostTask([&stats_poller, this]() { - RTC_DCHECK_RUN_ON(task_queue_.get()); - stats_polling_task_ = - RepeatingTaskHandle::Start(task_queue_->Get(), [this, &stats_poller]() { - RTC_DCHECK_RUN_ON(task_queue_.get()); - stats_poller.PollStatsAndNotifyObservers(); - return kStatsUpdateInterval; - }); - }); + StatsPoller stats_poller(observers, {{*alice_->params()->name, alice_.get()}, + {*bob_->params()->name, bob_.get()}}); + executor_->ScheduleActivity(TimeDelta::Zero(), kStatsUpdateInterval, + [&stats_poller](TimeDelta) { + stats_poller.PollStatsAndNotifyObservers(); + }); + + // Setup call. + signaling_thread->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnectionE2EQualityTest::SetupCallOnSignalingThread, this, + run_params)); + std::unique_ptr signaling_interceptor = + CreateSignalingInterceptor(run_params); + // Connect peers. 
+ signaling_thread->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnectionE2EQualityTest::ExchangeOfferAnswer, this, + signaling_interceptor.get())); + WaitUntilIceCandidatesGathered(signaling_thread.get()); + + signaling_thread->Invoke( + RTC_FROM_HERE, + rtc::Bind(&PeerConnectionE2EQualityTest::ExchangeIceCandidates, this, + signaling_interceptor.get())); + WaitUntilPeersAreConnected(signaling_thread.get()); + + executor_->Start(task_queue_.get()); + Timestamp start_time = Now(); - rtc::Event done; bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest"); if (is_quick_test_enabled) { - done.Wait(kQuickTestModeRunDurationMs); + time_controller_.AdvanceTime(kQuickTestModeRunDuration); } else { - done.Wait(run_params.run_duration.ms()); + time_controller_.AdvanceTime(run_params.run_duration); } RTC_LOG(INFO) << "Test is done, initiating disconnect sequence."; + // Stop all client started tasks to prevent their access to any call related + // objects after these objects will be destroyed during call tear down. + executor_->Stop(); + // There is no guarantee, that last stats collection will happen at the end + // of the call, so we force it after executor, which is among others is doing + // stats collection, was stopped. task_queue_->SendTask( - [&stats_poller, this]() { - RTC_DCHECK_RUN_ON(task_queue_.get()); - stats_polling_task_.Stop(); + [&stats_poller]() { // Get final end-of-call stats. stats_poller.PollStatsAndNotifyObservers(); }, RTC_FROM_HERE); - // We need to detach AEC dumping from peers, because dump uses |task_queue_| // inside. alice_->DetachAecDump(); bob_->DetachAecDump(); - // Stop all client started tasks on task queue to prevent their access to any - // call related objects after these objects will be destroyed during call tear - // down. - task_queue_->SendTask( - [this]() { - rtc::CritScope crit(&lock_); - for (auto& handle : repeating_task_handles_) { - handle.Stop(); - } - }, - RTC_FROM_HERE); // Tear down the call. 
signaling_thread->Invoke( RTC_FROM_HERE, @@ -428,10 +366,11 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { Timestamp end_time = Now(); RTC_LOG(INFO) << "All peers are disconnected."; { - rtc::CritScope crit(&lock_); - real_test_duration_ = end_time - start_time_; + MutexLock lock(&lock_); + real_test_duration_ = end_time - start_time; } + ReportGeneralTestResults(); audio_quality_analyzer_->Stop(); video_quality_analyzer_injection_helper_->Stop(); for (auto& reporter : quality_metrics_reporters_) { @@ -441,175 +380,12 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { // Reset |task_queue_| after test to cleanup. task_queue_.reset(); - // Ensuring that TestPeers have been destroyed in order to correctly close - // Audio dumps. - RTC_CHECK(!alice_); - RTC_CHECK(!bob_); - // Ensuring that TestVideoCapturerVideoTrackSource and VideoFrameWriter - // are destroyed on the right thread. + alice_ = nullptr; + bob_ = nullptr; + // Ensuring that TestVideoCapturerVideoTrackSource are destroyed on the right + // thread. 
RTC_CHECK(alice_video_sources_.empty()); RTC_CHECK(bob_video_sources_.empty()); - RTC_CHECK(video_writers_.empty()); -} - -void PeerConnectionE2EQualityTest::SetDefaultValuesForMissingParams( - RunParams* run_params, - std::vector params, - std::vector>*> - video_generators) { - int video_counter = 0; - int audio_counter = 0; - std::set video_labels; - std::set audio_labels; - for (size_t i = 0; i < params.size(); ++i) { - auto* p = params[i]; - for (size_t j = 0; j < p->video_configs.size(); ++j) { - VideoConfig& video_config = p->video_configs[j]; - std::unique_ptr& video_generator = - (*video_generators[i])[j]; - if (!video_config.generator && !video_config.input_file_name && - !video_config.screen_share_config && - !video_config.capturing_device_index && !video_generator) { - video_config.generator = VideoGeneratorType::kDefault; - } - if (!video_config.stream_label) { - std::string label; - do { - label = "_auto_video_stream_label_" + std::to_string(video_counter); - ++video_counter; - } while (!video_labels.insert(label).second); - video_config.stream_label = label; - } - } - if (p->audio_config) { - if (!p->audio_config->stream_label) { - std::string label; - do { - label = "_auto_audio_stream_label_" + std::to_string(audio_counter); - ++audio_counter; - } while (!audio_labels.insert(label).second); - p->audio_config->stream_label = label; - } - } - } - - if (run_params->video_codecs.empty()) { - run_params->video_codecs.push_back(VideoCodecConfig( - run_params->video_codec_name, run_params->video_codec_required_params)); - } -} - -void PeerConnectionE2EQualityTest::ValidateParams( - const RunParams& run_params, - std::vector params, - std::vector>*> - video_generators) { - RTC_CHECK_GT(run_params.video_encoder_bitrate_multiplier, 0.0); - - std::set video_labels; - std::set audio_labels; - int media_streams_count = 0; - - bool has_simulcast = false; - for (size_t i = 0; i < params.size(); ++i) { - Params* p = params[i]; - if (p->audio_config) { - 
media_streams_count++; - } - media_streams_count += p->video_configs.size(); - - // Validate that each video config has exactly one of |generator|, - // |input_file_name| or |screen_share_config| set. Also validate that all - // video stream labels are unique. - for (size_t j = 0; j < p->video_configs.size(); ++j) { - VideoConfig& video_config = p->video_configs[j]; - RTC_CHECK(video_config.stream_label); - bool inserted = - video_labels.insert(video_config.stream_label.value()).second; - RTC_CHECK(inserted) << "Duplicate video_config.stream_label=" - << video_config.stream_label.value(); - int input_sources_count = 0; - if (video_config.generator) - ++input_sources_count; - if (video_config.input_file_name) - ++input_sources_count; - if (video_config.screen_share_config) - ++input_sources_count; - if (video_config.capturing_device_index) - ++input_sources_count; - if ((*video_generators[i])[j]) - ++input_sources_count; - - // TODO(titovartem) handle video_generators case properly - RTC_CHECK_EQ(input_sources_count, 1) << VideoConfigSourcePresenceToString( - video_config, (*video_generators[i])[j] != nullptr); - - if (video_config.screen_share_config) { - if (video_config.screen_share_config->slides_yuv_file_names.empty()) { - if (video_config.screen_share_config->scrolling_params) { - // If we have scrolling params, then its |source_width| and - // |source_heigh| will be used as width and height of video input, - // so we have to validate it against width and height of default - // input. 
- RTC_CHECK_EQ(video_config.screen_share_config->scrolling_params - ->source_width, - kDefaultSlidesWidth); - RTC_CHECK_EQ(video_config.screen_share_config->scrolling_params - ->source_height, - kDefaultSlidesHeight); - } else { - RTC_CHECK_EQ(video_config.width, kDefaultSlidesWidth); - RTC_CHECK_EQ(video_config.height, kDefaultSlidesHeight); - } - } - if (video_config.screen_share_config->scrolling_params) { - RTC_CHECK_LE( - video_config.screen_share_config->scrolling_params->duration, - video_config.screen_share_config->slide_change_interval); - RTC_CHECK_GE( - video_config.screen_share_config->scrolling_params->source_width, - video_config.width); - RTC_CHECK_GE( - video_config.screen_share_config->scrolling_params->source_height, - video_config.height); - } - } - if (video_config.simulcast_config) { - has_simulcast = true; - // We support simulcast only from caller. - RTC_CHECK_EQ(i, 0) - << "Only simulcast stream from first peer is supported"; - RTC_CHECK(!video_config.max_encode_bitrate_bps) - << "Setting max encode bitrate is not implemented for simulcast."; - RTC_CHECK(!video_config.min_encode_bitrate_bps) - << "Setting min encode bitrate is not implemented for simulcast."; - } - } - if (p->audio_config) { - bool inserted = - audio_labels.insert(p->audio_config->stream_label.value()).second; - RTC_CHECK(inserted) << "Duplicate audio_config.stream_label=" - << p->audio_config->stream_label.value(); - // Check that if mode input file name specified only if mode is kFile. 
- if (p->audio_config.value().mode == AudioConfig::Mode::kGenerated) { - RTC_CHECK(!p->audio_config.value().input_file_name); - } - if (p->audio_config.value().mode == AudioConfig::Mode::kFile) { - RTC_CHECK(p->audio_config.value().input_file_name); - RTC_CHECK( - test::FileExists(p->audio_config.value().input_file_name.value())) - << p->audio_config.value().input_file_name.value() - << " doesn't exist"; - } - } - } - if (has_simulcast) { - RTC_CHECK_EQ(run_params.video_codecs.size(), 1) - << "Only 1 video codec is supported when simulcast is enabled in at " - << "least 1 video config"; - } - - RTC_CHECK_GT(media_streams_count, 0) << "No media in the call."; } void PeerConnectionE2EQualityTest::SetupRequiredFieldTrials( @@ -625,33 +401,26 @@ void PeerConnectionE2EQualityTest::SetupRequiredFieldTrials( } void PeerConnectionE2EQualityTest::OnTrackCallback( + absl::string_view peer_name, rtc::scoped_refptr transceiver, std::vector remote_video_configs) { const rtc::scoped_refptr& track = transceiver->receiver()->track(); - RTC_CHECK_EQ(transceiver->receiver()->stream_ids().size(), 1); - std::string stream_label = transceiver->receiver()->stream_ids().front(); - analyzer_helper_.AddTrackToStreamMapping(track->id(), stream_label); + RTC_CHECK_EQ(transceiver->receiver()->stream_ids().size(), 2) + << "Expected 2 stream ids: 1st - sync group, 2nd - unique stream label"; + std::string sync_group = transceiver->receiver()->stream_ids()[0]; + std::string stream_label = transceiver->receiver()->stream_ids()[1]; + analyzer_helper_.AddTrackToStreamMapping(track->id(), stream_label, + sync_group); if (track->kind() != MediaStreamTrackInterface::kVideoKind) { return; } - VideoConfig* video_config = nullptr; - for (auto& config : remote_video_configs) { - if (config.stream_label == stream_label) { - video_config = &config; - break; - } - } - RTC_CHECK(video_config); - test::VideoFrameWriter* writer = MaybeCreateVideoWriter( - video_config->output_dump_file_name, *video_config); // 
It is safe to cast here, because it is checked above that // track->kind() is kVideoKind. auto* video_track = static_cast(track.get()); std::unique_ptr> video_sink = - video_quality_analyzer_injection_helper_->CreateVideoSink(*video_config, - writer); + video_quality_analyzer_injection_helper_->CreateVideoSink(peer_name); video_track->AddOrUpdateSink(video_sink.get(), rtc::VideoSinkWants()); output_video_sinks_.push_back(std::move(video_sink)); } @@ -683,10 +452,14 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread( // Because simulcast enabled |run_params.video_codecs| has only 1 element. if (run_params.video_codecs[0].name == cricket::kVp8CodecName) { // For Vp8 simulcast we need to add as many RtpEncodingParameters to the - // track as many simulcast streams requested. + // track as many simulcast streams requested. If they specified in + // |video_config.simulcast_config| it should be copied from there. for (int i = 0; i < video_config.simulcast_config->simulcast_streams_count; ++i) { RtpEncodingParameters enc_params; + if (video_config.simulcast_config->encoding_params.size() > 0) { + enc_params = video_config.simulcast_config->encoding_params[i]; + } // We need to be sure, that all rids will be unique with all mids. 
enc_params.rid = std::to_string(alice_transceivers_counter) + "000" + std::to_string(i); @@ -722,182 +495,19 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread( } // Then add media for Alice and Bob - alice_video_sources_ = MaybeAddMedia(alice_.get()); - bob_video_sources_ = MaybeAddMedia(bob_.get()); + media_helper_->MaybeAddAudio(alice_.get()); + alice_video_sources_ = media_helper_->MaybeAddVideo(alice_.get()); + media_helper_->MaybeAddAudio(bob_.get()); + bob_video_sources_ = media_helper_->MaybeAddVideo(bob_.get()); SetPeerCodecPreferences(alice_.get(), run_params); SetPeerCodecPreferences(bob_.get(), run_params); - - SetupCall(run_params); } void PeerConnectionE2EQualityTest::TearDownCallOnSignalingThread() { TearDownCall(); } -std::vector> -PeerConnectionE2EQualityTest::MaybeAddMedia(TestPeer* peer) { - MaybeAddAudio(peer); - return MaybeAddVideo(peer); -} - -std::vector> -PeerConnectionE2EQualityTest::MaybeAddVideo(TestPeer* peer) { - // Params here valid because of pre-run validation. - Params* params = peer->params(); - std::vector> out; - for (size_t i = 0; i < params->video_configs.size(); ++i) { - auto video_config = params->video_configs[i]; - // Setup input video source into peer connection. 
- test::VideoFrameWriter* writer = - MaybeCreateVideoWriter(video_config.input_dump_file_name, video_config); - std::unique_ptr capturer = CreateVideoCapturer( - video_config, peer->ReleaseVideoGenerator(i), - video_quality_analyzer_injection_helper_->CreateFramePreprocessor( - video_config, writer)); - rtc::scoped_refptr source = - new rtc::RefCountedObject( - std::move(capturer), - /*is_screencast=*/video_config.screen_share_config && - video_config.screen_share_config->use_text_content_hint); - out.push_back(source); - RTC_LOG(INFO) << "Adding video with video_config.stream_label=" - << video_config.stream_label.value(); - rtc::scoped_refptr track = - peer->pc_factory()->CreateVideoTrack(video_config.stream_label.value(), - source); - if (video_config.screen_share_config && - video_config.screen_share_config->use_text_content_hint) { - track->set_content_hint(VideoTrackInterface::ContentHint::kText); - } - RTCErrorOr> sender = - peer->AddTrack(track, {video_config.stream_label.value()}); - RTC_CHECK(sender.ok()); - if (video_config.temporal_layers_count) { - RtpParameters rtp_parameters = sender.value()->GetParameters(); - for (auto& encoding_parameters : rtp_parameters.encodings) { - encoding_parameters.num_temporal_layers = - video_config.temporal_layers_count; - } - RTCError res = sender.value()->SetParameters(rtp_parameters); - RTC_CHECK(res.ok()) << "Failed to set RTP parameters"; - } - } - return out; -} - -std::unique_ptr -PeerConnectionE2EQualityTest::CreateVideoCapturer( - const VideoConfig& video_config, - std::unique_ptr generator, - std::unique_ptr - frame_preprocessor) { - if (video_config.capturing_device_index) { - std::unique_ptr capturer = - test::CreateVideoCapturer(video_config.width, video_config.height, - video_config.fps, - *video_config.capturing_device_index); - RTC_CHECK(capturer) - << "Failed to obtain input stream from capturing device #" - << *video_config.capturing_device_index; - 
capturer->SetFramePreprocessor(std::move(frame_preprocessor)); - return capturer; - } - - std::unique_ptr frame_generator = nullptr; - if (generator) { - frame_generator = std::move(generator); - } - - if (video_config.generator) { - absl::optional - frame_generator_type = absl::nullopt; - if (video_config.generator == VideoGeneratorType::kDefault) { - frame_generator_type = test::FrameGeneratorInterface::OutputType::kI420; - } else if (video_config.generator == VideoGeneratorType::kI420A) { - frame_generator_type = test::FrameGeneratorInterface::OutputType::kI420A; - } else if (video_config.generator == VideoGeneratorType::kI010) { - frame_generator_type = test::FrameGeneratorInterface::OutputType::kI010; - } - frame_generator = - test::CreateSquareFrameGenerator(static_cast(video_config.width), - static_cast(video_config.height), - frame_generator_type, absl::nullopt); - } - if (video_config.input_file_name) { - frame_generator = test::CreateFromYuvFileFrameGenerator( - std::vector(/*count=*/1, - video_config.input_file_name.value()), - video_config.width, video_config.height, /*frame_repeat_count=*/1); - } - if (video_config.screen_share_config) { - frame_generator = CreateScreenShareFrameGenerator(video_config); - } - RTC_CHECK(frame_generator) << "Unsupported video_config input source"; - - auto capturer = std::make_unique( - clock_, std::move(frame_generator), video_config.fps, - *task_queue_factory_); - capturer->SetFramePreprocessor(std::move(frame_preprocessor)); - capturer->Init(); - return capturer; -} - -std::unique_ptr -PeerConnectionE2EQualityTest::CreateScreenShareFrameGenerator( - const VideoConfig& video_config) { - RTC_CHECK(video_config.screen_share_config); - if (video_config.screen_share_config->generate_slides) { - return test::CreateSlideFrameGenerator( - video_config.width, video_config.height, - video_config.screen_share_config->slide_change_interval.seconds() * - video_config.fps); - } - std::vector slides = - 
video_config.screen_share_config->slides_yuv_file_names; - if (slides.empty()) { - // If slides is empty we need to add default slides as source. In such case - // video width and height is validated to be equal to kDefaultSlidesWidth - // and kDefaultSlidesHeight. - slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); - slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); - slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); - slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); - } - if (!video_config.screen_share_config->scrolling_params) { - // Cycle image every slide_change_interval seconds. - return test::CreateFromYuvFileFrameGenerator( - slides, video_config.width, video_config.height, - video_config.screen_share_config->slide_change_interval.seconds() * - video_config.fps); - } - - // |pause_duration| is nonnegative. It is validated in ValidateParams(...). - TimeDelta pause_duration = - video_config.screen_share_config->slide_change_interval - - video_config.screen_share_config->scrolling_params->duration; - - return test::CreateScrollingInputFromYuvFilesFrameGenerator( - clock_, slides, - video_config.screen_share_config->scrolling_params->source_width, - video_config.screen_share_config->scrolling_params->source_height, - video_config.width, video_config.height, - video_config.screen_share_config->scrolling_params->duration.ms(), - pause_duration.ms()); -} - -void PeerConnectionE2EQualityTest::MaybeAddAudio(TestPeer* peer) { - if (!peer->params()->audio_config) { - return; - } - const AudioConfig& audio_config = peer->params()->audio_config.value(); - rtc::scoped_refptr source = - peer->pc_factory()->CreateAudioSource(audio_config.audio_options); - rtc::scoped_refptr track = - peer->pc_factory()->CreateAudioTrack(*audio_config.stream_label, source); - peer->AddTrack(track, {*audio_config.stream_label}); -} - void PeerConnectionE2EQualityTest::SetPeerCodecPreferences( TestPeer* peer, 
const RunParams& run_params) { @@ -934,7 +544,9 @@ void PeerConnectionE2EQualityTest::SetPeerCodecPreferences( } } -void PeerConnectionE2EQualityTest::SetupCall(const RunParams& run_params) { +std::unique_ptr +PeerConnectionE2EQualityTest::CreateSignalingInterceptor( + const RunParams& run_params) { std::map stream_label_to_simulcast_streams_count; // We add only Alice here, because simulcast/svc is supported only from the // first peer. @@ -948,20 +560,35 @@ void PeerConnectionE2EQualityTest::SetupCall(const RunParams& run_params) { PatchingParams patching_params(run_params.video_codecs, run_params.use_conference_mode, stream_label_to_simulcast_streams_count); - SignalingInterceptor signaling_interceptor(patching_params); - // Connect peers. - ExchangeOfferAnswer(&signaling_interceptor); - // Do the SDP negotiation, and also exchange ice candidates. - ASSERT_EQ_WAIT(alice_->signaling_state(), PeerConnectionInterface::kStable, - kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(alice_->IsIceGatheringDone(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(bob_->IsIceGatheringDone(), kDefaultTimeoutMs); - - ExchangeIceCandidates(&signaling_interceptor); + return std::make_unique(patching_params); +} + +void PeerConnectionE2EQualityTest::WaitUntilIceCandidatesGathered( + rtc::Thread* signaling_thread) { + ASSERT_TRUE(time_controller_.Wait( + [&]() { + return signaling_thread->Invoke(RTC_FROM_HERE, [&]() { + return alice_->IsIceGatheringDone() && bob_->IsIceGatheringDone(); + }); + }, + 2 * kDefaultTimeout)); +} + +void PeerConnectionE2EQualityTest::WaitUntilPeersAreConnected( + rtc::Thread* signaling_thread) { // This means that ICE and DTLS are connected. 
- ASSERT_TRUE_WAIT(bob_->IsIceConnected(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(alice_->IsIceConnected(), kDefaultTimeoutMs); - RTC_LOG(INFO) << "Call is started (all peers are connected)."; + alice_connected_ = time_controller_.Wait( + [&]() { + return signaling_thread->Invoke( + RTC_FROM_HERE, [&]() { return alice_->IsIceConnected(); }); + }, + kDefaultTimeout); + bob_connected_ = time_controller_.Wait( + [&]() { + return signaling_thread->Invoke( + RTC_FROM_HERE, [&]() { return bob_->IsIceConnected(); }); + }, + kDefaultTimeout); } void PeerConnectionE2EQualityTest::ExchangeOfferAnswer( @@ -1012,7 +639,8 @@ void PeerConnectionE2EQualityTest::ExchangeIceCandidates( for (auto& candidate : alice_candidates) { std::string candidate_str; RTC_CHECK(candidate->ToString(&candidate_str)); - RTC_LOG(INFO) << "Alice ICE candidate(mid= " << candidate->sdp_mid() + RTC_LOG(INFO) << *alice_->params()->name + << " ICE candidate(mid= " << candidate->sdp_mid() << "): " << candidate_str; } ASSERT_TRUE(bob_->AddIceCandidates(std::move(alice_candidates))); @@ -1022,7 +650,8 @@ void PeerConnectionE2EQualityTest::ExchangeIceCandidates( for (auto& candidate : bob_candidates) { std::string candidate_str; RTC_CHECK(candidate->ToString(&candidate_str)); - RTC_LOG(INFO) << "Bob ICE candidate(mid= " << candidate->sdp_mid() + RTC_LOG(INFO) << *bob_->params()->name + << " ICE candidate(mid= " << candidate->sdp_mid() << "): " << candidate_str; } ASSERT_TRUE(alice_->AddIceCandidates(std::move(bob_candidates))); @@ -1051,40 +680,24 @@ void PeerConnectionE2EQualityTest::TearDownCall() { alice_video_sources_.clear(); bob_video_sources_.clear(); - alice_.reset(); - bob_.reset(); - for (const auto& video_writer : video_writers_) { - video_writer->Close(); - } - video_writers_.clear(); + media_helper_ = nullptr; } -test::VideoFrameWriter* PeerConnectionE2EQualityTest::MaybeCreateVideoWriter( - absl::optional file_name, - const VideoConfig& config) { - if (!file_name) { - return nullptr; - } - // 
TODO(titovartem) create only one file writer for simulcast video track. - auto video_writer = std::make_unique( - file_name.value(), config.width, config.height, config.fps); - test::VideoFrameWriter* out = video_writer.get(); - video_writers_.push_back(std::move(video_writer)); - return out; +void PeerConnectionE2EQualityTest::ReportGeneralTestResults() { + test::PrintResult(*alice_->params()->name + "_connected", "", test_case_name_, + alice_connected_, "unitless", + /*important=*/false, + test::ImproveDirection::kBiggerIsBetter); + test::PrintResult(*bob_->params()->name + "_connected", "", test_case_name_, + bob_connected_, "unitless", + /*important=*/false, + test::ImproveDirection::kBiggerIsBetter); } Timestamp PeerConnectionE2EQualityTest::Now() const { - return clock_->CurrentTime(); + return time_controller_.GetClock()->CurrentTime(); } -PeerConnectionE2EQualityTest::ScheduledActivity::ScheduledActivity( - TimeDelta initial_delay_since_start, - absl::optional interval, - std::function func) - : initial_delay_since_start(initial_delay_since_start), - interval(interval), - func(std::move(func)) {} - } // namespace webrtc_pc_e2e } // namespace webrtc diff --git a/test/pc/e2e/peer_connection_quality_test.h b/test/pc/e2e/peer_connection_quality_test.h index 894e78f6b0..9ce19a80e4 100644 --- a/test/pc/e2e/peer_connection_quality_test.h +++ b/test/pc/e2e/peer_connection_quality_test.h @@ -15,15 +15,15 @@ #include #include +#include "absl/strings/string_view.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/audio_quality_analyzer_interface.h" -#include "api/test/frame_generator_interface.h" #include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/time_controller.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "pc/video_track_source.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" -#include "rtc_base/task_utils/repeating_task.h" #include 
"rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -31,191 +31,19 @@ #include "test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h" #include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" #include "test/pc/e2e/analyzer_helper.h" +#include "test/pc/e2e/media/media_helper.h" +#include "test/pc/e2e/peer_configurer.h" #include "test/pc/e2e/peer_connection_quality_test_params.h" #include "test/pc/e2e/sdp/sdp_changer.h" +#include "test/pc/e2e/test_activities_executor.h" #include "test/pc/e2e/test_peer.h" -#include "test/testsupport/video_frame_writer.h" namespace webrtc { namespace webrtc_pc_e2e { -class PeerConfigurerImpl final - : public PeerConnectionE2EQualityTestFixture::PeerConfigurer { - public: - PeerConfigurerImpl(rtc::Thread* network_thread, - rtc::NetworkManager* network_manager) - : components_(std::make_unique(network_thread, - network_manager)), - params_(std::make_unique()) {} - - PeerConfigurer* SetTaskQueueFactory( - std::unique_ptr task_queue_factory) override { - components_->pcf_dependencies->task_queue_factory = - std::move(task_queue_factory); - return this; - } - PeerConfigurer* SetCallFactory( - std::unique_ptr call_factory) override { - components_->pcf_dependencies->call_factory = std::move(call_factory); - return this; - } - PeerConfigurer* SetEventLogFactory( - std::unique_ptr event_log_factory) override { - components_->pcf_dependencies->event_log_factory = - std::move(event_log_factory); - return this; - } - PeerConfigurer* SetFecControllerFactory( - std::unique_ptr fec_controller_factory) - override { - components_->pcf_dependencies->fec_controller_factory = - std::move(fec_controller_factory); - return this; - } - PeerConfigurer* SetNetworkControllerFactory( - std::unique_ptr - network_controller_factory) override { - components_->pcf_dependencies->network_controller_factory = - std::move(network_controller_factory); - return this; - } - 
PeerConfigurer* SetMediaTransportFactory( - std::unique_ptr media_transport_factory) override { - components_->pcf_dependencies->media_transport_factory = - std::move(media_transport_factory); - return this; - } - PeerConfigurer* SetVideoEncoderFactory( - std::unique_ptr video_encoder_factory) override { - components_->pcf_dependencies->video_encoder_factory = - std::move(video_encoder_factory); - return this; - } - PeerConfigurer* SetVideoDecoderFactory( - std::unique_ptr video_decoder_factory) override { - components_->pcf_dependencies->video_decoder_factory = - std::move(video_decoder_factory); - return this; - } - - PeerConfigurer* SetAsyncResolverFactory( - std::unique_ptr async_resolver_factory) - override { - components_->pc_dependencies->async_resolver_factory = - std::move(async_resolver_factory); - return this; - } - PeerConfigurer* SetRTCCertificateGenerator( - std::unique_ptr cert_generator) - override { - components_->pc_dependencies->cert_generator = std::move(cert_generator); - return this; - } - PeerConfigurer* SetSSLCertificateVerifier( - std::unique_ptr tls_cert_verifier) override { - components_->pc_dependencies->tls_cert_verifier = - std::move(tls_cert_verifier); - return this; - } - - PeerConfigurer* AddVideoConfig( - PeerConnectionE2EQualityTestFixture::VideoConfig config) override { - params_->video_configs.push_back(std::move(config)); - video_generators_.push_back(nullptr); - return this; - } - PeerConfigurer* AddVideoConfig( - PeerConnectionE2EQualityTestFixture::VideoConfig config, - std::unique_ptr generator) override { - params_->video_configs.push_back(std::move(config)); - video_generators_.push_back(std::move(generator)); - return this; - } - PeerConfigurer* SetAudioConfig( - PeerConnectionE2EQualityTestFixture::AudioConfig config) override { - params_->audio_config = std::move(config); - return this; - } - PeerConfigurer* SetNetEqFactory( - std::unique_ptr neteq_factory) override { - components_->pcf_dependencies->neteq_factory = 
std::move(neteq_factory); - return this; - } - PeerConfigurer* SetRtcEventLogPath(std::string path) override { - params_->rtc_event_log_path = std::move(path); - return this; - } - PeerConfigurer* SetAecDumpPath(std::string path) override { - params_->aec_dump_path = std::move(path); - return this; - } - PeerConfigurer* SetRTCConfiguration( - PeerConnectionInterface::RTCConfiguration configuration) override { - params_->rtc_configuration = std::move(configuration); - return this; - } - PeerConfigurer* SetBitrateParameters( - PeerConnectionInterface::BitrateParameters bitrate_params) override { - params_->bitrate_params = bitrate_params; - return this; - } - - PeerConfigurer* SetIceTransportFactory( - std::unique_ptr factory) override { - components_->pc_dependencies->ice_transport_factory = std::move(factory); - return this; - } - - protected: - friend class PeerConnectionE2EQualityTest; - - std::unique_ptr ReleaseComponents() { - return std::move(components_); - } - std::unique_ptr ReleaseParams() { return std::move(params_); } - std::vector> - ReleaseVideoGenerators() { - return std::move(video_generators_); - } - - private: - std::unique_ptr components_; - std::unique_ptr params_; - std::vector> video_generators_; -}; - -class TestVideoCapturerVideoTrackSource : public VideoTrackSource { - public: - TestVideoCapturerVideoTrackSource( - std::unique_ptr video_capturer, - bool is_screencast) - : VideoTrackSource(/*remote=*/false), - video_capturer_(std::move(video_capturer)), - is_screencast_(is_screencast) {} - - ~TestVideoCapturerVideoTrackSource() = default; - - void Start() { SetState(kLive); } - - void Stop() { SetState(kMuted); } - - bool is_screencast() const override { return is_screencast_; } - - protected: - rtc::VideoSourceInterface* source() override { - return video_capturer_.get(); - } - - private: - std::unique_ptr video_capturer_; - const bool is_screencast_; -}; - class PeerConnectionE2EQualityTest : public PeerConnectionE2EQualityTestFixture { 
public: - using VideoGeneratorType = - PeerConnectionE2EQualityTestFixture::VideoGeneratorType; using RunParams = PeerConnectionE2EQualityTestFixture::RunParams; using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; using VideoSimulcastConfig = @@ -226,6 +54,7 @@ class PeerConnectionE2EQualityTest PeerConnectionE2EQualityTest( std::string test_case_name, + TimeController& time_controller, std::unique_ptr audio_quality_analyzer, std::unique_ptr video_quality_analyzer); @@ -246,84 +75,45 @@ class PeerConnectionE2EQualityTest void Run(RunParams run_params) override; TimeDelta GetRealTestDuration() const override { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); RTC_CHECK_NE(real_test_duration_, TimeDelta::Zero()); return real_test_duration_; } private: - struct ScheduledActivity { - ScheduledActivity(TimeDelta initial_delay_since_start, - absl::optional interval, - std::function func); - - TimeDelta initial_delay_since_start; - absl::optional interval; - std::function func; - }; - - void ExecuteTask(TimeDelta initial_delay_since_start, - absl::optional interval, - std::function func); - void PostTask(ScheduledActivity activity) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); - // Set missing params to default values if it is required: - // * Generate video stream labels if some of them missed - // * Generate audio stream labels if some of them missed - // * Set video source generation mode if it is not specified - // * Video codecs under test - void SetDefaultValuesForMissingParams( - RunParams* run_params, - std::vector params, - std::vector>*> - video_sources); - // Validate peer's parameters, also ensure uniqueness of all video stream - // labels. - void ValidateParams( - const RunParams& run_params, - std::vector params, - std::vector>*> - video_sources); // For some functionality some field trials have to be enabled, so we will // enable them here. 
void SetupRequiredFieldTrials(const RunParams& run_params); - void OnTrackCallback(rtc::scoped_refptr transceiver, + void OnTrackCallback(absl::string_view peer_name, + rtc::scoped_refptr transceiver, std::vector remote_video_configs); // Have to be run on the signaling thread. void SetupCallOnSignalingThread(const RunParams& run_params); void TearDownCallOnSignalingThread(); - std::vector> - MaybeAddMedia(TestPeer* peer); - std::vector> - MaybeAddVideo(TestPeer* peer); - std::unique_ptr CreateVideoCapturer( - const VideoConfig& video_config, - std::unique_ptr generator, - std::unique_ptr - frame_preprocessor); - std::unique_ptr - CreateScreenShareFrameGenerator(const VideoConfig& video_config); - void MaybeAddAudio(TestPeer* peer); void SetPeerCodecPreferences(TestPeer* peer, const RunParams& run_params); - void SetupCall(const RunParams& run_params); + std::unique_ptr CreateSignalingInterceptor( + const RunParams& run_params); + void WaitUntilIceCandidatesGathered(rtc::Thread* signaling_thread); + void WaitUntilPeersAreConnected(rtc::Thread* signaling_thread); void ExchangeOfferAnswer(SignalingInterceptor* signaling_interceptor); void ExchangeIceCandidates(SignalingInterceptor* signaling_interceptor); void StartVideo( const std::vector>& sources); void TearDownCall(); - test::VideoFrameWriter* MaybeCreateVideoWriter( - absl::optional file_name, - const VideoConfig& config); + void ReportGeneralTestResults(); Timestamp Now() const; - Clock* const clock_; + TimeController& time_controller_; const std::unique_ptr task_queue_factory_; std::string test_case_name_; std::unique_ptr video_quality_analyzer_injection_helper_; + std::unique_ptr media_helper_; std::unique_ptr encoded_image_id_controller_; std::unique_ptr audio_quality_analyzer_; + std::unique_ptr executor_; std::vector> peer_configurations_; @@ -338,31 +128,20 @@ class PeerConnectionE2EQualityTest alice_video_sources_; std::vector> bob_video_sources_; - std::vector> video_writers_; std::vector>> 
output_video_sinks_; AnalyzerHelper analyzer_helper_; - rtc::CriticalSection lock_; - // Time when test call was started. Minus infinity means that call wasn't - // started yet. - Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity(); + mutable Mutex lock_; TimeDelta real_test_duration_ RTC_GUARDED_BY(lock_) = TimeDelta::Zero(); - // Queue of activities that were added before test call was started. - // Activities from this queue will be posted on the |task_queue_| after test - // call will be set up and then this queue will be unused. - std::queue scheduled_activities_ RTC_GUARDED_BY(lock_); - // List of task handles for activities, that are posted on |task_queue_| as - // repeated during the call. - std::vector repeating_task_handles_ - RTC_GUARDED_BY(lock_); - - RepeatingTaskHandle stats_polling_task_ RTC_GUARDED_BY(&task_queue_); // Task queue, that is used for running activities during test call. // This task queue will be created before call set up and will be destroyed // immediately before call tear down. 
std::unique_ptr task_queue_; + + bool alice_connected_ = false; + bool bob_connected_ = false; }; } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/peer_connection_quality_test_params.h b/test/pc/e2e/peer_connection_quality_test_params.h index 5472ba9f53..e1c0232cb2 100644 --- a/test/pc/e2e/peer_connection_quality_test_params.h +++ b/test/pc/e2e/peer_connection_quality_test_params.h @@ -20,8 +20,8 @@ #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/peerconnection_quality_test_fixture.h" -#include "api/transport/media/media_transport_interface.h" #include "api/transport/network_control.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/network.h" @@ -47,13 +47,14 @@ struct PeerConnectionFactoryComponents { std::unique_ptr event_log_factory; std::unique_ptr fec_controller_factory; std::unique_ptr network_controller_factory; - std::unique_ptr media_transport_factory; std::unique_ptr neteq_factory; // Will be passed to MediaEngineInterface, that will be used in // PeerConnectionFactory. std::unique_ptr video_encoder_factory; std::unique_ptr video_decoder_factory; + + std::unique_ptr trials; }; // Contains most parts from PeerConnectionDependencies. Also all fields are @@ -100,6 +101,8 @@ struct InjectableComponents { // unlimited amount of video streams) and rtc configuration, that will be used // to set up peer connection. struct Params { + // Peer name. If empty - default one will be set by the fixture. + absl::optional name; // If |video_configs| is empty - no video should be added to the test call. 
std::vector video_configs; // If |audio_config| is set audio stream will be configured @@ -112,7 +115,7 @@ struct Params { absl::optional aec_dump_path; PeerConnectionInterface::RTCConfiguration rtc_configuration; - PeerConnectionInterface::BitrateParameters bitrate_params; + BitrateSettings bitrate_settings; }; } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/sdp/sdp_changer.cc b/test/pc/e2e/sdp/sdp_changer.cc index 68f418e04f..f2aeb1b92d 100644 --- a/test/pc/e2e/sdp/sdp_changer.cc +++ b/test/pc/e2e/sdp/sdp_changer.cc @@ -28,7 +28,7 @@ using VideoCodecConfig = PeerConnectionE2EQualityTestFixture::VideoCodecConfig; std::string CodecRequiredParamsToString( const std::map& codec_required_params) { rtc::StringBuilder out; - for (auto entry : codec_required_params) { + for (const auto& entry : codec_required_params) { out << entry.first << "=" << entry.second << ";"; } return out.str(); @@ -42,6 +42,16 @@ std::vector FilterVideoCodecCapabilities( bool use_ulpfec, bool use_flexfec, rtc::ArrayView supported_codecs) { + RTC_LOG(INFO) << "Peer connection support these codecs:"; + for (const auto& codec : supported_codecs) { + RTC_LOG(INFO) << "Codec: " << codec.name; + if (!codec.parameters.empty()) { + RTC_LOG(INFO) << "Params:"; + for (const auto& param : codec.parameters) { + RTC_LOG(INFO) << " " << param.first << "=" << param.second; + } + } + } std::vector output_codecs; // Find requested codecs among supported and add them to output in the order // they were requested. 
@@ -52,7 +62,7 @@ std::vector FilterVideoCodecCapabilities( continue; } bool parameters_matched = true; - for (auto item : codec_request.required_params) { + for (const auto& item : codec_request.required_params) { auto it = codec.parameters.find(item.first); if (it == codec.parameters.end()) { parameters_matched = false; @@ -121,7 +131,7 @@ void SignalingInterceptor::FillSimulcastContext( media_desc->set_simulcast_description(simulcast_description); info.simulcast_description = media_desc->simulcast_description(); - for (auto extension : media_desc->rtp_header_extensions()) { + for (const auto& extension : media_desc->rtp_header_extensions()) { if (extension.uri == RtpExtension::kMidUri) { info.mid_extension = extension; } else if (extension.uri == RtpExtension::kRidUri) { @@ -204,7 +214,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Offer( // single simulcast section will be converted. Do it before removing content // because otherwise description will be deleted. std::unique_ptr prototype_media_desc = - absl::WrapUnique(simulcast_content->media_description()->Copy()); + simulcast_content->media_description()->Clone(); // Remove simulcast video section from offer. 
RTC_CHECK(desc->RemoveContentByName(simulcast_content->mid())); @@ -312,9 +322,10 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp9Offer( RTC_CHECK_EQ(content.media_description()->streams().size(), 1); cricket::StreamParams& stream = content.media_description()->mutable_streams()[0]; - RTC_CHECK_EQ(stream.stream_ids().size(), 1) - << "Too many stream ids in video stream"; - std::string stream_label = stream.stream_ids()[0]; + RTC_CHECK_EQ(stream.stream_ids().size(), 2) + << "Expected 2 stream ids in video stream: 1st - sync_group, 2nd - " + "unique label"; + std::string stream_label = stream.stream_ids()[1]; auto it = params_.stream_label_to_simulcast_streams_count.find(stream_label); @@ -432,7 +443,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Answer( // but it have to have receive layers instead of send. So we need to put // send layers from offer to receive layers in answer. cricket::SimulcastDescription simulcast_description; - for (auto layer : info.simulcast_description.send_layers()) { + for (const auto& layer : info.simulcast_description.send_layers()) { simulcast_description.receive_layers().AddLayerWithAlternatives(layer); } media_desc->set_simulcast_description(simulcast_description); diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc new file mode 100644 index 0000000000..e4efe1fd77 --- /dev/null +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc @@ -0,0 +1,360 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats.h" +#include "api/stats/rtcstats_objects.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" +#include "rtc_base/event.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/field_trial.h" +#include "test/testsupport/perf_test.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +constexpr int kStatsWaitTimeoutMs = 1000; + +// Field trial which controls whether to report standard-compliant bytes +// sent/received per stream. If enabled, padding and headers are not included +// in bytes sent or received. 
+constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats"; + +std::unique_ptr PopulateStats( + std::vector endpoints, + NetworkEmulationManager* network_emulation) { + rtc::Event stats_loaded; + std::unique_ptr stats; + network_emulation->GetStats(endpoints, + [&](std::unique_ptr s) { + stats = std::move(s); + stats_loaded.Set(); + }); + bool stats_received = stats_loaded.Wait(kStatsWaitTimeoutMs); + RTC_CHECK(stats_received); + return stats; +} + +std::map PopulateIpToPeer( + const std::map>& + peer_endpoints) { + std::map out; + for (const auto& entry : peer_endpoints) { + for (const EmulatedEndpoint* const endpoint : entry.second) { + out.emplace(endpoint->GetPeerLocalAddress(), entry.first); + } + } + return out; +} + +} // namespace + +StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: + NetworkLayerStatsCollector( + std::map> peer_endpoints, + NetworkEmulationManager* network_emulation) + : peer_endpoints_(std::move(peer_endpoints)), + ip_to_peer_(PopulateIpToPeer(peer_endpoints_)), + network_emulation_(network_emulation) {} + +void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: + Start() { + // Check that network stats are clean before test execution. 
+ for (const auto& entry : peer_endpoints_) { + std::unique_ptr stats = + PopulateStats(entry.second, network_emulation_); + RTC_CHECK_EQ(stats->PacketsSent(), 0); + RTC_CHECK_EQ(stats->PacketsReceived(), 0); + } +} + +std::map +StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: + GetStats() { + std::map peer_to_stats; + std::map> sender_to_receivers; + for (const auto& entry : peer_endpoints_) { + NetworkLayerStats stats; + stats.stats = PopulateStats(entry.second, network_emulation_); + const std::string& peer_name = entry.first; + for (const auto& income_stats_entry : + stats.stats->IncomingStatsPerSource()) { + const rtc::IPAddress& source_ip = income_stats_entry.first; + auto it = ip_to_peer_.find(source_ip); + if (it == ip_to_peer_.end()) { + // Source IP is unknown for this collector, so will be skipped. + continue; + } + sender_to_receivers[it->second].push_back(peer_name); + } + peer_to_stats.emplace(peer_name, std::move(stats)); + } + for (auto& entry : peer_to_stats) { + const std::vector& receivers = + sender_to_receivers[entry.first]; + entry.second.receivers = + std::set(receivers.begin(), receivers.end()); + } + return peer_to_stats; +} + +void StatsBasedNetworkQualityMetricsReporter::Start( + absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) { + test_case_name_ = std::string(test_case_name); + collector_.Start(); + start_time_ = clock_->CurrentTime(); +} + +void StatsBasedNetworkQualityMetricsReporter::OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) { + PCStats cur_stats; + + auto inbound_stats = report->GetStatsOfType(); + for (const auto& stat : inbound_stats) { + cur_stats.payload_received += + DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul) + + stat->header_bytes_received.ValueOrDefault(0ul)); + } + + auto outbound_stats = report->GetStatsOfType(); + for (const auto& stat : outbound_stats) { + cur_stats.payload_sent += + 
DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul) + + stat->header_bytes_sent.ValueOrDefault(0ul)); + } + + auto candidate_pairs_stats = report->GetStatsOfType(); + for (const auto& stat : candidate_pairs_stats) { + cur_stats.total_received += + DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul)); + cur_stats.total_sent += + DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul)); + cur_stats.packets_received += stat->packets_received.ValueOrDefault(0ul); + cur_stats.packets_sent += stat->packets_sent.ValueOrDefault(0ul); + } + + MutexLock lock(&mutex_); + pc_stats_[std::string(pc_label)] = cur_stats; +} + +void StatsBasedNetworkQualityMetricsReporter::StopAndReportResults() { + Timestamp end_time = clock_->CurrentTime(); + + if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) { + RTC_LOG(LS_ERROR) + << "Non-standard GetStats; \"payload\" counts include RTP headers"; + } + + std::map stats = collector_.GetStats(); + for (const auto& entry : stats) { + LogNetworkLayerStats(entry.first, entry.second); + } + MutexLock lock(&mutex_); + for (const auto& pair : pc_stats_) { + auto it = stats.find(pair.first); + RTC_CHECK(it != stats.end()) + << "Peer name used for PeerConnection stats collection and peer name " + "used for endpoints naming doesn't match. 
No endpoints found for " + "peer " + << pair.first; + const NetworkLayerStats& network_layer_stats = it->second; + int64_t total_packets_received = 0; + bool found = false; + for (const auto& dest_peer : network_layer_stats.receivers) { + auto pc_stats_it = pc_stats_.find(dest_peer); + if (pc_stats_it == pc_stats_.end()) { + continue; + } + found = true; + total_packets_received += pc_stats_it->second.packets_received; + } + int64_t packet_loss = -1; + if (found) { + packet_loss = pair.second.packets_sent - total_packets_received; + } + ReportStats(pair.first, pair.second, network_layer_stats, packet_loss, + end_time); + } +} + +void StatsBasedNetworkQualityMetricsReporter::ReportStats( + const std::string& pc_label, + const PCStats& pc_stats, + const NetworkLayerStats& network_layer_stats, + int64_t packet_loss, + const Timestamp& end_time) { + ReportResult("bytes_dropped", pc_label, + network_layer_stats.stats->BytesDropped().bytes(), + "sizeInBytes"); + ReportResult("packets_dropped", pc_label, + network_layer_stats.stats->PacketsDropped(), "unitless"); + + ReportResult("payload_bytes_received", pc_label, + pc_stats.payload_received.bytes(), "sizeInBytes"); + ReportResult("payload_bytes_sent", pc_label, pc_stats.payload_sent.bytes(), + "sizeInBytes"); + + ReportResult("bytes_sent", pc_label, pc_stats.total_sent.bytes(), + "sizeInBytes"); + ReportResult("packets_sent", pc_label, pc_stats.packets_sent, "unitless"); + ReportResult("average_send_rate", pc_label, + (pc_stats.total_sent / (end_time - start_time_)).bytes_per_sec(), + "bytesPerSecond"); + ReportResult("bytes_received", pc_label, pc_stats.total_received.bytes(), + "sizeInBytes"); + ReportResult("packets_received", pc_label, pc_stats.packets_received, + "unitless"); + ReportResult( + "average_receive_rate", pc_label, + (pc_stats.total_received / (end_time - start_time_)).bytes_per_sec(), + "bytesPerSecond"); + ReportResult("sent_packets_loss", pc_label, packet_loss, "unitless"); +} + +void 
StatsBasedNetworkQualityMetricsReporter::ReportResult( + const std::string& metric_name, + const std::string& network_label, + const double value, + const std::string& unit) const { + test::PrintResult(metric_name, /*modifier=*/"", + GetTestCaseName(network_label), value, unit, + /*important=*/false); +} + +void StatsBasedNetworkQualityMetricsReporter::ReportResult( + const std::string& metric_name, + const std::string& network_label, + const SamplesStatsCounter& value, + const std::string& unit) const { + test::PrintResult(metric_name, /*modifier=*/"", + GetTestCaseName(network_label), value, unit, + /*important=*/false); +} + +std::string StatsBasedNetworkQualityMetricsReporter::GetTestCaseName( + absl::string_view network_label) const { + rtc::StringBuilder builder; + builder << test_case_name_ << "/" << network_label.data(); + return builder.str(); +} + +void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats( + const std::string& peer_name, + const NetworkLayerStats& stats) const { + DataRate average_send_rate = stats.stats->PacketsSent() >= 2 + ? stats.stats->AverageSendRate() + : DataRate::Zero(); + DataRate average_receive_rate = stats.stats->PacketsReceived() >= 2 + ? 
stats.stats->AverageReceiveRate() + : DataRate::Zero(); + rtc::StringBuilder log; + log << "Raw network layer statistic for [" << peer_name << "]:\n" + << "Local IPs:\n"; + std::vector local_ips = stats.stats->LocalAddresses(); + for (size_t i = 0; i < local_ips.size(); ++i) { + log << " " << local_ips[i].ToString() << "\n"; + } + if (!stats.stats->SentPacketsSizeCounter().IsEmpty()) { + ReportResult("sent_packets_size", peer_name, + stats.stats->SentPacketsSizeCounter(), "sizeInBytes"); + } + if (!stats.stats->ReceivedPacketsSizeCounter().IsEmpty()) { + ReportResult("received_packets_size", peer_name, + stats.stats->ReceivedPacketsSizeCounter(), "sizeInBytes"); + } + if (!stats.stats->DroppedPacketsSizeCounter().IsEmpty()) { + ReportResult("dropped_packets_size", peer_name, + stats.stats->DroppedPacketsSizeCounter(), "sizeInBytes"); + } + if (!stats.stats->SentPacketsQueueWaitTimeUs().IsEmpty()) { + ReportResult("sent_packets_queue_wait_time_us", peer_name, + stats.stats->SentPacketsQueueWaitTimeUs(), "unitless"); + } + + log << "Send statistic:\n" + << " packets: " << stats.stats->PacketsSent() + << " bytes: " << stats.stats->BytesSent().bytes() + << " avg_rate (bytes/sec): " << average_send_rate.bytes_per_sec() + << " avg_rate (bps): " << average_send_rate.bps() << "\n" + << "Send statistic per destination:\n"; + + for (const auto& entry : stats.stats->OutgoingStatsPerDestination()) { + DataRate source_average_send_rate = entry.second->PacketsSent() >= 2 + ? 
entry.second->AverageSendRate() + : DataRate::Zero(); + log << "(" << entry.first.ToString() << "):\n" + << " packets: " << entry.second->PacketsSent() + << " bytes: " << entry.second->BytesSent().bytes() + << " avg_rate (bytes/sec): " << source_average_send_rate.bytes_per_sec() + << " avg_rate (bps): " << source_average_send_rate.bps() << "\n"; + if (!entry.second->SentPacketsSizeCounter().IsEmpty()) { + ReportResult("sent_packets_size", + peer_name + "/" + entry.first.ToString(), + stats.stats->SentPacketsSizeCounter(), "sizeInBytes"); + } + } + + log << "Receive statistic:\n" + << " packets: " << stats.stats->PacketsReceived() + << " bytes: " << stats.stats->BytesReceived().bytes() + << " avg_rate (bytes/sec): " << average_receive_rate.bytes_per_sec() + << " avg_rate (bps): " << average_receive_rate.bps() << "\n" + << "Receive statistic per source:\n"; + + for (const auto& entry : stats.stats->IncomingStatsPerSource()) { + DataRate source_average_receive_rate = + entry.second->PacketsReceived() >= 2 + ? 
entry.second->AverageReceiveRate() + : DataRate::Zero(); + log << "(" << entry.first.ToString() << "):\n" + << " packets: " << entry.second->PacketsReceived() + << " bytes: " << entry.second->BytesReceived().bytes() + << " avg_rate (bytes/sec): " + << source_average_receive_rate.bytes_per_sec() + << " avg_rate (bps): " << source_average_receive_rate.bps() << "\n"; + if (!entry.second->ReceivedPacketsSizeCounter().IsEmpty()) { + ReportResult("received_packets_size", + peer_name + "/" + entry.first.ToString(), + stats.stats->ReceivedPacketsSizeCounter(), "sizeInBytes"); + } + if (!entry.second->DroppedPacketsSizeCounter().IsEmpty()) { + ReportResult("dropped_packets_size", + peer_name + "/" + entry.first.ToString(), + stats.stats->DroppedPacketsSizeCounter(), "sizeInBytes"); + } + } + + RTC_LOG(INFO) << log.str(); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter.h b/test/pc/e2e/stats_based_network_quality_metrics_reporter.h new file mode 100644 index 0000000000..d14bb43e1b --- /dev/null +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter.h @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef TEST_PC_E2E_STATS_BASED_NETWORK_QUALITY_METRICS_REPORTER_H_ +#define TEST_PC_E2E_STATS_BASED_NETWORK_QUALITY_METRICS_REPORTER_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/units/data_size.h" +#include "api/units/timestamp.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +class StatsBasedNetworkQualityMetricsReporter + : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter { + public: + // |peer_endpoints| maps peer name to the peer's endpoints; used to report + // network layer stability stats and to log network layer metrics. + StatsBasedNetworkQualityMetricsReporter( + std::map> peer_endpoints, + NetworkEmulationManager* network_emulation) + : collector_(std::move(peer_endpoints), network_emulation), + clock_(network_emulation->time_controller()->GetClock()) {} + ~StatsBasedNetworkQualityMetricsReporter() override = default; + + // Network stats must be empty when this method is invoked. + void Start(absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) override; + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override; + void StopAndReportResults() override; + + private: + struct PCStats { + // TODO(nisse): Separate audio and video counters. Depends on standard stat + // counters, enabled by field trial "WebRTC-UseStandardBytesStats". + DataSize payload_received = DataSize::Zero(); + DataSize payload_sent = DataSize::Zero(); + + // Total bytes/packets sent/received in all RTCTransport's.
+ DataSize total_received = DataSize::Zero(); + DataSize total_sent = DataSize::Zero(); + int64_t packets_received = 0; + int64_t packets_sent = 0; + }; + + struct NetworkLayerStats { + std::unique_ptr stats; + std::set receivers; + }; + + class NetworkLayerStatsCollector { + public: + NetworkLayerStatsCollector( + std::map> peer_endpoints, + NetworkEmulationManager* network_emulation); + + void Start(); + + std::map GetStats(); + + private: + const std::map> peer_endpoints_; + const std::map ip_to_peer_; + NetworkEmulationManager* const network_emulation_; + }; + + void ReportStats(const std::string& pc_label, + const PCStats& pc_stats, + const NetworkLayerStats& network_layer_stats, + int64_t packet_loss, + const Timestamp& end_time); + void ReportResult(const std::string& metric_name, + const std::string& network_label, + const double value, + const std::string& unit) const; + void ReportResult(const std::string& metric_name, + const std::string& network_label, + const SamplesStatsCounter& value, + const std::string& unit) const; + std::string GetTestCaseName(absl::string_view network_label) const; + void LogNetworkLayerStats(const std::string& peer_name, + const NetworkLayerStats& stats) const; + + NetworkLayerStatsCollector collector_; + Clock* const clock_; + + std::string test_case_name_; + Timestamp start_time_ = Timestamp::MinusInfinity(); + + Mutex mutex_; + std::map pc_stats_ RTC_GUARDED_BY(mutex_); +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_STATS_BASED_NETWORK_QUALITY_METRICS_REPORTER_H_ diff --git a/test/pc/e2e/stats_poller.cc b/test/pc/e2e/stats_poller.cc index 987f26e7e8..e6973e6af1 100644 --- a/test/pc/e2e/stats_poller.cc +++ b/test/pc/e2e/stats_poller.cc @@ -18,14 +18,13 @@ namespace webrtc { namespace webrtc_pc_e2e { void InternalStatsObserver::PollStats() { - peer_->pc()->GetStats(this, nullptr, - webrtc::PeerConnectionInterface::StatsOutputLevel:: - kStatsOutputLevelStandard); + 
peer_->pc()->GetStats(this); } -void InternalStatsObserver::OnComplete(const StatsReports& reports) { +void InternalStatsObserver::OnStatsDelivered( + const rtc::scoped_refptr& report) { for (auto* observer : observers_) { - observer->OnStatsReports(pc_label_, reports); + observer->OnStatsReports(pc_label_, report); } } diff --git a/test/pc/e2e/stats_poller.h b/test/pc/e2e/stats_poller.h index 3d0c2d6801..157a147834 100644 --- a/test/pc/e2e/stats_poller.h +++ b/test/pc/e2e/stats_poller.h @@ -17,6 +17,7 @@ #include #include "api/peer_connection_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" #include "api/test/stats_observer_interface.h" #include "test/pc/e2e/test_peer.h" @@ -25,7 +26,7 @@ namespace webrtc_pc_e2e { // Helper class that will notify all the webrtc::test::StatsObserverInterface // objects subscribed. -class InternalStatsObserver : public StatsObserver { +class InternalStatsObserver : public RTCStatsCollectorCallback { public: InternalStatsObserver(std::string pc_label, TestPeer* peer, @@ -36,7 +37,8 @@ class InternalStatsObserver : public StatsObserver { void PollStats(); - void OnComplete(const StatsReports& reports) override; + void OnStatsDelivered( + const rtc::scoped_refptr& report) override; private: std::string pc_label_; diff --git a/test/pc/e2e/test_activities_executor.cc b/test/pc/e2e/test_activities_executor.cc new file mode 100644 index 0000000000..ded39920f2 --- /dev/null +++ b/test/pc/e2e/test_activities_executor.cc @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "test/pc/e2e/test_activities_executor.h" + +#include +#include + +#include "absl/memory/memory.h" +#include "rtc_base/checks.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +void TestActivitiesExecutor::Start(TaskQueueForTest* task_queue) { + RTC_DCHECK(task_queue); + task_queue_ = task_queue; + MutexLock lock(&lock_); + start_time_ = Now(); + while (!scheduled_activities_.empty()) { + PostActivity(std::move(scheduled_activities_.front())); + scheduled_activities_.pop(); + } +} + +void TestActivitiesExecutor::Stop() { + if (task_queue_ == nullptr) { + // Already stopped or not started. + return; + } + task_queue_->SendTask( + [this]() { + MutexLock lock(&lock_); + for (auto& handle : repeating_task_handles_) { + handle.Stop(); + } + }, + RTC_FROM_HERE); + task_queue_ = nullptr; +} + +void TestActivitiesExecutor::ScheduleActivity( + TimeDelta initial_delay_since_start, + absl::optional interval, + std::function func) { + RTC_CHECK(initial_delay_since_start.IsFinite() && + initial_delay_since_start >= TimeDelta::Zero()); + RTC_CHECK(!interval || + (interval->IsFinite() && *interval > TimeDelta::Zero())); + MutexLock lock(&lock_); + ScheduledActivity activity(initial_delay_since_start, interval, func); + if (start_time_.IsInfinite()) { + scheduled_activities_.push(std::move(activity)); + } else { + PostActivity(std::move(activity)); + } +} + +void TestActivitiesExecutor::PostActivity(ScheduledActivity activity) { + // Because start_time_ will never change at this point, copy it to a local + // variable to capture it in the lambda without needing to hold a lock. + Timestamp start_time = start_time_; + + TimeDelta remaining_delay = + activity.initial_delay_since_start == TimeDelta::Zero() + ?
TimeDelta::Zero() + : activity.initial_delay_since_start - (Now() - start_time); + if (remaining_delay < TimeDelta::Zero()) { + RTC_LOG(WARNING) << "Executing late task immediately, late by=" + << ToString(remaining_delay.Abs()); + remaining_delay = TimeDelta::Zero(); + } + + if (activity.interval) { + if (remaining_delay == TimeDelta::Zero()) { + repeating_task_handles_.push_back(RepeatingTaskHandle::Start( + task_queue_->Get(), [activity, start_time, this]() { + activity.func(Now() - start_time); + return *activity.interval; + })); + return; + } + repeating_task_handles_.push_back(RepeatingTaskHandle::DelayedStart( + task_queue_->Get(), remaining_delay, [activity, start_time, this]() { + activity.func(Now() - start_time); + return *activity.interval; + })); + return; + } + + if (remaining_delay == TimeDelta::Zero()) { + task_queue_->PostTask( + [activity, start_time, this]() { activity.func(Now() - start_time); }); + return; + } + + task_queue_->PostDelayedTask( + [activity, start_time, this]() { activity.func(Now() - start_time); }, + remaining_delay.ms()); +} + +Timestamp TestActivitiesExecutor::Now() const { + return clock_->CurrentTime(); +} + +TestActivitiesExecutor::ScheduledActivity::ScheduledActivity( + TimeDelta initial_delay_since_start, + absl::optional interval, + std::function func) + : initial_delay_since_start(initial_delay_since_start), + interval(interval), + func(std::move(func)) {} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/test_activities_executor.h b/test/pc/e2e/test_activities_executor.h new file mode 100644 index 0000000000..94e73d1e5f --- /dev/null +++ b/test/pc/e2e/test_activities_executor.h @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_ +#define TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue_for_test.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +class TestActivitiesExecutor { + public: + explicit TestActivitiesExecutor(Clock* clock) : clock_(clock) {} + ~TestActivitiesExecutor() { Stop(); } + + // Starts scheduled activities according to their schedule. All activities + // that will be scheduled after Start(...) was invoked will be executed + // immediately according to their schedule. + void Start(TaskQueueForTest* task_queue); + void Stop(); + + // Schedule activity to be executed. If test isn't started yet, then activity + // will be executed according to its schedule after Start() will be invoked. + // If test is started, then it will be executed immediately according to its + // schedule. + void ScheduleActivity(TimeDelta initial_delay_since_start, + absl::optional interval, + std::function func); + + private: + struct ScheduledActivity { + ScheduledActivity(TimeDelta initial_delay_since_start, + absl::optional interval, + std::function func); + + TimeDelta initial_delay_since_start; + absl::optional interval; + std::function func; + }; + + void PostActivity(ScheduledActivity activity) + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + Timestamp Now() const; + + Clock* const clock_; + + TaskQueueForTest* task_queue_; + + Mutex lock_; + // Time when test was started. Minus infinity means that it wasn't started + // yet. 
+ Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity(); + // Queue of activities that were added before test was started. + // Activities from this queue will be posted on the |task_queue_| after test + // will be set up and then this queue will be unused. + std::queue scheduled_activities_ RTC_GUARDED_BY(lock_); + // List of task handles for activities, that are posted on |task_queue_| as + // repeated during the call. + std::vector repeating_task_handles_ + RTC_GUARDED_BY(lock_); +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_ diff --git a/test/pc/e2e/test_peer.cc b/test/pc/e2e/test_peer.cc index 1b80633a0b..65d3eb36b8 100644 --- a/test/pc/e2e/test_peer.cc +++ b/test/pc/e2e/test_peer.cc @@ -9,356 +9,15 @@ */ #include "test/pc/e2e/test_peer.h" +#include #include #include "absl/memory/memory.h" -#include "absl/types/optional.h" -#include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/scoped_refptr.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/video_codecs/builtin_video_decoder_factory.h" -#include "api/video_codecs/builtin_video_encoder_factory.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/include/audio_processing.h" -#include "p2p/client/basic_port_allocator.h" -#include "rtc_base/location.h" -#include "test/pc/e2e/echo/echo_emulation.h" -#include "test/testsupport/copy_to_file_audio_capturer.h" namespace webrtc { namespace webrtc_pc_e2e { -namespace { - -using RemotePeerAudioConfig = - ::webrtc::webrtc_pc_e2e::TestPeer::RemotePeerAudioConfig; -using AudioConfig = - ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; -using EchoEmulationConfig = 
::webrtc::webrtc_pc_e2e:: - PeerConnectionE2EQualityTestFixture::EchoEmulationConfig; - -constexpr int16_t kGeneratedAudioMaxAmplitude = 32000; -constexpr int kDefaultSamplingFrequencyInHz = 48000; - -// Sets mandatory entities in injectable components like |pcf_dependencies| -// and |pc_dependencies| if they are omitted. Also setup required -// dependencies, that won't be specially provided by factory and will be just -// transferred to peer connection creation code. -void SetMandatoryEntities(InjectableComponents* components) { - RTC_DCHECK(components->pcf_dependencies); - RTC_DCHECK(components->pc_dependencies); - - // Setup required peer connection factory dependencies. - if (components->pcf_dependencies->task_queue_factory == nullptr) { - components->pcf_dependencies->task_queue_factory = - CreateDefaultTaskQueueFactory(); - } - if (components->pcf_dependencies->call_factory == nullptr) { - components->pcf_dependencies->call_factory = webrtc::CreateCallFactory(); - } - if (components->pcf_dependencies->event_log_factory == nullptr) { - components->pcf_dependencies->event_log_factory = - std::make_unique( - components->pcf_dependencies->task_queue_factory.get()); - } -} - -class TestPeerComponents { - public: - TestPeerComponents(std::unique_ptr components, - const Params& params, - MockPeerConnectionObserver* observer, - VideoQualityAnalyzerInjectionHelper* video_analyzer_helper, - rtc::Thread* signaling_thread, - absl::optional remote_audio_config, - double bitrate_multiplier, - absl::optional echo_emulation_config, - rtc::TaskQueue* task_queue) - : audio_config_opt_(params.audio_config), - observer_(observer), - video_analyzer_helper_(video_analyzer_helper), - signaling_thread_(signaling_thread), - remote_audio_config_(std::move(remote_audio_config)), - bitrate_multiplier_(bitrate_multiplier), - echo_emulation_config_(std::move(echo_emulation_config)) { - for (auto& video_config : params.video_configs) { - // Stream label should be set by fixture 
implementation here. - RTC_DCHECK(video_config.stream_label); - bool res = - stream_required_spatial_index_ - .insert({*video_config.stream_label, - video_config.simulcast_config - ? absl::optional(video_config.simulcast_config - ->target_spatial_index) - : absl::nullopt}) - .second; - RTC_DCHECK(res) << "Duplicate video_config.stream_label=" - << *video_config.stream_label; - } - - // Create audio processing, that will be used to create media engine that - // then will be added into peer connection. See CreateMediaEngine(...). - audio_processing_ = webrtc::AudioProcessingBuilder().Create(); - if (params.aec_dump_path) { - audio_processing_->AttachAecDump( - AecDumpFactory::Create(*params.aec_dump_path, -1, task_queue)); - } - - // Create peer connection factory. - PeerConnectionFactoryDependencies pcf_deps = CreatePCFDependencies( - std::move(components->pcf_dependencies), components->network_thread); - peer_connection_factory_ = - CreateModularPeerConnectionFactory(std::move(pcf_deps)); - - // Create peer connection. - PeerConnectionDependencies pc_deps = - CreatePCDependencies(std::move(components->pc_dependencies)); - peer_connection_ = peer_connection_factory_->CreatePeerConnection( - params.rtc_configuration, std::move(pc_deps)); - peer_connection_->SetBitrate(params.bitrate_params); - } - - rtc::scoped_refptr peer_connection_factory() - const { - return peer_connection_factory_; - } - rtc::scoped_refptr peer_connection() const { - return peer_connection_; - } - rtc::scoped_refptr audio_processing() const { - return audio_processing_; - } - - private: - // Creates PeerConnectionFactoryDependencies objects, providing entities - // from InjectableComponents::PeerConnectionFactoryComponents and also - // creating entities, that are required for correct injection of media quality - // analyzers. 
- PeerConnectionFactoryDependencies CreatePCFDependencies( - std::unique_ptr pcf_dependencies, - rtc::Thread* network_thread) { - PeerConnectionFactoryDependencies pcf_deps; - pcf_deps.network_thread = network_thread; - pcf_deps.signaling_thread = signaling_thread_; - pcf_deps.media_engine = CreateMediaEngine(pcf_dependencies.get()); - - pcf_deps.call_factory = std::move(pcf_dependencies->call_factory); - pcf_deps.event_log_factory = std::move(pcf_dependencies->event_log_factory); - pcf_deps.task_queue_factory = - std::move(pcf_dependencies->task_queue_factory); - - if (pcf_dependencies->fec_controller_factory != nullptr) { - pcf_deps.fec_controller_factory = - std::move(pcf_dependencies->fec_controller_factory); - } - if (pcf_dependencies->network_controller_factory != nullptr) { - pcf_deps.network_controller_factory = - std::move(pcf_dependencies->network_controller_factory); - } - if (pcf_dependencies->media_transport_factory != nullptr) { - pcf_deps.media_transport_factory = - std::move(pcf_dependencies->media_transport_factory); - } - if (pcf_dependencies->neteq_factory != nullptr) { - pcf_deps.neteq_factory = std::move(pcf_dependencies->neteq_factory); - } - - return pcf_deps; - } - - std::unique_ptr CreateMediaEngine( - PeerConnectionFactoryComponents* pcf_dependencies) { - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = pcf_dependencies->task_queue_factory.get(); - media_deps.adm = CreateAudioDeviceModule(media_deps.task_queue_factory); - media_deps.audio_processing = audio_processing_; - media_deps.video_encoder_factory = - CreateVideoEncoderFactory(pcf_dependencies); - media_deps.video_decoder_factory = - CreateVideoDecoderFactory(pcf_dependencies); - webrtc::SetMediaEngineDefaults(&media_deps); - return cricket::CreateMediaEngine(std::move(media_deps)); - } - - rtc::scoped_refptr CreateAudioDeviceModule( - TaskQueueFactory* task_queue_factory) { - std::unique_ptr renderer = - CreateAudioRenderer(remote_audio_config_); - 
std::unique_ptr capturer = - CreateAudioCapturer(audio_config_opt_); - RTC_DCHECK(renderer); - RTC_DCHECK(capturer); - - // Setup echo emulation if required. - if (echo_emulation_config_) { - capturer = std::make_unique( - std::move(capturer), *echo_emulation_config_); - renderer = std::make_unique( - std::move(renderer), - static_cast(capturer.get())); - } - - // Setup input stream dumping if required. - if (audio_config_opt_ && audio_config_opt_->input_dump_file_name) { - capturer = std::make_unique( - std::move(capturer), audio_config_opt_->input_dump_file_name.value()); - } - - return TestAudioDeviceModule::Create(task_queue_factory, - std::move(capturer), - std::move(renderer), /*speed=*/1.f); - } - - std::unique_ptr CreateAudioRenderer( - const absl::optional& config) { - if (!config) { - // Return default renderer because we always require some renderer. - return TestAudioDeviceModule::CreateDiscardRenderer( - kDefaultSamplingFrequencyInHz); - } - if (config->output_file_name) { - return TestAudioDeviceModule::CreateBoundedWavFileWriter( - config->output_file_name.value(), config->sampling_frequency_in_hz); - } - return TestAudioDeviceModule::CreateDiscardRenderer( - config->sampling_frequency_in_hz); - } - - std::unique_ptr CreateAudioCapturer( - const absl::optional& audio_config) { - if (!audio_config) { - // If we have no audio config we still need to provide some audio device. - // In such case use generated capturer. Despite of we provided audio here, - // in test media setup audio stream won't be added into peer connection. 
- return TestAudioDeviceModule::CreatePulsedNoiseCapturer( - kGeneratedAudioMaxAmplitude, kDefaultSamplingFrequencyInHz); - } - - switch (audio_config->mode) { - case AudioConfig::Mode::kGenerated: - return TestAudioDeviceModule::CreatePulsedNoiseCapturer( - kGeneratedAudioMaxAmplitude, - audio_config->sampling_frequency_in_hz); - case AudioConfig::Mode::kFile: - RTC_DCHECK(audio_config->input_file_name); - return TestAudioDeviceModule::CreateWavFileReader( - audio_config->input_file_name.value(), /*repeat=*/true); - } - } - - std::unique_ptr CreateVideoEncoderFactory( - PeerConnectionFactoryComponents* pcf_dependencies) { - std::unique_ptr video_encoder_factory; - if (pcf_dependencies->video_encoder_factory != nullptr) { - video_encoder_factory = - std::move(pcf_dependencies->video_encoder_factory); - } else { - video_encoder_factory = CreateBuiltinVideoEncoderFactory(); - } - return video_analyzer_helper_->WrapVideoEncoderFactory( - std::move(video_encoder_factory), bitrate_multiplier_, - stream_required_spatial_index_); - } - - std::unique_ptr CreateVideoDecoderFactory( - PeerConnectionFactoryComponents* pcf_dependencies) { - std::unique_ptr video_decoder_factory; - if (pcf_dependencies->video_decoder_factory != nullptr) { - video_decoder_factory = - std::move(pcf_dependencies->video_decoder_factory); - } else { - video_decoder_factory = CreateBuiltinVideoDecoderFactory(); - } - return video_analyzer_helper_->WrapVideoDecoderFactory( - std::move(video_decoder_factory)); - } - - // Creates PeerConnectionDependencies objects, providing entities - // from InjectableComponents::PeerConnectionComponents. 
- PeerConnectionDependencies CreatePCDependencies( - std::unique_ptr pc_dependencies) { - PeerConnectionDependencies pc_deps(observer_); - - auto port_allocator = std::make_unique( - pc_dependencies->network_manager); - - // This test does not support TCP - int flags = cricket::PORTALLOCATOR_DISABLE_TCP; - port_allocator->set_flags(port_allocator->flags() | flags); - - pc_deps.allocator = std::move(port_allocator); - - if (pc_dependencies->async_resolver_factory != nullptr) { - pc_deps.async_resolver_factory = - std::move(pc_dependencies->async_resolver_factory); - } - if (pc_dependencies->cert_generator != nullptr) { - pc_deps.cert_generator = std::move(pc_dependencies->cert_generator); - } - if (pc_dependencies->tls_cert_verifier != nullptr) { - pc_deps.tls_cert_verifier = std::move(pc_dependencies->tls_cert_verifier); - } - if (pc_dependencies->ice_transport_factory != nullptr) { - pc_deps.ice_transport_factory = - std::move(pc_dependencies->ice_transport_factory); - } - return pc_deps; - } - - rtc::scoped_refptr peer_connection_factory_; - rtc::scoped_refptr peer_connection_; - rtc::scoped_refptr audio_processing_; - - std::map> stream_required_spatial_index_; - absl::optional audio_config_opt_; - MockPeerConnectionObserver* observer_; - VideoQualityAnalyzerInjectionHelper* video_analyzer_helper_; - rtc::Thread* signaling_thread_; - absl::optional remote_audio_config_; - double bitrate_multiplier_; - absl::optional echo_emulation_config_; -}; - -} // namespace - -absl::optional TestPeer::CreateRemoteAudioConfig( - absl::optional config) { - if (!config) { - return absl::nullopt; - } - return RemotePeerAudioConfig(config.value()); -} - -std::unique_ptr TestPeer::CreateTestPeer( - std::unique_ptr components, - std::unique_ptr params, - std::vector> - video_generators, - std::unique_ptr observer, - VideoQualityAnalyzerInjectionHelper* video_analyzer_helper, - rtc::Thread* signaling_thread, - absl::optional remote_audio_config, - double bitrate_multiplier, - 
absl::optional echo_emulation_config, - rtc::TaskQueue* task_queue) { - RTC_DCHECK(components); - RTC_DCHECK(params); - RTC_DCHECK_EQ(params->video_configs.size(), video_generators.size()); - SetMandatoryEntities(components.get()); - params->rtc_configuration.sdp_semantics = SdpSemantics::kUnifiedPlan; - - TestPeerComponents tpc(std::move(components), *params, observer.get(), - video_analyzer_helper, signaling_thread, - std::move(remote_audio_config), bitrate_multiplier, - echo_emulation_config, task_queue); - - return absl::WrapUnique(new TestPeer( - tpc.peer_connection_factory(), tpc.peer_connection(), std::move(observer), - std::move(params), std::move(video_generators), tpc.audio_processing())); -} bool TestPeer::AddIceCandidates( std::vector> candidates) { @@ -383,14 +42,15 @@ TestPeer::TestPeer( rtc::scoped_refptr pc, std::unique_ptr observer, std::unique_ptr params, - std::vector> - video_generators, - rtc::scoped_refptr audio_processing) - : PeerConnectionWrapper::PeerConnectionWrapper(std::move(pc_factory), - std::move(pc), - std::move(observer)), + std::vector video_sources, + rtc::scoped_refptr audio_processing, + std::unique_ptr worker_thread) + : worker_thread_(std::move(worker_thread)), + wrapper_(std::make_unique(std::move(pc_factory), + std::move(pc), + std::move(observer))), params_(std::move(params)), - video_generators_(std::move(video_generators)), + video_sources_(std::move(video_sources)), audio_processing_(audio_processing) {} } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/test_peer.h b/test/pc/e2e/test_peer.h index 3487720887..4310cbda1c 100644 --- a/test/pc/e2e/test_peer.h +++ b/test/pc/e2e/test_peer.h @@ -12,90 +12,106 @@ #define TEST_PC_E2E_TEST_PEER_H_ #include -#include #include #include "absl/memory/memory.h" +#include "absl/types/variant.h" #include "api/test/frame_generator_interface.h" #include "api/test/peerconnection_quality_test_fixture.h" -#include "media/base/media_engine.h" -#include 
"modules/audio_device/include/test_audio_device.h" #include "pc/peer_connection_wrapper.h" -#include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/network.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/thread.h" -#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" +#include "test/pc/e2e/peer_configurer.h" #include "test/pc/e2e/peer_connection_quality_test_params.h" namespace webrtc { namespace webrtc_pc_e2e { // Describes a single participant in the call. -class TestPeer final : public PeerConnectionWrapper { +class TestPeer final { public: - using PeerConnectionWrapper::PeerConnectionWrapper; - using VideoConfig = PeerConnectionE2EQualityTestFixture::VideoConfig; - using AudioConfig = PeerConnectionE2EQualityTestFixture::AudioConfig; - using EchoEmulationConfig = - PeerConnectionE2EQualityTestFixture::EchoEmulationConfig; - - struct RemotePeerAudioConfig { - RemotePeerAudioConfig(AudioConfig config) - : sampling_frequency_in_hz(config.sampling_frequency_in_hz), - output_file_name(config.output_dump_file_name) {} - - int sampling_frequency_in_hz; - absl::optional output_file_name; - }; - - static absl::optional CreateRemoteAudioConfig( - absl::optional config); - - // Setups all components, that should be provided to WebRTC - // PeerConnectionFactory and PeerConnection creation methods, - // also will setup dependencies, that are required for media analyzers - // injection. - // - // |signaling_thread| will be provided by test fixture implementation. 
- // |params| - describes current peer parameters, like current peer video - // streams and audio streams - static std::unique_ptr CreateTestPeer( - std::unique_ptr components, - std::unique_ptr params, - std::vector> - video_generators, - std::unique_ptr observer, - VideoQualityAnalyzerInjectionHelper* video_analyzer_helper, - rtc::Thread* signaling_thread, - absl::optional remote_audio_config, - double bitrate_multiplier, - absl::optional echo_emulation_config, - rtc::TaskQueue* task_queue); - Params* params() const { return params_.get(); } - std::unique_ptr ReleaseVideoGenerator( - size_t i) { - return std::move(video_generators_[i]); + PeerConfigurerImpl::VideoSource ReleaseVideoSource(size_t i) { + return std::move(video_sources_[i]); + } + + PeerConnectionFactoryInterface* pc_factory() { + return wrapper_->pc_factory(); + } + PeerConnectionInterface* pc() { return wrapper_->pc(); } + MockPeerConnectionObserver* observer() { return wrapper_->observer(); } + + std::unique_ptr CreateOffer() { + return wrapper_->CreateOffer(); + } + + std::unique_ptr CreateAnswer() { + return wrapper_->CreateAnswer(); + } + + bool SetLocalDescription(std::unique_ptr desc, + std::string* error_out = nullptr) { + return wrapper_->SetLocalDescription(std::move(desc), error_out); + } + + bool SetRemoteDescription(std::unique_ptr desc, + std::string* error_out = nullptr) { + return wrapper_->SetRemoteDescription(std::move(desc), error_out); + } + + rtc::scoped_refptr AddTransceiver( + cricket::MediaType media_type, + const RtpTransceiverInit& init) { + return wrapper_->AddTransceiver(media_type, init); } - void DetachAecDump() { audio_processing_->DetachAecDump(); } + rtc::scoped_refptr AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids = {}) { + return wrapper_->AddTrack(track, stream_ids); + } + + rtc::scoped_refptr CreateDataChannel( + const std::string& label) { + return wrapper_->CreateDataChannel(label); + } + + PeerConnectionInterface::SignalingState 
signaling_state() { + return wrapper_->signaling_state(); + } + + bool IsIceGatheringDone() { return wrapper_->IsIceGatheringDone(); } + + bool IsIceConnected() { return wrapper_->IsIceConnected(); } + + rtc::scoped_refptr GetStats() { + return wrapper_->GetStats(); + } + + void DetachAecDump() { + if (audio_processing_) { + audio_processing_->DetachAecDump(); + } + } // Adds provided |candidates| to the owned peer connection. bool AddIceCandidates( std::vector> candidates); - private: + protected: + friend class TestPeerFactory; TestPeer(rtc::scoped_refptr pc_factory, rtc::scoped_refptr pc, std::unique_ptr observer, std::unique_ptr params, - std::vector> - video_generators, - rtc::scoped_refptr audio_processing); + std::vector video_sources, + rtc::scoped_refptr audio_processing, + std::unique_ptr worker_thread); + private: + // Keeps ownership of worker thread. It has to be destroyed after |wrapper_|. + std::unique_ptr worker_thread_; + std::unique_ptr wrapper_; std::unique_ptr params_; - std::vector> video_generators_; + std::vector video_sources_; rtc::scoped_refptr audio_processing_; std::vector> remote_ice_candidates_; diff --git a/test/pc/e2e/test_peer_factory.cc b/test/pc/e2e/test_peer_factory.cc new file mode 100644 index 0000000000..eceec778df --- /dev/null +++ b/test/pc/e2e/test_peer_factory.cc @@ -0,0 +1,362 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "test/pc/e2e/test_peer_factory.h" + +#include + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/test/create_time_controller.h" +#include "api/test/time_controller.h" +#include "api/transport/field_trial_based_config.h" +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/builtin_video_encoder_factory.h" +#include "media/engine/webrtc_media_engine.h" +#include "media/engine/webrtc_media_engine_defaults.h" +#include "modules/audio_processing/aec_dump/aec_dump_factory.h" +#include "p2p/client/basic_port_allocator.h" +#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h" +#include "test/pc/e2e/echo/echo_emulation.h" +#include "test/pc/e2e/peer_configurer.h" +#include "test/testsupport/copy_to_file_audio_capturer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { +namespace { + +using AudioConfig = + ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; +using VideoConfig = + ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; +using EchoEmulationConfig = ::webrtc::webrtc_pc_e2e:: + PeerConnectionE2EQualityTestFixture::EchoEmulationConfig; + +constexpr int16_t kGeneratedAudioMaxAmplitude = 32000; +constexpr int kDefaultSamplingFrequencyInHz = 48000; + +// Sets mandatory entities in injectable components like |pcf_dependencies| +// and |pc_dependencies| if they are omitted. Also setup required +// dependencies, that won't be specially provided by factory and will be just +// transferred to peer connection creation code. +void SetMandatoryEntities(InjectableComponents* components, + TimeController& time_controller) { + RTC_DCHECK(components->pcf_dependencies); + RTC_DCHECK(components->pc_dependencies); + + // Setup required peer connection factory dependencies. 
+ if (components->pcf_dependencies->task_queue_factory == nullptr) { + components->pcf_dependencies->task_queue_factory = + time_controller.CreateTaskQueueFactory(); + } + if (components->pcf_dependencies->call_factory == nullptr) { + components->pcf_dependencies->call_factory = + CreateTimeControllerBasedCallFactory(&time_controller); + } + if (components->pcf_dependencies->event_log_factory == nullptr) { + components->pcf_dependencies->event_log_factory = + std::make_unique( + components->pcf_dependencies->task_queue_factory.get()); + } + if (!components->pcf_dependencies->trials) { + components->pcf_dependencies->trials = + std::make_unique(); + } +} + +// Returns mapping from stream label to optional spatial index. +// If we have stream label "Foo" and mapping contains +// 1. |absl::nullopt| means "Foo" isn't simulcast/SVC stream +// 2. |kAnalyzeAnySpatialStream| means all simulcast/SVC streams are required +// 3. Concrete value means that particular simulcast/SVC stream have to be +// analyzed. +std::map> +CalculateRequiredSpatialIndexPerStream( + const std::vector& video_configs) { + std::map> out; + for (auto& video_config : video_configs) { + // Stream label should be set by fixture implementation here. + RTC_DCHECK(video_config.stream_label); + absl::optional spatial_index; + if (video_config.simulcast_config) { + spatial_index = video_config.simulcast_config->target_spatial_index; + if (!spatial_index) { + spatial_index = kAnalyzeAnySpatialStream; + } + } + bool res = out.insert({*video_config.stream_label, spatial_index}).second; + RTC_DCHECK(res) << "Duplicate video_config.stream_label=" + << *video_config.stream_label; + } + return out; +} + +std::unique_ptr CreateAudioRenderer( + const absl::optional& config) { + if (!config) { + // Return default renderer because we always require some renderer. 
+ return TestAudioDeviceModule::CreateDiscardRenderer( + kDefaultSamplingFrequencyInHz); + } + if (config->output_file_name) { + return TestAudioDeviceModule::CreateBoundedWavFileWriter( + config->output_file_name.value(), config->sampling_frequency_in_hz); + } + return TestAudioDeviceModule::CreateDiscardRenderer( + config->sampling_frequency_in_hz); +} + +std::unique_ptr CreateAudioCapturer( + const absl::optional& audio_config) { + if (!audio_config) { + // If we have no audio config we still need to provide some audio device. + // In such case use generated capturer. Despite of we provided audio here, + // in test media setup audio stream won't be added into peer connection. + return TestAudioDeviceModule::CreatePulsedNoiseCapturer( + kGeneratedAudioMaxAmplitude, kDefaultSamplingFrequencyInHz); + } + + switch (audio_config->mode) { + case AudioConfig::Mode::kGenerated: + return TestAudioDeviceModule::CreatePulsedNoiseCapturer( + kGeneratedAudioMaxAmplitude, audio_config->sampling_frequency_in_hz); + case AudioConfig::Mode::kFile: + RTC_DCHECK(audio_config->input_file_name); + return TestAudioDeviceModule::CreateWavFileReader( + audio_config->input_file_name.value(), /*repeat=*/true); + } +} + +rtc::scoped_refptr CreateAudioDeviceModule( + absl::optional audio_config, + absl::optional remote_audio_config, + absl::optional echo_emulation_config, + TaskQueueFactory* task_queue_factory) { + std::unique_ptr renderer = + CreateAudioRenderer(remote_audio_config); + std::unique_ptr capturer = + CreateAudioCapturer(audio_config); + RTC_DCHECK(renderer); + RTC_DCHECK(capturer); + + // Setup echo emulation if required. + if (echo_emulation_config) { + capturer = std::make_unique(std::move(capturer), + *echo_emulation_config); + renderer = std::make_unique( + std::move(renderer), + static_cast(capturer.get())); + } + + // Setup input stream dumping if required. 
+ if (audio_config && audio_config->input_dump_file_name) { + capturer = std::make_unique( + std::move(capturer), audio_config->input_dump_file_name.value()); + } + + return TestAudioDeviceModule::Create(task_queue_factory, std::move(capturer), + std::move(renderer), /*speed=*/1.f); +} + +std::unique_ptr CreateMediaEngine( + PeerConnectionFactoryComponents* pcf_dependencies, + rtc::scoped_refptr audio_device_module, + rtc::scoped_refptr audio_processing) { + cricket::MediaEngineDependencies media_deps; + media_deps.task_queue_factory = pcf_dependencies->task_queue_factory.get(); + media_deps.adm = audio_device_module; + media_deps.audio_processing = audio_processing; + media_deps.video_encoder_factory = + std::move(pcf_dependencies->video_encoder_factory); + media_deps.video_decoder_factory = + std::move(pcf_dependencies->video_decoder_factory); + webrtc::SetMediaEngineDefaults(&media_deps); + RTC_DCHECK(pcf_dependencies->trials); + media_deps.trials = pcf_dependencies->trials.get(); + + return cricket::CreateMediaEngine(std::move(media_deps)); +} + +void WrapVideoEncoderFactory( + absl::string_view peer_name, + double bitrate_multiplier, + std::map> stream_required_spatial_index, + PeerConnectionFactoryComponents* pcf_dependencies, + VideoQualityAnalyzerInjectionHelper* video_analyzer_helper) { + std::unique_ptr video_encoder_factory; + if (pcf_dependencies->video_encoder_factory != nullptr) { + video_encoder_factory = std::move(pcf_dependencies->video_encoder_factory); + } else { + video_encoder_factory = CreateBuiltinVideoEncoderFactory(); + } + pcf_dependencies->video_encoder_factory = + video_analyzer_helper->WrapVideoEncoderFactory( + peer_name, std::move(video_encoder_factory), bitrate_multiplier, + std::move(stream_required_spatial_index)); +} + +void WrapVideoDecoderFactory( + absl::string_view peer_name, + PeerConnectionFactoryComponents* pcf_dependencies, + VideoQualityAnalyzerInjectionHelper* video_analyzer_helper) { + std::unique_ptr 
video_decoder_factory; + if (pcf_dependencies->video_decoder_factory != nullptr) { + video_decoder_factory = std::move(pcf_dependencies->video_decoder_factory); + } else { + video_decoder_factory = CreateBuiltinVideoDecoderFactory(); + } + pcf_dependencies->video_decoder_factory = + video_analyzer_helper->WrapVideoDecoderFactory( + peer_name, std::move(video_decoder_factory)); +} + +// Creates PeerConnectionFactoryDependencies objects, providing entities +// from InjectableComponents::PeerConnectionFactoryComponents. +PeerConnectionFactoryDependencies CreatePCFDependencies( + std::unique_ptr pcf_dependencies, + std::unique_ptr media_engine, + rtc::Thread* signaling_thread, + rtc::Thread* worker_thread, + rtc::Thread* network_thread) { + PeerConnectionFactoryDependencies pcf_deps; + pcf_deps.signaling_thread = signaling_thread; + pcf_deps.worker_thread = worker_thread; + pcf_deps.network_thread = network_thread; + pcf_deps.media_engine = std::move(media_engine); + + pcf_deps.call_factory = std::move(pcf_dependencies->call_factory); + pcf_deps.event_log_factory = std::move(pcf_dependencies->event_log_factory); + pcf_deps.task_queue_factory = std::move(pcf_dependencies->task_queue_factory); + + if (pcf_dependencies->fec_controller_factory != nullptr) { + pcf_deps.fec_controller_factory = + std::move(pcf_dependencies->fec_controller_factory); + } + if (pcf_dependencies->network_controller_factory != nullptr) { + pcf_deps.network_controller_factory = + std::move(pcf_dependencies->network_controller_factory); + } + if (pcf_dependencies->neteq_factory != nullptr) { + pcf_deps.neteq_factory = std::move(pcf_dependencies->neteq_factory); + } + if (pcf_dependencies->trials != nullptr) { + pcf_deps.trials = std::move(pcf_dependencies->trials); + } + + return pcf_deps; +} + +// Creates PeerConnectionDependencies objects, providing entities +// from InjectableComponents::PeerConnectionComponents. 
+PeerConnectionDependencies CreatePCDependencies( + MockPeerConnectionObserver* observer, + std::unique_ptr pc_dependencies) { + PeerConnectionDependencies pc_deps(observer); + + auto port_allocator = std::make_unique( + pc_dependencies->network_manager); + + // This test does not support TCP + int flags = cricket::PORTALLOCATOR_DISABLE_TCP; + port_allocator->set_flags(port_allocator->flags() | flags); + + pc_deps.allocator = std::move(port_allocator); + + if (pc_dependencies->async_resolver_factory != nullptr) { + pc_deps.async_resolver_factory = + std::move(pc_dependencies->async_resolver_factory); + } + if (pc_dependencies->cert_generator != nullptr) { + pc_deps.cert_generator = std::move(pc_dependencies->cert_generator); + } + if (pc_dependencies->tls_cert_verifier != nullptr) { + pc_deps.tls_cert_verifier = std::move(pc_dependencies->tls_cert_verifier); + } + if (pc_dependencies->ice_transport_factory != nullptr) { + pc_deps.ice_transport_factory = + std::move(pc_dependencies->ice_transport_factory); + } + return pc_deps; +} + +} // namespace + +absl::optional RemotePeerAudioConfig::Create( + absl::optional config) { + if (!config) { + return absl::nullopt; + } + return RemotePeerAudioConfig(config.value()); +} + +std::unique_ptr TestPeerFactory::CreateTestPeer( + std::unique_ptr configurer, + std::unique_ptr observer, + absl::optional remote_audio_config, + double bitrate_multiplier, + absl::optional + echo_emulation_config) { + std::unique_ptr components = + configurer->ReleaseComponents(); + std::unique_ptr params = configurer->ReleaseParams(); + std::vector video_sources = + configurer->ReleaseVideoSources(); + RTC_DCHECK(components); + RTC_DCHECK(params); + RTC_DCHECK_EQ(params->video_configs.size(), video_sources.size()); + SetMandatoryEntities(components.get(), time_controller_); + params->rtc_configuration.sdp_semantics = SdpSemantics::kUnifiedPlan; + + // Create peer connection factory. 
+ rtc::scoped_refptr audio_processing = + webrtc::AudioProcessingBuilder().Create(); + if (params->aec_dump_path && audio_processing) { + audio_processing->CreateAndAttachAecDump(*params->aec_dump_path, -1, + task_queue_); + } + rtc::scoped_refptr audio_device_module = + CreateAudioDeviceModule( + params->audio_config, remote_audio_config, echo_emulation_config, + components->pcf_dependencies->task_queue_factory.get()); + WrapVideoEncoderFactory( + params->name.value(), bitrate_multiplier, + CalculateRequiredSpatialIndexPerStream(params->video_configs), + components->pcf_dependencies.get(), video_analyzer_helper_); + WrapVideoDecoderFactory(params->name.value(), + components->pcf_dependencies.get(), + video_analyzer_helper_); + std::unique_ptr media_engine = + CreateMediaEngine(components->pcf_dependencies.get(), audio_device_module, + audio_processing); + + std::unique_ptr worker_thread = + time_controller_.CreateThread("worker_thread"); + PeerConnectionFactoryDependencies pcf_deps = CreatePCFDependencies( + std::move(components->pcf_dependencies), std::move(media_engine), + signaling_thread_, worker_thread.get(), components->network_thread); + rtc::scoped_refptr peer_connection_factory = + CreateModularPeerConnectionFactory(std::move(pcf_deps)); + + // Create peer connection. 
+ PeerConnectionDependencies pc_deps = CreatePCDependencies( + observer.get(), std::move(components->pc_dependencies)); + rtc::scoped_refptr peer_connection = + peer_connection_factory->CreatePeerConnection(params->rtc_configuration, + std::move(pc_deps)); + peer_connection->SetBitrate(params->bitrate_settings); + + return absl::WrapUnique(new TestPeer( + peer_connection_factory, peer_connection, std::move(observer), + std::move(params), std::move(video_sources), audio_processing, + std::move(worker_thread))); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/test/pc/e2e/test_peer_factory.h b/test/pc/e2e/test_peer_factory.h new file mode 100644 index 0000000000..df33406270 --- /dev/null +++ b/test/pc/e2e/test_peer_factory.h @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef TEST_PC_E2E_TEST_PEER_FACTORY_H_ +#define TEST_PC_E2E_TEST_PEER_FACTORY_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/time_controller.h" +#include "modules/audio_device/include/test_audio_device.h" +#include "rtc_base/task_queue.h" +#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" +#include "test/pc/e2e/peer_configurer.h" +#include "test/pc/e2e/peer_connection_quality_test_params.h" +#include "test/pc/e2e/test_peer.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +struct RemotePeerAudioConfig { + explicit RemotePeerAudioConfig( + PeerConnectionE2EQualityTestFixture::AudioConfig config) + : sampling_frequency_in_hz(config.sampling_frequency_in_hz), + output_file_name(config.output_dump_file_name) {} + + static absl::optional Create( + absl::optional config); + + int sampling_frequency_in_hz; + absl::optional output_file_name; +}; + +class TestPeerFactory { + public: + // Creates a test peer factory. + // |signaling_thread| will be used as a signaling thread for all peers created + // by this factory. + // |time_controller| will be used to create required threads, task queue + // factories and call factory. + // |video_analyzer_helper| will be used to setup video quality analysis for + // created peers. + // |task_queue| will be used for AEC dump if it is requested. 
+ TestPeerFactory(rtc::Thread* signaling_thread, + TimeController& time_controller, + VideoQualityAnalyzerInjectionHelper* video_analyzer_helper, + rtc::TaskQueue* task_queue) + : signaling_thread_(signaling_thread), + time_controller_(time_controller), + video_analyzer_helper_(video_analyzer_helper), + task_queue_(task_queue) {} + + // Setups all components, that should be provided to WebRTC + // PeerConnectionFactory and PeerConnection creation methods, + // also will setup dependencies, that are required for media analyzers + // injection. + std::unique_ptr CreateTestPeer( + std::unique_ptr configurer, + std::unique_ptr observer, + absl::optional remote_audio_config, + double bitrate_multiplier, + absl::optional + echo_emulation_config); + + private: + rtc::Thread* signaling_thread_; + TimeController& time_controller_; + VideoQualityAnalyzerInjectionHelper* video_analyzer_helper_; + rtc::TaskQueue* task_queue_; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // TEST_PC_E2E_TEST_PEER_FACTORY_H_ diff --git a/common_audio/third_party/fft4g/BUILD.gn b/test/pc/sctp/BUILD.gn similarity index 52% rename from common_audio/third_party/fft4g/BUILD.gn rename to test/pc/sctp/BUILD.gn index 789df92403..93ae1bf59c 100644 --- a/common_audio/third_party/fft4g/BUILD.gn +++ b/test/pc/sctp/BUILD.gn @@ -1,16 +1,15 @@ -# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # # Use of this source code is governed by a BSD-style license -# that can be found in the ../../../LICENSE file in the root of the source +# that can be found in the LICENSE file in the root of the source # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
import("../../../webrtc.gni") -rtc_library("fft4g") { - sources = [ - "fft4g.c", - "fft4g.h", - ] +rtc_source_set("fake_sctp_transport") { + visibility = [ "*" ] + sources = [ "fake_sctp_transport.h" ] + deps = [ "../../../media:rtc_data" ] } diff --git a/pc/test/fake_sctp_transport.h b/test/pc/sctp/fake_sctp_transport.h similarity index 91% rename from pc/test/fake_sctp_transport.h rename to test/pc/sctp/fake_sctp_transport.h index 50e59f1fc2..5fdb3bbe42 100644 --- a/pc/test/fake_sctp_transport.h +++ b/test/pc/sctp/fake_sctp_transport.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef PC_TEST_FAKE_SCTP_TRANSPORT_H_ -#define PC_TEST_FAKE_SCTP_TRANSPORT_H_ +#ifndef TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_ +#define TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_ #include @@ -49,7 +49,7 @@ class FakeSctpTransport : public cricket::SctpTransportInternal { int max_message_size_; }; -class FakeSctpTransportFactory : public cricket::SctpTransportInternalFactory { +class FakeSctpTransportFactory : public webrtc::SctpTransportFactoryInterface { public: std::unique_ptr CreateSctpTransport( rtc::PacketTransportInternal*) override { @@ -66,4 +66,4 @@ class FakeSctpTransportFactory : public cricket::SctpTransportInternalFactory { FakeSctpTransport* last_fake_sctp_transport_ = nullptr; }; -#endif // PC_TEST_FAKE_SCTP_TRANSPORT_H_ +#endif // TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_ diff --git a/test/peer_scenario/BUILD.gn b/test/peer_scenario/BUILD.gn index d702cf539f..70a7471591 100644 --- a/test/peer_scenario/BUILD.gn +++ b/test/peer_scenario/BUILD.gn @@ -37,6 +37,7 @@ if (rtc_include_tests) { "../../api/audio_codecs:builtin_audio_encoder_factory", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue:default_task_queue_factory", + "../../api/transport:field_trial_based_config", "../../api/video_codecs:builtin_video_decoder_factory", "../../api/video_codecs:builtin_video_encoder_factory", "../../media:rtc_audio_video", @@ -52,6 +53,8 @@ if 
(rtc_include_tests) { "../network:emulated_network", "../scenario", "../time_controller", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/memory", ] diff --git a/test/peer_scenario/peer_scenario.h b/test/peer_scenario/peer_scenario.h index 8d6ad1b00c..eeade99cf4 100644 --- a/test/peer_scenario/peer_scenario.h +++ b/test/peer_scenario/peer_scenario.h @@ -87,7 +87,7 @@ class PeerScenario { // Waits on |event| while processing messages on the signaling thread. bool WaitAndProcess(std::atomic* event, - TimeDelta max_duration = TimeDelta::seconds(5)); + TimeDelta max_duration = TimeDelta::Seconds(5)); // Process messages on the signaling thread for the given duration. void ProcessMessages(TimeDelta duration); diff --git a/test/peer_scenario/peer_scenario_client.cc b/test/peer_scenario/peer_scenario_client.cc index da8dec80e3..681a90704f 100644 --- a/test/peer_scenario/peer_scenario_client.cc +++ b/test/peer_scenario/peer_scenario_client.cc @@ -18,6 +18,7 @@ #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/test/create_time_controller.h" +#include "api/transport/field_trial_based_config.h" #include "api/video_codecs/builtin_video_decoder_factory.h" #include "api/video_codecs/builtin_video_encoder_factory.h" #include "media/engine/webrtc_media_engine.h" @@ -125,13 +126,11 @@ class FakeVideoEncoderFactory : public VideoEncoderFactory { CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override { RTC_CHECK_EQ(format.name, "VP8"); CodecInfo info; - info.has_internal_source = false; - info.is_hardware_accelerated = false; return info; } std::unique_ptr CreateVideoEncoder( const SdpVideoFormat& format) override { - return std::make_unique(clock_); + return std::make_unique(clock_); } private: @@ -199,6 +198,7 @@ PeerScenarioClient::PeerScenarioClient( net->time_controller()->CreateTaskQueueFactory(); pcf_deps.event_log_factory = 
std::make_unique(task_queue_factory_); + pcf_deps.trials = std::make_unique(); cricket::MediaEngineDependencies media_deps; media_deps.task_queue_factory = task_queue_factory_; @@ -223,6 +223,7 @@ PeerScenarioClient::PeerScenarioClient( } media_deps.audio_encoder_factory = CreateBuiltinAudioEncoderFactory(); media_deps.audio_decoder_factory = CreateBuiltinAudioDecoderFactory(); + media_deps.trials = pcf_deps.trials.get(); pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); pcf_deps.fec_controller_factory = nullptr; @@ -230,6 +231,9 @@ PeerScenarioClient::PeerScenarioClient( pcf_deps.network_state_predictor_factory = nullptr; pc_factory_ = CreateModularPeerConnectionFactory(std::move(pcf_deps)); + PeerConnectionFactoryInterface::Options pc_options; + pc_options.disable_encryption = config.disable_encryption; + pc_factory_->SetOptions(pc_options); PeerConnectionDependencies pc_deps(observer_.get()); pc_deps.allocator = @@ -287,14 +291,17 @@ void PeerScenarioClient::AddVideoReceiveSink( } void PeerScenarioClient::CreateAndSetSdp( + std::function munge_offer, std::function offer_handler) { RTC_DCHECK_RUN_ON(signaling_thread_); peer_connection_->CreateOffer( SdpCreateObserver([=](SessionDescriptionInterface* offer) { RTC_DCHECK_RUN_ON(signaling_thread_); + if (munge_offer) { + munge_offer(offer); + } std::string sdp_offer; - offer->ToString(&sdp_offer); - RTC_LOG(LS_INFO) << sdp_offer; + RTC_CHECK(offer->ToString(&sdp_offer)); peer_connection_->SetLocalDescription( SdpSetObserver( [sdp_offer, offer_handler]() { offer_handler(sdp_offer); }), diff --git a/test/peer_scenario/peer_scenario_client.h b/test/peer_scenario/peer_scenario_client.h index 6e82b23567..65ad0734db 100644 --- a/test/peer_scenario/peer_scenario_client.h +++ b/test/peer_scenario/peer_scenario_client.h @@ -89,6 +89,7 @@ class PeerScenarioClient { {0, EmulatedEndpointConfig()}}; CallbackHandlers handlers; PeerConnectionInterface::RTCConfiguration rtc_config; + bool disable_encryption 
= false; Config() { rtc_config.sdp_semantics = SdpSemantics::kUnifiedPlan; } }; @@ -98,8 +99,8 @@ class PeerScenarioClient { }; struct AudioSendTrack { - AudioTrackInterface* track; - RtpSenderInterface* sender; + rtc::scoped_refptr track; + rtc::scoped_refptr sender; }; struct VideoSendTrack { @@ -136,9 +137,13 @@ class PeerScenarioClient { CallbackHandlers* handlers() { return &handlers_; } - // Note that there's no provision for munging SDP as that is deprecated - // behavior. - void CreateAndSetSdp(std::function offer_handler); + // The |munge_offer| function can be used to munge the SDP, i.e. modify a + // local description afer creating it but before setting it. Note that this is + // legacy behavior. It's added here only to be able to have test coverage for + // scenarios even if they are not spec compliant. + void CreateAndSetSdp( + std::function munge_offer, + std::function offer_handler); void SetSdpOfferAndGetAnswer(std::string remote_offer, std::function answer_handler); void SetSdpAnswer( diff --git a/test/peer_scenario/scenario_connection.cc b/test/peer_scenario/scenario_connection.cc index d6d2880920..92082f5097 100644 --- a/test/peer_scenario/scenario_connection.cc +++ b/test/peer_scenario/scenario_connection.cc @@ -85,7 +85,7 @@ ScenarioIceConnectionImpl::ScenarioIceConnectionImpl( signaling_thread_(rtc::Thread::Current()), network_thread_(manager_->network_thread()), certificate_(rtc::RTCCertificate::Create( - absl::WrapUnique(rtc::SSLIdentity::Generate("", ::rtc::KT_DEFAULT)))), + rtc::SSLIdentity::Create("", ::rtc::KT_DEFAULT))), transport_description_( /*transport_options*/ {}, rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), diff --git a/test/peer_scenario/signaling_route.cc b/test/peer_scenario/signaling_route.cc index 1e5b9aad9a..2e0213df16 100644 --- a/test/peer_scenario/signaling_route.cc +++ b/test/peer_scenario/signaling_route.cc @@ -58,9 +58,10 @@ void StartSdpNegotiation( PeerScenarioClient* callee, TrafficRoute* send_route, 
TrafficRoute* ret_route, + std::function munge_offer, std::function modify_offer, std::function exchange_finished) { - caller->CreateAndSetSdp([=](std::string sdp_offer) { + caller->CreateAndSetSdp(munge_offer, [=](std::string sdp_offer) { if (modify_offer) { auto offer = CreateSessionDescription(SdpType::kOffer, sdp_offer); modify_offer(offer.get()); @@ -92,15 +93,22 @@ void SignalingRoute::StartIceSignaling() { } void SignalingRoute::NegotiateSdp( + std::function munge_offer, std::function modify_offer, std::function exchange_finished) { - StartSdpNegotiation(caller_, callee_, send_route_, ret_route_, modify_offer, - exchange_finished); + StartSdpNegotiation(caller_, callee_, send_route_, ret_route_, munge_offer, + modify_offer, exchange_finished); +} + +void SignalingRoute::NegotiateSdp( + std::function modify_offer, + std::function exchange_finished) { + NegotiateSdp({}, modify_offer, exchange_finished); } void SignalingRoute::NegotiateSdp( std::function exchange_finished) { - NegotiateSdp({}, exchange_finished); + NegotiateSdp({}, {}, exchange_finished); } } // namespace test diff --git a/test/peer_scenario/signaling_route.h b/test/peer_scenario/signaling_route.h index 189c4b6f3f..7434551d3f 100644 --- a/test/peer_scenario/signaling_route.h +++ b/test/peer_scenario/signaling_route.h @@ -30,7 +30,19 @@ class SignalingRoute { void StartIceSignaling(); + // The |modify_offer| callback is used to modify an offer after the local + // description has been set. This is legal (but odd) behavior. + // The |munge_offer| callback is used to modify an offer between its creation + // and set local description. This behavior is forbidden according to the spec + // but available here in order to allow test coverage on corner cases. + // The |exchange_finished| callback is called with the answer produced after + // SDP negotations has completed. // TODO(srte): Handle lossy links. 
+ void NegotiateSdp( + std::function munge_offer, + std::function modify_offer, + std::function + exchange_finished); void NegotiateSdp( std::function modify_offer, std::function diff --git a/test/peer_scenario/tests/BUILD.gn b/test/peer_scenario/tests/BUILD.gn index 35528626f8..0cf7cf3472 100644 --- a/test/peer_scenario/tests/BUILD.gn +++ b/test/peer_scenario/tests/BUILD.gn @@ -14,12 +14,16 @@ if (rtc_include_tests) { sources = [ "peer_scenario_quality_test.cc", "remote_estimate_test.cc", + "unsignaled_stream_test.cc", ] deps = [ "..:peer_scenario", "../../:field_trial", + "../../:rtp_test_utils", "../../:test_support", + "../../../media:rtc_media_base", "../../../modules/rtp_rtcp:rtp_rtcp", + "../../../modules/rtp_rtcp:rtp_rtcp_format", "../../../pc:rtc_pc_base", ] } diff --git a/test/peer_scenario/tests/peer_scenario_quality_test.cc b/test/peer_scenario/tests/peer_scenario_quality_test.cc index 16ba707e1c..5d69a0923f 100644 --- a/test/peer_scenario/tests/peer_scenario_quality_test.cc +++ b/test/peer_scenario/tests/peer_scenario_quality_test.cc @@ -10,11 +10,16 @@ #include "test/gtest.h" #include "test/peer_scenario/peer_scenario.h" +#include "test/peer_scenario/peer_scenario_client.h" namespace webrtc { namespace test { - -TEST(PeerScenarioQualityTest, PsnrIsCollected) { +#if defined(WEBRTC_WIN) +#define MAYBE_PsnrIsCollected DISABLED_PsnrIsCollected +#else +#define MAYBE_PsnrIsCollected PsnrIsCollected +#endif +TEST(PeerScenarioQualityTest, MAYBE_PsnrIsCollected) { VideoQualityAnalyzer analyzer; { PeerScenario s(*test_info_); @@ -27,7 +32,7 @@ TEST(PeerScenarioQualityTest, PsnrIsCollected) { s.AttachVideoQualityAnalyzer(&analyzer, video.track, callee); s.SimpleConnection(caller, callee, {link_builder.Build().node}, {link_builder.Build().node}); - s.ProcessMessages(TimeDelta::seconds(2)); + s.ProcessMessages(TimeDelta::Seconds(2)); // Exit scope to ensure that there's no pending tasks reporting to analyzer. 
} diff --git a/test/peer_scenario/tests/unsignaled_stream_test.cc b/test/peer_scenario/tests/unsignaled_stream_test.cc new file mode 100644 index 0000000000..5f470a833b --- /dev/null +++ b/test/peer_scenario/tests/unsignaled_stream_test.cc @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "media/base/stream_params.h" +#include "modules/rtp_rtcp/source/byte_io.h" + +#include "pc/media_session.h" +#include "pc/session_description.h" +#include "test/field_trial.h" +#include "test/peer_scenario/peer_scenario.h" +#include "test/rtp_header_parser.h" + +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +class FrameObserver : public rtc::VideoSinkInterface { + public: + FrameObserver() : frame_observed_(false) {} + void OnFrame(const VideoFrame&) override { frame_observed_ = true; } + + std::atomic frame_observed_; +}; + +uint32_t get_ssrc(SessionDescriptionInterface* offer, size_t track_index) { + EXPECT_LT(track_index, offer->description()->contents().size()); + return offer->description() + ->contents()[track_index] + .media_description() + ->streams()[0] + .ssrcs[0]; +} + +void set_ssrc(SessionDescriptionInterface* offer, size_t index, uint32_t ssrc) { + EXPECT_LT(index, offer->description()->contents().size()); + cricket::StreamParams& new_stream_params = offer->description() + ->contents()[index] + .media_description() + ->mutable_streams()[0]; + new_stream_params.ssrcs[0] = ssrc; + new_stream_params.ssrc_groups[0].ssrcs[0] = ssrc; +} + +} // namespace + +TEST(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) 
{ + // This test covers a scenario that might occur if a remote client starts + // sending media packets before negotiation has completed. These packets will + // trigger an unsignalled default stream to be created, and connects that to + // a default video sink. + // In some edge cases using unified plan, the default stream is create in a + // different transceiver to where the media SSRC will actually be used. + // This test verifies that the default stream is removed properly, and that + // packets are demuxed and video frames reach the desired sink. + + // Defined before PeerScenario so it gets destructed after, to avoid use after + // free. + PeerScenario s(*test_info_); + + PeerScenarioClient::Config config = PeerScenarioClient::Config(); + // Disable encryption so that we can inject a fake early media packet without + // triggering srtp failures. + config.disable_encryption = true; + auto* caller = s.CreateClient(config); + auto* callee = s.CreateClient(config); + + auto send_node = s.net()->NodeBuilder().Build().node; + auto ret_node = s.net()->NodeBuilder().Build().node; + + s.net()->CreateRoute(caller->endpoint(), {send_node}, callee->endpoint()); + s.net()->CreateRoute(callee->endpoint(), {ret_node}, caller->endpoint()); + + auto signaling = s.ConnectSignaling(caller, callee, {send_node}, {ret_node}); + PeerScenarioClient::VideoSendTrackConfig video_conf; + video_conf.generator.squares_video->framerate = 15; + + auto first_track = caller->CreateVideo("VIDEO", video_conf); + FrameObserver first_sink; + callee->AddVideoReceiveSink(first_track.track->id(), &first_sink); + + signaling.StartIceSignaling(); + std::atomic offer_exchange_done(false); + std::atomic got_unsignaled_packet(false); + + // We will capture the media ssrc of the first added stream, and preemptively + // inject a new media packet using a different ssrc. + // This will create "default stream" for the second ssrc and connected it to + // the default video sink (not set in this test). 
+ uint32_t first_ssrc = 0; + uint32_t second_ssrc = 0; + + signaling.NegotiateSdp( + /* munge_sdp = */ {}, + /* modify_sdp = */ + [&](SessionDescriptionInterface* offer) { + first_ssrc = get_ssrc(offer, 0); + second_ssrc = first_ssrc + 1; + + send_node->router()->SetWatcher([&](const EmulatedIpPacket& packet) { + if (packet.size() > 1 && packet.cdata()[0] >> 6 == 2 && + !RtpHeaderParser::IsRtcp(packet.data.cdata(), + packet.data.size())) { + if (ByteReader::ReadBigEndian(&(packet.cdata()[8])) == + first_ssrc && + !got_unsignaled_packet) { + rtc::CopyOnWriteBuffer updated_buffer = packet.data; + ByteWriter::WriteBigEndian(&updated_buffer.data()[8], + second_ssrc); + EmulatedIpPacket updated_packet( + packet.from, packet.to, updated_buffer, packet.arrival_time); + send_node->OnPacketReceived(std::move(updated_packet)); + got_unsignaled_packet = true; + } + } + }); + }, + [&](const SessionDescriptionInterface& answer) { + EXPECT_EQ(answer.description()->contents().size(), 1u); + offer_exchange_done = true; + }); + EXPECT_TRUE(s.WaitAndProcess(&offer_exchange_done)); + EXPECT_TRUE(s.WaitAndProcess(&got_unsignaled_packet)); + EXPECT_TRUE(s.WaitAndProcess(&first_sink.frame_observed_)); + + auto second_track = caller->CreateVideo("VIDEO2", video_conf); + FrameObserver second_sink; + callee->AddVideoReceiveSink(second_track.track->id(), &second_sink); + + // Create a second video stream, munge the sdp to force it to use our fake + // early media ssrc. 
+ offer_exchange_done = false; + signaling.NegotiateSdp( + /* munge_sdp = */ + [&](SessionDescriptionInterface* offer) { + set_ssrc(offer, 1, second_ssrc); + }, + /* modify_sdp = */ {}, + [&](const SessionDescriptionInterface& answer) { + EXPECT_EQ(answer.description()->contents().size(), 2u); + offer_exchange_done = true; + }); + EXPECT_TRUE(s.WaitAndProcess(&offer_exchange_done)); + EXPECT_TRUE(s.WaitAndProcess(&second_sink.frame_observed_)); +} + +} // namespace test +} // namespace webrtc diff --git a/test/rtp_header_parser.cc b/test/rtp_header_parser.cc index 713e64d83c..45686acb4c 100644 --- a/test/rtp_header_parser.cc +++ b/test/rtp_header_parser.cc @@ -13,7 +13,7 @@ #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_utility.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -34,9 +34,8 @@ class RtpHeaderParserImpl : public RtpHeaderParser { bool DeregisterRtpHeaderExtension(RtpExtension extension) override; private: - rtc::CriticalSection critical_section_; - RtpHeaderExtensionMap rtp_header_extension_map_ - RTC_GUARDED_BY(critical_section_); + mutable Mutex mutex_; + RtpHeaderExtensionMap rtp_header_extension_map_ RTC_GUARDED_BY(mutex_); }; std::unique_ptr RtpHeaderParser::CreateForTest() { @@ -68,7 +67,7 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet, RtpHeaderExtensionMap map; { - rtc::CritScope cs(&critical_section_); + MutexLock lock(&mutex_); map = rtp_header_extension_map_; } @@ -79,24 +78,24 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet, return true; } bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RtpExtension extension) { - rtc::CritScope cs(&critical_section_); + MutexLock lock(&mutex_); return rtp_header_extension_map_.RegisterByUri(extension.id, extension.uri); } bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id) { - rtc::CritScope 
cs(&critical_section_); + MutexLock lock(&mutex_); return rtp_header_extension_map_.RegisterByType(id, type); } bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RtpExtension extension) { - rtc::CritScope cs(&critical_section_); + MutexLock lock(&mutex_); return rtp_header_extension_map_.Deregister( rtp_header_extension_map_.GetType(extension.id)); } bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RTPExtensionType type) { - rtc::CritScope cs(&critical_section_); + MutexLock lock(&mutex_); return rtp_header_extension_map_.Deregister(type) == 0; } } // namespace webrtc diff --git a/test/rtp_rtcp_observer.h b/test/rtp_rtcp_observer.h index 3bfa475f73..036f5cdc20 100644 --- a/test/rtp_rtcp_observer.h +++ b/test/rtp_rtcp_observer.h @@ -18,7 +18,6 @@ #include "api/test/simulated_network.h" #include "call/simulated_packet_receiver.h" #include "call/video_send_stream.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "system_wrappers/include/field_trial.h" #include "test/direct_transport.h" diff --git a/test/run_loop.cc b/test/run_loop.cc index 1fc200f929..643da5d56e 100644 --- a/test/run_loop.cc +++ b/test/run_loop.cc @@ -9,15 +9,65 @@ */ #include "test/run_loop.h" -#include +#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { namespace test { -void PressEnterToContinue(TaskQueueBase* /*task_queue*/) { - puts(">> Press ENTER to continue..."); - while (getc(stdin) != '\n' && !feof(stdin)) - ; +RunLoop::RunLoop() { + worker_thread_.WrapCurrent(); } + +RunLoop::~RunLoop() { + worker_thread_.UnwrapCurrent(); +} + +TaskQueueBase* RunLoop::task_queue() { + return &worker_thread_; +} + +void RunLoop::Run() { + worker_thread_.ProcessMessages(WorkerThread::kForever); +} + +void RunLoop::Quit() { + socket_server_.FailNextWait(); +} + +void RunLoop::Flush() { + worker_thread_.PostTask( + ToQueuedTask([this]() { socket_server_.FailNextWait(); })); + worker_thread_.ProcessMessages(1000); +} + 
+RunLoop::FakeSocketServer::FakeSocketServer() = default; +RunLoop::FakeSocketServer::~FakeSocketServer() = default; + +void RunLoop::FakeSocketServer::FailNextWait() { + fail_next_wait_ = true; +} + +bool RunLoop::FakeSocketServer::Wait(int cms, bool process_io) { + if (fail_next_wait_) { + fail_next_wait_ = false; + return false; + } + return true; +} + +void RunLoop::FakeSocketServer::WakeUp() {} + +rtc::Socket* RunLoop::FakeSocketServer::CreateSocket(int family, int type) { + return nullptr; +} + +rtc::AsyncSocket* RunLoop::FakeSocketServer::CreateAsyncSocket(int family, + int type) { + return nullptr; +} + +RunLoop::WorkerThread::WorkerThread(rtc::SocketServer* ss) + : rtc::Thread(ss), tq_setter_(this) {} + } // namespace test } // namespace webrtc diff --git a/test/run_loop.h b/test/run_loop.h index 414e72c65b..f350b2ce93 100644 --- a/test/run_loop.h +++ b/test/run_loop.h @@ -10,13 +10,69 @@ #ifndef TEST_RUN_LOOP_H_ #define TEST_RUN_LOOP_H_ -#include "api/task_queue/task_queue_base.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread.h" namespace webrtc { namespace test { -// Blocks until the user presses enter. -void PressEnterToContinue(TaskQueueBase* task_queue); +// This utility class allows you to run a TaskQueue supported interface on the +// main test thread, call Run() while doing things asynchonously and break +// the loop (from the same thread) from a callback by calling Quit(). +class RunLoop { + public: + RunLoop(); + ~RunLoop(); + + TaskQueueBase* task_queue(); + + void Run(); + void Quit(); + + void Flush(); + + // Convenience methods since TaskQueueBase doesn't support this sort of magic. 
+ template + void PostTask(Closure&& task) { + task_queue()->PostTask(ToQueuedTask(std::forward(task))); + } + + template + void PostDelayedTask(Closure&& task, uint32_t milliseconds) { + task_queue()->PostDelayedTask(ToQueuedTask(std::forward(task)), + milliseconds); + } + + private: + class FakeSocketServer : public rtc::SocketServer { + public: + FakeSocketServer(); + ~FakeSocketServer(); + + void FailNextWait(); + + private: + bool Wait(int cms, bool process_io) override; + void WakeUp() override; + + rtc::Socket* CreateSocket(int family, int type) override; + rtc::AsyncSocket* CreateAsyncSocket(int family, int type) override; + + private: + bool fail_next_wait_ = false; + }; + + class WorkerThread : public rtc::Thread { + public: + explicit WorkerThread(rtc::SocketServer* ss); + + private: + CurrentTaskQueueSetter tq_setter_; + }; + + FakeSocketServer socket_server_; + WorkerThread worker_thread_{&socket_server_}; +}; } // namespace test } // namespace webrtc diff --git a/test/run_loop_unittest.cc b/test/run_loop_unittest.cc new file mode 100644 index 0000000000..160aba0716 --- /dev/null +++ b/test/run_loop_unittest.cc @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "test/run_loop.h" + +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(RunLoopTest, TaskQueueOnThread) { + test::RunLoop loop; + EXPECT_EQ(TaskQueueBase::Current(), loop.task_queue()); + EXPECT_TRUE(loop.task_queue()->IsCurrent()); +} + +TEST(RunLoopTest, Flush) { + test::RunLoop loop; + int counter = 0; + loop.PostTask([&counter]() { ++counter; }); + EXPECT_EQ(counter, 0); + loop.Flush(); + EXPECT_EQ(counter, 1); +} + +TEST(RunLoopTest, Delayed) { + test::RunLoop loop; + bool ran = false; + loop.PostDelayedTask( + [&ran, &loop]() { + ran = true; + loop.Quit(); + }, + 100); + loop.Flush(); + EXPECT_FALSE(ran); + loop.Run(); + EXPECT_TRUE(ran); +} + +TEST(RunLoopTest, PostAndQuit) { + test::RunLoop loop; + bool ran = false; + loop.PostTask([&ran, &loop]() { + ran = true; + loop.Quit(); + }); + loop.Run(); + EXPECT_TRUE(ran); +} + +} // namespace webrtc diff --git a/test/scenario/BUILD.gn b/test/scenario/BUILD.gn index ed66936f3e..f5c22fcafb 100644 --- a/test/scenario/BUILD.gn +++ b/test/scenario/BUILD.gn @@ -97,7 +97,6 @@ if (rtc_include_tests) { "../../api/units:timestamp", "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:video_frame", - "../../api/video:video_frame_i420", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../audio", @@ -130,9 +129,11 @@ if (rtc_include_tests) { "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:rtc_numerics", + "../../rtc_base:rtc_stats_counters", "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_minmax", "../../rtc_base:task_queue_for_test", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", @@ -141,9 +142,12 @@ if (rtc_include_tests) { "../logging:log_writer", "../network:emulated_network", "../time_controller", + ] 
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] if (is_android) { @@ -163,6 +167,7 @@ if (rtc_include_tests) { testonly = true sources = [ "performance_stats_unittest.cc", + "probing_test.cc", "scenario_unittest.cc", "stats_collection_unittest.cc", "video_stream_unittest.cc", diff --git a/test/scenario/audio_stream.cc b/test/scenario/audio_stream.cc index 2738f6952c..f3cb8320aa 100644 --- a/test/scenario/audio_stream.cc +++ b/test/scenario/audio_stream.cc @@ -79,7 +79,7 @@ SendAudioStream::SendAudioStream( SdpAudioFormat::Parameters sdp_params; if (config.source.channels == 2) sdp_params["stereo"] = "1"; - if (config.encoder.initial_frame_length != TimeDelta::ms(20)) + if (config.encoder.initial_frame_length != TimeDelta::Millis(20)) sdp_params["ptime"] = std::to_string(config.encoder.initial_frame_length.ms()); if (config.encoder.enable_dtx) @@ -96,8 +96,9 @@ SendAudioStream::SendAudioStream( if (config.encoder.fixed_rate) send_config.send_codec_spec->target_bitrate_bps = config.encoder.fixed_rate->bps(); - - if (config.network_adaptation) { + if (!config.adapt.binary_proto.empty()) { + send_config.audio_network_adaptor_config = config.adapt.binary_proto; + } else if (config.network_adaptation) { send_config.audio_network_adaptor_config = CreateAdaptationString(config.adapt); } @@ -152,7 +153,7 @@ void SendAudioStream::Stop() { } void SendAudioStream::SetMuted(bool mute) { - send_stream_->SetMuted(mute); + sender_->SendTask([this, mute] { send_stream_->SetMuted(mute); }); } ColumnPrinter SendAudioStream::StatsPrinter() { @@ -211,7 +212,9 @@ void ReceiveAudioStream::Stop() { AudioReceiveStream::Stats ReceiveAudioStream::GetStats() const { AudioReceiveStream::Stats result; - receiver_->SendTask([&] { result = receive_stream_->GetStats(); }); + receiver_->SendTask([&] { + result = 
receive_stream_->GetStats(/*get_and_clear_legacy_stats=*/true); + }); return result; } diff --git a/test/scenario/call_client.cc b/test/scenario/call_client.cc index fb888df694..ab529fae9a 100644 --- a/test/scenario/call_client.cc +++ b/test/scenario/call_client.cc @@ -54,7 +54,8 @@ Call* CreateCall(TimeController* time_controller, RtcEventLog* event_log, CallClientConfig config, LoggingNetworkControllerFactory* network_controller_factory, - rtc::scoped_refptr audio_state) { + rtc::scoped_refptr audio_state, + rtc::scoped_refptr call_thread) { CallConfig call_config(event_log); call_config.bitrate_config.max_bitrate_bps = config.transport.rates.max_rate.bps_or(-1); @@ -67,7 +68,7 @@ Call* CreateCall(TimeController* time_controller, call_config.audio_state = audio_state; call_config.trials = config.field_trials; return Call::Create(call_config, time_controller->GetClock(), - time_controller->CreateProcessThread("CallModules"), + std::move(call_thread), time_controller->CreateProcessThread("Pacer")); } @@ -213,9 +214,14 @@ CallClient::CallClient( event_log_ = CreateEventLog(time_controller_->GetTaskQueueFactory(), log_writer_factory_.get()); fake_audio_setup_ = InitAudio(time_controller_); + RTC_DCHECK(!module_thread_); + module_thread_ = SharedModuleThread::Create( + time_controller_->CreateProcessThread("CallThread"), + [this]() { module_thread_ = nullptr; }); + call_.reset(CreateCall(time_controller_, event_log_.get(), config, &network_controller_factory_, - fake_audio_setup_.audio_state)); + fake_audio_setup_.audio_state, module_thread_)); transport_ = std::make_unique(clock_, call_.get()); }); } @@ -223,6 +229,7 @@ CallClient::CallClient( CallClient::~CallClient() { SendTask([&] { call_.reset(); + RTC_DCHECK(!module_thread_); // Should be set to null in the lambda above. 
fake_audio_setup_ = {}; rtc::Event done; event_log_->StopLogging([&done] { done.Set(); }); @@ -262,6 +269,13 @@ DataRate CallClient::padding_rate() const { return network_controller_factory_.GetUpdate().pacer_config->pad_rate(); } +void CallClient::UpdateBitrateConstraints( + const BitrateConstraints& constraints) { + SendTask([this, &constraints]() { + call_->GetTransportControllerSend()->SetSdpBitrateParameters(constraints); + }); +} + void CallClient::OnPacketReceived(EmulatedIpPacket packet) { MediaType media_type = MediaType::ANY; if (!RtpHeaderParser::IsRtcp(packet.cdata(), packet.data.size())) { diff --git a/test/scenario/call_client.h b/test/scenario/call_client.h index 1fbe256531..a4f45e1e7e 100644 --- a/test/scenario/call_client.h +++ b/test/scenario/call_client.h @@ -104,15 +104,21 @@ class CallClient : public EmulatedNetworkReceiverInterface { ColumnPrinter StatsPrinter(); Call::Stats GetStats(); DataRate send_bandwidth() { - return DataRate::bps(GetStats().send_bandwidth_bps); + return DataRate::BitsPerSec(GetStats().send_bandwidth_bps); } DataRate target_rate() const; DataRate stable_target_rate() const; DataRate padding_rate() const; + void UpdateBitrateConstraints(const BitrateConstraints& constraints); void OnPacketReceived(EmulatedIpPacket packet) override; std::unique_ptr GetLogWriter(std::string name); + // Exposed publicly so that tests can execute tasks such as querying stats + // for media streams in the expected runtime environment (essentially what + // CallClient does internally for GetStats()). 
+ void SendTask(std::function task); + private: friend class Scenario; friend class CallClientPair; @@ -129,7 +135,6 @@ class CallClient : public EmulatedNetworkReceiverInterface { uint32_t GetNextAudioLocalSsrc(); uint32_t GetNextRtxSsrc(); void AddExtensions(std::vector extensions); - void SendTask(std::function task); int16_t Bind(EmulatedEndpoint* endpoint); void UnBind(); @@ -153,6 +158,8 @@ class CallClient : public EmulatedNetworkReceiverInterface { // Defined last so it's destroyed first. TaskQueueForTest task_queue_; + rtc::scoped_refptr module_thread_; + const FieldTrialBasedConfig field_trials_; }; diff --git a/test/scenario/network_node.cc b/test/scenario/network_node.cc index 48555203f3..702789fe73 100644 --- a/test/scenario/network_node.cc +++ b/test/scenario/network_node.cc @@ -13,6 +13,7 @@ #include #include +#include "rtc_base/net_helper.h" #include "rtc_base/numerics/safe_minmax.h" namespace webrtc { @@ -85,7 +86,7 @@ bool NetworkNodeTransport::SendRtp(const uint8_t* packet, sent_packet.info.packet_type = rtc::PacketType::kData; sender_call_->OnSentPacket(sent_packet); - rtc::CritScope crit(&crit_sect_); + MutexLock lock(&mutex_); if (!endpoint_) return false; rtc::CopyOnWriteBuffer buffer(packet, length); @@ -96,7 +97,7 @@ bool NetworkNodeTransport::SendRtp(const uint8_t* packet, bool NetworkNodeTransport::SendRtcp(const uint8_t* packet, size_t length) { rtc::CopyOnWriteBuffer buffer(packet, length); - rtc::CritScope crit(&crit_sect_); + MutexLock lock(&mutex_); if (!endpoint_) return false; endpoint_->SendPacket(local_address_, remote_address_, buffer, @@ -110,14 +111,17 @@ void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint, rtc::NetworkRoute route; route.connected = true; // We assume that the address will be unique in the lower bytes. 
- route.local_network_id = static_cast( - receiver_address.ipaddr().v4AddressAsHostOrderInteger()); - route.remote_network_id = static_cast( - receiver_address.ipaddr().v4AddressAsHostOrderInteger()); + route.local = rtc::RouteEndpoint::CreateWithNetworkId(static_cast( + receiver_address.ipaddr().v4AddressAsHostOrderInteger())); + route.remote = rtc::RouteEndpoint::CreateWithNetworkId(static_cast( + receiver_address.ipaddr().v4AddressAsHostOrderInteger())); + route.packet_overhead = packet_overhead.bytes() + + receiver_address.ipaddr().overhead() + + cricket::kUdpHeaderSize; { // Only IPv4 address is supported. RTC_CHECK_EQ(receiver_address.family(), AF_INET); - rtc::CritScope crit(&crit_sect_); + MutexLock lock(&mutex_); endpoint_ = endpoint; local_address_ = rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), 0); remote_address_ = receiver_address; @@ -130,7 +134,7 @@ void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint, } void NetworkNodeTransport::Disconnect() { - rtc::CritScope crit(&crit_sect_); + MutexLock lock(&mutex_); current_network_route_.connected = false; sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged( kDummyTransportName, current_network_route_); diff --git a/test/scenario/network_node.h b/test/scenario/network_node.h index b3d093b84e..ea8eb35daf 100644 --- a/test/scenario/network_node.h +++ b/test/scenario/network_node.h @@ -22,6 +22,7 @@ #include "call/simulated_network.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "test/network/network_emulation.h" #include "test/scenario/column_printer.h" @@ -65,19 +66,19 @@ class NetworkNodeTransport : public Transport { void Disconnect(); DataSize packet_overhead() { - rtc::CritScope crit(&crit_sect_); + MutexLock lock(&mutex_); return packet_overhead_; } private: - rtc::CriticalSection crit_sect_; + Mutex mutex_; Clock* const sender_clock_; Call* const 
sender_call_; - EmulatedEndpoint* endpoint_ RTC_GUARDED_BY(crit_sect_) = nullptr; - rtc::SocketAddress local_address_ RTC_GUARDED_BY(crit_sect_); - rtc::SocketAddress remote_address_ RTC_GUARDED_BY(crit_sect_); - DataSize packet_overhead_ RTC_GUARDED_BY(crit_sect_) = DataSize::Zero(); - rtc::NetworkRoute current_network_route_ RTC_GUARDED_BY(crit_sect_); + EmulatedEndpoint* endpoint_ RTC_GUARDED_BY(mutex_) = nullptr; + rtc::SocketAddress local_address_ RTC_GUARDED_BY(mutex_); + rtc::SocketAddress remote_address_ RTC_GUARDED_BY(mutex_); + DataSize packet_overhead_ RTC_GUARDED_BY(mutex_) = DataSize::Zero(); + rtc::NetworkRoute current_network_route_ RTC_GUARDED_BY(mutex_); }; } // namespace test } // namespace webrtc diff --git a/test/scenario/performance_stats_unittest.cc b/test/scenario/performance_stats_unittest.cc index 93ab1a109a..8d87c87745 100644 --- a/test/scenario/performance_stats_unittest.cc +++ b/test/scenario/performance_stats_unittest.cc @@ -17,10 +17,10 @@ namespace test { TEST(EventRateCounter, ReturnsCorrectTotalDuration) { EventRateCounter event_rate_counter; EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Zero()); - event_rate_counter.AddEvent(Timestamp::seconds(1)); + event_rate_counter.AddEvent(Timestamp::Seconds(1)); EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Zero()); - event_rate_counter.AddEvent(Timestamp::seconds(2)); - EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::seconds(1)); + event_rate_counter.AddEvent(Timestamp::Seconds(2)); + EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Seconds(1)); } } // namespace test diff --git a/test/scenario/probing_test.cc b/test/scenario/probing_test.cc new file mode 100644 index 0000000000..f08a003d5c --- /dev/null +++ b/test/scenario/probing_test.cc @@ -0,0 +1,133 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "test/gtest.h" +#include "test/scenario/scenario.h" + +namespace webrtc { +namespace test { + +TEST(ProbingTest, InitialProbingRampsUpTargetRateWhenNetworkIsGood) { + Scenario s; + NetworkSimulationConfig good_network; + good_network.bandwidth = DataRate::KilobitsPerSec(2000); + + VideoStreamConfig video_config; + video_config.encoder.codec = + VideoStreamConfig::Encoder::Codec::kVideoCodecVP8; + CallClientConfig send_config; + auto* caller = s.CreateClient("caller", send_config); + auto* callee = s.CreateClient("callee", CallClientConfig()); + auto route = + s.CreateRoutes(caller, {s.CreateSimulationNode(good_network)}, callee, + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), video_config); + + s.RunFor(TimeDelta::Seconds(1)); + EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps), + 3 * send_config.transport.rates.start_rate); +} + +TEST(ProbingTest, MidCallProbingRampupTriggeredByUpdatedBitrateConstraints) { + Scenario s; + + const DataRate kStartRate = DataRate::KilobitsPerSec(300); + const DataRate kConstrainedRate = DataRate::KilobitsPerSec(100); + const DataRate kHighRate = DataRate::KilobitsPerSec(2500); + + VideoStreamConfig video_config; + video_config.encoder.codec = + VideoStreamConfig::Encoder::Codec::kVideoCodecVP8; + CallClientConfig send_call_config; + send_call_config.transport.rates.start_rate = kStartRate; + send_call_config.transport.rates.max_rate = kHighRate * 2; + auto* caller = s.CreateClient("caller", send_call_config); + auto* callee = s.CreateClient("callee", CallClientConfig()); + auto route = s.CreateRoutes( + caller, 
{s.CreateSimulationNode(NetworkSimulationConfig())}, callee, + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), video_config); + + // Wait until initial probing rampup is done and then set a low max bitrate. + s.RunFor(TimeDelta::Seconds(1)); + EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps), + 5 * send_call_config.transport.rates.start_rate); + BitrateConstraints bitrate_config; + bitrate_config.max_bitrate_bps = kConstrainedRate.bps(); + caller->UpdateBitrateConstraints(bitrate_config); + + // Wait until the low send bitrate has taken effect, and then set a much + // higher max bitrate. + s.RunFor(TimeDelta::Seconds(2)); + EXPECT_LT(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps), + kConstrainedRate * 1.1); + bitrate_config.max_bitrate_bps = 2 * kHighRate.bps(); + caller->UpdateBitrateConstraints(bitrate_config); + + // Check that the max send bitrate is reached quicker than would be possible + // with simple AIMD rate control. + s.RunFor(TimeDelta::Seconds(1)); + EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps), + kHighRate); +} + +TEST(ProbingTest, ProbesRampsUpWhenVideoEncoderConfigChanges) { + Scenario s; + const DataRate kStartRate = DataRate::KilobitsPerSec(50); + const DataRate kHdRate = DataRate::KilobitsPerSec(3250); + + // Set up 3-layer simulcast. 
+ VideoStreamConfig video_config; + video_config.encoder.codec = + VideoStreamConfig::Encoder::Codec::kVideoCodecVP8; + video_config.encoder.layers.spatial = 3; + video_config.source.generator.width = 1280; + video_config.source.generator.height = 720; + + CallClientConfig send_call_config; + send_call_config.transport.rates.start_rate = kStartRate; + send_call_config.transport.rates.max_rate = kHdRate * 2; + auto* caller = s.CreateClient("caller", send_call_config); + auto* callee = s.CreateClient("callee", CallClientConfig()); + auto send_net = + s.CreateMutableSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(200); + }); + auto route = + s.CreateRoutes(caller, {send_net->node()}, callee, + {s.CreateSimulationNode(NetworkSimulationConfig())}); + auto* video_stream = s.CreateVideoStream(route->forward(), video_config); + + // Only QVGA enabled initially. Run until initial probing is done and BWE + // has settled. + video_stream->send()->UpdateActiveLayers({true, false, false}); + s.RunFor(TimeDelta::Seconds(2)); + + // Remove network constraints and run for a while more, BWE should be much + // less than required HD rate. + send_net->UpdateConfig([&](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::PlusInfinity(); + }); + s.RunFor(TimeDelta::Seconds(2)); + + DataRate bandwidth = + DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps); + EXPECT_LT(bandwidth, kHdRate / 4); + + // Enable all layers, triggering a probe. + video_stream->send()->UpdateActiveLayers({true, true, true}); + + // Run for a short while and verify BWE has ramped up fast. 
+ s.RunFor(TimeDelta::Seconds(2)); + EXPECT_GT(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps), + kHdRate); +} + +} // namespace test +} // namespace webrtc diff --git a/test/scenario/scenario.cc b/test/scenario/scenario.cc index 232cf06d41..c1c664a754 100644 --- a/test/scenario/scenario.cc +++ b/test/scenario/scenario.cc @@ -131,8 +131,8 @@ CallClientPair* Scenario::CreateRoutes( CallClient* second, std::vector return_link) { return CreateRoutes(first, send_link, - DataSize::bytes(PacketOverhead::kDefault), second, - return_link, DataSize::bytes(PacketOverhead::kDefault)); + DataSize::Bytes(PacketOverhead::kDefault), second, + return_link, DataSize::Bytes(PacketOverhead::kDefault)); } CallClientPair* Scenario::CreateRoutes( @@ -151,7 +151,7 @@ CallClientPair* Scenario::CreateRoutes( void Scenario::ChangeRoute(std::pair clients, std::vector over_nodes) { - ChangeRoute(clients, over_nodes, DataSize::bytes(PacketOverhead::kDefault)); + ChangeRoute(clients, over_nodes, DataSize::Bytes(PacketOverhead::kDefault)); } void Scenario::ChangeRoute(std::pair clients, diff --git a/test/scenario/scenario_config.h b/test/scenario/scenario_config.h index 282d47188f..c7320e9dc3 100644 --- a/test/scenario/scenario_config.h +++ b/test/scenario/scenario_config.h @@ -42,12 +42,12 @@ struct TransportControllerConfig { Rates(); Rates(const Rates&); ~Rates(); - DataRate min_rate = DataRate::kbps(30); - DataRate max_rate = DataRate::kbps(3000); - DataRate start_rate = DataRate::kbps(300); + DataRate min_rate = DataRate::KilobitsPerSec(30); + DataRate max_rate = DataRate::KilobitsPerSec(3000); + DataRate start_rate = DataRate::KilobitsPerSec(300); } rates; NetworkControllerFactoryInterface* cc_factory = nullptr; - TimeDelta state_log_interval = TimeDelta::ms(100); + TimeDelta state_log_interval = TimeDelta::Millis(100); }; struct CallClientConfig { @@ -61,10 +61,10 @@ struct PacketStreamConfig { ~PacketStreamConfig(); int frame_rate = 30; DataRate max_data_rate = 
DataRate::Infinity(); - DataSize max_packet_size = DataSize::bytes(1400); - DataSize min_frame_size = DataSize::bytes(100); + DataSize max_packet_size = DataSize::Bytes(1400); + DataSize min_frame_size = DataSize::Bytes(100); double keyframe_multiplier = 1; - DataSize packet_overhead = DataSize::bytes(PacketOverhead::kDefault); + DataSize packet_overhead = DataSize::Bytes(PacketOverhead::kDefault); }; struct VideoStreamConfig { @@ -78,14 +78,14 @@ struct VideoStreamConfig { // Support for explicit frame triggers should be added here if needed. } capture = Capture::kGenerator; struct Slides { - TimeDelta change_interval = TimeDelta::seconds(10); + TimeDelta change_interval = TimeDelta::Seconds(10); struct Generator { int width = 1600; int height = 1200; } generator; struct Images { struct Crop { - TimeDelta scroll_duration = TimeDelta::seconds(0); + TimeDelta scroll_duration = TimeDelta::Seconds(0); absl::optional width; absl::optional height; } crop; @@ -129,6 +129,7 @@ struct VideoStreamConfig { using Codec = VideoCodecType; Codec codec = Codec::kVideoCodecGeneric; absl::optional max_data_rate; + absl::optional min_data_rate; absl::optional max_framerate; // Counted in frame count. 
absl::optional key_frame_interval = 3000; @@ -149,6 +150,7 @@ struct VideoStreamConfig { DegradationPreference degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; + bool suspend_below_min_bitrate = false; } encoder; struct Stream { Stream(); @@ -158,7 +160,7 @@ struct VideoStreamConfig { bool packet_feedback = true; bool use_rtx = true; DataRate pad_to_rate = DataRate::Zero(); - TimeDelta nack_history_time = TimeDelta::ms(1000); + TimeDelta nack_history_time = TimeDelta::Millis(1000); bool use_flexfec = false; bool use_ulpfec = false; FecControllerFactoryInterface* fec_controller_factory = nullptr; @@ -190,6 +192,7 @@ struct AudioStreamConfig { DataRate min_rate_for_60_ms = DataRate::Zero(); DataRate max_rate_for_120_ms = DataRate::Infinity(); } frame; + std::string binary_proto; } adapt; struct Encoder { Encoder(); @@ -200,7 +203,7 @@ struct AudioStreamConfig { absl::optional fixed_rate; absl::optional min_rate; absl::optional max_rate; - TimeDelta initial_frame_length = TimeDelta::ms(20); + TimeDelta initial_frame_length = TimeDelta::Millis(20); } encoder; struct Stream { Stream(); diff --git a/test/scenario/scenario_tests/BUILD.gn b/test/scenario/scenario_tests/BUILD.gn deleted file mode 100644 index 74ee1a768b..0000000000 --- a/test/scenario/scenario_tests/BUILD.gn +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. 
- -import("../../../webrtc.gni") - -if (rtc_include_tests) { - rtc_test("scenario_tests") { - testonly = true - sources = [ "bbr_performance.cc" ] - deps = [ - "../:scenario", - "../..:test_main", - "../../:field_trial", - "../../:fileutils", - "../../:test_common", - "../../:test_support", - "../../../modules/congestion_controller/bbr", - "../../../rtc_base:rtc_base_approved", - "../../../rtc_base/experiments:field_trial_parser", - "//testing/gtest", - ] - } -} diff --git a/test/scenario/scenario_tests/bbr_performance.cc b/test/scenario/scenario_tests/bbr_performance.cc deleted file mode 100644 index 82aba2bf20..0000000000 --- a/test/scenario/scenario_tests/bbr_performance.cc +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/congestion_controller/bbr/bbr_factory.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/experiments/field_trial_units.h" -#include "rtc_base/random.h" -#include "test/field_trial.h" -#include "test/gtest.h" -#include "test/scenario/scenario.h" - -namespace webrtc { -namespace test { -namespace { -constexpr int64_t kRunTimeMs = 60000; - -using ::testing::Combine; -using ::testing::make_tuple; -using ::testing::tuple; -using ::testing::Values; - -using Codec = VideoStreamConfig::Encoder::Codec; -using CodecImpl = VideoStreamConfig::Encoder::Implementation; - -struct CallTestConfig { - struct Scenario { - FieldTrialParameter random_seed; - FieldTrialFlag return_traffic; - FieldTrialParameter capacity; - FieldTrialParameter propagation_delay; - FieldTrialParameter cross_traffic; - FieldTrialParameter delay_noise; - FieldTrialParameter loss_rate; - Scenario() - : random_seed("rs", 1), - return_traffic("ret"), - capacity("bw", DataRate::kbps(300)), - propagation_delay("dl", TimeDelta::ms(100)), - cross_traffic("ct", DataRate::Zero()), - delay_noise("dn", TimeDelta::Zero()), - loss_rate("pl", 0) {} - void Parse(std::string config_str) { - ParseFieldTrial( - {&random_seed, &return_traffic, &capacity, &propagation_delay, - &cross_traffic, &delay_noise, &loss_rate}, - config_str); - } - } scenario; - struct Tuning { - FieldTrialFlag use_bbr; - FieldTrialFlag bbr_no_target_rate; - FieldTrialOptional bbr_initial_window; - FieldTrialParameter bbr_encoder_gain; - Tuning() - : use_bbr("bbr"), - bbr_no_target_rate("notr"), - bbr_initial_window("iw", DataSize::bytes(8000)), - bbr_encoder_gain("eg", 0.8) {} - void Parse(std::string config_str) { - ParseFieldTrial( - { - &use_bbr, - &bbr_no_target_rate, - &bbr_initial_window, - &bbr_encoder_gain, - }, - config_str); - } - } tuning; - - void Parse(std::string scenario_string, std::string tuning_string) { - scenario.Parse(scenario_string); - tuning.Parse(tuning_string); - scenario_str 
= scenario_string; - tuning_str = tuning_string; - } - std::string scenario_str; - std::string tuning_str; - - std::string BbrTrial() const { - char trial_buf[1024]; - rtc::SimpleStringBuilder trial(trial_buf); - trial << "WebRTC-BweBbrConfig/"; - trial << "encoder_rate_gain_in_probe_rtt:0.5"; - trial.AppendFormat(",encoder_rate_gain:%.1lf", - tuning.bbr_encoder_gain.Get()); - if (tuning.bbr_no_target_rate) - trial << ",pacing_rate_as_target:1"; - if (tuning.bbr_initial_window) - trial << ",initial_cwin:" << tuning.bbr_initial_window->bytes(); - trial << "/"; - return trial.str(); - } - std::string FieldTrials() const { - std::string trials; - if (tuning.use_bbr) { - trials += - "WebRTC-BweCongestionController/Enabled,BBR/" - "WebRTC-Pacer-DrainQueue/Disabled/" - "WebRTC-Pacer-PadInSilence/Enabled/" - "WebRTC-Pacer-BlockAudio/Disabled/" - "WebRTC-Audio-SendSideBwe/Enabled/" - "WebRTC-SendSideBwe-WithOverhead/Enabled/"; - trials += BbrTrial(); - } - return trials; - } - - std::string Name() const { - char raw_name[1024]; - rtc::SimpleStringBuilder name(raw_name); - for (char c : scenario_str + "__tun__" + tuning_str) { - if (c == ':') { - continue; - } else if (c == ',') { - name << "_"; - } else if (c == '%') { - name << "p"; - } else { - name << c; - } - } - return name.str(); - } -}; -} // namespace -class BbrScenarioTest - : public ::testing::Test, - public ::testing::WithParamInterface> { - public: - BbrScenarioTest() { - conf_.Parse(::testing::get<0>(GetParam()), ::testing::get<1>(GetParam())); - field_trial_.reset(new test::ScopedFieldTrials(conf_.FieldTrials())); - } - CallTestConfig conf_; - - private: - std::unique_ptr field_trial_; -}; - -TEST_P(BbrScenarioTest, ReceivesVideo) { - BbrNetworkControllerFactory bbr_factory; - Scenario s("bbr_test_gen/bbr__" + conf_.Name()); - CallClientConfig call_config; - if (conf_.tuning.use_bbr) { - call_config.transport.cc_factory = &bbr_factory; - } - call_config.transport.rates.min_rate = DataRate::kbps(30); - 
call_config.transport.rates.max_rate = DataRate::kbps(1800); - - CallClient* alice = s.CreateClient("send", call_config); - CallClient* bob = s.CreateClient("return", call_config); - NetworkSimulationConfig net_conf; - net_conf.bandwidth = conf_.scenario.capacity; - net_conf.delay = conf_.scenario.propagation_delay; - net_conf.loss_rate = conf_.scenario.loss_rate; - net_conf.delay_std_dev = conf_.scenario.delay_noise; - auto* send_net = s.CreateMutableSimulationNode(net_conf); - auto* ret_net = s.CreateMutableSimulationNode(net_conf); - auto route = - s.CreateRoutes(alice, {send_net->node()}, bob, {ret_net->node()}); - - VideoStreamPair* alice_video = - s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) { - c->encoder.fake.max_rate = DataRate::kbps(1800); - }); - s.CreateAudioStream(route->forward(), [&](AudioStreamConfig* c) { - if (conf_.tuning.use_bbr) { - c->stream.in_bandwidth_estimation = true; - c->encoder.fixed_rate = DataRate::kbps(31); - } - }); - - VideoStreamPair* bob_video = nullptr; - if (conf_.scenario.return_traffic) { - bob_video = - s.CreateVideoStream(route->reverse(), [&](VideoStreamConfig* c) { - c->encoder.fake.max_rate = DataRate::kbps(1800); - }); - s.CreateAudioStream(route->reverse(), [&](AudioStreamConfig* c) { - if (conf_.tuning.use_bbr) { - c->stream.in_bandwidth_estimation = true; - c->encoder.fixed_rate = DataRate::kbps(31); - } - }); - } - RandomWalkConfig cross_config; - cross_config.peak_rate = conf_.scenario.cross_traffic; - cross_config.random_seed = conf_.scenario.random_seed; - auto* cross_traffic = s.net()->CreateRandomWalkCrossTraffic( - s.net()->CreateTrafficRoute({send_net->node()}), cross_config); - - s.CreatePrinter("send.stats.txt", TimeDelta::ms(100), - {alice->StatsPrinter(), alice_video->send()->StatsPrinter(), - cross_traffic->StatsPrinter(), send_net->ConfigPrinter()}); - - std::vector return_printers{ - bob->StatsPrinter(), ColumnPrinter::Fixed("cross_traffic_rate", "0"), - ret_net->ConfigPrinter()}; - 
if (bob_video) - return_printers.push_back(bob_video->send()->StatsPrinter()); - s.CreatePrinter("return.stats.txt", TimeDelta::ms(100), return_printers); - - s.RunFor(TimeDelta::ms(kRunTimeMs)); -} - -INSTANTIATE_TEST_SUITE_P(Selected, - BbrScenarioTest, - Values(make_tuple("rs:1,bw:150,dl:100,ct:100", - "bbr"))); - -INSTANTIATE_TEST_SUITE_P( - OneWayTuning, - BbrScenarioTest, - Values(make_tuple("bw:150,dl:100", "bbr,iw:,eg:100%,notr"), - make_tuple("bw:150,dl:100", "bbr,iw:8000,eg:100%,notr"), - make_tuple("bw:150,dl:100", "bbr,iw:8000,eg:100%"), - make_tuple("bw:150,dl:100", "bbr,iw:8000,eg:80%"))); - -INSTANTIATE_TEST_SUITE_P(OneWayTuned, - BbrScenarioTest, - Values(make_tuple("bw:150,dl:100", "bbr"), - make_tuple("bw:150,dl:100", ""), - make_tuple("bw:800,dl:100", "bbr"), - make_tuple("bw:800,dl:100", ""))); - -INSTANTIATE_TEST_SUITE_P(OneWayDegraded, - BbrScenarioTest, - Values(make_tuple("bw:150,dl:100,dn:30,pl:5%", "bbr"), - make_tuple("bw:150,dl:100,dn:30,pl:5%", ""), - - make_tuple("bw:150,ct:100,dl:100", "bbr"), - make_tuple("bw:150,ct:100,dl:100", ""), - - make_tuple("bw:800,dl:100,dn:30,pl:5%", "bbr"), - make_tuple("bw:800,dl:100,dn:30,pl:5%", ""), - - make_tuple("bw:800,ct:600,dl:100", "bbr"), - make_tuple("bw:800,ct:600,dl:100", ""))); - -INSTANTIATE_TEST_SUITE_P(TwoWay, - BbrScenarioTest, - Values(make_tuple("ret,bw:150,dl:100", "bbr"), - make_tuple("ret,bw:150,dl:100", ""), - make_tuple("ret,bw:800,dl:100", "bbr"), - make_tuple("ret,bw:800,dl:100", ""), - make_tuple("ret,bw:150,dl:50", "bbr"), - make_tuple("ret,bw:150,dl:50", ""))); -} // namespace test -} // namespace webrtc diff --git a/test/scenario/scenario_unittest.cc b/test/scenario/scenario_unittest.cc index c81709f0ef..7c05ea39dd 100644 --- a/test/scenario/scenario_unittest.cc +++ b/test/scenario/scenario_unittest.cc @@ -11,6 +11,7 @@ #include +#include "test/field_trial.h" #include "test/gtest.h" #include "test/logging/memory_log_writer.h" #include "test/scenario/stats_collection.h" @@ 
-22,7 +23,7 @@ TEST(ScenarioTest, StartsAndStopsWithoutErrors) { std::atomic bitrate_changed(false); Scenario s; CallClientConfig call_client_config; - call_client_config.transport.rates.start_rate = DataRate::kbps(300); + call_client_config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); auto* alice = s.CreateClient("alice", call_client_config); auto* bob = s.CreateClient("bob", call_client_config); NetworkSimulationConfig network_config; @@ -35,8 +36,8 @@ TEST(ScenarioTest, StartsAndStopsWithoutErrors) { s.CreateVideoStream(route->reverse(), video_stream_config); AudioStreamConfig audio_stream_config; - audio_stream_config.encoder.min_rate = DataRate::kbps(6); - audio_stream_config.encoder.max_rate = DataRate::kbps(64); + audio_stream_config.encoder.min_rate = DataRate::KilobitsPerSec(6); + audio_stream_config.encoder.max_rate = DataRate::KilobitsPerSec(64); audio_stream_config.encoder.allocate_bitrate = true; audio_stream_config.stream.in_bandwidth_estimation = false; s.CreateAudioStream(route->forward(), audio_stream_config); @@ -48,12 +49,12 @@ TEST(ScenarioTest, StartsAndStopsWithoutErrors) { s.NetworkDelayedAction({alice_net, bob_net}, 100, [&packet_received] { packet_received = true; }); - s.Every(TimeDelta::ms(10), [alice, bob, &bitrate_changed] { + s.Every(TimeDelta::Millis(10), [alice, bob, &bitrate_changed] { if (alice->GetStats().send_bandwidth_bps != 300000 && bob->GetStats().send_bandwidth_bps != 300000) bitrate_changed = true; }); - s.RunUntil(TimeDelta::seconds(2), TimeDelta::ms(5), + s.RunUntil(TimeDelta::Seconds(2), TimeDelta::Millis(5), [&bitrate_changed, &packet_received] { return packet_received && bitrate_changed; }); @@ -66,8 +67,8 @@ void SetupVideoCall(Scenario& s, VideoQualityAnalyzer* analyzer) { auto* alice = s.CreateClient("alice", call_config); auto* bob = s.CreateClient("bob", call_config); NetworkSimulationConfig network_config; - network_config.bandwidth = DataRate::kbps(1000); - network_config.delay = 
TimeDelta::ms(50); + network_config.bandwidth = DataRate::KilobitsPerSec(1000); + network_config.delay = TimeDelta::Millis(50); auto alice_net = s.CreateSimulationNode(network_config); auto bob_net = s.CreateSimulationNode(network_config); auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net}); @@ -88,24 +89,18 @@ void SetupVideoCall(Scenario& s, VideoQualityAnalyzer* analyzer) { } } // namespace -// TODO(bugs.webrtc.org/10515): Remove this when performance has been improved. -#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG) -#define MAYBE_SimTimeEncoding DISABLED_SimTimeEncoding -#else -#define MAYBE_SimTimeEncoding SimTimeEncoding -#endif -TEST(ScenarioTest, MAYBE_SimTimeEncoding) { +TEST(ScenarioTest, SimTimeEncoding) { VideoQualityAnalyzerConfig analyzer_config; analyzer_config.psnr_coverage = 0.1; VideoQualityAnalyzer analyzer(analyzer_config); { Scenario s("scenario/encode_sim", false); SetupVideoCall(s, &analyzer); - s.RunFor(TimeDelta::seconds(60)); + s.RunFor(TimeDelta::Seconds(2)); } // Regression tests based on previous runs. EXPECT_EQ(analyzer.stats().lost_count, 0); - EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 38, 2); + EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 38, 5); } // TODO(bugs.webrtc.org/10515): Remove this when performance has been improved. @@ -121,17 +116,18 @@ TEST(ScenarioTest, MAYBE_RealTimeEncoding) { { Scenario s("scenario/encode_real", true); SetupVideoCall(s, &analyzer); - s.RunFor(TimeDelta::seconds(10)); + s.RunFor(TimeDelta::Seconds(2)); } // Regression tests based on previous runs. EXPECT_LT(analyzer.stats().lost_count, 2); - EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 38, 10); + // This far below expected but ensures that we get something. 
+ EXPECT_GT(analyzer.stats().psnr_with_freeze.Mean(), 10); } TEST(ScenarioTest, SimTimeFakeing) { Scenario s("scenario/encode_sim", false); SetupVideoCall(s, nullptr); - s.RunFor(TimeDelta::seconds(10)); + s.RunFor(TimeDelta::Seconds(2)); } TEST(ScenarioTest, WritesToRtcEventLog) { @@ -139,12 +135,56 @@ TEST(ScenarioTest, WritesToRtcEventLog) { { Scenario s(storage.CreateFactory(), false); SetupVideoCall(s, nullptr); - s.RunFor(TimeDelta::seconds(1)); + s.RunFor(TimeDelta::Seconds(1)); } auto logs = storage.logs(); // We expect that a rtc event log has been created and that it has some data. EXPECT_GE(storage.logs().at("alice.rtc.dat").size(), 1u); } +TEST(ScenarioTest, + RetransmitsVideoPacketsInAudioAndVideoCallWithSendSideBweAndLoss) { + // Make sure audio packets are included in transport feedback. + test::ScopedFieldTrials override_field_trials( + "WebRTC-Audio-SendSideBwe/Enabled/WebRTC-Audio-ABWENoTWCC/Disabled/"); + + Scenario s; + CallClientConfig call_client_config; + call_client_config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); + auto* alice = s.CreateClient("alice", call_client_config); + auto* bob = s.CreateClient("bob", call_client_config); + NetworkSimulationConfig network_config; + // Add some loss and delay. + network_config.delay = TimeDelta::Millis(200); + network_config.loss_rate = 0.05; + auto alice_net = s.CreateSimulationNode(network_config); + auto bob_net = s.CreateSimulationNode(network_config); + auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net}); + + // First add an audio stream, then a video stream. + // Needed to make sure audio RTP module is selected first when sending + // transport feedback message. 
+ AudioStreamConfig audio_stream_config; + audio_stream_config.encoder.min_rate = DataRate::KilobitsPerSec(6); + audio_stream_config.encoder.max_rate = DataRate::KilobitsPerSec(64); + audio_stream_config.encoder.allocate_bitrate = true; + audio_stream_config.stream.in_bandwidth_estimation = true; + s.CreateAudioStream(route->forward(), audio_stream_config); + s.CreateAudioStream(route->reverse(), audio_stream_config); + + VideoStreamConfig video_stream_config; + auto video = s.CreateVideoStream(route->forward(), video_stream_config); + s.CreateVideoStream(route->reverse(), video_stream_config); + + // Run for 10 seconds. + s.RunFor(TimeDelta::Seconds(10)); + // Make sure retransmissions have happened. + int retransmit_packets = 0; + for (const auto& substream : video->send()->GetStats().substreams) { + retransmit_packets += substream.second.rtp_stats.retransmitted.packets; + } + EXPECT_GT(retransmit_packets, 0); +} + } // namespace test } // namespace webrtc diff --git a/test/scenario/stats_collection.cc b/test/scenario/stats_collection.cc index 4161149d2c..54d4de5b03 100644 --- a/test/scenario/stats_collection.cc +++ b/test/scenario/stats_collection.cc @@ -99,7 +99,8 @@ void VideoLayerAnalyzer::HandleFramePair(VideoFramePair sample, sample.capture_time.seconds(), sample.render_time.seconds(), sample.captured->width(), sample.captured->height(), - sample.decoded->width(), sample.decoded->height(), psnr); + sample.decoded ? sample.decoded->width() : 0, + sample.decoded ? 
sample.decoded->height() : 0, psnr); } } @@ -121,7 +122,7 @@ void VideoLayerAnalyzer::HandleRenderedFrame(const VideoFramePair& sample) { RTC_DCHECK(sample.render_time.IsFinite()); TimeDelta render_interval = sample.render_time - last_render_time_; TimeDelta mean_interval = stats_.render.frames.interval().Mean(); - if (render_interval > TimeDelta::ms(150) + mean_interval || + if (render_interval > TimeDelta::Millis(150) + mean_interval || render_interval > 3 * mean_interval) { stats_.freeze_duration.AddSample(render_interval); stats_.time_between_freezes.AddSample(last_render_time_ - @@ -136,9 +137,9 @@ void CallStatsCollector::AddStats(Call::Stats sample) { if (sample.send_bandwidth_bps > 0) stats_.target_rate.AddSampleBps(sample.send_bandwidth_bps); if (sample.pacer_delay_ms > 0) - stats_.pacer_delay.AddSample(TimeDelta::ms(sample.pacer_delay_ms)); + stats_.pacer_delay.AddSample(TimeDelta::Millis(sample.pacer_delay_ms)); if (sample.rtt_ms > 0) - stats_.round_trip_time.AddSample(TimeDelta::ms(sample.rtt_ms)); + stats_.round_trip_time.AddSample(TimeDelta::Millis(sample.rtt_ms)); stats_.memory_usage.AddSample(rtc::GetProcessResidentSizeBytes()); } @@ -166,7 +167,7 @@ void VideoSendStatsCollector::AddStats(VideoSendStream::Stats sample, kv.second.rtp_stats.fec.padding_bytes; } if (last_update_.IsFinite()) { - auto fec_delta = DataSize::bytes(fec_bytes - last_fec_bytes_); + auto fec_delta = DataSize::Bytes(fec_bytes - last_fec_bytes_); auto time_delta = at_time - last_update_; stats_.fec_bitrate.AddSample(fec_delta / time_delta); } diff --git a/test/scenario/stats_collection_unittest.cc b/test/scenario/stats_collection_unittest.cc index 4159eeac7f..17f0e3a656 100644 --- a/test/scenario/stats_collection_unittest.cc +++ b/test/scenario/stats_collection_unittest.cc @@ -25,17 +25,26 @@ void CreateAnalyzedStream(Scenario* s, VideoStreamConfig::Encoder::Implementation::kSoftware; config.hooks.frame_pair_handlers = {analyzer->Handler()}; auto* caller = 
s->CreateClient("caller", CallClientConfig()); + auto* callee = s->CreateClient("callee", CallClientConfig()); auto route = - s->CreateRoutes(caller, {s->CreateSimulationNode(network_config)}, - s->CreateClient("callee", CallClientConfig()), + s->CreateRoutes(caller, {s->CreateSimulationNode(network_config)}, callee, {s->CreateSimulationNode(NetworkSimulationConfig())}); - auto* video = s->CreateVideoStream(route->forward(), config); + VideoStreamPair* video = s->CreateVideoStream(route->forward(), config); auto* audio = s->CreateAudioStream(route->forward(), AudioStreamConfig()); - s->Every(TimeDelta::seconds(1), [=] { + s->Every(TimeDelta::Seconds(1), [=] { collectors->call.AddStats(caller->GetStats()); - collectors->audio_receive.AddStats(audio->receive()->GetStats()); collectors->video_send.AddStats(video->send()->GetStats(), s->Now()); - collectors->video_receive.AddStats(video->receive()->GetStats()); + collectors->audio_receive.AddStats(audio->receive()->GetStats()); + + // Querying the video stats from within the expected runtime environment + // (i.e. the TQ that belongs to the CallClient, not the Scenario TQ that + // we're currently on). + VideoReceiveStream::Stats video_receive_stats; + auto* video_stream = video->receive(); + callee->SendTask([&video_stream, &video_receive_stats]() { + video_receive_stats = video_stream->GetStats(); + }); + collectors->video_receive.AddStats(video_receive_stats); }); } } // namespace @@ -46,9 +55,9 @@ TEST(ScenarioAnalyzerTest, PsnrIsHighWhenNetworkIsGood) { { Scenario s; NetworkSimulationConfig good_network; - good_network.bandwidth = DataRate::kbps(1000); + good_network.bandwidth = DataRate::KilobitsPerSec(1000); CreateAnalyzedStream(&s, good_network, &analyzer, &stats); - s.RunFor(TimeDelta::seconds(3)); + s.RunFor(TimeDelta::Seconds(3)); } // This is a change detecting test, the targets are based on previous runs and // might change due to changes in configuration and encoder etc. 
The main @@ -67,18 +76,18 @@ TEST(ScenarioAnalyzerTest, PsnrIsLowWhenNetworkIsBad) { { Scenario s; NetworkSimulationConfig bad_network; - bad_network.bandwidth = DataRate::kbps(100); + bad_network.bandwidth = DataRate::KilobitsPerSec(100); bad_network.loss_rate = 0.02; CreateAnalyzedStream(&s, bad_network, &analyzer, &stats); - s.RunFor(TimeDelta::seconds(3)); + s.RunFor(TimeDelta::Seconds(3)); } // This is a change detecting test, the targets are based on previous runs and // might change due to changes in configuration and encoder etc. - EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 16, 10); + EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 20, 10); EXPECT_NEAR(stats.call.stats().target_rate.Mean().kbps(), 75, 50); EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 100, 50); EXPECT_NEAR(stats.video_receive.stats().resolution.Mean(), 180, 10); - EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 200, 150); + EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 250, 150); } TEST(ScenarioAnalyzerTest, CountsCapturedButNotRendered) { @@ -87,10 +96,10 @@ TEST(ScenarioAnalyzerTest, CountsCapturedButNotRendered) { { Scenario s; NetworkSimulationConfig long_delays; - long_delays.delay = TimeDelta::seconds(5); + long_delays.delay = TimeDelta::Seconds(5); CreateAnalyzedStream(&s, long_delays, &analyzer, &stats); // Enough time to send frames but not enough to deliver. 
- s.RunFor(TimeDelta::ms(100)); + s.RunFor(TimeDelta::Millis(100)); } EXPECT_GE(analyzer.stats().capture.count, 1); EXPECT_EQ(analyzer.stats().render.count, 0); diff --git a/test/scenario/video_frame_matcher.cc b/test/scenario/video_frame_matcher.cc index cf682c36a9..20cfb0e96e 100644 --- a/test/scenario/video_frame_matcher.cc +++ b/test/scenario/video_frame_matcher.cc @@ -180,7 +180,7 @@ DecodedFrameTap::DecodedFrameTap(Clock* clock, void DecodedFrameTap::OnFrame(const VideoFrame& frame) { matcher_->OnDecodedFrame(frame, layer_id_, - Timestamp::ms(frame.render_time_ms()), + Timestamp::Millis(frame.render_time_ms()), clock_->CurrentTime()); } diff --git a/test/scenario/video_stream.cc b/test/scenario/video_stream.cc index def6c2051f..b4df7389e6 100644 --- a/test/scenario/video_stream.cc +++ b/test/scenario/video_stream.cc @@ -13,6 +13,7 @@ #include #include +#include "absl/strings/match.h" #include "api/test/create_frame_generator.h" #include "api/test/frame_generator_interface.h" #include "api/test/video/function_video_encoder_factory.h" @@ -112,7 +113,7 @@ std::string TransformFilePath(std::string path) { int ext_pos = path.rfind("."); if (ext_pos < 0) { return test::ResourcePath(path, "yuv"); - } else if (path.find(resource_prefix) == 0) { + } else if (absl::StartsWith(path, resource_prefix)) { std::string name = path.substr(resource_prefix.length(), ext_pos); std::string ext = path.substr(ext_pos, path.size()); return test::ResourcePath(name, ext); @@ -208,10 +209,29 @@ CreateH264SpecificSettings(VideoStreamConfig config) { VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings); } +#ifndef DISABLE_H265 +rtc::scoped_refptr +CreateH265SpecificSettings(VideoStreamConfig config) { + RTC_DCHECK_EQ(config.encoder.layers.temporal, 1); + RTC_DCHECK_EQ(config.encoder.layers.spatial, 1); + + VideoCodecH265 h265_settings = VideoEncoder::GetDefaultH265Settings(); + h265_settings.frameDroppingOn = config.encoder.frame_dropping; + h265_settings.keyFrameInterval = + 
config.encoder.key_frame_interval.value_or(0); + return new rtc::RefCountedObject< + VideoEncoderConfig::H265EncoderSpecificSettings>(h265_settings); +} +#endif + rtc::scoped_refptr CreateEncoderSpecificSettings(VideoStreamConfig config) { using Codec = VideoStreamConfig::Encoder::Codec; switch (config.encoder.codec) { +#ifndef DISABLE_H265 + case Codec::kVideoCodecH265: + return CreateH265SpecificSettings(config); +#endif case Codec::kVideoCodecH264: return CreateH264SpecificSettings(config); case Codec::kVideoCodecVP8: @@ -256,13 +276,15 @@ VideoEncoderConfig CreateVideoEncoderConfig(VideoStreamConfig config) { // TODO(srte): Base this on encoder capabilities. encoder_config.max_bitrate_bps = - config.encoder.max_data_rate.value_or(DataRate::kbps(10000)).bps(); + config.encoder.max_data_rate.value_or(DataRate::KilobitsPerSec(10000)) + .bps(); encoder_config.encoder_specific_settings = CreateEncoderSpecificSettings(config); if (config.encoder.max_framerate) { for (auto& layer : encoder_config.simulcast_layers) { layer.max_framerate = *config.encoder.max_framerate; + layer.min_bitrate_bps = config.encoder.min_data_rate->bps_or(-1); } } @@ -321,6 +343,7 @@ std::unique_ptr CreateFrameGenerator( VideoReceiveStream::Config CreateVideoReceiveStreamConfig( VideoStreamConfig config, Transport* feedback_transport, + VideoDecoderFactory* decoder_factory, VideoReceiveStream::Decoder decoder, rtc::VideoSinkInterface* renderer, uint32_t local_ssrc, @@ -336,6 +359,7 @@ VideoReceiveStream::Config CreateVideoReceiveStreamConfig( recv.rtp.nack.rtp_history_ms = config.stream.nack_history_time.ms(); recv.rtp.protected_by_flexfec = config.stream.use_flexfec; recv.rtp.remote_ssrc = ssrc; + recv.decoder_factory = decoder_factory; recv.decoders.push_back(decoder); recv.renderer = renderer; if (config.stream.use_rtx) { @@ -371,10 +395,10 @@ SendVideoStream::SendVideoStream(CallClient* sender, case Encoder::Implementation::kFake: encoder_factory_ = std::make_unique([this]() { - 
rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); std::unique_ptr encoder; if (config_.encoder.codec == Codec::kVideoCodecVP8) { - encoder = std::make_unique(sender_->clock_); + encoder = std::make_unique(sender_->clock_); } else if (config_.encoder.codec == Codec::kVideoCodecGeneric) { encoder = std::make_unique(sender_->clock_); } else { @@ -408,6 +432,8 @@ SendVideoStream::SendVideoStream(CallClient* sender, send_config.encoder_settings.encoder_factory = encoder_factory_.get(); send_config.encoder_settings.bitrate_allocator_factory = bitrate_allocator_factory_.get(); + send_config.suspend_below_min_bitrate = + config.encoder.suspend_below_min_bitrate; sender_->SendTask([&] { if (config.stream.fec_controller_factory) { @@ -450,7 +476,7 @@ void SendVideoStream::Stop() { void SendVideoStream::UpdateConfig( std::function modifier) { sender_->SendTask([&] { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); VideoStreamConfig prior_config = config_; modifier(&config_); if (prior_config.encoder.fake.max_rate != config_.encoder.fake.max_rate) { @@ -459,7 +485,8 @@ void SendVideoStream::UpdateConfig( } } // TODO(srte): Add more conditions that should cause reconfiguration. 
- if (prior_config.encoder.max_framerate != config_.encoder.max_framerate) { + if (prior_config.encoder.max_framerate != config_.encoder.max_framerate || + prior_config.encoder.max_data_rate != config_.encoder.max_data_rate) { VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_); send_stream_->ReconfigureVideoEncoder(std::move(encoder_config)); } @@ -471,18 +498,16 @@ void SendVideoStream::UpdateConfig( void SendVideoStream::UpdateActiveLayers(std::vector active_layers) { sender_->task_queue_.PostTask([=] { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); if (config_.encoder.codec == VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) { send_stream_->UpdateActiveSimulcastLayers(active_layers); - } else { - VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_); - RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), - active_layers.size()); - for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i) - encoder_config.simulcast_layers[i].active = active_layers[i]; - send_stream_->ReconfigureVideoEncoder(std::move(encoder_config)); } + VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_); + RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), active_layers.size()); + for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i) + encoder_config.simulcast_layers[i].active = active_layers[i]; + send_stream_->ReconfigureVideoEncoder(std::move(encoder_config)); }); } @@ -547,7 +572,6 @@ ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver, VideoReceiveStream::Decoder decoder = CreateMatchingDecoder(CodecTypeToPayloadType(config.encoder.codec), CodecTypeToPayloadString(config.encoder.codec)); - decoder.decoder_factory = decoder_factory_.get(); size_t num_streams = 1; if (config.encoder.codec == VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) num_streams = config.encoder.layers.spatial; @@ -559,7 +583,7 @@ ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver, renderer = 
render_taps_.back().get(); } auto recv_config = CreateVideoReceiveStreamConfig( - config, feedback_transport, decoder, renderer, + config, feedback_transport, decoder_factory_.get(), decoder, renderer, receiver_->GetNextVideoLocalSsrc(), send_stream->ssrcs_[i], send_stream->rtx_ssrcs_[i]); if (config.stream.use_flexfec) { @@ -614,7 +638,7 @@ VideoReceiveStream::Stats ReceiveVideoStream::GetStats() const { if (receive_streams_.empty()) return VideoReceiveStream::Stats(); // TODO(srte): Handle multiple receive streams. - return receive_streams_.front()->GetStats(); + return receive_streams_.back()->GetStats(); } VideoStreamPair::~VideoStreamPair() = default; diff --git a/test/scenario/video_stream.h b/test/scenario/video_stream.h index f0b99db57a..96b6d49f63 100644 --- a/test/scenario/video_stream.h +++ b/test/scenario/video_stream.h @@ -14,6 +14,7 @@ #include #include "rtc_base/constructor_magic.h" +#include "rtc_base/synchronization/mutex.h" #include "test/fake_encoder.h" #include "test/fake_videorenderer.h" #include "test/frame_generator_capturer.h" @@ -53,14 +54,14 @@ class SendVideoStream { Transport* send_transport, VideoFrameMatcher* matcher); - rtc::CriticalSection crit_; + Mutex mutex_; std::vector ssrcs_; std::vector rtx_ssrcs_; VideoSendStream* send_stream_ = nullptr; CallClient* const sender_; - VideoStreamConfig config_ RTC_GUARDED_BY(crit_); + VideoStreamConfig config_ RTC_GUARDED_BY(mutex_); std::unique_ptr encoder_factory_; - std::vector fake_encoders_ RTC_GUARDED_BY(crit_); + std::vector fake_encoders_ RTC_GUARDED_BY(mutex_); std::unique_ptr bitrate_allocator_factory_; std::unique_ptr video_capturer_; std::unique_ptr frame_tap_; diff --git a/test/scenario/video_stream_unittest.cc b/test/scenario/video_stream_unittest.cc index 0789b0d564..52be3f82ff 100644 --- a/test/scenario/video_stream_unittest.cc +++ b/test/scenario/video_stream_unittest.cc @@ -9,6 +9,7 @@ */ #include +#include "test/field_trial.h" #include "test/gtest.h" #include 
"test/scenario/scenario.h" @@ -22,7 +23,7 @@ using CodecImpl = VideoStreamConfig::Encoder::Implementation; } // namespace TEST(VideoStreamTest, ReceivesFramesFromFileBasedStreams) { - TimeDelta kRunTime = TimeDelta::ms(500); + TimeDelta kRunTime = TimeDelta::Millis(500); std::vector kFrameRates = {15, 30}; std::deque> frame_counts(2); frame_counts[0] = 0; @@ -68,7 +69,7 @@ TEST(VideoStreamTest, ReceivesFramesFromFileBasedStreams) { } TEST(VideoStreamTest, RecievesVp8SimulcastFrames) { - TimeDelta kRunTime = TimeDelta::ms(500); + TimeDelta kRunTime = TimeDelta::Millis(500); int kFrameRate = 30; std::deque> frame_counts(3); @@ -125,7 +126,7 @@ TEST(VideoStreamTest, SendsNacksOnLoss) { {s.CreateSimulationNode(NetworkSimulationConfig())}); // NACK retransmissions are enabled by default. auto video = s.CreateVideoStream(route->forward(), VideoStreamConfig()); - s.RunFor(TimeDelta::seconds(1)); + s.RunFor(TimeDelta::Seconds(1)); int retransmit_packets = 0; for (const auto& substream : video->send()->GetStats().substreams) { retransmit_packets += substream.second.rtp_stats.retransmitted.packets; @@ -139,7 +140,7 @@ TEST(VideoStreamTest, SendsFecWithUlpFec) { s.CreateRoutes(s.CreateClient("caller", CallClientConfig()), {s.CreateSimulationNode([](NetworkSimulationConfig* c) { c->loss_rate = 0.1; - c->delay = TimeDelta::ms(100); + c->delay = TimeDelta::Millis(100); })}, s.CreateClient("callee", CallClientConfig()), {s.CreateSimulationNode(NetworkSimulationConfig())}); @@ -148,7 +149,7 @@ TEST(VideoStreamTest, SendsFecWithUlpFec) { c->encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8; c->stream.use_ulpfec = true; }); - s.RunFor(TimeDelta::seconds(5)); + s.RunFor(TimeDelta::Seconds(5)); VideoSendStream::Stats video_stats = video->send()->GetStats(); EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u); } @@ -158,16 +159,154 @@ TEST(VideoStreamTest, SendsFecWithFlexFec) { s.CreateRoutes(s.CreateClient("caller", CallClientConfig()), 
{s.CreateSimulationNode([](NetworkSimulationConfig* c) { c->loss_rate = 0.1; - c->delay = TimeDelta::ms(100); + c->delay = TimeDelta::Millis(100); })}, s.CreateClient("callee", CallClientConfig()), {s.CreateSimulationNode(NetworkSimulationConfig())}); auto video = s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) { c->stream.use_flexfec = true; }); - s.RunFor(TimeDelta::seconds(5)); + s.RunFor(TimeDelta::Seconds(5)); VideoSendStream::Stats video_stats = video->send()->GetStats(); EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u); } + +TEST(VideoStreamTest, ResolutionAdaptsToAvailableBandwidth) { + // Declared before scenario to avoid use after free. + std::atomic num_qvga_frames_(0); + std::atomic num_vga_frames_(0); + + Scenario s; + // Link has enough capacity for VGA. + NetworkSimulationConfig net_conf; + net_conf.bandwidth = DataRate::KilobitsPerSec(800); + net_conf.delay = TimeDelta::Millis(50); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = DataRate::KilobitsPerSec(800); + }); + auto send_net = {s.CreateSimulationNode(net_conf)}; + auto ret_net = {s.CreateSimulationNode(net_conf)}; + auto* route = s.CreateRoutes( + client, send_net, s.CreateClient("return", CallClientConfig()), ret_net); + + s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) { + c->hooks.frame_pair_handlers = {[&](const VideoFramePair& info) { + if (info.decoded->width() == 640) { + ++num_vga_frames_; + } else if (info.decoded->width() == 320) { + ++num_qvga_frames_; + } else { + ADD_FAILURE() << "Unexpected resolution: " << info.decoded->width(); + } + }}; + c->source.framerate = 30; + // The resolution must be high enough to allow smaller layers to be + // created. + c->source.generator.width = 640; + c->source.generator.height = 480; + c->encoder.implementation = CodecImpl::kSoftware; + c->encoder.codec = Codec::kVideoCodecVP9; + // Enable SVC. 
+ c->encoder.layers.spatial = 2; + }); + + // Run for a few seconds, until streams have stabilized, + // check that we are sending VGA. + s.RunFor(TimeDelta::Seconds(5)); + EXPECT_GT(num_vga_frames_, 0u); + + // Trigger cross traffic, run until we have seen 3 consecutive + // seconds with no VGA frames due to reduced available bandwidth. + auto cross_traffic = + s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig()); + + int num_seconds_without_vga = 0; + int num_iterations = 0; + do { + ASSERT_LE(++num_iterations, 100); + num_qvga_frames_ = 0; + num_vga_frames_ = 0; + s.RunFor(TimeDelta::Seconds(1)); + if (num_qvga_frames_ > 0 && num_vga_frames_ == 0) { + ++num_seconds_without_vga; + } else { + num_seconds_without_vga = 0; + } + } while (num_seconds_without_vga < 3); + + // Stop cross traffic, make sure we recover and get VGA frames agian. + s.net()->StopCrossTraffic(cross_traffic); + num_qvga_frames_ = 0; + num_vga_frames_ = 0; + + s.RunFor(TimeDelta::Seconds(40)); + EXPECT_GT(num_qvga_frames_, 0u); + EXPECT_GT(num_vga_frames_, 0u); +} + +TEST(VideoStreamTest, SuspendsBelowMinBitrate) { + const DataRate kMinVideoBitrate = DataRate::KilobitsPerSec(30); + + // Declared before scenario to avoid use after free. + std::atomic last_frame_timestamp(Timestamp::MinusInfinity()); + + Scenario s; + NetworkSimulationConfig net_config; + net_config.bandwidth = kMinVideoBitrate * 4; + net_config.delay = TimeDelta::Millis(10); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + // Min transmit rate needs to be lower than kMinVideoBitrate for this test + // to make sense. 
+ c->transport.rates.min_rate = kMinVideoBitrate / 2; + c->transport.rates.start_rate = kMinVideoBitrate; + c->transport.rates.max_rate = kMinVideoBitrate * 2; + }); + auto send_net = s.CreateMutableSimulationNode( + [&](NetworkSimulationConfig* c) { *c = net_config; }); + auto ret_net = {s.CreateSimulationNode(net_config)}; + auto* route = + s.CreateRoutes(client, {send_net->node()}, + s.CreateClient("return", CallClientConfig()), ret_net); + + s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) { + c->hooks.frame_pair_handlers = {[&](const VideoFramePair& pair) { + if (pair.repeated == 0) { + last_frame_timestamp = pair.capture_time; + } + }}; + c->source.framerate = 30; + c->source.generator.width = 320; + c->source.generator.height = 180; + c->encoder.implementation = CodecImpl::kFake; + c->encoder.codec = Codec::kVideoCodecVP8; + c->encoder.min_data_rate = kMinVideoBitrate; + c->encoder.suspend_below_min_bitrate = true; + c->stream.pad_to_rate = kMinVideoBitrate; + }); + + // Run for a few seconds, check we have received at least one frame. + s.RunFor(TimeDelta::Seconds(2)); + EXPECT_TRUE(last_frame_timestamp.load().IsFinite()); + + // Degrade network to below min bitrate. + send_net->UpdateConfig([&](NetworkSimulationConfig* c) { + c->bandwidth = kMinVideoBitrate * 0.9; + }); + + // Run for 20s, verify that no frames arrive that were captured after the + // first five seconds, allowing some margin for BWE backoff to trigger and + // packets already in the pipeline to potentially arrive. + s.RunFor(TimeDelta::Seconds(20)); + EXPECT_GT(s.Now() - last_frame_timestamp, TimeDelta::Seconds(15)); + + // Relax the network constraints and run for a while more, verify that we + // start receiving frames again. 
+ send_net->UpdateConfig( + [&](NetworkSimulationConfig* c) { c->bandwidth = kMinVideoBitrate * 4; }); + last_frame_timestamp = Timestamp::MinusInfinity(); + s.RunFor(TimeDelta::Seconds(15)); + EXPECT_TRUE(last_frame_timestamp.load().IsFinite()); +} + } // namespace test } // namespace webrtc diff --git a/test/test_main.cc b/test/test_main.cc index 5046979548..f919c4bba7 100644 --- a/test/test_main.cc +++ b/test/test_main.cc @@ -12,17 +12,21 @@ #include "absl/debugging/failure_signal_handler.h" #include "absl/debugging/symbolize.h" +#include "absl/flags/parse.h" +#include "test/gmock.h" #include "test/test_main_lib.h" int main(int argc, char* argv[]) { // Initialize the symbolizer to get a human-readable stack trace absl::InitializeSymbolizer(argv[0]); + testing::InitGoogleMock(&argc, argv); + absl::ParseCommandLine(argc, argv); absl::FailureSignalHandlerOptions options; absl::InstallFailureSignalHandler(options); std::unique_ptr main = webrtc::TestMain::Create(); - int err_code = main->Init(&argc, argv); + int err_code = main->Init(); if (err_code != 0) { return err_code; } diff --git a/test/test_main_lib.cc b/test/test_main_lib.cc index efa11282b0..7170163346 100644 --- a/test/test_main_lib.cc +++ b/test/test_main_lib.cc @@ -15,7 +15,9 @@ #include #include "absl/flags/flag.h" -#include "absl/flags/parse.h" +#include "absl/memory/memory.h" +#include "absl/strings/match.h" +#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/event_tracer.h" #include "rtc_base/logging.h" @@ -25,7 +27,6 @@ #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "test/field_trial.h" -#include "test/gmock.h" #include "test/gtest.h" #include "test/testsupport/perf_test.h" #include "test/testsupport/resources_dir_flag.h" @@ -45,30 +46,28 @@ ABSL_FLAG(std::string, ApplePersistenceIgnoreState, "", "Intentionally ignored flag intended for iOS simulator."); + +// This is the cousin of 
isolated_script_test_perf_output, but we can't dictate +// where to write on iOS so the semantics of this flag are a bit different. ABSL_FLAG( bool, - save_chartjson_result, + write_perf_output_on_ios, false, - "Store the perf results in Documents/perf_result.json in the format " - "described by " - "https://github.com/catapult-project/catapult/blob/master/dashboard/docs/" - "data-format.md."); + "Store the perf results in Documents/perftest_result.pb in the format " + "described by histogram.proto in " + "https://chromium.googlesource.com/catapult/."); #else -ABSL_FLAG(std::string, - isolated_script_test_output, - "", - "Path to output an empty JSON file which Chromium infra requires."); - ABSL_FLAG( std::string, isolated_script_test_perf_output, "", - "Path where the perf results should be stored in the JSON format described " - "by " - "https://github.com/catapult-project/catapult/blob/master/dashboard/docs/" - "data-format.md."); + "Path where the perf results should be stored in proto format described " + "described by histogram.proto in " + "https://chromium.googlesource.com/catapult/."); + +#endif constexpr char kPlotAllMetrics[] = "all"; ABSL_FLAG(std::vector, @@ -78,8 +77,6 @@ ABSL_FLAG(std::vector, "available). Example: psnr,ssim,encode_time. To plot all available " " metrics pass 'all' as flag value"); -#endif - ABSL_FLAG(bool, logs, true, "print logs to stderr"); ABSL_FLAG(bool, verbose, false, "verbose logs to stderr"); @@ -102,10 +99,65 @@ namespace { class TestMainImpl : public TestMain { public: - int Init(int* argc, char* argv[]) override { - ::testing::InitGoogleMock(argc, argv); - absl::ParseCommandLine(*argc, argv); + // In order to set up a fresh rtc::Thread state for each test and avoid + // accidentally carrying over pending tasks that might be sent from one test + // and executed while another test is running, we inject a TestListener + // that sets up a new rtc::Thread instance for the main thread, per test. 
+ class TestListener : public ::testing::EmptyTestEventListener { + public: + TestListener() = default; + + private: + bool IsDeathTest(const char* test_case_name, const char* test_name) { + // Workaround to avoid wrapping the main thread when we run death tests. + // The approach we take for detecting death tests is essentially the same + // as gtest does internally. Gtest does this: + // + // static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*"; + // ::testing::internal::UnitTestOptions::MatchesFilter( + // test_case_name, kDeathTestCaseFilter); + // + // Our approach is a little more straight forward. + if (absl::EndsWith(test_case_name, "DeathTest")) + return true; + + return absl::EndsWith(test_name, "DeathTest"); + } + + void OnTestStart(const ::testing::TestInfo& test_info) override { + if (!IsDeathTest(test_info.test_suite_name(), test_info.name())) { + // Ensure that main thread gets wrapped as an rtc::Thread. + // TODO(bugs.webrtc.org/9714): It might be better to avoid wrapping the + // main thread, or leave it to individual tests that need it. But as + // long as we have automatic thread wrapping, we need this to avoid that + // some other random thread (which one depending on which tests are run) + // gets automatically wrapped. + thread_ = rtc::Thread::CreateWithSocketServer(); + thread_->WrapCurrent(); + RTC_DCHECK_EQ(rtc::Thread::Current(), thread_.get()); + } else { + RTC_LOG(LS_INFO) << "No thread auto wrap for death test."; + } + } + void OnTestEnd(const ::testing::TestInfo& test_info) override { + // Terminate the message loop. Note that if the test failed to clean + // up pending messages, this may execute part of the test. Ideally we + // should print a warning message here, or even fail the test if it leaks. + if (thread_) { + thread_->Quit(); // Signal quit. + thread_->Run(); // Flush + process Quit signal. 
+ thread_->UnwrapCurrent(); + thread_ = nullptr; + } + } + + std::unique_ptr thread_; + }; + + int Init(int* argc, char* argv[]) override { return Init(); } + + int Init() override { // Make sure we always pull in the --resources_dir flag, even if the test // binary doesn't link with fileutils (downstream expects all test mains to // have this flag). @@ -136,14 +188,7 @@ class TestMainImpl : public TestMain { rtc::InitializeSSL(); rtc::SSLStreamAdapter::EnableTimeCallbackForTesting(); - // Ensure that main thread gets wrapped as an rtc::Thread. - // TODO(bugs.webrt.org/9714): It might be better to avoid wrapping the main - // thread, or leave it to individual tests that need it. But as long as we - // have automatic thread wrapping, we need this to avoid that some other - // random thread (which one depending on which tests are run) gets - // automatically wrapped. - rtc::ThreadManager::Instance()->WrapCurrentThread(); - RTC_CHECK(rtc::Thread::Current()); + ::testing::UnitTest::GetInstance()->listeners().Append(new TestListener()); return 0; } @@ -156,34 +201,36 @@ class TestMainImpl : public TestMain { rtc::tracing::StartInternalCapture(trace_event_path.c_str()); } + absl::optional> metrics_to_plot = + absl::GetFlag(FLAGS_plot); + + if (metrics_to_plot->empty()) { + metrics_to_plot = absl::nullopt; + } else { + if (metrics_to_plot->size() == 1 && + (*metrics_to_plot)[0] == kPlotAllMetrics) { + metrics_to_plot->clear(); + } + } + #if defined(WEBRTC_IOS) rtc::test::InitTestSuite(RUN_ALL_TESTS, argc, argv, - absl::GetFlag(FLAGS_save_chartjson_result)); + absl::GetFlag(FLAGS_write_perf_output_on_ios), + metrics_to_plot); rtc::test::RunTestsFromIOSApp(); int exit_code = 0; #else int exit_code = RUN_ALL_TESTS(); - std::string chartjson_result_file = + std::string perf_output_file = absl::GetFlag(FLAGS_isolated_script_test_perf_output); - if (!chartjson_result_file.empty()) { - webrtc::test::WritePerfResults(chartjson_result_file); - } - std::vector metrics_to_plot = 
absl::GetFlag(FLAGS_plot); - if (!metrics_to_plot.empty()) { - if (metrics_to_plot.size() == 1 && - metrics_to_plot[0] == kPlotAllMetrics) { - metrics_to_plot.clear(); + if (!perf_output_file.empty()) { + if (!webrtc::test::WritePerfResults(perf_output_file)) { + return 1; } - webrtc::test::PrintPlottableResults(metrics_to_plot); } - - std::string result_filename = - absl::GetFlag(FLAGS_isolated_script_test_output); - if (!result_filename.empty()) { - std::ofstream result_file(result_filename); - result_file << "{\"version\": 3}"; - result_file.close(); + if (metrics_to_plot) { + webrtc::test::PrintPlottableResults(*metrics_to_plot); } #endif diff --git a/test/test_main_lib.h b/test/test_main_lib.h index bdb0afb6eb..2233171c60 100644 --- a/test/test_main_lib.h +++ b/test/test_main_lib.h @@ -25,6 +25,8 @@ class TestMain { // Initializes test environment. Clients can add their own initialization // steps after call to this method and before running tests. // Returns 0 if initialization was successful and non 0 otherwise. + virtual int Init() = 0; + // Temporary for backward compatibility virtual int Init(int* argc, char* argv[]) = 0; // Runs test end return result error code. 0 - no errors. 
diff --git a/test/test_video_capturer.cc b/test/test_video_capturer.cc index c0d575dc5e..9ce4aa0637 100644 --- a/test/test_video_capturer.cc +++ b/test/test_video_capturer.cc @@ -84,7 +84,7 @@ void TestVideoCapturer::UpdateVideoAdapter() { } VideoFrame TestVideoCapturer::MaybePreprocess(const VideoFrame& frame) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); if (preprocessor_ != nullptr) { return preprocessor_->Preprocess(frame); } else { diff --git a/test/test_video_capturer.h b/test/test_video_capturer.h index 114767a43e..dff529cb15 100644 --- a/test/test_video_capturer.h +++ b/test/test_video_capturer.h @@ -18,7 +18,7 @@ #include "api/video/video_source_interface.h" #include "media/base/video_adapter.h" #include "media/base/video_broadcaster.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace test { @@ -38,7 +38,7 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { const rtc::VideoSinkWants& wants) override; void RemoveSink(rtc::VideoSinkInterface* sink) override; void SetFramePreprocessor(std::unique_ptr preprocessor) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); preprocessor_ = std::move(preprocessor); } @@ -50,7 +50,7 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { void UpdateVideoAdapter(); VideoFrame MaybePreprocess(const VideoFrame& frame); - rtc::CriticalSection lock_; + Mutex lock_; std::unique_ptr preprocessor_ RTC_GUARDED_BY(lock_); rtc::VideoBroadcaster broadcaster_; cricket::VideoAdapter video_adapter_; diff --git a/test/testsupport/DEPS b/test/testsupport/DEPS new file mode 100644 index 0000000000..6f6150ad30 --- /dev/null +++ b/test/testsupport/DEPS @@ -0,0 +1,4 @@ +include_rules = [ + # Histogram C++ API, used by perf tests. 
+ "+third_party/catapult/tracing/tracing/value" +] diff --git a/test/testsupport/file_utils_override.cc b/test/testsupport/file_utils_override.cc index 09806913a5..43391556b5 100644 --- a/test/testsupport/file_utils_override.cc +++ b/test/testsupport/file_utils_override.cc @@ -128,7 +128,7 @@ std::string OutputPath() { std::string WorkingDir() { #if defined(WEBRTC_ANDROID) return kAndroidChromiumTestsRoot; -#endif +#else char path_buffer[FILENAME_MAX]; if (!GET_CURRENT_DIR(path_buffer, sizeof(path_buffer))) { fprintf(stderr, "Cannot get current directory!\n"); @@ -136,6 +136,7 @@ std::string WorkingDir() { } else { return std::string(path_buffer); } +#endif } std::string ResourcePath(const std::string& name, diff --git a/test/testsupport/frame_writer.h b/test/testsupport/frame_writer.h index b91e57c963..5f85d8bcd4 100644 --- a/test/testsupport/frame_writer.h +++ b/test/testsupport/frame_writer.h @@ -32,7 +32,7 @@ class FrameWriter { // Writes a frame of the configured frame length to the output file. // Returns true if the write was successful, false otherwise. - virtual bool WriteFrame(uint8_t* frame_buffer) = 0; + virtual bool WriteFrame(const uint8_t* frame_buffer) = 0; // Closes the output file if open. Essentially makes this class impossible // to use anymore. Will also be invoked by the destructor. 
@@ -54,7 +54,7 @@ class YuvFrameWriterImpl : public FrameWriter { YuvFrameWriterImpl(std::string output_filename, int width, int height); ~YuvFrameWriterImpl() override; bool Init() override; - bool WriteFrame(uint8_t* frame_buffer) override; + bool WriteFrame(const uint8_t* frame_buffer) override; void Close() override; size_t FrameLength() override; @@ -76,7 +76,7 @@ class Y4mFrameWriterImpl : public YuvFrameWriterImpl { int frame_rate); ~Y4mFrameWriterImpl() override; bool Init() override; - bool WriteFrame(uint8_t* frame_buffer) override; + bool WriteFrame(const uint8_t* frame_buffer) override; private: const int frame_rate_; diff --git a/test/testsupport/ivf_video_frame_generator.cc b/test/testsupport/ivf_video_frame_generator.cc index 81155f80ff..fe836763fa 100644 --- a/test/testsupport/ivf_video_frame_generator.cc +++ b/test/testsupport/ivf_video_frame_generator.cc @@ -53,7 +53,7 @@ IvfVideoFrameGenerator::IvfVideoFrameGenerator(const std::string& file_name) WEBRTC_VIDEO_CODEC_OK); } IvfVideoFrameGenerator::~IvfVideoFrameGenerator() { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); if (!file_reader_) { return; } @@ -62,7 +62,7 @@ IvfVideoFrameGenerator::~IvfVideoFrameGenerator() { // Reset decoder to prevent it from async access to |this|. video_decoder_.reset(); { - rtc::CritScope frame_crit(&frame_decode_lock_); + MutexLock frame_lock(&frame_decode_lock_); next_frame_ = absl::nullopt; // Set event in case another thread is waiting on it. next_frame_decoded_.Set(); @@ -70,7 +70,7 @@ IvfVideoFrameGenerator::~IvfVideoFrameGenerator() { } FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); next_frame_decoded_.Reset(); RTC_CHECK(file_reader_); if (!file_reader_->HasMoreFrames()) { @@ -86,7 +86,7 @@ FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() { RTC_CHECK(decoded) << "Failed to decode next frame in " << kMaxNextFrameWaitTemeoutMs << "ms. 
Can't continue"; - rtc::CritScope frame_crit(&frame_decode_lock_); + MutexLock frame_lock(&frame_decode_lock_); rtc::scoped_refptr buffer = next_frame_->video_frame_buffer(); if (width_ != static_cast(buffer->width()) || @@ -102,7 +102,7 @@ FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() { } void IvfVideoFrameGenerator::ChangeResolution(size_t width, size_t height) { - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); width_ = width; height_ = height; } @@ -126,7 +126,7 @@ void IvfVideoFrameGenerator::DecodedCallback::Decoded( } void IvfVideoFrameGenerator::OnFrameDecoded(const VideoFrame& decoded_frame) { - rtc::CritScope crit(&frame_decode_lock_); + MutexLock lock(&frame_decode_lock_); next_frame_ = decoded_frame; next_frame_decoded_.Set(); } diff --git a/test/testsupport/ivf_video_frame_generator.h b/test/testsupport/ivf_video_frame_generator.h index 913d882766..32ba21ed26 100644 --- a/test/testsupport/ivf_video_frame_generator.h +++ b/test/testsupport/ivf_video_frame_generator.h @@ -20,8 +20,8 @@ #include "api/video/video_frame.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/utility/ivf_file_reader.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { @@ -71,11 +71,11 @@ class IvfVideoFrameGenerator : public FrameGeneratorInterface { // FrameGenerator is injected into PeerConnection via some scoped_ref object // and it can happen that the last pointer will be destroyed on the different // thread comparing to the one from which frames were read. - rtc::CriticalSection lock_; + Mutex lock_; // This lock is used to sync between sending and receiving frame from decoder. // We can't reuse |lock_| because then generator can be destroyed between // frame was sent to decoder and decoder callback was invoked. 
- rtc::CriticalSection frame_decode_lock_; + Mutex frame_decode_lock_; rtc::Event next_frame_decoded_; absl::optional next_frame_ RTC_GUARDED_BY(frame_decode_lock_); diff --git a/test/testsupport/ivf_video_frame_generator_unittest.cc b/test/testsupport/ivf_video_frame_generator_unittest.cc index a5e99d1a66..bea9cd2489 100644 --- a/test/testsupport/ivf_video_frame_generator_unittest.cc +++ b/test/testsupport/ivf_video_frame_generator_unittest.cc @@ -25,7 +25,6 @@ #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/ivf_file_writer.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -34,6 +33,8 @@ #if defined(WEBRTC_USE_H264) #include "modules/video_coding/codecs/h264/include/h264.h" +#include "rtc_base/synchronization/mutex.h" + #endif namespace webrtc { @@ -48,7 +49,7 @@ constexpr int kMaxFrameEncodeWaitTimeoutMs = 2000; static const VideoEncoder::Capabilities kCapabilities(false); #if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) -constexpr double kExpectedMinPsnr = 36; +constexpr double kExpectedMinPsnr = 35; #else constexpr double kExpectedMinPsnr = 39; #endif @@ -67,11 +68,10 @@ class IvfFileWriterEncodedCallback : public EncodedImageCallback { ~IvfFileWriterEncodedCallback() { EXPECT_TRUE(file_writer_->Close()); } Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { + const CodecSpecificInfo* codec_specific_info) override { EXPECT_TRUE(file_writer_->WriteFrame(encoded_image, video_codec_type_)); - rtc::CritScope crit(&lock_); + MutexLock lock(&lock_); received_frames_count_++; RTC_CHECK_LE(received_frames_count_, expected_frames_count_); if (received_frames_count_ == expected_frames_count_) { @@ -89,7 +89,7 @@ class IvfFileWriterEncodedCallback : public 
EncodedImageCallback { const VideoCodecType video_codec_type_; const int expected_frames_count_; - rtc::CriticalSection lock_; + Mutex lock_; int received_frames_count_ RTC_GUARDED_BY(lock_) = 0; rtc::Event expected_frames_count_received_; }; diff --git a/test/testsupport/mock/mock_frame_reader.h b/test/testsupport/mock/mock_frame_reader.h index 8da3695d23..bda6b1ad2d 100644 --- a/test/testsupport/mock/mock_frame_reader.h +++ b/test/testsupport/mock/mock_frame_reader.h @@ -19,11 +19,11 @@ namespace test { class MockFrameReader : public FrameReader { public: - MOCK_METHOD0(Init, bool()); - MOCK_METHOD0(ReadFrame, rtc::scoped_refptr()); - MOCK_METHOD0(Close, void()); - MOCK_METHOD0(FrameLength, size_t()); - MOCK_METHOD0(NumberOfFrames, int()); + MOCK_METHOD(bool, Init, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, ReadFrame, (), (override)); + MOCK_METHOD(void, Close, (), (override)); + MOCK_METHOD(size_t, FrameLength, (), (override)); + MOCK_METHOD(int, NumberOfFrames, (), (override)); }; } // namespace test diff --git a/test/testsupport/perf_result_reporter.cc b/test/testsupport/perf_result_reporter.cc new file mode 100644 index 0000000000..e4c98e7446 --- /dev/null +++ b/test/testsupport/perf_result_reporter.cc @@ -0,0 +1,155 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/testsupport/perf_result_reporter.h" + +#include + +namespace { + +// These characters mess with either the stdout parsing or the dashboard itself. 
+const std::vector& InvalidCharacters() { + static const std::vector kInvalidCharacters({"/", ":", "="}); + + return kInvalidCharacters; +} + +void CheckForInvalidCharacters(const std::string& str) { + for (const auto& invalid : InvalidCharacters()) { + RTC_CHECK(str.find(invalid) == std::string::npos) + << "Given invalid character for perf names '" << invalid << "'"; + } +} + +} // namespace + +namespace webrtc { +namespace test { + +namespace { + +// For now, mark all tests as "not important". This distinction mostly goes away +// in histograms anyway. +const bool kNotImportant = false; + +std::string UnitToString(Unit unit) { + // Down the line, we should convert directly from Unit to the histogram.proto + // enum values. We need to convert to strings until all uses of perf_test.h + // have been eliminated. We're not using the proto enum directly in the .h of + // this file because we don't want to limit the exposure of the proto. + // + // Keep this list up to date with kJsonToProtoUnit in histogram.cc in the + // Catapult repo. 
+ switch (unit) { + case Unit::kMs: + return "ms"; + case Unit::kMsBestFitFormat: + return "msBestFitFormat"; + case Unit::kMsTs: + return "tsMs"; + case Unit::kNPercent: + return "n%"; + case Unit::kSizeInBytes: + return "sizeInBytes"; + case Unit::kBytesPerSecond: + return "bytesPerSecond"; + case Unit::kHertz: + return "Hz"; + case Unit::kUnitless: + return "unitless"; + case Unit::kCount: + return "count"; + case Unit::kSigma: + return "sigma"; + default: + RTC_NOTREACHED() << "Unknown unit " << unit; + return "unitless"; + } +} + +} // namespace + +PerfResultReporter::PerfResultReporter(const std::string& metric_basename, + const std::string& story_name) + : metric_basename_(metric_basename), story_name_(story_name) { + CheckForInvalidCharacters(metric_basename_); + CheckForInvalidCharacters(story_name_); +} + +PerfResultReporter::~PerfResultReporter() = default; + +void PerfResultReporter::RegisterMetric(const std::string& metric_suffix, + Unit unit) { + RegisterMetric(metric_suffix, unit, ImproveDirection::kNone); +} +void PerfResultReporter::RegisterMetric(const std::string& metric_suffix, + Unit unit, + ImproveDirection improve_direction) { + CheckForInvalidCharacters(metric_suffix); + RTC_CHECK(metric_map_.count(metric_suffix) == 0); + metric_map_.insert({metric_suffix, {unit, improve_direction}}); +} + +void PerfResultReporter::AddResult(const std::string& metric_suffix, + size_t value) const { + auto info = GetMetricInfoOrFail(metric_suffix); + + PrintResult(metric_basename_, metric_suffix, story_name_, value, + UnitToString(info.unit), kNotImportant, info.improve_direction); +} + +void PerfResultReporter::AddResult(const std::string& metric_suffix, + double value) const { + auto info = GetMetricInfoOrFail(metric_suffix); + + PrintResult(metric_basename_, metric_suffix, story_name_, value, + UnitToString(info.unit), kNotImportant, info.improve_direction); +} + +void PerfResultReporter::AddResultList( + const std::string& metric_suffix, + rtc::ArrayView 
values) const { + auto info = GetMetricInfoOrFail(metric_suffix); + + PrintResultList(metric_basename_, metric_suffix, story_name_, values, + UnitToString(info.unit), kNotImportant, + info.improve_direction); +} + +void PerfResultReporter::AddResultMeanAndError(const std::string& metric_suffix, + const double mean, + const double error) { + auto info = GetMetricInfoOrFail(metric_suffix); + + PrintResultMeanAndError(metric_basename_, metric_suffix, story_name_, mean, + error, UnitToString(info.unit), kNotImportant, + info.improve_direction); +} + +absl::optional PerfResultReporter::GetMetricInfo( + const std::string& metric_suffix) const { + auto iter = metric_map_.find(metric_suffix); + if (iter == metric_map_.end()) { + return absl::optional(); + } + + return absl::optional(iter->second); +} + +MetricInfo PerfResultReporter::GetMetricInfoOrFail( + const std::string& metric_suffix) const { + absl::optional info = GetMetricInfo(metric_suffix); + RTC_CHECK(info.has_value()) + << "Attempted to use unregistered metric " << metric_suffix; + return *info; +} + +} // namespace test +} // namespace webrtc diff --git a/test/testsupport/perf_result_reporter.h b/test/testsupport/perf_result_reporter.h new file mode 100644 index 0000000000..c8028574aa --- /dev/null +++ b/test/testsupport/perf_result_reporter.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef TEST_TESTSUPPORT_PERF_RESULT_REPORTER_H_ +#define TEST_TESTSUPPORT_PERF_RESULT_REPORTER_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "test/testsupport/perf_test.h" + +namespace webrtc { +namespace test { + +// These match the units in histogram.proto (in third_party/catapult). +enum class Unit { + kMs, + kMsBestFitFormat, + kMsTs, + kNPercent, + kSizeInBytes, + kBytesPerSecond, + kHertz, + kUnitless, + kCount, + kSigma, +}; + +struct MetricInfo { + Unit unit; + ImproveDirection improve_direction; +}; + +// A helper class for using the perf test printing functions safely, as +// otherwise it's easy to accidentally mix up arguments to produce usable but +// malformed perf data. See https://crbug.com/923564. +// +// Sample usage: +// auto reporter = PerfResultReporter("ramp_up_time", "bwe_15s"); +// reporter.RegisterImportantMetric( +// "_turn_over_tcp", Unit::kMs, ImproveDirection::kBiggerIsBetter); +// reporter.RegisterImportantMetric("_cpu_time", Unit::kMs); +// ... +// reporter.AddResult("turn_over_tcp", GetTurnOverTcpTime()); +// reporter.AddResult("turn_over_udp", GetTurnOverUdpTime()); +// +// This will show in the dashboard as +// (test binary name) > (bot) > ramp_up_time_turn_over_tcp > bwe_15s. +// (test binary name) > (bot) > ramp_up_time_turn_over_udp > bwe_15s. +// +// If you add more reporters that cover other user stories, they will show up +// as separate subtests (e.g. next to bwe_15s). 
+class PerfResultReporter { + public: + PerfResultReporter(const std::string& metric_basename, + const std::string& story_name); + ~PerfResultReporter(); + + void RegisterMetric(const std::string& metric_suffix, Unit unit); + void RegisterMetric(const std::string& metric_suffix, + Unit unit, + ImproveDirection improve_direction); + void AddResult(const std::string& metric_suffix, size_t value) const; + void AddResult(const std::string& metric_suffix, double value) const; + + void AddResultList(const std::string& metric_suffix, + rtc::ArrayView values) const; + + // Users should prefer AddResultList if possible, as otherwise the min/max + // values reported on the perf dashboard aren't useful. + // |mean_and_error| should be a comma-separated string of mean then + // error/stddev, e.g. "2.4,0.5". + void AddResultMeanAndError(const std::string& metric_suffix, + const double mean, + const double error); + + // Returns the metric info if it has been registered. + absl::optional GetMetricInfo( + const std::string& metric_suffix) const; + + private: + MetricInfo GetMetricInfoOrFail(const std::string& metric_suffix) const; + + std::string metric_basename_; + std::string story_name_; + std::unordered_map metric_map_; +}; + +} // namespace test +} // namespace webrtc + +#endif // TEST_TESTSUPPORT_PERF_RESULT_REPORTER_H_ diff --git a/test/testsupport/perf_test.cc b/test/testsupport/perf_test.cc index eedb0c8062..b68eaa46a1 100644 --- a/test/testsupport/perf_test.cc +++ b/test/testsupport/perf_test.cc @@ -17,25 +17,29 @@ #include #include -#include "absl/flags/flag.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" -#include "test/testsupport/perf_test_graphjson_writer.h" +#include "rtc_base/synchronization/mutex.h" +#include "test/testsupport/file_utils.h" #include "test/testsupport/perf_test_histogram_writer.h" -ABSL_FLAG(bool, - write_histogram_proto_json, - false, - "Use the histogram C++ API, which will write Histogram proto JSON " - "instead of 
Chart JSON. Note, Histogram set JSON and Histogram " - "proto JSON are not quite the same thing. This flag only has effect " - "if --isolated_script_test_perf_output is specified."); - namespace webrtc { namespace test { namespace { +std::string UnitWithDirection( + const std::string& units, + webrtc::test::ImproveDirection improve_direction) { + switch (improve_direction) { + case webrtc::test::ImproveDirection::kNone: + return units; + case webrtc::test::ImproveDirection::kSmallerIsBetter: + return units + "_smallerIsBetter"; + case webrtc::test::ImproveDirection::kBiggerIsBetter: + return units + "_biggerIsBetter"; + } +} + template void OutputListToStream(std::ostream* ostream, const Container& values) { const char* sep = ""; @@ -57,7 +61,7 @@ class PlottableCounterPrinter { PlottableCounterPrinter() : output_(stdout) {} void SetOutput(FILE* output) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); output_ = output; } @@ -65,14 +69,14 @@ class PlottableCounterPrinter { const std::string& trace_name, const webrtc::SamplesStatsCounter& counter, const std::string& units) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); plottable_counters_.push_back({graph_name, trace_name, counter, units}); } void Print(const std::vector& desired_graphs_raw) const { std::set desired_graphs(desired_graphs_raw.begin(), desired_graphs_raw.end()); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); for (auto& counter : plottable_counters_) { if (!desired_graphs.empty()) { auto it = desired_graphs.find(counter.graph_name); @@ -105,9 +109,9 @@ class PlottableCounterPrinter { } private: - rtc::CriticalSection crit_; - std::vector plottable_counters_ RTC_GUARDED_BY(&crit_); - FILE* output_ RTC_GUARDED_BY(&crit_); + mutable Mutex mutex_; + std::vector plottable_counters_ RTC_GUARDED_BY(&mutex_); + FILE* output_ RTC_GUARDED_BY(&mutex_); }; PlottableCounterPrinter& GetPlottableCounterPrinter() { @@ -120,7 +124,7 @@ class ResultsLinePrinter { ResultsLinePrinter() : 
output_(stdout) {} void SetOutput(FILE* output) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); output_ = output; } @@ -174,7 +178,7 @@ class ResultsLinePrinter { const std::string& suffix, const std::string& units, bool important) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // <*>RESULT : = // <*>RESULT : = {, } // <*>RESULT : = [,value,value,...,] @@ -183,8 +187,8 @@ class ResultsLinePrinter { values.c_str(), suffix.c_str(), units.c_str()); } - rtc::CriticalSection crit_; - FILE* output_ RTC_GUARDED_BY(&crit_); + Mutex mutex_; + FILE* output_ RTC_GUARDED_BY(&mutex_); }; ResultsLinePrinter& GetResultsLinePrinter() { @@ -193,13 +197,8 @@ ResultsLinePrinter& GetResultsLinePrinter() { } PerfTestResultWriter& GetPerfWriter() { - if (absl::GetFlag(FLAGS_write_histogram_proto_json)) { - static PerfTestResultWriter* writer = CreateHistogramWriter(); - return *writer; - } else { - static PerfTestResultWriter* writer = CreateGraphJsonWriter(); - return *writer; - } + static PerfTestResultWriter* writer = CreateHistogramWriter(); + return *writer; } } // namespace @@ -213,19 +212,33 @@ void SetPerfResultsOutput(FILE* output) { GetResultsLinePrinter().SetOutput(output); } -std::string GetPerfResultsJSON() { - return GetPerfWriter().ToJSON(); +std::string GetPerfResults() { + return GetPerfWriter().Serialize(); } void PrintPlottableResults(const std::vector& desired_graphs) { GetPlottableCounterPrinter().Print(desired_graphs); } -void WritePerfResults(const std::string& output_path) { - std::string json_results = GetPerfResultsJSON(); - std::fstream json_file(output_path, std::fstream::out); - json_file << json_results; - json_file.close(); +bool WritePerfResults(const std::string& output_path) { + std::string results = GetPerfResults(); + CreateDir(DirName(output_path)); + FILE* output = fopen(output_path.c_str(), "wb"); + if (output == NULL) { + printf("Failed to write to %s.\n", output_path.c_str()); + return false; + } + size_t written = + 
fwrite(results.c_str(), sizeof(char), results.size(), output); + fclose(output); + + if (written != results.size()) { + long expected = results.size(); + printf("Wrote %zu, tried to write %lu\n", written, expected); + return false; + } + + return true; } void PrintResult(const std::string& measurement, diff --git a/test/testsupport/perf_test.h b/test/testsupport/perf_test.h index 0550e44231..25535bce82 100644 --- a/test/testsupport/perf_test.h +++ b/test/testsupport/perf_test.h @@ -16,12 +16,11 @@ #include #include "api/array_view.h" -#include "rtc_base/numerics/samples_stats_counter.h" +#include "api/numerics/samples_stats_counter.h" namespace webrtc { namespace test { -// Metrics improver direction. enum class ImproveDirection { // Direction is undefined. kNone, @@ -31,22 +30,24 @@ enum class ImproveDirection { kBiggerIsBetter, }; -// Prints numerical information to stdout in a controlled format, for -// post-processing. |measurement| is a description of the quantity being -// measured, e.g. "vm_peak"; |modifier| is provided as a convenience and -// will be appended directly to the name of the |measurement|, e.g. -// "_browser"; |trace| is a description of the particular data point, e.g. -// "reference"; |value| is the measured value; and |units| is a description -// of the units of measure, e.g. "bytes". If |important| is true, the output -// line will be specially marked, to notify the post-processor. The strings -// may be empty. They should not contain any colons (:) or equals signs (=). -// A typical post-processing step would be to produce graphs of the data -// produced for various builds, using the combined |measurement| + |modifier| -// string to specify a particular graph and the |trace| to identify a trace -// (i.e., data series) on that graph. +// Prints a performance test result. 
+// +// For example, +// PrintResult("ramp_up_time_", "turn_over_tcp", +// "bwe_15s", 1234.2, "ms", false); +// +// will show up in the http://chromeperf.appspot.com under +// +// (test binary name) > (bot) > ramp_up_time_turn_over_tcp > bwe_15s. +// +// The |measurement| + |modifier| is what we're measuring. |user_story| is the +// scenario we're testing under. +// +// The binary this runs in must be hooked up as a perf test in the WebRTC +// recipes for this to actually be uploaded to chromeperf.appspot.com. void PrintResult(const std::string& measurement, const std::string& modifier, - const std::string& trace, + const std::string& user_story, const double value, const std::string& units, bool important, @@ -55,10 +56,11 @@ void PrintResult(const std::string& measurement, // Like PrintResult(), but prints a (mean, standard deviation) result pair. // The || should be two comma-separated numbers, the mean and // standard deviation (or other error metric) of the measurement. +// DEPRECATED: soon unsupported. void PrintResultMeanAndError( const std::string& measurement, const std::string& modifier, - const std::string& trace, + const std::string& user_story, const double mean, const double error, const std::string& units, @@ -72,7 +74,7 @@ void PrintResultMeanAndError( void PrintResultList( const std::string& measurement, const std::string& modifier, - const std::string& trace, + const std::string& user_story, rtc::ArrayView values, const std::string& units, bool important, @@ -82,15 +84,18 @@ void PrintResultList( // counter. Also add specified metric to the plotable metrics output. 
void PrintResult(const std::string& measurement, const std::string& modifier, - const std::string& trace, + const std::string& user_story, const SamplesStatsCounter& counter, const std::string& units, const bool important, ImproveDirection improve_direction = ImproveDirection::kNone); -// Returns all perf results to date in a JSON string formatted as described in -// https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md -std::string GetPerfResultsJSON(); +// Returns a string-encoded proto as described in +// tracing/tracing/proto/histogram.proto in +// https://github.com/catapult-project/catapult/blob/master/. +// If you want to print the proto in human readable format, use +// tracing/bin/proto2json from third_party/catapult in your WebRTC checkout. +std::string GetPerfResults(); // Print into stdout plottable metrics for further post processing. // |desired_graphs| - list of metrics, that should be plotted. If empty - all @@ -98,15 +103,18 @@ std::string GetPerfResultsJSON(); // they will be skipped. void PrintPlottableResults(const std::vector& desired_graphs); -// Writes the JSON representation of the perf results returned by -// GetPerfResultsJSON() to the file in output_path. -void WritePerfResults(const std::string& output_path); +// Call GetPerfResults() and write its output to a file. Returns false if we +// failed to write to the file. If you want to print the proto in human readable +// format, use tracing/bin/proto2json from third_party/catapult in your WebRTC +// checkout. +bool WritePerfResults(const std::string& output_path); -// By default, perf results are printed to stdout. Set the FILE* to where they -// should be printing instead. +// By default, human-readable perf results are printed to stdout. Set the FILE* +// to where they should be printing instead. These results are not used to +// upload to the dashboard, however - this is only through WritePerfResults. 
void SetPerfResultsOutput(FILE* output); -// You shouldn't use this function. It's only used to test the functions above. +// Only for use by tests. void ClearPerfResults(); } // namespace test diff --git a/test/testsupport/perf_test_graphjson_writer.cc b/test/testsupport/perf_test_graphjson_writer.cc deleted file mode 100644 index 5a8ee64709..0000000000 --- a/test/testsupport/perf_test_graphjson_writer.cc +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "test/testsupport/perf_test_graphjson_writer.h" - -#include -#include -#include -#include -#include - -#include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" - -namespace webrtc { -namespace test { - -std::string UnitWithDirection( - const std::string& units, - webrtc::test::ImproveDirection improve_direction) { - switch (improve_direction) { - case webrtc::test::ImproveDirection::kNone: - return units; - case webrtc::test::ImproveDirection::kSmallerIsBetter: - return units + "_smallerIsBetter"; - case webrtc::test::ImproveDirection::kBiggerIsBetter: - return units + "_biggerIsBetter"; - } -} - -template -void OutputListToStream(std::ostream* ostream, const Container& values) { - const char* sep = ""; - for (const auto& v : values) { - (*ostream) << sep << v; - sep = ","; - } -} - -namespace { - -class PerfTestGraphJsonWriter : public PerfTestResultWriter { - public: - PerfTestGraphJsonWriter() : crit_(), graphs_() {} - void ClearResults() { - rtc::CritScope lock(&crit_); - graphs_.clear(); - } - - void LogResult(const std::string& graph_name, - const std::string& trace_name, - const double value, - 
const std::string& units, - const bool important, - webrtc::test::ImproveDirection improve_direction) { - std::ostringstream json_stream; - json_stream << '"' << trace_name << R"(":{)"; - json_stream << R"("type":"scalar",)"; - json_stream << R"("value":)" << value << ','; - json_stream << R"("units":")" << UnitWithDirection(units, improve_direction) - << R"("})"; - rtc::CritScope lock(&crit_); - graphs_[graph_name].push_back(json_stream.str()); - } - - void LogResultMeanAndError(const std::string& graph_name, - const std::string& trace_name, - const double mean, - const double error, - const std::string& units, - const bool important, - webrtc::test::ImproveDirection improve_direction) { - std::ostringstream json_stream; - json_stream << '"' << trace_name << R"(":{)"; - json_stream << R"("type":"list_of_scalar_values",)"; - json_stream << R"("values":[)" << mean << "],"; - json_stream << R"("std":)" << error << ','; - json_stream << R"("units":")" << UnitWithDirection(units, improve_direction) - << R"("})"; - rtc::CritScope lock(&crit_); - graphs_[graph_name].push_back(json_stream.str()); - } - - void LogResultList(const std::string& graph_name, - const std::string& trace_name, - const rtc::ArrayView values, - const std::string& units, - const bool important, - webrtc::test::ImproveDirection improve_direction) { - std::ostringstream value_stream; - value_stream.precision(8); - value_stream << '['; - OutputListToStream(&value_stream, values); - value_stream << ']'; - - std::ostringstream json_stream; - json_stream << '"' << trace_name << R"(":{)"; - json_stream << R"("type":"list_of_scalar_values",)"; - json_stream << R"("values":)" << value_stream.str() << ','; - json_stream << R"("units":")" << UnitWithDirection(units, improve_direction) - << R"("})"; - rtc::CritScope lock(&crit_); - graphs_[graph_name].push_back(json_stream.str()); - } - - std::string ToJSON() const { - std::ostringstream json_stream; - json_stream << R"({"format_version":"1.0",)"; - json_stream 
<< R"("charts":{)"; - rtc::CritScope lock(&crit_); - for (auto graphs_it = graphs_.begin(); graphs_it != graphs_.end(); - ++graphs_it) { - if (graphs_it != graphs_.begin()) - json_stream << ','; - json_stream << '"' << graphs_it->first << "\":"; - json_stream << '{'; - OutputListToStream(&json_stream, graphs_it->second); - json_stream << '}'; - } - json_stream << "}}"; - return json_stream.str(); - } - - private: - rtc::CriticalSection crit_; - std::map> graphs_ - RTC_GUARDED_BY(&crit_); -}; - -} // namespace - -PerfTestResultWriter* CreateGraphJsonWriter() { - return new PerfTestGraphJsonWriter(); -} - -} // namespace test -} // namespace webrtc diff --git a/test/testsupport/perf_test_graphjson_writer.h b/test/testsupport/perf_test_graphjson_writer.h deleted file mode 100644 index ae32cfa9e0..0000000000 --- a/test/testsupport/perf_test_graphjson_writer.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef TEST_TESTSUPPORT_PERF_TEST_GRAPHJSON_WRITER_H_ -#define TEST_TESTSUPPORT_PERF_TEST_GRAPHJSON_WRITER_H_ - -#include - -#include "test/testsupport/perf_test.h" -#include "test/testsupport/perf_test_result_writer.h" - -namespace webrtc { -namespace test { - -PerfTestResultWriter* CreateGraphJsonWriter(); - -// Utilities that happen to be useful to perf_test.cc. Just move these back -// to perf_test.cc when this file goes away. 
-std::string UnitWithDirection(const std::string& units, - webrtc::test::ImproveDirection improve_direction); - -} // namespace test -} // namespace webrtc - -#endif // TEST_TESTSUPPORT_PERF_TEST_GRAPHJSON_WRITER_H_ diff --git a/test/testsupport/perf_test_histogram_writer.cc b/test/testsupport/perf_test_histogram_writer.cc index d82294bc68..a4f86dc5f0 100644 --- a/test/testsupport/perf_test_histogram_writer.cc +++ b/test/testsupport/perf_test_histogram_writer.cc @@ -13,15 +13,184 @@ #include #include +#include + +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" +#include "third_party/catapult/tracing/tracing/value/diagnostics/reserved_infos.h" +#include "third_party/catapult/tracing/tracing/value/histogram.h" namespace webrtc { namespace test { -namespace {} // namespace +namespace { + +namespace proto = catapult::tracing::tracing::proto; + +std::string AsJsonString(const std::string string) { + return "\"" + string + "\""; +} + +class PerfTestHistogramWriter : public PerfTestResultWriter { + public: + PerfTestHistogramWriter() : mutex_() {} + void ClearResults() override { + MutexLock lock(&mutex_); + histograms_.clear(); + } + + void LogResult(const std::string& graph_name, + const std::string& trace_name, + const double value, + const std::string& units, + const bool important, + ImproveDirection improve_direction) override { + (void)important; + AddSample(graph_name, trace_name, value, units, improve_direction); + } + void LogResultMeanAndError(const std::string& graph_name, + const std::string& trace_name, + const double mean, + const double error, + const std::string& units, + const bool important, + ImproveDirection improve_direction) override { + RTC_LOG(LS_WARNING) << "Discarding stddev, not supported by histograms"; + (void)error; + (void)important; + + AddSample(graph_name, trace_name, mean, units, improve_direction); + } + void LogResultList(const std::string& graph_name, + const std::string& trace_name, + const rtc::ArrayView 
values, + const std::string& units, + const bool important, + ImproveDirection improve_direction) override { + (void)important; + for (double value : values) { + AddSample(graph_name, trace_name, value, units, improve_direction); + } + } + std::string Serialize() const override { + proto::HistogramSet histogram_set; + + MutexLock lock(&mutex_); + for (const auto& histogram : histograms_) { + std::unique_ptr proto = histogram.second->toProto(); + histogram_set.mutable_histograms()->AddAllocated(proto.release()); + } + + std::string output; + bool ok = histogram_set.SerializeToString(&output); + RTC_DCHECK(ok) << "Failed to serialize histogram set to string"; + return output; + } + + private: + void AddSample(const std::string& original_graph_name, + const std::string& trace_name, + const double value, + const std::string& units, + ImproveDirection improve_direction) { + // WebRTC annotates the units into the metric name when they are not + // supported by the Histogram API. + std::string graph_name = original_graph_name; + if (units == "dB") { + graph_name += "_dB"; + } else if (units == "fps") { + graph_name += "_fps"; + } else if (units == "%") { + graph_name += "_%"; + } + + // Lookup on graph name + trace name (or measurement + story in catapult + // parlance). There should be several histograms with the same measurement + // if they're for different stories. + std::string measurement_and_story = graph_name + trace_name; + MutexLock lock(&mutex_); + if (histograms_.count(measurement_and_story) == 0) { + proto::UnitAndDirection unit = ParseUnit(units, improve_direction); + std::unique_ptr builder = + std::make_unique(graph_name, unit); + + // Set all summary options as false - we don't want to generate + // metric_std, metric_count, and so on for all metrics. 
+ builder->SetSummaryOptions(proto::SummaryOptions()); + histograms_[measurement_and_story] = std::move(builder); + + proto::Diagnostic stories; + proto::GenericSet* generic_set = stories.mutable_generic_set(); + generic_set->add_values(AsJsonString(trace_name)); + histograms_[measurement_and_story]->AddDiagnostic( + catapult::kStoriesDiagnostic, stories); + } + + if (units == "bps") { + // Bps has been interpreted as bits per second in WebRTC tests. + histograms_[measurement_and_story]->AddSample(value / 8); + } else { + histograms_[measurement_and_story]->AddSample(value); + } + } + + proto::UnitAndDirection ParseUnit(const std::string& units, + ImproveDirection improve_direction) { + RTC_DCHECK(units.find('_') == std::string::npos) + << "The unit_bigger|smallerIsBetter syntax isn't supported in WebRTC, " + "use the enum instead."; + + proto::UnitAndDirection result; + result.set_improvement_direction(ParseDirection(improve_direction)); + if (units == "bps") { + result.set_unit(proto::BYTES_PER_SECOND); + } else if (units == "dB") { + result.set_unit(proto::UNITLESS); + } else if (units == "fps") { + result.set_unit(proto::HERTZ); + } else if (units == "frames") { + result.set_unit(proto::COUNT); + } else if (units == "ms") { + result.set_unit(proto::MS_BEST_FIT_FORMAT); + } else if (units == "%") { + result.set_unit(proto::UNITLESS); + } else { + proto::Unit unit = catapult::UnitFromJsonUnit(units); + + // UnitFromJsonUnit returns UNITLESS if it doesn't recognize the unit. 
+ if (unit == proto::UNITLESS && units != "unitless") { + RTC_LOG(LS_WARNING) << "Unit " << units << " is unsupported."; + } + + result.set_unit(unit); + } + return result; + } + + proto::ImprovementDirection ParseDirection( + ImproveDirection improve_direction) { + switch (improve_direction) { + case ImproveDirection::kNone: + return proto::NOT_SPECIFIED; + case ImproveDirection::kSmallerIsBetter: + return proto::SMALLER_IS_BETTER; + case ImproveDirection::kBiggerIsBetter: + return proto::BIGGER_IS_BETTER; + default: + RTC_NOTREACHED() << "Invalid enum value " << improve_direction; + } + } + + private: + mutable Mutex mutex_; + std::map> histograms_ + RTC_GUARDED_BY(&mutex_); +}; + +} // namespace PerfTestResultWriter* CreateHistogramWriter() { - RTC_CHECK(false) << "Not implemented"; - return nullptr; + return new PerfTestHistogramWriter(); } } // namespace test diff --git a/api/test/mock_video_encoder.cc b/test/testsupport/perf_test_histogram_writer_no_protobuf.cc similarity index 51% rename from api/test/mock_video_encoder.cc rename to test/testsupport/perf_test_histogram_writer_no_protobuf.cc index a0d82b1c33..14deb37c66 100644 --- a/api/test/mock_video_encoder.cc +++ b/test/testsupport/perf_test_histogram_writer_no_protobuf.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,13 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "api/test/mock_video_encoder.h" +#include "test/testsupport/perf_test_histogram_writer.h" namespace webrtc { +namespace test { -MockEncodedImageCallback::MockEncodedImageCallback() = default; -MockEncodedImageCallback::~MockEncodedImageCallback() = default; -MockVideoEncoder::MockVideoEncoder() = default; -MockVideoEncoder::~MockVideoEncoder() = default; +PerfTestResultWriter* CreateHistogramWriter() { + RTC_NOTREACHED() << "Cannot run perf tests with rtc_enable_protobuf = false. " + "Perf write results as protobufs."; + return nullptr; +} +} // namespace test } // namespace webrtc diff --git a/test/testsupport/perf_test_histogram_writer_unittest.cc b/test/testsupport/perf_test_histogram_writer_unittest.cc new file mode 100644 index 0000000000..6b083d6543 --- /dev/null +++ b/test/testsupport/perf_test_histogram_writer_unittest.cc @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "test/testsupport/perf_test_histogram_writer.h" + +#include +#include + +#include "test/gtest.h" +#include "third_party/catapult/tracing/tracing/value/histogram.h" + +namespace webrtc { +namespace test { + +namespace proto = catapult::tracing::tracing::proto; + +TEST(PerfHistogramWriterUnittest, TestSimpleHistogram) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResult("-", "-", 0, "ms", false, ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + EXPECT_TRUE(histogram_set.ParseFromString(writer->Serialize())) + << "Expected valid histogram set"; + + ASSERT_EQ(histogram_set.histograms_size(), 1); +} + +TEST(PerfHistogramWriterUnittest, WritesSamplesAndUserStory) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResult("measurement", "user_story", 15e7, "Hz", false, + ImproveDirection::kBiggerIsBetter); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + const proto::Histogram& hist1 = histogram_set.histograms(0); + + EXPECT_EQ(hist1.name(), "measurement"); + + EXPECT_EQ(hist1.unit().unit(), proto::HERTZ); + EXPECT_EQ(hist1.unit().improvement_direction(), proto::BIGGER_IS_BETTER); + + EXPECT_EQ(hist1.sample_values_size(), 1); + EXPECT_EQ(hist1.sample_values(0), 15e7); + + EXPECT_EQ(hist1.diagnostics().diagnostic_map().count("stories"), 1u); + const proto::Diagnostic& stories = + hist1.diagnostics().diagnostic_map().at("stories"); + ASSERT_EQ(stories.generic_set().values_size(), 1); + EXPECT_EQ(stories.generic_set().values(0), "\"user_story\""); +} + +TEST(PerfHistogramWriterUnittest, WritesOneHistogramPerMeasurementAndStory) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResult("measurement", "story1", 1, "ms", false, + ImproveDirection::kNone); + writer->LogResult("measurement", "story1", 2, "ms", false, + ImproveDirection::kNone); + writer->LogResult("measurement", "story2", 
2, "ms", false, + ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + ASSERT_EQ(histogram_set.histograms_size(), 2); + + const proto::Histogram& hist1 = histogram_set.histograms(0); + const proto::Histogram& hist2 = histogram_set.histograms(1); + + EXPECT_EQ(hist1.name(), "measurement"); + EXPECT_EQ(hist2.name(), "measurement"); + + const proto::Diagnostic& stories1 = + hist1.diagnostics().diagnostic_map().at("stories"); + EXPECT_EQ(stories1.generic_set().values(0), "\"story1\""); + EXPECT_EQ(hist1.sample_values_size(), 2); + + const proto::Diagnostic& stories2 = + hist2.diagnostics().diagnostic_map().at("stories"); + EXPECT_EQ(stories2.generic_set().values(0), "\"story2\""); + EXPECT_EQ(hist2.sample_values_size(), 1); +} + +TEST(PerfHistogramWriterUnittest, IgnoresError) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResultMeanAndError("-", "-", 17, 12345, "ms", false, + ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + const proto::Histogram& hist1 = histogram_set.histograms(0); + + EXPECT_EQ(hist1.running().mean(), 17); + EXPECT_EQ(hist1.running().variance(), 0) << "The error should be ignored."; +} + +TEST(PerfHistogramWriterUnittest, WritesDecibelIntoMeasurementName) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResult("measurement", "-", 0, "dB", false, + ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + const proto::Histogram& hist1 = histogram_set.histograms(0); + + EXPECT_EQ(hist1.unit().unit(), proto::UNITLESS) + << "dB should map to unitless"; + EXPECT_EQ(hist1.name(), "measurement_dB") << "measurement should be renamed"; +} + +TEST(PerfHistogramWriterUnittest, WritesFpsIntoMeasurementName) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + 
 writer->LogResult("measurement", "-", 0, "fps", false, + ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + const proto::Histogram& hist1 = histogram_set.histograms(0); + + EXPECT_EQ(hist1.unit().unit(), proto::HERTZ) << "fps should map to hertz"; + EXPECT_EQ(hist1.name(), "measurement_fps") << "measurement should be renamed"; +} + +TEST(PerfHistogramWriterUnittest, WritesPercentIntoMeasurementName) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResult("measurement", "-", 0, "%", false, ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + const proto::Histogram& hist1 = histogram_set.histograms(0); + + EXPECT_EQ(hist1.unit().unit(), proto::UNITLESS) + << "percent should map to unitless"; + EXPECT_EQ(hist1.name(), "measurement_%") << "measurement should be renamed"; +} + +TEST(PerfHistogramWriterUnittest, BitsPerSecondIsConvertedToBytes) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResult("-", "-", 1024, "bps", false, ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + const proto::Histogram& hist1 = histogram_set.histograms(0); + + EXPECT_EQ(hist1.sample_values(0), 128) << "1024 bits = 128 bytes"; +} + +TEST(PerfHistogramWriterUnittest, ParsesDirection) { + std::unique_ptr writer = + std::unique_ptr(CreateHistogramWriter()); + + writer->LogResult("measurement1", "-", 0, "bps", false, + ImproveDirection::kBiggerIsBetter); + writer->LogResult("measurement2", "-", 0, "frames", false, + ImproveDirection::kSmallerIsBetter); + writer->LogResult("measurement3", "-", 0, "sigma", false, + ImproveDirection::kNone); + + proto::HistogramSet histogram_set; + histogram_set.ParseFromString(writer->Serialize()); + const proto::Histogram& hist1 = histogram_set.histograms(0); + const 
proto::Histogram& hist2 = histogram_set.histograms(1); + const proto::Histogram& hist3 = histogram_set.histograms(2); + + EXPECT_EQ(hist1.unit().unit(), proto::BYTES_PER_SECOND); + EXPECT_EQ(hist1.unit().improvement_direction(), proto::BIGGER_IS_BETTER); + + EXPECT_EQ(hist2.unit().unit(), proto::COUNT); + EXPECT_EQ(hist2.unit().improvement_direction(), proto::SMALLER_IS_BETTER); + + EXPECT_EQ(hist3.unit().unit(), proto::SIGMA); + EXPECT_EQ(hist3.unit().improvement_direction(), proto::NOT_SPECIFIED); +} + +} // namespace test +} // namespace webrtc diff --git a/test/testsupport/perf_test_result_writer.h b/test/testsupport/perf_test_result_writer.h index 5e932ba51c..d5d7011749 100644 --- a/test/testsupport/perf_test_result_writer.h +++ b/test/testsupport/perf_test_result_writer.h @@ -47,7 +47,7 @@ class PerfTestResultWriter { const bool important, webrtc::test::ImproveDirection improve_direction) = 0; - virtual std::string ToJSON() const = 0; + virtual std::string Serialize() const = 0; }; } // namespace test diff --git a/test/testsupport/perf_test_unittest.cc b/test/testsupport/perf_test_unittest.cc index 8202471923..b779f4c6ec 100644 --- a/test/testsupport/perf_test_unittest.cc +++ b/test/testsupport/perf_test_unittest.cc @@ -17,46 +17,10 @@ #include "test/gtest.h" #include "test/testsupport/rtc_expect_death.h" -namespace { - -const char* kJsonExpected = R"({ - "format_version":"1.0", - "charts":{ - "foobar":{ - "baz_v":{ - "type":"scalar", - "value":7, - "units":"widgets" - }, - "baz_me":{ - "type":"list_of_scalar_values", - "values":[1], - "std":2, - "units":"lemurs" - }, - "baz_vl":{ - "type":"list_of_scalar_values", - "values":[1,2,3], - "units":"units" - } - }, - "measurementmodifier":{ - "trace":{ - "type":"scalar", - "value":42, - "units":"units" - } - } - } -})"; - -std::string RemoveSpaces(std::string s) { - s.erase(std::remove(s.begin(), s.end(), ' '), s.end()); - s.erase(std::remove(s.begin(), s.end(), '\n'), s.end()); - return s; -} - -} // namespace 
+#if WEBRTC_ENABLE_PROTOBUF +#include "third_party/catapult/tracing/tracing/value/histogram.h" +namespace proto = catapult::tracing::tracing::proto; +#endif namespace webrtc { namespace test { @@ -91,22 +55,55 @@ TEST_F(PerfTest, MAYBE_TestPrintResult) { EXPECT_EQ(expected, ::testing::internal::GetCapturedStdout()); } -TEST_F(PerfTest, TestGetPerfResultsJSON) { - PrintResult("measurement", "modifier", "trace", 42, "units", false); - PrintResult("foo", "bar", "baz_v", 7, "widgets", true); - PrintResultMeanAndError("foo", "bar", "baz_me", 1, 2, "lemurs", false); - const double kListOfScalars[] = {1, 2, 3}; - PrintResultList("foo", "bar", "baz_vl", kListOfScalars, "units", false); - - EXPECT_EQ(RemoveSpaces(kJsonExpected), GetPerfResultsJSON()); -} - TEST_F(PerfTest, TestClearPerfResults) { PrintResult("measurement", "modifier", "trace", 42, "units", false); ClearPerfResults(); - EXPECT_EQ(R"({"format_version":"1.0","charts":{}})", GetPerfResultsJSON()); + EXPECT_EQ("", GetPerfResults()); } +#if WEBRTC_ENABLE_PROTOBUF + +TEST_F(PerfTest, TestGetPerfResultsHistograms) { + PrintResult("measurement", "_modifier", "story_1", 42, "ms", false); + PrintResult("foo", "bar", "story_1", 7, "sigma", true); + // Note: the error will be ignored, not supported by histograms. 
+ PrintResultMeanAndError("foo", "bar", "story_1", 1, 2000, "sigma", false); + const double kListOfScalars[] = {1, 2, 3}; + PrintResultList("foo", "bar", "story_1", kListOfScalars, "sigma", false); + + proto::HistogramSet histogram_set; + EXPECT_TRUE(histogram_set.ParseFromString(GetPerfResults())) + << "Expected valid histogram set"; + + ASSERT_EQ(histogram_set.histograms_size(), 2) + << "Should be two histograms: foobar and measurement_modifier"; + const proto::Histogram& hist1 = histogram_set.histograms(0); + const proto::Histogram& hist2 = histogram_set.histograms(1); + + EXPECT_EQ(hist1.name(), "foobar"); + + // Spot check some things in here (there's a more thorough test on the + // histogram writer itself). + EXPECT_EQ(hist1.unit().unit(), proto::SIGMA); + EXPECT_EQ(hist1.sample_values_size(), 5); + EXPECT_EQ(hist1.sample_values(0), 7); + EXPECT_EQ(hist1.sample_values(1), 1); + EXPECT_EQ(hist1.sample_values(2), 1); + EXPECT_EQ(hist1.sample_values(3), 2); + EXPECT_EQ(hist1.sample_values(4), 3); + + EXPECT_EQ(hist1.diagnostics().diagnostic_map().count("stories"), 1u); + const proto::Diagnostic& stories = + hist1.diagnostics().diagnostic_map().at("stories"); + ASSERT_EQ(stories.generic_set().values_size(), 1); + EXPECT_EQ(stories.generic_set().values(0), "\"story_1\""); + + EXPECT_EQ(hist2.name(), "measurement_modifier"); + EXPECT_EQ(hist2.unit().unit(), proto::MS_BEST_FIT_FORMAT); +} + +#endif // WEBRTC_ENABLE_PROTOBUF + #if GTEST_HAS_DEATH_TEST using PerfDeathTest = PerfTest; diff --git a/test/testsupport/resources_dir_flag.cc b/test/testsupport/resources_dir_flag.cc index a6ab3b537b..87a449a401 100644 --- a/test/testsupport/resources_dir_flag.cc +++ b/test/testsupport/resources_dir_flag.cc @@ -10,6 +10,8 @@ #include "test/testsupport/resources_dir_flag.h" +#include "absl/flags/flag.h" + ABSL_FLAG(std::string, resources_dir, "", diff --git a/test/testsupport/resources_dir_flag.h b/test/testsupport/resources_dir_flag.h index 055cc82546..7d6f192d9b 100644 --- 
a/test/testsupport/resources_dir_flag.h +++ b/test/testsupport/resources_dir_flag.h @@ -13,7 +13,7 @@ #ifndef TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__ #define TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__ -#include "absl/flags/flag.h" +#include "absl/flags/declare.h" ABSL_DECLARE_FLAG(std::string, resources_dir); diff --git a/test/testsupport/test_artifacts_unittest.cc b/test/testsupport/test_artifacts_unittest.cc index 98de9e4bb8..fb577610fb 100644 --- a/test/testsupport/test_artifacts_unittest.cc +++ b/test/testsupport/test_artifacts_unittest.cc @@ -14,6 +14,7 @@ #include +#include "absl/flags/declare.h" #include "absl/flags/flag.h" #include "rtc_base/system/file_wrapper.h" #include "test/gtest.h" diff --git a/test/testsupport/video_frame_writer.h b/test/testsupport/video_frame_writer.h index db1d453775..f4af378b12 100644 --- a/test/testsupport/video_frame_writer.h +++ b/test/testsupport/video_frame_writer.h @@ -16,7 +16,6 @@ #include "api/video/video_frame.h" #include "rtc_base/buffer.h" -#include "rtc_base/critical_section.h" #include "test/testsupport/frame_writer.h" namespace webrtc { diff --git a/test/testsupport/y4m_frame_writer.cc b/test/testsupport/y4m_frame_writer.cc index 25106d886c..896524fa0c 100644 --- a/test/testsupport/y4m_frame_writer.cc +++ b/test/testsupport/y4m_frame_writer.cc @@ -41,7 +41,7 @@ bool Y4mFrameWriterImpl::Init() { return true; } -bool Y4mFrameWriterImpl::WriteFrame(uint8_t* frame_buffer) { +bool Y4mFrameWriterImpl::WriteFrame(const uint8_t* frame_buffer) { if (output_file_ == nullptr) { fprintf(stderr, "Y4mFrameWriterImpl is not initialized (output file is NULL)\n"); diff --git a/test/testsupport/yuv_frame_writer.cc b/test/testsupport/yuv_frame_writer.cc index 24f4f53133..59cb74ebeb 100644 --- a/test/testsupport/yuv_frame_writer.cc +++ b/test/testsupport/yuv_frame_writer.cc @@ -50,7 +50,7 @@ bool YuvFrameWriterImpl::Init() { return true; } -bool YuvFrameWriterImpl::WriteFrame(uint8_t* frame_buffer) { +bool 
YuvFrameWriterImpl::WriteFrame(const uint8_t* frame_buffer) { RTC_DCHECK(frame_buffer); if (output_file_ == nullptr) { fprintf(stderr, diff --git a/test/time_controller/BUILD.gn b/test/time_controller/BUILD.gn index 7f77f0afec..c9fffe6853 100644 --- a/test/time_controller/BUILD.gn +++ b/test/time_controller/BUILD.gn @@ -37,12 +37,13 @@ rtc_library("time_controller") { "../../rtc_base:checks", "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:rtc_event", + "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/synchronization:yield_policy", "../../rtc_base/task_utils:to_queued_task", "../../system_wrappers", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_include_tests) { @@ -51,13 +52,19 @@ if (rtc_include_tests) { sources = [ "external_time_controller_unittest.cc", "simulated_time_controller_unittest.cc", + "time_controller_conformance_test.cc", ] deps = [ ":time_controller", "../:test_support", + "../../api:time_controller", + "../../api/units:time_delta", + "../../rtc_base", "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_task_queue", + "../../rtc_base/synchronization:mutex", "../../rtc_base/task_utils:repeating_task", + "../../rtc_base/task_utils:to_queued_task", ] } } diff --git a/test/time_controller/external_time_controller.cc b/test/time_controller/external_time_controller.cc index dfeae8162d..ca8b5ac687 100644 --- a/test/time_controller/external_time_controller.cc +++ b/test/time_controller/external_time_controller.cc @@ -61,6 +61,13 @@ class ExternalTimeController::ProcessThreadWrapper : public ProcessThread { parent_->ScheduleNext(); } + void PostDelayedTask(std::unique_ptr task, + uint32_t milliseconds) override { + parent_->UpdateTime(); + thread_->PostDelayedTask(std::move(task), milliseconds); + parent_->ScheduleNext(); + } + void RegisterModule(Module* module, const rtc::Location& from) override { parent_->UpdateTime(); 
module_wrappers_.emplace(module, new ModuleWrapper(module, this)); @@ -100,6 +107,11 @@ class ExternalTimeController::ProcessThreadWrapper : public ProcessThread { ProcessThreadWrapper* thread_; }; + void Delete() override { + // ProcessThread shouldn't be deleted as a TaskQueue. + RTC_NOTREACHED(); + } + ModuleWrapper* GetWrapper(Module* module) { auto it = module_wrappers_.find(module); RTC_DCHECK(it != module_wrappers_.end()); diff --git a/test/time_controller/external_time_controller_unittest.cc b/test/time_controller/external_time_controller_unittest.cc index d93b42aaac..684009fea5 100644 --- a/test/time_controller/external_time_controller_unittest.cc +++ b/test/time_controller/external_time_controller_unittest.cc @@ -29,7 +29,7 @@ using ::testing::Invoke; using ::testing::MockFunction; using ::testing::NiceMock; using ::testing::Return; -constexpr Timestamp kStartTime = Timestamp::Seconds<1000>(); +constexpr Timestamp kStartTime = Timestamp::Seconds(1000); class FakeAlarm : public ControlledAlarmClock { public: @@ -82,8 +82,8 @@ void FakeAlarm::Sleep(TimeDelta duration) { } // namespace TEST(ExternalTimeControllerTest, TaskIsStoppedOnStop) { - const TimeDelta kShortInterval = TimeDelta::ms(5); - const TimeDelta kLongInterval = TimeDelta::ms(20); + const TimeDelta kShortInterval = TimeDelta::Millis(5); + const TimeDelta kLongInterval = TimeDelta::Millis(20); const int kShortIntervalCount = 4; const int kMargin = 1; FakeAlarm alarm(kStartTime); @@ -123,10 +123,10 @@ TEST(ExternalTimeControllerTest, TaskCanStopItself) { handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] { ++counter; handle.Stop(); - return TimeDelta::ms(2); + return TimeDelta::Millis(2); }); }); - time_simulation.AdvanceTime(TimeDelta::ms(10)); + time_simulation.AdvanceTime(TimeDelta::Millis(10)); EXPECT_EQ(counter.load(), 1); } @@ -160,7 +160,7 @@ TEST(ExternalTimeControllerTest, TasksYieldToEachOther) { EXPECT_TRUE(event.Wait(200)); }); - time_simulation.AdvanceTime(TimeDelta::ms(300)); 
+ time_simulation.AdvanceTime(TimeDelta::Millis(300)); } TEST(ExternalTimeControllerTest, CurrentTaskQueue) { @@ -173,7 +173,7 @@ TEST(ExternalTimeControllerTest, CurrentTaskQueue) { task_queue.PostTask([&] { EXPECT_TRUE(task_queue.IsCurrent()); }); - time_simulation.AdvanceTime(TimeDelta::ms(10)); + time_simulation.AdvanceTime(TimeDelta::Millis(10)); } } // namespace webrtc diff --git a/test/time_controller/real_time_controller.cc b/test/time_controller/real_time_controller.cc index 73772b968f..2e741cf20c 100644 --- a/test/time_controller/real_time_controller.cc +++ b/test/time_controller/real_time_controller.cc @@ -11,7 +11,6 @@ #include "api/task_queue/default_task_queue_factory.h" #include "rtc_base/null_socket_server.h" -#include "system_wrappers/include/sleep.h" namespace webrtc { namespace { diff --git a/test/time_controller/simulated_process_thread.cc b/test/time_controller/simulated_process_thread.cc index 540e061fa4..e001841ac0 100644 --- a/test/time_controller/simulated_process_thread.cc +++ b/test/time_controller/simulated_process_thread.cc @@ -38,8 +38,8 @@ SimulatedProcessThread::~SimulatedProcessThread() { } void SimulatedProcessThread::RunReady(Timestamp at_time) { - TokenTaskQueue::CurrentTaskQueueSetter set_current(this); - rtc::CritScope lock(&lock_); + CurrentTaskQueueSetter set_current(this); + MutexLock lock(&lock_); std::vector ready_modules; for (auto it = delayed_modules_.begin(); it != delayed_modules_.end() && it->first <= at_time; @@ -53,13 +53,20 @@ void SimulatedProcessThread::RunReady(Timestamp at_time) { delayed_modules_[GetNextTime(module, at_time)].push_back(module); } + for (auto it = delayed_tasks_.begin(); + it != delayed_tasks_.end() && it->first <= at_time; + it = delayed_tasks_.erase(it)) { + for (auto& task : it->second) { + queue_.push_back(std::move(task)); + } + } while (!queue_.empty()) { std::unique_ptr task = std::move(queue_.front()); queue_.pop_front(); - lock_.Leave(); + lock_.Unlock(); bool should_delete = 
task->Run(); RTC_CHECK(should_delete); - lock_.Enter(); + lock_.Lock(); } RTC_DCHECK(queue_.empty()); if (!delayed_modules_.empty()) { @@ -67,11 +74,14 @@ void SimulatedProcessThread::RunReady(Timestamp at_time) { } else { next_run_time_ = Timestamp::PlusInfinity(); } + if (!delayed_tasks_.empty()) { + next_run_time_ = std::min(next_run_time_, delayed_tasks_.begin()->first); + } } void SimulatedProcessThread::Start() { std::vector starting; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); if (process_thread_running_) return; process_thread_running_ = true; @@ -81,7 +91,7 @@ void SimulatedProcessThread::Start() { module->ProcessThreadAttached(this); Timestamp at_time = handler_->CurrentTime(); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); for (auto& module : starting) delayed_modules_[GetNextTime(module, at_time)].push_back(module); @@ -97,7 +107,7 @@ void SimulatedProcessThread::Start() { void SimulatedProcessThread::Stop() { std::vector stopping; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); process_thread_running_ = false; for (auto& delayed : delayed_modules_) { @@ -113,7 +123,7 @@ void SimulatedProcessThread::Stop() { } void SimulatedProcessThread::WakeUp(Module* module) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); for (auto it = delayed_modules_.begin(); it != delayed_modules_.end(); ++it) { if (RemoveByValue(&it->second, module)) break; @@ -126,7 +136,7 @@ void SimulatedProcessThread::WakeUp(Module* module) { void SimulatedProcessThread::RegisterModule(Module* module, const rtc::Location& from) { module->ProcessThreadAttached(this); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); if (!process_thread_running_) { stopped_modules_.push_back(module); } else { @@ -139,7 +149,7 @@ void SimulatedProcessThread::RegisterModule(Module* module, void SimulatedProcessThread::DeRegisterModule(Module* module) { bool modules_running; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); if 
(!process_thread_running_) { RemoveByValue(&stopped_modules_, module); } else { @@ -155,15 +165,24 @@ void SimulatedProcessThread::DeRegisterModule(Module* module) { } void SimulatedProcessThread::PostTask(std::unique_ptr task) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); queue_.emplace_back(std::move(task)); next_run_time_ = Timestamp::MinusInfinity(); } +void SimulatedProcessThread::PostDelayedTask(std::unique_ptr task, + uint32_t milliseconds) { + MutexLock lock(&lock_); + Timestamp target_time = + handler_->CurrentTime() + TimeDelta::Millis(milliseconds); + delayed_tasks_[target_time].push_back(std::move(task)); + next_run_time_ = std::min(next_run_time_, target_time); +} + Timestamp SimulatedProcessThread::GetNextTime(Module* module, Timestamp at_time) { CurrentTaskQueueSetter set_current(this); - return at_time + TimeDelta::ms(module->TimeUntilNextProcess()); + return at_time + TimeDelta::Millis(module->TimeUntilNextProcess()); } } // namespace webrtc diff --git a/test/time_controller/simulated_process_thread.h b/test/time_controller/simulated_process_thread.h index d6b132552a..54d5db7df8 100644 --- a/test/time_controller/simulated_process_thread.h +++ b/test/time_controller/simulated_process_thread.h @@ -16,12 +16,12 @@ #include #include +#include "rtc_base/synchronization/mutex.h" #include "test/time_controller/simulated_time_controller.h" namespace webrtc { -class SimulatedProcessThread : public TokenTaskQueue, - public ProcessThread, +class SimulatedProcessThread : public ProcessThread, public sim_time_impl::SimulatedSequenceRunner { public: SimulatedProcessThread(sim_time_impl::SimulatedTimeControllerImpl* handler, @@ -30,7 +30,7 @@ class SimulatedProcessThread : public TokenTaskQueue, void RunReady(Timestamp at_time) override; Timestamp GetNextRunTime() const override { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); return next_run_time_; } @@ -43,17 +43,25 @@ class SimulatedProcessThread : public TokenTaskQueue, void 
RegisterModule(Module* module, const rtc::Location& from) override; void DeRegisterModule(Module* module) override; void PostTask(std::unique_ptr task) override; + void PostDelayedTask(std::unique_ptr task, + uint32_t milliseconds) override; private: + void Delete() override { + // ProcessThread shouldn't be deleted as a TaskQueue. + RTC_NOTREACHED(); + } Timestamp GetNextTime(Module* module, Timestamp at_time); sim_time_impl::SimulatedTimeControllerImpl* const handler_; // Using char* to be debugger friendly. char* name_; - rtc::CriticalSection lock_; + mutable Mutex lock_; Timestamp next_run_time_ RTC_GUARDED_BY(lock_) = Timestamp::PlusInfinity(); std::deque> queue_; + std::map>> delayed_tasks_ + RTC_GUARDED_BY(lock_); bool process_thread_running_ RTC_GUARDED_BY(lock_) = false; std::vector stopped_modules_ RTC_GUARDED_BY(lock_); diff --git a/test/time_controller/simulated_task_queue.cc b/test/time_controller/simulated_task_queue.cc index 1f214efcb3..da675af81e 100644 --- a/test/time_controller/simulated_task_queue.cc +++ b/test/time_controller/simulated_task_queue.cc @@ -27,16 +27,22 @@ SimulatedTaskQueue::~SimulatedTaskQueue() { } void SimulatedTaskQueue::Delete() { + // Need to destroy the tasks outside of the lock because task destruction + // can lead to re-entry in SimulatedTaskQueue via custom destructors. 
+ std::deque> ready_tasks; + std::map>> delayed_tasks; { - rtc::CritScope lock(&lock_); - ready_tasks_.clear(); - delayed_tasks_.clear(); + MutexLock lock(&lock_); + ready_tasks_.swap(ready_tasks); + delayed_tasks_.swap(delayed_tasks); } + ready_tasks.clear(); + delayed_tasks.clear(); delete this; } void SimulatedTaskQueue::RunReady(Timestamp at_time) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); for (auto it = delayed_tasks_.begin(); it != delayed_tasks_.end() && it->first <= at_time; it = delayed_tasks_.erase(it)) { @@ -48,14 +54,14 @@ void SimulatedTaskQueue::RunReady(Timestamp at_time) { while (!ready_tasks_.empty()) { std::unique_ptr ready = std::move(ready_tasks_.front()); ready_tasks_.pop_front(); - lock_.Leave(); + lock_.Unlock(); bool delete_task = ready->Run(); if (delete_task) { ready.reset(); } else { ready.release(); } - lock_.Enter(); + lock_.Lock(); } if (!delayed_tasks_.empty()) { next_run_time_ = delayed_tasks_.begin()->first; @@ -65,15 +71,16 @@ void SimulatedTaskQueue::RunReady(Timestamp at_time) { } void SimulatedTaskQueue::PostTask(std::unique_ptr task) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); ready_tasks_.emplace_back(std::move(task)); next_run_time_ = Timestamp::MinusInfinity(); } void SimulatedTaskQueue::PostDelayedTask(std::unique_ptr task, uint32_t milliseconds) { - rtc::CritScope lock(&lock_); - Timestamp target_time = handler_->CurrentTime() + TimeDelta::ms(milliseconds); + MutexLock lock(&lock_); + Timestamp target_time = + handler_->CurrentTime() + TimeDelta::Millis(milliseconds); delayed_tasks_[target_time].push_back(std::move(task)); next_run_time_ = std::min(next_run_time_, target_time); } diff --git a/test/time_controller/simulated_task_queue.h b/test/time_controller/simulated_task_queue.h index 940117c85b..5035f799fc 100644 --- a/test/time_controller/simulated_task_queue.h +++ b/test/time_controller/simulated_task_queue.h @@ -15,6 +15,7 @@ #include #include +#include 
"rtc_base/synchronization/mutex.h" #include "test/time_controller/simulated_time_controller.h" namespace webrtc { @@ -30,7 +31,7 @@ class SimulatedTaskQueue : public TaskQueueBase, void RunReady(Timestamp at_time) override; Timestamp GetNextRunTime() const override { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); return next_run_time_; } TaskQueueBase* GetAsTaskQueue() override { return this; } @@ -46,7 +47,7 @@ class SimulatedTaskQueue : public TaskQueueBase, // Using char* to be debugger friendly. char* name_; - rtc::CriticalSection lock_; + mutable Mutex lock_; std::deque> ready_tasks_ RTC_GUARDED_BY(lock_); std::map>> delayed_tasks_ diff --git a/test/time_controller/simulated_thread.cc b/test/time_controller/simulated_thread.cc index 937fe3207e..aa8b9ac90d 100644 --- a/test/time_controller/simulated_thread.cc +++ b/test/time_controller/simulated_thread.cc @@ -59,11 +59,11 @@ void SimulatedThread::RunReady(Timestamp at_time) { CurrentThreadSetter set_current(this); ProcessMessages(0); int delay_ms = GetDelay(); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); if (delay_ms == kForever) { next_run_time_ = Timestamp::PlusInfinity(); } else { - next_run_time_ = at_time + TimeDelta::ms(delay_ms); + next_run_time_ = at_time + TimeDelta::Millis(delay_ms); } } @@ -83,6 +83,7 @@ void SimulatedThread::Send(const rtc::Location& posted_from, } else { TaskQueueBase* yielding_from = TaskQueueBase::Current(); handler_->StartYield(yielding_from); + RunReady(Timestamp::MinusInfinity()); CurrentThreadSetter set_current(this); msg.phandler->OnMessage(&msg); handler_->StopYield(yielding_from); @@ -95,7 +96,7 @@ void SimulatedThread::Post(const rtc::Location& posted_from, rtc::MessageData* pdata, bool time_sensitive) { rtc::Thread::Post(posted_from, phandler, id, pdata, time_sensitive); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); next_run_time_ = Timestamp::MinusInfinity(); } @@ -105,9 +106,9 @@ void SimulatedThread::PostDelayed(const rtc::Location& 
posted_from, uint32_t id, rtc::MessageData* pdata) { rtc::Thread::PostDelayed(posted_from, delay_ms, phandler, id, pdata); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); next_run_time_ = - std::min(next_run_time_, Timestamp::ms(rtc::TimeMillis() + delay_ms)); + std::min(next_run_time_, Timestamp::Millis(rtc::TimeMillis() + delay_ms)); } void SimulatedThread::PostAt(const rtc::Location& posted_from, @@ -116,8 +117,8 @@ void SimulatedThread::PostAt(const rtc::Location& posted_from, uint32_t id, rtc::MessageData* pdata) { rtc::Thread::PostAt(posted_from, target_time_ms, phandler, id, pdata); - rtc::CritScope lock(&lock_); - next_run_time_ = std::min(next_run_time_, Timestamp::ms(target_time_ms)); + MutexLock lock(&lock_); + next_run_time_ = std::min(next_run_time_, Timestamp::Millis(target_time_ms)); } void SimulatedThread::Stop() { diff --git a/test/time_controller/simulated_thread.h b/test/time_controller/simulated_thread.h index fd3969670a..b6c1e6e265 100644 --- a/test/time_controller/simulated_thread.h +++ b/test/time_controller/simulated_thread.h @@ -12,6 +12,7 @@ #include +#include "rtc_base/synchronization/mutex.h" #include "test/time_controller/simulated_time_controller.h" namespace webrtc { @@ -28,7 +29,7 @@ class SimulatedThread : public rtc::Thread, void RunReady(Timestamp at_time) override; Timestamp GetNextRunTime() const override { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); return next_run_time_; } @@ -61,7 +62,7 @@ class SimulatedThread : public rtc::Thread, sim_time_impl::SimulatedTimeControllerImpl* const handler_; // Using char* to be debugger friendly. 
char* name_; - rtc::CriticalSection lock_; + mutable Mutex lock_; Timestamp next_run_time_ RTC_GUARDED_BY(lock_) = Timestamp::PlusInfinity(); }; diff --git a/test/time_controller/simulated_time_controller.cc b/test/time_controller/simulated_time_controller.cc index a81083b4fb..aba8c6600e 100644 --- a/test/time_controller/simulated_time_controller.cc +++ b/test/time_controller/simulated_time_controller.cc @@ -57,7 +57,6 @@ SimulatedTimeControllerImpl::CreateTaskQueue( std::unique_ptr SimulatedTimeControllerImpl::CreateProcessThread( const char* thread_name) { - rtc::CritScope lock(&lock_); auto process_thread = std::make_unique(this, thread_name); Register(process_thread.get()); @@ -96,7 +95,7 @@ void SimulatedTimeControllerImpl::RunReadyRunners() { // Using a dummy thread rather than nullptr to avoid implicit thread creation // by Thread::Current(). SimulatedThread::CurrentThreadSetter set_current(dummy_thread_.get()); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); RTC_DCHECK_EQ(rtc::CurrentThreadId(), thread_id_); Timestamp current_time = CurrentTime(); // Clearing |ready_runners_| in case this is a recursive call: @@ -117,23 +116,25 @@ void SimulatedTimeControllerImpl::RunReadyRunners() { while (!ready_runners_.empty()) { auto* runner = ready_runners_.front(); ready_runners_.pop_front(); + lock_.Unlock(); // Note that the RunReady function might indirectly cause a call to - // Unregister() which will recursively grab |lock_| again to remove items - // from |ready_runners_|. + // Unregister() which will grab |lock_| again to remove items from + // |ready_runners_|. 
runner->RunReady(current_time); + lock_.Lock(); } } } Timestamp SimulatedTimeControllerImpl::CurrentTime() const { - rtc::CritScope lock(&time_lock_); + MutexLock lock(&time_lock_); return current_time_; } Timestamp SimulatedTimeControllerImpl::NextRunTime() const { Timestamp current_time = CurrentTime(); Timestamp next_time = Timestamp::PlusInfinity(); - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); for (auto* runner : runners_) { Timestamp next_run_time = runner->GetNextRunTime(); if (next_run_time <= current_time) @@ -144,18 +145,18 @@ Timestamp SimulatedTimeControllerImpl::NextRunTime() const { } void SimulatedTimeControllerImpl::AdvanceTime(Timestamp target_time) { - rtc::CritScope time_lock(&time_lock_); + MutexLock time_lock(&time_lock_); RTC_DCHECK_GE(target_time, current_time_); current_time_ = target_time; } void SimulatedTimeControllerImpl::Register(SimulatedSequenceRunner* runner) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); runners_.push_back(runner); } void SimulatedTimeControllerImpl::Unregister(SimulatedSequenceRunner* runner) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); bool removed = RemoveByValue(&runners_, runner); RTC_CHECK(removed); RemoveByValue(&ready_runners_, runner); @@ -169,6 +170,7 @@ void SimulatedTimeControllerImpl::StartYield(TaskQueueBase* yielding_from) { void SimulatedTimeControllerImpl::StopYield(TaskQueueBase* yielding_from) { yielded_.erase(yielding_from); } + } // namespace sim_time_impl GlobalSimulatedTimeController::GlobalSimulatedTimeController( diff --git a/test/time_controller/simulated_time_controller.h b/test/time_controller/simulated_time_controller.h index 758f90989e..6c6dbfab9d 100644 --- a/test/time_controller/simulated_time_controller.h +++ b/test/time_controller/simulated_time_controller.h @@ -21,9 +21,9 @@ #include "api/units/timestamp.h" #include "modules/include/module.h" #include "modules/utility/include/process_thread.h" -#include "rtc_base/critical_section.h" #include 
"rtc_base/fake_clock.h" #include "rtc_base/platform_thread_types.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/yield_policy.h" #include "rtc_base/thread_checker.h" @@ -52,32 +52,34 @@ class SimulatedTimeControllerImpl : public TaskQueueFactory, std::unique_ptr CreateTaskQueue( absl::string_view name, - Priority priority) const override; + Priority priority) const RTC_LOCKS_EXCLUDED(time_lock_) override; // Implements the YieldInterface by running ready tasks on all task queues, // except that if this method is called from a task, the task queue running // that task is skipped. - void YieldExecution() override; + void YieldExecution() RTC_LOCKS_EXCLUDED(time_lock_, lock_) override; // Create process thread with the name |thread_name|. - std::unique_ptr CreateProcessThread(const char* thread_name); + std::unique_ptr CreateProcessThread(const char* thread_name) + RTC_LOCKS_EXCLUDED(time_lock_, lock_); // Create thread using provided |socket_server|. std::unique_ptr CreateThread( const std::string& name, - std::unique_ptr socket_server); + std::unique_ptr socket_server) + RTC_LOCKS_EXCLUDED(time_lock_, lock_); // Runs all runners in |runners_| that has tasks or modules ready for // execution. - void RunReadyRunners(); + void RunReadyRunners() RTC_LOCKS_EXCLUDED(time_lock_, lock_); // Return |current_time_|. - Timestamp CurrentTime() const; + Timestamp CurrentTime() const RTC_LOCKS_EXCLUDED(time_lock_); // Return min of runner->GetNextRunTime() for runner in |runners_|. - Timestamp NextRunTime() const; + Timestamp NextRunTime() const RTC_LOCKS_EXCLUDED(lock_); // Set |current_time_| to |target_time|. - void AdvanceTime(Timestamp target_time); + void AdvanceTime(Timestamp target_time) RTC_LOCKS_EXCLUDED(time_lock_); // Adds |runner| to |runners_|. - void Register(SimulatedSequenceRunner* runner); + void Register(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_); // Removes |runner| from |runners_|. 
- void Unregister(SimulatedSequenceRunner* runner); + void Unregister(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_); // Indicates that |yielding_from| is not ready to run. void StartYield(TaskQueueBase* yielding_from); @@ -87,9 +89,9 @@ class SimulatedTimeControllerImpl : public TaskQueueFactory, private: const rtc::PlatformThreadId thread_id_; const std::unique_ptr dummy_thread_ = rtc::Thread::Create(); - rtc::CriticalSection time_lock_; + mutable Mutex time_lock_; Timestamp current_time_ RTC_GUARDED_BY(time_lock_); - rtc::CriticalSection lock_; + mutable Mutex lock_; std::vector runners_ RTC_GUARDED_BY(lock_); // Used in RunReadyRunners() to keep track of ready runners that are to be // processed in a round robin fashion. the reason it's a member is so that diff --git a/test/time_controller/simulated_time_controller_unittest.cc b/test/time_controller/simulated_time_controller_unittest.cc index 2fe4bd2df2..e5f704c43f 100644 --- a/test/time_controller/simulated_time_controller_unittest.cc +++ b/test/time_controller/simulated_time_controller_unittest.cc @@ -29,12 +29,12 @@ using ::testing::Invoke; using ::testing::MockFunction; using ::testing::NiceMock; using ::testing::Return; -constexpr Timestamp kStartTime = Timestamp::Seconds<1000>(); +constexpr Timestamp kStartTime = Timestamp::Seconds(1000); } // namespace TEST(SimulatedTimeControllerTest, TaskIsStoppedOnStop) { - const TimeDelta kShortInterval = TimeDelta::ms(5); - const TimeDelta kLongInterval = TimeDelta::ms(20); + const TimeDelta kShortInterval = TimeDelta::Millis(5); + const TimeDelta kLongInterval = TimeDelta::Millis(20); const int kShortIntervalCount = 4; const int kMargin = 1; GlobalSimulatedTimeController time_simulation(kStartTime); @@ -72,10 +72,10 @@ TEST(SimulatedTimeControllerTest, TaskCanStopItself) { handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] { ++counter; handle.Stop(); - return TimeDelta::ms(2); + return TimeDelta::Millis(2); }); }); - 
time_simulation.AdvanceTime(TimeDelta::ms(10)); + time_simulation.AdvanceTime(TimeDelta::Millis(10)); EXPECT_EQ(counter.load(), 1); } @@ -83,7 +83,7 @@ TEST(SimulatedTimeControllerTest, Example) { class ObjectOnTaskQueue { public: void DoPeriodicTask() {} - TimeDelta TimeUntilNextRun() { return TimeDelta::ms(100); } + TimeDelta TimeUntilNextRun() { return TimeDelta::Millis(100); } void StartPeriodicTask(RepeatingTaskHandle* handle, rtc::TaskQueue* task_queue) { *handle = RepeatingTaskHandle::Start(task_queue->Get(), [this] { @@ -123,7 +123,7 @@ TEST(SimulatedTimeControllerTest, DelayTaskRunOnTime) { bool delay_task_executed = false; task_queue.PostDelayedTask([&] { delay_task_executed = true; }, 10); - time_simulation.AdvanceTime(TimeDelta::ms(10)); + time_simulation.AdvanceTime(TimeDelta::Millis(10)); EXPECT_TRUE(delay_task_executed); } @@ -145,7 +145,7 @@ TEST(SimulatedTimeControllerTest, ThreadYeildsOnInvoke) { // Since we are doing an invoke from the main thread, we don't expect the main // thread message loop to be processed. EXPECT_FALSE(task_has_run); - sim.AdvanceTime(TimeDelta::seconds(1)); + sim.AdvanceTime(TimeDelta::Seconds(1)); ASSERT_TRUE(task_has_run); } diff --git a/test/time_controller/time_controller_conformance_test.cc b/test/time_controller/time_controller_conformance_test.cc new file mode 100644 index 0000000000..10f0e1d724 --- /dev/null +++ b/test/time_controller/time_controller_conformance_test.cc @@ -0,0 +1,169 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include + +#include "api/test/time_controller.h" +#include "api/units/time_delta.h" +#include "rtc_base/event.h" +#include "rtc_base/location.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/time_controller/real_time_controller.h" +#include "test/time_controller/simulated_time_controller.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAreArray; +using ::testing::TestParamInfo; +using ::testing::TestWithParam; +using ::testing::Values; + +enum class TimeMode { kRealTime, kSimulated }; + +std::unique_ptr CreateTimeController(TimeMode mode) { + switch (mode) { + case TimeMode::kRealTime: + return std::make_unique(); + case TimeMode::kSimulated: + // Using an offset of 100000 to get nice fixed width and readable + // timestamps in typical test scenarios. + constexpr Timestamp kSimulatedStartTime = Timestamp::Seconds(100000); + return std::make_unique( + kSimulatedStartTime); + } +} + +std::string ParamsToString(const TestParamInfo& param) { + switch (param.param) { + case webrtc::TimeMode::kRealTime: + return "RealTime"; + case webrtc::TimeMode::kSimulated: + return "SimulatedTime"; + default: + RTC_NOTREACHED() << "Time mode not supported"; + } +} + +// Keeps order of executions. May be called from different threads. +class ExecutionOrderKeeper { + public: + void Executed(int execution_id) { + MutexLock lock(&mutex_); + order_.push_back(execution_id); + } + + std::vector order() const { + MutexLock lock(&mutex_); + return order_; + } + + private: + mutable Mutex mutex_; + std::vector order_ RTC_GUARDED_BY(mutex_); +}; + +// Tests conformance between real time and simulated time time controller. 
+class SimulatedRealTimeControllerConformanceTest + : public TestWithParam {}; + +TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostOrderTest) { + std::unique_ptr time_controller = + CreateTimeController(GetParam()); + std::unique_ptr thread = time_controller->CreateThread("thread"); + + // Tasks on thread have to be executed in order in which they were + // posted. + ExecutionOrderKeeper execution_order; + thread->PostTask(RTC_FROM_HERE, [&]() { execution_order.Executed(1); }); + thread->PostTask(RTC_FROM_HERE, [&]() { execution_order.Executed(2); }); + time_controller->AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2})); +} + +TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostDelayedOrderTest) { + std::unique_ptr time_controller = + CreateTimeController(GetParam()); + std::unique_ptr thread = time_controller->CreateThread("thread"); + + ExecutionOrderKeeper execution_order; + thread->PostDelayedTask(ToQueuedTask([&]() { execution_order.Executed(2); }), + /*milliseconds=*/500); + thread->PostTask(ToQueuedTask([&]() { execution_order.Executed(1); })); + time_controller->AdvanceTime(TimeDelta::Millis(600)); + EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2})); +} + +TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostInvokeOrderTest) { + std::unique_ptr time_controller = + CreateTimeController(GetParam()); + std::unique_ptr thread = time_controller->CreateThread("thread"); + + // Tasks on thread have to be executed in order in which they were + // posted/invoked. 
+ ExecutionOrderKeeper execution_order; + thread->PostTask(RTC_FROM_HERE, [&]() { execution_order.Executed(1); }); + thread->Invoke(RTC_FROM_HERE, [&]() { execution_order.Executed(2); }); + time_controller->AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2})); +} + +TEST_P(SimulatedRealTimeControllerConformanceTest, + ThreadPostInvokeFromThreadOrderTest) { + std::unique_ptr time_controller = + CreateTimeController(GetParam()); + std::unique_ptr thread = time_controller->CreateThread("thread"); + + // If task is invoked from thread X on thread X it has to be executed + // immediately. + ExecutionOrderKeeper execution_order; + thread->PostTask(RTC_FROM_HERE, [&]() { + thread->PostTask(RTC_FROM_HERE, [&]() { execution_order.Executed(2); }); + thread->Invoke(RTC_FROM_HERE, [&]() { execution_order.Executed(1); }); + }); + time_controller->AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2})); +} + +TEST_P(SimulatedRealTimeControllerConformanceTest, + TaskQueuePostEventWaitOrderTest) { + std::unique_ptr time_controller = + CreateTimeController(GetParam()); + auto task_queue = time_controller->GetTaskQueueFactory()->CreateTaskQueue( + "task_queue", webrtc::TaskQueueFactory::Priority::NORMAL); + + // Tasks on thread have to be executed in order in which they were + // posted/invoked. 
+ ExecutionOrderKeeper execution_order; + rtc::Event event; + task_queue->PostTask(ToQueuedTask([&]() { execution_order.Executed(1); })); + task_queue->PostTask(ToQueuedTask([&]() { + execution_order.Executed(2); + event.Set(); + })); + EXPECT_TRUE(event.Wait(/*give_up_after_ms=*/100, + /*warn_after_ms=*/10'000)); + time_controller->AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2})); +} + +INSTANTIATE_TEST_SUITE_P(ConformanceTest, + SimulatedRealTimeControllerConformanceTest, + Values(TimeMode::kRealTime, TimeMode::kSimulated), + ParamsToString); + +} // namespace +} // namespace webrtc diff --git a/test/video_codec_settings.h b/test/video_codec_settings.h index b5250486d7..fe160cb3cd 100644 --- a/test/video_codec_settings.h +++ b/test/video_codec_settings.h @@ -25,9 +25,7 @@ const int64_t kTestTimingFramesDelayMs = 200; const uint16_t kTestOutlierFrameSizePercent = 250; static void CodecSettings(VideoCodecType codec_type, VideoCodec* settings) { - memset(settings, 0, sizeof(VideoCodec)); - - settings->plType = kTestPayloadType; + *settings = {}; settings->width = kTestWidth; settings->height = kTestHeight; @@ -56,6 +54,11 @@ static void CodecSettings(VideoCodecType codec_type, VideoCodec* settings) { case kVideoCodecVP9: *(settings->VP9()) = VideoEncoder::GetDefaultVp9Settings(); return; +#ifndef DISABLE_H265 + case kVideoCodecH265: + *(settings->H265()) = VideoEncoder::GetDefaultH265Settings(); + return; +#endif case kVideoCodecH264: // TODO(brandtr): Set |qpMax| here, when the OpenH264 wrapper supports it. 
*(settings->H264()) = VideoEncoder::GetDefaultH264Settings(); diff --git a/test/video_decoder_proxy_factory.h b/test/video_decoder_proxy_factory.h index 7e0fcdb12a..84552e39a4 100644 --- a/test/video_decoder_proxy_factory.h +++ b/test/video_decoder_proxy_factory.h @@ -20,7 +20,7 @@ namespace webrtc { namespace test { -// An decoder factory with a single underlying VideoDecoder object, intended for +// A decoder factory with a single underlying VideoDecoder object, intended for // test purposes. Each call to CreateVideoDecoder returns a proxy for the same // decoder, typically an instance of FakeDecoder or MockEncoder. class VideoDecoderProxyFactory final : public VideoDecoderFactory { diff --git a/test/video_encoder_proxy_factory.h b/test/video_encoder_proxy_factory.h index d56091105c..7c412bacfa 100644 --- a/test/video_encoder_proxy_factory.h +++ b/test/video_encoder_proxy_factory.h @@ -30,10 +30,14 @@ const VideoEncoder::Capabilities kCapabilities(false); class VideoEncoderProxyFactory final : public VideoEncoderFactory { public: explicit VideoEncoderProxyFactory(VideoEncoder* encoder) + : VideoEncoderProxyFactory(encoder, nullptr) {} + + explicit VideoEncoderProxyFactory(VideoEncoder* encoder, + EncoderSelectorInterface* encoder_selector) : encoder_(encoder), + encoder_selector_(encoder_selector), num_simultaneous_encoder_instances_(0), max_num_simultaneous_encoder_instances_(0) { - codec_info_.is_hardware_accelerated = false; codec_info_.has_internal_source = false; } @@ -56,9 +60,15 @@ class VideoEncoderProxyFactory final : public VideoEncoderFactory { return std::make_unique(encoder_, this); } - void SetIsHardwareAccelerated(bool is_hardware_accelerated) { - codec_info_.is_hardware_accelerated = is_hardware_accelerated; + std::unique_ptr GetEncoderSelector() + const override { + if (encoder_selector_ != nullptr) { + return std::make_unique(encoder_selector_); + } + + return nullptr; } + void SetHasInternalSource(bool has_internal_source) { 
codec_info_.has_internal_source = has_internal_source; } @@ -117,7 +127,30 @@ class VideoEncoderProxyFactory final : public VideoEncoderFactory { VideoEncoderProxyFactory* const encoder_factory_; }; + class EncoderSelectorProxy final : public EncoderSelectorInterface { + public: + explicit EncoderSelectorProxy(EncoderSelectorInterface* encoder_selector) + : encoder_selector_(encoder_selector) {} + + void OnCurrentEncoder(const SdpVideoFormat& format) override { + encoder_selector_->OnCurrentEncoder(format); + } + + absl::optional OnAvailableBitrate( + const DataRate& rate) override { + return encoder_selector_->OnAvailableBitrate(rate); + } + + absl::optional OnEncoderBroken() override { + return encoder_selector_->OnEncoderBroken(); + } + + private: + EncoderSelectorInterface* const encoder_selector_; + }; + VideoEncoder* const encoder_; + EncoderSelectorInterface* const encoder_selector_; CodecInfo codec_info_; int num_simultaneous_encoder_instances_; diff --git a/test/win/run_loop_win.cc b/test/win/run_loop_win.cc deleted file mode 100644 index 95de16bf24..0000000000 --- a/test/win/run_loop_win.cc +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include -#include -#include -#include - -#include "rtc_base/task_queue_for_test.h" -#include "test/run_loop.h" - -namespace webrtc { -namespace test { - -void PressEnterToContinue(TaskQueueBase* task_queue) { - puts(">> Press ENTER to continue..."); - - while (!_kbhit() || _getch() != '\r') { - // Drive the message loop for the thread running the task_queue - SendTask(RTC_FROM_HERE, task_queue, [&]() { - MSG msg; - if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) { - TranslateMessage(&msg); - DispatchMessage(&msg); - } - }); - } -} -} // namespace test -} // namespace webrtc diff --git a/tools_webrtc/OWNERS b/tools_webrtc/OWNERS index ae45ba7186..48e6927746 100644 --- a/tools_webrtc/OWNERS +++ b/tools_webrtc/OWNERS @@ -1,3 +1 @@ mbonadei@webrtc.org -oprypin@webrtc.org -phoglund@webrtc.org diff --git a/tools_webrtc/PRESUBMIT.py b/tools_webrtc/PRESUBMIT.py index 80e20a348a..27f8bb10d2 100644 --- a/tools_webrtc/PRESUBMIT.py +++ b/tools_webrtc/PRESUBMIT.py @@ -8,39 +8,43 @@ def _LicenseHeader(input_api): - """Returns the license header regexp.""" - # Accept any year number from 2003 to the current year - current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1))) - years_re = '(' + '|'.join(allowed_years) + ')' - license_header = ( - r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' + """Returns the license header regexp.""" + # Accept any year number from 2003 to the current year + current_year = int(input_api.time.strftime('%Y')) + allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1))) + years_re = '(' + '|'.join(allowed_years) + ')' + license_header = ( + r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' r'All [Rr]ights [Rr]eserved\.\n' - r'.*?\n' - r'.*? Use of this source code is governed by a BSD-style license\n' - r'.*? that can be found in the LICENSE file in the root of the source\n' - r'.*? tree\. 
An additional intellectual property rights grant can be ' + r'.*?\n' + r'.*? Use of this source code is governed by a BSD-style license\n' + r'.*? that can be found in the LICENSE file in the root of the source\n' + r'.*? tree\. An additional intellectual property rights grant can be ' r'found\n' - r'.*? in the file PATENTS\. All contributing project authors may\n' - r'.*? be found in the AUTHORS file in the root of the source tree\.\n' - ) % { - 'year': years_re, - } - return license_header + r'.*? in the file PATENTS\. All contributing project authors may\n' + r'.*? be found in the AUTHORS file in the root of the source tree\.\n' + ) % { + 'year': years_re, + } + return license_header + def _CommonChecks(input_api, output_api): - """Checks common to both upload and commit.""" - results = [] - results.extend(input_api.canned_checks.CheckLicense( - input_api, output_api, _LicenseHeader(input_api))) - return results + """Checks common to both upload and commit.""" + results = [] + results.extend( + input_api.canned_checks.CheckLicense(input_api, output_api, + _LicenseHeader(input_api))) + return results + def CheckChangeOnUpload(input_api, output_api): - results = [] - results.extend(_CommonChecks(input_api, output_api)) - return results + results = [] + results.extend(_CommonChecks(input_api, output_api)) + return results + def CheckChangeOnCommit(input_api, output_api): - results = [] - results.extend(_CommonChecks(input_api, output_api)) - return results + results = [] + results.extend(_CommonChecks(input_api, output_api)) + return results diff --git a/tools_webrtc/android/build_aar.py b/tools_webrtc/android/build_aar.py index 81e545d11c..047be7b0a2 100755 --- a/tools_webrtc/android/build_aar.py +++ b/tools_webrtc/android/build_aar.py @@ -7,7 +7,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- """Script to generate libwebrtc.aar for distribution. The script has to be run from the root src folder. @@ -33,7 +32,6 @@ import tempfile import zipfile - SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0])) SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir)) DEFAULT_ARCHS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'] @@ -41,8 +39,8 @@ JAR_FILE = 'lib.java/sdk/android/libwebrtc.jar' MANIFEST_FILE = 'sdk/android/AndroidManifest.xml' TARGETS = [ - 'sdk/android:libwebrtc', - 'sdk/android:libjingle_peerconnection_so', + 'sdk/android:libwebrtc', + 'sdk/android:libjingle_peerconnection_so', ] sys.path.append(os.path.join(SCRIPT_DIR, '..', 'libs')) @@ -52,183 +50,209 @@ import find_depot_tools - def _ParseArgs(): - parser = argparse.ArgumentParser(description='libwebrtc.aar generator.') - parser.add_argument('--build-dir', - help='Build dir. By default will create and use temporary dir.') - parser.add_argument('--output', default='libwebrtc.aar', - help='Output file of the script.') - parser.add_argument('--arch', default=DEFAULT_ARCHS, nargs='*', - help='Architectures to build. Defaults to %(default)s.') - parser.add_argument('--use-goma', action='store_true', default=False, - help='Use goma.') - parser.add_argument('--verbose', action='store_true', default=False, - help='Debug logging.') - parser.add_argument('--extra-gn-args', default=[], nargs='*', - help="""Additional GN arguments to be used during Ninja generation. + parser = argparse.ArgumentParser(description='libwebrtc.aar generator.') + parser.add_argument( + '--build-dir', + help='Build dir. By default will create and use temporary dir.') + parser.add_argument('--output', + default='libwebrtc.aar', + help='Output file of the script.') + parser.add_argument( + '--arch', + default=DEFAULT_ARCHS, + nargs='*', + help='Architectures to build. 
Defaults to %(default)s.') + parser.add_argument('--use-goma', + action='store_true', + default=False, + help='Use goma.') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + parser.add_argument( + '--extra-gn-args', + default=[], + nargs='*', + help="""Additional GN arguments to be used during Ninja generation. These are passed to gn inside `--args` switch and applied after any other arguments and will override any values defined by the script. Example of building debug aar file: build_aar.py --extra-gn-args='is_debug=true'""") - parser.add_argument('--extra-ninja-switches', default=[], nargs='*', - help="""Additional Ninja switches to be used during compilation. + parser.add_argument( + '--extra-ninja-switches', + default=[], + nargs='*', + help="""Additional Ninja switches to be used during compilation. These are applied after any other Ninja switches. Example of enabling verbose Ninja output: build_aar.py --extra-ninja-switches='-v'""") - parser.add_argument('--extra-gn-switches', default=[], nargs='*', - help="""Additional GN switches to be used during compilation. + parser.add_argument( + '--extra-gn-switches', + default=[], + nargs='*', + help="""Additional GN switches to be used during compilation. These are applied after any other GN switches. 
Example of enabling verbose GN output: build_aar.py --extra-gn-switches='-v'""") - return parser.parse_args() + return parser.parse_args() def _RunGN(args): - cmd = [sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py')] - cmd.extend(args) - logging.debug('Running: %r', cmd) - subprocess.check_call(cmd) + cmd = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') + ] + cmd.extend(args) + logging.debug('Running: %r', cmd) + subprocess.check_call(cmd) def _RunNinja(output_directory, args): - cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), - '-C', output_directory] - cmd.extend(args) - logging.debug('Running: %r', cmd) - subprocess.check_call(cmd) + cmd = [ + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), '-C', + output_directory + ] + cmd.extend(args) + logging.debug('Running: %r', cmd) + subprocess.check_call(cmd) def _EncodeForGN(value): - """Encodes value as a GN literal.""" - if isinstance(value, str): - return '"' + value + '"' - elif isinstance(value, bool): - return repr(value).lower() - else: - return repr(value) + """Encodes value as a GN literal.""" + if isinstance(value, str): + return '"' + value + '"' + elif isinstance(value, bool): + return repr(value).lower() + else: + return repr(value) def _GetOutputDirectory(build_dir, arch): - """Returns the GN output directory for the target architecture.""" - return os.path.join(build_dir, arch) + """Returns the GN output directory for the target architecture.""" + return os.path.join(build_dir, arch) def _GetTargetCpu(arch): - """Returns target_cpu for the GN build with the given architecture.""" - if arch in ['armeabi', 'armeabi-v7a']: - return 'arm' - elif arch == 'arm64-v8a': - return 'arm64' - elif arch == 'x86': - return 'x86' - elif arch == 'x86_64': - return 'x64' - else: - raise Exception('Unknown arch: ' + arch) + """Returns target_cpu for the GN build with the given architecture.""" + if arch in ['armeabi', 'armeabi-v7a']: + 
return 'arm' + elif arch == 'arm64-v8a': + return 'arm64' + elif arch == 'x86': + return 'x86' + elif arch == 'x86_64': + return 'x64' + else: + raise Exception('Unknown arch: ' + arch) def _GetArmVersion(arch): - """Returns arm_version for the GN build with the given architecture.""" - if arch == 'armeabi': - return 6 - elif arch == 'armeabi-v7a': - return 7 - elif arch in ['arm64-v8a', 'x86', 'x86_64']: - return None - else: - raise Exception('Unknown arch: ' + arch) + """Returns arm_version for the GN build with the given architecture.""" + if arch == 'armeabi': + return 6 + elif arch == 'armeabi-v7a': + return 7 + elif arch in ['arm64-v8a', 'x86', 'x86_64']: + return None + else: + raise Exception('Unknown arch: ' + arch) def Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches, extra_ninja_switches): - """Generates target architecture using GN and builds it using ninja.""" - logging.info('Building: %s', arch) - output_directory = _GetOutputDirectory(build_dir, arch) - gn_args = { - 'target_os': 'android', - 'is_debug': False, - 'is_component_build': False, - 'rtc_include_tests': False, - 'target_cpu': _GetTargetCpu(arch), - 'use_goma': use_goma - } - arm_version = _GetArmVersion(arch) - if arm_version: - gn_args['arm_version'] = arm_version - gn_args_str = '--args=' + ' '.join([ - k + '=' + _EncodeForGN(v) for k, v in gn_args.items()] + extra_gn_args) - - gn_args_list = ['gen', output_directory, gn_args_str] - gn_args_list.extend(extra_gn_switches) - _RunGN(gn_args_list) - - ninja_args = TARGETS[:] - if use_goma: - ninja_args.extend(['-j', '200']) - ninja_args.extend(extra_ninja_switches) - _RunNinja(output_directory, ninja_args) + """Generates target architecture using GN and builds it using ninja.""" + logging.info('Building: %s', arch) + output_directory = _GetOutputDirectory(build_dir, arch) + gn_args = { + 'target_os': 'android', + 'is_debug': False, + 'is_component_build': False, + 'rtc_include_tests': False, + 'target_cpu': 
_GetTargetCpu(arch), + 'use_goma': use_goma + } + arm_version = _GetArmVersion(arch) + if arm_version: + gn_args['arm_version'] = arm_version + gn_args_str = '--args=' + ' '.join( + [k + '=' + _EncodeForGN(v) + for k, v in gn_args.items()] + extra_gn_args) + + gn_args_list = ['gen', output_directory, gn_args_str] + gn_args_list.extend(extra_gn_switches) + _RunGN(gn_args_list) + + ninja_args = TARGETS[:] + if use_goma: + ninja_args.extend(['-j', '200']) + ninja_args.extend(extra_ninja_switches) + _RunNinja(output_directory, ninja_args) def CollectCommon(aar_file, build_dir, arch): - """Collects architecture independent files into the .aar-archive.""" - logging.info('Collecting common files.') - output_directory = _GetOutputDirectory(build_dir, arch) - aar_file.write(MANIFEST_FILE, 'AndroidManifest.xml') - aar_file.write(os.path.join(output_directory, JAR_FILE), 'classes.jar') + """Collects architecture independent files into the .aar-archive.""" + logging.info('Collecting common files.') + output_directory = _GetOutputDirectory(build_dir, arch) + aar_file.write(MANIFEST_FILE, 'AndroidManifest.xml') + aar_file.write(os.path.join(output_directory, JAR_FILE), 'classes.jar') def Collect(aar_file, build_dir, arch): - """Collects architecture specific files into the .aar-archive.""" - logging.info('Collecting: %s', arch) - output_directory = _GetOutputDirectory(build_dir, arch) + """Collects architecture specific files into the .aar-archive.""" + logging.info('Collecting: %s', arch) + output_directory = _GetOutputDirectory(build_dir, arch) - abi_dir = os.path.join('jni', arch) - for so_file in NEEDED_SO_FILES: - aar_file.write(os.path.join(output_directory, so_file), - os.path.join(abi_dir, so_file)) + abi_dir = os.path.join('jni', arch) + for so_file in NEEDED_SO_FILES: + aar_file.write(os.path.join(output_directory, so_file), + os.path.join(abi_dir, so_file)) def GenerateLicenses(output_dir, build_dir, archs): - builder = LicenseBuilder( - 
[_GetOutputDirectory(build_dir, arch) for arch in archs], TARGETS) - builder.GenerateLicenseText(output_dir) + builder = LicenseBuilder( + [_GetOutputDirectory(build_dir, arch) for arch in archs], TARGETS) + builder.GenerateLicenseText(output_dir) -def BuildAar(archs, output_file, use_goma=False, extra_gn_args=None, - ext_build_dir=None, extra_gn_switches=None, +def BuildAar(archs, + output_file, + use_goma=False, + extra_gn_args=None, + ext_build_dir=None, + extra_gn_switches=None, extra_ninja_switches=None): - extra_gn_args = extra_gn_args or [] - extra_gn_switches = extra_gn_switches or [] - extra_ninja_switches = extra_ninja_switches or [] - build_dir = ext_build_dir if ext_build_dir else tempfile.mkdtemp() - - for arch in archs: - Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches, - extra_ninja_switches) - - with zipfile.ZipFile(output_file, 'w') as aar_file: - # Architecture doesn't matter here, arbitrarily using the first one. - CollectCommon(aar_file, build_dir, archs[0]) + extra_gn_args = extra_gn_args or [] + extra_gn_switches = extra_gn_switches or [] + extra_ninja_switches = extra_ninja_switches or [] + build_dir = ext_build_dir if ext_build_dir else tempfile.mkdtemp() + for arch in archs: - Collect(aar_file, build_dir, arch) + Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches, + extra_ninja_switches) + + with zipfile.ZipFile(output_file, 'w') as aar_file: + # Architecture doesn't matter here, arbitrarily using the first one. 
+ CollectCommon(aar_file, build_dir, archs[0]) + for arch in archs: + Collect(aar_file, build_dir, arch) - license_dir = os.path.dirname(os.path.realpath(output_file)) - GenerateLicenses(license_dir, build_dir, archs) + license_dir = os.path.dirname(os.path.realpath(output_file)) + GenerateLicenses(license_dir, build_dir, archs) - if not ext_build_dir: - shutil.rmtree(build_dir, True) + if not ext_build_dir: + shutil.rmtree(build_dir, True) def main(): - args = _ParseArgs() - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + args = _ParseArgs() + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - BuildAar(args.arch, args.output, args.use_goma, args.extra_gn_args, - args.build_dir, args.extra_gn_switches, args.extra_ninja_switches) + BuildAar(args.arch, args.output, args.use_goma, args.extra_gn_args, + args.build_dir, args.extra_gn_switches, args.extra_ninja_switches) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/android/release_aar.py b/tools_webrtc/android/release_aar.py index 1ac21b4280..bc7f471ae3 100644 --- a/tools_webrtc/android/release_aar.py +++ b/tools_webrtc/android/release_aar.py @@ -7,7 +7,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Script for publishing WebRTC AAR on Bintray. 
Set BINTRAY_USER and BINTRAY_API_KEY environment variables before running @@ -25,7 +24,6 @@ import tempfile import time - SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0])) CHECKOUT_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir)) @@ -36,7 +34,6 @@ sys.path.append(os.path.join(CHECKOUT_ROOT, 'tools_webrtc')) from android.build_aar import BuildAar - ARCHS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'] MAVEN_REPOSITORY = 'https://google.bintray.com/webrtc' API = 'https://api.bintray.com' @@ -62,230 +59,249 @@ def _ParseArgs(): - parser = argparse.ArgumentParser(description='Releases WebRTC on Bintray.') - parser.add_argument('--use-goma', action='store_true', default=False, - help='Use goma.') - parser.add_argument('--skip-tests', action='store_true', default=False, - help='Skips running the tests.') - parser.add_argument('--publish', action='store_true', default=False, - help='Automatically publishes the library if the tests pass.') - parser.add_argument('--build-dir', default=None, - help='Temporary directory to store the build files. If not specified, ' - 'a new directory will be created.') - parser.add_argument('--verbose', action='store_true', default=False, - help='Debug logging.') - return parser.parse_args() + parser = argparse.ArgumentParser(description='Releases WebRTC on Bintray.') + parser.add_argument('--use-goma', + action='store_true', + default=False, + help='Use goma.') + parser.add_argument('--skip-tests', + action='store_true', + default=False, + help='Skips running the tests.') + parser.add_argument( + '--publish', + action='store_true', + default=False, + help='Automatically publishes the library if the tests pass.') + parser.add_argument( + '--build-dir', + default=None, + help='Temporary directory to store the build files. 
If not specified, ' + 'a new directory will be created.') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + return parser.parse_args() def _GetCommitHash(): - commit_hash = subprocess.check_output( - ['git', 'rev-parse', 'HEAD'], cwd=CHECKOUT_ROOT).strip() - return commit_hash + commit_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD'], + cwd=CHECKOUT_ROOT).strip() + return commit_hash def _GetCommitPos(): - commit_message = subprocess.check_output( - ['git', 'rev-list', '--format=%B', '--max-count=1', 'HEAD'], - cwd=CHECKOUT_ROOT) - commit_pos_match = re.search( - COMMIT_POSITION_REGEX, commit_message, re.MULTILINE) - if not commit_pos_match: - raise Exception('Commit position not found in the commit message: %s' - % commit_message) - return commit_pos_match.group(1) + commit_message = subprocess.check_output( + ['git', 'rev-list', '--format=%B', '--max-count=1', 'HEAD'], + cwd=CHECKOUT_ROOT) + commit_pos_match = re.search(COMMIT_POSITION_REGEX, commit_message, + re.MULTILINE) + if not commit_pos_match: + raise Exception('Commit position not found in the commit message: %s' % + commit_message) + return commit_pos_match.group(1) def _UploadFile(user, password, filename, version, target_file): -# URL is of format: - # ///// - # Example: - # https://api.bintray.com/content/google/webrtc/google-webrtc/1.0.19742/org/webrtc/google-webrtc/1.0.19742/google-webrtc-1.0.19742.aar + # URL is of format: + # ///// + # Example: + # https://api.bintray.com/content/google/webrtc/google-webrtc/1.0.19742/org/webrtc/google-webrtc/1.0.19742/google-webrtc-1.0.19742.aar + + target_dir = version + '/' + GROUP_ID + '/' + ARTIFACT_ID + '/' + version + target_path = target_dir + '/' + target_file + url = CONTENT_API + '/' + target_path + + logging.info('Uploading %s to %s', filename, url) + with open(filename) as fh: + file_data = fh.read() + + for attempt in xrange(UPLOAD_TRIES): + try: + response = requests.put(url, + 
data=file_data, + auth=(user, password), + timeout=API_TIMEOUT_SECONDS) + break + except requests.exceptions.Timeout as e: + logging.warning('Timeout while uploading: %s', e) + time.sleep(UPLOAD_RETRY_BASE_SLEEP_SECONDS**attempt) + else: + raise Exception('Failed to upload %s' % filename) - target_dir = version + '/' + GROUP_ID + '/' + ARTIFACT_ID + '/' + version - target_path = target_dir + '/' + target_file - url = CONTENT_API + '/' + target_path + if not response.ok: + raise Exception('Failed to upload %s. Response: %s' % + (filename, response)) + logging.info('Uploaded %s: %s', filename, response) - logging.info('Uploading %s to %s', filename, url) - with open(filename) as fh: - file_data = fh.read() - for attempt in xrange(UPLOAD_TRIES): - try: - response = requests.put(url, data=file_data, auth=(user, password), - timeout=API_TIMEOUT_SECONDS) - break - except requests.exceptions.Timeout as e: - logging.warning('Timeout while uploading: %s', e) - time.sleep(UPLOAD_RETRY_BASE_SLEEP_SECONDS ** attempt) - else: - raise Exception('Failed to upload %s' % filename) +def _GeneratePom(target_file, version, commit): + env = jinja2.Environment(loader=jinja2.PackageLoader('release_aar'), ) + template = env.get_template('pom.jinja') + pom = template.render(version=version, commit=commit) + with open(target_file, 'w') as fh: + fh.write(pom) - if not response.ok: - raise Exception('Failed to upload %s. Response: %s' % (filename, response)) - logging.info('Uploaded %s: %s', filename, response) +def _TestAAR(tmp_dir, username, password, version): + """Runs AppRTCMobile tests using the AAR. 
Returns true if the tests pass.""" + logging.info('Testing library.') + env = jinja2.Environment(loader=jinja2.PackageLoader('release_aar'), ) -def _GeneratePom(target_file, version, commit): - env = jinja2.Environment( - loader=jinja2.PackageLoader('release_aar'), - ) - template = env.get_template('pom.jinja') - pom = template.render(version=version, commit=commit) - with open(target_file, 'w') as fh: - fh.write(pom) + gradle_backup = os.path.join(tmp_dir, 'build.gradle.backup') + app_gradle_backup = os.path.join(tmp_dir, 'app-build.gradle.backup') + # Make backup copies of the project files before modifying them. + shutil.copy2(AAR_PROJECT_GRADLE, gradle_backup) + shutil.copy2(AAR_PROJECT_APP_GRADLE, app_gradle_backup) -def _TestAAR(tmp_dir, username, password, version): - """Runs AppRTCMobile tests using the AAR. Returns true if the tests pass.""" - logging.info('Testing library.') - env = jinja2.Environment( - loader=jinja2.PackageLoader('release_aar'), - ) - - gradle_backup = os.path.join(tmp_dir, 'build.gradle.backup') - app_gradle_backup = os.path.join(tmp_dir, 'app-build.gradle.backup') - - # Make backup copies of the project files before modifying them. - shutil.copy2(AAR_PROJECT_GRADLE, gradle_backup) - shutil.copy2(AAR_PROJECT_APP_GRADLE, app_gradle_backup) - - try: - maven_repository_template = env.get_template('maven-repository.jinja') - maven_repository = maven_repository_template.render( - url=MAVEN_REPOSITORY, username=username, password=password) - - # Append Maven repository to build file to download unpublished files. - with open(AAR_PROJECT_GRADLE, 'a') as gradle_file: - gradle_file.write(maven_repository) - - # Read app build file. - with open(AAR_PROJECT_APP_GRADLE, 'r') as gradle_app_file: - gradle_app = gradle_app_file.read() - - if AAR_PROJECT_DEPENDENCY not in gradle_app: - raise Exception( - '%s not found in the build file.' % AAR_PROJECT_DEPENDENCY) - # Set version to the version to be tested. 
- target_dependency = AAR_PROJECT_VERSION_DEPENDENCY % version - gradle_app = gradle_app.replace(AAR_PROJECT_DEPENDENCY, target_dependency) - - # Write back. - with open(AAR_PROJECT_APP_GRADLE, 'w') as gradle_app_file: - gradle_app_file.write(gradle_app) - - # Uninstall any existing version of AppRTCMobile. - logging.info('Uninstalling previous AppRTCMobile versions. It is okay for ' - 'these commands to fail if AppRTCMobile is not installed.') - subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc']) - subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc.test']) - - # Run tests. try: - # First clean the project. - subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR) - # Then run the tests. - subprocess.check_call([GRADLEW_BIN, 'connectedDebugAndroidTest'], - cwd=AAR_PROJECT_DIR) - except subprocess.CalledProcessError: - logging.exception('Test failure.') - return False # Clean or tests failed - - return True # Tests pass - finally: - # Restore backups. - shutil.copy2(gradle_backup, AAR_PROJECT_GRADLE) - shutil.copy2(app_gradle_backup, AAR_PROJECT_APP_GRADLE) + maven_repository_template = env.get_template('maven-repository.jinja') + maven_repository = maven_repository_template.render( + url=MAVEN_REPOSITORY, username=username, password=password) + + # Append Maven repository to build file to download unpublished files. + with open(AAR_PROJECT_GRADLE, 'a') as gradle_file: + gradle_file.write(maven_repository) + + # Read app build file. + with open(AAR_PROJECT_APP_GRADLE, 'r') as gradle_app_file: + gradle_app = gradle_app_file.read() + + if AAR_PROJECT_DEPENDENCY not in gradle_app: + raise Exception('%s not found in the build file.' % + AAR_PROJECT_DEPENDENCY) + # Set version to the version to be tested. + target_dependency = AAR_PROJECT_VERSION_DEPENDENCY % version + gradle_app = gradle_app.replace(AAR_PROJECT_DEPENDENCY, + target_dependency) + + # Write back. 
+ with open(AAR_PROJECT_APP_GRADLE, 'w') as gradle_app_file: + gradle_app_file.write(gradle_app) + + # Uninstall any existing version of AppRTCMobile. + logging.info( + 'Uninstalling previous AppRTCMobile versions. It is okay for ' + 'these commands to fail if AppRTCMobile is not installed.') + subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc']) + subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc.test']) + + # Run tests. + try: + # First clean the project. + subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR) + # Then run the tests. + subprocess.check_call([GRADLEW_BIN, 'connectedDebugAndroidTest'], + cwd=AAR_PROJECT_DIR) + except subprocess.CalledProcessError: + logging.exception('Test failure.') + return False # Clean or tests failed + + return True # Tests pass + finally: + # Restore backups. + shutil.copy2(gradle_backup, AAR_PROJECT_GRADLE) + shutil.copy2(app_gradle_backup, AAR_PROJECT_APP_GRADLE) def _PublishAAR(user, password, version, additional_args): - args = { - 'publish_wait_for_secs': 0 # Publish asynchronously. - } - args.update(additional_args) - - url = CONTENT_API + '/' + version + '/publish' - response = requests.post(url, data=json.dumps(args), auth=(user, password), - timeout=API_TIMEOUT_SECONDS) + args = { + 'publish_wait_for_secs': 0 # Publish asynchronously. + } + args.update(additional_args) + + url = CONTENT_API + '/' + version + '/publish' + response = requests.post(url, + data=json.dumps(args), + auth=(user, password), + timeout=API_TIMEOUT_SECONDS) - if not response.ok: - raise Exception('Failed to publish. Response: %s' % response) + if not response.ok: + raise Exception('Failed to publish. Response: %s' % response) def _DeleteUnpublishedVersion(user, password, version): - url = PACKAGES_API + '/versions/' + version - response = requests.get(url, auth=(user, password), - timeout=API_TIMEOUT_SECONDS) - if not response.ok: - raise Exception('Failed to get version info. 
Response: %s' % response) - - version_info = json.loads(response.content) - if version_info['published']: - logging.info('Version has already been published, not deleting.') - return - - logging.info('Deleting unpublished version.') - response = requests.delete(url, auth=(user, password), - timeout=API_TIMEOUT_SECONDS) - if not response.ok: - raise Exception('Failed to delete version. Response: %s' % response) + url = PACKAGES_API + '/versions/' + version + response = requests.get(url, + auth=(user, password), + timeout=API_TIMEOUT_SECONDS) + if not response.ok: + raise Exception('Failed to get version info. Response: %s' % response) + + version_info = json.loads(response.content) + if version_info['published']: + logging.info('Version has already been published, not deleting.') + return + + logging.info('Deleting unpublished version.') + response = requests.delete(url, + auth=(user, password), + timeout=API_TIMEOUT_SECONDS) + if not response.ok: + raise Exception('Failed to delete version. Response: %s' % response) def ReleaseAar(use_goma, skip_tests, publish, build_dir): - version = '1.0.' + _GetCommitPos() - commit = _GetCommitHash() - logging.info('Releasing AAR version %s with hash %s', version, commit) - - user = os.environ.get('BINTRAY_USER', None) - api_key = os.environ.get('BINTRAY_API_KEY', None) - if not user or not api_key: - raise Exception('Environment variables BINTRAY_USER and BINTRAY_API_KEY ' - 'must be defined.') - - # If build directory is not specified, create a temporary directory. 
- use_tmp_dir = not build_dir - if use_tmp_dir: - build_dir = tempfile.mkdtemp() - - try: - base_name = ARTIFACT_ID + '-' + version - aar_file = os.path.join(build_dir, base_name + '.aar') - third_party_licenses_file = os.path.join(build_dir, 'LICENSE.md') - pom_file = os.path.join(build_dir, base_name + '.pom') - - logging.info('Building at %s', build_dir) - BuildAar(ARCHS, aar_file, - use_goma=use_goma, - ext_build_dir=os.path.join(build_dir, 'aar-build')) - _GeneratePom(pom_file, version, commit) - - _UploadFile(user, api_key, aar_file, version, base_name + '.aar') - _UploadFile(user, api_key, third_party_licenses_file, version, - 'THIRD_PARTY_LICENSES.md') - _UploadFile(user, api_key, pom_file, version, base_name + '.pom') - - tests_pass = skip_tests or _TestAAR(build_dir, user, api_key, version) - if not tests_pass: - logging.info('Discarding library.') - _PublishAAR(user, api_key, version, {'discard': True}) - _DeleteUnpublishedVersion(user, api_key, version) - raise Exception('Test failure. Discarded library.') - - if publish: - logging.info('Publishing library.') - _PublishAAR(user, api_key, version, {}) - else: - logging.info('Note: The library has not not been published automatically.' - ' Please do so manually if desired.') - finally: + version = '1.0.' + _GetCommitPos() + commit = _GetCommitHash() + logging.info('Releasing AAR version %s with hash %s', version, commit) + + user = os.environ.get('BINTRAY_USER', None) + api_key = os.environ.get('BINTRAY_API_KEY', None) + if not user or not api_key: + raise Exception( + 'Environment variables BINTRAY_USER and BINTRAY_API_KEY ' + 'must be defined.') + + # If build directory is not specified, create a temporary directory. 
+ use_tmp_dir = not build_dir if use_tmp_dir: - shutil.rmtree(build_dir, True) + build_dir = tempfile.mkdtemp() + + try: + base_name = ARTIFACT_ID + '-' + version + aar_file = os.path.join(build_dir, base_name + '.aar') + third_party_licenses_file = os.path.join(build_dir, 'LICENSE.md') + pom_file = os.path.join(build_dir, base_name + '.pom') + + logging.info('Building at %s', build_dir) + BuildAar(ARCHS, + aar_file, + use_goma=use_goma, + ext_build_dir=os.path.join(build_dir, 'aar-build')) + _GeneratePom(pom_file, version, commit) + + _UploadFile(user, api_key, aar_file, version, base_name + '.aar') + _UploadFile(user, api_key, third_party_licenses_file, version, + 'THIRD_PARTY_LICENSES.md') + _UploadFile(user, api_key, pom_file, version, base_name + '.pom') + + tests_pass = skip_tests or _TestAAR(build_dir, user, api_key, version) + if not tests_pass: + logging.info('Discarding library.') + _PublishAAR(user, api_key, version, {'discard': True}) + _DeleteUnpublishedVersion(user, api_key, version) + raise Exception('Test failure. Discarded library.') + + if publish: + logging.info('Publishing library.') + _PublishAAR(user, api_key, version, {}) + else: + logging.info( + 'Note: The library has not not been published automatically.' 
+ ' Please do so manually if desired.') + finally: + if use_tmp_dir: + shutil.rmtree(build_dir, True) def main(): - args = _ParseArgs() - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - ReleaseAar(args.use_goma, args.skip_tests, args.publish, args.build_dir) + args = _ParseArgs() + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + ReleaseAar(args.use_goma, args.skip_tests, args.publish, args.build_dir) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/android/suppressions.xml b/tools_webrtc/android/suppressions.xml deleted file mode 100644 index 87b9387c3b..0000000000 --- a/tools_webrtc/android/suppressions.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tools_webrtc/apple/copy_framework_header.py b/tools_webrtc/apple/copy_framework_header.py new file mode 100755 index 0000000000..d194650934 --- /dev/null +++ b/tools_webrtc/apple/copy_framework_header.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import argparse +import re +import sys + + +def replace_double_quote(line): + re_rtc_import = re.compile( + r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)', re.DOTALL) + match = re_rtc_import.match(line) + if not match: + return line + + return '%s#import %s' % (match.group(1), match.group(3), + match.group(4), match.group(5)) + + +def process(input_file, output_file): + with open(input_file, 'rb') as fb, open(output_file, 'wb') as fw: + for line in fb.read().decode('UTF-8').splitlines(): + fw.write(replace_double_quote(line).encode('UTF-8')) + fw.write(b"\n") + + +def main(): + parser = argparse.ArgumentParser( + description= + "Copy headers of framework and replace double-quoted includes to" + + " angle-bracketed respectively.") + parser.add_argument('--input', + help='Input header files to copy.', + type=str) + parser.add_argument('--output', help='Output file.', type=str) + parsed_args = parser.parse_args() + return process(parsed_args.input, parsed_args.output) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tools_webrtc/apple/copy_framework_header_test.py b/tools_webrtc/apple/copy_framework_header_test.py new file mode 100644 index 0000000000..24bab3eb0b --- /dev/null +++ b/tools_webrtc/apple/copy_framework_header_test.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import unittest +from copy_framework_header import replace_double_quote + + +class TestCopyFramework(unittest.TestCase): + def testReplaceDoubleQuote(self): + self.assertEqual(replace_double_quote("""#import "RTCMacros.h\""""), + """#import """) + self.assertEqual(replace_double_quote("""#import "RTCMacros.h\"\n"""), + """#import \n""") + self.assertEqual( + replace_double_quote("""#import "UIDevice+RTCDevice.h\"\n"""), + """#import \n""") + self.assertEqual( + replace_double_quote("#import \"components/video_codec/" + + "RTCVideoDecoderFactoryH264.h\"\n"), + """#import \n""") + self.assertEqual( + replace_double_quote( + """@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n""" + ), + """@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n""") + + +if __name__ == '__main__': + unittest.main() diff --git a/tools_webrtc/autoroller/roll_deps.py b/tools_webrtc/autoroller/roll_deps.py index 2e9a1c2cfe..f1a1235f20 100755 --- a/tools_webrtc/autoroller/roll_deps.py +++ b/tools_webrtc/autoroller/roll_deps.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Script to automatically roll dependencies in the WebRTC DEPS file.""" import argparse @@ -19,16 +18,18 @@ import sys import urllib2 + def FindSrcDirPath(): - """Returns the abs path to the src/ dir of the project.""" - src_dir = os.path.dirname(os.path.abspath(__file__)) - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir + """Returns the abs path to the src/ dir of the project.""" + src_dir = os.path.dirname(os.path.abspath(__file__)) + while os.path.basename(src_dir) != 'src': + src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) + return src_dir + # Skip these dependencies (list without solution name prefix). 
DONT_AUTOROLL_THESE = [ - 'src/examples/androidtests/third_party/gradle', + 'src/examples/androidtests/third_party/gradle', ] # These dependencies are missing in chromium/src/DEPS, either unused or already @@ -36,19 +37,18 @@ def FindSrcDirPath(): # but we pull it through a subtree mirror, so therefore it isn't listed in # Chromium's deps but it is in ours. WEBRTC_ONLY_DEPS = [ - 'src/base', - 'src/build', - 'src/buildtools', - 'src/ios', - 'src/testing', - 'src/third_party', - 'src/third_party/findbugs', - 'src/third_party/gtest-parallel', - 'src/third_party/yasm/binaries', - 'src/tools', + 'src/base', + 'src/build', + 'src/buildtools', + 'src/ios', + 'src/testing', + 'src/third_party', + 'src/third_party/findbugs', + 'src/third_party/gtest-parallel', + 'src/third_party/yasm/binaries', + 'src/tools', ] - WEBRTC_URL = 'https://webrtc.googlesource.com/src' CHROMIUM_SRC_URL = 'https://chromium.googlesource.com/chromium/src' CHROMIUM_COMMIT_TEMPLATE = CHROMIUM_SRC_URL + '/+/%s' @@ -56,7 +56,7 @@ def FindSrcDirPath(): CHROMIUM_FILE_TEMPLATE = CHROMIUM_SRC_URL + '/+/%s/%s' COMMIT_POSITION_RE = re.compile('^Cr-Commit-Position: .*#([0-9]+).*$') -CLANG_REVISION_RE = re.compile(r'^CLANG_REVISION = \'([0-9a-z]+)\'$') +CLANG_REVISION_RE = re.compile(r'^CLANG_REVISION = \'([-0-9a-z]+)\'$') ROLL_BRANCH_NAME = 'roll_chromium_revision' SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -71,7 +71,6 @@ def FindSrcDirPath(): NOTIFY_EMAIL = 'webrtc-trooper@grotations.appspotmail.com' - sys.path.append(os.path.join(CHECKOUT_SRC_DIR, 'build')) import find_depot_tools @@ -82,8 +81,8 @@ def FindSrcDirPath(): 'clang', 'scripts', 'update.py') DepsEntry = collections.namedtuple('DepsEntry', 'path url revision') -ChangedDep = collections.namedtuple( - 'ChangedDep', 'path url current_rev new_rev') +ChangedDep = collections.namedtuple('ChangedDep', + 'path url current_rev new_rev') CipdDepsEntry = collections.namedtuple('CipdDepsEntry', 'path packages') ChangedCipdPackage = 
collections.namedtuple( 'ChangedCipdPackage', 'path package current_version new_version') @@ -94,124 +93,135 @@ def FindSrcDirPath(): class RollError(Exception): - pass + pass + + +def StrExpansion(): + return lambda str_value: str_value def VarLookup(local_scope): - return lambda var_name: local_scope['vars'][var_name] + return lambda var_name: local_scope['vars'][var_name] def ParseDepsDict(deps_content): - local_scope = {} - global_scope = { - 'Var': VarLookup(local_scope), - 'deps_os': {}, - } - exec (deps_content, global_scope, local_scope) - return local_scope + local_scope = {} + global_scope = { + 'Str': StrExpansion(), + 'Var': VarLookup(local_scope), + 'deps_os': {}, + } + exec (deps_content, global_scope, local_scope) + return local_scope def ParseLocalDepsFile(filename): - with open(filename, 'rb') as f: - deps_content = f.read() - return ParseDepsDict(deps_content) + with open(filename, 'rb') as f: + deps_content = f.read() + return ParseDepsDict(deps_content) def ParseCommitPosition(commit_message): - for line in reversed(commit_message.splitlines()): - m = COMMIT_POSITION_RE.match(line.strip()) - if m: - return int(m.group(1)) - logging.error('Failed to parse commit position id from:\n%s\n', - commit_message) - sys.exit(-1) - - -def _RunCommand(command, working_dir=None, ignore_exit_code=False, - extra_env=None, input_data=None): - """Runs a command and returns the output from that command. + for line in reversed(commit_message.splitlines()): + m = COMMIT_POSITION_RE.match(line.strip()) + if m: + return int(m.group(1)) + logging.error('Failed to parse commit position id from:\n%s\n', + commit_message) + sys.exit(-1) + + +def _RunCommand(command, + working_dir=None, + ignore_exit_code=False, + extra_env=None, + input_data=None): + """Runs a command and returns the output from that command. If the command fails (exit code != 0), the function will exit the process. Returns: A tuple containing the stdout and stderr outputs as strings. 
""" - working_dir = working_dir or CHECKOUT_SRC_DIR - logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir) - env = os.environ.copy() - if extra_env: - assert all(isinstance(value, str) for value in extra_env.values()) - logging.debug('extra env: %s', extra_env) - env.update(extra_env) - p = subprocess.Popen(command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, env=env, - cwd=working_dir, universal_newlines=True) - std_output, err_output = p.communicate(input_data) - p.stdout.close() - p.stderr.close() - if not ignore_exit_code and p.returncode != 0: - logging.error('Command failed: %s\n' - 'stdout:\n%s\n' - 'stderr:\n%s\n', ' '.join(command), std_output, err_output) - sys.exit(p.returncode) - return std_output, err_output + working_dir = working_dir or CHECKOUT_SRC_DIR + logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir) + env = os.environ.copy() + if extra_env: + assert all(isinstance(value, str) for value in extra_env.values()) + logging.debug('extra env: %s', extra_env) + env.update(extra_env) + p = subprocess.Popen(command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env, + cwd=working_dir, + universal_newlines=True) + std_output, err_output = p.communicate(input_data) + p.stdout.close() + p.stderr.close() + if not ignore_exit_code and p.returncode != 0: + logging.error('Command failed: %s\n' + 'stdout:\n%s\n' + 'stderr:\n%s\n', ' '.join(command), std_output, + err_output) + sys.exit(p.returncode) + return std_output, err_output def _GetBranches(): - """Returns a tuple of active,branches. + """Returns a tuple of active,branches. The 'active' is the name of the currently active branch and 'branches' is a list of all branches. """ - lines = _RunCommand(['git', 'branch'])[0].split('\n') - branches = [] - active = '' - for line in lines: - if '*' in line: - # The assumption is that the first char will always be the '*'. 
- active = line[1:].strip() - branches.append(active) - else: - branch = line.strip() - if branch: - branches.append(branch) - return active, branches + lines = _RunCommand(['git', 'branch'])[0].split('\n') + branches = [] + active = '' + for line in lines: + if '*' in line: + # The assumption is that the first char will always be the '*'. + active = line[1:].strip() + branches.append(active) + else: + branch = line.strip() + if branch: + branches.append(branch) + return active, branches def _ReadGitilesContent(url): - # Download and decode BASE64 content until - # https://code.google.com/p/gitiles/issues/detail?id=7 is fixed. - base64_content = ReadUrlContent(url + '?format=TEXT') - return base64.b64decode(base64_content[0]) + # Download and decode BASE64 content until + # https://code.google.com/p/gitiles/issues/detail?id=7 is fixed. + base64_content = ReadUrlContent(url + '?format=TEXT') + return base64.b64decode(base64_content[0]) def ReadRemoteCrFile(path_below_src, revision): - """Reads a remote Chromium file of a specific revision. Returns a string.""" - return _ReadGitilesContent(CHROMIUM_FILE_TEMPLATE % (revision, - path_below_src)) + """Reads a remote Chromium file of a specific revision. Returns a string.""" + return _ReadGitilesContent(CHROMIUM_FILE_TEMPLATE % + (revision, path_below_src)) def ReadRemoteCrCommit(revision): - """Reads a remote Chromium commit message. Returns a string.""" - return _ReadGitilesContent(CHROMIUM_COMMIT_TEMPLATE % revision) + """Reads a remote Chromium commit message. Returns a string.""" + return _ReadGitilesContent(CHROMIUM_COMMIT_TEMPLATE % revision) def ReadUrlContent(url): - """Connect to a remote host and read the contents. Returns a list of lines.""" - conn = urllib2.urlopen(url) - try: - return conn.readlines() - except IOError as e: - logging.exception('Error connecting to %s. Error: %s', url, e) - raise - finally: - conn.close() + """Connect to a remote host and read the contents. 
Returns a list of lines.""" + conn = urllib2.urlopen(url) + try: + return conn.readlines() + except IOError as e: + logging.exception('Error connecting to %s. Error: %s', url, e) + raise + finally: + conn.close() def GetMatchingDepsEntries(depsentry_dict, dir_path): - """Gets all deps entries matching the provided path. + """Gets all deps entries matching the provided path. This list may contain more than one DepsEntry object. Example: dir_path='src/testing' would give results containing both @@ -222,67 +232,72 @@ def GetMatchingDepsEntries(depsentry_dict, dir_path): Returns: A list of DepsEntry objects. """ - result = [] - for path, depsentry in depsentry_dict.iteritems(): - if path == dir_path: - result.append(depsentry) - else: - parts = path.split('/') - if all(part == parts[i] - for i, part in enumerate(dir_path.split('/'))): - result.append(depsentry) - return result + result = [] + for path, depsentry in depsentry_dict.iteritems(): + if path == dir_path: + result.append(depsentry) + else: + parts = path.split('/') + if all(part == parts[i] + for i, part in enumerate(dir_path.split('/'))): + result.append(depsentry) + return result def BuildDepsentryDict(deps_dict): - """Builds a dict of paths to DepsEntry objects from a raw parsed deps dict.""" - result = {} - - def AddDepsEntries(deps_subdict): - for path, dep in deps_subdict.iteritems(): - if path in result: - continue - if not isinstance(dep, dict): - dep = {'url': dep} - if dep.get('dep_type') == 'cipd': - result[path] = CipdDepsEntry(path, dep['packages']) - else: - if '@' not in dep['url']: - continue - url, revision = dep['url'].split('@') - result[path] = DepsEntry(path, url, revision) - - AddDepsEntries(deps_dict['deps']) - for deps_os in ['win', 'mac', 'unix', 'android', 'ios', 'unix']: - AddDepsEntries(deps_dict.get('deps_os', {}).get(deps_os, {})) - return result + """Builds a dict of paths to DepsEntry objects from a raw parsed deps dict.""" + result = {} + + def AddDepsEntries(deps_subdict): + 
for path, dep in deps_subdict.iteritems(): + if path in result: + continue + if not isinstance(dep, dict): + dep = {'url': dep} + if dep.get('dep_type') == 'cipd': + result[path] = CipdDepsEntry(path, dep['packages']) + else: + if '@' not in dep['url']: + continue + url, revision = dep['url'].split('@') + result[path] = DepsEntry(path, url, revision) + + AddDepsEntries(deps_dict['deps']) + for deps_os in ['win', 'mac', 'unix', 'android', 'ios', 'unix']: + AddDepsEntries(deps_dict.get('deps_os', {}).get(deps_os, {})) + return result def _FindChangedCipdPackages(path, old_pkgs, new_pkgs): - pkgs_equal = ({p['package'] for p in old_pkgs} == - {p['package'] for p in new_pkgs}) - assert pkgs_equal, 'Old: %s\n New: %s' % (old_pkgs, new_pkgs) - for old_pkg in old_pkgs: - for new_pkg in new_pkgs: - old_version = old_pkg['version'] - new_version = new_pkg['version'] - if (old_pkg['package'] == new_pkg['package'] and - old_version != new_version): - logging.debug('Roll dependency %s to %s', path, new_version) - yield ChangedCipdPackage(path, old_pkg['package'], - old_version, new_version) + pkgs_equal = ({p['package'] + for p in old_pkgs} == {p['package'] + for p in new_pkgs}) + assert pkgs_equal, ('Old: %s\n New: %s.\nYou need to do a manual roll ' + 'and remove/add entries in DEPS so the old and new ' + 'list match.' % (old_pkgs, new_pkgs)) + for old_pkg in old_pkgs: + for new_pkg in new_pkgs: + old_version = old_pkg['version'] + new_version = new_pkg['version'] + if (old_pkg['package'] == new_pkg['package'] + and old_version != new_version): + logging.debug('Roll dependency %s to %s', path, new_version) + yield ChangedCipdPackage(path, old_pkg['package'], old_version, + new_version) def _FindNewDeps(old, new): - """ Gather dependencies only in |new| and return corresponding paths. 
""" - old_entries = set(BuildDepsentryDict(old)) - new_entries = set(BuildDepsentryDict(new)) - return [path for path in new_entries - old_entries - if path not in DONT_AUTOROLL_THESE] + """ Gather dependencies only in |new| and return corresponding paths. """ + old_entries = set(BuildDepsentryDict(old)) + new_entries = set(BuildDepsentryDict(new)) + return [ + path for path in new_entries - old_entries + if path not in DONT_AUTOROLL_THESE + ] def FindAddedDeps(webrtc_deps, new_cr_deps): - """ + """ Calculate new deps entries of interest. Ideally, that would mean: only appearing in chromium DEPS @@ -303,16 +318,18 @@ def FindAddedDeps(webrtc_deps, new_cr_deps): A list of paths added dependencies sitting in |ANDROID_DEPS_PATH|. A list of paths for other added dependencies. """ - all_added_deps = _FindNewDeps(webrtc_deps, new_cr_deps) - generated_android_deps = [path for path in all_added_deps - if path.startswith(ANDROID_DEPS_PATH)] - other_deps = [path for path in all_added_deps - if path not in generated_android_deps] - return generated_android_deps, other_deps + all_added_deps = _FindNewDeps(webrtc_deps, new_cr_deps) + generated_android_deps = [ + path for path in all_added_deps if path.startswith(ANDROID_DEPS_PATH) + ] + other_deps = [ + path for path in all_added_deps if path not in generated_android_deps + ] + return generated_android_deps, other_deps def FindRemovedDeps(webrtc_deps, new_cr_deps): - """ + """ Calculate obsolete deps entries. Ideally, that would mean: no more appearing in chromium DEPS @@ -335,18 +352,20 @@ def FindRemovedDeps(webrtc_deps, new_cr_deps): A list of paths of dependencies removed from |ANDROID_DEPS_PATH|. A list of paths of unexpected disappearing dependencies. """ - all_removed_deps = _FindNewDeps(new_cr_deps, webrtc_deps) - generated_android_deps = [path for path in all_removed_deps - if path.startswith(ANDROID_DEPS_PATH)] - # Webrtc-only dependencies are handled in CalculateChangedDeps. 
- other_deps = [path for path in all_removed_deps - if path not in generated_android_deps and - path not in WEBRTC_ONLY_DEPS] - return generated_android_deps, other_deps + all_removed_deps = _FindNewDeps(new_cr_deps, webrtc_deps) + generated_android_deps = [ + path for path in all_removed_deps if path.startswith(ANDROID_DEPS_PATH) + ] + # Webrtc-only dependencies are handled in CalculateChangedDeps. + other_deps = [ + path for path in all_removed_deps + if path not in generated_android_deps and path not in WEBRTC_ONLY_DEPS + ] + return generated_android_deps, other_deps def CalculateChangedDeps(webrtc_deps, new_cr_deps): - """ + """ Calculate changed deps entries based on entries defined in the WebRTC DEPS file: - If a shared dependency with the Chromium DEPS file: roll it to the same @@ -360,354 +379,381 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps): Returns: A list of ChangedDep objects representing the changed deps. """ - result = [] - webrtc_entries = BuildDepsentryDict(webrtc_deps) - new_cr_entries = BuildDepsentryDict(new_cr_deps) - for path, webrtc_deps_entry in webrtc_entries.iteritems(): - if path in DONT_AUTOROLL_THESE: - continue - cr_deps_entry = new_cr_entries.get(path) - if cr_deps_entry: - assert type(cr_deps_entry) is type(webrtc_deps_entry) - - if isinstance(cr_deps_entry, CipdDepsEntry): - result.extend(_FindChangedCipdPackages(path, webrtc_deps_entry.packages, - cr_deps_entry.packages)) - continue - - # Use the revision from Chromium's DEPS file. - new_rev = cr_deps_entry.revision - assert webrtc_deps_entry.url == cr_deps_entry.url, ( - 'WebRTC DEPS entry %s has a different URL (%s) than Chromium (%s).' % - (path, webrtc_deps_entry.url, cr_deps_entry.url)) - else: - if isinstance(webrtc_deps_entry, DepsEntry): - # Use the HEAD of the deps repo. - stdout, _ = _RunCommand(['git', 'ls-remote', webrtc_deps_entry.url, - 'HEAD']) - new_rev = stdout.strip().split('\t')[0] - else: - # The dependency has been removed from chromium. 
- # This is handled by FindRemovedDeps. - continue - - # Check if an update is necessary. - if webrtc_deps_entry.revision != new_rev: - logging.debug('Roll dependency %s to %s', path, new_rev) - result.append(ChangedDep(path, webrtc_deps_entry.url, - webrtc_deps_entry.revision, new_rev)) - return sorted(result) + result = [] + webrtc_entries = BuildDepsentryDict(webrtc_deps) + new_cr_entries = BuildDepsentryDict(new_cr_deps) + for path, webrtc_deps_entry in webrtc_entries.iteritems(): + if path in DONT_AUTOROLL_THESE: + continue + cr_deps_entry = new_cr_entries.get(path) + if cr_deps_entry: + assert type(cr_deps_entry) is type(webrtc_deps_entry) + + if isinstance(cr_deps_entry, CipdDepsEntry): + result.extend( + _FindChangedCipdPackages(path, webrtc_deps_entry.packages, + cr_deps_entry.packages)) + continue + + # Use the revision from Chromium's DEPS file. + new_rev = cr_deps_entry.revision + assert webrtc_deps_entry.url == cr_deps_entry.url, ( + 'WebRTC DEPS entry %s has a different URL (%s) than Chromium (%s).' + % (path, webrtc_deps_entry.url, cr_deps_entry.url)) + else: + if isinstance(webrtc_deps_entry, DepsEntry): + # Use the HEAD of the deps repo. + stdout, _ = _RunCommand( + ['git', 'ls-remote', webrtc_deps_entry.url, 'HEAD']) + new_rev = stdout.strip().split('\t')[0] + else: + # The dependency has been removed from chromium. + # This is handled by FindRemovedDeps. + continue + + # Check if an update is necessary. 
+ if webrtc_deps_entry.revision != new_rev: + logging.debug('Roll dependency %s to %s', path, new_rev) + result.append( + ChangedDep(path, webrtc_deps_entry.url, + webrtc_deps_entry.revision, new_rev)) + return sorted(result) def CalculateChangedClang(new_cr_rev): - def GetClangRev(lines): - for line in lines: - match = CLANG_REVISION_RE.match(line) - if match: - return match.group(1) - raise RollError('Could not parse Clang revision!') - - with open(CLANG_UPDATE_SCRIPT_LOCAL_PATH, 'rb') as f: - current_lines = f.readlines() - current_rev = GetClangRev(current_lines) - - new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH, - new_cr_rev).splitlines() - new_rev = GetClangRev(new_clang_update_py) - return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, new_rev) - - -def GenerateCommitMessage(rev_update, current_commit_pos, new_commit_pos, - changed_deps_list, - added_deps_paths=None, - removed_deps_paths=None, - clang_change=None, - ): - current_cr_rev = rev_update.current_chromium_rev[0:10] - new_cr_rev = rev_update.new_chromium_rev[0:10] - rev_interval = '%s..%s' % (current_cr_rev, new_cr_rev) - git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos) - - commit_msg = ['Roll chromium_revision %s (%s)\n' % (rev_interval, - git_number_interval), - 'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval), - 'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % - rev_interval)] - - def Section(adjective, deps): - noun = 'dependency' if len(deps) == 1 else 'dependencies' - commit_msg.append('%s %s' % (adjective, noun)) - - tbr_authors = '' - if changed_deps_list: - Section('Changed', changed_deps_list) - - for c in changed_deps_list: - if isinstance(c, ChangedCipdPackage): - commit_msg.append('* %s: %s..%s' % (c.path, c.current_version, - c.new_version)) - else: - commit_msg.append('* %s: %s/+log/%s..%s' % (c.path, c.url, - c.current_rev[0:10], - c.new_rev[0:10])) - if 'libvpx' in c.path: - tbr_authors += 'marpan@webrtc.org, 
jianj@chromium.org, ' - - if added_deps_paths: - Section('Added', added_deps_paths) - commit_msg.extend('* %s' % p for p in added_deps_paths) - - if removed_deps_paths: - Section('Removed', removed_deps_paths) - commit_msg.extend('* %s' % p for p in removed_deps_paths) - - if any([changed_deps_list, - added_deps_paths, - removed_deps_paths]): - change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, 'DEPS') - commit_msg.append('DEPS diff: %s\n' % change_url) - else: - commit_msg.append('No dependencies changed.') - - if clang_change and clang_change.current_rev != clang_change.new_rev: - commit_msg.append('Clang version changed %s:%s' % - (clang_change.current_rev, clang_change.new_rev)) - change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, - CLANG_UPDATE_SCRIPT_URL_PATH) - commit_msg.append('Details: %s\n' % change_url) - else: - commit_msg.append('No update to Clang.\n') - - # TBR needs to be non-empty for Gerrit to process it. - git_author = _RunCommand(['git', 'config', 'user.email'], - working_dir=CHECKOUT_SRC_DIR)[0].splitlines()[0] - tbr_authors = git_author + ',' + tbr_authors - - commit_msg.append('TBR=%s' % tbr_authors) - commit_msg.append('BUG=None') - return '\n'.join(commit_msg) + def GetClangRev(lines): + for line in lines: + match = CLANG_REVISION_RE.match(line) + if match: + return match.group(1) + raise RollError('Could not parse Clang revision!') + + with open(CLANG_UPDATE_SCRIPT_LOCAL_PATH, 'rb') as f: + current_lines = f.readlines() + current_rev = GetClangRev(current_lines) + + new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH, + new_cr_rev).splitlines() + new_rev = GetClangRev(new_clang_update_py) + return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, + new_rev) + + +def GenerateCommitMessage( + rev_update, + current_commit_pos, + new_commit_pos, + changed_deps_list, + added_deps_paths=None, + removed_deps_paths=None, + clang_change=None, +): + current_cr_rev = rev_update.current_chromium_rev[0:10] + new_cr_rev 
= rev_update.new_chromium_rev[0:10] + rev_interval = '%s..%s' % (current_cr_rev, new_cr_rev) + git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos) + + commit_msg = [ + 'Roll chromium_revision %s (%s)\n' % + (rev_interval, git_number_interval), + 'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval), + 'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % rev_interval) + ] + + def Section(adjective, deps): + noun = 'dependency' if len(deps) == 1 else 'dependencies' + commit_msg.append('%s %s' % (adjective, noun)) + + tbr_authors = '' + if changed_deps_list: + Section('Changed', changed_deps_list) + + for c in changed_deps_list: + if isinstance(c, ChangedCipdPackage): + commit_msg.append('* %s: %s..%s' % + (c.path, c.current_version, c.new_version)) + else: + commit_msg.append( + '* %s: %s/+log/%s..%s' % + (c.path, c.url, c.current_rev[0:10], c.new_rev[0:10])) + if 'libvpx' in c.path: + tbr_authors += 'marpan@webrtc.org, jianj@chromium.org, ' + + if added_deps_paths: + Section('Added', added_deps_paths) + commit_msg.extend('* %s' % p for p in added_deps_paths) + + if removed_deps_paths: + Section('Removed', removed_deps_paths) + commit_msg.extend('* %s' % p for p in removed_deps_paths) + + if any([changed_deps_list, added_deps_paths, removed_deps_paths]): + change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, 'DEPS') + commit_msg.append('DEPS diff: %s\n' % change_url) + else: + commit_msg.append('No dependencies changed.') + + if clang_change and clang_change.current_rev != clang_change.new_rev: + commit_msg.append('Clang version changed %s:%s' % + (clang_change.current_rev, clang_change.new_rev)) + change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, + CLANG_UPDATE_SCRIPT_URL_PATH) + commit_msg.append('Details: %s\n' % change_url) + else: + commit_msg.append('No update to Clang.\n') + + # TBR needs to be non-empty for Gerrit to process it. 
+ git_author = _RunCommand(['git', 'config', 'user.email'], + working_dir=CHECKOUT_SRC_DIR)[0].splitlines()[0] + tbr_authors = git_author + ',' + tbr_authors + + commit_msg.append('TBR=%s' % tbr_authors) + commit_msg.append('BUG=None') + return '\n'.join(commit_msg) def UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content): - """Update the DEPS file with the new revision.""" - - with open(deps_filename, 'rb') as deps_file: - deps_content = deps_file.read() - - # Update the chromium_revision variable. - deps_content = deps_content.replace(rev_update.current_chromium_rev, - rev_update.new_chromium_rev) - - # Add and remove dependencies. For now: only generated android deps. - # Since gclient cannot add or remove deps, we on the fact that - # these android deps are located in one place we can copy/paste. - deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END, - re.DOTALL) - new_deps = deps_re.search(new_cr_content) - old_deps = deps_re.search(deps_content) - if not new_deps or not old_deps: - faulty = 'Chromium' if not new_deps else 'WebRTC' - raise RollError('Was expecting to find "%s" and "%s"\n' - 'in %s DEPS' - % (ANDROID_DEPS_START, ANDROID_DEPS_END, faulty)) - deps_content = deps_re.sub(new_deps.group(0), deps_content) - - with open(deps_filename, 'wb') as deps_file: - deps_file.write(deps_content) - - # Update each individual DEPS entry. - for dep in changed_deps: - local_dep_dir = os.path.join(CHECKOUT_ROOT_DIR, dep.path) - if not os.path.isdir(local_dep_dir): - raise RollError( - 'Cannot find local directory %s. Either run\n' - 'gclient sync --deps=all\n' - 'or make sure the .gclient file for your solution contains all ' - 'platforms in the target_os list, i.e.\n' - 'target_os = ["android", "unix", "mac", "ios", "win"];\n' - 'Then run "gclient sync" again.' 
% local_dep_dir) - if isinstance(dep, ChangedCipdPackage): - package = dep.package.format() # Eliminate double curly brackets - update = '%s:%s@%s' % (dep.path, package, dep.new_version) - else: - update = '%s@%s' % (dep.path, dep.new_rev) - _RunCommand(['gclient', 'setdep', '--revision', update], - working_dir=CHECKOUT_SRC_DIR) + """Update the DEPS file with the new revision.""" + + with open(deps_filename, 'rb') as deps_file: + deps_content = deps_file.read() + + # Update the chromium_revision variable. + deps_content = deps_content.replace(rev_update.current_chromium_rev, + rev_update.new_chromium_rev) + + # Add and remove dependencies. For now: only generated android deps. + # Since gclient cannot add or remove deps, we on the fact that + # these android deps are located in one place we can copy/paste. + deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END, + re.DOTALL) + new_deps = deps_re.search(new_cr_content) + old_deps = deps_re.search(deps_content) + if not new_deps or not old_deps: + faulty = 'Chromium' if not new_deps else 'WebRTC' + raise RollError('Was expecting to find "%s" and "%s"\n' + 'in %s DEPS' % + (ANDROID_DEPS_START, ANDROID_DEPS_END, faulty)) + deps_content = deps_re.sub(new_deps.group(0), deps_content) + + with open(deps_filename, 'wb') as deps_file: + deps_file.write(deps_content) + + # Update each individual DEPS entry. + for dep in changed_deps: + local_dep_dir = os.path.join(CHECKOUT_ROOT_DIR, dep.path) + if not os.path.isdir(local_dep_dir): + raise RollError( + 'Cannot find local directory %s. Either run\n' + 'gclient sync --deps=all\n' + 'or make sure the .gclient file for your solution contains all ' + 'platforms in the target_os list, i.e.\n' + 'target_os = ["android", "unix", "mac", "ios", "win"];\n' + 'Then run "gclient sync" again.' 
% local_dep_dir) + if isinstance(dep, ChangedCipdPackage): + package = dep.package.format() # Eliminate double curly brackets + update = '%s:%s@%s' % (dep.path, package, dep.new_version) + else: + update = '%s@%s' % (dep.path, dep.new_rev) + _RunCommand(['gclient', 'setdep', '--revision', update], + working_dir=CHECKOUT_SRC_DIR) def _IsTreeClean(): - stdout, _ = _RunCommand(['git', 'status', '--porcelain']) - if len(stdout) == 0: - return True + stdout, _ = _RunCommand(['git', 'status', '--porcelain']) + if len(stdout) == 0: + return True - logging.error('Dirty/unversioned files:\n%s', stdout) - return False + logging.error('Dirty/unversioned files:\n%s', stdout) + return False def _EnsureUpdatedMasterBranch(dry_run): - current_branch = _RunCommand( - ['git', 'rev-parse', '--abbrev-ref', 'HEAD'])[0].splitlines()[0] - if current_branch != 'master': - logging.error('Please checkout the master branch and re-run this script.') - if not dry_run: - sys.exit(-1) + current_branch = _RunCommand(['git', 'rev-parse', '--abbrev-ref', + 'HEAD'])[0].splitlines()[0] + if current_branch != 'master': + logging.error( + 'Please checkout the master branch and re-run this script.') + if not dry_run: + sys.exit(-1) - logging.info('Updating master branch...') - _RunCommand(['git', 'pull']) + logging.info('Updating master branch...') + _RunCommand(['git', 'pull']) def _CreateRollBranch(dry_run): - logging.info('Creating roll branch: %s', ROLL_BRANCH_NAME) - if not dry_run: - _RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME]) + logging.info('Creating roll branch: %s', ROLL_BRANCH_NAME) + if not dry_run: + _RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME]) def _RemovePreviousRollBranch(dry_run): - active_branch, branches = _GetBranches() - if active_branch == ROLL_BRANCH_NAME: - active_branch = 'master' - if ROLL_BRANCH_NAME in branches: - logging.info('Removing previous roll branch (%s)', ROLL_BRANCH_NAME) - if not dry_run: - _RunCommand(['git', 'checkout', active_branch]) - 
_RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME]) + active_branch, branches = _GetBranches() + if active_branch == ROLL_BRANCH_NAME: + active_branch = 'master' + if ROLL_BRANCH_NAME in branches: + logging.info('Removing previous roll branch (%s)', ROLL_BRANCH_NAME) + if not dry_run: + _RunCommand(['git', 'checkout', active_branch]) + _RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME]) def _LocalCommit(commit_msg, dry_run): - logging.info('Committing changes locally.') - if not dry_run: - _RunCommand(['git', 'add', '--update', '.']) - _RunCommand(['git', 'commit', '-m', commit_msg]) + logging.info('Committing changes locally.') + if not dry_run: + _RunCommand(['git', 'add', '--update', '.']) + _RunCommand(['git', 'commit', '-m', commit_msg]) def ChooseCQMode(skip_cq, cq_over, current_commit_pos, new_commit_pos): - if skip_cq: - return 0 - if (new_commit_pos - current_commit_pos) < cq_over: - return 1 - return 2 + if skip_cq: + return 0 + if (new_commit_pos - current_commit_pos) < cq_over: + return 1 + return 2 def _UploadCL(commit_queue_mode): - """Upload the committed changes as a changelist to Gerrit. + """Upload the committed changes as a changelist to Gerrit. commit_queue_mode: - 2: Submit to commit queue. - 1: Run trybots but do not submit to CQ. - 0: Skip CQ, upload only. 
""" - cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks', '--send-mail'] - cmd.extend(['--cc', NOTIFY_EMAIL]) - if commit_queue_mode >= 2: - logging.info('Sending the CL to the CQ...') - cmd.extend(['--use-commit-queue']) - elif commit_queue_mode >= 1: - logging.info('Starting CQ dry run...') - cmd.extend(['--cq-dry-run']) - extra_env = { - 'EDITOR': 'true', - 'SKIP_GCE_AUTH_FOR_GIT': '1', - } - stdout, stderr = _RunCommand(cmd, extra_env=extra_env) - logging.debug('Output from "git cl upload":\nstdout:\n%s\n\nstderr:\n%s', - stdout, stderr) + cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks'] + if commit_queue_mode >= 2: + logging.info('Sending the CL to the CQ...') + cmd.extend(['--use-commit-queue']) + cmd.extend(['--send-mail', '--cc', NOTIFY_EMAIL]) + elif commit_queue_mode >= 1: + logging.info('Starting CQ dry run...') + cmd.extend(['--cq-dry-run']) + extra_env = { + 'EDITOR': 'true', + 'SKIP_GCE_AUTH_FOR_GIT': '1', + } + stdout, stderr = _RunCommand(cmd, extra_env=extra_env) + logging.debug('Output from "git cl upload":\nstdout:\n%s\n\nstderr:\n%s', + stdout, stderr) def GetRollRevisionRanges(opts, webrtc_deps): - current_cr_rev = webrtc_deps['vars']['chromium_revision'] - new_cr_rev = opts.revision - if not new_cr_rev: - stdout, _ = _RunCommand(['git', 'ls-remote', CHROMIUM_SRC_URL, 'HEAD']) - head_rev = stdout.strip().split('\t')[0] - logging.info('No revision specified. Using HEAD: %s', head_rev) - new_cr_rev = head_rev + current_cr_rev = webrtc_deps['vars']['chromium_revision'] + new_cr_rev = opts.revision + if not new_cr_rev: + stdout, _ = _RunCommand(['git', 'ls-remote', CHROMIUM_SRC_URL, 'HEAD']) + head_rev = stdout.strip().split('\t')[0] + logging.info('No revision specified. 
Using HEAD: %s', head_rev) + new_cr_rev = head_rev - return ChromiumRevisionUpdate(current_cr_rev, new_cr_rev) + return ChromiumRevisionUpdate(current_cr_rev, new_cr_rev) def main(): - p = argparse.ArgumentParser() - p.add_argument('--clean', action='store_true', default=False, - help='Removes any previous local roll branch.') - p.add_argument('-r', '--revision', - help=('Chromium Git revision to roll to. Defaults to the ' - 'Chromium HEAD revision if omitted.')) - p.add_argument('--dry-run', action='store_true', default=False, - help=('Calculate changes and modify DEPS, but don\'t create ' - 'any local branch, commit, upload CL or send any ' - 'tryjobs.')) - p.add_argument('-i', '--ignore-unclean-workdir', action='store_true', - default=False, - help=('Ignore if the current branch is not master or if there ' - 'are uncommitted changes (default: %(default)s).')) - grp = p.add_mutually_exclusive_group() - grp.add_argument('--skip-cq', action='store_true', default=False, - help='Skip sending the CL to the CQ (default: %(default)s)') - grp.add_argument('--cq-over', type=int, default=1, - help=('Commit queue dry run if the revision difference ' - 'is below this number (default: %(default)s)')) - p.add_argument('-v', '--verbose', action='store_true', default=False, - help='Be extra verbose in printing of log messages.') - opts = p.parse_args() - - if opts.verbose: - logging.basicConfig(level=logging.DEBUG) - else: - logging.basicConfig(level=logging.INFO) - - if not opts.ignore_unclean_workdir and not _IsTreeClean(): - logging.error('Please clean your local checkout first.') - return 1 - - if opts.clean: - _RemovePreviousRollBranch(opts.dry_run) - - if not opts.ignore_unclean_workdir: - _EnsureUpdatedMasterBranch(opts.dry_run) - - deps_filename = os.path.join(CHECKOUT_SRC_DIR, 'DEPS') - webrtc_deps = ParseLocalDepsFile(deps_filename) - - rev_update = GetRollRevisionRanges(opts, webrtc_deps) - - current_commit_pos = ParseCommitPosition( - 
ReadRemoteCrCommit(rev_update.current_chromium_rev)) - new_commit_pos = ParseCommitPosition( - ReadRemoteCrCommit(rev_update.new_chromium_rev)) - - new_cr_content = ReadRemoteCrFile('DEPS', rev_update.new_chromium_rev) - new_cr_deps = ParseDepsDict(new_cr_content) - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - # Discard other deps, assumed to be chromium-only dependencies. - new_generated_android_deps, _ = FindAddedDeps(webrtc_deps, new_cr_deps) - removed_generated_android_deps, other_deps = FindRemovedDeps(webrtc_deps, - new_cr_deps) - if other_deps: - raise RollError('WebRTC DEPS entries are missing from Chromium: %s.\n' - 'Remove them or add them to either ' - 'WEBRTC_ONLY_DEPS or DONT_AUTOROLL_THESE.' % other_deps) - clang_change = CalculateChangedClang(rev_update.new_chromium_rev) - commit_msg = GenerateCommitMessage( - rev_update, current_commit_pos, new_commit_pos, changed_deps, - added_deps_paths=new_generated_android_deps, - removed_deps_paths=removed_generated_android_deps, - clang_change=clang_change) - logging.debug('Commit message:\n%s', commit_msg) - - _CreateRollBranch(opts.dry_run) - if not opts.dry_run: - UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content) - if _IsTreeClean(): - logging.info("No DEPS changes detected, skipping CL creation.") - else: - _LocalCommit(commit_msg, opts.dry_run) - commit_queue_mode = ChooseCQMode(opts.skip_cq, opts.cq_over, - current_commit_pos, new_commit_pos) - logging.info('Uploading CL...') + p = argparse.ArgumentParser() + p.add_argument('--clean', + action='store_true', + default=False, + help='Removes any previous local roll branch.') + p.add_argument('-r', + '--revision', + help=('Chromium Git revision to roll to. 
Defaults to the ' + 'Chromium HEAD revision if omitted.')) + p.add_argument( + '--dry-run', + action='store_true', + default=False, + help=('Calculate changes and modify DEPS, but don\'t create ' + 'any local branch, commit, upload CL or send any ' + 'tryjobs.')) + p.add_argument( + '-i', + '--ignore-unclean-workdir', + action='store_true', + default=False, + help=('Ignore if the current branch is not master or if there ' + 'are uncommitted changes (default: %(default)s).')) + grp = p.add_mutually_exclusive_group() + grp.add_argument( + '--skip-cq', + action='store_true', + default=False, + help='Skip sending the CL to the CQ (default: %(default)s)') + grp.add_argument('--cq-over', + type=int, + default=1, + help=('Commit queue dry run if the revision difference ' + 'is below this number (default: %(default)s)')) + p.add_argument('-v', + '--verbose', + action='store_true', + default=False, + help='Be extra verbose in printing of log messages.') + opts = p.parse_args() + + if opts.verbose: + logging.basicConfig(level=logging.DEBUG) + else: + logging.basicConfig(level=logging.INFO) + + if not opts.ignore_unclean_workdir and not _IsTreeClean(): + logging.error('Please clean your local checkout first.') + return 1 + + if opts.clean: + _RemovePreviousRollBranch(opts.dry_run) + + if not opts.ignore_unclean_workdir: + _EnsureUpdatedMasterBranch(opts.dry_run) + + deps_filename = os.path.join(CHECKOUT_SRC_DIR, 'DEPS') + webrtc_deps = ParseLocalDepsFile(deps_filename) + + rev_update = GetRollRevisionRanges(opts, webrtc_deps) + + current_commit_pos = ParseCommitPosition( + ReadRemoteCrCommit(rev_update.current_chromium_rev)) + new_commit_pos = ParseCommitPosition( + ReadRemoteCrCommit(rev_update.new_chromium_rev)) + + new_cr_content = ReadRemoteCrFile('DEPS', rev_update.new_chromium_rev) + new_cr_deps = ParseDepsDict(new_cr_content) + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + # Discard other deps, assumed to be chromium-only dependencies. 
+ new_generated_android_deps, _ = FindAddedDeps(webrtc_deps, new_cr_deps) + removed_generated_android_deps, other_deps = FindRemovedDeps( + webrtc_deps, new_cr_deps) + if other_deps: + raise RollError('WebRTC DEPS entries are missing from Chromium: %s.\n' + 'Remove them or add them to either ' + 'WEBRTC_ONLY_DEPS or DONT_AUTOROLL_THESE.' % + other_deps) + clang_change = CalculateChangedClang(rev_update.new_chromium_rev) + commit_msg = GenerateCommitMessage( + rev_update, + current_commit_pos, + new_commit_pos, + changed_deps, + added_deps_paths=new_generated_android_deps, + removed_deps_paths=removed_generated_android_deps, + clang_change=clang_change) + logging.debug('Commit message:\n%s', commit_msg) + + _CreateRollBranch(opts.dry_run) if not opts.dry_run: - _UploadCL(commit_queue_mode) - return 0 + UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content) + if _IsTreeClean(): + logging.info("No DEPS changes detected, skipping CL creation.") + else: + _LocalCommit(commit_msg, opts.dry_run) + commit_queue_mode = ChooseCQMode(opts.skip_cq, opts.cq_over, + current_commit_pos, new_commit_pos) + logging.info('Uploading CL...') + if not opts.dry_run: + _UploadCL(commit_queue_mode) + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/autoroller/unittests/roll_deps_test.py b/tools_webrtc/autoroller/unittests/roll_deps_test.py index 8d68bddc35..8f1e732ca4 100755 --- a/tools_webrtc/autoroller/unittests/roll_deps_test.py +++ b/tools_webrtc/autoroller/unittests/roll_deps_test.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env vpython # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 
# # Use of this source code is governed by a BSD-style license @@ -14,7 +14,6 @@ import tempfile import unittest - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir) sys.path.append(PARENT_DIR) @@ -24,20 +23,18 @@ GetMatchingDepsEntries, ParseDepsDict, ParseLocalDepsFile, UpdateDepsFile, \ ChromiumRevisionUpdate -SRC_DIR = os.path.join(PARENT_DIR, os.pardir, os.pardir) -sys.path.append(os.path.join(SRC_DIR, 'third_party', 'pymock')) import mock TEST_DATA_VARS = { - 'chromium_git': 'https://chromium.googlesource.com', - 'chromium_revision': '1b9c098a08e40114e44b6c1ec33ddf95c40b901d', + 'chromium_git': 'https://chromium.googlesource.com', + 'chromium_revision': '1b9c098a08e40114e44b6c1ec33ddf95c40b901d', } DEPS_ENTRIES = { - 'src/build': 'https://build.com', - 'src/third_party/depot_tools': 'https://depottools.com', - 'src/testing/gtest': 'https://gtest.com', - 'src/testing/gmock': 'https://gmock.com', + 'src/build': 'https://build.com', + 'src/third_party/depot_tools': 'https://depottools.com', + 'src/testing/gtest': 'https://gtest.com', + 'src/testing/gmock': 'https://gmock.com', } BUILD_OLD_REV = '52f7afeca991d96d68cf0507e20dbdd5b845691f' @@ -49,291 +46,298 @@ class TestError(Exception): - pass + pass class FakeCmd(object): - def __init__(self): - self.expectations = [] - - def AddExpectation(self, *args, **kwargs): - returns = kwargs.pop('_returns', None) - ignores = kwargs.pop('_ignores', []) - self.expectations.append((args, kwargs, returns, ignores)) - - def __call__(self, *args, **kwargs): - if not self.expectations: - raise TestError('Got unexpected\n%s\n%s' % (args, kwargs)) - exp_args, exp_kwargs, exp_returns, ignores = self.expectations.pop(0) - for item in ignores: - kwargs.pop(item, None) - if args != exp_args or kwargs != exp_kwargs: - message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, exp_kwargs) - message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs) - raise TestError(message) - 
return exp_returns + def __init__(self): + self.expectations = [] + + def AddExpectation(self, *args, **kwargs): + returns = kwargs.pop('_returns', None) + ignores = kwargs.pop('_ignores', []) + self.expectations.append((args, kwargs, returns, ignores)) + + def __call__(self, *args, **kwargs): + if not self.expectations: + raise TestError('Got unexpected\n%s\n%s' % (args, kwargs)) + exp_args, exp_kwargs, exp_returns, ignores = self.expectations.pop(0) + for item in ignores: + kwargs.pop(item, None) + if args != exp_args or kwargs != exp_kwargs: + message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, + exp_kwargs) + message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs) + raise TestError(message) + return exp_returns class NullCmd(object): - """No-op mock when calls mustn't be checked. """ + """No-op mock when calls mustn't be checked. """ - def __call__(self, *args, **kwargs): - # Empty stdout and stderr. - return None, None + def __call__(self, *args, **kwargs): + # Empty stdout and stderr. 
+ return None, None class TestRollChromiumRevision(unittest.TestCase): - def setUp(self): - self._output_dir = tempfile.mkdtemp() - test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps') - for test_file in glob.glob(os.path.join(test_data_dir, '*')): - shutil.copy(test_file, self._output_dir) - join = lambda f: os.path.join(self._output_dir, f) - self._webrtc_depsfile = join('DEPS') - self._new_cr_depsfile = join('DEPS.chromium.new') - self._webrtc_depsfile_android = join('DEPS.with_android_deps') - self._new_cr_depsfile_android = join('DEPS.chromium.with_android_deps') - self.fake = FakeCmd() - - def tearDown(self): - shutil.rmtree(self._output_dir, ignore_errors=True) - self.assertEqual(self.fake.expectations, []) - - def testVarLookup(self): - local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}} - lookup = roll_deps.VarLookup(local_scope) - self.assertEquals(lookup('foo'), 'bar') - - def testUpdateDepsFile(self): - new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111' - current_rev = TEST_DATA_VARS['chromium_revision'] - - with open(self._new_cr_depsfile_android) as deps_file: - new_cr_contents = deps_file.read() - - UpdateDepsFile(self._webrtc_depsfile, - ChromiumRevisionUpdate(current_rev, new_rev), - [], - new_cr_contents) - with open(self._webrtc_depsfile) as deps_file: - deps_contents = deps_file.read() - self.assertTrue(new_rev in deps_contents, - 'Failed to find %s in\n%s' % (new_rev, deps_contents)) - - def _UpdateDepsSetup(self): - with open(self._webrtc_depsfile_android) as deps_file: - webrtc_contents = deps_file.read() - with open(self._new_cr_depsfile_android) as deps_file: - new_cr_contents = deps_file.read() - webrtc_deps = ParseDepsDict(webrtc_contents) - new_cr_deps = ParseDepsDict(new_cr_contents) - - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - with mock.patch('roll_deps._RunCommand', NullCmd()): - UpdateDepsFile(self._webrtc_depsfile_android, - NO_CHROMIUM_REVISION_UPDATE, - changed_deps, - new_cr_contents) - - 
with open(self._webrtc_depsfile_android) as deps_file: - updated_contents = deps_file.read() - - return webrtc_contents, updated_contents - - def testUpdateAndroidGeneratedDeps(self): - _, updated_contents = self._UpdateDepsSetup() - - changed = 'third_party/android_deps/libs/android_arch_core_common' - changed_version = '1.0.0-cr0' - self.assertTrue(changed in updated_contents) - self.assertTrue(changed_version in updated_contents) - - def testAddAndroidGeneratedDeps(self): - webrtc_contents, updated_contents = self._UpdateDepsSetup() - - added = 'third_party/android_deps/libs/android_arch_lifecycle_common' - self.assertFalse(added in webrtc_contents) - self.assertTrue(added in updated_contents) - - def testRemoveAndroidGeneratedDeps(self): - webrtc_contents, updated_contents = self._UpdateDepsSetup() - - removed = 'third_party/android_deps/libs/android_arch_lifecycle_runtime' - self.assertTrue(removed in webrtc_contents) - self.assertFalse(removed in updated_contents) - - def testParseDepsDict(self): - with open(self._webrtc_depsfile) as deps_file: - deps_contents = deps_file.read() - local_scope = ParseDepsDict(deps_contents) - vars_dict = local_scope['vars'] - - def AssertVar(variable_name): - self.assertEquals(vars_dict[variable_name], TEST_DATA_VARS[variable_name]) - AssertVar('chromium_git') - AssertVar('chromium_revision') - self.assertEquals(len(local_scope['deps']), 3) - self.assertEquals(len(local_scope['deps_os']), 1) - - def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self): - entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest') - self.assertEquals(len(entries), 1) - self.assertEquals(entries[0], DEPS_ENTRIES['src/testing/gtest']) - - def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self): - entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing') - self.assertEquals(len(entries), 2) - - def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self): - entries = GetMatchingDepsEntries(DEPS_ENTRIES, 
'src/build') - self.assertEquals(len(entries), 1) - - - def testCalculateChangedDeps(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile) - with mock.patch('roll_deps._RunCommand', self.fake): - _SetupGitLsRemoteCall( - self.fake, 'https://chromium.googlesource.com/chromium/src/build', - BUILD_NEW_REV) - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - - self.assertEquals(len(changed_deps), 3) - self.assertEquals(changed_deps[0].path, 'src/build') - self.assertEquals(changed_deps[0].current_rev, BUILD_OLD_REV) - self.assertEquals(changed_deps[0].new_rev, BUILD_NEW_REV) - - self.assertEquals(changed_deps[1].path, 'src/third_party/depot_tools') - self.assertEquals(changed_deps[1].current_rev, DEPOTTOOLS_OLD_REV) - self.assertEquals(changed_deps[1].new_rev, DEPOTTOOLS_NEW_REV) - - self.assertEquals(changed_deps[2].path, 'src/third_party/xstream') - self.assertEquals(changed_deps[2].package, 'chromium/third_party/xstream') - self.assertEquals(changed_deps[2].current_version, 'version:1.4.8-cr0') - self.assertEquals(changed_deps[2].new_version, 'version:1.10.0-cr0') - - def testWithDistinctDeps(self): - """Check CalculateChangedDeps still works when deps are added/removed. 
""" - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - self.assertEquals(len(changed_deps), 1) - self.assertEquals( - changed_deps[0].path, - 'src/third_party/android_deps/libs/android_arch_core_common') - self.assertEquals( - changed_deps[0].package, - 'chromium/third_party/android_deps/libs/android_arch_core_common') - self.assertEquals(changed_deps[0].current_version, 'version:0.9.0') - self.assertEquals(changed_deps[0].new_version, 'version:1.0.0-cr0') - - def testFindAddedDeps(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - added_android_paths, other_paths = FindAddedDeps(webrtc_deps, new_cr_deps) - self.assertEquals( - added_android_paths, - ['src/third_party/android_deps/libs/android_arch_lifecycle_common']) - self.assertEquals(other_paths, []) - - def testFindRemovedDeps(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - removed_android_paths, other_paths = FindRemovedDeps(webrtc_deps, - new_cr_deps) - self.assertEquals(removed_android_paths, - ['src/third_party/android_deps/libs/android_arch_lifecycle_runtime']) - self.assertEquals(other_paths, []) - - def testMissingDepsIsDetected(self): - """Check an error is reported when deps cannot be automatically removed.""" - # The situation at test is the following: - # * A WebRTC DEPS entry is missing from Chromium. - # * The dependency isn't an android_deps (those are supported). 
- webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - _, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps) - self.assertEquals(other_paths, ['src/third_party/xstream', - 'src/third_party/depot_tools']) - - def testExpectedDepsIsNotReportedMissing(self): - """Some deps musn't be seen as missing, even if absent from Chromium.""" - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - removed_android_paths, other_paths = FindRemovedDeps(webrtc_deps, - new_cr_deps) - self.assertTrue('src/build' not in removed_android_paths) - self.assertTrue('src/build' not in other_paths) - - def _CommitMessageSetup(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - added_paths, _ = FindAddedDeps(webrtc_deps, new_cr_deps) - removed_paths, _ = FindRemovedDeps(webrtc_deps, new_cr_deps) - - current_commit_pos = 'cafe' - new_commit_pos = 'f00d' - - with mock.patch('roll_deps._RunCommand', self.fake): - # We don't really care, but it's needed to construct the message. - self.fake.AddExpectation(['git', 'config', 'user.email'], - _returns=('nobody@nowhere.no', None), - _ignores=['working_dir']) - - commit_msg = GenerateCommitMessage( - NO_CHROMIUM_REVISION_UPDATE, current_commit_pos, new_commit_pos, - changed_deps, added_paths, removed_paths) - - return [l.strip() for l in commit_msg.split('\n')] - - def testChangedDepsInCommitMessage(self): - commit_lines = self._CommitMessageSetup() - - changed = '* src/third_party/android_deps/libs/' \ - 'android_arch_core_common: version:0.9.0..version:1.0.0-cr0' - self.assertTrue(changed in commit_lines) - # Check it is in adequate section. 
- changed_line = commit_lines.index(changed) - self.assertTrue('Changed' in commit_lines[changed_line-1]) - - def testAddedDepsInCommitMessage(self): - commit_lines = self._CommitMessageSetup() - - added = '* src/third_party/android_deps/libs/' \ - 'android_arch_lifecycle_common' - self.assertTrue(added in commit_lines) - # Check it is in adequate section. - added_line = commit_lines.index(added) - self.assertTrue('Added' in commit_lines[added_line-1]) - - def testRemovedDepsInCommitMessage(self): - commit_lines = self._CommitMessageSetup() - - removed = '* src/third_party/android_deps/libs/' \ - 'android_arch_lifecycle_runtime' - self.assertTrue(removed in commit_lines) - # Check it is in adequate section. - removed_line = commit_lines.index(removed) - self.assertTrue('Removed' in commit_lines[removed_line-1]) + def setUp(self): + self._output_dir = tempfile.mkdtemp() + test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps') + for test_file in glob.glob(os.path.join(test_data_dir, '*')): + shutil.copy(test_file, self._output_dir) + join = lambda f: os.path.join(self._output_dir, f) + self._webrtc_depsfile = join('DEPS') + self._new_cr_depsfile = join('DEPS.chromium.new') + self._webrtc_depsfile_android = join('DEPS.with_android_deps') + self._new_cr_depsfile_android = join('DEPS.chromium.with_android_deps') + self.fake = FakeCmd() + + def tearDown(self): + shutil.rmtree(self._output_dir, ignore_errors=True) + self.assertEqual(self.fake.expectations, []) + + def testVarLookup(self): + local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}} + lookup = roll_deps.VarLookup(local_scope) + self.assertEquals(lookup('foo'), 'bar') + + def testUpdateDepsFile(self): + new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111' + current_rev = TEST_DATA_VARS['chromium_revision'] + + with open(self._new_cr_depsfile_android) as deps_file: + new_cr_contents = deps_file.read() + + UpdateDepsFile(self._webrtc_depsfile, + ChromiumRevisionUpdate(current_rev, new_rev), [], + 
new_cr_contents) + with open(self._webrtc_depsfile) as deps_file: + deps_contents = deps_file.read() + self.assertTrue( + new_rev in deps_contents, + 'Failed to find %s in\n%s' % (new_rev, deps_contents)) + + def _UpdateDepsSetup(self): + with open(self._webrtc_depsfile_android) as deps_file: + webrtc_contents = deps_file.read() + with open(self._new_cr_depsfile_android) as deps_file: + new_cr_contents = deps_file.read() + webrtc_deps = ParseDepsDict(webrtc_contents) + new_cr_deps = ParseDepsDict(new_cr_contents) + + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + with mock.patch('roll_deps._RunCommand', NullCmd()): + UpdateDepsFile(self._webrtc_depsfile_android, + NO_CHROMIUM_REVISION_UPDATE, changed_deps, + new_cr_contents) + + with open(self._webrtc_depsfile_android) as deps_file: + updated_contents = deps_file.read() + + return webrtc_contents, updated_contents + + def testUpdateAndroidGeneratedDeps(self): + _, updated_contents = self._UpdateDepsSetup() + + changed = 'third_party/android_deps/libs/android_arch_core_common' + changed_version = '1.0.0-cr0' + self.assertTrue(changed in updated_contents) + self.assertTrue(changed_version in updated_contents) + + def testAddAndroidGeneratedDeps(self): + webrtc_contents, updated_contents = self._UpdateDepsSetup() + + added = 'third_party/android_deps/libs/android_arch_lifecycle_common' + self.assertFalse(added in webrtc_contents) + self.assertTrue(added in updated_contents) + + def testRemoveAndroidGeneratedDeps(self): + webrtc_contents, updated_contents = self._UpdateDepsSetup() + + removed = 'third_party/android_deps/libs/android_arch_lifecycle_runtime' + self.assertTrue(removed in webrtc_contents) + self.assertFalse(removed in updated_contents) + + def testParseDepsDict(self): + with open(self._webrtc_depsfile) as deps_file: + deps_contents = deps_file.read() + local_scope = ParseDepsDict(deps_contents) + vars_dict = local_scope['vars'] + + def AssertVar(variable_name): + 
self.assertEquals(vars_dict[variable_name], + TEST_DATA_VARS[variable_name]) + + AssertVar('chromium_git') + AssertVar('chromium_revision') + self.assertEquals(len(local_scope['deps']), 3) + self.assertEquals(len(local_scope['deps_os']), 1) + + def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self): + entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest') + self.assertEquals(len(entries), 1) + self.assertEquals(entries[0], DEPS_ENTRIES['src/testing/gtest']) + + def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self): + entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing') + self.assertEquals(len(entries), 2) + + def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self): + entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build') + self.assertEquals(len(entries), 1) + + def testCalculateChangedDeps(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile) + with mock.patch('roll_deps._RunCommand', self.fake): + _SetupGitLsRemoteCall( + self.fake, + 'https://chromium.googlesource.com/chromium/src/build', + BUILD_NEW_REV) + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + + self.assertEquals(len(changed_deps), 3) + self.assertEquals(changed_deps[0].path, 'src/build') + self.assertEquals(changed_deps[0].current_rev, BUILD_OLD_REV) + self.assertEquals(changed_deps[0].new_rev, BUILD_NEW_REV) + + self.assertEquals(changed_deps[1].path, 'src/third_party/depot_tools') + self.assertEquals(changed_deps[1].current_rev, DEPOTTOOLS_OLD_REV) + self.assertEquals(changed_deps[1].new_rev, DEPOTTOOLS_NEW_REV) + + self.assertEquals(changed_deps[2].path, 'src/third_party/xstream') + self.assertEquals(changed_deps[2].package, + 'chromium/third_party/xstream') + self.assertEquals(changed_deps[2].current_version, 'version:1.4.8-cr0') + self.assertEquals(changed_deps[2].new_version, 'version:1.10.0-cr0') + + def testWithDistinctDeps(self): + """Check 
CalculateChangedDeps still works when deps are added/removed. """ + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + self.assertEquals(len(changed_deps), 1) + self.assertEquals( + changed_deps[0].path, + 'src/third_party/android_deps/libs/android_arch_core_common') + self.assertEquals( + changed_deps[0].package, + 'chromium/third_party/android_deps/libs/android_arch_core_common') + self.assertEquals(changed_deps[0].current_version, 'version:0.9.0') + self.assertEquals(changed_deps[0].new_version, 'version:1.0.0-cr0') + + def testFindAddedDeps(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + added_android_paths, other_paths = FindAddedDeps( + webrtc_deps, new_cr_deps) + self.assertEquals(added_android_paths, [ + 'src/third_party/android_deps/libs/android_arch_lifecycle_common' + ]) + self.assertEquals(other_paths, []) + + def testFindRemovedDeps(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + removed_android_paths, other_paths = FindRemovedDeps( + webrtc_deps, new_cr_deps) + self.assertEquals(removed_android_paths, [ + 'src/third_party/android_deps/libs/android_arch_lifecycle_runtime' + ]) + self.assertEquals(other_paths, []) + + def testMissingDepsIsDetected(self): + """Check an error is reported when deps cannot be automatically removed.""" + # The situation at test is the following: + # * A WebRTC DEPS entry is missing from Chromium. + # * The dependency isn't an android_deps (those are supported). 
+ webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + _, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps) + self.assertEquals( + other_paths, + ['src/third_party/xstream', 'src/third_party/depot_tools']) + + def testExpectedDepsIsNotReportedMissing(self): + """Some deps musn't be seen as missing, even if absent from Chromium.""" + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + removed_android_paths, other_paths = FindRemovedDeps( + webrtc_deps, new_cr_deps) + self.assertTrue('src/build' not in removed_android_paths) + self.assertTrue('src/build' not in other_paths) + + def _CommitMessageSetup(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + added_paths, _ = FindAddedDeps(webrtc_deps, new_cr_deps) + removed_paths, _ = FindRemovedDeps(webrtc_deps, new_cr_deps) + + current_commit_pos = 'cafe' + new_commit_pos = 'f00d' + + with mock.patch('roll_deps._RunCommand', self.fake): + # We don't really care, but it's needed to construct the message. + self.fake.AddExpectation(['git', 'config', 'user.email'], + _returns=('nobody@nowhere.no', None), + _ignores=['working_dir']) + + commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE, + current_commit_pos, + new_commit_pos, changed_deps, + added_paths, removed_paths) + + return [l.strip() for l in commit_msg.split('\n')] + + def testChangedDepsInCommitMessage(self): + commit_lines = self._CommitMessageSetup() + + changed = '* src/third_party/android_deps/libs/' \ + 'android_arch_core_common: version:0.9.0..version:1.0.0-cr0' + self.assertTrue(changed in commit_lines) + # Check it is in adequate section. 
+ changed_line = commit_lines.index(changed) + self.assertTrue('Changed' in commit_lines[changed_line - 1]) + + def testAddedDepsInCommitMessage(self): + commit_lines = self._CommitMessageSetup() + + added = '* src/third_party/android_deps/libs/' \ + 'android_arch_lifecycle_common' + self.assertTrue(added in commit_lines) + # Check it is in adequate section. + added_line = commit_lines.index(added) + self.assertTrue('Added' in commit_lines[added_line - 1]) + + def testRemovedDepsInCommitMessage(self): + commit_lines = self._CommitMessageSetup() + + removed = '* src/third_party/android_deps/libs/' \ + 'android_arch_lifecycle_runtime' + self.assertTrue(removed in commit_lines) + # Check it is in adequate section. + removed_line = commit_lines.index(removed) + self.assertTrue('Removed' in commit_lines[removed_line - 1]) class TestChooseCQMode(unittest.TestCase): - def testSkip(self): - self.assertEquals(ChooseCQMode(True, 99, 500000, 500100), 0) + def testSkip(self): + self.assertEquals(ChooseCQMode(True, 99, 500000, 500100), 0) - def testDryRun(self): - self.assertEquals(ChooseCQMode(False, 101, 500000, 500100), 1) + def testDryRun(self): + self.assertEquals(ChooseCQMode(False, 101, 500000, 500100), 1) - def testSubmit(self): - self.assertEquals(ChooseCQMode(False, 100, 500000, 500100), 2) + def testSubmit(self): + self.assertEquals(ChooseCQMode(False, 100, 500000, 500100), 2) def _SetupGitLsRemoteCall(cmd_fake, url, revision): - cmd = ['git', 'ls-remote', url, revision] - cmd_fake.AddExpectation(cmd, _returns=(revision, None)) + cmd = ['git', 'ls-remote', url, revision] + cmd_fake.AddExpectation(cmd, _returns=(revision, None)) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/clang_tidy.py b/tools_webrtc/clang_tidy.py index 49a9427f09..bce2549aed 100755 --- a/tools_webrtc/clang_tidy.py +++ b/tools_webrtc/clang_tidy.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. 
All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Invoke clang-tidy tool. Usage: clang_tidy.py file.cc [clang-tidy-args...] @@ -25,7 +24,6 @@ from presubmit_checks_lib.build_helpers import GetClangTidyPath, \ GetCompilationCommand - # We enable all checkers by default for investigation purpose. # This includes clang-analyzer-* checks. # Individual checkers can be disabled via command line options. @@ -34,63 +32,66 @@ def Process(filepath, args): - # Build directory is needed to gather compilation flags. - # Create a temporary one (instead of reusing an existing one) - # to keep the CLI simple and unencumbered. - out_dir = tempfile.mkdtemp('clang_tidy') - - try: - gn_args = [] # Use default build. - command = GetCompilationCommand(filepath, gn_args, out_dir) - - # Remove warning flags. They aren't needed and they cause trouble - # when clang-tidy doesn't match most recent clang. - # Same battle for -f (e.g. -fcomplete-member-pointers). - command = [arg for arg in command if not (arg.startswith('-W') or - arg.startswith('-f'))] - - # Path from build dir. - rel_path = os.path.relpath(os.path.abspath(filepath), out_dir) - - # Replace clang++ by clang-tidy - command[0:1] = [GetClangTidyPath(), - CHECKER_OPTION, - rel_path] + args + ['--'] # Separator for clang flags. - print "Running: %s" % ' '.join(command) - # Run from build dir so that relative paths are correct. - p = subprocess.Popen(command, cwd=out_dir, - stdout=sys.stdout, stderr=sys.stderr) - p.communicate() - return p.returncode - finally: - shutil.rmtree(out_dir, ignore_errors=True) + # Build directory is needed to gather compilation flags. + # Create a temporary one (instead of reusing an existing one) + # to keep the CLI simple and unencumbered. + out_dir = tempfile.mkdtemp('clang_tidy') + + try: + gn_args = [] # Use default build. + command = GetCompilationCommand(filepath, gn_args, out_dir) + + # Remove warning flags. 
They aren't needed and they cause trouble + # when clang-tidy doesn't match most recent clang. + # Same battle for -f (e.g. -fcomplete-member-pointers). + command = [ + arg for arg in command + if not (arg.startswith('-W') or arg.startswith('-f')) + ] + + # Path from build dir. + rel_path = os.path.relpath(os.path.abspath(filepath), out_dir) + + # Replace clang++ by clang-tidy + command[0:1] = [GetClangTidyPath(), CHECKER_OPTION, rel_path + ] + args + ['--'] # Separator for clang flags. + print "Running: %s" % ' '.join(command) + # Run from build dir so that relative paths are correct. + p = subprocess.Popen(command, + cwd=out_dir, + stdout=sys.stdout, + stderr=sys.stderr) + p.communicate() + return p.returncode + finally: + shutil.rmtree(out_dir, ignore_errors=True) def ValidateCC(filepath): - """We can only analyze .cc files. Provide explicit message about that.""" - if filepath.endswith('.cc'): - return filepath - msg = ('%s not supported.\n' - 'For now, we can only analyze translation units (.cc files).' % - filepath) - raise argparse.ArgumentTypeError(msg) + """We can only analyze .cc files. Provide explicit message about that.""" + if filepath.endswith('.cc'): + return filepath + msg = ('%s not supported.\n' + 'For now, we can only analyze translation units (.cc files).' 
% + filepath) + raise argparse.ArgumentTypeError(msg) def Main(): - description = ( - "Run clang-tidy on single cc file.\n" - "Use flags, defines and include paths as in default debug build.\n" - "WARNING, this is a POC version with rough edges.") - parser = argparse.ArgumentParser(description=description) - parser.add_argument('filepath', - help='Specifies the path of the .cc file to analyze.', - type=ValidateCC) - parser.add_argument('args', - nargs=argparse.REMAINDER, - help='Arguments passed to clang-tidy') - parsed_args = parser.parse_args() - return Process(parsed_args.filepath, parsed_args.args) + description = ( + "Run clang-tidy on single cc file.\n" + "Use flags, defines and include paths as in default debug build.\n" + "WARNING, this is a POC version with rough edges.") + parser = argparse.ArgumentParser(description=description) + parser.add_argument('filepath', + help='Specifies the path of the .cc file to analyze.', + type=ValidateCC) + parser.add_argument('args', + nargs=argparse.REMAINDER, + help='Arguments passed to clang-tidy') + parsed_args = parser.parse_args() + return Process(parsed_args.filepath, parsed_args.args) if __name__ == '__main__': - sys.exit(Main()) + sys.exit(Main()) diff --git a/tools_webrtc/coverage/generate_coverage_command.py b/tools_webrtc/coverage/generate_coverage_command.py index 856666816d..894731b8b7 100644 --- a/tools_webrtc/coverage/generate_coverage_command.py +++ b/tools_webrtc/coverage/generate_coverage_command.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Generates a command-line for coverage.py. Useful for manual coverage runs. 
Before running the generated command line, do this: @@ -17,39 +16,32 @@ import sys TESTS = [ - 'video_capture_tests', - 'webrtc_nonparallel_tests', - 'video_engine_tests', - 'tools_unittests', - 'test_support_unittests', - 'slow_tests', - 'system_wrappers_unittests', - 'rtc_unittests', - 'rtc_stats_unittests', - 'rtc_pc_unittests', - 'rtc_media_unittests', - 'peerconnection_unittests', - 'modules_unittests', - 'modules_tests', - 'low_bandwidth_audio_test', - 'common_video_unittests', - 'common_audio_unittests', - 'audio_decoder_unittests' + 'video_capture_tests', 'webrtc_nonparallel_tests', 'video_engine_tests', + 'tools_unittests', 'test_support_unittests', 'slow_tests', + 'system_wrappers_unittests', 'rtc_unittests', 'rtc_stats_unittests', + 'rtc_pc_unittests', 'rtc_media_unittests', 'peerconnection_unittests', + 'modules_unittests', 'modules_tests', 'low_bandwidth_audio_test', + 'common_video_unittests', 'common_audio_unittests', + 'audio_decoder_unittests' ] + def main(): - cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] + TESTS + - ['-b out/coverage', '-o out/report'] + - ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] + - ['-c \'out/coverage/%s\'' % t for t in TESTS]) + cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] + TESTS + + ['-b out/coverage', '-o out/report'] + + ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] + + ['-c \'out/coverage/%s\'' % t for t in TESTS]) + + def WithXvfb(binary): + return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary) + + modules_unittests = 'out/coverage/modules_unittests' + cmd[cmd.index('-c \'%s\'' % + modules_unittests)] = WithXvfb(modules_unittests) - def WithXvfb(binary): - return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary) - modules_unittests = 'out/coverage/modules_unittests' - cmd[cmd.index('-c \'%s\'' % modules_unittests)] = WithXvfb(modules_unittests) + print ' '.join(cmd) + return 0 - print ' '.join(cmd) - return 0 if __name__ == '__main__': - sys.exit(main()) + 
sys.exit(main()) diff --git a/tools_webrtc/coverage/generate_ios_coverage_command.py b/tools_webrtc/coverage/generate_ios_coverage_command.py index f81ee2c62b..e01a75c049 100644 --- a/tools_webrtc/coverage/generate_ios_coverage_command.py +++ b/tools_webrtc/coverage/generate_ios_coverage_command.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Generates command-line instructions to produce one-time iOS coverage using coverage.py. @@ -53,122 +52,115 @@ def _GetBinaryPath(command): DIRECTORY = 'out/coverage' TESTS = [ - 'audio_decoder_unittests', - 'common_audio_unittests', - 'common_video_unittests', - 'modules_tests', - 'modules_unittests', - 'rtc_media_unittests', - 'rtc_pc_unittests', - 'rtc_stats_unittests', - 'rtc_unittests', - 'slow_tests', - 'system_wrappers_unittests', - 'test_support_unittests', - 'tools_unittests', - 'video_capture_tests', - 'video_engine_tests', - 'webrtc_nonparallel_tests', + 'audio_decoder_unittests', + 'common_audio_unittests', + 'common_video_unittests', + 'modules_tests', + 'modules_unittests', + 'rtc_media_unittests', + 'rtc_pc_unittests', + 'rtc_stats_unittests', + 'rtc_unittests', + 'slow_tests', + 'system_wrappers_unittests', + 'test_support_unittests', + 'tools_unittests', + 'video_capture_tests', + 'video_engine_tests', + 'webrtc_nonparallel_tests', ] XC_TESTS = [ - 'apprtcmobile_tests', - 'sdk_framework_unittests', - 'sdk_unittests', + 'apprtcmobile_tests', + 'sdk_framework_unittests', + 'sdk_unittests', ] def FormatIossimTest(test_name, is_xctest=False): - args = ['%s/%s.app' % (DIRECTORY, test_name)] - if is_xctest: - args += ['%s/%s_module.xctest' % (DIRECTORY, test_name)] + args = ['%s/%s.app' % (DIRECTORY, test_name)] + if is_xctest: + args += ['%s/%s_module.xctest' % (DIRECTORY, test_name)] - return '-c \'%s/iossim %s\'' % (DIRECTORY, ' '.join(args)) + 
return '-c \'%s/iossim %s\'' % (DIRECTORY, ' '.join(args)) def GetGNArgs(is_simulator): - target_cpu = 'x64' if is_simulator else 'arm64' - return ([] + - ['target_os="ios"'] + - ['target_cpu="%s"' % target_cpu] + - ['use_clang_coverage=true'] + - ['is_component_build=false'] + - ['dcheck_always_on=true']) + target_cpu = 'x64' if is_simulator else 'arm64' + return ([] + ['target_os="ios"'] + ['target_cpu="%s"' % target_cpu] + + ['use_clang_coverage=true'] + ['is_component_build=false'] + + ['dcheck_always_on=true']) def GenerateIOSSimulatorCommand(): - gn_args_string = ' '.join(GetGNArgs(is_simulator=True)) - gn_cmd = ['gn', 'gen', DIRECTORY, '--args=\'%s\'' % gn_args_string] - - coverage_cmd = ( - [sys.executable, 'tools/code_coverage/coverage.py'] + - ["%s.app" % t for t in XC_TESTS + TESTS] + - ['-b %s' % DIRECTORY, '-o out/report'] + - ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] + - [FormatIossimTest(t, is_xctest=True) for t in XC_TESTS] + - [FormatIossimTest(t, is_xctest=False) for t in TESTS] - ) - - print 'To get code coverage using iOS simulator just run following commands:' - print '' - print ' '.join(gn_cmd) - print '' - print ' '.join(coverage_cmd) - return 0 + gn_args_string = ' '.join(GetGNArgs(is_simulator=True)) + gn_cmd = ['gn', 'gen', DIRECTORY, '--args=\'%s\'' % gn_args_string] + + coverage_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] + + ["%s.app" % t for t in XC_TESTS + TESTS] + + ['-b %s' % DIRECTORY, '-o out/report'] + + ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] + + [FormatIossimTest(t, is_xctest=True) for t in XC_TESTS] + + [FormatIossimTest(t, is_xctest=False) for t in TESTS]) + + print 'To get code coverage using iOS simulator just run following commands:' + print '' + print ' '.join(gn_cmd) + print '' + print ' '.join(coverage_cmd) + return 0 def GenerateIOSDeviceCommand(): - gn_args_string = ' '.join(GetGNArgs(is_simulator=False)) - - coverage_report_cmd = ( - [sys.executable, 'tools/code_coverage/coverage.py'] + - 
['%s.app' % t for t in TESTS] + - ['-b %s' % DIRECTORY] + - ['-o out/report'] + - ['-p %s/merged.profdata' % DIRECTORY] + - ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] - ) - - print 'Computing code coverage for real iOS device is a little bit tedious.' - print '' - print 'You will need:' - print '' - print '1. Generate xcode project and open it with Xcode 10+:' - print ' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string) - print ' open %s/all.xcworkspace' % DIRECTORY - print '' - print '2. Execute these Run targets manually with Xcode Run button and ' - print 'manually save generated coverage.profraw file to %s:' % DIRECTORY - print '\n'.join('- %s' % t for t in TESTS) - print '' - print '3. Execute these Test targets manually with Xcode Test button and ' - print 'manually save generated coverage.profraw file to %s:' % DIRECTORY - print '\n'.join('- %s' % t for t in XC_TESTS) - print '' - print '4. Merge *.profraw files to *.profdata using llvm-profdata tool:' - print (' build/mac_files/Xcode.app/Contents/Developer/Toolchains/' + - 'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge ' + - '-o %s/merged.profdata ' % DIRECTORY + - '-sparse=true %s/*.profraw' % DIRECTORY) - print '' - print '5. Generate coverage report:' - print ' ' + ' '.join(coverage_report_cmd) - return 0 + gn_args_string = ' '.join(GetGNArgs(is_simulator=False)) + + coverage_report_cmd = ( + [sys.executable, 'tools/code_coverage/coverage.py'] + + ['%s.app' % t for t in TESTS] + ['-b %s' % DIRECTORY] + + ['-o out/report'] + ['-p %s/merged.profdata' % DIRECTORY] + + ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\'']) + + print 'Computing code coverage for real iOS device is a little bit tedious.' + print '' + print 'You will need:' + print '' + print '1. Generate xcode project and open it with Xcode 10+:' + print ' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string) + print ' open %s/all.xcworkspace' % DIRECTORY + print '' + print '2. 
Execute these Run targets manually with Xcode Run button and ' + print 'manually save generated coverage.profraw file to %s:' % DIRECTORY + print '\n'.join('- %s' % t for t in TESTS) + print '' + print '3. Execute these Test targets manually with Xcode Test button and ' + print 'manually save generated coverage.profraw file to %s:' % DIRECTORY + print '\n'.join('- %s' % t for t in XC_TESTS) + print '' + print '4. Merge *.profraw files to *.profdata using llvm-profdata tool:' + print(' build/mac_files/Xcode.app/Contents/Developer/Toolchains/' + + 'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge ' + + '-o %s/merged.profdata ' % DIRECTORY + + '-sparse=true %s/*.profraw' % DIRECTORY) + print '' + print '5. Generate coverage report:' + print ' ' + ' '.join(coverage_report_cmd) + return 0 def Main(): - if len(sys.argv) < 2: - print 'Please specify type of coverage:' - print ' %s simulator' % sys.argv[0] - print ' %s device' % sys.argv[0] - elif sys.argv[1] == 'simulator': - GenerateIOSSimulatorCommand() - elif sys.argv[1] == 'device': - GenerateIOSDeviceCommand() - else: - print 'Unsupported type of coverage' - - return 0 + if len(sys.argv) < 2: + print 'Please specify type of coverage:' + print ' %s simulator' % sys.argv[0] + print ' %s device' % sys.argv[0] + elif sys.argv[1] == 'simulator': + GenerateIOSSimulatorCommand() + elif sys.argv[1] == 'device': + GenerateIOSDeviceCommand() + else: + print 'Unsupported type of coverage' + + return 0 + if __name__ == '__main__': - sys.exit(Main()) + sys.exit(Main()) diff --git a/tools_webrtc/cpu/cpu_mon.py b/tools_webrtc/cpu/cpu_mon.py index 128a5c0bf6..d89935aeab 100644 --- a/tools_webrtc/cpu/cpu_mon.py +++ b/tools_webrtc/cpu/cpu_mon.py @@ -8,7 +8,6 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- import psutil import sys @@ -17,67 +16,68 @@ class CpuSnapshot(object): - def __init__(self, label): - self.label = label - self.samples = [] + def __init__(self, label): + self.label = label + self.samples = [] - def Capture(self, sample_count): - print ('Capturing %d CPU samples for %s...' % - ((sample_count - len(self.samples)), self.label)) - while len(self.samples) < sample_count: - self.samples.append(psutil.cpu_percent(1.0, False)) + def Capture(self, sample_count): + print('Capturing %d CPU samples for %s...' % + ((sample_count - len(self.samples)), self.label)) + while len(self.samples) < sample_count: + self.samples.append(psutil.cpu_percent(1.0, False)) - def Text(self): - return ('%s: avg=%s, median=%s, min=%s, max=%s' % - (self.label, numpy.average(self.samples), - numpy.median(self.samples), - numpy.min(self.samples), numpy.max(self.samples))) + def Text(self): + return ('%s: avg=%s, median=%s, min=%s, max=%s' % + (self.label, numpy.average(self.samples), + numpy.median(self.samples), numpy.min( + self.samples), numpy.max(self.samples))) - def Max(self): - return numpy.max(self.samples) + def Max(self): + return numpy.max(self.samples) def GrabCpuSamples(sample_count): - print 'Label for snapshot (enter to quit): ' - label = raw_input().strip() - if len(label) == 0: - return None + print 'Label for snapshot (enter to quit): ' + label = raw_input().strip() + if len(label) == 0: + return None - snapshot = CpuSnapshot(label) - snapshot.Capture(sample_count) + snapshot = CpuSnapshot(label) + snapshot.Capture(sample_count) - return snapshot + return snapshot def main(): - print 'How many seconds to capture per snapshot (enter for 60)?' - sample_count = raw_input().strip() - if len(sample_count) > 0 and int(sample_count) > 0: - sample_count = int(sample_count) - else: - print 'Defaulting to 60 samples.' - sample_count = 60 + print 'How many seconds to capture per snapshot (enter for 60)?' 
+ sample_count = raw_input().strip() + if len(sample_count) > 0 and int(sample_count) > 0: + sample_count = int(sample_count) + else: + print 'Defaulting to 60 samples.' + sample_count = 60 + + snapshots = [] + while True: + snapshot = GrabCpuSamples(sample_count) + if snapshot is None: + break + snapshots.append(snapshot) - snapshots = [] - while True: - snapshot = GrabCpuSamples(sample_count) - if snapshot is None: - break - snapshots.append(snapshot) + if len(snapshots) == 0: + print 'no samples captured' + return -1 - if len(snapshots) == 0: - print 'no samples captured' - return -1 + pyplot.title('CPU usage') - pyplot.title('CPU usage') + for s in snapshots: + pyplot.plot(s.samples, label=s.Text(), linewidth=2) - for s in snapshots: - pyplot.plot(s.samples, label=s.Text(), linewidth=2) + pyplot.legend() - pyplot.legend() + pyplot.show() + return 0 - pyplot.show() - return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/download_tools.py b/tools_webrtc/download_tools.py index 9554c8cd7a..62602dea59 100755 --- a/tools_webrtc/download_tools.py +++ b/tools_webrtc/download_tools.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Downloads precompiled tools. 
These are checked into the repository as SHA-1 hashes (see *.sha1 files in @@ -17,12 +16,10 @@ import os import sys - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir)) sys.path.append(os.path.join(SRC_DIR, 'build')) - import find_depot_tools find_depot_tools.add_depot_tools_to_path() import gclient_utils @@ -30,32 +27,34 @@ def main(directories): - if not directories: - directories = [SCRIPT_DIR] - - for path in directories: - cmd = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, - 'download_from_google_storage.py'), - '--directory', - '--num_threads=10', - '--bucket', 'chrome-webrtc-resources', - '--auto_platform', - '--recursive', - path, - ] - print 'Downloading precompiled tools...' - - # Perform download similar to how gclient hooks execute. - try: - gclient_utils.CheckCallAndFilter( - cmd, cwd=SRC_DIR, always_show_header=True) - except (gclient_utils.Error, subprocess2.CalledProcessError) as e: - print 'Error: %s' % str(e) - return 2 - return 0 + if not directories: + directories = [SCRIPT_DIR] + + for path in directories: + cmd = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, + 'download_from_google_storage.py'), + '--directory', + '--num_threads=10', + '--bucket', + 'chrome-webrtc-resources', + '--auto_platform', + '--recursive', + path, + ] + print 'Downloading precompiled tools...' + + # Perform download similar to how gclient hooks execute. 
+ try: + gclient_utils.CheckCallAndFilter(cmd, + cwd=SRC_DIR, + always_show_header=True) + except (gclient_utils.Error, subprocess2.CalledProcessError) as e: + print 'Error: %s' % str(e) + return 2 + return 0 if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/tools_webrtc/ensure_webcam_is_running.py b/tools_webrtc/ensure_webcam_is_running.py index 20cf49e137..b68a484ea0 100755 --- a/tools_webrtc/ensure_webcam_is_running.py +++ b/tools_webrtc/ensure_webcam_is_running.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Checks if a virtual webcam is running and starts it if not. Returns a non-zero return code if the webcam could not be started. @@ -32,74 +31,73 @@ import subprocess import sys - WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam') WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app') def IsWebCamRunning(): - if sys.platform == 'win32': - process_name = 'ManyCam.exe' - elif sys.platform.startswith('darwin'): - process_name = 'ManyCam' - elif sys.platform.startswith('linux'): - # TODO(bugs.webrtc.org/9636): Currently a no-op on Linux: sw webcams no - # longer in use. - print 'Virtual webcam: no-op on Linux' - return True - else: - raise Exception('Unsupported platform: %s' % sys.platform) - for p in psutil.process_iter(): - try: - if process_name == p.name: - print 'Found a running virtual webcam (%s with PID %s)' % (p.name, - p.pid) - return True - except psutil.AccessDenied: - pass # This is normal if we query sys processes, etc. - return False - - -def StartWebCam(): - try: if sys.platform == 'win32': - subprocess.check_call(WEBCAM_WIN) - print 'Successfully launched virtual webcam.' + process_name = 'ManyCam.exe' elif sys.platform.startswith('darwin'): - subprocess.check_call(WEBCAM_MAC) - print 'Successfully launched virtual webcam.' 
+ process_name = 'ManyCam' elif sys.platform.startswith('linux'): - # TODO(bugs.webrtc.org/9636): Currently a no-op on Linux: sw webcams no - # longer in use. - print 'Not implemented on Linux' - - except Exception as e: - print 'Failed to launch virtual webcam: %s' % e + # TODO(bugs.webrtc.org/9636): Currently a no-op on Linux: sw webcams no + # longer in use. + print 'Virtual webcam: no-op on Linux' + return True + else: + raise Exception('Unsupported platform: %s' % sys.platform) + for p in psutil.process_iter(): + try: + if process_name == p.name: + print 'Found a running virtual webcam (%s with PID %s)' % ( + p.name, p.pid) + return True + except psutil.AccessDenied: + pass # This is normal if we query sys processes, etc. return False - return True + +def StartWebCam(): + try: + if sys.platform == 'win32': + subprocess.check_call(WEBCAM_WIN) + print 'Successfully launched virtual webcam.' + elif sys.platform.startswith('darwin'): + subprocess.check_call(WEBCAM_MAC) + print 'Successfully launched virtual webcam.' + elif sys.platform.startswith('linux'): + # TODO(bugs.webrtc.org/9636): Currently a no-op on Linux: sw webcams no + # longer in use. 
+ print 'Not implemented on Linux' + + except Exception as e: + print 'Failed to launch virtual webcam: %s' % e + return False + + return True def _ForcePythonInterpreter(cmd): - """Returns the fixed command line to call the right python executable.""" - out = cmd[:] - if out[0] == 'python': - out[0] = sys.executable - elif out[0].endswith('.py'): - out.insert(0, sys.executable) - return out + """Returns the fixed command line to call the right python executable.""" + out = cmd[:] + if out[0] == 'python': + out[0] = sys.executable + elif out[0].endswith('.py'): + out.insert(0, sys.executable) + return out def Main(argv): - if not IsWebCamRunning(): - if not StartWebCam(): - return 1 + if not IsWebCamRunning(): + if not StartWebCam(): + return 1 - if argv: - return subprocess.call(_ForcePythonInterpreter(argv)) - else: - return 0 + if argv: + return subprocess.call(_ForcePythonInterpreter(argv)) + else: + return 0 if __name__ == '__main__': - sys.exit(Main(sys.argv[1:])) + sys.exit(Main(sys.argv[1:])) diff --git a/tools_webrtc/executable_host_build.py b/tools_webrtc/executable_host_build.py index cc1e7ee59e..aac4be0b4b 100644 --- a/tools_webrtc/executable_host_build.py +++ b/tools_webrtc/executable_host_build.py @@ -55,7 +55,6 @@ import sys import tempfile - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir)) sys.path.append(os.path.join(SRC_DIR, 'build')) @@ -63,39 +62,40 @@ def _ParseArgs(): - desc = 'Generates a GN executable targeting the host machine.' - parser = argparse.ArgumentParser(description=desc) - parser.add_argument('--executable_name', - required=True, - help='Name of the executable to build') - args = parser.parse_args() - return args + desc = 'Generates a GN executable targeting the host machine.' 
+ parser = argparse.ArgumentParser(description=desc) + parser.add_argument('--executable_name', + required=True, + help='Name of the executable to build') + args = parser.parse_args() + return args @contextmanager def HostBuildDir(): - temp_dir = tempfile.mkdtemp() - try: - yield temp_dir - finally: - shutil.rmtree(temp_dir) + temp_dir = tempfile.mkdtemp() + try: + yield temp_dir + finally: + shutil.rmtree(temp_dir) def _RunCommand(argv, cwd=SRC_DIR, **kwargs): - with open(os.devnull, 'w') as devnull: - subprocess.check_call(argv, cwd=cwd, stdout=devnull, **kwargs) + with open(os.devnull, 'w') as devnull: + subprocess.check_call(argv, cwd=cwd, stdout=devnull, **kwargs) def DepotToolPath(*args): - return os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, *args) + return os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, *args) if __name__ == '__main__': - ARGS = _ParseArgs() - EXECUTABLE_TO_BUILD = ARGS.executable_name - EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host' - with HostBuildDir() as build_dir: - _RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir]) - _RunCommand([DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD]) - shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD), - EXECUTABLE_FINAL_NAME) + ARGS = _ParseArgs() + EXECUTABLE_TO_BUILD = ARGS.executable_name + EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host' + with HostBuildDir() as build_dir: + _RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir]) + _RunCommand( + [DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD]) + shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD), + EXECUTABLE_FINAL_NAME) diff --git a/tools_webrtc/flags_compatibility.py b/tools_webrtc/flags_compatibility.py old mode 100644 new mode 100755 index 9a3e5a14bf..c716574477 --- a/tools_webrtc/flags_compatibility.py +++ b/tools_webrtc/flags_compatibility.py @@ -15,34 +15,32 @@ def main(): - parser = argparse.ArgumentParser() - 
parser.add_argument('--isolated-script-test-output') - parser.add_argument('--isolated-script-test-perf-output') - args, unrecognized_args = parser.parse_known_args() + parser = argparse.ArgumentParser() + parser.add_argument('--isolated-script-test-perf-output') + args, unrecognized_args = parser.parse_known_args() - test_command = _ForcePythonInterpreter(unrecognized_args) - if args.isolated_script_test_output: - test_command += ['--isolated_script_test_output', - args.isolated_script_test_output] - if args.isolated_script_test_perf_output: - test_command += ['--isolated_script_test_perf_output', - args.isolated_script_test_perf_output] - logging.info('Running %r', test_command) + test_command = _ForcePythonInterpreter(unrecognized_args) + if args.isolated_script_test_perf_output: + test_command += [ + '--isolated_script_test_perf_output=' + + args.isolated_script_test_perf_output + ] + logging.info('Running %r', test_command) - return subprocess.call(test_command) + return subprocess.call(test_command) def _ForcePythonInterpreter(cmd): - """Returns the fixed command line to call the right python executable.""" - out = cmd[:] - if out[0] == 'python': - out[0] = sys.executable - elif out[0].endswith('.py'): - out.insert(0, sys.executable) - return out + """Returns the fixed command line to call the right python executable.""" + out = cmd[:] + if out[0] == 'python': + out[0] = sys.executable + elif out[0].endswith('.py'): + out.insert(0, sys.executable) + return out if __name__ == '__main__': - # pylint: disable=W0101 - logging.basicConfig(level=logging.INFO) - sys.exit(main()) + # pylint: disable=W0101 + logging.basicConfig(level=logging.INFO) + sys.exit(main()) diff --git a/tools_webrtc/get_landmines.py b/tools_webrtc/get_landmines.py index ba8ac9c1bf..3b5965fce4 100755 --- a/tools_webrtc/get_landmines.py +++ b/tools_webrtc/get_landmines.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. 
All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """ This file emits the list of reasons why a particular build needs to be clobbered (or a list of 'landmines'). @@ -20,46 +19,48 @@ sys.path.insert(0, os.path.join(CHECKOUT_ROOT, 'build')) import landmine_utils - host_os = landmine_utils.host_os # pylint: disable=invalid-name def print_landmines(): # pylint: disable=invalid-name - """ + """ ALL LANDMINES ARE EMITTED FROM HERE. """ - # DO NOT add landmines as part of a regular CL. Landmines are a last-effort - # bandaid fix if a CL that got landed has a build dependency bug and all bots - # need to be cleaned up. If you're writing a new CL that causes build - # dependency problems, fix the dependency problems instead of adding a - # landmine. - # See the Chromium version in src/build/get_landmines.py for usage examples. - print 'Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)' - if host_os() == 'win': - print 'Clobber to resolve some issues with corrupt .pdb files on bots.' 
- print 'Clobber due to corrupt .pdb files (after #14623)' - print 'Clobber due to Win 64-bit Debug linking error (crbug.com/668961)' - print ('Clobber due to Win Clang Debug linking errors in ' - 'https://codereview.webrtc.org/2786603002') - print ('Clobber due to Win Debug linking errors in ' - 'https://codereview.webrtc.org/2832063003/') - if host_os() == 'mac': - print 'Clobber due to iOS compile errors (crbug.com/694721)' - print 'Clobber to unblock https://codereview.webrtc.org/2709573003' - print ('Clobber to fix https://codereview.webrtc.org/2709573003 after ' - 'landing') - print ('Clobber to fix https://codereview.webrtc.org/2767383005 before' - 'landing (changing rtc_executable -> rtc_test on iOS)') - print ('Clobber to fix https://codereview.webrtc.org/2767383005 before' - 'landing (changing rtc_executable -> rtc_test on iOS)') - print 'Another landmine for low_bandwidth_audio_test (webrtc:7430)' - print 'Clobber to change neteq_rtpplay type to executable' + # DO NOT add landmines as part of a regular CL. Landmines are a last-effort + # bandaid fix if a CL that got landed has a build dependency bug and all bots + # need to be cleaned up. If you're writing a new CL that causes build + # dependency problems, fix the dependency problems instead of adding a + # landmine. + # See the Chromium version in src/build/get_landmines.py for usage examples. + print 'Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)' + if host_os() == 'win': + print 'Clobber to resolve some issues with corrupt .pdb files on bots.' + print 'Clobber due to corrupt .pdb files (after #14623)' + print 'Clobber due to Win 64-bit Debug linking error (crbug.com/668961)' + print('Clobber due to Win Clang Debug linking errors in ' + 'https://codereview.webrtc.org/2786603002') + print('Clobber due to Win Debug linking errors in ' + 'https://codereview.webrtc.org/2832063003/') + print 'Clobber win x86 bots (issues with isolated files).' 
+ if host_os() == 'mac': + print 'Clobber due to iOS compile errors (crbug.com/694721)' + print 'Clobber to unblock https://codereview.webrtc.org/2709573003' + print('Clobber to fix https://codereview.webrtc.org/2709573003 after ' + 'landing') + print('Clobber to fix https://codereview.webrtc.org/2767383005 before' + 'landing (changing rtc_executable -> rtc_test on iOS)') + print('Clobber to fix https://codereview.webrtc.org/2767383005 before' + 'landing (changing rtc_executable -> rtc_test on iOS)') + print 'Another landmine for low_bandwidth_audio_test (webrtc:7430)' + print 'Clobber to change neteq_rtpplay type to executable' + print 'Clobber to remove .xctest files.' + print 'Clobber to remove .xctest files (take 2).' def main(): - print_landmines() - return 0 + print_landmines() + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/gn_check_autofix.py b/tools_webrtc/gn_check_autofix.py index 57ea0f6d7b..282dc4fc0f 100644 --- a/tools_webrtc/gn_check_autofix.py +++ b/tools_webrtc/gn_check_autofix.py @@ -7,7 +7,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """ This tool tries to fix (some) errors reported by `gn gen --check` or `gn check`. 
@@ -31,72 +30,78 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) -CHROMIUM_DIRS = ['base', 'build', 'buildtools', - 'testing', 'third_party', 'tools'] +CHROMIUM_DIRS = [ + 'base', 'build', 'buildtools', 'testing', 'third_party', 'tools' +] TARGET_RE = re.compile( r'(?P\s*)\w*\("(?P\w*)"\) {$') + class TemporaryDirectory(object): - def __init__(self): - self._closed = False - self._name = None - self._name = tempfile.mkdtemp() + def __init__(self): + self._closed = False + self._name = None + self._name = tempfile.mkdtemp() - def __enter__(self): - return self._name + def __enter__(self): + return self._name - def __exit__(self, exc, value, _tb): - if self._name and not self._closed: - shutil.rmtree(self._name) - self._closed = True + def __exit__(self, exc, value, _tb): + if self._name and not self._closed: + shutil.rmtree(self._name) + self._closed = True def Run(cmd): - print 'Running:', ' '.join(cmd) - sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - return sub.communicate() + print 'Running:', ' '.join(cmd) + sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + return sub.communicate() + def FixErrors(filename, missing_deps, deleted_sources): - with open(filename) as f: - lines = f.readlines() - - fixed_file = '' - indentation_level = None - for line in lines: - match = TARGET_RE.match(line) - if match: - target = match.group('target_name') - if target in missing_deps: - indentation_level = match.group('indentation_level') - elif indentation_level is not None: - match = re.match(indentation_level + '}$', line) - if match: - line = ('deps = [\n' + - ''.join(' "' + dep + '",\n' for dep in missing_deps[target]) + - ']\n') + line - indentation_level = None - elif line.strip().startswith('deps'): - is_empty_deps = line.strip() == 'deps = []' - line = 'deps = [\n' if is_empty_deps else line - line += ''.join(' "' + dep + '",\n' for dep in missing_deps[target]) - line += ']\n' if is_empty_deps else '' - 
indentation_level = None - - if line.strip() not in deleted_sources: - fixed_file += line - - with open(filename, 'w') as f: - f.write(fixed_file) - - Run(['gn', 'format', filename]) + with open(filename) as f: + lines = f.readlines() + + fixed_file = '' + indentation_level = None + for line in lines: + match = TARGET_RE.match(line) + if match: + target = match.group('target_name') + if target in missing_deps: + indentation_level = match.group('indentation_level') + elif indentation_level is not None: + match = re.match(indentation_level + '}$', line) + if match: + line = ('deps = [\n' + ''.join(' "' + dep + '",\n' + for dep in missing_deps[target]) + + ']\n') + line + indentation_level = None + elif line.strip().startswith('deps'): + is_empty_deps = line.strip() == 'deps = []' + line = 'deps = [\n' if is_empty_deps else line + line += ''.join(' "' + dep + '",\n' + for dep in missing_deps[target]) + line += ']\n' if is_empty_deps else '' + indentation_level = None + + if line.strip() not in deleted_sources: + fixed_file += line + + with open(filename, 'w') as f: + f.write(fixed_file) + + Run(['gn', 'format', filename]) + def FirstNonEmpty(iterable): - """Return first item which evaluates to True, or fallback to None.""" - return next((x for x in iterable if x), None) + """Return first item which evaluates to True, or fallback to None.""" + return next((x for x in iterable if x), None) + def Rebase(base_path, dependency_path, dependency): - """Adapt paths so they work both in stand-alone WebRTC and Chromium tree. + """Adapt paths so they work both in stand-alone WebRTC and Chromium tree. To cope with varying top-level directory (WebRTC VS Chromium), we use: * relative paths for WebRTC modules. @@ -113,77 +118,82 @@ def Rebase(base_path, dependency_path, dependency): Full target path (E.g. '../rtc_base/time:timestamp_extrapolator'). """ - root = FirstNonEmpty(dependency_path.split('/')) - if root in CHROMIUM_DIRS: - # Chromium paths must remain absolute. E.g. 
//third_party//abseil-cpp... - rebased = dependency_path - else: - base_path = base_path.split(os.path.sep) - dependency_path = dependency_path.split(os.path.sep) - - first_difference = None - shortest_length = min(len(dependency_path), len(base_path)) - for i in range(shortest_length): - if dependency_path[i] != base_path[i]: - first_difference = i - break - - first_difference = first_difference or shortest_length - base_path = base_path[first_difference:] - dependency_path = dependency_path[first_difference:] - rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path) - return rebased + ':' + dependency - -def main(): - deleted_sources = set() - errors_by_file = defaultdict(lambda: defaultdict(set)) - - with TemporaryDirectory() as tmp_dir: - mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py') - mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', 'mb_config.pyl') - mb_gen_command = ([ - mb_script_path, 'gen', - tmp_dir, - '--config-file', mb_config_file_path, - ] + sys.argv[1:]) - - mb_output = Run(mb_gen_command) - errors = mb_output[0].split('ERROR')[1:] - - if mb_output[1]: - print mb_output[1] - return 1 - - for error in errors: - error = error.splitlines() - target_msg = 'The target:' - if target_msg not in error: - target_msg = 'It is not in any dependency of' - if target_msg not in error: - print '\n'.join(error) - continue - index = error.index(target_msg) + 1 - path, target = error[index].strip().split(':') - if error[index+1] in ('is including a file from the target:', - 'The include file is in the target(s):'): - dep = error[index+2].strip() - dep_path, dep = dep.split(':') - dep = Rebase(path, dep_path, dep) - # Replacing /target:target with /target - dep = re.sub(r'/(\w+):(\1)$', r'/\1', dep) - path = os.path.join(path[2:], 'BUILD.gn') - errors_by_file[path][target].add(dep) - elif error[index+1] == 'has a source file:': - deleted_file = '"' + os.path.basename(error[index+2].strip()) + '",' - deleted_sources.add(deleted_file) + 
root = FirstNonEmpty(dependency_path.split('/')) + if root in CHROMIUM_DIRS: + # Chromium paths must remain absolute. E.g. //third_party//abseil-cpp... + rebased = dependency_path else: - print '\n'.join(error) - continue + base_path = base_path.split(os.path.sep) + dependency_path = dependency_path.split(os.path.sep) + + first_difference = None + shortest_length = min(len(dependency_path), len(base_path)) + for i in range(shortest_length): + if dependency_path[i] != base_path[i]: + first_difference = i + break + + first_difference = first_difference or shortest_length + base_path = base_path[first_difference:] + dependency_path = dependency_path[first_difference:] + rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path) + return rebased + ':' + dependency - for path, missing_deps in errors_by_file.items(): - FixErrors(path, missing_deps, deleted_sources) - return 0 +def main(): + deleted_sources = set() + errors_by_file = defaultdict(lambda: defaultdict(set)) + + with TemporaryDirectory() as tmp_dir: + mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py') + mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', 'mb_config.pyl') + mb_gen_command = ([ + mb_script_path, + 'gen', + tmp_dir, + '--config-file', + mb_config_file_path, + ] + sys.argv[1:]) + + mb_output = Run(mb_gen_command) + errors = mb_output[0].split('ERROR')[1:] + + if mb_output[1]: + print mb_output[1] + return 1 + + for error in errors: + error = error.splitlines() + target_msg = 'The target:' + if target_msg not in error: + target_msg = 'It is not in any dependency of' + if target_msg not in error: + print '\n'.join(error) + continue + index = error.index(target_msg) + 1 + path, target = error[index].strip().split(':') + if error[index + 1] in ('is including a file from the target:', + 'The include file is in the target(s):'): + dep = error[index + 2].strip() + dep_path, dep = dep.split(':') + dep = Rebase(path, dep_path, dep) + # Replacing /target:target with /target + dep = 
re.sub(r'/(\w+):(\1)$', r'/\1', dep) + path = os.path.join(path[2:], 'BUILD.gn') + errors_by_file[path][target].add(dep) + elif error[index + 1] == 'has a source file:': + deleted_file = '"' + os.path.basename( + error[index + 2].strip()) + '",' + deleted_sources.add(deleted_file) + else: + print '\n'.join(error) + continue + + for path, missing_deps in errors_by_file.items(): + FixErrors(path, missing_deps, deleted_sources) + + return 0 + if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/gtest-parallel-wrapper.py b/tools_webrtc/gtest-parallel-wrapper.py index b4b6675301..26b9afa0ad 100755 --- a/tools_webrtc/gtest-parallel-wrapper.py +++ b/tools_webrtc/gtest-parallel-wrapper.py @@ -15,19 +15,25 @@ flags. Developers should execute gtest-parallel directly. In particular, this translates the GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS -environment variables to the --shard_index and --shard_count flags, renames -the --isolated-script-test-output flag to --dump_json_test_results, +environment variables to the --shard_index and --shard_count flags and interprets e.g. --workers=2x as 2 workers per core. Flags before '--' will be attempted to be understood as arguments to gtest-parallel. If gtest-parallel doesn't recognize the flag or the flag is after '--', the flag will be passed on to the test executable. +--isolated-script-test-perf-output is renamed to +--isolated_script_test_perf_output. The Android test runner needs the flag to +be in the former form, but our tests require the latter, so this is the only +place we can do it. + If the --store-test-artifacts flag is set, an --output_dir must be also specified. + The test artifacts will then be stored in a 'test_artifacts' subdirectory of the output dir, and will be compressed into a zip file once the test finishes executing. + This is useful when running the tests in swarming, since the output directory is not known beforehand. 
@@ -38,7 +44,6 @@ --another_flag \ --output_dir=SOME_OUTPUT_DIR \ --store-test-artifacts - --isolated-script-test-output=SOME_DIR \ --isolated-script-test-perf-output=SOME_OTHER_DIR \ -- \ --foo=bar \ @@ -56,7 +61,7 @@ --test_artifacts_dir=SOME_OUTPUT_DIR/test_artifacts \ --some_flag=some_value \ --another_flag \ - --isolated-script-test-perf-output=SOME_OTHER_DIR \ + --isolated_script_test_perf_output=SOME_OTHER_DIR \ --foo=bar \ --baz @@ -70,175 +75,174 @@ import subprocess import sys - -Args = collections.namedtuple('Args', - ['gtest_parallel_args', 'test_env', 'output_dir', - 'test_artifacts_dir']) +Args = collections.namedtuple( + 'Args', + ['gtest_parallel_args', 'test_env', 'output_dir', 'test_artifacts_dir']) def _CatFiles(file_list, output_file): - with open(output_file, 'w') as output_file: - for filename in file_list: - with open(filename) as input_file: - output_file.write(input_file.read()) - os.remove(filename) + with open(output_file, 'w') as output_file: + for filename in file_list: + with open(filename) as input_file: + output_file.write(input_file.read()) + os.remove(filename) + def _ParseWorkersOption(workers): - """Interpret Nx syntax as N * cpu_count. Int value is left as is.""" - base = float(workers.rstrip('x')) - if workers.endswith('x'): - result = int(base * multiprocessing.cpu_count()) - else: - result = int(base) - return max(result, 1) # Sanitize when using e.g. '0.5x'. + """Interpret Nx syntax as N * cpu_count. Int value is left as is.""" + base = float(workers.rstrip('x')) + if workers.endswith('x'): + result = int(base * multiprocessing.cpu_count()) + else: + result = int(base) + return max(result, 1) # Sanitize when using e.g. '0.5x'. class ReconstructibleArgumentGroup(object): - """An argument group that can be converted back into a command line. + """An argument group that can be converted back into a command line. 
This acts like ArgumentParser.add_argument_group, but names of arguments added to it are also kept in a list, so that parsed options from ArgumentParser.parse_args can be reconstructed back into a command line (list of args) based on the list of wanted keys.""" - def __init__(self, parser, *args, **kwargs): - self._group = parser.add_argument_group(*args, **kwargs) - self._keys = [] - def AddArgument(self, *args, **kwargs): - arg = self._group.add_argument(*args, **kwargs) - self._keys.append(arg.dest) + def __init__(self, parser, *args, **kwargs): + self._group = parser.add_argument_group(*args, **kwargs) + self._keys = [] + + def AddArgument(self, *args, **kwargs): + arg = self._group.add_argument(*args, **kwargs) + self._keys.append(arg.dest) - def RemakeCommandLine(self, options): - result = [] - for key in self._keys: - value = getattr(options, key) - if value is True: - result.append('--%s' % key) - elif value is not None: - result.append('--%s=%s' % (key, value)) - return result + def RemakeCommandLine(self, options): + result = [] + for key in self._keys: + value = getattr(options, key) + if value is True: + result.append('--%s' % key) + elif value is not None: + result.append('--%s=%s' % (key, value)) + return result def ParseArgs(argv=None): - parser = argparse.ArgumentParser(argv) - - gtest_group = ReconstructibleArgumentGroup(parser, - 'Arguments to gtest-parallel') - # These options will be passed unchanged to gtest-parallel. - gtest_group.AddArgument('-d', '--output_dir') - gtest_group.AddArgument('-r', '--repeat') - gtest_group.AddArgument('--retry_failed') - gtest_group.AddArgument('--gtest_color') - gtest_group.AddArgument('--gtest_filter') - gtest_group.AddArgument('--gtest_also_run_disabled_tests', - action='store_true', default=None) - gtest_group.AddArgument('--timeout') - - # Syntax 'Nx' will be interpreted as N * number of cpu cores. 
- gtest_group.AddArgument('-w', '--workers', type=_ParseWorkersOption) - - # --isolated-script-test-output is used to upload results to the flakiness - # dashboard. This translation is made because gtest-parallel expects the flag - # to be called --dump_json_test_results instead. - gtest_group.AddArgument('--isolated-script-test-output', - dest='dump_json_test_results') - - # Needed when the test wants to store test artifacts, because it doesn't know - # what will be the swarming output dir. - parser.add_argument('--store-test-artifacts', action='store_true') - - # No-sandbox is a Chromium-specific flag, ignore it. - # TODO(oprypin): Remove (bugs.webrtc.org/8115) - parser.add_argument('--no-sandbox', action='store_true', - help=argparse.SUPPRESS) - - parser.add_argument('executable') - parser.add_argument('executable_args', nargs='*') - - options, unrecognized_args = parser.parse_known_args(argv) - - webrtc_flags_to_change = { - '--isolated-script-test-perf-output': '--isolated_script_test_perf_output', - '--isolated-script-test-output': '--isolated_script_test_output', - } - args_to_pass = [] - for arg in unrecognized_args: - if any(arg.startswith(k) for k in webrtc_flags_to_change.keys()): - arg_split = arg.split('=') - args_to_pass.append( - webrtc_flags_to_change[arg_split[0]] + '=' + arg_split[1]) + parser = argparse.ArgumentParser(argv) + + gtest_group = ReconstructibleArgumentGroup(parser, + 'Arguments to gtest-parallel') + # These options will be passed unchanged to gtest-parallel. + gtest_group.AddArgument('-d', '--output_dir') + gtest_group.AddArgument('-r', '--repeat') + gtest_group.AddArgument('--retry_failed') + gtest_group.AddArgument('--gtest_color') + gtest_group.AddArgument('--gtest_filter') + gtest_group.AddArgument('--gtest_also_run_disabled_tests', + action='store_true', + default=None) + gtest_group.AddArgument('--timeout') + + # Syntax 'Nx' will be interpreted as N * number of cpu cores. 
+ gtest_group.AddArgument('-w', '--workers', type=_ParseWorkersOption) + + # Needed when the test wants to store test artifacts, because it doesn't know + # what will be the swarming output dir. + parser.add_argument('--store-test-artifacts', action='store_true') + + # No-sandbox is a Chromium-specific flag, ignore it. + # TODO(oprypin): Remove (bugs.webrtc.org/8115) + parser.add_argument('--no-sandbox', + action='store_true', + help=argparse.SUPPRESS) + + parser.add_argument('executable') + parser.add_argument('executable_args', nargs='*') + + options, unrecognized_args = parser.parse_known_args(argv) + + args_to_pass = [] + for arg in unrecognized_args: + if arg.startswith('--isolated-script-test-perf-output'): + arg_split = arg.split('=') + assert len( + arg_split) == 2, 'You must use the = syntax for this flag.' + args_to_pass.append('--isolated_script_test_perf_output=' + + arg_split[1]) + else: + args_to_pass.append(arg) + + executable_args = options.executable_args + args_to_pass + + if options.store_test_artifacts: + assert options.output_dir, ( + '--output_dir must be specified for storing test artifacts.') + test_artifacts_dir = os.path.join(options.output_dir, 'test_artifacts') + + executable_args.insert(0, + '--test_artifacts_dir=%s' % test_artifacts_dir) else: - args_to_pass.append(arg) - - executable_args = options.executable_args + args_to_pass - - if options.store_test_artifacts: - assert options.output_dir, ( - '--output_dir must be specified for storing test artifacts.') - test_artifacts_dir = os.path.join(options.output_dir, 'test_artifacts') - - executable_args.insert(0, '--test_artifacts_dir=%s' % test_artifacts_dir) - else: - test_artifacts_dir = None + test_artifacts_dir = None - gtest_parallel_args = gtest_group.RemakeCommandLine(options) + gtest_parallel_args = gtest_group.RemakeCommandLine(options) - # GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS must be removed from the - # environment. 
Otherwise it will be picked up by the binary, causing a bug - # where only tests in the first shard are executed. - test_env = os.environ.copy() - gtest_shard_index = test_env.pop('GTEST_SHARD_INDEX', '0') - gtest_total_shards = test_env.pop('GTEST_TOTAL_SHARDS', '1') + # GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS must be removed from the + # environment. Otherwise it will be picked up by the binary, causing a bug + # where only tests in the first shard are executed. + test_env = os.environ.copy() + gtest_shard_index = test_env.pop('GTEST_SHARD_INDEX', '0') + gtest_total_shards = test_env.pop('GTEST_TOTAL_SHARDS', '1') - gtest_parallel_args.insert(0, '--shard_index=%s' % gtest_shard_index) - gtest_parallel_args.insert(1, '--shard_count=%s' % gtest_total_shards) + gtest_parallel_args.insert(0, '--shard_index=%s' % gtest_shard_index) + gtest_parallel_args.insert(1, '--shard_count=%s' % gtest_total_shards) - gtest_parallel_args.append(options.executable) - if executable_args: - gtest_parallel_args += ['--'] + executable_args + gtest_parallel_args.append(options.executable) + if executable_args: + gtest_parallel_args += ['--'] + executable_args - return Args(gtest_parallel_args, test_env, options.output_dir, - test_artifacts_dir) + return Args(gtest_parallel_args, test_env, options.output_dir, + test_artifacts_dir) def main(): - webrtc_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - gtest_parallel_path = os.path.join( - webrtc_root, 'third_party', 'gtest-parallel', 'gtest-parallel') + webrtc_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + gtest_parallel_path = os.path.join(webrtc_root, 'third_party', + 'gtest-parallel', 'gtest-parallel') - gtest_parallel_args, test_env, output_dir, test_artifacts_dir = ParseArgs() + gtest_parallel_args, test_env, output_dir, test_artifacts_dir = ParseArgs() - command = [ - sys.executable, - gtest_parallel_path, - ] + gtest_parallel_args + command = [ + sys.executable, + gtest_parallel_path, + ] + 
gtest_parallel_args - if output_dir and not os.path.isdir(output_dir): - os.makedirs(output_dir) - if test_artifacts_dir and not os.path.isdir(test_artifacts_dir): - os.makedirs(test_artifacts_dir) + if output_dir and not os.path.isdir(output_dir): + os.makedirs(output_dir) + if test_artifacts_dir and not os.path.isdir(test_artifacts_dir): + os.makedirs(test_artifacts_dir) - print 'gtest-parallel-wrapper: Executing command %s' % ' '.join(command) - sys.stdout.flush() + print 'gtest-parallel-wrapper: Executing command %s' % ' '.join(command) + sys.stdout.flush() - exit_code = subprocess.call(command, env=test_env, cwd=os.getcwd()) + exit_code = subprocess.call(command, env=test_env, cwd=os.getcwd()) - if output_dir: - for test_status in 'passed', 'failed', 'interrupted': - logs_dir = os.path.join(output_dir, 'gtest-parallel-logs', test_status) - if not os.path.isdir(logs_dir): - continue - logs = [os.path.join(logs_dir, log) for log in os.listdir(logs_dir)] - log_file = os.path.join(output_dir, '%s-tests.log' % test_status) - _CatFiles(logs, log_file) - os.rmdir(logs_dir) + if output_dir: + for test_status in 'passed', 'failed', 'interrupted': + logs_dir = os.path.join(output_dir, 'gtest-parallel-logs', + test_status) + if not os.path.isdir(logs_dir): + continue + logs = [ + os.path.join(logs_dir, log) for log in os.listdir(logs_dir) + ] + log_file = os.path.join(output_dir, '%s-tests.log' % test_status) + _CatFiles(logs, log_file) + os.rmdir(logs_dir) - if test_artifacts_dir: - shutil.make_archive(test_artifacts_dir, 'zip', test_artifacts_dir) - shutil.rmtree(test_artifacts_dir) + if test_artifacts_dir: + shutil.make_archive(test_artifacts_dir, 'zip', test_artifacts_dir) + shutil.rmtree(test_artifacts_dir) - return exit_code + return exit_code if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/gtest_parallel_wrapper_test.py b/tools_webrtc/gtest_parallel_wrapper_test.py index 5d436aad90..82cb75bc6a 100755 --- 
a/tools_webrtc/gtest_parallel_wrapper_test.py +++ b/tools_webrtc/gtest_parallel_wrapper_test.py @@ -21,156 +21,152 @@ @contextmanager def TemporaryDirectory(): - tmp_dir = tempfile.mkdtemp() - yield tmp_dir - os.rmdir(tmp_dir) + tmp_dir = tempfile.mkdtemp() + yield tmp_dir + os.rmdir(tmp_dir) class GtestParallelWrapperHelpersTest(unittest.TestCase): + def testGetWorkersAsIs(self): + # pylint: disable=protected-access + self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('12'), 12) - def testGetWorkersAsIs(self): - # pylint: disable=protected-access - self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('12'), 12) + def testGetTwiceWorkers(self): + expected = 2 * multiprocessing.cpu_count() + # pylint: disable=protected-access + self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), + expected) - def testGetTwiceWorkers(self): - expected = 2 * multiprocessing.cpu_count() - # pylint: disable=protected-access - self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), expected) - - def testGetHalfWorkers(self): - expected = max(multiprocessing.cpu_count() // 2, 1) - # pylint: disable=protected-access - self.assertEqual( - gtest_parallel_wrapper._ParseWorkersOption('0.5x'), expected) + def testGetHalfWorkers(self): + expected = max(multiprocessing.cpu_count() // 2, 1) + # pylint: disable=protected-access + self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('0.5x'), + expected) class GtestParallelWrapperTest(unittest.TestCase): - - @classmethod - def _Expected(cls, gtest_parallel_args): - return ['--shard_index=0', '--shard_count=1'] + gtest_parallel_args - - def testOverwrite(self): - result = gtest_parallel_wrapper.ParseArgs( - ['--timeout=123', 'exec', '--timeout', '124']) - expected = self._Expected(['--timeout=124', 'exec']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testMixing(self): - result = gtest_parallel_wrapper.ParseArgs( - ['--timeout=123', '--param1', 'exec', '--param2', '--timeout', 
'124']) - expected = self._Expected( - ['--timeout=124', 'exec', '--', '--param1', '--param2']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testMixingPositional(self): - result = gtest_parallel_wrapper.ParseArgs([ - '--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', '--foo2', - 'bar2' - ]) - expected = self._Expected( - ['--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testDoubleDash1(self): - result = gtest_parallel_wrapper.ParseArgs( - ['--timeout', '123', 'exec', '--', '--timeout', '124']) - expected = self._Expected( - ['--timeout=123', 'exec', '--', '--timeout', '124']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testDoubleDash2(self): - result = gtest_parallel_wrapper.ParseArgs( - ['--timeout=123', '--', 'exec', '--timeout=124']) - expected = self._Expected(['--timeout=123', 'exec', '--', '--timeout=124']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testArtifacts(self): - with TemporaryDirectory() as tmp_dir: - output_dir = os.path.join(tmp_dir, 'foo') - result = gtest_parallel_wrapper.ParseArgs( - ['exec', '--store-test-artifacts', '--output_dir', output_dir]) - exp_artifacts_dir = os.path.join(output_dir, 'test_artifacts') - exp = self._Expected([ - '--output_dir=' + output_dir, 'exec', '--', - '--test_artifacts_dir=' + exp_artifacts_dir - ]) - self.assertEqual(result.gtest_parallel_args, exp) - self.assertEqual(result.output_dir, output_dir) - self.assertEqual(result.test_artifacts_dir, exp_artifacts_dir) - - def testNoDirsSpecified(self): - result = gtest_parallel_wrapper.ParseArgs(['exec']) - self.assertEqual(result.output_dir, None) - self.assertEqual(result.test_artifacts_dir, None) - - def testOutputDirSpecified(self): - result = gtest_parallel_wrapper.ParseArgs( - ['exec', '--output_dir', '/tmp/foo']) - self.assertEqual(result.output_dir, '/tmp/foo') - 
self.assertEqual(result.test_artifacts_dir, None) - - def testJsonTestResults(self): - result = gtest_parallel_wrapper.ParseArgs( - ['--isolated-script-test-output', '/tmp/foo', 'exec']) - expected = self._Expected(['--dump_json_test_results=/tmp/foo', 'exec']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testShortArg(self): - result = gtest_parallel_wrapper.ParseArgs(['-d', '/tmp/foo', 'exec']) - expected = self._Expected(['--output_dir=/tmp/foo', 'exec']) - self.assertEqual(result.gtest_parallel_args, expected) - self.assertEqual(result.output_dir, '/tmp/foo') - - def testBoolArg(self): - result = gtest_parallel_wrapper.ParseArgs( - ['--gtest_also_run_disabled_tests', 'exec']) - expected = self._Expected(['--gtest_also_run_disabled_tests', 'exec']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testNoArgs(self): - result = gtest_parallel_wrapper.ParseArgs(['exec']) - expected = self._Expected(['exec']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testDocExample(self): - with TemporaryDirectory() as tmp_dir: - output_dir = os.path.join(tmp_dir, 'foo') - result = gtest_parallel_wrapper.ParseArgs([ - 'some_test', '--some_flag=some_value', '--another_flag', - '--output_dir=' + output_dir, '--store-test-artifacts', - '--isolated-script-test-output=SOME_DIR', - '--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar', - '--baz' - ]) - expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts') - expected = self._Expected([ - '--output_dir=' + output_dir, '--dump_json_test_results=SOME_DIR', - 'some_test', '--', '--test_artifacts_dir=' + expected_artifacts_dir, - '--some_flag=some_value', '--another_flag', - '--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar', - '--baz' - ]) - self.assertEqual(result.gtest_parallel_args, expected) - - def testStandardWorkers(self): - """Check integer value is passed as-is.""" - result = gtest_parallel_wrapper.ParseArgs(['--workers', '17', 'exec']) - 
expected = self._Expected(['--workers=17', 'exec']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testTwoWorkersPerCpuCore(self): - result = gtest_parallel_wrapper.ParseArgs(['--workers', '2x', 'exec']) - workers = 2 * multiprocessing.cpu_count() - expected = self._Expected(['--workers=%s' % workers, 'exec']) - self.assertEqual(result.gtest_parallel_args, expected) - - def testUseHalfTheCpuCores(self): - result = gtest_parallel_wrapper.ParseArgs(['--workers', '0.5x', 'exec']) - workers = max(multiprocessing.cpu_count() // 2, 1) - expected = self._Expected(['--workers=%s' % workers, 'exec']) - self.assertEqual(result.gtest_parallel_args, expected) + @classmethod + def _Expected(cls, gtest_parallel_args): + return ['--shard_index=0', '--shard_count=1'] + gtest_parallel_args + + def testOverwrite(self): + result = gtest_parallel_wrapper.ParseArgs( + ['--timeout=123', 'exec', '--timeout', '124']) + expected = self._Expected(['--timeout=124', 'exec']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testMixing(self): + result = gtest_parallel_wrapper.ParseArgs([ + '--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124' + ]) + expected = self._Expected( + ['--timeout=124', 'exec', '--', '--param1', '--param2']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testMixingPositional(self): + result = gtest_parallel_wrapper.ParseArgs([ + '--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', + '--foo2', 'bar2' + ]) + expected = self._Expected([ + '--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2' + ]) + self.assertEqual(result.gtest_parallel_args, expected) + + def testDoubleDash1(self): + result = gtest_parallel_wrapper.ParseArgs( + ['--timeout', '123', 'exec', '--', '--timeout', '124']) + expected = self._Expected( + ['--timeout=123', 'exec', '--', '--timeout', '124']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testDoubleDash2(self): + result = 
gtest_parallel_wrapper.ParseArgs( + ['--timeout=123', '--', 'exec', '--timeout=124']) + expected = self._Expected( + ['--timeout=123', 'exec', '--', '--timeout=124']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testArtifacts(self): + with TemporaryDirectory() as tmp_dir: + output_dir = os.path.join(tmp_dir, 'foo') + result = gtest_parallel_wrapper.ParseArgs( + ['exec', '--store-test-artifacts', '--output_dir', output_dir]) + exp_artifacts_dir = os.path.join(output_dir, 'test_artifacts') + exp = self._Expected([ + '--output_dir=' + output_dir, 'exec', '--', + '--test_artifacts_dir=' + exp_artifacts_dir + ]) + self.assertEqual(result.gtest_parallel_args, exp) + self.assertEqual(result.output_dir, output_dir) + self.assertEqual(result.test_artifacts_dir, exp_artifacts_dir) + + def testNoDirsSpecified(self): + result = gtest_parallel_wrapper.ParseArgs(['exec']) + self.assertEqual(result.output_dir, None) + self.assertEqual(result.test_artifacts_dir, None) + + def testOutputDirSpecified(self): + result = gtest_parallel_wrapper.ParseArgs( + ['exec', '--output_dir', '/tmp/foo']) + self.assertEqual(result.output_dir, '/tmp/foo') + self.assertEqual(result.test_artifacts_dir, None) + + def testShortArg(self): + result = gtest_parallel_wrapper.ParseArgs(['-d', '/tmp/foo', 'exec']) + expected = self._Expected(['--output_dir=/tmp/foo', 'exec']) + self.assertEqual(result.gtest_parallel_args, expected) + self.assertEqual(result.output_dir, '/tmp/foo') + + def testBoolArg(self): + result = gtest_parallel_wrapper.ParseArgs( + ['--gtest_also_run_disabled_tests', 'exec']) + expected = self._Expected(['--gtest_also_run_disabled_tests', 'exec']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testNoArgs(self): + result = gtest_parallel_wrapper.ParseArgs(['exec']) + expected = self._Expected(['exec']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testDocExample(self): + with TemporaryDirectory() as tmp_dir: + output_dir = 
os.path.join(tmp_dir, 'foo') + result = gtest_parallel_wrapper.ParseArgs([ + 'some_test', '--some_flag=some_value', '--another_flag', + '--output_dir=' + output_dir, '--store-test-artifacts', + '--isolated-script-test-perf-output=SOME_OTHER_DIR', + '--foo=bar', '--baz' + ]) + expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts') + expected = self._Expected([ + '--output_dir=' + output_dir, 'some_test', '--', + '--test_artifacts_dir=' + expected_artifacts_dir, + '--some_flag=some_value', '--another_flag', + '--isolated_script_test_perf_output=SOME_OTHER_DIR', + '--foo=bar', '--baz' + ]) + self.assertEqual(result.gtest_parallel_args, expected) + + def testStandardWorkers(self): + """Check integer value is passed as-is.""" + result = gtest_parallel_wrapper.ParseArgs(['--workers', '17', 'exec']) + expected = self._Expected(['--workers=17', 'exec']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testTwoWorkersPerCpuCore(self): + result = gtest_parallel_wrapper.ParseArgs(['--workers', '2x', 'exec']) + workers = 2 * multiprocessing.cpu_count() + expected = self._Expected(['--workers=%s' % workers, 'exec']) + self.assertEqual(result.gtest_parallel_args, expected) + + def testUseHalfTheCpuCores(self): + result = gtest_parallel_wrapper.ParseArgs( + ['--workers', '0.5x', 'exec']) + workers = max(multiprocessing.cpu_count() // 2, 1) + expected = self._Expected(['--workers=%s' % workers, 'exec']) + self.assertEqual(result.gtest_parallel_args, expected) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/ios/OWNERS b/tools_webrtc/ios/OWNERS index 4d28f44c91..cd06158b7f 100644 --- a/tools_webrtc/ios/OWNERS +++ b/tools_webrtc/ios/OWNERS @@ -1,2 +1 @@ -phoglund@webrtc.org tkchin@webrtc.org diff --git a/tools_webrtc/ios/build_ios_libs.py b/tools_webrtc/ios/build_ios_libs.py index b0d28c0151..383338382b 100755 --- a/tools_webrtc/ios/build_ios_libs.py +++ b/tools_webrtc/ios/build_ios_libs.py @@ -7,7 +7,6 @@ # tree. 
An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """WebRTC iOS FAT libraries build script. Each architecture is compiled separately before being merged together. By default, the library is created in out_ios_libs/. (Change with -o.) @@ -21,7 +20,6 @@ import subprocess import sys - os.environ['PATH'] = '/usr/libexec' + os.pathsep + os.environ['PATH'] SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -41,198 +39,235 @@ def _ParseArgs(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument('--build_config', default='release', - choices=['debug', 'release'], - help='The build config. Can be "debug" or "release". ' - 'Defaults to "release".') - parser.add_argument('--arch', nargs='+', default=DEFAULT_ARCHS, - choices=ENABLED_ARCHS, - help='Architectures to build. Defaults to %(default)s.') - parser.add_argument('-c', '--clean', action='store_true', default=False, - help='Removes the previously generated build output, if any.') - parser.add_argument('-p', '--purify', action='store_true', default=False, - help='Purifies the previously generated build output by ' - 'removing the temporary results used when (re)building.') - parser.add_argument('-o', '--output-dir', default=SDK_OUTPUT_DIR, - help='Specifies a directory to output the build artifacts to. 
' - 'If specified together with -c, deletes the dir.') - parser.add_argument('-r', '--revision', type=int, default=0, - help='Specifies a revision number to embed if building the framework.') - parser.add_argument('-e', '--bitcode', action='store_true', default=False, - help='Compile with bitcode.') - parser.add_argument('--verbose', action='store_true', default=False, - help='Debug logging.') - parser.add_argument('--use-goma', action='store_true', default=False, - help='Use goma to build.') - parser.add_argument('--extra-gn-args', default=[], nargs='*', - help='Additional GN args to be used during Ninja generation.') - - return parser.parse_args() + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--build_config', + default='release', + choices=['debug', 'release'], + help='The build config. Can be "debug" or "release". ' + 'Defaults to "release".') + parser.add_argument( + '--arch', + nargs='+', + default=DEFAULT_ARCHS, + choices=ENABLED_ARCHS, + help='Architectures to build. Defaults to %(default)s.') + parser.add_argument( + '-c', + '--clean', + action='store_true', + default=False, + help='Removes the previously generated build output, if any.') + parser.add_argument( + '-p', + '--purify', + action='store_true', + default=False, + help='Purifies the previously generated build output by ' + 'removing the temporary results used when (re)building.') + parser.add_argument( + '-o', + '--output-dir', + default=SDK_OUTPUT_DIR, + help='Specifies a directory to output the build artifacts to. 
' + 'If specified together with -c, deletes the dir.') + parser.add_argument( + '-r', + '--revision', + type=int, + default=0, + help='Specifies a revision number to embed if building the framework.') + parser.add_argument('-e', + '--bitcode', + action='store_true', + default=False, + help='Compile with bitcode.') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + parser.add_argument('--use-goma', + action='store_true', + default=False, + help='Use goma to build.') + parser.add_argument( + '--extra-gn-args', + default=[], + nargs='*', + help='Additional GN args to be used during Ninja generation.') + + return parser.parse_args() def _RunCommand(cmd): - logging.debug('Running: %r', cmd) - subprocess.check_call(cmd, cwd=SRC_DIR) + logging.debug('Running: %r', cmd) + subprocess.check_call(cmd, cwd=SRC_DIR) def _CleanArtifacts(output_dir): - if os.path.isdir(output_dir): - logging.info('Deleting %s', output_dir) - shutil.rmtree(output_dir) + if os.path.isdir(output_dir): + logging.info('Deleting %s', output_dir) + shutil.rmtree(output_dir) def _CleanTemporary(output_dir, architectures): - if os.path.isdir(output_dir): - logging.info('Removing temporary build files.') - for arch in architectures: - arch_lib_path = os.path.join(output_dir, arch + '_libs') - if os.path.isdir(arch_lib_path): - shutil.rmtree(arch_lib_path) + if os.path.isdir(output_dir): + logging.info('Removing temporary build files.') + for arch in architectures: + arch_lib_path = os.path.join(output_dir, arch + '_libs') + if os.path.isdir(arch_lib_path): + shutil.rmtree(arch_lib_path) def BuildWebRTC(output_dir, target_arch, flavor, gn_target_name, - ios_deployment_target, libvpx_build_vp9, use_bitcode, - use_goma, extra_gn_args): - output_dir = os.path.join(output_dir, target_arch + '_libs') - gn_args = ['target_os="ios"', 'ios_enable_code_signing=false', - 'use_xcode_clang=true', 'is_component_build=false'] - - # Add flavor option. 
- if flavor == 'debug': - gn_args.append('is_debug=true') - elif flavor == 'release': - gn_args.append('is_debug=false') - else: - raise ValueError('Unexpected flavor type: %s' % flavor) - - gn_args.append('target_cpu="%s"' % target_arch) - - gn_args.append('ios_deployment_target="%s"' % ios_deployment_target) - - gn_args.append('rtc_libvpx_build_vp9=' + - ('true' if libvpx_build_vp9 else 'false')) - - gn_args.append('enable_ios_bitcode=' + - ('true' if use_bitcode else 'false')) - gn_args.append('use_goma=' + ('true' if use_goma else 'false')) - - args_string = ' '.join(gn_args + extra_gn_args) - logging.info('Building WebRTC with args: %s', args_string) - - cmd = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), - 'gen', - output_dir, - '--args=' + args_string, - ] - _RunCommand(cmd) - logging.info('Building target: %s', gn_target_name) - - cmd = [ - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), - '-C', - output_dir, - gn_target_name, - ] - if use_goma: - cmd.extend(['-j', '200']) - _RunCommand(cmd) + ios_deployment_target, libvpx_build_vp9, use_bitcode, use_goma, + extra_gn_args): + output_dir = os.path.join(output_dir, target_arch + '_libs') + gn_args = [ + 'target_os="ios"', 'ios_enable_code_signing=false', + 'use_xcode_clang=true', 'is_component_build=false' + ] + + # Add flavor option. 
+ if flavor == 'debug': + gn_args.append('is_debug=true') + elif flavor == 'release': + gn_args.append('is_debug=false') + else: + raise ValueError('Unexpected flavor type: %s' % flavor) + + gn_args.append('target_cpu="%s"' % target_arch) + + gn_args.append('ios_deployment_target="%s"' % ios_deployment_target) + + gn_args.append('rtc_libvpx_build_vp9=' + + ('true' if libvpx_build_vp9 else 'false')) + + gn_args.append('enable_ios_bitcode=' + + ('true' if use_bitcode else 'false')) + gn_args.append('use_goma=' + ('true' if use_goma else 'false')) + + args_string = ' '.join(gn_args + extra_gn_args) + logging.info('Building WebRTC with args: %s', args_string) + + cmd = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), + 'gen', + output_dir, + '--args=' + args_string, + ] + _RunCommand(cmd) + logging.info('Building target: %s', gn_target_name) + + cmd = [ + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), + '-C', + output_dir, + gn_target_name, + ] + if use_goma: + cmd.extend(['-j', '200']) + _RunCommand(cmd) + def main(): - args = _ParseArgs() + args = _ParseArgs() - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - if args.clean: - _CleanArtifacts(args.output_dir) - return 0 + if args.clean: + _CleanArtifacts(args.output_dir) + return 0 - architectures = list(args.arch) - gn_args = args.extra_gn_args + architectures = list(args.arch) + gn_args = args.extra_gn_args - if args.purify: - _CleanTemporary(args.output_dir, architectures) - return 0 + if args.purify: + _CleanTemporary(args.output_dir, architectures) + return 0 - gn_target_name = 'framework_objc' - if not args.bitcode: - gn_args.append('enable_dsyms=true') - gn_args.append('enable_stripping=true') - - - # Build all architectures. 
- for arch in architectures: - BuildWebRTC(args.output_dir, arch, args.build_config, gn_target_name, - IOS_DEPLOYMENT_TARGET, LIBVPX_BUILD_VP9, args.bitcode, - args.use_goma, gn_args) - - # Create FAT archive. - lib_paths = [os.path.join(args.output_dir, arch + '_libs') - for arch in architectures] - - # Combine the slices. - dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC') - # Dylibs will be combined, all other files are the same across archs. - # Use distutils instead of shutil to support merging folders. - distutils.dir_util.copy_tree( - os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME), - os.path.join(args.output_dir, SDK_FRAMEWORK_NAME)) - logging.info('Merging framework slices.') - dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths] - out_dylib_path = os.path.join(args.output_dir, dylib_path) - try: - os.remove(out_dylib_path) - except OSError: - pass - cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path] - _RunCommand(cmd) - - # Merge the dSYM slices. - lib_dsym_dir_path = os.path.join(lib_paths[0], 'WebRTC.dSYM') - if os.path.isdir(lib_dsym_dir_path): - distutils.dir_util.copy_tree(lib_dsym_dir_path, - os.path.join(args.output_dir, 'WebRTC.dSYM')) - logging.info('Merging dSYM slices.') - dsym_path = os.path.join('WebRTC.dSYM', 'Contents', 'Resources', 'DWARF', - 'WebRTC') - lib_dsym_paths = [os.path.join(path, dsym_path) for path in lib_paths] - out_dsym_path = os.path.join(args.output_dir, dsym_path) - try: - os.remove(out_dsym_path) - except OSError: - pass - cmd = ['lipo'] + lib_dsym_paths + ['-create', '-output', out_dsym_path] - _RunCommand(cmd) + gn_target_name = 'framework_objc' + if not args.bitcode: + gn_args.append('enable_dsyms=true') + gn_args.append('enable_stripping=true') - # Generate the license file. 
- ninja_dirs = [os.path.join(args.output_dir, arch + '_libs') - for arch in architectures] - gn_target_full_name = '//sdk:' + gn_target_name - builder = LicenseBuilder(ninja_dirs, [gn_target_full_name]) - builder.GenerateLicenseText( + # Build all architectures. + for arch in architectures: + BuildWebRTC(args.output_dir, arch, args.build_config, gn_target_name, + IOS_DEPLOYMENT_TARGET, LIBVPX_BUILD_VP9, args.bitcode, + args.use_goma, gn_args) + + # Create FAT archive. + lib_paths = [ + os.path.join(args.output_dir, arch + '_libs') for arch in architectures + ] + + # Combine the slices. + dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC') + # Dylibs will be combined, all other files are the same across archs. + # Use distutils instead of shutil to support merging folders. + distutils.dir_util.copy_tree( + os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME), os.path.join(args.output_dir, SDK_FRAMEWORK_NAME)) - - - # Modify the version number. - # Format should be ... - # e.g. 55.0.14986 means branch cut 55, no hotfixes, and revision 14986. - infoplist_path = os.path.join(args.output_dir, SDK_FRAMEWORK_NAME, - 'Info.plist') - cmd = ['PlistBuddy', '-c', - 'Print :CFBundleShortVersionString', infoplist_path] - major_minor = subprocess.check_output(cmd).strip() - version_number = '%s.%s' % (major_minor, args.revision) - logging.info('Substituting revision number: %s', version_number) - cmd = ['PlistBuddy', '-c', - 'Set :CFBundleVersion ' + version_number, infoplist_path] + logging.info('Merging framework slices.') + dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths] + out_dylib_path = os.path.join(args.output_dir, dylib_path) + try: + os.remove(out_dylib_path) + except OSError: + pass + cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path] _RunCommand(cmd) - _RunCommand(['plutil', '-convert', 'binary1', infoplist_path]) - logging.info('Done.') - return 0 + # Merge the dSYM slices. 
+ lib_dsym_dir_path = os.path.join(lib_paths[0], 'WebRTC.dSYM') + if os.path.isdir(lib_dsym_dir_path): + distutils.dir_util.copy_tree( + lib_dsym_dir_path, os.path.join(args.output_dir, 'WebRTC.dSYM')) + logging.info('Merging dSYM slices.') + dsym_path = os.path.join('WebRTC.dSYM', 'Contents', 'Resources', + 'DWARF', 'WebRTC') + lib_dsym_paths = [os.path.join(path, dsym_path) for path in lib_paths] + out_dsym_path = os.path.join(args.output_dir, dsym_path) + try: + os.remove(out_dsym_path) + except OSError: + pass + cmd = ['lipo'] + lib_dsym_paths + ['-create', '-output', out_dsym_path] + _RunCommand(cmd) + + # Generate the license file. + ninja_dirs = [ + os.path.join(args.output_dir, arch + '_libs') + for arch in architectures + ] + gn_target_full_name = '//sdk:' + gn_target_name + builder = LicenseBuilder(ninja_dirs, [gn_target_full_name]) + builder.GenerateLicenseText( + os.path.join(args.output_dir, SDK_FRAMEWORK_NAME)) + + # Modify the version number. + # Format should be ... + # e.g. 55.0.14986 means branch cut 55, no hotfixes, and revision 14986. 
+ infoplist_path = os.path.join(args.output_dir, SDK_FRAMEWORK_NAME, + 'Info.plist') + cmd = [ + 'PlistBuddy', '-c', 'Print :CFBundleShortVersionString', + infoplist_path + ] + major_minor = subprocess.check_output(cmd).strip() + version_number = '%s.%s' % (major_minor, args.revision) + logging.info('Substituting revision number: %s', version_number) + cmd = [ + 'PlistBuddy', '-c', 'Set :CFBundleVersion ' + version_number, + infoplist_path + ] + _RunCommand(cmd) + _RunCommand(['plutil', '-convert', 'binary1', infoplist_path]) + + logging.info('Done.') + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/ios/generate_modulemap.py b/tools_webrtc/ios/generate_modulemap.py index 45bd3d875e..4609385c38 100644 --- a/tools_webrtc/ios/generate_modulemap.py +++ b/tools_webrtc/ios/generate_modulemap.py @@ -9,24 +9,24 @@ import argparse import sys + def GenerateModulemap(): - parser = argparse.ArgumentParser(description='Generate modulemap') - parser.add_argument("-o", "--out", type=str, help="Output file.") - parser.add_argument("-n", "--name", type=str, help="Name of binary.") + parser = argparse.ArgumentParser(description='Generate modulemap') + parser.add_argument("-o", "--out", type=str, help="Output file.") + parser.add_argument("-n", "--name", type=str, help="Name of binary.") - args = parser.parse_args() + args = parser.parse_args() - with open(args.out, "w") as outfile: - module_template = 'framework module %s {\n' \ - ' umbrella header "%s.h"\n' \ - '\n' \ - ' export *\n' \ - ' module * { export * }\n' \ - '}\n' % (args.name, args.name) - outfile.write(module_template) - return 0 + with open(args.out, "w") as outfile: + module_template = 'framework module %s {\n' \ + ' umbrella header "%s.h"\n' \ + '\n' \ + ' export *\n' \ + ' module * { export * }\n' \ + '}\n' % (args.name, args.name) + outfile.write(module_template) + return 0 if __name__ == '__main__': - sys.exit(GenerateModulemap()) - + 
sys.exit(GenerateModulemap()) diff --git a/tools_webrtc/ios/generate_umbrella_header.py b/tools_webrtc/ios/generate_umbrella_header.py index 3549735eb6..4c700a1c31 100644 --- a/tools_webrtc/ios/generate_umbrella_header.py +++ b/tools_webrtc/ios/generate_umbrella_header.py @@ -14,15 +14,20 @@ def GenerateUmbrellaHeader(): - parser = argparse.ArgumentParser(description='Generate umbrella header') - parser.add_argument("-o", "--out", type=str, help="Output file.") - parser.add_argument("-s", "--sources", default=[], type=str, nargs='+', - help="Headers to include.") - - args = parser.parse_args() - - with open(args.out, "w") as outfile: - outfile.write(textwrap.dedent("""\ + parser = argparse.ArgumentParser(description='Generate umbrella header') + parser.add_argument("-o", "--out", type=str, help="Output file.") + parser.add_argument("-s", + "--sources", + default=[], + type=str, + nargs='+', + help="Headers to include.") + + args = parser.parse_args() + + with open(args.out, "w") as outfile: + outfile.write( + textwrap.dedent("""\ /* * Copyright %d The WebRTC project authors. All Rights Reserved. * @@ -33,11 +38,11 @@ def GenerateUmbrellaHeader(): * be found in the AUTHORS file in the root of the source tree. */\n\n""" % datetime.datetime.now().year)) - for s in args.sources: - outfile.write("#import \n".format(os.path.basename(s))) + for s in args.sources: + outfile.write("#import \n".format(os.path.basename(s))) - return 0 + return 0 if __name__ == '__main__': - sys.exit(GenerateUmbrellaHeader()) + sys.exit(GenerateUmbrellaHeader()) diff --git a/tools_webrtc/ios/merge_ios_libs.py b/tools_webrtc/ios/merge_ios_libs.py index 651024eb6a..31ffc1ddd5 100755 --- a/tools_webrtc/ios/merge_ios_libs.py +++ b/tools_webrtc/ios/merge_ios_libs.py @@ -7,7 +7,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- """Script for merging generated iOS libraries.""" import sys @@ -22,7 +21,7 @@ def MergeLibs(lib_base_dir): - """Merges generated iOS libraries for different archs. + """Merges generated iOS libraries for different archs. Uses libtool to generate FAT archive files for each generated library. @@ -33,92 +32,96 @@ def MergeLibs(lib_base_dir): Returns: Exit code of libtool. """ - output_dir_name = 'fat_libs' - archs = [arch for arch in os.listdir(lib_base_dir) - if arch in VALID_ARCHS] - # For each arch, find (library name, libary path) for arch. We will merge - # all libraries with the same name. - libs = {} - for lib_dir in [os.path.join(lib_base_dir, arch) for arch in VALID_ARCHS]: - if not os.path.exists(lib_dir): - continue - for dirpath, _, filenames in os.walk(lib_dir): - for filename in filenames: - if not filename.endswith('.a'): - continue - entry = libs.get(filename, []) - entry.append(os.path.join(dirpath, filename)) - libs[filename] = entry - orphaned_libs = {} - valid_libs = {} - for library, paths in libs.items(): - if len(paths) < len(archs): - orphaned_libs[library] = paths - else: - valid_libs[library] = paths - for library, paths in orphaned_libs.items(): - components = library[:-2].split('_')[:-1] - found = False - # Find directly matching parent libs by stripping suffix. - while components and not found: - parent_library = '_'.join(components) + '.a' - if parent_library in valid_libs: - valid_libs[parent_library].extend(paths) - found = True - break - components = components[:-1] - # Find next best match by finding parent libs with the same prefix. - if not found: - base_prefix = library[:-2].split('_')[0] - for valid_lib, valid_paths in valid_libs.items(): - if valid_lib[:len(base_prefix)] == base_prefix: - valid_paths.extend(paths) - found = True - break - assert found - - # Create output directory. 
- output_dir_path = os.path.join(lib_base_dir, output_dir_name) - if not os.path.exists(output_dir_path): - os.mkdir(output_dir_path) - - # Use this so libtool merged binaries are always the same. - env = os.environ.copy() - env['ZERO_AR_DATE'] = '1' - - # Ignore certain errors. - libtool_re = re.compile(r'^.*libtool:.*file: .* has no symbols$') - - # Merge libraries using libtool. - libtool_returncode = 0 - for library, paths in valid_libs.items(): - cmd_list = ['libtool', '-static', '-v', '-o', - os.path.join(output_dir_path, library)] + paths - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line): - print >>sys.stderr, line - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - libtool_returncode = libtoolout.returncode - if not libtool_returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == '-o' and cmd_list[i+1].endswith('.a'): - os.utime(cmd_list[i+1], None) - break - return libtool_returncode + output_dir_name = 'fat_libs' + archs = [arch for arch in os.listdir(lib_base_dir) if arch in VALID_ARCHS] + # For each arch, find (library name, libary path) for arch. We will merge + # all libraries with the same name. 
+ libs = {} + for lib_dir in [os.path.join(lib_base_dir, arch) for arch in VALID_ARCHS]: + if not os.path.exists(lib_dir): + continue + for dirpath, _, filenames in os.walk(lib_dir): + for filename in filenames: + if not filename.endswith('.a'): + continue + entry = libs.get(filename, []) + entry.append(os.path.join(dirpath, filename)) + libs[filename] = entry + orphaned_libs = {} + valid_libs = {} + for library, paths in libs.items(): + if len(paths) < len(archs): + orphaned_libs[library] = paths + else: + valid_libs[library] = paths + for library, paths in orphaned_libs.items(): + components = library[:-2].split('_')[:-1] + found = False + # Find directly matching parent libs by stripping suffix. + while components and not found: + parent_library = '_'.join(components) + '.a' + if parent_library in valid_libs: + valid_libs[parent_library].extend(paths) + found = True + break + components = components[:-1] + # Find next best match by finding parent libs with the same prefix. + if not found: + base_prefix = library[:-2].split('_')[0] + for valid_lib, valid_paths in valid_libs.items(): + if valid_lib[:len(base_prefix)] == base_prefix: + valid_paths.extend(paths) + found = True + break + assert found + + # Create output directory. + output_dir_path = os.path.join(lib_base_dir, output_dir_name) + if not os.path.exists(output_dir_path): + os.mkdir(output_dir_path) + + # Use this so libtool merged binaries are always the same. + env = os.environ.copy() + env['ZERO_AR_DATE'] = '1' + + # Ignore certain errors. + libtool_re = re.compile(r'^.*libtool:.*file: .* has no symbols$') + + # Merge libraries using libtool. 
+ libtool_returncode = 0 + for library, paths in valid_libs.items(): + cmd_list = [ + 'libtool', '-static', '-v', '-o', + os.path.join(output_dir_path, library) + ] + paths + libtoolout = subprocess.Popen(cmd_list, + stderr=subprocess.PIPE, + env=env) + _, err = libtoolout.communicate() + for line in err.splitlines(): + if not libtool_re.match(line): + print >> sys.stderr, line + # Unconditionally touch the output .a file on the command line if present + # and the command succeeded. A bit hacky. + libtool_returncode = libtoolout.returncode + if not libtool_returncode: + for i in range(len(cmd_list) - 1): + if cmd_list[i] == '-o' and cmd_list[i + 1].endswith('.a'): + os.utime(cmd_list[i + 1], None) + break + return libtool_returncode def Main(): - parser_description = 'Merge WebRTC libraries.' - parser = argparse.ArgumentParser(description=parser_description) - parser.add_argument('lib_base_dir', - help='Directory with built libraries. ', - type=str) - args = parser.parse_args() - lib_base_dir = args.lib_base_dir - MergeLibs(lib_base_dir) + parser_description = 'Merge WebRTC libraries.' + parser = argparse.ArgumentParser(description=parser_description) + parser.add_argument('lib_base_dir', + help='Directory with built libraries. ', + type=str) + args = parser.parse_args() + lib_base_dir = args.lib_base_dir + MergeLibs(lib_base_dir) + if __name__ == '__main__': - sys.exit(Main()) + sys.exit(Main()) diff --git a/tools_webrtc/iwyu/apply-iwyu b/tools_webrtc/iwyu/apply-iwyu new file mode 100755 index 0000000000..65950d307f --- /dev/null +++ b/tools_webrtc/iwyu/apply-iwyu @@ -0,0 +1,43 @@ +#!/bin/sh +# +# Run the include-what-you-use tool (iwyu) on a file in the webrtc source +# directory. +# +# The script uses a subsequent grep pass to remove #include files from .cc +# that are also in the .h file, or are problematic to include. +# +# To get iwyu on Debian/glinux, do "sudo apt-get install iwyu". 
+ +set -e +set -x +FILE=$1 +# If you want to exclude files that are in $FILE.h from $FILE.cc, set +# the following variable to "yes". This is a style guide violation. +REMOVE_CC_INCLUDES=no + +if [ ! -f $FILE.h ]; then + echo "$FILE.h not found" + exit 1 +fi + +if [ ! -f $FILE.cc ]; then + echo "$FILE.cc not found" + exit 1 +fi + +iwyu -Xiwyu --no_fwd_decls -D__X86_64__ -DWEBRTC_POSIX -I . -I third_party/abseil-cpp $FILE.cc |& fix_include || echo "Some files modified" + +if [ $REMOVE_CC_INCLUDES == "yes" ]; then + grep ^#include $FILE.h | grep -v -f - $FILE.cc > $FILE.ccnew + grep -v -f tools_webrtc/iwyu/iwyu-filter-list $FILE.ccnew > $FILE.cc + rm $FILE.ccnew +else + grep -v -f tools_webrtc/iwyu/iwyu-filter-list $FILE.cc > $FILE.ccnew + mv $FILE.ccnew $FILE.cc +fi +grep -v -f tools_webrtc/iwyu/iwyu-filter-list $FILE.h > $FILE.hnew +mv $FILE.hnew $FILE.h + +echo "Finished. Check diff, compile and git cl format before uploading." + + diff --git a/tools_webrtc/iwyu/iwyu b/tools_webrtc/iwyu/iwyu new file mode 100755 index 0000000000..7bbc69d496 --- /dev/null +++ b/tools_webrtc/iwyu/iwyu @@ -0,0 +1,11 @@ +#!/bin/bash +# +# Run IWYU against a single webrtc source file. +# +# To get iwyu on Debian/glinux, do "sudo apt-get install iwyu". +# +# To apply the changes suggested blindly, do tools/iwyu |& fix_include +# +# Doing "tools/iwyu filename.cc" will check both the .cc and .h file. +# +iwyu -Xiwyu --no_fwd_decls -D__X86_64__ -DWEBRTC_POSIX -I . -I third_party/abseil-cpp $@ diff --git a/tools_webrtc/iwyu/iwyu-filter-list b/tools_webrtc/iwyu/iwyu-filter-list new file mode 100644 index 0000000000..f31b996e91 --- /dev/null +++ b/tools_webrtc/iwyu/iwyu-filter-list @@ -0,0 +1,5 @@ +# These are lines that apply-iwyu will prevent from being added to a +# file. They are lines that refer to files that are conditionally included +# in certain configurations. 
+#include +#include diff --git a/tools_webrtc/libs/generate_licenses.py b/tools_webrtc/libs/generate_licenses.py index 2202188339..b1587af5ae 100755 --- a/tools_webrtc/libs/generate_licenses.py +++ b/tools_webrtc/libs/generate_licenses.py @@ -36,16 +36,21 @@ 'abseil-cpp': ['third_party/abseil-cpp/LICENSE'], 'android_ndk': ['third_party/android_ndk/NOTICE'], 'android_sdk': ['third_party/android_sdk/LICENSE'], - 'auto': ['third_party/android_deps/libs/' - 'com_google_auto_service_auto_service/LICENSE'], + 'auto': [ + 'third_party/android_deps/libs/' + 'com_google_auto_service_auto_service/LICENSE' + ], 'bazel': ['third_party/bazel/LICENSE'], 'boringssl': ['third_party/boringssl/src/LICENSE'], - 'errorprone': ['third_party/android_deps/libs/' - 'com_google_errorprone_error_prone_core/LICENSE'], + 'errorprone': [ + 'third_party/android_deps/libs/' + 'com_google_errorprone_error_prone_core/LICENSE' + ], 'fiat': ['third_party/boringssl/src/third_party/fiat/LICENSE'], 'guava': ['third_party/guava/LICENSE'], 'ijar': ['third_party/ijar/LICENSE'], 'jsoncpp': ['third_party/jsoncpp/LICENSE'], + 'libaom': ['third_party/libaom/source/libaom/LICENSE'], 'libc++': ['buildtools/third_party/libc++/trunk/LICENSE.TXT'], 'libc++abi': ['buildtools/third_party/libc++abi/trunk/LICENSE.TXT'], 'libevent': ['base/third_party/libevent/LICENSE'], @@ -53,6 +58,7 @@ 'libsrtp': ['third_party/libsrtp/LICENSE'], 'libvpx': ['third_party/libvpx/source/libvpx/LICENSE'], 'libyuv': ['third_party/libyuv/LICENSE'], + 'nasm': ['third_party/nasm/LICENSE'], 'opus': ['third_party/opus/src/COPYING'], 'pffft': ['third_party/pffft/LICENSE'], 'protobuf': ['third_party/protobuf/LICENSE'], @@ -66,7 +72,7 @@ 'fft': ['modules/third_party/fft/LICENSE'], 'g711': ['modules/third_party/g711/LICENSE'], 'g722': ['modules/third_party/g722/LICENSE'], - 'fft4g': ['common_audio/third_party/fft4g/LICENSE'], + 'ooura': ['common_audio/third_party/ooura/LICENSE'], 'spl_sqrt_floor': 
['common_audio/third_party/spl_sqrt_floor/LICENSE'], # TODO(bugs.webrtc.org/1110): Remove this hack. This is not a lib. @@ -76,6 +82,7 @@ # Compile time dependencies, no license needed: 'yasm': [], 'ow2_asm': [], + 'jdk': [], } # Third_party library _regex_ to licences mapping. Keys are regular expression @@ -92,11 +99,11 @@ def FindSrcDirPath(): - """Returns the abs path to the src/ dir of the project.""" - src_dir = os.path.dirname(os.path.abspath(__file__)) - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir + """Returns the abs path to the src/ dir of the project.""" + src_dir = os.path.dirname(os.path.abspath(__file__)) + while os.path.basename(src_dir) != 'src': + src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) + return src_dir SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0])) @@ -110,29 +117,28 @@ def FindSrcDirPath(): class LicenseBuilder(object): + def __init__(self, + buildfile_dirs, + targets, + lib_to_licenses_dict=None, + lib_regex_to_licenses_dict=None): + if lib_to_licenses_dict is None: + lib_to_licenses_dict = LIB_TO_LICENSES_DICT - def __init__(self, - buildfile_dirs, - targets, - lib_to_licenses_dict=None, - lib_regex_to_licenses_dict=None): - if lib_to_licenses_dict is None: - lib_to_licenses_dict = LIB_TO_LICENSES_DICT - - if lib_regex_to_licenses_dict is None: - lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT + if lib_regex_to_licenses_dict is None: + lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT - self.buildfile_dirs = buildfile_dirs - self.targets = targets - self.lib_to_licenses_dict = lib_to_licenses_dict - self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict + self.buildfile_dirs = buildfile_dirs + self.targets = targets + self.lib_to_licenses_dict = lib_to_licenses_dict + self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict - self.common_licenses_dict = self.lib_to_licenses_dict.copy() - 
self.common_licenses_dict.update(self.lib_regex_to_licenses_dict) + self.common_licenses_dict = self.lib_to_licenses_dict.copy() + self.common_licenses_dict.update(self.lib_regex_to_licenses_dict) - @staticmethod - def _ParseLibraryName(dep): - """Returns library name after third_party + @staticmethod + def _ParseLibraryName(dep): + """Returns library name after third_party Input one of: //a/b/third_party/libname:c @@ -141,11 +147,11 @@ def _ParseLibraryName(dep): Outputs libname or None if this is not a third_party dependency. """ - groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep) - return groups.group(1) if groups else None + groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep) + return groups.group(1) if groups else None - def _ParseLibrary(self, dep): - """Returns library simple or regex name that matches `dep` after third_party + def _ParseLibrary(self, dep): + """Returns library simple or regex name that matches `dep` after third_party This method matches `dep` dependency against simple names in LIB_TO_LICENSES_DICT and regular expression names in @@ -153,104 +159,109 @@ def _ParseLibrary(self, dep): Outputs matched dict key or None if this is not a third_party dependency. 
""" - libname = LicenseBuilder._ParseLibraryName(dep) - - for lib_regex in self.lib_regex_to_licenses_dict: - if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep): - return lib_regex - - return libname - - @staticmethod - def _RunGN(buildfile_dir, target): - cmd = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), - 'desc', - '--all', - '--format=json', - os.path.abspath(buildfile_dir), - target, - ] - logging.debug('Running: %r', cmd) - output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT) - logging.debug('Output: %s', output_json) - return output_json - - def _GetThirdPartyLibraries(self, buildfile_dir, target): - output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target)) - libraries = set() - for described_target in output.values(): - third_party_libs = ( - self._ParseLibrary(dep) for dep in described_target['deps']) - libraries |= set(lib for lib in third_party_libs if lib) - return libraries - - def GenerateLicenseText(self, output_dir): - # Get a list of third_party libs from gn. For fat libraries we must consider - # all architectures, hence the multiple buildfile directories. - third_party_libs = set() - for buildfile in self.buildfile_dirs: - for target in self.targets: - third_party_libs |= self._GetThirdPartyLibraries(buildfile, target) - assert len(third_party_libs) > 0 - - missing_licenses = third_party_libs - set(self.common_licenses_dict.keys()) - if missing_licenses: - error_msg = 'Missing licenses for following third_party targets: %s' % \ - ', '.join(missing_licenses) - logging.error(error_msg) - raise Exception(error_msg) - - # Put webrtc at the front of the list. - license_libs = sorted(third_party_libs) - license_libs.insert(0, 'webrtc') - - logging.info('List of licenses: %s', ', '.join(license_libs)) - - # Generate markdown. 
- output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), 'w+') - for license_lib in license_libs: - if len(self.common_licenses_dict[license_lib]) == 0: - logging.info('Skipping compile time or internal dependency: %s', - license_lib) - continue # Compile time dependency - - output_license_file.write('# %s\n' % license_lib) - output_license_file.write('```\n') - for path in self.common_licenses_dict[license_lib]: - license_path = os.path.join(WEBRTC_ROOT, path) - with open(license_path, 'r') as license_file: - license_text = cgi.escape(license_file.read(), quote=True) - output_license_file.write(license_text) - output_license_file.write('\n') - output_license_file.write('```\n\n') - - output_license_file.close() + libname = LicenseBuilder._ParseLibraryName(dep) + + for lib_regex in self.lib_regex_to_licenses_dict: + if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep): + return lib_regex + + return libname + + @staticmethod + def _RunGN(buildfile_dir, target): + cmd = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), + 'desc', + '--all', + '--format=json', + os.path.abspath(buildfile_dir), + target, + ] + logging.debug('Running: %r', cmd) + output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT) + logging.debug('Output: %s', output_json) + return output_json + + def _GetThirdPartyLibraries(self, buildfile_dir, target): + output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target)) + libraries = set() + for described_target in output.values(): + third_party_libs = (self._ParseLibrary(dep) + for dep in described_target['deps']) + libraries |= set(lib for lib in third_party_libs if lib) + return libraries + + def GenerateLicenseText(self, output_dir): + # Get a list of third_party libs from gn. For fat libraries we must consider + # all architectures, hence the multiple buildfile directories. 
+ third_party_libs = set() + for buildfile in self.buildfile_dirs: + for target in self.targets: + third_party_libs |= self._GetThirdPartyLibraries( + buildfile, target) + assert len(third_party_libs) > 0 + + missing_licenses = third_party_libs - set( + self.common_licenses_dict.keys()) + if missing_licenses: + error_msg = 'Missing licenses for following third_party targets: %s' % \ + ', '.join(missing_licenses) + logging.error(error_msg) + raise Exception(error_msg) + + # Put webrtc at the front of the list. + license_libs = sorted(third_party_libs) + license_libs.insert(0, 'webrtc') + + logging.info('List of licenses: %s', ', '.join(license_libs)) + + # Generate markdown. + output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), + 'w+') + for license_lib in license_libs: + if len(self.common_licenses_dict[license_lib]) == 0: + logging.info( + 'Skipping compile time or internal dependency: %s', + license_lib) + continue # Compile time dependency + + output_license_file.write('# %s\n' % license_lib) + output_license_file.write('```\n') + for path in self.common_licenses_dict[license_lib]: + license_path = os.path.join(WEBRTC_ROOT, path) + with open(license_path, 'r') as license_file: + license_text = cgi.escape(license_file.read(), quote=True) + output_license_file.write(license_text) + output_license_file.write('\n') + output_license_file.write('```\n\n') + + output_license_file.close() def main(): - parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md') - parser.add_argument( - '--verbose', action='store_true', default=False, help='Debug logging.') - parser.add_argument( - '--target', - required=True, - action='append', - default=[], - help='Name of the GN target to generate a license for') - parser.add_argument('output_dir', help='Directory to output LICENSE.md to.') - parser.add_argument( - 'buildfile_dirs', - nargs='+', - help='Directories containing gn generated ninja files') - args = parser.parse_args() - - 
logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - - builder = LicenseBuilder(args.buildfile_dirs, args.target) - builder.GenerateLicenseText(args.output_dir) + parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + parser.add_argument('--target', + required=True, + action='append', + default=[], + help='Name of the GN target to generate a license for') + parser.add_argument('output_dir', + help='Directory to output LICENSE.md to.') + parser.add_argument('buildfile_dirs', + nargs='+', + help='Directories containing gn generated ninja files') + args = parser.parse_args() + + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + + builder = LicenseBuilder(args.buildfile_dirs, args.target) + builder.GenerateLicenseText(args.output_dir) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/libs/generate_licenses_test.py b/tools_webrtc/libs/generate_licenses_test.py index 03f14459c2..51acb89881 100755 --- a/tools_webrtc/libs/generate_licenses_test.py +++ b/tools_webrtc/libs/generate_licenses_test.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env vpython # pylint: disable=relative-import,protected-access,unused-argument # Copyright 2017 The WebRTC project authors. All Rights Reserved. @@ -9,13 +9,6 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
-import os -import sys - -SRC = os.path.abspath( - os.path.join(os.path.dirname((__file__)), os.pardir, os.pardir)) -sys.path.append(os.path.join(SRC, 'third_party', 'pymock')) - import unittest import mock @@ -23,10 +16,9 @@ class TestLicenseBuilder(unittest.TestCase): - - @staticmethod - def _FakeRunGN(buildfile_dir, target): - return """ + @staticmethod + def _FakeRunGN(buildfile_dir, target): + return """ { "target1": { "deps": [ @@ -39,91 +31,93 @@ def _FakeRunGN(buildfile_dir, target): } """ - def testParseLibraryName(self): - self.assertEquals( - LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'), - 'libname1') - self.assertEquals( - LicenseBuilder._ParseLibraryName('//a/b/third_party/libname2:c(d)'), - 'libname2') - self.assertEquals( - LicenseBuilder._ParseLibraryName('//a/b/third_party/libname3/c:d(e)'), - 'libname3') - self.assertEquals( - LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None) - - def testParseLibrarySimpleMatch(self): - builder = LicenseBuilder([], [], {}, {}) - self.assertEquals( - builder._ParseLibrary('//a/b/third_party/libname:c'), 'libname') - - def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self): - lib_dict = { - 'libname:foo.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], lib_dict, {}) - self.assertEquals( - builder._ParseLibrary('//a/b/third_party/libname:bar_java'), 'libname') - - def testParseLibraryRegExMatch(self): - lib_regex_dict = { - 'libname:foo.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEquals( - builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'), - 'libname:foo.*') - - def testParseLibraryRegExMatchWithSubDirectory(self): - lib_regex_dict = { - 'libname/foo:bar.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEquals( - builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'), - 'libname/foo:bar.*') - - def 
testParseLibraryRegExMatchWithStarInside(self): - lib_regex_dict = { - 'libname/foo.*bar.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEquals( - builder._ParseLibrary('//a/b/third_party/libname/fooHAHA:bar_java'), - 'libname/foo.*bar.*') - - @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) - def testGetThirdPartyLibrariesWithoutRegex(self): - builder = LicenseBuilder([], [], {}, {}) - self.assertEquals( - builder._GetThirdPartyLibraries('out/arm', 'target1'), - set(['libname1', 'libname2', 'libname3'])) - - @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) - def testGetThirdPartyLibrariesWithRegex(self): - lib_regex_dict = { - 'libname2:c.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEquals( - builder._GetThirdPartyLibraries('out/arm', 'target1'), - set(['libname1', 'libname2:c.*', 'libname3'])) - - @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) - def testGenerateLicenseTextFailIfUnknownLibrary(self): - lib_dict = { - 'simple_library': ['path/to/LICENSE'], - } - builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {}) - - with self.assertRaises(Exception) as context: - builder.GenerateLicenseText('dummy/dir') - - self.assertEquals( - context.exception.message, - 'Missing licenses for following third_party targets: ' - 'libname1, libname2, libname3') + def testParseLibraryName(self): + self.assertEquals( + LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'), + 'libname1') + self.assertEquals( + LicenseBuilder._ParseLibraryName( + '//a/b/third_party/libname2:c(d)'), 'libname2') + self.assertEquals( + LicenseBuilder._ParseLibraryName( + '//a/b/third_party/libname3/c:d(e)'), 'libname3') + self.assertEquals( + LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None) + + def testParseLibrarySimpleMatch(self): + builder = LicenseBuilder([], [], {}, {}) + 
self.assertEquals(builder._ParseLibrary('//a/b/third_party/libname:c'), + 'libname') + + def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self): + lib_dict = { + 'libname:foo.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], lib_dict, {}) + self.assertEquals( + builder._ParseLibrary('//a/b/third_party/libname:bar_java'), + 'libname') + + def testParseLibraryRegExMatch(self): + lib_regex_dict = { + 'libname:foo.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEquals( + builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'), + 'libname:foo.*') + + def testParseLibraryRegExMatchWithSubDirectory(self): + lib_regex_dict = { + 'libname/foo:bar.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEquals( + builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'), + 'libname/foo:bar.*') + + def testParseLibraryRegExMatchWithStarInside(self): + lib_regex_dict = { + 'libname/foo.*bar.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEquals( + builder._ParseLibrary( + '//a/b/third_party/libname/fooHAHA:bar_java'), + 'libname/foo.*bar.*') + + @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) + def testGetThirdPartyLibrariesWithoutRegex(self): + builder = LicenseBuilder([], [], {}, {}) + self.assertEquals( + builder._GetThirdPartyLibraries('out/arm', 'target1'), + set(['libname1', 'libname2', 'libname3'])) + + @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) + def testGetThirdPartyLibrariesWithRegex(self): + lib_regex_dict = { + 'libname2:c.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEquals( + builder._GetThirdPartyLibraries('out/arm', 'target1'), + set(['libname1', 'libname2:c.*', 'libname3'])) + + @mock.patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) + def 
testGenerateLicenseTextFailIfUnknownLibrary(self): + lib_dict = { + 'simple_library': ['path/to/LICENSE'], + } + builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {}) + + with self.assertRaises(Exception) as context: + builder.GenerateLicenseText('dummy/dir') + + self.assertEquals( + context.exception.message, + 'Missing licenses for following third_party targets: ' + 'libname1, libname2, libname3') if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/mb/OWNERS b/tools_webrtc/mb/OWNERS index 7717a25585..48e6927746 100644 --- a/tools_webrtc/mb/OWNERS +++ b/tools_webrtc/mb/OWNERS @@ -1,2 +1 @@ -phoglund@webrtc.org -ehmaldonado@webrtc.org +mbonadei@webrtc.org diff --git a/tools_webrtc/mb/gn_isolate_map.pyl b/tools_webrtc/mb/gn_isolate_map.pyl index e38c3b83a5..dba0d97571 100644 --- a/tools_webrtc/mb/gn_isolate_map.pyl +++ b/tools_webrtc/mb/gn_isolate_map.pyl @@ -27,6 +27,18 @@ "label": "//:android_junit_tests", "type": "junit_test", }, + "android_examples_junit_tests": { + "label": "//examples:android_examples_junit_tests", + "type": "junit_test", + }, + "android_sdk_junit_tests": { + "label": "//sdk/android:android_sdk_junit_tests", + "type": "junit_test", + }, + "apprtcmobile_tests": { + "label": "//examples:apprtcmobile_tests", + "type": "raw", + }, "audio_decoder_unittests": { "label": "//modules/audio_coding:audio_decoder_unittests", "type": "console_test_launcher", @@ -94,6 +106,14 @@ "label": "//:rtc_unittests", "type": "console_test_launcher", }, + "sdk_framework_unittests": { + "label": "//sdk:sdk_framework_unittests", + "type": "raw", + }, + "sdk_unittests": { + "label": "//sdk:sdk_unittests", + "type": "raw", + }, "slow_tests": { "label": "//:slow_tests", "type": "console_test_launcher", @@ -120,6 +140,10 @@ "label": "//:video_engine_tests", "type": "console_test_launcher", }, + "voip_unittests": { + "label": "//:voip_unittests", + "type": "console_test_launcher", + }, "webrtc_nonparallel_tests": { 
"label": "//:webrtc_nonparallel_tests", "type": "non_parallel_console_test_launcher", diff --git a/tools_webrtc/mb/mb.py b/tools_webrtc/mb/mb.py index f3e644128a..6287ca2366 100755 --- a/tools_webrtc/mb/mb.py +++ b/tools_webrtc/mb/mb.py @@ -56,6 +56,8 @@ def __init__(self): self.configs = {} self.masters = {} self.mixins = {} + self.isolate_exe = 'isolate.exe' if self.platform.startswith( + 'win') else 'isolate' def Main(self, args): self.ParseArgs(args) @@ -336,19 +338,37 @@ def _RunUnderSwarming(self, build_dir, target): for k, v in self.args.dimensions: dimensions += ['-d', k, v] + archive_json_path = self.ToSrcRelPath( + '%s/%s.archive.json' % (build_dir, target)) cmd = [ - self.executable, - self.PathJoin('tools', 'swarming_client', 'isolate.py'), + self.PathJoin(self.src_dir, 'tools', 'luci-go', self.isolate_exe), 'archive', + '-i', + self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), '-s', self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)), '-I', 'isolateserver.appspot.com', + '-dump-json', archive_json_path, ] - ret, out, _ = self.Run(cmd, force_verbose=False) + ret, _, _ = self.Run(cmd, force_verbose=False) if ret: return ret - isolated_hash = out.splitlines()[0].split()[0] + try: + archive_hashes = json.loads(self.ReadFile(archive_json_path)) + except Exception: + self.Print( + 'Failed to read JSON file "%s"' % archive_json_path, file=sys.stderr) + return 1 + try: + isolated_hash = archive_hashes[target] + except Exception: + self.Print( + 'Cannot find hash for "%s" in "%s", file content: %s' % + (target, archive_json_path, archive_hashes), + file=sys.stderr) + return 1 + cmd = [ self.executable, self.PathJoin('tools', 'swarming_client', 'swarming.py'), @@ -364,11 +384,10 @@ def _RunUnderSwarming(self, build_dir, target): def _RunLocallyIsolated(self, build_dir, target): cmd = [ - self.executable, - self.PathJoin('tools', 'swarming_client', 'isolate.py'), + self.PathJoin(self.src_dir, 'tools', 'luci-go', self.isolate_exe), 'run', - '-s', - 
self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)), + '-i', + self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), ] if self.args.extra_args: cmd += ['--'] + self.args.extra_args @@ -696,13 +715,10 @@ def RunGNIsolate(self, vals): extra_files) ret, _, _ = self.Run([ - self.executable, - self.PathJoin('tools', 'swarming_client', 'isolate.py'), + self.PathJoin(self.src_dir, 'tools', 'luci-go', self.isolate_exe), 'check', '-i', - self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), - '-s', - self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target))], + self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target))], buffer_output=False) return ret diff --git a/tools_webrtc/mb/mb_config.pyl b/tools_webrtc/mb/mb_config.pyl index 72aa18d50e..e3a0190be0 100644 --- a/tools_webrtc/mb/mb_config.pyl +++ b/tools_webrtc/mb/mb_config.pyl @@ -24,9 +24,9 @@ 'iOS32 Release': 'ios_release_bot_arm', 'iOS64 Debug': 'ios_debug_bot_arm64', 'iOS64 Release': 'ios_release_bot_arm64', - 'iOS64 Sim Debug (iOS 10.0)': 'ios_debug_bot_x64', - 'iOS64 Sim Debug (iOS 11)': 'ios_debug_bot_x64', 'iOS64 Sim Debug (iOS 12)': 'ios_debug_bot_x64', + 'iOS64 Sim Debug (iOS 13)': 'ios_debug_bot_x64', + 'iOS64 Sim Debug (iOS 14.0)': 'ios_debug_bot_x64', # Mac 'Mac64 Debug': 'debug_bot_x64', @@ -103,19 +103,20 @@ }, }, 'client.webrtc.perf': { - # Android - 'Android32 Builder': 'android_pure_release_bot_arm', - 'Android64 Builder': 'android_pure_release_bot_arm64', - 'Android32 Tests (J Nexus4)': 'none', - 'Android32 Tests (K Nexus5)': 'none', - 'Android32 Tests (L Nexus5)': 'none', - 'Android32 Tests (L Nexus6)': 'none', - 'Android32 Tests (L Nexus7.2)': 'none', - 'Android64 Tests (L Nexus9)': 'none', - 'Linux Trusty': 'pure_release_bot_x64', - 'Mac 10.11': 'pure_release_bot_x64', - 'Win7': 'win_clang_pure_release_bot_x86', + # These are here because testers need to gn gen + ninja for the + # webrtc_dashboard_upload target (otherwise a tester would not need to + # build anything). 
+ # TODO(http://crbug.com/1029452): Nuke these and isolate on builder + # instead? + 'Perf Android32 (M Nexus5)': 'release_bot_x64', + 'Perf Android32 (M AOSP Nexus6)': 'release_bot_x64', + 'Perf Android64 (M Nexus5X)': 'release_bot_x64', + 'Perf Android64 (O Pixel2)': 'release_bot_x64', + 'Perf Linux Trusty': 'release_bot_x64', + 'Perf Mac 10.11': 'release_bot_x64', + 'Perf Win7': 'release_bot_x64', }, + 'client.webrtc.fyi': { # Mac 'Mac (swarming)': 'release_bot_x64', @@ -152,9 +153,9 @@ 'ios_compile_arm_rel': 'ios_release_bot_arm', 'ios_compile_arm64_dbg': 'ios_debug_bot_arm64', 'ios_compile_arm64_rel': 'ios_release_bot_arm64', - 'ios_sim_x64_dbg_ios10': 'ios_debug_bot_x64', - 'ios_sim_x64_dbg_ios11': 'ios_debug_bot_x64', 'ios_sim_x64_dbg_ios12': 'ios_debug_bot_x64', + 'ios_sim_x64_dbg_ios13': 'ios_debug_bot_x64', + 'ios_sim_x64_dbg_ios14': 'ios_debug_bot_x64', # Mac 'mac_compile_dbg': 'debug_bot_x64', @@ -391,28 +392,31 @@ # iOS 'ios_debug_bot_arm': [ - 'ios', 'debug_bot', 'arm', 'no_ios_code_signing' + 'ios', 'debug_bot', 'arm', 'no_ios_code_signing', 'ios_use_goma_rbe' ], 'ios_release_bot_arm': [ - 'ios', 'release_bot', 'arm', 'no_ios_code_signing' + 'ios', 'release_bot', 'arm', 'no_ios_code_signing', 'ios_use_goma_rbe' ], 'ios_debug_bot_arm64': [ - 'ios', 'debug_bot', 'arm64', 'no_ios_code_signing' + 'ios', 'debug_bot', 'arm64', 'no_ios_code_signing', 'ios_use_goma_rbe' ], 'ios_release_bot_arm64': [ - 'ios', 'release_bot', 'arm64', 'no_ios_code_signing' + 'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'ios_use_goma_rbe' ], 'ios_internal_debug_bot_arm64': [ - 'ios', 'debug_bot', 'arm64' + 'ios', 'debug_bot', 'arm64', 'ios_use_goma_rbe', + 'ios_code_signing_identity_description', ], 'ios_internal_release_bot_arm64': [ - 'ios', 'release_bot', 'arm64' + 'ios', 'release_bot', 'arm64', 'ios_use_goma_rbe', + 'ios_code_signing_identity_description', ], 'ios_internal_pure_release_bot_arm64': [ - 'ios', 'pure_release_bot', 'arm64' + 'ios', 'pure_release_bot', 
'arm64', 'ios_use_goma_rbe', + 'ios_code_signing_identity_description', ], 'ios_debug_bot_x64': [ - 'ios', 'debug_bot', 'x64' + 'ios', 'debug_bot', 'x64', 'ios_use_goma_rbe' ], # More configs @@ -446,12 +450,6 @@ 'rtti_no_sctp_android_arm': [ 'android', 'debug_static_bot', 'arm', 'rtti', 'no_sctp' ], - - # This is used for tracking purposes; any bot that uses this config - # should never actually run MB. - 'none': [ - 'error', - ], }, # This is a dict mapping a given 'mixin' name to a dict of settings that @@ -498,14 +496,6 @@ 'mixins': ['debug', 'minimal_symbols', 'goma'], }, - # This mixin is used to force configs that use it to fail. It - # is used in two cases: when we have bots that we haven't looked - # at yet and don't know whether they need MB or not, and for bots - # that are test-only and should never run MB. - 'error': { - 'gn_args': 'error', - }, - 'full_symbols': { 'gn_args': 'symbol_level=2', }, @@ -519,6 +509,14 @@ 'gn_args': 'use_goma=true', }, + 'ios_code_signing_identity_description': { + 'gn_args': 'ios_code_signing_identity_description="Apple Development"', + }, + + 'ios_use_goma_rbe': { + 'gn_args': 'ios_use_goma_rbe=true', + }, + 'ios': { 'gn_args': 'target_os="ios"', }, diff --git a/tools_webrtc/mb/mb_unittest.py b/tools_webrtc/mb/mb_unittest.py index 3ae386986b..0cf93bb7f0 100755 --- a/tools_webrtc/mb/mb_unittest.py +++ b/tools_webrtc/mb/mb_unittest.py @@ -243,7 +243,7 @@ def test_gen(self): mbw=mbw, ret=0) self.assertEqual( mbw.files['/fake_src/out/Debug/args.gn'], - 'import("//build/args/bots/fake_master/fake_args_bot.gn")\n') + 'import("//build/args/bots/fake_master/fake_args_bot.gn")\n\n') def test_gen_fails(self): @@ -760,16 +760,11 @@ def test_run_swarmed(self): '/fake_src/out/Default/base_unittests.runtime_deps': ( "base_unittests\n" ), + 'out/Default/base_unittests.archive.json': ( + "{\"base_unittests\":\"fake_hash\"}"), } - def run_stub(cmd, **_kwargs): - if 'isolate.py' in cmd[1]: - return 0, 'fake_hash base_unittests', '' - else: 
- return 0, '', '' - mbw = self.fake_mbw(files=files) - mbw.Run = run_stub self.check(['run', '-s', '-c', 'debug_goma', '//out/Default', 'base_unittests'], mbw=mbw, ret=0) self.check(['run', '-s', '-c', 'debug_goma', '-d', 'os', 'Win7', diff --git a/tools_webrtc/msan/OWNERS b/tools_webrtc/msan/OWNERS deleted file mode 100644 index 524e2676ff..0000000000 --- a/tools_webrtc/msan/OWNERS +++ /dev/null @@ -1 +0,0 @@ -phoglund@webrtc.org diff --git a/tools_webrtc/msan/blacklist.txt b/tools_webrtc/msan/suppressions.txt similarity index 77% rename from tools_webrtc/msan/blacklist.txt rename to tools_webrtc/msan/suppressions.txt index 3c9c9b202b..ce8b14292e 100644 --- a/tools_webrtc/msan/blacklist.txt +++ b/tools_webrtc/msan/suppressions.txt @@ -10,3 +10,6 @@ # Uninit in zlib. http://crbug.com/116277 fun:*MOZ_Z_deflate* +# Uninit in H264. http://crbug.com/webrtc/11702 +src:*/third_party/openh264/src/codec/processing/src/vaacalc/vaacalcfuncs.cpp + diff --git a/tools_webrtc/network_emulator/config.py b/tools_webrtc/network_emulator/config.py index 60fa485db4..c1d3eaf3d1 100644 --- a/tools_webrtc/network_emulator/config.py +++ b/tools_webrtc/network_emulator/config.py @@ -6,31 +6,31 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- """Configuration class for network emulation.""" class ConnectionConfig(object): - """Configuration containing the characteristics of a network connection.""" + """Configuration containing the characteristics of a network connection.""" - def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms, - packet_loss_percent, queue_slots): - self.num = num - self.name = name - self.receive_bw_kbps = receive_bw_kbps - self.send_bw_kbps = send_bw_kbps - self.delay_ms = delay_ms - self.packet_loss_percent = packet_loss_percent - self.queue_slots = queue_slots + def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms, + packet_loss_percent, queue_slots): + self.num = num + self.name = name + self.receive_bw_kbps = receive_bw_kbps + self.send_bw_kbps = send_bw_kbps + self.delay_ms = delay_ms + self.packet_loss_percent = packet_loss_percent + self.queue_slots = queue_slots - def __str__(self): - """String representing the configuration. + def __str__(self): + """String representing the configuration. Returns: A string formatted and padded like this example: 12 Name 375 kbps 375 kbps 10 145 ms 0.1 % """ - left_aligned_name = self.name.ljust(24, ' ') - return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % ( - self.num, left_aligned_name, self.receive_bw_kbps, self.send_bw_kbps, - self.queue_slots, self.delay_ms, self.packet_loss_percent) + left_aligned_name = self.name.ljust(24, ' ') + return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % ( + self.num, left_aligned_name, self.receive_bw_kbps, + self.send_bw_kbps, self.queue_slots, self.delay_ms, + self.packet_loss_percent) diff --git a/tools_webrtc/network_emulator/emulate.py b/tools_webrtc/network_emulator/emulate.py index 08049a5424..51224c80b1 100755 --- a/tools_webrtc/network_emulator/emulate.py +++ b/tools_webrtc/network_emulator/emulate.py @@ -6,10 +6,8 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. 
All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Script for constraining traffic on the local machine.""" - import logging import optparse import socket @@ -18,7 +16,6 @@ import config import network_emulator - _DEFAULT_LOG_LEVEL = logging.INFO # Default port range to apply network constraints on. @@ -41,7 +38,7 @@ config.ConnectionConfig(12, 'Wifi, Average Case', 40000, 33000, 1, 0, 100), config.ConnectionConfig(13, 'Wifi, Good', 45000, 40000, 1, 0, 100), config.ConnectionConfig(14, 'Wifi, Lossy', 40000, 33000, 1, 0, 100), - ] +] _PRESETS_DICT = dict((p.num, p) for p in _PRESETS) _DEFAULT_PRESET_ID = 2 @@ -49,147 +46,170 @@ class NonStrippingEpilogOptionParser(optparse.OptionParser): - """Custom parser to let us show the epilog without weird line breaking.""" + """Custom parser to let us show the epilog without weird line breaking.""" - def format_epilog(self, formatter): - return self.epilog + def format_epilog(self, formatter): + return self.epilog def _GetExternalIp(): - """Finds out the machine's external IP by connecting to google.com.""" - external_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - external_socket.connect(('google.com', 80)) - return external_socket.getsockname()[0] + """Finds out the machine's external IP by connecting to google.com.""" + external_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + external_socket.connect(('google.com', 80)) + return external_socket.getsockname()[0] def _ParseArgs(): - """Define and parse the command-line arguments.""" - presets_string = '\n'.join(str(p) for p in _PRESETS) - parser = NonStrippingEpilogOptionParser(epilog=( - '\nAvailable presets:\n' - ' Bandwidth (kbps) Packet\n' - 'ID Name Receive Send Queue Delay loss \n' - '-- ---- --------- -------- ----- ------- ------\n' - '%s\n' % presets_string)) - parser.add_option('-p', '--preset', type='int', default=_DEFAULT_PRESET_ID, - help=('ConnectionConfig configuration, specified by 
ID. ' - 'Default: %default')) - parser.add_option('-r', '--receive-bw', type='int', - default=_DEFAULT_PRESET.receive_bw_kbps, - help=('Receive bandwidth in kilobit/s. Default: %default')) - parser.add_option('-s', '--send-bw', type='int', - default=_DEFAULT_PRESET.send_bw_kbps, - help=('Send bandwidth in kilobit/s. Default: %default')) - parser.add_option('-d', '--delay', type='int', - default=_DEFAULT_PRESET.delay_ms, - help=('Delay in ms. Default: %default')) - parser.add_option('-l', '--packet-loss', type='float', - default=_DEFAULT_PRESET.packet_loss_percent, - help=('Packet loss in %. Default: %default')) - parser.add_option('-q', '--queue', type='int', - default=_DEFAULT_PRESET.queue_slots, - help=('Queue size as number of slots. Default: %default')) - parser.add_option('--port-range', default='%s,%s' % _DEFAULT_PORT_RANGE, - help=('Range of ports for constrained network. Specify as ' - 'two comma separated integers. Default: %default')) - parser.add_option('--target-ip', default=None, - help=('The interface IP address to apply the rules for. ' - 'Default: the external facing interface IP address.')) - parser.add_option('-v', '--verbose', action='store_true', default=False, - help=('Turn on verbose output. Will print all \'ipfw\' ' - 'commands that are executed.')) - - options = parser.parse_args()[0] - - # Find preset by ID, if specified. - if options.preset and not _PRESETS_DICT.has_key(options.preset): - parser.error('Invalid preset: %s' % options.preset) - - # Simple validation of the IP address, if supplied. 
- if options.target_ip: + """Define and parse the command-line arguments.""" + presets_string = '\n'.join(str(p) for p in _PRESETS) + parser = NonStrippingEpilogOptionParser(epilog=( + '\nAvailable presets:\n' + ' Bandwidth (kbps) Packet\n' + 'ID Name Receive Send Queue Delay loss \n' + '-- ---- --------- -------- ----- ------- ------\n' + '%s\n' % presets_string)) + parser.add_option('-p', + '--preset', + type='int', + default=_DEFAULT_PRESET_ID, + help=('ConnectionConfig configuration, specified by ID. ' + 'Default: %default')) + parser.add_option( + '-r', + '--receive-bw', + type='int', + default=_DEFAULT_PRESET.receive_bw_kbps, + help=('Receive bandwidth in kilobit/s. Default: %default')) + parser.add_option('-s', + '--send-bw', + type='int', + default=_DEFAULT_PRESET.send_bw_kbps, + help=('Send bandwidth in kilobit/s. Default: %default')) + parser.add_option('-d', + '--delay', + type='int', + default=_DEFAULT_PRESET.delay_ms, + help=('Delay in ms. Default: %default')) + parser.add_option('-l', + '--packet-loss', + type='float', + default=_DEFAULT_PRESET.packet_loss_percent, + help=('Packet loss in %. Default: %default')) + parser.add_option( + '-q', + '--queue', + type='int', + default=_DEFAULT_PRESET.queue_slots, + help=('Queue size as number of slots. Default: %default')) + parser.add_option( + '--port-range', + default='%s,%s' % _DEFAULT_PORT_RANGE, + help=('Range of ports for constrained network. Specify as ' + 'two comma separated integers. Default: %default')) + parser.add_option( + '--target-ip', + default=None, + help=('The interface IP address to apply the rules for. ' + 'Default: the external facing interface IP address.')) + parser.add_option('-v', + '--verbose', + action='store_true', + default=False, + help=('Turn on verbose output. Will print all \'ipfw\' ' + 'commands that are executed.')) + + options = parser.parse_args()[0] + + # Find preset by ID, if specified. 
+ if options.preset and not _PRESETS_DICT.has_key(options.preset): + parser.error('Invalid preset: %s' % options.preset) + + # Simple validation of the IP address, if supplied. + if options.target_ip: + try: + socket.inet_aton(options.target_ip) + except socket.error: + parser.error('Invalid IP address specified: %s' % + options.target_ip) + + # Convert port range into the desired tuple format. try: - socket.inet_aton(options.target_ip) - except socket.error: - parser.error('Invalid IP address specified: %s' % options.target_ip) + if isinstance(options.port_range, str): + options.port_range = tuple( + int(port) for port in options.port_range.split(',')) + if len(options.port_range) != 2: + parser.error( + 'Invalid port range specified, please specify two ' + 'integers separated by a comma.') + except ValueError: + parser.error('Invalid port range specified.') - # Convert port range into the desired tuple format. - try: - if isinstance(options.port_range, str): - options.port_range = tuple(int(port) for port in - options.port_range.split(',')) - if len(options.port_range) != 2: - parser.error('Invalid port range specified, please specify two ' - 'integers separated by a comma.') - except ValueError: - parser.error('Invalid port range specified.') - - _InitLogging(options.verbose) - return options + _InitLogging(options.verbose) + return options def _InitLogging(verbose): - """Setup logging.""" - log_level = _DEFAULT_LOG_LEVEL - if verbose: - log_level = logging.DEBUG - logging.basicConfig(level=log_level, format='%(message)s') + """Setup logging.""" + log_level = _DEFAULT_LOG_LEVEL + if verbose: + log_level = logging.DEBUG + logging.basicConfig(level=log_level, format='%(message)s') def main(): - options = _ParseArgs() - - # Build a configuration object. Override any preset configuration settings if - # a value of a setting was also given as a flag. 
- connection_config = _PRESETS_DICT[options.preset] - if options.receive_bw is not _DEFAULT_PRESET.receive_bw_kbps: - connection_config.receive_bw_kbps = options.receive_bw - if options.send_bw is not _DEFAULT_PRESET.send_bw_kbps: - connection_config.send_bw_kbps = options.send_bw - if options.delay is not _DEFAULT_PRESET.delay_ms: - connection_config.delay_ms = options.delay - if options.packet_loss is not _DEFAULT_PRESET.packet_loss_percent: - connection_config.packet_loss_percent = options.packet_loss - if options.queue is not _DEFAULT_PRESET.queue_slots: - connection_config.queue_slots = options.queue - emulator = network_emulator.NetworkEmulator(connection_config, - options.port_range) - try: - emulator.CheckPermissions() - except network_emulator.NetworkEmulatorError as e: - logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error) - return -1 - - if not options.target_ip: - external_ip = _GetExternalIp() - else: - external_ip = options.target_ip - - logging.info('Constraining traffic to/from IP: %s', external_ip) - try: - emulator.Emulate(external_ip) - logging.info('Started network emulation with the following configuration:\n' - ' Receive bandwidth: %s kbps (%s kB/s)\n' - ' Send bandwidth : %s kbps (%s kB/s)\n' - ' Delay : %s ms\n' - ' Packet loss : %s %%\n' - ' Queue slots : %s', - connection_config.receive_bw_kbps, - connection_config.receive_bw_kbps/8, - connection_config.send_bw_kbps, - connection_config.send_bw_kbps/8, - connection_config.delay_ms, - connection_config.packet_loss_percent, - connection_config.queue_slots) - logging.info('Affected traffic: IP traffic on ports %s-%s', - options.port_range[0], options.port_range[1]) - raw_input('Press Enter to abort Network Emulation...') - logging.info('Flushing all Dummynet rules...') - network_emulator.Cleanup() - logging.info('Completed Network Emulation.') - return 0 - except network_emulator.NetworkEmulatorError as e: - logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error) - return -2 + 
options = _ParseArgs() + + # Build a configuration object. Override any preset configuration settings if + # a value of a setting was also given as a flag. + connection_config = _PRESETS_DICT[options.preset] + if options.receive_bw is not _DEFAULT_PRESET.receive_bw_kbps: + connection_config.receive_bw_kbps = options.receive_bw + if options.send_bw is not _DEFAULT_PRESET.send_bw_kbps: + connection_config.send_bw_kbps = options.send_bw + if options.delay is not _DEFAULT_PRESET.delay_ms: + connection_config.delay_ms = options.delay + if options.packet_loss is not _DEFAULT_PRESET.packet_loss_percent: + connection_config.packet_loss_percent = options.packet_loss + if options.queue is not _DEFAULT_PRESET.queue_slots: + connection_config.queue_slots = options.queue + emulator = network_emulator.NetworkEmulator(connection_config, + options.port_range) + try: + emulator.CheckPermissions() + except network_emulator.NetworkEmulatorError as e: + logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error) + return -1 + + if not options.target_ip: + external_ip = _GetExternalIp() + else: + external_ip = options.target_ip + + logging.info('Constraining traffic to/from IP: %s', external_ip) + try: + emulator.Emulate(external_ip) + logging.info( + 'Started network emulation with the following configuration:\n' + ' Receive bandwidth: %s kbps (%s kB/s)\n' + ' Send bandwidth : %s kbps (%s kB/s)\n' + ' Delay : %s ms\n' + ' Packet loss : %s %%\n' + ' Queue slots : %s', connection_config.receive_bw_kbps, + connection_config.receive_bw_kbps / 8, + connection_config.send_bw_kbps, connection_config.send_bw_kbps / 8, + connection_config.delay_ms, connection_config.packet_loss_percent, + connection_config.queue_slots) + logging.info('Affected traffic: IP traffic on ports %s-%s', + options.port_range[0], options.port_range[1]) + raw_input('Press Enter to abort Network Emulation...') + logging.info('Flushing all Dummynet rules...') + network_emulator.Cleanup() + logging.info('Completed Network 
Emulation.') + return 0 + except network_emulator.NetworkEmulatorError as e: + logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error) + return -2 + if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/network_emulator/network_emulator.py b/tools_webrtc/network_emulator/network_emulator.py index aa3ebda4c0..f77753b0f6 100644 --- a/tools_webrtc/network_emulator/network_emulator.py +++ b/tools_webrtc/network_emulator/network_emulator.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Script for constraining traffic on the local machine.""" import ctypes @@ -17,7 +16,7 @@ class NetworkEmulatorError(BaseException): - """Exception raised for errors in the network emulator. + """Exception raised for errors in the network emulator. Attributes: fail_msg: User defined error message. @@ -27,81 +26,88 @@ class NetworkEmulatorError(BaseException): stderr: Error output of running the command. """ - def __init__(self, fail_msg, cmd=None, returncode=None, output=None, - error=None): - BaseException.__init__(self, fail_msg) - self.fail_msg = fail_msg - self.cmd = cmd - self.returncode = returncode - self.output = output - self.error = error + def __init__(self, + fail_msg, + cmd=None, + returncode=None, + output=None, + error=None): + BaseException.__init__(self, fail_msg) + self.fail_msg = fail_msg + self.cmd = cmd + self.returncode = returncode + self.output = output + self.error = error class NetworkEmulator(object): - """A network emulator that can constrain the network using Dummynet.""" + """A network emulator that can constrain the network using Dummynet.""" - def __init__(self, connection_config, port_range): - """Constructor. + def __init__(self, connection_config, port_range): + """Constructor. 
Args: connection_config: A config.ConnectionConfig object containing the characteristics for the connection to be emulation. port_range: Tuple containing two integers defining the port range. """ - self._pipe_counter = 0 - self._rule_counter = 0 - self._port_range = port_range - self._connection_config = connection_config + self._pipe_counter = 0 + self._rule_counter = 0 + self._port_range = port_range + self._connection_config = connection_config - def Emulate(self, target_ip): - """Starts a network emulation by setting up Dummynet rules. + def Emulate(self, target_ip): + """Starts a network emulation by setting up Dummynet rules. Args: target_ip: The IP address of the interface that shall be that have the network constraints applied to it. """ - receive_pipe_id = self._CreateDummynetPipe( - self._connection_config.receive_bw_kbps, - self._connection_config.delay_ms, - self._connection_config.packet_loss_percent, - self._connection_config.queue_slots) - logging.debug('Created receive pipe: %s', receive_pipe_id) - send_pipe_id = self._CreateDummynetPipe( - self._connection_config.send_bw_kbps, - self._connection_config.delay_ms, - self._connection_config.packet_loss_percent, - self._connection_config.queue_slots) - logging.debug('Created send pipe: %s', send_pipe_id) - - # Adding the rules will start the emulation. - incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any', - target_ip, self._port_range) - logging.debug('Created incoming rule: %s', incoming_rule_id) - outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, - 'any', self._port_range) - logging.debug('Created outgoing rule: %s', outgoing_rule_id) - - @staticmethod - def CheckPermissions(): - """Checks if permissions are available to run Dummynet commands. 
+ receive_pipe_id = self._CreateDummynetPipe( + self._connection_config.receive_bw_kbps, + self._connection_config.delay_ms, + self._connection_config.packet_loss_percent, + self._connection_config.queue_slots) + logging.debug('Created receive pipe: %s', receive_pipe_id) + send_pipe_id = self._CreateDummynetPipe( + self._connection_config.send_bw_kbps, + self._connection_config.delay_ms, + self._connection_config.packet_loss_percent, + self._connection_config.queue_slots) + logging.debug('Created send pipe: %s', send_pipe_id) + + # Adding the rules will start the emulation. + incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any', + target_ip, + self._port_range) + logging.debug('Created incoming rule: %s', incoming_rule_id) + outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, + 'any', self._port_range) + logging.debug('Created outgoing rule: %s', outgoing_rule_id) + + @staticmethod + def CheckPermissions(): + """Checks if permissions are available to run Dummynet commands. Raises: NetworkEmulatorError: If permissions to run Dummynet commands are not available. """ - try: - if os.getuid() != 0: - raise NetworkEmulatorError('You must run this script with sudo.') - except AttributeError: - - # AttributeError will be raised on Windows. - if ctypes.windll.shell32.IsUserAnAdmin() == 0: - raise NetworkEmulatorError('You must run this script with administrator' - ' privileges.') - - def _CreateDummynetRule(self, pipe_id, from_address, to_address, - port_range): - """Creates a network emulation rule and returns its ID. + try: + if os.getuid() != 0: + raise NetworkEmulatorError( + 'You must run this script with sudo.') + except AttributeError: + + # AttributeError will be raised on Windows. 
+ if ctypes.windll.shell32.IsUserAnAdmin() == 0: + raise NetworkEmulatorError( + 'You must run this script with administrator' + ' privileges.') + + def _CreateDummynetRule(self, pipe_id, from_address, to_address, + port_range): + """Creates a network emulation rule and returns its ID. Args: pipe_id: integer ID of the pipe. @@ -115,18 +121,22 @@ def _CreateDummynetRule(self, pipe_id, from_address, to_address, The ID of the rule, starting at 100. The rule ID increments with 100 for each rule being added. """ - self._rule_counter += 100 - add_part = ['add', self._rule_counter, 'pipe', pipe_id, - 'ip', 'from', from_address, 'to', to_address] - _RunIpfwCommand(add_part + ['src-port', '%s-%s' % port_range], - 'Failed to add Dummynet src-port rule.') - _RunIpfwCommand(add_part + ['dst-port', '%s-%s' % port_range], - 'Failed to add Dummynet dst-port rule.') - return self._rule_counter - - def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, packet_loss_percent, - queue_slots): - """Creates a Dummynet pipe and return its ID. + self._rule_counter += 100 + add_part = [ + 'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from', + from_address, 'to', to_address + ] + _RunIpfwCommand(add_part + + ['src-port', '%s-%s' % port_range], + 'Failed to add Dummynet src-port rule.') + _RunIpfwCommand(add_part + + ['dst-port', '%s-%s' % port_range], + 'Failed to add Dummynet dst-port rule.') + return self._rule_counter + + def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, + packet_loss_percent, queue_slots): + """Creates a Dummynet pipe and return its ID. Args: bandwidth_kbps: Bandwidth. @@ -136,32 +146,34 @@ def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, packet_loss_percent, Returns: The ID of the pipe, starting at 1. 
""" - self._pipe_counter += 1 - cmd = ['pipe', self._pipe_counter, 'config', - 'bw', str(bandwidth_kbps/8) + 'KByte/s', - 'delay', '%sms' % delay_ms, - 'plr', (packet_loss_percent/100.0), - 'queue', queue_slots] - error_message = 'Failed to create Dummynet pipe. ' - if sys.platform.startswith('linux'): - error_message += ('Make sure you have loaded the ipfw_mod.ko module to ' - 'your kernel (sudo insmod /path/to/ipfw_mod.ko).') - _RunIpfwCommand(cmd, error_message) - return self._pipe_counter + self._pipe_counter += 1 + cmd = [ + 'pipe', self._pipe_counter, 'config', 'bw', + str(bandwidth_kbps / 8) + 'KByte/s', 'delay', + '%sms' % delay_ms, 'plr', (packet_loss_percent / 100.0), 'queue', + queue_slots + ] + error_message = 'Failed to create Dummynet pipe. ' + if sys.platform.startswith('linux'): + error_message += ( + 'Make sure you have loaded the ipfw_mod.ko module to ' + 'your kernel (sudo insmod /path/to/ipfw_mod.ko).') + _RunIpfwCommand(cmd, error_message) + return self._pipe_counter + def Cleanup(): - """Stops the network emulation by flushing all Dummynet rules. + """Stops the network emulation by flushing all Dummynet rules. Notice that this will flush any rules that may have been created previously before starting the emulation. """ - _RunIpfwCommand(['-f', 'flush'], - 'Failed to flush Dummynet rules!') - _RunIpfwCommand(['-f', 'pipe', 'flush'], - 'Failed to flush Dummynet pipes!') + _RunIpfwCommand(['-f', 'flush'], 'Failed to flush Dummynet rules!') + _RunIpfwCommand(['-f', 'pipe', 'flush'], 'Failed to flush Dummynet pipes!') + def _RunIpfwCommand(command, fail_msg=None): - """Executes a command and prefixes the appropriate command for + """Executes a command and prefixes the appropriate command for Windows or Linux/UNIX. Args: @@ -172,18 +184,19 @@ def _RunIpfwCommand(command, fail_msg=None): NetworkEmulatorError: If command fails a message is set by the fail_msg parameter. 
""" - if sys.platform == 'win32': - ipfw_command = ['ipfw.exe'] - else: - ipfw_command = ['sudo', '-n', 'ipfw'] - - cmd_list = ipfw_command[:] + [str(x) for x in command] - cmd_string = ' '.join(cmd_list) - logging.debug('Running command: %s', cmd_string) - process = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - output, error = process.communicate() - if process.returncode != 0: - raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, output, - error) - return output.strip() + if sys.platform == 'win32': + ipfw_command = ['ipfw.exe'] + else: + ipfw_command = ['sudo', '-n', 'ipfw'] + + cmd_list = ipfw_command[:] + [str(x) for x in command] + cmd_string = ' '.join(cmd_list) + logging.debug('Running command: %s', cmd_string) + process = subprocess.Popen(cmd_list, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + output, error = process.communicate() + if process.returncode != 0: + raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, + output, error) + return output.strip() diff --git a/tools_webrtc/perf/catapult_uploader.py b/tools_webrtc/perf/catapult_uploader.py new file mode 100644 index 0000000000..f3333987d6 --- /dev/null +++ b/tools_webrtc/perf/catapult_uploader.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import datetime +import httplib2 +import json +import subprocess +import time +import zlib + +from tracing.value import histogram +from tracing.value import histogram_set +from tracing.value.diagnostics import generic_set +from tracing.value.diagnostics import reserved_infos + + +def _GenerateOauthToken(): + args = ['luci-auth', 'token'] + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if p.wait() == 0: + output = p.stdout.read() + return output.strip() + else: + raise RuntimeError( + 'Error generating authentication token.\nStdout: %s\nStderr:%s' % + (p.stdout.read(), p.stderr.read())) + + +def _SendHistogramSet(url, histograms, oauth_token): + """Make a HTTP POST with the given JSON to the Performance Dashboard. + + Args: + url: URL of Performance Dashboard instance, e.g. + "https://chromeperf.appspot.com". + histograms: a histogram set object that contains the data to be sent. + oauth_token: An oauth token to use for authorization. + """ + headers = {'Authorization': 'Bearer %s' % oauth_token} + + serialized = json.dumps(_ApplyHacks(histograms.AsDicts()), indent=4) + + if url.startswith('http://localhost'): + # The catapult server turns off compression in developer mode. + data = serialized + else: + data = zlib.compress(serialized) + + print 'Sending %d bytes to %s.' % (len(data), url + '/add_histograms') + + http = httplib2.Http() + response, content = http.request(url + '/add_histograms', + method='POST', + body=data, + headers=headers) + return response, content + + +def _WaitForUploadConfirmation(url, oauth_token, upload_token, wait_timeout, + wait_polling_period): + """Make a HTTP GET requests to the Performance Dashboard untill upload + status is known or the time is out. + + Args: + url: URL of Performance Dashboard instance, e.g. + "https://chromeperf.appspot.com". + oauth_token: An oauth token to use for authorization. + upload_token: String that identifies Performance Dashboard and can be used + for the status check. 
+ wait_timeout: (datetime.timedelta) Maximum time to wait for the + confirmation. + wait_polling_period: (datetime.timedelta) Performance Dashboard will be + polled every wait_polling_period amount of time. + """ + assert wait_polling_period <= wait_timeout + + headers = {'Authorization': 'Bearer %s' % oauth_token} + http = httplib2.Http() + + response = None + resp_json = None + current_time = datetime.datetime.now() + end_time = current_time + wait_timeout + next_poll_time = current_time + wait_polling_period + while datetime.datetime.now() < end_time: + current_time = datetime.datetime.now() + if next_poll_time > current_time: + time.sleep((next_poll_time - current_time).total_seconds()) + next_poll_time = datetime.datetime.now() + wait_polling_period + + response, content = http.request(url + '/uploads' + upload_token, + method='GET', headers=headers) + resp_json = json.loads(content) + + print 'Upload state polled. Response: %s.' % content + + if (response.status != 200 or + resp_json['state'] == 'COMPLETED' or + resp_json['state'] == 'FAILED'): + break + + return response, resp_json + + +# TODO(https://crbug.com/1029452): HACKHACK +# Remove once we have doubles in the proto and handle -infinity correctly. 
+def _ApplyHacks(dicts): + for d in dicts: + if 'running' in d: + + def _NoInf(value): + if value == float('inf'): + return histogram.JS_MAX_VALUE + if value == float('-inf'): + return -histogram.JS_MAX_VALUE + return value + + d['running'] = [_NoInf(value) for value in d['running']] + + return dicts + + +def _LoadHistogramSetFromProto(options): + hs = histogram_set.HistogramSet() + with options.input_results_file as f: + hs.ImportProto(f.read()) + + return hs + + +def _AddBuildInfo(histograms, options): + common_diagnostics = { + reserved_infos.MASTERS: options.perf_dashboard_machine_group, + reserved_infos.BOTS: options.bot, + reserved_infos.POINT_ID: options.commit_position, + reserved_infos.BENCHMARKS: options.test_suite, + reserved_infos.WEBRTC_REVISIONS: str(options.webrtc_git_hash), + reserved_infos.BUILD_URLS: options.build_page_url, + } + + for k, v in common_diagnostics.items(): + histograms.AddSharedDiagnosticToAllHistograms( + k.name, generic_set.GenericSet([v])) + + +def _DumpOutput(histograms, output_file): + with output_file: + json.dump(_ApplyHacks(histograms.AsDicts()), output_file, indent=4) + + +def UploadToDashboard(options): + histograms = _LoadHistogramSetFromProto(options) + _AddBuildInfo(histograms, options) + + if options.output_json_file: + _DumpOutput(histograms, options.output_json_file) + + oauth_token = _GenerateOauthToken() + response, content = _SendHistogramSet( + options.dashboard_url, histograms, oauth_token) + + upload_token = json.loads(content).get('token') + if not options.wait_for_upload or not upload_token: + print 'Not waiting for upload status confirmation.' + if response.status == 200: + print 'Received 200 from dashboard.' 
+ return 0 + else: + print('Upload failed with %d: %s\n\n%s' % (response.status, + response.reason, content)) + return 1 + + response, resp_json = _WaitForUploadConfirmation( + options.dashboard_url, + oauth_token, + upload_token, + datetime.timedelta(seconds=options.wait_timeout_sec), + datetime.timedelta(seconds=options.wait_polling_period_sec)) + + if response.status != 200 or resp_json['state'] == 'FAILED': + print('Upload failed with %d: %s\n\n%s' % (response.status, + response.reason, + str(resp_json))) + return 1 + + if resp_json['state'] == 'COMPLETED': + print 'Upload completed.' + return 0 + + print('Upload wasn\'t completed in a given time: %d.', options.wait_timeout) + return 1 diff --git a/tools_webrtc/perf/webrtc_dashboard_upload.py b/tools_webrtc/perf/webrtc_dashboard_upload.py index 67729d0b25..0d42e9ccf7 100644 --- a/tools_webrtc/perf/webrtc_dashboard_upload.py +++ b/tools_webrtc/perf/webrtc_dashboard_upload.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Adds build info to perf results and uploads them. The tests don't know which bot executed the tests or at what revision, so we @@ -19,173 +18,99 @@ """ import argparse -import httplib2 -import json import os import sys -import subprocess -import zlib - -# We just yank the python scripts we require into the PYTHONPATH. You could also -# imagine a solution where we use for instance protobuf:py_proto_runtime to copy -# catapult and protobuf code to out/, but this approach is allowed by -# convention. Fortunately neither catapult nor protobuf require any build rules -# to be executed. We can't do this for the histogram proto stub though because -# it's generated; see _LoadHistogramSetFromProto. -# -# It would be better if there was an equivalent to py_binary in GN, but there's -# not. 
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) -CHECKOUT_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir)) -sys.path.insert(0, os.path.join(CHECKOUT_ROOT, 'third_party', 'catapult', - 'tracing')) -sys.path.insert(0, os.path.join(CHECKOUT_ROOT, 'third_party', 'protobuf', - 'python')) - -from tracing.value import histogram_set -from tracing.value.diagnostics import generic_set -from tracing.value.diagnostics import reserved_infos - -from google.protobuf import json_format - - -def _GenerateOauthToken(): - args = ['luci-auth', 'token'] - p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - if p.wait() == 0: - output = p.stdout.read() - return output.strip() - else: - raise RuntimeError( - 'Error generating authentication token.\nStdout: %s\nStderr:%s' % - (p.stdout.read(), p.stderr.read())) - - -def _SendHistogramSet(url, histograms, oauth_token): - """Make a HTTP POST with the given JSON to the Performance Dashboard. - - Args: - url: URL of Performance Dashboard instance, e.g. - "https://chromeperf.appspot.com". - histograms: a histogram set object that contains the data to be sent. - oauth_token: An oauth token to use for authorization. - """ - headers = {'Authorization': 'Bearer %s' % oauth_token} - serialized = json.dumps(histograms.AsDicts(), indent=4) - - if url.startswith('http://localhost'): - # The catapult server turns off compression in developer mode. - data = serialized - else: - data = zlib.compress(serialized) - - http = httplib2.Http() - response, content = http.request(url + '/add_histograms', method='POST', - body=data, headers=headers) - return response, content - - -def _LoadHistogramSetFromProto(options): - # The webrtc_dashboard_upload gn rule will build the protobuf stub for python, - # so put it in the path for this script before we attempt to import it. 
- histogram_proto_path = os.path.join(options.outdir, 'pyproto', 'tracing', - 'tracing', 'proto') - sys.path.insert(0, histogram_proto_path) - - # TODO(https://crbug.com/1029452): Get rid of this import hack once we can - # just hand the contents of input_results_file straight to the histogram set. - try: - import histogram_pb2 - except ImportError: - raise ImportError('Could not find histogram_pb2. You need to build the ' - 'webrtc_dashboard_upload target before invoking this ' - 'script. Expected to find ' - 'histogram_pb2 in %s.' % histogram_proto_path) - - with options.input_results_file as f: - histograms = histogram_pb2.HistogramSet() - histograms.ParseFromString(f.read()) - - # TODO(https://crbug.com/1029452): Don't convert to JSON as a middle step once - # there is a proto de-serializer ready in catapult. - json_data = json.loads(json_format.MessageToJson(histograms)) - hs = histogram_set.HistogramSet() - hs.ImportDicts(json_data) - return hs - - -def _AddBuildInfo(histograms, options): - common_diagnostics = { - reserved_infos.MASTERS: options.perf_dashboard_machine_group, - reserved_infos.BOTS: options.bot, - reserved_infos.POINT_ID: options.commit_position, - reserved_infos.BENCHMARKS: options.test_suite, - reserved_infos.WEBRTC_REVISIONS: str(options.webrtc_git_hash), - reserved_infos.BUILD_URLS: options.build_page_url, - } - - for k, v in common_diagnostics.items(): - histograms.AddSharedDiagnosticToAllHistograms( - k.name, generic_set.GenericSet([v])) - - -def _DumpOutput(histograms, output_file): - with output_file: - json.dump(histograms.AsDicts(), output_file, indent=4) def _CreateParser(): - parser = argparse.ArgumentParser() - parser.add_argument('--perf-dashboard-machine-group', required=True, - help='The "master" the bots are grouped under. This ' - 'string is the group in the the perf dashboard path ' - 'group/bot/perf_id/metric/subtest.') - parser.add_argument('--bot', required=True, - help='The bot running the test (e.g. 
' - 'webrtc-win-large-tests).') - parser.add_argument('--test-suite', required=True, - help='The key for the test in the dashboard (i.e. what ' - 'you select in the top-level test suite selector in the ' - 'dashboard') - parser.add_argument('--webrtc-git-hash', required=True, - help='webrtc.googlesource.com commit hash.') - parser.add_argument('--commit-position', type=int, required=True, - help='Commit pos corresponding to the git hash.') - parser.add_argument('--build-page-url', required=True, - help='URL to the build page for this build.') - parser.add_argument('--dashboard-url', required=True, - help='Which dashboard to use.') - parser.add_argument('--input-results-file', type=argparse.FileType(), - required=True, - help='A JSON file with output from WebRTC tests.') - parser.add_argument('--output-json-file', type=argparse.FileType('w'), - help='Where to write the output (for debugging).') - parser.add_argument('--outdir', required=True, - help='Path to the local out/ dir (usually out/Default)') - return parser + parser = argparse.ArgumentParser() + parser.add_argument('--perf-dashboard-machine-group', required=True, + help='The "master" the bots are grouped under. This ' + 'string is the group in the the perf dashboard path ' + 'group/bot/perf_id/metric/subtest.') + parser.add_argument('--bot', required=True, + help='The bot running the test (e.g. ' + 'webrtc-win-large-tests).') + parser.add_argument('--test-suite', required=True, + help='The key for the test in the dashboard (i.e. 
what ' + 'you select in the top-level test suite selector in ' + 'the dashboard') + parser.add_argument('--webrtc-git-hash', required=True, + help='webrtc.googlesource.com commit hash.') + parser.add_argument('--commit-position', type=int, required=True, + help='Commit pos corresponding to the git hash.') + parser.add_argument('--build-page-url', required=True, + help='URL to the build page for this build.') + parser.add_argument('--dashboard-url', required=True, + help='Which dashboard to use.') + parser.add_argument('--input-results-file', type=argparse.FileType(), + required=True, + help='A JSON file with output from WebRTC tests.') + parser.add_argument('--output-json-file', type=argparse.FileType('w'), + help='Where to write the output (for debugging).') + parser.add_argument('--outdir', required=True, + help='Path to the local out/ dir (usually out/Default)') + parser.add_argument('--wait-for-upload', action='store_true', + help='If specified, script will wait untill Chrome ' + 'perf dashboard confirms that the data was succesfully ' + 'proccessed and uploaded') + parser.add_argument('--wait-timeout-sec', type=int, default=1200, + help='Used only if wait-for-upload is True. Maximum ' + 'amount of time in seconds that the script will wait ' + 'for the confirmation.') + parser.add_argument('--wait-polling-period-sec', type=int, default=120, + help='Used only if wait-for-upload is True. Status ' + 'will be requested from the Dashboard every ' + 'wait-polling-period-sec seconds.') + return parser + + +def _ConfigurePythonPath(options): + # We just yank the python scripts we require into the PYTHONPATH. You could + # also imagine a solution where we use for instance + # protobuf:py_proto_runtime to copy catapult and protobuf code to out/. + # This is the convention in Chromium and WebRTC python scripts. We do need + # to build histogram_pb2 however, so that's why we add out/ to sys.path + # below. 
+ # + # It would be better if there was an equivalent to py_binary in GN, but + # there's not. + script_dir = os.path.dirname(os.path.realpath(__file__)) + checkout_root = os.path.abspath( + os.path.join(script_dir, os.pardir, os.pardir)) + + sys.path.insert( + 0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing')) + sys.path.insert( + 0, os.path.join(checkout_root, 'third_party', 'protobuf', 'python')) + + # The webrtc_dashboard_upload gn rule will build the protobuf stub for + # python, so put it in the path for this script before we attempt to import + # it. + histogram_proto_path = os.path.join(options.outdir, 'pyproto', 'tracing', + 'tracing', 'proto') + sys.path.insert(0, histogram_proto_path) + + # Fail early in case the proto hasn't been built. + from tracing.proto import histogram_proto + if not histogram_proto.HAS_PROTO: + raise ImportError( + 'Could not find histogram_pb2. You need to build the ' + 'webrtc_dashboard_upload target before invoking this ' + 'script. Expected to find ' + 'histogram_pb2.py in %s.' 
% histogram_proto_path) def main(args): - parser = _CreateParser() - options = parser.parse_args(args) - - histograms = _LoadHistogramSetFromProto(options) - _AddBuildInfo(histograms, options) + parser = _CreateParser() + options = parser.parse_args(args) - if options.output_json_file: - _DumpOutput(histograms, options.output_json_file) + _ConfigurePythonPath(options) - oauth_token = _GenerateOauthToken() - response, content = _SendHistogramSet( - options.dashboard_url, histograms, oauth_token) + import catapult_uploader - if response.status == 200: - return 0 - else: - print("Upload failed with %d: %s\n\n%s" % (response.status, response.reason, - content)) - return 1 + return catapult_uploader.UploadToDashboard(options) if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/tools_webrtc/presubmit_checks_lib/build_helpers.py b/tools_webrtc/presubmit_checks_lib/build_helpers.py index 1ad59bfd49..e276631ed4 100644 --- a/tools_webrtc/presubmit_checks_lib/build_helpers.py +++ b/tools_webrtc/presubmit_checks_lib/build_helpers.py @@ -5,7 +5,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
- """This script helps to invoke gn and ninja which lie in depot_tools repository.""" @@ -19,11 +18,11 @@ def FindSrcDirPath(): - """Returns the abs path to the src/ dir of the project.""" - src_dir = os.path.dirname(os.path.abspath(__file__)) - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir + """Returns the abs path to the src/ dir of the project.""" + src_dir = os.path.dirname(os.path.abspath(__file__)) + while os.path.basename(src_dir) != 'src': + src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) + return src_dir SRC_DIR = FindSrcDirPath() @@ -32,16 +31,16 @@ def FindSrcDirPath(): def RunGnCommand(args, root_dir=None): - """Runs `gn` with provided args and return error if any.""" - try: - command = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') - ] + args - subprocess.check_output(command, cwd=root_dir) - except subprocess.CalledProcessError as err: - return err.output - return None + """Runs `gn` with provided args and return error if any.""" + try: + command = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') + ] + args + subprocess.check_output(command, cwd=root_dir) + except subprocess.CalledProcessError as err: + return err.output + return None # GN_ERROR_RE matches the summary of an error output by `gn check`. @@ -51,49 +50,49 @@ def RunGnCommand(args, root_dir=None): def RunGnCheck(root_dir=None): - """Runs `gn gen --check` with default args to detect mismatches between + """Runs `gn gen --check` with default args to detect mismatches between #includes and dependencies in the BUILD.gn files, as well as general build errors. Returns a list of error summary strings. 
""" - out_dir = tempfile.mkdtemp('gn') - try: - error = RunGnCommand(['gen', '--check', out_dir], root_dir) - finally: - shutil.rmtree(out_dir, ignore_errors=True) - return GN_ERROR_RE.findall(error) if error else [] + out_dir = tempfile.mkdtemp('gn') + try: + error = RunGnCommand(['gen', '--check', out_dir], root_dir) + finally: + shutil.rmtree(out_dir, ignore_errors=True) + return GN_ERROR_RE.findall(error) if error else [] def RunNinjaCommand(args, root_dir=None): - """Runs ninja quietly. Any failure (e.g. clang not found) is + """Runs ninja quietly. Any failure (e.g. clang not found) is silently discarded, since this is unlikely an error in submitted CL.""" - command = [ - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja') - ] + args - p = subprocess.Popen(command, cwd=root_dir, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, _ = p.communicate() - return out + command = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja')] + args + p = subprocess.Popen(command, + cwd=root_dir, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + out, _ = p.communicate() + return out def GetClangTidyPath(): - """POC/WIP! Use the one we have, even it doesn't match clang's version.""" - tidy = ('third_party/android_ndk/toolchains/' - 'llvm/prebuilt/linux-x86_64/bin/clang-tidy') - return os.path.join(SRC_DIR, tidy) + """POC/WIP! Use the one we have, even it doesn't match clang's version.""" + tidy = ('third_party/android_ndk/toolchains/' + 'llvm/prebuilt/linux-x86_64/bin/clang-tidy') + return os.path.join(SRC_DIR, tidy) def GetCompilationDb(root_dir=None): - """Run ninja compdb tool to get proper flags, defines and include paths.""" - # The compdb tool expect a rule. - commands = json.loads(RunNinjaCommand(['-t', 'compdb', 'cxx'], root_dir)) - # Turns 'file' field into a key. - return {v['file']: v for v in commands} + """Run ninja compdb tool to get proper flags, defines and include paths.""" + # The compdb tool expect a rule. 
+ commands = json.loads(RunNinjaCommand(['-t', 'compdb', 'cxx'], root_dir)) + # Turns 'file' field into a key. + return {v['file']: v for v in commands} def GetCompilationCommand(filepath, gn_args, work_dir): - """Get the whole command used to compile one cc file. + """Get the whole command used to compile one cc file. Typically, clang++ with flags, defines and include paths. Args: @@ -104,31 +103,30 @@ def GetCompilationCommand(filepath, gn_args, work_dir): Returns: Command as a list, ready to be consumed by subprocess.Popen. """ - gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir]) - if gn_errors: - raise(RuntimeError( - 'FYI, cannot complete check due to gn error:\n%s\n' - 'Please open a bug.' % gn_errors)) - - # Needed for single file compilation. - commands = GetCompilationDb(work_dir) - - # Path as referenced by ninja. - rel_path = os.path.relpath(os.path.abspath(filepath), work_dir) - - # Gather defines, include path and flags (such as -std=c++11). - try: - compilation_entry = commands[rel_path] - except KeyError: - raise ValueError('%s: Not found in compilation database.\n' - 'Please check the path.' % filepath) - command = compilation_entry['command'].split() - - # Remove troublesome flags. May trigger an error otherwise. - if '-MMD' in command: - command.remove('-MMD') - if '-MF' in command: - index = command.index('-MF') - del command[index:index+2] # Remove filename as well. - - return command + gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir]) + if gn_errors: + raise (RuntimeError('FYI, cannot complete check due to gn error:\n%s\n' + 'Please open a bug.' % gn_errors)) + + # Needed for single file compilation. + commands = GetCompilationDb(work_dir) + + # Path as referenced by ninja. + rel_path = os.path.relpath(os.path.abspath(filepath), work_dir) + + # Gather defines, include path and flags (such as -std=c++11). 
+ try: + compilation_entry = commands[rel_path] + except KeyError: + raise ValueError('%s: Not found in compilation database.\n' + 'Please check the path.' % filepath) + command = compilation_entry['command'].split() + + # Remove troublesome flags. May trigger an error otherwise. + if '-MMD' in command: + command.remove('-MMD') + if '-MF' in command: + index = command.index('-MF') + del command[index:index + 2] # Remove filename as well. + + return command diff --git a/tools_webrtc/presubmit_checks_lib/build_helpers_test.py b/tools_webrtc/presubmit_checks_lib/build_helpers_test.py index 78973282f9..5eab10556c 100755 --- a/tools_webrtc/presubmit_checks_lib/build_helpers_test.py +++ b/tools_webrtc/presubmit_checks_lib/build_helpers_test.py @@ -14,19 +14,20 @@ #pylint: disable=relative-import import build_helpers - TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'testdata') class GnCheckTest(unittest.TestCase): - def testCircularDependencyError(self): - test_dir = os.path.join(TESTDATA_DIR, 'circular_dependency') - expected_errors = ['ERROR Dependency cycle:\n' - ' //:bar ->\n //:foo ->\n //:bar'] - self.assertListEqual(expected_errors, - build_helpers.RunGnCheck(test_dir)) + def testCircularDependencyError(self): + test_dir = os.path.join(TESTDATA_DIR, 'circular_dependency') + expected_errors = [ + 'ERROR Dependency cycle:\n' + ' //:bar ->\n //:foo ->\n //:bar' + ] + self.assertListEqual(expected_errors, + build_helpers.RunGnCheck(test_dir)) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/presubmit_checks_lib/check_orphan_headers.py b/tools_webrtc/presubmit_checks_lib/check_orphan_headers.py index deb445cd8c..29509972e5 100644 --- a/tools_webrtc/presubmit_checks_lib/check_orphan_headers.py +++ b/tools_webrtc/presubmit_checks_lib/check_orphan_headers.py @@ -11,12 +11,11 @@ import re import string - # TARGET_RE matches a GN target, and extracts the target name and the contents. 
-TARGET_RE = re.compile(r'(?P\s*)\w+\("(?P\w+)"\) {' - r'(?P.*?)' - r'(?P=indent)}', - re.MULTILINE | re.DOTALL) +TARGET_RE = re.compile( + r'(?P\s*)\w+\("(?P\w+)"\) {' + r'(?P.*?)' + r'(?P=indent)}', re.MULTILINE | re.DOTALL) # SOURCES_RE matches a block of sources inside a GN target. SOURCES_RE = re.compile( @@ -27,27 +26,27 @@ class NoBuildGnFoundError(Exception): - pass + pass class WrongFileTypeError(Exception): - pass + pass def _ReadFile(file_path): - """Returns the content of file_path in a string. + """Returns the content of file_path in a string. Args: file_path: the path of the file to read. Returns: A string with the content of the file. """ - with open(file_path) as f: - return f.read() + with open(file_path) as f: + return f.read() def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path): - """Returns the BUILD.gn file responsible for file_path. + """Returns the BUILD.gn file responsible for file_path. Args: file_path: the absolute path to the .h file to check. @@ -59,23 +58,23 @@ def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path): A string with the absolute path to the BUILD.gn file responsible to include file_path in a target. 
""" - if not file_path.endswith('.h'): - raise WrongFileTypeError( - 'File {} is not an header file (.h)'.format(file_path)) - candidate_dir = os.path.dirname(file_path) - while candidate_dir.startswith(root_dir_path): - candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn') - if file_exists_check(candidate_build_gn_path): - return candidate_build_gn_path - else: - candidate_dir = os.path.abspath(os.path.join(candidate_dir, - os.pardir)) - raise NoBuildGnFoundError( - 'No BUILD.gn file found for file: `{}`'.format(file_path)) + if not file_path.endswith('.h'): + raise WrongFileTypeError( + 'File {} is not an header file (.h)'.format(file_path)) + candidate_dir = os.path.dirname(file_path) + while candidate_dir.startswith(root_dir_path): + candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn') + if file_exists_check(candidate_build_gn_path): + return candidate_build_gn_path + else: + candidate_dir = os.path.abspath( + os.path.join(candidate_dir, os.pardir)) + raise NoBuildGnFoundError( + 'No BUILD.gn file found for file: `{}`'.format(file_path)) def IsHeaderInBuildGn(header_path, build_gn_path): - """Returns True if the header is listed in the BUILD.gn file. + """Returns True if the header is listed in the BUILD.gn file. Args: header_path: the absolute path to the header to check. @@ -86,15 +85,15 @@ def IsHeaderInBuildGn(header_path, build_gn_path): at least one GN target in the BUILD.gn file specified by the argument build_gn_path. 
""" - target_abs_path = os.path.dirname(build_gn_path) - build_gn_content = _ReadFile(build_gn_path) - headers_in_build_gn = GetHeadersInBuildGnFileSources(build_gn_content, - target_abs_path) - return header_path in headers_in_build_gn + target_abs_path = os.path.dirname(build_gn_path) + build_gn_content = _ReadFile(build_gn_path) + headers_in_build_gn = GetHeadersInBuildGnFileSources( + build_gn_content, target_abs_path) + return header_path in headers_in_build_gn def GetHeadersInBuildGnFileSources(file_content, target_abs_path): - """Returns a set with all the .h files in the file_content. + """Returns a set with all the .h files in the file_content. Args: file_content: a string with the content of the BUILD.gn file. @@ -105,15 +104,15 @@ def GetHeadersInBuildGnFileSources(file_content, target_abs_path): A set with all the headers (.h file) in the file_content. The set contains absolute paths. """ - headers_in_sources = set([]) - for target_match in TARGET_RE.finditer(file_content): - target_contents = target_match.group('target_contents') - for sources_match in SOURCES_RE.finditer(target_contents): - sources = sources_match.group('sources') - for source_file_match in SOURCE_FILE_RE.finditer(sources): - source_file = source_file_match.group('source_file') - if source_file.endswith('.h'): - source_file_tokens = string.split(source_file, '/') - headers_in_sources.add(os.path.join(target_abs_path, - *source_file_tokens)) - return headers_in_sources + headers_in_sources = set([]) + for target_match in TARGET_RE.finditer(file_content): + target_contents = target_match.group('target_contents') + for sources_match in SOURCES_RE.finditer(target_contents): + sources = sources_match.group('sources') + for source_file_match in SOURCE_FILE_RE.finditer(sources): + source_file = source_file_match.group('source_file') + if source_file.endswith('.h'): + source_file_tokens = string.split(source_file, '/') + headers_in_sources.add( + os.path.join(target_abs_path, 
*source_file_tokens)) + return headers_in_sources diff --git a/tools_webrtc/presubmit_checks_lib/check_orphan_headers_test.py b/tools_webrtc/presubmit_checks_lib/check_orphan_headers_test.py index 2dfc18999d..79ac6a4b49 100755 --- a/tools_webrtc/presubmit_checks_lib/check_orphan_headers_test.py +++ b/tools_webrtc/presubmit_checks_lib/check_orphan_headers_test.py @@ -16,73 +16,67 @@ def _GetRootBasedOnPlatform(): - if sys.platform.startswith('win'): - return 'C:\\' - else: - return '/' + if sys.platform.startswith('win'): + return 'C:\\' + else: + return '/' def _GetPath(*path_chunks): - return os.path.join(_GetRootBasedOnPlatform(), - *path_chunks) + return os.path.join(_GetRootBasedOnPlatform(), *path_chunks) class GetBuildGnPathFromFilePathTest(unittest.TestCase): - - def testGetBuildGnFromSameDirectory(self): - file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') - expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base', - 'BUILD.gn') - file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', - 'base', 'BUILD.gn') - src_dir_path = _GetPath('home', 'projects', 'webrtc') - self.assertEqual( - expected_build_path, - check_orphan_headers.GetBuildGnPathFromFilePath(file_path, - file_exists, - src_dir_path)) - - def testGetBuildPathFromParentDirectory(self): - file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') - expected_build_path = _GetPath('home', 'projects', 'webrtc', - 'BUILD.gn') - file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', - 'BUILD.gn') - src_dir_path = _GetPath('home', 'projects', 'webrtc') - self.assertEqual( - expected_build_path, - check_orphan_headers.GetBuildGnPathFromFilePath(file_path, - file_exists, - src_dir_path)) - - def testExceptionIfNoBuildGnFilesAreFound(self): - with self.assertRaises(check_orphan_headers.NoBuildGnFoundError): - file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') - file_exists = lambda p: False - src_dir_path = _GetPath('home', 
'projects', 'webrtc') - check_orphan_headers.GetBuildGnPathFromFilePath(file_path, - file_exists, - src_dir_path) - - def testExceptionIfFilePathIsNotAnHeader(self): - with self.assertRaises(check_orphan_headers.WrongFileTypeError): - file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.cc') - file_exists = lambda p: False - src_dir_path = _GetPath('home', 'projects', 'webrtc') - check_orphan_headers.GetBuildGnPathFromFilePath(file_path, - file_exists, - src_dir_path) + def testGetBuildGnFromSameDirectory(self): + file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') + expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base', + 'BUILD.gn') + file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', + 'base', 'BUILD.gn') + src_dir_path = _GetPath('home', 'projects', 'webrtc') + self.assertEqual( + expected_build_path, + check_orphan_headers.GetBuildGnPathFromFilePath( + file_path, file_exists, src_dir_path)) + + def testGetBuildPathFromParentDirectory(self): + file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') + expected_build_path = _GetPath('home', 'projects', 'webrtc', + 'BUILD.gn') + file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', + 'BUILD.gn') + src_dir_path = _GetPath('home', 'projects', 'webrtc') + self.assertEqual( + expected_build_path, + check_orphan_headers.GetBuildGnPathFromFilePath( + file_path, file_exists, src_dir_path)) + + def testExceptionIfNoBuildGnFilesAreFound(self): + with self.assertRaises(check_orphan_headers.NoBuildGnFoundError): + file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') + file_exists = lambda p: False + src_dir_path = _GetPath('home', 'projects', 'webrtc') + check_orphan_headers.GetBuildGnPathFromFilePath( + file_path, file_exists, src_dir_path) + + def testExceptionIfFilePathIsNotAnHeader(self): + with self.assertRaises(check_orphan_headers.WrongFileTypeError): + file_path = _GetPath('home', 'projects', 'webrtc', 'base', + 
'foo.cc') + file_exists = lambda p: False + src_dir_path = _GetPath('home', 'projects', 'webrtc') + check_orphan_headers.GetBuildGnPathFromFilePath( + file_path, file_exists, src_dir_path) class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase): + def testEmptyFileReturnsEmptySet(self): + self.assertEqual( + set([]), + check_orphan_headers.GetHeadersInBuildGnFileSources('', '/a/b')) - def testEmptyFileReturnsEmptySet(self): - self.assertEqual( - set([]), - check_orphan_headers.GetHeadersInBuildGnFileSources('', '/a/b')) - - def testReturnsSetOfHeadersFromFileContent(self): - file_content = """ + def testReturnsSetOfHeadersFromFileContent(self): + file_content = """ # Some comments if (is_android) { import("//a/b/c.gni") @@ -107,17 +101,17 @@ def testReturnsSetOfHeadersFromFileContent(self): sources = ["baz/foo.h"] } """ - target_abs_path = _GetPath('a', 'b') - self.assertEqual( - set([ - _GetPath('a', 'b', 'foo.h'), - _GetPath('a', 'b', 'bar.h'), - _GetPath('a', 'b', 'public_foo.h'), - _GetPath('a', 'b', 'baz', 'foo.h'), - ]), - check_orphan_headers.GetHeadersInBuildGnFileSources(file_content, - target_abs_path)) + target_abs_path = _GetPath('a', 'b') + self.assertEqual( + set([ + _GetPath('a', 'b', 'foo.h'), + _GetPath('a', 'b', 'bar.h'), + _GetPath('a', 'b', 'public_foo.h'), + _GetPath('a', 'b', 'baz', 'foo.h'), + ]), + check_orphan_headers.GetHeadersInBuildGnFileSources( + file_content, target_abs_path)) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/presubmit_checks_lib/check_package_boundaries.py b/tools_webrtc/presubmit_checks_lib/check_package_boundaries.py index 1b3c1f8e43..7d81bae16e 100644 --- a/tools_webrtc/presubmit_checks_lib/check_package_boundaries.py +++ b/tools_webrtc/presubmit_checks_lib/check_package_boundaries.py @@ -14,12 +14,11 @@ import re import sys - # TARGET_RE matches a GN target, and extracts the target name and the contents. 
-TARGET_RE = re.compile(r'(?P\s*)\w+\("(?P\w+)"\) {' - r'(?P.*?)' - r'(?P=indent)}', - re.MULTILINE | re.DOTALL) +TARGET_RE = re.compile( + r'(?P\s*)\w+\("(?P\w+)"\) {' + r'(?P.*?)' + r'(?P=indent)}', re.MULTILINE | re.DOTALL) # SOURCES_RE matches a block of sources inside a GN target. SOURCES_RE = re.compile(r'sources \+?= \[(?P.*?)\]', @@ -31,96 +30,107 @@ class PackageBoundaryViolation( - collections.namedtuple('PackageBoundaryViolation', - 'build_file_path target_name source_file subpackage')): - def __str__(self): - return ERROR_MESSAGE.format(**self._asdict()) + collections.namedtuple( + 'PackageBoundaryViolation', + 'build_file_path target_name source_file subpackage')): + def __str__(self): + return ERROR_MESSAGE.format(**self._asdict()) def _BuildSubpackagesPattern(packages, query): - """Returns a regular expression that matches source files inside subpackages + """Returns a regular expression that matches source files inside subpackages of the given query.""" - query += os.path.sep - length = len(query) - pattern = r'\s*"(?P(?P' - pattern += '|'.join(re.escape(package[length:].replace(os.path.sep, '/')) - for package in packages if package.startswith(query)) - pattern += r')/[\w\./]*)"' - return re.compile(pattern) + query += os.path.sep + length = len(query) + pattern = r'\s*"(?P(?P' + pattern += '|'.join( + re.escape(package[length:].replace(os.path.sep, '/')) + for package in packages if package.startswith(query)) + pattern += r')/[\w\./]*)"' + return re.compile(pattern) def _ReadFileAndPrependLines(file_path): - """Reads the contents of a file.""" - with open(file_path) as f: - return "".join(f.readlines()) + """Reads the contents of a file.""" + with open(file_path) as f: + return "".join(f.readlines()) def _CheckBuildFile(build_file_path, packages): - """Iterates over all the targets of the given BUILD.gn file, and verifies that + """Iterates over all the targets of the given BUILD.gn file, and verifies that the source files referenced by it don't 
belong to any of it's subpackages. Returns an iterator over PackageBoundaryViolations for this package. """ - package = os.path.dirname(build_file_path) - subpackages_re = _BuildSubpackagesPattern(packages, package) - - build_file_contents = _ReadFileAndPrependLines(build_file_path) - for target_match in TARGET_RE.finditer(build_file_contents): - target_name = target_match.group('target_name') - target_contents = target_match.group('target_contents') - for sources_match in SOURCES_RE.finditer(target_contents): - sources = sources_match.group('sources') - for subpackages_match in subpackages_re.finditer(sources): - subpackage = subpackages_match.group('subpackage') - source_file = subpackages_match.group('source_file') - if subpackage: - yield PackageBoundaryViolation(build_file_path, - target_name, source_file, subpackage) + package = os.path.dirname(build_file_path) + subpackages_re = _BuildSubpackagesPattern(packages, package) + + build_file_contents = _ReadFileAndPrependLines(build_file_path) + for target_match in TARGET_RE.finditer(build_file_contents): + target_name = target_match.group('target_name') + target_contents = target_match.group('target_contents') + for sources_match in SOURCES_RE.finditer(target_contents): + sources = sources_match.group('sources') + for subpackages_match in subpackages_re.finditer(sources): + subpackage = subpackages_match.group('subpackage') + source_file = subpackages_match.group('source_file') + if subpackage: + yield PackageBoundaryViolation(build_file_path, + target_name, source_file, + subpackage) def CheckPackageBoundaries(root_dir, build_files=None): - packages = [root for root, _, files in os.walk(root_dir) - if 'BUILD.gn' in files] - - if build_files is not None: + packages = [ + root for root, _, files in os.walk(root_dir) if 'BUILD.gn' in files + ] + + if build_files is not None: + for build_file_path in build_files: + assert build_file_path.startswith(root_dir) + else: + build_files = [ + os.path.join(package, 
'BUILD.gn') for package in packages + ] + + messages = [] for build_file_path in build_files: - assert build_file_path.startswith(root_dir) - else: - build_files = [os.path.join(package, 'BUILD.gn') for package in packages] - - messages = [] - for build_file_path in build_files: - messages.extend(_CheckBuildFile(build_file_path, packages)) - return messages + messages.extend(_CheckBuildFile(build_file_path, packages)) + return messages def main(argv): - parser = argparse.ArgumentParser( - description='Script that checks package boundary violations in GN ' - 'build files.') - - parser.add_argument('root_dir', metavar='ROOT_DIR', - help='The root directory that contains all BUILD.gn ' - 'files to be processed.') - parser.add_argument('build_files', metavar='BUILD_FILE', nargs='*', - help='A list of BUILD.gn files to be processed. If no ' - 'files are given, all BUILD.gn files under ROOT_DIR ' - 'will be processed.') - parser.add_argument('--max_messages', type=int, default=None, - help='If set, the maximum number of violations to be ' - 'displayed.') - - args = parser.parse_args(argv) - - messages = CheckPackageBoundaries(args.root_dir, args.build_files) - messages = messages[:args.max_messages] - - for i, message in enumerate(messages): - if i > 0: - print - print message - - return bool(messages) + parser = argparse.ArgumentParser( + description='Script that checks package boundary violations in GN ' + 'build files.') + + parser.add_argument('root_dir', + metavar='ROOT_DIR', + help='The root directory that contains all BUILD.gn ' + 'files to be processed.') + parser.add_argument('build_files', + metavar='BUILD_FILE', + nargs='*', + help='A list of BUILD.gn files to be processed. 
If no ' + 'files are given, all BUILD.gn files under ROOT_DIR ' + 'will be processed.') + parser.add_argument('--max_messages', + type=int, + default=None, + help='If set, the maximum number of violations to be ' + 'displayed.') + + args = parser.parse_args(argv) + + messages = CheckPackageBoundaries(args.root_dir, args.build_files) + messages = messages[:args.max_messages] + + for i, message in enumerate(messages): + if i > 0: + print + print message + + return bool(messages) if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/tools_webrtc/presubmit_checks_lib/check_package_boundaries_test.py b/tools_webrtc/presubmit_checks_lib/check_package_boundaries_test.py index abf232e678..8d173372c1 100755 --- a/tools_webrtc/presubmit_checks_lib/check_package_boundaries_test.py +++ b/tools_webrtc/presubmit_checks_lib/check_package_boundaries_test.py @@ -15,58 +15,60 @@ #pylint: disable=relative-import from check_package_boundaries import CheckPackageBoundaries - MSG_FORMAT = 'ERROR:check_package_boundaries.py: Unexpected %s.' 
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'testdata') def ReadPylFile(file_path): - with open(file_path) as f: - return ast.literal_eval(f.read()) + with open(file_path) as f: + return ast.literal_eval(f.read()) class UnitTest(unittest.TestCase): - def _RunTest(self, test_dir, check_all_build_files=False): - build_files = [os.path.join(test_dir, 'BUILD.gn')] - if check_all_build_files: - build_files = None + def _RunTest(self, test_dir, check_all_build_files=False): + build_files = [os.path.join(test_dir, 'BUILD.gn')] + if check_all_build_files: + build_files = None - messages = [] - for violation in CheckPackageBoundaries(test_dir, build_files): - build_file_path = os.path.relpath(violation.build_file_path, test_dir) - build_file_path = build_file_path.replace(os.path.sep, '/') - messages.append(violation._replace(build_file_path=build_file_path)) + messages = [] + for violation in CheckPackageBoundaries(test_dir, build_files): + build_file_path = os.path.relpath(violation.build_file_path, + test_dir) + build_file_path = build_file_path.replace(os.path.sep, '/') + messages.append( + violation._replace(build_file_path=build_file_path)) - expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl')) - self.assertListEqual(sorted(expected_messages), sorted(messages)) + expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl')) + self.assertListEqual(sorted(expected_messages), sorted(messages)) - def testNoErrors(self): - self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors')) + def testNoErrors(self): + self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors')) - def testMultipleErrorsSingleTarget(self): - self._RunTest(os.path.join(TESTDATA_DIR, 'multiple_errors_single_target')) + def testMultipleErrorsSingleTarget(self): + self._RunTest( + os.path.join(TESTDATA_DIR, 'multiple_errors_single_target')) - def testMultipleErrorsMultipleTargets(self): - self._RunTest(os.path.join(TESTDATA_DIR, - 
'multiple_errors_multiple_targets')) + def testMultipleErrorsMultipleTargets(self): + self._RunTest( + os.path.join(TESTDATA_DIR, 'multiple_errors_multiple_targets')) - def testCommonPrefix(self): - self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix')) + def testCommonPrefix(self): + self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix')) - def testAllBuildFiles(self): - self._RunTest(os.path.join(TESTDATA_DIR, 'all_build_files'), True) + def testAllBuildFiles(self): + self._RunTest(os.path.join(TESTDATA_DIR, 'all_build_files'), True) - def testSanitizeFilename(self): - # The `dangerous_filename` test case contains a directory with '++' in its - # name. If it's not properly escaped, a regex error would be raised. - self._RunTest(os.path.join(TESTDATA_DIR, 'dangerous_filename'), True) + def testSanitizeFilename(self): + # The `dangerous_filename` test case contains a directory with '++' in its + # name. If it's not properly escaped, a regex error would be raised. + self._RunTest(os.path.join(TESTDATA_DIR, 'dangerous_filename'), True) - def testRelativeFilename(self): - test_dir = os.path.join(TESTDATA_DIR, 'all_build_files') - with self.assertRaises(AssertionError): - CheckPackageBoundaries(test_dir, ["BUILD.gn"]) + def testRelativeFilename(self): + test_dir = os.path.join(TESTDATA_DIR, 'all_build_files') + with self.assertRaises(AssertionError): + CheckPackageBoundaries(test_dir, ["BUILD.gn"]) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/sancov/README b/tools_webrtc/sancov/README deleted file mode 100644 index c9b43e7ae0..0000000000 --- a/tools_webrtc/sancov/README +++ /dev/null @@ -1,9 +0,0 @@ -Blacklist for llvm's sancov - -See http://clang.llvm.org/docs/SanitizerCoverage.html . 
- -Example usage: -> cd out/Debug -> UBSAN_OPTIONS="coverage=1" ./peerconnection_unittests -> sancov -html-report -blacklist ../../tools/sancov/blacklist.txt \ -> peerconnection_unittests peerconnection_unittests.123.sancov diff --git a/tools_webrtc/sancov/blacklist.txt b/tools_webrtc/sancov/blacklist.txt deleted file mode 100644 index 7043a18ef2..0000000000 --- a/tools_webrtc/sancov/blacklist.txt +++ /dev/null @@ -1,21 +0,0 @@ -############################################################################# -# sancov blacklist. -# Please think twice before you add or remove these rules. - -############################################################################# -# no coverage report for third party -src:*/chromium/* -src:*/third_party/* - -# OpenH264 triggers some errors that are out of our control. -src:*/third_party/ffmpeg/libavcodec/* -src:*/third_party/openh264/* - -############################################################################# -# Ignore system libraries. -src:*/usr/* - -############################################################################# -# Ignore test source. -src:*/test/* -src:*_unittest.cc diff --git a/tools_webrtc/sanitizers/OWNERS b/tools_webrtc/sanitizers/OWNERS deleted file mode 100644 index 524e2676ff..0000000000 --- a/tools_webrtc/sanitizers/OWNERS +++ /dev/null @@ -1 +0,0 @@ -phoglund@webrtc.org diff --git a/tools_webrtc/sslroots/README.md b/tools_webrtc/sslroots/README.md new file mode 100644 index 0000000000..b81bd4455a --- /dev/null +++ b/tools_webrtc/sslroots/README.md @@ -0,0 +1,23 @@ +# Generate rtc_base/ssl_roots.h + +This directory contains a script to generate the content of +[rtc_base/ssl_roots.h][ssl-roots-header], to update the SSL roots shipped +by WebRTC follow this instructions: + +1. Download roots.pem from [pki.goog][pki-goog]. + +2. Launch the script: + +``` +$ python tools_webrtc/sslroots/generate_sslroots.py roots.pem +``` + +3. Step 2 should have generated an ssl_roots.h file right next to roots.pem. 
+ +4. Open rtc_base/ssl_roots.h, manually remove the old certificates and paste + the ones from the ssl_roots.h file. + +5. Delete the generated ssl_roots.h and roots.pem before creating the CL. + +[ssl-roots-header]: https://cs.chromium.org/chromium/src/third_party/webrtc/rtc_base/ssl_roots.h +[pki-goog]: https://www.google.com/url?q=https://pki.google.com/roots.pem diff --git a/tools_webrtc/sslroots/generate_sslroots.py b/tools_webrtc/sslroots/generate_sslroots.py index 65751f1f1a..ff0052e3c7 100644 --- a/tools_webrtc/sslroots/generate_sslroots.py +++ b/tools_webrtc/sslroots/generate_sslroots.py @@ -6,8 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - - """This is a tool to transform a crt file into a C/C++ header. Usage: @@ -25,7 +23,7 @@ import re import string -_GENERATED_FILE = 'sslroots.h' +_GENERATED_FILE = 'ssl_roots.h' _PREFIX = '__generated__' _EXTENSION = '.crt' _SUBJECT_NAME_ARRAY = 'subject_name' @@ -36,155 +34,185 @@ _CERTIFICATE_VARIABLE = 'Certificate' _CERTIFICATE_SIZE_VARIABLE = 'CertificateSize' _INT_TYPE = 'size_t' -_CHAR_TYPE = 'const unsigned char*' +_CHAR_TYPE = 'unsigned char* const' _VERBOSE = 'verbose' def main(): - """The main entrypoint.""" - parser = OptionParser('usage %prog FILE') - parser.add_option('-v', '--verbose', dest='verbose', action='store_true') - parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true') - options, args = parser.parse_args() - if len(args) < 1: - parser.error('No crt file specified.') - return - root_dir = _SplitCrt(args[0], options) - _GenCFiles(root_dir, options) - _Cleanup(root_dir) + """The main entrypoint.""" + parser = OptionParser('usage %prog FILE') + parser.add_option('-v', '--verbose', dest='verbose', action='store_true') + parser.add_option('-f', + '--full_cert', + dest='full_cert', + action='store_true') + options, args = 
parser.parse_args() + if len(args) < 1: + parser.error('No crt file specified.') + return + root_dir = _SplitCrt(args[0], options) + _GenCFiles(root_dir, options) + _Cleanup(root_dir) def _SplitCrt(source_file, options): - sub_file_blocks = [] - label_name = '' - root_dir = os.path.dirname(os.path.abspath(source_file)) + '/' - _PrintOutput(root_dir, options) - f = open(source_file) - for line in f: - if line.startswith('# Label: '): - sub_file_blocks.append(line) - label = re.search(r'\".*\"', line) - temp_label = label.group(0) - end = len(temp_label)-1 - label_name = _SafeName(temp_label[1:end]) - elif line.startswith('-----END CERTIFICATE-----'): - sub_file_blocks.append(line) - new_file_name = root_dir + _PREFIX + label_name + _EXTENSION - _PrintOutput('Generating: ' + new_file_name, options) - new_file = open(new_file_name, 'w') - for out_line in sub_file_blocks: - new_file.write(out_line) - new_file.close() - sub_file_blocks = [] - else: - sub_file_blocks.append(line) - f.close() - return root_dir + sub_file_blocks = [] + label_name = '' + root_dir = os.path.dirname(os.path.abspath(source_file)) + '/' + _PrintOutput(root_dir, options) + f = open(source_file) + for line in f: + if line.startswith('# Label: '): + sub_file_blocks.append(line) + label = re.search(r'\".*\"', line) + temp_label = label.group(0) + end = len(temp_label) - 1 + label_name = _SafeName(temp_label[1:end]) + elif line.startswith('-----END CERTIFICATE-----'): + sub_file_blocks.append(line) + new_file_name = root_dir + _PREFIX + label_name + _EXTENSION + _PrintOutput('Generating: ' + new_file_name, options) + new_file = open(new_file_name, 'w') + for out_line in sub_file_blocks: + new_file.write(out_line) + new_file.close() + sub_file_blocks = [] + else: + sub_file_blocks.append(line) + f.close() + return root_dir def _GenCFiles(root_dir, options): - output_header_file = open(root_dir + _GENERATED_FILE, 'w') - output_header_file.write(_CreateOutputHeader()) - if options.full_cert: - 
subject_name_list = _CreateArraySectionHeader(_SUBJECT_NAME_VARIABLE, - _CHAR_TYPE, options) - public_key_list = _CreateArraySectionHeader(_PUBLIC_KEY_VARIABLE, - _CHAR_TYPE, options) - certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE, - _CHAR_TYPE, options) - certificate_size_list = _CreateArraySectionHeader(_CERTIFICATE_SIZE_VARIABLE, - _INT_TYPE, options) - - for _, _, files in os.walk(root_dir): - for current_file in files: - if current_file.startswith(_PREFIX): - prefix_length = len(_PREFIX) - length = len(current_file) - len(_EXTENSION) - label = current_file[prefix_length:length] - filtered_output, cert_size = _CreateCertSection(root_dir, current_file, - label, options) - output_header_file.write(filtered_output + '\n\n\n') - if options.full_cert: - subject_name_list += _AddLabelToArray(label, _SUBJECT_NAME_ARRAY) - public_key_list += _AddLabelToArray(label, _PUBLIC_KEY_ARRAY) - certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY) - certificate_size_list += (' %s,\n') %(cert_size) - - if options.full_cert: - subject_name_list += _CreateArraySectionFooter() - output_header_file.write(subject_name_list) - public_key_list += _CreateArraySectionFooter() - output_header_file.write(public_key_list) - certificate_list += _CreateArraySectionFooter() - output_header_file.write(certificate_list) - certificate_size_list += _CreateArraySectionFooter() - output_header_file.write(certificate_size_list) - output_header_file.close() + output_header_file = open(root_dir + _GENERATED_FILE, 'w') + output_header_file.write(_CreateOutputHeader()) + if options.full_cert: + subject_name_list = _CreateArraySectionHeader(_SUBJECT_NAME_VARIABLE, + _CHAR_TYPE, options) + public_key_list = _CreateArraySectionHeader(_PUBLIC_KEY_VARIABLE, + _CHAR_TYPE, options) + certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE, + _CHAR_TYPE, options) + certificate_size_list = _CreateArraySectionHeader( + _CERTIFICATE_SIZE_VARIABLE, _INT_TYPE, options) + + 
for _, _, files in os.walk(root_dir): + for current_file in files: + if current_file.startswith(_PREFIX): + prefix_length = len(_PREFIX) + length = len(current_file) - len(_EXTENSION) + label = current_file[prefix_length:length] + filtered_output, cert_size = _CreateCertSection( + root_dir, current_file, label, options) + output_header_file.write(filtered_output + '\n\n\n') + if options.full_cert: + subject_name_list += _AddLabelToArray( + label, _SUBJECT_NAME_ARRAY) + public_key_list += _AddLabelToArray( + label, _PUBLIC_KEY_ARRAY) + certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY) + certificate_size_list += (' %s,\n') % (cert_size) + + if options.full_cert: + subject_name_list += _CreateArraySectionFooter() + output_header_file.write(subject_name_list) + public_key_list += _CreateArraySectionFooter() + output_header_file.write(public_key_list) + certificate_list += _CreateArraySectionFooter() + output_header_file.write(certificate_list) + certificate_size_list += _CreateArraySectionFooter() + output_header_file.write(certificate_size_list) + output_header_file.write(_CreateOutputFooter()) + output_header_file.close() def _Cleanup(root_dir): - for f in os.listdir(root_dir): - if f.startswith(_PREFIX): - os.remove(root_dir + f) + for f in os.listdir(root_dir): + if f.startswith(_PREFIX): + os.remove(root_dir + f) def _CreateCertSection(root_dir, source_file, label, options): - command = 'openssl x509 -in %s%s -noout -C' %(root_dir, source_file) - _PrintOutput(command, options) - output = commands.getstatusoutput(command)[1] - renamed_output = output.replace('unsigned char XXX_', - 'const unsigned char ' + label + '_') - filtered_output = '' - cert_block = '^const unsigned char.*?};$' - prog = re.compile(cert_block, re.IGNORECASE | re.MULTILINE | re.DOTALL) - if not options.full_cert: - filtered_output = prog.sub('', renamed_output, count=2) - else: - filtered_output = renamed_output - - cert_size_block = r'\d\d\d+' - prog2 = 
re.compile(cert_size_block, re.MULTILINE | re.VERBOSE) - result = prog2.findall(renamed_output) - cert_size = result[len(result) - 1] - - return filtered_output, cert_size + command = 'openssl x509 -in %s%s -noout -C' % (root_dir, source_file) + _PrintOutput(command, options) + output = commands.getstatusoutput(command)[1] + renamed_output = output.replace('unsigned char XXX_', + 'const unsigned char ' + label + '_') + filtered_output = '' + cert_block = '^const unsigned char.*?};$' + prog = re.compile(cert_block, re.IGNORECASE | re.MULTILINE | re.DOTALL) + if not options.full_cert: + filtered_output = prog.sub('', renamed_output, count=2) + else: + filtered_output = renamed_output + + cert_size_block = r'\d\d\d+' + prog2 = re.compile(cert_size_block, re.MULTILINE | re.VERBOSE) + result = prog2.findall(renamed_output) + cert_size = result[len(result) - 1] + + return filtered_output, cert_size def _CreateOutputHeader(): - output = ('// This file is the root certificates in C form that are needed to' - ' connect to\n// Google.\n\n' - '// It was generated with the following command line:\n' - '// > python tools/certs/generate_sslroots.py' - '\n// https://pki.google.com/roots.pem\n\n') - return output + output = ( + '/*\n' + ' * Copyright 2004 The WebRTC Project Authors. All rights ' + 'reserved.\n' + ' *\n' + ' * Use of this source code is governed by a BSD-style license\n' + ' * that can be found in the LICENSE file in the root of the ' + 'source\n' + ' * tree. An additional intellectual property rights grant can be ' + 'found\n' + ' * in the file PATENTS. 
All contributing project authors may\n' + ' * be found in the AUTHORS file in the root of the source tree.\n' + ' */\n\n' + '#ifndef RTC_BASE_SSL_ROOTS_H_\n' + '#define RTC_BASE_SSL_ROOTS_H_\n\n' + '// This file is the root certificates in C form that are needed to' + ' connect to\n// Google.\n\n' + '// It was generated with the following command line:\n' + '// > python tools_webrtc/sslroots/generate_sslroots.py' + '\n// https://pki.goog/roots.pem\n\n' + '// clang-format off\n' + '// Don\'t bother formatting generated code,\n' + '// also it would breaks subject/issuer lines.\n\n') + return output + + +def _CreateOutputFooter(): + output = ('// clang-format on\n\n' '#endif // RTC_BASE_SSL_ROOTS_H_\n') + return output def _CreateArraySectionHeader(type_name, type_type, options): - output = ('const %s kSSLCert%sList[] = {\n') %(type_type, type_name) - _PrintOutput(output, options) - return output + output = ('const %s kSSLCert%sList[] = {\n') % (type_type, type_name) + _PrintOutput(output, options) + return output def _AddLabelToArray(label, type_name): - return ' %s_%s,\n' %(label, type_name) + return ' %s_%s,\n' % (label, type_name) def _CreateArraySectionFooter(): - return '};\n\n' + return '};\n\n' def _SafeName(original_file_name): - bad_chars = ' -./\\()áéíőú' - replacement_chars = '' - for _ in bad_chars: - replacement_chars += '_' - translation_table = string.maketrans(bad_chars, replacement_chars) - return original_file_name.translate(translation_table) + bad_chars = ' -./\\()áéíőú' + replacement_chars = '' + for _ in bad_chars: + replacement_chars += '_' + translation_table = string.maketrans(bad_chars, replacement_chars) + return original_file_name.translate(translation_table) def _PrintOutput(output, options): - if options.verbose: - print output + if options.verbose: + print output + if __name__ == '__main__': - main() + main() diff --git a/tools_webrtc/ubsan/OWNERS b/tools_webrtc/ubsan/OWNERS deleted file mode 100644 index 524e2676ff..0000000000 --- 
a/tools_webrtc/ubsan/OWNERS +++ /dev/null @@ -1 +0,0 @@ -phoglund@webrtc.org diff --git a/tools_webrtc/ubsan/blacklist.txt b/tools_webrtc/ubsan/suppressions.txt similarity index 100% rename from tools_webrtc/ubsan/blacklist.txt rename to tools_webrtc/ubsan/suppressions.txt diff --git a/tools_webrtc/ubsan/vptr_blacklist.txt b/tools_webrtc/ubsan/vptr_suppressions.txt similarity index 100% rename from tools_webrtc/ubsan/vptr_blacklist.txt rename to tools_webrtc/ubsan/vptr_suppressions.txt diff --git a/tools_webrtc/vim/webrtc.ycm_extra_conf.py b/tools_webrtc/vim/webrtc.ycm_extra_conf.py index fcc38fec01..b8727d9633 100644 --- a/tools_webrtc/vim/webrtc.ycm_extra_conf.py +++ b/tools_webrtc/vim/webrtc.ycm_extra_conf.py @@ -53,7 +53,6 @@ # # * This has only been tested on gPrecise. - import os import os.path import shlex @@ -62,25 +61,26 @@ # Flags from YCM's default config. _DEFAULT_FLAGS = [ - '-DUSE_CLANG_COMPLETER', - '-std=c++11', - '-x', - 'c++', + '-DUSE_CLANG_COMPLETER', + '-std=c++11', + '-x', + 'c++', ] _HEADER_ALTERNATES = ('.cc', '.cpp', '.c', '.mm', '.m') _EXTENSION_FLAGS = { - '.m': ['-x', 'objective-c'], - '.mm': ['-x', 'objective-c++'], + '.m': ['-x', 'objective-c'], + '.mm': ['-x', 'objective-c++'], } + def PathExists(*args): - return os.path.exists(os.path.join(*args)) + return os.path.exists(os.path.join(*args)) def FindWebrtcSrcFromFilename(filename): - """Searches for the root of the WebRTC checkout. + """Searches for the root of the WebRTC checkout. Simply checks parent directories until it finds .gclient and src/. @@ -90,20 +90,20 @@ def FindWebrtcSrcFromFilename(filename): Returns: (String) Path of 'src/', or None if unable to find. 
""" - curdir = os.path.normpath(os.path.dirname(filename)) - while not (os.path.basename(curdir) == 'src' - and PathExists(curdir, 'DEPS') - and (PathExists(curdir, '..', '.gclient') - or PathExists(curdir, '.git'))): - nextdir = os.path.normpath(os.path.join(curdir, '..')) - if nextdir == curdir: - return None - curdir = nextdir - return curdir + curdir = os.path.normpath(os.path.dirname(filename)) + while not (os.path.basename(curdir) == 'src' + and PathExists(curdir, 'DEPS') and + (PathExists(curdir, '..', '.gclient') + or PathExists(curdir, '.git'))): + nextdir = os.path.normpath(os.path.join(curdir, '..')) + if nextdir == curdir: + return None + curdir = nextdir + return curdir def GetDefaultSourceFile(webrtc_root, filename): - """Returns the default source file to use as an alternative to |filename|. + """Returns the default source file to use as an alternative to |filename|. Compile flags used to build the default source file is assumed to be a close-enough approximation for building |filename|. @@ -115,13 +115,13 @@ def GetDefaultSourceFile(webrtc_root, filename): Returns: (String) Absolute path to substitute source file. """ - if 'test.' in filename: - return os.path.join(webrtc_root, 'base', 'logging_unittest.cc') - return os.path.join(webrtc_root, 'base', 'logging.cc') + if 'test.' in filename: + return os.path.join(webrtc_root, 'base', 'logging_unittest.cc') + return os.path.join(webrtc_root, 'base', 'logging.cc') def GetNinjaBuildOutputsForSourceFile(out_dir, filename): - """Returns a list of build outputs for filename. + """Returns a list of build outputs for filename. The list is generated by invoking 'ninja -t query' tool to retrieve a list of inputs and outputs of |filename|. This list is then filtered to only include @@ -135,32 +135,35 @@ def GetNinjaBuildOutputsForSourceFile(out_dir, filename): (List of Strings) List of target names. Will return [] if |filename| doesn't yield any .o or .obj outputs. 
""" - # Ninja needs the path to the source file relative to the output build - # directory. - rel_filename = os.path.relpath(filename, out_dir) - - p = subprocess.Popen(['ninja', '-C', out_dir, '-t', 'query', rel_filename], - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - universal_newlines=True) - stdout, _ = p.communicate() - if p.returncode != 0: - return [] - - # The output looks like: - # ../../relative/path/to/source.cc: - # outputs: - # obj/reative/path/to/target.source.o - # obj/some/other/target2.source.o - # another/target.txt - # - outputs_text = stdout.partition('\n outputs:\n')[2] - output_lines = [line.strip() for line in outputs_text.split('\n')] - return [target for target in output_lines - if target and (target.endswith('.o') or target.endswith('.obj'))] + # Ninja needs the path to the source file relative to the output build + # directory. + rel_filename = os.path.relpath(filename, out_dir) + + p = subprocess.Popen(['ninja', '-C', out_dir, '-t', 'query', rel_filename], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True) + stdout, _ = p.communicate() + if p.returncode != 0: + return [] + + # The output looks like: + # ../../relative/path/to/source.cc: + # outputs: + # obj/reative/path/to/target.source.o + # obj/some/other/target2.source.o + # another/target.txt + # + outputs_text = stdout.partition('\n outputs:\n')[2] + output_lines = [line.strip() for line in outputs_text.split('\n')] + return [ + target for target in output_lines + if target and (target.endswith('.o') or target.endswith('.obj')) + ] def GetClangCommandLineForNinjaOutput(out_dir, build_target): - """Returns the Clang command line for building |build_target| + """Returns the Clang command line for building |build_target| Asks ninja for the list of commands used to build |filename| and returns the final Clang invocation. 
@@ -173,24 +176,25 @@ def GetClangCommandLineForNinjaOutput(out_dir, build_target): (String or None) Clang command line or None if a Clang command line couldn't be determined. """ - p = subprocess.Popen(['ninja', '-v', '-C', out_dir, - '-t', 'commands', build_target], - stdout=subprocess.PIPE, universal_newlines=True) - stdout, _ = p.communicate() - if p.returncode != 0: + p = subprocess.Popen( + ['ninja', '-v', '-C', out_dir, '-t', 'commands', build_target], + stdout=subprocess.PIPE, + universal_newlines=True) + stdout, _ = p.communicate() + if p.returncode != 0: + return None + + # Ninja will return multiple build steps for all dependencies up to + # |build_target|. The build step we want is the last Clang invocation, which + # is expected to be the one that outputs |build_target|. + for line in reversed(stdout.split('\n')): + if 'clang' in line: + return line return None - # Ninja will return multiple build steps for all dependencies up to - # |build_target|. The build step we want is the last Clang invocation, which - # is expected to be the one that outputs |build_target|. - for line in reversed(stdout.split('\n')): - if 'clang' in line: - return line - return None - def GetClangCommandLineFromNinjaForSource(out_dir, filename): - """Returns a Clang command line used to build |filename|. + """Returns a Clang command line used to build |filename|. The same source file could be built multiple times using different tool chains. In such cases, this command returns the first Clang invocation. We @@ -206,17 +210,17 @@ def GetClangCommandLineFromNinjaForSource(out_dir, filename): (String or None): Command line for Clang invocation using |filename| as a source. Returns None if no such command line could be found. 
""" - build_targets = GetNinjaBuildOutputsForSourceFile(out_dir, filename) - for build_target in build_targets: - command_line = GetClangCommandLineForNinjaOutput(out_dir, build_target) - if command_line: - return command_line - return None + build_targets = GetNinjaBuildOutputsForSourceFile(out_dir, filename) + for build_target in build_targets: + command_line = GetClangCommandLineForNinjaOutput(out_dir, build_target) + if command_line: + return command_line + return None def GetClangOptionsFromCommandLine(clang_commandline, out_dir, additional_flags): - """Extracts relevant command line options from |clang_commandline| + """Extracts relevant command line options from |clang_commandline| Args: clang_commandline: (String) Full Clang invocation. @@ -228,46 +232,47 @@ def GetClangOptionsFromCommandLine(clang_commandline, out_dir, (List of Strings) The list of command line flags for this source file. Can be empty. """ - clang_flags = [] + additional_flags - - # Parse flags that are important for YCM's purposes. - clang_tokens = shlex.split(clang_commandline) - for flag_index, flag in enumerate(clang_tokens): - if flag.startswith('-I'): - # Relative paths need to be resolved, because they're relative to the - # output dir, not the source. - if flag[2] == '/': - clang_flags.append(flag) - else: - abs_path = os.path.normpath(os.path.join(out_dir, flag[2:])) - clang_flags.append('-I' + abs_path) - elif flag.startswith('-std'): - clang_flags.append(flag) - elif flag.startswith('-') and flag[1] in 'DWFfmO': - if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard': - # These flags causes libclang (3.3) to crash. Remove it until things - # are fixed. - continue - clang_flags.append(flag) - elif flag == '-isysroot': - # On Mac -isysroot is used to find the system headers. - # Copy over both flags. 
- if flag_index + 1 < len(clang_tokens): - clang_flags.append(flag) - clang_flags.append(clang_tokens[flag_index + 1]) - elif flag.startswith('--sysroot='): - # On Linux we use a sysroot image. - sysroot_path = flag.lstrip('--sysroot=') - if sysroot_path.startswith('/'): - clang_flags.append(flag) - else: - abs_path = os.path.normpath(os.path.join(out_dir, sysroot_path)) - clang_flags.append('--sysroot=' + abs_path) - return clang_flags + clang_flags = [] + additional_flags + + # Parse flags that are important for YCM's purposes. + clang_tokens = shlex.split(clang_commandline) + for flag_index, flag in enumerate(clang_tokens): + if flag.startswith('-I'): + # Relative paths need to be resolved, because they're relative to the + # output dir, not the source. + if flag[2] == '/': + clang_flags.append(flag) + else: + abs_path = os.path.normpath(os.path.join(out_dir, flag[2:])) + clang_flags.append('-I' + abs_path) + elif flag.startswith('-std'): + clang_flags.append(flag) + elif flag.startswith('-') and flag[1] in 'DWFfmO': + if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard': + # These flags causes libclang (3.3) to crash. Remove it until things + # are fixed. + continue + clang_flags.append(flag) + elif flag == '-isysroot': + # On Mac -isysroot is used to find the system headers. + # Copy over both flags. + if flag_index + 1 < len(clang_tokens): + clang_flags.append(flag) + clang_flags.append(clang_tokens[flag_index + 1]) + elif flag.startswith('--sysroot='): + # On Linux we use a sysroot image. + sysroot_path = flag.lstrip('--sysroot=') + if sysroot_path.startswith('/'): + clang_flags.append(flag) + else: + abs_path = os.path.normpath(os.path.join( + out_dir, sysroot_path)) + clang_flags.append('--sysroot=' + abs_path) + return clang_flags def GetClangOptionsFromNinjaForFilename(webrtc_root, filename): - """Returns the Clang command line options needed for building |filename|. 
+ """Returns the Clang command line options needed for building |filename|. Command line options are based on the command used by ninja for building |filename|. If |filename| is a .h file, uses its companion .cc or .cpp file. @@ -283,54 +288,55 @@ def GetClangOptionsFromNinjaForFilename(webrtc_root, filename): (List of Strings) The list of command line flags for this source file. Can be empty. """ - if not webrtc_root: - return [] + if not webrtc_root: + return [] - # Generally, everyone benefits from including WebRTC's src/, because all of - # WebRTC's includes are relative to that. - additional_flags = ['-I' + os.path.join(webrtc_root)] + # Generally, everyone benefits from including WebRTC's src/, because all of + # WebRTC's includes are relative to that. + additional_flags = ['-I' + os.path.join(webrtc_root)] - # Version of Clang used to compile WebRTC can be newer then version of - # libclang that YCM uses for completion. So it's possible that YCM's libclang - # doesn't know about some used warning options, which causes compilation - # warnings (and errors, because of '-Werror'); - additional_flags.append('-Wno-unknown-warning-option') + # Version of Clang used to compile WebRTC can be newer then version of + # libclang that YCM uses for completion. 
So it's possible that YCM's libclang + # doesn't know about some used warning options, which causes compilation + # warnings (and errors, because of '-Werror'); + additional_flags.append('-Wno-unknown-warning-option') - sys.path.append(os.path.join(webrtc_root, 'tools', 'vim')) - from ninja_output import GetNinjaOutputDirectory - out_dir = GetNinjaOutputDirectory(webrtc_root) + sys.path.append(os.path.join(webrtc_root, 'tools', 'vim')) + from ninja_output import GetNinjaOutputDirectory + out_dir = GetNinjaOutputDirectory(webrtc_root) - basename, extension = os.path.splitext(filename) - if extension == '.h': - candidates = [basename + ext for ext in _HEADER_ALTERNATES] - else: - candidates = [filename] + basename, extension = os.path.splitext(filename) + if extension == '.h': + candidates = [basename + ext for ext in _HEADER_ALTERNATES] + else: + candidates = [filename] - clang_line = None - buildable_extension = extension - for candidate in candidates: - clang_line = GetClangCommandLineFromNinjaForSource(out_dir, candidate) - if clang_line: - buildable_extension = os.path.splitext(candidate)[1] - break + clang_line = None + buildable_extension = extension + for candidate in candidates: + clang_line = GetClangCommandLineFromNinjaForSource(out_dir, candidate) + if clang_line: + buildable_extension = os.path.splitext(candidate)[1] + break - additional_flags += _EXTENSION_FLAGS.get(buildable_extension, []) + additional_flags += _EXTENSION_FLAGS.get(buildable_extension, []) - if not clang_line: - # If ninja didn't know about filename or it's companion files, then try a - # default build target. It is possible that the file is new, or build.ninja - # is stale. - clang_line = GetClangCommandLineFromNinjaForSource( - out_dir, GetDefaultSourceFile(webrtc_root, filename)) + if not clang_line: + # If ninja didn't know about filename or it's companion files, then try a + # default build target. It is possible that the file is new, or build.ninja + # is stale. 
+ clang_line = GetClangCommandLineFromNinjaForSource( + out_dir, GetDefaultSourceFile(webrtc_root, filename)) - if not clang_line: - return additional_flags + if not clang_line: + return additional_flags - return GetClangOptionsFromCommandLine(clang_line, out_dir, additional_flags) + return GetClangOptionsFromCommandLine(clang_line, out_dir, + additional_flags) def FlagsForFile(filename): - """This is the main entry point for YCM. Its interface is fixed. + """This is the main entry point for YCM. Its interface is fixed. Args: filename: (String) Path to source file being edited. @@ -340,18 +346,16 @@ def FlagsForFile(filename): 'flags': (List of Strings) Command line flags. 'do_cache': (Boolean) True if the result should be cached. """ - abs_filename = os.path.abspath(filename) - webrtc_root = FindWebrtcSrcFromFilename(abs_filename) - clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, abs_filename) + abs_filename = os.path.abspath(filename) + webrtc_root = FindWebrtcSrcFromFilename(abs_filename) + clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, + abs_filename) - # If clang_flags could not be determined, then assume that was due to a - # transient failure. Preventing YCM from caching the flags allows us to try to - # determine the flags again. - should_cache_flags_for_file = bool(clang_flags) + # If clang_flags could not be determined, then assume that was due to a + # transient failure. Preventing YCM from caching the flags allows us to try to + # determine the flags again. 
+ should_cache_flags_for_file = bool(clang_flags) - final_flags = _DEFAULT_FLAGS + clang_flags + final_flags = _DEFAULT_FLAGS + clang_flags - return { - 'flags': final_flags, - 'do_cache': should_cache_flags_for_file - } + return {'flags': final_flags, 'do_cache': should_cache_flags_for_file} diff --git a/tools_webrtc/whitespace.txt b/tools_webrtc/whitespace.txt index 685a8768f3..f85a7d2cf8 100644 --- a/tools_webrtc/whitespace.txt +++ b/tools_webrtc/whitespace.txt @@ -8,6 +8,7 @@ Holidays are coming, beware! Bought any gifts yet? Lemur was here :) Kjellander as well :-o +mbonadei as well. Foo Bar Baz Bur diff --git a/video/BUILD.gn b/video/BUILD.gn index 1878f3216c..c6774dc799 100644 --- a/video/BUILD.gn +++ b/video/BUILD.gn @@ -14,6 +14,8 @@ rtc_library("video") { "buffered_frame_decryptor.h", "call_stats.cc", "call_stats.h", + "call_stats2.cc", + "call_stats2.h", "encoder_rtcp_feedback.cc", "encoder_rtcp_feedback.h", "quality_limitation_reason_tracker.cc", @@ -22,12 +24,20 @@ rtc_library("video") { "quality_threshold.h", "receive_statistics_proxy.cc", "receive_statistics_proxy.h", + "receive_statistics_proxy2.cc", + "receive_statistics_proxy2.h", "report_block_stats.cc", "report_block_stats.h", "rtp_streams_synchronizer.cc", "rtp_streams_synchronizer.h", + "rtp_streams_synchronizer2.cc", + "rtp_streams_synchronizer2.h", "rtp_video_stream_receiver.cc", "rtp_video_stream_receiver.h", + "rtp_video_stream_receiver2.cc", + "rtp_video_stream_receiver2.h", + "rtp_video_stream_receiver_frame_transformer_delegate.cc", + "rtp_video_stream_receiver_frame_transformer_delegate.h", "send_delay_stats.cc", "send_delay_stats.h", "send_statistics_proxy.cc", @@ -40,20 +50,27 @@ rtc_library("video") { "transport_adapter.h", "video_quality_observer.cc", "video_quality_observer.h", + "video_quality_observer2.cc", + "video_quality_observer2.h", "video_receive_stream.cc", "video_receive_stream.h", + "video_receive_stream2.cc", + "video_receive_stream2.h", "video_send_stream.cc", 
"video_send_stream.h", "video_send_stream_impl.cc", "video_send_stream_impl.h", "video_stream_decoder.cc", "video_stream_decoder.h", + "video_stream_decoder2.cc", + "video_stream_decoder2.h", ] deps = [ ":frame_dumping_decoder", "../api:array_view", "../api:fec_controller_api", + "../api:frame_transformer_interface", "../api:libjingle_peerconnection_api", "../api:rtp_parameters", "../api:scoped_refptr", @@ -62,14 +79,13 @@ rtc_library("video") { "../api/crypto:options", "../api/rtc_event_log", "../api/task_queue", - "../api/transport/media:media_transport_interface", + "../api/units:timestamp", "../api/video:encoded_image", "../api/video:recordable_encoded_frame", "../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocator", "../api/video:video_codec_constants", "../api/video:video_frame", - "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", "../api/video:video_stream_encoder_create", @@ -95,9 +111,11 @@ rtc_library("video") { "../modules/video_coding:nack_module", "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding_utility", + "../modules/video_coding/deprecated:nack_module", "../modules/video_processing", "../rtc_base:checks", "../rtc_base:rate_limiter", + "../rtc_base:rtc_base", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_numerics", "../rtc_base:rtc_task_queue", @@ -109,17 +127,22 @@ rtc_library("video") { "../rtc_base/experiments:min_video_bitrate_experiment", "../rtc_base/experiments:quality_scaling_experiment", "../rtc_base/experiments:rate_control_settings", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", "../rtc_base/system:thread_registry", + "../rtc_base/task_utils:pending_task_safety_flag", "../rtc_base/task_utils:repeating_task", "../rtc_base/task_utils:to_queued_task", "../rtc_base/time:timestamp_extrapolator", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", + ] + 
absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -146,9 +169,10 @@ rtc_library("video_stream_decoder_impl") { "../modules/video_coding", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_task_queue", + "../rtc_base/synchronization:mutex", "../system_wrappers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("frame_dumping_decoder") { @@ -176,16 +200,14 @@ rtc_library("video_stream_encoder_impl") { # visibility = [ "../api/video:video_stream_encoder_create" ] sources = [ + "alignment_adjuster.cc", + "alignment_adjuster.h", "encoder_bitrate_adjuster.cc", "encoder_bitrate_adjuster.h", "encoder_overshoot_detector.cc", "encoder_overshoot_detector.h", "frame_encode_metadata_writer.cc", "frame_encode_metadata_writer.h", - "overuse_frame_detector.cc", - "overuse_frame_detector.h", - "overuse_frame_detector_resource_adaptation_module.cc", - "overuse_frame_detector_resource_adaptation_module.h", "video_source_sink_controller.cc", "video_source_sink_controller.h", "video_stream_encoder.cc", @@ -194,15 +216,17 @@ rtc_library("video_stream_encoder_impl") { deps = [ "../api:rtp_parameters", + "../api/adaptation:resource_adaptation_api", "../api/task_queue:task_queue", "../api/units:data_rate", "../api/video:encoded_image", + "../api/video:video_adaptation", "../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocator", "../api/video:video_bitrate_allocator_factory", "../api/video:video_codec_constants", "../api/video:video_frame", - "../api/video:video_frame_i420", + "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", "../api/video_codecs:video_codecs_api", @@ -216,7 +240,6 @@ rtc_library("video_stream_encoder_impl") { 
"../rtc_base:checks", "../rtc_base:criticalsection", "../rtc_base:logging", - "../rtc_base:macromagic", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", @@ -229,10 +252,15 @@ rtc_library("video_stream_encoder_impl") { "../rtc_base/experiments:quality_scaler_settings", "../rtc_base/experiments:quality_scaling_experiment", "../rtc_base/experiments:rate_control_settings", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", + "../rtc_base/task_utils:pending_task_safety_flag", "../rtc_base/task_utils:repeating_task", "../system_wrappers", "../system_wrappers:field_trial", + "adaptation:video_adaptation", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/types:optional", @@ -271,10 +299,10 @@ if (rtc_include_tests) { "../api:rtc_event_log_output_file", "../api:test_dependency_factory", "../api:video_quality_test_fixture_api", + "../api/numerics", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue", "../api/task_queue:default_task_queue_factory", - "../api/transport/media:media_transport_interface", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:video_bitrate_allocator_factory", "../api/video:video_frame", @@ -286,6 +314,7 @@ if (rtc_include_tests) { "../media:rtc_audio_video", "../media:rtc_encoder_simulcast_proxy", "../media:rtc_internal_video_codecs", + "../media:rtc_media_base", "../modules/audio_device:audio_device_api", "../modules/audio_device:audio_device_module_from_input_and_output", "../modules/audio_device:windows_core_audio_utility", @@ -302,6 +331,7 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_tests_utils", "../rtc_base:rtc_numerics", "../rtc_base:task_queue_for_test", + "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", "../system_wrappers", "../test:fake_video_codecs", @@ -315,6 +345,8 @@ if (rtc_include_tests) { 
"../test:test_support_test_artifacts", "../test:video_test_common", "../test:video_test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", @@ -345,8 +377,11 @@ if (rtc_include_tests) { "../test:test_common", "../test:test_support", "//testing/gtest", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -356,10 +391,14 @@ if (rtc_include_tests) { sources = [ "pc_full_stack_tests.cc" ] deps = [ "../api:create_network_emulation_manager", + "../api:create_peer_connection_quality_test_frame_generator", "../api:create_peerconnection_quality_test_fixture", + "../api:frame_generator_api", + "../api:media_stream_interface", "../api:network_emulation_manager_api", "../api:peer_connection_quality_test_fixture_api", "../api:simulated_network_api", + "../api:time_controller", "../call:simulated_network", "../media:rtc_vp9_profile", "../modules/video_coding:webrtc_vp9", @@ -394,8 +433,11 @@ if (rtc_include_tests) { "../test:test_renderer", "../test:test_support", "//testing/gtest", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -464,41 +506,7 @@ if (rtc_include_tests) { "//testing/gtest", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - ] - } - - rtc_executable("video_replay") { - testonly = true - sources = [ "video_replay.cc" ] - deps = [ - "../api/rtc_event_log", - "../api/task_queue:default_task_queue_factory", - "../api/test/video:function_video_factory", - "../api/transport:field_trial_based_config", - "../api/video_codecs:video_codecs_api", - "../call", - "../call:call_interfaces", - "../common_video", - "../media:rtc_internal_video_codecs", - "../rtc_base:checks", - 
"../rtc_base:rtc_json", - "../rtc_base:stringutils", - "../rtc_base:timeutils", - "../system_wrappers", - "../test:call_config_utils", - "../test:encoder_settings", - "../test:fake_video_codecs", - "../test:null_transport", - "../test:rtp_test_utils", - "../test:run_test", - "../test:run_test_interface", - "../test:test_common", - "../test:test_renderer", - "../test:test_support", - "../test:video_test_common", - "../test:video_test_support", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -508,7 +516,9 @@ if (rtc_include_tests) { defines = [] sources = [ + "alignment_adjuster_unittest.cc", "buffered_frame_decryptor_unittest.cc", + "call_stats2_unittest.cc", "call_stats_unittest.cc", "cpu_scaling_tests.cc", "encoder_bitrate_adjuster_unittest.cc", @@ -527,25 +537,27 @@ if (rtc_include_tests) { "end_to_end_tests/multi_stream_tester.h", "end_to_end_tests/multi_stream_tests.cc", "end_to_end_tests/network_state_tests.cc", - "end_to_end_tests/probing_tests.cc", "end_to_end_tests/retransmission_tests.cc", "end_to_end_tests/rtp_rtcp_tests.cc", "end_to_end_tests/ssrc_tests.cc", "end_to_end_tests/stats_tests.cc", "end_to_end_tests/transport_feedback_tests.cc", "frame_encode_metadata_writer_unittest.cc", - "overuse_frame_detector_unittest.cc", "picture_id_tests.cc", "quality_limitation_reason_tracker_unittest.cc", "quality_scaling_tests.cc", "quality_threshold_unittest.cc", + "receive_statistics_proxy2_unittest.cc", "receive_statistics_proxy_unittest.cc", "report_block_stats_unittest.cc", + "rtp_video_stream_receiver2_unittest.cc", + "rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc", "rtp_video_stream_receiver_unittest.cc", "send_delay_stats_unittest.cc", "send_statistics_proxy_unittest.cc", "stats_counter_unittest.cc", "stream_synchronization_unittest.cc", + "video_receive_stream2_unittest.cc", "video_receive_stream_unittest.cc", 
"video_send_stream_impl_unittest.cc", "video_send_stream_tests.cc", @@ -565,10 +577,13 @@ if (rtc_include_tests) { "../api:libjingle_peerconnection_api", "../api:mock_fec_controller_override", "../api:mock_frame_decryptor", + "../api:mock_video_encoder", "../api:rtp_headers", "../api:rtp_parameters", "../api:scoped_refptr", "../api:simulated_network_api", + "../api:transport_api", + "../api/adaptation:resource_adaptation_api", "../api/crypto:options", "../api/rtc_event_log", "../api/task_queue", @@ -578,9 +593,10 @@ if (rtc_include_tests) { "../api/units:timestamp", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:encoded_image", + "../api/video:video_adaptation", "../api/video:video_bitrate_allocation", "../api/video:video_frame", - "../api/video:video_frame_i420", + "../api/video:video_frame_nv12", "../api/video:video_frame_type", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", @@ -596,6 +612,7 @@ if (rtc_include_tests) { "../call:simulated_packet_receiver", "../call:video_stream_api", "../call/adaptation:resource_adaptation", + "../call/adaptation:resource_adaptation_test_utilities", "../common_video", "../common_video/test:utilities", "../media:rtc_audio_video", @@ -604,7 +621,6 @@ if (rtc_include_tests) { "../media:rtc_media_base", "../media:rtc_media_tests_utils", "../media:rtc_simulcast_encoder_adapter", - "../modules:module_api", "../modules:module_api_public", "../modules/pacing", "../modules/rtp_rtcp", @@ -622,6 +638,7 @@ if (rtc_include_tests) { "../modules/video_coding:webrtc_vp9", "../rtc_base", "../rtc_base:checks", + "../rtc_base:gunit_helpers", "../rtc_base:rate_limiter", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_base_tests_utils", @@ -629,6 +646,7 @@ if (rtc_include_tests) { "../rtc_base:rtc_task_queue", "../rtc_base:task_queue_for_test", "../rtc_base/experiments:alr_experiment", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", 
"../rtc_base/task_utils:to_queued_task", "../system_wrappers", @@ -640,6 +658,8 @@ if (rtc_include_tests) { "../test:field_trial", "../test:fileutils", "../test:frame_utils", + "../test:mock_frame_transformer", + "../test:mock_transport", "../test:null_transport", "../test:perf_test", "../test:rtp_test_utils", @@ -647,7 +667,10 @@ if (rtc_include_tests) { "../test:test_support", "../test:video_test_common", "../test/time_controller", + "adaptation:video_adaptation", "//testing/gtest", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", diff --git a/video/OWNERS b/video/OWNERS index 65b660602b..f76cf9009a 100644 --- a/video/OWNERS +++ b/video/OWNERS @@ -3,8 +3,3 @@ ilnik@webrtc.org mflodman@webrtc.org sprang@webrtc.org stefan@webrtc.org - -# These are for the common case of adding or renaming files. If you're doing -# structural changes, please get a review from a reviewer in this file. -per-file *.gn=* -per-file *.gni=* diff --git a/video/adaptation/BUILD.gn b/video/adaptation/BUILD.gn new file mode 100644 index 0000000000..a96f4cf36a --- /dev/null +++ b/video/adaptation/BUILD.gn @@ -0,0 +1,103 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +rtc_library("video_adaptation") { + sources = [ + "balanced_constraint.cc", + "balanced_constraint.h", + "bitrate_constraint.cc", + "bitrate_constraint.h", + "encode_usage_resource.cc", + "encode_usage_resource.h", + "overuse_frame_detector.cc", + "overuse_frame_detector.h", + "quality_rampup_experiment_helper.cc", + "quality_rampup_experiment_helper.h", + "quality_scaler_resource.cc", + "quality_scaler_resource.h", + "video_stream_encoder_resource.cc", + "video_stream_encoder_resource.h", + "video_stream_encoder_resource_manager.cc", + "video_stream_encoder_resource_manager.h", + ] + + deps = [ + "../../api:rtp_parameters", + "../../api:scoped_refptr", + "../../api/adaptation:resource_adaptation_api", + "../../api/task_queue:task_queue", + "../../api/units:data_rate", + "../../api/video:video_adaptation", + "../../api/video:video_frame", + "../../api/video:video_stream_encoder", + "../../api/video_codecs:video_codecs_api", + "../../call/adaptation:resource_adaptation", + "../../modules/video_coding:video_coding_utility", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_event", + "../../rtc_base:rtc_numerics", + "../../rtc_base:rtc_task_queue", + "../../rtc_base:timeutils", + "../../rtc_base/experiments:balanced_degradation_settings", + "../../rtc_base/experiments:field_trial_parser", + "../../rtc_base/experiments:quality_rampup_experiment", + "../../rtc_base/experiments:quality_scaler_settings", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/synchronization:sequence_checker", + "../../rtc_base/task_utils:repeating_task", + "../../rtc_base/task_utils:to_queued_task", + "../../system_wrappers:field_trial", + "../../system_wrappers:system_wrappers", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +if (rtc_include_tests) 
{ + rtc_library("video_adaptation_tests") { + testonly = true + + defines = [] + sources = [ + "overuse_frame_detector_unittest.cc", + "quality_scaler_resource_unittest.cc", + ] + deps = [ + ":video_adaptation", + "../../api:scoped_refptr", + "../../api/task_queue:task_queue", + "../../api/video:encoded_image", + "../../api/video:video_adaptation", + "../../api/video:video_frame", + "../../api/video_codecs:video_codecs_api", + "../../call/adaptation:resource_adaptation", + "../../call/adaptation:resource_adaptation_test_utilities", + "../../modules/video_coding:video_coding_utility", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_base_tests_utils", + "../../rtc_base:rtc_event", + "../../rtc_base:rtc_numerics", + "../../rtc_base:rtc_task_queue", + "../../rtc_base:task_queue_for_test", + "../../test:field_trial", + "//test:rtc_expect_death", + "//test:test_support", + "//testing/gtest", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } +} diff --git a/video/adaptation/OWNERS b/video/adaptation/OWNERS new file mode 100644 index 0000000000..b65c763efc --- /dev/null +++ b/video/adaptation/OWNERS @@ -0,0 +1,3 @@ +eshr@google.com +hbos@webrtc.org +ilnik@webrtc.org diff --git a/video/adaptation/balanced_constraint.cc b/video/adaptation/balanced_constraint.cc new file mode 100644 index 0000000000..fb455193f3 --- /dev/null +++ b/video/adaptation/balanced_constraint.cc @@ -0,0 +1,58 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include + +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "video/adaptation/balanced_constraint.h" + +namespace webrtc { + +BalancedConstraint::BalancedConstraint( + DegradationPreferenceProvider* degradation_preference_provider) + : encoder_target_bitrate_bps_(absl::nullopt), + degradation_preference_provider_(degradation_preference_provider) { + RTC_DCHECK(degradation_preference_provider_); + sequence_checker_.Detach(); +} + +void BalancedConstraint::OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps); +} + +bool BalancedConstraint::IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // Don't adapt if BalancedDegradationSettings applies and determines this will + // exceed bitrate constraints. + if (degradation_preference_provider_->degradation_preference() == + DegradationPreference::BALANCED && + !balanced_settings_.CanAdaptUp(input_state.video_codec_type(), + input_state.frame_size_pixels().value(), + encoder_target_bitrate_bps_.value_or(0))) { + return false; + } + if (DidIncreaseResolution(restrictions_before, restrictions_after) && + !balanced_settings_.CanAdaptUpResolution( + input_state.video_codec_type(), + input_state.frame_size_pixels().value(), + encoder_target_bitrate_bps_.value_or(0))) { + return false; + } + return true; +} + +} // namespace webrtc diff --git a/video/adaptation/balanced_constraint.h b/video/adaptation/balanced_constraint.h new file mode 100644 index 0000000000..5e02408025 --- /dev/null +++ b/video/adaptation/balanced_constraint.h @@ -0,0 +1,50 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ +#define VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ + +#include + +#include "absl/types/optional.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/degradation_preference_provider.h" +#include "rtc_base/experiments/balanced_degradation_settings.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +class BalancedConstraint : public AdaptationConstraint { + public: + explicit BalancedConstraint( + DegradationPreferenceProvider* degradation_preference_provider); + ~BalancedConstraint() override = default; + + void OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps); + + // AdaptationConstraint implementation. + std::string Name() const override { return "BalancedConstraint"; } + bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const override; + + private: + SequenceChecker sequence_checker_; + absl::optional encoder_target_bitrate_bps_ + RTC_GUARDED_BY(&sequence_checker_); + const BalancedDegradationSettings balanced_settings_; + const DegradationPreferenceProvider* degradation_preference_provider_; +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ diff --git a/video/adaptation/bitrate_constraint.cc b/video/adaptation/bitrate_constraint.cc new file mode 100644 index 0000000000..1061c4557f --- /dev/null +++ b/video/adaptation/bitrate_constraint.cc @@ -0,0 +1,65 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "call/adaptation/video_stream_adapter.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "video/adaptation/bitrate_constraint.h" + +namespace webrtc { + +BitrateConstraint::BitrateConstraint() + : encoder_settings_(absl::nullopt), + encoder_target_bitrate_bps_(absl::nullopt) { + sequence_checker_.Detach(); +} + +void BitrateConstraint::OnEncoderSettingsUpdated( + absl::optional encoder_settings) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + encoder_settings_ = std::move(encoder_settings); +} + +void BitrateConstraint::OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps); +} + +bool BitrateConstraint::IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // Make sure bitrate limits are not violated. + if (DidIncreaseResolution(restrictions_before, restrictions_after)) { + uint32_t bitrate_bps = encoder_target_bitrate_bps_.value_or(0); + absl::optional bitrate_limits = + encoder_settings_.has_value() + ? encoder_settings_->encoder_info() + .GetEncoderBitrateLimitsForResolution( + // Need some sort of expected resulting pixels to be used + // instead of unrestricted. 
+ GetHigherResolutionThan( + input_state.frame_size_pixels().value())) + : absl::nullopt; + if (bitrate_limits.has_value() && bitrate_bps != 0) { + RTC_DCHECK_GE(bitrate_limits->frame_size_pixels, + input_state.frame_size_pixels().value()); + return bitrate_bps >= + static_cast(bitrate_limits->min_start_bitrate_bps); + } + } + return true; +} + +} // namespace webrtc diff --git a/video/adaptation/bitrate_constraint.h b/video/adaptation/bitrate_constraint.h new file mode 100644 index 0000000000..015edcc13f --- /dev/null +++ b/video/adaptation/bitrate_constraint.h @@ -0,0 +1,52 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_ +#define VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_ + +#include + +#include "absl/types/optional.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/encoder_settings.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +class BitrateConstraint : public AdaptationConstraint { + public: + BitrateConstraint(); + ~BitrateConstraint() override = default; + + void OnEncoderSettingsUpdated( + absl::optional encoder_settings); + void OnEncoderTargetBitrateUpdated( + absl::optional encoder_target_bitrate_bps); + + // AdaptationConstraint implementation. 
+ std::string Name() const override { return "BitrateConstraint"; } + bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after) const override; + + private: + SequenceChecker sequence_checker_; + absl::optional encoder_settings_ + RTC_GUARDED_BY(&sequence_checker_); + absl::optional encoder_target_bitrate_bps_ + RTC_GUARDED_BY(&sequence_checker_); +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_ diff --git a/video/adaptation/encode_usage_resource.cc b/video/adaptation/encode_usage_resource.cc new file mode 100644 index 0000000000..8fe7450a0c --- /dev/null +++ b/video/adaptation/encode_usage_resource.cc @@ -0,0 +1,106 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/adaptation/encode_usage_resource.h" + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +// static +rtc::scoped_refptr EncodeUsageResource::Create( + std::unique_ptr overuse_detector) { + return new rtc::RefCountedObject( + std::move(overuse_detector)); +} + +EncodeUsageResource::EncodeUsageResource( + std::unique_ptr overuse_detector) + : VideoStreamEncoderResource("EncoderUsageResource"), + overuse_detector_(std::move(overuse_detector)), + is_started_(false), + target_frame_rate_(absl::nullopt) { + RTC_DCHECK(overuse_detector_); +} + +EncodeUsageResource::~EncodeUsageResource() {} + +bool EncodeUsageResource::is_started() const { + RTC_DCHECK_RUN_ON(encoder_queue()); + return is_started_; +} + +void EncodeUsageResource::StartCheckForOveruse(CpuOveruseOptions options) { + RTC_DCHECK_RUN_ON(encoder_queue()); + RTC_DCHECK(!is_started_); + overuse_detector_->StartCheckForOveruse(TaskQueueBase::Current(), + std::move(options), this); + is_started_ = true; + overuse_detector_->OnTargetFramerateUpdated(TargetFrameRateAsInt()); +} + +void EncodeUsageResource::StopCheckForOveruse() { + RTC_DCHECK_RUN_ON(encoder_queue()); + overuse_detector_->StopCheckForOveruse(); + is_started_ = false; +} + +void EncodeUsageResource::SetTargetFrameRate( + absl::optional target_frame_rate) { + RTC_DCHECK_RUN_ON(encoder_queue()); + if (target_frame_rate == target_frame_rate_) + return; + target_frame_rate_ = target_frame_rate; + if (is_started_) + overuse_detector_->OnTargetFramerateUpdated(TargetFrameRateAsInt()); +} + +void EncodeUsageResource::OnEncodeStarted(const VideoFrame& cropped_frame, + int64_t time_when_first_seen_us) { + RTC_DCHECK_RUN_ON(encoder_queue()); + // TODO(hbos): Rename FrameCaptured() to something more appropriate (e.g. + // "OnEncodeStarted"?) or revise usage. 
+ overuse_detector_->FrameCaptured(cropped_frame, time_when_first_seen_us); +} + +void EncodeUsageResource::OnEncodeCompleted( + uint32_t timestamp, + int64_t time_sent_in_us, + int64_t capture_time_us, + absl::optional encode_duration_us) { + RTC_DCHECK_RUN_ON(encoder_queue()); + // TODO(hbos): Rename FrameSent() to something more appropriate (e.g. + // "OnEncodeCompleted"?). + overuse_detector_->FrameSent(timestamp, time_sent_in_us, capture_time_us, + encode_duration_us); +} + +void EncodeUsageResource::AdaptUp() { + RTC_DCHECK_RUN_ON(encoder_queue()); + OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse); +} + +void EncodeUsageResource::AdaptDown() { + RTC_DCHECK_RUN_ON(encoder_queue()); + OnResourceUsageStateMeasured(ResourceUsageState::kOveruse); +} + +int EncodeUsageResource::TargetFrameRateAsInt() { + RTC_DCHECK_RUN_ON(encoder_queue()); + return target_frame_rate_.has_value() + ? static_cast(target_frame_rate_.value()) + : std::numeric_limits::max(); +} + +} // namespace webrtc diff --git a/video/adaptation/encode_usage_resource.h b/video/adaptation/encode_usage_resource.h new file mode 100644 index 0000000000..257988fa12 --- /dev/null +++ b/video/adaptation/encode_usage_resource.h @@ -0,0 +1,70 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_ADAPTATION_ENCODE_USAGE_RESOURCE_H_ +#define VIDEO_ADAPTATION_ENCODE_USAGE_RESOURCE_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/video_adaptation_reason.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_queue.h" +#include "video/adaptation/overuse_frame_detector.h" +#include "video/adaptation/video_stream_encoder_resource.h" + +namespace webrtc { + +// Handles interaction with the OveruseDetector. +// TODO(hbos): Add unittests specific to this class, it is currently only tested +// indirectly by usage in the ResourceAdaptationProcessor (which is only tested +// because of its usage in VideoStreamEncoder); all tests are currently in +// video_stream_encoder_unittest.cc. +class EncodeUsageResource : public VideoStreamEncoderResource, + public OveruseFrameDetectorObserverInterface { + public: + static rtc::scoped_refptr Create( + std::unique_ptr overuse_detector); + + explicit EncodeUsageResource( + std::unique_ptr overuse_detector); + ~EncodeUsageResource() override; + + bool is_started() const; + + void StartCheckForOveruse(CpuOveruseOptions options); + void StopCheckForOveruse(); + + void SetTargetFrameRate(absl::optional target_frame_rate); + void OnEncodeStarted(const VideoFrame& cropped_frame, + int64_t time_when_first_seen_us); + void OnEncodeCompleted(uint32_t timestamp, + int64_t time_sent_in_us, + int64_t capture_time_us, + absl::optional encode_duration_us); + + // OveruseFrameDetectorObserverInterface implementation. 
+ void AdaptUp() override; + void AdaptDown() override; + + private: + int TargetFrameRateAsInt(); + + const std::unique_ptr overuse_detector_ + RTC_GUARDED_BY(encoder_queue()); + bool is_started_ RTC_GUARDED_BY(encoder_queue()); + absl::optional target_frame_rate_ RTC_GUARDED_BY(encoder_queue()); +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_ENCODE_USAGE_RESOURCE_H_ diff --git a/video/overuse_frame_detector.cc b/video/adaptation/overuse_frame_detector.cc similarity index 98% rename from video/overuse_frame_detector.cc rename to video/adaptation/overuse_frame_detector.cc index 1046c1ea3f..9703ac8025 100644 --- a/video/overuse_frame_detector.cc +++ b/video/adaptation/overuse_frame_detector.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "video/overuse_frame_detector.h" +#include "video/adaptation/overuse_frame_detector.h" #include #include @@ -64,8 +64,6 @@ const float kMaxSampleDiffMarginFactor = 1.35f; const int kMinFramerate = 7; const int kMaxFramerate = 30; -const auto kScaleReasonCpu = AdaptationObserverInterface::AdaptReason::kCpu; - // Class for calculating the processing usage on the send-side (the average // processing time of a frame divided by the average time difference between // captured frames). 
@@ -542,17 +540,17 @@ OveruseFrameDetector::~OveruseFrameDetector() {} void OveruseFrameDetector::StartCheckForOveruse( TaskQueueBase* task_queue_base, const CpuOveruseOptions& options, - AdaptationObserverInterface* overuse_observer) { + OveruseFrameDetectorObserverInterface* overuse_observer) { RTC_DCHECK_RUN_ON(&task_checker_); RTC_DCHECK(!check_overuse_task_.Running()); RTC_DCHECK(overuse_observer != nullptr); SetOptions(options); check_overuse_task_ = RepeatingTaskHandle::DelayedStart( - task_queue_base, TimeDelta::ms(kTimeToFirstCheckForOveruseMs), + task_queue_base, TimeDelta::Millis(kTimeToFirstCheckForOveruseMs), [this, overuse_observer] { CheckForOveruse(overuse_observer); - return TimeDelta::ms(kCheckForOveruseIntervalMs); + return TimeDelta::Millis(kCheckForOveruseIntervalMs); }); } void OveruseFrameDetector::StopCheckForOveruse() { @@ -632,7 +630,7 @@ void OveruseFrameDetector::FrameSent(uint32_t timestamp, } void OveruseFrameDetector::CheckForOveruse( - AdaptationObserverInterface* observer) { + OveruseFrameDetectorObserverInterface* observer) { RTC_DCHECK_RUN_ON(&task_checker_); RTC_DCHECK(observer); ++num_process_times_; @@ -665,12 +663,12 @@ void OveruseFrameDetector::CheckForOveruse( checks_above_threshold_ = 0; ++num_overuse_detections_; - observer->AdaptDown(kScaleReasonCpu); + observer->AdaptDown(); } else if (IsUnderusing(*encode_usage_percent_, now_ms)) { last_rampup_time_ms_ = now_ms; in_quick_rampup_ = true; - observer->AdaptUp(kScaleReasonCpu); + observer->AdaptUp(); } int rampup_delay = diff --git a/video/overuse_frame_detector.h b/video/adaptation/overuse_frame_detector.h similarity index 87% rename from video/overuse_frame_detector.h rename to video/adaptation/overuse_frame_detector.h index 4f64734944..16217fff84 100644 --- a/video/overuse_frame_detector.h +++ b/video/adaptation/overuse_frame_detector.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef VIDEO_OVERUSE_FRAME_DETECTOR_H_ -#define VIDEO_OVERUSE_FRAME_DETECTOR_H_ +#ifndef VIDEO_ADAPTATION_OVERUSE_FRAME_DETECTOR_H_ +#define VIDEO_ADAPTATION_OVERUSE_FRAME_DETECTOR_H_ #include #include @@ -17,7 +17,6 @@ #include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" #include "api/video/video_stream_encoder_observer.h" -#include "modules/video_coding/utility/quality_scaler.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/exp_filter.h" @@ -47,6 +46,17 @@ struct CpuOveruseOptions { int filter_time_ms; // Time constant for averaging }; +class OveruseFrameDetectorObserverInterface { + public: + // Called to signal that we can handle larger or more frequent frames. + virtual void AdaptUp() = 0; + // Called to signal that the source should reduce the resolution or framerate. + virtual void AdaptDown() = 0; + + protected: + virtual ~OveruseFrameDetectorObserverInterface() {} +}; + // Use to detect system overuse based on the send-side processing time of // incoming frames. All methods must be called on a single task queue but it can // be created and destroyed on an arbitrary thread. @@ -58,9 +68,10 @@ class OveruseFrameDetector { virtual ~OveruseFrameDetector(); // Start to periodically check for overuse. 
- void StartCheckForOveruse(TaskQueueBase* task_queue_base, - const CpuOveruseOptions& options, - AdaptationObserverInterface* overuse_observer); + void StartCheckForOveruse( + TaskQueueBase* task_queue_base, + const CpuOveruseOptions& options, + OveruseFrameDetectorObserverInterface* overuse_observer); // StopCheckForOveruse must be called before destruction if // StartCheckForOveruse has been called. @@ -105,7 +116,7 @@ class OveruseFrameDetector { protected: // Protected for test purposes. - void CheckForOveruse(AdaptationObserverInterface* overuse_observer); + void CheckForOveruse(OveruseFrameDetectorObserverInterface* overuse_observer); void SetOptions(const CpuOveruseOptions& options); CpuOveruseOptions options_; @@ -155,4 +166,4 @@ class OveruseFrameDetector { } // namespace webrtc -#endif // VIDEO_OVERUSE_FRAME_DETECTOR_H_ +#endif // VIDEO_ADAPTATION_OVERUSE_FRAME_DETECTOR_H_ diff --git a/video/overuse_frame_detector_unittest.cc b/video/adaptation/overuse_frame_detector_unittest.cc similarity index 89% rename from video/overuse_frame_detector_unittest.cc rename to video/adaptation/overuse_frame_detector_unittest.cc index 432cc271bd..d4bf910faa 100644 --- a/video/overuse_frame_detector_unittest.cc +++ b/video/adaptation/overuse_frame_detector_unittest.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,12 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "video/overuse_frame_detector.h" +#include "video/adaptation/overuse_frame_detector.h" #include #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" +#include "api/video/video_adaptation_reason.h" #include "modules/video_coding/utility/quality_scaler.h" #include "rtc_base/event.h" #include "rtc_base/fake_clock.h" @@ -35,25 +36,22 @@ const int kFrameIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; const int kProcessTimeUs = 5 * rtc::kNumMicrosecsPerMillisec; } // namespace -class MockCpuOveruseObserver : public AdaptationObserverInterface { +class MockCpuOveruseObserver : public OveruseFrameDetectorObserverInterface { public: MockCpuOveruseObserver() {} virtual ~MockCpuOveruseObserver() {} - MOCK_METHOD1(AdaptUp, void(AdaptReason)); - MOCK_METHOD1(AdaptDown, bool(AdaptReason)); + MOCK_METHOD(void, AdaptUp, (), (override)); + MOCK_METHOD(void, AdaptDown, (), (override)); }; -class CpuOveruseObserverImpl : public AdaptationObserverInterface { +class CpuOveruseObserverImpl : public OveruseFrameDetectorObserverInterface { public: CpuOveruseObserverImpl() : overuse_(0), normaluse_(0) {} virtual ~CpuOveruseObserverImpl() {} - bool AdaptDown(AdaptReason) { - ++overuse_; - return true; - } - void AdaptUp(AdaptReason) { ++normaluse_; } + void AdaptDown() override { ++overuse_; } + void AdaptUp() override { ++normaluse_; } int overuse_; int normaluse_; @@ -109,10 +107,10 @@ class OveruseFrameDetectorTest : public ::testing::Test, frame.set_timestamp(timestamp); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); - clock_.AdvanceTime(TimeDelta::us(delay_us)); + clock_.AdvanceTime(TimeDelta::Micros(delay_us)); overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, delay_us); - clock_.AdvanceTime(TimeDelta::us(interval_us - delay_us)); + clock_.AdvanceTime(TimeDelta::Micros(interval_us - delay_us)); timestamp += interval_us * 90 / 1000; } } @@ -138,7 +136,7 @@ class 
OveruseFrameDetectorTest : public ::testing::Test, int max_delay_us = 0; for (int delay_us : delays_us) { if (delay_us > max_delay_us) { - clock_.AdvanceTime(TimeDelta::us(delay_us - max_delay_us)); + clock_.AdvanceTime(TimeDelta::Micros(delay_us - max_delay_us)); max_delay_us = delay_us; } @@ -146,7 +144,7 @@ class OveruseFrameDetectorTest : public ::testing::Test, capture_time_us, delay_us); } overuse_detector_->CheckForOveruse(observer_); - clock_.AdvanceTime(TimeDelta::us(interval_us - max_delay_us)); + clock_.AdvanceTime(TimeDelta::Micros(interval_us - max_delay_us)); timestamp += interval_us * 90 / 1000; } } @@ -171,7 +169,7 @@ class OveruseFrameDetectorTest : public ::testing::Test, int interval_us = random.Rand(min_interval_us, max_interval_us); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); - clock_.AdvanceTime(TimeDelta::us(delay_us)); + clock_.AdvanceTime(TimeDelta::Micros(delay_us)); overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, absl::optional(delay_us)); @@ -179,7 +177,7 @@ class OveruseFrameDetectorTest : public ::testing::Test, overuse_detector_->CheckForOveruse(observer_); // Avoid turning clock backwards. if (interval_us > delay_us) - clock_.AdvanceTime(TimeDelta::us(interval_us - delay_us)); + clock_.AdvanceTime(TimeDelta::Micros(interval_us - delay_us)); timestamp += interval_us * 90 / 1000; } @@ -231,11 +229,9 @@ class OveruseFrameDetectorTest : public ::testing::Test, CpuOveruseOptions options_; rtc::ScopedFakeClock clock_; MockCpuOveruseObserver mock_observer_; - AdaptationObserverInterface* observer_; + OveruseFrameDetectorObserverInterface* observer_; std::unique_ptr overuse_detector_; int encode_usage_percent_ = -1; - - static const auto reason_ = AdaptationObserverInterface::AdaptReason::kCpu; }; // UsagePercent() > high_encode_usage_threshold_percent => overuse. 
@@ -243,26 +239,26 @@ class OveruseFrameDetectorTest : public ::testing::Test, TEST_F(OveruseFrameDetectorTest, TriggerOveruse) { // usage > high => overuse overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); TriggerOveruse(options_.high_threshold_consecutive_count); } TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) { // usage > high => overuse overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); TriggerOveruse(options_.high_threshold_consecutive_count); // usage < low => underuse - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); TriggerUnderuse(); } TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(2); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(2); TriggerOveruse(options_.high_threshold_consecutive_count); TriggerOveruse(options_.high_threshold_consecutive_count); - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); TriggerUnderuse(); } @@ -276,15 +272,15 @@ TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) { kProcessTimeUs); overuse_detector_->CheckForOveruse(&overuse_observer); EXPECT_EQ(0, overuse_observer.normaluse_); - clock_.AdvanceTime(TimeDelta::us(kProcessIntervalUs)); + clock_.AdvanceTime(TimeDelta::Micros(kProcessIntervalUs)); overuse_detector_->CheckForOveruse(&overuse_observer); EXPECT_EQ(1, overuse_observer.normaluse_); } TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(0); - EXPECT_CALL(mock_observer_, 
AdaptDown(reason_)).Times(64); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(64); for (size_t i = 0; i < 64; ++i) { TriggerOveruse(options_.high_threshold_consecutive_count); } @@ -292,7 +288,7 @@ TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) { TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); options_.high_threshold_consecutive_count = 2; overuse_detector_->SetOptions(options_); TriggerOveruse(2); @@ -300,7 +296,7 @@ TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) { TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); options_.high_threshold_consecutive_count = 2; overuse_detector_->SetOptions(options_); TriggerOveruse(1); @@ -352,14 +348,14 @@ TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdating) { kProcessTimeUs); EXPECT_EQ(InitialUsage(), UsagePercent()); // Pass time far enough to digest all previous samples. - clock_.AdvanceTime(TimeDelta::seconds(1)); + clock_.AdvanceTime(TimeDelta::Seconds(1)); InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs); // The last sample has not been processed here. EXPECT_EQ(InitialUsage(), UsagePercent()); // Pass time far enough to digest all previous samples, 41 in total. 
- clock_.AdvanceTime(TimeDelta::seconds(1)); + clock_.AdvanceTime(TimeDelta::Seconds(1)); InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs); EXPECT_NE(InitialUsage(), UsagePercent()); @@ -373,7 +369,7 @@ TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) { TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; static const size_t kNumFramesEncodingDelay = 3; VideoFrame frame = @@ -387,7 +383,7 @@ TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) { frame.set_timestamp(static_cast(i)); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); - clock_.AdvanceTime(TimeDelta::us(kIntervalUs)); + clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs)); if (i > kNumFramesEncodingDelay) { overuse_detector_->FrameSent( static_cast(i - kNumFramesEncodingDelay), rtc::TimeMicros(), @@ -400,7 +396,7 @@ TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) { TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) { // >85% encoding time should trigger overuse. overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec; VideoFrame frame = @@ -415,14 +411,14 @@ TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) { int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); // Encode and send first parts almost instantly. 
- clock_.AdvanceTime(TimeDelta::ms(1)); + clock_.AdvanceTime(TimeDelta::Millis(1)); overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, rtc::kNumMicrosecsPerMillisec); // Encode heavier part, resulting in >85% usage total. - clock_.AdvanceTime(TimeDelta::us(kDelayUs) - TimeDelta::ms(1)); + clock_.AdvanceTime(TimeDelta::Micros(kDelayUs) - TimeDelta::Millis(1)); overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, kDelayUs); - clock_.AdvanceTime(TimeDelta::us(kIntervalUs - kDelayUs)); + clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs - kDelayUs)); timestamp += kIntervalUs * 90 / 1000; overuse_detector_->CheckForOveruse(observer_); } @@ -441,7 +437,7 @@ TEST_F(OveruseFrameDetectorTest, RunOnTqNormalUsage) { rtc::Event event; // Expect NormalUsage(). When called, stop the |overuse_detector_| and then // set |event| to end the test. - EXPECT_CALL(mock_observer_, AdaptUp(reason_)) + EXPECT_CALL(mock_observer_, AdaptUp()) .WillOnce(InvokeWithoutArgs([this, &event] { overuse_detector_->StopCheckForOveruse(); event.Set(); @@ -469,7 +465,7 @@ TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) { // Processing time just below over use limit given kEncodeMaxFrameRate. int64_t processing_time_us = (98 * OveruseProcessingTimeLimitForFramerate(kEncodeMaxFrameRate)) / 100; - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight, processing_time_us); @@ -479,7 +475,7 @@ TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) { // Simulate frame rate reduction and normal usage. 
frame_interval_us = rtc::kNumMicrosecsPerSec / kEncodeMaxFrameRate; overuse_detector_->OnTargetFramerateUpdated(kEncodeMaxFrameRate); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight, processing_time_us); @@ -489,7 +485,7 @@ TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) { // Reduce processing time to trigger underuse. processing_time_us = (98 * UnderuseProcessingTimeLimitForFramerate(kEncodeMaxFrameRate)) / 100; - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(1); InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight, processing_time_us); overuse_detector_->CheckForOveruse(observer_); @@ -505,7 +501,7 @@ TEST_F(OveruseFrameDetectorTest, RespectsMinFramerate) { // Processing time just below over use limit given kEncodeMaxFrameRate. int64_t processing_time_us = (98 * OveruseProcessingTimeLimitForFramerate(kMinFrameRate)) / 100; - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight, processing_time_us); @@ -515,7 +511,7 @@ TEST_F(OveruseFrameDetectorTest, RespectsMinFramerate) { // Over the limit to overuse. processing_time_us = (102 * OveruseProcessingTimeLimitForFramerate(kMinFrameRate)) / 100; - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight, processing_time_us); @@ -524,7 +520,7 @@ TEST_F(OveruseFrameDetectorTest, RespectsMinFramerate) { // Reduce input frame rate. 
Should still trigger overuse. overuse_detector_->OnTargetFramerateUpdated(kMinFrameRate - 1); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight, processing_time_us); @@ -547,7 +543,7 @@ TEST_F(OveruseFrameDetectorTest, LimitsMaxFrameInterval) { // Processing time just below overuse limit given kMaxFrameRate. int64_t processing_time_us = (98 * max_processing_time_us) / 100; - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, max_frame_interval_us, kWidth, kHeight, processing_time_us); @@ -556,7 +552,7 @@ TEST_F(OveruseFrameDetectorTest, LimitsMaxFrameInterval) { // Go above limit, trigger overuse. processing_time_us = (102 * max_processing_time_us) / 100; - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, max_frame_interval_us, kWidth, kHeight, processing_time_us); @@ -565,7 +561,7 @@ TEST_F(OveruseFrameDetectorTest, LimitsMaxFrameInterval) { // Increase frame interval, should still trigger overuse. max_frame_interval_us *= 2; - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { InsertAndSendFramesWithInterval(1200, max_frame_interval_us, kWidth, kHeight, processing_time_us); @@ -580,8 +576,8 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForLargeRandomFrameInterval) { // behavior is improved in this scenario, with only AdaptUp events, // and estimated load closer to the true average. 
- // EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0); - // EXPECT_CALL(mock_observer_, AdaptUp(reason_)) + // EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); + // EXPECT_CALL(mock_observer_, AdaptUp()) // .Times(::testing::AtLeast(1)); overuse_detector_->SetOptions(options_); @@ -609,8 +605,8 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForRandomFrameIntervalWithReset) { // TODO(bugs.webrtc.org/8504): When new estimator is relanded, // behavior is improved in this scenario, and we get AdaptUp events. overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0); - // EXPECT_CALL(mock_observer_, AdaptUp(reason_)) + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); + // EXPECT_CALL(mock_observer_, AdaptUp()) // .Times(::testing::AtLeast(1)); const int kNumFrames = 500; @@ -638,7 +634,7 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForRandomFrameIntervalWithReset) { // Load estimate should be based on the maximum encode time per input frame. TEST_F(OveruseFrameDetectorTest, NoOveruseForSimulcast) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); constexpr int kNumFrames = 500; constexpr int kEncodeTimesUs[] = { @@ -681,7 +677,7 @@ class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest { overuse_detector_->FrameSent(0 /* ignored timestamp */, 0 /* ignored send_time_us */, capture_time_us, delay_us); - clock_.AdvanceTime(TimeDelta::us(interval_us)); + clock_.AdvanceTime(TimeDelta::Micros(interval_us)); } } @@ -708,7 +704,7 @@ class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest { capture_time_us, delay_us); overuse_detector_->CheckForOveruse(observer_); - clock_.AdvanceTime(TimeDelta::us(interval_us)); + clock_.AdvanceTime(TimeDelta::Micros(interval_us)); } } @@ -725,26 +721,26 @@ class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest { TEST_F(OveruseFrameDetectorTest2, TriggerOveruse) { // usage > 
high => overuse overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); TriggerOveruse(options_.high_threshold_consecutive_count); } TEST_F(OveruseFrameDetectorTest2, OveruseAndRecover) { // usage > high => overuse overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); TriggerOveruse(options_.high_threshold_consecutive_count); // usage < low => underuse - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); TriggerUnderuse(); } TEST_F(OveruseFrameDetectorTest2, DoubleOveruseAndRecover) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(2); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(2); TriggerOveruse(options_.high_threshold_consecutive_count); TriggerOveruse(options_.high_threshold_consecutive_count); - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); TriggerUnderuse(); } @@ -758,29 +754,29 @@ TEST_F(OveruseFrameDetectorTest2, TriggerUnderuseWithMinProcessCount) { kProcessTimeUs); overuse_detector_->CheckForOveruse(&overuse_observer); EXPECT_EQ(0, overuse_observer.normaluse_); - clock_.AdvanceTime(TimeDelta::us(kProcessIntervalUs)); + clock_.AdvanceTime(TimeDelta::Micros(kProcessIntervalUs)); overuse_detector_->CheckForOveruse(&overuse_observer); EXPECT_EQ(1, overuse_observer.normaluse_); } TEST_F(OveruseFrameDetectorTest2, ConstantOveruseGivesNoNormalUsage) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(0); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(64); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(64); for 
(size_t i = 0; i < 64; ++i) { TriggerOveruse(options_.high_threshold_consecutive_count); } } TEST_F(OveruseFrameDetectorTest2, ConsecutiveCountTriggersOveruse) { - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(1); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(1); options_.high_threshold_consecutive_count = 2; overuse_detector_->SetOptions(options_); TriggerOveruse(2); } TEST_F(OveruseFrameDetectorTest2, IncorrectConsecutiveCountTriggersNoOveruse) { - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); options_.high_threshold_consecutive_count = 2; overuse_detector_->SetOptions(options_); TriggerOveruse(1); @@ -855,7 +851,7 @@ TEST_F(OveruseFrameDetectorTest2, InitialProcessingUsage) { TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; static const size_t kNumFramesEncodingDelay = 3; VideoFrame frame = @@ -869,7 +865,7 @@ TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) { frame.set_timestamp(static_cast(i)); int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); - clock_.AdvanceTime(TimeDelta::us(kIntervalUs)); + clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs)); if (i > kNumFramesEncodingDelay) { overuse_detector_->FrameSent( static_cast(i - kNumFramesEncodingDelay), rtc::TimeMicros(), @@ -882,7 +878,7 @@ TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) { TEST_F(OveruseFrameDetectorTest2, UpdatesExistingSamples) { // >85% encoding time should trigger overuse. 
overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec; VideoFrame frame = @@ -897,14 +893,14 @@ TEST_F(OveruseFrameDetectorTest2, UpdatesExistingSamples) { int64_t capture_time_us = rtc::TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); // Encode and send first parts almost instantly. - clock_.AdvanceTime(TimeDelta::ms(1)); + clock_.AdvanceTime(TimeDelta::Millis(1)); overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, rtc::kNumMicrosecsPerMillisec); // Encode heavier part, resulting in >85% usage total. - clock_.AdvanceTime(TimeDelta::us(kDelayUs) - TimeDelta::ms(1)); + clock_.AdvanceTime(TimeDelta::Micros(kDelayUs) - TimeDelta::Millis(1)); overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, kDelayUs); - clock_.AdvanceTime(TimeDelta::us(kIntervalUs - kDelayUs)); + clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs - kDelayUs)); timestamp += kIntervalUs * 90 / 1000; overuse_detector_->CheckForOveruse(observer_); } @@ -923,7 +919,7 @@ TEST_F(OveruseFrameDetectorTest2, RunOnTqNormalUsage) { rtc::Event event; // Expect NormalUsage(). When called, stop the |overuse_detector_| and then // set |event| to end the test. - EXPECT_CALL(mock_observer_, AdaptUp(reason_)) + EXPECT_CALL(mock_observer_, AdaptUp()) .WillOnce(InvokeWithoutArgs([this, &event] { overuse_detector_->StopCheckForOveruse(); event.Set(); @@ -945,8 +941,8 @@ TEST_F(OveruseFrameDetectorTest2, RunOnTqNormalUsage) { // to encode. 
TEST_F(OveruseFrameDetectorTest2, NoOveruseForLargeRandomFrameInterval) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0); - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); const int kNumFrames = 500; const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec; @@ -965,8 +961,8 @@ TEST_F(OveruseFrameDetectorTest2, NoOveruseForLargeRandomFrameInterval) { // exceeding the timeout interval. TEST_F(OveruseFrameDetectorTest2, NoOveruseForRandomFrameIntervalWithReset) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0); - EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(::testing::AtLeast(1)); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); + EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); const int kNumFrames = 500; const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec; @@ -1004,7 +1000,7 @@ TEST_F(OveruseFrameDetectorTest2, ToleratesOutOfOrderFrames) { // Load estimate should be based on the maximum encode time per input frame. TEST_F(OveruseFrameDetectorTest2, NoOveruseForSimulcast) { overuse_detector_->SetOptions(options_); - EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0); + EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); constexpr int kNumFrames = 500; constexpr int kEncodeTimesUs[] = { diff --git a/video/adaptation/quality_rampup_experiment_helper.cc b/video/adaptation/quality_rampup_experiment_helper.cc new file mode 100644 index 0000000000..6d82503fc6 --- /dev/null +++ b/video/adaptation/quality_rampup_experiment_helper.cc @@ -0,0 +1,80 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/adaptation/quality_rampup_experiment_helper.h" + +#include +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +QualityRampUpExperimentHelper::QualityRampUpExperimentHelper( + QualityRampUpExperimentListener* experiment_listener, + Clock* clock, + QualityRampupExperiment experiment) + : experiment_listener_(experiment_listener), + clock_(clock), + quality_rampup_experiment_(std::move(experiment)), + cpu_adapted_(false), + qp_resolution_adaptations_(0) { + RTC_DCHECK(experiment_listener_); + RTC_DCHECK(clock_); +} + +std::unique_ptr +QualityRampUpExperimentHelper::CreateIfEnabled( + QualityRampUpExperimentListener* experiment_listener, + Clock* clock) { + QualityRampupExperiment experiment = QualityRampupExperiment::ParseSettings(); + if (experiment.Enabled()) { + return std::unique_ptr( + new QualityRampUpExperimentHelper(experiment_listener, clock, + experiment)); + } + return nullptr; +} + +void QualityRampUpExperimentHelper::PerformQualityRampupExperiment( + rtc::scoped_refptr quality_scaler_resource, + DataRate bandwidth, + DataRate encoder_target_bitrate, + DataRate max_bitrate, + int pixels) { + if (!quality_scaler_resource->is_started()) + return; + + int64_t now_ms = clock_->TimeInMilliseconds(); + quality_rampup_experiment_.SetMaxBitrate(pixels, max_bitrate.kbps()); + + bool try_quality_rampup = false; + if (quality_rampup_experiment_.BwHigh(now_ms, bandwidth.kbps())) { + // Verify that encoder is at max bitrate and the QP is low. 
+ if (encoder_target_bitrate == max_bitrate && + quality_scaler_resource->QpFastFilterLow()) { + try_quality_rampup = true; + } + } + if (try_quality_rampup && qp_resolution_adaptations_ > 0 && !cpu_adapted_) { + experiment_listener_->OnQualityRampUp(); + } +} + +void QualityRampUpExperimentHelper::cpu_adapted(bool cpu_adapted) { + cpu_adapted_ = cpu_adapted; +} + +void QualityRampUpExperimentHelper::qp_resolution_adaptations( + int qp_resolution_adaptations) { + qp_resolution_adaptations_ = qp_resolution_adaptations; +} + +} // namespace webrtc diff --git a/video/adaptation/quality_rampup_experiment_helper.h b/video/adaptation/quality_rampup_experiment_helper.h new file mode 100644 index 0000000000..81be982e7c --- /dev/null +++ b/video/adaptation/quality_rampup_experiment_helper.h @@ -0,0 +1,68 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_ +#define VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_ + +#include + +#include "api/scoped_refptr.h" +#include "api/units/data_rate.h" +#include "rtc_base/experiments/quality_rampup_experiment.h" +#include "system_wrappers/include/clock.h" +#include "video/adaptation/quality_scaler_resource.h" + +namespace webrtc { + +class QualityRampUpExperimentListener { + public: + virtual ~QualityRampUpExperimentListener() = default; + virtual void OnQualityRampUp() = 0; +}; + +// Helper class for orchestrating the WebRTC-Video-QualityRampupSettings +// experiment. 
+class QualityRampUpExperimentHelper { + public: + // Returns a QualityRampUpExperimentHelper if the experiment is enabled, + // an nullptr otherwise. + static std::unique_ptr CreateIfEnabled( + QualityRampUpExperimentListener* experiment_listener, + Clock* clock); + + QualityRampUpExperimentHelper(const QualityRampUpExperimentHelper&) = delete; + QualityRampUpExperimentHelper& operator=( + const QualityRampUpExperimentHelper&) = delete; + + void cpu_adapted(bool cpu_adapted); + void qp_resolution_adaptations(int qp_adaptations); + + void PerformQualityRampupExperiment( + rtc::scoped_refptr quality_scaler_resource, + DataRate bandwidth, + DataRate encoder_target_bitrate, + DataRate max_bitrate, + int pixels); + + private: + QualityRampUpExperimentHelper( + QualityRampUpExperimentListener* experiment_listener, + Clock* clock, + QualityRampupExperiment experiment); + QualityRampUpExperimentListener* const experiment_listener_; + Clock* clock_; + QualityRampupExperiment quality_rampup_experiment_; + bool cpu_adapted_; + int qp_resolution_adaptations_; +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_ diff --git a/video/adaptation/quality_scaler_resource.cc b/video/adaptation/quality_scaler_resource.cc new file mode 100644 index 0000000000..c438488182 --- /dev/null +++ b/video/adaptation/quality_scaler_resource.cc @@ -0,0 +1,101 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/adaptation/quality_scaler_resource.h" + +#include + +#include "rtc_base/checks.h" +#include "rtc_base/experiments/balanced_degradation_settings.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +// static +rtc::scoped_refptr QualityScalerResource::Create() { + return new rtc::RefCountedObject(); +} + +QualityScalerResource::QualityScalerResource() + : VideoStreamEncoderResource("QualityScalerResource"), + quality_scaler_(nullptr) {} + +QualityScalerResource::~QualityScalerResource() { + RTC_DCHECK(!quality_scaler_); +} + +bool QualityScalerResource::is_started() const { + RTC_DCHECK_RUN_ON(encoder_queue()); + return quality_scaler_.get(); +} + +void QualityScalerResource::StartCheckForOveruse( + VideoEncoder::QpThresholds qp_thresholds) { + RTC_DCHECK_RUN_ON(encoder_queue()); + RTC_DCHECK(!is_started()); + quality_scaler_ = + std::make_unique(this, std::move(qp_thresholds)); +} + +void QualityScalerResource::StopCheckForOveruse() { + RTC_DCHECK_RUN_ON(encoder_queue()); + RTC_DCHECK(is_started()); + // Ensure we have no pending callbacks. This makes it safe to destroy the + // QualityScaler and even task queues with tasks in-flight. 
+ quality_scaler_.reset(); +} + +void QualityScalerResource::SetQpThresholds( + VideoEncoder::QpThresholds qp_thresholds) { + RTC_DCHECK_RUN_ON(encoder_queue()); + RTC_DCHECK(is_started()); + quality_scaler_->SetQpThresholds(std::move(qp_thresholds)); +} + +bool QualityScalerResource::QpFastFilterLow() { + RTC_DCHECK_RUN_ON(encoder_queue()); + RTC_DCHECK(is_started()); + return quality_scaler_->QpFastFilterLow(); +} + +void QualityScalerResource::OnEncodeCompleted(const EncodedImage& encoded_image, + int64_t time_sent_in_us) { + RTC_DCHECK_RUN_ON(encoder_queue()); + if (quality_scaler_ && encoded_image.qp_ >= 0) { + quality_scaler_->ReportQp(encoded_image.qp_, time_sent_in_us); + } +} + +void QualityScalerResource::OnFrameDropped( + EncodedImageCallback::DropReason reason) { + RTC_DCHECK_RUN_ON(encoder_queue()); + if (!quality_scaler_) + return; + switch (reason) { + case EncodedImageCallback::DropReason::kDroppedByMediaOptimizations: + quality_scaler_->ReportDroppedFrameByMediaOpt(); + break; + case EncodedImageCallback::DropReason::kDroppedByEncoder: + quality_scaler_->ReportDroppedFrameByEncoder(); + break; + } +} + +void QualityScalerResource::OnReportQpUsageHigh() { + OnResourceUsageStateMeasured(ResourceUsageState::kOveruse); +} + +void QualityScalerResource::OnReportQpUsageLow() { + OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse); +} + +} // namespace webrtc diff --git a/video/adaptation/quality_scaler_resource.h b/video/adaptation/quality_scaler_resource.h new file mode 100644 index 0000000000..06c22ca3c6 --- /dev/null +++ b/video/adaptation/quality_scaler_resource.h @@ -0,0 +1,61 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_ +#define VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/video_adaptation_reason.h" +#include "api/video_codecs/video_encoder.h" +#include "call/adaptation/degradation_preference_provider.h" +#include "call/adaptation/resource_adaptation_processor_interface.h" +#include "modules/video_coding/utility/quality_scaler.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_queue.h" +#include "video/adaptation/video_stream_encoder_resource.h" + +namespace webrtc { + +// Handles interaction with the QualityScaler. +class QualityScalerResource : public VideoStreamEncoderResource, + public QualityScalerQpUsageHandlerInterface { + public: + static rtc::scoped_refptr Create(); + + QualityScalerResource(); + ~QualityScalerResource() override; + + bool is_started() const; + + void StartCheckForOveruse(VideoEncoder::QpThresholds qp_thresholds); + void StopCheckForOveruse(); + void SetQpThresholds(VideoEncoder::QpThresholds qp_thresholds); + bool QpFastFilterLow(); + void OnEncodeCompleted(const EncodedImage& encoded_image, + int64_t time_sent_in_us); + void OnFrameDropped(EncodedImageCallback::DropReason reason); + + // QualityScalerQpUsageHandlerInterface implementation. 
+ void OnReportQpUsageHigh() override; + void OnReportQpUsageLow() override; + + private: + std::unique_ptr quality_scaler_ + RTC_GUARDED_BY(encoder_queue()); +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_ diff --git a/video/adaptation/quality_scaler_resource_unittest.cc b/video/adaptation/quality_scaler_resource_unittest.cc new file mode 100644 index 0000000000..1a3175af00 --- /dev/null +++ b/video/adaptation/quality_scaler_resource_unittest.cc @@ -0,0 +1,74 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/adaptation/quality_scaler_resource.h" + +#include + +#include "absl/types/optional.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video_codecs/video_encoder.h" +#include "call/adaptation/test/mock_resource_listener.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using testing::_; +using testing::Eq; +using testing::StrictMock; + +namespace { + +class FakeDegradationPreferenceProvider : public DegradationPreferenceProvider { + public: + ~FakeDegradationPreferenceProvider() override = default; + + DegradationPreference degradation_preference() const override { + return DegradationPreference::MAINTAIN_FRAMERATE; + } +}; + +} // namespace + +class QualityScalerResourceTest : public ::testing::Test { + public: + QualityScalerResourceTest() + : quality_scaler_resource_(QualityScalerResource::Create()) { + quality_scaler_resource_->RegisterEncoderTaskQueue( + TaskQueueBase::Current()); + quality_scaler_resource_->SetResourceListener(&fake_resource_listener_); + } + + ~QualityScalerResourceTest() override { + 
quality_scaler_resource_->SetResourceListener(nullptr); + } + + protected: + StrictMock<MockResourceListener> fake_resource_listener_; + FakeDegradationPreferenceProvider degradation_preference_provider_; + rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource_; +}; + +TEST_F(QualityScalerResourceTest, ReportQpHigh) { + EXPECT_CALL(fake_resource_listener_, + OnResourceUsageStateMeasured(Eq(quality_scaler_resource_), + Eq(ResourceUsageState::kOveruse))); + quality_scaler_resource_->OnReportQpUsageHigh(); +} + +TEST_F(QualityScalerResourceTest, ReportQpLow) { + EXPECT_CALL(fake_resource_listener_, + OnResourceUsageStateMeasured(Eq(quality_scaler_resource_), + Eq(ResourceUsageState::kUnderuse))); + quality_scaler_resource_->OnReportQpUsageLow(); +} + +} // namespace webrtc diff --git a/video/adaptation/video_stream_encoder_resource.cc b/video/adaptation/video_stream_encoder_resource.cc new file mode 100644 index 0000000000..d26da708b6 --- /dev/null +++ b/video/adaptation/video_stream_encoder_resource.cc @@ -0,0 +1,62 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "video/adaptation/video_stream_encoder_resource.h" + +#include +#include + +namespace webrtc { + +VideoStreamEncoderResource::VideoStreamEncoderResource(std::string name) + : lock_(), + name_(std::move(name)), + encoder_queue_(nullptr), + listener_(nullptr) {} + +VideoStreamEncoderResource::~VideoStreamEncoderResource() { + RTC_DCHECK(!listener_) + << "There is a listener depending on a VideoStreamEncoderResource being " + << "destroyed."; +} + +void VideoStreamEncoderResource::RegisterEncoderTaskQueue( + TaskQueueBase* encoder_queue) { + RTC_DCHECK(!encoder_queue_); + RTC_DCHECK(encoder_queue); + encoder_queue_ = encoder_queue; +} + +void VideoStreamEncoderResource::SetResourceListener( + ResourceListener* listener) { + // If you want to change listener you need to unregister the old listener by + // setting it to null first. + MutexLock crit(&lock_); + RTC_DCHECK(!listener_ || !listener) << "A listener is already set"; + listener_ = listener; +} + +std::string VideoStreamEncoderResource::Name() const { + return name_; +} + +void VideoStreamEncoderResource::OnResourceUsageStateMeasured( + ResourceUsageState usage_state) { + MutexLock crit(&lock_); + if (listener_) { + listener_->OnResourceUsageStateMeasured(this, usage_state); + } +} + +TaskQueueBase* VideoStreamEncoderResource::encoder_queue() const { + return encoder_queue_; +} + +} // namespace webrtc diff --git a/video/adaptation/video_stream_encoder_resource.h b/video/adaptation/video_stream_encoder_resource.h new file mode 100644 index 0000000000..477fdf492d --- /dev/null +++ b/video/adaptation/video_stream_encoder_resource.h @@ -0,0 +1,55 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_ +#define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/task_queue/task_queue_base.h" +#include "call/adaptation/adaptation_constraint.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { + +class VideoStreamEncoderResource : public Resource { + public: + ~VideoStreamEncoderResource() override; + + // Registering task queues must be performed as part of initialization. + void RegisterEncoderTaskQueue(TaskQueueBase* encoder_queue); + + // Resource implementation. + std::string Name() const override; + void SetResourceListener(ResourceListener* listener) override; + + protected: + explicit VideoStreamEncoderResource(std::string name); + + void OnResourceUsageStateMeasured(ResourceUsageState usage_state); + + // The caller is responsible for ensuring the task queue is still valid. + TaskQueueBase* encoder_queue() const; + + private: + mutable Mutex lock_; + const std::string name_; + // Treated as const after initialization. + TaskQueueBase* encoder_queue_; + ResourceListener* listener_ RTC_GUARDED_BY(lock_); +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_ diff --git a/video/adaptation/video_stream_encoder_resource_manager.cc b/video/adaptation/video_stream_encoder_resource_manager.cc new file mode 100644 index 0000000000..340b2e8508 --- /dev/null +++ b/video/adaptation/video_stream_encoder_resource_manager.cc @@ -0,0 +1,668 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/adaptation/video_stream_encoder_resource_manager.h" + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/base/macros.h" +#include "api/adaptation/resource.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_adaptation_reason.h" +#include "api/video/video_source_interface.h" +#include "call/adaptation/video_source_restrictions.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/time_utils.h" +#include "video/adaptation/quality_scaler_resource.h" + +namespace webrtc { + +const int kDefaultInputPixelsWidth = 176; +const int kDefaultInputPixelsHeight = 144; + +namespace { + +bool IsResolutionScalingEnabled(DegradationPreference degradation_preference) { + return degradation_preference == DegradationPreference::MAINTAIN_FRAMERATE || + degradation_preference == DegradationPreference::BALANCED; +} + +bool IsFramerateScalingEnabled(DegradationPreference degradation_preference) { + return degradation_preference == DegradationPreference::MAINTAIN_RESOLUTION || + degradation_preference == DegradationPreference::BALANCED; +} + +std::string ToString(VideoAdaptationReason reason) { + switch (reason) { + case VideoAdaptationReason::kQuality: + return "quality"; + case VideoAdaptationReason::kCpu: + return "cpu"; + } + RTC_CHECK_NOTREACHED(); +} + +absl::optional GetSingleActiveStreamPixels(const VideoCodec& codec) { + int num_active = 0; + absl::optional pixels; + if (codec.codecType == VideoCodecType::kVideoCodecVP9) { + for (int i = 0; i < 
codec.VP9().numberOfSpatialLayers; ++i) { + if (codec.spatialLayers[i].active) { + ++num_active; + pixels = codec.spatialLayers[i].width * codec.spatialLayers[i].height; + } + } + } else { + for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { + if (codec.simulcastStream[i].active) { + ++num_active; + pixels = + codec.simulcastStream[i].width * codec.simulcastStream[i].height; + } + } + } + if (num_active > 1) + return absl::nullopt; + return pixels; +} + +std::vector<bool> GetActiveLayersFlags(const VideoCodec& codec) { + std::vector<bool> flags; + if (codec.codecType == VideoCodecType::kVideoCodecVP9) { + flags.resize(codec.VP9().numberOfSpatialLayers); + for (size_t i = 0; i < flags.size(); ++i) { + flags[i] = codec.spatialLayers[i].active; + } + } else { + flags.resize(codec.numberOfSimulcastStreams); + for (size_t i = 0; i < flags.size(); ++i) { + flags[i] = codec.simulcastStream[i].active; + } + } + return flags; +} + +bool EqualFlags(const std::vector<bool>& a, const std::vector<bool>& b) { + if (a.size() != b.size()) + return false; + return std::equal(a.begin(), a.end(), b.begin()); +} + +} // namespace + +class VideoStreamEncoderResourceManager::InitialFrameDropper { + public: + explicit InitialFrameDropper( + rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource) + : quality_scaler_resource_(quality_scaler_resource), + quality_scaler_settings_(QualityScalerSettings::ParseFromFieldTrials()), + has_seen_first_bwe_drop_(false), + set_start_bitrate_(DataRate::Zero()), + set_start_bitrate_time_ms_(0), + initial_framedrop_(0), + last_input_width_(0), + last_input_height_(0) { + RTC_DCHECK(quality_scaler_resource_); + } + + // Output signal. + bool DropInitialFrames() const { + return initial_framedrop_ < kMaxInitialFramedrop; + } + + absl::optional<int> single_active_stream_pixels() const { + return single_active_stream_pixels_; + } + + // Input signals.
+ void SetStartBitrate(DataRate start_bitrate, int64_t now_ms) { + set_start_bitrate_ = start_bitrate; + set_start_bitrate_time_ms_ = now_ms; + } + + void SetTargetBitrate(DataRate target_bitrate, int64_t now_ms) { + if (set_start_bitrate_ > DataRate::Zero() && !has_seen_first_bwe_drop_ && + quality_scaler_resource_->is_started() && + quality_scaler_settings_.InitialBitrateIntervalMs() && + quality_scaler_settings_.InitialBitrateFactor()) { + int64_t diff_ms = now_ms - set_start_bitrate_time_ms_; + if (diff_ms < + quality_scaler_settings_.InitialBitrateIntervalMs().value() && + (target_bitrate < + (set_start_bitrate_ * + quality_scaler_settings_.InitialBitrateFactor().value()))) { + RTC_LOG(LS_INFO) << "Reset initial_framedrop_. Start bitrate: " + << set_start_bitrate_.bps() + << ", target bitrate: " << target_bitrate.bps(); + initial_framedrop_ = 0; + has_seen_first_bwe_drop_ = true; + } + } + } + + void OnEncoderSettingsUpdated( + const VideoCodec& codec, + const VideoAdaptationCounters& adaptation_counters) { + std::vector active_flags = GetActiveLayersFlags(codec); + // Check if the source resolution has changed for the external reasons, + // i.e. without any adaptation from WebRTC. + const bool source_resolution_changed = + (last_input_width_ != codec.width || + last_input_height_ != codec.height) && + adaptation_counters.resolution_adaptations == + last_adaptation_counters_.resolution_adaptations; + if (!EqualFlags(active_flags, last_active_flags_) || + source_resolution_changed) { + // Streams configuration has changed. + // Initial frame drop must be enabled because BWE might be way too low + // for the selected resolution. 
+ if (quality_scaler_resource_->is_started()) { + RTC_LOG(LS_INFO) << "Resetting initial_framedrop_ due to changed " + "stream parameters"; + initial_framedrop_ = 0; + } + } + last_adaptation_counters_ = adaptation_counters; + last_active_flags_ = active_flags; + last_input_width_ = codec.width; + last_input_height_ = codec.height; + single_active_stream_pixels_ = GetSingleActiveStreamPixels(codec); + } + + void OnFrameDroppedDueToSize() { ++initial_framedrop_; } + + void Disable() { initial_framedrop_ = kMaxInitialFramedrop; } + + void OnQualityScalerSettingsUpdated() { + if (quality_scaler_resource_->is_started()) { + // Restart frame drops due to size. + initial_framedrop_ = 0; + } else { + // Quality scaling disabled so we shouldn't drop initial frames. + initial_framedrop_ = kMaxInitialFramedrop; + } + } + + private: + // The maximum number of frames to drop at beginning of stream to try and + // achieve desired bitrate. + static const int kMaxInitialFramedrop = 4; + + const rtc::scoped_refptr quality_scaler_resource_; + const QualityScalerSettings quality_scaler_settings_; + bool has_seen_first_bwe_drop_; + DataRate set_start_bitrate_; + int64_t set_start_bitrate_time_ms_; + // Counts how many frames we've dropped in the initial framedrop phase. 
+ int initial_framedrop_; + absl::optional single_active_stream_pixels_; + + std::vector last_active_flags_; + VideoAdaptationCounters last_adaptation_counters_; + int last_input_width_; + int last_input_height_; +}; + +VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( + VideoStreamInputStateProvider* input_state_provider, + VideoStreamEncoderObserver* encoder_stats_observer, + Clock* clock, + bool experiment_cpu_load_estimator, + std::unique_ptr overuse_detector, + DegradationPreferenceProvider* degradation_preference_provider) + : degradation_preference_provider_(degradation_preference_provider), + bitrate_constraint_(std::make_unique()), + balanced_constraint_(std::make_unique( + degradation_preference_provider_)), + encode_usage_resource_( + EncodeUsageResource::Create(std::move(overuse_detector))), + quality_scaler_resource_(QualityScalerResource::Create()), + encoder_queue_(nullptr), + input_state_provider_(input_state_provider), + adaptation_processor_(nullptr), + encoder_stats_observer_(encoder_stats_observer), + degradation_preference_(DegradationPreference::DISABLED), + video_source_restrictions_(), + clock_(clock), + experiment_cpu_load_estimator_(experiment_cpu_load_estimator), + initial_frame_dropper_( + std::make_unique(quality_scaler_resource_)), + quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), + encoder_target_bitrate_bps_(absl::nullopt), + quality_rampup_experiment_( + QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)), + encoder_settings_(absl::nullopt) { + RTC_CHECK(degradation_preference_provider_); + RTC_CHECK(encoder_stats_observer_); +} + +VideoStreamEncoderResourceManager::~VideoStreamEncoderResourceManager() = + default; + +void VideoStreamEncoderResourceManager::Initialize( + rtc::TaskQueue* encoder_queue) { + RTC_DCHECK(!encoder_queue_); + RTC_DCHECK(encoder_queue); + encoder_queue_ = encoder_queue; + encode_usage_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get()); + 
quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get()); +} + +void VideoStreamEncoderResourceManager::SetAdaptationProcessor( + ResourceAdaptationProcessorInterface* adaptation_processor, + VideoStreamAdapter* stream_adapter) { + RTC_DCHECK_RUN_ON(encoder_queue_); + adaptation_processor_ = adaptation_processor; + stream_adapter_ = stream_adapter; +} + +void VideoStreamEncoderResourceManager::SetDegradationPreferences( + DegradationPreference degradation_preference) { + RTC_DCHECK_RUN_ON(encoder_queue_); + degradation_preference_ = degradation_preference; + UpdateStatsAdaptationSettings(); +} + +DegradationPreference +VideoStreamEncoderResourceManager::degradation_preference() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + return degradation_preference_; +} + +void VideoStreamEncoderResourceManager::EnsureEncodeUsageResourceStarted() { + RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK(encoder_settings_.has_value()); + if (encode_usage_resource_->is_started()) { + encode_usage_resource_->StopCheckForOveruse(); + } else { + // If the resource has not yet started then it needs to be added. 
+ AddResource(encode_usage_resource_, VideoAdaptationReason::kCpu); + } + encode_usage_resource_->StartCheckForOveruse(GetCpuOveruseOptions()); +} + +void VideoStreamEncoderResourceManager::StopManagedResources() { + RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK(adaptation_processor_); + if (encode_usage_resource_->is_started()) { + encode_usage_resource_->StopCheckForOveruse(); + RemoveResource(encode_usage_resource_); + } + if (quality_scaler_resource_->is_started()) { + quality_scaler_resource_->StopCheckForOveruse(); + RemoveResource(quality_scaler_resource_); + } +} + +void VideoStreamEncoderResourceManager::AddResource( + rtc::scoped_refptr resource, + VideoAdaptationReason reason) { + RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK(resource); + bool inserted; + std::tie(std::ignore, inserted) = resources_.emplace(resource, reason); + RTC_DCHECK(inserted) << "Resource " << resource->Name() + << " already was inserted"; + adaptation_processor_->AddResource(resource); +} + +void VideoStreamEncoderResourceManager::RemoveResource( + rtc::scoped_refptr resource) { + { + RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK(resource); + const auto& it = resources_.find(resource); + RTC_DCHECK(it != resources_.end()) + << "Resource \"" << resource->Name() << "\" not found."; + resources_.erase(it); + } + adaptation_processor_->RemoveResource(resource); +} + +std::vector +VideoStreamEncoderResourceManager::AdaptationConstraints() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + return {bitrate_constraint_.get(), balanced_constraint_.get()}; +} + +void VideoStreamEncoderResourceManager::SetEncoderSettings( + EncoderSettings encoder_settings) { + RTC_DCHECK_RUN_ON(encoder_queue_); + encoder_settings_ = std::move(encoder_settings); + bitrate_constraint_->OnEncoderSettingsUpdated(encoder_settings_); + initial_frame_dropper_->OnEncoderSettingsUpdated( + encoder_settings_->video_codec(), current_adaptation_counters_); + MaybeUpdateTargetFrameRate(); +} + +void 
VideoStreamEncoderResourceManager::SetStartBitrate( + DataRate start_bitrate) { + RTC_DCHECK_RUN_ON(encoder_queue_); + if (!start_bitrate.IsZero()) { + encoder_target_bitrate_bps_ = start_bitrate.bps(); + bitrate_constraint_->OnEncoderTargetBitrateUpdated( + encoder_target_bitrate_bps_); + balanced_constraint_->OnEncoderTargetBitrateUpdated( + encoder_target_bitrate_bps_); + } + initial_frame_dropper_->SetStartBitrate(start_bitrate, + clock_->TimeInMicroseconds()); +} + +void VideoStreamEncoderResourceManager::SetTargetBitrate( + DataRate target_bitrate) { + RTC_DCHECK_RUN_ON(encoder_queue_); + if (!target_bitrate.IsZero()) { + encoder_target_bitrate_bps_ = target_bitrate.bps(); + bitrate_constraint_->OnEncoderTargetBitrateUpdated( + encoder_target_bitrate_bps_); + balanced_constraint_->OnEncoderTargetBitrateUpdated( + encoder_target_bitrate_bps_); + } + initial_frame_dropper_->SetTargetBitrate(target_bitrate, + clock_->TimeInMilliseconds()); +} + +void VideoStreamEncoderResourceManager::SetEncoderRates( + const VideoEncoder::RateControlParameters& encoder_rates) { + RTC_DCHECK_RUN_ON(encoder_queue_); + encoder_rates_ = encoder_rates; +} + +void VideoStreamEncoderResourceManager::OnFrameDroppedDueToSize() { + RTC_DCHECK_RUN_ON(encoder_queue_); + initial_frame_dropper_->OnFrameDroppedDueToSize(); + Adaptation reduce_resolution = stream_adapter_->GetAdaptDownResolution(); + if (reduce_resolution.status() == Adaptation::Status::kValid) { + stream_adapter_->ApplyAdaptation(reduce_resolution, + quality_scaler_resource_); + } +} + +void VideoStreamEncoderResourceManager::OnEncodeStarted( + const VideoFrame& cropped_frame, + int64_t time_when_first_seen_us) { + RTC_DCHECK_RUN_ON(encoder_queue_); + encode_usage_resource_->OnEncodeStarted(cropped_frame, + time_when_first_seen_us); +} + +void VideoStreamEncoderResourceManager::OnEncodeCompleted( + const EncodedImage& encoded_image, + int64_t time_sent_in_us, + absl::optional encode_duration_us) { + 
RTC_DCHECK_RUN_ON(encoder_queue_); + // Inform |encode_usage_resource_| of the encode completed event. + uint32_t timestamp = encoded_image.Timestamp(); + int64_t capture_time_us = + encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec; + encode_usage_resource_->OnEncodeCompleted( + timestamp, time_sent_in_us, capture_time_us, encode_duration_us); + quality_scaler_resource_->OnEncodeCompleted(encoded_image, time_sent_in_us); +} + +void VideoStreamEncoderResourceManager::OnFrameDropped( + EncodedImageCallback::DropReason reason) { + RTC_DCHECK_RUN_ON(encoder_queue_); + quality_scaler_resource_->OnFrameDropped(reason); +} + +bool VideoStreamEncoderResourceManager::DropInitialFrames() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + return initial_frame_dropper_->DropInitialFrames(); +} + +absl::optional +VideoStreamEncoderResourceManager::SingleActiveStreamPixels() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + return initial_frame_dropper_->single_active_stream_pixels(); +} + +void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() { + RTC_DCHECK_RUN_ON(encoder_queue_); + initial_frame_dropper_->Disable(); + if (quality_rampup_experiment_ && quality_scaler_resource_->is_started()) { + DataRate bandwidth = encoder_rates_.has_value() + ? 
encoder_rates_->bandwidth_allocation + : DataRate::Zero(); + quality_rampup_experiment_->PerformQualityRampupExperiment( + quality_scaler_resource_, bandwidth, + DataRate::BitsPerSec(encoder_target_bitrate_bps_.value_or(0)), + DataRate::KilobitsPerSec(encoder_settings_->video_codec().maxBitrate), + LastInputFrameSizeOrDefault()); + } +} + +void VideoStreamEncoderResourceManager::UpdateQualityScalerSettings( + absl::optional qp_thresholds) { + RTC_DCHECK_RUN_ON(encoder_queue_); + if (qp_thresholds.has_value()) { + if (quality_scaler_resource_->is_started()) { + quality_scaler_resource_->SetQpThresholds(qp_thresholds.value()); + } else { + quality_scaler_resource_->StartCheckForOveruse(qp_thresholds.value()); + AddResource(quality_scaler_resource_, VideoAdaptationReason::kQuality); + } + } else if (quality_scaler_resource_->is_started()) { + quality_scaler_resource_->StopCheckForOveruse(); + RemoveResource(quality_scaler_resource_); + } + initial_frame_dropper_->OnQualityScalerSettingsUpdated(); +} + +void VideoStreamEncoderResourceManager::ConfigureQualityScaler( + const VideoEncoder::EncoderInfo& encoder_info) { + RTC_DCHECK_RUN_ON(encoder_queue_); + const auto scaling_settings = encoder_info.scaling_settings; + const bool quality_scaling_allowed = + IsResolutionScalingEnabled(degradation_preference_) && + scaling_settings.thresholds; + + // TODO(https://crbug.com/webrtc/11222): Should this move to + // QualityScalerResource? + if (quality_scaling_allowed) { + if (!quality_scaler_resource_->is_started()) { + // Quality scaler has not already been configured. + + // Use experimental thresholds if available. + absl::optional experimental_thresholds; + if (quality_scaling_experiment_enabled_) { + experimental_thresholds = QualityScalingExperiment::GetQpThresholds( + GetVideoCodecTypeOrGeneric(encoder_settings_)); + } + UpdateQualityScalerSettings(experimental_thresholds + ? 
*experimental_thresholds + : *(scaling_settings.thresholds)); + } + } else { + UpdateQualityScalerSettings(absl::nullopt); + } + + // Set the qp-thresholds to the balanced settings if balanced mode. + if (degradation_preference_ == DegradationPreference::BALANCED && + quality_scaler_resource_->is_started()) { + absl::optional thresholds = + balanced_settings_.GetQpThresholds( + GetVideoCodecTypeOrGeneric(encoder_settings_), + LastInputFrameSizeOrDefault()); + if (thresholds) { + quality_scaler_resource_->SetQpThresholds(*thresholds); + } + } + UpdateStatsAdaptationSettings(); +} + +VideoAdaptationReason VideoStreamEncoderResourceManager::GetReasonFromResource( + rtc::scoped_refptr resource) const { + RTC_DCHECK_RUN_ON(encoder_queue_); + const auto& registered_resource = resources_.find(resource); + RTC_DCHECK(registered_resource != resources_.end()) + << resource->Name() << " not found."; + return registered_resource->second; +} + +// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle +// pipelining encoders better (multiple input frames before something comes +// out). This should effectively turn off CPU adaptations for systems that +// remotely cope with the load right now. +CpuOveruseOptions VideoStreamEncoderResourceManager::GetCpuOveruseOptions() + const { + RTC_DCHECK_RUN_ON(encoder_queue_); + // This is already ensured by the only caller of this method: + // StartResourceAdaptation(). + RTC_DCHECK(encoder_settings_.has_value()); + CpuOveruseOptions options; + // Hardware accelerated encoders are assumed to be pipelined; give them + // additional overuse time. 
+ if (encoder_settings_->encoder_info().is_hardware_accelerated) { + options.low_encode_usage_threshold_percent = 150; + options.high_encode_usage_threshold_percent = 200; + } + if (experiment_cpu_load_estimator_) { + options.filter_time_ms = 5 * rtc::kNumMillisecsPerSec; + } + return options; +} + +int VideoStreamEncoderResourceManager::LastInputFrameSizeOrDefault() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + return input_state_provider_->InputState().frame_size_pixels().value_or( + kDefaultInputPixelsWidth * kDefaultInputPixelsHeight); +} + +void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) { + RTC_DCHECK_RUN_ON(encoder_queue_); + current_adaptation_counters_ = adaptation_counters; + + // TODO(bugs.webrtc.org/11553) Remove reason parameter and add reset callback. + if (!reason && adaptation_counters.Total() == 0) { + // Adaptation was manually reset - clear the per-reason counters too. 
+ encoder_stats_observer_->ClearAdaptationStats(); + } + + video_source_restrictions_ = FilterRestrictionsByDegradationPreference( + restrictions, degradation_preference_); + MaybeUpdateTargetFrameRate(); +} + +void VideoStreamEncoderResourceManager::OnResourceLimitationChanged( + rtc::scoped_refptr resource, + const std::map, VideoAdaptationCounters>& + resource_limitations) { + RTC_DCHECK_RUN_ON(encoder_queue_); + if (!resource) { + encoder_stats_observer_->ClearAdaptationStats(); + return; + } + + std::map limitations; + for (auto& resource_counter : resource_limitations) { + std::map::iterator it; + bool inserted; + std::tie(it, inserted) = limitations.emplace( + GetReasonFromResource(resource_counter.first), resource_counter.second); + if (!inserted && it->second.Total() < resource_counter.second.Total()) { + it->second = resource_counter.second; + } + } + + VideoAdaptationReason adaptation_reason = GetReasonFromResource(resource); + encoder_stats_observer_->OnAdaptationChanged( + adaptation_reason, limitations[VideoAdaptationReason::kCpu], + limitations[VideoAdaptationReason::kQuality]); + + if (quality_rampup_experiment_) { + bool cpu_limited = limitations.at(VideoAdaptationReason::kCpu).Total() > 0; + auto qp_resolution_adaptations = + limitations.at(VideoAdaptationReason::kQuality).resolution_adaptations; + quality_rampup_experiment_->cpu_adapted(cpu_limited); + quality_rampup_experiment_->qp_resolution_adaptations( + qp_resolution_adaptations); + } + + RTC_LOG(LS_INFO) << ActiveCountsToString(limitations); +} + +void VideoStreamEncoderResourceManager::MaybeUpdateTargetFrameRate() { + RTC_DCHECK_RUN_ON(encoder_queue_); + absl::optional codec_max_frame_rate = + encoder_settings_.has_value() + ? absl::optional( + encoder_settings_->video_codec().maxFramerate) + : absl::nullopt; + // The current target framerate is the maximum frame rate as specified by + // the current codec configuration or any limit imposed by the adaptation + // module. 
This is used to make sure overuse detection doesn't needlessly + // trigger in low and/or variable framerate scenarios. + absl::optional target_frame_rate = + video_source_restrictions_.max_frame_rate(); + if (!target_frame_rate.has_value() || + (codec_max_frame_rate.has_value() && + codec_max_frame_rate.value() < target_frame_rate.value())) { + target_frame_rate = codec_max_frame_rate; + } + encode_usage_resource_->SetTargetFrameRate(target_frame_rate); +} + +void VideoStreamEncoderResourceManager::UpdateStatsAdaptationSettings() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + VideoStreamEncoderObserver::AdaptationSettings cpu_settings( + IsResolutionScalingEnabled(degradation_preference_), + IsFramerateScalingEnabled(degradation_preference_)); + + VideoStreamEncoderObserver::AdaptationSettings quality_settings = + quality_scaler_resource_->is_started() + ? cpu_settings + : VideoStreamEncoderObserver::AdaptationSettings(); + encoder_stats_observer_->UpdateAdaptationSettings(cpu_settings, + quality_settings); +} + +// static +std::string VideoStreamEncoderResourceManager::ActiveCountsToString( + const std::map& + active_counts) { + rtc::StringBuilder ss; + + ss << "Downgrade counts: fps: {"; + for (auto& reason_count : active_counts) { + ss << ToString(reason_count.first) << ":"; + ss << reason_count.second.fps_adaptations; + } + ss << "}, resolution {"; + for (auto& reason_count : active_counts) { + ss << ToString(reason_count.first) << ":"; + ss << reason_count.second.resolution_adaptations; + } + ss << "}"; + + return ss.Release(); +} + +void VideoStreamEncoderResourceManager::OnQualityRampUp() { + RTC_DCHECK_RUN_ON(encoder_queue_); + stream_adapter_->ClearRestrictions(); + quality_rampup_experiment_.reset(); +} +} // namespace webrtc diff --git a/video/adaptation/video_stream_encoder_resource_manager.h b/video/adaptation/video_stream_encoder_resource_manager.h new file mode 100644 index 0000000000..623d17adc3 --- /dev/null +++ 
b/video/adaptation/video_stream_encoder_resource_manager.h @@ -0,0 +1,213 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_ +#define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_adaptation_reason.h" +#include "api/video/video_frame.h" +#include "api/video/video_source_interface.h" +#include "api/video/video_stream_encoder_observer.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_config.h" +#include "call/adaptation/resource_adaptation_processor_interface.h" +#include "call/adaptation/video_stream_adapter.h" +#include "call/adaptation/video_stream_input_state_provider.h" +#include "rtc_base/experiments/quality_scaler_settings.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" +#include "video/adaptation/balanced_constraint.h" +#include "video/adaptation/bitrate_constraint.h" +#include "video/adaptation/encode_usage_resource.h" +#include "video/adaptation/overuse_frame_detector.h" +#include 
"video/adaptation/quality_rampup_experiment_helper.h" +#include "video/adaptation/quality_scaler_resource.h" +#include "video/adaptation/video_stream_encoder_resource.h" + +namespace webrtc { + +// The assumed input frame size if we have not yet received a frame. +// TODO(hbos): This is 144p - why are we assuming super low quality? Seems like +// a bad heuristic. +extern const int kDefaultInputPixelsWidth; +extern const int kDefaultInputPixelsHeight; + +// Owns adaptation-related Resources pertaining to a single VideoStreamEncoder +// and passes on the relevant input from the encoder to the resources. The +// resources provide resource usage states to the ResourceAdaptationProcessor +// which is responsible for reconfiguring streams in order not to overuse +// resources. +// +// The manager is also involved with various mitigations not part of the +// ResourceAdaptationProcessor code such as the inital frame dropping. +class VideoStreamEncoderResourceManager + : public VideoSourceRestrictionsListener, + public ResourceLimitationsListener, + public QualityRampUpExperimentListener { + public: + VideoStreamEncoderResourceManager( + VideoStreamInputStateProvider* input_state_provider, + VideoStreamEncoderObserver* encoder_stats_observer, + Clock* clock, + bool experiment_cpu_load_estimator, + std::unique_ptr overuse_detector, + DegradationPreferenceProvider* degradation_preference_provider); + ~VideoStreamEncoderResourceManager() override; + + void Initialize(rtc::TaskQueue* encoder_queue); + void SetAdaptationProcessor( + ResourceAdaptationProcessorInterface* adaptation_processor, + VideoStreamAdapter* stream_adapter); + + // TODO(https://crbug.com/webrtc/11563): The degradation preference is a + // setting of the Processor, it does not belong to the Manager - can we get + // rid of this? 
+ void SetDegradationPreferences(DegradationPreference degradation_preference); + DegradationPreference degradation_preference() const; + + void EnsureEncodeUsageResourceStarted(); + // Stops the encode usage and quality scaler resources if not already stopped. + void StopManagedResources(); + + // Settings that affect the VideoStreamEncoder-specific resources. + void SetEncoderSettings(EncoderSettings encoder_settings); + void SetStartBitrate(DataRate start_bitrate); + void SetTargetBitrate(DataRate target_bitrate); + void SetEncoderRates( + const VideoEncoder::RateControlParameters& encoder_rates); + // TODO(https://crbug.com/webrtc/11338): This can be made private if we + // configure on SetDegredationPreference and SetEncoderSettings. + void ConfigureQualityScaler(const VideoEncoder::EncoderInfo& encoder_info); + + // Methods corresponding to different points in the encoding pipeline. + void OnFrameDroppedDueToSize(); + void OnMaybeEncodeFrame(); + void OnEncodeStarted(const VideoFrame& cropped_frame, + int64_t time_when_first_seen_us); + void OnEncodeCompleted(const EncodedImage& encoded_image, + int64_t time_sent_in_us, + absl::optional encode_duration_us); + void OnFrameDropped(EncodedImageCallback::DropReason reason); + + // Resources need to be mapped to an AdaptReason (kCpu or kQuality) in order + // to update legacy getStats(). + void AddResource(rtc::scoped_refptr resource, + VideoAdaptationReason reason); + void RemoveResource(rtc::scoped_refptr resource); + std::vector AdaptationConstraints() const; + // If true, the VideoStreamEncoder should execute its logic to maybe drop + // frames based on size and bitrate. + bool DropInitialFrames() const; + absl::optional SingleActiveStreamPixels() const; + + // VideoSourceRestrictionsListener implementation. + // Updates |video_source_restrictions_|. 
+ void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override; + void OnResourceLimitationChanged( + rtc::scoped_refptr resource, + const std::map, VideoAdaptationCounters>& + resource_limitations) override; + + // QualityRampUpExperimentListener implementation. + void OnQualityRampUp() override; + + private: + class InitialFrameDropper; + + VideoAdaptationReason GetReasonFromResource( + rtc::scoped_refptr resource) const; + + CpuOveruseOptions GetCpuOveruseOptions() const; + int LastInputFrameSizeOrDefault() const; + + // Calculates an up-to-date value of the target frame rate and informs the + // |encode_usage_resource_| of the new value. + void MaybeUpdateTargetFrameRate(); + + // Use nullopt to disable quality scaling. + void UpdateQualityScalerSettings( + absl::optional qp_thresholds); + + void UpdateStatsAdaptationSettings() const; + + static std::string ActiveCountsToString( + const std::map& + active_counts); + + DegradationPreferenceProvider* const degradation_preference_provider_; + std::unique_ptr bitrate_constraint_ + RTC_GUARDED_BY(encoder_queue_); + const std::unique_ptr balanced_constraint_ + RTC_GUARDED_BY(encoder_queue_); + const rtc::scoped_refptr encode_usage_resource_; + const rtc::scoped_refptr quality_scaler_resource_; + + rtc::TaskQueue* encoder_queue_; + VideoStreamInputStateProvider* const input_state_provider_ + RTC_GUARDED_BY(encoder_queue_); + ResourceAdaptationProcessorInterface* adaptation_processor_; + VideoStreamAdapter* stream_adapter_ RTC_GUARDED_BY(encoder_queue_); + // Thread-safe. 
+ VideoStreamEncoderObserver* const encoder_stats_observer_; + + DegradationPreference degradation_preference_ RTC_GUARDED_BY(encoder_queue_); + VideoSourceRestrictions video_source_restrictions_ + RTC_GUARDED_BY(encoder_queue_); + + VideoAdaptationCounters current_adaptation_counters_ + RTC_GUARDED_BY(encoder_queue_); + + const BalancedDegradationSettings balanced_settings_; + Clock* clock_ RTC_GUARDED_BY(encoder_queue_); + const bool experiment_cpu_load_estimator_ RTC_GUARDED_BY(encoder_queue_); + const std::unique_ptr initial_frame_dropper_ + RTC_GUARDED_BY(encoder_queue_); + const bool quality_scaling_experiment_enabled_ RTC_GUARDED_BY(encoder_queue_); + absl::optional encoder_target_bitrate_bps_ + RTC_GUARDED_BY(encoder_queue_); + absl::optional encoder_rates_ + RTC_GUARDED_BY(encoder_queue_); + std::unique_ptr quality_rampup_experiment_ + RTC_GUARDED_BY(encoder_queue_); + absl::optional encoder_settings_ + RTC_GUARDED_BY(encoder_queue_); + + // Ties a resource to a reason for statistical reporting. This AdaptReason is + // also used by this module to make decisions about how to adapt up/down. + std::map, VideoAdaptationReason> resources_ + RTC_GUARDED_BY(encoder_queue_); +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_ diff --git a/video/alignment_adjuster.cc b/video/alignment_adjuster.cc new file mode 100644 index 0000000000..b08f2f184a --- /dev/null +++ b/video/alignment_adjuster.cc @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/alignment_adjuster.h" + +#include +#include + +#include "absl/algorithm/container.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +// Round each scale factor to the closest rational in form alignment/i where i +// is a multiple of |requested_alignment|. Each resolution divisible by +// |alignment| will be divisible by |requested_alignment| after the scale factor +// is applied. +double RoundToMultiple(int alignment, + int requested_alignment, + VideoEncoderConfig* config, + bool update_config) { + double diff = 0.0; + for (auto& layer : config->simulcast_layers) { + double min_dist = std::numeric_limits::max(); + double new_scale = 1.0; + for (int i = requested_alignment; i <= alignment; + i += requested_alignment) { + double dist = std::abs(layer.scale_resolution_down_by - + alignment / static_cast(i)); + if (dist <= min_dist) { + min_dist = dist; + new_scale = alignment / static_cast(i); + } + } + diff += std::abs(layer.scale_resolution_down_by - new_scale); + if (update_config) { + RTC_LOG(LS_INFO) << "scale_resolution_down_by " + << layer.scale_resolution_down_by << " -> " << new_scale; + layer.scale_resolution_down_by = new_scale; + } + } + return diff; +} +} // namespace + +// Input: encoder_info.requested_resolution_alignment (K) +// Input: encoder_info.apply_alignment_to_all_simulcast_layers (B) +// Input: vector config->simulcast_layers.scale_resolution_down_by (S[i]) +// Output: +// If B is false, returns K and does not adjust scaling factors. +// Otherwise, returns adjusted alignment (A), adjusted scaling factors (S'[i]) +// are written in |config| such that: +// +// A / S'[i] are integers divisible by K +// sum abs(S'[i] - S[i]) -> min +// A integer <= 16 +// +// Solution chooses closest S'[i] in a form A / j where j is a multiple of K. 
+ +int AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( + const VideoEncoder::EncoderInfo& encoder_info, + VideoEncoderConfig* config) { + const int requested_alignment = encoder_info.requested_resolution_alignment; + if (!encoder_info.apply_alignment_to_all_simulcast_layers) { + return requested_alignment; + } + + if (requested_alignment < 1 || config->number_of_streams <= 1 || + config->simulcast_layers.size() <= 1) { + return requested_alignment; + } + + // Update alignment to also apply to simulcast layers. + const bool has_scale_resolution_down_by = absl::c_any_of( + config->simulcast_layers, [](const webrtc::VideoStream& layer) { + return layer.scale_resolution_down_by >= 1.0; + }); + + if (!has_scale_resolution_down_by) { + // Default resolution downscaling used (scale factors: 1, 2, 4, ...). + return requested_alignment * (1 << (config->simulcast_layers.size() - 1)); + } + + // Get alignment for downscaled layers. + // Adjust |scale_resolution_down_by| to a common multiple to limit the + // alignment value (to avoid largely cropped frames and possibly with an + // aspect ratio far from the original). + const int kMaxAlignment = 16; + + for (auto& layer : config->simulcast_layers) { + layer.scale_resolution_down_by = + std::max(layer.scale_resolution_down_by, 1.0); + layer.scale_resolution_down_by = + std::min(layer.scale_resolution_down_by, 10000.0); + } + + // Decide on common multiple to use. 
+ double min_diff = std::numeric_limits::max(); + int best_alignment = 1; + for (int alignment = requested_alignment; alignment <= kMaxAlignment; + ++alignment) { + double diff = RoundToMultiple(alignment, requested_alignment, config, + /*update_config=*/false); + if (diff < min_diff) { + min_diff = diff; + best_alignment = alignment; + } + } + RoundToMultiple(best_alignment, requested_alignment, config, + /*update_config=*/true); + + return std::max(best_alignment, requested_alignment); +} +} // namespace webrtc diff --git a/video/alignment_adjuster.h b/video/alignment_adjuster.h new file mode 100644 index 0000000000..53d7927887 --- /dev/null +++ b/video/alignment_adjuster.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ALIGNMENT_ADJUSTER_H_ +#define VIDEO_ALIGNMENT_ADJUSTER_H_ + +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_config.h" + +namespace webrtc { + +class AlignmentAdjuster { + public: + // Returns the resolution alignment requested by the encoder (i.e + // |EncoderInfo::requested_resolution_alignment| which ensures that delivered + // frames to the encoder are divisible by this alignment). + // + // If |EncoderInfo::apply_alignment_to_all_simulcast_layers| is enabled, the + // alignment will be adjusted to ensure that each simulcast layer also is + // divisible by |requested_resolution_alignment|. The configured scale factors + // |scale_resolution_down_by| may be adjusted to a common multiple to limit + // the alignment value to avoid largely cropped frames and possibly with an + // aspect ratio far from the original. 
+ static int GetAlignmentAndMaybeAdjustScaleFactors( + const VideoEncoder::EncoderInfo& info, + VideoEncoderConfig* config); +}; + +} // namespace webrtc + +#endif // VIDEO_ALIGNMENT_ADJUSTER_H_ diff --git a/video/alignment_adjuster_unittest.cc b/video/alignment_adjuster_unittest.cc new file mode 100644 index 0000000000..07c7de5f16 --- /dev/null +++ b/video/alignment_adjuster_unittest.cc @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/alignment_adjuster.h" + +#include +#include +#include + +#include "rtc_base/numerics/safe_conversions.h" +#include "test/encoder_settings.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { +VideoEncoder::EncoderInfo GetEncoderInfo(int alignment, bool apply) { + VideoEncoder::EncoderInfo info; + info.requested_resolution_alignment = alignment; + info.apply_alignment_to_all_simulcast_layers = apply; + return info; +} +} // namespace + +class AlignmentAdjusterTest + : public ::testing::TestWithParam<::testing::tuple< + int, + std::tuple, std::vector, int>>> { + protected: + AlignmentAdjusterTest() + : kRequestedAlignment(std::get<0>(GetParam())), + kScaleFactors(std::get<0>(std::get<1>(GetParam()))), + kAdjustedScaleFactors(std::get<1>(std::get<1>(GetParam()))), + kAdjustedAlignment(std::get<2>(std::get<1>(GetParam()))) {} + + const int kRequestedAlignment; + const std::vector kScaleFactors; + const std::vector kAdjustedScaleFactors; + const int kAdjustedAlignment; +}; + +INSTANTIATE_TEST_SUITE_P( + ScaleFactorsAndAlignment, + AlignmentAdjusterTest, + ::testing::Combine( + ::testing::Values(2), // 
kRequestedAlignment + ::testing::Values( + std::make_tuple(std::vector{-1.0}, // kScaleFactors + std::vector{-1.0}, // kAdjustedScaleFactors + 2), // default: {1.0} // kAdjustedAlignment + std::make_tuple(std::vector{-1.0, -1.0}, + std::vector{-1.0, -1.0}, + 4), // default: {1.0, 2.0} + std::make_tuple(std::vector{-1.0, -1.0, -1.0}, + std::vector{-1.0, -1.0, -1.0}, + 8), // default: {1.0, 2.0, 4.0} + std::make_tuple(std::vector{1.0, 2.0, 4.0}, + std::vector{1.0, 2.0, 4.0}, + 8), + std::make_tuple(std::vector{9999.0, -1.0, 1.0}, + std::vector{8.0, 1.0, 1.0}, + 16), // kMaxAlignment + std::make_tuple(std::vector{3.99, 2.01, 1.0}, + std::vector{4.0, 2.0, 1.0}, + 8), + std::make_tuple(std::vector{2.9, 2.1}, + std::vector{6.0 / 2.0, 6.0 / 3.0}, + 12), + std::make_tuple(std::vector{4.9, 1.7, 1.2}, + std::vector{5.0, 5.0 / 3.0, 5.0 / 4.0}, + 10), + std::make_tuple(std::vector{1.0, 1.3}, + std::vector{4.0 / 4.0, 4.0 / 3.0}, + 8), + std::make_tuple(std::vector{1.75, 3.5}, + std::vector{7.0 / 4.0, 7.0 / 2.0}, + 7), + std::make_tuple(std::vector{1.5, 2.5}, + std::vector{1.5, 2.5}, + 15)))); + +TEST_P(AlignmentAdjusterTest, AlignmentAppliedToAllLayers) { + const bool kApplyAlignmentToAllLayers = true; + + // Fill config with the scaling factor by which to reduce encoding size. + const int num_streams = kScaleFactors.size(); + VideoEncoderConfig config; + test::FillEncoderConfiguration(kVideoCodecVP8, num_streams, &config); + for (int i = 0; i < num_streams; ++i) { + config.simulcast_layers[i].scale_resolution_down_by = kScaleFactors[i]; + } + + // Verify requested alignment from sink. + VideoEncoder::EncoderInfo info = + GetEncoderInfo(kRequestedAlignment, kApplyAlignmentToAllLayers); + int alignment = + AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors(info, &config); + EXPECT_EQ(alignment, kAdjustedAlignment); + + // Verify adjusted scale factors. 
+ for (int i = 0; i < num_streams; ++i) { + EXPECT_EQ(config.simulcast_layers[i].scale_resolution_down_by, + kAdjustedScaleFactors[i]); + } +} + +TEST_P(AlignmentAdjusterTest, AlignmentNotAppliedToAllLayers) { + const bool kApplyAlignmentToAllLayers = false; + + // Fill config with the scaling factor by which to reduce encoding size. + const int num_streams = kScaleFactors.size(); + VideoEncoderConfig config; + test::FillEncoderConfiguration(kVideoCodecVP8, num_streams, &config); + for (int i = 0; i < num_streams; ++i) { + config.simulcast_layers[i].scale_resolution_down_by = kScaleFactors[i]; + } + + // Verify requested alignment from sink, alignment is not adjusted. + VideoEncoder::EncoderInfo info = + GetEncoderInfo(kRequestedAlignment, kApplyAlignmentToAllLayers); + int alignment = + AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors(info, &config); + EXPECT_EQ(alignment, kRequestedAlignment); + + // Verify that scale factors are not adjusted. + for (int i = 0; i < num_streams; ++i) { + EXPECT_EQ(config.simulcast_layers[i].scale_resolution_down_by, + kScaleFactors[i]); + } +} + +} // namespace test +} // namespace webrtc diff --git a/video/buffered_frame_decryptor.cc b/video/buffered_frame_decryptor.cc index 90d14d38c2..187bac6ee4 100644 --- a/video/buffered_frame_decryptor.cc +++ b/video/buffered_frame_decryptor.cc @@ -11,7 +11,10 @@ #include "video/buffered_frame_decryptor.h" #include +#include +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "modules/video_coding/frame_object.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -21,7 +24,7 @@ BufferedFrameDecryptor::BufferedFrameDecryptor( OnDecryptedFrameCallback* decrypted_frame_callback, OnDecryptionStatusChangeCallback* decryption_status_change_callback) : generic_descriptor_auth_experiment_( - field_trial::IsEnabled("WebRTC-GenericDescriptorAuth")), + !field_trial::IsDisabled("WebRTC-GenericDescriptorAuth")), 
decrypted_frame_callback_(decrypted_frame_callback), decryption_status_change_callback_(decryption_status_change_callback) {} @@ -60,9 +63,7 @@ BufferedFrameDecryptor::FrameDecision BufferedFrameDecryptor::DecryptFrame( return FrameDecision::kStash; } // When using encryption we expect the frame to have the generic descriptor. - absl::optional descriptor = - frame->GetGenericFrameDescriptor(); - if (!descriptor) { + if (frame->GetRtpVideoHeader().generic == absl::nullopt) { RTC_LOG(LS_ERROR) << "No generic frame descriptor found dropping frame."; return FrameDecision::kDrop; } @@ -72,13 +73,13 @@ BufferedFrameDecryptor::FrameDecision BufferedFrameDecryptor::DecryptFrame( frame->size()); RTC_CHECK_LE(max_plaintext_byte_size, frame->size()); // Place the decrypted frame inline into the existing frame. - rtc::ArrayView inline_decrypted_bitstream(frame->data(), + rtc::ArrayView inline_decrypted_bitstream(frame->mutable_data(), max_plaintext_byte_size); - // Only enable authenticating the header if the field trial is enabled. - rtc::ArrayView additional_data; + // Enable authenticating the header if the field trial isn't disabled. + std::vector additional_data; if (generic_descriptor_auth_experiment_) { - additional_data = descriptor->GetByteRepresentation(); + additional_data = RtpDescriptorAuthentication(frame->GetRtpVideoHeader()); } // Attempt to decrypt the video frame. 
diff --git a/video/buffered_frame_decryptor_unittest.cc b/video/buffered_frame_decryptor_unittest.cc index 1b21acfb85..bbc08b0da3 100644 --- a/video/buffered_frame_decryptor_unittest.cc +++ b/video/buffered_frame_decryptor_unittest.cc @@ -57,6 +57,8 @@ class BufferedFrameDecryptorTest : public ::testing::Test, std::unique_ptr CreateRtpFrameObject( bool key_frame) { seq_num_++; + RTPVideoHeader rtp_video_header; + rtp_video_header.generic.emplace(); // clang-format off return std::make_unique( @@ -73,9 +75,8 @@ class BufferedFrameDecryptorTest : public ::testing::Test, kVideoCodecGeneric, kVideoRotation_0, VideoContentType::UNSPECIFIED, - RTPVideoHeader(), + rtp_video_header, /*color_space=*/absl::nullopt, - RtpGenericFrameDescriptor(), RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on diff --git a/video/call_stats.cc b/video/call_stats.cc index 27e00ee7ca..d575e114d8 100644 --- a/video/call_stats.cc +++ b/video/call_stats.cc @@ -129,7 +129,7 @@ void CallStats::Process() { max_rtt_ms_ = GetMaxRttMs(reports_); avg_rtt_ms = GetNewAvgRttMs(reports_, avg_rtt_ms); { - rtc::CritScope lock(&avg_rtt_ms_lock_); + MutexLock lock(&avg_rtt_ms_lock_); avg_rtt_ms_ = avg_rtt_ms; } @@ -178,7 +178,7 @@ int64_t CallStats::LastProcessedRtt() const { // allow only reading this from the process thread (or TQ once we get there) // so that the lock isn't necessary. 
- rtc::CritScope cs(&avg_rtt_ms_lock_); + MutexLock lock(&avg_rtt_ms_lock_); return avg_rtt_ms_; } diff --git a/video/call_stats.h b/video/call_stats.h index 5029453bf7..3bfb632446 100644 --- a/video/call_stats.h +++ b/video/call_stats.h @@ -18,13 +18,15 @@ #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { // CallStats keeps track of statistics for a call. +// TODO(webrtc:11489): Make call_stats_ not depend on ProcessThread and +// make callbacks on the worker thread (TQ). class CallStats : public Module, public RtcpRttStats { public: // Time interval for updating the observers. @@ -88,7 +90,7 @@ class CallStats : public Module, public RtcpRttStats { int64_t avg_rtt_ms_; // Protects |avg_rtt_ms_|. - rtc::CriticalSection avg_rtt_ms_lock_; + mutable Mutex avg_rtt_ms_lock_; // |sum_avg_rtt_ms_|, |num_avg_rtt_| and |time_of_first_rtt_ms_| are only used // on the ProcessThread when running. When the Process Thread is not running, diff --git a/video/call_stats2.cc b/video/call_stats2.cc new file mode 100644 index 0000000000..faf08d69bc --- /dev/null +++ b/video/call_stats2.cc @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/call_stats2.h" + +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "modules/utility/include/process_thread.h" +#include "rtc_base/checks.h" +#include "rtc_base/location.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace internal { +namespace { + +void RemoveOldReports(int64_t now, std::list* reports) { + static constexpr const int64_t kRttTimeoutMs = 1500; + reports->remove_if( + [&now](CallStats::RttTime& r) { return now - r.time > kRttTimeoutMs; }); +} + +int64_t GetMaxRttMs(const std::list& reports) { + int64_t max_rtt_ms = -1; + for (const CallStats::RttTime& rtt_time : reports) + max_rtt_ms = std::max(rtt_time.rtt, max_rtt_ms); + return max_rtt_ms; +} + +int64_t GetAvgRttMs(const std::list& reports) { + RTC_DCHECK(!reports.empty()); + int64_t sum = 0; + for (std::list::const_iterator it = reports.begin(); + it != reports.end(); ++it) { + sum += it->rtt; + } + return sum / reports.size(); +} + +int64_t GetNewAvgRttMs(const std::list& reports, + int64_t prev_avg_rtt) { + if (reports.empty()) + return -1; // Reset (invalid average). + + int64_t cur_rtt_ms = GetAvgRttMs(reports); + if (prev_avg_rtt == -1) + return cur_rtt_ms; // New initial average value. + + // Weight factor to apply to the average rtt. + // We weigh the old average at 70% against the new average (30%). 
+ constexpr const float kWeightFactor = 0.3f; + return prev_avg_rtt * (1.0f - kWeightFactor) + cur_rtt_ms * kWeightFactor; +} + +} // namespace + +constexpr TimeDelta CallStats::kUpdateInterval; + +CallStats::CallStats(Clock* clock, TaskQueueBase* task_queue) + : clock_(clock), + max_rtt_ms_(-1), + avg_rtt_ms_(-1), + sum_avg_rtt_ms_(0), + num_avg_rtt_(0), + time_of_first_rtt_ms_(-1), + task_queue_(task_queue) { + RTC_DCHECK(task_queue_); + process_thread_checker_.Detach(); + repeating_task_ = + RepeatingTaskHandle::DelayedStart(task_queue_, kUpdateInterval, [this]() { + UpdateAndReport(); + return kUpdateInterval; + }); +} + +CallStats::~CallStats() { + RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK(observers_.empty()); + + repeating_task_.Stop(); + + UpdateHistograms(); +} + +void CallStats::UpdateAndReport() { + RTC_DCHECK_RUN_ON(&construction_thread_checker_); + + RemoveOldReports(clock_->CurrentTime().ms(), &reports_); + max_rtt_ms_ = GetMaxRttMs(reports_); + avg_rtt_ms_ = GetNewAvgRttMs(reports_, avg_rtt_ms_); + + // If there is a valid rtt, update all observers with the max rtt. + if (max_rtt_ms_ >= 0) { + RTC_DCHECK_GE(avg_rtt_ms_, 0); + for (CallStatsObserver* observer : observers_) + observer->OnRttUpdate(avg_rtt_ms_, max_rtt_ms_); + // Sum for Histogram of average RTT reported over the entire call. + sum_avg_rtt_ms_ += avg_rtt_ms_; + ++num_avg_rtt_; + } +} + +void CallStats::RegisterStatsObserver(CallStatsObserver* observer) { + RTC_DCHECK_RUN_ON(&construction_thread_checker_); + if (!absl::c_linear_search(observers_, observer)) + observers_.push_back(observer); +} + +void CallStats::DeregisterStatsObserver(CallStatsObserver* observer) { + RTC_DCHECK_RUN_ON(&construction_thread_checker_); + observers_.remove(observer); +} + +int64_t CallStats::LastProcessedRtt() const { + RTC_DCHECK_RUN_ON(&construction_thread_checker_); + // No need for locking since we're on the construction thread. 
+ return avg_rtt_ms_; +} + +void CallStats::OnRttUpdate(int64_t rtt) { + // This callback may for some RtpRtcp module instances (video send stream) be + // invoked from a separate task queue, in other cases, we should already be + // on the correct TQ. + int64_t now_ms = clock_->TimeInMilliseconds(); + auto update = [this, rtt, now_ms]() { + RTC_DCHECK_RUN_ON(&construction_thread_checker_); + reports_.push_back(RttTime(rtt, now_ms)); + if (time_of_first_rtt_ms_ == -1) + time_of_first_rtt_ms_ = now_ms; + UpdateAndReport(); + }; + + if (task_queue_->IsCurrent()) { + update(); + } else { + task_queue_->PostTask(ToQueuedTask(task_safety_, std::move(update))); + } +} + +void CallStats::UpdateHistograms() { + RTC_DCHECK_RUN_ON(&construction_thread_checker_); + + if (time_of_first_rtt_ms_ == -1 || num_avg_rtt_ < 1) + return; + + int64_t elapsed_sec = + (clock_->TimeInMilliseconds() - time_of_first_rtt_ms_) / 1000; + if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { + int64_t avg_rtt_ms = (sum_avg_rtt_ms_ + num_avg_rtt_ / 2) / num_avg_rtt_; + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.AverageRoundTripTimeInMilliseconds", avg_rtt_ms); + } +} + +} // namespace internal +} // namespace webrtc diff --git a/video/call_stats2.h b/video/call_stats2.h new file mode 100644 index 0000000000..822685320f --- /dev/null +++ b/video/call_stats2.h @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_CALL_STATS2_H_ +#define VIDEO_CALL_STATS2_H_ + +#include +#include + +#include "api/units/timestamp.h" +#include "modules/include/module_common_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace internal { + +class CallStats { + public: + // Time interval for updating the observers. + static constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(1000); + + CallStats(Clock* clock, TaskQueueBase* task_queue); + ~CallStats(); + + // Expose an RtcpRttStats implementation without inheriting from RtcpRttStats. + // That allows us to separate the threading model of how RtcpRttStats is + // used (mostly on a process thread) and how CallStats is used (mostly on + // the TQ/worker thread). Since for both cases, there is a LastProcessedRtt() + // method, this separation allows us to not need a lock for either. + RtcpRttStats* AsRtcpRttStats() { return &rtcp_rtt_stats_impl_; } + + // Registers/deregisters a new observer to receive statistics updates. + // Must be called from the construction thread. + void RegisterStatsObserver(CallStatsObserver* observer); + void DeregisterStatsObserver(CallStatsObserver* observer); + + // Expose |LastProcessedRtt()| from RtcpRttStats to the public interface, as + // it is the part of the API that is needed by direct users of CallStats. + // TODO(tommi): Threading or lifetime guarantees are not explicit in how + // CallStats is used as RtcpRttStats or how pointers are cached in a + // few different places (distributed via Call). It would be good to clarify + // from what thread/TQ calls to OnRttUpdate and LastProcessedRtt need to be + // allowed. 
+ int64_t LastProcessedRtt() const; + + // Exposed for tests to test histogram support. + void UpdateHistogramsForTest() { UpdateHistograms(); } + + // Helper struct keeping track of the time a rtt value is reported. + struct RttTime { + RttTime(int64_t new_rtt, int64_t rtt_time) : rtt(new_rtt), time(rtt_time) {} + const int64_t rtt; + const int64_t time; + }; + + private: + // Part of the RtcpRttStats implementation. Called by RtcpRttStatsImpl. + void OnRttUpdate(int64_t rtt); + + void UpdateAndReport(); + + // This method must only be called when the process thread is not + // running, and from the construction thread. + void UpdateHistograms(); + + class RtcpRttStatsImpl : public RtcpRttStats { + public: + explicit RtcpRttStatsImpl(CallStats* owner) : owner_(owner) {} + ~RtcpRttStatsImpl() override = default; + + private: + void OnRttUpdate(int64_t rtt) override { + // For video send streams (video/video_send_stream.cc), the RtpRtcp module + // is currently created on a transport worker TaskQueue and not the worker + // thread - which is what happens in other cases. We should probably fix + // that so that the call consistently comes in on the right thread. + owner_->OnRttUpdate(rtt); + } + + int64_t LastProcessedRtt() const override { + // This call path shouldn't be used anymore. This impl is only for + // propagating the rtt from the RtpRtcp module, which does not call + // LastProcessedRtt(). Down the line we should consider removing + // LastProcessedRtt() and use the interface for event notifications only. + RTC_NOTREACHED() << "Legacy call path"; + return 0; + } + + CallStats* const owner_; + } rtcp_rtt_stats_impl_{this}; + + Clock* const clock_; + + // Used to regularly call UpdateAndReport(). + RepeatingTaskHandle repeating_task_ + RTC_GUARDED_BY(construction_thread_checker_); + + // The last RTT in the statistics update (zero if there is no valid estimate). 
+ int64_t max_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); + + // Last reported average RTT value. + int64_t avg_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); + + // |sum_avg_rtt_ms_|, |num_avg_rtt_| and |time_of_first_rtt_ms_| are only used + // on the ProcessThread when running. When the Process Thread is not running, + // (and only then) they can be used in UpdateHistograms(), usually called from + // the dtor. + int64_t sum_avg_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); + int64_t num_avg_rtt_ RTC_GUARDED_BY(construction_thread_checker_); + int64_t time_of_first_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); + + // All Rtt reports within valid time interval, oldest first. + std::list reports_ RTC_GUARDED_BY(construction_thread_checker_); + + // Observers getting stats reports. + // When attached to ProcessThread, this is read-only. In order to allow + // modification, we detach from the process thread while the observer + // list is updated, to avoid races. This allows us to not require a lock + // for the observers_ list, which makes the most common case lock free. + std::list observers_; + + SequenceChecker construction_thread_checker_; + SequenceChecker process_thread_checker_; + TaskQueueBase* const task_queue_; + + // Used to signal destruction to potentially pending tasks. + ScopedTaskSafety task_safety_; + + RTC_DISALLOW_COPY_AND_ASSIGN(CallStats); +}; + +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_CALL_STATS2_H_ diff --git a/video/call_stats2_unittest.cc b/video/call_stats2_unittest.cc new file mode 100644 index 0000000000..b3d43cb92a --- /dev/null +++ b/video/call_stats2_unittest.cc @@ -0,0 +1,312 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/call_stats2.h" + +#include + +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/utility/include/process_thread.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/metrics.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/run_loop.h" + +using ::testing::AnyNumber; +using ::testing::InvokeWithoutArgs; +using ::testing::Return; + +namespace webrtc { +namespace internal { + +class MockStatsObserver : public CallStatsObserver { + public: + MockStatsObserver() {} + virtual ~MockStatsObserver() {} + + MOCK_METHOD(void, OnRttUpdate, (int64_t, int64_t), (override)); +}; + +class CallStats2Test : public ::testing::Test { + public: + CallStats2Test() { process_thread_->Start(); } + + ~CallStats2Test() override { process_thread_->Stop(); } + + // Queues an rtt update call on the process thread. + void AsyncSimulateRttUpdate(int64_t rtt) { + RtcpRttStats* rtcp_rtt_stats = call_stats_.AsRtcpRttStats(); + process_thread_->PostTask(ToQueuedTask( + [rtcp_rtt_stats, rtt] { rtcp_rtt_stats->OnRttUpdate(rtt); })); + } + + protected: + void FlushProcessAndWorker() { + process_thread_->PostTask( + ToQueuedTask([this] { loop_.PostTask([this]() { loop_.Quit(); }); })); + loop_.Run(); + } + + test::RunLoop loop_; + std::unique_ptr process_thread_{ + ProcessThread::Create("CallStats")}; + // Note: Since rtc::Thread doesn't support injecting a Clock, we're going + // to be using a mix of the fake clock (used by CallStats) as well as the + // system clock (used by rtc::Thread). This isn't ideal and will result in + // the tests taking longer to execute in some cases than they need to. 
+ SimulatedClock fake_clock_{12345}; + CallStats call_stats_{&fake_clock_, loop_.task_queue()}; +}; + +TEST_F(CallStats2Test, AddAndTriggerCallback) { + static constexpr const int64_t kRtt = 25; + + MockStatsObserver stats_observer; + EXPECT_CALL(stats_observer, OnRttUpdate(kRtt, kRtt)) + .Times(1) + .WillOnce(InvokeWithoutArgs([this] { loop_.Quit(); })); + + call_stats_.RegisterStatsObserver(&stats_observer); + EXPECT_EQ(-1, call_stats_.LastProcessedRtt()); + + AsyncSimulateRttUpdate(kRtt); + loop_.Run(); + + EXPECT_EQ(kRtt, call_stats_.LastProcessedRtt()); + + call_stats_.DeregisterStatsObserver(&stats_observer); +} + +TEST_F(CallStats2Test, ProcessTime) { + static constexpr const int64_t kRtt = 100; + static constexpr const int64_t kRtt2 = 80; + + MockStatsObserver stats_observer; + + EXPECT_CALL(stats_observer, OnRttUpdate(kRtt, kRtt)) + .Times(2) + .WillOnce(InvokeWithoutArgs([this] { + // Advance clock and verify we get an update. + fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms()); + })) + .WillRepeatedly(InvokeWithoutArgs([this] { + AsyncSimulateRttUpdate(kRtt2); + // Advance clock just too little to get an update. + fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms() - + 1); + })); + + // In case you're reading this and wondering how this number is arrived at, + // please see comments in the ChangeRtt test that go into some detail. + static constexpr const int64_t kLastAvg = 94; + EXPECT_CALL(stats_observer, OnRttUpdate(kLastAvg, kRtt2)) + .Times(1) + .WillOnce(InvokeWithoutArgs([this] { loop_.Quit(); })); + + call_stats_.RegisterStatsObserver(&stats_observer); + + AsyncSimulateRttUpdate(kRtt); + loop_.Run(); + + call_stats_.DeregisterStatsObserver(&stats_observer); +} + +// Verify all observers get correct estimates and observers can be added and +// removed. 
+TEST_F(CallStats2Test, MultipleObservers) { + MockStatsObserver stats_observer_1; + call_stats_.RegisterStatsObserver(&stats_observer_1); + // Add the second observer twice, there should still be only one report to the + // observer. + MockStatsObserver stats_observer_2; + call_stats_.RegisterStatsObserver(&stats_observer_2); + call_stats_.RegisterStatsObserver(&stats_observer_2); + + static constexpr const int64_t kRtt = 100; + + // Verify both observers are updated. + EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)) + .Times(AnyNumber()) + .WillRepeatedly(Return()); + EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)) + .Times(AnyNumber()) + .WillOnce(InvokeWithoutArgs([this] { loop_.Quit(); })) + .WillRepeatedly(Return()); + AsyncSimulateRttUpdate(kRtt); + loop_.Run(); + + // Deregister the second observer and verify update is only sent to the first + // observer. + call_stats_.DeregisterStatsObserver(&stats_observer_2); + + EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)) + .Times(AnyNumber()) + .WillOnce(InvokeWithoutArgs([this] { loop_.Quit(); })) + .WillRepeatedly(Return()); + EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0); + AsyncSimulateRttUpdate(kRtt); + loop_.Run(); + + // Deregister the first observer. + call_stats_.DeregisterStatsObserver(&stats_observer_1); + + // Now make sure we don't get any callbacks. + EXPECT_CALL(stats_observer_1, OnRttUpdate(kRtt, kRtt)).Times(0); + EXPECT_CALL(stats_observer_2, OnRttUpdate(kRtt, kRtt)).Times(0); + AsyncSimulateRttUpdate(kRtt); + + // Flush the queue on the process thread to make sure we return after + // Process() has been called. + FlushProcessAndWorker(); +} + +// Verify increasing and decreasing rtt triggers callbacks with correct values. +TEST_F(CallStats2Test, ChangeRtt) { + // NOTE: This test assumes things about how old reports are removed + // inside of call_stats.cc. 
The threshold ms value is 1500ms, but it's not + // clear here that how the clock is advanced, affects that algorithm and + // subsequently the average reported rtt. + + MockStatsObserver stats_observer; + call_stats_.RegisterStatsObserver(&stats_observer); + + static constexpr const int64_t kFirstRtt = 100; + static constexpr const int64_t kLowRtt = kFirstRtt - 20; + static constexpr const int64_t kHighRtt = kFirstRtt + 20; + + EXPECT_CALL(stats_observer, OnRttUpdate(kFirstRtt, kFirstRtt)) + .Times(1) + .WillOnce(InvokeWithoutArgs([this] { + fake_clock_.AdvanceTimeMilliseconds(1000); + AsyncSimulateRttUpdate(kHighRtt); // Reported at T1 (1000ms). + })); + + // NOTE: This relies on the internal algorithms of call_stats.cc. + // There's a weight factor there (0.3), that weighs the previous average to + // the new one by 70%, so the number 103 in this case is arrived at like so: + // (100) / 1 * 0.7 + (100+120)/2 * 0.3 = 103 + static constexpr const int64_t kAvgRtt1 = 103; + EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt1, kHighRtt)) + .Times(1) + .WillOnce(InvokeWithoutArgs([this] { + // This interacts with an internal implementation detail in call_stats + // that decays the oldest rtt value. See more below. + fake_clock_.AdvanceTimeMilliseconds(1000); + AsyncSimulateRttUpdate(kLowRtt); // Reported at T2 (2000ms). + })); + + // Increase time enough for a new update, but not too much to make the + // rtt invalid. Report a lower rtt and verify the old/high value still is sent + // in the callback. + + // Here, enough time must have passed in order to remove exactly the first + // report and nothing else (>1500ms has passed since the first rtt). 
+ // So, this value is arrived by doing: + // (kAvgRtt1)/1 * 0.7 + (kHighRtt+kLowRtt)/2 * 0.3 = 102.1 + static constexpr const int64_t kAvgRtt2 = 102; + EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt2, kHighRtt)) + .Times(1) + .WillOnce(InvokeWithoutArgs([this] { + // Advance time to make the high report invalid, the lower rtt should + // now be in the callback. + fake_clock_.AdvanceTimeMilliseconds(1000); + })); + + static constexpr const int64_t kAvgRtt3 = 95; + EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt3, kLowRtt)) + .Times(1) + .WillOnce(InvokeWithoutArgs([this] { loop_.Quit(); })); + + // Trigger the first rtt value and set off the chain of callbacks. + AsyncSimulateRttUpdate(kFirstRtt); // Reported at T0 (0ms). + loop_.Run(); + + call_stats_.DeregisterStatsObserver(&stats_observer); +} + +TEST_F(CallStats2Test, LastProcessedRtt) { + MockStatsObserver stats_observer; + call_stats_.RegisterStatsObserver(&stats_observer); + + static constexpr const int64_t kRttLow = 10; + static constexpr const int64_t kRttHigh = 30; + // The following two average numbers dependend on average + weight + // calculations in call_stats.cc. + static constexpr const int64_t kAvgRtt1 = 13; + static constexpr const int64_t kAvgRtt2 = 15; + + EXPECT_CALL(stats_observer, OnRttUpdate(kRttLow, kRttLow)) + .Times(1) + .WillOnce(InvokeWithoutArgs([this] { + EXPECT_EQ(kRttLow, call_stats_.LastProcessedRtt()); + // Don't advance the clock to make sure that low and high rtt values + // are associated with the same time stamp. 
+ AsyncSimulateRttUpdate(kRttHigh); + })); + + EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt1, kRttHigh)) + .Times(AnyNumber()) + .WillOnce(InvokeWithoutArgs([this] { + EXPECT_EQ(kAvgRtt1, call_stats_.LastProcessedRtt()); + fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms()); + AsyncSimulateRttUpdate(kRttLow); + AsyncSimulateRttUpdate(kRttHigh); + })) + .WillRepeatedly(Return()); + + EXPECT_CALL(stats_observer, OnRttUpdate(kAvgRtt2, kRttHigh)) + .Times(AnyNumber()) + .WillOnce(InvokeWithoutArgs([this] { + EXPECT_EQ(kAvgRtt2, call_stats_.LastProcessedRtt()); + loop_.Quit(); + })) + .WillRepeatedly(Return()); + + // Set a first values and verify that LastProcessedRtt initially returns the + // average rtt. + fake_clock_.AdvanceTimeMilliseconds(CallStats::kUpdateInterval.ms()); + AsyncSimulateRttUpdate(kRttLow); + loop_.Run(); + EXPECT_EQ(kAvgRtt2, call_stats_.LastProcessedRtt()); + + call_stats_.DeregisterStatsObserver(&stats_observer); +} + +TEST_F(CallStats2Test, ProducesHistogramMetrics) { + metrics::Reset(); + static constexpr const int64_t kRtt = 123; + MockStatsObserver stats_observer; + call_stats_.RegisterStatsObserver(&stats_observer); + EXPECT_CALL(stats_observer, OnRttUpdate(kRtt, kRtt)) + .Times(AnyNumber()) + .WillRepeatedly(InvokeWithoutArgs([this] { loop_.Quit(); })); + + AsyncSimulateRttUpdate(kRtt); + loop_.Run(); + fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * + CallStats::kUpdateInterval.ms()); + AsyncSimulateRttUpdate(kRtt); + loop_.Run(); + + call_stats_.DeregisterStatsObserver(&stats_observer); + + call_stats_.UpdateHistogramsForTest(); + + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.AverageRoundTripTimeInMilliseconds")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.AverageRoundTripTimeInMilliseconds", + kRtt)); +} + +} // namespace internal +} // namespace webrtc diff --git a/video/call_stats_unittest.cc b/video/call_stats_unittest.cc index c560ccbee6..e85c4f8c54 100644 --- 
a/video/call_stats_unittest.cc +++ b/video/call_stats_unittest.cc @@ -32,7 +32,7 @@ class MockStatsObserver : public CallStatsObserver { MockStatsObserver() {} virtual ~MockStatsObserver() {} - MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t)); + MOCK_METHOD(void, OnRttUpdate, (int64_t, int64_t), (override)); }; class CallStatsTest : public ::testing::Test { diff --git a/video/encoder_bitrate_adjuster.cc b/video/encoder_bitrate_adjuster.cc index 5af9128a86..45d88875e3 100644 --- a/video/encoder_bitrate_adjuster.cc +++ b/video/encoder_bitrate_adjuster.cc @@ -109,7 +109,7 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( LayerRateInfo& layer_info = layer_infos.back(); layer_info.target_rate = - DataRate::bps(rates.bitrate.GetSpatialLayerSum(si)); + DataRate::BitsPerSec(rates.bitrate.GetSpatialLayerSum(si)); // Adjustment is done per spatial layer only (not per temporal layer). if (frames_since_layout_change_ < kMinFramesSinceLayoutChange) { @@ -186,8 +186,8 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( // Available link headroom that can be used to fill wanted overshoot. DataRate available_headroom = DataRate::Zero(); if (utilize_bandwidth_headroom_) { - available_headroom = - rates.bandwidth_allocation - DataRate::bps(rates.bitrate.get_sum_bps()); + available_headroom = rates.bandwidth_allocation - + DataRate::BitsPerSec(rates.bitrate.get_sum_bps()); } // All wanted overshoots are satisfied in the same proportion based on @@ -214,7 +214,7 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( if (min_bitrates_bps_[si] > 0 && layer_info.target_rate > DataRate::Zero() && - DataRate::bps(min_bitrates_bps_[si]) < layer_info.target_rate) { + DataRate::BitsPerSec(min_bitrates_bps_[si]) < layer_info.target_rate) { // Make sure rate adjuster doesn't push target bitrate below minimum. 
utilization_factor = std::min(utilization_factor, layer_info.target_rate.bps() / @@ -236,7 +236,7 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( // Populate the adjusted allocation with determined utilization factor. if (active_tls_[si] == 1 && layer_info.target_rate > - DataRate::bps(rates.bitrate.GetBitrate(si, 0))) { + DataRate::BitsPerSec(rates.bitrate.GetBitrate(si, 0))) { // Bitrate allocation indicates temporal layer usage, but encoder // does not seem to support it. Pipe all bitrate into a single // overshoot detector. @@ -282,8 +282,15 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( (ti == 0 ? 0 : current_fps_allocation_[si][ti - 1])) / VideoEncoder::EncoderInfo::kMaxFramerateFraction; + if (fps_fraction <= 0.0) { + RTC_LOG(LS_WARNING) + << "Encoder config has temporal layer with non-zero bitrate " + "allocation but zero framerate allocation."; + continue; + } + overshoot_detectors_[si][ti]->SetTargetRate( - DataRate::bps(layer_bitrate_bps), + DataRate::BitsPerSec(layer_bitrate_bps), fps_fraction * rates.framerate_fps, now_ms); } } diff --git a/video/encoder_bitrate_adjuster_unittest.cc b/video/encoder_bitrate_adjuster_unittest.cc index b7cdfd35f2..d8fcf382b2 100644 --- a/video/encoder_bitrate_adjuster_unittest.cc +++ b/video/encoder_bitrate_adjuster_unittest.cc @@ -34,7 +34,7 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { static_assert(kSequenceLength % 2 == 0, "Sequence length must be even."); EncoderBitrateAdjusterTest() - : target_bitrate_(DataRate::bps(kDefaultBitrateBps)), + : target_bitrate_(DataRate::BitsPerSec(kDefaultBitrateBps)), target_framerate_fps_(kDefaultFrameRateFps), tl_pattern_idx_{}, sequence_idx_{} {} @@ -100,13 +100,10 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { RTC_DCHECK_EQ(media_utilization_factors.size(), network_utilization_factors.size()); - constexpr size_t kMaxFrameSize = 100000; - uint8_t buffer[kMaxFrameSize]; - const int64_t start_us = 
rtc::TimeMicros(); while (rtc::TimeMicros() < start_us + (duration_ms * rtc::kNumMicrosecsPerMillisec)) { - clock_.AdvanceTime(TimeDelta::seconds(1) / target_framerate_fps_); + clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); for (size_t si = 0; si < NumSpatialLayers(); ++si) { const std::vector& tl_pattern = kTlPatterns[NumTemporalLayers(si) - 1]; @@ -168,8 +165,8 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { ? media_frame_size - network_frame_size_diff_bytes : media_frame_size + network_frame_size_diff_bytes; - EncodedImage image(buffer, 0, kMaxFrameSize); - image.set_size(frame_size_bytes); + EncodedImage image; + image.SetEncodedData(EncodedImageBuffer::Create(frame_size_bytes)); image.SetSpatialIndex(si); adjuster_->OnEncodedFrame(image, ti); sequence_idx = ++sequence_idx % kSequenceLength; @@ -478,7 +475,8 @@ TEST_F(EncoderBitrateAdjusterTest, HeadroomAllowsOvershootToMediaRate) { current_adjusted_allocation_ = adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( current_input_allocation_, target_framerate_fps_, - DataRate::bps(current_input_allocation_.get_sum_bps() * 1.1))); + DataRate::BitsPerSec(current_input_allocation_.get_sum_bps() * + 1.1))); ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.01); } } @@ -520,7 +518,7 @@ TEST_F(EncoderBitrateAdjusterTest, DontExceedMediaRateEvenWithHeadroom) { current_adjusted_allocation_ = adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( current_input_allocation_, target_framerate_fps_, - DataRate::bps(current_input_allocation_.get_sum_bps() * 2))); + DataRate::BitsPerSec(current_input_allocation_.get_sum_bps() * 2))); ExpectNear(MultiplyAllocation(current_input_allocation_, 1 / 1.1), current_adjusted_allocation_, 0.015); } diff --git a/video/encoder_overshoot_detector_unittest.cc b/video/encoder_overshoot_detector_unittest.cc index 7170f49061..a3c44eb013 100644 --- a/video/encoder_overshoot_detector_unittest.cc +++ 
b/video/encoder_overshoot_detector_unittest.cc @@ -23,7 +23,7 @@ class EncoderOvershootDetectorTest : public ::testing::Test { static constexpr double kDefaultFrameRateFps = 15; EncoderOvershootDetectorTest() : detector_(kWindowSizeMs), - target_bitrate_(DataRate::bps(kDefaultBitrateBps)), + target_bitrate_(DataRate::BitsPerSec(kDefaultBitrateBps)), target_framerate_fps_(kDefaultFrameRateFps) {} protected: @@ -40,14 +40,14 @@ class EncoderOvershootDetectorTest : public ::testing::Test { if (rtc::TimeMillis() == 0) { // Encode a first frame which by definition has no overuse factor. detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); - clock_.AdvanceTime(TimeDelta::seconds(1) / target_framerate_fps_); + clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); } int64_t runtime_us = 0; while (runtime_us < test_duration_ms * 1000) { detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); runtime_us += rtc::kNumMicrosecsPerSec / target_framerate_fps_; - clock_.AdvanceTime(TimeDelta::seconds(1) / target_framerate_fps_); + clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); } // At constant utilization, both network and media utilization should be @@ -81,7 +81,7 @@ TEST_F(EncoderOvershootDetectorTest, NoUtilizationIfNoRate) { detector_.GetNetworkRateUtilizationFactor(rtc::TimeMillis()).has_value()); detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); - clock_.AdvanceTime(TimeDelta::ms(time_interval_ms)); + clock_.AdvanceTime(TimeDelta::Millis(time_interval_ms)); EXPECT_TRUE( detector_.GetNetworkRateUtilizationFactor(rtc::TimeMillis()).has_value()); } @@ -111,7 +111,7 @@ TEST_F(EncoderOvershootDetectorTest, ConstantOvershootVaryingRates) { RunConstantUtilizationTest(1.2, 1.2, 0.01, kWindowSizeMs); target_framerate_fps_ /= 2; RunConstantUtilizationTest(1.2, 1.2, 0.01, kWindowSizeMs / 2); - target_bitrate_ = DataRate::bps(target_bitrate_.bps() / 2); + target_bitrate_ = DataRate::BitsPerSec(target_bitrate_.bps() / 2); 
RunConstantUtilizationTest(1.2, 1.2, 0.01, kWindowSizeMs / 2); } @@ -147,7 +147,7 @@ TEST_F(EncoderOvershootDetectorTest, PartialOvershoot) { int i = 0; while (runtime_us < kWindowSizeMs * rtc::kNumMicrosecsPerMillisec) { runtime_us += rtc::kNumMicrosecsPerSec / target_framerate_fps_; - clock_.AdvanceTime(TimeDelta::seconds(1) / target_framerate_fps_); + clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); int frame_size_bytes = (i++ % 4 < 2) ? (ideal_frame_size_bytes * 120) / 100 : (ideal_frame_size_bytes * 80) / 100; detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); diff --git a/video/encoder_rtcp_feedback.cc b/video/encoder_rtcp_feedback.cc index a736d83b82..b81ff6120f 100644 --- a/video/encoder_rtcp_feedback.cc +++ b/video/encoder_rtcp_feedback.cc @@ -56,7 +56,7 @@ void EncoderRtcpFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) { RTC_DCHECK(HasSsrc(ssrc)); { int64_t now_ms = clock_->TimeInMilliseconds(); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (time_last_intra_request_ms_ + min_keyframe_send_interval_ms_ > now_ms) { return; } diff --git a/video/encoder_rtcp_feedback.h b/video/encoder_rtcp_feedback.h index b5dd0288f3..3bd1cb91f0 100644 --- a/video/encoder_rtcp_feedback.h +++ b/video/encoder_rtcp_feedback.h @@ -15,7 +15,7 @@ #include "api/video/video_stream_encoder_interface.h" #include "call/rtp_video_sender_interface.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -50,8 +50,8 @@ class EncoderRtcpFeedback : public RtcpIntraFrameObserver, const RtpVideoSenderInterface* rtp_video_sender_; VideoStreamEncoderInterface* const video_stream_encoder_; - rtc::CriticalSection crit_; - int64_t time_last_intra_request_ms_ RTC_GUARDED_BY(crit_); + Mutex mutex_; + int64_t time_last_intra_request_ms_ RTC_GUARDED_BY(mutex_); const int min_keyframe_send_interval_ms_; }; 
diff --git a/video/end_to_end_tests/bandwidth_tests.cc b/video/end_to_end_tests/bandwidth_tests.cc index 16b35d68f8..721738393b 100644 --- a/video/end_to_end_tests/bandwidth_tests.cc +++ b/video/end_to_end_tests/bandwidth_tests.cc @@ -16,8 +16,9 @@ #include "api/video/video_bitrate_allocation.h" #include "call/fake_network_pipe.h" #include "call/simulated_network.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/to_queued_task.h" #include "system_wrappers/include/sleep.h" @@ -205,8 +206,9 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) { ~BweObserver() override { // Block until all already posted tasks run to avoid races when such task - // accesses |this|. - SendTask(RTC_FROM_HERE, task_queue_, [] {}); + // accesses |this|. Also make sure we free |rtp_rtcp_| on the correct + // thread/task queue. 
+ SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; }); } std::unique_ptr CreateReceiveTransport( @@ -237,13 +239,13 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) { encoder_config->max_bitrate_bps = 2000000; ASSERT_EQ(1u, receive_configs->size()); - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.receiver_only = true; config.clock = clock_; config.outgoing_transport = receive_transport_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; config.local_media_ssrc = (*receive_configs)[0].rtp.local_ssrc; - rtp_rtcp_ = RtpRtcp::Create(config); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config); rtp_rtcp_->SetRemoteSSRC((*receive_configs)[0].rtp.remote_ssrc); rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize); } @@ -302,7 +304,7 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) { Clock* const clock_; uint32_t sender_ssrc_; int remb_bitrate_bps_; - std::unique_ptr rtp_rtcp_; + std::unique_ptr rtp_rtcp_; test::PacketTransport* receive_transport_; TestState state_; RateLimiter retransmission_rate_limiter_; @@ -317,7 +319,6 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) { // test, due to the packetization overhead and encoder pushback. webrtc::test::ScopedFieldTrials field_trials( std::string(field_trial::GetFieldTrialString()) + - "WebRTC-SubtractPacketizationOverhead/Disabled/" "WebRTC-VideoRateControl/bitrate_adjuster:false/"); class EncoderRateStatsTest : public test::EndToEndTest, public test::FakeEncoder { @@ -352,7 +353,7 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) { // Make sure not to trigger on any default zero bitrates. 
if (parameters.bitrate.get_sum_bps() == 0) return; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); bitrate_kbps_ = parameters.bitrate.get_sum_kbps(); observation_complete_.Set(); } @@ -374,7 +375,7 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) { for (int i = 0; i < kDefaultTimeoutMs; ++i) { VideoSendStream::Stats stats = send_stream_->GetStats(); { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if ((stats.target_media_bitrate_bps + 500) / 1000 == static_cast(bitrate_kbps_)) { return; @@ -398,11 +399,11 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) { private: TaskQueueBase* const task_queue_; - rtc::CriticalSection crit_; + Mutex mutex_; VideoSendStream* send_stream_; test::VideoEncoderProxyFactory encoder_factory_; std::unique_ptr bitrate_allocator_factory_; - uint32_t bitrate_kbps_ RTC_GUARDED_BY(crit_); + uint32_t bitrate_kbps_ RTC_GUARDED_BY(mutex_); } test(task_queue()); RunBaseTest(&test); diff --git a/video/end_to_end_tests/codec_tests.cc b/video/end_to_end_tests/codec_tests.cc index b73b289ec8..e4eabcf73d 100644 --- a/video/end_to_end_tests/codec_tests.cc +++ b/video/end_to_end_tests/codec_tests.cc @@ -34,18 +34,14 @@ enum : int { // The first valid value is 1. 
}; } // namespace -class CodecEndToEndTest : public test::CallTest, - public ::testing::WithParamInterface { +class CodecEndToEndTest : public test::CallTest { public: - CodecEndToEndTest() : field_trial_(GetParam()) { + CodecEndToEndTest() { RegisterRtpExtension( RtpExtension(RtpExtension::kColorSpaceUri, kColorSpaceExtensionId)); RegisterRtpExtension(RtpExtension(RtpExtension::kVideoRotationUri, kVideoRotationExtensionId)); } - - private: - test::ScopedFieldTrials field_trial_; }; class CodecObserver : public test::EndToEndTest, @@ -88,7 +84,7 @@ class CodecObserver : public test::EndToEndTest, send_config->rtp.payload_type; (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat(send_config->rtp.payload_name); - (*receive_configs)[0].decoders[0].decoder_factory = decoder_factory_; + (*receive_configs)[0].decoder_factory = decoder_factory_; } void OnFrame(const VideoFrame& video_frame) override { @@ -121,13 +117,7 @@ class CodecObserver : public test::EndToEndTest, int frame_counter_; }; -INSTANTIATE_TEST_SUITE_P( - GenericDescriptor, - CodecEndToEndTest, - ::testing::Values("WebRTC-GenericDescriptor/Disabled/", - "WebRTC-GenericDescriptor/Enabled/")); - -TEST_P(CodecEndToEndTest, SendsAndReceivesVP8) { +TEST_F(CodecEndToEndTest, SendsAndReceivesVP8) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); test::FunctionVideoDecoderFactory decoder_factory( @@ -137,7 +127,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP8) { RunBaseTest(&test); } -TEST_P(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) { +TEST_F(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); test::FunctionVideoDecoderFactory decoder_factory( @@ -148,7 +138,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) { } #if defined(RTC_ENABLE_VP9) -TEST_P(CodecEndToEndTest, SendsAndReceivesVP9) { +TEST_F(CodecEndToEndTest, SendsAndReceivesVP9) { 
test::FunctionVideoEncoderFactory encoder_factory( []() { return VP9Encoder::Create(); }); test::FunctionVideoDecoderFactory decoder_factory( @@ -158,7 +148,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9) { RunBaseTest(&test); } -TEST_P(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) { +TEST_F(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP9Encoder::Create(); }); test::FunctionVideoDecoderFactory decoder_factory( @@ -168,7 +158,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) { RunBaseTest(&test); } -TEST_P(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) { +TEST_F(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP9Encoder::Create(); }); test::FunctionVideoDecoderFactory decoder_factory( @@ -179,7 +169,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) { RunBaseTest(&test); } -TEST_P(CodecEndToEndTest, +TEST_F(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpaceWithHdrMetadata) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP9Encoder::Create(); }); @@ -192,7 +182,7 @@ TEST_P(CodecEndToEndTest, } // Mutiplex tests are using VP9 as the underlying implementation. 
-TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplex) { +TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplex) { InternalEncoderFactory internal_encoder_factory; InternalDecoderFactory internal_decoder_factory; test::FunctionVideoEncoderFactory encoder_factory( @@ -211,7 +201,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplex) { RunBaseTest(&test); } -TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) { +TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) { InternalEncoderFactory internal_encoder_factory; InternalDecoderFactory internal_decoder_factory; test::FunctionVideoEncoderFactory encoder_factory( diff --git a/video/end_to_end_tests/extended_reports_tests.cc b/video/end_to_end_tests/extended_reports_tests.cc index f4938c943b..b5e162e413 100644 --- a/video/end_to_end_tests/extended_reports_tests.cc +++ b/video/end_to_end_tests/extended_reports_tests.cc @@ -31,8 +31,8 @@ #include "call/video_send_stream.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "test/call_test.h" @@ -62,11 +62,13 @@ class RtcpXrObserver : public test::EndToEndTest { RtcpXrObserver(bool enable_rrtr, bool expect_target_bitrate, bool enable_zero_target_bitrate, + bool enable_target_bitrate, VideoEncoderConfig::ContentType content_type) : EndToEndTest(test::CallTest::kDefaultTimeoutMs), enable_rrtr_(enable_rrtr), expect_target_bitrate_(expect_target_bitrate), enable_zero_target_bitrate_(enable_zero_target_bitrate), + enable_target_bitrate_(enable_target_bitrate), content_type_(content_type), sent_rtcp_sr_(0), sent_rtcp_rr_(0), @@ -83,7 +85,7 @@ class RtcpXrObserver : public test::EndToEndTest { private: // Receive stream should send RR packets (and RRTR packets if enabled). 
Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet, length)); @@ -100,7 +102,7 @@ class RtcpXrObserver : public test::EndToEndTest { } // Send stream should send SR packets (and DLRR packets if enabled). Action OnSendRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet, length)); @@ -175,6 +177,12 @@ class RtcpXrObserver : public test::EndToEndTest { VideoSendStream::Config* send_config, std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { + if (enable_target_bitrate_) { + send_config->encoder_settings.allocation_cb_type = + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation; + } + if (enable_zero_target_bitrate_) { // Configure VP8 to be able to use simulcast. send_config->rtp.payload_name = "VP8"; @@ -198,16 +206,17 @@ class RtcpXrObserver : public test::EndToEndTest { static const int kNumRtcpReportPacketsToObserve = 5; - rtc::CriticalSection crit_; + Mutex mutex_; const bool enable_rrtr_; const bool expect_target_bitrate_; const bool enable_zero_target_bitrate_; + const bool enable_target_bitrate_; const VideoEncoderConfig::ContentType content_type_; int sent_rtcp_sr_; - int sent_rtcp_rr_ RTC_GUARDED_BY(&crit_); - int sent_rtcp_rrtr_ RTC_GUARDED_BY(&crit_); - bool sent_rtcp_target_bitrate_ RTC_GUARDED_BY(&crit_); - bool sent_zero_rtcp_target_bitrate_ RTC_GUARDED_BY(&crit_); + int sent_rtcp_rr_ RTC_GUARDED_BY(&mutex_); + int sent_rtcp_rrtr_ RTC_GUARDED_BY(&mutex_); + bool sent_rtcp_target_bitrate_ RTC_GUARDED_BY(&mutex_); + bool sent_zero_rtcp_target_bitrate_ RTC_GUARDED_BY(&mutex_); int sent_rtcp_dlrr_; BuiltInNetworkBehaviorConfig forward_transport_config_; SimulatedNetwork* send_simulated_network_; @@ -217,6 +226,7 @@ 
TEST_F(ExtendedReportsEndToEndTest, TestExtendedReportsWithRrtrWithoutTargetBitrate) { RtcpXrObserver test(/*enable_rrtr=*/true, /*expect_target_bitrate=*/false, /*enable_zero_target_bitrate=*/false, + /*enable_target_bitrate=*/false, VideoEncoderConfig::ContentType::kRealtimeVideo); RunBaseTest(&test); } @@ -225,6 +235,7 @@ TEST_F(ExtendedReportsEndToEndTest, TestExtendedReportsWithoutRrtrWithoutTargetBitrate) { RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/false, /*enable_zero_target_bitrate=*/false, + /*enable_target_bitrate=*/false, VideoEncoderConfig::ContentType::kRealtimeVideo); RunBaseTest(&test); } @@ -233,6 +244,7 @@ TEST_F(ExtendedReportsEndToEndTest, TestExtendedReportsWithRrtrWithTargetBitrate) { RtcpXrObserver test(/*enable_rrtr=*/true, /*expect_target_bitrate=*/true, /*enable_zero_target_bitrate=*/false, + /*enable_target_bitrate=*/false, VideoEncoderConfig::ContentType::kScreen); RunBaseTest(&test); } @@ -241,15 +253,16 @@ TEST_F(ExtendedReportsEndToEndTest, TestExtendedReportsWithoutRrtrWithTargetBitrate) { RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/true, /*enable_zero_target_bitrate=*/false, + /*enable_target_bitrate=*/false, VideoEncoderConfig::ContentType::kScreen); RunBaseTest(&test); } TEST_F(ExtendedReportsEndToEndTest, - TestExtendedReportsWithoutRrtrWithTargetBitrateFromFieldTrial) { - test::ScopedFieldTrials field_trials("WebRTC-Target-Bitrate-Rtcp/Enabled/"); + TestExtendedReportsWithoutRrtrWithTargetBitrateExplicitlySet) { RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/true, /*enable_zero_target_bitrate=*/false, + /*enable_target_bitrate=*/true, VideoEncoderConfig::ContentType::kRealtimeVideo); RunBaseTest(&test); } @@ -258,6 +271,7 @@ TEST_F(ExtendedReportsEndToEndTest, TestExtendedReportsCanSignalZeroTargetBitrate) { RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/true, /*enable_zero_target_bitrate=*/true, + /*enable_target_bitrate=*/false, 
VideoEncoderConfig::ContentType::kScreen); RunBaseTest(&test); } diff --git a/video/end_to_end_tests/fec_tests.cc b/video/end_to_end_tests/fec_tests.cc index c55c4dbee1..0d4ddac5a4 100644 --- a/video/end_to_end_tests/fec_tests.cc +++ b/video/end_to_end_tests/fec_tests.cc @@ -20,6 +20,7 @@ #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "rtc_base/synchronization/mutex.h" #include "test/call_test.h" #include "test/field_trial.h" #include "test/gmock.h" @@ -59,7 +60,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) { private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -98,7 +99,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) { } void OnFrame(const VideoFrame& video_frame) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Rendering frame with timestamp of packet that was dropped -> FEC // protection worked. 
auto it = dropped_timestamps_.find(video_frame.timestamp()); @@ -119,7 +120,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) { encoder_config->codec_type = kVideoCodecVP8; VideoReceiveStream::Decoder decoder = test::CreateMatchingDecoder(*send_config); - decoder.decoder_factory = &decoder_factory_; + (*receive_configs)[0].decoder_factory = &decoder_factory_; (*receive_configs)[0].decoders.clear(); (*receive_configs)[0].decoders.push_back(decoder); @@ -137,15 +138,15 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) { << "Timed out waiting for dropped frames to be rendered."; } - rtc::CriticalSection crit_; + Mutex mutex_; std::unique_ptr encoder_; test::FunctionVideoEncoderFactory encoder_factory_; InternalDecoderFactory decoder_factory_; - std::set dropped_sequence_numbers_ RTC_GUARDED_BY(crit_); + std::set dropped_sequence_numbers_ RTC_GUARDED_BY(mutex_); // Several packets can have the same timestamp. - std::multiset dropped_timestamps_ RTC_GUARDED_BY(crit_); + std::multiset dropped_timestamps_ RTC_GUARDED_BY(mutex_); Random random_; - int num_packets_sent_ RTC_GUARDED_BY(crit_); + int num_packets_sent_ RTC_GUARDED_BY(mutex_); } test; RunBaseTest(&test); @@ -169,7 +170,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -247,7 +248,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, EXPECT_EQ(1U, report_blocks.size()); EXPECT_EQ(test::CallTest::kFlexfecSendSsrc, report_blocks[0].source_ssrc()); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); received_flexfec_rtcp_ = true; } } @@ -273,7 +274,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, void OnFrame(const VideoFrame& video_frame) override { EXPECT_EQ(kVideoRotation_90, video_frame.rotation()); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Rendering frame with 
timestamp of packet that was dropped -> FEC // protection worked. auto it = dropped_timestamps_.find(video_frame.timestamp()); @@ -321,13 +322,13 @@ class FlexfecRenderObserver : public test::EndToEndTest, << "Timed out waiting for dropped frames to be rendered."; } - rtc::CriticalSection crit_; - std::set dropped_sequence_numbers_ RTC_GUARDED_BY(crit_); + Mutex mutex_; + std::set dropped_sequence_numbers_ RTC_GUARDED_BY(mutex_); // Several packets can have the same timestamp. - std::multiset dropped_timestamps_ RTC_GUARDED_BY(crit_); + std::multiset dropped_timestamps_ RTC_GUARDED_BY(mutex_); const bool enable_nack_; const bool expect_flexfec_rtcp_; - bool received_flexfec_rtcp_ RTC_GUARDED_BY(crit_); + bool received_flexfec_rtcp_ RTC_GUARDED_BY(mutex_); Random random_; int num_packets_sent_; }; @@ -360,7 +361,7 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) { private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock_(&crit_); + MutexLock lock_(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -424,7 +425,7 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) { } Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock_(&crit_); + MutexLock lock_(&mutex_); if (state_ == kVerifyUlpfecPacketNotInNackList) { test::RtcpPacketParser rtcp_parser; rtcp_parser.Parse(packet, length); @@ -486,7 +487,7 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) { send_config->rtp.payload_type; (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat(send_config->rtp.payload_name); - (*receive_configs)[0].decoders[0].decoder_factory = &decoder_factory_; + (*receive_configs)[0].decoder_factory = &decoder_factory_; } void PerformTest() override { @@ -503,8 +504,8 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) { kVerifyUlpfecPacketNotInNackList, } state_; - rtc::CriticalSection crit_; - uint16_t ulpfec_sequence_number_ 
RTC_GUARDED_BY(&crit_); + Mutex mutex_; + uint16_t ulpfec_sequence_number_ RTC_GUARDED_BY(&mutex_); bool has_last_sequence_number_; uint16_t last_sequence_number_; test::FunctionVideoEncoderFactory encoder_factory_; diff --git a/video/end_to_end_tests/frame_encryption_tests.cc b/video/end_to_end_tests/frame_encryption_tests.cc index 85ad7dd2cb..df54337be5 100644 --- a/video/end_to_end_tests/frame_encryption_tests.cc +++ b/video/end_to_end_tests/frame_encryption_tests.cc @@ -11,79 +11,81 @@ #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" #include "media/engine/internal_decoder_factory.h" +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "test/call_test.h" -#include "test/field_trial.h" #include "test/gtest.h" namespace webrtc { namespace { + +using FrameEncryptionEndToEndTest = test::CallTest; + enum : int { // The first valid value is 1. kGenericDescriptorExtensionId = 1, }; -} // namespace -class FrameEncryptionEndToEndTest : public test::CallTest { +class DecryptedFrameObserver : public test::EndToEndTest, + public rtc::VideoSinkInterface { public: - FrameEncryptionEndToEndTest() { - RegisterRtpExtension(RtpExtension(RtpExtension::kGenericFrameDescriptorUri, - kGenericDescriptorExtensionId)); - } + DecryptedFrameObserver() + : EndToEndTest(test::CallTest::kDefaultTimeoutMs), + encoder_factory_([] { return VP8Encoder::Create(); }) {} private: - // GenericDescriptor is required for FrameEncryption to work. - test::ScopedFieldTrials field_trials_{"WebRTC-GenericDescriptor/Enabled/"}; -}; - -// Validates that payloads cannot be sent without a frame encryptor and frame -// decryptor attached. 
-TEST_F(FrameEncryptionEndToEndTest, RequireFrameEncryptionEnforced) { - class DecryptedFrameObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { - public: - DecryptedFrameObserver() - : EndToEndTest(kDefaultTimeoutMs), - encoder_factory_([]() { return VP8Encoder::Create(); }) {} - - private: - void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, - VideoEncoderConfig* encoder_config) override { - // Use VP8 instead of FAKE. - send_config->encoder_settings.encoder_factory = &encoder_factory_; - send_config->rtp.payload_name = "VP8"; - send_config->rtp.payload_type = kVideoSendPayloadType; - send_config->frame_encryptor = new FakeFrameEncryptor(); - send_config->crypto_options.sframe.require_frame_encryption = true; - encoder_config->codec_type = kVideoCodecVP8; - VideoReceiveStream::Decoder decoder = - test::CreateMatchingDecoder(*send_config); - decoder.decoder_factory = &decoder_factory_; - for (auto& recv_config : *receive_configs) { - recv_config.decoders.clear(); - recv_config.decoders.push_back(decoder); - recv_config.renderer = this; - recv_config.frame_decryptor = new FakeFrameDecryptor(); - recv_config.crypto_options.sframe.require_frame_encryption = true; - } + void ModifyVideoConfigs( + VideoSendStream::Config* send_config, + std::vector* receive_configs, + VideoEncoderConfig* encoder_config) override { + // Use VP8 instead of FAKE. 
+ send_config->encoder_settings.encoder_factory = &encoder_factory_; + send_config->rtp.payload_name = "VP8"; + send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType; + send_config->frame_encryptor = new FakeFrameEncryptor(); + send_config->crypto_options.sframe.require_frame_encryption = true; + encoder_config->codec_type = kVideoCodecVP8; + VideoReceiveStream::Decoder decoder = + test::CreateMatchingDecoder(*send_config); + for (auto& recv_config : *receive_configs) { + recv_config.decoder_factory = &decoder_factory_; + recv_config.decoders.clear(); + recv_config.decoders.push_back(decoder); + recv_config.renderer = this; + recv_config.frame_decryptor = new FakeFrameDecryptor(); + recv_config.crypto_options.sframe.require_frame_encryption = true; } + } - // Validate that rotation is preserved. - void OnFrame(const VideoFrame& video_frame) override { - observation_complete_.Set(); - } + void OnFrame(const VideoFrame& video_frame) override { + observation_complete_.Set(); + } - void PerformTest() override { - EXPECT_TRUE(Wait()) - << "Timed out waiting for decrypted frames to be rendered."; - } + void PerformTest() override { + EXPECT_TRUE(Wait()) + << "Timed out waiting for decrypted frames to be rendered."; + } + + std::unique_ptr encoder_; + test::FunctionVideoEncoderFactory encoder_factory_; + InternalDecoderFactory decoder_factory_; +}; - std::unique_ptr encoder_; - test::FunctionVideoEncoderFactory encoder_factory_; - InternalDecoderFactory decoder_factory_; - } test; +// Validates that payloads cannot be sent without a frame encryptor and frame +// decryptor attached. 
+TEST_F(FrameEncryptionEndToEndTest, + WithGenericFrameDescriptorRequireFrameEncryptionEnforced) { + RegisterRtpExtension(RtpExtension(RtpExtension::kGenericFrameDescriptorUri00, + kGenericDescriptorExtensionId)); + DecryptedFrameObserver test; + RunBaseTest(&test); +} +TEST_F(FrameEncryptionEndToEndTest, + WithDependencyDescriptorRequireFrameEncryptionEnforced) { + RegisterRtpExtension(RtpExtension(RtpExtension::kDependencyDescriptorUri, + kGenericDescriptorExtensionId)); + DecryptedFrameObserver test; RunBaseTest(&test); } +} // namespace } // namespace webrtc diff --git a/video/end_to_end_tests/histogram_tests.cc b/video/end_to_end_tests/histogram_tests.cc index dd6de2543d..fa71c15e98 100644 --- a/video/end_to_end_tests/histogram_tests.cc +++ b/video/end_to_end_tests/histogram_tests.cc @@ -11,6 +11,7 @@ #include "absl/types/optional.h" #include "api/test/video/function_video_encoder_factory.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/metrics.h" #include "test/call_test.h" #include "test/gtest.h" @@ -59,7 +60,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, if (video_frame.ntp_time_ms() > 0 && Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >= video_frame.ntp_time_ms()) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ++num_frames_received_; } } @@ -82,7 +83,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, bool MinNumberOfFramesReceived() const { const int kMinRequiredHistogramSamples = 200; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return num_frames_received_ > kMinRequiredHistogramSamples; } @@ -131,13 +132,13 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, EXPECT_TRUE(Wait()) << "Timed out waiting for min frames to be received."; } - rtc::CriticalSection crit_; + mutable Mutex mutex_; const bool use_rtx_; const bool use_fec_; const bool screenshare_; test::FunctionVideoEncoderFactory encoder_factory_; 
absl::optional start_runtime_ms_; - int num_frames_received_ RTC_GUARDED_BY(&crit_); + int num_frames_received_ RTC_GUARDED_BY(&mutex_); } test(use_rtx, use_fec, screenshare); metrics::Reset(); diff --git a/video/end_to_end_tests/multi_codec_receive_tests.cc b/video/end_to_end_tests/multi_codec_receive_tests.cc index 354ee44072..e529ae4201 100644 --- a/video/end_to_end_tests/multi_codec_receive_tests.cc +++ b/video/end_to_end_tests/multi_codec_receive_tests.cc @@ -19,6 +19,7 @@ #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "test/call_test.h" #include "test/gmock.h" @@ -65,7 +66,7 @@ class FrameObserver : public test::RtpRtcpObserver, FrameObserver() : test::RtpRtcpObserver(test::CallTest::kDefaultTimeoutMs) {} void Reset(uint8_t expected_payload_type) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); num_sent_frames_ = 0; num_rendered_frames_ = 0; expected_payload_type_ = expected_payload_type; @@ -74,7 +75,7 @@ class FrameObserver : public test::RtpRtcpObserver, private: // Sends kFramesToObserve. Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -103,7 +104,7 @@ class FrameObserver : public test::RtpRtcpObserver, // Verifies that all sent frames are decoded and rendered. void OnFrame(const VideoFrame& rendered_frame) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); EXPECT_THAT(sent_timestamps_, Contains(rendered_frame.timestamp())); // Remove old timestamps too, only the newest decoded frame is rendered. 
@@ -116,12 +117,12 @@ class FrameObserver : public test::RtpRtcpObserver, } } - rtc::CriticalSection crit_; + Mutex mutex_; absl::optional last_timestamp_; // Only accessed from pacer thread. - absl::optional expected_payload_type_ RTC_GUARDED_BY(crit_); - int num_sent_frames_ RTC_GUARDED_BY(crit_) = 0; - int num_rendered_frames_ RTC_GUARDED_BY(crit_) = 0; - std::vector sent_timestamps_ RTC_GUARDED_BY(crit_); + absl::optional expected_payload_type_ RTC_GUARDED_BY(mutex_); + int num_sent_frames_ RTC_GUARDED_BY(mutex_) = 0; + int num_rendered_frames_ RTC_GUARDED_BY(mutex_) = 0; + std::vector sent_timestamps_ RTC_GUARDED_BY(mutex_); }; } // namespace @@ -159,13 +160,13 @@ class MultiCodecReceiveTest : public test::CallTest { struct CodecConfig { std::string payload_name; - VideoEncoderFactory* encoder_factory; - VideoDecoderFactory* decoder_factory; size_t num_temporal_layers; }; - void ConfigureEncoder(const CodecConfig& config); - void ConfigureDecoders(const std::vector& configs); + void ConfigureEncoder(const CodecConfig& config, + VideoEncoderFactory* encoder_factory); + void ConfigureDecoders(const std::vector& configs, + VideoDecoderFactory* decoder_factory); void RunTestWithCodecs(const std::vector& configs); private: @@ -177,23 +178,25 @@ class MultiCodecReceiveTest : public test::CallTest { }; void MultiCodecReceiveTest::ConfigureDecoders( - const std::vector& configs) { + const std::vector& configs, + VideoDecoderFactory* decoder_factory) { video_receive_configs_[0].decoders.clear(); // Placing the payload names in a std::set retains the unique names only. 
+ video_receive_configs_[0].decoder_factory = decoder_factory; std::set unique_payload_names; for (const auto& config : configs) if (unique_payload_names.insert(config.payload_name).second) { VideoReceiveStream::Decoder decoder = test::CreateMatchingDecoder( PayloadNameToPayloadType(config.payload_name), config.payload_name); - decoder.decoder_factory = config.decoder_factory; video_receive_configs_[0].decoders.push_back(decoder); } } -void MultiCodecReceiveTest::ConfigureEncoder(const CodecConfig& config) { - GetVideoSendConfig()->encoder_settings.encoder_factory = - config.encoder_factory; +void MultiCodecReceiveTest::ConfigureEncoder( + const CodecConfig& config, + VideoEncoderFactory* encoder_factory) { + GetVideoSendConfig()->encoder_settings.encoder_factory = encoder_factory; GetVideoSendConfig()->rtp.payload_name = config.payload_name; GetVideoSendConfig()->rtp.payload_type = PayloadNameToPayloadType(config.payload_name); @@ -202,39 +205,71 @@ void MultiCodecReceiveTest::ConfigureEncoder(const CodecConfig& config) { EXPECT_EQ(1u, GetVideoEncoderConfig()->simulcast_layers.size()); GetVideoEncoderConfig()->simulcast_layers[0].num_temporal_layers = config.num_temporal_layers; + GetVideoEncoderConfig()->video_format.name = config.payload_name; } void MultiCodecReceiveTest::RunTestWithCodecs( const std::vector& configs) { EXPECT_TRUE(!configs.empty()); + test::FunctionVideoEncoderFactory encoder_factory( + [](const SdpVideoFormat& format) -> std::unique_ptr { + if (format.name == "VP8") { + return VP8Encoder::Create(); + } + if (format.name == "VP9") { + return VP9Encoder::Create(); + } + if (format.name == "H264") { + return H264Encoder::Create(cricket::VideoCodec("H264")); + } + RTC_NOTREACHED() << format.name; + return nullptr; + }); + test::FunctionVideoDecoderFactory decoder_factory( + [](const SdpVideoFormat& format) -> std::unique_ptr { + if (format.name == "VP8") { + return VP8Decoder::Create(); + } + if (format.name == "VP9") { + return 
VP9Decoder::Create(); + } + if (format.name == "H264") { + return H264Decoder::Create(); + } + RTC_NOTREACHED() << format.name; + return nullptr; + }); // Create and start call. - SendTask(RTC_FROM_HERE, task_queue(), [this, &configs]() { - CreateSendConfig(1, 0, 0, send_transport_.get()); - ConfigureEncoder(configs[0]); - CreateMatchingReceiveConfigs(receive_transport_.get()); - video_receive_configs_[0].renderer = &observer_; - // Disable to avoid post-decode frame dropping in VideoRenderFrames. - video_receive_configs_[0].enable_prerenderer_smoothing = false; - ConfigureDecoders(configs); - CreateVideoStreams(); - CreateFrameGeneratorCapturer(kFps, kWidth, kHeight); - Start(); - }); + SendTask(RTC_FROM_HERE, task_queue(), + [this, &configs, &encoder_factory, &decoder_factory]() { + CreateSendConfig(1, 0, 0, send_transport_.get()); + ConfigureEncoder(configs[0], &encoder_factory); + CreateMatchingReceiveConfigs(receive_transport_.get()); + video_receive_configs_[0].renderer = &observer_; + // Disable to avoid post-decode frame dropping in + // VideoRenderFrames. + video_receive_configs_[0].enable_prerenderer_smoothing = false; + ConfigureDecoders(configs, &decoder_factory); + CreateVideoStreams(); + CreateFrameGeneratorCapturer(kFps, kWidth, kHeight); + Start(); + }); EXPECT_TRUE(observer_.Wait()) << "Timed out waiting for frames."; for (size_t i = 1; i < configs.size(); ++i) { // Recreate VideoSendStream with new config (codec, temporal layers). 
- SendTask(RTC_FROM_HERE, task_queue(), [this, i, &configs]() { - DestroyVideoSendStreams(); - observer_.Reset(PayloadNameToPayloadType(configs[i].payload_name)); - - ConfigureEncoder(configs[i]); - CreateVideoSendStreams(); - GetVideoSendStream()->Start(); - CreateFrameGeneratorCapturer(kFps, kWidth / 2, kHeight / 2); - ConnectVideoSourcesToStreams(); - }); + SendTask( + RTC_FROM_HERE, task_queue(), [this, i, &configs, &encoder_factory]() { + DestroyVideoSendStreams(); + observer_.Reset(PayloadNameToPayloadType(configs[i].payload_name)); + + ConfigureEncoder(configs[i], &encoder_factory); + CreateVideoSendStreams(); + GetVideoSendStream()->Start(); + CreateFrameGeneratorCapturer(kFps, kWidth / 2, kHeight / 2); + ConnectVideoSourcesToStreams(); + }); EXPECT_TRUE(observer_.Wait()) << "Timed out waiting for frames."; } @@ -245,98 +280,28 @@ void MultiCodecReceiveTest::RunTestWithCodecs( } TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9) { - test::FunctionVideoEncoderFactory vp8_encoder_factory( - []() { return VP8Encoder::Create(); }); - test::FunctionVideoEncoderFactory vp9_encoder_factory( - []() { return VP9Encoder::Create(); }); - test::FunctionVideoDecoderFactory vp8_decoder_factory( - []() { return VP8Decoder::Create(); }); - test::FunctionVideoDecoderFactory vp9_decoder_factory( - []() { return VP9Decoder::Create(); }); - RunTestWithCodecs({{"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 1}, - {"VP9", &vp9_encoder_factory, &vp9_decoder_factory, 1}, - {"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 1}}); + RunTestWithCodecs({{"VP8", 1}, {"VP9", 1}, {"VP8", 1}}); } TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9WithTl) { - test::FunctionVideoEncoderFactory vp8_encoder_factory( - []() { return VP8Encoder::Create(); }); - test::FunctionVideoEncoderFactory vp9_encoder_factory( - []() { return VP9Encoder::Create(); }); - test::FunctionVideoDecoderFactory vp8_decoder_factory( - []() { return VP8Decoder::Create(); }); - 
test::FunctionVideoDecoderFactory vp9_decoder_factory( - []() { return VP9Decoder::Create(); }); - RunTestWithCodecs({{"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 2}, - {"VP9", &vp9_encoder_factory, &vp9_decoder_factory, 2}, - {"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 2}}); + RunTestWithCodecs({{"VP8", 2}, {"VP9", 2}, {"VP8", 2}}); } #if defined(WEBRTC_USE_H264) TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8H264) { - test::FunctionVideoEncoderFactory vp8_encoder_factory( - []() { return VP8Encoder::Create(); }); - test::FunctionVideoEncoderFactory h264_encoder_factory( - []() { return H264Encoder::Create(cricket::VideoCodec("H264")); }); - test::FunctionVideoDecoderFactory vp8_decoder_factory( - []() { return VP8Decoder::Create(); }); - test::FunctionVideoDecoderFactory h264_decoder_factory( - []() { return H264Decoder::Create(); }); - RunTestWithCodecs({{"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 1}, - {"H264", &h264_encoder_factory, &h264_decoder_factory, 1}, - {"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 1}}); + RunTestWithCodecs({{"VP8", 1}, {"H264", 1}, {"VP8", 1}}); } TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8H264WithTl) { - test::FunctionVideoEncoderFactory vp8_encoder_factory( - []() { return VP8Encoder::Create(); }); - test::FunctionVideoEncoderFactory h264_encoder_factory( - []() { return H264Encoder::Create(cricket::VideoCodec("H264")); }); - test::FunctionVideoDecoderFactory vp8_decoder_factory( - []() { return VP8Decoder::Create(); }); - test::FunctionVideoDecoderFactory h264_decoder_factory( - []() { return H264Decoder::Create(); }); - RunTestWithCodecs({{"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 3}, - {"H264", &h264_encoder_factory, &h264_decoder_factory, 1}, - {"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 3}}); + RunTestWithCodecs({{"VP8", 3}, {"H264", 1}, {"VP8", 3}}); } TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9H264) { - test::FunctionVideoEncoderFactory 
vp8_encoder_factory( - []() { return VP8Encoder::Create(); }); - test::FunctionVideoEncoderFactory vp9_encoder_factory( - []() { return VP9Encoder::Create(); }); - test::FunctionVideoEncoderFactory h264_encoder_factory( - []() { return H264Encoder::Create(cricket::VideoCodec("H264")); }); - test::FunctionVideoDecoderFactory vp8_decoder_factory( - []() { return VP8Decoder::Create(); }); - test::FunctionVideoDecoderFactory vp9_decoder_factory( - []() { return VP9Decoder::Create(); }); - test::FunctionVideoDecoderFactory h264_decoder_factory( - []() { return H264Decoder::Create(); }); - RunTestWithCodecs({{"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 1}, - {"VP9", &vp9_encoder_factory, &vp9_decoder_factory, 1}, - {"H264", &h264_encoder_factory, &h264_decoder_factory, 1}, - {"VP9", &vp9_encoder_factory, &vp9_decoder_factory, 1}}); + RunTestWithCodecs({{"VP8", 1}, {"VP9", 1}, {"H264", 1}, {"VP9", 1}}); } TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9H264WithTl) { - test::FunctionVideoEncoderFactory vp8_encoder_factory( - []() { return VP8Encoder::Create(); }); - test::FunctionVideoEncoderFactory vp9_encoder_factory( - []() { return VP9Encoder::Create(); }); - test::FunctionVideoEncoderFactory h264_encoder_factory( - []() { return H264Encoder::Create(cricket::VideoCodec("H264")); }); - test::FunctionVideoDecoderFactory vp8_decoder_factory( - []() { return VP8Decoder::Create(); }); - test::FunctionVideoDecoderFactory vp9_decoder_factory( - []() { return VP9Decoder::Create(); }); - test::FunctionVideoDecoderFactory h264_decoder_factory( - []() { return H264Decoder::Create(); }); - RunTestWithCodecs({{"VP8", &vp8_encoder_factory, &vp8_decoder_factory, 3}, - {"VP9", &vp9_encoder_factory, &vp9_decoder_factory, 2}, - {"H264", &h264_encoder_factory, &h264_decoder_factory, 1}, - {"VP9", &vp9_encoder_factory, &vp9_decoder_factory, 3}}); + RunTestWithCodecs({{"VP8", 3}, {"VP9", 2}, {"H264", 1}, {"VP9", 3}}); } #endif // defined(WEBRTC_USE_H264) diff --git 
a/video/end_to_end_tests/multi_stream_tester.cc b/video/end_to_end_tests/multi_stream_tester.cc index c3d41c37fa..20e128c2ad 100644 --- a/video/end_to_end_tests/multi_stream_tester.cc +++ b/video/end_to_end_tests/multi_stream_tester.cc @@ -102,9 +102,9 @@ void MultiStreamTester::RunTest() { VideoReceiveStream::Config receive_config(receiver_transport.get()); receive_config.rtp.remote_ssrc = ssrc; receive_config.rtp.local_ssrc = test::CallTest::kReceiverLocalVideoSsrc; + receive_config.decoder_factory = &decoder_factory; VideoReceiveStream::Decoder decoder = test::CreateMatchingDecoder(send_config); - decoder.decoder_factory = &decoder_factory; receive_config.decoders.push_back(decoder); UpdateReceiveConfig(i, &receive_config); diff --git a/video/end_to_end_tests/network_state_tests.cc b/video/end_to_end_tests/network_state_tests.cc index a0977ac773..9abde3bb32 100644 --- a/video/end_to_end_tests/network_state_tests.cc +++ b/video/end_to_end_tests/network_state_tests.cc @@ -15,6 +15,7 @@ #include "call/fake_network_pipe.h" #include "call/simulated_network.h" #include "modules/rtp_rtcp/source/rtp_packet.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "system_wrappers/include/sleep.h" #include "test/call_test.h" @@ -60,19 +61,19 @@ class NetworkStateEndToEndTest : public test::CallTest { bool SendRtp(const uint8_t* packet, size_t length, const PacketOptions& options) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); need_rtp_ = false; return true; } bool SendRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); need_rtcp_ = false; return true; } bool need_rtp_; bool need_rtcp_; - rtc::CriticalSection crit_; + Mutex mutex_; }; void VerifyNewVideoSendStreamsRespectNetworkState( MediaType network_to_bring_up, @@ -177,7 +178,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { down_frames_(0) {} Action OnSendRtp(const uint8_t* packet, 
size_t length) override { - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); if (rtp_packet.payload_size() == 0) @@ -188,7 +189,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { } Action OnSendRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); ++sender_rtcp_; packet_event_.Set(); return SEND_PACKET; @@ -200,7 +201,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { } Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); ++receiver_rtcp_; packet_event_.Set(); return SEND_PACKET; @@ -239,7 +240,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { // Sender-side network down. sender_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkDown); { - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); // After network goes down we shouldn't be encoding more frames. sender_state_ = kNetworkDown; } @@ -259,7 +260,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { // Network back up again for both. { - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); // It's OK to encode frames again, as we're about to bring up the // network. 
sender_state_ = kNetworkUp; @@ -277,7 +278,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { int32_t Encode(const VideoFrame& input_image, const std::vector* frame_types) override { { - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); if (sender_state_ == kNetworkDown) { ++down_frames_; EXPECT_LE(down_frames_, 1) @@ -298,7 +299,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { int initial_sender_rtcp; int initial_receiver_rtcp; { - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); initial_sender_rtp = sender_rtp_; initial_sender_rtcp = sender_rtcp_; initial_receiver_rtcp = receiver_rtcp_; @@ -308,7 +309,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { while (!sender_done || !receiver_done) { packet_event_.Wait(kSilenceTimeoutMs); int64_t time_now_ms = clock_->TimeInMilliseconds(); - rtc::CritScope lock(&test_crit_); + MutexLock lock(&test_mutex_); if (sender_down) { ASSERT_LE(sender_rtp_ - initial_sender_rtp - sender_padding_, kNumAcceptedDowntimeRtp) @@ -340,18 +341,18 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { } TaskQueueBase* const task_queue_; - rtc::CriticalSection test_crit_; + Mutex test_mutex_; rtc::Event encoded_frames_; rtc::Event packet_event_; Call* sender_call_; Call* receiver_call_; test::VideoEncoderProxyFactory encoder_factory_; - NetworkState sender_state_ RTC_GUARDED_BY(test_crit_); - int sender_rtp_ RTC_GUARDED_BY(test_crit_); - int sender_padding_ RTC_GUARDED_BY(test_crit_); - int sender_rtcp_ RTC_GUARDED_BY(test_crit_); - int receiver_rtcp_ RTC_GUARDED_BY(test_crit_); - int down_frames_ RTC_GUARDED_BY(test_crit_); + NetworkState sender_state_ RTC_GUARDED_BY(test_mutex_); + int sender_rtp_ RTC_GUARDED_BY(test_mutex_); + int sender_padding_ RTC_GUARDED_BY(test_mutex_); + int sender_rtcp_ RTC_GUARDED_BY(test_mutex_); + int receiver_rtcp_ RTC_GUARDED_BY(test_mutex_); + int down_frames_ RTC_GUARDED_BY(test_mutex_); } test(task_queue()); RunBaseTest(&test); diff 
--git a/video/end_to_end_tests/probing_tests.cc b/video/end_to_end_tests/probing_tests.cc deleted file mode 100644 index 34f2a7953e..0000000000 --- a/video/end_to_end_tests/probing_tests.cc +++ /dev/null @@ -1,326 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "api/task_queue/task_queue_base.h" -#include "api/test/simulated_network.h" -#include "call/fake_network_pipe.h" -#include "call/simulated_network.h" -#include "rtc_base/task_queue_for_test.h" -#include "test/call_test.h" -#include "test/field_trial.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { -enum : int { // The first valid value is 1. 
- kTransportSequenceNumberExtensionId = 1, -}; -} // namespace - -class ProbingEndToEndTest : public test::CallTest { - public: - ProbingEndToEndTest() { - RegisterRtpExtension(RtpExtension(RtpExtension::kTransportSequenceNumberUri, - kTransportSequenceNumberExtensionId)); - } -}; - -class ProbingTest : public test::EndToEndTest { - public: - explicit ProbingTest(int start_bitrate_bps) - : clock_(Clock::GetRealTimeClock()), - start_bitrate_bps_(start_bitrate_bps), - state_(0), - sender_call_(nullptr) {} - - void ModifySenderBitrateConfig(BitrateConstraints* bitrate_config) override { - bitrate_config->start_bitrate_bps = start_bitrate_bps_; - } - - void OnCallsCreated(Call* sender_call, Call* receiver_call) override { - sender_call_ = sender_call; - } - - protected: - Clock* const clock_; - const int start_bitrate_bps_; - int state_; - Call* sender_call_; -}; - -// Flaky under MemorySanitizer: bugs.webrtc.org/7419 -// Flaky on iOS bots: bugs.webrtc.org/7851 -#if defined(MEMORY_SANITIZER) -TEST_F(ProbingEndToEndTest, DISABLED_InitialProbing) { -#elif defined(TARGET_IPHONE_SIMULATOR) && TARGET_IPHONE_SIMULATOR -TEST_F(ProbingEndToEndTest, DISABLED_InitialProbing) { -#else -TEST_F(ProbingEndToEndTest, InitialProbing) { -#endif - - class InitialProbingTest : public ProbingTest { - public: - explicit InitialProbingTest(bool* success, TaskQueueBase* task_queue) - : ProbingTest(300000), success_(success), task_queue_(task_queue) { - *success_ = false; - } - - void PerformTest() override { - int64_t start_time_ms = clock_->TimeInMilliseconds(); - do { - if (clock_->TimeInMilliseconds() - start_time_ms > kTimeoutMs) - break; - - Call::Stats stats; - SendTask(RTC_FROM_HERE, task_queue_, - [this, &stats]() { stats = sender_call_->GetStats(); }); - // Initial probing is done with a x3 and x6 multiplier of the start - // bitrate, so a x4 multiplier is a high enough threshold. 
- if (stats.send_bandwidth_bps > 4 * 300000) { - *success_ = true; - break; - } - } while (!observation_complete_.Wait(20)); - } - - private: - const int kTimeoutMs = 1000; - bool* const success_; - TaskQueueBase* const task_queue_; - }; - - bool success = false; - const int kMaxAttempts = 3; - for (int i = 0; i < kMaxAttempts; ++i) { - InitialProbingTest test(&success, task_queue()); - RunBaseTest(&test); - if (success) - return; - } - EXPECT_TRUE(success) << "Failed to perform mid initial probing (" - << kMaxAttempts << " attempts)."; -} - -// Fails on Linux MSan: bugs.webrtc.org/7428 -#if defined(MEMORY_SANITIZER) -TEST_F(ProbingEndToEndTest, DISABLED_TriggerMidCallProbing) { -// Fails on iOS bots: bugs.webrtc.org/7851 -#elif defined(TARGET_IPHONE_SIMULATOR) && TARGET_IPHONE_SIMULATOR -TEST_F(ProbingEndToEndTest, DISABLED_TriggerMidCallProbing) { -#else -TEST_F(ProbingEndToEndTest, TriggerMidCallProbing) { -#endif - - class TriggerMidCallProbingTest : public ProbingTest { - public: - TriggerMidCallProbingTest(TaskQueueBase* task_queue, bool* success) - : ProbingTest(300000), success_(success), task_queue_(task_queue) {} - - void PerformTest() override { - *success_ = false; - int64_t start_time_ms = clock_->TimeInMilliseconds(); - do { - if (clock_->TimeInMilliseconds() - start_time_ms > kTimeoutMs) - break; - - Call::Stats stats; - SendTask(RTC_FROM_HERE, task_queue_, - [this, &stats]() { stats = sender_call_->GetStats(); }); - - switch (state_) { - case 0: - if (stats.send_bandwidth_bps > 5 * 300000) { - BitrateConstraints bitrate_config; - bitrate_config.max_bitrate_bps = 100000; - SendTask(RTC_FROM_HERE, task_queue_, [this, &bitrate_config]() { - sender_call_->GetTransportControllerSend() - ->SetSdpBitrateParameters(bitrate_config); - }); - ++state_; - } - break; - case 1: - if (stats.send_bandwidth_bps < 110000) { - BitrateConstraints bitrate_config; - bitrate_config.max_bitrate_bps = 2500000; - SendTask(RTC_FROM_HERE, task_queue_, [this, &bitrate_config]() 
{ - sender_call_->GetTransportControllerSend() - ->SetSdpBitrateParameters(bitrate_config); - }); - ++state_; - } - break; - case 2: - // During high cpu load the pacer will not be able to pace packets - // at the correct speed, but if we go from 110 to 1250 kbps - // in 5 seconds then it is due to probing. - if (stats.send_bandwidth_bps > 1250000) { - *success_ = true; - observation_complete_.Set(); - } - break; - } - } while (!observation_complete_.Wait(20)); - } - - private: - const int kTimeoutMs = 5000; - bool* const success_; - TaskQueueBase* const task_queue_; - }; - - bool success = false; - const int kMaxAttempts = 3; - for (int i = 0; i < kMaxAttempts; ++i) { - TriggerMidCallProbingTest test(task_queue(), &success); - RunBaseTest(&test); - if (success) - return; - } - EXPECT_TRUE(success) << "Failed to perform mid call probing (" << kMaxAttempts - << " attempts)."; -} - -#if defined(MEMORY_SANITIZER) -TEST_F(ProbingEndToEndTest, DISABLED_ProbeOnVideoEncoderReconfiguration) { -#elif defined(TARGET_IPHONE_SIMULATOR) && TARGET_IPHONE_SIMULATOR -TEST_F(ProbingEndToEndTest, DISABLED_ProbeOnVideoEncoderReconfiguration) { -#else -TEST_F(ProbingEndToEndTest, ProbeOnVideoEncoderReconfiguration) { -#endif - - class ReconfigureTest : public ProbingTest { - public: - ReconfigureTest(TaskQueueBase* task_queue, bool* success) - : ProbingTest(50000), task_queue_(task_queue), success_(success) {} - - void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, - VideoEncoderConfig* encoder_config) override { - encoder_config_ = encoder_config; - } - - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { - send_stream_ = send_stream; - } - - std::unique_ptr CreateSendTransport( - TaskQueueBase* task_queue, - Call* sender_call) override { - auto network = - std::make_unique(BuiltInNetworkBehaviorConfig()); - send_simulated_network_ = network.get(); - return std::make_unique( - 
task_queue, sender_call, this, test::PacketTransport::kSender, - CallTest::payload_type_map_, - std::make_unique(Clock::GetRealTimeClock(), - std::move(network))); - } - - void PerformTest() override { - *success_ = false; - int64_t start_time_ms = clock_->TimeInMilliseconds(); - int64_t max_allocation_change_time_ms = -1; - do { - if (clock_->TimeInMilliseconds() - start_time_ms > kTimeoutMs) - break; - - Call::Stats stats; - SendTask(RTC_FROM_HERE, task_queue_, - [this, &stats]() { stats = sender_call_->GetStats(); }); - - switch (state_) { - case 0: - // Wait until initial probing has been completed (6 times start - // bitrate). - if (stats.send_bandwidth_bps >= 250000 && - stats.send_bandwidth_bps <= 350000) { - BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 200; - send_simulated_network_->SetConfig(config); - - // In order to speed up the test we can interrupt exponential - // probing by toggling the network availability. The alternative - // is to wait for it to time out (1000 ms). - sender_call_->GetTransportControllerSend()->OnNetworkAvailability( - false); - sender_call_->GetTransportControllerSend()->OnNetworkAvailability( - true); - - ++state_; - } - break; - case 1: - if (stats.send_bandwidth_bps <= 200000) { - // Initial probing finished. Increase link capacity and wait - // until BWE ramped up enough to be in ALR. This takes a few - // seconds. - BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 5000; - send_simulated_network_->SetConfig(config); - ++state_; - } - break; - case 2: - if (stats.send_bandwidth_bps > 240000) { - // BWE ramped up enough to be in ALR. Setting higher max_bitrate - // should trigger an allocation probe and fast ramp-up. 
- encoder_config_->max_bitrate_bps = 2000000; - encoder_config_->simulcast_layers[0].max_bitrate_bps = 1200000; - SendTask(RTC_FROM_HERE, task_queue_, [this]() { - send_stream_->ReconfigureVideoEncoder(encoder_config_->Copy()); - }); - max_allocation_change_time_ms = clock_->TimeInMilliseconds(); - ++state_; - } - break; - case 3: - if (stats.send_bandwidth_bps >= 1000000) { - EXPECT_LT( - clock_->TimeInMilliseconds() - max_allocation_change_time_ms, - kRampUpMaxDurationMs); - *success_ = true; - observation_complete_.Set(); - } - break; - } - } while (!observation_complete_.Wait(20)); - } - - private: - const int kTimeoutMs = 10000; - const int kRampUpMaxDurationMs = 500; - - TaskQueueBase* const task_queue_; - bool* const success_; - SimulatedNetwork* send_simulated_network_; - VideoSendStream* send_stream_; - VideoEncoderConfig* encoder_config_; - }; - - bool success = false; - const int kMaxAttempts = 3; - for (int i = 0; i < kMaxAttempts; ++i) { - ReconfigureTest test(task_queue(), &success); - RunBaseTest(&test); - if (success) { - return; - } - } - EXPECT_TRUE(success) << "Failed to perform mid call probing (" << kMaxAttempts - << " attempts)."; -} - -} // namespace webrtc diff --git a/video/end_to_end_tests/retransmission_tests.cc b/video/end_to_end_tests/retransmission_tests.cc index 407aa5f2dc..19eb38d388 100644 --- a/video/end_to_end_tests/retransmission_tests.cc +++ b/video/end_to_end_tests/retransmission_tests.cc @@ -18,8 +18,9 @@ #include "call/simulated_network.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "rtc_base/event.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" -#include "system_wrappers/include/sleep.h" #include "test/call_test.h" #include "test/field_trial.h" #include "test/gtest.h" @@ -58,7 +59,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) { private: Action OnSendRtp(const uint8_t* packet, size_t length) 
override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -95,7 +96,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) { } Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet, length)); nacks_left_ -= parser.nack()->num_packets(); @@ -116,12 +117,12 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) { "rendered."; } - rtc::CriticalSection crit_; + Mutex mutex_; std::set dropped_packets_; std::set retransmitted_packets_; uint64_t sent_rtp_packets_; int packets_left_to_drop_; - int nacks_left_ RTC_GUARDED_BY(&crit_); + int nacks_left_ RTC_GUARDED_BY(&mutex_); } test; RunBaseTest(&test); @@ -203,7 +204,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesNackAndRetransmitsAudio) { TEST_F(RetransmissionEndToEndTest, StopSendingKeyframeRequestsForInactiveStream) { - class KeyframeRequestObserver : public test::EndToEndTest { + class KeyframeRequestObserver : public test::EndToEndTest, public QueuedTask { public: explicit KeyframeRequestObserver(TaskQueueBase* task_queue) : clock_(Clock::GetRealTimeClock()), task_queue_(task_queue) {} @@ -216,28 +217,59 @@ TEST_F(RetransmissionEndToEndTest, receive_stream_ = receive_streams[0]; } + Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { + test::RtcpPacketParser parser; + EXPECT_TRUE(parser.Parse(packet, length)); + if (parser.pli()->num_packets() > 0) + task_queue_->PostTask(std::unique_ptr(this)); + return SEND_PACKET; + } + + bool PollStats() { + if (receive_stream_->GetStats().frames_decoded > 0) { + frame_decoded_ = true; + } else if (clock_->TimeInMilliseconds() - start_time_ < 5000) { + task_queue_->PostDelayedTask(std::unique_ptr(this), 100); + return false; + } + return true; + } + void PerformTest() override { - bool frame_decoded = false; - int64_t 
start_time = clock_->TimeInMilliseconds(); - while (clock_->TimeInMilliseconds() - start_time <= 5000) { - if (receive_stream_->GetStats().frames_decoded > 0) { - frame_decoded = true; - break; + start_time_ = clock_->TimeInMilliseconds(); + task_queue_->PostTask(std::unique_ptr(this)); + test_done_.Wait(rtc::Event::kForever); + } + + bool Run() override { + if (!frame_decoded_) { + if (PollStats()) { + send_stream_->Stop(); + if (!frame_decoded_) { + test_done_.Set(); + } else { + // Now we wait for the PLI packet. Once we receive it, a task + // will be posted (see OnReceiveRtcp) and we'll check the stats + // once more before signaling that we're done. + } } - SleepMs(100); + } else { + EXPECT_EQ( + 1U, + receive_stream_->GetStats().rtcp_packet_type_counts.pli_packets); + test_done_.Set(); } - ASSERT_TRUE(frame_decoded); - SendTask(RTC_FROM_HERE, task_queue_, [this]() { send_stream_->Stop(); }); - SleepMs(10000); - ASSERT_EQ( - 1U, receive_stream_->GetStats().rtcp_packet_type_counts.pli_packets); + return false; } private: - Clock* clock_; + Clock* const clock_; VideoSendStream* send_stream_; VideoReceiveStream* receive_stream_; TaskQueueBase* const task_queue_; + rtc::Event test_done_; + bool frame_decoded_ = false; + int64_t start_time_ = 0; } test(task_queue()); RunBaseTest(&test); @@ -259,7 +291,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -277,7 +309,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { } Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet, length)); if (!nack_enabled_) @@ -288,7 +320,7 @@ void 
RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { } void OnFrame(const VideoFrame& video_frame) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (received_pli_ && video_frame.timestamp() > highest_dropped_timestamp_) { observation_complete_.Set(); @@ -312,12 +344,12 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { "rendered afterwards."; } - rtc::CriticalSection crit_; + Mutex mutex_; int rtp_history_ms_; bool nack_enabled_; - uint32_t highest_dropped_timestamp_ RTC_GUARDED_BY(&crit_); - int frames_to_drop_ RTC_GUARDED_BY(&crit_); - bool received_pli_ RTC_GUARDED_BY(&crit_); + uint32_t highest_dropped_timestamp_ RTC_GUARDED_BY(&mutex_); + int frames_to_drop_ RTC_GUARDED_BY(&mutex_); + bool received_pli_ RTC_GUARDED_BY(&mutex_); } test(rtp_history_ms); RunBaseTest(&test); @@ -351,7 +383,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -396,7 +428,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, void OnFrame(const VideoFrame& frame) override { EXPECT_EQ(kVideoRotation_90, frame.rotation()); { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (frame.timestamp() == retransmitted_timestamp_) observation_complete_.Set(); rendered_timestamps_.push_back(frame.timestamp()); @@ -471,7 +503,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, return kFakeVideoSendPayloadType; } - rtc::CriticalSection crit_; + Mutex mutex_; rtc::VideoSinkInterface* orig_renderer_ = nullptr; const int payload_type_; const uint32_t retransmission_ssrc_; @@ -479,8 +511,8 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, test::FunctionVideoEncoderFactory encoder_factory_; const std::string 
payload_name_; int marker_bits_observed_; - uint32_t retransmitted_timestamp_ RTC_GUARDED_BY(&crit_); - std::vector rendered_timestamps_ RTC_GUARDED_BY(&crit_); + uint32_t retransmitted_timestamp_ RTC_GUARDED_BY(&mutex_); + std::vector rendered_timestamps_ RTC_GUARDED_BY(&mutex_); } test(enable_rtx, enable_red); RunBaseTest(&test); diff --git a/video/end_to_end_tests/rtp_rtcp_tests.cc b/video/end_to_end_tests/rtp_rtcp_tests.cc index 71783febfe..76018027d6 100644 --- a/video/end_to_end_tests/rtp_rtcp_tests.cc +++ b/video/end_to_end_tests/rtp_rtcp_tests.cc @@ -16,6 +16,7 @@ #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "test/call_test.h" #include "test/gtest.h" @@ -46,7 +47,7 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) { private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (++sent_rtp_ % 3 == 0) return DROP_PACKET; @@ -54,7 +55,7 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) { } Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ++sent_rtcp_; test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet, length)); @@ -105,11 +106,11 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) { } RtcpMode rtcp_mode_; - rtc::CriticalSection crit_; + Mutex mutex_; // Must be protected since RTCP can be sent by both the process thread // and the pacer thread. 
- int sent_rtp_ RTC_GUARDED_BY(&crit_); - int sent_rtcp_ RTC_GUARDED_BY(&crit_); + int sent_rtp_ RTC_GUARDED_BY(&mutex_); + int sent_rtcp_ RTC_GUARDED_BY(&mutex_); } test(rtcp_mode); RunBaseTest(&test); @@ -176,7 +177,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( } void ResetExpectedSsrcs(size_t num_expected_ssrcs) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ssrc_observed_.clear(); ssrcs_to_observe_ = num_expected_ssrcs; } @@ -185,7 +186,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( void ValidateTimestampGap(uint32_t ssrc, uint32_t timestamp, bool only_padding) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) { + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) { static const int32_t kMaxTimestampGap = kDefaultTimeoutMs * 90; auto timestamp_it = last_observed_timestamp_.find(ssrc); if (timestamp_it == last_observed_timestamp_.end()) { @@ -240,7 +241,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( } if (!ssrc_is_rtx_[ssrc]) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ValidateTimestampGap(ssrc, timestamp, only_padding); // Wait for media packets on all ssrcs. 
@@ -261,7 +262,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( uint32_t ssrc = rtcp_parser.sender_report()->sender_ssrc(); uint32_t rtcp_timestamp = rtcp_parser.sender_report()->rtp_timestamp(); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ValidateTimestampGap(ssrc, rtcp_timestamp, false); } return SEND_PACKET; @@ -272,9 +273,9 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( std::map last_observed_timestamp_; std::map ssrc_is_rtx_; - rtc::CriticalSection crit_; - size_t ssrcs_to_observe_ RTC_GUARDED_BY(crit_); - std::map ssrc_observed_ RTC_GUARDED_BY(crit_); + Mutex mutex_; + size_t ssrcs_to_observe_ RTC_GUARDED_BY(mutex_); + std::map ssrc_observed_ RTC_GUARDED_BY(mutex_); } observer(use_rtx); std::unique_ptr send_transport; @@ -414,13 +415,13 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) { num_flexfec_packets_sent_(0) {} void ResetPacketCount() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); num_flexfec_packets_sent_ = 0; } private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -468,10 +469,10 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) { } absl::optional last_observed_sequence_number_ - RTC_GUARDED_BY(crit_); - absl::optional last_observed_timestamp_ RTC_GUARDED_BY(crit_); - size_t num_flexfec_packets_sent_ RTC_GUARDED_BY(crit_); - rtc::CriticalSection crit_; + RTC_GUARDED_BY(mutex_); + absl::optional last_observed_timestamp_ RTC_GUARDED_BY(mutex_); + size_t num_flexfec_packets_sent_ RTC_GUARDED_BY(mutex_); + Mutex mutex_; } observer; static constexpr int kFrameMaxWidth = 320; diff --git a/video/end_to_end_tests/ssrc_tests.cc b/video/end_to_end_tests/ssrc_tests.cc index 8efad01f5a..cedae3934d 100644 --- a/video/end_to_end_tests/ssrc_tests.cc +++ b/video/end_to_end_tests/ssrc_tests.cc @@ -63,13 +63,12 @@ 
TEST_F(SsrcEndToEndTest, UnknownRtpPacketGivesUnknownSsrcReturnCode) { if (RtpHeaderParser::IsRtcp(packet.cdata(), packet.size())) { return receiver_->DeliverPacket(media_type, std::move(packet), packet_time_us); - } else { - DeliveryStatus delivery_status = receiver_->DeliverPacket( - media_type, std::move(packet), packet_time_us); - EXPECT_EQ(DELIVERY_UNKNOWN_SSRC, delivery_status); - delivered_packet_.Set(); - return delivery_status; } + DeliveryStatus delivery_status = receiver_->DeliverPacket( + media_type, std::move(packet), packet_time_us); + EXPECT_EQ(DELIVERY_UNKNOWN_SSRC, delivery_status); + delivered_packet_.Set(); + return delivery_status; } PacketReceiver* receiver_; @@ -172,38 +171,17 @@ void SsrcEndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs, size_t GetNumVideoStreams() const override { return num_ssrcs_; } - // This test use other VideoStream settings than the the default settings - // implemented in DefaultVideoStreamFactory. Therefore this test implement - // its own VideoEncoderConfig::VideoStreamFactoryInterface which is created - // in ModifyVideoConfigs. - class VideoStreamFactory - : public VideoEncoderConfig::VideoStreamFactoryInterface { - public: - VideoStreamFactory() {} - - private: - std::vector CreateEncoderStreams( - int width, - int height, - const VideoEncoderConfig& encoder_config) override { - std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); - // Set low simulcast bitrates to not have to wait for bandwidth ramp-up. 
- for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { - streams[i].min_bitrate_bps = 10000; - streams[i].target_bitrate_bps = 15000; - streams[i].max_bitrate_bps = 20000; - } - return streams; - } - }; - void ModifyVideoConfigs( VideoSendStream::Config* send_config, std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { - encoder_config->video_stream_factory = - new rtc::RefCountedObject(); + // Set low simulcast bitrates to not have to wait for bandwidth ramp-up. + encoder_config->max_bitrate_bps = 50000; + for (auto& layer : encoder_config->simulcast_layers) { + layer.min_bitrate_bps = 10000; + layer.target_bitrate_bps = 15000; + layer.max_bitrate_bps = 20000; + } video_encoder_config_all_streams_ = encoder_config->Copy(); if (send_single_ssrc_first_) encoder_config->number_of_streams = 1; @@ -292,39 +270,17 @@ TEST_F(SsrcEndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) { size_t GetNumVideoStreams() const override { return kNumSimulcastStreams; } - // This test use other VideoStream settings than the the default settings - // implemented in DefaultVideoStreamFactory. Therefore this test implement - // its own VideoEncoderConfig::VideoStreamFactoryInterface which is created - // in ModifyVideoConfigs. - class VideoStreamFactory - : public VideoEncoderConfig::VideoStreamFactoryInterface { - public: - VideoStreamFactory() {} - - private: - std::vector CreateEncoderStreams( - int width, - int height, - const VideoEncoderConfig& encoder_config) override { - std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); - // Set low simulcast bitrates to not have to wait for bandwidth ramp-up. 
- for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { - streams[i].min_bitrate_bps = 10000; - streams[i].target_bitrate_bps = 15000; - streams[i].max_bitrate_bps = 20000; - } - return streams; - } - }; - void ModifyVideoConfigs( VideoSendStream::Config* send_config, std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { // Set low simulcast bitrates to not have to wait for bandwidth ramp-up. - encoder_config->video_stream_factory = - new rtc::RefCountedObject(); + encoder_config->max_bitrate_bps = 50000; + for (auto& layer : encoder_config->simulcast_layers) { + layer.min_bitrate_bps = 10000; + layer.target_bitrate_bps = 15000; + layer.max_bitrate_bps = 20000; + } send_config->rtp.rtx.payload_type = kSendRtxPayloadType; for (size_t i = 0; i < kNumSimulcastStreams; ++i) diff --git a/video/end_to_end_tests/stats_tests.cc b/video/end_to_end_tests/stats_tests.cc index a6b1c9e142..ae0532b9a3 100644 --- a/video/end_to_end_tests/stats_tests.cc +++ b/video/end_to_end_tests/stats_tests.cc @@ -11,6 +11,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" #include "api/test/simulated_network.h" #include "api/test/video/function_video_encoder_factory.h" @@ -19,6 +20,7 @@ #include "modules/rtp_rtcp/source/rtp_utility.h" #include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "system_wrappers/include/metrics.h" #include "system_wrappers/include/sleep.h" @@ -167,44 +169,42 @@ TEST_F(StatsEndToEndTest, GetStats) { stats.encoder_implementation_name == test::FakeEncoder::kImplementationName; - for (std::map::const_iterator it = - stats.substreams.begin(); - it != stats.substreams.end(); ++it) { - if (expected_send_ssrcs_.find(it->first) == expected_send_ssrcs_.end()) + for (const auto& kv : stats.substreams) { + if 
(expected_send_ssrcs_.find(kv.first) == expected_send_ssrcs_.end()) continue; // Probably RTX. - send_stats_filled_[CompoundKey("CapturedFrameRate", it->first)] |= + send_stats_filled_[CompoundKey("CapturedFrameRate", kv.first)] |= stats.input_frame_rate != 0; - const VideoSendStream::StreamStats& stream_stats = it->second; + const VideoSendStream::StreamStats& stream_stats = kv.second; - send_stats_filled_[CompoundKey("StatisticsUpdated", it->first)] |= + send_stats_filled_[CompoundKey("StatisticsUpdated", kv.first)] |= stream_stats.rtcp_stats.packets_lost != 0 || stream_stats.rtcp_stats.extended_highest_sequence_number != 0 || stream_stats.rtcp_stats.fraction_lost != 0; - send_stats_filled_[CompoundKey("DataCountersUpdated", it->first)] |= + send_stats_filled_[CompoundKey("DataCountersUpdated", kv.first)] |= stream_stats.rtp_stats.fec.packets != 0 || stream_stats.rtp_stats.transmitted.padding_bytes != 0 || stream_stats.rtp_stats.retransmitted.packets != 0 || stream_stats.rtp_stats.transmitted.packets != 0; send_stats_filled_[CompoundKey("BitrateStatisticsObserver.Total", - it->first)] |= + kv.first)] |= stream_stats.total_bitrate_bps != 0; send_stats_filled_[CompoundKey("BitrateStatisticsObserver.Retransmit", - it->first)] |= + kv.first)] |= stream_stats.retransmit_bitrate_bps != 0; - send_stats_filled_[CompoundKey("FrameCountObserver", it->first)] |= + send_stats_filled_[CompoundKey("FrameCountObserver", kv.first)] |= stream_stats.frame_counts.delta_frames != 0 || stream_stats.frame_counts.key_frames != 0; - send_stats_filled_[CompoundKey("OutgoingRate", it->first)] |= + send_stats_filled_[CompoundKey("OutgoingRate", kv.first)] |= stats.encode_frame_rate != 0; - send_stats_filled_[CompoundKey("Delay", it->first)] |= + send_stats_filled_[CompoundKey("Delay", kv.first)] |= stream_stats.avg_delay_ms != 0 || stream_stats.max_delay_ms != 0; // TODO(pbos): Use CompoundKey when the test makes sure that all SSRCs @@ -246,45 +246,25 @@ TEST_F(StatsEndToEndTest, GetStats) 
{ Clock::GetRealTimeClock(), std::make_unique(network_config))); } + void ModifySenderBitrateConfig( BitrateConstraints* bitrate_config) override { bitrate_config->start_bitrate_bps = kStartBitrateBps; } - // This test use other VideoStream settings than the the default settings - // implemented in DefaultVideoStreamFactory. Therefore this test implement - // its own VideoEncoderConfig::VideoStreamFactoryInterface which is created - // in ModifyVideoConfigs. - class VideoStreamFactory - : public VideoEncoderConfig::VideoStreamFactoryInterface { - public: // NOLINT(whitespace/blank_line) - VideoStreamFactory() {} - - private: - std::vector CreateEncoderStreams( - int width, - int height, - const VideoEncoderConfig& encoder_config) override { - std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); - // Set low simulcast bitrates to not have to wait for bandwidth ramp-up. - for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { - streams[i].min_bitrate_bps = 10000; - streams[i].target_bitrate_bps = 15000; - streams[i].max_bitrate_bps = 20000; - } - return streams; - } - }; - void ModifyVideoConfigs( VideoSendStream::Config* send_config, std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { - encoder_config->video_stream_factory = - new rtc::RefCountedObject(); - expected_cname_ = send_config->rtp.c_name = "SomeCName"; + // Set low simulcast bitrates to not have to wait for bandwidth ramp-up. 
+ encoder_config->max_bitrate_bps = 50000; + for (auto& layer : encoder_config->simulcast_layers) { + layer.min_bitrate_bps = 10000; + layer.target_bitrate_bps = 15000; + layer.max_bitrate_bps = 20000; + } + send_config->rtp.c_name = "SomeCName"; send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs; send_config->rtp.rtx.payload_type = kSendRtxPayloadType; @@ -318,44 +298,42 @@ TEST_F(StatsEndToEndTest, GetStats) { const std::vector& receive_streams) override { send_stream_ = send_stream; receive_streams_ = receive_streams; + task_queue_ = TaskQueueBase::Current(); } void PerformTest() override { Clock* clock = Clock::GetRealTimeClock(); - int64_t now = clock->TimeInMilliseconds(); - int64_t stop_time = now + test::CallTest::kLongTimeoutMs; + int64_t now_ms = clock->TimeInMilliseconds(); + int64_t stop_time_ms = now_ms + test::CallTest::kLongTimeoutMs; bool receive_ok = false; bool send_ok = false; - while (now < stop_time) { - if (!receive_ok) - receive_ok = CheckReceiveStats(); + while (now_ms < stop_time_ms) { + if (!receive_ok && task_queue_) { + SendTask(RTC_FROM_HERE, task_queue_, + [&]() { receive_ok = CheckReceiveStats(); }); + } if (!send_ok) send_ok = CheckSendStats(); if (receive_ok && send_ok) return; - int64_t time_until_timout_ = stop_time - now; - if (time_until_timout_ > 0) - check_stats_event_.Wait(time_until_timout_); - now = clock->TimeInMilliseconds(); + int64_t time_until_timeout_ms = stop_time_ms - now_ms; + if (time_until_timeout_ms > 0) + check_stats_event_.Wait(time_until_timeout_ms); + now_ms = clock->TimeInMilliseconds(); } ADD_FAILURE() << "Timed out waiting for filled stats."; - for (std::map::const_iterator it = - receive_stats_filled_.begin(); - it != receive_stats_filled_.end(); ++it) { - if (!it->second) { - ADD_FAILURE() << "Missing receive stats: " << it->first; + for (const auto& kv : receive_stats_filled_) { + if (!kv.second) { + ADD_FAILURE() << "Missing receive stats: " << kv.first; } } - - for (std::map::const_iterator it 
= - send_stats_filled_.begin(); - it != send_stats_filled_.end(); ++it) { - if (!it->second) { - ADD_FAILURE() << "Missing send stats: " << it->first; + for (const auto& kv : send_stats_filled_) { + if (!kv.second) { + ADD_FAILURE() << "Missing send stats: " << kv.first; } } } @@ -369,10 +347,10 @@ TEST_F(StatsEndToEndTest, GetStats) { std::vector expected_receive_ssrcs_; std::set expected_send_ssrcs_; - std::string expected_cname_; rtc::Event check_stats_event_; ReceiveStreamRenderer receive_stream_renderer_; + TaskQueueBase* task_queue_ = nullptr; } test; RunBaseTest(&test); @@ -393,9 +371,9 @@ TEST_F(StatsEndToEndTest, TimingFramesAreReported) { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back( RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId)); - for (size_t i = 0; i < receive_configs->size(); ++i) { - (*receive_configs)[i].rtp.extensions.clear(); - (*receive_configs)[i].rtp.extensions.push_back( + for (auto& receive_config : *receive_configs) { + receive_config.rtp.extensions.clear(); + receive_config.rtp.extensions.push_back( RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId)); } } @@ -404,22 +382,28 @@ TEST_F(StatsEndToEndTest, TimingFramesAreReported) { VideoSendStream* send_stream, const std::vector& receive_streams) override { receive_streams_ = receive_streams; + task_queue_ = TaskQueueBase::Current(); } void PerformTest() override { // No frames reported initially. - for (size_t i = 0; i < receive_streams_.size(); ++i) { - EXPECT_FALSE(receive_streams_[i]->GetStats().timing_frame_info); - } + SendTask(RTC_FROM_HERE, task_queue_, [&]() { + for (const auto& receive_stream : receive_streams_) { + EXPECT_FALSE(receive_stream->GetStats().timing_frame_info); + } + }); // Wait for at least one timing frame to be sent with 100ms grace period. SleepMs(kDefaultTimingFramesDelayMs + 100); // Check that timing frames are reported for each stream. 
- for (size_t i = 0; i < receive_streams_.size(); ++i) { - EXPECT_TRUE(receive_streams_[i]->GetStats().timing_frame_info); - } + SendTask(RTC_FROM_HERE, task_queue_, [&]() { + for (const auto& receive_stream : receive_streams_) { + EXPECT_TRUE(receive_stream->GetStats().timing_frame_info); + } + }); } std::vector receive_streams_; + TaskQueueBase* task_queue_ = nullptr; } test; RunBaseTest(&test); @@ -427,7 +411,8 @@ TEST_F(StatsEndToEndTest, TimingFramesAreReported) { TEST_F(StatsEndToEndTest, TestReceivedRtpPacketStats) { static const size_t kNumRtpPacketsToSend = 5; - class ReceivedRtpStatsObserver : public test::EndToEndTest { + class ReceivedRtpStatsObserver : public test::EndToEndTest, + public QueuedTask { public: ReceivedRtpStatsObserver() : EndToEndTest(kDefaultTimeoutMs), @@ -439,14 +424,14 @@ TEST_F(StatsEndToEndTest, TestReceivedRtpPacketStats) { VideoSendStream* send_stream, const std::vector& receive_streams) override { receive_stream_ = receive_streams[0]; + task_queue_ = TaskQueueBase::Current(); + EXPECT_TRUE(task_queue_ != nullptr); } Action OnSendRtp(const uint8_t* packet, size_t length) override { if (sent_rtp_ >= kNumRtpPacketsToSend) { - VideoReceiveStream::Stats stats = receive_stream_->GetStats(); - if (kNumRtpPacketsToSend == stats.rtp_stats.packet_counter.packets) { - observation_complete_.Set(); - } + // Need to check the stats on the correct thread. 
+ task_queue_->PostTask(std::unique_ptr(this)); return DROP_PACKET; } ++sent_rtp_; @@ -458,8 +443,17 @@ TEST_F(StatsEndToEndTest, TestReceivedRtpPacketStats) { << "Timed out while verifying number of received RTP packets."; } + bool Run() override { + VideoReceiveStream::Stats stats = receive_stream_->GetStats(); + if (kNumRtpPacketsToSend == stats.rtp_stats.packet_counter.packets) { + observation_complete_.Set(); + } + return false; + } + VideoReceiveStream* receive_stream_; uint32_t sent_rtp_; + TaskQueueBase* task_queue_ = nullptr; } test; RunBaseTest(&test); @@ -486,7 +480,7 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { if (video_frame.ntp_time_ms() > 0 && Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >= video_frame.ntp_time_ms()) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ++num_frames_received_; } } @@ -500,7 +494,7 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { bool MinNumberOfFramesReceived() const { // Have some room for frames with wrong content type during switch. const int kMinRequiredHistogramSamples = 200 + 50; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return num_frames_received_ > kMinRequiredHistogramSamples; } @@ -509,13 +503,13 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets."; // Reset frame counter so next PerformTest() call will do something. { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); num_frames_received_ = 0; } } - rtc::CriticalSection crit_; - int num_frames_received_ RTC_GUARDED_BY(&crit_); + mutable Mutex mutex_; + int num_frames_received_ RTC_GUARDED_BY(&mutex_); } test; metrics::Reset(); @@ -554,8 +548,7 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { // Start with realtime video. 
GetVideoEncoderConfig()->content_type = VideoEncoderConfig::ContentType::kRealtimeVideo; - // Second encoder config for the second part of the test uses - // screenshare + // Encoder config for the second part of the test uses screenshare. encoder_config_with_screenshare = GetVideoEncoderConfig()->Copy(); encoder_config_with_screenshare.content_type = VideoEncoderConfig::ContentType::kScreen; @@ -606,19 +599,18 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { TEST_F(StatsEndToEndTest, VerifyNackStats) { static const int kPacketNumberToDrop = 200; - class NackObserver : public test::EndToEndTest { + class NackObserver : public test::EndToEndTest, public QueuedTask { public: NackObserver() : EndToEndTest(kLongTimeoutMs), sent_rtp_packets_(0), dropped_rtp_packet_(0), dropped_rtp_packet_requested_(false), - send_stream_(nullptr), - start_runtime_ms_(-1) {} + send_stream_(nullptr) {} private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (++sent_rtp_packets_ == kPacketNumberToDrop) { std::unique_ptr parser( RtpHeaderParser::CreateForTest()); @@ -627,12 +619,12 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { dropped_rtp_packet_ = header.sequenceNumber; return DROP_PACKET; } - VerifyStats(); + task_queue_->PostTask(std::unique_ptr(this)); return SEND_PACKET; } Action OnReceiveRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); test::RtcpPacketParser rtcp_parser; rtcp_parser.Parse(packet, length); const std::vector& nacks = rtcp_parser.nack()->packet_ids(); @@ -642,21 +634,19 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { return SEND_PACKET; } - void VerifyStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_) { + void VerifyStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_) { if (!dropped_rtp_packet_requested_) return; int send_stream_nack_packets = 0; int receive_stream_nack_packets = 0; VideoSendStream::Stats stats = 
send_stream_->GetStats(); - for (std::map::const_iterator it = - stats.substreams.begin(); - it != stats.substreams.end(); ++it) { - const VideoSendStream::StreamStats& stream_stats = it->second; + for (const auto& kv : stats.substreams) { + const VideoSendStream::StreamStats& stream_stats = kv.second; send_stream_nack_packets += stream_stats.rtcp_packet_type_counts.nack_packets; } - for (size_t i = 0; i < receive_streams_.size(); ++i) { - VideoReceiveStream::Stats stats = receive_streams_[i]->GetStats(); + for (const auto& receive_stream : receive_streams_) { + VideoReceiveStream::Stats stats = receive_stream->GetStats(); receive_stream_nack_packets += stats.rtcp_packet_type_counts.nack_packets; } @@ -668,12 +658,11 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { } bool MinMetricRunTimePassed() { - int64_t now = Clock::GetRealTimeClock()->TimeInMilliseconds(); - if (start_runtime_ms_ == -1) { - start_runtime_ms_ = now; - return false; - } - int64_t elapsed_sec = (now - start_runtime_ms_) / 1000; + int64_t now_ms = Clock::GetRealTimeClock()->TimeInMilliseconds(); + if (!start_runtime_ms_) + start_runtime_ms_ = now_ms; + + int64_t elapsed_sec = (now_ms - *start_runtime_ms_) / 1000; return elapsed_sec > metrics::kMinRunTimeInSeconds; } @@ -691,6 +680,14 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { const std::vector& receive_streams) override { send_stream_ = send_stream; receive_streams_ = receive_streams; + task_queue_ = TaskQueueBase::Current(); + EXPECT_TRUE(task_queue_ != nullptr); + } + + bool Run() override { + MutexLock lock(&mutex_); + VerifyStats(); + return false; } void PerformTest() override { @@ -698,13 +695,14 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { } test::FakeVideoRenderer fake_renderer_; - rtc::CriticalSection crit_; + Mutex mutex_; uint64_t sent_rtp_packets_; - uint16_t dropped_rtp_packet_ RTC_GUARDED_BY(&crit_); - bool dropped_rtp_packet_requested_ RTC_GUARDED_BY(&crit_); + uint16_t dropped_rtp_packet_ RTC_GUARDED_BY(&mutex_); + bool 
dropped_rtp_packet_requested_ RTC_GUARDED_BY(&mutex_); std::vector receive_streams_; VideoSendStream* send_stream_; - int64_t start_runtime_ms_; + absl::optional start_runtime_ms_; + TaskQueueBase* task_queue_ = nullptr; } test; metrics::Reset(); diff --git a/video/end_to_end_tests/transport_feedback_tests.cc b/video/end_to_end_tests/transport_feedback_tests.cc index 4291bc4812..9cfa7d14f4 100644 --- a/video/end_to_end_tests/transport_feedback_tests.cc +++ b/video/end_to_end_tests/transport_feedback_tests.cc @@ -18,6 +18,7 @@ #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" +#include "rtc_base/synchronization/mutex.h" #include "test/call_test.h" #include "test/field_trial.h" #include "test/gtest.h" @@ -65,7 +66,7 @@ TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) { size_t length, const PacketOptions& options) override { { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (IsDone()) return false; @@ -141,14 +142,14 @@ TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) { { // Can't be sure until this point that rtx_to_media_ssrcs_ etc have // been initialized and are OK to read. - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); started_ = true; } return done_.Wait(kDefaultTimeoutMs); } private: - rtc::CriticalSection lock_; + Mutex lock_; rtc::Event done_; RtpHeaderExtensionMap extensions_; SequenceNumberUnwrapper unwrapper_; @@ -366,7 +367,7 @@ TEST_F(TransportFeedbackEndToEndTest, RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); const bool only_padding = rtp_packet.payload_size() == 0; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Padding is expected in congested state to probe for connectivity when // packets has been dropped. 
if (only_padding) { @@ -386,7 +387,7 @@ TEST_F(TransportFeedbackEndToEndTest, } Action OnReceiveRtcp(const uint8_t* data, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // To fill up the congestion window we drop feedback on packets after 20 // packets have been sent. This means that any packets that has not yet // received feedback after that will be considered as oustanding data and @@ -425,10 +426,10 @@ TEST_F(TransportFeedbackEndToEndTest, private: const size_t num_video_streams_; const size_t num_audio_streams_; - rtc::CriticalSection crit_; - int media_sent_ RTC_GUARDED_BY(crit_); - int media_sent_before_ RTC_GUARDED_BY(crit_); - int padding_sent_ RTC_GUARDED_BY(crit_); + Mutex mutex_; + int media_sent_ RTC_GUARDED_BY(mutex_); + int media_sent_before_ RTC_GUARDED_BY(mutex_); + int padding_sent_ RTC_GUARDED_BY(mutex_); } test(1, 0); RunBaseTest(&test); } diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc index e5f55575ec..0e604cd765 100644 --- a/video/frame_encode_metadata_writer.cc +++ b/video/frame_encode_metadata_writer.cc @@ -11,7 +11,6 @@ #include "video/frame_encode_metadata_writer.h" #include -#include #include #include "common_video/h264/sps_vui_rewriter.h" @@ -60,7 +59,7 @@ FrameEncodeMetadataWriter::~FrameEncodeMetadataWriter() {} void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec, bool internal_source) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); codec_settings_ = codec; internal_source_ = internal_source; } @@ -68,7 +67,7 @@ void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec, void FrameEncodeMetadataWriter::OnSetRates( const VideoBitrateAllocation& bitrate_allocation, uint32_t framerate_fps) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); framerate_fps_ = framerate_fps; const size_t num_spatial_layers = NumSpatialLayers(); if (timing_frames_info_.size() < num_spatial_layers) { @@ -81,7 +80,7 @@ void 
FrameEncodeMetadataWriter::OnSetRates( } void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (internal_source_) { return; } @@ -128,7 +127,7 @@ void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) { void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx, EncodedImage* encoded_image) { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); absl::optional outlier_frame_size; absl::optional encode_start_ms; uint8_t timing_flags = VideoSendTiming::kNotTriggered; @@ -202,40 +201,28 @@ void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx, } } -std::unique_ptr -FrameEncodeMetadataWriter::UpdateBitstream( +void FrameEncodeMetadataWriter::UpdateBitstream( const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation, EncodedImage* encoded_image) { if (!codec_specific_info || - codec_specific_info->codecType != kVideoCodecH264 || !fragmentation || + codec_specific_info->codecType != kVideoCodecH264 || encoded_image->_frameType != VideoFrameType::kVideoFrameKey) { - return nullptr; + return; } - rtc::Buffer modified_buffer; - std::unique_ptr modified_fragmentation = - std::make_unique(); - modified_fragmentation->CopyFrom(*fragmentation); - // Make sure that the data is not copied if owned by EncodedImage. 
const EncodedImage& buffer = *encoded_image; - SpsVuiRewriter::ParseOutgoingBitstreamAndRewriteSps( - buffer, fragmentation->fragmentationVectorSize, - fragmentation->fragmentationOffset, fragmentation->fragmentationLength, - encoded_image->ColorSpace(), &modified_buffer, - modified_fragmentation->fragmentationOffset, - modified_fragmentation->fragmentationLength); + rtc::Buffer modified_buffer = + SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( + buffer, encoded_image->ColorSpace()); encoded_image->SetEncodedData( new rtc::RefCountedObject( std::move(modified_buffer))); - - return modified_fragmentation; } void FrameEncodeMetadataWriter::Reset() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); for (auto& info : timing_frames_info_) { info.frames.clear(); } diff --git a/video/frame_encode_metadata_writer.h b/video/frame_encode_metadata_writer.h index 4ee2d7eec7..88471459c0 100644 --- a/video/frame_encode_metadata_writer.h +++ b/video/frame_encode_metadata_writer.h @@ -12,7 +12,6 @@ #define VIDEO_FRAME_ENCODE_METADATA_WRITER_H_ #include -#include #include #include "absl/types/optional.h" @@ -20,7 +19,7 @@ #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -37,10 +36,8 @@ class FrameEncodeMetadataWriter { void FillTimingInfo(size_t simulcast_svc_idx, EncodedImage* encoded_image); - std::unique_ptr UpdateBitstream( - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation, - EncodedImage* encoded_image); + void UpdateBitstream(const CodecSpecificInfo* codec_specific_info, + EncodedImage* encoded_image); void Reset(); @@ -69,7 +66,7 @@ class FrameEncodeMetadataWriter { std::list frames; }; - rtc::CriticalSection lock_; + Mutex lock_; EncodedImageCallback* const frame_drop_callback_; VideoCodec codec_settings_ 
RTC_GUARDED_BY(&lock_); bool internal_source_ RTC_GUARDED_BY(&lock_); diff --git a/video/frame_encode_metadata_writer_unittest.cc b/video/frame_encode_metadata_writer_unittest.cc index 2f7459943f..da54c3307b 100644 --- a/video/frame_encode_metadata_writer_unittest.cc +++ b/video/frame_encode_metadata_writer_unittest.cc @@ -40,8 +40,7 @@ class FakeEncodedImageCallback : public EncodedImageCallback { public: FakeEncodedImageCallback() : num_frames_dropped_(0) {} Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { + const CodecSpecificInfo* codec_specific_info) override { return Result(Result::OK); } void OnDroppedFrame(DropReason reason) override { ++num_frames_dropped_; } @@ -462,83 +461,55 @@ TEST(FrameEncodeMetadataWriterTest, CopiesPacketInfos) { TEST(FrameEncodeMetadataWriterTest, DoesNotRewriteBitstreamWithoutCodecInfo) { uint8_t buffer[] = {1, 2, 3}; - EncodedImage image(buffer, sizeof(buffer), sizeof(buffer)); - const RTPFragmentationHeader fragmentation; + auto image_buffer = EncodedImageBuffer::Create(buffer, sizeof(buffer)); + EncodedImage image; + image.SetEncodedData(image_buffer); FakeEncodedImageCallback sink; FrameEncodeMetadataWriter encode_metadata_writer(&sink); - EXPECT_EQ( - encode_metadata_writer.UpdateBitstream(nullptr, &fragmentation, &image), - nullptr); - EXPECT_EQ(image.data(), buffer); + encode_metadata_writer.UpdateBitstream(nullptr, &image); + EXPECT_EQ(image.GetEncodedData(), image_buffer); EXPECT_EQ(image.size(), sizeof(buffer)); } TEST(FrameEncodeMetadataWriterTest, DoesNotRewriteVp8Bitstream) { uint8_t buffer[] = {1, 2, 3}; - EncodedImage image(buffer, sizeof(buffer), sizeof(buffer)); + auto image_buffer = EncodedImageBuffer::Create(buffer, sizeof(buffer)); + EncodedImage image; + image.SetEncodedData(image_buffer); CodecSpecificInfo codec_specific_info; codec_specific_info.codecType = kVideoCodecVP8; - const 
RTPFragmentationHeader fragmentation; - - FakeEncodedImageCallback sink; - FrameEncodeMetadataWriter encode_metadata_writer(&sink); - EXPECT_EQ(encode_metadata_writer.UpdateBitstream(&codec_specific_info, - &fragmentation, &image), - nullptr); - EXPECT_EQ(image.data(), buffer); - EXPECT_EQ(image.size(), sizeof(buffer)); -} - -TEST(FrameEncodeMetadataWriterTest, - DoesNotRewriteH264BitstreamWithoutFragmentation) { - uint8_t buffer[] = {1, 2, 3}; - EncodedImage image(buffer, sizeof(buffer), sizeof(buffer)); - CodecSpecificInfo codec_specific_info; - codec_specific_info.codecType = kVideoCodecH264; FakeEncodedImageCallback sink; FrameEncodeMetadataWriter encode_metadata_writer(&sink); - EXPECT_EQ(encode_metadata_writer.UpdateBitstream(&codec_specific_info, - nullptr, &image), - nullptr); - EXPECT_EQ(image.data(), buffer); + encode_metadata_writer.UpdateBitstream(&codec_specific_info, &image); + EXPECT_EQ(image.GetEncodedData(), image_buffer); EXPECT_EQ(image.size(), sizeof(buffer)); } TEST(FrameEncodeMetadataWriterTest, RewritesH264BitstreamWithNonOptimalSps) { - uint8_t original_sps[] = {0, 0, 0, 1, H264::NaluType::kSps, - 0x00, 0x00, 0x03, 0x03, 0xF4, - 0x05, 0x03, 0xC7, 0xC0}; + const uint8_t kOriginalSps[] = {0, 0, 0, 1, H264::NaluType::kSps, + 0x00, 0x00, 0x03, 0x03, 0xF4, + 0x05, 0x03, 0xC7, 0xC0}; const uint8_t kRewrittenSps[] = {0, 0, 0, 1, H264::NaluType::kSps, 0x00, 0x00, 0x03, 0x03, 0xF4, 0x05, 0x03, 0xC7, 0xE0, 0x1B, 0x41, 0x10, 0x8D, 0x00}; - EncodedImage image(original_sps, sizeof(original_sps), sizeof(original_sps)); + EncodedImage image; + image.SetEncodedData( + EncodedImageBuffer::Create(kOriginalSps, sizeof(kOriginalSps))); image._frameType = VideoFrameType::kVideoFrameKey; CodecSpecificInfo codec_specific_info; codec_specific_info.codecType = kVideoCodecH264; - RTPFragmentationHeader fragmentation; - fragmentation.VerifyAndAllocateFragmentationHeader(1); - fragmentation.fragmentationOffset[0] = 4; - fragmentation.fragmentationLength[0] = 
sizeof(original_sps) - 4; - FakeEncodedImageCallback sink; FrameEncodeMetadataWriter encode_metadata_writer(&sink); - std::unique_ptr modified_fragmentation = - encode_metadata_writer.UpdateBitstream(&codec_specific_info, - &fragmentation, &image); + encode_metadata_writer.UpdateBitstream(&codec_specific_info, &image); - ASSERT_NE(modified_fragmentation, nullptr); EXPECT_THAT(std::vector(image.data(), image.data() + image.size()), testing::ElementsAreArray(kRewrittenSps)); - ASSERT_THAT(modified_fragmentation->fragmentationVectorSize, 1U); - EXPECT_EQ(modified_fragmentation->fragmentationOffset[0], 4U); - EXPECT_EQ(modified_fragmentation->fragmentationLength[0], - sizeof(kRewrittenSps) - 4); } } // namespace test diff --git a/video/full_stack_tests.cc b/video/full_stack_tests.cc index 823137b973..ece756b2dc 100644 --- a/video/full_stack_tests.cc +++ b/video/full_stack_tests.cc @@ -48,8 +48,6 @@ namespace webrtc { namespace { static const int kFullStackTestDurationSecs = 45; -const char kVp8TrustedRateControllerFieldTrial[] = - "WebRTC-LibvpxVp8TrustedRateController/Enabled/"; struct ParamsWithLogging : public VideoQualityTest::Params { public: @@ -90,28 +88,8 @@ std::string ClipNameToClipPath(const char* clip_name) { // logs // bool // }; -class GenericDescriptorTest : public ::testing::TestWithParam { - public: - GenericDescriptorTest() - : field_trial_(AppendFieldTrials(GetParam())), - generic_descriptor_enabled_( - field_trial::IsEnabled("WebRTC-GenericDescriptor")) {} - - std::string GetTestName(std::string base) { - if (generic_descriptor_enabled_) - base += "_generic_descriptor"; - return base; - } - - bool GenericDescriptorEnabled() const { return generic_descriptor_enabled_; } - - private: - test::ScopedFieldTrials field_trial_; - bool generic_descriptor_enabled_; -}; - #if defined(RTC_ENABLE_VP9) -TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) { +TEST(FullStackTest, Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { auto fixture = CreateVideoQualityTestFixture(); 
ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -125,7 +103,8 @@ TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) { fixture->RunWithAnalyzer(foreman_cif); } -TEST_P(GenericDescriptorTest, ForemanCifPlr5Vp9) { +TEST(GenericDescriptorTest, + Foreman_Cif_Delay_50_0_Plr_5_VP9_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -134,15 +113,15 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5Vp9) { 30000, 500000, 2000000, false, "VP9", 1, 0, 0, false, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_VP9"), 0.0, - 0.0, kFullStackTestDurationSecs}; + foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_VP9_generic_descriptor", + 0.0, 0.0, kFullStackTestDurationSecs}; foreman_cif.config->loss_percent = 5; foreman_cif.config->queue_delay_ms = 50; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); + foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, GeneratorWithoutPacketLossVp9Profile2) { +TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { // Profile 2 might not be available on some platforms until // https://bugs.chromium.org/p/webm/issues/detail?id=1544 is solved. 
bool profile_2_is_supported = false; @@ -168,7 +147,7 @@ TEST(FullStackTest, GeneratorWithoutPacketLossVp9Profile2) { fixture->RunWithAnalyzer(generator); } -TEST(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) { +TEST(FullStackTest, Foreman_Cif_Net_Delay_0_0_Plr_0_Multiplex) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -182,7 +161,7 @@ TEST(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, GeneratorWithoutPacketLossMultiplexI420AFrame) { +TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_Multiplex) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging generator; @@ -199,11 +178,11 @@ TEST(FullStackTest, GeneratorWithoutPacketLossMultiplexI420AFrame) { #if defined(WEBRTC_LINUX) // Crashes on the linux trusty perf bot: bugs.webrtc.org/9129. -#define MAYBE_ParisQcifWithoutPacketLoss DISABLED_ParisQcifWithoutPacketLoss +#define MAYBE_Net_Delay_0_0_Plr_0 DISABLED_Net_Delay_0_0_Plr_0 #else -#define MAYBE_ParisQcifWithoutPacketLoss ParisQcifWithoutPacketLoss +#define MAYBE_Net_Delay_0_0_Plr_0 Net_Delay_0_0_Plr_0 #endif -TEST(FullStackTest, MAYBE_ParisQcifWithoutPacketLoss) { +TEST(FullStackTest, MAYBE_Net_Delay_0_0_Plr_0) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging paris_qcif; paris_qcif.call.send_side_bwe = true; @@ -217,7 +196,8 @@ TEST(FullStackTest, MAYBE_ParisQcifWithoutPacketLoss) { fixture->RunWithAnalyzer(paris_qcif); } -TEST_P(GenericDescriptorTest, ForemanCifWithoutPacketLoss) { +TEST(GenericDescriptorTest, + Foreman_Cif_Net_Delay_0_0_Plr_0_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif. 
ParamsWithLogging foreman_cif; @@ -227,34 +207,15 @@ TEST_P(GenericDescriptorTest, ForemanCifWithoutPacketLoss) { 700000, 700000, 700000, false, "VP8", 1, 0, 0, false, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {GetTestName("foreman_cif_net_delay_0_0_plr_0"), 0.0, - 0.0, kFullStackTestDurationSecs}; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); - fixture->RunWithAnalyzer(foreman_cif); -} - -TEST_P(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging foreman_cif; - foreman_cif.call.send_side_bwe = true; - foreman_cif.video[0] = { - true, 352, 288, 10, - 30000, 30000, 30000, false, - "VP8", 1, 0, 0, - false, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {GetTestName("foreman_cif_30kbps_net_delay_0_0_plr_0"), + foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_generic_descriptor", 0.0, 0.0, kFullStackTestDurationSecs}; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); + foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } -// TODO(webrtc:9722): Remove when experiment is cleaned up. 
-TEST_P(GenericDescriptorTest, - ForemanCif30kbpsWithoutPacketLossTrustedRateControl) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); +TEST(GenericDescriptorTest, + Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; foreman_cif.video[0] = { @@ -263,14 +224,14 @@ TEST_P(GenericDescriptorTest, "VP8", 1, 0, 0, false, false, true, ClipNameToClipPath("foreman_cif")}; foreman_cif.analyzer = { - GetTestName("foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl"), - 0.0, 0.0, kFullStackTestDurationSecs}; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); + "foreman_cif_30kbps_net_delay_0_0_plr_0_generic_descriptor", 0.0, 0.0, + kFullStackTestDurationSecs}; + foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } // Link capacity below default start rate. -TEST(FullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) { +TEST(FullStackTest, Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -286,7 +247,8 @@ TEST(FullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) { } // Restricted network and encoder overproducing by 30%. -TEST(FullStackTest, ForemanCifLink150kbpsBadRateController) { +TEST(FullStackTest, + Foreman_Cif_Link_150kbps_Delay100ms_30pkts_Queue_Overshoot30) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -309,7 +271,7 @@ TEST(FullStackTest, ForemanCifLink150kbpsBadRateController) { // Packet rate and loss are low enough that loss will happen with ~3s interval. // This triggers protection overhead to toggle between zero and non-zero. // Link queue is restrictive enough to trigger loss on probes. 
-TEST(FullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { +TEST(FullStackTest, Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -328,7 +290,7 @@ TEST(FullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { fixture->RunWithAnalyzer(foreman_cif); } -TEST_P(GenericDescriptorTest, ForemanCifPlr5) { +TEST(GenericDescriptorTest, Foreman_Cif_Delay_50_0_Plr_5_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -337,15 +299,16 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5) { 30000, 500000, 2000000, false, "VP8", 1, 0, 0, false, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5"), 0.0, 0.0, - kFullStackTestDurationSecs}; + foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_generic_descriptor", + 0.0, 0.0, kFullStackTestDurationSecs}; foreman_cif.config->loss_percent = 5; foreman_cif.config->queue_delay_ms = 50; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); + foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } -TEST_P(GenericDescriptorTest, ForemanCifPlr5Ulpfec) { +TEST(GenericDescriptorTest, + Foreman_Cif_Delay_50_0_Plr_5_Ulpfec_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -354,15 +317,16 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5Ulpfec) { 30000, 500000, 2000000, false, "VP8", 1, 0, 0, true, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_ulpfec"), - 0.0, 0.0, kFullStackTestDurationSecs}; + foreman_cif.analyzer = { + "foreman_cif_delay_50_0_plr_5_ulpfec_generic_descriptor", 0.0, 0.0, + kFullStackTestDurationSecs}; foreman_cif.config->loss_percent = 5; 
foreman_cif.config->queue_delay_ms = 50; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); + foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCifPlr5Flexfec) { +TEST(FullStackTest, Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -378,7 +342,7 @@ TEST(FullStackTest, ForemanCifPlr5Flexfec) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbpsPlr3Flexfec) { +TEST(FullStackTest, Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -395,7 +359,7 @@ TEST(FullStackTest, ForemanCif500kbpsPlr3Flexfec) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbpsPlr3Ulpfec) { +TEST(FullStackTest, Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -413,7 +377,7 @@ TEST(FullStackTest, ForemanCif500kbpsPlr3Ulpfec) { } #if defined(WEBRTC_USE_H264) -TEST(FullStackTest, ForemanCifWithoutPacketlossH264) { +TEST(FullStackTest, Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { auto fixture = CreateVideoQualityTestFixture(); // TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif. 
ParamsWithLogging foreman_cif; @@ -428,7 +392,7 @@ TEST(FullStackTest, ForemanCifWithoutPacketlossH264) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) { +TEST(FullStackTest, Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -442,7 +406,8 @@ TEST(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) { fixture->RunWithAnalyzer(foreman_cif); } -TEST_P(GenericDescriptorTest, ForemanCifPlr5H264) { +TEST(GenericDescriptorTest, + Foreman_Cif_Delay_50_0_Plr_5_H264_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -451,15 +416,16 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5H264) { 30000, 500000, 2000000, false, "H264", 1, 0, 0, false, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_H264"), 0.0, - 0.0, kFullStackTestDurationSecs}; + foreman_cif.analyzer = { + "foreman_cif_delay_50_0_plr_5_H264_generic_descriptor", 0.0, 0.0, + kFullStackTestDurationSecs}; foreman_cif.config->loss_percent = 5; foreman_cif.config->queue_delay_ms = 50; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); + foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { +TEST(FullStackTest, Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { test::ScopedFieldTrials override_field_trials( AppendFieldTrials("WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -479,7 +445,7 @@ TEST(FullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { } // Verify that this is worth the bot time, before enabling. 
-TEST(FullStackTest, ForemanCifPlr5H264Flexfec) { +TEST(FullStackTest, Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -497,7 +463,7 @@ TEST(FullStackTest, ForemanCifPlr5H264Flexfec) { // Ulpfec with H264 is an unsupported combination, so this test is only useful // for debugging. It is therefore disabled by default. -TEST(FullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { +TEST(FullStackTest, DISABLED_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -514,7 +480,7 @@ TEST(FullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { } #endif // defined(WEBRTC_USE_H264) -TEST(FullStackTest, ForemanCif500kbps) { +TEST(FullStackTest, Foreman_Cif_500kbps) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -531,7 +497,7 @@ TEST(FullStackTest, ForemanCif500kbps) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbpsLimitedQueue) { +TEST(FullStackTest, Foreman_Cif_500kbps_32pkts_Queue) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -548,7 +514,7 @@ TEST(FullStackTest, ForemanCif500kbpsLimitedQueue) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbps100ms) { +TEST(FullStackTest, Foreman_Cif_500kbps_100ms) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -565,7 +531,8 @@ TEST(FullStackTest, ForemanCif500kbps100ms) { fixture->RunWithAnalyzer(foreman_cif); } -TEST_P(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) { +TEST(GenericDescriptorTest, + Foreman_Cif_500kbps_100ms_32pkts_Queue_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging 
foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -574,16 +541,17 @@ TEST_P(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) { 30000, 500000, 2000000, false, "VP8", 1, 0, 0, false, false, true, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {GetTestName("foreman_cif_500kbps_100ms_32pkts_queue"), - 0.0, 0.0, kFullStackTestDurationSecs}; + foreman_cif.analyzer = { + "foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor", 0.0, 0.0, + kFullStackTestDurationSecs}; foreman_cif.config->queue_length_packets = 32; foreman_cif.config->queue_delay_ms = 100; foreman_cif.config->link_capacity_kbps = 500; - foreman_cif.call.generic_descriptor = GenericDescriptorEnabled(); + foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) { +TEST(FullStackTest, Foreman_Cif_500kbps_100ms_32pkts_Queue_Recv_Bwe) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = false; @@ -600,7 +568,7 @@ TEST(FullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) { fixture->RunWithAnalyzer(foreman_cif); } -TEST(FullStackTest, ForemanCif1000kbps100msLimitedQueue) { +TEST(FullStackTest, Foreman_Cif_1000kbps_100ms_32pkts_Queue) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging foreman_cif; foreman_cif.call.send_side_bwe = true; @@ -618,7 +586,7 @@ TEST(FullStackTest, ForemanCif1000kbps100msLimitedQueue) { } // TODO(sprang): Remove this if we have the similar ModerateLimits below? 
-TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { +TEST(FullStackTest, Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -639,34 +607,8 @@ TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { fixture->RunWithAnalyzer(conf_motion_hd); } -// TODO(webrtc:9722): Remove when experiment is cleaned up. -TEST(FullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); - auto fixture = CreateVideoQualityTestFixture(); - - ParamsWithLogging conf_motion_hd; - conf_motion_hd.call.send_side_bwe = true; - conf_motion_hd.video[0] = { - true, 1280, - 720, 50, - 30000, 3000000, - 3000000, false, - "VP8", 1, - -1, 0, - false, false, - false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; - conf_motion_hd.analyzer = { - "conference_motion_hd_1tl_moderate_limits_trusted_rate_ctrl", 0.0, 0.0, - kFullStackTestDurationSecs}; - conf_motion_hd.config->queue_length_packets = 50; - conf_motion_hd.config->loss_percent = 3; - conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; - fixture->RunWithAnalyzer(conf_motion_hd); -} - -TEST_P(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { +TEST(GenericDescriptorTest, + Conference_Motion_Hd_2tl_Moderate_Limits_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -680,17 +622,17 @@ TEST_P(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { false, false, false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; conf_motion_hd.analyzer = { - GetTestName("conference_motion_hd_2tl_moderate_limits"), 0.0, 0.0, + "conference_motion_hd_2tl_moderate_limits_generic_descriptor", 0.0, 0.0, kFullStackTestDurationSecs}; 
conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; conf_motion_hd.config->queue_delay_ms = 100; conf_motion_hd.config->link_capacity_kbps = 2000; - conf_motion_hd.call.generic_descriptor = GenericDescriptorEnabled(); + conf_motion_hd.call.generic_descriptor = true; fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, ConferenceMotionHd3TLModerateLimits) { +TEST(FullStackTest, Conference_Motion_Hd_3tl_Moderate_Limits) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -712,7 +654,7 @@ TEST(FullStackTest, ConferenceMotionHd3TLModerateLimits) { fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, ConferenceMotionHd4TLModerateLimits) { +TEST(FullStackTest, Conference_Motion_Hd_4tl_Moderate_Limits) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -734,7 +676,7 @@ TEST(FullStackTest, ConferenceMotionHd4TLModerateLimits) { fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) { +TEST(FullStackTest, Conference_Motion_Hd_3tl_Alt_Moderate_Limits) { test::ScopedFieldTrials field_trial( AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -758,8 +700,7 @@ TEST(FullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) { fixture->RunWithAnalyzer(conf_motion_hd); } -TEST(FullStackTest, - ConferenceMotionHd3TLModerateLimitsAltTLPatternAndBaseHeavyTLAllocation) { +TEST(FullStackTest, Conference_Motion_Hd_3tl_Alt_Heavy_Moderate_Limits) { auto fixture = CreateVideoQualityTestFixture(); test::ScopedFieldTrials field_trial( AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/" @@ -786,7 +727,7 @@ TEST(FullStackTest, } #if defined(RTC_ENABLE_VP9) -TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { +TEST(FullStackTest, 
Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -809,7 +750,7 @@ TEST(FullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { } #endif -TEST(FullStackTest, ScreenshareSlidesVP8_2TL) { +TEST(FullStackTest, Screenshare_Slides) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -824,10 +765,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL) { #if !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on Win/Mac. -const char kScreenshareSimulcastVariableFramerateExperiment[] = - "WebRTC-VP8VariableFramerateScreenshare/" - "Enabled,min_fps:5.0,min_qp:15,undershoot:30/"; -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { +TEST(FullStackTest, Screenshare_Slides_Simulcast) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -855,67 +793,9 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_Variable_Framerate) { - test::ScopedFieldTrials field_trial( - AppendFieldTrials(kScreenshareSimulcastVariableFramerateExperiment)); - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging screenshare; - screenshare.call.send_side_bwe = true; - screenshare.screenshare[0] = {true, false, 10}; - screenshare.video[0] = {true, 1850, 1110, 30, 800000, 2500000, - 2500000, false, "VP8", 2, 1, 400000, - false, false, false, ""}; - screenshare.analyzer = {"screenshare_slides_simulcast_variable_framerate", - 0.0, 0.0, kFullStackTestDurationSecs}; - ParamsWithLogging screenshare_params_high; - screenshare_params_high.video[0] = { - true, 1850, 1110, 60, 600000, 1250000, 1250000, false, - "VP8", 2, 0, 400000, false, false, false, ""}; - 
VideoQualityTest::Params screenshare_params_low; - screenshare_params_low.video[0] = {true, 1850, 1110, 5, 30000, 200000, - 1000000, false, "VP8", 2, 0, 400000, - false, false, false, ""}; - - std::vector streams = { - VideoQualityTest::DefaultVideoStream(screenshare_params_low, 0), - VideoQualityTest::DefaultVideoStream(screenshare_params_high, 0)}; - screenshare.ss[0] = { - streams, 1, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - fixture->RunWithAnalyzer(screenshare); -} - -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_low) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging screenshare; - screenshare.call.send_side_bwe = true; - screenshare.screenshare[0] = {true, false, 10}; - screenshare.video[0] = {true, 1850, 1110, 30, 800000, 2500000, - 2500000, false, "VP8", 2, 1, 400000, - false, false, false, ""}; - screenshare.analyzer = {"screenshare_slides_simulcast_low", 0.0, 0.0, - kFullStackTestDurationSecs}; - VideoQualityTest::Params screenshare_params_high; - screenshare_params_high.video[0] = { - true, 1850, 1110, 60, 600000, 1250000, 1250000, false, - "VP8", 2, 0, 400000, false, false, false, ""}; - VideoQualityTest::Params screenshare_params_low; - screenshare_params_low.video[0] = {true, 1850, 1110, 5, 30000, 200000, - 1000000, false, "VP8", 2, 0, 400000, - false, false, false, ""}; - - std::vector streams = { - VideoQualityTest::DefaultVideoStream(screenshare_params_low, 0), - VideoQualityTest::DefaultVideoStream(screenshare_params_high, 0)}; - screenshare.ss[0] = { - streams, 0, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - fixture->RunWithAnalyzer(screenshare); -} - #endif // !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) { +TEST(FullStackTest, Screenshare_Slides_Scrolling) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging config; config.call.send_side_bwe = true; @@ -928,7 +808,7 @@ TEST(FullStackTest, 
ScreenshareSlidesVP8_2TL_Scroll) { fixture->RunWithAnalyzer(config); } -TEST_P(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) { +TEST(GenericDescriptorTest, Screenshare_Slides_Lossy_Net_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -936,16 +816,16 @@ TEST_P(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) { 1000000, false, "VP8", 2, 1, 400000, false, false, false, ""}; screenshare.screenshare[0] = {true, false, 10}; - screenshare.analyzer = {GetTestName("screenshare_slides_lossy_net"), 0.0, 0.0, - kFullStackTestDurationSecs}; + screenshare.analyzer = {"screenshare_slides_lossy_net_generic_descriptor", + 0.0, 0.0, kFullStackTestDurationSecs}; screenshare.config->loss_percent = 5; screenshare.config->queue_delay_ms = 200; screenshare.config->link_capacity_kbps = 500; - screenshare.call.generic_descriptor = GenericDescriptorEnabled(); + screenshare.call.generic_descriptor = true; fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) { +TEST(FullStackTest, Screenshare_Slides_Very_Lossy) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -961,7 +841,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) { fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue) { +TEST(FullStackTest, Screenshare_Slides_Lossy_Limited) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -978,7 +858,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue) { fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP8_2TL_ModeratelyRestricted) { +TEST(FullStackTest, Screenshare_Slides_Moderately_Restricted) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; 
screenshare.call.send_side_bwe = true; @@ -1047,7 +927,7 @@ ParamsWithLogging::Video SimulcastVp8VideoLow() { #if defined(RTC_ENABLE_VP9) -TEST(FullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { +TEST(FullStackTest, Screenshare_Slides_Vp9_3sl_High_Fps) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -1063,69 +943,10 @@ TEST(FullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { fixture->RunWithAnalyzer(screenshare); } -TEST(FullStackTest, ScreenshareSlidesVP9_3SL_Variable_Fps) { - webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-VP9VariableFramerateScreenshare/" - "Enabled,min_qp:32,min_fps:5.0,undershoot:30,frames_" - "before_steady_state:5/")); - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging screenshare; - screenshare.call.send_side_bwe = true; - screenshare.video[0] = {true, 1850, 1110, 30, 50000, 200000, - 2000000, false, "VP9", 1, 0, 400000, - false, false, false, ""}; - screenshare.screenshare[0] = {true, false, 10}; - screenshare.analyzer = {"screenshare_slides_vp9_3sl_variable_fps", 0.0, 0.0, - kFullStackTestDurationSecs}; - screenshare.ss[0] = { - std::vector(), 0, 3, 2, InterLayerPredMode::kOn, - std::vector(), true}; - fixture->RunWithAnalyzer(screenshare); -} - -TEST(FullStackTest, VP9SVC_3SL_High) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging simulcast; - simulcast.call.send_side_bwe = true; - simulcast.video[0] = SvcVp9Video(); - simulcast.analyzer = {"vp9svc_3sl_high", 0.0, 0.0, - kFullStackTestDurationSecs}; - - simulcast.ss[0] = { - std::vector(), 0, 3, 2, InterLayerPredMode::kOn, - std::vector(), false}; - fixture->RunWithAnalyzer(simulcast); -} - -TEST(FullStackTest, VP9SVC_3SL_Medium) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging simulcast; - simulcast.call.send_side_bwe = true; - simulcast.video[0] = SvcVp9Video(); - simulcast.analyzer = {"vp9svc_3sl_medium", 0.0, 0.0, - 
kFullStackTestDurationSecs}; - simulcast.ss[0] = { - std::vector(), 0, 3, 1, InterLayerPredMode::kOn, - std::vector(), false}; - fixture->RunWithAnalyzer(simulcast); -} - -TEST(FullStackTest, VP9SVC_3SL_Low) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging simulcast; - simulcast.call.send_side_bwe = true; - simulcast.video[0] = SvcVp9Video(); - simulcast.analyzer = {"vp9svc_3sl_low", 0.0, 0.0, kFullStackTestDurationSecs}; - simulcast.ss[0] = { - std::vector(), 0, 3, 0, InterLayerPredMode::kOn, - std::vector(), false}; - fixture->RunWithAnalyzer(simulcast); -} - -// bugs.webrtc.org/9506 +// TODO(http://bugs.webrtc.org/9506): investigate. #if !defined(WEBRTC_MAC) -TEST(FullStackTest, VP9KSVC_3SL_High) { +TEST(FullStackTest, Vp9ksvc_3sl_High) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -1140,29 +961,31 @@ TEST(FullStackTest, VP9KSVC_3SL_High) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, VP9KSVC_3SL_Medium) { +TEST(FullStackTest, Vp9ksvc_3sl_Low) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; simulcast.video[0] = SvcVp9Video(); - simulcast.analyzer = {"vp9ksvc_3sl_medium", 0.0, 0.0, + simulcast.analyzer = {"vp9ksvc_3sl_low", 0.0, 0.0, kFullStackTestDurationSecs}; simulcast.ss[0] = { - std::vector(), 0, 3, 1, InterLayerPredMode::kOnKeyPic, + std::vector(), 0, 3, 0, InterLayerPredMode::kOnKeyPic, std::vector(), false}; fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, VP9KSVC_3SL_Low) { +TEST(FullStackTest, Vp9ksvc_3sl_Low_Bw_Limited) { webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); + 
AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/" + "WebRTC-Vp9ExternalRefCtrl/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; + simulcast.config->link_capacity_kbps = 500; simulcast.call.send_side_bwe = true; simulcast.video[0] = SvcVp9Video(); - simulcast.analyzer = {"vp9ksvc_3sl_low", 0.0, 0.0, + simulcast.analyzer = {"vp9ksvc_3sl_low_bw_limited", 0.0, 0.0, kFullStackTestDurationSecs}; simulcast.ss[0] = { std::vector(), 0, 3, 0, InterLayerPredMode::kOnKeyPic, @@ -1170,7 +993,7 @@ TEST(FullStackTest, VP9KSVC_3SL_Low) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted) { +TEST(FullStackTest, Vp9ksvc_3sl_Medium_Network_Restricted) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); @@ -1188,10 +1011,9 @@ TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted) { } // TODO(webrtc:9722): Remove when experiment is cleaned up. -TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { +TEST(FullStackTest, Vp9ksvc_3sl_Medium_Network_Restricted_Trusted_Rate) { webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/" - "WebRTC-LibvpxVp9TrustedRateController/Enabled/")); + AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1212,12 +1034,12 @@ TEST(FullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { // Android bots can't handle FullHD, so disable the test. // TODO(bugs.webrtc.org/9220): Investigate source of flakiness on Mac. 
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_MAC) -#define MAYBE_SimulcastFullHdOveruse DISABLED_SimulcastFullHdOveruse +#define MAYBE_Simulcast_HD_High DISABLED_Simulcast_HD_High #else -#define MAYBE_SimulcastFullHdOveruse SimulcastFullHdOveruse +#define MAYBE_Simulcast_HD_High Simulcast_HD_High #endif -TEST(FullStackTest, MAYBE_SimulcastFullHdOveruse) { +TEST(FullStackTest, MAYBE_Simulcast_HD_High) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1240,7 +1062,7 @@ TEST(FullStackTest, MAYBE_SimulcastFullHdOveruse) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, SimulcastVP8_3SL_High) { +TEST(FullStackTest, Simulcast_Vp8_3sl_High) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1266,33 +1088,7 @@ TEST(FullStackTest, SimulcastVP8_3SL_High) { fixture->RunWithAnalyzer(simulcast); } -TEST(FullStackTest, SimulcastVP8_3SL_Medium) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging simulcast; - simulcast.call.send_side_bwe = true; - simulcast.video[0] = SimulcastVp8VideoHigh(); - simulcast.analyzer = {"simulcast_vp8_3sl_medium", 0.0, 0.0, - kFullStackTestDurationSecs}; - simulcast.config->loss_percent = 0; - simulcast.config->queue_delay_ms = 100; - ParamsWithLogging video_params_high; - video_params_high.video[0] = SimulcastVp8VideoHigh(); - ParamsWithLogging video_params_medium; - video_params_medium.video[0] = SimulcastVp8VideoMedium(); - ParamsWithLogging video_params_low; - video_params_low.video[0] = SimulcastVp8VideoLow(); - - std::vector streams = { - VideoQualityTest::DefaultVideoStream(video_params_low, 0), - VideoQualityTest::DefaultVideoStream(video_params_medium, 0), - VideoQualityTest::DefaultVideoStream(video_params_high, 0)}; - simulcast.ss[0] = { - streams, 1, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - fixture->RunWithAnalyzer(simulcast); -} - -TEST(FullStackTest, 
SimulcastVP8_3SL_Low) { +TEST(FullStackTest, Simulcast_Vp8_3sl_Low) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1322,11 +1118,11 @@ TEST(FullStackTest, SimulcastVP8_3SL_Low) { // available and exercises WebRTC calls with a high target bitrate(100 Mbps). // Android32 bots can't handle this high bitrate, so disable test for those. #if defined(WEBRTC_ANDROID) -#define MAYBE_HighBitrateWithFakeCodec DISABLED_HighBitrateWithFakeCodec +#define MAYBE_High_Bitrate_With_Fake_Codec DISABLED_High_Bitrate_With_Fake_Codec #else -#define MAYBE_HighBitrateWithFakeCodec HighBitrateWithFakeCodec +#define MAYBE_High_Bitrate_With_Fake_Codec High_Bitrate_With_Fake_Codec #endif // defined(WEBRTC_ANDROID) -TEST(FullStackTest, MAYBE_HighBitrateWithFakeCodec) { +TEST(FullStackTest, MAYBE_High_Bitrate_With_Fake_Codec) { auto fixture = CreateVideoQualityTestFixture(); const int target_bitrate = 100000000; ParamsWithLogging generator; @@ -1355,71 +1151,15 @@ TEST(FullStackTest, MAYBE_HighBitrateWithFakeCodec) { fixture->RunWithAnalyzer(generator); } -TEST(FullStackTest, LargeRoomVP8_5thumb) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging large_room; - large_room.call.send_side_bwe = true; - large_room.video[0] = SimulcastVp8VideoHigh(); - large_room.analyzer = {"largeroom_5thumb", 0.0, 0.0, - kFullStackTestDurationSecs}; - large_room.config->loss_percent = 0; - large_room.config->queue_delay_ms = 100; - ParamsWithLogging video_params_high; - video_params_high.video[0] = SimulcastVp8VideoHigh(); - ParamsWithLogging video_params_medium; - video_params_medium.video[0] = SimulcastVp8VideoMedium(); - ParamsWithLogging video_params_low; - video_params_low.video[0] = SimulcastVp8VideoLow(); - - std::vector streams = { - VideoQualityTest::DefaultVideoStream(video_params_low, 0), - VideoQualityTest::DefaultVideoStream(video_params_medium, 0), - VideoQualityTest::DefaultVideoStream(video_params_high, 
0)}; - large_room.call.num_thumbnails = 5; - large_room.ss[0] = { - streams, 2, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - fixture->RunWithAnalyzer(large_room); -} - #if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) // Fails on mobile devices: // https://bugs.chromium.org/p/webrtc/issues/detail?id=7301 -#define MAYBE_LargeRoomVP8_50thumb DISABLED_LargeRoomVP8_50thumb -#define MAYBE_LargeRoomVP8_15thumb DISABLED_LargeRoomVP8_15thumb +#define MAYBE_Largeroom_50thumb DISABLED_Largeroom_50thumb #else -#define MAYBE_LargeRoomVP8_50thumb LargeRoomVP8_50thumb -#define MAYBE_LargeRoomVP8_15thumb LargeRoomVP8_15thumb +#define MAYBE_Largeroom_50thumb Largeroom_50thumb #endif -TEST(FullStackTest, MAYBE_LargeRoomVP8_15thumb) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging large_room; - large_room.call.send_side_bwe = true; - large_room.video[0] = SimulcastVp8VideoHigh(); - large_room.analyzer = {"largeroom_15thumb", 0.0, 0.0, - kFullStackTestDurationSecs}; - large_room.config->loss_percent = 0; - large_room.config->queue_delay_ms = 100; - ParamsWithLogging video_params_high; - video_params_high.video[0] = SimulcastVp8VideoHigh(); - ParamsWithLogging video_params_medium; - video_params_medium.video[0] = SimulcastVp8VideoMedium(); - ParamsWithLogging video_params_low; - video_params_low.video[0] = SimulcastVp8VideoLow(); - - std::vector streams = { - VideoQualityTest::DefaultVideoStream(video_params_low, 0), - VideoQualityTest::DefaultVideoStream(video_params_medium, 0), - VideoQualityTest::DefaultVideoStream(video_params_high, 0)}; - large_room.call.num_thumbnails = 15; - large_room.ss[0] = { - streams, 2, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - fixture->RunWithAnalyzer(large_room); -} - -TEST(FullStackTest, MAYBE_LargeRoomVP8_50thumb) { +TEST(FullStackTest, MAYBE_Largeroom_50thumb) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging large_room; large_room.call.send_side_bwe = true; @@ -1446,116 +1186,4 
@@ TEST(FullStackTest, MAYBE_LargeRoomVP8_50thumb) { fixture->RunWithAnalyzer(large_room); } -INSTANTIATE_TEST_SUITE_P( - FullStackTest, - GenericDescriptorTest, - ::testing::Values("WebRTC-GenericDescriptor/Disabled/", - "WebRTC-GenericDescriptor/Enabled/")); - -class DualStreamsTest : public ::testing::TestWithParam {}; - -// Disable dual video test on mobile device becuase it's too heavy. -// TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on MAC. -#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS) && !defined(WEBRTC_MAC) -TEST_P(DualStreamsTest, - ModeratelyRestricted_SlidesVp8_2TL_Simulcast_Video_Simulcast_High) { - const int first_stream = GetParam(); - ParamsWithLogging dual_streams; - - // Screenshare Settings. - dual_streams.screenshare[first_stream] = {true, false, 10}; - dual_streams.video[first_stream] = {true, 1850, 1110, 5, 800000, 2500000, - 2500000, false, "VP8", 2, 1, 400000, - false, false, false, ""}; - - ParamsWithLogging screenshare_params_high; - screenshare_params_high.video[0] = { - true, 1850, 1110, 60, 600000, 1250000, 1250000, false, - "VP8", 2, 0, 400000, false, false, false, ""}; - VideoQualityTest::Params screenshare_params_low; - screenshare_params_low.video[0] = {true, 1850, 1110, 5, 30000, 200000, - 1000000, false, "VP8", 2, 0, 400000, - false, false, false, ""}; - std::vector screenhsare_streams = { - VideoQualityTest::DefaultVideoStream(screenshare_params_low, 0), - VideoQualityTest::DefaultVideoStream(screenshare_params_high, 0)}; - - dual_streams.ss[first_stream] = { - screenhsare_streams, 1, 1, 0, InterLayerPredMode::kOn, - std::vector(), false}; - - // Video settings. 
- dual_streams.video[1 - first_stream] = SimulcastVp8VideoHigh(); - - ParamsWithLogging video_params_high; - video_params_high.video[0] = SimulcastVp8VideoHigh(); - ParamsWithLogging video_params_medium; - video_params_medium.video[0] = SimulcastVp8VideoMedium(); - ParamsWithLogging video_params_low; - video_params_low.video[0] = SimulcastVp8VideoLow(); - std::vector streams = { - VideoQualityTest::DefaultVideoStream(video_params_low, 0), - VideoQualityTest::DefaultVideoStream(video_params_medium, 0), - VideoQualityTest::DefaultVideoStream(video_params_high, 0)}; - - dual_streams.ss[1 - first_stream] = { - streams, 2, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - - // Call settings. - dual_streams.call.send_side_bwe = true; - dual_streams.call.dual_video = true; - std::string test_label = "dualstreams_moderately_restricted_screenshare_" + - std::to_string(first_stream); - dual_streams.analyzer = {test_label, 0.0, 0.0, kFullStackTestDurationSecs}; - dual_streams.config->loss_percent = 1; - dual_streams.config->link_capacity_kbps = 7500; - dual_streams.config->queue_length_packets = 30; - dual_streams.config->queue_delay_ms = 100; - - auto fixture = CreateVideoQualityTestFixture(); - fixture->RunWithAnalyzer(dual_streams); -} -#endif // !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS) && - // !defined(WEBRTC_MAC) - -TEST_P(DualStreamsTest, Conference_Restricted) { - const int first_stream = GetParam(); - ParamsWithLogging dual_streams; - - // Screenshare Settings. - dual_streams.screenshare[first_stream] = {true, false, 10}; - dual_streams.video[first_stream] = {true, 1850, 1110, 5, 800000, 2500000, - 2500000, false, "VP8", 3, 2, 400000, - false, false, false, ""}; - // Video settings. - dual_streams.video[1 - first_stream] = { - true, 1280, - 720, 30, - 150000, 500000, - 700000, false, - "VP8", 3, - 2, 400000, - false, false, - false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; - - // Call settings. 
- dual_streams.call.send_side_bwe = true; - dual_streams.call.dual_video = true; - std::string test_label = "dualstreams_conference_restricted_screenshare_" + - std::to_string(first_stream); - dual_streams.analyzer = {test_label, 0.0, 0.0, kFullStackTestDurationSecs}; - dual_streams.config->loss_percent = 1; - dual_streams.config->link_capacity_kbps = 5000; - dual_streams.config->queue_length_packets = 30; - dual_streams.config->queue_delay_ms = 100; - - auto fixture = CreateVideoQualityTestFixture(); - fixture->RunWithAnalyzer(dual_streams); -} - -INSTANTIATE_TEST_SUITE_P(FullStackTest, - DualStreamsTest, - ::testing::Values(0, 1)); - } // namespace webrtc diff --git a/video/full_stack_tests_plot.py b/video/full_stack_tests_plot.py index f50c297b17..c195b72a54 100755 --- a/video/full_stack_tests_plot.py +++ b/video/full_stack_tests_plot.py @@ -6,7 +6,6 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. - """Generate graphs for data generated by loopback tests. 
Usage examples: @@ -34,14 +33,14 @@ # Fields DROPPED = 0 -INPUT_TIME = 1 # ms (timestamp) -SEND_TIME = 2 # ms (timestamp) -RECV_TIME = 3 # ms (timestamp) -RENDER_TIME = 4 # ms (timestamp) -ENCODED_FRAME_SIZE = 5 # bytes +INPUT_TIME = 1 # ms (timestamp) +SEND_TIME = 2 # ms (timestamp) +RECV_TIME = 3 # ms (timestamp) +RENDER_TIME = 4 # ms (timestamp) +ENCODED_FRAME_SIZE = 5 # bytes PSNR = 6 SSIM = 7 -ENCODE_TIME = 8 # ms (time interval) +ENCODE_TIME = 8 # ms (time interval) TOTAL_RAW_FIELDS = 9 @@ -78,111 +77,116 @@ NAME_TO_ID = {field[1]: field[0] for field in _FIELDS} ID_TO_TITLE = {field[0]: field[2] for field in _FIELDS} + def FieldArgToId(arg): - if arg == "none": - return None - if arg in NAME_TO_ID: - return NAME_TO_ID[arg] - if arg + "_ms" in NAME_TO_ID: - return NAME_TO_ID[arg + "_ms"] - raise Exception("Unrecognized field name \"{}\"".format(arg)) + if arg == "none": + return None + if arg in NAME_TO_ID: + return NAME_TO_ID[arg] + if arg + "_ms" in NAME_TO_ID: + return NAME_TO_ID[arg + "_ms"] + raise Exception("Unrecognized field name \"{}\"".format(arg)) class PlotLine(object): - """Data for a single graph line.""" + """Data for a single graph line.""" - def __init__(self, label, values, flags): - self.label = label - self.values = values - self.flags = flags + def __init__(self, label, values, flags): + self.label = label + self.values = values + self.flags = flags class Data(object): - """Object representing one full stack test.""" - - def __init__(self, filename): - self.title = "" - self.length = 0 - self.samples = defaultdict(list) - - self._ReadSamples(filename) - - def _ReadSamples(self, filename): - """Reads graph data from the given file.""" - f = open(filename) - it = iter(f) - - self.title = it.next().strip() - self.length = int(it.next()) - field_names = [name.strip() for name in it.next().split()] - field_ids = [NAME_TO_ID[name] for name in field_names] - - for field_id in field_ids: - self.samples[field_id] = [0.0] * self.length - - for 
sample_id in xrange(self.length): - for col, value in enumerate(it.next().split()): - self.samples[field_ids[col]][sample_id] = float(value) - - self._SubtractFirstInputTime() - self._GenerateAdditionalData() - - f.close() - - def _SubtractFirstInputTime(self): - offset = self.samples[INPUT_TIME][0] - for field in [INPUT_TIME, SEND_TIME, RECV_TIME, RENDER_TIME]: - if field in self.samples: - self.samples[field] = [x - offset for x in self.samples[field]] - - def _GenerateAdditionalData(self): - """Calculates sender time, receiver time etc. from the raw data.""" - s = self.samples - last_render_time = 0 - for field_id in [SENDER_TIME, RECEIVER_TIME, END_TO_END, RENDERED_DELTA]: - s[field_id] = [0] * self.length - - for k in range(self.length): - s[SENDER_TIME][k] = s[SEND_TIME][k] - s[INPUT_TIME][k] - - decoded_time = s[RENDER_TIME][k] - s[RECEIVER_TIME][k] = decoded_time - s[RECV_TIME][k] - s[END_TO_END][k] = decoded_time - s[INPUT_TIME][k] - if not s[DROPPED][k]: - if k > 0: - s[RENDERED_DELTA][k] = decoded_time - last_render_time - last_render_time = decoded_time - - def _Hide(self, values): - """ + """Object representing one full stack test.""" + + def __init__(self, filename): + self.title = "" + self.length = 0 + self.samples = defaultdict(list) + + self._ReadSamples(filename) + + def _ReadSamples(self, filename): + """Reads graph data from the given file.""" + f = open(filename) + it = iter(f) + + self.title = it.next().strip() + self.length = int(it.next()) + field_names = [name.strip() for name in it.next().split()] + field_ids = [NAME_TO_ID[name] for name in field_names] + + for field_id in field_ids: + self.samples[field_id] = [0.0] * self.length + + for sample_id in xrange(self.length): + for col, value in enumerate(it.next().split()): + self.samples[field_ids[col]][sample_id] = float(value) + + self._SubtractFirstInputTime() + self._GenerateAdditionalData() + + f.close() + + def _SubtractFirstInputTime(self): + offset = self.samples[INPUT_TIME][0] + for 
field in [INPUT_TIME, SEND_TIME, RECV_TIME, RENDER_TIME]: + if field in self.samples: + self.samples[field] = [x - offset for x in self.samples[field]] + + def _GenerateAdditionalData(self): + """Calculates sender time, receiver time etc. from the raw data.""" + s = self.samples + last_render_time = 0 + for field_id in [ + SENDER_TIME, RECEIVER_TIME, END_TO_END, RENDERED_DELTA + ]: + s[field_id] = [0] * self.length + + for k in range(self.length): + s[SENDER_TIME][k] = s[SEND_TIME][k] - s[INPUT_TIME][k] + + decoded_time = s[RENDER_TIME][k] + s[RECEIVER_TIME][k] = decoded_time - s[RECV_TIME][k] + s[END_TO_END][k] = decoded_time - s[INPUT_TIME][k] + if not s[DROPPED][k]: + if k > 0: + s[RENDERED_DELTA][k] = decoded_time - last_render_time + last_render_time = decoded_time + + def _Hide(self, values): + """ Replaces values for dropped frames with None. These values are then skipped by the Plot() method. """ - return [None if self.samples[DROPPED][k] else values[k] - for k in range(len(values))] + return [ + None if self.samples[DROPPED][k] else values[k] + for k in range(len(values)) + ] - def AddSamples(self, config, target_lines_list): - """Creates graph lines from the current data set with given config.""" - for field in config.fields: - # field is None means the user wants just to skip the color. - if field is None: - target_lines_list.append(None) - continue + def AddSamples(self, config, target_lines_list): + """Creates graph lines from the current data set with given config.""" + for field in config.fields: + # field is None means the user wants just to skip the color. 
+ if field is None: + target_lines_list.append(None) + continue - field_id = field & FIELD_MASK - values = self.samples[field_id] + field_id = field & FIELD_MASK + values = self.samples[field_id] - if field & HIDE_DROPPED: - values = self._Hide(values) + if field & HIDE_DROPPED: + values = self._Hide(values) - target_lines_list.append(PlotLine( - self.title + " " + ID_TO_TITLE[field_id], - values, field & ~FIELD_MASK)) + target_lines_list.append( + PlotLine(self.title + " " + ID_TO_TITLE[field_id], values, + field & ~FIELD_MASK)) def AverageOverCycle(values, length): - """ + """ Returns the list: [ avg(values[0], values[length], ...), @@ -194,221 +198,272 @@ def AverageOverCycle(values, length): Skips None values when calculating the average value. """ - total = [0.0] * length - count = [0] * length - for k, val in enumerate(values): - if val is not None: - total[k % length] += val - count[k % length] += 1 + total = [0.0] * length + count = [0] * length + for k, val in enumerate(values): + if val is not None: + total[k % length] += val + count[k % length] += 1 - result = [0.0] * length - for k in range(length): - result[k] = total[k] / count[k] if count[k] else None - return result + result = [0.0] * length + for k in range(length): + result[k] = total[k] / count[k] if count[k] else None + return result class PlotConfig(object): - """Object representing a single graph.""" - - def __init__(self, fields, data_list, cycle_length=None, frames=None, - offset=0, output_filename=None, title="Graph"): - self.fields = fields - self.data_list = data_list - self.cycle_length = cycle_length - self.frames = frames - self.offset = offset - self.output_filename = output_filename - self.title = title - - def Plot(self, ax1): - lines = [] - for data in self.data_list: - if not data: - # Add None lines to skip the colors. 
- lines.extend([None] * len(self.fields)) - else: - data.AddSamples(self, lines) - - def _SliceValues(values): - if self.offset: - values = values[self.offset:] - if self.frames: - values = values[:self.frames] - return values - - length = None - for line in lines: - if line is None: - continue - - line.values = _SliceValues(line.values) - if self.cycle_length: - line.values = AverageOverCycle(line.values, self.cycle_length) - - if length is None: - length = len(line.values) - elif length != len(line.values): - raise Exception("All arrays should have the same length!") - - ax1.set_xlabel("Frame", fontsize="large") - if any(line.flags & RIGHT_Y_AXIS for line in lines if line): - ax2 = ax1.twinx() - ax2.set_xlabel("Frame", fontsize="large") - else: - ax2 = None - - # Have to implement color_cycle manually, due to two scales in a graph. - color_cycle = ["b", "r", "g", "c", "m", "y", "k"] - color_iter = itertools.cycle(color_cycle) - - for line in lines: - if not line: - color_iter.next() - continue - - if self.cycle_length: - x = numpy.array(range(self.cycle_length)) - else: - x = numpy.array(range(self.offset, self.offset + len(line.values))) - y = numpy.array(line.values) - ax = ax2 if line.flags & RIGHT_Y_AXIS else ax1 - ax.Plot(x, y, "o-", label=line.label, markersize=3.0, linewidth=1.0, - color=color_iter.next()) - - ax1.grid(True) - if ax2: - ax1.legend(loc="upper left", shadow=True, fontsize="large") - ax2.legend(loc="upper right", shadow=True, fontsize="large") - else: - ax1.legend(loc="best", shadow=True, fontsize="large") + """Object representing a single graph.""" + + def __init__(self, + fields, + data_list, + cycle_length=None, + frames=None, + offset=0, + output_filename=None, + title="Graph"): + self.fields = fields + self.data_list = data_list + self.cycle_length = cycle_length + self.frames = frames + self.offset = offset + self.output_filename = output_filename + self.title = title + + def Plot(self, ax1): + lines = [] + for data in self.data_list: + 
if not data: + # Add None lines to skip the colors. + lines.extend([None] * len(self.fields)) + else: + data.AddSamples(self, lines) + + def _SliceValues(values): + if self.offset: + values = values[self.offset:] + if self.frames: + values = values[:self.frames] + return values + + length = None + for line in lines: + if line is None: + continue + + line.values = _SliceValues(line.values) + if self.cycle_length: + line.values = AverageOverCycle(line.values, self.cycle_length) + + if length is None: + length = len(line.values) + elif length != len(line.values): + raise Exception("All arrays should have the same length!") + + ax1.set_xlabel("Frame", fontsize="large") + if any(line.flags & RIGHT_Y_AXIS for line in lines if line): + ax2 = ax1.twinx() + ax2.set_xlabel("Frame", fontsize="large") + else: + ax2 = None + + # Have to implement color_cycle manually, due to two scales in a graph. + color_cycle = ["b", "r", "g", "c", "m", "y", "k"] + color_iter = itertools.cycle(color_cycle) + + for line in lines: + if not line: + color_iter.next() + continue + + if self.cycle_length: + x = numpy.array(range(self.cycle_length)) + else: + x = numpy.array( + range(self.offset, self.offset + len(line.values))) + y = numpy.array(line.values) + ax = ax2 if line.flags & RIGHT_Y_AXIS else ax1 + ax.Plot(x, + y, + "o-", + label=line.label, + markersize=3.0, + linewidth=1.0, + color=color_iter.next()) + + ax1.grid(True) + if ax2: + ax1.legend(loc="upper left", shadow=True, fontsize="large") + ax2.legend(loc="upper right", shadow=True, fontsize="large") + else: + ax1.legend(loc="best", shadow=True, fontsize="large") def LoadFiles(filenames): - result = [] - for filename in filenames: - if filename in LoadFiles.cache: - result.append(LoadFiles.cache[filename]) - else: - data = Data(filename) - LoadFiles.cache[filename] = data - result.append(data) - return result + result = [] + for filename in filenames: + if filename in LoadFiles.cache: + result.append(LoadFiles.cache[filename]) + else: 
+ data = Data(filename) + LoadFiles.cache[filename] = data + result.append(data) + return result + + LoadFiles.cache = {} def GetParser(): - class CustomAction(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): - if "ordered_args" not in namespace: - namespace.ordered_args = [] - namespace.ordered_args.append((self.dest, values)) - - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) - - parser.add_argument( - "-c", "--cycle_length", nargs=1, action=CustomAction, - type=int, help="Cycle length over which to average the values.") - parser.add_argument( - "-f", "--field", nargs=1, action=CustomAction, - help="Name of the field to show. Use 'none' to skip a color.") - parser.add_argument("-r", "--right", nargs=0, action=CustomAction, - help="Use right Y axis for given field.") - parser.add_argument("-d", "--drop", nargs=0, action=CustomAction, - help="Hide values for dropped frames.") - parser.add_argument("-o", "--offset", nargs=1, action=CustomAction, type=int, - help="Frame offset.") - parser.add_argument("-n", "--next", nargs=0, action=CustomAction, - help="Separator for multiple graphs.") - parser.add_argument( - "--frames", nargs=1, action=CustomAction, type=int, - help="Frame count to show or take into account while averaging.") - parser.add_argument("-t", "--title", nargs=1, action=CustomAction, - help="Title of the graph.") - parser.add_argument( - "-O", "--output_filename", nargs=1, action=CustomAction, - help="Use to save the graph into a file. 
" - "Otherwise, a window will be shown.") - parser.add_argument( - "files", nargs="+", action=CustomAction, - help="List of text-based files generated by loopback tests.") - return parser + class CustomAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + if "ordered_args" not in namespace: + namespace.ordered_args = [] + namespace.ordered_args.append((self.dest, values)) + + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + + parser.add_argument("-c", + "--cycle_length", + nargs=1, + action=CustomAction, + type=int, + help="Cycle length over which to average the values.") + parser.add_argument( + "-f", + "--field", + nargs=1, + action=CustomAction, + help="Name of the field to show. Use 'none' to skip a color.") + parser.add_argument("-r", + "--right", + nargs=0, + action=CustomAction, + help="Use right Y axis for given field.") + parser.add_argument("-d", + "--drop", + nargs=0, + action=CustomAction, + help="Hide values for dropped frames.") + parser.add_argument("-o", + "--offset", + nargs=1, + action=CustomAction, + type=int, + help="Frame offset.") + parser.add_argument("-n", + "--next", + nargs=0, + action=CustomAction, + help="Separator for multiple graphs.") + parser.add_argument( + "--frames", + nargs=1, + action=CustomAction, + type=int, + help="Frame count to show or take into account while averaging.") + parser.add_argument("-t", + "--title", + nargs=1, + action=CustomAction, + help="Title of the graph.") + parser.add_argument("-O", + "--output_filename", + nargs=1, + action=CustomAction, + help="Use to save the graph into a file. " + "Otherwise, a window will be shown.") + parser.add_argument( + "files", + nargs="+", + action=CustomAction, + help="List of text-based files generated by loopback tests.") + return parser def _PlotConfigFromArgs(args, graph_num): - # Pylint complains about using kwargs, so have to do it this way. 
- cycle_length = None - frames = None - offset = 0 - output_filename = None - title = "Graph" - - fields = [] - files = [] - mask = 0 - for key, values in args: - if key == "cycle_length": - cycle_length = values[0] - elif key == "frames": - frames = values[0] - elif key == "offset": - offset = values[0] - elif key == "output_filename": - output_filename = values[0] - elif key == "title": - title = values[0] - elif key == "drop": - mask |= HIDE_DROPPED - elif key == "right": - mask |= RIGHT_Y_AXIS - elif key == "field": - field_id = FieldArgToId(values[0]) - fields.append(field_id | mask if field_id is not None else None) - mask = 0 # Reset mask after the field argument. - elif key == "files": - files.extend(values) - - if not files: - raise Exception("Missing file argument(s) for graph #{}".format(graph_num)) - if not fields: - raise Exception("Missing field argument(s) for graph #{}".format(graph_num)) - - return PlotConfig(fields, LoadFiles(files), cycle_length=cycle_length, - frames=frames, offset=offset, output_filename=output_filename, - title=title) + # Pylint complains about using kwargs, so have to do it this way. + cycle_length = None + frames = None + offset = 0 + output_filename = None + title = "Graph" + + fields = [] + files = [] + mask = 0 + for key, values in args: + if key == "cycle_length": + cycle_length = values[0] + elif key == "frames": + frames = values[0] + elif key == "offset": + offset = values[0] + elif key == "output_filename": + output_filename = values[0] + elif key == "title": + title = values[0] + elif key == "drop": + mask |= HIDE_DROPPED + elif key == "right": + mask |= RIGHT_Y_AXIS + elif key == "field": + field_id = FieldArgToId(values[0]) + fields.append(field_id | mask if field_id is not None else None) + mask = 0 # Reset mask after the field argument. 
+ elif key == "files": + files.extend(values) + + if not files: + raise Exception( + "Missing file argument(s) for graph #{}".format(graph_num)) + if not fields: + raise Exception( + "Missing field argument(s) for graph #{}".format(graph_num)) + + return PlotConfig(fields, + LoadFiles(files), + cycle_length=cycle_length, + frames=frames, + offset=offset, + output_filename=output_filename, + title=title) def PlotConfigsFromArgs(args): - """Generates plot configs for given command line arguments.""" - # The way it works: - # First we detect separators -n/--next and split arguments into groups, one - # for each plot. For each group, we partially parse it with - # argparse.ArgumentParser, modified to remember the order of arguments. - # Then we traverse the argument list and fill the PlotConfig. - args = itertools.groupby(args, lambda x: x in ["-n", "--next"]) - prep_args = list(list(group) for match, group in args if not match) - - parser = GetParser() - plot_configs = [] - for index, raw_args in enumerate(prep_args): - graph_args = parser.parse_args(raw_args).ordered_args - plot_configs.append(_PlotConfigFromArgs(graph_args, index)) - return plot_configs + """Generates plot configs for given command line arguments.""" + # The way it works: + # First we detect separators -n/--next and split arguments into groups, one + # for each plot. For each group, we partially parse it with + # argparse.ArgumentParser, modified to remember the order of arguments. + # Then we traverse the argument list and fill the PlotConfig. 
+ args = itertools.groupby(args, lambda x: x in ["-n", "--next"]) + prep_args = list(list(group) for match, group in args if not match) + + parser = GetParser() + plot_configs = [] + for index, raw_args in enumerate(prep_args): + graph_args = parser.parse_args(raw_args).ordered_args + plot_configs.append(_PlotConfigFromArgs(graph_args, index)) + return plot_configs def ShowOrSavePlots(plot_configs): - for config in plot_configs: - fig = plt.figure(figsize=(14.0, 10.0)) - ax = fig.add_subPlot(1, 1, 1) + for config in plot_configs: + fig = plt.figure(figsize=(14.0, 10.0)) + ax = fig.add_subPlot(1, 1, 1) + + plt.title(config.title) + config.Plot(ax) + if config.output_filename: + print "Saving to", config.output_filename + fig.savefig(config.output_filename) + plt.close(fig) - plt.title(config.title) - config.Plot(ax) - if config.output_filename: - print "Saving to", config.output_filename - fig.savefig(config.output_filename) - plt.close(fig) + plt.show() - plt.show() if __name__ == "__main__": - ShowOrSavePlots(PlotConfigsFromArgs(sys.argv[1:])) + ShowOrSavePlots(PlotConfigsFromArgs(sys.argv[1:])) diff --git a/video/overuse_frame_detector_resource_adaptation_module.cc b/video/overuse_frame_detector_resource_adaptation_module.cc deleted file mode 100644 index e73f7fa4d1..0000000000 --- a/video/overuse_frame_detector_resource_adaptation_module.cc +++ /dev/null @@ -1,891 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "video/overuse_frame_detector_resource_adaptation_module.h" - -#include -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/base/macros.h" -#include "api/task_queue/task_queue_base.h" -#include "api/video/video_source_interface.h" -#include "call/adaptation/video_source_restrictions.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" -#include "video/video_stream_encoder.h" - -namespace webrtc { - -namespace { - -const int kMinFramerateFps = 2; - -bool IsResolutionScalingEnabled(DegradationPreference degradation_preference) { - return degradation_preference == DegradationPreference::MAINTAIN_FRAMERATE || - degradation_preference == DegradationPreference::BALANCED; -} - -bool IsFramerateScalingEnabled(DegradationPreference degradation_preference) { - return degradation_preference == DegradationPreference::MAINTAIN_RESOLUTION || - degradation_preference == DegradationPreference::BALANCED; -} - -// Returns modified restrictions where any constraints that don't apply to the -// degradation preference are cleared. 
-VideoSourceRestrictions ApplyDegradationPreference( - VideoSourceRestrictions source_restrictions, - DegradationPreference degradation_preference) { - switch (degradation_preference) { - case DegradationPreference::BALANCED: - break; - case DegradationPreference::MAINTAIN_FRAMERATE: - source_restrictions.set_max_frame_rate(absl::nullopt); - break; - case DegradationPreference::MAINTAIN_RESOLUTION: - source_restrictions.set_max_pixels_per_frame(absl::nullopt); - source_restrictions.set_target_pixels_per_frame(absl::nullopt); - break; - case DegradationPreference::DISABLED: - source_restrictions.set_max_pixels_per_frame(absl::nullopt); - source_restrictions.set_target_pixels_per_frame(absl::nullopt); - source_restrictions.set_max_frame_rate(absl::nullopt); - } - return source_restrictions; -} - -} // namespace - -// VideoSourceRestrictor is responsible for keeping track of current -// VideoSourceRestrictions and how to modify them in response to adapting up or -// down. It is not reponsible for determining when we should adapt up or down - -// for that, see OveruseFrameDetectorResourceAdaptationModule::AdaptUp() and -// AdaptDown() - only how to modify the source/sink restrictions when this -// happens. Note that it is also not responsible for reconfigruring the -// source/sink, it is only a keeper of desired restrictions. -class OveruseFrameDetectorResourceAdaptationModule::VideoSourceRestrictor { - public: - VideoSourceRestrictor() {} - - VideoSourceRestrictions source_restrictions() { - return source_restrictions_; - } - - // Updates the source_restrictions(). The source/sink has to be informed of - // this separately. - void ClearRestrictions() { - source_restrictions_ = VideoSourceRestrictions(); - } - - // Updates the source_restrictions(). The source/sink has to be informed of - // this separately. 
- bool RequestResolutionLowerThan(int pixel_count, - int min_pixels_per_frame, - bool* min_pixels_reached) { - // The input video frame size will have a resolution less than or equal to - // |max_pixel_count| depending on how the source can scale the frame size. - const int pixels_wanted = (pixel_count * 3) / 5; - if (pixels_wanted >= - rtc::dchecked_cast( - source_restrictions_.max_pixels_per_frame().value_or( - std::numeric_limits::max()))) { - return false; - } - if (pixels_wanted < min_pixels_per_frame) { - *min_pixels_reached = true; - return false; - } - RTC_LOG(LS_INFO) << "Scaling down resolution, max pixels: " - << pixels_wanted; - source_restrictions_.set_max_pixels_per_frame( - pixels_wanted != std::numeric_limits::max() - ? absl::optional(pixels_wanted) - : absl::nullopt); - source_restrictions_.set_target_pixels_per_frame(absl::nullopt); - return true; - } - - // Updates the source_restrictions(). The source/sink has to be informed of - // this separately. - int RequestFramerateLowerThan(int fps) { - // The input video frame rate will be scaled down to 2/3, rounding down. - int framerate_wanted = (fps * 2) / 3; - return RestrictFramerate(framerate_wanted) ? framerate_wanted : -1; - } - - int GetHigherResolutionThan(int pixel_count) const { - // On step down we request at most 3/5 the pixel count of the previous - // resolution, so in order to take "one step up" we request a resolution - // as close as possible to 5/3 of the current resolution. The actual pixel - // count selected depends on the capabilities of the source. In order to - // not take a too large step up, we cap the requested pixel count to be at - // most four time the current number of pixels. - return (pixel_count * 5) / 3; - } - - // Updates the source_restrictions(). The source/sink has to be informed of - // this separately. 
- bool RequestHigherResolutionThan(int pixel_count) { - int max_pixels_wanted = pixel_count; - if (max_pixels_wanted != std::numeric_limits::max()) - max_pixels_wanted = pixel_count * 4; - - if (max_pixels_wanted <= - rtc::dchecked_cast( - source_restrictions_.max_pixels_per_frame().value_or( - std::numeric_limits::max()))) { - return false; - } - - RTC_LOG(LS_INFO) << "Scaling up resolution, max pixels: " - << max_pixels_wanted; - source_restrictions_.set_max_pixels_per_frame( - max_pixels_wanted != std::numeric_limits::max() - ? absl::optional(max_pixels_wanted) - : absl::nullopt); - source_restrictions_.set_target_pixels_per_frame( - max_pixels_wanted != std::numeric_limits::max() - ? absl::optional(GetHigherResolutionThan(pixel_count)) - : absl::nullopt); - return true; - } - - // Updates the source_restrictions(). The source/sink has to be informed of - // this separately. - // Request upgrade in framerate. Returns the new requested frame, or -1 if - // no change requested. Note that maxint may be returned if limits due to - // adaptation requests are removed completely. In that case, consider - // |max_framerate_| to be the current limit (assuming the capturer complies). - int RequestHigherFramerateThan(int fps) { - // The input frame rate will be scaled up to the last step, with rounding. - int framerate_wanted = fps; - if (fps != std::numeric_limits::max()) - framerate_wanted = (fps * 3) / 2; - - return IncreaseFramerate(framerate_wanted) ? framerate_wanted : -1; - } - - // Updates the source_restrictions(). The source/sink has to be informed of - // this separately. - bool RestrictFramerate(int fps) { - const int fps_wanted = std::max(kMinFramerateFps, fps); - if (fps_wanted >= - rtc::dchecked_cast(source_restrictions_.max_frame_rate().value_or( - std::numeric_limits::max()))) - return false; - - RTC_LOG(LS_INFO) << "Scaling down framerate: " << fps_wanted; - source_restrictions_.set_max_frame_rate( - fps_wanted != std::numeric_limits::max() - ? 
absl::optional(fps_wanted) - : absl::nullopt); - return true; - } - - // Updates the source_restrictions(). The source/sink has to be informed of - // this separately. - bool IncreaseFramerate(int fps) { - const int fps_wanted = std::max(kMinFramerateFps, fps); - if (fps_wanted <= - rtc::dchecked_cast(source_restrictions_.max_frame_rate().value_or( - std::numeric_limits::max()))) - return false; - - RTC_LOG(LS_INFO) << "Scaling up framerate: " << fps_wanted; - source_restrictions_.set_max_frame_rate( - fps_wanted != std::numeric_limits::max() - ? absl::optional(fps_wanted) - : absl::nullopt); - return true; - } - - private: - VideoSourceRestrictions source_restrictions_; - - RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceRestrictor); -}; - -// Class holding adaptation information. -OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::AdaptCounter() { - fps_counters_.resize(kScaleReasonSize); - resolution_counters_.resize(kScaleReasonSize); - static_assert(kScaleReasonSize == 2, "Update MoveCount."); -} - -OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::~AdaptCounter() {} - -std::string -OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::ToString() const { - rtc::StringBuilder ss; - ss << "Downgrade counts: fps: {" << ToString(fps_counters_); - ss << "}, resolution: {" << ToString(resolution_counters_) << "}"; - return ss.Release(); -} - -VideoStreamEncoderObserver::AdaptationSteps -OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::Counts( - int reason) const { - VideoStreamEncoderObserver::AdaptationSteps counts; - counts.num_framerate_reductions = fps_counters_[reason]; - counts.num_resolution_reductions = resolution_counters_[reason]; - return counts; -} - -void OveruseFrameDetectorResourceAdaptationModule::AdaptCounter:: - IncrementFramerate(int reason) { - ++(fps_counters_[reason]); -} - -void OveruseFrameDetectorResourceAdaptationModule::AdaptCounter:: - IncrementResolution(int reason) { - ++(resolution_counters_[reason]); -} - 
-void OveruseFrameDetectorResourceAdaptationModule::AdaptCounter:: - DecrementFramerate(int reason) { - if (fps_counters_[reason] == 0) { - // Balanced mode: Adapt up is in a different order, switch reason. - // E.g. framerate adapt down: quality (2), framerate adapt up: cpu (3). - // 1. Down resolution (cpu): res={quality:0,cpu:1}, fps={quality:0,cpu:0} - // 2. Down fps (quality): res={quality:0,cpu:1}, fps={quality:1,cpu:0} - // 3. Up fps (cpu): res={quality:1,cpu:0}, fps={quality:0,cpu:0} - // 4. Up resolution (quality): res={quality:0,cpu:0}, fps={quality:0,cpu:0} - RTC_DCHECK_GT(TotalCount(reason), 0) << "No downgrade for reason."; - RTC_DCHECK_GT(FramerateCount(), 0) << "Framerate not downgraded."; - MoveCount(&resolution_counters_, reason); - MoveCount(&fps_counters_, (reason + 1) % kScaleReasonSize); - } - --(fps_counters_[reason]); - RTC_DCHECK_GE(fps_counters_[reason], 0); -} - -void OveruseFrameDetectorResourceAdaptationModule::AdaptCounter:: - DecrementResolution(int reason) { - if (resolution_counters_[reason] == 0) { - // Balanced mode: Adapt up is in a different order, switch reason. - RTC_DCHECK_GT(TotalCount(reason), 0) << "No downgrade for reason."; - RTC_DCHECK_GT(ResolutionCount(), 0) << "Resolution not downgraded."; - MoveCount(&fps_counters_, reason); - MoveCount(&resolution_counters_, (reason + 1) % kScaleReasonSize); - } - --(resolution_counters_[reason]); - RTC_DCHECK_GE(resolution_counters_[reason], 0); -} - -void OveruseFrameDetectorResourceAdaptationModule::AdaptCounter:: - DecrementFramerate(int reason, int cur_fps) { - DecrementFramerate(reason); - // Reset if at max fps (i.e. in case of fewer steps up than down). 
- if (cur_fps == std::numeric_limits::max()) - absl::c_fill(fps_counters_, 0); -} - -int OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::FramerateCount() - const { - return Count(fps_counters_); -} - -int OveruseFrameDetectorResourceAdaptationModule::AdaptCounter:: - ResolutionCount() const { - return Count(resolution_counters_); -} - -int OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::FramerateCount( - int reason) const { - return fps_counters_[reason]; -} - -int OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::ResolutionCount( - int reason) const { - return resolution_counters_[reason]; -} - -int OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::TotalCount( - int reason) const { - return FramerateCount(reason) + ResolutionCount(reason); -} - -int OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::Count( - const std::vector& counters) const { - return absl::c_accumulate(counters, 0); -} - -void OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::MoveCount( - std::vector* counters, - int from_reason) { - int to_reason = (from_reason + 1) % kScaleReasonSize; - ++((*counters)[to_reason]); - --((*counters)[from_reason]); -} - -std::string -OveruseFrameDetectorResourceAdaptationModule::AdaptCounter::ToString( - const std::vector& counters) const { - rtc::StringBuilder ss; - for (size_t reason = 0; reason < kScaleReasonSize; ++reason) { - ss << (reason ? 
" cpu" : "quality") << ":" << counters[reason]; - } - return ss.Release(); -} - -OveruseFrameDetectorResourceAdaptationModule:: - OveruseFrameDetectorResourceAdaptationModule( - bool experiment_cpu_load_estimator, - std::unique_ptr overuse_detector, - VideoStreamEncoderObserver* encoder_stats_observer, - ResourceAdaptationModuleListener* adaptation_listener) - : adaptation_listener_(adaptation_listener), - experiment_cpu_load_estimator_(experiment_cpu_load_estimator), - has_input_video_(false), - degradation_preference_(DegradationPreference::DISABLED), - adapt_counters_(), - balanced_settings_(), - last_adaptation_request_(absl::nullopt), - source_restrictor_(std::make_unique()), - overuse_detector_(std::move(overuse_detector)), - overuse_detector_is_started_(false), - last_input_frame_size_(absl::nullopt), - target_frame_rate_(absl::nullopt), - target_bitrate_bps_(absl::nullopt), - quality_scaler_(nullptr), - encoder_settings_(absl::nullopt), - encoder_stats_observer_(encoder_stats_observer) { - RTC_DCHECK(adaptation_listener_); - RTC_DCHECK(overuse_detector_); - RTC_DCHECK(encoder_stats_observer_); -} - -OveruseFrameDetectorResourceAdaptationModule:: - ~OveruseFrameDetectorResourceAdaptationModule() {} - -void OveruseFrameDetectorResourceAdaptationModule::StartResourceAdaptation( - ResourceAdaptationModuleListener* adaptation_listener) { - RTC_DCHECK(encoder_settings_.has_value()); - RTC_DCHECK(!overuse_detector_is_started_); - // TODO(hbos): When AdaptUp() and AdaptDown() are no longer invoked outside - // the interval between StartCheckForOveruse() and StopCheckForOveruse(), - // support configuring which |adaptation_listener_| to use on the fly. It is - // currently hardcoded for the entire lifetime of the module in order to - // support adaptation caused by VideoStreamEncoder or QualityScaler invoking - // AdaptUp() and AdaptDown() even when the OveruseDetector is inactive. 
- RTC_DCHECK_EQ(adaptation_listener, adaptation_listener_); - overuse_detector_->StartCheckForOveruse(TaskQueueBase::Current(), - GetCpuOveruseOptions(), this); - overuse_detector_is_started_ = true; - overuse_detector_->OnTargetFramerateUpdated( - target_frame_rate_.has_value() - ? static_cast(target_frame_rate_.value()) - : std::numeric_limits::max()); -} - -void OveruseFrameDetectorResourceAdaptationModule::StopResourceAdaptation() { - overuse_detector_->StopCheckForOveruse(); - overuse_detector_is_started_ = false; -} - -void OveruseFrameDetectorResourceAdaptationModule::SetHasInputVideo( - bool has_input_video) { - // While false, AdaptUp() and AdaptDown() are NO-OPS. - has_input_video_ = has_input_video; -} - -void OveruseFrameDetectorResourceAdaptationModule::SetDegradationPreference( - DegradationPreference degradation_preference) { - if (degradation_preference_ != degradation_preference) { - // Reset adaptation state, so that we're not tricked into thinking there's - // an already pending request of the same type. - last_adaptation_request_.reset(); - if (degradation_preference == DegradationPreference::BALANCED || - degradation_preference_ == DegradationPreference::BALANCED) { - // TODO(asapersson): Consider removing |adapt_counters_| map and use one - // AdaptCounter for all modes. 
- source_restrictor_->ClearRestrictions(); - adapt_counters_.clear(); - } - } - degradation_preference_ = degradation_preference; - MaybeUpdateVideoSourceRestrictions(); -} - -void OveruseFrameDetectorResourceAdaptationModule::SetEncoderSettings( - EncoderSettings encoder_settings) { - encoder_settings_ = std::move(encoder_settings); - MaybeUpdateTargetFrameRate(); -} - -void OveruseFrameDetectorResourceAdaptationModule::SetEncoderTargetBitrate( - absl::optional target_bitrate_bps) { - target_bitrate_bps_ = target_bitrate_bps; -} - -void OveruseFrameDetectorResourceAdaptationModule:: - ResetVideoSourceRestrictions() { - last_adaptation_request_.reset(); - source_restrictor_->ClearRestrictions(); - adapt_counters_.clear(); - MaybeUpdateVideoSourceRestrictions(); -} - -void OveruseFrameDetectorResourceAdaptationModule::OnFrame( - const VideoFrame& frame) { - last_input_frame_size_ = frame.size(); -} - -void OveruseFrameDetectorResourceAdaptationModule::OnFrameDroppedDueToSize() { - int fps_count = GetConstAdaptCounter().FramerateCount( - AdaptationObserverInterface::AdaptReason::kQuality); - int res_count = GetConstAdaptCounter().ResolutionCount( - AdaptationObserverInterface::AdaptReason::kQuality); - AdaptDown(AdaptationObserverInterface::AdaptReason::kQuality); - if (degradation_preference() == DegradationPreference::BALANCED && - GetConstAdaptCounter().FramerateCount( - AdaptationObserverInterface::AdaptReason::kQuality) > fps_count) { - // Adapt framerate in same step as resolution. - AdaptDown(AdaptationObserverInterface::AdaptReason::kQuality); - } - if (GetConstAdaptCounter().ResolutionCount( - AdaptationObserverInterface::AdaptReason::kQuality) > res_count) { - encoder_stats_observer_->OnInitialQualityResolutionAdaptDown(); - } -} - -void OveruseFrameDetectorResourceAdaptationModule::OnEncodeStarted( - const VideoFrame& cropped_frame, - int64_t time_when_first_seen_us) { - // TODO(hbos): Rename FrameCaptured() to something more appropriate (e.g. 
- // "OnEncodeStarted"?) or revise usage. - overuse_detector_->FrameCaptured(cropped_frame, time_when_first_seen_us); -} - -void OveruseFrameDetectorResourceAdaptationModule::OnEncodeCompleted( - uint32_t timestamp, - int64_t time_sent_in_us, - int64_t capture_time_us, - absl::optional encode_duration_us) { - // TODO(hbos): Rename FrameSent() to something more appropriate (e.g. - // "OnEncodeCompleted"?). - overuse_detector_->FrameSent(timestamp, time_sent_in_us, capture_time_us, - encode_duration_us); -} - -void OveruseFrameDetectorResourceAdaptationModule::UpdateQualityScalerSettings( - absl::optional qp_thresholds) { - if (qp_thresholds.has_value()) { - quality_scaler_ = - std::make_unique(this, qp_thresholds.value()); - } else { - quality_scaler_ = nullptr; - } -} - -void OveruseFrameDetectorResourceAdaptationModule::AdaptUp(AdaptReason reason) { - if (!has_input_video_) - return; - const AdaptCounter& adapt_counter = GetConstAdaptCounter(); - int num_downgrades = adapt_counter.TotalCount(reason); - if (num_downgrades == 0) - return; - RTC_DCHECK_GT(num_downgrades, 0); - - AdaptationRequest adaptation_request = { - LastInputFrameSizeOrDefault(), - encoder_stats_observer_->GetInputFrameRate(), - AdaptationRequest::Mode::kAdaptUp}; - - bool adapt_up_requested = - last_adaptation_request_ && - last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptUp; - - if (EffectiveDegradataionPreference() == - DegradationPreference::MAINTAIN_FRAMERATE) { - if (adapt_up_requested && - adaptation_request.input_pixel_count_ <= - last_adaptation_request_->input_pixel_count_) { - // Don't request higher resolution if the current resolution is not - // higher than the last time we asked for the resolution to be higher. - return; - } - } - - switch (EffectiveDegradataionPreference()) { - case DegradationPreference::BALANCED: { - // Check if quality should be increased based on bitrate. 
- if (reason == kQuality && - !balanced_settings_.CanAdaptUp(GetVideoCodecTypeOrGeneric(), - LastInputFrameSizeOrDefault(), - target_bitrate_bps_.value_or(0))) { - return; - } - // Try scale up framerate, if higher. - int fps = balanced_settings_.MaxFps(GetVideoCodecTypeOrGeneric(), - LastInputFrameSizeOrDefault()); - if (source_restrictor_->IncreaseFramerate(fps)) { - GetAdaptCounter().DecrementFramerate(reason, fps); - // Reset framerate in case of fewer fps steps down than up. - if (adapt_counter.FramerateCount() == 0 && - fps != std::numeric_limits::max()) { - RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting."; - source_restrictor_->IncreaseFramerate( - std::numeric_limits::max()); - } - break; - } - // Check if resolution should be increased based on bitrate. - if (reason == kQuality && - !balanced_settings_.CanAdaptUpResolution( - GetVideoCodecTypeOrGeneric(), LastInputFrameSizeOrDefault(), - target_bitrate_bps_.value_or(0))) { - return; - } - // Scale up resolution. - ABSL_FALLTHROUGH_INTENDED; - } - case DegradationPreference::MAINTAIN_FRAMERATE: { - // Check if resolution should be increased based on bitrate and - // limits specified by encoder capabilities. - if (reason == kQuality && - !CanAdaptUpResolution(LastInputFrameSizeOrDefault(), - target_bitrate_bps_.value_or(0))) { - return; - } - - // Scale up resolution. - int pixel_count = adaptation_request.input_pixel_count_; - if (adapt_counter.ResolutionCount() == 1) { - RTC_LOG(LS_INFO) << "Removing resolution down-scaling setting."; - pixel_count = std::numeric_limits::max(); - } - if (!source_restrictor_->RequestHigherResolutionThan(pixel_count)) - return; - GetAdaptCounter().DecrementResolution(reason); - break; - } - case DegradationPreference::MAINTAIN_RESOLUTION: { - // Scale up framerate. 
- int fps = adaptation_request.framerate_fps_; - if (adapt_counter.FramerateCount() == 1) { - RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting."; - fps = std::numeric_limits::max(); - } - - const int requested_framerate = - source_restrictor_->RequestHigherFramerateThan(fps); - if (requested_framerate == -1) { - return; - } - GetAdaptCounter().DecrementFramerate(reason); - break; - } - case DegradationPreference::DISABLED: - return; - } - - // Tell the adaptation listener to reconfigure the source for us according to - // the latest adaptation. - MaybeUpdateVideoSourceRestrictions(); - - last_adaptation_request_.emplace(adaptation_request); - - UpdateAdaptationStats(reason); - - RTC_LOG(LS_INFO) << adapt_counter.ToString(); -} - -bool OveruseFrameDetectorResourceAdaptationModule::AdaptDown( - AdaptReason reason) { - if (!has_input_video_) - return false; - AdaptationRequest adaptation_request = { - LastInputFrameSizeOrDefault(), - encoder_stats_observer_->GetInputFrameRate(), - AdaptationRequest::Mode::kAdaptDown}; - - bool downgrade_requested = - last_adaptation_request_ && - last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptDown; - - bool did_adapt = true; - - switch (EffectiveDegradataionPreference()) { - case DegradationPreference::BALANCED: - break; - case DegradationPreference::MAINTAIN_FRAMERATE: - if (downgrade_requested && - adaptation_request.input_pixel_count_ >= - last_adaptation_request_->input_pixel_count_) { - // Don't request lower resolution if the current resolution is not - // lower than the last time we asked for the resolution to be lowered. - return true; - } - break; - case DegradationPreference::MAINTAIN_RESOLUTION: - if (adaptation_request.framerate_fps_ <= 0 || - (downgrade_requested && - adaptation_request.framerate_fps_ < kMinFramerateFps)) { - // If no input fps estimate available, can't determine how to scale down - // framerate. 
Otherwise, don't request lower framerate if we don't have - // a valid frame rate. Since framerate, unlike resolution, is a measure - // we have to estimate, and can fluctuate naturally over time, don't - // make the same kind of limitations as for resolution, but trust the - // overuse detector to not trigger too often. - return true; - } - break; - case DegradationPreference::DISABLED: - return true; - } - - switch (EffectiveDegradataionPreference()) { - case DegradationPreference::BALANCED: { - // Try scale down framerate, if lower. - int fps = balanced_settings_.MinFps(GetVideoCodecTypeOrGeneric(), - LastInputFrameSizeOrDefault()); - if (source_restrictor_->RestrictFramerate(fps)) { - GetAdaptCounter().IncrementFramerate(reason); - // Check if requested fps is higher (or close to) input fps. - absl::optional min_diff = - balanced_settings_.MinFpsDiff(LastInputFrameSizeOrDefault()); - if (min_diff && adaptation_request.framerate_fps_ > 0) { - int fps_diff = adaptation_request.framerate_fps_ - fps; - if (fps_diff < min_diff.value()) { - did_adapt = false; - } - } - break; - } - // Scale down resolution. - ABSL_FALLTHROUGH_INTENDED; - } - case DegradationPreference::MAINTAIN_FRAMERATE: { - // Scale down resolution. - bool min_pixels_reached = false; - if (!source_restrictor_->RequestResolutionLowerThan( - adaptation_request.input_pixel_count_, - encoder_settings_.has_value() - ? encoder_settings_->encoder_info() - .scaling_settings.min_pixels_per_frame - : kDefaultMinPixelsPerFrame, - &min_pixels_reached)) { - if (min_pixels_reached) - encoder_stats_observer_->OnMinPixelLimitReached(); - return true; - } - GetAdaptCounter().IncrementResolution(reason); - break; - } - case DegradationPreference::MAINTAIN_RESOLUTION: { - // Scale down framerate. 
- const int requested_framerate = - source_restrictor_->RequestFramerateLowerThan( - adaptation_request.framerate_fps_); - if (requested_framerate == -1) - return true; - GetAdaptCounter().IncrementFramerate(reason); - break; - } - case DegradationPreference::DISABLED: - RTC_NOTREACHED(); - } - - // Tell the adaptation listener to reconfigure the source for us according to - // the latest adaptation. - MaybeUpdateVideoSourceRestrictions(); - - last_adaptation_request_.emplace(adaptation_request); - - UpdateAdaptationStats(reason); - - RTC_LOG(LS_INFO) << GetConstAdaptCounter().ToString(); - return did_adapt; -} - -// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle -// pipelining encoders better (multiple input frames before something comes -// out). This should effectively turn off CPU adaptations for systems that -// remotely cope with the load right now. -CpuOveruseOptions -OveruseFrameDetectorResourceAdaptationModule::GetCpuOveruseOptions() const { - // This is already ensured by the only caller of this method: - // StartResourceAdaptation(). - RTC_DCHECK(encoder_settings_.has_value()); - CpuOveruseOptions options; - // Hardware accelerated encoders are assumed to be pipelined; give them - // additional overuse time. - if (encoder_settings_->encoder_info().is_hardware_accelerated) { - options.low_encode_usage_threshold_percent = 150; - options.high_encode_usage_threshold_percent = 200; - } - if (experiment_cpu_load_estimator_) { - options.filter_time_ms = 5 * rtc::kNumMillisecsPerSec; - } - return options; -} - -VideoCodecType -OveruseFrameDetectorResourceAdaptationModule::GetVideoCodecTypeOrGeneric() - const { - return encoder_settings_.has_value() - ? 
encoder_settings_->encoder_config().codec_type - : kVideoCodecGeneric; -} - -int OveruseFrameDetectorResourceAdaptationModule::LastInputFrameSizeOrDefault() - const { - // The dependency on this hardcoded resolution is inherited from old code, - // which used this resolution as a stand-in for not knowing the resolution - // yet. - // TODO(hbos): Can we simply DCHECK has_value() before usage instead? Having a - // DCHECK passed all the tests but adding it does change the requirements of - // this class (= not being allowed to call AdaptUp() or AdaptDown() before - // OnFrame()) and deserves a standalone CL. - return last_input_frame_size_.value_or( - VideoStreamEncoder::kDefaultLastFrameInfoWidth * - VideoStreamEncoder::kDefaultLastFrameInfoHeight); -} - -void OveruseFrameDetectorResourceAdaptationModule:: - MaybeUpdateVideoSourceRestrictions() { - VideoSourceRestrictions new_restrictions = ApplyDegradationPreference( - source_restrictor_->source_restrictions(), degradation_preference_); - if (video_source_restrictions_ != new_restrictions) { - video_source_restrictions_ = std::move(new_restrictions); - adaptation_listener_->OnVideoSourceRestrictionsUpdated( - video_source_restrictions_); - MaybeUpdateTargetFrameRate(); - } -} - -void OveruseFrameDetectorResourceAdaptationModule:: - MaybeUpdateTargetFrameRate() { - absl::optional codec_max_frame_rate = - encoder_settings_.has_value() - ? absl::optional( - encoder_settings_->video_codec().maxFramerate) - : absl::nullopt; - // The current target framerate is the maximum frame rate as specified by - // the current codec configuration or any limit imposed by the adaptation - // module. This is used to make sure overuse detection doesn't needlessly - // trigger in low and/or variable framerate scenarios. 
- absl::optional target_frame_rate = - ApplyDegradationPreference(source_restrictor_->source_restrictions(), - degradation_preference_) - .max_frame_rate(); - if (!target_frame_rate.has_value() || - (codec_max_frame_rate.has_value() && - codec_max_frame_rate.value() < target_frame_rate.value())) { - target_frame_rate = codec_max_frame_rate; - } - if (target_frame_rate != target_frame_rate_) { - target_frame_rate_ = target_frame_rate; - if (overuse_detector_is_started_) { - overuse_detector_->OnTargetFramerateUpdated( - target_frame_rate_.has_value() - ? static_cast(target_frame_rate_.value()) - : std::numeric_limits::max()); - } - } -} - -// TODO(nisse): Delete, once AdaptReason and AdaptationReason are merged. -void OveruseFrameDetectorResourceAdaptationModule::UpdateAdaptationStats( - AdaptReason reason) { - switch (reason) { - case kCpu: - encoder_stats_observer_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, - GetActiveCounts(kCpu), GetActiveCounts(kQuality)); - break; - case kQuality: - encoder_stats_observer_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, - GetActiveCounts(kCpu), GetActiveCounts(kQuality)); - break; - } -} - -VideoStreamEncoderObserver::AdaptationSteps -OveruseFrameDetectorResourceAdaptationModule::GetActiveCounts( - AdaptReason reason) { - VideoStreamEncoderObserver::AdaptationSteps counts = - GetConstAdaptCounter().Counts(reason); - switch (reason) { - case kCpu: - if (!IsFramerateScalingEnabled(degradation_preference_)) - counts.num_framerate_reductions = absl::nullopt; - if (!IsResolutionScalingEnabled(degradation_preference_)) - counts.num_resolution_reductions = absl::nullopt; - break; - case kQuality: - if (!IsFramerateScalingEnabled(degradation_preference_) || - !quality_scaler_) { - counts.num_framerate_reductions = absl::nullopt; - } - if (!IsResolutionScalingEnabled(degradation_preference_) || - !quality_scaler_) { - counts.num_resolution_reductions = absl::nullopt; - } 
- break; - } - return counts; -} - -DegradationPreference OveruseFrameDetectorResourceAdaptationModule:: - EffectiveDegradataionPreference() { - // Balanced mode for screenshare works via automatic animation detection: - // Resolution is capped for fullscreen animated content. - // Adapatation is done only via framerate downgrade. - // Thus effective degradation preference is MAINTAIN_RESOLUTION. - return (encoder_settings_.has_value() && - encoder_settings_->encoder_config().content_type == - VideoEncoderConfig::ContentType::kScreen && - degradation_preference_ == DegradationPreference::BALANCED) - ? DegradationPreference::MAINTAIN_RESOLUTION - : degradation_preference_; -} - -OveruseFrameDetectorResourceAdaptationModule::AdaptCounter& -OveruseFrameDetectorResourceAdaptationModule::GetAdaptCounter() { - return adapt_counters_[degradation_preference_]; -} - -const OveruseFrameDetectorResourceAdaptationModule::AdaptCounter& -OveruseFrameDetectorResourceAdaptationModule::GetConstAdaptCounter() { - return adapt_counters_[degradation_preference_]; -} - -absl::optional -OveruseFrameDetectorResourceAdaptationModule::GetQpThresholds() const { - return balanced_settings_.GetQpThresholds(GetVideoCodecTypeOrGeneric(), - LastInputFrameSizeOrDefault()); -} - -bool OveruseFrameDetectorResourceAdaptationModule::CanAdaptUpResolution( - int pixels, - uint32_t bitrate_bps) const { - absl::optional bitrate_limits = - encoder_settings_.has_value() - ? GetEncoderBitrateLimits( - encoder_settings_->encoder_info(), - source_restrictor_->GetHigherResolutionThan(pixels)) - : absl::nullopt; - if (!bitrate_limits.has_value() || bitrate_bps == 0) { - return true; // No limit configured or bitrate provided. 
- } - RTC_DCHECK_GE(bitrate_limits->frame_size_pixels, pixels); - return bitrate_bps >= - static_cast(bitrate_limits->min_start_bitrate_bps); -} - -} // namespace webrtc diff --git a/video/overuse_frame_detector_resource_adaptation_module.h b/video/overuse_frame_detector_resource_adaptation_module.h deleted file mode 100644 index 2bb05f9963..0000000000 --- a/video/overuse_frame_detector_resource_adaptation_module.h +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_OVERUSE_FRAME_DETECTOR_RESOURCE_ADAPTATION_MODULE_H_ -#define VIDEO_OVERUSE_FRAME_DETECTOR_RESOURCE_ADAPTATION_MODULE_H_ - -#include -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/rtp_parameters.h" -#include "api/video/video_frame.h" -#include "api/video/video_source_interface.h" -#include "api/video/video_stream_encoder_observer.h" -#include "api/video_codecs/video_codec.h" -#include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_config.h" -#include "call/adaptation/resource_adaptation_module_interface.h" -#include "rtc_base/experiments/balanced_degradation_settings.h" -#include "video/overuse_frame_detector.h" - -namespace webrtc { - -class VideoStreamEncoder; - -// This class is used by the VideoStreamEncoder and is responsible for adapting -// resolution up or down based on encode usage percent. It keeps track of video -// source settings, adaptation counters and may get influenced by -// VideoStreamEncoder's quality scaler through AdaptUp() and AdaptDown() calls. -// -// This class is single-threaded. 
The caller is responsible for ensuring safe -// usage. -// TODO(hbos): Add unittests specific to this class, it is currently only tested -// indirectly in video_stream_encoder_unittest.cc and other tests exercising -// VideoStreamEncoder. -// TODO(hbos): Create and implement an abstract interface -// ResourceAdaptationModuleInterface and make this class inherit it. Use the -// generic interface in VideoStreamEncoder, unblocking other modules from being -// implemented and used. -class OveruseFrameDetectorResourceAdaptationModule - : public ResourceAdaptationModuleInterface, - public AdaptationObserverInterface { - public: - // The module can be constructed on any sequence, but must be initialized and - // used on a single sequence, e.g. the encoder queue. - OveruseFrameDetectorResourceAdaptationModule( - bool experiment_cpu_load_estimator, - std::unique_ptr overuse_detector, - VideoStreamEncoderObserver* encoder_stats_observer, - ResourceAdaptationModuleListener* adaptation_listener); - ~OveruseFrameDetectorResourceAdaptationModule() override; - - DegradationPreference degradation_preference() const { - return degradation_preference_; - } - QualityScaler* quality_scaler() const { return quality_scaler_.get(); } - - // ResourceAdaptationModuleInterface implementation. 
- void StartResourceAdaptation( - ResourceAdaptationModuleListener* adaptation_listener) override; - void StopResourceAdaptation() override; - void SetHasInputVideo(bool has_input_video) override; - void SetDegradationPreference( - DegradationPreference degradation_preference) override; - void SetEncoderSettings(EncoderSettings encoder_settings) override; - void SetEncoderTargetBitrate( - absl::optional target_bitrate_bps) override; - void ResetVideoSourceRestrictions() override; - - void OnFrame(const VideoFrame& frame) override; - void OnFrameDroppedDueToSize() override; - void OnEncodeStarted(const VideoFrame& cropped_frame, - int64_t time_when_first_seen_us) override; - void OnEncodeCompleted(uint32_t timestamp, - int64_t time_sent_in_us, - int64_t capture_time_us, - absl::optional encode_duration_us) override; - - // Use nullopt to disable quality scaling. - void UpdateQualityScalerSettings( - absl::optional qp_thresholds); - - class AdaptCounter final { - public: - AdaptCounter(); - ~AdaptCounter(); - - // Get number of adaptation downscales for |reason|. - VideoStreamEncoderObserver::AdaptationSteps Counts(int reason) const; - - std::string ToString() const; - - void IncrementFramerate(int reason); - void IncrementResolution(int reason); - void DecrementFramerate(int reason); - void DecrementResolution(int reason); - void DecrementFramerate(int reason, int cur_fps); - - // Gets the total number of downgrades (for all adapt reasons). - int FramerateCount() const; - int ResolutionCount() const; - - // Gets the total number of downgrades for |reason|. - int FramerateCount(int reason) const; - int ResolutionCount(int reason) const; - int TotalCount(int reason) const; - - private: - std::string ToString(const std::vector& counters) const; - int Count(const std::vector& counters) const; - void MoveCount(std::vector* counters, int from_reason); - - // Degradation counters holding number of framerate/resolution reductions - // per adapt reason. 
- std::vector fps_counters_; - std::vector resolution_counters_; - }; - - // AdaptationObserverInterface implementation. Used both "internally" as - // feedback from |overuse_detector_|, and externally from VideoStreamEncoder: - // - It is wired to the VideoStreamEncoder::quality_scaler_. - // - It is invoked by VideoStreamEncoder::MaybeEncodeVideoFrame(). - // TODO(hbos): Decouple quality scaling and resource adaptation, or find an - // interface for reconfiguring externally. - // TODO(hbos): VideoStreamEncoder should not be responsible for any part of - // the adaptation. - void AdaptUp(AdaptReason reason) override; - bool AdaptDown(AdaptReason reason) override; - - // Used by VideoStreamEncoder when ConfigureQualityScaler() occurs and the - // |encoder_stats_observer_| is called outside of this class. - // TODO(hbos): Decouple quality scaling and resource adaptation logic and make - // this method private. - VideoStreamEncoderObserver::AdaptationSteps GetActiveCounts( - AdaptReason reason); - - // Used by VideoStreamEncoder::MaybeEncodeVideoFrame(). - // TODO(hbos): VideoStreamEncoder should not be responsible for any part of - // the adaptation. Move this logic to this module? - const AdaptCounter& GetConstAdaptCounter(); - - // Used by VideoStreamEncoder::ConfigureQualityScaler(). - // TODO(hbos): Decouple quality scaling and resource adaptation logic and - // delete this method. - absl::optional GetQpThresholds() const; - - private: - class VideoSourceRestrictor; - - struct AdaptationRequest { - // The pixel count produced by the source at the time of the adaptation. - int input_pixel_count_; - // Framerate received from the source at the time of the adaptation. - int framerate_fps_; - // Indicates if request was to adapt up or down. 
- enum class Mode { kAdaptUp, kAdaptDown } mode_; - }; - - CpuOveruseOptions GetCpuOveruseOptions() const; - VideoCodecType GetVideoCodecTypeOrGeneric() const; - int LastInputFrameSizeOrDefault() const; - - // Makes |video_source_restrictions_| up-to-date and informs the - // |adaptation_listener_| if restrictions are changed, allowing the listener - // to reconfigure the source accordingly. - void MaybeUpdateVideoSourceRestrictions(); - // Calculates an up-to-date value of |target_frame_rate_| and informs the - // |overuse_detector_| of the new value if it changed and the detector is - // started. - void MaybeUpdateTargetFrameRate(); - - void UpdateAdaptationStats(AdaptReason reason); - DegradationPreference EffectiveDegradataionPreference(); - AdaptCounter& GetAdaptCounter(); - bool CanAdaptUpResolution(int pixels, uint32_t bitrate_bps) const; - - ResourceAdaptationModuleListener* const adaptation_listener_; - const bool experiment_cpu_load_estimator_; - // The restrictions that |adaptation_listener_| is informed of. - VideoSourceRestrictions video_source_restrictions_; - bool has_input_video_; - DegradationPreference degradation_preference_; - // Counters used for deciding if the video resolution or framerate is - // currently restricted, and if so, why, on a per degradation preference - // basis. - // TODO(sprang): Replace this with a state holding a relative overuse measure - // instead, that can be translated into suitable down-scale or fps limit. - std::map adapt_counters_; - const BalancedDegradationSettings balanced_settings_; - // Stores a snapshot of the last adaptation request triggered by an AdaptUp - // or AdaptDown signal. - absl::optional last_adaptation_request_; - // Keeps track of source restrictions that this adaptation module outputs. 
- const std::unique_ptr source_restrictor_; - const std::unique_ptr overuse_detector_; - bool overuse_detector_is_started_; - absl::optional last_input_frame_size_; - absl::optional target_frame_rate_; - absl::optional target_bitrate_bps_; - std::unique_ptr quality_scaler_; - absl::optional encoder_settings_; - VideoStreamEncoderObserver* const encoder_stats_observer_; -}; - -} // namespace webrtc - -#endif // VIDEO_OVERUSE_FRAME_DETECTOR_RESOURCE_ADAPTATION_MODULE_H_ diff --git a/video/pc_full_stack_tests.cc b/video/pc_full_stack_tests.cc index d9915de3d1..d515a5271b 100644 --- a/video/pc_full_stack_tests.cc +++ b/video/pc_full_stack_tests.cc @@ -12,11 +12,15 @@ #include #include +#include "api/media_stream_interface.h" #include "api/test/create_network_emulation_manager.h" +#include "api/test/create_peer_connection_quality_test_frame_generator.h" #include "api/test/create_peerconnection_quality_test_fixture.h" +#include "api/test/frame_generator_interface.h" #include "api/test/network_emulation_manager.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" +#include "api/test/time_controller.h" #include "call/simulated_network.h" #include "media/base/vp9_profile.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" @@ -35,8 +39,6 @@ using VideoConfig = webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; using AudioConfig = webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::AudioConfig; -using VideoGeneratorType = - webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoGeneratorType; using ScreenShareConfig = webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::ScreenShareConfig; using VideoSimulcastConfig = @@ -47,8 +49,6 @@ using VideoCodecConfig = namespace { constexpr int kTestDurationSec = 45; -constexpr char kVp8TrustedRateControllerFieldTrial[] = - "WebRTC-LibvpxVp8TrustedRateController/Enabled/"; EmulatedNetworkNode* CreateEmulatedNodeWithConfig( NetworkEmulationManager* emulation, @@ 
-77,12 +77,13 @@ CreateTwoNetworkLinks(NetworkEmulationManager* emulation, std::unique_ptr CreateTestFixture(const std::string& test_case_name, + TimeController& time_controller, std::pair network_links, rtc::FunctionView alice_configurer, rtc::FunctionView bob_configurer) { auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture( - test_case_name, /*audio_quality_analyzer=*/nullptr, + test_case_name, time_controller, /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr); fixture->AddPeer(network_links.first->network_thread(), network_links.first->network_manager(), alice_configurer); @@ -105,40 +106,24 @@ std::string ClipNameToClipPath(const char* clip_name) { } // namespace -class PCGenericDescriptorTest : public ::testing::TestWithParam { - public: - PCGenericDescriptorTest() - : field_trial_(AppendFieldTrials(GetParam())), - generic_descriptor_enabled_( - field_trial::IsEnabled("WebRTC-GenericDescriptor")) {} - - std::string GetTestName(std::string base) { - if (generic_descriptor_enabled_) - base += "_generic_descriptor"; - return base; - } - - private: - test::ScopedFieldTrials field_trial_; - bool generic_descriptor_enabled_; -}; - #if defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_foreman_cif_net_delay_0_0_plr_0_VP9", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), 
std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; @@ -147,23 +132,26 @@ TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) { fixture->Run(std::move(run_params)); } -TEST_P(PCGenericDescriptorTest, ForemanCifPlr5Vp9) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_VP9_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; config.loss_percent = 5; config.queue_delay_ms = 50; auto fixture = CreateTestFixture( - GetTestName("pc_foreman_cif_delay_50_0_plr_5_VP9"), + "pc_foreman_cif_delay_50_0_plr_5_VP9_generic_descriptor", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; @@ -176,27 +164,29 @@ TEST_P(PCGenericDescriptorTest, ForemanCifPlr5Vp9) { #if (defined(WEBRTC_ANDROID) && \ (defined(WEBRTC_ARCH_ARM64) || defined(WEBRTC_ARCH_ARM))) || \ (defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64)) -#define 
MAYBE_GeneratorWithoutPacketLossVp9Profile2 \ - DISABLED_GeneratorWithoutPacketLossVp9Profile2 +#define MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 \ + DISABLED_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 #else -#define MAYBE_GeneratorWithoutPacketLossVp9Profile2 \ - GeneratorWithoutPacketLossVp9Profile2 +#define MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 \ + Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 #endif -TEST(PCFullStackTest, MAYBE_GeneratorWithoutPacketLossVp9Profile2) { +TEST(PCFullStackTest, MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_generator_net_delay_0_0_plr_0_VP9Profile2", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.generator = VideoGeneratorType::kI010; video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateSquareFrameGenerator( + video, test::FrameGeneratorInterface::OutputType::kI010); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}})}; @@ -237,101 +227,76 @@ TEST(PCFullStackTest, GeneratorWithoutPacketLossMultiplexI420AFrame) { */ #endif // defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ParisQcifWithoutPacketLoss) { +TEST(PCFullStackTest, Pc_Net_Delay_0_0_Plr_0) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - "pc_net_delay_0_0_plr_0", + "pc_net_delay_0_0_plr_0", 
*network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(176, 144, 30); - video.input_file_name = ClipNameToClipPath("paris_qcif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("paris_qcif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST_P(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - GetTestName("pc_foreman_cif_net_delay_0_0_plr_0"), + "pc_foreman_cif_net_delay_0_0_plr_0_generic_descriptor", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); - video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); - }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); - run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); -} - -TEST_P(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) { - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - 
BuiltInNetworkBehaviorConfig config; - auto fixture = CreateTestFixture( - GetTestName("pc_foreman_cif_30kbps_net_delay_0_0_plr_0"), - CreateTwoNetworkLinks(network_emulation_manager.get(), config), - [](PeerConfigurer* alice) { - VideoConfig video(352, 288, 10); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); - - PeerConnectionInterface::BitrateParameters bitrate_params; - bitrate_params.min_bitrate_bps = 30000; - bitrate_params.current_bitrate_bps = 30000; - bitrate_params.max_bitrate_bps = 30000; - alice->SetBitrateParameters(bitrate_params); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -// TODO(webrtc:9722): Remove when experiment is cleaned up. 
-TEST_P(PCGenericDescriptorTest, - ForemanCif30kbpsWithoutPacketLossTrustedRateControl) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; auto fixture = CreateTestFixture( - GetTestName( - "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl"), + "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_generic_descriptor", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 10); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); - - PeerConnectionInterface::BitrateParameters bitrate_params; - bitrate_params.min_bitrate_bps = 30000; - bitrate_params.current_bitrate_bps = 30000; - bitrate_params.max_bitrate_bps = 30000; - alice->SetBitrateParameters(bitrate_params); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + + BitrateSettings bitrate_settings; + bitrate_settings.min_bitrate_bps = 30000; + bitrate_settings.start_bitrate_bps = 30000; + bitrate_settings.max_bitrate_bps = 30000; + alice->SetBitrateSettings(bitrate_settings); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -339,29 +304,31 @@ TEST_P(PCGenericDescriptorTest, } // Link capacity below default start rate. 
-TEST(PCFullStackTest, ForemanCifLink150kbpsWithoutPacketLoss) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; config.link_capacity_kbps = 150; auto fixture = CreateTestFixture( "pc_foreman_cif_link_150kbps_net_delay_0_0_plr_0", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifLink130kbps100msDelay1PercentPacketLossUlpfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_130kbps_Delay100ms_Loss1_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -370,22 +337,24 @@ TEST(PCFullStackTest, ForemanCifLink130kbps100msDelay1PercentPacketLossUlpfec) { config.loss_percent = 1; auto fixture = CreateTestFixture( "pc_foreman_cif_link_130kbps_delay100ms_loss1_ulpfec", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + 
auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = true; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifLink50kbps100msDelay1PercentPacketLossUlpfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_50kbps_Delay100ms_Loss1_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -394,15 +363,17 @@ TEST(PCFullStackTest, ForemanCifLink50kbps100msDelay1PercentPacketLossUlpfec) { config.loss_percent = 1; auto fixture = CreateTestFixture( "pc_foreman_cif_link_50kbps_delay100ms_loss1_ulpfec", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = true; @@ -410,7 +381,8 @@ TEST(PCFullStackTest, ForemanCifLink50kbps100msDelay1PercentPacketLossUlpfec) { } // Restricted network and encoder overproducing by 30%. 
-TEST(PCFullStackTest, ForemanCifLink150kbpsBadRateController) { +TEST(PCFullStackTest, + Pc_Foreman_Cif_Link_150kbps_Delay100ms_30pkts_Queue_Overshoot30) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -419,15 +391,17 @@ TEST(PCFullStackTest, ForemanCifLink150kbpsBadRateController) { config.queue_delay_ms = 100; auto fixture = CreateTestFixture( "pc_foreman_cif_link_150kbps_delay100ms_30pkts_queue_overshoot30", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -439,7 +413,7 @@ TEST(PCFullStackTest, ForemanCifLink150kbpsBadRateController) { // Packet rate and loss are low enough that loss will happen with ~3s interval. // This triggers protection overhead to toggle between zero and non-zero. // Link queue is restrictive enough to trigger loss on probes. 
-TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -449,15 +423,17 @@ TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { config.loss_percent = 1; auto fixture = CreateTestFixture( "pc_foreman_cif_link_250kbps_delay100ms_10pkts_loss1", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -465,53 +441,59 @@ TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) { fixture->Run(std::move(run_params)); } -TEST_P(PCGenericDescriptorTest, ForemanCifPlr5) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; config.loss_percent = 5; config.queue_delay_ms = 50; auto fixture = CreateTestFixture( - GetTestName("pc_foreman_cif_delay_50_0_plr_5"), + "pc_foreman_cif_delay_50_0_plr_5_generic_descriptor", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - 
video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST_P(PCGenericDescriptorTest, ForemanCifPlr5Ulpfec) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_Ulpfec_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; config.loss_percent = 5; config.queue_delay_ms = 50; auto fixture = CreateTestFixture( - GetTestName("pc_foreman_cif_delay_50_0_plr_5_ulpfec"), + "pc_foreman_cif_delay_50_0_plr_5_ulpfec_generic_descriptor", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = true; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifPlr5Flexfec) { 
+TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -519,22 +501,24 @@ TEST(PCFullStackTest, ForemanCifPlr5Flexfec) { config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_flexfec", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = true; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbpsPlr3Flexfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -543,22 +527,24 @@ TEST(PCFullStackTest, ForemanCif500kbpsPlr3Flexfec) { config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_delay_50_0_plr_3_flexfec", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, 
ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = true; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbpsPlr3Ulpfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -567,15 +553,17 @@ TEST(PCFullStackTest, ForemanCif500kbpsPlr3Ulpfec) { config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_delay_50_0_plr_3_ulpfec", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = true; @@ -583,78 +571,85 @@ TEST(PCFullStackTest, ForemanCif500kbpsPlr3Ulpfec) { } #if defined(WEBRTC_USE_H264) -TEST(PCFullStackTest, ForemanCifWithoutPacketlossH264) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( 
"pc_foreman_cif_net_delay_0_0_plr_0_H264", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kH264CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) { +TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; auto fixture = CreateTestFixture( "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_H264", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 10); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); - - PeerConnectionInterface::BitrateParameters bitrate_params; - bitrate_params.min_bitrate_bps = 30000; - bitrate_params.current_bitrate_bps = 30000; - bitrate_params.max_bitrate_bps = 30000; - alice->SetBitrateParameters(bitrate_params); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); + + BitrateSettings bitrate_settings; + 
bitrate_settings.min_bitrate_bps = 30000; + bitrate_settings.start_bitrate_bps = 30000; + bitrate_settings.max_bitrate_bps = 30000; + alice->SetBitrateSettings(bitrate_settings); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kH264CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST_P(PCGenericDescriptorTest, ForemanCifPlr5H264) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; config.loss_percent = 5; config.queue_delay_ms = 50; auto fixture = CreateTestFixture( - GetTestName("pc_foreman_cif_delay_50_0_plr_5_H264"), + "pc_foreman_cif_delay_50_0_plr_5_H264_generic_descriptor", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kH264CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { test::ScopedFieldTrials override_field_trials( 
AppendFieldTrials("WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/")); @@ -665,22 +660,24 @@ TEST(PCFullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) { config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_H264_sps_pps_idr", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kH264CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCifPlr5H264Flexfec) { +TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -688,15 +685,17 @@ TEST(PCFullStackTest, ForemanCifPlr5H264Flexfec) { config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_H264_flexfec", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, 
[](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kH264CodecName)}; run_params.use_flex_fec = true; run_params.use_ulp_fec = false; @@ -705,7 +704,7 @@ TEST(PCFullStackTest, ForemanCifPlr5H264Flexfec) { // Ulpfec with H264 is an unsupported combination, so this test is only useful // for debugging. It is therefore disabled by default. -TEST(PCFullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { +TEST(PCFullStackTest, DISABLED_Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -713,15 +712,17 @@ TEST(PCFullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_delay_50_0_plr_5_H264_ulpfec", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kH264CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = true; @@ -729,7 +730,7 @@ TEST(PCFullStackTest, DISABLED_ForemanCifPlr5H264Ulpfec) { } #endif // defined(WEBRTC_USE_H264) -TEST(PCFullStackTest, ForemanCif500kbps) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps) { std::unique_ptr network_emulation_manager = 
CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -737,23 +738,24 @@ TEST(PCFullStackTest, ForemanCif500kbps) { config.queue_delay_ms = 0; config.link_capacity_kbps = 500; auto fixture = CreateTestFixture( - "pc_foreman_cif_500kbps", + "pc_foreman_cif_500kbps", *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbpsLimitedQueue) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -762,22 +764,24 @@ TEST(PCFullStackTest, ForemanCif500kbpsLimitedQueue) { config.link_capacity_kbps = 500; auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_32pkts_queue", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), 
std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ForemanCif500kbps100ms) { +TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -786,22 +790,25 @@ TEST(PCFullStackTest, ForemanCif500kbps100ms) { config.link_capacity_kbps = 500; auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_100ms", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST_P(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) { +TEST(PCGenericDescriptorTest, + Pc_Foreman_Cif_500kbps_100ms_32pkts_Queue_Generic_Descriptor) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -809,16 +816,18 @@ TEST_P(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) { config.queue_delay_ms = 100; config.link_capacity_kbps = 500; 
auto fixture = CreateTestFixture( - GetTestName("pc_foreman_cif_500kbps_100ms_32pkts_queue"), + "pc_foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -846,7 +855,7 @@ TEST(PCFullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) { } */ -TEST(PCFullStackTest, ForemanCif1000kbps100msLimitedQueue) { +TEST(PCFullStackTest, Pc_Foreman_Cif_1000kbps_100ms_32pkts_Queue) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -855,15 +864,17 @@ TEST(PCFullStackTest, ForemanCif1000kbps100msLimitedQueue) { config.link_capacity_kbps = 1000; auto fixture = CreateTestFixture( "pc_foreman_cif_1000kbps_100ms_32pkts_queue", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(352, 288, 30); - video.input_file_name = ClipNameToClipPath("foreman_cif"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("foreman_cif")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - 
RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -871,7 +882,7 @@ TEST(PCFullStackTest, ForemanCif1000kbps100msLimitedQueue) { } // TODO(sprang): Remove this if we have the similar ModerateLimits below? -TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { +TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -880,45 +891,17 @@ TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueue) { config.link_capacity_kbps = 2000; auto fixture = CreateTestFixture( "pc_conference_motion_hd_2000kbps_100ms_32pkts_queue", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 50); - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); - run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); -} - -// TODO(webrtc:9722): Remove when experiment is cleaned up. 
-TEST(PCFullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) { - test::ScopedFieldTrials override_field_trials( - AppendFieldTrials(kVp8TrustedRateControllerFieldTrial)); - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - BuiltInNetworkBehaviorConfig config; - config.queue_length_packets = 50; - config.loss_percent = 3; - config.queue_delay_ms = 100; - config.link_capacity_kbps = 2000; - auto fixture = CreateTestFixture( - "pc_conference_motion_hd_1tl_moderate_limits_trusted_rate_ctrl", - CreateTwoNetworkLinks(network_emulation_manager.get(), config), - [](PeerConfigurer* alice) { - VideoConfig video(1280, 720, 50); - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); - video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); - }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -927,7 +910,7 @@ TEST(PCFullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) { /* // TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework -TEST_P(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { +TEST(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging conf_motion_hd; conf_motion_hd.call.send_side_bwe = true; @@ -941,7 +924,7 @@ TEST_P(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { false, false, false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; conf_motion_hd.analyzer = { - GetTestName("conference_motion_hd_2tl_moderate_limits"), 0.0, 0.0, + "conference_motion_hd_2tl_moderate_limits_generic_descriptor", 0.0, 0.0, kTestDurationSec}; conf_motion_hd.config->queue_length_packets = 50; 
conf_motion_hd.config->loss_percent = 3; @@ -1052,7 +1035,7 @@ TEST(PCFullStackTest, */ #if defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { +TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -1061,16 +1044,17 @@ TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { config.link_capacity_kbps = 2000; auto fixture = CreateTestFixture( "pc_conference_motion_hd_2000kbps_100ms_32pkts_queue_vp9", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 50); - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; @@ -1080,42 +1064,47 @@ TEST(PCFullStackTest, ConferenceMotionHd2000kbps100msLimitedQueueVP9) { } #endif -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_NoConferenceMode) { +TEST(PCFullStackTest, Pc_Screenshare_Slides_No_Conference_Mode) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_screenshare_slides_no_conference_mode", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), 
[](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 5); - video.screen_share_config = ScreenShareConfig(TimeDelta::seconds(10)); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + video.content_hint = VideoTrackInterface::ContentHint::kText; + auto frame_generator = CreateScreenShareFrameGenerator( + video, ScreenShareConfig(TimeDelta::Seconds(10))); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL) { +TEST(PCFullStackTest, Pc_Screenshare_Slides) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - "pc_screenshare_slides", + "pc_screenshare_slides", *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 5); - video.screen_share_config = ScreenShareConfig(TimeDelta::seconds(10)); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + video.content_hint = VideoTrackInterface::ContentHint::kText; + auto frame_generator = CreateScreenShareFrameGenerator( + video, ScreenShareConfig(TimeDelta::Seconds(10))); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -1125,46 +1114,52 @@ TEST(PCFullStackTest, 
ScreenshareSlidesVP8_2TL) { // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on Win/Mac. #if !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_NoConferenceMode) { +TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast_No_Conference_Mode) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_screenshare_slides_simulcast_no_conference_mode", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 30); - video.screen_share_config = ScreenShareConfig(TimeDelta::seconds(10)); video.simulcast_config = VideoSimulcastConfig(2, 1); video.temporal_layers_count = 2; video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + video.content_hint = VideoTrackInterface::ContentHint::kText; + auto frame_generator = CreateScreenShareFrameGenerator( + video, ScreenShareConfig(TimeDelta::Seconds(10))); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { +TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_screenshare_slides_simulcast", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 30); - video.screen_share_config = 
ScreenShareConfig(TimeDelta::seconds(10)); video.simulcast_config = VideoSimulcastConfig(2, 1); video.temporal_layers_count = 2; video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + video.content_hint = VideoTrackInterface::ContentHint::kText; + auto frame_generator = CreateScreenShareFrameGenerator( + video, ScreenShareConfig(TimeDelta::Seconds(10))); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -1177,40 +1172,6 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) { #if !defined(WEBRTC_MAC) // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on Win/Mac. #if !defined(WEBRTC_WIN) -const char kScreenshareSimulcastVariableFramerateExperiment[] = - "WebRTC-VP8VariableFramerateScreenshare/" - "Enabled,min_fps:5.0,min_qp:15,undershoot:30/"; -// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework -TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_Variable_Framerate) { - test::ScopedFieldTrials field_trial( - AppendFieldTrials(kScreenshareSimulcastVariableFramerateExperiment)); - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging screenshare; - screenshare.call.send_side_bwe = true; - screenshare.screenshare[0] = {true, false, 10}; - screenshare.video[0] = {true, 1850, 1110, 30, 800000, 2500000, - 2500000, false, "VP8", 2, 1, 400000, - false, false, false, ""}; - screenshare.analyzer = {"screenshare_slides_simulcast_variable_framerate", - 0.0, 0.0, kTestDurationSec}; - ParamsWithLogging screenshare_params_high; - screenshare_params_high.video[0] = { - true, 1850, 1110, 60, 600000, 1250000, 1250000, false, - "VP8", 2, 0, 400000, false, false, false, ""}; - 
VideoQualityTest::Params screenshare_params_low; - screenshare_params_low.video[0] = {true, 1850, 1110, 5, 30000, 200000, - 1000000, false, "VP8", 2, 0, 400000, - false, false, false, ""}; - - std::vector streams = { - VideoQualityTest::DefaultVideoStream(screenshare_params_low, 0), - VideoQualityTest::DefaultVideoStream(screenshare_params_high, 0)}; - screenshare.ss[0] = { - streams, 1, 1, 0, InterLayerPredMode::kOn, std::vector(), - false}; - fixture->RunWithAnalyzer(screenshare); -} - // TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_low) { auto fixture = CreateVideoQualityTestFixture(); @@ -1258,7 +1219,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Scroll) { } // TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework -TEST_P(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) { +TEST(PCGenericDescriptorTest, Screenshare_Slides_Lossy_Net_Generic_Descriptor) { auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging screenshare; screenshare.call.send_side_bwe = true; @@ -1266,12 +1227,12 @@ TEST_P(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) { 1000000, false, "VP8", 2, 1, 400000, false, false, false, ""}; screenshare.screenshare[0] = {true, false, 10}; - screenshare.analyzer = {GetTestName("screenshare_slides_lossy_net"), 0.0, 0.0, - kTestDurationSec}; + screenshare.analyzer = {"screenshare_slides_lossy_net_generic_descriptor", + 0.0, 0.0, kTestDurationSec}; screenshare.config->loss_percent = 5; screenshare.config->queue_delay_ms = 200; screenshare.config->link_capacity_kbps = 500; - screenshare.call.generic_descriptor = GenericDescriptorEnabled(); + screenshare.call.generic_descriptor = true; fixture->RunWithAnalyzer(screenshare); } @@ -1384,7 +1345,7 @@ ParamsWithLogging::Video SimulcastVp8VideoLow() { #if defined(RTC_ENABLE_VP9) -TEST(PCFullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { +TEST(PCFullStackTest, 
Pc_Screenshare_Slides_Vp9_3sl_High_Fps) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9InterLayerPred/" "Enabled,inter_layer_pred_mode:on/")); @@ -1392,17 +1353,20 @@ TEST(PCFullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( "pc_screenshare_slides_vp9_3sl_high_fps", + *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1850, 1110, 30); video.stream_label = "alice-video"; - video.screen_share_config = ScreenShareConfig(TimeDelta::seconds(10)); video.simulcast_config = VideoSimulcastConfig(3, 2); - alice->AddVideoConfig(std::move(video)); + video.content_hint = VideoTrackInterface::ContentHint::kText; + auto frame_generator = CreateScreenShareFrameGenerator( + video, ScreenShareConfig(TimeDelta::Seconds(10))); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; @@ -1411,86 +1375,27 @@ TEST(PCFullStackTest, ScreenshareSlidesVP9_3SL_High_Fps) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, ScreenshareSlidesVP9_3SL_Variable_Fps) { - webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-VP9VariableFramerateScreenshare/" - "Enabled,min_qp:32,min_fps:5.0,undershoot:30,frames_" - "before_steady_state:5/" - "WebRTC-Vp9InterLayerPred/" - "Enabled,inter_layer_pred_mode:on/")); - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - auto fixture = CreateTestFixture( - "pc_screenshare_slides_vp9_3sl_variable_fps", - 
CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), - [](PeerConfigurer* alice) { - VideoConfig video(1850, 1110, 30); - video.stream_label = "alice-video"; - video.screen_share_config = ScreenShareConfig(TimeDelta::seconds(10)); - video.simulcast_config = VideoSimulcastConfig(3, 2); - alice->AddVideoConfig(std::move(video)); - }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); - run_params.video_codecs = {VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ - {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); -} - -TEST(PCFullStackTest, VP9SVC_3SL_High) { +TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9InterLayerPred/" "Enabled,inter_layer_pred_mode:on/")); std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - "pc_vp9svc_3sl_high", + "pc_vp9svc_3sl_high", *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 30); video.stream_label = "alice-video"; - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); video.simulcast_config = VideoSimulcastConfig(3, 2); video.temporal_layers_count = 3; - alice->AddVideoConfig(std::move(video)); - }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); - run_params.video_codecs = {VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ - {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); -} - -TEST(PCFullStackTest, VP9SVC_3SL_Medium) { 
- webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-Vp9InterLayerPred/" - "Enabled,inter_layer_pred_mode:on/")); - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - auto fixture = CreateTestFixture( - "pc_vp9svc_3sl_medium", - CreateTwoNetworkLinks(network_emulation_manager.get(), - BuiltInNetworkBehaviorConfig()), - [](PeerConfigurer* alice) { - VideoConfig video(1280, 720, 30); - video.stream_label = "alice-video"; - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); - video.simulcast_config = VideoSimulcastConfig(3, 1); - video.temporal_layers_count = 3; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; @@ -1499,27 +1404,27 @@ TEST(PCFullStackTest, VP9SVC_3SL_Medium) { fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, VP9SVC_3SL_Low) { +TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { webrtc::test::ScopedFieldTrials override_trials( AppendFieldTrials("WebRTC-Vp9InterLayerPred/" "Enabled,inter_layer_pred_mode:on/")); std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - "pc_vp9svc_3sl_low", + "pc_vp9svc_3sl_low", *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 30); video.stream_label = "alice-video"; - video.input_file_name = - 
ClipNameToClipPath("ConferenceMotion_1280_720_50"); video.simulcast_config = VideoSimulcastConfig(3, 0); video.temporal_layers_count = 3; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig( /*name=*/cricket::kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}; @@ -1601,8 +1506,7 @@ TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted) { // TODO(webrtc:9722): Remove when experiment is cleaned up. TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/" - "WebRTC-LibvpxVp9TrustedRateController/Enabled/")); + AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; simulcast.call.send_side_bwe = true; @@ -1624,11 +1528,11 @@ TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { // Android bots can't handle FullHD, so disable the test. // TODO(bugs.webrtc.org/9220): Investigate source of flakiness on Mac. 
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_MAC) -#define MAYBE_SimulcastFullHdOveruse DISABLED_SimulcastFullHdOveruse +#define MAYBE_Pc_Simulcast_HD_High DISABLED_Pc_Simulcast_HD_High #else -#define MAYBE_SimulcastFullHdOveruse SimulcastFullHdOveruse +#define MAYBE_Pc_Simulcast_HD_High Pc_Simulcast_HD_High #endif -TEST(PCFullStackTest, MAYBE_SimulcastFullHdOveruse) { +TEST(PCFullStackTest, MAYBE_Pc_Simulcast_HD_High) { webrtc::test::ScopedFieldTrials override_trials(AppendFieldTrials( "WebRTC-ForceSimulatedOveruseIntervalMs/1000-50000-300/")); std::unique_ptr network_emulation_manager = @@ -1637,25 +1541,24 @@ TEST(PCFullStackTest, MAYBE_SimulcastFullHdOveruse) { config.loss_percent = 0; config.queue_delay_ms = 100; auto fixture = CreateTestFixture( - "pc_simulcast_HD_high", + "pc_simulcast_HD_high", *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(1920, 1080, 30); - video.generator = VideoGeneratorType::kDefault; video.simulcast_config = VideoSimulcastConfig(3, 2); video.temporal_layers_count = 3; video.stream_label = "alice-video"; alice->AddVideoConfig(std::move(video)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, SimulcastVP8_3SL_High) { +TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_High) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; @@ -1663,67 +1566,43 @@ TEST(PCFullStackTest, SimulcastVP8_3SL_High) { config.queue_delay_ms = 100; auto fixture = CreateTestFixture( "pc_simulcast_vp8_3sl_high", + *network_emulation_manager->time_controller(), 
CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 30); - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); video.simulcast_config = VideoSimulcastConfig(3, 2); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); - }, - [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); - run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; - run_params.use_flex_fec = false; - run_params.use_ulp_fec = false; - fixture->Run(std::move(run_params)); -} - -TEST(PCFullStackTest, SimulcastVP8_3SL_Medium) { - std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(); - BuiltInNetworkBehaviorConfig config; - config.loss_percent = 0; - config.queue_delay_ms = 100; - auto fixture = CreateTestFixture( - "pc_simulcast_vp8_3sl_medium", - CreateTwoNetworkLinks(network_emulation_manager.get(), config), - [](PeerConfigurer* alice) { - VideoConfig video(1280, 720, 30); - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); - video.simulcast_config = VideoSimulcastConfig(3, 1); - video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } -TEST(PCFullStackTest, SimulcastVP8_3SL_Low) { +TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_Low) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; 
config.loss_percent = 0; config.queue_delay_ms = 100; auto fixture = CreateTestFixture( - "pc_simulcast_vp8_3sl_low", + "pc_simulcast_vp8_3sl_low", *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { VideoConfig video(1280, 720, 30); - video.input_file_name = - ClipNameToClipPath("ConferenceMotion_1280_720_50"); video.simulcast_config = VideoSimulcastConfig(3, 0); video.stream_label = "alice-video"; - alice->AddVideoConfig(std::move(video)); + auto frame_generator = CreateFromYuvFileFrameGenerator( + video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); + alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - RunParams run_params(TimeDelta::seconds(kTestDurationSec)); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); run_params.video_codecs = {VideoCodecConfig(cricket::kVp8CodecName)}; run_params.use_flex_fec = false; run_params.use_ulp_fec = false; @@ -1859,12 +1738,6 @@ TEST(PCFullStackTest, MAYBE_LargeRoomVP8_50thumb) { } */ -INSTANTIATE_TEST_SUITE_P( - PCFullStackTest, - PCGenericDescriptorTest, - ::testing::Values("WebRTC-GenericDescriptor/Disabled/", - "WebRTC-GenericDescriptor/Enabled/")); - class PCDualStreamsTest : public ::testing::TestWithParam {}; /* diff --git a/video/picture_id_tests.cc b/video/picture_id_tests.cc index 19c1141b0a..298919c096 100644 --- a/video/picture_id_tests.cc +++ b/video/picture_id_tests.cc @@ -22,6 +22,7 @@ #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "test/call_test.h" @@ -49,12 +50,12 @@ class PictureIdObserver : public test::RtpRtcpObserver { num_ssrcs_to_observe_(1) {} void SetExpectedSsrcs(size_t num_expected_ssrcs) { - rtc::CritScope lock(&crit_); + MutexLock 
lock(&mutex_); num_ssrcs_to_observe_ = num_expected_ssrcs; } void ResetObservedSsrcs() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Do not clear the timestamp and picture_id, to ensure that we check // consistency between reinits and recreations. num_packets_sent_.clear(); @@ -62,7 +63,7 @@ class PictureIdObserver : public test::RtpRtcpObserver { } void SetMaxExpectedPictureIdGap(int max_expected_picture_id_gap) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); max_expected_picture_id_gap_ = max_expected_picture_id_gap; // Expect smaller gap for |tl0_pic_idx| (running index for temporal_idx 0). max_expected_tl0_idx_gap_ = max_expected_picture_id_gap_ / 2; @@ -120,7 +121,7 @@ class PictureIdObserver : public test::RtpRtcpObserver { // Verify continuity and monotonicity of picture_id sequence. void VerifyPictureId(const ParsedPacket& current, const ParsedPacket& last) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_) { + RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_) { if (current.timestamp == last.timestamp) { EXPECT_EQ(last.picture_id, current.picture_id); return; // Same frame. @@ -143,7 +144,7 @@ class PictureIdObserver : public test::RtpRtcpObserver { } void VerifyTl0Idx(const ParsedPacket& current, const ParsedPacket& last) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(&crit_) { + RTC_EXCLUSIVE_LOCKS_REQUIRED(&mutex_) { if (current.tl0_pic_idx == kNoTl0PicIdx || current.temporal_idx == kNoTemporalIdx) { return; // No temporal layers. 
@@ -169,7 +170,7 @@ class PictureIdObserver : public test::RtpRtcpObserver { } Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ParsedPacket parsed; if (!ParsePayload(packet, length, &parsed)) @@ -196,14 +197,14 @@ class PictureIdObserver : public test::RtpRtcpObserver { return SEND_PACKET; } - rtc::CriticalSection crit_; + Mutex mutex_; const std::unique_ptr depacketizer_; - std::map last_observed_packet_ RTC_GUARDED_BY(crit_); - std::map num_packets_sent_ RTC_GUARDED_BY(crit_); - int max_expected_picture_id_gap_ RTC_GUARDED_BY(crit_); - int max_expected_tl0_idx_gap_ RTC_GUARDED_BY(crit_); - size_t num_ssrcs_to_observe_ RTC_GUARDED_BY(crit_); - std::set observed_ssrcs_ RTC_GUARDED_BY(crit_); + std::map last_observed_packet_ RTC_GUARDED_BY(mutex_); + std::map num_packets_sent_ RTC_GUARDED_BY(mutex_); + int max_expected_picture_id_gap_ RTC_GUARDED_BY(mutex_); + int max_expected_tl0_idx_gap_ RTC_GUARDED_BY(mutex_); + size_t num_ssrcs_to_observe_ RTC_GUARDED_BY(mutex_); + std::set observed_ssrcs_ RTC_GUARDED_BY(mutex_); }; class PictureIdTest : public test::CallTest, diff --git a/video/quality_scaling_tests.cc b/video/quality_scaling_tests.cc index 19b9e8c36c..b72b25b86b 100644 --- a/video/quality_scaling_tests.cc +++ b/video/quality_scaling_tests.cc @@ -56,6 +56,7 @@ class QualityScalingTest : public test::CallTest { protected: void RunTest(VideoEncoderFactory* encoder_factory, const std::string& payload_name, + const std::vector& streams_active, int start_bps, bool automatic_resize, bool frame_dropping, @@ -67,6 +68,7 @@ class QualityScalingTest : public test::CallTest { void QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory, const std::string& payload_name, + const std::vector& streams_active, int start_bps, bool automatic_resize, bool frame_dropping, @@ -77,6 +79,7 @@ void QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory, public: 
ScalingObserver(VideoEncoderFactory* encoder_factory, const std::string& payload_name, + const std::vector& streams_active, int start_bps, bool automatic_resize, bool frame_dropping, @@ -84,6 +87,7 @@ void QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory, : SendTest(expect_adaptation ? kDefaultTimeoutMs : kTimeoutMs), encoder_factory_(encoder_factory), payload_name_(payload_name), + streams_active_(streams_active), start_bps_(start_bps), automatic_resize_(automatic_resize), frame_dropping_(frame_dropping), @@ -108,6 +112,10 @@ void QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory, bitrate_config->start_bitrate_bps = start_bps_; } + size_t GetNumVideoStreams() const override { + return streams_active_.size(); + } + void ModifyVideoConfigs( VideoSendStream::Config* send_config, std::vector* receive_configs, @@ -117,7 +125,15 @@ void QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory, send_config->rtp.payload_type = kVideoSendPayloadType; const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_); encoder_config->codec_type = codec_type; - encoder_config->max_bitrate_bps = start_bps_; + encoder_config->max_bitrate_bps = + std::max(start_bps_, encoder_config->max_bitrate_bps); + double scale_factor = 1.0; + for (int i = streams_active_.size() - 1; i >= 0; --i) { + VideoStream& stream = encoder_config->simulcast_layers[i]; + stream.active = streams_active_[i]; + stream.scale_resolution_down_by = scale_factor; + scale_factor *= 2.0; + } SetEncoderSpecific(encoder_config, codec_type, automatic_resize_, frame_dropping_); } @@ -129,12 +145,13 @@ void QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory, VideoEncoderFactory* const encoder_factory_; const std::string payload_name_; + const std::vector streams_active_; const int start_bps_; const bool automatic_resize_; const bool frame_dropping_; const bool expect_adaptation_; - } test(encoder_factory, payload_name, start_bps, automatic_resize, - 
frame_dropping, expect_adaptation); + } test(encoder_factory, payload_name, streams_active, start_bps, + automatic_resize, frame_dropping, expect_adaptation); RunBaseTest(&test); } @@ -150,7 +167,7 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp8) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, + RunTest(&encoder_factory, "VP8", {true}, kHighStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } @@ -165,7 +182,7 @@ TEST_F(QualityScalingTest, NoAdaptDownForHighQpWithResizeOff_Vp8) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, + RunTest(&encoder_factory, "VP8", {true}, kHighStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } @@ -182,7 +199,7 @@ TEST_F(QualityScalingTest, test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, + RunTest(&encoder_factory, "VP8", {true}, kHighStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } @@ -197,7 +214,7 @@ TEST_F(QualityScalingTest, NoAdaptDownForNormalQp_Vp8) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, + RunTest(&encoder_factory, "VP8", {true}, kHighStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } @@ -212,10 +229,57 @@ TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kLowStartBps, kAutomaticResize, + RunTest(&encoder_factory, "VP8", {true}, kLowStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } +TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_Simulcast) { + // VP8 QP thresholds, low:1, high:127 -> 
normal QP. + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + // QualityScaler disabled. + const bool kAutomaticResize = false; + const bool kFrameDropping = true; + const bool kExpectAdapt = false; + + test::FunctionVideoEncoderFactory encoder_factory( + []() { return VP8Encoder::Create(); }); + RunTest(&encoder_factory, "VP8", {true, true}, kLowStartBps, kAutomaticResize, + kFrameDropping, kExpectAdapt); +} + +TEST_F(QualityScalingTest, + AdaptsDownForLowStartBitrate_SimulcastOneActiveHighRes) { + // VP8 QP thresholds, low:1, high:127 -> normal QP. + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + // QualityScaler enabled. + const bool kAutomaticResize = true; + const bool kFrameDropping = true; + const bool kExpectAdapt = true; + + test::FunctionVideoEncoderFactory encoder_factory( + []() { return VP8Encoder::Create(); }); + RunTest(&encoder_factory, "VP8", {false, false, true}, kLowStartBps, + kAutomaticResize, kFrameDropping, kExpectAdapt); +} + +TEST_F(QualityScalingTest, + NoAdaptDownForLowStartBitrate_SimulcastOneActiveLowRes) { + // VP8 QP thresholds, low:1, high:127 -> normal QP. + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + // QualityScaler enabled. + const bool kAutomaticResize = true; + const bool kFrameDropping = true; + const bool kExpectAdapt = false; + + test::FunctionVideoEncoderFactory encoder_factory( + []() { return VP8Encoder::Create(); }); + RunTest(&encoder_factory, "VP8", {true, false, false}, kLowStartBps, + kAutomaticResize, kFrameDropping, kExpectAdapt); +} + TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateWithScalingOff) { // VP8 QP thresholds, low:1, high:127 -> normal QP. 
test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); @@ -227,13 +291,14 @@ TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateWithScalingOff) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kLowStartBps, kAutomaticResize, + RunTest(&encoder_factory, "VP8", {true}, kLowStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } TEST_F(QualityScalingTest, NoAdaptDownForHighQp_Vp9) { // VP9 QP thresholds, low:1, high:1 -> high QP. - test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd); + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Disabled/"); // QualityScaler always disabled. const bool kAutomaticResize = true; @@ -242,7 +307,7 @@ TEST_F(QualityScalingTest, NoAdaptDownForHighQp_Vp9) { test::FunctionVideoEncoderFactory encoder_factory( []() { return VP9Encoder::Create(); }); - RunTest(&encoder_factory, "VP9", kHighStartBps, kAutomaticResize, + RunTest(&encoder_factory, "VP9", {true}, kHighStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } @@ -258,7 +323,7 @@ TEST_F(QualityScalingTest, AdaptsDownForHighQp_H264) { test::FunctionVideoEncoderFactory encoder_factory( []() { return H264Encoder::Create(cricket::VideoCodec("H264")); }); - RunTest(&encoder_factory, "H264", kHighStartBps, kAutomaticResize, + RunTest(&encoder_factory, "H264", {true}, kHighStartBps, kAutomaticResize, kFrameDropping, kExpectAdapt); } #endif // defined(WEBRTC_USE_H264) diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc index 82951c8a50..7aec685a1c 100644 --- a/video/receive_statistics_proxy.cc +++ b/video/receive_statistics_proxy.cc @@ -133,7 +133,7 @@ void ReceiveStatisticsProxy::UpdateHistograms( // earlier. 
RTC_DCHECK_RUN_ON(&decode_thread_); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); char log_stream_buf[8 * 1024]; rtc::SimpleStringBuilder log_stream(log_stream_buf); @@ -623,7 +623,7 @@ ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs( } VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Get current frame rates here, as only updating them on new frames prevents // us from ever correctly displaying frame rate of 0. int64_t now_ms = clock_->TimeInMilliseconds(); @@ -654,13 +654,13 @@ VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { } void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); stats_.current_payload_type = payload_type; } void ReceiveStatisticsProxy::OnDecoderImplementationName( const char* implementation_name) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); stats_.decoder_implementation_name = implementation_name; } @@ -671,7 +671,7 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( int jitter_buffer_ms, int min_playout_delay_ms, int render_delay_ms) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); stats_.max_decode_ms = max_decode_ms; stats_.current_delay_ms = current_delay_ms; stats_.target_delay_ms = target_delay_ms; @@ -687,13 +687,13 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( } void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); num_unique_frames_.emplace(num_unique_frames); } void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( const TimingFrameInfo& info) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (info.flags != VideoSendTiming::kInvalid) { int64_t now_ms = clock_->TimeInMilliseconds(); timing_frame_info_counter_.Add(info, now_ms); @@ -714,14 +714,14 @@ void 
ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (stats_.ssrc != ssrc) return; stats_.rtcp_packet_type_counts = packet_counter; } void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we // receive stats from one of them. if (stats_.ssrc != ssrc) @@ -733,7 +733,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame, absl::optional qp, int32_t decode_time_ms, VideoContentType content_type) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); uint64_t now_ms = clock_->TimeInMilliseconds(); @@ -799,7 +799,7 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { RTC_DCHECK_GT(width, 0); RTC_DCHECK_GT(height, 0); int64_t now_ms = clock_->TimeInMilliseconds(); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); video_quality_observer_->OnRenderedFrame(frame, now_ms); @@ -833,7 +833,7 @@ void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) { void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, int64_t sync_offset_ms, double estimated_freq_khz) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); sync_offset_counter_.Add(std::abs(sync_offset_ms)); stats_.sync_offset_ms = sync_offset_ms; last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms; @@ -851,7 +851,7 @@ void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, size_t size_bytes, VideoContentType content_type) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (is_keyframe) { ++stats_.frame_counts.key_frames; } else { @@ -881,13 +881,13 @@ void 
ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, } void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); stats_.frames_dropped += frames_dropped; } void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { RTC_DCHECK_RUN_ON(&decode_thread_); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); last_codec_type_ = codec_type; if (last_codec_type_ == kVideoCodecVP8 && qp != -1) { qp_counters_.vp8.Add(qp); @@ -898,7 +898,7 @@ void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { void ReceiveStatisticsProxy::OnStreamInactive() { // TODO(sprang): Figure out any other state that should be reset. - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Don't report inter-frame delay if stream was paused. last_decoded_frame_time_ms_.reset(); video_quality_observer_->OnStreamInactive(); @@ -906,7 +906,7 @@ void ReceiveStatisticsProxy::OnStreamInactive() { void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); avg_rtt_ms_ = avg_rtt_ms; } diff --git a/video/receive_statistics_proxy.h b/video/receive_statistics_proxy.h index 02043d6944..8b94c32b69 100644 --- a/video/receive_statistics_proxy.h +++ b/video/receive_statistics_proxy.h @@ -20,12 +20,12 @@ #include "call/video_receive_stream.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_coding_defines.h" -#include "rtc_base/critical_section.h" #include "rtc_base/numerics/histogram_percentile_counter.h" #include "rtc_base/numerics/moving_max_counter.h" #include "rtc_base/numerics/sample_counter.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/rate_tracker.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" #include "video/quality_threshold.h" @@ -124,19 +124,19 @@ class 
ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, rtc::HistogramPercentileCounter interframe_delay_percentiles; }; - void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Removes info about old frames and then updates the framerate. void UpdateFramerate(int64_t now_ms) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateDecodeTimeHistograms(int width, int height, int decode_time_ms) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( - int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* const clock_; // Ownership of this object lies with the owner of the ReceiveStatisticsProxy @@ -150,52 +150,52 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, const int64_t start_ms_; const bool enable_decode_time_histograms_; - rtc::CriticalSection crit_; - int64_t last_sample_time_ RTC_GUARDED_BY(crit_); - QualityThreshold fps_threshold_ RTC_GUARDED_BY(crit_); - QualityThreshold qp_threshold_ RTC_GUARDED_BY(crit_); - QualityThreshold variance_threshold_ RTC_GUARDED_BY(crit_); - rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(crit_); - int num_bad_states_ RTC_GUARDED_BY(crit_); - int num_certain_states_ RTC_GUARDED_BY(crit_); + mutable Mutex mutex_; + int64_t last_sample_time_ RTC_GUARDED_BY(mutex_); + QualityThreshold fps_threshold_ RTC_GUARDED_BY(mutex_); + QualityThreshold qp_threshold_ RTC_GUARDED_BY(mutex_); + QualityThreshold variance_threshold_ RTC_GUARDED_BY(mutex_); + rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(mutex_); + int num_bad_states_ RTC_GUARDED_BY(mutex_); + int num_certain_states_ RTC_GUARDED_BY(mutex_); // Note: The |stats_.rtp_stats| member is not used or populated by this class. 
- mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(crit_); - RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(crit_); - RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(crit_); - rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(crit_); - rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(crit_); - rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(crit_); - rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(crit_); - rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(crit_); - rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(crit_); - rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(crit_); - rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(crit_); + mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(mutex_); + RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(mutex_); + RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(mutex_); + rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(mutex_); + rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(mutex_); + rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(mutex_); + rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(mutex_); + rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(mutex_); + rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(mutex_); + rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(mutex_); + rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(mutex_); std::unique_ptr video_quality_observer_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); mutable rtc::MovingMaxCounter interframe_delay_max_moving_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); std::map content_specific_stats_ - RTC_GUARDED_BY(crit_); - MaxCounter freq_offset_counter_ RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); + MaxCounter freq_offset_counter_ RTC_GUARDED_BY(mutex_); QpCounters qp_counters_ RTC_GUARDED_BY(decode_thread_); - int64_t avg_rtt_ms_ RTC_GUARDED_BY(crit_); - mutable std::map frame_window_ RTC_GUARDED_BY(&crit_); - 
VideoContentType last_content_type_ RTC_GUARDED_BY(&crit_); - VideoCodecType last_codec_type_ RTC_GUARDED_BY(&crit_); - absl::optional first_frame_received_time_ms_ RTC_GUARDED_BY(&crit_); - absl::optional first_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_); - absl::optional last_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_); - size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&crit_); - int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(&crit_); + int64_t avg_rtt_ms_ RTC_GUARDED_BY(mutex_); + mutable std::map frame_window_ RTC_GUARDED_BY(&mutex_); + VideoContentType last_content_type_ RTC_GUARDED_BY(&mutex_); + VideoCodecType last_codec_type_ RTC_GUARDED_BY(&mutex_); + absl::optional first_frame_received_time_ms_ RTC_GUARDED_BY(&mutex_); + absl::optional first_decoded_frame_time_ms_ RTC_GUARDED_BY(&mutex_); + absl::optional last_decoded_frame_time_ms_ RTC_GUARDED_BY(&mutex_); + size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(&mutex_); + int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(&mutex_); // Mutable because calling Max() on MovingMaxCounter is not const. Yet it is // called from const GetStats(). mutable rtc::MovingMaxCounter timing_frame_info_counter_ - RTC_GUARDED_BY(&crit_); - absl::optional num_unique_frames_ RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(&mutex_); + absl::optional num_unique_frames_ RTC_GUARDED_BY(mutex_); absl::optional last_estimated_playout_ntp_timestamp_ms_ - RTC_GUARDED_BY(&crit_); + RTC_GUARDED_BY(&mutex_); absl::optional last_estimated_playout_time_ms_ - RTC_GUARDED_BY(&crit_); + RTC_GUARDED_BY(&mutex_); rtc::ThreadChecker decode_thread_; rtc::ThreadChecker network_thread_; rtc::ThreadChecker main_thread_; diff --git a/video/receive_statistics_proxy2.cc b/video/receive_statistics_proxy2.cc new file mode 100644 index 0000000000..3cce3c8ea4 --- /dev/null +++ b/video/receive_statistics_proxy2.cc @@ -0,0 +1,1070 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/receive_statistics_proxy2.h" + +#include +#include +#include + +#include "modules/video_coding/include/video_codec_interface.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" +#include "video/video_receive_stream2.h" + +namespace webrtc { +namespace internal { +namespace { +// Periodic time interval for processing samples for |freq_offset_counter_|. +const int64_t kFreqOffsetProcessIntervalMs = 40000; + +// Configuration for bad call detection. +const int kBadCallMinRequiredSamples = 10; +const int kMinSampleLengthMs = 990; +const int kNumMeasurements = 10; +const int kNumMeasurementsVariance = kNumMeasurements * 1.5; +const float kBadFraction = 0.8f; +// For fps: +// Low means low enough to be bad, high means high enough to be good +const int kLowFpsThreshold = 12; +const int kHighFpsThreshold = 14; +// For qp and fps variance: +// Low means low enough to be good, high means high enough to be bad +const int kLowQpThresholdVp8 = 60; +const int kHighQpThresholdVp8 = 70; +const int kLowVarianceThreshold = 1; +const int kHighVarianceThreshold = 2; + +// Some metrics are reported as a maximum over this period. +// This should be synchronized with a typical getStats polling interval in +// the clients. +const int kMovingMaxWindowMs = 1000; + +// How large window we use to calculate the framerate/bitrate. 
+const int kRateStatisticsWindowSizeMs = 1000; + +// Some sane ballpark estimate for maximum common value of inter-frame delay. +// Values below that will be stored explicitly in the array, +// values above - in the map. +const int kMaxCommonInterframeDelayMs = 500; + +const char* UmaPrefixForContentType(VideoContentType content_type) { + if (videocontenttypehelpers::IsScreenshare(content_type)) + return "WebRTC.Video.Screenshare"; + return "WebRTC.Video"; +} + +std::string UmaSuffixForContentType(VideoContentType content_type) { + char ss_buf[1024]; + rtc::SimpleStringBuilder ss(ss_buf); + int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type); + if (simulcast_id > 0) { + ss << ".S" << simulcast_id - 1; + } + int experiment_id = videocontenttypehelpers::GetExperimentId(content_type); + if (experiment_id > 0) { + ss << ".ExperimentGroup" << experiment_id - 1; + } + return ss.str(); +} + +// TODO(https://bugs.webrtc.org/11572): Workaround for an issue with some +// rtc::Thread instances and/or implementations that don't register as the +// current task queue. 
+bool IsCurrentTaskQueueOrThread(TaskQueueBase* task_queue) { + if (task_queue->IsCurrent()) + return true; + + rtc::Thread* current_thread = rtc::ThreadManager::Instance()->CurrentThread(); + if (!current_thread) + return false; + + return static_cast(current_thread) == task_queue; +} + +} // namespace + +ReceiveStatisticsProxy::ReceiveStatisticsProxy( + const VideoReceiveStream::Config* config, + Clock* clock, + TaskQueueBase* worker_thread) + : clock_(clock), + start_ms_(clock->TimeInMilliseconds()), + enable_decode_time_histograms_( + !field_trial::IsEnabled("WebRTC-DecodeTimeHistogramsKillSwitch")), + last_sample_time_(clock->TimeInMilliseconds()), + fps_threshold_(kLowFpsThreshold, + kHighFpsThreshold, + kBadFraction, + kNumMeasurements), + qp_threshold_(kLowQpThresholdVp8, + kHighQpThresholdVp8, + kBadFraction, + kNumMeasurements), + variance_threshold_(kLowVarianceThreshold, + kHighVarianceThreshold, + kBadFraction, + kNumMeasurementsVariance), + num_bad_states_(0), + num_certain_states_(0), + remote_ssrc_(config->rtp.remote_ssrc), + // 1000ms window, scale 1000 for ms to s. 
+ decode_fps_estimator_(1000, 1000), + renders_fps_estimator_(1000, 1000), + render_fps_tracker_(100, 10u), + render_pixel_tracker_(100, 10u), + video_quality_observer_(new VideoQualityObserver()), + interframe_delay_max_moving_(kMovingMaxWindowMs), + freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs), + last_content_type_(VideoContentType::UNSPECIFIED), + last_codec_type_(kVideoCodecVP8), + num_delayed_frames_rendered_(0), + sum_missed_render_deadline_ms_(0), + timing_frame_info_counter_(kMovingMaxWindowMs), + worker_thread_(worker_thread) { + RTC_DCHECK(worker_thread); + decode_queue_.Detach(); + incoming_render_queue_.Detach(); + stats_.ssrc = config->rtp.remote_ssrc; +} + +ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { + RTC_DCHECK_RUN_ON(&main_thread_); +} + +void ReceiveStatisticsProxy::UpdateHistograms( + absl::optional fraction_lost, + const StreamDataCounters& rtp_stats, + const StreamDataCounters* rtx_stats) { + RTC_DCHECK_RUN_ON(&main_thread_); + + char log_stream_buf[8 * 1024]; + rtc::SimpleStringBuilder log_stream(log_stream_buf); + + int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; + + if (stats_.frame_counts.key_frames > 0 || + stats_.frame_counts.delta_frames > 0) { + RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.ReceiveStreamLifetimeInSeconds", + stream_duration_sec); + log_stream << "WebRTC.Video.ReceiveStreamLifetimeInSeconds " + << stream_duration_sec << '\n'; + } + + log_stream << "Frames decoded " << stats_.frames_decoded << '\n'; + + if (num_unique_frames_) { + int num_dropped_frames = *num_unique_frames_ - stats_.frames_decoded; + RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.Receiver", + num_dropped_frames); + log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames + << '\n'; + } + + if (fraction_lost && stream_duration_sec >= metrics::kMinRunTimeInSeconds) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.ReceivedPacketsLostInPercent", + *fraction_lost); + log_stream << 
"WebRTC.Video.ReceivedPacketsLostInPercent " << *fraction_lost + << '\n'; + } + + if (first_decoded_frame_time_ms_) { + const int64_t elapsed_ms = + (clock_->TimeInMilliseconds() - *first_decoded_frame_time_ms_); + if (elapsed_ms >= + metrics::kMinRunTimeInSeconds * rtc::kNumMillisecsPerSec) { + int decoded_fps = static_cast( + (stats_.frames_decoded * 1000.0f / elapsed_ms) + 0.5f); + RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond", + decoded_fps); + log_stream << "WebRTC.Video.DecodedFramesPerSecond " << decoded_fps + << '\n'; + + const uint32_t frames_rendered = stats_.frames_rendered; + if (frames_rendered > 0) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.DelayedFramesToRenderer", + static_cast(num_delayed_frames_rendered_ * + 100 / frames_rendered)); + if (num_delayed_frames_rendered_ > 0) { + RTC_HISTOGRAM_COUNTS_1000( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs", + static_cast(sum_missed_render_deadline_ms_ / + num_delayed_frames_rendered_)); + } + } + } + } + + const int kMinRequiredSamples = 200; + int samples = static_cast(render_fps_tracker_.TotalSampleCount()); + if (samples >= kMinRequiredSamples) { + int rendered_fps = round(render_fps_tracker_.ComputeTotalRate()); + RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.RenderFramesPerSecond", + rendered_fps); + log_stream << "WebRTC.Video.RenderFramesPerSecond " << rendered_fps << '\n'; + RTC_HISTOGRAM_COUNTS_100000( + "WebRTC.Video.RenderSqrtPixelsPerSecond", + round(render_pixel_tracker_.ComputeTotalRate())); + } + + absl::optional sync_offset_ms = + sync_offset_counter_.Avg(kMinRequiredSamples); + if (sync_offset_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", + *sync_offset_ms); + log_stream << "WebRTC.Video.AVSyncOffsetInMs " << *sync_offset_ms << '\n'; + } + AggregatedStats freq_offset_stats = freq_offset_counter_.GetStats(); + if (freq_offset_stats.num_samples > 0) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.RtpToNtpFreqOffsetInKhz", + freq_offset_stats.average); + 
log_stream << "WebRTC.Video.RtpToNtpFreqOffsetInKhz " + << freq_offset_stats.ToString() << '\n'; + } + + int num_total_frames = + stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames; + if (num_total_frames >= kMinRequiredSamples) { + int num_key_frames = stats_.frame_counts.key_frames; + int key_frames_permille = + (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; + RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille", + key_frames_permille); + log_stream << "WebRTC.Video.KeyFramesReceivedInPermille " + << key_frames_permille << '\n'; + } + + absl::optional qp = qp_counters_.vp8.Avg(kMinRequiredSamples); + if (qp) { + RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *qp); + log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *qp << '\n'; + } + + absl::optional decode_ms = decode_time_counter_.Avg(kMinRequiredSamples); + if (decode_ms) { + RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", *decode_ms); + log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n'; + } + absl::optional jb_delay_ms = + jitter_buffer_delay_counter_.Avg(kMinRequiredSamples); + if (jb_delay_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs", + *jb_delay_ms); + log_stream << "WebRTC.Video.JitterBufferDelayInMs " << *jb_delay_ms << '\n'; + } + + absl::optional target_delay_ms = + target_delay_counter_.Avg(kMinRequiredSamples); + if (target_delay_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs", + *target_delay_ms); + log_stream << "WebRTC.Video.TargetDelayInMs " << *target_delay_ms << '\n'; + } + absl::optional current_delay_ms = + current_delay_counter_.Avg(kMinRequiredSamples); + if (current_delay_ms) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", + *current_delay_ms); + log_stream << "WebRTC.Video.CurrentDelayInMs " << *current_delay_ms << '\n'; + } + absl::optional delay_ms = delay_counter_.Avg(kMinRequiredSamples); + if (delay_ms) + 
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", *delay_ms); + + // Aggregate content_specific_stats_ by removing experiment or simulcast + // information; + std::map aggregated_stats; + for (const auto& it : content_specific_stats_) { + // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes). + VideoContentType content_type = it.first; + if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) { + // Aggregate on experiment id. + videocontenttypehelpers::SetExperimentId(&content_type, 0); + aggregated_stats[content_type].Add(it.second); + } + // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes). + content_type = it.first; + if (videocontenttypehelpers::GetExperimentId(content_type) > 0) { + // Aggregate on simulcast id. + videocontenttypehelpers::SetSimulcastId(&content_type, 0); + aggregated_stats[content_type].Add(it.second); + } + // Calculate aggregated metrics (no suffixes. Aggregated on everything). + content_type = it.first; + videocontenttypehelpers::SetSimulcastId(&content_type, 0); + videocontenttypehelpers::SetExperimentId(&content_type, 0); + aggregated_stats[content_type].Add(it.second); + } + + for (const auto& it : aggregated_stats) { + // For the metric Foo we report the following slices: + // WebRTC.Video.Foo, + // WebRTC.Video.Screenshare.Foo, + // WebRTC.Video.Foo.S[0-3], + // WebRTC.Video.Foo.ExperimentGroup[0-7], + // WebRTC.Video.Screenshare.Foo.S[0-3], + // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7]. + auto content_type = it.first; + auto stats = it.second; + std::string uma_prefix = UmaPrefixForContentType(content_type); + std::string uma_suffix = UmaSuffixForContentType(content_type); + // Metrics can be sliced on either simulcast id or experiment id but not + // both. 
+ RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 || + videocontenttypehelpers::GetSimulcastId(content_type) == 0); + + absl::optional e2e_delay_ms = + stats.e2e_delay_counter.Avg(kMinRequiredSamples); + if (e2e_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms); + log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " " + << *e2e_delay_ms << '\n'; + } + absl::optional e2e_delay_max_ms = stats.e2e_delay_counter.Max(); + if (e2e_delay_max_ms && e2e_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_100000( + uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms); + log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " " + << *e2e_delay_max_ms << '\n'; + } + absl::optional interframe_delay_ms = + stats.interframe_delay_counter.Avg(kMinRequiredSamples); + if (interframe_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".InterframeDelayInMs" + uma_suffix, + *interframe_delay_ms); + log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " " + << *interframe_delay_ms << '\n'; + } + absl::optional interframe_delay_max_ms = + stats.interframe_delay_counter.Max(); + if (interframe_delay_max_ms && interframe_delay_ms) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix, + *interframe_delay_max_ms); + log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " " + << *interframe_delay_max_ms << '\n'; + } + + absl::optional interframe_delay_95p_ms = + stats.interframe_delay_percentiles.GetPercentile(0.95f); + if (interframe_delay_95p_ms && interframe_delay_ms != -1) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix, + *interframe_delay_95p_ms); + log_stream << uma_prefix << ".InterframeDelay95PercentileInMs" + << uma_suffix << " " << *interframe_delay_95p_ms << '\n'; + } + + absl::optional width = 
stats.received_width.Avg(kMinRequiredSamples); + if (width) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width); + log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " " + << *width << '\n'; + } + + absl::optional height = stats.received_height.Avg(kMinRequiredSamples); + if (height) { + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height); + log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " " + << *height << '\n'; + } + + if (content_type != VideoContentType::UNSPECIFIED) { + // Don't report these 3 metrics unsliced, as more precise variants + // are reported separately in this method. + float flow_duration_sec = stats.flow_duration_ms / 1000.0; + if (flow_duration_sec >= metrics::kMinRunTimeInSeconds) { + int media_bitrate_kbps = static_cast(stats.total_media_bytes * 8 / + flow_duration_sec / 1000); + RTC_HISTOGRAM_COUNTS_SPARSE_10000( + uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix, + media_bitrate_kbps); + log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix + << " " << media_bitrate_kbps << '\n'; + } + + int num_total_frames = + stats.frame_counts.key_frames + stats.frame_counts.delta_frames; + if (num_total_frames >= kMinRequiredSamples) { + int num_key_frames = stats.frame_counts.key_frames; + int key_frames_permille = + (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; + RTC_HISTOGRAM_COUNTS_SPARSE_1000( + uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix, + key_frames_permille); + log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix + << " " << key_frames_permille << '\n'; + } + + absl::optional qp = stats.qp_counter.Avg(kMinRequiredSamples); + if (qp) { + RTC_HISTOGRAM_COUNTS_SPARSE_200( + uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp); + log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " " + << *qp << '\n'; + } + } + } + + 
StreamDataCounters rtp_rtx_stats = rtp_stats; + if (rtx_stats) + rtp_rtx_stats.Add(*rtx_stats); + + int64_t elapsed_sec = + rtp_rtx_stats.TimeSinceFirstPacketInMs(clock_->TimeInMilliseconds()) / + 1000; + if (elapsed_sec >= metrics::kMinRunTimeInSeconds) { + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.BitrateReceivedInKbps", + static_cast(rtp_rtx_stats.transmitted.TotalBytes() * 8 / + elapsed_sec / 1000)); + int media_bitrate_kbs = static_cast(rtp_stats.MediaPayloadBytes() * 8 / + elapsed_sec / 1000); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.MediaBitrateReceivedInKbps", + media_bitrate_kbs); + log_stream << "WebRTC.Video.MediaBitrateReceivedInKbps " + << media_bitrate_kbs << '\n'; + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.PaddingBitrateReceivedInKbps", + static_cast(rtp_rtx_stats.transmitted.padding_bytes * 8 / + elapsed_sec / 1000)); + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.RetransmittedBitrateReceivedInKbps", + static_cast(rtp_rtx_stats.retransmitted.TotalBytes() * 8 / + elapsed_sec / 1000)); + if (rtx_stats) { + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.RtxBitrateReceivedInKbps", + static_cast(rtx_stats->transmitted.TotalBytes() * 8 / + elapsed_sec / 1000)); + } + const RtcpPacketTypeCounter& counters = stats_.rtcp_packet_type_counts; + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.NackPacketsSentPerMinute", + counters.nack_packets * 60 / elapsed_sec); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.FirPacketsSentPerMinute", + counters.fir_packets * 60 / elapsed_sec); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.PliPacketsSentPerMinute", + counters.pli_packets * 60 / elapsed_sec); + if (counters.nack_requests > 0) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.UniqueNackRequestsSentInPercent", + counters.UniqueNackRequestsInPercent()); + } + } + + if (num_certain_states_ >= kBadCallMinRequiredSamples) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Any", + 100 * num_bad_states_ / num_certain_states_); + } + absl::optional fps_fraction = + 
fps_threshold_.FractionHigh(kBadCallMinRequiredSamples); + if (fps_fraction) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRate", + static_cast(100 * (1 - *fps_fraction))); + } + absl::optional variance_fraction = + variance_threshold_.FractionHigh(kBadCallMinRequiredSamples); + if (variance_fraction) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.FrameRateVariance", + static_cast(100 * *variance_fraction)); + } + absl::optional qp_fraction = + qp_threshold_.FractionHigh(kBadCallMinRequiredSamples); + if (qp_fraction) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.BadCall.Qp", + static_cast(100 * *qp_fraction)); + } + + RTC_LOG(LS_INFO) << log_stream.str(); + video_quality_observer_->UpdateHistograms( + videocontenttypehelpers::IsScreenshare(last_content_type_)); +} + +void ReceiveStatisticsProxy::QualitySample(Timestamp now) { + RTC_DCHECK_RUN_ON(&main_thread_); + + if (last_sample_time_ + kMinSampleLengthMs > now.ms()) + return; + + double fps = + render_fps_tracker_.ComputeRateForInterval(now.ms() - last_sample_time_); + absl::optional qp = qp_sample_.Avg(1); + + bool prev_fps_bad = !fps_threshold_.IsHigh().value_or(true); + bool prev_qp_bad = qp_threshold_.IsHigh().value_or(false); + bool prev_variance_bad = variance_threshold_.IsHigh().value_or(false); + bool prev_any_bad = prev_fps_bad || prev_qp_bad || prev_variance_bad; + + fps_threshold_.AddMeasurement(static_cast(fps)); + if (qp) + qp_threshold_.AddMeasurement(*qp); + absl::optional fps_variance_opt = fps_threshold_.CalculateVariance(); + double fps_variance = fps_variance_opt.value_or(0); + if (fps_variance_opt) { + variance_threshold_.AddMeasurement(static_cast(fps_variance)); + } + + bool fps_bad = !fps_threshold_.IsHigh().value_or(true); + bool qp_bad = qp_threshold_.IsHigh().value_or(false); + bool variance_bad = variance_threshold_.IsHigh().value_or(false); + bool any_bad = fps_bad || qp_bad || variance_bad; + + if (!prev_any_bad && any_bad) { + RTC_LOG(LS_INFO) << "Bad call (any) start: " 
<< now.ms(); + } else if (prev_any_bad && !any_bad) { + RTC_LOG(LS_INFO) << "Bad call (any) end: " << now.ms(); + } + + if (!prev_fps_bad && fps_bad) { + RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now.ms(); + } else if (prev_fps_bad && !fps_bad) { + RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now.ms(); + } + + if (!prev_qp_bad && qp_bad) { + RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now.ms(); + } else if (prev_qp_bad && !qp_bad) { + RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now.ms(); + } + + if (!prev_variance_bad && variance_bad) { + RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now.ms(); + } else if (prev_variance_bad && !variance_bad) { + RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now.ms(); + } + + RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: " + << (now.ms() - last_sample_time_) << " fps: " << fps + << " fps_bad: " << fps_bad << " qp: " << qp.value_or(-1) + << " qp_bad: " << qp_bad + << " variance_bad: " << variance_bad + << " fps_variance: " << fps_variance; + + last_sample_time_ = now.ms(); + qp_sample_.Reset(); + + if (fps_threshold_.IsHigh() || variance_threshold_.IsHigh() || + qp_threshold_.IsHigh()) { + if (any_bad) + ++num_bad_states_; + ++num_certain_states_; + } +} + +void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const { + RTC_DCHECK_RUN_ON(&main_thread_); + + int64_t old_frames_ms = now_ms - kRateStatisticsWindowSizeMs; + while (!frame_window_.empty() && + frame_window_.begin()->first < old_frames_ms) { + frame_window_.erase(frame_window_.begin()); + } + + size_t framerate = + (frame_window_.size() * 1000 + 500) / kRateStatisticsWindowSizeMs; + + stats_.network_frame_rate = static_cast(framerate); +} + +void ReceiveStatisticsProxy::UpdateDecodeTimeHistograms( + int width, + int height, + int decode_time_ms) const { + RTC_DCHECK_RUN_ON(&main_thread_); + + bool is_4k = (width == 3840 || width == 4096) && height == 2160; + bool is_hd = width == 1920 && height == 1080; + // Only update histograms for 4k/HD 
and VP9/H264. + if ((is_4k || is_hd) && (last_codec_type_ == kVideoCodecVP9 || + last_codec_type_ == kVideoCodecH264)) { + const std::string kDecodeTimeUmaPrefix = + "WebRTC.Video.DecodeTimePerFrameInMs."; + + // Each histogram needs its own line for it to not be reused in the wrong + // way when the format changes. + if (last_codec_type_ == kVideoCodecVP9) { + bool is_sw_decoder = + stats_.decoder_implementation_name.compare(0, 6, "libvpx") == 0; + if (is_4k) { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.4k.Hw", + decode_time_ms); + } else { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "Vp9.Hd.Hw", + decode_time_ms); + } + } else { + bool is_sw_decoder = + stats_.decoder_implementation_name.compare(0, 6, "FFmpeg") == 0; + if (is_4k) { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.4k.Hw", + decode_time_ms); + + } else { + if (is_sw_decoder) + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Sw", + decode_time_ms); + else + RTC_HISTOGRAM_COUNTS_1000(kDecodeTimeUmaPrefix + "H264.Hd.Hw", + decode_time_ms); + } + } + } +} + +absl::optional +ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs( + int64_t now_ms) const { + RTC_DCHECK_RUN_ON(&main_thread_); + if (!last_estimated_playout_ntp_timestamp_ms_ || + !last_estimated_playout_time_ms_) { + return absl::nullopt; + } + int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_; + return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms; +} + +VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const { + RTC_DCHECK_RUN_ON(&main_thread_); + + // Like VideoReceiveStream::GetStats, called on the worker thread from + // 
StatsCollector::ExtractMediaInfo via worker_thread()->Invoke(). + // WebRtcVideoChannel::GetStats(), GetVideoReceiverInfo. + + // Get current frame rates here, as only updating them on new frames prevents + // us from ever correctly displaying frame rate of 0. + int64_t now_ms = clock_->TimeInMilliseconds(); + UpdateFramerate(now_ms); + + stats_.render_frame_rate = renders_fps_estimator_.Rate(now_ms).value_or(0); + stats_.decode_frame_rate = decode_fps_estimator_.Rate(now_ms).value_or(0); + + if (last_decoded_frame_time_ms_) { + // Avoid using a newer timestamp than might be pending for decoded frames. + // If we do use now_ms, we might roll the max window to a value that is + // higher than that of a decoded frame timestamp that we haven't yet + // captured the data for (i.e. pending call to OnDecodedFrame). + stats_.interframe_delay_max_ms = + interframe_delay_max_moving_.Max(*last_decoded_frame_time_ms_) + .value_or(-1); + } else { + // We're paused. Avoid changing the state of |interframe_delay_max_moving_|. 
+ stats_.interframe_delay_max_ms = -1; + } + + stats_.freeze_count = video_quality_observer_->NumFreezes(); + stats_.pause_count = video_quality_observer_->NumPauses(); + stats_.total_freezes_duration_ms = + video_quality_observer_->TotalFreezesDurationMs(); + stats_.total_pauses_duration_ms = + video_quality_observer_->TotalPausesDurationMs(); + stats_.total_frames_duration_ms = + video_quality_observer_->TotalFramesDurationMs(); + stats_.sum_squared_frame_durations = + video_quality_observer_->SumSquaredFrameDurationsSec(); + stats_.content_type = last_content_type_; + stats_.timing_frame_info = timing_frame_info_counter_.Max(now_ms); + stats_.jitter_buffer_delay_seconds = + static_cast(current_delay_counter_.Sum(1).value_or(0)) / + rtc::kNumMillisecsPerSec; + stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples(); + stats_.estimated_playout_ntp_timestamp_ms = + GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms); + return stats_; +} + +void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) { + RTC_DCHECK_RUN_ON(&decode_queue_); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [payload_type, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.current_payload_type = payload_type; + })); +} + +void ReceiveStatisticsProxy::OnDecoderImplementationName( + const char* implementation_name) { + RTC_DCHECK_RUN_ON(&decode_queue_); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [name = std::string(implementation_name), this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.decoder_implementation_name = name; + })); +} + +void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( + int max_decode_ms, + int current_delay_ms, + int target_delay_ms, + int jitter_buffer_ms, + int min_playout_delay_ms, + int render_delay_ms) { + RTC_DCHECK_RUN_ON(&decode_queue_); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, + [max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms, + min_playout_delay_ms, render_delay_ms, 
this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.max_decode_ms = max_decode_ms; + stats_.current_delay_ms = current_delay_ms; + stats_.target_delay_ms = target_delay_ms; + stats_.jitter_buffer_ms = jitter_buffer_ms; + stats_.min_playout_delay_ms = min_playout_delay_ms; + stats_.render_delay_ms = render_delay_ms; + jitter_buffer_delay_counter_.Add(jitter_buffer_ms); + target_delay_counter_.Add(target_delay_ms); + current_delay_counter_.Add(current_delay_ms); + // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time + + // render delay). + delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2); + })); +} + +void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { + RTC_DCHECK_RUN_ON(&main_thread_); + num_unique_frames_.emplace(num_unique_frames); +} + +void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( + const TimingFrameInfo& info) { + RTC_DCHECK_RUN_ON(&decode_queue_); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [info, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + if (info.flags != VideoSendTiming::kInvalid) { + int64_t now_ms = clock_->TimeInMilliseconds(); + timing_frame_info_counter_.Add(info, now_ms); + } + + // Measure initial decoding latency between the first frame arriving and + // the first frame being decoded. 
+ if (!first_frame_received_time_ms_.has_value()) { + first_frame_received_time_ms_ = info.receive_finish_ms; + } + if (stats_.first_frame_received_to_decoded_ms == -1 && + first_decoded_frame_time_ms_) { + stats_.first_frame_received_to_decoded_ms = + *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_; + } + })); +} + +void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( + uint32_t ssrc, + const RtcpPacketTypeCounter& packet_counter) { + if (ssrc != remote_ssrc_) + return; + + if (!IsCurrentTaskQueueOrThread(worker_thread_)) { + // RtpRtcpInterface::Configuration has a single + // RtcpPacketTypeCounterObserver and that same configuration may be used for + // both receiver and sender (see ModuleRtpRtcpImpl::ModuleRtpRtcpImpl). The + // RTCPSender implementation currently makes calls to this function on a + // process thread whereas the RTCPReceiver implementation calls back on the + // [main] worker thread. + // So until the sender implementation has been updated, we work around this + // here by posting the update to the expected thread. We make a by value + // copy of the |task_safety_| to handle the case if the queued task + // runs after the |ReceiveStatisticsProxy| has been deleted. In such a + // case the packet_counter update won't be recorded. + worker_thread_->PostTask( + ToQueuedTask(task_safety_, [ssrc, packet_counter, this]() { + RtcpPacketTypesCounterUpdated(ssrc, packet_counter); + })); + return; + } + + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.rtcp_packet_type_counts = packet_counter; +} + +void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) { + RTC_DCHECK_RUN_ON(&main_thread_); + // TODO(pbos): Handle both local and remote ssrcs here and RTC_DCHECK that we + // receive stats from one of them. 
+ if (remote_ssrc_ != ssrc) + return; + + stats_.c_name = std::string(cname); +} + +void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type) { + // See VCMDecodedFrameCallback::Decoded for more info on what thread/queue we + // may be on. E.g. on iOS this gets called on + // "com.apple.coremedia.decompressionsession.clientcallback" + VideoFrameMetaData meta(frame, clock_->CurrentTime()); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [meta, qp, decode_time_ms, content_type, this]() { + OnDecodedFrame(meta, qp, decode_time_ms, content_type); + })); +} + +void ReceiveStatisticsProxy::OnDecodedFrame( + const VideoFrameMetaData& frame_meta, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type) { + RTC_DCHECK_RUN_ON(&main_thread_); + + const bool is_screenshare = + videocontenttypehelpers::IsScreenshare(content_type); + const bool was_screenshare = + videocontenttypehelpers::IsScreenshare(last_content_type_); + + if (is_screenshare != was_screenshare) { + // Reset the quality observer if content type is switched. But first report + // stats for the previous part of the call. 
+ video_quality_observer_->UpdateHistograms(was_screenshare); + video_quality_observer_.reset(new VideoQualityObserver()); + } + + video_quality_observer_->OnDecodedFrame(frame_meta.rtp_timestamp, qp, + last_codec_type_); + + ContentSpecificStats* content_specific_stats = + &content_specific_stats_[content_type]; + + ++stats_.frames_decoded; + if (qp) { + if (!stats_.qp_sum) { + if (stats_.frames_decoded != 1) { + RTC_LOG(LS_WARNING) + << "Frames decoded was not 1 when first qp value was received."; + } + stats_.qp_sum = 0; + } + *stats_.qp_sum += *qp; + content_specific_stats->qp_counter.Add(*qp); + } else if (stats_.qp_sum) { + RTC_LOG(LS_WARNING) + << "QP sum was already set and no QP was given for a frame."; + stats_.qp_sum.reset(); + } + decode_time_counter_.Add(decode_time_ms); + stats_.decode_ms = decode_time_ms; + stats_.total_decode_time_ms += decode_time_ms; + if (enable_decode_time_histograms_) { + UpdateDecodeTimeHistograms(frame_meta.width, frame_meta.height, + decode_time_ms); + } + + last_content_type_ = content_type; + decode_fps_estimator_.Update(1, frame_meta.decode_timestamp.ms()); + + if (last_decoded_frame_time_ms_) { + int64_t interframe_delay_ms = + frame_meta.decode_timestamp.ms() - *last_decoded_frame_time_ms_; + RTC_DCHECK_GE(interframe_delay_ms, 0); + double interframe_delay = interframe_delay_ms / 1000.0; + stats_.total_inter_frame_delay += interframe_delay; + stats_.total_squared_inter_frame_delay += + interframe_delay * interframe_delay; + interframe_delay_max_moving_.Add(interframe_delay_ms, + frame_meta.decode_timestamp.ms()); + content_specific_stats->interframe_delay_counter.Add(interframe_delay_ms); + content_specific_stats->interframe_delay_percentiles.Add( + interframe_delay_ms); + content_specific_stats->flow_duration_ms += interframe_delay_ms; + } + if (stats_.frames_decoded == 1) { + first_decoded_frame_time_ms_.emplace(frame_meta.decode_timestamp.ms()); + } + 
last_decoded_frame_time_ms_.emplace(frame_meta.decode_timestamp.ms()); +} + +void ReceiveStatisticsProxy::OnRenderedFrame( + const VideoFrameMetaData& frame_meta) { + RTC_DCHECK_RUN_ON(&main_thread_); + // Called from VideoReceiveStream2::OnFrame. + + RTC_DCHECK_GT(frame_meta.width, 0); + RTC_DCHECK_GT(frame_meta.height, 0); + + video_quality_observer_->OnRenderedFrame(frame_meta); + + ContentSpecificStats* content_specific_stats = + &content_specific_stats_[last_content_type_]; + renders_fps_estimator_.Update(1, frame_meta.decode_timestamp.ms()); + + ++stats_.frames_rendered; + stats_.width = frame_meta.width; + stats_.height = frame_meta.height; + + render_fps_tracker_.AddSamples(1); + render_pixel_tracker_.AddSamples(sqrt(frame_meta.width * frame_meta.height)); + content_specific_stats->received_width.Add(frame_meta.width); + content_specific_stats->received_height.Add(frame_meta.height); + + // Consider taking stats_.render_delay_ms into account. + const int64_t time_until_rendering_ms = + frame_meta.render_time_ms() - frame_meta.decode_timestamp.ms(); + if (time_until_rendering_ms < 0) { + sum_missed_render_deadline_ms_ += -time_until_rendering_ms; + ++num_delayed_frames_rendered_; + } + + if (frame_meta.ntp_time_ms > 0) { + int64_t delay_ms = + clock_->CurrentNtpInMilliseconds() - frame_meta.ntp_time_ms; + if (delay_ms >= 0) { + content_specific_stats->e2e_delay_counter.Add(delay_ms); + } + } + + QualitySample(frame_meta.decode_timestamp); +} + +void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, + int64_t sync_offset_ms, + double estimated_freq_khz) { + RTC_DCHECK_RUN_ON(&incoming_render_queue_); + int64_t now_ms = clock_->TimeInMilliseconds(); + worker_thread_->PostTask( + ToQueuedTask(task_safety_, [video_playout_ntp_ms, sync_offset_ms, + estimated_freq_khz, now_ms, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + sync_offset_counter_.Add(std::abs(sync_offset_ms)); + stats_.sync_offset_ms = sync_offset_ms; + 
last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms; + last_estimated_playout_time_ms_ = now_ms; + + const double kMaxFreqKhz = 10000.0; + int offset_khz = kMaxFreqKhz; + // Should not be zero or negative. If so, report max. + if (estimated_freq_khz < kMaxFreqKhz && estimated_freq_khz > 0.0) + offset_khz = + static_cast(std::fabs(estimated_freq_khz - 90.0) + 0.5); + + freq_offset_counter_.Add(offset_khz); + })); +} + +void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, + size_t size_bytes, + VideoContentType content_type) { + RTC_DCHECK_RUN_ON(&main_thread_); + + if (is_keyframe) { + ++stats_.frame_counts.key_frames; + } else { + ++stats_.frame_counts.delta_frames; + } + + // Content type extension is set only for keyframes and should be propagated + // for all the following delta frames. Here we may receive frames out of order + // and miscategorise some delta frames near the layer switch. + // This may slightly offset calculated bitrate and keyframes permille metrics. + VideoContentType propagated_content_type = + is_keyframe ? content_type : last_content_type_; + + ContentSpecificStats* content_specific_stats = + &content_specific_stats_[propagated_content_type]; + + content_specific_stats->total_media_bytes += size_bytes; + if (is_keyframe) { + ++content_specific_stats->frame_counts.key_frames; + } else { + ++content_specific_stats->frame_counts.delta_frames; + } + + int64_t now_ms = clock_->TimeInMilliseconds(); + frame_window_.insert(std::make_pair(now_ms, size_bytes)); + UpdateFramerate(now_ms); +} + +void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { + // Can be called on either the decode queue or the worker thread + // See FrameBuffer2 for more details. 
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [frames_dropped, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.frames_dropped += frames_dropped; + })); +} + +void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { + RTC_DCHECK_RUN_ON(&decode_queue_); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [codec_type, qp, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + last_codec_type_ = codec_type; + if (last_codec_type_ == kVideoCodecVP8 && qp != -1) { + qp_counters_.vp8.Add(qp); + qp_sample_.Add(qp); + } + })); +} + +void ReceiveStatisticsProxy::OnStreamInactive() { + RTC_DCHECK_RUN_ON(&main_thread_); + + // TODO(sprang): Figure out any other state that should be reset. + + // Don't report inter-frame delay if stream was paused. + last_decoded_frame_time_ms_.reset(); + + video_quality_observer_->OnStreamInactive(); +} + +void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms) { + RTC_DCHECK_RUN_ON(&main_thread_); + avg_rtt_ms_ = avg_rtt_ms; +} + +void ReceiveStatisticsProxy::DecoderThreadStarting() { + RTC_DCHECK_RUN_ON(&main_thread_); +} + +void ReceiveStatisticsProxy::DecoderThreadStopped() { + RTC_DCHECK_RUN_ON(&main_thread_); + decode_queue_.Detach(); +} + +ReceiveStatisticsProxy::ContentSpecificStats::ContentSpecificStats() + : interframe_delay_percentiles(kMaxCommonInterframeDelayMs) {} + +ReceiveStatisticsProxy::ContentSpecificStats::~ContentSpecificStats() = default; + +void ReceiveStatisticsProxy::ContentSpecificStats::Add( + const ContentSpecificStats& other) { + e2e_delay_counter.Add(other.e2e_delay_counter); + interframe_delay_counter.Add(other.interframe_delay_counter); + flow_duration_ms += other.flow_duration_ms; + total_media_bytes += other.total_media_bytes; + received_height.Add(other.received_height); + received_width.Add(other.received_width); + qp_counter.Add(other.qp_counter); + frame_counts.key_frames += other.frame_counts.key_frames; + frame_counts.delta_frames += other.frame_counts.delta_frames; + 
interframe_delay_percentiles.Add(other.interframe_delay_percentiles); +} + +} // namespace internal +} // namespace webrtc diff --git a/video/receive_statistics_proxy2.h b/video/receive_statistics_proxy2.h new file mode 100644 index 0000000000..1357c407ad --- /dev/null +++ b/video/receive_statistics_proxy2.h @@ -0,0 +1,223 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_RECEIVE_STATISTICS_PROXY2_H_ +#define VIDEO_RECEIVE_STATISTICS_PROXY2_H_ + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/timestamp.h" +#include "call/video_receive_stream.h" +#include "modules/include/module_common_types.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "rtc_base/numerics/histogram_percentile_counter.h" +#include "rtc_base/numerics/moving_max_counter.h" +#include "rtc_base/numerics/sample_counter.h" +#include "rtc_base/rate_statistics.h" +#include "rtc_base/rate_tracker.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/thread_checker.h" +#include "video/quality_threshold.h" +#include "video/stats_counter.h" +#include "video/video_quality_observer2.h" + +namespace webrtc { + +class Clock; +struct CodecSpecificInfo; + +namespace internal { +// Declared in video_receive_stream2.h. 
+struct VideoFrameMetaData; + +class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, + public RtcpCnameCallback, + public RtcpPacketTypeCounterObserver { + public: + ReceiveStatisticsProxy(const VideoReceiveStream::Config* config, + Clock* clock, + TaskQueueBase* worker_thread); + ~ReceiveStatisticsProxy() override; + + VideoReceiveStream::Stats GetStats() const; + + void OnDecodedFrame(const VideoFrame& frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type); + + // Called asyncronously on the worker thread as a result of a call to the + // above OnDecodedFrame method, which is called back on the thread where + // the actual decoding happens. + void OnDecodedFrame(const VideoFrameMetaData& frame_meta, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type); + + void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms, + int64_t sync_offset_ms, + double estimated_freq_khz); + void OnRenderedFrame(const VideoFrameMetaData& frame_meta); + void OnIncomingPayloadType(int payload_type); + void OnDecoderImplementationName(const char* implementation_name); + + void OnPreDecode(VideoCodecType codec_type, int qp); + + void OnUniqueFramesCounted(int num_unique_frames); + + // Indicates video stream has been paused (no incoming packets). + void OnStreamInactive(); + + // Overrides VCMReceiveStatisticsCallback. + void OnCompleteFrame(bool is_keyframe, + size_t size_bytes, + VideoContentType content_type) override; + void OnDroppedFrames(uint32_t frames_dropped) override; + void OnFrameBufferTimingsUpdated(int max_decode_ms, + int current_delay_ms, + int target_delay_ms, + int jitter_buffer_ms, + int min_playout_delay_ms, + int render_delay_ms) override; + + void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override; + + // Overrides RtcpCnameCallback. + void OnCname(uint32_t ssrc, absl::string_view cname) override; + + // Overrides RtcpPacketTypeCounterObserver. 
+ void RtcpPacketTypesCounterUpdated( + uint32_t ssrc, + const RtcpPacketTypeCounter& packet_counter) override; + + void OnRttUpdate(int64_t avg_rtt_ms); + + // Notification methods that are used to check our internal state and validate + // threading assumptions. These are called by VideoReceiveStream. + void DecoderThreadStarting(); + void DecoderThreadStopped(); + + // Produce histograms. Must be called after DecoderThreadStopped(), typically + // at the end of the call. + void UpdateHistograms(absl::optional fraction_lost, + const StreamDataCounters& rtp_stats, + const StreamDataCounters* rtx_stats); + + private: + struct QpCounters { + rtc::SampleCounter vp8; + }; + + struct ContentSpecificStats { + ContentSpecificStats(); + ~ContentSpecificStats(); + + void Add(const ContentSpecificStats& other); + + rtc::SampleCounter e2e_delay_counter; + rtc::SampleCounter interframe_delay_counter; + int64_t flow_duration_ms = 0; + int64_t total_media_bytes = 0; + rtc::SampleCounter received_width; + rtc::SampleCounter received_height; + rtc::SampleCounter qp_counter; + FrameCounts frame_counts; + rtc::HistogramPercentileCounter interframe_delay_percentiles; + }; + + void QualitySample(Timestamp now); + + // Removes info about old frames and then updates the framerate. 
+ void UpdateFramerate(int64_t now_ms) const; + + void UpdateDecodeTimeHistograms(int width, + int height, + int decode_time_ms) const; + + absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( + int64_t now_ms) const; + + Clock* const clock_; + const int64_t start_ms_; + const bool enable_decode_time_histograms_; + + int64_t last_sample_time_ RTC_GUARDED_BY(main_thread_); + + QualityThreshold fps_threshold_ RTC_GUARDED_BY(main_thread_); + QualityThreshold qp_threshold_ RTC_GUARDED_BY(main_thread_); + QualityThreshold variance_threshold_ RTC_GUARDED_BY(main_thread_); + rtc::SampleCounter qp_sample_ RTC_GUARDED_BY(main_thread_); + int num_bad_states_ RTC_GUARDED_BY(main_thread_); + int num_certain_states_ RTC_GUARDED_BY(main_thread_); + // Note: The |stats_.rtp_stats| member is not used or populated by this class. + mutable VideoReceiveStream::Stats stats_ RTC_GUARDED_BY(main_thread_); + // Same as stats_.ssrc, but const (no lock required). + const uint32_t remote_ssrc_; + RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(main_thread_); + RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(main_thread_); + rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(main_thread_); + rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(main_thread_); + rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(main_thread_); + rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(main_thread_); + rtc::SampleCounter jitter_buffer_delay_counter_ RTC_GUARDED_BY(main_thread_); + rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(main_thread_); + rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(main_thread_); + rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(main_thread_); + std::unique_ptr video_quality_observer_ + RTC_GUARDED_BY(main_thread_); + mutable rtc::MovingMaxCounter interframe_delay_max_moving_ + RTC_GUARDED_BY(main_thread_); + std::map content_specific_stats_ + RTC_GUARDED_BY(main_thread_); + MaxCounter freq_offset_counter_ 
RTC_GUARDED_BY(main_thread_); + QpCounters qp_counters_ RTC_GUARDED_BY(main_thread_); + int64_t avg_rtt_ms_ RTC_GUARDED_BY(main_thread_) = 0; + mutable std::map frame_window_ RTC_GUARDED_BY(main_thread_); + VideoContentType last_content_type_ RTC_GUARDED_BY(&main_thread_); + VideoCodecType last_codec_type_ RTC_GUARDED_BY(main_thread_); + absl::optional first_frame_received_time_ms_ + RTC_GUARDED_BY(main_thread_); + absl::optional first_decoded_frame_time_ms_ + RTC_GUARDED_BY(main_thread_); + absl::optional last_decoded_frame_time_ms_ + RTC_GUARDED_BY(main_thread_); + size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(main_thread_); + int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(main_thread_); + // Mutable because calling Max() on MovingMaxCounter is not const. Yet it is + // called from const GetStats(). + mutable rtc::MovingMaxCounter timing_frame_info_counter_ + RTC_GUARDED_BY(main_thread_); + absl::optional num_unique_frames_ RTC_GUARDED_BY(main_thread_); + absl::optional last_estimated_playout_ntp_timestamp_ms_ + RTC_GUARDED_BY(main_thread_); + absl::optional last_estimated_playout_time_ms_ + RTC_GUARDED_BY(main_thread_); + + // The thread on which this instance is constructed and some of its main + // methods are invoked on such as GetStats(). + TaskQueueBase* const worker_thread_; + + ScopedTaskSafety task_safety_; + + SequenceChecker decode_queue_; + rtc::ThreadChecker main_thread_; + SequenceChecker incoming_render_queue_; +}; + +} // namespace internal +} // namespace webrtc +#endif // VIDEO_RECEIVE_STATISTICS_PROXY2_H_ diff --git a/video/receive_statistics_proxy2_unittest.cc b/video/receive_statistics_proxy2_unittest.cc new file mode 100644 index 0000000000..867a3c337f --- /dev/null +++ b/video/receive_statistics_proxy2_unittest.cc @@ -0,0 +1,1871 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/receive_statistics_proxy2.h" + +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/metrics.h" +#include "test/field_trial.h" +#include "test/gtest.h" +#include "test/run_loop.h" +#include "video/video_receive_stream2.h" + +namespace webrtc { +namespace internal { +namespace { +const int64_t kFreqOffsetProcessIntervalInMs = 40000; +const uint32_t kLocalSsrc = 123; +const uint32_t kRemoteSsrc = 456; +const int kMinRequiredSamples = 200; +const int kWidth = 1280; +const int kHeight = 720; +} // namespace + +// TODO(sakal): ReceiveStatisticsProxy is lacking unittesting. +class ReceiveStatisticsProxy2Test : public ::testing::Test { + public: + ReceiveStatisticsProxy2Test() : fake_clock_(1234), config_(GetTestConfig()) { + metrics::Reset(); + statistics_proxy_.reset( + new ReceiveStatisticsProxy(&config_, &fake_clock_, loop_.task_queue())); + } + + ~ReceiveStatisticsProxy2Test() override { statistics_proxy_.reset(); } + + protected: + // Convenience method to avoid too many explict flushes. 
+ VideoReceiveStream::Stats FlushAndGetStats() { + loop_.Flush(); + return statistics_proxy_->GetStats(); + } + + void FlushAndUpdateHistograms(absl::optional fraction_lost, + const StreamDataCounters& rtp_stats, + const StreamDataCounters* rtx_stats) { + loop_.Flush(); + statistics_proxy_->UpdateHistograms(fraction_lost, rtp_stats, rtx_stats); + } + + VideoReceiveStream::Config GetTestConfig() { + VideoReceiveStream::Config config(nullptr); + config.rtp.local_ssrc = kLocalSsrc; + config.rtp.remote_ssrc = kRemoteSsrc; + return config; + } + + VideoFrame CreateFrame(int width, int height) { + return CreateVideoFrame(width, height, 0); + } + + VideoFrame CreateFrameWithRenderTime(Timestamp render_time) { + return CreateFrameWithRenderTimeMs(render_time.ms()); + } + + VideoFrame CreateFrameWithRenderTimeMs(int64_t render_time_ms) { + return CreateVideoFrame(kWidth, kHeight, render_time_ms); + } + + VideoFrame CreateVideoFrame(int width, int height, int64_t render_time_ms) { + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(I420Buffer::Create(width, height)) + .set_timestamp_rtp(0) + .set_timestamp_ms(render_time_ms) + .set_rotation(kVideoRotation_0) + .build(); + frame.set_ntp_time_ms(fake_clock_.CurrentNtpInMilliseconds()); + return frame; + } + + // Return the current fake time as a Timestamp. + Timestamp Now() { return fake_clock_.CurrentTime(); } + + // Creates a VideoFrameMetaData instance with a timestamp. + VideoFrameMetaData MetaData(const VideoFrame& frame, Timestamp ts) { + return VideoFrameMetaData(frame, ts); + } + + // Creates a VideoFrameMetaData instance with the current fake time. 
+ VideoFrameMetaData MetaData(const VideoFrame& frame) { + return VideoFrameMetaData(frame, Now()); + } + + SimulatedClock fake_clock_; + const VideoReceiveStream::Config config_; + std::unique_ptr statistics_proxy_; + test::RunLoop loop_; +}; + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameIncreasesFramesDecoded) { + EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (uint32_t i = 1; i <= 3; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(i, FlushAndGetStats().frames_decoded); + } +} + +TEST_F(ReceiveStatisticsProxy2Test, DecodedFpsIsReported) { + const int kFps = 20; + const int kRequiredSamples = metrics::kMinRunTimeInSeconds * kFps; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (int i = 0; i < kRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); + } + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DecodedFramesPerSecond", kFps)); +} + +TEST_F(ReceiveStatisticsProxy2Test, DecodedFpsIsNotReportedForTooFewSamples) { + const int kFps = 20; + const int kRequiredSamples = metrics::kMinRunTimeInSeconds * kFps; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (int i = 0; i < kRequiredSamples - 1; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); + } + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + 
OnDecodedFrameWithQpDoesNotResetFramesDecodedOrTotalDecodeTime) { + EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + unsigned int expected_total_decode_time_ms = 0; + unsigned int expected_frames_decoded = 0; + for (uint32_t i = 1; i <= 3; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 1, + VideoContentType::UNSPECIFIED); + expected_total_decode_time_ms += 1; + ++expected_frames_decoded; + loop_.Flush(); + EXPECT_EQ(expected_frames_decoded, + statistics_proxy_->GetStats().frames_decoded); + EXPECT_EQ(expected_total_decode_time_ms, + statistics_proxy_->GetStats().total_decode_time_ms); + } + statistics_proxy_->OnDecodedFrame(frame, 1u, 3, + VideoContentType::UNSPECIFIED); + ++expected_frames_decoded; + expected_total_decode_time_ms += 3; + loop_.Flush(); + EXPECT_EQ(expected_frames_decoded, + statistics_proxy_->GetStats().frames_decoded); + EXPECT_EQ(expected_total_decode_time_ms, + statistics_proxy_->GetStats().total_decode_time_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameIncreasesQpSum) { + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(3u, FlushAndGetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, 127u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(130u, FlushAndGetStats().qp_sum); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameIncreasesTotalDecodeTime) { + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, 3u, 4, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(4u, FlushAndGetStats().total_decode_time_ms); + statistics_proxy_->OnDecodedFrame(frame, 127u, 7, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(11u, FlushAndGetStats().total_decode_time_ms); +} + 
+TEST_F(ReceiveStatisticsProxy2Test, ReportsContentType) { + const std::string kRealtimeString("realtime"); + const std::string kScreenshareString("screen"); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + EXPECT_EQ(kRealtimeString, videocontenttypehelpers::ToString( + statistics_proxy_->GetStats().content_type)); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::SCREENSHARE); + EXPECT_EQ(kScreenshareString, + videocontenttypehelpers::ToString(FlushAndGetStats().content_type)); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kRealtimeString, + videocontenttypehelpers::ToString(FlushAndGetStats().content_type)); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsMaxTotalInterFrameDelay) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + const TimeDelta kInterFrameDelay1 = TimeDelta::Millis(100); + const TimeDelta kInterFrameDelay2 = TimeDelta::Millis(200); + const TimeDelta kInterFrameDelay3 = TimeDelta::Millis(300); + double expected_total_inter_frame_delay = 0; + double expected_total_squared_inter_frame_delay = 0; + EXPECT_EQ(expected_total_inter_frame_delay, + statistics_proxy_->GetStats().total_inter_frame_delay); + EXPECT_EQ(expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); + + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + FlushAndGetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ(expected_total_squared_inter_frame_delay, + FlushAndGetStats().total_squared_inter_frame_delay); + + fake_clock_.AdvanceTime(kInterFrameDelay1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + expected_total_inter_frame_delay += kInterFrameDelay1.seconds(); + expected_total_squared_inter_frame_delay += + pow(kInterFrameDelay1.seconds(), 2.0); + 
EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + FlushAndGetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ( + expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); + + fake_clock_.AdvanceTime(kInterFrameDelay2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + expected_total_inter_frame_delay += kInterFrameDelay2.seconds(); + expected_total_squared_inter_frame_delay += + pow(kInterFrameDelay2.seconds(), 2.0); + EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + FlushAndGetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ( + expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); + + fake_clock_.AdvanceTime(kInterFrameDelay3); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + expected_total_inter_frame_delay += kInterFrameDelay3.seconds(); + expected_total_squared_inter_frame_delay += + pow(kInterFrameDelay3.seconds(), 2.0); + EXPECT_DOUBLE_EQ(expected_total_inter_frame_delay, + FlushAndGetStats().total_inter_frame_delay); + EXPECT_DOUBLE_EQ( + expected_total_squared_inter_frame_delay, + statistics_proxy_->GetStats().total_squared_inter_frame_delay); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsMaxInterframeDelay) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + const int64_t kInterframeDelayMs1 = 100; + const int64_t kInterframeDelayMs2 = 200; + const int64_t kInterframeDelayMs3 = 100; + EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(-1, FlushAndGetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kInterframeDelayMs1, 
FlushAndGetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kInterframeDelayMs2, FlushAndGetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs3); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + // kInterframeDelayMs3 is smaller than kInterframeDelayMs2. + EXPECT_EQ(kInterframeDelayMs2, FlushAndGetStats().interframe_delay_max_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportInterframeDelayInWindow) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + const int64_t kInterframeDelayMs1 = 900; + const int64_t kInterframeDelayMs2 = 750; + const int64_t kInterframeDelayMs3 = 700; + EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(-1, FlushAndGetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(kInterframeDelayMs1, FlushAndGetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + // Still first delay is the maximum + EXPECT_EQ(kInterframeDelayMs1, FlushAndGetStats().interframe_delay_max_ms); + + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs3); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + // Now the first sample is out of the window, so the second is the maximum. 
+ EXPECT_EQ(kInterframeDelayMs2, FlushAndGetStats().interframe_delay_max_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsFreezeMetrics) { + const int64_t kFreezeDurationMs = 1000; + + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(0u, stats.freeze_count); + EXPECT_FALSE(stats.total_freezes_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (size_t i = 0; i < VideoQualityObserver::kMinFrameSamplesToDetectFreeze; + ++i) { + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + } + + // Freeze. + fake_clock_.AdvanceTimeMilliseconds(kFreezeDurationMs); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(1u, stats.freeze_count); + EXPECT_EQ(kFreezeDurationMs, stats.total_freezes_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsPauseMetrics) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.pause_count); + ASSERT_EQ(0u, stats.total_pauses_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + // Pause. 
+ fake_clock_.AdvanceTimeMilliseconds(5432); + statistics_proxy_->OnStreamInactive(); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(1u, stats.pause_count); + EXPECT_EQ(5432u, stats.total_pauses_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, PauseBeforeFirstAndAfterLastFrameIgnored) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.pause_count); + ASSERT_EQ(0u, stats.total_pauses_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + // Pause -> Frame -> Pause + fake_clock_.AdvanceTimeMilliseconds(5000); + statistics_proxy_->OnStreamInactive(); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + fake_clock_.AdvanceTimeMilliseconds(5000); + statistics_proxy_->OnStreamInactive(); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(0u, stats.pause_count); + EXPECT_EQ(0u, stats.total_pauses_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsFramesDuration) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.total_frames_duration_ms); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + // Emulate delay before first frame is rendered. This is needed to ensure + // that frame duration only covers time since first frame is rendered and + // not the total time. 
+ fake_clock_.AdvanceTimeMilliseconds(5432); + + for (int i = 0; i <= 10; ++i) { + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + } + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(10 * 30u, stats.total_frames_duration_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsSumSquaredFrameDurations) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + ASSERT_EQ(0u, stats.sum_squared_frame_durations); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (int i = 0; i <= 10; ++i) { + fake_clock_.AdvanceTimeMilliseconds(30); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + } + + stats = statistics_proxy_->GetStats(); + const double kExpectedSumSquaredFrameDurationsSecs = + 10 * (30 / 1000.0 * 30 / 1000.0); + EXPECT_EQ(kExpectedSumSquaredFrameDurationsSecs, + stats.sum_squared_frame_durations); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameWithoutQpQpSumWontExist) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(absl::nullopt, FlushAndGetStats().qp_sum); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnDecodedFrameWithoutQpResetsQpSum) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, 3u, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(3u, FlushAndGetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + EXPECT_EQ(absl::nullopt, FlushAndGetStats().qp_sum); +} + +TEST_F(ReceiveStatisticsProxy2Test, OnRenderedFrameIncreasesFramesRendered) { + EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + for (uint32_t i = 1; i <= 3; ++i) { + 
statistics_proxy_->OnRenderedFrame(MetaData(frame)); + EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered); + } +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsSsrc) { + EXPECT_EQ(kRemoteSsrc, statistics_proxy_->GetStats().ssrc); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsIncomingPayloadType) { + const int kPayloadType = 111; + statistics_proxy_->OnIncomingPayloadType(kPayloadType); + loop_.Flush(); + EXPECT_EQ(kPayloadType, statistics_proxy_->GetStats().current_payload_type); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsDecoderImplementationName) { + const char* kName = "decoderName"; + statistics_proxy_->OnDecoderImplementationName(kName); + loop_.Flush(); + EXPECT_STREQ( + kName, statistics_proxy_->GetStats().decoder_implementation_name.c_str()); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsOnCompleteFrame) { + const int kFrameSizeBytes = 1000; + statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(1, stats.network_frame_rate); + EXPECT_EQ(1, stats.frame_counts.key_frames); + EXPECT_EQ(0, stats.frame_counts.delta_frames); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsOnDroppedFrame) { + unsigned int dropped_frames = 0; + for (int i = 0; i < 10; ++i) { + statistics_proxy_->OnDroppedFrames(i); + dropped_frames += i; + } + VideoReceiveStream::Stats stats = FlushAndGetStats(); + EXPECT_EQ(dropped_frames, stats.frames_dropped); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsDecodeTimingStats) { + const int kMaxDecodeMs = 2; + const int kCurrentDelayMs = 3; + const int kTargetDelayMs = 4; + const int kJitterBufferMs = 5; + const int kMinPlayoutDelayMs = 6; + const int kRenderDelayMs = 7; + const int64_t kRttMs = 8; + statistics_proxy_->OnRttUpdate(kRttMs); + statistics_proxy_->OnFrameBufferTimingsUpdated( + kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs, + 
kMinPlayoutDelayMs, kRenderDelayMs); + VideoReceiveStream::Stats stats = FlushAndGetStats(); + EXPECT_EQ(kMaxDecodeMs, stats.max_decode_ms); + EXPECT_EQ(kCurrentDelayMs, stats.current_delay_ms); + EXPECT_EQ(kTargetDelayMs, stats.target_delay_ms); + EXPECT_EQ(kJitterBufferMs, stats.jitter_buffer_ms); + EXPECT_EQ(kMinPlayoutDelayMs, stats.min_playout_delay_ms); + EXPECT_EQ(kRenderDelayMs, stats.render_delay_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsRtcpPacketTypeCounts) { + const uint32_t kFirPackets = 33; + const uint32_t kPliPackets = 44; + const uint32_t kNackPackets = 55; + RtcpPacketTypeCounter counter; + counter.fir_packets = kFirPackets; + counter.pli_packets = kPliPackets; + counter.nack_packets = kNackPackets; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(kFirPackets, stats.rtcp_packet_type_counts.fir_packets); + EXPECT_EQ(kPliPackets, stats.rtcp_packet_type_counts.pli_packets); + EXPECT_EQ(kNackPackets, stats.rtcp_packet_type_counts.nack_packets); +} + +TEST_F(ReceiveStatisticsProxy2Test, + GetStatsReportsNoRtcpPacketTypeCountsForUnknownSsrc) { + RtcpPacketTypeCounter counter; + counter.fir_packets = 33; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc + 1, counter); + EXPECT_EQ(0u, + statistics_proxy_->GetStats().rtcp_packet_type_counts.fir_packets); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsFrameCounts) { + const int kKeyFrames = 3; + const int kDeltaFrames = 22; + for (int i = 0; i < kKeyFrames; i++) { + statistics_proxy_->OnCompleteFrame(true, 0, VideoContentType::UNSPECIFIED); + } + for (int i = 0; i < kDeltaFrames; i++) { + statistics_proxy_->OnCompleteFrame(false, 0, VideoContentType::UNSPECIFIED); + } + + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(kKeyFrames, stats.frame_counts.key_frames); + EXPECT_EQ(kDeltaFrames, stats.frame_counts.delta_frames); +} + 
+TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsCName) { + const char* kName = "cName"; + statistics_proxy_->OnCname(kRemoteSsrc, kName); + EXPECT_STREQ(kName, statistics_proxy_->GetStats().c_name.c_str()); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsNoCNameForUnknownSsrc) { + const char* kName = "cName"; + statistics_proxy_->OnCname(kRemoteSsrc + 1, kName); + EXPECT_STREQ("", statistics_proxy_->GetStats().c_name.c_str()); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReportsLongestTimingFrameInfo) { + const int64_t kShortEndToEndDelay = 10; + const int64_t kMedEndToEndDelay = 20; + const int64_t kLongEndToEndDelay = 100; + const uint32_t kExpectedRtpTimestamp = 2; + TimingFrameInfo info; + absl::optional result; + info.rtp_timestamp = kExpectedRtpTimestamp - 1; + info.capture_time_ms = 0; + info.decode_finish_ms = kShortEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + info.rtp_timestamp = + kExpectedRtpTimestamp; // this frame should be reported in the end. 
+ info.capture_time_ms = 0; + info.decode_finish_ms = kLongEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + info.rtp_timestamp = kExpectedRtpTimestamp + 1; + info.capture_time_ms = 0; + info.decode_finish_ms = kMedEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + result = FlushAndGetStats().timing_frame_info; + EXPECT_TRUE(result); + EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp); +} + +TEST_F(ReceiveStatisticsProxy2Test, RespectsReportingIntervalForTimingFrames) { + TimingFrameInfo info; + const int64_t kShortEndToEndDelay = 10; + const uint32_t kExpectedRtpTimestamp = 2; + const int64_t kShortDelayMs = 1000; + const int64_t kLongDelayMs = 10000; + absl::optional result; + info.rtp_timestamp = kExpectedRtpTimestamp; + info.capture_time_ms = 0; + info.decode_finish_ms = kShortEndToEndDelay; + statistics_proxy_->OnTimingFrameInfoUpdated(info); + fake_clock_.AdvanceTimeMilliseconds(kShortDelayMs); + result = FlushAndGetStats().timing_frame_info; + EXPECT_TRUE(result); + EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp); + fake_clock_.AdvanceTimeMilliseconds(kLongDelayMs); + result = statistics_proxy_->GetStats().timing_frame_info; + EXPECT_FALSE(result); +} + +TEST_F(ReceiveStatisticsProxy2Test, LifetimeHistogramIsUpdated) { + const int64_t kTimeSec = 3; + fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000); + // Need at least one frame to report stream lifetime. 
+ statistics_proxy_->OnCompleteFrame(true, 1000, VideoContentType::UNSPECIFIED); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceiveStreamLifetimeInSeconds", + kTimeSec)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + LifetimeHistogramNotReportedForEmptyStreams) { + const int64_t kTimeSec = 3; + fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000); + // No frames received. + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds")); +} + +TEST_F(ReceiveStatisticsProxy2Test, BadCallHistogramsAreUpdated) { + // Based on the tuning parameters this will produce 7 uncertain states, + // then 10 certainly bad states. There has to be 10 certain states before + // any histograms are recorded. + const int kNumBadSamples = 17; + // We only count one sample per second. 
+ const int kBadFameIntervalMs = 1100; + + StreamDataCounters counters; + counters.first_packet_time_ms = fake_clock_.TimeInMilliseconds(); + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kNumBadSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kBadFameIntervalMs); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + } + statistics_proxy_->UpdateHistograms(absl::nullopt, counters, nullptr); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.BadCall.Any")); + EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.BadCall.Any", 100)); + + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.BadCall.FrameRate")); + EXPECT_METRIC_EQ(1, + metrics::NumEvents("WebRTC.Video.BadCall.FrameRate", 100)); + + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.BadCall.FrameRateVariance")); + + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.BadCall.Qp")); +} + +TEST_F(ReceiveStatisticsProxy2Test, PacketLossHistogramIsUpdated) { + statistics_proxy_->UpdateHistograms(10, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.ReceivedPacketsLostInPercent")); + + // Restart + SetUp(); + + // Min run time has passed. 
+ fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000); + statistics_proxy_->UpdateHistograms(10, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.ReceivedPacketsLostInPercent")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceivedPacketsLostInPercent", 10)); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsPlayoutTimestamp) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + EXPECT_EQ(kVideoNtpMs, FlushAndGetStats().estimated_playout_ntp_timestamp_ms); + fake_clock_.AdvanceTimeMilliseconds(13); + EXPECT_EQ(kVideoNtpMs + 13, + statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); + fake_clock_.AdvanceTimeMilliseconds(5); + EXPECT_EQ(kVideoNtpMs + 13 + 5, + statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsAvSyncOffset) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + EXPECT_EQ(std::numeric_limits::max(), + statistics_proxy_->GetStats().sync_offset_ms); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + EXPECT_EQ(kSyncOffsetMs, FlushAndGetStats().sync_offset_ms); +} + +TEST_F(ReceiveStatisticsProxy2Test, AvSyncOffsetHistogramIsUpdated) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz); + } + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs")); + EXPECT_METRIC_EQ( + 1, 
metrics::NumEvents("WebRTC.Video.AVSyncOffsetInMs", kSyncOffsetMs)); +} + +TEST_F(ReceiveStatisticsProxy2Test, RtpToNtpFrequencyOffsetHistogramIsUpdated) { + const int64_t kVideoNtpMs = 21; + const int64_t kSyncOffsetMs = 22; + const double kFreqKhz = 90.0; + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz + 2.2); + loop_.Flush(); + fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs); + // Process interval passed, max diff: 2. + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz + 1.1); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz - 4.2); + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, + kFreqKhz - 0.9); + loop_.Flush(); + fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs); + // Process interval passed, max diff: 4. + statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + // Average reported: (2 + 4) / 2 = 3. 
+ EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.RtpToNtpFreqOffsetInKhz")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.RtpToNtpFreqOffsetInKhz", 3)); +} + +TEST_F(ReceiveStatisticsProxy2Test, Vp8QpHistogramIsUpdated) { + const int kQp = 22; + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnPreDecode(kVideoCodecVP8, kQp); + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); + EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.Decoded.Vp8.Qp", kQp)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + Vp8QpHistogramIsNotUpdatedForTooFewSamples) { + const int kQp = 22; + + for (int i = 0; i < kMinRequiredSamples - 1; ++i) + statistics_proxy_->OnPreDecode(kVideoCodecVP8, kQp); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); +} + +TEST_F(ReceiveStatisticsProxy2Test, Vp8QpHistogramIsNotUpdatedIfNoQpValue) { + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnPreDecode(kVideoCodecVP8, -1); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + KeyFrameHistogramNotUpdatedForTooFewSamples) { + const bool kIsKeyFrame = false; + const int kFrameSizeBytes = 1000; + + for (int i = 0; i < kMinRequiredSamples - 1; ++i) + statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + EXPECT_EQ(0, statistics_proxy_->GetStats().frame_counts.key_frames); + EXPECT_EQ(kMinRequiredSamples - 1, + statistics_proxy_->GetStats().frame_counts.delta_frames); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 0, 
metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + KeyFrameHistogramUpdatedForMinRequiredSamples) { + const bool kIsKeyFrame = false; + const int kFrameSizeBytes = 1000; + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + EXPECT_EQ(0, statistics_proxy_->GetStats().frame_counts.key_frames); + EXPECT_EQ(kMinRequiredSamples, + statistics_proxy_->GetStats().frame_counts.delta_frames); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.KeyFramesReceivedInPermille", 0)); +} + +TEST_F(ReceiveStatisticsProxy2Test, KeyFrameHistogramIsUpdated) { + const int kFrameSizeBytes = 1000; + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + for (int i = 0; i < kMinRequiredSamples; ++i) + statistics_proxy_->OnCompleteFrame(false, kFrameSizeBytes, + VideoContentType::UNSPECIFIED); + + EXPECT_EQ(kMinRequiredSamples, + statistics_proxy_->GetStats().frame_counts.key_frames); + EXPECT_EQ(kMinRequiredSamples, + statistics_proxy_->GetStats().frame_counts.delta_frames); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.KeyFramesReceivedInPermille", 500)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + TimingHistogramsNotUpdatedForTooFewSamples) { + const int kMaxDecodeMs = 2; + const int kCurrentDelayMs = 3; + const int kTargetDelayMs = 4; + const int kJitterBufferMs = 5; + const int kMinPlayoutDelayMs = 6; + const int kRenderDelayMs = 7; + + for (int i = 0; i < 
kMinRequiredSamples - 1; ++i) { + statistics_proxy_->OnFrameBufferTimingsUpdated( + kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs, + kMinPlayoutDelayMs, kRenderDelayMs); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.DecodeTimeInMs")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.TargetDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, TimingHistogramsAreUpdated) { + const int kMaxDecodeMs = 2; + const int kCurrentDelayMs = 3; + const int kTargetDelayMs = 4; + const int kJitterBufferMs = 5; + const int kMinPlayoutDelayMs = 6; + const int kRenderDelayMs = 7; + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnFrameBufferTimingsUpdated( + kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs, + kMinPlayoutDelayMs, kRenderDelayMs); + } + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.TargetDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs")); + + EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.JitterBufferDelayInMs", + kJitterBufferMs)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.TargetDelayInMs", kTargetDelayMs)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.CurrentDelayInMs", kCurrentDelayMs)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.OnewayDelayInMs", kTargetDelayMs)); +} + +TEST_F(ReceiveStatisticsProxy2Test, DoesNotReportStaleFramerates) { + 
const int kDefaultFps = 30; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kDefaultFps; ++i) { + // Since OnRenderedFrame is never called the fps in each sample will be 0, + // i.e. bad + frame.set_ntp_time_ms(fake_clock_.CurrentNtpInMilliseconds()); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + fake_clock_.AdvanceTimeMilliseconds(1000 / kDefaultFps); + } + + loop_.Flush(); + EXPECT_EQ(kDefaultFps, statistics_proxy_->GetStats().decode_frame_rate); + EXPECT_EQ(kDefaultFps, statistics_proxy_->GetStats().render_frame_rate); + + // FPS trackers in stats proxy have a 1000ms sliding window. + fake_clock_.AdvanceTimeMilliseconds(1000); + EXPECT_EQ(0, statistics_proxy_->GetStats().decode_frame_rate); + EXPECT_EQ(0, statistics_proxy_->GetStats().render_frame_rate); +} + +TEST_F(ReceiveStatisticsProxy2Test, GetStatsReportsReceivedFrameStats) { + EXPECT_EQ(0, statistics_proxy_->GetStats().width); + EXPECT_EQ(0, statistics_proxy_->GetStats().height); + EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered); + + statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight))); + + EXPECT_EQ(kWidth, statistics_proxy_->GetStats().width); + EXPECT_EQ(kHeight, statistics_proxy_->GetStats().height); + EXPECT_EQ(1u, statistics_proxy_->GetStats().frames_rendered); +} + +TEST_F(ReceiveStatisticsProxy2Test, + ReceivedFrameHistogramsAreNotUpdatedForTooFewSamples) { + for (int i = 0; i < kMinRequiredSamples - 1; ++i) { + statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight))); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.ReceivedHeightInPixels")); + EXPECT_METRIC_EQ(0, + 
metrics::NumSamples("WebRTC.Video.RenderFramesPerSecond")); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond")); +} + +TEST_F(ReceiveStatisticsProxy2Test, ReceivedFrameHistogramsAreUpdated) { + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight))); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.ReceivedHeightInPixels")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.RenderFramesPerSecond")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceivedWidthInPixels", kWidth)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.ReceivedHeightInPixels", kHeight)); +} + +TEST_F(ReceiveStatisticsProxy2Test, ZeroDelayReportedIfFrameNotDelayed) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Frame not delayed, delayed frames to render: 0%. + statistics_proxy_->OnRenderedFrame( + MetaData(CreateFrameWithRenderTime(Now()))); + + // Min run time has passed. 
+ fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer", 0)); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + DelayedFrameHistogramsAreNotUpdatedIfMinRuntimeHasNotPassed) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Frame not delayed, delayed frames to render: 0%. + statistics_proxy_->OnRenderedFrame( + MetaData(CreateFrameWithRenderTime(Now()))); + + // Min run time has not passed. + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000) - + 1); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, + DelayedFramesHistogramsAreNotUpdatedIfNoRenderedFrames) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Min run time has passed. No rendered frames. 
+ fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); +} + +TEST_F(ReceiveStatisticsProxy2Test, DelayReportedIfFrameIsDelayed) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Frame delayed 1 ms, delayed frames to render: 100%. + statistics_proxy_->OnRenderedFrame( + MetaData(CreateFrameWithRenderTimeMs(Now().ms() - 1))); + + // Min run time has passed. + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer", 100)); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs", + 1)); +} + +TEST_F(ReceiveStatisticsProxy2Test, AverageDelayOfDelayedFramesIsReported) { + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, + VideoContentType::UNSPECIFIED); + + // Two frames delayed (6 ms, 10 ms), delayed frames to render: 50%. 
+ const int64_t kNowMs = Now().ms(); + + statistics_proxy_->OnRenderedFrame( + MetaData(CreateFrameWithRenderTimeMs(kNowMs - 10))); + statistics_proxy_->OnRenderedFrame( + MetaData(CreateFrameWithRenderTimeMs(kNowMs - 6))); + statistics_proxy_->OnRenderedFrame( + MetaData(CreateFrameWithRenderTimeMs(kNowMs))); + statistics_proxy_->OnRenderedFrame( + MetaData(CreateFrameWithRenderTimeMs(kNowMs + 1))); + + // Min run time has passed. + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000)); + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer", 50)); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DelayedFramesToRenderer_AvgDelayInMs", + 8)); +} + +TEST_F(ReceiveStatisticsProxy2Test, + RtcpHistogramsNotUpdatedIfMinRuntimeHasNotPassed) { + StreamDataCounters data_counters; + data_counters.first_packet_time_ms = fake_clock_.TimeInMilliseconds(); + + fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000) - + 1); + + RtcpPacketTypeCounter counter; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); + + statistics_proxy_->UpdateHistograms(absl::nullopt, data_counters, nullptr); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.FirPacketsSentPerMinute")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.PliPacketsSentPerMinute")); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.NackPacketsSentPerMinute")); +} + +TEST_F(ReceiveStatisticsProxy2Test, RtcpHistogramsAreUpdated) { + StreamDataCounters data_counters; + data_counters.first_packet_time_ms = fake_clock_.TimeInMilliseconds(); + fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000); + + const uint32_t kFirPackets = 
100; + const uint32_t kPliPackets = 200; + const uint32_t kNackPackets = 300; + + RtcpPacketTypeCounter counter; + counter.fir_packets = kFirPackets; + counter.pli_packets = kPliPackets; + counter.nack_packets = kNackPackets; + statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); + + statistics_proxy_->UpdateHistograms(absl::nullopt, data_counters, nullptr); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.FirPacketsSentPerMinute")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.PliPacketsSentPerMinute")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.NackPacketsSentPerMinute")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.FirPacketsSentPerMinute", + kFirPackets * 60 / metrics::kMinRunTimeInSeconds)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.PliPacketsSentPerMinute", + kPliPackets * 60 / metrics::kMinRunTimeInSeconds)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.NackPacketsSentPerMinute", + kNackPackets * 60 / metrics::kMinRunTimeInSeconds)); +} + +class ReceiveStatisticsProxy2TestWithFreezeDuration + : public ReceiveStatisticsProxy2Test, + public ::testing::WithParamInterface< + std::tuple> { + protected: + const uint32_t frame_duration_ms_ = {std::get<0>(GetParam())}; + const uint32_t freeze_duration_ms_ = {std::get<1>(GetParam())}; + const uint32_t expected_freeze_count_ = {std::get<2>(GetParam())}; +}; + +// It is a freeze if: +// frame_duration_ms >= max(3 * avg_frame_duration, avg_frame_duration + 150) +// where avg_frame_duration is average duration of last 30 frames including +// the current one. 
+// +// Condition 1: 3 * avg_frame_duration > avg_frame_duration + 150 +const auto kFreezeDetectionCond1Freeze = std::make_tuple(150, 483, 1); +const auto kFreezeDetectionCond1NotFreeze = std::make_tuple(150, 482, 0); +// Condition 2: 3 * avg_frame_duration < avg_frame_duration + 150 +const auto kFreezeDetectionCond2Freeze = std::make_tuple(30, 185, 1); +const auto kFreezeDetectionCond2NotFreeze = std::make_tuple(30, 184, 0); + +INSTANTIATE_TEST_SUITE_P(_, + ReceiveStatisticsProxy2TestWithFreezeDuration, + ::testing::Values(kFreezeDetectionCond1Freeze, + kFreezeDetectionCond1NotFreeze, + kFreezeDetectionCond2Freeze, + kFreezeDetectionCond2NotFreeze)); + +TEST_P(ReceiveStatisticsProxy2TestWithFreezeDuration, FreezeDetection) { + VideoReceiveStream::Stats stats = statistics_proxy_->GetStats(); + EXPECT_EQ(0u, stats.freeze_count); + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + // Add a very long frame. This is need to verify that average frame + // duration, which is supposed to be calculated as mean of durations of + // last 30 frames, is calculated correctly. 
+ statistics_proxy_->OnRenderedFrame(MetaData(frame)); + fake_clock_.AdvanceTimeMilliseconds(2000); + + for (size_t i = 0; + i <= VideoQualityObserver::kAvgInterframeDelaysWindowSizeFrames; ++i) { + fake_clock_.AdvanceTimeMilliseconds(frame_duration_ms_); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + } + + fake_clock_.AdvanceTimeMilliseconds(freeze_duration_ms_); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + stats = statistics_proxy_->GetStats(); + EXPECT_EQ(stats.freeze_count, expected_freeze_count_); +} + +class ReceiveStatisticsProxy2TestWithContent + : public ReceiveStatisticsProxy2Test, + public ::testing::WithParamInterface { + protected: + const webrtc::VideoContentType content_type_{GetParam()}; +}; + +INSTANTIATE_TEST_SUITE_P(ContentTypes, + ReceiveStatisticsProxy2TestWithContent, + ::testing::Values(VideoContentType::UNSPECIFIED, + VideoContentType::SCREENSHARE)); + +TEST_P(ReceiveStatisticsProxy2TestWithContent, InterFrameDelaysAreReported) { + const int kInterFrameDelayMs = 33; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // One extra with double the interval. 
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + const int kExpectedInterFrame = + (kInterFrameDelayMs * (kMinRequiredSamples - 1) + + kInterFrameDelayMs * 2) / + kMinRequiredSamples; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedInterFrame, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs * 2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + } else { + EXPECT_METRIC_EQ(kExpectedInterFrame, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs * 2, + metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + InterFrameDelaysPercentilesAreReported) { + const int kInterFrameDelayMs = 33; + const int kLastFivePercentsSamples = kMinRequiredSamples * 5 / 100; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples - kLastFivePercentsSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + } + // Last 5% of intervals are double in size. + for (int i = 0; i < kLastFivePercentsSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(2 * kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + } + // Final sample is outlier and 10 times as big. 
+ fake_clock_.AdvanceTimeMilliseconds(10 * kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + const int kExpectedInterFrame = kInterFrameDelayMs * 2; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedInterFrame, + metrics::MinSample( + "WebRTC.Video.Screenshare.InterframeDelay95PercentileInMs")); + } else { + EXPECT_METRIC_EQ( + kExpectedInterFrame, + metrics::MinSample("WebRTC.Video.InterframeDelay95PercentileInMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + MaxInterFrameDelayOnlyWithValidAverage) { + const int kInterFrameDelayMs = 33; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + // |kMinRequiredSamples| samples, and thereby intervals, is required. That + // means we're one frame short of having a valid data set. 
+ statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(0, + metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ(0, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + MaxInterFrameDelayOnlyWithPause) { + const int kInterFrameDelayMs = 33; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + loop_.Flush(); + // At this state, we should have a valid inter-frame delay. + // Indicate stream paused and make a large jump in time. + statistics_proxy_->OnStreamInactive(); + fake_clock_.AdvanceTimeMilliseconds(5000); + + // Insert two more frames. The interval during the pause should be + // disregarded in the stats. 
+ statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + } else { + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, FreezesAreReported) { + const int kInterFrameDelayMs = 33; + const int kFreezeDelayMs = 200; + const int kCallDurationMs = + kMinRequiredSamples * kInterFrameDelayMs + kFreezeDelayMs; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + VideoFrameMetaData meta = MetaData(frame); + statistics_proxy_->OnDecodedFrame(meta, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Add extra freeze. 
+ fake_clock_.AdvanceTimeMilliseconds(kFreezeDelayMs); + VideoFrameMetaData meta = MetaData(frame); + statistics_proxy_->OnDecodedFrame(meta, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + const int kExpectedTimeBetweenFreezes = + kInterFrameDelayMs * (kMinRequiredSamples - 1); + const int kExpectedNumberFreezesPerMinute = 60 * 1000 / kCallDurationMs; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kFreezeDelayMs + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.Screenshare.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ(kExpectedTimeBetweenFreezes, + metrics::MinSample( + "WebRTC.Video.Screenshare.MeanTimeBetweenFreezesMs")); + EXPECT_METRIC_EQ( + kExpectedNumberFreezesPerMinute, + metrics::MinSample("WebRTC.Video.Screenshare.NumberFreezesPerMinute")); + } else { + EXPECT_METRIC_EQ(kFreezeDelayMs + kInterFrameDelayMs, + metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ( + kExpectedTimeBetweenFreezes, + metrics::MinSample("WebRTC.Video.MeanTimeBetweenFreezesMs")); + EXPECT_METRIC_EQ(kExpectedNumberFreezesPerMinute, + metrics::MinSample("WebRTC.Video.NumberFreezesPerMinute")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, HarmonicFrameRateIsReported) { + const int kFrameDurationMs = 33; + const int kFreezeDurationMs = 200; + const int kPauseDurationMs = 10000; + const int kCallDurationMs = kMinRequiredSamples * kFrameDurationMs + + kFreezeDurationMs + kPauseDurationMs; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kFrameDurationMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + } + + // Freezes and pauses should be included into harmonic frame rate. + // Add freeze. 
+ loop_.Flush(); + fake_clock_.AdvanceTimeMilliseconds(kFreezeDurationMs); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + // Add pause. + loop_.Flush(); + fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs); + statistics_proxy_->OnStreamInactive(); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + double kSumSquaredFrameDurationSecs = + (kMinRequiredSamples - 1) * + (kFrameDurationMs / 1000.0 * kFrameDurationMs / 1000.0); + kSumSquaredFrameDurationSecs += + kFreezeDurationMs / 1000.0 * kFreezeDurationMs / 1000.0; + kSumSquaredFrameDurationSecs += + kPauseDurationMs / 1000.0 * kPauseDurationMs / 1000.0; + const int kExpectedHarmonicFrameRateFps = + std::round(kCallDurationMs / (1000 * kSumSquaredFrameDurationSecs)); + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedHarmonicFrameRateFps, + metrics::MinSample("WebRTC.Video.Screenshare.HarmonicFrameRate")); + } else { + EXPECT_METRIC_EQ(kExpectedHarmonicFrameRateFps, + metrics::MinSample("WebRTC.Video.HarmonicFrameRate")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, PausesAreIgnored) { + const int kInterFrameDelayMs = 33; + const int kPauseDurationMs = 10000; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples; ++i) { + VideoFrameMetaData meta = MetaData(frame); + statistics_proxy_->OnDecodedFrame(meta, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Add a pause. + fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs); + statistics_proxy_->OnStreamInactive(); + // Second playback interval with triple the length. 
+ for (int i = 0; i <= kMinRequiredSamples * 3; ++i) { + VideoFrameMetaData meta = MetaData(frame); + statistics_proxy_->OnDecodedFrame(meta, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + // Average of two playback intervals. + const int kExpectedTimeBetweenFreezes = + kInterFrameDelayMs * kMinRequiredSamples * 2; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ(-1, metrics::MinSample( + "WebRTC.Video.Screenshare.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ(kExpectedTimeBetweenFreezes, + metrics::MinSample( + "WebRTC.Video.Screenshare.MeanTimeBetweenFreezesMs")); + } else { + EXPECT_METRIC_EQ(-1, + metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs")); + EXPECT_METRIC_EQ( + kExpectedTimeBetweenFreezes, + metrics::MinSample("WebRTC.Video.MeanTimeBetweenFreezesMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, ManyPausesAtTheBeginning) { + const int kInterFrameDelayMs = 33; + const int kPauseDurationMs = 10000; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i <= kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + statistics_proxy_->OnStreamInactive(); + fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs); + + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + // No freezes should be detected, as all long inter-frame delays were + // pauses. 
+ if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ(-1, metrics::MinSample( + "WebRTC.Video.Screenshare.MeanFreezeDurationMs")); + } else { + EXPECT_METRIC_EQ(-1, + metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, TimeInHdReported) { + const int kInterFrameDelayMs = 20; + webrtc::VideoFrame frame_hd = CreateFrame(1280, 720); + webrtc::VideoFrame frame_sd = CreateFrame(640, 360); + + // HD frames. + for (int i = 0; i < kMinRequiredSamples; ++i) { + VideoFrameMetaData meta = MetaData(frame_hd); + statistics_proxy_->OnDecodedFrame(meta, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // SD frames. + for (int i = 0; i < 2 * kMinRequiredSamples; ++i) { + VideoFrameMetaData meta = MetaData(frame_sd); + statistics_proxy_->OnDecodedFrame(meta, absl::nullopt, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Extra last frame. + statistics_proxy_->OnRenderedFrame(MetaData(frame_sd)); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedTimeInHdPercents = 33; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedTimeInHdPercents, + metrics::MinSample("WebRTC.Video.Screenshare.TimeInHdPercentage")); + } else { + EXPECT_METRIC_EQ(kExpectedTimeInHdPercents, + metrics::MinSample("WebRTC.Video.TimeInHdPercentage")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, TimeInBlockyVideoReported) { + const int kInterFrameDelayMs = 20; + const int kHighQp = 80; + const int kLowQp = 30; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + // High quality frames. 
+ for (int i = 0; i < kMinRequiredSamples; ++i) { + VideoFrameMetaData meta = MetaData(frame); + statistics_proxy_->OnDecodedFrame(meta, kLowQp, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Blocky frames. + for (int i = 0; i < 2 * kMinRequiredSamples; ++i) { + VideoFrameMetaData meta = MetaData(frame); + statistics_proxy_->OnDecodedFrame(meta, kHighQp, 0, content_type_); + statistics_proxy_->OnRenderedFrame(meta); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + // Extra last frame. + statistics_proxy_->OnDecodedFrame(frame, kHighQp, 0, content_type_); + statistics_proxy_->OnRenderedFrame(MetaData(frame)); + + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + const int kExpectedTimeInHdPercents = 66; + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedTimeInHdPercents, + metrics::MinSample( + "WebRTC.Video.Screenshare.TimeInBlockyVideoPercentage")); + } else { + EXPECT_METRIC_EQ( + kExpectedTimeInHdPercents, + metrics::MinSample("WebRTC.Video.TimeInBlockyVideoPercentage")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, DownscalesReported) { + const int kInterFrameDelayMs = 2000; // To ensure long enough call duration. + + webrtc::VideoFrame frame_hd = CreateFrame(1280, 720); + webrtc::VideoFrame frame_sd = CreateFrame(640, 360); + webrtc::VideoFrame frame_ld = CreateFrame(320, 180); + + // Call once to pass content type. + statistics_proxy_->OnDecodedFrame(frame_hd, absl::nullopt, 0, content_type_); + + loop_.Flush(); + statistics_proxy_->OnRenderedFrame(MetaData(frame_hd)); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + // Downscale. + statistics_proxy_->OnRenderedFrame(MetaData(frame_sd)); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + // Downscale. 
+ statistics_proxy_->OnRenderedFrame(MetaData(frame_ld)); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + + statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + nullptr); + const int kExpectedDownscales = 30; // 2 per 4 seconds = 30 per minute. + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + kExpectedDownscales, + metrics::MinSample("WebRTC.Video.Screenshare." + "NumberResolutionDownswitchesPerMinute")); + } else { + EXPECT_METRIC_EQ(kExpectedDownscales, + metrics::MinSample( + "WebRTC.Video.NumberResolutionDownswitchesPerMinute")); + } +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, DecodeTimeReported) { + const int kInterFrameDelayMs = 20; + const int kLowQp = 30; + const int kDecodeMs = 7; + + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + for (int i = 0; i < kMinRequiredSamples; ++i) { + statistics_proxy_->OnDecodedFrame(frame, kLowQp, kDecodeMs, content_type_); + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs); + } + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.DecodeTimeInMs", kDecodeMs)); +} + +TEST_P(ReceiveStatisticsProxy2TestWithContent, + StatsAreSlicedOnSimulcastAndExperiment) { + const uint8_t experiment_id = 1; + webrtc::VideoContentType content_type = content_type_; + videocontenttypehelpers::SetExperimentId(&content_type, experiment_id); + const int kInterFrameDelayMs1 = 30; + const int kInterFrameDelayMs2 = 50; + webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); + + videocontenttypehelpers::SetSimulcastId(&content_type, 1); + for (int i = 0; i <= kMinRequiredSamples; ++i) { + fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs1); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type); + } + + videocontenttypehelpers::SetSimulcastId(&content_type, 2); + for (int i = 0; i <= kMinRequiredSamples; ++i) { + 
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs2); + statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type); + } + FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + + if (videocontenttypehelpers::IsScreenshare(content_type)) { + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S0")); + EXPECT_METRIC_EQ(1, metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples( + "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S1")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayMaxInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs1, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ( + (kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + (kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample( + "WebRTC.Video.Screenshare.InterframeDelayInMs.ExperimentGroup0")); + } else { + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ( + 1, 
metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S0")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S1")); + EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ(1, + metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs" + ".ExperimentGroup0")); + EXPECT_METRIC_EQ(kInterFrameDelayMs1, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S0")); + EXPECT_METRIC_EQ(kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S1")); + EXPECT_METRIC_EQ((kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample("WebRTC.Video.InterframeDelayInMs")); + EXPECT_METRIC_EQ(kInterFrameDelayMs2, + metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs")); + EXPECT_METRIC_EQ((kInterFrameDelayMs1 + kInterFrameDelayMs2) / 2, + metrics::MinSample( + "WebRTC.Video.InterframeDelayInMs.ExperimentGroup0")); + } +} + +class DecodeTimeHistogramsKillswitch { + public: + explicit DecodeTimeHistogramsKillswitch(bool disable_histograms) + : field_trial_(disable_histograms + ? 
"WebRTC-DecodeTimeHistogramsKillSwitch/Enabled/" + : "") {} + + private: + webrtc::test::ScopedFieldTrials field_trial_; +}; + +class ReceiveStatisticsProxy2TestWithDecodeTimeHistograms + : public DecodeTimeHistogramsKillswitch, + public ::testing::WithParamInterface< + std::tuple>, + public ReceiveStatisticsProxy2Test { + public: + ReceiveStatisticsProxy2TestWithDecodeTimeHistograms() + : DecodeTimeHistogramsKillswitch(std::get<0>(GetParam())) {} + + protected: + const std::string kUmaPrefix = "WebRTC.Video.DecodeTimePerFrameInMs."; + const int expected_number_of_samples_ = {std::get<1>(GetParam())}; + const int width_ = {std::get<2>(GetParam())}; + const int height_ = {std::get<3>(GetParam())}; + const VideoCodecType codec_type_ = {std::get<4>(GetParam())}; + const std::string implementation_name_ = {std::get<5>(GetParam())}; + const std::string uma_histogram_name_ = + kUmaPrefix + (codec_type_ == kVideoCodecVP9 ? "Vp9." : "H264.") + + (height_ == 2160 ? "4k." : "Hd.") + + (implementation_name_.compare("ExternalDecoder") == 0 ? 
"Hw" : "Sw"); +}; + +TEST_P(ReceiveStatisticsProxy2TestWithDecodeTimeHistograms, + DecodeTimeHistogramsUpdated) { + constexpr int kNumberOfFrames = 10; + constexpr int kDecodeTimeMs = 7; + constexpr int kFrameDurationMs = 1000 / 60; + + webrtc::VideoFrame frame = CreateFrame(width_, height_); + + statistics_proxy_->OnDecoderImplementationName(implementation_name_.c_str()); + statistics_proxy_->OnPreDecode(codec_type_, /*qp=*/0); + + for (int i = 0; i < kNumberOfFrames; ++i) { + statistics_proxy_->OnDecodedFrame(frame, /*qp=*/absl::nullopt, + kDecodeTimeMs, + VideoContentType::UNSPECIFIED); + fake_clock_.AdvanceTimeMilliseconds(kFrameDurationMs); + } + + loop_.Flush(); + + EXPECT_METRIC_EQ(expected_number_of_samples_, + metrics::NumSamples(uma_histogram_name_)); + EXPECT_METRIC_EQ(expected_number_of_samples_, + metrics::NumEvents(uma_histogram_name_, kDecodeTimeMs)); +} + +const auto kVp94kHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecVP9, + /*implementation=*/"ExternalDecoder"); +const auto kVp94kSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecVP9, + /*implementation=*/"libvpx"); +const auto kVp9HdHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"ExternalDecoder"); +const auto kVp9HdSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"libvpx"); +const auto kH2644kHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecH264, + /*implementation=*/"ExternalDecoder"); +const auto kH2644kSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/3840, + /*height=*/2160, + kVideoCodecH264, + 
/*implementation=*/"FFmpeg"); +const auto kH264HdHw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecH264, + /*implementation=*/"ExternalDecoder"); +const auto kH264HdSw = std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecH264, + /*implementation=*/"FFmpeg"); + +INSTANTIATE_TEST_SUITE_P(AllHistogramsPopulated, + ReceiveStatisticsProxy2TestWithDecodeTimeHistograms, + ::testing::Values(kVp94kHw, + kVp94kSw, + kVp9HdHw, + kVp9HdSw, + kH2644kHw, + kH2644kSw, + kH264HdHw, + kH264HdSw)); + +const auto kKillswitchDisabled = + std::make_tuple(/*killswitch=*/false, + /*expected_number_of_samples=*/10, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"libvpx"); +const auto kKillswitchEnabled = + std::make_tuple(/*killswitch=*/true, + /*expected_number_of_samples=*/0, + /*width=*/1920, + /*height=*/1080, + kVideoCodecVP9, + /*implementation=*/"libvpx"); + +INSTANTIATE_TEST_SUITE_P(KillswitchEffective, + ReceiveStatisticsProxy2TestWithDecodeTimeHistograms, + ::testing::Values(kKillswitchDisabled, + kKillswitchEnabled)); + +} // namespace internal +} // namespace webrtc diff --git a/video/receive_statistics_proxy_unittest.cc b/video/receive_statistics_proxy_unittest.cc index 0f0d56ceba..626542c810 100644 --- a/video/receive_statistics_proxy_unittest.cc +++ b/video/receive_statistics_proxy_unittest.cc @@ -189,9 +189,9 @@ TEST_F(ReceiveStatisticsProxyTest, ReportsContentType) { TEST_F(ReceiveStatisticsProxyTest, ReportsMaxTotalInterFrameDelay) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - const TimeDelta kInterFrameDelay1 = TimeDelta::ms(100); - const TimeDelta kInterFrameDelay2 = TimeDelta::ms(200); - const TimeDelta kInterFrameDelay3 = TimeDelta::ms(300); + const TimeDelta kInterFrameDelay1 = TimeDelta::Millis(100); + const TimeDelta kInterFrameDelay2 = TimeDelta::Millis(200); + const 
TimeDelta kInterFrameDelay3 = TimeDelta::Millis(300); double expected_total_inter_frame_delay = 0; double expected_total_squared_inter_frame_delay = 0; EXPECT_EQ(expected_total_inter_frame_delay, diff --git a/video/rtp_streams_synchronizer.cc b/video/rtp_streams_synchronizer.cc index 156ebbb41f..29ace90431 100644 --- a/video/rtp_streams_synchronizer.cc +++ b/video/rtp_streams_synchronizer.cc @@ -13,12 +13,16 @@ #include "absl/types/optional.h" #include "call/syncable.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/rtp_to_ntp_estimator.h" namespace webrtc { namespace { +// Time interval for logging stats. +constexpr int64_t kStatsLogIntervalMs = 10000; + bool UpdateMeasurements(StreamSynchronization::Measurements* stream, const Syncable::Info& info) { RTC_DCHECK(stream); @@ -38,7 +42,8 @@ RtpStreamsSynchronizer::RtpStreamsSynchronizer(Syncable* syncable_video) : syncable_video_(syncable_video), syncable_audio_(nullptr), sync_(), - last_sync_time_(rtc::TimeNanos()) { + last_sync_time_(rtc::TimeNanos()), + last_stats_log_ms_(rtc::TimeMillis()) { RTC_DCHECK(syncable_video); process_thread_checker_.Detach(); } @@ -46,7 +51,7 @@ RtpStreamsSynchronizer::RtpStreamsSynchronizer(Syncable* syncable_video) RtpStreamsSynchronizer::~RtpStreamsSynchronizer() = default; void RtpStreamsSynchronizer::ConfigureSync(Syncable* syncable_audio) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (syncable_audio == syncable_audio_) { // This prevents expensive no-ops. 
return; @@ -71,17 +76,31 @@ void RtpStreamsSynchronizer::Process() { RTC_DCHECK_RUN_ON(&process_thread_checker_); last_sync_time_ = rtc::TimeNanos(); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (!syncable_audio_) { return; } RTC_DCHECK(sync_.get()); + bool log_stats = false; + const int64_t now_ms = rtc::TimeMillis(); + if (now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) { + last_stats_log_ms_ = now_ms; + log_stats = true; + } + + int64_t last_audio_receive_time_ms = + audio_measurement_.latest_receive_time_ms; absl::optional audio_info = syncable_audio_->GetInfo(); if (!audio_info || !UpdateMeasurements(&audio_measurement_, *audio_info)) { return; } + if (last_audio_receive_time_ms == audio_measurement_.latest_receive_time_ms) { + // No new audio packet has been received since last update. + return; + } + int64_t last_video_receive_ms = video_measurement_.latest_receive_time_ms; absl::optional video_info = syncable_video_->GetInfo(); if (!video_info || !UpdateMeasurements(&video_measurement_, *video_info)) { @@ -100,11 +119,21 @@ void RtpStreamsSynchronizer::Process() { return; } + if (log_stats) { + RTC_LOG(LS_INFO) << "Sync info stats: " << now_ms + << ", {ssrc: " << sync_->audio_stream_id() << ", " + << "cur_delay_ms: " << audio_info->current_delay_ms + << "} {ssrc: " << sync_->video_stream_id() << ", " + << "cur_delay_ms: " << video_info->current_delay_ms + << "} {relative_delay_ms: " << relative_delay_ms << "} "; + } + TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", video_info->current_delay_ms); TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", audio_info->current_delay_ms); TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms); + int target_audio_delay_ms = 0; int target_video_delay_ms = video_info->current_delay_ms; // Calculate the necessary extra audio delay and desired total video @@ -114,6 +143,14 @@ void RtpStreamsSynchronizer::Process() { return; } + if (log_stats) { + RTC_LOG(LS_INFO) << "Sync delay stats: " << 
now_ms + << ", {ssrc: " << sync_->audio_stream_id() << ", " + << "target_delay_ms: " << target_audio_delay_ms + << "} {ssrc: " << sync_->video_stream_id() << ", " + << "target_delay_ms: " << target_video_delay_ms << "} "; + } + syncable_audio_->SetMinimumPlayoutDelay(target_audio_delay_ms); syncable_video_->SetMinimumPlayoutDelay(target_video_delay_ms); } @@ -127,7 +164,7 @@ bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs( int64_t* video_playout_ntp_ms, int64_t* stream_offset_ms, double* estimated_freq_khz) const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (!syncable_audio_) { return false; } diff --git a/video/rtp_streams_synchronizer.h b/video/rtp_streams_synchronizer.h index b6e5e61575..732c9a7d77 100644 --- a/video/rtp_streams_synchronizer.h +++ b/video/rtp_streams_synchronizer.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -// RtpStreamsSynchronizer is responsible for synchronization audio and video for -// a given voice engine channel and video receive stream. +// RtpStreamsSynchronizer is responsible for synchronizing audio and video for +// a given audio receive stream and video receive stream. #ifndef VIDEO_RTP_STREAMS_SYNCHRONIZER_H_ #define VIDEO_RTP_STREAMS_SYNCHRONIZER_H_ @@ -17,7 +17,7 @@ #include #include "modules/include/module.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" #include "video/stream_synchronization.h" @@ -25,6 +25,7 @@ namespace webrtc { class Syncable; +// DEPRECATED. 
class RtpStreamsSynchronizer : public Module { public: explicit RtpStreamsSynchronizer(Syncable* syncable_video); @@ -50,14 +51,15 @@ class RtpStreamsSynchronizer : public Module { private: Syncable* syncable_video_; - rtc::CriticalSection crit_; - Syncable* syncable_audio_ RTC_GUARDED_BY(crit_); - std::unique_ptr sync_ RTC_GUARDED_BY(crit_); - StreamSynchronization::Measurements audio_measurement_ RTC_GUARDED_BY(crit_); - StreamSynchronization::Measurements video_measurement_ RTC_GUARDED_BY(crit_); + mutable Mutex mutex_; + Syncable* syncable_audio_ RTC_GUARDED_BY(mutex_); + std::unique_ptr sync_ RTC_GUARDED_BY(mutex_); + StreamSynchronization::Measurements audio_measurement_ RTC_GUARDED_BY(mutex_); + StreamSynchronization::Measurements video_measurement_ RTC_GUARDED_BY(mutex_); rtc::ThreadChecker process_thread_checker_; int64_t last_sync_time_ RTC_GUARDED_BY(&process_thread_checker_); + int64_t last_stats_log_ms_ RTC_GUARDED_BY(&process_thread_checker_); }; } // namespace webrtc diff --git a/video/rtp_streams_synchronizer2.cc b/video/rtp_streams_synchronizer2.cc new file mode 100644 index 0000000000..4096fceb99 --- /dev/null +++ b/video/rtp_streams_synchronizer2.cc @@ -0,0 +1,216 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/rtp_streams_synchronizer2.h" + +#include "absl/types/optional.h" +#include "call/syncable.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" +#include "system_wrappers/include/rtp_to_ntp_estimator.h" + +namespace webrtc { +namespace internal { +namespace { +// Time interval for logging stats. +constexpr int64_t kStatsLogIntervalMs = 10000; +constexpr TimeDelta kSyncInterval = TimeDelta::Millis(1000); + +bool UpdateMeasurements(StreamSynchronization::Measurements* stream, + const Syncable::Info& info) { + stream->latest_timestamp = info.latest_received_capture_timestamp; + stream->latest_receive_time_ms = info.latest_receive_time_ms; + bool new_rtcp_sr = false; + return stream->rtp_to_ntp.UpdateMeasurements( + info.capture_time_ntp_secs, info.capture_time_ntp_frac, + info.capture_time_source_clock, &new_rtcp_sr); +} + +} // namespace + +RtpStreamsSynchronizer::RtpStreamsSynchronizer(TaskQueueBase* main_queue, + Syncable* syncable_video) + : task_queue_(main_queue), + syncable_video_(syncable_video), + last_stats_log_ms_(rtc::TimeMillis()) { + RTC_DCHECK(syncable_video); +} + +RtpStreamsSynchronizer::~RtpStreamsSynchronizer() { + RTC_DCHECK_RUN_ON(&main_checker_); + repeating_task_.Stop(); +} + +void RtpStreamsSynchronizer::ConfigureSync(Syncable* syncable_audio) { + RTC_DCHECK_RUN_ON(&main_checker_); + + // Prevent expensive no-ops. 
+ if (syncable_audio == syncable_audio_) + return; + + syncable_audio_ = syncable_audio; + sync_.reset(nullptr); + if (!syncable_audio_) { + repeating_task_.Stop(); + return; + } + + sync_.reset( + new StreamSynchronization(syncable_video_->id(), syncable_audio_->id())); + + if (repeating_task_.Running()) + return; + + repeating_task_ = + RepeatingTaskHandle::DelayedStart(task_queue_, kSyncInterval, [this]() { + UpdateDelay(); + return kSyncInterval; + }); +} + +void RtpStreamsSynchronizer::UpdateDelay() { + RTC_DCHECK_RUN_ON(&main_checker_); + + if (!syncable_audio_) + return; + + RTC_DCHECK(sync_.get()); + + bool log_stats = false; + const int64_t now_ms = rtc::TimeMillis(); + if (now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) { + last_stats_log_ms_ = now_ms; + log_stats = true; + } + + int64_t last_audio_receive_time_ms = + audio_measurement_.latest_receive_time_ms; + absl::optional audio_info = syncable_audio_->GetInfo(); + if (!audio_info || !UpdateMeasurements(&audio_measurement_, *audio_info)) { + return; + } + + if (last_audio_receive_time_ms == audio_measurement_.latest_receive_time_ms) { + // No new audio packet has been received since last update. + return; + } + + int64_t last_video_receive_ms = video_measurement_.latest_receive_time_ms; + absl::optional video_info = syncable_video_->GetInfo(); + if (!video_info || !UpdateMeasurements(&video_measurement_, *video_info)) { + return; + } + + if (last_video_receive_ms == video_measurement_.latest_receive_time_ms) { + // No new video packet has been received since last update. + return; + } + + int relative_delay_ms; + // Calculate how much later or earlier the audio stream is compared to video. 
+ if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_, + &relative_delay_ms)) { + return; + } + + if (log_stats) { + RTC_LOG(LS_INFO) << "Sync info stats: " << now_ms + << ", {ssrc: " << sync_->audio_stream_id() << ", " + << "cur_delay_ms: " << audio_info->current_delay_ms + << "} {ssrc: " << sync_->video_stream_id() << ", " + << "cur_delay_ms: " << video_info->current_delay_ms + << "} {relative_delay_ms: " << relative_delay_ms << "} "; + } + + TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", + video_info->current_delay_ms); + TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", + audio_info->current_delay_ms); + TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms); + + int target_audio_delay_ms = 0; + int target_video_delay_ms = video_info->current_delay_ms; + // Calculate the necessary extra audio delay and desired total video + // delay to get the streams in sync. + if (!sync_->ComputeDelays(relative_delay_ms, audio_info->current_delay_ms, + &target_audio_delay_ms, &target_video_delay_ms)) { + return; + } + + if (log_stats) { + RTC_LOG(LS_INFO) << "Sync delay stats: " << now_ms + << ", {ssrc: " << sync_->audio_stream_id() << ", " + << "target_delay_ms: " << target_audio_delay_ms + << "} {ssrc: " << sync_->video_stream_id() << ", " + << "target_delay_ms: " << target_video_delay_ms << "} "; + } + + if (!syncable_audio_->SetMinimumPlayoutDelay(target_audio_delay_ms)) { + sync_->ReduceAudioDelay(); + } + if (!syncable_video_->SetMinimumPlayoutDelay(target_video_delay_ms)) { + sync_->ReduceVideoDelay(); + } +} + +// TODO(https://bugs.webrtc.org/7065): Move RtpToNtpEstimator out of +// RtpStreamsSynchronizer and into respective receive stream to always populate +// the estimated playout timestamp. 
+bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs( + uint32_t rtp_timestamp, + int64_t render_time_ms, + int64_t* video_playout_ntp_ms, + int64_t* stream_offset_ms, + double* estimated_freq_khz) const { + RTC_DCHECK_RUN_ON(&main_checker_); + + if (!syncable_audio_) + return false; + + uint32_t audio_rtp_timestamp; + int64_t time_ms; + if (!syncable_audio_->GetPlayoutRtpTimestamp(&audio_rtp_timestamp, + &time_ms)) { + return false; + } + + int64_t latest_audio_ntp; + if (!audio_measurement_.rtp_to_ntp.Estimate(audio_rtp_timestamp, + &latest_audio_ntp)) { + return false; + } + + syncable_audio_->SetEstimatedPlayoutNtpTimestampMs(latest_audio_ntp, time_ms); + + int64_t latest_video_ntp; + if (!video_measurement_.rtp_to_ntp.Estimate(rtp_timestamp, + &latest_video_ntp)) { + return false; + } + + // Current audio ntp. + int64_t now_ms = rtc::TimeMillis(); + latest_audio_ntp += (now_ms - time_ms); + + // Remove video playout delay. + int64_t time_to_render_ms = render_time_ms - now_ms; + if (time_to_render_ms > 0) + latest_video_ntp -= time_to_render_ms; + + *video_playout_ntp_ms = latest_video_ntp; + *stream_offset_ms = latest_audio_ntp - latest_video_ntp; + *estimated_freq_khz = video_measurement_.rtp_to_ntp.params()->frequency_khz; + return true; +} + +} // namespace internal +} // namespace webrtc diff --git a/video/rtp_streams_synchronizer2.h b/video/rtp_streams_synchronizer2.h new file mode 100644 index 0000000000..6a522e801d --- /dev/null +++ b/video/rtp_streams_synchronizer2.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_RTP_STREAMS_SYNCHRONIZER2_H_ +#define VIDEO_RTP_STREAMS_SYNCHRONIZER2_H_ + +#include + +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "video/stream_synchronization.h" + +namespace webrtc { + +class Syncable; + +namespace internal { + +// RtpStreamsSynchronizer is responsible for synchronizing audio and video for +// a given audio receive stream and video receive stream. +class RtpStreamsSynchronizer { + public: + RtpStreamsSynchronizer(TaskQueueBase* main_queue, Syncable* syncable_video); + ~RtpStreamsSynchronizer(); + + void ConfigureSync(Syncable* syncable_audio); + + // Gets the estimated playout NTP timestamp for the video frame with + // |rtp_timestamp| and the sync offset between the current played out audio + // frame and the video frame. Returns true on success, false otherwise. + // The |estimated_freq_khz| is the frequency used in the RTP to NTP timestamp + // conversion. + bool GetStreamSyncOffsetInMs(uint32_t rtp_timestamp, + int64_t render_time_ms, + int64_t* video_playout_ntp_ms, + int64_t* stream_offset_ms, + double* estimated_freq_khz) const; + + private: + void UpdateDelay(); + + TaskQueueBase* const task_queue_; + + // Used to check if we're running on the main thread/task queue. + // The reason we currently don't use RTC_DCHECK_RUN_ON(task_queue_) is because + // we might be running on an rtc::Thread implementation of TaskQueue, which + // does not consistently set itself as the active TaskQueue. + // Instead, we rely on a SequenceChecker for now. 
+ SequenceChecker main_checker_; + + Syncable* const syncable_video_; + + Syncable* syncable_audio_ RTC_GUARDED_BY(main_checker_) = nullptr; + std::unique_ptr sync_ RTC_GUARDED_BY(main_checker_); + StreamSynchronization::Measurements audio_measurement_ + RTC_GUARDED_BY(main_checker_); + StreamSynchronization::Measurements video_measurement_ + RTC_GUARDED_BY(main_checker_); + RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(main_checker_); + int64_t last_stats_log_ms_ RTC_GUARDED_BY(&main_checker_); +}; + +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_RTP_STREAMS_SYNCHRONIZER2_H_ diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc index 9f5fe0248e..4c02413395 100644 --- a/video/rtp_video_stream_receiver.cc +++ b/video/rtp_video_stream_receiver.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include @@ -24,21 +25,23 @@ #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" #include "modules/utility/include/process_thread.h" +#include "modules/video_coding/deprecated/nack_module.h" #include 
"modules/video_coding/frame_object.h" #include "modules/video_coding/h264_sprop_parameter_sets.h" #include "modules/video_coding/h264_sps_pps_tracker.h" -#include "modules/video_coding/nack_module.h" #include "modules/video_coding/packet_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" @@ -46,6 +49,7 @@ #include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" +#include "system_wrappers/include/ntp_time.h" #include "video/receive_statistics_proxy.h" namespace webrtc { @@ -73,16 +77,15 @@ int PacketBufferMaxSize() { return packet_buffer_max_size; } -} // namespace - std::unique_ptr CreateRtpRtcpModule( Clock* clock, ReceiveStatistics* receive_statistics, Transport* outgoing_transport, RtcpRttStats* rtt_stats, RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, uint32_t local_ssrc) { - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.clock = clock; configuration.audio = false; configuration.receiver_only = true; @@ -91,9 +94,10 @@ std::unique_ptr CreateRtpRtcpModule( configuration.rtt_stats = rtt_stats; configuration.rtcp_packet_type_counter_observer = rtcp_packet_type_counter_observer; + configuration.rtcp_cname_callback = rtcp_cname_callback; configuration.local_media_ssrc = local_ssrc; - std::unique_ptr rtp_rtcp = RtpRtcp::Create(configuration); + std::unique_ptr rtp_rtcp = RtpRtcp::DEPRECATED_Create(configuration); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); return rtp_rtcp; @@ -101,6 +105,8 @@ std::unique_ptr CreateRtpRtcpModule( static const int kPacketLogIntervalMs = 10000; +} // namespace + RtpVideoStreamReceiver::RtcpFeedbackBuffer::RtcpFeedbackBuffer( KeyFrameRequestSender* key_frame_request_sender, NackSender* nack_sender, @@ -115,7 +121,7 @@ RtpVideoStreamReceiver::RtcpFeedbackBuffer::RtcpFeedbackBuffer( } void 
RtpVideoStreamReceiver::RtcpFeedbackBuffer::RequestKeyFrame() { - rtc::CritScope lock(&cs_); + MutexLock lock(&mutex_); request_key_frame_ = true; } @@ -123,14 +129,14 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendNack( const std::vector& sequence_numbers, bool buffering_allowed) { RTC_DCHECK(!sequence_numbers.empty()); - rtc::CritScope lock(&cs_); + MutexLock lock(&mutex_); nack_sequence_numbers_.insert(nack_sequence_numbers_.end(), sequence_numbers.cbegin(), sequence_numbers.cend()); if (!buffering_allowed) { // Note that while *buffering* is not allowed, *batching* is, meaning that // previously buffered messages may be sent along with the current message. - SendBufferedRtcpFeedback(); + SendRtcpFeedback(ConsumeRtcpFeedbackLocked()); } } @@ -140,7 +146,7 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendLossNotification( bool decodability_flag, bool buffering_allowed) { RTC_DCHECK(buffering_allowed); - rtc::CritScope lock(&cs_); + MutexLock lock(&mutex_); RTC_DCHECK(!lntf_state_) << "SendLossNotification() called twice in a row with no call to " "SendBufferedRtcpFeedback() in between."; @@ -149,37 +155,48 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendLossNotification( } void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() { - bool request_key_frame = false; - std::vector nack_sequence_numbers; - absl::optional lntf_state; + SendRtcpFeedback(ConsumeRtcpFeedback()); +} - { - rtc::CritScope lock(&cs_); - std::swap(request_key_frame, request_key_frame_); - std::swap(nack_sequence_numbers, nack_sequence_numbers_); - std::swap(lntf_state, lntf_state_); - } +RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback +RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedback() { + MutexLock lock(&mutex_); + return ConsumeRtcpFeedbackLocked(); +} - if (lntf_state) { +RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback +RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedbackLocked() { + 
ConsumedRtcpFeedback feedback; + std::swap(feedback.request_key_frame, request_key_frame_); + std::swap(feedback.nack_sequence_numbers, nack_sequence_numbers_); + std::swap(feedback.lntf_state, lntf_state_); + return feedback; +} + +void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendRtcpFeedback( + ConsumedRtcpFeedback feedback) { + if (feedback.lntf_state) { // If either a NACK or a key frame request is sent, we should buffer // the LNTF and wait for them (NACK or key frame request) to trigger // the compound feedback message. // Otherwise, the LNTF should be sent out immediately. const bool buffering_allowed = - request_key_frame || !nack_sequence_numbers.empty(); + feedback.request_key_frame || !feedback.nack_sequence_numbers.empty(); loss_notification_sender_->SendLossNotification( - lntf_state->last_decoded_seq_num, lntf_state->last_received_seq_num, - lntf_state->decodability_flag, buffering_allowed); + feedback.lntf_state->last_decoded_seq_num, + feedback.lntf_state->last_received_seq_num, + feedback.lntf_state->decodability_flag, buffering_allowed); } - if (request_key_frame) { + if (feedback.request_key_frame) { key_frame_request_sender_->RequestKeyFrame(); - } else if (!nack_sequence_numbers.empty()) { - nack_sender_->SendNack(nack_sequence_numbers, true); + } else if (!feedback.nack_sequence_numbers.empty()) { + nack_sender_->SendNack(feedback.nack_sequence_numbers, true); } } +// DEPRECATED RtpVideoStreamReceiver::RtpVideoStreamReceiver( Clock* clock, Transport* transport, @@ -192,13 +209,46 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( NackSender* nack_sender, KeyFrameRequestSender* keyframe_request_sender, video_coding::OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor) + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer) + : RtpVideoStreamReceiver(clock, + transport, + rtt_stats, + packet_router, + config, + rtp_receive_statistics, + receive_stats_proxy, + receive_stats_proxy, + 
process_thread, + nack_sender, + keyframe_request_sender, + complete_frame_callback, + frame_decryptor, + frame_transformer) {} + +RtpVideoStreamReceiver::RtpVideoStreamReceiver( + Clock* clock, + Transport* transport, + RtcpRttStats* rtt_stats, + PacketRouter* packet_router, + const VideoReceiveStream::Config* config, + ReceiveStatistics* rtp_receive_statistics, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + ProcessThread* process_thread, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender, + video_coding::OnCompleteFrameCallback* complete_frame_callback, + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer) : clock_(clock), config_(*config), packet_router_(packet_router), process_thread_(process_thread), ntp_estimator_(clock), rtp_header_extensions_(config_.rtp.extensions), + forced_playout_delay_max_ms_("max_ms", absl::nullopt), + forced_playout_delay_min_ms_("min_ms", absl::nullopt), rtp_receive_statistics_(rtp_receive_statistics), ulpfec_receiver_(UlpfecReceiver::Create(config->rtp.remote_ssrc, this, @@ -209,7 +259,8 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( rtp_receive_statistics_, transport, rtt_stats, - receive_stats_proxy, + rtcp_packet_type_counter_observer, + rtcp_cname_callback, config_.rtp.local_ssrc)), complete_frame_callback_(complete_frame_callback), keyframe_request_sender_(keyframe_request_sender), @@ -251,8 +302,9 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( if (config_.rtp.rtcp_xr.receiver_reference_time_report) rtp_rtcp_->SetRtcpXrRrtrStatus(true); - // Stats callback for CNAME changes. 
- rtp_rtcp_->RegisterRtcpCnameCallback(receive_stats_proxy); + ParseFieldTrial( + {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_}, + field_trial::FindFullName("WebRTC-ForcePlayoutDelay")); process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); @@ -263,8 +315,8 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( } if (config_.rtp.nack.rtp_history_ms != 0) { - nack_module_ = std::make_unique(clock_, &rtcp_feedback_buffer_, - &rtcp_feedback_buffer_); + nack_module_ = std::make_unique( + clock_, &rtcp_feedback_buffer_, &rtcp_feedback_buffer_); process_thread_->RegisterModule(nack_module_.get(), RTC_FROM_HERE); } @@ -279,6 +331,14 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); } } + + if (frame_transformer) { + frame_transformer_delegate_ = new rtc::RefCountedObject< + RtpVideoStreamReceiverFrameTransformerDelegate>( + this, std::move(frame_transformer), rtc::Thread::Current(), + config_.rtp.remote_ssrc); + frame_transformer_delegate_->Init(); + } } RtpVideoStreamReceiver::~RtpVideoStreamReceiver() { @@ -293,17 +353,24 @@ RtpVideoStreamReceiver::~RtpVideoStreamReceiver() { if (packet_router_) packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); UpdateHistograms(); + if (frame_transformer_delegate_) + frame_transformer_delegate_->Reset(); } void RtpVideoStreamReceiver::AddReceiveCodec( + uint8_t payload_type, const VideoCodec& video_codec, const std::map& codec_params, bool raw_payload) { + if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) || + field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); + } payload_type_map_.emplace( - video_codec.plType, - raw_payload ? std::make_unique() - : CreateVideoRtpDepacketizer(video_codec.codecType)); - pt_codec_params_.emplace(video_codec.plType, codec_params); + payload_type, raw_payload + ? 
std::make_unique() + : CreateVideoRtpDepacketizer(video_codec.codecType)); + pt_codec_params_.emplace(payload_type, codec_params); } absl::optional RtpVideoStreamReceiver::GetSyncInfo() const { @@ -314,7 +381,7 @@ absl::optional RtpVideoStreamReceiver::GetSyncInfo() const { return absl::nullopt; } { - rtc::CritScope lock(&sync_info_lock_); + MutexLock lock(&sync_info_lock_); if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { return absl::nullopt; } @@ -326,6 +393,111 @@ absl::optional RtpVideoStreamReceiver::GetSyncInfo() const { return info; } +RtpVideoStreamReceiver::ParseGenericDependenciesResult +RtpVideoStreamReceiver::ParseGenericDependenciesExtension( + const RtpPacketReceived& rtp_packet, + RTPVideoHeader* video_header) { + if (rtp_packet.HasExtension()) { + webrtc::DependencyDescriptor dependency_descriptor; + if (!rtp_packet.GetExtension( + video_structure_.get(), &dependency_descriptor)) { + // Descriptor is there, but failed to parse. Either it is invalid, + // or too old packet (after relevant video_structure_ changed), + // or too new packet (before relevant video_structure_ arrived). + // Drop such packet to be on the safe side. + // TODO(bugs.webrtc.org/10342): Stash too new packet. 
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() + << " Failed to parse dependency descriptor."; + return kDropPacket; + } + if (dependency_descriptor.attached_structure != nullptr && + !dependency_descriptor.first_packet_in_frame) { + RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() + << "Invalid dependency descriptor: structure " + "attached to non first packet of a frame."; + return kDropPacket; + } + video_header->is_first_packet_in_frame = + dependency_descriptor.first_packet_in_frame; + video_header->is_last_packet_in_frame = + dependency_descriptor.last_packet_in_frame; + + int64_t frame_id = + frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number); + auto& generic_descriptor_info = video_header->generic.emplace(); + generic_descriptor_info.frame_id = frame_id; + generic_descriptor_info.spatial_index = + dependency_descriptor.frame_dependencies.spatial_id; + generic_descriptor_info.temporal_index = + dependency_descriptor.frame_dependencies.temporal_id; + for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) { + generic_descriptor_info.dependencies.push_back(frame_id - fdiff); + } + generic_descriptor_info.decode_target_indications = + dependency_descriptor.frame_dependencies.decode_target_indications; + if (dependency_descriptor.resolution) { + video_header->width = dependency_descriptor.resolution->Width(); + video_header->height = dependency_descriptor.resolution->Height(); + } + + // FrameDependencyStructure is sent in dependency descriptor of the first + // packet of a key frame and required for parsed dependency descriptor in + // all the following packets until next key frame. + // Save it if there is a (potentially) new structure. 
+ if (dependency_descriptor.attached_structure) { + RTC_DCHECK(dependency_descriptor.first_packet_in_frame); + if (video_structure_frame_id_ > frame_id) { + RTC_LOG(LS_WARNING) + << "Arrived key frame with id " << frame_id << " and structure id " + << dependency_descriptor.attached_structure->structure_id + << " is older than the latest received key frame with id " + << *video_structure_frame_id_ << " and structure id " + << video_structure_->structure_id; + return kDropPacket; + } + video_structure_ = std::move(dependency_descriptor.attached_structure); + video_structure_frame_id_ = frame_id; + video_header->frame_type = VideoFrameType::kVideoFrameKey; + } else { + video_header->frame_type = VideoFrameType::kVideoFrameDelta; + } + return kHasGenericDescriptor; + } + + RtpGenericFrameDescriptor generic_frame_descriptor; + if (!rtp_packet.GetExtension( + &generic_frame_descriptor)) { + return kNoGenericDescriptor; + } + + video_header->is_first_packet_in_frame = + generic_frame_descriptor.FirstPacketInSubFrame(); + video_header->is_last_packet_in_frame = + generic_frame_descriptor.LastPacketInSubFrame(); + + if (generic_frame_descriptor.FirstPacketInSubFrame()) { + video_header->frame_type = + generic_frame_descriptor.FrameDependenciesDiffs().empty() + ? 
VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta; + + auto& generic_descriptor_info = video_header->generic.emplace(); + int64_t frame_id = + frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId()); + generic_descriptor_info.frame_id = frame_id; + generic_descriptor_info.spatial_index = + generic_frame_descriptor.SpatialLayer(); + generic_descriptor_info.temporal_index = + generic_frame_descriptor.TemporalLayer(); + for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) { + generic_descriptor_info.dependencies.push_back(frame_id - fdiff); + } + } + video_header->width = generic_frame_descriptor.Width(); + video_header->height = generic_frame_descriptor.Height(); + return kHasGenericDescriptor; +} + void RtpVideoStreamReceiver::OnReceivedPayloadData( rtc::CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, @@ -336,8 +508,6 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( clock_->TimeInMilliseconds()); // Try to extrapolate absolute capture time if it is missing. - // TODO(bugs.webrtc.org/10739): Add support for estimated capture clock - // offset. 
packet->packet_info.set_absolute_capture_time( absolute_capture_time_receiver_.OnReceivePacket( AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(), @@ -352,7 +522,6 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( video_header.content_type = VideoContentType::UNSPECIFIED; video_header.video_timing.flags = VideoSendTiming::kInvalid; video_header.is_last_packet_in_frame |= rtp_packet.Marker(); - video_header.frame_marking.temporal_id = kNoTemporalIdx; if (const auto* vp9_header = absl::get_if(&video_header.video_type_header)) { @@ -364,56 +533,18 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( rtp_packet.GetExtension( &video_header.content_type); rtp_packet.GetExtension(&video_header.video_timing); - rtp_packet.GetExtension(&video_header.playout_delay); - rtp_packet.GetExtension(&video_header.frame_marking); - - RtpGenericFrameDescriptor& generic_descriptor = - packet->generic_descriptor.emplace(); - if (rtp_packet.GetExtension( - &generic_descriptor)) { - if (rtp_packet.HasExtension()) { - RTC_LOG(LS_WARNING) << "RTP packet had two different GFD versions."; - return; - } - generic_descriptor.SetByteRepresentation( - rtp_packet.GetRawExtension()); - } else if ((rtp_packet.GetExtension( - &generic_descriptor))) { - generic_descriptor.SetByteRepresentation( - rtp_packet.GetRawExtension()); + if (forced_playout_delay_max_ms_ && forced_playout_delay_min_ms_) { + video_header.playout_delay.max_ms = *forced_playout_delay_max_ms_; + video_header.playout_delay.min_ms = *forced_playout_delay_min_ms_; } else { - packet->generic_descriptor = absl::nullopt; - } - if (packet->generic_descriptor != absl::nullopt) { - video_header.is_first_packet_in_frame = - packet->generic_descriptor->FirstPacketInSubFrame(); - video_header.is_last_packet_in_frame = - packet->generic_descriptor->LastPacketInSubFrame(); - - if (packet->generic_descriptor->FirstPacketInSubFrame()) { - video_header.frame_type = - 
packet->generic_descriptor->FrameDependenciesDiffs().empty() - ? VideoFrameType::kVideoFrameKey - : VideoFrameType::kVideoFrameDelta; - - auto& descriptor = video_header.generic.emplace(); - int64_t frame_id = - frame_id_unwrapper_.Unwrap(packet->generic_descriptor->FrameId()); - descriptor.frame_id = frame_id; - descriptor.spatial_index = packet->generic_descriptor->SpatialLayer(); - descriptor.temporal_index = packet->generic_descriptor->TemporalLayer(); - descriptor.discardable = - packet->generic_descriptor->Discardable().value_or(false); - for (uint16_t fdiff : - packet->generic_descriptor->FrameDependenciesDiffs()) { - descriptor.dependencies.push_back(frame_id - fdiff); - } - } - - video_header.width = packet->generic_descriptor->Width(); - video_header.height = packet->generic_descriptor->Height(); + rtp_packet.GetExtension(&video_header.playout_delay); } + ParseGenericDependenciesResult generic_descriptor_state = + ParseGenericDependenciesExtension(rtp_packet, &video_header); + if (generic_descriptor_state == kDropPacket) + return; + // Color space should only be transmitted in the last packet of a frame, // therefore, neglect it otherwise so that last_color_space_ is not reset by // mistake. @@ -435,7 +566,7 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( // TODO(bugs.webrtc.org/10336): Implement support for reordering. RTC_LOG(LS_INFO) << "LossNotificationController does not support reordering."; - } else if (!packet->generic_descriptor) { + } else if (generic_descriptor_state == kNoGenericDescriptor) { RTC_LOG(LS_WARNING) << "LossNotificationController requires generic " "frame descriptor, but it is missing."; } else { @@ -497,7 +628,33 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( packet->video_payload = std::move(fixed.bitstream); break; } +#ifndef DISABLE_H265 + } else if (packet->codec() == kVideoCodecH265) { + // Only when we start to receive packets will we know what payload type + // that will be used. 
When we know the payload type insert the correct + // sps/pps into the tracker. + if (packet->payload_type != last_payload_type_) { + last_payload_type_ = packet->payload_type; + InsertSpsPpsIntoTracker(packet->payload_type); + } + + video_coding::H265VpsSpsPpsTracker::FixedBitstream fixed = + h265_tracker_.CopyAndFixBitstream( + rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()), + &packet->video_header); + switch (fixed.action) { + case video_coding::H265VpsSpsPpsTracker::kRequestKeyframe: + rtcp_feedback_buffer_.RequestKeyFrame(); + rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); + ABSL_FALLTHROUGH_INTENDED; + case video_coding::H265VpsSpsPpsTracker::kDrop: + return; + case video_coding::H265VpsSpsPpsTracker::kInsert: + packet->video_payload = std::move(fixed.bitstream); + break; + } +#endif } else { packet->video_payload = std::move(codec_payload); } @@ -541,7 +698,7 @@ void RtpVideoStreamReceiver::OnRtpPacket(const RtpPacketReceived& packet) { // TODO(nisse): Exclude out-of-order packets? int64_t now_ms = clock_->TimeInMilliseconds(); { - rtc::CritScope cs(&sync_info_lock_); + MutexLock lock(&sync_info_lock_); last_received_rtp_timestamp_ = packet.Timestamp(); last_received_rtp_system_time_ms_ = now_ms; } @@ -620,9 +777,69 @@ bool RtpVideoStreamReceiver::IsDecryptable() const { void RtpVideoStreamReceiver::OnInsertedPacket( video_coding::PacketBuffer::InsertResult result) { - for (std::unique_ptr& frame : result.frames) { - OnAssembledFrame(std::move(frame)); + video_coding::PacketBuffer::Packet* first_packet = nullptr; + int max_nack_count; + int64_t min_recv_time; + int64_t max_recv_time; + std::vector> payloads; + RtpPacketInfos::vector_type packet_infos; + + bool frame_boundary = true; + for (auto& packet : result.packets) { + // PacketBuffer promisses frame boundaries are correctly set on each + // packet. Document that assumption with the DCHECKs. 
+ RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); + if (packet->is_first_packet_in_frame()) { + first_packet = packet.get(); + max_nack_count = packet->times_nacked; + min_recv_time = packet->packet_info.receive_time_ms(); + max_recv_time = packet->packet_info.receive_time_ms(); + payloads.clear(); + packet_infos.clear(); + } else { + max_nack_count = std::max(max_nack_count, packet->times_nacked); + min_recv_time = + std::min(min_recv_time, packet->packet_info.receive_time_ms()); + max_recv_time = + std::max(max_recv_time, packet->packet_info.receive_time_ms()); + } + payloads.emplace_back(packet->video_payload); + packet_infos.push_back(packet->packet_info); + + frame_boundary = packet->is_last_packet_in_frame(); + if (packet->is_last_packet_in_frame()) { + auto depacketizer_it = payload_type_map_.find(first_packet->payload_type); + RTC_CHECK(depacketizer_it != payload_type_map_.end()); + + rtc::scoped_refptr bitstream = + depacketizer_it->second->AssembleFrame(payloads); + if (!bitstream) { + // Failed to assemble a frame. Discard and continue. 
+ continue; + } + + const video_coding::PacketBuffer::Packet& last_packet = *packet; + OnAssembledFrame(std::make_unique( + first_packet->seq_num, // + last_packet.seq_num, // + last_packet.marker_bit, // + max_nack_count, // + min_recv_time, // + max_recv_time, // + first_packet->timestamp, // + first_packet->ntp_time_ms, // + last_packet.video_header.video_timing, // + first_packet->payload_type, // + first_packet->codec(), // + last_packet.video_header.rotation, // + last_packet.video_header.content_type, // + first_packet->video_header, // + last_packet.video_header.color_space, // + RtpPacketInfos(std::move(packet_infos)), // + std::move(bitstream))); + } } + RTC_DCHECK(frame_boundary); if (result.buffer_cleared) { RequestKeyFrame(); } @@ -638,7 +855,9 @@ void RtpVideoStreamReceiver::OnAssembledFrame( if (loss_notification_controller_ && descriptor) { loss_notification_controller_->OnAssembledFrame( - frame->first_seq_num(), descriptor->frame_id, descriptor->discardable, + frame->first_seq_num(), descriptor->frame_id, + absl::c_linear_search(descriptor->decode_target_indications, + DecodeTargetIndication::kDiscardable), descriptor->dependencies); } @@ -656,7 +875,7 @@ void RtpVideoStreamReceiver::OnAssembledFrame( has_received_frame_ = true; } - rtc::CritScope lock(&reference_finder_lock_); + MutexLock lock(&reference_finder_lock_); // Reset |reference_finder_| if |frame| is new and the codec have changed. 
if (current_codec_) { bool frame_is_newer = @@ -687,17 +906,19 @@ void RtpVideoStreamReceiver::OnAssembledFrame( last_assembled_frame_rtp_timestamp_ = frame->Timestamp(); } - if (buffered_frame_decryptor_ == nullptr) { - reference_finder_->ManageFrame(std::move(frame)); - } else { + if (buffered_frame_decryptor_ != nullptr) { buffered_frame_decryptor_->ManageEncryptedFrame(std::move(frame)); + } else if (frame_transformer_delegate_) { + frame_transformer_delegate_->TransformFrame(std::move(frame)); + } else { + reference_finder_->ManageFrame(std::move(frame)); } } void RtpVideoStreamReceiver::OnCompleteFrame( std::unique_ptr frame) { { - rtc::CritScope lock(&last_seq_num_cs_); + MutexLock lock(&last_seq_num_mutex_); video_coding::RtpFrameObject* rtp_frame = static_cast(frame.get()); last_seq_num_for_pic_id_[rtp_frame->id.picture_id] = @@ -710,7 +931,7 @@ void RtpVideoStreamReceiver::OnCompleteFrame( void RtpVideoStreamReceiver::OnDecryptedFrame( std::unique_ptr frame) { - rtc::CritScope lock(&reference_finder_lock_); + MutexLock lock(&reference_finder_lock_); reference_finder_->ManageFrame(std::move(frame)); } @@ -731,6 +952,16 @@ void RtpVideoStreamReceiver::SetFrameDecryptor( buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); } +void RtpVideoStreamReceiver::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + RTC_DCHECK_RUN_ON(&network_tc_); + frame_transformer_delegate_ = + new rtc::RefCountedObject( + this, std::move(frame_transformer), rtc::Thread::Current(), + config_.rtp.remote_ssrc); + frame_transformer_delegate_->Init(); +} + void RtpVideoStreamReceiver::UpdateRtt(int64_t max_rtt_ms) { if (nack_module_) nack_module_->UpdateRtt(max_rtt_ms); @@ -765,6 +996,12 @@ void RtpVideoStreamReceiver::RemoveSecondarySink( secondary_sinks_.erase(it); } +void RtpVideoStreamReceiver::ManageFrame( + std::unique_ptr frame) { + MutexLock lock(&reference_finder_lock_); + reference_finder_->ManageFrame(std::move(frame)); 
+} + void RtpVideoStreamReceiver::ReceivePacket(const RtpPacketReceived& packet) { if (packet.payload_size() == 0) { // Padding or keep-alive packet. @@ -816,7 +1053,7 @@ void RtpVideoStreamReceiver::ParseAndHandleEncapsulatingHeader( // correctly calculate frame references. void RtpVideoStreamReceiver::NotifyReceiverOfEmptyPacket(uint16_t seq_num) { { - rtc::CritScope lock(&reference_finder_lock_); + MutexLock lock(&reference_finder_lock_); reference_finder_->PaddingReceived(seq_num); } OnInsertedPacket(packet_buffer_.InsertPadding(seq_num)); @@ -863,6 +1100,12 @@ bool RtpVideoStreamReceiver::DeliverRtcp(const uint8_t* rtcp_packet, // Don't use old SRs to estimate time. if (time_since_recieved <= 1) { ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); + absl::optional remote_to_local_clock_offset_ms = + ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); + if (remote_to_local_clock_offset_ms.has_value()) { + absolute_capture_time_receiver_.SetRemoteToLocalClockOffset( + Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); + } } return true; @@ -874,7 +1117,7 @@ void RtpVideoStreamReceiver::FrameContinuous(int64_t picture_id) { int seq_num = -1; { - rtc::CritScope lock(&last_seq_num_cs_); + MutexLock lock(&last_seq_num_mutex_); auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id); if (seq_num_it != last_seq_num_for_pic_id_.end()) seq_num = seq_num_it->second; @@ -886,7 +1129,7 @@ void RtpVideoStreamReceiver::FrameContinuous(int64_t picture_id) { void RtpVideoStreamReceiver::FrameDecoded(int64_t picture_id) { int seq_num = -1; { - rtc::CritScope lock(&last_seq_num_cs_); + MutexLock lock(&last_seq_num_mutex_); auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id); if (seq_num_it != last_seq_num_for_pic_id_.end()) { seq_num = seq_num_it->second; @@ -896,7 +1139,7 @@ void RtpVideoStreamReceiver::FrameDecoded(int64_t picture_id) { } if (seq_num != -1) { packet_buffer_.ClearTo(seq_num); - rtc::CritScope lock(&reference_finder_lock_); 
+ MutexLock lock(&reference_finder_lock_); reference_finder_->ClearTo(seq_num); } } diff --git a/video/rtp_video_stream_receiver.h b/video/rtp_video_stream_receiver.h index 8b6ffbd830..2746295fcc 100644 --- a/video/rtp_video_stream_receiver.h +++ b/video/rtp_video_stream_receiver.h @@ -32,25 +32,31 @@ #include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/video_coding/h264_sps_pps_tracker.h" #include "modules/video_coding/loss_notification_controller.h" +#ifndef DISABLE_H265 +#include "modules/video_coding/h265_vps_sps_pps_tracker.h" +#endif #include "modules/video_coding/packet_buffer.h" #include "modules/video_coding/rtp_frame_reference_finder.h" #include "modules/video_coding/unique_timestamp_counter.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" #include "video/buffered_frame_decryptor.h" +#include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" namespace webrtc { -class NackModule; +class DEPRECATED_NackModule; class PacketRouter; class ProcessThread; class ReceiveStatistics; @@ -66,8 +72,10 @@ class RtpVideoStreamReceiver : public LossNotificationSender, public KeyFrameRequestSender, public video_coding::OnCompleteFrameCallback, public OnDecryptedFrameCallback, - public OnDecryptionStatusChangeCallback { + public OnDecryptionStatusChangeCallback, + public 
RtpVideoFrameReceiver { public: + // DEPRECATED due to dependency on ReceiveStatisticsProxy. RtpVideoStreamReceiver( Clock* clock, Transport* transport, @@ -85,10 +93,33 @@ class RtpVideoStreamReceiver : public LossNotificationSender, // requests are sent via the internal RtpRtcp module. KeyFrameRequestSender* keyframe_request_sender, video_coding::OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor); + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer); + + RtpVideoStreamReceiver( + Clock* clock, + Transport* transport, + RtcpRttStats* rtt_stats, + // The packet router is optional; if provided, the RtpRtcp module for this + // stream is registered as a candidate for sending REMB and transport + // feedback. + PacketRouter* packet_router, + const VideoReceiveStream::Config* config, + ReceiveStatistics* rtp_receive_statistics, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + ProcessThread* process_thread, + NackSender* nack_sender, + // The KeyFrameRequestSender is optional; if not provided, key frame + // requests are sent via the internal RtpRtcp module. + KeyFrameRequestSender* keyframe_request_sender, + video_coding::OnCompleteFrameCallback* complete_frame_callback, + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer); ~RtpVideoStreamReceiver() override; - void AddReceiveCodec(const VideoCodec& video_codec, + void AddReceiveCodec(uint8_t payload_type, + const VideoCodec& video_codec, const std::map& codec_params, bool raw_payload); @@ -115,8 +146,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, // Implements RtpPacketSinkInterface. void OnRtpPacket(const RtpPacketReceived& packet) override; - // TODO(philipel): Stop using VCMPacket in the new jitter buffer and then - // remove this function. Public only for tests. + // Public only for tests. 
void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, const RTPVideoHeader& video); @@ -161,6 +191,11 @@ class RtpVideoStreamReceiver : public LossNotificationSender, void SetFrameDecryptor( rtc::scoped_refptr frame_decryptor); + // Sets a frame transformer after a stream has started, if no transformer + // has previously been set. Does not reset the decoder state. + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer); + // Called by VideoReceiveStream when stats are updated. void UpdateRtt(int64_t max_rtt_ms); @@ -175,6 +210,10 @@ class RtpVideoStreamReceiver : public LossNotificationSender, void RemoveSecondarySink(const RtpPacketSinkInterface* sink); private: + // Implements RtpVideoFrameReceiver. + void ManageFrame( + std::unique_ptr frame) override; + // Used for buffering RTCP feedback messages and sending them all together. // Note: // 1. Key frame requests and NACKs are mutually exclusive, with the @@ -192,35 +231,23 @@ class RtpVideoStreamReceiver : public LossNotificationSender, ~RtcpFeedbackBuffer() override = default; // KeyFrameRequestSender implementation. - void RequestKeyFrame() override; + void RequestKeyFrame() RTC_LOCKS_EXCLUDED(mutex_) override; // NackSender implementation. void SendNack(const std::vector& sequence_numbers, - bool buffering_allowed) override; + bool buffering_allowed) RTC_LOCKS_EXCLUDED(mutex_) override; // LossNotificationSender implementation. void SendLossNotification(uint16_t last_decoded_seq_num, uint16_t last_received_seq_num, bool decodability_flag, - bool buffering_allowed) override; + bool buffering_allowed) + RTC_LOCKS_EXCLUDED(mutex_) override; // Send all RTCP feedback messages buffered thus far. 
- void SendBufferedRtcpFeedback(); + void SendBufferedRtcpFeedback() RTC_LOCKS_EXCLUDED(mutex_); private: - KeyFrameRequestSender* const key_frame_request_sender_; - NackSender* const nack_sender_; - LossNotificationSender* const loss_notification_sender_; - - // NACKs are accessible from two threads due to nack_module_ being a module. - rtc::CriticalSection cs_; - - // Key-frame-request-related state. - bool request_key_frame_ RTC_GUARDED_BY(cs_); - - // NACK-related state. - std::vector nack_sequence_numbers_ RTC_GUARDED_BY(cs_); - // LNTF-related state. struct LossNotificationState { LossNotificationState(uint16_t last_decoded_seq_num, @@ -234,7 +261,37 @@ class RtpVideoStreamReceiver : public LossNotificationSender, uint16_t last_received_seq_num; bool decodability_flag; }; - absl::optional lntf_state_ RTC_GUARDED_BY(cs_); + struct ConsumedRtcpFeedback { + bool request_key_frame = false; + std::vector nack_sequence_numbers; + absl::optional lntf_state; + }; + + ConsumedRtcpFeedback ConsumeRtcpFeedback() RTC_LOCKS_EXCLUDED(mutex_); + ConsumedRtcpFeedback ConsumeRtcpFeedbackLocked() + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // This method is called both with and without mutex_ held. + void SendRtcpFeedback(ConsumedRtcpFeedback feedback); + + KeyFrameRequestSender* const key_frame_request_sender_; + NackSender* const nack_sender_; + LossNotificationSender* const loss_notification_sender_; + + // NACKs are accessible from two threads due to nack_module_ being a module. + Mutex mutex_; + + // Key-frame-request-related state. + bool request_key_frame_ RTC_GUARDED_BY(mutex_); + + // NACK-related state. + std::vector nack_sequence_numbers_ RTC_GUARDED_BY(mutex_); + + absl::optional lntf_state_ RTC_GUARDED_BY(mutex_); + }; + enum ParseGenericDependenciesResult { + kDropPacket, + kHasGenericDescriptor, + kNoGenericDescriptor }; // Entry point doing non-stats work for a received packet. 
Called @@ -248,6 +305,9 @@ class RtpVideoStreamReceiver : public LossNotificationSender, bool IsRedEnabled() const; void InsertSpsPpsIntoTracker(uint8_t payload_type); void OnInsertedPacket(video_coding::PacketBuffer::InsertResult result); + ParseGenericDependenciesResult ParseGenericDependenciesExtension( + const RtpPacketReceived& rtp_packet, + RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_); void OnAssembledFrame(std::unique_ptr frame); Clock* const clock_; @@ -259,6 +319,10 @@ class RtpVideoStreamReceiver : public LossNotificationSender, RemoteNtpTimeEstimator ntp_estimator_; RtpHeaderExtensionMap rtp_header_extensions_; + // Set by the field trial WebRTC-ForcePlayoutDelay to override any playout + // delay that is specified in the received packets. + FieldTrialOptional forced_playout_delay_max_ms_; + FieldTrialOptional forced_playout_delay_min_ms_; ReceiveStatistics* const rtp_receive_statistics_; std::unique_ptr ulpfec_receiver_; @@ -272,7 +336,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, KeyFrameRequestSender* const keyframe_request_sender_; RtcpFeedbackBuffer rtcp_feedback_buffer_; - std::unique_ptr nack_module_; + std::unique_ptr nack_module_; std::unique_ptr loss_notification_controller_; video_coding::PacketBuffer packet_buffer_; @@ -280,20 +344,35 @@ class RtpVideoStreamReceiver : public LossNotificationSender, SeqNumUnwrapper frame_id_unwrapper_ RTC_GUARDED_BY(worker_task_checker_); - rtc::CriticalSection reference_finder_lock_; + // Video structure provided in the dependency descriptor in a first packet + // of a key frame. It is required to parse dependency descriptor in the + // following delta packets. + std::unique_ptr video_structure_ + RTC_GUARDED_BY(worker_task_checker_); + // Frame id of the last frame with the attached video structure. 
+ // absl::nullopt when `video_structure_ == nullptr`; + absl::optional video_structure_frame_id_ + RTC_GUARDED_BY(worker_task_checker_); + + Mutex reference_finder_lock_; std::unique_ptr reference_finder_ RTC_GUARDED_BY(reference_finder_lock_); absl::optional current_codec_; uint32_t last_assembled_frame_rtp_timestamp_; - rtc::CriticalSection last_seq_num_cs_; + Mutex last_seq_num_mutex_; std::map last_seq_num_for_pic_id_ - RTC_GUARDED_BY(last_seq_num_cs_); + RTC_GUARDED_BY(last_seq_num_mutex_); video_coding::H264SpsPpsTracker tracker_; // Maps payload id to the depacketizer. std::map> payload_type_map_; + +#ifndef DISABLE_H265 + video_coding::H265VpsSpsPpsTracker h265_tracker_; +#endif + // TODO(johan): Remove pt_codec_params_ once // https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved. // Maps a payload type to a map of out-of-band supplied codec parameters. @@ -307,7 +386,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, // Info for GetSyncInfo is updated on network or worker thread, and queried on // the worker thread. - rtc::CriticalSection sync_info_lock_; + mutable Mutex sync_info_lock_; absl::optional last_received_rtp_timestamp_ RTC_GUARDED_BY(sync_info_lock_); absl::optional last_received_rtp_system_time_ms_ @@ -327,6 +406,9 @@ class RtpVideoStreamReceiver : public LossNotificationSender, RTC_GUARDED_BY(worker_task_checker_); int64_t last_completed_picture_id_ = 0; + + rtc::scoped_refptr + frame_transformer_delegate_; }; } // namespace webrtc diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc new file mode 100644 index 0000000000..a1c5f583fb --- /dev/null +++ b/video/rtp_video_stream_receiver2.cc @@ -0,0 +1,1185 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/rtp_video_stream_receiver2.h" + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/base/macros.h" +#include "absl/memory/memory.h" +#include "absl/types/optional.h" +#include "media/base/media_constants.h" +#include "modules/pacing/packet_router.h" +#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/rtp_cvo.h" +#include "modules/rtp_rtcp/include/ulpfec_receiver.h" +#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" +#include "modules/utility/include/process_thread.h" +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/h264_sprop_parameter_sets.h" +#include "modules/video_coding/h264_sps_pps_tracker.h" +#include "modules/video_coding/nack_module2.h" +#include "modules/video_coding/packet_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" +#include "system_wrappers/include/ntp_time.h" +#include 
"video/receive_statistics_proxy2.h" + +namespace webrtc { + +namespace { +// TODO(philipel): Change kPacketBufferStartSize back to 32 in M63 see: +// crbug.com/752886 +constexpr int kPacketBufferStartSize = 512; +constexpr int kPacketBufferMaxSize = 2048; + +int PacketBufferMaxSize() { + // The group here must be a positive power of 2, in which case that is used as + // size. All other values shall result in the default value being used. + const std::string group_name = + webrtc::field_trial::FindFullName("WebRTC-PacketBufferMaxSize"); + int packet_buffer_max_size = kPacketBufferMaxSize; + if (!group_name.empty() && + (sscanf(group_name.c_str(), "%d", &packet_buffer_max_size) != 1 || + packet_buffer_max_size <= 0 || + // Verify that the number is a positive power of 2. + (packet_buffer_max_size & (packet_buffer_max_size - 1)) != 0)) { + RTC_LOG(LS_WARNING) << "Invalid packet buffer max size: " << group_name; + packet_buffer_max_size = kPacketBufferMaxSize; + } + return packet_buffer_max_size; +} + +std::unique_ptr CreateRtpRtcpModule( + Clock* clock, + ReceiveStatistics* receive_statistics, + Transport* outgoing_transport, + RtcpRttStats* rtt_stats, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + uint32_t local_ssrc) { + RtpRtcpInterface::Configuration configuration; + configuration.clock = clock; + configuration.audio = false; + configuration.receiver_only = true; + configuration.receive_statistics = receive_statistics; + configuration.outgoing_transport = outgoing_transport; + configuration.rtt_stats = rtt_stats; + configuration.rtcp_packet_type_counter_observer = + rtcp_packet_type_counter_observer; + configuration.rtcp_cname_callback = rtcp_cname_callback; + configuration.local_media_ssrc = local_ssrc; + + std::unique_ptr rtp_rtcp = + ModuleRtpRtcpImpl2::Create(configuration); + rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); + + return rtp_rtcp; +} + +std::unique_ptr MaybeConstructNackModule( + 
TaskQueueBase* current_queue, + const VideoReceiveStream::Config& config, + Clock* clock, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender) { + if (config.rtp.nack.rtp_history_ms == 0) + return nullptr; + + return std::make_unique(current_queue, clock, nack_sender, + keyframe_request_sender); +} + +static const int kPacketLogIntervalMs = 10000; + +} // namespace + +RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RtcpFeedbackBuffer( + KeyFrameRequestSender* key_frame_request_sender, + NackSender* nack_sender, + LossNotificationSender* loss_notification_sender) + : key_frame_request_sender_(key_frame_request_sender), + nack_sender_(nack_sender), + loss_notification_sender_(loss_notification_sender), + request_key_frame_(false) { + RTC_DCHECK(key_frame_request_sender_); + RTC_DCHECK(nack_sender_); + RTC_DCHECK(loss_notification_sender_); +} + +void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RequestKeyFrame() { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + request_key_frame_ = true; +} + +void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendNack( + const std::vector& sequence_numbers, + bool buffering_allowed) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + RTC_DCHECK(!sequence_numbers.empty()); + nack_sequence_numbers_.insert(nack_sequence_numbers_.end(), + sequence_numbers.cbegin(), + sequence_numbers.cend()); + if (!buffering_allowed) { + // Note that while *buffering* is not allowed, *batching* is, meaning that + // previously buffered messages may be sent along with the current message. 
+ SendBufferedRtcpFeedback(); + } +} + +void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendLossNotification( + uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + RTC_DCHECK(buffering_allowed); + RTC_DCHECK(!lntf_state_) + << "SendLossNotification() called twice in a row with no call to " + "SendBufferedRtcpFeedback() in between."; + lntf_state_ = absl::make_optional( + last_decoded_seq_num, last_received_seq_num, decodability_flag); +} + +void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + + bool request_key_frame = false; + std::vector nack_sequence_numbers; + absl::optional lntf_state; + + std::swap(request_key_frame, request_key_frame_); + std::swap(nack_sequence_numbers, nack_sequence_numbers_); + std::swap(lntf_state, lntf_state_); + + if (lntf_state) { + // If either a NACK or a key frame request is sent, we should buffer + // the LNTF and wait for them (NACK or key frame request) to trigger + // the compound feedback message. + // Otherwise, the LNTF should be sent out immediately. 
+ const bool buffering_allowed = + request_key_frame || !nack_sequence_numbers.empty(); + + loss_notification_sender_->SendLossNotification( + lntf_state->last_decoded_seq_num, lntf_state->last_received_seq_num, + lntf_state->decodability_flag, buffering_allowed); + } + + if (request_key_frame) { + key_frame_request_sender_->RequestKeyFrame(); + } else if (!nack_sequence_numbers.empty()) { + nack_sender_->SendNack(nack_sequence_numbers, true); + } +} + +RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( + TaskQueueBase* current_queue, + Clock* clock, + Transport* transport, + RtcpRttStats* rtt_stats, + PacketRouter* packet_router, + const VideoReceiveStream::Config* config, + ReceiveStatistics* rtp_receive_statistics, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + ProcessThread* process_thread, + NackSender* nack_sender, + KeyFrameRequestSender* keyframe_request_sender, + video_coding::OnCompleteFrameCallback* complete_frame_callback, + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer) + : clock_(clock), + config_(*config), + packet_router_(packet_router), + process_thread_(process_thread), + ntp_estimator_(clock), + rtp_header_extensions_(config_.rtp.extensions), + forced_playout_delay_max_ms_("max_ms", absl::nullopt), + forced_playout_delay_min_ms_("min_ms", absl::nullopt), + rtp_receive_statistics_(rtp_receive_statistics), + ulpfec_receiver_(UlpfecReceiver::Create(config->rtp.remote_ssrc, + this, + config->rtp.extensions)), + receiving_(false), + last_packet_log_ms_(-1), + rtp_rtcp_(CreateRtpRtcpModule(clock, + rtp_receive_statistics_, + transport, + rtt_stats, + rtcp_packet_type_counter_observer, + rtcp_cname_callback, + config_.rtp.local_ssrc)), + complete_frame_callback_(complete_frame_callback), + keyframe_request_sender_(keyframe_request_sender), + // TODO(bugs.webrtc.org/10336): Let |rtcp_feedback_buffer_| communicate + // directly with |rtp_rtcp_|. 
+ rtcp_feedback_buffer_(this, nack_sender, this), + nack_module_(MaybeConstructNackModule(current_queue, + config_, + clock_, + &rtcp_feedback_buffer_, + &rtcp_feedback_buffer_)), + packet_buffer_(clock_, kPacketBufferStartSize, PacketBufferMaxSize()), + has_received_frame_(false), + frames_decryptable_(false), + absolute_capture_time_receiver_(clock) { + constexpr bool remb_candidate = true; + if (packet_router_) + packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); + + RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff) + << "A stream should not be configured with RTCP disabled. This value is " + "reserved for internal usage."; + // TODO(pbos): What's an appropriate local_ssrc for receive-only streams? + RTC_DCHECK(config_.rtp.local_ssrc != 0); + RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc); + + rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode); + rtp_rtcp_->SetRemoteSSRC(config_.rtp.remote_ssrc); + + static const int kMaxPacketAgeToNack = 450; + const int max_reordering_threshold = (config_.rtp.nack.rtp_history_ms > 0) + ? kMaxPacketAgeToNack + : kDefaultMaxReorderingThreshold; + rtp_receive_statistics_->SetMaxReorderingThreshold(config_.rtp.remote_ssrc, + max_reordering_threshold); + // TODO(nisse): For historic reasons, we applied the above + // max_reordering_threshold also for RTX stats, which makes little sense since + // we don't NACK rtx packets. Consider deleting the below block, and rely on + // the default threshold. 
+ if (config_.rtp.rtx_ssrc) { + rtp_receive_statistics_->SetMaxReorderingThreshold( + config_.rtp.rtx_ssrc, max_reordering_threshold); + } + if (config_.rtp.rtcp_xr.receiver_reference_time_report) + rtp_rtcp_->SetRtcpXrRrtrStatus(true); + + ParseFieldTrial( + {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_}, + field_trial::FindFullName("WebRTC-ForcePlayoutDelay")); + + process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); + + if (config_.rtp.lntf.enabled) { + loss_notification_controller_ = + std::make_unique(&rtcp_feedback_buffer_, + &rtcp_feedback_buffer_); + } + + reference_finder_ = + std::make_unique(this); + + // Only construct the encrypted receiver if frame encryption is enabled. + if (config_.crypto_options.sframe.require_frame_encryption) { + buffered_frame_decryptor_ = + std::make_unique(this, this); + if (frame_decryptor != nullptr) { + buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); + } + } + + if (frame_transformer) { + frame_transformer_delegate_ = new rtc::RefCountedObject< + RtpVideoStreamReceiverFrameTransformerDelegate>( + this, std::move(frame_transformer), rtc::Thread::Current(), + config_.rtp.remote_ssrc); + frame_transformer_delegate_->Init(); + } +} + +RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() { + RTC_DCHECK(secondary_sinks_.empty()); + + process_thread_->DeRegisterModule(rtp_rtcp_.get()); + + if (packet_router_) + packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); + UpdateHistograms(); + if (frame_transformer_delegate_) + frame_transformer_delegate_->Reset(); +} + +void RtpVideoStreamReceiver2::AddReceiveCodec( + uint8_t payload_type, + const VideoCodec& video_codec, + const std::map& codec_params, + bool raw_payload) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) || + field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); + } + payload_type_map_.emplace( + 
payload_type, raw_payload + ? std::make_unique() + : CreateVideoRtpDepacketizer(video_codec.codecType)); + pt_codec_params_.emplace(payload_type, codec_params); +} + +absl::optional RtpVideoStreamReceiver2::GetSyncInfo() const { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + Syncable::Info info; + if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs, + &info.capture_time_ntp_frac, nullptr, nullptr, + &info.capture_time_source_clock) != 0) { + return absl::nullopt; + } + + if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { + return absl::nullopt; + } + info.latest_received_capture_timestamp = *last_received_rtp_timestamp_; + info.latest_receive_time_ms = *last_received_rtp_system_time_ms_; + + // Leaves info.current_delay_ms uninitialized. + return info; +} + +RtpVideoStreamReceiver2::ParseGenericDependenciesResult +RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( + const RtpPacketReceived& rtp_packet, + RTPVideoHeader* video_header) { + if (rtp_packet.HasExtension()) { + webrtc::DependencyDescriptor dependency_descriptor; + if (!rtp_packet.GetExtension( + video_structure_.get(), &dependency_descriptor)) { + // Descriptor is there, but failed to parse. Either it is invalid, + // or too old packet (after relevant video_structure_ changed), + // or too new packet (before relevant video_structure_ arrived). + // Drop such packet to be on the safe side. + // TODO(bugs.webrtc.org/10342): Stash too new packet. 
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() + << " Failed to parse dependency descriptor."; + return kDropPacket; + } + if (dependency_descriptor.attached_structure != nullptr && + !dependency_descriptor.first_packet_in_frame) { + RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() + << "Invalid dependency descriptor: structure " + "attached to non first packet of a frame."; + return kDropPacket; + } + video_header->is_first_packet_in_frame = + dependency_descriptor.first_packet_in_frame; + video_header->is_last_packet_in_frame = + dependency_descriptor.last_packet_in_frame; + + int64_t frame_id = + frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number); + auto& generic_descriptor_info = video_header->generic.emplace(); + generic_descriptor_info.frame_id = frame_id; + generic_descriptor_info.spatial_index = + dependency_descriptor.frame_dependencies.spatial_id; + generic_descriptor_info.temporal_index = + dependency_descriptor.frame_dependencies.temporal_id; + for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) { + generic_descriptor_info.dependencies.push_back(frame_id - fdiff); + } + generic_descriptor_info.decode_target_indications = + dependency_descriptor.frame_dependencies.decode_target_indications; + if (dependency_descriptor.resolution) { + video_header->width = dependency_descriptor.resolution->Width(); + video_header->height = dependency_descriptor.resolution->Height(); + } + + // FrameDependencyStructure is sent in dependency descriptor of the first + // packet of a key frame and required for parsed dependency descriptor in + // all the following packets until next key frame. + // Save it if there is a (potentially) new structure. 
+ if (dependency_descriptor.attached_structure) { + RTC_DCHECK(dependency_descriptor.first_packet_in_frame); + if (video_structure_frame_id_ > frame_id) { + RTC_LOG(LS_WARNING) + << "Arrived key frame with id " << frame_id << " and structure id " + << dependency_descriptor.attached_structure->structure_id + << " is older than the latest received key frame with id " + << *video_structure_frame_id_ << " and structure id " + << video_structure_->structure_id; + return kDropPacket; + } + video_structure_ = std::move(dependency_descriptor.attached_structure); + video_structure_frame_id_ = frame_id; + video_header->frame_type = VideoFrameType::kVideoFrameKey; + } else { + video_header->frame_type = VideoFrameType::kVideoFrameDelta; + } + return kHasGenericDescriptor; + } + + RtpGenericFrameDescriptor generic_frame_descriptor; + if (!rtp_packet.GetExtension( + &generic_frame_descriptor)) { + return kNoGenericDescriptor; + } + + video_header->is_first_packet_in_frame = + generic_frame_descriptor.FirstPacketInSubFrame(); + video_header->is_last_packet_in_frame = + generic_frame_descriptor.LastPacketInSubFrame(); + + if (generic_frame_descriptor.FirstPacketInSubFrame()) { + video_header->frame_type = + generic_frame_descriptor.FrameDependenciesDiffs().empty() + ? 
VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta; + + auto& generic_descriptor_info = video_header->generic.emplace(); + int64_t frame_id = + frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId()); + generic_descriptor_info.frame_id = frame_id; + generic_descriptor_info.spatial_index = + generic_frame_descriptor.SpatialLayer(); + generic_descriptor_info.temporal_index = + generic_frame_descriptor.TemporalLayer(); + for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) { + generic_descriptor_info.dependencies.push_back(frame_id - fdiff); + } + } + video_header->width = generic_frame_descriptor.Width(); + video_header->height = generic_frame_descriptor.Height(); + return kHasGenericDescriptor; +} + +void RtpVideoStreamReceiver2::OnReceivedPayloadData( + rtc::CopyOnWriteBuffer codec_payload, + const RtpPacketReceived& rtp_packet, + const RTPVideoHeader& video) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + auto packet = std::make_unique( + rtp_packet, video, ntp_estimator_.Estimate(rtp_packet.Timestamp()), + clock_->TimeInMilliseconds()); + + // Try to extrapolate absolute capture time if it is missing. + packet->packet_info.set_absolute_capture_time( + absolute_capture_time_receiver_.OnReceivePacket( + AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(), + packet->packet_info.csrcs()), + packet->packet_info.rtp_timestamp(), + // Assume frequency is the same one for all video frames. 
+ kVideoPayloadTypeFrequency, + packet->packet_info.absolute_capture_time())); + + RTPVideoHeader& video_header = packet->video_header; + video_header.rotation = kVideoRotation_0; + video_header.content_type = VideoContentType::UNSPECIFIED; + video_header.video_timing.flags = VideoSendTiming::kInvalid; + video_header.is_last_packet_in_frame |= rtp_packet.Marker(); + + if (const auto* vp9_header = + absl::get_if(&video_header.video_type_header)) { + video_header.is_last_packet_in_frame |= vp9_header->end_of_frame; + video_header.is_first_packet_in_frame |= vp9_header->beginning_of_frame; + } + + rtp_packet.GetExtension(&video_header.rotation); + rtp_packet.GetExtension( + &video_header.content_type); + rtp_packet.GetExtension(&video_header.video_timing); + if (forced_playout_delay_max_ms_ && forced_playout_delay_min_ms_) { + video_header.playout_delay.max_ms = *forced_playout_delay_max_ms_; + video_header.playout_delay.min_ms = *forced_playout_delay_min_ms_; + } else { + rtp_packet.GetExtension(&video_header.playout_delay); + } + + ParseGenericDependenciesResult generic_descriptor_state = + ParseGenericDependenciesExtension(rtp_packet, &video_header); + if (generic_descriptor_state == kDropPacket) + return; + + // Color space should only be transmitted in the last packet of a frame, + // therefore, neglect it otherwise so that last_color_space_ is not reset by + // mistake. + if (video_header.is_last_packet_in_frame) { + video_header.color_space = rtp_packet.GetExtension(); + if (video_header.color_space || + video_header.frame_type == VideoFrameType::kVideoFrameKey) { + // Store color space since it's only transmitted when changed or for key + // frames. Color space will be cleared if a key frame is transmitted + // without color space information. 
+ last_color_space_ = video_header.color_space; + } else if (last_color_space_) { + video_header.color_space = last_color_space_; + } + } + + if (loss_notification_controller_) { + if (rtp_packet.recovered()) { + // TODO(bugs.webrtc.org/10336): Implement support for reordering. + RTC_LOG(LS_INFO) + << "LossNotificationController does not support reordering."; + } else if (generic_descriptor_state == kNoGenericDescriptor) { + RTC_LOG(LS_WARNING) << "LossNotificationController requires generic " + "frame descriptor, but it is missing."; + } else { + if (video_header.is_first_packet_in_frame) { + RTC_DCHECK(video_header.generic); + LossNotificationController::FrameDetails frame; + frame.is_keyframe = + video_header.frame_type == VideoFrameType::kVideoFrameKey; + frame.frame_id = video_header.generic->frame_id; + frame.frame_dependencies = video_header.generic->dependencies; + loss_notification_controller_->OnReceivedPacket( + rtp_packet.SequenceNumber(), &frame); + } else { + loss_notification_controller_->OnReceivedPacket( + rtp_packet.SequenceNumber(), nullptr); + } + } + } + + if (nack_module_) { + const bool is_keyframe = + video_header.is_first_packet_in_frame && + video_header.frame_type == VideoFrameType::kVideoFrameKey; + + packet->times_nacked = nack_module_->OnReceivedPacket( + rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered()); + } else { + packet->times_nacked = -1; + } + + if (codec_payload.size() == 0) { + NotifyReceiverOfEmptyPacket(packet->seq_num); + rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); + return; + } + + if (packet->codec() == kVideoCodecH264) { + // Only when we start to receive packets will we know what payload type + // that will be used. When we know the payload type insert the correct + // sps/pps into the tracker. 
+ if (packet->payload_type != last_payload_type_) { + last_payload_type_ = packet->payload_type; + InsertSpsPpsIntoTracker(packet->payload_type); + } + + video_coding::H264SpsPpsTracker::FixedBitstream fixed = + tracker_.CopyAndFixBitstream( + rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()), + &packet->video_header); + + switch (fixed.action) { + case video_coding::H264SpsPpsTracker::kRequestKeyframe: + rtcp_feedback_buffer_.RequestKeyFrame(); + rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); + ABSL_FALLTHROUGH_INTENDED; + case video_coding::H264SpsPpsTracker::kDrop: + return; + case video_coding::H264SpsPpsTracker::kInsert: + packet->video_payload = std::move(fixed.bitstream); + break; + } +#ifndef DISABLE_H265 + } else if (packet->codec() == kVideoCodecH265) { + // Only when we start to receive packets will we know what payload type + // that will be used. When we know the payload type insert the correct + // sps/pps into the tracker. + if (packet->payload_type != last_payload_type_) { + last_payload_type_ = packet->payload_type; + InsertSpsPpsIntoTracker(packet->payload_type); + } + + video_coding::H265VpsSpsPpsTracker::FixedBitstream fixed = + h265_tracker_.CopyAndFixBitstream( + rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()), + &packet->video_header); + + switch (fixed.action) { + case video_coding::H265VpsSpsPpsTracker::kRequestKeyframe: + rtcp_feedback_buffer_.RequestKeyFrame(); + rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); + ABSL_FALLTHROUGH_INTENDED; + case video_coding::H265VpsSpsPpsTracker::kDrop: + return; + case video_coding::H265VpsSpsPpsTracker::kInsert: + packet->video_payload = std::move(fixed.bitstream); + break; + } +#endif + } else { + packet->video_payload = std::move(codec_payload); + } + + rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); + frame_counter_.Add(packet->timestamp); + OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet))); +} + +void 
RtpVideoStreamReceiver2::OnRecoveredPacket(const uint8_t* rtp_packet, + size_t rtp_packet_length) { + RtpPacketReceived packet; + if (!packet.Parse(rtp_packet, rtp_packet_length)) + return; + if (packet.PayloadType() == config_.rtp.red_payload_type) { + RTC_LOG(LS_WARNING) << "Discarding recovered packet with RED encapsulation"; + return; + } + + packet.IdentifyExtensions(rtp_header_extensions_); + packet.set_payload_type_frequency(kVideoPayloadTypeFrequency); + // TODO(nisse): UlpfecReceiverImpl::ProcessReceivedFec passes both + // original (decapsulated) media packets and recovered packets to + // this callback. We need a way to distinguish, for setting + // packet.recovered() correctly. Ideally, move RED decapsulation out + // of the Ulpfec implementation. + + ReceivePacket(packet); +} + +// This method handles both regular RTP packets and packets recovered +// via FlexFEC. +void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + + if (!receiving_) { + return; + } + + if (!packet.recovered()) { + // TODO(nisse): Exclude out-of-order packets? + int64_t now_ms = clock_->TimeInMilliseconds(); + + last_received_rtp_timestamp_ = packet.Timestamp(); + last_received_rtp_system_time_ms_ = now_ms; + + // Periodically log the RTP header of incoming packets. 
+ if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) { + rtc::StringBuilder ss; + ss << "Packet received on SSRC: " << packet.Ssrc() + << " with payload type: " << static_cast(packet.PayloadType()) + << ", timestamp: " << packet.Timestamp() + << ", sequence number: " << packet.SequenceNumber() + << ", arrival time: " << packet.arrival_time_ms(); + int32_t time_offset; + if (packet.GetExtension(&time_offset)) { + ss << ", toffset: " << time_offset; + } + uint32_t send_time; + if (packet.GetExtension(&send_time)) { + ss << ", abs send time: " << send_time; + } + RTC_LOG(LS_INFO) << ss.str(); + last_packet_log_ms_ = now_ms; + } + } + + ReceivePacket(packet); + + // Update receive statistics after ReceivePacket. + // Receive statistics will be reset if the payload type changes (make sure + // that the first packet is included in the stats). + if (!packet.recovered()) { + rtp_receive_statistics_->OnRtpPacket(packet); + } + + for (RtpPacketSinkInterface* secondary_sink : secondary_sinks_) { + secondary_sink->OnRtpPacket(packet); + } +} + +void RtpVideoStreamReceiver2::RequestKeyFrame() { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests + // issued by anything other than the LossNotificationController if it (the + // sender) is relying on LNTF alone. 
+ if (keyframe_request_sender_) { + keyframe_request_sender_->RequestKeyFrame(); + } else { + rtp_rtcp_->SendPictureLossIndication(); + } +} + +void RtpVideoStreamReceiver2::SendLossNotification( + uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) { + RTC_DCHECK(config_.rtp.lntf.enabled); + rtp_rtcp_->SendLossNotification(last_decoded_seq_num, last_received_seq_num, + decodability_flag, buffering_allowed); +} + +bool RtpVideoStreamReceiver2::IsUlpfecEnabled() const { + return config_.rtp.ulpfec_payload_type != -1; +} + +bool RtpVideoStreamReceiver2::IsRetransmissionsEnabled() const { + return config_.rtp.nack.rtp_history_ms > 0; +} + +void RtpVideoStreamReceiver2::RequestPacketRetransmit( + const std::vector& sequence_numbers) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + rtp_rtcp_->SendNack(sequence_numbers); +} + +bool RtpVideoStreamReceiver2::IsDecryptable() const { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + return frames_decryptable_; +} + +void RtpVideoStreamReceiver2::OnInsertedPacket( + video_coding::PacketBuffer::InsertResult result) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + video_coding::PacketBuffer::Packet* first_packet = nullptr; + int max_nack_count; + int64_t min_recv_time; + int64_t max_recv_time; + std::vector> payloads; + RtpPacketInfos::vector_type packet_infos; + + bool frame_boundary = true; + for (auto& packet : result.packets) { + // PacketBuffer promisses frame boundaries are correctly set on each + // packet. Document that assumption with the DCHECKs. 
+ RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); + if (packet->is_first_packet_in_frame()) { + first_packet = packet.get(); + max_nack_count = packet->times_nacked; + min_recv_time = packet->packet_info.receive_time_ms(); + max_recv_time = packet->packet_info.receive_time_ms(); + payloads.clear(); + packet_infos.clear(); + } else { + max_nack_count = std::max(max_nack_count, packet->times_nacked); + min_recv_time = + std::min(min_recv_time, packet->packet_info.receive_time_ms()); + max_recv_time = + std::max(max_recv_time, packet->packet_info.receive_time_ms()); + } + payloads.emplace_back(packet->video_payload); + packet_infos.push_back(packet->packet_info); + + frame_boundary = packet->is_last_packet_in_frame(); + if (packet->is_last_packet_in_frame()) { + auto depacketizer_it = payload_type_map_.find(first_packet->payload_type); + RTC_CHECK(depacketizer_it != payload_type_map_.end()); + + rtc::scoped_refptr bitstream = + depacketizer_it->second->AssembleFrame(payloads); + if (!bitstream) { + // Failed to assemble a frame. Discard and continue. 
+ continue; + } + + const video_coding::PacketBuffer::Packet& last_packet = *packet; + OnAssembledFrame(std::make_unique( + first_packet->seq_num, // + last_packet.seq_num, // + last_packet.marker_bit, // + max_nack_count, // + min_recv_time, // + max_recv_time, // + first_packet->timestamp, // + first_packet->ntp_time_ms, // + last_packet.video_header.video_timing, // + first_packet->payload_type, // + first_packet->codec(), // + last_packet.video_header.rotation, // + last_packet.video_header.content_type, // + first_packet->video_header, // + last_packet.video_header.color_space, // + RtpPacketInfos(std::move(packet_infos)), // + std::move(bitstream))); + } + } + RTC_DCHECK(frame_boundary); + if (result.buffer_cleared) { + RequestKeyFrame(); + } +} + +void RtpVideoStreamReceiver2::OnAssembledFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + RTC_DCHECK(frame); + + const absl::optional& descriptor = + frame->GetRtpVideoHeader().generic; + + if (loss_notification_controller_ && descriptor) { + loss_notification_controller_->OnAssembledFrame( + frame->first_seq_num(), descriptor->frame_id, + absl::c_linear_search(descriptor->decode_target_indications, + DecodeTargetIndication::kDiscardable), + descriptor->dependencies); + } + + // If frames arrive before a key frame, they would not be decodable. + // In that case, request a key frame ASAP. + if (!has_received_frame_) { + if (frame->FrameType() != VideoFrameType::kVideoFrameKey) { + // |loss_notification_controller_|, if present, would have already + // requested a key frame when the first packet for the non-key frame + // had arrived, so no need to replicate the request. + if (!loss_notification_controller_) { + RequestKeyFrame(); + } + } + has_received_frame_ = true; + } + + // Reset |reference_finder_| if |frame| is new and the codec have changed. 
+ if (current_codec_) { + bool frame_is_newer = + AheadOf(frame->Timestamp(), last_assembled_frame_rtp_timestamp_); + + if (frame->codec_type() != current_codec_) { + if (frame_is_newer) { + // When we reset the |reference_finder_| we don't want new picture ids + // to overlap with old picture ids. To ensure that doesn't happen we + // start from the |last_completed_picture_id_| and add an offset in case + // of reordering. + reference_finder_ = + std::make_unique( + this, last_completed_picture_id_ + + std::numeric_limits::max()); + current_codec_ = frame->codec_type(); + } else { + // Old frame from before the codec switch, discard it. + return; + } + } + + if (frame_is_newer) { + last_assembled_frame_rtp_timestamp_ = frame->Timestamp(); + } + } else { + current_codec_ = frame->codec_type(); + last_assembled_frame_rtp_timestamp_ = frame->Timestamp(); + } + + if (buffered_frame_decryptor_ != nullptr) { + buffered_frame_decryptor_->ManageEncryptedFrame(std::move(frame)); + } else if (frame_transformer_delegate_) { + frame_transformer_delegate_->TransformFrame(std::move(frame)); + } else { + reference_finder_->ManageFrame(std::move(frame)); + } +} + +void RtpVideoStreamReceiver2::OnCompleteFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + video_coding::RtpFrameObject* rtp_frame = + static_cast(frame.get()); + last_seq_num_for_pic_id_[rtp_frame->id.picture_id] = + rtp_frame->last_seq_num(); + + last_completed_picture_id_ = + std::max(last_completed_picture_id_, frame->id.picture_id); + complete_frame_callback_->OnCompleteFrame(std::move(frame)); +} + +void RtpVideoStreamReceiver2::OnDecryptedFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + reference_finder_->ManageFrame(std::move(frame)); +} + +void RtpVideoStreamReceiver2::OnDecryptionStatusChange( + FrameDecryptorInterface::Status status) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + // Called from BufferedFrameDecryptor::DecryptFrame. 
+ frames_decryptable_ = + (status == FrameDecryptorInterface::Status::kOk) || + (status == FrameDecryptorInterface::Status::kRecoverable); +} + +void RtpVideoStreamReceiver2::SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (buffered_frame_decryptor_ == nullptr) { + buffered_frame_decryptor_ = + std::make_unique(this, this); + } + buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); +} + +void RtpVideoStreamReceiver2::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + frame_transformer_delegate_ = + new rtc::RefCountedObject( + this, std::move(frame_transformer), rtc::Thread::Current(), + config_.rtp.remote_ssrc); + frame_transformer_delegate_->Init(); +} + +void RtpVideoStreamReceiver2::UpdateRtt(int64_t max_rtt_ms) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (nack_module_) + nack_module_->UpdateRtt(max_rtt_ms); +} + +absl::optional RtpVideoStreamReceiver2::LastReceivedPacketMs() const { + return packet_buffer_.LastReceivedPacketMs(); +} + +absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() + const { + return packet_buffer_.LastReceivedKeyframePacketMs(); +} + +void RtpVideoStreamReceiver2::AddSecondarySink(RtpPacketSinkInterface* sink) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + RTC_DCHECK(!absl::c_linear_search(secondary_sinks_, sink)); + secondary_sinks_.push_back(sink); +} + +void RtpVideoStreamReceiver2::RemoveSecondarySink( + const RtpPacketSinkInterface* sink) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + auto it = absl::c_find(secondary_sinks_, sink); + if (it == secondary_sinks_.end()) { + // We might be rolling-back a call whose setup failed mid-way. In such a + // case, it's simpler to remove "everything" rather than remember what + // has already been added. 
+ RTC_LOG(LS_WARNING) << "Removal of unknown sink."; + return; + } + secondary_sinks_.erase(it); +} + +void RtpVideoStreamReceiver2::ManageFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + reference_finder_->ManageFrame(std::move(frame)); +} + +void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (packet.payload_size() == 0) { + // Padding or keep-alive packet. + // TODO(nisse): Could drop empty packets earlier, but need to figure out how + // they should be counted in stats. + NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); + return; + } + if (packet.PayloadType() == config_.rtp.red_payload_type) { + ParseAndHandleEncapsulatingHeader(packet); + return; + } + + const auto type_it = payload_type_map_.find(packet.PayloadType()); + if (type_it == payload_type_map_.end()) { + return; + } + absl::optional parsed_payload = + type_it->second->Parse(packet.PayloadBuffer()); + if (parsed_payload == absl::nullopt) { + RTC_LOG(LS_WARNING) << "Failed parsing payload."; + return; + } + + OnReceivedPayloadData(std::move(parsed_payload->video_payload), packet, + parsed_payload->video_header); +} + +void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader( + const RtpPacketReceived& packet) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (packet.PayloadType() == config_.rtp.red_payload_type && + packet.payload_size() > 0) { + if (packet.payload()[0] == config_.rtp.ulpfec_payload_type) { + // Notify video_receiver about received FEC packets to avoid NACKing these + // packets. + NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); + } + if (!ulpfec_receiver_->AddReceivedRedPacket( + packet, config_.rtp.ulpfec_payload_type)) { + return; + } + ulpfec_receiver_->ProcessReceivedFec(); + } +} + +// In the case of a video stream without picture ids and no rtx the +// RtpFrameReferenceFinder will need to know about padding to +// correctly calculate frame references. 
+void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
+  RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+  reference_finder_->PaddingReceived(seq_num);
+
+  OnInsertedPacket(packet_buffer_.InsertPadding(seq_num));
+  if (nack_module_) {
+    nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false,
+                                   /* is_recovered = */ false);
+  }
+  if (loss_notification_controller_) {
+    // TODO(bugs.webrtc.org/10336): Handle empty packets.
+    RTC_LOG(LS_WARNING)
+        << "LossNotificationController does not expect empty packets.";
+  }
+}
+
+bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet,
+                                          size_t rtcp_packet_length) {
+  RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+  if (!receiving_) {
+    return false;
+  }
+
+  rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+
+  int64_t rtt = 0;
+  rtp_rtcp_->RTT(config_.rtp.remote_ssrc, &rtt, nullptr, nullptr, nullptr);
+  if (rtt == 0) {
+    // Waiting for valid rtt.
+    return true;
+  }
+  uint32_t ntp_secs = 0;
+  uint32_t ntp_frac = 0;
+  uint32_t rtp_timestamp = 0;
+  uint32_t recieved_ntp_secs = 0;
+  uint32_t recieved_ntp_frac = 0;
+  if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, &recieved_ntp_secs,
+                           &recieved_ntp_frac, &rtp_timestamp) != 0) {
+    // Waiting for RTCP.
+    return true;
+  }
+  NtpTime recieved_ntp(recieved_ntp_secs, recieved_ntp_frac);
+  int64_t time_since_recieved =
+      clock_->CurrentNtpInMilliseconds() - recieved_ntp.ToMs();
+  // Don't use old SRs to estimate time.
+ if (time_since_recieved <= 1) { + ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); + absl::optional remote_to_local_clock_offset_ms = + ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); + if (remote_to_local_clock_offset_ms.has_value()) { + absolute_capture_time_receiver_.SetRemoteToLocalClockOffset( + Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); + } + } + + return true; +} + +void RtpVideoStreamReceiver2::FrameContinuous(int64_t picture_id) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (!nack_module_) + return; + + int seq_num = -1; + auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id); + if (seq_num_it != last_seq_num_for_pic_id_.end()) + seq_num = seq_num_it->second; + if (seq_num != -1) + nack_module_->ClearUpTo(seq_num); +} + +void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + // Running on the decoder thread. + int seq_num = -1; + auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id); + if (seq_num_it != last_seq_num_for_pic_id_.end()) { + seq_num = seq_num_it->second; + last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(), + ++seq_num_it); + } + + if (seq_num != -1) { + packet_buffer_.ClearTo(seq_num); + reference_finder_->ClearTo(seq_num); + } +} + +void RtpVideoStreamReceiver2::SignalNetworkState(NetworkState state) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? 
config_.rtp.rtcp_mode + : RtcpMode::kOff); +} + +void RtpVideoStreamReceiver2::StartReceive() { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + receiving_ = true; +} + +void RtpVideoStreamReceiver2::StopReceive() { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + receiving_ = false; +} + +void RtpVideoStreamReceiver2::UpdateHistograms() { + FecPacketCounter counter = ulpfec_receiver_->GetPacketCounter(); + if (counter.first_packet_time_ms == -1) + return; + + int64_t elapsed_sec = + (clock_->TimeInMilliseconds() - counter.first_packet_time_ms) / 1000; + if (elapsed_sec < metrics::kMinRunTimeInSeconds) + return; + + if (counter.num_packets > 0) { + RTC_HISTOGRAM_PERCENTAGE( + "WebRTC.Video.ReceivedFecPacketsInPercent", + static_cast(counter.num_fec_packets * 100 / counter.num_packets)); + } + if (counter.num_fec_packets > 0) { + RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec", + static_cast(counter.num_recovered_packets * + 100 / counter.num_fec_packets)); + } + if (config_.rtp.ulpfec_payload_type != -1) { + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.Video.FecBitrateReceivedInKbps", + static_cast(counter.num_bytes * 8 / elapsed_sec / 1000)); + } +} + +void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + + auto codec_params_it = pt_codec_params_.find(payload_type); + if (codec_params_it == pt_codec_params_.end()) + return; + + RTC_LOG(LS_INFO) << "Found out of band supplied codec parameters for" + " payload type: " + << static_cast(payload_type); + + H264SpropParameterSets sprop_decoder; + auto sprop_base64_it = + codec_params_it->second.find(cricket::kH264FmtpSpropParameterSets); + + if (sprop_base64_it == codec_params_it->second.end()) + return; + + if (!sprop_decoder.DecodeSprop(sprop_base64_it->second.c_str())) + return; + + tracker_.InsertSpsPpsNalus(sprop_decoder.sps_nalu(), + sprop_decoder.pps_nalu()); +} + +} // namespace webrtc diff --git 
a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h new file mode 100644 index 0000000000..71178c18a6 --- /dev/null +++ b/video/rtp_video_stream_receiver2.h @@ -0,0 +1,374 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_ +#define VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_ + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/video/color_space.h" +#include "api/video_codecs/video_codec.h" +#include "call/rtp_packet_sink_interface.h" +#include "call/syncable.h" +#include "call/video_receive_stream.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/video_coding/h264_sps_pps_tracker.h" +#include "modules/video_coding/loss_notification_controller.h" +#ifndef DISABLE_H265 +#include "modules/video_coding/h265_vps_sps_pps_tracker.h" +#endif +#include "modules/video_coding/packet_buffer.h" +#include 
"modules/video_coding/rtp_frame_reference_finder.h" +#include "modules/video_coding/unique_timestamp_counter.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/thread_annotations.h" +#include "video/buffered_frame_decryptor.h" +#include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" + +namespace webrtc { + +class NackModule2; +class PacketRouter; +class ProcessThread; +class ReceiveStatistics; +class RtcpRttStats; +class RtpPacketReceived; +class Transport; +class UlpfecReceiver; + +class RtpVideoStreamReceiver2 : public LossNotificationSender, + public RecoveredPacketReceiver, + public RtpPacketSinkInterface, + public KeyFrameRequestSender, + public video_coding::OnCompleteFrameCallback, + public OnDecryptedFrameCallback, + public OnDecryptionStatusChangeCallback, + public RtpVideoFrameReceiver { + public: + RtpVideoStreamReceiver2( + TaskQueueBase* current_queue, + Clock* clock, + Transport* transport, + RtcpRttStats* rtt_stats, + // The packet router is optional; if provided, the RtpRtcp module for this + // stream is registered as a candidate for sending REMB and transport + // feedback. + PacketRouter* packet_router, + const VideoReceiveStream::Config* config, + ReceiveStatistics* rtp_receive_statistics, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + ProcessThread* process_thread, + NackSender* nack_sender, + // The KeyFrameRequestSender is optional; if not provided, key frame + // requests are sent via the internal RtpRtcp module. 
+ KeyFrameRequestSender* keyframe_request_sender, + video_coding::OnCompleteFrameCallback* complete_frame_callback, + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer); + ~RtpVideoStreamReceiver2() override; + + void AddReceiveCodec(uint8_t payload_type, + const VideoCodec& video_codec, + const std::map& codec_params, + bool raw_payload); + + void StartReceive(); + void StopReceive(); + + // Produces the transport-related timestamps; current_delay_ms is left unset. + absl::optional GetSyncInfo() const; + + bool DeliverRtcp(const uint8_t* rtcp_packet, size_t rtcp_packet_length); + + void FrameContinuous(int64_t seq_num); + + void FrameDecoded(int64_t seq_num); + + void SignalNetworkState(NetworkState state); + + // Returns number of different frames seen. + int GetUniqueFramesSeen() const { + RTC_DCHECK_RUN_ON(&worker_task_checker_); + return frame_counter_.GetUniqueSeen(); + } + + // Implements RtpPacketSinkInterface. + void OnRtpPacket(const RtpPacketReceived& packet) override; + + // Public only for tests. + void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, + const RtpPacketReceived& rtp_packet, + const RTPVideoHeader& video); + + // Implements RecoveredPacketReceiver. + void OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override; + + // Send an RTCP keyframe request. + void RequestKeyFrame() override; + + // Implements LossNotificationSender. + void SendLossNotification(uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) override; + + bool IsUlpfecEnabled() const; + bool IsRetransmissionsEnabled() const; + + // Returns true if a decryptor is attached and frames can be decrypted. + // Updated by OnDecryptionStatusChangeCallback. Note this refers to Frame + // Decryption not SRTP. + bool IsDecryptable() const; + + // Don't use, still experimental. 
+ void RequestPacketRetransmit(const std::vector& sequence_numbers); + + // Implements OnCompleteFrameCallback. + void OnCompleteFrame( + std::unique_ptr frame) override; + + // Implements OnDecryptedFrameCallback. + void OnDecryptedFrame( + std::unique_ptr frame) override; + + // Implements OnDecryptionStatusChangeCallback. + void OnDecryptionStatusChange( + FrameDecryptorInterface::Status status) override; + + // Optionally set a frame decryptor after a stream has started. This will not + // reset the decoder state. + void SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor); + + // Sets a frame transformer after a stream has started, if no transformer + // has previously been set. Does not reset the decoder state. + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer); + + // Called by VideoReceiveStream when stats are updated. + void UpdateRtt(int64_t max_rtt_ms); + + absl::optional LastReceivedPacketMs() const; + absl::optional LastReceivedKeyframePacketMs() const; + + // RtpDemuxer only forwards a given RTP packet to one sink. However, some + // sinks, such as FlexFEC, might wish to be informed of all of the packets + // a given sink receives (or any set of sinks). They may do so by registering + // themselves as secondary sinks. + void AddSecondarySink(RtpPacketSinkInterface* sink); + void RemoveSecondarySink(const RtpPacketSinkInterface* sink); + + private: + // Implements RtpVideoFrameReceiver. + void ManageFrame( + std::unique_ptr frame) override; + + // Used for buffering RTCP feedback messages and sending them all together. + // Note: + // 1. Key frame requests and NACKs are mutually exclusive, with the + // former taking precedence over the latter. + // 2. Loss notifications are orthogonal to either. (That is, may be sent + // alongside either.) 
+ class RtcpFeedbackBuffer : public KeyFrameRequestSender, + public NackSender, + public LossNotificationSender { + public: + RtcpFeedbackBuffer(KeyFrameRequestSender* key_frame_request_sender, + NackSender* nack_sender, + LossNotificationSender* loss_notification_sender); + + ~RtcpFeedbackBuffer() override = default; + + // KeyFrameRequestSender implementation. + void RequestKeyFrame() override; + + // NackSender implementation. + void SendNack(const std::vector& sequence_numbers, + bool buffering_allowed) override; + + // LossNotificationSender implementation. + void SendLossNotification(uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) override; + + // Send all RTCP feedback messages buffered thus far. + void SendBufferedRtcpFeedback(); + + private: + // LNTF-related state. + struct LossNotificationState { + LossNotificationState(uint16_t last_decoded_seq_num, + uint16_t last_received_seq_num, + bool decodability_flag) + : last_decoded_seq_num(last_decoded_seq_num), + last_received_seq_num(last_received_seq_num), + decodability_flag(decodability_flag) {} + + uint16_t last_decoded_seq_num; + uint16_t last_received_seq_num; + bool decodability_flag; + }; + + SequenceChecker worker_task_checker_; + KeyFrameRequestSender* const key_frame_request_sender_; + NackSender* const nack_sender_; + LossNotificationSender* const loss_notification_sender_; + + // Key-frame-request-related state. + bool request_key_frame_ RTC_GUARDED_BY(worker_task_checker_); + + // NACK-related state. + std::vector nack_sequence_numbers_ + RTC_GUARDED_BY(worker_task_checker_); + + absl::optional lntf_state_ + RTC_GUARDED_BY(worker_task_checker_); + }; + enum ParseGenericDependenciesResult { + kDropPacket, + kHasGenericDescriptor, + kNoGenericDescriptor + }; + + // Entry point doing non-stats work for a received packet. Called + // for the same packet both before and after RED decapsulation. 
+ void ReceivePacket(const RtpPacketReceived& packet); + // Parses and handles RED headers. + // This function assumes that it's being called from only one thread. + void ParseAndHandleEncapsulatingHeader(const RtpPacketReceived& packet); + void NotifyReceiverOfEmptyPacket(uint16_t seq_num); + void UpdateHistograms(); + bool IsRedEnabled() const; + void InsertSpsPpsIntoTracker(uint8_t payload_type); + void OnInsertedPacket(video_coding::PacketBuffer::InsertResult result); + ParseGenericDependenciesResult ParseGenericDependenciesExtension( + const RtpPacketReceived& rtp_packet, + RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_); + void OnAssembledFrame(std::unique_ptr frame); + + Clock* const clock_; + // Ownership of this object lies with VideoReceiveStream, which owns |this|. + const VideoReceiveStream::Config& config_; + PacketRouter* const packet_router_; + ProcessThread* const process_thread_; + + RemoteNtpTimeEstimator ntp_estimator_; + + RtpHeaderExtensionMap rtp_header_extensions_; + // Set by the field trial WebRTC-ForcePlayoutDelay to override any playout + // delay that is specified in the received packets. 
+ FieldTrialOptional forced_playout_delay_max_ms_; + FieldTrialOptional forced_playout_delay_min_ms_; + ReceiveStatistics* const rtp_receive_statistics_; + std::unique_ptr ulpfec_receiver_; + + SequenceChecker worker_task_checker_; + bool receiving_ RTC_GUARDED_BY(worker_task_checker_); + int64_t last_packet_log_ms_ RTC_GUARDED_BY(worker_task_checker_); + + const std::unique_ptr rtp_rtcp_; + + video_coding::OnCompleteFrameCallback* complete_frame_callback_; + KeyFrameRequestSender* const keyframe_request_sender_; + + RtcpFeedbackBuffer rtcp_feedback_buffer_; + const std::unique_ptr nack_module_; + std::unique_ptr loss_notification_controller_; + + video_coding::PacketBuffer packet_buffer_; + UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(worker_task_checker_); + SeqNumUnwrapper frame_id_unwrapper_ + RTC_GUARDED_BY(worker_task_checker_); + + // Video structure provided in the dependency descriptor in a first packet + // of a key frame. It is required to parse dependency descriptor in the + // following delta packets. + std::unique_ptr video_structure_ + RTC_GUARDED_BY(worker_task_checker_); + // Frame id of the last frame with the attached video structure. + // absl::nullopt when `video_structure_ == nullptr`; + absl::optional video_structure_frame_id_ + RTC_GUARDED_BY(worker_task_checker_); + + std::unique_ptr reference_finder_ + RTC_GUARDED_BY(worker_task_checker_); + absl::optional current_codec_ + RTC_GUARDED_BY(worker_task_checker_); + uint32_t last_assembled_frame_rtp_timestamp_ + RTC_GUARDED_BY(worker_task_checker_); + + std::map last_seq_num_for_pic_id_ + RTC_GUARDED_BY(worker_task_checker_); + video_coding::H264SpsPpsTracker tracker_ RTC_GUARDED_BY(worker_task_checker_); + + // Maps payload id to the depacketizer. 
+ std::map> payload_type_map_ + RTC_GUARDED_BY(worker_task_checker_); + +#ifndef DISABLE_H265 + video_coding::H265VpsSpsPpsTracker h265_tracker_; +#endif + + // TODO(johan): Remove pt_codec_params_ once + // https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved. + // Maps a payload type to a map of out-of-band supplied codec parameters. + std::map> pt_codec_params_ + RTC_GUARDED_BY(worker_task_checker_); + int16_t last_payload_type_ RTC_GUARDED_BY(worker_task_checker_) = -1; + + bool has_received_frame_ RTC_GUARDED_BY(worker_task_checker_); + + std::vector secondary_sinks_ + RTC_GUARDED_BY(worker_task_checker_); + + absl::optional last_received_rtp_timestamp_ + RTC_GUARDED_BY(worker_task_checker_); + absl::optional last_received_rtp_system_time_ms_ + RTC_GUARDED_BY(worker_task_checker_); + + // Handles incoming encrypted frames and forwards them to the + // rtp_reference_finder if they are decryptable. + std::unique_ptr buffered_frame_decryptor_ + RTC_PT_GUARDED_BY(worker_task_checker_); + bool frames_decryptable_ RTC_GUARDED_BY(worker_task_checker_); + absl::optional last_color_space_; + + AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_ + RTC_GUARDED_BY(worker_task_checker_); + + int64_t last_completed_picture_id_ = 0; + + rtc::scoped_refptr + frame_transformer_delegate_; +}; + +} // namespace webrtc + +#endif // VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_ diff --git a/video/rtp_video_stream_receiver2_unittest.cc b/video/rtp_video_stream_receiver2_unittest.cc new file mode 100644 index 0000000000..7d690636d9 --- /dev/null +++ b/video/rtp_video_stream_receiver2_unittest.cc @@ -0,0 +1,1283 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/rtp_video_stream_receiver2.h" + +#include +#include + +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "common_video/h264/h264_common.h" +#include "media/base/media_constants.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_format_vp9.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/utility/include/process_thread.h" +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_frame_transformer.h" +#include "test/time_controller/simulated_task_queue.h" + +using ::testing::_; +using ::testing::ElementsAre; +using ::testing::Invoke; +using ::testing::SizeIs; +using ::testing::Values; + +namespace webrtc { + +namespace { + +const uint8_t kH264StartCode[] = {0x00, 0x00, 0x00, 0x01}; + +std::vector GetAbsoluteCaptureTimestamps( + const video_coding::EncodedFrame* frame) { + std::vector result; + for (const auto& packet_info : frame->PacketInfos()) { + if (packet_info.absolute_capture_time()) { + result.push_back( + packet_info.absolute_capture_time()->absolute_capture_timestamp); + } + } + return result; +} + +RTPVideoHeader 
GetGenericVideoHeader(VideoFrameType frame_type) { + RTPVideoHeader video_header; + video_header.is_first_packet_in_frame = true; + video_header.is_last_packet_in_frame = true; + video_header.codec = kVideoCodecGeneric; + video_header.frame_type = frame_type; + return video_header; +} + +class MockTransport : public Transport { + public: + MOCK_METHOD(bool, + SendRtp, + (const uint8_t*, size_t length, const PacketOptions& options), + (override)); + MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override)); +}; + +class MockNackSender : public NackSender { + public: + MOCK_METHOD(void, + SendNack, + (const std::vector& sequence_numbers, + bool buffering_allowed), + (override)); +}; + +class MockKeyFrameRequestSender : public KeyFrameRequestSender { + public: + MOCK_METHOD(void, RequestKeyFrame, (), (override)); +}; + +class MockOnCompleteFrameCallback + : public video_coding::OnCompleteFrameCallback { + public: + MOCK_METHOD(void, DoOnCompleteFrame, (video_coding::EncodedFrame*), ()); + MOCK_METHOD(void, + DoOnCompleteFrameFailNullptr, + (video_coding::EncodedFrame*), + ()); + MOCK_METHOD(void, + DoOnCompleteFrameFailLength, + (video_coding::EncodedFrame*), + ()); + MOCK_METHOD(void, + DoOnCompleteFrameFailBitstream, + (video_coding::EncodedFrame*), + ()); + void OnCompleteFrame( + std::unique_ptr frame) override { + if (!frame) { + DoOnCompleteFrameFailNullptr(nullptr); + return; + } + EXPECT_EQ(buffer_.Length(), frame->size()); + if (buffer_.Length() != frame->size()) { + DoOnCompleteFrameFailLength(frame.get()); + return; + } + if (frame->size() != buffer_.Length() || + memcmp(buffer_.Data(), frame->data(), buffer_.Length()) != 0) { + DoOnCompleteFrameFailBitstream(frame.get()); + return; + } + DoOnCompleteFrame(frame.get()); + } + + void ClearExpectedBitstream() { buffer_.Clear(); } + + void AppendExpectedBitstream(const uint8_t data[], size_t size_in_bytes) { + // TODO(Johan): Let rtc::ByteBuffer handle uint8_t* instead of char*. 
+ buffer_.WriteBytes(reinterpret_cast(data), size_in_bytes); + } + rtc::ByteBufferWriter buffer_; +}; + +class MockRtpPacketSink : public RtpPacketSinkInterface { + public: + MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override)); +}; + +constexpr uint32_t kSsrc = 111; +constexpr uint16_t kSequenceNumber = 222; +constexpr int kPayloadType = 100; +constexpr int kRedPayloadType = 125; + +std::unique_ptr CreateRtpPacketReceived() { + auto packet = std::make_unique(); + packet->SetSsrc(kSsrc); + packet->SetSequenceNumber(kSequenceNumber); + packet->SetPayloadType(kPayloadType); + return packet; +} + +MATCHER_P(SamePacketAs, other, "") { + return arg.Ssrc() == other.Ssrc() && + arg.SequenceNumber() == other.SequenceNumber(); +} + +} // namespace + +class RtpVideoStreamReceiver2Test : public ::testing::Test { + public: + RtpVideoStreamReceiver2Test() : RtpVideoStreamReceiver2Test("") {} + explicit RtpVideoStreamReceiver2Test(std::string field_trials) + : override_field_trials_(field_trials), + config_(CreateConfig()), + process_thread_(ProcessThread::Create("TestThread")) { + rtp_receive_statistics_ = + ReceiveStatistics::Create(Clock::GetRealTimeClock()); + rtp_video_stream_receiver_ = std::make_unique( + TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_, + nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr, + nullptr, process_thread_.get(), &mock_nack_sender_, + &mock_key_frame_request_sender_, &mock_on_complete_frame_callback_, + nullptr, nullptr); + VideoCodec codec; + codec.codecType = kVideoCodecGeneric; + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {}, + /*raw_payload=*/false); + } + + RTPVideoHeader GetDefaultH264VideoHeader() { + RTPVideoHeader video_header; + video_header.codec = kVideoCodecH264; + video_header.video_type_header.emplace(); + return video_header; + } + + // TODO(Johan): refactor h264_sps_pps_tracker_unittests.cc to avoid duplicate + // code. 
+ void AddSps(RTPVideoHeader* video_header, + uint8_t sps_id, + rtc::CopyOnWriteBuffer* data) { + NaluInfo info; + info.type = H264::NaluType::kSps; + info.sps_id = sps_id; + info.pps_id = -1; + data->AppendData({H264::NaluType::kSps, sps_id}); + auto& h264 = absl::get(video_header->video_type_header); + h264.nalus[h264.nalus_length++] = info; + } + + void AddPps(RTPVideoHeader* video_header, + uint8_t sps_id, + uint8_t pps_id, + rtc::CopyOnWriteBuffer* data) { + NaluInfo info; + info.type = H264::NaluType::kPps; + info.sps_id = sps_id; + info.pps_id = pps_id; + data->AppendData({H264::NaluType::kPps, pps_id}); + auto& h264 = absl::get(video_header->video_type_header); + h264.nalus[h264.nalus_length++] = info; + } + + void AddIdr(RTPVideoHeader* video_header, int pps_id) { + NaluInfo info; + info.type = H264::NaluType::kIdr; + info.sps_id = -1; + info.pps_id = pps_id; + auto& h264 = absl::get(video_header->video_type_header); + h264.nalus[h264.nalus_length++] = info; + } + + protected: + static VideoReceiveStream::Config CreateConfig() { + VideoReceiveStream::Config config(nullptr); + config.rtp.remote_ssrc = 1111; + config.rtp.local_ssrc = 2222; + config.rtp.red_payload_type = kRedPayloadType; + return config; + } + + TokenTaskQueue task_queue_; + TokenTaskQueue::CurrentTaskQueueSetter task_queue_setter_{&task_queue_}; + + const webrtc::test::ScopedFieldTrials override_field_trials_; + VideoReceiveStream::Config config_; + MockNackSender mock_nack_sender_; + MockKeyFrameRequestSender mock_key_frame_request_sender_; + MockTransport mock_transport_; + MockOnCompleteFrameCallback mock_on_complete_frame_callback_; + std::unique_ptr process_thread_; + std::unique_ptr rtp_receive_statistics_; + std::unique_ptr rtp_video_stream_receiver_; +}; + +TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) { + // Test that color space is cached from the last packet of a key frame and + // that it's not reset by padding packets without color space. 
+ constexpr int kVp9PayloadType = 99; + const ColorSpace kColorSpace( + ColorSpace::PrimaryID::kFILM, ColorSpace::TransferID::kBT2020_12, + ColorSpace::MatrixID::kBT2020_NCL, ColorSpace::RangeID::kFull); + const std::vector kKeyFramePayload = {0, 1, 2, 3, 4, 5, + 6, 7, 8, 9, 10}; + const std::vector kDeltaFramePayload = {0, 1, 2, 3, 4}; + + // Anonymous helper class that generates received packets. + class { + public: + void SetPayload(const std::vector& payload, + VideoFrameType video_frame_type) { + video_frame_type_ = video_frame_type; + RtpPacketizer::PayloadSizeLimits pay_load_size_limits; + // Reduce max payload length to make sure the key frame generates two + // packets. + pay_load_size_limits.max_payload_len = 8; + RTPVideoHeaderVP9 rtp_video_header_vp9; + rtp_video_header_vp9.InitRTPVideoHeaderVP9(); + rtp_video_header_vp9.inter_pic_predicted = + (video_frame_type == VideoFrameType::kVideoFrameDelta); + rtp_packetizer_ = std::make_unique( + payload, pay_load_size_limits, rtp_video_header_vp9); + } + + size_t NumPackets() { return rtp_packetizer_->NumPackets(); } + void SetColorSpace(const ColorSpace& color_space) { + color_space_ = color_space; + } + + RtpPacketReceived NextPacket() { + RtpHeaderExtensionMap extension_map; + extension_map.Register(1); + RtpPacketToSend packet_to_send(&extension_map); + packet_to_send.SetSequenceNumber(sequence_number_++); + packet_to_send.SetSsrc(kSsrc); + packet_to_send.SetPayloadType(kVp9PayloadType); + bool include_color_space = + (rtp_packetizer_->NumPackets() == 1u && + video_frame_type_ == VideoFrameType::kVideoFrameKey); + if (include_color_space) { + EXPECT_TRUE( + packet_to_send.SetExtension(color_space_)); + } + rtp_packetizer_->NextPacket(&packet_to_send); + + RtpPacketReceived received_packet(&extension_map); + received_packet.Parse(packet_to_send.data(), packet_to_send.size()); + return received_packet; + } + + private: + uint16_t sequence_number_ = 0; + VideoFrameType video_frame_type_; + ColorSpace 
color_space_; + std::unique_ptr rtp_packetizer_; + } received_packet_generator; + received_packet_generator.SetColorSpace(kColorSpace); + + // Prepare the receiver for VP9. + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + std::map codec_params; + rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, codec, + codec_params, + /*raw_payload=*/false); + + // Generate key frame packets. + received_packet_generator.SetPayload(kKeyFramePayload, + VideoFrameType::kVideoFrameKey); + EXPECT_EQ(received_packet_generator.NumPackets(), 2u); + RtpPacketReceived key_frame_packet1 = received_packet_generator.NextPacket(); + RtpPacketReceived key_frame_packet2 = received_packet_generator.NextPacket(); + + // Generate delta frame packet. + received_packet_generator.SetPayload(kDeltaFramePayload, + VideoFrameType::kVideoFrameDelta); + EXPECT_EQ(received_packet_generator.NumPackets(), 1u); + RtpPacketReceived delta_frame_packet = received_packet_generator.NextPacket(); + + rtp_video_stream_receiver_->StartReceive(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kKeyFramePayload.data(), kKeyFramePayload.size()); + + // Send the key frame and expect a callback with color space information. + EXPECT_FALSE(key_frame_packet1.GetExtension()); + EXPECT_TRUE(key_frame_packet2.GetExtension()); + rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) { + ASSERT_TRUE(frame->EncodedImage().ColorSpace()); + EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace); + })); + rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet2); + // Resend the first key frame packet to simulate padding for example. 
+ rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1); + + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kDeltaFramePayload.data(), kDeltaFramePayload.size()); + + // Expect delta frame to have color space set even though color space not + // included in the RTP packet. + EXPECT_FALSE(delta_frame_packet.GetExtension()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) { + ASSERT_TRUE(frame->EncodedImage().ColorSpace()); + EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace); + })); + rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet); +} + +TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) { + RtpPacketReceived rtp_packet; + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetSequenceNumber(1); + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); +} + +TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) { + constexpr uint64_t kAbsoluteCaptureTimestamp = 12; + constexpr int kId0 = 1; + + RtpHeaderExtensionMap extension_map; + extension_map.Register(kId0); + RtpPacketReceived rtp_packet(&extension_map); + rtp_packet.SetPayloadType(kPayloadType); + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + rtp_packet.SetSequenceNumber(1); + rtp_packet.SetTimestamp(1); + rtp_packet.SetSsrc(kSsrc); + rtp_packet.SetExtension( + AbsoluteCaptureTime{kAbsoluteCaptureTimestamp, + /*estimated_capture_clock_offset=*/absl::nullopt}); + + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + 
mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce(Invoke( + [kAbsoluteCaptureTimestamp](video_coding::EncodedFrame* frame) { + EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), + ElementsAre(kAbsoluteCaptureTimestamp)); + })); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); +} + +TEST_F(RtpVideoStreamReceiver2Test, + MissingAbsoluteCaptureTimeIsFilledWithExtrapolatedValue) { + constexpr uint64_t kAbsoluteCaptureTimestamp = 12; + constexpr int kId0 = 1; + + RtpHeaderExtensionMap extension_map; + extension_map.Register(kId0); + RtpPacketReceived rtp_packet(&extension_map); + rtp_packet.SetPayloadType(kPayloadType); + + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + uint16_t sequence_number = 1; + uint32_t rtp_timestamp = 1; + rtp_packet.SetSequenceNumber(sequence_number); + rtp_packet.SetTimestamp(rtp_timestamp); + rtp_packet.SetSsrc(kSsrc); + rtp_packet.SetExtension( + AbsoluteCaptureTime{kAbsoluteCaptureTimestamp, + /*estimated_capture_clock_offset=*/absl::nullopt}); + + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); + + // Rtp packet without absolute capture time. + rtp_packet = RtpPacketReceived(&extension_map); + rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetSequenceNumber(++sequence_number); + rtp_packet.SetTimestamp(++rtp_timestamp); + rtp_packet.SetSsrc(kSsrc); + + // There is no absolute capture time in the second packet. + // Expect rtp video stream receiver to extrapolate it for the resulting video + // frame using absolute capture time from the previous packet. 
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce(Invoke([](video_coding::EncodedFrame* frame) { + EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1)); + })); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); +} + +TEST_F(RtpVideoStreamReceiver2Test, + NoInfiniteRecursionOnEncapsulatedRedPacket) { + const std::vector data({ + 0x80, // RTP version. + kRedPayloadType, // Payload type. + 0, 0, 0, 0, 0, 0, // Don't care. + 0, 0, 0x4, 0x57, // SSRC + kRedPayloadType, // RED header. + 0, 0, 0, 0, 0 // Don't care. + }); + RtpPacketReceived packet; + EXPECT_TRUE(packet.Parse(data.data(), data.size())); + rtp_video_stream_receiver_->StartReceive(); + rtp_video_stream_receiver_->OnRtpPacket(packet); +} + +TEST_F(RtpVideoStreamReceiver2Test, + DropsPacketWithRedPayloadTypeAndEmptyPayload) { + const uint8_t kRedPayloadType = 125; + config_.rtp.red_payload_type = kRedPayloadType; + SetUp(); // re-create rtp_video_stream_receiver with red payload type. + // clang-format off + const uint8_t data[] = { + 0x80, // RTP version. + kRedPayloadType, // Payload type. + 0, 0, 0, 0, 0, 0, // Don't care. + 0, 0, 0x4, 0x57, // SSRC + // Empty rtp payload. + }; + // clang-format on + RtpPacketReceived packet; + // Manually convert to CopyOnWriteBuffer to be sure capacity == size + // and asan bot can catch read buffer overflow. + EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(data))); + rtp_video_stream_receiver_->StartReceive(); + rtp_video_stream_receiver_->OnRtpPacket(packet); + // Expect asan doesn't find anything. 
+} + +TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrameBitstreamError) { + RtpPacketReceived rtp_packet; + rtp_packet.SetPayloadType(kPayloadType); + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + rtp_packet.SetSequenceNumber(1); + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + constexpr uint8_t expected_bitsteam[] = {1, 2, 3, 0xff}; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + expected_bitsteam, sizeof(expected_bitsteam)); + EXPECT_CALL(mock_on_complete_frame_callback_, + DoOnCompleteFrameFailBitstream(_)); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); +} + +class RtpVideoStreamReceiver2TestH264 + : public RtpVideoStreamReceiver2Test, + public ::testing::WithParamInterface { + protected: + RtpVideoStreamReceiver2TestH264() : RtpVideoStreamReceiver2Test(GetParam()) {} +}; + +INSTANTIATE_TEST_SUITE_P(SpsPpsIdrIsKeyframe, + RtpVideoStreamReceiver2TestH264, + Values("", "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/")); + +TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { + rtc::CopyOnWriteBuffer sps_data; + RtpPacketReceived rtp_packet; + RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader(); + AddSps(&sps_video_header, 0, &sps_data); + rtp_packet.SetSequenceNumber(0); + rtp_packet.SetPayloadType(kPayloadType); + sps_video_header.is_first_packet_in_frame = true; + sps_video_header.frame_type = VideoFrameType::kEmptyFrame; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(), + sps_data.size()); + rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet, + sps_video_header); + + rtc::CopyOnWriteBuffer pps_data; + RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader(); + AddPps(&pps_video_header, 0, 1, &pps_data); + rtp_packet.SetSequenceNumber(1); + pps_video_header.is_first_packet_in_frame = true; + 
pps_video_header.frame_type = VideoFrameType::kEmptyFrame; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(), + pps_data.size()); + rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet, + pps_video_header); + + rtc::CopyOnWriteBuffer idr_data; + RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader(); + AddIdr(&idr_video_header, 1); + rtp_packet.SetSequenceNumber(2); + idr_video_header.is_first_packet_in_frame = true; + idr_video_header.is_last_packet_in_frame = true; + idr_video_header.frame_type = VideoFrameType::kVideoFrameKey; + const uint8_t idr[] = {0x65, 1, 2, 3}; + idr_data.AppendData(idr); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(), + idr_data.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, + idr_video_header); +} + +TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) { + constexpr int kPayloadType = 99; + VideoCodec codec; + std::map codec_params; + // Example parameter sets from https://tools.ietf.org/html/rfc3984#section-8.2 + // . 
+ codec_params.insert( + {cricket::kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="}); + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params, + /*raw_payload=*/false); + const uint8_t binary_sps[] = {0x67, 0x42, 0x00, 0x0a, 0x96, + 0x53, 0x05, 0x89, 0x88}; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_sps, + sizeof(binary_sps)); + const uint8_t binary_pps[] = {0x68, 0xc9, 0x63, 0x88}; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_pps, + sizeof(binary_pps)); + + RtpPacketReceived rtp_packet; + RTPVideoHeader video_header = GetDefaultH264VideoHeader(); + AddIdr(&video_header, 0); + rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetSequenceNumber(2); + video_header.is_first_packet_in_frame = true; + video_header.is_last_packet_in_frame = true; + video_header.codec = kVideoCodecH264; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + rtc::CopyOnWriteBuffer data({1, 2, 3}); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); +} + +TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { + constexpr int kPayloadType = 99; + VideoCodec codec; + std::map codec_params; + if (GetParam() == + "") { // Forcing can be done either with field trial or codec_params. 
+ codec_params.insert({cricket::kH264FmtpSpsPpsIdrInKeyframe, ""}); + } + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params, + /*raw_payload=*/false); + rtc::CopyOnWriteBuffer sps_data; + RtpPacketReceived rtp_packet; + RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader(); + AddSps(&sps_video_header, 0, &sps_data); + rtp_packet.SetSequenceNumber(0); + rtp_packet.SetPayloadType(kPayloadType); + sps_video_header.is_first_packet_in_frame = true; + sps_video_header.frame_type = VideoFrameType::kEmptyFrame; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(), + sps_data.size()); + rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet, + sps_video_header); + + rtc::CopyOnWriteBuffer pps_data; + RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader(); + AddPps(&pps_video_header, 0, 1, &pps_data); + rtp_packet.SetSequenceNumber(1); + pps_video_header.is_first_packet_in_frame = true; + pps_video_header.frame_type = VideoFrameType::kEmptyFrame; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(), + pps_data.size()); + rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet, + pps_video_header); + + rtc::CopyOnWriteBuffer idr_data; + RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader(); + AddIdr(&idr_video_header, 1); + rtp_packet.SetSequenceNumber(2); + idr_video_header.is_first_packet_in_frame = true; + idr_video_header.is_last_packet_in_frame = true; + idr_video_header.frame_type = VideoFrameType::kVideoFrameKey; + const uint8_t idr[] = {0x65, 1, 2, 3}; + idr_data.AppendData(idr); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + 
mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(), + idr_data.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_TRUE(frame->is_keyframe()); + }); + rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, + idr_video_header); + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(), + idr_data.size()); + rtp_packet.SetSequenceNumber(3); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_FALSE(frame->is_keyframe()); + }); + rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, + idr_video_header); +} + +TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) { + RtpPacketReceived rtp_packet; + RTPVideoHeader video_header = GetDefaultH264VideoHeader(); + rtc::CopyOnWriteBuffer data({1, 2, 3}); + rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetSequenceNumber(2); + video_header.is_first_packet_in_frame = true; + video_header.is_last_packet_in_frame = true; + video_header.codec = kVideoCodecGeneric; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); + + rtp_packet.SetSequenceNumber(3); + rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet, + video_header); + + rtp_packet.SetSequenceNumber(4); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + video_header.frame_type = VideoFrameType::kVideoFrameDelta; + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + 
video_header); + + rtp_packet.SetSequenceNumber(6); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_packet.SetSequenceNumber(5); + rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet, + video_header); +} + +TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeIfFirstFrameIsDelta) { + RtpPacketReceived rtp_packet; + rtp_packet.SetPayloadType(kPayloadType); + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + rtp_packet.SetSequenceNumber(1); + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta); + EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame()); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); +} + +TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeWhenPacketBufferGetsFull) { + constexpr int kPacketBufferMaxSize = 2048; + + RtpPacketReceived rtp_packet; + rtp_packet.SetPayloadType(kPayloadType); + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta); + // Incomplete frames so that the packet buffer is filling up. 
+ video_header.is_last_packet_in_frame = false; + uint16_t start_sequence_number = 1234; + rtp_packet.SetSequenceNumber(start_sequence_number); + while (rtp_packet.SequenceNumber() - start_sequence_number < + kPacketBufferMaxSize) { + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); + rtp_packet.SetSequenceNumber(rtp_packet.SequenceNumber() + 2); + } + + EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame()); + rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, + video_header); +} + +TEST_F(RtpVideoStreamReceiver2Test, SecondarySinksGetRtpNotifications) { + rtp_video_stream_receiver_->StartReceive(); + + MockRtpPacketSink secondary_sink_1; + MockRtpPacketSink secondary_sink_2; + + rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink_1); + rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink_2); + + auto rtp_packet = CreateRtpPacketReceived(); + EXPECT_CALL(secondary_sink_1, OnRtpPacket(SamePacketAs(*rtp_packet))); + EXPECT_CALL(secondary_sink_2, OnRtpPacket(SamePacketAs(*rtp_packet))); + + rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet); + + // Test tear-down. + rtp_video_stream_receiver_->StopReceive(); + rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink_1); + rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink_2); +} + +TEST_F(RtpVideoStreamReceiver2Test, + RemovedSecondarySinksGetNoRtpNotifications) { + rtp_video_stream_receiver_->StartReceive(); + + MockRtpPacketSink secondary_sink; + + rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink); + rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink); + + auto rtp_packet = CreateRtpPacketReceived(); + + EXPECT_CALL(secondary_sink, OnRtpPacket(_)).Times(0); + + rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet); + + // Test tear-down. 
+ rtp_video_stream_receiver_->StopReceive(); +} + +TEST_F(RtpVideoStreamReceiver2Test, + OnlyRemovedSecondarySinksExcludedFromNotifications) { + rtp_video_stream_receiver_->StartReceive(); + + MockRtpPacketSink kept_secondary_sink; + MockRtpPacketSink removed_secondary_sink; + + rtp_video_stream_receiver_->AddSecondarySink(&kept_secondary_sink); + rtp_video_stream_receiver_->AddSecondarySink(&removed_secondary_sink); + rtp_video_stream_receiver_->RemoveSecondarySink(&removed_secondary_sink); + + auto rtp_packet = CreateRtpPacketReceived(); + EXPECT_CALL(kept_secondary_sink, OnRtpPacket(SamePacketAs(*rtp_packet))); + + rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet); + + // Test tear-down. + rtp_video_stream_receiver_->StopReceive(); + rtp_video_stream_receiver_->RemoveSecondarySink(&kept_secondary_sink); +} + +TEST_F(RtpVideoStreamReceiver2Test, + SecondariesOfNonStartedStreamGetNoNotifications) { + // Explicitly showing that the stream is not in the |started| state, + // regardless of whether streams start out |started| or |stopped|. + rtp_video_stream_receiver_->StopReceive(); + + MockRtpPacketSink secondary_sink; + rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink); + + auto rtp_packet = CreateRtpPacketReceived(); + EXPECT_CALL(secondary_sink, OnRtpPacket(_)).Times(0); + + rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet); + + // Test tear-down. 
+ rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink); +} + +TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorOnePacket) { + const std::vector data = {0, 1, 2, 3, 4}; + const int kSpatialIndex = 1; + + rtp_video_stream_receiver_->StartReceive(); + + RtpHeaderExtensionMap extension_map; + extension_map.Register(5); + RtpPacketReceived rtp_packet(&extension_map); + rtp_packet.SetPayloadType(kPayloadType); + + RtpGenericFrameDescriptor generic_descriptor; + generic_descriptor.SetFirstPacketInSubFrame(true); + generic_descriptor.SetLastPacketInSubFrame(true); + generic_descriptor.SetFrameId(100); + generic_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex); + generic_descriptor.AddFrameDependencyDiff(90); + generic_descriptor.AddFrameDependencyDiff(80); + ASSERT_TRUE(rtp_packet.SetExtension( + generic_descriptor)); + + uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); + memcpy(payload, data.data(), data.size()); + // The first byte is the header, so we ignore the first byte of |data|. 
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1, + data.size() - 1); + + rtp_packet.SetMarker(true); + rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetSequenceNumber(1); + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->num_references, 2U); + EXPECT_EQ(frame->references[0], frame->id.picture_id - 90); + EXPECT_EQ(frame->references[1], frame->id.picture_id - 80); + EXPECT_EQ(frame->id.spatial_layer, kSpatialIndex); + EXPECT_THAT(frame->PacketInfos(), SizeIs(1)); + })); + + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); +} + +TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorTwoPackets) { + const std::vector data = {0, 1, 2, 3, 4}; + const int kSpatialIndex = 1; + + rtp_video_stream_receiver_->StartReceive(); + + RtpHeaderExtensionMap extension_map; + extension_map.Register(5); + RtpPacketReceived first_packet(&extension_map); + + RtpGenericFrameDescriptor first_packet_descriptor; + first_packet_descriptor.SetFirstPacketInSubFrame(true); + first_packet_descriptor.SetLastPacketInSubFrame(false); + first_packet_descriptor.SetFrameId(100); + first_packet_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex); + first_packet_descriptor.SetResolution(480, 360); + ASSERT_TRUE(first_packet.SetExtension( + first_packet_descriptor)); + + uint8_t* first_packet_payload = first_packet.SetPayloadSize(data.size()); + memcpy(first_packet_payload, data.data(), data.size()); + // The first byte is the header, so we ignore the first byte of |data|. 
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1, + data.size() - 1); + + first_packet.SetPayloadType(kPayloadType); + first_packet.SetSequenceNumber(1); + rtp_video_stream_receiver_->OnRtpPacket(first_packet); + + RtpPacketReceived second_packet(&extension_map); + RtpGenericFrameDescriptor second_packet_descriptor; + second_packet_descriptor.SetFirstPacketInSubFrame(false); + second_packet_descriptor.SetLastPacketInSubFrame(true); + ASSERT_TRUE(second_packet.SetExtension( + second_packet_descriptor)); + + second_packet.SetMarker(true); + second_packet.SetPayloadType(kPayloadType); + second_packet.SetSequenceNumber(2); + + uint8_t* second_packet_payload = second_packet.SetPayloadSize(data.size()); + memcpy(second_packet_payload, data.data(), data.size()); + // The first byte is the header, so we ignore the first byte of |data|. + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1, + data.size() - 1); + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->num_references, 0U); + EXPECT_EQ(frame->id.spatial_layer, kSpatialIndex); + EXPECT_EQ(frame->EncodedImage()._encodedWidth, 480u); + EXPECT_EQ(frame->EncodedImage()._encodedHeight, 360u); + EXPECT_THAT(frame->PacketInfos(), SizeIs(2)); + })); + + rtp_video_stream_receiver_->OnRtpPacket(second_packet); +} + +TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorRawPayload) { + const std::vector data = {0, 1, 2, 3, 4}; + const int kRawPayloadType = 123; + + VideoCodec codec; + rtp_video_stream_receiver_->AddReceiveCodec(kRawPayloadType, codec, {}, + /*raw_payload=*/true); + rtp_video_stream_receiver_->StartReceive(); + + RtpHeaderExtensionMap extension_map; + extension_map.Register(5); + RtpPacketReceived rtp_packet(&extension_map); + + RtpGenericFrameDescriptor generic_descriptor; + generic_descriptor.SetFirstPacketInSubFrame(true); + 
generic_descriptor.SetLastPacketInSubFrame(true); + ASSERT_TRUE(rtp_packet.SetExtension( + generic_descriptor)); + + uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); + memcpy(payload, data.data(), data.size()); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + + rtp_packet.SetMarker(true); + rtp_packet.SetPayloadType(kRawPayloadType); + rtp_packet.SetSequenceNumber(1); + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame); + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); +} + +TEST_F(RtpVideoStreamReceiver2Test, UnwrapsFrameId) { + const std::vector data = {0, 1, 2, 3, 4}; + const int kPayloadType = 123; + + VideoCodec codec; + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {}, + /*raw_payload=*/true); + rtp_video_stream_receiver_->StartReceive(); + RtpHeaderExtensionMap extension_map; + extension_map.Register(5); + + uint16_t rtp_sequence_number = 1; + auto inject_packet = [&](uint16_t wrapped_frame_id) { + RtpPacketReceived rtp_packet(&extension_map); + + RtpGenericFrameDescriptor generic_descriptor; + generic_descriptor.SetFirstPacketInSubFrame(true); + generic_descriptor.SetLastPacketInSubFrame(true); + generic_descriptor.SetFrameId(wrapped_frame_id); + ASSERT_TRUE(rtp_packet.SetExtension( + generic_descriptor)); + + uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); + ASSERT_TRUE(payload); + memcpy(payload, data.data(), data.size()); + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + rtp_packet.SetMarker(true); + rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetSequenceNumber(++rtp_sequence_number); + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); + }; + + int64_t first_picture_id; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + first_picture_id = frame->id.picture_id; + }); + 
inject_packet(/*wrapped_frame_id=*/0xffff); + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->id.picture_id - first_picture_id, 3); + }); + inject_packet(/*wrapped_frame_id=*/0x0002); +} + +class RtpVideoStreamReceiver2DependencyDescriptorTest + : public RtpVideoStreamReceiver2Test { + public: + RtpVideoStreamReceiver2DependencyDescriptorTest() { + VideoCodec codec; + rtp_video_stream_receiver_->AddReceiveCodec(payload_type_, codec, {}, + /*raw_payload=*/true); + extension_map_.Register(7); + rtp_video_stream_receiver_->StartReceive(); + } + + // Returns some valid structure for the DependencyDescriptors. + // First template of that structure always fit for a key frame. + static FrameDependencyStructure CreateStreamStructure() { + FrameDependencyStructure stream_structure; + stream_structure.num_decode_targets = 1; + stream_structure.templates = { + FrameDependencyTemplate().Dtis("S"), + FrameDependencyTemplate().Dtis("S").FrameDiffs({1}), + }; + return stream_structure; + } + + void InjectPacketWith(const FrameDependencyStructure& stream_structure, + const DependencyDescriptor& dependency_descriptor) { + const std::vector data = {0, 1, 2, 3, 4}; + RtpPacketReceived rtp_packet(&extension_map_); + ASSERT_TRUE(rtp_packet.SetExtension( + stream_structure, dependency_descriptor)); + uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); + ASSERT_TRUE(payload); + memcpy(payload, data.data(), data.size()); + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + rtp_packet.SetMarker(true); + rtp_packet.SetPayloadType(payload_type_); + rtp_packet.SetSequenceNumber(++rtp_sequence_number_); + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); + } + + private: + const int payload_type_ = 123; + RtpHeaderExtensionMap extension_map_; + uint16_t rtp_sequence_number_ = 321; +}; + 
+TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, UnwrapsFrameId) { + FrameDependencyStructure stream_structure = CreateStreamStructure(); + + DependencyDescriptor keyframe_descriptor; + keyframe_descriptor.attached_structure = + std::make_unique(stream_structure); + keyframe_descriptor.frame_dependencies = stream_structure.templates[0]; + keyframe_descriptor.frame_number = 0xfff0; + // DependencyDescriptor doesn't support reordering delta frame before + // keyframe. Thus feed a key frame first, then test reodered delta frames. + int64_t first_picture_id; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + first_picture_id = frame->id.picture_id; + }); + InjectPacketWith(stream_structure, keyframe_descriptor); + + DependencyDescriptor deltaframe1_descriptor; + deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1]; + deltaframe1_descriptor.frame_number = 0xfffe; + + DependencyDescriptor deltaframe2_descriptor; + deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1]; + deltaframe2_descriptor.frame_number = 0x0002; + + // Parser should unwrap frame ids correctly even if packets were reordered by + // the network. 
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + // 0x0002 - 0xfff0 + EXPECT_EQ(frame->id.picture_id - first_picture_id, 18); + }) + .WillOnce([&](video_coding::EncodedFrame* frame) { + // 0xfffe - 0xfff0 + EXPECT_EQ(frame->id.picture_id - first_picture_id, 14); + }); + InjectPacketWith(stream_structure, deltaframe2_descriptor); + InjectPacketWith(stream_structure, deltaframe1_descriptor); +} + +TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, + DropsLateDeltaFramePacketWithDependencyDescriptorExtension) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + // Make sure template ids for these two structures do not collide: + // adjust structure_id (that is also used as template id offset). + stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame); + InjectPacketWith(stream_structure1, keyframe1_descriptor); + + // Pass in 2nd key frame with different structure. + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 3; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame); + InjectPacketWith(stream_structure2, keyframe2_descriptor); + + // Pass in late delta frame that uses structure of the 1st key frame. 
+ DependencyDescriptor deltaframe_descriptor; + deltaframe_descriptor.frame_dependencies = stream_structure1.templates[0]; + deltaframe_descriptor.frame_number = 2; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0); + InjectPacketWith(stream_structure1, deltaframe_descriptor); +} + +TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, + DropsLateKeyFramePacketWithDependencyDescriptorExtension) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + // Make sure template ids for these two structures do not collide: + // adjust structure_id (that is also used as template id offset). + stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 3; + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->id.picture_id & 0xFFFF, 3); + }); + InjectPacketWith(stream_structure2, keyframe2_descriptor); + InjectPacketWith(stream_structure1, keyframe1_descriptor); + + // Pass in delta frame that uses structure of the 2nd key frame. Late key + // frame shouldn't block it. 
+ DependencyDescriptor deltaframe_descriptor; + deltaframe_descriptor.frame_dependencies = stream_structure2.templates[0]; + deltaframe_descriptor.frame_number = 4; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->id.picture_id & 0xFFFF, 4); + }); + InjectPacketWith(stream_structure2, deltaframe_descriptor); +} + +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +using RtpVideoStreamReceiver2DeathTest = RtpVideoStreamReceiver2Test; +TEST_F(RtpVideoStreamReceiver2DeathTest, RepeatedSecondarySinkDisallowed) { + MockRtpPacketSink secondary_sink; + + rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink); + EXPECT_DEATH(rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink), + ""); + + // Test tear-down. + rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink); +} +#endif + +TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + EXPECT_CALL(*mock_frame_transformer, + RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc)); + auto receiver = std::make_unique( + TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_, + nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr, + nullptr, process_thread_.get(), &mock_nack_sender_, nullptr, + &mock_on_complete_frame_callback_, nullptr, mock_frame_transformer); + VideoCodec video_codec; + video_codec.codecType = kVideoCodecGeneric; + receiver->AddReceiveCodec(kPayloadType, video_codec, {}, + /*raw_payload=*/false); + + RtpPacketReceived rtp_packet; + rtp_packet.SetPayloadType(kPayloadType); + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + rtp_packet.SetSequenceNumber(1); + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + 
EXPECT_CALL(*mock_frame_transformer, Transform(_)); + receiver->OnReceivedPayloadData(data, rtp_packet, video_header); + + EXPECT_CALL(*mock_frame_transformer, + UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc)); + receiver = nullptr; +} + +// Test default behavior and when playout delay is overridden by field trial. +const VideoPlayoutDelay kTransmittedPlayoutDelay = {100, 200}; +const VideoPlayoutDelay kForcedPlayoutDelay = {70, 90}; +struct PlayoutDelayOptions { + std::string field_trial; + VideoPlayoutDelay expected_delay; +}; +const PlayoutDelayOptions kDefaultBehavior = { + /*field_trial=*/"", /*expected_delay=*/kTransmittedPlayoutDelay}; +const PlayoutDelayOptions kOverridePlayoutDelay = { + /*field_trial=*/"WebRTC-ForcePlayoutDelay/min_ms:70,max_ms:90/", + /*expected_delay=*/kForcedPlayoutDelay}; + +class RtpVideoStreamReceiver2TestPlayoutDelay + : public RtpVideoStreamReceiver2Test, + public ::testing::WithParamInterface { + protected: + RtpVideoStreamReceiver2TestPlayoutDelay() + : RtpVideoStreamReceiver2Test(GetParam().field_trial) {} +}; + +INSTANTIATE_TEST_SUITE_P(PlayoutDelay, + RtpVideoStreamReceiver2TestPlayoutDelay, + Values(kDefaultBehavior, kOverridePlayoutDelay)); + +TEST_P(RtpVideoStreamReceiver2TestPlayoutDelay, PlayoutDelay) { + rtc::CopyOnWriteBuffer payload_data({1, 2, 3, 4}); + RtpHeaderExtensionMap extension_map; + extension_map.Register(1); + RtpPacketToSend packet_to_send(&extension_map); + packet_to_send.SetPayloadType(kPayloadType); + packet_to_send.SetSequenceNumber(1); + + // Set playout delay on outgoing packet. 
+ EXPECT_TRUE(packet_to_send.SetExtension( + kTransmittedPlayoutDelay)); + uint8_t* payload = packet_to_send.AllocatePayload(payload_data.size()); + memcpy(payload, payload_data.data(), payload_data.size()); + + RtpPacketReceived received_packet(&extension_map); + received_packet.Parse(packet_to_send.data(), packet_to_send.size()); + + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + mock_on_complete_frame_callback_.AppendExpectedBitstream(payload_data.data(), + payload_data.size()); + // Expect the playout delay of encoded frame to be the same as the transmitted + // playout delay unless it was overridden by a field trial. + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce(Invoke([expected_playout_delay = GetParam().expected_delay]( + video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->EncodedImage().playout_delay_, expected_playout_delay); + })); + rtp_video_stream_receiver_->OnReceivedPayloadData( + received_packet.PayloadBuffer(), received_packet, video_header); +} + +} // namespace webrtc diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate.cc new file mode 100644 index 0000000000..31eb344d5b --- /dev/null +++ b/video/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" + +#include +#include + +#include "absl/memory/memory.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +namespace { +class TransformableVideoReceiverFrame + : public TransformableVideoFrameInterface { + public: + TransformableVideoReceiverFrame( + std::unique_ptr frame, + uint32_t ssrc) + : frame_(std::move(frame)), + metadata_(frame_->GetRtpVideoHeader()), + ssrc_(ssrc) {} + ~TransformableVideoReceiverFrame() override = default; + + // Implements TransformableVideoFrameInterface. + rtc::ArrayView GetData() const override { + return *frame_->GetEncodedData(); + } + + void SetData(rtc::ArrayView data) override { + frame_->SetEncodedData( + EncodedImageBuffer::Create(data.data(), data.size())); + } + + uint32_t GetTimestamp() const override { return frame_->Timestamp(); } + uint32_t GetSsrc() const override { return ssrc_; } + + bool IsKeyFrame() const override { + return frame_->FrameType() == VideoFrameType::kVideoFrameKey; + } + + std::vector GetAdditionalData() const override { + return RtpDescriptorAuthentication(frame_->GetRtpVideoHeader()); + } + + const VideoFrameMetadata& GetMetadata() const override { return metadata_; } + + std::unique_ptr ExtractFrame() && { + return std::move(frame_); + } + + private: + std::unique_ptr frame_; + const VideoFrameMetadata metadata_; + const uint32_t ssrc_; +}; +} // namespace + +RtpVideoStreamReceiverFrameTransformerDelegate:: + RtpVideoStreamReceiverFrameTransformerDelegate( + RtpVideoFrameReceiver* receiver, + rtc::scoped_refptr frame_transformer, + rtc::Thread* network_thread, + uint32_t ssrc) + : receiver_(receiver), + frame_transformer_(std::move(frame_transformer)), + network_thread_(network_thread), + ssrc_(ssrc) {} + +void RtpVideoStreamReceiverFrameTransformerDelegate::Init() { + 
RTC_DCHECK_RUN_ON(&network_sequence_checker_); + frame_transformer_->RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr(this), ssrc_); +} + +void RtpVideoStreamReceiverFrameTransformerDelegate::Reset() { + RTC_DCHECK_RUN_ON(&network_sequence_checker_); + frame_transformer_->UnregisterTransformedFrameSinkCallback(ssrc_); + frame_transformer_ = nullptr; + receiver_ = nullptr; +} + +void RtpVideoStreamReceiverFrameTransformerDelegate::TransformFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&network_sequence_checker_); + frame_transformer_->Transform( + std::make_unique(std::move(frame), + ssrc_)); +} + +void RtpVideoStreamReceiverFrameTransformerDelegate::OnTransformedFrame( + std::unique_ptr frame) { + rtc::scoped_refptr delegate = + this; + network_thread_->PostTask(ToQueuedTask( + [delegate = std::move(delegate), frame = std::move(frame)]() mutable { + delegate->ManageFrame(std::move(frame)); + })); +} + +void RtpVideoStreamReceiverFrameTransformerDelegate::ManageFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&network_sequence_checker_); + if (!receiver_) + return; + auto transformed_frame = absl::WrapUnique( + static_cast(frame.release())); + receiver_->ManageFrame(std::move(*transformed_frame).ExtractFrame()); +} + +} // namespace webrtc diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate.h b/video/rtp_video_stream_receiver_frame_transformer_delegate.h new file mode 100644 index 0000000000..e687e7f47b --- /dev/null +++ b/video/rtp_video_stream_receiver_frame_transformer_delegate.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_RTP_VIDEO_STREAM_RECEIVER_FRAME_TRANSFORMER_DELEGATE_H_ +#define VIDEO_RTP_VIDEO_STREAM_RECEIVER_FRAME_TRANSFORMER_DELEGATE_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "modules/video_coding/frame_object.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +// Called back by RtpVideoStreamReceiverFrameTransformerDelegate on the network +// thread after transformation. +class RtpVideoFrameReceiver { + public: + virtual void ManageFrame( + std::unique_ptr frame) = 0; + + protected: + virtual ~RtpVideoFrameReceiver() = default; +}; + +// Delegates calls to FrameTransformerInterface to transform frames, and to +// RtpVideoStreamReceiver to manage transformed frames on the |network_thread_|. +class RtpVideoStreamReceiverFrameTransformerDelegate + : public TransformedFrameCallback { + public: + RtpVideoStreamReceiverFrameTransformerDelegate( + RtpVideoFrameReceiver* receiver, + rtc::scoped_refptr frame_transformer, + rtc::Thread* network_thread, + uint32_t ssrc); + + void Init(); + void Reset(); + + // Delegates the call to FrameTransformerInterface::TransformFrame. + void TransformFrame(std::unique_ptr frame); + + // Implements TransformedFrameCallback. Can be called on any thread. Posts + // the transformed frame to be managed on the |network_thread_|. + void OnTransformedFrame( + std::unique_ptr frame) override; + + // Delegates the call to RtpVideoFrameReceiver::ManageFrame on the + // |network_thread_|. 
+ void ManageFrame(std::unique_ptr frame); + + protected: + ~RtpVideoStreamReceiverFrameTransformerDelegate() override = default; + + private: + SequenceChecker network_sequence_checker_; + RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_); + rtc::scoped_refptr frame_transformer_ + RTC_GUARDED_BY(network_sequence_checker_); + rtc::Thread* const network_thread_; + const uint32_t ssrc_; +}; + +} // namespace webrtc + +#endif // VIDEO_RTP_VIDEO_STREAM_RECEIVER_FRAME_TRANSFORMER_DELEGATE_H_ diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc new file mode 100644 index 0000000000..a411ca6e9a --- /dev/null +++ b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/call/transport.h" +#include "call/video_receive_stream.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "modules/utility/include/process_thread.h" +#include "rtc_base/event.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_frame_transformer.h" + +namespace webrtc { +namespace { + +using ::testing::_; +using ::testing::ElementsAre; +using ::testing::NiceMock; +using ::testing::SaveArg; + +std::unique_ptr CreateRtpFrameObject( + const RTPVideoHeader& video_header) { + return std::make_unique( + 0, 0, true, 0, 0, 0, 0, 0, VideoSendTiming(), 0, video_header.codec, + kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header, + absl::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(0)); +} + +std::unique_ptr CreateRtpFrameObject() { + return CreateRtpFrameObject(RTPVideoHeader()); +} + +class TestRtpVideoFrameReceiver : public RtpVideoFrameReceiver { + public: + TestRtpVideoFrameReceiver() {} + ~TestRtpVideoFrameReceiver() override = default; + + MOCK_METHOD(void, + ManageFrame, + (std::unique_ptr frame), + (override)); +}; + +TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, + RegisterTransformedFrameCallbackSinkOnInit) { + TestRtpVideoFrameReceiver receiver; + rtc::scoped_refptr frame_transformer( + new rtc::RefCountedObject()); + rtc::scoped_refptr delegate( + new rtc::RefCountedObject( + &receiver, frame_transformer, rtc::Thread::Current(), + /*remote_ssrc*/ 1111)); + EXPECT_CALL(*frame_transformer, + RegisterTransformedFrameSinkCallback(testing::_, 1111)); + delegate->Init(); +} + +TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, + UnregisterTransformedFrameSinkCallbackOnReset) { + TestRtpVideoFrameReceiver receiver; + 
rtc::scoped_refptr frame_transformer( + new rtc::RefCountedObject()); + rtc::scoped_refptr delegate( + new rtc::RefCountedObject( + &receiver, frame_transformer, rtc::Thread::Current(), + /*remote_ssrc*/ 1111)); + EXPECT_CALL(*frame_transformer, UnregisterTransformedFrameSinkCallback(1111)); + delegate->Reset(); +} + +TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) { + TestRtpVideoFrameReceiver receiver; + rtc::scoped_refptr frame_transformer( + new rtc::RefCountedObject>()); + rtc::scoped_refptr delegate( + new rtc::RefCountedObject( + &receiver, frame_transformer, rtc::Thread::Current(), + /*remote_ssrc*/ 1111)); + auto frame = CreateRtpFrameObject(); + EXPECT_CALL(*frame_transformer, Transform); + delegate->TransformFrame(std::move(frame)); +} + +TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, + ManageFrameOnTransformedFrame) { + TestRtpVideoFrameReceiver receiver; + rtc::scoped_refptr mock_frame_transformer( + new rtc::RefCountedObject>()); + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + &receiver, mock_frame_transformer, rtc::Thread::Current(), + /*remote_ssrc*/ 1111); + + rtc::scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) + .WillOnce(SaveArg<0>(&callback)); + delegate->Init(); + ASSERT_TRUE(callback); + + EXPECT_CALL(receiver, ManageFrame); + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&callback](std::unique_ptr frame) { + callback->OnTransformedFrame(std::move(frame)); + }); + delegate->TransformFrame(CreateRtpFrameObject()); + rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); +} + +TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, + TransformableFrameMetadataHasCorrectValue) { + TestRtpVideoFrameReceiver receiver; + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + rtc::scoped_refptr delegate = + new rtc::RefCountedObject( + &receiver, mock_frame_transformer, rtc::Thread::Current(), 1111); + 
delegate->Init(); + RTPVideoHeader video_header; + video_header.width = 1280u; + video_header.height = 720u; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.frame_id = 10; + generic.temporal_index = 3; + generic.spatial_index = 2; + generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; + generic.dependencies = {5}; + + // Check that the transformable frame passed to the frame transformer has the + // correct metadata. + EXPECT_CALL(*mock_frame_transformer, Transform) + .WillOnce( + [](std::unique_ptr transformable_frame) { + auto frame = + absl::WrapUnique(static_cast( + transformable_frame.release())); + ASSERT_TRUE(frame); + auto metadata = frame->GetMetadata(); + EXPECT_EQ(metadata.GetWidth(), 1280u); + EXPECT_EQ(metadata.GetHeight(), 720u); + EXPECT_EQ(metadata.GetFrameId(), 10); + EXPECT_EQ(metadata.GetTemporalIndex(), 3); + EXPECT_EQ(metadata.GetSpatialIndex(), 2); + EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5)); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), + ElementsAre(DecodeTargetIndication::kSwitch)); + }); + // The delegate creates a transformable frame from the RtpFrameObject. 
+ delegate->TransformFrame(CreateRtpFrameObject(video_header)); +} + +} // namespace +} // namespace webrtc diff --git a/video/rtp_video_stream_receiver_unittest.cc b/video/rtp_video_stream_receiver_unittest.cc index d5d0be5e94..d7c1938438 100644 --- a/video/rtp_video_stream_receiver_unittest.cc +++ b/video/rtp_video_stream_receiver_unittest.cc @@ -17,6 +17,7 @@ #include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" #include "media/base/media_constants.h" +#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_format_vp9.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" @@ -35,6 +36,7 @@ #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/mock_frame_transformer.h" using ::testing::_; using ::testing::ElementsAre; @@ -60,39 +62,56 @@ std::vector GetAbsoluteCaptureTimestamps( return result; } +RTPVideoHeader GetGenericVideoHeader(VideoFrameType frame_type) { + RTPVideoHeader video_header; + video_header.is_first_packet_in_frame = true; + video_header.is_last_packet_in_frame = true; + video_header.codec = kVideoCodecGeneric; + video_header.frame_type = frame_type; + return video_header; +} + class MockTransport : public Transport { public: - MOCK_METHOD3(SendRtp, - bool(const uint8_t* packet, - size_t length, - const PacketOptions& options)); - MOCK_METHOD2(SendRtcp, bool(const uint8_t* packet, size_t length)); + MOCK_METHOD(bool, + SendRtp, + (const uint8_t*, size_t length, const PacketOptions& options), + (override)); + MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override)); }; class MockNackSender : public NackSender { public: - MOCK_METHOD1(SendNack, void(const std::vector& sequence_numbers)); - MOCK_METHOD2(SendNack, - void(const std::vector& sequence_numbers, - bool buffering_allowed)); + MOCK_METHOD(void, + SendNack, + (const std::vector& sequence_numbers, + 
bool buffering_allowed), + (override)); }; class MockKeyFrameRequestSender : public KeyFrameRequestSender { public: - MOCK_METHOD0(RequestKeyFrame, void()); + MOCK_METHOD(void, RequestKeyFrame, (), (override)); }; class MockOnCompleteFrameCallback : public video_coding::OnCompleteFrameCallback { public: - MOCK_METHOD1(DoOnCompleteFrame, void(video_coding::EncodedFrame* frame)); - MOCK_METHOD1(DoOnCompleteFrameFailNullptr, - void(video_coding::EncodedFrame* frame)); - MOCK_METHOD1(DoOnCompleteFrameFailLength, - void(video_coding::EncodedFrame* frame)); - MOCK_METHOD1(DoOnCompleteFrameFailBitstream, - void(video_coding::EncodedFrame* frame)); - void OnCompleteFrame(std::unique_ptr frame) { + MOCK_METHOD(void, DoOnCompleteFrame, (video_coding::EncodedFrame*), ()); + MOCK_METHOD(void, + DoOnCompleteFrameFailNullptr, + (video_coding::EncodedFrame*), + ()); + MOCK_METHOD(void, + DoOnCompleteFrameFailLength, + (video_coding::EncodedFrame*), + ()); + MOCK_METHOD(void, + DoOnCompleteFrameFailBitstream, + (video_coding::EncodedFrame*), + ()); + void OnCompleteFrame( + std::unique_ptr frame) override { if (!frame) { DoOnCompleteFrameFailNullptr(nullptr); return; @@ -121,17 +140,19 @@ class MockOnCompleteFrameCallback class MockRtpPacketSink : public RtpPacketSinkInterface { public: - MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived&)); + MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override)); }; constexpr uint32_t kSsrc = 111; constexpr uint16_t kSequenceNumber = 222; -std::unique_ptr CreateRtpPacketReceived( - uint32_t ssrc = kSsrc, - uint16_t sequence_number = kSequenceNumber) { +constexpr int kPayloadType = 100; +constexpr int kRedPayloadType = 125; + +std::unique_ptr CreateRtpPacketReceived() { auto packet = std::make_unique(); - packet->SetSsrc(ssrc); - packet->SetSequenceNumber(sequence_number); + packet->SetSsrc(kSsrc); + packet->SetSequenceNumber(kSequenceNumber); + packet->SetPayloadType(kPayloadType); return packet; } @@ -148,16 +169,18 @@ 
class RtpVideoStreamReceiverTest : public ::testing::Test { explicit RtpVideoStreamReceiverTest(std::string field_trials) : override_field_trials_(field_trials), config_(CreateConfig()), - process_thread_(ProcessThread::Create("TestThread")) {} - - void SetUp() { + process_thread_(ProcessThread::Create("TestThread")) { rtp_receive_statistics_ = ReceiveStatistics::Create(Clock::GetRealTimeClock()); rtp_video_stream_receiver_ = std::make_unique( Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_, - rtp_receive_statistics_.get(), nullptr, process_thread_.get(), + rtp_receive_statistics_.get(), nullptr, nullptr, process_thread_.get(), &mock_nack_sender_, &mock_key_frame_request_sender_, - &mock_on_complete_frame_callback_, nullptr); + &mock_on_complete_frame_callback_, nullptr, nullptr); + VideoCodec codec; + codec.codecType = kVideoCodecGeneric; + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {}, + /*raw_payload=*/false); } RTPVideoHeader GetDefaultH264VideoHeader() { @@ -176,7 +199,7 @@ class RtpVideoStreamReceiverTest : public ::testing::Test { info.type = H264::NaluType::kSps; info.sps_id = sps_id; info.pps_id = -1; - data->AppendData({H264::NaluType::kSps, sps_id}); + data->AppendData({H264::NaluType::kSps, sps_id}); auto& h264 = absl::get(video_header->video_type_header); h264.nalus[h264.nalus_length++] = info; } @@ -189,7 +212,7 @@ class RtpVideoStreamReceiverTest : public ::testing::Test { info.type = H264::NaluType::kPps; info.sps_id = sps_id; info.pps_id = pps_id; - data->AppendData({H264::NaluType::kPps, pps_id}); + data->AppendData({H264::NaluType::kPps, pps_id}); auto& h264 = absl::get(video_header->video_type_header); h264.nalus[h264.nalus_length++] = info; } @@ -208,6 +231,7 @@ class RtpVideoStreamReceiverTest : public ::testing::Test { VideoReceiveStream::Config config(nullptr); config.rtp.remote_ssrc = 1111; config.rtp.local_ssrc = 2222; + config.rtp.red_payload_type = kRedPayloadType; return config; } @@ -225,7 
+249,7 @@ class RtpVideoStreamReceiverTest : public ::testing::Test { TEST_F(RtpVideoStreamReceiverTest, CacheColorSpaceFromLastPacketOfKeyframe) { // Test that color space is cached from the last packet of a key frame and // that it's not reset by padding packets without color space. - constexpr int kPayloadType = 99; + constexpr int kVp9PayloadType = 99; const ColorSpace kColorSpace( ColorSpace::PrimaryID::kFILM, ColorSpace::TransferID::kBT2020_12, ColorSpace::MatrixID::kBT2020_NCL, ColorSpace::RangeID::kFull); @@ -262,7 +286,7 @@ TEST_F(RtpVideoStreamReceiverTest, CacheColorSpaceFromLastPacketOfKeyframe) { RtpPacketToSend packet_to_send(&extension_map); packet_to_send.SetSequenceNumber(sequence_number_++); packet_to_send.SetSsrc(kSsrc); - packet_to_send.SetPayloadType(kPayloadType); + packet_to_send.SetPayloadType(kVp9PayloadType); bool include_color_space = (rtp_packetizer_->NumPackets() == 1u && video_frame_type_ == VideoFrameType::kVideoFrameKey); @@ -287,10 +311,10 @@ TEST_F(RtpVideoStreamReceiverTest, CacheColorSpaceFromLastPacketOfKeyframe) { // Prepare the receiver for VP9. VideoCodec codec; - codec.plType = kPayloadType; codec.codecType = kVideoCodecVP9; std::map codec_params; - rtp_video_stream_receiver_->AddReceiveCodec(codec, codec_params, + rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, codec, + codec_params, /*raw_payload=*/false); // Generate key frame packets. 
@@ -340,13 +364,11 @@ TEST_F(RtpVideoStreamReceiverTest, CacheColorSpaceFromLastPacketOfKeyframe) { TEST_F(RtpVideoStreamReceiverTest, GenericKeyFrame) { RtpPacketReceived rtp_packet; - RTPVideoHeader video_header; rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + rtp_packet.SetPayloadType(kPayloadType); rtp_packet.SetSequenceNumber(1); - video_header.is_first_packet_in_frame = true; - video_header.is_last_packet_in_frame = true; - video_header.codec = kVideoCodecGeneric; - video_header.frame_type = VideoFrameType::kVideoFrameKey; + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); @@ -361,7 +383,7 @@ TEST_F(RtpVideoStreamReceiverTest, PacketInfoIsPropagatedIntoVideoFrames) { RtpHeaderExtensionMap extension_map; extension_map.Register(kId0); RtpPacketReceived rtp_packet(&extension_map); - RTPVideoHeader video_header; + rtp_packet.SetPayloadType(kPayloadType); rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); rtp_packet.SetSequenceNumber(1); rtp_packet.SetTimestamp(1); @@ -370,10 +392,8 @@ TEST_F(RtpVideoStreamReceiverTest, PacketInfoIsPropagatedIntoVideoFrames) { AbsoluteCaptureTime{kAbsoluteCaptureTimestamp, /*estimated_capture_clock_offset=*/absl::nullopt}); - video_header.is_first_packet_in_frame = true; - video_header.is_last_packet_in_frame = true; - video_header.codec = kVideoCodecGeneric; - video_header.frame_type = VideoFrameType::kVideoFrameKey; + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) @@ -394,8 +414,8 @@ TEST_F(RtpVideoStreamReceiverTest, RtpHeaderExtensionMap extension_map; extension_map.Register(kId0); RtpPacketReceived rtp_packet(&extension_map); + 
rtp_packet.SetPayloadType(kPayloadType); - RTPVideoHeader video_header; rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); uint16_t sequence_number = 1; uint32_t rtp_timestamp = 1; @@ -406,10 +426,8 @@ TEST_F(RtpVideoStreamReceiverTest, AbsoluteCaptureTime{kAbsoluteCaptureTimestamp, /*estimated_capture_clock_offset=*/absl::nullopt}); - video_header.is_first_packet_in_frame = true; - video_header.is_last_packet_in_frame = true; - video_header.codec = kVideoCodecGeneric; - video_header.frame_type = VideoFrameType::kVideoFrameKey; + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); @@ -418,6 +436,7 @@ TEST_F(RtpVideoStreamReceiverTest, // Rtp packet without absolute capture time. rtp_packet = RtpPacketReceived(&extension_map); + rtp_packet.SetPayloadType(kPayloadType); rtp_packet.SetSequenceNumber(++sequence_number); rtp_packet.SetTimestamp(++rtp_timestamp); rtp_packet.SetSsrc(kSsrc); @@ -434,10 +453,6 @@ TEST_F(RtpVideoStreamReceiverTest, } TEST_F(RtpVideoStreamReceiverTest, NoInfiniteRecursionOnEncapsulatedRedPacket) { - const uint8_t kRedPayloadType = 125; - VideoCodec codec; - codec.plType = kRedPayloadType; - rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/false); const std::vector data({ 0x80, // RTP version. kRedPayloadType, // Payload type. 
@@ -477,13 +492,11 @@ TEST_F(RtpVideoStreamReceiverTest, TEST_F(RtpVideoStreamReceiverTest, GenericKeyFrameBitstreamError) { RtpPacketReceived rtp_packet; - RTPVideoHeader video_header; + rtp_packet.SetPayloadType(kPayloadType); rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); rtp_packet.SetSequenceNumber(1); - video_header.is_first_packet_in_frame = true; - video_header.is_last_packet_in_frame = true; - video_header.codec = kVideoCodecGeneric; - video_header.frame_type = VideoFrameType::kVideoFrameKey; + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); constexpr uint8_t expected_bitsteam[] = {1, 2, 3, 0xff}; mock_on_complete_frame_callback_.AppendExpectedBitstream( expected_bitsteam, sizeof(expected_bitsteam)); @@ -510,6 +523,7 @@ TEST_P(RtpVideoStreamReceiverTestH264, InBandSpsPps) { RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader(); AddSps(&sps_video_header, 0, &sps_data); rtp_packet.SetSequenceNumber(0); + rtp_packet.SetPayloadType(kPayloadType); sps_video_header.is_first_packet_in_frame = true; sps_video_header.frame_type = VideoFrameType::kEmptyFrame; mock_on_complete_frame_callback_.AppendExpectedBitstream( @@ -553,13 +567,12 @@ TEST_P(RtpVideoStreamReceiverTestH264, InBandSpsPps) { TEST_P(RtpVideoStreamReceiverTestH264, OutOfBandFmtpSpsPps) { constexpr int kPayloadType = 99; VideoCodec codec; - codec.plType = kPayloadType; std::map codec_params; // Example parameter sets from https://tools.ietf.org/html/rfc3984#section-8.2 // . 
codec_params.insert( {cricket::kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="}); - rtp_video_stream_receiver_->AddReceiveCodec(codec, codec_params, + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params, /*raw_payload=*/false); const uint8_t binary_sps[] = {0x67, 0x42, 0x00, 0x0a, 0x96, 0x53, 0x05, 0x89, 0x88}; @@ -592,11 +605,82 @@ TEST_P(RtpVideoStreamReceiverTestH264, OutOfBandFmtpSpsPps) { video_header); } +TEST_P(RtpVideoStreamReceiverTestH264, ForceSpsPpsIdrIsKeyframe) { + constexpr int kPayloadType = 99; + VideoCodec codec; + std::map codec_params; + if (GetParam() == + "") { // Forcing can be done either with field trial or codec_params. + codec_params.insert({cricket::kH264FmtpSpsPpsIdrInKeyframe, ""}); + } + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params, + /*raw_payload=*/false); + rtc::CopyOnWriteBuffer sps_data; + RtpPacketReceived rtp_packet; + RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader(); + AddSps(&sps_video_header, 0, &sps_data); + rtp_packet.SetSequenceNumber(0); + rtp_packet.SetPayloadType(kPayloadType); + sps_video_header.is_first_packet_in_frame = true; + sps_video_header.frame_type = VideoFrameType::kEmptyFrame; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(), + sps_data.size()); + rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet, + sps_video_header); + + rtc::CopyOnWriteBuffer pps_data; + RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader(); + AddPps(&pps_video_header, 0, 1, &pps_data); + rtp_packet.SetSequenceNumber(1); + pps_video_header.is_first_packet_in_frame = true; + pps_video_header.frame_type = VideoFrameType::kEmptyFrame; + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(), 
+ pps_data.size()); + rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet, + pps_video_header); + + rtc::CopyOnWriteBuffer idr_data; + RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader(); + AddIdr(&idr_video_header, 1); + rtp_packet.SetSequenceNumber(2); + idr_video_header.is_first_packet_in_frame = true; + idr_video_header.is_last_packet_in_frame = true; + idr_video_header.frame_type = VideoFrameType::kVideoFrameKey; + const uint8_t idr[] = {0x65, 1, 2, 3}; + idr_data.AppendData(idr); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(), + idr_data.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_TRUE(frame->is_keyframe()); + }); + rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, + idr_video_header); + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kH264StartCode, sizeof(kH264StartCode)); + mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(), + idr_data.size()); + rtp_packet.SetSequenceNumber(3); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_FALSE(frame->is_keyframe()); + }); + rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, + idr_video_header); +} + TEST_F(RtpVideoStreamReceiverTest, PaddingInMediaStream) { RtpPacketReceived rtp_packet; RTPVideoHeader video_header = GetDefaultH264VideoHeader(); rtc::CopyOnWriteBuffer data({1, 2, 3}); - rtp_packet.SetPayloadType(99); + rtp_packet.SetPayloadType(kPayloadType); rtp_packet.SetSequenceNumber(2); video_header.is_first_packet_in_frame = true; video_header.is_last_packet_in_frame = true; @@ -631,13 +715,11 @@ TEST_F(RtpVideoStreamReceiverTest, 
PaddingInMediaStream) { TEST_F(RtpVideoStreamReceiverTest, RequestKeyframeIfFirstFrameIsDelta) { RtpPacketReceived rtp_packet; - RTPVideoHeader video_header; + rtp_packet.SetPayloadType(kPayloadType); rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); rtp_packet.SetSequenceNumber(1); - video_header.is_first_packet_in_frame = true; - video_header.is_last_packet_in_frame = true; - video_header.codec = kVideoCodecGeneric; - video_header.frame_type = VideoFrameType::kVideoFrameDelta; + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta); EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame()); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, video_header); @@ -647,13 +729,12 @@ TEST_F(RtpVideoStreamReceiverTest, RequestKeyframeWhenPacketBufferGetsFull) { constexpr int kPacketBufferMaxSize = 2048; RtpPacketReceived rtp_packet; - RTPVideoHeader video_header; + rtp_packet.SetPayloadType(kPayloadType); rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); - video_header.is_first_packet_in_frame = true; + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta); // Incomplete frames so that the packet buffer is filling up. 
video_header.is_last_packet_in_frame = false; - video_header.codec = kVideoCodecGeneric; - video_header.frame_type = VideoFrameType::kVideoFrameDelta; uint16_t start_sequence_number = 1234; rtp_packet.SetSequenceNumber(start_sequence_number); while (rtp_packet.SequenceNumber() - start_sequence_number < @@ -746,63 +827,16 @@ TEST_F(RtpVideoStreamReceiverTest, rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink); } -class RtpVideoStreamReceiverGenericDescriptorTest - : public RtpVideoStreamReceiverTest, - public ::testing::WithParamInterface { - public: - void RegisterRtpGenericFrameDescriptorExtension( - RtpHeaderExtensionMap* extension_map, - int version) { - constexpr int kId00 = 5; - constexpr int kId01 = 6; - switch (version) { - case 0: - extension_map->Register(kId00); - return; - case 1: - extension_map->Register(kId01); - return; - } - RTC_NOTREACHED(); - } - - bool SetExtensionRtpGenericFrameDescriptorExtension( - const RtpGenericFrameDescriptor& generic_descriptor, - RtpPacketReceived* rtp_packet, - int version) { - switch (version) { - case 0: - return rtp_packet->SetExtension( - generic_descriptor); - case 1: - return rtp_packet->SetExtension( - generic_descriptor); - } - RTC_NOTREACHED(); - return false; - } -}; - -INSTANTIATE_TEST_SUITE_P(All, - RtpVideoStreamReceiverGenericDescriptorTest, - Values(0, 1)); - -TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, - ParseGenericDescriptorOnePacket) { - const int version = GetParam(); - +TEST_F(RtpVideoStreamReceiverTest, ParseGenericDescriptorOnePacket) { const std::vector data = {0, 1, 2, 3, 4}; - const int kPayloadType = 123; const int kSpatialIndex = 1; - VideoCodec codec; - codec.plType = kPayloadType; - rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/false); rtp_video_stream_receiver_->StartReceive(); RtpHeaderExtensionMap extension_map; - RegisterRtpGenericFrameDescriptorExtension(&extension_map, version); + extension_map.Register(5); RtpPacketReceived 
rtp_packet(&extension_map); + rtp_packet.SetPayloadType(kPayloadType); RtpGenericFrameDescriptor generic_descriptor; generic_descriptor.SetFirstPacketInSubFrame(true); @@ -811,8 +845,8 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, generic_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex); generic_descriptor.AddFrameDependencyDiff(90); generic_descriptor.AddFrameDependencyDiff(80); - ASSERT_TRUE(SetExtensionRtpGenericFrameDescriptorExtension( - generic_descriptor, &rtp_packet, version)); + ASSERT_TRUE(rtp_packet.SetExtension( + generic_descriptor)); uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); memcpy(payload, data.data(), data.size()); @@ -836,21 +870,14 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); } -TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, - ParseGenericDescriptorTwoPackets) { - const int version = GetParam(); - +TEST_F(RtpVideoStreamReceiverTest, ParseGenericDescriptorTwoPackets) { const std::vector data = {0, 1, 2, 3, 4}; - const int kPayloadType = 123; const int kSpatialIndex = 1; - VideoCodec codec; - codec.plType = kPayloadType; - rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/false); rtp_video_stream_receiver_->StartReceive(); RtpHeaderExtensionMap extension_map; - RegisterRtpGenericFrameDescriptorExtension(&extension_map, version); + extension_map.Register(5); RtpPacketReceived first_packet(&extension_map); RtpGenericFrameDescriptor first_packet_descriptor; @@ -859,8 +886,8 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, first_packet_descriptor.SetFrameId(100); first_packet_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex); first_packet_descriptor.SetResolution(480, 360); - ASSERT_TRUE(SetExtensionRtpGenericFrameDescriptorExtension( - first_packet_descriptor, &first_packet, version)); + ASSERT_TRUE(first_packet.SetExtension( + first_packet_descriptor)); uint8_t* first_packet_payload = 
first_packet.SetPayloadSize(data.size()); memcpy(first_packet_payload, data.data(), data.size()); @@ -876,8 +903,8 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, RtpGenericFrameDescriptor second_packet_descriptor; second_packet_descriptor.SetFirstPacketInSubFrame(false); second_packet_descriptor.SetLastPacketInSubFrame(true); - ASSERT_TRUE(SetExtensionRtpGenericFrameDescriptorExtension( - second_packet_descriptor, &second_packet, version)); + ASSERT_TRUE(second_packet.SetExtension( + second_packet_descriptor)); second_packet.SetMarker(true); second_packet.SetPayloadType(kPayloadType); @@ -901,66 +928,24 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, rtp_video_stream_receiver_->OnRtpPacket(second_packet); } -TEST_F(RtpVideoStreamReceiverGenericDescriptorTest, - DropPacketsWithMultipleVersionsOfExtension) { +TEST_F(RtpVideoStreamReceiverTest, ParseGenericDescriptorRawPayload) { const std::vector data = {0, 1, 2, 3, 4}; - const int kPayloadType = 123; + const int kRawPayloadType = 123; VideoCodec codec; - codec.plType = kPayloadType; - rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/false); + rtp_video_stream_receiver_->AddReceiveCodec(kRawPayloadType, codec, {}, + /*raw_payload=*/true); rtp_video_stream_receiver_->StartReceive(); RtpHeaderExtensionMap extension_map; - RegisterRtpGenericFrameDescriptorExtension(&extension_map, 0); - RegisterRtpGenericFrameDescriptorExtension(&extension_map, 1); - RtpPacketReceived rtp_packet(&extension_map); - - RtpGenericFrameDescriptor generic_descriptors[2]; - for (size_t i = 0; i < 2; ++i) { - generic_descriptors[i].SetFirstPacketInSubFrame(true); - generic_descriptors[i].SetLastPacketInSubFrame(true); - generic_descriptors[i].SetFrameId(100); - ASSERT_TRUE(SetExtensionRtpGenericFrameDescriptorExtension( - generic_descriptors[i], &rtp_packet, i)); - } - - uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); - memcpy(payload, data.data(), data.size()); - // The first byte is the header, 
so we ignore the first byte of |data|. - mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1, - data.size() - 1); - - rtp_packet.SetMarker(true); - rtp_packet.SetPayloadType(kPayloadType); - rtp_packet.SetSequenceNumber(1); - - EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0); - - rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); -} - -TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, - ParseGenericDescriptorRawPayload) { - const int version = GetParam(); - - const std::vector data = {0, 1, 2, 3, 4}; - const int kPayloadType = 123; - - VideoCodec codec; - codec.plType = kPayloadType; - rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/true); - rtp_video_stream_receiver_->StartReceive(); - - RtpHeaderExtensionMap extension_map; - RegisterRtpGenericFrameDescriptorExtension(&extension_map, version); + extension_map.Register(5); RtpPacketReceived rtp_packet(&extension_map); RtpGenericFrameDescriptor generic_descriptor; generic_descriptor.SetFirstPacketInSubFrame(true); generic_descriptor.SetLastPacketInSubFrame(true); - ASSERT_TRUE(SetExtensionRtpGenericFrameDescriptorExtension( - generic_descriptor, &rtp_packet, version)); + ASSERT_TRUE(rtp_packet.SetExtension( + generic_descriptor)); uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); memcpy(payload, data.data(), data.size()); @@ -968,24 +953,23 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, data.size()); rtp_packet.SetMarker(true); - rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetPayloadType(kRawPayloadType); rtp_packet.SetSequenceNumber(1); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame); rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); } -TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, UnwrapsFrameId) { - const int version = GetParam(); +TEST_F(RtpVideoStreamReceiverTest, UnwrapsFrameId) { const std::vector data = {0, 1, 2, 3, 4}; const int kPayloadType = 123; VideoCodec codec; - codec.plType = 
kPayloadType; - rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/true); + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {}, + /*raw_payload=*/true); rtp_video_stream_receiver_->StartReceive(); RtpHeaderExtensionMap extension_map; - RegisterRtpGenericFrameDescriptorExtension(&extension_map, version); + extension_map.Register(5); uint16_t rtp_sequence_number = 1; auto inject_packet = [&](uint16_t wrapped_frame_id) { @@ -995,8 +979,8 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, UnwrapsFrameId) { generic_descriptor.SetFirstPacketInSubFrame(true); generic_descriptor.SetLastPacketInSubFrame(true); generic_descriptor.SetFrameId(wrapped_frame_id); - ASSERT_TRUE(SetExtensionRtpGenericFrameDescriptorExtension( - generic_descriptor, &rtp_packet, version)); + ASSERT_TRUE(rtp_packet.SetExtension( + generic_descriptor)); uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); ASSERT_TRUE(payload); @@ -1024,8 +1008,172 @@ TEST_P(RtpVideoStreamReceiverGenericDescriptorTest, UnwrapsFrameId) { inject_packet(/*wrapped_frame_id=*/0x0002); } +class RtpVideoStreamReceiverDependencyDescriptorTest + : public RtpVideoStreamReceiverTest { + public: + RtpVideoStreamReceiverDependencyDescriptorTest() { + VideoCodec codec; + rtp_video_stream_receiver_->AddReceiveCodec(payload_type_, codec, {}, + /*raw_payload=*/true); + extension_map_.Register(7); + rtp_video_stream_receiver_->StartReceive(); + } + + // Returns some valid structure for the DependencyDescriptors. + // First template of that structure always fit for a key frame. 
+ static FrameDependencyStructure CreateStreamStructure() { + FrameDependencyStructure stream_structure; + stream_structure.num_decode_targets = 1; + stream_structure.templates = { + FrameDependencyTemplate().Dtis("S"), + FrameDependencyTemplate().Dtis("S").FrameDiffs({1}), + }; + return stream_structure; + } + + void InjectPacketWith(const FrameDependencyStructure& stream_structure, + const DependencyDescriptor& dependency_descriptor) { + const std::vector data = {0, 1, 2, 3, 4}; + RtpPacketReceived rtp_packet(&extension_map_); + ASSERT_TRUE(rtp_packet.SetExtension( + stream_structure, dependency_descriptor)); + uint8_t* payload = rtp_packet.SetPayloadSize(data.size()); + ASSERT_TRUE(payload); + memcpy(payload, data.data(), data.size()); + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + rtp_packet.SetMarker(true); + rtp_packet.SetPayloadType(payload_type_); + rtp_packet.SetSequenceNumber(++rtp_sequence_number_); + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); + } + + private: + const int payload_type_ = 123; + RtpHeaderExtensionMap extension_map_; + uint16_t rtp_sequence_number_ = 321; +}; + +TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest, UnwrapsFrameId) { + FrameDependencyStructure stream_structure = CreateStreamStructure(); + + DependencyDescriptor keyframe_descriptor; + keyframe_descriptor.attached_structure = + std::make_unique(stream_structure); + keyframe_descriptor.frame_dependencies = stream_structure.templates[0]; + keyframe_descriptor.frame_number = 0xfff0; + // DependencyDescriptor doesn't support reordering delta frame before + // keyframe. Thus feed a key frame first, then test reodered delta frames. 
+ int64_t first_picture_id; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + first_picture_id = frame->id.picture_id; + }); + InjectPacketWith(stream_structure, keyframe_descriptor); + + DependencyDescriptor deltaframe1_descriptor; + deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1]; + deltaframe1_descriptor.frame_number = 0xfffe; + + DependencyDescriptor deltaframe2_descriptor; + deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1]; + deltaframe2_descriptor.frame_number = 0x0002; + + // Parser should unwrap frame ids correctly even if packets were reordered by + // the network. + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + // 0x0002 - 0xfff0 + EXPECT_EQ(frame->id.picture_id - first_picture_id, 18); + }) + .WillOnce([&](video_coding::EncodedFrame* frame) { + // 0xfffe - 0xfff0 + EXPECT_EQ(frame->id.picture_id - first_picture_id, 14); + }); + InjectPacketWith(stream_structure, deltaframe2_descriptor); + InjectPacketWith(stream_structure, deltaframe1_descriptor); +} + +TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest, + DropsLateDeltaFramePacketWithDependencyDescriptorExtension) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + // Make sure template ids for these two structures do not collide: + // adjust structure_id (that is also used as template id offset). 
+ stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame); + InjectPacketWith(stream_structure1, keyframe1_descriptor); + + // Pass in 2nd key frame with different structure. + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 3; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame); + InjectPacketWith(stream_structure2, keyframe2_descriptor); + + // Pass in late delta frame that uses structure of the 1st key frame. + DependencyDescriptor deltaframe_descriptor; + deltaframe_descriptor.frame_dependencies = stream_structure1.templates[0]; + deltaframe_descriptor.frame_number = 2; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0); + InjectPacketWith(stream_structure1, deltaframe_descriptor); +} + +TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest, + DropsLateKeyFramePacketWithDependencyDescriptorExtension) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + // Make sure template ids for these two structures do not collide: + // adjust structure_id (that is also used as template id offset). 
+ stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 3; + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->id.picture_id & 0xFFFF, 3); + }); + InjectPacketWith(stream_structure2, keyframe2_descriptor); + InjectPacketWith(stream_structure1, keyframe1_descriptor); + + // Pass in delta frame that uses structure of the 2nd key frame. Late key + // frame shouldn't block it. 
+ DependencyDescriptor deltaframe_descriptor; + deltaframe_descriptor.frame_dependencies = stream_structure2.templates[0]; + deltaframe_descriptor.frame_number = 4; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce([&](video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->id.picture_id & 0xFFFF, 4); + }); + InjectPacketWith(stream_structure2, deltaframe_descriptor); +} + #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(RtpVideoStreamReceiverTest, RepeatedSecondarySinkDisallowed) { +using RtpVideoStreamReceiverDeathTest = RtpVideoStreamReceiverTest; +TEST_F(RtpVideoStreamReceiverDeathTest, RepeatedSecondarySinkDisallowed) { MockRtpPacketSink secondary_sink; rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink); @@ -1037,4 +1185,92 @@ TEST_F(RtpVideoStreamReceiverTest, RepeatedSecondarySinkDisallowed) { } #endif +TEST_F(RtpVideoStreamReceiverTest, TransformFrame) { + rtc::scoped_refptr mock_frame_transformer = + new rtc::RefCountedObject>(); + EXPECT_CALL(*mock_frame_transformer, + RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc)); + auto receiver = std::make_unique( + Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_, + rtp_receive_statistics_.get(), nullptr, nullptr, process_thread_.get(), + &mock_nack_sender_, nullptr, &mock_on_complete_frame_callback_, nullptr, + mock_frame_transformer); + VideoCodec video_codec; + video_codec.codecType = kVideoCodecGeneric; + receiver->AddReceiveCodec(kPayloadType, video_codec, {}, + /*raw_payload=*/false); + + RtpPacketReceived rtp_packet; + rtp_packet.SetPayloadType(kPayloadType); + rtc::CopyOnWriteBuffer data({1, 2, 3, 4}); + rtp_packet.SetSequenceNumber(1); + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + EXPECT_CALL(*mock_frame_transformer, Transform(_)); + 
receiver->OnReceivedPayloadData(data, rtp_packet, video_header); + + EXPECT_CALL(*mock_frame_transformer, + UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc)); + receiver = nullptr; +} + +// Test default behavior and when playout delay is overridden by field trial. +const VideoPlayoutDelay kTransmittedPlayoutDelay = {100, 200}; +const VideoPlayoutDelay kForcedPlayoutDelay = {70, 90}; +struct PlayoutDelayOptions { + std::string field_trial; + VideoPlayoutDelay expected_delay; +}; +const PlayoutDelayOptions kDefaultBehavior = { + /*field_trial=*/"", /*expected_delay=*/kTransmittedPlayoutDelay}; +const PlayoutDelayOptions kOverridePlayoutDelay = { + /*field_trial=*/"WebRTC-ForcePlayoutDelay/min_ms:70,max_ms:90/", + /*expected_delay=*/kForcedPlayoutDelay}; + +class RtpVideoStreamReceiverTestPlayoutDelay + : public RtpVideoStreamReceiverTest, + public ::testing::WithParamInterface { + protected: + RtpVideoStreamReceiverTestPlayoutDelay() + : RtpVideoStreamReceiverTest(GetParam().field_trial) {} +}; + +INSTANTIATE_TEST_SUITE_P(PlayoutDelay, + RtpVideoStreamReceiverTestPlayoutDelay, + Values(kDefaultBehavior, kOverridePlayoutDelay)); + +TEST_P(RtpVideoStreamReceiverTestPlayoutDelay, PlayoutDelay) { + rtc::CopyOnWriteBuffer payload_data({1, 2, 3, 4}); + RtpHeaderExtensionMap extension_map; + extension_map.Register(1); + RtpPacketToSend packet_to_send(&extension_map); + packet_to_send.SetPayloadType(kPayloadType); + packet_to_send.SetSequenceNumber(1); + + // Set playout delay on outgoing packet. 
+ EXPECT_TRUE(packet_to_send.SetExtension( + kTransmittedPlayoutDelay)); + uint8_t* payload = packet_to_send.AllocatePayload(payload_data.size()); + memcpy(payload, payload_data.data(), payload_data.size()); + + RtpPacketReceived received_packet(&extension_map); + received_packet.Parse(packet_to_send.data(), packet_to_send.size()); + + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + mock_on_complete_frame_callback_.AppendExpectedBitstream(payload_data.data(), + payload_data.size()); + // Expect the playout delay of encoded frame to be the same as the transmitted + // playout delay unless it was overridden by a field trial. + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce(Invoke([expected_playout_delay = GetParam().expected_delay]( + video_coding::EncodedFrame* frame) { + EXPECT_EQ(frame->EncodedImage().playout_delay_, expected_playout_delay); + })); + rtp_video_stream_receiver_->OnReceivedPayloadData( + received_packet.PayloadBuffer(), received_packet, video_header); +} + } // namespace webrtc diff --git a/video/screenshare_loopback.cc b/video/screenshare_loopback.cc index b8121dc85c..239e472f6e 100644 --- a/video/screenshare_loopback.cc +++ b/video/screenshare_loopback.cc @@ -325,32 +325,29 @@ void Loopback() { call_bitrate_config.max_bitrate_bps = -1; // Don't cap bandwidth estimate. VideoQualityTest::Params params; - params.call = {absl::GetFlag(FLAGS_send_side_bwe), - absl::GetFlag(FLAGS_generic_descriptor), call_bitrate_config}; - params.video[0] = {true, - Width(), - Height(), - Fps(), - MinBitrateKbps() * 1000, - TargetBitrateKbps() * 1000, - MaxBitrateKbps() * 1000, - false, - Codec(), - NumTemporalLayers(), - SelectedTL(), - MinTransmitBitrateKbps() * 1000, - false, // ULPFEC disabled. - false, // FlexFEC disabled. - false, // Automatic scaling disabled. - "", - 0, // capture_device_index. 
- SdpVideoFormat::Parameters()}; - params.screenshare[0] = {true, GenerateSlides(), SlideChangeInterval(), - ScrollDuration(), Slides()}; - params.analyzer = {"screenshare", 0.0, 0.0, DurationSecs(), - OutputFilename(), GraphTitle()}; + params.call.send_side_bwe = absl::GetFlag(FLAGS_send_side_bwe); + params.call.generic_descriptor = absl::GetFlag(FLAGS_generic_descriptor); + params.call.call_bitrate_config = call_bitrate_config; + params.video[0].enabled = true; + params.video[0].width = Width(); + params.video[0].height = Height(); + params.video[0].fps = Fps(); + params.video[0].min_bitrate_bps = MinBitrateKbps() * 1000; + params.video[0].target_bitrate_bps = TargetBitrateKbps() * 1000; + params.video[0].max_bitrate_bps = MaxBitrateKbps() * 1000; + params.video[0].codec = Codec(); + params.video[0].num_temporal_layers = NumTemporalLayers(); + params.video[0].selected_tl = SelectedTL(); + params.video[0].min_transmit_bps = MinTransmitBitrateKbps() * 1000; + params.screenshare[0].enabled = true; + params.screenshare[0].generate_slides = GenerateSlides(); + params.screenshare[0].slide_change_interval = SlideChangeInterval(); + params.screenshare[0].scroll_duration = ScrollDuration(); + params.screenshare[0].slides = Slides(); params.config = pipe_config; - params.logging = {RtcEventLogName(), RtpDumpName(), EncodedFramePath()}; + params.logging.rtc_event_log_name = RtcEventLogName(); + params.logging.rtp_dump_name = RtpDumpName(); + params.logging.encoded_frame_base_path = EncodedFramePath(); if (NumStreams() > 1 && Stream0().empty() && Stream1().empty()) { params.ss[0].infer_streams = true; diff --git a/video/send_delay_stats.cc b/video/send_delay_stats.cc index a243eda292..56c4164424 100644 --- a/video/send_delay_stats.cc +++ b/video/send_delay_stats.cc @@ -41,7 +41,7 @@ SendDelayStats::~SendDelayStats() { } void SendDelayStats::UpdateHistograms() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); for (const auto& it : send_delay_counters_) { 
AggregatedStats stats = it.second->GetStats(); if (stats.num_samples >= kMinRequiredPeriodicSamples) { @@ -52,7 +52,7 @@ void SendDelayStats::UpdateHistograms() { } void SendDelayStats::AddSsrcs(const VideoSendStream::Config& config) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (ssrcs_.size() > kMaxSsrcMapSize) return; for (const auto& ssrc : config.rtp.ssrcs) @@ -73,7 +73,7 @@ void SendDelayStats::OnSendPacket(uint16_t packet_id, int64_t capture_time_ms, uint32_t ssrc) { // Packet sent to transport. - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (ssrcs_.find(ssrc) == ssrcs_.end()) return; @@ -93,7 +93,7 @@ bool SendDelayStats::OnSentPacket(int packet_id, int64_t time_ms) { if (packet_id == -1) return false; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); auto it = packets_.find(packet_id); if (it == packets_.end()) return false; diff --git a/video/send_delay_stats.h b/video/send_delay_stats.h index d9fa16a126..20f9804d64 100644 --- a/video/send_delay_stats.h +++ b/video/send_delay_stats.h @@ -20,7 +20,7 @@ #include "call/video_send_stream.h" #include "modules/include/module_common_types_public.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "video/stats_counter.h" @@ -66,22 +66,22 @@ class SendDelayStats : public SendPacketObserver { void UpdateHistograms(); void RemoveOld(int64_t now, PacketMap* packets) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); AvgCounter* GetSendDelayCounter(uint32_t ssrc) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* const clock_; - rtc::CriticalSection crit_; + Mutex mutex_; - PacketMap packets_ RTC_GUARDED_BY(crit_); - size_t num_old_packets_ RTC_GUARDED_BY(crit_); - size_t num_skipped_packets_ RTC_GUARDED_BY(crit_); + PacketMap packets_ RTC_GUARDED_BY(mutex_); + size_t num_old_packets_ 
RTC_GUARDED_BY(mutex_); + size_t num_skipped_packets_ RTC_GUARDED_BY(mutex_); - std::set ssrcs_ RTC_GUARDED_BY(crit_); + std::set ssrcs_ RTC_GUARDED_BY(mutex_); // Mapped by SSRC. std::map> send_delay_counters_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc index a4f17547bd..92545ecf9e 100644 --- a/video/send_statistics_proxy.cc +++ b/video/send_statistics_proxy.cc @@ -16,7 +16,7 @@ #include #include -#include "absl/algorithm/container.h" +#include "absl/strings/match.h" #include "api/video/video_codec_constants.h" #include "api/video/video_codec_type.h" #include "api/video_codecs/video_codec.h" @@ -47,6 +47,9 @@ enum HistogramCodecType { kVideoVp8 = 1, kVideoVp9 = 2, kVideoH264 = 3, +#ifndef DISABLE_H265 + kVideoH265 = 4, +#endif kVideoMax = 64, }; @@ -74,6 +77,10 @@ HistogramCodecType PayloadNameToHistogramCodecType( return kVideoVp9; case kVideoCodecH264: return kVideoH264; +#ifndef DISABLE_H265 + case kVideoCodecH265: + return kVideoH265; +#endif default: return kVideoUnknown; } @@ -113,15 +120,17 @@ absl::optional GetFallbackMaxPixels(const std::string& group) { absl::optional GetFallbackMaxPixelsIfFieldTrialEnabled() { std::string group = webrtc::field_trial::FindFullName(kVp8ForcedFallbackEncoderFieldTrial); - return (group.find("Enabled") == 0) ? GetFallbackMaxPixels(group.substr(7)) - : absl::optional(); + return (absl::StartsWith(group, "Enabled")) + ? GetFallbackMaxPixels(group.substr(7)) + : absl::optional(); } absl::optional GetFallbackMaxPixelsIfFieldTrialDisabled() { std::string group = webrtc::field_trial::FindFullName(kVp8ForcedFallbackEncoderFieldTrial); - return (group.find("Disabled") == 0) ? GetFallbackMaxPixels(group.substr(8)) - : absl::optional(); + return (absl::StartsWith(group, "Disabled")) + ? 
GetFallbackMaxPixels(group.substr(8)) + : absl::optional(); } } // namespace @@ -139,8 +148,6 @@ SendStatisticsProxy::SendStatisticsProxy( content_type_(content_type), start_ms_(clock->TimeInMilliseconds()), encode_time_(kEncodeTimeWeigthFactor), - quality_downscales_(-1), - cpu_downscales_(-1), quality_limitation_reason_tracker_(clock_), media_byte_rate_tracker_(kBucketSizeMs, kBucketCount), encoded_frame_rate_tracker_(kBucketSizeMs, kBucketCount), @@ -148,12 +155,13 @@ SendStatisticsProxy::SendStatisticsProxy( last_num_simulcast_streams_(0), last_spatial_layer_use_{}, bw_limited_layers_(false), + internal_encoder_scaler_(false), uma_container_( new UmaSamplesContainer(GetUmaPrefix(content_type_), stats_, clock)) { } SendStatisticsProxy::~SendStatisticsProxy() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); uma_container_->UpdateHistograms(rtp_config_, stats_); int64_t elapsed_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; @@ -205,12 +213,17 @@ void SendStatisticsProxy::UmaSamplesContainer::InitializeBitrateCounters( retransmit_byte_counter_.SetLast( it.second.rtp_stats.retransmitted.TotalBytes(), ssrc); fec_byte_counter_.SetLast(it.second.rtp_stats.fec.TotalBytes(), ssrc); - if (it.second.is_rtx) { - rtx_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(), - ssrc); - } else { - media_byte_counter_.SetLast(it.second.rtp_stats.MediaPayloadBytes(), + switch (it.second.type) { + case VideoSendStream::StreamStats::StreamType::kMedia: + media_byte_counter_.SetLast(it.second.rtp_stats.MediaPayloadBytes(), + ssrc); + break; + case VideoSendStream::StreamStats::StreamType::kRtx: + rtx_byte_counter_.SetLast(it.second.rtp_stats.transmitted.TotalBytes(), ssrc); + break; + case VideoSendStream::StreamStats::StreamType::kFlexfec: + break; } } } @@ -652,9 +665,11 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Encoder", 
current_stats.frames_dropped_by_encoder); log_stream << uma_prefix_ << "DroppedFrames.Ratelimiter " - << current_stats.frames_dropped_by_rate_limiter; + << current_stats.frames_dropped_by_rate_limiter << "\n"; RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Ratelimiter", current_stats.frames_dropped_by_rate_limiter); + log_stream << uma_prefix_ << "DroppedFrames.CongestionWindow " + << current_stats.frames_dropped_by_congestion_window; RTC_LOG(LS_INFO) << log_stream.str(); } @@ -662,7 +677,7 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( void SendStatisticsProxy::OnEncoderReconfigured( const VideoEncoderConfig& config, const std::vector& streams) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (content_type_ != config.content_type) { uma_container_->UpdateHistograms(rtp_config_, stats_); @@ -679,7 +694,7 @@ void SendStatisticsProxy::OnEncoderReconfigured( void SendStatisticsProxy::OnEncodedFrameTimeMeasured(int encode_time_ms, int encode_usage_percent) { RTC_DCHECK_GE(encode_time_ms, 0); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); uma_container_->encode_time_counter_.Add(encode_time_ms); encode_time_.Apply(1.0f, encode_time_ms); stats_.avg_encode_time_ms = std::round(encode_time_.filtered()); @@ -689,7 +704,7 @@ void SendStatisticsProxy::OnEncodedFrameTimeMeasured(int encode_time_ms, void SendStatisticsProxy::OnSuspendChange(bool is_suspended) { int64_t now_ms = clock_->TimeInMilliseconds(); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); stats_.suspended = is_suspended; if (is_suspended) { // Pause framerate (add min pause time since there may be frames/packets @@ -709,9 +724,11 @@ void SendStatisticsProxy::OnSuspendChange(bool is_suspended) { uma_container_->quality_adapt_timer_.Stop(now_ms); } else { // Start adaptation stats if scaling is enabled. 
- if (cpu_downscales_ >= 0) + if (adaptation_limitations_.MaskedCpuCounts() + .resolution_adaptations.has_value()) uma_container_->cpu_adapt_timer_.Start(now_ms); - if (quality_downscales_ >= 0) + if (adaptation_limitations_.MaskedQualityCounts() + .resolution_adaptations.has_value()) uma_container_->quality_adapt_timer_.Start(now_ms); // Stop pause explicitly for stats that may be zero/not updated for some // time. @@ -723,7 +740,7 @@ void SendStatisticsProxy::OnSuspendChange(bool is_suspended) { } VideoSendStream::Stats SendStatisticsProxy::GetStats() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); PurgeOldStats(); stats_.input_frame_rate = round(uma_container_->input_frame_rate_tracker_.ComputeRate()); @@ -758,23 +775,42 @@ VideoSendStream::StreamStats* SendStatisticsProxy::GetStatsEntry( if (it != stats_.substreams.end()) return &it->second; - bool is_media = absl::c_linear_search(rtp_config_.ssrcs, ssrc); + bool is_media = rtp_config_.IsMediaSsrc(ssrc); bool is_flexfec = rtp_config_.flexfec.payload_type != -1 && ssrc == rtp_config_.flexfec.ssrc; - bool is_rtx = absl::c_linear_search(rtp_config_.rtx.ssrcs, ssrc); + bool is_rtx = rtp_config_.IsRtxSsrc(ssrc); if (!is_media && !is_flexfec && !is_rtx) return nullptr; // Insert new entry and return ptr. 
VideoSendStream::StreamStats* entry = &stats_.substreams[ssrc]; - entry->is_rtx = is_rtx; - entry->is_flexfec = is_flexfec; + if (is_media) { + entry->type = VideoSendStream::StreamStats::StreamType::kMedia; + } else if (is_rtx) { + entry->type = VideoSendStream::StreamStats::StreamType::kRtx; + } else if (is_flexfec) { + entry->type = VideoSendStream::StreamStats::StreamType::kFlexfec; + } else { + RTC_NOTREACHED(); + } + switch (entry->type) { + case VideoSendStream::StreamStats::StreamType::kMedia: + break; + case VideoSendStream::StreamStats::StreamType::kRtx: + entry->referenced_media_ssrc = + rtp_config_.GetMediaSsrcAssociatedWithRtxSsrc(ssrc); + break; + case VideoSendStream::StreamStats::StreamType::kFlexfec: + entry->referenced_media_ssrc = + rtp_config_.GetMediaSsrcAssociatedWithFlexfecSsrc(ssrc); + break; + } return entry; } void SendStatisticsProxy::OnInactiveSsrc(uint32_t ssrc) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; @@ -786,7 +822,7 @@ void SendStatisticsProxy::OnInactiveSsrc(uint32_t ssrc) { } void SendStatisticsProxy::OnSetEncoderTargetRate(uint32_t bitrate_bps) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (uma_container_->target_rate_updates_.last_ms == -1 && bitrate_bps == 0) return; // Start on first non-zero bitrate, may initially be zero. @@ -885,7 +921,7 @@ void SendStatisticsProxy::UpdateFallbackDisabledStats( } void SendStatisticsProxy::OnMinPixelLimitReached() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); uma_container_->fallback_info_disabled_.min_pixel_limit_reached = true; } @@ -900,7 +936,7 @@ void SendStatisticsProxy::OnSendEncodedImage( ? encoded_image.SpatialIndex().value_or(0) : 0; - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ++stats_.frames_encoded; // The current encode frame rate is based on previously encoded frames. 
double encode_frame_rate = encoded_frame_rate_tracker_.ComputeRate(); @@ -931,14 +967,20 @@ void SendStatisticsProxy::OnSendEncodedImage( VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; - - // Report resolution of top spatial layer in case of VP9 SVC. - bool is_svc_low_spatial_layer = - (codec_info && codec_info->codecType == kVideoCodecVP9) - ? !codec_info->codecSpecific.VP9.end_of_picture - : false; - - if (!stats->width || !stats->height || !is_svc_low_spatial_layer) { + if (encoded_frame_rate_trackers_.count(simulcast_idx) == 0) { + encoded_frame_rate_trackers_[simulcast_idx] = + std::make_unique(kBucketSizeMs, kBucketCount); + } + stats->encode_frame_rate = + encoded_frame_rate_trackers_[simulcast_idx]->ComputeRate(); + stats->frames_encoded++; + stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms - + encoded_image.timing_.encode_start_ms; + // Report resolution of the top spatial layer. + bool is_top_spatial_layer = + codec_info == nullptr || codec_info->end_of_picture; + + if (!stats->width || !stats->height || is_top_spatial_layer) { stats->width = encoded_image._encodedWidth; stats->height = encoded_image._encodedHeight; update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds(); @@ -948,9 +990,9 @@ void SendStatisticsProxy::OnSendEncodedImage( VideoFrameType::kVideoFrameKey); if (encoded_image.qp_ != -1) { - if (!stats_.qp_sum) - stats_.qp_sum = 0; - *stats_.qp_sum += encoded_image.qp_; + if (!stats->qp_sum) + stats->qp_sum = 0; + *stats->qp_sum += encoded_image.qp_; if (codec_info) { if (codec_info->codecType == kVideoCodecVP8) { @@ -970,6 +1012,7 @@ void SendStatisticsProxy::OnSendEncodedImage( // as a single difficult input frame. 
// https://w3c.github.io/webrtc-stats/#dom-rtcvideosenderstats-hugeframessent if (encoded_image.timing_.flags & VideoSendTiming::kTriggeredBySize) { + ++stats->huge_frames_sent; if (!last_outlier_timestamp_ || *last_outlier_timestamp_ < encoded_image.capture_time_ms_) { last_outlier_timestamp_.emplace(encoded_image.capture_time_ms_); @@ -980,43 +1023,48 @@ void SendStatisticsProxy::OnSendEncodedImage( media_byte_rate_tracker_.AddSamples(encoded_image.size()); if (uma_container_->InsertEncodedFrame(encoded_image, simulcast_idx)) { + encoded_frame_rate_trackers_[simulcast_idx]->AddSamples(1); encoded_frame_rate_tracker_.AddSamples(1); } - stats_.bw_limited_resolution |= quality_downscales_ > 0; + absl::optional downscales = + adaptation_limitations_.MaskedQualityCounts().resolution_adaptations; + stats_.bw_limited_resolution |= + (downscales.has_value() && downscales.value() > 0); - if (quality_downscales_ != -1) { - uma_container_->quality_limited_frame_counter_.Add(quality_downscales_ > 0); - if (quality_downscales_ > 0) - uma_container_->quality_downscales_counter_.Add(quality_downscales_); + if (downscales.has_value()) { + uma_container_->quality_limited_frame_counter_.Add(downscales.value() > 0); + if (downscales.value() > 0) + uma_container_->quality_downscales_counter_.Add(downscales.value()); } } void SendStatisticsProxy::OnEncoderImplementationChanged( const std::string& implementation_name) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); encoder_changed_ = EncoderChangeEvent{stats_.encoder_implementation_name, implementation_name}; stats_.encoder_implementation_name = implementation_name; } int SendStatisticsProxy::GetInputFrameRate() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return round(uma_container_->input_frame_rate_tracker_.ComputeRate()); } int SendStatisticsProxy::GetSendFrameRate() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return round(encoded_frame_rate_tracker_.ComputeRate()); } void 
SendStatisticsProxy::OnIncomingFrame(int width, int height) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); uma_container_->input_frame_rate_tracker_.AddSamples(1); uma_container_->input_fps_counter_.Add(1); uma_container_->input_width_counter_.Add(width); uma_container_->input_height_counter_.Add(height); - if (cpu_downscales_ >= 0) { + if (adaptation_limitations_.MaskedCpuCounts() + .resolution_adaptations.has_value()) { uma_container_->cpu_limited_frame_counter_.Add( stats_.cpu_limited_resolution); } @@ -1028,7 +1076,7 @@ void SendStatisticsProxy::OnIncomingFrame(int width, int height) { } void SendStatisticsProxy::OnFrameDropped(DropReason reason) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); switch (reason) { case DropReason::kSource: ++stats_.frames_dropped_by_capturer; @@ -1042,43 +1090,64 @@ void SendStatisticsProxy::OnFrameDropped(DropReason reason) { case DropReason::kMediaOptimization: ++stats_.frames_dropped_by_rate_limiter; break; + case DropReason::kCongestionWindow: + ++stats_.frames_dropped_by_congestion_window; + break; } } +void SendStatisticsProxy::ClearAdaptationStats() { + MutexLock lock(&mutex_); + adaptation_limitations_.set_cpu_counts(VideoAdaptationCounters()); + adaptation_limitations_.set_quality_counts(VideoAdaptationCounters()); + UpdateAdaptationStats(); +} + +void SendStatisticsProxy::UpdateAdaptationSettings( + VideoStreamEncoderObserver::AdaptationSettings cpu_settings, + VideoStreamEncoderObserver::AdaptationSettings quality_settings) { + MutexLock lock(&mutex_); + adaptation_limitations_.UpdateMaskingSettings(cpu_settings, quality_settings); + SetAdaptTimer(adaptation_limitations_.MaskedCpuCounts(), + &uma_container_->cpu_adapt_timer_); + SetAdaptTimer(adaptation_limitations_.MaskedQualityCounts(), + &uma_container_->quality_adapt_timer_); + UpdateAdaptationStats(); +} + void SendStatisticsProxy::OnAdaptationChanged( - AdaptationReason reason, - const AdaptationSteps& cpu_counts, - const AdaptationSteps& 
quality_counts) { - rtc::CritScope lock(&crit_); + VideoAdaptationReason reason, + const VideoAdaptationCounters& cpu_counters, + const VideoAdaptationCounters& quality_counters) { + MutexLock lock(&mutex_); + + MaskedAdaptationCounts receiver = + adaptation_limitations_.MaskedQualityCounts(); + adaptation_limitations_.set_cpu_counts(cpu_counters); + adaptation_limitations_.set_quality_counts(quality_counters); switch (reason) { - case AdaptationReason::kNone: - SetAdaptTimer(cpu_counts, &uma_container_->cpu_adapt_timer_); - SetAdaptTimer(quality_counts, &uma_container_->quality_adapt_timer_); - break; - case AdaptationReason::kCpu: + case VideoAdaptationReason::kCpu: ++stats_.number_of_cpu_adapt_changes; break; - case AdaptationReason::kQuality: - TryUpdateInitialQualityResolutionAdaptUp(quality_counts); + case VideoAdaptationReason::kQuality: + TryUpdateInitialQualityResolutionAdaptUp( + receiver.resolution_adaptations, + adaptation_limitations_.MaskedQualityCounts().resolution_adaptations); ++stats_.number_of_quality_adapt_changes; break; } - - cpu_downscales_ = cpu_counts.num_resolution_reductions.value_or(-1); - quality_downscales_ = quality_counts.num_resolution_reductions.value_or(-1); - - cpu_counts_ = cpu_counts; - quality_counts_ = quality_counts; - UpdateAdaptationStats(); } void SendStatisticsProxy::UpdateAdaptationStats() { - bool is_cpu_limited = cpu_counts_.num_resolution_reductions > 0 || - cpu_counts_.num_framerate_reductions > 0; - bool is_bandwidth_limited = quality_counts_.num_resolution_reductions > 0 || - quality_counts_.num_framerate_reductions > 0 || - bw_limited_layers_; + auto cpu_counts = adaptation_limitations_.MaskedCpuCounts(); + auto quality_counts = adaptation_limitations_.MaskedQualityCounts(); + + bool is_cpu_limited = cpu_counts.resolution_adaptations > 0 || + cpu_counts.num_framerate_reductions > 0; + bool is_bandwidth_limited = quality_counts.resolution_adaptations > 0 || + quality_counts.num_framerate_reductions > 0 || + 
bw_limited_layers_ || internal_encoder_scaler_; if (is_bandwidth_limited) { // We may be both CPU limited and bandwidth limited at the same time but // there is no way to express this in standardized stats. Heuristically, @@ -1094,10 +1163,10 @@ void SendStatisticsProxy::UpdateAdaptationStats() { QualityLimitationReason::kNone); } - stats_.cpu_limited_resolution = cpu_counts_.num_resolution_reductions > 0; - stats_.cpu_limited_framerate = cpu_counts_.num_framerate_reductions > 0; - stats_.bw_limited_resolution = quality_counts_.num_resolution_reductions > 0; - stats_.bw_limited_framerate = quality_counts_.num_framerate_reductions > 0; + stats_.cpu_limited_resolution = cpu_counts.resolution_adaptations > 0; + stats_.cpu_limited_framerate = cpu_counts.num_framerate_reductions > 0; + stats_.bw_limited_resolution = quality_counts.resolution_adaptations > 0; + stats_.bw_limited_framerate = quality_counts.num_framerate_reductions > 0; // If bitrate allocator has disabled some layers frame-rate or resolution are // limited depending on the encoder configuration. if (bw_limited_layers_) { @@ -1112,6 +1181,10 @@ void SendStatisticsProxy::UpdateAdaptationStats() { } } } + if (internal_encoder_scaler_) { + stats_.bw_limited_resolution = true; + } + stats_.quality_limitation_reason = quality_limitation_reason_tracker_.current_reason(); @@ -1140,7 +1213,7 @@ void SendStatisticsProxy::OnBitrateAllocationUpdated( spatial_layers[i] = (allocation.GetSpatialLayerSum(i) > 0); } - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); bw_limited_layers_ = allocation.is_bw_limited(); UpdateAdaptationStats(); @@ -1159,20 +1232,31 @@ void SendStatisticsProxy::OnBitrateAllocationUpdated( last_num_simulcast_streams_ = num_simulcast_streams; } +// Informes observer if an internal encoder scaler has reduced video +// resolution or not. |is_scaled| is a flag indicating if the video is scaled +// down. 
+void SendStatisticsProxy::OnEncoderInternalScalerUpdate(bool is_scaled) { + MutexLock lock(&mutex_); + internal_encoder_scaler_ = is_scaled; + UpdateAdaptationStats(); +} + // TODO(asapersson): Include fps changes. void SendStatisticsProxy::OnInitialQualityResolutionAdaptDown() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ++uma_container_->initial_quality_changes_.down; } void SendStatisticsProxy::TryUpdateInitialQualityResolutionAdaptUp( - const AdaptationSteps& quality_counts) { + absl::optional old_quality_downscales, + absl::optional updated_quality_downscales) { if (uma_container_->initial_quality_changes_.down == 0) return; - if (quality_downscales_ > 0 && - quality_counts.num_resolution_reductions.value_or(-1) < - quality_downscales_) { + if (old_quality_downscales.has_value() && + old_quality_downscales.value() > 0 && + updated_quality_downscales.value_or(-1) < + old_quality_downscales.value()) { // Adapting up in quality. if (uma_container_->initial_quality_changes_.down > uma_container_->initial_quality_changes_.up) { @@ -1181,9 +1265,9 @@ void SendStatisticsProxy::TryUpdateInitialQualityResolutionAdaptUp( } } -void SendStatisticsProxy::SetAdaptTimer(const AdaptationSteps& counts, +void SendStatisticsProxy::SetAdaptTimer(const MaskedAdaptationCounts& counts, StatsTimer* timer) { - if (counts.num_resolution_reductions || counts.num_framerate_reductions) { + if (counts.resolution_adaptations || counts.num_framerate_reductions) { // Adaptation enabled. 
if (!stats_.suspended) timer->Start(clock_->TimeInMilliseconds()); @@ -1195,7 +1279,7 @@ void SendStatisticsProxy::SetAdaptTimer(const AdaptationSteps& counts, void SendStatisticsProxy::RtcpPacketTypesCounterUpdated( uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; @@ -1207,7 +1291,7 @@ void SendStatisticsProxy::RtcpPacketTypesCounterUpdated( void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics, uint32_t ssrc) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; @@ -1218,7 +1302,7 @@ void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics, void SendStatisticsProxy::OnReportBlockDataUpdated( ReportBlockData report_block_data) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(report_block_data.report_block().source_ssrc); if (!stats) @@ -1229,11 +1313,11 @@ void SendStatisticsProxy::OnReportBlockDataUpdated( void SendStatisticsProxy::DataCountersUpdated( const StreamDataCounters& counters, uint32_t ssrc) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); RTC_DCHECK(stats) << "DataCountersUpdated reported for unknown ssrc " << ssrc; - if (stats->is_flexfec) { + if (stats->type == VideoSendStream::StreamStats::StreamType::kFlexfec) { // The same counters are reported for both the media ssrc and flexfec ssrc. // Bitrate stats are summed for all SSRCs. Use fec stats from media update. 
return; @@ -1254,18 +1338,24 @@ void SendStatisticsProxy::DataCountersUpdated( uma_container_->retransmit_byte_counter_.Set( counters.retransmitted.TotalBytes(), ssrc); uma_container_->fec_byte_counter_.Set(counters.fec.TotalBytes(), ssrc); - if (stats->is_rtx) { - uma_container_->rtx_byte_counter_.Set(counters.transmitted.TotalBytes(), - ssrc); - } else { - uma_container_->media_byte_counter_.Set(counters.MediaPayloadBytes(), ssrc); + switch (stats->type) { + case VideoSendStream::StreamStats::StreamType::kMedia: + uma_container_->media_byte_counter_.Set(counters.MediaPayloadBytes(), + ssrc); + break; + case VideoSendStream::StreamStats::StreamType::kRtx: + uma_container_->rtx_byte_counter_.Set(counters.transmitted.TotalBytes(), + ssrc); + break; + case VideoSendStream::StreamStats::StreamType::kFlexfec: + break; } } void SendStatisticsProxy::Notify(uint32_t total_bitrate_bps, uint32_t retransmit_bitrate_bps, uint32_t ssrc) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; @@ -1276,7 +1366,7 @@ void SendStatisticsProxy::Notify(uint32_t total_bitrate_bps, void SendStatisticsProxy::FrameCountUpdated(const FrameCounts& frame_counts, uint32_t ssrc) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; @@ -1288,7 +1378,7 @@ void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms, int max_delay_ms, uint64_t total_delay_ms, uint32_t ssrc) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; @@ -1358,4 +1448,55 @@ int SendStatisticsProxy::BoolSampleCounter::Fraction( return -1; return static_cast((sum * multiplier / num_samples) + 0.5f); } + +SendStatisticsProxy::MaskedAdaptationCounts +SendStatisticsProxy::Adaptations::MaskedCpuCounts() const { + return Mask(cpu_counts_, cpu_settings_); +} + 
+SendStatisticsProxy::MaskedAdaptationCounts +SendStatisticsProxy::Adaptations::MaskedQualityCounts() const { + return Mask(quality_counts_, quality_settings_); +} + +void SendStatisticsProxy::Adaptations::set_cpu_counts( + const VideoAdaptationCounters& cpu_counts) { + cpu_counts_ = cpu_counts; +} + +void SendStatisticsProxy::Adaptations::set_quality_counts( + const VideoAdaptationCounters& quality_counts) { + quality_counts_ = quality_counts; +} + +VideoAdaptationCounters SendStatisticsProxy::Adaptations::cpu_counts() const { + return cpu_counts_; +} + +VideoAdaptationCounters SendStatisticsProxy::Adaptations::quality_counts() + const { + return quality_counts_; +} + +void SendStatisticsProxy::Adaptations::UpdateMaskingSettings( + VideoStreamEncoderObserver::AdaptationSettings cpu_settings, + VideoStreamEncoderObserver::AdaptationSettings quality_settings) { + cpu_settings_ = std::move(cpu_settings); + quality_settings_ = std::move(quality_settings); +} + +SendStatisticsProxy::MaskedAdaptationCounts +SendStatisticsProxy::Adaptations::Mask( + const VideoAdaptationCounters& counters, + const VideoStreamEncoderObserver::AdaptationSettings& settings) const { + MaskedAdaptationCounts masked_counts; + if (settings.resolution_scaling_enabled) { + masked_counts.resolution_adaptations = counters.resolution_adaptations; + } + if (settings.framerate_scaling_enabled) { + masked_counts.num_framerate_reductions = counters.fps_adaptations; + } + return masked_counts; +} + } // namespace webrtc diff --git a/video/send_statistics_proxy.h b/video/send_statistics_proxy.h index a67725e17a..0de7df290e 100644 --- a/video/send_statistics_proxy.h +++ b/video/send_statistics_proxy.h @@ -25,9 +25,9 @@ #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" -#include "rtc_base/critical_section.h" #include "rtc_base/numerics/exp_filter.h" #include 
"rtc_base/rate_tracker.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "video/quality_limitation_reason_tracker.h" @@ -70,14 +70,20 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, void OnFrameDropped(DropReason) override; // Adaptation stats. - void OnAdaptationChanged(AdaptationReason reason, - const AdaptationSteps& cpu_counts, - const AdaptationSteps& quality_counts) override; + void OnAdaptationChanged( + VideoAdaptationReason reason, + const VideoAdaptationCounters& cpu_counters, + const VideoAdaptationCounters& quality_counters) override; + void ClearAdaptationStats() override; + void UpdateAdaptationSettings(AdaptationSettings cpu_settings, + AdaptationSettings quality_settings) override; void OnBitrateAllocationUpdated( const VideoCodec& codec, const VideoBitrateAllocation& allocation) override; + void OnEncoderInternalScalerUpdate(bool is_scaled) override; + void OnMinPixelLimitReached() override; void OnInitialQualityResolutionAdaptDown() override; @@ -217,55 +223,86 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, }; typedef std::map EncodedFrameMap; - void PurgeOldStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + void PurgeOldStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); VideoSendStream::StreamStats* GetStatsEntry(uint32_t ssrc) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + struct MaskedAdaptationCounts { + absl::optional resolution_adaptations = absl::nullopt; + absl::optional num_framerate_reductions = absl::nullopt; + }; + + struct Adaptations { + public: + MaskedAdaptationCounts MaskedCpuCounts() const; + MaskedAdaptationCounts MaskedQualityCounts() const; + + void set_cpu_counts(const VideoAdaptationCounters& cpu_counts); + void set_quality_counts(const VideoAdaptationCounters& quality_counts); + + VideoAdaptationCounters cpu_counts() const; + VideoAdaptationCounters quality_counts() 
const; + + void UpdateMaskingSettings(AdaptationSettings cpu_settings, + AdaptationSettings quality_settings); + + private: + VideoAdaptationCounters cpu_counts_; + AdaptationSettings cpu_settings_; + VideoAdaptationCounters quality_counts_; + AdaptationSettings quality_settings_; + + MaskedAdaptationCounts Mask(const VideoAdaptationCounters& counters, + const AdaptationSettings& settings) const; + }; - void SetAdaptTimer(const AdaptationSteps& counts, StatsTimer* timer) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); - void UpdateAdaptationStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + void SetAdaptTimer(const MaskedAdaptationCounts& counts, StatsTimer* timer) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void UpdateAdaptationStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void TryUpdateInitialQualityResolutionAdaptUp( - const AdaptationSteps& quality_counts) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + absl::optional old_quality_downscales, + absl::optional updated_quality_downscales) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateEncoderFallbackStats(const CodecSpecificInfo* codec_info, int pixels, int simulcast_index) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateFallbackDisabledStats(const CodecSpecificInfo* codec_info, int pixels, int simulcast_index) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* const clock_; const std::string payload_name_; const RtpConfig rtp_config_; const absl::optional fallback_max_pixels_; const absl::optional fallback_max_pixels_disabled_; - rtc::CriticalSection crit_; - VideoEncoderConfig::ContentType content_type_ RTC_GUARDED_BY(crit_); + mutable Mutex mutex_; + VideoEncoderConfig::ContentType content_type_ RTC_GUARDED_BY(mutex_); const int64_t start_ms_; - VideoSendStream::Stats stats_ RTC_GUARDED_BY(crit_); - std::map update_times_ RTC_GUARDED_BY(crit_); - rtc::ExpFilter encode_time_ RTC_GUARDED_BY(crit_); - int quality_downscales_ RTC_GUARDED_BY(crit_); - int 
cpu_downscales_ RTC_GUARDED_BY(crit_); + VideoSendStream::Stats stats_ RTC_GUARDED_BY(mutex_); + std::map update_times_ RTC_GUARDED_BY(mutex_); + rtc::ExpFilter encode_time_ RTC_GUARDED_BY(mutex_); QualityLimitationReasonTracker quality_limitation_reason_tracker_ - RTC_GUARDED_BY(crit_); - rtc::RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(crit_); - rtc::RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); + rtc::RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(mutex_); + rtc::RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(mutex_); + std::map> + encoded_frame_rate_trackers_ RTC_GUARDED_BY(mutex_); - absl::optional last_outlier_timestamp_ RTC_GUARDED_BY(crit_); + absl::optional last_outlier_timestamp_ RTC_GUARDED_BY(mutex_); - int last_num_spatial_layers_ RTC_GUARDED_BY(crit_); - int last_num_simulcast_streams_ RTC_GUARDED_BY(crit_); + int last_num_spatial_layers_ RTC_GUARDED_BY(mutex_); + int last_num_simulcast_streams_ RTC_GUARDED_BY(mutex_); std::array last_spatial_layer_use_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(mutex_); // Indicates if the latest bitrate allocation had layers disabled by low // available bandwidth. - bool bw_limited_layers_ RTC_GUARDED_BY(crit_); - AdaptationSteps cpu_counts_ RTC_GUARDED_BY(crit_); - AdaptationSteps quality_counts_ RTC_GUARDED_BY(crit_); + bool bw_limited_layers_ RTC_GUARDED_BY(mutex_); + // Indicastes if the encoder internally downscales input image. + bool internal_encoder_scaler_ RTC_GUARDED_BY(mutex_); + Adaptations adaptation_limitations_ RTC_GUARDED_BY(mutex_); struct EncoderChangeEvent { std::string previous_encoder_implementation; @@ -337,7 +374,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, qp_counters_; // QP counters mapped by spatial idx. 
}; - std::unique_ptr uma_container_ RTC_GUARDED_BY(crit_); + std::unique_ptr uma_container_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/video/send_statistics_proxy_unittest.cc b/video/send_statistics_proxy_unittest.cc index 2532c29215..33107d4c2f 100644 --- a/video/send_statistics_proxy_unittest.cc +++ b/video/send_statistics_proxy_unittest.cc @@ -18,6 +18,7 @@ #include "absl/algorithm/container.h" #include "api/units/timestamp.h" +#include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_type.h" #include "api/video_codecs/video_codec.h" @@ -45,6 +46,16 @@ const CodecSpecificInfo kDefaultCodecInfo = []() { codec_info.codecType = kVideoCodecVP8; return codec_info; }(); + +const VideoStreamEncoderObserver::AdaptationSettings kScalingEnabled(true, + true); +const VideoStreamEncoderObserver::AdaptationSettings kFramerateScalingDisabled( + true, + false); +const VideoStreamEncoderObserver::AdaptationSettings kResolutionScalingDisabled( + false, + true); +const VideoStreamEncoderObserver::AdaptationSettings kScalingDisabled; } // namespace class SendStatisticsProxyTest : public ::testing::Test { @@ -65,10 +76,16 @@ class SendStatisticsProxyTest : public ::testing::Test { &fake_clock_, GetTestConfig(), VideoEncoderConfig::ContentType::kRealtimeVideo)); expected_ = VideoSendStream::Stats(); - for (const auto& ssrc : config_.rtp.ssrcs) - expected_.substreams[ssrc].is_rtx = false; - for (const auto& ssrc : config_.rtp.rtx.ssrcs) - expected_.substreams[ssrc].is_rtx = true; + for (const auto& ssrc : config_.rtp.ssrcs) { + expected_.substreams[ssrc].type = + VideoSendStream::StreamStats::StreamType::kMedia; + } + for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) { + uint32_t ssrc = config_.rtp.rtx.ssrcs[i]; + expected_.substreams[ssrc].type = + VideoSendStream::StreamStats::StreamType::kRtx; + expected_.substreams[ssrc].referenced_media_ssrc = config_.rtp.ssrcs[i]; + } } 
VideoSendStream::Config GetTestConfig() { @@ -89,6 +106,7 @@ class SendStatisticsProxyTest : public ::testing::Test { config.rtp.rtx.ssrcs.push_back(kSecondRtxSsrc); config.rtp.flexfec.payload_type = 50; config.rtp.flexfec.ssrc = kFlexFecSsrc; + config.rtp.flexfec.protected_media_ssrcs = {kFirstSsrc}; return config; } @@ -123,7 +141,7 @@ class SendStatisticsProxyTest : public ::testing::Test { const VideoSendStream::StreamStats& a = it->second; const VideoSendStream::StreamStats& b = corresponding_it->second; - EXPECT_EQ(a.is_rtx, b.is_rtx); + EXPECT_EQ(a.type, b.type); EXPECT_EQ(a.frame_counts.key_frames, b.frame_counts.key_frames); EXPECT_EQ(a.frame_counts.delta_frames, b.frame_counts.delta_frames); EXPECT_EQ(a.total_bitrate_bps, b.total_bitrate_bps); @@ -354,22 +372,27 @@ TEST_F(SendStatisticsProxyTest, OnSendEncodedImageIncreasesFramesEncoded) { TEST_F(SendStatisticsProxyTest, OnSendEncodedImageIncreasesQpSum) { EncodedImage encoded_image; CodecSpecificInfo codec_info; - EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + auto ssrc = config_.rtp.ssrcs[0]; + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc].qp_sum); encoded_image.qp_ = 3; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); - EXPECT_EQ(3u, statistics_proxy_->GetStats().qp_sum); + EXPECT_EQ(3u, statistics_proxy_->GetStats().substreams[ssrc].qp_sum); encoded_image.qp_ = 127; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); - EXPECT_EQ(130u, statistics_proxy_->GetStats().qp_sum); + EXPECT_EQ(130u, statistics_proxy_->GetStats().substreams[ssrc].qp_sum); } TEST_F(SendStatisticsProxyTest, OnSendEncodedImageWithoutQpQpSumWontExist) { EncodedImage encoded_image; CodecSpecificInfo codec_info; + auto ssrc = config_.rtp.ssrcs[0]; encoded_image.qp_ = -1; - EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc].qp_sum); 
statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); - EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + EXPECT_EQ(absl::nullopt, + statistics_proxy_->GetStats().substreams[ssrc].qp_sum); } TEST_F(SendStatisticsProxyTest, TotalEncodedBytesTargetFirstFrame) { @@ -397,7 +420,8 @@ TEST_F(SendStatisticsProxyTest, // TODO(https://crbug.com/webrtc/10640): When the RateTracker uses a Clock // this test can stop relying on rtc::ScopedFakeClock. rtc::ScopedFakeClock fake_global_clock; - fake_global_clock.SetTime(Timestamp::ms(fake_clock_.TimeInMilliseconds())); + fake_global_clock.SetTime( + Timestamp::Millis(fake_clock_.TimeInMilliseconds())); statistics_proxy_->OnSetEncoderTargetRate(kTargetBytesPerSecond * 8); EncodedImage encoded_image; @@ -408,7 +432,8 @@ TEST_F(SendStatisticsProxyTest, statistics_proxy_->GetStats().total_encoded_bytes_target; // Second frame fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs); - fake_global_clock.SetTime(Timestamp::ms(fake_clock_.TimeInMilliseconds())); + fake_global_clock.SetTime( + Timestamp::Millis(fake_clock_.TimeInMilliseconds())); encoded_image.SetTimestamp(encoded_image.Timestamp() + 90 * kInterframeDelayMs); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); @@ -422,93 +447,111 @@ TEST_F(SendStatisticsProxyTest, EXPECT_EQ(kTargetBytesPerSecond / 10, delta_encoded_bytes_target); } +TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStream) { + const int kInterframeDelayMs = 100; + auto ssrc = config_.rtp.ssrcs[0]; + rtc::ScopedFakeClock fake_global_clock; + fake_global_clock.SetTime( + Timestamp::Millis(fake_clock_.TimeInMilliseconds())); + + EncodedImage encoded_image; + + // First frame + statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); + // Second frame + fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs); + fake_global_clock.SetTime( + Timestamp::Millis(fake_clock_.TimeInMilliseconds())); + encoded_image.SetTimestamp(encoded_image.Timestamp() + + 90 * 
kInterframeDelayMs); + statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); + + auto stats = statistics_proxy_->GetStats(); + EXPECT_EQ(stats.substreams[ssrc].encode_frame_rate, 10); +} + TEST_F(SendStatisticsProxyTest, GetCpuAdaptationStats) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_resolution); - cpu_counts.num_framerate_reductions = 1; - cpu_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + cpu_counts.fps_adaptations = 1; + cpu_counts.resolution_adaptations = 0; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_resolution); - cpu_counts.num_framerate_reductions = 0; - cpu_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + cpu_counts.fps_adaptations = 0; + cpu_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_framerate); EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_resolution); - cpu_counts.num_framerate_reductions = 1; - cpu_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + cpu_counts.fps_adaptations = 1; + 
statistics_proxy_->UpdateAdaptationSettings(kResolutionScalingDisabled, + kResolutionScalingDisabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_resolution); - cpu_counts.num_framerate_reductions = absl::nullopt; - cpu_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingDisabled, + kScalingDisabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_resolution); } TEST_F(SendStatisticsProxyTest, GetQualityAdaptationStats) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); - quality_counts.num_framerate_reductions = 1; - quality_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + quality_counts.fps_adaptations = 1; + quality_counts.resolution_adaptations = 0; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); - quality_counts.num_framerate_reductions = 0; - quality_counts.num_resolution_reductions = 1; 
- statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + quality_counts.fps_adaptations = 0; + quality_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_framerate); EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution); - quality_counts.num_framerate_reductions = 1; - quality_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + quality_counts.fps_adaptations = 1; + statistics_proxy_->UpdateAdaptationSettings(kResolutionScalingDisabled, + kResolutionScalingDisabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); - quality_counts.num_framerate_reductions = absl::nullopt; - quality_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingDisabled, + kScalingDisabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); } TEST_F(SendStatisticsProxyTest, GetStatsReportsCpuAdaptChanges) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; EXPECT_EQ(0, 
statistics_proxy_->GetStats().number_of_cpu_adapt_changes); - cpu_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_framerate); EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(1, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); - cpu_counts.num_resolution_reductions = 2; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 2; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_framerate); EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -516,28 +559,94 @@ TEST_F(SendStatisticsProxyTest, GetStatsReportsCpuAdaptChanges) { } TEST_F(SendStatisticsProxyTest, GetStatsReportsQualityAdaptChanges) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); EXPECT_EQ(0, statistics_proxy_->GetStats().number_of_quality_adapt_changes); - quality_counts.num_framerate_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.fps_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_framerate); 
EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, statistics_proxy_->GetStats().number_of_quality_adapt_changes); - quality_counts.num_framerate_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.fps_adaptations = 0; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_quality_adapt_changes); EXPECT_EQ(0, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); } +TEST_F(SendStatisticsProxyTest, TestAdaptationStatisticsMasking) { + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + EXPECT_EQ(0, statistics_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(0, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); + + quality_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + quality_counts.fps_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + cpu_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); + cpu_counts.fps_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); + // We have 1 fps and resolution reduction for both cpu and quality + EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_framerate); + EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution); + EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_framerate); + 
EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_resolution); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); + + // Disable quality scaling. Expect quality scaling not limited. + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, + kScalingDisabled); + EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_framerate); + EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); + EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_framerate); + EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_resolution); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); + + // Disable framerate scaling. + statistics_proxy_->UpdateAdaptationSettings(kFramerateScalingDisabled, + kFramerateScalingDisabled); + EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_framerate); + EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution); + EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_framerate); + EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_resolution); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); + + // Disable resolution scaling. 
+ statistics_proxy_->UpdateAdaptationSettings(kResolutionScalingDisabled, + kResolutionScalingDisabled); + EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_framerate); + EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); + EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_framerate); + EXPECT_FALSE(statistics_proxy_->GetStats().cpu_limited_resolution); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); + + // Enable all + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_framerate); + EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution); + EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_framerate); + EXPECT_TRUE(statistics_proxy_->GetStats().cpu_limited_resolution); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(2, statistics_proxy_->GetStats().number_of_cpu_adapt_changes); +} + TEST_F(SendStatisticsProxyTest, AdaptChangesNotReported_AdaptationNotEnabled) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); @@ -554,11 +663,7 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesNotReported_MinRuntimeNotPassed) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); // Enable adaptation. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Min runtime has not passed. fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000 - 1); statistics_proxy_.reset(); @@ -572,11 +677,7 @@ TEST_F(SendStatisticsProxyTest, ZeroAdaptChangesReported) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); // Enable adaptation. 
- SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Min runtime has passed. fake_clock_.AdvanceTimeMilliseconds(metrics::kMinRunTimeInSeconds * 1000); statistics_proxy_.reset(); @@ -594,15 +695,12 @@ TEST_F(SendStatisticsProxyTest, CpuAdaptChangesReported) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); // Enable adaptation. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Adapt changes: 1, elapsed time: 10 sec => 6 per minute. - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(10000); statistics_proxy_.reset(); EXPECT_METRIC_EQ( @@ -615,15 +713,12 @@ TEST_F(SendStatisticsProxyTest, ExcludesInitialQualityAdaptDownChange) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); // Enable adaptation. 
- SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Adapt changes: 1 (1 initial) = 0, elapsed time: 10 sec => 0 per minute. - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnInitialQualityResolutionAdaptDown(); fake_clock_.AdvanceTimeMilliseconds(10000); statistics_proxy_.reset(); @@ -637,26 +732,21 @@ TEST_F(SendStatisticsProxyTest, ExcludesInitialQualityAdaptDownChanges) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); // Enable adaptation. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Adapt changes: 3 (2 initial) = 1, elapsed time: 10 sec => 6 per minute. 
- quality_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnInitialQualityResolutionAdaptDown(); - quality_counts.num_resolution_reductions = 2; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 2; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnInitialQualityResolutionAdaptDown(); - quality_counts.num_resolution_reductions = 3; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 3; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(10000); statistics_proxy_.reset(); EXPECT_METRIC_EQ( @@ -669,15 +759,12 @@ TEST_F(SendStatisticsProxyTest, InitialQualityAdaptChangesNotExcludedOnError) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); // Enable adaptation. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Adapt changes: 1 (2 initial) = 1, elapsed time: 10 sec => 6 per minute. 
- statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnInitialQualityResolutionAdaptDown(); statistics_proxy_->OnInitialQualityResolutionAdaptDown(); fake_clock_.AdvanceTimeMilliseconds(10000); @@ -692,46 +779,37 @@ TEST_F(SendStatisticsProxyTest, ExcludesInitialQualityAdaptDownAndUpChanges) { // First RTP packet sent. UpdateDataCounters(kFirstSsrc); // Enable adaptation. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->ClearAdaptationStats(); // Adapt changes: 8 (4 initial) = 4, elapsed time: 10 sec => 24 per minute. 
- quality_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnInitialQualityResolutionAdaptDown(); - quality_counts.num_resolution_reductions = 2; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 2; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnInitialQualityResolutionAdaptDown(); - quality_counts.num_resolution_reductions = 3; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); - quality_counts.num_framerate_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); - quality_counts.num_framerate_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); - quality_counts.num_resolution_reductions = 2; // Initial resolution up. - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); - quality_counts.num_resolution_reductions = 1; // Initial resolution up. 
- statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); - quality_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 3; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + quality_counts.fps_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + quality_counts.fps_adaptations = 0; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + quality_counts.resolution_adaptations = 2; // Initial resolution up. + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + quality_counts.resolution_adaptations = 1; // Initial resolution up. + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + quality_counts.resolution_adaptations = 0; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(10000); statistics_proxy_.reset(); @@ -746,61 +824,47 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesStatsExcludesDisabledTime) { UpdateDataCounters(kFirstSsrc); // Disable quality adaptation. 
- SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - quality_counts.num_framerate_reductions = absl::nullopt; - quality_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, + kScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(10000); // Enable quality adaptation. // Adapt changes: 2, elapsed time: 20 sec. - quality_counts.num_framerate_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + quality_counts.fps_adaptations = 0; + statistics_proxy_->UpdateAdaptationSettings(kResolutionScalingDisabled, + kResolutionScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(5000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kResolutionScalingDisabled, + kResolutionScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(9000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(6000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); // Disable quality adaptation. 
- quality_counts.num_framerate_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingDisabled, + kScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(30000); // Enable quality adaptation. // Adapt changes: 1, elapsed time: 10 sec. - quality_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 0; + statistics_proxy_->UpdateAdaptationSettings(kFramerateScalingDisabled, + kFramerateScalingDisabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(10000); // Disable quality adaptation. - quality_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingDisabled, + kScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(5000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingDisabled, + kScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(20000); // Adapt changes: 3, elapsed time: 30 sec => 6 per minute. @@ -835,19 +899,15 @@ TEST_F(SendStatisticsProxyTest, QualityAdaptChangesStatsExcludesSuspendedTime) { UpdateDataCounters(kFirstSsrc); // Enable adaptation. 
- SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; // Adapt changes: 2, elapsed time: 20 sec. - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); fake_clock_.AdvanceTimeMilliseconds(20000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); // Suspend and resume video. statistics_proxy_->OnSuspendChange(true); @@ -855,9 +915,8 @@ TEST_F(SendStatisticsProxyTest, QualityAdaptChangesStatsExcludesSuspendedTime) { statistics_proxy_->OnSuspendChange(false); // Adapt changes: 1, elapsed time: 10 sec. - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(10000); // Adapt changes: 3, elapsed time: 30 sec => 6 per minute. @@ -877,27 +936,21 @@ TEST_F(SendStatisticsProxyTest, CpuAdaptChangesStatsExcludesSuspendedTime) { fake_clock_.AdvanceTimeMilliseconds(30000); // Enable adaptation. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; // Adapt changes: 1, elapsed time: 20 sec. 
- statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); fake_clock_.AdvanceTimeMilliseconds(10000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); // Video not suspended, stats time already started. statistics_proxy_->OnSuspendChange(false); fake_clock_.AdvanceTimeMilliseconds(10000); // Disable adaptation. - cpu_counts.num_framerate_reductions = absl::nullopt; - cpu_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingDisabled, + kScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(30000); // Suspend and resume video, stats time not started when scaling not enabled. @@ -908,15 +961,12 @@ TEST_F(SendStatisticsProxyTest, CpuAdaptChangesStatsExcludesSuspendedTime) { // Enable adaptation. // Adapt changes: 1, elapsed time: 10 sec. - cpu_counts.num_framerate_reductions = 0; - cpu_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + cpu_counts.fps_adaptations = 0; + cpu_counts.resolution_adaptations = 0; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); fake_clock_.AdvanceTimeMilliseconds(10000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); // Adapt changes: 2, elapsed time: 30 sec => 4 per minute. 
statistics_proxy_.reset(); @@ -934,20 +984,17 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesStatsNotStartedIfVideoSuspended) { statistics_proxy_->OnSuspendChange(true); // Enable adaptation, stats time not started when suspended. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); fake_clock_.AdvanceTimeMilliseconds(10000); // Resume video, stats time started. // Adapt changes: 1, elapsed time: 10 sec. statistics_proxy_->OnSuspendChange(false); fake_clock_.AdvanceTimeMilliseconds(10000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); // Adapt changes: 1, elapsed time: 10 sec => 6 per minute. statistics_proxy_.reset(); @@ -960,19 +1007,16 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesStatsNotStartedIfVideoSuspended) { TEST_F(SendStatisticsProxyTest, AdaptChangesStatsRestartsOnFirstSentPacket) { // Send first packet, adaptation enabled. // Elapsed time before first packet is sent should be excluded. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); fake_clock_.AdvanceTimeMilliseconds(10000); UpdateDataCounters(kFirstSsrc); // Adapt changes: 1, elapsed time: 10 sec. 
fake_clock_.AdvanceTimeMilliseconds(10000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); UpdateDataCounters(kFirstSsrc); // Adapt changes: 1, elapsed time: 10 sec => 6 per minute. @@ -985,17 +1029,12 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesStatsRestartsOnFirstSentPacket) { TEST_F(SendStatisticsProxyTest, AdaptChangesStatsStartedAfterFirstSentPacket) { // Enable and disable adaptation. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); fake_clock_.AdvanceTimeMilliseconds(60000); - cpu_counts.num_framerate_reductions = absl::nullopt; - cpu_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingDisabled, + kScalingDisabled); // Send first packet, scaling disabled. // Elapsed time before first packet is sent should be excluded. @@ -1003,18 +1042,16 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesStatsStartedAfterFirstSentPacket) { fake_clock_.AdvanceTimeMilliseconds(60000); // Enable adaptation. 
- cpu_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 0; + statistics_proxy_->UpdateAdaptationSettings(kFramerateScalingDisabled, + kScalingDisabled); fake_clock_.AdvanceTimeMilliseconds(10000); UpdateDataCounters(kFirstSsrc); // Adapt changes: 1, elapsed time: 20 sec. fake_clock_.AdvanceTimeMilliseconds(10000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); // Adapt changes: 1, elapsed time: 20 sec => 3 per minute. statistics_proxy_.reset(); @@ -1027,22 +1064,17 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesStatsStartedAfterFirstSentPacket) { TEST_F(SendStatisticsProxyTest, AdaptChangesReportedAfterContentSwitch) { // First RTP packet sent, cpu adaptation enabled. UpdateDataCounters(kFirstSsrc); - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - quality_counts.num_framerate_reductions = absl::nullopt; - quality_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, + kScalingDisabled); // Adapt changes: 2, elapsed time: 15 sec => 8 per minute. 
- statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(6000); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(9000); // Switch content type, real-time stats should be updated. @@ -1058,23 +1090,18 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesReportedAfterContentSwitch) { // First RTP packet sent, scaling enabled. UpdateDataCounters(kFirstSsrc); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, + kScalingDisabled); // Adapt changes: 4, elapsed time: 120 sec => 2 per minute. 
- statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(120000); statistics_proxy_.reset(); @@ -1091,14 +1118,13 @@ TEST_F(SendStatisticsProxyTest, AdaptChangesReportedAfterContentSwitch) { TEST_F(SendStatisticsProxyTest, QualityLimitationReasonIsCpuWhenCpuIsResolutionLimited) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - - cpu_counts.num_resolution_reductions = 1; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 1; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_EQ(QualityLimitationReason::kCpu, statistics_proxy_->GetStats().quality_limitation_reason); @@ -1106,14 +1132,14 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, QualityLimitationReasonIsCpuWhenCpuIsFramerateLimited) { - 
SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; - cpu_counts.num_framerate_reductions = 1; + cpu_counts.fps_adaptations = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_EQ(QualityLimitationReason::kCpu, statistics_proxy_->GetStats().quality_limitation_reason); @@ -1121,14 +1147,14 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, QualityLimitationReasonIsBandwidthWhenQualityIsResolutionLimited) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; - quality_counts.num_resolution_reductions = 1; + quality_counts.resolution_adaptations = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_EQ(QualityLimitationReason::kBandwidth, statistics_proxy_->GetStats().quality_limitation_reason); @@ -1136,14 +1162,14 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, QualityLimitationReasonIsBandwidthWhenQualityIsFramerateLimited) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; - quality_counts.num_framerate_reductions = 1; + quality_counts.fps_adaptations = 1; - statistics_proxy_->OnAdaptationChanged( - 
VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EXPECT_EQ(QualityLimitationReason::kBandwidth, statistics_proxy_->GetStats().quality_limitation_reason); @@ -1151,67 +1177,64 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, QualityLimitationReasonIsBandwidthWhenBothCpuAndQualityIsLimited) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; - cpu_counts.num_resolution_reductions = 1; - quality_counts.num_resolution_reductions = 1; + cpu_counts.resolution_adaptations = 1; + quality_counts.resolution_adaptations = 1; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Even if the last adaptation reason is kCpu, if the counters indicate being // both CPU and quality (=bandwidth) limited, kBandwidth takes precedence. - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_EQ(QualityLimitationReason::kBandwidth, statistics_proxy_->GetStats().quality_limitation_reason); } TEST_F(SendStatisticsProxyTest, QualityLimitationReasonIsNoneWhenNotLimited) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; // Observe a limitation due to CPU. This makes sure the test doesn't pass // due to "none" being the default value. 
- cpu_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 1; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); // Go back to not being limited. - cpu_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 0; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); EXPECT_EQ(QualityLimitationReason::kNone, statistics_proxy_->GetStats().quality_limitation_reason); } TEST_F(SendStatisticsProxyTest, QualityLimitationDurationIncreasesWithTime) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); // Not limited for 3000 ms fake_clock_.AdvanceTimeMilliseconds(3000); // CPU limited for 2000 ms - cpu_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(2000); // Bandwidth limited for 1000 ms - cpu_counts.num_resolution_reductions = 0; - quality_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 0; + quality_counts.resolution_adaptations = 1; + 
statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(1000); // CPU limited for another 2000 ms - cpu_counts.num_resolution_reductions = 1; - quality_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 1; + quality_counts.resolution_adaptations = 0; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); fake_clock_.AdvanceTimeMilliseconds(2000); auto quality_limitation_durations_ms = @@ -1615,12 +1638,8 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramExcludesSuspendedTime) { } TEST_F(SendStatisticsProxyTest, CpuLimitedHistogramNotUpdatedWhenDisabled) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - cpu_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kResolutionScalingDisabled, + kResolutionScalingDisabled); for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) statistics_proxy_->OnIncomingFrame(kWidth, kHeight); @@ -1631,20 +1650,17 @@ TEST_F(SendStatisticsProxyTest, CpuLimitedHistogramNotUpdatedWhenDisabled) { } TEST_F(SendStatisticsProxyTest, CpuLimitedHistogramUpdated) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - cpu_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + cpu_counts.resolution_adaptations = 0; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, 
kScalingEnabled); for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) statistics_proxy_->OnIncomingFrame(kWidth, kHeight); - cpu_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kCpu, cpu_counts, - quality_counts); + cpu_counts.resolution_adaptations = 1; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kCpu, + cpu_counts, quality_counts); for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) statistics_proxy_->OnIncomingFrame(kWidth, kHeight); @@ -2024,12 +2040,8 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, QualityLimitedHistogramsNotUpdatedWhenDisabled) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - quality_counts.num_resolution_reductions = absl::nullopt; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kFramerateScalingDisabled, + kScalingDisabled); EncodedImage encoded_image; encoded_image.SetSpatialIndex(0); for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) @@ -2045,12 +2057,7 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, QualityLimitedHistogramsUpdatedWhenEnabled_NoResolutionDownscale) { - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - quality_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); EncodedImage encoded_image; encoded_image.SetSpatialIndex(0); for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) @@ -2070,12 +2077,12 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, 
QualityLimitedHistogramsUpdatedWhenEnabled_TwoResolutionDownscales) { const int kDownscales = 2; - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - quality_counts.num_resolution_reductions = kDownscales; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + quality_counts.resolution_adaptations = kDownscales; + statistics_proxy_->UpdateAdaptationSettings(kScalingEnabled, kScalingEnabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); EncodedImage encoded_image; encoded_image.SetSpatialIndex(0); for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) @@ -2117,20 +2124,20 @@ TEST_F(SendStatisticsProxyTest, GetStatsReportsBandwidthLimitedResolution) { encoded_image._encodedHeight = kHeight / 2; // Resolution scaled due to quality. - SendStatisticsProxy::AdaptationSteps cpu_counts; - SendStatisticsProxy::AdaptationSteps quality_counts; - quality_counts.num_resolution_reductions = 1; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + VideoAdaptationCounters cpu_counts; + VideoAdaptationCounters quality_counts; + quality_counts.resolution_adaptations = 1; + statistics_proxy_->UpdateAdaptationSettings(kFramerateScalingDisabled, + kFramerateScalingDisabled); + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution); // Adapt up. 
- quality_counts.num_resolution_reductions = 0; - statistics_proxy_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kQuality, cpu_counts, - quality_counts); + quality_counts.resolution_adaptations = 0; + statistics_proxy_->OnAdaptationChanged(VideoAdaptationReason::kQuality, + cpu_counts, quality_counts); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); @@ -2145,6 +2152,15 @@ TEST_F(SendStatisticsProxyTest, GetStatsReportsBandwidthLimitedResolution) { allocation.set_bw_limited(true); statistics_proxy_->OnBitrateAllocationUpdated(codec, allocation); EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution); + + // Revert for the next test. + allocation.set_bw_limited(false); + statistics_proxy_->OnBitrateAllocationUpdated(codec, allocation); + EXPECT_FALSE(statistics_proxy_->GetStats().bw_limited_resolution); + + // Internal encoder scaler reduced resolution. + statistics_proxy_->OnEncoderInternalScalerUpdate(/*scaled=*/true); + EXPECT_TRUE(statistics_proxy_->GetStats().bw_limited_resolution); } TEST_F(SendStatisticsProxyTest, GetStatsReportsTargetMediaBitrate) { @@ -2368,6 +2384,21 @@ TEST_F(SendStatisticsProxyTest, ResetsRtcpCountersOnContentChange) { 4 * 100 / 5)); } +TEST_F(SendStatisticsProxyTest, GetStatsReportsIsRtx) { + StreamDataCountersCallback* proxy = + static_cast(statistics_proxy_.get()); + StreamDataCounters counters; + proxy->DataCountersUpdated(counters, kFirstSsrc); + proxy->DataCountersUpdated(counters, kFirstRtxSsrc); + + EXPECT_NE(GetStreamStats(kFirstSsrc).type, + VideoSendStream::StreamStats::StreamType::kRtx); + EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, absl::nullopt); + EXPECT_EQ(GetStreamStats(kFirstRtxSsrc).type, + VideoSendStream::StreamStats::StreamType::kRtx); + EXPECT_EQ(GetStreamStats(kFirstRtxSsrc).referenced_media_ssrc, kFirstSsrc); +} + TEST_F(SendStatisticsProxyTest, GetStatsReportsIsFlexFec) { 
statistics_proxy_.reset( new SendStatisticsProxy(&fake_clock_, GetTestConfigWithFlexFec(), @@ -2379,8 +2410,12 @@ TEST_F(SendStatisticsProxyTest, GetStatsReportsIsFlexFec) { proxy->DataCountersUpdated(counters, kFirstSsrc); proxy->DataCountersUpdated(counters, kFlexFecSsrc); - EXPECT_FALSE(GetStreamStats(kFirstSsrc).is_flexfec); - EXPECT_TRUE(GetStreamStats(kFlexFecSsrc).is_flexfec); + EXPECT_NE(GetStreamStats(kFirstSsrc).type, + VideoSendStream::StreamStats::StreamType::kFlexfec); + EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, absl::nullopt); + EXPECT_EQ(GetStreamStats(kFlexFecSsrc).type, + VideoSendStream::StreamStats::StreamType::kFlexfec); + EXPECT_EQ(GetStreamStats(kFlexFecSsrc).referenced_media_ssrc, kFirstSsrc); } TEST_F(SendStatisticsProxyTest, SendBitratesAreReportedWithFlexFecEnabled) { @@ -2686,7 +2721,7 @@ TEST_F(SendStatisticsProxyTest, Vp9SvcLowSpatialLayerDoesNotUpdateResolution) { codec_info.codecType = kVideoCodecVP9; // For first picture, it is expected that low layer updates resolution. - codec_info.codecSpecific.VP9.end_of_picture = false; + codec_info.end_of_picture = false; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); VideoSendStream::Stats stats = statistics_proxy_->GetStats(); EXPECT_EQ(kEncodedWidth, stats.substreams[config_.rtp.ssrcs[0]].width); @@ -2695,7 +2730,7 @@ TEST_F(SendStatisticsProxyTest, Vp9SvcLowSpatialLayerDoesNotUpdateResolution) { // Top layer updates resolution. encoded_image._encodedWidth = kEncodedWidth * 2; encoded_image._encodedHeight = kEncodedHeight * 2; - codec_info.codecSpecific.VP9.end_of_picture = true; + codec_info.end_of_picture = true; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); stats = statistics_proxy_->GetStats(); EXPECT_EQ(kEncodedWidth * 2, stats.substreams[config_.rtp.ssrcs[0]].width); @@ -2704,7 +2739,7 @@ TEST_F(SendStatisticsProxyTest, Vp9SvcLowSpatialLayerDoesNotUpdateResolution) { // Low layer of next frame doesn't update resolution. 
encoded_image._encodedWidth = kEncodedWidth; encoded_image._encodedHeight = kEncodedHeight; - codec_info.codecSpecific.VP9.end_of_picture = false; + codec_info.end_of_picture = false; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); stats = statistics_proxy_->GetStats(); EXPECT_EQ(kEncodedWidth * 2, stats.substreams[config_.rtp.ssrcs[0]].width); diff --git a/video/stream_synchronization.cc b/video/stream_synchronization.cc index 159eb7a1ab..d5c77c1eca 100644 --- a/video/stream_synchronization.cc +++ b/video/stream_synchronization.cc @@ -24,8 +24,8 @@ static const int kFilterLength = 4; // Minimum difference between audio and video to warrant a change. static const int kMinDeltaMs = 30; -StreamSynchronization::StreamSynchronization(int video_stream_id, - int audio_stream_id) +StreamSynchronization::StreamSynchronization(uint32_t video_stream_id, + uint32_t audio_stream_id) : video_stream_id_(video_stream_id), audio_stream_id_(audio_stream_id), base_target_delay_ms_(0), @@ -53,6 +53,7 @@ bool StreamSynchronization::ComputeRelativeDelay( video_measurement.latest_receive_time_ms - audio_measurement.latest_receive_time_ms - (video_last_capture_time_ms - audio_last_capture_time_ms); + if (*relative_delay_ms > kMaxDeltaDelayMs || *relative_delay_ms < -kMaxDeltaDelayMs) { return false; @@ -177,11 +178,18 @@ void StreamSynchronization::SetTargetBufferingDelay(int target_delay_ms) { // The video delay is compared to the last value (and how much we can update // is limited by that as well). video_delay_.last_ms += target_delay_ms - base_target_delay_ms_; - video_delay_.extra_ms += target_delay_ms - base_target_delay_ms_; // Video is already delayed by the desired amount. 
base_target_delay_ms_ = target_delay_ms; } +void StreamSynchronization::ReduceAudioDelay() { + audio_delay_.extra_ms *= 0.9f; +} + +void StreamSynchronization::ReduceVideoDelay() { + video_delay_.extra_ms *= 0.9f; +} + } // namespace webrtc diff --git a/video/stream_synchronization.h b/video/stream_synchronization.h index 5f7ca6ac5b..2da6a49a14 100644 --- a/video/stream_synchronization.h +++ b/video/stream_synchronization.h @@ -26,7 +26,7 @@ class StreamSynchronization { uint32_t latest_timestamp; }; - StreamSynchronization(int video_stream_id, int audio_stream_id); + StreamSynchronization(uint32_t video_stream_id, uint32_t audio_stream_id); bool ComputeDelays(int relative_delay_ms, int current_audio_delay_ms, @@ -44,16 +44,25 @@ class StreamSynchronization { // |target_delay_ms|. void SetTargetBufferingDelay(int target_delay_ms); + // Lowers the audio delay by 10%. Can be used to recover from errors. + void ReduceAudioDelay(); + + // Lowers the video delay by 10%. Can be used to recover from errors. 
+ void ReduceVideoDelay(); + + uint32_t audio_stream_id() const { return audio_stream_id_; } + uint32_t video_stream_id() const { return video_stream_id_; } + private: struct SynchronizationDelays { int extra_ms = 0; int last_ms = 0; }; + const uint32_t video_stream_id_; + const uint32_t audio_stream_id_; SynchronizationDelays audio_delay_; SynchronizationDelays video_delay_; - const int video_stream_id_; - const int audio_stream_id_; int base_target_delay_ms_; int avg_diff_ms_; }; diff --git a/video/stream_synchronization_unittest.cc b/video/stream_synchronization_unittest.cc index f9b885d490..3d6fdd82a7 100644 --- a/video/stream_synchronization_unittest.cc +++ b/video/stream_synchronization_unittest.cc @@ -18,7 +18,7 @@ namespace webrtc { namespace { -constexpr int kMaxAudioDiffMs = 80; // From stream_synchronization.cc +constexpr int kMaxChangeMs = 80; // From stream_synchronization.cc constexpr int kDefaultAudioFrequency = 8000; constexpr int kDefaultVideoFrequency = 90000; constexpr int kSmoothingFilter = 4 * 2; @@ -33,13 +33,13 @@ class StreamSynchronizationTest : public ::testing::Test { // Generates the necessary RTCP measurements and RTP timestamps and computes // the audio and video delays needed to get the two streams in sync. // |audio_delay_ms| and |video_delay_ms| are the number of milliseconds after - // capture which the frames are rendered. + // capture which the frames are received. // |current_audio_delay_ms| is the number of milliseconds which audio is // currently being delayed by the receiver. 
bool DelayedStreams(int audio_delay_ms, int video_delay_ms, int current_audio_delay_ms, - int* extra_audio_delay_ms, + int* total_audio_delay_ms, int* total_video_delay_ms) { int audio_frequency = static_cast(kDefaultAudioFrequency * audio_clock_drift_ + 0.5); @@ -95,186 +95,175 @@ class StreamSynchronizationTest : public ::testing::Test { clock_receiver_.AdvanceTimeMilliseconds(video_delay_ms - audio_delay_ms); video.latest_receive_time_ms = clock_receiver_.CurrentTime().ms(); } + int relative_delay_ms; - StreamSynchronization::ComputeRelativeDelay(audio, video, - &relative_delay_ms); + EXPECT_TRUE(StreamSynchronization::ComputeRelativeDelay( + audio, video, &relative_delay_ms)); EXPECT_EQ(video_delay_ms - audio_delay_ms, relative_delay_ms); + return sync_.ComputeDelays(relative_delay_ms, current_audio_delay_ms, - extra_audio_delay_ms, total_video_delay_ms); + total_audio_delay_ms, total_video_delay_ms); } // Simulate audio playback 300 ms after capture and video rendering 100 ms // after capture. Verify that the correct extra delays are calculated for // audio and video, and that they change correctly when we simulate that // NetEQ or the VCM adds more delay to the streams. - // TODO(holmer): This is currently wrong! We should simply change - // audio_delay_ms or video_delay_ms since those now include VCM and NetEQ - // delays. 
- void BothDelayedAudioLaterTest(int base_target_delay) { - int current_audio_delay_ms = base_target_delay; - int audio_delay_ms = base_target_delay + 300; - int video_delay_ms = base_target_delay + 100; - int extra_audio_delay_ms = 0; - int total_video_delay_ms = base_target_delay; - int filtered_move = (audio_delay_ms - video_delay_ms) / kSmoothingFilter; - const int kNeteqDelayIncrease = 50; - const int kNeteqDelayDecrease = 10; - - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + void BothDelayedAudioLaterTest(int base_target_delay_ms) { + const int kAudioDelayMs = base_target_delay_ms + 300; + const int kVideoDelayMs = base_target_delay_ms + 100; + int current_audio_delay_ms = base_target_delay_ms; + int total_audio_delay_ms = 0; + int total_video_delay_ms = base_target_delay_ms; + int filtered_move = (kAudioDelayMs - kVideoDelayMs) / kSmoothingFilter; + + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms); - EXPECT_EQ(base_target_delay, extra_audio_delay_ms); - current_audio_delay_ms = extra_audio_delay_ms; + EXPECT_EQ(base_target_delay_ms + filtered_move, total_video_delay_ms); + EXPECT_EQ(base_target_delay_ms, total_audio_delay_ms); + // Set new current delay. + current_audio_delay_ms = total_audio_delay_ms; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds( - 1000 - std::max(audio_delay_ms, video_delay_ms)); - // Simulate base_target_delay minimum delay in the VCM. - total_video_delay_ms = base_target_delay; - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + 1000 - std::max(kAudioDelayMs, kVideoDelayMs)); + // Simulate base_target_delay_ms minimum delay in the VCM. 
+ total_video_delay_ms = base_target_delay_ms; + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay + 2 * filtered_move, total_video_delay_ms); - EXPECT_EQ(base_target_delay, extra_audio_delay_ms); - current_audio_delay_ms = extra_audio_delay_ms; + EXPECT_EQ(base_target_delay_ms + 2 * filtered_move, total_video_delay_ms); + EXPECT_EQ(base_target_delay_ms, total_audio_delay_ms); + // Set new current delay. + current_audio_delay_ms = total_audio_delay_ms; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds( - 1000 - std::max(audio_delay_ms, video_delay_ms)); - // Simulate base_target_delay minimum delay in the VCM. - total_video_delay_ms = base_target_delay; - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + 1000 - std::max(kAudioDelayMs, kVideoDelayMs)); + // Simulate base_target_delay_ms minimum delay in the VCM. + total_video_delay_ms = base_target_delay_ms; + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay + 3 * filtered_move, total_video_delay_ms); - EXPECT_EQ(base_target_delay, extra_audio_delay_ms); + EXPECT_EQ(base_target_delay_ms + 3 * filtered_move, total_video_delay_ms); + EXPECT_EQ(base_target_delay_ms, total_audio_delay_ms); // Simulate that NetEQ introduces some audio delay. - current_audio_delay_ms = base_target_delay + kNeteqDelayIncrease; + const int kNeteqDelayIncrease = 50; + current_audio_delay_ms = base_target_delay_ms + kNeteqDelayIncrease; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds( - 1000 - std::max(audio_delay_ms, video_delay_ms)); - // Simulate base_target_delay minimum delay in the VCM. 
- total_video_delay_ms = base_target_delay; - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + 1000 - std::max(kAudioDelayMs, kVideoDelayMs)); + // Simulate base_target_delay_ms minimum delay in the VCM. + total_video_delay_ms = base_target_delay_ms; + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); filtered_move = 3 * filtered_move + - (kNeteqDelayIncrease + audio_delay_ms - video_delay_ms) / + (kNeteqDelayIncrease + kAudioDelayMs - kVideoDelayMs) / kSmoothingFilter; - EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms); - EXPECT_EQ(base_target_delay, extra_audio_delay_ms); + EXPECT_EQ(base_target_delay_ms + filtered_move, total_video_delay_ms); + EXPECT_EQ(base_target_delay_ms, total_audio_delay_ms); // Simulate that NetEQ reduces its delay. - current_audio_delay_ms = base_target_delay + kNeteqDelayDecrease; + const int kNeteqDelayDecrease = 10; + current_audio_delay_ms = base_target_delay_ms + kNeteqDelayDecrease; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds( - 1000 - std::max(audio_delay_ms, video_delay_ms)); - // Simulate base_target_delay minimum delay in the VCM. - total_video_delay_ms = base_target_delay; - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + 1000 - std::max(kAudioDelayMs, kVideoDelayMs)); + // Simulate base_target_delay_ms minimum delay in the VCM. 
+ total_video_delay_ms = base_target_delay_ms; + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - - filtered_move = filtered_move + - (kNeteqDelayDecrease + audio_delay_ms - video_delay_ms) / - kSmoothingFilter; - - EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms); - EXPECT_EQ(base_target_delay, extra_audio_delay_ms); + filtered_move = + filtered_move + (kNeteqDelayDecrease + kAudioDelayMs - kVideoDelayMs) / + kSmoothingFilter; + EXPECT_EQ(base_target_delay_ms + filtered_move, total_video_delay_ms); + EXPECT_EQ(base_target_delay_ms, total_audio_delay_ms); } - void BothDelayedVideoLaterTest(int base_target_delay) { - int current_audio_delay_ms = base_target_delay; - int audio_delay_ms = base_target_delay + 100; - int video_delay_ms = base_target_delay + 300; - int extra_audio_delay_ms = 0; - int total_video_delay_ms = base_target_delay; + void BothDelayedVideoLaterTest(int base_target_delay_ms) { + const int kAudioDelayMs = base_target_delay_ms + 100; + const int kVideoDelayMs = base_target_delay_ms + 300; + int current_audio_delay_ms = base_target_delay_ms; + int total_audio_delay_ms = 0; + int total_video_delay_ms = base_target_delay_ms; - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay, total_video_delay_ms); - // The audio delay is not allowed to change more than this in 1 second. - EXPECT_GE(base_target_delay + kMaxAudioDiffMs, extra_audio_delay_ms); - current_audio_delay_ms = extra_audio_delay_ms; - int current_extra_delay_ms = extra_audio_delay_ms; + EXPECT_EQ(base_target_delay_ms, total_video_delay_ms); + // The audio delay is not allowed to change more than this. 
+ EXPECT_GE(base_target_delay_ms + kMaxChangeMs, total_audio_delay_ms); + int last_total_audio_delay_ms = total_audio_delay_ms; + // Set new current audio delay. + current_audio_delay_ms = total_audio_delay_ms; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay, total_video_delay_ms); - // The audio delay is not allowed to change more than the half of the - // required change in delay. - EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayIncrease( + EXPECT_EQ(base_target_delay_ms, total_video_delay_ms); + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs( current_audio_delay_ms, - base_target_delay + video_delay_ms - audio_delay_ms), - extra_audio_delay_ms); - current_audio_delay_ms = extra_audio_delay_ms; - current_extra_delay_ms = extra_audio_delay_ms; + base_target_delay_ms + kVideoDelayMs - kAudioDelayMs), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; + // Set new current audio delay. + current_audio_delay_ms = total_audio_delay_ms; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay, total_video_delay_ms); - // The audio delay is not allowed to change more than the half of the - // required change in delay. 
- EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayIncrease( + EXPECT_EQ(base_target_delay_ms, total_video_delay_ms); + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs( current_audio_delay_ms, - base_target_delay + video_delay_ms - audio_delay_ms), - extra_audio_delay_ms); - current_extra_delay_ms = extra_audio_delay_ms; + base_target_delay_ms + kVideoDelayMs - kAudioDelayMs), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; // Simulate that NetEQ for some reason reduced the delay. - current_audio_delay_ms = base_target_delay + 10; + current_audio_delay_ms = base_target_delay_ms + 10; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay, total_video_delay_ms); - // Since we only can ask NetEQ for a certain amount of extra delay, and - // we only measure the total NetEQ delay, we will ask for additional delay - // here to try to stay in sync. - EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayIncrease( + EXPECT_EQ(base_target_delay_ms, total_video_delay_ms); + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs( current_audio_delay_ms, - base_target_delay + video_delay_ms - audio_delay_ms), - extra_audio_delay_ms); - current_extra_delay_ms = extra_audio_delay_ms; + base_target_delay_ms + kVideoDelayMs - kAudioDelayMs), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; // Simulate that NetEQ for some reason significantly increased the delay. 
- current_audio_delay_ms = base_target_delay + 350; + current_audio_delay_ms = base_target_delay_ms + 350; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(base_target_delay, total_video_delay_ms); - // The audio delay is not allowed to change more than the half of the - // required change in delay. - EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayIncrease( + EXPECT_EQ(base_target_delay_ms, total_video_delay_ms); + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs( current_audio_delay_ms, - base_target_delay + video_delay_ms - audio_delay_ms), - extra_audio_delay_ms); - } - - int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) { - return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter, - kMaxAudioDiffMs); + base_target_delay_ms + kVideoDelayMs - kAudioDelayMs), + total_audio_delay_ms); } - int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) { - return std::max((delay_ms - current_audio_delay_ms) / kSmoothingFilter, - -kMaxAudioDiffMs); + int MaxAudioDelayChangeMs(int current_audio_delay_ms, int delay_ms) const { + int diff_ms = (delay_ms - current_audio_delay_ms) / kSmoothingFilter; + diff_ms = std::min(diff_ms, kMaxChangeMs); + diff_ms = std::max(diff_ms, -kMaxChangeMs); + return diff_ms; } StreamSynchronization sync_; @@ -285,114 +274,170 @@ class StreamSynchronizationTest : public ::testing::Test { }; TEST_F(StreamSynchronizationTest, NoDelay) { - uint32_t current_audio_delay_ms = 0; - int extra_audio_delay_ms = 0; + int total_audio_delay_ms = 0; int total_video_delay_ms = 0; - EXPECT_FALSE(DelayedStreams(0, 0, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(0, 
extra_audio_delay_ms); + EXPECT_FALSE(DelayedStreams(/*audio_delay_ms=*/0, /*video_delay_ms=*/0, + /*current_audio_delay_ms=*/0, + &total_audio_delay_ms, &total_video_delay_ms)); + EXPECT_EQ(0, total_audio_delay_ms); EXPECT_EQ(0, total_video_delay_ms); } -TEST_F(StreamSynchronizationTest, VideoDelay) { - uint32_t current_audio_delay_ms = 0; - int delay_ms = 200; - int extra_audio_delay_ms = 0; +TEST_F(StreamSynchronizationTest, VideoDelayed) { + const int kAudioDelayMs = 200; + int total_audio_delay_ms = 0; int total_video_delay_ms = 0; - EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(0, extra_audio_delay_ms); - // The video delay is not allowed to change more than this in 1 second. - EXPECT_EQ(delay_ms / kSmoothingFilter, total_video_delay_ms); + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, /*video_delay_ms=*/0, + /*current_audio_delay_ms=*/0, + &total_audio_delay_ms, &total_video_delay_ms)); + EXPECT_EQ(0, total_audio_delay_ms); + // The delay is not allowed to change more than this. + EXPECT_EQ(kAudioDelayMs / kSmoothingFilter, total_video_delay_ms); - clock_sender_.AdvanceTimeMilliseconds(1000); - clock_receiver_.AdvanceTimeMilliseconds(800); // Simulate 0 minimum delay in the VCM. total_video_delay_ms = 0; - EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(0, extra_audio_delay_ms); - // The video delay is not allowed to change more than this in 1 second. - EXPECT_EQ(2 * delay_ms / kSmoothingFilter, total_video_delay_ms); - clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, /*video_delay_ms=*/0, + /*current_audio_delay_ms=*/0, + &total_audio_delay_ms, &total_video_delay_ms)); + EXPECT_EQ(0, total_audio_delay_ms); + EXPECT_EQ(2 * kAudioDelayMs / kSmoothingFilter, total_video_delay_ms); + // Simulate 0 minimum delay in the VCM. 
total_video_delay_ms = 0; - EXPECT_TRUE(DelayedStreams(delay_ms, 0, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); - EXPECT_EQ(0, extra_audio_delay_ms); - EXPECT_EQ(3 * delay_ms / kSmoothingFilter, total_video_delay_ms); + clock_sender_.AdvanceTimeMilliseconds(1000); + clock_receiver_.AdvanceTimeMilliseconds(800); + EXPECT_TRUE(DelayedStreams(kAudioDelayMs, /*video_delay_ms=*/0, + /*current_audio_delay_ms=*/0, + &total_audio_delay_ms, &total_video_delay_ms)); + EXPECT_EQ(0, total_audio_delay_ms); + EXPECT_EQ(3 * kAudioDelayMs / kSmoothingFilter, total_video_delay_ms); } -TEST_F(StreamSynchronizationTest, AudioDelay) { +TEST_F(StreamSynchronizationTest, AudioDelayed) { + const int kVideoDelayMs = 200; int current_audio_delay_ms = 0; - int delay_ms = 200; - int extra_audio_delay_ms = 0; + int total_audio_delay_ms = 0; int total_video_delay_ms = 0; - EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); EXPECT_EQ(0, total_video_delay_ms); - // The audio delay is not allowed to change more than this in 1 second. - EXPECT_EQ(delay_ms / kSmoothingFilter, extra_audio_delay_ms); - current_audio_delay_ms = extra_audio_delay_ms; - int current_extra_delay_ms = extra_audio_delay_ms; + // The delay is not allowed to change more than this. + EXPECT_EQ(kVideoDelayMs / kSmoothingFilter, total_audio_delay_ms); + int last_total_audio_delay_ms = total_audio_delay_ms; + // Set new current audio delay. 
+ current_audio_delay_ms = total_audio_delay_ms; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); EXPECT_EQ(0, total_video_delay_ms); - // The audio delay is not allowed to change more than the half of the required - // change in delay. - EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms), - extra_audio_delay_ms); - current_audio_delay_ms = extra_audio_delay_ms; - current_extra_delay_ms = extra_audio_delay_ms; + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs(current_audio_delay_ms, kVideoDelayMs), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; + // Set new current audio delay. + current_audio_delay_ms = total_audio_delay_ms; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); EXPECT_EQ(0, total_video_delay_ms); - // The audio delay is not allowed to change more than the half of the required - // change in delay. - EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms), - extra_audio_delay_ms); - current_extra_delay_ms = extra_audio_delay_ms; + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs(current_audio_delay_ms, kVideoDelayMs), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; // Simulate that NetEQ for some reason reduced the delay. 
current_audio_delay_ms = 10; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); EXPECT_EQ(0, total_video_delay_ms); - // Since we only can ask NetEQ for a certain amount of extra delay, and - // we only measure the total NetEQ delay, we will ask for additional delay - // here to try to - EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms), - extra_audio_delay_ms); - current_extra_delay_ms = extra_audio_delay_ms; + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs(current_audio_delay_ms, kVideoDelayMs), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; // Simulate that NetEQ for some reason significantly increased the delay. current_audio_delay_ms = 350; clock_sender_.AdvanceTimeMilliseconds(1000); clock_receiver_.AdvanceTimeMilliseconds(800); - EXPECT_TRUE(DelayedStreams(0, delay_ms, current_audio_delay_ms, - &extra_audio_delay_ms, &total_video_delay_ms)); + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); + EXPECT_EQ(0, total_video_delay_ms); + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs(current_audio_delay_ms, kVideoDelayMs), + total_audio_delay_ms); +} + +TEST_F(StreamSynchronizationTest, NoAudioIncomingUnboundedIncrease) { + // Test how audio delay can grow unbounded when audio stops coming in. + // This is handled in caller of RtpStreamsSynchronizer, for example in + // RtpStreamsSynchronizer by not updating delays when audio samples stop + // coming in. 
+ const int kVideoDelayMs = 300; + const int kAudioDelayMs = 100; + int current_audio_delay_ms = kAudioDelayMs; + int total_audio_delay_ms = 0; + int total_video_delay_ms = 0; + + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); EXPECT_EQ(0, total_video_delay_ms); - // The audio delay is not allowed to change more than the half of the required - // change in delay. - EXPECT_EQ(current_extra_delay_ms + - MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms), - extra_audio_delay_ms); + // The delay is not allowed to change more than this. + EXPECT_EQ((kVideoDelayMs - kAudioDelayMs) / kSmoothingFilter, + total_audio_delay_ms); + int last_total_audio_delay_ms = total_audio_delay_ms; + + // Set new current audio delay: simulate audio samples are flowing in. + current_audio_delay_ms = total_audio_delay_ms; + + clock_sender_.AdvanceTimeMilliseconds(1000); + clock_receiver_.AdvanceTimeMilliseconds(1000); + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); + EXPECT_EQ(0, total_video_delay_ms); + EXPECT_EQ(last_total_audio_delay_ms + + MaxAudioDelayChangeMs(current_audio_delay_ms, kVideoDelayMs), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; + + // Simulate no incoming audio by not update audio delay. + const int kSimulationSecs = 300; // 5min + const int kMaxDeltaDelayMs = 10000; // max delay for audio in webrtc + for (auto time_secs = 0; time_secs < kSimulationSecs; time_secs++) { + clock_sender_.AdvanceTimeMilliseconds(1000); + clock_receiver_.AdvanceTimeMilliseconds(1000); + EXPECT_TRUE(DelayedStreams(/*audio_delay_ms=*/0, kVideoDelayMs, + current_audio_delay_ms, &total_audio_delay_ms, + &total_video_delay_ms)); + EXPECT_EQ(0, total_video_delay_ms); + + // Audio delay does not go above kMaxDeltaDelayMs. 
+ EXPECT_EQ(std::min(kMaxDeltaDelayMs, + last_total_audio_delay_ms + + MaxAudioDelayChangeMs(current_audio_delay_ms, + kVideoDelayMs)), + total_audio_delay_ms); + last_total_audio_delay_ms = total_audio_delay_ms; + } + // By now the audio delay has grown unbounded to kMaxDeltaDelayMs. + EXPECT_EQ(kMaxDeltaDelayMs, last_total_audio_delay_ms); } TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) { @@ -423,77 +468,66 @@ TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDrift) { BothDelayedAudioLaterTest(0); } -TEST_F(StreamSynchronizationTest, BaseDelay) { - int base_target_delay_ms = 2000; - int current_audio_delay_ms = 2000; - int extra_audio_delay_ms = 0; - int total_video_delay_ms = base_target_delay_ms; - sync_.SetTargetBufferingDelay(base_target_delay_ms); - // We are in sync don't change. - EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, - &total_video_delay_ms)); - // Triggering another call with the same values. Delay should not be modified. - base_target_delay_ms = 2000; - current_audio_delay_ms = base_target_delay_ms; - total_video_delay_ms = base_target_delay_ms; - sync_.SetTargetBufferingDelay(base_target_delay_ms); - // We are in sync don't change. - EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, - &total_video_delay_ms)); - // Changing delay value - intended to test this module only. In practice it - // would take VoE time to adapt. - base_target_delay_ms = 5000; - current_audio_delay_ms = base_target_delay_ms; - total_video_delay_ms = base_target_delay_ms; - sync_.SetTargetBufferingDelay(base_target_delay_ms); - // We are in sync don't change. 
- EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms, - current_audio_delay_ms, &extra_audio_delay_ms, - &total_video_delay_ms)); +TEST_F(StreamSynchronizationTest, BothEquallyDelayed) { + const int kDelayMs = 2000; + int current_audio_delay_ms = kDelayMs; + int total_audio_delay_ms = 0; + int total_video_delay_ms = kDelayMs; + // In sync, expect no change. + EXPECT_FALSE(DelayedStreams(kDelayMs, kDelayMs, current_audio_delay_ms, + &total_audio_delay_ms, &total_video_delay_ms)); + // Trigger another call with the same values, delay should not be modified. + total_video_delay_ms = kDelayMs; + EXPECT_FALSE(DelayedStreams(kDelayMs, kDelayMs, current_audio_delay_ms, + &total_audio_delay_ms, &total_video_delay_ms)); + // Change delay value, delay should not be modified. + const int kDelayMs2 = 5000; + current_audio_delay_ms = kDelayMs2; + total_video_delay_ms = kDelayMs2; + EXPECT_FALSE(DelayedStreams(kDelayMs2, kDelayMs2, current_audio_delay_ms, + &total_audio_delay_ms, &total_video_delay_ms)); } TEST_F(StreamSynchronizationTest, BothDelayedAudioLaterWithBaseDelay) { - int base_target_delay_ms = 3000; - sync_.SetTargetBufferingDelay(base_target_delay_ms); - BothDelayedAudioLaterTest(base_target_delay_ms); + const int kBaseTargetDelayMs = 3000; + sync_.SetTargetBufferingDelay(kBaseTargetDelayMs); + BothDelayedAudioLaterTest(kBaseTargetDelayMs); } TEST_F(StreamSynchronizationTest, BothDelayedAudioClockDriftWithBaseDelay) { - int base_target_delay_ms = 3000; - sync_.SetTargetBufferingDelay(base_target_delay_ms); + const int kBaseTargetDelayMs = 3000; + sync_.SetTargetBufferingDelay(kBaseTargetDelayMs); audio_clock_drift_ = 1.05; - BothDelayedAudioLaterTest(base_target_delay_ms); + BothDelayedAudioLaterTest(kBaseTargetDelayMs); } TEST_F(StreamSynchronizationTest, BothDelayedVideoClockDriftWithBaseDelay) { - int base_target_delay_ms = 3000; - sync_.SetTargetBufferingDelay(base_target_delay_ms); + const int kBaseTargetDelayMs = 3000; + 
sync_.SetTargetBufferingDelay(kBaseTargetDelayMs); video_clock_drift_ = 1.05; - BothDelayedAudioLaterTest(base_target_delay_ms); + BothDelayedAudioLaterTest(kBaseTargetDelayMs); } TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterWithBaseDelay) { - int base_target_delay_ms = 2000; - sync_.SetTargetBufferingDelay(base_target_delay_ms); - BothDelayedVideoLaterTest(base_target_delay_ms); + const int kBaseTargetDelayMs = 2000; + sync_.SetTargetBufferingDelay(kBaseTargetDelayMs); + BothDelayedVideoLaterTest(kBaseTargetDelayMs); } TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterAudioClockDriftWithBaseDelay) { - int base_target_delay_ms = 2000; + const int kBaseTargetDelayMs = 2000; audio_clock_drift_ = 1.05; - sync_.SetTargetBufferingDelay(base_target_delay_ms); - BothDelayedVideoLaterTest(base_target_delay_ms); + sync_.SetTargetBufferingDelay(kBaseTargetDelayMs); + BothDelayedVideoLaterTest(kBaseTargetDelayMs); } TEST_F(StreamSynchronizationTest, BothDelayedVideoLaterVideoClockDriftWithBaseDelay) { - int base_target_delay_ms = 2000; + const int kBaseTargetDelayMs = 2000; video_clock_drift_ = 1.05; - sync_.SetTargetBufferingDelay(base_target_delay_ms); - BothDelayedVideoLaterTest(base_target_delay_ms); + sync_.SetTargetBufferingDelay(kBaseTargetDelayMs); + BothDelayedVideoLaterTest(kBaseTargetDelayMs); } } // namespace webrtc diff --git a/video/sv_loopback.cc b/video/sv_loopback.cc index ec236d6e62..af475ae4eb 100644 --- a/video/sv_loopback.cc +++ b/video/sv_loopback.cc @@ -605,56 +605,62 @@ void Loopback() { call_bitrate_config.max_bitrate_bps = (ScreenshareMaxBitrateKbps() + VideoMaxBitrateKbps()) * 1000; - VideoQualityTest::Params params, camera_params, screenshare_params; - params.call = {absl::GetFlag(FLAGS_send_side_bwe), - absl::GetFlag(FLAGS_generic_descriptor), call_bitrate_config, - 0}; + VideoQualityTest::Params params; + params.call.send_side_bwe = absl::GetFlag(FLAGS_send_side_bwe); + params.call.generic_descriptor = 
absl::GetFlag(FLAGS_generic_descriptor); + params.call.call_bitrate_config = call_bitrate_config; params.call.dual_video = true; - params.video[screenshare_idx] = {true, - ScreenshareWidth(), - ScreenshareHeight(), - ScreenshareFps(), - ScreenshareMinBitrateKbps() * 1000, - ScreenshareTargetBitrateKbps() * 1000, - ScreenshareMaxBitrateKbps() * 1000, - false, - Codec(), - ScreenshareNumTemporalLayers(), - ScreenshareSelectedTL(), - ScreenshareMinTransmitBitrateKbps() * 1000, - false, // ULPFEC disabled. - false, // FlexFEC disabled. - false, // Automatic scaling disabled - ""}; - params.video[camera_idx] = {absl::GetFlag(FLAGS_video), - VideoWidth(), - VideoHeight(), - VideoFps(), - VideoMinBitrateKbps() * 1000, - VideoTargetBitrateKbps() * 1000, - VideoMaxBitrateKbps() * 1000, - absl::GetFlag(FLAGS_suspend_below_min_bitrate), - Codec(), - VideoNumTemporalLayers(), - VideoSelectedTL(), - 0, // No min transmit bitrate. - absl::GetFlag(FLAGS_use_ulpfec), - absl::GetFlag(FLAGS_use_flexfec), - false, - VideoClip(), - GetCaptureDevice()}; - params.audio = {absl::GetFlag(FLAGS_audio), - absl::GetFlag(FLAGS_audio_video_sync), - absl::GetFlag(FLAGS_audio_dtx)}; - params.logging = {RtcEventLogName(), RtpDumpName(), EncodedFramePath()}; - params.analyzer = {"dual_streams", 0.0, 0.0, DurationSecs(), - OutputFilename(), GraphTitle()}; + params.video[screenshare_idx].enabled = true; + params.video[screenshare_idx].width = ScreenshareWidth(); + params.video[screenshare_idx].height = ScreenshareHeight(); + params.video[screenshare_idx].fps = ScreenshareFps(); + params.video[screenshare_idx].min_bitrate_bps = + ScreenshareMinBitrateKbps() * 1000; + params.video[screenshare_idx].target_bitrate_bps = + ScreenshareTargetBitrateKbps() * 1000; + params.video[screenshare_idx].max_bitrate_bps = + ScreenshareMaxBitrateKbps() * 1000; + params.video[screenshare_idx].codec = Codec(); + params.video[screenshare_idx].num_temporal_layers = + ScreenshareNumTemporalLayers(); + 
params.video[screenshare_idx].selected_tl = ScreenshareSelectedTL(); + params.video[screenshare_idx].min_transmit_bps = + ScreenshareMinTransmitBitrateKbps() * 1000; + params.video[camera_idx].enabled = absl::GetFlag(FLAGS_video); + params.video[camera_idx].width = VideoWidth(); + params.video[camera_idx].height = VideoHeight(); + params.video[camera_idx].fps = VideoFps(); + params.video[camera_idx].min_bitrate_bps = VideoMinBitrateKbps() * 1000; + params.video[camera_idx].target_bitrate_bps = VideoTargetBitrateKbps() * 1000; + params.video[camera_idx].max_bitrate_bps = VideoMaxBitrateKbps() * 1000; + params.video[camera_idx].suspend_below_min_bitrate = + absl::GetFlag(FLAGS_suspend_below_min_bitrate); + params.video[camera_idx].codec = Codec(); + params.video[camera_idx].num_temporal_layers = VideoNumTemporalLayers(); + params.video[camera_idx].selected_tl = VideoSelectedTL(); + params.video[camera_idx].ulpfec = absl::GetFlag(FLAGS_use_ulpfec); + params.video[camera_idx].flexfec = absl::GetFlag(FLAGS_use_flexfec); + params.video[camera_idx].clip_path = VideoClip(); + params.video[camera_idx].capture_device_index = GetCaptureDevice(); + params.audio.enabled = absl::GetFlag(FLAGS_audio); + params.audio.sync_video = absl::GetFlag(FLAGS_audio_video_sync); + params.audio.dtx = absl::GetFlag(FLAGS_audio_dtx); + params.logging.rtc_event_log_name = RtcEventLogName(); + params.logging.rtp_dump_name = RtpDumpName(); + params.logging.encoded_frame_base_path = EncodedFramePath(); + params.analyzer.test_label = "dual_streams"; + params.analyzer.test_durations_secs = DurationSecs(); + params.analyzer.graph_data_output_filename = OutputFilename(); + params.analyzer.graph_title = GraphTitle(); params.config = pipe_config; params.screenshare[camera_idx].enabled = false; - params.screenshare[screenshare_idx] = {true, GenerateSlides(), - SlideChangeInterval(), - ScrollDuration(), Slides()}; + params.screenshare[screenshare_idx].enabled = true; + 
params.screenshare[screenshare_idx].generate_slides = GenerateSlides(); + params.screenshare[screenshare_idx].slide_change_interval = + SlideChangeInterval(); + params.screenshare[screenshare_idx].scroll_duration = ScrollDuration(); + params.screenshare[screenshare_idx].slides = Slides(); if (VideoNumStreams() > 1 && VideoStream0().empty() && VideoStream1().empty()) { diff --git a/video/test/mock_video_stream_encoder.h b/video/test/mock_video_stream_encoder.h index 494419dffd..2af613e3ad 100644 --- a/video/test/mock_video_stream_encoder.h +++ b/video/test/mock_video_stream_encoder.h @@ -10,6 +10,8 @@ #ifndef VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_ #define VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_ +#include + #include "api/video/video_stream_encoder_interface.h" #include "test/gmock.h" @@ -17,22 +19,40 @@ namespace webrtc { class MockVideoStreamEncoder : public VideoStreamEncoderInterface { public: - MOCK_METHOD2(SetSource, - void(rtc::VideoSourceInterface*, - const DegradationPreference&)); - MOCK_METHOD2(SetSink, void(EncoderSink*, bool)); - MOCK_METHOD1(SetStartBitrate, void(int)); - MOCK_METHOD0(SendKeyFrame, void()); - MOCK_METHOD1(OnLossNotification, void(const VideoEncoder::LossNotification&)); - MOCK_METHOD5(OnBitrateUpdated, - void(DataRate, DataRate, DataRate, uint8_t, int64_t)); - MOCK_METHOD1(OnFrame, void(const VideoFrame&)); - MOCK_METHOD1(SetBitrateAllocationObserver, - void(VideoBitrateAllocationObserver*)); - MOCK_METHOD1(SetFecControllerOverride, void(FecControllerOverride*)); - MOCK_METHOD0(Stop, void()); + MOCK_METHOD(void, + AddAdaptationResource, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(std::vector>, + GetAdaptationResources, + (), + (override)); + MOCK_METHOD(void, + SetSource, + (rtc::VideoSourceInterface*, + const DegradationPreference&), + (override)); + MOCK_METHOD(void, SetSink, (EncoderSink*, bool), (override)); + MOCK_METHOD(void, SetStartBitrate, (int), (override)); + MOCK_METHOD(void, SendKeyFrame, (), (override)); + 
MOCK_METHOD(void, + OnLossNotification, + (const VideoEncoder::LossNotification&), + (override)); + MOCK_METHOD(void, + OnBitrateUpdated, + (DataRate, DataRate, DataRate, uint8_t, int64_t, double), + (override)); + MOCK_METHOD(void, OnFrame, (const VideoFrame&), (override)); + MOCK_METHOD(void, + SetFecControllerOverride, + (FecControllerOverride*), + (override)); + MOCK_METHOD(void, Stop, (), (override)); - MOCK_METHOD2(MockedConfigureEncoder, void(const VideoEncoderConfig&, size_t)); + MOCK_METHOD(void, + MockedConfigureEncoder, + (const VideoEncoderConfig&, size_t)); // gtest generates implicit copy which is not allowed on VideoEncoderConfig, // so we can't mock ConfigureEncoder directly. void ConfigureEncoder(VideoEncoderConfig config, diff --git a/video/video_analyzer.cc b/video/video_analyzer.cc index f2822efbea..c16c3b383b 100644 --- a/video/video_analyzer.cc +++ b/video/video_analyzer.cc @@ -38,7 +38,7 @@ ABSL_FLAG(bool, namespace webrtc { namespace { -constexpr TimeDelta kSendStatsPollingInterval = TimeDelta::Seconds<1>(); +constexpr TimeDelta kSendStatsPollingInterval = TimeDelta::Seconds(1); constexpr size_t kMaxComparisons = 10; // How often is keep alive message printed. 
constexpr int kKeepAliveIntervalSeconds = 30; @@ -57,6 +57,7 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport, double avg_psnr_threshold, double avg_ssim_threshold, int duration_frames, + TimeDelta test_duration, FILE* graph_data_output_file, const std::string& graph_title, uint32_t ssrc_to_analyze, @@ -74,7 +75,7 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport, send_stream_(nullptr), receive_stream_(nullptr), audio_receive_stream_(nullptr), - captured_frame_forwarder_(this, clock, duration_frames), + captured_frame_forwarder_(this, clock, duration_frames, test_duration), test_label_(test_label), graph_data_output_file_(graph_data_output_file), graph_title_(graph_title), @@ -92,6 +93,7 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport, render_frame_rate_(0), last_fec_bytes_(0), frames_to_process_(duration_frames), + test_end_(clock->CurrentTime() + test_duration), frames_recorded_(0), frames_processed_(0), captured_frames_(0), @@ -149,7 +151,7 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport, VideoAnalyzer::~VideoAnalyzer() { { - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); quit_ = true; } for (rtc::PlatformThread* thread : comparison_thread_pool_) { @@ -172,25 +174,25 @@ void VideoAnalyzer::SetSource( } void VideoAnalyzer::SetCall(Call* call) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); RTC_DCHECK(!call_); call_ = call; } void VideoAnalyzer::SetSendStream(VideoSendStream* stream) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); RTC_DCHECK(!send_stream_); send_stream_ = stream; } void VideoAnalyzer::SetReceiveStream(VideoReceiveStream* stream) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); RTC_DCHECK(!receive_stream_); receive_stream_ = stream; } void VideoAnalyzer::SetAudioReceiveStream(AudioReceiveStream* recv_stream) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); 
RTC_CHECK(!audio_receive_stream_); audio_receive_stream_ = recv_stream; } @@ -232,7 +234,7 @@ PacketReceiver::DeliveryStatus VideoAnalyzer::DeliverPacket( // (FlexFEC and media are sent on different SSRCs, which have different // timestamps spaces.) // Also ignore packets from wrong SSRC, but include retransmits. - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); int64_t timestamp = wrap_handler_.Unwrap(rtp_packet.Timestamp() - rtp_timestamp_delta_); recv_times_[timestamp] = clock_->CurrentNtpInMilliseconds(); @@ -243,7 +245,7 @@ PacketReceiver::DeliveryStatus VideoAnalyzer::DeliverPacket( } void VideoAnalyzer::PreEncodeOnFrame(const VideoFrame& video_frame) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); if (!first_encoded_timestamp_) { while (frames_.front().timestamp() != video_frame.timestamp()) { ++dropped_frames_before_first_encode_; @@ -255,7 +257,7 @@ void VideoAnalyzer::PreEncodeOnFrame(const VideoFrame& video_frame) { } void VideoAnalyzer::PostEncodeOnFrame(size_t stream_id, uint32_t timestamp) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); if (!first_sent_timestamp_ && stream_id == selected_stream_) { first_sent_timestamp_ = timestamp; } @@ -271,7 +273,7 @@ bool VideoAnalyzer::SendRtp(const uint8_t* packet, bool result = transport_->SendRtp(packet, length, options); { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); if (rtp_timestamp_delta_ == 0 && rtp_packet.Ssrc() == ssrc_to_analyze_) { RTC_CHECK(static_cast(first_sent_timestamp_)); rtp_timestamp_delta_ = rtp_packet.Timestamp() - *first_sent_timestamp_; @@ -302,7 +304,7 @@ bool VideoAnalyzer::SendRtcp(const uint8_t* packet, size_t length) { void VideoAnalyzer::OnFrame(const VideoFrame& video_frame) { int64_t render_time_ms = clock_->CurrentNtpInMilliseconds(); - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); StartExcludingCpuThreadTime(); @@ -359,7 +361,7 @@ void VideoAnalyzer::Wait() { int frames_processed; int frames_captured; { - rtc::CritScope 
crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); frames_processed = frames_processed_; frames_captured = captured_frames_; } @@ -379,11 +381,8 @@ void VideoAnalyzer::Wait() { continue; } if (frames_processed == last_frames_processed && - last_frames_captured == frames_captured) { - if (frames_captured < frames_to_process_) { - EXPECT_GT(frames_processed, last_frames_processed) - << "Analyzer stalled while waiting for test to finish."; - } + last_frames_captured == frames_captured && + clock_->CurrentTime() > test_end_) { done_.Set(); break; } @@ -402,29 +401,29 @@ void VideoAnalyzer::Wait() { } void VideoAnalyzer::StartMeasuringCpuProcessTime() { - rtc::CritScope lock(&cpu_measurement_lock_); + MutexLock lock(&cpu_measurement_lock_); cpu_time_ -= rtc::GetProcessCpuTimeNanos(); wallclock_time_ -= rtc::SystemTimeNanos(); } void VideoAnalyzer::StopMeasuringCpuProcessTime() { - rtc::CritScope lock(&cpu_measurement_lock_); + MutexLock lock(&cpu_measurement_lock_); cpu_time_ += rtc::GetProcessCpuTimeNanos(); wallclock_time_ += rtc::SystemTimeNanos(); } void VideoAnalyzer::StartExcludingCpuThreadTime() { - rtc::CritScope lock(&cpu_measurement_lock_); + MutexLock lock(&cpu_measurement_lock_); cpu_time_ += rtc::GetThreadCpuTimeNanos(); } void VideoAnalyzer::StopExcludingCpuThreadTime() { - rtc::CritScope lock(&cpu_measurement_lock_); + MutexLock lock(&cpu_measurement_lock_); cpu_time_ -= rtc::GetThreadCpuTimeNanos(); } double VideoAnalyzer::GetCpuUsagePercent() { - rtc::CritScope lock(&cpu_measurement_lock_); + MutexLock lock(&cpu_measurement_lock_); return static_cast(cpu_time_) / wallclock_time_ * 100.0; } @@ -457,9 +456,15 @@ bool VideoAnalyzer::IsInSelectedSpatialAndTemporalLayer( } void VideoAnalyzer::PollStats() { - rtc::CritScope crit(&comparison_lock_); - + // Do not grab |comparison_lock_|, before |GetStats()| completes. 
+ // Otherwise a deadlock may occur: + // 1) |comparison_lock_| is acquired after |lock_| + // 2) |lock_| is acquired after internal pacer lock in SendRtp() + // 3) internal pacer lock is acquired by GetStats(). Call::Stats call_stats = call_->GetStats(); + + MutexLock lock(&comparison_lock_); + send_bandwidth_bps_.AddSample(call_stats.send_bandwidth_bps); VideoSendStream::Stats send_stats = send_stream_->GetStats(); @@ -517,7 +522,8 @@ void VideoAnalyzer::PollStats() { } if (audio_receive_stream_ != nullptr) { - AudioReceiveStream::Stats receive_stats = audio_receive_stream_->GetStats(); + AudioReceiveStream::Stats receive_stats = + audio_receive_stream_->GetStats(/*get_and_clear_legacy_stats=*/true); audio_expand_rate_.AddSample(receive_stats.expand_rate); audio_accelerate_rate_.AddSample(receive_stats.accelerate_rate); audio_jitter_buffer_ms_.AddSample(receive_stats.jitter_buffer_ms); @@ -565,13 +571,13 @@ bool VideoAnalyzer::CompareFrames() { } bool VideoAnalyzer::PopComparison(VideoAnalyzer::FrameComparison* comparison) { - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); // If AllFramesRecorded() is true, it means we have already popped // frames_to_process_ frames from comparisons_, so there is no more work // for this thread to be done. frames_processed_ might still be lower if // all comparisons are not done, but those frames are currently being // worked on by other threads. 
- if (comparisons_.empty() || AllFramesRecorded()) + if (comparisons_.empty() || AllFramesRecordedLocked()) return false; *comparison = comparisons_.front(); @@ -582,21 +588,26 @@ bool VideoAnalyzer::PopComparison(VideoAnalyzer::FrameComparison* comparison) { } void VideoAnalyzer::FrameRecorded() { - rtc::CritScope crit(&comparison_lock_); ++frames_recorded_; } bool VideoAnalyzer::AllFramesRecorded() { - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); + return AllFramesRecordedLocked(); +} + +bool VideoAnalyzer::AllFramesRecordedLocked() { RTC_DCHECK(frames_recorded_ <= frames_to_process_); - return frames_recorded_ == frames_to_process_ || quit_; + return frames_recorded_ == frames_to_process_ || + (clock_->CurrentTime() > test_end_ && comparisons_.empty()) || quit_; } bool VideoAnalyzer::FrameProcessed() { - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); ++frames_processed_; assert(frames_processed_ <= frames_to_process_); - return frames_processed_ == frames_to_process_; + return frames_processed_ == frames_to_process_ || + (clock_->CurrentTime() > test_end_ && comparisons_.empty()); } void VideoAnalyzer::PrintResults() { @@ -605,11 +616,11 @@ void VideoAnalyzer::PrintResults() { StopMeasuringCpuProcessTime(); int dropped_frames_diff; { - rtc::CritScope crit(&crit_); + MutexLock lock(&lock_); dropped_frames_diff = dropped_frames_before_first_encode_ + dropped_frames_before_rendering_ + frames_.size(); } - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); PrintResult("psnr", psnr_, "dB", ImproveDirection::kBiggerIsBetter); PrintResult("ssim", ssim_, "unitless", ImproveDirection::kBiggerIsBetter); PrintResult("sender_time", sender_time_, "ms", @@ -752,7 +763,7 @@ void VideoAnalyzer::PerformFrameComparison( ssim = I420SSIM(&*comparison.reference, &*comparison.render); } - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); if (psnr >= 0.0 && (!worst_frame_ 
|| worst_frame_->psnr > psnr)) { worst_frame_.emplace(FrameWithPsnr{psnr, *comparison.render}); @@ -841,7 +852,7 @@ void VideoAnalyzer::PrintResultWithExternalMean( void VideoAnalyzer::PrintSamplesToFile() { FILE* out = graph_data_output_file_; - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); absl::c_sort(samples_, [](const Sample& A, const Sample& B) -> bool { return A.input_time_ms < B.input_time_ms; }); @@ -872,14 +883,14 @@ void VideoAnalyzer::AddCapturedFrameForComparison( const VideoFrame& video_frame) { bool must_capture = false; { - rtc::CritScope lock(&comparison_lock_); + MutexLock lock(&comparison_lock_); must_capture = captured_frames_ < frames_to_process_; if (must_capture) { ++captured_frames_; } } if (must_capture) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); frames_.push_back(video_frame); } } @@ -902,7 +913,7 @@ void VideoAnalyzer::AddFrameComparison(const VideoFrame& reference, if (it != encoded_frame_sizes_.end()) encoded_frame_sizes_.erase(it); - rtc::CritScope crit(&comparison_lock_); + MutexLock lock(&comparison_lock_); if (comparisons_.size() < kMaxComparisons) { comparisons_.push_back(FrameComparison( reference, render, dropped, reference.ntp_time_ms(), send_time_ms, @@ -973,13 +984,15 @@ VideoAnalyzer::Sample::Sample(int dropped, VideoAnalyzer::CapturedFrameForwarder::CapturedFrameForwarder( VideoAnalyzer* analyzer, Clock* clock, - int frames_to_process) + int frames_to_capture, + TimeDelta test_duration) : analyzer_(analyzer), send_stream_input_(nullptr), video_source_(nullptr), clock_(clock), captured_frames_(0), - frames_to_process_(frames_to_process) {} + frames_to_capture_(frames_to_capture), + test_end_(clock->CurrentTime() + test_duration) {} void VideoAnalyzer::CapturedFrameForwarder::SetSource( VideoSourceInterface* video_source) { @@ -996,17 +1009,19 @@ void VideoAnalyzer::CapturedFrameForwarder::OnFrame( copy.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds()); 
copy.set_timestamp(copy.ntp_time_ms() * 90); analyzer_->AddCapturedFrameForComparison(copy); - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); ++captured_frames_; - if (send_stream_input_ && captured_frames_ <= frames_to_process_) + if (send_stream_input_ && clock_->CurrentTime() <= test_end_ && + captured_frames_ <= frames_to_capture_) { send_stream_input_->OnFrame(copy); + } } void VideoAnalyzer::CapturedFrameForwarder::AddOrUpdateSink( rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) { { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); RTC_DCHECK(!send_stream_input_ || send_stream_input_ == sink); send_stream_input_ = sink; } @@ -1017,7 +1032,7 @@ void VideoAnalyzer::CapturedFrameForwarder::AddOrUpdateSink( void VideoAnalyzer::CapturedFrameForwarder::RemoveSink( rtc::VideoSinkInterface* sink) { - rtc::CritScope lock(&crit_); + MutexLock lock(&lock_); RTC_DCHECK(sink == send_stream_input_); send_stream_input_ = nullptr; } diff --git a/video/video_analyzer.h b/video/video_analyzer.h index 7d60e7c8fa..18bacc16fc 100644 --- a/video/video_analyzer.h +++ b/video/video_analyzer.h @@ -23,6 +23,7 @@ #include "rtc_base/event.h" #include "rtc_base/numerics/running_statistics.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #include "test/layer_filtering_transport.h" #include "test/rtp_file_writer.h" @@ -34,13 +35,14 @@ class VideoAnalyzer : public PacketReceiver, public Transport, public rtc::VideoSinkInterface { public: - using Statistics = RunningStatistics; + using Statistics = webrtc_impl::RunningStatistics; VideoAnalyzer(test::LayerFilteringTransport* transport, const std::string& test_label, double avg_psnr_threshold, double avg_ssim_threshold, int duration_frames, + TimeDelta test_duration, FILE* graph_data_output_file, const std::string& graph_title, uint32_t ssrc_to_analyze, @@ -82,9 +84,9 @@ class VideoAnalyzer : public PacketReceiver, void 
StartMeasuringCpuProcessTime(); void StopMeasuringCpuProcessTime(); - void StartExcludingCpuThreadTime(); - void StopExcludingCpuThreadTime(); - double GetCpuUsagePercent(); + void StartExcludingCpuThreadTime() RTC_LOCKS_EXCLUDED(cpu_measurement_lock_); + void StopExcludingCpuThreadTime() RTC_LOCKS_EXCLUDED(cpu_measurement_lock_); + double GetCpuUsagePercent() RTC_LOCKS_EXCLUDED(cpu_measurement_lock_); test::LayerFilteringTransport* const transport_; PacketReceiver* receiver_; @@ -147,27 +149,32 @@ class VideoAnalyzer : public PacketReceiver, public: CapturedFrameForwarder(VideoAnalyzer* analyzer, Clock* clock, - int frames_to_process); + int frames_to_capture, + TimeDelta test_duration); void SetSource(rtc::VideoSourceInterface* video_source); private: - void OnFrame(const VideoFrame& video_frame) override; + void OnFrame(const VideoFrame& video_frame) + RTC_LOCKS_EXCLUDED(lock_) override; // Called when |send_stream_.SetSource()| is called. void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; + const rtc::VideoSinkWants& wants) + RTC_LOCKS_EXCLUDED(lock_) override; // Called by |send_stream_| when |send_stream_.SetSource()| is called. 
- void RemoveSink(rtc::VideoSinkInterface* sink) override; + void RemoveSink(rtc::VideoSinkInterface* sink) + RTC_LOCKS_EXCLUDED(lock_) override; VideoAnalyzer* const analyzer_; - rtc::CriticalSection crit_; + Mutex lock_; rtc::VideoSinkInterface* send_stream_input_ - RTC_GUARDED_BY(crit_); + RTC_GUARDED_BY(lock_); VideoSourceInterface* video_source_; Clock* clock_; - int captured_frames_ RTC_GUARDED_BY(crit_); - const int frames_to_process_; + int captured_frames_ RTC_GUARDED_BY(lock_); + const int frames_to_capture_; + const Timestamp test_end_; }; struct FrameWithPsnr { @@ -181,21 +188,23 @@ class VideoAnalyzer : public PacketReceiver, const VideoFrame& render, bool dropped, int64_t render_time_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); - void PollStats(); + void PollStats() RTC_LOCKS_EXCLUDED(comparison_lock_); static void FrameComparisonThread(void* obj); bool CompareFrames(); bool PopComparison(FrameComparison* comparison); // Increment counter for number of frames received for comparison. - void FrameRecorded(); + void FrameRecorded() RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_); // Returns true if all frames to be compared have been taken from the queue. - bool AllFramesRecorded(); + bool AllFramesRecorded() RTC_LOCKS_EXCLUDED(comparison_lock_); + bool AllFramesRecordedLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_); // Increase count of number of frames processed. Returns true if this was the // last frame to be processed. 
- bool FrameProcessed(); - void PrintResults(); - void PerformFrameComparison(const FrameComparison& comparison); + bool FrameProcessed() RTC_LOCKS_EXCLUDED(comparison_lock_); + void PrintResults() RTC_LOCKS_EXCLUDED(lock_, comparison_lock_); + void PerformFrameComparison(const FrameComparison& comparison) + RTC_LOCKS_EXCLUDED(comparison_lock_); void PrintResult(const char* result_type, Statistics stats, const char* unit, @@ -206,8 +215,9 @@ class VideoAnalyzer : public PacketReceiver, Statistics stats, const char* unit, webrtc::test::ImproveDirection improve_direction); - void PrintSamplesToFile(void); - void AddCapturedFrameForComparison(const VideoFrame& video_frame); + void PrintSamplesToFile(void) RTC_LOCKS_EXCLUDED(comparison_lock_); + void AddCapturedFrameForComparison(const VideoFrame& video_frame) + RTC_LOCKS_EXCLUDED(lock_, comparison_lock_); Call* call_; VideoSendStream* send_stream_; @@ -223,7 +233,7 @@ class VideoAnalyzer : public PacketReceiver, const int selected_sl_; const int selected_tl_; - rtc::CriticalSection comparison_lock_; + Mutex comparison_lock_; std::vector samples_ RTC_GUARDED_BY(comparison_lock_); Statistics sender_time_ RTC_GUARDED_BY(comparison_lock_); Statistics receiver_time_ RTC_GUARDED_BY(comparison_lock_); @@ -261,31 +271,33 @@ class VideoAnalyzer : public PacketReceiver, size_t last_fec_bytes_; - rtc::CriticalSection crit_; + Mutex lock_ RTC_ACQUIRED_BEFORE(comparison_lock_) + RTC_ACQUIRED_BEFORE(cpu_measurement_lock_); const int frames_to_process_; + const Timestamp test_end_; int frames_recorded_ RTC_GUARDED_BY(comparison_lock_); int frames_processed_ RTC_GUARDED_BY(comparison_lock_); int captured_frames_ RTC_GUARDED_BY(comparison_lock_); int dropped_frames_ RTC_GUARDED_BY(comparison_lock_); - int dropped_frames_before_first_encode_ RTC_GUARDED_BY(crit_); - int dropped_frames_before_rendering_ RTC_GUARDED_BY(crit_); + int dropped_frames_before_first_encode_ RTC_GUARDED_BY(lock_); + int dropped_frames_before_rendering_ 
RTC_GUARDED_BY(lock_); int64_t last_render_time_ RTC_GUARDED_BY(comparison_lock_); int64_t last_render_delta_ms_ RTC_GUARDED_BY(comparison_lock_); int64_t last_unfreeze_time_ms_ RTC_GUARDED_BY(comparison_lock_); - uint32_t rtp_timestamp_delta_ RTC_GUARDED_BY(crit_); + uint32_t rtp_timestamp_delta_ RTC_GUARDED_BY(lock_); - rtc::CriticalSection cpu_measurement_lock_; + Mutex cpu_measurement_lock_; int64_t cpu_time_ RTC_GUARDED_BY(cpu_measurement_lock_); int64_t wallclock_time_ RTC_GUARDED_BY(cpu_measurement_lock_); - std::deque frames_ RTC_GUARDED_BY(crit_); - absl::optional last_rendered_frame_ RTC_GUARDED_BY(crit_); - rtc::TimestampWrapAroundHandler wrap_handler_ RTC_GUARDED_BY(crit_); - std::map send_times_ RTC_GUARDED_BY(crit_); - std::map recv_times_ RTC_GUARDED_BY(crit_); - std::map encoded_frame_sizes_ RTC_GUARDED_BY(crit_); - absl::optional first_encoded_timestamp_ RTC_GUARDED_BY(crit_); - absl::optional first_sent_timestamp_ RTC_GUARDED_BY(crit_); + std::deque frames_ RTC_GUARDED_BY(lock_); + absl::optional last_rendered_frame_ RTC_GUARDED_BY(lock_); + rtc::TimestampWrapAroundHandler wrap_handler_ RTC_GUARDED_BY(lock_); + std::map send_times_ RTC_GUARDED_BY(lock_); + std::map recv_times_ RTC_GUARDED_BY(lock_); + std::map encoded_frame_sizes_ RTC_GUARDED_BY(lock_); + absl::optional first_encoded_timestamp_ RTC_GUARDED_BY(lock_); + absl::optional first_sent_timestamp_ RTC_GUARDED_BY(lock_); const double avg_psnr_threshold_; const double avg_ssim_threshold_; bool is_quick_test_enabled_; diff --git a/video/video_loopback.cc b/video/video_loopback.cc index 99ff4493bd..7762d9653d 100644 --- a/video/video_loopback.cc +++ b/video/video_loopback.cc @@ -376,33 +376,40 @@ void Loopback() { call_bitrate_config.max_bitrate_bps = -1; // Don't cap bandwidth estimate. 
VideoQualityTest::Params params; - params.call = {absl::GetFlag(FLAGS_send_side_bwe), - absl::GetFlag(FLAGS_generic_descriptor), call_bitrate_config, - 0}; - params.video[0] = {absl::GetFlag(FLAGS_video), - Width(), - Height(), - Fps(), - MinBitrateKbps() * 1000, - TargetBitrateKbps() * 1000, - MaxBitrateKbps() * 1000, - absl::GetFlag(FLAGS_suspend_below_min_bitrate), - Codec(), - NumTemporalLayers(), - SelectedTL(), - 0, // No min transmit bitrate. - absl::GetFlag(FLAGS_use_ulpfec), - absl::GetFlag(FLAGS_use_flexfec), - NumStreams() < 2, // Automatic quality scaling. - Clip(), - GetCaptureDevice()}; - params.audio = { - absl::GetFlag(FLAGS_audio), absl::GetFlag(FLAGS_audio_video_sync), - absl::GetFlag(FLAGS_audio_dtx), absl::GetFlag(FLAGS_use_real_adm)}; - params.logging = {RtcEventLogName(), RtpDumpName(), EncodedFramePath()}; + params.call.send_side_bwe = absl::GetFlag(FLAGS_send_side_bwe); + params.call.generic_descriptor = absl::GetFlag(FLAGS_generic_descriptor); + params.call.call_bitrate_config = call_bitrate_config; + + params.video[0].enabled = absl::GetFlag(FLAGS_video); + params.video[0].width = Width(); + params.video[0].height = Height(); + params.video[0].fps = Fps(); + params.video[0].min_bitrate_bps = MinBitrateKbps() * 1000; + params.video[0].target_bitrate_bps = TargetBitrateKbps() * 1000; + params.video[0].max_bitrate_bps = MaxBitrateKbps() * 1000; + params.video[0].suspend_below_min_bitrate = + absl::GetFlag(FLAGS_suspend_below_min_bitrate); + params.video[0].codec = Codec(); + params.video[0].num_temporal_layers = NumTemporalLayers(); + params.video[0].selected_tl = SelectedTL(); + params.video[0].min_transmit_bps = 0; + params.video[0].ulpfec = absl::GetFlag(FLAGS_use_ulpfec); + params.video[0].flexfec = absl::GetFlag(FLAGS_use_flexfec); + params.video[0].automatic_scaling = NumStreams() < 2; + params.video[0].clip_path = Clip(); + params.video[0].capture_device_index = GetCaptureDevice(); + params.audio.enabled = absl::GetFlag(FLAGS_audio); + 
params.audio.sync_video = absl::GetFlag(FLAGS_audio_video_sync); + params.audio.dtx = absl::GetFlag(FLAGS_audio_dtx); + params.audio.use_real_adm = absl::GetFlag(FLAGS_use_real_adm); + params.logging.rtc_event_log_name = RtcEventLogName(); + params.logging.rtp_dump_name = RtpDumpName(); + params.logging.encoded_frame_base_path = EncodedFramePath(); params.screenshare[0].enabled = false; - params.analyzer = {"video", 0.0, 0.0, DurationSecs(), - OutputFilename(), GraphTitle()}; + params.analyzer.test_label = "video"; + params.analyzer.test_durations_secs = DurationSecs(); + params.analyzer.graph_data_output_filename = OutputFilename(); + params.analyzer.graph_title = GraphTitle(); params.config = pipe_config; if (NumStreams() > 1 && Stream0().empty() && Stream1().empty()) { diff --git a/video/video_quality_observer2.cc b/video/video_quality_observer2.cc new file mode 100644 index 0000000000..0751d3f4ed --- /dev/null +++ b/video/video_quality_observer2.cc @@ -0,0 +1,294 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/video_quality_observer2.h" + +#include +#include +#include +#include + +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/metrics.h" +#include "video/video_receive_stream2.h" + +namespace webrtc { +namespace internal { +const uint32_t VideoQualityObserver::kMinFrameSamplesToDetectFreeze = 5; +const uint32_t VideoQualityObserver::kMinIncreaseForFreezeMs = 150; +const uint32_t VideoQualityObserver::kAvgInterframeDelaysWindowSizeFrames = 30; + +namespace { +constexpr int kMinVideoDurationMs = 3000; +constexpr int kMinRequiredSamples = 1; +constexpr int kPixelsInHighResolution = + 960 * 540; // CPU-adapted HD still counts. +constexpr int kPixelsInMediumResolution = 640 * 360; +constexpr int kBlockyQpThresholdVp8 = 70; +constexpr int kBlockyQpThresholdVp9 = 180; +constexpr int kMaxNumCachedBlockyFrames = 100; +// TODO(ilnik): Add H264/HEVC thresholds. +} // namespace + +VideoQualityObserver::VideoQualityObserver() + : last_frame_rendered_ms_(-1), + num_frames_rendered_(0), + first_frame_rendered_ms_(-1), + last_frame_pixels_(0), + is_last_frame_blocky_(false), + last_unfreeze_time_ms_(0), + render_interframe_delays_(kAvgInterframeDelaysWindowSizeFrames), + sum_squared_interframe_delays_secs_(0.0), + time_in_resolution_ms_(3, 0), + current_resolution_(Resolution::Low), + num_resolution_downgrades_(0), + time_in_blocky_video_ms_(0), + is_paused_(false) {} + +void VideoQualityObserver::UpdateHistograms(bool screenshare) { + // TODO(bugs.webrtc.org/11489): Called on the decoder thread - which _might_ + // be the same as the construction thread. + + // Don't report anything on an empty video stream. 
+ if (num_frames_rendered_ == 0) { + return; + } + + char log_stream_buf[2 * 1024]; + rtc::SimpleStringBuilder log_stream(log_stream_buf); + + if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) { + smooth_playback_durations_.Add(last_frame_rendered_ms_ - + last_unfreeze_time_ms_); + } + + std::string uma_prefix = + screenshare ? "WebRTC.Video.Screenshare" : "WebRTC.Video"; + + auto mean_time_between_freezes = + smooth_playback_durations_.Avg(kMinRequiredSamples); + if (mean_time_between_freezes) { + RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanTimeBetweenFreezesMs", + *mean_time_between_freezes); + log_stream << uma_prefix << ".MeanTimeBetweenFreezesMs " + << *mean_time_between_freezes << "\n"; + } + auto avg_freeze_length = freezes_durations_.Avg(kMinRequiredSamples); + if (avg_freeze_length) { + RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanFreezeDurationMs", + *avg_freeze_length); + log_stream << uma_prefix << ".MeanFreezeDurationMs " << *avg_freeze_length + << "\n"; + } + + int64_t video_duration_ms = + last_frame_rendered_ms_ - first_frame_rendered_ms_; + + if (video_duration_ms >= kMinVideoDurationMs) { + int time_spent_in_hd_percentage = static_cast( + time_in_resolution_ms_[Resolution::High] * 100 / video_duration_ms); + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInHdPercentage", + time_spent_in_hd_percentage); + log_stream << uma_prefix << ".TimeInHdPercentage " + << time_spent_in_hd_percentage << "\n"; + + int time_with_blocky_video_percentage = + static_cast(time_in_blocky_video_ms_ * 100 / video_duration_ms); + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInBlockyVideoPercentage", + time_with_blocky_video_percentage); + log_stream << uma_prefix << ".TimeInBlockyVideoPercentage " + << time_with_blocky_video_percentage << "\n"; + + int num_resolution_downgrades_per_minute = + num_resolution_downgrades_ * 60000 / video_duration_ms; + RTC_HISTOGRAM_COUNTS_SPARSE_100( + uma_prefix + ".NumberResolutionDownswitchesPerMinute", 
+ num_resolution_downgrades_per_minute); + log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute " + << num_resolution_downgrades_per_minute << "\n"; + + int num_freezes_per_minute = + freezes_durations_.NumSamples() * 60000 / video_duration_ms; + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".NumberFreezesPerMinute", + num_freezes_per_minute); + log_stream << uma_prefix << ".NumberFreezesPerMinute " + << num_freezes_per_minute << "\n"; + + if (sum_squared_interframe_delays_secs_ > 0.0) { + int harmonic_framerate_fps = std::round( + video_duration_ms / (1000 * sum_squared_interframe_delays_secs_)); + RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".HarmonicFrameRate", + harmonic_framerate_fps); + log_stream << uma_prefix << ".HarmonicFrameRate " + << harmonic_framerate_fps << "\n"; + } + } + RTC_LOG(LS_INFO) << log_stream.str(); +} + +void VideoQualityObserver::OnRenderedFrame( + const VideoFrameMetaData& frame_meta) { + RTC_DCHECK_LE(last_frame_rendered_ms_, frame_meta.decode_timestamp.ms()); + RTC_DCHECK_LE(last_unfreeze_time_ms_, frame_meta.decode_timestamp.ms()); + + if (num_frames_rendered_ == 0) { + first_frame_rendered_ms_ = last_unfreeze_time_ms_ = + frame_meta.decode_timestamp.ms(); + } + + auto blocky_frame_it = blocky_frames_.find(frame_meta.rtp_timestamp); + + if (num_frames_rendered_ > 0) { + // Process inter-frame delay. + const int64_t interframe_delay_ms = + frame_meta.decode_timestamp.ms() - last_frame_rendered_ms_; + const double interframe_delays_secs = interframe_delay_ms / 1000.0; + + // Sum of squared inter frame intervals is used to calculate the harmonic + // frame rate metric. The metric aims to reflect overall experience related + // to smoothness of video playback and includes both freezes and pauses. 
+ sum_squared_interframe_delays_secs_ += + interframe_delays_secs * interframe_delays_secs; + + if (!is_paused_) { + render_interframe_delays_.AddSample(interframe_delay_ms); + + bool was_freeze = false; + if (render_interframe_delays_.Size() >= kMinFrameSamplesToDetectFreeze) { + const absl::optional avg_interframe_delay = + render_interframe_delays_.GetAverageRoundedDown(); + RTC_DCHECK(avg_interframe_delay); + was_freeze = interframe_delay_ms >= + std::max(3 * *avg_interframe_delay, + *avg_interframe_delay + kMinIncreaseForFreezeMs); + } + + if (was_freeze) { + freezes_durations_.Add(interframe_delay_ms); + smooth_playback_durations_.Add(last_frame_rendered_ms_ - + last_unfreeze_time_ms_); + last_unfreeze_time_ms_ = frame_meta.decode_timestamp.ms(); + } else { + // Count spatial metrics if there were no freeze. + time_in_resolution_ms_[current_resolution_] += interframe_delay_ms; + + if (is_last_frame_blocky_) { + time_in_blocky_video_ms_ += interframe_delay_ms; + } + } + } + } + + if (is_paused_) { + // If the stream was paused since the previous frame, do not count the + // pause toward smooth playback. Explicitly count the part before it and + // start the new smooth playback interval from this frame. 
+ is_paused_ = false; + if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) { + smooth_playback_durations_.Add(last_frame_rendered_ms_ - + last_unfreeze_time_ms_); + } + last_unfreeze_time_ms_ = frame_meta.decode_timestamp.ms(); + + if (num_frames_rendered_ > 0) { + pauses_durations_.Add(frame_meta.decode_timestamp.ms() - + last_frame_rendered_ms_); + } + } + + int64_t pixels = frame_meta.width * frame_meta.height; + if (pixels >= kPixelsInHighResolution) { + current_resolution_ = Resolution::High; + } else if (pixels >= kPixelsInMediumResolution) { + current_resolution_ = Resolution::Medium; + } else { + current_resolution_ = Resolution::Low; + } + + if (pixels < last_frame_pixels_) { + ++num_resolution_downgrades_; + } + + last_frame_pixels_ = pixels; + last_frame_rendered_ms_ = frame_meta.decode_timestamp.ms(); + + is_last_frame_blocky_ = blocky_frame_it != blocky_frames_.end(); + if (is_last_frame_blocky_) { + blocky_frames_.erase(blocky_frames_.begin(), ++blocky_frame_it); + } + + ++num_frames_rendered_; +} + +void VideoQualityObserver::OnDecodedFrame(uint32_t rtp_frame_timestamp, + absl::optional qp, + VideoCodecType codec) { + if (!qp) + return; + + absl::optional qp_blocky_threshold; + // TODO(ilnik): add other codec types when we have QP for them. + switch (codec) { + case kVideoCodecVP8: + qp_blocky_threshold = kBlockyQpThresholdVp8; + break; + case kVideoCodecVP9: + qp_blocky_threshold = kBlockyQpThresholdVp9; + break; + default: + qp_blocky_threshold = absl::nullopt; + } + + RTC_DCHECK(blocky_frames_.find(rtp_frame_timestamp) == blocky_frames_.end()); + + if (qp_blocky_threshold && *qp > *qp_blocky_threshold) { + // Cache blocky frame. Its duration will be calculated in render callback. 
+ if (blocky_frames_.size() > kMaxNumCachedBlockyFrames) { + RTC_LOG(LS_WARNING) << "Overflow of blocky frames cache."; + blocky_frames_.erase( + blocky_frames_.begin(), + std::next(blocky_frames_.begin(), kMaxNumCachedBlockyFrames / 2)); + } + + blocky_frames_.insert(rtp_frame_timestamp); + } +} + +void VideoQualityObserver::OnStreamInactive() { + is_paused_ = true; +} + +uint32_t VideoQualityObserver::NumFreezes() const { + return freezes_durations_.NumSamples(); +} + +uint32_t VideoQualityObserver::NumPauses() const { + return pauses_durations_.NumSamples(); +} + +uint32_t VideoQualityObserver::TotalFreezesDurationMs() const { + return freezes_durations_.Sum(kMinRequiredSamples).value_or(0); +} + +uint32_t VideoQualityObserver::TotalPausesDurationMs() const { + return pauses_durations_.Sum(kMinRequiredSamples).value_or(0); +} + +uint32_t VideoQualityObserver::TotalFramesDurationMs() const { + return last_frame_rendered_ms_ - first_frame_rendered_ms_; +} + +double VideoQualityObserver::SumSquaredFrameDurationsSec() const { + return sum_squared_interframe_delays_secs_; +} + +} // namespace internal +} // namespace webrtc diff --git a/video/video_quality_observer2.h b/video/video_quality_observer2.h new file mode 100644 index 0000000000..ed5a0b9f33 --- /dev/null +++ b/video/video_quality_observer2.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_QUALITY_OBSERVER2_H_ +#define VIDEO_VIDEO_QUALITY_OBSERVER2_H_ + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "rtc_base/numerics/moving_average.h" +#include "rtc_base/numerics/sample_counter.h" + +namespace webrtc { +namespace internal { +// Declared in video_receive_stream2.h. +struct VideoFrameMetaData; + +// Calculates spatial and temporal quality metrics and reports them to UMA +// stats. +class VideoQualityObserver { + public: + // Use either VideoQualityObserver::kBlockyQpThresholdVp8 or + // VideoQualityObserver::kBlockyQpThresholdVp9. + VideoQualityObserver(); + ~VideoQualityObserver() = default; + + void OnDecodedFrame(uint32_t rtp_frame_timestamp, + absl::optional qp, + VideoCodecType codec); + + void OnRenderedFrame(const VideoFrameMetaData& frame_meta); + + void OnStreamInactive(); + + uint32_t NumFreezes() const; + uint32_t NumPauses() const; + uint32_t TotalFreezesDurationMs() const; + uint32_t TotalPausesDurationMs() const; + uint32_t TotalFramesDurationMs() const; + double SumSquaredFrameDurationsSec() const; + + // Set |screenshare| to true if the last decoded frame was for screenshare. + void UpdateHistograms(bool screenshare); + + static const uint32_t kMinFrameSamplesToDetectFreeze; + static const uint32_t kMinIncreaseForFreezeMs; + static const uint32_t kAvgInterframeDelaysWindowSizeFrames; + + private: + enum Resolution { + Low = 0, + Medium = 1, + High = 2, + }; + + int64_t last_frame_rendered_ms_; + int64_t num_frames_rendered_; + int64_t first_frame_rendered_ms_; + int64_t last_frame_pixels_; + bool is_last_frame_blocky_; + // Decoded timestamp of the last delayed frame. 
+ int64_t last_unfreeze_time_ms_; + rtc::MovingAverage render_interframe_delays_; + double sum_squared_interframe_delays_secs_; + // An inter-frame delay is counted as a freeze if it's significantly longer + // than average inter-frame delay. + rtc::SampleCounter freezes_durations_; + rtc::SampleCounter pauses_durations_; + // Time between freezes. + rtc::SampleCounter smooth_playback_durations_; + // Counters for time spent in different resolutions. Time between each two + // Consecutive frames is counted to bin corresponding to the first frame + // resolution. + std::vector time_in_resolution_ms_; + // Resolution of the last decoded frame. Resolution enum is used as an index. + Resolution current_resolution_; + int num_resolution_downgrades_; + // Similar to resolution, time spent in high-QP video. + int64_t time_in_blocky_video_ms_; + bool is_paused_; + + // Set of decoded frames with high QP value. + std::set blocky_frames_; +}; + +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_VIDEO_QUALITY_OBSERVER2_H_ diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc index 77dff46bde..a58aa1f33f 100644 --- a/video/video_quality_test.cc +++ b/video/video_quality_test.cc @@ -11,6 +11,10 @@ #include +#if defined(WEBRTC_WIN) +#include +#endif + #include #include #include @@ -27,6 +31,7 @@ #include "api/video_codecs/video_encoder.h" #include "call/fake_network_pipe.h" #include "call/simulated_network.h" +#include "media/base/media_constants.h" #include "media/engine/adm_helpers.h" #include "media/engine/encoder_simulcast_proxy.h" #include "media/engine/fake_video_codec_factory.h" @@ -43,7 +48,6 @@ #include "rtc_base/strings/string_builder.h" #include "rtc_base/task_queue_for_test.h" #include "test/platform_video_capturer.h" -#include "test/run_loop.h" #include "test/testsupport/file_utils.h" #include "test/video_renderer.h" #include "video/frame_dumping_decoder.h" @@ -234,8 +238,7 @@ class QualityTestVideoEncoder : public VideoEncoder, 
private: // Implement EncodedImageCallback Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { + const CodecSpecificInfo* codec_specific_info) override { if (codec_specific_info) { int simulcast_index; if (codec_specific_info->codecType == kVideoCodecVP9) { @@ -254,8 +257,7 @@ class QualityTestVideoEncoder : public VideoEncoder, } } - return callback_->OnEncodedImage(encoded_image, codec_specific_info, - fragmentation); + return callback_->OnEncodedImage(encoded_image, codec_specific_info); } void OnDroppedFrame(DropReason reason) override { @@ -270,6 +272,29 @@ class QualityTestVideoEncoder : public VideoEncoder, VideoCodec codec_settings_; }; +#if defined(WEBRTC_WIN) && !defined(WINUWP) +void PressEnterToContinue(TaskQueueBase* task_queue) { + puts(">> Press ENTER to continue..."); + + while (!_kbhit() || _getch() != '\r') { + // Drive the message loop for the thread running the task_queue + SendTask(RTC_FROM_HERE, task_queue, [&]() { + MSG msg; + if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + }); + } +} +#else +void PressEnterToContinue(TaskQueueBase* /*task_queue*/) { + puts(">> Press ENTER to continue..."); + while (getc(stdin) != '\n' && !feof(stdin)) + ; // NOLINT +} +#endif + } // namespace std::unique_ptr VideoQualityTest::CreateVideoDecoder( @@ -407,58 +432,6 @@ VideoQualityTest::VideoQualityTest( std::move(injection_components_->network_controller_factory); } -VideoQualityTest::Params::Params() - : call({false, false, BitrateConstraints(), 0}), - video{{false, - 640, - 480, - 30, - 50, - 800, - 800, - false, - "VP8", - 1, - -1, - 0, - false, - false, - false, - "", - 0, - {}, - 0.0}, - {false, - 640, - 480, - 30, - 50, - 800, - 800, - false, - "VP8", - 1, - -1, - 0, - false, - false, - false, - "", - 0, - {}, - 0.0}}, - audio({false, false, false, false}), - screenshare{{false, false, 10, 
0}, {false, false, 10, 0}}, - analyzer({"", 0.0, 0.0, 0, "", ""}), - config(absl::nullopt), - ss{{std::vector(), 0, 0, -1, InterLayerPredMode::kOn, - std::vector()}, - {std::vector(), 0, 0, -1, InterLayerPredMode::kOn, - std::vector()}}, - logging({"", "", ""}) {} - -VideoQualityTest::Params::~Params() = default; - VideoQualityTest::InjectionComponents::InjectionComponents() = default; VideoQualityTest::InjectionComponents::~InjectionComponents() = default; @@ -526,10 +499,14 @@ void VideoQualityTest::CheckParamsAndInjectionComponents() { RTC_CHECK_GE(params_.video[video_idx].target_bitrate_bps, params_.video[video_idx].min_bitrate_bps); int selected_stream = params_.ss[video_idx].selected_stream; - int stream_tl = params_.ss[video_idx] - .streams[selected_stream] - .num_temporal_layers.value_or(1); - RTC_CHECK_LT(params_.video[video_idx].selected_tl, stream_tl); + if (params_.video[video_idx].selected_tl > -1) { + RTC_CHECK_LT(selected_stream, params_.ss[video_idx].streams.size()) + << "Can not use --selected_tl when --selected_stream is all streams"; + int stream_tl = params_.ss[video_idx] + .streams[selected_stream] + .num_temporal_layers.value_or(1); + RTC_CHECK_LT(params_.video[video_idx].selected_tl, stream_tl); + } RTC_CHECK_LE(params_.ss[video_idx].selected_stream, params_.ss[video_idx].streams.size()); for (const VideoStream& stream : params_.ss[video_idx].streams) { @@ -784,17 +761,9 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, } if (params_.call.generic_descriptor) { - // The generic descriptor is currently behind a field trial, so it needs - // to be set for this flag to have any effect. - // TODO(philipel): Remove this check when the experiment is removed. 
- RTC_CHECK(field_trial::IsEnabled("WebRTC-GenericDescriptor")); - video_send_configs_[video_idx].rtp.extensions.emplace_back( RtpExtension::kGenericFrameDescriptorUri00, kGenericFrameDescriptorExtensionId00); - video_send_configs_[video_idx].rtp.extensions.emplace_back( - RtpExtension::kGenericFrameDescriptorUri01, - kGenericFrameDescriptorExtensionId01); } video_send_configs_[video_idx].rtp.extensions.emplace_back( @@ -866,6 +835,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); vp9_settings.denoisingOn = false; vp9_settings.frameDroppingOn = false; + vp9_settings.automaticResizeOn = false; vp9_settings.numberOfTemporalLayers = static_cast( params_.video[video_idx].num_temporal_layers); vp9_settings.numberOfSpatialLayers = static_cast( @@ -888,9 +858,14 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp9_settings.numberOfSpatialLayers = static_cast(params_.ss[video_idx].num_spatial_layers); vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred; + vp9_settings.automaticResizeOn = false; video_encoder_configs_[video_idx].encoder_specific_settings = new rtc::RefCountedObject< VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(), + 1); + // Min bitrate will be enforced by spatial layer config instead. 
+ video_encoder_configs_[video_idx].simulcast_layers[0].min_bitrate_bps = 0; } else if (params_.video[video_idx].automatic_scaling) { if (params_.video[video_idx].codec == "VP8") { VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); @@ -900,12 +875,18 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } else if (params_.video[video_idx].codec == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); - vp9_settings.automaticResizeOn = true; + // Only enable quality scaler for single spatial layer. + vp9_settings.automaticResizeOn = + params_.ss[video_idx].num_spatial_layers == 1; video_encoder_configs_[video_idx].encoder_specific_settings = new rtc::RefCountedObject< VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); } else if (params_.video[video_idx].codec == "H264") { // Quality scaling is always on for H.264. + } else if (params_.video[video_idx].codec == cricket::kAv1CodecName) { + // TODO(bugs.webrtc.org/11404): Propagate the flag to + // aom_codec_enc_cfg_t::rc_resize_mode in Av1 encoder wrapper. + // Until then do nothing, specially do not crash. 
} else { RTC_NOTREACHED() << "Automatic scaling not supported for codec " << params_.video[video_idx].codec << ", stream " @@ -1127,6 +1108,11 @@ void VideoQualityTest::CreateCapturers() { static_cast(params_.video[video_idx].width), static_cast(params_.video[video_idx].height), test::FrameGeneratorInterface::OutputType::kI010, absl::nullopt); + } else if (params_.video[video_idx].clip_path == "GeneratorNV12") { + frame_generator = test::CreateSquareFrameGenerator( + static_cast(params_.video[video_idx].width), + static_cast(params_.video[video_idx].height), + test::FrameGeneratorInterface::OutputType::kNV12, absl::nullopt); } else if (params_.video[video_idx].clip_path.empty()) { video_sources_[video_idx] = test::CreateVideoCapturer( params_.video[video_idx].width, params_.video[video_idx].height, @@ -1279,6 +1265,9 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) { is_quick_test_enabled ? kFramesSentInQuickTest : params_.analyzer.test_durations_secs * params_.video[0].fps, + is_quick_test_enabled + ? TimeDelta::Millis(1) + : TimeDelta::Seconds(params_.analyzer.test_durations_secs), graph_data_output_file, graph_title, kVideoSendSsrcs[params_.ss[0].selected_stream], kSendRtxSsrcs[params_.ss[0].selected_stream], @@ -1359,8 +1348,8 @@ rtc::scoped_refptr VideoQualityTest::CreateAudioDevice() { // CO_E_NOTINITIALIZED otherwise. The legacy ADM for Windows used internal // COM initialization but the new ADM requires COM to be initialized // externally. 
- com_initializer_ = std::make_unique( - webrtc_win::ScopedCOMInitializer::kMTA); + com_initializer_ = + std::make_unique(ScopedCOMInitializer::kMTA); RTC_CHECK(com_initializer_->Succeeded()); RTC_CHECK(webrtc_win::core_audio_utility::IsSupported()); RTC_CHECK(webrtc_win::core_audio_utility::IsMMCSSSupported()); @@ -1562,7 +1551,7 @@ void VideoQualityTest::RunWithRenderers(const Params& params) { Start(); }); - test::PressEnterToContinue(task_queue()); + PressEnterToContinue(task_queue()); SendTask(RTC_FROM_HERE, task_queue(), [&]() { Stop(); diff --git a/video/video_quality_test.h b/video/video_quality_test.h index 2177830794..f49ce385b6 100644 --- a/video/video_quality_test.h +++ b/video/video_quality_test.h @@ -30,6 +30,7 @@ #include "video/video_analyzer.h" #ifdef WEBRTC_WIN #include "modules/audio_device/win/core_audio_utility_win.h" +#include "rtc_base/win/scoped_com_initializer.h" #endif namespace webrtc { @@ -137,7 +138,7 @@ class VideoQualityTest : public test::CallTest, #ifdef WEBRTC_WIN // Windows Core Audio based ADM needs to run on a COM initialized thread. // Only referenced in combination with --audio --use_real_adm flags. 
- std::unique_ptr com_initializer_; + std::unique_ptr com_initializer_; #endif }; diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc index 7f68f76d2e..9431cd82ea 100644 --- a/video/video_receive_stream.cc +++ b/video/video_receive_stream.cc @@ -101,7 +101,7 @@ class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { EncodedResolution resolution() const override { return resolution_; } Timestamp render_time() const override { - return Timestamp::ms(render_time_ms_); + return Timestamp::Millis(render_time_ms_); } private: @@ -115,9 +115,6 @@ class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { VideoCodec codec; - memset(&codec, 0, sizeof(codec)); - - codec.plType = decoder.payload_type; codec.codecType = PayloadStringToCodecType(decoder.video_format.name); if (codec.codecType == kVideoCodecVP8) { @@ -134,6 +131,11 @@ VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { associated_codec.codecType = kVideoCodecMultiplex; return associated_codec; } +#ifndef DISABLE_H265 + else if (codec.codecType == kVideoCodecH265) { + *(codec.H265()) = VideoEncoder::GetDefaultH265Settings(); + } +#endif codec.width = 320; codec.height = 180; @@ -211,11 +213,13 @@ VideoReceiveStream::VideoReceiveStream( &config_, rtp_receive_statistics_.get(), &stats_proxy_, + &stats_proxy_, process_thread_, this, // NackSender nullptr, // Use default KeyFrameRequestSender this, // OnCompleteFrameCallback - config_.frame_decryptor), + config_.frame_decryptor, + config_.frame_transformer), rtp_stream_sync_(this), max_wait_for_keyframe_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() .MaxWaitForKeyframeMs() @@ -223,6 +227,9 @@ VideoReceiveStream::VideoReceiveStream( max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() .MaxWaitForFrameMs() .value_or(kMaxWaitForFrameMs)), +#ifndef DISABLE_RECORDER + recorder_(nullptr), 
+#endif decode_queue_(task_queue_factory_->CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { @@ -236,9 +243,9 @@ VideoReceiveStream::VideoReceiveStream( network_sequence_checker_.Detach(); RTC_DCHECK(!config_.decoders.empty()); + RTC_CHECK(config_.decoder_factory); std::set decoder_payload_types; for (const Decoder& decoder : config_.decoders) { - RTC_CHECK(decoder.decoder_factory); RTC_CHECK(decoder_payload_types.find(decoder.payload_type) == decoder_payload_types.end()) << "Duplicate payload type (" << decoder.payload_type @@ -317,8 +324,6 @@ void VideoReceiveStream::Start() { const bool protected_by_fec = config_.rtp.protected_by_flexfec || rtp_video_stream_receiver_.IsUlpfecEnabled(); - frame_buffer_->Start(); - if (rtp_video_stream_receiver_.IsRetransmissionsEnabled() && protected_by_fec) { frame_buffer_->SetProtectionMode(kProtectionNackFEC); @@ -336,7 +341,7 @@ void VideoReceiveStream::Start() { for (const Decoder& decoder : config_.decoders) { std::unique_ptr video_decoder = - decoder.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format, + config_.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format, config_.stream_id); // If we still have no valid decoder, we have to create a "Null" decoder // that ignores all calls. 
The reason we can get into this state is that the @@ -372,11 +377,12 @@ void VideoReceiveStream::Start() { VideoCodec codec = CreateDecoderVideoCodec(decoder); const bool raw_payload = - config_.rtp.raw_payload_types.count(codec.plType) > 0; - rtp_video_stream_receiver_.AddReceiveCodec( - codec, decoder.video_format.parameters, raw_payload); + config_.rtp.raw_payload_types.count(decoder.payload_type) > 0; + rtp_video_stream_receiver_.AddReceiveCodec(decoder.payload_type, codec, + decoder.video_format.parameters, + raw_payload); RTC_CHECK_EQ(VCM_OK, video_receiver_.RegisterReceiveCodec( - &codec, num_cpu_cores_, false)); + decoder.payload_type, &codec, num_cpu_cores_)); } RTC_DCHECK(renderer != nullptr); @@ -492,7 +498,7 @@ bool VideoReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) { return false; } - rtc::CritScope cs(&playout_delay_lock_); + MutexLock lock(&playout_delay_lock_); base_minimum_playout_delay_ms_ = delay_ms; UpdatePlayoutDelays(); return true; @@ -501,7 +507,7 @@ bool VideoReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) { int VideoReceiveStream::GetBaseMinimumPlayoutDelayMs() const { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - rtc::CritScope cs(&playout_delay_lock_); + MutexLock lock(&playout_delay_lock_); return base_minimum_playout_delay_ms_; } @@ -534,6 +540,12 @@ void VideoReceiveStream::SetFrameDecryptor( rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor)); } +void VideoReceiveStream::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer( + std::move(frame_transformer)); +} + void VideoReceiveStream::SendNack(const std::vector& sequence_numbers, bool buffering_allowed) { RTC_DCHECK(buffering_allowed); @@ -556,15 +568,15 @@ void VideoReceiveStream::OnCompleteFrame( } last_complete_frame_time_ms_ = time_now_ms; - const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; + const 
VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; if (playout_delay.min_ms >= 0) { - rtc::CritScope cs(&playout_delay_lock_); + MutexLock lock(&playout_delay_lock_); frame_minimum_playout_delay_ms_ = playout_delay.min_ms; UpdatePlayoutDelays(); } if (playout_delay.max_ms >= 0) { - rtc::CritScope cs(&playout_delay_lock_); + MutexLock lock(&playout_delay_lock_); frame_maximum_playout_delay_ms_ = playout_delay.max_ms; UpdatePlayoutDelays(); } @@ -580,7 +592,7 @@ void VideoReceiveStream::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms); } -int VideoReceiveStream::id() const { +uint32_t VideoReceiveStream::id() const { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); return config_.rtp.remote_ssrc; } @@ -609,11 +621,12 @@ void VideoReceiveStream::SetEstimatedPlayoutNtpTimestampMs( RTC_NOTREACHED(); } -void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { +bool VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); - rtc::CritScope cs(&playout_delay_lock_); + MutexLock lock(&playout_delay_lock_); syncable_minimum_playout_delay_ms_ = delay_ms; UpdatePlayoutDelays(); + return true; } int64_t VideoReceiveStream::GetWaitMs() const { @@ -647,6 +660,16 @@ void VideoReceiveStream::HandleEncodedFrame( std::unique_ptr frame) { int64_t now_ms = clock_->TimeInMilliseconds(); +#ifndef DISABLE_RECORDER + { + webrtc::MutexLock lock(&recorder_mutex_); + if (recorder_) { + EncodedImage image = frame->EncodedImage(); + recorder_->AddVideoFrame(&image, frame->CodecSpecific()->codecType); + } + } +#endif + // Current OnPreDecode only cares about QP for VP8. 
int qp = -1; if (frame->CodecSpecific()->codecType == kVideoCodecVP8) { @@ -785,5 +808,21 @@ void VideoReceiveStream::GenerateKeyFrame() { }); } +#ifndef DISABLE_RECORDER +void VideoReceiveStream::InjectRecorder(Recorder* recorder) { + char log_buf[16]; + snprintf(log_buf, sizeof(log_buf) - 1, "%p", recorder); + RTC_LOG(LS_INFO) << "VideoReceiveStream::InjectRecorder " << log_buf; + { + webrtc::MutexLock lock(&recorder_mutex_); + recorder_ = recorder; + } + + if (recorder) { + GenerateKeyFrame(); + } +} +#endif + } // namespace internal } // namespace webrtc diff --git a/video/video_receive_stream.h b/video/video_receive_stream.h index f097710630..1cbf37e4a5 100644 --- a/video/video_receive_stream.h +++ b/video/video_receive_stream.h @@ -15,7 +15,6 @@ #include #include "api/task_queue/task_queue_factory.h" -#include "api/transport/media/media_transport_interface.h" #include "api/video/recordable_encoded_frame.h" #include "call/rtp_packet_sink_interface.h" #include "call/syncable.h" @@ -24,6 +23,7 @@ #include "modules/rtp_rtcp/source/source_tracker.h" #include "modules/video_coding/frame_buffer2.h" #include "modules/video_coding/video_receiver2.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" #include "system_wrappers/include/clock.h" @@ -37,7 +37,6 @@ namespace webrtc { class CallStats; class ProcessThread; -class RTPFragmentationHeader; class RtpStreamReceiverInterface; class RtpStreamReceiverControllerInterface; class RtxReceiveStream; @@ -99,6 +98,8 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, void SetFrameDecryptor( rtc::scoped_refptr frame_decryptor) override; + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) override; // Implements rtc::VideoSinkInterface. 
void OnFrame(const VideoFrame& video_frame) override; @@ -117,7 +118,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; // Implements Syncable. - int id() const override; + uint32_t id() const override; absl::optional GetInfo() const override; bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const override; @@ -125,7 +126,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, int64_t time_ms) override; // SetMinimumPlayoutDelay is only called by A/V sync. - void SetMinimumPlayoutDelay(int delay_ms) override; + bool SetMinimumPlayoutDelay(int delay_ms) override; std::vector GetSources() const override; @@ -133,6 +134,10 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, bool generate_key_frame) override; void GenerateKeyFrame() override; +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder) override; +#endif + private: int64_t GetWaitMs() const; void StartNextDecode() RTC_RUN_ON(decode_queue_); @@ -204,7 +209,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, const int max_wait_for_keyframe_ms_; const int max_wait_for_frame_ms_; - rtc::CriticalSection playout_delay_lock_; + mutable Mutex playout_delay_lock_; // All of them tries to change current min_playout_delay on |timing_| but // source of the change request is different in each case. Among them the @@ -227,6 +232,11 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, // Set to true while we're requesting keyframes but not yet received one. bool keyframe_generation_requested_ RTC_GUARDED_BY(decode_queue_) = false; +#ifndef DISABLE_RECORDER + mutable webrtc::Mutex recorder_mutex_; + Recorder* recorder_ RTC_GUARDED_BY(recorder_mutex_); +#endif + // Defined last so they are destroyed before all other members. 
rtc::TaskQueue decode_queue_; }; diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc new file mode 100644 index 0000000000..19ddfb82b9 --- /dev/null +++ b/video/video_receive_stream2.cc @@ -0,0 +1,890 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/video_receive_stream2.h" + +#include +#include + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/video/encoded_image.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "call/rtp_stream_receiver_controller_interface.h" +#include "call/rtx_receive_stream.h" +#include "common_video/include/incoming_video_stream.h" +#include "media/base/h264_profile_level_id.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/timing.h" +#include "modules/video_coding/utility/vp8_header_parser.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/keyframe_interval_settings.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/thread_registry.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" +#include "system_wrappers/include/clock.h" 
+#include "system_wrappers/include/field_trial.h" +#include "video/call_stats2.h" +#include "video/frame_dumping_decoder.h" +#include "video/receive_statistics_proxy2.h" + +namespace webrtc { + +namespace internal { +constexpr int VideoReceiveStream2::kMaxWaitForKeyFrameMs; + +namespace { + +using video_coding::EncodedFrame; +using ReturnReason = video_coding::FrameBuffer::ReturnReason; + +constexpr int kMinBaseMinimumDelayMs = 0; +constexpr int kMaxBaseMinimumDelayMs = 10000; + +constexpr int kMaxWaitForFrameMs = 3000; + +// Concrete instance of RecordableEncodedFrame wrapping needed content +// from video_coding::EncodedFrame. +class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { + public: + explicit WebRtcRecordableEncodedFrame(const EncodedFrame& frame) + : buffer_(frame.GetEncodedData()), + render_time_ms_(frame.RenderTime()), + codec_(frame.CodecSpecific()->codecType), + is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey), + resolution_{frame.EncodedImage()._encodedWidth, + frame.EncodedImage()._encodedHeight} { + if (frame.ColorSpace()) { + color_space_ = *frame.ColorSpace(); + } + } + + // VideoEncodedSinkInterface::FrameBuffer + rtc::scoped_refptr encoded_buffer() + const override { + return buffer_; + } + + absl::optional color_space() const override { + return color_space_; + } + + VideoCodecType codec() const override { return codec_; } + + bool is_key_frame() const override { return is_key_frame_; } + + EncodedResolution resolution() const override { return resolution_; } + + Timestamp render_time() const override { + return Timestamp::Millis(render_time_ms_); + } + + private: + rtc::scoped_refptr buffer_; + int64_t render_time_ms_; + VideoCodecType codec_; + bool is_key_frame_; + EncodedResolution resolution_; + absl::optional color_space_; +}; + +VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { + VideoCodec codec; + codec.codecType = PayloadStringToCodecType(decoder.video_format.name); + 
+ if (codec.codecType == kVideoCodecVP8) { + *(codec.VP8()) = VideoEncoder::GetDefaultVp8Settings(); + } else if (codec.codecType == kVideoCodecVP9) { + *(codec.VP9()) = VideoEncoder::GetDefaultVp9Settings(); + } else if (codec.codecType == kVideoCodecH264) { + *(codec.H264()) = VideoEncoder::GetDefaultH264Settings(); + } else if (codec.codecType == kVideoCodecMultiplex) { + VideoReceiveStream::Decoder associated_decoder = decoder; + associated_decoder.video_format = + SdpVideoFormat(CodecTypeToPayloadString(kVideoCodecVP9)); + VideoCodec associated_codec = CreateDecoderVideoCodec(associated_decoder); + associated_codec.codecType = kVideoCodecMultiplex; + return associated_codec; + } + + FieldTrialOptional width("w"); + FieldTrialOptional height("h"); + ParseFieldTrial( + {&width, &height}, + field_trial::FindFullName("WebRTC-Video-InitialDecoderResolution")); + if (width && height) { + codec.width = width.Value(); + codec.height = height.Value(); + } else { + codec.width = 320; + codec.height = 180; + } + + const int kDefaultStartBitrate = 300; + codec.startBitrate = codec.minBitrate = codec.maxBitrate = + kDefaultStartBitrate; + + return codec; +} + +// Video decoder class to be used for unknown codecs. Doesn't support decoding +// but logs messages to LS_ERROR. 
+class NullVideoDecoder : public webrtc::VideoDecoder { + public: + int32_t InitDecode(const webrtc::VideoCodec* codec_settings, + int32_t number_of_cores) override { + RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t Decode(const webrtc::EncodedImage& input_image, + bool missing_frames, + int64_t render_time_ms) override { + RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t RegisterDecodeCompleteCallback( + webrtc::DecodedImageCallback* callback) override { + RTC_LOG(LS_ERROR) + << "Can't register decode complete callback on NullVideoDecoder."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; } + + const char* ImplementationName() const override { return "NullVideoDecoder"; } +}; + +// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. +// Maximum time between frames before resetting the FrameBuffer to avoid RTP +// timestamps wraparound to affect FrameBuffer. +constexpr int kInactiveStreamThresholdMs = 600000; // 10 minutes. 
+ +} // namespace + +VideoReceiveStream2::VideoReceiveStream2( + TaskQueueFactory* task_queue_factory, + TaskQueueBase* current_queue, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock, + VCMTiming* timing) + : task_queue_factory_(task_queue_factory), + transport_adapter_(config.rtcp_send_transport), + config_(std::move(config)), + num_cpu_cores_(num_cpu_cores), + worker_thread_(current_queue), + clock_(clock), + call_stats_(call_stats), + source_tracker_(clock_), + stats_proxy_(&config_, clock_, worker_thread_), + rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), + timing_(timing), + video_receiver_(clock_, timing_.get()), + rtp_video_stream_receiver_(worker_thread_, + clock_, + &transport_adapter_, + call_stats->AsRtcpRttStats(), + packet_router, + &config_, + rtp_receive_statistics_.get(), + &stats_proxy_, + &stats_proxy_, + process_thread, + this, // NackSender + nullptr, // Use default KeyFrameRequestSender + this, // OnCompleteFrameCallback + config_.frame_decryptor, + config_.frame_transformer), + rtp_stream_sync_(current_queue, this), + max_wait_for_keyframe_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() + .MaxWaitForKeyframeMs() + .value_or(kMaxWaitForKeyFrameMs)), + max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() + .MaxWaitForFrameMs() + .value_or(kMaxWaitForFrameMs)), + low_latency_renderer_enabled_("enabled", true), + low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", + true), +#ifndef DISABLE_RECORDER + recorder_(nullptr), +#endif + decode_queue_(task_queue_factory_->CreateTaskQueue( + "DecodingQueue", + TaskQueueFactory::Priority::HIGH)) { + RTC_LOG(LS_INFO) << "VideoReceiveStream2: " << config_.ToString(); + + RTC_DCHECK(worker_thread_); + RTC_DCHECK(config_.renderer); + RTC_DCHECK(call_stats_); + + 
module_process_sequence_checker_.Detach(); + + RTC_DCHECK(!config_.decoders.empty()); + RTC_CHECK(config_.decoder_factory); + std::set decoder_payload_types; + for (const Decoder& decoder : config_.decoders) { + RTC_CHECK(decoder_payload_types.find(decoder.payload_type) == + decoder_payload_types.end()) + << "Duplicate payload type (" << decoder.payload_type + << ") for different decoders."; + decoder_payload_types.insert(decoder.payload_type); + } + + timing_->set_render_delay(config_.render_delay_ms); + + frame_buffer_.reset( + new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_)); + + // Register with RtpStreamReceiverController. + media_receiver_ = receiver_controller->CreateReceiver( + config_.rtp.remote_ssrc, &rtp_video_stream_receiver_); + if (config_.rtp.rtx_ssrc) { + rtx_receive_stream_ = std::make_unique( + &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types, + config_.rtp.remote_ssrc, rtp_receive_statistics_.get()); + rtx_receiver_ = receiver_controller->CreateReceiver( + config_.rtp.rtx_ssrc, rtx_receive_stream_.get()); + } else { + rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc, + true); + } + + ParseFieldTrial({&low_latency_renderer_enabled_, + &low_latency_renderer_include_predecode_buffer_}, + field_trial::FindFullName("WebRTC-LowLatencyRenderer")); +} + +VideoReceiveStream2::~VideoReceiveStream2() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RTC_LOG(LS_INFO) << "~VideoReceiveStream2: " << config_.ToString(); + Stop(); +} + +void VideoReceiveStream2::SignalNetworkState(NetworkState state) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_video_stream_receiver_.SignalNetworkState(state); +} + +bool VideoReceiveStream2::DeliverRtcp(const uint8_t* packet, size_t length) { + return rtp_video_stream_receiver_.DeliverRtcp(packet, length); +} + +void VideoReceiveStream2::SetSync(Syncable* audio_syncable) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + 
rtp_stream_sync_.ConfigureSync(audio_syncable); +} + +void VideoReceiveStream2::Start() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + if (decoder_running_) { + return; + } + + const bool protected_by_fec = config_.rtp.protected_by_flexfec || + rtp_video_stream_receiver_.IsUlpfecEnabled(); + + if (rtp_video_stream_receiver_.IsRetransmissionsEnabled() && + protected_by_fec) { + frame_buffer_->SetProtectionMode(kProtectionNackFEC); + } + + transport_adapter_.Enable(); + rtc::VideoSinkInterface* renderer = nullptr; + if (config_.enable_prerenderer_smoothing) { + incoming_video_stream_.reset(new IncomingVideoStream( + task_queue_factory_, config_.render_delay_ms, this)); + renderer = incoming_video_stream_.get(); + } else { + renderer = this; + } + + for (const Decoder& decoder : config_.decoders) { + std::unique_ptr video_decoder = + config_.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format, + config_.stream_id); + // If we still have no valid decoder, we have to create a "Null" decoder + // that ignores all calls. The reason we can get into this state is that the + // old decoder factory interface doesn't have a way to query supported + // codecs. + if (!video_decoder) { + video_decoder = std::make_unique(); + } + + std::string decoded_output_file = + field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory"); + // Because '/' can't be used inside a field trial parameter, we use ';' + // instead. + // This is only relevant to WebRTC-DecoderDataDumpDirectory + // field trial. ';' is chosen arbitrary. Even though it's a legal character + // in some file systems, we can sacrifice ability to use it in the path to + // dumped video, since it's developers-only feature for debugging. 
+ absl::c_replace(decoded_output_file, ';', '/'); + if (!decoded_output_file.empty()) { + char filename_buffer[256]; + rtc::SimpleStringBuilder ssb(filename_buffer); + ssb << decoded_output_file << "/webrtc_receive_stream_" + << this->config_.rtp.remote_ssrc << "-" << rtc::TimeMicros() + << ".ivf"; + video_decoder = CreateFrameDumpingDecoderWrapper( + std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); + } + + video_decoders_.push_back(std::move(video_decoder)); + + video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(), + decoder.payload_type); + VideoCodec codec = CreateDecoderVideoCodec(decoder); + + const bool raw_payload = + config_.rtp.raw_payload_types.count(decoder.payload_type) > 0; + rtp_video_stream_receiver_.AddReceiveCodec(decoder.payload_type, codec, + decoder.video_format.parameters, + raw_payload); + RTC_CHECK_EQ(VCM_OK, video_receiver_.RegisterReceiveCodec( + decoder.payload_type, &codec, num_cpu_cores_)); + } + + RTC_DCHECK(renderer != nullptr); + video_stream_decoder_.reset( + new VideoStreamDecoder(&video_receiver_, &stats_proxy_, renderer)); + + // Make sure we register as a stats observer *after* we've prepared the + // |video_stream_decoder_|. + call_stats_->RegisterStatsObserver(this); + + // Start decoding on task queue. 
+ video_receiver_.DecoderThreadStarting(); + stats_proxy_.DecoderThreadStarting(); + decode_queue_.PostTask([this] { + RTC_DCHECK_RUN_ON(&decode_queue_); + decoder_stopped_ = false; + StartNextDecode(); + }); + decoder_running_ = true; + rtp_video_stream_receiver_.StartReceive(); +} + +void VideoReceiveStream2::Stop() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_video_stream_receiver_.StopReceive(); + + stats_proxy_.OnUniqueFramesCounted( + rtp_video_stream_receiver_.GetUniqueFramesSeen()); + + decode_queue_.PostTask([this] { frame_buffer_->Stop(); }); + + call_stats_->DeregisterStatsObserver(this); + + if (decoder_running_) { + rtc::Event done; + decode_queue_.PostTask([this, &done] { + RTC_DCHECK_RUN_ON(&decode_queue_); + decoder_stopped_ = true; + done.Set(); + }); + done.Wait(rtc::Event::kForever); + + decoder_running_ = false; + video_receiver_.DecoderThreadStopped(); + stats_proxy_.DecoderThreadStopped(); + // Deregister external decoders so they are no longer running during + // destruction. This effectively stops the VCM since the decoder thread is + // stopped, the VCM is deregistered and no asynchronous decoder threads are + // running. 
+ for (const Decoder& decoder : config_.decoders) + video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type); + + UpdateHistograms(); + } + + video_stream_decoder_.reset(); + incoming_video_stream_.reset(); + transport_adapter_.Disable(); +} + +VideoReceiveStream::Stats VideoReceiveStream2::GetStats() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + VideoReceiveStream2::Stats stats = stats_proxy_.GetStats(); + stats.total_bitrate_bps = 0; + StreamStatistician* statistician = + rtp_receive_statistics_->GetStatistician(stats.ssrc); + if (statistician) { + stats.rtp_stats = statistician->GetStats(); + stats.total_bitrate_bps = statistician->BitrateReceived(); + } + if (config_.rtp.rtx_ssrc) { + StreamStatistician* rtx_statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + if (rtx_statistician) + stats.total_bitrate_bps += rtx_statistician->BitrateReceived(); + } + return stats; +} + +void VideoReceiveStream2::UpdateHistograms() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + absl::optional fraction_lost; + StreamDataCounters rtp_stats; + StreamStatistician* statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.remote_ssrc); + if (statistician) { + fraction_lost = statistician->GetFractionLostInPercent(); + rtp_stats = statistician->GetReceiveStreamDataCounters(); + } + if (config_.rtp.rtx_ssrc) { + StreamStatistician* rtx_statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + if (rtx_statistician) { + StreamDataCounters rtx_stats = + rtx_statistician->GetReceiveStreamDataCounters(); + stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, &rtx_stats); + return; + } + } + stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr); +} + +void VideoReceiveStream2::AddSecondarySink(RtpPacketSinkInterface* sink) { + rtp_video_stream_receiver_.AddSecondarySink(sink); +} + +void VideoReceiveStream2::RemoveSecondarySink( + const RtpPacketSinkInterface* sink) { + 
rtp_video_stream_receiver_.RemoveSecondarySink(sink); +} + +bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) { + return false; + } + + base_minimum_playout_delay_ms_ = delay_ms; + UpdatePlayoutDelays(); + return true; +} + +int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + return base_minimum_playout_delay_ms_; +} + +void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { + VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); + + worker_thread_->PostTask( + ToQueuedTask(task_safety_, [frame_meta, this]() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + int64_t video_playout_ntp_ms; + int64_t sync_offset_ms; + double estimated_freq_khz; + if (rtp_stream_sync_.GetStreamSyncOffsetInMs( + frame_meta.rtp_timestamp, frame_meta.render_time_ms(), + &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) { + stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms, + estimated_freq_khz); + } + stats_proxy_.OnRenderedFrame(frame_meta); + })); + + source_tracker_.OnFrameDelivered(video_frame.packet_infos()); + config_.renderer->OnFrame(video_frame); +} + +void VideoReceiveStream2::SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) { + rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor)); +} + +void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer( + std::move(frame_transformer)); +} + +void VideoReceiveStream2::SendNack( + const std::vector& sequence_numbers, + bool buffering_allowed) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RTC_DCHECK(buffering_allowed); + rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers); +} + +void 
VideoReceiveStream2::RequestKeyFrame(int64_t timestamp_ms) { + // Running on worker_sequence_checker_. + // Called from RtpVideoStreamReceiver (rtp_video_stream_receiver_ is + // ultimately responsible). + rtp_video_stream_receiver_.RequestKeyFrame(); + decode_queue_.PostTask([this, timestamp_ms]() { + RTC_DCHECK_RUN_ON(&decode_queue_); + last_keyframe_request_ms_ = timestamp_ms; + }); +} + +void VideoReceiveStream2::OnCompleteFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. + int64_t time_now_ms = clock_->TimeInMilliseconds(); + if (last_complete_frame_time_ms_ > 0 && + time_now_ms - last_complete_frame_time_ms_ > kInactiveStreamThresholdMs) { + frame_buffer_->Clear(); + } + last_complete_frame_time_ms_ = time_now_ms; + + const VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; + if (playout_delay.min_ms >= 0) { + frame_minimum_playout_delay_ms_ = playout_delay.min_ms; + UpdatePlayoutDelays(); + } + + if (playout_delay.max_ms >= 0) { + frame_maximum_playout_delay_ms_ = playout_delay.max_ms; + UpdatePlayoutDelays(); + } + + int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame)); + if (last_continuous_pid != -1) + rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid); +} + +void VideoReceiveStream2::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + frame_buffer_->UpdateRtt(max_rtt_ms); + rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms); + stats_proxy_.OnRttUpdate(avg_rtt_ms); +} + +uint32_t VideoReceiveStream2::id() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + return config_.rtp.remote_ssrc; +} + +absl::optional VideoReceiveStream2::GetInfo() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + absl::optional info = + rtp_video_stream_receiver_.GetSyncInfo(); + + if (!info) + return absl::nullopt; + + info->current_delay_ms = 
timing_->TargetVideoDelay(); + return info; +} + +bool VideoReceiveStream2::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, + int64_t* time_ms) const { + RTC_NOTREACHED(); + return 0; +} + +void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs( + int64_t ntp_timestamp_ms, + int64_t time_ms) { + RTC_NOTREACHED(); +} + +bool VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + syncable_minimum_playout_delay_ms_ = delay_ms; + UpdatePlayoutDelays(); + return true; +} + +int64_t VideoReceiveStream2::GetMaxWaitMs() const { + return keyframe_required_ ? max_wait_for_keyframe_ms_ + : max_wait_for_frame_ms_; +} + +void VideoReceiveStream2::StartNextDecode() { + // Running on the decode thread. + TRACE_EVENT0("webrtc", "VideoReceiveStream2::StartNextDecode"); + frame_buffer_->NextFrame( + GetMaxWaitMs(), keyframe_required_, &decode_queue_, + /* encoded frame handler */ + [this](std::unique_ptr frame, ReturnReason res) { + RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout); + RTC_DCHECK_EQ(frame != nullptr, res == ReturnReason::kFrameFound); + decode_queue_.PostTask([this, frame = std::move(frame)]() mutable { + RTC_DCHECK_RUN_ON(&decode_queue_); + if (decoder_stopped_) + return; + if (frame) { + HandleEncodedFrame(std::move(frame)); + } else { + int64_t now_ms = clock_->TimeInMilliseconds(); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + HandleFrameBufferTimeout(now_ms, wait_ms); + })); + } + StartNextDecode(); + }); + }); +} + +void VideoReceiveStream2::HandleEncodedFrame( + std::unique_ptr frame) { + // Running on |decode_queue_|. 
+ int64_t now_ms = clock_->TimeInMilliseconds(); + +#ifndef DISABLE_RECORDER + { + webrtc::MutexLock lock(&recorder_mutex_); + if (recorder_) { + EncodedImage image = frame->EncodedImage(); + recorder_->AddVideoFrame(&image, frame->CodecSpecific()->codecType); + } + } +#endif + + // Current OnPreDecode only cares about QP for VP8. + int qp = -1; + if (frame->CodecSpecific()->codecType == kVideoCodecVP8) { + if (!vp8::GetQp(frame->data(), frame->size(), &qp)) { + RTC_LOG(LS_WARNING) << "Failed to extract QP from VP8 video frame"; + } + } + stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp); + + bool force_request_key_frame = false; + int64_t decoded_frame_picture_id = -1; + + const bool keyframe_request_is_due = + now_ms >= (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_); + + int decode_result = video_receiver_.Decode(frame.get()); + if (decode_result == WEBRTC_VIDEO_CODEC_OK || + decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) { + keyframe_required_ = false; + frame_decoded_ = true; + + decoded_frame_picture_id = frame->id.picture_id; + + if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) + force_request_key_frame = true; + } else if (!frame_decoded_ || !keyframe_required_ || + keyframe_request_is_due) { + keyframe_required_ = true; + // TODO(philipel): Remove this keyframe request when downstream project + // has been fixed. 
+ force_request_key_frame = true; + } + + bool received_frame_is_keyframe = + frame->FrameType() == VideoFrameType::kVideoFrameKey; + + worker_thread_->PostTask(ToQueuedTask( + task_safety_, + [this, now_ms, received_frame_is_keyframe, force_request_key_frame, + decoded_frame_picture_id, keyframe_request_is_due]() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + if (decoded_frame_picture_id != -1) + rtp_video_stream_receiver_.FrameDecoded(decoded_frame_picture_id); + + HandleKeyFrameGeneration(received_frame_is_keyframe, now_ms, + force_request_key_frame, + keyframe_request_is_due); + })); + + if (encoded_frame_buffer_function_) { + frame->Retain(); + encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame)); + } +} + +void VideoReceiveStream2::HandleKeyFrameGeneration( + bool received_frame_is_keyframe, + int64_t now_ms, + bool always_request_key_frame, + bool keyframe_request_is_due) { + // Running on worker_sequence_checker_. + + bool request_key_frame = always_request_key_frame; + + // Repeat sending keyframe requests if we've requested a keyframe. + if (keyframe_generation_requested_) { + if (received_frame_is_keyframe) { + keyframe_generation_requested_ = false; + } else if (keyframe_request_is_due) { + if (!IsReceivingKeyFrame(now_ms)) { + request_key_frame = true; + } + } else { + // It hasn't been long enough since the last keyframe request, do nothing. + } + } + + if (request_key_frame) { + // HandleKeyFrameGeneration is initated from the decode thread - + // RequestKeyFrame() triggers a call back to the decode thread. + // Perhaps there's a way to avoid that. + RequestKeyFrame(now_ms); + } +} + +void VideoReceiveStream2::HandleFrameBufferTimeout(int64_t now_ms, + int64_t wait_ms) { + // Running on |worker_sequence_checker_|. 
+ absl::optional last_packet_ms = + rtp_video_stream_receiver_.LastReceivedPacketMs(); + + // To avoid spamming keyframe requests for a stream that is not active we + // check if we have received a packet within the last 5 seconds. + const bool stream_is_active = + last_packet_ms && now_ms - *last_packet_ms < 5000; + if (!stream_is_active) + stats_proxy_.OnStreamInactive(); + + if (stream_is_active && !IsReceivingKeyFrame(now_ms) && + (!config_.crypto_options.sframe.require_frame_encryption || + rtp_video_stream_receiver_.IsDecryptable())) { + RTC_LOG(LS_WARNING) << "No decodable frame in " << wait_ms + << " ms, requesting keyframe."; + RequestKeyFrame(now_ms); + } +} + +bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const { + // Running on worker_sequence_checker_. + absl::optional last_keyframe_packet_ms = + rtp_video_stream_receiver_.LastReceivedKeyframePacketMs(); + + // If we recently have been receiving packets belonging to a keyframe then + // we assume a keyframe is currently being received. + bool receiving_keyframe = + last_keyframe_packet_ms && + timestamp_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms_; + return receiving_keyframe; +} + +void VideoReceiveStream2::UpdatePlayoutDelays() const { + // Running on worker_sequence_checker_. + const int minimum_delay_ms = + std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_, + syncable_minimum_playout_delay_ms_}); + if (minimum_delay_ms >= 0) { + timing_->set_min_playout_delay(minimum_delay_ms); + if (frame_minimum_playout_delay_ms_ == 0 && + frame_maximum_playout_delay_ms_ > 0 && low_latency_renderer_enabled_) { + // TODO(kron): Estimate frame rate from video stream. + constexpr double kFrameRate = 60.0; + // Convert playout delay in ms to number of frames. 
+ int max_composition_delay_in_frames = std::lrint( + static_cast(frame_maximum_playout_delay_ms_ * kFrameRate) / + rtc::kNumMillisecsPerSec); + if (low_latency_renderer_include_predecode_buffer_) { + // Subtract frames in buffer. + max_composition_delay_in_frames = std::max( + max_composition_delay_in_frames - frame_buffer_->Size(), 0); + } + timing_->SetMaxCompositionDelayInFrames( + absl::make_optional(max_composition_delay_in_frames)); + } + } + + const int maximum_delay_ms = frame_maximum_playout_delay_ms_; + if (maximum_delay_ms >= 0) { + timing_->set_max_playout_delay(maximum_delay_ms); + } +} + +std::vector VideoReceiveStream2::GetSources() const { + return source_tracker_.GetSources(); +} + +VideoReceiveStream2::RecordingState +VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, + bool generate_key_frame) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtc::Event event; + + // Save old state, set the new state. + RecordingState old_state; + + decode_queue_.PostTask( + [this, &event, &old_state, callback = std::move(state.callback), + generate_key_frame, + last_keyframe_request = state.last_keyframe_request_ms.value_or(0)] { + RTC_DCHECK_RUN_ON(&decode_queue_); + old_state.callback = std::move(encoded_frame_buffer_function_); + encoded_frame_buffer_function_ = std::move(callback); + + old_state.last_keyframe_request_ms = last_keyframe_request_ms_; + last_keyframe_request_ms_ = generate_key_frame + ? 
clock_->TimeInMilliseconds() + : last_keyframe_request; + + event.Set(); + }); + + old_state.keyframe_needed = keyframe_generation_requested_; + + if (generate_key_frame) { + rtp_video_stream_receiver_.RequestKeyFrame(); + keyframe_generation_requested_ = true; + } else { + keyframe_generation_requested_ = state.keyframe_needed; + } + + event.Wait(rtc::Event::kForever); + return old_state; +} + +void VideoReceiveStream2::GenerateKeyFrame() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RequestKeyFrame(clock_->TimeInMilliseconds()); + keyframe_generation_requested_ = true; +} + +#ifndef DISABLE_RECORDER +void VideoReceiveStream2::InjectRecorder(Recorder* recorder) { + char log_buf[16]; + snprintf(log_buf, sizeof(log_buf) - 1, "%p", recorder); + RTC_LOG(LS_INFO) << "VideoReceiveStream::InjectRecorder " << log_buf; + { + webrtc::MutexLock lock(&recorder_mutex_); + recorder_ = recorder; + } + + if (recorder) { + GenerateKeyFrame(); + } +} +#endif + +} // namespace internal +} // namespace webrtc diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h new file mode 100644 index 0000000000..172ee053d5 --- /dev/null +++ b/video/video_receive_stream2.h @@ -0,0 +1,288 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_RECEIVE_STREAM2_H_ +#define VIDEO_VIDEO_RECEIVE_STREAM2_H_ + +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/units/timestamp.h" +#include "api/video/recordable_encoded_frame.h" +#include "call/rtp_packet_sink_interface.h" +#include "call/syncable.h" +#include "call/video_receive_stream.h" +#include "modules/rtp_rtcp/include/flexfec_receiver.h" +#include "modules/rtp_rtcp/source/source_tracker.h" +#include "modules/video_coding/frame_buffer2.h" +#include "modules/video_coding/video_receiver2.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "system_wrappers/include/clock.h" +#include "video/receive_statistics_proxy2.h" +#include "video/rtp_streams_synchronizer2.h" +#include "video/rtp_video_stream_receiver2.h" +#include "video/transport_adapter.h" +#include "video/video_stream_decoder2.h" + +namespace webrtc { + +class ProcessThread; +class RtpStreamReceiverInterface; +class RtpStreamReceiverControllerInterface; +class RtxReceiveStream; +class VCMTiming; + +namespace internal { + +class CallStats; + +// Utility struct for grabbing metadata from a VideoFrame and processing it +// asynchronously without needing the actual frame data. +// Additionally the caller can bundle information from the current clock +// when the metadata is captured, for accurate reporting and not needeing +// multiple calls to clock->Now(). 
+struct VideoFrameMetaData { + VideoFrameMetaData(const webrtc::VideoFrame& frame, Timestamp now) + : rtp_timestamp(frame.timestamp()), + timestamp_us(frame.timestamp_us()), + ntp_time_ms(frame.ntp_time_ms()), + width(frame.width()), + height(frame.height()), + decode_timestamp(now) {} + + int64_t render_time_ms() const { + return timestamp_us / rtc::kNumMicrosecsPerMillisec; + } + + const uint32_t rtp_timestamp; + const int64_t timestamp_us; + const int64_t ntp_time_ms; + const int width; + const int height; + + const Timestamp decode_timestamp; +}; + +class VideoReceiveStream2 : public webrtc::VideoReceiveStream, + public rtc::VideoSinkInterface, + public NackSender, + public video_coding::OnCompleteFrameCallback, + public Syncable, + public CallStatsObserver { + public: + // The default number of milliseconds to pass before re-requesting a key frame + // to be sent. + static constexpr int kMaxWaitForKeyFrameMs = 200; + + VideoReceiveStream2(TaskQueueFactory* task_queue_factory, + TaskQueueBase* current_queue, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock, + VCMTiming* timing); + ~VideoReceiveStream2() override; + + const Config& config() const { return config_; } + + void SignalNetworkState(NetworkState state); + bool DeliverRtcp(const uint8_t* packet, size_t length); + + void SetSync(Syncable* audio_syncable); + + // Implements webrtc::VideoReceiveStream. + void Start() override; + void Stop() override; + + webrtc::VideoReceiveStream::Stats GetStats() const override; + + void AddSecondarySink(RtpPacketSinkInterface* sink) override; + void RemoveSecondarySink(const RtpPacketSinkInterface* sink) override; + + // SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called + // from webrtc/api level and requested by user code. For e.g. blink/js layer + // in Chromium. 
+ bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override; + int GetBaseMinimumPlayoutDelayMs() const override; + + void SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) override; + void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) override; + + // Implements rtc::VideoSinkInterface. + void OnFrame(const VideoFrame& video_frame) override; + + // Implements NackSender. + // For this particular override of the interface, + // only (buffering_allowed == true) is acceptable. + void SendNack(const std::vector& sequence_numbers, + bool buffering_allowed) override; + + // Implements video_coding::OnCompleteFrameCallback. + void OnCompleteFrame( + std::unique_ptr frame) override; + + // Implements CallStatsObserver::OnRttUpdate + void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; + + // Implements Syncable. + uint32_t id() const override; + absl::optional GetInfo() const override; + bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, + int64_t* time_ms) const override; + void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, + int64_t time_ms) override; + + // SetMinimumPlayoutDelay is only called by A/V sync. 
+ bool SetMinimumPlayoutDelay(int delay_ms) override; + + std::vector GetSources() const override; + + RecordingState SetAndGetRecordingState(RecordingState state, + bool generate_key_frame) override; + void GenerateKeyFrame() override; + +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder) override; +#endif + + private: + int64_t GetMaxWaitMs() const RTC_RUN_ON(decode_queue_); + void StartNextDecode() RTC_RUN_ON(decode_queue_); + void HandleEncodedFrame(std::unique_ptr frame) + RTC_RUN_ON(decode_queue_); + void HandleFrameBufferTimeout(int64_t now_ms, int64_t wait_ms) + RTC_RUN_ON(worker_sequence_checker_); + void UpdatePlayoutDelays() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_sequence_checker_); + void RequestKeyFrame(int64_t timestamp_ms) + RTC_RUN_ON(worker_sequence_checker_); + void HandleKeyFrameGeneration(bool received_frame_is_keyframe, + int64_t now_ms, + bool always_request_key_frame, + bool keyframe_request_is_due) + RTC_RUN_ON(worker_sequence_checker_); + bool IsReceivingKeyFrame(int64_t timestamp_ms) const + RTC_RUN_ON(worker_sequence_checker_); + + void UpdateHistograms(); + + SequenceChecker worker_sequence_checker_; + SequenceChecker module_process_sequence_checker_; + + TaskQueueFactory* const task_queue_factory_; + + TransportAdapter transport_adapter_; + const VideoReceiveStream::Config config_; + const int num_cpu_cores_; + TaskQueueBase* const worker_thread_; + Clock* const clock_; + + CallStats* const call_stats_; + + bool decoder_running_ RTC_GUARDED_BY(worker_sequence_checker_) = false; + bool decoder_stopped_ RTC_GUARDED_BY(decode_queue_) = true; + + SourceTracker source_tracker_; + ReceiveStatisticsProxy stats_proxy_; + // Shared by media and rtx stream receivers, since the latter has no RtpRtcp + // module of its own. + const std::unique_ptr rtp_receive_statistics_; + + std::unique_ptr timing_; // Jitter buffer experiment. 
+ VideoReceiver2 video_receiver_; + std::unique_ptr> incoming_video_stream_; + RtpVideoStreamReceiver2 rtp_video_stream_receiver_; + std::unique_ptr video_stream_decoder_; + RtpStreamsSynchronizer rtp_stream_sync_; + + // TODO(nisse, philipel): Creation and ownership of video encoders should be + // moved to the new VideoStreamDecoder. + std::vector> video_decoders_; + + // Members for the new jitter buffer experiment. + std::unique_ptr frame_buffer_; + + std::unique_ptr media_receiver_; + std::unique_ptr rtx_receive_stream_; + std::unique_ptr rtx_receiver_; + + // Whenever we are in an undecodable state (stream has just started or due to + // a decoding error) we require a keyframe to restart the stream. + bool keyframe_required_ RTC_GUARDED_BY(decode_queue_) = true; + + // If we have successfully decoded any frame. + bool frame_decoded_ RTC_GUARDED_BY(decode_queue_) = false; + + int64_t last_keyframe_request_ms_ RTC_GUARDED_BY(decode_queue_) = 0; + int64_t last_complete_frame_time_ms_ + RTC_GUARDED_BY(worker_sequence_checker_) = 0; + + // Keyframe request intervals are configurable through field trials. + const int max_wait_for_keyframe_ms_; + const int max_wait_for_frame_ms_; + + // All of them tries to change current min_playout_delay on |timing_| but + // source of the change request is different in each case. Among them the + // biggest delay is used. -1 means use default value from the |timing_|. + // + // Minimum delay as decided by the RTP playout delay extension. + int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) = + -1; + // Minimum delay as decided by the setLatency function in "webrtc/api". + int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) = + -1; + // Minimum delay as decided by the A/V synchronization feature. + int syncable_minimum_playout_delay_ms_ + RTC_GUARDED_BY(worker_sequence_checker_) = -1; + + // Maximum delay as decided by the RTP playout delay extension. 
+ int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) = + -1; + + // Function that is triggered with encoded frames, if not empty. + std::function + encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_); + // Set to true while we're requesting keyframes but not yet received one. + bool keyframe_generation_requested_ RTC_GUARDED_BY(worker_sequence_checker_) = + false; + + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter |enabled| + // determines if the low-latency renderer algorithm should be used for the + // case min playout delay=0 and max playout delay>0. + FieldTrialParameter low_latency_renderer_enabled_; + // Set by the field trial WebRTC-LowLatencyRenderer. The parameter + // |include_predecode_buffer| determines if the predecode buffer should be + // taken into account when calculating maximum number of frames in composition + // queue. + FieldTrialParameter low_latency_renderer_include_predecode_buffer_; + +#ifndef DISABLE_RECORDER + mutable webrtc::Mutex recorder_mutex_; + Recorder* recorder_ RTC_GUARDED_BY(recorder_mutex_); +#endif + + // Defined last so they are destroyed before all other members. + rtc::TaskQueue decode_queue_; + + // Used to signal destruction to potentially pending tasks. + ScopedTaskSafety task_safety_; +}; +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_VIDEO_RECEIVE_STREAM2_H_ diff --git a/video/video_receive_stream2_unittest.cc b/video/video_receive_stream2_unittest.cc new file mode 100644 index 0000000000..3f10686db7 --- /dev/null +++ b/video/video_receive_stream2_unittest.cc @@ -0,0 +1,596 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/video_receive_stream2.h" + +#include +#include +#include +#include + +#include "api/task_queue/default_task_queue_factory.h" +#include "api/test/video/function_video_decoder_factory.h" +#include "api/video_codecs/video_decoder.h" +#include "call/rtp_stream_receiver_controller.h" +#include "common_video/test/utilities.h" +#include "media/base/fake_video_renderer.h" +#include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/utility/include/process_thread.h" +#include "modules/video_coding/encoded_frame.h" +#include "rtc_base/event.h" +#include "system_wrappers/include/clock.h" +#include "test/fake_decoder.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/run_loop.h" +#include "test/time_controller/simulated_time_controller.h" +#include "test/video_decoder_proxy_factory.h" +#include "video/call_stats2.h" + +namespace webrtc { +namespace { + +using ::testing::_; +using ::testing::ElementsAreArray; +using ::testing::Invoke; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +constexpr int kDefaultTimeOutMs = 50; + +class MockTransport : public Transport { + public: + MOCK_METHOD(bool, + SendRtp, + (const uint8_t*, size_t length, const PacketOptions& options), + (override)); + MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override)); +}; + +class MockVideoDecoder : public VideoDecoder { + public: + MOCK_METHOD(int32_t, + InitDecode, + (const VideoCodec*, int32_t number_of_cores), + (override)); + MOCK_METHOD(int32_t, + Decode, + (const EncodedImage& input, + bool missing_frames, + int64_t render_time_ms), + (override)); + MOCK_METHOD(int32_t, + RegisterDecodeCompleteCallback, + (DecodedImageCallback*), + (override)); + MOCK_METHOD(int32_t, Release, (), (override)); + const char* ImplementationName() const { return 
"MockVideoDecoder"; } +}; + +class FrameObjectFake : public video_coding::EncodedFrame { + public: + void SetPayloadType(uint8_t payload_type) { _payloadType = payload_type; } + + void SetRotation(const VideoRotation& rotation) { rotation_ = rotation; } + + void SetNtpTime(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; } + + int64_t ReceivedTime() const override { return 0; } + + int64_t RenderTime() const override { return _renderTimeMs; } +}; + +} // namespace + +class VideoReceiveStream2Test : public ::testing::Test { + public: + VideoReceiveStream2Test() + : process_thread_(ProcessThread::Create("TestThread")), + task_queue_factory_(CreateDefaultTaskQueueFactory()), + config_(&mock_transport_), + call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()), + h264_decoder_factory_(&mock_h264_video_decoder_) {} + + void SetUp() { + constexpr int kDefaultNumCpuCores = 2; + config_.rtp.remote_ssrc = 1111; + config_.rtp.local_ssrc = 2222; + config_.renderer = &fake_renderer_; + config_.decoder_factory = &h264_decoder_factory_; + VideoReceiveStream::Decoder h264_decoder; + h264_decoder.payload_type = 99; + h264_decoder.video_format = SdpVideoFormat("H264"); + h264_decoder.video_format.parameters.insert( + {"sprop-parameter-sets", "Z0IACpZTBYmI,aMljiA=="}); + config_.decoders.push_back(h264_decoder); + + clock_ = Clock::GetRealTimeClock(); + timing_ = new VCMTiming(clock_); + + video_receive_stream_ = + std::make_unique( + task_queue_factory_.get(), loop_.task_queue(), + &rtp_stream_receiver_controller_, kDefaultNumCpuCores, + &packet_router_, config_.Copy(), process_thread_.get(), + &call_stats_, clock_, timing_); + } + + protected: + test::RunLoop loop_; + std::unique_ptr process_thread_; + const std::unique_ptr task_queue_factory_; + VideoReceiveStream::Config config_; + internal::CallStats call_stats_; + MockVideoDecoder mock_h264_video_decoder_; + test::VideoDecoderProxyFactory h264_decoder_factory_; + cricket::FakeVideoRenderer fake_renderer_; + MockTransport 
mock_transport_; + PacketRouter packet_router_; + RtpStreamReceiverController rtp_stream_receiver_controller_; + std::unique_ptr video_receive_stream_; + Clock* clock_; + VCMTiming* timing_; +}; + +TEST_F(VideoReceiveStream2Test, CreateFrameFromH264FmtpSpropAndIdr) { + constexpr uint8_t idr_nalu[] = {0x05, 0xFF, 0xFF, 0xFF}; + RtpPacketToSend rtppacket(nullptr); + uint8_t* payload = rtppacket.AllocatePayload(sizeof(idr_nalu)); + memcpy(payload, idr_nalu, sizeof(idr_nalu)); + rtppacket.SetMarker(true); + rtppacket.SetSsrc(1111); + rtppacket.SetPayloadType(99); + rtppacket.SetSequenceNumber(1); + rtppacket.SetTimestamp(0); + rtc::Event init_decode_event_; + EXPECT_CALL(mock_h264_video_decoder_, InitDecode(_, _)) + .WillOnce(Invoke([&init_decode_event_](const VideoCodec* config, + int32_t number_of_cores) { + init_decode_event_.Set(); + return 0; + })); + EXPECT_CALL(mock_h264_video_decoder_, RegisterDecodeCompleteCallback(_)); + video_receive_stream_->Start(); + EXPECT_CALL(mock_h264_video_decoder_, Decode(_, false, _)); + RtpPacketReceived parsed_packet; + ASSERT_TRUE(parsed_packet.Parse(rtppacket.data(), rtppacket.size())); + rtp_stream_receiver_controller_.OnRtpPacket(parsed_packet); + EXPECT_CALL(mock_h264_video_decoder_, Release()); + // Make sure the decoder thread had a chance to run. + init_decode_event_.Wait(kDefaultTimeOutMs); +} + +TEST_F(VideoReceiveStream2Test, PlayoutDelay) { + const VideoPlayoutDelay kPlayoutDelayMs = {123, 321}; + std::unique_ptr test_frame(new FrameObjectFake()); + test_frame->id.picture_id = 0; + test_frame->SetPlayoutDelay(kPlayoutDelayMs); + + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay()); + EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay()); + + // Check that the biggest minimum delay is chosen. 
+ video_receive_stream_->SetMinimumPlayoutDelay(400); + EXPECT_EQ(400, timing_->min_playout_delay()); + + // Check base minimum delay validation. + EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(12345)); + EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(-1)); + EXPECT_TRUE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(500)); + EXPECT_EQ(500, timing_->min_playout_delay()); + + // Check that intermidiate values are remembered and the biggest remembered + // is chosen. + video_receive_stream_->SetBaseMinimumPlayoutDelayMs(0); + EXPECT_EQ(400, timing_->min_playout_delay()); + + video_receive_stream_->SetMinimumPlayoutDelay(0); + EXPECT_EQ(123, timing_->min_playout_delay()); +} + +TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMaxValue) { + const int default_max_playout_latency = timing_->max_playout_delay(); + const VideoPlayoutDelay kPlayoutDelayMs = {123, -1}; + + std::unique_ptr test_frame(new FrameObjectFake()); + test_frame->id.picture_id = 0; + test_frame->SetPlayoutDelay(kPlayoutDelayMs); + + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + + // Ensure that -1 preserves default maximum value from |timing_|. + EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay()); + EXPECT_NE(kPlayoutDelayMs.max_ms, timing_->max_playout_delay()); + EXPECT_EQ(default_max_playout_latency, timing_->max_playout_delay()); +} + +TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMinValue) { + const int default_min_playout_latency = timing_->min_playout_delay(); + const VideoPlayoutDelay kPlayoutDelayMs = {-1, 321}; + + std::unique_ptr test_frame(new FrameObjectFake()); + test_frame->id.picture_id = 0; + test_frame->SetPlayoutDelay(kPlayoutDelayMs); + + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + + // Ensure that -1 preserves default minimum value from |timing_|. 
+ EXPECT_NE(kPlayoutDelayMs.min_ms, timing_->min_playout_delay()); + EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay()); + EXPECT_EQ(default_min_playout_latency, timing_->min_playout_delay()); +} + +TEST_F(VideoReceiveStream2Test, MaxCompositionDelayNotSetByDefault) { + // Default with no playout delay set. + std::unique_ptr test_frame0(new FrameObjectFake()); + test_frame0->id.picture_id = 0; + video_receive_stream_->OnCompleteFrame(std::move(test_frame0)); + EXPECT_FALSE(timing_->MaxCompositionDelayInFrames()); + + // Max composition delay not set for playout delay 0,0. + std::unique_ptr test_frame1(new FrameObjectFake()); + test_frame1->id.picture_id = 1; + test_frame1->SetPlayoutDelay({0, 0}); + video_receive_stream_->OnCompleteFrame(std::move(test_frame1)); + EXPECT_FALSE(timing_->MaxCompositionDelayInFrames()); + + // Max composition delay not set for playout delay X,Y, where X,Y>0. + std::unique_ptr test_frame2(new FrameObjectFake()); + test_frame2->id.picture_id = 2; + test_frame2->SetPlayoutDelay({10, 30}); + video_receive_stream_->OnCompleteFrame(std::move(test_frame2)); + EXPECT_FALSE(timing_->MaxCompositionDelayInFrames()); +} + +TEST_F(VideoReceiveStream2Test, MaxCompositionDelaySetFromMaxPlayoutDelay) { + // Max composition delay set if playout delay X,Y, where X=0,Y>0. + const VideoPlayoutDelay kPlayoutDelayMs = {0, 50}; + const int kExpectedMaxCompositionDelayInFrames = 3; // ~50 ms at 60 fps. 
+ std::unique_ptr test_frame(new FrameObjectFake()); + test_frame->id.picture_id = 0; + test_frame->SetPlayoutDelay(kPlayoutDelayMs); + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + EXPECT_EQ(kExpectedMaxCompositionDelayInFrames, + timing_->MaxCompositionDelayInFrames()); +} + +class VideoReceiveStream2TestWithFakeDecoder : public ::testing::Test { + public: + VideoReceiveStream2TestWithFakeDecoder() + : fake_decoder_factory_( + []() { return std::make_unique(); }), + process_thread_(ProcessThread::Create("TestThread")), + task_queue_factory_(CreateDefaultTaskQueueFactory()), + config_(&mock_transport_), + call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()) {} + + void SetUp() { + config_.rtp.remote_ssrc = 1111; + config_.rtp.local_ssrc = 2222; + config_.renderer = &fake_renderer_; + config_.decoder_factory = &fake_decoder_factory_; + VideoReceiveStream::Decoder fake_decoder; + fake_decoder.payload_type = 99; + fake_decoder.video_format = SdpVideoFormat("VP8"); + config_.decoders.push_back(fake_decoder); + clock_ = Clock::GetRealTimeClock(); + ReCreateReceiveStream(VideoReceiveStream::RecordingState()); + } + + void ReCreateReceiveStream(VideoReceiveStream::RecordingState state) { + constexpr int kDefaultNumCpuCores = 2; + video_receive_stream_ = nullptr; + timing_ = new VCMTiming(clock_); + video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream2( + task_queue_factory_.get(), loop_.task_queue(), + &rtp_stream_receiver_controller_, kDefaultNumCpuCores, &packet_router_, + config_.Copy(), process_thread_.get(), &call_stats_, clock_, timing_)); + video_receive_stream_->SetAndGetRecordingState(std::move(state), false); + } + + protected: + test::RunLoop loop_; + test::FunctionVideoDecoderFactory fake_decoder_factory_; + std::unique_ptr process_thread_; + const std::unique_ptr task_queue_factory_; + VideoReceiveStream::Config config_; + internal::CallStats call_stats_; + cricket::FakeVideoRenderer fake_renderer_; + MockTransport 
mock_transport_; + PacketRouter packet_router_; + RtpStreamReceiverController rtp_stream_receiver_controller_; + std::unique_ptr video_receive_stream_; + Clock* clock_; + VCMTiming* timing_; +}; + +TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesNtpTime) { + const int64_t kNtpTimestamp = 12345; + auto test_frame = std::make_unique(); + test_frame->SetPayloadType(99); + test_frame->id.picture_id = 0; + test_frame->SetNtpTime(kNtpTimestamp); + + video_receive_stream_->Start(); + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs)); + EXPECT_EQ(kNtpTimestamp, fake_renderer_.ntp_time_ms()); +} + +TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesRotation) { + const webrtc::VideoRotation kRotation = webrtc::kVideoRotation_180; + auto test_frame = std::make_unique(); + test_frame->SetPayloadType(99); + test_frame->id.picture_id = 0; + test_frame->SetRotation(kRotation); + + video_receive_stream_->Start(); + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs)); + + EXPECT_EQ(kRotation, fake_renderer_.rotation()); +} + +TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesPacketInfos) { + auto test_frame = std::make_unique(); + test_frame->SetPayloadType(99); + test_frame->id.picture_id = 0; + RtpPacketInfos packet_infos = CreatePacketInfos(3); + test_frame->SetPacketInfos(packet_infos); + + video_receive_stream_->Start(); + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs)); + + EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(packet_infos)); +} + +TEST_F(VideoReceiveStream2TestWithFakeDecoder, RenderedFrameUpdatesGetSources) { + constexpr uint32_t kSsrc = 1111; + constexpr uint32_t kCsrc = 9001; + constexpr uint32_t kRtpTimestamp = 12345; + + // Prepare one video frame with per-packet information. 
+ auto test_frame = std::make_unique(); + test_frame->SetPayloadType(99); + test_frame->id.picture_id = 0; + RtpPacketInfos packet_infos; + { + RtpPacketInfos::vector_type infos; + + RtpPacketInfo info; + info.set_ssrc(kSsrc); + info.set_csrcs({kCsrc}); + info.set_rtp_timestamp(kRtpTimestamp); + + info.set_receive_time_ms(clock_->TimeInMilliseconds() - 5000); + infos.push_back(info); + + info.set_receive_time_ms(clock_->TimeInMilliseconds() - 3000); + infos.push_back(info); + + info.set_receive_time_ms(clock_->TimeInMilliseconds() - 2000); + infos.push_back(info); + + info.set_receive_time_ms(clock_->TimeInMilliseconds() - 4000); + infos.push_back(info); + + packet_infos = RtpPacketInfos(std::move(infos)); + } + test_frame->SetPacketInfos(packet_infos); + + // Start receive stream. + video_receive_stream_->Start(); + EXPECT_THAT(video_receive_stream_->GetSources(), IsEmpty()); + + // Render one video frame. + int64_t timestamp_ms_min = clock_->TimeInMilliseconds(); + video_receive_stream_->OnCompleteFrame(std::move(test_frame)); + EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs)); + int64_t timestamp_ms_max = clock_->TimeInMilliseconds(); + + // Verify that the per-packet information is passed to the renderer. + EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(packet_infos)); + + // Verify that the per-packet information also updates |GetSources()|. 
+ std::vector sources = video_receive_stream_->GetSources(); + ASSERT_THAT(sources, SizeIs(2)); + { + auto it = std::find_if(sources.begin(), sources.end(), + [](const RtpSource& source) { + return source.source_type() == RtpSourceType::SSRC; + }); + ASSERT_NE(it, sources.end()); + + EXPECT_EQ(it->source_id(), kSsrc); + EXPECT_EQ(it->source_type(), RtpSourceType::SSRC); + EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp); + EXPECT_GE(it->timestamp_ms(), timestamp_ms_min); + EXPECT_LE(it->timestamp_ms(), timestamp_ms_max); + } + { + auto it = std::find_if(sources.begin(), sources.end(), + [](const RtpSource& source) { + return source.source_type() == RtpSourceType::CSRC; + }); + ASSERT_NE(it, sources.end()); + + EXPECT_EQ(it->source_id(), kCsrc); + EXPECT_EQ(it->source_type(), RtpSourceType::CSRC); + EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp); + EXPECT_GE(it->timestamp_ms(), timestamp_ms_min); + EXPECT_LE(it->timestamp_ms(), timestamp_ms_max); + } +} + +std::unique_ptr MakeFrame(VideoFrameType frame_type, + int picture_id) { + auto frame = std::make_unique(); + frame->SetPayloadType(99); + frame->id.picture_id = picture_id; + frame->SetFrameType(frame_type); + return frame; +} + +TEST_F(VideoReceiveStream2TestWithFakeDecoder, + PassesFrameWhenEncodedFramesCallbackSet) { + testing::MockFunction callback; + video_receive_stream_->Start(); + // Expect a keyframe request to be generated + EXPECT_CALL(mock_transport_, SendRtcp); + EXPECT_CALL(callback, Call); + video_receive_stream_->SetAndGetRecordingState( + VideoReceiveStream::RecordingState(callback.AsStdFunction()), true); + video_receive_stream_->OnCompleteFrame( + MakeFrame(VideoFrameType::kVideoFrameKey, 0)); + EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs)); + video_receive_stream_->Stop(); +} + +TEST_F(VideoReceiveStream2TestWithFakeDecoder, + MovesEncodedFrameDispatchStateWhenReCreating) { + testing::MockFunction callback; + video_receive_stream_->Start(); + // Expect a key frame request 
over RTCP. + EXPECT_CALL(mock_transport_, SendRtcp).Times(1); + video_receive_stream_->SetAndGetRecordingState( + VideoReceiveStream::RecordingState(callback.AsStdFunction()), true); + video_receive_stream_->Stop(); + VideoReceiveStream::RecordingState old_state = + video_receive_stream_->SetAndGetRecordingState( + VideoReceiveStream::RecordingState(), false); + ReCreateReceiveStream(std::move(old_state)); + video_receive_stream_->Stop(); +} + +class VideoReceiveStream2TestWithSimulatedClock : public ::testing::Test { + public: + class FakeDecoder2 : public test::FakeDecoder { + public: + explicit FakeDecoder2(std::function decode_callback) + : callback_(decode_callback) {} + + int32_t Decode(const EncodedImage& input, + bool missing_frames, + int64_t render_time_ms) override { + int32_t result = + FakeDecoder::Decode(input, missing_frames, render_time_ms); + callback_(); + return result; + } + + private: + std::function callback_; + }; + + static VideoReceiveStream::Config GetConfig( + Transport* transport, + VideoDecoderFactory* decoder_factory, + rtc::VideoSinkInterface* renderer) { + VideoReceiveStream::Config config(transport); + config.rtp.remote_ssrc = 1111; + config.rtp.local_ssrc = 2222; + config.renderer = renderer; + config.decoder_factory = decoder_factory; + VideoReceiveStream::Decoder fake_decoder; + fake_decoder.payload_type = 99; + fake_decoder.video_format = SdpVideoFormat("VP8"); + config.decoders.push_back(fake_decoder); + return config; + } + + VideoReceiveStream2TestWithSimulatedClock() + : time_controller_(Timestamp::Millis(4711)), + fake_decoder_factory_([this] { + return std::make_unique([this] { OnFrameDecoded(); }); + }), + process_thread_(time_controller_.CreateProcessThread("ProcessThread")), + config_(GetConfig(&mock_transport_, + &fake_decoder_factory_, + &fake_renderer_)), + call_stats_(time_controller_.GetClock(), loop_.task_queue()), + video_receive_stream_(time_controller_.GetTaskQueueFactory(), + loop_.task_queue(), + 
&rtp_stream_receiver_controller_, + /*num_cores=*/2, + &packet_router_, + config_.Copy(), + process_thread_.get(), + &call_stats_, + time_controller_.GetClock(), + new VCMTiming(time_controller_.GetClock())) { + video_receive_stream_.Start(); + } + + void OnFrameDecoded() { event_->Set(); } + + void PassEncodedFrameAndWait( + std::unique_ptr frame) { + event_ = std::make_unique(); + // This call will eventually end up in the Decoded method where the + // event is set. + video_receive_stream_.OnCompleteFrame(std::move(frame)); + event_->Wait(rtc::Event::kForever); + } + + protected: + GlobalSimulatedTimeController time_controller_; + test::RunLoop loop_; + test::FunctionVideoDecoderFactory fake_decoder_factory_; + std::unique_ptr process_thread_; + MockTransport mock_transport_; + cricket::FakeVideoRenderer fake_renderer_; + VideoReceiveStream::Config config_; + internal::CallStats call_stats_; + PacketRouter packet_router_; + RtpStreamReceiverController rtp_stream_receiver_controller_; + webrtc::internal::VideoReceiveStream2 video_receive_stream_; + std::unique_ptr event_; +}; + +TEST_F(VideoReceiveStream2TestWithSimulatedClock, + RequestsKeyFramesUntilKeyFrameReceived) { + auto tick = TimeDelta::Millis( + internal::VideoReceiveStream2::kMaxWaitForKeyFrameMs / 2); + EXPECT_CALL(mock_transport_, SendRtcp).Times(1).WillOnce(Invoke([this]() { + loop_.Quit(); + return 0; + })); + video_receive_stream_.GenerateKeyFrame(); + PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 0)); + time_controller_.AdvanceTime(tick); + PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 1)); + loop_.Run(); + testing::Mock::VerifyAndClearExpectations(&mock_transport_); + + // T+200ms: still no key frame received, expect key frame request sent again. 
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(1).WillOnce(Invoke([this]() { + loop_.Quit(); + return 0; + })); + time_controller_.AdvanceTime(tick); + PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 2)); + loop_.Run(); + testing::Mock::VerifyAndClearExpectations(&mock_transport_); + + // T+200ms: now send a key frame - we should not observe new key frame + // requests after this. + EXPECT_CALL(mock_transport_, SendRtcp).Times(0); + PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameKey, 3)); + time_controller_.AdvanceTime(2 * tick); + PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 4)); + loop_.PostTask([this]() { loop_.Quit(); }); + loop_.Run(); +} + +} // namespace webrtc diff --git a/video/video_receive_stream_unittest.cc b/video/video_receive_stream_unittest.cc index 503660eca6..503c96c093 100644 --- a/video/video_receive_stream_unittest.cc +++ b/video/video_receive_stream_unittest.cc @@ -25,7 +25,6 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/encoded_frame.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "system_wrappers/include/clock.h" #include "test/fake_decoder.h" @@ -49,24 +48,30 @@ constexpr int kDefaultTimeOutMs = 50; class MockTransport : public Transport { public: - MOCK_METHOD3(SendRtp, - bool(const uint8_t* packet, - size_t length, - const PacketOptions& options)); - MOCK_METHOD2(SendRtcp, bool(const uint8_t* packet, size_t length)); + MOCK_METHOD(bool, + SendRtp, + (const uint8_t*, size_t length, const PacketOptions& options), + (override)); + MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override)); }; class MockVideoDecoder : public VideoDecoder { public: - MOCK_METHOD2(InitDecode, - int32_t(const VideoCodec* config, int32_t number_of_cores)); - MOCK_METHOD3(Decode, - int32_t(const EncodedImage& input, - bool missing_frames, - int64_t render_time_ms)); - 
MOCK_METHOD1(RegisterDecodeCompleteCallback, - int32_t(DecodedImageCallback* callback)); - MOCK_METHOD0(Release, int32_t(void)); + MOCK_METHOD(int32_t, + InitDecode, + (const VideoCodec*, int32_t number_of_cores), + (override)); + MOCK_METHOD(int32_t, + Decode, + (const EncodedImage& input, + bool missing_frames, + int64_t render_time_ms), + (override)); + MOCK_METHOD(int32_t, + RegisterDecodeCompleteCallback, + (DecodedImageCallback*), + (override)); + MOCK_METHOD(int32_t, Release, (), (override)); const char* ImplementationName() const { return "MockVideoDecoder"; } }; @@ -92,26 +97,20 @@ class VideoReceiveStreamTest : public ::testing::Test { task_queue_factory_(CreateDefaultTaskQueueFactory()), config_(&mock_transport_), call_stats_(Clock::GetRealTimeClock(), process_thread_.get()), - h264_decoder_factory_(&mock_h264_video_decoder_), - null_decoder_factory_(&mock_null_video_decoder_) {} + h264_decoder_factory_(&mock_h264_video_decoder_) {} void SetUp() { constexpr int kDefaultNumCpuCores = 2; config_.rtp.remote_ssrc = 1111; config_.rtp.local_ssrc = 2222; config_.renderer = &fake_renderer_; + config_.decoder_factory = &h264_decoder_factory_; VideoReceiveStream::Decoder h264_decoder; h264_decoder.payload_type = 99; h264_decoder.video_format = SdpVideoFormat("H264"); h264_decoder.video_format.parameters.insert( {"sprop-parameter-sets", "Z0IACpZTBYmI,aMljiA=="}); - h264_decoder.decoder_factory = &h264_decoder_factory_; config_.decoders.push_back(h264_decoder); - VideoReceiveStream::Decoder null_decoder; - null_decoder.payload_type = 98; - null_decoder.video_format = SdpVideoFormat("null"); - null_decoder.decoder_factory = &null_decoder_factory_; - config_.decoders.push_back(null_decoder); clock_ = Clock::GetRealTimeClock(); timing_ = new VCMTiming(clock_); @@ -129,9 +128,7 @@ class VideoReceiveStreamTest : public ::testing::Test { VideoReceiveStream::Config config_; CallStats call_stats_; MockVideoDecoder mock_h264_video_decoder_; - MockVideoDecoder 
mock_null_video_decoder_; test::VideoDecoderProxyFactory h264_decoder_factory_; - test::VideoDecoderProxyFactory null_decoder_factory_; cricket::FakeVideoRenderer fake_renderer_; MockTransport mock_transport_; PacketRouter packet_router_; @@ -170,7 +167,7 @@ TEST_F(VideoReceiveStreamTest, CreateFrameFromH264FmtpSpropAndIdr) { } TEST_F(VideoReceiveStreamTest, PlayoutDelay) { - const PlayoutDelay kPlayoutDelayMs = {123, 321}; + const VideoPlayoutDelay kPlayoutDelayMs = {123, 321}; std::unique_ptr test_frame(new FrameObjectFake()); test_frame->id.picture_id = 0; test_frame->SetPlayoutDelay(kPlayoutDelayMs); @@ -200,7 +197,7 @@ TEST_F(VideoReceiveStreamTest, PlayoutDelay) { TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMaxValue) { const int default_max_playout_latency = timing_->max_playout_delay(); - const PlayoutDelay kPlayoutDelayMs = {123, -1}; + const VideoPlayoutDelay kPlayoutDelayMs = {123, -1}; std::unique_ptr test_frame(new FrameObjectFake()); test_frame->id.picture_id = 0; @@ -216,7 +213,7 @@ TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMaxValue) { TEST_F(VideoReceiveStreamTest, PlayoutDelayPreservesDefaultMinValue) { const int default_min_playout_latency = timing_->min_playout_delay(); - const PlayoutDelay kPlayoutDelayMs = {-1, 321}; + const VideoPlayoutDelay kPlayoutDelayMs = {-1, 321}; std::unique_ptr test_frame(new FrameObjectFake()); test_frame->id.picture_id = 0; @@ -244,10 +241,10 @@ class VideoReceiveStreamTestWithFakeDecoder : public ::testing::Test { config_.rtp.remote_ssrc = 1111; config_.rtp.local_ssrc = 2222; config_.renderer = &fake_renderer_; + config_.decoder_factory = &fake_decoder_factory_; VideoReceiveStream::Decoder fake_decoder; fake_decoder.payload_type = 99; fake_decoder.video_format = SdpVideoFormat("VP8"); - fake_decoder.decoder_factory = &fake_decoder_factory_; config_.decoders.push_back(fake_decoder); clock_ = Clock::GetRealTimeClock(); ReCreateReceiveStream(VideoReceiveStream::RecordingState()); @@ 
-466,16 +463,16 @@ class VideoReceiveStreamTestWithSimulatedClock : public ::testing::Test { config.rtp.remote_ssrc = 1111; config.rtp.local_ssrc = 2222; config.renderer = renderer; + config.decoder_factory = decoder_factory; VideoReceiveStream::Decoder fake_decoder; fake_decoder.payload_type = 99; fake_decoder.video_format = SdpVideoFormat("VP8"); - fake_decoder.decoder_factory = decoder_factory; config.decoders.push_back(fake_decoder); return config; } VideoReceiveStreamTestWithSimulatedClock() - : time_controller_(Timestamp::ms(4711)), + : time_controller_(Timestamp::Millis(4711)), fake_decoder_factory_([this] { return std::make_unique([this] { OnFrameDecoded(); }); }), @@ -523,8 +520,8 @@ class VideoReceiveStreamTestWithSimulatedClock : public ::testing::Test { TEST_F(VideoReceiveStreamTestWithSimulatedClock, RequestsKeyFramesUntilKeyFrameReceived) { - auto tick = - TimeDelta::ms(internal::VideoReceiveStream::kMaxWaitForKeyFrameMs / 2); + auto tick = TimeDelta::Millis( + internal::VideoReceiveStream::kMaxWaitForKeyFrameMs / 2); EXPECT_CALL(mock_transport_, SendRtcp).Times(1); video_receive_stream_.GenerateKeyFrame(); PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 0)); diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc index 8fae407bc1..28f8e284bf 100644 --- a/video/video_send_stream.cc +++ b/video/video_send_stream.cc @@ -19,6 +19,7 @@ #include "modules/rtp_rtcp/source/rtp_sender.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/task_utils/to_queued_task.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" @@ -28,8 +29,6 @@ namespace webrtc { namespace { -constexpr char kTargetBitrateRtcpFieldTrial[] = "WebRTC-Target-Bitrate-Rtcp"; - size_t CalculateMaxHeaderSize(const RtpConfig& config) { size_t header_size = kRtpHeaderSize; size_t extensions_size = 0; @@ -70,7 +69,7 @@ VideoSendStream::VideoSendStream( 
int num_cpu_cores, ProcessThread* module_process_thread, TaskQueueFactory* task_queue_factory, - CallStats* call_stats, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, BitrateAllocatorInterface* bitrate_allocator, SendDelayStats* send_delay_stats, @@ -112,13 +111,6 @@ VideoSendStream::VideoSendStream( // it was created on. thread_sync_event_.Wait(rtc::Event::kForever); send_stream_->RegisterProcessThread(module_process_thread); - // TODO(sprang): Enable this also for regular video calls by default, if it - // works well. - if (encoder_config.content_type == VideoEncoderConfig::ContentType::kScreen || - field_trial::IsEnabled(kTargetBitrateRtcpFieldTrial)) { - video_stream_encoder_->SetBitrateAllocationObserver(send_stream_.get()); - } - ReconfigureVideoEncoder(std::move(encoder_config)); } @@ -130,7 +122,23 @@ VideoSendStream::~VideoSendStream() { void VideoSendStream::UpdateActiveSimulcastLayers( const std::vector active_layers) { RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_LOG(LS_INFO) << "VideoSendStream::UpdateActiveSimulcastLayers"; + + rtc::StringBuilder active_layers_string; + active_layers_string << "{"; + for (size_t i = 0; i < active_layers.size(); ++i) { + if (active_layers[i]) { + active_layers_string << "1"; + } else { + active_layers_string << "0"; + } + if (i < active_layers.size() - 1) { + active_layers_string << ", "; + } + } + active_layers_string << "}"; + RTC_LOG(LS_INFO) << "UpdateActiveSimulcastLayers: " + << active_layers_string.str(); + VideoSendStreamImpl* send_stream = send_stream_.get(); worker_queue_->PostTask([this, send_stream, active_layers] { send_stream->UpdateActiveSimulcastLayers(active_layers); @@ -162,6 +170,18 @@ void VideoSendStream::Stop() { worker_queue_->PostTask([send_stream] { send_stream->Stop(); }); } +void VideoSendStream::AddAdaptationResource( + rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(&thread_checker_); + video_stream_encoder_->AddAdaptationResource(resource); +} + +std::vector> 
+VideoSendStream::GetAdaptationResources() { + RTC_DCHECK_RUN_ON(&thread_checker_); + return video_stream_encoder_->GetAdaptationResources(); +} + void VideoSendStream::SetSource( rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) { @@ -186,6 +206,16 @@ VideoSendStream::Stats VideoSendStream::GetStats() { return stats_proxy_.GetStats(); } +#ifndef DISABLE_RECORDER +void VideoSendStream::InjectRecorder(Recorder* recorder) { + RTC_DCHECK_RUN_ON(&thread_checker_); + VideoSendStreamImpl* send_stream = send_stream_.get(); + worker_queue_->PostTask([recorder, send_stream] { + send_stream->InjectRecorder(recorder); + }); +} +#endif + absl::optional VideoSendStream::GetPacingFactorOverride() const { return send_stream_->configured_pacing_factor_; } diff --git a/video/video_send_stream.h b/video/video_send_stream.h index 9466c74699..eda0e364e0 100644 --- a/video/video_send_stream.h +++ b/video/video_send_stream.h @@ -20,7 +20,6 @@ #include "call/bitrate_allocator.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" @@ -58,7 +57,7 @@ class VideoSendStream : public webrtc::VideoSendStream { int num_cpu_cores, ProcessThread* module_process_thread, TaskQueueFactory* task_queue_factory, - CallStats* call_stats, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, BitrateAllocatorInterface* bitrate_allocator, SendDelayStats* send_delay_stats, @@ -79,12 +78,19 @@ class VideoSendStream : public webrtc::VideoSendStream { void Start() override; void Stop() override; + void AddAdaptationResource(rtc::scoped_refptr resource) override; + std::vector> GetAdaptationResources() override; + void SetSource(rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) override; void ReconfigureVideoEncoder(VideoEncoderConfig) override; Stats 
GetStats() override; +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder) override; +#endif + void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, RtpPayloadStateMap* payload_state_map); diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc index 97f3bb7f4c..0b0b029274 100644 --- a/video/video_send_stream_impl.cc +++ b/video/video_send_stream_impl.cc @@ -47,7 +47,14 @@ static constexpr int kMaxVbaSizeDifferencePercent = 10; // Max time we will throttle similar video bitrate allocations. static constexpr int64_t kMaxVbaThrottleTimeMs = 500; -constexpr TimeDelta kEncoderTimeOut = TimeDelta::Seconds<2>(); +constexpr TimeDelta kEncoderTimeOut = TimeDelta::Seconds(2); + +// When send-side BWE is used a stricter 1.1x pacing factor is used, rather than +// the 2.5x which is used with receive-side BWE. Provides a more careful +// bandwidth rampup with less risk of overshoots causing adverse effects like +// packet loss. Not used for receive side BWE, since there we lack the probing +// feature and so may result in too slow initial rampup. +static constexpr double kStrictPacingMultiplier = 1.1; bool TransportSeqNumExtensionConfigured(const VideoSendStream::Config& config) { const std::vector& extensions = config.rtp.extensions; @@ -58,12 +65,16 @@ bool TransportSeqNumExtensionConfigured(const VideoSendStream::Config& config) { // Calculate max padding bitrate for a multi layer codec. 
int CalculateMaxPadBitrateBps(const std::vector& streams, + bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps, bool pad_to_min_bitrate, bool alr_probing) { int pad_up_to_bitrate_bps = 0; + RTC_DCHECK(!is_svc || streams.size() <= 1) << "Only one stream is allowed in " + "SVC mode."; + // Filter out only the active streams; std::vector active_streams; for (const VideoStream& stream : streams) { @@ -71,7 +82,13 @@ int CalculateMaxPadBitrateBps(const std::vector& streams, active_streams.emplace_back(stream); } - if (active_streams.size() > 1) { + if (active_streams.size() > 1 || (!active_streams.empty() && is_svc)) { + // Simulcast or SVC is used. + // if SVC is used, stream bitrates should already encode svc bitrates: + // min_bitrate = min bitrate of a lowest svc layer. + // target_bitrate = sum of target bitrates of lower layers + min bitrate + // of the last one (as used in the calculations below). + // max_bitrate = sum of all active layers' max_bitrate. if (alr_probing) { // With alr probing, just pad to the min bitrate of the lowest stream, // probing will handle the rest of the rampup. @@ -82,17 +99,26 @@ int CalculateMaxPadBitrateBps(const std::vector& streams, const double hysteresis_factor = RateControlSettings::ParseFromFieldTrials() .GetSimulcastHysteresisFactor(content_type); - const size_t top_active_stream_idx = active_streams.size() - 1; - pad_up_to_bitrate_bps = std::min( - static_cast( - hysteresis_factor * - active_streams[top_active_stream_idx].min_bitrate_bps + - 0.5), - active_streams[top_active_stream_idx].target_bitrate_bps); - - // Add target_bitrate_bps of the lower active streams. - for (size_t i = 0; i < top_active_stream_idx; ++i) { - pad_up_to_bitrate_bps += active_streams[i].target_bitrate_bps; + if (is_svc) { + // For SVC, since there is only one "stream", the padding bitrate + // needed to enable the top spatial layer is stored in the + // |target_bitrate_bps| field. 
+ // TODO(sprang): This behavior needs to die. + pad_up_to_bitrate_bps = static_cast( + hysteresis_factor * active_streams[0].target_bitrate_bps + 0.5); + } else { + const size_t top_active_stream_idx = active_streams.size() - 1; + pad_up_to_bitrate_bps = std::min( + static_cast( + hysteresis_factor * + active_streams[top_active_stream_idx].min_bitrate_bps + + 0.5), + active_streams[top_active_stream_idx].target_bitrate_bps); + + // Add target_bitrate_bps of the lower active streams. + for (size_t i = 0; i < top_active_stream_idx; ++i) { + pad_up_to_bitrate_bps += active_streams[i].target_bitrate_bps; + } } } } else if (!active_streams.empty() && pad_to_min_bitrate) { @@ -113,7 +139,7 @@ RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig( return frame_encryption_config; } -RtpSenderObservers CreateObservers(CallStats* call_stats, +RtpSenderObservers CreateObservers(RtcpRttStats* call_stats, EncoderRtcpFeedback* encoder_feedback, SendStatisticsProxy* stats_proxy, SendDelayStats* send_delay_stats) { @@ -156,9 +182,9 @@ bool SameStreamsEnabled(const VideoBitrateAllocation& lhs, } // namespace PacingConfig::PacingConfig() - : pacing_factor("factor", PacedSender::kDefaultPaceMultiplier), + : pacing_factor("factor", kStrictPacingMultiplier), max_pacing_delay("max_delay", - TimeDelta::ms(PacedSender::kMaxQueueLengthMs)) { + TimeDelta::Millis(PacedSender::kMaxQueueLengthMs)) { ParseFieldTrial({&pacing_factor, &max_pacing_delay}, field_trial::FindFullName("WebRTC-Video-Pacing")); } @@ -169,7 +195,7 @@ VideoSendStreamImpl::VideoSendStreamImpl( Clock* clock, SendStatisticsProxy* stats_proxy, rtc::TaskQueue* worker_queue, - CallStats* call_stats, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, BitrateAllocatorInterface* bitrate_allocator, SendDelayStats* send_delay_stats, @@ -190,7 +216,6 @@ VideoSendStreamImpl::VideoSendStreamImpl( config_(config), worker_queue_(worker_queue), timed_out_(false), - call_stats_(call_stats), 
transport_(transport), bitrate_allocator_(bitrate_allocator), disable_padding_(true), @@ -202,19 +227,23 @@ VideoSendStreamImpl::VideoSendStreamImpl( video_stream_encoder_(video_stream_encoder), encoder_feedback_(clock, config_->rtp.ssrcs, video_stream_encoder), bandwidth_observer_(transport->GetBandwidthObserver()), - rtp_video_sender_(transport_->CreateRtpVideoSender( - suspended_ssrcs, - suspended_payload_states, - config_->rtp, - config_->rtcp_report_interval_ms, - config_->send_transport, - CreateObservers(call_stats, - &encoder_feedback_, - stats_proxy_, - send_delay_stats), - event_log, - std::move(fec_controller), - CreateFrameEncryptionConfig(config_))), + rtp_video_sender_( + transport_->CreateRtpVideoSender(suspended_ssrcs, + suspended_payload_states, + config_->rtp, + config_->rtcp_report_interval_ms, + config_->send_transport, + CreateObservers(call_stats, + &encoder_feedback_, + stats_proxy_, + send_delay_stats), + event_log, + std::move(fec_controller), + CreateFrameEncryptionConfig(config_), + config->frame_transformer)), +#ifndef DISABLE_RECORDER + recorder_(nullptr), +#endif weak_ptr_factory_(this) { video_stream_encoder->SetFecControllerOverride(rtp_video_sender_); RTC_DCHECK_RUN_ON(worker_queue_); @@ -224,7 +253,6 @@ VideoSendStreamImpl::VideoSendStreamImpl( encoder_feedback_.SetRtpVideoSender(rtp_video_sender_); RTC_DCHECK(!config_->rtp.ssrcs.empty()); - RTC_DCHECK(call_stats_); RTC_DCHECK(transport_); RTC_DCHECK_NE(initial_encoder_max_bitrate, 0); @@ -282,17 +310,6 @@ VideoSendStreamImpl::VideoSendStreamImpl( video_stream_encoder_->SetStartBitrate( bitrate_allocator_->GetStartBitrate(this)); - - // Only request rotation at the source when we positively know that the remote - // side doesn't support the rotation extension. This allows us to prepare the - // encoder in the expectation that rotation is supported - which is the common - // case. 
- bool rotation_applied = absl::c_none_of( - config_->rtp.extensions, [](const RtpExtension& extension) { - return extension.uri == RtpExtension::kVideoRotationUri; - }); - - video_stream_encoder_->SetSink(this, rotation_applied); } VideoSendStreamImpl::~VideoSendStreamImpl() { @@ -305,6 +322,21 @@ VideoSendStreamImpl::~VideoSendStreamImpl() { void VideoSendStreamImpl::RegisterProcessThread( ProcessThread* module_process_thread) { + // Called on libjingle's worker thread (not worker_queue_), as part of the + // initialization steps. That's also the correct thread/queue for setting the + // state for |video_stream_encoder_|. + + // Only request rotation at the source when we positively know that the remote + // side doesn't support the rotation extension. This allows us to prepare the + // encoder in the expectation that rotation is supported - which is the common + // case. + bool rotation_applied = absl::c_none_of( + config_->rtp.extensions, [](const RtpExtension& extension) { + return extension.uri == RtpExtension::kVideoRotationUri; + }); + + video_stream_encoder_->SetSink(this, rotation_applied); + rtp_video_sender_->RegisterProcessThread(module_process_thread); } @@ -321,7 +353,6 @@ void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { void VideoSendStreamImpl::UpdateActiveSimulcastLayers( const std::vector active_layers) { RTC_DCHECK_RUN_ON(worker_queue_); - RTC_LOG(LS_INFO) << "VideoSendStream::UpdateActiveSimulcastLayers"; bool previously_active = rtp_video_sender_->IsActive(); rtp_video_sender_->SetActiveModules(active_layers); if (!rtp_video_sender_->IsActive() && previously_active) { @@ -383,11 +414,29 @@ void VideoSendStreamImpl::Stop() { StopVideoSendStream(); } +#ifndef DISABLE_RECORDER +void VideoSendStreamImpl::InjectRecorder(Recorder* recorder) { + char log_buf[16]; + snprintf(log_buf, sizeof(log_buf) - 1, "%p", recorder); + RTC_LOG(LS_INFO) << "VideoSendStream::InjectRecorder " << log_buf; + { + webrtc::MutexLock 
lock(&recorder_mutex_); + recorder_ = recorder; + } + + if (recorder) { + worker_queue_->PostTask([=] { + video_stream_encoder_->SendKeyFrame(); + }); + } +} +#endif + void VideoSendStreamImpl::StopVideoSendStream() { bitrate_allocator_->RemoveObserver(this); check_encoder_activity_task_.Stop(); video_stream_encoder_->OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(), - DataRate::Zero(), 0, 0); + DataRate::Zero(), 0, 0, 0); stats_proxy_->OnSetEncoderTargetRate(0); } @@ -451,6 +500,20 @@ void VideoSendStreamImpl::OnBitrateAllocationUpdated( } } +void VideoSendStreamImpl::OnVideoLayersAllocationUpdated( + VideoLayersAllocation allocation) { + if (!worker_queue_->IsCurrent()) { + auto ptr = weak_ptr_; + worker_queue_->PostTask([allocation = std::move(allocation), ptr] { + if (!ptr.get()) + return; + ptr->OnVideoLayersAllocationUpdated(allocation); + }); + return; + } + rtp_video_sender_->OnVideoLayersAllocationUpdated(allocation); +} + void VideoSendStreamImpl::SignalEncoderActive() { RTC_DCHECK_RUN_ON(worker_queue_); if (rtp_video_sender_->IsActive()) { @@ -471,22 +534,23 @@ MediaStreamAllocationConfig VideoSendStreamImpl::GetAllocationConfig() const { void VideoSendStreamImpl::OnEncoderConfigurationChanged( std::vector streams, + bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps) { if (!worker_queue_->IsCurrent()) { rtc::WeakPtr send_stream = weak_ptr_; - worker_queue_->PostTask([send_stream, streams, content_type, + worker_queue_->PostTask([send_stream, streams, is_svc, content_type, min_transmit_bitrate_bps]() mutable { if (send_stream) { send_stream->OnEncoderConfigurationChanged( - std::move(streams), content_type, min_transmit_bitrate_bps); + std::move(streams), is_svc, content_type, min_transmit_bitrate_bps); } }); return; } + RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size()); TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged"); - RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size()); 
RTC_DCHECK_RUN_ON(worker_queue_); const VideoCodecType codec_type = @@ -516,14 +580,9 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( encoder_max_bitrate_bps_); // TODO(bugs.webrtc.org/10266): Query the VideoBitrateAllocator instead. - if (codec_type == kVideoCodecVP9) { - max_padding_bitrate_ = has_alr_probing_ ? streams[0].min_bitrate_bps - : streams[0].target_bitrate_bps; - } else { - max_padding_bitrate_ = CalculateMaxPadBitrateBps( - streams, content_type, min_transmit_bitrate_bps, - config_->suspend_below_min_bitrate, has_alr_probing_); - } + max_padding_bitrate_ = CalculateMaxPadBitrateBps( + streams, is_svc, content_type, min_transmit_bitrate_bps, + config_->suspend_below_min_bitrate, has_alr_probing_); // Clear stats for disabled layers. for (size_t i = streams.size(); i < config_->rtp.ssrcs.size(); ++i) { @@ -545,8 +604,7 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codec_specific_info) { // Encoded is called on whatever thread the real encoder implementation run // on. In the case of hardware encoders, there might be several encoders // running in parallel on different threads. @@ -568,9 +626,18 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage( enable_padding_task(); } +#ifndef DISABLE_RECORDER + { + webrtc::MutexLock lock(&recorder_mutex_); + if (recorder_) { + recorder_->AddVideoFrame(&encoded_image, codec_specific_info->codecType); + } + } +#endif + EncodedImageCallback::Result result(EncodedImageCallback::Result::OK); - result = rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info, - fragmentation); + result = + rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info); // Check if there's a throttled VideoBitrateAllocation that we should try // sending. 
rtc::WeakPtr send_stream = weak_ptr_; @@ -624,29 +691,30 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) { DataRate link_allocation = DataRate::Zero(); if (encoder_target_rate_bps_ > protection_bitrate_bps) { link_allocation = - DataRate::bps(encoder_target_rate_bps_ - protection_bitrate_bps); + DataRate::BitsPerSec(encoder_target_rate_bps_ - protection_bitrate_bps); } DataRate overhead = - update.target_bitrate - DataRate::bps(encoder_target_rate_bps_); + update.target_bitrate - DataRate::BitsPerSec(encoder_target_rate_bps_); DataRate encoder_stable_target_rate = update.stable_target_bitrate; if (encoder_stable_target_rate > overhead) { encoder_stable_target_rate = encoder_stable_target_rate - overhead; } else { - encoder_stable_target_rate = DataRate::bps(encoder_target_rate_bps_); + encoder_stable_target_rate = DataRate::BitsPerSec(encoder_target_rate_bps_); } encoder_target_rate_bps_ = std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps_); - encoder_stable_target_rate = std::min(DataRate::bps(encoder_max_bitrate_bps_), - encoder_stable_target_rate); + encoder_stable_target_rate = + std::min(DataRate::BitsPerSec(encoder_max_bitrate_bps_), + encoder_stable_target_rate); - DataRate encoder_target_rate = DataRate::bps(encoder_target_rate_bps_); + DataRate encoder_target_rate = DataRate::BitsPerSec(encoder_target_rate_bps_); link_allocation = std::max(encoder_target_rate, link_allocation); video_stream_encoder_->OnBitrateUpdated( encoder_target_rate, encoder_stable_target_rate, link_allocation, rtc::dchecked_cast(update.packet_loss_ratio * 256), - update.round_trip_time.ms()); + update.round_trip_time.ms(), update.cwnd_reduce_ratio); stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_); return protection_bitrate_bps; } diff --git a/video/video_send_stream_impl.h b/video/video_send_stream_impl.h index 4195efcf82..e79593ecbb 100644 --- a/video/video_send_stream_impl.h +++ b/video/video_send_stream_impl.h @@ -32,16 
+32,18 @@ #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender_interface.h" #include "modules/include/module_common_types.h" +#ifndef DISABLE_RECORDER +#include "modules/recording/recorder.h" +#endif #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/critical_section.h" #include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" -#include "video/call_stats.h" #include "video/encoder_rtcp_feedback.h" #include "video/send_delay_stats.h" #include "video/send_statistics_proxy.h" @@ -68,14 +70,13 @@ struct PacingConfig { // An encoder may deliver frames through the EncodedImageCallback on an // arbitrary thread. class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, - public VideoStreamEncoderInterface::EncoderSink, - public VideoBitrateAllocationObserver { + public VideoStreamEncoderInterface::EncoderSink { public: VideoSendStreamImpl( Clock* clock, SendStatisticsProxy* stats_proxy, rtc::TaskQueue* worker_queue, - CallStats* call_stats, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, BitrateAllocatorInterface* bitrate_allocator, SendDelayStats* send_delay_stats, @@ -103,6 +104,10 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, void Start(); void Stop(); +#ifndef DISABLE_RECORDER + void InjectRecorder(Recorder* recorder); +#endif + // TODO(holmer): Move these to RtpTransportControllerSend. std::map GetRtpStates() const; @@ -114,26 +119,28 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, // Implements BitrateAllocatorObserver. 
uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) override; + // Implements VideoStreamEncoderInterface::EncoderSink void OnEncoderConfigurationChanged( std::vector streams, + bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps) override; + void OnBitrateAllocationUpdated( + const VideoBitrateAllocation& allocation) override; + void OnVideoLayersAllocationUpdated( + VideoLayersAllocation allocation) override; + // Implements EncodedImageCallback. The implementation routes encoded frames // to the |payload_router_| and |config.pre_encode_callback| if set. // Called on an arbitrary encoder callback thread. EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; + const CodecSpecificInfo* codec_specific_info) override; // Implements EncodedImageCallback. void OnDroppedFrame(EncodedImageCallback::DropReason reason) override; - // Implements VideoBitrateAllocationObserver. - void OnBitrateAllocationUpdated( - const VideoBitrateAllocation& allocation) override; - // Starts monitoring and sends a keyframe. 
void StartupVideoSendStream(); // Removes the bitrate observer, stops monitoring and notifies the video @@ -161,11 +168,10 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, std::atomic_bool activity_; bool timed_out_ RTC_GUARDED_BY(worker_queue_); - CallStats* const call_stats_; RtpTransportControllerSendInterface* const transport_; BitrateAllocatorInterface* const bitrate_allocator_; - rtc::CriticalSection ivf_writers_crit_; + Mutex ivf_writers_mutex_; bool disable_padding_; int max_padding_bitrate_; @@ -181,6 +187,11 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, RtcpBandwidthObserver* const bandwidth_observer_; RtpVideoSenderInterface* const rtp_video_sender_; +#ifndef DISABLE_RECORDER + mutable webrtc::Mutex recorder_mutex_; + Recorder* recorder_ RTC_GUARDED_BY(recorder_mutex_); +#endif + // |weak_ptr_| to our self. This is used since we can not call // |weak_ptr_factory_.GetWeakPtr| from multiple sequences but it is ok to copy // an existing WeakPtr. 
diff --git a/video/video_send_stream_impl_unittest.cc b/video/video_send_stream_impl_unittest.cc index 1c44cc8dd4..ee303b4eac 100644 --- a/video/video_send_stream_impl_unittest.cc +++ b/video/video_send_stream_impl_unittest.cc @@ -10,6 +10,7 @@ #include "video/video_send_stream_impl.h" +#include #include #include @@ -28,6 +29,7 @@ #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_transport.h" +#include "video/call_stats.h" #include "video/test/mock_video_stream_encoder.h" namespace webrtc { @@ -42,6 +44,8 @@ bool operator==(const BitrateAllocationUpdate& a, namespace internal { namespace { using ::testing::_; +using ::testing::AllOf; +using ::testing::Field; using ::testing::Invoke; using ::testing::NiceMock; using ::testing::Return; @@ -57,38 +61,52 @@ std::string GetAlrProbingExperimentString() { } class MockRtpVideoSender : public RtpVideoSenderInterface { public: - MOCK_METHOD1(RegisterProcessThread, void(ProcessThread*)); - MOCK_METHOD0(DeRegisterProcessThread, void()); - MOCK_METHOD1(SetActive, void(bool)); - MOCK_METHOD1(SetActiveModules, void(const std::vector)); - MOCK_METHOD0(IsActive, bool()); - MOCK_METHOD1(OnNetworkAvailability, void(bool)); - MOCK_CONST_METHOD0(GetRtpStates, std::map()); - MOCK_CONST_METHOD0(GetRtpPayloadStates, - std::map()); - MOCK_METHOD2(DeliverRtcp, void(const uint8_t*, size_t)); - MOCK_METHOD1(OnBitrateAllocationUpdated, void(const VideoBitrateAllocation&)); - MOCK_METHOD3(OnEncodedImage, - EncodedImageCallback::Result(const EncodedImage&, - const CodecSpecificInfo*, - const RTPFragmentationHeader*)); - MOCK_METHOD1(OnTransportOverheadChanged, void(size_t)); - MOCK_METHOD1(OnOverheadChanged, void(size_t)); - MOCK_METHOD2(OnBitrateUpdated, void(BitrateAllocationUpdate, int)); - MOCK_CONST_METHOD0(GetPayloadBitrateBps, uint32_t()); - MOCK_CONST_METHOD0(GetProtectionBitrateBps, uint32_t()); - MOCK_METHOD3(SetEncodingData, void(size_t, size_t, size_t)); - MOCK_CONST_METHOD2(GetSentRtpPacketInfos, - std::vector( - 
uint32_t ssrc, - rtc::ArrayView sequence_numbers)); - - MOCK_METHOD1(SetFecAllowed, void(bool fec_allowed)); + MOCK_METHOD(void, RegisterProcessThread, (ProcessThread*), (override)); + MOCK_METHOD(void, DeRegisterProcessThread, (), (override)); + MOCK_METHOD(void, SetActive, (bool), (override)); + MOCK_METHOD(void, SetActiveModules, (const std::vector), (override)); + MOCK_METHOD(bool, IsActive, (), (override)); + MOCK_METHOD(void, OnNetworkAvailability, (bool), (override)); + MOCK_METHOD((std::map), + GetRtpStates, + (), + (const, override)); + MOCK_METHOD((std::map), + GetRtpPayloadStates, + (), + (const, override)); + MOCK_METHOD(void, DeliverRtcp, (const uint8_t*, size_t), (override)); + MOCK_METHOD(void, + OnBitrateAllocationUpdated, + (const VideoBitrateAllocation&), + (override)); + MOCK_METHOD(void, + OnVideoLayersAllocationUpdated, + (const VideoLayersAllocation&), + (override)); + MOCK_METHOD(EncodedImageCallback::Result, + OnEncodedImage, + (const EncodedImage&, const CodecSpecificInfo*), + (override)); + MOCK_METHOD(void, OnTransportOverheadChanged, (size_t), (override)); + MOCK_METHOD(void, + OnBitrateUpdated, + (BitrateAllocationUpdate, int), + (override)); + MOCK_METHOD(uint32_t, GetPayloadBitrateBps, (), (const, override)); + MOCK_METHOD(uint32_t, GetProtectionBitrateBps, (), (const, override)); + MOCK_METHOD(void, SetEncodingData, (size_t, size_t, size_t), (override)); + MOCK_METHOD(std::vector, + GetSentRtpPacketInfos, + (uint32_t ssrc, rtc::ArrayView sequence_numbers), + (const, override)); + + MOCK_METHOD(void, SetFecAllowed, (bool fec_allowed), (override)); }; BitrateAllocationUpdate CreateAllocation(int bitrate_bps) { BitrateAllocationUpdate update; - update.target_bitrate = DataRate::bps(bitrate_bps); + update.target_bitrate = DataRate::BitsPerSec(bitrate_bps); update.packet_loss_ratio = 0; update.round_trip_time = TimeDelta::Zero(); return update; @@ -154,6 +172,7 @@ class VideoSendStreamImplTest : public ::testing::Test { SendDelayStats 
send_delay_stats_; TaskQueueForTest test_queue_; std::unique_ptr process_thread_; + // TODO(tommi): Use internal::CallStats CallStats call_stats_; SendStatisticsProxy stats_proxy_; PacketRouter packet_router_; @@ -241,7 +260,7 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) { static_cast(vss_impl.get()) ->OnEncoderConfigurationChanged( - std::vector{qvga_stream, vga_stream}, + std::vector{qvga_stream, vga_stream}, false, VideoEncoderConfig::ContentType::kRealtimeVideo, min_transmit_bitrate_bps); vss_impl->Stop(); @@ -309,7 +328,7 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) { static_cast(vss_impl.get()) ->OnEncoderConfigurationChanged( - std::vector{low_stream, high_stream}, + std::vector{low_stream, high_stream}, false, VideoEncoderConfig::ContentType::kScreen, min_transmit_bitrate_bps); vss_impl->Stop(); @@ -371,7 +390,7 @@ TEST_F(VideoSendStreamImplTest, static_cast(vss_impl.get()) ->OnEncoderConfigurationChanged( - std::vector{low_stream, high_stream}, + std::vector{low_stream, high_stream}, false, VideoEncoderConfig::ContentType::kRealtimeVideo, /*min_transmit_bitrate_bps=*/0); vss_impl->Stop(); @@ -420,9 +439,10 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { auto vss_impl = CreateVideoSendStreamImpl( kDefaultInitialBitrateBps, kDefaultBitratePriority, VideoEncoderConfig::ContentType::kScreen); + VideoStreamEncoderInterface::EncoderSink* const sink = + static_cast( + vss_impl.get()); vss_impl->Start(); - VideoBitrateAllocationObserver* const observer = - static_cast(vss_impl.get()); // Populate a test instance of video bitrate allocation. VideoBitrateAllocation alloc; @@ -434,7 +454,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { // Encoder starts out paused, don't forward allocation. 
EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(0); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); // Unpause encoder, allocation should be passed through. const uint32_t kBitrateBps = 100000; @@ -445,7 +465,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { ->OnBitrateUpdated(CreateAllocation(kBitrateBps)); EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); // Pause encoder again, and block allocations. EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -455,7 +475,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { ->OnBitrateUpdated(CreateAllocation(0)); EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(0); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); vss_impl->Stop(); }, @@ -476,8 +496,9 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { .WillOnce(Return(kBitrateBps)); static_cast(vss_impl.get()) ->OnBitrateUpdated(CreateAllocation(kBitrateBps)); - VideoBitrateAllocationObserver* const observer = - static_cast(vss_impl.get()); + VideoStreamEncoderInterface::EncoderSink* const sink = + static_cast( + vss_impl.get()); // Populate a test instance of video bitrate allocation. VideoBitrateAllocation alloc; @@ -489,7 +510,7 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { // Initial value. EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); VideoBitrateAllocation updated_alloc = alloc; // Needs 10% increase in bitrate to trigger immediate forward. 
@@ -499,14 +520,14 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { // Too small increase, don't forward. updated_alloc.SetBitrate(0, 0, base_layer_min_update_bitrate_bps - 1); EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(_)).Times(0); - observer->OnBitrateAllocationUpdated(updated_alloc); + sink->OnBitrateAllocationUpdated(updated_alloc); // Large enough increase, do forward. updated_alloc.SetBitrate(0, 0, base_layer_min_update_bitrate_bps); EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(updated_alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(updated_alloc); + sink->OnBitrateAllocationUpdated(updated_alloc); // This is now a decrease compared to last forward allocation, forward // immediately. @@ -514,7 +535,7 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(updated_alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(updated_alloc); + sink->OnBitrateAllocationUpdated(updated_alloc); vss_impl->Stop(); }, @@ -535,8 +556,9 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) { .WillOnce(Return(kBitrateBps)); static_cast(vss_impl.get()) ->OnBitrateUpdated(CreateAllocation(kBitrateBps)); - VideoBitrateAllocationObserver* const observer = - static_cast(vss_impl.get()); + VideoStreamEncoderInterface::EncoderSink* const sink = + static_cast( + vss_impl.get()); // Populate a test instance of video bitrate allocation. VideoBitrateAllocation alloc; @@ -548,7 +570,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) { // Initial value. EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); // Move some bitrate from one layer to a new one, but keep sum the same. // Since layout has changed, immediately trigger forward. 
@@ -559,7 +581,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) { EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(updated_alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(updated_alloc); + sink->OnBitrateAllocationUpdated(updated_alloc); vss_impl->Stop(); }, @@ -580,8 +602,9 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { .WillRepeatedly(Return(kBitrateBps)); static_cast(vss_impl.get()) ->OnBitrateUpdated(CreateAllocation(kBitrateBps)); - VideoBitrateAllocationObserver* const observer = - static_cast(vss_impl.get()); + VideoStreamEncoderInterface::EncoderSink* const sink = + static_cast( + vss_impl.get()); // Populate a test instance of video bitrate allocation. VideoBitrateAllocation alloc; @@ -592,7 +615,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { EncodedImage encoded_image; CodecSpecificInfo codec_specific; - EXPECT_CALL(rtp_video_sender_, OnEncodedImage(_, _, _)) + EXPECT_CALL(rtp_video_sender_, OnEncodedImage) .WillRepeatedly(Return(EncodedImageCallback::Result( EncodedImageCallback::Result::OK))); @@ -603,14 +626,14 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { // Initial value. EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); } { // Sending same allocation again, this one should be throttled. EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(0); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); } clock_.AdvanceTimeMicroseconds(kMaxVbaThrottleTimeMs * 1000); @@ -619,14 +642,14 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { // Sending similar allocation again after timeout, should forward. 
EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(1); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); } { // Sending similar allocation again without timeout, throttle. EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(0); - observer->OnBitrateAllocationUpdated(alloc); + sink->OnBitrateAllocationUpdated(alloc); } { @@ -634,7 +657,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(0); static_cast(vss_impl.get()) - ->OnEncodedImage(encoded_image, &codec_specific, nullptr); + ->OnEncodedImage(encoded_image, &codec_specific); } { @@ -644,7 +667,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(1); static_cast(vss_impl.get()) - ->OnEncodedImage(encoded_image, &codec_specific, nullptr); + ->OnEncodedImage(encoded_image, &codec_specific); } { @@ -654,7 +677,7 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { EXPECT_CALL(rtp_video_sender_, OnBitrateAllocationUpdated(alloc)) .Times(0); static_cast(vss_impl.get()) - ->OnEncodedImage(encoded_image, &codec_specific, nullptr); + ->OnEncodedImage(encoded_image, &codec_specific); } vss_impl->Stop(); @@ -690,31 +713,31 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { static_cast(vss_impl.get()) ->OnEncoderConfigurationChanged( - std::vector{qvga_stream}, + std::vector{qvga_stream}, false, VideoEncoderConfig::ContentType::kRealtimeVideo, min_transmit_bitrate_bps); const DataRate network_constrained_rate = - DataRate::bps(qvga_stream.target_bitrate_bps); + DataRate::BitsPerSec(qvga_stream.target_bitrate_bps); BitrateAllocationUpdate update; update.target_bitrate = network_constrained_rate; update.stable_target_bitrate = network_constrained_rate; - update.round_trip_time = 
TimeDelta::ms(1); + update.round_trip_time = TimeDelta::Millis(1); EXPECT_CALL(rtp_video_sender_, OnBitrateUpdated(update, _)); EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(network_constrained_rate.bps())); EXPECT_CALL( video_stream_encoder_, OnBitrateUpdated(network_constrained_rate, network_constrained_rate, - network_constrained_rate, 0, _)); + network_constrained_rate, 0, _, 0)); static_cast(vss_impl.get()) ->OnBitrateUpdated(update); // Test allocation where the link allocation is larger than the target, // meaning we have some headroom on the link. const DataRate qvga_max_bitrate = - DataRate::bps(qvga_stream.max_bitrate_bps); - const DataRate headroom = DataRate::bps(50000); + DataRate::BitsPerSec(qvga_stream.max_bitrate_bps); + const DataRate headroom = DataRate::BitsPerSec(50000); const DataRate rate_with_headroom = qvga_max_bitrate + headroom; update.target_bitrate = rate_with_headroom; update.stable_target_bitrate = rate_with_headroom; @@ -723,7 +746,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { .WillOnce(Return(rate_with_headroom.bps())); EXPECT_CALL(video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, - rate_with_headroom, 0, _)); + rate_with_headroom, 0, _, 0)); static_cast(vss_impl.get()) ->OnBitrateUpdated(update); @@ -737,10 +760,10 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(rate_with_headroom.bps())); const DataRate headroom_minus_protection = - rate_with_headroom - DataRate::bps(protection_bitrate_bps); + rate_with_headroom - DataRate::BitsPerSec(protection_bitrate_bps); EXPECT_CALL(video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, - headroom_minus_protection, 0, _)); + headroom_minus_protection, 0, _, 0)); static_cast(vss_impl.get()) ->OnBitrateUpdated(update); @@ -753,14 +776,14 @@ TEST_F(VideoSendStreamImplTest, 
CallsVideoStreamEncoderOnBitrateUpdate) { .WillOnce(Return(rate_with_headroom.bps())); EXPECT_CALL(video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, - qvga_max_bitrate, 0, _)); + qvga_max_bitrate, 0, _, 0)); static_cast(vss_impl.get()) ->OnBitrateUpdated(update); // Set rates to zero on stop. EXPECT_CALL(video_stream_encoder_, OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(), - DataRate::Zero(), 0, 0)); + DataRate::Zero(), 0, 0, 0)); vss_impl->Stop(); }, RTC_FROM_HERE); @@ -787,7 +810,7 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { .WillRepeatedly(Invoke( [&](BitrateAllocatorObserver*) { padding_bitrate = 0; })); - EXPECT_CALL(rtp_video_sender_, OnEncodedImage(_, _, _)) + EXPECT_CALL(rtp_video_sender_, OnEncodedImage) .WillRepeatedly(Return(EncodedImageCallback::Result( EncodedImageCallback::Result::OK))); const bool kSuspend = false; @@ -816,7 +839,7 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { // Reconfigure e.g. due to a fake frame. static_cast(vss_impl.get()) ->OnEncoderConfigurationChanged( - std::vector{qvga_stream}, + std::vector{qvga_stream}, false, VideoEncoderConfig::ContentType::kRealtimeVideo, min_transmit_bitrate_bps); // Still no padding because no actual frames were passed, only @@ -835,7 +858,7 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { EncodedImage encoded_image; CodecSpecificInfo codec_specific; static_cast(vss_impl.get()) - ->OnEncodedImage(encoded_image, &codec_specific, nullptr); + ->OnEncodedImage(encoded_image, &codec_specific); // Only after actual frame is encoded are we enabling the padding. 
EXPECT_GT(padding_bitrate, 0); }, @@ -893,5 +916,122 @@ TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) { ASSERT_TRUE(done.Wait(5000)); } +TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvc) { + struct TestConfig { + bool screenshare = false; + bool alr = false; + int min_padding_bitrate_bps = 0; + }; + + std::vector test_variants; + for (bool screenshare : {false, true}) { + for (bool alr : {false, true}) { + for (int min_padding : {0, 400000}) { + test_variants.push_back({screenshare, alr, min_padding}); + } + } + } + + for (const TestConfig& test_config : test_variants) { + test_queue_.SendTask( + [this, test_config] { + const bool kSuspend = false; + config_.suspend_below_min_bitrate = kSuspend; + config_.rtp.extensions.emplace_back( + RtpExtension::kTransportSequenceNumberUri, 1); + config_.periodic_alr_bandwidth_probing = test_config.alr; + auto vss_impl = CreateVideoSendStreamImpl( + kDefaultInitialBitrateBps, kDefaultBitratePriority, + test_config.screenshare + ? VideoEncoderConfig::ContentType::kScreen + : VideoEncoderConfig::ContentType::kRealtimeVideo); + vss_impl->Start(); + + // Svc + VideoStream stream; + stream.width = 1920; + stream.height = 1080; + stream.max_framerate = 30; + stream.min_bitrate_bps = 60000; + stream.target_bitrate_bps = 6000000; + stream.max_bitrate_bps = 1250000; + stream.num_temporal_layers = 2; + stream.max_qp = 56; + stream.bitrate_priority = 1; + + config_.rtp.ssrcs.emplace_back(1); + config_.rtp.ssrcs.emplace_back(2); + + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + AllOf(Field(&MediaStreamAllocationConfig::min_bitrate_bps, + static_cast(stream.min_bitrate_bps)), + Field(&MediaStreamAllocationConfig::max_bitrate_bps, + static_cast(stream.max_bitrate_bps)), + // Stream not yet active - no padding. 
+ Field(&MediaStreamAllocationConfig::pad_up_bitrate_bps, + 0u), + Field(&MediaStreamAllocationConfig::enforce_min_bitrate, + !kSuspend)))); + + static_cast(vss_impl.get()) + ->OnEncoderConfigurationChanged( + std::vector{stream}, true, + test_config.screenshare + ? VideoEncoderConfig::ContentType::kScreen + : VideoEncoderConfig::ContentType::kRealtimeVideo, + test_config.min_padding_bitrate_bps); + ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_); + + // Simulate an encoded image, this will turn the stream active and + // enable padding. + EncodedImage encoded_image; + CodecSpecificInfo codec_specific; + EXPECT_CALL(rtp_video_sender_, OnEncodedImage) + .WillRepeatedly(Return(EncodedImageCallback::Result( + EncodedImageCallback::Result::OK))); + + // Screensharing implicitly forces ALR. + const bool using_alr = test_config.alr || test_config.screenshare; + // If ALR is used, pads only to min bitrate as rampup is handled by + // probing. Otherwise target_bitrate contains the padding target. + const RateControlSettings trials = + RateControlSettings::ParseFromFieldTrials(); + int expected_padding = + using_alr + ? stream.min_bitrate_bps + : static_cast(stream.target_bitrate_bps * + trials.GetSimulcastHysteresisFactor( + test_config.screenshare + ? VideoCodecMode::kScreensharing + : VideoCodecMode::kRealtimeVideo)); + // Min padding bitrate may override padding target. + expected_padding = + std::max(expected_padding, test_config.min_padding_bitrate_bps); + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + AllOf(Field(&MediaStreamAllocationConfig::min_bitrate_bps, + static_cast(stream.min_bitrate_bps)), + Field(&MediaStreamAllocationConfig::max_bitrate_bps, + static_cast(stream.max_bitrate_bps)), + // Stream now active - min bitrate use as padding target + // when ALR is active. 
+ Field(&MediaStreamAllocationConfig::pad_up_bitrate_bps, + expected_padding), + Field(&MediaStreamAllocationConfig::enforce_min_bitrate, + !kSuspend)))); + static_cast(vss_impl.get()) + ->OnEncodedImage(encoded_image, &codec_specific); + ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_); + + vss_impl->Stop(); + }, + RTC_FROM_HERE); + } +} } // namespace internal } // namespace webrtc diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc index 66d31465bd..52e4ddbc42 100644 --- a/video/video_send_stream_tests.cc +++ b/video/video_send_stream_tests.cc @@ -25,20 +25,20 @@ #include "call/simulated_network.h" #include "call/video_send_stream.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/source/rtcp_sender.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/logging.h" #include "rtc_base/platform_thread.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/to_queued_task.h" @@ -96,9 +96,15 @@ enum VideoFormat { kGeneric, kVP8, }; -} // namespace -VideoFrame CreateVideoFrame(int width, int height, uint8_t data); +VideoFrame CreateVideoFrame(int width, int height, int64_t timestamp_ms) { + return webrtc::VideoFrame::Builder() + .set_video_frame_buffer(I420Buffer::Create(width, height)) + .set_rotation(webrtc::kVideoRotation_0) + 
.set_timestamp_ms(timestamp_ms) + .build(); +} +} // namespace class VideoSendStreamTest : public test::CallTest { public: @@ -619,7 +625,7 @@ class UlpfecObserver : public test::EndToEndTest { VideoEncoderFactory* encoder_factory_; RtpHeaderExtensionMap extensions_; - std::string payload_name_; + const std::string payload_name_; const bool use_nack_; const bool expect_red_; const bool expect_ulpfec_; @@ -826,7 +832,7 @@ class FlexfecObserver : public test::EndToEndTest { VideoEncoderFactory* encoder_factory_; RtpHeaderExtensionMap extensions_; - std::string payload_name_; + const std::string payload_name_; const bool use_nack_; bool sent_media_; bool sent_flexfec_; @@ -942,7 +948,7 @@ void VideoSendStreamTest::TestNackRetransmission( non_padding_sequence_numbers_.end() - kNackedPacketsAtOnceCount, non_padding_sequence_numbers_.end()); - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = Clock::GetRealTimeClock(); config.outgoing_transport = transport_adapter_.get(); config.rtcp_report_interval_ms = kRtcpIntervalMs; @@ -1005,8 +1011,8 @@ void VideoSendStreamTest::TestNackRetransmission( std::unique_ptr transport_adapter_; int send_count_; int retransmit_count_; - uint32_t retransmit_ssrc_; - uint8_t retransmit_payload_type_; + const uint32_t retransmit_ssrc_; + const uint8_t retransmit_payload_type_; std::vector nacked_sequence_numbers_; std::vector non_padding_sequence_numbers_; } test(retransmit_ssrc, retransmit_payload_type); @@ -1134,7 +1140,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, fec_packet_received_ = false; ++current_size_rtp_; - rtc::CritScope lock(&mutex_); + MutexLock lock(&mutex_); ++current_size_frame_; } } @@ -1158,7 +1164,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, kVideoSendSsrcs[0], rtp_packet.SequenceNumber(), packets_lost_, // Cumulative lost. loss_ratio); // Loss percent. 
- RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = Clock::GetRealTimeClock(); config.receive_statistics = &lossy_receive_stats; config.outgoing_transport = transport_adapter_.get(); @@ -1176,7 +1182,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, } void UpdateConfiguration() { - rtc::CritScope lock(&mutex_); + MutexLock lock(&mutex_); // Increase frame size for next encoded frame, in the context of the // encoder thread. if (!use_fec_ && current_size_frame_ < static_cast(stop_size_)) { @@ -1241,7 +1247,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, bool fec_packet_received_; size_t current_size_rtp_; - rtc::CriticalSection mutex_; + Mutex mutex_; int current_size_frame_ RTC_GUARDED_BY(mutex_); }; @@ -1270,180 +1276,6 @@ TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSizeWithFec) { TestPacketFragmentationSize(kVP8, true); } -// The test will go through a number of phases. -// 1. Start sending packets. -// 2. As soon as the RTP stream has been detected, signal a low REMB value to -// suspend the stream. -// 3. Wait until |kSuspendTimeFrames| have been captured without seeing any RTP -// packets. -// 4. Signal a high REMB and then wait for the RTP stream to start again. -// When the stream is detected again, and the stats show that the stream -// is no longer suspended, the test ends. -TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) { - static const int kSuspendTimeFrames = 60; // Suspend for 2 seconds @ 30 fps. 
- - class RembObserver : public test::SendTest { - public: - class CaptureObserver : public rtc::VideoSinkInterface { - public: - explicit CaptureObserver(RembObserver* remb_observer) - : remb_observer_(remb_observer) {} - - void OnFrame(const VideoFrame&) { - rtc::CritScope lock(&remb_observer_->crit_); - if (remb_observer_->test_state_ == kDuringSuspend && - ++remb_observer_->suspended_frame_count_ > kSuspendTimeFrames) { - VideoSendStream::Stats stats = remb_observer_->stream_->GetStats(); - EXPECT_TRUE(stats.suspended); - remb_observer_->SendRtcpFeedback(remb_observer_->high_remb_bps_); - remb_observer_->test_state_ = kWaitingForPacket; - } - } - - private: - RembObserver* const remb_observer_; - }; - - RembObserver() - : SendTest(kDefaultTimeoutMs), - clock_(Clock::GetRealTimeClock()), - capture_observer_(this), - stream_(nullptr), - test_state_(kBeforeSuspend), - rtp_count_(0), - last_sequence_number_(0), - suspended_frame_count_(0), - low_remb_bps_(0), - high_remb_bps_(0) {} - - private: - Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); - ++rtp_count_; - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet, length)); - last_sequence_number_ = rtp_packet.SequenceNumber(); - - if (test_state_ == kBeforeSuspend) { - // The stream has started. Try to suspend it. - SendRtcpFeedback(low_remb_bps_); - test_state_ = kDuringSuspend; - } else if (test_state_ == kDuringSuspend) { - if (rtp_packet.padding_size() == 0) { - // Received non-padding packet during suspension period. Reset the - // counter. - suspended_frame_count_ = 0; - } - SendRtcpFeedback(0); // REMB is only sent if value is > 0. - } else if (test_state_ == kWaitingForPacket) { - if (rtp_packet.padding_size() == 0) { - // Non-padding packet observed. Test is almost complete. Will just - // have to wait for the stats to change. - test_state_ = kWaitingForStats; - } - SendRtcpFeedback(0); // REMB is only sent if value is > 0. 
- } else if (test_state_ == kWaitingForStats) { - VideoSendStream::Stats stats = stream_->GetStats(); - if (stats.suspended == false) { - // Stats flipped to false. Test is complete. - observation_complete_.Set(); - } - SendRtcpFeedback(0); // REMB is only sent if value is > 0. - } - - return SEND_PACKET; - } - - void set_low_remb_bps(int value) { - rtc::CritScope lock(&crit_); - low_remb_bps_ = value; - } - - void set_high_remb_bps(int value) { - rtc::CritScope lock(&crit_); - high_remb_bps_ = value; - } - - void OnVideoStreamsCreated( - VideoSendStream* send_stream, - const std::vector& receive_streams) override { - stream_ = send_stream; - } - - void OnFrameGeneratorCapturerCreated( - test::FrameGeneratorCapturer* frame_generator_capturer) override { - frame_generator_capturer->AddOrUpdateSink(&capture_observer_, - rtc::VideoSinkWants()); - } - - void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, - VideoEncoderConfig* encoder_config) override { - RTC_DCHECK_EQ(1, encoder_config->number_of_streams); - transport_adapter_.reset( - new internal::TransportAdapter(send_config->send_transport)); - transport_adapter_->Enable(); - send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs; - send_config->suspend_below_min_bitrate = true; - int min_bitrate_bps = - test::DefaultVideoStreamFactory::kDefaultMinBitratePerStream[0]; - set_low_remb_bps(min_bitrate_bps - 10000); - int threshold_window = std::max(min_bitrate_bps / 10, 20000); - ASSERT_GT(encoder_config->max_bitrate_bps, - min_bitrate_bps + threshold_window + 5000); - set_high_remb_bps(min_bitrate_bps + threshold_window + 5000); - } - - void PerformTest() override { - EXPECT_TRUE(Wait()) << "Timed out during suspend-below-min-bitrate test."; - } - - enum TestState { - kBeforeSuspend, - kDuringSuspend, - kWaitingForPacket, - kWaitingForStats - }; - - virtual void SendRtcpFeedback(int remb_value) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) { - FakeReceiveStatistics 
receive_stats(kVideoSendSsrcs[0], - last_sequence_number_, rtp_count_, 0); - RtpRtcp::Configuration config; - config.clock = clock_; - config.receive_statistics = &receive_stats; - config.outgoing_transport = transport_adapter_.get(); - config.rtcp_report_interval_ms = kRtcpIntervalMs; - config.local_media_ssrc = kVideoSendSsrcs[0]; - RTCPSender rtcp_sender(config); - - rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize); - rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]); - if (remb_value > 0) { - rtcp_sender.SetRemb(remb_value, std::vector()); - } - RTCPSender::FeedbackState feedback_state; - EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr)); - } - - std::unique_ptr transport_adapter_; - Clock* const clock_; - CaptureObserver capture_observer_; - VideoSendStream* stream_; - - rtc::CriticalSection crit_; - TestState test_state_ RTC_GUARDED_BY(crit_); - int rtp_count_ RTC_GUARDED_BY(crit_); - int last_sequence_number_ RTC_GUARDED_BY(crit_); - int suspended_frame_count_ RTC_GUARDED_BY(crit_); - int low_remb_bps_ RTC_GUARDED_BY(crit_); - int high_remb_bps_ RTC_GUARDED_BY(crit_); - } test; - - RunBaseTest(&test); -} - // This test that padding stops being send after a while if the Camera stops // producing video frames and that padding resumes if the camera restarts. 
TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { @@ -1452,12 +1284,11 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { NoPaddingWhenVideoIsMuted() : SendTest(kDefaultTimeoutMs), clock_(Clock::GetRealTimeClock()), - last_packet_time_ms_(-1), capturer_(nullptr) {} private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); last_packet_time_ms_ = clock_->TimeInMilliseconds(); RtpPacket rtp_packet; @@ -1485,11 +1316,11 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { } Action OnSendRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); const int kNoPacketsThresholdMs = 2000; if (test_state_ == kWaitingForNoPackets && - (last_packet_time_ms_ > 0 && - clock_->TimeInMilliseconds() - last_packet_time_ms_ > + (last_packet_time_ms_ && + clock_->TimeInMilliseconds() - last_packet_time_ms_.value() > kNoPacketsThresholdMs)) { // No packets seen for |kNoPacketsThresholdMs|, restart camera. 
capturer_->Start(); @@ -1508,7 +1339,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { void OnFrameGeneratorCapturerCreated( test::FrameGeneratorCapturer* frame_generator_capturer) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); capturer_ = frame_generator_capturer; } @@ -1527,10 +1358,9 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { TestState test_state_ = kBeforeStopCapture; Clock* const clock_; - std::unique_ptr transport_adapter_; - rtc::CriticalSection crit_; - int64_t last_packet_time_ms_ RTC_GUARDED_BY(crit_); - test::FrameGeneratorCapturer* capturer_ RTC_GUARDED_BY(crit_); + Mutex mutex_; + absl::optional last_packet_time_ms_ RTC_GUARDED_BY(mutex_); + test::FrameGeneratorCapturer* capturer_ RTC_GUARDED_BY(mutex_); } test; RunBaseTest(&test); @@ -1553,7 +1383,7 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) { } Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; rtp_packet.Parse(packet, length); @@ -1593,16 +1423,16 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) { // rid of this. SleepMs(5000); { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Expect padding to be a small percentage of total bytes sent. 
EXPECT_LT(padding_length_, .1 * total_length_); } } - rtc::CriticalSection crit_; + Mutex mutex_; Clock* const clock_; - size_t padding_length_ RTC_GUARDED_BY(crit_); - size_t total_length_ RTC_GUARDED_BY(crit_); + size_t padding_length_ RTC_GUARDED_BY(mutex_); + size_t total_length_ RTC_GUARDED_BY(mutex_); Call* call_; } test; @@ -1623,12 +1453,18 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { static const int kRembRespectedBitrateBps = 100000; class BitrateObserver : public test::SendTest { public: - BitrateObserver() + explicit BitrateObserver(TaskQueueBase* task_queue) : SendTest(kDefaultTimeoutMs), + task_queue_(task_queue), retranmission_rate_limiter_(Clock::GetRealTimeClock(), 1000), stream_(nullptr), bitrate_capped_(false) {} + ~BitrateObserver() override { + // Make sure we free |rtp_rtcp_| in the same context as we constructed it. + SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; }); + } + private: Action OnSendRtp(const uint8_t* packet, size_t length) override { if (RtpHeaderParser::IsRtcp(packet, length)) @@ -1663,11 +1499,11 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { VideoSendStream* send_stream, const std::vector& receive_streams) override { stream_ = send_stream; - RtpRtcp::Configuration config; + RtpRtcpInterface::Configuration config; config.clock = Clock::GetRealTimeClock(); config.outgoing_transport = feedback_transport_.get(); config.retransmission_rate_limiter = &retranmission_rate_limiter_; - rtp_rtcp_ = RtpRtcp::Create(config); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config); rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize); } @@ -1686,12 +1522,13 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { << "Timeout while waiting for low bitrate stats after REMB."; } - std::unique_ptr rtp_rtcp_; + TaskQueueBase* const task_queue_; + std::unique_ptr rtp_rtcp_; std::unique_ptr feedback_transport_; RateLimiter retranmission_rate_limiter_; VideoSendStream* stream_; bool 
bitrate_capped_; - } test; + } test(task_queue()); RunBaseTest(&test); } @@ -1768,8 +1605,8 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { void PerformTest() override { rtc::NetworkRoute new_route; new_route.connected = true; - new_route.local_network_id = 10; - new_route.remote_network_id = 20; + new_route.local = rtc::RouteEndpoint::CreateWithNetworkId(10); + new_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20); BitrateConstraints bitrate_config; SendTask(RTC_FROM_HERE, task_queue_, @@ -1795,7 +1632,8 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { // TODO(holmer): We should set the last sent packet id here and // verify that we correctly ignore any packet loss reported prior to // that id. - ++new_route.local_network_id; + new_route.local = rtc::RouteEndpoint::CreateWithNetworkId( + new_route.local.network_id() + 1); call_->GetTransportControllerSend()->OnNetworkRouteChanged( "transport", new_route); EXPECT_GE(call_->GetStats().send_bandwidth_bps, kStartBitrateBps); @@ -1813,6 +1651,111 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { RunBaseTest(&test); } +// Test that if specified, relay cap is lifted on transition to direct +// connection. 
+TEST_F(VideoSendStreamTest, RelayToDirectRoute) { + static const int kStartBitrateBps = 300000; + static const int kRelayBandwidthCapBps = 800000; + static const int kMinPacketsToSend = 100; + webrtc::test::ScopedFieldTrials field_trials( + std::string(field_trial::GetFieldTrialString()) + + "WebRTC-Bwe-NetworkRouteConstraints/relay_cap:" + + std::to_string(kRelayBandwidthCapBps) + "bps/"); + + class RelayToDirectRouteTest : public test::EndToEndTest { + public: + explicit RelayToDirectRouteTest(TaskQueueBase* task_queue) + : EndToEndTest(test::CallTest::kDefaultTimeoutMs), + task_queue_(task_queue), + call_(nullptr), + packets_sent_(0), + relayed_phase_(true) { + module_process_thread_.Detach(); + task_queue_thread_.Detach(); + } + + ~RelayToDirectRouteTest() { + // Block until all already posted tasks run to avoid 'use after free' + // when such task accesses |this|. + SendTask(RTC_FROM_HERE, task_queue_, [] {}); + } + + void OnCallsCreated(Call* sender_call, Call* receiver_call) override { + RTC_DCHECK_RUN_ON(&task_queue_thread_); + RTC_DCHECK(!call_); + call_ = sender_call; + } + + Action OnSendRtp(const uint8_t* packet, size_t length) override { + RTC_DCHECK_RUN_ON(&module_process_thread_); + task_queue_->PostTask(ToQueuedTask([this]() { + RTC_DCHECK_RUN_ON(&task_queue_thread_); + if (!call_) + return; + bool had_time_to_exceed_cap_in_relayed_phase = + relayed_phase_ && ++packets_sent_ > kMinPacketsToSend; + bool did_exceed_cap = + call_->GetStats().send_bandwidth_bps > kRelayBandwidthCapBps; + if (did_exceed_cap || had_time_to_exceed_cap_in_relayed_phase) + observation_complete_.Set(); + })); + return SEND_PACKET; + } + + void OnStreamsStopped() override { + RTC_DCHECK_RUN_ON(&task_queue_thread_); + call_ = nullptr; + } + + void PerformTest() override { + rtc::NetworkRoute route; + route.connected = true; + route.local = rtc::RouteEndpoint::CreateWithNetworkId(10); + route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20); + + SendTask(RTC_FROM_HERE, 
task_queue_, [this, &route]() { + RTC_DCHECK_RUN_ON(&task_queue_thread_); + relayed_phase_ = true; + route.remote = route.remote.CreateWithTurn(true); + call_->GetTransportControllerSend()->OnNetworkRouteChanged("transport", + route); + BitrateConstraints bitrate_config; + bitrate_config.start_bitrate_bps = kStartBitrateBps; + + call_->GetTransportControllerSend()->SetSdpBitrateParameters( + bitrate_config); + }); + + EXPECT_TRUE(Wait()) + << "Timeout waiting for sufficient packets sent count."; + + SendTask(RTC_FROM_HERE, task_queue_, [this, &route]() { + RTC_DCHECK_RUN_ON(&task_queue_thread_); + EXPECT_LE(call_->GetStats().send_bandwidth_bps, kRelayBandwidthCapBps); + + route.remote = route.remote.CreateWithTurn(false); + call_->GetTransportControllerSend()->OnNetworkRouteChanged("transport", + route); + relayed_phase_ = false; + observation_complete_.Reset(); + }); + + EXPECT_TRUE(Wait()) + << "Timeout while waiting for bandwidth to outgrow relay cap."; + } + + private: + webrtc::SequenceChecker module_process_thread_; + webrtc::SequenceChecker task_queue_thread_; + TaskQueueBase* const task_queue_; + Call* call_ RTC_GUARDED_BY(task_queue_thread_); + int packets_sent_ RTC_GUARDED_BY(task_queue_thread_); + bool relayed_phase_ RTC_GUARDED_BY(task_queue_thread_); + } test(task_queue()); + + RunBaseTest(&test); +} + TEST_F(VideoSendStreamTest, ChangingTransportOverhead) { class ChangingTransportOverheadTest : public test::EndToEndTest { public: @@ -1829,7 +1772,7 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) { Action OnSendRtp(const uint8_t* packet, size_t length) override { EXPECT_LE(length, kMaxRtpPacketSize); - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (++packets_sent_ < 100) return SEND_PACKET; observation_complete_.Set(); @@ -1853,7 +1796,7 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) { EXPECT_TRUE(Wait()); { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); packets_sent_ = 0; } @@ -1869,7 +1812,7 @@ 
TEST_F(VideoSendStreamTest, ChangingTransportOverhead) { private: TaskQueueBase* const task_queue_; Call* call_; - rtc::CriticalSection lock_; + Mutex lock_; int packets_sent_ RTC_GUARDED_BY(lock_); int transport_overhead_; const size_t kMaxRtpPacketSize = 1000; @@ -2045,7 +1988,7 @@ TEST_F(VideoSendStreamTest, void WaitForResolution(int width, int height) { { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (last_initialized_frame_width_ == width && last_initialized_frame_height_ == height) { return; @@ -2054,7 +1997,7 @@ TEST_F(VideoSendStreamTest, EXPECT_TRUE( init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); EXPECT_EQ(width, last_initialized_frame_width_); EXPECT_EQ(height, last_initialized_frame_height_); } @@ -2063,7 +2006,7 @@ TEST_F(VideoSendStreamTest, private: int32_t InitEncode(const VideoCodec* config, const Settings& settings) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); last_initialized_frame_width_ = config->width; last_initialized_frame_height_ = config->height; ++number_of_initializations_; @@ -2078,11 +2021,11 @@ TEST_F(VideoSendStreamTest, return 0; } - rtc::CriticalSection crit_; + Mutex mutex_; rtc::Event init_encode_called_; - size_t number_of_initializations_ RTC_GUARDED_BY(&crit_); - int last_initialized_frame_width_ RTC_GUARDED_BY(&crit_); - int last_initialized_frame_height_ RTC_GUARDED_BY(&crit_); + size_t number_of_initializations_ RTC_GUARDED_BY(&mutex_); + int last_initialized_frame_width_ RTC_GUARDED_BY(&mutex_); + int last_initialized_frame_height_ RTC_GUARDED_BY(&mutex_); }; test::NullTransport transport; @@ -2121,21 +2064,21 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { : FakeEncoder(Clock::GetRealTimeClock()), start_bitrate_kbps_(0) {} int32_t InitEncode(const VideoCodec* config, const Settings& settings) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); 
start_bitrate_kbps_ = config->startBitrate; start_bitrate_changed_.Set(); return FakeEncoder::InitEncode(config, settings); } void SetRates(const RateControlParameters& parameters) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); start_bitrate_kbps_ = parameters.bitrate.get_sum_kbps(); start_bitrate_changed_.Set(); FakeEncoder::SetRates(parameters); } int GetStartBitrateKbps() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return start_bitrate_kbps_; } @@ -2145,9 +2088,9 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { } private: - rtc::CriticalSection crit_; + mutable Mutex mutex_; rtc::Event start_bitrate_changed_; - int start_bitrate_kbps_ RTC_GUARDED_BY(crit_); + int start_bitrate_kbps_ RTC_GUARDED_BY(mutex_); }; CreateSenderCall(); @@ -2163,13 +2106,15 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { StartBitrateObserver encoder; test::VideoEncoderProxyFactory encoder_factory(&encoder); - // Since this test does not use a capturer, set |internal_source| = true. - // Encoder configuration is otherwise updated on the next video frame. - encoder_factory.SetHasInternalSource(true); GetVideoSendConfig()->encoder_settings.encoder_factory = &encoder_factory; CreateVideoStreams(); + // Start capturing and encoding frames to force encoder reconfiguration. 
+ CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth, + kDefaultHeight); + frame_generator_capturer_->Start(); + EXPECT_TRUE(encoder.WaitForStartBitrate()); EXPECT_EQ(GetVideoEncoderConfig()->max_bitrate_bps / 1000, encoder.GetStartBitrateKbps()); @@ -2194,13 +2139,13 @@ class StartStopBitrateObserver : public test::FakeEncoder { StartStopBitrateObserver() : FakeEncoder(Clock::GetRealTimeClock()) {} int32_t InitEncode(const VideoCodec* config, const Settings& settings) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); encoder_init_.Set(); return FakeEncoder::InitEncode(config, settings); } void SetRates(const RateControlParameters& parameters) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); bitrate_kbps_ = parameters.bitrate.get_sum_kbps(); bitrate_changed_.Set(); FakeEncoder::SetRates(parameters); @@ -2214,7 +2159,7 @@ class StartStopBitrateObserver : public test::FakeEncoder { do { absl::optional bitrate_kbps; { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); bitrate_kbps = bitrate_kbps_; } if (!bitrate_kbps) @@ -2229,10 +2174,10 @@ class StartStopBitrateObserver : public test::FakeEncoder { } private: - rtc::CriticalSection crit_; + Mutex mutex_; rtc::Event encoder_init_; rtc::Event bitrate_changed_; - absl::optional bitrate_kbps_ RTC_GUARDED_BY(crit_); + absl::optional bitrate_kbps_ RTC_GUARDED_BY(mutex_); }; // This test that if the encoder use an internal source, VideoEncoder::SetRates @@ -2353,22 +2298,6 @@ TEST_F(VideoSendStreamTest, VideoSendStreamUpdateActiveSimulcastLayers) { }); } -VideoFrame CreateVideoFrame(int width, int height, uint8_t data) { - const int kSizeY = width * height * 2; - std::unique_ptr buffer(new uint8_t[kSizeY]); - memset(buffer.get(), data, kSizeY); - VideoFrame frame = - webrtc::VideoFrame::Builder() - .set_video_frame_buffer(I420Buffer::Create(width, height)) - .set_rotation(webrtc::kVideoRotation_0) - .set_timestamp_us(data) - .build(); - frame.set_timestamp(data); - 
// Use data as a ms timestamp. - frame.set_timestamp_us(data * rtc::kNumMicrosecsPerMillisec); - return frame; -} - TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { class EncoderStateObserver : public test::SendTest, public VideoEncoder { public: @@ -2382,30 +2311,35 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { released_(false), encoder_factory_(this) {} - bool IsReleased() { - rtc::CritScope lock(&crit_); + bool IsReleased() RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); return released_; } - bool IsReadyForEncode() { - rtc::CritScope lock(&crit_); - return initialized_ && callback_registered_; + bool IsReadyForEncode() RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); + return IsReadyForEncodeLocked(); } - size_t num_releases() { - rtc::CritScope lock(&crit_); + size_t num_releases() RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); return num_releases_; } private: + bool IsReadyForEncodeLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) { + return initialized_ && callback_registered_; + } + void SetFecControllerOverride( FecControllerOverride* fec_controller_override) override { // Ignored. 
} int32_t InitEncode(const VideoCodec* codecSettings, - const Settings& settings) override { - rtc::CritScope lock(&crit_); + const Settings& settings) override + RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); EXPECT_FALSE(initialized_); initialized_ = true; released_ = false; @@ -2421,16 +2355,16 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { } int32_t RegisterEncodeCompleteCallback( - EncodedImageCallback* callback) override { - rtc::CritScope lock(&crit_); + EncodedImageCallback* callback) override RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); EXPECT_TRUE(initialized_); callback_registered_ = true; return 0; } - int32_t Release() override { - rtc::CritScope lock(&crit_); - EXPECT_TRUE(IsReadyForEncode()); + int32_t Release() override RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); + EXPECT_TRUE(IsReadyForEncodeLocked()); EXPECT_FALSE(released_); initialized_ = false; callback_registered_ = false; @@ -2465,8 +2399,7 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); EXPECT_EQ(0u, num_releases()); stream_->Stop(); - // Encoder should not be released before destroying the - // VideoSendStream. + // Encoder should not be released before destroying the VideoSendStream. 
EXPECT_FALSE(IsReleased()); EXPECT_TRUE(IsReadyForEncode()); stream_->Start(); @@ -2477,12 +2410,12 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { } TaskQueueBase* const task_queue_; - rtc::CriticalSection crit_; + Mutex mutex_; VideoSendStream* stream_; - bool initialized_ RTC_GUARDED_BY(crit_); - bool callback_registered_ RTC_GUARDED_BY(crit_); - size_t num_releases_ RTC_GUARDED_BY(crit_); - bool released_ RTC_GUARDED_BY(crit_); + bool initialized_ RTC_GUARDED_BY(mutex_); + bool callback_registered_ RTC_GUARDED_BY(mutex_); + size_t num_releases_ RTC_GUARDED_BY(mutex_); + bool released_ RTC_GUARDED_BY(mutex_); test::VideoEncoderProxyFactory encoder_factory_; VideoEncoderConfig encoder_config_; } test_encoder(task_queue()); @@ -2690,7 +2623,14 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) { RunBaseTest(&test); } -TEST_F(VideoSendStreamTest, EncoderSetupPropagatesH264Config) { +// Fails on MSAN: https://bugs.chromium.org/p/webrtc/issues/detail?id=11376. 
+#if defined(MEMORY_SANITIZER) +#define MAYBE_EncoderSetupPropagatesH264Config \ + DISABLED_EncoderSetupPropagatesH264Config +#else +#define MAYBE_EncoderSetupPropagatesH264Config EncoderSetupPropagatesH264Config +#endif +TEST_F(VideoSendStreamTest, MAYBE_EncoderSetupPropagatesH264Config) { VideoCodecConfigObserver test(kVideoCodecH264, "H264"); RunBaseTest(&test); } @@ -2705,7 +2645,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) { private: Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); ++rtp_packets_sent_; @@ -2714,7 +2654,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) { } Action OnSendRtcp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet, length)); @@ -2738,9 +2678,9 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) { EXPECT_TRUE(Wait()) << "Timed out while waiting for RTCP sender report."; } - rtc::CriticalSection crit_; - size_t rtp_packets_sent_ RTC_GUARDED_BY(&crit_); - size_t media_bytes_sent_ RTC_GUARDED_BY(&crit_); + Mutex mutex_; + size_t rtp_packets_sent_ RTC_GUARDED_BY(&mutex_); + size_t media_bytes_sent_ RTC_GUARDED_BY(&mutex_); } test; RunBaseTest(&test); @@ -2821,12 +2761,13 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { static const int kMaxBitrateKbps = 413; static const int kIncreasedStartBitrateKbps = 451; static const int kIncreasedMaxBitrateKbps = 597; - // If these fields trial are on, we get lower bitrates than expected by this - // test, due to the packetization overhead and encoder pushback. + // TODO(bugs.webrtc.org/12058): If these fields trial are on, we get lower + // bitrates than expected by this test, due to encoder pushback and subtracted + // overhead. 
webrtc::test::ScopedFieldTrials field_trials( std::string(field_trial::GetFieldTrialString()) + - "WebRTC-SubtractPacketizationOverhead/Disabled/" - "WebRTC-VideoRateControl/bitrate_adjuster:false/"); + "WebRTC-VideoRateControl/bitrate_adjuster:false/" + "WebRTC-SendSideBwe-WithOverhead/Disabled/"); class EncoderBitrateThresholdObserver : public test::SendTest, public VideoBitrateAllocatorFactory, @@ -2865,9 +2806,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { } else if (num_rate_allocator_creations_ == 2) { EXPECT_EQ(static_cast(kIncreasedMaxBitrateKbps), codec.maxBitrate); - // The start bitrate will be whatever the rate BitRateController - // has currently configured but in the span of the set max and min - // bitrate. + // The start bitrate will be whatever the rate BitRateController has + // currently configured but in the span of the set max and min bitrate. } ++num_rate_allocator_creations_; create_rate_allocator_event_.Set(); @@ -2895,7 +2835,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { void SetRates(const RateControlParameters& parameters) override { { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (target_bitrate_ == parameters.bitrate.get_sum_kbps()) { FakeEncoder::SetRates(parameters); return; @@ -2912,14 +2852,14 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // until the correct value has been observed. 
const int64_t start_time = rtc::TimeMillis(); do { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (target_bitrate_ == expected_bitrate) { return; } } while (bitrate_changed_event_.Wait( std::max(int64_t{1}, VideoSendStreamTest::kDefaultTimeoutMs - (rtc::TimeMillis() - start_time)))); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); EXPECT_EQ(target_bitrate_, expected_bitrate) << "Timed out while waiting encoder rate to be set."; } @@ -3000,8 +2940,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { rtc::Event create_rate_allocator_event_; rtc::Event init_encode_event_; rtc::Event bitrate_changed_event_; - rtc::CriticalSection crit_; - uint32_t target_bitrate_ RTC_GUARDED_BY(&crit_); + Mutex mutex_; + uint32_t target_bitrate_ RTC_GUARDED_BY(&mutex_); int num_rate_allocator_creations_; int num_encoder_initializations_; @@ -3037,8 +2977,10 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { CodecSpecificInfo specifics; specifics.codecType = kVideoCodecGeneric; - uint8_t buffer[16] = {0}; - EncodedImage encoded(buffer, sizeof(buffer), sizeof(buffer)); + EncodedImage encoded; + auto buffer = EncodedImageBuffer::Create(16); + memset(buffer->data(), 0, 16); + encoded.SetEncodedData(buffer); encoded.SetTimestamp(input_image.timestamp()); encoded.capture_time_ms_ = input_image.render_time_ms(); @@ -3049,11 +2991,11 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { encoded.SetSpatialIndex(i); EncodedImageCallback* callback; { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); callback = callback_; } RTC_DCHECK(callback); - if (callback->OnEncodedImage(encoded, &specifics, nullptr).error != + if (callback->OnEncodedImage(encoded, &specifics).error != EncodedImageCallback::Result::OK) { return -1; } @@ -3152,7 +3094,7 @@ class Vp9HeaderObserver : public test::SendTest { bool wait = Wait(); { // In case of time out, OnSendRtp might still access frames_sent_; - rtc::CritScope lock(&crit_); + MutexLock 
lock(&mutex_); EXPECT_TRUE(wait) << "Test timed out waiting for VP9 packet, num frames " << frames_sent_; } @@ -3184,7 +3126,7 @@ class Vp9HeaderObserver : public test::SendTest { ++packets_sent_; if (rtp_packet.Marker()) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ++frames_sent_; } last_packet_marker_ = rtp_packet.Marker(); @@ -3411,7 +3353,7 @@ class Vp9HeaderObserver : public test::SendTest { uint32_t last_packet_timestamp_ = 0; RTPVideoHeaderVP9 last_vp9_; size_t packets_sent_; - rtc::CriticalSection crit_; + Mutex mutex_; size_t frames_sent_; int expected_width_; int expected_height_; @@ -3486,6 +3428,7 @@ void VideoSendStreamTest::TestVp9NonFlexMode(uint8_t num_temporal_layers, vp9_settings_.flexibleMode = false; vp9_settings_.frameDroppingOn = false; + vp9_settings_.automaticResizeOn = false; vp9_settings_.keyFrameInterval = kKeyFrameInterval; vp9_settings_.numberOfTemporalLayers = num_temporal_layers_; vp9_settings_.numberOfSpatialLayers = num_spatial_layers_; @@ -3701,7 +3644,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { first_packet_sent_(false) {} void SetRates(const RateControlParameters& parameters) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); // Wait for the first sent packet so that videosendstream knows // rtp_overhead. 
if (first_packet_sent_) { @@ -3725,7 +3668,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { } Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); first_packet_sent_ = true; return SEND_PACKET; } @@ -3750,7 +3693,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { EXPECT_TRUE( bitrate_changed_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); EXPECT_LE(max_bitrate_bps_, 57760u); } } @@ -3759,9 +3702,9 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { TaskQueueBase* const task_queue_; test::VideoEncoderProxyFactory encoder_factory_; Call* call_; - rtc::CriticalSection crit_; - uint32_t max_bitrate_bps_ RTC_GUARDED_BY(&crit_); - bool first_packet_sent_ RTC_GUARDED_BY(&crit_); + Mutex mutex_; + uint32_t max_bitrate_bps_ RTC_GUARDED_BY(&mutex_); + bool first_packet_sent_ RTC_GUARDED_BY(&mutex_); rtc::Event bitrate_changed_event_; } test(task_queue()); RunBaseTest(&test); @@ -3880,7 +3823,7 @@ class ContentSwitchTest : public test::SendTest { void OnVideoStreamsCreated( VideoSendStream* send_stream, const std::vector& receive_streams) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); send_stream_ = send_stream; } @@ -3901,12 +3844,12 @@ class ContentSwitchTest : public test::SendTest { } Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); auto internal_send_peer = test::VideoSendStreamPeer(send_stream_); float pacing_factor = internal_send_peer.GetPacingFactorOverride().value_or(0.0f); - float expected_pacing_factor = PacedSender::kDefaultPaceMultiplier; + float expected_pacing_factor = 1.1; // Strict pacing factor. if (send_stream_->GetStats().content_type == webrtc::VideoContentType::SCREENSHARE) { expected_pacing_factor = 1.0f; // Currently used pacing factor in ALR. 
@@ -3963,18 +3906,18 @@ class ContentSwitchTest : public test::SendTest { private: StreamState GetStreamState() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return state_; } - rtc::CriticalSection crit_; + Mutex mutex_; rtc::Event content_switch_event_; Call* call_; - StreamState state_ RTC_GUARDED_BY(crit_); - VideoSendStream* send_stream_ RTC_GUARDED_BY(crit_); + StreamState state_ RTC_GUARDED_BY(mutex_); + VideoSendStream* send_stream_ RTC_GUARDED_BY(mutex_); VideoSendStream::Config send_stream_config_; VideoEncoderConfig encoder_config_; - uint32_t packets_sent_ RTC_GUARDED_BY(crit_); + uint32_t packets_sent_ RTC_GUARDED_BY(mutex_); T* stream_resetter_; }; diff --git a/video/video_source_sink_controller.cc b/video/video_source_sink_controller.cc index a649adc68c..376eb85eae 100644 --- a/video/video_source_sink_controller.cc +++ b/video/video_source_sink_controller.cc @@ -14,10 +14,28 @@ #include #include +#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/strings/string_builder.h" namespace webrtc { +namespace { + +std::string WantsToString(const rtc::VideoSinkWants& wants) { + rtc::StringBuilder ss; + + ss << "max_fps=" << wants.max_framerate_fps + << " max_pixel_count=" << wants.max_pixel_count << " target_pixel_count=" + << (wants.target_pixel_count.has_value() + ? 
std::to_string(wants.target_pixel_count.value()) + : "null"); + + return ss.Release(); +} + +} // namespace + VideoSourceSinkController::VideoSourceSinkController( rtc::VideoSinkInterface* sink, rtc::VideoSourceInterface* source) @@ -25,87 +43,97 @@ VideoSourceSinkController::VideoSourceSinkController( RTC_DCHECK(sink_); } +VideoSourceSinkController::~VideoSourceSinkController() { + RTC_DCHECK_RUN_ON(&sequence_checker_); +} + void VideoSourceSinkController::SetSource( rtc::VideoSourceInterface* source) { - rtc::VideoSourceInterface* old_source; - rtc::VideoSinkWants wants; - { - rtc::CritScope lock(&crit_); - old_source = source_; - source_ = source; - wants = CurrentSettingsToSinkWants(); - } + RTC_DCHECK_RUN_ON(&sequence_checker_); + + rtc::VideoSourceInterface* old_source = source_; + source_ = source; + if (old_source != source && old_source) old_source->RemoveSink(sink_); + if (!source) return; - source->AddOrUpdateSink(sink_, wants); + + source->AddOrUpdateSink(sink_, CurrentSettingsToSinkWants()); +} + +bool VideoSourceSinkController::HasSource() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return source_ != nullptr; } void VideoSourceSinkController::PushSourceSinkSettings() { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); if (!source_) return; - source_->AddOrUpdateSink(sink_, CurrentSettingsToSinkWants()); + rtc::VideoSinkWants wants = CurrentSettingsToSinkWants(); + RTC_LOG(INFO) << "Pushing SourceSink restrictions: " << WantsToString(wants); + source_->AddOrUpdateSink(sink_, wants); } VideoSourceRestrictions VideoSourceSinkController::restrictions() const { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return restrictions_; } absl::optional VideoSourceSinkController::pixels_per_frame_upper_limit() const { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return pixels_per_frame_upper_limit_; } absl::optional VideoSourceSinkController::frame_rate_upper_limit() const { - 
rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return frame_rate_upper_limit_; } bool VideoSourceSinkController::rotation_applied() const { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return rotation_applied_; } int VideoSourceSinkController::resolution_alignment() const { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return resolution_alignment_; } void VideoSourceSinkController::SetRestrictions( VideoSourceRestrictions restrictions) { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); restrictions_ = std::move(restrictions); } void VideoSourceSinkController::SetPixelsPerFrameUpperLimit( absl::optional pixels_per_frame_upper_limit) { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); pixels_per_frame_upper_limit_ = std::move(pixels_per_frame_upper_limit); } void VideoSourceSinkController::SetFrameRateUpperLimit( absl::optional frame_rate_upper_limit) { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); frame_rate_upper_limit_ = std::move(frame_rate_upper_limit); } void VideoSourceSinkController::SetRotationApplied(bool rotation_applied) { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); rotation_applied_ = rotation_applied; } void VideoSourceSinkController::SetResolutionAlignment( int resolution_alignment) { - rtc::CritScope lock(&crit_); + RTC_DCHECK_RUN_ON(&sequence_checker_); resolution_alignment_ = resolution_alignment; } -// RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) +// RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_) rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() const { rtc::VideoSinkWants wants; diff --git a/video/video_source_sink_controller.h b/video/video_source_sink_controller.h index 379457cdf6..ed8f990970 100644 --- a/video/video_source_sink_controller.h +++ b/video/video_source_sink_controller.h @@ -11,12 +11,14 @@ #ifndef VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_ #define 
VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_ +#include + #include "absl/types/optional.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" -#include "call/adaptation/resource_adaptation_module_interface.h" -#include "rtc_base/critical_section.h" +#include "call/adaptation/video_source_restrictions.h" +#include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { @@ -29,7 +31,11 @@ class VideoSourceSinkController { VideoSourceSinkController(rtc::VideoSinkInterface* sink, rtc::VideoSourceInterface* source); + ~VideoSourceSinkController(); + void SetSource(rtc::VideoSourceInterface* source); + bool HasSource() const; + // Must be called in order for changes to settings to have an effect. This // allows you to modify multiple properties in a single push to the sink. void PushSourceSinkSettings(); @@ -51,23 +57,27 @@ class VideoSourceSinkController { private: rtc::VideoSinkWants CurrentSettingsToSinkWants() const - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_); + + // Used to ensure that this class is called on threads/sequences that it and + // downstream implementations were designed for. + // In practice, this represent's libjingle's worker thread. + SequenceChecker sequence_checker_; - // TODO(hbos): If everything is handled on the same sequence (i.e. - // VideoStreamEncoder's encoder queue) then |crit_| can be replaced by - // sequence checker. Investigate if we want to do this. - mutable rtc::CriticalSection crit_; rtc::VideoSinkInterface* const sink_; - rtc::VideoSourceInterface* source_ RTC_GUARDED_BY(&crit_); + rtc::VideoSourceInterface* source_ + RTC_GUARDED_BY(&sequence_checker_); // Pixel and frame rate restrictions. 
- VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&crit_); + VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_); // Ensures that even if we are not restricted, the sink is never configured // above this limit. Example: We are not CPU limited (no |restrictions_|) but // our encoder is capped at 30 fps (= |frame_rate_upper_limit_|). - absl::optional pixels_per_frame_upper_limit_ RTC_GUARDED_BY(&crit_); - absl::optional frame_rate_upper_limit_ RTC_GUARDED_BY(&crit_); - bool rotation_applied_ RTC_GUARDED_BY(&crit_) = false; - int resolution_alignment_ RTC_GUARDED_BY(&crit_) = 1; + absl::optional pixels_per_frame_upper_limit_ + RTC_GUARDED_BY(&sequence_checker_); + absl::optional frame_rate_upper_limit_ + RTC_GUARDED_BY(&sequence_checker_); + bool rotation_applied_ RTC_GUARDED_BY(&sequence_checker_) = false; + int resolution_alignment_ RTC_GUARDED_BY(&sequence_checker_) = 1; }; } // namespace webrtc diff --git a/video/video_source_sink_controller_unittest.cc b/video/video_source_sink_controller_unittest.cc index c4e2ea11d2..66881cd023 100644 --- a/video/video_source_sink_controller_unittest.cc +++ b/video/video_source_sink_controller_unittest.cc @@ -30,8 +30,8 @@ class MockVideoSinkWithVideoFrame : public rtc::VideoSinkInterface { public: ~MockVideoSinkWithVideoFrame() override {} - MOCK_METHOD1(OnFrame, void(const VideoFrame& frame)); - MOCK_METHOD0(OnDiscardedFrame, void()); + MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override)); + MOCK_METHOD(void, OnDiscardedFrame, (), (override)); }; class MockVideoSourceWithVideoFrame @@ -39,10 +39,15 @@ class MockVideoSourceWithVideoFrame public: ~MockVideoSourceWithVideoFrame() override {} - MOCK_METHOD2(AddOrUpdateSink, - void(rtc::VideoSinkInterface*, - const rtc::VideoSinkWants&)); - MOCK_METHOD1(RemoveSink, void(rtc::VideoSinkInterface*)); + MOCK_METHOD(void, + AddOrUpdateSink, + (rtc::VideoSinkInterface*, + const rtc::VideoSinkWants&), + (override)); + MOCK_METHOD(void, + RemoveSink, 
+ (rtc::VideoSinkInterface*), + (override)); }; } // namespace diff --git a/video/video_stream_decoder.h b/video/video_stream_decoder.h index 6b040c6a6f..bfe9252976 100644 --- a/video/video_stream_decoder.h +++ b/video/video_stream_decoder.h @@ -20,8 +20,8 @@ #include "api/video/video_sink_interface.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/video_coding/include/video_coding_defines.h" -#include "rtc_base/critical_section.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -50,7 +50,7 @@ class VideoStreamDecoder : public VCMReceiveCallback { private: // Used for all registered callbacks except rendering. - rtc::CriticalSection crit_; + Mutex mutex_; VideoReceiver2* const video_receiver_; diff --git a/video/video_stream_decoder2.cc b/video/video_stream_decoder2.cc new file mode 100644 index 0000000000..a73bb649ea --- /dev/null +++ b/video/video_stream_decoder2.cc @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/video_stream_decoder2.h" + +#include "modules/video_coding/video_receiver2.h" +#include "rtc_base/checks.h" +#include "video/receive_statistics_proxy2.h" + +namespace webrtc { +namespace internal { + +VideoStreamDecoder::VideoStreamDecoder( + VideoReceiver2* video_receiver, + ReceiveStatisticsProxy* receive_statistics_proxy, + rtc::VideoSinkInterface* incoming_video_stream) + : video_receiver_(video_receiver), + receive_stats_callback_(receive_statistics_proxy), + incoming_video_stream_(incoming_video_stream) { + RTC_DCHECK(video_receiver_); + + video_receiver_->RegisterReceiveCallback(this); +} + +VideoStreamDecoder::~VideoStreamDecoder() { + // Note: There's an assumption at this point that the decoder thread is + // *not* running. If it was, then there could be a race for each of these + // callbacks. + + // Unset all the callback pointers that we set in the ctor. + video_receiver_->RegisterReceiveCallback(nullptr); +} + +// Do not acquire the lock of |video_receiver_| in this function. Decode +// callback won't necessarily be called from the decoding thread. The decoding +// thread may have held the lock when calling VideoDecoder::Decode, Reset, or +// Release. Acquiring the same lock in the path of decode callback can deadlock. 
+int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type) { + receive_stats_callback_->OnDecodedFrame(video_frame, qp, decode_time_ms, + content_type); + incoming_video_stream_->OnFrame(video_frame); + return 0; +} + +void VideoStreamDecoder::OnDroppedFrames(uint32_t frames_dropped) { + receive_stats_callback_->OnDroppedFrames(frames_dropped); +} + +void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) { + receive_stats_callback_->OnIncomingPayloadType(payload_type); +} + +void VideoStreamDecoder::OnDecoderImplementationName( + const char* implementation_name) { + receive_stats_callback_->OnDecoderImplementationName(implementation_name); +} + +} // namespace internal +} // namespace webrtc diff --git a/video/video_stream_decoder2.h b/video/video_stream_decoder2.h new file mode 100644 index 0000000000..a301d32107 --- /dev/null +++ b/video/video_stream_decoder2.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_VIDEO_STREAM_DECODER2_H_ +#define VIDEO_VIDEO_STREAM_DECODER2_H_ + +#include +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_sink_interface.h" +#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "rtc_base/platform_thread.h" + +namespace webrtc { + +class VideoReceiver2; + +namespace internal { + +class ReceiveStatisticsProxy; + +class VideoStreamDecoder : public VCMReceiveCallback { + public: + VideoStreamDecoder( + VideoReceiver2* video_receiver, + ReceiveStatisticsProxy* receive_statistics_proxy, + rtc::VideoSinkInterface* incoming_video_stream); + ~VideoStreamDecoder() override; + + // Implements VCMReceiveCallback. + int32_t FrameToRender(VideoFrame& video_frame, + absl::optional qp, + int32_t decode_time_ms, + VideoContentType content_type) override; + void OnDroppedFrames(uint32_t frames_dropped) override; + void OnIncomingPayloadType(int payload_type) override; + void OnDecoderImplementationName(const char* implementation_name) override; + + private: + VideoReceiver2* const video_receiver_; + ReceiveStatisticsProxy* const receive_stats_callback_; + rtc::VideoSinkInterface* const incoming_video_stream_; +}; + +} // namespace internal +} // namespace webrtc + +#endif // VIDEO_VIDEO_STREAM_DECODER2_H_ diff --git a/video/video_stream_decoder_impl.cc b/video/video_stream_decoder_impl.cc index 1e11d38050..f5b0f5f787 100644 --- a/video/video_stream_decoder_impl.cc +++ b/video/video_stream_decoder_impl.cc @@ -26,7 +26,7 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl( std::map> decoder_settings) : timing_(Clock::GetRealTimeClock()), decode_callbacks_(this), - next_frame_timestamps_index_(0), + next_frame_info_index_(0), callbacks_(callbacks), keyframe_required_(true), decoder_factory_(decoder_factory), @@ -39,7 +39,6 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl( 
decode_queue_(task_queue_factory->CreateTaskQueue( "video_stream_decoder_decode_queue", TaskQueueFactory::Priority::NORMAL)) { - frame_timestamps_.fill({-1, -1, -1}); bookkeeping_queue_.PostTask([this]() { RTC_DCHECK_RUN_ON(&bookkeeping_queue_); StartNextDecode(); @@ -47,7 +46,7 @@ VideoStreamDecoderImpl::VideoStreamDecoderImpl( } VideoStreamDecoderImpl::~VideoStreamDecoderImpl() { - rtc::CritScope lock(&shut_down_crit_); + MutexLock lock(&shut_down_mutex_); shut_down_ = true; } @@ -125,16 +124,15 @@ VideoDecoder* VideoStreamDecoderImpl::GetDecoder(int payload_type) { return decoder_.get(); } -void VideoStreamDecoderImpl::SaveFrameTimestamps( +void VideoStreamDecoderImpl::SaveFrameInfo( const video_coding::EncodedFrame& frame) { - FrameTimestamps* frame_timestamps = - &frame_timestamps_[next_frame_timestamps_index_]; - frame_timestamps->timestamp = frame.Timestamp(); - frame_timestamps->decode_start_time_ms = rtc::TimeMillis(); - frame_timestamps->render_time_us = frame.RenderTimeMs() * 1000; - - next_frame_timestamps_index_ = - Add(next_frame_timestamps_index_, 1); + FrameInfo* frame_info = &frame_info_[next_frame_info_index_]; + frame_info->timestamp = frame.Timestamp(); + frame_info->decode_start_time_ms = rtc::TimeMillis(); + frame_info->render_time_us = frame.RenderTimeMs() * 1000; + frame_info->content_type = frame.EncodedImage().content_type_; + + next_frame_info_index_ = Add(next_frame_info_index_, 1); } void VideoStreamDecoderImpl::StartNextDecode() { @@ -155,9 +153,9 @@ void VideoStreamDecoderImpl::OnNextFrameCallback( switch (result) { case video_coding::FrameBuffer::kFrameFound: { RTC_DCHECK(frame); - SaveFrameTimestamps(*frame); + SaveFrameInfo(*frame); - rtc::CritScope lock(&shut_down_crit_); + MutexLock lock(&shut_down_mutex_); if (shut_down_) { return; } @@ -230,14 +228,14 @@ VideoStreamDecoderImpl::DecodeResult VideoStreamDecoderImpl::DecodeFrame( } } -VideoStreamDecoderImpl::FrameTimestamps* -VideoStreamDecoderImpl::GetFrameTimestamps(int64_t 
timestamp) { - int start_time_index = next_frame_timestamps_index_; - for (int i = 0; i < kFrameTimestampsMemory; ++i) { - start_time_index = Subtract(start_time_index, 1); +VideoStreamDecoderImpl::FrameInfo* VideoStreamDecoderImpl::GetFrameInfo( + int64_t timestamp) { + int start_time_index = next_frame_info_index_; + for (int i = 0; i < kFrameInfoMemory; ++i) { + start_time_index = Subtract(start_time_index, 1); - if (frame_timestamps_[start_time_index].timestamp == timestamp) - return &frame_timestamps_[start_time_index]; + if (frame_info_[start_time_index].timestamp == timestamp) + return &frame_info_[start_time_index]; } return nullptr; @@ -250,29 +248,33 @@ void VideoStreamDecoderImpl::OnDecodedFrameCallback( int64_t decode_stop_time_ms = rtc::TimeMillis(); bookkeeping_queue_.PostTask([this, decode_stop_time_ms, decoded_image, - decode_time_ms, qp]() { + decode_time_ms, qp]() mutable { RTC_DCHECK_RUN_ON(&bookkeeping_queue_); - FrameTimestamps* frame_timestamps = - GetFrameTimestamps(decoded_image.timestamp()); - if (!frame_timestamps) { + FrameInfo* frame_info = GetFrameInfo(decoded_image.timestamp()); + if (!frame_info) { RTC_LOG(LS_ERROR) << "No frame information found for frame with timestamp" << decoded_image.timestamp(); return; } - absl::optional casted_qp; - if (qp) - casted_qp.emplace(*qp); - - absl::optional casted_decode_time_ms(decode_time_ms.value_or( - decode_stop_time_ms - frame_timestamps->decode_start_time_ms)); + Callbacks::FrameInfo callback_info; + callback_info.content_type = frame_info->content_type; - timing_.StopDecodeTimer(*casted_decode_time_ms, decode_stop_time_ms); + if (qp) + callback_info.qp.emplace(*qp); - VideoFrame copy = decoded_image; - copy.set_timestamp_us(frame_timestamps->render_time_us); - callbacks_->OnDecodedFrame(copy, casted_decode_time_ms, casted_qp); + if (!decode_time_ms) { + decode_time_ms = decode_stop_time_ms - frame_info->decode_start_time_ms; + } + decoded_image.set_processing_time( + 
{Timestamp::Millis(frame_info->decode_start_time_ms), + Timestamp::Millis(frame_info->decode_start_time_ms + + *decode_time_ms)}); + decoded_image.set_timestamp_us(frame_info->render_time_us); + timing_.StopDecodeTimer(*decode_time_ms, decode_stop_time_ms); + + callbacks_->OnDecodedFrame(decoded_image, callback_info); }); } diff --git a/video/video_stream_decoder_impl.h b/video/video_stream_decoder_impl.h index f3f09e4a79..69a8195054 100644 --- a/video/video_stream_decoder_impl.h +++ b/video/video_stream_decoder_impl.h @@ -20,6 +20,7 @@ #include "modules/video_coding/frame_buffer2.h" #include "modules/video_coding/timing.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" @@ -61,16 +62,16 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { kDecodeFailure, }; - struct FrameTimestamps { - int64_t timestamp; + struct FrameInfo { + int64_t timestamp = -1; int64_t decode_start_time_ms; int64_t render_time_us; + VideoContentType content_type; }; - void SaveFrameTimestamps(const video_coding::EncodedFrame& frame) - RTC_RUN_ON(bookkeeping_queue_); - FrameTimestamps* GetFrameTimestamps(int64_t timestamp) + void SaveFrameInfo(const video_coding::EncodedFrame& frame) RTC_RUN_ON(bookkeeping_queue_); + FrameInfo* GetFrameInfo(int64_t timestamp) RTC_RUN_ON(bookkeeping_queue_); void StartNextDecode() RTC_RUN_ON(bookkeeping_queue_); void OnNextFrameCallback(std::unique_ptr frame, video_coding::FrameBuffer::ReturnReason res) @@ -89,10 +90,10 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { // Some decoders are pipelined so it is not sufficient to save frame info // for the last frame only. 
- static constexpr int kFrameTimestampsMemory = 8; - std::array frame_timestamps_ + static constexpr int kFrameInfoMemory = 8; + std::array frame_info_ RTC_GUARDED_BY(bookkeeping_queue_); - int next_frame_timestamps_index_ RTC_GUARDED_BY(bookkeeping_queue_); + int next_frame_info_index_ RTC_GUARDED_BY(bookkeeping_queue_); VideoStreamDecoderInterface::Callbacks* const callbacks_ RTC_PT_GUARDED_BY(bookkeeping_queue_); video_coding::VideoLayerFrameId last_continuous_id_ @@ -112,8 +113,8 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { // safe for the |decode_queue_| to be destructed. After that the |decoder_| // can be destructed, and then the |bookkeeping_queue_|. Finally the // |frame_buffer_| can be destructed. - rtc::CriticalSection shut_down_crit_; - bool shut_down_ RTC_GUARDED_BY(shut_down_crit_); + Mutex shut_down_mutex_; + bool shut_down_ RTC_GUARDED_BY(shut_down_mutex_); video_coding::FrameBuffer frame_buffer_ RTC_GUARDED_BY(bookkeeping_queue_); rtc::TaskQueue bookkeeping_queue_; std::unique_ptr decoder_ RTC_GUARDED_BY(decode_queue_); diff --git a/video/video_stream_decoder_impl_unittest.cc b/video/video_stream_decoder_impl_unittest.cc index 37924d11b3..a957f01ead 100644 --- a/video/video_stream_decoder_impl_unittest.cc +++ b/video/video_stream_decoder_impl_unittest.cc @@ -27,21 +27,25 @@ using ::testing::Return; class MockVideoStreamDecoderCallbacks : public VideoStreamDecoderInterface::Callbacks { public: - MOCK_METHOD0(OnNonDecodableState, void()); - MOCK_METHOD1(OnContinuousUntil, - void(const video_coding::VideoLayerFrameId& key)); - MOCK_METHOD1(OnEncodedFrame, void(const video_coding::EncodedFrame& frame)); - MOCK_METHOD3(OnDecodedFrame, - void(VideoFrame decodedImage, - absl::optional decode_time_ms, - absl::optional qp)); + MOCK_METHOD(void, OnNonDecodableState, (), (override)); + MOCK_METHOD(void, + OnContinuousUntil, + (const video_coding::VideoLayerFrameId& key), + (override)); + MOCK_METHOD( + void, + OnDecodedFrame, + 
(VideoFrame frame, + const VideoStreamDecoderInterface::Callbacks::FrameInfo& frame_info), + (override)); }; class StubVideoDecoder : public VideoDecoder { public: - MOCK_METHOD2(InitDecode, - int32_t(const VideoCodec* codec_settings, - int32_t number_of_cores)); + MOCK_METHOD(int32_t, + InitDecode, + (const VideoCodec*, int32_t number_of_cores), + (override)); int32_t Decode(const EncodedImage& input_image, bool missing_frames, @@ -57,10 +61,12 @@ class StubVideoDecoder : public VideoDecoder { return ret_code; } - MOCK_METHOD3(DecodeCall, - int32_t(const EncodedImage& input_image, - bool missing_frames, - int64_t render_time_ms)); + MOCK_METHOD(int32_t, + DecodeCall, + (const EncodedImage& input_image, + bool missing_frames, + int64_t render_time_ms), + ()); int32_t Release() override { return 0; } @@ -156,7 +162,7 @@ class FrameBuilder { class VideoStreamDecoderImplTest : public ::testing::Test { public: VideoStreamDecoderImplTest() - : time_controller_(Timestamp::seconds(0)), + : time_controller_(Timestamp::Seconds(0)), video_stream_decoder_(&callbacks_, &decoder_factory_, time_controller_.GetTaskQueueFactory(), @@ -173,20 +179,20 @@ class VideoStreamDecoderImplTest : public ::testing::Test { TEST_F(VideoStreamDecoderImplTest, InsertAndDecodeFrame) { video_stream_decoder_.OnFrame(FrameBuilder().WithPayloadType(1).Build()); EXPECT_CALL(callbacks_, OnDecodedFrame); - time_controller_.AdvanceTime(TimeDelta::ms(1)); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); } TEST_F(VideoStreamDecoderImplTest, NonDecodableStateWaitingForKeyframe) { EXPECT_CALL(callbacks_, OnNonDecodableState); - time_controller_.AdvanceTime(TimeDelta::ms(200)); + time_controller_.AdvanceTime(TimeDelta::Millis(200)); } TEST_F(VideoStreamDecoderImplTest, NonDecodableStateWaitingForDeltaFrame) { video_stream_decoder_.OnFrame(FrameBuilder().WithPayloadType(1).Build()); EXPECT_CALL(callbacks_, OnDecodedFrame); - time_controller_.AdvanceTime(TimeDelta::ms(1)); + 
time_controller_.AdvanceTime(TimeDelta::Millis(1)); EXPECT_CALL(callbacks_, OnNonDecodableState); - time_controller_.AdvanceTime(TimeDelta::ms(3000)); + time_controller_.AdvanceTime(TimeDelta::Millis(3000)); } TEST_F(VideoStreamDecoderImplTest, InsertAndDecodeFrameWithKeyframeRequest) { @@ -195,7 +201,7 @@ TEST_F(VideoStreamDecoderImplTest, InsertAndDecodeFrameWithKeyframeRequest) { .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME)); EXPECT_CALL(callbacks_, OnDecodedFrame); EXPECT_CALL(callbacks_, OnNonDecodableState); - time_controller_.AdvanceTime(TimeDelta::ms(1)); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); } TEST_F(VideoStreamDecoderImplTest, FailToInitDecoder) { @@ -203,7 +209,7 @@ TEST_F(VideoStreamDecoderImplTest, FailToInitDecoder) { ON_CALL(decoder_factory_.Vp8Decoder(), InitDecode) .WillByDefault(Return(WEBRTC_VIDEO_CODEC_ERROR)); EXPECT_CALL(callbacks_, OnNonDecodableState); - time_controller_.AdvanceTime(TimeDelta::ms(1)); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); } TEST_F(VideoStreamDecoderImplTest, FailToDecodeFrame) { @@ -211,7 +217,7 @@ TEST_F(VideoStreamDecoderImplTest, FailToDecodeFrame) { ON_CALL(decoder_factory_.Vp8Decoder(), DecodeCall) .WillByDefault(Return(WEBRTC_VIDEO_CODEC_ERROR)); EXPECT_CALL(callbacks_, OnNonDecodableState); - time_controller_.AdvanceTime(TimeDelta::ms(1)); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); } TEST_F(VideoStreamDecoderImplTest, ChangeFramePayloadType) { @@ -219,13 +225,13 @@ TEST_F(VideoStreamDecoderImplTest, ChangeFramePayloadType) { FrameBuilder().WithPayloadType(1).WithPictureId(0).Build()); EXPECT_CALL(decoder_factory_.Vp8Decoder(), DecodeCall); EXPECT_CALL(callbacks_, OnDecodedFrame); - time_controller_.AdvanceTime(TimeDelta::ms(1)); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); video_stream_decoder_.OnFrame( FrameBuilder().WithPayloadType(2).WithPictureId(1).Build()); EXPECT_CALL(decoder_factory_.Av1Decoder(), DecodeCall); EXPECT_CALL(callbacks_, 
OnDecodedFrame); - time_controller_.AdvanceTime(TimeDelta::ms(1)); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); } } // namespace diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index 7ec659d837..15f44b111c 100644 --- a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -19,26 +19,34 @@ #include "absl/algorithm/container.h" #include "absl/types/optional.h" +#include "api/task_queue/queued_task.h" +#include "api/task_queue/task_queue_base.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" +#include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_constants.h" +#include "api/video/video_layers_allocation.h" #include "api/video_codecs/video_encoder.h" -#include "call/adaptation/resource_adaptation_module_interface.h" +#include "call/adaptation/resource_adaptation_processor.h" +#include "call/adaptation/video_stream_adapter.h" #include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" #include "modules/video_coding/include/video_codec_initializer.h" -#include "modules/video_coding/utility/default_video_bitrate_allocator.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" -#include "rtc_base/experiments/quality_scaling_experiment.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/field_trial.h" +#include "video/adaptation/video_stream_encoder_resource_manager.h" +#include "video/alignment_adjuster.h" namespace webrtc { @@ -52,10 +60,6 @@ const int64_t kPendingFrameTimeoutMs = 
1000; constexpr char kFrameDropperFieldTrial[] = "WebRTC-FrameDropper"; -// The maximum number of frames to drop at beginning of stream -// to try and achieve desired bitrate. -const int kMaxInitialFramedrop = 4; - // Averaging window spanning 90 frames at default 30fps, matching old media // optimization module defaults. const int64_t kFrameRateAvergingWindowSizeMs = (1000 / 30) * 90; @@ -67,11 +71,6 @@ const int64_t kParameterUpdateIntervalMs = 1000; // Animation is capped to 720p. constexpr int kMaxAnimationPixels = 1280 * 720; -bool IsResolutionScalingEnabled(DegradationPreference degradation_preference) { - return degradation_preference == DegradationPreference::MAINTAIN_FRAMERATE || - degradation_preference == DegradationPreference::BALANCED; -} - bool RequiresEncoderReset(const VideoCodec& prev_send_codec, const VideoCodec& new_send_codec, bool was_encode_called_since_last_initialization) { @@ -111,27 +110,52 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, return true; } break; +#ifndef DISABLE_H265 + case kVideoCodecH265: + if (new_send_codec.H265() != prev_send_codec.H265()) { + return true; + } + break; +#endif default: break; } for (unsigned char i = 0; i < new_send_codec.numberOfSimulcastStreams; ++i) { - if (new_send_codec.simulcastStream[i].width != + if (!new_send_codec.simulcastStream[i].active) { + // No need to reset when stream is inactive. 
+ continue; + } + + if (!prev_send_codec.simulcastStream[i].active || + new_send_codec.simulcastStream[i].width != prev_send_codec.simulcastStream[i].width || new_send_codec.simulcastStream[i].height != prev_send_codec.simulcastStream[i].height || - new_send_codec.simulcastStream[i].maxFramerate != - prev_send_codec.simulcastStream[i].maxFramerate || new_send_codec.simulcastStream[i].numberOfTemporalLayers != prev_send_codec.simulcastStream[i].numberOfTemporalLayers || new_send_codec.simulcastStream[i].qpMax != - prev_send_codec.simulcastStream[i].qpMax || - new_send_codec.simulcastStream[i].active != - prev_send_codec.simulcastStream[i].active) { + prev_send_codec.simulcastStream[i].qpMax) { return true; } } + + if (new_send_codec.codecType == kVideoCodecVP9) { + size_t num_spatial_layers = new_send_codec.VP9().numberOfSpatialLayers; + for (unsigned char i = 0; i < num_spatial_layers; ++i) { + if (new_send_codec.spatialLayers[i].width != + prev_send_codec.spatialLayers[i].width || + new_send_codec.spatialLayers[i].height != + prev_send_codec.spatialLayers[i].height || + new_send_codec.spatialLayers[i].numberOfTemporalLayers != + prev_send_codec.spatialLayers[i].numberOfTemporalLayers || + new_send_codec.spatialLayers[i].qpMax != + prev_send_codec.spatialLayers[i].qpMax) { + return true; + } + } + } return false; } @@ -180,47 +204,75 @@ VideoBitrateAllocation UpdateAllocationFromEncoderInfo( new_allocation.set_bw_limited(allocation.is_bw_limited()); return new_allocation; } -} // namespace -absl::optional GetEncoderBitrateLimits( - const VideoEncoder::EncoderInfo& encoder_info, - int frame_size_pixels) { - std::vector bitrate_limits = - encoder_info.resolution_bitrate_limits; - - // Sort the list of bitrate limits by resolution. 
- sort(bitrate_limits.begin(), bitrate_limits.end(), - [](const VideoEncoder::ResolutionBitrateLimits& lhs, - const VideoEncoder::ResolutionBitrateLimits& rhs) { - return lhs.frame_size_pixels < rhs.frame_size_pixels; - }); - - for (size_t i = 0; i < bitrate_limits.size(); ++i) { - RTC_DCHECK_GE(bitrate_limits[i].min_bitrate_bps, 0); - RTC_DCHECK_GE(bitrate_limits[i].min_start_bitrate_bps, 0); - RTC_DCHECK_GE(bitrate_limits[i].max_bitrate_bps, - bitrate_limits[i].min_bitrate_bps); - if (i > 0) { - // The bitrate limits aren't expected to decrease with resolution. - RTC_DCHECK_GE(bitrate_limits[i].min_bitrate_bps, - bitrate_limits[i - 1].min_bitrate_bps); - RTC_DCHECK_GE(bitrate_limits[i].min_start_bitrate_bps, - bitrate_limits[i - 1].min_start_bitrate_bps); - RTC_DCHECK_GE(bitrate_limits[i].max_bitrate_bps, - bitrate_limits[i - 1].max_bitrate_bps); - } +// Converts a VideoBitrateAllocation that contains allocated bitrate per layer, +// and an EncoderInfo that contains information about the actual encoder +// structure used by a codec. Stream structures can be Ksvc, Full SVC, Simulcast +// etc. 
+VideoLayersAllocation CreateVideoLayersAllocation( + const VideoCodec& encoder_config, + const VideoEncoder::RateControlParameters& current_rate, + const VideoEncoder::EncoderInfo& encoder_info) { + const VideoBitrateAllocation& target_bitrate = current_rate.target_bitrate; + VideoLayersAllocation layers_allocation; + if (target_bitrate.get_sum_bps() == 0) { + return layers_allocation; + } - if (bitrate_limits[i].frame_size_pixels >= frame_size_pixels) { - return absl::optional( - bitrate_limits[i]); + if (encoder_config.numberOfSimulcastStreams > 0) { + layers_allocation.resolution_and_frame_rate_is_valid = true; + for (int si = 0; si < encoder_config.numberOfSimulcastStreams; ++si) { + if (!target_bitrate.IsSpatialLayerUsed(si) || + target_bitrate.GetSpatialLayerSum(si) == 0) { + break; + } + layers_allocation.active_spatial_layers.emplace_back(); + VideoLayersAllocation::SpatialLayer& spatial_layer = + layers_allocation.active_spatial_layers.back(); + spatial_layer.width = encoder_config.simulcastStream[si].width; + spatial_layer.height = encoder_config.simulcastStream[si].height; + spatial_layer.rtp_stream_index = si; + spatial_layer.spatial_id = 0; + auto frame_rate_fraction = + VideoEncoder::EncoderInfo::kMaxFramerateFraction; + if (encoder_info.fps_allocation[si].size() == 1) { + // One TL is signalled to be used by the encoder. Do not distribute + // bitrate allocation across TLs (use sum at tl:0). + spatial_layer.target_bitrate_per_temporal_layer.push_back( + DataRate::BitsPerSec(target_bitrate.GetSpatialLayerSum(si))); + frame_rate_fraction = encoder_info.fps_allocation[si][0]; + } else { // Temporal layers are supported. + uint32_t temporal_layer_bitrate_bps = 0; + for (size_t ti = 0; + ti < encoder_config.simulcastStream[si].numberOfTemporalLayers; + ++ti) { + if (!target_bitrate.HasBitrate(si, ti)) { + break; + } + if (ti < encoder_info.fps_allocation[si].size()) { + // Use frame rate of the top used temporal layer. 
+ frame_rate_fraction = encoder_info.fps_allocation[si][ti]; + } + temporal_layer_bitrate_bps += target_bitrate.GetBitrate(si, ti); + spatial_layer.target_bitrate_per_temporal_layer.push_back( + DataRate::BitsPerSec(temporal_layer_bitrate_bps)); + } + } + // Encoder may drop frames internally if `maxFramerate` is set. + spatial_layer.frame_rate_fps = std::min( + static_cast(encoder_config.simulcastStream[si].maxFramerate), + static_cast( + (current_rate.framerate_fps * frame_rate_fraction) / + VideoEncoder::EncoderInfo::kMaxFramerateFraction)); } + } else { + // TODO(bugs.webrtc.org/12000): Implement support for kSVC and full SVC. } - return absl::nullopt; + return layers_allocation; } -const int VideoStreamEncoder::kDefaultLastFrameInfoWidth = 176; -const int VideoStreamEncoder::kDefaultLastFrameInfoHeight = 144; +} // namespace VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings() : rate_control(), @@ -249,6 +301,63 @@ bool VideoStreamEncoder::EncoderRateSettings::operator!=( return !(*this == rhs); } +class VideoStreamEncoder::DegradationPreferenceManager + : public DegradationPreferenceProvider { + public: + explicit DegradationPreferenceManager( + VideoStreamAdapter* video_stream_adapter) + : degradation_preference_(DegradationPreference::DISABLED), + is_screenshare_(false), + effective_degradation_preference_(DegradationPreference::DISABLED), + video_stream_adapter_(video_stream_adapter) { + RTC_DCHECK(video_stream_adapter_); + sequence_checker_.Detach(); + } + + ~DegradationPreferenceManager() override = default; + + DegradationPreference degradation_preference() const override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return effective_degradation_preference_; + } + + void SetDegradationPreference(DegradationPreference degradation_preference) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + degradation_preference_ = degradation_preference; + MaybeUpdateEffectiveDegradationPreference(); + } + + void SetIsScreenshare(bool is_screenshare) { + 
RTC_DCHECK_RUN_ON(&sequence_checker_); + is_screenshare_ = is_screenshare; + MaybeUpdateEffectiveDegradationPreference(); + } + + private: + void MaybeUpdateEffectiveDegradationPreference() + RTC_RUN_ON(&sequence_checker_) { + DegradationPreference effective_degradation_preference = + (is_screenshare_ && + degradation_preference_ == DegradationPreference::BALANCED) + ? DegradationPreference::MAINTAIN_RESOLUTION + : degradation_preference_; + + if (effective_degradation_preference != effective_degradation_preference_) { + effective_degradation_preference_ = effective_degradation_preference; + video_stream_adapter_->SetDegradationPreference( + effective_degradation_preference); + } + } + + SequenceChecker sequence_checker_; + DegradationPreference degradation_preference_ + RTC_GUARDED_BY(&sequence_checker_); + bool is_screenshare_ RTC_GUARDED_BY(&sequence_checker_); + DegradationPreference effective_degradation_preference_ + RTC_GUARDED_BY(&sequence_checker_); + VideoStreamAdapter* video_stream_adapter_ RTC_GUARDED_BY(&sequence_checker_); +}; + VideoStreamEncoder::VideoStreamEncoder( Clock* clock, uint32_t number_of_cores, @@ -256,16 +365,13 @@ VideoStreamEncoder::VideoStreamEncoder( const VideoStreamEncoderSettings& settings, std::unique_ptr overuse_detector, TaskQueueFactory* task_queue_factory) - : shutdown_event_(true /* manual_reset */, false), + : main_queue_(TaskQueueBase::Current()), number_of_cores_(number_of_cores), - initial_framedrop_(0), - quality_rampup_done_(false), - quality_rampup_experiment_(QualityRampupExperiment::ParseSettings()), quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), sink_(nullptr), settings_(settings), rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), - quality_scaler_settings_(QualityScalerSettings::ParseFromFieldTrials()), + encoder_selector_(settings.encoder_factory->GetEncoderSelector()), encoder_stats_observer_(encoder_stats_observer), encoder_initialized_(false), max_framerate_(-1), 
@@ -274,9 +380,6 @@ VideoStreamEncoder::VideoStreamEncoder( crop_width_(0), crop_height_(0), encoder_target_bitrate_bps_(absl::nullopt), - set_start_bitrate_bps_(0), - set_start_bitrate_time_ms_(0), - has_seen_first_bwe_drop_(false), max_data_payload_length_(0), encoder_paused_and_dropped_frame_(false), was_encode_called_since_last_initialization_(false), @@ -288,75 +391,112 @@ VideoStreamEncoder::VideoStreamEncoder( clock_->TimeInMilliseconds()), last_frame_log_ms_(clock_->TimeInMilliseconds()), captured_frame_count_(0), - dropped_frame_count_(0), + dropped_frame_cwnd_pushback_count_(0), + dropped_frame_encoder_block_count_(0), pending_frame_post_time_us_(0), accumulated_update_rect_{0, 0, 0, 0}, accumulated_update_rect_is_valid_(true), animation_start_time_(Timestamp::PlusInfinity()), cap_resolution_due_to_video_content_(false), expect_resize_state_(ExpectResizeState::kNoResize), - bitrate_observer_(nullptr), fec_controller_override_(nullptr), force_disable_frame_dropper_(false), input_framerate_(kFrameRateAvergingWindowSizeMs, 1000), pending_frame_drops_(0), + cwnd_frame_counter_(0), next_frame_types_(1, VideoFrameType::kVideoFrameDelta), frame_encode_metadata_writer_(this), experiment_groups_(GetExperimentGroups()), - next_frame_id_(0), encoder_switch_experiment_(ParseEncoderSwitchFieldTrial()), automatic_animation_detection_experiment_( ParseAutomatincAnimationDetectionFieldTrial()), encoder_switch_requested_(false), - video_source_sink_controller_(std::make_unique( - /*sink=*/this, - /*source=*/nullptr)), - resource_adaptation_module_( - std::make_unique( - settings_.experiment_cpu_load_estimator, - std::move(overuse_detector), - encoder_stats_observer, - /*adaptation_listener=*/this)), + input_state_provider_(encoder_stats_observer), + video_stream_adapter_( + std::make_unique(&input_state_provider_, + encoder_stats_observer)), + resource_adaptation_processor_( + std::make_unique( + video_stream_adapter_.get())), + degradation_preference_manager_( + 
std::make_unique( + video_stream_adapter_.get())), + adaptation_constraints_(), + stream_resource_manager_(&input_state_provider_, + encoder_stats_observer, + clock_, + settings_.experiment_cpu_load_estimator, + std::move(overuse_detector), + degradation_preference_manager_.get()), + video_source_sink_controller_(/*sink=*/this, + /*source=*/nullptr), encoder_queue_(task_queue_factory->CreateTaskQueue( "EncoderQueue", TaskQueueFactory::Priority::NORMAL)) { + RTC_DCHECK(main_queue_); RTC_DCHECK(encoder_stats_observer); RTC_DCHECK_GE(number_of_cores, 1); - for (auto& state : encoder_buffer_state_) - state.fill(std::numeric_limits::max()); + stream_resource_manager_.Initialize(&encoder_queue_); + + rtc::Event initialize_processor_event; + encoder_queue_.PostTask([this, &initialize_processor_event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + resource_adaptation_processor_->SetTaskQueue(encoder_queue_.Get()); + stream_resource_manager_.SetAdaptationProcessor( + resource_adaptation_processor_.get(), video_stream_adapter_.get()); + resource_adaptation_processor_->AddResourceLimitationsListener( + &stream_resource_manager_); + video_stream_adapter_->AddRestrictionsListener(&stream_resource_manager_); + video_stream_adapter_->AddRestrictionsListener(this); + + // Add the stream resource manager's resources to the processor. 
+ adaptation_constraints_ = stream_resource_manager_.AdaptationConstraints(); + for (auto* constraint : adaptation_constraints_) { + video_stream_adapter_->AddAdaptationConstraint(constraint); + } + initialize_processor_event.Set(); + }); + initialize_processor_event.Wait(rtc::Event::kForever); } VideoStreamEncoder::~VideoStreamEncoder() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(shutdown_event_.Wait(0)) + RTC_DCHECK_RUN_ON(main_queue_); + RTC_DCHECK(!video_source_sink_controller_.HasSource()) << "Must call ::Stop() before destruction."; } void VideoStreamEncoder::Stop() { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_source_sink_controller_->SetSource(nullptr); - encoder_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - resource_adaptation_module_->StopResourceAdaptation(); - rate_allocator_ = nullptr; - bitrate_observer_ = nullptr; - ReleaseEncoder(); - resource_adaptation_module_->UpdateQualityScalerSettings(absl::nullopt); - shutdown_event_.Set(); - }); + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetSource(nullptr); - shutdown_event_.Wait(rtc::Event::kForever); -} + rtc::Event shutdown_event; -void VideoStreamEncoder::SetBitrateAllocationObserver( - VideoBitrateAllocationObserver* bitrate_observer) { - RTC_DCHECK_RUN_ON(&thread_checker_); - encoder_queue_.PostTask([this, bitrate_observer] { + encoder_queue_.PostTask([this, &shutdown_event] { RTC_DCHECK_RUN_ON(&encoder_queue_); - RTC_DCHECK(!bitrate_observer_); - bitrate_observer_ = bitrate_observer; + if (resource_adaptation_processor_) { + stream_resource_manager_.StopManagedResources(); + for (auto* constraint : adaptation_constraints_) { + video_stream_adapter_->RemoveAdaptationConstraint(constraint); + } + for (auto& resource : additional_resources_) { + stream_resource_manager_.RemoveResource(resource); + } + additional_resources_.clear(); + video_stream_adapter_->RemoveRestrictionsListener(this); + video_stream_adapter_->RemoveRestrictionsListener( + 
&stream_resource_manager_); + resource_adaptation_processor_->RemoveResourceLimitationsListener( + &stream_resource_manager_); + stream_resource_manager_.SetAdaptationProcessor(nullptr, nullptr); + resource_adaptation_processor_.reset(); + } + rate_allocator_ = nullptr; + ReleaseEncoder(); + encoder_ = nullptr; + shutdown_event.Set(); }); + shutdown_event.Wait(rtc::Event::kForever); } void VideoStreamEncoder::SetFecControllerOverride( @@ -371,24 +511,53 @@ void VideoStreamEncoder::SetFecControllerOverride( }); } +void VideoStreamEncoder::AddAdaptationResource( + rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(main_queue_); + // Map any externally added resources as kCpu for the sake of stats reporting. + // TODO(hbos): Make the manager map any unknown resources to kCpu and get rid + // of this MapResourceToReason() call. + rtc::Event map_resource_event; + encoder_queue_.PostTask([this, resource, &map_resource_event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + additional_resources_.push_back(resource); + stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu); + map_resource_event.Set(); + }); + map_resource_event.Wait(rtc::Event::kForever); +} + +std::vector> +VideoStreamEncoder::GetAdaptationResources() { + RTC_DCHECK_RUN_ON(main_queue_); + return resource_adaptation_processor_->GetResources(); +} + void VideoStreamEncoder::SetSource( rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_source_sink_controller_->SetSource(source); - encoder_queue_.PostTask([this, source, degradation_preference] { + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetSource(source); + input_state_provider_.OnHasInputChanged(source); + + // This may trigger reconfiguring the QualityScaler on the encoder queue. 
+ encoder_queue_.PostTask([this, degradation_preference] { RTC_DCHECK_RUN_ON(&encoder_queue_); - resource_adaptation_module_->SetHasInputVideo(source); - resource_adaptation_module_->SetDegradationPreference( + degradation_preference_manager_->SetDegradationPreference( degradation_preference); - if (encoder_) - ConfigureQualityScaler(encoder_->GetEncoderInfo()); + stream_resource_manager_.SetDegradationPreferences(degradation_preference); + if (encoder_) { + stream_resource_manager_.ConfigureQualityScaler( + encoder_->GetEncoderInfo()); + } }); } void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) { - video_source_sink_controller_->SetRotationApplied(rotation_applied); - video_source_sink_controller_->PushSourceSinkSettings(); + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetRotationApplied(rotation_applied); + video_source_sink_controller_.PushSourceSinkSettings(); + encoder_queue_.PostTask([this, sink] { RTC_DCHECK_RUN_ON(&encoder_queue_); sink_ = sink; @@ -398,13 +567,12 @@ void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) { void VideoStreamEncoder::SetStartBitrate(int start_bitrate_bps) { encoder_queue_.PostTask([this, start_bitrate_bps] { RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_LOG(LS_INFO) << "SetStartBitrate " << start_bitrate_bps; encoder_target_bitrate_bps_ = start_bitrate_bps != 0 ? 
absl::optional(start_bitrate_bps) : absl::nullopt; - resource_adaptation_module_->SetEncoderTargetBitrate( - encoder_target_bitrate_bps_); - set_start_bitrate_bps_ = start_bitrate_bps; - set_start_bitrate_time_ms_ = clock_->TimeInMilliseconds(); + stream_resource_manager_.SetStartBitrate( + DataRate::BitsPerSec(start_bitrate_bps)); }); } @@ -434,8 +602,8 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, codec_info_ = settings_.encoder_factory->QueryVideoEncoder( encoder_config_.video_format); if (HasInternalSource()) { - last_frame_info_ = VideoFrameInfo( - kDefaultLastFrameInfoWidth, kDefaultLastFrameInfoHeight, false); + last_frame_info_ = VideoFrameInfo(kDefaultInputPixelsWidth, + kDefaultInputPixelsHeight, false); ReconfigureEncoder(); } } @@ -447,20 +615,52 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, // the VideoBitrateAllocator and call OnEncoderConfigurationChanged with a // "soft" reconfiguration. void VideoStreamEncoder::ReconfigureEncoder() { + // Running on the encoder queue. RTC_DCHECK(pending_encoder_reconfiguration_); - if (encoder_switch_experiment_.IsPixelCountBelowThreshold( + if (!encoder_selector_ && + encoder_switch_experiment_.IsPixelCountBelowThreshold( last_frame_info_->width * last_frame_info_->height) && !encoder_switch_requested_ && settings_.encoder_switch_request_callback) { EncoderSwitchRequestCallback::Config conf; conf.codec_name = encoder_switch_experiment_.to_codec; conf.param = encoder_switch_experiment_.to_param; conf.value = encoder_switch_experiment_.to_value; - settings_.encoder_switch_request_callback->RequestEncoderSwitch(conf); + QueueRequestEncoderSwitch(conf); encoder_switch_requested_ = true; } + bool encoder_reset_required = false; + if (pending_encoder_creation_) { + // Destroy existing encoder instance before creating a new one. Otherwise + // attempt to create another instance will fail if encoder factory + // supports only single instance of encoder of given type. 
+ encoder_.reset(); + + encoder_ = settings_.encoder_factory->CreateVideoEncoder( + encoder_config_.video_format); + // TODO(nisse): What to do if creating the encoder fails? Crash, + // or just discard incoming frames? + RTC_CHECK(encoder_); + + if (encoder_selector_) { + encoder_selector_->OnCurrentEncoder(encoder_config_.video_format); + } + + encoder_->SetFecControllerOverride(fec_controller_override_); + + codec_info_ = settings_.encoder_factory->QueryVideoEncoder( + encoder_config_.video_format); + + encoder_reset_required = true; + } + + // Possibly adjusts scale_resolution_down_by in |encoder_config_| to limit the + // alignment value. + int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( + encoder_->GetEncoderInfo(), &encoder_config_); + std::vector streams = encoder_config_.video_stream_factory->CreateEncoderStreams( last_frame_info_->width, last_frame_info_->height, encoder_config_); @@ -493,30 +693,9 @@ void VideoStreamEncoder::ReconfigureEncoder() { crop_width_ = last_frame_info_->width - highest_stream_width; crop_height_ = last_frame_info_->height - highest_stream_height; - bool encoder_reset_required = false; - if (pending_encoder_creation_) { - // Destroy existing encoder instance before creating a new one. Otherwise - // attempt to create another instance will fail if encoder factory - // supports only single instance of encoder of given type. - encoder_.reset(); - - encoder_ = settings_.encoder_factory->CreateVideoEncoder( - encoder_config_.video_format); - // TODO(nisse): What to do if creating the encoder fails? Crash, - // or just discard incoming frames? 
- RTC_CHECK(encoder_); - - encoder_->SetFecControllerOverride(fec_controller_override_); - - codec_info_ = settings_.encoder_factory->QueryVideoEncoder( - encoder_config_.video_format); - - encoder_reset_required = true; - } - - encoder_bitrate_limits_ = GetEncoderBitrateLimits( - encoder_->GetEncoderInfo(), - last_frame_info_->width * last_frame_info_->height); + encoder_bitrate_limits_ = + encoder_->GetEncoderInfo().GetEncoderBitrateLimitsForResolution( + last_frame_info_->width * last_frame_info_->height); if (streams.size() == 1 && encoder_bitrate_limits_) { // Bitrate limits can be set by app (in SDP or RtpEncodingParameters) or/and @@ -566,16 +745,11 @@ void VideoStreamEncoder::ReconfigureEncoder() { RTC_LOG(LS_ERROR) << "Failed to create encoder configuration."; } - // Set min_bitrate_bps, max_bitrate_bps, and max padding bit rate for VP9. if (encoder_config_.codec_type == kVideoCodecVP9) { - // Lower max bitrate to the level codec actually can produce. - streams[0].max_bitrate_bps = - std::min(streams[0].max_bitrate_bps, - SvcRateAllocator::GetMaxBitrate(codec).bps()); - streams[0].min_bitrate_bps = codec.spatialLayers[0].minBitrate * 1000; - // target_bitrate_bps specifies the maximum padding bitrate. - streams[0].target_bitrate_bps = - SvcRateAllocator::GetPaddingBitrate(codec).bps(); + // Spatial layers configuration might impose some parity restrictions, + // thus some cropping might be needed. 
+ crop_width_ = last_frame_info_->width - codec.width; + crop_height_ = last_frame_info_->height - codec.height; } char log_stream_buf[4 * 1024]; @@ -586,9 +760,9 @@ void VideoStreamEncoder::ReconfigureEncoder() { log_stream << i << ": " << codec.simulcastStream[i].width << "x" << codec.simulcastStream[i].height << " fps: " << codec.simulcastStream[i].maxFramerate - << " min_bps: " << codec.simulcastStream[i].minBitrate - << " target_bps: " << codec.simulcastStream[i].targetBitrate - << " max_bps: " << codec.simulcastStream[i].maxBitrate + << " min_kbps: " << codec.simulcastStream[i].minBitrate + << " target_kbps: " << codec.simulcastStream[i].targetBitrate + << " max_kbps: " << codec.simulcastStream[i].maxBitrate << " max_fps: " << codec.simulcastStream[i].maxFramerate << " max_qp: " << codec.simulcastStream[i].qpMax << " num_tl: " << codec.simulcastStream[i].numberOfTemporalLayers @@ -602,9 +776,9 @@ void VideoStreamEncoder::ReconfigureEncoder() { log_stream << i << ": " << codec.spatialLayers[i].width << "x" << codec.spatialLayers[i].height << " fps: " << codec.spatialLayers[i].maxFramerate - << " min_bps: " << codec.spatialLayers[i].minBitrate - << " target_bps: " << codec.spatialLayers[i].targetBitrate - << " max_bps: " << codec.spatialLayers[i].maxBitrate + << " min_kbps: " << codec.spatialLayers[i].minBitrate + << " target_kbps: " << codec.spatialLayers[i].targetBitrate + << " max_kbps: " << codec.spatialLayers[i].maxBitrate << " max_qp: " << codec.spatialLayers[i].qpMax << " num_tl: " << codec.spatialLayers[i].numberOfTemporalLayers << " active: " @@ -626,14 +800,18 @@ void VideoStreamEncoder::ReconfigureEncoder() { for (const auto& stream : streams) { max_framerate = std::max(stream.max_framerate, max_framerate); } - int alignment = encoder_->GetEncoderInfo().requested_resolution_alignment; - if (max_framerate != - video_source_sink_controller_->frame_rate_upper_limit() || - alignment != video_source_sink_controller_->resolution_alignment()) { - 
video_source_sink_controller_->SetFrameRateUpperLimit(max_framerate); - video_source_sink_controller_->SetResolutionAlignment(alignment); - video_source_sink_controller_->PushSourceSinkSettings(); - } + + main_queue_->PostTask( + ToQueuedTask(task_safety_, [this, max_framerate, alignment]() { + RTC_DCHECK_RUN_ON(main_queue_); + if (max_framerate != + video_source_sink_controller_.frame_rate_upper_limit() || + alignment != video_source_sink_controller_.resolution_alignment()) { + video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); + video_source_sink_controller_.SetResolutionAlignment(alignment); + video_source_sink_controller_.PushSourceSinkSettings(); + } + })); if (codec.maxBitrate == 0) { // max is one bit per pixel @@ -654,18 +832,18 @@ void VideoStreamEncoder::ReconfigureEncoder() { rate_allocator_ = settings_.bitrate_allocator_factory->CreateVideoBitrateAllocator(codec); + rate_allocator_->SetLegacyConferenceMode( + encoder_config_.legacy_conference_mode); // Reset (release existing encoder) if one exists and anything except // start bitrate or max framerate has changed. if (!encoder_reset_required) { encoder_reset_required = RequiresEncoderReset( - codec, send_codec_, was_encode_called_since_last_initialization_); + send_codec_, codec, was_encode_called_since_last_initialization_); } send_codec_ = codec; encoder_switch_experiment_.SetCodec(send_codec_.codecType); - quality_rampup_experiment_.SetMaxBitrate( - last_frame_info_->width * last_frame_info_->height, codec.maxBitrate); // Keep the same encoder, as long as the video_format is unchanged. 
// Encoder creation block is split in two since EncoderInfo needed to start @@ -692,6 +870,10 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_->RegisterEncodeCompleteCallback(this); frame_encode_metadata_writer_.OnEncoderInit(send_codec_, HasInternalSource()); + next_frame_types_.clear(); + next_frame_types_.resize( + std::max(static_cast(codec.numberOfSimulcastStreams), 1), + VideoFrameType::kVideoFrameKey); } frame_encode_metadata_writer_.Reset(); @@ -699,14 +881,10 @@ void VideoStreamEncoder::ReconfigureEncoder() { was_encode_called_since_last_initialization_ = false; } - resource_adaptation_module_->SetEncoderSettings(EncoderSettings( - encoder_->GetEncoderInfo(), encoder_config_.Copy(), send_codec_)); + // Inform dependents of updated encoder settings. + OnEncoderSettingsChanged(); if (success) { - next_frame_types_.clear(); - next_frame_types_.resize( - std::max(static_cast(codec.numberOfSimulcastStreams), 1), - VideoFrameType::kVideoFrameKey); RTC_LOG(LS_VERBOSE) << " max bitrate " << codec.maxBitrate << " start bitrate " << codec.startBitrate << " max frame rate " << codec.maxFramerate @@ -717,8 +895,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { } if (pending_encoder_creation_) { - resource_adaptation_module_->StopResourceAdaptation(); - resource_adaptation_module_->StartResourceAdaptation(this); + stream_resource_manager_.EnsureEncodeUsageResourceStarted(); pending_encoder_creation_ = false; } @@ -762,66 +939,46 @@ void VideoStreamEncoder::ReconfigureEncoder() { last_encoder_rate_settings_.reset(); rate_settings.rate_control.framerate_fps = GetInputFramerateFps(); - SetEncoderRates(UpdateBitrateAllocationAndNotifyObserver(rate_settings)); + SetEncoderRates(UpdateBitrateAllocation(rate_settings)); } encoder_stats_observer_->OnEncoderReconfigured(encoder_config_, streams); pending_encoder_reconfiguration_ = false; + bool is_svc = false; + // Set min_bitrate_bps, max_bitrate_bps, and max padding bit rate for VP9 + // and leave only one 
stream containing all necessary information. + if (encoder_config_.codec_type == kVideoCodecVP9) { + // Lower max bitrate to the level codec actually can produce. + streams[0].max_bitrate_bps = + std::min(streams[0].max_bitrate_bps, + SvcRateAllocator::GetMaxBitrate(codec).bps()); + streams[0].min_bitrate_bps = codec.spatialLayers[0].minBitrate * 1000; + // target_bitrate_bps specifies the maximum padding bitrate. + streams[0].target_bitrate_bps = + SvcRateAllocator::GetPaddingBitrate(codec).bps(); + streams[0].width = streams.back().width; + streams[0].height = streams.back().height; + is_svc = codec.VP9()->numberOfSpatialLayers > 1; + streams.resize(1); + } + sink_->OnEncoderConfigurationChanged( - std::move(streams), encoder_config_.content_type, + std::move(streams), is_svc, encoder_config_.content_type, encoder_config_.min_transmit_bitrate_bps); - ConfigureQualityScaler(info); + stream_resource_manager_.ConfigureQualityScaler(info); } -void VideoStreamEncoder::ConfigureQualityScaler( - const VideoEncoder::EncoderInfo& encoder_info) { - RTC_DCHECK_RUN_ON(&encoder_queue_); - const auto scaling_settings = encoder_info.scaling_settings; - const bool quality_scaling_allowed = - IsResolutionScalingEnabled( - resource_adaptation_module_->degradation_preference()) && - scaling_settings.thresholds; - - if (quality_scaling_allowed) { - if (resource_adaptation_module_->quality_scaler() == nullptr) { - // Quality scaler has not already been configured. - - // Use experimental thresholds if available. - absl::optional experimental_thresholds; - if (quality_scaling_experiment_enabled_) { - experimental_thresholds = QualityScalingExperiment::GetQpThresholds( - encoder_config_.codec_type); - } - resource_adaptation_module_->UpdateQualityScalerSettings( - experimental_thresholds ? 
*experimental_thresholds - : *(scaling_settings.thresholds)); - initial_framedrop_ = 0; - } - } else { - resource_adaptation_module_->UpdateQualityScalerSettings(absl::nullopt); - initial_framedrop_ = kMaxInitialFramedrop; - } - - QualityScaler* quality_scaler = resource_adaptation_module_->quality_scaler(); - if (resource_adaptation_module_->degradation_preference() == - DegradationPreference::BALANCED && - quality_scaler && last_frame_info_) { - absl::optional thresholds = - resource_adaptation_module_->GetQpThresholds(); - if (thresholds) { - quality_scaler->SetQpThresholds(*thresholds); - } - } - - encoder_stats_observer_->OnAdaptationChanged( - VideoStreamEncoderObserver::AdaptationReason::kNone, - resource_adaptation_module_->GetActiveCounts( - AdaptationObserverInterface::AdaptReason::kCpu), - resource_adaptation_module_->GetActiveCounts( - AdaptationObserverInterface::AdaptReason::kQuality)); +void VideoStreamEncoder::OnEncoderSettingsChanged() { + EncoderSettings encoder_settings(encoder_->GetEncoderInfo(), + encoder_config_.Copy(), send_codec_); + stream_resource_manager_.SetEncoderSettings(encoder_settings); + input_state_provider_.OnEncoderSettingsChanged(encoder_settings); + bool is_screenshare = encoder_settings.encoder_config().content_type == + VideoEncoderConfig::ContentType::kScreen; + degradation_preference_manager_->SetIsScreenshare(is_screenshare); } void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { @@ -829,14 +986,14 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { VideoFrame incoming_frame = video_frame; // Local time in webrtc time base. - int64_t current_time_us = clock_->TimeInMicroseconds(); - int64_t current_time_ms = current_time_us / rtc::kNumMicrosecsPerMillisec; + Timestamp now = clock_->CurrentTime(); + // In some cases, e.g., when the frame from decoder is fed to encoder, // the timestamp may be set to the future. 
As the encoding pipeline assumes // capture time to be less than present time, we should reset the capture // timestamps here. Otherwise there may be issues with RTP send stream. - if (incoming_frame.timestamp_us() > current_time_us) - incoming_frame.set_timestamp_us(current_time_us); + if (incoming_frame.timestamp_us() > now.us()) + incoming_frame.set_timestamp_us(now.us()); // Capture time may come from clock with an offset and drift from clock_. int64_t capture_ntp_time_ms; @@ -845,7 +1002,7 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { } else if (video_frame.render_time_ms() != 0) { capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_; } else { - capture_ntp_time_ms = current_time_ms + delta_ntp_internal_ms_; + capture_ntp_time_ms = now.ms() + delta_ntp_internal_ms_; } incoming_frame.set_ntp_time_ms(capture_ntp_time_ms); @@ -869,14 +1026,14 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { } bool log_stats = false; - if (current_time_ms - last_frame_log_ms_ > kFrameLogIntervalMs) { - last_frame_log_ms_ = current_time_ms; + if (now.ms() - last_frame_log_ms_ > kFrameLogIntervalMs) { + last_frame_log_ms_ = now.ms(); log_stats = true; } last_captured_timestamp_ = incoming_frame.ntp_time_ms(); - int64_t post_time_us = rtc::TimeMicros(); + int64_t post_time_us = clock_->CurrentTime().us(); ++posted_frames_waiting_for_encode_; encoder_queue_.PostTask( @@ -889,26 +1046,40 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { posted_frames_waiting_for_encode_.fetch_sub(1); RTC_DCHECK_GT(posted_frames_waiting_for_encode, 0); CheckForAnimatedContent(incoming_frame, post_time_us); - if (posted_frames_waiting_for_encode == 1) { + bool cwnd_frame_drop = + cwnd_frame_drop_interval_ && + (cwnd_frame_counter_++ % cwnd_frame_drop_interval_.value() == 0); + if (posted_frames_waiting_for_encode == 1 && !cwnd_frame_drop) { MaybeEncodeVideoFrame(incoming_frame, post_time_us); } else { - // There is a 
newer frame in flight. Do not encode this frame. - RTC_LOG(LS_VERBOSE) - << "Incoming frame dropped due to that the encoder is blocked."; - ++dropped_frame_count_; - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kEncoderQueue); + if (cwnd_frame_drop) { + // Frame drop by congestion window pusback. Do not encode this + // frame. + ++dropped_frame_cwnd_pushback_count_; + encoder_stats_observer_->OnFrameDropped( + VideoStreamEncoderObserver::DropReason::kCongestionWindow); + } else { + // There is a newer frame in flight. Do not encode this frame. + RTC_LOG(LS_VERBOSE) + << "Incoming frame dropped due to that the encoder is blocked."; + ++dropped_frame_encoder_block_count_; + encoder_stats_observer_->OnFrameDropped( + VideoStreamEncoderObserver::DropReason::kEncoderQueue); + } accumulated_update_rect_.Union(incoming_frame.update_rect()); accumulated_update_rect_is_valid_ &= incoming_frame.has_update_rect(); } if (log_stats) { RTC_LOG(LS_INFO) << "Number of frames: captured " << captured_frame_count_ + << ", dropped (due to congestion window pushback) " + << dropped_frame_cwnd_pushback_count_ << ", dropped (due to encoder blocked) " - << dropped_frame_count_ << ", interval_ms " - << kFrameLogIntervalMs; + << dropped_frame_encoder_block_count_ + << ", interval_ms " << kFrameLogIntervalMs; captured_frame_count_ = 0; - dropped_frame_count_ = 0; + dropped_frame_cwnd_pushback_count_ = 0; + dropped_frame_encoder_block_count_ = 0; } }); } @@ -947,7 +1118,7 @@ void VideoStreamEncoder::TraceFrameDropEnd() { } VideoStreamEncoder::EncoderRateSettings -VideoStreamEncoder::UpdateBitrateAllocationAndNotifyObserver( +VideoStreamEncoder::UpdateBitrateAllocation( const EncoderRateSettings& rate_settings) { VideoBitrateAllocation new_allocation; // Only call allocators if bitrate > 0 (ie, not suspended), otherwise they @@ -958,32 +1129,17 @@ VideoStreamEncoder::UpdateBitrateAllocationAndNotifyObserver( rate_settings.rate_control.framerate_fps)); } - 
if (bitrate_observer_ && new_allocation.get_sum_bps() > 0) { - if (encoder_ && encoder_initialized_) { - // Avoid too old encoder_info_. - const int64_t kMaxDiffMs = 100; - const bool updated_recently = - (last_encode_info_ms_ && ((clock_->TimeInMilliseconds() - - *last_encode_info_ms_) < kMaxDiffMs)); - // Update allocation according to info from encoder. - bitrate_observer_->OnBitrateAllocationUpdated( - UpdateAllocationFromEncoderInfo( - new_allocation, - updated_recently ? encoder_info_ : encoder_->GetEncoderInfo())); - } else { - bitrate_observer_->OnBitrateAllocationUpdated(new_allocation); - } - } - EncoderRateSettings new_rate_settings = rate_settings; + new_rate_settings.rate_control.target_bitrate = new_allocation; new_rate_settings.rate_control.bitrate = new_allocation; // VideoBitrateAllocator subclasses may allocate a bitrate higher than the // target in order to sustain the min bitrate of the video codec. In this // case, make sure the bandwidth allocation is at least equal the allocation // as that is part of the document contract for that field. 
- new_rate_settings.rate_control.bandwidth_allocation = std::max( - new_rate_settings.rate_control.bandwidth_allocation, - DataRate::bps(new_rate_settings.rate_control.bitrate.get_sum_bps())); + new_rate_settings.rate_control.bandwidth_allocation = + std::max(new_rate_settings.rate_control.bandwidth_allocation, + DataRate::BitsPerSec( + new_rate_settings.rate_control.bitrate.get_sum_bps())); if (bitrate_adjuster_) { VideoBitrateAllocation adjusted_allocation = @@ -995,9 +1151,6 @@ VideoStreamEncoder::UpdateBitrateAllocationAndNotifyObserver( new_rate_settings.rate_control.bitrate = adjusted_allocation; } - encoder_stats_observer_->OnBitrateAllocationUpdated( - send_codec_, new_rate_settings.rate_control.bitrate); - return new_rate_settings; } @@ -1017,6 +1170,15 @@ void VideoStreamEncoder::SetEncoderRates( bool rate_control_changed = (!last_encoder_rate_settings_.has_value() || last_encoder_rate_settings_->rate_control != rate_settings.rate_control); + // For layer allocation signal we care only about the target bitrate (not the + // adjusted one) and the target fps. 
+ bool layer_allocation_changed = + !last_encoder_rate_settings_.has_value() || + last_encoder_rate_settings_->rate_control.target_bitrate != + rate_settings.rate_control.target_bitrate || + last_encoder_rate_settings_->rate_control.framerate_fps != + rate_settings.rate_control.framerate_fps; + if (last_encoder_rate_settings_ != rate_settings) { last_encoder_rate_settings_ = rate_settings; } @@ -1040,16 +1202,42 @@ void VideoStreamEncoder::SetEncoderRates( if (rate_control_changed) { encoder_->SetRates(rate_settings.rate_control); + + encoder_stats_observer_->OnBitrateAllocationUpdated( + send_codec_, rate_settings.rate_control.bitrate); frame_encode_metadata_writer_.OnSetRates( rate_settings.rate_control.bitrate, static_cast(rate_settings.rate_control.framerate_fps + 0.5)); + stream_resource_manager_.SetEncoderRates(rate_settings.rate_control); + if (layer_allocation_changed && + settings_.allocation_cb_type == + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoLayersAllocation) { + sink_->OnVideoLayersAllocationUpdated(CreateVideoLayersAllocation( + send_codec_, rate_settings.rate_control, encoder_->GetEncoderInfo())); + } + } + if ((settings_.allocation_cb_type == + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation) || + (encoder_config_.content_type == + VideoEncoderConfig::ContentType::kScreen && + settings_.allocation_cb_type == + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocationWhenScreenSharing)) { + sink_->OnBitrateAllocationUpdated( + // Update allocation according to info from encoder. An encoder may + // choose to not use all layers due to for example HW. 
+ UpdateAllocationFromEncoderInfo( + rate_settings.rate_control.target_bitrate, + encoder_->GetEncoderInfo())); } } void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, int64_t time_when_posted_us) { RTC_DCHECK_RUN_ON(&encoder_queue_); - resource_adaptation_module_->OnFrame(video_frame); + input_state_provider_.OnFrameSizeObserved(video_frame.size()); if (!last_frame_info_ || video_frame.width() != last_frame_info_->width || video_frame.height() != last_frame_info_->height || @@ -1094,8 +1282,7 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, EncoderRateSettings new_rate_settings = *last_encoder_rate_settings_; new_rate_settings.rate_control.framerate_fps = static_cast(framerate_fps); - SetEncoderRates( - UpdateBitrateAllocationAndNotifyObserver(new_rate_settings)); + SetEncoderRates(UpdateBitrateAllocation(new_rate_settings)); } last_parameters_update_ms_.emplace(now_ms); } @@ -1111,8 +1298,7 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, if (DropDueToSize(video_frame.size())) { RTC_LOG(LS_INFO) << "Dropping frame. Too large for target bitrate."; - resource_adaptation_module_->OnFrameDroppedDueToSize(); - ++initial_framedrop_; + stream_resource_manager_.OnFrameDroppedDueToSize(); // Storing references to a native buffer risks blocking frame capture. 
if (video_frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) { @@ -1126,17 +1312,7 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, } return; } - initial_framedrop_ = kMaxInitialFramedrop; - - if (!quality_rampup_done_ && TryQualityRampup(now_ms) && - resource_adaptation_module_->GetConstAdaptCounter().ResolutionCount( - AdaptationObserverInterface::AdaptReason::kQuality) > 0 && - resource_adaptation_module_->GetConstAdaptCounter().TotalCount( - AdaptationObserverInterface::AdaptReason::kCpu) == 0) { - RTC_LOG(LS_INFO) << "Reset quality limitations."; - resource_adaptation_module_->ResetVideoSourceRestrictions(); - quality_rampup_done_ = true; - } + stream_resource_manager_.OnMaybeEncodeFrame(); if (EncoderPaused()) { // Storing references to a native buffer risks blocking frame capture. @@ -1207,8 +1383,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, } if (encoder_info_ != info) { - resource_adaptation_module_->SetEncoderSettings(EncoderSettings( - encoder_->GetEncoderInfo(), encoder_config_.Copy(), send_codec_)); + OnEncoderSettingsChanged(); RTC_LOG(LS_INFO) << "Encoder settings changed from " << encoder_info_.ToString() << " to " << info.ToString(); } @@ -1225,20 +1400,23 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, last_encode_info_ms_ = clock_->TimeInMilliseconds(); VideoFrame out_frame(video_frame); - - const VideoFrameBuffer::Type buffer_type = - out_frame.video_frame_buffer()->type(); - const bool is_buffer_type_supported = - buffer_type == VideoFrameBuffer::Type::kI420 || - (buffer_type == VideoFrameBuffer::Type::kNative && - info.supports_native_handle); - - if (!is_buffer_type_supported) { + if (out_frame.video_frame_buffer()->type() == + VideoFrameBuffer::Type::kNative && + !info.supports_native_handle) { // This module only supports software encoding. 
- rtc::scoped_refptr converted_buffer( - out_frame.video_frame_buffer()->ToI420()); - - if (!converted_buffer) { + rtc::scoped_refptr buffer = + out_frame.video_frame_buffer()->GetMappedFrameBuffer( + info.preferred_pixel_formats); + bool buffer_was_converted = false; + if (!buffer) { + buffer = out_frame.video_frame_buffer()->ToI420(); + // TODO(https://crbug.com/webrtc/12021): Once GetI420 is pure virtual, + // this just true as an I420 buffer would return from + // GetMappedFrameBuffer. + buffer_was_converted = + (out_frame.video_frame_buffer()->GetI420() == nullptr); + } + if (!buffer) { RTC_LOG(LS_ERROR) << "Frame conversion failed, dropping frame."; return; } @@ -1252,8 +1430,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, update_rect = VideoFrame::UpdateRect{0, 0, out_frame.width(), out_frame.height()}; } - - out_frame.set_video_frame_buffer(converted_buffer); + out_frame.set_video_frame_buffer(buffer); out_frame.set_update_rect(update_rect); } @@ -1262,29 +1439,24 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, out_frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) { // If the frame can't be converted to I420, drop it. - auto i420_buffer = video_frame.video_frame_buffer()->ToI420(); - if (!i420_buffer) { - RTC_LOG(LS_ERROR) << "Frame conversion for crop failed, dropping frame."; - return; - } int cropped_width = video_frame.width() - crop_width_; int cropped_height = video_frame.height() - crop_height_; - rtc::scoped_refptr cropped_buffer = - I420Buffer::Create(cropped_width, cropped_height); + rtc::scoped_refptr cropped_buffer; // TODO(ilnik): Remove scaling if cropping is too big, as it should never // happen after SinkWants signaled correctly from ReconfigureEncoder. 
VideoFrame::UpdateRect update_rect = video_frame.update_rect(); if (crop_width_ < 4 && crop_height_ < 4) { - cropped_buffer->CropAndScaleFrom(*i420_buffer, crop_width_ / 2, - crop_height_ / 2, cropped_width, - cropped_height); + cropped_buffer = video_frame.video_frame_buffer()->CropAndScale( + crop_width_ / 2, crop_height_ / 2, cropped_width, cropped_height, + cropped_width, cropped_height); update_rect.offset_x -= crop_width_ / 2; update_rect.offset_y -= crop_height_ / 2; update_rect.Intersect( VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height}); } else { - cropped_buffer->ScaleFrom(*i420_buffer); + cropped_buffer = video_frame.video_frame_buffer()->Scale(cropped_width, + cropped_height); if (!update_rect.IsEmpty()) { // Since we can't reason about pixels after scaling, we invalidate whole // picture, if anything changed. @@ -1292,6 +1464,11 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height}; } } + if (!cropped_buffer) { + RTC_LOG(LS_ERROR) << "Cropping and scaling frame failed, dropping frame."; + return; + } + out_frame.set_video_frame_buffer(cropped_buffer); out_frame.set_update_rect(update_rect); out_frame.set_ntp_time_ms(video_frame.ntp_time_ms()); @@ -1320,7 +1497,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(), "Encode"); - resource_adaptation_module_->OnEncodeStarted(out_frame, time_when_posted_us); + stream_resource_manager_.OnEncodeStarted(out_frame, time_when_posted_us); RTC_DCHECK_LE(send_codec_.width, out_frame.width()); RTC_DCHECK_LE(send_codec_.height, out_frame.height()); @@ -1343,9 +1520,19 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) { RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " << encoder_config_.video_format.ToString(); + if 
(settings_.encoder_switch_request_callback) { - encoder_failed_ = true; - settings_.encoder_switch_request_callback->RequestEncoderFallback(); + if (encoder_selector_) { + if (auto encoder = encoder_selector_->OnEncoderBroken()) { + QueueRequestEncoderSwitch(*encoder); + } + } else { + encoder_failed_ = true; + main_queue_->PostTask(ToQueuedTask(task_safety_, [this]() { + RTC_DCHECK_RUN_ON(main_queue_); + settings_.encoder_switch_request_callback->RequestEncoderFallback(); + })); + } } else { RTC_LOG(LS_ERROR) << "Encoder failed but no encoder fallback callback is registered"; @@ -1416,8 +1603,7 @@ void VideoStreamEncoder::OnLossNotification( EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { + const CodecSpecificInfo* codec_specific_info) { TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded", "timestamp", encoded_image.Timestamp()); const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0); @@ -1425,9 +1611,8 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( frame_encode_metadata_writer_.FillTimingInfo(spatial_idx, &image_copy); - std::unique_ptr fragmentation_copy = - frame_encode_metadata_writer_.UpdateBitstream(codec_specific_info, - fragmentation, &image_copy); + frame_encode_metadata_writer_.UpdateBitstream(codec_specific_info, + &image_copy); // Piggyback ALR experiment group id and simulcast id into the content type. const uint8_t experiment_id = @@ -1445,6 +1630,37 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( RTC_CHECK(videocontenttypehelpers::SetSimulcastId( &image_copy.content_type_, static_cast(spatial_idx + 1))); + // Currently internal quality scaler is used for VP9 instead of webrtc qp + // scaler (in no-svc case or if only a single spatial layer is encoded). + // It has to be explicitly detected and reported to adaptation metrics. 
+ // Post a task because |send_codec_| requires |encoder_queue_| lock. + unsigned int image_width = image_copy._encodedWidth; + unsigned int image_height = image_copy._encodedHeight; + VideoCodecType codec = codec_specific_info + ? codec_specific_info->codecType + : VideoCodecType::kVideoCodecGeneric; + encoder_queue_.PostTask([this, codec, image_width, image_height] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + if (codec == VideoCodecType::kVideoCodecVP9 && + send_codec_.VP9()->automaticResizeOn) { + unsigned int expected_width = send_codec_.width; + unsigned int expected_height = send_codec_.height; + int num_active_layers = 0; + for (int i = 0; i < send_codec_.VP9()->numberOfSpatialLayers; ++i) { + if (send_codec_.spatialLayers[i].active) { + ++num_active_layers; + expected_width = send_codec_.spatialLayers[i].width; + expected_height = send_codec_.spatialLayers[i].height; + } + } + RTC_DCHECK_LE(num_active_layers, 1) + << "VP9 quality scaling is enabled for " + "SVC with several active layers."; + encoder_stats_observer_->OnEncoderInternalScalerUpdate( + image_width < expected_width || image_height < expected_height); + } + }); + // Encoded is called on whatever thread the real encoder implementation run // on. In the case of hardware encoders, there might be several encoders // running in parallel on different threads. 
@@ -1462,54 +1678,13 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( simulcast_id = encoded_image.SpatialIndex().value_or(0); } - std::unique_ptr codec_info_copy; - { - rtc::CritScope cs(&encoded_image_lock_); - - if (codec_specific_info && codec_specific_info->generic_frame_info) { - codec_info_copy = - std::make_unique(*codec_specific_info); - GenericFrameInfo& generic_info = *codec_info_copy->generic_frame_info; - generic_info.frame_id = next_frame_id_++; - - if (encoder_buffer_state_.size() <= static_cast(simulcast_id)) { - RTC_LOG(LS_ERROR) << "At most " << encoder_buffer_state_.size() - << " simulcast streams supported."; - } else { - std::array& state = - encoder_buffer_state_[simulcast_id]; - for (const CodecBufferUsage& buffer : generic_info.encoder_buffers) { - if (state.size() <= static_cast(buffer.id)) { - RTC_LOG(LS_ERROR) - << "At most " << state.size() << " encoder buffers supported."; - break; - } - - if (buffer.referenced) { - int64_t diff = generic_info.frame_id - state[buffer.id]; - if (diff <= 0) { - RTC_LOG(LS_ERROR) << "Invalid frame diff: " << diff << "."; - } else if (absl::c_find(generic_info.frame_diffs, diff) == - generic_info.frame_diffs.end()) { - generic_info.frame_diffs.push_back(diff); - } - } - - if (buffer.updated) - state[buffer.id] = generic_info.frame_id; - } - } - } - } - - EncodedImageCallback::Result result = sink_->OnEncodedImage( - image_copy, codec_info_copy ? codec_info_copy.get() : codec_specific_info, - fragmentation_copy ? fragmentation_copy.get() : fragmentation); + EncodedImageCallback::Result result = + sink_->OnEncodedImage(image_copy, codec_specific_info); // We are only interested in propagating the meta-data about the image, not // encoded data itself, to the post encode function. Since we cannot be sure // the pointer will still be valid when run on the task queue, set it to null. 
- DataSize frame_size = DataSize::bytes(image_copy.size()); + DataSize frame_size = DataSize::Bytes(image_copy.size()); image_copy.ClearEncodedData(); int temporal_index = 0; @@ -1524,7 +1699,8 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( temporal_index = 0; } - RunPostEncode(image_copy, rtc::TimeMicros(), temporal_index, frame_size); + RunPostEncode(image_copy, clock_->CurrentTime().us(), temporal_index, + frame_size); if (result.error == Result::OK) { // In case of an internal encoder running on a separate thread, the @@ -1547,55 +1723,87 @@ void VideoStreamEncoder::OnDroppedFrame(DropReason reason) { case DropReason::kDroppedByMediaOptimizations: encoder_stats_observer_->OnFrameDropped( VideoStreamEncoderObserver::DropReason::kMediaOptimization); - encoder_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - QualityScaler* quality_scaler = - resource_adaptation_module_->quality_scaler(); - if (quality_scaler) - quality_scaler->ReportDroppedFrameByMediaOpt(); - }); break; case DropReason::kDroppedByEncoder: encoder_stats_observer_->OnFrameDropped( VideoStreamEncoderObserver::DropReason::kEncoder); - encoder_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - QualityScaler* quality_scaler = - resource_adaptation_module_->quality_scaler(); - if (quality_scaler) - quality_scaler->ReportDroppedFrameByEncoder(); - }); break; } sink_->OnDroppedFrame(reason); + encoder_queue_.PostTask([this, reason] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + stream_resource_manager_.OnFrameDropped(reason); + }); +} + +DataRate VideoStreamEncoder::UpdateTargetBitrate(DataRate target_bitrate, + double cwnd_reduce_ratio) { + RTC_DCHECK_RUN_ON(&encoder_queue_); + DataRate updated_target_bitrate = target_bitrate; + + // Drop frames when congestion window pushback ratio is larger than 1 + // percent and target bitrate is larger than codec min bitrate. + // When target_bitrate is 0 means codec is paused, skip frame dropping. 
+ if (cwnd_reduce_ratio > 0.01 && target_bitrate.bps() > 0 && + target_bitrate.bps() > send_codec_.minBitrate * 1000) { + int reduce_bitrate_bps = std::min( + static_cast(target_bitrate.bps() * cwnd_reduce_ratio), + static_cast(target_bitrate.bps() - send_codec_.minBitrate * 1000)); + if (reduce_bitrate_bps > 0) { + // At maximum the congestion window can drop 1/2 frames. + cwnd_frame_drop_interval_ = std::max( + 2, static_cast(target_bitrate.bps() / reduce_bitrate_bps)); + // Reduce target bitrate accordingly. + updated_target_bitrate = + target_bitrate - (target_bitrate / cwnd_frame_drop_interval_.value()); + return updated_target_bitrate; + } + } + cwnd_frame_drop_interval_.reset(); + return updated_target_bitrate; } void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, DataRate stable_target_bitrate, DataRate link_allocation, uint8_t fraction_lost, - int64_t round_trip_time_ms) { + int64_t round_trip_time_ms, + double cwnd_reduce_ratio) { RTC_DCHECK_GE(link_allocation, target_bitrate); if (!encoder_queue_.IsCurrent()) { encoder_queue_.PostTask([this, target_bitrate, stable_target_bitrate, - link_allocation, fraction_lost, - round_trip_time_ms] { - OnBitrateUpdated(target_bitrate, stable_target_bitrate, link_allocation, - fraction_lost, round_trip_time_ms); + link_allocation, fraction_lost, round_trip_time_ms, + cwnd_reduce_ratio] { + DataRate updated_target_bitrate = + UpdateTargetBitrate(target_bitrate, cwnd_reduce_ratio); + OnBitrateUpdated(updated_target_bitrate, stable_target_bitrate, + link_allocation, fraction_lost, round_trip_time_ms, + cwnd_reduce_ratio); }); return; } RTC_DCHECK_RUN_ON(&encoder_queue_); - if (encoder_switch_experiment_.IsBitrateBelowThreshold(target_bitrate) && - settings_.encoder_switch_request_callback && !encoder_switch_requested_) { - EncoderSwitchRequestCallback::Config conf; - conf.codec_name = encoder_switch_experiment_.to_codec; - conf.param = encoder_switch_experiment_.to_param; - conf.value = 
encoder_switch_experiment_.to_value; - settings_.encoder_switch_request_callback->RequestEncoderSwitch(conf); + const bool video_is_suspended = target_bitrate == DataRate::Zero(); + const bool video_suspension_changed = video_is_suspended != EncoderPaused(); - encoder_switch_requested_ = true; + if (!video_is_suspended && settings_.encoder_switch_request_callback) { + if (encoder_selector_) { + if (auto encoder = + encoder_selector_->OnAvailableBitrate(link_allocation)) { + QueueRequestEncoderSwitch(*encoder); + } + } else if (encoder_switch_experiment_.IsBitrateBelowThreshold( + target_bitrate) && + !encoder_switch_requested_) { + EncoderSwitchRequestCallback::Config conf; + conf.codec_name = encoder_switch_experiment_.to_codec; + conf.param = encoder_switch_experiment_.to_param; + conf.value = encoder_switch_experiment_.to_value; + QueueRequestEncoderSwitch(conf); + + encoder_switch_requested_ = true; + } } RTC_DCHECK(sink_) << "sink_ must be set before the encoder is active."; @@ -1606,23 +1814,6 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, << " packet loss " << static_cast(fraction_lost) << " rtt " << round_trip_time_ms; - if (set_start_bitrate_bps_ > 0 && !has_seen_first_bwe_drop_ && - resource_adaptation_module_->quality_scaler() && - quality_scaler_settings_.InitialBitrateIntervalMs() && - quality_scaler_settings_.InitialBitrateFactor()) { - int64_t diff_ms = clock_->TimeInMilliseconds() - set_start_bitrate_time_ms_; - if (diff_ms < quality_scaler_settings_.InitialBitrateIntervalMs().value() && - (target_bitrate.bps() < - (set_start_bitrate_bps_ * - quality_scaler_settings_.InitialBitrateFactor().value()))) { - RTC_LOG(LS_INFO) << "Reset initial_framedrop_. 
Start bitrate: " - << set_start_bitrate_bps_ - << ", target bitrate: " << target_bitrate.bps(); - initial_framedrop_ = 0; - has_seen_first_bwe_drop_ = true; - } - } - if (encoder_) { encoder_->OnPacketLossRateUpdate(static_cast(fraction_lost) / 256.f); encoder_->OnRttUpdate(round_trip_time_ms); @@ -1630,18 +1821,16 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, uint32_t framerate_fps = GetInputFramerateFps(); frame_dropper_.SetRates((target_bitrate.bps() + 500) / 1000, framerate_fps); - const bool video_is_suspended = target_bitrate == DataRate::Zero(); - const bool video_suspension_changed = video_is_suspended != EncoderPaused(); EncoderRateSettings new_rate_settings{ VideoBitrateAllocation(), static_cast(framerate_fps), link_allocation, target_bitrate, stable_target_bitrate}; - SetEncoderRates(UpdateBitrateAllocationAndNotifyObserver(new_rate_settings)); + SetEncoderRates(UpdateBitrateAllocation(new_rate_settings)); if (target_bitrate.bps() != 0) encoder_target_bitrate_bps_ = target_bitrate.bps(); - resource_adaptation_module_->SetEncoderTargetBitrate( - encoder_target_bitrate_bps_); + + stream_resource_manager_.SetTargetBitrate(target_bitrate); if (video_suspension_changed) { RTC_LOG(LS_INFO) << "Video suspend state changed to: " @@ -1650,7 +1839,8 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, } if (video_suspension_changed && !video_is_suspended && pending_frame_ && !DropDueToSize(pending_frame_->size())) { - int64_t pending_time_us = rtc::TimeMicros() - pending_frame_post_time_us_; + int64_t pending_time_us = + clock_->CurrentTime().us() - pending_frame_post_time_us_; if (pending_time_us < kPendingFrameTimeoutMs * 1000) EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_); pending_frame_.reset(); @@ -1658,13 +1848,28 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, } bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const { - if (initial_framedrop_ >= kMaxInitialFramedrop 
|| + if (!stream_resource_manager_.DropInitialFrames() || !encoder_target_bitrate_bps_.has_value()) { return false; } + bool simulcast_or_svc = + (send_codec_.codecType == VideoCodecType::kVideoCodecVP9 && + send_codec_.VP9().numberOfSpatialLayers > 1) || + (send_codec_.numberOfSimulcastStreams > 1 || + encoder_config_.simulcast_layers.size() > 1); + + if (simulcast_or_svc) { + if (stream_resource_manager_.SingleActiveStreamPixels()) { + pixel_count = stream_resource_manager_.SingleActiveStreamPixels().value(); + } else { + return false; + } + } + absl::optional encoder_bitrate_limits = - GetEncoderBitrateLimits(encoder_->GetEncoderInfo(), pixel_count); + encoder_->GetEncoderInfo().GetEncoderBitrateLimitsForResolution( + pixel_count); if (encoder_bitrate_limits.has_value()) { // Use bitrate limits provided by encoder. @@ -1680,44 +1885,21 @@ bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const { return false; } -bool VideoStreamEncoder::TryQualityRampup(int64_t now_ms) { - QualityScaler* quality_scaler = resource_adaptation_module_->quality_scaler(); - if (!quality_scaler) - return false; - - uint32_t bw_kbps = last_encoder_rate_settings_ - ? last_encoder_rate_settings_->rate_control - .bandwidth_allocation.kbps() - : 0; - - if (quality_rampup_experiment_.BwHigh(now_ms, bw_kbps)) { - // Verify that encoder is at max bitrate and the QP is low. 
- if (encoder_target_bitrate_bps_.value_or(0) == - send_codec_.maxBitrate * 1000 && - quality_scaler->QpFastFilterLow()) { - return true; - } - } - return false; -} - -bool VideoStreamEncoder::TriggerAdaptDown( - AdaptationObserverInterface::AdaptReason reason) { - RTC_DCHECK_RUN_ON(&encoder_queue_); - return resource_adaptation_module_->AdaptDown(reason); -} - -void VideoStreamEncoder::TriggerAdaptUp( - AdaptationObserverInterface::AdaptReason reason) { - RTC_DCHECK_RUN_ON(&encoder_queue_); - resource_adaptation_module_->AdaptUp(reason); -} - void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated( - VideoSourceRestrictions restrictions) { + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) { RTC_DCHECK_RUN_ON(&encoder_queue_); - video_source_sink_controller_->SetRestrictions(std::move(restrictions)); - video_source_sink_controller_->PushSourceSinkSettings(); + RTC_LOG(INFO) << "Updating sink restrictions from " + << (reason ? reason->Name() : std::string("")) << " to " + << restrictions.ToString(); + main_queue_->PostTask(ToQueuedTask( + task_safety_, [this, restrictions = std::move(restrictions)]() { + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetRestrictions(std::move(restrictions)); + video_source_sink_controller_.PushSourceSinkSettings(); + })); } void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, @@ -1738,9 +1920,9 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) { encode_duration_us = // TODO(nisse): Maybe use capture_time_ms_ rather than encode_start_ms_? 
- rtc::kNumMicrosecsPerMillisec * - (encoded_image.timing_.encode_finish_ms - - encoded_image.timing_.encode_start_ms); + TimeDelta::Millis(encoded_image.timing_.encode_finish_ms - + encoded_image.timing_.encode_start_ms) + .us(); } // Run post encode tasks, such as overuse detection and frame rate/drop @@ -1762,13 +1944,8 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, } } - resource_adaptation_module_->OnEncodeCompleted( - encoded_image.Timestamp(), time_sent_us, - encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec, - encode_duration_us); - QualityScaler* quality_scaler = resource_adaptation_module_->quality_scaler(); - if (quality_scaler && encoded_image.qp_ >= 0) - quality_scaler->ReportQp(encoded_image.qp_, time_sent_us); + stream_resource_manager_.OnEncodeCompleted(encoded_image, time_sent_us, + encode_duration_us); if (bitrate_adjuster_) { bitrate_adjuster_->OnEncodedFrame(encoded_image, temporal_index); } @@ -1791,8 +1968,8 @@ void VideoStreamEncoder::ReleaseEncoder() { bool VideoStreamEncoder::EncoderSwitchExperiment::IsBitrateBelowThreshold( const DataRate& target_bitrate) { - DataRate rate = - DataRate::kbps(bitrate_filter.Apply(1.0, target_bitrate.kbps())); + DataRate rate = DataRate::KilobitsPerSec( + bitrate_filter.Apply(1.0, target_bitrate.kbps())); return current_thresholds.bitrate && rate < *current_thresholds.bitrate; } @@ -1858,7 +2035,8 @@ VideoStreamEncoder::ParseEncoderSwitchFieldTrial() const { rtc::FromString(thresholds_split[2], &pixel_count); if (bitrate_kbps > 0) { - result.codec_thresholds[codec].bitrate = DataRate::kbps(bitrate_kbps); + result.codec_thresholds[codec].bitrate = + DataRate::KilobitsPerSec(bitrate_kbps); } if (pixel_count > 0) { @@ -1922,7 +2100,7 @@ void VideoStreamEncoder::CheckForAnimatedContent( if (!automatic_animation_detection_experiment_.enabled || encoder_config_.content_type != VideoEncoderConfig::ContentType::kScreen || - 
resource_adaptation_module_->degradation_preference() != + stream_resource_manager_.degradation_preference() != DegradationPreference::BALANCED) { return; } @@ -1954,10 +2132,10 @@ void VideoStreamEncoder::CheckForAnimatedContent( } else if ((!last_update_rect_ || frame.update_rect() != *last_update_rect_)) { last_update_rect_ = frame.update_rect(); - animation_start_time_ = Timestamp::us(time_when_posted_in_us); + animation_start_time_ = Timestamp::Micros(time_when_posted_in_us); } else { TimeDelta animation_duration = - Timestamp::us(time_when_posted_in_us) - animation_start_time_; + Timestamp::Micros(time_when_posted_in_us) - animation_start_time_; float area_ratio = static_cast(last_update_rect_->width * last_update_rect_->height) / (frame.width() * frame.height()); @@ -1980,11 +2158,87 @@ void VideoStreamEncoder::CheckForAnimatedContent( RTC_LOG(LS_INFO) << "Removing resolution cap due to no consistent " "animation detection."; } - video_source_sink_controller_->SetPixelsPerFrameUpperLimit( - should_cap_resolution ? absl::optional(kMaxAnimationPixels) - : absl::nullopt); - video_source_sink_controller_->PushSourceSinkSettings(); + main_queue_->PostTask(ToQueuedTask(task_safety_, [this, + should_cap_resolution]() { + RTC_DCHECK_RUN_ON(main_queue_); + video_source_sink_controller_.SetPixelsPerFrameUpperLimit( + should_cap_resolution ? 
absl::optional(kMaxAnimationPixels) + : absl::nullopt); + video_source_sink_controller_.PushSourceSinkSettings(); + })); } } +// RTC_RUN_ON(&encoder_queue_) +void VideoStreamEncoder::QueueRequestEncoderSwitch( + const EncoderSwitchRequestCallback::Config& conf) { + main_queue_->PostTask(ToQueuedTask(task_safety_, [this, conf]() { + RTC_DCHECK_RUN_ON(main_queue_); + settings_.encoder_switch_request_callback->RequestEncoderSwitch(conf); + })); +} + +// RTC_RUN_ON(&encoder_queue_) +void VideoStreamEncoder::QueueRequestEncoderSwitch( + const webrtc::SdpVideoFormat& format) { + main_queue_->PostTask(ToQueuedTask(task_safety_, [this, format]() { + RTC_DCHECK_RUN_ON(main_queue_); + settings_.encoder_switch_request_callback->RequestEncoderSwitch(format); + })); +} + +void VideoStreamEncoder::InjectAdaptationResource( + rtc::scoped_refptr resource, + VideoAdaptationReason reason) { + rtc::Event map_resource_event; + encoder_queue_.PostTask([this, resource, reason, &map_resource_event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + additional_resources_.push_back(resource); + stream_resource_manager_.AddResource(resource, reason); + map_resource_event.Set(); + }); + map_resource_event.Wait(rtc::Event::kForever); +} + +void VideoStreamEncoder::InjectAdaptationConstraint( + AdaptationConstraint* adaptation_constraint) { + rtc::Event event; + encoder_queue_.PostTask([this, adaptation_constraint, &event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + if (!resource_adaptation_processor_) { + // The VideoStreamEncoder was stopped and the processor destroyed before + // this task had a chance to execute. No action needed. 
+ return; + } + adaptation_constraints_.push_back(adaptation_constraint); + video_stream_adapter_->AddAdaptationConstraint(adaptation_constraint); + event.Set(); + }); + event.Wait(rtc::Event::kForever); +} + +void VideoStreamEncoder::AddRestrictionsListenerForTesting( + VideoSourceRestrictionsListener* restrictions_listener) { + rtc::Event event; + encoder_queue_.PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK(resource_adaptation_processor_); + video_stream_adapter_->AddRestrictionsListener(restrictions_listener); + event.Set(); + }); + event.Wait(rtc::Event::kForever); +} + +void VideoStreamEncoder::RemoveRestrictionsListenerForTesting( + VideoSourceRestrictionsListener* restrictions_listener) { + rtc::Event event; + encoder_queue_.PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK(resource_adaptation_processor_); + video_stream_adapter_->RemoveRestrictionsListener(restrictions_listener); + event.Set(); + }); + event.Wait(rtc::Event::kForever); +} + } // namespace webrtc diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h index 97a4a66a05..7dfc990846 100644 --- a/video/video_stream_encoder.h +++ b/video/video_stream_encoder.h @@ -17,6 +17,7 @@ #include #include +#include "api/adaptation/resource.h" #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_rotation.h" @@ -26,32 +27,28 @@ #include "api/video/video_stream_encoder_settings.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "call/adaptation/resource_adaptation_module_interface.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/resource_adaptation_processor.h" +#include "call/adaptation/resource_adaptation_processor_interface.h" #include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state_provider.h" #include 
"modules/video_coding/utility/frame_dropper.h" -#include "modules/video_coding/utility/quality_scaler.h" -#include "rtc_base/critical_section.h" -#include "rtc_base/event.h" -#include "rtc_base/experiments/quality_rampup_experiment.h" -#include "rtc_base/experiments/quality_scaler_settings.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" +#include "video/adaptation/video_stream_encoder_resource_manager.h" #include "video/encoder_bitrate_adjuster.h" #include "video/frame_encode_metadata_writer.h" -#include "video/overuse_frame_detector_resource_adaptation_module.h" #include "video/video_source_sink_controller.h" namespace webrtc { -absl::optional GetEncoderBitrateLimits( - const VideoEncoder::EncoderInfo& encoder_info, - int frame_size_pixels); - // VideoStreamEncoder represent a video encoder that accepts raw video frames as // input and produces an encoded bit stream. // Usage: @@ -62,16 +59,8 @@ absl::optional GetEncoderBitrateLimits( // Call Stop() when done. class VideoStreamEncoder : public VideoStreamEncoderInterface, private EncodedImageCallback, - public ResourceAdaptationModuleListener { + public VideoSourceRestrictionsListener { public: - // If the encoder is reconfigured with a source, but we've yet to receive any - // frames, this 144p resolution is picked as the default value of - // |last_frame_size_|. - // TODO(hbos): Can we avoid guesses and properly handle the case of - // |last_frame_info_| not having a value, deleting these constants? 
- static const int kDefaultLastFrameInfoWidth; - static const int kDefaultLastFrameInfoHeight; - VideoStreamEncoder(Clock* clock, uint32_t number_of_cores, VideoStreamEncoderObserver* encoder_stats_observer, @@ -80,6 +69,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, TaskQueueFactory* task_queue_factory); ~VideoStreamEncoder() override; + void AddAdaptationResource(rtc::scoped_refptr resource) override; + std::vector> GetAdaptationResources() override; + void SetSource(rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) override; @@ -88,9 +80,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // TODO(perkj): Can we remove VideoCodec.startBitrate ? void SetStartBitrate(int start_bitrate_bps) override; - void SetBitrateAllocationObserver( - VideoBitrateAllocationObserver* bitrate_observer) override; - void SetFecControllerOverride( FecControllerOverride* fec_controller_override) override; @@ -110,22 +99,33 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, DataRate stable_target_bitrate, DataRate target_headroom, uint8_t fraction_lost, - int64_t round_trip_time_ms) override; + int64_t round_trip_time_ms, + double cwnd_reduce_ratio) override; + + DataRate UpdateTargetBitrate(DataRate target_bitrate, + double cwnd_reduce_ratio); protected: // Used for testing. For example the |ScalingObserverInterface| methods must // be called on |encoder_queue_|. rtc::TaskQueue* encoder_queue() { return &encoder_queue_; } - // These methods are protected for easier testing. - // TODO(hbos): When "DropDueToSize" no longer causes TriggerAdaptDown(), these - // methods are only used for testing and can be removed in favor of the test - // invoking AdaptUp() or AdaptDown() on a test-injected adaptation module. 
- void TriggerAdaptUp(AdaptationObserverInterface::AdaptReason reason); - bool TriggerAdaptDown(AdaptationObserverInterface::AdaptReason reason); - void OnVideoSourceRestrictionsUpdated( - VideoSourceRestrictions restrictions) override; + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override; + + // Used for injected test resources. + // TODO(eshr): Move all adaptation tests out of VideoStreamEncoder tests. + void InjectAdaptationResource(rtc::scoped_refptr resource, + VideoAdaptationReason reason); + void InjectAdaptationConstraint(AdaptationConstraint* adaptation_constraint); + + void AddRestrictionsListenerForTesting( + VideoSourceRestrictionsListener* restrictions_listener); + void RemoveRestrictionsListenerForTesting( + VideoSourceRestrictionsListener* restrictions_listener); private: class VideoFrameInfo { @@ -159,9 +159,10 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, DataRate stable_encoder_target; }; - void ReconfigureEncoder() RTC_RUN_ON(&encoder_queue_); + class DegradationPreferenceManager; - void ConfigureQualityScaler(const VideoEncoder::EncoderInfo& encoder_info); + void ReconfigureEncoder() RTC_RUN_ON(&encoder_queue_); + void OnEncoderSettingsChanged() RTC_RUN_ON(&encoder_queue_); // Implements VideoSinkInterface. void OnFrame(const VideoFrame& video_frame) override; @@ -175,13 +176,11 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Indicates wether frame should be dropped because the pixel count is too // large for the current bitrate configuration. bool DropDueToSize(uint32_t pixel_count) const RTC_RUN_ON(&encoder_queue_); - bool TryQualityRampup(int64_t now_ms) RTC_RUN_ON(&encoder_queue_); // Implements EncodedImageCallback. 
EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; + const CodecSpecificInfo* codec_specific_info) override; void OnDroppedFrame(EncodedImageCallback::DropReason reason) override; @@ -190,9 +189,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void TraceFrameDropEnd(); // Returns a copy of |rate_settings| with the |bitrate| field updated using - // the current VideoBitrateAllocator, and notifies any listeners of the new - // allocation. - EncoderRateSettings UpdateBitrateAllocationAndNotifyObserver( + // the current VideoBitrateAllocator. + EncoderRateSettings UpdateBitrateAllocation( const EncoderRateSettings& rate_settings) RTC_RUN_ON(&encoder_queue_); uint32_t GetInputFramerateFps() RTC_RUN_ON(&encoder_queue_); @@ -205,31 +203,33 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, DataSize frame_size); bool HasInternalSource() const RTC_RUN_ON(&encoder_queue_); void ReleaseEncoder() RTC_RUN_ON(&encoder_queue_); + // After calling this function |resource_adaptation_processor_| will be null. + void ShutdownResourceAdaptationQueue(); void CheckForAnimatedContent(const VideoFrame& frame, int64_t time_when_posted_in_ms) RTC_RUN_ON(&encoder_queue_); - rtc::Event shutdown_event_; + // TODO(bugs.webrtc.org/11341) : Remove this version of RequestEncoderSwitch. + void QueueRequestEncoderSwitch( + const EncoderSwitchRequestCallback::Config& conf) + RTC_RUN_ON(&encoder_queue_); + void QueueRequestEncoderSwitch(const webrtc::SdpVideoFormat& format) + RTC_RUN_ON(&encoder_queue_); + + TaskQueueBase* const main_queue_; const uint32_t number_of_cores_; - // Counts how many frames we've dropped in the initial framedrop phase. 
- int initial_framedrop_; - bool quality_rampup_done_ RTC_GUARDED_BY(&encoder_queue_); - QualityRampupExperiment quality_rampup_experiment_ - RTC_GUARDED_BY(&encoder_queue_); const bool quality_scaling_experiment_enabled_; EncoderSink* sink_; const VideoStreamEncoderSettings settings_; const RateControlSettings rate_control_settings_; - const QualityScalerSettings quality_scaler_settings_; + std::unique_ptr const + encoder_selector_; VideoStreamEncoderObserver* const encoder_stats_observer_; - // |thread_checker_| checks that public methods that are related to lifetime - // of VideoStreamEncoder are called on the same thread. - rtc::ThreadChecker thread_checker_; VideoEncoderConfig encoder_config_ RTC_GUARDED_BY(&encoder_queue_); std::unique_ptr encoder_ RTC_GUARDED_BY(&encoder_queue_) @@ -252,9 +252,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int crop_height_ RTC_GUARDED_BY(&encoder_queue_); absl::optional encoder_target_bitrate_bps_ RTC_GUARDED_BY(&encoder_queue_); - int set_start_bitrate_bps_ RTC_GUARDED_BY(&encoder_queue_); - int64_t set_start_bitrate_time_ms_ RTC_GUARDED_BY(&encoder_queue_); - bool has_seen_first_bwe_drop_ RTC_GUARDED_BY(&encoder_queue_); size_t max_data_payload_length_ RTC_GUARDED_BY(&encoder_queue_); absl::optional last_encoder_rate_settings_ RTC_GUARDED_BY(&encoder_queue_); @@ -279,7 +276,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int64_t last_frame_log_ms_ RTC_GUARDED_BY(incoming_frame_race_checker_); int captured_frame_count_ RTC_GUARDED_BY(&encoder_queue_); - int dropped_frame_count_ RTC_GUARDED_BY(&encoder_queue_); + int dropped_frame_cwnd_pushback_count_ RTC_GUARDED_BY(&encoder_queue_); + int dropped_frame_encoder_block_count_ RTC_GUARDED_BY(&encoder_queue_); absl::optional pending_frame_ RTC_GUARDED_BY(&encoder_queue_); int64_t pending_frame_post_time_us_ RTC_GUARDED_BY(&encoder_queue_); @@ -300,8 +298,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, 
kFirstFrameAfterResize // Resize observed. } expect_resize_state_ RTC_GUARDED_BY(&encoder_queue_); - VideoBitrateAllocationObserver* bitrate_observer_ - RTC_GUARDED_BY(&encoder_queue_); FecControllerOverride* fec_controller_override_ RTC_GUARDED_BY(&encoder_queue_); absl::optional last_parameters_update_ms_ @@ -327,6 +323,12 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // the worker thread. std::atomic pending_frame_drops_; + // Congestion window frame drop ratio (drop 1 in every + // cwnd_frame_drop_interval_ frames). + absl::optional cwnd_frame_drop_interval_ RTC_GUARDED_BY(&encoder_queue_); + // Frame counter for congestion window frame drop. + int cwnd_frame_counter_ RTC_GUARDED_BY(&encoder_queue_); + std::unique_ptr bitrate_adjuster_ RTC_GUARDED_BY(&encoder_queue_); @@ -341,17 +343,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // experiment group numbers incremented by 1. const std::array experiment_groups_; - // TODO(philipel): Remove this lock and run on |encoder_queue_| instead. - rtc::CriticalSection encoded_image_lock_; - - int64_t next_frame_id_ RTC_GUARDED_BY(encoded_image_lock_); - - // This array is used as a map from simulcast id to an encoder's buffer - // state. For every buffer of the encoder we keep track of the last frame id - // that updated that buffer. - std::array, kMaxSimulcastStreams> - encoder_buffer_state_ RTC_GUARDED_BY(encoded_image_lock_); - struct EncoderSwitchExperiment { struct Thresholds { absl::optional bitrate; @@ -408,25 +399,45 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // track of whether a request has been made or not. bool encoder_switch_requested_ RTC_GUARDED_BY(&encoder_queue_); - // The controller updates the sink wants based on restrictions that come from - // the resource adaptation module or adaptation due to bandwidth adaptation. 
- // - // This is used on the encoder queue, with a few exceptions: - // - VideoStreamEncoder::SetSource() invokes SetSource(). - // - VideoStreamEncoder::SetSink() invokes SetRotationApplied() and - // PushSourceSinkSettings(). - // - VideoStreamEncoder::Stop() invokes SetSource(). - // TODO(hbos): If these can be moved to the encoder queue, - // VideoSourceSinkController can be made single-threaded, and its lock can be - // replaced with a sequence checker. - std::unique_ptr video_source_sink_controller_; - std::unique_ptr - resource_adaptation_module_ RTC_GUARDED_BY(&encoder_queue_); - - // All public methods are proxied to |encoder_queue_|. It must must be - // destroyed first to make sure no tasks are run that use other members. + // Provies video stream input states: current resolution and frame rate. + VideoStreamInputStateProvider input_state_provider_; + + std::unique_ptr video_stream_adapter_ + RTC_GUARDED_BY(&encoder_queue_); + // Responsible for adapting input resolution or frame rate to ensure resources + // (e.g. CPU or bandwidth) are not overused. Adding resources can occur on any + // thread. + std::unique_ptr + resource_adaptation_processor_; + std::unique_ptr degradation_preference_manager_ + RTC_GUARDED_BY(&encoder_queue_); + std::vector adaptation_constraints_ + RTC_GUARDED_BY(&encoder_queue_); + // Handles input, output and stats reporting related to VideoStreamEncoder + // specific resources, such as "encode usage percent" measurements and "QP + // scaling". Also involved with various mitigations such as inital frame + // dropping. + // The manager primarily operates on the |encoder_queue_| but its lifetime is + // tied to the VideoStreamEncoder (which is destroyed off the encoder queue) + // and its resource list is accessible from any thread. 
+ VideoStreamEncoderResourceManager stream_resource_manager_ + RTC_GUARDED_BY(&encoder_queue_); + std::vector> additional_resources_ + RTC_GUARDED_BY(&encoder_queue_); + // Carries out the VideoSourceRestrictions provided by the + // ResourceAdaptationProcessor, i.e. reconfigures the source of video frames + // to provide us with different resolution or frame rate. + // This class is thread-safe. + VideoSourceSinkController video_source_sink_controller_ + RTC_GUARDED_BY(main_queue_); + + // Public methods are proxied to the task queues. The queues must be destroyed + // first to make sure no tasks run that use other members. rtc::TaskQueue encoder_queue_; + // Used to cancel any potentially pending tasks to the main thread. + ScopedTaskSafety task_safety_; + RTC_DISALLOW_COPY_AND_ASSIGN(VideoStreamEncoder); }; diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc index fcdac2e513..267169a2f2 100644 --- a/video/video_stream_encoder_unittest.cc +++ b/video/video_stream_encoder_unittest.cc @@ -18,39 +18,55 @@ #include "absl/memory/memory.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/test/mock_fec_controller_override.h" +#include "api/test/mock_video_encoder.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/i420_buffer.h" +#include "api/video/nv12_buffer.h" +#include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_temporal_layers.h" #include "api/video_codecs/vp8_temporal_layers_factory.h" +#include "call/adaptation/test/fake_adaptation_constraint.h" +#include "call/adaptation/test/fake_resource.h" #include "common_video/h264/h264_common.h" #include "common_video/include/video_frame_buffer.h" #include "media/base/video_adapter.h" +#include "media/engine/webrtc_video_engine.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" -#include 
"modules/video_coding/utility/default_video_bitrate_allocator.h" +#include "modules/video_coding/utility/quality_scaler.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" -#include "rtc_base/fake_clock.h" +#include "rtc_base/event.h" +#include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" -#include "system_wrappers/include/sleep.h" #include "test/encoder_settings.h" #include "test/fake_encoder.h" #include "test/field_trial.h" #include "test/frame_forwarder.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/time_controller/simulated_time_controller.h" #include "test/video_encoder_proxy_factory.h" #include "video/send_statistics_proxy.h" namespace webrtc { -using ScaleReason = AdaptationObserverInterface::AdaptReason; using ::testing::_; using ::testing::AllOf; +using ::testing::AtLeast; +using ::testing::Eq; using ::testing::Field; +using ::testing::Ge; +using ::testing::Gt; +using ::testing::Le; +using ::testing::Lt; +using ::testing::Matcher; +using ::testing::NiceMock; +using ::testing::Return; using ::testing::StrictMock; namespace { @@ -116,62 +132,239 @@ class FakeNativeBuffer : public webrtc::VideoFrameBuffer { const int height_; }; +// A fake native buffer that is backed by an NV12 buffer. 
+class FakeNV12NativeBuffer : public webrtc::VideoFrameBuffer { + public: + FakeNV12NativeBuffer(rtc::Event* event, int width, int height) + : nv12_buffer_(NV12Buffer::Create(width, height)), event_(event) {} + + webrtc::VideoFrameBuffer::Type type() const override { return Type::kNative; } + int width() const override { return nv12_buffer_->width(); } + int height() const override { return nv12_buffer_->height(); } + rtc::scoped_refptr ToI420() override { + return nv12_buffer_->ToI420(); + } + rtc::scoped_refptr GetMappedFrameBuffer( + rtc::ArrayView types) override { + if (absl::c_find(types, Type::kNV12) != types.end()) { + return nv12_buffer_; + } + return nullptr; + } + const NV12BufferInterface* GetNV12() const { return nv12_buffer_; } + + private: + friend class rtc::RefCountedObject; + ~FakeNV12NativeBuffer() override { + if (event_) + event_->Set(); + } + rtc::scoped_refptr nv12_buffer_; + rtc::Event* const event_; +}; + class CpuOveruseDetectorProxy : public OveruseFrameDetector { public: explicit CpuOveruseDetectorProxy(CpuOveruseMetricsObserver* metrics_observer) : OveruseFrameDetector(metrics_observer), - last_target_framerate_fps_(-1) {} + last_target_framerate_fps_(-1), + framerate_updated_event_(true /* manual_reset */, + false /* initially_signaled */) {} virtual ~CpuOveruseDetectorProxy() {} void OnTargetFramerateUpdated(int framerate_fps) override { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); last_target_framerate_fps_ = framerate_fps; OveruseFrameDetector::OnTargetFramerateUpdated(framerate_fps); + framerate_updated_event_.Set(); } int GetLastTargetFramerate() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); return last_target_framerate_fps_; } CpuOveruseOptions GetOptions() { return options_; } + rtc::Event* framerate_updated_event() { return &framerate_updated_event_; } + private: - rtc::CriticalSection lock_; + Mutex lock_; int last_target_framerate_fps_ RTC_GUARDED_BY(lock_); + rtc::Event framerate_updated_event_; +}; + 
+class FakeVideoSourceRestrictionsListener + : public VideoSourceRestrictionsListener { + public: + FakeVideoSourceRestrictionsListener() + : was_restrictions_updated_(false), restrictions_updated_event_() {} + ~FakeVideoSourceRestrictionsListener() override { + RTC_DCHECK(was_restrictions_updated_); + } + + rtc::Event* restrictions_updated_event() { + return &restrictions_updated_event_; + } + + // VideoSourceRestrictionsListener implementation. + void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override { + was_restrictions_updated_ = true; + restrictions_updated_event_.Set(); + } + + private: + bool was_restrictions_updated_; + rtc::Event restrictions_updated_event_; }; +auto WantsFps(Matcher fps_matcher) { + return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps, + fps_matcher); +} + +auto WantsMaxPixels(Matcher max_pixel_matcher) { + return Field("max_pixel_count", &rtc::VideoSinkWants::max_pixel_count, + AllOf(max_pixel_matcher, Gt(0))); +} + +auto ResolutionMax() { + return AllOf( + WantsMaxPixels(Eq(std::numeric_limits::max())), + Field("target_pixel_count", &rtc::VideoSinkWants::target_pixel_count, + Eq(absl::nullopt))); +} + +auto FpsMax() { + return WantsFps(Eq(kDefaultFramerate)); +} + +auto FpsUnlimited() { + return WantsFps(Eq(std::numeric_limits::max())); +} + +auto FpsMatchesResolutionMax(Matcher fps_matcher) { + return AllOf(WantsFps(fps_matcher), ResolutionMax()); +} + +auto FpsMaxResolutionMatches(Matcher pixel_matcher) { + return AllOf(FpsMax(), WantsMaxPixels(pixel_matcher)); +} + +auto FpsMaxResolutionMax() { + return AllOf(FpsMax(), ResolutionMax()); +} + +auto UnlimitedSinkWants() { + return AllOf(FpsUnlimited(), ResolutionMax()); +} + +auto FpsInRangeForPixelsInBalanced(int last_frame_pixels) { + Matcher fps_range_matcher; + + if (last_frame_pixels <= 
320 * 240) { + fps_range_matcher = AllOf(Ge(7), Le(10)); + } else if (last_frame_pixels <= 480 * 360) { + fps_range_matcher = AllOf(Ge(10), Le(15)); + } else if (last_frame_pixels <= 640 * 480) { + fps_range_matcher = Ge(15); + } else { + fps_range_matcher = Eq(kDefaultFramerate); + } + return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps, + fps_range_matcher); +} + +auto FpsEqResolutionEqTo(const rtc::VideoSinkWants& other_wants) { + return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)), + WantsMaxPixels(Eq(other_wants.max_pixel_count))); +} + +auto FpsMaxResolutionLt(const rtc::VideoSinkWants& other_wants) { + return AllOf(FpsMax(), WantsMaxPixels(Lt(other_wants.max_pixel_count))); +} + +auto FpsMaxResolutionGt(const rtc::VideoSinkWants& other_wants) { + return AllOf(FpsMax(), WantsMaxPixels(Gt(other_wants.max_pixel_count))); +} + +auto FpsLtResolutionEq(const rtc::VideoSinkWants& other_wants) { + return AllOf(WantsFps(Lt(other_wants.max_framerate_fps)), + WantsMaxPixels(Eq(other_wants.max_pixel_count))); +} + +auto FpsGtResolutionEq(const rtc::VideoSinkWants& other_wants) { + return AllOf(WantsFps(Gt(other_wants.max_framerate_fps)), + WantsMaxPixels(Eq(other_wants.max_pixel_count))); +} + +auto FpsEqResolutionLt(const rtc::VideoSinkWants& other_wants) { + return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)), + WantsMaxPixels(Lt(other_wants.max_pixel_count))); +} + +auto FpsEqResolutionGt(const rtc::VideoSinkWants& other_wants) { + return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)), + WantsMaxPixels(Gt(other_wants.max_pixel_count))); +} + class VideoStreamEncoderUnderTest : public VideoStreamEncoder { public: - VideoStreamEncoderUnderTest(SendStatisticsProxy* stats_proxy, - const VideoStreamEncoderSettings& settings, - TaskQueueFactory* task_queue_factory) - : VideoStreamEncoder(Clock::GetRealTimeClock(), + VideoStreamEncoderUnderTest(TimeController* time_controller, + TaskQueueFactory* task_queue_factory, + SendStatisticsProxy* 
stats_proxy, + const VideoStreamEncoderSettings& settings) + : VideoStreamEncoder(time_controller->GetClock(), 1 /* number_of_cores */, stats_proxy, settings, std::unique_ptr( overuse_detector_proxy_ = new CpuOveruseDetectorProxy(stats_proxy)), - task_queue_factory) {} + task_queue_factory), + time_controller_(time_controller), + fake_cpu_resource_(FakeResource::Create("FakeResource[CPU]")), + fake_quality_resource_(FakeResource::Create("FakeResource[QP]")), + fake_adaptation_constraint_("FakeAdaptationConstraint") { + InjectAdaptationResource(fake_quality_resource_, + VideoAdaptationReason::kQuality); + InjectAdaptationResource(fake_cpu_resource_, VideoAdaptationReason::kCpu); + InjectAdaptationConstraint(&fake_adaptation_constraint_); + } - void PostTaskAndWait(bool down, - AdaptationObserverInterface::AdaptReason reason) { - PostTaskAndWait(down, reason, /*expected_results=*/true); + void SetSourceAndWaitForRestrictionsUpdated( + rtc::VideoSourceInterface* source, + const DegradationPreference& degradation_preference) { + FakeVideoSourceRestrictionsListener listener; + AddRestrictionsListenerForTesting(&listener); + SetSource(source, degradation_preference); + listener.restrictions_updated_event()->Wait(5000); + RemoveRestrictionsListenerForTesting(&listener); } - void PostTaskAndWait(bool down, - AdaptationObserverInterface::AdaptReason reason, - bool expected_results) { - rtc::Event event; - encoder_queue()->PostTask([this, &event, reason, down, expected_results] { - if (down) - EXPECT_EQ(expected_results, TriggerAdaptDown(reason)); - else - TriggerAdaptUp(reason); - event.Set(); - }); - ASSERT_TRUE(event.Wait(5000)); + void SetSourceAndWaitForFramerateUpdated( + rtc::VideoSourceInterface* source, + const DegradationPreference& degradation_preference) { + overuse_detector_proxy_->framerate_updated_event()->Reset(); + SetSource(source, degradation_preference); + overuse_detector_proxy_->framerate_updated_event()->Wait(5000); + } + + void 
OnBitrateUpdatedAndWaitForManagedResources( + DataRate target_bitrate, + DataRate stable_target_bitrate, + DataRate link_allocation, + uint8_t fraction_lost, + int64_t round_trip_time_ms, + double cwnd_reduce_ratio) { + OnBitrateUpdated(target_bitrate, stable_target_bitrate, link_allocation, + fraction_lost, round_trip_time_ms, cwnd_reduce_ratio); + // Bitrate is updated on the encoder queue. + WaitUntilTaskQueueIsIdle(); } // This is used as a synchronisation mechanism, to make sure that the @@ -182,60 +375,52 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { ASSERT_TRUE(event.Wait(5000)); } + // Triggers resource usage measurements on the fake CPU resource. void TriggerCpuOveruse() { - PostTaskAndWait(/*down=*/true, - AdaptationObserverInterface::AdaptReason::kCpu); + rtc::Event event; + encoder_queue()->PostTask([this, &event] { + fake_cpu_resource_->SetUsageState(ResourceUsageState::kOveruse); + event.Set(); + }); + ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } - void TriggerCpuNormalUsage() { - PostTaskAndWait(/*down=*/false, - AdaptationObserverInterface::AdaptReason::kCpu); + void TriggerCpuUnderuse() { + rtc::Event event; + encoder_queue()->PostTask([this, &event] { + fake_cpu_resource_->SetUsageState(ResourceUsageState::kUnderuse); + event.Set(); + }); + ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } + // Triggers resource usage measurements on the fake quality resource. 
void TriggerQualityLow() { - PostTaskAndWait(/*down=*/true, - AdaptationObserverInterface::AdaptReason::kQuality); - } - - void TriggerQualityLowExpectFalse() { - PostTaskAndWait(/*down=*/true, - AdaptationObserverInterface::AdaptReason::kQuality, - /*expected_results=*/false); + rtc::Event event; + encoder_queue()->PostTask([this, &event] { + fake_quality_resource_->SetUsageState(ResourceUsageState::kOveruse); + event.Set(); + }); + ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } - void TriggerQualityHigh() { - PostTaskAndWait(/*down=*/false, - AdaptationObserverInterface::AdaptReason::kQuality); + rtc::Event event; + encoder_queue()->PostTask([this, &event] { + fake_quality_resource_->SetUsageState(ResourceUsageState::kUnderuse); + event.Set(); + }); + ASSERT_TRUE(event.Wait(5000)); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); } + TimeController* const time_controller_; CpuOveruseDetectorProxy* overuse_detector_proxy_; -}; - -class VideoStreamFactory - : public VideoEncoderConfig::VideoStreamFactoryInterface { - public: - explicit VideoStreamFactory(size_t num_temporal_layers, int framerate) - : num_temporal_layers_(num_temporal_layers), framerate_(framerate) { - EXPECT_GT(num_temporal_layers, 0u); - EXPECT_GT(framerate, 0); - } - - private: - std::vector CreateEncoderStreams( - int width, - int height, - const VideoEncoderConfig& encoder_config) override { - std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); - for (VideoStream& stream : streams) { - stream.num_temporal_layers = num_temporal_layers_; - stream.max_framerate = framerate_; - } - return streams; - } - - const size_t num_temporal_layers_; - const int framerate_; + rtc::scoped_refptr fake_cpu_resource_; + rtc::scoped_refptr fake_quality_resource_; + FakeAdaptationConstraint fake_adaptation_constraint_; }; // Simulates simulcast behavior and makes highest stream resolutions divisible @@ -243,11 +428,7 @@ class 
VideoStreamFactory class CroppingVideoStreamFactory : public VideoEncoderConfig::VideoStreamFactoryInterface { public: - explicit CroppingVideoStreamFactory(size_t num_temporal_layers, int framerate) - : num_temporal_layers_(num_temporal_layers), framerate_(framerate) { - EXPECT_GT(num_temporal_layers, 0u); - EXPECT_GT(framerate, 0); - } + CroppingVideoStreamFactory() {} private: std::vector CreateEncoderStreams( @@ -256,34 +437,28 @@ class CroppingVideoStreamFactory const VideoEncoderConfig& encoder_config) override { std::vector streams = test::CreateVideoStreams( width - width % 4, height - height % 4, encoder_config); - for (VideoStream& stream : streams) { - stream.num_temporal_layers = num_temporal_layers_; - stream.max_framerate = framerate_; - } return streams; } - - const size_t num_temporal_layers_; - const int framerate_; }; class AdaptingFrameForwarder : public test::FrameForwarder { public: - AdaptingFrameForwarder() : adaptation_enabled_(false) {} + explicit AdaptingFrameForwarder(TimeController* time_controller) + : time_controller_(time_controller), adaptation_enabled_(false) {} ~AdaptingFrameForwarder() override {} void set_adaptation_enabled(bool enabled) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); adaptation_enabled_ = enabled; } bool adaption_enabled() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return adaptation_enabled_; } rtc::VideoSinkWants last_wants() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return last_wants_; } @@ -291,11 +466,17 @@ class AdaptingFrameForwarder : public test::FrameForwarder { absl::optional last_sent_height() const { return last_height_; } void IncomingCapturedFrame(const VideoFrame& video_frame) override { + RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent()); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); + int cropped_width = 0; int cropped_height = 0; int out_width = 0; int out_height = 0; if (adaption_enabled()) { + RTC_DLOG(INFO) << 
"IncomingCapturedFrame: AdaptFrameResolution()" + << "w=" << video_frame.width() + << "h=" << video_frame.height(); if (adapter_.AdaptFrameResolution( video_frame.width(), video_frame.height(), video_frame.timestamp_us() * 1000, &cropped_width, @@ -324,6 +505,7 @@ class AdaptingFrameForwarder : public test::FrameForwarder { last_height_ = absl::nullopt; } } else { + RTC_DLOG(INFO) << "IncomingCapturedFrame: adaptation not enabled"; test::FrameForwarder::IncomingCapturedFrame(video_frame); last_width_.emplace(video_frame.width()); last_height_.emplace(video_frame.height()); @@ -332,14 +514,16 @@ class AdaptingFrameForwarder : public test::FrameForwarder { void AddOrUpdateSink(rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) override { - rtc::CritScope cs(&crit_); - last_wants_ = sink_wants(); + MutexLock lock(&mutex_); + last_wants_ = sink_wants_locked(); adapter_.OnSinkWants(wants); - test::FrameForwarder::AddOrUpdateSink(sink, wants); + test::FrameForwarder::AddOrUpdateSinkLocked(sink, wants); } + + TimeController* const time_controller_; cricket::VideoAdapter adapter_; - bool adaptation_enabled_ RTC_GUARDED_BY(crit_); - rtc::VideoSinkWants last_wants_ RTC_GUARDED_BY(crit_); + bool adaptation_enabled_ RTC_GUARDED_BY(mutex_); + rtc::VideoSinkWants last_wants_ RTC_GUARDED_BY(mutex_); absl::optional last_width_; absl::optional last_height_; }; @@ -353,57 +537,76 @@ class MockableSendStatisticsProxy : public SendStatisticsProxy { : SendStatisticsProxy(clock, config, content_type) {} VideoSendStream::Stats GetStats() override { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mock_stats_) return *mock_stats_; return SendStatisticsProxy::GetStats(); } int GetInputFrameRate() const override { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); if (mock_stats_) return mock_stats_->input_frame_rate; return SendStatisticsProxy::GetInputFrameRate(); } void SetMockStats(const VideoSendStream::Stats& stats) { - rtc::CritScope cs(&lock_); + 
MutexLock lock(&lock_); mock_stats_.emplace(stats); } void ResetMockStats() { - rtc::CritScope cs(&lock_); + MutexLock lock(&lock_); mock_stats_.reset(); } + void SetDroppedFrameCallback(std::function callback) { + on_frame_dropped_ = std::move(callback); + } + private: - rtc::CriticalSection lock_; + void OnFrameDropped(DropReason reason) override { + SendStatisticsProxy::OnFrameDropped(reason); + if (on_frame_dropped_) + on_frame_dropped_(reason); + } + + mutable Mutex lock_; absl::optional mock_stats_ RTC_GUARDED_BY(lock_); + std::function on_frame_dropped_; }; -class MockBitrateObserver : public VideoBitrateAllocationObserver { +class MockEncoderSelector + : public VideoEncoderFactory::EncoderSelectorInterface { public: - MOCK_METHOD1(OnBitrateAllocationUpdated, void(const VideoBitrateAllocation&)); + MOCK_METHOD(void, + OnCurrentEncoder, + (const SdpVideoFormat& format), + (override)); + MOCK_METHOD(absl::optional, + OnAvailableBitrate, + (const DataRate& rate), + (override)); + MOCK_METHOD(absl::optional, OnEncoderBroken, (), (override)); }; } // namespace class VideoStreamEncoderTest : public ::testing::Test { public: - static const int kDefaultTimeoutMs = 30 * 1000; + static const int kDefaultTimeoutMs = 1000; VideoStreamEncoderTest() : video_send_config_(VideoSendStream::Config(nullptr)), codec_width_(320), codec_height_(240), max_framerate_(kDefaultFramerate), - task_queue_factory_(CreateDefaultTaskQueueFactory()), - fake_encoder_(), + fake_encoder_(&time_controller_), encoder_factory_(&fake_encoder_), stats_proxy_(new MockableSendStatisticsProxy( - Clock::GetRealTimeClock(), + time_controller_.GetClock(), video_send_config_, webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo)), - sink_(&fake_encoder_) {} + sink_(&time_controller_, &fake_encoder_) {} void SetUp() override { metrics::Reset(); @@ -416,17 +619,11 @@ class VideoStreamEncoderTest : public ::testing::Test { VideoEncoderConfig video_encoder_config; 
test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); - video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(1, max_framerate_); + EXPECT_EQ(1u, video_encoder_config.simulcast_layers.size()); + video_encoder_config.simulcast_layers[0].num_temporal_layers = 1; + video_encoder_config.simulcast_layers[0].max_framerate = max_framerate_; video_encoder_config_ = video_encoder_config.Copy(); - // Framerate limit is specified by the VideoStreamFactory. - std::vector streams = - video_encoder_config.video_stream_factory->CreateEncoderStreams( - codec_width_, codec_height_, video_encoder_config); - max_framerate_ = streams[0].max_framerate; - fake_clock_.SetTime(Timestamp::us(1234)); - ConfigureEncoder(std::move(video_encoder_config)); } @@ -434,8 +631,8 @@ class VideoStreamEncoderTest : public ::testing::Test { if (video_stream_encoder_) video_stream_encoder_->Stop(); video_stream_encoder_.reset(new VideoStreamEncoderUnderTest( - stats_proxy_.get(), video_send_config_.encoder_settings, - task_queue_factory_.get())); + &time_controller_, GetTaskQueueFactory(), stats_proxy_.get(), + video_send_config_.encoder_settings)); video_stream_encoder_->SetSink(&sink_, false /* rotation_applied */); video_stream_encoder_->SetSource( &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -445,27 +642,35 @@ class VideoStreamEncoderTest : public ::testing::Test { video_stream_encoder_->WaitUntilTaskQueueIsIdle(); } - void ResetEncoder(const std::string& payload_name, - size_t num_streams, - size_t num_temporal_layers, - unsigned char num_spatial_layers, - bool screenshare) { + void ResetEncoder( + const std::string& payload_name, + size_t num_streams, + size_t num_temporal_layers, + unsigned char num_spatial_layers, + bool screenshare, + VideoStreamEncoderSettings::BitrateAllocationCallbackType + allocation_cb_type = + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocationWhenScreenSharing) { 
video_send_config_.rtp.payload_name = payload_name; + video_send_config_.encoder_settings.allocation_cb_type = allocation_cb_type; VideoEncoderConfig video_encoder_config; - video_encoder_config.codec_type = PayloadStringToCodecType(payload_name); - video_encoder_config.number_of_streams = num_streams; + test::FillEncoderConfiguration(PayloadStringToCodecType(payload_name), + num_streams, &video_encoder_config); + for (auto& layer : video_encoder_config.simulcast_layers) { + layer.num_temporal_layers = num_temporal_layers; + layer.max_framerate = kDefaultFramerate; + } video_encoder_config.max_bitrate_bps = num_streams == 1 ? kTargetBitrateBps : kSimulcastTargetBitrateBps; - video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(num_temporal_layers, - kDefaultFramerate); video_encoder_config.content_type = screenshare ? VideoEncoderConfig::ContentType::kScreen : VideoEncoderConfig::ContentType::kRealtimeVideo; if (payload_name == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); vp9_settings.numberOfSpatialLayers = num_spatial_layers; + vp9_settings.automaticResizeOn = num_spatial_layers <= 1; video_encoder_config.encoder_specific_settings = new rtc::RefCountedObject< VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); @@ -517,6 +722,19 @@ class VideoStreamEncoderTest : public ::testing::Test { return frame; } + VideoFrame CreateNV12Frame(int64_t ntp_time_ms, int width, int height) const { + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(NV12Buffer::Create(width, height)) + .set_timestamp_rtp(99) + .set_timestamp_ms(99) + .set_rotation(kVideoRotation_0) + .build(); + frame.set_ntp_time_ms(ntp_time_ms); + frame.set_timestamp_us(ntp_time_ms * 1000); + return frame; + } + VideoFrame CreateFakeNativeFrame(int64_t ntp_time_ms, rtc::Event* destruction_event, int width, @@ -533,6 +751,22 @@ class VideoStreamEncoderTest : public ::testing::Test { return frame; } + VideoFrame 
CreateFakeNV12NativeFrame(int64_t ntp_time_ms, + rtc::Event* destruction_event, + int width, + int height) const { + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer( + new rtc::RefCountedObject( + destruction_event, width, height)) + .set_timestamp_rtp(99) + .set_timestamp_ms(99) + .set_rotation(kVideoRotation_0) + .build(); + frame.set_ntp_time_ms(ntp_time_ms); + return frame; + } + VideoFrame CreateFakeNativeFrame(int64_t ntp_time_ms, rtc::Event* destruction_event) const { return CreateFakeNativeFrame(ntp_time_ms, destruction_event, codec_width_, @@ -540,162 +774,65 @@ class VideoStreamEncoderTest : public ::testing::Test { } void VerifyAllocatedBitrate(const VideoBitrateAllocation& expected_bitrate) { - MockBitrateObserver bitrate_observer; - video_stream_encoder_->SetBitrateAllocationObserver(&bitrate_observer); - - EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate)) - .Times(1); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame( CreateFrame(1, codec_width_, codec_height_)); WaitForEncodedFrame(1); - } - - void VerifyNoLimitation(const rtc::VideoSinkWants& wants) { - EXPECT_EQ(std::numeric_limits::max(), wants.max_framerate_fps); - EXPECT_EQ(std::numeric_limits::max(), wants.max_pixel_count); - EXPECT_FALSE(wants.target_pixel_count); - } - - void VerifyFpsEqResolutionEq(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps); - EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count); - } - - void VerifyFpsMaxResolutionMax(const rtc::VideoSinkWants& wants) { - EXPECT_EQ(kDefaultFramerate, 
wants.max_framerate_fps); - EXPECT_EQ(std::numeric_limits::max(), wants.max_pixel_count); - EXPECT_FALSE(wants.target_pixel_count); - } - - void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps); - EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count); - EXPECT_GT(wants1.max_pixel_count, 0); - } - - void VerifyFpsMaxResolutionGt(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps); - EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count); - } - - void VerifyFpsMaxResolutionEq(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps); - EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count); - } - - void VerifyFpsLtResolutionEq(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_LT(wants1.max_framerate_fps, wants2.max_framerate_fps); - EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count); - } - - void VerifyFpsGtResolutionEq(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_GT(wants1.max_framerate_fps, wants2.max_framerate_fps); - EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count); - } - - void VerifyFpsEqResolutionLt(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps); - EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count); - EXPECT_GT(wants1.max_pixel_count, 0); - } - - void VerifyFpsEqResolutionGt(const rtc::VideoSinkWants& wants1, - const rtc::VideoSinkWants& wants2) { - EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps); - EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count); - } - - void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants, - int pixel_count) { - EXPECT_EQ(kDefaultFramerate, 
wants.max_framerate_fps); - EXPECT_LT(wants.max_pixel_count, pixel_count); - EXPECT_GT(wants.max_pixel_count, 0); - } - - void VerifyFpsLtResolutionMax(const rtc::VideoSinkWants& wants, int fps) { - EXPECT_LT(wants.max_framerate_fps, fps); - EXPECT_EQ(std::numeric_limits::max(), wants.max_pixel_count); - EXPECT_FALSE(wants.target_pixel_count); - } - - void VerifyFpsEqResolutionMax(const rtc::VideoSinkWants& wants, - int expected_fps) { - EXPECT_EQ(expected_fps, wants.max_framerate_fps); - EXPECT_EQ(std::numeric_limits::max(), wants.max_pixel_count); - EXPECT_FALSE(wants.target_pixel_count); - } - - void VerifyBalancedModeFpsRange(const rtc::VideoSinkWants& wants, - int last_frame_pixels) { - // Balanced mode should always scale FPS to the desired range before - // attempting to scale resolution. - int fps_limit = wants.max_framerate_fps; - if (last_frame_pixels <= 320 * 240) { - EXPECT_TRUE(7 <= fps_limit && fps_limit <= 10); - } else if (last_frame_pixels <= 480 * 270) { - EXPECT_TRUE(10 <= fps_limit && fps_limit <= 15); - } else if (last_frame_pixels <= 640 * 480) { - EXPECT_LE(15, fps_limit); - } else { - EXPECT_EQ(kDefaultFramerate, fps_limit); - } + EXPECT_EQ(expected_bitrate, sink_.GetLastVideoBitrateAllocation()); } void WaitForEncodedFrame(int64_t expected_ntp_time) { sink_.WaitForEncodedFrame(expected_ntp_time); - fake_clock_.AdvanceTime(TimeDelta::seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); } bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, int64_t timeout_ms) { bool ok = sink_.TimedWaitForEncodedFrame(expected_ntp_time, timeout_ms); - fake_clock_.AdvanceTime(TimeDelta::seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); return ok; } void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) { sink_.WaitForEncodedFrame(expected_width, expected_height); - fake_clock_.AdvanceTime(TimeDelta::seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / 
max_framerate_); } void ExpectDroppedFrame() { sink_.ExpectDroppedFrame(); - fake_clock_.AdvanceTime(TimeDelta::seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); } bool WaitForFrame(int64_t timeout_ms) { bool ok = sink_.WaitForFrame(timeout_ms); - fake_clock_.AdvanceTime(TimeDelta::seconds(1) / max_framerate_); + AdvanceTime(TimeDelta::Seconds(1) / max_framerate_); return ok; } class TestEncoder : public test::FakeEncoder { public: - TestEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {} + explicit TestEncoder(TimeController* time_controller) + : FakeEncoder(time_controller->GetClock()), + time_controller_(time_controller) { + RTC_DCHECK(time_controller_); + } VideoCodec codec_config() const { - rtc::CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); return config_; } void BlockNextEncode() { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); block_next_encode_ = true; } VideoEncoder::EncoderInfo GetEncoderInfo() const override { - rtc::CritScope lock(&local_crit_sect_); - EncoderInfo info; + MutexLock lock(&local_mutex_); + EncoderInfo info = FakeEncoder::GetEncoderInfo(); if (initialized_ == EncoderState::kInitialized) { if (quality_scaling_) { info.scaling_settings = VideoEncoder::ScalingSettings( @@ -712,12 +849,15 @@ class VideoStreamEncoderTest : public ::testing::Test { info.resolution_bitrate_limits = resolution_bitrate_limits_; info.requested_resolution_alignment = requested_resolution_alignment_; + info.apply_alignment_to_all_simulcast_layers = + apply_alignment_to_all_simulcast_layers_; + info.preferred_pixel_formats = preferred_pixel_formats_; return info; } int32_t RegisterEncodeCompleteCallback( EncodedImageCallback* callback) override { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); encoded_image_callback_ = callback; return FakeEncoder::RegisterEncodeCompleteCallback(callback); } @@ -726,60 +866,65 @@ class VideoStreamEncoderTest : public ::testing::Test { 
void CheckLastTimeStampsMatch(int64_t ntp_time_ms, uint32_t timestamp) const { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); EXPECT_EQ(timestamp_, timestamp); EXPECT_EQ(ntp_time_ms_, ntp_time_ms); } void SetQualityScaling(bool b) { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); quality_scaling_ = b; } void SetRequestedResolutionAlignment(int requested_resolution_alignment) { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); requested_resolution_alignment_ = requested_resolution_alignment; } + void SetApplyAlignmentToAllSimulcastLayers(bool b) { + MutexLock lock(&local_mutex_); + apply_alignment_to_all_simulcast_layers_ = b; + } + void SetIsHardwareAccelerated(bool is_hardware_accelerated) { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); is_hardware_accelerated_ = is_hardware_accelerated; } void SetTemporalLayersSupported(size_t spatial_idx, bool supported) { RTC_DCHECK_LT(spatial_idx, kMaxSpatialLayers); - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); temporal_layers_supported_[spatial_idx] = supported; } void SetResolutionBitrateLimits( std::vector thresholds) { - rtc::CritScope cs(&local_crit_sect_); + MutexLock lock(&local_mutex_); resolution_bitrate_limits_ = thresholds; } void ForceInitEncodeFailure(bool force_failure) { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); force_init_encode_failed_ = force_failure; } void SimulateOvershoot(double rate_factor) { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); rate_factor_ = rate_factor; } uint32_t GetLastFramerate() const { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); return last_framerate_; } VideoFrame::UpdateRect GetLastUpdateRect() const { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); return last_update_rect_; } const std::vector& LastFrameTypes() const { - 
rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); return last_frame_types_; } @@ -788,27 +933,25 @@ class VideoStreamEncoderTest : public ::testing::Test { keyframe ? VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta}; { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); last_frame_types_ = frame_type; } FakeEncoder::Encode(input_image, &frame_type); } void InjectEncodedImage(const EncodedImage& image) { - rtc::CritScope lock(&local_crit_sect_); - encoded_image_callback_->OnEncodedImage(image, nullptr, nullptr); + MutexLock lock(&local_mutex_); + encoded_image_callback_->OnEncodedImage(image, nullptr); } - void InjectEncodedImage(const EncodedImage& image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { - rtc::CritScope lock(&local_crit_sect_); - encoded_image_callback_->OnEncodedImage(image, codec_specific_info, - fragmentation); + void SetEncodedImageData( + rtc::scoped_refptr encoded_image_data) { + MutexLock lock(&local_mutex_); + encoded_image_data_ = encoded_image_data; } void ExpectNullFrame() { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); expect_null_frame_ = true; } @@ -819,22 +962,39 @@ class VideoStreamEncoderTest : public ::testing::Test { return settings; } + absl::optional GetLastInputPixelFormat() { + MutexLock lock(&local_mutex_); + return last_input_pixel_format_; + } + int GetNumEncoderInitializations() const { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); return num_encoder_initializations_; } int GetNumSetRates() const { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); return num_set_rates_; } + VideoCodec video_codec() const { + MutexLock lock(&local_mutex_); + return video_codec_; + } + + void SetPreferredPixelFormats( + absl::InlinedVector + pixel_formats) { + MutexLock lock(&local_mutex_); + preferred_pixel_formats_ = std::move(pixel_formats); + } 
+ private: int32_t Encode(const VideoFrame& input_image, const std::vector* frame_types) override { bool block_encode; { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); if (expect_null_frame_) { EXPECT_EQ(input_image.timestamp(), 0u); EXPECT_EQ(input_image.width(), 1); @@ -854,21 +1014,39 @@ class VideoStreamEncoderTest : public ::testing::Test { block_next_encode_ = false; last_update_rect_ = input_image.update_rect(); last_frame_types_ = *frame_types; + last_input_pixel_format_ = input_image.video_frame_buffer()->type(); } int32_t result = FakeEncoder::Encode(input_image, frame_types); if (block_encode) EXPECT_TRUE(continue_encode_event_.Wait(kDefaultTimeoutMs)); + return result; } + CodecSpecificInfo EncodeHook( + EncodedImage& encoded_image, + rtc::scoped_refptr buffer) override { + CodecSpecificInfo codec_specific; + { + MutexLock lock(&mutex_); + codec_specific.codecType = config_.codecType; + } + MutexLock lock(&local_mutex_); + if (encoded_image_data_) { + encoded_image.SetEncodedData(encoded_image_data_); + } + return codec_specific; + } + int32_t InitEncode(const VideoCodec* config, const Settings& settings) override { int res = FakeEncoder::InitEncode(config, settings); - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); EXPECT_EQ(initialized_, EncoderState::kUninitialized); ++num_encoder_initializations_; + video_codec_ = *config; if (config->codecType == kVideoCodecVP8) { // Simulate setting up temporal layers, in order to validate the life @@ -887,14 +1065,14 @@ class VideoStreamEncoderTest : public ::testing::Test { } int32_t Release() override { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); EXPECT_NE(initialized_, EncoderState::kUninitialized); initialized_ = EncoderState::kUninitialized; return FakeEncoder::Release(); } void SetRates(const RateControlParameters& parameters) { - rtc::CritScope lock(&local_crit_sect_); + MutexLock lock(&local_mutex_); num_set_rates_++; 
VideoBitrateAllocation adjusted_rate_allocation; for (size_t si = 0; si < kMaxSpatialLayers; ++si) { @@ -914,49 +1092,60 @@ class VideoStreamEncoderTest : public ::testing::Test { FakeEncoder::SetRates(adjusted_paramters); } - rtc::CriticalSection local_crit_sect_; + TimeController* const time_controller_; + mutable Mutex local_mutex_; enum class EncoderState { kUninitialized, kInitializationFailed, kInitialized - } initialized_ RTC_GUARDED_BY(local_crit_sect_) = - EncoderState::kUninitialized; - bool block_next_encode_ RTC_GUARDED_BY(local_crit_sect_) = false; + } initialized_ RTC_GUARDED_BY(local_mutex_) = EncoderState::kUninitialized; + bool block_next_encode_ RTC_GUARDED_BY(local_mutex_) = false; rtc::Event continue_encode_event_; - uint32_t timestamp_ RTC_GUARDED_BY(local_crit_sect_) = 0; - int64_t ntp_time_ms_ RTC_GUARDED_BY(local_crit_sect_) = 0; - int last_input_width_ RTC_GUARDED_BY(local_crit_sect_) = 0; - int last_input_height_ RTC_GUARDED_BY(local_crit_sect_) = 0; - bool quality_scaling_ RTC_GUARDED_BY(local_crit_sect_) = true; - int requested_resolution_alignment_ RTC_GUARDED_BY(local_crit_sect_) = 1; - bool is_hardware_accelerated_ RTC_GUARDED_BY(local_crit_sect_) = false; + uint32_t timestamp_ RTC_GUARDED_BY(local_mutex_) = 0; + int64_t ntp_time_ms_ RTC_GUARDED_BY(local_mutex_) = 0; + int last_input_width_ RTC_GUARDED_BY(local_mutex_) = 0; + int last_input_height_ RTC_GUARDED_BY(local_mutex_) = 0; + bool quality_scaling_ RTC_GUARDED_BY(local_mutex_) = true; + int requested_resolution_alignment_ RTC_GUARDED_BY(local_mutex_) = 1; + bool apply_alignment_to_all_simulcast_layers_ RTC_GUARDED_BY(local_mutex_) = + false; + bool is_hardware_accelerated_ RTC_GUARDED_BY(local_mutex_) = false; + rtc::scoped_refptr encoded_image_data_ + RTC_GUARDED_BY(local_mutex_); std::unique_ptr frame_buffer_controller_ - RTC_GUARDED_BY(local_crit_sect_); + RTC_GUARDED_BY(local_mutex_); absl::optional temporal_layers_supported_[kMaxSpatialLayers] RTC_GUARDED_BY( - 
local_crit_sect_); - bool force_init_encode_failed_ RTC_GUARDED_BY(local_crit_sect_) = false; - double rate_factor_ RTC_GUARDED_BY(local_crit_sect_) = 1.0; - uint32_t last_framerate_ RTC_GUARDED_BY(local_crit_sect_) = 0; + local_mutex_); + bool force_init_encode_failed_ RTC_GUARDED_BY(local_mutex_) = false; + double rate_factor_ RTC_GUARDED_BY(local_mutex_) = 1.0; + uint32_t last_framerate_ RTC_GUARDED_BY(local_mutex_) = 0; absl::optional last_rate_control_settings_; - VideoFrame::UpdateRect last_update_rect_ - RTC_GUARDED_BY(local_crit_sect_) = {0, 0, 0, 0}; + VideoFrame::UpdateRect last_update_rect_ RTC_GUARDED_BY(local_mutex_) = { + 0, 0, 0, 0}; std::vector last_frame_types_; bool expect_null_frame_ = false; - EncodedImageCallback* encoded_image_callback_ - RTC_GUARDED_BY(local_crit_sect_) = nullptr; - MockFecControllerOverride fec_controller_override_; - int num_encoder_initializations_ RTC_GUARDED_BY(local_crit_sect_) = 0; + EncodedImageCallback* encoded_image_callback_ RTC_GUARDED_BY(local_mutex_) = + nullptr; + NiceMock fec_controller_override_; + int num_encoder_initializations_ RTC_GUARDED_BY(local_mutex_) = 0; std::vector resolution_bitrate_limits_ - RTC_GUARDED_BY(local_crit_sect_); - int num_set_rates_ RTC_GUARDED_BY(local_crit_sect_) = 0; + RTC_GUARDED_BY(local_mutex_); + int num_set_rates_ RTC_GUARDED_BY(local_mutex_) = 0; + VideoCodec video_codec_ RTC_GUARDED_BY(local_mutex_); + absl::optional last_input_pixel_format_ + RTC_GUARDED_BY(local_mutex_); + absl::InlinedVector + preferred_pixel_formats_ RTC_GUARDED_BY(local_mutex_); }; class TestSink : public VideoStreamEncoder::EncoderSink { public: - explicit TestSink(TestEncoder* test_encoder) - : test_encoder_(test_encoder) {} + TestSink(TimeController* time_controller, TestEncoder* test_encoder) + : time_controller_(time_controller), test_encoder_(test_encoder) { + RTC_DCHECK(time_controller_); + } void WaitForEncodedFrame(int64_t expected_ntp_time) { EXPECT_TRUE( @@ -966,10 +1155,10 @@ class 
VideoStreamEncoderTest : public ::testing::Test { bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, int64_t timeout_ms) { uint32_t timestamp = 0; - if (!encoded_frame_event_.Wait(timeout_ms)) + if (!WaitForFrame(timeout_ms)) return false; { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); timestamp = last_timestamp_; } test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp); @@ -978,7 +1167,7 @@ class VideoStreamEncoderTest : public ::testing::Test { void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) { - EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); + EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs)); CheckLastFrameSizeMatches(expected_width, expected_height); } @@ -987,7 +1176,7 @@ class VideoStreamEncoderTest : public ::testing::Test { uint32_t width = 0; uint32_t height = 0; { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); width = last_width_; height = last_height_; } @@ -995,80 +1184,82 @@ class VideoStreamEncoderTest : public ::testing::Test { EXPECT_EQ(expected_width, width); } - void CheckLastFrameSizeIsMultipleOf(int resolution_alignment) { - int width = 0; - int height = 0; - { - rtc::CritScope lock(&crit_); - width = last_width_; - height = last_height_; - } - EXPECT_EQ(width % resolution_alignment, 0); - EXPECT_EQ(height % resolution_alignment, 0); - } - void CheckLastFrameRotationMatches(VideoRotation expected_rotation) { VideoRotation rotation; { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); rotation = last_rotation_; } EXPECT_EQ(expected_rotation, rotation); } - void ExpectDroppedFrame() { EXPECT_FALSE(encoded_frame_event_.Wait(100)); } + void ExpectDroppedFrame() { EXPECT_FALSE(WaitForFrame(100)); } bool WaitForFrame(int64_t timeout_ms) { - return encoded_frame_event_.Wait(timeout_ms); + RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent()); + bool ret = encoded_frame_event_.Wait(timeout_ms); + time_controller_->AdvanceTime(TimeDelta::Millis(0)); + return ret; 
} void SetExpectNoFrames() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); expect_frames_ = false; } int number_of_reconfigurations() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return number_of_reconfigurations_; } int last_min_transmit_bitrate() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return min_transmit_bitrate_bps_; } void SetNumExpectedLayers(size_t num_layers) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); num_expected_layers_ = num_layers; } int64_t GetLastCaptureTimeMs() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return last_capture_time_ms_; } std::vector GetLastEncodedImageData() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return std::move(last_encoded_image_data_); } - RTPFragmentationHeader GetLastFragmentation() { - rtc::CritScope lock(&crit_); - return std::move(last_fragmentation_); + VideoBitrateAllocation GetLastVideoBitrateAllocation() { + MutexLock lock(&mutex_); + return last_bitrate_allocation_; + } + + int number_of_bitrate_allocations() const { + MutexLock lock(&mutex_); + return number_of_bitrate_allocations_; + } + + VideoLayersAllocation GetLastVideoLayersAllocation() { + MutexLock lock(&mutex_); + return last_layers_allocation_; + } + + int number_of_layers_allocations() const { + MutexLock lock(&mutex_); + return number_of_layers_allocations_; } private: Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override { - rtc::CritScope lock(&crit_); + const CodecSpecificInfo* codec_specific_info) override { + MutexLock lock(&mutex_); EXPECT_TRUE(expect_frames_); last_encoded_image_data_ = std::vector( encoded_image.data(), encoded_image.data() + encoded_image.size()); - if (fragmentation) { - last_fragmentation_.CopyFrom(*fragmentation); - } uint32_t timestamp = encoded_image.Timestamp(); if (last_timestamp_ != timestamp) { 
num_received_layers_ = 1; @@ -1088,18 +1279,44 @@ class VideoStreamEncoderTest : public ::testing::Test { void OnEncoderConfigurationChanged( std::vector streams, + bool is_svc, VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); ++number_of_reconfigurations_; min_transmit_bitrate_bps_ = min_transmit_bitrate_bps; } - rtc::CriticalSection crit_; + void OnBitrateAllocationUpdated( + const VideoBitrateAllocation& allocation) override { + MutexLock lock(&mutex_); + ++number_of_bitrate_allocations_; + last_bitrate_allocation_ = allocation; + } + + void OnVideoLayersAllocationUpdated( + VideoLayersAllocation allocation) override { + MutexLock lock(&mutex_); + ++number_of_layers_allocations_; + last_layers_allocation_ = allocation; + rtc::StringBuilder log; + for (const auto& layer : allocation.active_spatial_layers) { + log << layer.width << "x" << layer.height << "@" << layer.frame_rate_fps + << "["; + for (const auto target_bitrate : + layer.target_bitrate_per_temporal_layer) { + log << target_bitrate.kbps() << ","; + } + log << "]"; + } + RTC_DLOG(INFO) << "OnVideoLayersAllocationUpdated " << log.str(); + } + + TimeController* const time_controller_; + mutable Mutex mutex_; TestEncoder* test_encoder_; rtc::Event encoded_frame_event_; std::vector last_encoded_image_data_; - RTPFragmentationHeader last_fragmentation_; uint32_t last_timestamp_ = 0; int64_t last_capture_time_ms_ = 0; uint32_t last_height_ = 0; @@ -1110,6 +1327,10 @@ class VideoStreamEncoderTest : public ::testing::Test { bool expect_frames_ = true; int number_of_reconfigurations_ = 0; int min_transmit_bitrate_bps_ = 0; + VideoBitrateAllocation last_bitrate_allocation_ RTC_GUARDED_BY(&mutex_); + int number_of_bitrate_allocations_ RTC_GUARDED_BY(&mutex_) = 0; + VideoLayersAllocation last_layers_allocation_ RTC_GUARDED_BY(&mutex_); + int number_of_layers_allocations_ RTC_GUARDED_BY(&mutex_) = 0; }; class 
VideoBitrateAllocatorProxyFactory @@ -1121,43 +1342,55 @@ class VideoStreamEncoderTest : public ::testing::Test { std::unique_ptr CreateVideoBitrateAllocator( const VideoCodec& codec) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); codec_config_ = codec; return bitrate_allocator_factory_->CreateVideoBitrateAllocator(codec); } VideoCodec codec_config() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); return codec_config_; } private: std::unique_ptr bitrate_allocator_factory_; - rtc::CriticalSection crit_; - VideoCodec codec_config_ RTC_GUARDED_BY(crit_); + mutable Mutex mutex_; + VideoCodec codec_config_ RTC_GUARDED_BY(mutex_); }; + Clock* clock() { return time_controller_.GetClock(); } + void AdvanceTime(TimeDelta duration) { + time_controller_.AdvanceTime(duration); + } + + int64_t CurrentTimeMs() { return clock()->CurrentTime().ms(); } + + protected: + virtual TaskQueueFactory* GetTaskQueueFactory() { + return time_controller_.GetTaskQueueFactory(); + } + + GlobalSimulatedTimeController time_controller_{Timestamp::Micros(1234)}; VideoSendStream::Config video_send_config_; VideoEncoderConfig video_encoder_config_; int codec_width_; int codec_height_; int max_framerate_; - rtc::ScopedFakeClock fake_clock_; - const std::unique_ptr task_queue_factory_; TestEncoder fake_encoder_; test::VideoEncoderProxyFactory encoder_factory_; VideoBitrateAllocatorProxyFactory bitrate_allocator_factory_; std::unique_ptr stats_proxy_; TestSink sink_; - AdaptingFrameForwarder video_source_; + AdaptingFrameForwarder video_source_{&time_controller_}; std::unique_ptr video_stream_encoder_; }; TEST_F(VideoStreamEncoderTest, EncodeOneFrame) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + 
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); rtc::Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); WaitForEncodedFrame(1); @@ -1172,12 +1405,14 @@ TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) { // frames means that the first frame will be dropped and the second frame will // be sent when the encoder is enabled. video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); + AdvanceTime(TimeDelta::Millis(10)); video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs)); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // The pending frame should be received. 
WaitForEncodedFrame(2); @@ -1188,23 +1423,26 @@ TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) { } TEST_F(VideoStreamEncoderTest, DropsFramesWhenRateSetToZero) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); WaitForEncodedFrame(1); - video_stream_encoder_->OnBitrateUpdated(DataRate::bps(0), DataRate::bps(0), - DataRate::bps(0), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), + 0, 0, 0); // The encoder will cache up to one frame for a short duration. Adding two // frames means that the first frame will be dropped and the second frame will // be sent when the encoder is resumed. 
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr)); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); WaitForEncodedFrame(3); video_source_.IncomingCapturedFrame(CreateFrame(4, nullptr)); WaitForEncodedFrame(4); @@ -1212,9 +1450,10 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenRateSetToZero) { } TEST_F(VideoStreamEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); WaitForEncodedFrame(1); @@ -1227,9 +1466,10 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) { } TEST_F(VideoStreamEncoderTest, DropsFrameAfterStop) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); WaitForEncodedFrame(1); @@ -1241,10 +1481,30 @@ TEST_F(VideoStreamEncoderTest, DropsFrameAfterStop) { EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs)); } -TEST_F(VideoStreamEncoderTest, 
DropsPendingFramesOnSlowEncode) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); +class VideoStreamEncoderBlockedTest : public VideoStreamEncoderTest { + public: + VideoStreamEncoderBlockedTest() {} + + TaskQueueFactory* GetTaskQueueFactory() override { + return task_queue_factory_.get(); + } + + private: + std::unique_ptr task_queue_factory_ = + CreateDefaultTaskQueueFactory(); +}; + +TEST_F(VideoStreamEncoderBlockedTest, DropsPendingFramesOnSlowEncode) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + int dropped_count = 0; + stats_proxy_->SetDroppedFrameCallback( + [&dropped_count](VideoStreamEncoderObserver::DropReason) { + ++dropped_count; + }); fake_encoder_.BlockNextEncode(); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); @@ -1257,12 +1517,15 @@ TEST_F(VideoStreamEncoderTest, DropsPendingFramesOnSlowEncode) { WaitForEncodedFrame(3); video_stream_encoder_->Stop(); + + EXPECT_EQ(1, dropped_count); } TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420Conversion) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); rtc::Event frame_destroyed_event; video_source_.IncomingCapturedFrame( @@ -1275,15 +1538,16 @@ TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420Conversion) { TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420ConversionWithCrop) { // Use the cropping factory. 
video_encoder_config_.video_stream_factory = - new rtc::RefCountedObject(1, 30); + new rtc::RefCountedObject(); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config_), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); // Capture a frame at codec_width_/codec_height_. - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); WaitForEncodedFrame(1); // The encoder will have been configured once. @@ -1301,11 +1565,119 @@ TEST_F(VideoStreamEncoderTest, DropFrameWithFailedI420ConversionWithCrop) { video_stream_encoder_->Stop(); } +TEST_F(VideoStreamEncoderTest, NonI420FramesShouldNotBeConvertedToI420) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + video_source_.IncomingCapturedFrame( + CreateNV12Frame(1, codec_width_, codec_height_)); + WaitForEncodedFrame(1); + EXPECT_EQ(VideoFrameBuffer::Type::kNV12, + fake_encoder_.GetLastInputPixelFormat()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + NativeFrameIsConvertedToI420IfNoFrameTypePreference) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + fake_encoder_.SetPreferredPixelFormats({}); + + rtc::Event frame_destroyed_event; + video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( + 1, &frame_destroyed_event, codec_width_, codec_height_)); + WaitForEncodedFrame(1); 
+ EXPECT_EQ(VideoFrameBuffer::Type::kI420, + fake_encoder_.GetLastInputPixelFormat()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, NativeFrameMappedToPreferredPixelFormat) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + fake_encoder_.SetPreferredPixelFormats({VideoFrameBuffer::Type::kNV12}); + + rtc::Event frame_destroyed_event; + video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( + 1, &frame_destroyed_event, codec_width_, codec_height_)); + WaitForEncodedFrame(1); + EXPECT_EQ(VideoFrameBuffer::Type::kNV12, + fake_encoder_.GetLastInputPixelFormat()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, NativeFrameConvertedToI420IfMappingNotFeasible) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + // Fake NV12 native frame does not allow mapping to I444. 
+ fake_encoder_.SetPreferredPixelFormats({VideoFrameBuffer::Type::kI444}); + + rtc::Event frame_destroyed_event; + video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( + 1, &frame_destroyed_event, codec_width_, codec_height_)); + WaitForEncodedFrame(1); + EXPECT_EQ(VideoFrameBuffer::Type::kI420, + fake_encoder_.GetLastInputPixelFormat()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, NativeFrameBackedByNV12FrameIsEncodedFromI420) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + rtc::Event frame_destroyed_event; + video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( + 1, &frame_destroyed_event, codec_width_, codec_height_)); + WaitForEncodedFrame(1); + EXPECT_EQ(VideoFrameBuffer::Type::kI420, + fake_encoder_.GetLastInputPixelFormat()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, DropsFramesWhenCongestionWindowPushbackSet) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); + WaitForEncodedFrame(1); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0.5); + // The congestion window pushback is set to 0.5, which will drop 1/2 of + // frames. Adding two frames means that the first frame will be dropped and + // the second frame will be sent to the encoder. 
+ video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); + video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr)); + WaitForEncodedFrame(3); + video_source_.IncomingCapturedFrame(CreateFrame(4, nullptr)); + video_source_.IncomingCapturedFrame(CreateFrame(5, nullptr)); + WaitForEncodedFrame(5); + EXPECT_EQ(2u, stats_proxy_->GetStats().frames_dropped_by_congestion_window); + video_stream_encoder_->Stop(); +} + TEST_F(VideoStreamEncoderTest, ConfigureEncoderTriggersOnEncoderConfigurationChanged) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); EXPECT_EQ(0, sink_.number_of_reconfigurations()); // Capture a frame and wait for it to synchronize with the encoder thread. @@ -1331,9 +1703,10 @@ TEST_F(VideoStreamEncoderTest, } TEST_F(VideoStreamEncoderTest, FrameResolutionChangeReconfigureEncoder) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. 
video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); @@ -1358,9 +1731,10 @@ TEST_F(VideoStreamEncoderTest, FrameResolutionChangeReconfigureEncoder) { TEST_F(VideoStreamEncoderTest, EncoderInstanceDestroyedBeforeAnotherInstanceCreated) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); @@ -1381,9 +1755,10 @@ TEST_F(VideoStreamEncoderTest, } TEST_F(VideoStreamEncoderTest, BitrateLimitsChangeReconfigureRateAllocator) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); VideoEncoderConfig video_encoder_config; test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); @@ -1427,9 +1802,10 @@ TEST_F(VideoStreamEncoderTest, BitrateLimitsChangeReconfigureRateAllocator) { TEST_F(VideoStreamEncoderTest, IntersectionOfEncoderAndAppBitrateLimitsUsedWhenBothProvided) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const uint32_t kMinEncBitrateKbps = 100; const uint32_t kMaxEncBitrateKbps = 
1000; @@ -1474,9 +1850,10 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, EncoderAndAppLimitsDontIntersectEncoderLimitsIgnored) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const uint32_t kMinAppBitrateKbps = 100; const uint32_t kMaxAppBitrateKbps = 200; @@ -1509,9 +1886,10 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, EncoderRecommendedMaxAndMinBitratesUsedForGivenResolution) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits_270p( 480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000); @@ -1578,9 +1956,10 @@ TEST_F(VideoStreamEncoderTest, } TEST_F(VideoStreamEncoderTest, EncoderRecommendedMaxBitrateCapsTargetBitrate) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); VideoEncoderConfig video_encoder_config; test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); @@ -1639,29 +2018,88 @@ TEST_F(VideoStreamEncoderTest, SinkWantsRotationApplied) { video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, 
SinkWantsResolutionAlignment) { - constexpr int kRequestedResolutionAlignment = 7; +class ResolutionAlignmentTest + : public VideoStreamEncoderTest, + public ::testing::WithParamInterface< + ::testing::tuple>> { + public: + ResolutionAlignmentTest() + : requested_alignment_(::testing::get<0>(GetParam())), + scale_factors_(::testing::get<1>(GetParam())) {} + + protected: + const int requested_alignment_; + const std::vector scale_factors_; +}; + +INSTANTIATE_TEST_SUITE_P( + AlignmentAndScaleFactors, + ResolutionAlignmentTest, + ::testing::Combine( + ::testing::Values(1, 2, 3, 4, 5, 6, 16, 22), // requested_alignment_ + ::testing::Values(std::vector{-1.0}, // scale_factors_ + std::vector{-1.0, -1.0}, + std::vector{-1.0, -1.0, -1.0}, + std::vector{4.0, 2.0, 1.0}, + std::vector{9999.0, -1.0, 1.0}, + std::vector{3.99, 2.01, 1.0}, + std::vector{4.9, 1.7, 1.25}, + std::vector{10.0, 4.0, 3.0}, + std::vector{1.75, 3.5}, + std::vector{1.5, 2.5}, + std::vector{1.3, 1.0}))); + +TEST_P(ResolutionAlignmentTest, SinkWantsAlignmentApplied) { + // Set requested resolution alignment. video_source_.set_adaptation_enabled(true); - fake_encoder_.SetRequestedResolutionAlignment(kRequestedResolutionAlignment); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + fake_encoder_.SetRequestedResolutionAlignment(requested_alignment_); + fake_encoder_.SetApplyAlignmentToAllSimulcastLayers(true); + + // Fill config with the scaling factor by which to reduce encoding size. 
+ const int num_streams = scale_factors_.size(); + VideoEncoderConfig config; + test::FillEncoderConfiguration(kVideoCodecVP8, num_streams, &config); + for (int i = 0; i < num_streams; ++i) { + config.simulcast_layers[i].scale_resolution_down_by = scale_factors_[i]; + } + config.video_stream_factory = + new rtc::RefCountedObject( + "VP8", /*max qp*/ 56, /*screencast*/ false, + /*screenshare enabled*/ false); + video_stream_encoder_->ConfigureEncoder(std::move(config), kMaxPayloadLength); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), 0, 0, 0); + // Wait for all layers before triggering event. + sink_.SetNumExpectedLayers(num_streams); // On the 1st frame, we should have initialized the encoder and // asked for its resolution requirements. - video_source_.IncomingCapturedFrame( - CreateFrame(1, codec_width_, codec_height_)); - WaitForEncodedFrame(1); - EXPECT_EQ(video_source_.sink_wants().resolution_alignment, - kRequestedResolutionAlignment); + int64_t timestamp_ms = kFrameIntervalMs; + video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_EQ(1, fake_encoder_.GetNumEncoderInitializations()); // On the 2nd frame, we should be receiving a correctly aligned resolution. // (It's up the to the encoder to potentially drop the previous frame, // to avoid coding back-to-back keyframes.) 
- video_source_.IncomingCapturedFrame( - CreateFrame(2, codec_width_, codec_height_)); - WaitForEncodedFrame(2); - sink_.CheckLastFrameSizeIsMultipleOf(kRequestedResolutionAlignment); + timestamp_ms += kFrameIntervalMs; + video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_GE(fake_encoder_.GetNumEncoderInitializations(), 1); + + VideoCodec codec = fake_encoder_.video_codec(); + EXPECT_EQ(codec.numberOfSimulcastStreams, num_streams); + // Frame size should be a multiple of the requested alignment. + for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { + EXPECT_EQ(codec.simulcastStream[i].width % requested_alignment_, 0); + EXPECT_EQ(codec.simulcastStream[i].height % requested_alignment_, 0); + // Aspect ratio should match. + EXPECT_EQ(codec.width * codec.simulcastStream[i].height, + codec.height * codec.simulcastStream[i].width); + } video_stream_encoder_->Stop(); } @@ -1676,12 +2114,13 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { video_source_.set_adaptation_enabled(true); // Enable BALANCED preference, no initial limitation. 
- video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_stream_encoder_->SetSource(&video_source_, webrtc::DegradationPreference::BALANCED); - VerifyNoLimitation(video_source_.sink_wants()); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1706,9 +2145,10 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { t += frame_interval_ms; video_stream_encoder_->TriggerCpuOveruse(); - VerifyBalancedModeFpsRange( + EXPECT_THAT( video_source_.sink_wants(), - *video_source_.last_sent_width() * *video_source_.last_sent_height()); + FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() * + *video_source_.last_sent_height())); } while (video_source_.sink_wants().max_pixel_count < last_wants.max_pixel_count || video_source_.sink_wants().max_framerate_fps < @@ -1740,17 +2180,18 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { sink_.WaitForEncodedFrame(t); t += frame_interval_ms; - video_stream_encoder_->TriggerCpuNormalUsage(); - VerifyBalancedModeFpsRange( + video_stream_encoder_->TriggerCpuUnderuse(); + EXPECT_THAT( video_source_.sink_wants(), - *video_source_.last_sent_width() * *video_source_.last_sent_height()); + FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() * + *video_source_.last_sent_height())); EXPECT_TRUE(video_source_.sink_wants().max_pixel_count > last_wants.max_pixel_count || video_source_.sink_wants().max_framerate_fps > last_wants.max_framerate_fps); } - 
VerifyFpsMaxResolutionMax(video_source_.sink_wants()); + EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax()); stats_proxy_->ResetMockStats(); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); @@ -1760,11 +2201,125 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { +TEST_F(VideoStreamEncoderTest, + SinkWantsNotChangedByResourceLimitedBeforeDegradationPreferenceChange) { video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); - VerifyNoLimitation(video_source_.sink_wants()); + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); + + const int kFrameWidth = 1280; + const int kFrameHeight = 720; + + int64_t ntp_time = kFrameIntervalMs; + + // Force an input frame rate to be available, or the adaptation call won't + // know what framerate to adapt from. + const int kInputFps = 30; + VideoSendStream::Stats stats = stats_proxy_->GetStats(); + stats.input_frame_rate = kInputFps; + stats_proxy_->SetMockStats(stats); + + video_source_.set_adaptation_enabled(true); + video_stream_encoder_->SetSource( + &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += kFrameIntervalMs; + + // Trigger CPU overuse.
+ video_stream_encoder_->TriggerCpuOveruse(); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += kFrameIntervalMs; + + EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); + EXPECT_EQ(std::numeric_limits::max(), + video_source_.sink_wants().max_pixel_count); + // Some framerate constraint should be set. + int restricted_fps = video_source_.sink_wants().max_framerate_fps; + EXPECT_LT(restricted_fps, kInputFps); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += 100; + + video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( + &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); + // Give the encoder queue time to process the change in degradation preference + // by waiting for an encoded frame. + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += kFrameIntervalMs; + + video_stream_encoder_->TriggerQualityLow(); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += kFrameIntervalMs; + + // Some resolution constraint should be set. + EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); + EXPECT_LT(video_source_.sink_wants().max_pixel_count, + kFrameWidth * kFrameHeight); + EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps); + + int pixel_count = video_source_.sink_wants().max_pixel_count; + // Triggering a CPU underuse should not change the sink wants since it has + // not been overused for resolution since we changed degradation preference. 
+ video_stream_encoder_->TriggerCpuUnderuse(); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += kFrameIntervalMs; + EXPECT_EQ(video_source_.sink_wants().max_pixel_count, pixel_count); + EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps); + + // Change the degradation preference back. CPU underuse should not adapt since + // QP is most limited. + video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( + &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += 100; + // Resolution adaptation is gone after changing degradation preference. + EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); + EXPECT_EQ(std::numeric_limits::max(), + video_source_.sink_wants().max_pixel_count); + // The fps adaptation from above is now back. + EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, restricted_fps); + + // Trigger CPU underuse. + video_stream_encoder_->TriggerCpuUnderuse(); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += kFrameIntervalMs; + EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, restricted_fps); + + // Trigger QP underuse, fps should return to normal.
+ video_stream_encoder_->TriggerQualityHigh(); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); + sink_.WaitForEncodedFrame(ntp_time); + ntp_time += kFrameIntervalMs; + EXPECT_THAT(video_source_.sink_wants(), FpsMax()); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); const int kFrameWidth = 1280; const int kFrameHeight = 720; @@ -1792,7 +2347,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { // Set new source, switch to maintain-resolution. test::FrameForwarder new_video_source; - video_stream_encoder_->SetSource( + video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); // Give the encoder queue time to process the change in degradation preference // by waiting for an encoded frame. @@ -1801,7 +2356,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; // Initially no degradation registered. - VerifyFpsMaxResolutionMax(new_video_source.sink_wants()); + EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); // Force an input frame rate to be available, or the adaptation call won't // know what framerate to adapt form. @@ -1823,15 +2378,15 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { EXPECT_LT(new_video_source.sink_wants().max_framerate_fps, kInputFps); // Turn off degradation completely. 
- video_stream_encoder_->SetSource(&new_video_source, - webrtc::DegradationPreference::DISABLED); + video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( + &new_video_source, webrtc::DegradationPreference::DISABLED); // Give the encoder queue time to process the change in degradation preference // by waiting for an encoded frame. new_video_source.IncomingCapturedFrame( CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth)); sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; - VerifyFpsMaxResolutionMax(new_video_source.sink_wants()); + EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); video_stream_encoder_->TriggerCpuOveruse(); new_video_source.IncomingCapturedFrame( @@ -1840,10 +2395,10 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { frame_timestamp += kFrameIntervalMs; // Still no degradation. - VerifyFpsMaxResolutionMax(new_video_source.sink_wants()); + EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); // Calling SetSource with resolution scaling enabled apply the old SinkWants. - video_stream_encoder_->SetSource( + video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( &new_video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); // Give the encoder queue time to process the change in degradation preference // by waiting for an encoded frame. @@ -1857,7 +2412,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { EXPECT_EQ(kDefaultFramerate, new_video_source.sink_wants().max_framerate_fps); // Calling SetSource with framerate scaling enabled apply the old SinkWants. - video_stream_encoder_->SetSource( + video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); // Give the encoder queue time to process the change in degradation preference // by waiting for an encoded frame. 
@@ -1874,9 +2429,10 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { } TEST_F(VideoStreamEncoderTest, StatsTracksQualityAdaptationStats) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const int kWidth = 1280; const int kHeight = 720; @@ -1909,9 +2465,10 @@ TEST_F(VideoStreamEncoderTest, StatsTracksQualityAdaptationStats) { } TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStats) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const int kWidth = 1280; const int kHeight = 720; @@ -1931,7 +2488,7 @@ TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStats) { EXPECT_EQ(1, stats.number_of_cpu_adapt_changes); // Trigger CPU normal use. 
- video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); WaitForEncodedFrame(3); @@ -1944,9 +2501,10 @@ TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStats) { } TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsCpuAdaptation) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const int kWidth = 1280; const int kHeight = 720; @@ -2001,7 +2559,7 @@ TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsCpuAdaptation) { EXPECT_EQ(1, stats.number_of_cpu_adapt_changes); // Trigger CPU normal use. - video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); new_video_source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight)); WaitForEncodedFrame(6); stats = stats_proxy_->GetStats(); @@ -2014,9 +2572,10 @@ TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsCpuAdaptation) { } TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsQualityAdaptation) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const int kWidth = 1280; const int kHeight = 720; @@ -2076,9 +2635,10 @@ TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsQualityAdaptation) { TEST_F(VideoStreamEncoderTest, QualityAdaptationStatsAreResetWhenScalerIsDisabled) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), 
DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const int kWidth = 1280; const int kHeight = 720; @@ -2131,11 +2691,79 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->Stop(); } +TEST_F(VideoStreamEncoderTest, + StatsTracksCpuAdaptationStatsWhenSwitchingSource_Balanced) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + const int kWidth = 1280; + const int kHeight = 720; + int sequence = 1; + + // Enable BALANCED preference, no initial limitation. + test::FrameForwarder source; + video_stream_encoder_->SetSource(&source, + webrtc::DegradationPreference::BALANCED); + source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); + WaitForEncodedFrame(sequence++); + VideoSendStream::Stats stats = stats_proxy_->GetStats(); + EXPECT_FALSE(stats.cpu_limited_resolution); + EXPECT_FALSE(stats.cpu_limited_framerate); + EXPECT_EQ(0, stats.number_of_cpu_adapt_changes); + + // Trigger CPU overuse, should now adapt down. + video_stream_encoder_->TriggerCpuOveruse(); + source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); + WaitForEncodedFrame(sequence++); + stats = stats_proxy_->GetStats(); + EXPECT_EQ(1, stats.number_of_cpu_adapt_changes); + + // Set new degradation preference should clear restrictions since we changed + // from BALANCED. 
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( + &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); + source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); + WaitForEncodedFrame(sequence++); + stats = stats_proxy_->GetStats(); + EXPECT_FALSE(stats.cpu_limited_resolution); + EXPECT_FALSE(stats.cpu_limited_framerate); + EXPECT_EQ(1, stats.number_of_cpu_adapt_changes); + + // Force an input frame rate to be available, or the adaptation call won't + // know what framerate to adapt from. + VideoSendStream::Stats mock_stats = stats_proxy_->GetStats(); + mock_stats.input_frame_rate = 30; + stats_proxy_->SetMockStats(mock_stats); + video_stream_encoder_->TriggerCpuOveruse(); + stats_proxy_->ResetMockStats(); + source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); + WaitForEncodedFrame(sequence++); + + // We have now adapted once. + stats = stats_proxy_->GetStats(); + EXPECT_EQ(2, stats.number_of_cpu_adapt_changes); + + // Back to BALANCED, should clear the restrictions again. 
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( + &source, webrtc::DegradationPreference::BALANCED); + source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); + WaitForEncodedFrame(sequence++); + stats = stats_proxy_->GetStats(); + EXPECT_FALSE(stats.cpu_limited_resolution); + EXPECT_FALSE(stats.cpu_limited_framerate); + EXPECT_EQ(2, stats.number_of_cpu_adapt_changes); + + video_stream_encoder_->Stop(); +} + TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const int kWidth = 1280; const int kHeight = 720; @@ -2233,7 +2861,7 @@ TEST_F(VideoStreamEncoderTest, EXPECT_EQ(2, stats.number_of_cpu_adapt_changes); // Trigger CPU normal usage. - video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); @@ -2254,7 +2882,7 @@ TEST_F(VideoStreamEncoderTest, EXPECT_EQ(3, stats.number_of_cpu_adapt_changes); // Trigger CPU normal usage. 
- video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); new_video_source.IncomingCapturedFrame( CreateFrame(sequence, kWidth, kHeight)); WaitForEncodedFrame(sequence++); @@ -2271,12 +2899,13 @@ TEST_F(VideoStreamEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Expect no scaling to begin with. - VerifyNoLimitation(video_source_.sink_wants()); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(1); @@ -2321,9 +2950,10 @@ TEST_F(VideoStreamEncoderTest, SkipsSameAdaptDownRequest_MaintainFramerateMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. 
test::FrameForwarder source; @@ -2332,13 +2962,14 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(1); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt down, expect scaled down resolution. video_stream_encoder_->TriggerCpuOveruse(); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); const int kLastMaxPixelCount = source.sink_wants().max_pixel_count; EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -2355,9 +2986,10 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. test::FrameForwarder source; @@ -2365,11 +2997,12 @@ TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) { webrtc::DegradationPreference::BALANCED); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(1); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); // Trigger adapt down, expect scaled down resolution. 
video_stream_encoder_->TriggerQualityLow(); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); const int kLastMaxPixelCount = source.sink_wants().max_pixel_count; @@ -2397,9 +3030,10 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_MaintainFramerateMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. test::FrameForwarder source; @@ -2408,13 +3042,13 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt up, expect no change. 
- video_stream_encoder_->TriggerCpuNormalUsage(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + video_stream_encoder_->TriggerCpuUnderuse(); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -2425,9 +3059,10 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_MaintainResolutionMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_RESOLUTION preference, no initial limitation. test::FrameForwarder source; @@ -2436,13 +3071,13 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt up, expect no change. 
- video_stream_encoder_->TriggerCpuNormalUsage(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + video_stream_encoder_->TriggerCpuUnderuse(); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -2452,9 +3087,10 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. test::FrameForwarder source; @@ -2463,14 +3099,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) { source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no change. 
video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2481,9 +3117,10 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) { TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable DISABLED preference, no initial limitation. test::FrameForwarder source; @@ -2492,14 +3129,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) { source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no change. 
video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2511,19 +3148,20 @@ TEST_F(VideoStreamEncoderTest, AdaptsResolutionForLowQuality_MaintainFramerateMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(1); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2531,13 +3169,14 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->TriggerQualityLow(); source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); WaitForEncodedFrame(2); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no restriction. 
video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -2550,9 +3189,10 @@ TEST_F(VideoStreamEncoderTest, const int kWidth = 1280; const int kHeight = 720; const int kInputFps = 30; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); VideoSendStream::Stats stats = stats_proxy_->GetStats(); stats.input_frame_rate = kInputFps; @@ -2561,33 +3201,35 @@ TEST_F(VideoStreamEncoderTest, // Expect no scaling to begin with (preference: MAINTAIN_FRAMERATE). video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(1); - VerifyFpsMaxResolutionMax(video_source_.sink_wants()); + EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax()); // Trigger adapt down, expect scaled down resolution. video_stream_encoder_->TriggerQualityLow(); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); sink_.WaitForEncodedFrame(2); - VerifyFpsMaxResolutionLt(video_source_.sink_wants(), kWidth * kHeight); + EXPECT_THAT(video_source_.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); // Enable MAINTAIN_RESOLUTION preference. 
test::FrameForwarder new_video_source; - video_stream_encoder_->SetSource( + video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( &new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); // Give the encoder queue time to process the change in degradation preference // by waiting for an encoded frame. new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); sink_.WaitForEncodedFrame(3); - VerifyFpsMaxResolutionMax(new_video_source.sink_wants()); + EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); // Trigger adapt down, expect reduced framerate. video_stream_encoder_->TriggerQualityLow(); new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight)); sink_.WaitForEncodedFrame(4); - VerifyFpsLtResolutionMax(new_video_source.sink_wants(), kInputFps); + EXPECT_THAT(new_video_source.sink_wants(), + FpsMatchesResolutionMax(Lt(kInputFps))); // Trigger adapt up, expect no restriction. video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(new_video_source.sink_wants()); + EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); video_stream_encoder_->Stop(); } @@ -2597,9 +3239,10 @@ TEST_F(VideoStreamEncoderTest, DoesNotScaleBelowSetResolutionLimit) { const int kHeight = 720; const size_t kNumFrames = 10; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable adapter, expected input resolutions when downscaling: // 1280x720 -> 960x540 -> 640x360 -> 480x270 -> 320x180 (kMinPixelsPerFrame) @@ -2634,12 +3277,13 @@ TEST_F(VideoStreamEncoderTest, AdaptsResolutionUpAndDownTwiceOnOveruse_MaintainFramerateMode) { const int kWidth = 1280; const int kHeight = 720; - 
video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -2647,7 +3291,7 @@ TEST_F(VideoStreamEncoderTest, int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -2656,16 +3300,17 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt up, expect no restriction. 
- video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -2674,16 +3319,17 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt up, expect no restriction. 
- video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -2694,12 +3340,13 @@ TEST_F(VideoStreamEncoderTest, AdaptsResolutionUpAndDownTwiceForLowQuality_BalancedMode_NoFpsLimit) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. 
- AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); @@ -2707,7 +3354,7 @@ TEST_F(VideoStreamEncoderTest, int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2716,7 +3363,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2725,7 +3373,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2734,7 +3382,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); 
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2743,7 +3392,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2754,13 +3403,14 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) { fake_encoder_.SetResolutionBitrateLimits( {kEncoderBitrateLimits540p, kEncoderBitrateLimits720p}); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kEncoderBitrateLimits720p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits720p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0, + 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -2771,10 +3421,11 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) { WaitForEncodedFrame(1280, 720); // Reduce bitrate and trigger adapt down. 
- video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kEncoderBitrateLimits540p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits540p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0, + 0, 0); video_stream_encoder_->TriggerQualityLow(); // Insert 720p frame. It should be downscaled and encoded. @@ -2785,17 +3436,18 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) { // Trigger adapt up. Higher resolution should not be requested duo to lack // of bitrate. video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionLt(source.sink_wants(), 1280 * 720); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMatches(Lt(1280 * 720))); // Increase bitrate. - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kEncoderBitrateLimits720p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits720p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits720p.min_start_bitrate_bps), 0, + 0, 0); // Trigger adapt up. Higher resolution should be requested. 
video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); video_stream_encoder_->Stop(); } @@ -2805,13 +3457,14 @@ TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) { {kEncoderBitrateLimits540p, kEncoderBitrateLimits720p}); // Set bitrate equal to min bitrate of 540p. - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kEncoderBitrateLimits540p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits540p.min_start_bitrate_bps), - DataRate::bps(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), + DataRate::BitsPerSec(kEncoderBitrateLimits540p.min_start_bitrate_bps), 0, + 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -2821,7 +3474,7 @@ TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) { int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); ExpectDroppedFrame(); - VerifyFpsMaxResolutionLt(source.sink_wants(), 1280 * 720); + EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < 1280 * 720, 5000); // Insert 720p frame. It should be downscaled and encoded. 
timestamp_ms += kFrameIntervalMs; @@ -2844,9 +3497,9 @@ class BalancedDegradationTest : public VideoStreamEncoderTest { } void OnBitrateUpdated(int bitrate_bps) { - video_stream_encoder_->OnBitrateUpdated(DataRate::bps(bitrate_bps), - DataRate::bps(bitrate_bps), - DataRate::bps(bitrate_bps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(bitrate_bps), DataRate::BitsPerSec(bitrate_bps), + DataRate::BitsPerSec(bitrate_bps), 0, 0, 0); } void InsertFrame() { @@ -2863,10 +3516,10 @@ class BalancedDegradationTest : public VideoStreamEncoderTest { const int kHeight = 360; const int64_t kFrameIntervalMs = 150; // Use low fps to not drop any frame. int64_t timestamp_ms_ = 0; - AdaptingFrameForwarder source_; + AdaptingFrameForwarder source_{&time_controller_}; }; -TEST_F(BalancedDegradationTest, AdaptDownReturnsFalseIfFpsDiffLtThreshold) { +TEST_F(BalancedDegradationTest, AdaptDownTwiceIfMinFpsDiffLtThreshold) { test::ScopedFieldTrials field_trials( "WebRTC-Video-BalancedDegradationSettings/" "pixels:57600|129600|230400,fps:7|10|24,fps_diff:1|1|1/"); @@ -2879,17 +3532,18 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsFalseIfFpsDiffLtThreshold) { stats_proxy_->SetMockStats(stats); InsertFrameAndWaitForEncoded(); - VerifyFpsMaxResolutionMax(source_.sink_wants()); + EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); - // Trigger adapt down, expect scaled down framerate (640x360@24fps). - // Fps diff (input-requested:0) < threshold, expect AdaptDown to return false. - video_stream_encoder_->TriggerQualityLowExpectFalse(); - VerifyFpsEqResolutionMax(source_.sink_wants(), 24); + // Trigger adapt down, expect scaled down framerate and resolution, + // since Fps diff (input-requested:0) < threshold. 
+ video_stream_encoder_->TriggerQualityLow(); + EXPECT_THAT(source_.sink_wants(), + AllOf(WantsFps(Eq(24)), WantsMaxPixels(Le(230400)))); video_stream_encoder_->Stop(); } -TEST_F(BalancedDegradationTest, AdaptDownReturnsTrueIfFpsDiffGeThreshold) { +TEST_F(BalancedDegradationTest, AdaptDownOnceIfFpsDiffGeThreshold) { test::ScopedFieldTrials field_trials( "WebRTC-Video-BalancedDegradationSettings/" "pixels:57600|129600|230400,fps:7|10|24,fps_diff:1|1|1/"); @@ -2902,12 +3556,12 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsTrueIfFpsDiffGeThreshold) { stats_proxy_->SetMockStats(stats); InsertFrameAndWaitForEncoded(); - VerifyFpsMaxResolutionMax(source_.sink_wants()); + EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); - // Trigger adapt down, expect scaled down framerate (640x360@24fps). - // Fps diff (input-requested:1) == threshold, expect AdaptDown to return true. + // Trigger adapt down, expect scaled down framerate only (640x360@24fps). + // Fps diff (input-requested:1) == threshold. video_stream_encoder_->TriggerQualityLow(); - VerifyFpsEqResolutionMax(source_.sink_wants(), 24); + EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(24))); video_stream_encoder_->Stop(); } @@ -2921,11 +3575,11 @@ TEST_F(BalancedDegradationTest, AdaptDownUsesCodecSpecificFps) { EXPECT_EQ(kVideoCodecVP8, video_encoder_config_.codec_type); InsertFrameAndWaitForEncoded(); - VerifyFpsMaxResolutionMax(source_.sink_wants()); + EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); // Trigger adapt down, expect scaled down framerate (640x360@22fps). 
video_stream_encoder_->TriggerQualityLow(); - VerifyFpsEqResolutionMax(source_.sink_wants(), 22); + EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(22))); video_stream_encoder_->Stop(); } @@ -2941,25 +3595,25 @@ TEST_F(BalancedDegradationTest, NoAdaptUpIfBwEstimateIsLessThanMinBitrate) { OnBitrateUpdated(kTooLowMinBitrateBps); InsertFrameAndWaitForEncoded(); - VerifyFpsMaxResolutionMax(source_.sink_wants()); + EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (640x360@14fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionMax(source_.sink_wants(), 14); + EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14))); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down resolution (480x270@14fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants())); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (480x270@10fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants())); EXPECT_EQ(source_.sink_wants().max_framerate_fps, 10); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -2986,7 +3640,7 @@ TEST_F(BalancedDegradationTest, SetupTest(); OnBitrateUpdated(kLowTargetBitrateBps); - VerifyNoLimitation(source_.sink_wants()); + EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants()); // Insert frame, expect scaled down: // framerate (640x360@24fps) -> resolution (480x270@24fps). 
@@ -3021,31 +3675,31 @@ TEST_F(BalancedDegradationTest, OnBitrateUpdated(kTooLowMinResolutionBitrateBps); InsertFrameAndWaitForEncoded(); - VerifyFpsMaxResolutionMax(source_.sink_wants()); + EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (640x360@14fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionMax(source_.sink_wants(), 14); + EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14))); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down resolution (480x270@14fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants())); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (480x270@10fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants())); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect upscaled fps (no bitrate limit) (480x270@14fps). video_stream_encoder_->TriggerQualityHigh(); InsertFrameAndWaitForEncoded(); - VerifyFpsGtResolutionEq(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants())); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no upscale in res (target bitrate < min bitrate). 
@@ -3057,7 +3711,7 @@ TEST_F(BalancedDegradationTest, OnBitrateUpdated(kResolutionMinBitrateBps); video_stream_encoder_->TriggerQualityHigh(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionGt(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants())); EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes); video_stream_encoder_->Stop(); @@ -3077,25 +3731,25 @@ TEST_F(BalancedDegradationTest, OnBitrateUpdated(kTooLowMinBitrateBps); InsertFrameAndWaitForEncoded(); - VerifyFpsMaxResolutionMax(source_.sink_wants()); + EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (640x360@14fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionMax(source_.sink_wants(), 14); + EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14))); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down resolution (480x270@14fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants())); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (480x270@10fps). video_stream_encoder_->TriggerQualityLow(); InsertFrameAndWaitForEncoded(); - VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants())); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no upscale (target bitrate < min bitrate). 
@@ -3107,7 +3761,7 @@ TEST_F(BalancedDegradationTest, OnBitrateUpdated(kMinBitrateBps); video_stream_encoder_->TriggerQualityHigh(); InsertFrameAndWaitForEncoded(); - VerifyFpsGtResolutionEq(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants())); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no upscale in res (target bitrate < min bitrate). @@ -3120,7 +3774,7 @@ TEST_F(BalancedDegradationTest, OnBitrateUpdated(kResolutionMinBitrateBps); video_stream_encoder_->TriggerQualityHigh(); InsertFrameAndWaitForEncoded(); - VerifyFpsEqResolutionGt(source_.sink_wants(), source_.last_wants()); + EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants())); EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes); video_stream_encoder_->Stop(); @@ -3130,12 +3784,13 @@ TEST_F(VideoStreamEncoderTest, AdaptsResolutionOnOveruseAndLowQuality_MaintainFramerateMode) { const int kWidth = 1280; const int kHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. 
- AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -3143,7 +3798,7 @@ TEST_F(VideoStreamEncoderTest, int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3154,7 +3809,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3165,7 +3821,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3176,7 +3832,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - 
VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3187,7 +3843,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); rtc::VideoSinkWants last_wants = source.sink_wants(); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); @@ -3199,68 +3855,74 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionEq(source.sink_wants(), last_wants); + EXPECT_THAT(source.sink_wants(), FpsMax()); + EXPECT_EQ(source.sink_wants().max_pixel_count, last_wants.max_pixel_count); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger cpu adapt up, expect upscaled resolution (480x270). - video_stream_encoder_->TriggerCpuNormalUsage(); + // Trigger quality adapt up, expect upscaled resolution (480x270). 
+ video_stream_encoder_->TriggerQualityHigh(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); - EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); + EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger cpu adapt up, expect upscaled resolution (640x360). - video_stream_encoder_->TriggerCpuNormalUsage(); + // Trigger quality and cpu adapt up since both are most limited, expect + // upscaled resolution (640x360). + video_stream_encoder_->TriggerCpuUnderuse(); + video_stream_encoder_->TriggerQualityHigh(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); - EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes); + EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger cpu adapt up, expect upscaled resolution (960x540). - video_stream_encoder_->TriggerCpuNormalUsage(); + // Trigger quality and cpu adapt up since both are most limited, expect + // upscaled resolution (960x540). 
+ video_stream_encoder_->TriggerCpuUnderuse(); + video_stream_encoder_->TriggerQualityHigh(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); last_wants = source.sink_wants(); - EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); + EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); - EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes); + EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger cpu adapt up, no cpu downgrades, expect no change (960x540). - video_stream_encoder_->TriggerCpuNormalUsage(); + // Trigger cpu adapt up, expect no change since not most limited (960x540). + // However the stats will change since the CPU resource is no longer limited. + video_stream_encoder_->TriggerCpuUnderuse(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionEq(source.sink_wants(), last_wants); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants)); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger quality adapt up, expect no restriction (1280x720). 
video_stream_encoder_->TriggerQualityHigh(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes); video_stream_encoder_->Stop(); } @@ -3269,9 +3931,10 @@ TEST_F(VideoStreamEncoderTest, CpuLimitedHistogramIsReported) { const int kWidth = 640; const int kHeight = 360; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) { video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight)); @@ -3297,9 +3960,10 @@ TEST_F(VideoStreamEncoderTest, CpuLimitedHistogramIsReported) { TEST_F(VideoStreamEncoderTest, CpuLimitedHistogramIsNotReportedForDisabledDegradation) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 
0, 0, 0); const int kWidth = 640; const int kHeight = 360; @@ -3319,54 +3983,141 @@ TEST_F(VideoStreamEncoderTest, metrics::NumSamples("WebRTC.Video.CpuLimitedResolutionInPercent")); } -TEST_F(VideoStreamEncoderTest, CallsBitrateObserver) { - MockBitrateObserver bitrate_observer; - video_stream_encoder_->SetBitrateAllocationObserver(&bitrate_observer); +TEST_F(VideoStreamEncoderTest, ReportsVideoBitrateAllocation) { + ResetEncoder("FAKE", 1, 1, 1, /*screenshare*/ false, + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation); const int kDefaultFps = 30; const VideoBitrateAllocation expected_bitrate = - DefaultVideoBitrateAllocator(fake_encoder_.codec_config()) + SimulcastRateAllocator(fake_encoder_.codec_config()) .Allocate(VideoBitrateAllocationParameters(kLowTargetBitrateBps, kDefaultFps)); - EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate)) - .Times(1); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kLowTargetBitrateBps), DataRate::bps(kLowTargetBitrateBps), - DataRate::bps(kLowTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); + + video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(sink_.GetLastVideoBitrateAllocation(), expected_bitrate); + EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1); + + // Check that encoder has been updated too, not just allocation observer. + EXPECT_TRUE(fake_encoder_.GetAndResetLastRateControlSettings().has_value()); + AdvanceTime(TimeDelta::Seconds(1) / kDefaultFps); + + // VideoBitrateAllocation not updated on second frame. 
+ video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1); + AdvanceTime(TimeDelta::Millis(1) / kDefaultFps); + + // VideoBitrateAllocation updated after a process interval. + const int64_t start_time_ms = CurrentTimeMs(); + while (CurrentTimeMs() - start_time_ms < 5 * kProcessIntervalMs) { + video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + AdvanceTime(TimeDelta::Millis(1) / kDefaultFps); + } + EXPECT_GT(sink_.number_of_bitrate_allocations(), 3); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, ReportsVideoLayersAllocationForV8Simulcast) { + ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false, + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoLayersAllocation); + + const int kDefaultFps = 30; + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame( - CreateFrame(rtc::TimeMillis(), codec_width_, codec_height_)); - WaitForEncodedFrame(rtc::TimeMillis()); + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(sink_.number_of_layers_allocations(), 1); + VideoLayersAllocation last_layer_allocation = + sink_.GetLastVideoLayersAllocation(); + // kLowTargetBitrateBps is only enough for one spatial layer. + ASSERT_EQ(last_layer_allocation.active_spatial_layers.size(), 1u); + VideoBitrateAllocation bitrate_allocation = - fake_encoder_.GetAndResetLastRateControlSettings()->bitrate; + fake_encoder_.GetAndResetLastRateControlSettings()->target_bitrate; // Check that encoder has been updated too, not just allocation observer. 
EXPECT_EQ(bitrate_allocation.get_sum_bps(), kLowTargetBitrateBps); - // TODO(srte): The use of millisecs here looks like an error, but the tests - // fails using seconds, this should be investigated. - fake_clock_.AdvanceTime(TimeDelta::ms(1) / kDefaultFps); + AdvanceTime(TimeDelta::Seconds(1) / kDefaultFps); - // Not called on second frame. - EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate)) - .Times(0); - video_source_.IncomingCapturedFrame( - CreateFrame(rtc::TimeMillis(), codec_width_, codec_height_)); - WaitForEncodedFrame(rtc::TimeMillis()); - fake_clock_.AdvanceTime(TimeDelta::ms(1) / kDefaultFps); - - // Called after a process interval. - EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate)) - .Times(1); - const int64_t start_time_ms = rtc::TimeMillis(); - while (rtc::TimeMillis() - start_time_ms < kProcessIntervalMs) { + // VideoLayersAllocation might be updated if frame rate changes. + int number_of_layers_allocation = 1; + const int64_t start_time_ms = CurrentTimeMs(); + while (CurrentTimeMs() - start_time_ms < 10 * kProcessIntervalMs) { video_source_.IncomingCapturedFrame( - CreateFrame(rtc::TimeMillis(), codec_width_, codec_height_)); - WaitForEncodedFrame(rtc::TimeMillis()); - fake_clock_.AdvanceTime(TimeDelta::ms(1) / kDefaultFps); + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + if (number_of_layers_allocation != sink_.number_of_layers_allocations()) { + number_of_layers_allocation = sink_.number_of_layers_allocations(); + VideoLayersAllocation new_allocation = + sink_.GetLastVideoLayersAllocation(); + ASSERT_EQ(new_allocation.active_spatial_layers.size(), 1u); + EXPECT_NE(new_allocation.active_spatial_layers[0].frame_rate_fps, + last_layer_allocation.active_spatial_layers[0].frame_rate_fps); + EXPECT_EQ(new_allocation.active_spatial_layers[0] + .target_bitrate_per_temporal_layer, + last_layer_allocation.active_spatial_layers[0] + 
.target_bitrate_per_temporal_layer); + last_layer_allocation = new_allocation; + } } + EXPECT_LE(sink_.number_of_layers_allocations(), 3); + video_stream_encoder_->Stop(); +} - // Since rates are unchanged, encoder should not be reconfigured. - EXPECT_FALSE(fake_encoder_.GetAndResetLastRateControlSettings().has_value()); +TEST_F(VideoStreamEncoderTest, + ReportsUpdatedVideoLayersAllocationWhenBweChanges) { + ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false, + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoLayersAllocation); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); + + video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + EXPECT_EQ(sink_.number_of_layers_allocations(), 1); + VideoLayersAllocation last_layer_allocation = + sink_.GetLastVideoLayersAllocation(); + // kLowTargetBitrateBps is only enough for one spatial layer. 
+ ASSERT_EQ(last_layer_allocation.active_spatial_layers.size(), 1u); + EXPECT_EQ(last_layer_allocation.active_spatial_layers[0] + .target_bitrate_per_temporal_layer[0], + DataRate::BitsPerSec(kLowTargetBitrateBps)); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), 0, 0, 0); + video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + + EXPECT_EQ(sink_.number_of_layers_allocations(), 2); + last_layer_allocation = sink_.GetLastVideoLayersAllocation(); + ASSERT_EQ(last_layer_allocation.active_spatial_layers.size(), 2u); + EXPECT_GT(last_layer_allocation.active_spatial_layers[1] + .target_bitrate_per_temporal_layer[0], + DataRate::Zero()); video_stream_encoder_->Stop(); } @@ -3374,7 +4125,9 @@ TEST_F(VideoStreamEncoderTest, CallsBitrateObserver) { TEST_F(VideoStreamEncoderTest, TemporalLayersNotDisabledIfSupported) { // 2 TLs configured, temporal layers supported by encoder. const int kNumTemporalLayers = 2; - ResetEncoder("VP8", 1, kNumTemporalLayers, 1, /*screenshare*/ false); + ResetEncoder("VP8", 1, kNumTemporalLayers, 1, /*screenshare*/ false, + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation); fake_encoder_.SetTemporalLayersSupported(0, true); // Bitrate allocated across temporal layers. @@ -3396,7 +4149,9 @@ TEST_F(VideoStreamEncoderTest, TemporalLayersNotDisabledIfSupported) { TEST_F(VideoStreamEncoderTest, TemporalLayersDisabledIfNotSupported) { // 2 TLs configured, temporal layers not supported by encoder. 
- ResetEncoder("VP8", 1, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false); + ResetEncoder("VP8", 1, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false, + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation); fake_encoder_.SetTemporalLayersSupported(0, false); // Temporal layers not supported by the encoder. @@ -3409,8 +4164,16 @@ TEST_F(VideoStreamEncoderTest, TemporalLayersDisabledIfNotSupported) { } TEST_F(VideoStreamEncoderTest, VerifyBitrateAllocationForTwoStreams) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Video-QualityScalerSettings/" + "initial_bitrate_interval_ms:1000,initial_bitrate_factor:0.2/"); + // Reset encoder for field trials to take effect. + ConfigureEncoder(video_encoder_config_.Copy()); + // 2 TLs configured, temporal layers only supported for first stream. - ResetEncoder("VP8", 2, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false); + ResetEncoder("VP8", 2, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false, + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation); fake_encoder_.SetTemporalLayersSupported(0, true); fake_encoder_.SetTemporalLayersSupported(1, false); @@ -3439,9 +4202,10 @@ TEST_F(VideoStreamEncoderTest, OveruseDetectorUpdatedOnReconfigureAndAdaption) { const int kFrameHeight = 720; const int kFramerate = 24; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); test::FrameForwarder source; video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); @@ -3456,11 +4220,9 @@ TEST_F(VideoStreamEncoderTest, OveruseDetectorUpdatedOnReconfigureAndAdaption) { // Trigger reconfigure encoder (without resetting 
the entire instance). VideoEncoderConfig video_encoder_config; - video_encoder_config.codec_type = kVideoCodecVP8; + test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); + video_encoder_config.simulcast_layers[0].max_framerate = kFramerate; video_encoder_config.max_bitrate_bps = kTargetBitrateBps; - video_encoder_config.number_of_streams = 1; - video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(1, kFramerate); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); @@ -3485,7 +4247,7 @@ TEST_F(VideoStreamEncoderTest, OveruseDetectorUpdatedOnReconfigureAndAdaption) { stats = stats_proxy_->GetStats(); stats.input_frame_rate = adapted_framerate / 2; stats_proxy_->SetMockStats(stats); - video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_EQ( video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(), @@ -3501,22 +4263,21 @@ TEST_F(VideoStreamEncoderTest, const int kLowFramerate = 15; const int kHighFramerate = 25; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); test::FrameForwarder source; video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); // Trigger initial configuration. 
VideoEncoderConfig video_encoder_config; - video_encoder_config.codec_type = kVideoCodecVP8; + test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); + video_encoder_config.simulcast_layers[0].max_framerate = kLowFramerate; video_encoder_config.max_bitrate_bps = kTargetBitrateBps; - video_encoder_config.number_of_streams = 1; - video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(1, kLowFramerate); source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight)); - video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); @@ -3536,8 +4297,7 @@ TEST_F(VideoStreamEncoderTest, // Reconfigure the encoder with a new (higher max framerate), max fps should // still respect the adaptation. - video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(1, kHighFramerate); + video_encoder_config.simulcast_layers[0].max_framerate = kHighFramerate; source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight)); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), kMaxPayloadLength); @@ -3551,7 +4311,7 @@ TEST_F(VideoStreamEncoderTest, stats = stats_proxy_->GetStats(); stats.input_frame_rate = adapted_framerate; stats_proxy_->SetMockStats(stats); - video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_EQ( video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(), @@ -3566,20 +4326,19 @@ TEST_F(VideoStreamEncoderTest, const int kFrameHeight = 720; const int kFramerate = 24; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + 
DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); test::FrameForwarder source; video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); // Trigger initial configuration. VideoEncoderConfig video_encoder_config; - video_encoder_config.codec_type = kVideoCodecVP8; + test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); + video_encoder_config.simulcast_layers[0].max_framerate = kFramerate; video_encoder_config.max_bitrate_bps = kTargetBitrateBps; - video_encoder_config.number_of_streams = 1; - video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(1, kFramerate); source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight)); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), kMaxPayloadLength); @@ -3601,9 +4360,8 @@ TEST_F(VideoStreamEncoderTest, // Change degradation preference to not enable framerate scaling. Target // framerate should be changed to codec defined limit. 
- video_stream_encoder_->SetSource( + video_stream_encoder_->SetSourceAndWaitForFramerateUpdated( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); - video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_EQ( video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(), kFramerate); @@ -3613,10 +4371,10 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) { const int kTooLowBitrateForFrameSizeBps = 10000; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTooLowBitrateForFrameSizeBps), - DataRate::bps(kTooLowBitrateForFrameSizeBps), - DataRate::bps(kTooLowBitrateForFrameSizeBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0); const int kWidth = 640; const int kHeight = 360; @@ -3626,7 +4384,8 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) { ExpectDroppedFrame(); // Expect the sink_wants to specify a scaled frame. - EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight); + EXPECT_TRUE_WAIT( + video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); int last_pixel_count = video_source_.sink_wants().max_pixel_count; @@ -3637,7 +4396,8 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) { // Expect to drop this frame, the wait should time out. 
ExpectDroppedFrame(); - EXPECT_LT(video_source_.sink_wants().max_pixel_count, last_pixel_count); + EXPECT_TRUE_WAIT( + video_source_.sink_wants().max_pixel_count < last_pixel_count, 5000); video_stream_encoder_->Stop(); } @@ -3645,10 +4405,10 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) { TEST_F(VideoStreamEncoderTest, NumberOfDroppedFramesLimitedWhenBitrateIsTooLow) { const int kTooLowBitrateForFrameSizeBps = 10000; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTooLowBitrateForFrameSizeBps), - DataRate::bps(kTooLowBitrateForFrameSizeBps), - DataRate::bps(kTooLowBitrateForFrameSizeBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0); const int kWidth = 640; const int kHeight = 360; @@ -3672,9 +4432,10 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropOffWithMaintainResolutionPreference) { const int kWidth = 640; const int kHeight = 360; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kLowTargetBitrateBps), DataRate::bps(kLowTargetBitrateBps), - DataRate::bps(kLowTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); // Set degradation preference. 
video_stream_encoder_->SetSource( @@ -3698,9 +4459,10 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropOffWhenEncoderDisabledScaling) { video_encoder_config.video_format.parameters["foo"] = "foo"; video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), kMaxPayloadLength); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kLowTargetBitrateBps), DataRate::bps(kLowTargetBitrateBps), - DataRate::bps(kLowTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); // Force quality scaler reconfiguration by resetting the source. video_stream_encoder_->SetSource(&video_source_, @@ -3725,31 +4487,292 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) { const int kWidth = 640; const int kHeight = 360; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); // Frame should not be dropped. WaitForEncodedFrame(1); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kNotTooLowBitrateForFrameSizeBps), - DataRate::bps(kNotTooLowBitrateForFrameSizeBps), - DataRate::bps(kNotTooLowBitrateForFrameSizeBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), 0, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); // Frame should not be dropped. 
WaitForEncodedFrame(2); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTooLowBitrateForFrameSizeBps), - DataRate::bps(kTooLowBitrateForFrameSizeBps), - DataRate::bps(kTooLowBitrateForFrameSizeBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); // Expect to drop this frame, the wait should time out. ExpectDroppedFrame(); // Expect the sink_wants to specify a scaled frame. - EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight); + EXPECT_TRUE_WAIT( + video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + InitialFrameDropNotReactivatedWhenBweDropsWhenScalingDisabled) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Video-QualityScalerSettings/" + "initial_bitrate_interval_ms:1000,initial_bitrate_factor:0.2/"); + fake_encoder_.SetQualityScaling(false); + ConfigureEncoder(video_encoder_config_.Copy()); + const int kNotTooLowBitrateForFrameSizeBps = kTargetBitrateBps * 0.2; + const int kTooLowBitrateForFrameSizeBps = kTargetBitrateBps * 0.19; + const int kWidth = 640; + const int kHeight = 360; + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); + // Frame should not be dropped. 
+ WaitForEncodedFrame(1); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kNotTooLowBitrateForFrameSizeBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); + // Frame should not be dropped. + WaitForEncodedFrame(2); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), + DataRate::BitsPerSec(kTooLowBitrateForFrameSizeBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); + // Not dropped since quality scaling is disabled. + WaitForEncodedFrame(3); + + // Expect the sink_wants to specify a scaled frame. + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + EXPECT_THAT(video_source_.sink_wants(), ResolutionMax()); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenLayersChange) { + const int kLowTargetBitrateBps = 400000; + // Set simulcast. + ResetEncoder("VP8", 3, 1, 1, false); + fake_encoder_.SetQualityScaling(true); + const int kWidth = 1280; + const int kHeight = 720; + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); + // Frame should not be dropped. + WaitForEncodedFrame(1); + + // Trigger QVGA "singlecast" + // Update the config. 
+ VideoEncoderConfig video_encoder_config; + test::FillEncoderConfiguration(PayloadStringToCodecType("VP8"), 3, + &video_encoder_config); + for (auto& layer : video_encoder_config.simulcast_layers) { + layer.num_temporal_layers = 1; + layer.max_framerate = kDefaultFramerate; + } + video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps; + video_encoder_config.content_type = + VideoEncoderConfig::ContentType::kRealtimeVideo; + + video_encoder_config.simulcast_layers[0].active = true; + video_encoder_config.simulcast_layers[1].active = false; + video_encoder_config.simulcast_layers[2].active = false; + + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + + video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); + // Frame should not be dropped. + WaitForEncodedFrame(2); + + // Trigger HD "singlecast" + video_encoder_config.simulcast_layers[0].active = false; + video_encoder_config.simulcast_layers[1].active = false; + video_encoder_config.simulcast_layers[2].active = true; + + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + + video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); + // Frame should be dropped because of initial frame drop. + ExpectDroppedFrame(); + + // Expect the sink_wants to specify a scaled frame. + EXPECT_TRUE_WAIT( + video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenSVCLayersChange) { + const int kLowTargetBitrateBps = 400000; + // Set simulcast. 
+ ResetEncoder("VP9", 1, 1, 3, false); + fake_encoder_.SetQualityScaling(true); + const int kWidth = 1280; + const int kHeight = 720; + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); + // Frame should not be dropped. + WaitForEncodedFrame(1); + + // Trigger QVGA "singlecast" + // Update the config. + VideoEncoderConfig video_encoder_config; + test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1, + &video_encoder_config); + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + vp9_settings.numberOfSpatialLayers = 3; + // Since only one layer is active - automatic resize should be enabled. + vp9_settings.automaticResizeOn = true; + video_encoder_config.encoder_specific_settings = + new rtc::RefCountedObject( + vp9_settings); + video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps; + video_encoder_config.content_type = + VideoEncoderConfig::ContentType::kRealtimeVideo; + // Currently simulcast layers |active| flags are used to inidicate + // which SVC layers are active. + video_encoder_config.simulcast_layers.resize(3); + + video_encoder_config.simulcast_layers[0].active = true; + video_encoder_config.simulcast_layers[1].active = false; + video_encoder_config.simulcast_layers[2].active = false; + + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + + video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); + // Frame should not be dropped. 
+ WaitForEncodedFrame(2); + + // Trigger HD "singlecast" + video_encoder_config.simulcast_layers[0].active = false; + video_encoder_config.simulcast_layers[1].active = false; + video_encoder_config.simulcast_layers[2].active = true; + + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + + video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); + // Frame should be dropped because of initial frame drop. + ExpectDroppedFrame(); + + // Expect the sink_wants to specify a scaled frame. + EXPECT_TRUE_WAIT( + video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + InitialFrameDropActivatesWhenResolutionIncreases) { + const int kWidth = 640; + const int kHeight = 360; + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth / 2, kHeight / 2)); + // Frame should not be dropped. + WaitForEncodedFrame(1); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); + video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth / 2, kHeight / 2)); + // Frame should not be dropped, bitrate not too low for frame. + WaitForEncodedFrame(2); + + // Incoming resolution increases. + video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); + // Expect to drop this frame, bitrate too low for frame. + ExpectDroppedFrame(); + + // Expect the sink_wants to specify a scaled frame. 
+ EXPECT_TRUE_WAIT( + video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, InitialFrameDropIsNotReactivatedWhenAdaptingUp) { + const int kWidth = 640; + const int kHeight = 360; + // So that quality scaling doesn't happen by itself. + fake_encoder_.SetQp(kQpHigh); + + AdaptingFrameForwarder source(&time_controller_); + source.set_adaptation_enabled(true); + video_stream_encoder_->SetSource( + &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); + + int timestamp = 1; + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + WaitForEncodedFrame(timestamp); + timestamp += 9000; + // Long pause to disable all first BWE drop logic. + AdvanceTime(TimeDelta::Millis(1000)); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), + DataRate::BitsPerSec(kLowTargetBitrateBps), 0, 0, 0); + source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + // Not dropped frame, as initial frame drop is disabled by now. + WaitForEncodedFrame(timestamp); + timestamp += 9000; + AdvanceTime(TimeDelta::Millis(100)); + + // Quality adaptation down. + video_stream_encoder_->TriggerQualityLow(); + + // Adaptation has an effect. + EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight, + 5000); + + // Frame isn't dropped as initial frame dropper is disabled. + source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + WaitForEncodedFrame(timestamp); + timestamp += 9000; + AdvanceTime(TimeDelta::Millis(100)); + + // Quality adaptation up. + video_stream_encoder_->TriggerQualityHigh(); + + // Adaptation has an effect. 
+ EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count > kWidth * kHeight, + 5000); + + source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + // Frame should not be dropped, as initial framedropper is off. + WaitForEncodedFrame(timestamp); + video_stream_encoder_->Stop(); } @@ -3760,20 +4783,22 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) { // Reset encoder for field trials to take effect. VideoEncoderConfig config = video_encoder_config_.Copy(); config.max_bitrate_bps = kTargetBitrateBps; + DataRate max_bitrate = DataRate::BitsPerSec(config.max_bitrate_bps); ConfigureEncoder(std::move(config)); fake_encoder_.SetQp(kQpLow); // Enable MAINTAIN_FRAMERATE preference. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, DegradationPreference::MAINTAIN_FRAMERATE); // Start at low bitrate. const int kLowBitrateBps = 200000; - video_stream_encoder_->OnBitrateUpdated(DataRate::bps(kLowBitrateBps), - DataRate::bps(kLowBitrateBps), - DataRate::bps(kLowBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kLowBitrateBps), + DataRate::BitsPerSec(kLowBitrateBps), + DataRate::BitsPerSec(kLowBitrateBps), 0, 0, 0); // Expect first frame to be dropped and resolution to be limited. const int kWidth = 1280; @@ -3782,13 +4807,12 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) { int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); ExpectDroppedFrame(); - EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight); + EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight, + 5000); // Increase bitrate to encoder max. 
- video_stream_encoder_->OnBitrateUpdated(DataRate::bps(config.max_bitrate_bps), - DataRate::bps(config.max_bitrate_bps), - DataRate::bps(config.max_bitrate_bps), - 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + max_bitrate, max_bitrate, max_bitrate, 0, 0, 0); // Insert frames and advance |min_duration_ms|. for (size_t i = 1; i <= 10; i++) { @@ -3796,21 +4820,72 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) { source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); } - EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); - EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight); - - fake_clock_.AdvanceTime(TimeDelta::ms(2000)); - - // Insert frame should trigger high BW and release quality limitation. + EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); + EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight); + + AdvanceTime(TimeDelta::Millis(2000)); + + // Insert frame should trigger high BW and release quality limitation. + timestamp_ms += kFrameIntervalMs; + source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(timestamp_ms); + // The ramp-up code involves the adaptation queue, give it time to execute. + // TODO(hbos): Can we await an appropriate event instead? + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + + // Frame should not be adapted. 
+ timestamp_ms += kFrameIntervalMs; + source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(kWidth, kHeight); + EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + QualityScalerAdaptationsRemovedWhenQualityScalingDisabled) { + AdaptingFrameForwarder source(&time_controller_); + source.set_adaptation_enabled(true); + video_stream_encoder_->SetSource(&source, + DegradationPreference::MAINTAIN_FRAMERATE); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + fake_encoder_.SetQp(kQpHigh + 1); + const int kWidth = 1280; + const int kHeight = 720; + const int64_t kFrameIntervalMs = 100; + int64_t timestamp_ms = kFrameIntervalMs; + for (size_t i = 1; i <= 100; i++) { + timestamp_ms += kFrameIntervalMs; + source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(timestamp_ms); + } + // Wait for QualityScaler, which will wait for 2000*2.5 ms until checking QP + // for the first time. + // TODO(eshr): We should avoid these waits by using threads with simulated + // time. + EXPECT_TRUE_WAIT(stats_proxy_->GetStats().bw_limited_resolution, + 2000 * 2.5 * 2); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionMax(source.sink_wants()); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + EXPECT_THAT(source.sink_wants(), WantsMaxPixels(Lt(kWidth * kHeight))); + EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); - // Frame should not be adapted. 
- timestamp_ms += kFrameIntervalMs; - source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); - WaitForEncodedFrame(kWidth, kHeight); + // Disable Quality scaling by turning off scaler on the encoder and + // reconfiguring. + fake_encoder_.SetQualityScaling(false); + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + AdvanceTime(TimeDelta::Millis(0)); + // Since we turned off the quality scaler, the adaptations made by it are + // removed. + EXPECT_THAT(source.sink_wants(), ResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); video_stream_encoder_->Stop(); @@ -3820,22 +4895,23 @@ TEST_F(VideoStreamEncoderTest, ResolutionNotAdaptedForTooSmallFrame_MaintainFramerateMode) { const int kTooSmallWidth = 10; const int kTooSmallHeight = 10; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable MAINTAIN_FRAMERATE preference, no initial limitation. test::FrameForwarder source; video_stream_encoder_->SetSource( &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); - VerifyNoLimitation(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); // Trigger adapt down, too small frame, expect no change. 
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight)); WaitForEncodedFrame(1); video_stream_encoder_->TriggerCpuOveruse(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3847,15 +4923,16 @@ TEST_F(VideoStreamEncoderTest, const int kTooSmallWidth = 10; const int kTooSmallHeight = 10; const int kFpsLimit = 7; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. test::FrameForwarder source; video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); - VerifyNoLimitation(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); @@ -3863,7 +4940,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight)); WaitForEncodedFrame(1); video_stream_encoder_->TriggerQualityLow(); - VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit); + EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit))); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -3872,7 +4949,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(2, kTooSmallWidth, kTooSmallHeight)); WaitForEncodedFrame(2); 
video_stream_encoder_->TriggerQualityLow(); - VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit); + EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit))); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -3882,9 +4959,10 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, FailingInitEncodeDoesntCauseCrash) { fake_encoder_.ForceInitEncodeFailure(true); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); ResetEncoder("VP8", 2, 1, 1, false); const int kFrameWidth = 1280; const int kFrameHeight = 720; @@ -3897,9 +4975,10 @@ TEST_F(VideoStreamEncoderTest, FailingInitEncodeDoesntCauseCrash) { // TODO(sprang): Extend this with fps throttling and any "balanced" extensions. TEST_F(VideoStreamEncoderTest, AdaptsResolutionOnOveruse_MaintainFramerateMode) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); const int kFrameWidth = 1280; const int kFrameHeight = 720; @@ -3919,7 +4998,7 @@ TEST_F(VideoStreamEncoderTest, WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4); // Trigger CPU normal use, return to original resolution. 
- video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); video_source_.IncomingCapturedFrame( CreateFrame(3 * kFrameIntervalMs, kFrameWidth, kFrameHeight)); WaitForEncodedFrame(kFrameWidth, kFrameHeight); @@ -3932,14 +5011,15 @@ TEST_F(VideoStreamEncoderTest, const int kFrameWidth = 1280; const int kFrameHeight = 720; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_stream_encoder_->SetSource( &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); video_source_.set_adaptation_enabled(true); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); video_source_.IncomingCapturedFrame( CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); @@ -3992,7 +5072,7 @@ TEST_F(VideoStreamEncoderTest, kErrorMargin); // Go back up one step. - video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); num_frames_dropped = 0; for (int i = 0; i < max_framerate_; ++i) { timestamp_ms += kFrameIntervalMs; @@ -4008,7 +5088,7 @@ TEST_F(VideoStreamEncoderTest, kErrorMargin); // Go back up to original mode. - video_stream_encoder_->TriggerCpuNormalUsage(); + video_stream_encoder_->TriggerCpuUnderuse(); num_frames_dropped = 0; for (int i = 0; i < max_framerate_; ++i) { timestamp_ms += kFrameIntervalMs; @@ -4035,14 +5115,15 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) { // disable frame dropping and make testing easier. 
ResetEncoder("VP8", 1, 2, 1, true); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_stream_encoder_->SetSource( &video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION); video_source_.set_adaptation_enabled(true); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); // Trigger overuse as much as we can. rtc::VideoSinkWants last_wants; @@ -4057,14 +5138,15 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) { sink_.WaitForEncodedFrame(timestamp_ms); } timestamp_ms += kFrameIntervalMs; - fake_clock_.AdvanceTime(TimeDelta::ms(kFrameIntervalMs)); + AdvanceTime(TimeDelta::Millis(kFrameIntervalMs)); } // ...and then try to adapt again. video_stream_encoder_->TriggerCpuOveruse(); } while (video_source_.sink_wants().max_framerate_fps < last_wants.max_framerate_fps); - VerifyFpsEqResolutionMax(video_source_.sink_wants(), kMinFramerateFps); + EXPECT_THAT(video_source_.sink_wants(), + FpsMatchesResolutionMax(Eq(kMinFramerateFps))); video_stream_encoder_->Stop(); } @@ -4075,19 +5157,20 @@ TEST_F(VideoStreamEncoderTest, const int kHeight = 720; const int64_t kFrameIntervalMs = 150; int64_t timestamp_ms = kFrameIntervalMs; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. 
- AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4097,7 +5180,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4107,7 +5191,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4117,7 +5201,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants()); 
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4127,7 +5211,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4137,7 +5221,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4147,7 +5231,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4157,7 +5241,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; 
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); rtc::VideoSinkWants last_wants = source.sink_wants(); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); @@ -4168,17 +5252,17 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionEq(source.sink_wants(), last_wants); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants)); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(7, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger adapt down, expect expect increased fps (320x180@10fps). + // Trigger adapt up, expect expect increased fps (320x180@10fps). 
video_stream_encoder_->TriggerQualityHigh(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(8, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4188,7 +5272,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(9, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4198,7 +5282,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(10, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4208,7 +5292,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); 
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(11, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4218,7 +5302,9 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMax()); + EXPECT_EQ(source.sink_wants().max_pixel_count, + source.last_wants().max_pixel_count); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(12, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4228,7 +5314,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(13, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4238,15 +5324,15 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no change. 
video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes); video_stream_encoder_->Stop(); @@ -4257,19 +5343,20 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { const int kHeight = 720; const int64_t kFrameIntervalMs = 150; int64_t timestamp_ms = kFrameIntervalMs; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. - AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); @@ -4282,7 +5369,8 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); + EXPECT_THAT(source.sink_wants(), + FpsMaxResolutionMatches(Lt(kWidth * kHeight))); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); 
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); @@ -4295,7 +5383,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); @@ -4308,59 +5396,77 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants()); - EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); + EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); + EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger cpu adapt up, expect increased fps (640x360@30fps). - video_stream_encoder_->TriggerCpuNormalUsage(); + // Trigger cpu adapt up, expect no change since QP is most limited. + { + // Store current sink wants since we expect no change and if there is no + // change then last_wants() is not updated. 
+ auto previous_sink_wants = source.sink_wants(); + video_stream_encoder_->TriggerCpuUnderuse(); + timestamp_ms += kFrameIntervalMs; + source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants)); + EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); + EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + } + + // Trigger quality adapt up, expect increased fps (640x360@30fps). + video_stream_encoder_->TriggerQualityHigh(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); - EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); + EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger quality adapt up, expect upscaled resolution (960x540@30fps). + // Trigger quality adapt up and Cpu adapt up since both are most limited, + // expect increased resolution (960x540@30fps). 
video_stream_encoder_->TriggerQualityHigh(); + video_stream_encoder_->TriggerCpuUnderuse(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); - EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); + EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger cpu adapt up, expect no restriction (1280x720fps@30fps). - video_stream_encoder_->TriggerCpuNormalUsage(); + // Trigger quality adapt up and Cpu adapt up since both are most limited, + // expect no restriction (1280x720fps@30fps). 
+ video_stream_encoder_->TriggerQualityHigh(); + video_stream_encoder_->TriggerCpuUnderuse(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no change. video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); video_stream_encoder_->Stop(); } @@ -4372,19 +5478,20 @@ TEST_F(VideoStreamEncoderTest, const int kFpsLimit = 15; const int64_t kFrameIntervalMs = 150; int64_t timestamp_ms = kFrameIntervalMs; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Enable BALANCED preference, no initial limitation. 
- AdaptingFrameForwarder source; + AdaptingFrameForwarder source(&time_controller_); source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); @@ -4397,7 +5504,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit); + EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit))); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); @@ -4410,45 +5517,60 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); - EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); + EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger cpu adapt up, expect 
upscaled resolution (640x360@15fps). - video_stream_encoder_->TriggerCpuNormalUsage(); + // Trigger cpu adapt up, expect no change because quality is most limited. + { + auto previous_sink_wants = source.sink_wants(); + // Store current sink wants since we expect no change ind if there is no + // change then last__wants() is not updated. + video_stream_encoder_->TriggerCpuUnderuse(); + timestamp_ms += kFrameIntervalMs; + source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants)); + EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); + EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + } + + // Trigger quality adapt up, expect upscaled resolution (640x360@15fps). + video_stream_encoder_->TriggerQualityHigh(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants()); + EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); - EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); - EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate); + EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); + EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); - // Trigger quality adapt up, expect increased fps (640x360@30fps). + // Trigger quality and cpu adapt up, expect increased fps (640x360@30fps). 
video_stream_encoder_->TriggerQualityHigh(); + video_stream_encoder_->TriggerCpuUnderuse(); timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no change. video_stream_encoder_->TriggerQualityHigh(); - VerifyFpsMaxResolutionMax(source.sink_wants()); + EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); - EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); + EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); video_stream_encoder_->Stop(); } @@ -4462,16 +5584,17 @@ TEST_F(VideoStreamEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) { const int kAdaptedFrameHeight = 808; const int kFramerate = 24; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Trigger reconfigure encoder (without resetting the entire instance). 
VideoEncoderConfig video_encoder_config; - video_encoder_config.codec_type = kVideoCodecVP8; + test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); + video_encoder_config.simulcast_layers[0].max_framerate = kFramerate; video_encoder_config.max_bitrate_bps = kTargetBitrateBps; - video_encoder_config.number_of_streams = 1; video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(1, kFramerate); + new rtc::RefCountedObject(); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); @@ -4497,11 +5620,12 @@ TEST_F(VideoStreamEncoderTest, PeriodicallyUpdatesChannelParameters) { const int kLowFps = 2; const int kHighFps = 30; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kLowFps; // Insert 2 seconds of 2fps video. @@ -4513,9 +5637,10 @@ TEST_F(VideoStreamEncoderTest, PeriodicallyUpdatesChannelParameters) { } // Make sure encoder is updated with new target. 
- video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame( CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); WaitForEncodedFrame(timestamp_ms); @@ -4549,34 +5674,37 @@ TEST_F(VideoStreamEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) { const int kFrameWidth = 1280; const int kFrameHeight = 720; const int kTargetBitrateBps = 1000000; - - MockBitrateObserver bitrate_observer; - video_stream_encoder_->SetBitrateAllocationObserver(&bitrate_observer); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + ResetEncoder("FAKE", 1, 1, 1, false, + VideoStreamEncoderSettings::BitrateAllocationCallbackType:: + kVideoBitrateAllocation); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); // Insert a first video frame, causes another bitrate update. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; - EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(_)).Times(1); + int64_t timestamp_ms = CurrentTimeMs(); video_source_.IncomingCapturedFrame( CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); WaitForEncodedFrame(timestamp_ms); + EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1); // Next, simulate video suspension due to pacer queue overrun. 
- video_stream_encoder_->OnBitrateUpdated(DataRate::bps(0), DataRate::bps(0), - DataRate::bps(0), 0, 1); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), + 0, 1, 0); // Skip ahead until a new periodic parameter update should have occured. timestamp_ms += kProcessIntervalMs; - fake_clock_.AdvanceTime(TimeDelta::ms(kProcessIntervalMs)); + AdvanceTime(TimeDelta::Millis(kProcessIntervalMs)); - // Bitrate observer should not be called. - EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(_)).Times(0); + // No more allocations has been made. video_source_.IncomingCapturedFrame( CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight)); ExpectDroppedFrame(); + EXPECT_EQ(sink_.number_of_bitrate_allocations(), 1); video_stream_encoder_->Stop(); } @@ -4586,9 +5714,10 @@ TEST_F(VideoStreamEncoderTest, const int kFrameWidth = 1280; const int kFrameHeight = 720; const CpuOveruseOptions default_options; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_source_.IncomingCapturedFrame( CreateFrame(1, kFrameWidth, kFrameHeight)); WaitForEncodedFrame(1); @@ -4610,9 +5739,10 @@ TEST_F(VideoStreamEncoderTest, hardware_options.high_encode_usage_threshold_percent = 200; fake_encoder_.SetIsHardwareAccelerated(true); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); 
video_source_.IncomingCapturedFrame( CreateFrame(1, kFrameWidth, kFrameHeight)); WaitForEncodedFrame(1); @@ -4632,11 +5762,12 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { const int kTargetBitrateBps = 120000; const int kNumFramesInRun = kFps * 5; // Runs of five seconds. - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kFps; // Insert 3 seconds of video, verify number of drops with normal bitrate. @@ -4662,16 +5793,26 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { // of video, verify number of drops. Rate needs to be slightly changed in // order to force the rate to be reconfigured. double overshoot_factor = 2.0; - if (RateControlSettings::ParseFromFieldTrials().UseEncoderBitrateAdjuster()) { + const RateControlSettings trials = + RateControlSettings::ParseFromFieldTrials(); + if (trials.UseEncoderBitrateAdjuster()) { // With bitrate adjuster, when need to overshoot even more to trigger - // frame dropping. - overshoot_factor *= 2; + // frame dropping since the adjuter will try to just lower the target + // bitrate rather than drop frames. If network headroom can be used, it + // doesn't push back as hard so we don't need quite as much overshoot. + // These numbers are unfortunately a bit magical but there's not trivial + // way to algebraically infer them. 
+ if (trials.BitrateAdjusterCanUseNetworkHeadroom()) { + overshoot_factor = 2.4; + } else { + overshoot_factor = 4.0; + } } fake_encoder_.SimulateOvershoot(overshoot_factor); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps + 1000), - DataRate::bps(kTargetBitrateBps + 1000), - DataRate::bps(kTargetBitrateBps + 1000), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps + 1000), + DataRate::BitsPerSec(kTargetBitrateBps + 1000), + DataRate::BitsPerSec(kTargetBitrateBps + 1000), 0, 0, 0); num_dropped = 0; for (int i = 0; i < kNumFramesInRun; ++i) { video_source_.IncomingCapturedFrame( @@ -4683,9 +5824,10 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { timestamp_ms += 1000 / kFps; } - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Target framerate should be still be near the expected target, despite // the frame drops. @@ -4705,11 +5847,12 @@ TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) { ASSERT_GT(max_framerate_, kActualInputFps); - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kActualInputFps; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Insert 3 seconds of video, with an input fps lower than configured max. 
for (int i = 0; i < kActualInputFps * 3; ++i) { @@ -4725,11 +5868,12 @@ TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) { video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, AccumulatesUpdateRectOnDroppedFrames) { +TEST_F(VideoStreamEncoderBlockedTest, AccumulatesUpdateRectOnDroppedFrames) { VideoFrame::UpdateRect rect; - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); fake_encoder_.BlockNextEncode(); video_source_.IncomingCapturedFrame( @@ -4772,9 +5916,10 @@ TEST_F(VideoStreamEncoderTest, AccumulatesUpdateRectOnDroppedFrames) { } TEST_F(VideoStreamEncoderTest, SetsFrameTypes) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // First frame is always keyframe. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); @@ -4804,10 +5949,10 @@ TEST_F(VideoStreamEncoderTest, SetsFrameTypes) { TEST_F(VideoStreamEncoderTest, SetsFrameTypesSimulcast) { // Setup simulcast with three streams. 
ResetEncoder("VP8", 3, 1, 1, false); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kSimulcastTargetBitrateBps), - DataRate::bps(kSimulcastTargetBitrateBps), - DataRate::bps(kSimulcastTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), 0, 0, 0); // Wait for all three layers before triggering event. sink_.SetNumExpectedLayers(3); @@ -4847,9 +5992,10 @@ TEST_F(VideoStreamEncoderTest, RequestKeyframeInternalSource) { // Configure internal source factory and setup test again. encoder_factory_.SetHasInternalSource(true); ResetEncoder("VP8", 1, 1, 1, false); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Call encoder directly, simulating internal source where encoded frame // callback in VideoStreamEncoder is called despite no OnFrame(). @@ -4885,9 +6031,10 @@ TEST_F(VideoStreamEncoderTest, AdjustsTimestampInternalSource) { // Configure internal source factory and setup test again. 
encoder_factory_.SetHasInternalSource(true); ResetEncoder("VP8", 1, 1, 1, false); - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); int64_t timestamp = 1; EncodedImage image; @@ -4904,70 +6051,58 @@ TEST_F(VideoStreamEncoderTest, AdjustsTimestampInternalSource) { // Frame is captured kEncodeFinishDelayMs before it's encoded, so restored // capture timestamp should be kEncodeFinishDelayMs in the past. EXPECT_EQ(sink_.GetLastCaptureTimeMs(), - fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec - - kEncodeFinishDelayMs); + CurrentTimeMs() - kEncodeFinishDelayMs); video_stream_encoder_->Stop(); } TEST_F(VideoStreamEncoderTest, DoesNotRewriteH264BitstreamWithOptimalSps) { - // Configure internal source factory and setup test again. - encoder_factory_.SetHasInternalSource(true); + // SPS contains VUI with restrictions on the maximum number of reordered + // pictures, there is no need to rewrite the bitstream to enable faster + // decoding. 
ResetEncoder("H264", 1, 1, 1, false); - EncodedImage image(optimal_sps, sizeof(optimal_sps), sizeof(optimal_sps)); - image._frameType = VideoFrameType::kVideoFrameKey; - - CodecSpecificInfo codec_specific_info; - codec_specific_info.codecType = kVideoCodecH264; + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); - RTPFragmentationHeader fragmentation; - fragmentation.VerifyAndAllocateFragmentationHeader(1); - fragmentation.fragmentationOffset[0] = 4; - fragmentation.fragmentationLength[0] = sizeof(optimal_sps) - 4; + fake_encoder_.SetEncodedImageData( + EncodedImageBuffer::Create(optimal_sps, sizeof(optimal_sps))); - fake_encoder_.InjectEncodedImage(image, &codec_specific_info, &fragmentation); - EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeoutMs)); + video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); + WaitForEncodedFrame(1); EXPECT_THAT(sink_.GetLastEncodedImageData(), testing::ElementsAreArray(optimal_sps)); - RTPFragmentationHeader last_fragmentation = sink_.GetLastFragmentation(); - ASSERT_THAT(last_fragmentation.fragmentationVectorSize, 1U); - EXPECT_EQ(last_fragmentation.fragmentationOffset[0], 4U); - EXPECT_EQ(last_fragmentation.fragmentationLength[0], sizeof(optimal_sps) - 4); video_stream_encoder_->Stop(); } TEST_F(VideoStreamEncoderTest, RewritesH264BitstreamWithNonOptimalSps) { + // SPS does not contain VUI, the bitstream is will be rewritten with added + // VUI with restrictions on the maximum number of reordered pictures to + // enable faster decoding. uint8_t original_sps[] = {0, 0, 0, 1, H264::NaluType::kSps, 0x00, 0x00, 0x03, 0x03, 0xF4, 0x05, 0x03, 0xC7, 0xC0}; - - // Configure internal source factory and setup test again. 
- encoder_factory_.SetHasInternalSource(true); ResetEncoder("H264", 1, 1, 1, false); - EncodedImage image(original_sps, sizeof(original_sps), sizeof(original_sps)); - image._frameType = VideoFrameType::kVideoFrameKey; - - CodecSpecificInfo codec_specific_info; - codec_specific_info.codecType = kVideoCodecH264; + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); - RTPFragmentationHeader fragmentation; - fragmentation.VerifyAndAllocateFragmentationHeader(1); - fragmentation.fragmentationOffset[0] = 4; - fragmentation.fragmentationLength[0] = sizeof(original_sps) - 4; + fake_encoder_.SetEncodedImageData( + EncodedImageBuffer::Create(original_sps, sizeof(original_sps))); - fake_encoder_.InjectEncodedImage(image, &codec_specific_info, &fragmentation); - EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeoutMs)); + video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); + WaitForEncodedFrame(1); EXPECT_THAT(sink_.GetLastEncodedImageData(), testing::ElementsAreArray(optimal_sps)); - RTPFragmentationHeader last_fragmentation = sink_.GetLastFragmentation(); - ASSERT_THAT(last_fragmentation.fragmentationVectorSize, 1U); - EXPECT_EQ(last_fragmentation.fragmentationOffset[0], 4U); - EXPECT_EQ(last_fragmentation.fragmentationLength[0], sizeof(optimal_sps) - 4); video_stream_encoder_->Stop(); } @@ -4977,14 +6112,15 @@ TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) { const int kFrameHeight = 720; const int kTargetBitrateBps = 300000; // To low for HD resolution. 
- video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); // Insert a first video frame. It should be dropped because of downscale in // resolution. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -4992,7 +6128,7 @@ TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) { ExpectDroppedFrame(); // Second frame is downscaled. - timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + timestamp_ms = CurrentTimeMs(); frame = CreateFrame(timestamp_ms, kFrameWidth / 2, kFrameHeight / 2); frame.set_rotation(kVideoRotation_90); video_source_.IncomingCapturedFrame(frame); @@ -5001,7 +6137,7 @@ TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) { sink_.CheckLastFrameRotationMatches(kVideoRotation_90); // Insert another frame, also downscaled. - timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + timestamp_ms = CurrentTimeMs(); frame = CreateFrame(timestamp_ms, kFrameWidth / 2, kFrameHeight / 2); frame.set_rotation(kVideoRotation_180); video_source_.IncomingCapturedFrame(frame); @@ -5017,15 +6153,16 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) { const int kFrameHeight = 180; // Initial rate. 
- video_stream_encoder_->OnBitrateUpdated( - /*target_bitrate=*/DataRate::kbps(300), - /*stable_target_bitrate=*/DataRate::kbps(300), - /*link_allocation=*/DataRate::kbps(300), + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(300), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(300), + /*link_allocation=*/DataRate::KilobitsPerSec(300), /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); // Insert a first video frame so that encoder gets configured. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -5033,20 +6170,22 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) { // Set a target rate below the minimum allowed by the codec settings. VideoCodec codec_config = fake_encoder_.codec_config(); - DataRate min_rate = DataRate::kbps(codec_config.minBitrate); - DataRate target_rate = min_rate - DataRate::kbps(1); - video_stream_encoder_->OnBitrateUpdated( + DataRate min_rate = DataRate::KilobitsPerSec(codec_config.minBitrate); + DataRate target_rate = min_rate - DataRate::KilobitsPerSec(1); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( /*target_bitrate=*/target_rate, /*stable_target_bitrate=*/target_rate, /*link_allocation=*/target_rate, /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); // Target bitrate and bandwidth allocation should both be capped at min_rate. 
auto rate_settings = fake_encoder_.GetAndResetLastRateControlSettings(); ASSERT_TRUE(rate_settings.has_value()); - DataRate allocation_sum = DataRate::bps(rate_settings->bitrate.get_sum_bps()); + DataRate allocation_sum = + DataRate::BitsPerSec(rate_settings->bitrate.get_sum_bps()); EXPECT_EQ(min_rate, allocation_sum); EXPECT_EQ(rate_settings->bandwidth_allocation, min_rate); @@ -5054,11 +6193,12 @@ TEST_F(VideoStreamEncoderTest, BandwidthAllocationLowerBound) { } TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) { - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, nullptr)); WaitForEncodedFrame(1); @@ -5095,8 +6235,12 @@ TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) { } struct MockEncoderSwitchRequestCallback : public EncoderSwitchRequestCallback { - MOCK_METHOD0(RequestEncoderFallback, void()); - MOCK_METHOD1(RequestEncoderSwitch, void(const Config& conf)); + MOCK_METHOD(void, RequestEncoderFallback, (), (override)); + MOCK_METHOD(void, RequestEncoderSwitch, (const Config& conf), (override)); + MOCK_METHOD(void, + RequestEncoderSwitch, + (const webrtc::SdpVideoFormat& format), + (override)); }; TEST_F(VideoStreamEncoderTest, BitrateEncoderSwitch) { @@ -5120,17 +6264,54 @@ TEST_F(VideoStreamEncoderTest, BitrateEncoderSwitch) { CreateFrame(kDontCare, kDontCare, kDontCare)); using Config = EncoderSwitchRequestCallback::Config; - EXPECT_CALL(switch_callback, - 
RequestEncoderSwitch(AllOf(Field(&Config::codec_name, "AV1"), + EXPECT_CALL(switch_callback, RequestEncoderSwitch(Matcher( + AllOf(Field(&Config::codec_name, "AV1"), Field(&Config::param, "ping"), - Field(&Config::value, "pong")))); + Field(&Config::value, "pong"))))); - video_stream_encoder_->OnBitrateUpdated( - /*target_bitrate=*/DataRate::kbps(50), - /*stable_target_bitrate=*/DataRate::kbps(kDontCare), - /*link_allocation=*/DataRate::kbps(kDontCare), + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(50), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(kDontCare), + /*link_allocation=*/DataRate::KilobitsPerSec(kDontCare), + /*fraction_lost=*/0, + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); + AdvanceTime(TimeDelta::Millis(0)); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, VideoSuspendedNoEncoderSwitch) { + constexpr int kDontCare = 100; + + StrictMock switch_callback; + video_send_config_.encoder_settings.encoder_switch_request_callback = + &switch_callback; + VideoEncoderConfig encoder_config = video_encoder_config_.Copy(); + encoder_config.codec_type = kVideoCodecVP8; + webrtc::test::ScopedFieldTrials field_trial( + "WebRTC-NetworkCondition-EncoderSwitch/" + "codec_thresholds:VP8;100;-1|H264;-1;30000," + "to_codec:AV1,to_param:ping,to_value:pong,window:2.0/"); + + // Reset encoder for new configuration to take effect. + ConfigureEncoder(std::move(encoder_config)); + + // Send one frame to trigger ReconfigureEncoder. 
+ video_source_.IncomingCapturedFrame( + CreateFrame(kDontCare, kDontCare, kDontCare)); + + using Config = EncoderSwitchRequestCallback::Config; + EXPECT_CALL(switch_callback, RequestEncoderSwitch(Matcher(_))) + .Times(0); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(0), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(0), + /*link_allocation=*/DataRate::KilobitsPerSec(kDontCare), /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); video_stream_encoder_->Stop(); } @@ -5156,22 +6337,24 @@ TEST_F(VideoStreamEncoderTest, ResolutionEncoderSwitch) { // The VideoStreamEncoder needs some bitrate before it can start encoding, // setting some bitrate so that subsequent calls to WaitForEncodedFrame does // not fail. - video_stream_encoder_->OnBitrateUpdated( - /*target_bitrate=*/DataRate::kbps(kSufficientBitrateToNotDrop), - /*stable_target_bitrate=*/DataRate::kbps(kSufficientBitrateToNotDrop), - /*link_allocation=*/DataRate::kbps(kSufficientBitrateToNotDrop), + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*stable_target_bitrate=*/ + DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*link_allocation=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); // Send one frame to trigger ReconfigureEncoder. 
video_source_.IncomingCapturedFrame(CreateFrame(1, kHighRes, kHighRes)); WaitForEncodedFrame(1); using Config = EncoderSwitchRequestCallback::Config; - EXPECT_CALL(switch_callback, - RequestEncoderSwitch(AllOf(Field(&Config::codec_name, "AV1"), + EXPECT_CALL(switch_callback, RequestEncoderSwitch(Matcher( + AllOf(Field(&Config::codec_name, "AV1"), Field(&Config::param, "ping"), - Field(&Config::value, "pong")))); + Field(&Config::value, "pong"))))); video_source_.IncomingCapturedFrame(CreateFrame(2, kLowRes, kLowRes)); WaitForEncodedFrame(2); @@ -5179,22 +6362,133 @@ TEST_F(VideoStreamEncoderTest, ResolutionEncoderSwitch) { video_stream_encoder_->Stop(); } +TEST_F(VideoStreamEncoderTest, EncoderSelectorCurrentEncoderIsSignaled) { + constexpr int kDontCare = 100; + StrictMock encoder_selector; + auto encoder_factory = std::make_unique( + &fake_encoder_, &encoder_selector); + video_send_config_.encoder_settings.encoder_factory = encoder_factory.get(); + + // Reset encoder for new configuration to take effect. + ConfigureEncoder(video_encoder_config_.Copy()); + + EXPECT_CALL(encoder_selector, OnCurrentEncoder(_)); + + video_source_.IncomingCapturedFrame( + CreateFrame(kDontCare, kDontCare, kDontCare)); + video_stream_encoder_->Stop(); + + // The encoders produces by the VideoEncoderProxyFactory have a pointer back + // to it's factory, so in order for the encoder instance in the + // |video_stream_encoder_| to be destroyed before the |encoder_factory| we + // reset the |video_stream_encoder_| here. 
+ video_stream_encoder_.reset(); +} + +TEST_F(VideoStreamEncoderTest, EncoderSelectorBitrateSwitch) { + constexpr int kDontCare = 100; + + NiceMock encoder_selector; + StrictMock switch_callback; + video_send_config_.encoder_settings.encoder_switch_request_callback = + &switch_callback; + auto encoder_factory = std::make_unique( + &fake_encoder_, &encoder_selector); + video_send_config_.encoder_settings.encoder_factory = encoder_factory.get(); + + // Reset encoder for new configuration to take effect. + ConfigureEncoder(video_encoder_config_.Copy()); + + ON_CALL(encoder_selector, OnAvailableBitrate(_)) + .WillByDefault(Return(SdpVideoFormat("AV1"))); + EXPECT_CALL(switch_callback, + RequestEncoderSwitch(Matcher( + Field(&SdpVideoFormat::name, "AV1")))); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(50), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(kDontCare), + /*link_allocation=*/DataRate::KilobitsPerSec(kDontCare), + /*fraction_lost=*/0, + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); + AdvanceTime(TimeDelta::Millis(0)); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, EncoderSelectorBrokenEncoderSwitch) { + constexpr int kSufficientBitrateToNotDrop = 1000; + constexpr int kDontCare = 100; + + NiceMock video_encoder; + NiceMock encoder_selector; + StrictMock switch_callback; + video_send_config_.encoder_settings.encoder_switch_request_callback = + &switch_callback; + auto encoder_factory = std::make_unique( + &video_encoder, &encoder_selector); + video_send_config_.encoder_settings.encoder_factory = encoder_factory.get(); + + // Reset encoder for new configuration to take effect. + ConfigureEncoder(video_encoder_config_.Copy()); + + // The VideoStreamEncoder needs some bitrate before it can start encoding, + // setting some bitrate so that subsequent calls to WaitForEncodedFrame does + // not fail. 
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*stable_target_bitrate=*/ + DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*link_allocation=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*fraction_lost=*/0, + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); + + ON_CALL(video_encoder, Encode(_, _)) + .WillByDefault(Return(WEBRTC_VIDEO_CODEC_ENCODER_FAILURE)); + ON_CALL(encoder_selector, OnEncoderBroken()) + .WillByDefault(Return(SdpVideoFormat("AV2"))); + + rtc::Event encode_attempted; + EXPECT_CALL(switch_callback, + RequestEncoderSwitch(Matcher(_))) + .WillOnce([&encode_attempted](const SdpVideoFormat& format) { + EXPECT_EQ(format.name, "AV2"); + encode_attempted.Set(); + }); + + video_source_.IncomingCapturedFrame(CreateFrame(1, kDontCare, kDontCare)); + encode_attempted.Wait(3000); + + AdvanceTime(TimeDelta::Millis(0)); + + video_stream_encoder_->Stop(); + + // The encoders produces by the VideoEncoderProxyFactory have a pointer back + // to it's factory, so in order for the encoder instance in the + // |video_stream_encoder_| to be destroyed before the |encoder_factory| we + // reset the |video_stream_encoder_| here. + video_stream_encoder_.reset(); +} + TEST_F(VideoStreamEncoderTest, AllocationPropagatedToEncoderWhenTargetRateChanged) { const int kFrameWidth = 320; const int kFrameHeight = 180; // Set initial rate. - auto rate = DataRate::kbps(100); - video_stream_encoder_->OnBitrateUpdated( + auto rate = DataRate::KilobitsPerSec(100); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( /*target_bitrate=*/rate, /*stable_target_bitrate=*/rate, /*link_allocation=*/rate, /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); // Insert a first video frame so that encoder gets configured. 
- int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -5202,13 +6496,14 @@ TEST_F(VideoStreamEncoderTest, EXPECT_EQ(1, fake_encoder_.GetNumSetRates()); // Change of target bitrate propagates to the encoder. - auto new_stable_rate = rate - DataRate::kbps(5); - video_stream_encoder_->OnBitrateUpdated( + auto new_stable_rate = rate - DataRate::KilobitsPerSec(5); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( /*target_bitrate=*/new_stable_rate, /*stable_target_bitrate=*/new_stable_rate, /*link_allocation=*/rate, /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_EQ(2, fake_encoder_.GetNumSetRates()); video_stream_encoder_->Stop(); @@ -5220,16 +6515,17 @@ TEST_F(VideoStreamEncoderTest, const int kFrameHeight = 180; // Set initial rate. - auto rate = DataRate::kbps(100); - video_stream_encoder_->OnBitrateUpdated( + auto rate = DataRate::KilobitsPerSec(100); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( /*target_bitrate=*/rate, /*stable_target_bitrate=*/rate, /*link_allocation=*/rate, /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); // Insert a first video frame so that encoder gets configured. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); VideoFrame frame = CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight); frame.set_rotation(kVideoRotation_270); video_source_.IncomingCapturedFrame(frame); @@ -5238,13 +6534,14 @@ TEST_F(VideoStreamEncoderTest, // Set a higher target rate without changing the link_allocation. Should not // reset encoder's rate. 
- auto new_stable_rate = rate - DataRate::kbps(5); - video_stream_encoder_->OnBitrateUpdated( + auto new_stable_rate = rate - DataRate::KilobitsPerSec(5); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( /*target_bitrate=*/rate, /*stable_target_bitrate=*/new_stable_rate, /*link_allocation=*/rate, /*fraction_lost=*/0, - /*rtt_ms=*/0); + /*rtt_ms=*/0, + /*cwnd_reduce_ratio=*/0); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_EQ(1, fake_encoder_.GetNumSetRates()); video_stream_encoder_->Stop(); @@ -5265,20 +6562,20 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) { video_source_.set_adaptation_enabled(true); // BALANCED degradation preference is required for this feature. - video_stream_encoder_->OnBitrateUpdated( - DataRate::bps(kTargetBitrateBps), DataRate::bps(kTargetBitrateBps), - DataRate::bps(kTargetBitrateBps), 0, 0); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); video_stream_encoder_->SetSource(&video_source_, webrtc::DegradationPreference::BALANCED); - VerifyNoLimitation(video_source_.sink_wants()); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); VideoFrame frame = CreateFrame(1, kWidth, kHeight); frame.set_update_rect(VideoFrame::UpdateRect{0, 0, kWidth, kHeight}); // Pass enough frames with the full update to trigger animation detection. 
for (int i = 0; i < kNumFrames; ++i) { - int64_t timestamp_ms = - fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); frame.set_ntp_time_ms(timestamp_ms); frame.set_timestamp_us(timestamp_ms * 1000); video_source_.IncomingCapturedFrame(frame); @@ -5289,11 +6586,11 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) { rtc::VideoSinkWants expected; expected.max_framerate_fps = kFramerateFps; expected.max_pixel_count = 1280 * 720 + 1; - VerifyFpsEqResolutionLt(video_source_.sink_wants(), expected); + EXPECT_THAT(video_source_.sink_wants(), FpsEqResolutionLt(expected)); // Pass one frame with no known update. // Resolution cap should be removed immediately. - int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec; + int64_t timestamp_ms = CurrentTimeMs(); frame.set_ntp_time_ms(timestamp_ms); frame.set_timestamp_us(timestamp_ms * 1000); frame.clear_update_rect(); @@ -5302,8 +6599,128 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) { WaitForEncodedFrame(timestamp_ms); // Resolution should be unlimited now. - VerifyFpsEqResolutionMax(video_source_.sink_wants(), kFramerateFps); + EXPECT_THAT(video_source_.sink_wants(), + FpsMatchesResolutionMax(Eq(kFramerateFps))); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, ConfiguresVp9SvcAtOddResolutions) { + const int kWidth = 720; // 540p adapted down. + const int kHeight = 405; + const int kNumFrames = 3; + // Works on screenshare mode. 
+ ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1, + /*num_spatial_layers=*/2, /*screenshare=*/true); + + video_source_.set_adaptation_enabled(true); + + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), + DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0); + + VideoFrame frame = CreateFrame(1, kWidth, kHeight); + + // Pass enough frames with the full update to trigger animation detection. + for (int i = 0; i < kNumFrames; ++i) { + int64_t timestamp_ms = CurrentTimeMs(); + frame.set_ntp_time_ms(timestamp_ms); + frame.set_timestamp_us(timestamp_ms * 1000); + video_source_.IncomingCapturedFrame(frame); + WaitForEncodedFrame(timestamp_ms); + } + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, EncoderResetAccordingToParameterChange) { + const float downscale_factors[] = {4.0, 2.0, 1.0}; + const int number_layers = + sizeof(downscale_factors) / sizeof(downscale_factors[0]); + VideoEncoderConfig config; + test::FillEncoderConfiguration(kVideoCodecVP8, number_layers, &config); + for (int i = 0; i < number_layers; ++i) { + config.simulcast_layers[i].scale_resolution_down_by = downscale_factors[i]; + config.simulcast_layers[i].active = true; + } + config.video_stream_factory = + new rtc::RefCountedObject( + "VP8", /*max qp*/ 56, /*screencast*/ false, + /*screenshare enabled*/ false); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), + DataRate::BitsPerSec(kSimulcastTargetBitrateBps), 0, 0, 0); + + // First initialization. + // Encoder should be initialized. Next frame should be key frame. 
+ video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); + sink_.SetNumExpectedLayers(number_layers); + int64_t timestamp_ms = kFrameIntervalMs; + video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_EQ(1, fake_encoder_.GetNumEncoderInitializations()); + EXPECT_THAT(fake_encoder_.LastFrameTypes(), + ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameKey})); + + // Disable top layer. + // Encoder shouldn't be re-initialized. Next frame should be delta frame. + config.simulcast_layers[number_layers - 1].active = false; + video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); + sink_.SetNumExpectedLayers(number_layers - 1); + timestamp_ms += kFrameIntervalMs; + video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_EQ(1, fake_encoder_.GetNumEncoderInitializations()); + EXPECT_THAT(fake_encoder_.LastFrameTypes(), + ::testing::ElementsAreArray({VideoFrameType::kVideoFrameDelta, + VideoFrameType::kVideoFrameDelta, + VideoFrameType::kVideoFrameDelta})); + + // Re-enable top layer. + // Encoder should be re-initialized. Next frame should be key frame. + config.simulcast_layers[number_layers - 1].active = true; + video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); + sink_.SetNumExpectedLayers(number_layers); + timestamp_ms += kFrameIntervalMs; + video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_EQ(2, fake_encoder_.GetNumEncoderInitializations()); + EXPECT_THAT(fake_encoder_.LastFrameTypes(), + ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameKey})); + + // Top layer max rate change. + // Encoder shouldn't be re-initialized. Next frame should be delta frame. 
+ config.simulcast_layers[number_layers - 1].max_bitrate_bps -= 100; + video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); + sink_.SetNumExpectedLayers(number_layers); + timestamp_ms += kFrameIntervalMs; + video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_EQ(2, fake_encoder_.GetNumEncoderInitializations()); + EXPECT_THAT(fake_encoder_.LastFrameTypes(), + ::testing::ElementsAreArray({VideoFrameType::kVideoFrameDelta, + VideoFrameType::kVideoFrameDelta, + VideoFrameType::kVideoFrameDelta})); + // Top layer resolution change. + // Encoder should be re-initialized. Next frame should be key frame. + config.simulcast_layers[number_layers - 1].scale_resolution_down_by += 0.1; + video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); + sink_.SetNumExpectedLayers(number_layers); + timestamp_ms += kFrameIntervalMs; + video_source_.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); + WaitForEncodedFrame(timestamp_ms); + EXPECT_EQ(3, fake_encoder_.GetNumEncoderInitializations()); + EXPECT_THAT(fake_encoder_.LastFrameTypes(), + ::testing::ElementsAreArray({VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameKey})); video_stream_encoder_->Stop(); } diff --git a/webrtc.gni b/webrtc.gni index 64a170c23f..4272e441f0 100644 --- a/webrtc.gni +++ b/webrtc.gni @@ -85,9 +85,6 @@ declare_args() { # library that comes with WebRTC (i.e. rtc_build_ssl == 0). rtc_ssl_root = "" - # Selects fixed-point code where possible. - rtc_prefer_fixed_point = false - # Enable when an external authentication mechanism is used for performing # packet authentication for RTP packets instead of libsrtp. rtc_enable_external_auth = build_with_chromium @@ -96,6 +93,9 @@ declare_args() { # should be generated. apm_debug_dump = false + # Selects whether the audio processing module should be excluded. 
+ rtc_exclude_audio_processing_module = false + # Set this to true to enable BWE test logging. rtc_enable_bwe_test_logging = false @@ -112,7 +112,7 @@ declare_args() { # By default it's only enabled on desktop Linux (excludes ChromeOS) and # only when using the sysroot as PipeWire is not available in older and # supported Ubuntu and Debian distributions. - rtc_use_pipewire = is_desktop_linux && use_sysroot + rtc_use_pipewire = is_linux && use_sysroot # Set this to link PipeWire directly instead of using the dlopen. rtc_link_pipewire = false @@ -120,8 +120,8 @@ declare_args() { # Enable to use the Mozilla internal settings. build_with_mozilla = false - # Enable use of Android AAudio which requires Android SDK 26 or above and - # NDK r16 or above. + # Experimental: enable use of Android AAudio which requires Android SDK 26 or above + # and NDK r16 or above. rtc_enable_android_aaudio = false # Set to "func", "block", "edge" for coverage generation. @@ -131,6 +131,8 @@ declare_args() { # See http://clang.llvm.org/docs/SanitizerCoverage.html . rtc_sanitize_coverage = "" + # Selects fixed-point code where possible. + rtc_prefer_fixed_point = false if (current_cpu == "arm" || current_cpu == "arm64") { rtc_prefer_fixed_point = true } @@ -152,6 +154,9 @@ declare_args() { rtc_use_h264 = proprietary_codecs && !is_android && !is_ios && !(is_win && !is_clang) + # Enable this flag to make webrtc::Mutex be implemented by absl::Mutex. + rtc_use_absl_mutex = false + # By default, use normal platform audio support or dummy audio, but don't # use file-based audio playout and record. rtc_use_dummy_audio_file_devices = false @@ -190,6 +195,11 @@ declare_args() { # doesn't assume /DUNICODE and /D_UNICODE but that it explicitly uses # wide character functions. rtc_win_undef_unicode = false + + # When set to true, a capturer implementation that uses the + # Windows.Graphics.Capture APIs will be available for use. These APIs are + # available in the Win 10 SDK v10.0.19041. 
+ rtc_enable_win_wgc = false } if (!build_with_mozilla) { @@ -236,6 +246,14 @@ declare_args() { # standalone WebRTC. rtc_include_internal_audio_device = !build_with_chromium + # Set this to true to enable the avx2 support in webrtc. + # TODO: Make sure that AVX2 works also for non-clang compilers. + if (is_clang == true) { + rtc_enable_avx2 = true + } else { + rtc_enable_avx2 = false + } + # Include tests in standalone checkout. rtc_include_tests = !build_with_chromium && !build_with_mozilla @@ -255,6 +273,10 @@ declare_args() { # Set this to true to disable webrtc metrics. rtc_disable_metrics = false + + # Set this to true to exclude the transient suppressor in the audio processing + # module from the build. + rtc_exclude_transient_suppressor = false } # Make it possible to provide custom locations for some libraries (move these @@ -265,7 +287,7 @@ rtc_opus_dir = "//third_party/opus" # Desktop capturer is supported only on Windows, OSX and Linux. rtc_desktop_capture_supported = (is_win && current_os != "winuwp") || is_mac || - (is_linux && (rtc_use_x11_extensions || rtc_use_pipewire)) + ((is_linux || is_chromeos) && (rtc_use_x11_extensions || rtc_use_pipewire)) ############################################################################### # Templates @@ -316,16 +338,19 @@ set_defaults("rtc_test") { set_defaults("rtc_library") { configs = rtc_add_configs suppressed_configs = [] + absl_deps = [] } set_defaults("rtc_source_set") { configs = rtc_add_configs suppressed_configs = [] + absl_deps = [] } set_defaults("rtc_static_library") { configs = rtc_add_configs suppressed_configs = [] + absl_deps = [] } set_defaults("rtc_executable") { @@ -425,7 +450,8 @@ template("rtc_test") { } if (!build_with_chromium && is_android) { android_manifest = webrtc_root + "test/android/AndroidManifest.xml" - min_sdk_version = 16 + min_sdk_version = 21 + target_sdk_version = 23 deps += [ webrtc_root + "test:native_test_java" ] } } @@ -518,6 +544,20 @@ template("rtc_source_set") { if 
(defined(invoker.public_configs)) { public_configs += invoker.public_configs } + + # If absl_deps is [], no action is needed. If not [], then it needs to be + # converted to //third_party/abseil-cpp:absl when build_with_chromium=true + # otherwise it just needs to be added to deps. + if (absl_deps != []) { + if (!defined(deps)) { + deps = [] + } + if (build_with_chromium) { + deps += [ "//third_party/abseil-cpp:absl" ] + } else { + deps += absl_deps + } + } } } @@ -593,6 +633,20 @@ template("rtc_static_library") { if (defined(invoker.public_configs)) { public_configs += invoker.public_configs } + + # If absl_deps is [], no action is needed. If not [], then it needs to be + # converted to //third_party/abseil-cpp:absl when build_with_chromium=true + # otherwise it just needs to be added to deps. + if (absl_deps != []) { + if (!defined(deps)) { + deps = [] + } + if (build_with_chromium) { + deps += [ "//third_party/abseil-cpp:absl" ] + } else { + deps += absl_deps + } + } } } @@ -705,6 +759,20 @@ template("rtc_library") { if (defined(invoker.public_configs)) { public_configs += invoker.public_configs } + + # If absl_deps is [], no action is needed. If not [], then it needs to be + # converted to //third_party/abseil-cpp:absl when build_with_chromium=true + # otherwise it just needs to be added to deps. 
+ if (absl_deps != []) { + if (!defined(deps)) { + deps = [] + } + if (build_with_chromium) { + deps += [ "//third_party/abseil-cpp:absl" ] + } else { + deps += absl_deps + } + } } } @@ -819,50 +887,43 @@ template("rtc_shared_library") { } if (is_ios) { - set_defaults("rtc_ios_xctest_test") { - configs = rtc_add_configs - suppressed_configs = [] - } - - template("rtc_ios_xctest_test") { - ios_xctest_test(target_name) { - forward_variables_from(invoker, - "*", - [ - "configs", - "public_configs", - "suppressed_configs", - "visibility", - ]) - forward_variables_from(invoker, [ "visibility" ]) - if (!defined(visibility)) { - visibility = webrtc_default_visibility - } - configs += invoker.configs - configs -= rtc_remove_configs - configs -= invoker.suppressed_configs - public_configs = [ rtc_common_inherited_config ] - if (defined(invoker.public_configs)) { - public_configs += invoker.public_configs - } - } - } - # TODO: Generate module.modulemap file to enable use in Swift # projects. See "mac_framework_bundle_with_umbrella_header". 
template("ios_framework_bundle_with_umbrella_header") { forward_variables_from(invoker, [ "output_name" ]) + this_target_name = target_name umbrella_header_path = - "$target_gen_dir/$output_name.framework/Headers/$output_name.h" + "$target_gen_dir/$output_name.framework/WebRTC/$output_name.h" + + action_foreach("create_bracket_include_headers_$target_name") { + script = "//tools_webrtc/apple/copy_framework_header.py" + sources = invoker.sources + output_name = invoker.output_name + outputs = [ + "$target_gen_dir/$output_name.framework/WebRTC/{{source_file_part}}", + ] + args = [ + "--input", + "{{source}}", + "--output", + rebase_path(target_gen_dir, root_build_dir) + + "/$output_name.framework/WebRTC/{{source_file_part}}", + ] + } ios_framework_bundle(target_name) { - forward_variables_from(invoker, "*", []) - - deps += [ ":copy_umbrella_header_$target_name" ] + forward_variables_from(invoker, "*", [ "public_headers" ]) + public_headers = get_target_outputs( + ":create_bracket_include_headers_$this_target_name") + deps += [ + ":copy_umbrella_header_$target_name", + ":create_bracket_include_headers_$target_name", + ] } action("umbrella_header_$target_name") { - forward_variables_from(invoker, [ "public_headers" ]) + public_headers = get_target_outputs( + ":create_bracket_include_headers_$this_target_name") script = "//tools_webrtc/ios/generate_umbrella_header.py" @@ -872,6 +933,7 @@ if (is_ios) { rebase_path(umbrella_header_path, root_build_dir), "--sources", ] + public_headers + deps = [ ":create_bracket_include_headers_$this_target_name" ] } copy("copy_umbrella_header_$target_name") { @@ -918,15 +980,34 @@ if (is_mac) { ":copy_framework_headers_$this_target_name", ":copy_modulemap_$this_target_name", ":copy_umbrella_header_$this_target_name", + ":create_bracket_include_headers_$this_target_name", ":modulemap_$this_target_name", ":umbrella_header_$this_target_name", ] } + action_foreach("create_bracket_include_headers_$this_target_name") { + script = 
"//tools_webrtc/apple/copy_framework_header.py" + sources = invoker.sources + output_name = invoker.output_name + outputs = [ + "$target_gen_dir/$output_name.framework/WebRTC/{{source_file_part}}", + ] + args = [ + "--input", + "{{source}}", + "--output", + rebase_path(target_gen_dir, root_build_dir) + + "/$output_name.framework/WebRTC/{{source_file_part}}", + ] + } + bundle_data("copy_framework_headers_$this_target_name") { - forward_variables_from(invoker, [ "sources" ]) + sources = get_target_outputs( + ":create_bracket_include_headers_$this_target_name") outputs = [ "{{bundle_contents_dir}}/Headers/{{source_file_part}}" ] + deps = [ ":create_bracket_include_headers_$this_target_name" ] } action("modulemap_$this_target_name") { @@ -947,7 +1028,8 @@ if (is_mac) { } action("umbrella_header_$this_target_name") { - forward_variables_from(invoker, [ "sources" ]) + sources = get_target_outputs( + ":create_bracket_include_headers_$this_target_name") script = "//tools_webrtc/ios/generate_umbrella_header.py" @@ -957,6 +1039,7 @@ if (is_mac) { rebase_path(umbrella_header_path, root_build_dir), "--sources", ] + sources + deps = [ ":create_bracket_include_headers_$this_target_name" ] } bundle_data("copy_umbrella_header_$this_target_name") { @@ -985,14 +1068,6 @@ if (is_android) { # Treat warnings as errors. errorprone_args += [ "-Werror" ] - # WebRTC supports API level 16 while Chromium only supports 19. - # (the manifest defines minimum supported SDK version) - if (defined(invoker.min_sdk_version)) { - min_sdk_version = invoker.min_sdk_version - } else { - min_sdk_version = 16 - } - # Add any arguments defined by the invoker. 
if (defined(invoker.errorprone_args)) { errorprone_args += invoker.errorprone_args @@ -1003,6 +1078,7 @@ if (is_android) { } no_build_hooks = true + not_needed([ "android_manifest" ]) } } diff --git a/whitespace.txt b/whitespace.txt index 7825b52c9d..42d622a4cb 100644 --- a/whitespace.txt +++ b/whitespace.txt @@ -3,3 +3,4 @@ Try to write something funny. And please don't add trailing whitespace. Once upon a time there was an elephant in Stockholm. Everyone knew about it, but nobody dared say anything. +In the end it didn't make a difference since everyone was working from home.